gcc/reload1.c
1 /* Reload pseudo regs into hard regs for insns that require hard regs.
2 Copyright (C) 1987, 1988, 1989, 1992, 1993 Free Software Foundation, Inc.
3
4 This file is part of GNU CC.
5
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
19
20
21 #include <stdio.h>
22 #include "config.h"
23 #include "rtl.h"
24 #include "obstack.h"
25 #include "insn-config.h"
26 #include "insn-flags.h"
27 #include "insn-codes.h"
28 #include "flags.h"
29 #include "expr.h"
30 #include "regs.h"
31 #include "hard-reg-set.h"
32 #include "reload.h"
33 #include "recog.h"
34 #include "basic-block.h"
35 #include "output.h"
36
37 /* This file contains the reload pass of the compiler, which is
38 run after register allocation has been done. It checks that
39 each insn is valid (operands required to be in registers really
40 are in registers of the proper class) and fixes up invalid ones
41 by copying values temporarily into registers for the insns
42 that need them.
43
44 The results of register allocation are described by the vector
45 reg_renumber; the insns still contain pseudo regs, but reg_renumber
46 can be used to find which hard reg, if any, a pseudo reg is in.
47
48 The technique we always use is to free up a few hard regs that are
49 called ``reload regs'', and for each place where a pseudo reg
50 must be in a hard reg, copy it temporarily into one of the reload regs.
51
52 All the pseudos that were formerly allocated to the hard regs that
53 are now in use as reload regs must be ``spilled''. This means
54 that they go to other hard regs, or to stack slots if no other
55 available hard regs can be found. Spilling can invalidate more
56 insns, requiring additional need for reloads, so we must keep checking
57 until the process stabilizes.
58
59 For machines with different classes of registers, we must keep track
60 of the register class needed for each reload, and make sure that
61 we allocate enough reload registers of each class.
62
63 The file reload.c contains the code that checks one insn for
64 validity and reports the reloads that it needs. This file
65 is in charge of scanning the entire rtl code, accumulating the
66 reload needs, spilling, assigning reload registers to use for
67 fixing up each insn, and generating the new insns to copy values
68 into the reload registers. */
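/* A small worked example of the above (illustrative only; the pseudo
   number is arbitrary): suppose pseudo reg 105 got no hard reg and some
   insn needs its value in a register of a particular class.  reload
   gives pseudo 105 a stack slot, and reload_as_needed emits an insn just
   before the user to copy that slot into one of the reload regs, then
   rewrites the insn to use the reload reg.  If the insn also sets pseudo
   105, a store back to the stack slot is emitted after it (an output
   reload).  */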
69
70
71 #ifndef REGISTER_MOVE_COST
72 #define REGISTER_MOVE_COST(x, y) 2
73 #endif
74
75 #ifndef MEMORY_MOVE_COST
76 #define MEMORY_MOVE_COST(x) 4
77 #endif
78 \f
79 /* During reload_as_needed, element N contains a REG rtx for the hard reg
80 into which reg N has been reloaded (perhaps for a previous insn). */
81 static rtx *reg_last_reload_reg;
82
83 /* Elt N nonzero if reg_last_reload_reg[N] has been set in this insn
84 for an output reload that stores into reg N. */
85 static char *reg_has_output_reload;
86
87 /* Indicates which hard regs are reload-registers for an output reload
88 in the current insn. */
89 static HARD_REG_SET reg_is_output_reload;
90
91 /* Element N is the constant value to which pseudo reg N is equivalent,
92 or zero if pseudo reg N is not equivalent to a constant.
93 find_reloads looks at this in order to replace pseudo reg N
94 with the constant it stands for. */
95 rtx *reg_equiv_constant;
96
97 /* Element N is a memory location to which pseudo reg N is equivalent,
98 prior to any register elimination (such as frame pointer to stack
99 pointer). Depending on whether or not it is a valid address, this value
100 is transferred to either reg_equiv_address or reg_equiv_mem. */
101 rtx *reg_equiv_memory_loc;
102
103 /* Element N is the address of stack slot to which pseudo reg N is equivalent.
104 This is used when the address is not valid as a memory address
105 (because its displacement is too big for the machine).  */
106 rtx *reg_equiv_address;
107
108 /* Element N is the memory slot to which pseudo reg N is equivalent,
109 or zero if pseudo reg N is not equivalent to a memory slot. */
110 rtx *reg_equiv_mem;
111
112 /* Widest width in which each pseudo reg is referred to (via subreg). */
113 static int *reg_max_ref_width;
114
115 /* Element N is the insn that initialized reg N from its equivalent
116 constant or memory slot. */
117 static rtx *reg_equiv_init;
118
119 /* During reload_as_needed, element N contains the last pseudo regno
120 reloaded into the Nth reload register. This vector is in parallel
121 with spill_regs. If that pseudo reg occupied more than one register,
122 reg_reloaded_contents points to that pseudo for each spill register in
123 use; all of these must remain set for an inheritance to occur. */
124 static int reg_reloaded_contents[FIRST_PSEUDO_REGISTER];
125
126 /* During reload_as_needed, element N contains the insn for which
127 the Nth reload register was last used. This vector is in parallel
128 with spill_regs, and its contents are significant only when
129 reg_reloaded_contents is significant. */
130 static rtx reg_reloaded_insn[FIRST_PSEUDO_REGISTER];
131
132 /* Number of spill-regs so far; number of valid elements of spill_regs. */
133 static int n_spills;
134
135 /* In parallel with spill_regs, contains REG rtx's for those regs.
136 Holds the last rtx used for any given reg, or 0 if it has never
137 been used for spilling yet. This rtx is reused, provided it has
138 the proper mode. */
139 static rtx spill_reg_rtx[FIRST_PSEUDO_REGISTER];
140
141 /* In parallel with spill_regs, contains nonzero for a spill reg
142 that was stored after the last time it was used.
143 The precise value is the insn generated to do the store. */
144 static rtx spill_reg_store[FIRST_PSEUDO_REGISTER];
145
146 /* This table is the inverse mapping of spill_regs:
147 indexed by hard reg number,
148 it contains the position of that reg in spill_regs,
149 or -1 for something that is not in spill_regs. */
150 static short spill_reg_order[FIRST_PSEUDO_REGISTER];
151
152 /* This reg set indicates registers that may not be used for retrying global
153 allocation. The registers that may not be used include all spill registers
154 and the frame pointer (if we are using one). */
155 HARD_REG_SET forbidden_regs;
156
157 /* This reg set indicates registers that are not good for spill registers.
158 They will not be used to complete groups of spill registers. This includes
159 all fixed registers, registers that may be eliminated, and, if
160 SMALL_REGISTER_CLASSES is not defined, registers explicitly used in the rtl.
161
162 (spill_reg_order prevents these registers from being used to start a
163 group.) */
164 static HARD_REG_SET bad_spill_regs;
165
166 /* Describes order of use of registers for reloading
167 of spilled pseudo-registers. `spills' is the number of
168 elements that are actually valid; new ones are added at the end. */
169 static short spill_regs[FIRST_PSEUDO_REGISTER];
170
171 /* Describes order of preference for putting regs into spill_regs.
172 Contains the numbers of all the hard regs, in order most preferred first.
173 This order is different for each function.
174 It is set up by order_regs_for_reload.
175 Empty elements at the end contain -1. */
176 static short potential_reload_regs[FIRST_PSEUDO_REGISTER];
177
178 /* 1 for a hard register that appears explicitly in the rtl
179 (for example, function value registers, special registers
180 used by insns, structure value pointer registers). */
181 static char regs_explicitly_used[FIRST_PSEUDO_REGISTER];
182
183 /* Indicates if a register was counted against the need for
184 groups. 0 means it can count against max_nongroup instead. */
185 static HARD_REG_SET counted_for_groups;
186
187 /* Indicates if a register was counted against the need for
188 non-groups. 0 means it can become part of a new group.
189 During choose_reload_regs, 1 here means don't use this reg
190 as part of a group, even if it seems to be otherwise ok. */
191 static HARD_REG_SET counted_for_nongroups;
192
193 /* Indexed by pseudo reg number N,
194 nonzero if we may not delete stores into the real (memory) home of pseudo N.
195 This is set if we already substituted a memory equivalent in some uses,
196 which happens when we have to eliminate the fp from it. */
197 static char *cannot_omit_stores;
198
199 /* Nonzero if indirect addressing is supported on the machine; this means
200 that spilling (REG n) does not require reloading it into a register in
201 order to do (MEM (REG n)) or (MEM (PLUS (REG n) (CONST_INT c))). The
202 value indicates the level of indirect addressing supported, e.g., two
203 means that (MEM (MEM (REG n))) is also valid if (REG n) does not get
204 a hard register. */
205
206 static char spill_indirect_levels;
207
208 /* Nonzero if indirect addressing is supported when the innermost MEM is
209 of the form (MEM (SYMBOL_REF sym)). It is assumed that the level to
210 which these are valid is the same as spill_indirect_levels, above. */
211
212 char indirect_symref_ok;
213
214 /* Nonzero if an address (plus (reg frame_pointer) (reg ...)) is valid. */
215
216 char double_reg_address_ok;
217
218 /* Record the stack slot for each spilled hard register. */
219
220 static rtx spill_stack_slot[FIRST_PSEUDO_REGISTER];
221
222 /* Width allocated so far for that stack slot. */
223
224 static int spill_stack_slot_width[FIRST_PSEUDO_REGISTER];
225
226 /* Indexed by register class and basic block number, nonzero if there is
227 any need for a spill register of that class in that basic block.
228 The pointer is 0 if we did stupid allocation and don't know
229 the structure of basic blocks. */
230
231 char *basic_block_needs[N_REG_CLASSES];
232
233 /* First uid used by insns created by reload in this function.
234 Used in find_equiv_reg. */
235 int reload_first_uid;
236
237 /* Flag set by local-alloc or global-alloc if anything is live in
238 a call-clobbered reg across calls. */
239
240 int caller_save_needed;
241
242 /* Set to 1 while reload_as_needed is operating.
243 Required by some machines to handle any generated moves differently. */
244
245 int reload_in_progress = 0;
246
247 /* These arrays record the insn_code of insns that may be needed to
248 perform input and output reloads of special objects. They provide a
249 place to pass a scratch register. */
250
251 enum insn_code reload_in_optab[NUM_MACHINE_MODES];
252 enum insn_code reload_out_optab[NUM_MACHINE_MODES];
253
254 /* This obstack is used for allocation of rtl during register elimination.
255 The allocated storage can be freed once find_reloads has processed the
256 insn. */
257
258 struct obstack reload_obstack;
259 char *reload_firstobj;
260
261 #define obstack_chunk_alloc xmalloc
262 #define obstack_chunk_free free
263
264 /* List of labels that must never be deleted. */
265 extern rtx forced_labels;
266 \f
267 /* This structure is used to record information about register eliminations.
268 Each array entry describes one possible way of eliminating a register
269 in favor of another. If there is more than one way of eliminating a
270 particular register, the most preferred should be specified first. */
271
272 static struct elim_table
273 {
274 int from; /* Register number to be eliminated. */
275 int to; /* Register number used as replacement. */
276 int initial_offset; /* Initial difference between values. */
277 int can_eliminate; /* Non-zero if this elimination can be done. */
278 int can_eliminate_previous; /* Value of CAN_ELIMINATE in previous scan over
279 insns made by reload. */
280 int offset; /* Current offset between the two regs. */
281 int max_offset; /* Maximum offset between the two regs. */
282 int previous_offset; /* Offset at end of previous insn. */
283 int ref_outside_mem; /* "to" has been referenced outside a MEM. */
284 rtx from_rtx; /* REG rtx for the register to be eliminated.
285 We cannot simply compare the number since
286 we might then spuriously replace a hard
287 register corresponding to a pseudo
288 assigned to the reg to be eliminated. */
289 rtx to_rtx; /* REG rtx for the replacement. */
290 } reg_eliminate[] =
291
292 /* If a set of eliminable registers was specified, define the table from it.
293 Otherwise, default to the normal case of the frame pointer being
294 replaced by the stack pointer. */
295
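/* A hypothetical ELIMINABLE_REGS definition (the real one comes from the
   target's tm.h) is a brace list of {from, to} pairs, e.g.
       {{ ARG_POINTER_REGNUM, STACK_POINTER_REGNUM},
        { ARG_POINTER_REGNUM, FRAME_POINTER_REGNUM},
        { FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM}}
   with the most preferred elimination of each register listed first;
   only the `from' and `to' fields are initialized here.  */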
296 #ifdef ELIMINABLE_REGS
297 ELIMINABLE_REGS;
298 #else
299 {{ FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM}};
300 #endif
301
302 #define NUM_ELIMINABLE_REGS (sizeof reg_eliminate / sizeof reg_eliminate[0])
303
304 /* Record the number of pending eliminations that have an offset not equal
305 to their initial offset. If non-zero, we use a new copy of each
306 replacement result in any insns encountered. */
307 static int num_not_at_initial_offset;
308
309 /* Count the number of registers that we may be able to eliminate. */
310 static int num_eliminable;
311
312 /* For each label, we record the offset of each elimination. If we reach
313 a label by more than one path and an offset differs, we cannot do the
314 elimination. This information is indexed by the number of the label.
315 The first table is an array of flags that records whether we have yet
316 encountered a label and the second table is an array of arrays, one
317 entry in the latter array for each elimination. */
318
319 static char *offsets_known_at;
320 static int (*offsets_at)[NUM_ELIMINABLE_REGS];
321
322 /* Number of labels in the current function. */
323
324 static int num_labels;
325
326 struct hard_reg_n_uses { int regno; int uses; };
327 \f
328 static int possible_group_p PROTO((int, int *));
329 static void count_possible_groups PROTO((int *, enum machine_mode *,
330 int *));
331 static int modes_equiv_for_class_p PROTO((enum machine_mode,
332 enum machine_mode,
333 enum reg_class));
334 static void spill_failure PROTO((rtx));
335 static int new_spill_reg PROTO((int, int, int *, int *, int,
336 FILE *));
337 static void delete_dead_insn PROTO((rtx));
338 static void alter_reg PROTO((int, int));
339 static void mark_scratch_live PROTO((rtx));
340 static void set_label_offsets PROTO((rtx, rtx, int));
341 static int eliminate_regs_in_insn PROTO((rtx, int));
342 static void mark_not_eliminable PROTO((rtx, rtx));
343 static int spill_hard_reg PROTO((int, int, FILE *, int));
344 static void scan_paradoxical_subregs PROTO((rtx));
345 static int hard_reg_use_compare PROTO((struct hard_reg_n_uses *,
346 struct hard_reg_n_uses *));
347 static void order_regs_for_reload PROTO((void));
348 static void reload_as_needed PROTO((rtx, int));
349 static void forget_old_reloads_1 PROTO((rtx, rtx));
350 static int reload_reg_class_lower PROTO((short *, short *));
351 static void mark_reload_reg_in_use PROTO((int, int, enum reload_type,
352 enum machine_mode));
353 static void clear_reload_reg_in_use PROTO((int, int, enum reload_type,
354 enum machine_mode));
355 static int reload_reg_free_p PROTO((int, int, enum reload_type));
356 static int reload_reg_free_before_p PROTO((int, int, enum reload_type));
357 static int reload_reg_reaches_end_p PROTO((int, int, enum reload_type));
358 static int allocate_reload_reg PROTO((int, rtx, int, int));
359 static void choose_reload_regs PROTO((rtx, rtx));
360 static void merge_assigned_reloads PROTO((rtx));
361 static void emit_reload_insns PROTO((rtx));
362 static void delete_output_reload PROTO((rtx, int, rtx));
363 static void inc_for_reload PROTO((rtx, rtx, int));
364 static int constraint_accepts_reg_p PROTO((char *, rtx));
365 static int count_occurrences PROTO((rtx, rtx));
366 \f
367 /* Initialize the reload pass once per compilation. */
368
369 void
370 init_reload ()
371 {
372 register int i;
373
374 /* Often (MEM (REG n)) is still valid even if (REG n) is put on the stack.
375 Set spill_indirect_levels to the number of levels such addressing is
376 permitted, zero if it is not permitted at all. */
377
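/* The probe address built below is
   (mem:Pmode (plus:Pmode (reg:Pmode (LAST_VIRTUAL_REGISTER + 1)) (const_int 4))),
   i.e. a MEM through a pseudo register plus a small offset; each pass of
   the loop below wraps it in one more MEM.  */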
378 register rtx tem
379 = gen_rtx (MEM, Pmode,
380 gen_rtx (PLUS, Pmode,
381 gen_rtx (REG, Pmode, LAST_VIRTUAL_REGISTER + 1),
382 GEN_INT (4)));
383 spill_indirect_levels = 0;
384
385 while (memory_address_p (QImode, tem))
386 {
387 spill_indirect_levels++;
388 tem = gen_rtx (MEM, Pmode, tem);
389 }
390
391 /* See if indirect addressing is valid for (MEM (SYMBOL_REF ...)). */
392
393 tem = gen_rtx (MEM, Pmode, gen_rtx (SYMBOL_REF, Pmode, "foo"));
394 indirect_symref_ok = memory_address_p (QImode, tem);
395
396 /* See if reg+reg is a valid (and offsettable) address. */
397
398 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
399 {
400 tem = gen_rtx (PLUS, Pmode,
401 gen_rtx (REG, Pmode, HARD_FRAME_POINTER_REGNUM),
402 gen_rtx (REG, Pmode, i));
403 /* This way, we make sure that reg+reg is an offsettable address. */
404 tem = plus_constant (tem, 4);
405
406 if (memory_address_p (QImode, tem))
407 {
408 double_reg_address_ok = 1;
409 break;
410 }
411 }
412
413 /* Initialize obstack for our rtl allocation. */
414 gcc_obstack_init (&reload_obstack);
415 reload_firstobj = (char *) obstack_alloc (&reload_obstack, 0);
416 }
417
418 /* Main entry point for the reload pass.
419
420 FIRST is the first insn of the function being compiled.
421
422 GLOBAL nonzero means we were called from global_alloc
423 and should attempt to reallocate any pseudoregs that we
424 displace from hard regs we will use for reloads.
425 If GLOBAL is zero, we do not have enough information to do that,
426 so any pseudo reg that is spilled must go to the stack.
427
428 DUMPFILE is the global-reg debugging dump file stream, or 0.
429 If it is nonzero, messages are written to it to describe
430 which registers are seized as reload regs, which pseudo regs
431 are spilled from them, and where the pseudo regs are reallocated to.
432
433 Return value is nonzero if reload failed
434 and we must not do any more for this function. */
435
436 int
437 reload (first, global, dumpfile)
438 rtx first;
439 int global;
440 FILE *dumpfile;
441 {
442 register int class;
443 register int i, j;
444 register rtx insn;
445 register struct elim_table *ep;
446
447 int something_changed;
448 int something_needs_reloads;
449 int something_needs_elimination;
450 int new_basic_block_needs;
451 enum reg_class caller_save_spill_class = NO_REGS;
452 int caller_save_group_size = 1;
453
454 /* Nonzero means we couldn't get enough spill regs. */
455 int failure = 0;
456
457 /* The basic block number currently being processed for INSN. */
458 int this_block;
459
460 /* Make sure even insns with volatile mem refs are recognizable. */
461 init_recog ();
462
463 /* Enable find_equiv_reg to distinguish insns made by reload. */
464 reload_first_uid = get_max_uid ();
465
466 for (i = 0; i < N_REG_CLASSES; i++)
467 basic_block_needs[i] = 0;
468
469 #ifdef SECONDARY_MEMORY_NEEDED
470 /* Initialize the secondary memory table. */
471 clear_secondary_mem ();
472 #endif
473
474 /* Remember which hard regs appear explicitly
475 before we merge into `regs_ever_live' the ones in which
476 pseudo regs have been allocated. */
477 bcopy (regs_ever_live, regs_explicitly_used, sizeof regs_ever_live);
478
479 /* We don't have a stack slot for any spill reg yet. */
480 bzero (spill_stack_slot, sizeof spill_stack_slot);
481 bzero (spill_stack_slot_width, sizeof spill_stack_slot_width);
482
483 /* Initialize the save area information for caller-save, in case some
484 are needed. */
485 init_save_areas ();
486
487 /* Compute which hard registers are now in use
488 as homes for pseudo registers.
489 This is done here rather than (eg) in global_alloc
490 because this point is reached even if not optimizing. */
491
492 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
493 mark_home_live (i);
494
495 for (i = 0; i < scratch_list_length; i++)
496 if (scratch_list[i])
497 mark_scratch_live (scratch_list[i]);
498
499 /* Make sure that the last insn in the chain
500 is not something that needs reloading. */
501 emit_note (NULL_PTR, NOTE_INSN_DELETED);
502
503 /* Find all the pseudo registers that didn't get hard regs
504 but do have known equivalent constants or memory slots.
505 These include parameters (known equivalent to parameter slots)
506 and cse'd or loop-moved constant memory addresses.
507
508 Record constant equivalents in reg_equiv_constant
509 so they will be substituted by find_reloads.
510 Record memory equivalents in reg_equiv_memory_loc so they can
511 be substituted eventually by altering the REG-rtx's. */
512
513 reg_equiv_constant = (rtx *) alloca (max_regno * sizeof (rtx));
514 bzero (reg_equiv_constant, max_regno * sizeof (rtx));
515 reg_equiv_memory_loc = (rtx *) alloca (max_regno * sizeof (rtx));
516 bzero (reg_equiv_memory_loc, max_regno * sizeof (rtx));
517 reg_equiv_mem = (rtx *) alloca (max_regno * sizeof (rtx));
518 bzero (reg_equiv_mem, max_regno * sizeof (rtx));
519 reg_equiv_init = (rtx *) alloca (max_regno * sizeof (rtx));
520 bzero (reg_equiv_init, max_regno * sizeof (rtx));
521 reg_equiv_address = (rtx *) alloca (max_regno * sizeof (rtx));
522 bzero (reg_equiv_address, max_regno * sizeof (rtx));
523 reg_max_ref_width = (int *) alloca (max_regno * sizeof (int));
524 bzero (reg_max_ref_width, max_regno * sizeof (int));
525 cannot_omit_stores = (char *) alloca (max_regno);
526 bzero (cannot_omit_stores, max_regno);
527
528 /* Look for REG_EQUIV notes; record what each pseudo is equivalent to.
529 Also find all paradoxical subregs
530 and find largest such for each pseudo. */
531
532 for (insn = first; insn; insn = NEXT_INSN (insn))
533 {
534 rtx set = single_set (insn);
535
536 if (set != 0 && GET_CODE (SET_DEST (set)) == REG)
537 {
538 rtx note = find_reg_note (insn, REG_EQUIV, NULL_RTX);
539 if (note
540 #ifdef LEGITIMATE_PIC_OPERAND_P
541 && (! CONSTANT_P (XEXP (note, 0)) || ! flag_pic
542 || LEGITIMATE_PIC_OPERAND_P (XEXP (note, 0)))
543 #endif
544 )
545 {
546 rtx x = XEXP (note, 0);
547 i = REGNO (SET_DEST (set));
548 if (i > LAST_VIRTUAL_REGISTER)
549 {
550 if (GET_CODE (x) == MEM)
551 reg_equiv_memory_loc[i] = x;
552 else if (CONSTANT_P (x))
553 {
554 if (LEGITIMATE_CONSTANT_P (x))
555 reg_equiv_constant[i] = x;
556 else
557 reg_equiv_memory_loc[i]
558 = force_const_mem (GET_MODE (SET_DEST (set)), x);
559 }
560 else
561 continue;
562
563 /* If this register is being made equivalent to a MEM
564 and the MEM is not SET_SRC, the equivalencing insn
565 is one with the MEM as a SET_DEST and it occurs later.
566 So don't mark this insn now. */
567 if (GET_CODE (x) != MEM
568 || rtx_equal_p (SET_SRC (set), x))
569 reg_equiv_init[i] = insn;
570 }
571 }
572 }
573
574 /* If this insn is setting a MEM from a register equivalent to it,
575 this is the equivalencing insn. */
576 else if (set && GET_CODE (SET_DEST (set)) == MEM
577 && GET_CODE (SET_SRC (set)) == REG
578 && reg_equiv_memory_loc[REGNO (SET_SRC (set))]
579 && rtx_equal_p (SET_DEST (set),
580 reg_equiv_memory_loc[REGNO (SET_SRC (set))]))
581 reg_equiv_init[REGNO (SET_SRC (set))] = insn;
582
583 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
584 scan_paradoxical_subregs (PATTERN (insn));
585 }
586
587 /* Does this function require a frame pointer? */
588
589 frame_pointer_needed = (! flag_omit_frame_pointer
590 #ifdef EXIT_IGNORE_STACK
591 /* ?? If EXIT_IGNORE_STACK is set, we will not save
592 and restore sp for alloca. So we can't eliminate
593 the frame pointer in that case. At some point,
594 we should improve this by emitting the
595 sp-adjusting insns for this case. */
596 || (current_function_calls_alloca
597 && EXIT_IGNORE_STACK)
598 #endif
599 || FRAME_POINTER_REQUIRED);
600
601 num_eliminable = 0;
602
603 /* Initialize the table of registers to eliminate. The way we do this
604 depends on how the eliminable registers were defined. */
605 #ifdef ELIMINABLE_REGS
606 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
607 {
608 ep->can_eliminate = ep->can_eliminate_previous
609 = (CAN_ELIMINATE (ep->from, ep->to)
610 && (ep->from != HARD_FRAME_POINTER_REGNUM
611 || ! frame_pointer_needed));
612 }
613 #else
614 reg_eliminate[0].can_eliminate = reg_eliminate[0].can_eliminate_previous
615 = ! frame_pointer_needed;
616 #endif
617
618 /* Count the number of eliminable registers and build the FROM and TO
619 REG rtx's. Note that code in gen_rtx will cause, e.g.,
620 gen_rtx (REG, Pmode, STACK_POINTER_REGNUM) to equal stack_pointer_rtx.
621 We depend on this. */
622 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
623 {
624 num_eliminable += ep->can_eliminate;
625 ep->from_rtx = gen_rtx (REG, Pmode, ep->from);
626 ep->to_rtx = gen_rtx (REG, Pmode, ep->to);
627 }
628
629 num_labels = max_label_num () - get_first_label_num ();
630
631 /* Allocate the tables used to store offset information at labels. */
632 offsets_known_at = (char *) alloca (num_labels);
633 offsets_at
634 = (int (*)[NUM_ELIMINABLE_REGS])
635 alloca (num_labels * NUM_ELIMINABLE_REGS * sizeof (int));
636
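/* Bias both pointers so the tables can be indexed directly by label
   number; CODE_LABEL numbers start at get_first_label_num (), not 0.  */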
637 offsets_known_at -= get_first_label_num ();
638 offsets_at -= get_first_label_num ();
639
640 /* Alter each pseudo-reg rtx to contain its hard reg number.
641 Assign stack slots to the pseudos that lack hard regs or equivalents.
642 Do not touch virtual registers. */
643
644 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_regno; i++)
645 alter_reg (i, -1);
646
647 /* Round size of stack frame to BIGGEST_ALIGNMENT. This must be done here
648 because the stack size may be a part of the offset computation for
649 register elimination. */
650 assign_stack_local (BLKmode, 0, 0);
651
652 /* If we have some registers we think can be eliminated, scan all insns to
653 see if there is an insn that sets one of these registers to something
654 other than itself plus a constant. If so, the register cannot be
655 eliminated. Doing this scan here eliminates an extra pass through the
656 main reload loop in the most common case where register elimination
657 cannot be done. */
658 for (insn = first; insn && num_eliminable; insn = NEXT_INSN (insn))
659 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
660 || GET_CODE (insn) == CALL_INSN)
661 note_stores (PATTERN (insn), mark_not_eliminable);
662
663 #ifndef REGISTER_CONSTRAINTS
664 /* If all the pseudo regs have hard regs,
665 except for those that are never referenced,
666 we know that no reloads are needed. */
667 /* But that is not true if there are register constraints, since
668 in that case some pseudos might be in the wrong kind of hard reg. */
669
670 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
671 if (reg_renumber[i] == -1 && reg_n_refs[i] != 0)
672 break;
673
674 if (i == max_regno && num_eliminable == 0 && ! caller_save_needed)
675 return 0;
676 #endif
677
678 /* Compute the order of preference for hard registers to spill.
679 Store them by decreasing preference in potential_reload_regs. */
680
681 order_regs_for_reload ();
682
683 /* So far, no hard regs have been spilled. */
684 n_spills = 0;
685 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
686 spill_reg_order[i] = -1;
687
688 /* On most machines, we can't use any register explicitly used in the
689 rtl as a spill register. But on some, we have to. Those will have
690 taken care to keep the life of hard regs as short as possible. */
691
692 #ifdef SMALL_REGISTER_CLASSES
693 CLEAR_HARD_REG_SET (forbidden_regs);
694 #else
695 COPY_HARD_REG_SET (forbidden_regs, bad_spill_regs);
696 #endif
697
698 /* Spill any hard regs that we know we can't eliminate. */
699 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
700 if (! ep->can_eliminate)
701 {
702 spill_hard_reg (ep->from, global, dumpfile, 1);
703 regs_ever_live[ep->from] = 1;
704 }
705
706 if (global)
707 for (i = 0; i < N_REG_CLASSES; i++)
708 {
709 basic_block_needs[i] = (char *)alloca (n_basic_blocks);
710 bzero (basic_block_needs[i], n_basic_blocks);
711 }
712
713 /* From now on, we need to emit any moves without making new pseudos. */
714 reload_in_progress = 1;
715
716 /* This loop scans the entire function each go-round
717 and repeats until one repetition spills no additional hard regs. */
718
719 /* This flag is set when a pseudo reg is spilled,
720 to require another pass. Note that getting an additional reload
721 reg does not necessarily imply any pseudo reg was spilled;
722 sometimes we find a reload reg that no pseudo reg was allocated in. */
723 something_changed = 1;
724 /* This flag is set if there are any insns that require reloading. */
725 something_needs_reloads = 0;
726 /* This flag is set if there are any insns that require register
727 eliminations. */
728 something_needs_elimination = 0;
729 while (something_changed)
730 {
731 rtx after_call = 0;
732
733 /* For each class, number of reload regs needed in that class.
734 This is the maximum over all insns of the needs in that class
735 of the individual insn. */
736 int max_needs[N_REG_CLASSES];
737 /* For each class, size of group of consecutive regs
738 that is needed for the reloads of this class. */
739 int group_size[N_REG_CLASSES];
740 /* For each class, max number of consecutive groups needed.
741 (Each group contains group_size[CLASS] consecutive registers.) */
742 int max_groups[N_REG_CLASSES];
743 /* For each class, max number needed of regs that don't belong
744 to any of the groups. */
745 int max_nongroups[N_REG_CLASSES];
746 /* For each class, the machine mode which requires consecutive
747 groups of regs of that class.
748 If two different modes ever require groups of one class,
749 they must be the same size and equally restrictive for that class,
750 otherwise we can't handle the complexity. */
751 enum machine_mode group_mode[N_REG_CLASSES];
752 /* Record the insn where each maximum need is first found. */
753 rtx max_needs_insn[N_REG_CLASSES];
754 rtx max_groups_insn[N_REG_CLASSES];
755 rtx max_nongroups_insn[N_REG_CLASSES];
756 rtx x;
757 int starting_frame_size = get_frame_size ();
758 static char *reg_class_names[] = REG_CLASS_NAMES;
759
760 something_changed = 0;
761 bzero (max_needs, sizeof max_needs);
762 bzero (max_groups, sizeof max_groups);
763 bzero (max_nongroups, sizeof max_nongroups);
764 bzero (max_needs_insn, sizeof max_needs_insn);
765 bzero (max_groups_insn, sizeof max_groups_insn);
766 bzero (max_nongroups_insn, sizeof max_nongroups_insn);
767 bzero (group_size, sizeof group_size);
768 for (i = 0; i < N_REG_CLASSES; i++)
769 group_mode[i] = VOIDmode;
770
771 /* Keep track of which basic blocks need the reloads. */
772 this_block = 0;
773
774 /* Remember whether any element of basic_block_needs
775 changes from 0 to 1 in this pass. */
776 new_basic_block_needs = 0;
777
778 /* Reset all offsets on eliminable registers to their initial values. */
779 #ifdef ELIMINABLE_REGS
780 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
781 {
782 INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, ep->initial_offset);
783 ep->previous_offset = ep->offset
784 = ep->max_offset = ep->initial_offset;
785 }
786 #else
787 #ifdef INITIAL_FRAME_POINTER_OFFSET
788 INITIAL_FRAME_POINTER_OFFSET (reg_eliminate[0].initial_offset);
789 #else
790 if (!FRAME_POINTER_REQUIRED)
791 abort ();
792 reg_eliminate[0].initial_offset = 0;
793 #endif
794 reg_eliminate[0].previous_offset = reg_eliminate[0].max_offset
795 = reg_eliminate[0].offset = reg_eliminate[0].initial_offset;
796 #endif
797
798 num_not_at_initial_offset = 0;
799
800 bzero (&offsets_known_at[get_first_label_num ()], num_labels);
801
802 /* Set a known offset for each forced label to be at the initial offset
803 of each elimination. We do this because we assume that all
804 computed jumps occur from a location where each elimination is
805 at its initial offset. */
806
807 for (x = forced_labels; x; x = XEXP (x, 1))
808 if (XEXP (x, 0))
809 set_label_offsets (XEXP (x, 0), NULL_RTX, 1);
810
811 /* For each pseudo register that has an equivalent location defined,
812 try to eliminate any eliminable registers (such as the frame pointer)
813 assuming initial offsets for the replacement register, which
814 is the normal case.
815
816 If the resulting location is directly addressable, substitute
817 the MEM we just got directly for the old REG.
818
819 If it is not addressable but is a constant or the sum of a hard reg
820 and constant, it is probably not addressable because the constant is
821 out of range; in that case record the address; we will generate
822 hairy code to compute the address in a register each time it is
823 needed. Similarly if it is a hard register, but one that is not
824 valid as an address register.
825
826 If the location is not addressable, but does not have one of the
827 above forms, assign a stack slot. We have to do this to avoid the
828 potential of producing lots of reloads if, e.g., a location involves
829 a pseudo that didn't get a hard register and has an equivalent memory
830 location that also involves a pseudo that didn't get a hard register.
831
832 Perhaps at some point we will improve reload_when_needed handling
833 so this problem goes away. But that's very hairy. */
834
835 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
836 if (reg_renumber[i] < 0 && reg_equiv_memory_loc[i])
837 {
838 rtx x = eliminate_regs (reg_equiv_memory_loc[i], 0, NULL_RTX);
839
840 if (strict_memory_address_p (GET_MODE (regno_reg_rtx[i]),
841 XEXP (x, 0)))
842 reg_equiv_mem[i] = x, reg_equiv_address[i] = 0;
843 else if (CONSTANT_P (XEXP (x, 0))
844 || (GET_CODE (XEXP (x, 0)) == REG
845 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
846 || (GET_CODE (XEXP (x, 0)) == PLUS
847 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
848 && (REGNO (XEXP (XEXP (x, 0), 0))
849 < FIRST_PSEUDO_REGISTER)
850 && CONSTANT_P (XEXP (XEXP (x, 0), 1))))
851 reg_equiv_address[i] = XEXP (x, 0), reg_equiv_mem[i] = 0;
852 else
853 {
854 /* Make a new stack slot. Then indicate that something
855 changed so we go back and recompute offsets for
856 eliminable registers because the allocation of memory
857 below might change some offset. reg_equiv_{mem,address}
858 will be set up for this pseudo on the next pass around
859 the loop. */
860 reg_equiv_memory_loc[i] = 0;
861 reg_equiv_init[i] = 0;
862 alter_reg (i, -1);
863 something_changed = 1;
864 }
865 }
866
867 /* If we allocated another pseudo to the stack, redo elimination
868 bookkeeping. */
869 if (something_changed)
870 continue;
871
872 /* If caller-saves needs a group, initialize the group to include
873 the size and mode required for caller-saves. */
874
875 if (caller_save_group_size > 1)
876 {
877 group_mode[(int) caller_save_spill_class] = Pmode;
878 group_size[(int) caller_save_spill_class] = caller_save_group_size;
879 }
880
881 /* Compute the maximum number of additional registers needed by any instruction.
882 Collect information separately for each class of regs. */
883
884 for (insn = first; insn; insn = NEXT_INSN (insn))
885 {
886 if (global && this_block + 1 < n_basic_blocks
887 && insn == basic_block_head[this_block+1])
888 ++this_block;
889
890 /* If this is a label, a JUMP_INSN, or has REG_NOTES (which
891 might include REG_LABEL), we need to see what effects this
892 has on the known offsets at labels. */
893
894 if (GET_CODE (insn) == CODE_LABEL || GET_CODE (insn) == JUMP_INSN
895 || (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
896 && REG_NOTES (insn) != 0))
897 set_label_offsets (insn, insn, 0);
898
899 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
900 {
901 /* Nonzero means don't use a reload reg that overlaps
902 the place where a function value can be returned. */
903 rtx avoid_return_reg = 0;
904
905 rtx old_body = PATTERN (insn);
906 int old_code = INSN_CODE (insn);
907 rtx old_notes = REG_NOTES (insn);
908 int did_elimination = 0;
909 int max_total_input_groups = 0, max_total_output_groups = 0;
910
911 /* To compute the number of reload registers of each class
912 needed for an insn, we must simulate what choose_reload_regs
913 can do. We do this by splitting an insn into an "input" and
914 an "output" part. RELOAD_OTHER reloads are used in both.
915 The input part uses those reloads, RELOAD_FOR_INPUT reloads,
916 which must be live over the entire input section of reloads,
917 and the maximum of all the RELOAD_FOR_INPUT_ADDRESS and
918 RELOAD_FOR_OPERAND_ADDRESS reloads, which conflict with the
919 inputs.
920
921 The registers needed for output are RELOAD_OTHER and
922 RELOAD_FOR_OUTPUT, which are live for the entire output
923 portion, and the maximum of all the RELOAD_FOR_OUTPUT_ADDRESS
924 reloads for each operand.
925
926 The total number of registers needed is the maximum of the
927 inputs and outputs. */
928
929 /* These just count RELOAD_OTHER. */
930 int insn_needs[N_REG_CLASSES];
931 int insn_groups[N_REG_CLASSES];
932 int insn_total_groups = 0;
933
934 /* Count RELOAD_FOR_INPUT reloads. */
935 int insn_needs_for_inputs[N_REG_CLASSES];
936 int insn_groups_for_inputs[N_REG_CLASSES];
937 int insn_total_groups_for_inputs = 0;
938
939 /* Count RELOAD_FOR_OUTPUT reloads. */
940 int insn_needs_for_outputs[N_REG_CLASSES];
941 int insn_groups_for_outputs[N_REG_CLASSES];
942 int insn_total_groups_for_outputs = 0;
943
944 /* Count RELOAD_FOR_INSN reloads. */
945 int insn_needs_for_insn[N_REG_CLASSES];
946 int insn_groups_for_insn[N_REG_CLASSES];
947 int insn_total_groups_for_insn = 0;
948
949 /* Count RELOAD_FOR_OTHER_ADDRESS reloads. */
950 int insn_needs_for_other_addr[N_REG_CLASSES];
951 int insn_groups_for_other_addr[N_REG_CLASSES];
952 int insn_total_groups_for_other_addr = 0;
953
954 /* Count RELOAD_FOR_INPUT_ADDRESS reloads. */
955 int insn_needs_for_in_addr[MAX_RECOG_OPERANDS][N_REG_CLASSES];
956 int insn_groups_for_in_addr[MAX_RECOG_OPERANDS][N_REG_CLASSES];
957 int insn_total_groups_for_in_addr[MAX_RECOG_OPERANDS];
958
959 /* Count RELOAD_FOR_OUTPUT_ADDRESS reloads. */
960 int insn_needs_for_out_addr[MAX_RECOG_OPERANDS][N_REG_CLASSES];
961 int insn_groups_for_out_addr[MAX_RECOG_OPERANDS][N_REG_CLASSES];
962 int insn_total_groups_for_out_addr[MAX_RECOG_OPERANDS];
963
964 /* Count RELOAD_FOR_OPERAND_ADDRESS reloads. */
965 int insn_needs_for_op_addr[N_REG_CLASSES];
966 int insn_groups_for_op_addr[N_REG_CLASSES];
967 int insn_total_groups_for_op_addr = 0;
968
969 #if 0 /* This wouldn't work nowadays, since optimize_bit_field
970 looks for non-strict memory addresses. */
971 /* Optimization: a bit-field instruction whose field
972 happens to be a byte or halfword in memory
973 can be changed to a move instruction. */
974
975 if (GET_CODE (PATTERN (insn)) == SET)
976 {
977 rtx dest = SET_DEST (PATTERN (insn));
978 rtx src = SET_SRC (PATTERN (insn));
979
980 if (GET_CODE (dest) == ZERO_EXTRACT
981 || GET_CODE (dest) == SIGN_EXTRACT)
982 optimize_bit_field (PATTERN (insn), insn, reg_equiv_mem);
983 if (GET_CODE (src) == ZERO_EXTRACT
984 || GET_CODE (src) == SIGN_EXTRACT)
985 optimize_bit_field (PATTERN (insn), insn, reg_equiv_mem);
986 }
987 #endif
988
989 /* If needed, eliminate any eliminable registers. */
990 if (num_eliminable)
991 did_elimination = eliminate_regs_in_insn (insn, 0);
992
993 #ifdef SMALL_REGISTER_CLASSES
994 /* Set avoid_return_reg if this is an insn
995 that might use the value of a function call. */
996 if (GET_CODE (insn) == CALL_INSN)
997 {
998 if (GET_CODE (PATTERN (insn)) == SET)
999 after_call = SET_DEST (PATTERN (insn));
1000 else if (GET_CODE (PATTERN (insn)) == PARALLEL
1001 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
1002 after_call = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
1003 else
1004 after_call = 0;
1005 }
1006 else if (after_call != 0
1007 && !(GET_CODE (PATTERN (insn)) == SET
1008 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx))
1009 {
1010 if (reg_mentioned_p (after_call, PATTERN (insn)))
1011 avoid_return_reg = after_call;
1012 after_call = 0;
1013 }
1014 #endif /* SMALL_REGISTER_CLASSES */
1015
1016 /* Analyze the instruction. */
1017 find_reloads (insn, 0, spill_indirect_levels, global,
1018 spill_reg_order);
1019
1020 /* Remember for later shortcuts which insns had any reloads or
1021 register eliminations.
1022
1023 One might think that it would be worthwhile to mark insns
1024 that need register replacements but not reloads, but this is
1025 not safe because find_reloads may do some manipulation of
1026 the insn (such as swapping commutative operands), which would
1027 be lost when we restore the old pattern after register
1028 replacement. So the actions of find_reloads must be redone in
1029 subsequent passes or in reload_as_needed.
1030
1031 However, it is safe to mark insns that need reloads
1032 but not register replacement. */
1033
1034 PUT_MODE (insn, (did_elimination ? QImode
1035 : n_reloads ? HImode
1036 : GET_MODE (insn) == DImode ? DImode
1037 : VOIDmode));
1038
1039 /* Discard any register replacements done. */
1040 if (did_elimination)
1041 {
1042 obstack_free (&reload_obstack, reload_firstobj);
1043 PATTERN (insn) = old_body;
1044 INSN_CODE (insn) = old_code;
1045 REG_NOTES (insn) = old_notes;
1046 something_needs_elimination = 1;
1047 }
1048
1049 /* If this insn has no reloads, we need not do anything except
1050 in the case of a CALL_INSN when we have caller-saves and
1051 caller-save needs reloads. */
1052
1053 if (n_reloads == 0
1054 && ! (GET_CODE (insn) == CALL_INSN
1055 && caller_save_spill_class != NO_REGS))
1056 continue;
1057
1058 something_needs_reloads = 1;
1059
1060 for (i = 0; i < N_REG_CLASSES; i++)
1061 {
1062 insn_needs[i] = 0, insn_groups[i] = 0;
1063 insn_needs_for_inputs[i] = 0, insn_groups_for_inputs[i] = 0;
1064 insn_needs_for_outputs[i] = 0, insn_groups_for_outputs[i] = 0;
1065 insn_needs_for_insn[i] = 0, insn_groups_for_insn[i] = 0;
1066 insn_needs_for_op_addr[i] = 0, insn_groups_for_op_addr[i] = 0;
1067 insn_needs_for_other_addr[i] = 0;
1068 insn_groups_for_other_addr[i] = 0;
1069 }
1070
1071 for (i = 0; i < reload_n_operands; i++)
1072 {
1073 insn_total_groups_for_in_addr[i] = 0;
1074 insn_total_groups_for_out_addr[i] = 0;
1075
1076 for (j = 0; j < N_REG_CLASSES; j++)
1077 {
1078 insn_needs_for_in_addr[i][j] = 0;
1079 insn_needs_for_out_addr[i][j] = 0;
1080 insn_groups_for_in_addr[i][j] = 0;
1081 insn_groups_for_out_addr[i][j] = 0;
1082 }
1083 }
1084
1085 /* Count each reload once in every class
1086 containing the reload's own class. */
1087
1088 for (i = 0; i < n_reloads; i++)
1089 {
1090 register enum reg_class *p;
1091 enum reg_class class = reload_reg_class[i];
1092 int size;
1093 enum machine_mode mode;
1094 int *this_groups;
1095 int *this_needs;
1096 int *this_total_groups;
1097
1098 /* Don't count the dummy reloads, for which one of the
1099 regs mentioned in the insn can be used for reloading.
1100 Don't count optional reloads.
1101 Don't count reloads that got combined with others. */
1102 if (reload_reg_rtx[i] != 0
1103 || reload_optional[i] != 0
1104 || (reload_out[i] == 0 && reload_in[i] == 0
1105 && ! reload_secondary_p[i]))
1106 continue;
1107
1108 /* Show that a reload register of this class is needed
1109 in this basic block. We do not use insn_needs and
1110 insn_groups because they are overly conservative for
1111 this purpose. */
1112 if (global && ! basic_block_needs[(int) class][this_block])
1113 {
1114 basic_block_needs[(int) class][this_block] = 1;
1115 new_basic_block_needs = 1;
1116 }
1117
1118 /* Decide which time-of-use to count this reload for. */
1119 switch (reload_when_needed[i])
1120 {
1121 case RELOAD_OTHER:
1122 this_needs = insn_needs;
1123 this_groups = insn_groups;
1124 this_total_groups = &insn_total_groups;
1125 break;
1126
1127 case RELOAD_FOR_INPUT:
1128 this_needs = insn_needs_for_inputs;
1129 this_groups = insn_groups_for_inputs;
1130 this_total_groups = &insn_total_groups_for_inputs;
1131 break;
1132
1133 case RELOAD_FOR_OUTPUT:
1134 this_needs = insn_needs_for_outputs;
1135 this_groups = insn_groups_for_outputs;
1136 this_total_groups = &insn_total_groups_for_outputs;
1137 break;
1138
1139 case RELOAD_FOR_INSN:
1140 this_needs = insn_needs_for_insn;
1141 this_groups = insn_groups_for_insn;
1142 this_total_groups = &insn_total_groups_for_insn;
1143 break;
1144
1145 case RELOAD_FOR_OTHER_ADDRESS:
1146 this_needs = insn_needs_for_other_addr;
1147 this_groups = insn_groups_for_other_addr;
1148 this_total_groups = &insn_total_groups_for_other_addr;
1149 break;
1150
1151 case RELOAD_FOR_INPUT_ADDRESS:
1152 this_needs = insn_needs_for_in_addr[reload_opnum[i]];
1153 this_groups = insn_groups_for_in_addr[reload_opnum[i]];
1154 this_total_groups
1155 = &insn_total_groups_for_in_addr[reload_opnum[i]];
1156 break;
1157
1158 case RELOAD_FOR_OUTPUT_ADDRESS:
1159 this_needs = insn_needs_for_out_addr[reload_opnum[i]];
1160 this_groups = insn_groups_for_out_addr[reload_opnum[i]];
1161 this_total_groups
1162 = &insn_total_groups_for_out_addr[reload_opnum[i]];
1163 break;
1164
1165 case RELOAD_FOR_OPERAND_ADDRESS:
1166 this_needs = insn_needs_for_op_addr;
1167 this_groups = insn_groups_for_op_addr;
1168 this_total_groups = &insn_total_groups_for_op_addr;
1169 break;
1170 }
1171
1172 mode = reload_inmode[i];
1173 if (GET_MODE_SIZE (reload_outmode[i]) > GET_MODE_SIZE (mode))
1174 mode = reload_outmode[i];
1175 size = CLASS_MAX_NREGS (class, mode);
1176 if (size > 1)
1177 {
1178 enum machine_mode other_mode, allocate_mode;
1179
1180 /* Count number of groups needed separately from
1181 number of individual regs needed. */
1182 this_groups[(int) class]++;
1183 p = reg_class_superclasses[(int) class];
1184 while (*p != LIM_REG_CLASSES)
1185 this_groups[(int) *p++]++;
1186 (*this_total_groups)++;
1187
1188 /* Record size and mode of a group of this class. */
1189 /* If more than one size group is needed,
1190 make all groups the largest needed size. */
1191 if (group_size[(int) class] < size)
1192 {
1193 other_mode = group_mode[(int) class];
1194 allocate_mode = mode;
1195
1196 group_size[(int) class] = size;
1197 group_mode[(int) class] = mode;
1198 }
1199 else
1200 {
1201 other_mode = mode;
1202 allocate_mode = group_mode[(int) class];
1203 }
1204
1205 /* Crash if two dissimilar machine modes both need
1206 groups of consecutive regs of the same class. */
1207
1208 if (other_mode != VOIDmode
1209 && other_mode != allocate_mode
1210 && ! modes_equiv_for_class_p (allocate_mode,
1211 other_mode,
1212 class))
1213 abort ();
1214 }
1215 else if (size == 1)
1216 {
1217 this_needs[(int) class] += 1;
1218 p = reg_class_superclasses[(int) class];
1219 while (*p != LIM_REG_CLASSES)
1220 this_needs[(int) *p++] += 1;
1221 }
1222 else
1223 abort ();
1224 }
1225
1226 /* All reloads have been counted for this insn;
1227 now merge the various times of use.
1228 This sets insn_needs, etc., to the maximum total number
1229 of registers needed at any point in this insn. */
1230
1231 for (i = 0; i < N_REG_CLASSES; i++)
1232 {
1233 int in_max, out_max;
1234
1235 for (in_max = 0, out_max = 0, j = 0;
1236 j < reload_n_operands; j++)
1237 {
1238 in_max = MAX (in_max, insn_needs_for_in_addr[j][i]);
1239 out_max = MAX (out_max, insn_needs_for_out_addr[j][i]);
1240 }
1241
1242 /* RELOAD_FOR_INSN reloads conflict with inputs, outputs,
1243 and operand addresses but not things used to reload them.
1244 Similarly, RELOAD_FOR_OPERAND_ADDRESS reloads don't
1245 conflict with things needed to reload inputs or
1246 outputs. */
1247
1248 in_max = MAX (in_max, insn_needs_for_op_addr[i]);
1249 out_max = MAX (out_max, insn_needs_for_insn[i]);
1250
1251 insn_needs_for_inputs[i]
1252 = MAX (insn_needs_for_inputs[i]
1253 + insn_needs_for_op_addr[i]
1254 + insn_needs_for_insn[i],
1255 in_max + insn_needs_for_inputs[i]);
1256
1257 insn_needs_for_outputs[i] += out_max;
1258 insn_needs[i] += MAX (MAX (insn_needs_for_inputs[i],
1259 insn_needs_for_outputs[i]),
1260 insn_needs_for_other_addr[i]);
1261
1262 for (in_max = 0, out_max = 0, j = 0;
1263 j < reload_n_operands; j++)
1264 {
1265 in_max = MAX (in_max, insn_groups_for_in_addr[j][i]);
1266 out_max = MAX (out_max, insn_groups_for_out_addr[j][i]);
1267 }
1268
1269 in_max = MAX (in_max, insn_groups_for_op_addr[i]);
1270 out_max = MAX (out_max, insn_groups_for_insn[i]);
1271
1272 insn_groups_for_inputs[i]
1273 = MAX (insn_groups_for_inputs[i]
1274 + insn_groups_for_op_addr[i]
1275 + insn_groups_for_insn[i],
1276 in_max + insn_groups_for_inputs[i]);
1277
1278 insn_groups_for_outputs[i] += out_max;
1279 insn_groups[i] += MAX (MAX (insn_groups_for_inputs[i],
1280 insn_groups_for_outputs[i]),
1281 insn_groups_for_other_addr[i]);
1282 }
1283
1284 for (i = 0; i < reload_n_operands; i++)
1285 {
1286 max_total_input_groups
1287 = MAX (max_total_input_groups,
1288 insn_total_groups_for_in_addr[i]);
1289 max_total_output_groups
1290 = MAX (max_total_output_groups,
1291 insn_total_groups_for_out_addr[i]);
1292 }
1293
1294 max_total_input_groups = MAX (max_total_input_groups,
1295 insn_total_groups_for_op_addr);
1296 max_total_output_groups = MAX (max_total_output_groups,
1297 insn_total_groups_for_insn);
1298
1299 insn_total_groups_for_inputs
1300 = MAX (max_total_input_groups + insn_total_groups_for_op_addr
1301 + insn_total_groups_for_insn,
1302 max_total_input_groups + insn_total_groups_for_inputs);
1303
1304 insn_total_groups_for_outputs += max_total_output_groups;
1305
1306 insn_total_groups += MAX (MAX (insn_total_groups_for_outputs,
1307 insn_total_groups_for_inputs),
1308 insn_total_groups_for_other_addr);
1309
1310 /* If this is a CALL_INSN and caller-saves will need
1311 a spill register, act as if the spill register is
1312 needed for this insn. However, the spill register
1313 can be used by any reload of this insn, so we only
1314 need do something if no need for that class has
1315 been recorded.
1316
1317 The assumption that every CALL_INSN will trigger a
1318 caller-save is highly conservative; however, the number
1319 of cases where caller-saves will need a spill register but
1320 a block containing a CALL_INSN won't need a spill register
1321 of that class should be quite rare.
1322
1323 If a group is needed, the size and mode of the group will
1324 have been set up at the beginning of this loop. */
1325
1326 if (GET_CODE (insn) == CALL_INSN
1327 && caller_save_spill_class != NO_REGS)
1328 {
1329 int *caller_save_needs
1330 = (caller_save_group_size > 1 ? insn_groups : insn_needs);
1331
1332 if (caller_save_needs[(int) caller_save_spill_class] == 0)
1333 {
1334 register enum reg_class *p
1335 = reg_class_superclasses[(int) caller_save_spill_class];
1336
1337 caller_save_needs[(int) caller_save_spill_class]++;
1338
1339 while (*p != LIM_REG_CLASSES)
1340 caller_save_needs[(int) *p++] += 1;
1341 }
1342
1343 if (caller_save_group_size > 1)
1344 insn_total_groups = MAX (insn_total_groups, 1);
1345
1346
1347 /* Show that this basic block will need a register of
1348 this class. */
1349
1350 if (global
1351 && ! (basic_block_needs[(int) caller_save_spill_class]
1352 [this_block]))
1353 {
1354 basic_block_needs[(int) caller_save_spill_class]
1355 [this_block] = 1;
1356 new_basic_block_needs = 1;
1357 }
1358 }
1359
1360 #ifdef SMALL_REGISTER_CLASSES
1361 /* If this insn stores the value of a function call,
1362 and that value is in a register that has been spilled,
1363 and if the insn needs a reload in a class
1364 that might use that register as the reload register,
1365 then add an extra need in that class.
1366 This makes sure we have a register available that does
1367 not overlap the return value. */
1368 if (avoid_return_reg)
1369 {
1370 int regno = REGNO (avoid_return_reg);
1371 int nregs
1372 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
1373 int r;
1374 int basic_needs[N_REG_CLASSES], basic_groups[N_REG_CLASSES];
1375
1376 /* First compute the "basic needs", which counts a
1377 need only in the smallest class in which it
1378 is required. */
1379
1380 bcopy (insn_needs, basic_needs, sizeof basic_needs);
1381 bcopy (insn_groups, basic_groups, sizeof basic_groups);
1382
1383 for (i = 0; i < N_REG_CLASSES; i++)
1384 {
1385 enum reg_class *p;
1386
1387 if (basic_needs[i] >= 0)
1388 for (p = reg_class_superclasses[i];
1389 *p != LIM_REG_CLASSES; p++)
1390 basic_needs[(int) *p] -= basic_needs[i];
1391
1392 if (basic_groups[i] >= 0)
1393 for (p = reg_class_superclasses[i];
1394 *p != LIM_REG_CLASSES; p++)
1395 basic_groups[(int) *p] -= basic_groups[i];
1396 }
1397
1398 /* Now count extra regs if there might be a conflict with
1399 the return value register.
1400
1401 ??? This is not quite correct because we don't properly
1402 handle the case of groups, but if we end up doing
1403 something wrong, it either will end up not mattering or
1404 we will abort elsewhere. */
1405
1406 for (r = regno; r < regno + nregs; r++)
1407 if (spill_reg_order[r] >= 0)
1408 for (i = 0; i < N_REG_CLASSES; i++)
1409 if (TEST_HARD_REG_BIT (reg_class_contents[i], r))
1410 {
1411 if (basic_needs[i] > 0 || basic_groups[i] > 0)
1412 {
1413 enum reg_class *p;
1414
1415 insn_needs[i]++;
1416 p = reg_class_superclasses[i];
1417 while (*p != LIM_REG_CLASSES)
1418 insn_needs[(int) *p++]++;
1419 }
1420 }
1421 }
1422 #endif /* SMALL_REGISTER_CLASSES */
1423
1424 /* For each class, collect maximum need of any insn. */
1425
1426 for (i = 0; i < N_REG_CLASSES; i++)
1427 {
1428 if (max_needs[i] < insn_needs[i])
1429 {
1430 max_needs[i] = insn_needs[i];
1431 max_needs_insn[i] = insn;
1432 }
1433 if (max_groups[i] < insn_groups[i])
1434 {
1435 max_groups[i] = insn_groups[i];
1436 max_groups_insn[i] = insn;
1437 }
1438 if (insn_total_groups > 0)
1439 if (max_nongroups[i] < insn_needs[i])
1440 {
1441 max_nongroups[i] = insn_needs[i];
1442 max_nongroups_insn[i] = insn;
1443 }
1444 }
1445 }
1446 /* Note that there is a continue statement above. */
1447 }
1448
1449 /* If we allocated any new memory locations, make another pass
1450 since it might have changed elimination offsets. */
1451 if (starting_frame_size != get_frame_size ())
1452 something_changed = 1;
1453
1454 if (dumpfile)
1455 for (i = 0; i < N_REG_CLASSES; i++)
1456 {
1457 if (max_needs[i] > 0)
1458 fprintf (dumpfile,
1459 ";; Need %d reg%s of class %s (for insn %d).\n",
1460 max_needs[i], max_needs[i] == 1 ? "" : "s",
1461 reg_class_names[i], INSN_UID (max_needs_insn[i]));
1462 if (max_nongroups[i] > 0)
1463 fprintf (dumpfile,
1464 ";; Need %d nongroup reg%s of class %s (for insn %d).\n",
1465 max_nongroups[i], max_nongroups[i] == 1 ? "" : "s",
1466 reg_class_names[i], INSN_UID (max_nongroups_insn[i]));
1467 if (max_groups[i] > 0)
1468 fprintf (dumpfile,
1469 ";; Need %d group%s (%smode) of class %s (for insn %d).\n",
1470 max_groups[i], max_groups[i] == 1 ? "" : "s",
1471 mode_name[(int) group_mode[i]],
1472 reg_class_names[i], INSN_UID (max_groups_insn[i]));
1473 }
1474
1475 /* If we have caller-saves, set up the save areas and see if caller-save
1476 will need a spill register. */
1477
1478 if (caller_save_needed
1479 && ! setup_save_areas (&something_changed)
1480 && caller_save_spill_class == NO_REGS)
1481 {
1482 /* The class we will need depends on whether the machine
1483 supports the sum of two registers for an address; see
1484 find_address_reloads for details. */
1485
1486 caller_save_spill_class
1487 = double_reg_address_ok ? INDEX_REG_CLASS : BASE_REG_CLASS;
1488 caller_save_group_size
1489 = CLASS_MAX_NREGS (caller_save_spill_class, Pmode);
1490 something_changed = 1;
1491 }
1492
1493 /* See if anything that happened changes which eliminations are valid.
1494 For example, on the Sparc, whether or not the frame pointer can
1495 be eliminated can depend on what registers have been used. We need
1496 not check some conditions again (such as flag_omit_frame_pointer)
1497 since they can't have changed. */
1498
1499 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1500 if ((ep->from == HARD_FRAME_POINTER_REGNUM && FRAME_POINTER_REQUIRED)
1501 #ifdef ELIMINABLE_REGS
1502 || ! CAN_ELIMINATE (ep->from, ep->to)
1503 #endif
1504 )
1505 ep->can_eliminate = 0;
1506
1507 /* Look for the case where we have discovered that we can't replace
1508 register A with register B and that means that we will now be
1509 trying to replace register A with register C. This means we can
1510 no longer replace register C with register B and we need to disable
1511 such an elimination, if it exists. This occurs often with A == ap,
1512 B == sp, and C == fp. */
1513
1514 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1515 {
1516 struct elim_table *op;
1517 register int new_to = -1;
1518
1519 if (! ep->can_eliminate && ep->can_eliminate_previous)
1520 {
1521 /* Find the current elimination for ep->from, if there is a
1522 new one. */
1523 for (op = reg_eliminate;
1524 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
1525 if (op->from == ep->from && op->can_eliminate)
1526 {
1527 new_to = op->to;
1528 break;
1529 }
1530
1531 /* See if there is an elimination of NEW_TO -> EP->TO. If so,
1532 disable it. */
1533 for (op = reg_eliminate;
1534 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
1535 if (op->from == new_to && op->to == ep->to)
1536 op->can_eliminate = 0;
1537 }
1538 }
1539
1540 /* See if any registers that we thought we could eliminate the previous
1541 time are no longer eliminable. If so, something has changed and we
1542 must spill the register. Also, recompute the number of eliminable
1543 registers and see if the frame pointer is needed; it is if there is
1544 no elimination of the frame pointer that we can perform. */
1545
1546 frame_pointer_needed = 1;
1547 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1548 {
1549 if (ep->can_eliminate && ep->from == FRAME_POINTER_REGNUM
1550 && ep->to != HARD_FRAME_POINTER_REGNUM)
1551 frame_pointer_needed = 0;
1552
1553 if (! ep->can_eliminate && ep->can_eliminate_previous)
1554 {
1555 ep->can_eliminate_previous = 0;
1556 spill_hard_reg (ep->from, global, dumpfile, 1);
1557 regs_ever_live[ep->from] = 1;
1558 something_changed = 1;
1559 num_eliminable--;
1560 }
1561 }
1562
1563 /* If all needs are met, we win. */
1564
1565 for (i = 0; i < N_REG_CLASSES; i++)
1566 if (max_needs[i] > 0 || max_groups[i] > 0 || max_nongroups[i] > 0)
1567 break;
1568 if (i == N_REG_CLASSES && !new_basic_block_needs && ! something_changed)
1569 break;
1570
1571 /* Not all needs are met; must spill some hard regs. */
1572
1573 /* Put all registers spilled so far back in potential_reload_regs, but
1574 put them at the front, since we've already spilled most of the
1575 pseudos in them (we might have left some pseudos unspilled if they
1576 were in a block that didn't need any spill registers of a conflicting
1577 class). We used to try to mark off the need for those registers,
1578 but doing so properly is very complex and reallocating them is the
1579 simpler approach. First, "pack" potential_reload_regs by pushing
1580 any nonnegative entries towards the end. That will leave room
1581 for the registers we already spilled.
1582
1583 Also, undo the marking of the spill registers from the last time
1584 around in FORBIDDEN_REGS since we will probably be allocating
1585 them again below.
1586
1587 ??? It is theoretically possible that we might end up not using one
1588 of our previously-spilled registers in this allocation, even though
1589 they are at the head of the list. It's not clear what to do about
1590 this, but it was no better before, when we marked off the needs met
1591 by the previously-spilled registers. With the current code, globals
1592 can be allocated into these registers, but locals cannot. */
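/* A worked example of the repacking below, on a hypothetical machine with
   FIRST_PSEUDO_REGISTER == 8: if the previously spilled regs are
   spill_regs == {2, 4} and potential_reload_regs == {6, -1, 7, -1, 0, 1, 3, 5},
   the first loop pushes the nonnegative entries toward the end, leaving
   {x, x, 6, 7, 0, 1, 3, 5} (the first two slots are stale), and the second
   loop then overwrites the front with the old spill regs, giving
   {2, 4, 6, 7, 0, 1, 3, 5}.  */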
1593
1594 if (n_spills)
1595 {
1596 for (i = j = FIRST_PSEUDO_REGISTER - 1; i >= 0; i--)
1597 if (potential_reload_regs[i] != -1)
1598 potential_reload_regs[j--] = potential_reload_regs[i];
1599
1600 for (i = 0; i < n_spills; i++)
1601 {
1602 potential_reload_regs[i] = spill_regs[i];
1603 spill_reg_order[spill_regs[i]] = -1;
1604 CLEAR_HARD_REG_BIT (forbidden_regs, spill_regs[i]);
1605 }
1606
1607 n_spills = 0;
1608 }
1609
1610 /* Now find more reload regs to satisfy the remaining need.
1611 Do it by ascending class number, since otherwise a reg
1612 might be spilled for a big class and might fail to count
1613 for a smaller class even though it belongs to that class.
1614
1615 Count spilled regs in `spills', and add entries to
1616 `spill_regs' and `spill_reg_order'.
1617
1618 ??? Note there is a problem here.
1619 When there is a need for a group in a high-numbered class,
1620 and also need for non-group regs that come from a lower class,
1621 the non-group regs are chosen first. If there aren't many regs,
1622 they might leave no room for a group.
1623
1624 This was happening on the 386. To fix it, we added the code
1625 that calls possible_group_p, so that the lower class won't
1626 break up the last possible group.
1627
1628 Really fixing the problem would require changes above
1629 in counting the regs already spilled, and in choose_reload_regs.
1630 It might be hard to avoid introducing bugs there. */
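/* A concrete (hypothetical) illustration: if a low-numbered class containing
   regs {1, 2} needs two non-group spills while a wider, higher-numbered class
   containing regs {0, 1, 2, 3} still needs one group of two, the singles are
   taken from regs 1 and 2 first, leaving only the non-adjacent regs 0 and 3
   for the group.  possible_group_p is the guard that tries to keep such
   single-reg spills from breaking up the last remaining pair.  */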
1631
1632 CLEAR_HARD_REG_SET (counted_for_groups);
1633 CLEAR_HARD_REG_SET (counted_for_nongroups);
1634
1635 for (class = 0; class < N_REG_CLASSES; class++)
1636 {
1637 /* First get the groups of registers.
1638 If we got single registers first, we might fragment
1639 possible groups. */
1640 while (max_groups[class] > 0)
1641 {
1642 /* If any single spilled regs happen to form groups,
1643 count them now. Maybe we don't really need
1644 to spill another group. */
1645 count_possible_groups (group_size, group_mode, max_groups);
1646
1647 if (max_groups[class] <= 0)
1648 break;
1649
1650 /* Groups of size 2 (the only groups used on most machines)
1651 are treated specially. */
1652 if (group_size[class] == 2)
1653 {
1654 /* First, look for a register that will complete a group. */
1655 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1656 {
1657 int other;
1658
1659 j = potential_reload_regs[i];
1660 if (j >= 0 && ! TEST_HARD_REG_BIT (bad_spill_regs, j)
1661 &&
1662 ((j > 0 && (other = j - 1, spill_reg_order[other] >= 0)
1663 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1664 && TEST_HARD_REG_BIT (reg_class_contents[class], other)
1665 && HARD_REGNO_MODE_OK (other, group_mode[class])
1666 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1667 other)
1668 /* We don't want one part of another group.
1669 We could get "two groups" that overlap! */
1670 && ! TEST_HARD_REG_BIT (counted_for_groups, other))
1671 ||
1672 (j < FIRST_PSEUDO_REGISTER - 1
1673 && (other = j + 1, spill_reg_order[other] >= 0)
1674 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1675 && TEST_HARD_REG_BIT (reg_class_contents[class], other)
1676 && HARD_REGNO_MODE_OK (j, group_mode[class])
1677 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1678 other)
1679 && ! TEST_HARD_REG_BIT (counted_for_groups,
1680 other))))
1681 {
1682 register enum reg_class *p;
1683
1684 /* We have found one that will complete a group,
1685 so count off one group as provided. */
1686 max_groups[class]--;
1687 p = reg_class_superclasses[class];
1688 while (*p != LIM_REG_CLASSES)
1689 max_groups[(int) *p++]--;
1690
1691 /* Indicate both these regs are part of a group. */
1692 SET_HARD_REG_BIT (counted_for_groups, j);
1693 SET_HARD_REG_BIT (counted_for_groups, other);
1694 break;
1695 }
1696 }
1697 /* We can't complete a group, so start one. */
1698 if (i == FIRST_PSEUDO_REGISTER)
1699 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1700 {
1701 int k;
1702 j = potential_reload_regs[i];
1703 /* Verify that J+1 is a potential reload reg. */
1704 for (k = 0; k < FIRST_PSEUDO_REGISTER; k++)
1705 if (potential_reload_regs[k] == j + 1)
1706 break;
1707 if (j >= 0 && j + 1 < FIRST_PSEUDO_REGISTER
1708 && k < FIRST_PSEUDO_REGISTER
1709 && spill_reg_order[j] < 0 && spill_reg_order[j + 1] < 0
1710 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1711 && TEST_HARD_REG_BIT (reg_class_contents[class], j + 1)
1712 && HARD_REGNO_MODE_OK (j, group_mode[class])
1713 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1714 j + 1)
1715 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + 1))
1716 break;
1717 }
1718
1719 /* I should be the index in potential_reload_regs
1720 of the new reload reg we have found. */
1721
1722 if (i >= FIRST_PSEUDO_REGISTER)
1723 {
1724 /* There are no groups left to spill. */
1725 spill_failure (max_groups_insn[class]);
1726 failure = 1;
1727 goto failed;
1728 }
1729 else
1730 something_changed
1731 |= new_spill_reg (i, class, max_needs, NULL_PTR,
1732 global, dumpfile);
1733 }
1734 else
1735 {
1736 /* For groups of more than 2 registers,
1737 look for a sufficient sequence of unspilled registers,
1738 and spill them all at once. */
1739 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1740 {
1741 int k;
1742
1743 j = potential_reload_regs[i];
1744 if (j >= 0
1745 && j + group_size[class] <= FIRST_PSEUDO_REGISTER
1746 && HARD_REGNO_MODE_OK (j, group_mode[class]))
1747 {
1748 /* Check each reg in the sequence. */
1749 for (k = 0; k < group_size[class]; k++)
1750 if (! (spill_reg_order[j + k] < 0
1751 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + k)
1752 && TEST_HARD_REG_BIT (reg_class_contents[class], j + k)))
1753 break;
1754 /* We got a full sequence, so spill them all. */
1755 if (k == group_size[class])
1756 {
1757 register enum reg_class *p;
1758 for (k = 0; k < group_size[class]; k++)
1759 {
1760 int idx;
1761 SET_HARD_REG_BIT (counted_for_groups, j + k);
1762 for (idx = 0; idx < FIRST_PSEUDO_REGISTER; idx++)
1763 if (potential_reload_regs[idx] == j + k)
1764 break;
1765 something_changed
1766 |= new_spill_reg (idx, class,
1767 max_needs, NULL_PTR,
1768 global, dumpfile);
1769 }
1770
1771 /* We have found one that will complete a group,
1772 so count off one group as provided. */
1773 max_groups[class]--;
1774 p = reg_class_superclasses[class];
1775 while (*p != LIM_REG_CLASSES)
1776 max_groups[(int) *p++]--;
1777
1778 break;
1779 }
1780 }
1781 }
1782 /* We couldn't find any registers for this reload.
1783 Avoid going into an infinite loop. */
1784 if (i >= FIRST_PSEUDO_REGISTER)
1785 {
1786 /* There are no groups left. */
1787 spill_failure (max_groups_insn[class]);
1788 failure = 1;
1789 goto failed;
1790 }
1791 }
1792 }
1793
1794 /* Now similarly satisfy all need for single registers. */
1795
1796 while (max_needs[class] > 0 || max_nongroups[class] > 0)
1797 {
1798 #ifdef SMALL_REGISTER_CLASSES
1799 /* This should be right for all machines, but only the 386
1800 is known to need it, so this conditional plays safe.
1801 ??? For 2.5, try making this unconditional. */
1802 /* If we spilled enough regs, but they weren't counted
1803 against the non-group need, see if we can count them now.
1804 If so, we can avoid some actual spilling. */
1805 if (max_needs[class] <= 0 && max_nongroups[class] > 0)
1806 for (i = 0; i < n_spills; i++)
1807 if (TEST_HARD_REG_BIT (reg_class_contents[class],
1808 spill_regs[i])
1809 && !TEST_HARD_REG_BIT (counted_for_groups,
1810 spill_regs[i])
1811 && !TEST_HARD_REG_BIT (counted_for_nongroups,
1812 spill_regs[i])
1813 && max_nongroups[class] > 0)
1814 {
1815 register enum reg_class *p;
1816
1817 SET_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]);
1818 max_nongroups[class]--;
1819 p = reg_class_superclasses[class];
1820 while (*p != LIM_REG_CLASSES)
1821 max_nongroups[(int) *p++]--;
1822 }
1823 if (max_needs[class] <= 0 && max_nongroups[class] <= 0)
1824 break;
1825 #endif
1826
1827 /* Consider the potential reload regs that aren't
1828 yet in use as reload regs, in order of preference.
1829 Find the most preferred one that's in this class. */
1830
1831 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1832 if (potential_reload_regs[i] >= 0
1833 && TEST_HARD_REG_BIT (reg_class_contents[class],
1834 potential_reload_regs[i])
1835 /* If this reg will not be available for groups,
1836 pick one that does not foreclose possible groups.
1837 This is a kludge, and not very general,
1838 but it should be sufficient to make the 386 work,
1839 and the problem should not occur on machines with
1840 more registers. */
1841 && (max_nongroups[class] == 0
1842 || possible_group_p (potential_reload_regs[i], max_groups)))
1843 break;
1844
1845 /* If we couldn't get a register, try to get one even if we
1846 might foreclose possible groups. This may cause problems
1847 later, but that's better than aborting now, since it is
1848 possible that we will, in fact, be able to form the needed
1849 group even with this allocation. */
1850
1851 if (i >= FIRST_PSEUDO_REGISTER
1852 && (asm_noperands (max_needs[class] > 0
1853 ? max_needs_insn[class]
1854 : max_nongroups_insn[class])
1855 < 0))
1856 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1857 if (potential_reload_regs[i] >= 0
1858 && TEST_HARD_REG_BIT (reg_class_contents[class],
1859 potential_reload_regs[i]))
1860 break;
1861
1862 /* I should be the index in potential_reload_regs
1863 of the new reload reg we have found. */
1864
1865 if (i >= FIRST_PSEUDO_REGISTER)
1866 {
1867 /* There are no possible registers left to spill. */
1868 spill_failure (max_needs[class] > 0 ? max_needs_insn[class]
1869 : max_nongroups_insn[class]);
1870 failure = 1;
1871 goto failed;
1872 }
1873 else
1874 something_changed
1875 |= new_spill_reg (i, class, max_needs, max_nongroups,
1876 global, dumpfile);
1877 }
1878 }
1879 }
1880
1881 /* If global-alloc was run, notify it of any register eliminations we have
1882 done. */
1883 if (global)
1884 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1885 if (ep->can_eliminate)
1886 mark_elimination (ep->from, ep->to);
1887
1888 /* Insert code to save and restore call-clobbered hard regs
1889 around calls. Tell it what mode to use so that we will process
1890 those insns in reload_as_needed if we have to. */
1891
1892 if (caller_save_needed)
1893 save_call_clobbered_regs (num_eliminable ? QImode
1894 : caller_save_spill_class != NO_REGS ? HImode
1895 : VOIDmode);
1896
1897 /* If a pseudo has no hard reg, delete the insns that made the equivalence.
1898 If that insn didn't set the register (i.e., it copied the register to
1899 memory), just delete that insn instead of the equivalencing insn plus
1900 anything now dead. If we call delete_dead_insn on that insn, we may
1901 delete the insn that actually sets the register if the register dies
1902 there and that is incorrect. */
1903
1904 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1905 if (reg_renumber[i] < 0 && reg_equiv_init[i] != 0
1906 && GET_CODE (reg_equiv_init[i]) != NOTE)
1907 {
1908 if (reg_set_p (regno_reg_rtx[i], PATTERN (reg_equiv_init[i])))
1909 delete_dead_insn (reg_equiv_init[i]);
1910 else
1911 {
1912 PUT_CODE (reg_equiv_init[i], NOTE);
1913 NOTE_SOURCE_FILE (reg_equiv_init[i]) = 0;
1914 NOTE_LINE_NUMBER (reg_equiv_init[i]) = NOTE_INSN_DELETED;
1915 }
1916 }
1917
1918 /* Use the reload registers where necessary
1919 by generating move instructions to move the must-be-register
1920 values into or out of the reload registers. */
1921
1922 if (something_needs_reloads || something_needs_elimination
1923 || (caller_save_needed && num_eliminable)
1924 || caller_save_spill_class != NO_REGS)
1925 reload_as_needed (first, global);
1926
1927 /* If we were able to eliminate the frame pointer, show that it is no
1928 longer live at the start of any basic block. If it is live by
1929 virtue of being in a pseudo, that pseudo will be marked live
1930 and hence the frame pointer will be known to be live via that
1931 pseudo. */
1932
1933 if (! frame_pointer_needed)
1934 for (i = 0; i < n_basic_blocks; i++)
1935 basic_block_live_at_start[i][HARD_FRAME_POINTER_REGNUM / REGSET_ELT_BITS]
1936 &= ~ ((REGSET_ELT_TYPE) 1 << (HARD_FRAME_POINTER_REGNUM
1937 % REGSET_ELT_BITS));
1938
1939 /* Come here (with failure set nonzero) if we can't get enough spill regs
1940 and we decide not to abort about it. */
1941 failed:
1942
1943 reload_in_progress = 0;
1944
1945 /* Now eliminate all pseudo regs by modifying them into
1946 their equivalent memory references.
1947 The REG-rtx's for the pseudos are modified in place,
1948 so all insns that used to refer to them now refer to memory.
1949
1950 For a reg that has a reg_equiv_address, all those insns
1951 were changed by reloading so that no insns refer to it any longer;
1952 but the DECL_RTL of a variable decl may refer to it,
1953 and if so this causes the debugging info to mention the variable. */
1954
1955 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1956 {
1957 rtx addr = 0;
1958 int in_struct = 0;
1959 if (reg_equiv_mem[i])
1960 {
1961 addr = XEXP (reg_equiv_mem[i], 0);
1962 in_struct = MEM_IN_STRUCT_P (reg_equiv_mem[i]);
1963 }
1964 if (reg_equiv_address[i])
1965 addr = reg_equiv_address[i];
1966 if (addr)
1967 {
1968 if (reg_renumber[i] < 0)
1969 {
1970 rtx reg = regno_reg_rtx[i];
1971 XEXP (reg, 0) = addr;
1972 REG_USERVAR_P (reg) = 0;
1973 MEM_IN_STRUCT_P (reg) = in_struct;
1974 PUT_CODE (reg, MEM);
1975 }
1976 else if (reg_equiv_mem[i])
1977 XEXP (reg_equiv_mem[i], 0) = addr;
1978 }
1979 }
1980
1981 #ifdef PRESERVE_DEATH_INFO_REGNO_P
1982 /* Make a pass over all the insns and remove death notes for things that
1983 are no longer registers or no longer die in the insn (e.g., an input
1984 and output pseudo being tied). */
1985
1986 for (insn = first; insn; insn = NEXT_INSN (insn))
1987 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
1988 {
1989 rtx note, next;
1990
1991 for (note = REG_NOTES (insn); note; note = next)
1992 {
1993 next = XEXP (note, 1);
1994 if (REG_NOTE_KIND (note) == REG_DEAD
1995 && (GET_CODE (XEXP (note, 0)) != REG
1996 || reg_set_p (XEXP (note, 0), PATTERN (insn))))
1997 remove_note (insn, note);
1998 }
1999 }
2000 #endif
2001
2002 /* Indicate that we no longer have known memory locations or constants. */
2003 reg_equiv_constant = 0;
2004 reg_equiv_memory_loc = 0;
2005
2006 if (scratch_list)
2007 free (scratch_list);
2008 scratch_list = 0;
2009 if (scratch_block)
2010 free (scratch_block);
2011 scratch_block = 0;
2012
2013 return failure;
2014 }
2015 \f
2016 /* Nonzero if, after spilling reg REGNO for non-groups,
2017 it will still be possible to find a group if we still need one. */
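/* For instance (purely illustrative register numbers): if a group of two is
   still needed from a class containing regs {0, 1, 2, 3}, spilling reg 1 for
   a non-group still leaves the adjacent, unspilled pair {2, 3} available
   (assuming neither is in bad_spill_regs), so the answer is 1.  */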
2018
2019 static int
2020 possible_group_p (regno, max_groups)
2021 int regno;
2022 int *max_groups;
2023 {
2024 int i;
2025 int class = (int) NO_REGS;
2026
2027 for (i = 0; i < (int) N_REG_CLASSES; i++)
2028 if (max_groups[i] > 0)
2029 {
2030 class = i;
2031 break;
2032 }
2033
2034 if (class == (int) NO_REGS)
2035 return 1;
2036
2037 /* Consider each pair of consecutive registers. */
2038 for (i = 0; i < FIRST_PSEUDO_REGISTER - 1; i++)
2039 {
2040 /* Ignore pairs that include reg REGNO. */
2041 if (i == regno || i + 1 == regno)
2042 continue;
2043
2044 /* Ignore pairs that are outside the class that needs the group.
2045 ??? Here we fail to handle the case where two different classes
2046 independently need groups. But this never happens with our
2047 current machine descriptions. */
2048 if (! (TEST_HARD_REG_BIT (reg_class_contents[class], i)
2049 && TEST_HARD_REG_BIT (reg_class_contents[class], i + 1)))
2050 continue;
2051
2052 /* A pair of consecutive regs we can still spill does the trick. */
2053 if (spill_reg_order[i] < 0 && spill_reg_order[i + 1] < 0
2054 && ! TEST_HARD_REG_BIT (bad_spill_regs, i)
2055 && ! TEST_HARD_REG_BIT (bad_spill_regs, i + 1))
2056 return 1;
2057
2058 /* A pair of one already spilled and one we can spill does it
2059 provided the one already spilled is not otherwise reserved. */
2060 if (spill_reg_order[i] < 0
2061 && ! TEST_HARD_REG_BIT (bad_spill_regs, i)
2062 && spill_reg_order[i + 1] >= 0
2063 && ! TEST_HARD_REG_BIT (counted_for_groups, i + 1)
2064 && ! TEST_HARD_REG_BIT (counted_for_nongroups, i + 1))
2065 return 1;
2066 if (spill_reg_order[i + 1] < 0
2067 && ! TEST_HARD_REG_BIT (bad_spill_regs, i + 1)
2068 && spill_reg_order[i] >= 0
2069 && ! TEST_HARD_REG_BIT (counted_for_groups, i)
2070 && ! TEST_HARD_REG_BIT (counted_for_nongroups, i))
2071 return 1;
2072 }
2073
2074 return 0;
2075 }
2076 \f
2077 /* Count any groups that can be formed from the registers recently spilled.
2078 This is done class by class, in order of ascending class number. */
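/* Illustrative case: if regs 4 and 5 were each spilled individually, both
   belong to a class whose group_size is 2, neither has yet been counted for
   a group or a non-group, and HARD_REGNO_MODE_OK accepts the group mode at
   reg 4, then the scan below notices the consecutive pair, decrements
   max_groups for that class and its superclasses, and marks both regs in
   counted_for_groups so they are not counted twice.  */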
2079
2080 static void
2081 count_possible_groups (group_size, group_mode, max_groups)
2082 int *group_size;
2083 enum machine_mode *group_mode;
2084 int *max_groups;
2085 {
2086 int i;
2087 /* Now find all consecutive groups of spilled registers
2088 and mark each group off against the need for such groups.
2089 But don't count them against ordinary need, yet. */
2090
2091 for (i = 0; i < N_REG_CLASSES; i++)
2092 if (group_size[i] > 1)
2093 {
2094 HARD_REG_SET new;
2095 int j;
2096
2097 CLEAR_HARD_REG_SET (new);
2098
2099 /* Make a mask of all the regs that are spill regs in class I. */
2100 for (j = 0; j < n_spills; j++)
2101 if (TEST_HARD_REG_BIT (reg_class_contents[i], spill_regs[j])
2102 && ! TEST_HARD_REG_BIT (counted_for_groups, spill_regs[j])
2103 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
2104 spill_regs[j]))
2105 SET_HARD_REG_BIT (new, spill_regs[j]);
2106
2107 /* Find each consecutive group of them. */
2108 for (j = 0; j < FIRST_PSEUDO_REGISTER && max_groups[i] > 0; j++)
2109 if (TEST_HARD_REG_BIT (new, j)
2110 && j + group_size[i] <= FIRST_PSEUDO_REGISTER
2111 /* Next line in case group-mode for this class
2112 demands an even-odd pair. */
2113 && HARD_REGNO_MODE_OK (j, group_mode[i]))
2114 {
2115 int k;
2116 for (k = 1; k < group_size[i]; k++)
2117 if (! TEST_HARD_REG_BIT (new, j + k))
2118 break;
2119 if (k == group_size[i])
2120 {
2121 /* We found a group. Mark it off against this class's
2122 need for groups, and against each superclass too. */
2123 register enum reg_class *p;
2124 max_groups[i]--;
2125 p = reg_class_superclasses[i];
2126 while (*p != LIM_REG_CLASSES)
2127 max_groups[(int) *p++]--;
2128 /* Don't count these registers again. */
2129 for (k = 0; k < group_size[i]; k++)
2130 SET_HARD_REG_BIT (counted_for_groups, j + k);
2131 }
2132 /* Skip to the last reg in this group. When j is incremented
2133 above, it will then point to the first reg of the next
2134 possible group. */
2135 j += k - 1;
2136 }
2137 }
2138
2139 }
2140 \f
2141 /* ALLOCATE_MODE is a register mode that needs to be reloaded. OTHER_MODE is
2142 another mode that needs to be reloaded for the same register class CLASS.
2143 If any reg in CLASS allows ALLOCATE_MODE but not OTHER_MODE, fail.
2144 ALLOCATE_MODE will never be smaller than OTHER_MODE.
2145
2146 This code used to also fail if any reg in CLASS allows OTHER_MODE but not
2147 ALLOCATE_MODE. This test is unnecessary, because we will never try to put
2148 something of mode ALLOCATE_MODE into an OTHER_MODE register. Testing this
2149 causes unnecessary failures on machines requiring alignment of register
2150 groups when the two modes are different sizes, because the larger mode has
2151 more strict alignment rules than the smaller mode. */
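/* Example (hypothetical target): with ALLOCATE_MODE == DImode and
   OTHER_MODE == SImode, if every reg in CLASS that can hold DImode can also
   hold SImode, the two modes can share the same need counts and we return 1;
   if some reg accepts DImode but rejects SImode, we return 0.  */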
2152
2153 static int
2154 modes_equiv_for_class_p (allocate_mode, other_mode, class)
2155 enum machine_mode allocate_mode, other_mode;
2156 enum reg_class class;
2157 {
2158 register int regno;
2159 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2160 {
2161 if (TEST_HARD_REG_BIT (reg_class_contents[(int) class], regno)
2162 && HARD_REGNO_MODE_OK (regno, allocate_mode)
2163 && ! HARD_REGNO_MODE_OK (regno, other_mode))
2164 return 0;
2165 }
2166 return 1;
2167 }
2168
2169 /* Handle the failure to find a register to spill.
2170 INSN should be one of the insns which needed this particular spill reg. */
2171
2172 static void
2173 spill_failure (insn)
2174 rtx insn;
2175 {
2176 if (asm_noperands (PATTERN (insn)) >= 0)
2177 error_for_asm (insn, "`asm' needs too many reloads");
2178 else
2179 abort ();
2180 }
2181
2182 /* Add a new register to the tables of available spill-registers
2183 (as well as spilling all pseudos allocated to the register).
2184 I is the index of this register in potential_reload_regs.
2185 CLASS is the regclass whose need is being satisfied.
2186 MAX_NEEDS and MAX_NONGROUPS are the vectors of needs,
2187 so that this register can count off against them.
2188 MAX_NONGROUPS is 0 if this register is part of a group.
2189 GLOBAL and DUMPFILE are the same as the args that `reload' got. */
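/* Note that satisfying one need of CLASS also satisfies one need of every
   superclass of CLASS, which is why the loops below walk
   reg_class_superclasses; for example, on a typical target a reg spilled for
   GENERAL_REGS also counts against any remaining need recorded for
   ALL_REGS.  */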
2190
2191 static int
2192 new_spill_reg (i, class, max_needs, max_nongroups, global, dumpfile)
2193 int i;
2194 int class;
2195 int *max_needs;
2196 int *max_nongroups;
2197 int global;
2198 FILE *dumpfile;
2199 {
2200 register enum reg_class *p;
2201 int val;
2202 int regno = potential_reload_regs[i];
2203
2204 if (i >= FIRST_PSEUDO_REGISTER)
2205 abort (); /* Caller failed to find any register. */
2206
2207 if (fixed_regs[regno] || TEST_HARD_REG_BIT (forbidden_regs, regno))
2208 fatal ("fixed or forbidden register was spilled.\n\
2209 This may be due to a compiler bug or to impossible asm statements.");
2210
2211 /* Make reg REGNO an additional reload reg. */
2212
2213 potential_reload_regs[i] = -1;
2214 spill_regs[n_spills] = regno;
2215 spill_reg_order[regno] = n_spills;
2216 if (dumpfile)
2217 fprintf (dumpfile, "Spilling reg %d.\n", spill_regs[n_spills]);
2218
2219 /* Clear off the needs we just satisfied. */
2220
2221 max_needs[class]--;
2222 p = reg_class_superclasses[class];
2223 while (*p != LIM_REG_CLASSES)
2224 max_needs[(int) *p++]--;
2225
2226 if (max_nongroups && max_nongroups[class] > 0)
2227 {
2228 SET_HARD_REG_BIT (counted_for_nongroups, regno);
2229 max_nongroups[class]--;
2230 p = reg_class_superclasses[class];
2231 while (*p != LIM_REG_CLASSES)
2232 max_nongroups[(int) *p++]--;
2233 }
2234
2235 /* Spill every pseudo reg that was allocated to this reg
2236 or to something that overlaps this reg. */
2237
2238 val = spill_hard_reg (spill_regs[n_spills], global, dumpfile, 0);
2239
2240 /* If there are some registers still to eliminate and this register
2241 wasn't ever used before, additional stack space may have to be
2242 allocated to store this register. Thus, we may have changed the offset
2243 between the stack and frame pointers, so mark that something has changed.
2244 (If new pseudos were spilled, thus requiring more space, VAL would have
2245 been set non-zero by the call to spill_hard_reg above since additional
2246 reloads may be needed in that case.)
2247
2248 One might think that we need only set VAL to 1 if this is a call-used
2249 register. However, the set of registers that must be saved by the
2250 prologue is not identical to the call-used set. For example, the
2251 register used by the call insn for the return PC is a call-used register,
2252 but must be saved by the prologue. */
2253 if (num_eliminable && ! regs_ever_live[spill_regs[n_spills]])
2254 val = 1;
2255
2256 regs_ever_live[spill_regs[n_spills]] = 1;
2257 n_spills++;
2258
2259 return val;
2260 }
2261 \f
2262 /* Delete an unneeded INSN and any previous insns whose sole purpose is loading
2263 data that is dead in INSN. */
2264
2265 static void
2266 delete_dead_insn (insn)
2267 rtx insn;
2268 {
2269 rtx prev = prev_real_insn (insn);
2270 rtx prev_dest;
2271
2272 /* If the previous insn sets a register that dies in our insn, delete it
2273 too. */
2274 if (prev && GET_CODE (PATTERN (prev)) == SET
2275 && (prev_dest = SET_DEST (PATTERN (prev)), GET_CODE (prev_dest) == REG)
2276 && reg_mentioned_p (prev_dest, PATTERN (insn))
2277 && find_regno_note (insn, REG_DEAD, REGNO (prev_dest)))
2278 delete_dead_insn (prev);
2279
2280 PUT_CODE (insn, NOTE);
2281 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2282 NOTE_SOURCE_FILE (insn) = 0;
2283 }
2284
2285 /* Modify the home of pseudo-reg I.
2286 The new home is present in reg_renumber[I].
2287
2288 FROM_REG may be the hard reg that the pseudo-reg is being spilled from;
2289 or it may be -1, meaning there is none or it is not relevant.
2290 This is used so that all pseudos spilled from a given hard reg
2291 can share one stack slot. */
2292
2293 static void
2294 alter_reg (i, from_reg)
2295 register int i;
2296 int from_reg;
2297 {
2298 /* When outputting an inline function, this can happen
2299 for a reg that isn't actually used. */
2300 if (regno_reg_rtx[i] == 0)
2301 return;
2302
2303 /* If the reg got changed to a MEM at rtl-generation time,
2304 ignore it. */
2305 if (GET_CODE (regno_reg_rtx[i]) != REG)
2306 return;
2307
2308 /* Modify the reg-rtx to contain the new hard reg
2309 number or else to contain its pseudo reg number. */
2310 REGNO (regno_reg_rtx[i])
2311 = reg_renumber[i] >= 0 ? reg_renumber[i] : i;
2312
2313 /* If we have a pseudo that is needed but has no hard reg or equivalent,
2314 allocate a stack slot for it. */
2315
2316 if (reg_renumber[i] < 0
2317 && reg_n_refs[i] > 0
2318 && reg_equiv_constant[i] == 0
2319 && reg_equiv_memory_loc[i] == 0)
2320 {
2321 register rtx x;
2322 int inherent_size = PSEUDO_REGNO_BYTES (i);
2323 int total_size = MAX (inherent_size, reg_max_ref_width[i]);
2324 int adjust = 0;
2325
2326 /* Each pseudo reg has an inherent size which comes from its own mode,
2327 and a total size which provides room for paradoxical subregs
2328 which refer to the pseudo reg in wider modes.
2329
2330 We can use a slot already allocated if it provides both
2331 enough inherent space and enough total space.
2332 Otherwise, we allocate a new slot, making sure that it has no less
2333 inherent space, and no less total space, than the previous slot. */
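/* For instance, on a hypothetical 32-bit target, a 4-byte SImode pseudo that
   is also referenced through a paradoxical 8-byte DImode subreg has
   inherent_size == 4 but total_size == 8 (via reg_max_ref_width), so any
   slot it reuses must be at least 8 bytes wide and have a mode of at least
   4 bytes.  */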
2334 if (from_reg == -1)
2335 {
2336 /* No known place to spill from => no slot to reuse. */
2337 x = assign_stack_local (GET_MODE (regno_reg_rtx[i]), total_size, -1);
2338 #if BYTES_BIG_ENDIAN
2339 /* Cancel the big-endian correction done in assign_stack_local.
2340 Get the address of the beginning of the slot.
2341 This is so we can do a big-endian correction unconditionally
2342 below. */
2343 adjust = inherent_size - total_size;
2344 #endif
2345 }
2346 /* Reuse a stack slot if possible. */
2347 else if (spill_stack_slot[from_reg] != 0
2348 && spill_stack_slot_width[from_reg] >= total_size
2349 && (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
2350 >= inherent_size))
2351 x = spill_stack_slot[from_reg];
2352 /* Allocate a bigger slot. */
2353 else
2354 {
2355 /* Compute maximum size needed, both for inherent size
2356 and for total size. */
2357 enum machine_mode mode = GET_MODE (regno_reg_rtx[i]);
2358 if (spill_stack_slot[from_reg])
2359 {
2360 if (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
2361 > inherent_size)
2362 mode = GET_MODE (spill_stack_slot[from_reg]);
2363 if (spill_stack_slot_width[from_reg] > total_size)
2364 total_size = spill_stack_slot_width[from_reg];
2365 }
2366 /* Make a slot with that size. */
2367 x = assign_stack_local (mode, total_size, -1);
2368 #if BYTES_BIG_ENDIAN
2369 /* Cancel the big-endian correction done in assign_stack_local.
2370 Get the address of the beginning of the slot.
2371 This is so we can do a big-endian correction unconditionally
2372 below. */
2373 adjust = GET_MODE_SIZE (mode) - total_size;
2374 #endif
2375 spill_stack_slot[from_reg] = x;
2376 spill_stack_slot_width[from_reg] = total_size;
2377 }
2378
2379 #if BYTES_BIG_ENDIAN
2380 /* On a big endian machine, the "address" of the slot
2381 is the address of the low part that fits its inherent mode. */
2382 if (inherent_size < total_size)
2383 adjust += (total_size - inherent_size);
2384 #endif /* BYTES_BIG_ENDIAN */
2385
2386 /* If we have any adjustment to make, or if the stack slot is the
2387 wrong mode, make a new stack slot. */
2388 if (adjust != 0 || GET_MODE (x) != GET_MODE (regno_reg_rtx[i]))
2389 {
2390 x = gen_rtx (MEM, GET_MODE (regno_reg_rtx[i]),
2391 plus_constant (XEXP (x, 0), adjust));
2392 RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (regno_reg_rtx[i]);
2393 }
2394
2395 /* Save the stack slot for later. */
2396 reg_equiv_memory_loc[i] = x;
2397 }
2398 }
2399
2400 /* Mark the slots in regs_ever_live for the hard regs
2401 used by pseudo-reg number REGNO. */
2402
2403 void
2404 mark_home_live (regno)
2405 int regno;
2406 {
2407 register int i, lim;
2408 i = reg_renumber[regno];
2409 if (i < 0)
2410 return;
2411 lim = i + HARD_REGNO_NREGS (i, PSEUDO_REGNO_MODE (regno));
2412 while (i < lim)
2413 regs_ever_live[i++] = 1;
2414 }
2415
2416 /* Mark the registers used in SCRATCH as being live. */
2417
2418 static void
2419 mark_scratch_live (scratch)
2420 rtx scratch;
2421 {
2422 register int i;
2423 int regno = REGNO (scratch);
2424 int lim = regno + HARD_REGNO_NREGS (regno, GET_MODE (scratch));
2425
2426 for (i = regno; i < lim; i++)
2427 regs_ever_live[i] = 1;
2428 }
2429 \f
2430 /* This function handles the tracking of elimination offsets around branches.
2431
2432 X is a piece of RTL being scanned.
2433
2434 INSN is the insn that it came from, if any.
2435
2436 INITIAL_P is non-zero if we are to set the offset to be the initial
2437 offset and zero if we are setting the offset of the label to be the
2438 current offset. */
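/* Roughly: the first time a label is seen, the current (or initial)
   per-elimination offsets are recorded for it; when the same label is reached
   again along another path, any elimination whose offset disagrees with the
   recorded value is disabled, since the code at the label could otherwise be
   reached with two different frame layouts.  (A sketch of the intent; the
   cases below handle the details.)  */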
2439
2440 static void
2441 set_label_offsets (x, insn, initial_p)
2442 rtx x;
2443 rtx insn;
2444 int initial_p;
2445 {
2446 enum rtx_code code = GET_CODE (x);
2447 rtx tem;
2448 int i;
2449 struct elim_table *p;
2450
2451 switch (code)
2452 {
2453 case LABEL_REF:
2454 if (LABEL_REF_NONLOCAL_P (x))
2455 return;
2456
2457 x = XEXP (x, 0);
2458
2459 /* ... fall through ... */
2460
2461 case CODE_LABEL:
2462 /* If we know nothing about this label, set the desired offsets. Note
2463 that this sets the offset at a label to be the offset before a label
2464 if we don't know anything about the label. This is not correct for
2465 the label after a BARRIER, but is the best guess we can make. If
2466 we guessed wrong, we will suppress an elimination that might have
2467 been possible had we been able to guess correctly. */
2468
2469 if (! offsets_known_at[CODE_LABEL_NUMBER (x)])
2470 {
2471 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2472 offsets_at[CODE_LABEL_NUMBER (x)][i]
2473 = (initial_p ? reg_eliminate[i].initial_offset
2474 : reg_eliminate[i].offset);
2475 offsets_known_at[CODE_LABEL_NUMBER (x)] = 1;
2476 }
2477
2478 /* Otherwise, if this is the definition of a label and it is
2479 preceded by a BARRIER, set our offsets to the known offset of
2480 that label. */
2481
2482 else if (x == insn
2483 && (tem = prev_nonnote_insn (insn)) != 0
2484 && GET_CODE (tem) == BARRIER)
2485 {
2486 num_not_at_initial_offset = 0;
2487 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2488 {
2489 reg_eliminate[i].offset = reg_eliminate[i].previous_offset
2490 = offsets_at[CODE_LABEL_NUMBER (x)][i];
2491 if (reg_eliminate[i].can_eliminate
2492 && (reg_eliminate[i].offset
2493 != reg_eliminate[i].initial_offset))
2494 num_not_at_initial_offset++;
2495 }
2496 }
2497
2498 else
2499 /* If neither of the above cases is true, compare each offset
2500 with those previously recorded and suppress any eliminations
2501 where the offsets disagree. */
2502
2503 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2504 if (offsets_at[CODE_LABEL_NUMBER (x)][i]
2505 != (initial_p ? reg_eliminate[i].initial_offset
2506 : reg_eliminate[i].offset))
2507 reg_eliminate[i].can_eliminate = 0;
2508
2509 return;
2510
2511 case JUMP_INSN:
2512 set_label_offsets (PATTERN (insn), insn, initial_p);
2513
2514 /* ... fall through ... */
2515
2516 case INSN:
2517 case CALL_INSN:
2518 /* Any labels mentioned in REG_LABEL notes can be branched to indirectly
2519 and hence must have all eliminations at their initial offsets. */
2520 for (tem = REG_NOTES (x); tem; tem = XEXP (tem, 1))
2521 if (REG_NOTE_KIND (tem) == REG_LABEL)
2522 set_label_offsets (XEXP (tem, 0), insn, 1);
2523 return;
2524
2525 case ADDR_VEC:
2526 case ADDR_DIFF_VEC:
2527 /* Each of the labels in the address vector must be at their initial
2528 offsets. We want the first field for ADDR_VEC and the second
2529 field for ADDR_DIFF_VEC. */
2530
2531 for (i = 0; i < XVECLEN (x, code == ADDR_DIFF_VEC); i++)
2532 set_label_offsets (XVECEXP (x, code == ADDR_DIFF_VEC, i),
2533 insn, initial_p);
2534 return;
2535
2536 case SET:
2537 /* We only care about setting PC. If the source is not RETURN,
2538 IF_THEN_ELSE, or a label, disable any eliminations not at
2539 their initial offsets. Similarly if any arm of the IF_THEN_ELSE
2540 isn't one of those possibilities. For branches to a label,
2541 call ourselves recursively.
2542
2543 Note that this can disable elimination unnecessarily when we have
2544 a non-local goto since it will look like a non-constant jump to
2545 someplace in the current function. This isn't a significant
2546 problem since such jumps will normally be when all elimination
2547 pairs are back to their initial offsets. */
2548
2549 if (SET_DEST (x) != pc_rtx)
2550 return;
2551
2552 switch (GET_CODE (SET_SRC (x)))
2553 {
2554 case PC:
2555 case RETURN:
2556 return;
2557
2558 case LABEL_REF:
2559 set_label_offsets (XEXP (SET_SRC (x), 0), insn, initial_p);
2560 return;
2561
2562 case IF_THEN_ELSE:
2563 tem = XEXP (SET_SRC (x), 1);
2564 if (GET_CODE (tem) == LABEL_REF)
2565 set_label_offsets (XEXP (tem, 0), insn, initial_p);
2566 else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2567 break;
2568
2569 tem = XEXP (SET_SRC (x), 2);
2570 if (GET_CODE (tem) == LABEL_REF)
2571 set_label_offsets (XEXP (tem, 0), insn, initial_p);
2572 else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2573 break;
2574 return;
2575 }
2576
2577 /* If we reach here, all eliminations must be at their initial
2578 offset because we are doing a jump to a variable address. */
2579 for (p = reg_eliminate; p < &reg_eliminate[NUM_ELIMINABLE_REGS]; p++)
2580 if (p->offset != p->initial_offset)
2581 p->can_eliminate = 0;
2582 }
2583 }
2584 \f
2585 /* Used for communication between the next two functions to properly share
2586 the vector for an ASM_OPERANDS. */
2587
2588 static struct rtvec_def *old_asm_operands_vec, *new_asm_operands_vec;
2589
2590 /* Scan X and replace any eliminable registers (such as fp) with a
2591 replacement (such as sp), plus an offset.
2592
2593 MEM_MODE is the mode of an enclosing MEM. We need this to know how
2594 much to adjust a register for, e.g., PRE_DEC. Also, if we are inside a
2595 MEM, we are allowed to replace a sum of a register and the constant zero
2596 with the register, which we cannot do outside a MEM. In addition, we need
2597 to record the fact that a register is referenced outside a MEM.
2598
2599 If INSN is an insn, it is the insn containing X. If we replace a REG
2600 in a SET_DEST with an equivalent MEM and INSN is non-zero, write a
2601 CLOBBER of the pseudo after INSN so find_equiv_regs will know
2602 that the REG is being modified.
2603
2604 Alternatively, INSN may be a note (an EXPR_LIST or INSN_LIST).
2605 That's used when we eliminate in expressions stored in notes.
2606 This means, do not set ref_outside_mem even if the reference
2607 is outside of MEMs.
2608
2609 If we see a modification to a register we know about, take the
2610 appropriate action (see case SET, below).
2611
2612 REG_EQUIV_MEM and REG_EQUIV_ADDRESS contain addresses that have had
2613 replacements done assuming all offsets are at their initial values. If
2614 they are not, or if REG_EQUIV_ADDRESS is nonzero for a pseudo we
2615 encounter, return the actual location so that find_reloads will do
2616 the proper thing. */
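/* As a small example, suppose the frame pointer is being eliminated in favor
   of the stack pointer at a current offset of 16 (an arbitrary value): a bare
   (reg fp) becomes (plus (reg sp) (const_int 16)), and
   (plus (reg fp) (const_int 8)) becomes (plus (reg sp) (const_int 24)), as
   handled by the REG and PLUS cases below.  */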
2617
2618 rtx
2619 eliminate_regs (x, mem_mode, insn)
2620 rtx x;
2621 enum machine_mode mem_mode;
2622 rtx insn;
2623 {
2624 enum rtx_code code = GET_CODE (x);
2625 struct elim_table *ep;
2626 int regno;
2627 rtx new;
2628 int i, j;
2629 char *fmt;
2630 int copied = 0;
2631
2632 switch (code)
2633 {
2634 case CONST_INT:
2635 case CONST_DOUBLE:
2636 case CONST:
2637 case SYMBOL_REF:
2638 case CODE_LABEL:
2639 case PC:
2640 case CC0:
2641 case ASM_INPUT:
2642 case ADDR_VEC:
2643 case ADDR_DIFF_VEC:
2644 case RETURN:
2645 return x;
2646
2647 case REG:
2648 regno = REGNO (x);
2649
2650 /* First handle the case where we encounter a bare register that
2651 is eliminable. Replace it with a PLUS. */
2652 if (regno < FIRST_PSEUDO_REGISTER)
2653 {
2654 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2655 ep++)
2656 if (ep->from_rtx == x && ep->can_eliminate)
2657 {
2658 if (! mem_mode
2659 /* Refs inside notes don't count for this purpose. */
2660 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2661 || GET_CODE (insn) == INSN_LIST)))
2662 ep->ref_outside_mem = 1;
2663 return plus_constant (ep->to_rtx, ep->previous_offset);
2664 }
2665
2666 }
2667 else if (reg_equiv_memory_loc && reg_equiv_memory_loc[regno]
2668 && (reg_equiv_address[regno] || num_not_at_initial_offset))
2669 {
2670 /* In this case, find_reloads would attempt to either use an
2671 incorrect address (if something is not at its initial offset)
2672 or substitute a replaced address into an insn (which loses
2673 if the offset is changed by some later action). So we simply
2674 return the replaced stack slot (assuming it is changed by
2675 elimination) and ignore the fact that this is actually a
2676 reference to the pseudo. Ensure we make a copy of the
2677 address in case it is shared. */
2678 new = eliminate_regs (reg_equiv_memory_loc[regno],
2679 mem_mode, insn);
2680 if (new != reg_equiv_memory_loc[regno])
2681 {
2682 cannot_omit_stores[regno] = 1;
2683 return copy_rtx (new);
2684 }
2685 }
2686 return x;
2687
2688 case PLUS:
2689 /* If this is the sum of an eliminable register and a constant, rework
2690 the sum. */
2691 if (GET_CODE (XEXP (x, 0)) == REG
2692 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2693 && CONSTANT_P (XEXP (x, 1)))
2694 {
2695 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2696 ep++)
2697 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2698 {
2699 if (! mem_mode
2700 /* Refs inside notes don't count for this purpose. */
2701 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2702 || GET_CODE (insn) == INSN_LIST)))
2703 ep->ref_outside_mem = 1;
2704
2705 /* The only time we want to replace a PLUS with a REG (this
2706 occurs when the constant operand of the PLUS is the negative
2707 of the offset) is when we are inside a MEM. We won't want
2708 to do so at other times because that would change the
2709 structure of the insn in a way that reload can't handle.
2710 We special-case the commonest situation in
2711 eliminate_regs_in_insn, so just replace a PLUS with a
2712 PLUS here, unless inside a MEM. */
2713 if (mem_mode != 0 && GET_CODE (XEXP (x, 1)) == CONST_INT
2714 && INTVAL (XEXP (x, 1)) == - ep->previous_offset)
2715 return ep->to_rtx;
2716 else
2717 return gen_rtx (PLUS, Pmode, ep->to_rtx,
2718 plus_constant (XEXP (x, 1),
2719 ep->previous_offset));
2720 }
2721
2722 /* If the register is not eliminable, we are done since the other
2723 operand is a constant. */
2724 return x;
2725 }
2726
2727 /* If this is part of an address, we want to bring any constant to the
2728 outermost PLUS. We will do this by doing register replacement in
2729 our operands and seeing if a constant shows up in one of them.
2730
2731 We assume here this is part of an address (or a "load address" insn)
2732 since an eliminable register is not likely to appear in any other
2733 context.
2734
2735 If we have (plus (eliminable) (reg)), we want to produce
2736 (plus (plus (replacement) (reg)) (const)). If this was part of a
2737 normal add insn, (plus (replacement) (reg)) will be pushed as a
2738 reload. This is the desired action. */
2739
2740 {
2741 rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2742 rtx new1 = eliminate_regs (XEXP (x, 1), mem_mode, insn);
2743
2744 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
2745 {
2746 /* If one side is a PLUS and the other side is a pseudo that
2747 didn't get a hard register but has a reg_equiv_constant,
2748 we must replace the constant here since it may no longer
2749 be in the position of any operand. */
2750 if (GET_CODE (new0) == PLUS && GET_CODE (new1) == REG
2751 && REGNO (new1) >= FIRST_PSEUDO_REGISTER
2752 && reg_renumber[REGNO (new1)] < 0
2753 && reg_equiv_constant != 0
2754 && reg_equiv_constant[REGNO (new1)] != 0)
2755 new1 = reg_equiv_constant[REGNO (new1)];
2756 else if (GET_CODE (new1) == PLUS && GET_CODE (new0) == REG
2757 && REGNO (new0) >= FIRST_PSEUDO_REGISTER
2758 && reg_renumber[REGNO (new0)] < 0
2759 && reg_equiv_constant[REGNO (new0)] != 0)
2760 new0 = reg_equiv_constant[REGNO (new0)];
2761
2762 new = form_sum (new0, new1);
2763
2764 /* As above, if we are not inside a MEM we do not want to
2765 turn a PLUS into something else. We might try to do so here
2766 for an addition of 0 if we aren't optimizing. */
2767 if (! mem_mode && GET_CODE (new) != PLUS)
2768 return gen_rtx (PLUS, GET_MODE (x), new, const0_rtx);
2769 else
2770 return new;
2771 }
2772 }
2773 return x;
2774
2775 case EXPR_LIST:
2776 /* If we have something in XEXP (x, 0), the usual case, eliminate it. */
2777 if (XEXP (x, 0))
2778 {
2779 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2780 if (new != XEXP (x, 0))
2781 x = gen_rtx (EXPR_LIST, REG_NOTE_KIND (x), new, XEXP (x, 1));
2782 }
2783
2784 /* ... fall through ... */
2785
2786 case INSN_LIST:
2787 /* Now do eliminations in the rest of the chain. If this was
2788 an EXPR_LIST, this might result in allocating more memory than is
2789 strictly needed, but it simplifies the code. */
2790 if (XEXP (x, 1))
2791 {
2792 new = eliminate_regs (XEXP (x, 1), mem_mode, insn);
2793 if (new != XEXP (x, 1))
2794 return gen_rtx (INSN_LIST, GET_MODE (x), XEXP (x, 0), new);
2795 }
2796 return x;
2797
2798 case CALL:
2799 case COMPARE:
2800 case MINUS:
2801 case MULT:
2802 case DIV: case UDIV:
2803 case MOD: case UMOD:
2804 case AND: case IOR: case XOR:
2805 case LSHIFT: case ASHIFT: case ROTATE:
2806 case ASHIFTRT: case LSHIFTRT: case ROTATERT:
2807 case NE: case EQ:
2808 case GE: case GT: case GEU: case GTU:
2809 case LE: case LT: case LEU: case LTU:
2810 {
2811 rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2812 rtx new1
2813 = XEXP (x, 1) ? eliminate_regs (XEXP (x, 1), mem_mode, insn) : 0;
2814
2815 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
2816 return gen_rtx (code, GET_MODE (x), new0, new1);
2817 }
2818 return x;
2819
2820 case PRE_INC:
2821 case POST_INC:
2822 case PRE_DEC:
2823 case POST_DEC:
2824 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2825 if (ep->to_rtx == XEXP (x, 0))
2826 {
2827 int size = GET_MODE_SIZE (mem_mode);
2828
2829 /* If more bytes than MEM_MODE are pushed, account for them. */
2830 #ifdef PUSH_ROUNDING
2831 if (ep->to_rtx == stack_pointer_rtx)
2832 size = PUSH_ROUNDING (size);
2833 #endif
2834 if (code == PRE_DEC || code == POST_DEC)
2835 ep->offset += size;
2836 else
2837 ep->offset -= size;
2838 }
2839
2840 /* Fall through to generic unary operation case. */
2841 case USE:
2842 case STRICT_LOW_PART:
2843 case NEG: case NOT:
2844 case SIGN_EXTEND: case ZERO_EXTEND:
2845 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
2846 case FLOAT: case FIX:
2847 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
2848 case ABS:
2849 case SQRT:
2850 case FFS:
2851 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2852 if (new != XEXP (x, 0))
2853 return gen_rtx (code, GET_MODE (x), new);
2854 return x;
2855
2856 case SUBREG:
2857 /* Similar to above processing, but preserve SUBREG_WORD.
2858 Convert (subreg (mem)) to (mem) if not paradoxical.
2859 Also, if we have a non-paradoxical (subreg (pseudo)) and the
2860 pseudo didn't get a hard reg, we must replace this with the
2861 eliminated version of the memory location because push_reloads
2862 may do the replacement in certain circumstances. */
2863 if (GET_CODE (SUBREG_REG (x)) == REG
2864 && (GET_MODE_SIZE (GET_MODE (x))
2865 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
2866 && reg_equiv_memory_loc != 0
2867 && reg_equiv_memory_loc[REGNO (SUBREG_REG (x))] != 0)
2868 {
2869 new = eliminate_regs (reg_equiv_memory_loc[REGNO (SUBREG_REG (x))],
2870 mem_mode, insn);
2871
2872 /* If we didn't change anything, we must retain the pseudo. */
2873 if (new == reg_equiv_memory_loc[REGNO (SUBREG_REG (x))])
2874 new = XEXP (x, 0);
2875 else
2876 /* Otherwise, ensure NEW isn't shared in case we have to reload
2877 it. */
2878 new = copy_rtx (new);
2879 }
2880 else
2881 new = eliminate_regs (SUBREG_REG (x), mem_mode, insn);
2882
2883 if (new != XEXP (x, 0))
2884 {
2885 if (GET_CODE (new) == MEM
2886 && (GET_MODE_SIZE (GET_MODE (x))
2887 <= GET_MODE_SIZE (GET_MODE (new)))
2888 #ifdef LOAD_EXTEND_OP
2889 /* On these machines we will be reloading what is
2890 inside the SUBREG if it originally was a pseudo and
2891 the inner and outer modes are both a word or
2892 smaller. So leave the SUBREG then. */
2893 && ! (GET_CODE (SUBREG_REG (x)) == REG
2894 && GET_MODE_SIZE (GET_MODE (x)) <= UNITS_PER_WORD
2895 && GET_MODE_SIZE (GET_MODE (new)) <= UNITS_PER_WORD)
2896 #endif
2897 )
2898 {
2899 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
2900 enum machine_mode mode = GET_MODE (x);
2901
2902 #if BYTES_BIG_ENDIAN
2903 offset += (MIN (UNITS_PER_WORD,
2904 GET_MODE_SIZE (GET_MODE (new)))
2905 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
2906 #endif
2907
2908 PUT_MODE (new, mode);
2909 XEXP (new, 0) = plus_constant (XEXP (new, 0), offset);
2910 return new;
2911 }
2912 else
2913 return gen_rtx (SUBREG, GET_MODE (x), new, SUBREG_WORD (x));
2914 }
2915
2916 return x;
2917
2918 case CLOBBER:
2919 /* If clobbering a register that is the replacement register for an
2920 elimination we still think can be performed, note that it cannot
2921 be performed. Otherwise, we need not be concerned about it. */
2922 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2923 if (ep->to_rtx == XEXP (x, 0))
2924 ep->can_eliminate = 0;
2925
2926 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2927 if (new != XEXP (x, 0))
2928 return gen_rtx (code, GET_MODE (x), new);
2929 return x;
2930
2931 case ASM_OPERANDS:
2932 {
2933 rtx *temp_vec;
2934 /* Properly handle sharing input and constraint vectors. */
2935 if (ASM_OPERANDS_INPUT_VEC (x) != old_asm_operands_vec)
2936 {
2937 /* When we come to a new vector not seen before,
2938 scan all its elements; keep the old vector if none
2939 of them changes; otherwise, make a copy. */
2940 old_asm_operands_vec = ASM_OPERANDS_INPUT_VEC (x);
2941 temp_vec = (rtx *) alloca (XVECLEN (x, 3) * sizeof (rtx));
2942 for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
2943 temp_vec[i] = eliminate_regs (ASM_OPERANDS_INPUT (x, i),
2944 mem_mode, insn);
2945
2946 for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
2947 if (temp_vec[i] != ASM_OPERANDS_INPUT (x, i))
2948 break;
2949
2950 if (i == ASM_OPERANDS_INPUT_LENGTH (x))
2951 new_asm_operands_vec = old_asm_operands_vec;
2952 else
2953 new_asm_operands_vec
2954 = gen_rtvec_v (ASM_OPERANDS_INPUT_LENGTH (x), temp_vec);
2955 }
2956
2957 /* If we had to copy the vector, copy the entire ASM_OPERANDS. */
2958 if (new_asm_operands_vec == old_asm_operands_vec)
2959 return x;
2960
2961 new = gen_rtx (ASM_OPERANDS, VOIDmode, ASM_OPERANDS_TEMPLATE (x),
2962 ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
2963 ASM_OPERANDS_OUTPUT_IDX (x), new_asm_operands_vec,
2964 ASM_OPERANDS_INPUT_CONSTRAINT_VEC (x),
2965 ASM_OPERANDS_SOURCE_FILE (x),
2966 ASM_OPERANDS_SOURCE_LINE (x));
2967 new->volatil = x->volatil;
2968 return new;
2969 }
2970
2971 case SET:
2972 /* Check for setting a register that we know about. */
2973 if (GET_CODE (SET_DEST (x)) == REG)
2974 {
2975 /* See if this is setting the replacement register for an
2976 elimination.
2977
2978 If DEST is the hard frame pointer, we do nothing because we
2979 assume that all assignments to the frame pointer are for
2980 non-local gotos and are being done at a time when they are valid
2981 and do not disturb anything else. Some machines want to
2982 eliminate a fake argument pointer (or even a fake frame pointer)
2983 with either the real frame or the stack pointer. Assignments to
2984 the hard frame pointer must not prevent this elimination. */
2985
2986 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2987 ep++)
2988 if (ep->to_rtx == SET_DEST (x)
2989 && SET_DEST (x) != hard_frame_pointer_rtx)
2990 {
2991 /* If it is being incremented, adjust the offset. Otherwise,
2992 this elimination can't be done. */
2993 rtx src = SET_SRC (x);
2994
2995 if (GET_CODE (src) == PLUS
2996 && XEXP (src, 0) == SET_DEST (x)
2997 && GET_CODE (XEXP (src, 1)) == CONST_INT)
2998 ep->offset -= INTVAL (XEXP (src, 1));
2999 else
3000 ep->can_eliminate = 0;
3001 }
3002
3003 /* Now check to see if we are assigning to a register that can be
3004 eliminated. If so, it must be as part of a PARALLEL, since we
3005 will not have been called if this is a single SET. So indicate
3006 that we can no longer eliminate this reg. */
3007 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3008 ep++)
3009 if (ep->from_rtx == SET_DEST (x) && ep->can_eliminate)
3010 ep->can_eliminate = 0;
3011 }
3012
3013 /* Now avoid the loop below in this common case. */
3014 {
3015 rtx new0 = eliminate_regs (SET_DEST (x), 0, insn);
3016 rtx new1 = eliminate_regs (SET_SRC (x), 0, insn);
3017
3018 /* If SET_DEST changed from a REG to a MEM and INSN is an insn,
3019 write a CLOBBER insn. */
3020 if (GET_CODE (SET_DEST (x)) == REG && GET_CODE (new0) == MEM
3021 && insn != 0 && GET_CODE (insn) != EXPR_LIST
3022 && GET_CODE (insn) != INSN_LIST)
3023 emit_insn_after (gen_rtx (CLOBBER, VOIDmode, SET_DEST (x)), insn);
3024
3025 if (new0 != SET_DEST (x) || new1 != SET_SRC (x))
3026 return gen_rtx (SET, VOIDmode, new0, new1);
3027 }
3028
3029 return x;
3030
3031 case MEM:
3032 /* Our only special processing is to pass the mode of the MEM to our
3033 recursive call and copy the flags. While we are here, handle this
3034 case more efficiently. */
3035 new = eliminate_regs (XEXP (x, 0), GET_MODE (x), insn);
3036 if (new != XEXP (x, 0))
3037 {
3038 new = gen_rtx (MEM, GET_MODE (x), new);
3039 new->volatil = x->volatil;
3040 new->unchanging = x->unchanging;
3041 new->in_struct = x->in_struct;
3042 return new;
3043 }
3044 else
3045 return x;
3046 }
3047
3048 /* Process each of our operands recursively. If any have changed, make a
3049 copy of the rtx. */
3050 fmt = GET_RTX_FORMAT (code);
3051 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3052 {
3053 if (*fmt == 'e')
3054 {
3055 new = eliminate_regs (XEXP (x, i), mem_mode, insn);
3056 if (new != XEXP (x, i) && ! copied)
3057 {
3058 rtx new_x = rtx_alloc (code);
3059 bcopy (x, new_x, (sizeof (*new_x) - sizeof (new_x->fld)
3060 + (sizeof (new_x->fld[0])
3061 * GET_RTX_LENGTH (code))));
3062 x = new_x;
3063 copied = 1;
3064 }
3065 XEXP (x, i) = new;
3066 }
3067 else if (*fmt == 'E')
3068 {
3069 int copied_vec = 0;
3070 for (j = 0; j < XVECLEN (x, i); j++)
3071 {
3072 new = eliminate_regs (XVECEXP (x, i, j), mem_mode, insn);
3073 if (new != XVECEXP (x, i, j) && ! copied_vec)
3074 {
3075 rtvec new_v = gen_rtvec_v (XVECLEN (x, i),
3076 &XVECEXP (x, i, 0));
3077 if (! copied)
3078 {
3079 rtx new_x = rtx_alloc (code);
3080 bcopy (x, new_x, (sizeof (*new_x) - sizeof (new_x->fld)
3081 + (sizeof (new_x->fld[0])
3082 * GET_RTX_LENGTH (code))));
3083 x = new_x;
3084 copied = 1;
3085 }
3086 XVEC (x, i) = new_v;
3087 copied_vec = 1;
3088 }
3089 XVECEXP (x, i, j) = new;
3090 }
3091 }
3092 }
3093
3094 return x;
3095 }
3096 \f
3097 /* Scan INSN and eliminate all eliminable registers in it.
3098
3099 If REPLACE is nonzero, do the replacement destructively. Also
3100 delete the insn as dead if it is setting an eliminable register.
3101
3102 If REPLACE is zero, do all our allocations in reload_obstack.
3103
3104 If no eliminations were done and this insn doesn't require any elimination
3105 processing (these are not identical conditions: it might be updating sp,
3106 but not referencing fp; this needs to be seen during reload_as_needed so
3107 that the offset between fp and sp can be taken into consideration), zero
3108 is returned. Otherwise, 1 is returned. */
3109
3110 static int
3111 eliminate_regs_in_insn (insn, replace)
3112 rtx insn;
3113 int replace;
3114 {
3115 rtx old_body = PATTERN (insn);
3116 rtx new_body;
3117 int val = 0;
3118 struct elim_table *ep;
3119
3120 if (! replace)
3121 push_obstacks (&reload_obstack, &reload_obstack);
3122
3123 if (GET_CODE (old_body) == SET && GET_CODE (SET_DEST (old_body)) == REG
3124 && REGNO (SET_DEST (old_body)) < FIRST_PSEUDO_REGISTER)
3125 {
3126 /* Check for setting an eliminable register. */
3127 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3128 if (ep->from_rtx == SET_DEST (old_body) && ep->can_eliminate)
3129 {
3130 /* In this case this insn isn't serving a useful purpose. We
3131 will delete it in reload_as_needed once we know that this
3132 elimination is, in fact, being done.
3133
3134 If REPLACE isn't set, we can't delete this insn, but needn't
3135 process it since it won't be used unless something changes. */
3136 if (replace)
3137 delete_dead_insn (insn);
3138 val = 1;
3139 goto done;
3140 }
3141
3142 /* Check for (set (reg) (plus (reg from) (offset))) where the offset
3143 in the insn is the negative of the offset in FROM. Substitute
3144 (set (reg) (reg to)) for the insn and change its code.
3145
3146 We have to do this here, rather than in eliminate_regs, so that we can
3147 change the insn code. */
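/* For example, with a frame-pointer-to-stack-pointer elimination whose
   current offset is 16 (an arbitrary value), an insn of the form
   (set (reg X) (plus (reg fp) (const_int -16))) collapses here to
   (set (reg X) (reg sp)), because substituting sp+16 for fp makes the
   constant cancel.  */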
3148
3149 if (GET_CODE (SET_SRC (old_body)) == PLUS
3150 && GET_CODE (XEXP (SET_SRC (old_body), 0)) == REG
3151 && GET_CODE (XEXP (SET_SRC (old_body), 1)) == CONST_INT)
3152 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3153 ep++)
3154 if (ep->from_rtx == XEXP (SET_SRC (old_body), 0)
3155 && ep->can_eliminate)
3156 {
3157 /* We must stop at the first elimination that will be used.
3158 If this one would replace the PLUS with a REG, do it
3159 now. Otherwise, quit the loop and let eliminate_regs
3160 do its normal replacement. */
3161 if (ep->offset == - INTVAL (XEXP (SET_SRC (old_body), 1)))
3162 {
3163 PATTERN (insn) = gen_rtx (SET, VOIDmode,
3164 SET_DEST (old_body), ep->to_rtx);
3165 INSN_CODE (insn) = -1;
3166 val = 1;
3167 goto done;
3168 }
3169
3170 break;
3171 }
3172 }
3173
3174 old_asm_operands_vec = 0;
3175
3176 /* Replace the body of this insn with a substituted form. If we changed
3177 something, return non-zero.
3178
3179 If we are replacing a body that was a (set X (plus Y Z)), try to
3180 re-recognize the insn. We do this in case we had a simple addition
3181 but now can do this as a load-address. This saves an insn in this
3182 common case. */
3183
3184 new_body = eliminate_regs (old_body, 0, replace ? insn : NULL_RTX);
3185 if (new_body != old_body)
3186 {
3187 /* If we aren't replacing things permanently and we changed something,
3188 make another copy to ensure that all the RTL is new. Otherwise
3189 things can go wrong if find_reload swaps commutative operands
3190 and one is inside RTL that has been copied while the other is not. */
3191
3192 /* Don't copy an asm_operands because (1) there's no need and (2)
3193 copy_rtx can't do it properly when there are multiple outputs. */
3194 if (! replace && asm_noperands (old_body) < 0)
3195 new_body = copy_rtx (new_body);
3196
3197 /* If we had a move insn but now we don't, rerecognize it. */
3198 if ((GET_CODE (old_body) == SET && GET_CODE (SET_SRC (old_body)) == REG
3199 && (GET_CODE (new_body) != SET
3200 || GET_CODE (SET_SRC (new_body)) != REG))
3201 /* If this was a load from or store to memory, compare
3202 the MEM in recog_operand to the one in the insn. If they
3203 are not equal, then rerecognize the insn. */
3204 || (GET_CODE (old_body) == SET
3205 && ((GET_CODE (SET_SRC (old_body)) == MEM
3206 && SET_SRC (old_body) != recog_operand[1])
3207 || (GET_CODE (SET_DEST (old_body)) == MEM
3208 && SET_DEST (old_body) != recog_operand[0])))
3209 /* If this was an add insn before, rerecognize. */
3210 ||
3211 (GET_CODE (old_body) == SET
3212 && GET_CODE (SET_SRC (old_body)) == PLUS))
3213 {
3214 if (! validate_change (insn, &PATTERN (insn), new_body, 0))
3215 /* If recognition fails, store the new body anyway.
3216 It's normal to have recognition failures here
3217 due to bizarre memory addresses; reloading will fix them. */
3218 PATTERN (insn) = new_body;
3219 }
3220 else
3221 PATTERN (insn) = new_body;
3222
3223 val = 1;
3224 }
3225
3226 /* Loop through all elimination pairs. See if any have changed and
3227 recalculate the number not at initial offset.
3228
3229 Compute the maximum offset (minimum offset if the stack does not
3230 grow downward) for each elimination pair.
3231
3232      We also detect cases where register elimination cannot be done,
3233 namely, if a register would be both changed and referenced outside a MEM
3234 in the resulting insn since such an insn is often undefined and, even if
3235 not, we cannot know what meaning will be given to it. Note that it is
3236 valid to have a register used in an address in an insn that changes it
3237 (presumably with a pre- or post-increment or decrement).
3238
3239 If anything changes, return nonzero. */
3240
3241 num_not_at_initial_offset = 0;
3242 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3243 {
3244 if (ep->previous_offset != ep->offset && ep->ref_outside_mem)
3245 ep->can_eliminate = 0;
3246
3247 ep->ref_outside_mem = 0;
3248
3249 if (ep->previous_offset != ep->offset)
3250 val = 1;
3251
3252 ep->previous_offset = ep->offset;
3253 if (ep->can_eliminate && ep->offset != ep->initial_offset)
3254 num_not_at_initial_offset++;
3255
3256 #ifdef STACK_GROWS_DOWNWARD
3257 ep->max_offset = MAX (ep->max_offset, ep->offset);
3258 #else
3259 ep->max_offset = MIN (ep->max_offset, ep->offset);
3260 #endif
3261 }
3262
3263 done:
3264   /* If we changed something, perform elimination in REG_NOTES.  This is
3265 needed even when REPLACE is zero because a REG_DEAD note might refer
3266 to a register that we eliminate and could cause a different number
3267 of spill registers to be needed in the final reload pass than in
3268 the pre-passes. */
3269 if (val && REG_NOTES (insn) != 0)
3270 REG_NOTES (insn) = eliminate_regs (REG_NOTES (insn), 0, REG_NOTES (insn));
3271
3272 if (! replace)
3273 pop_obstacks ();
3274
3275 return val;
3276 }
3277
3278 /* Given X, a SET or CLOBBER of DEST, if DEST is the target of a register
3279 replacement we currently believe is valid, mark it as not eliminable if X
3280 modifies DEST in any way other than by adding a constant integer to it.
3281
3282 If DEST is the frame pointer, we do nothing because we assume that
3283 all assignments to the hard frame pointer are nonlocal gotos and are being
3284 done at a time when they are valid and do not disturb anything else.
3285 Some machines want to eliminate a fake argument pointer with either the
3286 frame or stack pointer. Assignments to the hard frame pointer must not
3287 prevent this elimination.
3288
3289 Called via note_stores from reload before starting its passes to scan
3290 the insns of the function. */
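/* For illustration: if some elimination has the stack pointer as its
   to_rtx, a hypothetical insn (set (reg sp) (reg r3)) modifies sp in a
   way we cannot track, so the test below turns that elimination off,
   whereas (set (reg sp) (plus (reg sp) (const_int -8))) merely adds a
   constant and is left alone.  */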
3291
3292 static void
3293 mark_not_eliminable (dest, x)
3294 rtx dest;
3295 rtx x;
3296 {
3297 register int i;
3298
3299 /* A SUBREG of a hard register here is just changing its mode. We should
3300 not see a SUBREG of an eliminable hard register, but check just in
3301 case. */
3302 if (GET_CODE (dest) == SUBREG)
3303 dest = SUBREG_REG (dest);
3304
3305 if (dest == hard_frame_pointer_rtx)
3306 return;
3307
3308 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3309 if (reg_eliminate[i].can_eliminate && dest == reg_eliminate[i].to_rtx
3310 && (GET_CODE (x) != SET
3311 || GET_CODE (SET_SRC (x)) != PLUS
3312 || XEXP (SET_SRC (x), 0) != dest
3313 || GET_CODE (XEXP (SET_SRC (x), 1)) != CONST_INT))
3314 {
3315 reg_eliminate[i].can_eliminate_previous
3316 = reg_eliminate[i].can_eliminate = 0;
3317 num_eliminable--;
3318 }
3319 }
3320 \f
3321 /* Kick all pseudos out of hard register REGNO.
3322 If GLOBAL is nonzero, try to find someplace else to put them.
3323 If DUMPFILE is nonzero, log actions taken on that file.
3324
3325 If CANT_ELIMINATE is nonzero, it means that we are doing this spill
3326    because we found we can't eliminate some register.  In this case, no pseudos
3327 are allowed to be in the register, even if they are only in a block that
3328 doesn't require spill registers, unlike the case when we are spilling this
3329 hard reg to produce another spill register.
3330
3331 Return nonzero if any pseudos needed to be kicked out. */
3332
3333 static int
3334 spill_hard_reg (regno, global, dumpfile, cant_eliminate)
3335 register int regno;
3336 int global;
3337 FILE *dumpfile;
3338 int cant_eliminate;
3339 {
3340 enum reg_class class = REGNO_REG_CLASS (regno);
3341 int something_changed = 0;
3342 register int i;
3343
3344 SET_HARD_REG_BIT (forbidden_regs, regno);
3345
3346 /* Spill every pseudo reg that was allocated to this reg
3347 or to something that overlaps this reg. */
3348
3349 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
3350 if (reg_renumber[i] >= 0
3351 && reg_renumber[i] <= regno
3352 && (reg_renumber[i]
3353 + HARD_REGNO_NREGS (reg_renumber[i],
3354 PSEUDO_REGNO_MODE (i))
3355 > regno))
3356 {
3357 /* If this register belongs solely to a basic block which needed no
3358 spilling of any class that this register is contained in,
3359 leave it be, unless we are spilling this register because
3360 it was a hard register that can't be eliminated. */
3361
3362 if (! cant_eliminate
3363 && basic_block_needs[0]
3364 && reg_basic_block[i] >= 0
3365 && basic_block_needs[(int) class][reg_basic_block[i]] == 0)
3366 {
3367 enum reg_class *p;
3368
3369 for (p = reg_class_superclasses[(int) class];
3370 *p != LIM_REG_CLASSES; p++)
3371 if (basic_block_needs[(int) *p][reg_basic_block[i]] > 0)
3372 break;
3373
3374 if (*p == LIM_REG_CLASSES)
3375 continue;
3376 }
3377
3378 /* Mark it as no longer having a hard register home. */
3379 reg_renumber[i] = -1;
3380 /* We will need to scan everything again. */
3381 something_changed = 1;
3382 if (global)
3383 retry_global_alloc (i, forbidden_regs);
3384
3385 alter_reg (i, regno);
3386 if (dumpfile)
3387 {
3388 if (reg_renumber[i] == -1)
3389 fprintf (dumpfile, " Register %d now on stack.\n\n", i);
3390 else
3391 fprintf (dumpfile, " Register %d now in %d.\n\n",
3392 i, reg_renumber[i]);
3393 }
3394 }
3395 for (i = 0; i < scratch_list_length; i++)
3396 {
3397 if (scratch_list[i] && REGNO (scratch_list[i]) == regno)
3398 {
3399 if (! cant_eliminate && basic_block_needs[0]
3400 && ! basic_block_needs[(int) class][scratch_block[i]])
3401 {
3402 enum reg_class *p;
3403
3404 for (p = reg_class_superclasses[(int) class];
3405 *p != LIM_REG_CLASSES; p++)
3406 if (basic_block_needs[(int) *p][scratch_block[i]] > 0)
3407 break;
3408
3409 if (*p == LIM_REG_CLASSES)
3410 continue;
3411 }
3412 PUT_CODE (scratch_list[i], SCRATCH);
3413 scratch_list[i] = 0;
3414 something_changed = 1;
3415 continue;
3416 }
3417 }
3418
3419 return something_changed;
3420 }
3421 \f
3422 /* Find all paradoxical subregs within X and update reg_max_ref_width. */
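/* For illustration: a paradoxical subreg is one whose mode is wider than
   that of the register inside it, e.g. (subreg:SI (reg:HI N) 0).
   Recording the wider size in reg_max_ref_width lets any stack slot
   eventually assigned to pseudo N be made at least that wide.  */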
3423
3424 static void
3425 scan_paradoxical_subregs (x)
3426 register rtx x;
3427 {
3428 register int i;
3429 register char *fmt;
3430 register enum rtx_code code = GET_CODE (x);
3431
3432 switch (code)
3433 {
3434 case CONST_INT:
3435 case CONST:
3436 case SYMBOL_REF:
3437 case LABEL_REF:
3438 case CONST_DOUBLE:
3439 case CC0:
3440 case PC:
3441 case REG:
3442 case USE:
3443 case CLOBBER:
3444 return;
3445
3446 case SUBREG:
3447 if (GET_CODE (SUBREG_REG (x)) == REG
3448 && GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3449 reg_max_ref_width[REGNO (SUBREG_REG (x))]
3450 = GET_MODE_SIZE (GET_MODE (x));
3451 return;
3452 }
3453
3454 fmt = GET_RTX_FORMAT (code);
3455 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3456 {
3457 if (fmt[i] == 'e')
3458 scan_paradoxical_subregs (XEXP (x, i));
3459 else if (fmt[i] == 'E')
3460 {
3461 register int j;
3462 for (j = XVECLEN (x, i) - 1; j >=0; j--)
3463 scan_paradoxical_subregs (XVECEXP (x, i, j));
3464 }
3465 }
3466 }
3467 \f
3468 static int
3469 hard_reg_use_compare (p1, p2)
3470 struct hard_reg_n_uses *p1, *p2;
3471 {
3472 int tem = p1->uses - p2->uses;
3473 if (tem != 0) return tem;
3474 /* If regs are equally good, sort by regno,
3475 so that the results of qsort leave nothing to chance. */
3476 return p1->regno - p2->regno;
3477 }
3478
3479 /* Choose the order to consider regs for use as reload registers
3480 based on how much trouble would be caused by spilling one.
3481 Store them in order of decreasing preference in potential_reload_regs. */
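/* Roughly, the resulting order is: first the hard regs that no pseudo is
   living in (following REG_ALLOC_ORDER if it is defined, otherwise
   call-used regs ahead of call-saved ones), then the regs already in
   use, least heavily used first; fixed regs, the hard frame pointer and
   any other eliminable regs are given huge use counts so that they sort
   to the very end of potential_reload_regs.  */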
3482
3483 static void
3484 order_regs_for_reload ()
3485 {
3486 register int i;
3487 register int o = 0;
3488 int large = 0;
3489
3490 struct hard_reg_n_uses hard_reg_n_uses[FIRST_PSEUDO_REGISTER];
3491
3492 CLEAR_HARD_REG_SET (bad_spill_regs);
3493
3494 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3495 potential_reload_regs[i] = -1;
3496
3497 /* Count number of uses of each hard reg by pseudo regs allocated to it
3498 and then order them by decreasing use. */
3499
3500 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3501 {
3502 hard_reg_n_uses[i].uses = 0;
3503 hard_reg_n_uses[i].regno = i;
3504 }
3505
3506 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
3507 {
3508 int regno = reg_renumber[i];
3509 if (regno >= 0)
3510 {
3511 int lim = regno + HARD_REGNO_NREGS (regno, PSEUDO_REGNO_MODE (i));
3512 while (regno < lim)
3513 hard_reg_n_uses[regno++].uses += reg_n_refs[i];
3514 }
3515 large += reg_n_refs[i];
3516 }
3517
3518 /* Now fixed registers (which cannot safely be used for reloading)
3519 get a very high use count so they will be considered least desirable.
3520 Registers used explicitly in the rtl code are almost as bad. */
3521
3522 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3523 {
3524 if (fixed_regs[i])
3525 {
3526 hard_reg_n_uses[i].uses += 2 * large + 2;
3527 SET_HARD_REG_BIT (bad_spill_regs, i);
3528 }
3529 else if (regs_explicitly_used[i])
3530 {
3531 hard_reg_n_uses[i].uses += large + 1;
3532 #ifndef SMALL_REGISTER_CLASSES
3533 /* ??? We are doing this here because of the potential that
3534 bad code may be generated if a register explicitly used in
3535 an insn was used as a spill register for that insn. But
3536              not using these as spill registers may lose on some machines.
3537 We'll have to see how this works out. */
3538 SET_HARD_REG_BIT (bad_spill_regs, i);
3539 #endif
3540 }
3541 }
3542 hard_reg_n_uses[HARD_FRAME_POINTER_REGNUM].uses += 2 * large + 2;
3543 SET_HARD_REG_BIT (bad_spill_regs, HARD_FRAME_POINTER_REGNUM);
3544
3545 #ifdef ELIMINABLE_REGS
3546 /* If registers other than the frame pointer are eliminable, mark them as
3547 poor choices. */
3548 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3549 {
3550 hard_reg_n_uses[reg_eliminate[i].from].uses += 2 * large + 2;
3551 SET_HARD_REG_BIT (bad_spill_regs, reg_eliminate[i].from);
3552 }
3553 #endif
3554
3555 /* Prefer registers not so far used, for use in temporary loading.
3556 Among them, if REG_ALLOC_ORDER is defined, use that order.
3557 Otherwise, prefer registers not preserved by calls. */
3558
3559 #ifdef REG_ALLOC_ORDER
3560 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3561 {
3562 int regno = reg_alloc_order[i];
3563
3564 if (hard_reg_n_uses[regno].uses == 0)
3565 potential_reload_regs[o++] = regno;
3566 }
3567 #else
3568 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3569 {
3570 if (hard_reg_n_uses[i].uses == 0 && call_used_regs[i])
3571 potential_reload_regs[o++] = i;
3572 }
3573 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3574 {
3575 if (hard_reg_n_uses[i].uses == 0 && ! call_used_regs[i])
3576 potential_reload_regs[o++] = i;
3577 }
3578 #endif
3579
3580 qsort (hard_reg_n_uses, FIRST_PSEUDO_REGISTER,
3581 sizeof hard_reg_n_uses[0], hard_reg_use_compare);
3582
3583 /* Now add the regs that are already used,
3584 preferring those used less often. The fixed and otherwise forbidden
3585 registers will be at the end of this list. */
3586
3587 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3588 if (hard_reg_n_uses[i].uses != 0)
3589 potential_reload_regs[o++] = hard_reg_n_uses[i].regno;
3590 }
3591 \f
3592 /* Reload pseudo-registers into hard regs around each insn as needed.
3593 Additional register load insns are output before the insn that needs it
3594 and perhaps store insns after insns that modify the reloaded pseudo reg.
3595
3596 reg_last_reload_reg and reg_reloaded_contents keep track of
3597 which registers are already available in reload registers.
3598 We update these for the reloads that we perform,
3599 as the insns are scanned. */
3600
3601 static void
3602 reload_as_needed (first, live_known)
3603 rtx first;
3604 int live_known;
3605 {
3606 register rtx insn;
3607 register int i;
3608 int this_block = 0;
3609 rtx x;
3610 rtx after_call = 0;
3611
3612 bzero (spill_reg_rtx, sizeof spill_reg_rtx);
3613 reg_last_reload_reg = (rtx *) alloca (max_regno * sizeof (rtx));
3614 bzero (reg_last_reload_reg, max_regno * sizeof (rtx));
3615 reg_has_output_reload = (char *) alloca (max_regno);
3616 for (i = 0; i < n_spills; i++)
3617 {
3618 reg_reloaded_contents[i] = -1;
3619 reg_reloaded_insn[i] = 0;
3620 }
3621
3622 /* Reset all offsets on eliminable registers to their initial values. */
3623 #ifdef ELIMINABLE_REGS
3624 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3625 {
3626 INITIAL_ELIMINATION_OFFSET (reg_eliminate[i].from, reg_eliminate[i].to,
3627 reg_eliminate[i].initial_offset);
3628 reg_eliminate[i].previous_offset
3629 = reg_eliminate[i].offset = reg_eliminate[i].initial_offset;
3630 }
3631 #else
3632 INITIAL_FRAME_POINTER_OFFSET (reg_eliminate[0].initial_offset);
3633 reg_eliminate[0].previous_offset
3634 = reg_eliminate[0].offset = reg_eliminate[0].initial_offset;
3635 #endif
3636
3637 num_not_at_initial_offset = 0;
3638
3639 for (insn = first; insn;)
3640 {
3641 register rtx next = NEXT_INSN (insn);
3642
3643 /* Notice when we move to a new basic block. */
3644 if (live_known && this_block + 1 < n_basic_blocks
3645 && insn == basic_block_head[this_block+1])
3646 ++this_block;
3647
3648 /* If we pass a label, copy the offsets from the label information
3649 into the current offsets of each elimination. */
3650 if (GET_CODE (insn) == CODE_LABEL)
3651 {
3652 num_not_at_initial_offset = 0;
3653 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3654 {
3655 reg_eliminate[i].offset = reg_eliminate[i].previous_offset
3656 = offsets_at[CODE_LABEL_NUMBER (insn)][i];
3657 if (reg_eliminate[i].can_eliminate
3658 && (reg_eliminate[i].offset
3659 != reg_eliminate[i].initial_offset))
3660 num_not_at_initial_offset++;
3661 }
3662 }
3663
3664 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
3665 {
3666 rtx avoid_return_reg = 0;
3667
3668 #ifdef SMALL_REGISTER_CLASSES
3669 /* Set avoid_return_reg if this is an insn
3670 that might use the value of a function call. */
3671 if (GET_CODE (insn) == CALL_INSN)
3672 {
3673 if (GET_CODE (PATTERN (insn)) == SET)
3674 after_call = SET_DEST (PATTERN (insn));
3675 else if (GET_CODE (PATTERN (insn)) == PARALLEL
3676 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
3677 after_call = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
3678 else
3679 after_call = 0;
3680 }
3681 else if (after_call != 0
3682 && !(GET_CODE (PATTERN (insn)) == SET
3683 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx))
3684 {
3685 if (reg_mentioned_p (after_call, PATTERN (insn)))
3686 avoid_return_reg = after_call;
3687 after_call = 0;
3688 }
3689 #endif /* SMALL_REGISTER_CLASSES */
3690
3691          /* If this is a USE or CLOBBER of a MEM, ensure that any
3692 references to eliminable registers have been removed. */
3693
3694 if ((GET_CODE (PATTERN (insn)) == USE
3695 || GET_CODE (PATTERN (insn)) == CLOBBER)
3696 && GET_CODE (XEXP (PATTERN (insn), 0)) == MEM)
3697 XEXP (XEXP (PATTERN (insn), 0), 0)
3698 = eliminate_regs (XEXP (XEXP (PATTERN (insn), 0), 0),
3699 GET_MODE (XEXP (PATTERN (insn), 0)), NULL_RTX);
3700
3701 /* If we need to do register elimination processing, do so.
3702 This might delete the insn, in which case we are done. */
3703 if (num_eliminable && GET_MODE (insn) == QImode)
3704 {
3705 eliminate_regs_in_insn (insn, 1);
3706 if (GET_CODE (insn) == NOTE)
3707 {
3708 insn = next;
3709 continue;
3710 }
3711 }
3712
3713 if (GET_MODE (insn) == VOIDmode)
3714 n_reloads = 0;
3715 /* First find the pseudo regs that must be reloaded for this insn.
3716 This info is returned in the tables reload_... (see reload.h).
3717 Also modify the body of INSN by substituting RELOAD
3718 rtx's for those pseudo regs. */
3719 else
3720 {
3721 bzero (reg_has_output_reload, max_regno);
3722 CLEAR_HARD_REG_SET (reg_is_output_reload);
3723
3724 find_reloads (insn, 1, spill_indirect_levels, live_known,
3725 spill_reg_order);
3726 }
3727
3728 if (n_reloads > 0)
3729 {
3730 rtx prev = PREV_INSN (insn), next = NEXT_INSN (insn);
3731 rtx p;
3732 int class;
3733
3734 /* If this block has not had spilling done for a
3735                 particular class and we have any non-optionals that need a
3736 spill reg in that class, abort. */
3737
3738 for (class = 0; class < N_REG_CLASSES; class++)
3739 if (basic_block_needs[class] != 0
3740 && basic_block_needs[class][this_block] == 0)
3741 for (i = 0; i < n_reloads; i++)
3742 if (class == (int) reload_reg_class[i]
3743 && reload_reg_rtx[i] == 0
3744 && ! reload_optional[i]
3745 && (reload_in[i] != 0 || reload_out[i] != 0
3746 || reload_secondary_p[i] != 0))
3747 abort ();
3748
3749 /* Now compute which reload regs to reload them into. Perhaps
3750 reusing reload regs from previous insns, or else output
3751 load insns to reload them. Maybe output store insns too.
3752 Record the choices of reload reg in reload_reg_rtx. */
3753 choose_reload_regs (insn, avoid_return_reg);
3754
3755 #ifdef SMALL_REGISTER_CLASSES
3756 /* Merge any reloads that we didn't combine for fear of
3757 increasing the number of spill registers needed but now
3758 discover can be safely merged. */
3759 merge_assigned_reloads (insn);
3760 #endif
3761
3762 /* Generate the insns to reload operands into or out of
3763 their reload regs. */
3764 emit_reload_insns (insn);
3765
3766 /* Substitute the chosen reload regs from reload_reg_rtx
3767 into the insn's body (or perhaps into the bodies of other
3768                 load and store insns that we just made for reloading
3769 and that we moved the structure into). */
3770 subst_reloads ();
3771
3772 /* If this was an ASM, make sure that all the reload insns
3773 we have generated are valid. If not, give an error
3774 and delete them. */
3775
3776 if (asm_noperands (PATTERN (insn)) >= 0)
3777 for (p = NEXT_INSN (prev); p != next; p = NEXT_INSN (p))
3778 if (p != insn && GET_RTX_CLASS (GET_CODE (p)) == 'i'
3779 && (recog_memoized (p) < 0
3780 || (insn_extract (p),
3781 ! constrain_operands (INSN_CODE (p), 1))))
3782 {
3783 error_for_asm (insn,
3784 "`asm' operand requires impossible reload");
3785 PUT_CODE (p, NOTE);
3786 NOTE_SOURCE_FILE (p) = 0;
3787 NOTE_LINE_NUMBER (p) = NOTE_INSN_DELETED;
3788 }
3789 }
3790 /* Any previously reloaded spilled pseudo reg, stored in this insn,
3791 is no longer validly lying around to save a future reload.
3792 Note that this does not detect pseudos that were reloaded
3793             for this insn in order to be stored into
3794 (obeying register constraints). That is correct; such reload
3795 registers ARE still valid. */
3796 note_stores (PATTERN (insn), forget_old_reloads_1);
3797
3798 /* There may have been CLOBBER insns placed after INSN. So scan
3799 between INSN and NEXT and use them to forget old reloads. */
3800 for (x = NEXT_INSN (insn); x != next; x = NEXT_INSN (x))
3801 if (GET_CODE (x) == INSN && GET_CODE (PATTERN (x)) == CLOBBER)
3802 note_stores (PATTERN (x), forget_old_reloads_1);
3803
3804 #ifdef AUTO_INC_DEC
3805 /* Likewise for regs altered by auto-increment in this insn.
3806 But note that the reg-notes are not changed by reloading:
3807 they still contain the pseudo-regs, not the spill regs. */
3808 for (x = REG_NOTES (insn); x; x = XEXP (x, 1))
3809 if (REG_NOTE_KIND (x) == REG_INC)
3810 {
3811 /* See if this pseudo reg was reloaded in this insn.
3812 If so, its last-reload info is still valid
3813 because it is based on this insn's reload. */
3814 for (i = 0; i < n_reloads; i++)
3815 if (reload_out[i] == XEXP (x, 0))
3816 break;
3817
3818 if (i == n_reloads)
3819 forget_old_reloads_1 (XEXP (x, 0), NULL_RTX);
3820 }
3821 #endif
3822 }
3823 /* A reload reg's contents are unknown after a label. */
3824 if (GET_CODE (insn) == CODE_LABEL)
3825 for (i = 0; i < n_spills; i++)
3826 {
3827 reg_reloaded_contents[i] = -1;
3828 reg_reloaded_insn[i] = 0;
3829 }
3830
3831 /* Don't assume a reload reg is still good after a call insn
3832 if it is a call-used reg. */
3833 else if (GET_CODE (insn) == CALL_INSN)
3834 for (i = 0; i < n_spills; i++)
3835 if (call_used_regs[spill_regs[i]])
3836 {
3837 reg_reloaded_contents[i] = -1;
3838 reg_reloaded_insn[i] = 0;
3839 }
3840
3841 /* In case registers overlap, allow certain insns to invalidate
3842 particular hard registers. */
3843
3844 #ifdef INSN_CLOBBERS_REGNO_P
3845 for (i = 0 ; i < n_spills ; i++)
3846 if (INSN_CLOBBERS_REGNO_P (insn, spill_regs[i]))
3847 {
3848 reg_reloaded_contents[i] = -1;
3849 reg_reloaded_insn[i] = 0;
3850 }
3851 #endif
3852
3853 insn = next;
3854
3855 #ifdef USE_C_ALLOCA
3856 alloca (0);
3857 #endif
3858 }
3859 }
3860
3861 /* Discard all record of any value reloaded from X,
3862 or reloaded in X from someplace else;
3863 unless X is an output reload reg of the current insn.
3864
3865 X may be a hard reg (the reload reg)
3866 or it may be a pseudo reg that was reloaded from. */
3867
3868 static void
3869 forget_old_reloads_1 (x, ignored)
3870 rtx x;
3871 rtx ignored;
3872 {
3873 register int regno;
3874 int nr;
3875 int offset = 0;
3876
3877 /* note_stores does give us subregs of hard regs. */
3878 while (GET_CODE (x) == SUBREG)
3879 {
3880 offset += SUBREG_WORD (x);
3881 x = SUBREG_REG (x);
3882 }
3883
3884 if (GET_CODE (x) != REG)
3885 return;
3886
3887 regno = REGNO (x) + offset;
3888
3889 if (regno >= FIRST_PSEUDO_REGISTER)
3890 nr = 1;
3891 else
3892 {
3893 int i;
3894 nr = HARD_REGNO_NREGS (regno, GET_MODE (x));
3895 /* Storing into a spilled-reg invalidates its contents.
3896 This can happen if a block-local pseudo is allocated to that reg
3897 and it wasn't spilled because this block's total need is 0.
3898 Then some insn might have an optional reload and use this reg. */
3899 for (i = 0; i < nr; i++)
3900 if (spill_reg_order[regno + i] >= 0
3901 /* But don't do this if the reg actually serves as an output
3902 reload reg in the current instruction. */
3903 && (n_reloads == 0
3904 || ! TEST_HARD_REG_BIT (reg_is_output_reload, regno + i)))
3905 {
3906 reg_reloaded_contents[spill_reg_order[regno + i]] = -1;
3907 reg_reloaded_insn[spill_reg_order[regno + i]] = 0;
3908 }
3909 }
3910
3911 /* Since value of X has changed,
3912 forget any value previously copied from it. */
3913
3914 while (nr-- > 0)
3915 /* But don't forget a copy if this is the output reload
3916 that establishes the copy's validity. */
3917 if (n_reloads == 0 || reg_has_output_reload[regno + nr] == 0)
3918 reg_last_reload_reg[regno + nr] = 0;
3919 }
3920 \f
3921 /* For each reload, the mode of the reload register. */
3922 static enum machine_mode reload_mode[MAX_RELOADS];
3923
3924 /* For each reload, the largest number of registers it will require. */
3925 static int reload_nregs[MAX_RELOADS];
3926
3927 /* Comparison function for qsort to decide which of two reloads
3928 should be handled first. *P1 and *P2 are the reload numbers. */
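/* This is the comparison function passed to qsort on reload_order in
   choose_reload_regs, below.  */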
3929
3930 static int
3931 reload_reg_class_lower (p1, p2)
3932 short *p1, *p2;
3933 {
3934 register int r1 = *p1, r2 = *p2;
3935 register int t;
3936
3937 /* Consider required reloads before optional ones. */
3938 t = reload_optional[r1] - reload_optional[r2];
3939 if (t != 0)
3940 return t;
3941
3942 /* Count all solitary classes before non-solitary ones. */
3943 t = ((reg_class_size[(int) reload_reg_class[r2]] == 1)
3944 - (reg_class_size[(int) reload_reg_class[r1]] == 1));
3945 if (t != 0)
3946 return t;
3947
3948 /* Aside from solitaires, consider all multi-reg groups first. */
3949 t = reload_nregs[r2] - reload_nregs[r1];
3950 if (t != 0)
3951 return t;
3952
3953 /* Consider reloads in order of increasing reg-class number. */
3954 t = (int) reload_reg_class[r1] - (int) reload_reg_class[r2];
3955 if (t != 0)
3956 return t;
3957
3958 /* If reloads are equally urgent, sort by reload number,
3959 so that the results of qsort leave nothing to chance. */
3960 return r1 - r2;
3961 }
3962 \f
3963 /* The following HARD_REG_SETs indicate when each hard register is
3964 used for a reload of various parts of the current insn. */
3965
3966 /* If reg is in use as a reload reg for a RELOAD_OTHER reload. */
3967 static HARD_REG_SET reload_reg_used;
3968 /* If reg is in use for a RELOAD_FOR_INPUT_ADDRESS reload for operand I. */
3969 static HARD_REG_SET reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
3970 /* If reg is in use for a RELOAD_FOR_OUTPUT_ADDRESS reload for operand I. */
3971 static HARD_REG_SET reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
3972 /* If reg is in use for a RELOAD_FOR_INPUT reload for operand I. */
3973 static HARD_REG_SET reload_reg_used_in_input[MAX_RECOG_OPERANDS];
3974 /* If reg is in use for a RELOAD_FOR_OUTPUT reload for operand I. */
3975 static HARD_REG_SET reload_reg_used_in_output[MAX_RECOG_OPERANDS];
3976 /* If reg is in use for a RELOAD_FOR_OPERAND_ADDRESS reload. */
3977 static HARD_REG_SET reload_reg_used_in_op_addr;
3978 /* If reg is in use for a RELOAD_FOR_INSN reload. */
3979 static HARD_REG_SET reload_reg_used_in_insn;
3980 /* If reg is in use for a RELOAD_FOR_OTHER_ADDRESS reload. */
3981 static HARD_REG_SET reload_reg_used_in_other_addr;
3982
3983 /* If reg is in use as a reload reg for any sort of reload. */
3984 static HARD_REG_SET reload_reg_used_at_all;
3985
3986 /* If reg is in use as an inherited reload.  We just mark the first register
3987 in the group. */
3988 static HARD_REG_SET reload_reg_used_for_inherit;
3989
3990 /* Mark reg REGNO as in use for a reload of the sort spec'd by OPNUM and
3991 TYPE. MODE is used to indicate how many consecutive regs are
3992 actually used. */
3993
3994 static void
3995 mark_reload_reg_in_use (regno, opnum, type, mode)
3996 int regno;
3997 int opnum;
3998 enum reload_type type;
3999 enum machine_mode mode;
4000 {
4001 int nregs = HARD_REGNO_NREGS (regno, mode);
4002 int i;
4003
4004 for (i = regno; i < nregs + regno; i++)
4005 {
4006 switch (type)
4007 {
4008 case RELOAD_OTHER:
4009 SET_HARD_REG_BIT (reload_reg_used, i);
4010 break;
4011
4012 case RELOAD_FOR_INPUT_ADDRESS:
4013 SET_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], i);
4014 break;
4015
4016 case RELOAD_FOR_OUTPUT_ADDRESS:
4017 SET_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], i);
4018 break;
4019
4020 case RELOAD_FOR_OPERAND_ADDRESS:
4021 SET_HARD_REG_BIT (reload_reg_used_in_op_addr, i);
4022 break;
4023
4024 case RELOAD_FOR_OTHER_ADDRESS:
4025 SET_HARD_REG_BIT (reload_reg_used_in_other_addr, i);
4026 break;
4027
4028 case RELOAD_FOR_INPUT:
4029 SET_HARD_REG_BIT (reload_reg_used_in_input[opnum], i);
4030 break;
4031
4032 case RELOAD_FOR_OUTPUT:
4033 SET_HARD_REG_BIT (reload_reg_used_in_output[opnum], i);
4034 break;
4035
4036 case RELOAD_FOR_INSN:
4037 SET_HARD_REG_BIT (reload_reg_used_in_insn, i);
4038 break;
4039 }
4040
4041 SET_HARD_REG_BIT (reload_reg_used_at_all, i);
4042 }
4043 }
4044
4045 /* Similarly, but show REGNO is no longer in use for a reload. */
4046
4047 static void
4048 clear_reload_reg_in_use (regno, opnum, type, mode)
4049 int regno;
4050 int opnum;
4051 enum reload_type type;
4052 enum machine_mode mode;
4053 {
4054 int nregs = HARD_REGNO_NREGS (regno, mode);
4055 int i;
4056
4057 for (i = regno; i < nregs + regno; i++)
4058 {
4059 switch (type)
4060 {
4061 case RELOAD_OTHER:
4062 CLEAR_HARD_REG_BIT (reload_reg_used, i);
4063 break;
4064
4065 case RELOAD_FOR_INPUT_ADDRESS:
4066 CLEAR_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], i);
4067 break;
4068
4069 case RELOAD_FOR_OUTPUT_ADDRESS:
4070 CLEAR_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], i);
4071 break;
4072
4073 case RELOAD_FOR_OPERAND_ADDRESS:
4074 CLEAR_HARD_REG_BIT (reload_reg_used_in_op_addr, i);
4075 break;
4076
4077 case RELOAD_FOR_OTHER_ADDRESS:
4078 CLEAR_HARD_REG_BIT (reload_reg_used_in_other_addr, i);
4079 break;
4080
4081 case RELOAD_FOR_INPUT:
4082 CLEAR_HARD_REG_BIT (reload_reg_used_in_input[opnum], i);
4083 break;
4084
4085 case RELOAD_FOR_OUTPUT:
4086 CLEAR_HARD_REG_BIT (reload_reg_used_in_output[opnum], i);
4087 break;
4088
4089 case RELOAD_FOR_INSN:
4090 CLEAR_HARD_REG_BIT (reload_reg_used_in_insn, i);
4091 break;
4092 }
4093 }
4094 }
4095
4096 /* 1 if reg REGNO is free as a reload reg for a reload of the sort
4097 specified by OPNUM and TYPE. */
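/* The various reload types partition the insn in time, which is what
   lets some of them share a register: for instance, the tests below
   allow a reg used for a RELOAD_FOR_INPUT_ADDRESS reload to also serve
   a RELOAD_FOR_OUTPUT reload, since the input address has served its
   purpose before the output is stored, while a reg claimed by a
   RELOAD_OTHER reload can be shared only with RELOAD_FOR_OTHER_ADDRESS
   reloads.  */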
4098
4099 static int
4100 reload_reg_free_p (regno, opnum, type)
4101 int regno;
4102 int opnum;
4103 enum reload_type type;
4104 {
4105 int i;
4106
4107 /* In use for a RELOAD_OTHER means it's not available for anything except
4108 RELOAD_FOR_OTHER_ADDRESS. Recall that RELOAD_FOR_OTHER_ADDRESS is known
4109 to be used only for inputs. */
4110
4111 if (type != RELOAD_FOR_OTHER_ADDRESS
4112 && TEST_HARD_REG_BIT (reload_reg_used, regno))
4113 return 0;
4114
4115 switch (type)
4116 {
4117 case RELOAD_OTHER:
4118 /* In use for anything means not available for a RELOAD_OTHER. */
4119 return ! TEST_HARD_REG_BIT (reload_reg_used_at_all, regno);
4120
4121 /* The other kinds of use can sometimes share a register. */
4122 case RELOAD_FOR_INPUT:
4123 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4124 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
4125 return 0;
4126
4127 /* If it is used for some other input, can't use it. */
4128 for (i = 0; i < reload_n_operands; i++)
4129 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4130 return 0;
4131
4132 /* If it is used in a later operand's address, can't use it. */
4133 for (i = opnum + 1; i < reload_n_operands; i++)
4134 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
4135 return 0;
4136
4137 return 1;
4138
4139 case RELOAD_FOR_INPUT_ADDRESS:
4140 /* Can't use a register if it is used for an input address for this
4141 operand or used as an input in an earlier one. */
4142 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], regno))
4143 return 0;
4144
4145 for (i = 0; i < opnum; i++)
4146 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4147 return 0;
4148
4149 return 1;
4150
4151 case RELOAD_FOR_OUTPUT_ADDRESS:
4152 /* Can't use a register if it is used for an output address for this
4153 operand or used as an output in this or a later operand. */
4154 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno))
4155 return 0;
4156
4157 for (i = opnum; i < reload_n_operands; i++)
4158 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4159 return 0;
4160
4161 return 1;
4162
4163 case RELOAD_FOR_OPERAND_ADDRESS:
4164 for (i = 0; i < reload_n_operands; i++)
4165 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4166 return 0;
4167
4168 return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4169 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4170
4171 case RELOAD_FOR_OUTPUT:
4172 /* This cannot share a register with RELOAD_FOR_INSN reloads, other
4173 outputs, or an operand address for this or an earlier output. */
4174 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
4175 return 0;
4176
4177 for (i = 0; i < reload_n_operands; i++)
4178 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4179 return 0;
4180
4181 for (i = 0; i <= opnum; i++)
4182 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno))
4183 return 0;
4184
4185 return 1;
4186
4187 case RELOAD_FOR_INSN:
4188 for (i = 0; i < reload_n_operands; i++)
4189 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4190 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4191 return 0;
4192
4193 return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4194 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4195
4196 case RELOAD_FOR_OTHER_ADDRESS:
4197 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4198 }
4199 abort ();
4200 }
4201
4202 /* Return 1 if the value in reload reg REGNO, as used by a reload
4203 needed for the part of the insn specified by OPNUM and TYPE,
4204 is not in use for a reload in any prior part of the insn.
4205
4206 We can assume that the reload reg was already tested for availability
4207 at the time it is needed, and we should not check this again,
4208 in case the reg has already been marked in use. */
4209
4210 static int
4211 reload_reg_free_before_p (regno, opnum, type)
4212 int regno;
4213 int opnum;
4214 enum reload_type type;
4215 {
4216 int i;
4217
4218 switch (type)
4219 {
4220 case RELOAD_FOR_OTHER_ADDRESS:
4221 /* These always come first. */
4222 return 1;
4223
4224 case RELOAD_OTHER:
4225 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4226
4227 /* If this use is for part of the insn,
4228 check the reg is not in use for any prior part. It is tempting
4229      to try to do this by falling through from objects that occur
4230 later in the insn to ones that occur earlier, but that will not
4231 correctly take into account the fact that here we MUST ignore
4232 things that would prevent the register from being allocated in
4233 the first place, since we know that it was allocated. */
4234
4235 case RELOAD_FOR_OUTPUT_ADDRESS:
4236 /* Earlier reloads are for earlier outputs or their addresses,
4237 any RELOAD_FOR_INSN reloads, any inputs or their addresses, or any
4238 RELOAD_FOR_OTHER_ADDRESS reloads (we know it can't conflict with
4239      RELOAD_OTHER).  */
4240 for (i = 0; i < opnum; i++)
4241 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4242 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4243 return 0;
4244
4245 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
4246 return 0;
4247
4248 for (i = 0; i < reload_n_operands; i++)
4249 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4250 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4251 return 0;
4252
4253 return (! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno)
4254 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4255 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4256
4257 case RELOAD_FOR_OUTPUT:
4258 /* This can't be used in the output address for this operand and
4259 anything that can't be used for it, except that we've already
4260 tested for RELOAD_FOR_INSN objects. */
4261
4262 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno))
4263 return 0;
4264
4265 for (i = 0; i < opnum; i++)
4266 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4267 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4268 return 0;
4269
4270 for (i = 0; i < reload_n_operands; i++)
4271 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4272 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4273 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
4274 return 0;
4275
4276 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4277
4278 case RELOAD_FOR_OPERAND_ADDRESS:
4279 case RELOAD_FOR_INSN:
4280 /* These can't conflict with inputs, or each other, so all we have to
4281 test is input addresses and the addresses of OTHER items. */
4282
4283 for (i = 0; i < reload_n_operands; i++)
4284 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
4285 return 0;
4286
4287 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4288
4289 case RELOAD_FOR_INPUT:
4290 /* The only things earlier are the address for this and
4291 earlier inputs, other inputs (which we know we don't conflict
4292 with), and addresses of RELOAD_OTHER objects. */
4293
4294 for (i = 0; i <= opnum; i++)
4295 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
4296 return 0;
4297
4298 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4299
4300 case RELOAD_FOR_INPUT_ADDRESS:
4301 /* Similarly, all we have to check is for use in earlier inputs'
4302 addresses. */
4303 for (i = 0; i < opnum; i++)
4304 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
4305 return 0;
4306
4307 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4308 }
4309 abort ();
4310 }
4311
4312 /* Return 1 if the value in reload reg REGNO, as used by a reload
4313 needed for the part of the insn specified by OPNUM and TYPE,
4314 is still available in REGNO at the end of the insn.
4315
4316 We can assume that the reload reg was already tested for availability
4317 at the time it is needed, and we should not check this again,
4318 in case the reg has already been marked in use. */
4319
4320 static int
4321 reload_reg_reaches_end_p (regno, opnum, type)
4322 int regno;
4323 int opnum;
4324 enum reload_type type;
4325 {
4326 int i;
4327
4328 switch (type)
4329 {
4330 case RELOAD_OTHER:
4331 /* Since a RELOAD_OTHER reload claims the reg for the entire insn,
4332 its value must reach the end. */
4333 return 1;
4334
4335 /* If this use is for part of the insn,
4336 its value reaches if no subsequent part uses the same register.
4337 Just like the above function, don't try to do this with lots
4338 of fallthroughs. */
4339
4340 case RELOAD_FOR_OTHER_ADDRESS:
4341 /* Here we check for everything else, since these don't conflict
4342 with anything else and everything comes later. */
4343
4344 for (i = 0; i < reload_n_operands; i++)
4345 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4346 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno)
4347 || TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4348 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4349 return 0;
4350
4351 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4352 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4353 && ! TEST_HARD_REG_BIT (reload_reg_used, regno));
4354
4355 case RELOAD_FOR_INPUT_ADDRESS:
4356 /* Similar, except that we check only for this and subsequent inputs
4357 and the address of only subsequent inputs and we do not need
4358 to check for RELOAD_OTHER objects since they are known not to
4359 conflict. */
4360
4361 for (i = opnum; i < reload_n_operands; i++)
4362 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4363 return 0;
4364
4365 for (i = opnum + 1; i < reload_n_operands; i++)
4366 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
4367 return 0;
4368
4369 for (i = 0; i < reload_n_operands; i++)
4370 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4371 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4372 return 0;
4373
4374 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4375 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno));
4376
4377 case RELOAD_FOR_INPUT:
4378 /* Similar to input address, except we start at the next operand for
4379 both input and input address and we do not check for
4380 RELOAD_FOR_OPERAND_ADDRESS and RELOAD_FOR_INSN since these
4381 would conflict. */
4382
4383 for (i = opnum + 1; i < reload_n_operands; i++)
4384 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4385 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4386 return 0;
4387
4388 /* ... fall through ... */
4389
4390 case RELOAD_FOR_OPERAND_ADDRESS:
4391 /* Check outputs and their addresses. */
4392
4393 for (i = 0; i < reload_n_operands; i++)
4394 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4395 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4396 return 0;
4397
4398 return 1;
4399
4400 case RELOAD_FOR_INSN:
4401     /* These conflict with other outputs and with RELOAD_OTHER.  So
4402 we need only check for output addresses. */
4403
4404 opnum = -1;
4405
4406 /* ... fall through ... */
4407
4408 case RELOAD_FOR_OUTPUT:
4409 case RELOAD_FOR_OUTPUT_ADDRESS:
4410 /* We already know these can't conflict with a later output. So the
4411        only things to check are later output addresses.  */
4412 for (i = opnum + 1; i < reload_n_operands; i++)
4413 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno))
4414 return 0;
4415
4416 return 1;
4417 }
4418
4419 abort ();
4420 }
4421 \f
4422 /* Vector of reload-numbers showing the order in which the reloads should
4423 be processed. */
4424 short reload_order[MAX_RELOADS];
4425
4426 /* Indexed by reload number, 1 if incoming value
4427 inherited from previous insns. */
4428 char reload_inherited[MAX_RELOADS];
4429
4430 /* For an inherited reload, this is the insn the reload was inherited from,
4431 if we know it. Otherwise, this is 0. */
4432 rtx reload_inheritance_insn[MAX_RELOADS];
4433
4434 /* If non-zero, this is a place to get the value of the reload,
4435 rather than using reload_in. */
4436 rtx reload_override_in[MAX_RELOADS];
4437
4438 /* For each reload, the index in spill_regs of the spill register used,
4439 or -1 if we did not need one of the spill registers for this reload. */
4440 int reload_spill_index[MAX_RELOADS];
4441
4442 /* Index of last register assigned as a spill register. We allocate in
4443    a round-robin fashion.  */
4444
4445 static int last_spill_reg = 0;
4446
4447 /* Find a spill register to use as a reload register for reload R.
4448 LAST_RELOAD is non-zero if this is the last reload for the insn being
4449 processed.
4450
4451 Set reload_reg_rtx[R] to the register allocated.
4452
4453 If NOERROR is nonzero, we return 1 if successful,
4454 or 0 if we couldn't find a spill reg and we didn't change anything. */
4455
4456 static int
4457 allocate_reload_reg (r, insn, last_reload, noerror)
4458 int r;
4459 rtx insn;
4460 int last_reload;
4461 int noerror;
4462 {
4463 int i;
4464 int pass;
4465 int count;
4466 rtx new;
4467 int regno;
4468
4469 /* If we put this reload ahead, thinking it is a group,
4470 then insist on finding a group. Otherwise we can grab a
4471 reg that some other reload needs.
4472 (That can happen when we have a 68000 DATA_OR_FP_REG
4473 which is a group of data regs or one fp reg.)
4474 We need not be so restrictive if there are no more reloads
4475 for this insn.
4476
4477 ??? Really it would be nicer to have smarter handling
4478 for that kind of reg class, where a problem like this is normal.
4479 Perhaps those classes should be avoided for reloading
4480 by use of more alternatives. */
4481
4482 int force_group = reload_nregs[r] > 1 && ! last_reload;
4483
4484 /* If we want a single register and haven't yet found one,
4485 take any reg in the right class and not in use.
4486 If we want a consecutive group, here is where we look for it.
4487
4488 We use two passes so we can first look for reload regs to
4489 reuse, which are already in use for other reloads in this insn,
4490 and only then use additional registers.
4491 I think that maximizing reuse is needed to make sure we don't
4492 run out of reload regs. Suppose we have three reloads, and
4493 reloads A and B can share regs. These need two regs.
4494 Suppose A and B are given different regs.
4495 That leaves none for C. */
4496 for (pass = 0; pass < 2; pass++)
4497 {
4498 /* I is the index in spill_regs.
4499 We advance it round-robin between insns to use all spill regs
4500 equally, so that inherited reloads have a chance
4501 of leapfrogging each other. */
4502
4503 for (count = 0, i = last_spill_reg; count < n_spills; count++)
4504 {
4505 int class = (int) reload_reg_class[r];
4506
4507 i = (i + 1) % n_spills;
4508
4509 if (reload_reg_free_p (spill_regs[i], reload_opnum[r],
4510 reload_when_needed[r])
4511 && TEST_HARD_REG_BIT (reg_class_contents[class], spill_regs[i])
4512 && HARD_REGNO_MODE_OK (spill_regs[i], reload_mode[r])
4513 /* Look first for regs to share, then for unshared. But
4514 don't share regs used for inherited reloads; they are
4515 the ones we want to preserve. */
4516 && (pass
4517 || (TEST_HARD_REG_BIT (reload_reg_used_at_all,
4518 spill_regs[i])
4519 && ! TEST_HARD_REG_BIT (reload_reg_used_for_inherit,
4520 spill_regs[i]))))
4521 {
4522 int nr = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
4523 /* Avoid the problem where spilling a GENERAL_OR_FP_REG
4524 (on 68000) got us two FP regs. If NR is 1,
4525 we would reject both of them. */
4526 if (force_group)
4527 nr = CLASS_MAX_NREGS (reload_reg_class[r], reload_mode[r]);
4528 /* If we need only one reg, we have already won. */
4529 if (nr == 1)
4530 {
4531 /* But reject a single reg if we demand a group. */
4532 if (force_group)
4533 continue;
4534 break;
4535 }
4536 /* Otherwise check that as many consecutive regs as we need
4537 are available here.
4538 Also, don't use for a group registers that are
4539 needed for nongroups. */
4540 if (! TEST_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]))
4541 while (nr > 1)
4542 {
4543 regno = spill_regs[i] + nr - 1;
4544 if (!(TEST_HARD_REG_BIT (reg_class_contents[class], regno)
4545 && spill_reg_order[regno] >= 0
4546 && reload_reg_free_p (regno, reload_opnum[r],
4547 reload_when_needed[r])
4548 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
4549 regno)))
4550 break;
4551 nr--;
4552 }
4553 if (nr == 1)
4554 break;
4555 }
4556 }
4557
4558 /* If we found something on pass 1, omit pass 2. */
4559 if (count < n_spills)
4560 break;
4561 }
4562
4563 /* We should have found a spill register by now. */
4564 if (count == n_spills)
4565 {
4566 if (noerror)
4567 return 0;
4568 goto failure;
4569 }
4570
4571 /* I is the index in SPILL_REG_RTX of the reload register we are to
4572 allocate. Get an rtx for it and find its register number. */
4573
4574 new = spill_reg_rtx[i];
4575
4576 if (new == 0 || GET_MODE (new) != reload_mode[r])
4577 spill_reg_rtx[i] = new
4578 = gen_rtx (REG, reload_mode[r], spill_regs[i]);
4579
4580 regno = true_regnum (new);
4581
4582 /* Detect when the reload reg can't hold the reload mode.
4583      This used to be one `if', but the Sequent compiler can't handle that.  */
4584 if (HARD_REGNO_MODE_OK (regno, reload_mode[r]))
4585 {
4586 enum machine_mode test_mode = VOIDmode;
4587 if (reload_in[r])
4588 test_mode = GET_MODE (reload_in[r]);
4589 /* If reload_in[r] has VOIDmode, it means we will load it
4590 in whatever mode the reload reg has: to wit, reload_mode[r].
4591 We have already tested that for validity. */
4592 /* Aside from that, we need to test that the expressions
4593 to reload from or into have modes which are valid for this
4594 reload register. Otherwise the reload insns would be invalid. */
4595 if (! (reload_in[r] != 0 && test_mode != VOIDmode
4596 && ! HARD_REGNO_MODE_OK (regno, test_mode)))
4597 if (! (reload_out[r] != 0
4598 && ! HARD_REGNO_MODE_OK (regno, GET_MODE (reload_out[r]))))
4599 {
4600 /* The reg is OK. */
4601 last_spill_reg = i;
4602
4603 /* Mark as in use for this insn the reload regs we use
4604 for this. */
4605 mark_reload_reg_in_use (spill_regs[i], reload_opnum[r],
4606 reload_when_needed[r], reload_mode[r]);
4607
4608 reload_reg_rtx[r] = new;
4609 reload_spill_index[r] = i;
4610 return 1;
4611 }
4612 }
4613
4614 /* The reg is not OK. */
4615 if (noerror)
4616 return 0;
4617
4618 failure:
4619 if (asm_noperands (PATTERN (insn)) < 0)
4620 /* It's the compiler's fault. */
4621 abort ();
4622
4623 /* It's the user's fault; the operand's mode and constraint
4624 don't match. Disable this reload so we don't crash in final. */
4625 error_for_asm (insn,
4626 "`asm' operand constraint incompatible with operand size");
4627 reload_in[r] = 0;
4628 reload_out[r] = 0;
4629 reload_reg_rtx[r] = 0;
4630 reload_optional[r] = 1;
4631 reload_secondary_p[r] = 1;
4632
4633 return 1;
4634 }
4635 \f
4636 /* Assign hard reg targets for the pseudo-registers we must reload
4637 into hard regs for this insn.
4638 Also output the instructions to copy them in and out of the hard regs.
4639
4640 For machines with register classes, we are responsible for
4641 finding a reload reg in the proper class. */
4642
4643 static void
4644 choose_reload_regs (insn, avoid_return_reg)
4645 rtx insn;
4646 rtx avoid_return_reg;
4647 {
4648 register int i, j;
4649 int max_group_size = 1;
4650 enum reg_class group_class = NO_REGS;
4651 int inheritance;
4652
4653 rtx save_reload_reg_rtx[MAX_RELOADS];
4654 char save_reload_inherited[MAX_RELOADS];
4655 rtx save_reload_inheritance_insn[MAX_RELOADS];
4656 rtx save_reload_override_in[MAX_RELOADS];
4657 int save_reload_spill_index[MAX_RELOADS];
4658 HARD_REG_SET save_reload_reg_used;
4659 HARD_REG_SET save_reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
4660 HARD_REG_SET save_reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
4661 HARD_REG_SET save_reload_reg_used_in_input[MAX_RECOG_OPERANDS];
4662 HARD_REG_SET save_reload_reg_used_in_output[MAX_RECOG_OPERANDS];
4663 HARD_REG_SET save_reload_reg_used_in_op_addr;
4664 HARD_REG_SET save_reload_reg_used_in_insn;
4665 HARD_REG_SET save_reload_reg_used_in_other_addr;
4666 HARD_REG_SET save_reload_reg_used_at_all;
4667
4668 bzero (reload_inherited, MAX_RELOADS);
4669 bzero (reload_inheritance_insn, MAX_RELOADS * sizeof (rtx));
4670 bzero (reload_override_in, MAX_RELOADS * sizeof (rtx));
4671
4672 CLEAR_HARD_REG_SET (reload_reg_used);
4673 CLEAR_HARD_REG_SET (reload_reg_used_at_all);
4674 CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr);
4675 CLEAR_HARD_REG_SET (reload_reg_used_in_insn);
4676 CLEAR_HARD_REG_SET (reload_reg_used_in_other_addr);
4677
4678 for (i = 0; i < reload_n_operands; i++)
4679 {
4680 CLEAR_HARD_REG_SET (reload_reg_used_in_output[i]);
4681 CLEAR_HARD_REG_SET (reload_reg_used_in_input[i]);
4682 CLEAR_HARD_REG_SET (reload_reg_used_in_input_addr[i]);
4683 CLEAR_HARD_REG_SET (reload_reg_used_in_output_addr[i]);
4684 }
4685
4686 #ifdef SMALL_REGISTER_CLASSES
4687 /* Don't bother with avoiding the return reg
4688 if we have no mandatory reload that could use it. */
4689 if (avoid_return_reg)
4690 {
4691 int do_avoid = 0;
4692 int regno = REGNO (avoid_return_reg);
4693 int nregs
4694 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
4695 int r;
4696
4697 for (r = regno; r < regno + nregs; r++)
4698 if (spill_reg_order[r] >= 0)
4699 for (j = 0; j < n_reloads; j++)
4700 if (!reload_optional[j] && reload_reg_rtx[j] == 0
4701 && (reload_in[j] != 0 || reload_out[j] != 0
4702 || reload_secondary_p[j])
4703 &&
4704 TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[j]], r))
4705 do_avoid = 1;
4706 if (!do_avoid)
4707 avoid_return_reg = 0;
4708 }
4709 #endif /* SMALL_REGISTER_CLASSES */
4710
4711 #if 0 /* Not needed, now that we can always retry without inheritance. */
4712 /* See if we have more mandatory reloads than spill regs.
4713 If so, then we cannot risk optimizations that could prevent
4714 reloads from sharing one spill register.
4715
4716 Since we will try finding a better register than reload_reg_rtx
4717 unless it is equal to reload_in or reload_out, count such reloads. */
4718
4719 {
4720 int tem = 0;
4721 #ifdef SMALL_REGISTER_CLASSES
4722 int tem = (avoid_return_reg != 0);
4723 #endif
4724 for (j = 0; j < n_reloads; j++)
4725 if (! reload_optional[j]
4726 && (reload_in[j] != 0 || reload_out[j] != 0 || reload_secondary_p[j])
4727 && (reload_reg_rtx[j] == 0
4728 || (! rtx_equal_p (reload_reg_rtx[j], reload_in[j])
4729 && ! rtx_equal_p (reload_reg_rtx[j], reload_out[j]))))
4730 tem++;
4731 if (tem > n_spills)
4732 must_reuse = 1;
4733 }
4734 #endif
4735
4736 #ifdef SMALL_REGISTER_CLASSES
4737 /* Don't use the subroutine call return reg for a reload
4738 if we are supposed to avoid it. */
4739 if (avoid_return_reg)
4740 {
4741 int regno = REGNO (avoid_return_reg);
4742 int nregs
4743 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
4744 int r;
4745
4746 for (r = regno; r < regno + nregs; r++)
4747 if (spill_reg_order[r] >= 0)
4748 SET_HARD_REG_BIT (reload_reg_used, r);
4749 }
4750 #endif /* SMALL_REGISTER_CLASSES */
4751
4752 /* In order to be certain of getting the registers we need,
4753 we must sort the reloads into order of increasing register class.
4754 Then our grabbing of reload registers will parallel the process
4755 that provided the reload registers.
4756
4757 Also note whether any of the reloads wants a consecutive group of regs.
4758 If so, record the maximum size of the group desired and what
4759 register class contains all the groups needed by this insn. */
4760
4761 for (j = 0; j < n_reloads; j++)
4762 {
4763 reload_order[j] = j;
4764 reload_spill_index[j] = -1;
4765
4766 reload_mode[j]
4767 = (reload_inmode[j] == VOIDmode
4768 || (GET_MODE_SIZE (reload_outmode[j])
4769 > GET_MODE_SIZE (reload_inmode[j])))
4770 ? reload_outmode[j] : reload_inmode[j];
4771
4772 reload_nregs[j] = CLASS_MAX_NREGS (reload_reg_class[j], reload_mode[j]);
4773
4774 if (reload_nregs[j] > 1)
4775 {
4776 max_group_size = MAX (reload_nregs[j], max_group_size);
4777 group_class = reg_class_superunion[(int)reload_reg_class[j]][(int)group_class];
4778 }
4779
4780 /* If we have already decided to use a certain register,
4781 don't use it in another way. */
4782 if (reload_reg_rtx[j])
4783 mark_reload_reg_in_use (REGNO (reload_reg_rtx[j]), reload_opnum[j],
4784 reload_when_needed[j], reload_mode[j]);
4785 }
4786
4787 if (n_reloads > 1)
4788 qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);
4789
4790 bcopy (reload_reg_rtx, save_reload_reg_rtx, sizeof reload_reg_rtx);
4791 bcopy (reload_inherited, save_reload_inherited, sizeof reload_inherited);
4792 bcopy (reload_inheritance_insn, save_reload_inheritance_insn,
4793 sizeof reload_inheritance_insn);
4794 bcopy (reload_override_in, save_reload_override_in,
4795 sizeof reload_override_in);
4796 bcopy (reload_spill_index, save_reload_spill_index,
4797 sizeof reload_spill_index);
4798 COPY_HARD_REG_SET (save_reload_reg_used, reload_reg_used);
4799 COPY_HARD_REG_SET (save_reload_reg_used_at_all, reload_reg_used_at_all);
4800 COPY_HARD_REG_SET (save_reload_reg_used_in_op_addr,
4801 reload_reg_used_in_op_addr);
4802 COPY_HARD_REG_SET (save_reload_reg_used_in_insn,
4803 reload_reg_used_in_insn);
4804 COPY_HARD_REG_SET (save_reload_reg_used_in_other_addr,
4805 reload_reg_used_in_other_addr);
4806
4807 for (i = 0; i < reload_n_operands; i++)
4808 {
4809 COPY_HARD_REG_SET (save_reload_reg_used_in_output[i],
4810 reload_reg_used_in_output[i]);
4811 COPY_HARD_REG_SET (save_reload_reg_used_in_input[i],
4812 reload_reg_used_in_input[i]);
4813 COPY_HARD_REG_SET (save_reload_reg_used_in_input_addr[i],
4814 reload_reg_used_in_input_addr[i]);
4815 COPY_HARD_REG_SET (save_reload_reg_used_in_output_addr[i],
4816 reload_reg_used_in_output_addr[i]);
4817 }
4818
4819 /* If -O, try first with inheritance, then turning it off.
4820 If not -O, don't do inheritance.
4821 Using inheritance when not optimizing leads to paradoxes
4822 with fp on the 68k: fp numbers (not NaNs) fail to be equal to themselves
4823 because one side of the comparison might be inherited. */
4824
4825 for (inheritance = optimize > 0; inheritance >= 0; inheritance--)
4826 {
4827 /* Process the reloads in order of preference just found.
4828 Beyond this point, subregs can be found in reload_reg_rtx.
4829
4830 This used to look for an existing reloaded home for all
4831 of the reloads, and only then perform any new reloads.
4832 But that could lose if the reloads were done out of reg-class order
4833 because a later reload with a looser constraint might have an old
4834 home in a register needed by an earlier reload with a tighter constraint.
4835
4836 To solve this, we make two passes over the reloads, in the order
4837 described above. In the first pass we try to inherit a reload
4838 from a previous insn. If there is a later reload that needs a
4839 class that is a proper subset of the class being processed, we must
4840 also allocate a spill register during the first pass.
4841
4842 Then make a second pass over the reloads to allocate any reloads
4843 that haven't been given registers yet. */
4844
4845 CLEAR_HARD_REG_SET (reload_reg_used_for_inherit);
4846
4847 for (j = 0; j < n_reloads; j++)
4848 {
4849 register int r = reload_order[j];
4850
4851 /* Ignore reloads that got marked inoperative. */
4852 if (reload_out[r] == 0 && reload_in[r] == 0 && ! reload_secondary_p[r])
4853 continue;
4854
4855 /* If find_reloads chose to use reload_in or reload_out as a reload
4856 register, we don't need to choose one. Otherwise, try even if it found
4857 one, since we might save an insn if we find the value lying around. */
4858 if (reload_in[r] != 0 && reload_reg_rtx[r] != 0
4859 && (rtx_equal_p (reload_in[r], reload_reg_rtx[r])
4860 || rtx_equal_p (reload_out[r], reload_reg_rtx[r])))
4861 continue;
4862
4863 #if 0 /* No longer needed for correct operation.
4864 It might give better code, or might not; worth an experiment? */
4865 /* If this is an optional reload, we can't inherit from earlier insns
4866 until we are sure that any non-optional reloads have been allocated.
4867 The following code takes advantage of the fact that optional reloads
4868 are at the end of reload_order. */
4869 if (reload_optional[r] != 0)
4870 for (i = 0; i < j; i++)
4871 if ((reload_out[reload_order[i]] != 0
4872 || reload_in[reload_order[i]] != 0
4873 || reload_secondary_p[reload_order[i]])
4874 && ! reload_optional[reload_order[i]]
4875 && reload_reg_rtx[reload_order[i]] == 0)
4876 allocate_reload_reg (reload_order[i], insn, 0, inheritance);
4877 #endif
4878
4879 /* First see if this pseudo is already available as reloaded
4880 for a previous insn. We cannot try to inherit for reloads
4881 that are smaller than the maximum number of registers needed
4882 for groups unless the register we would allocate cannot be used
4883 for the groups.
4884
4885 We could check here to see if this is a secondary reload for
4886 an object that is already in a register of the desired class.
4887 This would avoid the need for the secondary reload register.
4888 But this is complex because we can't easily determine what
4889 objects might want to be loaded via this reload. So let a register
4890 be allocated here. In `emit_reload_insns' we suppress one of the
4891 loads in the case described above. */
4892
4893 if (inheritance)
4894 {
4895 register int regno = -1;
4896 enum machine_mode mode;
4897
4898 if (reload_in[r] == 0)
4899 ;
4900 else if (GET_CODE (reload_in[r]) == REG)
4901 {
4902 regno = REGNO (reload_in[r]);
4903 mode = GET_MODE (reload_in[r]);
4904 }
4905 else if (GET_CODE (reload_in_reg[r]) == REG)
4906 {
4907 regno = REGNO (reload_in_reg[r]);
4908 mode = GET_MODE (reload_in_reg[r]);
4909 }
4910 #if 0
4911 /* This won't work, since REGNO can be a pseudo reg number.
4912 Also, it takes much more hair to keep track of all the things
4913 that can invalidate an inherited reload of part of a pseudoreg. */
4914 else if (GET_CODE (reload_in[r]) == SUBREG
4915 && GET_CODE (SUBREG_REG (reload_in[r])) == REG)
4916 regno = REGNO (SUBREG_REG (reload_in[r])) + SUBREG_WORD (reload_in[r]);
4917 #endif
4918
4919 if (regno >= 0 && reg_last_reload_reg[regno] != 0)
4920 {
4921 i = spill_reg_order[REGNO (reg_last_reload_reg[regno])];
4922
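/* Use that previous reload register only if it still holds this pseudo,
   is wide enough, is valid in this reload's mode and class, does not
   conflict with any group requirement, and is free at the point in this
   insn where the reload is needed.  */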
4923 if (reg_reloaded_contents[i] == regno
4924 && (GET_MODE_SIZE (GET_MODE (reg_last_reload_reg[regno]))
4925 >= GET_MODE_SIZE (mode))
4926 && HARD_REGNO_MODE_OK (spill_regs[i], reload_mode[r])
4927 && TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[r]],
4928 spill_regs[i])
4929 && (reload_nregs[r] == max_group_size
4930 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) group_class],
4931 spill_regs[i]))
4932 && reload_reg_free_p (spill_regs[i], reload_opnum[r],
4933 reload_when_needed[r])
4934 && reload_reg_free_before_p (spill_regs[i],
4935 reload_opnum[r],
4936 reload_when_needed[r]))
4937 {
4938 /* If a group is needed, verify that all the subsequent
4939 registers still have their values intact. */
4940 int nr
4941 = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
4942 int k;
4943
4944 for (k = 1; k < nr; k++)
4945 if (reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
4946 != regno)
4947 break;
4948
4949 if (k == nr)
4950 {
4951 int i1;
4952
4953 /* We found a register that contains the
4954 value we need. If this register is the
4955 same as an `earlyclobber' operand of the
4956 current insn, just mark it as a place to
4957 reload from since we can't use it as the
4958 reload register itself. */
4959
4960 for (i1 = 0; i1 < n_earlyclobbers; i1++)
4961 if (reg_overlap_mentioned_for_reload_p
4962 (reg_last_reload_reg[regno],
4963 reload_earlyclobbers[i1]))
4964 break;
4965
4966 if (i1 != n_earlyclobbers
4967 /* Don't really use the inherited spill reg
4968 if we need it wider than we've got it. */
4969 || (GET_MODE_SIZE (reload_mode[r])
4970 > GET_MODE_SIZE (mode)))
4971 reload_override_in[r] = reg_last_reload_reg[regno];
4972 else
4973 {
4974 int k;
4975 /* We can use this as a reload reg. */
4976 /* Mark the register as in use for this part of
4977 the insn. */
4978 mark_reload_reg_in_use (spill_regs[i],
4979 reload_opnum[r],
4980 reload_when_needed[r],
4981 reload_mode[r]);
4982 reload_reg_rtx[r] = reg_last_reload_reg[regno];
4983 reload_inherited[r] = 1;
4984 reload_inheritance_insn[r]
4985 = reg_reloaded_insn[i];
4986 reload_spill_index[r] = i;
4987 for (k = 0; k < nr; k++)
4988 SET_HARD_REG_BIT (reload_reg_used_for_inherit,
4989 spill_regs[i + k]);
4990 }
4991 }
4992 }
4993 }
4994 }
4995
4996 /* Here's another way to see if the value is already lying around. */
4997 if (inheritance
4998 && reload_in[r] != 0
4999 && ! reload_inherited[r]
5000 && reload_out[r] == 0
5001 && (CONSTANT_P (reload_in[r])
5002 || GET_CODE (reload_in[r]) == PLUS
5003 || GET_CODE (reload_in[r]) == REG
5004 || GET_CODE (reload_in[r]) == MEM)
5005 && (reload_nregs[r] == max_group_size
5006 || ! reg_classes_intersect_p (reload_reg_class[r], group_class)))
5007 {
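/* Look for a register of the desired class that already holds the value
   of reload_in[r] at this point in the insn stream.  */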
5008 register rtx equiv
5009 = find_equiv_reg (reload_in[r], insn, reload_reg_class[r],
5010 -1, NULL_PTR, 0, reload_mode[r]);
5011 int regno;
5012
5013 if (equiv != 0)
5014 {
5015 if (GET_CODE (equiv) == REG)
5016 regno = REGNO (equiv);
5017 else if (GET_CODE (equiv) == SUBREG)
5018 {
5019 regno = REGNO (SUBREG_REG (equiv));
5020 if (regno < FIRST_PSEUDO_REGISTER)
5021 regno += SUBREG_WORD (equiv);
5022 }
5023 else
5024 abort ();
5025 }
5026
5027 /* If we found a spill reg, reject it unless it is free
5028 and of the desired class. */
5029 if (equiv != 0
5030 && ((spill_reg_order[regno] >= 0
5031 && ! reload_reg_free_before_p (regno, reload_opnum[r],
5032 reload_when_needed[r]))
5033 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[r]],
5034 regno)))
5035 equiv = 0;
5036
5037 if (equiv != 0 && TEST_HARD_REG_BIT (reload_reg_used_at_all, regno))
5038 equiv = 0;
5039
5040 if (equiv != 0 && ! HARD_REGNO_MODE_OK (regno, reload_mode[r]))
5041 equiv = 0;
5042
5043 /* We found a register that contains the value we need.
5044 If this register is the same as an `earlyclobber' operand
5045 of the current insn, just mark it as a place to reload from
5046 since we can't use it as the reload register itself. */
5047
5048 if (equiv != 0)
5049 for (i = 0; i < n_earlyclobbers; i++)
5050 if (reg_overlap_mentioned_for_reload_p (equiv,
5051 reload_earlyclobbers[i]))
5052 {
5053 reload_override_in[r] = equiv;
5054 equiv = 0;
5055 break;
5056 }
5057
5058 /* JRV: If the equiv register we have found is explicitly
5059 clobbered in the current insn, mark but don't use, as above. */
5060
5061 if (equiv != 0 && regno_clobbered_p (regno, insn))
5062 {
5063 reload_override_in[r] = equiv;
5064 equiv = 0;
5065 }
5066
5067 /* If we found an equivalent reg, say no code need be generated
5068 to load it, and use it as our reload reg. */
5069 if (equiv != 0 && regno != HARD_FRAME_POINTER_REGNUM)
5070 {
5071 reload_reg_rtx[r] = equiv;
5072 reload_inherited[r] = 1;
5073 /* If it is a spill reg,
5074 mark the spill reg as in use for this insn. */
5075 i = spill_reg_order[regno];
5076 if (i >= 0)
5077 {
5078 int nr = HARD_REGNO_NREGS (regno, reload_mode[r]);
5079 int k;
5080 mark_reload_reg_in_use (regno, reload_opnum[r],
5081 reload_when_needed[r],
5082 reload_mode[r]);
5083 for (k = 0; k < nr; k++)
5084 SET_HARD_REG_BIT (reload_reg_used_for_inherit, regno + k);
5085 }
5086 }
5087 }
5088
5089 /* If we found a register to use already, or if this is an optional
5090 reload, we are done. */
5091 if (reload_reg_rtx[r] != 0 || reload_optional[r] != 0)
5092 continue;
5093
5094 #if 0 /* No longer needed for correct operation. Might or might not
5095 give better code on the average. Want to experiment? */
5096
5097 /* See if there is a later reload that has a class different from our
5098 class that intersects our class or that requires fewer registers
5099 than our reload. If so, we must allocate a register to this
5100 reload now, since that reload might inherit a previous reload
5101 and take the only available register in our class. Don't do this
5102 for optional reloads since they will force all previous reloads
5103 to be allocated. Also don't do this for reloads that have been
5104 turned off. */
5105
5106 for (i = j + 1; i < n_reloads; i++)
5107 {
5108 int s = reload_order[i];
5109
5110 if ((reload_in[s] == 0 && reload_out[s] == 0
5111 && ! reload_secondary_p[s])
5112 || reload_optional[s])
5113 continue;
5114
5115 if ((reload_reg_class[s] != reload_reg_class[r]
5116 && reg_classes_intersect_p (reload_reg_class[r],
5117 reload_reg_class[s]))
5118 || reload_nregs[s] < reload_nregs[r])
5119 break;
5120 }
5121
5122 if (i == n_reloads)
5123 continue;
5124
5125 allocate_reload_reg (r, insn, j == n_reloads - 1, inheritance);
5126 #endif
5127 }
5128
5129 /* Now allocate reload registers for anything non-optional that
5130 didn't get one yet. */
5131 for (j = 0; j < n_reloads; j++)
5132 {
5133 register int r = reload_order[j];
5134
5135 /* Ignore reloads that got marked inoperative. */
5136 if (reload_out[r] == 0 && reload_in[r] == 0 && ! reload_secondary_p[r])
5137 continue;
5138
5139 /* Skip reloads that already have a register allocated or are
5140 optional. */
5141 if (reload_reg_rtx[r] != 0 || reload_optional[r])
5142 continue;
5143
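/* If no suitable register can be found, give up on this attempt; the
   code after the `fail' label below undoes what was done and, if we
   were using inheritance, retries without it.  */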
5144 if (! allocate_reload_reg (r, insn, j == n_reloads - 1, inheritance))
5145 break;
5146 }
5147
5148 /* If that loop got all the way, we have won. */
5149 if (j == n_reloads)
5150 break;
5151
5152 fail:
5153 /* Loop around and try without any inheritance. */
5154 /* First undo everything done by the failed attempt
5155 to allocate with inheritance. */
5156 bcopy (save_reload_reg_rtx, reload_reg_rtx, sizeof reload_reg_rtx);
5157 bcopy (save_reload_inherited, reload_inherited, sizeof reload_inherited);
5158 bcopy (save_reload_inheritance_insn, reload_inheritance_insn,
5159 sizeof reload_inheritance_insn);
5160 bcopy (save_reload_override_in, reload_override_in,
5161 sizeof reload_override_in);
5162 bcopy (save_reload_spill_index, reload_spill_index,
5163 sizeof reload_spill_index);
5164 COPY_HARD_REG_SET (reload_reg_used, save_reload_reg_used);
5165 COPY_HARD_REG_SET (reload_reg_used_at_all, save_reload_reg_used_at_all);
5166 COPY_HARD_REG_SET (reload_reg_used_in_op_addr,
5167 save_reload_reg_used_in_op_addr);
5168 COPY_HARD_REG_SET (reload_reg_used_in_insn,
5169 save_reload_reg_used_in_insn);
5170 COPY_HARD_REG_SET (reload_reg_used_in_other_addr,
5171 save_reload_reg_used_in_other_addr);
5172
5173 for (i = 0; i < reload_n_operands; i++)
5174 {
5175 COPY_HARD_REG_SET (reload_reg_used_in_input[i],
5176 save_reload_reg_used_in_input[i]);
5177 COPY_HARD_REG_SET (reload_reg_used_in_output[i],
5178 save_reload_reg_used_in_output[i]);
5179 COPY_HARD_REG_SET (reload_reg_used_in_input_addr[i],
5180 save_reload_reg_used_in_input_addr[i]);
5181 COPY_HARD_REG_SET (reload_reg_used_in_output_addr[i],
5182 save_reload_reg_used_in_output_addr[i]);
5183 }
5184 }
5185
5186 /* If we thought we could inherit a reload, because it seemed that
5187 nothing else wanted the same reload register earlier in the insn,
5188 verify that assumption, now that all reloads have been assigned. */
5189
5190 for (j = 0; j < n_reloads; j++)
5191 {
5192 register int r = reload_order[j];
5193
5194 if (reload_inherited[r] && reload_reg_rtx[r] != 0
5195 && ! reload_reg_free_before_p (true_regnum (reload_reg_rtx[r]),
5196 reload_opnum[r],
5197 reload_when_needed[r]))
5198 reload_inherited[r] = 0;
5199
5200 /* If we found a better place to reload from,
5201 validate it in the same fashion, if it is a reload reg. */
5202 if (reload_override_in[r]
5203 && (GET_CODE (reload_override_in[r]) == REG
5204 || GET_CODE (reload_override_in[r]) == SUBREG))
5205 {
5206 int regno = true_regnum (reload_override_in[r]);
5207 if (spill_reg_order[regno] >= 0
5208 && ! reload_reg_free_before_p (regno, reload_opnum[r],
5209 reload_when_needed[r]))
5210 reload_override_in[r] = 0;
5211 }
5212 }
5213
5214 /* Now that reload_override_in is known valid,
5215 actually override reload_in. */
5216 for (j = 0; j < n_reloads; j++)
5217 if (reload_override_in[j])
5218 reload_in[j] = reload_override_in[j];
5219
5220 /* If this reload won't be done because it has been cancelled or is
5221 optional and not inherited, clear reload_reg_rtx so other
5222 routines (such as subst_reloads) don't get confused. */
5223 for (j = 0; j < n_reloads; j++)
5224 if (reload_reg_rtx[j] != 0
5225 && ((reload_optional[j] && ! reload_inherited[j])
5226 || (reload_in[j] == 0 && reload_out[j] == 0
5227 && ! reload_secondary_p[j])))
5228 {
5229 int regno = true_regnum (reload_reg_rtx[j]);
5230
5231 if (spill_reg_order[regno] >= 0)
5232 clear_reload_reg_in_use (regno, reload_opnum[j],
5233 reload_when_needed[j], reload_mode[j]);
5234 reload_reg_rtx[j] = 0;
5235 }
5236
5237 /* Record which pseudos and which spill regs have output reloads. */
5238 for (j = 0; j < n_reloads; j++)
5239 {
5240 register int r = reload_order[j];
5241
5242 i = reload_spill_index[r];
5243
5244 /* I is nonneg if this reload used one of the spill regs.
5245 If reload_reg_rtx[r] is 0, this is an optional reload
5246 that we opted to ignore. */
5247 if (reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG
5248 && reload_reg_rtx[r] != 0)
5249 {
5250 register int nregno = REGNO (reload_out[r]);
5251 int nr = 1;
5252
5253 if (nregno < FIRST_PSEUDO_REGISTER)
5254 nr = HARD_REGNO_NREGS (nregno, reload_mode[r]);
5255
5256 while (--nr >= 0)
5257 reg_has_output_reload[nregno + nr] = 1;
5258
5259 if (i >= 0)
5260 {
5261 nr = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
5262 while (--nr >= 0)
5263 SET_HARD_REG_BIT (reg_is_output_reload, spill_regs[i] + nr);
5264 }
5265
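/* An output reload should only be RELOAD_OTHER, RELOAD_FOR_OUTPUT or
   RELOAD_FOR_INSN; anything else indicates an inconsistency.  */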
5266 if (reload_when_needed[r] != RELOAD_OTHER
5267 && reload_when_needed[r] != RELOAD_FOR_OUTPUT
5268 && reload_when_needed[r] != RELOAD_FOR_INSN)
5269 abort ();
5270 }
5271 }
5272 }
5273 \f
5274 /* If SMALL_REGISTER_CLASSES is defined, we may not have merged two
5275 reloads of the same item for fear that we might not have enough reload
5276 registers. However, normally they will get the same reload register
5277 and hence actually need not be loaded twice.
5278
5279 Here we check for the most common case of this phenomenon: when we have
5280 a number of reloads for the same object, each of which was allocated
5281 the same reload_reg_rtx, that reload_reg_rtx is not used for any other
5282 reload, and is not modified in the insn itself. If we find such,
5283 merge all the reloads and set the resulting reload to RELOAD_OTHER.
5284 This will not increase the number of spill registers needed and will
5285 prevent redundant code. */
5286
5287 #ifdef SMALL_REGISTER_CLASSES
5288
5289 static void
5290 merge_assigned_reloads (insn)
5291 rtx insn;
5292 {
5293 int i, j;
5294
5295 /* Scan all the reloads looking for ones that only load values and
5296 are not already RELOAD_OTHER and ones whose reload_reg_rtx are
5297 assigned and not modified by INSN. */
5298
5299 for (i = 0; i < n_reloads; i++)
5300 {
5301 if (reload_in[i] == 0 || reload_when_needed[i] == RELOAD_OTHER
5302 || reload_out[i] != 0 || reload_reg_rtx[i] == 0
5303 || reg_set_p (reload_reg_rtx[i], insn))
5304 continue;
5305
5306 /* Look at all other reloads. Ensure that the only use of this
5307 reload_reg_rtx is in a reload that just loads the same value
5308 as we do. Note that any secondary reloads must be of the identical
5309 class since the values, modes, and result registers are the
5310 same, so we need not do anything with any secondary reloads. */
5311
5312 for (j = 0; j < n_reloads; j++)
5313 {
5314 if (i == j || reload_reg_rtx[j] == 0
5315 || ! reg_overlap_mentioned_p (reload_reg_rtx[j],
5316 reload_reg_rtx[i]))
5317 continue;
5318
5319 /* If the reload regs aren't exactly the same (e.g., different modes)
5320 or if the values are different, we can't merge anything with this
5321 reload register. */
5322
5323 if (! rtx_equal_p (reload_reg_rtx[i], reload_reg_rtx[j])
5324 || reload_out[j] != 0 || reload_in[j] == 0
5325 || ! rtx_equal_p (reload_in[i], reload_in[j]))
5326 break;
5327 }
5328
5329 /* If all is OK, merge the reloads. Only set this to RELOAD_OTHER if
5330 we, in fact, found any matching reloads. */
5331
5332 if (j == n_reloads)
5333 {
5334 for (j = 0; j < n_reloads; j++)
5335 if (i != j && reload_reg_rtx[j] != 0
5336 && rtx_equal_p (reload_reg_rtx[i], reload_reg_rtx[j]))
5337 {
5338 reload_when_needed[i] = RELOAD_OTHER;
5339 reload_in[j] = 0;
5340 transfer_replacements (i, j);
5341 }
5342
5343 /* If this is now RELOAD_OTHER, look for any reloads that load
5344 parts of this operand and set them to RELOAD_FOR_OTHER_ADDRESS
5345 if they were for inputs, RELOAD_OTHER for outputs. Note that
5346 this test is equivalent to looking for reloads for this operand
5347 number. */
5348
5349 if (reload_when_needed[i] == RELOAD_OTHER)
5350 for (j = 0; j < n_reloads; j++)
5351 if (reload_in[j] != 0
5352 && reload_when_needed[j] != RELOAD_OTHER
5353 && reg_overlap_mentioned_for_reload_p (reload_in[j],
5354 reload_in[i]))
5355 reload_when_needed[j]
5356 = reload_when_needed[j] == RELOAD_FOR_INPUT_ADDRESS
5357 ? RELOAD_FOR_OTHER_ADDRESS : RELOAD_OTHER;
5358 }
5359 }
5360 }
5361 #endif /* SMALL_REGISTER_CLASSES */
5362 \f
5363 /* Output insns to reload values in and out of the chosen reload regs. */
5364
5365 static void
5366 emit_reload_insns (insn)
5367 rtx insn;
5368 {
5369 register int j;
5370 rtx input_reload_insns[MAX_RECOG_OPERANDS];
5371 rtx other_input_address_reload_insns = 0;
5372 rtx other_input_reload_insns = 0;
5373 rtx input_address_reload_insns[MAX_RECOG_OPERANDS];
5374 rtx output_reload_insns[MAX_RECOG_OPERANDS];
5375 rtx output_address_reload_insns[MAX_RECOG_OPERANDS];
5376 rtx operand_reload_insns = 0;
5377 rtx following_insn = NEXT_INSN (insn);
5378 rtx before_insn = insn;
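/* Nonzero when the current reload has been handled by a special sequence
   (auto-increment, redirecting the previous insn, or a secondary-reload
   icode), so that no generic copy insn need be emitted for it.  */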
5379 int special;
5380 /* Values to be put in spill_reg_store are put here first. */
5381 rtx new_spill_reg_store[FIRST_PSEUDO_REGISTER];
5382
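/* Start with empty sequences for each kind of reload insn; the loop below
   fills them in, and they are emitted around INSN near the end of this
   function.  */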
5383 for (j = 0; j < reload_n_operands; j++)
5384 input_reload_insns[j] = input_address_reload_insns[j]
5385 = output_reload_insns[j] = output_address_reload_insns[j] = 0;
5386
5387 /* If this is a CALL_INSN preceded by USE insns, any reload insns
5388 must go in front of the first USE insn, not in front of INSN. */
5389
5390 if (GET_CODE (insn) == CALL_INSN && GET_CODE (PREV_INSN (insn)) == INSN
5391 && GET_CODE (PATTERN (PREV_INSN (insn))) == USE)
5392 while (GET_CODE (PREV_INSN (before_insn)) == INSN
5393 && GET_CODE (PATTERN (PREV_INSN (before_insn))) == USE)
5394 before_insn = PREV_INSN (before_insn);
5395
5396 /* If INSN is followed by any CLOBBER insns made by find_reloads,
5397 put our reloads after them since they may otherwise be
5398 misinterpreted. */
5399
5400 while (GET_CODE (following_insn) == INSN
5401 && GET_MODE (following_insn) == DImode
5402 && GET_CODE (PATTERN (following_insn)) == CLOBBER
5403 && NEXT_INSN (following_insn) != 0)
5404 following_insn = NEXT_INSN (following_insn);
5405
5406 /* Now output the instructions to copy the data into and out of the
5407 reload registers. Do these in the order that the reloads were reported,
5408 since reloads of base and index registers precede reloads of operands
5409 and the operands may need the base and index registers reloaded. */
5410
5411 for (j = 0; j < n_reloads; j++)
5412 {
5413 register rtx old;
5414 rtx oldequiv_reg = 0;
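/* If an output reload is emitted for this reload, STORE_INSN records the
   insn that does the store, so that spill_reg_store can be updated via
   new_spill_reg_store once all reloads of this insn are processed.  */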
5415 rtx store_insn = 0;
5416
5417 old = reload_in[j];
5418 if (old != 0 && ! reload_inherited[j]
5419 && ! rtx_equal_p (reload_reg_rtx[j], old)
5420 && reload_reg_rtx[j] != 0)
5421 {
5422 register rtx reloadreg = reload_reg_rtx[j];
5423 rtx oldequiv = 0;
5424 enum machine_mode mode;
5425 rtx *where;
5426
5427 /* Determine the mode to reload in.
5428 This is very tricky because we have three to choose from.
5429 There is the mode the insn operand wants (reload_inmode[J]).
5430 There is the mode of the reload register RELOADREG.
5431 There is the intrinsic mode of the operand, which we could find
5432 by stripping some SUBREGs.
5433 It turns out that RELOADREG's mode is irrelevant:
5434 we can change that arbitrarily.
5435
5436 Consider (SUBREG:SI foo:QI) as an operand that must be SImode;
5437 then the reload reg may not support QImode moves, so use SImode.
5438 If foo is in memory due to spilling a pseudo reg, this is safe,
5439 because the QImode value is in the least significant part of a
5440 slot big enough for a SImode. If foo is some other sort of
5441 memory reference, then it is impossible to reload this case,
5442 so previous passes had better make sure this never happens.
5443
5444 Then consider a one-word union which has SImode and one of its
5445 members is a float, being fetched as (SUBREG:SF union:SI).
5446 We must fetch that as SFmode because we could be loading into
5447 a float-only register. In this case OLD's mode is correct.
5448
5449 Consider an immediate integer: it has VOIDmode. Here we need
5450 to get a mode from something else.
5451
5452 In some cases, there is a fourth mode, the operand's
5453 containing mode. If the insn specifies a containing mode for
5454 this operand, it overrides all others.
5455
5456 I am not sure whether the algorithm here is always right,
5457 but it does the right things in those cases. */
5458
5459 mode = GET_MODE (old);
5460 if (mode == VOIDmode)
5461 mode = reload_inmode[j];
5462
5463 #ifdef SECONDARY_INPUT_RELOAD_CLASS
5464 /* If we need a secondary register for this operation, see if
5465 the value is already in a register in that class. Don't
5466 do this if the secondary register will be used as a scratch
5467 register. */
5468
5469 if (reload_secondary_reload[j] >= 0
5470 && reload_secondary_icode[j] == CODE_FOR_nothing
5471 && optimize)
5472 oldequiv
5473 = find_equiv_reg (old, insn,
5474 reload_reg_class[reload_secondary_reload[j]],
5475 -1, NULL_PTR, 0, mode);
5476 #endif
5477
5478 /* If reloading from memory, see if there is a register
5479 that already holds the same value. If so, reload from there.
5480 We can pass 0 as the reload_reg_p argument because
5481 any other reload has either already been emitted,
5482 in which case find_equiv_reg will see the reload-insn,
5483 or has yet to be emitted, in which case it doesn't matter
5484 because we will use this equiv reg right away. */
5485
5486 if (oldequiv == 0 && optimize
5487 && (GET_CODE (old) == MEM
5488 || (GET_CODE (old) == REG
5489 && REGNO (old) >= FIRST_PSEUDO_REGISTER
5490 && reg_renumber[REGNO (old)] < 0)))
5491 oldequiv = find_equiv_reg (old, insn, ALL_REGS,
5492 -1, NULL_PTR, 0, mode);
5493
5494 if (oldequiv)
5495 {
5496 int regno = true_regnum (oldequiv);
5497
5498 /* If OLDEQUIV is a spill register, don't use it for this
5499 if any other reload needs it at an earlier stage of this insn
5500 or at this stage. */
5501 if (spill_reg_order[regno] >= 0
5502 && (! reload_reg_free_p (regno, reload_opnum[j],
5503 reload_when_needed[j])
5504 || ! reload_reg_free_before_p (regno, reload_opnum[j],
5505 reload_when_needed[j])))
5506 oldequiv = 0;
5507
5508 /* If OLDEQUIV is not a spill register,
5509 don't use it if any other reload wants it. */
5510 if (spill_reg_order[regno] < 0)
5511 {
5512 int k;
5513 for (k = 0; k < n_reloads; k++)
5514 if (reload_reg_rtx[k] != 0 && k != j
5515 && reg_overlap_mentioned_for_reload_p (reload_reg_rtx[k],
5516 oldequiv))
5517 {
5518 oldequiv = 0;
5519 break;
5520 }
5521 }
5522
5523 /* If it is no cheaper to copy from OLDEQUIV into the
5524 reload register than it would be to move from memory,
5525 don't use it. Likewise, if we need a secondary register
5526 or memory. */
5527
5528 if (oldequiv != 0
5529 && ((REGNO_REG_CLASS (regno) != reload_reg_class[j]
5530 && (REGISTER_MOVE_COST (REGNO_REG_CLASS (regno),
5531 reload_reg_class[j])
5532 >= MEMORY_MOVE_COST (mode)))
5533 #ifdef SECONDARY_INPUT_RELOAD_CLASS
5534 || (SECONDARY_INPUT_RELOAD_CLASS (reload_reg_class[j],
5535 mode, oldequiv)
5536 != NO_REGS)
5537 #endif
5538 #ifdef SECONDARY_MEMORY_NEEDED
5539 || SECONDARY_MEMORY_NEEDED (reload_reg_class[j],
5540 REGNO_REG_CLASS (regno),
5541 mode)
5542 #endif
5543 ))
5544 oldequiv = 0;
5545 }
5546
5547 if (oldequiv == 0)
5548 oldequiv = old;
5549 else if (GET_CODE (oldequiv) == REG)
5550 oldequiv_reg = oldequiv;
5551 else if (GET_CODE (oldequiv) == SUBREG)
5552 oldequiv_reg = SUBREG_REG (oldequiv);
5553
5554 /* Encapsulate both RELOADREG and OLDEQUIV into that mode,
5555 then load RELOADREG from OLDEQUIV. Note that we cannot use
5556 gen_lowpart_common since it can do the wrong thing when
5557 RELOADREG has a multi-word mode. Note that RELOADREG
5558 must always be a REG here. */
5559
5560 if (GET_MODE (reloadreg) != mode)
5561 reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
5562 while (GET_CODE (oldequiv) == SUBREG && GET_MODE (oldequiv) != mode)
5563 oldequiv = SUBREG_REG (oldequiv);
5564 if (GET_MODE (oldequiv) != VOIDmode
5565 && mode != GET_MODE (oldequiv))
5566 oldequiv = gen_rtx (SUBREG, mode, oldequiv, 0);
5567
5568 /* Switch to the right place to emit the reload insns. */
5569 switch (reload_when_needed[j])
5570 {
5571 case RELOAD_OTHER:
5572 where = &other_input_reload_insns;
5573 break;
5574 case RELOAD_FOR_INPUT:
5575 where = &input_reload_insns[reload_opnum[j]];
5576 break;
5577 case RELOAD_FOR_INPUT_ADDRESS:
5578 where = &input_address_reload_insns[reload_opnum[j]];
5579 break;
5580 case RELOAD_FOR_OUTPUT_ADDRESS:
5581 where = &output_address_reload_insns[reload_opnum[j]];
5582 break;
5583 case RELOAD_FOR_OPERAND_ADDRESS:
5584 where = &operand_reload_insns;
5585 break;
5586 case RELOAD_FOR_OTHER_ADDRESS:
5587 where = &other_input_address_reload_insns;
5588 break;
5589 default:
5590 abort ();
5591 }
5592
5593 push_to_sequence (*where);
5594 special = 0;
5595
5596 /* Auto-increment addresses must be reloaded in a special way. */
5597 if (GET_CODE (oldequiv) == POST_INC
5598 || GET_CODE (oldequiv) == POST_DEC
5599 || GET_CODE (oldequiv) == PRE_INC
5600 || GET_CODE (oldequiv) == PRE_DEC)
5601 {
5602 /* We are not going to bother supporting the case where an
5603 incremented register can't be copied directly from
5604 OLDEQUIV since this seems highly unlikely. */
5605 if (reload_secondary_reload[j] >= 0)
5606 abort ();
5607 /* Prevent normal processing of this reload. */
5608 special = 1;
5609 /* Output a special code sequence for this case. */
5610 inc_for_reload (reloadreg, oldequiv, reload_inc[j]);
5611 }
5612
5613 /* If we are reloading a pseudo-register that was set by the previous
5614 insn, see if we can get rid of that pseudo-register entirely
5615 by redirecting the previous insn into our reload register. */
5616
5617 else if (optimize && GET_CODE (old) == REG
5618 && REGNO (old) >= FIRST_PSEUDO_REGISTER
5619 && dead_or_set_p (insn, old)
5620 /* This is unsafe if some other reload
5621 uses the same reg first. */
5622 && reload_reg_free_before_p (REGNO (reloadreg),
5623 reload_opnum[j],
5624 reload_when_needed[j]))
5625 {
5626 rtx temp = PREV_INSN (insn);
5627 while (temp && GET_CODE (temp) == NOTE)
5628 temp = PREV_INSN (temp);
5629 if (temp
5630 && GET_CODE (temp) == INSN
5631 && GET_CODE (PATTERN (temp)) == SET
5632 && SET_DEST (PATTERN (temp)) == old
5633 /* Make sure we can access insn_operand_constraint. */
5634 && asm_noperands (PATTERN (temp)) < 0
5635 /* This is unsafe if prev insn rejects our reload reg. */
5636 && constraint_accepts_reg_p (insn_operand_constraint[recog_memoized (temp)][0],
5637 reloadreg)
5638 /* This is unsafe if operand occurs more than once in current
5639 insn. Perhaps some occurrences aren't reloaded. */
5640 && count_occurrences (PATTERN (insn), old) == 1
5641 /* Don't risk splitting a matching pair of operands. */
5642 && ! reg_mentioned_p (old, SET_SRC (PATTERN (temp))))
5643 {
5644 /* Store into the reload register instead of the pseudo. */
5645 SET_DEST (PATTERN (temp)) = reloadreg;
5646 /* If these are the only uses of the pseudo reg,
5647 pretend for GDB it lives in the reload reg we used. */
5648 if (reg_n_deaths[REGNO (old)] == 1
5649 && reg_n_sets[REGNO (old)] == 1)
5650 {
5651 reg_renumber[REGNO (old)] = REGNO (reload_reg_rtx[j]);
5652 alter_reg (REGNO (old), -1);
5653 }
5654 special = 1;
5655 }
5656 }
5657
5658 /* We can't do that, so output an insn to load RELOADREG. */
5659
5660 if (! special)
5661 {
5662 #ifdef SECONDARY_INPUT_RELOAD_CLASS
5663 rtx second_reload_reg = 0;
5664 enum insn_code icode;
5665
5666 /* If we have a secondary reload, pick up the secondary register
5667 and icode, if any. If OLDEQUIV and OLD are different or
5668 if this is an in-out reload, recompute whether or not we
5669 still need a secondary register and what the icode should
5670 be. If we still need a secondary register and the class or
5671 icode is different, go back to reloading from OLD if using
5672 OLDEQUIV means that we got the wrong type of register. We
5673 cannot have different class or icode due to an in-out reload
5674 because we don't make such reloads when both the input and
5675 output need secondary reload registers. */
5676
5677 if (reload_secondary_reload[j] >= 0)
5678 {
5679 int secondary_reload = reload_secondary_reload[j];
5680 rtx real_oldequiv = oldequiv;
5681 rtx real_old = old;
5682
5683 /* If OLDEQUIV is a pseudo with a MEM, get the real MEM
5684 and similarly for OLD.
5685 See comments in find_secondary_reload in reload.c. */
5686 if (GET_CODE (oldequiv) == REG
5687 && REGNO (oldequiv) >= FIRST_PSEUDO_REGISTER
5688 && reg_equiv_mem[REGNO (oldequiv)] != 0)
5689 real_oldequiv = reg_equiv_mem[REGNO (oldequiv)];
5690
5691 if (GET_CODE (old) == REG
5692 && REGNO (old) >= FIRST_PSEUDO_REGISTER
5693 && reg_equiv_mem[REGNO (old)] != 0)
5694 real_old = reg_equiv_mem[REGNO (old)];
5695
5696 second_reload_reg = reload_reg_rtx[secondary_reload];
5697 icode = reload_secondary_icode[j];
5698
5699 if ((old != oldequiv && ! rtx_equal_p (old, oldequiv))
5700 || (reload_in[j] != 0 && reload_out[j] != 0))
5701 {
5702 enum reg_class new_class
5703 = SECONDARY_INPUT_RELOAD_CLASS (reload_reg_class[j],
5704 mode, real_oldequiv);
5705
5706 if (new_class == NO_REGS)
5707 second_reload_reg = 0;
5708 else
5709 {
5710 enum insn_code new_icode;
5711 enum machine_mode new_mode;
5712
5713 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) new_class],
5714 REGNO (second_reload_reg)))
5715 oldequiv = old, real_oldequiv = real_old;
5716 else
5717 {
5718 new_icode = reload_in_optab[(int) mode];
5719 if (new_icode != CODE_FOR_nothing
5720 && ((insn_operand_predicate[(int) new_icode][0]
5721 && ! ((*insn_operand_predicate[(int) new_icode][0])
5722 (reloadreg, mode)))
5723 || (insn_operand_predicate[(int) new_icode][1]
5724 && ! ((*insn_operand_predicate[(int) new_icode][1])
5725 (real_oldequiv, mode)))))
5726 new_icode = CODE_FOR_nothing;
5727
5728 if (new_icode == CODE_FOR_nothing)
5729 new_mode = mode;
5730 else
5731 new_mode = insn_operand_mode[(int) new_icode][2];
5732
5733 if (GET_MODE (second_reload_reg) != new_mode)
5734 {
5735 if (!HARD_REGNO_MODE_OK (REGNO (second_reload_reg),
5736 new_mode))
5737 oldequiv = old, real_oldequiv = real_old;
5738 else
5739 second_reload_reg
5740 = gen_rtx (REG, new_mode,
5741 REGNO (second_reload_reg));
5742 }
5743 }
5744 }
5745 }
5746
5747 /* If we still need a secondary reload register, check
5748 to see if it is being used as a scratch or intermediate
5749 register and generate code appropriately. If we need
5750 a scratch register, use REAL_OLDEQUIV since the form of
5751 the insn may depend on the actual address if it is
5752 a MEM. */
5753
5754 if (second_reload_reg)
5755 {
5756 if (icode != CODE_FOR_nothing)
5757 {
5758 emit_insn (GEN_FCN (icode) (reloadreg, real_oldequiv,
5759 second_reload_reg));
5760 special = 1;
5761 }
5762 else
5763 {
5764 /* See if we need a scratch register to load the
5765 intermediate register (a tertiary reload). */
5766 enum insn_code tertiary_icode
5767 = reload_secondary_icode[secondary_reload];
5768
5769 if (tertiary_icode != CODE_FOR_nothing)
5770 {
5771 rtx third_reload_reg
5772 = reload_reg_rtx[reload_secondary_reload[secondary_reload]];
5773
5774 emit_insn ((GEN_FCN (tertiary_icode)
5775 (second_reload_reg, real_oldequiv,
5776 third_reload_reg)));
5777 }
5778 else
5779 gen_input_reload (second_reload_reg, oldequiv,
5780 reload_opnum[j],
5781 reload_when_needed[j]);
5782
5783 oldequiv = second_reload_reg;
5784 }
5785 }
5786 }
5787 #endif
5788
5789 if (! special)
5790 gen_input_reload (reloadreg, oldequiv, reload_opnum[j],
5791 reload_when_needed[j]);
5792
5793 #if defined(SECONDARY_INPUT_RELOAD_CLASS) && defined(PRESERVE_DEATH_INFO_REGNO_P)
5794 /* We may have to make a REG_DEAD note for the secondary reload
5795 register in the insns we just made. Find the last insn that
5796 mentioned the register. */
5797 if (! special && second_reload_reg
5798 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (second_reload_reg)))
5799 {
5800 rtx prev;
5801
5802 for (prev = get_last_insn (); prev;
5803 prev = PREV_INSN (prev))
5804 if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
5805 && reg_overlap_mentioned_for_reload_p (second_reload_reg,
5806 PATTERN (prev)))
5807 {
5808 REG_NOTES (prev) = gen_rtx (EXPR_LIST, REG_DEAD,
5809 second_reload_reg,
5810 REG_NOTES (prev));
5811 break;
5812 }
5813 }
5814 #endif
5815 }
5816
5817 /* End this sequence. */
5818 *where = get_insns ();
5819 end_sequence ();
5820 }
5821
5822 /* Add a note saying the input reload reg
5823 dies in this insn, if anyone cares. */
5824 #ifdef PRESERVE_DEATH_INFO_REGNO_P
5825 if (old != 0
5826 && reload_reg_rtx[j] != old
5827 && reload_reg_rtx[j] != 0
5828 && reload_out[j] == 0
5829 && ! reload_inherited[j]
5830 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (reload_reg_rtx[j])))
5831 {
5832 register rtx reloadreg = reload_reg_rtx[j];
5833
5834 #if 0
5835 /* We can't abort here because we need to support this for sched.c.
5836 It's not terrible to miss a REG_DEAD note, but we should try
5837 to figure out how to do this correctly. */
5838 /* The code below is incorrect for address-only reloads. */
5839 if (reload_when_needed[j] != RELOAD_OTHER
5840 && reload_when_needed[j] != RELOAD_FOR_INPUT)
5841 abort ();
5842 #endif
5843
5844 /* Add a death note to this insn, for an input reload. */
5845
5846 if ((reload_when_needed[j] == RELOAD_OTHER
5847 || reload_when_needed[j] == RELOAD_FOR_INPUT)
5848 && ! dead_or_set_p (insn, reloadreg))
5849 REG_NOTES (insn)
5850 = gen_rtx (EXPR_LIST, REG_DEAD,
5851 reloadreg, REG_NOTES (insn));
5852 }
5853
5854 /* When we inherit a reload, the last marked death of the reload reg
5855 may no longer really be a death. */
5856 if (reload_reg_rtx[j] != 0
5857 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (reload_reg_rtx[j]))
5858 && reload_inherited[j])
5859 {
5860 /* Handle inheriting an output reload.
5861 Remove the death note from the output reload insn. */
5862 if (reload_spill_index[j] >= 0
5863 && GET_CODE (reload_in[j]) == REG
5864 && spill_reg_store[reload_spill_index[j]] != 0
5865 && find_regno_note (spill_reg_store[reload_spill_index[j]],
5866 REG_DEAD, REGNO (reload_reg_rtx[j])))
5867 remove_death (REGNO (reload_reg_rtx[j]),
5868 spill_reg_store[reload_spill_index[j]]);
5869 /* Likewise for input reloads that were inherited. */
5870 else if (reload_spill_index[j] >= 0
5871 && GET_CODE (reload_in[j]) == REG
5872 && spill_reg_store[reload_spill_index[j]] == 0
5873 && reload_inheritance_insn[j] != 0
5874 && find_regno_note (reload_inheritance_insn[j], REG_DEAD,
5875 REGNO (reload_reg_rtx[j])))
5876 remove_death (REGNO (reload_reg_rtx[j]),
5877 reload_inheritance_insn[j]);
5878 else
5879 {
5880 rtx prev;
5881
5882 /* We got this register from find_equiv_reg.
5883 Search back for its last death note and get rid of it.
5884 But don't search back too far.
5885 Don't go past a place where this reg is set,
5886 since a death note before that remains valid. */
5887 for (prev = PREV_INSN (insn);
5888 prev && GET_CODE (prev) != CODE_LABEL;
5889 prev = PREV_INSN (prev))
5890 if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
5891 && dead_or_set_p (prev, reload_reg_rtx[j]))
5892 {
5893 if (find_regno_note (prev, REG_DEAD,
5894 REGNO (reload_reg_rtx[j])))
5895 remove_death (REGNO (reload_reg_rtx[j]), prev);
5896 break;
5897 }
5898 }
5899 }
5900
5901 /* We might have used find_equiv_reg above to choose an alternate
5902 place from which to reload. If so, and it died, we need to remove
5903 that death and move it to one of the insns we just made. */
5904
5905 if (oldequiv_reg != 0
5906 && PRESERVE_DEATH_INFO_REGNO_P (true_regnum (oldequiv_reg)))
5907 {
5908 rtx prev, prev1;
5909
5910 for (prev = PREV_INSN (insn); prev && GET_CODE (prev) != CODE_LABEL;
5911 prev = PREV_INSN (prev))
5912 if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
5913 && dead_or_set_p (prev, oldequiv_reg))
5914 {
5915 if (find_regno_note (prev, REG_DEAD, REGNO (oldequiv_reg)))
5916 {
5917 for (prev1 = this_reload_insn;
5918 prev1; prev1 = PREV_INSN (prev1))
5919 if (GET_RTX_CLASS (GET_CODE (prev1)) == 'i'
5920 && reg_overlap_mentioned_for_reload_p (oldequiv_reg,
5921 PATTERN (prev1)))
5922 {
5923 REG_NOTES (prev1) = gen_rtx (EXPR_LIST, REG_DEAD,
5924 oldequiv_reg,
5925 REG_NOTES (prev1));
5926 break;
5927 }
5928 remove_death (REGNO (oldequiv_reg), prev);
5929 }
5930 break;
5931 }
5932 }
5933 #endif
5934
5935 /* If we are reloading a register that was recently stored in with an
5936 output-reload, see if we can prove there was
5937 actually no need to store the old value in it. */
5938
5939 if (optimize && reload_inherited[j] && reload_spill_index[j] >= 0
5940 && reload_in[j] != 0
5941 && GET_CODE (reload_in[j]) == REG
5942 #if 0
5943 /* There doesn't seem to be any reason to restrict this to pseudos
5944 and doing so loses in the case where we are copying from a
5945 register of the wrong class. */
5946 && REGNO (reload_in[j]) >= FIRST_PSEUDO_REGISTER
5947 #endif
5948 && spill_reg_store[reload_spill_index[j]] != 0
5949 /* This is unsafe if some other reload uses the same reg first. */
5950 && reload_reg_free_before_p (spill_regs[reload_spill_index[j]],
5951 reload_opnum[j], reload_when_needed[j])
5952 && dead_or_set_p (insn, reload_in[j])
5953 /* This is unsafe if operand occurs more than once in current
5954 insn. Perhaps some occurrences weren't reloaded. */
5955 && count_occurrences (PATTERN (insn), reload_in[j]) == 1)
5956 delete_output_reload (insn, j,
5957 spill_reg_store[reload_spill_index[j]]);
5958
5959 /* Input-reloading is done. Now do output-reloading,
5960 storing the value from the reload-register after the main insn
5961 if reload_out[j] is nonzero.
5962
5963 ??? At some point we need to support handling output reloads of
5964 JUMP_INSNs or insns that set cc0. */
5965 old = reload_out[j];
5966 if (old != 0
5967 && reload_reg_rtx[j] != old
5968 && reload_reg_rtx[j] != 0)
5969 {
5970 register rtx reloadreg = reload_reg_rtx[j];
5971 register rtx second_reloadreg = 0;
5972 rtx note, p;
5973 enum machine_mode mode;
5974 int special = 0;
5975
5976 /* An output operand that dies right away does need a reload,
5977 but need not be copied from it. Show the new location in the
5978 REG_UNUSED note. */
5979 if ((GET_CODE (old) == REG || GET_CODE (old) == SCRATCH)
5980 && (note = find_reg_note (insn, REG_UNUSED, old)) != 0)
5981 {
5982 XEXP (note, 0) = reload_reg_rtx[j];
5983 continue;
5984 }
5985 else if (GET_CODE (old) == SCRATCH)
5986 /* If we aren't optimizing, there won't be a REG_UNUSED note,
5987 but we don't want to make an output reload. */
5988 continue;
5989
5990 #if 0
5991 /* Strip off of OLD any size-increasing SUBREGs such as
5992 (SUBREG:SI foo:QI 0). */
5993
5994 while (GET_CODE (old) == SUBREG && SUBREG_WORD (old) == 0
5995 && (GET_MODE_SIZE (GET_MODE (old))
5996 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (old)))))
5997 old = SUBREG_REG (old);
5998 #endif
5999
6000 /* If this is a JUMP_INSN, we can't support output reloads yet. */
6001 if (GET_CODE (insn) == JUMP_INSN)
6002 abort ();
6003
6004 push_to_sequence (output_reload_insns[reload_opnum[j]]);
6005
6006 /* Determine the mode to reload in.
6007 See comments above (for input reloading). */
6008
6009 mode = GET_MODE (old);
6010 if (mode == VOIDmode)
6011 {
6012 /* VOIDmode should never happen for an output. */
6013 if (asm_noperands (PATTERN (insn)) < 0)
6014 /* It's the compiler's fault. */
6015 abort ();
6016 error_for_asm (insn, "output operand is constant in `asm'");
6017 /* Prevent crash--use something we know is valid. */
6018 mode = word_mode;
6019 old = gen_rtx (REG, mode, REGNO (reloadreg));
6020 }
6021
6022 if (GET_MODE (reloadreg) != mode)
6023 reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
6024
6025 #ifdef SECONDARY_OUTPUT_RELOAD_CLASS
6026
6027 /* If we need two reload regs, set RELOADREG to the intermediate
6028 one, since it will be stored into OUT. We might need a secondary
6029 register only for an input reload, so check again here. */
6030
6031 if (reload_secondary_reload[j] >= 0)
6032 {
6033 rtx real_old = old;
6034
6035 if (GET_CODE (old) == REG && REGNO (old) >= FIRST_PSEUDO_REGISTER
6036 && reg_equiv_mem[REGNO (old)] != 0)
6037 real_old = reg_equiv_mem[REGNO (old)];
6038
6039 if ((SECONDARY_OUTPUT_RELOAD_CLASS (reload_reg_class[j],
6040 mode, real_old)
6041 != NO_REGS))
6042 {
6043 second_reloadreg = reloadreg;
6044 reloadreg = reload_reg_rtx[reload_secondary_reload[j]];
6045
6046 /* See if RELOADREG is to be used as a scratch register
6047 or as an intermediate register. */
6048 if (reload_secondary_icode[j] != CODE_FOR_nothing)
6049 {
6050 emit_insn ((GEN_FCN (reload_secondary_icode[j])
6051 (real_old, second_reloadreg, reloadreg)));
6052 special = 1;
6053 }
6054 else
6055 {
6056 /* See if we need both a scratch and intermediate reload
6057 register. */
6058 int secondary_reload = reload_secondary_reload[j];
6059 enum insn_code tertiary_icode
6060 = reload_secondary_icode[secondary_reload];
6061 rtx pat;
6062
6063 if (GET_MODE (reloadreg) != mode)
6064 reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
6065
6066 if (tertiary_icode != CODE_FOR_nothing)
6067 {
6068 rtx third_reloadreg
6069 = reload_reg_rtx[reload_secondary_reload[secondary_reload]];
6070 pat = (GEN_FCN (tertiary_icode)
6071 (reloadreg, second_reloadreg, third_reloadreg));
6072 }
6073 #ifdef SECONDARY_MEMORY_NEEDED
6074 /* If we need a memory location to do the move, do it that way. */
6075 else if (GET_CODE (reloadreg) == REG
6076 && REGNO (reloadreg) < FIRST_PSEUDO_REGISTER
6077 && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (REGNO (reloadreg)),
6078 REGNO_REG_CLASS (REGNO (second_reloadreg)),
6079 GET_MODE (second_reloadreg)))
6080 {
6081 /* Get the memory to use and rewrite both registers
6082 to its mode. */
6083 rtx loc
6084 = get_secondary_mem (reloadreg,
6085 GET_MODE (second_reloadreg),
6086 reload_opnum[j],
6087 reload_when_needed[j]);
6088 rtx tmp_reloadreg;
6089
6090 if (GET_MODE (loc) != GET_MODE (second_reloadreg))
6091 second_reloadreg = gen_rtx (REG, GET_MODE (loc),
6092 REGNO (second_reloadreg));
6093
6094 if (GET_MODE (loc) != GET_MODE (reloadreg))
6095 tmp_reloadreg = gen_rtx (REG, GET_MODE (loc),
6096 REGNO (reloadreg));
6097 else
6098 tmp_reloadreg = reloadreg;
6099
6100 emit_move_insn (loc, second_reloadreg);
6101 pat = gen_move_insn (tmp_reloadreg, loc);
6102 }
6103 #endif
6104 else
6105 pat = gen_move_insn (reloadreg, second_reloadreg);
6106
6107 emit_insn (pat);
6108 }
6109 }
6110 }
6111 #endif
6112
6113 /* Output the last reload insn. */
6114 if (! special)
6115 {
6116 #ifdef SECONDARY_MEMORY_NEEDED
6117 /* If we need a memory location to do the move, do it that way. */
6118 if (GET_CODE (old) == REG && REGNO (old) < FIRST_PSEUDO_REGISTER
6119 && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (REGNO (old)),
6120 REGNO_REG_CLASS (REGNO (reloadreg)),
6121 GET_MODE (reloadreg)))
6122 {
6123 /* Get the memory to use and rewrite both registers to
6124 its mode. */
6125 rtx loc = get_secondary_mem (old, GET_MODE (reloadreg),
6126 reload_opnum[j],
6127 reload_when_needed[j]);
6128
6129 if (GET_MODE (loc) != GET_MODE (reloadreg))
6130 reloadreg = gen_rtx (REG, GET_MODE (loc),
6131 REGNO (reloadreg));
6132
6133 if (GET_MODE (loc) != GET_MODE (old))
6134 old = gen_rtx (REG, GET_MODE (loc), REGNO (old));
6135
6136 emit_insn (gen_move_insn (loc, reloadreg));
6137 emit_insn (gen_move_insn (old, loc));
6138 }
6139 else
6140 #endif
6141 emit_insn (gen_move_insn (old, reloadreg));
6142 }
6143
6144 #ifdef PRESERVE_DEATH_INFO_REGNO_P
6145 /* If final will look at death notes for this reg,
6146 put one on the last output-reload insn to use it. Similarly
6147 for any secondary register. */
6148 if (PRESERVE_DEATH_INFO_REGNO_P (REGNO (reloadreg)))
6149 for (p = get_last_insn (); p; p = PREV_INSN (p))
6150 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
6151 && reg_overlap_mentioned_for_reload_p (reloadreg,
6152 PATTERN (p)))
6153 REG_NOTES (p) = gen_rtx (EXPR_LIST, REG_DEAD,
6154 reloadreg, REG_NOTES (p));
6155
6156 #ifdef SECONDARY_OUTPUT_RELOAD_CLASS
6157 if (! special
6158 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (second_reloadreg)))
6159 for (p = get_last_insn (); p; p = PREV_INSN (p))
6160 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
6161 && reg_overlap_mentioned_for_reload_p (second_reloadreg,
6162 PATTERN (p)))
6163 REG_NOTES (p) = gen_rtx (EXPR_LIST, REG_DEAD,
6164 second_reloadreg, REG_NOTES (p));
6165 #endif
6166 #endif
6167 /* Look at all insns we emitted, just to be safe. */
6168 for (p = get_insns (); p; p = NEXT_INSN (p))
6169 if (GET_RTX_CLASS (GET_CODE (p)) == 'i')
6170 {
6171 /* If this output reload doesn't come from a spill reg,
6172 clear any memory of reloaded copies of the pseudo reg.
6173 If this output reload comes from a spill reg,
6174 reg_has_output_reload will make this do nothing. */
6175 note_stores (PATTERN (p), forget_old_reloads_1);
6176
6177 if (reg_mentioned_p (reload_reg_rtx[j], PATTERN (p)))
6178 store_insn = p;
6179 }
6180
6181 output_reload_insns[reload_opnum[j]] = get_insns ();
6182 end_sequence ();
6183
6184 }
6185
6186 if (reload_spill_index[j] >= 0)
6187 new_spill_reg_store[reload_spill_index[j]] = store_insn;
6188 }
6189
6190 /* Now write all the insns we made for reloads in the order expected by
6191 the allocation functions. Prior to the insn being reloaded, we write
6192 the following reloads:
6193
6194 RELOAD_FOR_OTHER_ADDRESS reloads for input addresses.
6195
6196 RELOAD_OTHER reloads.
6197
6198 For each operand, any RELOAD_FOR_INPUT_ADDRESS reloads followed by
6199 the RELOAD_FOR_INPUT reload for the operand.
6200
6201 RELOAD_FOR_OPERAND_ADDRESS reloads.
6202
6203 After the insn being reloaded, we write the following:
6204
6205 For each operand, any RELOAD_FOR_OUTPUT_ADDRESS reload followed by
6206 the RELOAD_FOR_OUTPUT reload for that operand. */
6207
6208 emit_insns_before (other_input_address_reload_insns, before_insn);
6209 emit_insns_before (other_input_reload_insns, before_insn);
6210
6211 for (j = 0; j < reload_n_operands; j++)
6212 {
6213 emit_insns_before (input_address_reload_insns[j], before_insn);
6214 emit_insns_before (input_reload_insns[j], before_insn);
6215 }
6216
6217 emit_insns_before (operand_reload_insns, before_insn);
6218
6219 for (j = 0; j < reload_n_operands; j++)
6220 {
6221 emit_insns_before (output_address_reload_insns[j], following_insn);
6222 emit_insns_before (output_reload_insns[j], following_insn);
6223 }
6224
6225 /* Move death notes from INSN
6226 to output-operand-address and output reload insns. */
6227 #ifdef PRESERVE_DEATH_INFO_REGNO_P
6228 {
6229 rtx insn1;
6230 /* Loop over those insns, last ones first. */
6231 for (insn1 = PREV_INSN (following_insn); insn1 != insn;
6232 insn1 = PREV_INSN (insn1))
6233 if (GET_CODE (insn1) == INSN && GET_CODE (PATTERN (insn1)) == SET)
6234 {
6235 rtx source = SET_SRC (PATTERN (insn1));
6236 rtx dest = SET_DEST (PATTERN (insn1));
6237
6238 /* The note we will examine next. */
6239 rtx reg_notes = REG_NOTES (insn);
6240 /* The place that pointed to this note. */
6241 rtx *prev_reg_note = &REG_NOTES (insn);
6242
6243 /* If the note is for something used in the source of this
6244 reload insn, or in the output address, move the note. */
6245 while (reg_notes)
6246 {
6247 rtx next_reg_notes = XEXP (reg_notes, 1);
6248 if (REG_NOTE_KIND (reg_notes) == REG_DEAD
6249 && GET_CODE (XEXP (reg_notes, 0)) == REG
6250 && ((GET_CODE (dest) != REG
6251 && reg_overlap_mentioned_for_reload_p (XEXP (reg_notes, 0),
6252 dest))
6253 || reg_overlap_mentioned_for_reload_p (XEXP (reg_notes, 0),
6254 source)))
6255 {
6256 *prev_reg_note = next_reg_notes;
6257 XEXP (reg_notes, 1) = REG_NOTES (insn1);
6258 REG_NOTES (insn1) = reg_notes;
6259 }
6260 else
6261 prev_reg_note = &XEXP (reg_notes, 1);
6262
6263 reg_notes = next_reg_notes;
6264 }
6265 }
6266 }
6267 #endif
6268
6269 /* For all the spill regs newly reloaded in this instruction,
6270 record what they were reloaded from, so subsequent instructions
6271 can inherit the reloads.
6272
6273 Update spill_reg_store for the reloads of this insn.
6274 Copy the elements that were updated in the loop above. */
6275
6276 for (j = 0; j < n_reloads; j++)
6277 {
6278 register int r = reload_order[j];
6279 register int i = reload_spill_index[r];
6280
6281 /* I is nonneg if this reload used one of the spill regs.
6282 If reload_reg_rtx[r] is 0, this is an optional reload
6283 that we opted to ignore.
6284
6285 Also ignore reloads that don't reach the end of the insn,
6286 since we will eventually see the one that does. */
6287
6288 if (i >= 0 && reload_reg_rtx[r] != 0
6289 && reload_reg_reaches_end_p (spill_regs[i], reload_opnum[r],
6290 reload_when_needed[r]))
6291 {
6292 /* First, clear out memory of what used to be in this spill reg.
6293 If consecutive registers are used, clear them all. */
6294 int nr
6295 = HARD_REGNO_NREGS (spill_regs[i], GET_MODE (reload_reg_rtx[r]));
6296 int k;
6297
6298 for (k = 0; k < nr; k++)
6299 {
6300 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]] = -1;
6301 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]] = 0;
6302 }
6303
6304 /* Maybe the spill reg contains a copy of reload_out. */
6305 if (reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG)
6306 {
6307 register int nregno = REGNO (reload_out[r]);
6308 int nnr = (nregno >= FIRST_PSEUDO_REGISTER ? 1
6309 : HARD_REGNO_NREGS (nregno,
6310 GET_MODE (reload_reg_rtx[r])));
6311
6312 spill_reg_store[i] = new_spill_reg_store[i];
6313 reg_last_reload_reg[nregno] = reload_reg_rtx[r];
6314
6315 /* If NREGNO is a hard register, it may occupy more than
6316 one register. If it does, say what is in the
6317 rest of the registers assuming that both registers
6318 agree on how many words the object takes. If not,
6319 invalidate the subsequent registers. */
6320
6321 if (nregno < FIRST_PSEUDO_REGISTER)
6322 for (k = 1; k < nnr; k++)
6323 reg_last_reload_reg[nregno + k]
6324 = (nr == nnr ? gen_rtx (REG, word_mode,
6325 REGNO (reload_reg_rtx[r]) + k)
6326 : 0);
6327
6328 /* Now do the inverse operation. */
6329 for (k = 0; k < nr; k++)
6330 {
6331 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
6332 = (nregno >= FIRST_PSEUDO_REGISTER || nr != nnr ? nregno
6333 : nregno + k);
6334 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]] = insn;
6335 }
6336 }
6337
6338 /* Maybe the spill reg contains a copy of reload_in. Only do
6339 something if there will not be an output reload for
6340 the register being reloaded. */
6341 else if (reload_out[r] == 0
6342 && reload_in[r] != 0
6343 && (((GET_CODE (reload_in[r]) == REG
6344 && ! reg_has_output_reload[REGNO (reload_in[r])])
6345 || (GET_CODE (reload_in_reg[r]) == REG
6346 && ! reg_has_output_reload[REGNO (reload_in_reg[r])]))))
6347 {
6348 register int nregno;
6349 int nnr;
6350
6351 if (GET_CODE (reload_in[r]) == REG)
6352 nregno = REGNO (reload_in[r]);
6353 else
6354 nregno = REGNO (reload_in_reg[r]);
6355
6356 nnr = (nregno >= FIRST_PSEUDO_REGISTER ? 1
6357 : HARD_REGNO_NREGS (nregno,
6358 GET_MODE (reload_reg_rtx[r])));
6359
6360 reg_last_reload_reg[nregno] = reload_reg_rtx[r];
6361
6362 if (nregno < FIRST_PSEUDO_REGISTER)
6363 for (k = 1; k < nnr; k++)
6364 reg_last_reload_reg[nregno + k]
6365 = (nr == nnr ? gen_rtx (REG, word_mode,
6366 REGNO (reload_reg_rtx[r]) + k)
6367 : 0);
6368
6369 /* Unless we inherited this reload, show we haven't
6370 recently done a store. */
6371 if (! reload_inherited[r])
6372 spill_reg_store[i] = 0;
6373
6374 for (k = 0; k < nr; k++)
6375 {
6376 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
6377 = (nregno >= FIRST_PSEUDO_REGISTER || nr != nnr ? nregno
6378 : nregno + k);
6379 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]]
6380 = insn;
6381 }
6382 }
6383 }
6384
6385 /* The following if-statement was #if 0'd in 1.34 (or before...).
6386 It's reenabled in 1.35 because supposedly nothing else
6387 deals with this problem. */
6388
6389 /* If a register gets output-reloaded from a non-spill register,
6390 that invalidates any previous reloaded copy of it.
6391 But forget_old_reloads_1 won't get to see it, because
6392 it thinks only about the original insn. So invalidate it here. */
6393 if (i < 0 && reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG)
6394 {
6395 register int nregno = REGNO (reload_out[r]);
6396 reg_last_reload_reg[nregno] = 0;
6397 }
6398 }
6399 }
6400 \f
6401 /* Emit code to perform an input reload of IN to RELOADREG. IN is from
6402 operand OPNUM with reload type TYPE.
6403
6404 Returns first insn emitted. */
6405
6406 rtx
6407 gen_input_reload (reloadreg, in, opnum, type)
6408 rtx reloadreg;
6409 rtx in;
6410 int opnum;
6411 enum reload_type type;
6412 {
6413 rtx last = get_last_insn ();
6414
6415 /* How to do this reload can get quite tricky. Normally, we are being
6416 asked to reload a simple operand, such as a MEM, a constant, or a pseudo
6417 register that didn't get a hard register. In that case we can just
6418 call emit_move_insn.
6419
6420 We can also be asked to reload a PLUS that adds a register or a MEM to
6421 another register, constant or MEM. This can occur during frame pointer
6422 elimination and while reloading addresses. This case is handled by
6423 trying to emit a single insn to perform the add. If it is not valid,
6424 we use a two insn sequence.
6425
6426 Finally, we could be called to handle an 'o' constraint by putting
6427 an address into a register. In that case, we first try to do this
6428 with a named pattern of "reload_load_address". If no such pattern
6429 exists, we just emit a SET insn and hope for the best (it will normally
6430 be valid on machines that use 'o').
6431
6432 This entire process is made complex because reload will never
6433 process the insns we generate here and so we must ensure that
6434 they will fit their constraints and also by the fact that parts of
6435 IN might be being reloaded separately and replaced with spill registers.
6436 Because of this, we are, in some sense, just guessing the right approach
6437 here. The one listed above seems to work.
6438
6439 ??? At some point, this whole thing needs to be rethought. */
6440
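/* As a concrete (and purely hypothetical) illustration of the PLUS case
   just below: reloading (plus (reg 30) (const_int 8)) into hard reg 1
   would ideally become the single insn
       (set (reg 1) (plus (reg 30) (const_int 8)))
   if the machine recognizes a three-operand add; otherwise the fallback
   emits roughly
       (set (reg 1) (const_int 8))
       (set (reg 1) (plus (reg 1) (reg 30)))
   The register numbers are invented for the example.  */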
6441 if (GET_CODE (in) == PLUS
6442 && (GET_CODE (XEXP (in, 0)) == REG
6443 || GET_CODE (XEXP (in, 0)) == MEM)
6444 && (GET_CODE (XEXP (in, 1)) == REG
6445 || CONSTANT_P (XEXP (in, 1))
6446 || GET_CODE (XEXP (in, 1)) == MEM))
6447 {
6448 /* We need to compute the sum of a register or a MEM and another
6449 register, constant, or MEM, and put it into the reload
6450 register. The best possible way of doing this is if the machine
6451 has a three-operand ADD insn that accepts the required operands.
6452
6453 The simplest approach is to try to generate such an insn and see if it
6454 is recognized and matches its constraints. If so, it can be used.
6455
6456 It might be better not to actually emit the insn unless it is valid,
6457 but we need to pass the insn as an operand to `recog' and
6458 `insn_extract' and it is simpler to emit and then delete the insn if
6459 not valid than to dummy things up. */
6460
6461 rtx op0, op1, tem, insn;
6462 int code;
6463
6464 op0 = find_replacement (&XEXP (in, 0));
6465 op1 = find_replacement (&XEXP (in, 1));
6466
6467 /* Since constraint checking is strict, commutativity won't be
6468 checked, so we need to do that here to avoid spurious failure
6469 if the add instruction is two-address and the second operand
6470 of the add is the same as the reload reg, which is frequently
6471 the case. If the insn would be A = B + A, rearrange it so
6472 it will be A = A + B as constrain_operands expects. */
6473
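/* E.g. if the reload reg is hard reg 1 and IN is (plus (reg 30) (reg 1)),
   the swap below turns it into (plus (reg 1) (reg 30)), which a
   two-address add can match directly.  (Register numbers invented.)  */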
6474 if (GET_CODE (XEXP (in, 1)) == REG
6475 && REGNO (reloadreg) == REGNO (XEXP (in, 1)))
6476 tem = op0, op0 = op1, op1 = tem;
6477
6478 if (op0 != XEXP (in, 0) || op1 != XEXP (in, 1))
6479 in = gen_rtx (PLUS, GET_MODE (in), op0, op1);
6480
6481 insn = emit_insn (gen_rtx (SET, VOIDmode, reloadreg, in));
6482 code = recog_memoized (insn);
6483
6484 if (code >= 0)
6485 {
6486 insn_extract (insn);
6487 /* We want constrain operands to treat this insn strictly in
6488 its validity determination, i.e., the way it would after reload
6489 has completed. */
6490 if (constrain_operands (code, 1))
6491 return insn;
6492 }
6493
6494 delete_insns_since (last);
6495
6496 /* If that failed, we must use a conservative two-insn sequence:
6497 use a move to copy the constant, MEM, or pseudo register to the reload
6498 register, since "move" will be able to handle an arbitrary operand,
6499 unlike add, which can't in general. Then add the registers.
6500
6501 If there is another way to do this for a specific machine, a
6502 DEFINE_PEEPHOLE should be specified that recognizes the sequence
6503 we emit below. */
6504
6505 if (CONSTANT_P (op1) || GET_CODE (op1) == MEM
6506 || (GET_CODE (op1) == REG
6507 && REGNO (op1) >= FIRST_PSEUDO_REGISTER))
6508 tem = op0, op0 = op1, op1 = tem;
6509
6510 emit_insn (gen_move_insn (reloadreg, op0));
6511
6512 /* If OP0 and OP1 are the same, we can use RELOADREG for OP1.
6513 This fixes a problem on the 32K where the stack pointer cannot
6514 be used as an operand of an add insn. */
6515
6516 if (rtx_equal_p (op0, op1))
6517 op1 = reloadreg;
6518
6519 emit_insn (gen_add2_insn (reloadreg, op1));
6520 }
6521
6522 #ifdef SECONDARY_MEMORY_NEEDED
6523 /* If we need a memory location to do the move, do it that way. */
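/* (For example, some machines cannot copy directly between floating-point
   and general registers, so the value must be bounced through a stack
   slot; exactly which class pairs need this is machine-dependent.)  */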
6524 else if (GET_CODE (in) == REG && REGNO (in) < FIRST_PSEUDO_REGISTER
6525 && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (REGNO (in)),
6526 REGNO_REG_CLASS (REGNO (reloadreg)),
6527 GET_MODE (reloadreg)))
6528 {
6529 /* Get the memory to use and rewrite both registers to its mode. */
6530 rtx loc = get_secondary_mem (in, GET_MODE (reloadreg), opnum, type);
6531
6532 if (GET_MODE (loc) != GET_MODE (reloadreg))
6533 reloadreg = gen_rtx (REG, GET_MODE (loc), REGNO (reloadreg));
6534
6535 if (GET_MODE (loc) != GET_MODE (in))
6536 in = gen_rtx (REG, GET_MODE (loc), REGNO (in));
6537
6538 emit_insn (gen_move_insn (loc, in));
6539 emit_insn (gen_move_insn (reloadreg, loc));
6540 }
6541 #endif
6542
6543 /* If IN is a simple operand, use gen_move_insn. */
6544 else if (GET_RTX_CLASS (GET_CODE (in)) == 'o' || GET_CODE (in) == SUBREG)
6545 emit_insn (gen_move_insn (reloadreg, in));
6546
6547 #ifdef HAVE_reload_load_address
6548 else if (HAVE_reload_load_address)
6549 emit_insn (gen_reload_load_address (reloadreg, in));
6550 #endif
6551
6552 /* Otherwise, just write (set RELOADREG IN) and hope for the best. */
6553 else
6554 emit_insn (gen_rtx (SET, VOIDmode, reloadreg, in));
6555
6556 /* Return the first insn emitted.
6557 We cannot just return get_last_insn, because there may have
6558 been multiple instructions emitted. Also note that gen_move_insn may
6559 emit more than one insn itself, so we cannot assume that there is one
6560 insn emitted per emit_insn call. */
6561
6562 return last ? NEXT_INSN (last) : get_insns ();
6563 }
6564 \f
6565 /* Delete a previously made output-reload
6566 whose result we now believe is not needed.
6567 First we double-check.
6568
6569 INSN is the insn now being processed.
6570 OUTPUT_RELOAD_INSN is the insn of the output reload.
6571 J is the reload-number for this insn. */
6572
6573 static void
6574 delete_output_reload (insn, j, output_reload_insn)
6575 rtx insn;
6576 int j;
6577 rtx output_reload_insn;
6578 {
6579 register rtx i1;
6580
6581 /* Get the raw pseudo-register referred to. */
6582
6583 rtx reg = reload_in[j];
6584 while (GET_CODE (reg) == SUBREG)
6585 reg = SUBREG_REG (reg);
6586
6587 /* If the pseudo-reg we are reloading is no longer referenced
6588 anywhere between the store into it and here,
6589 and no jumps or labels intervene, then the value can get
6590 here through the reload reg alone.
6591 Otherwise, give up--return. */
6592 for (i1 = NEXT_INSN (output_reload_insn);
6593 i1 != insn; i1 = NEXT_INSN (i1))
6594 {
6595 if (GET_CODE (i1) == CODE_LABEL || GET_CODE (i1) == JUMP_INSN)
6596 return;
6597 if ((GET_CODE (i1) == INSN || GET_CODE (i1) == CALL_INSN)
6598 && reg_mentioned_p (reg, PATTERN (i1)))
6599 return;
6600 }
6601
6602 if (cannot_omit_stores[REGNO (reg)])
6603 return;
6604
6605 /* If this insn will store in the pseudo again,
6606 the previous store can be removed. */
6607 if (reload_out[j] == reload_in[j])
6608 delete_insn (output_reload_insn);
6609
6610 /* See if the pseudo reg has been completely replaced
6611 with reload regs. If so, delete the store insn
6612 and forget we had a stack slot for the pseudo. */
6613 else if (reg_n_deaths[REGNO (reg)] == 1
6614 && reg_basic_block[REGNO (reg)] >= 0
6615 && find_regno_note (insn, REG_DEAD, REGNO (reg)))
6616 {
6617 rtx i2;
6618
6619 /* We know that it was used only between here
6620 and the beginning of the current basic block.
6621 (We also know that the last use before INSN was
6622 the output reload we are thinking of deleting, but never mind that.)
6623 Search that range; see if any ref remains. */
6624 for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
6625 {
6626 rtx set = single_set (i2);
6627
6628 /* Uses which just store in the pseudo don't count,
6629 since if they are the only uses, they are dead. */
6630 if (set != 0 && SET_DEST (set) == reg)
6631 continue;
6632 if (GET_CODE (i2) == CODE_LABEL
6633 || GET_CODE (i2) == JUMP_INSN)
6634 break;
6635 if ((GET_CODE (i2) == INSN || GET_CODE (i2) == CALL_INSN)
6636 && reg_mentioned_p (reg, PATTERN (i2)))
6637 /* Some other ref remains;
6638 we can't do anything. */
6639 return;
6640 }
6641
6642 /* Delete the now-dead stores into this pseudo. */
6643 for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
6644 {
6645 rtx set = single_set (i2);
6646
6647 if (set != 0 && SET_DEST (set) == reg)
6648 delete_insn (i2);
6649 if (GET_CODE (i2) == CODE_LABEL
6650 || GET_CODE (i2) == JUMP_INSN)
6651 break;
6652 }
6653
6654 /* For the debugging info,
6655 say the pseudo lives in this reload reg. */
6656 reg_renumber[REGNO (reg)] = REGNO (reload_reg_rtx[j]);
6657 alter_reg (REGNO (reg), -1);
6658 }
6659 }
6660 \f
6661 /* Output reload-insns to reload VALUE into RELOADREG.
6662 VALUE is an autoincrement or autodecrement RTX whose operand
6663 is a register or memory location;
6664 so reloading involves incrementing that location.
6665
6666 INC_AMOUNT is the number to increment or decrement by (always positive).
6667 This cannot be deduced from VALUE. */
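/* (E.g. for (mem:SI (post_inc (reg 5))) the increment is the size of
   SImode, but the POST_INC rtx itself does not record that, which is
   why the caller must pass INC_AMOUNT explicitly.)  */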
6668
6669 static void
6670 inc_for_reload (reloadreg, value, inc_amount)
6671 rtx reloadreg;
6672 rtx value;
6673 int inc_amount;
6674 {
6675 /* REG or MEM to be copied and incremented. */
6676 rtx incloc = XEXP (value, 0);
6677 /* Nonzero if increment after copying. */
6678 int post = (GET_CODE (value) == POST_DEC || GET_CODE (value) == POST_INC);
6679 rtx last;
6680 rtx inc;
6681 rtx add_insn;
6682 int code;
6683
6684 /* No hard register is equivalent to this register after
6685 inc/dec operation. If REG_LAST_RELOAD_REG were non-zero,
6686 we could inc/dec that register as well (maybe even using it for
6687 the source), but I'm not sure it's worth worrying about. */
6688 if (GET_CODE (incloc) == REG)
6689 reg_last_reload_reg[REGNO (incloc)] = 0;
6690
6691 if (GET_CODE (value) == PRE_DEC || GET_CODE (value) == POST_DEC)
6692 inc_amount = - inc_amount;
6693
6694 inc = GEN_INT (inc_amount);
6695
6696 /* If this is post-increment, first copy the location to the reload reg. */
6697 if (post)
6698 emit_insn (gen_move_insn (reloadreg, incloc));
6699
6700 /* See if we can directly increment INCLOC. Use a method similar to that
6701 in gen_input_reload. */
6702
6703 last = get_last_insn ();
6704 add_insn = emit_insn (gen_rtx (SET, VOIDmode, incloc,
6705 gen_rtx (PLUS, GET_MODE (incloc),
6706 incloc, inc)));
6707
6708 code = recog_memoized (add_insn);
6709 if (code >= 0)
6710 {
6711 insn_extract (add_insn);
6712 if (constrain_operands (code, 1))
6713 {
6714 /* If this is a pre-increment and we have incremented the value
6715 where it lives, copy the incremented value to RELOADREG to
6716 be used as an address. */
6717
6718 if (! post)
6719 emit_insn (gen_move_insn (reloadreg, incloc));
6720
6721 return;
6722 }
6723 }
6724
6725 delete_insns_since (last);
6726
6727 /* If we couldn't do the increment directly, we must increment in RELOADREG.
6728 The way we do this depends on whether this is pre- or post-increment.
6729 For pre-increment, copy INCLOC to the reload register, increment it
6730 there, then store it back. */
6731
6732 if (! post)
6733 {
6734 emit_insn (gen_move_insn (reloadreg, incloc));
6735 emit_insn (gen_add2_insn (reloadreg, inc));
6736 emit_insn (gen_move_insn (incloc, reloadreg));
6737 }
6738 else
6739 {
6740 /* Post-increment.
6741 Because this might be a jump insn or a compare, and because RELOADREG
6742 may not be available after the insn in an input reload, we must do
6743 the increment before the insn for which we are reloading.
6744
6745 We have already copied INCLOC to RELOADREG. Increment the copy in
6746 RELOADREG, save that back, then decrement RELOADREG so it has
6747 the original value. */
6748
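/* Concretely (register numbers invented): for (post_inc (reg 5)) with
   INC_AMOUNT 4 and reload reg 1, RELOADREG already holds the old value
   of reg 5; the three insns below compute reg 1 = old value + 4, store
   that back into reg 5, then subtract 4 again so reg 1 still holds the
   value to be used as the address.  */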
6749 emit_insn (gen_add2_insn (reloadreg, inc));
6750 emit_insn (gen_move_insn (incloc, reloadreg));
6751 emit_insn (gen_add2_insn (reloadreg, GEN_INT (-inc_amount)));
6752 }
6753
6754 return;
6755 }
6756 \f
6757 /* Return 1 if we are certain that the constraint-string STRING allows
6758 the hard register REG. Return 0 if we can't be sure of this. */
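/* For example, assuming typical machine descriptions: for a general hard
   register, "r,r" and "g" yield 1, but "r,m" yields 0 because the second
   alternative allows only memory, so we cannot be sure the register is
   acceptable there.  */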
6759
6760 static int
6761 constraint_accepts_reg_p (string, reg)
6762 char *string;
6763 rtx reg;
6764 {
6765 int value = 0;
6766 int regno = true_regnum (reg);
6767 int c;
6768
6769 /* Initialize for first alternative. */
6770 value = 0;
6771 /* Check that each alternative contains a letter (such as `g' or `r') that accepts REG. */
6772 while (1)
6773 switch (c = *string++)
6774 {
6775 case 0:
6776 /* If an alternative lacks `g' or `r', we lose. */
6777 return value;
6778 case ',':
6779 /* If an alternative lacks `g' or `r', we lose. */
6780 if (value == 0)
6781 return 0;
6782 /* Initialize for next alternative. */
6783 value = 0;
6784 break;
6785 case 'g':
6786 case 'r':
6787 /* Any general reg wins for this alternative. */
6788 if (TEST_HARD_REG_BIT (reg_class_contents[(int) GENERAL_REGS], regno))
6789 value = 1;
6790 break;
6791 default:
6792 /* Any reg in specified class wins for this alternative. */
6793 {
6794 enum reg_class class = REG_CLASS_FROM_LETTER (c);
6795
6796 if (TEST_HARD_REG_BIT (reg_class_contents[(int) class], regno))
6797 value = 1;
6798 }
6799 }
6800 }
6801 \f
6802 /* Return the number of places FIND appears within X, but don't count
6803 an occurrence if some SET_DEST is FIND. */
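/* For instance, if FIND is the rtx object for (reg 65), then the pattern
   (set (reg 65) (plus (reg 65) (reg 66))) counts as one occurrence: the
   SET_DEST is skipped but the use inside the source is counted.  Note
   the comparison is by rtx identity, so FIND must be the very same
   object.  (Register numbers invented for the example.)  */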
6804
6805 static int
6806 count_occurrences (x, find)
6807 register rtx x, find;
6808 {
6809 register int i, j;
6810 register enum rtx_code code;
6811 register char *format_ptr;
6812 int count;
6813
6814 if (x == find)
6815 return 1;
6816 if (x == 0)
6817 return 0;
6818
6819 code = GET_CODE (x);
6820
6821 switch (code)
6822 {
6823 case REG:
6824 case QUEUED:
6825 case CONST_INT:
6826 case CONST_DOUBLE:
6827 case SYMBOL_REF:
6828 case CODE_LABEL:
6829 case PC:
6830 case CC0:
6831 return 0;
6832
6833 case SET:
6834 if (SET_DEST (x) == find)
6835 return count_occurrences (SET_SRC (x), find);
6836 break;
6837 }
6838
6839 format_ptr = GET_RTX_FORMAT (code);
6840 count = 0;
6841
6842 for (i = 0; i < GET_RTX_LENGTH (code); i++)
6843 {
6844 switch (*format_ptr++)
6845 {
6846 case 'e':
6847 count += count_occurrences (XEXP (x, i), find);
6848 break;
6849
6850 case 'E':
6851 if (XVEC (x, i) != NULL)
6852 {
6853 for (j = 0; j < XVECLEN (x, i); j++)
6854 count += count_occurrences (XVECEXP (x, i, j), find);
6855 }
6856 break;
6857 }
6858 }
6859 return count;
6860 }