/* gcc/reload1.c */
1 /* Reload pseudo regs into hard regs for insns that require hard regs.
2 Copyright (C) 1987, 88, 89, 92-5, 1996 Free Software Foundation, Inc.
3
4 This file is part of GNU CC.
5
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
20
21
22 #include <stdio.h>
23 #include "config.h"
24 #include "rtl.h"
25 #include "obstack.h"
26 #include "insn-config.h"
27 #include "insn-flags.h"
28 #include "insn-codes.h"
29 #include "flags.h"
30 #include "expr.h"
31 #include "regs.h"
32 #include "hard-reg-set.h"
33 #include "reload.h"
34 #include "recog.h"
35 #include "basic-block.h"
36 #include "output.h"
37 #include "real.h"
38
39 /* This file contains the reload pass of the compiler, which is
40 run after register allocation has been done. It checks that
41 each insn is valid (operands required to be in registers really
42 are in registers of the proper class) and fixes up invalid ones
43 by copying values temporarily into registers for the insns
44 that need them.
45
46 The results of register allocation are described by the vector
47 reg_renumber; the insns still contain pseudo regs, but reg_renumber
48 can be used to find which hard reg, if any, a pseudo reg is in.
49
50 The technique we always use is to free up a few hard regs that are
51 called ``reload regs'', and for each place where a pseudo reg
52 must be in a hard reg, copy it temporarily into one of the reload regs.
53
54 All the pseudos that were formerly allocated to the hard regs that
55 are now in use as reload regs must be ``spilled''. This means
56 that they go to other hard regs, or to stack slots if no other
57 available hard regs can be found. Spilling can invalidate more
58 insns, requiring additional need for reloads, so we must keep checking
59 until the process stabilizes.
60
61 For machines with different classes of registers, we must keep track
62 of the register class needed for each reload, and make sure that
63 we allocate enough reload registers of each class.
64
65 The file reload.c contains the code that checks one insn for
66 validity and reports the reloads that it needs. This file
67 is in charge of scanning the entire rtl code, accumulating the
68 reload needs, spilling, assigning reload registers to use for
69 fixing up each insn, and generating the new insns to copy values
70 into the reload registers. */
71
72
73 #ifndef REGISTER_MOVE_COST
74 #define REGISTER_MOVE_COST(x, y) 2
75 #endif
76
77 #ifndef MEMORY_MOVE_COST
78 #define MEMORY_MOVE_COST(x) 4
79 #endif
80 \f
81 /* During reload_as_needed, element N contains a REG rtx for the hard reg
82 into which reg N has been reloaded (perhaps for a previous insn). */
83 static rtx *reg_last_reload_reg;
84
85 /* Elt N nonzero if reg_last_reload_reg[N] has been set in this insn
86 for an output reload that stores into reg N. */
87 static char *reg_has_output_reload;
88
89 /* Indicates which hard regs are reload-registers for an output reload
90 in the current insn. */
91 static HARD_REG_SET reg_is_output_reload;
92
93 /* Element N is the constant value to which pseudo reg N is equivalent,
94 or zero if pseudo reg N is not equivalent to a constant.
95 find_reloads looks at this in order to replace pseudo reg N
96 with the constant it stands for. */
97 rtx *reg_equiv_constant;
98
99 /* Element N is a memory location to which pseudo reg N is equivalent,
100 prior to any register elimination (such as frame pointer to stack
101 pointer). Depending on whether or not it is a valid address, this value
102 is transferred to either reg_equiv_address or reg_equiv_mem. */
103 rtx *reg_equiv_memory_loc;
104
105 /* Element N is the address of stack slot to which pseudo reg N is equivalent.
106 This is used when the address is not valid as a memory address
107 (because its displacement is too big for the machine.) */
108 rtx *reg_equiv_address;
109
110 /* Element N is the memory slot to which pseudo reg N is equivalent,
111 or zero if pseudo reg N is not equivalent to a memory slot. */
112 rtx *reg_equiv_mem;
113
114 /* Widest width in which each pseudo reg is referred to (via subreg). */
115 static int *reg_max_ref_width;
116
117 /* Element N is the insn that initialized reg N from its equivalent
118 constant or memory slot. */
119 static rtx *reg_equiv_init;
120
121 /* During reload_as_needed, element N contains the last pseudo regno
122 reloaded into the Nth reload register. This vector is in parallel
123 with spill_regs. If that pseudo reg occupied more than one register,
124 reg_reloaded_contents points to that pseudo for each spill register in
125 use; all of these must remain set for an inheritance to occur. */
126 static int reg_reloaded_contents[FIRST_PSEUDO_REGISTER];
127
128 /* During reload_as_needed, element N contains the insn for which
129 the Nth reload register was last used. This vector is in parallel
130 with spill_regs, and its contents are significant only when
131 reg_reloaded_contents is significant. */
132 static rtx reg_reloaded_insn[FIRST_PSEUDO_REGISTER];
133
134 /* Number of spill-regs so far; number of valid elements of spill_regs. */
135 static int n_spills;
136
137 /* In parallel with spill_regs, contains REG rtx's for those regs.
138 Holds the last rtx used for any given reg, or 0 if it has never
139 been used for spilling yet. This rtx is reused, provided it has
140 the proper mode. */
141 static rtx spill_reg_rtx[FIRST_PSEUDO_REGISTER];
142
143 /* In parallel with spill_regs, contains nonzero for a spill reg
144 that was stored after the last time it was used.
145 The precise value is the insn generated to do the store. */
146 static rtx spill_reg_store[FIRST_PSEUDO_REGISTER];
147
148 /* This table is the inverse mapping of spill_regs:
149 indexed by hard reg number,
150 it contains the position of that reg in spill_regs,
151 or -1 for something that is not in spill_regs. */
152 static short spill_reg_order[FIRST_PSEUDO_REGISTER];
153
154 /* This reg set indicates registers that may not be used for retrying global
155 allocation. The registers that may not be used include all spill registers
156 and the frame pointer (if we are using one). */
157 HARD_REG_SET forbidden_regs;
158
159 /* This reg set indicates registers that are not good for spill registers.
160 They will not be used to complete groups of spill registers. This includes
161 all fixed registers, registers that may be eliminated, and, if
162 SMALL_REGISTER_CLASSES is not defined, registers explicitly used in the rtl.
163
164 (spill_reg_order prevents these registers from being used to start a
165 group.) */
166 static HARD_REG_SET bad_spill_regs;
167
168 /* Describes order of use of registers for reloading
169 of spilled pseudo-registers. `spills' is the number of
170 elements that are actually valid; new ones are added at the end. */
171 static short spill_regs[FIRST_PSEUDO_REGISTER];
172
173 /* This reg set indicates those registers that have been used a spill
174 registers. This information is used in reorg.c, to help figure out
175 what registers are live at any point. It is assumed that all spill_regs
176 are dead at every CODE_LABEL. */
177
178 HARD_REG_SET used_spill_regs;
179
180 /* Index of last register assigned as a spill register. We allocate in
181 a round-robin fashion. */
182
183 static int last_spill_reg;
184
185 /* Describes order of preference for putting regs into spill_regs.
186 Contains the numbers of all the hard regs, in order most preferred first.
187 This order is different for each function.
188 It is set up by order_regs_for_reload.
189 Empty elements at the end contain -1. */
190 static short potential_reload_regs[FIRST_PSEUDO_REGISTER];
191
192 /* 1 for a hard register that appears explicitly in the rtl
193 (for example, function value registers, special registers
194 used by insns, structure value pointer registers). */
195 static char regs_explicitly_used[FIRST_PSEUDO_REGISTER];
196
197 /* Indicates if a register was counted against the need for
198 groups. 0 means it can count against max_nongroup instead. */
199 static HARD_REG_SET counted_for_groups;
200
201 /* Indicates if a register was counted against the need for
202 non-groups. 0 means it can become part of a new group.
203 During choose_reload_regs, 1 here means don't use this reg
204 as part of a group, even if it seems to be otherwise ok. */
205 static HARD_REG_SET counted_for_nongroups;
206
207 /* Indexed by pseudo reg number N,
208 says may not delete stores into the real (memory) home of pseudo N.
209 This is set if we already substituted a memory equivalent in some uses,
210 which happens when we have to eliminate the fp from it. */
211 static char *cannot_omit_stores;
212
213 /* Nonzero if indirect addressing is supported on the machine; this means
214 that spilling (REG n) does not require reloading it into a register in
215 order to do (MEM (REG n)) or (MEM (PLUS (REG n) (CONST_INT c))). The
216 value indicates the level of indirect addressing supported, e.g., two
217 means that (MEM (MEM (REG n))) is also valid if (REG n) does not get
218 a hard register. */
219
220 static char spill_indirect_levels;
221
222 /* Nonzero if indirect addressing is supported when the innermost MEM is
223 of the form (MEM (SYMBOL_REF sym)). It is assumed that the level to
224 which these are valid is the same as spill_indirect_levels, above. */
225
226 char indirect_symref_ok;
227
228 /* Nonzero if an address (plus (reg frame_pointer) (reg ...)) is valid. */
229
230 char double_reg_address_ok;
231
232 /* Record the stack slot for each spilled hard register. */
233
234 static rtx spill_stack_slot[FIRST_PSEUDO_REGISTER];
235
236 /* Width allocated so far for that stack slot. */
237
238 static int spill_stack_slot_width[FIRST_PSEUDO_REGISTER];
239
240 /* Indexed by register class and basic block number, nonzero if there is
241 any need for a spill register of that class in that basic block.
242 The pointer is 0 if we did stupid allocation and don't know
243 the structure of basic blocks. */
244
245 char *basic_block_needs[N_REG_CLASSES];
246
247 /* First uid used by insns created by reload in this function.
248 Used in find_equiv_reg. */
249 int reload_first_uid;
250
251 /* Flag set by local-alloc or global-alloc if anything is live in
252 a call-clobbered reg across calls. */
253
254 int caller_save_needed;
255
256 /* Set to 1 while reload_as_needed is operating.
257 Required by some machines to handle any generated moves differently. */
258
259 int reload_in_progress = 0;
260
261 /* These arrays record the insn_code of insns that may be needed to
262 perform input and output reloads of special objects. They provide a
263 place to pass a scratch register. */
264
265 enum insn_code reload_in_optab[NUM_MACHINE_MODES];
266 enum insn_code reload_out_optab[NUM_MACHINE_MODES];
267
268 /* This obstack is used for allocation of rtl during register elimination.
269 The allocated storage can be freed once find_reloads has processed the
270 insn. */
271
272 struct obstack reload_obstack;
273 char *reload_firstobj;
274
275 #define obstack_chunk_alloc xmalloc
276 #define obstack_chunk_free free
277
278 /* List of labels that must never be deleted. */
279 extern rtx forced_labels;
280
281 /* Allocation number table from global register allocation. */
282 extern int *reg_allocno;
283 \f
/* This structure is used to record information about register eliminations.
   Each array entry describes one possible way of eliminating a register
   in favor of another.   If there is more than one way of eliminating a
   particular register, the most preferred should be specified first.  */

static struct elim_table
{
  int from;			/* Register number to be eliminated.  */
  int to;			/* Register number used as replacement.  */
  int initial_offset;		/* Initial difference between values.  */
  int can_eliminate;		/* Non-zero if this elimination can be done.  */
  int can_eliminate_previous;	/* Value of CAN_ELIMINATE in previous scan over
				   insns made by reload.  */
  int offset;			/* Current offset between the two regs.  */
  int max_offset;		/* Maximum offset between the two regs.  */
  int previous_offset;		/* Offset at end of previous insn.  */
  int ref_outside_mem;		/* "to" has been referenced outside a MEM.  */
  rtx from_rtx;			/* REG rtx for the register to be eliminated.
				   We cannot simply compare the number since
				   we might then spuriously replace a hard
				   register corresponding to a pseudo
				   assigned to the reg to be eliminated.  */
  rtx to_rtx;			/* REG rtx for the replacement.  */
} reg_eliminate[] =

/* If a set of eliminable registers was specified, define the table from it.
   Otherwise, default to the normal case of the frame pointer being
   replaced by the stack pointer.  */

#ifdef ELIMINABLE_REGS
  ELIMINABLE_REGS;
#else
  {{ FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM}};
#endif

/* Number of entries in the elimination table above; the table is filled
   in either from the target's ELIMINABLE_REGS macro or from the single
   default frame-pointer-to-stack-pointer entry.  */
#define NUM_ELIMINABLE_REGS (sizeof reg_eliminate / sizeof reg_eliminate[0])
320
321 /* Record the number of pending eliminations that have an offset not equal
322 to their initial offset. If non-zero, we use a new copy of each
323 replacement result in any insns encountered. */
324 static int num_not_at_initial_offset;
325
326 /* Count the number of registers that we may be able to eliminate. */
327 static int num_eliminable;
328
329 /* For each label, we record the offset of each elimination. If we reach
330 a label by more than one path and an offset differs, we cannot do the
331 elimination. This information is indexed by the number of the label.
332 The first table is an array of flags that records whether we have yet
333 encountered a label and the second table is an array of arrays, one
334 entry in the latter array for each elimination. */
335
336 static char *offsets_known_at;
337 static int (*offsets_at)[NUM_ELIMINABLE_REGS];
338
339 /* Number of labels in the current function. */
340
341 static int num_labels;
342
343 struct hard_reg_n_uses { int regno; int uses; };
344 \f
345 static int possible_group_p PROTO((int, int *));
346 static void count_possible_groups PROTO((int *, enum machine_mode *,
347 int *, int));
348 static int modes_equiv_for_class_p PROTO((enum machine_mode,
349 enum machine_mode,
350 enum reg_class));
351 static void spill_failure PROTO((rtx));
352 static int new_spill_reg PROTO((int, int, int *, int *, int,
353 FILE *));
354 static void delete_dead_insn PROTO((rtx));
355 static void alter_reg PROTO((int, int));
356 static void mark_scratch_live PROTO((rtx));
357 static void set_label_offsets PROTO((rtx, rtx, int));
358 static int eliminate_regs_in_insn PROTO((rtx, int));
359 static void mark_not_eliminable PROTO((rtx, rtx));
360 static int spill_hard_reg PROTO((int, int, FILE *, int));
361 static void scan_paradoxical_subregs PROTO((rtx));
362 static int hard_reg_use_compare PROTO((struct hard_reg_n_uses *,
363 struct hard_reg_n_uses *));
364 static void order_regs_for_reload PROTO((int));
365 static int compare_spill_regs PROTO((short *, short *));
366 static void reload_as_needed PROTO((rtx, int));
367 static void forget_old_reloads_1 PROTO((rtx, rtx));
368 static int reload_reg_class_lower PROTO((short *, short *));
369 static void mark_reload_reg_in_use PROTO((int, int, enum reload_type,
370 enum machine_mode));
371 static void clear_reload_reg_in_use PROTO((int, int, enum reload_type,
372 enum machine_mode));
373 static int reload_reg_free_p PROTO((int, int, enum reload_type));
374 static int reload_reg_free_before_p PROTO((int, int, enum reload_type));
375 static int reload_reg_reaches_end_p PROTO((int, int, enum reload_type));
376 static int reloads_conflict PROTO((int, int));
377 static int allocate_reload_reg PROTO((int, rtx, int, int));
378 static void choose_reload_regs PROTO((rtx, rtx));
379 static void merge_assigned_reloads PROTO((rtx));
380 static void emit_reload_insns PROTO((rtx));
381 static void delete_output_reload PROTO((rtx, int, rtx));
382 static void inc_for_reload PROTO((rtx, rtx, int));
383 static int constraint_accepts_reg_p PROTO((char *, rtx));
384 static int count_occurrences PROTO((rtx, rtx));
385 \f
386 /* Initialize the reload pass once per compilation. */
387
388 void
389 init_reload ()
390 {
391 register int i;
392
393 /* Often (MEM (REG n)) is still valid even if (REG n) is put on the stack.
394 Set spill_indirect_levels to the number of levels such addressing is
395 permitted, zero if it is not permitted at all. */
396
397 register rtx tem
398 = gen_rtx (MEM, Pmode,
399 gen_rtx (PLUS, Pmode,
400 gen_rtx (REG, Pmode, LAST_VIRTUAL_REGISTER + 1),
401 GEN_INT (4)));
402 spill_indirect_levels = 0;
403
404 while (memory_address_p (QImode, tem))
405 {
406 spill_indirect_levels++;
407 tem = gen_rtx (MEM, Pmode, tem);
408 }
409
410 /* See if indirect addressing is valid for (MEM (SYMBOL_REF ...)). */
411
412 tem = gen_rtx (MEM, Pmode, gen_rtx (SYMBOL_REF, Pmode, "foo"));
413 indirect_symref_ok = memory_address_p (QImode, tem);
414
415 /* See if reg+reg is a valid (and offsettable) address. */
416
417 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
418 {
419 tem = gen_rtx (PLUS, Pmode,
420 gen_rtx (REG, Pmode, HARD_FRAME_POINTER_REGNUM),
421 gen_rtx (REG, Pmode, i));
422 /* This way, we make sure that reg+reg is an offsettable address. */
423 tem = plus_constant (tem, 4);
424
425 if (memory_address_p (QImode, tem))
426 {
427 double_reg_address_ok = 1;
428 break;
429 }
430 }
431
432 /* Initialize obstack for our rtl allocation. */
433 gcc_obstack_init (&reload_obstack);
434 reload_firstobj = (char *) obstack_alloc (&reload_obstack, 0);
435 }
436
437 /* Main entry point for the reload pass.
438
439 FIRST is the first insn of the function being compiled.
440
441 GLOBAL nonzero means we were called from global_alloc
442 and should attempt to reallocate any pseudoregs that we
443 displace from hard regs we will use for reloads.
444 If GLOBAL is zero, we do not have enough information to do that,
445 so any pseudo reg that is spilled must go to the stack.
446
447 DUMPFILE is the global-reg debugging dump file stream, or 0.
448 If it is nonzero, messages are written to it to describe
449 which registers are seized as reload regs, which pseudo regs
450 are spilled from them, and where the pseudo regs are reallocated to.
451
452 Return value is nonzero if reload failed
453 and we must not do any more for this function. */
454
455 int
456 reload (first, global, dumpfile)
457 rtx first;
458 int global;
459 FILE *dumpfile;
460 {
461 register int class;
462 register int i, j, k;
463 register rtx insn;
464 register struct elim_table *ep;
465
466 int something_changed;
467 int something_needs_reloads;
468 int something_needs_elimination;
469 int new_basic_block_needs;
470 enum reg_class caller_save_spill_class = NO_REGS;
471 int caller_save_group_size = 1;
472
473 /* Nonzero means we couldn't get enough spill regs. */
474 int failure = 0;
475
476 /* The basic block number currently being processed for INSN. */
477 int this_block;
478
479 /* Make sure even insns with volatile mem refs are recognizable. */
480 init_recog ();
481
482 /* Enable find_equiv_reg to distinguish insns made by reload. */
483 reload_first_uid = get_max_uid ();
484
485 for (i = 0; i < N_REG_CLASSES; i++)
486 basic_block_needs[i] = 0;
487
488 #ifdef SECONDARY_MEMORY_NEEDED
489 /* Initialize the secondary memory table. */
490 clear_secondary_mem ();
491 #endif
492
493 /* Remember which hard regs appear explicitly
494 before we merge into `regs_ever_live' the ones in which
495 pseudo regs have been allocated. */
496 bcopy (regs_ever_live, regs_explicitly_used, sizeof regs_ever_live);
497
498 /* We don't have a stack slot for any spill reg yet. */
499 bzero ((char *) spill_stack_slot, sizeof spill_stack_slot);
500 bzero ((char *) spill_stack_slot_width, sizeof spill_stack_slot_width);
501
502 /* Initialize the save area information for caller-save, in case some
503 are needed. */
504 init_save_areas ();
505
506 /* Compute which hard registers are now in use
507 as homes for pseudo registers.
508 This is done here rather than (eg) in global_alloc
509 because this point is reached even if not optimizing. */
510
511 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
512 mark_home_live (i);
513
514 for (i = 0; i < scratch_list_length; i++)
515 if (scratch_list[i])
516 mark_scratch_live (scratch_list[i]);
517
518 /* Make sure that the last insn in the chain
519 is not something that needs reloading. */
520 emit_note (NULL_PTR, NOTE_INSN_DELETED);
521
522 /* Find all the pseudo registers that didn't get hard regs
523 but do have known equivalent constants or memory slots.
524 These include parameters (known equivalent to parameter slots)
525 and cse'd or loop-moved constant memory addresses.
526
527 Record constant equivalents in reg_equiv_constant
528 so they will be substituted by find_reloads.
529 Record memory equivalents in reg_mem_equiv so they can
530 be substituted eventually by altering the REG-rtx's. */
531
532 reg_equiv_constant = (rtx *) alloca (max_regno * sizeof (rtx));
533 bzero ((char *) reg_equiv_constant, max_regno * sizeof (rtx));
534 reg_equiv_memory_loc = (rtx *) alloca (max_regno * sizeof (rtx));
535 bzero ((char *) reg_equiv_memory_loc, max_regno * sizeof (rtx));
536 reg_equiv_mem = (rtx *) alloca (max_regno * sizeof (rtx));
537 bzero ((char *) reg_equiv_mem, max_regno * sizeof (rtx));
538 reg_equiv_init = (rtx *) alloca (max_regno * sizeof (rtx));
539 bzero ((char *) reg_equiv_init, max_regno * sizeof (rtx));
540 reg_equiv_address = (rtx *) alloca (max_regno * sizeof (rtx));
541 bzero ((char *) reg_equiv_address, max_regno * sizeof (rtx));
542 reg_max_ref_width = (int *) alloca (max_regno * sizeof (int));
543 bzero ((char *) reg_max_ref_width, max_regno * sizeof (int));
544 cannot_omit_stores = (char *) alloca (max_regno);
545 bzero (cannot_omit_stores, max_regno);
546
547 #ifdef SMALL_REGISTER_CLASSES
548 CLEAR_HARD_REG_SET (forbidden_regs);
549 #endif
550
551 /* Look for REG_EQUIV notes; record what each pseudo is equivalent to.
552 Also find all paradoxical subregs and find largest such for each pseudo.
553 On machines with small register classes, record hard registers that
554 are used for user variables. These can never be used for spills.
555 Also look for a "constant" NOTE_INSN_SETJMP. This means that all
556 caller-saved registers must be marked live. */
557
558 for (insn = first; insn; insn = NEXT_INSN (insn))
559 {
560 rtx set = single_set (insn);
561
562 if (GET_CODE (insn) == NOTE && CONST_CALL_P (insn)
563 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
564 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
565 if (! call_used_regs[i])
566 regs_ever_live[i] = 1;
567
568 if (set != 0 && GET_CODE (SET_DEST (set)) == REG)
569 {
570 rtx note = find_reg_note (insn, REG_EQUIV, NULL_RTX);
571 if (note
572 #ifdef LEGITIMATE_PIC_OPERAND_P
573 && (! CONSTANT_P (XEXP (note, 0)) || ! flag_pic
574 || LEGITIMATE_PIC_OPERAND_P (XEXP (note, 0)))
575 #endif
576 )
577 {
578 rtx x = XEXP (note, 0);
579 i = REGNO (SET_DEST (set));
580 if (i > LAST_VIRTUAL_REGISTER)
581 {
582 if (GET_CODE (x) == MEM)
583 reg_equiv_memory_loc[i] = x;
584 else if (CONSTANT_P (x))
585 {
586 if (LEGITIMATE_CONSTANT_P (x))
587 reg_equiv_constant[i] = x;
588 else
589 reg_equiv_memory_loc[i]
590 = force_const_mem (GET_MODE (SET_DEST (set)), x);
591 }
592 else
593 continue;
594
595 /* If this register is being made equivalent to a MEM
596 and the MEM is not SET_SRC, the equivalencing insn
597 is one with the MEM as a SET_DEST and it occurs later.
598 So don't mark this insn now. */
599 if (GET_CODE (x) != MEM
600 || rtx_equal_p (SET_SRC (set), x))
601 reg_equiv_init[i] = insn;
602 }
603 }
604 }
605
606 /* If this insn is setting a MEM from a register equivalent to it,
607 this is the equivalencing insn. */
608 else if (set && GET_CODE (SET_DEST (set)) == MEM
609 && GET_CODE (SET_SRC (set)) == REG
610 && reg_equiv_memory_loc[REGNO (SET_SRC (set))]
611 && rtx_equal_p (SET_DEST (set),
612 reg_equiv_memory_loc[REGNO (SET_SRC (set))]))
613 reg_equiv_init[REGNO (SET_SRC (set))] = insn;
614
615 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
616 scan_paradoxical_subregs (PATTERN (insn));
617 }
618
619 /* Does this function require a frame pointer? */
620
621 frame_pointer_needed = (! flag_omit_frame_pointer
622 #ifdef EXIT_IGNORE_STACK
623 /* ?? If EXIT_IGNORE_STACK is set, we will not save
624 and restore sp for alloca. So we can't eliminate
625 the frame pointer in that case. At some point,
626 we should improve this by emitting the
627 sp-adjusting insns for this case. */
628 || (current_function_calls_alloca
629 && EXIT_IGNORE_STACK)
630 #endif
631 || FRAME_POINTER_REQUIRED);
632
633 num_eliminable = 0;
634
635 /* Initialize the table of registers to eliminate. The way we do this
636 depends on how the eliminable registers were defined. */
637 #ifdef ELIMINABLE_REGS
638 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
639 {
640 ep->can_eliminate = ep->can_eliminate_previous
641 = (CAN_ELIMINATE (ep->from, ep->to)
642 && ! (ep->to == STACK_POINTER_REGNUM && frame_pointer_needed));
643 }
644 #else
645 reg_eliminate[0].can_eliminate = reg_eliminate[0].can_eliminate_previous
646 = ! frame_pointer_needed;
647 #endif
648
649 /* Count the number of eliminable registers and build the FROM and TO
650 REG rtx's. Note that code in gen_rtx will cause, e.g.,
651 gen_rtx (REG, Pmode, STACK_POINTER_REGNUM) to equal stack_pointer_rtx.
652 We depend on this. */
653 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
654 {
655 num_eliminable += ep->can_eliminate;
656 ep->from_rtx = gen_rtx (REG, Pmode, ep->from);
657 ep->to_rtx = gen_rtx (REG, Pmode, ep->to);
658 }
659
660 num_labels = max_label_num () - get_first_label_num ();
661
662 /* Allocate the tables used to store offset information at labels. */
663 offsets_known_at = (char *) alloca (num_labels);
664 offsets_at
665 = (int (*)[NUM_ELIMINABLE_REGS])
666 alloca (num_labels * NUM_ELIMINABLE_REGS * sizeof (int));
667
668 offsets_known_at -= get_first_label_num ();
669 offsets_at -= get_first_label_num ();
670
671 /* Alter each pseudo-reg rtx to contain its hard reg number.
672 Assign stack slots to the pseudos that lack hard regs or equivalents.
673 Do not touch virtual registers. */
674
675 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_regno; i++)
676 alter_reg (i, -1);
677
678 /* Round size of stack frame to BIGGEST_ALIGNMENT. This must be done here
679 because the stack size may be a part of the offset computation for
680 register elimination. */
681 assign_stack_local (BLKmode, 0, 0);
682
683 /* If we have some registers we think can be eliminated, scan all insns to
684 see if there is an insn that sets one of these registers to something
685 other than itself plus a constant. If so, the register cannot be
686 eliminated. Doing this scan here eliminates an extra pass through the
687 main reload loop in the most common case where register elimination
688 cannot be done. */
689 for (insn = first; insn && num_eliminable; insn = NEXT_INSN (insn))
690 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
691 || GET_CODE (insn) == CALL_INSN)
692 note_stores (PATTERN (insn), mark_not_eliminable);
693
694 #ifndef REGISTER_CONSTRAINTS
695 /* If all the pseudo regs have hard regs,
696 except for those that are never referenced,
697 we know that no reloads are needed. */
698 /* But that is not true if there are register constraints, since
699 in that case some pseudos might be in the wrong kind of hard reg. */
700
701 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
702 if (reg_renumber[i] == -1 && reg_n_refs[i] != 0)
703 break;
704
705 if (i == max_regno && num_eliminable == 0 && ! caller_save_needed)
706 return;
707 #endif
708
709 /* Compute the order of preference for hard registers to spill.
710 Store them by decreasing preference in potential_reload_regs. */
711
712 order_regs_for_reload (global);
713
714 /* So far, no hard regs have been spilled. */
715 n_spills = 0;
716 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
717 spill_reg_order[i] = -1;
718
719 /* Initialize to -1, which means take the first spill register. */
720 last_spill_reg = -1;
721
722 /* On most machines, we can't use any register explicitly used in the
723 rtl as a spill register. But on some, we have to. Those will have
724 taken care to keep the life of hard regs as short as possible. */
725
726 #ifndef SMALL_REGISTER_CLASSES
727 COPY_HARD_REG_SET (forbidden_regs, bad_spill_regs);
728 #endif
729
730 /* Spill any hard regs that we know we can't eliminate. */
731 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
732 if (! ep->can_eliminate)
733 spill_hard_reg (ep->from, global, dumpfile, 1);
734
735 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
736 if (frame_pointer_needed)
737 spill_hard_reg (HARD_FRAME_POINTER_REGNUM, global, dumpfile, 1);
738 #endif
739
740 if (global)
741 for (i = 0; i < N_REG_CLASSES; i++)
742 {
743 basic_block_needs[i] = (char *) alloca (n_basic_blocks);
744 bzero (basic_block_needs[i], n_basic_blocks);
745 }
746
747 /* From now on, we need to emit any moves without making new pseudos. */
748 reload_in_progress = 1;
749
750 /* This loop scans the entire function each go-round
751 and repeats until one repetition spills no additional hard regs. */
752
753 /* This flag is set when a pseudo reg is spilled,
754 to require another pass. Note that getting an additional reload
755 reg does not necessarily imply any pseudo reg was spilled;
756 sometimes we find a reload reg that no pseudo reg was allocated in. */
757 something_changed = 1;
758 /* This flag is set if there are any insns that require reloading. */
759 something_needs_reloads = 0;
760 /* This flag is set if there are any insns that require register
761 eliminations. */
762 something_needs_elimination = 0;
763 while (something_changed)
764 {
765 rtx after_call = 0;
766
767 /* For each class, number of reload regs needed in that class.
768 This is the maximum over all insns of the needs in that class
769 of the individual insn. */
770 int max_needs[N_REG_CLASSES];
771 /* For each class, size of group of consecutive regs
772 that is needed for the reloads of this class. */
773 int group_size[N_REG_CLASSES];
774 /* For each class, max number of consecutive groups needed.
775 (Each group contains group_size[CLASS] consecutive registers.) */
776 int max_groups[N_REG_CLASSES];
777 /* For each class, max number needed of regs that don't belong
778 to any of the groups. */
779 int max_nongroups[N_REG_CLASSES];
780 /* For each class, the machine mode which requires consecutive
781 groups of regs of that class.
782 If two different modes ever require groups of one class,
783 they must be the same size and equally restrictive for that class,
784 otherwise we can't handle the complexity. */
785 enum machine_mode group_mode[N_REG_CLASSES];
786 /* Record the insn where each maximum need is first found. */
787 rtx max_needs_insn[N_REG_CLASSES];
788 rtx max_groups_insn[N_REG_CLASSES];
789 rtx max_nongroups_insn[N_REG_CLASSES];
790 rtx x;
791 int starting_frame_size = get_frame_size ();
792 int previous_frame_pointer_needed = frame_pointer_needed;
793 static char *reg_class_names[] = REG_CLASS_NAMES;
794
795 something_changed = 0;
796 bzero ((char *) max_needs, sizeof max_needs);
797 bzero ((char *) max_groups, sizeof max_groups);
798 bzero ((char *) max_nongroups, sizeof max_nongroups);
799 bzero ((char *) max_needs_insn, sizeof max_needs_insn);
800 bzero ((char *) max_groups_insn, sizeof max_groups_insn);
801 bzero ((char *) max_nongroups_insn, sizeof max_nongroups_insn);
802 bzero ((char *) group_size, sizeof group_size);
803 for (i = 0; i < N_REG_CLASSES; i++)
804 group_mode[i] = VOIDmode;
805
806 /* Keep track of which basic blocks are needing the reloads. */
807 this_block = 0;
808
809 /* Remember whether any element of basic_block_needs
810 changes from 0 to 1 in this pass. */
811 new_basic_block_needs = 0;
812
813 /* Reset all offsets on eliminable registers to their initial values. */
814 #ifdef ELIMINABLE_REGS
815 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
816 {
817 INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, ep->initial_offset);
818 ep->previous_offset = ep->offset
819 = ep->max_offset = ep->initial_offset;
820 }
821 #else
822 #ifdef INITIAL_FRAME_POINTER_OFFSET
823 INITIAL_FRAME_POINTER_OFFSET (reg_eliminate[0].initial_offset);
824 #else
825 if (!FRAME_POINTER_REQUIRED)
826 abort ();
827 reg_eliminate[0].initial_offset = 0;
828 #endif
829 reg_eliminate[0].previous_offset = reg_eliminate[0].max_offset
830 = reg_eliminate[0].offset = reg_eliminate[0].initial_offset;
831 #endif
832
833 num_not_at_initial_offset = 0;
834
835 bzero ((char *) &offsets_known_at[get_first_label_num ()], num_labels);
836
837 /* Set a known offset for each forced label to be at the initial offset
838 of each elimination. We do this because we assume that all
839 computed jumps occur from a location where each elimination is
840 at its initial offset. */
841
842 for (x = forced_labels; x; x = XEXP (x, 1))
843 if (XEXP (x, 0))
844 set_label_offsets (XEXP (x, 0), NULL_RTX, 1);
845
846 /* For each pseudo register that has an equivalent location defined,
847 try to eliminate any eliminable registers (such as the frame pointer)
848 assuming initial offsets for the replacement register, which
849 is the normal case.
850
851 If the resulting location is directly addressable, substitute
852 the MEM we just got directly for the old REG.
853
854 If it is not addressable but is a constant or the sum of a hard reg
855 and constant, it is probably not addressable because the constant is
856 out of range, in that case record the address; we will generate
857 hairy code to compute the address in a register each time it is
858 needed. Similarly if it is a hard register, but one that is not
859 valid as an address register.
860
861 If the location is not addressable, but does not have one of the
862 above forms, assign a stack slot. We have to do this to avoid the
863 potential of producing lots of reloads if, e.g., a location involves
864 a pseudo that didn't get a hard register and has an equivalent memory
865 location that also involves a pseudo that didn't get a hard register.
866
867 Perhaps at some point we will improve reload_when_needed handling
868 so this problem goes away. But that's very hairy. */
869
870 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
871 if (reg_renumber[i] < 0 && reg_equiv_memory_loc[i])
872 {
873 rtx x = eliminate_regs (reg_equiv_memory_loc[i], 0, NULL_RTX);
874
875 if (strict_memory_address_p (GET_MODE (regno_reg_rtx[i]),
876 XEXP (x, 0)))
877 reg_equiv_mem[i] = x, reg_equiv_address[i] = 0;
878 else if (CONSTANT_P (XEXP (x, 0))
879 || (GET_CODE (XEXP (x, 0)) == REG
880 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
881 || (GET_CODE (XEXP (x, 0)) == PLUS
882 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
883 && (REGNO (XEXP (XEXP (x, 0), 0))
884 < FIRST_PSEUDO_REGISTER)
885 && CONSTANT_P (XEXP (XEXP (x, 0), 1))))
886 reg_equiv_address[i] = XEXP (x, 0), reg_equiv_mem[i] = 0;
887 else
888 {
889 /* Make a new stack slot. Then indicate that something
890 changed so we go back and recompute offsets for
891 eliminable registers because the allocation of memory
892 below might change some offset. reg_equiv_{mem,address}
893 will be set up for this pseudo on the next pass around
894 the loop. */
895 reg_equiv_memory_loc[i] = 0;
896 reg_equiv_init[i] = 0;
897 alter_reg (i, -1);
898 something_changed = 1;
899 }
900 }
901
902 /* If we allocated another pseudo to the stack, redo elimination
903 bookkeeping. */
904 if (something_changed)
905 continue;
906
907 /* If caller-saves needs a group, initialize the group to include
908 the size and mode required for caller-saves. */
909
910 if (caller_save_group_size > 1)
911 {
912 group_mode[(int) caller_save_spill_class] = Pmode;
913 group_size[(int) caller_save_spill_class] = caller_save_group_size;
914 }
915
916 /* Compute the most additional registers needed by any instruction.
917 Collect information separately for each class of regs. */
918
919 for (insn = first; insn; insn = NEXT_INSN (insn))
920 {
921 if (global && this_block + 1 < n_basic_blocks
922 && insn == basic_block_head[this_block+1])
923 ++this_block;
924
925 /* If this is a label, a JUMP_INSN, or has REG_NOTES (which
926 might include REG_LABEL), we need to see what effects this
927 has on the known offsets at labels. */
928
929 if (GET_CODE (insn) == CODE_LABEL || GET_CODE (insn) == JUMP_INSN
930 || (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
931 && REG_NOTES (insn) != 0))
932 set_label_offsets (insn, insn, 0);
933
934 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
935 {
936 /* Nonzero means don't use a reload reg that overlaps
937 the place where a function value can be returned. */
938 rtx avoid_return_reg = 0;
939
940 rtx old_body = PATTERN (insn);
941 int old_code = INSN_CODE (insn);
942 rtx old_notes = REG_NOTES (insn);
943 int did_elimination = 0;
944
945 /* To compute the number of reload registers of each class
946 needed for an insn, we must simulate what choose_reload_regs
947 can do. We do this by splitting an insn into an "input" and
948 an "output" part. RELOAD_OTHER reloads are used in both.
949 The input part uses those reloads, RELOAD_FOR_INPUT reloads,
950 which must be live over the entire input section of reloads,
951 and the maximum of all the RELOAD_FOR_INPUT_ADDRESS and
952 RELOAD_FOR_OPERAND_ADDRESS reloads, which conflict with the
953 inputs.
954
955 The registers needed for output are RELOAD_OTHER and
956 RELOAD_FOR_OUTPUT, which are live for the entire output
957 portion, and the maximum of all the RELOAD_FOR_OUTPUT_ADDRESS
958 reloads for each operand.
959
960 The total number of registers needed is the maximum of the
961 inputs and outputs. */
962
963 struct needs
964 {
965 /* [0] is normal, [1] is nongroup. */
966 int regs[2][N_REG_CLASSES];
967 int groups[N_REG_CLASSES];
968 };
969
970 /* Each `struct needs' corresponds to one RELOAD_... type. */
971 struct {
972 struct needs other;
973 struct needs input;
974 struct needs output;
975 struct needs insn;
976 struct needs other_addr;
977 struct needs op_addr;
978 struct needs op_addr_reload;
979 struct needs in_addr[MAX_RECOG_OPERANDS];
980 struct needs out_addr[MAX_RECOG_OPERANDS];
981 } insn_needs;
982
983 /* If needed, eliminate any eliminable registers. */
984 if (num_eliminable)
985 did_elimination = eliminate_regs_in_insn (insn, 0);
986
987 #ifdef SMALL_REGISTER_CLASSES
988 /* Set avoid_return_reg if this is an insn
989 that might use the value of a function call. */
990 if (GET_CODE (insn) == CALL_INSN)
991 {
992 if (GET_CODE (PATTERN (insn)) == SET)
993 after_call = SET_DEST (PATTERN (insn));
994 else if (GET_CODE (PATTERN (insn)) == PARALLEL
995 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
996 after_call = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
997 else
998 after_call = 0;
999 }
1000 else if (after_call != 0
1001 && !(GET_CODE (PATTERN (insn)) == SET
1002 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx))
1003 {
1004 if (reg_referenced_p (after_call, PATTERN (insn)))
1005 avoid_return_reg = after_call;
1006 after_call = 0;
1007 }
1008 #endif /* SMALL_REGISTER_CLASSES */
1009
1010 /* Analyze the instruction. */
1011 find_reloads (insn, 0, spill_indirect_levels, global,
1012 spill_reg_order);
1013
1014 /* Remember for later shortcuts which insns had any reloads or
1015 register eliminations.
1016
1017 One might think that it would be worthwhile to mark insns
1018 that need register replacements but not reloads, but this is
1019 not safe because find_reloads may do some manipulation of
1020 the insn (such as swapping commutative operands), which would
1021 be lost when we restore the old pattern after register
1022 replacement. So the actions of find_reloads must be redone in
1023 subsequent passes or in reload_as_needed.
1024
1025 However, it is safe to mark insns that need reloads
1026 but not register replacement. */
1027
1028 PUT_MODE (insn, (did_elimination ? QImode
1029 : n_reloads ? HImode
1030 : GET_MODE (insn) == DImode ? DImode
1031 : VOIDmode));
1032
1033 /* Discard any register replacements done. */
1034 if (did_elimination)
1035 {
1036 obstack_free (&reload_obstack, reload_firstobj);
1037 PATTERN (insn) = old_body;
1038 INSN_CODE (insn) = old_code;
1039 REG_NOTES (insn) = old_notes;
1040 something_needs_elimination = 1;
1041 }
1042
1043 /* If this insn has no reloads, we need not do anything except
1044 in the case of a CALL_INSN when we have caller-saves and
1045 caller-save needs reloads. */
1046
1047 if (n_reloads == 0
1048 && ! (GET_CODE (insn) == CALL_INSN
1049 && caller_save_spill_class != NO_REGS))
1050 continue;
1051
1052 something_needs_reloads = 1;
1053 bzero ((char *) &insn_needs, sizeof insn_needs);
1054
1055 /* Count each reload once in every class
1056 containing the reload's own class. */
1057
1058 for (i = 0; i < n_reloads; i++)
1059 {
1060 register enum reg_class *p;
1061 enum reg_class class = reload_reg_class[i];
1062 int size;
1063 enum machine_mode mode;
1064 int nongroup_need;
1065 struct needs *this_needs;
1066
1067 /* Don't count the dummy reloads, for which one of the
1068 regs mentioned in the insn can be used for reloading.
1069 Don't count optional reloads.
1070 Don't count reloads that got combined with others. */
1071 if (reload_reg_rtx[i] != 0
1072 || reload_optional[i] != 0
1073 || (reload_out[i] == 0 && reload_in[i] == 0
1074 && ! reload_secondary_p[i]))
1075 continue;
1076
1077 /* Show that a reload register of this class is needed
1078 in this basic block. We do not use insn_needs and
1079 insn_groups because they are overly conservative for
1080 this purpose. */
1081 if (global && ! basic_block_needs[(int) class][this_block])
1082 {
1083 basic_block_needs[(int) class][this_block] = 1;
1084 new_basic_block_needs = 1;
1085 }
1086
1087
1088 mode = reload_inmode[i];
1089 if (GET_MODE_SIZE (reload_outmode[i]) > GET_MODE_SIZE (mode))
1090 mode = reload_outmode[i];
1091 size = CLASS_MAX_NREGS (class, mode);
1092
1093 /* If this class doesn't want a group, determine if we have
1094 a nongroup need or a regular need. We have a nongroup
1095 need if this reload conflicts with a group reload whose
1096 class intersects with this reload's class. */
1097
1098 nongroup_need = 0;
1099 if (size == 1)
1100 for (j = 0; j < n_reloads; j++)
1101 if ((CLASS_MAX_NREGS (reload_reg_class[j],
1102 (GET_MODE_SIZE (reload_outmode[j])
1103 > GET_MODE_SIZE (reload_inmode[j]))
1104 ? reload_outmode[j]
1105 : reload_inmode[j])
1106 > 1)
1107 && (!reload_optional[j])
1108 && (reload_in[j] != 0 || reload_out[j] != 0
1109 || reload_secondary_p[j])
1110 && reloads_conflict (i, j)
1111 && reg_classes_intersect_p (class,
1112 reload_reg_class[j]))
1113 {
1114 nongroup_need = 1;
1115 break;
1116 }
1117
1118 /* Decide which time-of-use to count this reload for. */
1119 switch (reload_when_needed[i])
1120 {
1121 case RELOAD_OTHER:
1122 this_needs = &insn_needs.other;
1123 break;
1124 case RELOAD_FOR_INPUT:
1125 this_needs = &insn_needs.input;
1126 break;
1127 case RELOAD_FOR_OUTPUT:
1128 this_needs = &insn_needs.output;
1129 break;
1130 case RELOAD_FOR_INSN:
1131 this_needs = &insn_needs.insn;
1132 break;
1133 case RELOAD_FOR_OTHER_ADDRESS:
1134 this_needs = &insn_needs.other_addr;
1135 break;
1136 case RELOAD_FOR_INPUT_ADDRESS:
1137 this_needs = &insn_needs.in_addr[reload_opnum[i]];
1138 break;
1139 case RELOAD_FOR_OUTPUT_ADDRESS:
1140 this_needs = &insn_needs.out_addr[reload_opnum[i]];
1141 break;
1142 case RELOAD_FOR_OPERAND_ADDRESS:
1143 this_needs = &insn_needs.op_addr;
1144 break;
1145 case RELOAD_FOR_OPADDR_ADDR:
1146 this_needs = &insn_needs.op_addr_reload;
1147 break;
1148 }
1149
1150 if (size > 1)
1151 {
1152 enum machine_mode other_mode, allocate_mode;
1153
1154 /* Count number of groups needed separately from
1155 number of individual regs needed. */
1156 this_needs->groups[(int) class]++;
1157 p = reg_class_superclasses[(int) class];
1158 while (*p != LIM_REG_CLASSES)
1159 this_needs->groups[(int) *p++]++;
1160
1161 /* Record size and mode of a group of this class. */
1162 /* If more than one size group is needed,
1163 make all groups the largest needed size. */
1164 if (group_size[(int) class] < size)
1165 {
1166 other_mode = group_mode[(int) class];
1167 allocate_mode = mode;
1168
1169 group_size[(int) class] = size;
1170 group_mode[(int) class] = mode;
1171 }
1172 else
1173 {
1174 other_mode = mode;
1175 allocate_mode = group_mode[(int) class];
1176 }
1177
1178 /* Crash if two dissimilar machine modes both need
1179 groups of consecutive regs of the same class. */
1180
1181 if (other_mode != VOIDmode && other_mode != allocate_mode
1182 && ! modes_equiv_for_class_p (allocate_mode,
1183 other_mode, class))
1184 fatal_insn ("Two dissimilar machine modes both need groups of consecutive regs of the same class",
1185 insn);
1186 }
1187 else if (size == 1)
1188 {
1189 this_needs->regs[nongroup_need][(int) class] += 1;
1190 p = reg_class_superclasses[(int) class];
1191 while (*p != LIM_REG_CLASSES)
1192 this_needs->regs[nongroup_need][(int) *p++] += 1;
1193 }
1194 else
1195 abort ();
1196 }
1197
1198 /* All reloads have been counted for this insn;
1199 now merge the various times of use.
1200 This sets insn_needs, etc., to the maximum total number
1201 of registers needed at any point in this insn. */
1202
1203 for (i = 0; i < N_REG_CLASSES; i++)
1204 {
1205 int in_max, out_max;
1206
1207 /* Compute normal and nongroup needs. */
1208 for (j = 0; j <= 1; j++)
1209 {
1210 for (in_max = 0, out_max = 0, k = 0;
1211 k < reload_n_operands; k++)
1212 {
1213 in_max
1214 = MAX (in_max, insn_needs.in_addr[k].regs[j][i]);
1215 out_max
1216 = MAX (out_max, insn_needs.out_addr[k].regs[j][i]);
1217 }
1218
1219 /* RELOAD_FOR_INSN reloads conflict with inputs, outputs,
1220 and operand addresses but not things used to reload
1221 them. Similarly, RELOAD_FOR_OPERAND_ADDRESS reloads
1222 don't conflict with things needed to reload inputs or
1223 outputs. */
1224
1225 in_max = MAX (MAX (insn_needs.op_addr.regs[j][i],
1226 insn_needs.op_addr_reload.regs[j][i]),
1227 in_max);
1228
1229 out_max = MAX (out_max, insn_needs.insn.regs[j][i]);
1230
1231 insn_needs.input.regs[j][i]
1232 = MAX (insn_needs.input.regs[j][i]
1233 + insn_needs.op_addr.regs[j][i]
1234 + insn_needs.insn.regs[j][i],
1235 in_max + insn_needs.input.regs[j][i]);
1236
1237 insn_needs.output.regs[j][i] += out_max;
1238 insn_needs.other.regs[j][i]
1239 += MAX (MAX (insn_needs.input.regs[j][i],
1240 insn_needs.output.regs[j][i]),
1241 insn_needs.other_addr.regs[j][i]);
1242
1243 }
1244
1245 /* Now compute group needs. */
1246 for (in_max = 0, out_max = 0, j = 0;
1247 j < reload_n_operands; j++)
1248 {
1249 in_max = MAX (in_max, insn_needs.in_addr[j].groups[i]);
1250 out_max
1251 = MAX (out_max, insn_needs.out_addr[j].groups[i]);
1252 }
1253
1254 in_max = MAX (MAX (insn_needs.op_addr.groups[i],
1255 insn_needs.op_addr_reload.groups[i]),
1256 in_max);
1257 out_max = MAX (out_max, insn_needs.insn.groups[i]);
1258
1259 insn_needs.input.groups[i]
1260 = MAX (insn_needs.input.groups[i]
1261 + insn_needs.op_addr.groups[i]
1262 + insn_needs.insn.groups[i],
1263 in_max + insn_needs.input.groups[i]);
1264
1265 insn_needs.output.groups[i] += out_max;
1266 insn_needs.other.groups[i]
1267 += MAX (MAX (insn_needs.input.groups[i],
1268 insn_needs.output.groups[i]),
1269 insn_needs.other_addr.groups[i]);
1270 }
1271
1272 /* If this is a CALL_INSN and caller-saves will need
1273 a spill register, act as if the spill register is
1274 needed for this insn. However, the spill register
1275 can be used by any reload of this insn, so we only
1276 need do something if no need for that class has
1277 been recorded.
1278
1279 The assumption that every CALL_INSN will trigger a
1280 caller-save is highly conservative, however, the number
1281 of cases where caller-saves will need a spill register but
1282 a block containing a CALL_INSN won't need a spill register
1283 of that class should be quite rare.
1284
1285 If a group is needed, the size and mode of the group will
1286 have been set up at the beginning of this loop. */
1287
1288 if (GET_CODE (insn) == CALL_INSN
1289 && caller_save_spill_class != NO_REGS)
1290 {
1291 /* See if this register would conflict with any reload
1292 that needs a group. */
1293 int nongroup_need = 0;
1294 int *caller_save_needs;
1295
1296 for (j = 0; j < n_reloads; j++)
1297 if ((CLASS_MAX_NREGS (reload_reg_class[j],
1298 (GET_MODE_SIZE (reload_outmode[j])
1299 > GET_MODE_SIZE (reload_inmode[j]))
1300 ? reload_outmode[j]
1301 : reload_inmode[j])
1302 > 1)
1303 && reg_classes_intersect_p (caller_save_spill_class,
1304 reload_reg_class[j]))
1305 {
1306 nongroup_need = 1;
1307 break;
1308 }
1309
1310 caller_save_needs
1311 = (caller_save_group_size > 1
1312 ? insn_needs.other.groups
1313 : insn_needs.other.regs[nongroup_need]);
1314
1315 if (caller_save_needs[(int) caller_save_spill_class] == 0)
1316 {
1317 register enum reg_class *p
1318 = reg_class_superclasses[(int) caller_save_spill_class];
1319
1320 caller_save_needs[(int) caller_save_spill_class]++;
1321
1322 while (*p != LIM_REG_CLASSES)
1323 caller_save_needs[(int) *p++] += 1;
1324 }
1325
1326 /* Show that this basic block will need a register of
1327 this class. */
1328
1329 if (global
1330 && ! (basic_block_needs[(int) caller_save_spill_class]
1331 [this_block]))
1332 {
1333 basic_block_needs[(int) caller_save_spill_class]
1334 [this_block] = 1;
1335 new_basic_block_needs = 1;
1336 }
1337 }
1338
1339 #ifdef SMALL_REGISTER_CLASSES
1340 /* If this insn stores the value of a function call,
1341 and that value is in a register that has been spilled,
1342 and if the insn needs a reload in a class
1343 that might use that register as the reload register,
1344 then add an extra need in that class.
1345 This makes sure we have a register available that does
1346 not overlap the return value. */
1347
1348 if (avoid_return_reg)
1349 {
1350 int regno = REGNO (avoid_return_reg);
1351 int nregs
1352 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
1353 int r;
1354 int basic_needs[N_REG_CLASSES], basic_groups[N_REG_CLASSES];
1355
1356 /* First compute the "basic needs", which counts a
1357 need only in the smallest class in which it
1358 is required. */
1359
1360 bcopy ((char *) insn_needs.other.regs[0],
1361 (char *) basic_needs, sizeof basic_needs);
1362 bcopy ((char *) insn_needs.other.groups,
1363 (char *) basic_groups, sizeof basic_groups);
1364
1365 for (i = 0; i < N_REG_CLASSES; i++)
1366 {
1367 enum reg_class *p;
1368
1369 if (basic_needs[i] >= 0)
1370 for (p = reg_class_superclasses[i];
1371 *p != LIM_REG_CLASSES; p++)
1372 basic_needs[(int) *p] -= basic_needs[i];
1373
1374 if (basic_groups[i] >= 0)
1375 for (p = reg_class_superclasses[i];
1376 *p != LIM_REG_CLASSES; p++)
1377 basic_groups[(int) *p] -= basic_groups[i];
1378 }
1379
1380 /* Now count extra regs if there might be a conflict with
1381 the return value register. */
1382
1383 for (r = regno; r < regno + nregs; r++)
1384 if (spill_reg_order[r] >= 0)
1385 for (i = 0; i < N_REG_CLASSES; i++)
1386 if (TEST_HARD_REG_BIT (reg_class_contents[i], r))
1387 {
1388 if (basic_needs[i] > 0)
1389 {
1390 enum reg_class *p;
1391
1392 insn_needs.other.regs[0][i]++;
1393 p = reg_class_superclasses[i];
1394 while (*p != LIM_REG_CLASSES)
1395 insn_needs.other.regs[0][(int) *p++]++;
1396 }
1397 if (basic_groups[i] > 0)
1398 {
1399 enum reg_class *p;
1400
1401 insn_needs.other.groups[i]++;
1402 p = reg_class_superclasses[i];
1403 while (*p != LIM_REG_CLASSES)
1404 insn_needs.other.groups[(int) *p++]++;
1405 }
1406 }
1407 }
1408 #endif /* SMALL_REGISTER_CLASSES */
1409
1410 /* For each class, collect maximum need of any insn. */
1411
1412 for (i = 0; i < N_REG_CLASSES; i++)
1413 {
1414 if (max_needs[i] < insn_needs.other.regs[0][i])
1415 {
1416 max_needs[i] = insn_needs.other.regs[0][i];
1417 max_needs_insn[i] = insn;
1418 }
1419 if (max_groups[i] < insn_needs.other.groups[i])
1420 {
1421 max_groups[i] = insn_needs.other.groups[i];
1422 max_groups_insn[i] = insn;
1423 }
1424 if (max_nongroups[i] < insn_needs.other.regs[1][i])
1425 {
1426 max_nongroups[i] = insn_needs.other.regs[1][i];
1427 max_nongroups_insn[i] = insn;
1428 }
1429 }
1430 }
1431 /* Note that there is a continue statement above. */
1432 }
1433
1434 /* If we allocated any new memory locations, make another pass
1435 since it might have changed elimination offsets. */
1436 if (starting_frame_size != get_frame_size ())
1437 something_changed = 1;
1438
1439 if (dumpfile)
1440 for (i = 0; i < N_REG_CLASSES; i++)
1441 {
1442 if (max_needs[i] > 0)
1443 fprintf (dumpfile,
1444 ";; Need %d reg%s of class %s (for insn %d).\n",
1445 max_needs[i], max_needs[i] == 1 ? "" : "s",
1446 reg_class_names[i], INSN_UID (max_needs_insn[i]));
1447 if (max_nongroups[i] > 0)
1448 fprintf (dumpfile,
1449 ";; Need %d nongroup reg%s of class %s (for insn %d).\n",
1450 max_nongroups[i], max_nongroups[i] == 1 ? "" : "s",
1451 reg_class_names[i], INSN_UID (max_nongroups_insn[i]));
1452 if (max_groups[i] > 0)
1453 fprintf (dumpfile,
1454 ";; Need %d group%s (%smode) of class %s (for insn %d).\n",
1455 max_groups[i], max_groups[i] == 1 ? "" : "s",
1456 mode_name[(int) group_mode[i]],
1457 reg_class_names[i], INSN_UID (max_groups_insn[i]));
1458 }
1459
1460 /* If we have caller-saves, set up the save areas and see if caller-save
1461 will need a spill register. */
1462
1463 if (caller_save_needed)
1464 {
1465 /* Set the offsets for setup_save_areas. */
1466 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
1467 ep++)
1468 ep->previous_offset = ep->max_offset;
1469
1470 if ( ! setup_save_areas (&something_changed)
1471 && caller_save_spill_class == NO_REGS)
1472 {
1473 /* The class we will need depends on whether the machine
1474 supports the sum of two registers for an address; see
1475 find_address_reloads for details. */
1476
1477 caller_save_spill_class
1478 = double_reg_address_ok ? INDEX_REG_CLASS : BASE_REG_CLASS;
1479 caller_save_group_size
1480 = CLASS_MAX_NREGS (caller_save_spill_class, Pmode);
1481 something_changed = 1;
1482 }
1483 }
1484
1485 /* See if anything that happened changes which eliminations are valid.
1486 For example, on the Sparc, whether or not the frame pointer can
1487 be eliminated can depend on what registers have been used. We need
1488 not check some conditions again (such as flag_omit_frame_pointer)
1489 since they can't have changed. */
1490
1491 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1492 if ((ep->from == HARD_FRAME_POINTER_REGNUM && FRAME_POINTER_REQUIRED)
1493 #ifdef ELIMINABLE_REGS
1494 || ! CAN_ELIMINATE (ep->from, ep->to)
1495 #endif
1496 )
1497 ep->can_eliminate = 0;
1498
1499 /* Look for the case where we have discovered that we can't replace
1500 register A with register B and that means that we will now be
1501 trying to replace register A with register C. This means we can
1502 no longer replace register C with register B and we need to disable
1503 such an elimination, if it exists. This occurs often with A == ap,
1504 B == sp, and C == fp. */
1505
1506 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1507 {
1508 struct elim_table *op;
1509 register int new_to = -1;
1510
1511 if (! ep->can_eliminate && ep->can_eliminate_previous)
1512 {
1513 /* Find the current elimination for ep->from, if there is a
1514 new one. */
1515 for (op = reg_eliminate;
1516 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
1517 if (op->from == ep->from && op->can_eliminate)
1518 {
1519 new_to = op->to;
1520 break;
1521 }
1522
1523 /* See if there is an elimination of NEW_TO -> EP->TO. If so,
1524 disable it. */
1525 for (op = reg_eliminate;
1526 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
1527 if (op->from == new_to && op->to == ep->to)
1528 op->can_eliminate = 0;
1529 }
1530 }
1531
1532 /* See if any registers that we thought we could eliminate the previous
1533 time are no longer eliminable. If so, something has changed and we
1534 must spill the register. Also, recompute the number of eliminable
1535 registers and see if the frame pointer is needed; it is if there is
1536 no elimination of the frame pointer that we can perform. */
1537
1538 frame_pointer_needed = 1;
1539 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1540 {
1541 if (ep->can_eliminate && ep->from == FRAME_POINTER_REGNUM
1542 && ep->to != HARD_FRAME_POINTER_REGNUM)
1543 frame_pointer_needed = 0;
1544
1545 if (! ep->can_eliminate && ep->can_eliminate_previous)
1546 {
1547 ep->can_eliminate_previous = 0;
1548 spill_hard_reg (ep->from, global, dumpfile, 1);
1549 something_changed = 1;
1550 num_eliminable--;
1551 }
1552 }
1553
1554 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
1555 /* If we didn't need a frame pointer last time, but we do now, spill
1556 the hard frame pointer. */
1557 if (frame_pointer_needed && ! previous_frame_pointer_needed)
1558 {
1559 spill_hard_reg (HARD_FRAME_POINTER_REGNUM, global, dumpfile, 1);
1560 something_changed = 1;
1561 }
1562 #endif
1563
1564 /* If all needs are met, we win. */
1565
1566 for (i = 0; i < N_REG_CLASSES; i++)
1567 if (max_needs[i] > 0 || max_groups[i] > 0 || max_nongroups[i] > 0)
1568 break;
1569 if (i == N_REG_CLASSES && !new_basic_block_needs && ! something_changed)
1570 break;
1571
1572 /* Not all needs are met; must spill some hard regs. */
1573
1574 /* Put all registers spilled so far back in potential_reload_regs, but
1575 put them at the front, since we've already spilled most of the
1576 pseudos in them (we might have left some pseudos unspilled if they
1577 were in a block that didn't need any spill registers of a conflicting
1578 class. We used to try to mark off the need for those registers,
1579 but doing so properly is very complex and reallocating them is the
1580 simpler approach. First, "pack" potential_reload_regs by pushing
1581 any nonnegative entries towards the end. That will leave room
1582 for the registers we already spilled.
1583
1584 Also, undo the marking of the spill registers from the last time
1585 around in FORBIDDEN_REGS since we will probably be allocating
1586 them again below.
1587
1588 ??? It is theoretically possible that we might end up not using one
1589 of our previously-spilled registers in this allocation, even though
1590 they are at the head of the list. It's not clear what to do about
1591 this, but it was no better before, when we marked off the needs met
1592 by the previously-spilled registers. With the current code, globals
1593 can be allocated into these registers, but locals cannot. */
1594
1595 if (n_spills)
1596 {
1597 for (i = j = FIRST_PSEUDO_REGISTER - 1; i >= 0; i--)
1598 if (potential_reload_regs[i] != -1)
1599 potential_reload_regs[j--] = potential_reload_regs[i];
1600
1601 for (i = 0; i < n_spills; i++)
1602 {
1603 potential_reload_regs[i] = spill_regs[i];
1604 spill_reg_order[spill_regs[i]] = -1;
1605 CLEAR_HARD_REG_BIT (forbidden_regs, spill_regs[i]);
1606 }
1607
1608 n_spills = 0;
1609 }
1610
1611 /* Now find more reload regs to satisfy the remaining need
1612 Do it by ascending class number, since otherwise a reg
1613 might be spilled for a big class and might fail to count
1614 for a smaller class even though it belongs to that class.
1615
1616 Count spilled regs in `spills', and add entries to
1617 `spill_regs' and `spill_reg_order'.
1618
1619 ??? Note there is a problem here.
1620 When there is a need for a group in a high-numbered class,
1621 and also need for non-group regs that come from a lower class,
1622 the non-group regs are chosen first. If there aren't many regs,
1623 they might leave no room for a group.
1624
1625 This was happening on the 386. To fix it, we added the code
1626 that calls possible_group_p, so that the lower class won't
1627 break up the last possible group.
1628
1629 Really fixing the problem would require changes above
1630 in counting the regs already spilled, and in choose_reload_regs.
1631 It might be hard to avoid introducing bugs there. */
1632
1633 CLEAR_HARD_REG_SET (counted_for_groups);
1634 CLEAR_HARD_REG_SET (counted_for_nongroups);
1635
1636 for (class = 0; class < N_REG_CLASSES; class++)
1637 {
1638 /* First get the groups of registers.
1639 If we got single registers first, we might fragment
1640 possible groups. */
1641 while (max_groups[class] > 0)
1642 {
1643 /* If any single spilled regs happen to form groups,
1644 count them now. Maybe we don't really need
1645 to spill another group. */
1646 count_possible_groups (group_size, group_mode, max_groups,
1647 class);
1648
1649 if (max_groups[class] <= 0)
1650 break;
1651
1652 /* Groups of size 2 (the only groups used on most machines)
1653 are treated specially. */
1654 if (group_size[class] == 2)
1655 {
1656 /* First, look for a register that will complete a group. */
1657 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1658 {
1659 int other;
1660
1661 j = potential_reload_regs[i];
1662 if (j >= 0 && ! TEST_HARD_REG_BIT (bad_spill_regs, j)
1663 &&
1664 ((j > 0 && (other = j - 1, spill_reg_order[other] >= 0)
1665 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1666 && TEST_HARD_REG_BIT (reg_class_contents[class], other)
1667 && HARD_REGNO_MODE_OK (other, group_mode[class])
1668 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1669 other)
1670 /* We don't want one part of another group.
1671 We could get "two groups" that overlap! */
1672 && ! TEST_HARD_REG_BIT (counted_for_groups, other))
1673 ||
1674 (j < FIRST_PSEUDO_REGISTER - 1
1675 && (other = j + 1, spill_reg_order[other] >= 0)
1676 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1677 && TEST_HARD_REG_BIT (reg_class_contents[class], other)
1678 && HARD_REGNO_MODE_OK (j, group_mode[class])
1679 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1680 other)
1681 && ! TEST_HARD_REG_BIT (counted_for_groups,
1682 other))))
1683 {
1684 register enum reg_class *p;
1685
1686 /* We have found one that will complete a group,
1687 so count off one group as provided. */
1688 max_groups[class]--;
1689 p = reg_class_superclasses[class];
1690 while (*p != LIM_REG_CLASSES)
1691 {
1692 if (group_size [(int) *p] <= group_size [class])
1693 max_groups[(int) *p]--;
1694 p++;
1695 }
1696
1697 /* Indicate both these regs are part of a group. */
1698 SET_HARD_REG_BIT (counted_for_groups, j);
1699 SET_HARD_REG_BIT (counted_for_groups, other);
1700 break;
1701 }
1702 }
1703 /* We can't complete a group, so start one. */
1704 #ifdef SMALL_REGISTER_CLASSES
1705 /* Look for a pair neither of which is explicitly used. */
1706 if (i == FIRST_PSEUDO_REGISTER)
1707 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1708 {
1709 int k;
1710 j = potential_reload_regs[i];
1711 /* Verify that J+1 is a potential reload reg. */
1712 for (k = 0; k < FIRST_PSEUDO_REGISTER; k++)
1713 if (potential_reload_regs[k] == j + 1)
1714 break;
1715 if (j >= 0 && j + 1 < FIRST_PSEUDO_REGISTER
1716 && k < FIRST_PSEUDO_REGISTER
1717 && spill_reg_order[j] < 0 && spill_reg_order[j + 1] < 0
1718 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1719 && TEST_HARD_REG_BIT (reg_class_contents[class], j + 1)
1720 && HARD_REGNO_MODE_OK (j, group_mode[class])
1721 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1722 j + 1)
1723 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + 1)
1724 /* Reject J at this stage
1725 if J+1 was explicitly used. */
1726 && ! regs_explicitly_used[j + 1])
1727 break;
1728 }
1729 #endif
1730 /* Now try any group at all
1731 whose registers are not in bad_spill_regs. */
1732 if (i == FIRST_PSEUDO_REGISTER)
1733 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1734 {
1735 int k;
1736 j = potential_reload_regs[i];
1737 /* Verify that J+1 is a potential reload reg. */
1738 for (k = 0; k < FIRST_PSEUDO_REGISTER; k++)
1739 if (potential_reload_regs[k] == j + 1)
1740 break;
1741 if (j >= 0 && j + 1 < FIRST_PSEUDO_REGISTER
1742 && k < FIRST_PSEUDO_REGISTER
1743 && spill_reg_order[j] < 0 && spill_reg_order[j + 1] < 0
1744 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1745 && TEST_HARD_REG_BIT (reg_class_contents[class], j + 1)
1746 && HARD_REGNO_MODE_OK (j, group_mode[class])
1747 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1748 j + 1)
1749 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + 1))
1750 break;
1751 }
1752
1753 /* I should be the index in potential_reload_regs
1754 of the new reload reg we have found. */
1755
1756 if (i >= FIRST_PSEUDO_REGISTER)
1757 {
1758 /* There are no groups left to spill. */
1759 spill_failure (max_groups_insn[class]);
1760 failure = 1;
1761 goto failed;
1762 }
1763 else
1764 something_changed
1765 |= new_spill_reg (i, class, max_needs, NULL_PTR,
1766 global, dumpfile);
1767 }
1768 else
1769 {
1770 /* For groups of more than 2 registers,
1771 look for a sufficient sequence of unspilled registers,
1772 and spill them all at once. */
1773 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1774 {
1775 int k;
1776
1777 j = potential_reload_regs[i];
1778 if (j >= 0
1779 && j + group_size[class] <= FIRST_PSEUDO_REGISTER
1780 && HARD_REGNO_MODE_OK (j, group_mode[class]))
1781 {
1782 /* Check each reg in the sequence. */
1783 for (k = 0; k < group_size[class]; k++)
1784 if (! (spill_reg_order[j + k] < 0
1785 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + k)
1786 && TEST_HARD_REG_BIT (reg_class_contents[class], j + k)))
1787 break;
1788 /* We got a full sequence, so spill them all. */
1789 if (k == group_size[class])
1790 {
1791 register enum reg_class *p;
1792 for (k = 0; k < group_size[class]; k++)
1793 {
1794 int idx;
1795 SET_HARD_REG_BIT (counted_for_groups, j + k);
1796 for (idx = 0; idx < FIRST_PSEUDO_REGISTER; idx++)
1797 if (potential_reload_regs[idx] == j + k)
1798 break;
1799 something_changed
1800 |= new_spill_reg (idx, class,
1801 max_needs, NULL_PTR,
1802 global, dumpfile);
1803 }
1804
1805 /* We have found one that will complete a group,
1806 so count off one group as provided. */
1807 max_groups[class]--;
1808 p = reg_class_superclasses[class];
1809 while (*p != LIM_REG_CLASSES)
1810 {
1811 if (group_size [(int) *p]
1812 <= group_size [class])
1813 max_groups[(int) *p]--;
1814 p++;
1815 }
1816 break;
1817 }
1818 }
1819 }
1820 /* We couldn't find any registers for this reload.
1821 Avoid going into an infinite loop. */
1822 if (i >= FIRST_PSEUDO_REGISTER)
1823 {
1824 /* There are no groups left. */
1825 spill_failure (max_groups_insn[class]);
1826 failure = 1;
1827 goto failed;
1828 }
1829 }
1830 }
1831
1832 /* Now similarly satisfy all need for single registers. */
1833
1834 while (max_needs[class] > 0 || max_nongroups[class] > 0)
1835 {
1836 /* If we spilled enough regs, but they weren't counted
1837 against the non-group need, see if we can count them now.
1838 If so, we can avoid some actual spilling. */
1839 if (max_needs[class] <= 0 && max_nongroups[class] > 0)
1840 for (i = 0; i < n_spills; i++)
1841 if (TEST_HARD_REG_BIT (reg_class_contents[class],
1842 spill_regs[i])
1843 && !TEST_HARD_REG_BIT (counted_for_groups,
1844 spill_regs[i])
1845 && !TEST_HARD_REG_BIT (counted_for_nongroups,
1846 spill_regs[i])
1847 && max_nongroups[class] > 0)
1848 {
1849 register enum reg_class *p;
1850
1851 SET_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]);
1852 max_nongroups[class]--;
1853 p = reg_class_superclasses[class];
1854 while (*p != LIM_REG_CLASSES)
1855 max_nongroups[(int) *p++]--;
1856 }
1857 if (max_needs[class] <= 0 && max_nongroups[class] <= 0)
1858 break;
1859
1860 /* Consider the potential reload regs that aren't
1861 yet in use as reload regs, in order of preference.
1862 Find the most preferred one that's in this class. */
1863
1864 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1865 if (potential_reload_regs[i] >= 0
1866 && TEST_HARD_REG_BIT (reg_class_contents[class],
1867 potential_reload_regs[i])
1868 /* If this reg will not be available for groups,
1869 pick one that does not foreclose possible groups.
1870 This is a kludge, and not very general,
1871 but it should be sufficient to make the 386 work,
1872 and the problem should not occur on machines with
1873 more registers. */
1874 && (max_nongroups[class] == 0
1875 || possible_group_p (potential_reload_regs[i], max_groups)))
1876 break;
1877
1878 /* If we couldn't get a register, try to get one even if we
1879 might foreclose possible groups. This may cause problems
1880 later, but that's better than aborting now, since it is
1881 possible that we will, in fact, be able to form the needed
1882 group even with this allocation. */
1883
1884 if (i >= FIRST_PSEUDO_REGISTER
1885 && (asm_noperands (max_needs[class] > 0
1886 ? max_needs_insn[class]
1887 : max_nongroups_insn[class])
1888 < 0))
1889 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1890 if (potential_reload_regs[i] >= 0
1891 && TEST_HARD_REG_BIT (reg_class_contents[class],
1892 potential_reload_regs[i]))
1893 break;
1894
1895 /* I should be the index in potential_reload_regs
1896 of the new reload reg we have found. */
1897
1898 if (i >= FIRST_PSEUDO_REGISTER)
1899 {
1900 /* There are no possible registers left to spill. */
1901 spill_failure (max_needs[class] > 0 ? max_needs_insn[class]
1902 : max_nongroups_insn[class]);
1903 failure = 1;
1904 goto failed;
1905 }
1906 else
1907 something_changed
1908 |= new_spill_reg (i, class, max_needs, max_nongroups,
1909 global, dumpfile);
1910 }
1911 }
1912 }
1913
1914 /* If global-alloc was run, notify it of any register eliminations we have
1915 done. */
1916 if (global)
1917 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1918 if (ep->can_eliminate)
1919 mark_elimination (ep->from, ep->to);
1920
1921 /* Insert code to save and restore call-clobbered hard regs
1922 around calls. Tell what mode to use so that we will process
1923 those insns in reload_as_needed if we have to. */
1924
1925 if (caller_save_needed)
1926 save_call_clobbered_regs (num_eliminable ? QImode
1927 : caller_save_spill_class != NO_REGS ? HImode
1928 : VOIDmode);
1929
1930 /* If a pseudo has no hard reg, delete the insns that made the equivalence.
1931 If that insn didn't set the register (i.e., it copied the register to
1932 memory), just delete that insn instead of the equivalencing insn plus
1933 anything now dead. If we call delete_dead_insn on that insn, we may
1934 delete the insn that actually sets the register if the register dies
1935 there and that is incorrect. */
1936
1937 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1938 if (reg_renumber[i] < 0 && reg_equiv_init[i] != 0
1939 && GET_CODE (reg_equiv_init[i]) != NOTE)
1940 {
1941 if (reg_set_p (regno_reg_rtx[i], PATTERN (reg_equiv_init[i])))
1942 delete_dead_insn (reg_equiv_init[i]);
1943 else
1944 {
1945 PUT_CODE (reg_equiv_init[i], NOTE);
1946 NOTE_SOURCE_FILE (reg_equiv_init[i]) = 0;
1947 NOTE_LINE_NUMBER (reg_equiv_init[i]) = NOTE_INSN_DELETED;
1948 }
1949 }
1950
1951 /* Use the reload registers where necessary
1952 by generating move instructions to move the must-be-register
1953 values into or out of the reload registers. */
1954
1955 if (something_needs_reloads || something_needs_elimination
1956 || (caller_save_needed && num_eliminable)
1957 || caller_save_spill_class != NO_REGS)
1958 reload_as_needed (first, global);
1959
1960 /* If we were able to eliminate the frame pointer, show that it is no
1961 longer live at the start of any basic block. If it is live by
1962 virtue of being in a pseudo, that pseudo will be marked live
1963 and hence the frame pointer will be known to be live via that
1964 pseudo. */
1965
1966 if (! frame_pointer_needed)
1967 for (i = 0; i < n_basic_blocks; i++)
1968 basic_block_live_at_start[i][HARD_FRAME_POINTER_REGNUM / REGSET_ELT_BITS]
1969 &= ~ ((REGSET_ELT_TYPE) 1 << (HARD_FRAME_POINTER_REGNUM
1970 % REGSET_ELT_BITS));
1971
1972 /* Come here (with failure set nonzero) if we can't get enough spill regs
1973 and we decide not to abort about it. */
1974 failed:
1975
1976 reload_in_progress = 0;
1977
1978 /* Now eliminate all pseudo regs by modifying them into
1979 their equivalent memory references.
1980 The REG-rtx's for the pseudos are modified in place,
1981 so all insns that used to refer to them now refer to memory.
1982
1983 For a reg that has a reg_equiv_address, all those insns
1984 were changed by reloading so that no insns refer to it any longer;
1985 but the DECL_RTL of a variable decl may refer to it,
1986 and if so this causes the debugging info to mention the variable. */
1987
1988 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1989 {
1990 rtx addr = 0;
1991 int in_struct = 0;
1992 if (reg_equiv_mem[i])
1993 {
1994 addr = XEXP (reg_equiv_mem[i], 0);
1995 in_struct = MEM_IN_STRUCT_P (reg_equiv_mem[i]);
1996 }
1997 if (reg_equiv_address[i])
1998 addr = reg_equiv_address[i];
1999 if (addr)
2000 {
2001 if (reg_renumber[i] < 0)
2002 {
2003 rtx reg = regno_reg_rtx[i];
2004 XEXP (reg, 0) = addr;
2005 REG_USERVAR_P (reg) = 0;
2006 MEM_IN_STRUCT_P (reg) = in_struct;
2007 PUT_CODE (reg, MEM);
2008 }
2009 else if (reg_equiv_mem[i])
2010 XEXP (reg_equiv_mem[i], 0) = addr;
2011 }
2012 }
2013
2014 #ifdef PRESERVE_DEATH_INFO_REGNO_P
2015 /* Make a pass over all the insns and remove death notes for things that
2016 are no longer registers or no longer die in the insn (e.g., an input
2017 and output pseudo being tied). */
2018
2019 for (insn = first; insn; insn = NEXT_INSN (insn))
2020 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
2021 {
2022 rtx note, next;
2023
2024 for (note = REG_NOTES (insn); note; note = next)
2025 {
2026 next = XEXP (note, 1);
2027 if (REG_NOTE_KIND (note) == REG_DEAD
2028 && (GET_CODE (XEXP (note, 0)) != REG
2029 || reg_set_p (XEXP (note, 0), PATTERN (insn))))
2030 remove_note (insn, note);
2031 }
2032 }
2033 #endif
2034
2035 /* Indicate that we no longer have known memory locations or constants. */
2036 reg_equiv_constant = 0;
2037 reg_equiv_memory_loc = 0;
2038
2039 if (scratch_list)
2040 free (scratch_list);
2041 scratch_list = 0;
2042 if (scratch_block)
2043 free (scratch_block);
2044 scratch_block = 0;
2045
2046 CLEAR_HARD_REG_SET (used_spill_regs);
2047 for (i = 0; i < n_spills; i++)
2048 SET_HARD_REG_BIT (used_spill_regs, spill_regs[i]);
2049
2050 return failure;
2051 }
2052 \f
2053 /* Nonzero if, after spilling reg REGNO for non-groups,
2054 it will still be possible to find a group if we still need one. */
2055
2056 static int
2057 possible_group_p (regno, max_groups)
2058 int regno;
2059 int *max_groups;
2060 {
2061 int i;
2062 int class = (int) NO_REGS;
2063
2064 for (i = 0; i < (int) N_REG_CLASSES; i++)
2065 if (max_groups[i] > 0)
2066 {
2067 class = i;
2068 break;
2069 }
2070
2071 if (class == (int) NO_REGS)
2072 return 1;
2073
2074 /* Consider each pair of consecutive registers. */
2075 for (i = 0; i < FIRST_PSEUDO_REGISTER - 1; i++)
2076 {
2077 /* Ignore pairs that include reg REGNO. */
2078 if (i == regno || i + 1 == regno)
2079 continue;
2080
2081 /* Ignore pairs that are outside the class that needs the group.
2082 ??? Here we fail to handle the case where two different classes
2083 independently need groups. But this never happens with our
2084 current machine descriptions. */
2085 if (! (TEST_HARD_REG_BIT (reg_class_contents[class], i)
2086 && TEST_HARD_REG_BIT (reg_class_contents[class], i + 1)))
2087 continue;
2088
2089 /* A pair of consecutive regs we can still spill does the trick. */
2090 if (spill_reg_order[i] < 0 && spill_reg_order[i + 1] < 0
2091 && ! TEST_HARD_REG_BIT (bad_spill_regs, i)
2092 && ! TEST_HARD_REG_BIT (bad_spill_regs, i + 1))
2093 return 1;
2094
2095 /* A pair of one already spilled and one we can spill does it
2096 provided the one already spilled is not otherwise reserved. */
2097 if (spill_reg_order[i] < 0
2098 && ! TEST_HARD_REG_BIT (bad_spill_regs, i)
2099 && spill_reg_order[i + 1] >= 0
2100 && ! TEST_HARD_REG_BIT (counted_for_groups, i + 1)
2101 && ! TEST_HARD_REG_BIT (counted_for_nongroups, i + 1))
2102 return 1;
2103 if (spill_reg_order[i + 1] < 0
2104 && ! TEST_HARD_REG_BIT (bad_spill_regs, i + 1)
2105 && spill_reg_order[i] >= 0
2106 && ! TEST_HARD_REG_BIT (counted_for_groups, i)
2107 && ! TEST_HARD_REG_BIT (counted_for_nongroups, i))
2108 return 1;
2109 }
2110
2111 return 0;
2112 }
2113 \f
/* Count any groups of CLASS that can be formed from the registers recently
   spilled, and mark them off against the remaining need for such groups.

   GROUP_SIZE and GROUP_MODE are indexed by class and give the size and
   machine mode required for a group of that class; MAX_GROUPS is the
   per-class count of groups still needed, which this function decrements
   for each group found.  Registers counted here are recorded in
   counted_for_groups so they are not counted again.  */

static void
count_possible_groups (group_size, group_mode, max_groups, class)
     int *group_size;
     enum machine_mode *group_mode;
     int *max_groups;
     int class;
{
  HARD_REG_SET new;
  int i, j;

  /* Now find all consecutive groups of spilled registers
     and mark each group off against the need for such groups.
     But don't count them against ordinary need, yet.  */

  if (group_size[class] == 0)
    return;

  CLEAR_HARD_REG_SET (new);

  /* Make a mask of all the regs that are spill regs in class I,
     excluding any already reserved for a group or for non-group use.  */
  for (i = 0; i < n_spills; i++)
    if (TEST_HARD_REG_BIT (reg_class_contents[class], spill_regs[i])
	&& ! TEST_HARD_REG_BIT (counted_for_groups, spill_regs[i])
	&& ! TEST_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]))
      SET_HARD_REG_BIT (new, spill_regs[i]);

  /* Find each consecutive group of them.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER && max_groups[class] > 0; i++)
    if (TEST_HARD_REG_BIT (new, i)
	&& i + group_size[class] <= FIRST_PSEUDO_REGISTER
	&& HARD_REGNO_MODE_OK (i, group_mode[class]))
      {
	/* Check that all group_size[class] regs starting at I are in NEW;
	   J is left at the index of the first reg that is not.  */
	for (j = 1; j < group_size[class]; j++)
	  if (! TEST_HARD_REG_BIT (new, i + j))
	    break;

	if (j == group_size[class])
	  {
	    /* We found a group.  Mark it off against this class's need for
	       groups, and against each superclass too.  */
	    register enum reg_class *p;

	    max_groups[class]--;
	    p = reg_class_superclasses[class];
	    while (*p != LIM_REG_CLASSES)
	      {
		if (group_size [(int) *p] <= group_size [class])
		  max_groups[(int) *p]--;
		p++;
	      }

	    /* Don't count these registers again.  */
	    for (j = 0; j < group_size[class]; j++)
	      SET_HARD_REG_BIT (counted_for_groups, i + j);
	  }

	/* Skip to the last reg in this group.  When i is incremented above,
	   it will then point to the first reg of the next possible group.
	   (If the scan above broke out early, J is the index of the reg
	   missing from NEW, so skipping to I+J is also safe: any group
	   starting before it would have to include that missing reg.)  */
	i += j - 1;
      }
}
2178 \f
2179 /* ALLOCATE_MODE is a register mode that needs to be reloaded. OTHER_MODE is
2180 another mode that needs to be reloaded for the same register class CLASS.
2181 If any reg in CLASS allows ALLOCATE_MODE but not OTHER_MODE, fail.
2182 ALLOCATE_MODE will never be smaller than OTHER_MODE.
2183
2184 This code used to also fail if any reg in CLASS allows OTHER_MODE but not
2185 ALLOCATE_MODE. This test is unnecessary, because we will never try to put
2186 something of mode ALLOCATE_MODE into an OTHER_MODE register. Testing this
2187 causes unnecessary failures on machines requiring alignment of register
2188 groups when the two modes are different sizes, because the larger mode has
2189 more strict alignment rules than the smaller mode. */
2190
2191 static int
2192 modes_equiv_for_class_p (allocate_mode, other_mode, class)
2193 enum machine_mode allocate_mode, other_mode;
2194 enum reg_class class;
2195 {
2196 register int regno;
2197 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2198 {
2199 if (TEST_HARD_REG_BIT (reg_class_contents[(int) class], regno)
2200 && HARD_REGNO_MODE_OK (regno, allocate_mode)
2201 && ! HARD_REGNO_MODE_OK (regno, other_mode))
2202 return 0;
2203 }
2204 return 1;
2205 }
2206
2207 /* Handle the failure to find a register to spill.
2208 INSN should be one of the insns which needed this particular spill reg. */
2209
2210 static void
2211 spill_failure (insn)
2212 rtx insn;
2213 {
2214 if (asm_noperands (PATTERN (insn)) >= 0)
2215 error_for_asm (insn, "`asm' needs too many reloads");
2216 else
2217 fatal_insn ("Unable to find a register to spill.", insn);
2218 }
2219
2220 /* Add a new register to the tables of available spill-registers
2221 (as well as spilling all pseudos allocated to the register).
2222 I is the index of this register in potential_reload_regs.
2223 CLASS is the regclass whose need is being satisfied.
2224 MAX_NEEDS and MAX_NONGROUPS are the vectors of needs,
2225 so that this register can count off against them.
2226 MAX_NONGROUPS is 0 if this register is part of a group.
2227 GLOBAL and DUMPFILE are the same as the args that `reload' got. */
2228
2229 static int
2230 new_spill_reg (i, class, max_needs, max_nongroups, global, dumpfile)
2231 int i;
2232 int class;
2233 int *max_needs;
2234 int *max_nongroups;
2235 int global;
2236 FILE *dumpfile;
2237 {
2238 register enum reg_class *p;
2239 int val;
2240 int regno = potential_reload_regs[i];
2241
2242 if (i >= FIRST_PSEUDO_REGISTER)
2243 abort (); /* Caller failed to find any register. */
2244
2245 if (fixed_regs[regno] || TEST_HARD_REG_BIT (forbidden_regs, regno))
2246 fatal ("fixed or forbidden register was spilled.\n\
2247 This may be due to a compiler bug or to impossible asm\n\
2248 statements or clauses.");
2249
2250 /* Make reg REGNO an additional reload reg. */
2251
2252 potential_reload_regs[i] = -1;
2253 spill_regs[n_spills] = regno;
2254 spill_reg_order[regno] = n_spills;
2255 if (dumpfile)
2256 fprintf (dumpfile, "Spilling reg %d.\n", spill_regs[n_spills]);
2257
2258 /* Clear off the needs we just satisfied. */
2259
2260 max_needs[class]--;
2261 p = reg_class_superclasses[class];
2262 while (*p != LIM_REG_CLASSES)
2263 max_needs[(int) *p++]--;
2264
2265 if (max_nongroups && max_nongroups[class] > 0)
2266 {
2267 SET_HARD_REG_BIT (counted_for_nongroups, regno);
2268 max_nongroups[class]--;
2269 p = reg_class_superclasses[class];
2270 while (*p != LIM_REG_CLASSES)
2271 max_nongroups[(int) *p++]--;
2272 }
2273
2274 /* Spill every pseudo reg that was allocated to this reg
2275 or to something that overlaps this reg. */
2276
2277 val = spill_hard_reg (spill_regs[n_spills], global, dumpfile, 0);
2278
2279 /* If there are some registers still to eliminate and this register
2280 wasn't ever used before, additional stack space may have to be
2281 allocated to store this register. Thus, we may have changed the offset
2282 between the stack and frame pointers, so mark that something has changed.
2283 (If new pseudos were spilled, thus requiring more space, VAL would have
2284 been set non-zero by the call to spill_hard_reg above since additional
2285 reloads may be needed in that case.
2286
2287 One might think that we need only set VAL to 1 if this is a call-used
2288 register. However, the set of registers that must be saved by the
2289 prologue is not identical to the call-used set. For example, the
2290 register used by the call insn for the return PC is a call-used register,
2291 but must be saved by the prologue. */
2292 if (num_eliminable && ! regs_ever_live[spill_regs[n_spills]])
2293 val = 1;
2294
2295 regs_ever_live[spill_regs[n_spills]] = 1;
2296 n_spills++;
2297
2298 return val;
2299 }
2300 \f
2301 /* Delete an unneeded INSN and any previous insns who sole purpose is loading
2302 data that is dead in INSN. */
2303
2304 static void
2305 delete_dead_insn (insn)
2306 rtx insn;
2307 {
2308 rtx prev = prev_real_insn (insn);
2309 rtx prev_dest;
2310
2311 /* If the previous insn sets a register that dies in our insn, delete it
2312 too. */
2313 if (prev && GET_CODE (PATTERN (prev)) == SET
2314 && (prev_dest = SET_DEST (PATTERN (prev)), GET_CODE (prev_dest) == REG)
2315 && reg_mentioned_p (prev_dest, PATTERN (insn))
2316 && find_regno_note (insn, REG_DEAD, REGNO (prev_dest)))
2317 delete_dead_insn (prev);
2318
2319 PUT_CODE (insn, NOTE);
2320 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2321 NOTE_SOURCE_FILE (insn) = 0;
2322 }
2323
/* Modify the home of pseudo-reg I.
   The new home is present in reg_renumber[I].

   FROM_REG may be the hard reg that the pseudo-reg is being spilled from;
   or it may be -1, meaning there is none or it is not relevant.
   This is used so that all pseudos spilled from a given hard reg
   can share one stack slot.  */

static void
alter_reg (i, from_reg)
     register int i;
     int from_reg;
{
  /* When outputting an inline function, this can happen
     for a reg that isn't actually used.  */
  if (regno_reg_rtx[i] == 0)
    return;

  /* If the reg got changed to a MEM at rtl-generation time,
     ignore it.  */
  if (GET_CODE (regno_reg_rtx[i]) != REG)
    return;

  /* Modify the reg-rtx to contain the new hard reg
     number or else to contain its pseudo reg number.  */
  REGNO (regno_reg_rtx[i])
    = reg_renumber[i] >= 0 ? reg_renumber[i] : i;

  /* If we have a pseudo that is needed but has no hard reg or equivalent,
     allocate a stack slot for it.  */

  if (reg_renumber[i] < 0
      && reg_n_refs[i] > 0
      && reg_equiv_constant[i] == 0
      && reg_equiv_memory_loc[i] == 0)
    {
      register rtx x;
      int inherent_size = PSEUDO_REGNO_BYTES (i);
      int total_size = MAX (inherent_size, reg_max_ref_width[i]);
      /* ADJUST accumulates the byte offset from the raw slot address
	 to the address we actually want for this pseudo.  */
      int adjust = 0;

      /* Each pseudo reg has an inherent size which comes from its own mode,
	 and a total size which provides room for paradoxical subregs
	 which refer to the pseudo reg in wider modes.

	 We can use a slot already allocated if it provides both
	 enough inherent space and enough total space.
	 Otherwise, we allocate a new slot, making sure that it has no less
	 inherent space, and no less total space, then the previous slot.  */
      if (from_reg == -1)
	{
	  /* No known place to spill from => no slot to reuse.  */
	  x = assign_stack_local (GET_MODE (regno_reg_rtx[i]), total_size, -1);
	  if (BYTES_BIG_ENDIAN)
	    /* Cancel the big-endian correction done in assign_stack_local.
	       Get the address of the beginning of the slot.
	       This is so we can do a big-endian correction unconditionally
	       below.  */
	    adjust = inherent_size - total_size;

	  RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (regno_reg_rtx[i]);
	}
      /* Reuse a stack slot if possible.  */
      else if (spill_stack_slot[from_reg] != 0
	       && spill_stack_slot_width[from_reg] >= total_size
	       && (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
		   >= inherent_size))
	x = spill_stack_slot[from_reg];
      /* Allocate a bigger slot.  */
      else
	{
	  /* Compute maximum size needed, both for inherent size
	     and for total size.  The widest of the old slot's mode
	     and this pseudo's mode wins, so the shared slot still
	     fits everything previously spilled from FROM_REG.  */
	  enum machine_mode mode = GET_MODE (regno_reg_rtx[i]);
	  rtx stack_slot;
	  if (spill_stack_slot[from_reg])
	    {
	      if (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
		  > inherent_size)
		mode = GET_MODE (spill_stack_slot[from_reg]);
	      if (spill_stack_slot_width[from_reg] > total_size)
		total_size = spill_stack_slot_width[from_reg];
	    }
	  /* Make a slot with that size.  */
	  x = assign_stack_local (mode, total_size, -1);
	  stack_slot = x;
	  if (BYTES_BIG_ENDIAN)
	    {
	      /* Cancel the big-endian correction done in assign_stack_local.
		 Get the address of the beginning of the slot.
		 This is so we can do a big-endian correction unconditionally
		 below.  */
	      adjust = GET_MODE_SIZE (mode) - total_size;
	      if (adjust)
		stack_slot = gen_rtx (MEM, mode_for_size (total_size
							  * BITS_PER_UNIT,
							  MODE_INT, 1),
				      plus_constant (XEXP (x, 0), adjust));
	    }
	  spill_stack_slot[from_reg] = stack_slot;
	  spill_stack_slot_width[from_reg] = total_size;
	}

      /* On a big endian machine, the "address" of the slot
	 is the address of the low part that fits its inherent mode.  */
      if (BYTES_BIG_ENDIAN && inherent_size < total_size)
	adjust += (total_size - inherent_size);

      /* If we have any adjustment to make, or if the stack slot is the
	 wrong mode, make a new stack slot.  */
      if (adjust != 0 || GET_MODE (x) != GET_MODE (regno_reg_rtx[i]))
	{
	  x = gen_rtx (MEM, GET_MODE (regno_reg_rtx[i]),
		       plus_constant (XEXP (x, 0), adjust));
	  RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (regno_reg_rtx[i]);
	}

      /* Save the stack slot for later.  */
      reg_equiv_memory_loc[i] = x;
    }
}
2445
2446 /* Mark the slots in regs_ever_live for the hard regs
2447 used by pseudo-reg number REGNO. */
2448
2449 void
2450 mark_home_live (regno)
2451 int regno;
2452 {
2453 register int i, lim;
2454 i = reg_renumber[regno];
2455 if (i < 0)
2456 return;
2457 lim = i + HARD_REGNO_NREGS (i, PSEUDO_REGNO_MODE (regno));
2458 while (i < lim)
2459 regs_ever_live[i++] = 1;
2460 }
2461
2462 /* Mark the registers used in SCRATCH as being live. */
2463
2464 static void
2465 mark_scratch_live (scratch)
2466 rtx scratch;
2467 {
2468 register int i;
2469 int regno = REGNO (scratch);
2470 int lim = regno + HARD_REGNO_NREGS (regno, GET_MODE (scratch));
2471
2472 for (i = regno; i < lim; i++)
2473 regs_ever_live[i] = 1;
2474 }
2475 \f
/* This function handles the tracking of elimination offsets around branches.

   X is a piece of RTL being scanned.

   INSN is the insn that it came from, if any.

   INITIAL_P is non-zero if we are to set the offset to be the initial
   offset and zero if we are setting the offset of the label to be the
   current offset.  */

static void
set_label_offsets (x, insn, initial_p)
     rtx x;
     rtx insn;
     int initial_p;
{
  enum rtx_code code = GET_CODE (x);
  rtx tem;
  int i;
  struct elim_table *p;

  switch (code)
    {
    case LABEL_REF:
      /* Non-local labels are handled elsewhere; their offsets are
	 always the initial ones.  */
      if (LABEL_REF_NONLOCAL_P (x))
	return;

      x = XEXP (x, 0);

      /* ... fall through ... */

    case CODE_LABEL:
      /* If we know nothing about this label, set the desired offsets.  Note
	 that this sets the offset at a label to be the offset before a label
	 if we don't know anything about the label.  This is not correct for
	 the label after a BARRIER, but is the best guess we can make.  If
	 we guessed wrong, we will suppress an elimination that might have
	 been possible had we been able to guess correctly.  */

      if (! offsets_known_at[CODE_LABEL_NUMBER (x)])
	{
	  for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
	    offsets_at[CODE_LABEL_NUMBER (x)][i]
	      = (initial_p ? reg_eliminate[i].initial_offset
		 : reg_eliminate[i].offset);
	  offsets_known_at[CODE_LABEL_NUMBER (x)] = 1;
	}

      /* Otherwise, if this is the definition of a label and it is
	 preceded by a BARRIER, set our offsets to the known offset of
	 that label.  */

      else if (x == insn
	       && (tem = prev_nonnote_insn (insn)) != 0
	       && GET_CODE (tem) == BARRIER)
	{
	  /* Control only reaches this label by jumping to it, so adopt
	     the offsets recorded for it and recount how many eliminable
	     regs are away from their initial offsets.  */
	  num_not_at_initial_offset = 0;
	  for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
	    {
	      reg_eliminate[i].offset = reg_eliminate[i].previous_offset
		= offsets_at[CODE_LABEL_NUMBER (x)][i];
	      if (reg_eliminate[i].can_eliminate
		  && (reg_eliminate[i].offset
		      != reg_eliminate[i].initial_offset))
		num_not_at_initial_offset++;
	    }
	}

      else
	/* If neither of the above cases is true, compare each offset
	   with those previously recorded and suppress any eliminations
	   where the offsets disagree.  */

	for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
	  if (offsets_at[CODE_LABEL_NUMBER (x)][i]
	      != (initial_p ? reg_eliminate[i].initial_offset
		  : reg_eliminate[i].offset))
	    reg_eliminate[i].can_eliminate = 0;

      return;

    case JUMP_INSN:
      set_label_offsets (PATTERN (insn), insn, initial_p);

      /* ... fall through ... */

    case INSN:
    case CALL_INSN:
      /* Any labels mentioned in REG_LABEL notes can be branched to indirectly
	 and hence must have all eliminations at their initial offsets.  */
      for (tem = REG_NOTES (x); tem; tem = XEXP (tem, 1))
	if (REG_NOTE_KIND (tem) == REG_LABEL)
	  set_label_offsets (XEXP (tem, 0), insn, 1);
      return;

    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      /* Each of the labels in the address vector must be at their initial
	 offsets.  We want the first field for ADDR_VEC and the second
	 field for ADDR_DIFF_VEC.  */

      for (i = 0; i < XVECLEN (x, code == ADDR_DIFF_VEC); i++)
	set_label_offsets (XVECEXP (x, code == ADDR_DIFF_VEC, i),
			   insn, initial_p);
      return;

    case SET:
      /* We only care about setting PC.  If the source is not RETURN,
	 IF_THEN_ELSE, or a label, disable any eliminations not at
	 their initial offsets.  Similarly if any arm of the IF_THEN_ELSE
	 isn't one of those possibilities.  For branches to a label,
	 call ourselves recursively.

	 Note that this can disable elimination unnecessarily when we have
	 a non-local goto since it will look like a non-constant jump to
	 someplace in the current function.  This isn't a significant
	 problem since such jumps will normally be when all elimination
	 pairs are back to their initial offsets.  */

      if (SET_DEST (x) != pc_rtx)
	return;

      switch (GET_CODE (SET_SRC (x)))
	{
	case PC:
	case RETURN:
	  return;

	case LABEL_REF:
	  set_label_offsets (XEXP (SET_SRC (x), 0), insn, initial_p);
	  return;

	case IF_THEN_ELSE:
	  /* Handle each arm that is a label; a PC or RETURN arm is
	     harmless; anything else falls out to the code below.  */
	  tem = XEXP (SET_SRC (x), 1);
	  if (GET_CODE (tem) == LABEL_REF)
	    set_label_offsets (XEXP (tem, 0), insn, initial_p);
	  else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
	    break;

	  tem = XEXP (SET_SRC (x), 2);
	  if (GET_CODE (tem) == LABEL_REF)
	    set_label_offsets (XEXP (tem, 0), insn, initial_p);
	  else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
	    break;
	  return;
	}

      /* If we reach here, all eliminations must be at their initial
	 offset because we are doing a jump to a variable address.  */
      for (p = reg_eliminate; p < &reg_eliminate[NUM_ELIMINABLE_REGS]; p++)
	if (p->offset != p->initial_offset)
	  p->can_eliminate = 0;
    }
}
2630 \f
2631 /* Used for communication between the next two function to properly share
2632 the vector for an ASM_OPERANDS. */
2633
2634 static struct rtvec_def *old_asm_operands_vec, *new_asm_operands_vec;
2635
2636 /* Scan X and replace any eliminable registers (such as fp) with a
2637 replacement (such as sp), plus an offset.
2638
2639 MEM_MODE is the mode of an enclosing MEM. We need this to know how
2640 much to adjust a register for, e.g., PRE_DEC. Also, if we are inside a
2641 MEM, we are allowed to replace a sum of a register and the constant zero
2642 with the register, which we cannot do outside a MEM. In addition, we need
2643 to record the fact that a register is referenced outside a MEM.
2644
2645 If INSN is an insn, it is the insn containing X. If we replace a REG
2646 in a SET_DEST with an equivalent MEM and INSN is non-zero, write a
2647 CLOBBER of the pseudo after INSN so find_equiv_regs will know that
2648 that the REG is being modified.
2649
2650 Alternatively, INSN may be a note (an EXPR_LIST or INSN_LIST).
2651 That's used when we eliminate in expressions stored in notes.
2652 This means, do not set ref_outside_mem even if the reference
2653 is outside of MEMs.
2654
2655 If we see a modification to a register we know about, take the
2656 appropriate action (see case SET, below).
2657
2658 REG_EQUIV_MEM and REG_EQUIV_ADDRESS contain address that have had
2659 replacements done assuming all offsets are at their initial values. If
2660 they are not, or if REG_EQUIV_ADDRESS is nonzero for a pseudo we
2661 encounter, return the actual location so that find_reloads will do
2662 the proper thing. */
2663
rtx
eliminate_regs (x, mem_mode, insn)
     rtx x;
     enum machine_mode mem_mode;
     rtx insn;
{
  enum rtx_code code = GET_CODE (x);
  struct elim_table *ep;
  int regno;
  rtx new;
  int i, j;
  char *fmt;
  int copied = 0;		/* Set once X itself has been copied, so the
				   generic traversal below copies at most once.  */

  switch (code)
    {
    /* Codes with no registers inside: return them unchanged.  */
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case ASM_INPUT:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
    case RETURN:
      return x;

    case REG:
      regno = REGNO (x);

      /* First handle the case where we encounter a bare register that
	 is eliminable.  Replace it with a PLUS.  */
      if (regno < FIRST_PSEUDO_REGISTER)
	{
	  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
	       ep++)
	    if (ep->from_rtx == x && ep->can_eliminate)
	      {
		if (! mem_mode
		    /* Refs inside notes don't count for this purpose.  */
		    && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
					|| GET_CODE (insn) == INSN_LIST)))
		  ep->ref_outside_mem = 1;
		return plus_constant (ep->to_rtx, ep->previous_offset);
	      }

	}
      else if (reg_equiv_memory_loc && reg_equiv_memory_loc[regno]
	       && (reg_equiv_address[regno] || num_not_at_initial_offset))
	{
	  /* In this case, find_reloads would attempt to either use an
	     incorrect address (if something is not at its initial offset)
	     or substitute a replaced address into an insn (which loses
	     if the offset is changed by some later action).  So we simply
	     return the replaced stack slot (assuming it is changed by
	     elimination) and ignore the fact that this is actually a
	     reference to the pseudo.  Ensure we make a copy of the
	     address in case it is shared.  */
	  new = eliminate_regs (reg_equiv_memory_loc[regno],
				mem_mode, insn);
	  if (new != reg_equiv_memory_loc[regno])
	    {
	      cannot_omit_stores[regno] = 1;
	      return copy_rtx (new);
	    }
	}
      return x;

    case PLUS:
      /* If this is the sum of an eliminable register and a constant, rework
	 the sum.  */
      if (GET_CODE (XEXP (x, 0)) == REG
	  && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
	  && CONSTANT_P (XEXP (x, 1)))
	{
	  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
	       ep++)
	    if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
	      {
		if (! mem_mode
		    /* Refs inside notes don't count for this purpose.  */
		    && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
					|| GET_CODE (insn) == INSN_LIST)))
		  ep->ref_outside_mem = 1;

		/* The only time we want to replace a PLUS with a REG (this
		   occurs when the constant operand of the PLUS is the negative
		   of the offset) is when we are inside a MEM.  We won't want
		   to do so at other times because that would change the
		   structure of the insn in a way that reload can't handle.
		   We special-case the commonest situation in
		   eliminate_regs_in_insn, so just replace a PLUS with a
		   PLUS here, unless inside a MEM.  */
		if (mem_mode != 0 && GET_CODE (XEXP (x, 1)) == CONST_INT
		    && INTVAL (XEXP (x, 1)) == - ep->previous_offset)
		  return ep->to_rtx;
		else
		  return gen_rtx (PLUS, Pmode, ep->to_rtx,
				  plus_constant (XEXP (x, 1),
						 ep->previous_offset));
	      }

	  /* If the register is not eliminable, we are done since the other
	     operand is a constant.  */
	  return x;
	}

      /* If this is part of an address, we want to bring any constant to the
	 outermost PLUS.  We will do this by doing register replacement in
	 our operands and seeing if a constant shows up in one of them.

	 We assume here this is part of an address (or a "load address" insn)
	 since an eliminable register is not likely to appear in any other
	 context.

	 If we have (plus (eliminable) (reg)), we want to produce
	 (plus (plus (replacement) (reg) (const))).  If this was part of a
	 normal add insn, (plus (replacement) (reg)) will be pushed as a
	 reload.  This is the desired action.  */

      {
	rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, insn);
	rtx new1 = eliminate_regs (XEXP (x, 1), mem_mode, insn);

	if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
	  {
	    /* If one side is a PLUS and the other side is a pseudo that
	       didn't get a hard register but has a reg_equiv_constant,
	       we must replace the constant here since it may no longer
	       be in the position of any operand.  */
	    if (GET_CODE (new0) == PLUS && GET_CODE (new1) == REG
		&& REGNO (new1) >= FIRST_PSEUDO_REGISTER
		&& reg_renumber[REGNO (new1)] < 0
		&& reg_equiv_constant != 0
		&& reg_equiv_constant[REGNO (new1)] != 0)
	      new1 = reg_equiv_constant[REGNO (new1)];
	    else if (GET_CODE (new1) == PLUS && GET_CODE (new0) == REG
		     && REGNO (new0) >= FIRST_PSEUDO_REGISTER
		     && reg_renumber[REGNO (new0)] < 0
		     /* NOTE(review): unlike the mirror branch above, this
			branch does not test reg_equiv_constant != 0 before
			indexing it -- confirm the array is always allocated
			when this point can be reached.  */
		     && reg_equiv_constant[REGNO (new0)] != 0)
	      new0 = reg_equiv_constant[REGNO (new0)];

	    new = form_sum (new0, new1);

	    /* As above, if we are not inside a MEM we do not want to
	       turn a PLUS into something else.  We might try to do so here
	       for an addition of 0 if we aren't optimizing.  */
	    if (! mem_mode && GET_CODE (new) != PLUS)
	      return gen_rtx (PLUS, GET_MODE (x), new, const0_rtx);
	    else
	      return new;
	  }
      }
      return x;

    case MULT:
      /* If this is the product of an eliminable register and a
	 constant, apply the distribute law and move the constant out
	 so that we have (plus (mult ..) ..).  This is needed in order
	 to keep load-address insns valid.  This case is pathological.
	 We ignore the possibility of overflow here.  */
      if (GET_CODE (XEXP (x, 0)) == REG
	  && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
	  && GET_CODE (XEXP (x, 1)) == CONST_INT)
	for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
	     ep++)
	  if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
	    {
	      if (! mem_mode
		  /* Refs inside notes don't count for this purpose.  */
		  && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
				      || GET_CODE (insn) == INSN_LIST)))
		ep->ref_outside_mem = 1;

	      return
		plus_constant (gen_rtx (MULT, Pmode, ep->to_rtx, XEXP (x, 1)),
			       ep->previous_offset * INTVAL (XEXP (x, 1)));
	    }

      /* ... fall through ...  */

    /* Generic binary (and unary-with-null-second-operand) codes:
       eliminate in both operands and rebuild only if something changed.  */
    case CALL:
    case COMPARE:
    case MINUS:
    case DIV: case UDIV:
    case MOD: case UMOD:
    case AND: case IOR: case XOR:
    case ROTATERT: case ROTATE:
    case ASHIFTRT: case LSHIFTRT: case ASHIFT:
    case NE: case EQ:
    case GE: case GT: case GEU: case GTU:
    case LE: case LT: case LEU: case LTU:
      {
	rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, insn);
	rtx new1
	  = XEXP (x, 1) ? eliminate_regs (XEXP (x, 1), mem_mode, insn) : 0;

	if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
	  return gen_rtx (code, GET_MODE (x), new0, new1);
      }
      return x;

    case EXPR_LIST:
      /* If we have something in XEXP (x, 0), the usual case, eliminate it.  */
      if (XEXP (x, 0))
	{
	  new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
	  if (new != XEXP (x, 0))
	    x = gen_rtx (EXPR_LIST, REG_NOTE_KIND (x), new, XEXP (x, 1));
	}

      /* ... fall through ...  */

    case INSN_LIST:
      /* Now do eliminations in the rest of the chain.  If this was
	 an EXPR_LIST, this might result in allocating more memory than is
	 strictly needed, but it simplifies the code.  */
      if (XEXP (x, 1))
	{
	  new = eliminate_regs (XEXP (x, 1), mem_mode, insn);
	  if (new != XEXP (x, 1))
	    return gen_rtx (GET_CODE (x), GET_MODE (x), XEXP (x, 0), new);
	}
      return x;

    case PRE_INC:
    case POST_INC:
    case PRE_DEC:
    case POST_DEC:
      /* An auto-increment of a replacement register changes the offset of
	 every elimination whose TO register it is; adjust the offsets.  */
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
	if (ep->to_rtx == XEXP (x, 0))
	  {
	    int size = GET_MODE_SIZE (mem_mode);

	    /* If more bytes than MEM_MODE are pushed, account for them.  */
#ifdef PUSH_ROUNDING
	    if (ep->to_rtx == stack_pointer_rtx)
	      size = PUSH_ROUNDING (size);
#endif
	    if (code == PRE_DEC || code == POST_DEC)
	      ep->offset += size;
	    else
	      ep->offset -= size;
	  }

      /* Fall through to generic unary operation case.  */
    case STRICT_LOW_PART:
    case NEG:          case NOT:
    case SIGN_EXTEND:  case ZERO_EXTEND:
    case TRUNCATE:     case FLOAT_EXTEND: case FLOAT_TRUNCATE:
    case FLOAT:        case FIX:
    case UNSIGNED_FIX: case UNSIGNED_FLOAT:
    case ABS:
    case SQRT:
    case FFS:
      new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
      if (new != XEXP (x, 0))
	return gen_rtx (code, GET_MODE (x), new);
      return x;

    case SUBREG:
      /* Similar to above processing, but preserve SUBREG_WORD.
	 Convert (subreg (mem)) to (mem) if not paradoxical.
	 Also, if we have a non-paradoxical (subreg (pseudo)) and the
	 pseudo didn't get a hard reg, we must replace this with the
	 eliminated version of the memory location because push_reloads
	 may do the replacement in certain circumstances.  */
      if (GET_CODE (SUBREG_REG (x)) == REG
	  && (GET_MODE_SIZE (GET_MODE (x))
	      <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
	  && reg_equiv_memory_loc != 0
	  && reg_equiv_memory_loc[REGNO (SUBREG_REG (x))] != 0)
	{
	  new = eliminate_regs (reg_equiv_memory_loc[REGNO (SUBREG_REG (x))],
				mem_mode, insn);

	  /* If we didn't change anything, we must retain the pseudo.  */
	  if (new == reg_equiv_memory_loc[REGNO (SUBREG_REG (x))])
	    new = SUBREG_REG (x);
	  else
	    {
	      /* Otherwise, ensure NEW isn't shared in case we have to reload
		 it.  */
	      new = copy_rtx (new);

	      /* In this case, we must show that the pseudo is used in this
		 insn so that delete_output_reload will do the right thing.  */
	      if (insn != 0 && GET_CODE (insn) != EXPR_LIST
		  && GET_CODE (insn) != INSN_LIST)
		emit_insn_before (gen_rtx (USE, VOIDmode, SUBREG_REG (x)),
				  insn);
	    }
	}
      else
	new = eliminate_regs (SUBREG_REG (x), mem_mode, insn);

      if (new != XEXP (x, 0))
	{
	  if (GET_CODE (new) == MEM
	      && (GET_MODE_SIZE (GET_MODE (x))
		  <= GET_MODE_SIZE (GET_MODE (new)))
#ifdef LOAD_EXTEND_OP
	      /* On these machines we will be reloading what is
		 inside the SUBREG if it originally was a pseudo and
		 the inner and outer modes are both a word or
		 smaller.  So leave the SUBREG then.  */
	      && ! (GET_CODE (SUBREG_REG (x)) == REG
		    && GET_MODE_SIZE (GET_MODE (x)) <= UNITS_PER_WORD
		    && GET_MODE_SIZE (GET_MODE (new)) <= UNITS_PER_WORD
		    && (GET_MODE_SIZE (GET_MODE (x))
			> GET_MODE_SIZE (GET_MODE (new)))
		    && INTEGRAL_MODE_P (GET_MODE (new))
		    && LOAD_EXTEND_OP (GET_MODE (new)) != NIL)
#endif
	      )
	    {
	      /* Fold the SUBREG into the MEM by adjusting the address.  */
	      int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
	      enum machine_mode mode = GET_MODE (x);

	      if (BYTES_BIG_ENDIAN)
		offset += (MIN (UNITS_PER_WORD,
				GET_MODE_SIZE (GET_MODE (new)))
			   - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));

	      PUT_MODE (new, mode);
	      XEXP (new, 0) = plus_constant (XEXP (new, 0), offset);
	      return new;
	    }
	  else
	    return gen_rtx (SUBREG, GET_MODE (x), new, SUBREG_WORD (x));
	}

      return x;

    case USE:
      /* If using a register that is the source of an eliminate we still
	 think can be performed, note it cannot be performed since we don't
	 know how this register is used.  */
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
	if (ep->from_rtx == XEXP (x, 0))
	  ep->can_eliminate = 0;

      new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
      if (new != XEXP (x, 0))
	return gen_rtx (code, GET_MODE (x), new);
      return x;

    case CLOBBER:
      /* If clobbering a register that is the replacement register for an
	 elimination we still think can be performed, note that it cannot
	 be performed.  Otherwise, we need not be concerned about it.  */
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
	if (ep->to_rtx == XEXP (x, 0))
	  ep->can_eliminate = 0;

      new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
      if (new != XEXP (x, 0))
	return gen_rtx (code, GET_MODE (x), new);
      return x;

    case ASM_OPERANDS:
      {
	rtx *temp_vec;
	/* Properly handle sharing input and constraint vectors.  */
	if (ASM_OPERANDS_INPUT_VEC (x) != old_asm_operands_vec)
	  {
	    /* When we come to a new vector not seen before,
	       scan all its elements; keep the old vector if none
	       of them changes; otherwise, make a copy.  */
	    old_asm_operands_vec = ASM_OPERANDS_INPUT_VEC (x);
	    temp_vec = (rtx *) alloca (XVECLEN (x, 3) * sizeof (rtx));
	    for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
	      temp_vec[i] = eliminate_regs (ASM_OPERANDS_INPUT (x, i),
					    mem_mode, insn);

	    for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
	      if (temp_vec[i] != ASM_OPERANDS_INPUT (x, i))
		break;

	    if (i == ASM_OPERANDS_INPUT_LENGTH (x))
	      new_asm_operands_vec = old_asm_operands_vec;
	    else
	      new_asm_operands_vec
		= gen_rtvec_v (ASM_OPERANDS_INPUT_LENGTH (x), temp_vec);
	  }

	/* If we had to copy the vector, copy the entire ASM_OPERANDS.  */
	if (new_asm_operands_vec == old_asm_operands_vec)
	  return x;

	new = gen_rtx (ASM_OPERANDS, VOIDmode, ASM_OPERANDS_TEMPLATE (x),
		       ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
		       ASM_OPERANDS_OUTPUT_IDX (x), new_asm_operands_vec,
		       ASM_OPERANDS_INPUT_CONSTRAINT_VEC (x),
		       ASM_OPERANDS_SOURCE_FILE (x),
		       ASM_OPERANDS_SOURCE_LINE (x));
	new->volatil = x->volatil;
	return new;
      }

    case SET:
      /* Check for setting a register that we know about.  */
      if (GET_CODE (SET_DEST (x)) == REG)
	{
	  /* See if this is setting the replacement register for an
	     elimination.

	     If DEST is the hard frame pointer, we do nothing because we
	     assume that all assignments to the frame pointer are for
	     non-local gotos and are being done at a time when they are valid
	     and do not disturb anything else.  Some machines want to
	     eliminate a fake argument pointer (or even a fake frame pointer)
	     with either the real frame or the stack pointer.  Assignments to
	     the hard frame pointer must not prevent this elimination.  */

	  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
	       ep++)
	    if (ep->to_rtx == SET_DEST (x)
		&& SET_DEST (x) != hard_frame_pointer_rtx)
	      {
		/* If it is being incremented, adjust the offset.  Otherwise,
		   this elimination can't be done.  */
		rtx src = SET_SRC (x);

		if (GET_CODE (src) == PLUS
		    && XEXP (src, 0) == SET_DEST (x)
		    && GET_CODE (XEXP (src, 1)) == CONST_INT)
		  ep->offset -= INTVAL (XEXP (src, 1));
		else
		  ep->can_eliminate = 0;
	      }

	  /* Now check to see we are assigning to a register that can be
	     eliminated.  If so, it must be as part of a PARALLEL, since we
	     will not have been called if this is a single SET.  So indicate
	     that we can no longer eliminate this reg.  */
	  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
	       ep++)
	    if (ep->from_rtx == SET_DEST (x) && ep->can_eliminate)
	      ep->can_eliminate = 0;
	}

      /* Now avoid the loop below in this common case.  */
      {
	rtx new0 = eliminate_regs (SET_DEST (x), 0, insn);
	rtx new1 = eliminate_regs (SET_SRC (x), 0, insn);

	/* If SET_DEST changed from a REG to a MEM and INSN is an insn,
	   write a CLOBBER insn.  */
	if (GET_CODE (SET_DEST (x)) == REG && GET_CODE (new0) == MEM
	    && insn != 0 && GET_CODE (insn) != EXPR_LIST
	    && GET_CODE (insn) != INSN_LIST)
	  emit_insn_after (gen_rtx (CLOBBER, VOIDmode, SET_DEST (x)), insn);

	if (new0 != SET_DEST (x) || new1 != SET_SRC (x))
	  return gen_rtx (SET, VOIDmode, new0, new1);
      }

      return x;

    case MEM:
      /* Our only special processing is to pass the mode of the MEM to our
	 recursive call and copy the flags.  While we are here, handle this
	 case more efficiently.  */
      new = eliminate_regs (XEXP (x, 0), GET_MODE (x), insn);
      if (new != XEXP (x, 0))
	{
	  new = gen_rtx (MEM, GET_MODE (x), new);
	  new->volatil = x->volatil;
	  new->unchanging = x->unchanging;
	  new->in_struct = x->in_struct;
	  return new;
	}
      else
	return x;
    }

  /* Process each of our operands recursively.  If any have changed, make a
     copy of the rtx.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
    {
      if (*fmt == 'e')
	{
	  new = eliminate_regs (XEXP (x, i), mem_mode, insn);
	  if (new != XEXP (x, i) && ! copied)
	    {
	      /* Shallow-copy X once, so we can store the new operand.  */
	      rtx new_x = rtx_alloc (code);
	      bcopy ((char *) x, (char *) new_x,
		     (sizeof (*new_x) - sizeof (new_x->fld)
		      + sizeof (new_x->fld[0]) * GET_RTX_LENGTH (code)));
	      x = new_x;
	      copied = 1;
	    }
	  XEXP (x, i) = new;
	}
      else if (*fmt == 'E')
	{
	  int copied_vec = 0;	/* Set once this vector has been copied.  */
	  for (j = 0; j < XVECLEN (x, i); j++)
	    {
	      new = eliminate_regs (XVECEXP (x, i, j), mem_mode, insn);
	      if (new != XVECEXP (x, i, j) && ! copied_vec)
		{
		  rtvec new_v = gen_rtvec_v (XVECLEN (x, i),
					     &XVECEXP (x, i, 0));
		  if (! copied)
		    {
		      rtx new_x = rtx_alloc (code);
		      bcopy ((char *) x, (char *) new_x,
			     (sizeof (*new_x) - sizeof (new_x->fld)
			      + (sizeof (new_x->fld[0])
				 * GET_RTX_LENGTH (code))));
		      x = new_x;
		      copied = 1;
		    }
		  XVEC (x, i) = new_v;
		  copied_vec = 1;
		}
	      XVECEXP (x, i, j) = new;
	    }
	}
    }

  return x;
}
3192 \f
3193 /* Scan INSN and eliminate all eliminable registers in it.
3194
3195 If REPLACE is nonzero, do the replacement destructively. Also
   delete the insn as dead if it is setting an eliminable register.
3197
3198 If REPLACE is zero, do all our allocations in reload_obstack.
3199
3200 If no eliminations were done and this insn doesn't require any elimination
3201 processing (these are not identical conditions: it might be updating sp,
3202 but not referencing fp; this needs to be seen during reload_as_needed so
3203 that the offset between fp and sp can be taken into consideration), zero
3204 is returned. Otherwise, 1 is returned. */
3205
static int
eliminate_regs_in_insn (insn, replace)
     rtx insn;
     int replace;
{
  rtx old_body = PATTERN (insn);
  rtx old_set = single_set (insn);
  rtx new_body;
  int val = 0;			/* Becomes 1 if anything changed.  */
  struct elim_table *ep;

  /* When not replacing destructively, allocate any new RTL in
     reload_obstack so it can be discarded later.  */
  if (! replace)
    push_obstacks (&reload_obstack, &reload_obstack);

  if (old_set != 0 && GET_CODE (SET_DEST (old_set)) == REG
      && REGNO (SET_DEST (old_set)) < FIRST_PSEUDO_REGISTER)
    {
      /* Check for setting an eliminable register.  */
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
	if (ep->from_rtx == SET_DEST (old_set) && ep->can_eliminate)
	  {
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
	    /* If this is setting the frame pointer register to the
	       hardware frame pointer register and this is an elimination
	       that will be done (tested above), this insn is really
	       adjusting the frame pointer downward to compensate for
	       the adjustment done before a nonlocal goto.  */
	    if (ep->from == FRAME_POINTER_REGNUM
		&& ep->to == HARD_FRAME_POINTER_REGNUM)
	      {
		rtx src = SET_SRC (old_set);
		int offset, ok = 0;

		if (src == ep->to_rtx)
		  offset = 0, ok = 1;
		else if (GET_CODE (src) == PLUS
			 && GET_CODE (XEXP (src, 0)) == CONST_INT)
		  offset = INTVAL (XEXP (src, 0)), ok = 1;

		if (ok)
		  {
		    if (replace)
		      {
			/* Note: this SRC intentionally shadows the
			   outer one declared above.  */
			rtx src
			  = plus_constant (ep->to_rtx, offset - ep->offset);

			/* First see if this insn remains valid when we
			   make the change.  If not, keep the INSN_CODE
			   the same and let reload fix it up.  */
			validate_change (insn, &SET_SRC (old_set), src, 1);
			validate_change (insn, &SET_DEST (old_set),
					 ep->to_rtx, 1);
			if (! apply_change_group ())
			  {
			    SET_SRC (old_set) = src;
			    SET_DEST (old_set) = ep->to_rtx;
			  }
		      }

		    val = 1;
		    goto done;
		  }
	      }
#endif

	    /* In this case this insn isn't serving a useful purpose.  We
	       will delete it in reload_as_needed once we know that this
	       elimination is, in fact, being done.

	       If REPLACE isn't set, we can't delete this insn, but needn't
	       process it since it won't be used unless something changes.  */
	    if (replace)
	      delete_dead_insn (insn);
	    val = 1;
	    goto done;
	  }

      /* Check for (set (reg) (plus (reg from) (offset))) where the offset
	 in the insn is the negative of the offset in FROM.  Substitute
	 (set (reg) (reg to)) for the insn and change its code.

	 We have to do this here, rather than in eliminate_regs, so that we
	 can change the insn code.  */

      if (GET_CODE (SET_SRC (old_set)) == PLUS
	  && GET_CODE (XEXP (SET_SRC (old_set), 0)) == REG
	  && GET_CODE (XEXP (SET_SRC (old_set), 1)) == CONST_INT)
	for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
	     ep++)
	  if (ep->from_rtx == XEXP (SET_SRC (old_set), 0)
	      && ep->can_eliminate)
	    {
	      /* We must stop at the first elimination that will be used.
		 If this one would replace the PLUS with a REG, do it
		 now.  Otherwise, quit the loop and let eliminate_regs
		 do its normal replacement.  */
	      if (ep->offset == - INTVAL (XEXP (SET_SRC (old_set), 1)))
		{
		  /* We assume here that we don't need a PARALLEL of
		     any CLOBBERs for this assignment.  There's not
		     much we can do if we do need it.  */
		  PATTERN (insn) = gen_rtx (SET, VOIDmode,
					    SET_DEST (old_set), ep->to_rtx);
		  INSN_CODE (insn) = -1;
		  val = 1;
		  goto done;
		}

	      break;
	    }
    }

  old_asm_operands_vec = 0;

  /* Replace the body of this insn with a substituted form.  If we changed
     something, return non-zero.

     If we are replacing a body that was a (set X (plus Y Z)), try to
     re-recognize the insn.  We do this in case we had a simple addition
     but now can do this as a load-address.  This saves an insn in this
     common case.  */

  new_body = eliminate_regs (old_body, 0, replace ? insn : NULL_RTX);
  if (new_body != old_body)
    {
      /* If we aren't replacing things permanently and we changed something,
	 make another copy to ensure that all the RTL is new.  Otherwise
	 things can go wrong if find_reloads swaps commutative operands
	 and one is inside RTL that has been copied while the other is not.  */

      /* Don't copy an asm_operands because (1) there's no need and (2)
	 copy_rtx can't do it properly when there are multiple outputs.  */
      if (! replace && asm_noperands (old_body) < 0)
	new_body = copy_rtx (new_body);

      /* If we had a move insn but now we don't, rerecognize it.  This will
	 cause spurious re-recognition if the old move had a PARALLEL since
	 the new one still will, but we can't call single_set without
	 having put NEW_BODY into the insn and the re-recognition won't
	 hurt in this rare case.  */
      if (old_set != 0
	  && ((GET_CODE (SET_SRC (old_set)) == REG
	       && (GET_CODE (new_body) != SET
		   || GET_CODE (SET_SRC (new_body)) != REG))
	      /* If this was a load from or store to memory, compare
		 the MEM in recog_operand to the one in the insn.  If they
		 are not equal, then rerecognize the insn.  */
	      || (old_set != 0
		  && ((GET_CODE (SET_SRC (old_set)) == MEM
		       && SET_SRC (old_set) != recog_operand[1])
		      || (GET_CODE (SET_DEST (old_set)) == MEM
			  && SET_DEST (old_set) != recog_operand[0])))
	      /* If this was an add insn before, rerecognize.  */
	      || GET_CODE (SET_SRC (old_set)) == PLUS))
	{
	  if (! validate_change (insn, &PATTERN (insn), new_body, 0))
	    /* If recognition fails, store the new body anyway.
	       It's normal to have recognition failures here
	       due to bizarre memory addresses; reloading will fix them.  */
	    PATTERN (insn) = new_body;
	}
      else
	PATTERN (insn) = new_body;

      val = 1;
    }

  /* Loop through all elimination pairs.  See if any have changed and
     recalculate the number not at initial offset.

     Compute the maximum offset (minimum offset if the stack does not
     grow downward) for each elimination pair.

     We also detect cases where register elimination cannot be done,
     namely, if a register would be both changed and referenced outside a MEM
     in the resulting insn since such an insn is often undefined and, even if
     not, we cannot know what meaning will be given to it.  Note that it is
     valid to have a register used in an address in an insn that changes it
     (presumably with a pre- or post-increment or decrement).

     If anything changes, return nonzero.  */

  num_not_at_initial_offset = 0;
  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
    {
      if (ep->previous_offset != ep->offset && ep->ref_outside_mem)
	ep->can_eliminate = 0;

      ep->ref_outside_mem = 0;

      if (ep->previous_offset != ep->offset)
	val = 1;

      ep->previous_offset = ep->offset;
      if (ep->can_eliminate && ep->offset != ep->initial_offset)
	num_not_at_initial_offset++;

#ifdef STACK_GROWS_DOWNWARD
      ep->max_offset = MAX (ep->max_offset, ep->offset);
#else
      ep->max_offset = MIN (ep->max_offset, ep->offset);
#endif
    }

 done:
  /* If we changed something, perform elimination in REG_NOTES.  This is
     needed even when REPLACE is zero because a REG_DEAD note might refer
     to a register that we eliminate and could cause a different number
     of spill registers to be needed in the final reload pass than in
     the pre-passes.  Passing the note list itself as the INSN argument
     tells eliminate_regs it is working inside notes.  */
  if (val && REG_NOTES (insn) != 0)
    REG_NOTES (insn) = eliminate_regs (REG_NOTES (insn), 0, REG_NOTES (insn));

  if (! replace)
    pop_obstacks ();

  return val;
}
3424
3425 /* Given X, a SET or CLOBBER of DEST, if DEST is the target of a register
3426 replacement we currently believe is valid, mark it as not eliminable if X
3427 modifies DEST in any way other than by adding a constant integer to it.
3428
3429 If DEST is the frame pointer, we do nothing because we assume that
3430 all assignments to the hard frame pointer are nonlocal gotos and are being
3431 done at a time when they are valid and do not disturb anything else.
3432 Some machines want to eliminate a fake argument pointer with either the
3433 frame or stack pointer. Assignments to the hard frame pointer must not
3434 prevent this elimination.
3435
3436 Called via note_stores from reload before starting its passes to scan
3437 the insns of the function. */
3438
3439 static void
3440 mark_not_eliminable (dest, x)
3441 rtx dest;
3442 rtx x;
3443 {
3444 register int i;
3445
3446 /* A SUBREG of a hard register here is just changing its mode. We should
3447 not see a SUBREG of an eliminable hard register, but check just in
3448 case. */
3449 if (GET_CODE (dest) == SUBREG)
3450 dest = SUBREG_REG (dest);
3451
3452 if (dest == hard_frame_pointer_rtx)
3453 return;
3454
3455 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3456 if (reg_eliminate[i].can_eliminate && dest == reg_eliminate[i].to_rtx
3457 && (GET_CODE (x) != SET
3458 || GET_CODE (SET_SRC (x)) != PLUS
3459 || XEXP (SET_SRC (x), 0) != dest
3460 || GET_CODE (XEXP (SET_SRC (x), 1)) != CONST_INT))
3461 {
3462 reg_eliminate[i].can_eliminate_previous
3463 = reg_eliminate[i].can_eliminate = 0;
3464 num_eliminable--;
3465 }
3466 }
3467 \f
3468 /* Kick all pseudos out of hard register REGNO.
3469 If GLOBAL is nonzero, try to find someplace else to put them.
3470 If DUMPFILE is nonzero, log actions taken on that file.
3471
3472 If CANT_ELIMINATE is nonzero, it means that we are doing this spill
   because we found we can't eliminate some register.  In that case, no pseudos
3474 are allowed to be in the register, even if they are only in a block that
3475 doesn't require spill registers, unlike the case when we are spilling this
3476 hard reg to produce another spill register.
3477
3478 Return nonzero if any pseudos needed to be kicked out. */
3479
static int
spill_hard_reg (regno, global, dumpfile, cant_eliminate)
     register int regno;
     int global;
     FILE *dumpfile;
     int cant_eliminate;
{
  enum reg_class class = REGNO_REG_CLASS (regno);
  int something_changed = 0;	/* Becomes 1 if any pseudo or SCRATCH
				   was kicked out of REGNO.  */
  register int i;

  /* REGNO may no longer be used for reload registers.  */
  SET_HARD_REG_BIT (forbidden_regs, regno);

  /* A register needed because some elimination failed is treated as
     live everywhere.  */
  if (cant_eliminate)
    regs_ever_live[regno] = 1;

  /* Spill every pseudo reg that was allocated to this reg
     or to something that overlaps this reg.  */

  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
    if (reg_renumber[i] >= 0
	&& reg_renumber[i] <= regno
	/* Pseudo I overlaps REGNO if its first hard reg is at or below
	   REGNO (tested above) and its last hard reg is above REGNO.  */
	&& (reg_renumber[i]
	    + HARD_REGNO_NREGS (reg_renumber[i],
				PSEUDO_REGNO_MODE (i))
	    > regno))
      {
	/* If this register belongs solely to a basic block which needed no
	   spilling of any class that this register is contained in,
	   leave it be, unless we are spilling this register because
	   it was a hard register that can't be eliminated.  */

	if (! cant_eliminate
	    && basic_block_needs[0]
	    && reg_basic_block[i] >= 0
	    && basic_block_needs[(int) class][reg_basic_block[i]] == 0)
	  {
	    enum reg_class *p;

	    /* Also check every superclass of CLASS for a need in
	       this pseudo's block before deciding to leave it alone.  */
	    for (p = reg_class_superclasses[(int) class];
		 *p != LIM_REG_CLASSES; p++)
	      if (basic_block_needs[(int) *p][reg_basic_block[i]] > 0)
		break;

	    if (*p == LIM_REG_CLASSES)
	      continue;
	  }

	/* Mark it as no longer having a hard register home.  */
	reg_renumber[i] = -1;
	/* We will need to scan everything again.  */
	something_changed = 1;
	if (global)
	  retry_global_alloc (i, forbidden_regs);

	alter_reg (i, regno);
	if (dumpfile)
	  {
	    if (reg_renumber[i] == -1)
	      fprintf (dumpfile, " Register %d now on stack.\n\n", i);
	    else
	      fprintf (dumpfile, " Register %d now in %d.\n\n",
		       i, reg_renumber[i]);
	  }
      }
  /* Also give up any SCRATCH rtxs that were assigned to REGNO.  */
  for (i = 0; i < scratch_list_length; i++)
    {
      if (scratch_list[i] && REGNO (scratch_list[i]) == regno)
	{
	  if (! cant_eliminate && basic_block_needs[0]
	      && ! basic_block_needs[(int) class][scratch_block[i]])
	    {
	      enum reg_class *p;

	      for (p = reg_class_superclasses[(int) class];
		   *p != LIM_REG_CLASSES; p++)
		if (basic_block_needs[(int) *p][scratch_block[i]] > 0)
		  break;

	      if (*p == LIM_REG_CLASSES)
		continue;
	    }
	  /* Restore the bare SCRATCH rtx and drop it from the list.  */
	  PUT_CODE (scratch_list[i], SCRATCH);
	  scratch_list[i] = 0;
	  something_changed = 1;
	  continue;
	}
    }

  return something_changed;
}
3571 \f
3572 /* Find all paradoxical subregs within X and update reg_max_ref_width.
3573 Also mark any hard registers used to store user variables as
3574 forbidden from being used for spill registers. */
3575
3576 static void
3577 scan_paradoxical_subregs (x)
3578 register rtx x;
3579 {
3580 register int i;
3581 register char *fmt;
3582 register enum rtx_code code = GET_CODE (x);
3583
3584 switch (code)
3585 {
3586 case REG:
3587 #ifdef SMALL_REGISTER_CLASSES
3588 if (REGNO (x) < FIRST_PSEUDO_REGISTER && REG_USERVAR_P (x))
3589 SET_HARD_REG_BIT (forbidden_regs, REGNO (x));
3590 #endif
3591 return;
3592
3593 case CONST_INT:
3594 case CONST:
3595 case SYMBOL_REF:
3596 case LABEL_REF:
3597 case CONST_DOUBLE:
3598 case CC0:
3599 case PC:
3600 case USE:
3601 case CLOBBER:
3602 return;
3603
3604 case SUBREG:
3605 if (GET_CODE (SUBREG_REG (x)) == REG
3606 && GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3607 reg_max_ref_width[REGNO (SUBREG_REG (x))]
3608 = GET_MODE_SIZE (GET_MODE (x));
3609 return;
3610 }
3611
3612 fmt = GET_RTX_FORMAT (code);
3613 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3614 {
3615 if (fmt[i] == 'e')
3616 scan_paradoxical_subregs (XEXP (x, i));
3617 else if (fmt[i] == 'E')
3618 {
3619 register int j;
3620 for (j = XVECLEN (x, i) - 1; j >=0; j--)
3621 scan_paradoxical_subregs (XVECEXP (x, i, j));
3622 }
3623 }
3624 }
3625 \f
3626 static int
3627 hard_reg_use_compare (p1, p2)
3628 struct hard_reg_n_uses *p1, *p2;
3629 {
3630 int tem = p1->uses - p2->uses;
3631 if (tem != 0) return tem;
3632 /* If regs are equally good, sort by regno,
3633 so that the results of qsort leave nothing to chance. */
3634 return p1->regno - p2->regno;
3635 }
3636
3637 /* Choose the order to consider regs for use as reload registers
3638 based on how much trouble would be caused by spilling one.
3639 Store them in order of decreasing preference in potential_reload_regs. */
3640
static void
order_regs_for_reload (global)
     int global;
{
  register int i;
  register int o = 0;		/* Next free slot in potential_reload_regs.  */
  int large = 0;		/* Total of all pseudo reference counts; used
				   as a "bigger than any real use count"
				   penalty unit below.  */

  struct hard_reg_n_uses hard_reg_n_uses[FIRST_PSEUDO_REGISTER];

  CLEAR_HARD_REG_SET (bad_spill_regs);

  /* Start with every slot empty; filled in below.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    potential_reload_regs[i] = -1;

  /* Count number of uses of each hard reg by pseudo regs allocated to it
     and then order them by decreasing use.  */

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      hard_reg_n_uses[i].uses = 0;
      hard_reg_n_uses[i].regno = i;
    }

  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
    {
      int regno = reg_renumber[i];
      if (regno >= 0)
	{
	  /* Charge the pseudo's reference count to every hard reg
	     it occupies (multi-word pseudos span several).  */
	  int lim = regno + HARD_REGNO_NREGS (regno, PSEUDO_REGNO_MODE (i));
	  while (regno < lim)
	    {
	      /* If allocated by local-alloc, show more uses since
		 we're not going to be able to reallocate it, but
		 we might if allocated by global alloc.  */
	      if (global && reg_allocno[i] < 0)
		hard_reg_n_uses[regno].uses += (reg_n_refs[i] + 1) / 2;

	      hard_reg_n_uses[regno++].uses += reg_n_refs[i];
	    }
	}
      large += reg_n_refs[i];
    }

  /* Now fixed registers (which cannot safely be used for reloading)
     get a very high use count so they will be considered least desirable.
     Registers used explicitly in the rtl code are almost as bad.  */

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      if (fixed_regs[i])
	{
	  hard_reg_n_uses[i].uses += 2 * large + 2;
	  SET_HARD_REG_BIT (bad_spill_regs, i);
	}
      else if (regs_explicitly_used[i])
	{
	  hard_reg_n_uses[i].uses += large + 1;
#ifndef SMALL_REGISTER_CLASSES
	  /* ??? We are doing this here because of the potential that
	     bad code may be generated if a register explicitly used in
	     an insn was used as a spill register for that insn.  But
	     not using these as spill registers may lose on some machine.
	     We'll have to see how this works out.  */
	  SET_HARD_REG_BIT (bad_spill_regs, i);
#endif
	}
    }
  /* The (hard) frame pointer is as bad as a fixed reg.  */
  hard_reg_n_uses[HARD_FRAME_POINTER_REGNUM].uses += 2 * large + 2;
  SET_HARD_REG_BIT (bad_spill_regs, HARD_FRAME_POINTER_REGNUM);

#ifdef ELIMINABLE_REGS
  /* If registers other than the frame pointer are eliminable, mark them as
     poor choices.  */
  for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
    {
      hard_reg_n_uses[reg_eliminate[i].from].uses += 2 * large + 2;
      SET_HARD_REG_BIT (bad_spill_regs, reg_eliminate[i].from);
    }
#endif

  /* Prefer registers not so far used, for use in temporary loading.
     Among them, if REG_ALLOC_ORDER is defined, use that order.
     Otherwise, prefer registers not preserved by calls.  */

#ifdef REG_ALLOC_ORDER
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      int regno = reg_alloc_order[i];

      if (hard_reg_n_uses[regno].uses == 0)
	potential_reload_regs[o++] = regno;
    }
#else
  /* First pass: unused call-clobbered regs (cheapest to take).  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      if (hard_reg_n_uses[i].uses == 0 && call_used_regs[i])
	potential_reload_regs[o++] = i;
    }
  /* Second pass: unused call-saved regs.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      if (hard_reg_n_uses[i].uses == 0 && ! call_used_regs[i])
	potential_reload_regs[o++] = i;
    }
#endif

  /* Sort all regs by ascending use count (ties broken by regno).  */
  qsort (hard_reg_n_uses, FIRST_PSEUDO_REGISTER,
	 sizeof hard_reg_n_uses[0], hard_reg_use_compare);

  /* Now add the regs that are already used,
     preferring those used less often.  The fixed and otherwise forbidden
     registers will be at the end of this list.  */

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (hard_reg_n_uses[i].uses != 0)
      potential_reload_regs[o++] = hard_reg_n_uses[i].regno;
}
3758 \f
3759 /* Used in reload_as_needed to sort the spilled regs. */
3760
static int
compare_spill_regs (r1, r2)
     short *r1, *r2;
{
  /* Sort spill registers into ascending order of hard regno.  */
  register int a = *r1;
  register int b = *r2;

  return a - b;
}
3767
3768 /* Reload pseudo-registers into hard regs around each insn as needed.
3769 Additional register load insns are output before the insn that needs it
3770 and perhaps store insns after insns that modify the reloaded pseudo reg.
3771
3772 reg_last_reload_reg and reg_reloaded_contents keep track of
3773 which registers are already available in reload registers.
3774 We update these for the reloads that we perform,
3775 as the insns are scanned. */
3776
static void
reload_as_needed (first, live_known)
     rtx first;
     int live_known;
{
  register rtx insn;
  register int i;
  int this_block = 0;		/* Index of the basic block INSN is in.  */
  rtx x;
  rtx after_call = 0;		/* Most recent call result reg still live.  */

  /* Clear out per-function reload-tracking state.  The two alloca'd
     arrays live for the duration of this call.  */
  bzero ((char *) spill_reg_rtx, sizeof spill_reg_rtx);
  bzero ((char *) spill_reg_store, sizeof spill_reg_store);
  reg_last_reload_reg = (rtx *) alloca (max_regno * sizeof (rtx));
  bzero ((char *) reg_last_reload_reg, max_regno * sizeof (rtx));
  reg_has_output_reload = (char *) alloca (max_regno);
  for (i = 0; i < n_spills; i++)
    {
      reg_reloaded_contents[i] = -1;
      reg_reloaded_insn[i] = 0;
    }

  /* Reset all offsets on eliminable registers to their initial values.  */
#ifdef ELIMINABLE_REGS
  for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
    {
      INITIAL_ELIMINATION_OFFSET (reg_eliminate[i].from, reg_eliminate[i].to,
				  reg_eliminate[i].initial_offset);
      reg_eliminate[i].previous_offset
	= reg_eliminate[i].offset = reg_eliminate[i].initial_offset;
    }
#else
  INITIAL_FRAME_POINTER_OFFSET (reg_eliminate[0].initial_offset);
  reg_eliminate[0].previous_offset
    = reg_eliminate[0].offset = reg_eliminate[0].initial_offset;
#endif

  num_not_at_initial_offset = 0;

  /* Order the spilled regs, so that allocate_reload_regs can guarantee to
     pack registers with group needs.  */
  if (n_spills > 1)
    {
      qsort (spill_regs, n_spills, sizeof (short), compare_spill_regs);
      for (i = 0; i < n_spills; i++)
	spill_reg_order[spill_regs[i]] = i;
    }

  for (insn = first; insn;)
    {
      register rtx next = NEXT_INSN (insn);

      /* Notice when we move to a new basic block.  */
      if (live_known && this_block + 1 < n_basic_blocks
	  && insn == basic_block_head[this_block+1])
	++this_block;

      /* If we pass a label, copy the offsets from the label information
	 into the current offsets of each elimination.  */
      if (GET_CODE (insn) == CODE_LABEL)
	{
	  num_not_at_initial_offset = 0;
	  for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
	    {
	      reg_eliminate[i].offset = reg_eliminate[i].previous_offset
		= offsets_at[CODE_LABEL_NUMBER (insn)][i];
	      if (reg_eliminate[i].can_eliminate
		  && (reg_eliminate[i].offset
		      != reg_eliminate[i].initial_offset))
		num_not_at_initial_offset++;
	    }
	}

      else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	{
	  rtx avoid_return_reg = 0;
	  rtx oldpat = PATTERN (insn);

#ifdef SMALL_REGISTER_CLASSES
	  /* Set avoid_return_reg if this is an insn
	     that might use the value of a function call.  */
	  if (GET_CODE (insn) == CALL_INSN)
	    {
	      if (GET_CODE (PATTERN (insn)) == SET)
		after_call = SET_DEST (PATTERN (insn));
	      else if (GET_CODE (PATTERN (insn)) == PARALLEL
		       && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
		after_call = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
	      else
		after_call = 0;
	    }
	  else if (after_call != 0
		   && !(GET_CODE (PATTERN (insn)) == SET
			&& SET_DEST (PATTERN (insn)) == stack_pointer_rtx))
	    {
	      if (reg_referenced_p (after_call, PATTERN (insn)))
		avoid_return_reg = after_call;
	      after_call = 0;
	    }
#endif /* SMALL_REGISTER_CLASSES */

	  /* If this is a USE and CLOBBER of a MEM, ensure that any
	     references to eliminable registers have been removed.  */

	  if ((GET_CODE (PATTERN (insn)) == USE
	       || GET_CODE (PATTERN (insn)) == CLOBBER)
	      && GET_CODE (XEXP (PATTERN (insn), 0)) == MEM)
	    XEXP (XEXP (PATTERN (insn), 0), 0)
	      = eliminate_regs (XEXP (XEXP (PATTERN (insn), 0), 0),
				GET_MODE (XEXP (PATTERN (insn), 0)), NULL_RTX);

	  /* If we need to do register elimination processing, do so.
	     This might delete the insn, in which case we are done.
	     NOTE(review): the insn's mode field is reused here as a flag
	     set by an earlier phase of reload — presumably QImode marks
	     insns needing elimination and VOIDmode (below) marks insns
	     needing no reloads; confirm against the code that sets it.  */
	  if (num_eliminable && GET_MODE (insn) == QImode)
	    {
	      eliminate_regs_in_insn (insn, 1);
	      if (GET_CODE (insn) == NOTE)
		{
		  insn = next;
		  continue;
		}
	    }

	  if (GET_MODE (insn) == VOIDmode)
	    n_reloads = 0;
	  /* First find the pseudo regs that must be reloaded for this insn.
	     This info is returned in the tables reload_... (see reload.h).
	     Also modify the body of INSN by substituting RELOAD
	     rtx's for those pseudo regs.  */
	  else
	    {
	      bzero (reg_has_output_reload, max_regno);
	      CLEAR_HARD_REG_SET (reg_is_output_reload);

	      find_reloads (insn, 1, spill_indirect_levels, live_known,
			    spill_reg_order);
	    }

	  if (n_reloads > 0)
	    {
	      /* Note: this NEXT deliberately shadows the outer one; it is
		 used below to bound the scan over newly emitted insns.  */
	      rtx prev = PREV_INSN (insn), next = NEXT_INSN (insn);
	      rtx p;
	      int class;

	      /* If this block has not had spilling done for a
		 particular class and we have any non-optionals that need a
		 spill reg in that class, abort.  */

	      for (class = 0; class < N_REG_CLASSES; class++)
		if (basic_block_needs[class] != 0
		    && basic_block_needs[class][this_block] == 0)
		  for (i = 0; i < n_reloads; i++)
		    if (class == (int) reload_reg_class[i]
			&& reload_reg_rtx[i] == 0
			&& ! reload_optional[i]
			&& (reload_in[i] != 0 || reload_out[i] != 0
			    || reload_secondary_p[i] != 0))
		      fatal_insn ("Non-optional registers need a spill register", insn);

	      /* Now compute which reload regs to reload them into.  Perhaps
		 reusing reload regs from previous insns, or else output
		 load insns to reload them.  Maybe output store insns too.
		 Record the choices of reload reg in reload_reg_rtx.  */
	      choose_reload_regs (insn, avoid_return_reg);

#ifdef SMALL_REGISTER_CLASSES
	      /* Merge any reloads that we didn't combine for fear of
		 increasing the number of spill registers needed but now
		 discover can be safely merged.  */
	      merge_assigned_reloads (insn);
#endif

	      /* Generate the insns to reload operands into or out of
		 their reload regs.  */
	      emit_reload_insns (insn);

	      /* Substitute the chosen reload regs from reload_reg_rtx
		 into the insn's body (or perhaps into the bodies of other
		 load and store insn that we just made for reloading
		 and that we moved the structure into).  */
	      subst_reloads ();

	      /* If this was an ASM, make sure that all the reload insns
		 we have generated are valid.  If not, give an error
		 and delete them.  */

	      if (asm_noperands (PATTERN (insn)) >= 0)
		for (p = NEXT_INSN (prev); p != next; p = NEXT_INSN (p))
		  if (p != insn && GET_RTX_CLASS (GET_CODE (p)) == 'i'
		      && (recog_memoized (p) < 0
			  || (insn_extract (p),
			      ! constrain_operands (INSN_CODE (p), 1))))
		    {
		      error_for_asm (insn,
				     "`asm' operand requires impossible reload");
		      /* Turn the offending insn into a deleted note.  */
		      PUT_CODE (p, NOTE);
		      NOTE_SOURCE_FILE (p) = 0;
		      NOTE_LINE_NUMBER (p) = NOTE_INSN_DELETED;
		    }
	    }
	  /* Any previously reloaded spilled pseudo reg, stored in this insn,
	     is no longer validly lying around to save a future reload.
	     Note that this does not detect pseudos that were reloaded
	     for this insn in order to be stored in
	     (obeying register constraints).  That is correct; such reload
	     registers ARE still valid.  */
	  note_stores (oldpat, forget_old_reloads_1);

	  /* There may have been CLOBBER insns placed after INSN.  So scan
	     between INSN and NEXT and use them to forget old reloads.  */
	  for (x = NEXT_INSN (insn); x != next; x = NEXT_INSN (x))
	    if (GET_CODE (x) == INSN && GET_CODE (PATTERN (x)) == CLOBBER)
	      note_stores (PATTERN (x), forget_old_reloads_1);

#ifdef AUTO_INC_DEC
	  /* Likewise for regs altered by auto-increment in this insn.
	     But note that the reg-notes are not changed by reloading:
	     they still contain the pseudo-regs, not the spill regs.  */
	  for (x = REG_NOTES (insn); x; x = XEXP (x, 1))
	    if (REG_NOTE_KIND (x) == REG_INC)
	      {
		/* See if this pseudo reg was reloaded in this insn.
		   If so, its last-reload info is still valid
		   because it is based on this insn's reload.  */
		for (i = 0; i < n_reloads; i++)
		  if (reload_out[i] == XEXP (x, 0))
		    break;

		if (i == n_reloads)
		  forget_old_reloads_1 (XEXP (x, 0), NULL_RTX);
	      }
#endif
	}
      /* A reload reg's contents are unknown after a label.  */
      if (GET_CODE (insn) == CODE_LABEL)
	for (i = 0; i < n_spills; i++)
	  {
	    reg_reloaded_contents[i] = -1;
	    reg_reloaded_insn[i] = 0;
	  }

      /* Don't assume a reload reg is still good after a call insn
	 if it is a call-used reg.  */
      else if (GET_CODE (insn) == CALL_INSN)
	for (i = 0; i < n_spills; i++)
	  if (call_used_regs[spill_regs[i]])
	    {
	      reg_reloaded_contents[i] = -1;
	      reg_reloaded_insn[i] = 0;
	    }

      /* In case registers overlap, allow certain insns to invalidate
	 particular hard registers.  */

#ifdef INSN_CLOBBERS_REGNO_P
      for (i = 0 ; i < n_spills ; i++)
	if (INSN_CLOBBERS_REGNO_P (insn, spill_regs[i]))
	  {
	    reg_reloaded_contents[i] = -1;
	    reg_reloaded_insn[i] = 0;
	  }
#endif

      insn = next;

#ifdef USE_C_ALLOCA
      /* With the C alloca emulation, free accumulated garbage now.  */
      alloca (0);
#endif
    }
}
4047
4048 /* Discard all record of any value reloaded from X,
4049 or reloaded in X from someplace else;
4050 unless X is an output reload reg of the current insn.
4051
4052 X may be a hard reg (the reload reg)
4053 or it may be a pseudo reg that was reloaded from. */
4054
4055 static void
4056 forget_old_reloads_1 (x, ignored)
4057 rtx x;
4058 rtx ignored;
4059 {
4060 register int regno;
4061 int nr;
4062 int offset = 0;
4063
4064 /* note_stores does give us subregs of hard regs. */
4065 while (GET_CODE (x) == SUBREG)
4066 {
4067 offset += SUBREG_WORD (x);
4068 x = SUBREG_REG (x);
4069 }
4070
4071 if (GET_CODE (x) != REG)
4072 return;
4073
4074 regno = REGNO (x) + offset;
4075
4076 if (regno >= FIRST_PSEUDO_REGISTER)
4077 nr = 1;
4078 else
4079 {
4080 int i;
4081 nr = HARD_REGNO_NREGS (regno, GET_MODE (x));
4082 /* Storing into a spilled-reg invalidates its contents.
4083 This can happen if a block-local pseudo is allocated to that reg
4084 and it wasn't spilled because this block's total need is 0.
4085 Then some insn might have an optional reload and use this reg. */
4086 for (i = 0; i < nr; i++)
4087 if (spill_reg_order[regno + i] >= 0
4088 /* But don't do this if the reg actually serves as an output
4089 reload reg in the current instruction. */
4090 && (n_reloads == 0
4091 || ! TEST_HARD_REG_BIT (reg_is_output_reload, regno + i)))
4092 {
4093 reg_reloaded_contents[spill_reg_order[regno + i]] = -1;
4094 reg_reloaded_insn[spill_reg_order[regno + i]] = 0;
4095 }
4096 }
4097
4098 /* Since value of X has changed,
4099 forget any value previously copied from it. */
4100
4101 while (nr-- > 0)
4102 /* But don't forget a copy if this is the output reload
4103 that establishes the copy's validity. */
4104 if (n_reloads == 0 || reg_has_output_reload[regno + nr] == 0)
4105 reg_last_reload_reg[regno + nr] = 0;
4106 }
4107 \f
/* For each reload (indexed by reload number), the mode of the reload
   register.  */
static enum machine_mode reload_mode[MAX_RELOADS];

/* For each reload (indexed by reload number), the largest number of
   hard registers it will require.  */
static int reload_nregs[MAX_RELOADS];
4113
4114 /* Comparison function for qsort to decide which of two reloads
4115 should be handled first. *P1 and *P2 are the reload numbers. */
4116
4117 static int
4118 reload_reg_class_lower (p1, p2)
4119 short *p1, *p2;
4120 {
4121 register int r1 = *p1, r2 = *p2;
4122 register int t;
4123
4124 /* Consider required reloads before optional ones. */
4125 t = reload_optional[r1] - reload_optional[r2];
4126 if (t != 0)
4127 return t;
4128
4129 /* Count all solitary classes before non-solitary ones. */
4130 t = ((reg_class_size[(int) reload_reg_class[r2]] == 1)
4131 - (reg_class_size[(int) reload_reg_class[r1]] == 1));
4132 if (t != 0)
4133 return t;
4134
4135 /* Aside from solitaires, consider all multi-reg groups first. */
4136 t = reload_nregs[r2] - reload_nregs[r1];
4137 if (t != 0)
4138 return t;
4139
4140 /* Consider reloads in order of increasing reg-class number. */
4141 t = (int) reload_reg_class[r1] - (int) reload_reg_class[r2];
4142 if (t != 0)
4143 return t;
4144
4145 /* If reloads are equally urgent, sort by reload number,
4146 so that the results of qsort leave nothing to chance. */
4147 return r1 - r2;
4148 }
4149 \f
/* The following HARD_REG_SETs indicate when each hard register is
   used for a reload of various parts of the current insn.  Each set
   is indexed by hard register number.  */

/* If reg is in use as a reload reg for a RELOAD_OTHER reload.  */
static HARD_REG_SET reload_reg_used;
/* If reg is in use for a RELOAD_FOR_INPUT_ADDRESS reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_OUTPUT_ADDRESS reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_INPUT reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_input[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_OUTPUT reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_output[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_OPERAND_ADDRESS reload.  */
static HARD_REG_SET reload_reg_used_in_op_addr;
/* If reg is in use for a RELOAD_FOR_OPADDR_ADDR reload.  */
static HARD_REG_SET reload_reg_used_in_op_addr_reload;
/* If reg is in use for a RELOAD_FOR_INSN reload.  */
static HARD_REG_SET reload_reg_used_in_insn;
/* If reg is in use for a RELOAD_FOR_OTHER_ADDRESS reload.  */
static HARD_REG_SET reload_reg_used_in_other_addr;

/* If reg is in use as a reload reg for any sort of reload.  */
static HARD_REG_SET reload_reg_used_at_all;

/* If reg is used as an inherited reload.  We just mark the first register
   in the group.  */
static HARD_REG_SET reload_reg_used_for_inherit;
4178
4179 /* Mark reg REGNO as in use for a reload of the sort spec'd by OPNUM and
4180 TYPE. MODE is used to indicate how many consecutive regs are
4181 actually used. */
4182
4183 static void
4184 mark_reload_reg_in_use (regno, opnum, type, mode)
4185 int regno;
4186 int opnum;
4187 enum reload_type type;
4188 enum machine_mode mode;
4189 {
4190 int nregs = HARD_REGNO_NREGS (regno, mode);
4191 int i;
4192
4193 for (i = regno; i < nregs + regno; i++)
4194 {
4195 switch (type)
4196 {
4197 case RELOAD_OTHER:
4198 SET_HARD_REG_BIT (reload_reg_used, i);
4199 break;
4200
4201 case RELOAD_FOR_INPUT_ADDRESS:
4202 SET_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], i);
4203 break;
4204
4205 case RELOAD_FOR_OUTPUT_ADDRESS:
4206 SET_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], i);
4207 break;
4208
4209 case RELOAD_FOR_OPERAND_ADDRESS:
4210 SET_HARD_REG_BIT (reload_reg_used_in_op_addr, i);
4211 break;
4212
4213 case RELOAD_FOR_OPADDR_ADDR:
4214 SET_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, i);
4215 break;
4216
4217 case RELOAD_FOR_OTHER_ADDRESS:
4218 SET_HARD_REG_BIT (reload_reg_used_in_other_addr, i);
4219 break;
4220
4221 case RELOAD_FOR_INPUT:
4222 SET_HARD_REG_BIT (reload_reg_used_in_input[opnum], i);
4223 break;
4224
4225 case RELOAD_FOR_OUTPUT:
4226 SET_HARD_REG_BIT (reload_reg_used_in_output[opnum], i);
4227 break;
4228
4229 case RELOAD_FOR_INSN:
4230 SET_HARD_REG_BIT (reload_reg_used_in_insn, i);
4231 break;
4232 }
4233
4234 SET_HARD_REG_BIT (reload_reg_used_at_all, i);
4235 }
4236 }
4237
4238 /* Similarly, but show REGNO is no longer in use for a reload. */
4239
4240 static void
4241 clear_reload_reg_in_use (regno, opnum, type, mode)
4242 int regno;
4243 int opnum;
4244 enum reload_type type;
4245 enum machine_mode mode;
4246 {
4247 int nregs = HARD_REGNO_NREGS (regno, mode);
4248 int i;
4249
4250 for (i = regno; i < nregs + regno; i++)
4251 {
4252 switch (type)
4253 {
4254 case RELOAD_OTHER:
4255 CLEAR_HARD_REG_BIT (reload_reg_used, i);
4256 break;
4257
4258 case RELOAD_FOR_INPUT_ADDRESS:
4259 CLEAR_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], i);
4260 break;
4261
4262 case RELOAD_FOR_OUTPUT_ADDRESS:
4263 CLEAR_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], i);
4264 break;
4265
4266 case RELOAD_FOR_OPERAND_ADDRESS:
4267 CLEAR_HARD_REG_BIT (reload_reg_used_in_op_addr, i);
4268 break;
4269
4270 case RELOAD_FOR_OPADDR_ADDR:
4271 CLEAR_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, i);
4272 break;
4273
4274 case RELOAD_FOR_OTHER_ADDRESS:
4275 CLEAR_HARD_REG_BIT (reload_reg_used_in_other_addr, i);
4276 break;
4277
4278 case RELOAD_FOR_INPUT:
4279 CLEAR_HARD_REG_BIT (reload_reg_used_in_input[opnum], i);
4280 break;
4281
4282 case RELOAD_FOR_OUTPUT:
4283 CLEAR_HARD_REG_BIT (reload_reg_used_in_output[opnum], i);
4284 break;
4285
4286 case RELOAD_FOR_INSN:
4287 CLEAR_HARD_REG_BIT (reload_reg_used_in_insn, i);
4288 break;
4289 }
4290 }
4291 }
4292
4293 /* 1 if reg REGNO is free as a reload reg for a reload of the sort
4294 specified by OPNUM and TYPE. */
4295
static int
reload_reg_free_p (regno, opnum, type)
     int regno;
     int opnum;
     enum reload_type type;
{
  int i;

  /* In use for a RELOAD_OTHER means it's not available for anything.  */
  if (TEST_HARD_REG_BIT (reload_reg_used, regno))
    return 0;

  /* Each case returns 1 only if REGNO conflicts with none of the
     reload uses that can overlap a reload of kind TYPE/OPNUM.  */
  switch (type)
    {
    case RELOAD_OTHER:
      /* In use for anything means we can't use it for RELOAD_OTHER.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno)
	  || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
	  || TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
	return 0;

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_INPUT:
      if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
	  || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
	return 0;

      if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
	return 0;

      /* If it is used for some other input, can't use it.  */
      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      /* If it is used in a later operand's address, can't use it.  */
      for (i = opnum + 1; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_INPUT_ADDRESS:
      /* Can't use a register if it is used for an input address for this
	 operand or used as an input in an earlier one.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], regno))
	return 0;

      for (i = 0; i < opnum; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_OUTPUT_ADDRESS:
      /* Can't use a register if it is used for an output address for this
	 operand or used as an output in this or a later operand.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno))
	return 0;

      for (i = opnum; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_OPERAND_ADDRESS:
      /* Conflicts with any input or with another operand-address use.  */
      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));

    case RELOAD_FOR_OPADDR_ADDR:
      /* Conflicts with any input or with another OPADDR_ADDR use.  */
      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno));

    case RELOAD_FOR_OUTPUT:
      /* This cannot share a register with RELOAD_FOR_INSN reloads, other
	 outputs, or an operand address for this or an earlier output.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
	return 0;

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      for (i = 0; i <= opnum; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_INSN:
      /* Conflicts with any input or output, another RELOAD_FOR_INSN,
	 or an operand-address reload.  */
      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));

    case RELOAD_FOR_OTHER_ADDRESS:
      return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
    }

  /* Control reaches here only for an invalid reload type.  */
  abort ();
}
4415
4416 /* Return 1 if the value in reload reg REGNO, as used by a reload
4417 needed for the part of the insn specified by OPNUM and TYPE,
4418 is not in use for a reload in any prior part of the insn.
4419
4420 We can assume that the reload reg was already tested for availability
4421 at the time it is needed, and we should not check this again,
4422 in case the reg has already been marked in use. */
4423
static int
reload_reg_free_before_p (regno, opnum, type)
     int regno;
     int opnum;
     enum reload_type type;
{
  int i;

  /* Each case returns 1 only if no reload use that happens EARLIER in
     the insn than a TYPE/OPNUM reload involves REGNO.  */
  switch (type)
    {
    case RELOAD_FOR_OTHER_ADDRESS:
      /* These always come first.  */
      return 1;

    case RELOAD_OTHER:
      return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);

      /* If this use is for part of the insn,
	 check the reg is not in use for any prior part.  It is tempting
	 to try to do this by falling through from objects that occur
	 later in the insn to ones that occur earlier, but that will not
	 correctly take into account the fact that here we MUST ignore
	 things that would prevent the register from being allocated in
	 the first place, since we know that it was allocated.  */

    case RELOAD_FOR_OUTPUT_ADDRESS:
      /* Earlier reloads are for earlier outputs or their addresses,
	 any RELOAD_FOR_INSN reloads, any inputs or their addresses, or any
	 RELOAD_FOR_OTHER_ADDRESS reloads (we know it can't conflict with
	 RELOAD_OTHER).  */
      for (i = 0; i < opnum; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
	return 0;

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      return (! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));

    case RELOAD_FOR_OUTPUT:
      /* This can't be used in the output address for this operand and
	 anything that can't be used for it, except that we've already
	 tested for RELOAD_FOR_INSN objects.  */

      if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno))
	return 0;

      for (i = 0; i < opnum; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
	  return 0;

      return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);

    case RELOAD_FOR_OPERAND_ADDRESS:
    case RELOAD_FOR_OPADDR_ADDR:
    case RELOAD_FOR_INSN:
      /* These can't conflict with inputs, or each other, so all we have to
	 test is input addresses and the addresses of OTHER items.  */

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
	  return 0;

      return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);

    case RELOAD_FOR_INPUT:
      /* The only things earlier are the address for this and
	 earlier inputs, other inputs (which we know we don't conflict
	 with), and addresses of RELOAD_OTHER objects.  */

      for (i = 0; i <= opnum; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
	  return 0;

      return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);

    case RELOAD_FOR_INPUT_ADDRESS:
      /* Similarly, all we have to check is for use in earlier inputs'
	 addresses.  */
      for (i = 0; i < opnum; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
	  return 0;

      return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
    }

  /* Control reaches here only for an invalid reload type.  */
  abort ();
}
4526
4527 /* Return 1 if the value in reload reg REGNO, as used by a reload
4528 needed for the part of the insn specified by OPNUM and TYPE,
4529 is still available in REGNO at the end of the insn.
4530
4531 We can assume that the reload reg was already tested for availability
4532 at the time it is needed, and we should not check this again,
4533 in case the reg has already been marked in use. */
4534
4535 static int
4536 reload_reg_reaches_end_p (regno, opnum, type)
4537 int regno;
4538 int opnum;
4539 enum reload_type type;
4540 {
4541 int i;
4542
4543 switch (type)
4544 {
4545 case RELOAD_OTHER:
4546 /* Since a RELOAD_OTHER reload claims the reg for the entire insn,
4547 its value must reach the end. */
4548 return 1;
4549
4550 /* If this use is for part of the insn,
4551 its value reaches if no subsequent part uses the same register.
4552 Just like the above function, don't try to do this with lots
4553 of fallthroughs. */
4554
4555 case RELOAD_FOR_OTHER_ADDRESS:
4556 /* Here we check for everything else, since these don't conflict
4557 with anything else and everything comes later. */
4558
4559 for (i = 0; i < reload_n_operands; i++)
4560 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4561 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno)
4562 || TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4563 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4564 return 0;
4565
4566 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4567 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4568 && ! TEST_HARD_REG_BIT (reload_reg_used, regno));
4569
4570 case RELOAD_FOR_INPUT_ADDRESS:
4571 /* Similar, except that we check only for this and subsequent inputs
4572 and the address of only subsequent inputs and we do not need
4573 to check for RELOAD_OTHER objects since they are known not to
4574 conflict. */
4575
4576 for (i = opnum; i < reload_n_operands; i++)
4577 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4578 return 0;
4579
4580 for (i = opnum + 1; i < reload_n_operands; i++)
4581 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
4582 return 0;
4583
4584 for (i = 0; i < reload_n_operands; i++)
4585 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4586 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4587 return 0;
4588
4589 if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
4590 return 0;
4591
4592 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4593 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno));
4594
4595 case RELOAD_FOR_INPUT:
4596 /* Similar to input address, except we start at the next operand for
4597 both input and input address and we do not check for
4598 RELOAD_FOR_OPERAND_ADDRESS and RELOAD_FOR_INSN since these
4599 would conflict. */
4600
4601 for (i = opnum + 1; i < reload_n_operands; i++)
4602 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4603 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4604 return 0;
4605
4606 /* ... fall through ... */
4607
4608 case RELOAD_FOR_OPERAND_ADDRESS:
4609 /* Check outputs and their addresses. */
4610
4611 for (i = 0; i < reload_n_operands; i++)
4612 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4613 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4614 return 0;
4615
4616 return 1;
4617
4618 case RELOAD_FOR_OPADDR_ADDR:
4619 for (i = 0; i < reload_n_operands; i++)
4620 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4621 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4622 return 0;
4623
4624 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4625 && !TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno));
4626
4627 case RELOAD_FOR_INSN:
4628 /* These conflict with other outputs with RELOAD_OTHER. So
4629 we need only check for output addresses. */
4630
4631 opnum = -1;
4632
4633 /* ... fall through ... */
4634
4635 case RELOAD_FOR_OUTPUT:
4636 case RELOAD_FOR_OUTPUT_ADDRESS:
4637 /* We already know these can't conflict with a later output. So the
4638 only thing to check are later output addresses. */
4639 for (i = opnum + 1; i < reload_n_operands; i++)
4640 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno))
4641 return 0;
4642
4643 return 1;
4644 }
4645
4646 abort ();
4647 }
4648 \f
4649 /* Return 1 if the reloads denoted by R1 and R2 cannot share a register.
4650 Return 0 otherwise.
4651
4652 This function uses the same algorithm as reload_reg_free_p above. */
4653
4654 static int
4655 reloads_conflict (r1, r2)
4656 int r1, r2;
4657 {
4658 enum reload_type r1_type = reload_when_needed[r1];
4659 enum reload_type r2_type = reload_when_needed[r2];
4660 int r1_opnum = reload_opnum[r1];
4661 int r2_opnum = reload_opnum[r2];
4662
4663 /* RELOAD_OTHER conflicts with everything. */
4664 if (r2_type == RELOAD_OTHER)
4665 return 1;
4666
4667 /* Otherwise, check conflicts differently for each type. */
4668
4669 switch (r1_type)
4670 {
4671 case RELOAD_FOR_INPUT:
4672 return (r2_type == RELOAD_FOR_INSN
4673 || r2_type == RELOAD_FOR_OPERAND_ADDRESS
4674 || r2_type == RELOAD_FOR_OPADDR_ADDR
4675 || r2_type == RELOAD_FOR_INPUT
4676 || (r2_type == RELOAD_FOR_INPUT_ADDRESS && r2_opnum > r1_opnum));
4677
4678 case RELOAD_FOR_INPUT_ADDRESS:
4679 return ((r2_type == RELOAD_FOR_INPUT_ADDRESS && r1_opnum == r2_opnum)
4680 || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));
4681
4682 case RELOAD_FOR_OUTPUT_ADDRESS:
4683 return ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS && r2_opnum == r1_opnum)
4684 || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum >= r1_opnum));
4685
4686 case RELOAD_FOR_OPERAND_ADDRESS:
4687 return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_INSN
4688 || r2_type == RELOAD_FOR_OPERAND_ADDRESS);
4689
4690 case RELOAD_FOR_OPADDR_ADDR:
4691 return (r2_type == RELOAD_FOR_INPUT
4692 || r2_type == RELOAD_FOR_OPADDR_ADDR);
4693
4694 case RELOAD_FOR_OUTPUT:
4695 return (r2_type == RELOAD_FOR_INSN || r2_type == RELOAD_FOR_OUTPUT
4696 || (r2_type == RELOAD_FOR_OUTPUT_ADDRESS
4697 && r2_opnum >= r1_opnum));
4698
4699 case RELOAD_FOR_INSN:
4700 return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_OUTPUT
4701 || r2_type == RELOAD_FOR_INSN
4702 || r2_type == RELOAD_FOR_OPERAND_ADDRESS);
4703
4704 case RELOAD_FOR_OTHER_ADDRESS:
4705 return r2_type == RELOAD_FOR_OTHER_ADDRESS;
4706
4707 case RELOAD_OTHER:
4708 return 1;
4709
4710 default:
4711 abort ();
4712 }
4713 }
4714 \f
/* Vector of reload-numbers showing the order in which the reloads should
   be processed.  */
short reload_order[MAX_RELOADS];

/* Indexed by reload number, 1 if incoming value
   inherited from previous insns (i.e. no fresh load was emitted).  */
char reload_inherited[MAX_RELOADS];

/* For an inherited reload, this is the insn the reload was inherited from,
   if we know it.  Otherwise, this is 0.  */
rtx reload_inheritance_insn[MAX_RELOADS];

/* If non-zero, this is a place to get the value of the reload,
   rather than using reload_in.  */
rtx reload_override_in[MAX_RELOADS];

/* For each reload, the index in spill_regs of the spill register used,
   or -1 if we did not need one of the spill registers for this reload.  */
int reload_spill_index[MAX_RELOADS];
4734
4735 /* Find a spill register to use as a reload register for reload R.
4736 LAST_RELOAD is non-zero if this is the last reload for the insn being
4737 processed.
4738
4739 Set reload_reg_rtx[R] to the register allocated.
4740
4741 If NOERROR is nonzero, we return 1 if successful,
4742 or 0 if we couldn't find a spill reg and we didn't change anything. */
4743
4744 static int
4745 allocate_reload_reg (r, insn, last_reload, noerror)
4746 int r;
4747 rtx insn;
4748 int last_reload;
4749 int noerror;
4750 {
4751 int i;
4752 int pass;
4753 int count;
4754 rtx new;
4755 int regno;
4756
4757 /* If we put this reload ahead, thinking it is a group,
4758 then insist on finding a group. Otherwise we can grab a
4759 reg that some other reload needs.
4760 (That can happen when we have a 68000 DATA_OR_FP_REG
4761 which is a group of data regs or one fp reg.)
4762 We need not be so restrictive if there are no more reloads
4763 for this insn.
4764
4765 ??? Really it would be nicer to have smarter handling
4766 for that kind of reg class, where a problem like this is normal.
4767 Perhaps those classes should be avoided for reloading
4768 by use of more alternatives. */
4769
4770 int force_group = reload_nregs[r] > 1 && ! last_reload;
4771
4772 /* If we want a single register and haven't yet found one,
4773 take any reg in the right class and not in use.
4774 If we want a consecutive group, here is where we look for it.
4775
4776 We use two passes so we can first look for reload regs to
4777 reuse, which are already in use for other reloads in this insn,
4778 and only then use additional registers.
4779 I think that maximizing reuse is needed to make sure we don't
4780 run out of reload regs. Suppose we have three reloads, and
4781 reloads A and B can share regs. These need two regs.
4782 Suppose A and B are given different regs.
4783 That leaves none for C. */
4784 for (pass = 0; pass < 2; pass++)
4785 {
4786 /* I is the index in spill_regs.
4787 We advance it round-robin between insns to use all spill regs
4788 equally, so that inherited reloads have a chance
4789 of leapfrogging each other. Don't do this, however, when we have
4790 group needs and failure would be fatal; if we only have a relatively
4791 small number of spill registers, and more than one of them has
4792 group needs, then by starting in the middle, we may end up
4793 allocating the first one in such a way that we are not left with
4794 sufficient groups to handle the rest. */
4795
4796 if (noerror || ! force_group)
4797 i = last_spill_reg;
4798 else
4799 i = -1;
4800
4801 for (count = 0; count < n_spills; count++)
4802 {
4803 int class = (int) reload_reg_class[r];
4804
4805 i = (i + 1) % n_spills;
4806
4807 if (reload_reg_free_p (spill_regs[i], reload_opnum[r],
4808 reload_when_needed[r])
4809 && TEST_HARD_REG_BIT (reg_class_contents[class], spill_regs[i])
4810 && HARD_REGNO_MODE_OK (spill_regs[i], reload_mode[r])
4811 /* Look first for regs to share, then for unshared. But
4812 don't share regs used for inherited reloads; they are
4813 the ones we want to preserve. */
4814 && (pass
4815 || (TEST_HARD_REG_BIT (reload_reg_used_at_all,
4816 spill_regs[i])
4817 && ! TEST_HARD_REG_BIT (reload_reg_used_for_inherit,
4818 spill_regs[i]))))
4819 {
4820 int nr = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
4821 /* Avoid the problem where spilling a GENERAL_OR_FP_REG
4822 (on 68000) got us two FP regs. If NR is 1,
4823 we would reject both of them. */
4824 if (force_group)
4825 nr = CLASS_MAX_NREGS (reload_reg_class[r], reload_mode[r]);
4826 /* If we need only one reg, we have already won. */
4827 if (nr == 1)
4828 {
4829 /* But reject a single reg if we demand a group. */
4830 if (force_group)
4831 continue;
4832 break;
4833 }
4834 /* Otherwise check that as many consecutive regs as we need
4835 are available here.
4836 Also, don't use for a group registers that are
4837 needed for nongroups. */
4838 if (! TEST_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]))
4839 while (nr > 1)
4840 {
4841 regno = spill_regs[i] + nr - 1;
4842 if (!(TEST_HARD_REG_BIT (reg_class_contents[class], regno)
4843 && spill_reg_order[regno] >= 0
4844 && reload_reg_free_p (regno, reload_opnum[r],
4845 reload_when_needed[r])
4846 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
4847 regno)))
4848 break;
4849 nr--;
4850 }
4851 if (nr == 1)
4852 break;
4853 }
4854 }
4855
4856 /* If we found something on pass 1, omit pass 2. */
4857 if (count < n_spills)
4858 break;
4859 }
4860
4861 /* We should have found a spill register by now. */
4862 if (count == n_spills)
4863 {
4864 if (noerror)
4865 return 0;
4866 goto failure;
4867 }
4868
4869 /* I is the index in SPILL_REG_RTX of the reload register we are to
4870 allocate. Get an rtx for it and find its register number. */
4871
4872 new = spill_reg_rtx[i];
4873
4874 if (new == 0 || GET_MODE (new) != reload_mode[r])
4875 spill_reg_rtx[i] = new
4876 = gen_rtx (REG, reload_mode[r], spill_regs[i]);
4877
4878 regno = true_regnum (new);
4879
4880 /* Detect when the reload reg can't hold the reload mode.
4881 This used to be one `if', but Sequent compiler can't handle that. */
4882 if (HARD_REGNO_MODE_OK (regno, reload_mode[r]))
4883 {
4884 enum machine_mode test_mode = VOIDmode;
4885 if (reload_in[r])
4886 test_mode = GET_MODE (reload_in[r]);
4887 /* If reload_in[r] has VOIDmode, it means we will load it
4888 in whatever mode the reload reg has: to wit, reload_mode[r].
4889 We have already tested that for validity. */
4890 /* Aside from that, we need to test that the expressions
4891 to reload from or into have modes which are valid for this
4892 reload register. Otherwise the reload insns would be invalid. */
4893 if (! (reload_in[r] != 0 && test_mode != VOIDmode
4894 && ! HARD_REGNO_MODE_OK (regno, test_mode)))
4895 if (! (reload_out[r] != 0
4896 && ! HARD_REGNO_MODE_OK (regno, GET_MODE (reload_out[r]))))
4897 {
4898 /* The reg is OK. */
4899 last_spill_reg = i;
4900
4901 /* Mark as in use for this insn the reload regs we use
4902 for this. */
4903 mark_reload_reg_in_use (spill_regs[i], reload_opnum[r],
4904 reload_when_needed[r], reload_mode[r]);
4905
4906 reload_reg_rtx[r] = new;
4907 reload_spill_index[r] = i;
4908 return 1;
4909 }
4910 }
4911
4912 /* The reg is not OK. */
4913 if (noerror)
4914 return 0;
4915
4916 failure:
4917 if (asm_noperands (PATTERN (insn)) < 0)
4918 /* It's the compiler's fault. */
4919 fatal_insn ("Could not find a spill register", insn);
4920
4921 /* It's the user's fault; the operand's mode and constraint
4922 don't match. Disable this reload so we don't crash in final. */
4923 error_for_asm (insn,
4924 "`asm' operand constraint incompatible with operand size");
4925 reload_in[r] = 0;
4926 reload_out[r] = 0;
4927 reload_reg_rtx[r] = 0;
4928 reload_optional[r] = 1;
4929 reload_secondary_p[r] = 1;
4930
4931 return 1;
4932 }
4933 \f
4934 /* Assign hard reg targets for the pseudo-registers we must reload
4935 into hard regs for this insn.
4936 Also output the instructions to copy them in and out of the hard regs.
4937
4938 For machines with register classes, we are responsible for
4939 finding a reload reg in the proper class. */
4940
4941 static void
4942 choose_reload_regs (insn, avoid_return_reg)
4943 rtx insn;
4944 rtx avoid_return_reg;
4945 {
4946 register int i, j;
4947 int max_group_size = 1;
4948 enum reg_class group_class = NO_REGS;
4949 int inheritance;
4950
4951 rtx save_reload_reg_rtx[MAX_RELOADS];
4952 char save_reload_inherited[MAX_RELOADS];
4953 rtx save_reload_inheritance_insn[MAX_RELOADS];
4954 rtx save_reload_override_in[MAX_RELOADS];
4955 int save_reload_spill_index[MAX_RELOADS];
4956 HARD_REG_SET save_reload_reg_used;
4957 HARD_REG_SET save_reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
4958 HARD_REG_SET save_reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
4959 HARD_REG_SET save_reload_reg_used_in_input[MAX_RECOG_OPERANDS];
4960 HARD_REG_SET save_reload_reg_used_in_output[MAX_RECOG_OPERANDS];
4961 HARD_REG_SET save_reload_reg_used_in_op_addr;
4962 HARD_REG_SET save_reload_reg_used_in_op_addr_reload;
4963 HARD_REG_SET save_reload_reg_used_in_insn;
4964 HARD_REG_SET save_reload_reg_used_in_other_addr;
4965 HARD_REG_SET save_reload_reg_used_at_all;
4966
4967 bzero (reload_inherited, MAX_RELOADS);
4968 bzero ((char *) reload_inheritance_insn, MAX_RELOADS * sizeof (rtx));
4969 bzero ((char *) reload_override_in, MAX_RELOADS * sizeof (rtx));
4970
4971 CLEAR_HARD_REG_SET (reload_reg_used);
4972 CLEAR_HARD_REG_SET (reload_reg_used_at_all);
4973 CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr);
4974 CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr_reload);
4975 CLEAR_HARD_REG_SET (reload_reg_used_in_insn);
4976 CLEAR_HARD_REG_SET (reload_reg_used_in_other_addr);
4977
4978 for (i = 0; i < reload_n_operands; i++)
4979 {
4980 CLEAR_HARD_REG_SET (reload_reg_used_in_output[i]);
4981 CLEAR_HARD_REG_SET (reload_reg_used_in_input[i]);
4982 CLEAR_HARD_REG_SET (reload_reg_used_in_input_addr[i]);
4983 CLEAR_HARD_REG_SET (reload_reg_used_in_output_addr[i]);
4984 }
4985
4986 #ifdef SMALL_REGISTER_CLASSES
4987 /* Don't bother with avoiding the return reg
4988 if we have no mandatory reload that could use it. */
4989 if (avoid_return_reg)
4990 {
4991 int do_avoid = 0;
4992 int regno = REGNO (avoid_return_reg);
4993 int nregs
4994 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
4995 int r;
4996
4997 for (r = regno; r < regno + nregs; r++)
4998 if (spill_reg_order[r] >= 0)
4999 for (j = 0; j < n_reloads; j++)
5000 if (!reload_optional[j] && reload_reg_rtx[j] == 0
5001 && (reload_in[j] != 0 || reload_out[j] != 0
5002 || reload_secondary_p[j])
5003 &&
5004 TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[j]], r))
5005 do_avoid = 1;
5006 if (!do_avoid)
5007 avoid_return_reg = 0;
5008 }
5009 #endif /* SMALL_REGISTER_CLASSES */
5010
5011 #if 0 /* Not needed, now that we can always retry without inheritance. */
5012 /* See if we have more mandatory reloads than spill regs.
5013 If so, then we cannot risk optimizations that could prevent
5014 reloads from sharing one spill register.
5015
5016 Since we will try finding a better register than reload_reg_rtx
5017 unless it is equal to reload_in or reload_out, count such reloads. */
5018
5019 {
5020 int tem = 0;
5021 #ifdef SMALL_REGISTER_CLASSES
5022 int tem = (avoid_return_reg != 0);
5023 #endif
5024 for (j = 0; j < n_reloads; j++)
5025 if (! reload_optional[j]
5026 && (reload_in[j] != 0 || reload_out[j] != 0 || reload_secondary_p[j])
5027 && (reload_reg_rtx[j] == 0
5028 || (! rtx_equal_p (reload_reg_rtx[j], reload_in[j])
5029 && ! rtx_equal_p (reload_reg_rtx[j], reload_out[j]))))
5030 tem++;
5031 if (tem > n_spills)
5032 must_reuse = 1;
5033 }
5034 #endif
5035
5036 #ifdef SMALL_REGISTER_CLASSES
5037 /* Don't use the subroutine call return reg for a reload
5038 if we are supposed to avoid it. */
5039 if (avoid_return_reg)
5040 {
5041 int regno = REGNO (avoid_return_reg);
5042 int nregs
5043 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
5044 int r;
5045
5046 for (r = regno; r < regno + nregs; r++)
5047 if (spill_reg_order[r] >= 0)
5048 SET_HARD_REG_BIT (reload_reg_used, r);
5049 }
5050 #endif /* SMALL_REGISTER_CLASSES */
5051
5052 /* In order to be certain of getting the registers we need,
5053 we must sort the reloads into order of increasing register class.
5054 Then our grabbing of reload registers will parallel the process
5055 that provided the reload registers.
5056
5057 Also note whether any of the reloads wants a consecutive group of regs.
5058 If so, record the maximum size of the group desired and what
5059 register class contains all the groups needed by this insn. */
5060
5061 for (j = 0; j < n_reloads; j++)
5062 {
5063 reload_order[j] = j;
5064 reload_spill_index[j] = -1;
5065
5066 reload_mode[j]
5067 = (reload_inmode[j] == VOIDmode
5068 || (GET_MODE_SIZE (reload_outmode[j])
5069 > GET_MODE_SIZE (reload_inmode[j])))
5070 ? reload_outmode[j] : reload_inmode[j];
5071
5072 reload_nregs[j] = CLASS_MAX_NREGS (reload_reg_class[j], reload_mode[j]);
5073
5074 if (reload_nregs[j] > 1)
5075 {
5076 max_group_size = MAX (reload_nregs[j], max_group_size);
5077 group_class = reg_class_superunion[(int)reload_reg_class[j]][(int)group_class];
5078 }
5079
5080 /* If we have already decided to use a certain register,
5081 don't use it in another way. */
5082 if (reload_reg_rtx[j])
5083 mark_reload_reg_in_use (REGNO (reload_reg_rtx[j]), reload_opnum[j],
5084 reload_when_needed[j], reload_mode[j]);
5085 }
5086
5087 if (n_reloads > 1)
5088 qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);
5089
5090 bcopy ((char *) reload_reg_rtx, (char *) save_reload_reg_rtx,
5091 sizeof reload_reg_rtx);
5092 bcopy (reload_inherited, save_reload_inherited, sizeof reload_inherited);
5093 bcopy ((char *) reload_inheritance_insn,
5094 (char *) save_reload_inheritance_insn,
5095 sizeof reload_inheritance_insn);
5096 bcopy ((char *) reload_override_in, (char *) save_reload_override_in,
5097 sizeof reload_override_in);
5098 bcopy ((char *) reload_spill_index, (char *) save_reload_spill_index,
5099 sizeof reload_spill_index);
5100 COPY_HARD_REG_SET (save_reload_reg_used, reload_reg_used);
5101 COPY_HARD_REG_SET (save_reload_reg_used_at_all, reload_reg_used_at_all);
5102 COPY_HARD_REG_SET (save_reload_reg_used_in_op_addr,
5103 reload_reg_used_in_op_addr);
5104
5105 COPY_HARD_REG_SET (save_reload_reg_used_in_op_addr_reload,
5106 reload_reg_used_in_op_addr_reload);
5107
5108 COPY_HARD_REG_SET (save_reload_reg_used_in_insn,
5109 reload_reg_used_in_insn);
5110 COPY_HARD_REG_SET (save_reload_reg_used_in_other_addr,
5111 reload_reg_used_in_other_addr);
5112
5113 for (i = 0; i < reload_n_operands; i++)
5114 {
5115 COPY_HARD_REG_SET (save_reload_reg_used_in_output[i],
5116 reload_reg_used_in_output[i]);
5117 COPY_HARD_REG_SET (save_reload_reg_used_in_input[i],
5118 reload_reg_used_in_input[i]);
5119 COPY_HARD_REG_SET (save_reload_reg_used_in_input_addr[i],
5120 reload_reg_used_in_input_addr[i]);
5121 COPY_HARD_REG_SET (save_reload_reg_used_in_output_addr[i],
5122 reload_reg_used_in_output_addr[i]);
5123 }
5124
5125 /* If -O, try first with inheritance, then turning it off.
5126 If not -O, don't do inheritance.
5127 Using inheritance when not optimizing leads to paradoxes
5128 with fp on the 68k: fp numbers (not NaNs) fail to be equal to themselves
5129 because one side of the comparison might be inherited. */
5130
5131 for (inheritance = optimize > 0; inheritance >= 0; inheritance--)
5132 {
5133 /* Process the reloads in order of preference just found.
5134 Beyond this point, subregs can be found in reload_reg_rtx.
5135
5136 This used to look for an existing reloaded home for all
5137 of the reloads, and only then perform any new reloads.
5138 But that could lose if the reloads were done out of reg-class order
5139 because a later reload with a looser constraint might have an old
5140 home in a register needed by an earlier reload with a tighter constraint.
5141
5142 To solve this, we make two passes over the reloads, in the order
5143 described above. In the first pass we try to inherit a reload
5144 from a previous insn. If there is a later reload that needs a
5145 class that is a proper subset of the class being processed, we must
5146 also allocate a spill register during the first pass.
5147
5148 Then make a second pass over the reloads to allocate any reloads
5149 that haven't been given registers yet. */
5150
5151 CLEAR_HARD_REG_SET (reload_reg_used_for_inherit);
5152
5153 for (j = 0; j < n_reloads; j++)
5154 {
5155 register int r = reload_order[j];
5156
5157 /* Ignore reloads that got marked inoperative. */
5158 if (reload_out[r] == 0 && reload_in[r] == 0 && ! reload_secondary_p[r])
5159 continue;
5160
5161 /* If find_reloads chose a to use reload_in or reload_out as a reload
5162 register, we don't need to chose one. Otherwise, try even if it found
5163 one since we might save an insn if we find the value lying around. */
5164 if (reload_in[r] != 0 && reload_reg_rtx[r] != 0
5165 && (rtx_equal_p (reload_in[r], reload_reg_rtx[r])
5166 || rtx_equal_p (reload_out[r], reload_reg_rtx[r])))
5167 continue;
5168
5169 #if 0 /* No longer needed for correct operation.
5170 It might give better code, or might not; worth an experiment? */
5171 /* If this is an optional reload, we can't inherit from earlier insns
5172 until we are sure that any non-optional reloads have been allocated.
5173 The following code takes advantage of the fact that optional reloads
5174 are at the end of reload_order. */
5175 if (reload_optional[r] != 0)
5176 for (i = 0; i < j; i++)
5177 if ((reload_out[reload_order[i]] != 0
5178 || reload_in[reload_order[i]] != 0
5179 || reload_secondary_p[reload_order[i]])
5180 && ! reload_optional[reload_order[i]]
5181 && reload_reg_rtx[reload_order[i]] == 0)
5182 allocate_reload_reg (reload_order[i], insn, 0, inheritance);
5183 #endif
5184
5185 /* First see if this pseudo is already available as reloaded
5186 for a previous insn. We cannot try to inherit for reloads
5187 that are smaller than the maximum number of registers needed
5188 for groups unless the register we would allocate cannot be used
5189 for the groups.
5190
5191 We could check here to see if this is a secondary reload for
5192 an object that is already in a register of the desired class.
5193 This would avoid the need for the secondary reload register.
5194 But this is complex because we can't easily determine what
5195 objects might want to be loaded via this reload. So let a register
5196 be allocated here. In `emit_reload_insns' we suppress one of the
5197 loads in the case described above. */
5198
5199 if (inheritance)
5200 {
5201 register int regno = -1;
5202 enum machine_mode mode;
5203
5204 if (reload_in[r] == 0)
5205 ;
5206 else if (GET_CODE (reload_in[r]) == REG)
5207 {
5208 regno = REGNO (reload_in[r]);
5209 mode = GET_MODE (reload_in[r]);
5210 }
5211 else if (GET_CODE (reload_in_reg[r]) == REG)
5212 {
5213 regno = REGNO (reload_in_reg[r]);
5214 mode = GET_MODE (reload_in_reg[r]);
5215 }
5216 #if 0
5217 /* This won't work, since REGNO can be a pseudo reg number.
5218 Also, it takes much more hair to keep track of all the things
5219 that can invalidate an inherited reload of part of a pseudoreg. */
5220 else if (GET_CODE (reload_in[r]) == SUBREG
5221 && GET_CODE (SUBREG_REG (reload_in[r])) == REG)
5222 regno = REGNO (SUBREG_REG (reload_in[r])) + SUBREG_WORD (reload_in[r]);
5223 #endif
5224
5225 if (regno >= 0 && reg_last_reload_reg[regno] != 0)
5226 {
5227 i = spill_reg_order[REGNO (reg_last_reload_reg[regno])];
5228
5229 if (reg_reloaded_contents[i] == regno
5230 && (GET_MODE_SIZE (GET_MODE (reg_last_reload_reg[regno]))
5231 >= GET_MODE_SIZE (mode))
5232 && HARD_REGNO_MODE_OK (spill_regs[i], reload_mode[r])
5233 && TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[r]],
5234 spill_regs[i])
5235 && (reload_nregs[r] == max_group_size
5236 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) group_class],
5237 spill_regs[i]))
5238 && reload_reg_free_p (spill_regs[i], reload_opnum[r],
5239 reload_when_needed[r])
5240 && reload_reg_free_before_p (spill_regs[i],
5241 reload_opnum[r],
5242 reload_when_needed[r]))
5243 {
5244 /* If a group is needed, verify that all the subsequent
5245 registers still have their values intact. */
5246 int nr
5247 = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
5248 int k;
5249
5250 for (k = 1; k < nr; k++)
5251 if (reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
5252 != regno)
5253 break;
5254
5255 if (k == nr)
5256 {
5257 int i1;
5258
5259 /* We found a register that contains the
5260 value we need. If this register is the
5261 same as an `earlyclobber' operand of the
5262 current insn, just mark it as a place to
5263 reload from since we can't use it as the
5264 reload register itself. */
5265
5266 for (i1 = 0; i1 < n_earlyclobbers; i1++)
5267 if (reg_overlap_mentioned_for_reload_p
5268 (reg_last_reload_reg[regno],
5269 reload_earlyclobbers[i1]))
5270 break;
5271
5272 if (i1 != n_earlyclobbers
5273 /* Don't really use the inherited spill reg
5274 if we need it wider than we've got it. */
5275 || (GET_MODE_SIZE (reload_mode[r])
5276 > GET_MODE_SIZE (mode)))
5277 reload_override_in[r] = reg_last_reload_reg[regno];
5278 else
5279 {
5280 int k;
5281 /* We can use this as a reload reg. */
5282 /* Mark the register as in use for this part of
5283 the insn. */
5284 mark_reload_reg_in_use (spill_regs[i],
5285 reload_opnum[r],
5286 reload_when_needed[r],
5287 reload_mode[r]);
5288 reload_reg_rtx[r] = reg_last_reload_reg[regno];
5289 reload_inherited[r] = 1;
5290 reload_inheritance_insn[r]
5291 = reg_reloaded_insn[i];
5292 reload_spill_index[r] = i;
5293 for (k = 0; k < nr; k++)
5294 SET_HARD_REG_BIT (reload_reg_used_for_inherit,
5295 spill_regs[i + k]);
5296 }
5297 }
5298 }
5299 }
5300 }
5301
5302 /* Here's another way to see if the value is already lying around. */
5303 if (inheritance
5304 && reload_in[r] != 0
5305 && ! reload_inherited[r]
5306 && reload_out[r] == 0
5307 && (CONSTANT_P (reload_in[r])
5308 || GET_CODE (reload_in[r]) == PLUS
5309 || GET_CODE (reload_in[r]) == REG
5310 || GET_CODE (reload_in[r]) == MEM)
5311 && (reload_nregs[r] == max_group_size
5312 || ! reg_classes_intersect_p (reload_reg_class[r], group_class)))
5313 {
5314 register rtx equiv
5315 = find_equiv_reg (reload_in[r], insn, reload_reg_class[r],
5316 -1, NULL_PTR, 0, reload_mode[r]);
5317 int regno;
5318
5319 if (equiv != 0)
5320 {
5321 if (GET_CODE (equiv) == REG)
5322 regno = REGNO (equiv);
5323 else if (GET_CODE (equiv) == SUBREG)
5324 {
5325 /* This must be a SUBREG of a hard register.
5326 Make a new REG since this might be used in an
5327 address and not all machines support SUBREGs
5328 there. */
5329 regno = REGNO (SUBREG_REG (equiv)) + SUBREG_WORD (equiv);
5330 equiv = gen_rtx (REG, reload_mode[r], regno);
5331 }
5332 else
5333 abort ();
5334 }
5335
5336 /* If we found a spill reg, reject it unless it is free
5337 and of the desired class. */
5338 if (equiv != 0
5339 && ((spill_reg_order[regno] >= 0
5340 && ! reload_reg_free_before_p (regno, reload_opnum[r],
5341 reload_when_needed[r]))
5342 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[r]],
5343 regno)))
5344 equiv = 0;
5345
5346 if (equiv != 0 && TEST_HARD_REG_BIT (reload_reg_used_at_all, regno))
5347 equiv = 0;
5348
5349 if (equiv != 0 && ! HARD_REGNO_MODE_OK (regno, reload_mode[r]))
5350 equiv = 0;
5351
5352 /* We found a register that contains the value we need.
5353 If this register is the same as an `earlyclobber' operand
5354 of the current insn, just mark it as a place to reload from
5355 since we can't use it as the reload register itself. */
5356
5357 if (equiv != 0)
5358 for (i = 0; i < n_earlyclobbers; i++)
5359 if (reg_overlap_mentioned_for_reload_p (equiv,
5360 reload_earlyclobbers[i]))
5361 {
5362 reload_override_in[r] = equiv;
5363 equiv = 0;
5364 break;
5365 }
5366
5367 /* JRV: If the equiv register we have found is
5368 explicitly clobbered in the current insn, mark but
5369 don't use, as above. */
5370
5371 if (equiv != 0 && regno_clobbered_p (regno, insn))
5372 {
5373 reload_override_in[r] = equiv;
5374 equiv = 0;
5375 }
5376
5377 /* If we found an equivalent reg, say no code need be generated
5378 to load it, and use it as our reload reg. */
5379 if (equiv != 0 && regno != HARD_FRAME_POINTER_REGNUM)
5380 {
5381 int nr = HARD_REGNO_NREGS (regno, reload_mode[r]);
5382 int k;
5383 reload_reg_rtx[r] = equiv;
5384 reload_inherited[r] = 1;
5385
5386 /* If any of the hard registers in EQUIV are spill
5387 registers, mark them as in use for this insn. */
5388 for (k = 0; k < nr; k++)
5389 {
5390 i = spill_reg_order[regno + k];
5391 if (i >= 0)
5392 {
5393 mark_reload_reg_in_use (regno, reload_opnum[r],
5394 reload_when_needed[r],
5395 reload_mode[r]);
5396 SET_HARD_REG_BIT (reload_reg_used_for_inherit,
5397 regno + k);
5398 }
5399 }
5400 }
5401 }
5402
5403 /* If we found a register to use already, or if this is an optional
5404 reload, we are done. */
5405 if (reload_reg_rtx[r] != 0 || reload_optional[r] != 0)
5406 continue;
5407
5408 #if 0 /* No longer needed for correct operation. Might or might not
5409 give better code on the average. Want to experiment? */
5410
5411 /* See if there is a later reload that has a class different from our
5412 class that intersects our class or that requires less register
5413 than our reload. If so, we must allocate a register to this
5414 reload now, since that reload might inherit a previous reload
5415 and take the only available register in our class. Don't do this
5416 for optional reloads since they will force all previous reloads
5417 to be allocated. Also don't do this for reloads that have been
5418 turned off. */
5419
5420 for (i = j + 1; i < n_reloads; i++)
5421 {
5422 int s = reload_order[i];
5423
5424 if ((reload_in[s] == 0 && reload_out[s] == 0
5425 && ! reload_secondary_p[s])
5426 || reload_optional[s])
5427 continue;
5428
5429 if ((reload_reg_class[s] != reload_reg_class[r]
5430 && reg_classes_intersect_p (reload_reg_class[r],
5431 reload_reg_class[s]))
5432 || reload_nregs[s] < reload_nregs[r])
5433 break;
5434 }
5435
5436 if (i == n_reloads)
5437 continue;
5438
5439 allocate_reload_reg (r, insn, j == n_reloads - 1, inheritance);
5440 #endif
5441 }
5442
5443 /* Now allocate reload registers for anything non-optional that
5444 didn't get one yet. */
5445 for (j = 0; j < n_reloads; j++)
5446 {
5447 register int r = reload_order[j];
5448
5449 /* Ignore reloads that got marked inoperative. */
5450 if (reload_out[r] == 0 && reload_in[r] == 0 && ! reload_secondary_p[r])
5451 continue;
5452
5453 /* Skip reloads that already have a register allocated or are
5454 optional. */
5455 if (reload_reg_rtx[r] != 0 || reload_optional[r])
5456 continue;
5457
5458 if (! allocate_reload_reg (r, insn, j == n_reloads - 1, inheritance))
5459 break;
5460 }
5461
5462 /* If that loop got all the way, we have won. */
5463 if (j == n_reloads)
5464 break;
5465
5466 fail:
5467 /* Loop around and try without any inheritance. */
5468 /* First undo everything done by the failed attempt
5469 to allocate with inheritance. */
5470 bcopy ((char *) save_reload_reg_rtx, (char *) reload_reg_rtx,
5471 sizeof reload_reg_rtx);
5472 bcopy ((char *) save_reload_inherited, (char *) reload_inherited,
5473 sizeof reload_inherited);
5474 bcopy ((char *) save_reload_inheritance_insn,
5475 (char *) reload_inheritance_insn,
5476 sizeof reload_inheritance_insn);
5477 bcopy ((char *) save_reload_override_in, (char *) reload_override_in,
5478 sizeof reload_override_in);
5479 bcopy ((char *) save_reload_spill_index, (char *) reload_spill_index,
5480 sizeof reload_spill_index);
5481 COPY_HARD_REG_SET (reload_reg_used, save_reload_reg_used);
5482 COPY_HARD_REG_SET (reload_reg_used_at_all, save_reload_reg_used_at_all);
5483 COPY_HARD_REG_SET (reload_reg_used_in_op_addr,
5484 save_reload_reg_used_in_op_addr);
5485 COPY_HARD_REG_SET (reload_reg_used_in_op_addr_reload,
5486 save_reload_reg_used_in_op_addr_reload);
5487 COPY_HARD_REG_SET (reload_reg_used_in_insn,
5488 save_reload_reg_used_in_insn);
5489 COPY_HARD_REG_SET (reload_reg_used_in_other_addr,
5490 save_reload_reg_used_in_other_addr);
5491
5492 for (i = 0; i < reload_n_operands; i++)
5493 {
5494 COPY_HARD_REG_SET (reload_reg_used_in_input[i],
5495 save_reload_reg_used_in_input[i]);
5496 COPY_HARD_REG_SET (reload_reg_used_in_output[i],
5497 save_reload_reg_used_in_output[i]);
5498 COPY_HARD_REG_SET (reload_reg_used_in_input_addr[i],
5499 save_reload_reg_used_in_input_addr[i]);
5500 COPY_HARD_REG_SET (reload_reg_used_in_output_addr[i],
5501 save_reload_reg_used_in_output_addr[i]);
5502 }
5503 }
5504
5505 /* If we thought we could inherit a reload, because it seemed that
5506 nothing else wanted the same reload register earlier in the insn,
5507 verify that assumption, now that all reloads have been assigned. */
5508
5509 for (j = 0; j < n_reloads; j++)
5510 {
5511 register int r = reload_order[j];
5512
5513 if (reload_inherited[r] && reload_reg_rtx[r] != 0
5514 && ! reload_reg_free_before_p (true_regnum (reload_reg_rtx[r]),
5515 reload_opnum[r],
5516 reload_when_needed[r]))
5517 reload_inherited[r] = 0;
5518
5519 /* If we found a better place to reload from,
5520 validate it in the same fashion, if it is a reload reg. */
5521 if (reload_override_in[r]
5522 && (GET_CODE (reload_override_in[r]) == REG
5523 || GET_CODE (reload_override_in[r]) == SUBREG))
5524 {
5525 int regno = true_regnum (reload_override_in[r]);
5526 if (spill_reg_order[regno] >= 0
5527 && ! reload_reg_free_before_p (regno, reload_opnum[r],
5528 reload_when_needed[r]))
5529 reload_override_in[r] = 0;
5530 }
5531 }
5532
5533 /* Now that reload_override_in is known valid,
5534 actually override reload_in. */
5535 for (j = 0; j < n_reloads; j++)
5536 if (reload_override_in[j])
5537 reload_in[j] = reload_override_in[j];
5538
5539 /* If this reload won't be done because it has been cancelled or is
5540 optional and not inherited, clear reload_reg_rtx so other
5541 routines (such as subst_reloads) don't get confused. */
5542 for (j = 0; j < n_reloads; j++)
5543 if (reload_reg_rtx[j] != 0
5544 && ((reload_optional[j] && ! reload_inherited[j])
5545 || (reload_in[j] == 0 && reload_out[j] == 0
5546 && ! reload_secondary_p[j])))
5547 {
5548 int regno = true_regnum (reload_reg_rtx[j]);
5549
5550 if (spill_reg_order[regno] >= 0)
5551 clear_reload_reg_in_use (regno, reload_opnum[j],
5552 reload_when_needed[j], reload_mode[j]);
5553 reload_reg_rtx[j] = 0;
5554 }
5555
5556 /* Record which pseudos and which spill regs have output reloads. */
5557 for (j = 0; j < n_reloads; j++)
5558 {
5559 register int r = reload_order[j];
5560
5561 i = reload_spill_index[r];
5562
5563 /* I is nonneg if this reload used one of the spill regs.
5564 If reload_reg_rtx[r] is 0, this is an optional reload
5565 that we opted to ignore. */
5566 if (reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG
5567 && reload_reg_rtx[r] != 0)
5568 {
5569 register int nregno = REGNO (reload_out[r]);
5570 int nr = 1;
5571
5572 if (nregno < FIRST_PSEUDO_REGISTER)
5573 nr = HARD_REGNO_NREGS (nregno, reload_mode[r]);
5574
5575 while (--nr >= 0)
5576 reg_has_output_reload[nregno + nr] = 1;
5577
5578 if (i >= 0)
5579 {
5580 nr = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
5581 while (--nr >= 0)
5582 SET_HARD_REG_BIT (reg_is_output_reload, spill_regs[i] + nr);
5583 }
5584
5585 if (reload_when_needed[r] != RELOAD_OTHER
5586 && reload_when_needed[r] != RELOAD_FOR_OUTPUT
5587 && reload_when_needed[r] != RELOAD_FOR_INSN)
5588 abort ();
5589 }
5590 }
5591 }
5592 \f
5593 /* If SMALL_REGISTER_CLASSES are defined, we may not have merged two
5594 reloads of the same item for fear that we might not have enough reload
5595 registers. However, normally they will get the same reload register
5596 and hence actually need not be loaded twice.
5597
5598 Here we check for the most common case of this phenomenon: when we have
5599 a number of reloads for the same object, each of which were allocated
5600 the same reload_reg_rtx, that reload_reg_rtx is not used for any other
5601 reload, and is not modified in the insn itself. If we find such,
5602 merge all the reloads and set the resulting reload to RELOAD_OTHER.
5603 This will not increase the number of spill registers needed and will
5604 prevent redundant code. */
5605
5606 #ifdef SMALL_REGISTER_CLASSES
5607
5608 static void
5609 merge_assigned_reloads (insn)
5610 rtx insn;
5611 {
5612 int i, j;
5613
5614 /* Scan all the reloads looking for ones that only load values and
5615 are not already RELOAD_OTHER and ones whose reload_reg_rtx are
5616 assigned and not modified by INSN. */
5617
5618 for (i = 0; i < n_reloads; i++)
5619 {
5620 if (reload_in[i] == 0 || reload_when_needed[i] == RELOAD_OTHER
5621 || reload_out[i] != 0 || reload_reg_rtx[i] == 0
5622 || reg_set_p (reload_reg_rtx[i], insn))
5623 continue;
5624
5625 /* Look at all other reloads. Ensure that the only use of this
5626 reload_reg_rtx is in a reload that just loads the same value
5627 as we do. Note that any secondary reloads must be of the identical
5628 class since the values, modes, and result registers are the
5629 same, so we need not do anything with any secondary reloads. */
5630
5631 for (j = 0; j < n_reloads; j++)
5632 {
5633 if (i == j || reload_reg_rtx[j] == 0
5634 || ! reg_overlap_mentioned_p (reload_reg_rtx[j],
5635 reload_reg_rtx[i]))
5636 continue;
5637
5638 /* If the reload regs aren't exactly the same (e.g, different modes)
5639 or if the values are different, we can't merge anything with this
5640 reload register. */
5641
5642 if (! rtx_equal_p (reload_reg_rtx[i], reload_reg_rtx[j])
5643 || reload_out[j] != 0 || reload_in[j] == 0
5644 || ! rtx_equal_p (reload_in[i], reload_in[j]))
5645 break;
5646 }
5647
5648 /* If all is OK, merge the reloads. Only set this to RELOAD_OTHER if
5649 we, in fact, found any matching reloads. */
5650
5651 if (j == n_reloads)
5652 {
5653 for (j = 0; j < n_reloads; j++)
5654 if (i != j && reload_reg_rtx[j] != 0
5655 && rtx_equal_p (reload_reg_rtx[i], reload_reg_rtx[j]))
5656 {
5657 reload_when_needed[i] = RELOAD_OTHER;
5658 reload_in[j] = 0;
5659 transfer_replacements (i, j);
5660 }
5661
5662 /* If this is now RELOAD_OTHER, look for any reloads that load
5663 parts of this operand and set them to RELOAD_FOR_OTHER_ADDRESS
5664 if they were for inputs, RELOAD_OTHER for outputs. Note that
5665 this test is equivalent to looking for reloads for this operand
5666 number. */
5667
5668 if (reload_when_needed[i] == RELOAD_OTHER)
5669 for (j = 0; j < n_reloads; j++)
5670 if (reload_in[j] != 0
5671 && reload_when_needed[i] != RELOAD_OTHER
5672 && reg_overlap_mentioned_for_reload_p (reload_in[j],
5673 reload_in[i]))
5674 reload_when_needed[j]
5675 = reload_when_needed[i] == RELOAD_FOR_INPUT_ADDRESS
5676 ? RELOAD_FOR_OTHER_ADDRESS : RELOAD_OTHER;
5677 }
5678 }
5679 }
#endif /* SMALL_REGISTER_CLASSES */
5681 \f
5682 /* Output insns to reload values in and out of the chosen reload regs. */
5683
5684 static void
5685 emit_reload_insns (insn)
5686 rtx insn;
5687 {
5688 register int j;
5689 rtx input_reload_insns[MAX_RECOG_OPERANDS];
5690 rtx other_input_address_reload_insns = 0;
5691 rtx other_input_reload_insns = 0;
5692 rtx input_address_reload_insns[MAX_RECOG_OPERANDS];
5693 rtx output_reload_insns[MAX_RECOG_OPERANDS];
5694 rtx output_address_reload_insns[MAX_RECOG_OPERANDS];
5695 rtx operand_reload_insns = 0;
5696 rtx other_operand_reload_insns = 0;
5697 rtx other_output_reload_insns[MAX_RECOG_OPERANDS];
5698 rtx following_insn = NEXT_INSN (insn);
5699 rtx before_insn = insn;
5700 int special;
5701 /* Values to be put in spill_reg_store are put here first. */
5702 rtx new_spill_reg_store[FIRST_PSEUDO_REGISTER];
5703
5704 for (j = 0; j < reload_n_operands; j++)
5705 input_reload_insns[j] = input_address_reload_insns[j]
5706 = output_reload_insns[j] = output_address_reload_insns[j]
5707 = other_output_reload_insns[j] = 0;
5708
5709 /* Now output the instructions to copy the data into and out of the
5710 reload registers. Do these in the order that the reloads were reported,
5711 since reloads of base and index registers precede reloads of operands
5712 and the operands may need the base and index registers reloaded. */
5713
5714 for (j = 0; j < n_reloads; j++)
5715 {
5716 register rtx old;
5717 rtx oldequiv_reg = 0;
5718 rtx this_reload_insn = 0;
5719
5720 if (reload_spill_index[j] >= 0)
5721 new_spill_reg_store[reload_spill_index[j]] = 0;
5722
5723 old = reload_in[j];
5724 if (old != 0 && ! reload_inherited[j]
5725 && ! rtx_equal_p (reload_reg_rtx[j], old)
5726 && reload_reg_rtx[j] != 0)
5727 {
5728 register rtx reloadreg = reload_reg_rtx[j];
5729 rtx oldequiv = 0;
5730 enum machine_mode mode;
5731 rtx *where;
5732
5733 /* Determine the mode to reload in.
5734 This is very tricky because we have three to choose from.
5735 There is the mode the insn operand wants (reload_inmode[J]).
5736 There is the mode of the reload register RELOADREG.
5737 There is the intrinsic mode of the operand, which we could find
5738 by stripping some SUBREGs.
5739 It turns out that RELOADREG's mode is irrelevant:
5740 we can change that arbitrarily.
5741
5742 Consider (SUBREG:SI foo:QI) as an operand that must be SImode;
5743 then the reload reg may not support QImode moves, so use SImode.
5744 If foo is in memory due to spilling a pseudo reg, this is safe,
5745 because the QImode value is in the least significant part of a
5746 slot big enough for a SImode. If foo is some other sort of
5747 memory reference, then it is impossible to reload this case,
5748 so previous passes had better make sure this never happens.
5749
5750 Then consider a one-word union which has SImode and one of its
5751 members is a float, being fetched as (SUBREG:SF union:SI).
5752 We must fetch that as SFmode because we could be loading into
5753 a float-only register. In this case OLD's mode is correct.
5754
5755 Consider an immediate integer: it has VOIDmode. Here we need
5756 to get a mode from something else.
5757
5758 In some cases, there is a fourth mode, the operand's
5759 containing mode. If the insn specifies a containing mode for
5760 this operand, it overrides all others.
5761
5762 I am not sure whether the algorithm here is always right,
5763 but it does the right things in those cases. */
5764
5765 mode = GET_MODE (old);
5766 if (mode == VOIDmode)
5767 mode = reload_inmode[j];
5768
5769 #ifdef SECONDARY_INPUT_RELOAD_CLASS
5770 /* If we need a secondary register for this operation, see if
5771 the value is already in a register in that class. Don't
5772 do this if the secondary register will be used as a scratch
5773 register. */
5774
5775 if (reload_secondary_in_reload[j] >= 0
5776 && reload_secondary_in_icode[j] == CODE_FOR_nothing
5777 && optimize)
5778 oldequiv
5779 = find_equiv_reg (old, insn,
5780 reload_reg_class[reload_secondary_in_reload[j]],
5781 -1, NULL_PTR, 0, mode);
5782 #endif
5783
5784 /* If reloading from memory, see if there is a register
5785 that already holds the same value. If so, reload from there.
5786 We can pass 0 as the reload_reg_p argument because
5787 any other reload has either already been emitted,
5788 in which case find_equiv_reg will see the reload-insn,
5789 or has yet to be emitted, in which case it doesn't matter
5790 because we will use this equiv reg right away. */
5791
5792 if (oldequiv == 0 && optimize
5793 && (GET_CODE (old) == MEM
5794 || (GET_CODE (old) == REG
5795 && REGNO (old) >= FIRST_PSEUDO_REGISTER
5796 && reg_renumber[REGNO (old)] < 0)))
5797 oldequiv = find_equiv_reg (old, insn, ALL_REGS,
5798 -1, NULL_PTR, 0, mode);
5799
5800 if (oldequiv)
5801 {
5802 int regno = true_regnum (oldequiv);
5803
5804 /* If OLDEQUIV is a spill register, don't use it for this
5805 if any other reload needs it at an earlier stage of this insn
5806 or at this stage. */
5807 if (spill_reg_order[regno] >= 0
5808 && (! reload_reg_free_p (regno, reload_opnum[j],
5809 reload_when_needed[j])
5810 || ! reload_reg_free_before_p (regno, reload_opnum[j],
5811 reload_when_needed[j])))
5812 oldequiv = 0;
5813
5814 /* If OLDEQUIV is not a spill register,
5815 don't use it if any other reload wants it. */
5816 if (spill_reg_order[regno] < 0)
5817 {
5818 int k;
5819 for (k = 0; k < n_reloads; k++)
5820 if (reload_reg_rtx[k] != 0 && k != j
5821 && reg_overlap_mentioned_for_reload_p (reload_reg_rtx[k],
5822 oldequiv))
5823 {
5824 oldequiv = 0;
5825 break;
5826 }
5827 }
5828
5829 /* If it is no cheaper to copy from OLDEQUIV into the
5830 reload register than it would be to move from memory,
5831 don't use it. Likewise, if we need a secondary register
5832 or memory. */
5833
5834 if (oldequiv != 0
5835 && ((REGNO_REG_CLASS (regno) != reload_reg_class[j]
5836 && (REGISTER_MOVE_COST (REGNO_REG_CLASS (regno),
5837 reload_reg_class[j])
5838 >= MEMORY_MOVE_COST (mode)))
5839 #ifdef SECONDARY_INPUT_RELOAD_CLASS
5840 || (SECONDARY_INPUT_RELOAD_CLASS (reload_reg_class[j],
5841 mode, oldequiv)
5842 != NO_REGS)
5843 #endif
5844 #ifdef SECONDARY_MEMORY_NEEDED
5845 || SECONDARY_MEMORY_NEEDED (reload_reg_class[j],
5846 REGNO_REG_CLASS (regno),
5847 mode)
5848 #endif
5849 ))
5850 oldequiv = 0;
5851 }
5852
5853 if (oldequiv == 0)
5854 oldequiv = old;
5855 else if (GET_CODE (oldequiv) == REG)
5856 oldequiv_reg = oldequiv;
5857 else if (GET_CODE (oldequiv) == SUBREG)
5858 oldequiv_reg = SUBREG_REG (oldequiv);
5859
5860 /* If we are reloading from a register that was recently stored in
5861 with an output-reload, see if we can prove there was
5862 actually no need to store the old value in it. */
5863
5864 if (optimize && GET_CODE (oldequiv) == REG
5865 && REGNO (oldequiv) < FIRST_PSEUDO_REGISTER
5866 && spill_reg_order[REGNO (oldequiv)] >= 0
5867 && spill_reg_store[spill_reg_order[REGNO (oldequiv)]] != 0
5868 && find_reg_note (insn, REG_DEAD, reload_in[j])
5869 /* This is unsafe if operand occurs more than once in current
5870 insn. Perhaps some occurrences weren't reloaded. */
5871 && count_occurrences (PATTERN (insn), reload_in[j]) == 1)
5872 delete_output_reload
5873 (insn, j, spill_reg_store[spill_reg_order[REGNO (oldequiv)]]);
5874
5875 /* Encapsulate both RELOADREG and OLDEQUIV into that mode,
5876 then load RELOADREG from OLDEQUIV. Note that we cannot use
5877 gen_lowpart_common since it can do the wrong thing when
5878 RELOADREG has a multi-word mode. Note that RELOADREG
5879 must always be a REG here. */
5880
5881 if (GET_MODE (reloadreg) != mode)
5882 reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
5883 while (GET_CODE (oldequiv) == SUBREG && GET_MODE (oldequiv) != mode)
5884 oldequiv = SUBREG_REG (oldequiv);
5885 if (GET_MODE (oldequiv) != VOIDmode
5886 && mode != GET_MODE (oldequiv))
5887 oldequiv = gen_rtx (SUBREG, mode, oldequiv, 0);
5888
5889 /* Switch to the right place to emit the reload insns. */
5890 switch (reload_when_needed[j])
5891 {
5892 case RELOAD_OTHER:
5893 where = &other_input_reload_insns;
5894 break;
5895 case RELOAD_FOR_INPUT:
5896 where = &input_reload_insns[reload_opnum[j]];
5897 break;
5898 case RELOAD_FOR_INPUT_ADDRESS:
5899 where = &input_address_reload_insns[reload_opnum[j]];
5900 break;
5901 case RELOAD_FOR_OUTPUT_ADDRESS:
5902 where = &output_address_reload_insns[reload_opnum[j]];
5903 break;
5904 case RELOAD_FOR_OPERAND_ADDRESS:
5905 where = &operand_reload_insns;
5906 break;
5907 case RELOAD_FOR_OPADDR_ADDR:
5908 where = &other_operand_reload_insns;
5909 break;
5910 case RELOAD_FOR_OTHER_ADDRESS:
5911 where = &other_input_address_reload_insns;
5912 break;
5913 default:
5914 abort ();
5915 }
5916
5917 push_to_sequence (*where);
5918 special = 0;
5919
5920 /* Auto-increment addresses must be reloaded in a special way. */
5921 if (GET_CODE (oldequiv) == POST_INC
5922 || GET_CODE (oldequiv) == POST_DEC
5923 || GET_CODE (oldequiv) == PRE_INC
5924 || GET_CODE (oldequiv) == PRE_DEC)
5925 {
5926 /* We are not going to bother supporting the case where a
5927 incremented register can't be copied directly from
5928 OLDEQUIV since this seems highly unlikely. */
5929 if (reload_secondary_in_reload[j] >= 0)
5930 abort ();
5931 /* Prevent normal processing of this reload. */
5932 special = 1;
5933 /* Output a special code sequence for this case. */
5934 inc_for_reload (reloadreg, oldequiv, reload_inc[j]);
5935 }
5936
5937 /* If we are reloading a pseudo-register that was set by the previous
5938 insn, see if we can get rid of that pseudo-register entirely
5939 by redirecting the previous insn into our reload register. */
5940
5941 else if (optimize && GET_CODE (old) == REG
5942 && REGNO (old) >= FIRST_PSEUDO_REGISTER
5943 && dead_or_set_p (insn, old)
5944 /* This is unsafe if some other reload
5945 uses the same reg first. */
5946 && reload_reg_free_before_p (REGNO (reloadreg),
5947 reload_opnum[j],
5948 reload_when_needed[j]))
5949 {
5950 rtx temp = PREV_INSN (insn);
5951 while (temp && GET_CODE (temp) == NOTE)
5952 temp = PREV_INSN (temp);
5953 if (temp
5954 && GET_CODE (temp) == INSN
5955 && GET_CODE (PATTERN (temp)) == SET
5956 && SET_DEST (PATTERN (temp)) == old
5957 /* Make sure we can access insn_operand_constraint. */
5958 && asm_noperands (PATTERN (temp)) < 0
5959 /* This is unsafe if prev insn rejects our reload reg. */
5960 && constraint_accepts_reg_p (insn_operand_constraint[recog_memoized (temp)][0],
5961 reloadreg)
5962 /* This is unsafe if operand occurs more than once in current
5963 insn. Perhaps some occurrences aren't reloaded. */
5964 && count_occurrences (PATTERN (insn), old) == 1
5965 /* Don't risk splitting a matching pair of operands. */
5966 && ! reg_mentioned_p (old, SET_SRC (PATTERN (temp))))
5967 {
5968 /* Store into the reload register instead of the pseudo. */
5969 SET_DEST (PATTERN (temp)) = reloadreg;
5970 /* If these are the only uses of the pseudo reg,
5971 pretend for GDB it lives in the reload reg we used. */
5972 if (reg_n_deaths[REGNO (old)] == 1
5973 && reg_n_sets[REGNO (old)] == 1)
5974 {
5975 reg_renumber[REGNO (old)] = REGNO (reload_reg_rtx[j]);
5976 alter_reg (REGNO (old), -1);
5977 }
5978 special = 1;
5979 }
5980 }
5981
5982 /* We can't do that, so output an insn to load RELOADREG. */
5983
5984 if (! special)
5985 {
5986 #ifdef SECONDARY_INPUT_RELOAD_CLASS
5987 rtx second_reload_reg = 0;
5988 enum insn_code icode;
5989
5990 /* If we have a secondary reload, pick up the secondary register
5991 and icode, if any. If OLDEQUIV and OLD are different or
5992 if this is an in-out reload, recompute whether or not we
5993 still need a secondary register and what the icode should
5994 be. If we still need a secondary register and the class or
5995 icode is different, go back to reloading from OLD if using
5996 OLDEQUIV means that we got the wrong type of register. We
5997 cannot have different class or icode due to an in-out reload
5998 because we don't make such reloads when both the input and
5999 output need secondary reload registers. */
6000
6001 if (reload_secondary_in_reload[j] >= 0)
6002 {
6003 int secondary_reload = reload_secondary_in_reload[j];
6004 rtx real_oldequiv = oldequiv;
6005 rtx real_old = old;
6006
6007 /* If OLDEQUIV is a pseudo with a MEM, get the real MEM
6008 and similarly for OLD.
6009 See comments in get_secondary_reload in reload.c. */
6010 if (GET_CODE (oldequiv) == REG
6011 && REGNO (oldequiv) >= FIRST_PSEUDO_REGISTER
6012 && reg_equiv_mem[REGNO (oldequiv)] != 0)
6013 real_oldequiv = reg_equiv_mem[REGNO (oldequiv)];
6014
6015 if (GET_CODE (old) == REG
6016 && REGNO (old) >= FIRST_PSEUDO_REGISTER
6017 && reg_equiv_mem[REGNO (old)] != 0)
6018 real_old = reg_equiv_mem[REGNO (old)];
6019
6020 second_reload_reg = reload_reg_rtx[secondary_reload];
6021 icode = reload_secondary_in_icode[j];
6022
6023 if ((old != oldequiv && ! rtx_equal_p (old, oldequiv))
6024 || (reload_in[j] != 0 && reload_out[j] != 0))
6025 {
6026 enum reg_class new_class
6027 = SECONDARY_INPUT_RELOAD_CLASS (reload_reg_class[j],
6028 mode, real_oldequiv);
6029
6030 if (new_class == NO_REGS)
6031 second_reload_reg = 0;
6032 else
6033 {
6034 enum insn_code new_icode;
6035 enum machine_mode new_mode;
6036
6037 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) new_class],
6038 REGNO (second_reload_reg)))
6039 oldequiv = old, real_oldequiv = real_old;
6040 else
6041 {
6042 new_icode = reload_in_optab[(int) mode];
6043 if (new_icode != CODE_FOR_nothing
6044 && ((insn_operand_predicate[(int) new_icode][0]
6045 && ! ((*insn_operand_predicate[(int) new_icode][0])
6046 (reloadreg, mode)))
6047 || (insn_operand_predicate[(int) new_icode][1]
6048 && ! ((*insn_operand_predicate[(int) new_icode][1])
6049 (real_oldequiv, mode)))))
6050 new_icode = CODE_FOR_nothing;
6051
6052 if (new_icode == CODE_FOR_nothing)
6053 new_mode = mode;
6054 else
6055 new_mode = insn_operand_mode[(int) new_icode][2];
6056
6057 if (GET_MODE (second_reload_reg) != new_mode)
6058 {
6059 if (!HARD_REGNO_MODE_OK (REGNO (second_reload_reg),
6060 new_mode))
6061 oldequiv = old, real_oldequiv = real_old;
6062 else
6063 second_reload_reg
6064 = gen_rtx (REG, new_mode,
6065 REGNO (second_reload_reg));
6066 }
6067 }
6068 }
6069 }
6070
6071 /* If we still need a secondary reload register, check
6072 to see if it is being used as a scratch or intermediate
6073 register and generate code appropriately. If we need
6074 a scratch register, use REAL_OLDEQUIV since the form of
6075 the insn may depend on the actual address if it is
6076 a MEM. */
6077
6078 if (second_reload_reg)
6079 {
6080 if (icode != CODE_FOR_nothing)
6081 {
6082 emit_insn (GEN_FCN (icode) (reloadreg, real_oldequiv,
6083 second_reload_reg));
6084 special = 1;
6085 }
6086 else
6087 {
6088 /* See if we need a scratch register to load the
6089 intermediate register (a tertiary reload). */
6090 enum insn_code tertiary_icode
6091 = reload_secondary_in_icode[secondary_reload];
6092
6093 if (tertiary_icode != CODE_FOR_nothing)
6094 {
6095 rtx third_reload_reg
6096 = reload_reg_rtx[reload_secondary_in_reload[secondary_reload]];
6097
6098 emit_insn ((GEN_FCN (tertiary_icode)
6099 (second_reload_reg, real_oldequiv,
6100 third_reload_reg)));
6101 }
6102 else
6103 gen_reload (second_reload_reg, oldequiv,
6104 reload_opnum[j],
6105 reload_when_needed[j]);
6106
6107 oldequiv = second_reload_reg;
6108 }
6109 }
6110 }
6111 #endif
6112
6113 if (! special && ! rtx_equal_p (reloadreg, oldequiv))
6114 gen_reload (reloadreg, oldequiv, reload_opnum[j],
6115 reload_when_needed[j]);
6116
6117 #if defined(SECONDARY_INPUT_RELOAD_CLASS) && defined(PRESERVE_DEATH_INFO_REGNO_P)
6118 /* We may have to make a REG_DEAD note for the secondary reload
6119 register in the insns we just made. Find the last insn that
6120 mentioned the register. */
6121 if (! special && second_reload_reg
6122 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (second_reload_reg)))
6123 {
6124 rtx prev;
6125
6126 for (prev = get_last_insn (); prev;
6127 prev = PREV_INSN (prev))
6128 if (GET_RTX_CLASS (GET_CODE (prev) == 'i')
6129 && reg_overlap_mentioned_for_reload_p (second_reload_reg,
6130 PATTERN (prev)))
6131 {
6132 REG_NOTES (prev) = gen_rtx (EXPR_LIST, REG_DEAD,
6133 second_reload_reg,
6134 REG_NOTES (prev));
6135 break;
6136 }
6137 }
6138 #endif
6139 }
6140
6141 this_reload_insn = get_last_insn ();
6142 /* End this sequence. */
6143 *where = get_insns ();
6144 end_sequence ();
6145 }
6146
6147 /* Add a note saying the input reload reg
6148 dies in this insn, if anyone cares. */
6149 #ifdef PRESERVE_DEATH_INFO_REGNO_P
6150 if (old != 0
6151 && reload_reg_rtx[j] != old
6152 && reload_reg_rtx[j] != 0
6153 && reload_out[j] == 0
6154 && ! reload_inherited[j]
6155 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (reload_reg_rtx[j])))
6156 {
6157 register rtx reloadreg = reload_reg_rtx[j];
6158
6159 #if 0
6160 /* We can't abort here because we need to support this for sched.c.
6161 It's not terrible to miss a REG_DEAD note, but we should try
6162 to figure out how to do this correctly. */
6163 /* The code below is incorrect for address-only reloads. */
6164 if (reload_when_needed[j] != RELOAD_OTHER
6165 && reload_when_needed[j] != RELOAD_FOR_INPUT)
6166 abort ();
6167 #endif
6168
6169 /* Add a death note to this insn, for an input reload. */
6170
6171 if ((reload_when_needed[j] == RELOAD_OTHER
6172 || reload_when_needed[j] == RELOAD_FOR_INPUT)
6173 && ! dead_or_set_p (insn, reloadreg))
6174 REG_NOTES (insn)
6175 = gen_rtx (EXPR_LIST, REG_DEAD,
6176 reloadreg, REG_NOTES (insn));
6177 }
6178
6179 /* When we inherit a reload, the last marked death of the reload reg
6180 may no longer really be a death. */
6181 if (reload_reg_rtx[j] != 0
6182 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (reload_reg_rtx[j]))
6183 && reload_inherited[j])
6184 {
6185 /* Handle inheriting an output reload.
6186 Remove the death note from the output reload insn. */
6187 if (reload_spill_index[j] >= 0
6188 && GET_CODE (reload_in[j]) == REG
6189 && spill_reg_store[reload_spill_index[j]] != 0
6190 && find_regno_note (spill_reg_store[reload_spill_index[j]],
6191 REG_DEAD, REGNO (reload_reg_rtx[j])))
6192 remove_death (REGNO (reload_reg_rtx[j]),
6193 spill_reg_store[reload_spill_index[j]]);
6194 /* Likewise for input reloads that were inherited. */
6195 else if (reload_spill_index[j] >= 0
6196 && GET_CODE (reload_in[j]) == REG
6197 && spill_reg_store[reload_spill_index[j]] == 0
6198 && reload_inheritance_insn[j] != 0
6199 && find_regno_note (reload_inheritance_insn[j], REG_DEAD,
6200 REGNO (reload_reg_rtx[j])))
6201 remove_death (REGNO (reload_reg_rtx[j]),
6202 reload_inheritance_insn[j]);
6203 else
6204 {
6205 rtx prev;
6206
6207 /* We got this register from find_equiv_reg.
6208 Search back for its last death note and get rid of it.
6209 But don't search back too far.
6210 Don't go past a place where this reg is set,
6211 since a death note before that remains valid. */
6212 for (prev = PREV_INSN (insn);
6213 prev && GET_CODE (prev) != CODE_LABEL;
6214 prev = PREV_INSN (prev))
6215 if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
6216 && dead_or_set_p (prev, reload_reg_rtx[j]))
6217 {
6218 if (find_regno_note (prev, REG_DEAD,
6219 REGNO (reload_reg_rtx[j])))
6220 remove_death (REGNO (reload_reg_rtx[j]), prev);
6221 break;
6222 }
6223 }
6224 }
6225
6226 /* We might have used find_equiv_reg above to choose an alternate
6227 place from which to reload. If so, and it died, we need to remove
6228 that death and move it to one of the insns we just made. */
6229
6230 if (oldequiv_reg != 0
6231 && PRESERVE_DEATH_INFO_REGNO_P (true_regnum (oldequiv_reg)))
6232 {
6233 rtx prev, prev1;
6234
6235 for (prev = PREV_INSN (insn); prev && GET_CODE (prev) != CODE_LABEL;
6236 prev = PREV_INSN (prev))
6237 if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
6238 && dead_or_set_p (prev, oldequiv_reg))
6239 {
6240 if (find_regno_note (prev, REG_DEAD, REGNO (oldequiv_reg)))
6241 {
6242 for (prev1 = this_reload_insn;
6243 prev1; prev1 = PREV_INSN (prev1))
6244 if (GET_RTX_CLASS (GET_CODE (prev1) == 'i')
6245 && reg_overlap_mentioned_for_reload_p (oldequiv_reg,
6246 PATTERN (prev1)))
6247 {
6248 REG_NOTES (prev1) = gen_rtx (EXPR_LIST, REG_DEAD,
6249 oldequiv_reg,
6250 REG_NOTES (prev1));
6251 break;
6252 }
6253 remove_death (REGNO (oldequiv_reg), prev);
6254 }
6255 break;
6256 }
6257 }
6258 #endif
6259
6260 /* If we are reloading a register that was recently stored in with an
6261 output-reload, see if we can prove there was
6262 actually no need to store the old value in it. */
6263
6264 if (optimize && reload_inherited[j] && reload_spill_index[j] >= 0
6265 && reload_in[j] != 0
6266 && GET_CODE (reload_in[j]) == REG
6267 #if 0
6268 /* There doesn't seem to be any reason to restrict this to pseudos
6269 and doing so loses in the case where we are copying from a
6270 register of the wrong class. */
6271 && REGNO (reload_in[j]) >= FIRST_PSEUDO_REGISTER
6272 #endif
6273 && spill_reg_store[reload_spill_index[j]] != 0
6274 /* This is unsafe if some other reload uses the same reg first. */
6275 && reload_reg_free_before_p (spill_regs[reload_spill_index[j]],
6276 reload_opnum[j], reload_when_needed[j])
6277 && dead_or_set_p (insn, reload_in[j])
6278 /* This is unsafe if operand occurs more than once in current
6279 insn. Perhaps some occurrences weren't reloaded. */
6280 && count_occurrences (PATTERN (insn), reload_in[j]) == 1)
6281 delete_output_reload (insn, j,
6282 spill_reg_store[reload_spill_index[j]]);
6283
6284 /* Input-reloading is done. Now do output-reloading,
6285 storing the value from the reload-register after the main insn
6286 if reload_out[j] is nonzero.
6287
6288 ??? At some point we need to support handling output reloads of
6289 JUMP_INSNs or insns that set cc0. */
6290 old = reload_out[j];
6291 if (old != 0
6292 && reload_reg_rtx[j] != old
6293 && reload_reg_rtx[j] != 0)
6294 {
6295 register rtx reloadreg = reload_reg_rtx[j];
6296 register rtx second_reloadreg = 0;
6297 rtx note, p;
6298 enum machine_mode mode;
6299 int special = 0;
6300
6301 /* An output operand that dies right away does need a reload,
6302 but need not be copied from it. Show the new location in the
6303 REG_UNUSED note. */
6304 if ((GET_CODE (old) == REG || GET_CODE (old) == SCRATCH)
6305 && (note = find_reg_note (insn, REG_UNUSED, old)) != 0)
6306 {
6307 XEXP (note, 0) = reload_reg_rtx[j];
6308 continue;
6309 }
6310 /* Likewise for a SUBREG of an operand that dies. */
6311 else if (GET_CODE (old) == SUBREG
6312 && GET_CODE (SUBREG_REG (old)) == REG
6313 && 0 != (note = find_reg_note (insn, REG_UNUSED,
6314 SUBREG_REG (old))))
6315 {
6316 XEXP (note, 0) = gen_lowpart_common (GET_MODE (old),
6317 reload_reg_rtx[j]);
6318 continue;
6319 }
6320 else if (GET_CODE (old) == SCRATCH)
6321 /* If we aren't optimizing, there won't be a REG_UNUSED note,
6322 but we don't want to make an output reload. */
6323 continue;
6324
6325 #if 0
6326 /* Strip off of OLD any size-increasing SUBREGs such as
6327 (SUBREG:SI foo:QI 0). */
6328
6329 while (GET_CODE (old) == SUBREG && SUBREG_WORD (old) == 0
6330 && (GET_MODE_SIZE (GET_MODE (old))
6331 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (old)))))
6332 old = SUBREG_REG (old);
6333 #endif
6334
6335 /* If is a JUMP_INSN, we can't support output reloads yet. */
6336 if (GET_CODE (insn) == JUMP_INSN)
6337 abort ();
6338
6339 if (reload_when_needed[j] == RELOAD_OTHER)
6340 start_sequence ();
6341 else
6342 push_to_sequence (output_reload_insns[reload_opnum[j]]);
6343
6344 /* Determine the mode to reload in.
6345 See comments above (for input reloading). */
6346
6347 mode = GET_MODE (old);
6348 if (mode == VOIDmode)
6349 {
6350 /* VOIDmode should never happen for an output. */
6351 if (asm_noperands (PATTERN (insn)) < 0)
6352 /* It's the compiler's fault. */
6353 fatal_insn ("VOIDmode on an output", insn);
6354 error_for_asm (insn, "output operand is constant in `asm'");
6355 /* Prevent crash--use something we know is valid. */
6356 mode = word_mode;
6357 old = gen_rtx (REG, mode, REGNO (reloadreg));
6358 }
6359
6360 if (GET_MODE (reloadreg) != mode)
6361 reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
6362
6363 #ifdef SECONDARY_OUTPUT_RELOAD_CLASS
6364
6365 /* If we need two reload regs, set RELOADREG to the intermediate
6366 one, since it will be stored into OLD. We might need a secondary
6367 register only for an input reload, so check again here. */
6368
6369 if (reload_secondary_out_reload[j] >= 0)
6370 {
6371 rtx real_old = old;
6372
6373 if (GET_CODE (old) == REG && REGNO (old) >= FIRST_PSEUDO_REGISTER
6374 && reg_equiv_mem[REGNO (old)] != 0)
6375 real_old = reg_equiv_mem[REGNO (old)];
6376
6377 if((SECONDARY_OUTPUT_RELOAD_CLASS (reload_reg_class[j],
6378 mode, real_old)
6379 != NO_REGS))
6380 {
6381 second_reloadreg = reloadreg;
6382 reloadreg = reload_reg_rtx[reload_secondary_out_reload[j]];
6383
6384 /* See if RELOADREG is to be used as a scratch register
6385 or as an intermediate register. */
6386 if (reload_secondary_out_icode[j] != CODE_FOR_nothing)
6387 {
6388 emit_insn ((GEN_FCN (reload_secondary_out_icode[j])
6389 (real_old, second_reloadreg, reloadreg)));
6390 special = 1;
6391 }
6392 else
6393 {
6394 /* See if we need both a scratch and intermediate reload
6395 register. */
6396
6397 int secondary_reload = reload_secondary_out_reload[j];
6398 enum insn_code tertiary_icode
6399 = reload_secondary_out_icode[secondary_reload];
6400
6401 if (GET_MODE (reloadreg) != mode)
6402 reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
6403
6404 if (tertiary_icode != CODE_FOR_nothing)
6405 {
6406 rtx third_reloadreg
6407 = reload_reg_rtx[reload_secondary_out_reload[secondary_reload]];
6408 rtx tem;
6409
6410 /* Copy primary reload reg to secondary reload reg.
6411 (Note that these have been swapped above, then
6412 secondary reload reg to OLD using our insn. */
6413
6414 /* If REAL_OLD is a paradoxical SUBREG, remove it
6415 and try to put the opposite SUBREG on
6416 RELOADREG. */
6417 if (GET_CODE (real_old) == SUBREG
6418 && (GET_MODE_SIZE (GET_MODE (real_old))
6419 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (real_old))))
6420 && 0 != (tem = gen_lowpart_common
6421 (GET_MODE (SUBREG_REG (real_old)),
6422 reloadreg)))
6423 real_old = SUBREG_REG (real_old), reloadreg = tem;
6424
6425 gen_reload (reloadreg, second_reloadreg,
6426 reload_opnum[j], reload_when_needed[j]);
6427 emit_insn ((GEN_FCN (tertiary_icode)
6428 (real_old, reloadreg, third_reloadreg)));
6429 special = 1;
6430 }
6431
6432 else
6433 /* Copy between the reload regs here and then to
6434 OUT later. */
6435
6436 gen_reload (reloadreg, second_reloadreg,
6437 reload_opnum[j], reload_when_needed[j]);
6438 }
6439 }
6440 }
6441 #endif
6442
6443 /* Output the last reload insn. */
6444 if (! special)
6445 gen_reload (old, reloadreg, reload_opnum[j],
6446 reload_when_needed[j]);
6447
6448 #ifdef PRESERVE_DEATH_INFO_REGNO_P
6449 /* If final will look at death notes for this reg,
6450 put one on the last output-reload insn to use it. Similarly
6451 for any secondary register. */
6452 if (PRESERVE_DEATH_INFO_REGNO_P (REGNO (reloadreg)))
6453 for (p = get_last_insn (); p; p = PREV_INSN (p))
6454 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
6455 && reg_overlap_mentioned_for_reload_p (reloadreg,
6456 PATTERN (p)))
6457 REG_NOTES (p) = gen_rtx (EXPR_LIST, REG_DEAD,
6458 reloadreg, REG_NOTES (p));
6459
6460 #ifdef SECONDARY_OUTPUT_RELOAD_CLASS
6461 if (! special
6462 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (second_reloadreg)))
6463 for (p = get_last_insn (); p; p = PREV_INSN (p))
6464 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
6465 && reg_overlap_mentioned_for_reload_p (second_reloadreg,
6466 PATTERN (p)))
6467 REG_NOTES (p) = gen_rtx (EXPR_LIST, REG_DEAD,
6468 second_reloadreg, REG_NOTES (p));
6469 #endif
6470 #endif
6471 /* Look at all insns we emitted, just to be safe. */
6472 for (p = get_insns (); p; p = NEXT_INSN (p))
6473 if (GET_RTX_CLASS (GET_CODE (p)) == 'i')
6474 {
6475 /* If this output reload doesn't come from a spill reg,
6476 clear any memory of reloaded copies of the pseudo reg.
6477 If this output reload comes from a spill reg,
6478 reg_has_output_reload will make this do nothing. */
6479 note_stores (PATTERN (p), forget_old_reloads_1);
6480
6481 if (reg_mentioned_p (reload_reg_rtx[j], PATTERN (p))
6482 && reload_spill_index[j] >= 0)
6483 new_spill_reg_store[reload_spill_index[j]] = p;
6484 }
6485
6486 if (reload_when_needed[j] == RELOAD_OTHER)
6487 {
6488 emit_insns (other_output_reload_insns[reload_opnum[j]]);
6489 other_output_reload_insns[reload_opnum[j]] = get_insns ();
6490 }
6491 else
6492 output_reload_insns[reload_opnum[j]] = get_insns ();
6493
6494 end_sequence ();
6495 }
6496 }
6497
6498 /* Now write all the insns we made for reloads in the order expected by
6499 the allocation functions. Prior to the insn being reloaded, we write
6500 the following reloads:
6501
6502 RELOAD_FOR_OTHER_ADDRESS reloads for input addresses.
6503
6504 RELOAD_OTHER reloads.
6505
6506 For each operand, any RELOAD_FOR_INPUT_ADDRESS reloads followed by
6507 the RELOAD_FOR_INPUT reload for the operand.
6508
6509 RELOAD_FOR_OPADDR_ADDRS reloads.
6510
6511 RELOAD_FOR_OPERAND_ADDRESS reloads.
6512
6513 After the insn being reloaded, we write the following:
6514
6515 For each operand, any RELOAD_FOR_OUTPUT_ADDRESS reload followed by
6516 the RELOAD_FOR_OUTPUT reload, followed by any RELOAD_OTHER output
6517 reloads for the operand. The RELOAD_OTHER output reloads are output
6518 in descending order by reload number. */
6519
6520 emit_insns_before (other_input_address_reload_insns, before_insn);
6521 emit_insns_before (other_input_reload_insns, before_insn);
6522
6523 for (j = 0; j < reload_n_operands; j++)
6524 {
6525 emit_insns_before (input_address_reload_insns[j], before_insn);
6526 emit_insns_before (input_reload_insns[j], before_insn);
6527 }
6528
6529 emit_insns_before (other_operand_reload_insns, before_insn);
6530 emit_insns_before (operand_reload_insns, before_insn);
6531
6532 for (j = 0; j < reload_n_operands; j++)
6533 {
6534 emit_insns_before (output_address_reload_insns[j], following_insn);
6535 emit_insns_before (output_reload_insns[j], following_insn);
6536 emit_insns_before (other_output_reload_insns[j], following_insn);
6537 }
6538
6539 /* Move death notes from INSN
6540 to output-operand-address and output reload insns. */
6541 #ifdef PRESERVE_DEATH_INFO_REGNO_P
6542 {
6543 rtx insn1;
6544 /* Loop over those insns, last ones first. */
6545 for (insn1 = PREV_INSN (following_insn); insn1 != insn;
6546 insn1 = PREV_INSN (insn1))
6547 if (GET_CODE (insn1) == INSN && GET_CODE (PATTERN (insn1)) == SET)
6548 {
6549 rtx source = SET_SRC (PATTERN (insn1));
6550 rtx dest = SET_DEST (PATTERN (insn1));
6551
6552 /* The note we will examine next. */
6553 rtx reg_notes = REG_NOTES (insn);
6554 /* The place that pointed to this note. */
6555 rtx *prev_reg_note = &REG_NOTES (insn);
6556
6557 /* If the note is for something used in the source of this
6558 reload insn, or in the output address, move the note. */
6559 while (reg_notes)
6560 {
6561 rtx next_reg_notes = XEXP (reg_notes, 1);
6562 if (REG_NOTE_KIND (reg_notes) == REG_DEAD
6563 && GET_CODE (XEXP (reg_notes, 0)) == REG
6564 && ((GET_CODE (dest) != REG
6565 && reg_overlap_mentioned_for_reload_p (XEXP (reg_notes, 0),
6566 dest))
6567 || reg_overlap_mentioned_for_reload_p (XEXP (reg_notes, 0),
6568 source)))
6569 {
6570 *prev_reg_note = next_reg_notes;
6571 XEXP (reg_notes, 1) = REG_NOTES (insn1);
6572 REG_NOTES (insn1) = reg_notes;
6573 }
6574 else
6575 prev_reg_note = &XEXP (reg_notes, 1);
6576
6577 reg_notes = next_reg_notes;
6578 }
6579 }
6580 }
6581 #endif
6582
6583 /* For all the spill regs newly reloaded in this instruction,
6584 record what they were reloaded from, so subsequent instructions
6585 can inherit the reloads.
6586
6587 Update spill_reg_store for the reloads of this insn.
6588 Copy the elements that were updated in the loop above. */
6589
6590 for (j = 0; j < n_reloads; j++)
6591 {
6592 register int r = reload_order[j];
6593 register int i = reload_spill_index[r];
6594
6595 /* I is nonneg if this reload used one of the spill regs.
6596 If reload_reg_rtx[r] is 0, this is an optional reload
6597 that we opted to ignore. */
6598
6599 if (i >= 0 && reload_reg_rtx[r] != 0)
6600 {
6601 int nr
6602 = HARD_REGNO_NREGS (spill_regs[i], GET_MODE (reload_reg_rtx[r]));
6603 int k;
6604 int part_reaches_end = 0;
6605 int all_reaches_end = 1;
6606
6607 /* For a multi register reload, we need to check if all or part
6608 of the value lives to the end. */
6609 for (k = 0; k < nr; k++)
6610 {
6611 if (reload_reg_reaches_end_p (spill_regs[i] + k, reload_opnum[r],
6612 reload_when_needed[r]))
6613 part_reaches_end = 1;
6614 else
6615 all_reaches_end = 0;
6616 }
6617
6618 /* Ignore reloads that don't reach the end of the insn in
6619 entirety. */
6620 if (all_reaches_end)
6621 {
6622 /* First, clear out memory of what used to be in this spill reg.
6623 If consecutive registers are used, clear them all. */
6624
6625 for (k = 0; k < nr; k++)
6626 {
6627 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]] = -1;
6628 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]] = 0;
6629 }
6630
6631 /* Maybe the spill reg contains a copy of reload_out. */
6632 if (reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG)
6633 {
6634 register int nregno = REGNO (reload_out[r]);
6635 int nnr = (nregno >= FIRST_PSEUDO_REGISTER ? 1
6636 : HARD_REGNO_NREGS (nregno,
6637 GET_MODE (reload_reg_rtx[r])));
6638
6639 spill_reg_store[i] = new_spill_reg_store[i];
6640 reg_last_reload_reg[nregno] = reload_reg_rtx[r];
6641
6642 /* If NREGNO is a hard register, it may occupy more than
6643 one register. If it does, say what is in the
6644 rest of the registers assuming that both registers
6645 agree on how many words the object takes. If not,
6646 invalidate the subsequent registers. */
6647
6648 if (nregno < FIRST_PSEUDO_REGISTER)
6649 for (k = 1; k < nnr; k++)
6650 reg_last_reload_reg[nregno + k]
6651 = (nr == nnr
6652 ? gen_rtx (REG,
6653 reg_raw_mode[REGNO (reload_reg_rtx[r]) + k],
6654 REGNO (reload_reg_rtx[r]) + k)
6655 : 0);
6656
6657 /* Now do the inverse operation. */
6658 for (k = 0; k < nr; k++)
6659 {
6660 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
6661 = (nregno >= FIRST_PSEUDO_REGISTER || nr != nnr
6662 ? nregno
6663 : nregno + k);
6664 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]] = insn;
6665 }
6666 }
6667
6668 /* Maybe the spill reg contains a copy of reload_in. Only do
6669 something if there will not be an output reload for
6670 the register being reloaded. */
6671 else if (reload_out[r] == 0
6672 && reload_in[r] != 0
6673 && ((GET_CODE (reload_in[r]) == REG
6674 && ! reg_has_output_reload[REGNO (reload_in[r])])
6675 || (GET_CODE (reload_in_reg[r]) == REG
6676 && ! reg_has_output_reload[REGNO (reload_in_reg[r])])))
6677 {
6678 register int nregno;
6679 int nnr;
6680
6681 if (GET_CODE (reload_in[r]) == REG)
6682 nregno = REGNO (reload_in[r]);
6683 else
6684 nregno = REGNO (reload_in_reg[r]);
6685
6686 nnr = (nregno >= FIRST_PSEUDO_REGISTER ? 1
6687 : HARD_REGNO_NREGS (nregno,
6688 GET_MODE (reload_reg_rtx[r])));
6689
6690 reg_last_reload_reg[nregno] = reload_reg_rtx[r];
6691
6692 if (nregno < FIRST_PSEUDO_REGISTER)
6693 for (k = 1; k < nnr; k++)
6694 reg_last_reload_reg[nregno + k]
6695 = (nr == nnr
6696 ? gen_rtx (REG,
6697 reg_raw_mode[REGNO (reload_reg_rtx[r]) + k],
6698 REGNO (reload_reg_rtx[r]) + k)
6699 : 0);
6700
6701 /* Unless we inherited this reload, show we haven't
6702 recently done a store. */
6703 if (! reload_inherited[r])
6704 spill_reg_store[i] = 0;
6705
6706 for (k = 0; k < nr; k++)
6707 {
6708 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
6709 = (nregno >= FIRST_PSEUDO_REGISTER || nr != nnr
6710 ? nregno
6711 : nregno + k);
6712 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]]
6713 = insn;
6714 }
6715 }
6716 }
6717
6718 /* However, if part of the reload reaches the end, then we must
6719 invalidate the old info for the part that survives to the end. */
6720 else if (part_reaches_end)
6721 {
6722 for (k = 0; k < nr; k++)
6723 if (reload_reg_reaches_end_p (spill_regs[i] + k,
6724 reload_opnum[r],
6725 reload_when_needed[r]))
6726 {
6727 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]] = -1;
6728 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]] = 0;
6729 }
6730 }
6731 }
6732
6733 /* The following if-statement was #if 0'd in 1.34 (or before...).
6734 It's reenabled in 1.35 because supposedly nothing else
6735 deals with this problem. */
6736
6737 /* If a register gets output-reloaded from a non-spill register,
6738 that invalidates any previous reloaded copy of it.
6739 But forget_old_reloads_1 won't get to see it, because
6740 it thinks only about the original insn. So invalidate it here. */
6741 if (i < 0 && reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG)
6742 {
6743 register int nregno = REGNO (reload_out[r]);
6744 if (nregno >= FIRST_PSEUDO_REGISTER)
6745 reg_last_reload_reg[nregno] = 0;
6746 else
6747 {
6748 int num_regs = HARD_REGNO_NREGS (nregno,GET_MODE (reload_out[r]));
6749
6750 while (num_regs-- > 0)
6751 reg_last_reload_reg[nregno + num_regs] = 0;
6752 }
6753 }
6754 }
6755 }
6756 \f
6757 /* Emit code to perform a reload from IN (which may be a reload register) to
6758 OUT (which may also be a reload register). IN or OUT is from operand
6759 OPNUM with reload type TYPE.
6760
6761 Returns first insn emitted. */
6762
rtx
gen_reload (out, in, opnum, type)
     rtx out;			/* Destination: often a reload register.  */
     rtx in;			/* Source: reg, MEM, constant, or a PLUS.  */
     int opnum;			/* Operand number the reload is for.  */
     enum reload_type type;	/* When the reload is needed (RELOAD_FOR_*).  */
{
  rtx last = get_last_insn ();
  rtx tem;

  /* If IN is a paradoxical SUBREG, remove it and try to put the
     opposite SUBREG on OUT.  Likewise for a paradoxical SUBREG on OUT.
     gen_lowpart_common returns 0 when no such SUBREG can be built, in
     which case we leave IN and OUT alone.  */
  if (GET_CODE (in) == SUBREG
      && (GET_MODE_SIZE (GET_MODE (in))
	  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))))
      && (tem = gen_lowpart_common (GET_MODE (SUBREG_REG (in)), out)) != 0)
    in = SUBREG_REG (in), out = tem;
  else if (GET_CODE (out) == SUBREG
	   && (GET_MODE_SIZE (GET_MODE (out))
	       > GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))))
	   && (tem = gen_lowpart_common (GET_MODE (SUBREG_REG (out)), in)) != 0)
    out = SUBREG_REG (out), in = tem;

  /* How to do this reload can get quite tricky.  Normally, we are being
     asked to reload a simple operand, such as a MEM, a constant, or a pseudo
     register that didn't get a hard register.  In that case we can just
     call emit_move_insn.

     We can also be asked to reload a PLUS that adds a register or a MEM to
     another register, constant or MEM.  This can occur during frame pointer
     elimination and while reloading addresses.  This case is handled by
     trying to emit a single insn to perform the add.  If it is not valid,
     we use a two insn sequence.

     Finally, we could be called to handle an 'o' constraint by putting
     an address into a register.  In that case, we first try to do this
     with a named pattern of "reload_load_address".  If no such pattern
     exists, we just emit a SET insn and hope for the best (it will normally
     be valid on machines that use 'o').

     This entire process is made complex because reload will never
     process the insns we generate here and so we must ensure that
     they will fit their constraints and also by the fact that parts of
     IN might be being reloaded separately and replaced with spill registers.
     Because of this, we are, in some sense, just guessing the right approach
     here.  The one listed above seems to work.

     ??? At some point, this whole thing needs to be rethought.  */

  if (GET_CODE (in) == PLUS
      && (GET_CODE (XEXP (in, 0)) == REG
	  || GET_CODE (XEXP (in, 0)) == SUBREG
	  || GET_CODE (XEXP (in, 0)) == MEM)
      && (GET_CODE (XEXP (in, 1)) == REG
	  || GET_CODE (XEXP (in, 1)) == SUBREG
	  || CONSTANT_P (XEXP (in, 1))
	  || GET_CODE (XEXP (in, 1)) == MEM))
    {
      /* We need to compute the sum of a register or a MEM and another
	 register, constant, or MEM, and put it into the reload
	 register.  The best possible way of doing this is if the machine
	 has a three-operand ADD insn that accepts the required operands.

	 The simplest approach is to try to generate such an insn and see if it
	 is recognized and matches its constraints.  If so, it can be used.

	 It might be better not to actually emit the insn unless it is valid,
	 but we need to pass the insn as an operand to `recog' and
	 `insn_extract' and it is simpler to emit and then delete the insn if
	 not valid than to dummy things up.  */

      rtx op0, op1, tem, insn;
      int code;

      /* Substitute any pending reload replacements into the operands
	 before building the add, so the emitted insn uses the spill
	 registers that will actually hold the values.  */
      op0 = find_replacement (&XEXP (in, 0));
      op1 = find_replacement (&XEXP (in, 1));

      /* Since constraint checking is strict, commutativity won't be
	 checked, so we need to do that here to avoid spurious failure
	 if the add instruction is two-address and the second operand
	 of the add is the same as the reload reg, which is frequently
	 the case.  If the insn would be A = B + A, rearrange it so
	 it will be A = A + B as constrain_operands expects.  */

      if (GET_CODE (XEXP (in, 1)) == REG
	  && REGNO (out) == REGNO (XEXP (in, 1)))
	tem = op0, op0 = op1, op1 = tem;

      if (op0 != XEXP (in, 0) || op1 != XEXP (in, 1))
	in = gen_rtx (PLUS, GET_MODE (in), op0, op1);

      insn = emit_insn (gen_rtx (SET, VOIDmode, out, in));
      code = recog_memoized (insn);

      if (code >= 0)
	{
	  insn_extract (insn);
	  /* We want constrain operands to treat this insn strictly in
	     its validity determination, i.e., the way it would after reload
	     has completed.  */
	  if (constrain_operands (code, 1))
	    return insn;
	}

      /* The three-operand add was not recognized or did not satisfy its
	 constraints; discard it before trying the two-insn sequence.  */
      delete_insns_since (last);

      /* If that failed, we must use a conservative two-insn sequence.
	 use move to copy constant, MEM, or pseudo register to the reload
	 register since "move" will be able to handle an arbitrary operand,
	 unlike add which can't, in general.  Then add the registers.

	 If there is another way to do this for a specific machine, a
	 DEFINE_PEEPHOLE should be specified that recognizes the sequence
	 we emit below.  */

      if (CONSTANT_P (op1) || GET_CODE (op1) == MEM || GET_CODE (op1) == SUBREG
	  || (GET_CODE (op1) == REG
	      && REGNO (op1) >= FIRST_PSEUDO_REGISTER))
	tem = op0, op0 = op1, op1 = tem;

      /* Move the "hard" operand (constant/MEM/pseudo) into OUT first.  */
      gen_reload (out, op0, opnum, type);

      /* If OP0 and OP1 are the same, we can use OUT for OP1.
	 This fixes a problem on the 32K where the stack pointer cannot
	 be used as an operand of an add insn.  */

      if (rtx_equal_p (op0, op1))
	op1 = out;

      insn = emit_insn (gen_add2_insn (out, op1));

      /* If that failed, copy the address register to the reload register.
	 Then add the constant to the reload register.  */

      code = recog_memoized (insn);

      if (code >= 0)
	{
	  insn_extract (insn);
	  /* We want constrain operands to treat this insn strictly in
	     its validity determination, i.e., the way it would after reload
	     has completed.  */
	  if (constrain_operands (code, 1))
	    return insn;
	}

      /* Second attempt failed too; fall back to reloading OP1 into OUT
	 and adding OP0 to it.  */
      delete_insns_since (last);

      gen_reload (out, op1, opnum, type);
      emit_insn (gen_add2_insn (out, op0));
    }

#ifdef SECONDARY_MEMORY_NEEDED
  /* If we need a memory location to do the move, do it that way.  */
  else if (GET_CODE (in) == REG && REGNO (in) < FIRST_PSEUDO_REGISTER
	   && GET_CODE (out) == REG && REGNO (out) < FIRST_PSEUDO_REGISTER
	   && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (REGNO (in)),
				       REGNO_REG_CLASS (REGNO (out)),
				       GET_MODE (out)))
    {
      /* Get the memory to use and rewrite both registers to its mode.  */
      rtx loc = get_secondary_mem (in, GET_MODE (out), opnum, type);

      if (GET_MODE (loc) != GET_MODE (out))
	out = gen_rtx (REG, GET_MODE (loc), REGNO (out));

      if (GET_MODE (loc) != GET_MODE (in))
	in = gen_rtx (REG, GET_MODE (loc), REGNO (in));

      /* Bounce the value through the memory slot: IN -> LOC -> OUT.  */
      gen_reload (loc, in, opnum, type);
      gen_reload (out, loc, opnum, type);
    }
#endif

  /* If IN is a simple operand, use gen_move_insn.  */
  else if (GET_RTX_CLASS (GET_CODE (in)) == 'o' || GET_CODE (in) == SUBREG)
    emit_insn (gen_move_insn (out, in));

#ifdef HAVE_reload_load_address
  else if (HAVE_reload_load_address)
    emit_insn (gen_reload_load_address (out, in));
#endif

  /* Otherwise, just write (set OUT IN) and hope for the best.  */
  else
    emit_insn (gen_rtx (SET, VOIDmode, out, in));

  /* Return the first insn emitted.
     We can not just return get_last_insn, because there may have
     been multiple instructions emitted.  Also note that gen_move_insn may
     emit more than one insn itself, so we can not assume that there is one
     insn emitted per emit_insn_before call.  */

  return last ? NEXT_INSN (last) : get_insns ();
}
6958 \f
6959 /* Delete a previously made output-reload
6960 whose result we now believe is not needed.
6961 First we double-check.
6962
6963 INSN is the insn now being processed.
6964 OUTPUT_RELOAD_INSN is the insn of the output reload.
6965 J is the reload-number for this insn. */
6966
static void
delete_output_reload (insn, j, output_reload_insn)
     rtx insn;			/* The insn now being processed.  */
     int j;			/* Reload number within INSN.  */
     rtx output_reload_insn;	/* The earlier output-reload insn to delete.  */
{
  register rtx i1;

  /* Get the raw pseudo-register referred to, stripping any SUBREGs.  */

  rtx reg = reload_in[j];
  while (GET_CODE (reg) == SUBREG)
    reg = SUBREG_REG (reg);

  /* If the pseudo-reg we are reloading is no longer referenced
     anywhere between the store into it and here,
     and no jumps or labels intervene, then the value can get
     here through the reload reg alone.
     Otherwise, give up--return.  */
  for (i1 = NEXT_INSN (output_reload_insn);
       i1 != insn; i1 = NEXT_INSN (i1))
    {
      if (GET_CODE (i1) == CODE_LABEL || GET_CODE (i1) == JUMP_INSN)
	return;
      if ((GET_CODE (i1) == INSN || GET_CODE (i1) == CALL_INSN)
	  && reg_mentioned_p (reg, PATTERN (i1)))
	return;
    }

  /* Some pseudos' stores must be kept (e.g. because of aliasing);
     cannot_omit_stores is computed elsewhere in this file.  */
  if (cannot_omit_stores[REGNO (reg)])
    return;

  /* If this insn will store in the pseudo again,
     the previous store can be removed.  */
  if (reload_out[j] == reload_in[j])
    delete_insn (output_reload_insn);

  /* See if the pseudo reg has been completely replaced
     with reload regs.  If so, delete the store insn
     and forget we had a stack slot for the pseudo.  */
  else if (reg_n_deaths[REGNO (reg)] == 1
	   && reg_basic_block[REGNO (reg)] >= 0
	   && find_regno_note (insn, REG_DEAD, REGNO (reg)))
    {
      rtx i2;

      /* We know that it was used only between here
	 and the beginning of the current basic block.
	 (We also know that the last use before INSN was
	 the output reload we are thinking of deleting, but never mind that.)
	 Search that range; see if any ref remains.  */
      for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
	{
	  rtx set = single_set (i2);

	  /* Uses which just store in the pseudo don't count,
	     since if they are the only uses, they are dead.  */
	  if (set != 0 && SET_DEST (set) == reg)
	    continue;
	  if (GET_CODE (i2) == CODE_LABEL
	      || GET_CODE (i2) == JUMP_INSN)
	    break;
	  if ((GET_CODE (i2) == INSN || GET_CODE (i2) == CALL_INSN)
	      && reg_mentioned_p (reg, PATTERN (i2)))
	    /* Some other ref remains;
	       we can't do anything.  */
	    return;
	}

      /* Delete the now-dead stores into this pseudo.  Scan the same
	 backward range a second time, now that we know no other
	 reference survives.  */
      for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
	{
	  rtx set = single_set (i2);

	  if (set != 0 && SET_DEST (set) == reg)
	    delete_insn (i2);
	  if (GET_CODE (i2) == CODE_LABEL
	      || GET_CODE (i2) == JUMP_INSN)
	    break;
	}

      /* For the debugging info,
	 say the pseudo lives in this reload reg.  */
      reg_renumber[REGNO (reg)] = REGNO (reload_reg_rtx[j]);
      alter_reg (REGNO (reg), -1);
    }
}
7054 \f
7055 /* Output reload-insns to reload VALUE into RELOADREG.
7056 VALUE is an autoincrement or autodecrement RTX whose operand
7057 is a register or memory location;
7058 so reloading involves incrementing that location.
7059
7060 INC_AMOUNT is the number to increment or decrement by (always positive).
7061 This cannot be deduced from VALUE. */
7062
static void
inc_for_reload (reloadreg, value, inc_amount)
     rtx reloadreg;		/* Reload register to receive the value.  */
     rtx value;			/* PRE_INC/PRE_DEC/POST_INC/POST_DEC rtx.  */
     int inc_amount;		/* Amount to increment/decrement by (always
				   positive; negated below for decrements).  */
{
  /* REG or MEM to be copied and incremented.  */
  rtx incloc = XEXP (value, 0);
  /* Nonzero if increment after copying.  */
  int post = (GET_CODE (value) == POST_DEC || GET_CODE (value) == POST_INC);
  rtx last;
  rtx inc;
  rtx add_insn;
  int code;

  /* No hard register is equivalent to this register after
     inc/dec operation.  If REG_LAST_RELOAD_REG were non-zero,
     we could inc/dec that register as well (maybe even using it for
     the source), but I'm not sure it's worth worrying about.  */
  if (GET_CODE (incloc) == REG)
    reg_last_reload_reg[REGNO (incloc)] = 0;

  if (GET_CODE (value) == PRE_DEC || GET_CODE (value) == POST_DEC)
    inc_amount = - inc_amount;

  inc = GEN_INT (inc_amount);

  /* If this is post-increment, first copy the location to the reload reg.  */
  if (post)
    emit_insn (gen_move_insn (reloadreg, incloc));

  /* See if we can directly increment INCLOC.  Use a method similar to that
     in gen_reload: emit the add, then check whether it was recognized and
     satisfies its constraints, deleting it if not.  */

  last = get_last_insn ();
  add_insn = emit_insn (gen_rtx (SET, VOIDmode, incloc,
				 gen_rtx (PLUS, GET_MODE (incloc),
					  incloc, inc)));

  code = recog_memoized (add_insn);
  if (code >= 0)
    {
      insn_extract (add_insn);
      if (constrain_operands (code, 1))
	{
	  /* If this is a pre-increment and we have incremented the value
	     where it lives, copy the incremented value to RELOADREG to
	     be used as an address.  */

	  if (! post)
	    emit_insn (gen_move_insn (reloadreg, incloc));

	  return;
	}
    }

  /* The direct add was not valid on this machine; remove it.  */
  delete_insns_since (last);

  /* If couldn't do the increment directly, must increment in RELOADREG.
     The way we do this depends on whether this is pre- or post-increment.
     For pre-increment, copy INCLOC to the reload register, increment it
     there, then save back.  */

  if (! post)
    {
      emit_insn (gen_move_insn (reloadreg, incloc));
      emit_insn (gen_add2_insn (reloadreg, inc));
      emit_insn (gen_move_insn (incloc, reloadreg));
    }
  else
    {
      /* Postincrement.
	 Because this might be a jump insn or a compare, and because RELOADREG
	 may not be available after the insn in an input reload, we must do
	 the incrementation before the insn being reloaded for.

	 We have already copied INCLOC to RELOADREG.  Increment the copy in
	 RELOADREG, save that back, then decrement RELOADREG so it has
	 the original value.  */

      emit_insn (gen_add2_insn (reloadreg, inc));
      emit_insn (gen_move_insn (incloc, reloadreg));
      emit_insn (gen_add2_insn (reloadreg, GEN_INT (-inc_amount)));
    }

  return;
}
7150 \f
7151 /* Return 1 if we are certain that the constraint-string STRING allows
7152 the hard register REG. Return 0 if we can't be sure of this. */
7153
7154 static int
7155 constraint_accepts_reg_p (string, reg)
7156 char *string;
7157 rtx reg;
7158 {
7159 int value = 0;
7160 int regno = true_regnum (reg);
7161 int c;
7162
7163 /* Initialize for first alternative. */
7164 value = 0;
7165 /* Check that each alternative contains `g' or `r'. */
7166 while (1)
7167 switch (c = *string++)
7168 {
7169 case 0:
7170 /* If an alternative lacks `g' or `r', we lose. */
7171 return value;
7172 case ',':
7173 /* If an alternative lacks `g' or `r', we lose. */
7174 if (value == 0)
7175 return 0;
7176 /* Initialize for next alternative. */
7177 value = 0;
7178 break;
7179 case 'g':
7180 case 'r':
7181 /* Any general reg wins for this alternative. */
7182 if (TEST_HARD_REG_BIT (reg_class_contents[(int) GENERAL_REGS], regno))
7183 value = 1;
7184 break;
7185 default:
7186 /* Any reg in specified class wins for this alternative. */
7187 {
7188 enum reg_class class = REG_CLASS_FROM_LETTER (c);
7189
7190 if (TEST_HARD_REG_BIT (reg_class_contents[(int) class], regno))
7191 value = 1;
7192 }
7193 }
7194 }
7195 \f
7196 /* Return the number of places FIND appears within X, but don't count
7197 an occurrence if some SET_DEST is FIND. */
7198
7199 static int
7200 count_occurrences (x, find)
7201 register rtx x, find;
7202 {
7203 register int i, j;
7204 register enum rtx_code code;
7205 register char *format_ptr;
7206 int count;
7207
7208 if (x == find)
7209 return 1;
7210 if (x == 0)
7211 return 0;
7212
7213 code = GET_CODE (x);
7214
7215 switch (code)
7216 {
7217 case REG:
7218 case QUEUED:
7219 case CONST_INT:
7220 case CONST_DOUBLE:
7221 case SYMBOL_REF:
7222 case CODE_LABEL:
7223 case PC:
7224 case CC0:
7225 return 0;
7226
7227 case SET:
7228 if (SET_DEST (x) == find)
7229 return count_occurrences (SET_SRC (x), find);
7230 break;
7231 }
7232
7233 format_ptr = GET_RTX_FORMAT (code);
7234 count = 0;
7235
7236 for (i = 0; i < GET_RTX_LENGTH (code); i++)
7237 {
7238 switch (*format_ptr++)
7239 {
7240 case 'e':
7241 count += count_occurrences (XEXP (x, i), find);
7242 break;
7243
7244 case 'E':
7245 if (XVEC (x, i) != NULL)
7246 {
7247 for (j = 0; j < XVECLEN (x, i); j++)
7248 count += count_occurrences (XVECEXP (x, i, j), find);
7249 }
7250 break;
7251 }
7252 }
7253 return count;
7254 }
This page took 0.400299 seconds and 5 git commands to generate.