/* Provenance: gcc/reload1.c from the GCC repository
   (gcc.gnu.org gitweb, blob b71cc9299a46c4877efe03ceab9198151d621873).  */
1 /* Reload pseudo regs into hard regs for insns that require hard regs.
2 Copyright (C) 1987, 1988, 1989, 1992 Free Software Foundation, Inc.
3
4 This file is part of GNU CC.
5
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
19
20
21 #include <stdio.h>
22 #include "config.h"
23 #include "rtl.h"
24 #include "obstack.h"
25 #include "insn-config.h"
26 #include "insn-flags.h"
27 #include "insn-codes.h"
28 #include "flags.h"
29 #include "expr.h"
30 #include "regs.h"
31 #include "hard-reg-set.h"
32 #include "reload.h"
33 #include "recog.h"
34 #include "basic-block.h"
35 #include "output.h"
36
37 /* This file contains the reload pass of the compiler, which is
38 run after register allocation has been done. It checks that
39 each insn is valid (operands required to be in registers really
40 are in registers of the proper class) and fixes up invalid ones
41 by copying values temporarily into registers for the insns
42 that need them.
43
44 The results of register allocation are described by the vector
45 reg_renumber; the insns still contain pseudo regs, but reg_renumber
46 can be used to find which hard reg, if any, a pseudo reg is in.
47
48 The technique we always use is to free up a few hard regs that are
49 called ``reload regs'', and for each place where a pseudo reg
50 must be in a hard reg, copy it temporarily into one of the reload regs.
51
52 All the pseudos that were formerly allocated to the hard regs that
53 are now in use as reload regs must be ``spilled''. This means
54 that they go to other hard regs, or to stack slots if no other
55 available hard regs can be found. Spilling can invalidate more
56 insns, requiring additional need for reloads, so we must keep checking
57 until the process stabilizes.
58
59 For machines with different classes of registers, we must keep track
60 of the register class needed for each reload, and make sure that
61 we allocate enough reload registers of each class.
62
63 The file reload.c contains the code that checks one insn for
64 validity and reports the reloads that it needs. This file
65 is in charge of scanning the entire rtl code, accumulating the
66 reload needs, spilling, assigning reload registers to use for
67 fixing up each insn, and generating the new insns to copy values
68 into the reload registers. */
69
70
/* Default costs used when the target does not override them:
   a register-to-register move costs 2, a memory move costs 4.  */

#ifndef REGISTER_MOVE_COST
#define REGISTER_MOVE_COST(x, y) 2
#endif

#ifndef MEMORY_MOVE_COST
#define MEMORY_MOVE_COST(x) 4
#endif

/* During reload_as_needed, element N contains a REG rtx for the hard reg
   into which reg N has been reloaded (perhaps for a previous insn).  */
static rtx *reg_last_reload_reg;

/* Elt N nonzero if reg_last_reload_reg[N] has been set in this insn
   for an output reload that stores into reg N.  */
static char *reg_has_output_reload;

/* Indicates which hard regs are reload-registers for an output reload
   in the current insn.  */
static HARD_REG_SET reg_is_output_reload;

/* Element N is the constant value to which pseudo reg N is equivalent,
   or zero if pseudo reg N is not equivalent to a constant.
   find_reloads looks at this in order to replace pseudo reg N
   with the constant it stands for.  */
rtx *reg_equiv_constant;

/* Element N is a memory location to which pseudo reg N is equivalent,
   prior to any register elimination (such as frame pointer to stack
   pointer).  Depending on whether or not it is a valid address, this value
   is transferred to either reg_equiv_address or reg_equiv_mem.  */
rtx *reg_equiv_memory_loc;

/* Element N is the address of stack slot to which pseudo reg N is equivalent.
   This is used when the address is not valid as a memory address
   (because its displacement is too big for the machine).  */
rtx *reg_equiv_address;

/* Element N is the memory slot to which pseudo reg N is equivalent,
   or zero if pseudo reg N is not equivalent to a memory slot.  */
rtx *reg_equiv_mem;

/* Widest width in which each pseudo reg is referred to (via subreg).  */
static int *reg_max_ref_width;

/* Element N is the insn that initialized reg N from its equivalent
   constant or memory slot.  */
static rtx *reg_equiv_init;

/* During reload_as_needed, element N contains the last pseudo regno
   reloaded into the Nth reload register.  This vector is in parallel
   with spill_regs.  If that pseudo reg occupied more than one register,
   reg_reloaded_contents points to that pseudo for each spill register in
   use; all of these must remain set for an inheritance to occur.  */
static int reg_reloaded_contents[FIRST_PSEUDO_REGISTER];

/* During reload_as_needed, element N contains the insn for which
   the Nth reload register was last used.  This vector is in parallel
   with spill_regs, and its contents are significant only when
   reg_reloaded_contents is significant.  */
static rtx reg_reloaded_insn[FIRST_PSEUDO_REGISTER];

/* Number of spill-regs so far; number of valid elements of spill_regs.  */
static int n_spills;

/* In parallel with spill_regs, contains REG rtx's for those regs.
   Holds the last rtx used for any given reg, or 0 if it has never
   been used for spilling yet.  This rtx is reused, provided it has
   the proper mode.  */
static rtx spill_reg_rtx[FIRST_PSEUDO_REGISTER];

/* In parallel with spill_regs, contains nonzero for a spill reg
   that was stored after the last time it was used.
   The precise value is the insn generated to do the store.  */
static rtx spill_reg_store[FIRST_PSEUDO_REGISTER];

/* This table is the inverse mapping of spill_regs:
   indexed by hard reg number,
   it contains the position of that reg in spill_regs,
   or -1 for something that is not in spill_regs.  */
static short spill_reg_order[FIRST_PSEUDO_REGISTER];

/* This reg set indicates registers that may not be used for retrying global
   allocation.  The registers that may not be used include all spill registers
   and the frame pointer (if we are using one).  */
HARD_REG_SET forbidden_regs;

/* This reg set indicates registers that are not good for spill registers.
   They will not be used to complete groups of spill registers.  This includes
   all fixed registers, registers that may be eliminated, and, if
   SMALL_REGISTER_CLASSES is not defined, registers explicitly used in the rtl.

   (spill_reg_order prevents these registers from being used to start a
   group.)  */
static HARD_REG_SET bad_spill_regs;

/* Describes order of use of registers for reloading
   of spilled pseudo-registers.  `spills' is the number of
   elements that are actually valid; new ones are added at the end.  */
static short spill_regs[FIRST_PSEUDO_REGISTER];

/* Describes order of preference for putting regs into spill_regs.
   Contains the numbers of all the hard regs, in order most preferred first.
   This order is different for each function.
   It is set up by order_regs_for_reload.
   Empty elements at the end contain -1.  */
static short potential_reload_regs[FIRST_PSEUDO_REGISTER];

/* 1 for a hard register that appears explicitly in the rtl
   (for example, function value registers, special registers
   used by insns, structure value pointer registers).  */
static char regs_explicitly_used[FIRST_PSEUDO_REGISTER];

/* Indicates if a register was counted against the need for
   groups.  0 means it can count against max_nongroup instead.  */
static HARD_REG_SET counted_for_groups;

/* Indicates if a register was counted against the need for
   non-groups.  0 means it can become part of a new group.
   During choose_reload_regs, 1 here means don't use this reg
   as part of a group, even if it seems to be otherwise ok.  */
static HARD_REG_SET counted_for_nongroups;

/* Nonzero if indirect addressing is supported on the machine; this means
   that spilling (REG n) does not require reloading it into a register in
   order to do (MEM (REG n)) or (MEM (PLUS (REG n) (CONST_INT c))).  The
   value indicates the level of indirect addressing supported, e.g., two
   means that (MEM (MEM (REG n))) is also valid if (REG n) does not get
   a hard register.  Computed by init_reload by probing addresses.  */

static char spill_indirect_levels;

/* Nonzero if indirect addressing is supported when the innermost MEM is
   of the form (MEM (SYMBOL_REF sym)).  It is assumed that the level to
   which these are valid is the same as spill_indirect_levels, above.  */

char indirect_symref_ok;

/* Nonzero if an address (plus (reg frame_pointer) (reg ...)) is valid.  */

char double_reg_address_ok;

/* Record the stack slot for each spilled hard register.  */

static rtx spill_stack_slot[FIRST_PSEUDO_REGISTER];

/* Width allocated so far for that stack slot.  */

static int spill_stack_slot_width[FIRST_PSEUDO_REGISTER];

/* Indexed by register class and basic block number, nonzero if there is
   any need for a spill register of that class in that basic block.
   The pointer is 0 if we did stupid allocation and don't know
   the structure of basic blocks.  */

char *basic_block_needs[N_REG_CLASSES];

/* First uid used by insns created by reload in this function.
   Used in find_equiv_reg.  */
int reload_first_uid;

/* Flag set by local-alloc or global-alloc if anything is live in
   a call-clobbered reg across calls.  */

int caller_save_needed;

/* Set to 1 while reload_as_needed is operating.
   Required by some machines to handle any generated moves differently.  */

int reload_in_progress = 0;

/* These arrays record the insn_code of insns that may be needed to
   perform input and output reloads of special objects.  They provide a
   place to pass a scratch register.  Indexed by machine mode.  */

enum insn_code reload_in_optab[NUM_MACHINE_MODES];
enum insn_code reload_out_optab[NUM_MACHINE_MODES];

/* This obstack is used for allocation of rtl during register elimination.
   The allocated storage can be freed once find_reloads has processed the
   insn.  */

struct obstack reload_obstack;
char *reload_firstobj;

#define obstack_chunk_alloc xmalloc
#define obstack_chunk_free free

/* List of labels that must never be deleted.  */
extern rtx forced_labels;
260 \f
/* This structure is used to record information about register eliminations.
   Each array entry describes one possible way of eliminating a register
   in favor of another.  If there is more than one way of eliminating a
   particular register, the most preferred should be specified first.  */

static struct elim_table
{
  int from;			/* Register number to be eliminated. */
  int to;			/* Register number used as replacement. */
  int initial_offset;		/* Initial difference between values. */
  int can_eliminate;		/* Non-zero if this elimination can be done. */
  int can_eliminate_previous;	/* Value of CAN_ELIMINATE in previous scan over
				   insns made by reload. */
  int offset;			/* Current offset between the two regs. */
  int max_offset;		/* Maximum offset between the two regs. */
  int previous_offset;		/* Offset at end of previous insn. */
  int ref_outside_mem;		/* "to" has been referenced outside a MEM. */
  rtx from_rtx;			/* REG rtx for the register to be eliminated.
				   We cannot simply compare the number since
				   we might then spuriously replace a hard
				   register corresponding to a pseudo
				   assigned to the reg to be eliminated. */
  rtx to_rtx;			/* REG rtx for the replacement. */
} reg_eliminate[] =

/* If a set of eliminable registers was specified, define the table from it.
   Otherwise, default to the normal case of the frame pointer being
   replaced by the stack pointer.  */

#ifdef ELIMINABLE_REGS
  ELIMINABLE_REGS;
#else
  {{ FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM}};
#endif

/* Number of entries in the elimination table above.  */
#define NUM_ELIMINABLE_REGS (sizeof reg_eliminate / sizeof reg_eliminate[0])

/* Record the number of pending eliminations that have an offset not equal
   to their initial offset.  If non-zero, we use a new copy of each
   replacement result in any insns encountered.  */
static int num_not_at_initial_offset;

/* Count the number of registers that we may be able to eliminate.  */
static int num_eliminable;

/* For each label, we record the offset of each elimination.  If we reach
   a label by more than one path and an offset differs, we cannot do the
   elimination.  This information is indexed by the number of the label.
   The first table is an array of flags that records whether we have yet
   encountered a label and the second table is an array of arrays, one
   entry in the latter array for each elimination.  */

static char *offsets_known_at;
static int (*offsets_at)[NUM_ELIMINABLE_REGS];

/* Number of labels in the current function.  */

static int num_labels;

/* Pairs a hard register number with its use count; used when ordering
   registers for reload.  */
struct hard_reg_n_uses { int regno; int uses; };

/* Forward declarations for the static functions of this file.  */

static int possible_group_p		PROTO((int, int *));
static void count_possible_groups	PROTO((int *, enum machine_mode *,
					       int *));
static int modes_equiv_for_class_p	PROTO((enum machine_mode,
					       enum machine_mode,
					       enum reg_class));
static void spill_failure		PROTO((rtx));
static int new_spill_reg		PROTO((int, int, int *, int *, int,
					       FILE *));
static void delete_dead_insn		PROTO((rtx));
static void alter_reg			PROTO((int, int));
static void set_label_offsets		PROTO((rtx, rtx, int));
static int eliminate_regs_in_insn	PROTO((rtx, int));
static void mark_not_eliminable		PROTO((rtx, rtx));
static int spill_hard_reg		PROTO((int, int, FILE *, int));
static void scan_paradoxical_subregs	PROTO((rtx));
static int hard_reg_use_compare		PROTO((struct hard_reg_n_uses *,
					       struct hard_reg_n_uses *));
static void order_regs_for_reload	PROTO((void));
static void reload_as_needed		PROTO((rtx, int));
static void forget_old_reloads_1	PROTO((rtx));
static int reload_reg_class_lower	PROTO((short *, short *));
static void mark_reload_reg_in_use	PROTO((int, int, enum reload_type,
					       enum machine_mode));
static int reload_reg_free_p		PROTO((int, int, enum reload_type));
static int reload_reg_free_before_p	PROTO((int, int, enum reload_type));
static int reload_reg_reaches_end_p	PROTO((int, int, enum reload_type));
static int allocate_reload_reg		PROTO((int, rtx, int, int));
static void choose_reload_regs		PROTO((rtx, rtx));
static void merge_assigned_reloads	PROTO((rtx));
static void emit_reload_insns		PROTO((rtx));
static void delete_output_reload	PROTO((rtx, int, rtx));
static void inc_for_reload		PROTO((rtx, rtx, int));
static int constraint_accepts_reg_p	PROTO((char *, rtx));
static int count_occurrences		PROTO((rtx, rtx));
357 \f
358 /* Initialize the reload pass once per compilation. */
359
360 void
361 init_reload ()
362 {
363 register int i;
364
365 /* Often (MEM (REG n)) is still valid even if (REG n) is put on the stack.
366 Set spill_indirect_levels to the number of levels such addressing is
367 permitted, zero if it is not permitted at all. */
368
369 register rtx tem
370 = gen_rtx (MEM, Pmode,
371 gen_rtx (PLUS, Pmode,
372 gen_rtx (REG, Pmode, LAST_VIRTUAL_REGISTER + 1),
373 GEN_INT (4)));
374 spill_indirect_levels = 0;
375
376 while (memory_address_p (QImode, tem))
377 {
378 spill_indirect_levels++;
379 tem = gen_rtx (MEM, Pmode, tem);
380 }
381
382 /* See if indirect addressing is valid for (MEM (SYMBOL_REF ...)). */
383
384 tem = gen_rtx (MEM, Pmode, gen_rtx (SYMBOL_REF, Pmode, "foo"));
385 indirect_symref_ok = memory_address_p (QImode, tem);
386
387 /* See if reg+reg is a valid (and offsettable) address. */
388
389 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
390 {
391 tem = gen_rtx (PLUS, Pmode,
392 gen_rtx (REG, Pmode, FRAME_POINTER_REGNUM),
393 gen_rtx (REG, Pmode, i));
394 /* This way, we make sure that reg+reg is an offsettable address. */
395 tem = plus_constant (tem, 4);
396
397 if (memory_address_p (QImode, tem))
398 {
399 double_reg_address_ok = 1;
400 break;
401 }
402 }
403
404 /* Initialize obstack for our rtl allocation. */
405 gcc_obstack_init (&reload_obstack);
406 reload_firstobj = (char *) obstack_alloc (&reload_obstack, 0);
407
408 #ifdef HAVE_SECONDARY_RELOADS
409
410 /* Initialize the optabs for doing special input and output reloads. */
411
412 for (i = 0; i < NUM_MACHINE_MODES; i++)
413 reload_in_optab[i] = reload_out_optab[i] = CODE_FOR_nothing;
414
415 #ifdef HAVE_reload_inqi
416 if (HAVE_reload_inqi)
417 reload_in_optab[(int) QImode] = CODE_FOR_reload_inqi;
418 #endif
419 #ifdef HAVE_reload_inhi
420 if (HAVE_reload_inhi)
421 reload_in_optab[(int) HImode] = CODE_FOR_reload_inhi;
422 #endif
423 #ifdef HAVE_reload_insi
424 if (HAVE_reload_insi)
425 reload_in_optab[(int) SImode] = CODE_FOR_reload_insi;
426 #endif
427 #ifdef HAVE_reload_indi
428 if (HAVE_reload_indi)
429 reload_in_optab[(int) DImode] = CODE_FOR_reload_indi;
430 #endif
431 #ifdef HAVE_reload_inti
432 if (HAVE_reload_inti)
433 reload_in_optab[(int) TImode] = CODE_FOR_reload_inti;
434 #endif
435 #ifdef HAVE_reload_inqf
436 if (HAVE_reload_inqf)
437 reload_in_optab[(int) QFmode] = CODE_FOR_reload_inqf;
438 #endif
439 #ifdef HAVE_reload_inhf
440 if (HAVE_reload_inhf)
441 reload_in_optab[(int) HFmode] = CODE_FOR_reload_inhf;
442 #endif
443 #ifdef HAVE_reload_insf
444 if (HAVE_reload_insf)
445 reload_in_optab[(int) SFmode] = CODE_FOR_reload_insf;
446 #endif
447 #ifdef HAVE_reload_indf
448 if (HAVE_reload_indf)
449 reload_in_optab[(int) DFmode] = CODE_FOR_reload_indf;
450 #endif
451 #ifdef HAVE_reload_inxf
452 if (HAVE_reload_inxf)
453 reload_in_optab[(int) XFmode] = CODE_FOR_reload_inxf;
454 #endif
455 #ifdef HAVE_reload_intf
456 if (HAVE_reload_intf)
457 reload_in_optab[(int) TFmode] = CODE_FOR_reload_intf;
458 #endif
459
460 #ifdef HAVE_reload_outqi
461 if (HAVE_reload_outqi)
462 reload_out_optab[(int) QImode] = CODE_FOR_reload_outqi;
463 #endif
464 #ifdef HAVE_reload_outhi
465 if (HAVE_reload_outhi)
466 reload_out_optab[(int) HImode] = CODE_FOR_reload_outhi;
467 #endif
468 #ifdef HAVE_reload_outsi
469 if (HAVE_reload_outsi)
470 reload_out_optab[(int) SImode] = CODE_FOR_reload_outsi;
471 #endif
472 #ifdef HAVE_reload_outdi
473 if (HAVE_reload_outdi)
474 reload_out_optab[(int) DImode] = CODE_FOR_reload_outdi;
475 #endif
476 #ifdef HAVE_reload_outti
477 if (HAVE_reload_outti)
478 reload_out_optab[(int) TImode] = CODE_FOR_reload_outti;
479 #endif
480 #ifdef HAVE_reload_outqf
481 if (HAVE_reload_outqf)
482 reload_out_optab[(int) QFmode] = CODE_FOR_reload_outqf;
483 #endif
484 #ifdef HAVE_reload_outhf
485 if (HAVE_reload_outhf)
486 reload_out_optab[(int) HFmode] = CODE_FOR_reload_outhf;
487 #endif
488 #ifdef HAVE_reload_outsf
489 if (HAVE_reload_outsf)
490 reload_out_optab[(int) SFmode] = CODE_FOR_reload_outsf;
491 #endif
492 #ifdef HAVE_reload_outdf
493 if (HAVE_reload_outdf)
494 reload_out_optab[(int) DFmode] = CODE_FOR_reload_outdf;
495 #endif
496 #ifdef HAVE_reload_outxf
497 if (HAVE_reload_outxf)
498 reload_out_optab[(int) XFmode] = CODE_FOR_reload_outxf;
499 #endif
500 #ifdef HAVE_reload_outtf
501 if (HAVE_reload_outtf)
502 reload_out_optab[(int) TFmode] = CODE_FOR_reload_outtf;
503 #endif
504
505 #endif /* HAVE_SECONDARY_RELOADS */
506
507 }
508
509 /* Main entry point for the reload pass.
510
511 FIRST is the first insn of the function being compiled.
512
513 GLOBAL nonzero means we were called from global_alloc
514 and should attempt to reallocate any pseudoregs that we
515 displace from hard regs we will use for reloads.
516 If GLOBAL is zero, we do not have enough information to do that,
517 so any pseudo reg that is spilled must go to the stack.
518
519 DUMPFILE is the global-reg debugging dump file stream, or 0.
520 If it is nonzero, messages are written to it to describe
521 which registers are seized as reload regs, which pseudo regs
522 are spilled from them, and where the pseudo regs are reallocated to.
523
524 Return value is nonzero if reload failed
525 and we must not do any more for this function. */
526
527 int
528 reload (first, global, dumpfile)
529 rtx first;
530 int global;
531 FILE *dumpfile;
532 {
533 register int class;
534 register int i, j;
535 register rtx insn;
536 register struct elim_table *ep;
537
538 int something_changed;
539 int something_needs_reloads;
540 int something_needs_elimination;
541 int new_basic_block_needs;
542 enum reg_class caller_save_spill_class = NO_REGS;
543 int caller_save_group_size = 1;
544
545 /* Nonzero means we couldn't get enough spill regs. */
546 int failure = 0;
547
548 /* The basic block number currently being processed for INSN. */
549 int this_block;
550
551 /* Make sure even insns with volatile mem refs are recognizable. */
552 init_recog ();
553
554 /* Enable find_equiv_reg to distinguish insns made by reload. */
555 reload_first_uid = get_max_uid ();
556
557 for (i = 0; i < N_REG_CLASSES; i++)
558 basic_block_needs[i] = 0;
559
560 #ifdef SECONDARY_MEMORY_NEEDED
561 /* Initialize the secondary memory table. */
562 clear_secondary_mem ();
563 #endif
564
565 /* Remember which hard regs appear explicitly
566 before we merge into `regs_ever_live' the ones in which
567 pseudo regs have been allocated. */
568 bcopy (regs_ever_live, regs_explicitly_used, sizeof regs_ever_live);
569
570 /* We don't have a stack slot for any spill reg yet. */
571 bzero (spill_stack_slot, sizeof spill_stack_slot);
572 bzero (spill_stack_slot_width, sizeof spill_stack_slot_width);
573
574 /* Initialize the save area information for caller-save, in case some
575 are needed. */
576 init_save_areas ();
577
578 /* Compute which hard registers are now in use
579 as homes for pseudo registers.
580 This is done here rather than (eg) in global_alloc
581 because this point is reached even if not optimizing. */
582
583 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
584 mark_home_live (i);
585
586 /* Make sure that the last insn in the chain
587 is not something that needs reloading. */
588 emit_note (NULL_PTR, NOTE_INSN_DELETED);
589
590 /* Find all the pseudo registers that didn't get hard regs
591 but do have known equivalent constants or memory slots.
592 These include parameters (known equivalent to parameter slots)
593 and cse'd or loop-moved constant memory addresses.
594
595 Record constant equivalents in reg_equiv_constant
596 so they will be substituted by find_reloads.
597 Record memory equivalents in reg_mem_equiv so they can
598 be substituted eventually by altering the REG-rtx's. */
599
600 reg_equiv_constant = (rtx *) alloca (max_regno * sizeof (rtx));
601 bzero (reg_equiv_constant, max_regno * sizeof (rtx));
602 reg_equiv_memory_loc = (rtx *) alloca (max_regno * sizeof (rtx));
603 bzero (reg_equiv_memory_loc, max_regno * sizeof (rtx));
604 reg_equiv_mem = (rtx *) alloca (max_regno * sizeof (rtx));
605 bzero (reg_equiv_mem, max_regno * sizeof (rtx));
606 reg_equiv_init = (rtx *) alloca (max_regno * sizeof (rtx));
607 bzero (reg_equiv_init, max_regno * sizeof (rtx));
608 reg_equiv_address = (rtx *) alloca (max_regno * sizeof (rtx));
609 bzero (reg_equiv_address, max_regno * sizeof (rtx));
610 reg_max_ref_width = (int *) alloca (max_regno * sizeof (int));
611 bzero (reg_max_ref_width, max_regno * sizeof (int));
612
613 /* Look for REG_EQUIV notes; record what each pseudo is equivalent to.
614 Also find all paradoxical subregs
615 and find largest such for each pseudo. */
616
617 for (insn = first; insn; insn = NEXT_INSN (insn))
618 {
619 rtx set = single_set (insn);
620
621 if (set != 0 && GET_CODE (SET_DEST (set)) == REG)
622 {
623 rtx note = find_reg_note (insn, REG_EQUIV, NULL_RTX);
624 if (note
625 #ifdef LEGITIMATE_PIC_OPERAND_P
626 && (! CONSTANT_P (XEXP (note, 0)) || ! flag_pic
627 || LEGITIMATE_PIC_OPERAND_P (XEXP (note, 0)))
628 #endif
629 )
630 {
631 rtx x = XEXP (note, 0);
632 i = REGNO (SET_DEST (set));
633 if (i > LAST_VIRTUAL_REGISTER)
634 {
635 if (GET_CODE (x) == MEM)
636 reg_equiv_memory_loc[i] = x;
637 else if (CONSTANT_P (x))
638 {
639 if (LEGITIMATE_CONSTANT_P (x))
640 reg_equiv_constant[i] = x;
641 else
642 reg_equiv_memory_loc[i]
643 = force_const_mem (GET_MODE (SET_DEST (set)), x);
644 }
645 else
646 continue;
647
648 /* If this register is being made equivalent to a MEM
649 and the MEM is not SET_SRC, the equivalencing insn
650 is one with the MEM as a SET_DEST and it occurs later.
651 So don't mark this insn now. */
652 if (GET_CODE (x) != MEM
653 || rtx_equal_p (SET_SRC (set), x))
654 reg_equiv_init[i] = insn;
655 }
656 }
657 }
658
659 /* If this insn is setting a MEM from a register equivalent to it,
660 this is the equivalencing insn. */
661 else if (set && GET_CODE (SET_DEST (set)) == MEM
662 && GET_CODE (SET_SRC (set)) == REG
663 && reg_equiv_memory_loc[REGNO (SET_SRC (set))]
664 && rtx_equal_p (SET_DEST (set),
665 reg_equiv_memory_loc[REGNO (SET_SRC (set))]))
666 reg_equiv_init[REGNO (SET_SRC (set))] = insn;
667
668 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
669 scan_paradoxical_subregs (PATTERN (insn));
670 }
671
672 /* Does this function require a frame pointer? */
673
674 frame_pointer_needed = (! flag_omit_frame_pointer
675 #ifdef EXIT_IGNORE_STACK
676 /* ?? If EXIT_IGNORE_STACK is set, we will not save
677 and restore sp for alloca. So we can't eliminate
678 the frame pointer in that case. At some point,
679 we should improve this by emitting the
680 sp-adjusting insns for this case. */
681 || (current_function_calls_alloca
682 && EXIT_IGNORE_STACK)
683 #endif
684 || FRAME_POINTER_REQUIRED);
685
686 num_eliminable = 0;
687
688 /* Initialize the table of registers to eliminate. The way we do this
689 depends on how the eliminable registers were defined. */
690 #ifdef ELIMINABLE_REGS
691 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
692 {
693 ep->can_eliminate = ep->can_eliminate_previous
694 = (CAN_ELIMINATE (ep->from, ep->to)
695 && (ep->from != FRAME_POINTER_REGNUM || ! frame_pointer_needed));
696 }
697 #else
698 reg_eliminate[0].can_eliminate = reg_eliminate[0].can_eliminate_previous
699 = ! frame_pointer_needed;
700 #endif
701
702 /* Count the number of eliminable registers and build the FROM and TO
703 REG rtx's. Note that code in gen_rtx will cause, e.g.,
704 gen_rtx (REG, Pmode, STACK_POINTER_REGNUM) to equal stack_pointer_rtx.
705 We depend on this. */
706 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
707 {
708 num_eliminable += ep->can_eliminate;
709 ep->from_rtx = gen_rtx (REG, Pmode, ep->from);
710 ep->to_rtx = gen_rtx (REG, Pmode, ep->to);
711 }
712
713 num_labels = max_label_num () - get_first_label_num ();
714
715 /* Allocate the tables used to store offset information at labels. */
716 offsets_known_at = (char *) alloca (num_labels);
717 offsets_at
718 = (int (*)[NUM_ELIMINABLE_REGS])
719 alloca (num_labels * NUM_ELIMINABLE_REGS * sizeof (int));
720
721 offsets_known_at -= get_first_label_num ();
722 offsets_at -= get_first_label_num ();
723
724 /* Alter each pseudo-reg rtx to contain its hard reg number.
725 Assign stack slots to the pseudos that lack hard regs or equivalents.
726 Do not touch virtual registers. */
727
728 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_regno; i++)
729 alter_reg (i, -1);
730
731 /* Round size of stack frame to BIGGEST_ALIGNMENT. This must be done here
732 because the stack size may be a part of the offset computation for
733 register elimination. */
734 assign_stack_local (BLKmode, 0, 0);
735
736 /* If we have some registers we think can be eliminated, scan all insns to
737 see if there is an insn that sets one of these registers to something
738 other than itself plus a constant. If so, the register cannot be
739 eliminated. Doing this scan here eliminates an extra pass through the
740 main reload loop in the most common case where register elimination
741 cannot be done. */
742 for (insn = first; insn && num_eliminable; insn = NEXT_INSN (insn))
743 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
744 || GET_CODE (insn) == CALL_INSN)
745 note_stores (PATTERN (insn), mark_not_eliminable);
746
747 #ifndef REGISTER_CONSTRAINTS
748 /* If all the pseudo regs have hard regs,
749 except for those that are never referenced,
750 we know that no reloads are needed. */
751 /* But that is not true if there are register constraints, since
752 in that case some pseudos might be in the wrong kind of hard reg. */
753
754 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
755 if (reg_renumber[i] == -1 && reg_n_refs[i] != 0)
756 break;
757
758 if (i == max_regno && num_eliminable == 0 && ! caller_save_needed)
759 return;
760 #endif
761
762 /* Compute the order of preference for hard registers to spill.
763 Store them by decreasing preference in potential_reload_regs. */
764
765 order_regs_for_reload ();
766
767 /* So far, no hard regs have been spilled. */
768 n_spills = 0;
769 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
770 spill_reg_order[i] = -1;
771
772 /* On most machines, we can't use any register explicitly used in the
773 rtl as a spill register. But on some, we have to. Those will have
774 taken care to keep the life of hard regs as short as possible. */
775
776 #ifdef SMALL_REGISTER_CLASSES
777 CLEAR_HARD_REG_SET (forbidden_regs);
778 #else
779 COPY_HARD_REG_SET (forbidden_regs, bad_spill_regs);
780 #endif
781
782 /* Spill any hard regs that we know we can't eliminate. */
783 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
784 if (! ep->can_eliminate)
785 {
786 spill_hard_reg (ep->from, global, dumpfile, 1);
787 regs_ever_live[ep->from] = 1;
788 }
789
790 if (global)
791 for (i = 0; i < N_REG_CLASSES; i++)
792 {
793 basic_block_needs[i] = (char *)alloca (n_basic_blocks);
794 bzero (basic_block_needs[i], n_basic_blocks);
795 }
796
797 /* From now on, we need to emit any moves without making new pseudos. */
798 reload_in_progress = 1;
799
800 /* This loop scans the entire function each go-round
801 and repeats until one repetition spills no additional hard regs. */
802
803 /* This flag is set when a pseudo reg is spilled,
804 to require another pass. Note that getting an additional reload
805 reg does not necessarily imply any pseudo reg was spilled;
806 sometimes we find a reload reg that no pseudo reg was allocated in. */
807 something_changed = 1;
808 /* This flag is set if there are any insns that require reloading. */
809 something_needs_reloads = 0;
810 /* This flag is set if there are any insns that require register
811 eliminations. */
812 something_needs_elimination = 0;
813 while (something_changed)
814 {
815 rtx after_call = 0;
816
817 /* For each class, number of reload regs needed in that class.
818 This is the maximum over all insns of the needs in that class
819 of the individual insn. */
820 int max_needs[N_REG_CLASSES];
821 /* For each class, size of group of consecutive regs
822 that is needed for the reloads of this class. */
823 int group_size[N_REG_CLASSES];
824 /* For each class, max number of consecutive groups needed.
825 (Each group contains group_size[CLASS] consecutive registers.) */
826 int max_groups[N_REG_CLASSES];
827 /* For each class, max number needed of regs that don't belong
828 to any of the groups. */
829 int max_nongroups[N_REG_CLASSES];
830 /* For each class, the machine mode which requires consecutive
831 groups of regs of that class.
832 If two different modes ever require groups of one class,
833 they must be the same size and equally restrictive for that class,
834 otherwise we can't handle the complexity. */
835 enum machine_mode group_mode[N_REG_CLASSES];
836 /* Record the insn where each maximum need is first found. */
837 rtx max_needs_insn[N_REG_CLASSES];
838 rtx max_groups_insn[N_REG_CLASSES];
839 rtx max_nongroups_insn[N_REG_CLASSES];
840 rtx x;
841 int starting_frame_size = get_frame_size ();
842 static char *reg_class_names[] = REG_CLASS_NAMES;
843
844 something_changed = 0;
845 bzero (max_needs, sizeof max_needs);
846 bzero (max_groups, sizeof max_groups);
847 bzero (max_nongroups, sizeof max_nongroups);
848 bzero (max_needs_insn, sizeof max_needs_insn);
849 bzero (max_groups_insn, sizeof max_groups_insn);
850 bzero (max_nongroups_insn, sizeof max_nongroups_insn);
851 bzero (group_size, sizeof group_size);
852 for (i = 0; i < N_REG_CLASSES; i++)
853 group_mode[i] = VOIDmode;
854
855 /* Keep track of which basic blocks are needing the reloads. */
856 this_block = 0;
857
858 /* Remember whether any element of basic_block_needs
859 changes from 0 to 1 in this pass. */
860 new_basic_block_needs = 0;
861
862 /* Reset all offsets on eliminable registers to their initial values. */
863 #ifdef ELIMINABLE_REGS
864 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
865 {
866 INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, ep->initial_offset);
867 ep->previous_offset = ep->offset
868 = ep->max_offset = ep->initial_offset;
869 }
870 #else
871 #ifdef INITIAL_FRAME_POINTER_OFFSET
872 INITIAL_FRAME_POINTER_OFFSET (reg_eliminate[0].initial_offset);
873 #else
874 if (!FRAME_POINTER_REQUIRED)
875 abort ();
876 reg_eliminate[0].initial_offset = 0;
877 #endif
878 reg_eliminate[0].previous_offset = reg_eliminate[0].max_offset
879 = reg_eliminate[0].offset = reg_eliminate[0].initial_offset;
880 #endif
881
882 num_not_at_initial_offset = 0;
883
884 bzero (&offsets_known_at[get_first_label_num ()], num_labels);
885
886 /* Set a known offset for each forced label to be at the initial offset
887 of each elimination. We do this because we assume that all
888 computed jumps occur from a location where each elimination is
889 at its initial offset. */
890
891 for (x = forced_labels; x; x = XEXP (x, 1))
892 if (XEXP (x, 0))
893 set_label_offsets (XEXP (x, 0), NULL_RTX, 1);
894
895 /* For each pseudo register that has an equivalent location defined,
896 try to eliminate any eliminable registers (such as the frame pointer)
897 assuming initial offsets for the replacement register, which
898 is the normal case.
899
900 If the resulting location is directly addressable, substitute
901 the MEM we just got directly for the old REG.
902
903 If it is not addressable but is a constant or the sum of a hard reg
904 and constant, it is probably not addressable because the constant is
905 out of range, in that case record the address; we will generate
906 hairy code to compute the address in a register each time it is
907 needed.
908
909 If the location is not addressable, but does not have one of the
910 above forms, assign a stack slot. We have to do this to avoid the
911 potential of producing lots of reloads if, e.g., a location involves
912 a pseudo that didn't get a hard register and has an equivalent memory
913 location that also involves a pseudo that didn't get a hard register.
914
915 Perhaps at some point we will improve reload_when_needed handling
916 so this problem goes away. But that's very hairy. */
917
918 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
919 if (reg_renumber[i] < 0 && reg_equiv_memory_loc[i])
920 {
921 rtx x = eliminate_regs (reg_equiv_memory_loc[i], 0, NULL_RTX);
922
923 if (strict_memory_address_p (GET_MODE (regno_reg_rtx[i]),
924 XEXP (x, 0)))
925 reg_equiv_mem[i] = x, reg_equiv_address[i] = 0;
926 else if (CONSTANT_P (XEXP (x, 0))
927 || (GET_CODE (XEXP (x, 0)) == PLUS
928 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
929 && (REGNO (XEXP (XEXP (x, 0), 0))
930 < FIRST_PSEUDO_REGISTER)
931 && CONSTANT_P (XEXP (XEXP (x, 0), 1))))
932 reg_equiv_address[i] = XEXP (x, 0), reg_equiv_mem[i] = 0;
933 else
934 {
935 /* Make a new stack slot. Then indicate that something
936 changed so we go back and recompute offsets for
937 eliminable registers because the allocation of memory
938 below might change some offset. reg_equiv_{mem,address}
939 will be set up for this pseudo on the next pass around
940 the loop. */
941 reg_equiv_memory_loc[i] = 0;
942 reg_equiv_init[i] = 0;
943 alter_reg (i, -1);
944 something_changed = 1;
945 }
946 }
947
948 /* If we allocated another pseudo to the stack, redo elimination
949 bookkeeping. */
950 if (something_changed)
951 continue;
952
953 /* If caller-saves needs a group, initialize the group to include
954 the size and mode required for caller-saves. */
955
956 if (caller_save_group_size > 1)
957 {
958 group_mode[(int) caller_save_spill_class] = Pmode;
959 group_size[(int) caller_save_spill_class] = caller_save_group_size;
960 }
961
962 /* Compute the most additional registers needed by any instruction.
963 Collect information separately for each class of regs. */
964
965 for (insn = first; insn; insn = NEXT_INSN (insn))
966 {
967 if (global && this_block + 1 < n_basic_blocks
968 && insn == basic_block_head[this_block+1])
969 ++this_block;
970
971 /* If this is a label, a JUMP_INSN, or has REG_NOTES (which
972 might include REG_LABEL), we need to see what effects this
973 has on the known offsets at labels. */
974
975 if (GET_CODE (insn) == CODE_LABEL || GET_CODE (insn) == JUMP_INSN
976 || (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
977 && REG_NOTES (insn) != 0))
978 set_label_offsets (insn, insn, 0);
979
980 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
981 {
982 /* Nonzero means don't use a reload reg that overlaps
983 the place where a function value can be returned. */
984 rtx avoid_return_reg = 0;
985
986 rtx old_body = PATTERN (insn);
987 int old_code = INSN_CODE (insn);
988 rtx old_notes = REG_NOTES (insn);
989 int did_elimination = 0;
990 int max_total_input_groups = 0, max_total_output_groups = 0;
991
992 /* To compute the number of reload registers of each class
993 needed for an insn, we must simulate what choose_reload_regs
994 can do. We do this by splitting an insn into an "input" and
995 an "output" part. RELOAD_OTHER reloads are used in both.
996 The input part uses those reloads, RELOAD_FOR_INPUT reloads,
997 which must be live over the entire input section of reloads,
998 and the maximum of all the RELOAD_FOR_INPUT_ADDRESS and
999 RELOAD_FOR_OPERAND_ADDRESS reloads, which conflict with the
1000 inputs.
1001
1002 The registers needed for output are RELOAD_OTHER and
1003 RELOAD_FOR_OUTPUT, which are live for the entire output
1004 portion, and the maximum of all the RELOAD_FOR_OUTPUT_ADDRESS
1005 reloads for each operand.
1006
1007 The total number of registers needed is the maximum of the
1008 inputs and outputs. */
1009
1010 /* These just count RELOAD_OTHER. */
1011 int insn_needs[N_REG_CLASSES];
1012 int insn_groups[N_REG_CLASSES];
1013 int insn_total_groups = 0;
1014
1015 /* Count RELOAD_FOR_INPUT reloads. */
1016 int insn_needs_for_inputs[N_REG_CLASSES];
1017 int insn_groups_for_inputs[N_REG_CLASSES];
1018 int insn_total_groups_for_inputs = 0;
1019
1020 /* Count RELOAD_FOR_OUTPUT reloads. */
1021 int insn_needs_for_outputs[N_REG_CLASSES];
1022 int insn_groups_for_outputs[N_REG_CLASSES];
1023 int insn_total_groups_for_outputs = 0;
1024
1025 /* Count RELOAD_FOR_INSN reloads. */
1026 int insn_needs_for_insn[N_REG_CLASSES];
1027 int insn_groups_for_insn[N_REG_CLASSES];
1028 int insn_total_groups_for_insn = 0;
1029
1030 /* Count RELOAD_FOR_OTHER_ADDRESS reloads. */
1031 int insn_needs_for_other_addr[N_REG_CLASSES];
1032 int insn_groups_for_other_addr[N_REG_CLASSES];
1033 int insn_total_groups_for_other_addr = 0;
1034
1035 /* Count RELOAD_FOR_INPUT_ADDRESS reloads. */
1036 int insn_needs_for_in_addr[MAX_RECOG_OPERANDS][N_REG_CLASSES];
1037 int insn_groups_for_in_addr[MAX_RECOG_OPERANDS][N_REG_CLASSES];
1038 int insn_total_groups_for_in_addr[MAX_RECOG_OPERANDS];
1039
1040 /* Count RELOAD_FOR_OUTPUT_ADDRESS reloads. */
1041 int insn_needs_for_out_addr[MAX_RECOG_OPERANDS][N_REG_CLASSES];
1042 int insn_groups_for_out_addr[MAX_RECOG_OPERANDS][N_REG_CLASSES];
1043 int insn_total_groups_for_out_addr[MAX_RECOG_OPERANDS];
1044
1045 /* Count RELOAD_FOR_OPERAND_ADDRESS reloads. */
1046 int insn_needs_for_op_addr[N_REG_CLASSES];
1047 int insn_groups_for_op_addr[N_REG_CLASSES];
1048 int insn_total_groups_for_op_addr = 0;
1049
1050 #if 0 /* This wouldn't work nowadays, since optimize_bit_field
1051 looks for non-strict memory addresses. */
1052 /* Optimization: a bit-field instruction whose field
1053 happens to be a byte or halfword in memory
1054 can be changed to a move instruction. */
1055
1056 if (GET_CODE (PATTERN (insn)) == SET)
1057 {
1058 rtx dest = SET_DEST (PATTERN (insn));
1059 rtx src = SET_SRC (PATTERN (insn));
1060
1061 if (GET_CODE (dest) == ZERO_EXTRACT
1062 || GET_CODE (dest) == SIGN_EXTRACT)
1063 optimize_bit_field (PATTERN (insn), insn, reg_equiv_mem);
1064 if (GET_CODE (src) == ZERO_EXTRACT
1065 || GET_CODE (src) == SIGN_EXTRACT)
1066 optimize_bit_field (PATTERN (insn), insn, reg_equiv_mem);
1067 }
1068 #endif
1069
1070 /* If needed, eliminate any eliminable registers. */
1071 if (num_eliminable)
1072 did_elimination = eliminate_regs_in_insn (insn, 0);
1073
1074 #ifdef SMALL_REGISTER_CLASSES
1075 /* Set avoid_return_reg if this is an insn
1076 that might use the value of a function call. */
1077 if (GET_CODE (insn) == CALL_INSN)
1078 {
1079 if (GET_CODE (PATTERN (insn)) == SET)
1080 after_call = SET_DEST (PATTERN (insn));
1081 else if (GET_CODE (PATTERN (insn)) == PARALLEL
1082 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
1083 after_call = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
1084 else
1085 after_call = 0;
1086 }
1087 else if (after_call != 0
1088 && !(GET_CODE (PATTERN (insn)) == SET
1089 && SET_DEST (PATTERN (insn)) == stack_pointer_rtx))
1090 {
1091 if (reg_mentioned_p (after_call, PATTERN (insn)))
1092 avoid_return_reg = after_call;
1093 after_call = 0;
1094 }
1095 #endif /* SMALL_REGISTER_CLASSES */
1096
1097 /* Analyze the instruction. */
1098 find_reloads (insn, 0, spill_indirect_levels, global,
1099 spill_reg_order);
1100
1101 /* Remember for later shortcuts which insns had any reloads or
1102 register eliminations.
1103
1104 One might think that it would be worthwhile to mark insns
1105 that need register replacements but not reloads, but this is
1106 not safe because find_reloads may do some manipulation of
1107 the insn (such as swapping commutative operands), which would
1108 be lost when we restore the old pattern after register
1109 replacement. So the actions of find_reloads must be redone in
1110 subsequent passes or in reload_as_needed.
1111
1112 However, it is safe to mark insns that need reloads
1113 but not register replacement. */
1114
1115 PUT_MODE (insn, (did_elimination ? QImode
1116 : n_reloads ? HImode
1117 : GET_MODE (insn) == DImode ? DImode
1118 : VOIDmode));
1119
1120 /* Discard any register replacements done. */
1121 if (did_elimination)
1122 {
1123 obstack_free (&reload_obstack, reload_firstobj);
1124 PATTERN (insn) = old_body;
1125 INSN_CODE (insn) = old_code;
1126 REG_NOTES (insn) = old_notes;
1127 something_needs_elimination = 1;
1128 }
1129
1130 /* If this insn has no reloads, we need not do anything except
1131 in the case of a CALL_INSN when we have caller-saves and
1132 caller-save needs reloads. */
1133
1134 if (n_reloads == 0
1135 && ! (GET_CODE (insn) == CALL_INSN
1136 && caller_save_spill_class != NO_REGS))
1137 continue;
1138
1139 something_needs_reloads = 1;
1140
1141 for (i = 0; i < N_REG_CLASSES; i++)
1142 {
1143 insn_needs[i] = 0, insn_groups[i] = 0;
1144 insn_needs_for_inputs[i] = 0, insn_groups_for_inputs[i] = 0;
1145 insn_needs_for_outputs[i] = 0, insn_groups_for_outputs[i] = 0;
1146 insn_needs_for_insn[i] = 0, insn_groups_for_insn[i] = 0;
1147 insn_needs_for_op_addr[i] = 0, insn_groups_for_op_addr[i] = 0;
1148 insn_needs_for_other_addr[i] = 0;
1149 insn_groups_for_other_addr[i] = 0;
1150 }
1151
1152 for (i = 0; i < reload_n_operands; i++)
1153 {
1154 insn_total_groups_for_in_addr[i] = 0;
1155 insn_total_groups_for_out_addr[i] = 0;
1156
1157 for (j = 0; j < N_REG_CLASSES; j++)
1158 {
1159 insn_needs_for_in_addr[i][j] = 0;
1160 insn_needs_for_out_addr[i][j] = 0;
1161 insn_groups_for_in_addr[i][j] = 0;
1162 insn_groups_for_out_addr[i][j] = 0;
1163 }
1164 }
1165
1166 /* Count each reload once in every class
1167 containing the reload's own class. */
1168
1169 for (i = 0; i < n_reloads; i++)
1170 {
1171 register enum reg_class *p;
1172 enum reg_class class = reload_reg_class[i];
1173 int size;
1174 enum machine_mode mode;
1175 int *this_groups;
1176 int *this_needs;
1177 int *this_total_groups;
1178
1179 /* Don't count the dummy reloads, for which one of the
1180 regs mentioned in the insn can be used for reloading.
1181 Don't count optional reloads.
1182 Don't count reloads that got combined with others. */
1183 if (reload_reg_rtx[i] != 0
1184 || reload_optional[i] != 0
1185 || (reload_out[i] == 0 && reload_in[i] == 0
1186 && ! reload_secondary_p[i]))
1187 continue;
1188
1189 /* Show that a reload register of this class is needed
1190 in this basic block. We do not use insn_needs and
1191 insn_groups because they are overly conservative for
1192 this purpose. */
1193 if (global && ! basic_block_needs[(int) class][this_block])
1194 {
1195 basic_block_needs[(int) class][this_block] = 1;
1196 new_basic_block_needs = 1;
1197 }
1198
1199 /* Decide which time-of-use to count this reload for. */
1200 switch (reload_when_needed[i])
1201 {
1202 case RELOAD_OTHER:
1203 this_needs = insn_needs;
1204 this_groups = insn_groups;
1205 this_total_groups = &insn_total_groups;
1206 break;
1207
1208 case RELOAD_FOR_INPUT:
1209 this_needs = insn_needs_for_inputs;
1210 this_groups = insn_groups_for_inputs;
1211 this_total_groups = &insn_total_groups_for_inputs;
1212 break;
1213
1214 case RELOAD_FOR_OUTPUT:
1215 this_needs = insn_needs_for_outputs;
1216 this_groups = insn_groups_for_outputs;
1217 this_total_groups = &insn_total_groups_for_outputs;
1218 break;
1219
1220 case RELOAD_FOR_INSN:
1221 this_needs = insn_needs_for_insn;
1222 this_groups = insn_groups_for_outputs;
1223 this_total_groups = &insn_total_groups_for_insn;
1224 break;
1225
1226 case RELOAD_FOR_OTHER_ADDRESS:
1227 this_needs = insn_needs_for_other_addr;
1228 this_groups = insn_groups_for_other_addr;
1229 this_total_groups = &insn_total_groups_for_other_addr;
1230 break;
1231
1232 case RELOAD_FOR_INPUT_ADDRESS:
1233 this_needs = insn_needs_for_in_addr[reload_opnum[i]];
1234 this_groups = insn_groups_for_in_addr[reload_opnum[i]];
1235 this_total_groups
1236 = &insn_total_groups_for_in_addr[reload_opnum[i]];
1237 break;
1238
1239 case RELOAD_FOR_OUTPUT_ADDRESS:
1240 this_needs = insn_needs_for_out_addr[reload_opnum[i]];
1241 this_groups = insn_groups_for_out_addr[reload_opnum[i]];
1242 this_total_groups
1243 = &insn_total_groups_for_out_addr[reload_opnum[i]];
1244 break;
1245
1246 case RELOAD_FOR_OPERAND_ADDRESS:
1247 this_needs = insn_needs_for_op_addr;
1248 this_groups = insn_groups_for_op_addr;
1249 this_total_groups = &insn_total_groups_for_op_addr;
1250 break;
1251 }
1252
1253 mode = reload_inmode[i];
1254 if (GET_MODE_SIZE (reload_outmode[i]) > GET_MODE_SIZE (mode))
1255 mode = reload_outmode[i];
1256 size = CLASS_MAX_NREGS (class, mode);
1257 if (size > 1)
1258 {
1259 enum machine_mode other_mode, allocate_mode;
1260
1261 /* Count number of groups needed separately from
1262 number of individual regs needed. */
1263 this_groups[(int) class]++;
1264 p = reg_class_superclasses[(int) class];
1265 while (*p != LIM_REG_CLASSES)
1266 this_groups[(int) *p++]++;
1267 (*this_total_groups)++;
1268
1269 /* Record size and mode of a group of this class. */
1270 /* If more than one size group is needed,
1271 make all groups the largest needed size. */
1272 if (group_size[(int) class] < size)
1273 {
1274 other_mode = group_mode[(int) class];
1275 allocate_mode = mode;
1276
1277 group_size[(int) class] = size;
1278 group_mode[(int) class] = mode;
1279 }
1280 else
1281 {
1282 other_mode = mode;
1283 allocate_mode = group_mode[(int) class];
1284 }
1285
1286 /* Crash if two dissimilar machine modes both need
1287 groups of consecutive regs of the same class. */
1288
1289 if (other_mode != VOIDmode
1290 && other_mode != allocate_mode
1291 && ! modes_equiv_for_class_p (allocate_mode,
1292 other_mode,
1293 class))
1294 abort ();
1295 }
1296 else if (size == 1)
1297 {
1298 this_needs[(int) class] += 1;
1299 p = reg_class_superclasses[(int) class];
1300 while (*p != LIM_REG_CLASSES)
1301 this_needs[(int) *p++] += 1;
1302 }
1303 else
1304 abort ();
1305 }
1306
1307 /* All reloads have been counted for this insn;
1308 now merge the various times of use.
1309 This sets insn_needs, etc., to the maximum total number
1310 of registers needed at any point in this insn. */
1311
1312 for (i = 0; i < N_REG_CLASSES; i++)
1313 {
1314 int in_max, out_max;
1315
1316 for (in_max = 0, out_max = 0, j = 0;
1317 j < reload_n_operands; j++)
1318 {
1319 in_max = MAX (in_max, insn_needs_for_in_addr[j][i]);
1320 out_max = MAX (out_max, insn_needs_for_out_addr[j][i]);
1321 }
1322
1323 /* RELOAD_FOR_INSN reloads conflict with inputs, outputs,
1324 and operand addresses but not things used to reload them.
1325 Similarly, RELOAD_FOR_OPERAND_ADDRESS reloads don't
1326 conflict with things needed to reload inputs or
1327 outputs. */
1328
1329 in_max = MAX (in_max, insn_needs_for_op_addr[i]);
1330 out_max = MAX (out_max, insn_needs_for_insn[i]);
1331
1332 insn_needs_for_inputs[i]
1333 = MAX (insn_needs_for_inputs[i]
1334 + insn_needs_for_op_addr[i]
1335 + insn_needs_for_insn[i],
1336 in_max + insn_needs_for_inputs[i]);
1337
1338 insn_needs_for_outputs[i] += out_max;
1339 insn_needs[i] += MAX (MAX (insn_needs_for_inputs[i],
1340 insn_needs_for_outputs[i]),
1341 insn_needs_for_other_addr[i]);
1342
1343 for (in_max = 0, out_max = 0, j = 0;
1344 j < reload_n_operands; j++)
1345 {
1346 in_max = MAX (in_max, insn_groups_for_in_addr[j][i]);
1347 out_max = MAX (out_max, insn_groups_for_out_addr[j][i]);
1348 }
1349
1350 in_max = MAX (in_max, insn_groups_for_op_addr[i]);
1351 out_max = MAX (out_max, insn_groups_for_insn[i]);
1352
1353 insn_groups_for_inputs[i]
1354 = MAX (insn_groups_for_inputs[i]
1355 + insn_groups_for_op_addr[i]
1356 + insn_groups_for_insn[i],
1357 in_max + insn_groups_for_inputs[i]);
1358
1359 insn_groups_for_outputs[i] += out_max;
1360 insn_groups[i] += MAX (MAX (insn_groups_for_inputs[i],
1361 insn_groups_for_outputs[i]),
1362 insn_groups_for_other_addr[i]);
1363 }
1364
1365 for (i = 0; i < reload_n_operands; i++)
1366 {
1367 max_total_input_groups
1368 = MAX (max_total_input_groups,
1369 insn_total_groups_for_in_addr[i]);
1370 max_total_output_groups
1371 = MAX (max_total_output_groups,
1372 insn_total_groups_for_out_addr[i]);
1373 }
1374
1375 max_total_input_groups = MAX (max_total_input_groups,
1376 insn_total_groups_for_op_addr);
1377 max_total_output_groups = MAX (max_total_output_groups,
1378 insn_total_groups_for_insn);
1379
1380 insn_total_groups_for_inputs
1381 = MAX (max_total_input_groups + insn_total_groups_for_op_addr
1382 + insn_total_groups_for_insn,
1383 max_total_input_groups + insn_total_groups_for_inputs);
1384
1385 insn_total_groups_for_outputs += max_total_output_groups;
1386
1387 insn_total_groups += MAX (MAX (insn_total_groups_for_outputs,
1388 insn_total_groups_for_inputs),
1389 insn_total_groups_for_other_addr);
1390
1391 /* If this is a CALL_INSN and caller-saves will need
1392 a spill register, act as if the spill register is
1393 needed for this insn. However, the spill register
1394 can be used by any reload of this insn, so we only
1395 need do something if no need for that class has
1396 been recorded.
1397
1398 The assumption that every CALL_INSN will trigger a
1399 caller-save is highly conservative, however, the number
1400 of cases where caller-saves will need a spill register but
1401 a block containing a CALL_INSN won't need a spill register
1402 of that class should be quite rare.
1403
1404 If a group is needed, the size and mode of the group will
1405 have been set up at the beginning of this loop. */
1406
1407 if (GET_CODE (insn) == CALL_INSN
1408 && caller_save_spill_class != NO_REGS)
1409 {
1410 int *caller_save_needs
1411 = (caller_save_group_size > 1 ? insn_groups : insn_needs);
1412
1413 if (caller_save_needs[(int) caller_save_spill_class] == 0)
1414 {
1415 register enum reg_class *p
1416 = reg_class_superclasses[(int) caller_save_spill_class];
1417
1418 caller_save_needs[(int) caller_save_spill_class]++;
1419
1420 while (*p != LIM_REG_CLASSES)
1421 caller_save_needs[(int) *p++] += 1;
1422 }
1423
1424 if (caller_save_group_size > 1)
1425 insn_total_groups = MAX (insn_total_groups, 1);
1426
1427
1428 /* Show that this basic block will need a register of
1429 this class. */
1430
1431 if (global
1432 && ! (basic_block_needs[(int) caller_save_spill_class]
1433 [this_block]))
1434 {
1435 basic_block_needs[(int) caller_save_spill_class]
1436 [this_block] = 1;
1437 new_basic_block_needs = 1;
1438 }
1439 }
1440
1441 #ifdef SMALL_REGISTER_CLASSES
1442 /* If this insn stores the value of a function call,
1443 and that value is in a register that has been spilled,
1444 and if the insn needs a reload in a class
1445 that might use that register as the reload register,
1446 then add an extra need in that class.
1447 This makes sure we have a register available that does
1448 not overlap the return value. */
1449 if (avoid_return_reg)
1450 {
1451 int regno = REGNO (avoid_return_reg);
1452 int nregs
1453 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
1454 int r;
1455 int basic_needs[N_REG_CLASSES], basic_groups[N_REG_CLASSES];
1456
1457 /* First compute the "basic needs", which counts a
1458 need only in the smallest class in which it
1459 is required. */
1460
1461 bcopy (insn_needs, basic_needs, sizeof basic_needs);
1462 bcopy (insn_groups, basic_groups, sizeof basic_groups);
1463
1464 for (i = 0; i < N_REG_CLASSES; i++)
1465 {
1466 enum reg_class *p;
1467
1468 if (basic_needs[i] >= 0)
1469 for (p = reg_class_superclasses[i];
1470 *p != LIM_REG_CLASSES; p++)
1471 basic_needs[(int) *p] -= basic_needs[i];
1472
1473 if (basic_groups[i] >= 0)
1474 for (p = reg_class_superclasses[i];
1475 *p != LIM_REG_CLASSES; p++)
1476 basic_groups[(int) *p] -= basic_groups[i];
1477 }
1478
1479 /* Now count extra regs if there might be a conflict with
1480 the return value register.
1481
1482 ??? This is not quite correct because we don't properly
1483 handle the case of groups, but if we end up doing
1484 something wrong, it either will end up not mattering or
1485 we will abort elsewhere. */
1486
1487 for (r = regno; r < regno + nregs; r++)
1488 if (spill_reg_order[r] >= 0)
1489 for (i = 0; i < N_REG_CLASSES; i++)
1490 if (TEST_HARD_REG_BIT (reg_class_contents[i], r))
1491 {
1492 if (basic_needs[i] > 0 || basic_groups[i] > 0)
1493 {
1494 enum reg_class *p;
1495
1496 insn_needs[i]++;
1497 p = reg_class_superclasses[i];
1498 while (*p != LIM_REG_CLASSES)
1499 insn_needs[(int) *p++]++;
1500 }
1501 }
1502 }
1503 #endif /* SMALL_REGISTER_CLASSES */
1504
1505 /* For each class, collect maximum need of any insn. */
1506
1507 for (i = 0; i < N_REG_CLASSES; i++)
1508 {
1509 if (max_needs[i] < insn_needs[i])
1510 {
1511 max_needs[i] = insn_needs[i];
1512 max_needs_insn[i] = insn;
1513 }
1514 if (max_groups[i] < insn_groups[i])
1515 {
1516 max_groups[i] = insn_groups[i];
1517 max_groups_insn[i] = insn;
1518 }
1519 if (insn_total_groups > 0)
1520 if (max_nongroups[i] < insn_needs[i])
1521 {
1522 max_nongroups[i] = insn_needs[i];
1523 max_nongroups_insn[i] = insn;
1524 }
1525 }
1526 }
1527 /* Note that there is a continue statement above. */
1528 }
1529
1530 /* If we allocated any new memory locations, make another pass
1531 since it might have changed elimination offsets. */
1532 if (starting_frame_size != get_frame_size ())
1533 something_changed = 1;
1534
1535 if (dumpfile)
1536 for (i = 0; i < N_REG_CLASSES; i++)
1537 {
1538 if (max_needs[i] > 0)
1539 fprintf (dumpfile,
1540 ";; Need %d reg%s of class %s (for insn %d).\n",
1541 max_needs[i], max_needs[i] == 1 ? "" : "s",
1542 reg_class_names[i], INSN_UID (max_needs_insn[i]));
1543 if (max_nongroups[i] > 0)
1544 fprintf (dumpfile,
1545 ";; Need %d nongroup reg%s of class %s (for insn %d).\n",
1546 max_nongroups[i], max_nongroups[i] == 1 ? "" : "s",
1547 reg_class_names[i], INSN_UID (max_nongroups_insn[i]));
1548 if (max_groups[i] > 0)
1549 fprintf (dumpfile,
1550 ";; Need %d group%s (%smode) of class %s (for insn %d).\n",
1551 max_groups[i], max_groups[i] == 1 ? "" : "s",
1552 mode_name[(int) group_mode[i]],
1553 reg_class_names[i], INSN_UID (max_groups_insn[i]));
1554 }
1555
1556 /* If we have caller-saves, set up the save areas and see if caller-save
1557 will need a spill register. */
1558
1559 if (caller_save_needed
1560 && ! setup_save_areas (&something_changed)
1561 && caller_save_spill_class == NO_REGS)
1562 {
1563 /* The class we will need depends on whether the machine
1564 supports the sum of two registers for an address; see
1565 find_address_reloads for details. */
1566
1567 caller_save_spill_class
1568 = double_reg_address_ok ? INDEX_REG_CLASS : BASE_REG_CLASS;
1569 caller_save_group_size
1570 = CLASS_MAX_NREGS (caller_save_spill_class, Pmode);
1571 something_changed = 1;
1572 }
1573
1574 /* See if anything that happened changes which eliminations are valid.
1575 For example, on the Sparc, whether or not the frame pointer can
1576 be eliminated can depend on what registers have been used. We need
1577 not check some conditions again (such as flag_omit_frame_pointer)
1578 since they can't have changed. */
1579
1580 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1581 if ((ep->from == FRAME_POINTER_REGNUM && FRAME_POINTER_REQUIRED)
1582 #ifdef ELIMINABLE_REGS
1583 || ! CAN_ELIMINATE (ep->from, ep->to)
1584 #endif
1585 )
1586 ep->can_eliminate = 0;
1587
1588 /* Look for the case where we have discovered that we can't replace
1589 register A with register B and that means that we will now be
1590 trying to replace register A with register C. This means we can
1591 no longer replace register C with register B and we need to disable
1592 such an elimination, if it exists. This occurs often with A == ap,
1593 B == sp, and C == fp. */
1594
1595 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1596 {
1597 struct elim_table *op;
1598 register int new_to = -1;
1599
1600 if (! ep->can_eliminate && ep->can_eliminate_previous)
1601 {
1602 /* Find the current elimination for ep->from, if there is a
1603 new one. */
1604 for (op = reg_eliminate;
1605 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
1606 if (op->from == ep->from && op->can_eliminate)
1607 {
1608 new_to = op->to;
1609 break;
1610 }
1611
1612 /* See if there is an elimination of NEW_TO -> EP->TO. If so,
1613 disable it. */
1614 for (op = reg_eliminate;
1615 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
1616 if (op->from == new_to && op->to == ep->to)
1617 op->can_eliminate = 0;
1618 }
1619 }
1620
1621 /* See if any registers that we thought we could eliminate the previous
1622 time are no longer eliminable. If so, something has changed and we
1623 must spill the register. Also, recompute the number of eliminable
1624 registers and see if the frame pointer is needed; it is if there is
1625 no elimination of the frame pointer that we can perform. */
1626
1627 frame_pointer_needed = 1;
1628 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1629 {
1630 if (ep->can_eliminate && ep->from == FRAME_POINTER_REGNUM)
1631 frame_pointer_needed = 0;
1632
1633 if (! ep->can_eliminate && ep->can_eliminate_previous)
1634 {
1635 ep->can_eliminate_previous = 0;
1636 spill_hard_reg (ep->from, global, dumpfile, 1);
1637 regs_ever_live[ep->from] = 1;
1638 something_changed = 1;
1639 num_eliminable--;
1640 }
1641 }
1642
1643 /* If all needs are met, we win. */
1644
1645 for (i = 0; i < N_REG_CLASSES; i++)
1646 if (max_needs[i] > 0 || max_groups[i] > 0 || max_nongroups[i] > 0)
1647 break;
1648 if (i == N_REG_CLASSES && !new_basic_block_needs && ! something_changed)
1649 break;
1650
1651 /* Not all needs are met; must spill some hard regs. */
1652
1653 /* Put all registers spilled so far back in potential_reload_regs, but
1654 put them at the front, since we've already spilled most of the
1655 pseudos in them (we might have left some pseudos unspilled if they
1656 were in a block that didn't need any spill registers of a conflicting
1657 class). We used to try to mark off the need for those registers,
1658 but doing so properly is very complex and reallocating them is the
1659 simpler approach. First, "pack" potential_reload_regs by pushing
1660 any nonnegative entries towards the end. That will leave room
1661 for the registers we already spilled.
1662
1663 Also, undo the marking of the spill registers from the last time
1664 around in FORBIDDEN_REGS since we will probably be allocating
1665 them again below.
1666
1667 ??? It is theoretically possible that we might end up not using one
1668 of our previously-spilled registers in this allocation, even though
1669 they are at the head of the list. It's not clear what to do about
1670 this, but it was no better before, when we marked off the needs met
1671 by the previously-spilled registers. With the current code, globals
1672 can be allocated into these registers, but locals cannot. */
1673
1674 if (n_spills)
1675 {
1676 for (i = j = FIRST_PSEUDO_REGISTER - 1; i >= 0; i--)
1677 if (potential_reload_regs[i] != -1)
1678 potential_reload_regs[j--] = potential_reload_regs[i];
1679
1680 for (i = 0; i < n_spills; i++)
1681 {
1682 potential_reload_regs[i] = spill_regs[i];
1683 spill_reg_order[spill_regs[i]] = -1;
1684 CLEAR_HARD_REG_BIT (forbidden_regs, spill_regs[i]);
1685 }
1686
1687 n_spills = 0;
1688 }
1689
1690 /* Now find more reload regs to satisfy the remaining need
1691 Do it by ascending class number, since otherwise a reg
1692 might be spilled for a big class and might fail to count
1693 for a smaller class even though it belongs to that class.
1694
1695 Count spilled regs in `spills', and add entries to
1696 `spill_regs' and `spill_reg_order'.
1697
1698 ??? Note there is a problem here.
1699 When there is a need for a group in a high-numbered class,
1700 and also need for non-group regs that come from a lower class,
1701 the non-group regs are chosen first. If there aren't many regs,
1702 they might leave no room for a group.
1703
1704 This was happening on the 386. To fix it, we added the code
1705 that calls possible_group_p, so that the lower class won't
1706 break up the last possible group.
1707
1708 Really fixing the problem would require changes above
1709 in counting the regs already spilled, and in choose_reload_regs.
1710 It might be hard to avoid introducing bugs there. */
1711
1712 CLEAR_HARD_REG_SET (counted_for_groups);
1713 CLEAR_HARD_REG_SET (counted_for_nongroups);
1714
1715 for (class = 0; class < N_REG_CLASSES; class++)
1716 {
1717 /* First get the groups of registers.
1718 If we got single registers first, we might fragment
1719 possible groups. */
1720 while (max_groups[class] > 0)
1721 {
1722 /* If any single spilled regs happen to form groups,
1723 count them now. Maybe we don't really need
1724 to spill another group. */
1725 count_possible_groups (group_size, group_mode, max_groups);
1726
1727 if (max_groups[class] <= 0)
1728 break;
1729
1730 /* Groups of size 2 (the only groups used on most machines)
1731 are treated specially. */
1732 if (group_size[class] == 2)
1733 {
1734 /* First, look for a register that will complete a group. */
1735 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1736 {
1737 int other;
1738
1739 j = potential_reload_regs[i];
1740 if (j >= 0 && ! TEST_HARD_REG_BIT (bad_spill_regs, j)
1741 &&
1742 ((j > 0 && (other = j - 1, spill_reg_order[other] >= 0)
1743 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1744 && TEST_HARD_REG_BIT (reg_class_contents[class], other)
1745 && HARD_REGNO_MODE_OK (other, group_mode[class])
1746 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1747 other)
1748 /* We don't want one part of another group.
1749 We could get "two groups" that overlap! */
1750 && ! TEST_HARD_REG_BIT (counted_for_groups, other))
1751 ||
1752 (j < FIRST_PSEUDO_REGISTER - 1
1753 && (other = j + 1, spill_reg_order[other] >= 0)
1754 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1755 && TEST_HARD_REG_BIT (reg_class_contents[class], other)
1756 && HARD_REGNO_MODE_OK (j, group_mode[class])
1757 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1758 other)
1759 && ! TEST_HARD_REG_BIT (counted_for_groups,
1760 other))))
1761 {
1762 register enum reg_class *p;
1763
1764 /* We have found one that will complete a group,
1765 so count off one group as provided. */
1766 max_groups[class]--;
1767 p = reg_class_superclasses[class];
1768 while (*p != LIM_REG_CLASSES)
1769 max_groups[(int) *p++]--;
1770
1771 /* Indicate both these regs are part of a group. */
1772 SET_HARD_REG_BIT (counted_for_groups, j);
1773 SET_HARD_REG_BIT (counted_for_groups, other);
1774 break;
1775 }
1776 }
1777 /* We can't complete a group, so start one. */
1778 if (i == FIRST_PSEUDO_REGISTER)
1779 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1780 {
1781 j = potential_reload_regs[i];
1782 if (j >= 0 && j + 1 < FIRST_PSEUDO_REGISTER
1783 && spill_reg_order[j] < 0 && spill_reg_order[j + 1] < 0
1784 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1785 && TEST_HARD_REG_BIT (reg_class_contents[class], j + 1)
1786 && HARD_REGNO_MODE_OK (j, group_mode[class])
1787 && ! TEST_HARD_REG_BIT (counted_for_nongroups,
1788 j + 1))
1789 break;
1790 }
1791
1792 /* I should be the index in potential_reload_regs
1793 of the new reload reg we have found. */
1794
1795 if (i >= FIRST_PSEUDO_REGISTER)
1796 {
1797 /* There are no groups left to spill. */
1798 spill_failure (max_groups_insn[class]);
1799 failure = 1;
1800 goto failed;
1801 }
1802 else
1803 something_changed
1804 |= new_spill_reg (i, class, max_needs, NULL_PTR,
1805 global, dumpfile);
1806 }
1807 else
1808 {
1809 /* For groups of more than 2 registers,
1810 look for a sufficient sequence of unspilled registers,
1811 and spill them all at once. */
1812 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1813 {
1814 int k;
1815
1816 j = potential_reload_regs[i];
1817 if (j >= 0
1818 && j + group_size[class] <= FIRST_PSEUDO_REGISTER
1819 && HARD_REGNO_MODE_OK (j, group_mode[class]))
1820 {
1821 /* Check each reg in the sequence. */
1822 for (k = 0; k < group_size[class]; k++)
1823 if (! (spill_reg_order[j + k] < 0
1824 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + k)
1825 && TEST_HARD_REG_BIT (reg_class_contents[class], j + k)))
1826 break;
1827 /* We got a full sequence, so spill them all. */
1828 if (k == group_size[class])
1829 {
1830 register enum reg_class *p;
1831 for (k = 0; k < group_size[class]; k++)
1832 {
1833 int idx;
1834 SET_HARD_REG_BIT (counted_for_groups, j + k);
1835 for (idx = 0; idx < FIRST_PSEUDO_REGISTER; idx++)
1836 if (potential_reload_regs[idx] == j + k)
1837 break;
1838 something_changed
1839 |= new_spill_reg (idx, class,
1840 max_needs, NULL_PTR,
1841 global, dumpfile);
1842 }
1843
1844 /* We have found one that will complete a group,
1845 so count off one group as provided. */
1846 max_groups[class]--;
1847 p = reg_class_superclasses[class];
1848 while (*p != LIM_REG_CLASSES)
1849 max_groups[(int) *p++]--;
1850
1851 break;
1852 }
1853 }
1854 }
1855 /* We couldn't find any registers for this reload.
1856 Avoid going into an infinite loop. */
1857 if (i >= FIRST_PSEUDO_REGISTER)
1858 {
1859 /* There are no groups left. */
1860 spill_failure (max_groups_insn[class]);
1861 failure = 1;
1862 goto failed;
1863 }
1864 }
1865 }
1866
1867 /* Now similarly satisfy all need for single registers. */
1868
1869 while (max_needs[class] > 0 || max_nongroups[class] > 0)
1870 {
1871 /* Consider the potential reload regs that aren't
1872 yet in use as reload regs, in order of preference.
1873 Find the most preferred one that's in this class. */
1874
1875 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1876 if (potential_reload_regs[i] >= 0
1877 && TEST_HARD_REG_BIT (reg_class_contents[class],
1878 potential_reload_regs[i])
1879 /* If this reg will not be available for groups,
1880 pick one that does not foreclose possible groups.
1881 This is a kludge, and not very general,
1882 but it should be sufficient to make the 386 work,
1883 and the problem should not occur on machines with
1884 more registers. */
1885 && (max_nongroups[class] == 0
1886 || possible_group_p (potential_reload_regs[i], max_groups)))
1887 break;
1888
1889 /* If we couldn't get a register, try to get one even if we
1890 might foreclose possible groups. This may cause problems
1891 later, but that's better than aborting now, since it is
1892 possible that we will, in fact, be able to form the needed
1893 group even with this allocation. */
1894
1895 if (i >= FIRST_PSEUDO_REGISTER
1896 && (asm_noperands (max_needs[class] > 0
1897 ? max_needs_insn[class]
1898 : max_nongroups_insn[class])
1899 < 0))
1900 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1901 if (potential_reload_regs[i] >= 0
1902 && TEST_HARD_REG_BIT (reg_class_contents[class],
1903 potential_reload_regs[i]))
1904 break;
1905
1906 /* I should be the index in potential_reload_regs
1907 of the new reload reg we have found. */
1908
1909 if (i >= FIRST_PSEUDO_REGISTER)
1910 {
1911 /* There are no possible registers left to spill. */
1912 spill_failure (max_needs[class] > 0 ? max_needs_insn[class]
1913 : max_nongroups_insn[class]);
1914 failure = 1;
1915 goto failed;
1916 }
1917 else
1918 something_changed
1919 |= new_spill_reg (i, class, max_needs, max_nongroups,
1920 global, dumpfile);
1921 }
1922 }
1923 }
1924
1925 /* If global-alloc was run, notify it of any register eliminations we have
1926 done. */
1927 if (global)
1928 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1929 if (ep->can_eliminate)
1930 mark_elimination (ep->from, ep->to);
1931
1932 /* Insert code to save and restore call-clobbered hard regs
1933 around calls. Tell if what mode to use so that we will process
1934 those insns in reload_as_needed if we have to. */
1935
1936 if (caller_save_needed)
1937 save_call_clobbered_regs (num_eliminable ? QImode
1938 : caller_save_spill_class != NO_REGS ? HImode
1939 : VOIDmode);
1940
1941 /* If a pseudo has no hard reg, delete the insns that made the equivalence.
1942 If that insn didn't set the register (i.e., it copied the register to
1943 memory), just delete that insn instead of the equivalencing insn plus
1944 anything now dead. If we call delete_dead_insn on that insn, we may
1945 delete the insn that actually sets the register if the register die
1946 there and that is incorrect. */
1947
1948 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1949 if (reg_renumber[i] < 0 && reg_equiv_init[i] != 0
1950 && GET_CODE (reg_equiv_init[i]) != NOTE)
1951 {
1952 if (reg_set_p (regno_reg_rtx[i], PATTERN (reg_equiv_init[i])))
1953 delete_dead_insn (reg_equiv_init[i]);
1954 else
1955 {
1956 PUT_CODE (reg_equiv_init[i], NOTE);
1957 NOTE_SOURCE_FILE (reg_equiv_init[i]) = 0;
1958 NOTE_LINE_NUMBER (reg_equiv_init[i]) = NOTE_INSN_DELETED;
1959 }
1960 }
1961
1962 /* Use the reload registers where necessary
1963 by generating move instructions to move the must-be-register
1964 values into or out of the reload registers. */
1965
1966 if (something_needs_reloads || something_needs_elimination
1967 || (caller_save_needed && num_eliminable)
1968 || caller_save_spill_class != NO_REGS)
1969 reload_as_needed (first, global);
1970
1971 /* If we were able to eliminate the frame pointer, show that it is no
1972 longer live at the start of any basic block. If it ls live by
1973 virtue of being in a pseudo, that pseudo will be marked live
1974 and hence the frame pointer will be known to be live via that
1975 pseudo. */
1976
1977 if (! frame_pointer_needed)
1978 for (i = 0; i < n_basic_blocks; i++)
1979 basic_block_live_at_start[i][FRAME_POINTER_REGNUM / REGSET_ELT_BITS]
1980 &= ~ ((REGSET_ELT_TYPE) 1 << (FRAME_POINTER_REGNUM % REGSET_ELT_BITS));
1981
1982 /* Come here (with failure set nonzero) if we can't get enough spill regs
1983 and we decide not to abort about it. */
1984 failed:
1985
1986 reload_in_progress = 0;
1987
1988 /* Now eliminate all pseudo regs by modifying them into
1989 their equivalent memory references.
1990 The REG-rtx's for the pseudos are modified in place,
1991 so all insns that used to refer to them now refer to memory.
1992
1993 For a reg that has a reg_equiv_address, all those insns
1994 were changed by reloading so that no insns refer to it any longer;
1995 but the DECL_RTL of a variable decl may refer to it,
1996 and if so this causes the debugging info to mention the variable. */
1997
1998 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1999 {
2000 rtx addr = 0;
2001 int in_struct = 0;
2002 if (reg_equiv_mem[i])
2003 {
2004 addr = XEXP (reg_equiv_mem[i], 0);
2005 in_struct = MEM_IN_STRUCT_P (reg_equiv_mem[i]);
2006 }
2007 if (reg_equiv_address[i])
2008 addr = reg_equiv_address[i];
2009 if (addr)
2010 {
2011 if (reg_renumber[i] < 0)
2012 {
2013 rtx reg = regno_reg_rtx[i];
2014 XEXP (reg, 0) = addr;
2015 REG_USERVAR_P (reg) = 0;
2016 MEM_IN_STRUCT_P (reg) = in_struct;
2017 PUT_CODE (reg, MEM);
2018 }
2019 else if (reg_equiv_mem[i])
2020 XEXP (reg_equiv_mem[i], 0) = addr;
2021 }
2022 }
2023
2024 #ifdef PRESERVE_DEATH_INFO_REGNO_P
2025 /* Make a pass over all the insns and remove death notes for things that
2026 are no longer registers or no longer die in the insn (e.g., an input
2027 and output pseudo being tied). */
2028
2029 for (insn = first; insn; insn = NEXT_INSN (insn))
2030 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
2031 {
2032 rtx note, next;
2033
2034 for (note = REG_NOTES (insn); note; note = next)
2035 {
2036 next = XEXP (note, 1);
2037 if (REG_NOTE_KIND (note) == REG_DEAD
2038 && (GET_CODE (XEXP (note, 0)) != REG
2039 || reg_set_p (XEXP (note, 0), PATTERN (insn))))
2040 remove_note (insn, note);
2041 }
2042 }
2043 #endif
2044
2045 /* Indicate that we no longer have known memory locations or constants. */
2046 reg_equiv_constant = 0;
2047 reg_equiv_memory_loc = 0;
2048
2049 return failure;
2050 }
2051 \f
2052 /* Nonzero if, after spilling reg REGNO for non-groups,
2053 it will still be possible to find a group if we still need one. */
2054
2055 static int
2056 possible_group_p (regno, max_groups)
2057 int regno;
2058 int *max_groups;
2059 {
2060 int i;
2061 int class = (int) NO_REGS;
2062
2063 for (i = 0; i < (int) N_REG_CLASSES; i++)
2064 if (max_groups[i] > 0)
2065 {
2066 class = i;
2067 break;
2068 }
2069
2070 if (class == (int) NO_REGS)
2071 return 1;
2072
2073 /* Consider each pair of consecutive registers. */
2074 for (i = 0; i < FIRST_PSEUDO_REGISTER - 1; i++)
2075 {
2076 /* Ignore pairs that include reg REGNO. */
2077 if (i == regno || i + 1 == regno)
2078 continue;
2079
2080 /* Ignore pairs that are outside the class that needs the group.
2081 ??? Here we fail to handle the case where two different classes
2082 independently need groups. But this never happens with our
2083 current machine descriptions. */
2084 if (! (TEST_HARD_REG_BIT (reg_class_contents[class], i)
2085 && TEST_HARD_REG_BIT (reg_class_contents[class], i + 1)))
2086 continue;
2087
2088 /* A pair of consecutive regs we can still spill does the trick. */
2089 if (spill_reg_order[i] < 0 && spill_reg_order[i + 1] < 0
2090 && ! TEST_HARD_REG_BIT (bad_spill_regs, i)
2091 && ! TEST_HARD_REG_BIT (bad_spill_regs, i + 1))
2092 return 1;
2093
2094 /* A pair of one already spilled and one we can spill does it
2095 provided the one already spilled is not otherwise reserved. */
2096 if (spill_reg_order[i] < 0
2097 && ! TEST_HARD_REG_BIT (bad_spill_regs, i)
2098 && spill_reg_order[i + 1] >= 0
2099 && ! TEST_HARD_REG_BIT (counted_for_groups, i + 1)
2100 && ! TEST_HARD_REG_BIT (counted_for_nongroups, i + 1))
2101 return 1;
2102 if (spill_reg_order[i + 1] < 0
2103 && ! TEST_HARD_REG_BIT (bad_spill_regs, i + 1)
2104 && spill_reg_order[i] >= 0
2105 && ! TEST_HARD_REG_BIT (counted_for_groups, i)
2106 && ! TEST_HARD_REG_BIT (counted_for_nongroups, i))
2107 return 1;
2108 }
2109
2110 return 0;
2111 }
2112 \f
/* Count any groups that can be formed from the registers recently spilled.
   This is done class by class, in order of ascending class number.

   GROUP_SIZE[CLASS] is the number of consecutive registers a group in
   CLASS requires; GROUP_MODE[CLASS] is the mode such a group must be
   able to hold.  For each group found, MAX_GROUPS[CLASS] is decremented,
   and likewise for every superclass of CLASS.  Registers used up this
   way are marked in `counted_for_groups' so they are not counted again.

   Reads the file-scope spill data `spill_regs', `n_spills',
   `counted_for_groups' and `counted_for_nongroups'.  */

static void
count_possible_groups (group_size, group_mode, max_groups)
     int *group_size;
     enum machine_mode *group_mode;
     int *max_groups;
{
  int i;
  /* Now find all consecutive groups of spilled registers
     and mark each group off against the need for such groups.
     But don't count them against ordinary need, yet.  */

  for (i = 0; i < N_REG_CLASSES; i++)
    if (group_size[i] > 1)
      {
	/* Mask of the spill regs in class I that are still free
	   to be combined into a group.  */
	HARD_REG_SET new;
	int j;

	CLEAR_HARD_REG_SET (new);

	/* Make a mask of all the regs that are spill regs in class I.
	   Regs already reserved for a group or for non-group use
	   are excluded.  */
	for (j = 0; j < n_spills; j++)
	  if (TEST_HARD_REG_BIT (reg_class_contents[i], spill_regs[j])
	      && ! TEST_HARD_REG_BIT (counted_for_groups, spill_regs[j])
	      && ! TEST_HARD_REG_BIT (counted_for_nongroups,
				      spill_regs[j]))
	    SET_HARD_REG_BIT (new, spill_regs[j]);

	/* Find each consecutive group of them.  */
	for (j = 0; j < FIRST_PSEUDO_REGISTER && max_groups[i] > 0; j++)
	  if (TEST_HARD_REG_BIT (new, j)
	      && j + group_size[i] <= FIRST_PSEUDO_REGISTER
	      /* Next line in case group-mode for this class
		 demands an even-odd pair.  */
	      && HARD_REGNO_MODE_OK (j, group_mode[i]))
	    {
	      int k;
	      /* Check that regs J+1 .. J+size-1 are all free too.  */
	      for (k = 1; k < group_size[i]; k++)
		if (! TEST_HARD_REG_BIT (new, j + k))
		  break;
	      if (k == group_size[i])
		{
		  /* We found a group.  Mark it off against this class's
		     need for groups, and against each superclass too.  */
		  register enum reg_class *p;
		  max_groups[i]--;
		  p = reg_class_superclasses[i];
		  while (*p != LIM_REG_CLASSES)
		    max_groups[(int) *p++]--;
		  /* Don't count these registers again.  */
		  for (k = 0; k < group_size[i]; k++)
		    SET_HARD_REG_BIT (counted_for_groups, j + k);
		}
	      /* Skip to the last reg in this group.  When j is incremented
		 above, it will then point to the first reg of the next
		 possible group.  K is always > 0 here, whether or not a
		 full group was found.  */
	      j += k - 1;
	    }
      }

}
2176 \f
2177 /* ALLOCATE_MODE is a register mode that needs to be reloaded. OTHER_MODE is
2178 another mode that needs to be reloaded for the same register class CLASS.
2179 If any reg in CLASS allows ALLOCATE_MODE but not OTHER_MODE, fail.
2180 ALLOCATE_MODE will never be smaller than OTHER_MODE.
2181
2182 This code used to also fail if any reg in CLASS allows OTHER_MODE but not
2183 ALLOCATE_MODE. This test is unnecessary, because we will never try to put
2184 something of mode ALLOCATE_MODE into an OTHER_MODE register. Testing this
2185 causes unnecessary failures on machines requiring alignment of register
2186 groups when the two modes are different sizes, because the larger mode has
2187 more strict alignment rules than the smaller mode. */
2188
2189 static int
2190 modes_equiv_for_class_p (allocate_mode, other_mode, class)
2191 enum machine_mode allocate_mode, other_mode;
2192 enum reg_class class;
2193 {
2194 register int regno;
2195 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2196 {
2197 if (TEST_HARD_REG_BIT (reg_class_contents[(int) class], regno)
2198 && HARD_REGNO_MODE_OK (regno, allocate_mode)
2199 && ! HARD_REGNO_MODE_OK (regno, other_mode))
2200 return 0;
2201 }
2202 return 1;
2203 }
2204
2205 /* Handle the failure to find a register to spill.
2206 INSN should be one of the insns which needed this particular spill reg. */
2207
2208 static void
2209 spill_failure (insn)
2210 rtx insn;
2211 {
2212 if (asm_noperands (PATTERN (insn)) >= 0)
2213 error_for_asm (insn, "`asm' needs too many reloads");
2214 else
2215 abort ();
2216 }
2217
2218 /* Add a new register to the tables of available spill-registers
2219 (as well as spilling all pseudos allocated to the register).
2220 I is the index of this register in potential_reload_regs.
2221 CLASS is the regclass whose need is being satisfied.
2222 MAX_NEEDS and MAX_NONGROUPS are the vectors of needs,
2223 so that this register can count off against them.
2224 MAX_NONGROUPS is 0 if this register is part of a group.
2225 GLOBAL and DUMPFILE are the same as the args that `reload' got. */
2226
static int
new_spill_reg (i, class, max_needs, max_nongroups, global, dumpfile)
     int i;
     int class;
     int *max_needs;
     int *max_nongroups;
     int global;
     FILE *dumpfile;
{
  register enum reg_class *p;
  int val;
  /* The hard register being added as a spill reg.  */
  int regno = potential_reload_regs[i];

  if (i >= FIRST_PSEUDO_REGISTER)
    abort ();	/* Caller failed to find any register.  */

  if (fixed_regs[regno] || TEST_HARD_REG_BIT (forbidden_regs, regno))
    fatal ("fixed or forbidden register was spilled.\n\
This may be due to a compiler bug or to impossible asm statements.");

  /* Make reg REGNO an additional reload reg.  */

  /* Remove it from the candidate list and append it to the tables of
     spill regs, recording its position in spill_reg_order.  */
  potential_reload_regs[i] = -1;
  spill_regs[n_spills] = regno;
  spill_reg_order[regno] = n_spills;
  if (dumpfile)
    fprintf (dumpfile, "Spilling reg %d.\n", spill_regs[n_spills]);

  /* Clear off the needs we just satisfied.  */

  max_needs[class]--;
  p = reg_class_superclasses[class];
  while (*p != LIM_REG_CLASSES)
    max_needs[(int) *p++]--;

  /* MAX_NONGROUPS is null when this reg is being added as part of a
     group, in which case it must not count against non-group need.  */
  if (max_nongroups && max_nongroups[class] > 0)
    {
      SET_HARD_REG_BIT (counted_for_nongroups, regno);
      max_nongroups[class]--;
      p = reg_class_superclasses[class];
      while (*p != LIM_REG_CLASSES)
	max_nongroups[(int) *p++]--;
    }

  /* Spill every pseudo reg that was allocated to this reg
     or to something that overlaps this reg.  */

  val = spill_hard_reg (spill_regs[n_spills], global, dumpfile, 0);

  /* If there are some registers still to eliminate and this register
     wasn't ever used before, additional stack space may have to be
     allocated to store this register.  Thus, we may have changed the offset
     between the stack and frame pointers, so mark that something has changed.
     (If new pseudos were spilled, thus requiring more space, VAL would have
     been set non-zero by the call to spill_hard_reg above since additional
     reloads may be needed in that case.

     One might think that we need only set VAL to 1 if this is a call-used
     register.  However, the set of registers that must be saved by the
     prologue is not identical to the call-used set.  For example, the
     register used by the call insn for the return PC is a call-used register,
     but must be saved by the prologue.  */
  if (num_eliminable && ! regs_ever_live[spill_regs[n_spills]])
    val = 1;

  regs_ever_live[spill_regs[n_spills]] = 1;
  n_spills++;

  /* Nonzero return means another pass over the insns is needed.  */
  return val;
}
2297 \f
2298 /* Delete an unneeded INSN and any previous insns who sole purpose is loading
2299 data that is dead in INSN. */
2300
2301 static void
2302 delete_dead_insn (insn)
2303 rtx insn;
2304 {
2305 rtx prev = prev_real_insn (insn);
2306 rtx prev_dest;
2307
2308 /* If the previous insn sets a register that dies in our insn, delete it
2309 too. */
2310 if (prev && GET_CODE (PATTERN (prev)) == SET
2311 && (prev_dest = SET_DEST (PATTERN (prev)), GET_CODE (prev_dest) == REG)
2312 && reg_mentioned_p (prev_dest, PATTERN (insn))
2313 && find_regno_note (insn, REG_DEAD, REGNO (prev_dest)))
2314 delete_dead_insn (prev);
2315
2316 PUT_CODE (insn, NOTE);
2317 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2318 NOTE_SOURCE_FILE (insn) = 0;
2319 }
2320
2321 /* Modify the home of pseudo-reg I.
2322 The new home is present in reg_renumber[I].
2323
2324 FROM_REG may be the hard reg that the pseudo-reg is being spilled from;
2325 or it may be -1, meaning there is none or it is not relevant.
2326 This is used so that all pseudos spilled from a given hard reg
2327 can share one stack slot. */
2328
static void
alter_reg (i, from_reg)
     register int i;
     int from_reg;
{
  /* When outputting an inline function, this can happen
     for a reg that isn't actually used.  */
  if (regno_reg_rtx[i] == 0)
    return;

  /* If the reg got changed to a MEM at rtl-generation time,
     ignore it.  */
  if (GET_CODE (regno_reg_rtx[i]) != REG)
    return;

  /* Modify the reg-rtx to contain the new hard reg
     number or else to contain its pseudo reg number.  */
  REGNO (regno_reg_rtx[i])
    = reg_renumber[i] >= 0 ? reg_renumber[i] : i;

  /* If we have a pseudo that is needed but has no hard reg or equivalent,
     allocate a stack slot for it.  */

  if (reg_renumber[i] < 0
      && reg_n_refs[i] > 0
      && reg_equiv_constant[i] == 0
      && reg_equiv_memory_loc[i] == 0)
    {
      register rtx x;
      int inherent_size = PSEUDO_REGNO_BYTES (i);
      int total_size = MAX (inherent_size, reg_max_ref_width[i]);
      /* Bytes to add to the slot's address to undo or apply
	 big-endian corrections; stays 0 on little-endian targets.  */
      int adjust = 0;

      /* Each pseudo reg has an inherent size which comes from its own mode,
	 and a total size which provides room for paradoxical subregs
	 which refer to the pseudo reg in wider modes.

	 We can use a slot already allocated if it provides both
	 enough inherent space and enough total space.
	 Otherwise, we allocate a new slot, making sure that it has no less
	 inherent space, and no less total space, then the previous slot.  */
      if (from_reg == -1)
	{
	  /* No known place to spill from => no slot to reuse.  */
	  x = assign_stack_local (GET_MODE (regno_reg_rtx[i]), total_size, -1);
#if BYTES_BIG_ENDIAN
	  /* Cancel the big-endian correction done in assign_stack_local.
	     Get the address of the beginning of the slot.
	     This is so we can do a big-endian correction unconditionally
	     below.  */
	  adjust = inherent_size - total_size;
#endif
	}
      /* Reuse a stack slot if possible.  */
      else if (spill_stack_slot[from_reg] != 0
	       && spill_stack_slot_width[from_reg] >= total_size
	       && (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
		   >= inherent_size))
	x = spill_stack_slot[from_reg];
      /* Allocate a bigger slot.  */
      else
	{
	  /* Compute maximum size needed, both for inherent size
	     and for total size.  */
	  enum machine_mode mode = GET_MODE (regno_reg_rtx[i]);
	  if (spill_stack_slot[from_reg])
	    {
	      /* Widen to whichever is larger: the old slot's mode/width
		 or this pseudo's requirements.  */
	      if (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
		  > inherent_size)
		mode = GET_MODE (spill_stack_slot[from_reg]);
	      if (spill_stack_slot_width[from_reg] > total_size)
		total_size = spill_stack_slot_width[from_reg];
	    }
	  /* Make a slot with that size.  */
	  x = assign_stack_local (mode, total_size, -1);
#if BYTES_BIG_ENDIAN
	  /* Cancel the big-endian correction done in assign_stack_local.
	     Get the address of the beginning of the slot.
	     This is so we can do a big-endian correction unconditionally
	     below.  */
	  adjust = GET_MODE_SIZE (mode) - total_size;
#endif
	  /* Record the new slot so later pseudos spilled from FROM_REG
	     can share it.  */
	  spill_stack_slot[from_reg] = x;
	  spill_stack_slot_width[from_reg] = total_size;
	}

#if BYTES_BIG_ENDIAN
      /* On a big endian machine, the "address" of the slot
	 is the address of the low part that fits its inherent mode.  */
      if (inherent_size < total_size)
	adjust += (total_size - inherent_size);
#endif /* BYTES_BIG_ENDIAN */

      /* If we have any adjustment to make, or if the stack slot is the
	 wrong mode, make a new stack slot.  */
      if (adjust != 0 || GET_MODE (x) != GET_MODE (regno_reg_rtx[i]))
	{
	  x = gen_rtx (MEM, GET_MODE (regno_reg_rtx[i]),
		       plus_constant (XEXP (x, 0), adjust));
	  RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (regno_reg_rtx[i]);
	}

      /* Save the stack slot for later.  */
      reg_equiv_memory_loc[i] = x;
    }
}
2435
2436 /* Mark the slots in regs_ever_live for the hard regs
2437 used by pseudo-reg number REGNO. */
2438
2439 void
2440 mark_home_live (regno)
2441 int regno;
2442 {
2443 register int i, lim;
2444 i = reg_renumber[regno];
2445 if (i < 0)
2446 return;
2447 lim = i + HARD_REGNO_NREGS (i, PSEUDO_REGNO_MODE (regno));
2448 while (i < lim)
2449 regs_ever_live[i++] = 1;
2450 }
2451 \f
2452 /* This function handles the tracking of elimination offsets around branches.
2453
2454 X is a piece of RTL being scanned.
2455
2456 INSN is the insn that it came from, if any.
2457
2458 INITIAL_P is non-zero if we are to set the offset to be the initial
2459 offset and zero if we are setting the offset of the label to be the
2460 current offset. */
2461
static void
set_label_offsets (x, insn, initial_p)
     rtx x;
     rtx insn;
     int initial_p;
{
  enum rtx_code code = GET_CODE (x);
  rtx tem;
  int i;
  struct elim_table *p;

  switch (code)
    {
    case LABEL_REF:
      /* Non-local labels are handled elsewhere; their offsets
	 cannot be tracked here.  */
      if (LABEL_REF_NONLOCAL_P (x))
	return;

      x = XEXP (x, 0);

      /* ... fall through ... */

    case CODE_LABEL:
      /* If we know nothing about this label, set the desired offsets.  Note
	 that this sets the offset at a label to be the offset before a label
	 if we don't know anything about the label.  This is not correct for
	 the label after a BARRIER, but is the best guess we can make.  If
	 we guessed wrong, we will suppress an elimination that might have
	 been possible had we been able to guess correctly.  */

      if (! offsets_known_at[CODE_LABEL_NUMBER (x)])
	{
	  for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
	    offsets_at[CODE_LABEL_NUMBER (x)][i]
	      = (initial_p ? reg_eliminate[i].initial_offset
		 : reg_eliminate[i].offset);
	  offsets_known_at[CODE_LABEL_NUMBER (x)] = 1;
	}

      /* Otherwise, if this is the definition of a label and it is
	 preceded by a BARRIER, set our offsets to the known offset of
	 that label.  */

      else if (x == insn
	       && (tem = prev_nonnote_insn (insn)) != 0
	       && GET_CODE (tem) == BARRIER)
	{
	  /* Control can only reach here by jumping to the label, so the
	     current offsets become exactly the recorded ones.  */
	  num_not_at_initial_offset = 0;
	  for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
	    {
	      reg_eliminate[i].offset = reg_eliminate[i].previous_offset
		= offsets_at[CODE_LABEL_NUMBER (x)][i];
	      if (reg_eliminate[i].can_eliminate
		  && (reg_eliminate[i].offset
		      != reg_eliminate[i].initial_offset))
		num_not_at_initial_offset++;
	    }
	}

      else
	/* If neither of the above cases is true, compare each offset
	   with those previously recorded and suppress any eliminations
	   where the offsets disagree.  */

	for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
	  if (offsets_at[CODE_LABEL_NUMBER (x)][i]
	      != (initial_p ? reg_eliminate[i].initial_offset
		  : reg_eliminate[i].offset))
	    reg_eliminate[i].can_eliminate = 0;

      return;

    case JUMP_INSN:
      /* Scan the jump's pattern first to track where it may transfer to.  */
      set_label_offsets (PATTERN (insn), insn, initial_p);

      /* ... fall through ... */

    case INSN:
    case CALL_INSN:
      /* Any labels mentioned in REG_LABEL notes can be branched to indirectly
	 and hence must have all eliminations at their initial offsets.  */
      for (tem = REG_NOTES (x); tem; tem = XEXP (tem, 1))
	if (REG_NOTE_KIND (tem) == REG_LABEL)
	  set_label_offsets (XEXP (tem, 0), insn, 1);
      return;

    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      /* Each of the labels in the address vector must be at their initial
	 offsets.  We want the first field for ADDR_VEC and the second
	 field for ADDR_DIFF_VEC.  */

      for (i = 0; i < XVECLEN (x, code == ADDR_DIFF_VEC); i++)
	set_label_offsets (XVECEXP (x, code == ADDR_DIFF_VEC, i),
			   insn, initial_p);
      return;

    case SET:
      /* We only care about setting PC.  If the source is not RETURN,
	 IF_THEN_ELSE, or a label, disable any eliminations not at
	 their initial offsets.  Similarly if any arm of the IF_THEN_ELSE
	 isn't one of those possibilities.  For branches to a label,
	 call ourselves recursively.

	 Note that this can disable elimination unnecessarily when we have
	 a non-local goto since it will look like a non-constant jump to
	 someplace in the current function.  This isn't a significant
	 problem since such jumps will normally be when all elimination
	 pairs are back to their initial offsets.  */

      if (SET_DEST (x) != pc_rtx)
	return;

      switch (GET_CODE (SET_SRC (x)))
	{
	case PC:
	case RETURN:
	  return;

	case LABEL_REF:
	  /* Direct branch: propagate the current offsets to the target.  */
	  set_label_offsets (XEXP (SET_SRC (x), 0), insn, initial_p);
	  return;

	case IF_THEN_ELSE:
	  /* Check both arms; a non-label, non-PC/RETURN arm means the
	     destination is variable, so break out to the code below.  */
	  tem = XEXP (SET_SRC (x), 1);
	  if (GET_CODE (tem) == LABEL_REF)
	    set_label_offsets (XEXP (tem, 0), insn, initial_p);
	  else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
	    break;

	  tem = XEXP (SET_SRC (x), 2);
	  if (GET_CODE (tem) == LABEL_REF)
	    set_label_offsets (XEXP (tem, 0), insn, initial_p);
	  else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
	    break;
	  return;
	}

      /* If we reach here, all eliminations must be at their initial
	 offset because we are doing a jump to a variable address.  */
      for (p = reg_eliminate; p < &reg_eliminate[NUM_ELIMINABLE_REGS]; p++)
	if (p->offset != p->initial_offset)
	  p->can_eliminate = 0;
    }
}
2606 \f
2607 /* Used for communication between the next two function to properly share
2608 the vector for an ASM_OPERANDS. */
2609
2610 static struct rtvec_def *old_asm_operands_vec, *new_asm_operands_vec;
2611
2612 /* Scan X and replace any eliminable registers (such as fp) with a
2613 replacement (such as sp), plus an offset.
2614
2615 MEM_MODE is the mode of an enclosing MEM. We need this to know how
2616 much to adjust a register for, e.g., PRE_DEC. Also, if we are inside a
2617 MEM, we are allowed to replace a sum of a register and the constant zero
2618 with the register, which we cannot do outside a MEM. In addition, we need
2619 to record the fact that a register is referenced outside a MEM.
2620
2621 If INSN is nonzero, it is the insn containing X. If we replace a REG
2622 in a SET_DEST with an equivalent MEM and INSN is non-zero, write a
2623 CLOBBER of the pseudo after INSN so find_equiv_regs will know that
2624 that the REG is being modified.
2625
2626 If we see a modification to a register we know about, take the
2627 appropriate action (see case SET, below).
2628
2629 REG_EQUIV_MEM and REG_EQUIV_ADDRESS contain address that have had
2630 replacements done assuming all offsets are at their initial values. If
2631 they are not, or if REG_EQUIV_ADDRESS is nonzero for a pseudo we
2632 encounter, return the actual location so that find_reloads will do
2633 the proper thing. */
2634
rtx
eliminate_regs (x, mem_mode, insn)
     rtx x;
     enum machine_mode mem_mode;
     rtx insn;
{
  enum rtx_code code = GET_CODE (x);
  struct elim_table *ep;
  int regno;
  rtx new;
  int i, j;
  char *fmt;
  int copied = 0;		/* Nonzero once X has been copied for editing.  */

  switch (code)
    {
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case ASM_INPUT:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
    case RETURN:
      /* These codes can contain no eliminable register references.  */
      return x;

    case REG:
      regno = REGNO (x);

      /* First handle the case where we encounter a bare register that
	 is eliminable.  Replace it with a PLUS.  */
      if (regno < FIRST_PSEUDO_REGISTER)
	{
	  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
	       ep++)
	    if (ep->from_rtx == x && ep->can_eliminate)
	      {
		if (! mem_mode)
		  ep->ref_outside_mem = 1;
		return plus_constant (ep->to_rtx, ep->previous_offset);
	      }

	}
      else if (reg_equiv_memory_loc && reg_equiv_memory_loc[regno]
	       && (reg_equiv_address[regno] || num_not_at_initial_offset))
	{
	  /* In this case, find_reloads would attempt to either use an
	     incorrect address (if something is not at its initial offset)
	     or substitute a replaced address into an insn (which loses
	     if the offset is changed by some later action).  So we simply
	     return the replaced stack slot (assuming it is changed by
	     elimination) and ignore the fact that this is actually a
	     reference to the pseudo.  Ensure we make a copy of the
	     address in case it is shared.  */
	  new = eliminate_regs (reg_equiv_memory_loc[regno],
				mem_mode, NULL_RTX);
	  if (new != reg_equiv_memory_loc[regno])
	    return copy_rtx (new);
	}
      return x;

    case PLUS:
      /* If this is the sum of an eliminable register and a constant, rework
	 the sum.  */
      if (GET_CODE (XEXP (x, 0)) == REG
	  && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
	  && CONSTANT_P (XEXP (x, 1)))
	{
	  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
	       ep++)
	    if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
	      {
		if (! mem_mode)
		  ep->ref_outside_mem = 1;

		/* The only time we want to replace a PLUS with a REG (this
		   occurs when the constant operand of the PLUS is the negative
		   of the offset) is when we are inside a MEM.  We won't want
		   to do so at other times because that would change the
		   structure of the insn in a way that reload can't handle.
		   We special-case the commonest situation in
		   eliminate_regs_in_insn, so just replace a PLUS with a
		   PLUS here, unless inside a MEM.  */
		if (mem_mode != 0 && GET_CODE (XEXP (x, 1)) == CONST_INT
		    && INTVAL (XEXP (x, 1)) == - ep->previous_offset)
		  return ep->to_rtx;
		else
		  return gen_rtx (PLUS, Pmode, ep->to_rtx,
				  plus_constant (XEXP (x, 1),
						 ep->previous_offset));
	      }

	  /* If the register is not eliminable, we are done since the other
	     operand is a constant.  */
	  return x;
	}

      /* If this is part of an address, we want to bring any constant to the
	 outermost PLUS.  We will do this by doing register replacement in
	 our operands and seeing if a constant shows up in one of them.

	 We assume here this is part of an address (or a "load address" insn)
	 since an eliminable register is not likely to appear in any other
	 context.

	 If we have (plus (eliminable) (reg)), we want to produce
	 (plus (plus (replacement) (reg) (const))).  If this was part of a
	 normal add insn, (plus (replacement) (reg)) will be pushed as a
	 reload.  This is the desired action.  */

      {
	rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, NULL_RTX);
	rtx new1 = eliminate_regs (XEXP (x, 1), mem_mode, NULL_RTX);

	if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
	  {
	    /* If one side is a PLUS and the other side is a pseudo that
	       didn't get a hard register but has a reg_equiv_constant,
	       we must replace the constant here since it may no longer
	       be in the position of any operand.  */
	    if (GET_CODE (new0) == PLUS && GET_CODE (new1) == REG
		&& REGNO (new1) >= FIRST_PSEUDO_REGISTER
		&& reg_renumber[REGNO (new1)] < 0
		&& reg_equiv_constant != 0
		&& reg_equiv_constant[REGNO (new1)] != 0)
	      new1 = reg_equiv_constant[REGNO (new1)];
	    /* NOTE(review): unlike the branch above, this branch does not
	       test reg_equiv_constant != 0 before indexing it — confirm
	       it cannot be null when this path is reached.  */
	    else if (GET_CODE (new1) == PLUS && GET_CODE (new0) == REG
		     && REGNO (new0) >= FIRST_PSEUDO_REGISTER
		     && reg_renumber[REGNO (new0)] < 0
		     && reg_equiv_constant[REGNO (new0)] != 0)
	      new0 = reg_equiv_constant[REGNO (new0)];

	    new = form_sum (new0, new1);

	    /* As above, if we are not inside a MEM we do not want to
	       turn a PLUS into something else.  We might try to do so here
	       for an addition of 0 if we aren't optimizing.  */
	    if (! mem_mode && GET_CODE (new) != PLUS)
	      return gen_rtx (PLUS, GET_MODE (x), new, const0_rtx);
	    else
	      return new;
	  }
      }
      return x;

    case EXPR_LIST:
      /* If we have something in XEXP (x, 0), the usual case, eliminate it.  */
      if (XEXP (x, 0))
	{
	  new = eliminate_regs (XEXP (x, 0), mem_mode, NULL_RTX);
	  if (new != XEXP (x, 0))
	    x = gen_rtx (EXPR_LIST, REG_NOTE_KIND (x), new, XEXP (x, 1));
	}

      /* ... fall through ... */

    case INSN_LIST:
      /* Now do eliminations in the rest of the chain.  If this was
	 an EXPR_LIST, this might result in allocating more memory than is
	 strictly needed, but it simplifies the code.  */
      if (XEXP (x, 1))
	{
	  new = eliminate_regs (XEXP (x, 1), mem_mode, NULL_RTX);
	  if (new != XEXP (x, 1))
	    return gen_rtx (INSN_LIST, GET_MODE (x), XEXP (x, 0), new);
	}
      return x;

    case CALL:
    case COMPARE:
    case MINUS:
    case MULT:
    case DIV: case UDIV:
    case MOD: case UMOD:
    case AND: case IOR: case XOR:
    case LSHIFT: case ASHIFT: case ROTATE:
    case ASHIFTRT: case LSHIFTRT: case ROTATERT:
    case NE: case EQ:
    case GE: case GT: case GEU: case GTU:
    case LE: case LT: case LEU: case LTU:
      /* Generic binary (or unary-with-null-second-operand) case: rebuild
	 only if an operand actually changed.  */
      {
	rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, NULL_RTX);
	rtx new1
	  = XEXP (x, 1) ? eliminate_regs (XEXP (x, 1), mem_mode, NULL_RTX) : 0;

	if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
	  return gen_rtx (code, GET_MODE (x), new0, new1);
      }
      return x;

    case PRE_INC:
    case POST_INC:
    case PRE_DEC:
    case POST_DEC:
      /* An auto-increment of a replacement register changes the offset
	 between the eliminated register and its replacement; record the
	 adjustment (sized by the enclosing MEM's mode) before falling
	 through to process the operand.  */
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
	if (ep->to_rtx == XEXP (x, 0))
	  {
	    if (code == PRE_DEC || code == POST_DEC)
	      ep->offset += GET_MODE_SIZE (mem_mode);
	    else
	      ep->offset -= GET_MODE_SIZE (mem_mode);
	  }

      /* Fall through to generic unary operation case.  */
    case USE:
    case STRICT_LOW_PART:
    case NEG: case NOT:
    case SIGN_EXTEND: case ZERO_EXTEND:
    case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
    case FLOAT: case FIX:
    case UNSIGNED_FIX: case UNSIGNED_FLOAT:
    case ABS:
    case SQRT:
    case FFS:
      new = eliminate_regs (XEXP (x, 0), mem_mode, NULL_RTX);
      if (new != XEXP (x, 0))
	return gen_rtx (code, GET_MODE (x), new);
      return x;

    case SUBREG:
      /* Similar to above processing, but preserve SUBREG_WORD.
	 Convert (subreg (mem)) to (mem) if not paradoxical.
	 Also, if we have a non-paradoxical (subreg (pseudo)) and the
	 pseudo didn't get a hard reg, we must replace this with the
	 eliminated version of the memory location because push_reloads
	 may do the replacement in certain circumstances.  */
      if (GET_CODE (SUBREG_REG (x)) == REG
	  && (GET_MODE_SIZE (GET_MODE (x))
	      <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
	  && reg_equiv_memory_loc != 0
	  && reg_equiv_memory_loc[REGNO (SUBREG_REG (x))] != 0)
	{
	  new = eliminate_regs (reg_equiv_memory_loc[REGNO (SUBREG_REG (x))],
				mem_mode, NULL_RTX);

	  /* If we didn't change anything, we must retain the pseudo.  */
	  if (new == reg_equiv_memory_loc[REGNO (SUBREG_REG (x))])
	    new = XEXP (x, 0);
	  else
	    /* Otherwise, ensure NEW isn't shared in case we have to reload
	       it.  */
	    new = copy_rtx (new);
	}
      else
	new = eliminate_regs (SUBREG_REG (x), mem_mode, NULL_RTX);

      if (new != XEXP (x, 0))
	{
	  if (GET_CODE (new) == MEM
	      && (GET_MODE_SIZE (GET_MODE (x))
		  <= GET_MODE_SIZE (GET_MODE (new))))
	    {
	      /* Non-paradoxical (subreg (mem)): narrow the MEM in place
		 by adjusting its address and mode.  */
	      int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
	      enum machine_mode mode = GET_MODE (x);

#if BYTES_BIG_ENDIAN
	      offset += (MIN (UNITS_PER_WORD,
			      GET_MODE_SIZE (GET_MODE (new)))
			 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
#endif

	      PUT_MODE (new, mode);
	      XEXP (new, 0) = plus_constant (XEXP (new, 0), offset);
	      return new;
	    }
	  else
	    return gen_rtx (SUBREG, GET_MODE (x), new, SUBREG_WORD (x));
	}

      return x;

    case CLOBBER:
      /* If clobbering a register that is the replacement register for an
	 elimination we still think can be performed, note that it cannot
	 be performed.  Otherwise, we need not be concerned about it.  */
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
	if (ep->to_rtx == XEXP (x, 0))
	  ep->can_eliminate = 0;

      new = eliminate_regs (XEXP (x, 0), mem_mode, NULL_RTX);
      if (new != XEXP (x, 0))
	return gen_rtx (code, GET_MODE (x), new);
      return x;

    case ASM_OPERANDS:
      {
	rtx *temp_vec;
	/* Properly handle sharing input and constraint vectors.  */
	if (ASM_OPERANDS_INPUT_VEC (x) != old_asm_operands_vec)
	  {
	    /* When we come to a new vector not seen before,
	       scan all its elements; keep the old vector if none
	       of them changes; otherwise, make a copy.  */
	    old_asm_operands_vec = ASM_OPERANDS_INPUT_VEC (x);
	    temp_vec = (rtx *) alloca (XVECLEN (x, 3) * sizeof (rtx));
	    for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
	      temp_vec[i] = eliminate_regs (ASM_OPERANDS_INPUT (x, i),
					    mem_mode, NULL_RTX);

	    for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
	      if (temp_vec[i] != ASM_OPERANDS_INPUT (x, i))
		break;

	    if (i == ASM_OPERANDS_INPUT_LENGTH (x))
	      new_asm_operands_vec = old_asm_operands_vec;
	    else
	      new_asm_operands_vec
		= gen_rtvec_v (ASM_OPERANDS_INPUT_LENGTH (x), temp_vec);
	  }

	/* If the vector didn't need copying, neither does the whole
	   ASM_OPERANDS; otherwise copy the entire ASM_OPERANDS.  */
	if (new_asm_operands_vec == old_asm_operands_vec)
	  return x;

	new = gen_rtx (ASM_OPERANDS, VOIDmode, ASM_OPERANDS_TEMPLATE (x),
		       ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
		       ASM_OPERANDS_OUTPUT_IDX (x), new_asm_operands_vec,
		       ASM_OPERANDS_INPUT_CONSTRAINT_VEC (x),
		       ASM_OPERANDS_SOURCE_FILE (x),
		       ASM_OPERANDS_SOURCE_LINE (x));
	new->volatil = x->volatil;
	return new;
      }

    case SET:
      /* Check for setting a register that we know about.  */
      if (GET_CODE (SET_DEST (x)) == REG)
	{
	  /* See if this is setting the replacement register for an
	     elimination.

	     If DEST is the frame pointer, we do nothing because we assume that
	     all assignments to the frame pointer are for non-local gotos and
	     are being done at a time when they are valid and do not disturb
	     anything else.  Some machines want to eliminate a fake argument
	     pointer with either the frame or stack pointer.  Assignments to
	     the frame pointer must not prevent this elimination.  */

	  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
	       ep++)
	    if (ep->to_rtx == SET_DEST (x)
		&& SET_DEST (x) != frame_pointer_rtx)
	      {
		/* If it is being incremented, adjust the offset.  Otherwise,
		   this elimination can't be done.  */
		rtx src = SET_SRC (x);

		if (GET_CODE (src) == PLUS
		    && XEXP (src, 0) == SET_DEST (x)
		    && GET_CODE (XEXP (src, 1)) == CONST_INT)
		  ep->offset -= INTVAL (XEXP (src, 1));
		else
		  ep->can_eliminate = 0;
	      }

	  /* Now check to see if we are assigning to a register that can be
	     eliminated.  If so, it must be as part of a PARALLEL, since we
	     will not have been called if this is a single SET.  So indicate
	     that we can no longer eliminate this reg.  */
	  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
	       ep++)
	    if (ep->from_rtx == SET_DEST (x) && ep->can_eliminate)
	      ep->can_eliminate = 0;
	}

      /* Now avoid the loop below in this common case.  */
      {
	rtx new0 = eliminate_regs (SET_DEST (x), 0, NULL_RTX);
	rtx new1 = eliminate_regs (SET_SRC (x), 0, NULL_RTX);

	/* If SET_DEST changed from a REG to a MEM and INSN is non-zero,
	   write a CLOBBER insn.  */
	if (GET_CODE (SET_DEST (x)) == REG && GET_CODE (new0) == MEM
	    && insn != 0)
	  emit_insn_after (gen_rtx (CLOBBER, VOIDmode, SET_DEST (x)), insn);

	if (new0 != SET_DEST (x) || new1 != SET_SRC (x))
	  return gen_rtx (SET, VOIDmode, new0, new1);
      }

      return x;

    case MEM:
      /* Our only special processing is to pass the mode of the MEM to our
	 recursive call and copy the flags.  While we are here, handle this
	 case more efficiently.  */
      new = eliminate_regs (XEXP (x, 0), GET_MODE (x), NULL_RTX);
      if (new != XEXP (x, 0))
	{
	  new = gen_rtx (MEM, GET_MODE (x), new);
	  new->volatil = x->volatil;
	  new->unchanging = x->unchanging;
	  new->in_struct = x->in_struct;
	  return new;
	}
      else
	return x;
    }

  /* Process each of our operands recursively.  If any have changed, make a
     copy of the rtx.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
    {
      if (*fmt == 'e')
	{
	  new = eliminate_regs (XEXP (x, i), mem_mode, NULL_RTX);
	  if (new != XEXP (x, i) && ! copied)
	    {
	      /* First change: shallow-copy X so the original is not
		 modified (it may be shared).  */
	      rtx new_x = rtx_alloc (code);
	      bcopy (x, new_x, (sizeof (*new_x) - sizeof (new_x->fld)
				+ (sizeof (new_x->fld[0])
				   * GET_RTX_LENGTH (code))));
	      x = new_x;
	      copied = 1;
	    }
	  XEXP (x, i) = new;
	}
      else if (*fmt == 'E')
	{
	  int copied_vec = 0;
	  for (j = 0; j < XVECLEN (x, i); j++)
	    {
	      new = eliminate_regs (XVECEXP (x, i, j), mem_mode, insn);
	      if (new != XVECEXP (x, i, j) && ! copied_vec)
		{
		  /* Copy the vector (and X itself, if not yet copied)
		     before the first change to one of its elements.  */
		  rtvec new_v = gen_rtvec_v (XVECLEN (x, i),
					     &XVECEXP (x, i, 0));
		  if (! copied)
		    {
		      rtx new_x = rtx_alloc (code);
		      bcopy (x, new_x, (sizeof (*new_x) - sizeof (new_x->fld)
					+ (sizeof (new_x->fld[0])
					   * GET_RTX_LENGTH (code))));
		      x = new_x;
		      copied = 1;
		    }
		  XVEC (x, i) = new_v;
		  copied_vec = 1;
		}
	      XVECEXP (x, i, j) = new;
	    }
	}
    }

  return x;
}
3085 \f
3086 /* Scan INSN and eliminate all eliminable registers in it.
3087
3088 If REPLACE is nonzero, do the replacement destructively. Also
   delete the insn as dead if it is setting an eliminable register.
3090
3091 If REPLACE is zero, do all our allocations in reload_obstack.
3092
3093 If no eliminations were done and this insn doesn't require any elimination
3094 processing (these are not identical conditions: it might be updating sp,
3095 but not referencing fp; this needs to be seen during reload_as_needed so
3096 that the offset between fp and sp can be taken into consideration), zero
3097 is returned. Otherwise, 1 is returned. */
3098
static int
eliminate_regs_in_insn (insn, replace)
     rtx insn;
     int replace;
{
  rtx old_body = PATTERN (insn);
  rtx new_body;
  int val = 0;
  struct elim_table *ep;

  /* Tentative (non-REPLACE) passes allocate any new RTL on
     reload_obstack so it can be discarded; see matching pop_obstacks
     at `done'.  */
  if (! replace)
    push_obstacks (&reload_obstack, &reload_obstack);

  if (GET_CODE (old_body) == SET && GET_CODE (SET_DEST (old_body)) == REG
      && REGNO (SET_DEST (old_body)) < FIRST_PSEUDO_REGISTER)
    {
      /* Check for setting an eliminable register.  */
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
	if (ep->from_rtx == SET_DEST (old_body) && ep->can_eliminate)
	  {
	    /* In this case this insn isn't serving a useful purpose.  We
	       will delete it in reload_as_needed once we know that this
	       elimination is, in fact, being done.

	       If REPLACE isn't set, we can't delete this insn, but needn't
	       process it since it won't be used unless something changes.  */
	    if (replace)
	      delete_dead_insn (insn);
	    val = 1;
	    goto done;
	  }

      /* Check for (set (reg) (plus (reg from) (offset))) where the offset
	 in the insn is the negative of the offset in FROM.  Substitute
	 (set (reg) (reg to)) for the insn and change its code.

	 We have to do this here, rather than in eliminate_regs, so that we
	 can change the insn code.  */

      if (GET_CODE (SET_SRC (old_body)) == PLUS
	  && GET_CODE (XEXP (SET_SRC (old_body), 0)) == REG
	  && GET_CODE (XEXP (SET_SRC (old_body), 1)) == CONST_INT)
	for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
	     ep++)
	  if (ep->from_rtx == XEXP (SET_SRC (old_body), 0)
	      && ep->can_eliminate
	      && ep->offset == - INTVAL (XEXP (SET_SRC (old_body), 1)))
	    {
	      PATTERN (insn) = gen_rtx (SET, VOIDmode,
					SET_DEST (old_body), ep->to_rtx);
	      INSN_CODE (insn) = -1;
	      val = 1;
	      goto done;
	    }
    }

  old_asm_operands_vec = 0;

  /* Replace the body of this insn with a substituted form.  If we changed
     something, return non-zero.  If this is the final call for this
     insn (REPLACE is non-zero), do the elimination in REG_NOTES as well.

     If we are replacing a body that was a (set X (plus Y Z)), try to
     re-recognize the insn.  We do this in case we had a simple addition
     but now can do this as a load-address.  This saves an insn in this
     common case.  */

  new_body = eliminate_regs (old_body, 0, replace ? insn : NULL_RTX);
  if (new_body != old_body)
    {
      /* If we aren't replacing things permanently and we changed something,
	 make another copy to ensure that all the RTL is new.  Otherwise
	 things can go wrong if find_reloads swaps commutative operands
	 and one is inside RTL that has been copied while the other is not.  */

      /* Don't copy an asm_operands because (1) there's no need and (2)
	 copy_rtx can't do it properly when there are multiple outputs.  */
      if (! replace && asm_noperands (old_body) < 0)
	new_body = copy_rtx (new_body);

      /* If we had a move insn but now we don't, rerecognize it.  */
      if ((GET_CODE (old_body) == SET && GET_CODE (SET_SRC (old_body)) == REG
	   && (GET_CODE (new_body) != SET
	       || GET_CODE (SET_SRC (new_body)) != REG))
	  /* If this was an add insn before, rerecognize.  */
	  ||
	  (GET_CODE (old_body) == SET
	   && GET_CODE (SET_SRC (old_body)) == PLUS))
	{
	  if (! validate_change (insn, &PATTERN (insn), new_body, 0))
	    /* If recognition fails, store the new body anyway.
	       It's normal to have recognition failures here
	       due to bizarre memory addresses; reloading will fix them.  */
	    PATTERN (insn) = new_body;
	}
      else
	PATTERN (insn) = new_body;

      if (replace && REG_NOTES (insn))
	REG_NOTES (insn) = eliminate_regs (REG_NOTES (insn), 0, NULL_RTX);
      val = 1;
    }

  /* Loop through all elimination pairs.  See if any have changed and
     recalculate the number not at initial offset.

     Compute the maximum offset (minimum offset if the stack does not
     grow downward) for each elimination pair.

     We also detect cases where register elimination cannot be done,
     namely, if a register would be both changed and referenced outside a MEM
     in the resulting insn since such an insn is often undefined and, even if
     not, we cannot know what meaning will be given to it.  Note that it is
     valid to have a register used in an address in an insn that changes it
     (presumably with a pre- or post-increment or decrement).

     If anything changes, return nonzero.  */

  num_not_at_initial_offset = 0;
  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
    {
      if (ep->previous_offset != ep->offset && ep->ref_outside_mem)
	ep->can_eliminate = 0;

      ep->ref_outside_mem = 0;

      if (ep->previous_offset != ep->offset)
	val = 1;

      ep->previous_offset = ep->offset;
      if (ep->can_eliminate && ep->offset != ep->initial_offset)
	num_not_at_initial_offset++;

#ifdef STACK_GROWS_DOWNWARD
      ep->max_offset = MAX (ep->max_offset, ep->offset);
#else
      ep->max_offset = MIN (ep->max_offset, ep->offset);
#endif
    }

 done:
  if (! replace)
    pop_obstacks ();

  return val;
}
3245
3246 /* Given X, a SET or CLOBBER of DEST, if DEST is the target of a register
3247 replacement we currently believe is valid, mark it as not eliminable if X
3248 modifies DEST in any way other than by adding a constant integer to it.
3249
3250 If DEST is the frame pointer, we do nothing because we assume that
3251 all assignments to the frame pointer are nonlocal gotos and are being done
3252 at a time when they are valid and do not disturb anything else.
3253 Some machines want to eliminate a fake argument pointer with either the
3254 frame or stack pointer. Assignments to the frame pointer must not prevent
3255 this elimination.
3256
3257 Called via note_stores from reload before starting its passes to scan
3258 the insns of the function. */
3259
3260 static void
3261 mark_not_eliminable (dest, x)
3262 rtx dest;
3263 rtx x;
3264 {
3265 register int i;
3266
3267 /* A SUBREG of a hard register here is just changing its mode. We should
3268 not see a SUBREG of an eliminable hard register, but check just in
3269 case. */
3270 if (GET_CODE (dest) == SUBREG)
3271 dest = SUBREG_REG (dest);
3272
3273 if (dest == frame_pointer_rtx)
3274 return;
3275
3276 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3277 if (reg_eliminate[i].can_eliminate && dest == reg_eliminate[i].to_rtx
3278 && (GET_CODE (x) != SET
3279 || GET_CODE (SET_SRC (x)) != PLUS
3280 || XEXP (SET_SRC (x), 0) != dest
3281 || GET_CODE (XEXP (SET_SRC (x), 1)) != CONST_INT))
3282 {
3283 reg_eliminate[i].can_eliminate_previous
3284 = reg_eliminate[i].can_eliminate = 0;
3285 num_eliminable--;
3286 }
3287 }
3288 \f
3289 /* Kick all pseudos out of hard register REGNO.
3290 If GLOBAL is nonzero, try to find someplace else to put them.
3291 If DUMPFILE is nonzero, log actions taken on that file.
3292
3293 If CANT_ELIMINATE is nonzero, it means that we are doing this spill
   because we found we can't eliminate some register.  In that case, no pseudos
3295 are allowed to be in the register, even if they are only in a block that
3296 doesn't require spill registers, unlike the case when we are spilling this
3297 hard reg to produce another spill register.
3298
3299 Return nonzero if any pseudos needed to be kicked out. */
3300
static int
spill_hard_reg (regno, global, dumpfile, cant_eliminate)
     register int regno;
     int global;
     FILE *dumpfile;
     int cant_eliminate;
{
  int something_changed = 0;
  register int i;

  /* This hard reg may no longer be allocated to any pseudo.  */
  SET_HARD_REG_BIT (forbidden_regs, regno);

  /* Spill every pseudo reg that was allocated to this reg
     or to something that overlaps this reg.  The overlap test checks
     that REGNO lies within [reg_renumber[i],
     reg_renumber[i] + HARD_REGNO_NREGS) for the pseudo's mode.  */

  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
    if (reg_renumber[i] >= 0
	&& reg_renumber[i] <= regno
	&& (reg_renumber[i]
	    + HARD_REGNO_NREGS (reg_renumber[i],
				PSEUDO_REGNO_MODE (i))
	    > regno))
      {
	enum reg_class class = REGNO_REG_CLASS (regno);

	/* If this register belongs solely to a basic block which needed no
	   spilling of any class that this register is contained in,
	   leave it be, unless we are spilling this register because
	   it was a hard register that can't be eliminated.  */

	if (! cant_eliminate
	    && basic_block_needs[0]
	    && reg_basic_block[i] >= 0
	    && basic_block_needs[(int) class][reg_basic_block[i]] == 0)
	  {
	    enum reg_class *p;

	    /* Also check every superclass of CLASS: only skip the pseudo
	       if no containing class needed a spill in its block.  */
	    for (p = reg_class_superclasses[(int) class];
		 *p != LIM_REG_CLASSES; p++)
	      if (basic_block_needs[(int) *p][reg_basic_block[i]] > 0)
		break;

	    if (*p == LIM_REG_CLASSES)
	      continue;
	  }

	/* Mark it as no longer having a hard register home.  */
	reg_renumber[i] = -1;
	/* We will need to scan everything again.  */
	something_changed = 1;
	if (global)
	  retry_global_alloc (i, forbidden_regs);

	/* Give the pseudo a stack slot (or its new hard reg home).  */
	alter_reg (i, regno);
	if (dumpfile)
	  {
	    if (reg_renumber[i] == -1)
	      fprintf (dumpfile, " Register %d now on stack.\n\n", i);
	    else
	      fprintf (dumpfile, " Register %d now in %d.\n\n",
		       i, reg_renumber[i]);
	  }
      }

  return something_changed;
}
3367 \f
3368 /* Find all paradoxical subregs within X and update reg_max_ref_width. */
3369
3370 static void
3371 scan_paradoxical_subregs (x)
3372 register rtx x;
3373 {
3374 register int i;
3375 register char *fmt;
3376 register enum rtx_code code = GET_CODE (x);
3377
3378 switch (code)
3379 {
3380 case CONST_INT:
3381 case CONST:
3382 case SYMBOL_REF:
3383 case LABEL_REF:
3384 case CONST_DOUBLE:
3385 case CC0:
3386 case PC:
3387 case REG:
3388 case USE:
3389 case CLOBBER:
3390 return;
3391
3392 case SUBREG:
3393 if (GET_CODE (SUBREG_REG (x)) == REG
3394 && GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3395 reg_max_ref_width[REGNO (SUBREG_REG (x))]
3396 = GET_MODE_SIZE (GET_MODE (x));
3397 return;
3398 }
3399
3400 fmt = GET_RTX_FORMAT (code);
3401 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3402 {
3403 if (fmt[i] == 'e')
3404 scan_paradoxical_subregs (XEXP (x, i));
3405 else if (fmt[i] == 'E')
3406 {
3407 register int j;
3408 for (j = XVECLEN (x, i) - 1; j >=0; j--)
3409 scan_paradoxical_subregs (XVECEXP (x, i, j));
3410 }
3411 }
3412 }
3413 \f
3414 static int
3415 hard_reg_use_compare (p1, p2)
3416 struct hard_reg_n_uses *p1, *p2;
3417 {
3418 int tem = p1->uses - p2->uses;
3419 if (tem != 0) return tem;
3420 /* If regs are equally good, sort by regno,
3421 so that the results of qsort leave nothing to chance. */
3422 return p1->regno - p2->regno;
3423 }
3424
3425 /* Choose the order to consider regs for use as reload registers
3426 based on how much trouble would be caused by spilling one.
3427 Store them in order of decreasing preference in potential_reload_regs. */
3428
static void
order_regs_for_reload ()
{
  register int i;
  register int o = 0;		/* Next free slot in potential_reload_regs.  */
  int large = 0;		/* Total pseudo refs; used as a weight larger
				   than any real use count below.  */

  struct hard_reg_n_uses hard_reg_n_uses[FIRST_PSEUDO_REGISTER];

  CLEAR_HARD_REG_SET (bad_spill_regs);

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    potential_reload_regs[i] = -1;

  /* Count number of uses of each hard reg by pseudo regs allocated to it
     and then order them by decreasing use.  */

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      hard_reg_n_uses[i].uses = 0;
      hard_reg_n_uses[i].regno = i;
    }

  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
    {
      int regno = reg_renumber[i];
      if (regno >= 0)
	{
	  /* Charge the pseudo's refs to each hard reg it occupies.  */
	  int lim = regno + HARD_REGNO_NREGS (regno, PSEUDO_REGNO_MODE (i));
	  while (regno < lim)
	    hard_reg_n_uses[regno++].uses += reg_n_refs[i];
	}
      large += reg_n_refs[i];
    }

  /* Now fixed registers (which cannot safely be used for reloading)
     get a very high use count so they will be considered least desirable.
     Registers used explicitly in the rtl code are almost as bad.  */

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      if (fixed_regs[i])
	{
	  hard_reg_n_uses[i].uses += 2 * large + 2;
	  SET_HARD_REG_BIT (bad_spill_regs, i);
	}
      else if (regs_explicitly_used[i])
	{
	  hard_reg_n_uses[i].uses += large + 1;
#ifndef SMALL_REGISTER_CLASSES
	  /* ??? We are doing this here because of the potential that
	     bad code may be generated if a register explicitly used in
	     an insn was used as a spill register for that insn.  But
	     not using these as spill registers may lose on some machine.
	     We'll have to see how this works out.  */
	  SET_HARD_REG_BIT (bad_spill_regs, i);
#endif
	}
    }
  /* The frame pointer is always a poor spill choice.  */
  hard_reg_n_uses[FRAME_POINTER_REGNUM].uses += 2 * large + 2;
  SET_HARD_REG_BIT (bad_spill_regs, FRAME_POINTER_REGNUM);

#ifdef ELIMINABLE_REGS
  /* If registers other than the frame pointer are eliminable, mark them as
     poor choices.  */
  for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
    {
      hard_reg_n_uses[reg_eliminate[i].from].uses += 2 * large + 2;
      SET_HARD_REG_BIT (bad_spill_regs, reg_eliminate[i].from);
    }
#endif

  /* Prefer registers not so far used, for use in temporary loading.
     Among them, if REG_ALLOC_ORDER is defined, use that order.
     Otherwise, prefer registers not preserved by calls.  */

#ifdef REG_ALLOC_ORDER
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      int regno = reg_alloc_order[i];

      if (hard_reg_n_uses[regno].uses == 0)
	potential_reload_regs[o++] = regno;
    }
#else
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      if (hard_reg_n_uses[i].uses == 0 && call_used_regs[i])
	potential_reload_regs[o++] = i;
    }
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      if (hard_reg_n_uses[i].uses == 0 && ! call_used_regs[i])
	potential_reload_regs[o++] = i;
    }
#endif

  qsort (hard_reg_n_uses, FIRST_PSEUDO_REGISTER,
	 sizeof hard_reg_n_uses[0], hard_reg_use_compare);

  /* Now add the regs that are already used,
     preferring those used less often.  The fixed and otherwise forbidden
     registers will be at the end of this list.  */

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (hard_reg_n_uses[i].uses != 0)
      potential_reload_regs[o++] = hard_reg_n_uses[i].regno;
}
3537 \f
3538 /* Reload pseudo-registers into hard regs around each insn as needed.
3539 Additional register load insns are output before the insn that needs it
3540 and perhaps store insns after insns that modify the reloaded pseudo reg.
3541
3542 reg_last_reload_reg and reg_reloaded_contents keep track of
3543 which registers are already available in reload registers.
3544 We update these for the reloads that we perform,
3545 as the insns are scanned. */
3546
static void
reload_as_needed (first, live_known)
     rtx first;
     int live_known;
{
  register rtx insn;
  register int i;
  int this_block = 0;
  rtx x;
  rtx after_call = 0;

  bzero (spill_reg_rtx, sizeof spill_reg_rtx);
  reg_last_reload_reg = (rtx *) alloca (max_regno * sizeof (rtx));
  bzero (reg_last_reload_reg, max_regno * sizeof (rtx));
  /* reg_has_output_reload is not cleared here; it is re-zeroed per insn
     below before find_reloads fills it in.  */
  reg_has_output_reload = (char *) alloca (max_regno);
  for (i = 0; i < n_spills; i++)
    {
      reg_reloaded_contents[i] = -1;
      reg_reloaded_insn[i] = 0;
    }

  /* Reset all offsets on eliminable registers to their initial values.  */
#ifdef ELIMINABLE_REGS
  for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
    {
      INITIAL_ELIMINATION_OFFSET (reg_eliminate[i].from, reg_eliminate[i].to,
				  reg_eliminate[i].initial_offset);
      reg_eliminate[i].previous_offset
	= reg_eliminate[i].offset = reg_eliminate[i].initial_offset;
    }
#else
  INITIAL_FRAME_POINTER_OFFSET (reg_eliminate[0].initial_offset);
  reg_eliminate[0].previous_offset
    = reg_eliminate[0].offset = reg_eliminate[0].initial_offset;
#endif

  num_not_at_initial_offset = 0;

  for (insn = first; insn;)
    {
      register rtx next = NEXT_INSN (insn);

      /* Notice when we move to a new basic block.  */
      if (live_known && this_block + 1 < n_basic_blocks
	  && insn == basic_block_head[this_block+1])
	++this_block;

      /* If we pass a label, copy the offsets from the label information
	 into the current offsets of each elimination.
	 NOTE(review): NUM_ELIMINABLE_REGS is referenced here without an
	 #ifdef ELIMINABLE_REGS guard, unlike the initialization above --
	 presumably every target that reaches this code defines it; verify.  */
      if (GET_CODE (insn) == CODE_LABEL)
	{
	  num_not_at_initial_offset = 0;
	  for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
	    {
	      reg_eliminate[i].offset = reg_eliminate[i].previous_offset
		= offsets_at[CODE_LABEL_NUMBER (insn)][i];
	      if (reg_eliminate[i].can_eliminate
		  && (reg_eliminate[i].offset
		      != reg_eliminate[i].initial_offset))
		num_not_at_initial_offset++;
	    }
	}

      else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	{
	  rtx avoid_return_reg = 0;

#ifdef SMALL_REGISTER_CLASSES
	  /* Set avoid_return_reg if this is an insn
	     that might use the value of a function call.
	     after_call carries the call's SET_DEST from the CALL_INSN to
	     the insn that consumes it; a pure stack-pointer adjustment in
	     between does not clear it.  */
	  if (GET_CODE (insn) == CALL_INSN)
	    {
	      if (GET_CODE (PATTERN (insn)) == SET)
		after_call = SET_DEST (PATTERN (insn));
	      else if (GET_CODE (PATTERN (insn)) == PARALLEL
		       && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
		after_call = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
	      else
		after_call = 0;
	    }
	  else if (after_call != 0
		   && !(GET_CODE (PATTERN (insn)) == SET
			&& SET_DEST (PATTERN (insn)) == stack_pointer_rtx))
	    {
	      if (reg_mentioned_p (after_call, PATTERN (insn)))
		avoid_return_reg = after_call;
	      after_call = 0;
	    }
#endif /* SMALL_REGISTER_CLASSES */

	  /* If this is a USE and CLOBBER of a MEM, ensure that any
	     references to eliminable registers have been removed.  */

	  if ((GET_CODE (PATTERN (insn)) == USE
	       || GET_CODE (PATTERN (insn)) == CLOBBER)
	      && GET_CODE (XEXP (PATTERN (insn), 0)) == MEM)
	    XEXP (XEXP (PATTERN (insn), 0), 0)
	      = eliminate_regs (XEXP (XEXP (PATTERN (insn), 0), 0),
				GET_MODE (XEXP (PATTERN (insn), 0)), NULL_RTX);

	  /* If we need to do register elimination processing, do so.
	     This might delete the insn, in which case we are done.
	     NOTE(review): the insn's mode field is apparently reused by the
	     reload pass as a per-insn flag (QImode = needs elimination,
	     VOIDmode = needs no reloads); confirm against where it is set.  */
	  if (num_eliminable && GET_MODE (insn) == QImode)
	    {
	      eliminate_regs_in_insn (insn, 1);
	      if (GET_CODE (insn) == NOTE)
		{
		  insn = next;
		  continue;
		}
	    }

	  if (GET_MODE (insn) == VOIDmode)
	    n_reloads = 0;
	  /* First find the pseudo regs that must be reloaded for this insn.
	     This info is returned in the tables reload_... (see reload.h).
	     Also modify the body of INSN by substituting RELOAD
	     rtx's for those pseudo regs.  */
	  else
	    {
	      bzero (reg_has_output_reload, max_regno);
	      CLEAR_HARD_REG_SET (reg_is_output_reload);

	      find_reloads (insn, 1, spill_indirect_levels, live_known,
			    spill_reg_order);
	    }

	  if (n_reloads > 0)
	    {
	      /* Capture INSN's neighbors now, before any reload insns are
		 emitted around it, so the asm-validation scan below covers
		 everything newly emitted.  (This NEXT intentionally shadows
		 the loop's NEXT; both hold the same insn at this point.)  */
	      rtx prev = PREV_INSN (insn), next = NEXT_INSN (insn);
	      rtx p;
	      int class;

	      /* If this block has not had spilling done for a
		 particular class and we have any non-optionals that need a
		 spill reg in that class, abort.  */

	      for (class = 0; class < N_REG_CLASSES; class++)
		if (basic_block_needs[class] != 0
		    && basic_block_needs[class][this_block] == 0)
		  for (i = 0; i < n_reloads; i++)
		    if (class == (int) reload_reg_class[i]
			&& reload_reg_rtx[i] == 0
			&& ! reload_optional[i]
			&& (reload_in[i] != 0 || reload_out[i] != 0
			    || reload_secondary_p[i] != 0))
		      abort ();

	      /* Now compute which reload regs to reload them into.  Perhaps
		 reusing reload regs from previous insns, or else output
		 load insns to reload them.  Maybe output store insns too.
		 Record the choices of reload reg in reload_reg_rtx.  */
	      choose_reload_regs (insn, avoid_return_reg);

#ifdef SMALL_REGISTER_CLASSES
	      /* Merge any reloads that we didn't combine for fear of
		 increasing the number of spill registers needed but now
		 discover can be safely merged.  */
	      merge_assigned_reloads (insn);
#endif

	      /* Generate the insns to reload operands into or out of
		 their reload regs.  */
	      emit_reload_insns (insn);

	      /* Substitute the chosen reload regs from reload_reg_rtx
		 into the insn's body (or perhaps into the bodies of other
		 load and store insn that we just made for reloading
		 and that we moved the structure into).  */
	      subst_reloads ();

	      /* If this was an ASM, make sure that all the reload insns
		 we have generated are valid.  If not, give an error
		 and delete them.  */

	      if (asm_noperands (PATTERN (insn)) >= 0)
		for (p = NEXT_INSN (prev); p != next; p = NEXT_INSN (p))
		  if (p != insn && GET_RTX_CLASS (GET_CODE (p)) == 'i'
		      && (recog_memoized (p) < 0
			  || (insn_extract (p),
			      ! constrain_operands (INSN_CODE (p), 1))))
		    {
		      error_for_asm (insn,
				     "`asm' operand requires impossible reload");
		      /* Turn the offending insn into a deleted note
			 instead of removing it from the chain.  */
		      PUT_CODE (p, NOTE);
		      NOTE_SOURCE_FILE (p) = 0;
		      NOTE_LINE_NUMBER (p) = NOTE_INSN_DELETED;
		    }
	    }
	  /* Any previously reloaded spilled pseudo reg, stored in this insn,
	     is no longer validly lying around to save a future reload.
	     Note that this does not detect pseudos that were reloaded
	     for this insn in order to be stored in
	     (obeying register constraints).  That is correct; such reload
	     registers ARE still valid.  */
	  note_stores (PATTERN (insn), forget_old_reloads_1);

	  /* There may have been CLOBBER insns placed after INSN.  So scan
	     between INSN and NEXT and use them to forget old reloads.  */
	  for (x = NEXT_INSN (insn); x != next; x = NEXT_INSN (x))
	    if (GET_CODE (x) == INSN && GET_CODE (PATTERN (x)) == CLOBBER)
	      note_stores (PATTERN (x), forget_old_reloads_1);

#ifdef AUTO_INC_DEC
	  /* Likewise for regs altered by auto-increment in this insn.
	     But note that the reg-notes are not changed by reloading:
	     they still contain the pseudo-regs, not the spill regs.  */
	  for (x = REG_NOTES (insn); x; x = XEXP (x, 1))
	    if (REG_NOTE_KIND (x) == REG_INC)
	      {
		/* See if this pseudo reg was reloaded in this insn.
		   If so, its last-reload info is still valid
		   because it is based on this insn's reload.  */
		for (i = 0; i < n_reloads; i++)
		  if (reload_out[i] == XEXP (x, 0))
		    break;

		if (i == n_reloads)
		  forget_old_reloads_1 (XEXP (x, 0));
	      }
#endif
	}
      /* A reload reg's contents are unknown after a label.  */
      if (GET_CODE (insn) == CODE_LABEL)
	for (i = 0; i < n_spills; i++)
	  {
	    reg_reloaded_contents[i] = -1;
	    reg_reloaded_insn[i] = 0;
	  }

      /* Don't assume a reload reg is still good after a call insn
	 if it is a call-used reg.  */
      else if (GET_CODE (insn) == CALL_INSN)
	for (i = 0; i < n_spills; i++)
	  if (call_used_regs[spill_regs[i]])
	    {
	      reg_reloaded_contents[i] = -1;
	      reg_reloaded_insn[i] = 0;
	    }

      /* In case registers overlap, allow certain insns to invalidate
	 particular hard registers.  */

#ifdef INSN_CLOBBERS_REGNO_P
      for (i = 0 ; i < n_spills ; i++)
	if (INSN_CLOBBERS_REGNO_P (insn, spill_regs[i]))
	  {
	    reg_reloaded_contents[i] = -1;
	    reg_reloaded_insn[i] = 0;
	  }
#endif

      insn = next;

#ifdef USE_C_ALLOCA
      /* With the C emulation of alloca, calling alloca (0) frees any
	 blocks whose frames have been exited.  */
      alloca (0);
#endif
    }
}
3806
3807 /* Discard all record of any value reloaded from X,
3808 or reloaded in X from someplace else;
3809 unless X is an output reload reg of the current insn.
3810
3811 X may be a hard reg (the reload reg)
3812 or it may be a pseudo reg that was reloaded from. */
3813
3814 static void
3815 forget_old_reloads_1 (x)
3816 rtx x;
3817 {
3818 register int regno;
3819 int nr;
3820 int offset = 0;
3821
3822 /* note_stores does give us subregs of hard regs. */
3823 while (GET_CODE (x) == SUBREG)
3824 {
3825 offset += SUBREG_WORD (x);
3826 x = SUBREG_REG (x);
3827 }
3828
3829 if (GET_CODE (x) != REG)
3830 return;
3831
3832 regno = REGNO (x) + offset;
3833
3834 if (regno >= FIRST_PSEUDO_REGISTER)
3835 nr = 1;
3836 else
3837 {
3838 int i;
3839 nr = HARD_REGNO_NREGS (regno, GET_MODE (x));
3840 /* Storing into a spilled-reg invalidates its contents.
3841 This can happen if a block-local pseudo is allocated to that reg
3842 and it wasn't spilled because this block's total need is 0.
3843 Then some insn might have an optional reload and use this reg. */
3844 for (i = 0; i < nr; i++)
3845 if (spill_reg_order[regno + i] >= 0
3846 /* But don't do this if the reg actually serves as an output
3847 reload reg in the current instruction. */
3848 && (n_reloads == 0
3849 || ! TEST_HARD_REG_BIT (reg_is_output_reload, regno + i)))
3850 {
3851 reg_reloaded_contents[spill_reg_order[regno + i]] = -1;
3852 reg_reloaded_insn[spill_reg_order[regno + i]] = 0;
3853 }
3854 }
3855
3856 /* Since value of X has changed,
3857 forget any value previously copied from it. */
3858
3859 while (nr-- > 0)
3860 /* But don't forget a copy if this is the output reload
3861 that establishes the copy's validity. */
3862 if (n_reloads == 0 || reg_has_output_reload[regno + nr] == 0)
3863 reg_last_reload_reg[regno + nr] = 0;
3864 }
3865 \f
/* For each reload, the mode of the reload register.
   This may differ from the mode of reload_in/reload_out.  */
static enum machine_mode reload_mode[MAX_RELOADS];

/* For each reload, the largest number of consecutive hard registers
   it will require.  */
static int reload_nregs[MAX_RELOADS];
3871
3872 /* Comparison function for qsort to decide which of two reloads
3873 should be handled first. *P1 and *P2 are the reload numbers. */
3874
3875 static int
3876 reload_reg_class_lower (p1, p2)
3877 short *p1, *p2;
3878 {
3879 register int r1 = *p1, r2 = *p2;
3880 register int t;
3881
3882 /* Consider required reloads before optional ones. */
3883 t = reload_optional[r1] - reload_optional[r2];
3884 if (t != 0)
3885 return t;
3886
3887 /* Count all solitary classes before non-solitary ones. */
3888 t = ((reg_class_size[(int) reload_reg_class[r2]] == 1)
3889 - (reg_class_size[(int) reload_reg_class[r1]] == 1));
3890 if (t != 0)
3891 return t;
3892
3893 /* Aside from solitaires, consider all multi-reg groups first. */
3894 t = reload_nregs[r2] - reload_nregs[r1];
3895 if (t != 0)
3896 return t;
3897
3898 /* Consider reloads in order of increasing reg-class number. */
3899 t = (int) reload_reg_class[r1] - (int) reload_reg_class[r2];
3900 if (t != 0)
3901 return t;
3902
3903 /* If reloads are equally urgent, sort by reload number,
3904 so that the results of qsort leave nothing to chance. */
3905 return r1 - r2;
3906 }
3907 \f
/* The following HARD_REG_SETs indicate when each hard register is
   used for a reload of various parts of the current insn.
   They are kept in sync by mark_reload_reg_in_use and consulted
   by the reload_reg_free_* predicates below.  */

/* If reg is in use as a reload reg for a RELOAD_OTHER reload.  */
static HARD_REG_SET reload_reg_used;
/* If reg is in use for a RELOAD_FOR_INPUT_ADDRESS reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_OUTPUT_ADDRESS reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_INPUT reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_input[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_OUTPUT reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_output[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_OPERAND_ADDRESS reload.  */
static HARD_REG_SET reload_reg_used_in_op_addr;
/* If reg is in use for a RELOAD_FOR_INSN reload.  */
static HARD_REG_SET reload_reg_used_in_insn;
/* If reg is in use for a RELOAD_FOR_OTHER_ADDRESS reload.  */
static HARD_REG_SET reload_reg_used_in_other_addr;

/* If reg is in use as a reload reg for any sort of reload.
   This is the union of all the sets above.  */
static HARD_REG_SET reload_reg_used_at_all;
3930
3931 /* Mark reg REGNO as in use for a reload of the sort spec'd by OPNUM and
3932 TYPE. MODE is used to indicate how many consecutive regs are
3933 actually used. */
3934
3935 static void
3936 mark_reload_reg_in_use (regno, opnum, type, mode)
3937 int regno;
3938 int opnum;
3939 enum reload_type type;
3940 enum machine_mode mode;
3941 {
3942 int nregs = HARD_REGNO_NREGS (regno, mode);
3943 int i;
3944
3945 for (i = regno; i < nregs + regno; i++)
3946 {
3947 switch (type)
3948 {
3949 case RELOAD_OTHER:
3950 SET_HARD_REG_BIT (reload_reg_used, i);
3951 break;
3952
3953 case RELOAD_FOR_INPUT_ADDRESS:
3954 SET_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], i);
3955 break;
3956
3957 case RELOAD_FOR_OUTPUT_ADDRESS:
3958 SET_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], i);
3959 break;
3960
3961 case RELOAD_FOR_OPERAND_ADDRESS:
3962 SET_HARD_REG_BIT (reload_reg_used_in_op_addr, i);
3963 break;
3964
3965 case RELOAD_FOR_OTHER_ADDRESS:
3966 SET_HARD_REG_BIT (reload_reg_used_in_other_addr, i);
3967 break;
3968
3969 case RELOAD_FOR_INPUT:
3970 SET_HARD_REG_BIT (reload_reg_used_in_input[opnum], i);
3971 break;
3972
3973 case RELOAD_FOR_OUTPUT:
3974 SET_HARD_REG_BIT (reload_reg_used_in_output[opnum], i);
3975 break;
3976
3977 case RELOAD_FOR_INSN:
3978 SET_HARD_REG_BIT (reload_reg_used_in_insn, i);
3979 break;
3980 }
3981
3982 SET_HARD_REG_BIT (reload_reg_used_at_all, i);
3983 }
3984 }
3985
/* 1 if reg REGNO is free as a reload reg for a reload of the sort
   specified by OPNUM and TYPE.  The conflict rules encode the order in
   which the parts of an insn are processed: other-address reloads first,
   then input addresses, inputs, operand addresses, the insn itself,
   output addresses, and outputs.  */

static int
reload_reg_free_p (regno, opnum, type)
     int regno;
     int opnum;
     enum reload_type type;
{
  int i;

  /* In use for a RELOAD_OTHER means it's not available for anything except
     RELOAD_FOR_OTHER_ADDRESS.  Recall that RELOAD_FOR_OTHER_ADDRESS is known
     to be used only for inputs.  */

  if (type != RELOAD_FOR_OTHER_ADDRESS
      && TEST_HARD_REG_BIT (reload_reg_used, regno))
    return 0;

  switch (type)
    {
    case RELOAD_OTHER:
      /* In use for anything means not available for a RELOAD_OTHER.  */
      return ! TEST_HARD_REG_BIT (reload_reg_used_at_all, regno);

      /* The other kinds of use can sometimes share a register.  */
    case RELOAD_FOR_INPUT:
      if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
	  || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
	return 0;

      /* If it is used for some other input, can't use it.  */
      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      /* If it is used in a later operand's address, can't use it.  */
      for (i = opnum + 1; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_INPUT_ADDRESS:
      /* Can't use a register if it is used for an input address for this
	 operand or used as an input in an earlier one.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], regno))
	return 0;

      for (i = 0; i < opnum; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_OUTPUT_ADDRESS:
      /* Can't use a register if it is used for an output address for this
	 operand or used as an output in this or a later operand.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno))
	return 0;

      for (i = opnum; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_OPERAND_ADDRESS:
      /* Conflicts with any input, the insn reload, or another
	 operand-address reload.  */
      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));

    case RELOAD_FOR_OUTPUT:
      /* This cannot share a register with RELOAD_FOR_INSN reloads, other
	 outputs, or an operand address for this or an earlier output.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
	return 0;

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      for (i = 0; i <= opnum; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_INSN:
      /* Conflicts with any input or output, another insn reload, or an
	 operand-address reload.  */
      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));

    case RELOAD_FOR_OTHER_ADDRESS:
      return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
    }
  /* Falling out of the switch means an unrecognized reload type,
     which is a bug in the caller.  */
  abort ();
}
4091
/* Return 1 if the value in reload reg REGNO, as used by a reload
   needed for the part of the insn specified by OPNUM and TYPE,
   is not in use for a reload in any prior part of the insn.

   We can assume that the reload reg was already tested for availability
   at the time it is needed, and we should not check this again,
   in case the reg has already been marked in use.  */

static int
reload_reg_free_before_p (regno, opnum, type)
     int regno;
     int opnum;
     enum reload_type type;
{
  int i;

  switch (type)
    {
    case RELOAD_FOR_OTHER_ADDRESS:
      /* These always come first.  */
      return 1;

    case RELOAD_OTHER:
      /* The only thing processed before a RELOAD_OTHER reload is a
	 RELOAD_FOR_OTHER_ADDRESS reload.  */
      return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);

      /* If this use is for part of the insn,
	 check the reg is not in use for any prior part.  It is tempting
	 to try to do this by falling through from objects that occur
	 later in the insn to ones that occur earlier, but that will not
	 correctly take into account the fact that here we MUST ignore
	 things that would prevent the register from being allocated in
	 the first place, since we know that it was allocated.  */

    case RELOAD_FOR_OUTPUT_ADDRESS:
      /* Earlier reloads are for earlier outputs or their addresses,
	 any RELOAD_FOR_INSN reloads, any inputs or their addresses, or any
	 RELOAD_FOR_OTHER_ADDRESS reloads (we know it can't conflict with
	 RELOAD_OTHER)..  */
      for (i = 0; i < opnum; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
	return 0;

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      return (! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));

    case RELOAD_FOR_OUTPUT:
      /* This can't be used in the output address for this operand and
	 anything that can't be used for it, except that we've already
	 tested for RELOAD_FOR_INSN objects.  */

      if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno))
	return 0;

      for (i = 0; i < opnum; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
	  return 0;

      return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);

    case RELOAD_FOR_OPERAND_ADDRESS:
    case RELOAD_FOR_INSN:
      /* These can't conflict with inputs, or each other, so all we have to
	 test is input addresses and the addresses of OTHER items.  */

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
	  return 0;

      return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);

    case RELOAD_FOR_INPUT:
      /* The only things earlier are the address for this and
	 earlier inputs, other inputs (which we know we don't conflict
	 with), and addresses of RELOAD_OTHER objects.  */

      for (i = 0; i <= opnum; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
	  return 0;

      return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);

    case RELOAD_FOR_INPUT_ADDRESS:
      /* Similarly, all we have to check is for use in earlier inputs'
	 addresses.  */
      for (i = 0; i < opnum; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
	  return 0;

      return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
    }
  /* Unrecognized reload type: caller bug.  */
  abort ();
}
4201
/* Return 1 if the value in reload reg REGNO, as used by a reload
   needed for the part of the insn specified by OPNUM and TYPE,
   is still available in REGNO at the end of the insn.

   We can assume that the reload reg was already tested for availability
   at the time it is needed, and we should not check this again,
   in case the reg has already been marked in use.  */

static int
reload_reg_reaches_end_p (regno, opnum, type)
     int regno;
     int opnum;
     enum reload_type type;
{
  int i;

  switch (type)
    {
    case RELOAD_OTHER:
      /* Since a RELOAD_OTHER reload claims the reg for the entire insn,
	 its value must reach the end.  */
      return 1;

      /* If this use is for part of the insn,
	 its value reaches if no subsequent part uses the same register.
	 Just like the above function, don't try to do this with lots
	 of fallthroughs.  */

    case RELOAD_FOR_OTHER_ADDRESS:
      /* Here we check for everything else, since these don't conflict
	 with anything else and everything comes later.  */

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used, regno));

    case RELOAD_FOR_INPUT_ADDRESS:
      /* Similar, except that we check only for this and subsequent inputs
	 and the address of only subsequent inputs and we do not need
	 to check for RELOAD_OTHER objects since they are known not to
	 conflict.  */

      for (i = opnum; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      for (i = opnum + 1; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno))
	  return 0;

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
	      && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno));

    case RELOAD_FOR_INPUT:
      /* Similar to input address, except we start at the next operand for
	 both input and input address and we do not check for
	 RELOAD_FOR_OPERAND_ADDRESS and RELOAD_FOR_INSN since these
	 would conflict.  */

      for (i = opnum + 1; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
	  return 0;

      /* ... fall through ...  */

    case RELOAD_FOR_OPERAND_ADDRESS:
      /* Check outputs and their addresses.  */

      for (i = 0; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
	    || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
	  return 0;

      return 1;

    case RELOAD_FOR_INSN:
      /* These conflict with other outputs and with RELOAD_OTHER.  So
	 we need only check for output addresses.  */

      /* Set OPNUM so the fall-through loop below scans every operand's
	 output address.  */
      opnum = -1;

      /* ... fall through ...  */

    case RELOAD_FOR_OUTPUT:
    case RELOAD_FOR_OUTPUT_ADDRESS:
      /* We already know these can't conflict with a later output.  So the
	 only thing to check are later output addresses.  */
      for (i = opnum + 1; i < reload_n_operands; i++)
	if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno))
	  return 0;

      return 1;
    }

  /* Unrecognized reload type: caller bug.  */
  abort ();
}
4311 \f
4312 /* Vector of reload-numbers showing the order in which the reloads should
4313 be processed. */
4314 short reload_order[MAX_RELOADS];
4315
4316 /* Indexed by reload number, 1 if incoming value
4317 inherited from previous insns. */
4318 char reload_inherited[MAX_RELOADS];
4319
4320 /* For an inherited reload, this is the insn the reload was inherited from,
4321 if we know it. Otherwise, this is 0. */
4322 rtx reload_inheritance_insn[MAX_RELOADS];
4323
4324 /* If non-zero, this is a place to get the value of the reload,
4325 rather than using reload_in. */
4326 rtx reload_override_in[MAX_RELOADS];
4327
4328 /* For each reload, the index in spill_regs of the spill register used,
4329 or -1 if we did not need one of the spill registers for this reload. */
4330 int reload_spill_index[MAX_RELOADS];
4331
4332 /* Index of last register assigned as a spill register. We allocate in
4333 a round-robin fashio. */
4334
4335 static last_spill_reg = 0;
4336
/* Find a spill register to use as a reload register for reload R.
   LAST_RELOAD is non-zero if this is the last reload for the insn being
   processed.

   Set reload_reg_rtx[R] to the register allocated.

   If NOERROR is nonzero, we return 1 if successful,
   or 0 if we couldn't find a spill reg and we didn't change anything.  */

static int
allocate_reload_reg (r, insn, last_reload, noerror)
     int r;
     rtx insn;
     int last_reload;
     int noerror;
{
  int i;
  int pass;
  int count;
  rtx new;
  int regno;

  /* If we put this reload ahead, thinking it is a group,
     then insist on finding a group.  Otherwise we can grab a
     reg that some other reload needs.
     (That can happen when we have a 68000 DATA_OR_FP_REG
     which is a group of data regs or one fp reg.)
     We need not be so restrictive if there are no more reloads
     for this insn.

     ??? Really it would be nicer to have smarter handling
     for that kind of reg class, where a problem like this is normal.
     Perhaps those classes should be avoided for reloading
     by use of more alternatives.  */

  int force_group = reload_nregs[r] > 1 && ! last_reload;

  /* If we want a single register and haven't yet found one,
     take any reg in the right class and not in use.
     If we want a consecutive group, here is where we look for it.

     We use two passes so we can first look for reload regs to
     reuse, which are already in use for other reloads in this insn,
     and only then use additional registers.
     I think that maximizing reuse is needed to make sure we don't
     run out of reload regs.  Suppose we have three reloads, and
     reloads A and B can share regs.  These need two regs.
     Suppose A and B are given different regs.
     That leaves none for C.  */
  for (pass = 0; pass < 2; pass++)
    {
      /* I is the index in spill_regs.
	 We advance it round-robin between insns to use all spill regs
	 equally, so that inherited reloads have a chance
	 of leapfrogging each other.  */

      for (count = 0, i = last_spill_reg; count < n_spills; count++)
	{
	  int class = (int) reload_reg_class[r];

	  i = (i + 1) % n_spills;

	  /* A candidate must be free for this reload's opnum/type, belong
	     to the reload's class, and be valid in the reload's mode;
	     on pass 0 it must additionally already be in use (reuse).  */
	  if (reload_reg_free_p (spill_regs[i], reload_opnum[r],
				 reload_when_needed[r])
	      && TEST_HARD_REG_BIT (reg_class_contents[class], spill_regs[i])
	      && HARD_REGNO_MODE_OK (spill_regs[i], reload_mode[r])
	      /* Look first for regs to share, then for unshared.  */
	      && (pass || TEST_HARD_REG_BIT (reload_reg_used_at_all,
					     spill_regs[i])))
	    {
	      int nr = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
	      /* Avoid the problem where spilling a GENERAL_OR_FP_REG
		 (on 68000) got us two FP regs.  If NR is 1,
		 we would reject both of them.  */
	      if (force_group)
		nr = CLASS_MAX_NREGS (reload_reg_class[r], reload_mode[r]);
	      /* If we need only one reg, we have already won.  */
	      if (nr == 1)
		{
		  /* But reject a single reg if we demand a group.  */
		  if (force_group)
		    continue;
		  break;
		}
	      /* Otherwise check that as many consecutive regs as we need
		 are available here.
		 Also, don't use for a group registers that are
		 needed for nongroups.  */
	      if (! TEST_HARD_REG_BIT (counted_for_nongroups, spill_regs[i]))
		while (nr > 1)
		  {
		    regno = spill_regs[i] + nr - 1;
		    if (!(TEST_HARD_REG_BIT (reg_class_contents[class], regno)
			  && spill_reg_order[regno] >= 0
			  && reload_reg_free_p (regno, reload_opnum[r],
						reload_when_needed[r])
			  && ! TEST_HARD_REG_BIT (counted_for_nongroups,
						  regno)))
		      break;
		    nr--;
		  }
	      /* NR reached 1 only if every reg in the group checked out.  */
	      if (nr == 1)
		break;
	    }
	}

      /* If we found something on pass 1, omit pass 2.  */
      if (count < n_spills)
	break;
    }

  /* We should have found a spill register by now.  */
  if (count == n_spills)
    {
      if (noerror)
	return 0;
      goto failure;
    }

  /* Remember where to resume the round-robin search next time.  */
  last_spill_reg = i;

  /* Mark as in use for this insn the reload regs we use for this.  */
  mark_reload_reg_in_use (spill_regs[i], reload_opnum[r],
			  reload_when_needed[r], reload_mode[r]);

  /* Reuse the cached REG rtx for this spill reg if its mode matches;
     otherwise make (and cache) a fresh one in the reload's mode.  */
  new = spill_reg_rtx[i];

  if (new == 0 || GET_MODE (new) != reload_mode[r])
    spill_reg_rtx[i] = new = gen_rtx (REG, reload_mode[r], spill_regs[i]);

  reload_reg_rtx[r] = new;
  reload_spill_index[r] = i;
  regno = true_regnum (new);

  /* Detect when the reload reg can't hold the reload mode.
     This used to be one `if', but Sequent compiler can't handle that.  */
  if (HARD_REGNO_MODE_OK (regno, reload_mode[r]))
    {
      enum machine_mode test_mode = VOIDmode;
      if (reload_in[r])
	test_mode = GET_MODE (reload_in[r]);
      /* If reload_in[r] has VOIDmode, it means we will load it
	 in whatever mode the reload reg has: to wit, reload_mode[r].
	 We have already tested that for validity.  */
      /* Aside from that, we need to test that the expressions
	 to reload from or into have modes which are valid for this
	 reload register.  Otherwise the reload insns would be invalid.  */
      if (! (reload_in[r] != 0 && test_mode != VOIDmode
	     && ! HARD_REGNO_MODE_OK (regno, test_mode)))
	if (! (reload_out[r] != 0
	       && ! HARD_REGNO_MODE_OK (regno, GET_MODE (reload_out[r]))))
	  /* The reg is OK.  */
	  return 1;
    }

  /* The reg is not OK.  */
  if (noerror)
    return 0;

 failure:
  if (asm_noperands (PATTERN (insn)) < 0)
    /* It's the compiler's fault.  */
    abort ();

  /* It's the user's fault; the operand's mode and constraint
     don't match.  Disable this reload so we don't crash in final.  */
  error_for_asm (insn,
		 "`asm' operand constraint incompatible with operand size");
  reload_in[r] = 0;
  reload_out[r] = 0;
  reload_reg_rtx[r] = 0;
  reload_optional[r] = 1;
  reload_secondary_p[r] = 1;

  return 1;
}
4513 \f
4514 /* Assign hard reg targets for the pseudo-registers we must reload
4515 into hard regs for this insn.
4516 Also output the instructions to copy them in and out of the hard regs.
4517
4518 For machines with register classes, we are responsible for
4519 finding a reload reg in the proper class. */
4520
4521 static void
4522 choose_reload_regs (insn, avoid_return_reg)
4523 rtx insn;
4524 rtx avoid_return_reg;
4525 {
4526 register int i, j;
4527 int max_group_size = 1;
4528 enum reg_class group_class = NO_REGS;
4529 int inheritance;
4530
/* The save_* variables below snapshot the whole reload-allocation state
   at entry to the inheritance retry loop, so a failed attempt with
   inheritance enabled can be rolled back before retrying without it.  */
4531 rtx save_reload_reg_rtx[MAX_RELOADS];
4532 char save_reload_inherited[MAX_RELOADS];
4533 rtx save_reload_inheritance_insn[MAX_RELOADS];
4534 rtx save_reload_override_in[MAX_RELOADS];
4535 int save_reload_spill_index[MAX_RELOADS];
4536 HARD_REG_SET save_reload_reg_used;
4537 HARD_REG_SET save_reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
4538 HARD_REG_SET save_reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
4539 HARD_REG_SET save_reload_reg_used_in_input[MAX_RECOG_OPERANDS];
4540 HARD_REG_SET save_reload_reg_used_in_output[MAX_RECOG_OPERANDS];
4541 HARD_REG_SET save_reload_reg_used_in_op_addr;
4542 HARD_REG_SET save_reload_reg_used_in_insn;
4543 HARD_REG_SET save_reload_reg_used_in_other_addr;
4544 HARD_REG_SET save_reload_reg_used_at_all;
4545
/* Clear all per-insn inheritance records and reload-reg usage sets
   before allocating for this insn.  */
4546 bzero (reload_inherited, MAX_RELOADS);
4547 bzero (reload_inheritance_insn, MAX_RELOADS * sizeof (rtx));
4548 bzero (reload_override_in, MAX_RELOADS * sizeof (rtx));
4549
4550 CLEAR_HARD_REG_SET (reload_reg_used);
4551 CLEAR_HARD_REG_SET (reload_reg_used_at_all);
4552 CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr);
4553 CLEAR_HARD_REG_SET (reload_reg_used_in_insn);
4554 CLEAR_HARD_REG_SET (reload_reg_used_in_other_addr);
4555
4556 for (i = 0; i < reload_n_operands; i++)
4557 {
4558 CLEAR_HARD_REG_SET (reload_reg_used_in_output[i]);
4559 CLEAR_HARD_REG_SET (reload_reg_used_in_input[i]);
4560 CLEAR_HARD_REG_SET (reload_reg_used_in_input_addr[i]);
4561 CLEAR_HARD_REG_SET (reload_reg_used_in_output_addr[i]);
4562 }
4563
4564 #ifdef SMALL_REGISTER_CLASSES
4565 /* Don't bother with avoiding the return reg
4566 if we have no mandatory reload that could use it. */
4567 if (avoid_return_reg)
4568 {
4569 int do_avoid = 0;
4570 int regno = REGNO (avoid_return_reg);
4571 int nregs
4572 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
4573 int r;
4574
4575 for (r = regno; r < regno + nregs; r++)
4576 if (spill_reg_order[r] >= 0)
4577 for (j = 0; j < n_reloads; j++)
4578 if (!reload_optional[j] && reload_reg_rtx[j] == 0
4579 && (reload_in[j] != 0 || reload_out[j] != 0
4580 || reload_secondary_p[j])
4581 &&
4582 TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[j]], r))
4583 do_avoid = 1;
4584 if (!do_avoid)
4585 avoid_return_reg = 0;
4586 }
4587 #endif /* SMALL_REGISTER_CLASSES */
4588
4589 #if 0 /* Not needed, now that we can always retry without inheritance. */
4590 /* See if we have more mandatory reloads than spill regs.
4591 If so, then we cannot risk optimizations that could prevent
4592 reloads from sharing one spill register.
4593
4594 Since we will try finding a better register than reload_reg_rtx
4595 unless it is equal to reload_in or reload_out, count such reloads. */
4596
4597 {
4598 int tem = 0;
4599 #ifdef SMALL_REGISTER_CLASSES
/* NOTE(review): this redeclares TEM in the same scope; if this #if 0
   block is ever re-enabled it will not compile as written.  */
4600 int tem = (avoid_return_reg != 0);
4601 #endif
4602 for (j = 0; j < n_reloads; j++)
4603 if (! reload_optional[j]
4604 && (reload_in[j] != 0 || reload_out[j] != 0 || reload_secondary_p[j])
4605 && (reload_reg_rtx[j] == 0
4606 || (! rtx_equal_p (reload_reg_rtx[j], reload_in[j])
4607 && ! rtx_equal_p (reload_reg_rtx[j], reload_out[j]))))
4608 tem++;
4609 if (tem > n_spills)
4610 must_reuse = 1;
4611 }
4612 #endif
4613
4614 #ifdef SMALL_REGISTER_CLASSES
4615 /* Don't use the subroutine call return reg for a reload
4616 if we are supposed to avoid it. */
4617 if (avoid_return_reg)
4618 {
4619 int regno = REGNO (avoid_return_reg);
4620 int nregs
4621 = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
4622 int r;
4623
4624 for (r = regno; r < regno + nregs; r++)
4625 if (spill_reg_order[r] >= 0)
4626 SET_HARD_REG_BIT (reload_reg_used, r);
4627 }
4628 #endif /* SMALL_REGISTER_CLASSES */
4629
4630 /* In order to be certain of getting the registers we need,
4631 we must sort the reloads into order of increasing register class.
4632 Then our grabbing of reload registers will parallel the process
4633 that provided the reload registers.
4634
4635 Also note whether any of the reloads wants a consecutive group of regs.
4636 If so, record the maximum size of the group desired and what
4637 register class contains all the groups needed by this insn. */
4638
4639 for (j = 0; j < n_reloads; j++)
4640 {
4641 reload_order[j] = j;
4642 reload_spill_index[j] = -1;
4643
/* Reload in the wider of the input and output modes; a VOIDmode input
   means only the output mode applies.  */
4644 reload_mode[j]
4645 = (reload_inmode[j] == VOIDmode
4646 || (GET_MODE_SIZE (reload_outmode[j])
4647 > GET_MODE_SIZE (reload_inmode[j])))
4648 ? reload_outmode[j] : reload_inmode[j];
4649
4650 reload_nregs[j] = CLASS_MAX_NREGS (reload_reg_class[j], reload_mode[j]);
4651
4652 if (reload_nregs[j] > 1)
4653 {
4654 max_group_size = MAX (reload_nregs[j], max_group_size);
4655 group_class = reg_class_superunion[(int)reload_reg_class[j]][(int)group_class];
4656 }
4657
4658 /* If we have already decided to use a certain register,
4659 don't use it in another way. */
4660 if (reload_reg_rtx[j])
4661 mark_reload_reg_in_use (REGNO (reload_reg_rtx[j]), reload_opnum[j],
4662 reload_when_needed[j], reload_mode[j]);
4663 }
4664
4665 if (n_reloads > 1)
4666 qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);
4667
/* Take the snapshot that the `fail:' path below restores.  */
4668 bcopy (reload_reg_rtx, save_reload_reg_rtx, sizeof reload_reg_rtx);
4669 bcopy (reload_inherited, save_reload_inherited, sizeof reload_inherited);
4670 bcopy (reload_inheritance_insn, save_reload_inheritance_insn,
4671 sizeof reload_inheritance_insn);
4672 bcopy (reload_override_in, save_reload_override_in,
4673 sizeof reload_override_in);
4674 bcopy (reload_spill_index, save_reload_spill_index,
4675 sizeof reload_spill_index);
4676 COPY_HARD_REG_SET (save_reload_reg_used, reload_reg_used);
4677 COPY_HARD_REG_SET (save_reload_reg_used_at_all, reload_reg_used_at_all);
4678 COPY_HARD_REG_SET (save_reload_reg_used_in_op_addr,
4679 reload_reg_used_in_op_addr);
4680 COPY_HARD_REG_SET (save_reload_reg_used_in_insn,
4681 reload_reg_used_in_insn);
4682 COPY_HARD_REG_SET (save_reload_reg_used_in_other_addr,
4683 reload_reg_used_in_other_addr);
4684
4685 for (i = 0; i < reload_n_operands; i++)
4686 {
4687 COPY_HARD_REG_SET (save_reload_reg_used_in_output[i],
4688 reload_reg_used_in_output[i]);
4689 COPY_HARD_REG_SET (save_reload_reg_used_in_input[i],
4690 reload_reg_used_in_input[i]);
4691 COPY_HARD_REG_SET (save_reload_reg_used_in_input_addr[i],
4692 reload_reg_used_in_input_addr[i]);
4693 COPY_HARD_REG_SET (save_reload_reg_used_in_output_addr[i],
4694 reload_reg_used_in_output_addr[i]);
4695 }
4696
4697 /* If -O, try first with inheritance, then turning it off.
4698 If not -O, don't do inheritance.
4699 Using inheritance when not optimizing leads to paradoxes
4700 with fp on the 68k: fp numbers (not NaNs) fail to be equal to themselves
4701 because one side of the comparison might be inherited. */
4702
4703 for (inheritance = optimize > 0; inheritance >= 0; inheritance--)
4704 {
4705 /* Process the reloads in order of preference just found.
4706 Beyond this point, subregs can be found in reload_reg_rtx.
4707
4708 This used to look for an existing reloaded home for all
4709 of the reloads, and only then perform any new reloads.
4710 But that could lose if the reloads were done out of reg-class order
4711 because a later reload with a looser constraint might have an old
4712 home in a register needed by an earlier reload with a tighter constraint.
4713
4714 To solve this, we make two passes over the reloads, in the order
4715 described above. In the first pass we try to inherit a reload
4716 from a previous insn. If there is a later reload that needs a
4717 class that is a proper subset of the class being processed, we must
4718 also allocate a spill register during the first pass.
4719
4720 Then make a second pass over the reloads to allocate any reloads
4721 that haven't been given registers yet. */
4722
4723 for (j = 0; j < n_reloads; j++)
4724 {
4725 register int r = reload_order[j];
4726
4727 /* Ignore reloads that got marked inoperative. */
4728 if (reload_out[r] == 0 && reload_in[r] == 0 && ! reload_secondary_p[r])
4729 continue;
4730
4731 /* If find_reloads chose to use reload_in or reload_out as a reload
4732 register, we don't need to choose one. Otherwise, try even if it found
4733 one since we might save an insn if we find the value lying around. */
4734 if (reload_in[r] != 0 && reload_reg_rtx[r] != 0
4735 && (rtx_equal_p (reload_in[r], reload_reg_rtx[r])
4736 || rtx_equal_p (reload_out[r], reload_reg_rtx[r])))
4737 continue;
4738
4739 #if 0 /* No longer needed for correct operation.
4740 It might give better code, or might not; worth an experiment? */
4741 /* If this is an optional reload, we can't inherit from earlier insns
4742 until we are sure that any non-optional reloads have been allocated.
4743 The following code takes advantage of the fact that optional reloads
4744 are at the end of reload_order. */
4745 if (reload_optional[r] != 0)
4746 for (i = 0; i < j; i++)
4747 if ((reload_out[reload_order[i]] != 0
4748 || reload_in[reload_order[i]] != 0
4749 || reload_secondary_p[reload_order[i]])
4750 && ! reload_optional[reload_order[i]]
4751 && reload_reg_rtx[reload_order[i]] == 0)
4752 allocate_reload_reg (reload_order[i], insn, 0, inheritance);
4753 #endif
4754
4755 /* First see if this pseudo is already available as reloaded
4756 for a previous insn. We cannot try to inherit for reloads
4757 that are smaller than the maximum number of registers needed
4758 for groups unless the register we would allocate cannot be used
4759 for the groups.
4760
4761 We could check here to see if this is a secondary reload for
4762 an object that is already in a register of the desired class.
4763 This would avoid the need for the secondary reload register.
4764 But this is complex because we can't easily determine what
4765 objects might want to be loaded via this reload. So let a register
4766 be allocated here. In `emit_reload_insns' we suppress one of the
4767 loads in the case described above. */
4768
4769 if (inheritance)
4770 {
4771 register int regno = -1;
4772 enum machine_mode mode;
4773
4774 if (reload_in[r] == 0)
4775 ;
4776 else if (GET_CODE (reload_in[r]) == REG)
4777 {
4778 regno = REGNO (reload_in[r]);
4779 mode = GET_MODE (reload_in[r]);
4780 }
4781 else if (GET_CODE (reload_in_reg[r]) == REG)
4782 {
4783 regno = REGNO (reload_in_reg[r]);
4784 mode = GET_MODE (reload_in_reg[r]);
4785 }
4786 #if 0
4787 /* This won't work, since REGNO can be a pseudo reg number.
4788 Also, it takes much more hair to keep track of all the things
4789 that can invalidate an inherited reload of part of a pseudoreg. */
4790 else if (GET_CODE (reload_in[r]) == SUBREG
4791 && GET_CODE (SUBREG_REG (reload_in[r])) == REG)
4792 regno = REGNO (SUBREG_REG (reload_in[r])) + SUBREG_WORD (reload_in[r]);
4793 #endif
4794
/* The last reload of REGNO may still be sitting in a spill reg; check
   that the spill reg is free, mode-capable, in the right class, and
   (for groups) that no group needs it, before inheriting it.  */
4795 if (regno >= 0 && reg_last_reload_reg[regno] != 0)
4796 {
4797 i = spill_reg_order[REGNO (reg_last_reload_reg[regno])];
4798
4799 if (reg_reloaded_contents[i] == regno
4800 && (GET_MODE_SIZE (GET_MODE (reg_last_reload_reg[regno]))
4801 >= GET_MODE_SIZE (mode))
4802 && HARD_REGNO_MODE_OK (spill_regs[i], reload_mode[r])
4803 && TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[r]],
4804 spill_regs[i])
4805 && (reload_nregs[r] == max_group_size
4806 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) group_class],
4807 spill_regs[i]))
4808 && reload_reg_free_p (spill_regs[i], reload_opnum[r],
4809 reload_when_needed[r])
4810 && reload_reg_free_before_p (spill_regs[i],
4811 reload_opnum[r],
4812 reload_when_needed[r]))
4813 {
4814 /* If a group is needed, verify that all the subsequent
4815 registers still have their values intact. */
4816 int nr
4817 = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
4818 int k;
4819
4820 for (k = 1; k < nr; k++)
4821 if (reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
4822 != regno)
4823 break;
4824
4825 if (k == nr)
4826 {
4827 /* Mark the register as in use for this part of
4828 the insn. */
4829 mark_reload_reg_in_use (spill_regs[i],
4830 reload_opnum[r],
4831 reload_when_needed[r],
4832 reload_mode[r]);
4833 reload_reg_rtx[r] = reg_last_reload_reg[regno];
4834 reload_inherited[r] = 1;
4835 reload_inheritance_insn[r] = reg_reloaded_insn[i];
4836 reload_spill_index[r] = i;
4837 }
4838 }
4839 }
4840 }
4841
4842 /* Here's another way to see if the value is already lying around. */
4843 if (inheritance
4844 && reload_in[r] != 0
4845 && ! reload_inherited[r]
4846 && reload_out[r] == 0
4847 && (CONSTANT_P (reload_in[r])
4848 || GET_CODE (reload_in[r]) == PLUS
4849 || GET_CODE (reload_in[r]) == REG
4850 || GET_CODE (reload_in[r]) == MEM)
4851 && (reload_nregs[r] == max_group_size
4852 || ! reg_classes_intersect_p (reload_reg_class[r], group_class)))
4853 {
4854 register rtx equiv
4855 = find_equiv_reg (reload_in[r], insn, reload_reg_class[r],
4856 -1, NULL_PTR, 0, reload_mode[r]);
4857 int regno;
4858
4859 if (equiv != 0)
4860 {
4861 if (GET_CODE (equiv) == REG)
4862 regno = REGNO (equiv);
4863 else if (GET_CODE (equiv) == SUBREG)
4864 {
4865 regno = REGNO (SUBREG_REG (equiv));
4866 if (regno < FIRST_PSEUDO_REGISTER)
4867 regno += SUBREG_WORD (equiv);
4868 }
4869 else
4870 abort ();
4871 }
4872
4873 /* If we found a spill reg, reject it unless it is free
4874 and of the desired class. */
4875 if (equiv != 0
4876 && ((spill_reg_order[regno] >= 0
4877 && ! reload_reg_free_before_p (regno, reload_opnum[r],
4878 reload_when_needed[r]))
4879 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[r]],
4880 regno)))
4881 equiv = 0;
4882
4883 if (equiv != 0 && TEST_HARD_REG_BIT (reload_reg_used_at_all, regno))
4884 equiv = 0;
4885
4886 if (equiv != 0 && ! HARD_REGNO_MODE_OK (regno, reload_mode[r]))
4887 equiv = 0;
4888
4889 /* We found a register that contains the value we need.
4890 If this register is the same as an `earlyclobber' operand
4891 of the current insn, just mark it as a place to reload from
4892 since we can't use it as the reload register itself. */
4893
4894 if (equiv != 0)
4895 for (i = 0; i < n_earlyclobbers; i++)
4896 if (reg_overlap_mentioned_for_reload_p (equiv,
4897 reload_earlyclobbers[i]))
4898 {
4899 reload_override_in[r] = equiv;
4900 equiv = 0;
4901 break;
4902 }
4903
4904 /* JRV: If the equiv register we have found is explicitly
4905 clobbered in the current insn, mark but don't use, as above. */
4906
4907 if (equiv != 0 && regno_clobbered_p (regno, insn))
4908 {
4909 reload_override_in[r] = equiv;
4910 equiv = 0;
4911 }
4912
4913 /* If we found an equivalent reg, say no code need be generated
4914 to load it, and use it as our reload reg. */
/* NOTE(review): the frame pointer is explicitly excluded as a reload
   reg here — presumably because its contents are managed elsewhere;
   confirm against frame-pointer elimination logic.  */
4915 if (equiv != 0 && regno != FRAME_POINTER_REGNUM)
4916 {
4917 reload_reg_rtx[r] = equiv;
4918 reload_inherited[r] = 1;
4919 /* If it is a spill reg,
4920 mark the spill reg as in use for this insn. */
4921 i = spill_reg_order[regno];
4922 if (i >= 0)
4923 mark_reload_reg_in_use (regno, reload_opnum[r],
4924 reload_when_needed[r],
4925 reload_mode[r]);
4926 }
4927 }
4928
4929 /* If we found a register to use already, or if this is an optional
4930 reload, we are done. */
4931 if (reload_reg_rtx[r] != 0 || reload_optional[r] != 0)
4932 continue;
4933
4934 #if 0 /* No longer needed for correct operation. Might or might not
4935 give better code on the average. Want to experiment? */
4936
4937 /* See if there is a later reload that has a class different from our
4938 class that intersects our class or that requires less register
4939 than our reload. If so, we must allocate a register to this
4940 reload now, since that reload might inherit a previous reload
4941 and take the only available register in our class. Don't do this
4942 for optional reloads since they will force all previous reloads
4943 to be allocated. Also don't do this for reloads that have been
4944 turned off. */
4945
4946 for (i = j + 1; i < n_reloads; i++)
4947 {
4948 int s = reload_order[i];
4949
4950 if ((reload_in[s] == 0 && reload_out[s] == 0
4951 && ! reload_secondary_p[s])
4952 || reload_optional[s])
4953 continue;
4954
4955 if ((reload_reg_class[s] != reload_reg_class[r]
4956 && reg_classes_intersect_p (reload_reg_class[r],
4957 reload_reg_class[s]))
4958 || reload_nregs[s] < reload_nregs[r])
4959 break;
4960 }
4961
4962 if (i == n_reloads)
4963 continue;
4964
4965 allocate_reload_reg (r, insn, j == n_reloads - 1, inheritance);
4966 #endif
4967 }
4968
4969 /* Now allocate reload registers for anything non-optional that
4970 didn't get one yet. */
4971 for (j = 0; j < n_reloads; j++)
4972 {
4973 register int r = reload_order[j];
4974
4975 /* Ignore reloads that got marked inoperative. */
4976 if (reload_out[r] == 0 && reload_in[r] == 0 && ! reload_secondary_p[r])
4977 continue;
4978
4979 /* Skip reloads that already have a register allocated or are
4980 optional. */
4981 if (reload_reg_rtx[r] != 0 || reload_optional[r])
4982 continue;
4983
4984 if (! allocate_reload_reg (r, insn, j == n_reloads - 1, inheritance))
4985 break;
4986 }
4987
4988 /* If that loop got all the way, we have won. */
4989 if (j == n_reloads)
4990 break;
4991
/* Reached by falling out of the allocation loop above when
   allocate_reload_reg failed.  */
4992 fail:
4993 /* Loop around and try without any inheritance. */
4994 /* First undo everything done by the failed attempt
4995 to allocate with inheritance. */
4996 bcopy (save_reload_reg_rtx, reload_reg_rtx, sizeof reload_reg_rtx);
4997 bcopy (save_reload_inherited, reload_inherited, sizeof reload_inherited);
4998 bcopy (save_reload_inheritance_insn, reload_inheritance_insn,
4999 sizeof reload_inheritance_insn);
5000 bcopy (save_reload_override_in, reload_override_in,
5001 sizeof reload_override_in);
5002 bcopy (save_reload_spill_index, reload_spill_index,
5003 sizeof reload_spill_index);
5004 COPY_HARD_REG_SET (reload_reg_used, save_reload_reg_used);
5005 COPY_HARD_REG_SET (reload_reg_used_at_all, save_reload_reg_used_at_all);
5006 COPY_HARD_REG_SET (reload_reg_used_in_op_addr,
5007 save_reload_reg_used_in_op_addr);
5008 COPY_HARD_REG_SET (reload_reg_used_in_insn,
5009 save_reload_reg_used_in_insn);
5010 COPY_HARD_REG_SET (reload_reg_used_in_other_addr,
5011 save_reload_reg_used_in_other_addr);
5012
5013 for (i = 0; i < reload_n_operands; i++)
5014 {
5015 COPY_HARD_REG_SET (reload_reg_used_in_input[i],
5016 save_reload_reg_used_in_input[i]);
5017 COPY_HARD_REG_SET (reload_reg_used_in_output[i],
5018 save_reload_reg_used_in_output[i]);
5019 COPY_HARD_REG_SET (reload_reg_used_in_input_addr[i],
5020 save_reload_reg_used_in_input_addr[i]);
5021 COPY_HARD_REG_SET (reload_reg_used_in_output_addr[i],
5022 save_reload_reg_used_in_output_addr[i]);
5023 }
5024 }
5025
5026 /* If we thought we could inherit a reload, because it seemed that
5027 nothing else wanted the same reload register earlier in the insn,
5028 verify that assumption, now that all reloads have been assigned. */
5029
5030 for (j = 0; j < n_reloads; j++)
5031 {
5032 register int r = reload_order[j];
5033
5034 if (reload_inherited[r] && reload_reg_rtx[r] != 0
5035 && ! reload_reg_free_before_p (true_regnum (reload_reg_rtx[r]),
5036 reload_opnum[r],
5037 reload_when_needed[r]))
5038 reload_inherited[r] = 0;
5039
5040 /* If we found a better place to reload from,
5041 validate it in the same fashion, if it is a reload reg. */
5042 if (reload_override_in[r]
5043 && (GET_CODE (reload_override_in[r]) == REG
5044 || GET_CODE (reload_override_in[r]) == SUBREG))
5045 {
5046 int regno = true_regnum (reload_override_in[r]);
5047 if (spill_reg_order[regno] >= 0
5048 && ! reload_reg_free_before_p (regno, reload_opnum[r],
5049 reload_when_needed[r]))
5050 reload_override_in[r] = 0;
5051 }
5052 }
5053
5054 /* Now that reload_override_in is known valid,
5055 actually override reload_in. */
5056 for (j = 0; j < n_reloads; j++)
5057 if (reload_override_in[j])
5058 reload_in[j] = reload_override_in[j];
5059
5060 /* If this reload won't be done because it has been cancelled or is
5061 optional and not inherited, clear reload_reg_rtx so other
5062 routines (such as subst_reloads) don't get confused. */
5063 for (j = 0; j < n_reloads; j++)
5064 if ((reload_optional[j] && ! reload_inherited[j])
5065 || (reload_in[j] == 0 && reload_out[j] == 0
5066 && ! reload_secondary_p[j]))
5067 reload_reg_rtx[j] = 0;
5068
5069 /* Record which pseudos and which spill regs have output reloads. */
5070 for (j = 0; j < n_reloads; j++)
5071 {
5072 register int r = reload_order[j];
5073
5074 i = reload_spill_index[r];
5075
5076 /* I is nonneg if this reload used one of the spill regs.
5077 If reload_reg_rtx[r] is 0, this is an optional reload
5078 that we opted to ignore. */
5079 if (reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG
5080 && reload_reg_rtx[r] != 0)
5081 {
5082 register int nregno = REGNO (reload_out[r]);
5083 int nr = 1;
5084
5085 if (nregno < FIRST_PSEUDO_REGISTER)
5086 nr = HARD_REGNO_NREGS (nregno, reload_mode[r]);
5087
5088 while (--nr >= 0)
5089 reg_has_output_reload[nregno + nr] = 1;
5090
5091 if (i >= 0)
5092 {
5093 nr = HARD_REGNO_NREGS (spill_regs[i], reload_mode[r]);
5094 while (--nr >= 0)
5095 SET_HARD_REG_BIT (reg_is_output_reload, spill_regs[i] + nr);
5096 }
5097
/* An output reload can only be needed at these stages; anything
   else indicates an inconsistency upstream.  */
5098 if (reload_when_needed[r] != RELOAD_OTHER
5099 && reload_when_needed[r] != RELOAD_FOR_OUTPUT
5100 && reload_when_needed[r] != RELOAD_FOR_INSN)
5101 abort ();
5102 }
5103 }
5104 }
5105 \f
5106 /* If SMALL_REGISTER_CLASSES are defined, we may not have merged two
5107 reloads of the same item for fear that we might not have enough reload
5108 registers. However, normally they will get the same reload register
5109 and hence actually need not be loaded twice.
5110
5111 Here we check for the most common case of this phenomenon: when we have
5112 a number of reloads for the same object, each of which were allocated
5113 the same reload_reg_rtx, that reload_reg_rtx is not used for any other
5114 reload, and is not modified in the insn itself. If we find such,
5115 merge all the reloads and set the resulting reload to RELOAD_OTHER.
5116 This will not increase the number of spill registers needed and will
5117 prevent redundant code. */
5118
5119 #ifdef SMALL_REGISTER_CLASSES
5120
5121 static void
5122 merge_assigned_reloads (insn)
5123 rtx insn;
5124 {
5125 int i, j;
5126
5127 /* Scan all the reloads looking for ones that only load values and
5128 are not already RELOAD_OTHER and ones whose reload_reg_rtx are
5129 assigned and not modified by INSN. */
5130
5131 for (i = 0; i < n_reloads; i++)
5132 {
5133 if (reload_in[i] == 0 || reload_when_needed[i] == RELOAD_OTHER
5134 || reload_out[i] != 0 || reload_reg_rtx[i] == 0
5135 || reg_set_p (reload_reg_rtx[i], insn))
5136 continue;
5137
5138 /* Look at all other reloads. Ensure that the only use of this
5139 reload_reg_rtx is in a reload that just loads the same value
5140 as we do. Note that any secondary reloads must be of the identical
5141 class since the values, modes, and result registers are the
5142 same, so we need not do anything with any secondary reloads. */
5143
5144 for (j = 0; j < n_reloads; j++)
5145 {
5146 if (i == j || reload_reg_rtx[j] == 0
5147 || ! reg_overlap_mentioned_p (reload_reg_rtx[j],
5148 reload_reg_rtx[i]))
5149 continue;
5150
5151 /* If the reload regs aren't exactly the same (e.g, different modes)
5152 or if the values are different, we can't merge anything with this
5153 reload register. */
5154
5155 if (! rtx_equal_p (reload_reg_rtx[i], reload_reg_rtx[j])
5156 || reload_out[j] != 0 || reload_in[j] == 0
5157 || ! rtx_equal_p (reload_in[i], reload_in[j]))
5158 break;
5159 }
5160
5161 /* If all is OK, merge the reloads. Only set this to RELOAD_OTHER if
5162 we, in fact, found any matching reloads. */
5163
5164 if (j == n_reloads)
5165 {
5166 for (j = 0; j < n_reloads; j++)
5167 if (i != j && reload_reg_rtx[j] != 0
5168 && rtx_equal_p (reload_reg_rtx[i], reload_reg_rtx[j]))
5169 {
5170 reload_when_needed[i] = RELOAD_OTHER;
5171 reload_in[j] = 0;
5172 transfer_replacements (i, j);
5173 }
5174
5175 /* If this is now RELOAD_OTHER, look for any reloads that load
5176 parts of this operand and set them to RELOAD_FOR_OTHER_ADDRESS
5177 if they were for inputs, RELOAD_OTHER for outputs. Note that
5178 this test is equivalent to looking for reloads for this operand
5179 number. */
5180
5181 if (reload_when_needed[i] == RELOAD_OTHER)
5182 for (j = 0; j < n_reloads; j++)
5183 if (reload_in[j] != 0
5184 && reload_when_needed[i] != RELOAD_OTHER
5185 && reg_overlap_mentioned_for_reload_p (reload_in[j],
5186 reload_in[i]))
5187 reload_when_needed[j]
5188 = reload_when_needed[i] == RELOAD_FOR_INPUT_ADDRESS
5189 ? RELOAD_FOR_OTHER_ADDRESS : RELOAD_OTHER;
5190 }
5191 }
5192 }
5193 #endif /* SMALL_REGISTER_CLASSES */
5194 \f
5195 /* Output insns to reload values in and out of the chosen reload regs. */
5196
5197 static void
5198 emit_reload_insns (insn)
5199 rtx insn;
5200 {
5201 register int j;
5202 rtx input_reload_insns[MAX_RECOG_OPERANDS];
5203 rtx other_input_address_reload_insns = 0;
5204 rtx other_input_reload_insns = 0;
5205 rtx input_address_reload_insns[MAX_RECOG_OPERANDS];
5206 rtx output_reload_insns[MAX_RECOG_OPERANDS];
5207 rtx output_address_reload_insns[MAX_RECOG_OPERANDS];
5208 rtx operand_reload_insns = 0;
5209 rtx following_insn = NEXT_INSN (insn);
5210 rtx before_insn = insn;
5211 int special;
5212 /* Values to be put in spill_reg_store are put here first. */
5213 rtx new_spill_reg_store[FIRST_PSEUDO_REGISTER];
5214
5215 for (j = 0; j < reload_n_operands; j++)
5216 input_reload_insns[j] = input_address_reload_insns[j]
5217 = output_reload_insns[j] = output_address_reload_insns[j] = 0;
5218
5219 /* If this is a CALL_INSN preceded by USE insns, any reload insns
5220 must go in front of the first USE insn, not in front of INSN. */
5221
5222 if (GET_CODE (insn) == CALL_INSN && GET_CODE (PREV_INSN (insn)) == INSN
5223 && GET_CODE (PATTERN (PREV_INSN (insn))) == USE)
5224 while (GET_CODE (PREV_INSN (before_insn)) == INSN
5225 && GET_CODE (PATTERN (PREV_INSN (before_insn))) == USE)
5226 before_insn = PREV_INSN (before_insn);
5227
5228 /* If this insn is followed by any CLOBBER insns made by find_reloads,
5229 put our reloads after them since they may otherwise be
5230 misinterpreted. */
5231
5232 while (NEXT_INSN (following_insn) != 0
5233 && GET_CODE (NEXT_INSN (following_insn)) == INSN
5234 && GET_MODE (NEXT_INSN (following_insn)) == DImode
5235 && GET_CODE (PATTERN (NEXT_INSN (following_insn))) == CLOBBER)
5236 following_insn = NEXT_INSN (following_insn);
5237
5238 /* Now output the instructions to copy the data into and out of the
5239 reload registers. Do these in the order that the reloads were reported,
5240 since reloads of base and index registers precede reloads of operands
5241 and the operands may need the base and index registers reloaded. */
5242
5243 for (j = 0; j < n_reloads; j++)
5244 {
5245 register rtx old;
5246 rtx oldequiv_reg = 0;
5247 rtx store_insn = 0;
5248
5249 old = reload_in[j];
5250 if (old != 0 && ! reload_inherited[j]
5251 && ! rtx_equal_p (reload_reg_rtx[j], old)
5252 && reload_reg_rtx[j] != 0)
5253 {
5254 register rtx reloadreg = reload_reg_rtx[j];
5255 rtx oldequiv = 0;
5256 enum machine_mode mode;
5257 rtx *where;
5258
5259 /* Determine the mode to reload in.
5260 This is very tricky because we have three to choose from.
5261 There is the mode the insn operand wants (reload_inmode[J]).
5262 There is the mode of the reload register RELOADREG.
5263 There is the intrinsic mode of the operand, which we could find
5264 by stripping some SUBREGs.
5265 It turns out that RELOADREG's mode is irrelevant:
5266 we can change that arbitrarily.
5267
5268 Consider (SUBREG:SI foo:QI) as an operand that must be SImode;
5269 then the reload reg may not support QImode moves, so use SImode.
5270 If foo is in memory due to spilling a pseudo reg, this is safe,
5271 because the QImode value is in the least significant part of a
5272 slot big enough for a SImode. If foo is some other sort of
5273 memory reference, then it is impossible to reload this case,
5274 so previous passes had better make sure this never happens.
5275
5276 Then consider a one-word union which has SImode and one of its
5277 members is a float, being fetched as (SUBREG:SF union:SI).
5278 We must fetch that as SFmode because we could be loading into
5279 a float-only register. In this case OLD's mode is correct.
5280
5281 Consider an immediate integer: it has VOIDmode. Here we need
5282 to get a mode from something else.
5283
5284 In some cases, there is a fourth mode, the operand's
5285 containing mode. If the insn specifies a containing mode for
5286 this operand, it overrides all others.
5287
5288 I am not sure whether the algorithm here is always right,
5289 but it does the right things in those cases. */
5290
5291 mode = GET_MODE (old);
5292 if (mode == VOIDmode)
5293 mode = reload_inmode[j];
5294
5295 #ifdef SECONDARY_INPUT_RELOAD_CLASS
5296 /* If we need a secondary register for this operation, see if
5297 the value is already in a register in that class. Don't
5298 do this if the secondary register will be used as a scratch
5299 register. */
5300
5301 if (reload_secondary_reload[j] >= 0
5302 && reload_secondary_icode[j] == CODE_FOR_nothing
5303 && optimize)
5304 oldequiv
5305 = find_equiv_reg (old, insn,
5306 reload_reg_class[reload_secondary_reload[j]],
5307 -1, NULL_PTR, 0, mode);
5308 #endif
5309
5310 /* If reloading from memory, see if there is a register
5311 that already holds the same value. If so, reload from there.
5312 We can pass 0 as the reload_reg_p argument because
5313 any other reload has either already been emitted,
5314 in which case find_equiv_reg will see the reload-insn,
5315 or has yet to be emitted, in which case it doesn't matter
5316 because we will use this equiv reg right away. */
5317
5318 if (oldequiv == 0 && optimize
5319 && (GET_CODE (old) == MEM
5320 || (GET_CODE (old) == REG
5321 && REGNO (old) >= FIRST_PSEUDO_REGISTER
5322 && reg_renumber[REGNO (old)] < 0)))
5323 oldequiv = find_equiv_reg (old, insn, ALL_REGS,
5324 -1, NULL_PTR, 0, mode);
5325
5326 if (oldequiv)
5327 {
5328 int regno = true_regnum (oldequiv);
5329
5330 /* If OLDEQUIV is a spill register, don't use it for this
5331 if any other reload needs it at an earlier stage of this insn
5332 or at this stage. */
5333 if (spill_reg_order[regno] >= 0
5334 && (! reload_reg_free_p (regno, reload_opnum[j],
5335 reload_when_needed[j])
5336 || ! reload_reg_free_before_p (regno, reload_opnum[j],
5337 reload_when_needed[j])))
5338 oldequiv = 0;
5339
5340 /* If OLDEQUIV is not a spill register,
5341 don't use it if any other reload wants it. */
5342 if (spill_reg_order[regno] < 0)
5343 {
5344 int k;
5345 for (k = 0; k < n_reloads; k++)
5346 if (reload_reg_rtx[k] != 0 && k != j
5347 && reg_overlap_mentioned_for_reload_p (reload_reg_rtx[k],
5348 oldequiv))
5349 {
5350 oldequiv = 0;
5351 break;
5352 }
5353 }
5354
5355 /* If it is no cheaper to copy from OLDEQUIV into the
5356 reload register than it would be to move from memory,
5357 don't use it. Likewise, if we need a secondary register
5358 or memory. */
5359
5360 if (oldequiv != 0
5361 && ((REGNO_REG_CLASS (regno) != reload_reg_class[j]
5362 && (REGISTER_MOVE_COST (REGNO_REG_CLASS (regno),
5363 reload_reg_class[j])
5364 >= MEMORY_MOVE_COST (mode)))
5365 #ifdef SECONDARY_INPUT_RELOAD_CLASS
5366 || (SECONDARY_INPUT_RELOAD_CLASS (reload_reg_class[j],
5367 mode, oldequiv)
5368 != NO_REGS)
5369 #endif
5370 #ifdef SECONDARY_MEMORY_NEEDED
5371 || SECONDARY_MEMORY_NEEDED (reload_reg_class[j],
5372 REGNO_REG_CLASS (regno),
5373 mode)
5374 #endif
5375 ))
5376 oldequiv = 0;
5377 }
5378
5379 if (oldequiv == 0)
5380 oldequiv = old;
5381 else if (GET_CODE (oldequiv) == REG)
5382 oldequiv_reg = oldequiv;
5383 else if (GET_CODE (oldequiv) == SUBREG)
5384 oldequiv_reg = SUBREG_REG (oldequiv);
5385
5386 /* Encapsulate both RELOADREG and OLDEQUIV into that mode,
5387 then load RELOADREG from OLDEQUIV. */
5388
5389 if (GET_MODE (reloadreg) != mode)
5390 reloadreg = gen_lowpart_common (mode, reloadreg);
5391 while (GET_CODE (oldequiv) == SUBREG && GET_MODE (oldequiv) != mode)
5392 oldequiv = SUBREG_REG (oldequiv);
5393 if (GET_MODE (oldequiv) != VOIDmode
5394 && mode != GET_MODE (oldequiv))
5395 oldequiv = gen_rtx (SUBREG, mode, oldequiv, 0);
5396
5397 /* Switch to the right place to emit the reload insns. */
5398 switch (reload_when_needed[j])
5399 {
5400 case RELOAD_OTHER:
5401 where = &other_input_reload_insns;
5402 break;
5403 case RELOAD_FOR_INPUT:
5404 where = &input_reload_insns[reload_opnum[j]];
5405 break;
5406 case RELOAD_FOR_INPUT_ADDRESS:
5407 where = &input_address_reload_insns[reload_opnum[j]];
5408 break;
5409 case RELOAD_FOR_OUTPUT_ADDRESS:
5410 where = &output_address_reload_insns[reload_opnum[j]];
5411 break;
5412 case RELOAD_FOR_OPERAND_ADDRESS:
5413 where = &operand_reload_insns;
5414 break;
5415 case RELOAD_FOR_OTHER_ADDRESS:
5416 where = &other_input_address_reload_insns;
5417 break;
5418 default:
5419 abort ();
5420 }
5421
5422 push_to_sequence (*where);
5423 special = 0;
5424
5425 /* Auto-increment addresses must be reloaded in a special way. */
5426 if (GET_CODE (oldequiv) == POST_INC
5427 || GET_CODE (oldequiv) == POST_DEC
5428 || GET_CODE (oldequiv) == PRE_INC
5429 || GET_CODE (oldequiv) == PRE_DEC)
5430 {
5431 /* We are not going to bother supporting the case where a
5432 incremented register can't be copied directly from
5433 OLDEQUIV since this seems highly unlikely. */
5434 if (reload_secondary_reload[j] >= 0)
5435 abort ();
5436 /* Prevent normal processing of this reload. */
5437 special = 1;
5438 /* Output a special code sequence for this case. */
5439 inc_for_reload (reloadreg, oldequiv, reload_inc[j]);
5440 }
5441
5442 /* If we are reloading a pseudo-register that was set by the previous
5443 insn, see if we can get rid of that pseudo-register entirely
5444 by redirecting the previous insn into our reload register. */
5445
5446 else if (optimize && GET_CODE (old) == REG
5447 && REGNO (old) >= FIRST_PSEUDO_REGISTER
5448 && dead_or_set_p (insn, old)
5449 /* This is unsafe if some other reload
5450 uses the same reg first. */
5451 && reload_reg_free_before_p (REGNO (reloadreg),
5452 reload_opnum[j],
5453 reload_when_needed[j]))
5454 {
5455 rtx temp = PREV_INSN (insn);
5456 while (temp && GET_CODE (temp) == NOTE)
5457 temp = PREV_INSN (temp);
5458 if (temp
5459 && GET_CODE (temp) == INSN
5460 && GET_CODE (PATTERN (temp)) == SET
5461 && SET_DEST (PATTERN (temp)) == old
5462 /* Make sure we can access insn_operand_constraint. */
5463 && asm_noperands (PATTERN (temp)) < 0
5464 /* This is unsafe if prev insn rejects our reload reg. */
5465 && constraint_accepts_reg_p (insn_operand_constraint[recog_memoized (temp)][0],
5466 reloadreg)
5467 /* This is unsafe if operand occurs more than once in current
5468 insn. Perhaps some occurrences aren't reloaded. */
5469 && count_occurrences (PATTERN (insn), old) == 1
5470 /* Don't risk splitting a matching pair of operands. */
5471 && ! reg_mentioned_p (old, SET_SRC (PATTERN (temp))))
5472 {
5473 /* Store into the reload register instead of the pseudo. */
5474 SET_DEST (PATTERN (temp)) = reloadreg;
5475 /* If these are the only uses of the pseudo reg,
5476 pretend for GDB it lives in the reload reg we used. */
5477 if (reg_n_deaths[REGNO (old)] == 1
5478 && reg_n_sets[REGNO (old)] == 1)
5479 {
5480 reg_renumber[REGNO (old)] = REGNO (reload_reg_rtx[j]);
5481 alter_reg (REGNO (old), -1);
5482 }
5483 special = 1;
5484 }
5485 }
5486
5487 /* We can't do that, so output an insn to load RELOADREG. */
5488
5489 if (! special)
5490 {
5491 #ifdef SECONDARY_INPUT_RELOAD_CLASS
5492 rtx second_reload_reg = 0;
5493 enum insn_code icode;
5494
5495 /* If we have a secondary reload, pick up the secondary register
5496 and icode, if any. If OLDEQUIV and OLD are different or
5497 if this is an in-out reload, recompute whether or not we
5498 still need a secondary register and what the icode should
5499 be. If we still need a secondary register and the class or
5500 icode is different, go back to reloading from OLD if using
5501 OLDEQUIV means that we got the wrong type of register. We
5502 cannot have different class or icode due to an in-out reload
5503 because we don't make such reloads when both the input and
5504 output need secondary reload registers. */
5505
5506 if (reload_secondary_reload[j] >= 0)
5507 {
5508 int secondary_reload = reload_secondary_reload[j];
5509 rtx real_oldequiv = oldequiv;
5510 rtx real_old = old;
5511
5512 /* If OLDEQUIV is a pseudo with a MEM, get the real MEM
5513 and similarly for OLD.
5514 See comments in find_secondary_reload in reload.c. */
5515 if (GET_CODE (oldequiv) == REG
5516 && REGNO (oldequiv) >= FIRST_PSEUDO_REGISTER
5517 && reg_equiv_mem[REGNO (oldequiv)] != 0)
5518 real_oldequiv = reg_equiv_mem[REGNO (oldequiv)];
5519
5520 if (GET_CODE (old) == REG
5521 && REGNO (old) >= FIRST_PSEUDO_REGISTER
5522 && reg_equiv_mem[REGNO (old)] != 0)
5523 real_old = reg_equiv_mem[REGNO (old)];
5524
5525 second_reload_reg = reload_reg_rtx[secondary_reload];
5526 icode = reload_secondary_icode[j];
5527
5528 if ((old != oldequiv && ! rtx_equal_p (old, oldequiv))
5529 || (reload_in[j] != 0 && reload_out[j] != 0))
5530 {
5531 enum reg_class new_class
5532 = SECONDARY_INPUT_RELOAD_CLASS (reload_reg_class[j],
5533 mode, real_oldequiv);
5534
5535 if (new_class == NO_REGS)
5536 second_reload_reg = 0;
5537 else
5538 {
5539 enum insn_code new_icode;
5540 enum machine_mode new_mode;
5541
5542 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) new_class],
5543 REGNO (second_reload_reg)))
5544 oldequiv = old, real_oldequiv = real_old;
5545 else
5546 {
5547 new_icode = reload_in_optab[(int) mode];
5548 if (new_icode != CODE_FOR_nothing
5549 && ((insn_operand_predicate[(int) new_icode][0]
5550 && ! ((*insn_operand_predicate[(int) new_icode][0])
5551 (reloadreg, mode)))
5552 || (insn_operand_predicate[(int) new_icode][1]
5553 && ! ((*insn_operand_predicate[(int) new_icode][1])
5554 (real_oldequiv, mode)))))
5555 new_icode = CODE_FOR_nothing;
5556
5557 if (new_icode == CODE_FOR_nothing)
5558 new_mode = mode;
5559 else
5560 new_mode = insn_operand_mode[new_icode][2];
5561
5562 if (GET_MODE (second_reload_reg) != new_mode)
5563 {
5564 if (!HARD_REGNO_MODE_OK (REGNO (second_reload_reg),
5565 new_mode))
5566 oldequiv = old, real_oldequiv = real_old;
5567 else
5568 second_reload_reg
5569 = gen_rtx (REG, new_mode,
5570 REGNO (second_reload_reg));
5571 }
5572 }
5573 }
5574 }
5575
5576 /* If we still need a secondary reload register, check
5577 to see if it is being used as a scratch or intermediate
5578 register and generate code appropriately. If we need
5579 a scratch register, use REAL_OLDEQUIV since the form of
5580 the insn may depend on the actual address if it is
5581 a MEM. */
5582
5583 if (second_reload_reg)
5584 {
5585 if (icode != CODE_FOR_nothing)
5586 {
5587 emit_insn (GEN_FCN (icode) (reloadreg, real_oldequiv,
5588 second_reload_reg));
5589 special = 1;
5590 }
5591 else
5592 {
5593 /* See if we need a scratch register to load the
5594 intermediate register (a tertiary reload). */
5595 enum insn_code tertiary_icode
5596 = reload_secondary_icode[secondary_reload];
5597
5598 if (tertiary_icode != CODE_FOR_nothing)
5599 {
5600 rtx third_reload_reg
5601 = reload_reg_rtx[reload_secondary_reload[secondary_reload]];
5602
5603 emit_insn ((GEN_FCN (tertiary_icode)
5604 (second_reload_reg, real_oldequiv,
5605 third_reload_reg)));
5606 }
5607 else
5608 gen_input_reload (second_reload_reg, oldequiv,
5609 reload_opnum[j],
5610 reload_when_needed[j]);
5611
5612 oldequiv = second_reload_reg;
5613 }
5614 }
5615 }
5616 #endif
5617
5618 if (! special)
5619 gen_input_reload (reloadreg, oldequiv, reload_opnum[j],
5620 reload_when_needed[j]);
5621
5622 #if defined(SECONDARY_INPUT_RELOAD_CLASS) && defined(PRESERVE_DEATH_INFO_REGNO_P)
5623 /* We may have to make a REG_DEAD note for the secondary reload
5624 register in the insns we just made. Find the last insn that
5625 mentioned the register. */
5626 if (! special && second_reload_reg
5627 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (second_reload_reg)))
5628 {
5629 rtx prev;
5630
5631 for (prev = get_last_insn (); prev;
5632 prev = PREV_INSN (prev))
5633 if (GET_RTX_CLASS (GET_CODE (prev) == 'i')
5634 && reg_overlap_mentioned_for_reload_p (second_reload_reg,
5635 PATTERN (prev)))
5636 {
5637 REG_NOTES (prev) = gen_rtx (EXPR_LIST, REG_DEAD,
5638 second_reload_reg,
5639 REG_NOTES (prev));
5640 break;
5641 }
5642 }
5643 #endif
5644 }
5645
5646 /* End this sequence. */
5647 *where = get_insns ();
5648 end_sequence ();
5649 }
5650
5651 /* Add a note saying the input reload reg
5652 dies in this insn, if anyone cares. */
5653 #ifdef PRESERVE_DEATH_INFO_REGNO_P
5654 if (old != 0
5655 && reload_reg_rtx[j] != old
5656 && reload_reg_rtx[j] != 0
5657 && reload_out[j] == 0
5658 && ! reload_inherited[j]
5659 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (reload_reg_rtx[j])))
5660 {
5661 register rtx reloadreg = reload_reg_rtx[j];
5662
5663 #if 0
5664 /* We can't abort here because we need to support this for sched.c.
5665 It's not terrible to miss a REG_DEAD note, but we should try
5666 to figure out how to do this correctly. */
5667 /* The code below is incorrect for address-only reloads. */
5668 if (reload_when_needed[j] != RELOAD_OTHER
5669 && reload_when_needed[j] != RELOAD_FOR_INPUT)
5670 abort ();
5671 #endif
5672
5673 /* Add a death note to this insn, for an input reload. */
5674
5675 if ((reload_when_needed[j] == RELOAD_OTHER
5676 || reload_when_needed[j] == RELOAD_FOR_INPUT)
5677 && ! dead_or_set_p (insn, reloadreg))
5678 REG_NOTES (insn)
5679 = gen_rtx (EXPR_LIST, REG_DEAD,
5680 reloadreg, REG_NOTES (insn));
5681 }
5682
5683 /* When we inherit a reload, the last marked death of the reload reg
5684 may no longer really be a death. */
5685 if (reload_reg_rtx[j] != 0
5686 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (reload_reg_rtx[j]))
5687 && reload_inherited[j])
5688 {
5689 /* Handle inheriting an output reload.
5690 Remove the death note from the output reload insn. */
5691 if (reload_spill_index[j] >= 0
5692 && GET_CODE (reload_in[j]) == REG
5693 && spill_reg_store[reload_spill_index[j]] != 0
5694 && find_regno_note (spill_reg_store[reload_spill_index[j]],
5695 REG_DEAD, REGNO (reload_reg_rtx[j])))
5696 remove_death (REGNO (reload_reg_rtx[j]),
5697 spill_reg_store[reload_spill_index[j]]);
5698 /* Likewise for input reloads that were inherited. */
5699 else if (reload_spill_index[j] >= 0
5700 && GET_CODE (reload_in[j]) == REG
5701 && spill_reg_store[reload_spill_index[j]] == 0
5702 && reload_inheritance_insn[j] != 0
5703 && find_regno_note (reload_inheritance_insn[j], REG_DEAD,
5704 REGNO (reload_reg_rtx[j])))
5705 remove_death (REGNO (reload_reg_rtx[j]),
5706 reload_inheritance_insn[j]);
5707 else
5708 {
5709 rtx prev;
5710
5711 /* We got this register from find_equiv_reg.
5712 Search back for its last death note and get rid of it.
5713 But don't search back too far.
5714 Don't go past a place where this reg is set,
5715 since a death note before that remains valid. */
5716 for (prev = PREV_INSN (insn);
5717 prev && GET_CODE (prev) != CODE_LABEL;
5718 prev = PREV_INSN (prev))
5719 if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
5720 && dead_or_set_p (prev, reload_reg_rtx[j]))
5721 {
5722 if (find_regno_note (prev, REG_DEAD,
5723 REGNO (reload_reg_rtx[j])))
5724 remove_death (REGNO (reload_reg_rtx[j]), prev);
5725 break;
5726 }
5727 }
5728 }
5729
5730 /* We might have used find_equiv_reg above to choose an alternate
5731 place from which to reload. If so, and it died, we need to remove
5732 that death and move it to one of the insns we just made. */
5733
5734 if (oldequiv_reg != 0
5735 && PRESERVE_DEATH_INFO_REGNO_P (true_regnum (oldequiv_reg)))
5736 {
5737 rtx prev, prev1;
5738
5739 for (prev = PREV_INSN (insn); prev && GET_CODE (prev) != CODE_LABEL;
5740 prev = PREV_INSN (prev))
5741 if (GET_RTX_CLASS (GET_CODE (prev)) == 'i'
5742 && dead_or_set_p (prev, oldequiv_reg))
5743 {
5744 if (find_regno_note (prev, REG_DEAD, REGNO (oldequiv_reg)))
5745 {
5746 for (prev1 = this_reload_insn;
5747 prev1; prev1 = PREV_INSN (prev1))
5748 if (GET_RTX_CLASS (GET_CODE (prev1) == 'i')
5749 && reg_overlap_mentioned_for_reload_p (oldequiv_reg,
5750 PATTERN (prev1)))
5751 {
5752 REG_NOTES (prev1) = gen_rtx (EXPR_LIST, REG_DEAD,
5753 oldequiv_reg,
5754 REG_NOTES (prev1));
5755 break;
5756 }
5757 remove_death (REGNO (oldequiv_reg), prev);
5758 }
5759 break;
5760 }
5761 }
5762 #endif
5763
5764 /* If we are reloading a register that was recently stored in with an
5765 output-reload, see if we can prove there was
5766 actually no need to store the old value in it. */
5767
5768 if (optimize && reload_inherited[j] && reload_spill_index[j] >= 0
5769 && reload_in[j] != 0
5770 && GET_CODE (reload_in[j]) == REG
5771 #if 0
5772 /* There doesn't seem to be any reason to restrict this to pseudos
5773 and doing so loses in the case where we are copying from a
5774 register of the wrong class. */
5775 && REGNO (reload_in[j]) >= FIRST_PSEUDO_REGISTER
5776 #endif
5777 && spill_reg_store[reload_spill_index[j]] != 0
5778 /* This is unsafe if some other reload uses the same reg first. */
5779 && reload_reg_free_before_p (spill_regs[reload_spill_index[j]],
5780 reload_opnum[j], reload_when_needed[j])
5781 && dead_or_set_p (insn, reload_in[j])
5782 /* This is unsafe if operand occurs more than once in current
5783 insn. Perhaps some occurrences weren't reloaded. */
5784 && count_occurrences (PATTERN (insn), reload_in[j]) == 1)
5785 delete_output_reload (insn, j,
5786 spill_reg_store[reload_spill_index[j]]);
5787
5788 /* Input-reloading is done. Now do output-reloading,
5789 storing the value from the reload-register after the main insn
5790 if reload_out[j] is nonzero.
5791
5792 ??? At some point we need to support handling output reloads of
5793 JUMP_INSNs or insns that set cc0. */
5794 old = reload_out[j];
5795 if (old != 0
5796 && reload_reg_rtx[j] != old
5797 && reload_reg_rtx[j] != 0)
5798 {
5799 register rtx reloadreg = reload_reg_rtx[j];
5800 register rtx second_reloadreg = 0;
5801 rtx note, p;
5802 enum machine_mode mode;
5803 int special = 0;
5804
5805 /* An output operand that dies right away does need a reload,
5806 but need not be copied from it. Show the new location in the
5807 REG_UNUSED note. */
5808 if ((GET_CODE (old) == REG || GET_CODE (old) == SCRATCH)
5809 && (note = find_reg_note (insn, REG_UNUSED, old)) != 0)
5810 {
5811 XEXP (note, 0) = reload_reg_rtx[j];
5812 continue;
5813 }
5814 else if (GET_CODE (old) == SCRATCH)
5815 /* If we aren't optimizing, there won't be a REG_UNUSED note,
5816 but we don't want to make an output reload. */
5817 continue;
5818
5819 #if 0
5820 /* Strip off of OLD any size-increasing SUBREGs such as
5821 (SUBREG:SI foo:QI 0). */
5822
5823 while (GET_CODE (old) == SUBREG && SUBREG_WORD (old) == 0
5824 && (GET_MODE_SIZE (GET_MODE (old))
5825 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (old)))))
5826 old = SUBREG_REG (old);
5827 #endif
5828
5829 /* If is a JUMP_INSN, we can't support output reloads yet. */
5830 if (GET_CODE (insn) == JUMP_INSN)
5831 abort ();
5832
5833 push_to_sequence (output_reload_insns[reload_opnum[j]]);
5834
5835 /* Determine the mode to reload in.
5836 See comments above (for input reloading). */
5837
5838 mode = GET_MODE (old);
5839 if (mode == VOIDmode)
5840 {
5841 /* VOIDmode should never happen for an output. */
5842 if (asm_noperands (PATTERN (insn)) < 0)
5843 /* It's the compiler's fault. */
5844 abort ();
5845 error_for_asm (insn, "output operand is constant in `asm'");
5846 /* Prevent crash--use something we know is valid. */
5847 mode = word_mode;
5848 old = gen_rtx (REG, mode, REGNO (reloadreg));
5849 }
5850
5851 if (GET_MODE (reloadreg) != mode)
5852 reloadreg = gen_lowpart_common (mode, reloadreg);
5853
5854 #ifdef SECONDARY_OUTPUT_RELOAD_CLASS
5855
5856 /* If we need two reload regs, set RELOADREG to the intermediate
5857 one, since it will be stored into OUT. We might need a secondary
5858 register only for an input reload, so check again here. */
5859
5860 if (reload_secondary_reload[j] >= 0)
5861 {
5862 rtx real_old = old;
5863
5864 if (GET_CODE (old) == REG && REGNO (old) >= FIRST_PSEUDO_REGISTER
5865 && reg_equiv_mem[REGNO (old)] != 0)
5866 real_old = reg_equiv_mem[REGNO (old)];
5867
5868 if((SECONDARY_OUTPUT_RELOAD_CLASS (reload_reg_class[j],
5869 mode, real_old)
5870 != NO_REGS))
5871 {
5872 second_reloadreg = reloadreg;
5873 reloadreg = reload_reg_rtx[reload_secondary_reload[j]];
5874
5875 /* See if RELOADREG is to be used as a scratch register
5876 or as an intermediate register. */
5877 if (reload_secondary_icode[j] != CODE_FOR_nothing)
5878 {
5879 emit_insn ((GEN_FCN (reload_secondary_icode[j])
5880 (real_old, second_reloadreg, reloadreg)));
5881 special = 1;
5882 }
5883 else
5884 {
5885 /* See if we need both a scratch and intermediate reload
5886 register. */
5887 int secondary_reload = reload_secondary_reload[j];
5888 enum insn_code tertiary_icode
5889 = reload_secondary_icode[secondary_reload];
5890 rtx pat;
5891
5892 if (GET_MODE (reloadreg) != mode)
5893 reloadreg = gen_rtx (REG, mode, REGNO (reloadreg));
5894
5895 if (tertiary_icode != CODE_FOR_nothing)
5896 {
5897 rtx third_reloadreg
5898 = reload_reg_rtx[reload_secondary_reload[secondary_reload]];
5899 pat = (GEN_FCN (tertiary_icode)
5900 (reloadreg, second_reloadreg, third_reloadreg));
5901 }
5902 #ifdef SECONDARY_MEMORY_NEEDED
5903 /* If we need a memory location to do the move, do it that way. */
5904 else if (GET_CODE (reloadreg) == REG
5905 && REGNO (reloadreg) < FIRST_PSEUDO_REGISTER
5906 && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (REGNO (reloadreg)),
5907 REGNO_REG_CLASS (REGNO (second_reloadreg)),
5908 GET_MODE (second_reloadreg)))
5909 {
5910 /* Get the memory to use and rewrite both registers
5911 to its mode. */
5912 rtx loc
5913 = get_secondary_mem (reloadreg,
5914 GET_MODE (second_reloadreg),
5915 reload_opnum[j],
5916 reload_when_needed[j]);
5917 rtx tmp_reloadreg;
5918
5919 if (GET_MODE (loc) != GET_MODE (second_reloadreg))
5920 second_reloadreg = gen_rtx (REG, GET_MODE (loc),
5921 REGNO (second_reloadreg));
5922
5923 if (GET_MODE (loc) != GET_MODE (reloadreg))
5924 tmp_reloadreg = gen_rtx (REG, GET_MODE (loc),
5925 REGNO (reloadreg));
5926 else
5927 tmp_reloadreg = reloadreg;
5928
5929 emit_move_insn (loc, second_reloadreg);
5930 pat = gen_move_insn (tmp_reloadreg, loc);
5931 }
5932 #endif
5933 else
5934 pat = gen_move_insn (reloadreg, second_reloadreg);
5935
5936 emit_insn (pat);
5937 }
5938 }
5939 }
5940 #endif
5941
5942 /* Output the last reload insn. */
5943 if (! special)
5944 {
5945 #ifdef SECONDARY_MEMORY_NEEDED
5946 /* If we need a memory location to do the move, do it that way. */
5947 if (GET_CODE (old) == REG && REGNO (old) < FIRST_PSEUDO_REGISTER
5948 && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (REGNO (old)),
5949 REGNO_REG_CLASS (REGNO (reloadreg)),
5950 GET_MODE (reloadreg)))
5951 {
5952 /* Get the memory to use and rewrite both registers to
5953 its mode. */
5954 rtx loc = get_secondary_mem (old, GET_MODE (reloadreg),
5955 reload_opnum[j],
5956 reload_when_needed[j]);
5957
5958 if (GET_MODE (loc) != GET_MODE (reloadreg))
5959 reloadreg = gen_rtx (REG, GET_MODE (loc),
5960 REGNO (reloadreg));
5961
5962 if (GET_MODE (loc) != GET_MODE (old))
5963 old = gen_rtx (REG, GET_MODE (loc), REGNO (old));
5964
5965 emit_insn (gen_move_insn (loc, reloadreg));
5966 emit_insn (gen_move_insn (old, loc));
5967 }
5968 else
5969 #endif
5970 emit_insn (gen_move_insn (old, reloadreg));
5971 }
5972
5973 #ifdef PRESERVE_DEATH_INFO_REGNO_P
5974 /* If final will look at death notes for this reg,
5975 put one on the last output-reload insn to use it. Similarly
5976 for any secondary register. */
5977 if (PRESERVE_DEATH_INFO_REGNO_P (REGNO (reloadreg)))
5978 for (p = get_last_insn (); p; p = PREV_INSN (p))
5979 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
5980 && reg_overlap_mentioned_for_reload_p (reloadreg,
5981 PATTERN (p)))
5982 REG_NOTES (p) = gen_rtx (EXPR_LIST, REG_DEAD,
5983 reloadreg, REG_NOTES (p));
5984
5985 #ifdef SECONDARY_OUTPUT_RELOAD_CLASS
5986 if (! special
5987 && PRESERVE_DEATH_INFO_REGNO_P (REGNO (second_reloadreg)))
5988 for (p = get_last_insn (); p; p = PREV_INSN (p))
5989 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
5990 && reg_overlap_mentioned_for_reload_p (second_reloadreg,
5991 PATTERN (p)))
5992 REG_NOTES (p) = gen_rtx (EXPR_LIST, REG_DEAD,
5993 second_reloadreg, REG_NOTES (p));
5994 #endif
5995 #endif
5996 /* Look at all insns we emitted, just to be safe. */
5997 for (p = get_insns (); p; p = NEXT_INSN (p))
5998 if (GET_RTX_CLASS (GET_CODE (p)) == 'i')
5999 {
6000 /* If this output reload doesn't come from a spill reg,
6001 clear any memory of reloaded copies of the pseudo reg.
6002 If this output reload comes from a spill reg,
6003 reg_has_output_reload will make this do nothing. */
6004 note_stores (PATTERN (p), forget_old_reloads_1);
6005
6006 if (reg_mentioned_p (reload_reg_rtx[j], PATTERN (p)))
6007 store_insn = p;
6008 }
6009
6010 output_reload_insns[reload_opnum[j]] = get_insns ();
6011 end_sequence ();
6012
6013 }
6014
6015 if (reload_spill_index[j] >= 0)
6016 new_spill_reg_store[reload_spill_index[j]] = store_insn;
6017 }
6018
6019 /* Now write all the insns we made for reloads in the order expected by
6020 the allocation functions. Prior to the insn being reloaded, we write
6021 the following reloads:
6022
6023 RELOAD_FOR_OTHER_ADDRESS reloads for input addresses.
6024
6025 RELOAD_OTHER reloads.
6026
6027 For each operand, any RELOAD_FOR_INPUT_ADDRESS reloads followed by
6028 the RELOAD_FOR_INPUT reload for the operand.
6029
6030 RELOAD_FOR_OPERAND_ADDRESS reloads.
6031
6032 After the insn being reloaded, we write the following:
6033
6034 For each operand, any RELOAD_FOR_OUTPUT_ADDRESS reload followed by
6035 the RELOAD_FOR_OUTPUT reload for that operand. */
6036
6037 emit_insns_before (other_input_address_reload_insns, before_insn);
6038 emit_insns_before (other_input_reload_insns, before_insn);
6039
6040 for (j = 0; j < reload_n_operands; j++)
6041 {
6042 emit_insns_before (input_address_reload_insns[j], before_insn);
6043 emit_insns_before (input_reload_insns[j], before_insn);
6044 }
6045
6046 emit_insns_before (operand_reload_insns, before_insn);
6047
6048 for (j = 0; j < reload_n_operands; j++)
6049 {
6050 emit_insns_before (output_address_reload_insns[j], following_insn);
6051 emit_insns_before (output_reload_insns[j], following_insn);
6052 }
6053
6054 /* Move death notes from INSN
6055 to output-operand-address and output reload insns. */
6056 #ifdef PRESERVE_DEATH_INFO_REGNO_P
6057 {
6058 rtx insn1;
6059 /* Loop over those insns, last ones first. */
6060 for (insn1 = PREV_INSN (following_insn); insn1 != insn;
6061 insn1 = PREV_INSN (insn1))
6062 if (GET_CODE (insn1) == INSN && GET_CODE (PATTERN (insn1)) == SET)
6063 {
6064 rtx source = SET_SRC (PATTERN (insn1));
6065 rtx dest = SET_DEST (PATTERN (insn1));
6066
6067 /* The note we will examine next. */
6068 rtx reg_notes = REG_NOTES (insn);
6069 /* The place that pointed to this note. */
6070 rtx *prev_reg_note = &REG_NOTES (insn);
6071
6072 /* If the note is for something used in the source of this
6073 reload insn, or in the output address, move the note. */
6074 while (reg_notes)
6075 {
6076 rtx next_reg_notes = XEXP (reg_notes, 1);
6077 if (REG_NOTE_KIND (reg_notes) == REG_DEAD
6078 && GET_CODE (XEXP (reg_notes, 0)) == REG
6079 && ((GET_CODE (dest) != REG
6080 && reg_overlap_mentioned_for_reload_p (XEXP (reg_notes, 0),
6081 dest))
6082 || reg_overlap_mentioned_for_reload_p (XEXP (reg_notes, 0),
6083 source)))
6084 {
6085 *prev_reg_note = next_reg_notes;
6086 XEXP (reg_notes, 1) = REG_NOTES (insn1);
6087 REG_NOTES (insn1) = reg_notes;
6088 }
6089 else
6090 prev_reg_note = &XEXP (reg_notes, 1);
6091
6092 reg_notes = next_reg_notes;
6093 }
6094 }
6095 }
6096 #endif
6097
6098 /* For all the spill regs newly reloaded in this instruction,
6099 record what they were reloaded from, so subsequent instructions
6100 can inherit the reloads.
6101
6102 Update spill_reg_store for the reloads of this insn.
6103 Copy the elements that were updated in the loop above. */
6104
6105 for (j = 0; j < n_reloads; j++)
6106 {
6107 register int r = reload_order[j];
6108 register int i = reload_spill_index[r];
6109
6110 /* I is nonneg if this reload used one of the spill regs.
6111 If reload_reg_rtx[r] is 0, this is an optional reload
6112 that we opted to ignore.
6113
6114 Also ignore reloads that don't reach the end of the insn,
6115 since we will eventually see the one that does. */
6116
6117 if (i >= 0 && reload_reg_rtx[r] != 0
6118 && reload_reg_reaches_end_p (spill_regs[i], reload_opnum[r],
6119 reload_when_needed[r]))
6120 {
6121 /* First, clear out memory of what used to be in this spill reg.
6122 If consecutive registers are used, clear them all. */
6123 int nr
6124 = HARD_REGNO_NREGS (spill_regs[i], GET_MODE (reload_reg_rtx[r]));
6125 int k;
6126
6127 for (k = 0; k < nr; k++)
6128 {
6129 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]] = -1;
6130 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]] = 0;
6131 }
6132
6133 /* Maybe the spill reg contains a copy of reload_out. */
6134 if (reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG)
6135 {
6136 register int nregno = REGNO (reload_out[r]);
6137 int nnr = (nregno >= FIRST_PSEUDO_REGISTER ? 1
6138 : HARD_REGNO_NREGS (nregno,
6139 GET_MODE (reload_reg_rtx[r])));
6140
6141 spill_reg_store[i] = new_spill_reg_store[i];
6142 reg_last_reload_reg[nregno] = reload_reg_rtx[r];
6143
6144 /* If NREGNO is a hard register, it may occupy more than
6145 one register. If it does, say what is in the
6146 rest of the registers assuming that both registers
6147 agree on how many words the object takes. If not,
6148 invalidate the subsequent registers. */
6149
6150 if (nregno < FIRST_PSEUDO_REGISTER)
6151 for (k = 1; k < nnr; k++)
6152 reg_last_reload_reg[nregno + k]
6153 = (nr == nnr ? gen_rtx (REG, word_mode,
6154 REGNO (reload_reg_rtx[r]) + k)
6155 : 0);
6156
6157 /* Now do the inverse operation. */
6158 for (k = 0; k < nr; k++)
6159 {
6160 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
6161 = (nregno >= FIRST_PSEUDO_REGISTER || nr != nnr ? nregno
6162 : nregno + k);
6163 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]] = insn;
6164 }
6165 }
6166
6167 /* Maybe the spill reg contains a copy of reload_in. */
6168 else if (reload_out[r] == 0
6169 && reload_in[r] != 0
6170 && (GET_CODE (reload_in[r]) == REG
6171 || GET_CODE (reload_in_reg[r]) == REG))
6172 {
6173 register int nregno;
6174 int nnr;
6175
6176 if (GET_CODE (reload_in[r]) == REG)
6177 nregno = REGNO (reload_in[r]);
6178 else
6179 nregno = REGNO (reload_in_reg[r]);
6180
6181 nnr = (nregno >= FIRST_PSEUDO_REGISTER ? 1
6182 : HARD_REGNO_NREGS (nregno,
6183 GET_MODE (reload_reg_rtx[r])));
6184
6185 reg_last_reload_reg[nregno] = reload_reg_rtx[r];
6186
6187 if (nregno < FIRST_PSEUDO_REGISTER)
6188 for (k = 1; k < nnr; k++)
6189 reg_last_reload_reg[nregno + k]
6190 = (nr == nnr ? gen_rtx (REG, word_mode,
6191 REGNO (reload_reg_rtx[r]) + k)
6192 : 0);
6193
6194 /* Unless we inherited this reload, show we haven't
6195 recently done a store. */
6196 if (! reload_inherited[r])
6197 spill_reg_store[i] = 0;
6198
6199 for (k = 0; k < nr; k++)
6200 {
6201 reg_reloaded_contents[spill_reg_order[spill_regs[i] + k]]
6202 = (nregno >= FIRST_PSEUDO_REGISTER || nr != nnr ? nregno
6203 : nregno + k);
6204 reg_reloaded_insn[spill_reg_order[spill_regs[i] + k]]
6205 = insn;
6206 }
6207 }
6208 }
6209
6210 /* The following if-statement was #if 0'd in 1.34 (or before...).
6211 It's reenabled in 1.35 because supposedly nothing else
6212 deals with this problem. */
6213
6214 /* If a register gets output-reloaded from a non-spill register,
6215 that invalidates any previous reloaded copy of it.
6216 But forget_old_reloads_1 won't get to see it, because
6217 it thinks only about the original insn. So invalidate it here. */
6218 if (i < 0 && reload_out[r] != 0 && GET_CODE (reload_out[r]) == REG)
6219 {
6220 register int nregno = REGNO (reload_out[r]);
6221 reg_last_reload_reg[nregno] = 0;
6222 }
6223 }
6224 }
6225 \f
6226 /* Emit code to perform an input reload of IN to RELOADREG. IN is from
6227 operand OPNUM with reload type TYPE.
6228
6229 Returns first insn emitted. */
6230
6231 rtx
6232 gen_input_reload (reloadreg, in, opnum, type)
6233 rtx reloadreg;
6234 rtx in;
6235 int opnum;
6236 enum reload_type type;
6237 {
6238 rtx last = get_last_insn ();
6239
6240 /* How to do this reload can get quite tricky. Normally, we are being
6241 asked to reload a simple operand, such as a MEM, a constant, or a pseudo
6242 register that didn't get a hard register. In that case we can just
6243 call emit_move_insn.
6244
6245 We can also be asked to reload a PLUS that adds either two registers, or
6246 a register and a constant or MEM, or a MEM and a constant. This can
6247 occur during frame pointer elimination and while reloading addresses.
6248 This case is handled by trying to emit a single insn
6249 to perform the add. If it is not valid, we use a two insn sequence.
6250
6251 Finally, we could be called to handle an 'o' constraint by putting
6252 an address into a register. In that case, we first try to do this
6253 with a named pattern of "reload_load_address". If no such pattern
6254 exists, we just emit a SET insn and hope for the best (it will normally
6255 be valid on machines that use 'o').
6256
6257 This entire process is made complex because reload will never
6258 process the insns we generate here and so we must ensure that
6259 they will fit their constraints and also by the fact that parts of
6260 IN might be being reloaded separately and replaced with spill registers.
6261 Because of this, we are, in some sense, just guessing the right approach
6262 here. The one listed above seems to work.
6263
6264 ??? At some point, this whole thing needs to be rethought. */
6265
6266 if (GET_CODE (in) == PLUS
6267 && ((GET_CODE (XEXP (in, 0)) == REG
6268 && (GET_CODE (XEXP (in, 1)) == REG
6269 || CONSTANT_P (XEXP (in, 1))
6270 || GET_CODE (XEXP (in, 1)) == MEM))
6271 || (GET_CODE (XEXP (in, 0)) == MEM
6272 && CONSTANT_P (XEXP (in, 1)))))
6273 {
6274 /* We need to compute the sum of what is either a register and a
6275 constant, a register and memory, a hard register and a pseudo
6276 register, or memory and a constant and put it into the reload
6277 register. The best possible way of doing this is if the machine
6278 has a three-operand ADD insn that accepts the required operands.
6279
6280 The simplest approach is to try to generate such an insn and see if it
6281 is recognized and matches its constraints. If so, it can be used.
6282
6283 It might be better not to actually emit the insn unless it is valid,
6284 but we need to pass the insn as an operand to `recog' and
6285 `insn_extract' and it is simpler to emit and then delete the insn if
6286 not valid than to dummy things up. */
6287
6288 rtx op0, op1, tem, insn;
6289 int code;
6290
6291 op0 = find_replacement (&XEXP (in, 0));
6292 op1 = find_replacement (&XEXP (in, 1));
6293
6294 /* Since constraint checking is strict, commutativity won't be
6295 checked, so we need to do that here to avoid spurious failure
6296 if the add instruction is two-address and the second operand
6297 of the add is the same as the reload reg, which is frequently
6298 the case. If the insn would be A = B + A, rearrange it so
6299 it will be A = A + B as constrain_operands expects. */
6300
6301 if (GET_CODE (XEXP (in, 1)) == REG
6302 && REGNO (reloadreg) == REGNO (XEXP (in, 1)))
6303 tem = op0, op0 = op1, op1 = tem;
6304
6305 if (op0 != XEXP (in, 0) || op1 != XEXP (in, 1))
6306 in = gen_rtx (PLUS, GET_MODE (in), op0, op1);
6307
6308 insn = emit_insn (gen_rtx (SET, VOIDmode, reloadreg, in));
6309 code = recog_memoized (insn);
6310
6311 if (code >= 0)
6312 {
6313 insn_extract (insn);
6314 /* We want constrain operands to treat this insn strictly in
6315 its validity determination, i.e., the way it would after reload
6316 has completed. */
6317 if (constrain_operands (code, 1))
6318 return insn;
6319 }
6320
6321 delete_insns_since (last);
6322
6323 /* If that failed, we must use a conservative two-insn sequence.
6324 use move to copy constant, MEM, or pseudo register to the reload
6325 register since "move" will be able to handle an arbitrary operand,
6326 unlike add which can't, in general. Then add the registers.
6327
6328 If there is another way to do this for a specific machine, a
6329 DEFINE_PEEPHOLE should be specified that recognizes the sequence
6330 we emit below. */
6331
6332 if (CONSTANT_P (op1) || GET_CODE (op1) == MEM
6333 || (GET_CODE (op1) == REG
6334 && REGNO (op1) >= FIRST_PSEUDO_REGISTER))
6335 tem = op0, op0 = op1, op1 = tem;
6336
6337 emit_insn (gen_move_insn (reloadreg, op0));
6338
6339 /* If OP0 and OP1 are the same, we can use RELOADREG for OP1.
6340 This fixes a problem on the 32K where the stack pointer cannot
6341 be used as an operand of an add insn. */
6342
6343 if (rtx_equal_p (op0, op1))
6344 op1 = reloadreg;
6345
6346 emit_insn (gen_add2_insn (reloadreg, op1));
6347 }
6348
6349 #ifdef SECONDARY_MEMORY_NEEDED
6350 /* If we need a memory location to do the move, do it that way. */
6351 else if (GET_CODE (in) == REG && REGNO (in) < FIRST_PSEUDO_REGISTER
6352 && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (REGNO (in)),
6353 REGNO_REG_CLASS (REGNO (reloadreg)),
6354 GET_MODE (reloadreg)))
6355 {
6356 /* Get the memory to use and rewrite both registers to its mode. */
6357 rtx loc = get_secondary_mem (in, GET_MODE (reloadreg), opnum, type);
6358
6359 if (GET_MODE (loc) != GET_MODE (reloadreg))
6360 reloadreg = gen_rtx (REG, GET_MODE (loc), REGNO (reloadreg));
6361
6362 if (GET_MODE (loc) != GET_MODE (in))
6363 in = gen_rtx (REG, GET_MODE (loc), REGNO (in));
6364
6365 emit_insn (gen_move_insn (loc, in));
6366 emit_insn (gen_move_insn (reloadreg, loc));
6367 }
6368 #endif
6369
6370 /* If IN is a simple operand, use gen_move_insn. */
6371 else if (GET_RTX_CLASS (GET_CODE (in)) == 'o' || GET_CODE (in) == SUBREG)
6372 emit_insn (gen_move_insn (reloadreg, in));
6373
6374 #ifdef HAVE_reload_load_address
6375 else if (HAVE_reload_load_address)
6376 emit_insn (gen_reload_load_address (reloadreg, in));
6377 #endif
6378
6379 /* Otherwise, just write (set REGLOADREG IN) and hope for the best. */
6380 else
6381 emit_insn (gen_rtx (SET, VOIDmode, reloadreg, in));
6382
6383 /* Return the first insn emitted.
6384 We can not just return get_last_insn, because there may have
6385 been multiple instructions emitted. Also note that gen_move_insn may
6386 emit more than one insn itself, so we can not assume that there is one
6387 insn emitted per emit_insn_before call. */
6388
6389 return last ? NEXT_INSN (last) : get_insns ();
6390 }
6391 \f
6392 /* Delete a previously made output-reload
6393 whose result we now believe is not needed.
6394 First we double-check.
6395
6396 INSN is the insn now being processed.
6397 OUTPUT_RELOAD_INSN is the insn of the output reload.
6398 J is the reload-number for this insn. */
6399
static void
delete_output_reload (insn, j, output_reload_insn)
     rtx insn;
     int j;
     rtx output_reload_insn;
{
  register rtx i1;

  /* Get the raw pseudo-register referred to.  */

  /* Strip any SUBREGs so REG is the underlying (pseudo) REG rtx that
     reload j reads.  */
  rtx reg = reload_in[j];
  while (GET_CODE (reg) == SUBREG)
    reg = SUBREG_REG (reg);

  /* If the pseudo-reg we are reloading is no longer referenced
     anywhere between the store into it and here,
     and no jumps or labels intervene, then the value can get
     here through the reload reg alone.
     Otherwise, give up--return.  */
  for (i1 = NEXT_INSN (output_reload_insn);
       i1 != insn; i1 = NEXT_INSN (i1))
    {
      /* A label or jump means control flow may enter or leave this
	 range, so the pseudo's value could be needed elsewhere.  */
      if (GET_CODE (i1) == CODE_LABEL || GET_CODE (i1) == JUMP_INSN)
	return;
      /* Any intervening reference to the pseudo means the store into
	 it is still live; don't delete it.  */
      if ((GET_CODE (i1) == INSN || GET_CODE (i1) == CALL_INSN)
	  && reg_mentioned_p (reg, PATTERN (i1)))
	return;
    }

  /* If this insn will store in the pseudo again,
     the previous store can be removed.  */
  if (reload_out[j] == reload_in[j])
    delete_insn (output_reload_insn);

  /* See if the pseudo reg has been completely replaced
     with reload regs.  If so, delete the store insn
     and forget we had a stack slot for the pseudo.  */
  /* The conditions: the pseudo dies exactly once, lives entirely in
     one basic block, and dies in INSN itself.  */
  else if (reg_n_deaths[REGNO (reg)] == 1
	   && reg_basic_block[REGNO (reg)] >= 0
	   && find_regno_note (insn, REG_DEAD, REGNO (reg)))
    {
      rtx i2;

      /* We know that it was used only between here
	 and the beginning of the current basic block.
	 (We also know that the last use before INSN was
	 the output reload we are thinking of deleting, but never mind that.)
	 Search that range; see if any ref remains.  */
      for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
	{
	  rtx set = single_set (i2);

	  /* Uses which just store in the pseudo don't count,
	     since if they are the only uses, they are dead.  */
	  if (set != 0 && SET_DEST (set) == reg)
	    continue;
	  /* Stop at the head of the basic block; nothing before it
	     can reference the pseudo (reg_basic_block >= 0 above).  */
	  if (GET_CODE (i2) == CODE_LABEL
	      || GET_CODE (i2) == JUMP_INSN)
	    break;
	  if ((GET_CODE (i2) == INSN || GET_CODE (i2) == CALL_INSN)
	      && reg_mentioned_p (reg, PATTERN (i2)))
	    /* Some other ref remains;
	       we can't do anything.  */
	    return;
	}

      /* Delete the now-dead stores into this pseudo.  */
      /* Second pass over the same range: every single_set whose
	 destination is the pseudo is dead and is removed.  */
      for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
	{
	  rtx set = single_set (i2);

	  if (set != 0 && SET_DEST (set) == reg)
	    delete_insn (i2);
	  /* NOTE(review): delete_insn appears not to unlink i2 from the
	     chain (we still test its code below) -- presumably it turns
	     the insn into a note; confirm against delete_insn.  */
	  if (GET_CODE (i2) == CODE_LABEL
	      || GET_CODE (i2) == JUMP_INSN)
	    break;
	}

      /* For the debugging info,
	 say the pseudo lives in this reload reg.  */
      reg_renumber[REGNO (reg)] = REGNO (reload_reg_rtx[j]);
      /* -1 here tells alter_reg the pseudo's stack slot is no longer
	 needed -- TODO confirm against alter_reg's contract.  */
      alter_reg (REGNO (reg), -1);
    }
}
6484 \f
6485 /* Output reload-insns to reload VALUE into RELOADREG.
6486 VALUE is an autoincrement or autodecrement RTX whose operand
6487 is a register or memory location;
6488 so reloading involves incrementing that location.
6489
6490 INC_AMOUNT is the number to increment or decrement by (always positive).
6491 This cannot be deduced from VALUE. */
6492
static void
inc_for_reload (reloadreg, value, inc_amount)
     rtx reloadreg;
     rtx value;
     int inc_amount;
{
  /* REG or MEM to be copied and incremented.  */
  rtx incloc = XEXP (value, 0);
  /* Nonzero if increment after copying.  */
  int post = (GET_CODE (value) == POST_DEC || GET_CODE (value) == POST_INC);
  rtx last;
  rtx inc;
  rtx add_insn;
  int code;

  /* No hard register is equivalent to this register after
     inc/dec operation.  If REG_LAST_RELOAD_REG were non-zero,
     we could inc/dec that register as well (maybe even using it for
     the source), but I'm not sure it's worth worrying about.  */
  if (GET_CODE (incloc) == REG)
    reg_last_reload_reg[REGNO (incloc)] = 0;

  /* Decrements are handled as additions of a negated amount.  */
  if (GET_CODE (value) == PRE_DEC || GET_CODE (value) == POST_DEC)
    inc_amount = - inc_amount;

  inc = GEN_INT (inc_amount);

  /* If this is post-increment, first copy the location to the reload reg.  */
  if (post)
    emit_insn (gen_move_insn (reloadreg, incloc));

  /* See if we can directly increment INCLOC.  Use a method similar to that
     in gen_input_reload.  */

  /* Remember the last insn so the speculative add can be rolled back
     with delete_insns_since if it is not recognized.  */
  last = get_last_insn ();
  add_insn = emit_insn (gen_rtx (SET, VOIDmode, incloc,
				 gen_rtx (PLUS, GET_MODE (incloc),
					  incloc, inc)));

  code = recog_memoized (add_insn);
  if (code >= 0)
    {
      insn_extract (add_insn);
      /* Strict constraint checking: validate the insn as it must be
	 after reload has completed.  */
      if (constrain_operands (code, 1))
	{
	  /* If this is a pre-increment and we have incremented the value
	     where it lives, copy the incremented value to RELOADREG to
	     be used as an address.  */

	  if (! post)
	    emit_insn (gen_move_insn (reloadreg, incloc));

	  return;
	}
    }

  /* The direct add was not recognized; remove it.  */
  delete_insns_since (last);

  /* If couldn't do the increment directly, must increment in RELOADREG.
     The way we do this depends on whether this is pre- or post-increment.
     For pre-increment, copy INCLOC to the reload register, increment it
     there, then save back.  */

  if (! post)
    {
      emit_insn (gen_move_insn (reloadreg, incloc));
      emit_insn (gen_add2_insn (reloadreg, inc));
      emit_insn (gen_move_insn (incloc, reloadreg));
    }
  else
    {
      /* Postincrement.
	 Because this might be a jump insn or a compare, and because RELOADREG
	 may not be available after the insn in an input reload, we must do
	 the incrementation before the insn being reloaded for.

	 We have already copied INCLOC to RELOADREG.  Increment the copy in
	 RELOADREG, save that back, then decrement RELOADREG so it has
	 the original value.  */

      emit_insn (gen_add2_insn (reloadreg, inc));
      emit_insn (gen_move_insn (incloc, reloadreg));
      emit_insn (gen_add2_insn (reloadreg, GEN_INT (-inc_amount)));
    }

  return;
}
6580 \f
6581 /* Return 1 if we are certain that the constraint-string STRING allows
6582 the hard register REG. Return 0 if we can't be sure of this. */
6583
6584 static int
6585 constraint_accepts_reg_p (string, reg)
6586 char *string;
6587 rtx reg;
6588 {
6589 int value = 0;
6590 int regno = true_regnum (reg);
6591 int c;
6592
6593 /* Initialize for first alternative. */
6594 value = 0;
6595 /* Check that each alternative contains `g' or `r'. */
6596 while (1)
6597 switch (c = *string++)
6598 {
6599 case 0:
6600 /* If an alternative lacks `g' or `r', we lose. */
6601 return value;
6602 case ',':
6603 /* If an alternative lacks `g' or `r', we lose. */
6604 if (value == 0)
6605 return 0;
6606 /* Initialize for next alternative. */
6607 value = 0;
6608 break;
6609 case 'g':
6610 case 'r':
6611 /* Any general reg wins for this alternative. */
6612 if (TEST_HARD_REG_BIT (reg_class_contents[(int) GENERAL_REGS], regno))
6613 value = 1;
6614 break;
6615 default:
6616 /* Any reg in specified class wins for this alternative. */
6617 {
6618 enum reg_class class = REG_CLASS_FROM_LETTER (c);
6619
6620 if (TEST_HARD_REG_BIT (reg_class_contents[(int) class], regno))
6621 value = 1;
6622 }
6623 }
6624 }
6625 \f
6626 /* Return the number of places FIND appears within X, but don't count
6627 an occurrence if some SET_DEST is FIND. */
6628
6629 static int
6630 count_occurrences (x, find)
6631 register rtx x, find;
6632 {
6633 register int i, j;
6634 register enum rtx_code code;
6635 register char *format_ptr;
6636 int count;
6637
6638 if (x == find)
6639 return 1;
6640 if (x == 0)
6641 return 0;
6642
6643 code = GET_CODE (x);
6644
6645 switch (code)
6646 {
6647 case REG:
6648 case QUEUED:
6649 case CONST_INT:
6650 case CONST_DOUBLE:
6651 case SYMBOL_REF:
6652 case CODE_LABEL:
6653 case PC:
6654 case CC0:
6655 return 0;
6656
6657 case SET:
6658 if (SET_DEST (x) == find)
6659 return count_occurrences (SET_SRC (x), find);
6660 break;
6661 }
6662
6663 format_ptr = GET_RTX_FORMAT (code);
6664 count = 0;
6665
6666 for (i = 0; i < GET_RTX_LENGTH (code); i++)
6667 {
6668 switch (*format_ptr++)
6669 {
6670 case 'e':
6671 count += count_occurrences (XEXP (x, i), find);
6672 break;
6673
6674 case 'E':
6675 if (XVEC (x, i) != NULL)
6676 {
6677 for (j = 0; j < XVECLEN (x, i); j++)
6678 count += count_occurrences (XVECEXP (x, i, j), find);
6679 }
6680 break;
6681 }
6682 }
6683 return count;
6684 }
This page took 0.355567 seconds and 4 git commands to generate.