/* Provenance: gcc.gnu.org Git (gcc.git), file gcc/reload1.c.
   Commit subject: "reload1.c (struct elim_table): Delete MAX_OFFSET member."  */
1 /* Reload pseudo regs into hard regs for insns that require hard regs.
2 Copyright (C) 1987, 88, 89, 92-97, 1998 Free Software Foundation, Inc.
3
4 This file is part of GNU CC.
5
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
20
21
22 #include "config.h"
23 #include "system.h"
24
25 #include "machmode.h"
26 #include "hard-reg-set.h"
27 #include "rtl.h"
28 #include "obstack.h"
29 #include "insn-config.h"
30 #include "insn-flags.h"
31 #include "insn-codes.h"
32 #include "flags.h"
33 #include "expr.h"
34 #include "regs.h"
35 #include "basic-block.h"
36 #include "reload.h"
37 #include "recog.h"
38 #include "output.h"
39 #include "real.h"
40 #include "toplev.h"
41
42 /* This file contains the reload pass of the compiler, which is
43 run after register allocation has been done. It checks that
44 each insn is valid (operands required to be in registers really
45 are in registers of the proper class) and fixes up invalid ones
46 by copying values temporarily into registers for the insns
47 that need them.
48
49 The results of register allocation are described by the vector
50 reg_renumber; the insns still contain pseudo regs, but reg_renumber
51 can be used to find which hard reg, if any, a pseudo reg is in.
52
53 The technique we always use is to free up a few hard regs that are
54 called ``reload regs'', and for each place where a pseudo reg
55 must be in a hard reg, copy it temporarily into one of the reload regs.
56
57 Reload regs are allocated locally for every instruction that needs
58 reloads. When there are pseudos which are allocated to a register that
59 has been chosen as a reload reg, such pseudos must be ``spilled''.
60 This means that they go to other hard regs, or to stack slots if no other
61 available hard regs can be found. Spilling can invalidate more
62 insns, requiring additional need for reloads, so we must keep checking
63 until the process stabilizes.
64
65 For machines with different classes of registers, we must keep track
66 of the register class needed for each reload, and make sure that
67 we allocate enough reload registers of each class.
68
69 The file reload.c contains the code that checks one insn for
70 validity and reports the reloads that it needs. This file
71 is in charge of scanning the entire rtl code, accumulating the
72 reload needs, spilling, assigning reload registers to use for
73 fixing up each insn, and generating the new insns to copy values
74 into the reload registers. */
75
76
77 #ifndef REGISTER_MOVE_COST
78 #define REGISTER_MOVE_COST(x, y) 2
79 #endif
80 \f
81 /* During reload_as_needed, element N contains a REG rtx for the hard reg
82 into which reg N has been reloaded (perhaps for a previous insn). */
83 static rtx *reg_last_reload_reg;
84
85 /* Elt N nonzero if reg_last_reload_reg[N] has been set in this insn
86 for an output reload that stores into reg N. */
87 static char *reg_has_output_reload;
88
89 /* Indicates which hard regs are reload-registers for an output reload
90 in the current insn. */
91 static HARD_REG_SET reg_is_output_reload;
92
93 /* Element N is the constant value to which pseudo reg N is equivalent,
94 or zero if pseudo reg N is not equivalent to a constant.
95 find_reloads looks at this in order to replace pseudo reg N
96 with the constant it stands for. */
97 rtx *reg_equiv_constant;
98
99 /* Element N is a memory location to which pseudo reg N is equivalent,
100 prior to any register elimination (such as frame pointer to stack
101 pointer). Depending on whether or not it is a valid address, this value
102 is transferred to either reg_equiv_address or reg_equiv_mem. */
103 rtx *reg_equiv_memory_loc;
104
105 /* Element N is the address of stack slot to which pseudo reg N is equivalent.
106 This is used when the address is not valid as a memory address
107 (because its displacement is too big for the machine.) */
108 rtx *reg_equiv_address;
109
110 /* Element N is the memory slot to which pseudo reg N is equivalent,
111 or zero if pseudo reg N is not equivalent to a memory slot. */
112 rtx *reg_equiv_mem;
113
114 /* Widest width in which each pseudo reg is referred to (via subreg). */
115 static int *reg_max_ref_width;
116
117 /* Element N is the insn that initialized reg N from its equivalent
118 constant or memory slot. */
119 static rtx *reg_equiv_init;
120
121 /* Vector to remember old contents of reg_renumber before spilling. */
122 static short *reg_old_renumber;
123
124 /* During reload_as_needed, element N contains the last pseudo regno reloaded
125 into hard register N. If that pseudo reg occupied more than one register,
126 reg_reloaded_contents points to that pseudo for each spill register in
127 use; all of these must remain set for an inheritance to occur. */
128 static int reg_reloaded_contents[FIRST_PSEUDO_REGISTER];
129
130 /* During reload_as_needed, element N contains the insn for which
131 hard register N was last used. Its contents are significant only
132 when reg_reloaded_valid is set for this register. */
133 static rtx reg_reloaded_insn[FIRST_PSEUDO_REGISTER];
134
135 /* Indicate if reg_reloaded_insn / reg_reloaded_contents is valid */
136 static HARD_REG_SET reg_reloaded_valid;
137 /* Indicate if the register was dead at the end of the reload.
138 This is only valid if reg_reloaded_contents is set and valid. */
139 static HARD_REG_SET reg_reloaded_dead;
140
141 /* Number of spill-regs so far; number of valid elements of spill_regs. */
142 static int n_spills;
143
144 /* In parallel with spill_regs, contains REG rtx's for those regs.
145 Holds the last rtx used for any given reg, or 0 if it has never
146 been used for spilling yet. This rtx is reused, provided it has
147 the proper mode. */
148 static rtx spill_reg_rtx[FIRST_PSEUDO_REGISTER];
149
150 /* In parallel with spill_regs, contains nonzero for a spill reg
151 that was stored after the last time it was used.
152 The precise value is the insn generated to do the store. */
153 static rtx spill_reg_store[FIRST_PSEUDO_REGISTER];
154
155 /* This is the register that was stored with spill_reg_store. This is a
156 copy of reload_out / reload_out_reg when the value was stored; if
157 reload_out is a MEM, spill_reg_stored_to will be set to reload_out_reg. */
158 static rtx spill_reg_stored_to[FIRST_PSEUDO_REGISTER];
159
160 /* This table is the inverse mapping of spill_regs:
161 indexed by hard reg number,
162 it contains the position of that reg in spill_regs,
163 or -1 for something that is not in spill_regs.
164
165 ?!? This is no longer accurate. */
166 static short spill_reg_order[FIRST_PSEUDO_REGISTER];
167
168 /* This reg set indicates registers that can't be used as spill registers for
169 the currently processed insn. These are the hard registers which are live
170 during the insn, but not allocated to pseudos, as well as fixed
171 registers. */
172 static HARD_REG_SET bad_spill_regs;
173
174 /* These are the hard registers that can't be used as spill register for any
175 insn. This includes registers used for user variables and registers that
176 we can't eliminate. A register that appears in this set also can't be used
177 to retry register allocation. */
178 static HARD_REG_SET bad_spill_regs_global;
179
180 /* Describes order of use of registers for reloading
181 of spilled pseudo-registers. `n_spills' is the number of
182 elements that are actually valid; new ones are added at the end.
183
184 Both spill_regs and spill_reg_order are used on two occasions:
185 once during find_reload_regs, where they keep track of the spill registers
186 for a single insn, but also during reload_as_needed where they show all
187 the registers ever used by reload. For the latter case, the information
188 is calculated during finish_spills. */
189 static short spill_regs[FIRST_PSEUDO_REGISTER];
190
191 /* This vector of reg sets indicates, for each pseudo, which hard registers
192 may not be used for retrying global allocation because the register was
193 formerly spilled from one of them. If we allowed reallocating a pseudo to
194 a register that it was already allocated to, reload might not
195 terminate. */
196 static HARD_REG_SET *pseudo_previous_regs;
197
198 /* This vector of reg sets indicates, for each pseudo, which hard
199 registers may not be used for retrying global allocation because they
200 are used as spill registers during one of the insns in which the
201 pseudo is live. */
202 static HARD_REG_SET *pseudo_forbidden_regs;
203
204 /* All hard regs that have been used as spill registers for any insn are
205 marked in this set. */
206 static HARD_REG_SET used_spill_regs;
207
208 /* Index of last register assigned as a spill register. We allocate in
209 a round-robin fashion. */
210 static int last_spill_reg;
211
212 /* Describes order of preference for putting regs into spill_regs.
213 Contains the numbers of all the hard regs, in order most preferred first.
214 This order is different for each function.
215 It is set up by order_regs_for_reload.
216 Empty elements at the end contain -1. */
217 static short potential_reload_regs[FIRST_PSEUDO_REGISTER];
218
219 /* Nonzero if indirect addressing is supported on the machine; this means
220 that spilling (REG n) does not require reloading it into a register in
221 order to do (MEM (REG n)) or (MEM (PLUS (REG n) (CONST_INT c))). The
222 value indicates the level of indirect addressing supported, e.g., two
223 means that (MEM (MEM (REG n))) is also valid if (REG n) does not get
224 a hard register. */
225 static char spill_indirect_levels;
226
227 /* Nonzero if indirect addressing is supported when the innermost MEM is
228 of the form (MEM (SYMBOL_REF sym)). It is assumed that the level to
229 which these are valid is the same as spill_indirect_levels, above. */
230 char indirect_symref_ok;
231
232 /* Nonzero if an address (plus (reg frame_pointer) (reg ...)) is valid. */
233 char double_reg_address_ok;
234
235 /* Record the stack slot for each spilled hard register. */
236 static rtx spill_stack_slot[FIRST_PSEUDO_REGISTER];
237
238 /* Width allocated so far for that stack slot. */
239 static int spill_stack_slot_width[FIRST_PSEUDO_REGISTER];
240
241 /* Record which pseudos needed to be spilled. */
242 static regset spilled_pseudos;
243
244 /* First uid used by insns created by reload in this function.
245 Used in find_equiv_reg. */
246 int reload_first_uid;
247
248 /* Flag set by local-alloc or global-alloc if anything is live in
249 a call-clobbered reg across calls. */
250 int caller_save_needed;
251
252 /* Set to 1 while reload_as_needed is operating.
253 Required by some machines to handle any generated moves differently. */
254 int reload_in_progress = 0;
255
256 /* These arrays record the insn_code of insns that may be needed to
257 perform input and output reloads of special objects. They provide a
258 place to pass a scratch register. */
259 enum insn_code reload_in_optab[NUM_MACHINE_MODES];
260 enum insn_code reload_out_optab[NUM_MACHINE_MODES];
261
262 /* This obstack is used for allocation of rtl during register elimination.
263 The allocated storage can be freed once find_reloads has processed the
264 insn. */
265 struct obstack reload_obstack;
266
267 /* Points to the beginning of the reload_obstack. All insn_chain structures
268 are allocated first. */
269 char *reload_startobj;
270
271 /* The point after all insn_chain structures. Used to quickly deallocate
272 memory used while processing one insn. */
273 char *reload_firstobj;
274
275 #define obstack_chunk_alloc xmalloc
276 #define obstack_chunk_free free
277
278 /* List of labels that must never be deleted. */
279 extern rtx forced_labels;
280
281 /* List of insn_chain instructions, one for every insn that reload needs to
282 examine. */
283 struct insn_chain *reload_insn_chain;
284
285 /* List of all insns needing reloads. */
286 static struct insn_chain *insns_need_reload;
287 \f
/* This structure is used to record information about register eliminations.
   Each array entry describes one possible way of eliminating a register
   in favor of another.   If there is more than one way of eliminating a
   particular register, the most preferred should be specified first.  */

static struct elim_table
{
  int from;			/* Register number to be eliminated.  */
  int to;			/* Register number used as replacement.  */
  int initial_offset;		/* Initial difference between values.  */
  int can_eliminate;		/* Non-zero if this elimination can be done.  */
  int can_eliminate_previous;	/* Value of CAN_ELIMINATE in previous scan over
				   insns made by reload.  */
  int offset;			/* Current offset between the two regs.  */
  int previous_offset;		/* Offset at end of previous insn.  */
  int ref_outside_mem;		/* "to" has been referenced outside a MEM.  */
  rtx from_rtx;			/* REG rtx for the register to be eliminated.
				   We cannot simply compare the number since
				   we might then spuriously replace a hard
				   register corresponding to a pseudo
				   assigned to the reg to be eliminated.  */
  rtx to_rtx;			/* REG rtx for the replacement.  */
} reg_eliminate[] =

/* If a set of eliminable registers was specified, define the table from it.
   Otherwise, default to the normal case of the frame pointer being
   replaced by the stack pointer.  */

#ifdef ELIMINABLE_REGS
  ELIMINABLE_REGS;
#else
  {{ FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM}};
#endif
321
322 #define NUM_ELIMINABLE_REGS (sizeof reg_eliminate / sizeof reg_eliminate[0])
323
324 /* Record the number of pending eliminations that have an offset not equal
325 to their initial offset. If non-zero, we use a new copy of each
326 replacement result in any insns encountered. */
327 int num_not_at_initial_offset;
328
329 /* Count the number of registers that we may be able to eliminate. */
330 static int num_eliminable;
331
332 /* For each label, we record the offset of each elimination. If we reach
333 a label by more than one path and an offset differs, we cannot do the
334 elimination. This information is indexed by the number of the label.
335 The first table is an array of flags that records whether we have yet
336 encountered a label and the second table is an array of arrays, one
337 entry in the latter array for each elimination. */
338
339 static char *offsets_known_at;
340 static int (*offsets_at)[NUM_ELIMINABLE_REGS];
341
342 /* Number of labels in the current function. */
343
344 static int num_labels;
345
/* Pairs a hard register number with a use count.  NOTE(review): judging
   from the prototypes below (count_pseudo, hard_reg_use_compare,
   order_regs_for_reload), this appears to be used to sort hard registers
   by how heavily they are used when choosing spill registers — confirm
   against those function bodies.  */
struct hard_reg_n_uses
{
  int regno;			/* Hard register number.  */
  unsigned int uses;		/* Accumulated number of uses.  */
};
351 \f
352 static void maybe_fix_stack_asms PROTO((void));
353 static void calculate_needs_all_insns PROTO((int));
354 static void calculate_needs PROTO((struct insn_chain *));
355 static void find_reload_regs PROTO((struct insn_chain *chain,
356 FILE *));
357 static void find_tworeg_group PROTO((struct insn_chain *, int,
358 FILE *));
359 static void find_group PROTO((struct insn_chain *, int,
360 FILE *));
361 static int possible_group_p PROTO((struct insn_chain *, int));
362 static void count_possible_groups PROTO((struct insn_chain *, int));
363 static int modes_equiv_for_class_p PROTO((enum machine_mode,
364 enum machine_mode,
365 enum reg_class));
366 static void delete_caller_save_insns PROTO((void));
367
368 static void spill_failure PROTO((rtx));
369 static void new_spill_reg PROTO((struct insn_chain *, int, int,
370 int, FILE *));
371 static void maybe_mark_pseudo_spilled PROTO((int));
372 static void delete_dead_insn PROTO((rtx));
373 static void alter_reg PROTO((int, int));
374 static void set_label_offsets PROTO((rtx, rtx, int));
375 static int eliminate_regs_in_insn PROTO((rtx, int));
376 static void update_eliminable_offsets PROTO((void));
377 static void mark_not_eliminable PROTO((rtx, rtx));
378 static void set_initial_elim_offsets PROTO((void));
379 static void verify_initial_elim_offsets PROTO((void));
380 static void set_initial_label_offsets PROTO((void));
381 static void set_offsets_for_label PROTO((rtx));
382 static void init_elim_table PROTO((void));
383 static void update_eliminables PROTO((HARD_REG_SET *));
384 static void spill_hard_reg PROTO((int, FILE *, int));
385 static int finish_spills PROTO((int, FILE *));
386 static void ior_hard_reg_set PROTO((HARD_REG_SET *, HARD_REG_SET *));
387 static void scan_paradoxical_subregs PROTO((rtx));
388 static int hard_reg_use_compare PROTO((const GENERIC_PTR, const GENERIC_PTR));
389 static void count_pseudo PROTO((struct hard_reg_n_uses *, int));
390 static void order_regs_for_reload PROTO((struct insn_chain *));
391 static void reload_as_needed PROTO((int));
392 static void forget_old_reloads_1 PROTO((rtx, rtx));
393 static int reload_reg_class_lower PROTO((const GENERIC_PTR, const GENERIC_PTR));
394 static void mark_reload_reg_in_use PROTO((int, int, enum reload_type,
395 enum machine_mode));
396 static void clear_reload_reg_in_use PROTO((int, int, enum reload_type,
397 enum machine_mode));
398 static int reload_reg_free_p PROTO((int, int, enum reload_type));
399 static int reload_reg_free_before_p PROTO((int, int, enum reload_type, int));
400 static int reload_reg_free_for_value_p PROTO((int, int, enum reload_type, rtx, rtx, int));
401 static int reload_reg_reaches_end_p PROTO((int, int, enum reload_type));
402 static int allocate_reload_reg PROTO((struct insn_chain *, int, int,
403 int));
404 static void choose_reload_regs PROTO((struct insn_chain *));
405 static void merge_assigned_reloads PROTO((rtx));
406 static void emit_reload_insns PROTO((struct insn_chain *));
407 static void delete_output_reload PROTO((rtx, int, int));
408 static void delete_address_reloads PROTO((rtx, rtx));
409 static void delete_address_reloads_1 PROTO((rtx, rtx, rtx));
410 static rtx inc_for_reload PROTO((rtx, rtx, rtx, int));
411 static int constraint_accepts_reg_p PROTO((char *, rtx));
412 static void reload_cse_regs_1 PROTO((rtx));
413 static void reload_cse_invalidate_regno PROTO((int, enum machine_mode, int));
414 static int reload_cse_mem_conflict_p PROTO((rtx, rtx));
415 static void reload_cse_invalidate_mem PROTO((rtx));
416 static void reload_cse_invalidate_rtx PROTO((rtx, rtx));
417 static int reload_cse_regno_equal_p PROTO((int, rtx, enum machine_mode));
418 static int reload_cse_noop_set_p PROTO((rtx, rtx));
419 static int reload_cse_simplify_set PROTO((rtx, rtx));
420 static int reload_cse_simplify_operands PROTO((rtx));
421 static void reload_cse_check_clobber PROTO((rtx, rtx));
422 static void reload_cse_record_set PROTO((rtx, rtx));
423 static void reload_combine PROTO((void));
424 static void reload_combine_note_use PROTO((rtx *, rtx));
425 static void reload_combine_note_store PROTO((rtx, rtx));
426 static void reload_cse_move2add PROTO((rtx));
427 static void move2add_note_store PROTO((rtx, rtx));
428 \f
429 /* Initialize the reload pass once per compilation. */
430
431 void
432 init_reload ()
433 {
434 register int i;
435
436 /* Often (MEM (REG n)) is still valid even if (REG n) is put on the stack.
437 Set spill_indirect_levels to the number of levels such addressing is
438 permitted, zero if it is not permitted at all. */
439
440 register rtx tem
441 = gen_rtx_MEM (Pmode,
442 gen_rtx_PLUS (Pmode,
443 gen_rtx_REG (Pmode, LAST_VIRTUAL_REGISTER + 1),
444 GEN_INT (4)));
445 spill_indirect_levels = 0;
446
447 while (memory_address_p (QImode, tem))
448 {
449 spill_indirect_levels++;
450 tem = gen_rtx_MEM (Pmode, tem);
451 }
452
453 /* See if indirect addressing is valid for (MEM (SYMBOL_REF ...)). */
454
455 tem = gen_rtx_MEM (Pmode, gen_rtx_SYMBOL_REF (Pmode, "foo"));
456 indirect_symref_ok = memory_address_p (QImode, tem);
457
458 /* See if reg+reg is a valid (and offsettable) address. */
459
460 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
461 {
462 tem = gen_rtx_PLUS (Pmode,
463 gen_rtx_REG (Pmode, HARD_FRAME_POINTER_REGNUM),
464 gen_rtx_REG (Pmode, i));
465 /* This way, we make sure that reg+reg is an offsettable address. */
466 tem = plus_constant (tem, 4);
467
468 if (memory_address_p (QImode, tem))
469 {
470 double_reg_address_ok = 1;
471 break;
472 }
473 }
474
475 /* Initialize obstack for our rtl allocation. */
476 gcc_obstack_init (&reload_obstack);
477 reload_startobj = (char *) obstack_alloc (&reload_obstack, 0);
478 }
479
480 /* List of insn chains that are currently unused. */
481 static struct insn_chain *unused_insn_chains = 0;
482
483 /* Allocate an empty insn_chain structure. */
484 struct insn_chain *
485 new_insn_chain ()
486 {
487 struct insn_chain *c;
488
489 if (unused_insn_chains == 0)
490 {
491 c = obstack_alloc (&reload_obstack, sizeof (struct insn_chain));
492 c->live_before = OBSTACK_ALLOC_REG_SET (&reload_obstack);
493 c->live_after = OBSTACK_ALLOC_REG_SET (&reload_obstack);
494 }
495 else
496 {
497 c = unused_insn_chains;
498 unused_insn_chains = c->next;
499 }
500 c->is_caller_save_insn = 0;
501 c->need_operand_change = 0;
502 c->need_reload = 0;
503 c->need_elim = 0;
504 return c;
505 }
506
507 /* Small utility function to set all regs in hard reg set TO which are
508 allocated to pseudos in regset FROM. */
509 void
510 compute_use_by_pseudos (to, from)
511 HARD_REG_SET *to;
512 regset from;
513 {
514 int regno;
515 EXECUTE_IF_SET_IN_REG_SET
516 (from, FIRST_PSEUDO_REGISTER, regno,
517 {
518 int r = reg_renumber[regno];
519 int nregs;
520 if (r < 0)
521 abort ();
522 nregs = HARD_REGNO_NREGS (r, PSEUDO_REGNO_MODE (regno));
523 while (nregs-- > 0)
524 SET_HARD_REG_BIT (*to, r + nregs);
525 });
526 }
527 \f
528 /* Global variables used by reload and its subroutines. */
529
530 /* Set during calculate_needs if an insn needs register elimination. */
531 static int something_needs_elimination;
532 /* Set during calculate_needs if an insn needs an operand changed. */
533 int something_needs_operands_changed;
534
535 /* Nonzero means we couldn't get enough spill regs. */
536 static int failure;
537
538 /* Main entry point for the reload pass.
539
540 FIRST is the first insn of the function being compiled.
541
542 GLOBAL nonzero means we were called from global_alloc
543 and should attempt to reallocate any pseudoregs that we
544 displace from hard regs we will use for reloads.
545 If GLOBAL is zero, we do not have enough information to do that,
546 so any pseudo reg that is spilled must go to the stack.
547
548 DUMPFILE is the global-reg debugging dump file stream, or 0.
549 If it is nonzero, messages are written to it to describe
550 which registers are seized as reload regs, which pseudo regs
551 are spilled from them, and where the pseudo regs are reallocated to.
552
553 Return value is nonzero if reload failed
554 and we must not do any more for this function. */
555
556 int
557 reload (first, global, dumpfile)
558 rtx first;
559 int global;
560 FILE *dumpfile;
561 {
562 register int i;
563 register rtx insn;
564 register struct elim_table *ep;
565
566 /* The two pointers used to track the true location of the memory used
567 for label offsets. */
568 char *real_known_ptr = NULL_PTR;
569 int (*real_at_ptr)[NUM_ELIMINABLE_REGS];
570
571 /* Make sure even insns with volatile mem refs are recognizable. */
572 init_recog ();
573
574 failure = 0;
575
576 reload_firstobj = (char *) obstack_alloc (&reload_obstack, 0);
577
578 /* Make sure that the last insn in the chain
579 is not something that needs reloading. */
580 emit_note (NULL_PTR, NOTE_INSN_DELETED);
581
582 /* Enable find_equiv_reg to distinguish insns made by reload. */
583 reload_first_uid = get_max_uid ();
584
585 #ifdef SECONDARY_MEMORY_NEEDED
586 /* Initialize the secondary memory table. */
587 clear_secondary_mem ();
588 #endif
589
590 /* We don't have a stack slot for any spill reg yet. */
591 bzero ((char *) spill_stack_slot, sizeof spill_stack_slot);
592 bzero ((char *) spill_stack_slot_width, sizeof spill_stack_slot_width);
593
594 /* Initialize the save area information for caller-save, in case some
595 are needed. */
596 init_save_areas ();
597
598 /* Compute which hard registers are now in use
599 as homes for pseudo registers.
600 This is done here rather than (eg) in global_alloc
601 because this point is reached even if not optimizing. */
602 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
603 mark_home_live (i);
604
605 /* A function that receives a nonlocal goto must save all call-saved
606 registers. */
607 if (current_function_has_nonlocal_label)
608 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
609 {
610 if (! call_used_regs[i] && ! fixed_regs[i])
611 regs_ever_live[i] = 1;
612 }
613
614 /* Find all the pseudo registers that didn't get hard regs
615 but do have known equivalent constants or memory slots.
616 These include parameters (known equivalent to parameter slots)
617 and cse'd or loop-moved constant memory addresses.
618
619 Record constant equivalents in reg_equiv_constant
620 so they will be substituted by find_reloads.
621 Record memory equivalents in reg_mem_equiv so they can
622 be substituted eventually by altering the REG-rtx's. */
623
624 reg_equiv_constant = (rtx *) xmalloc (max_regno * sizeof (rtx));
625 bzero ((char *) reg_equiv_constant, max_regno * sizeof (rtx));
626 reg_equiv_memory_loc = (rtx *) xmalloc (max_regno * sizeof (rtx));
627 bzero ((char *) reg_equiv_memory_loc, max_regno * sizeof (rtx));
628 reg_equiv_mem = (rtx *) xmalloc (max_regno * sizeof (rtx));
629 bzero ((char *) reg_equiv_mem, max_regno * sizeof (rtx));
630 reg_equiv_init = (rtx *) xmalloc (max_regno * sizeof (rtx));
631 bzero ((char *) reg_equiv_init, max_regno * sizeof (rtx));
632 reg_equiv_address = (rtx *) xmalloc (max_regno * sizeof (rtx));
633 bzero ((char *) reg_equiv_address, max_regno * sizeof (rtx));
634 reg_max_ref_width = (int *) xmalloc (max_regno * sizeof (int));
635 bzero ((char *) reg_max_ref_width, max_regno * sizeof (int));
636 reg_old_renumber = (short *) xmalloc (max_regno * sizeof (short));
637 bcopy (reg_renumber, reg_old_renumber, max_regno * sizeof (short));
638 pseudo_forbidden_regs
639 = (HARD_REG_SET *) xmalloc (max_regno * sizeof (HARD_REG_SET));
640 pseudo_previous_regs
641 = (HARD_REG_SET *) xmalloc (max_regno * sizeof (HARD_REG_SET));
642
643 CLEAR_HARD_REG_SET (bad_spill_regs_global);
644 bzero ((char *) pseudo_previous_regs, max_regno * sizeof (HARD_REG_SET));
645
646 /* Look for REG_EQUIV notes; record what each pseudo is equivalent to.
647 Also find all paradoxical subregs and find largest such for each pseudo.
648 On machines with small register classes, record hard registers that
649 are used for user variables. These can never be used for spills.
650 Also look for a "constant" NOTE_INSN_SETJMP. This means that all
651 caller-saved registers must be marked live. */
652
653 for (insn = first; insn; insn = NEXT_INSN (insn))
654 {
655 rtx set = single_set (insn);
656
657 if (GET_CODE (insn) == NOTE && CONST_CALL_P (insn)
658 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
659 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
660 if (! call_used_regs[i])
661 regs_ever_live[i] = 1;
662
663 if (set != 0 && GET_CODE (SET_DEST (set)) == REG)
664 {
665 rtx note = find_reg_note (insn, REG_EQUIV, NULL_RTX);
666 if (note
667 #ifdef LEGITIMATE_PIC_OPERAND_P
668 && (! CONSTANT_P (XEXP (note, 0)) || ! flag_pic
669 || LEGITIMATE_PIC_OPERAND_P (XEXP (note, 0)))
670 #endif
671 )
672 {
673 rtx x = XEXP (note, 0);
674 i = REGNO (SET_DEST (set));
675 if (i > LAST_VIRTUAL_REGISTER)
676 {
677 if (GET_CODE (x) == MEM)
678 {
679 /* If the operand is a PLUS, the MEM may be shared,
680 so make sure we have an unshared copy here. */
681 if (GET_CODE (XEXP (x, 0)) == PLUS)
682 x = copy_rtx (x);
683
684 reg_equiv_memory_loc[i] = x;
685 }
686 else if (CONSTANT_P (x))
687 {
688 if (LEGITIMATE_CONSTANT_P (x))
689 reg_equiv_constant[i] = x;
690 else
691 reg_equiv_memory_loc[i]
692 = force_const_mem (GET_MODE (SET_DEST (set)), x);
693 }
694 else
695 continue;
696
697 /* If this register is being made equivalent to a MEM
698 and the MEM is not SET_SRC, the equivalencing insn
699 is one with the MEM as a SET_DEST and it occurs later.
700 So don't mark this insn now. */
701 if (GET_CODE (x) != MEM
702 || rtx_equal_p (SET_SRC (set), x))
703 reg_equiv_init[i] = insn;
704 }
705 }
706 }
707
708 /* If this insn is setting a MEM from a register equivalent to it,
709 this is the equivalencing insn. */
710 else if (set && GET_CODE (SET_DEST (set)) == MEM
711 && GET_CODE (SET_SRC (set)) == REG
712 && reg_equiv_memory_loc[REGNO (SET_SRC (set))]
713 && rtx_equal_p (SET_DEST (set),
714 reg_equiv_memory_loc[REGNO (SET_SRC (set))]))
715 reg_equiv_init[REGNO (SET_SRC (set))] = insn;
716
717 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
718 scan_paradoxical_subregs (PATTERN (insn));
719 }
720
721 init_elim_table ();
722
723 num_labels = max_label_num () - get_first_label_num ();
724
725 /* Allocate the tables used to store offset information at labels. */
726 /* We used to use alloca here, but the size of what it would try to
727 allocate would occasionally cause it to exceed the stack limit and
728 cause a core dump. */
729 real_known_ptr = xmalloc (num_labels);
730 real_at_ptr
731 = (int (*)[NUM_ELIMINABLE_REGS])
732 xmalloc (num_labels * NUM_ELIMINABLE_REGS * sizeof (int));
733
734 offsets_known_at = real_known_ptr - get_first_label_num ();
735 offsets_at
736 = (int (*)[NUM_ELIMINABLE_REGS]) (real_at_ptr - get_first_label_num ());
737
738 /* Alter each pseudo-reg rtx to contain its hard reg number.
739 Assign stack slots to the pseudos that lack hard regs or equivalents.
740 Do not touch virtual registers. */
741
742 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_regno; i++)
743 alter_reg (i, -1);
744
745 /* If we have some registers we think can be eliminated, scan all insns to
746 see if there is an insn that sets one of these registers to something
747 other than itself plus a constant. If so, the register cannot be
748 eliminated. Doing this scan here eliminates an extra pass through the
749 main reload loop in the most common case where register elimination
750 cannot be done. */
751 for (insn = first; insn && num_eliminable; insn = NEXT_INSN (insn))
752 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
753 || GET_CODE (insn) == CALL_INSN)
754 note_stores (PATTERN (insn), mark_not_eliminable);
755
756 #ifndef REGISTER_CONSTRAINTS
757 /* If all the pseudo regs have hard regs,
758 except for those that are never referenced,
759 we know that no reloads are needed. */
760 /* But that is not true if there are register constraints, since
761 in that case some pseudos might be in the wrong kind of hard reg. */
762
763 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
764 if (reg_renumber[i] == -1 && REG_N_REFS (i) != 0)
765 break;
766
767 if (i == max_regno && num_eliminable == 0 && ! caller_save_needed)
768 {
769 free (real_known_ptr);
770 free (real_at_ptr);
771 free (reg_equiv_constant);
772 free (reg_equiv_memory_loc);
773 free (reg_equiv_mem);
774 free (reg_equiv_init);
775 free (reg_equiv_address);
776 free (reg_max_ref_width);
777 free (reg_old_renumber);
778 free (pseudo_previous_regs);
779 free (pseudo_forbidden_regs);
780 return 0;
781 }
782 #endif
783
784 maybe_fix_stack_asms ();
785
786 insns_need_reload = 0;
787 something_needs_elimination = 0;
788
789 /* Initialize to -1, which means take the first spill register. */
790 last_spill_reg = -1;
791
792 spilled_pseudos = ALLOCA_REG_SET ();
793
794 /* Spill any hard regs that we know we can't eliminate. */
795 CLEAR_HARD_REG_SET (used_spill_regs);
796 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
797 if (! ep->can_eliminate)
798 spill_hard_reg (ep->from, dumpfile, 1);
799
800 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
801 if (frame_pointer_needed)
802 spill_hard_reg (HARD_FRAME_POINTER_REGNUM, dumpfile, 1);
803 #endif
804 finish_spills (global, dumpfile);
805
806 /* From now on, we need to emit any moves without making new pseudos. */
807 reload_in_progress = 1;
808
809 /* This loop scans the entire function each go-round
810 and repeats until one repetition spills no additional hard regs. */
811 for (;;)
812 {
813 int something_changed;
814 int did_spill;
815 struct insn_chain *chain;
816
817 HOST_WIDE_INT starting_frame_size;
818
819 /* Round size of stack frame to BIGGEST_ALIGNMENT. This must be done
820 here because the stack size may be a part of the offset computation
821 for register elimination, and there might have been new stack slots
822 created in the last iteration of this loop. */
823 assign_stack_local (BLKmode, 0, 0);
824
825 starting_frame_size = get_frame_size ();
826
827 set_initial_elim_offsets ();
828 set_initial_label_offsets ();
829
830 /* For each pseudo register that has an equivalent location defined,
831 try to eliminate any eliminable registers (such as the frame pointer)
832 assuming initial offsets for the replacement register, which
833 is the normal case.
834
835 If the resulting location is directly addressable, substitute
836 the MEM we just got directly for the old REG.
837
838 If it is not addressable but is a constant or the sum of a hard reg
839 and constant, it is probably not addressable because the constant is
840 out of range, in that case record the address; we will generate
841 hairy code to compute the address in a register each time it is
842 needed. Similarly if it is a hard register, but one that is not
843 valid as an address register.
844
845 If the location is not addressable, but does not have one of the
846 above forms, assign a stack slot. We have to do this to avoid the
847 potential of producing lots of reloads if, e.g., a location involves
848 a pseudo that didn't get a hard register and has an equivalent memory
849 location that also involves a pseudo that didn't get a hard register.
850
851 Perhaps at some point we will improve reload_when_needed handling
852 so this problem goes away. But that's very hairy. */
853
854 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
855 if (reg_renumber[i] < 0 && reg_equiv_memory_loc[i])
856 {
857 rtx x = eliminate_regs (reg_equiv_memory_loc[i], 0, NULL_RTX);
858
859 if (strict_memory_address_p (GET_MODE (regno_reg_rtx[i]),
860 XEXP (x, 0)))
861 reg_equiv_mem[i] = x, reg_equiv_address[i] = 0;
862 else if (CONSTANT_P (XEXP (x, 0))
863 || (GET_CODE (XEXP (x, 0)) == REG
864 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
865 || (GET_CODE (XEXP (x, 0)) == PLUS
866 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
867 && (REGNO (XEXP (XEXP (x, 0), 0))
868 < FIRST_PSEUDO_REGISTER)
869 && CONSTANT_P (XEXP (XEXP (x, 0), 1))))
870 reg_equiv_address[i] = XEXP (x, 0), reg_equiv_mem[i] = 0;
871 else
872 {
873 /* Make a new stack slot. Then indicate that something
874 changed so we go back and recompute offsets for
875 eliminable registers because the allocation of memory
876 below might change some offset. reg_equiv_{mem,address}
877 will be set up for this pseudo on the next pass around
878 the loop. */
879 reg_equiv_memory_loc[i] = 0;
880 reg_equiv_init[i] = 0;
881 alter_reg (i, -1);
882 }
883 }
884
885 if (caller_save_needed)
886 setup_save_areas ();
887
888 /* If we allocated another stack slot, redo elimination bookkeeping. */
889 if (starting_frame_size != get_frame_size ())
890 continue;
891
892 if (caller_save_needed)
893 {
894 save_call_clobbered_regs ();
895 /* That might have allocated new insn_chain structures. */
896 reload_firstobj = (char *) obstack_alloc (&reload_obstack, 0);
897 }
898
899 calculate_needs_all_insns (global);
900
901 CLEAR_REG_SET (spilled_pseudos);
902 did_spill = 0;
903
904 something_changed = 0;
905
906 /* If we allocated any new memory locations, make another pass
907 since it might have changed elimination offsets. */
908 if (starting_frame_size != get_frame_size ())
909 something_changed = 1;
910
911 {
912 HARD_REG_SET to_spill;
913 CLEAR_HARD_REG_SET (to_spill);
914 update_eliminables (&to_spill);
915 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
916 if (TEST_HARD_REG_BIT (to_spill, i))
917 {
918 spill_hard_reg (i, dumpfile, 1);
919 did_spill = 1;
920 }
921 }
922
923 CLEAR_HARD_REG_SET (used_spill_regs);
924 /* Try to satisfy the needs for each insn. */
925 for (chain = insns_need_reload; chain != 0;
926 chain = chain->next_need_reload)
927 find_reload_regs (chain, dumpfile);
928
929 if (failure)
930 goto failed;
931
932 if (insns_need_reload != 0 || did_spill)
933 something_changed |= finish_spills (global, dumpfile);
934
935 if (! something_changed)
936 break;
937
938 if (caller_save_needed)
939 delete_caller_save_insns ();
940 }
941
942 /* If global-alloc was run, notify it of any register eliminations we have
943 done. */
944 if (global)
945 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
946 if (ep->can_eliminate)
947 mark_elimination (ep->from, ep->to);
948
949 /* If a pseudo has no hard reg, delete the insns that made the equivalence.
950 If that insn didn't set the register (i.e., it copied the register to
951 memory), just delete that insn instead of the equivalencing insn plus
952 anything now dead. If we call delete_dead_insn on that insn, we may
953 delete the insn that actually sets the register if the register die
954 there and that is incorrect. */
955
956 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
957 if (reg_renumber[i] < 0 && reg_equiv_init[i] != 0
958 && GET_CODE (reg_equiv_init[i]) != NOTE)
959 {
960 if (reg_set_p (regno_reg_rtx[i], PATTERN (reg_equiv_init[i])))
961 delete_dead_insn (reg_equiv_init[i]);
962 else
963 {
964 PUT_CODE (reg_equiv_init[i], NOTE);
965 NOTE_SOURCE_FILE (reg_equiv_init[i]) = 0;
966 NOTE_LINE_NUMBER (reg_equiv_init[i]) = NOTE_INSN_DELETED;
967 }
968 }
969
970 /* Use the reload registers where necessary
971 by generating move instructions to move the must-be-register
972 values into or out of the reload registers. */
973
974 if (insns_need_reload != 0 || something_needs_elimination
975 || something_needs_operands_changed)
976 {
977 int old_frame_size = get_frame_size ();
978
979 reload_as_needed (global);
980
981 if (old_frame_size != get_frame_size ())
982 abort ();
983
984 if (num_eliminable)
985 verify_initial_elim_offsets ();
986 }
987
988 /* If we were able to eliminate the frame pointer, show that it is no
989 longer live at the start of any basic block. If it ls live by
990 virtue of being in a pseudo, that pseudo will be marked live
991 and hence the frame pointer will be known to be live via that
992 pseudo. */
993
994 if (! frame_pointer_needed)
995 for (i = 0; i < n_basic_blocks; i++)
996 CLEAR_REGNO_REG_SET (basic_block_live_at_start[i],
997 HARD_FRAME_POINTER_REGNUM);
998
999 /* Come here (with failure set nonzero) if we can't get enough spill regs
1000 and we decide not to abort about it. */
1001 failed:
1002
1003 reload_in_progress = 0;
1004
1005 /* Now eliminate all pseudo regs by modifying them into
1006 their equivalent memory references.
1007 The REG-rtx's for the pseudos are modified in place,
1008 so all insns that used to refer to them now refer to memory.
1009
1010 For a reg that has a reg_equiv_address, all those insns
1011 were changed by reloading so that no insns refer to it any longer;
1012 but the DECL_RTL of a variable decl may refer to it,
1013 and if so this causes the debugging info to mention the variable. */
1014
1015 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1016 {
1017 rtx addr = 0;
1018 int in_struct = 0;
1019 int is_readonly = 0;
1020
1021 if (reg_equiv_memory_loc[i])
1022 {
1023 in_struct = MEM_IN_STRUCT_P (reg_equiv_memory_loc[i]);
1024 is_readonly = RTX_UNCHANGING_P (reg_equiv_memory_loc[i]);
1025 }
1026
1027 if (reg_equiv_mem[i])
1028 addr = XEXP (reg_equiv_mem[i], 0);
1029
1030 if (reg_equiv_address[i])
1031 addr = reg_equiv_address[i];
1032
1033 if (addr)
1034 {
1035 if (reg_renumber[i] < 0)
1036 {
1037 rtx reg = regno_reg_rtx[i];
1038 XEXP (reg, 0) = addr;
1039 REG_USERVAR_P (reg) = 0;
1040 RTX_UNCHANGING_P (reg) = is_readonly;
1041 MEM_IN_STRUCT_P (reg) = in_struct;
1042 /* We have no alias information about this newly created
1043 MEM. */
1044 MEM_ALIAS_SET (reg) = 0;
1045 PUT_CODE (reg, MEM);
1046 }
1047 else if (reg_equiv_mem[i])
1048 XEXP (reg_equiv_mem[i], 0) = addr;
1049 }
1050 }
1051
1052 /* Make a pass over all the insns and delete all USEs which we inserted
1053 only to tag a REG_EQUAL note on them. Remove all REG_DEAD and REG_UNUSED
1054 notes. Delete all CLOBBER insns and simplify (subreg (reg)) operands. */
1055
1056 for (insn = first; insn; insn = NEXT_INSN (insn))
1057 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
1058 {
1059 rtx *pnote;
1060
1061 if ((GET_CODE (PATTERN (insn)) == USE
1062 && find_reg_note (insn, REG_EQUAL, NULL_RTX))
1063 || GET_CODE (PATTERN (insn)) == CLOBBER)
1064 {
1065 PUT_CODE (insn, NOTE);
1066 NOTE_SOURCE_FILE (insn) = 0;
1067 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1068 continue;
1069 }
1070
1071 pnote = &REG_NOTES (insn);
1072 while (*pnote != 0)
1073 {
1074 if (REG_NOTE_KIND (*pnote) == REG_DEAD
1075 || REG_NOTE_KIND (*pnote) == REG_UNUSED)
1076 *pnote = XEXP (*pnote, 1);
1077 else
1078 pnote = &XEXP (*pnote, 1);
1079 }
1080
1081 /* And simplify (subreg (reg)) if it appears as an operand. */
1082 cleanup_subreg_operands (insn);
1083 }
1084
1085 /* If we are doing stack checking, give a warning if this function's
1086 frame size is larger than we expect. */
1087 if (flag_stack_check && ! STACK_CHECK_BUILTIN)
1088 {
1089 HOST_WIDE_INT size = get_frame_size () + STACK_CHECK_FIXED_FRAME_SIZE;
1090
1091 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1092 if (regs_ever_live[i] && ! fixed_regs[i] && call_used_regs[i])
1093 size += UNITS_PER_WORD;
1094
1095 if (size > STACK_CHECK_MAX_FRAME_SIZE)
1096 warning ("frame size too large for reliable stack checking");
1097 }
1098
1099 /* Indicate that we no longer have known memory locations or constants. */
1100 if (reg_equiv_constant)
1101 free (reg_equiv_constant);
1102 reg_equiv_constant = 0;
1103 if (reg_equiv_memory_loc)
1104 free (reg_equiv_memory_loc);
1105 reg_equiv_memory_loc = 0;
1106
1107 if (real_known_ptr)
1108 free (real_known_ptr);
1109 if (real_at_ptr)
1110 free (real_at_ptr);
1111
1112 free (reg_equiv_mem);
1113 free (reg_equiv_init);
1114 free (reg_equiv_address);
1115 free (reg_max_ref_width);
1116 free (reg_old_renumber);
1117 free (pseudo_previous_regs);
1118 free (pseudo_forbidden_regs);
1119
1120 FREE_REG_SET (spilled_pseudos);
1121
1122 CLEAR_HARD_REG_SET (used_spill_regs);
1123 for (i = 0; i < n_spills; i++)
1124 SET_HARD_REG_BIT (used_spill_regs, spill_regs[i]);
1125
1126 /* Free all the insn_chain structures at once. */
1127 obstack_free (&reload_obstack, reload_startobj);
1128 unused_insn_chains = 0;
1129
1130 return failure;
1131 }
1132
/* Yet another special case.  Unfortunately, reg-stack forces people to
   write incorrect clobbers in asm statements.  These clobbers must not
   cause the register to appear in bad_spill_regs, otherwise we'll call
   fatal_insn later.  We clear the corresponding regnos in the live
   register sets to avoid this.
   The whole thing is rather sick, I'm afraid.

   On targets without STACK_REGS this function is a no-op.  */
static void
maybe_fix_stack_asms ()
{
#ifdef STACK_REGS
  char *constraints[MAX_RECOG_OPERANDS];
  enum machine_mode operand_mode[MAX_RECOG_OPERANDS];
  struct insn_chain *chain;

  for (chain = reload_insn_chain; chain != 0; chain = chain->next)
    {
      int i, noperands;
      HARD_REG_SET clobbered, allowed;
      rtx pat;

      /* Only asm insns with decodable operands are of interest here;
	 everything else is skipped.  */
      if (GET_RTX_CLASS (GET_CODE (chain->insn)) != 'i'
	  || (noperands = asm_noperands (PATTERN (chain->insn))) < 0)
	continue;
      pat = PATTERN (chain->insn);
      if (GET_CODE (pat) != PARALLEL)
	continue;

      CLEAR_HARD_REG_SET (clobbered);
      CLEAR_HARD_REG_SET (allowed);

      /* First, make a mask of all stack regs that are clobbered.  */
      for (i = 0; i < XVECLEN (pat, 0); i++)
	{
	  rtx t = XVECEXP (pat, 0, i);
	  if (GET_CODE (t) == CLOBBER && STACK_REG_P (XEXP (t, 0)))
	    SET_HARD_REG_BIT (clobbered, REGNO (XEXP (t, 0)));
	}

      /* Get the operand values and constraints out of the insn.  */
      decode_asm_operands (pat, recog_operand, recog_operand_loc,
			   constraints, operand_mode);

      /* For every operand, see what registers are allowed.  */
      for (i = 0; i < noperands; i++)
	{
	  char *p = constraints[i];
	  /* For every alternative, we compute the class of registers allowed
	     for reloading in CLS, and merge its contents into the reg set
	     ALLOWED.  */
	  int cls = (int) NO_REGS;

	  for (;;)
	    {
	      char c = *p++;

	      if (c == '\0' || c == ',' || c == '#')
		{
		  /* End of one alternative - mark the regs in the current
		     class, and reset the class.  */
		  IOR_HARD_REG_SET (allowed, reg_class_contents[cls]);
		  cls = NO_REGS;
		  /* A '#' starts a disabled alternative; skip to the next
		     ',' or the end of the constraint string.  */
		  if (c == '#')
		    do {
		      c = *p++;
		    } while (c != '\0' && c != ',');
		  if (c == '\0')
		    break;
		  continue;
		}

	      switch (c)
		{
		case '=': case '+': case '*': case '%': case '?': case '!':
		case '0': case '1': case '2': case '3': case '4': case 'm':
		case '<': case '>': case 'V': case 'o': case '&': case 'E':
		case 'F': case 's': case 'i': case 'n': case 'X': case 'I':
		case 'J': case 'K': case 'L': case 'M': case 'N': case 'O':
		case 'P':
#ifdef EXTRA_CONSTRAINT
		case 'Q': case 'R': case 'S': case 'T': case 'U':
#endif
		  /* These constraint letters add no register class.  */
		  break;

		case 'p':
		  cls = (int) reg_class_subunion[cls][(int) BASE_REG_CLASS];
		  break;

		case 'g':
		case 'r':
		  cls = (int) reg_class_subunion[cls][(int) GENERAL_REGS];
		  break;

		default:
		  /* Any other letter names a machine-specific register
		     class; merge it in.  */
		  cls = (int) reg_class_subunion[cls][(int) REG_CLASS_FROM_LETTER (c)];

		}
	    }
	}
      /* Those of the registers which are clobbered, but allowed by the
	 constraints, must be usable as reload registers.  So clear them
	 out of the life information.  */
      AND_HARD_REG_SET (allowed, clobbered);
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	if (TEST_HARD_REG_BIT (allowed, i))
	  {
	    CLEAR_REGNO_REG_SET (chain->live_before, i);
	    CLEAR_REGNO_REG_SET (chain->live_after, i);
	  }
    }

#endif
}
1245
1246 \f
/* Walk the chain of insns, and determine for each whether it needs reloads
   and/or eliminations.  Build the corresponding insns_need_reload list, and
   set something_needs_elimination as appropriate.

   GLOBAL is nonzero if global register allocation was run; it is passed
   through to find_reloads.  */
static void
calculate_needs_all_insns (global)
     int global;
{
  /* Tail pointer used to thread insns needing reload onto the
     insns_need_reload list in chain order.  */
  struct insn_chain **pprev_reload = &insns_need_reload;
  struct insn_chain **pchain;

  something_needs_elimination = 0;

  for (pchain = &reload_insn_chain; *pchain != 0; pchain = &(*pchain)->next)
    {
      rtx insn;
      struct insn_chain *chain;

      chain = *pchain;
      insn = chain->insn;

      /* If this is a label, a JUMP_INSN, or has REG_NOTES (which might
	 include REG_LABEL), we need to see what effects this has on the
	 known offsets at labels.  */

      if (GET_CODE (insn) == CODE_LABEL || GET_CODE (insn) == JUMP_INSN
	  || (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
	      && REG_NOTES (insn) != 0))
	set_label_offsets (insn, insn, 0);

      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	{
	  /* Save the insn's body so we can undo elimination below; this
	     pass only computes needs, it must not permanently rewrite
	     the insn.  */
	  rtx old_body = PATTERN (insn);
	  int old_code = INSN_CODE (insn);
	  rtx old_notes = REG_NOTES (insn);
	  int did_elimination = 0;
	  int operands_changed = 0;

	  /* If needed, eliminate any eliminable registers.  */
	  if (num_eliminable)
	    did_elimination = eliminate_regs_in_insn (insn, 0);

	  /* Analyze the instruction.  */
	  operands_changed = find_reloads (insn, 0, spill_indirect_levels,
					   global, spill_reg_order);

	  /* If a no-op set needs more than one reload, this is likely
	     to be something that needs input address reloads.  We
	     can't get rid of this cleanly later, and it is of no use
	     anyway, so discard it now.
	     We only do this when expensive_optimizations is enabled,
	     since this complements reload inheritance / output
	     reload deletion, and it can make debugging harder.  */
	  if (flag_expensive_optimizations && n_reloads > 1)
	    {
	      rtx set = single_set (insn);
	      /* NOTE(review): pointer equality is used to detect the
		 no-op set; presumably valid because pseudo REG rtxes are
		 shared -- confirm against regno_reg_rtx usage.  */
	      if (set
		  && SET_SRC (set) == SET_DEST (set)
		  && GET_CODE (SET_SRC (set)) == REG
		  && REGNO (SET_SRC (set)) >= FIRST_PSEUDO_REGISTER)
		{
		  /* Delete by turning the insn into a note, keeping the
		     chain structure intact.  */
		  PUT_CODE (insn, NOTE);
		  NOTE_SOURCE_FILE (insn) = 0;
		  NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
		  continue;
		}
	    }
	  if (num_eliminable)
	    update_eliminable_offsets ();

	  /* Remember for later shortcuts which insns had any reloads or
	     register eliminations.  */
	  chain->need_elim = did_elimination;
	  chain->need_reload = n_reloads > 0;
	  chain->need_operand_change = operands_changed;

	  /* Discard any register replacements done.  */
	  if (did_elimination)
	    {
	      obstack_free (&reload_obstack, reload_firstobj);
	      PATTERN (insn) = old_body;
	      INSN_CODE (insn) = old_code;
	      REG_NOTES (insn) = old_notes;
	      something_needs_elimination = 1;
	    }

	  something_needs_operands_changed |= operands_changed;

	  if (n_reloads != 0)
	    {
	      /* Thread this insn onto insns_need_reload and record its
		 per-class register needs.  */
	      *pprev_reload = chain;
	      pprev_reload = &chain->next_need_reload;

	      calculate_needs (chain);
	    }
	}
    }
  *pprev_reload = 0;
}
1345
1346 /* Compute the most additional registers needed by one instruction,
1347 given by CHAIN. Collect information separately for each class of regs.
1348
1349 To compute the number of reload registers of each class needed for an
1350 insn, we must simulate what choose_reload_regs can do. We do this by
1351 splitting an insn into an "input" and an "output" part. RELOAD_OTHER
1352 reloads are used in both. The input part uses those reloads,
1353 RELOAD_FOR_INPUT reloads, which must be live over the entire input section
1354 of reloads, and the maximum of all the RELOAD_FOR_INPUT_ADDRESS and
1355 RELOAD_FOR_OPERAND_ADDRESS reloads, which conflict with the inputs.
1356
1357 The registers needed for output are RELOAD_OTHER and RELOAD_FOR_OUTPUT,
1358 which are live for the entire output portion, and the maximum of all the
1359 RELOAD_FOR_OUTPUT_ADDRESS reloads for each operand.
1360
1361 The total number of registers needed is the maximum of the
1362 inputs and outputs. */
1363
1364 static void
1365 calculate_needs (chain)
1366 struct insn_chain *chain;
1367 {
1368 int i;
1369
1370 /* Each `struct needs' corresponds to one RELOAD_... type. */
1371 struct {
1372 struct needs other;
1373 struct needs input;
1374 struct needs output;
1375 struct needs insn;
1376 struct needs other_addr;
1377 struct needs op_addr;
1378 struct needs op_addr_reload;
1379 struct needs in_addr[MAX_RECOG_OPERANDS];
1380 struct needs in_addr_addr[MAX_RECOG_OPERANDS];
1381 struct needs out_addr[MAX_RECOG_OPERANDS];
1382 struct needs out_addr_addr[MAX_RECOG_OPERANDS];
1383 } insn_needs;
1384
1385 bzero ((char *) chain->group_size, sizeof chain->group_size);
1386 for (i = 0; i < N_REG_CLASSES; i++)
1387 chain->group_mode[i] = VOIDmode;
1388 bzero ((char *) &insn_needs, sizeof insn_needs);
1389
1390 /* Count each reload once in every class
1391 containing the reload's own class. */
1392
1393 for (i = 0; i < n_reloads; i++)
1394 {
1395 register enum reg_class *p;
1396 enum reg_class class = reload_reg_class[i];
1397 int size;
1398 enum machine_mode mode;
1399 struct needs *this_needs;
1400
1401 /* Don't count the dummy reloads, for which one of the
1402 regs mentioned in the insn can be used for reloading.
1403 Don't count optional reloads.
1404 Don't count reloads that got combined with others. */
1405 if (reload_reg_rtx[i] != 0
1406 || reload_optional[i] != 0
1407 || (reload_out[i] == 0 && reload_in[i] == 0
1408 && ! reload_secondary_p[i]))
1409 continue;
1410
1411 mode = reload_inmode[i];
1412 if (GET_MODE_SIZE (reload_outmode[i]) > GET_MODE_SIZE (mode))
1413 mode = reload_outmode[i];
1414 size = CLASS_MAX_NREGS (class, mode);
1415
1416 /* Decide which time-of-use to count this reload for. */
1417 switch (reload_when_needed[i])
1418 {
1419 case RELOAD_OTHER:
1420 this_needs = &insn_needs.other;
1421 break;
1422 case RELOAD_FOR_INPUT:
1423 this_needs = &insn_needs.input;
1424 break;
1425 case RELOAD_FOR_OUTPUT:
1426 this_needs = &insn_needs.output;
1427 break;
1428 case RELOAD_FOR_INSN:
1429 this_needs = &insn_needs.insn;
1430 break;
1431 case RELOAD_FOR_OTHER_ADDRESS:
1432 this_needs = &insn_needs.other_addr;
1433 break;
1434 case RELOAD_FOR_INPUT_ADDRESS:
1435 this_needs = &insn_needs.in_addr[reload_opnum[i]];
1436 break;
1437 case RELOAD_FOR_INPADDR_ADDRESS:
1438 this_needs = &insn_needs.in_addr_addr[reload_opnum[i]];
1439 break;
1440 case RELOAD_FOR_OUTPUT_ADDRESS:
1441 this_needs = &insn_needs.out_addr[reload_opnum[i]];
1442 break;
1443 case RELOAD_FOR_OUTADDR_ADDRESS:
1444 this_needs = &insn_needs.out_addr_addr[reload_opnum[i]];
1445 break;
1446 case RELOAD_FOR_OPERAND_ADDRESS:
1447 this_needs = &insn_needs.op_addr;
1448 break;
1449 case RELOAD_FOR_OPADDR_ADDR:
1450 this_needs = &insn_needs.op_addr_reload;
1451 break;
1452 }
1453
1454 if (size > 1)
1455 {
1456 enum machine_mode other_mode, allocate_mode;
1457
1458 /* Count number of groups needed separately from
1459 number of individual regs needed. */
1460 this_needs->groups[(int) class]++;
1461 p = reg_class_superclasses[(int) class];
1462 while (*p != LIM_REG_CLASSES)
1463 this_needs->groups[(int) *p++]++;
1464
1465 /* Record size and mode of a group of this class. */
1466 /* If more than one size group is needed,
1467 make all groups the largest needed size. */
1468 if (chain->group_size[(int) class] < size)
1469 {
1470 other_mode = chain->group_mode[(int) class];
1471 allocate_mode = mode;
1472
1473 chain->group_size[(int) class] = size;
1474 chain->group_mode[(int) class] = mode;
1475 }
1476 else
1477 {
1478 other_mode = mode;
1479 allocate_mode = chain->group_mode[(int) class];
1480 }
1481
1482 /* Crash if two dissimilar machine modes both need
1483 groups of consecutive regs of the same class. */
1484
1485 if (other_mode != VOIDmode && other_mode != allocate_mode
1486 && ! modes_equiv_for_class_p (allocate_mode,
1487 other_mode, class))
1488 fatal_insn ("Two dissimilar machine modes both need groups of consecutive regs of the same class",
1489 chain->insn);
1490 }
1491 else if (size == 1)
1492 {
1493 this_needs->regs[(unsigned char)reload_nongroup[i]][(int) class] += 1;
1494 p = reg_class_superclasses[(int) class];
1495 while (*p != LIM_REG_CLASSES)
1496 this_needs->regs[(unsigned char)reload_nongroup[i]][(int) *p++] += 1;
1497 }
1498 else
1499 abort ();
1500 }
1501
1502 /* All reloads have been counted for this insn;
1503 now merge the various times of use.
1504 This sets insn_needs, etc., to the maximum total number
1505 of registers needed at any point in this insn. */
1506
1507 for (i = 0; i < N_REG_CLASSES; i++)
1508 {
1509 int j, in_max, out_max;
1510
1511 /* Compute normal and nongroup needs. */
1512 for (j = 0; j <= 1; j++)
1513 {
1514 int k;
1515 for (in_max = 0, out_max = 0, k = 0; k < reload_n_operands; k++)
1516 {
1517 in_max = MAX (in_max,
1518 (insn_needs.in_addr[k].regs[j][i]
1519 + insn_needs.in_addr_addr[k].regs[j][i]));
1520 out_max = MAX (out_max, insn_needs.out_addr[k].regs[j][i]);
1521 out_max = MAX (out_max,
1522 insn_needs.out_addr_addr[k].regs[j][i]);
1523 }
1524
1525 /* RELOAD_FOR_INSN reloads conflict with inputs, outputs,
1526 and operand addresses but not things used to reload
1527 them. Similarly, RELOAD_FOR_OPERAND_ADDRESS reloads
1528 don't conflict with things needed to reload inputs or
1529 outputs. */
1530
1531 in_max = MAX (MAX (insn_needs.op_addr.regs[j][i],
1532 insn_needs.op_addr_reload.regs[j][i]),
1533 in_max);
1534
1535 out_max = MAX (out_max, insn_needs.insn.regs[j][i]);
1536
1537 insn_needs.input.regs[j][i]
1538 = MAX (insn_needs.input.regs[j][i]
1539 + insn_needs.op_addr.regs[j][i]
1540 + insn_needs.insn.regs[j][i],
1541 in_max + insn_needs.input.regs[j][i]);
1542
1543 insn_needs.output.regs[j][i] += out_max;
1544 insn_needs.other.regs[j][i]
1545 += MAX (MAX (insn_needs.input.regs[j][i],
1546 insn_needs.output.regs[j][i]),
1547 insn_needs.other_addr.regs[j][i]);
1548
1549 }
1550
1551 /* Now compute group needs. */
1552 for (in_max = 0, out_max = 0, j = 0; j < reload_n_operands; j++)
1553 {
1554 in_max = MAX (in_max, insn_needs.in_addr[j].groups[i]);
1555 in_max = MAX (in_max, insn_needs.in_addr_addr[j].groups[i]);
1556 out_max = MAX (out_max, insn_needs.out_addr[j].groups[i]);
1557 out_max = MAX (out_max, insn_needs.out_addr_addr[j].groups[i]);
1558 }
1559
1560 in_max = MAX (MAX (insn_needs.op_addr.groups[i],
1561 insn_needs.op_addr_reload.groups[i]),
1562 in_max);
1563 out_max = MAX (out_max, insn_needs.insn.groups[i]);
1564
1565 insn_needs.input.groups[i]
1566 = MAX (insn_needs.input.groups[i]
1567 + insn_needs.op_addr.groups[i]
1568 + insn_needs.insn.groups[i],
1569 in_max + insn_needs.input.groups[i]);
1570
1571 insn_needs.output.groups[i] += out_max;
1572 insn_needs.other.groups[i]
1573 += MAX (MAX (insn_needs.input.groups[i],
1574 insn_needs.output.groups[i]),
1575 insn_needs.other_addr.groups[i]);
1576 }
1577
1578 /* Record the needs for later. */
1579 chain->need = insn_needs.other;
1580 }
1581 \f
/* Find a group of exactly 2 registers.

   First try to fill out the group by spilling a single register which
   would allow completion of the group.

   Then try to create a new group from a pair of registers, neither of
   which are explicitly used.

   Then try to create a group from any pair of registers.  */

static void
find_tworeg_group (chain, class, dumpfile)
     struct insn_chain *chain;
     int class;
     FILE *dumpfile;
{
  int i;
  /* First, look for a register that will complete a group.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      int j, other;

      j = potential_reload_regs[i];
      /* J completes a group if either J-1 (first arm) or J+1 (second
	 arm) is already spilled, both regs are in CLASS, the pair is
	 valid for the group's mode, and the partner is not already
	 committed as a nongroup reg or part of another group.  */
      if (j >= 0 && ! TEST_HARD_REG_BIT (bad_spill_regs, j)
	  && ((j > 0 && (other = j - 1, spill_reg_order[other] >= 0)
	       && TEST_HARD_REG_BIT (reg_class_contents[class], j)
	       && TEST_HARD_REG_BIT (reg_class_contents[class], other)
	       && HARD_REGNO_MODE_OK (other, chain->group_mode[class])
	       && ! TEST_HARD_REG_BIT (chain->counted_for_nongroups, other)
	       /* We don't want one part of another group.
		  We could get "two groups" that overlap!  */
	       && ! TEST_HARD_REG_BIT (chain->counted_for_groups, other))
	      || (j < FIRST_PSEUDO_REGISTER - 1
		  && (other = j + 1, spill_reg_order[other] >= 0)
		  && TEST_HARD_REG_BIT (reg_class_contents[class], j)
		  && TEST_HARD_REG_BIT (reg_class_contents[class], other)
		  && HARD_REGNO_MODE_OK (j, chain->group_mode[class])
		  && ! TEST_HARD_REG_BIT (chain->counted_for_nongroups, other)
		  && ! TEST_HARD_REG_BIT (chain->counted_for_groups, other))))
	{
	  register enum reg_class *p;

	  /* We have found one that will complete a group,
	     so count off one group as provided.  */
	  chain->need.groups[class]--;
	  p = reg_class_superclasses[class];
	  while (*p != LIM_REG_CLASSES)
	    {
	      /* A group of our size also satisfies any superclass
		 whose required group is no larger.  */
	      if (chain->group_size [(int) *p] <= chain->group_size [class])
		chain->need.groups[(int) *p]--;
	      p++;
	    }

	  /* Indicate both these regs are part of a group.  */
	  SET_HARD_REG_BIT (chain->counted_for_groups, j);
	  SET_HARD_REG_BIT (chain->counted_for_groups, other);
	  break;
	}
    }
  /* We can't complete a group, so start one.  */
  if (i == FIRST_PSEUDO_REGISTER)
    for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
      {
	int j, k;
	j = potential_reload_regs[i];
	/* Verify that J+1 is a potential reload reg.  */
	for (k = 0; k < FIRST_PSEUDO_REGISTER; k++)
	  if (potential_reload_regs[k] == j + 1)
	    break;
	/* J starts a fresh group if both J and J+1 are unspilled, in
	   CLASS, usable in the group's mode, and not otherwise
	   committed.  */
	if (j >= 0 && j + 1 < FIRST_PSEUDO_REGISTER
	    && k < FIRST_PSEUDO_REGISTER
	    && spill_reg_order[j] < 0 && spill_reg_order[j + 1] < 0
	    && TEST_HARD_REG_BIT (reg_class_contents[class], j)
	    && TEST_HARD_REG_BIT (reg_class_contents[class], j + 1)
	    && HARD_REGNO_MODE_OK (j, chain->group_mode[class])
	    && ! TEST_HARD_REG_BIT (chain->counted_for_nongroups, j + 1)
	    && ! TEST_HARD_REG_BIT (bad_spill_regs, j + 1))
	  break;
      }

  /* I should be the index in potential_reload_regs
     of the new reload reg we have found.  */

  /* NOTE(review): if neither search succeeded, I equals
     FIRST_PSEUDO_REGISTER here; presumably new_spill_reg reports the
     spill failure in that case -- confirm against its definition.  */
  new_spill_reg (chain, i, class, 0, dumpfile);
}
1667
/* Find a group of more than 2 registers.
   Look for a sufficient sequence of unspilled registers, and spill them all
   at once.

   CHAIN describes the insn being spilled for, CLASS is the register class
   needing the group, and DUMPFILE receives debug output.  On failure the
   global `failure' flag is set after reporting via spill_failure.  */

static void
find_group (chain, class, dumpfile)
     struct insn_chain *chain;
     int class;
     FILE *dumpfile;
{
  int i;

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      int j = potential_reload_regs[i];

      /* J can start a group only if the whole sequence fits below
	 FIRST_PSEUDO_REGISTER and J is valid for the group's mode.  */
      if (j >= 0
	  && j + chain->group_size[class] <= FIRST_PSEUDO_REGISTER
	  && HARD_REGNO_MODE_OK (j, chain->group_mode[class]))
	{
	  int k;
	  /* Check each reg in the sequence: it must be unspilled,
	     spillable, and in CLASS.  */
	  for (k = 0; k < chain->group_size[class]; k++)
	    if (! (spill_reg_order[j + k] < 0
		   && ! TEST_HARD_REG_BIT (bad_spill_regs, j + k)
		   && TEST_HARD_REG_BIT (reg_class_contents[class], j + k)))
	      break;
	  /* We got a full sequence, so spill them all.  */
	  if (k == chain->group_size[class])
	    {
	      register enum reg_class *p;
	      for (k = 0; k < chain->group_size[class]; k++)
		{
		  int idx;
		  SET_HARD_REG_BIT (chain->counted_for_groups, j + k);
		  /* new_spill_reg takes an index into
		     potential_reload_regs, so look up J+K there.  */
		  for (idx = 0; idx < FIRST_PSEUDO_REGISTER; idx++)
		    if (potential_reload_regs[idx] == j + k)
		      break;
		  new_spill_reg (chain, idx, class, 0, dumpfile);
		}

	      /* We have found one that will complete a group,
		 so count off one group as provided.  */
	      chain->need.groups[class]--;
	      p = reg_class_superclasses[class];
	      while (*p != LIM_REG_CLASSES)
		{
		  /* A group of our size also satisfies any superclass
		     whose required group is no larger.  */
		  if (chain->group_size [(int) *p]
		      <= chain->group_size [class])
		    chain->need.groups[(int) *p]--;
		  p++;
		}
	      return;
	    }
	}
    }
  /* There are no groups left.  */
  spill_failure (chain->insn);
  failure = 1;
}
1728
1729 /* If pseudo REG conflicts with one of our reload registers, mark it as
1730 spilled. */
1731 static void
1732 maybe_mark_pseudo_spilled (reg)
1733 int reg;
1734 {
1735 int i;
1736 int r = reg_renumber[reg];
1737 int nregs;
1738
1739 if (r < 0)
1740 abort ();
1741 nregs = HARD_REGNO_NREGS (r, PSEUDO_REGNO_MODE (reg));
1742 for (i = 0; i < n_spills; i++)
1743 if (r <= spill_regs[i] && r + nregs > spill_regs[i])
1744 {
1745 SET_REGNO_REG_SET (spilled_pseudos, reg);
1746 return;
1747 }
1748 }
1749
1750 /* Find more reload regs to satisfy the remaining need of an insn, which
1751 is given by CHAIN.
1752 Do it by ascending class number, since otherwise a reg
1753 might be spilled for a big class and might fail to count
1754 for a smaller class even though it belongs to that class.
1755
1756 Count spilled regs in `spills', and add entries to
1757 `spill_regs' and `spill_reg_order'.
1758
1759 ??? Note there is a problem here.
1760 When there is a need for a group in a high-numbered class,
1761 and also need for non-group regs that come from a lower class,
1762 the non-group regs are chosen first. If there aren't many regs,
1763 they might leave no room for a group.
1764
1765 This was happening on the 386. To fix it, we added the code
1766 that calls possible_group_p, so that the lower class won't
1767 break up the last possible group.
1768
1769 Really fixing the problem would require changes above
1770 in counting the regs already spilled, and in choose_reload_regs.
1771 It might be hard to avoid introducing bugs there. */
1772
static void
find_reload_regs (chain, dumpfile)
     struct insn_chain *chain;
     FILE *dumpfile;
{
  int i, class;
  /* Shorthands for the three kinds of need recorded in CHAIN:
     whole register groups, ordinary single regs, and single regs
     that must not form part of any group.  */
  short *group_needs = chain->need.groups;
  short *simple_needs = chain->need.regs[0];
  short *nongroup_needs = chain->need.regs[1];

  if (dumpfile)
    fprintf (dumpfile, "Spilling for insn %d.\n", INSN_UID (chain->insn));

  /* Compute the order of preference for hard registers to spill.
     Store them by decreasing preference in potential_reload_regs.  */

  order_regs_for_reload (chain);

  /* So far, no hard regs have been spilled.  */
  n_spills = 0;
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    spill_reg_order[i] = -1;

  CLEAR_HARD_REG_SET (chain->used_spill_regs);
  CLEAR_HARD_REG_SET (chain->counted_for_groups);
  CLEAR_HARD_REG_SET (chain->counted_for_nongroups);

  /* Satisfy needs class by class, in ascending class number (see the
     comment above this function for why that order matters).  */
  for (class = 0; class < N_REG_CLASSES; class++)
    {
      /* First get the groups of registers.
	 If we got single registers first, we might fragment
	 possible groups.  */
      while (group_needs[class] > 0)
	{
	  /* If any single spilled regs happen to form groups,
	     count them now.  Maybe we don't really need
	     to spill another group.  */
	  count_possible_groups (chain, class);

	  if (group_needs[class] <= 0)
	    break;

	  /* Groups of size 2, the only groups used on most machines,
	     are treated specially.  */
	  if (chain->group_size[class] == 2)
	    find_tworeg_group (chain, class, dumpfile);
	  else
	    find_group (chain, class, dumpfile);
	  /* FAILURE is a file-level flag set when no suitable reg
	     could be found; give up on this insn.  */
	  if (failure)
	    return;
	}

      /* Now similarly satisfy all need for single registers.  */

      while (simple_needs[class] > 0 || nongroup_needs[class] > 0)
	{
	  /* If we spilled enough regs, but they weren't counted
	     against the non-group need, see if we can count them now.
	     If so, we can avoid some actual spilling.  */
	  if (simple_needs[class] <= 0 && nongroup_needs[class] > 0)
	    for (i = 0; i < n_spills; i++)
	      {
		int regno = spill_regs[i];
		if (TEST_HARD_REG_BIT (reg_class_contents[class], regno)
		    && !TEST_HARD_REG_BIT (chain->counted_for_groups, regno)
		    && !TEST_HARD_REG_BIT (chain->counted_for_nongroups, regno)
		    && nongroup_needs[class] > 0)
		  {
		    register enum reg_class *p;

		    /* Count this spilled reg against the nongroup need of
		       CLASS and of every superclass of CLASS.  */
		    SET_HARD_REG_BIT (chain->counted_for_nongroups, regno);
		    nongroup_needs[class]--;
		    p = reg_class_superclasses[class];
		    while (*p != LIM_REG_CLASSES)
		      nongroup_needs[(int) *p++]--;
		  }
	      }

	  if (simple_needs[class] <= 0 && nongroup_needs[class] <= 0)
	    break;

	  /* Consider the potential reload regs that aren't
	     yet in use as reload regs, in order of preference.
	     Find the most preferred one that's in this class.  */

	  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	    {
	      int regno = potential_reload_regs[i];
	      if (regno >= 0
		  && TEST_HARD_REG_BIT (reg_class_contents[class], regno)
		  /* If this reg will not be available for groups,
		     pick one that does not foreclose possible groups.
		     This is a kludge, and not very general,
		     but it should be sufficient to make the 386 work,
		     and the problem should not occur on machines with
		     more registers.  */
		  && (nongroup_needs[class] == 0
		      || possible_group_p (chain, regno)))
		break;
	    }

	  /* If we couldn't get a register, try to get one even if we
	     might foreclose possible groups.  This may cause problems
	     later, but that's better than aborting now, since it is
	     possible that we will, in fact, be able to form the needed
	     group even with this allocation.  */

	  /* NOTE(review): the sibling routines new_spill_reg and
	     spill_failure pass PATTERN (insn) to asm_noperands, but here
	     the insn itself is passed -- confirm this is intended.  */
	  if (i >= FIRST_PSEUDO_REGISTER
	      && asm_noperands (chain->insn) < 0)
	    for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	      if (potential_reload_regs[i] >= 0
		  && TEST_HARD_REG_BIT (reg_class_contents[class],
					potential_reload_regs[i]))
		break;

	  /* I should be the index in potential_reload_regs
	     of the new reload reg we have found.  */

	  new_spill_reg (chain, i, class, 1, dumpfile);
	  if (failure)
	    return;
	}
    }

  /* We know which hard regs to use, now mark the pseudos that live in them
     as needing to be kicked out.  */
  EXECUTE_IF_SET_IN_REG_SET
    (chain->live_before, FIRST_PSEUDO_REGISTER, i,
     {
       maybe_mark_pseudo_spilled (i);
     });
  EXECUTE_IF_SET_IN_REG_SET
    (chain->live_after, FIRST_PSEUDO_REGISTER, i,
     {
       maybe_mark_pseudo_spilled (i);
     });

  /* Accumulate this insn's spill regs into the global set.  */
  IOR_HARD_REG_SET (used_spill_regs, chain->used_spill_regs);
}
1912
1913 void
1914 dump_needs (chain, dumpfile)
1915 struct insn_chain *chain;
1916 FILE *dumpfile;
1917 {
1918 static char *reg_class_names[] = REG_CLASS_NAMES;
1919 int i;
1920 struct needs *n = &chain->need;
1921
1922 for (i = 0; i < N_REG_CLASSES; i++)
1923 {
1924 if (n->regs[i][0] > 0)
1925 fprintf (dumpfile,
1926 ";; Need %d reg%s of class %s.\n",
1927 n->regs[i][0], n->regs[i][0] == 1 ? "" : "s",
1928 reg_class_names[i]);
1929 if (n->regs[i][1] > 0)
1930 fprintf (dumpfile,
1931 ";; Need %d nongroup reg%s of class %s.\n",
1932 n->regs[i][1], n->regs[i][1] == 1 ? "" : "s",
1933 reg_class_names[i]);
1934 if (n->groups[i] > 0)
1935 fprintf (dumpfile,
1936 ";; Need %d group%s (%smode) of class %s.\n",
1937 n->groups[i], n->groups[i] == 1 ? "" : "s",
1938 mode_name[(int) chain->group_mode[i]],
1939 reg_class_names[i]);
1940 }
1941 }
1942 \f
/* Delete all insns that were inserted by emit_caller_save_insns during
   this iteration.  */
static void
delete_caller_save_insns ()
{
  struct insn_chain *c = reload_insn_chain;

  while (c != 0)
    {
      /* Remove each maximal run of caller-save insns starting at C.  */
      while (c != 0 && c->is_caller_save_insn)
	{
	  struct insn_chain *next = c->next;
	  rtx insn = c->insn;

	  /* If INSN delimits its basic block, move the block boundary
	     past it so the block stays valid.  */
	  if (insn == basic_block_head[c->block])
	    basic_block_head[c->block] = NEXT_INSN (insn);
	  if (insn == basic_block_end[c->block])
	    basic_block_end[c->block] = PREV_INSN (insn);
	  /* Likewise keep the head of the chain list valid.  */
	  if (c == reload_insn_chain)
	    reload_insn_chain = next;

	  /* Unlink INSN from the doubly-linked insn list.  */
	  if (NEXT_INSN (insn) != 0)
	    PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
	  if (PREV_INSN (insn) != 0)
	    NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);

	  /* Unlink C from the insn_chain list and put it on the free
	     list for later reuse.  */
	  if (next)
	    next->prev = c->prev;
	  if (c->prev)
	    c->prev->next = next;
	  c->next = unused_insn_chains;
	  unused_insn_chains = c;
	  c = next;
	}
      if (c != 0)
	c = c->next;
    }
}
1981 \f
1982 /* Nonzero if, after spilling reg REGNO for non-groups,
1983 it will still be possible to find a group if we still need one. */
1984
1985 static int
1986 possible_group_p (chain, regno)
1987 struct insn_chain *chain;
1988 int regno;
1989 {
1990 int i;
1991 int class = (int) NO_REGS;
1992
1993 for (i = 0; i < (int) N_REG_CLASSES; i++)
1994 if (chain->need.groups[i] > 0)
1995 {
1996 class = i;
1997 break;
1998 }
1999
2000 if (class == (int) NO_REGS)
2001 return 1;
2002
2003 /* Consider each pair of consecutive registers. */
2004 for (i = 0; i < FIRST_PSEUDO_REGISTER - 1; i++)
2005 {
2006 /* Ignore pairs that include reg REGNO. */
2007 if (i == regno || i + 1 == regno)
2008 continue;
2009
2010 /* Ignore pairs that are outside the class that needs the group.
2011 ??? Here we fail to handle the case where two different classes
2012 independently need groups. But this never happens with our
2013 current machine descriptions. */
2014 if (! (TEST_HARD_REG_BIT (reg_class_contents[class], i)
2015 && TEST_HARD_REG_BIT (reg_class_contents[class], i + 1)))
2016 continue;
2017
2018 /* A pair of consecutive regs we can still spill does the trick. */
2019 if (spill_reg_order[i] < 0 && spill_reg_order[i + 1] < 0
2020 && ! TEST_HARD_REG_BIT (bad_spill_regs, i)
2021 && ! TEST_HARD_REG_BIT (bad_spill_regs, i + 1))
2022 return 1;
2023
2024 /* A pair of one already spilled and one we can spill does it
2025 provided the one already spilled is not otherwise reserved. */
2026 if (spill_reg_order[i] < 0
2027 && ! TEST_HARD_REG_BIT (bad_spill_regs, i)
2028 && spill_reg_order[i + 1] >= 0
2029 && ! TEST_HARD_REG_BIT (chain->counted_for_groups, i + 1)
2030 && ! TEST_HARD_REG_BIT (chain->counted_for_nongroups, i + 1))
2031 return 1;
2032 if (spill_reg_order[i + 1] < 0
2033 && ! TEST_HARD_REG_BIT (bad_spill_regs, i + 1)
2034 && spill_reg_order[i] >= 0
2035 && ! TEST_HARD_REG_BIT (chain->counted_for_groups, i)
2036 && ! TEST_HARD_REG_BIT (chain->counted_for_nongroups, i))
2037 return 1;
2038 }
2039
2040 return 0;
2041 }
2042
/* Count any groups of CLASS that can be formed from the registers recently
   spilled.  */

static void
count_possible_groups (chain, class)
     struct insn_chain *chain;
     int class;
{
  HARD_REG_SET new;
  int i, j;

  /* Now find all consecutive groups of spilled registers
     and mark each group off against the need for such groups.
     But don't count them against ordinary need, yet.  */

  if (chain->group_size[class] == 0)
    return;

  CLEAR_HARD_REG_SET (new);

  /* Make a mask of all the regs that are spill regs in class I.  */
  for (i = 0; i < n_spills; i++)
    {
      int regno = spill_regs[i];

      /* Only regs not already counted toward a group or nongroup need
	 are available to form new groups.  */
      if (TEST_HARD_REG_BIT (reg_class_contents[class], regno)
	  && ! TEST_HARD_REG_BIT (chain->counted_for_groups, regno)
	  && ! TEST_HARD_REG_BIT (chain->counted_for_nongroups, regno))
	SET_HARD_REG_BIT (new, regno);
    }

  /* Find each consecutive group of them.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER && chain->need.groups[class] > 0; i++)
    if (TEST_HARD_REG_BIT (new, i)
	&& i + chain->group_size[class] <= FIRST_PSEUDO_REGISTER
	&& HARD_REGNO_MODE_OK (i, chain->group_mode[class]))
      {
	/* Verify that all group_size regs starting at I are available.  */
	for (j = 1; j < chain->group_size[class]; j++)
	  if (! TEST_HARD_REG_BIT (new, i + j))
	    break;

	if (j == chain->group_size[class])
	  {
	    /* We found a group.  Mark it off against this class's need for
	       groups, and against each superclass too.  */
	    register enum reg_class *p;

	    chain->need.groups[class]--;
	    p = reg_class_superclasses[class];
	    while (*p != LIM_REG_CLASSES)
	      {
		/* A superclass needing larger groups is not satisfied
		   by this one; leave its count alone.  */
		if (chain->group_size [(int) *p] <= chain->group_size [class])
		  chain->need.groups[(int) *p]--;
		p++;
	      }

	    /* Don't count these registers again.  */
	    for (j = 0; j < chain->group_size[class]; j++)
	      SET_HARD_REG_BIT (chain->counted_for_groups, i + j);
	  }

	/* Skip to the last reg in this group.  When i is incremented above,
	   it will then point to the first reg of the next possible group.  */
	i += j - 1;
      }
}
2109 \f
2110 /* ALLOCATE_MODE is a register mode that needs to be reloaded. OTHER_MODE is
2111 another mode that needs to be reloaded for the same register class CLASS.
2112 If any reg in CLASS allows ALLOCATE_MODE but not OTHER_MODE, fail.
2113 ALLOCATE_MODE will never be smaller than OTHER_MODE.
2114
2115 This code used to also fail if any reg in CLASS allows OTHER_MODE but not
2116 ALLOCATE_MODE. This test is unnecessary, because we will never try to put
2117 something of mode ALLOCATE_MODE into an OTHER_MODE register. Testing this
2118 causes unnecessary failures on machines requiring alignment of register
2119 groups when the two modes are different sizes, because the larger mode has
2120 more strict alignment rules than the smaller mode. */
2121
2122 static int
2123 modes_equiv_for_class_p (allocate_mode, other_mode, class)
2124 enum machine_mode allocate_mode, other_mode;
2125 enum reg_class class;
2126 {
2127 register int regno;
2128 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2129 {
2130 if (TEST_HARD_REG_BIT (reg_class_contents[(int) class], regno)
2131 && HARD_REGNO_MODE_OK (regno, allocate_mode)
2132 && ! HARD_REGNO_MODE_OK (regno, other_mode))
2133 return 0;
2134 }
2135 return 1;
2136 }
2137 \f
2138 /* Handle the failure to find a register to spill.
2139 INSN should be one of the insns which needed this particular spill reg. */
2140
2141 static void
2142 spill_failure (insn)
2143 rtx insn;
2144 {
2145 if (asm_noperands (PATTERN (insn)) >= 0)
2146 error_for_asm (insn, "`asm' needs too many reloads");
2147 else
2148 fatal_insn ("Unable to find a register to spill.", insn);
2149 }
2150
/* Add a new register to the tables of available spill-registers.
   CHAIN is the insn for which the register will be used; we decrease the
   needs of that insn.
   I is the index of this register in potential_reload_regs.
   CLASS is the regclass whose need is being satisfied.
   NONGROUP is 0 if this register is part of a group.
   DUMPFILE is the same as the one that `reload' got.

   On failure, sets the file-level flag `failure' and returns without
   recording the register.  */

static void
new_spill_reg (chain, i, class, nongroup, dumpfile)
     struct insn_chain *chain;
     int i;
     int class;
     int nongroup;
     FILE *dumpfile;
{
  register enum reg_class *p;
  int regno = potential_reload_regs[i];

  /* An index past the end means the caller ran out of candidates.  */
  if (i >= FIRST_PSEUDO_REGISTER)
    {
      spill_failure (chain->insn);
      failure = 1;
      return;
    }

  /* Spilling a fixed or otherwise forbidden register is an error, not
     something we can recover from.  */
  if (TEST_HARD_REG_BIT (bad_spill_regs, regno))
    {
      static char *reg_class_names[] = REG_CLASS_NAMES;

      if (asm_noperands (PATTERN (chain->insn)) < 0)
	{
	  /* The error message is still correct - we know only that it wasn't
	     an asm statement that caused the problem, but one of the global
	     registers declared by the users might have screwed us.  */
	  error ("fixed or forbidden register %d (%s) was spilled for class %s.",
		 regno, reg_names[regno], reg_class_names[class]);
	  error ("This may be due to a compiler bug or to impossible asm");
	  error ("statements or clauses.");
	  fatal_insn ("This is the instruction:", chain->insn);
	}
      error_for_asm (chain->insn, "Invalid `asm' statement:");
      error_for_asm (chain->insn,
		     "fixed or forbidden register %d (%s) was spilled for class %s.",
		     regno, reg_names[regno], reg_class_names[class]);
      failure = 1;
      return;
    }

  /* Make reg REGNO an additional reload reg.  */

  potential_reload_regs[i] = -1;
  spill_regs[n_spills] = regno;
  spill_reg_order[regno] = n_spills;
  if (dumpfile)
    fprintf (dumpfile, "Spilling reg %d.\n", regno);
  SET_HARD_REG_BIT (chain->used_spill_regs, regno);

  /* Clear off the needs we just satisfied.  */

  /* One simple reg of CLASS and of each of its superclasses.  */
  chain->need.regs[0][class]--;
  p = reg_class_superclasses[class];
  while (*p != LIM_REG_CLASSES)
    chain->need.regs[0][(int) *p++]--;

  /* If requested, also count REGNO against the nongroup need of CLASS
     and its superclasses, and reserve it so group-counting skips it.  */
  if (nongroup && chain->need.regs[1][class] > 0)
    {
      SET_HARD_REG_BIT (chain->counted_for_nongroups, regno);
      chain->need.regs[1][class]--;
      p = reg_class_superclasses[class];
      while (*p != LIM_REG_CLASSES)
	chain->need.regs[1][(int) *p++]--;
    }

  n_spills++;
}
2227 \f
2228 /* Delete an unneeded INSN and any previous insns who sole purpose is loading
2229 data that is dead in INSN. */
2230
2231 static void
2232 delete_dead_insn (insn)
2233 rtx insn;
2234 {
2235 rtx prev = prev_real_insn (insn);
2236 rtx prev_dest;
2237
2238 /* If the previous insn sets a register that dies in our insn, delete it
2239 too. */
2240 if (prev && GET_CODE (PATTERN (prev)) == SET
2241 && (prev_dest = SET_DEST (PATTERN (prev)), GET_CODE (prev_dest) == REG)
2242 && reg_mentioned_p (prev_dest, PATTERN (insn))
2243 && find_regno_note (insn, REG_DEAD, REGNO (prev_dest))
2244 && ! side_effects_p (SET_SRC (PATTERN (prev))))
2245 delete_dead_insn (prev);
2246
2247 PUT_CODE (insn, NOTE);
2248 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2249 NOTE_SOURCE_FILE (insn) = 0;
2250 }
2251
/* Modify the home of pseudo-reg I.
   The new home is present in reg_renumber[I].

   FROM_REG may be the hard reg that the pseudo-reg is being spilled from;
   or it may be -1, meaning there is none or it is not relevant.
   This is used so that all pseudos spilled from a given hard reg
   can share one stack slot.  */

static void
alter_reg (i, from_reg)
     register int i;
     int from_reg;
{
  /* When outputting an inline function, this can happen
     for a reg that isn't actually used.  */
  if (regno_reg_rtx[i] == 0)
    return;

  /* If the reg got changed to a MEM at rtl-generation time,
     ignore it.  */
  if (GET_CODE (regno_reg_rtx[i]) != REG)
    return;

  /* Modify the reg-rtx to contain the new hard reg
     number or else to contain its pseudo reg number.  */
  REGNO (regno_reg_rtx[i])
    = reg_renumber[i] >= 0 ? reg_renumber[i] : i;

  /* If we have a pseudo that is needed but has no hard reg or equivalent,
     allocate a stack slot for it.  */

  if (reg_renumber[i] < 0
      && REG_N_REFS (i) > 0
      && reg_equiv_constant[i] == 0
      && reg_equiv_memory_loc[i] == 0)
    {
      register rtx x;
      int inherent_size = PSEUDO_REGNO_BYTES (i);
      int total_size = MAX (inherent_size, reg_max_ref_width[i]);
      int adjust = 0;

      /* Each pseudo reg has an inherent size which comes from its own mode,
	 and a total size which provides room for paradoxical subregs
	 which refer to the pseudo reg in wider modes.

	 We can use a slot already allocated if it provides both
	 enough inherent space and enough total space.
	 Otherwise, we allocate a new slot, making sure that it has no less
	 inherent space, and no less total space, then the previous slot.  */
      if (from_reg == -1)
	{
	  /* No known place to spill from => no slot to reuse.  */
	  x = assign_stack_local (GET_MODE (regno_reg_rtx[i]), total_size,
				  inherent_size == total_size ? 0 : -1);
	  if (BYTES_BIG_ENDIAN)
	    /* Cancel the big-endian correction done in assign_stack_local.
	       Get the address of the beginning of the slot.
	       This is so we can do a big-endian correction unconditionally
	       below.  */
	    adjust = inherent_size - total_size;

	  RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (regno_reg_rtx[i]);
	}
      /* Reuse a stack slot if possible.  */
      else if (spill_stack_slot[from_reg] != 0
	       && spill_stack_slot_width[from_reg] >= total_size
	       && (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
		   >= inherent_size))
	x = spill_stack_slot[from_reg];
      /* Allocate a bigger slot.  */
      else
	{
	  /* Compute maximum size needed, both for inherent size
	     and for total size.  */
	  enum machine_mode mode = GET_MODE (regno_reg_rtx[i]);
	  rtx stack_slot;
	  /* If an old slot exists, the new one must be at least as big
	     in both respects, so the old slot's users still fit.  */
	  if (spill_stack_slot[from_reg])
	    {
	      if (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
		  > inherent_size)
		mode = GET_MODE (spill_stack_slot[from_reg]);
	      if (spill_stack_slot_width[from_reg] > total_size)
		total_size = spill_stack_slot_width[from_reg];
	    }
	  /* Make a slot with that size.  */
	  x = assign_stack_local (mode, total_size,
				  inherent_size == total_size ? 0 : -1);
	  stack_slot = x;
	  if (BYTES_BIG_ENDIAN)
	    {
	      /* Cancel the big-endian correction done in assign_stack_local.
		 Get the address of the beginning of the slot.
		 This is so we can do a big-endian correction unconditionally
		 below.  */
	      adjust = GET_MODE_SIZE (mode) - total_size;
	      if (adjust)
		stack_slot = gen_rtx_MEM (mode_for_size (total_size
							 * BITS_PER_UNIT,
							 MODE_INT, 1),
					  plus_constant (XEXP (x, 0), adjust));
	    }
	  /* Record the slot so later pseudos spilled from FROM_REG can
	     share it.  */
	  spill_stack_slot[from_reg] = stack_slot;
	  spill_stack_slot_width[from_reg] = total_size;
	}

      /* On a big endian machine, the "address" of the slot
	 is the address of the low part that fits its inherent mode.  */
      if (BYTES_BIG_ENDIAN && inherent_size < total_size)
	adjust += (total_size - inherent_size);

      /* If we have any adjustment to make, or if the stack slot is the
	 wrong mode, make a new stack slot.  */
      if (adjust != 0 || GET_MODE (x) != GET_MODE (regno_reg_rtx[i]))
	{
	  x = gen_rtx_MEM (GET_MODE (regno_reg_rtx[i]),
			   plus_constant (XEXP (x, 0), adjust));

	  /* If this was shared among registers, must ensure we never
	     set it readonly since that can cause scheduling
	     problems.  Note we would only have in this adjustment
	     case in any event, since the code above doesn't set it.  */

	  if (from_reg == -1)
	    RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (regno_reg_rtx[i]);
	}

      /* Save the stack slot for later.  */
      reg_equiv_memory_loc[i] = x;
    }
}
2382
2383 /* Mark the slots in regs_ever_live for the hard regs
2384 used by pseudo-reg number REGNO. */
2385
2386 void
2387 mark_home_live (regno)
2388 int regno;
2389 {
2390 register int i, lim;
2391 i = reg_renumber[regno];
2392 if (i < 0)
2393 return;
2394 lim = i + HARD_REGNO_NREGS (i, PSEUDO_REGNO_MODE (regno));
2395 while (i < lim)
2396 regs_ever_live[i++] = 1;
2397 }
2398 \f
/* This function handles the tracking of elimination offsets around branches.

   X is a piece of RTL being scanned.

   INSN is the insn that it came from, if any.

   INITIAL_P is non-zero if we are to set the offset to be the initial
   offset and zero if we are setting the offset of the label to be the
   current offset.  */

static void
set_label_offsets (x, insn, initial_p)
     rtx x;
     rtx insn;
     int initial_p;
{
  enum rtx_code code = GET_CODE (x);
  rtx tem;
  unsigned int i;
  struct elim_table *p;

  switch (code)
    {
    case LABEL_REF:
      /* Nonlocal labels are always reached with eliminations at their
	 initial offsets; nothing to track.  */
      if (LABEL_REF_NONLOCAL_P (x))
	return;

      x = XEXP (x, 0);

      /* ... fall through ... */

    case CODE_LABEL:
      /* If we know nothing about this label, set the desired offsets.  Note
	 that this sets the offset at a label to be the offset before a label
	 if we don't know anything about the label.  This is not correct for
	 the label after a BARRIER, but is the best guess we can make.  If
	 we guessed wrong, we will suppress an elimination that might have
	 been possible had we been able to guess correctly.  */

      if (! offsets_known_at[CODE_LABEL_NUMBER (x)])
	{
	  for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
	    offsets_at[CODE_LABEL_NUMBER (x)][i]
	      = (initial_p ? reg_eliminate[i].initial_offset
		 : reg_eliminate[i].offset);
	  offsets_known_at[CODE_LABEL_NUMBER (x)] = 1;
	}

      /* Otherwise, if this is the definition of a label and it is
	 preceded by a BARRIER, set our offsets to the known offset of
	 that label.  */

      else if (x == insn
	       && (tem = prev_nonnote_insn (insn)) != 0
	       && GET_CODE (tem) == BARRIER)
	set_offsets_for_label (insn);
      else
	/* If neither of the above cases is true, compare each offset
	   with those previously recorded and suppress any eliminations
	   where the offsets disagree.  */

	for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
	  if (offsets_at[CODE_LABEL_NUMBER (x)][i]
	      != (initial_p ? reg_eliminate[i].initial_offset
		  : reg_eliminate[i].offset))
	    reg_eliminate[i].can_eliminate = 0;

      return;

    case JUMP_INSN:
      /* Scan the jump's pattern for label references.  */
      set_label_offsets (PATTERN (insn), insn, initial_p);

      /* ... fall through ... */

    case INSN:
    case CALL_INSN:
      /* Any labels mentioned in REG_LABEL notes can be branched to indirectly
	 and hence must have all eliminations at their initial offsets.  */
      for (tem = REG_NOTES (x); tem; tem = XEXP (tem, 1))
	if (REG_NOTE_KIND (tem) == REG_LABEL)
	  set_label_offsets (XEXP (tem, 0), insn, 1);
      return;

    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      /* Each of the labels in the address vector must be at their initial
	 offsets.  We want the first field for ADDR_VEC and the second
	 field for ADDR_DIFF_VEC.  */

      for (i = 0; i < (unsigned) XVECLEN (x, code == ADDR_DIFF_VEC); i++)
	set_label_offsets (XVECEXP (x, code == ADDR_DIFF_VEC, i),
			   insn, initial_p);
      return;

    case SET:
      /* We only care about setting PC.  If the source is not RETURN,
	 IF_THEN_ELSE, or a label, disable any eliminations not at
	 their initial offsets.  Similarly if any arm of the IF_THEN_ELSE
	 isn't one of those possibilities.  For branches to a label,
	 call ourselves recursively.

	 Note that this can disable elimination unnecessarily when we have
	 a non-local goto since it will look like a non-constant jump to
	 someplace in the current function.  This isn't a significant
	 problem since such jumps will normally be when all elimination
	 pairs are back to their initial offsets.  */

      if (SET_DEST (x) != pc_rtx)
	return;

      switch (GET_CODE (SET_SRC (x)))
	{
	case PC:
	case RETURN:
	  return;

	case LABEL_REF:
	  /* A direct jump: track offsets at the target label.  */
	  set_label_offsets (XEXP (SET_SRC (x), 0), insn, initial_p);
	  return;

	case IF_THEN_ELSE:
	  /* A conditional branch: handle each arm that is a label;
	     any other arm besides PC/RETURN forces the conservative
	     path below.  */
	  tem = XEXP (SET_SRC (x), 1);
	  if (GET_CODE (tem) == LABEL_REF)
	    set_label_offsets (XEXP (tem, 0), insn, initial_p);
	  else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
	    break;

	  tem = XEXP (SET_SRC (x), 2);
	  if (GET_CODE (tem) == LABEL_REF)
	    set_label_offsets (XEXP (tem, 0), insn, initial_p);
	  else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
	    break;
	  return;

	default:
	  break;
	}

      /* If we reach here, all eliminations must be at their initial
	 offset because we are doing a jump to a variable address.  */
      for (p = reg_eliminate; p < &reg_eliminate[NUM_ELIMINABLE_REGS]; p++)
	if (p->offset != p->initial_offset)
	  p->can_eliminate = 0;
      break;

    default:
      break;
    }
}
2548 \f
2549 /* Used for communication between the next two function to properly share
2550 the vector for an ASM_OPERANDS. */
2551
2552 static struct rtvec_def *old_asm_operands_vec, *new_asm_operands_vec;
2553
2554 /* Scan X and replace any eliminable registers (such as fp) with a
2555 replacement (such as sp), plus an offset.
2556
2557 MEM_MODE is the mode of an enclosing MEM. We need this to know how
2558 much to adjust a register for, e.g., PRE_DEC. Also, if we are inside a
2559 MEM, we are allowed to replace a sum of a register and the constant zero
2560 with the register, which we cannot do outside a MEM. In addition, we need
2561 to record the fact that a register is referenced outside a MEM.
2562
2563 If INSN is an insn, it is the insn containing X. If we replace a REG
2564 in a SET_DEST with an equivalent MEM and INSN is non-zero, write a
2565 CLOBBER of the pseudo after INSN so find_equiv_regs will know that
2566 the REG is being modified.
2567
2568 Alternatively, INSN may be a note (an EXPR_LIST or INSN_LIST).
2569 That's used when we eliminate in expressions stored in notes.
2570 This means, do not set ref_outside_mem even if the reference
2571 is outside of MEMs.
2572
2573 If we see a modification to a register we know about, take the
2574 appropriate action (see case SET, below).
2575
2576 REG_EQUIV_MEM and REG_EQUIV_ADDRESS contain address that have had
2577 replacements done assuming all offsets are at their initial values. If
2578 they are not, or if REG_EQUIV_ADDRESS is nonzero for a pseudo we
2579 encounter, return the actual location so that find_reloads will do
2580 the proper thing. */
2581
2582 rtx
2583 eliminate_regs (x, mem_mode, insn)
2584 rtx x;
2585 enum machine_mode mem_mode;
2586 rtx insn;
2587 {
2588 enum rtx_code code = GET_CODE (x);
2589 struct elim_table *ep;
2590 int regno;
2591 rtx new;
2592 int i, j;
2593 char *fmt;
2594 int copied = 0;
2595
2596 switch (code)
2597 {
2598 case CONST_INT:
2599 case CONST_DOUBLE:
2600 case CONST:
2601 case SYMBOL_REF:
2602 case CODE_LABEL:
2603 case PC:
2604 case CC0:
2605 case ASM_INPUT:
2606 case ADDR_VEC:
2607 case ADDR_DIFF_VEC:
2608 case RETURN:
2609 return x;
2610
2611 case ADDRESSOF:
2612 /* This is only for the benefit of the debugging backends, which call
2613 eliminate_regs on DECL_RTL; any ADDRESSOFs in the actual insns are
2614 removed after CSE. */
2615 new = eliminate_regs (XEXP (x, 0), 0, insn);
2616 if (GET_CODE (new) == MEM)
2617 return XEXP (new, 0);
2618 return x;
2619
2620 case REG:
2621 regno = REGNO (x);
2622
2623 /* First handle the case where we encounter a bare register that
2624 is eliminable. Replace it with a PLUS. */
2625 if (regno < FIRST_PSEUDO_REGISTER)
2626 {
2627 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2628 ep++)
2629 if (ep->from_rtx == x && ep->can_eliminate)
2630 {
2631 if (! mem_mode
2632 /* Refs inside notes don't count for this purpose. */
2633 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2634 || GET_CODE (insn) == INSN_LIST)))
2635 ep->ref_outside_mem = 1;
2636 return plus_constant (ep->to_rtx, ep->previous_offset);
2637 }
2638
2639 }
2640 return x;
2641
2642 case PLUS:
2643 /* If this is the sum of an eliminable register and a constant, rework
2644 the sum. */
2645 if (GET_CODE (XEXP (x, 0)) == REG
2646 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2647 && CONSTANT_P (XEXP (x, 1)))
2648 {
2649 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2650 ep++)
2651 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2652 {
2653 if (! mem_mode
2654 /* Refs inside notes don't count for this purpose. */
2655 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2656 || GET_CODE (insn) == INSN_LIST)))
2657 ep->ref_outside_mem = 1;
2658
2659 /* The only time we want to replace a PLUS with a REG (this
2660 occurs when the constant operand of the PLUS is the negative
2661 of the offset) is when we are inside a MEM. We won't want
2662 to do so at other times because that would change the
2663 structure of the insn in a way that reload can't handle.
2664 We special-case the commonest situation in
2665 eliminate_regs_in_insn, so just replace a PLUS with a
2666 PLUS here, unless inside a MEM. */
2667 if (mem_mode != 0 && GET_CODE (XEXP (x, 1)) == CONST_INT
2668 && INTVAL (XEXP (x, 1)) == - ep->previous_offset)
2669 return ep->to_rtx;
2670 else
2671 return gen_rtx_PLUS (Pmode, ep->to_rtx,
2672 plus_constant (XEXP (x, 1),
2673 ep->previous_offset));
2674 }
2675
2676 /* If the register is not eliminable, we are done since the other
2677 operand is a constant. */
2678 return x;
2679 }
2680
2681 /* If this is part of an address, we want to bring any constant to the
2682 outermost PLUS. We will do this by doing register replacement in
2683 our operands and seeing if a constant shows up in one of them.
2684
2685 We assume here this is part of an address (or a "load address" insn)
2686 since an eliminable register is not likely to appear in any other
2687 context.
2688
2689 If we have (plus (eliminable) (reg)), we want to produce
2690 (plus (plus (replacement) (reg) (const))). If this was part of a
2691 normal add insn, (plus (replacement) (reg)) will be pushed as a
2692 reload. This is the desired action. */
2693
2694 {
2695 rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2696 rtx new1 = eliminate_regs (XEXP (x, 1), mem_mode, insn);
2697
2698 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
2699 {
2700 /* If one side is a PLUS and the other side is a pseudo that
2701 didn't get a hard register but has a reg_equiv_constant,
2702 we must replace the constant here since it may no longer
2703 be in the position of any operand. */
2704 if (GET_CODE (new0) == PLUS && GET_CODE (new1) == REG
2705 && REGNO (new1) >= FIRST_PSEUDO_REGISTER
2706 && reg_renumber[REGNO (new1)] < 0
2707 && reg_equiv_constant != 0
2708 && reg_equiv_constant[REGNO (new1)] != 0)
2709 new1 = reg_equiv_constant[REGNO (new1)];
2710 else if (GET_CODE (new1) == PLUS && GET_CODE (new0) == REG
2711 && REGNO (new0) >= FIRST_PSEUDO_REGISTER
2712 && reg_renumber[REGNO (new0)] < 0
2713 && reg_equiv_constant[REGNO (new0)] != 0)
2714 new0 = reg_equiv_constant[REGNO (new0)];
2715
2716 new = form_sum (new0, new1);
2717
2718 /* As above, if we are not inside a MEM we do not want to
2719 turn a PLUS into something else. We might try to do so here
2720 for an addition of 0 if we aren't optimizing. */
2721 if (! mem_mode && GET_CODE (new) != PLUS)
2722 return gen_rtx_PLUS (GET_MODE (x), new, const0_rtx);
2723 else
2724 return new;
2725 }
2726 }
2727 return x;
2728
2729 case MULT:
2730 /* If this is the product of an eliminable register and a
2731 constant, apply the distribute law and move the constant out
2732 so that we have (plus (mult ..) ..). This is needed in order
2733 to keep load-address insns valid. This case is pathological.
2734 We ignore the possibility of overflow here. */
2735 if (GET_CODE (XEXP (x, 0)) == REG
2736 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2737 && GET_CODE (XEXP (x, 1)) == CONST_INT)
2738 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2739 ep++)
2740 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2741 {
2742 if (! mem_mode
2743 /* Refs inside notes don't count for this purpose. */
2744 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2745 || GET_CODE (insn) == INSN_LIST)))
2746 ep->ref_outside_mem = 1;
2747
2748 return
2749 plus_constant (gen_rtx_MULT (Pmode, ep->to_rtx, XEXP (x, 1)),
2750 ep->previous_offset * INTVAL (XEXP (x, 1)));
2751 }
2752
2753 /* ... fall through ... */
2754
2755 case CALL:
2756 case COMPARE:
2757 case MINUS:
2758 case DIV: case UDIV:
2759 case MOD: case UMOD:
2760 case AND: case IOR: case XOR:
2761 case ROTATERT: case ROTATE:
2762 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
2763 case NE: case EQ:
2764 case GE: case GT: case GEU: case GTU:
2765 case LE: case LT: case LEU: case LTU:
2766 {
2767 rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2768 rtx new1
2769 = XEXP (x, 1) ? eliminate_regs (XEXP (x, 1), mem_mode, insn) : 0;
2770
2771 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
2772 return gen_rtx_fmt_ee (code, GET_MODE (x), new0, new1);
2773 }
2774 return x;
2775
2776 case EXPR_LIST:
2777 /* If we have something in XEXP (x, 0), the usual case, eliminate it. */
2778 if (XEXP (x, 0))
2779 {
2780 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2781 if (new != XEXP (x, 0))
2782 x = gen_rtx_EXPR_LIST (REG_NOTE_KIND (x), new, XEXP (x, 1));
2783 }
2784
2785 /* ... fall through ... */
2786
2787 case INSN_LIST:
2788 /* Now do eliminations in the rest of the chain. If this was
2789 an EXPR_LIST, this might result in allocating more memory than is
2790 strictly needed, but it simplifies the code. */
2791 if (XEXP (x, 1))
2792 {
2793 new = eliminate_regs (XEXP (x, 1), mem_mode, insn);
2794 if (new != XEXP (x, 1))
2795 return gen_rtx_fmt_ee (GET_CODE (x), GET_MODE (x), XEXP (x, 0), new);
2796 }
2797 return x;
2798
2799 case PRE_INC:
2800 case POST_INC:
2801 case PRE_DEC:
2802 case POST_DEC:
2803 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2804 if (ep->to_rtx == XEXP (x, 0))
2805 {
2806 int size = GET_MODE_SIZE (mem_mode);
2807
2808 /* If more bytes than MEM_MODE are pushed, account for them. */
2809 #ifdef PUSH_ROUNDING
2810 if (ep->to_rtx == stack_pointer_rtx)
2811 size = PUSH_ROUNDING (size);
2812 #endif
2813 if (code == PRE_DEC || code == POST_DEC)
2814 ep->offset += size;
2815 else
2816 ep->offset -= size;
2817 }
2818
2819 /* Fall through to generic unary operation case. */
2820 case STRICT_LOW_PART:
2821 case NEG: case NOT:
2822 case SIGN_EXTEND: case ZERO_EXTEND:
2823 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
2824 case FLOAT: case FIX:
2825 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
2826 case ABS:
2827 case SQRT:
2828 case FFS:
2829 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2830 if (new != XEXP (x, 0))
2831 return gen_rtx_fmt_e (code, GET_MODE (x), new);
2832 return x;
2833
2834 case SUBREG:
2835 /* Similar to above processing, but preserve SUBREG_WORD.
2836 Convert (subreg (mem)) to (mem) if not paradoxical.
2837 Also, if we have a non-paradoxical (subreg (pseudo)) and the
2838 pseudo didn't get a hard reg, we must replace this with the
2839 eliminated version of the memory location because push_reloads
2840 may do the replacement in certain circumstances. */
2841 if (GET_CODE (SUBREG_REG (x)) == REG
2842 && (GET_MODE_SIZE (GET_MODE (x))
2843 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
2844 && reg_equiv_memory_loc != 0
2845 && reg_equiv_memory_loc[REGNO (SUBREG_REG (x))] != 0)
2846 {
2847 #if 0
2848 new = eliminate_regs (reg_equiv_memory_loc[REGNO (SUBREG_REG (x))],
2849 mem_mode, insn);
2850
2851 /* If we didn't change anything, we must retain the pseudo. */
2852 if (new == reg_equiv_memory_loc[REGNO (SUBREG_REG (x))])
2853 new = SUBREG_REG (x);
2854 else
2855 {
2856 /* In this case, we must show that the pseudo is used in this
2857 insn so that delete_output_reload will do the right thing. */
2858 if (insn != 0 && GET_CODE (insn) != EXPR_LIST
2859 && GET_CODE (insn) != INSN_LIST)
2860 REG_NOTES (emit_insn_before (gen_rtx_USE (VOIDmode,
2861 SUBREG_REG (x)),
2862 insn))
2863 = gen_rtx_EXPR_LIST (REG_EQUAL, new, NULL_RTX);
2864
2865 /* Ensure NEW isn't shared in case we have to reload it. */
2866 new = copy_rtx (new);
2867 }
2868 #else
2869 new = SUBREG_REG (x);
2870 #endif
2871 }
2872 else
2873 new = eliminate_regs (SUBREG_REG (x), mem_mode, insn);
2874
2875 if (new != XEXP (x, 0))
2876 {
2877 int x_size = GET_MODE_SIZE (GET_MODE (x));
2878 int new_size = GET_MODE_SIZE (GET_MODE (new));
2879
2880 if (GET_CODE (new) == MEM
2881 && ((x_size < new_size
2882 #ifdef WORD_REGISTER_OPERATIONS
2883 /* On these machines, combine can create rtl of the form
2884 (set (subreg:m1 (reg:m2 R) 0) ...)
2885 where m1 < m2, and expects something interesting to
2886 happen to the entire word. Moreover, it will use the
2887 (reg:m2 R) later, expecting all bits to be preserved.
2888 So if the number of words is the same, preserve the
2889 subreg so that push_reloads can see it. */
2890 && ! ((x_size-1)/UNITS_PER_WORD == (new_size-1)/UNITS_PER_WORD)
2891 #endif
2892 )
2893 || (x_size == new_size))
2894 )
2895 {
2896 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
2897 enum machine_mode mode = GET_MODE (x);
2898
2899 if (BYTES_BIG_ENDIAN)
2900 offset += (MIN (UNITS_PER_WORD,
2901 GET_MODE_SIZE (GET_MODE (new)))
2902 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
2903
2904 PUT_MODE (new, mode);
2905 XEXP (new, 0) = plus_constant (XEXP (new, 0), offset);
2906 return new;
2907 }
2908 else
2909 return gen_rtx_SUBREG (GET_MODE (x), new, SUBREG_WORD (x));
2910 }
2911
2912 return x;
2913
2914 case USE:
2915 /* If using a register that is the source of an eliminate we still
2916 think can be performed, note it cannot be performed since we don't
2917 know how this register is used. */
2918 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2919 if (ep->from_rtx == XEXP (x, 0))
2920 ep->can_eliminate = 0;
2921
2922 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2923 if (new != XEXP (x, 0))
2924 return gen_rtx_fmt_e (code, GET_MODE (x), new);
2925 return x;
2926
2927 case CLOBBER:
2928 /* If clobbering a register that is the replacement register for an
2929 elimination we still think can be performed, note that it cannot
2930 be performed. Otherwise, we need not be concerned about it. */
2931 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2932 if (ep->to_rtx == XEXP (x, 0))
2933 ep->can_eliminate = 0;
2934
2935 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2936 if (new != XEXP (x, 0))
2937 return gen_rtx_fmt_e (code, GET_MODE (x), new);
2938 return x;
2939
2940 case ASM_OPERANDS:
2941 {
2942 rtx *temp_vec;
2943 /* Properly handle sharing input and constraint vectors. */
2944 if (ASM_OPERANDS_INPUT_VEC (x) != old_asm_operands_vec)
2945 {
2946 /* When we come to a new vector not seen before,
2947 scan all its elements; keep the old vector if none
2948 of them changes; otherwise, make a copy. */
2949 old_asm_operands_vec = ASM_OPERANDS_INPUT_VEC (x);
2950 temp_vec = (rtx *) alloca (XVECLEN (x, 3) * sizeof (rtx));
2951 for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
2952 temp_vec[i] = eliminate_regs (ASM_OPERANDS_INPUT (x, i),
2953 mem_mode, insn);
2954
2955 for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
2956 if (temp_vec[i] != ASM_OPERANDS_INPUT (x, i))
2957 break;
2958
2959 if (i == ASM_OPERANDS_INPUT_LENGTH (x))
2960 new_asm_operands_vec = old_asm_operands_vec;
2961 else
2962 new_asm_operands_vec
2963 = gen_rtvec_v (ASM_OPERANDS_INPUT_LENGTH (x), temp_vec);
2964 }
2965
2966 /* If we had to copy the vector, copy the entire ASM_OPERANDS. */
2967 if (new_asm_operands_vec == old_asm_operands_vec)
2968 return x;
2969
2970 new = gen_rtx_ASM_OPERANDS (VOIDmode, ASM_OPERANDS_TEMPLATE (x),
2971 ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
2972 ASM_OPERANDS_OUTPUT_IDX (x),
2973 new_asm_operands_vec,
2974 ASM_OPERANDS_INPUT_CONSTRAINT_VEC (x),
2975 ASM_OPERANDS_SOURCE_FILE (x),
2976 ASM_OPERANDS_SOURCE_LINE (x));
2977 new->volatil = x->volatil;
2978 return new;
2979 }
2980
2981 case SET:
2982 /* Check for setting a register that we know about. */
2983 if (GET_CODE (SET_DEST (x)) == REG)
2984 {
2985 /* See if this is setting the replacement register for an
2986 elimination.
2987
2988 If DEST is the hard frame pointer, we do nothing because we
2989 assume that all assignments to the frame pointer are for
2990 non-local gotos and are being done at a time when they are valid
2991 and do not disturb anything else. Some machines want to
2992 eliminate a fake argument pointer (or even a fake frame pointer)
2993 with either the real frame or the stack pointer. Assignments to
2994 the hard frame pointer must not prevent this elimination. */
2995
2996 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2997 ep++)
2998 if (ep->to_rtx == SET_DEST (x)
2999 && SET_DEST (x) != hard_frame_pointer_rtx)
3000 {
3001 /* If it is being incremented, adjust the offset. Otherwise,
3002 this elimination can't be done. */
3003 rtx src = SET_SRC (x);
3004
3005 if (GET_CODE (src) == PLUS
3006 && XEXP (src, 0) == SET_DEST (x)
3007 && GET_CODE (XEXP (src, 1)) == CONST_INT)
3008 ep->offset -= INTVAL (XEXP (src, 1));
3009 else
3010 ep->can_eliminate = 0;
3011 }
3012
3013 /* Now check to see we are assigning to a register that can be
3014 eliminated. If so, it must be as part of a PARALLEL, since we
3015 will not have been called if this is a single SET. So indicate
3016 that we can no longer eliminate this reg. */
3017 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3018 ep++)
3019 if (ep->from_rtx == SET_DEST (x) && ep->can_eliminate)
3020 ep->can_eliminate = 0;
3021 }
3022
3023 /* Now avoid the loop below in this common case. */
3024 {
3025 rtx new0 = eliminate_regs (SET_DEST (x), 0, insn);
3026 rtx new1 = eliminate_regs (SET_SRC (x), 0, insn);
3027
3028 /* If SET_DEST changed from a REG to a MEM and INSN is an insn,
3029 write a CLOBBER insn. */
3030 if (GET_CODE (SET_DEST (x)) == REG && GET_CODE (new0) == MEM
3031 && insn != 0 && GET_CODE (insn) != EXPR_LIST
3032 && GET_CODE (insn) != INSN_LIST)
3033 emit_insn_after (gen_rtx_CLOBBER (VOIDmode, SET_DEST (x)), insn);
3034
3035 if (new0 != SET_DEST (x) || new1 != SET_SRC (x))
3036 return gen_rtx_SET (VOIDmode, new0, new1);
3037 }
3038
3039 return x;
3040
3041 case MEM:
3042 /* This is only for the benefit of the debugging backends, which call
3043 eliminate_regs on DECL_RTL; any ADDRESSOFs in the actual insns are
3044 removed after CSE. */
3045 if (GET_CODE (XEXP (x, 0)) == ADDRESSOF)
3046 return eliminate_regs (XEXP (XEXP (x, 0), 0), 0, insn);
3047
3048 /* Our only special processing is to pass the mode of the MEM to our
3049 recursive call and copy the flags. While we are here, handle this
3050 case more efficiently. */
3051 new = eliminate_regs (XEXP (x, 0), GET_MODE (x), insn);
3052 if (new != XEXP (x, 0))
3053 {
3054 new = gen_rtx_MEM (GET_MODE (x), new);
3055 new->volatil = x->volatil;
3056 new->unchanging = x->unchanging;
3057 new->in_struct = x->in_struct;
3058 return new;
3059 }
3060 else
3061 return x;
3062
3063 default:
3064 break;
3065 }
3066
3067 /* Process each of our operands recursively. If any have changed, make a
3068 copy of the rtx. */
3069 fmt = GET_RTX_FORMAT (code);
3070 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3071 {
3072 if (*fmt == 'e')
3073 {
3074 new = eliminate_regs (XEXP (x, i), mem_mode, insn);
3075 if (new != XEXP (x, i) && ! copied)
3076 {
3077 rtx new_x = rtx_alloc (code);
3078 bcopy ((char *) x, (char *) new_x,
3079 (sizeof (*new_x) - sizeof (new_x->fld)
3080 + sizeof (new_x->fld[0]) * GET_RTX_LENGTH (code)));
3081 x = new_x;
3082 copied = 1;
3083 }
3084 XEXP (x, i) = new;
3085 }
3086 else if (*fmt == 'E')
3087 {
3088 int copied_vec = 0;
3089 for (j = 0; j < XVECLEN (x, i); j++)
3090 {
3091 new = eliminate_regs (XVECEXP (x, i, j), mem_mode, insn);
3092 if (new != XVECEXP (x, i, j) && ! copied_vec)
3093 {
3094 rtvec new_v = gen_rtvec_vv (XVECLEN (x, i),
3095 XVEC (x, i)->elem);
3096 if (! copied)
3097 {
3098 rtx new_x = rtx_alloc (code);
3099 bcopy ((char *) x, (char *) new_x,
3100 (sizeof (*new_x) - sizeof (new_x->fld)
3101 + (sizeof (new_x->fld[0])
3102 * GET_RTX_LENGTH (code))));
3103 x = new_x;
3104 copied = 1;
3105 }
3106 XVEC (x, i) = new_v;
3107 copied_vec = 1;
3108 }
3109 XVECEXP (x, i, j) = new;
3110 }
3111 }
3112 }
3113
3114 return x;
3115 }
3116 \f
3117 /* Scan INSN and eliminate all eliminable registers in it.
3118
   If REPLACE is nonzero, do the replacement destructively.  Also
   delete the insn as dead if it is setting an eliminable register.
3121
3122 If REPLACE is zero, do all our allocations in reload_obstack.
3123
3124 If no eliminations were done and this insn doesn't require any elimination
3125 processing (these are not identical conditions: it might be updating sp,
3126 but not referencing fp; this needs to be seen during reload_as_needed so
3127 that the offset between fp and sp can be taken into consideration), zero
3128 is returned. Otherwise, 1 is returned. */
3129
static int
eliminate_regs_in_insn (insn, replace)
     rtx insn;
     int replace;
{
  rtx old_body = PATTERN (insn);
  /* The single SET of this insn, or 0 if the pattern is not a simple SET.  */
  rtx old_set = single_set (insn);
  rtx new_body;
  int val = 0;
  struct elim_table *ep;

  /* Per the contract above: non-permanent replacements are allocated on
     reload_obstack, popped again at the end of this function.  */
  if (! replace)
    push_obstacks (&reload_obstack, &reload_obstack);

  if (old_set != 0 && GET_CODE (SET_DEST (old_set)) == REG
      && REGNO (SET_DEST (old_set)) < FIRST_PSEUDO_REGISTER)
    {
      /* Check for setting an eliminable register.  */
      for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
	if (ep->from_rtx == SET_DEST (old_set) && ep->can_eliminate)
	  {
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
	    /* If this is setting the frame pointer register to the
	       hardware frame pointer register and this is an elimination
	       that will be done (tested above), this insn is really
	       adjusting the frame pointer downward to compensate for
	       the adjustment done before a nonlocal goto.  */
	    if (ep->from == FRAME_POINTER_REGNUM
		&& ep->to == HARD_FRAME_POINTER_REGNUM)
	      {
		rtx src = SET_SRC (old_set);
		/* OFFSET is only meaningful when OK is set.  */
		int offset, ok = 0;
		rtx prev_insn, prev_set;

		/* Recognize SRC as TO, (plus CONST TO) or (plus TO CONST);
		   failing that, look through a preceding copy into SRC and
		   try the same three shapes on that copy's source.  */
		if (src == ep->to_rtx)
		  offset = 0, ok = 1;
		else if (GET_CODE (src) == PLUS
			 && GET_CODE (XEXP (src, 0)) == CONST_INT
			 && XEXP (src, 1) == ep->to_rtx)
		  offset = INTVAL (XEXP (src, 0)), ok = 1;
		else if (GET_CODE (src) == PLUS
			 && GET_CODE (XEXP (src, 1)) == CONST_INT
			 && XEXP (src, 0) == ep->to_rtx)
		  offset = INTVAL (XEXP (src, 1)), ok = 1;
		else if ((prev_insn = prev_nonnote_insn (insn)) != 0
			 && (prev_set = single_set (prev_insn)) != 0
			 && rtx_equal_p (SET_DEST (prev_set), src))
		  {
		    src = SET_SRC (prev_set);
		    if (src == ep->to_rtx)
		      offset = 0, ok = 1;
		    else if (GET_CODE (src) == PLUS
			     && GET_CODE (XEXP (src, 0)) == CONST_INT
			     && XEXP (src, 1) == ep->to_rtx)
		      offset = INTVAL (XEXP (src, 0)), ok = 1;
		    else if (GET_CODE (src) == PLUS
			     && GET_CODE (XEXP (src, 1)) == CONST_INT
			     && XEXP (src, 0) == ep->to_rtx)
		      offset = INTVAL (XEXP (src, 1)), ok = 1;
		  }

		if (ok)
		  {
		    if (replace)
		      {
			/* NOTE: shadows the SRC declared above.  */
			rtx src
			  = plus_constant (ep->to_rtx, offset - ep->offset);

			/* First see if this insn remains valid when we
			   make the change.  If not, keep the INSN_CODE
			   the same and let reload fix it up.  */
			validate_change (insn, &SET_SRC (old_set), src, 1);
			validate_change (insn, &SET_DEST (old_set),
					 ep->to_rtx, 1);
			if (! apply_change_group ())
			  {
			    /* Force the substitution even though the insn
			       didn't re-recognize; reload will cope.  */
			    SET_SRC (old_set) = src;
			    SET_DEST (old_set) = ep->to_rtx;
			  }
		      }

		    val = 1;
		    goto done;
		  }
	      }
#endif

	    /* In this case this insn isn't serving a useful purpose.  We
	       will delete it in reload_as_needed once we know that this
	       elimination is, in fact, being done.

	       If REPLACE isn't set, we can't delete this insn, but needn't
	       process it since it won't be used unless something changes.  */
	    if (replace)
	      delete_dead_insn (insn);
	    val = 1;
	    goto done;
	  }

      /* Check for (set (reg) (plus (reg from) (offset))) where the offset
	 in the insn is the negative of the offset in FROM.  Substitute
	 (set (reg) (reg to)) for the insn and change its code.

	 We have to do this here, rather than in eliminate_regs, so that we can
	 change the insn code.  */

      if (GET_CODE (SET_SRC (old_set)) == PLUS
	  && GET_CODE (XEXP (SET_SRC (old_set), 0)) == REG
	  && GET_CODE (XEXP (SET_SRC (old_set), 1)) == CONST_INT)
	for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
	     ep++)
	  if (ep->from_rtx == XEXP (SET_SRC (old_set), 0)
	      && ep->can_eliminate)
	    {
	      /* We must stop at the first elimination that will be used.
		 If this one would replace the PLUS with a REG, do it
		 now.  Otherwise, quit the loop and let eliminate_regs
		 do its normal replacement.  */
	      if (ep->offset == - INTVAL (XEXP (SET_SRC (old_set), 1)))
		{
		  /* We assume here that we don't need a PARALLEL of
		     any CLOBBERs for this assignment.  There's not
		     much we can do if we do need it.  */
		  PATTERN (insn) = gen_rtx_SET (VOIDmode,
						SET_DEST (old_set),
						ep->to_rtx);
		  INSN_CODE (insn) = -1;
		  val = 1;
		  goto done;
		}

	      break;
	    }
    }

  old_asm_operands_vec = 0;

  /* Replace the body of this insn with a substituted form.  If we changed
     something, return non-zero.

     If we are replacing a body that was a (set X (plus Y Z)), try to
     re-recognize the insn.  We do this in case we had a simple addition
     but now can do this as a load-address.  This saves an insn in this
     common case.  */

  new_body = eliminate_regs (old_body, 0, replace ? insn : NULL_RTX);
  if (new_body != old_body)
    {
      /* If we aren't replacing things permanently and we changed something,
	 make another copy to ensure that all the RTL is new.  Otherwise
	 things can go wrong if find_reload swaps commutative operands
	 and one is inside RTL that has been copied while the other is not.  */

      /* Don't copy an asm_operands because (1) there's no need and (2)
	 copy_rtx can't do it properly when there are multiple outputs.  */
      if (! replace && asm_noperands (old_body) < 0)
	new_body = copy_rtx (new_body);

      /* If we had a move insn but now we don't, rerecognize it.  This will
	 cause spurious re-recognition if the old move had a PARALLEL since
	 the new one still will, but we can't call single_set without
	 having put NEW_BODY into the insn and the re-recognition won't
	 hurt in this rare case.  */
      if (old_set != 0
	  && ((GET_CODE (SET_SRC (old_set)) == REG
	       && (GET_CODE (new_body) != SET
		   || GET_CODE (SET_SRC (new_body)) != REG))
	      /* If this was a load from or store to memory, compare
		 the MEM in recog_operand to the one in the insn.  If they
		 are not equal, then rerecognize the insn.  */
	      || (old_set != 0
		  && ((GET_CODE (SET_SRC (old_set)) == MEM
		       && SET_SRC (old_set) != recog_operand[1])
		      || (GET_CODE (SET_DEST (old_set)) == MEM
			  && SET_DEST (old_set) != recog_operand[0])))
	      /* If this was an add insn before, rerecognize.  */
	      || GET_CODE (SET_SRC (old_set)) == PLUS))
	{
	  if (! validate_change (insn, &PATTERN (insn), new_body, 0))
	    /* If recognition fails, store the new body anyway.
	       It's normal to have recognition failures here
	       due to bizarre memory addresses; reloading will fix them.  */
	    PATTERN (insn) = new_body;
	}
      else
	PATTERN (insn) = new_body;

      val = 1;
    }

  /* Loop through all elimination pairs.  See if any have changed.

     We also detect cases where register elimination cannot be done,
     namely, if a register would be both changed and referenced outside a MEM
     in the resulting insn since such an insn is often undefined and, even if
     not, we cannot know what meaning will be given to it.  Note that it is
     valid to have a register used in an address in an insn that changes it
     (presumably with a pre- or post-increment or decrement).

     If anything changes, return nonzero.  */

  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
    {
      if (ep->previous_offset != ep->offset && ep->ref_outside_mem)
	ep->can_eliminate = 0;

      /* Reset the per-insn flag for the next insn.  */
      ep->ref_outside_mem = 0;

      if (ep->previous_offset != ep->offset)
	val = 1;
    }

 done:
  /* If we changed something, perform elimination in REG_NOTES.  This is
     needed even when REPLACE is zero because a REG_DEAD note might refer
     to a register that we eliminate and could cause a different number
     of spill registers to be needed in the final reload pass than in
     the pre-passes.  */
  if (val && REG_NOTES (insn) != 0)
    REG_NOTES (insn) = eliminate_regs (REG_NOTES (insn), 0, REG_NOTES (insn));

  if (! replace)
    pop_obstacks ();

  return val;
}
3356
/* Loop through all elimination pairs.  Record the previous offset of
   each pair and recalculate the number of eliminable registers that
   are not at their initial offset.  */
3362
3363 static void
3364 update_eliminable_offsets ()
3365 {
3366 struct elim_table *ep;
3367
3368 num_not_at_initial_offset = 0;
3369 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3370 {
3371 ep->previous_offset = ep->offset;
3372 if (ep->can_eliminate && ep->offset != ep->initial_offset)
3373 num_not_at_initial_offset++;
3374 }
3375 }
3376
3377 /* Given X, a SET or CLOBBER of DEST, if DEST is the target of a register
3378 replacement we currently believe is valid, mark it as not eliminable if X
3379 modifies DEST in any way other than by adding a constant integer to it.
3380
3381 If DEST is the frame pointer, we do nothing because we assume that
3382 all assignments to the hard frame pointer are nonlocal gotos and are being
3383 done at a time when they are valid and do not disturb anything else.
3384 Some machines want to eliminate a fake argument pointer with either the
3385 frame or stack pointer. Assignments to the hard frame pointer must not
3386 prevent this elimination.
3387
3388 Called via note_stores from reload before starting its passes to scan
3389 the insns of the function. */
3390
3391 static void
3392 mark_not_eliminable (dest, x)
3393 rtx dest;
3394 rtx x;
3395 {
3396 register unsigned int i;
3397
3398 /* A SUBREG of a hard register here is just changing its mode. We should
3399 not see a SUBREG of an eliminable hard register, but check just in
3400 case. */
3401 if (GET_CODE (dest) == SUBREG)
3402 dest = SUBREG_REG (dest);
3403
3404 if (dest == hard_frame_pointer_rtx)
3405 return;
3406
3407 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3408 if (reg_eliminate[i].can_eliminate && dest == reg_eliminate[i].to_rtx
3409 && (GET_CODE (x) != SET
3410 || GET_CODE (SET_SRC (x)) != PLUS
3411 || XEXP (SET_SRC (x), 0) != dest
3412 || GET_CODE (XEXP (SET_SRC (x), 1)) != CONST_INT))
3413 {
3414 reg_eliminate[i].can_eliminate_previous
3415 = reg_eliminate[i].can_eliminate = 0;
3416 num_eliminable--;
3417 }
3418 }
3419
3420 /* Verify that the initial elimination offsets did not change since the
3421 last call to set_initial_elim_offsets. This is used to catch cases
3422 where something illegal happened during reload_as_needed that could
3423 cause incorrect code to be generated if we did not check for it. */
static void
verify_initial_elim_offsets ()
{
  /* Freshly recomputed initial offset, compared against the cached one.  */
  int t;

#ifdef ELIMINABLE_REGS
  struct elim_table *ep;

  /* Re-query the target for each pair's initial offset; any mismatch
     with the value cached in set_initial_elim_offsets is fatal.  */
  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
    {
      INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, t);
      if (t != ep->initial_offset)
	abort ();
    }
#else
  /* Targets without ELIMINABLE_REGS have only the single fp->sp pair.  */
  INITIAL_FRAME_POINTER_OFFSET (t);
  if (t != reg_eliminate[0].initial_offset)
    abort ();
#endif
}
3444
3445 /* Reset all offsets on eliminable registers to their initial values. */
static void
set_initial_elim_offsets ()
{
  struct elim_table *ep = reg_eliminate;

#ifdef ELIMINABLE_REGS
  /* Ask the target for the initial offset of each pair and start both
     the current and previous offsets there.  */
  for (; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
    {
      INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, ep->initial_offset);
      ep->previous_offset = ep->offset = ep->initial_offset;
    }
#else
  /* Only the single fp->sp elimination exists on this target.  */
  INITIAL_FRAME_POINTER_OFFSET (ep->initial_offset);
  ep->previous_offset = ep->offset = ep->initial_offset;
#endif

  /* By construction, every pair is now at its initial offset.  */
  num_not_at_initial_offset = 0;
}
3464
3465 /* Initialize the known label offsets.
3466 Set a known offset for each forced label to be at the initial offset
3467 of each elimination. We do this because we assume that all
3468 computed jumps occur from a location where each elimination is
3469 at its initial offset.
3470 For all other labels, show that we don't know the offsets. */
3471
3472 static void
3473 set_initial_label_offsets ()
3474 {
3475 rtx x;
3476 bzero ((char *) &offsets_known_at[get_first_label_num ()], num_labels);
3477
3478 for (x = forced_labels; x; x = XEXP (x, 1))
3479 if (XEXP (x, 0))
3480 set_label_offsets (XEXP (x, 0), NULL_RTX, 1);
3481 }
3482
3483 /* Set all elimination offsets to the known values for the code label given
3484 by INSN. */
3485 static void
3486 set_offsets_for_label (insn)
3487 rtx insn;
3488 {
3489 int i;
3490 int label_nr = CODE_LABEL_NUMBER (insn);
3491 struct elim_table *ep;
3492
3493 num_not_at_initial_offset = 0;
3494 for (i = 0, ep = reg_eliminate; i < NUM_ELIMINABLE_REGS; ep++, i++)
3495 {
3496 ep->offset = ep->previous_offset = offsets_at[label_nr][i];
3497 if (ep->can_eliminate && ep->offset != ep->initial_offset)
3498 num_not_at_initial_offset++;
3499 }
3500 }
3501
3502 /* See if anything that happened changes which eliminations are valid.
3503 For example, on the Sparc, whether or not the frame pointer can
3504 be eliminated can depend on what registers have been used. We need
3505 not check some conditions again (such as flag_omit_frame_pointer)
3506 since they can't have changed. */
3507
static void
update_eliminables (pset)
     HARD_REG_SET *pset;  /* Out: hard regs that must now be spilled.  */
{
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
  int previous_frame_pointer_needed = frame_pointer_needed;
#endif
  struct elim_table *ep;

  /* First pass: disable any elimination whose target condition no
     longer holds.  */
  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
    if ((ep->from == HARD_FRAME_POINTER_REGNUM && FRAME_POINTER_REQUIRED)
#ifdef ELIMINABLE_REGS
	|| ! CAN_ELIMINATE (ep->from, ep->to)
#endif
	)
      ep->can_eliminate = 0;

  /* Look for the case where we have discovered that we can't replace
     register A with register B and that means that we will now be
     trying to replace register A with register C.  This means we can
     no longer replace register C with register B and we need to disable
     such an elimination, if it exists.  This occurs often with A == ap,
     B == sp, and C == fp.  */

  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
    {
      struct elim_table *op;
      /* Stays -1 when no surviving elimination for ep->from exists, in
	 which case the second scan below matches nothing.  */
      register int new_to = -1;

      if (! ep->can_eliminate && ep->can_eliminate_previous)
	{
	  /* Find the current elimination for ep->from, if there is a
	     new one.  */
	  for (op = reg_eliminate;
	       op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
	    if (op->from == ep->from && op->can_eliminate)
	      {
		new_to = op->to;
		break;
	      }

	  /* See if there is an elimination of NEW_TO -> EP->TO.  If so,
	     disable it.  */
	  for (op = reg_eliminate;
	       op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
	    if (op->from == new_to && op->to == ep->to)
	      op->can_eliminate = 0;
	}
    }

  /* See if any registers that we thought we could eliminate the previous
     time are no longer eliminable.  If so, something has changed and we
     must spill the register.  Also, recompute the number of eliminable
     registers and see if the frame pointer is needed; it is if there is
     no elimination of the frame pointer that we can perform.  */

  frame_pointer_needed = 1;
  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
    {
      /* Any surviving elimination of fp into something other than the
	 hard fp means we can do without a frame pointer.  */
      if (ep->can_eliminate && ep->from == FRAME_POINTER_REGNUM
	  && ep->to != HARD_FRAME_POINTER_REGNUM)
	frame_pointer_needed = 0;

      if (! ep->can_eliminate && ep->can_eliminate_previous)
	{
	  ep->can_eliminate_previous = 0;
	  /* Report this newly-uneliminable register for spilling.  */
	  SET_HARD_REG_BIT (*pset, ep->from);
	  num_eliminable--;
	}
    }

#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
  /* If we didn't need a frame pointer last time, but we do now, spill
     the hard frame pointer.  */
  if (frame_pointer_needed && ! previous_frame_pointer_needed)
    SET_HARD_REG_BIT (*pset, HARD_FRAME_POINTER_REGNUM);
#endif
}
3586
3587 /* Initialize the table of registers to eliminate. */
static void
init_elim_table ()
{
  struct elim_table *ep;

  /* Does this function require a frame pointer?  */

  frame_pointer_needed = (! flag_omit_frame_pointer
#ifdef EXIT_IGNORE_STACK
			  /* ?? If EXIT_IGNORE_STACK is set, we will not save
			     and restore sp for alloca.  So we can't eliminate
			     the frame pointer in that case.  At some point,
			     we should improve this by emitting the
			     sp-adjusting insns for this case.  */
			  || (current_function_calls_alloca
			      && EXIT_IGNORE_STACK)
#endif
			  || FRAME_POINTER_REQUIRED);

  num_eliminable = 0;

#ifdef ELIMINABLE_REGS
  /* An elimination is initially usable if the target allows it and it
     does not try to replace the stack pointer while a frame pointer is
     required.  */
  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
    {
      ep->can_eliminate = ep->can_eliminate_previous
	= (CAN_ELIMINATE (ep->from, ep->to)
	   && ! (ep->to == STACK_POINTER_REGNUM && frame_pointer_needed));
    }
#else
  /* Only the single fp->sp elimination exists on this target.  */
  reg_eliminate[0].can_eliminate = reg_eliminate[0].can_eliminate_previous
    = ! frame_pointer_needed;
#endif

  /* Count the number of eliminable registers and build the FROM and TO
     REG rtx's.  Note that code in gen_rtx will cause, e.g.,
     gen_rtx (REG, Pmode, STACK_POINTER_REGNUM) to equal stack_pointer_rtx.
     We depend on this.  */
  for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
    {
      num_eliminable += ep->can_eliminate;
      ep->from_rtx = gen_rtx_REG (Pmode, ep->from);
      ep->to_rtx = gen_rtx_REG (Pmode, ep->to);
    }
}
3632 \f
/* Kick all pseudos out of hard register REGNO.
   If DUMPFILE is nonzero, log actions taken on that file.

   If CANT_ELIMINATE is nonzero, it means that we are doing this spill
   because we found we can't eliminate some register.  In that case, no
   pseudos are allowed to be in the register, even if they are only in a
   block that doesn't require spill registers, unlike the case when we
   are spilling this hard reg to produce another spill register.

   Any pseudo spilled here is recorded in the spilled_pseudos set.  */
3643
3644 static void
3645 spill_hard_reg (regno, dumpfile, cant_eliminate)
3646 register int regno;
3647 FILE *dumpfile;
3648 int cant_eliminate;
3649 {
3650 register int i;
3651
3652 if (cant_eliminate)
3653 {
3654 SET_HARD_REG_BIT (bad_spill_regs_global, regno);
3655 regs_ever_live[regno] = 1;
3656 }
3657
3658 /* Spill every pseudo reg that was allocated to this reg
3659 or to something that overlaps this reg. */
3660
3661 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
3662 if (reg_renumber[i] >= 0
3663 && reg_renumber[i] <= regno
3664 && (reg_renumber[i]
3665 + HARD_REGNO_NREGS (reg_renumber[i],
3666 PSEUDO_REGNO_MODE (i))
3667 > regno))
3668 SET_REGNO_REG_SET (spilled_pseudos, i);
3669 }
3670
/* OR the contents of *SET2 into *SET1.
   I'm getting weird preprocessor errors if I use IOR_HARD_REG_SET
   from within EXECUTE_IF_SET_IN_REG_SET.  Hence this awkwardness.  */
static void
ior_hard_reg_set (set1, set2)
     HARD_REG_SET *set1, *set2;
{
  IOR_HARD_REG_SET (*set1, *set2);
}
3679
/* After find_reload_regs has been run for all insn that need reloads,
   and/or spill_hard_regs was called, this function is used to actually
   spill pseudo registers and try to reallocate them.  It also sets up the
   spill_regs array for use by choose_reload_regs.

   GLOBAL nonzero means global register allocation info is available, so
   we may retry allocation of the spilled pseudos via retry_global_alloc.
   DUMPFILE, if nonzero, receives a log of reallocation decisions.
   Returns nonzero if anything changed that requires running reload's
   main loop again.  */

static int
finish_spills (global, dumpfile)
     int global;
     FILE *dumpfile;
{
  struct insn_chain *chain;
  int something_changed = 0;
  int i;

  /* Build the spill_regs array for the function.  */
  /* If there are some registers still to eliminate and one of the spill regs
     wasn't ever used before, additional stack space may have to be
     allocated to store this register.  Thus, we may have changed the offset
     between the stack and frame pointers, so mark that something has changed.

     One might think that we need only set VAL to 1 if this is a call-used
     register.  However, the set of registers that must be saved by the
     prologue is not identical to the call-used set.  For example, the
     register used by the call insn for the return PC is a call-used register,
     but must be saved by the prologue.  */

  n_spills = 0;
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (TEST_HARD_REG_BIT (used_spill_regs, i))
      {
	spill_reg_order[i] = n_spills;
	spill_regs[n_spills++] = i;
	/* See the long comment above: a never-before-live spill reg may
	   change stack layout if eliminations are still pending.  */
	if (num_eliminable && ! regs_ever_live[i])
	  something_changed = 1;
	regs_ever_live[i] = 1;
      }
    else
      spill_reg_order[i] = -1;

  /* Kick each spilled pseudo out of its hard register.  */
  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
    if (REGNO_REG_SET_P (spilled_pseudos, i))
      {
	/* Record the current hard register the pseudo is allocated to in
	   pseudo_previous_regs so we avoid reallocating it to the same
	   hard reg in a later pass.  */
	if (reg_renumber[i] < 0)
	  abort ();
	SET_HARD_REG_BIT (pseudo_previous_regs[i], reg_renumber[i]);
	/* Mark it as no longer having a hard register home.  */
	reg_renumber[i] = -1;
	/* We will need to scan everything again.  */
	something_changed = 1;
      }

  /* Retry global register allocation if possible.  */
  if (global)
    {
      bzero ((char *) pseudo_forbidden_regs, max_regno * sizeof (HARD_REG_SET));
      /* For every insn that needs reloads, set the registers used as spill
	 regs in pseudo_forbidden_regs for every pseudo live across the
	 insn.  */
      for (chain = insns_need_reload; chain; chain = chain->next_need_reload)
	{
	  EXECUTE_IF_SET_IN_REG_SET
	    (chain->live_before, FIRST_PSEUDO_REGISTER, i,
	     {
	       ior_hard_reg_set (pseudo_forbidden_regs + i,
				 &chain->used_spill_regs);
	     });
	  EXECUTE_IF_SET_IN_REG_SET
	    (chain->live_after, FIRST_PSEUDO_REGISTER, i,
	     {
	       ior_hard_reg_set (pseudo_forbidden_regs + i,
				 &chain->used_spill_regs);
	     });
	}

      /* Retry allocating the spilled pseudos.  For each reg, merge the
	 various reg sets that indicate which hard regs can't be used,
	 and call retry_global_alloc.
	 We change spill_pseudos here to only contain pseudos that did not
	 get a new hard register.  */
      for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
	if (reg_old_renumber[i] != reg_renumber[i])
	  {
	    HARD_REG_SET forbidden;
	    COPY_HARD_REG_SET (forbidden, bad_spill_regs_global);
	    IOR_HARD_REG_SET (forbidden, pseudo_forbidden_regs[i]);
	    IOR_HARD_REG_SET (forbidden, pseudo_previous_regs[i]);
	    retry_global_alloc (i, forbidden);
	    if (reg_renumber[i] >= 0)
	      CLEAR_REGNO_REG_SET (spilled_pseudos, i);
	  }
    }

  /* Fix up the register information in the insn chain.
     This involves deleting those of the spilled pseudos which did not get
     a new hard register home from the live_{before,after} sets.  */
  for (chain = reload_insn_chain; chain; chain = chain->next)
    {
      HARD_REG_SET used_by_pseudos;
      HARD_REG_SET used_by_pseudos2;

      AND_COMPL_REG_SET (chain->live_before, spilled_pseudos);
      AND_COMPL_REG_SET (chain->live_after, spilled_pseudos);

      /* Mark any unallocated hard regs as available for spills.  That
	 makes inheritance work somewhat better.  */
      if (chain->need_reload)
	{
	  REG_SET_TO_HARD_REG_SET (used_by_pseudos, chain->live_before);
	  REG_SET_TO_HARD_REG_SET (used_by_pseudos2, chain->live_after);
	  IOR_HARD_REG_SET (used_by_pseudos, used_by_pseudos2);

	  /* Save the old value for the sanity test below.  */
	  COPY_HARD_REG_SET (used_by_pseudos2, chain->used_spill_regs);

	  compute_use_by_pseudos (&used_by_pseudos, chain->live_before);
	  compute_use_by_pseudos (&used_by_pseudos, chain->live_after);
	  /* used_spill_regs becomes: every hard reg not used by a live
	     pseudo here, restricted to the function-wide spill set.  */
	  COMPL_HARD_REG_SET (chain->used_spill_regs, used_by_pseudos);
	  AND_HARD_REG_SET (chain->used_spill_regs, used_spill_regs);

	  /* Make sure we only enlarge the set.  */
	  GO_IF_HARD_REG_SUBSET (used_by_pseudos2, chain->used_spill_regs, ok);
	  abort ();
	ok:;
	}
    }

  /* Let alter_reg modify the reg rtx's for the modified pseudos.  */
  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
    {
      int regno = reg_renumber[i];
      if (reg_old_renumber[i] == regno)
	continue;

      alter_reg (i, reg_old_renumber[i]);
      reg_old_renumber[i] = regno;
      if (dumpfile)
	{
	  if (regno == -1)
	    fprintf (dumpfile, " Register %d now on stack.\n\n", i);
	  else
	    fprintf (dumpfile, " Register %d now in %d.\n\n",
		     i, reg_renumber[i]);
	}
    }

  return something_changed;
}
3830 \f
/* Find all paradoxical subregs within X and update reg_max_ref_width.
   Also mark any hard registers used to store user variables as
   forbidden from being used for spill registers.  (The latter is
   currently disabled -- see the #if 0 below.)  */

static void
scan_paradoxical_subregs (x)
     register rtx x;
{
  register int i;
  register char *fmt;
  register enum rtx_code code = GET_CODE (x);

  switch (code)
    {
    case REG:
#if 0
      if (SMALL_REGISTER_CLASSES && REGNO (x) < FIRST_PSEUDO_REGISTER
	  && REG_USERVAR_P (x))
	SET_HARD_REG_BIT (bad_spill_regs_global, REGNO (x));
#endif
      return;

    /* These codes cannot contain a paradoxical SUBREG we care about.  */
    case CONST_INT:
    case CONST:
    case SYMBOL_REF:
    case LABEL_REF:
    case CONST_DOUBLE:
    case CC0:
    case PC:
    case USE:
    case CLOBBER:
      return;

    case SUBREG:
      /* A paradoxical subreg: the outer mode is wider than the mode of
	 the inner register.  Record the outer width for that register.  */
      if (GET_CODE (SUBREG_REG (x)) == REG
	  && GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
	reg_max_ref_width[REGNO (SUBREG_REG (x))]
	  = GET_MODE_SIZE (GET_MODE (x));
      return;

    default:
      break;
    }

  /* Recursively scan all subexpressions of X.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	scan_paradoxical_subregs (XEXP (x, i));
      else if (fmt[i] == 'E')
	{
	  register int j;
	  for (j = XVECLEN (x, i) - 1; j >=0; j--)
	    scan_paradoxical_subregs (XVECEXP (x, i, j));
	}
    }
}
3888 \f
3889 static int
3890 hard_reg_use_compare (p1p, p2p)
3891 const GENERIC_PTR p1p;
3892 const GENERIC_PTR p2p;
3893 {
3894 struct hard_reg_n_uses *p1 = (struct hard_reg_n_uses *)p1p;
3895 struct hard_reg_n_uses *p2 = (struct hard_reg_n_uses *)p2p;
3896 int bad1 = TEST_HARD_REG_BIT (bad_spill_regs, p1->regno);
3897 int bad2 = TEST_HARD_REG_BIT (bad_spill_regs, p2->regno);
3898 if (bad1 && bad2)
3899 return p1->regno - p2->regno;
3900 if (bad1)
3901 return 1;
3902 if (bad2)
3903 return -1;
3904 if (p1->uses > p2->uses)
3905 return 1;
3906 if (p1->uses < p2->uses)
3907 return -1;
3908 /* If regs are equally good, sort by regno,
3909 so that the results of qsort leave nothing to chance. */
3910 return p1->regno - p2->regno;
3911 }
3912
/* Used for communication between order_regs_for_reload and count_pseudo.
   Holds the set of pseudos already counted, to avoid counting one
   pseudo twice.  */
static regset pseudos_counted;
3916
3917 /* Update the costs in N_USES, considering that pseudo REG is live. */
3918 static void
3919 count_pseudo (n_uses, reg)
3920 struct hard_reg_n_uses *n_uses;
3921 int reg;
3922 {
3923 int r = reg_renumber[reg];
3924 int nregs;
3925
3926 if (REGNO_REG_SET_P (pseudos_counted, reg))
3927 return;
3928 SET_REGNO_REG_SET (pseudos_counted, reg);
3929
3930 if (r < 0)
3931 abort ();
3932
3933 nregs = HARD_REGNO_NREGS (r, PSEUDO_REGNO_MODE (reg));
3934 while (nregs-- > 0)
3935 n_uses[r++].uses += REG_N_REFS (reg);
3936 }
/* Choose the order to consider regs for use as reload registers
   based on how much trouble would be caused by spilling one.
   Store them in order of decreasing preference in potential_reload_regs.
   CHAIN identifies the insn we are choosing for; registers live before
   or after it are excluded via bad_spill_regs.  */

static void
order_regs_for_reload (chain)
     struct insn_chain *chain;
{
  register int i;
  register int o = 0;
  struct hard_reg_n_uses hard_reg_n_uses[FIRST_PSEUDO_REGISTER];

  pseudos_counted = ALLOCA_REG_SET ();

  /* Start from the registers that are globally unusable for spilling.  */
  COPY_HARD_REG_SET (bad_spill_regs, bad_spill_regs_global);

  /* Count number of uses of each hard reg by pseudo regs allocated to it
     and then order them by decreasing use.  */

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      int j;

      hard_reg_n_uses[i].regno = i;
      hard_reg_n_uses[i].uses = 0;

      /* Test the various reasons why we can't use a register for
	 spilling in this insn.  */
      if (fixed_regs[i]
	  || REGNO_REG_SET_P (chain->live_before, i)
	  || REGNO_REG_SET_P (chain->live_after, i))
	{
	  SET_HARD_REG_BIT (bad_spill_regs, i);
	  continue;
	}

      /* Now find out which pseudos are allocated to it, and update
	 hard_reg_n_uses.  */
      CLEAR_REG_SET (pseudos_counted);

      EXECUTE_IF_SET_IN_REG_SET
	(chain->live_before, FIRST_PSEUDO_REGISTER, j,
	 {
	   count_pseudo (hard_reg_n_uses, j);
	 });
      EXECUTE_IF_SET_IN_REG_SET
	(chain->live_after, FIRST_PSEUDO_REGISTER, j,
	 {
	   count_pseudo (hard_reg_n_uses, j);
	 });
    }

  FREE_REG_SET (pseudos_counted);

  /* Prefer registers not so far used, for use in temporary loading.
     Among them, if REG_ALLOC_ORDER is defined, use that order.
     Otherwise, prefer registers not preserved by calls.  */

#ifdef REG_ALLOC_ORDER
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      int regno = reg_alloc_order[i];

      if (hard_reg_n_uses[regno].uses == 0
	  && ! TEST_HARD_REG_BIT (bad_spill_regs, regno))
	potential_reload_regs[o++] = regno;
    }
#else
  /* First the unused call-clobbered registers...  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      if (hard_reg_n_uses[i].uses == 0 && call_used_regs[i]
	  && ! TEST_HARD_REG_BIT (bad_spill_regs, i))
	potential_reload_regs[o++] = i;
    }
  /* ... then the unused call-saved registers.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      if (hard_reg_n_uses[i].uses == 0 && ! call_used_regs[i]
	  && ! TEST_HARD_REG_BIT (bad_spill_regs, i))
	potential_reload_regs[o++] = i;
    }
#endif

  /* Sort by increasing use count; forbidden registers sort to the end
     (see hard_reg_use_compare).  */
  qsort (hard_reg_n_uses, FIRST_PSEUDO_REGISTER,
	 sizeof hard_reg_n_uses[0], hard_reg_use_compare);

  /* Now add the regs that are already used,
     preferring those used less often.  The fixed and otherwise forbidden
     registers will be at the end of this list.  */

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (hard_reg_n_uses[i].uses != 0
	&& ! TEST_HARD_REG_BIT (bad_spill_regs, hard_reg_n_uses[i].regno))
      potential_reload_regs[o++] = hard_reg_n_uses[i].regno;
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (TEST_HARD_REG_BIT (bad_spill_regs, hard_reg_n_uses[i].regno))
      potential_reload_regs[o++] = hard_reg_n_uses[i].regno;
}
4034 \f
/* Reload pseudo-registers into hard regs around each insn as needed.
   Additional register load insns are output before the insn that needs it
   and perhaps store insns after insns that modify the reloaded pseudo reg.

   reg_last_reload_reg and reg_reloaded_contents keep track of
   which registers are already available in reload registers.
   We update these for the reloads that we perform,
   as the insns are scanned.

   LIVE_KNOWN nonzero means the liveness information in the insn chain
   is valid; it is passed through to find_reloads.  */

static void
reload_as_needed (live_known)
     int live_known;
{
  struct insn_chain *chain;
  register int i;
  rtx x;

  /* Clear all per-function reload-tracking state before the scan.  */
  bzero ((char *) spill_reg_rtx, sizeof spill_reg_rtx);
  bzero ((char *) spill_reg_store, sizeof spill_reg_store);
  reg_last_reload_reg = (rtx *) alloca (max_regno * sizeof (rtx));
  bzero ((char *) reg_last_reload_reg, max_regno * sizeof (rtx));
  reg_has_output_reload = (char *) alloca (max_regno);
  CLEAR_HARD_REG_SET (reg_reloaded_valid);

  set_initial_elim_offsets ();

  for (chain = reload_insn_chain; chain; chain = chain->next)
    {
      rtx prev;
      rtx insn = chain->insn;
      rtx old_next = NEXT_INSN (insn);

      /* If we pass a label, copy the offsets from the label information
	 into the current offsets of each elimination.  */
      if (GET_CODE (insn) == CODE_LABEL)
	set_offsets_for_label (insn);

      else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	{
	  rtx oldpat = PATTERN (insn);

	  /* If this is a USE and CLOBBER of a MEM, ensure that any
	     references to eliminable registers have been removed.  */

	  if ((GET_CODE (PATTERN (insn)) == USE
	       || GET_CODE (PATTERN (insn)) == CLOBBER)
	      && GET_CODE (XEXP (PATTERN (insn), 0)) == MEM)
	    XEXP (XEXP (PATTERN (insn), 0), 0)
	      = eliminate_regs (XEXP (XEXP (PATTERN (insn), 0), 0),
				GET_MODE (XEXP (PATTERN (insn), 0)),
				NULL_RTX);

	  /* If we need to do register elimination processing, do so.
	     This might delete the insn, in which case we are done.  */
	  if (num_eliminable && chain->need_elim)
	    {
	      eliminate_regs_in_insn (insn, 1);
	      /* The insn was turned into a NOTE, i.e. deleted.  */
	      if (GET_CODE (insn) == NOTE)
		{
		  update_eliminable_offsets ();
		  continue;
		}
	    }

	  /* If need_elim is nonzero but need_reload is zero, one might think
	     that we could simply set n_reloads to 0.  However, find_reloads
	     could have done some manipulation of the insn (such as swapping
	     commutative operands), and these manipulations are lost during
	     the first pass for every insn that needs register elimination.
	     So the actions of find_reloads must be redone here.  */

	  if (! chain->need_elim && ! chain->need_reload
	      && ! chain->need_operand_change)
	    n_reloads = 0;
	  /* First find the pseudo regs that must be reloaded for this insn.
	     This info is returned in the tables reload_... (see reload.h).
	     Also modify the body of INSN by substituting RELOAD
	     rtx's for those pseudo regs.  */
	  else
	    {
	      bzero (reg_has_output_reload, max_regno);
	      CLEAR_HARD_REG_SET (reg_is_output_reload);

	      find_reloads (insn, 1, spill_indirect_levels, live_known,
			    spill_reg_order);
	    }

	  if (num_eliminable && chain->need_elim)
	    update_eliminable_offsets ();

	  if (n_reloads > 0)
	    {
	      rtx next = NEXT_INSN (insn);
	      rtx p;

	      prev = PREV_INSN (insn);

	      /* Now compute which reload regs to reload them into.  Perhaps
		 reusing reload regs from previous insns, or else output
		 load insns to reload them.  Maybe output store insns too.
		 Record the choices of reload reg in reload_reg_rtx.  */
	      choose_reload_regs (chain);

	      /* Merge any reloads that we didn't combine for fear of
		 increasing the number of spill registers needed but now
		 discover can be safely merged.  */
	      if (SMALL_REGISTER_CLASSES)
		merge_assigned_reloads (insn);

	      /* Generate the insns to reload operands into or out of
		 their reload regs.  */
	      emit_reload_insns (chain);

	      /* Substitute the chosen reload regs from reload_reg_rtx
		 into the insn's body (or perhaps into the bodies of other
		 load and store insn that we just made for reloading
		 and that we moved the structure into).  */
	      subst_reloads ();

	      /* If this was an ASM, make sure that all the reload insns
		 we have generated are valid.  If not, give an error
		 and delete them.  */

	      if (asm_noperands (PATTERN (insn)) >= 0)
		for (p = NEXT_INSN (prev); p != next; p = NEXT_INSN (p))
		  if (p != insn && GET_RTX_CLASS (GET_CODE (p)) == 'i'
		      && (recog_memoized (p) < 0
			  || (insn_extract (p),
			      ! constrain_operands (INSN_CODE (p), 1))))
		    {
		      error_for_asm (insn,
				     "`asm' operand requires impossible reload");
		      /* Delete the offending reload insn by turning it
			 into a NOTE.  */
		      PUT_CODE (p, NOTE);
		      NOTE_SOURCE_FILE (p) = 0;
		      NOTE_LINE_NUMBER (p) = NOTE_INSN_DELETED;
		    }
	    }
	  /* Any previously reloaded spilled pseudo reg, stored in this insn,
	     is no longer validly lying around to save a future reload.
	     Note that this does not detect pseudos that were reloaded
	     for this insn in order to be stored in
	     (obeying register constraints).  That is correct; such reload
	     registers ARE still valid.  */
	  note_stores (oldpat, forget_old_reloads_1);

	  /* There may have been CLOBBER insns placed after INSN.  So scan
	     between INSN and NEXT and use them to forget old reloads.  */
	  for (x = NEXT_INSN (insn); x != old_next; x = NEXT_INSN (x))
	    if (GET_CODE (x) == INSN && GET_CODE (PATTERN (x)) == CLOBBER)
	      note_stores (PATTERN (x), forget_old_reloads_1);

#ifdef AUTO_INC_DEC
	  /* Likewise for regs altered by auto-increment in this insn.
	     REG_INC notes have been changed by reloading:
	     find_reloads_address_1 records substitutions for them,
	     which have been performed by subst_reloads above.  */
	  for (i = n_reloads - 1; i >= 0; i--)
	    {
	      rtx in_reg = reload_in_reg[i];
	      if (in_reg)
		{
		  enum rtx_code code = GET_CODE (in_reg);
		  /* PRE_INC / PRE_DEC will have the reload register ending up
		     with the same value as the stack slot, but that doesn't
		     hold true for POST_INC / POST_DEC.  Either we have to
		     convert the memory access to a true POST_INC / POST_DEC,
		     or we can't use the reload register for inheritance.  */
		  if ((code == POST_INC || code == POST_DEC)
		      && TEST_HARD_REG_BIT (reg_reloaded_valid,
					    REGNO (reload_reg_rtx[i]))
		      /* Make sure it is the inc/dec pseudo, and not
			 some other (e.g. output operand) pseudo.  */
		      && (reg_reloaded_contents[REGNO (reload_reg_rtx[i])]
			  == REGNO (XEXP (in_reg, 0))))

		    {
		      rtx reload_reg = reload_reg_rtx[i];
		      enum machine_mode mode = GET_MODE (reload_reg);
		      int n = 0;
		      rtx p;

		      /* Scan backwards over the insns we emitted for this
			 insn, looking for the single use of the reload reg
			 that we can rewrite as a true POST_INC/POST_DEC.  */
		      for (p = PREV_INSN (old_next); p != prev; p = PREV_INSN (p))
			{
			  /* We really want to ignore REG_INC notes here, so
			     use PATTERN (p) as argument to reg_set_p .  */
			  if (reg_set_p (reload_reg, PATTERN (p)))
			    break;
			  n = count_occurrences (PATTERN (p), reload_reg);
			  if (! n)
			    continue;
			  if (n == 1)
			    n = validate_replace_rtx (reload_reg,
						      gen_rtx (code, mode,
							       reload_reg), p);
			  break;
			}
		      if (n == 1)
			REG_NOTES (p) = gen_rtx_EXPR_LIST (REG_INC, reload_reg,
							   REG_NOTES (p));
		      else
			/* Couldn't convert; the reload reg can't be
			   inherited, so forget it.  */
			forget_old_reloads_1 (XEXP (in_reg, 0), NULL_RTX);
		    }
		}
	    }
#if 0 /* ??? Is this code obsolete now?  Need to check carefully. */
	  /* Likewise for regs altered by auto-increment in this insn.
	     But note that the reg-notes are not changed by reloading:
	     they still contain the pseudo-regs, not the spill regs.  */
	  for (x = REG_NOTES (insn); x; x = XEXP (x, 1))
	    if (REG_NOTE_KIND (x) == REG_INC)
	      {
		/* See if this pseudo reg was reloaded in this insn.
		   If so, its last-reload info is still valid
		   because it is based on this insn's reload.  */
		for (i = 0; i < n_reloads; i++)
		  if (reload_out[i] == XEXP (x, 0))
		    break;

		if (i == n_reloads)
		  forget_old_reloads_1 (XEXP (x, 0), NULL_RTX);
	      }
#endif
#endif
	}
      /* A reload reg's contents are unknown after a label.  */
      if (GET_CODE (insn) == CODE_LABEL)
	CLEAR_HARD_REG_SET (reg_reloaded_valid);

      /* Don't assume a reload reg is still good after a call insn
	 if it is a call-used reg.  */
      else if (GET_CODE (insn) == CALL_INSN)
	AND_COMPL_HARD_REG_SET(reg_reloaded_valid, call_used_reg_set);

      /* In case registers overlap, allow certain insns to invalidate
	 particular hard registers.  */

#ifdef INSN_CLOBBERS_REGNO_P
      for (i = 0 ; i < FIRST_PSEUDO_REGISTER; i++)
	if (TEST_HARD_REG_BIT (reg_reloaded_valid, i)
	    && INSN_CLOBBERS_REGNO_P (insn, i))
	  CLEAR_HARD_REG_BIT (reg_reloaded_valid, i);
#endif

#ifdef USE_C_ALLOCA
      alloca (0);
#endif
    }
}
4283
/* Discard all record of any value reloaded from X,
   or reloaded in X from someplace else;
   unless X is an output reload reg of the current insn.

   X may be a hard reg (the reload reg)
   or it may be a pseudo reg that was reloaded from.

   IGNORED is unused; the two-argument signature is required by
   note_stores, which calls this for each store in an insn.  */

static void
forget_old_reloads_1 (x, ignored)
     rtx x;
     rtx ignored ATTRIBUTE_UNUSED;
{
  register int regno;
  int nr;
  int offset = 0;

  /* note_stores does give us subregs of hard regs.  */
  while (GET_CODE (x) == SUBREG)
    {
      offset += SUBREG_WORD (x);
      x = SUBREG_REG (x);
    }

  if (GET_CODE (x) != REG)
    return;

  regno = REGNO (x) + offset;

  if (regno >= FIRST_PSEUDO_REGISTER)
    nr = 1;
  else
    {
      int i;
      nr = HARD_REGNO_NREGS (regno, GET_MODE (x));
      /* Storing into a spilled-reg invalidates its contents.
	 This can happen if a block-local pseudo is allocated to that reg
	 and it wasn't spilled because this block's total need is 0.
	 Then some insn might have an optional reload and use this reg.  */
      for (i = 0; i < nr; i++)
	/* But don't do this if the reg actually serves as an output
	   reload reg in the current instruction.  */
	if (n_reloads == 0
	    || ! TEST_HARD_REG_BIT (reg_is_output_reload, regno + i))
	  CLEAR_HARD_REG_BIT (reg_reloaded_valid, regno + i);
    }

  /* Since value of X has changed,
     forget any value previously copied from it.  */

  while (nr-- > 0)
    /* But don't forget a copy if this is the output reload
       that establishes the copy's validity.  */
    if (n_reloads == 0 || reg_has_output_reload[regno + nr] == 0)
      reg_last_reload_reg[regno + nr] = 0;
}
4339 \f
/* Per-reload information used by the sorting and register-freeing code
   below.  */

/* For each reload, the mode of the reload register.  */
static enum machine_mode reload_mode[MAX_RELOADS];

/* For each reload, the largest number of registers it will require.  */
static int reload_nregs[MAX_RELOADS];
4345
4346 /* Comparison function for qsort to decide which of two reloads
4347 should be handled first. *P1 and *P2 are the reload numbers. */
4348
4349 static int
4350 reload_reg_class_lower (r1p, r2p)
4351 const GENERIC_PTR r1p;
4352 const GENERIC_PTR r2p;
4353 {
4354 register int r1 = *(short *)r1p, r2 = *(short *)r2p;
4355 register int t;
4356
4357 /* Consider required reloads before optional ones. */
4358 t = reload_optional[r1] - reload_optional[r2];
4359 if (t != 0)
4360 return t;
4361
4362 /* Count all solitary classes before non-solitary ones. */
4363 t = ((reg_class_size[(int) reload_reg_class[r2]] == 1)
4364 - (reg_class_size[(int) reload_reg_class[r1]] == 1));
4365 if (t != 0)
4366 return t;
4367
4368 /* Aside from solitaires, consider all multi-reg groups first. */
4369 t = reload_nregs[r2] - reload_nregs[r1];
4370 if (t != 0)
4371 return t;
4372
4373 /* Consider reloads in order of increasing reg-class number. */
4374 t = (int) reload_reg_class[r1] - (int) reload_reg_class[r2];
4375 if (t != 0)
4376 return t;
4377
4378 /* If reloads are equally urgent, sort by reload number,
4379 so that the results of qsort leave nothing to chance. */
4380 return r1 - r2;
4381 }
4382 \f
/* The following HARD_REG_SETs indicate when each hard register is
   used for a reload of various parts of the current insn.  */

/* If reg is in use as a reload reg for a RELOAD_OTHER reload.  */
static HARD_REG_SET reload_reg_used;
/* If reg is in use for a RELOAD_FOR_INPUT_ADDRESS reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_INPADDR_ADDRESS reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_inpaddr_addr[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_OUTPUT_ADDRESS reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_OUTADDR_ADDRESS reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_outaddr_addr[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_INPUT reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_input[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_OUTPUT reload for operand I.  */
static HARD_REG_SET reload_reg_used_in_output[MAX_RECOG_OPERANDS];
/* If reg is in use for a RELOAD_FOR_OPERAND_ADDRESS reload.  */
static HARD_REG_SET reload_reg_used_in_op_addr;
/* If reg is in use for a RELOAD_FOR_OPADDR_ADDR reload.  */
static HARD_REG_SET reload_reg_used_in_op_addr_reload;
/* If reg is in use for a RELOAD_FOR_INSN reload.  */
static HARD_REG_SET reload_reg_used_in_insn;
/* If reg is in use for a RELOAD_FOR_OTHER_ADDRESS reload.  */
static HARD_REG_SET reload_reg_used_in_other_addr;

/* If reg is in use as a reload reg for any sort of reload.  */
static HARD_REG_SET reload_reg_used_at_all;

/* If reg is used as an inherited reload.  We just mark the first register
   in the group.  */
static HARD_REG_SET reload_reg_used_for_inherit;

/* Records which hard regs are allocated to a pseudo during any point of the
   current insn.  */
static HARD_REG_SET reg_used_by_pseudo;
4419
4420 /* Mark reg REGNO as in use for a reload of the sort spec'd by OPNUM and
4421 TYPE. MODE is used to indicate how many consecutive regs are
4422 actually used. */
4423
4424 static void
4425 mark_reload_reg_in_use (regno, opnum, type, mode)
4426 int regno;
4427 int opnum;
4428 enum reload_type type;
4429 enum machine_mode mode;
4430 {
4431 int nregs = HARD_REGNO_NREGS (regno, mode);
4432 int i;
4433
4434 for (i = regno; i < nregs + regno; i++)
4435 {
4436 switch (type)
4437 {
4438 case RELOAD_OTHER:
4439 SET_HARD_REG_BIT (reload_reg_used, i);
4440 break;
4441
4442 case RELOAD_FOR_INPUT_ADDRESS:
4443 SET_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], i);
4444 break;
4445
4446 case RELOAD_FOR_INPADDR_ADDRESS:
4447 SET_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], i);
4448 break;
4449
4450 case RELOAD_FOR_OUTPUT_ADDRESS:
4451 SET_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], i);
4452 break;
4453
4454 case RELOAD_FOR_OUTADDR_ADDRESS:
4455 SET_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], i);
4456 break;
4457
4458 case RELOAD_FOR_OPERAND_ADDRESS:
4459 SET_HARD_REG_BIT (reload_reg_used_in_op_addr, i);
4460 break;
4461
4462 case RELOAD_FOR_OPADDR_ADDR:
4463 SET_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, i);
4464 break;
4465
4466 case RELOAD_FOR_OTHER_ADDRESS:
4467 SET_HARD_REG_BIT (reload_reg_used_in_other_addr, i);
4468 break;
4469
4470 case RELOAD_FOR_INPUT:
4471 SET_HARD_REG_BIT (reload_reg_used_in_input[opnum], i);
4472 break;
4473
4474 case RELOAD_FOR_OUTPUT:
4475 SET_HARD_REG_BIT (reload_reg_used_in_output[opnum], i);
4476 break;
4477
4478 case RELOAD_FOR_INSN:
4479 SET_HARD_REG_BIT (reload_reg_used_in_insn, i);
4480 break;
4481 }
4482
4483 SET_HARD_REG_BIT (reload_reg_used_at_all, i);
4484 }
4485 }
4486
/* Similarly, but show REGNO is no longer in use for a reload of the
   sort spec'd by OPNUM and TYPE.  MODE indicates how many consecutive
   regs to free.  */

static void
clear_reload_reg_in_use (regno, opnum, type, mode)
     int regno;
     int opnum;
     enum reload_type type;
     enum machine_mode mode;
{
  int nregs = HARD_REGNO_NREGS (regno, mode);
  int start_regno, end_regno;
  int i;
  /* A complication is that for some reload types, inheritance might
     allow multiple reloads of the same types to share a reload register.
     We set check_opnum if we have to check only reloads with the same
     operand number, and check_any if we have to check all reloads.  */
  int check_opnum = 0;
  int check_any = 0;
  HARD_REG_SET *used_in_set;

  /* Select the in-use set corresponding to TYPE, and decide which other
     reloads may share registers with it (see comment above).  */
  switch (type)
    {
    case RELOAD_OTHER:
      used_in_set = &reload_reg_used;
      break;

    case RELOAD_FOR_INPUT_ADDRESS:
      used_in_set = &reload_reg_used_in_input_addr[opnum];
      break;

    case RELOAD_FOR_INPADDR_ADDRESS:
      check_opnum = 1;
      used_in_set = &reload_reg_used_in_inpaddr_addr[opnum];
      break;

    case RELOAD_FOR_OUTPUT_ADDRESS:
      used_in_set = &reload_reg_used_in_output_addr[opnum];
      break;

    case RELOAD_FOR_OUTADDR_ADDRESS:
      check_opnum = 1;
      used_in_set = &reload_reg_used_in_outaddr_addr[opnum];
      break;

    case RELOAD_FOR_OPERAND_ADDRESS:
      used_in_set = &reload_reg_used_in_op_addr;
      break;

    case RELOAD_FOR_OPADDR_ADDR:
      check_any = 1;
      used_in_set = &reload_reg_used_in_op_addr_reload;
      break;

    case RELOAD_FOR_OTHER_ADDRESS:
      used_in_set = &reload_reg_used_in_other_addr;
      check_any = 1;
      break;

    case RELOAD_FOR_INPUT:
      used_in_set = &reload_reg_used_in_input[opnum];
      break;

    case RELOAD_FOR_OUTPUT:
      used_in_set = &reload_reg_used_in_output[opnum];
      break;

    case RELOAD_FOR_INSN:
      used_in_set = &reload_reg_used_in_insn;
      break;
    default:
      abort ();
    }
  /* We resolve conflicts with remaining reloads of the same type by
     excluding the intervals of reload registers by them from the
     interval of freed reload registers.  Since we only keep track of
     one set of interval bounds, we might have to exclude somewhat
     more than what would be necessary if we used a HARD_REG_SET here.
     But this should only happen very infrequently, so there should
     be no reason to worry about it.  */

  start_regno = regno;
  end_regno = regno + nregs;
  if (check_opnum || check_any)
    {
      for (i = n_reloads - 1; i >= 0; i--)
	{
	  if (reload_when_needed[i] == type
	      && (check_any || reload_opnum[i] == opnum)
	      && reload_reg_rtx[i])
	    {
	      int conflict_start = true_regnum (reload_reg_rtx[i]);
	      int conflict_end
		= (conflict_start
		   + HARD_REGNO_NREGS (conflict_start, reload_mode[i]));

	      /* If there is an overlap with the first to-be-freed register,
		 adjust the interval start.  */
	      if (conflict_start <= start_regno && conflict_end > start_regno)
		start_regno = conflict_end;
	      /* Otherwise, if there is a conflict with one of the other
		 to-be-freed registers, adjust the interval end.  */
	      if (conflict_start > start_regno && conflict_start < end_regno)
		end_regno = conflict_start;
	    }
	}
    }
  /* Free whatever remains of the original interval.  */
  for (i = start_regno; i < end_regno; i++)
    CLEAR_HARD_REG_BIT (*used_in_set, i);
}
4596
/* 1 if reg REGNO is free as a reload reg for a reload of the sort
   specified by OPNUM and TYPE.

   REGNO is free exactly when it is not marked in any of the
   reload_reg_used_* bit sets whose lifetime window overlaps that of a
   TYPE reload for operand OPNUM.  Address reloads live before the
   value reloads they feed; inputs live before the insn, outputs after
   it; RELOAD_OTHER spans the whole insn.  */

static int
reload_reg_free_p (regno, opnum, type)
     int regno;
     int opnum;
     enum reload_type type;
{
  int i;

  /* In use for a RELOAD_OTHER means it's not available for anything.  */
  if (TEST_HARD_REG_BIT (reload_reg_used, regno))
    return 0;

  switch (type)
    {
    case RELOAD_OTHER:
      /* In use for anything means we can't use it for RELOAD_OTHER.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno)
          || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
          || TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
        return 0;

      for (i = 0; i < reload_n_operands; i++)
        if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
            || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
            || TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
            || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
            || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
            || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
          return 0;

      return 1;

    case RELOAD_FOR_INPUT:
      if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
          || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
        return 0;

      if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
        return 0;

      /* If it is used for some other input, can't use it.  */
      for (i = 0; i < reload_n_operands; i++)
        if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
          return 0;

      /* If it is used in a later operand's address, can't use it.  */
      for (i = opnum + 1; i < reload_n_operands; i++)
        if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
            || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
          return 0;

      return 1;

    case RELOAD_FOR_INPUT_ADDRESS:
      /* Can't use a register if it is used for an input address for this
         operand or used as an input in an earlier one.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], regno)
          || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], regno))
        return 0;

      for (i = 0; i < opnum; i++)
        if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
          return 0;

      return 1;

    case RELOAD_FOR_INPADDR_ADDRESS:
      /* Can't use a register if it is used for an input address
         for this operand or used as an input in an earlier
         one.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], regno))
        return 0;

      for (i = 0; i < opnum; i++)
        if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
          return 0;

      return 1;

    case RELOAD_FOR_OUTPUT_ADDRESS:
      /* Can't use a register if it is used for an output address for this
         operand or used as an output in this or a later operand.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno))
        return 0;

      for (i = opnum; i < reload_n_operands; i++)
        if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
          return 0;

      return 1;

    case RELOAD_FOR_OUTADDR_ADDRESS:
      /* Can't use a register if it is used for an output address
         for this operand or used as an output in this or a
         later operand.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], regno))
        return 0;

      for (i = opnum; i < reload_n_operands; i++)
        if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
          return 0;

      return 1;

    case RELOAD_FOR_OPERAND_ADDRESS:
      for (i = 0; i < reload_n_operands; i++)
        if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
          return 0;

      return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
              && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));

    case RELOAD_FOR_OPADDR_ADDR:
      for (i = 0; i < reload_n_operands; i++)
        if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
          return 0;

      return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno));

    case RELOAD_FOR_OUTPUT:
      /* This cannot share a register with RELOAD_FOR_INSN reloads, other
         outputs, or an operand address for this or an earlier output.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
        return 0;

      for (i = 0; i < reload_n_operands; i++)
        if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
          return 0;

      for (i = 0; i <= opnum; i++)
        if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
            || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno))
          return 0;

      return 1;

    case RELOAD_FOR_INSN:
      for (i = 0; i < reload_n_operands; i++)
        if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
            || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
          return 0;

      return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
              && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));

    case RELOAD_FOR_OTHER_ADDRESS:
      return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
    }
  /* All reload_type values are handled above; reaching here means a
     new enum value was added without updating this switch.  */
  abort ();
}
4750
/* Return 1 if the value in reload reg REGNO, as used by a reload
   needed for the part of the insn specified by OPNUM and TYPE,
   is not in use for a reload in any prior part of the insn.

   We can assume that the reload reg was already tested for availability
   at the time it is needed, and we should not check this again,
   in case the reg has already been marked in use.

   However, if EQUIV is set, we are checking the availability of a register
   holding an equivalence to the value to be loaded into the reload register,
   not the availability of the reload register itself.

   This is still less stringent than what reload_reg_free_p checks; for
   example, compare the checks for RELOAD_OTHER.  */

static int
reload_reg_free_before_p (regno, opnum, type, equiv)
     int regno;
     int opnum;
     enum reload_type type;
     int equiv;
{
  int i;

  /* The code to handle EQUIV below is wrong.

     If we want to know if a value in a particular reload register is available
     at a particular point in time during reloading, we must check *all*
     prior reloads to see if they clobber the value.

     Note this is significantly different from determining when a register is
     free for usage in a reload!

     This change is temporary.  It will go away.  */
  /* NOTE: because of this early return, every "if (equiv && ...)" test
     below is currently dead code, kept for when EQUIV handling is
     reinstated.  */
  if (equiv)
    return 0;

  switch (type)
    {
    case RELOAD_FOR_OTHER_ADDRESS:
      /* These always come first.  */
      if (equiv && TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno))
        return 0;
      return 1;

    case RELOAD_OTHER:
      if (equiv && TEST_HARD_REG_BIT (reload_reg_used, regno))
        return 0;
      return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);

      /* If this use is for part of the insn,
         check the reg is not in use for any prior part.  It is tempting
         to try to do this by falling through from objects that occur
         later in the insn to ones that occur earlier, but that will not
         correctly take into account the fact that here we MUST ignore
         things that would prevent the register from being allocated in
         the first place, since we know that it was allocated.  */

    case RELOAD_FOR_OUTPUT_ADDRESS:
      if (equiv
          && TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno))
        return 0;
      /* Earlier reloads include RELOAD_FOR_OUTADDR_ADDRESS reloads.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], regno))
        return 0;
      /* ... fall through ... */
    case RELOAD_FOR_OUTADDR_ADDRESS:
      if (equiv
          && (TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], regno)
              || TEST_HARD_REG_BIT (reload_reg_used, regno)))
        return 0;
      /* Earlier reloads are for earlier outputs or their addresses,
         any RELOAD_FOR_INSN reloads, any inputs or their addresses, or any
         RELOAD_FOR_OTHER_ADDRESS reloads (we know it can't conflict with
         RELOAD_OTHER)..  */
      for (i = 0; i < opnum; i++)
        if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
            || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno))
          return 0;

      if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
        return 0;

      for (i = 0; i < reload_n_operands; i++)
        if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
            || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
            || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
            || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
          return 0;

      return (! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno)
              && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
              && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno)
              && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));

    case RELOAD_FOR_OUTPUT:
    case RELOAD_FOR_INSN:
      /* There is no reason to call this function for output reloads, thus
         anything we'd put here wouldn't be tested.  So just abort.  */
      abort ();

    case RELOAD_FOR_OPERAND_ADDRESS:
      if (equiv && TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
        return 0;

      /* Earlier reloads include RELOAD_FOR_OPADDR_ADDR reloads.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
        return 0;

      /* ... fall through ... */

    case RELOAD_FOR_OPADDR_ADDR:
      if (equiv)
        {
          if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno)
              || TEST_HARD_REG_BIT (reload_reg_used, regno))
            return 0;
          for (i = 0; i < reload_n_operands; i++)
            if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
              return 0;
        }
      /* These can't conflict with inputs, or each other, so all we have to
         test is input addresses and the addresses of OTHER items.  */

      for (i = 0; i < reload_n_operands; i++)
        if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
            || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
          return 0;

      return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);

    case RELOAD_FOR_INPUT:
      if (equiv && TEST_HARD_REG_BIT (reload_reg_used, regno))
        return 0;

      /* The only things earlier are the address for this and
         earlier inputs, other inputs (which we know we don't conflict
         with), and addresses of RELOAD_OTHER objects.
         We can ignore the conflict with addresses of this operand, since
         when we inherit this operand, its address reloads are discarded.  */

      for (i = 0; i < opnum; i++)
        if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
            || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
          return 0;

      return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);

    case RELOAD_FOR_INPUT_ADDRESS:
      /* Earlier reloads include RELOAD_FOR_INPADDR_ADDRESS reloads.  */
      if (TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], regno))
        return 0;
      /* ... fall through ... */
    case RELOAD_FOR_INPADDR_ADDRESS:
      if (equiv && TEST_HARD_REG_BIT (reload_reg_used, regno))
        return 0;

      /* Similarly, all we have to check is for use in earlier inputs'
         addresses.  */
      for (i = 0; i < opnum; i++)
        if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
            || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
          return 0;

      return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
    }
  /* All reload_type values are handled above.  */
  abort ();
}
4919
/* Return 1 if the value in reload reg REGNO, as used by a reload
   needed for the part of the insn specified by OPNUM and TYPE,
   is still available in REGNO at the end of the insn.

   We can assume that the reload reg was already tested for availability
   at the time it is needed, and we should not check this again,
   in case the reg has already been marked in use.

   The value reaches the end exactly when no reload belonging to a
   LATER part of the insn has claimed REGNO; each case below tests
   only the reload_reg_used_* sets for parts that run after TYPE.  */

static int
reload_reg_reaches_end_p (regno, opnum, type)
     int regno;
     int opnum;
     enum reload_type type;
{
  int i;

  switch (type)
    {
    case RELOAD_OTHER:
      /* Since a RELOAD_OTHER reload claims the reg for the entire insn,
         its value must reach the end.  */
      return 1;

      /* If this use is for part of the insn,
         its value reaches if no subsequent part uses the same register.
         Just like the above function, don't try to do this with lots
         of fallthroughs.  */

    case RELOAD_FOR_OTHER_ADDRESS:
      /* Here we check for everything else, since these don't conflict
         with anything else and everything comes later.  */

      for (i = 0; i < reload_n_operands; i++)
        if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
            || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
            || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno)
            || TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
            || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
            || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
          return 0;

      return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
              && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
              && ! TEST_HARD_REG_BIT (reload_reg_used, regno));

    case RELOAD_FOR_INPUT_ADDRESS:
    case RELOAD_FOR_INPADDR_ADDRESS:
      /* Similar, except that we check only for this and subsequent inputs
         and the address of only subsequent inputs and we do not need
         to check for RELOAD_OTHER objects since they are known not to
         conflict.  */

      for (i = opnum; i < reload_n_operands; i++)
        if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
          return 0;

      for (i = opnum + 1; i < reload_n_operands; i++)
        if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
            || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
          return 0;

      for (i = 0; i < reload_n_operands; i++)
        if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
            || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
            || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
          return 0;

      if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
        return 0;

      return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
              && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno));

    case RELOAD_FOR_INPUT:
      /* Similar to input address, except we start at the next operand for
         both input and input address and we do not check for
         RELOAD_FOR_OPERAND_ADDRESS and RELOAD_FOR_INSN since these
         would conflict.  */

      for (i = opnum + 1; i < reload_n_operands; i++)
        if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
            || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
            || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
          return 0;

      /* ... fall through ... */

    case RELOAD_FOR_OPERAND_ADDRESS:
      /* Check outputs and their addresses.  */

      for (i = 0; i < reload_n_operands; i++)
        if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
            || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
            || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
          return 0;

      return 1;

    case RELOAD_FOR_OPADDR_ADDR:
      for (i = 0; i < reload_n_operands; i++)
        if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
            || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
            || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
          return 0;

      return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
              && !TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno));

    case RELOAD_FOR_INSN:
      /* These conflict with other outputs with RELOAD_OTHER.  So
         we need only check for output addresses.  */

      /* Setting OPNUM to -1 makes the loop below scan output addresses
         of ALL operands.  */
      opnum = -1;

      /* ... fall through ... */

    case RELOAD_FOR_OUTPUT:
    case RELOAD_FOR_OUTPUT_ADDRESS:
    case RELOAD_FOR_OUTADDR_ADDRESS:
      /* We already know these can't conflict with a later output.  So the
         only thing to check are later output addresses.  */
      for (i = opnum + 1; i < reload_n_operands; i++)
        if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
            || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno))
          return 0;

      return 1;
    }

  /* All reload_type values are handled above.  */
  abort ();
}
5051 \f
5052 /* Return 1 if the reloads denoted by R1 and R2 cannot share a register.
5053 Return 0 otherwise.
5054
5055 This function uses the same algorithm as reload_reg_free_p above. */
5056
5057 int
5058 reloads_conflict (r1, r2)
5059 int r1, r2;
5060 {
5061 enum reload_type r1_type = reload_when_needed[r1];
5062 enum reload_type r2_type = reload_when_needed[r2];
5063 int r1_opnum = reload_opnum[r1];
5064 int r2_opnum = reload_opnum[r2];
5065
5066 /* RELOAD_OTHER conflicts with everything. */
5067 if (r2_type == RELOAD_OTHER)
5068 return 1;
5069
5070 /* Otherwise, check conflicts differently for each type. */
5071
5072 switch (r1_type)
5073 {
5074 case RELOAD_FOR_INPUT:
5075 return (r2_type == RELOAD_FOR_INSN
5076 || r2_type == RELOAD_FOR_OPERAND_ADDRESS
5077 || r2_type == RELOAD_FOR_OPADDR_ADDR
5078 || r2_type == RELOAD_FOR_INPUT
5079 || ((r2_type == RELOAD_FOR_INPUT_ADDRESS
5080 || r2_type == RELOAD_FOR_INPADDR_ADDRESS)
5081 && r2_opnum > r1_opnum));
5082
5083 case RELOAD_FOR_INPUT_ADDRESS:
5084 return ((r2_type == RELOAD_FOR_INPUT_ADDRESS && r1_opnum == r2_opnum)
5085 || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));
5086
5087 case RELOAD_FOR_INPADDR_ADDRESS:
5088 return ((r2_type == RELOAD_FOR_INPADDR_ADDRESS && r1_opnum == r2_opnum)
5089 || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));
5090
5091 case RELOAD_FOR_OUTPUT_ADDRESS:
5092 return ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS && r2_opnum == r1_opnum)
5093 || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum >= r1_opnum));
5094
5095 case RELOAD_FOR_OUTADDR_ADDRESS:
5096 return ((r2_type == RELOAD_FOR_OUTADDR_ADDRESS && r2_opnum == r1_opnum)
5097 || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum >= r1_opnum));
5098
5099 case RELOAD_FOR_OPERAND_ADDRESS:
5100 return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_INSN
5101 || r2_type == RELOAD_FOR_OPERAND_ADDRESS);
5102
5103 case RELOAD_FOR_OPADDR_ADDR:
5104 return (r2_type == RELOAD_FOR_INPUT
5105 || r2_type == RELOAD_FOR_OPADDR_ADDR);
5106
5107 case RELOAD_FOR_OUTPUT:
5108 return (r2_type == RELOAD_FOR_INSN || r2_type == RELOAD_FOR_OUTPUT
5109 || ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS
5110 || r2_type == RELOAD_FOR_OUTADDR_ADDRESS)
5111 && r2_opnum >= r1_opnum));
5112
5113 case RELOAD_FOR_INSN:
5114 return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_OUTPUT
5115 || r2_type == RELOAD_FOR_INSN
5116 || r2_type == RELOAD_FOR_OPERAND_ADDRESS);
5117
5118 case RELOAD_FOR_OTHER_ADDRESS:
5119 return r2_type == RELOAD_FOR_OTHER_ADDRESS;
5120
5121 case RELOAD_OTHER:
5122 return 1;
5123
5124 default:
5125 abort ();
5126 }
5127 }
5128 \f
/* Vector of reload-numbers showing the order in which the reloads should
   be processed.  */
short reload_order[MAX_RELOADS];

/* Indexed by reload number, 1 if incoming value
   inherited from previous insns.  */
char reload_inherited[MAX_RELOADS];

/* For an inherited reload, this is the insn the reload was inherited from,
   if we know it.  Otherwise, this is 0.  */
rtx reload_inheritance_insn[MAX_RELOADS];

/* If non-zero, this is a place to get the value of the reload,
   rather than using reload_in.  */
rtx reload_override_in[MAX_RELOADS];

/* For each reload, the hard register number of the register used,
   or -1 if we did not need a register for this reload.  */
int reload_spill_index[MAX_RELOADS];
5148
/* Return 1 if the value in reload reg REGNO, as used by a reload
   needed for the part of the insn specified by OPNUM and TYPE,
   may be used to load VALUE into it.

   Other read-only reloads with the same value do not conflict
   unless OUT is non-zero and these other reloads have to live while
   output reloads live.

   RELOADNUM is the number of the reload we want to load this value for;
   a reload does not conflict with itself.

   The caller has to make sure that there is no conflict with the return
   register.  */
static int
reload_reg_free_for_value_p (regno, opnum, type, value, out, reloadnum)
     int regno;
     int opnum;
     enum reload_type type;
     rtx value, out;
     int reloadnum;
{
  int time1;
  int i;

  /* We use some pseudo 'time' value to check if the lifetimes of the
     new register use would overlap with the one of a previous reload
     that is not read-only or uses a different value.
     The 'time' used doesn't have to be linear in any shape or form, just
     monotonic.
     Some reload types use different 'buckets' for each operand.
     So there are MAX_RECOG_OPERANDS different time values for each
     such reload type.
     We compute TIME1 as the time when the register for the prospective
     new reload ceases to be live, and TIME2 for each existing
     reload as the time when that the reload register of that reload
     becomes live.
     Where there is little to be gained by exact lifetime calculations,
     we just make conservative assumptions, i.e. a longer lifetime;
     this is done in the 'default:' cases.  */
  switch (type)
    {
    case RELOAD_FOR_OTHER_ADDRESS:
      time1 = 0;
      break;
    /* For each input, we might have a sequence of RELOAD_FOR_INPADDR_ADDRESS,
       RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_INPUT.  By adding 0 / 1 / 2 ,
       respectively, to the time values for these, we get distinct time
       values.  To get distinct time values for each operand, we have to
       multiply opnum by at least three.  We round that up to four because
       multiply by four is often cheaper.  */
    case RELOAD_FOR_INPADDR_ADDRESS:
      time1 = opnum * 4 + 1;
      break;
    case RELOAD_FOR_INPUT_ADDRESS:
      time1 = opnum * 4 + 2;
      break;
    case RELOAD_FOR_OPADDR_ADDR:
      /* opnum * 4 + 3 < opnum * 4 + 4
         <= (MAX_RECOG_OPERANDS - 1) * 4 + 4 == MAX_RECOG_OPERANDS * 4 */
      time1 = MAX_RECOG_OPERANDS * 4;
      break;
    case RELOAD_FOR_INPUT:
      /* All RELOAD_FOR_INPUT reloads remain live till just before the
         instruction is executed.  */
      time1 = MAX_RECOG_OPERANDS * 4 + 1;
      break;
    case RELOAD_FOR_OPERAND_ADDRESS:
      /* RELOAD_FOR_OPERAND_ADDRESS reloads are live even while the insn
         is executed.  */
      time1 = MAX_RECOG_OPERANDS * 4 + 2;
      break;
    case RELOAD_FOR_OUTPUT_ADDRESS:
      time1 = MAX_RECOG_OPERANDS * 4 + 3 + opnum;
      break;
    default:
      /* Conservative: assume the register stays live to the very end.  */
      time1 = MAX_RECOG_OPERANDS * 5 + 3;
    }

  for (i = 0; i < n_reloads; i++)
    {
      rtx reg = reload_reg_rtx[i];
      /* The unsigned subtraction below is a single-comparison range
         check: it is true iff REGNO falls within the hard registers
         covered by reload I's reload register.  */
      if (reg && GET_CODE (reg) == REG
          && ((unsigned) regno - true_regnum (reg)
              <= HARD_REGNO_NREGS (REGNO (reg), GET_MODE (reg)) - (unsigned)1)
          && i != reloadnum)
        {
          if (out
              && reload_when_needed[i] != RELOAD_FOR_INPUT
              && reload_when_needed[i] != RELOAD_FOR_INPUT_ADDRESS
              && reload_when_needed[i] != RELOAD_FOR_INPADDR_ADDRESS)
            return 0;
          if (! reload_in[i] || ! rtx_equal_p (reload_in[i], value)
              || reload_out[i])
            {
              int time2;
              switch (reload_when_needed[i])
                {
                case RELOAD_FOR_OTHER_ADDRESS:
                  time2 = 0;
                  break;
                case RELOAD_FOR_INPADDR_ADDRESS:
                  /* find_reloads makes sure that a
                     RELOAD_FOR_{INP,OP,OUT}ADDR_ADDRESS reload is only used
                     by at most one - the first -
                     RELOAD_FOR_{INPUT,OPERAND,OUTPUT}_ADDRESS .  If the
                     address reload is inherited, the address address reload
                     goes away, so we can ignore this conflict.  */
                  if (type == RELOAD_FOR_INPUT_ADDRESS && reloadnum == i + 1)
                    continue;
                  time2 = reload_opnum[i] * 4 + 1;
                  break;
                case RELOAD_FOR_INPUT_ADDRESS:
                  time2 = reload_opnum[i] * 4 + 2;
                  break;
                case RELOAD_FOR_INPUT:
                  time2 = reload_opnum[i] * 4 + 3;
                  break;
                case RELOAD_FOR_OPADDR_ADDR:
                  if (type == RELOAD_FOR_OPERAND_ADDRESS && reloadnum == i + 1)
                    continue;
                  time2 = MAX_RECOG_OPERANDS * 4;
                  break;
                case RELOAD_FOR_OPERAND_ADDRESS:
                  time2 = MAX_RECOG_OPERANDS * 4 + 1;
                  break;
                case RELOAD_FOR_OUTPUT:
                  /* All RELOAD_FOR_OUTPUT reloads become live just after the
                     instruction is executed.  */
                  time2 = MAX_RECOG_OPERANDS * 4 + 3;
                  break;
                case RELOAD_FOR_OUTADDR_ADDRESS:
                  if (type == RELOAD_FOR_OUTPUT_ADDRESS && reloadnum == i + 1)
                    continue;
                  /* fall through. */
                /* The first RELOAD_FOR_OUTPUT_ADDRESS reload conflicts with the
                   RELOAD_FOR_OUTPUT reloads, so assign it the same time value.  */
                case RELOAD_FOR_OUTPUT_ADDRESS:
                  time2 = MAX_RECOG_OPERANDS * 4 + 3 + reload_opnum[i];
                  break;
                case RELOAD_OTHER:
                  if (! reload_in[i] || rtx_equal_p (reload_in[i], value))
                    {
                      time2 = MAX_RECOG_OPERANDS * 4 + 3;
                      break;
                    }
                  /* fall through: a RELOAD_OTHER with a different value
                     gets the conservative time2 = 0.  */
                default:
                  time2 = 0;
                }
              /* The existing reload's register becomes live before ours
                 dies, so the lifetimes overlap.  */
              if (time1 >= time2)
                return 0;
            }
        }
    }
  return 1;
}
5304
/* Find a spill register to use as a reload register for reload R.
   LAST_RELOAD is non-zero if this is the last reload for the insn being
   processed.

   CHAIN describes the insn being processed (we use its insn for error
   reporting and its counted_for_nongroups set when forming groups).

   Set reload_reg_rtx[R] to the register allocated.

   If NOERROR is nonzero, we return 1 if successful,
   or 0 if we couldn't find a spill reg and we didn't change anything.  */

static int
allocate_reload_reg (chain, r, last_reload, noerror)
     struct insn_chain *chain;
     int r;
     int last_reload;
     int noerror;
{
  rtx insn = chain->insn;
  int i, pass, count, regno;
  rtx new;

  /* If we put this reload ahead, thinking it is a group,
     then insist on finding a group.  Otherwise we can grab a
     reg that some other reload needs.
     (That can happen when we have a 68000 DATA_OR_FP_REG
     which is a group of data regs or one fp reg.)
     We need not be so restrictive if there are no more reloads
     for this insn.

     ??? Really it would be nicer to have smarter handling
     for that kind of reg class, where a problem like this is normal.
     Perhaps those classes should be avoided for reloading
     by use of more alternatives.  */

  int force_group = reload_nregs[r] > 1 && ! last_reload;

  /* If we want a single register and haven't yet found one,
     take any reg in the right class and not in use.
     If we want a consecutive group, here is where we look for it.

     We use two passes so we can first look for reload regs to
     reuse, which are already in use for other reloads in this insn,
     and only then use additional registers.
     I think that maximizing reuse is needed to make sure we don't
     run out of reload regs.  Suppose we have three reloads, and
     reloads A and B can share regs.  These need two regs.
     Suppose A and B are given different regs.
     That leaves none for C.  */
  for (pass = 0; pass < 2; pass++)
    {
      /* I is the index in spill_regs.
         We advance it round-robin between insns to use all spill regs
         equally, so that inherited reloads have a chance
         of leapfrogging each other.  Don't do this, however, when we have
         group needs and failure would be fatal; if we only have a relatively
         small number of spill registers, and more than one of them has
         group needs, then by starting in the middle, we may end up
         allocating the first one in such a way that we are not left with
         sufficient groups to handle the rest.  */

      if (noerror || ! force_group)
        i = last_spill_reg;
      else
        i = -1;

      for (count = 0; count < n_spills; count++)
        {
          int class = (int) reload_reg_class[r];
          int regnum;

          i++;
          if (i >= n_spills)
            i -= n_spills;
          regnum = spill_regs[i];

          if ((reload_reg_free_p (regnum, reload_opnum[r],
                                  reload_when_needed[r])
               || (reload_in[r]
                   /* We check reload_reg_used to make sure we
                      don't clobber the return register.  */
                   && ! TEST_HARD_REG_BIT (reload_reg_used, regnum)
                   && reload_reg_free_for_value_p (regnum,
                                                   reload_opnum[r],
                                                   reload_when_needed[r],
                                                   reload_in[r],
                                                   reload_out[r], r)))
              && TEST_HARD_REG_BIT (reg_class_contents[class], regnum)
              && HARD_REGNO_MODE_OK (regnum, reload_mode[r])
              /* Look first for regs to share, then for unshared.  But
                 don't share regs used for inherited reloads; they are
                 the ones we want to preserve.  */
              && (pass
                  || (TEST_HARD_REG_BIT (reload_reg_used_at_all,
                                         regnum)
                      && ! TEST_HARD_REG_BIT (reload_reg_used_for_inherit,
                                              regnum))))
            {
              int nr = HARD_REGNO_NREGS (regnum, reload_mode[r]);
              /* Avoid the problem where spilling a GENERAL_OR_FP_REG
                 (on 68000) got us two FP regs.  If NR is 1,
                 we would reject both of them.  */
              if (force_group)
                nr = CLASS_MAX_NREGS (reload_reg_class[r], reload_mode[r]);
              /* If we need only one reg, we have already won.  */
              if (nr == 1)
                {
                  /* But reject a single reg if we demand a group.  */
                  if (force_group)
                    continue;
                  break;
                }
              /* Otherwise check that as many consecutive regs as we need
                 are available here.
                 Also, don't use for a group registers that are
                 needed for nongroups.  */
              if (! TEST_HARD_REG_BIT (chain->counted_for_nongroups, regnum))
                while (nr > 1)
                  {
                    regno = regnum + nr - 1;
                    if (!(TEST_HARD_REG_BIT (reg_class_contents[class], regno)
                          && spill_reg_order[regno] >= 0
                          && reload_reg_free_p (regno, reload_opnum[r],
                                                reload_when_needed[r])
                          && ! TEST_HARD_REG_BIT (chain->counted_for_nongroups,
                                                  regno)))
                      break;
                    nr--;
                  }
              /* NR reaches 1 only if every register of the group was
                 acceptable.  */
              if (nr == 1)
                break;
            }
        }

      /* If we found something on pass 1, omit pass 2.  */
      if (count < n_spills)
        break;
    }

  /* We should have found a spill register by now.  */
  if (count == n_spills)
    {
      if (noerror)
        return 0;
      goto failure;
    }

  /* I is the index in SPILL_REG_RTX of the reload register we are to
     allocate.  Get an rtx for it and find its register number.  */

  new = spill_reg_rtx[i];

  if (new == 0 || GET_MODE (new) != reload_mode[r])
    spill_reg_rtx[i] = new
      = gen_rtx_REG (reload_mode[r], spill_regs[i]);

  regno = true_regnum (new);

  /* Detect when the reload reg can't hold the reload mode.
     This used to be one `if', but Sequent compiler can't handle that.  */
  if (HARD_REGNO_MODE_OK (regno, reload_mode[r]))
    {
      enum machine_mode test_mode = VOIDmode;
      if (reload_in[r])
        test_mode = GET_MODE (reload_in[r]);
      /* If reload_in[r] has VOIDmode, it means we will load it
         in whatever mode the reload reg has: to wit, reload_mode[r].
         We have already tested that for validity.  */
      /* Aside from that, we need to test that the expressions
         to reload from or into have modes which are valid for this
         reload register.  Otherwise the reload insns would be invalid.  */
      if (! (reload_in[r] != 0 && test_mode != VOIDmode
             && ! HARD_REGNO_MODE_OK (regno, test_mode)))
        if (! (reload_out[r] != 0
               && ! HARD_REGNO_MODE_OK (regno, GET_MODE (reload_out[r]))))
          {
            /* The reg is OK.  */
            last_spill_reg = i;

            /* Mark as in use for this insn the reload regs we use
               for this.  */
            mark_reload_reg_in_use (spill_regs[i], reload_opnum[r],
                                    reload_when_needed[r], reload_mode[r]);

            reload_reg_rtx[r] = new;
            reload_spill_index[r] = spill_regs[i];
            return 1;
          }
    }

  /* The reg is not OK.  */
  if (noerror)
    return 0;

 failure:
  if (asm_noperands (PATTERN (insn)) < 0)
    /* It's the compiler's fault.  */
    fatal_insn ("Could not find a spill register", insn);

  /* It's the user's fault; the operand's mode and constraint
     don't match.  Disable this reload so we don't crash in final.  */
  error_for_asm (insn,
                 "`asm' operand constraint incompatible with operand size");
  reload_in[r] = 0;
  reload_out[r] = 0;
  reload_reg_rtx[r] = 0;
  reload_optional[r] = 1;
  reload_secondary_p[r] = 1;

  return 1;
}
5514 \f
/* Assign hard reg targets for the pseudo-registers we must reload
   into hard regs for this insn.
   Also output the instructions to copy them in and out of the hard regs.

   For machines with register classes, we are responsible for
   finding a reload reg in the proper class.  */

static void
choose_reload_regs (chain)
     struct insn_chain *chain;
{
  rtx insn = chain->insn;
  register int i, j;
  int max_group_size = 1;
  enum reg_class group_class = NO_REGS;
  int inheritance;
  int pass;

  /* Snapshots of the per-reload arrays and in-use hard-reg sets taken
     before the first (with-inheritance) allocation attempt, so that a
     failed attempt can be rolled back and retried without inheritance
     (see the restore code at the bottom of the `inheritance' loop).  */
  rtx save_reload_reg_rtx[MAX_RELOADS];
  char save_reload_inherited[MAX_RELOADS];
  rtx save_reload_inheritance_insn[MAX_RELOADS];
  rtx save_reload_override_in[MAX_RELOADS];
  int save_reload_spill_index[MAX_RELOADS];
  HARD_REG_SET save_reload_reg_used;
  HARD_REG_SET save_reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
  HARD_REG_SET save_reload_reg_used_in_inpaddr_addr[MAX_RECOG_OPERANDS];
  HARD_REG_SET save_reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
  HARD_REG_SET save_reload_reg_used_in_outaddr_addr[MAX_RECOG_OPERANDS];
  HARD_REG_SET save_reload_reg_used_in_input[MAX_RECOG_OPERANDS];
  HARD_REG_SET save_reload_reg_used_in_output[MAX_RECOG_OPERANDS];
  HARD_REG_SET save_reload_reg_used_in_op_addr;
  HARD_REG_SET save_reload_reg_used_in_op_addr_reload;
  HARD_REG_SET save_reload_reg_used_in_insn;
  HARD_REG_SET save_reload_reg_used_in_other_addr;
  HARD_REG_SET save_reload_reg_used_at_all;

  bzero (reload_inherited, MAX_RELOADS);
  bzero ((char *) reload_inheritance_insn, MAX_RELOADS * sizeof (rtx));
  bzero ((char *) reload_override_in, MAX_RELOADS * sizeof (rtx));

  CLEAR_HARD_REG_SET (reload_reg_used);
  CLEAR_HARD_REG_SET (reload_reg_used_at_all);
  CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr);
  CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr_reload);
  CLEAR_HARD_REG_SET (reload_reg_used_in_insn);
  CLEAR_HARD_REG_SET (reload_reg_used_in_other_addr);

  /* Collect the hard regs used by pseudos live around this insn
     (presumably so an inherited value a live pseudo still occupies is
     not clobbered by an output reload -- see the reg_used_by_pseudo
     test in the inheritance code below).  */
  CLEAR_HARD_REG_SET (reg_used_by_pseudo);
  compute_use_by_pseudos (&reg_used_by_pseudo, chain->live_before);
  compute_use_by_pseudos (&reg_used_by_pseudo, chain->live_after);

  for (i = 0; i < reload_n_operands; i++)
    {
      CLEAR_HARD_REG_SET (reload_reg_used_in_output[i]);
      CLEAR_HARD_REG_SET (reload_reg_used_in_input[i]);
      CLEAR_HARD_REG_SET (reload_reg_used_in_input_addr[i]);
      CLEAR_HARD_REG_SET (reload_reg_used_in_inpaddr_addr[i]);
      CLEAR_HARD_REG_SET (reload_reg_used_in_output_addr[i]);
      CLEAR_HARD_REG_SET (reload_reg_used_in_outaddr_addr[i]);
    }

  /* Everything that is not in chain->used_spill_regs is off-limits.  */
  IOR_COMPL_HARD_REG_SET (reload_reg_used, chain->used_spill_regs);

#if 0  /* Not needed, now that we can always retry without inheritance.  */
  /* See if we have more mandatory reloads than spill regs.
     If so, then we cannot risk optimizations that could prevent
     reloads from sharing one spill register.

     Since we will try finding a better register than reload_reg_rtx
     unless it is equal to reload_in or reload_out, count such reloads.  */

  {
    int tem = 0;
    for (j = 0; j < n_reloads; j++)
      if (! reload_optional[j]
	  && (reload_in[j] != 0 || reload_out[j] != 0 || reload_secondary_p[j])
	  && (reload_reg_rtx[j] == 0
	      || (! rtx_equal_p (reload_reg_rtx[j], reload_in[j])
		  && ! rtx_equal_p (reload_reg_rtx[j], reload_out[j]))))
	tem++;
    if (tem > n_spills)
      must_reuse = 1;
  }
#endif

  /* In order to be certain of getting the registers we need,
     we must sort the reloads into order of increasing register class.
     Then our grabbing of reload registers will parallel the process
     that provided the reload registers.

     Also note whether any of the reloads wants a consecutive group of regs.
     If so, record the maximum size of the group desired and what
     register class contains all the groups needed by this insn.  */

  for (j = 0; j < n_reloads; j++)
    {
      reload_order[j] = j;
      reload_spill_index[j] = -1;

      /* The reload mode is the wider of the input and output modes.  */
      reload_mode[j]
	= (reload_inmode[j] == VOIDmode
	   || (GET_MODE_SIZE (reload_outmode[j])
	       > GET_MODE_SIZE (reload_inmode[j])))
	  ? reload_outmode[j] : reload_inmode[j];

      reload_nregs[j] = CLASS_MAX_NREGS (reload_reg_class[j], reload_mode[j]);

      if (reload_nregs[j] > 1)
	{
	  max_group_size = MAX (reload_nregs[j], max_group_size);
	  group_class = reg_class_superunion[(int)reload_reg_class[j]][(int)group_class];
	}

      /* If we have already decided to use a certain register,
	 don't use it in another way.  */
      if (reload_reg_rtx[j])
	mark_reload_reg_in_use (REGNO (reload_reg_rtx[j]), reload_opnum[j],
				reload_when_needed[j], reload_mode[j]);
    }

  if (n_reloads > 1)
    qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);

  /* Take the snapshots used for rollback (see above).  */
  bcopy ((char *) reload_reg_rtx, (char *) save_reload_reg_rtx,
	 sizeof reload_reg_rtx);
  bcopy (reload_inherited, save_reload_inherited, sizeof reload_inherited);
  bcopy ((char *) reload_inheritance_insn,
	 (char *) save_reload_inheritance_insn,
	 sizeof reload_inheritance_insn);
  bcopy ((char *) reload_override_in, (char *) save_reload_override_in,
	 sizeof reload_override_in);
  bcopy ((char *) reload_spill_index, (char *) save_reload_spill_index,
	 sizeof reload_spill_index);
  COPY_HARD_REG_SET (save_reload_reg_used, reload_reg_used);
  COPY_HARD_REG_SET (save_reload_reg_used_at_all, reload_reg_used_at_all);
  COPY_HARD_REG_SET (save_reload_reg_used_in_op_addr,
		     reload_reg_used_in_op_addr);

  COPY_HARD_REG_SET (save_reload_reg_used_in_op_addr_reload,
		     reload_reg_used_in_op_addr_reload);

  COPY_HARD_REG_SET (save_reload_reg_used_in_insn,
		     reload_reg_used_in_insn);
  COPY_HARD_REG_SET (save_reload_reg_used_in_other_addr,
		     reload_reg_used_in_other_addr);

  for (i = 0; i < reload_n_operands; i++)
    {
      COPY_HARD_REG_SET (save_reload_reg_used_in_output[i],
			 reload_reg_used_in_output[i]);
      COPY_HARD_REG_SET (save_reload_reg_used_in_input[i],
			 reload_reg_used_in_input[i]);
      COPY_HARD_REG_SET (save_reload_reg_used_in_input_addr[i],
			 reload_reg_used_in_input_addr[i]);
      COPY_HARD_REG_SET (save_reload_reg_used_in_inpaddr_addr[i],
			 reload_reg_used_in_inpaddr_addr[i]);
      COPY_HARD_REG_SET (save_reload_reg_used_in_output_addr[i],
			 reload_reg_used_in_output_addr[i]);
      COPY_HARD_REG_SET (save_reload_reg_used_in_outaddr_addr[i],
			 reload_reg_used_in_outaddr_addr[i]);
    }

  /* If -O, try first with inheritance, then turning it off.
     If not -O, don't do inheritance.
     Using inheritance when not optimizing leads to paradoxes
     with fp on the 68k: fp numbers (not NaNs) fail to be equal to themselves
     because one side of the comparison might be inherited.  */

  for (inheritance = optimize > 0; inheritance >= 0; inheritance--)
    {
      /* Process the reloads in order of preference just found.
	 Beyond this point, subregs can be found in reload_reg_rtx.

	 This used to look for an existing reloaded home for all
	 of the reloads, and only then perform any new reloads.
	 But that could lose if the reloads were done out of reg-class order
	 because a later reload with a looser constraint might have an old
	 home in a register needed by an earlier reload with a tighter constraint.

	 To solve this, we make two passes over the reloads, in the order
	 described above.  In the first pass we try to inherit a reload
	 from a previous insn.  If there is a later reload that needs a
	 class that is a proper subset of the class being processed, we must
	 also allocate a spill register during the first pass.

	 Then make a second pass over the reloads to allocate any reloads
	 that haven't been given registers yet.  */

      CLEAR_HARD_REG_SET (reload_reg_used_for_inherit);

      for (j = 0; j < n_reloads; j++)
	{
	  register int r = reload_order[j];

	  /* Ignore reloads that got marked inoperative.  */
	  if (reload_out[r] == 0 && reload_in[r] == 0
	      && ! reload_secondary_p[r])
	    continue;

	  /* If find_reloads chose to use reload_in or reload_out as a reload
	     register, we don't need to choose one.  Otherwise, try even if it
	     found one since we might save an insn if we find the value lying
	     around.
	     Try also when reload_in is a pseudo without a hard reg.  */
	  if (reload_in[r] != 0 && reload_reg_rtx[r] != 0
	      && (rtx_equal_p (reload_in[r], reload_reg_rtx[r])
		  || (rtx_equal_p (reload_out[r], reload_reg_rtx[r])
		      && GET_CODE (reload_in[r]) != MEM
		      && true_regnum (reload_in[r]) < FIRST_PSEUDO_REGISTER)))
	    continue;

#if 0 /* No longer needed for correct operation.
	 It might give better code, or might not; worth an experiment?  */
	  /* If this is an optional reload, we can't inherit from earlier insns
	     until we are sure that any non-optional reloads have been allocated.
	     The following code takes advantage of the fact that optional reloads
	     are at the end of reload_order.  */
	  if (reload_optional[r] != 0)
	    for (i = 0; i < j; i++)
	      if ((reload_out[reload_order[i]] != 0
		   || reload_in[reload_order[i]] != 0
		   || reload_secondary_p[reload_order[i]])
		  && ! reload_optional[reload_order[i]]
		  && reload_reg_rtx[reload_order[i]] == 0)
		allocate_reload_reg (chain, reload_order[i], 0, inheritance);
#endif

	  /* First see if this pseudo is already available as reloaded
	     for a previous insn.  We cannot try to inherit for reloads
	     that are smaller than the maximum number of registers needed
	     for groups unless the register we would allocate cannot be used
	     for the groups.

	     We could check here to see if this is a secondary reload for
	     an object that is already in a register of the desired class.
	     This would avoid the need for the secondary reload register.
	     But this is complex because we can't easily determine what
	     objects might want to be loaded via this reload.  So let a
	     register be allocated here.  In `emit_reload_insns' we suppress
	     one of the loads in the case described above.  */

	  if (inheritance)
	    {
	      int word = 0;
	      register int regno = -1;
	      enum machine_mode mode;

	      /* Determine the (pseudo or hard) register whose reloaded
		 copy we might be able to reuse, and its mode.  */
	      if (reload_in[r] == 0)
		;
	      else if (GET_CODE (reload_in[r]) == REG)
		{
		  regno = REGNO (reload_in[r]);
		  mode = GET_MODE (reload_in[r]);
		}
	      else if (GET_CODE (reload_in_reg[r]) == REG)
		{
		  regno = REGNO (reload_in_reg[r]);
		  mode = GET_MODE (reload_in_reg[r]);
		}
	      else if (GET_CODE (reload_in_reg[r]) == SUBREG
		       && GET_CODE (SUBREG_REG (reload_in_reg[r])) == REG)
		{
		  word = SUBREG_WORD (reload_in_reg[r]);
		  regno = REGNO (SUBREG_REG (reload_in_reg[r]));
		  if (regno < FIRST_PSEUDO_REGISTER)
		    regno += word;
		  mode = GET_MODE (reload_in_reg[r]);
		}
#ifdef AUTO_INC_DEC
	      else if ((GET_CODE (reload_in_reg[r]) == PRE_INC
			|| GET_CODE (reload_in_reg[r]) == PRE_DEC
			|| GET_CODE (reload_in_reg[r]) == POST_INC
			|| GET_CODE (reload_in_reg[r]) == POST_DEC)
		       && GET_CODE (XEXP (reload_in_reg[r], 0)) == REG)
		{
		  regno = REGNO (XEXP (reload_in_reg[r], 0));
		  mode = GET_MODE (XEXP (reload_in_reg[r], 0));
		  reload_out[r] = reload_in[r];
		}
#endif
#if 0
	      /* This won't work, since REGNO can be a pseudo reg number.
		 Also, it takes much more hair to keep track of all the things
		 that can invalidate an inherited reload of part of a pseudoreg.  */
	      else if (GET_CODE (reload_in[r]) == SUBREG
		       && GET_CODE (SUBREG_REG (reload_in[r])) == REG)
		regno = REGNO (SUBREG_REG (reload_in[r])) + SUBREG_WORD (reload_in[r]);
#endif

	      if (regno >= 0 && reg_last_reload_reg[regno] != 0)
		{
		  enum reg_class class = reload_reg_class[r], last_class;
		  rtx last_reg = reg_last_reload_reg[regno];

		  i = REGNO (last_reg) + word;
		  last_class = REGNO_REG_CLASS (i);
		  if ((GET_MODE_SIZE (GET_MODE (last_reg))
		       >= GET_MODE_SIZE (mode) + word * UNITS_PER_WORD)
		      && reg_reloaded_contents[i] == regno
		      && TEST_HARD_REG_BIT (reg_reloaded_valid, i)
		      && HARD_REGNO_MODE_OK (i, reload_mode[r])
		      && (TEST_HARD_REG_BIT (reg_class_contents[(int) class], i)
			  /* Even if we can't use this register as a reload
			     register, we might use it for reload_override_in,
			     if copying it to the desired class is cheap
			     enough.  */
			  || ((REGISTER_MOVE_COST (last_class, class)
			       < MEMORY_MOVE_COST (mode, class, 1))
#ifdef SECONDARY_INPUT_RELOAD_CLASS
			      && (SECONDARY_INPUT_RELOAD_CLASS (class, mode,
								last_reg)
				  == NO_REGS)
#endif
#ifdef SECONDARY_MEMORY_NEEDED
			      && ! SECONDARY_MEMORY_NEEDED (last_class, class,
							    mode)
#endif
			      ))

		      && (reload_nregs[r] == max_group_size
			  || ! TEST_HARD_REG_BIT (reg_class_contents[(int) group_class],
						  i))
		      && ((reload_reg_free_p (i, reload_opnum[r],
					      reload_when_needed[r])
			   && reload_reg_free_before_p (i, reload_opnum[r],
							reload_when_needed[r],
							0))
			  || reload_reg_free_for_value_p (i, reload_opnum[r],
							  reload_when_needed[r],
							  reload_in[r],
							  reload_out[r], r)))
		    {
		      /* If a group is needed, verify that all the subsequent
			 registers still have their values intact.  */
		      int nr
			= HARD_REGNO_NREGS (i, reload_mode[r]);
		      int k;

		      for (k = 1; k < nr; k++)
			if (reg_reloaded_contents[i + k] != regno
			    || ! TEST_HARD_REG_BIT (reg_reloaded_valid, i + k))
			  break;

		      if (k == nr)
			{
			  int i1;

			  last_reg = (GET_MODE (last_reg) == mode
				      ? last_reg : gen_rtx_REG (mode, i));

			  /* We found a register that contains the
			     value we need.  If this register is the
			     same as an `earlyclobber' operand of the
			     current insn, just mark it as a place to
			     reload from since we can't use it as the
			     reload register itself.  */

			  for (i1 = 0; i1 < n_earlyclobbers; i1++)
			    if (reg_overlap_mentioned_for_reload_p
				(reg_last_reload_reg[regno],
				 reload_earlyclobbers[i1]))
			      break;

			  if (i1 != n_earlyclobbers
			      /* Don't use it if we'd clobber a pseudo reg.  */
			      || (TEST_HARD_REG_BIT (reg_used_by_pseudo, i)
				  && reload_out[r]
				  && ! TEST_HARD_REG_BIT (reg_reloaded_dead, i))
			      /* Don't really use the inherited spill reg
				 if we need it wider than we've got it.  */
			      || (GET_MODE_SIZE (reload_mode[r])
				  > GET_MODE_SIZE (mode))
			      || ! TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[r]],
						      i)

			      /* If find_reloads chose reload_out as reload
				 register, stay with it - that leaves the
				 inherited register for subsequent reloads.  */
			      || (reload_out[r] && reload_reg_rtx[r]
				  && rtx_equal_p (reload_out[r],
						  reload_reg_rtx[r])))
			    {
			      reload_override_in[r] = last_reg;
			      reload_inheritance_insn[r]
				= reg_reloaded_insn[i];
			    }
			  else
			    {
			      int k;
			      /* We can use this as a reload reg.  */
			      /* Mark the register as in use for this part of
				 the insn.  */
			      mark_reload_reg_in_use (i,
						      reload_opnum[r],
						      reload_when_needed[r],
						      reload_mode[r]);
			      reload_reg_rtx[r] = last_reg;
			      reload_inherited[r] = 1;
			      reload_inheritance_insn[r]
				= reg_reloaded_insn[i];
			      reload_spill_index[r] = i;
			      for (k = 0; k < nr; k++)
				SET_HARD_REG_BIT (reload_reg_used_for_inherit,
						  i + k);
			    }
			}
		    }
		}
	    }

	  /* Here's another way to see if the value is already lying around.  */
	  if (inheritance
	      && reload_in[r] != 0
	      && ! reload_inherited[r]
	      && reload_out[r] == 0
	      && (CONSTANT_P (reload_in[r])
		  || GET_CODE (reload_in[r]) == PLUS
		  || GET_CODE (reload_in[r]) == REG
		  || GET_CODE (reload_in[r]) == MEM)
	      && (reload_nregs[r] == max_group_size
		  || ! reg_classes_intersect_p (reload_reg_class[r], group_class)))
	    {
	      register rtx equiv
		= find_equiv_reg (reload_in[r], insn, reload_reg_class[r],
				  -1, NULL_PTR, 0, reload_mode[r]);
	      int regno;

	      if (equiv != 0)
		{
		  if (GET_CODE (equiv) == REG)
		    regno = REGNO (equiv);
		  else if (GET_CODE (equiv) == SUBREG)
		    {
		      /* This must be a SUBREG of a hard register.
			 Make a new REG since this might be used in an
			 address and not all machines support SUBREGs
			 there.  */
		      regno = REGNO (SUBREG_REG (equiv)) + SUBREG_WORD (equiv);
		      equiv = gen_rtx_REG (reload_mode[r], regno);
		    }
		  else
		    abort ();
		}

	      /* If we found a spill reg, reject it unless it is free
		 and of the desired class.  */
	      if (equiv != 0
		  && ((TEST_HARD_REG_BIT (reload_reg_used_at_all, regno)
		       && ! reload_reg_free_for_value_p (regno, reload_opnum[r],
							 reload_when_needed[r],
							 reload_in[r],
							 reload_out[r], r))
		      || ! TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[r]],
					      regno)))
		equiv = 0;

	      if (equiv != 0 && ! HARD_REGNO_MODE_OK (regno, reload_mode[r]))
		equiv = 0;

	      /* We found a register that contains the value we need.
		 If this register is the same as an `earlyclobber' operand
		 of the current insn, just mark it as a place to reload from
		 since we can't use it as the reload register itself.  */

	      if (equiv != 0)
		for (i = 0; i < n_earlyclobbers; i++)
		  if (reg_overlap_mentioned_for_reload_p (equiv,
							  reload_earlyclobbers[i]))
		    {
		      reload_override_in[r] = equiv;
		      equiv = 0;
		      break;
		    }

	      /* If the equiv register we have found is explicitly clobbered
		 in the current insn, it depends on the reload type if we
		 can use it, use it for reload_override_in, or not at all.
		 In particular, we then can't use EQUIV for a
		 RELOAD_FOR_OUTPUT_ADDRESS reload.  */

	      if (equiv != 0 && regno_clobbered_p (regno, insn))
		{
		  switch (reload_when_needed[r])
		    {
		    case RELOAD_FOR_OTHER_ADDRESS:
		    case RELOAD_FOR_INPADDR_ADDRESS:
		    case RELOAD_FOR_INPUT_ADDRESS:
		    case RELOAD_FOR_OPADDR_ADDR:
		      break;
		    case RELOAD_OTHER:
		    case RELOAD_FOR_INPUT:
		    case RELOAD_FOR_OPERAND_ADDRESS:
		      reload_override_in[r] = equiv;
		      /* Fall through.  */
		    default:
		      equiv = 0;
		      break;
		    }
		}

	      /* If we found an equivalent reg, say no code need be generated
		 to load it, and use it as our reload reg.  */
	      if (equiv != 0 && regno != HARD_FRAME_POINTER_REGNUM)
		{
		  int nr = HARD_REGNO_NREGS (regno, reload_mode[r]);
		  int k;
		  reload_reg_rtx[r] = equiv;
		  reload_inherited[r] = 1;

		  /* If reg_reloaded_valid is not set for this register,
		     there might be a stale spill_reg_store lying around.
		     We must clear it, since otherwise emit_reload_insns
		     might delete the store.  */
		  if (! TEST_HARD_REG_BIT (reg_reloaded_valid, regno))
		    spill_reg_store[regno] = NULL_RTX;
		  /* If any of the hard registers in EQUIV are spill
		     registers, mark them as in use for this insn.  */
		  for (k = 0; k < nr; k++)
		    {
		      i = spill_reg_order[regno + k];
		      if (i >= 0)
			{
			  mark_reload_reg_in_use (regno, reload_opnum[r],
						  reload_when_needed[r],
						  reload_mode[r]);
			  SET_HARD_REG_BIT (reload_reg_used_for_inherit,
					    regno + k);
			}
		    }
		}
	    }

	  /* If we found a register to use already, or if this is an optional
	     reload, we are done.  */
	  if (reload_reg_rtx[r] != 0 || reload_optional[r] != 0)
	    continue;

#if 0 /* No longer needed for correct operation.  Might or might not
	 give better code on the average.  Want to experiment?  */

	  /* See if there is a later reload that has a class different from our
	     class that intersects our class or that requires less register
	     than our reload.  If so, we must allocate a register to this
	     reload now, since that reload might inherit a previous reload
	     and take the only available register in our class.  Don't do this
	     for optional reloads since they will force all previous reloads
	     to be allocated.  Also don't do this for reloads that have been
	     turned off.  */

	  for (i = j + 1; i < n_reloads; i++)
	    {
	      int s = reload_order[i];

	      if ((reload_in[s] == 0 && reload_out[s] == 0
		   && ! reload_secondary_p[s])
		  || reload_optional[s])
		continue;

	      if ((reload_reg_class[s] != reload_reg_class[r]
		   && reg_classes_intersect_p (reload_reg_class[r],
					       reload_reg_class[s]))
		  || reload_nregs[s] < reload_nregs[r])
		break;
	    }

	  if (i == n_reloads)
	    continue;

	  allocate_reload_reg (chain, r, j == n_reloads - 1, inheritance);
#endif
	}

      /* Now allocate reload registers for anything non-optional that
	 didn't get one yet.  */
      for (j = 0; j < n_reloads; j++)
	{
	  register int r = reload_order[j];

	  /* Ignore reloads that got marked inoperative.  */
	  if (reload_out[r] == 0 && reload_in[r] == 0 && ! reload_secondary_p[r])
	    continue;

	  /* Skip reloads that already have a register allocated or are
	     optional.  */
	  if (reload_reg_rtx[r] != 0 || reload_optional[r])
	    continue;

	  if (! allocate_reload_reg (chain, r, j == n_reloads - 1, inheritance))
	    break;
	}

      /* If that loop got all the way, we have won.  */
      if (j == n_reloads)
	break;

      /* Loop around and try without any inheritance.  */
      /* First undo everything done by the failed attempt
	 to allocate with inheritance.  */
      bcopy ((char *) save_reload_reg_rtx, (char *) reload_reg_rtx,
	     sizeof reload_reg_rtx);
      bcopy ((char *) save_reload_inherited, (char *) reload_inherited,
	     sizeof reload_inherited);
      bcopy ((char *) save_reload_inheritance_insn,
	     (char *) reload_inheritance_insn,
	     sizeof reload_inheritance_insn);
      bcopy ((char *) save_reload_override_in, (char *) reload_override_in,
	     sizeof reload_override_in);
      bcopy ((char *) save_reload_spill_index, (char *) reload_spill_index,
	     sizeof reload_spill_index);
      COPY_HARD_REG_SET (reload_reg_used, save_reload_reg_used);
      COPY_HARD_REG_SET (reload_reg_used_at_all, save_reload_reg_used_at_all);
      COPY_HARD_REG_SET (reload_reg_used_in_op_addr,
			 save_reload_reg_used_in_op_addr);
      COPY_HARD_REG_SET (reload_reg_used_in_op_addr_reload,
			 save_reload_reg_used_in_op_addr_reload);
      COPY_HARD_REG_SET (reload_reg_used_in_insn,
			 save_reload_reg_used_in_insn);
      COPY_HARD_REG_SET (reload_reg_used_in_other_addr,
			 save_reload_reg_used_in_other_addr);

      for (i = 0; i < reload_n_operands; i++)
	{
	  COPY_HARD_REG_SET (reload_reg_used_in_input[i],
			     save_reload_reg_used_in_input[i]);
	  COPY_HARD_REG_SET (reload_reg_used_in_output[i],
			     save_reload_reg_used_in_output[i]);
	  COPY_HARD_REG_SET (reload_reg_used_in_input_addr[i],
			     save_reload_reg_used_in_input_addr[i]);
	  COPY_HARD_REG_SET (reload_reg_used_in_inpaddr_addr[i],
			     save_reload_reg_used_in_inpaddr_addr[i]);
	  COPY_HARD_REG_SET (reload_reg_used_in_output_addr[i],
			     save_reload_reg_used_in_output_addr[i]);
	  COPY_HARD_REG_SET (reload_reg_used_in_outaddr_addr[i],
			     save_reload_reg_used_in_outaddr_addr[i]);
	}
    }

  /* If we thought we could inherit a reload, because it seemed that
     nothing else wanted the same reload register earlier in the insn,
     verify that assumption, now that all reloads have been assigned.
     Likewise for reloads where reload_override_in has been set.  */

  /* If doing expensive optimizations, do one preliminary pass that doesn't
     cancel any inheritance, but removes reloads that have been needed only
     for reloads that we know can be inherited.  */
  for (pass = flag_expensive_optimizations; pass >= 0; pass--)
    {
      for (j = 0; j < n_reloads; j++)
	{
	  register int r = reload_order[j];
	  rtx check_reg;
	  if (reload_inherited[r] && reload_reg_rtx[r])
	    check_reg = reload_reg_rtx[r];
	  else if (reload_override_in[r]
		   && (GET_CODE (reload_override_in[r]) == REG
		       || GET_CODE (reload_override_in[r]) == SUBREG))
	    check_reg = reload_override_in[r];
	  else
	    continue;
	  if (! (reload_reg_free_before_p (true_regnum (check_reg),
					   reload_opnum[r], reload_when_needed[r],
					   ! reload_inherited[r])
		 || reload_reg_free_for_value_p (true_regnum (check_reg),
						 reload_opnum[r],
						 reload_when_needed[r],
						 reload_in[r],
						 reload_out[r], r)))
	    {
	      if (pass)
		continue;
	      reload_inherited[r] = 0;
	      reload_override_in[r] = 0;
	    }
	  /* If we can inherit a RELOAD_FOR_INPUT, or can use a
	     reload_override_in, then we do not need its related
	     RELOAD_FOR_INPUT_ADDRESS / RELOAD_FOR_INPADDR_ADDRESS reloads;
	     likewise for other reload types.
	     We handle this by removing a reload when its only replacement
	     is mentioned in reload_in of the reload we are going to inherit.
	     A special case are auto_inc expressions; even if the input is
	     inherited, we still need the address for the output.  We can
	     recognize them because they have RELOAD_OUT set but not
	     RELOAD_OUT_REG.
	     If we succeeded removing some reload and we are doing a preliminary
	     pass just to remove such reloads, make another pass, since the
	     removal of one reload might allow us to inherit another one.  */
	  else if ((! reload_out[r] || reload_out_reg[r])
		   && remove_address_replacements (reload_in[r]) && pass)
	    pass = 2;
	}
    }

  /* Now that reload_override_in is known valid,
     actually override reload_in.  */
  for (j = 0; j < n_reloads; j++)
    if (reload_override_in[j])
      reload_in[j] = reload_override_in[j];

  /* If this reload won't be done because it has been cancelled or is
     optional and not inherited, clear reload_reg_rtx so other
     routines (such as subst_reloads) don't get confused.  */
  for (j = 0; j < n_reloads; j++)
    if (reload_reg_rtx[j] != 0
	&& ((reload_optional[j] && ! reload_inherited[j])
	    || (reload_in[j] == 0 && reload_out[j] == 0
		&& ! reload_secondary_p[j])))
      {
	int regno = true_regnum (reload_reg_rtx[j]);

	if (spill_reg_order[regno] >= 0)
	  clear_reload_reg_in_use (regno, reload_opnum[j],
				   reload_when_needed[j], reload_mode[j]);
	reload_reg_rtx[j] = 0;
      }

  /* Record which pseudos and which spill regs have output reloads.  */
  for (j = 0; j < n_reloads; j++)
    {
      register int r = reload_order[j];

      i = reload_spill_index[r];

      /* I is nonneg if this reload uses a register.
	 If reload_reg_rtx[r] is 0, this is an optional reload
	 that we opted to ignore.  */
      if (reload_out_reg[r] != 0 && GET_CODE (reload_out_reg[r]) == REG
	  && reload_reg_rtx[r] != 0)
	{
	  register int nregno = REGNO (reload_out_reg[r]);
	  int nr = 1;

	  if (nregno < FIRST_PSEUDO_REGISTER)
	    nr = HARD_REGNO_NREGS (nregno, reload_mode[r]);

	  while (--nr >= 0)
	    reg_has_output_reload[nregno + nr] = 1;

	  if (i >= 0)
	    {
	      nr = HARD_REGNO_NREGS (i, reload_mode[r]);
	      while (--nr >= 0)
		SET_HARD_REG_BIT (reg_is_output_reload, i + nr);
	    }

	  /* Only these reload types may legitimately write an operand.  */
	  if (reload_when_needed[r] != RELOAD_OTHER
	      && reload_when_needed[r] != RELOAD_FOR_OUTPUT
	      && reload_when_needed[r] != RELOAD_FOR_INSN)
	    abort ();
	}
    }
}
6266
6267 /* Deallocate the reload register for reload R. This is called from
6268 remove_address_replacements. */
6269 void
6270 deallocate_reload_reg (r)
6271 int r;
6272 {
6273 int regno;
6274
6275 if (! reload_reg_rtx[r])
6276 return;
6277 regno = true_regnum (reload_reg_rtx[r]);
6278 reload_reg_rtx[r] = 0;
6279 if (spill_reg_order[regno] >= 0)
6280 clear_reload_reg_in_use (regno, reload_opnum[r], reload_when_needed[r],
6281 reload_mode[r]);
6282 reload_spill_index[r] = -1;
6283 }
6284 \f
6285 /* If SMALL_REGISTER_CLASSES is non-zero, we may not have merged two
6286 reloads of the same item for fear that we might not have enough reload
6287 registers. However, normally they will get the same reload register
6288 and hence actually need not be loaded twice.
6289
6290 Here we check for the most common case of this phenomenon: when we have
6291 a number of reloads for the same object, each of which were allocated
6292 the same reload_reg_rtx, that reload_reg_rtx is not used for any other
6293 reload, and is not modified in the insn itself. If we find such,
6294 merge all the reloads and set the resulting reload to RELOAD_OTHER.
6295 This will not increase the number of spill registers needed and will
6296 prevent redundant code. */
6297
6298 static void
6299 merge_assigned_reloads (insn)
6300 rtx insn;
6301 {
6302 int i, j;
6303
6304 /* Scan all the reloads looking for ones that only load values and
6305 are not already RELOAD_OTHER and ones whose reload_reg_rtx are
6306 assigned and not modified by INSN. */
6307
6308 for (i = 0; i < n_reloads; i++)
6309 {
6310 int conflicting_input = 0;
6311 int max_input_address_opnum = -1;
6312 int min_conflicting_input_opnum = MAX_RECOG_OPERANDS;
6313
6314 if (reload_in[i] == 0 || reload_when_needed[i] == RELOAD_OTHER
6315 || reload_out[i] != 0 || reload_reg_rtx[i] == 0
6316 || reg_set_p (reload_reg_rtx[i], insn))
6317 continue;
6318
6319 /* Look at all other reloads. Ensure that the only use of this
6320 reload_reg_rtx is in a reload that just loads the same value
6321 as we do. Note that any secondary reloads must be of the identical
6322 class since the values, modes, and result registers are the
6323 same, so we need not do anything with any secondary reloads. */
6324
6325 for (j = 0; j < n_reloads; j++)
6326 {
6327 if (i == j || reload_reg_rtx[j] == 0
6328 || ! reg_overlap_mentioned_p (reload_reg_rtx[j],
6329 reload_reg_rtx[i]))
6330 continue;
6331
6332 if (reload_when_needed[j] == RELOAD_FOR_INPUT_ADDRESS
6333 && reload_opnum[j] > max_input_address_opnum)
6334 max_input_address_opnum = reload_opnum[j];
6335
6336 /* If the reload regs aren't exactly the same (e.g, different modes)
6337 or if the values are different, we can't merge this reload.
6338 But if it is an input reload, we might still merge
6339 RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_OTHER_ADDRESS reloads. */
6340
6341 if (! rtx_equal_p (reload_reg_rtx[i], reload_reg_rtx[j])
6342 || reload_out[j] != 0 || reload_in[j] == 0
6343 || ! rtx_equal_p (reload_in[i], reload_in[j]))
6344 {
6345 if (reload_when_needed[j] != RELOAD_FOR_INPUT
6346 || ((reload_when_needed[i] != RELOAD_FOR_INPUT_ADDRESS
6347 || reload_opnum[i] > reload_opnum[j])
6348 && reload_when_needed[i] != RELOAD_FOR_OTHER_ADDRESS))
6349 break;
6350 conflicting_input = 1;
6351 if (min_conflicting_input_opnum > reload_opnum[j])
6352 min_conflicting_input_opnum = reload_opnum[j];
6353 }
6354 }
6355
6356 /* If all is OK, merge the reloads. Only set this to RELOAD_OTHER if
6357 we, in fact, found any matching reloads. */
6358
6359 if (j == n_reloads
6360 && max_input_address_opnum <= min_conflicting_input_opnum)
6361 {
6362 for (j = 0; j < n_reloads; j++)
6363 if (i != j && reload_reg_rtx[j] != 0
6364 && rtx_equal_p (reload_reg_rtx[i], reload_reg_rtx[j])
6365 && (! conflicting_input
6366 || reload_when_needed[j] == RELOAD_FOR_INPUT_ADDRESS
6367 || reload_when_needed[j] == RELOAD_FOR_OTHER_ADDRESS))
6368 {
6369 reload_when_needed[i] = RELOAD_OTHER;
6370 reload_in[j] = 0;
6371 reload_spill_index[j] = -1;
6372 transfer_replacements (i, j);
6373 }
6374
6375 /* If this is now RELOAD_OTHER, look for any reloads that load
6376 parts of this operand and set them to RELOAD_FOR_OTHER_ADDRESS
6377 if they were for inputs, RELOAD_OTHER for outputs. Note that
6378 this test is equivalent to looking for reloads for this operand
6379 number. */
6380
6381 if (reload_when_needed[i] == RELOAD_OTHER)
6382 for (j = 0; j < n_reloads; j++)
6383 if (reload_in[j] != 0
6384 && reload_when_needed[i] != RELOAD_OTHER
6385 && reg_overlap_mentioned_for_reload_p (reload_in[j],
6386 reload_in[i]))
6387 reload_when_needed[j]
6388 = ((reload_when_needed[i] == RELOAD_FOR_INPUT_ADDRESS
6389 || reload_when_needed[i] == RELOAD_FOR_INPADDR_ADDRESS)
6390 ? RELOAD_FOR_OTHER_ADDRESS : RELOAD_OTHER);
6391 }
6392 }
6393 }
6394
6395 \f
6396 /* Output insns to reload values in and out of the chosen reload regs. */
6397
6398 static void
6399 emit_reload_insns (chain)
6400 struct insn_chain *chain;
6401 {
6402 rtx insn = chain->insn;
6403
6404 register int j;
6405 rtx input_reload_insns[MAX_RECOG_OPERANDS];
6406 rtx other_input_address_reload_insns = 0;
6407 rtx other_input_reload_insns = 0;
6408 rtx input_address_reload_insns[MAX_RECOG_OPERANDS];
6409 rtx inpaddr_address_reload_insns[MAX_RECOG_OPERANDS];
6410 rtx output_reload_insns[MAX_RECOG_OPERANDS];
6411 rtx output_address_reload_insns[MAX_RECOG_OPERANDS];
6412 rtx outaddr_address_reload_insns[MAX_RECOG_OPERANDS];
6413 rtx operand_reload_insns = 0;
6414 rtx other_operand_reload_insns = 0;
6415 rtx other_output_reload_insns[MAX_RECOG_OPERANDS];
6416 rtx following_insn = NEXT_INSN (insn);
6417 rtx before_insn = PREV_INSN (insn);
6418 int special;
6419 /* Values to be put in spill_reg_store are put here first. */
6420 rtx new_spill_reg_store[FIRST_PSEUDO_REGISTER];
6421 HARD_REG_SET reg_reloaded_died;
6422
6423 CLEAR_HARD_REG_SET (reg_reloaded_died);
6424
6425 for (j = 0; j < reload_n_operands; j++)
6426 input_reload_insns[j] = input_address_reload_insns[j]
6427 = inpaddr_address_reload_insns[j]
6428 = output_reload_insns[j] = output_address_reload_insns[j]
6429 = outaddr_address_reload_insns[j]
6430 = other_output_reload_insns[j] = 0;
6431
6432 /* Now output the instructions to copy the data into and out of the
6433 reload registers. Do these in the order that the reloads were reported,
6434 since reloads of base and index registers precede reloads of operands
6435 and the operands may need the base and index registers reloaded. */
6436
6437 for (j = 0; j < n_reloads; j++)
6438 {
6439 register rtx old;
6440 rtx oldequiv_reg = 0;
6441 rtx this_reload_insn = 0;
6442 int expect_occurrences = 1;
6443
6444 if (reload_reg_rtx[j]
6445 && REGNO (reload_reg_rtx[j]) < FIRST_PSEUDO_REGISTER)
6446 new_spill_reg_store[REGNO (reload_reg_rtx[j])] = 0;
6447
6448 old = (reload_in[j] && GET_CODE (reload_in[j]) == MEM
6449 ? reload_in_reg[j] : reload_in[j]);
6450
6451 if (old != 0
6452 /* AUTO_INC reloads need to be handled even if inherited. We got an
6453 AUTO_INC reload if reload_out is set but reload_out_reg isn't. */
6454 && (! reload_inherited[j] || (reload_out[j] && ! reload_out_reg[j]))
6455 && ! rtx_equal_p (reload_reg_rtx[j], old)
6456 && reload_reg_rtx[j] != 0)
6457 {
6458 register rtx reloadreg = reload_reg_rtx[j];
6459 rtx oldequiv = 0;
6460 enum machine_mode mode;
6461 rtx *where;
6462
6463 /* Determine the mode to reload in.
6464 This is very tricky because we have three to choose from.
6465 There is the mode the insn operand wants (reload_inmode[J]).
6466 There is the mode of the reload register RELOADREG.
6467 There is the intrinsic mode of the operand, which we could find
6468 by stripping some SUBREGs.
6469 It turns out that RELOADREG's mode is irrelevant:
6470 we can change that arbitrarily.
6471
6472 Consider (SUBREG:SI foo:QI) as an operand that must be SImode;
6473 then the reload reg may not support QImode moves, so use SImode.
6474 If foo is in memory due to spilling a pseudo reg, this is safe,
6475 because the QImode value is in the least significant part of a
6476 slot big enough for a SImode. If foo is some other sort of
6477 memory reference, then it is impossible to reload this case,
6478 so previous passes had better make sure this never happens.
6479
6480 Then consider a one-word union which has SImode and one of its
6481 members is a float, being fetched as (SUBREG:SF union:SI).
6482 We must fetch that as SFmode because we could be loading into
6483 a float-only register. In this case OLD's mode is correct.
6484
6485 Consider an immediate integer: it has VOIDmode. Here we need
6486 to get a mode from something else.
6487
6488 In some cases, there is a fourth mode, the operand's
6489 containing mode. If the insn specifies a containing mode for
6490 this operand, it overrides all others.
6491
6492 I am not sure whether the algorithm here is always right,
6493 but it does the right things in those cases. */
6494
6495 mode = GET_MODE (old);
6496 if (mode == VOIDmode)
6497 mode = reload_inmode[j];
6498
6499 #ifdef SECONDARY_INPUT_RELOAD_CLASS
6500 /* If we need a secondary register for this operation, see if
6501 the value is already in a register in that class. Don't
6502 do this if the secondary register will be used as a scratch
6503 register. */
6504
6505 if (reload_secondary_in_reload[j] >= 0
6506 && reload_secondary_in_icode[j] == CODE_FOR_nothing
6507 && optimize)
6508 oldequiv
6509 = find_equiv_reg (old, insn,
6510 reload_reg_class[reload_secondary_in_reload[j]],
6511 -1, NULL_PTR, 0, mode);
6512 #endif
6513
6514 /* If reloading from memory, see if there is a register
6515 that already holds the same value. If so, reload from there.
6516 We can pass 0 as the reload_reg_p argument because
6517 any other reload has either already been emitted,
6518 in which case find_equiv_reg will see the reload-insn,
6519 or has yet to be emitted, in which case it doesn't matter
6520 because we will use this equiv reg right away. */
6521
6522 if (oldequiv == 0 && optimize
6523 && (GET_CODE (old) == MEM
6524 || (GET_CODE (old) == REG
6525 && REGNO (old) >= FIRST_PSEUDO_REGISTER
6526 && reg_renumber[REGNO (old)] < 0)))
6527 oldequiv = find_equiv_reg (old, insn, ALL_REGS,
6528 -1, NULL_PTR, 0, mode);
6529
6530 if (oldequiv)
6531 {
6532 int regno = true_regnum (oldequiv);
6533
6534 /* If OLDEQUIV is a spill register, don't use it for this
6535 if any other reload needs it at an earlier stage of this insn
6536 or at this stage. */
6537 if (spill_reg_order[regno] >= 0
6538 && (! reload_reg_free_p (regno, reload_opnum[j],
6539 reload_when_needed[j])
6540 || ! reload_reg_free_before_p (regno, reload_opnum[j],
6541 reload_when_needed[j], 1)))
6542 oldequiv = 0;
6543
6544 /* If OLDEQUIV is not a spill register,
6545 don't use it if any other reload wants it. */
6546 if (spill_reg_order[regno] < 0)
6547 {
6548 int k;
6549 for (k = 0; k < n_reloads; k++)
6550 if (reload_reg_rtx[k] != 0 && k != j
6551 && reg_overlap_mentioned_for_reload_p (reload_reg_rtx[k],
6552 oldequiv))
6553 {
6554 oldequiv = 0;
6555 break;
6556 }
6557 }
6558
6559 /* If it is no cheaper to copy from OLDEQUIV into the
6560 reload register than it would be to move from memory,
6561 don't use it. Likewise, if we need a secondary register
6562 or memory. */
6563
6564 if (oldequiv != 0
6565 && ((REGNO_REG_CLASS (regno) != reload_reg_class[j]
6566 && (REGISTER_MOVE_COST (REGNO_REG_CLASS (regno),
6567 reload_reg_class[j])
6568 >= MEMORY_MOVE_COST (mode, reload_reg_class[j], 1)))
6569 #ifdef SECONDARY_INPUT_RELOAD_CLASS
6570 || (SECONDARY_INPUT_RELOAD_CLASS (reload_reg_class[j],
6571 mode, oldequiv)
6572 != NO_REGS)
6573 #endif
6574 #ifdef SECONDARY_MEMORY_NEEDED
6575 || SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (regno),
6576 reload_reg_class[j],
6577 mode)
6578 #endif
6579 ))
6580 oldequiv = 0;
6581 }
6582
6583 /* delete_output_reload is only invoked properly if old contains
6584 the original pseudo register. Since this is replaced with a
6585 hard reg when RELOAD_OVERRIDE_IN is set, see if we can
6586 find the pseudo in RELOAD_IN_REG. */
6587 if (oldequiv == 0
6588 && reload_override_in[j]
6589 && GET_CODE (reload_in_reg[j]) == REG)
6590 {
6591 oldequiv = old;
6592 old = reload_in_reg[j];
6593 }
6594 if (oldequiv == 0)
6595 oldequiv = old;
6596 else if (GET_CODE (oldequiv) == REG)
6597 oldequiv_reg = oldequiv;
6598 else if (GET_CODE (oldequiv) == SUBREG)
6599 oldequiv_reg = SUBREG_REG (oldequiv);
6600
6601 /* If we are reloading from a register that was recently stored in
6602 with an output-reload, see if we can prove there was
6603 actually no need to store the old value in it. */
6604
6605 if (optimize && GET_CODE (oldequiv) == REG
6606 && REGNO (oldequiv) < FIRST_PSEUDO_REGISTER
6607 && spill_reg_store[REGNO (oldequiv)]
6608 && GET_CODE (old) == REG
6609 && (dead_or_set_p (insn, spill_reg_stored_to[REGNO (oldequiv)])
6610 || rtx_equal_p (spill_reg_stored_to[REGNO (oldequiv)],
6611 reload_out_reg[j])))
6612 delete_output_reload (insn, j, REGNO (oldequiv));
6613
6614 /* Encapsulate both RELOADREG and OLDEQUIV into that mode,
6615 then load RELOADREG from OLDEQUIV. Note that we cannot use
6616 gen_lowpart_common since it can do the wrong thing when
6617 RELOADREG has a multi-word mode. Note that RELOADREG
6618 must always be a REG here. */
6619
6620 if (GET_MODE (reloadreg) != mode)
6621 reloadreg = gen_rtx_REG (mode, REGNO (reloadreg));
6622 while (GET_CODE (oldequiv) == SUBREG && GET_MODE (oldequiv) != mode)
6623 oldequiv = SUBREG_REG (oldequiv);
6624 if (GET_MODE (oldequiv) != VOIDmode
6625 && mode != GET_MODE (oldequiv))
6626 oldequiv = gen_rtx_SUBREG (mode, oldequiv, 0);
6627
6628 /* Switch to the right place to emit the reload insns. */
6629 switch (reload_when_needed[j])
6630 {
6631 case RELOAD_OTHER:
6632 where = &other_input_reload_insns;
6633 break;
6634 case RELOAD_FOR_INPUT:
6635 where = &input_reload_insns[reload_opnum[j]];
6636 break;
6637 case RELOAD_FOR_INPUT_ADDRESS:
6638 where = &input_address_reload_insns[reload_opnum[j]];
6639 break;
6640 case RELOAD_FOR_INPADDR_ADDRESS:
6641 where = &inpaddr_address_reload_insns[reload_opnum[j]];
6642 break;
6643 case RELOAD_FOR_OUTPUT_ADDRESS:
6644 where = &output_address_reload_insns[reload_opnum[j]];
6645 break;
6646 case RELOAD_FOR_OUTADDR_ADDRESS:
6647 where = &outaddr_address_reload_insns[reload_opnum[j]];
6648 break;
6649 case RELOAD_FOR_OPERAND_ADDRESS:
6650 where = &operand_reload_insns;
6651 break;
6652 case RELOAD_FOR_OPADDR_ADDR:
6653 where = &other_operand_reload_insns;
6654 break;
6655 case RELOAD_FOR_OTHER_ADDRESS:
6656 where = &other_input_address_reload_insns;
6657 break;
6658 default:
6659 abort ();
6660 }
6661
6662 push_to_sequence (*where);
6663 special = 0;
6664
6665 /* Auto-increment addresses must be reloaded in a special way. */
6666 if (reload_out[j] && ! reload_out_reg[j])
6667 {
6668 /* We are not going to bother supporting the case where a
6669 incremented register can't be copied directly from
6670 OLDEQUIV since this seems highly unlikely. */
6671 if (reload_secondary_in_reload[j] >= 0)
6672 abort ();
6673
6674 if (reload_inherited[j])
6675 oldequiv = reloadreg;
6676
6677 old = XEXP (reload_in_reg[j], 0);
6678
6679 if (optimize && GET_CODE (oldequiv) == REG
6680 && REGNO (oldequiv) < FIRST_PSEUDO_REGISTER
6681 && spill_reg_store[REGNO (oldequiv)]
6682 && GET_CODE (old) == REG
6683 && (dead_or_set_p (insn,
6684 spill_reg_stored_to[REGNO (oldequiv)])
6685 || rtx_equal_p (spill_reg_stored_to[REGNO (oldequiv)],
6686 old)))
6687 delete_output_reload (insn, j, REGNO (oldequiv));
6688
6689 /* Prevent normal processing of this reload. */
6690 special = 1;
6691 /* Output a special code sequence for this case. */
6692 new_spill_reg_store[REGNO (reloadreg)]
6693 = inc_for_reload (reloadreg, oldequiv, reload_out[j],
6694 reload_inc[j]);
6695 }
6696
6697 /* If we are reloading a pseudo-register that was set by the previous
6698 insn, see if we can get rid of that pseudo-register entirely
6699 by redirecting the previous insn into our reload register. */
6700
6701 else if (optimize && GET_CODE (old) == REG
6702 && REGNO (old) >= FIRST_PSEUDO_REGISTER
6703 && dead_or_set_p (insn, old)
6704 /* This is unsafe if some other reload
6705 uses the same reg first. */
6706 && reload_reg_free_before_p (REGNO (reloadreg),
6707 reload_opnum[j],
6708 reload_when_needed[j], 0))
6709 {
6710 rtx temp = PREV_INSN (insn);
6711 while (temp && GET_CODE (temp) == NOTE)
6712 temp = PREV_INSN (temp);
6713 if (temp
6714 && GET_CODE (temp) == INSN
6715 && GET_CODE (PATTERN (temp)) == SET
6716 && SET_DEST (PATTERN (temp)) == old
6717 /* Make sure we can access insn_operand_constraint. */
6718 && asm_noperands (PATTERN (temp)) < 0
6719 /* This is unsafe if prev insn rejects our reload reg. */
6720 && constraint_accepts_reg_p (insn_operand_constraint[recog_memoized (temp)][0],
6721 reloadreg)
6722 /* This is unsafe if operand occurs more than once in current
6723 insn. Perhaps some occurrences aren't reloaded. */
6724 && count_occurrences (PATTERN (insn), old) == 1
6725 /* Don't risk splitting a matching pair of operands. */
6726 && ! reg_mentioned_p (old, SET_SRC (PATTERN (temp))))
6727 {
6728 /* Store into the reload register instead of the pseudo. */
6729 SET_DEST (PATTERN (temp)) = reloadreg;
6730 /* If these are the only uses of the pseudo reg,
6731 pretend for GDB it lives in the reload reg we used. */
6732 if (REG_N_DEATHS (REGNO (old)) == 1
6733 && REG_N_SETS (REGNO (old)) == 1)
6734 {
6735 reg_renumber[REGNO (old)] = REGNO (reload_reg_rtx[j]);
6736 alter_reg (REGNO (old), -1);
6737 }
6738 special = 1;
6739 }
6740 }
6741
6742 /* We can't do that, so output an insn to load RELOADREG. */
6743
6744 if (! special)
6745 {
6746 #ifdef SECONDARY_INPUT_RELOAD_CLASS
6747 rtx second_reload_reg = 0;
6748 enum insn_code icode;
6749
6750 /* If we have a secondary reload, pick up the secondary register
6751 and icode, if any. If OLDEQUIV and OLD are different or
6752 if this is an in-out reload, recompute whether or not we
6753 still need a secondary register and what the icode should
6754 be. If we still need a secondary register and the class or
6755 icode is different, go back to reloading from OLD if using
6756 OLDEQUIV means that we got the wrong type of register. We
6757 cannot have different class or icode due to an in-out reload
6758 because we don't make such reloads when both the input and
6759 output need secondary reload registers. */
6760
6761 if (reload_secondary_in_reload[j] >= 0)
6762 {
6763 int secondary_reload = reload_secondary_in_reload[j];
6764 rtx real_oldequiv = oldequiv;
6765 rtx real_old = old;
6766
6767 /* If OLDEQUIV is a pseudo with a MEM, get the real MEM
6768 and similarly for OLD.
6769 See comments in get_secondary_reload in reload.c. */
6770 /* If it is a pseudo that cannot be replaced with its
6771 equivalent MEM, we must fall back to reload_in, which
6772 will have all the necessary substitutions registered. */
6773
6774 if (GET_CODE (oldequiv) == REG
6775 && REGNO (oldequiv) >= FIRST_PSEUDO_REGISTER
6776 && reg_equiv_memory_loc[REGNO (oldequiv)] != 0)
6777 {
6778 if (reg_equiv_address[REGNO (oldequiv)]
6779 || num_not_at_initial_offset)
6780 real_oldequiv = reload_in[j];
6781 else
6782 real_oldequiv = reg_equiv_mem[REGNO (oldequiv)];
6783 }
6784
6785 if (GET_CODE (old) == REG
6786 && REGNO (old) >= FIRST_PSEUDO_REGISTER
6787 && reg_equiv_memory_loc[REGNO (old)] != 0)
6788 {
6789 if (reg_equiv_address[REGNO (old)]
6790 || num_not_at_initial_offset)
6791 real_old = reload_in[j];
6792 else
6793 real_old = reg_equiv_mem[REGNO (old)];
6794 }
6795
6796 second_reload_reg = reload_reg_rtx[secondary_reload];
6797 icode = reload_secondary_in_icode[j];
6798
6799 if ((old != oldequiv && ! rtx_equal_p (old, oldequiv))
6800 || (reload_in[j] != 0 && reload_out[j] != 0))
6801 {
6802 enum reg_class new_class
6803 = SECONDARY_INPUT_RELOAD_CLASS (reload_reg_class[j],
6804 mode, real_oldequiv);
6805
6806 if (new_class == NO_REGS)
6807 second_reload_reg = 0;
6808 else
6809 {
6810 enum insn_code new_icode;
6811 enum machine_mode new_mode;
6812
6813 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) new_class],
6814 REGNO (second_reload_reg)))
6815 oldequiv = old, real_oldequiv = real_old;
6816 else
6817 {
6818 new_icode = reload_in_optab[(int) mode];
6819 if (new_icode != CODE_FOR_nothing
6820 && ((insn_operand_predicate[(int) new_icode][0]
6821 && ! ((*insn_operand_predicate[(int) new_icode][0])
6822 (reloadreg, mode)))
6823 || (insn_operand_predicate[(int) new_icode][1]
6824 && ! ((*insn_operand_predicate[(int) new_icode][1])
6825 (real_oldequiv, mode)))))
6826 new_icode = CODE_FOR_nothing;
6827
6828 if (new_icode == CODE_FOR_nothing)
6829 new_mode = mode;
6830 else
6831 new_mode = insn_operand_mode[(int) new_icode][2];
6832
6833 if (GET_MODE (second_reload_reg) != new_mode)
6834 {
6835 if (!HARD_REGNO_MODE_OK (REGNO (second_reload_reg),
6836 new_mode))
6837 oldequiv = old, real_oldequiv = real_old;
6838 else
6839 second_reload_reg
6840 = gen_rtx_REG (new_mode,
6841 REGNO (second_reload_reg));
6842 }
6843 }
6844 }
6845 }
6846
6847 /* If we still need a secondary reload register, check
6848 to see if it is being used as a scratch or intermediate
6849 register and generate code appropriately. If we need
6850 a scratch register, use REAL_OLDEQUIV since the form of
6851 the insn may depend on the actual address if it is
6852 a MEM. */
6853
6854 if (second_reload_reg)
6855 {
6856 if (icode != CODE_FOR_nothing)
6857 {
6858 emit_insn (GEN_FCN (icode) (reloadreg, real_oldequiv,
6859 second_reload_reg));
6860 special = 1;
6861 }
6862 else
6863 {
6864 /* See if we need a scratch register to load the
6865 intermediate register (a tertiary reload). */
6866 enum insn_code tertiary_icode
6867 = reload_secondary_in_icode[secondary_reload];
6868
6869 if (tertiary_icode != CODE_FOR_nothing)
6870 {
6871 rtx third_reload_reg
6872 = reload_reg_rtx[reload_secondary_in_reload[secondary_reload]];
6873
6874 emit_insn ((GEN_FCN (tertiary_icode)
6875 (second_reload_reg, real_oldequiv,
6876 third_reload_reg)));
6877 }
6878 else
6879 gen_reload (second_reload_reg, real_oldequiv,
6880 reload_opnum[j],
6881 reload_when_needed[j]);
6882
6883 oldequiv = second_reload_reg;
6884 }
6885 }
6886 }
6887 #endif
6888
6889 if (! special && ! rtx_equal_p (reloadreg, oldequiv))
6890 {
6891 rtx real_oldequiv = oldequiv;
6892
6893 if ((GET_CODE (oldequiv) == REG
6894 && REGNO (oldequiv) >= FIRST_PSEUDO_REGISTER
6895 && reg_equiv_memory_loc[REGNO (oldequiv)] != 0)
6896 || (GET_CODE (oldequiv) == SUBREG
6897 && GET_CODE (SUBREG_REG (oldequiv)) == REG
6898 && (REGNO (SUBREG_REG (oldequiv))
6899 >= FIRST_PSEUDO_REGISTER)
6900 && (reg_equiv_memory_loc
6901 [REGNO (SUBREG_REG (oldequiv))] != 0)))
6902 real_oldequiv = reload_in[j];
6903 gen_reload (reloadreg, real_oldequiv, reload_opnum[j],
6904 reload_when_needed[j]);
6905 }
6906
6907 }
6908
6909 this_reload_insn = get_last_insn ();
6910 /* End this sequence. */
6911 *where = get_insns ();
6912 end_sequence ();
6913
6914 /* Update reload_override_in so that delete_address_reloads_1
6915 can see the actual register usage. */
6916 if (oldequiv_reg)
6917 reload_override_in[j] = oldequiv;
6918 }
6919
6920 /* When inheriting a wider reload, we have a MEM in reload_in[j],
6921 e.g. inheriting a SImode output reload for
6922 (mem:HI (plus:SI (reg:SI 14 fp) (const_int 10))) */
6923 if (optimize && reload_inherited[j] && reload_in[j]
6924 && GET_CODE (reload_in[j]) == MEM
6925 && GET_CODE (reload_in_reg[j]) == MEM
6926 && reload_spill_index[j] >= 0
6927 && TEST_HARD_REG_BIT (reg_reloaded_valid, reload_spill_index[j]))
6928 {
6929 expect_occurrences
6930 = count_occurrences (PATTERN (insn), reload_in[j]) == 1 ? 0 : -1;
6931 reload_in[j]
6932 = regno_reg_rtx[reg_reloaded_contents[reload_spill_index[j]]];
6933 }
6934
6935 /* If we are reloading a register that was recently stored in with an
6936 output-reload, see if we can prove there was
6937 actually no need to store the old value in it. */
6938
6939 if (optimize
6940 && (reload_inherited[j] || reload_override_in[j])
6941 && reload_reg_rtx[j]
6942 && GET_CODE (reload_reg_rtx[j]) == REG
6943 && spill_reg_store[REGNO (reload_reg_rtx[j])] != 0
6944 #if 0
6945 /* There doesn't seem to be any reason to restrict this to pseudos
6946 and doing so loses in the case where we are copying from a
6947 register of the wrong class. */
6948 && REGNO (spill_reg_stored_to[REGNO (reload_reg_rtx[j])])
6949 >= FIRST_PSEUDO_REGISTER
6950 #endif
6951 /* The insn might have already some references to stackslots
6952 replaced by MEMs, while reload_out_reg still names the
6953 original pseudo. */
6954 && (dead_or_set_p (insn,
6955 spill_reg_stored_to[REGNO (reload_reg_rtx[j])])
6956 || rtx_equal_p (spill_reg_stored_to[REGNO (reload_reg_rtx[j])],
6957 reload_out_reg[j])))
6958 delete_output_reload (insn, j, REGNO (reload_reg_rtx[j]));
6959
6960 /* Input-reloading is done. Now do output-reloading,
6961 storing the value from the reload-register after the main insn
6962 if reload_out[j] is nonzero.
6963
6964 ??? At some point we need to support handling output reloads of
6965 JUMP_INSNs or insns that set cc0. */
6966
6967 /* If this is an output reload that stores something that is
6968 not loaded in this same reload, see if we can eliminate a previous
6969 store. */
6970 {
6971 rtx pseudo = reload_out_reg[j];
6972
6973 if (pseudo
6974 && GET_CODE (pseudo) == REG
6975 && ! rtx_equal_p (reload_in_reg[j], pseudo)
6976 && REGNO (pseudo) >= FIRST_PSEUDO_REGISTER
6977 && reg_last_reload_reg[REGNO (pseudo)])
6978 {
6979 int pseudo_no = REGNO (pseudo);
6980 int last_regno = REGNO (reg_last_reload_reg[pseudo_no]);
6981
6982 /* We don't need to test full validity of last_regno for
6983 inherit here; we only want to know if the store actually
6984 matches the pseudo. */
6985 if (reg_reloaded_contents[last_regno] == pseudo_no
6986 && spill_reg_store[last_regno]
6987 && rtx_equal_p (pseudo, spill_reg_stored_to[last_regno]))
6988 delete_output_reload (insn, j, last_regno);
6989 }
6990 }
6991
6992 old = reload_out_reg[j];
6993 if (old != 0
6994 && reload_reg_rtx[j] != old
6995 && reload_reg_rtx[j] != 0)
6996 {
6997 register rtx reloadreg = reload_reg_rtx[j];
6998 #ifdef SECONDARY_OUTPUT_RELOAD_CLASS
6999 register rtx second_reloadreg = 0;
7000 #endif
7001 rtx note, p;
7002 enum machine_mode mode;
7003 int special = 0;
7004
7005 /* An output operand that dies right away does need a reload,
7006 but need not be copied from it. Show the new location in the
7007 REG_UNUSED note. */
7008 if ((GET_CODE (old) == REG || GET_CODE (old) == SCRATCH)
7009 && (note = find_reg_note (insn, REG_UNUSED, old)) != 0)
7010 {
7011 XEXP (note, 0) = reload_reg_rtx[j];
7012 continue;
7013 }
7014 /* Likewise for a SUBREG of an operand that dies. */
7015 else if (GET_CODE (old) == SUBREG
7016 && GET_CODE (SUBREG_REG (old)) == REG
7017 && 0 != (note = find_reg_note (insn, REG_UNUSED,
7018 SUBREG_REG (old))))
7019 {
7020 XEXP (note, 0) = gen_lowpart_common (GET_MODE (old),
7021 reload_reg_rtx[j]);
7022 continue;
7023 }
7024 else if (GET_CODE (old) == SCRATCH)
7025 /* If we aren't optimizing, there won't be a REG_UNUSED note,
7026 but we don't want to make an output reload. */
7027 continue;
7028
7029 #if 0
7030 /* Strip off of OLD any size-increasing SUBREGs such as
7031 (SUBREG:SI foo:QI 0). */
7032
7033 while (GET_CODE (old) == SUBREG && SUBREG_WORD (old) == 0
7034 && (GET_MODE_SIZE (GET_MODE (old))
7035 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (old)))))
7036 old = SUBREG_REG (old);
7037 #endif
7038
7039 /* If is a JUMP_INSN, we can't support output reloads yet. */
7040 if (GET_CODE (insn) == JUMP_INSN)
7041 abort ();
7042
7043 if (reload_when_needed[j] == RELOAD_OTHER)
7044 start_sequence ();
7045 else
7046 push_to_sequence (output_reload_insns[reload_opnum[j]]);
7047
7048 old = reload_out[j];
7049
7050 /* Determine the mode to reload in.
7051 See comments above (for input reloading). */
7052
7053 mode = GET_MODE (old);
7054 if (mode == VOIDmode)
7055 {
7056 /* VOIDmode should never happen for an output. */
7057 if (asm_noperands (PATTERN (insn)) < 0)
7058 /* It's the compiler's fault. */
7059 fatal_insn ("VOIDmode on an output", insn);
7060 error_for_asm (insn, "output operand is constant in `asm'");
7061 /* Prevent crash--use something we know is valid. */
7062 mode = word_mode;
7063 old = gen_rtx_REG (mode, REGNO (reloadreg));
7064 }
7065
7066 if (GET_MODE (reloadreg) != mode)
7067 reloadreg = gen_rtx_REG (mode, REGNO (reloadreg));
7068
7069 #ifdef SECONDARY_OUTPUT_RELOAD_CLASS
7070
7071 /* If we need two reload regs, set RELOADREG to the intermediate
7072 one, since it will be stored into OLD. We might need a secondary
7073 register only for an input reload, so check again here. */
7074
7075 if (reload_secondary_out_reload[j] >= 0)
7076 {
7077 rtx real_old = old;
7078
7079 if (GET_CODE (old) == REG && REGNO (old) >= FIRST_PSEUDO_REGISTER
7080 && reg_equiv_mem[REGNO (old)] != 0)
7081 real_old = reg_equiv_mem[REGNO (old)];
7082
7083 if((SECONDARY_OUTPUT_RELOAD_CLASS (reload_reg_class[j],
7084 mode, real_old)
7085 != NO_REGS))
7086 {
7087 second_reloadreg = reloadreg;
7088 reloadreg = reload_reg_rtx[reload_secondary_out_reload[j]];
7089
7090 /* See if RELOADREG is to be used as a scratch register
7091 or as an intermediate register. */
7092 if (reload_secondary_out_icode[j] != CODE_FOR_nothing)
7093 {
7094 emit_insn ((GEN_FCN (reload_secondary_out_icode[j])
7095 (real_old, second_reloadreg, reloadreg)));
7096 special = 1;
7097 }
7098 else
7099 {
7100 /* See if we need both a scratch and intermediate reload
7101 register. */
7102
7103 int secondary_reload = reload_secondary_out_reload[j];
7104 enum insn_code tertiary_icode
7105 = reload_secondary_out_icode[secondary_reload];
7106
7107 if (GET_MODE (reloadreg) != mode)
7108 reloadreg = gen_rtx_REG (mode, REGNO (reloadreg));
7109
7110 if (tertiary_icode != CODE_FOR_nothing)
7111 {
7112 rtx third_reloadreg
7113 = reload_reg_rtx[reload_secondary_out_reload[secondary_reload]];
7114 rtx tem;
7115
7116 /* Copy primary reload reg to secondary reload reg.
7117 (Note that these have been swapped above, then
7118 secondary reload reg to OLD using our insn. */
7119
7120 /* If REAL_OLD is a paradoxical SUBREG, remove it
7121 and try to put the opposite SUBREG on
7122 RELOADREG. */
7123 if (GET_CODE (real_old) == SUBREG
7124 && (GET_MODE_SIZE (GET_MODE (real_old))
7125 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (real_old))))
7126 && 0 != (tem = gen_lowpart_common
7127 (GET_MODE (SUBREG_REG (real_old)),
7128 reloadreg)))
7129 real_old = SUBREG_REG (real_old), reloadreg = tem;
7130
7131 gen_reload (reloadreg, second_reloadreg,
7132 reload_opnum[j], reload_when_needed[j]);
7133 emit_insn ((GEN_FCN (tertiary_icode)
7134 (real_old, reloadreg, third_reloadreg)));
7135 special = 1;
7136 }
7137
7138 else
7139 /* Copy between the reload regs here and then to
7140 OUT later. */
7141
7142 gen_reload (reloadreg, second_reloadreg,
7143 reload_opnum[j], reload_when_needed[j]);
7144 }
7145 }
7146 }
7147 #endif
7148
7149 /* Output the last reload insn. */
7150 if (! special)
7151 {
7152 rtx set;
7153
7154 /* Don't output the last reload if OLD is not the dest of
7155 INSN and is in the src and is clobbered by INSN. */
7156 if (! flag_expensive_optimizations
7157 || GET_CODE (old) != REG
7158 || !(set = single_set (insn))
7159 || rtx_equal_p (old, SET_DEST (set))
7160 || !reg_mentioned_p (old, SET_SRC (set))
7161 || !regno_clobbered_p (REGNO (old), insn))
7162 gen_reload (old, reloadreg, reload_opnum[j],
7163 reload_when_needed[j]);
7164 }
7165
7166 /* Look at all insns we emitted, just to be safe. */
7167 for (p = get_insns (); p; p = NEXT_INSN (p))
7168 if (GET_RTX_CLASS (GET_CODE (p)) == 'i')
7169 {
7170 rtx pat = PATTERN (p);
7171
7172 /* If this output reload doesn't come from a spill reg,
7173 clear any memory of reloaded copies of the pseudo reg.
7174 If this output reload comes from a spill reg,
7175 reg_has_output_reload will make this do nothing. */
7176 note_stores (pat, forget_old_reloads_1);
7177
7178 if (reg_mentioned_p (reload_reg_rtx[j], pat))
7179 {
7180 rtx set = single_set (insn);
7181 if (reload_spill_index[j] < 0
7182 && set
7183 && SET_SRC (set) == reload_reg_rtx[j])
7184 {
7185 int src = REGNO (SET_SRC (set));
7186
7187 reload_spill_index[j] = src;
7188 SET_HARD_REG_BIT (reg_is_output_reload, src);
7189 if (find_regno_note (insn, REG_DEAD, src))
7190 SET_HARD_REG_BIT (reg_reloaded_died, src);
7191 }
7192 if (REGNO (reload_reg_rtx[j]) < FIRST_PSEUDO_REGISTER)
7193 {
7194 int s = reload_secondary_out_reload[j];
7195 set = single_set (p);
7196 /* If this reload copies only to the secondary reload
7197 register, the secondary reload does the actual
7198 store. */
7199 if (s >= 0 && set == NULL_RTX)
7200 ; /* We can't tell what function the secondary reload
7201 has and where the actual store to the pseudo is
7202 made; leave new_spill_reg_store alone. */
7203 else if (s >= 0
7204 && SET_SRC (set) == reload_reg_rtx[j]
7205 && SET_DEST (set) == reload_reg_rtx[s])
7206 {
7207 /* Usually the next instruction will be the
7208 secondary reload insn; if we can confirm
7209 that it is, setting new_spill_reg_store to
7210 that insn will allow an extra optimization. */
7211 rtx s_reg = reload_reg_rtx[s];
7212 rtx next = NEXT_INSN (p);
7213 reload_out[s] = reload_out[j];
7214 reload_out_reg[s] = reload_out_reg[j];
7215 set = single_set (next);
7216 if (set && SET_SRC (set) == s_reg
7217 && ! new_spill_reg_store[REGNO (s_reg)])
7218 {
7219 SET_HARD_REG_BIT (reg_is_output_reload,
7220 REGNO (s_reg));
7221 new_spill_reg_store[REGNO (s_reg)] = next;
7222 }
7223 }
7224 else
7225 new_spill_reg_store[REGNO (reload_reg_rtx[j])] = p;
7226 }
7227 }
7228 }
7229
7230 if (reload_when_needed[j] == RELOAD_OTHER)
7231 {
7232 emit_insns (other_output_reload_insns[reload_opnum[j]]);
7233 other_output_reload_insns[reload_opnum[j]] = get_insns ();
7234 }
7235 else
7236 output_reload_insns[reload_opnum[j]] = get_insns ();
7237
7238 end_sequence ();
7239 }
7240 }
7241
7242 /* Now write all the insns we made for reloads in the order expected by
7243 the allocation functions. Prior to the insn being reloaded, we write
7244 the following reloads:
7245
7246 RELOAD_FOR_OTHER_ADDRESS reloads for input addresses.
7247
7248 RELOAD_OTHER reloads.
7249
7250 For each operand, any RELOAD_FOR_INPADDR_ADDRESS reloads followed
7251 by any RELOAD_FOR_INPUT_ADDRESS reloads followed by the
7252 RELOAD_FOR_INPUT reload for the operand.
7253
7254 RELOAD_FOR_OPADDR_ADDRS reloads.
7255
7256 RELOAD_FOR_OPERAND_ADDRESS reloads.
7257
7258 After the insn being reloaded, we write the following:
7259
7260 For each operand, any RELOAD_FOR_OUTADDR_ADDRESS reloads followed
7261 by any RELOAD_FOR_OUTPUT_ADDRESS reload followed by the
7262 RELOAD_FOR_OUTPUT reload, followed by any RELOAD_OTHER output
7263 reloads for the operand. The RELOAD_OTHER output reloads are
7264 output in descending order by reload number. */
7265
7266 emit_insns_before (other_input_address_reload_insns, insn);
7267 emit_insns_before (other_input_reload_insns, insn);
7268
7269 for (j = 0; j < reload_n_operands; j++)
7270 {
7271 emit_insns_before (inpaddr_address_reload_insns[j], insn);
7272 emit_insns_before (input_address_reload_insns[j], insn);
7273 emit_insns_before (input_reload_insns[j], insn);
7274 }
7275
7276 emit_insns_before (other_operand_reload_insns, insn);
7277 emit_insns_before (operand_reload_insns, insn);
7278
7279 for (j = 0; j < reload_n_operands; j++)
7280 {
7281 emit_insns_before (outaddr_address_reload_insns[j], following_insn);
7282 emit_insns_before (output_address_reload_insns[j], following_insn);
7283 emit_insns_before (output_reload_insns[j], following_insn);
7284 emit_insns_before (other_output_reload_insns[j], following_insn);
7285 }
7286
7287 /* Keep basic block info up to date. */
7288 if (n_basic_blocks)
7289 {
7290 if (basic_block_head[chain->block] == insn)
7291 basic_block_head[chain->block] = NEXT_INSN (before_insn);
7292 if (basic_block_end[chain->block] == insn)
7293 basic_block_end[chain->block] = PREV_INSN (following_insn);
7294 }
7295
7296 /* For all the spill regs newly reloaded in this instruction,
7297 record what they were reloaded from, so subsequent instructions
7298 can inherit the reloads.
7299
7300 Update spill_reg_store for the reloads of this insn.
7301 Copy the elements that were updated in the loop above. */
7302
7303 for (j = 0; j < n_reloads; j++)
7304 {
7305 register int r = reload_order[j];
7306 register int i = reload_spill_index[r];
7307
7308 /* I is nonneg if this reload used a register.
7309 If reload_reg_rtx[r] is 0, this is an optional reload
7310 that we opted to ignore. */
7311
7312 if (i >= 0 && reload_reg_rtx[r] != 0)
7313 {
7314 int nr
7315 = HARD_REGNO_NREGS (i, GET_MODE (reload_reg_rtx[r]));
7316 int k;
7317 int part_reaches_end = 0;
7318 int all_reaches_end = 1;
7319
7320 /* For a multi register reload, we need to check if all or part
7321 of the value lives to the end. */
7322 for (k = 0; k < nr; k++)
7323 {
7324 if (reload_reg_reaches_end_p (i + k, reload_opnum[r],
7325 reload_when_needed[r]))
7326 part_reaches_end = 1;
7327 else
7328 all_reaches_end = 0;
7329 }
7330
7331 /* Ignore reloads that don't reach the end of the insn in
7332 entirety. */
7333 if (all_reaches_end)
7334 {
7335 /* First, clear out memory of what used to be in this spill reg.
7336 If consecutive registers are used, clear them all. */
7337
7338 for (k = 0; k < nr; k++)
7339 CLEAR_HARD_REG_BIT (reg_reloaded_valid, i + k);
7340
7341 /* Maybe the spill reg contains a copy of reload_out. */
7342 if (reload_out[r] != 0
7343 && (GET_CODE (reload_out[r]) == REG
7344 #ifdef AUTO_INC_DEC
7345 || ! reload_out_reg[r]
7346 #endif
7347 || GET_CODE (reload_out_reg[r]) == REG))
7348 {
7349 rtx out = (GET_CODE (reload_out[r]) == REG
7350 ? reload_out[r]
7351 : reload_out_reg[r]
7352 ? reload_out_reg[r]
7353 /* AUTO_INC */ : XEXP (reload_in_reg[r], 0));
7354 register int nregno = REGNO (out);
7355 int nnr = (nregno >= FIRST_PSEUDO_REGISTER ? 1
7356 : HARD_REGNO_NREGS (nregno,
7357 GET_MODE (reload_reg_rtx[r])));
7358
7359 spill_reg_store[i] = new_spill_reg_store[i];
7360 spill_reg_stored_to[i] = out;
7361 reg_last_reload_reg[nregno] = reload_reg_rtx[r];
7362
7363 /* If NREGNO is a hard register, it may occupy more than
7364 one register. If it does, say what is in the
7365 rest of the registers assuming that both registers
7366 agree on how many words the object takes. If not,
7367 invalidate the subsequent registers. */
7368
7369 if (nregno < FIRST_PSEUDO_REGISTER)
7370 for (k = 1; k < nnr; k++)
7371 reg_last_reload_reg[nregno + k]
7372 = (nr == nnr
7373 ? gen_rtx_REG (reg_raw_mode[REGNO (reload_reg_rtx[r]) + k],
7374 REGNO (reload_reg_rtx[r]) + k)
7375 : 0);
7376
7377 /* Now do the inverse operation. */
7378 for (k = 0; k < nr; k++)
7379 {
7380 CLEAR_HARD_REG_BIT (reg_reloaded_dead, i + k);
7381 reg_reloaded_contents[i + k]
7382 = (nregno >= FIRST_PSEUDO_REGISTER || nr != nnr
7383 ? nregno
7384 : nregno + k);
7385 reg_reloaded_insn[i + k] = insn;
7386 SET_HARD_REG_BIT (reg_reloaded_valid, i + k);
7387 }
7388 }
7389
7390 /* Maybe the spill reg contains a copy of reload_in. Only do
7391 something if there will not be an output reload for
7392 the register being reloaded. */
7393 else if (reload_out_reg[r] == 0
7394 && reload_in[r] != 0
7395 && ((GET_CODE (reload_in[r]) == REG
7396 && REGNO (reload_in[r]) >= FIRST_PSEUDO_REGISTER
7397 && ! reg_has_output_reload[REGNO (reload_in[r])])
7398 || (GET_CODE (reload_in_reg[r]) == REG
7399 && ! reg_has_output_reload[REGNO (reload_in_reg[r])]))
7400 && ! reg_set_p (reload_reg_rtx[r], PATTERN (insn)))
7401 {
7402 register int nregno;
7403 int nnr;
7404
7405 if (GET_CODE (reload_in[r]) == REG
7406 && REGNO (reload_in[r]) >= FIRST_PSEUDO_REGISTER)
7407 nregno = REGNO (reload_in[r]);
7408 else if (GET_CODE (reload_in_reg[r]) == REG)
7409 nregno = REGNO (reload_in_reg[r]);
7410 else
7411 nregno = REGNO (XEXP (reload_in_reg[r], 0));
7412
7413 nnr = (nregno >= FIRST_PSEUDO_REGISTER ? 1
7414 : HARD_REGNO_NREGS (nregno,
7415 GET_MODE (reload_reg_rtx[r])));
7416
7417 reg_last_reload_reg[nregno] = reload_reg_rtx[r];
7418
7419 if (nregno < FIRST_PSEUDO_REGISTER)
7420 for (k = 1; k < nnr; k++)
7421 reg_last_reload_reg[nregno + k]
7422 = (nr == nnr
7423 ? gen_rtx_REG (reg_raw_mode[REGNO (reload_reg_rtx[r]) + k],
7424 REGNO (reload_reg_rtx[r]) + k)
7425 : 0);
7426
7427 /* Unless we inherited this reload, show we haven't
7428 recently done a store.
7429 Previous stores of inherited auto_inc expressions
7430 also have to be discarded. */
7431 if (! reload_inherited[r]
7432 || (reload_out[r] && ! reload_out_reg[r]))
7433 spill_reg_store[i] = 0;
7434
7435 for (k = 0; k < nr; k++)
7436 {
7437 CLEAR_HARD_REG_BIT (reg_reloaded_dead, i + k);
7438 reg_reloaded_contents[i + k]
7439 = (nregno >= FIRST_PSEUDO_REGISTER || nr != nnr
7440 ? nregno
7441 : nregno + k);
7442 reg_reloaded_insn[i + k] = insn;
7443 SET_HARD_REG_BIT (reg_reloaded_valid, i + k);
7444 }
7445 }
7446 }
7447
7448 /* However, if part of the reload reaches the end, then we must
7449 invalidate the old info for the part that survives to the end. */
7450 else if (part_reaches_end)
7451 {
7452 for (k = 0; k < nr; k++)
7453 if (reload_reg_reaches_end_p (i + k,
7454 reload_opnum[r],
7455 reload_when_needed[r]))
7456 CLEAR_HARD_REG_BIT (reg_reloaded_valid, i + k);
7457 }
7458 }
7459
7460 /* The following if-statement was #if 0'd in 1.34 (or before...).
7461 It's reenabled in 1.35 because supposedly nothing else
7462 deals with this problem. */
7463
7464 /* If a register gets output-reloaded from a non-spill register,
7465 that invalidates any previous reloaded copy of it.
7466 But forget_old_reloads_1 won't get to see it, because
7467 it thinks only about the original insn. So invalidate it here. */
7468 if (i < 0 && reload_out[r] != 0
7469 && (GET_CODE (reload_out[r]) == REG
7470 || (GET_CODE (reload_out[r]) == MEM
7471 && GET_CODE (reload_out_reg[r]) == REG)))
7472 {
7473 rtx out = (GET_CODE (reload_out[r]) == REG
7474 ? reload_out[r] : reload_out_reg[r]);
7475 register int nregno = REGNO (out);
7476 if (nregno >= FIRST_PSEUDO_REGISTER)
7477 {
7478 rtx src_reg, store_insn;
7479
7480 reg_last_reload_reg[nregno] = 0;
7481
7482 /* If we can find a hard register that is stored, record
7483 the storing insn so that we may delete this insn with
7484 delete_output_reload. */
7485 src_reg = reload_reg_rtx[r];
7486
7487 /* If this is an optional reload, try to find the source reg
7488 from an input reload. */
7489 if (! src_reg)
7490 {
7491 rtx set = single_set (insn);
7492 if (SET_DEST (set) == reload_out[r])
7493 {
7494 int k;
7495
7496 src_reg = SET_SRC (set);
7497 store_insn = insn;
7498 for (k = 0; k < n_reloads; k++)
7499 {
7500 if (reload_in[k] == src_reg)
7501 {
7502 src_reg = reload_reg_rtx[k];
7503 break;
7504 }
7505 }
7506 }
7507 }
7508 else
7509 store_insn = new_spill_reg_store[REGNO (src_reg)];
7510 if (src_reg && GET_CODE (src_reg) == REG
7511 && REGNO (src_reg) < FIRST_PSEUDO_REGISTER)
7512 {
7513 int src_regno = REGNO (src_reg);
7514 int nr = HARD_REGNO_NREGS (src_regno, reload_mode[r]);
7515 /* The place where to find a death note varies with
7516 PRESERVE_DEATH_INFO_REGNO_P . The condition is not
7517 necessarily checked exactly in the code that moves
7518 notes, so just check both locations. */
7519 rtx note = find_regno_note (insn, REG_DEAD, src_regno);
7520 if (! note)
7521 note = find_regno_note (store_insn, REG_DEAD, src_regno);
7522 while (nr-- > 0)
7523 {
7524 spill_reg_store[src_regno + nr] = store_insn;
7525 spill_reg_stored_to[src_regno + nr] = out;
7526 reg_reloaded_contents[src_regno + nr] = nregno;
7527 reg_reloaded_insn[src_regno + nr] = store_insn;
7528 CLEAR_HARD_REG_BIT (reg_reloaded_dead, src_regno + nr);
7529 SET_HARD_REG_BIT (reg_reloaded_valid, src_regno + nr);
7530 SET_HARD_REG_BIT (reg_is_output_reload, src_regno + nr);
7531 if (note)
7532 SET_HARD_REG_BIT (reg_reloaded_died, src_regno);
7533 else
7534 CLEAR_HARD_REG_BIT (reg_reloaded_died, src_regno);
7535 }
7536 reg_last_reload_reg[nregno] = src_reg;
7537 }
7538 }
7539 else
7540 {
7541 int num_regs = HARD_REGNO_NREGS (nregno,GET_MODE (reload_out[r]));
7542
7543 while (num_regs-- > 0)
7544 reg_last_reload_reg[nregno + num_regs] = 0;
7545 }
7546 }
7547 }
7548 IOR_HARD_REG_SET (reg_reloaded_dead, reg_reloaded_died);
7549 }
7550 \f
/* Emit code to perform a reload from IN (which may be a reload register) to
   OUT (which may also be a reload register).  IN or OUT is from operand
   OPNUM with reload type TYPE.

   Returns first insn emitted.  */

rtx
gen_reload (out, in, opnum, type)
     rtx out;
     rtx in;
     int opnum;
     enum reload_type type;
{
  /* Remember where we started so the first insn we emit can be found
     and so failed attempts can be deleted.  */
  rtx last = get_last_insn ();
  rtx tem;

  /* If IN is a paradoxical SUBREG, remove it and try to put the
     opposite SUBREG on OUT.  Likewise for a paradoxical SUBREG on OUT.  */
  if (GET_CODE (in) == SUBREG
      && (GET_MODE_SIZE (GET_MODE (in))
	  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))))
      && (tem = gen_lowpart_common (GET_MODE (SUBREG_REG (in)), out)) != 0)
    in = SUBREG_REG (in), out = tem;
  else if (GET_CODE (out) == SUBREG
      && (GET_MODE_SIZE (GET_MODE (out))
	  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))))
      && (tem = gen_lowpart_common (GET_MODE (SUBREG_REG (out)), in)) != 0)
    out = SUBREG_REG (out), in = tem;

  /* How to do this reload can get quite tricky.  Normally, we are being
     asked to reload a simple operand, such as a MEM, a constant, or a pseudo
     register that didn't get a hard register.  In that case we can just
     call emit_move_insn.

     We can also be asked to reload a PLUS that adds a register or a MEM to
     another register, constant or MEM.  This can occur during frame pointer
     elimination and while reloading addresses.  This case is handled by
     trying to emit a single insn to perform the add.  If it is not valid,
     we use a two insn sequence.

     Finally, we could be called to handle an 'o' constraint by putting
     an address into a register.  In that case, we first try to do this
     with a named pattern of "reload_load_address".  If no such pattern
     exists, we just emit a SET insn and hope for the best (it will normally
     be valid on machines that use 'o').

     This entire process is made complex because reload will never
     process the insns we generate here and so we must ensure that
     they will fit their constraints and also by the fact that parts of
     IN might be being reloaded separately and replaced with spill registers.
     Because of this, we are, in some sense, just guessing the right approach
     here.  The one listed above seems to work.

     ??? At some point, this whole thing needs to be rethought.  */

  if (GET_CODE (in) == PLUS
      && (GET_CODE (XEXP (in, 0)) == REG
	  || GET_CODE (XEXP (in, 0)) == SUBREG
	  || GET_CODE (XEXP (in, 0)) == MEM)
      && (GET_CODE (XEXP (in, 1)) == REG
	  || GET_CODE (XEXP (in, 1)) == SUBREG
	  || CONSTANT_P (XEXP (in, 1))
	  || GET_CODE (XEXP (in, 1)) == MEM))
    {
      /* We need to compute the sum of a register or a MEM and another
	 register, constant, or MEM, and put it into the reload
	 register.  The best possible way of doing this is if the machine
	 has a three-operand ADD insn that accepts the required operands.

	 The simplest approach is to try to generate such an insn and see if it
	 is recognized and matches its constraints.  If so, it can be used.

	 It might be better not to actually emit the insn unless it is valid,
	 but we need to pass the insn as an operand to `recog' and
	 `insn_extract' and it is simpler to emit and then delete the insn if
	 not valid than to dummy things up.  */

      rtx op0, op1, tem, insn;
      int code;

      /* Substitute any replacements reload has queued for the addends.  */
      op0 = find_replacement (&XEXP (in, 0));
      op1 = find_replacement (&XEXP (in, 1));

      /* Since constraint checking is strict, commutativity won't be
	 checked, so we need to do that here to avoid spurious failure
	 if the add instruction is two-address and the second operand
	 of the add is the same as the reload reg, which is frequently
	 the case.  If the insn would be A = B + A, rearrange it so
	 it will be A = A + B as constrain_operands expects.  */

      if (GET_CODE (XEXP (in, 1)) == REG
	  && REGNO (out) == REGNO (XEXP (in, 1)))
	tem = op0, op0 = op1, op1 = tem;

      if (op0 != XEXP (in, 0) || op1 != XEXP (in, 1))
	in = gen_rtx_PLUS (GET_MODE (in), op0, op1);

      /* First attempt: a single three-operand add insn.  */
      insn = emit_insn (gen_rtx_SET (VOIDmode, out, in));
      code = recog_memoized (insn);

      if (code >= 0)
	{
	  insn_extract (insn);
	  /* We want constrain operands to treat this insn strictly in
	     its validity determination, i.e., the way it would after reload
	     has completed.  */
	  if (constrain_operands (code, 1))
	    return insn;
	}

      delete_insns_since (last);

      /* If that failed, we must use a conservative two-insn sequence.
	 use move to copy constant, MEM, or pseudo register to the reload
	 register since "move" will be able to handle an arbitrary operand,
	 unlike add which can't, in general.  Then add the registers.

	 If there is another way to do this for a specific machine, a
	 DEFINE_PEEPHOLE should be specified that recognizes the sequence
	 we emit below.  */

      /* Put the operand that a plain move can always handle in OP0,
	 so that it is the one loaded into OUT first.  */
      if (CONSTANT_P (op1) || GET_CODE (op1) == MEM || GET_CODE (op1) == SUBREG
	  || (GET_CODE (op1) == REG
	      && REGNO (op1) >= FIRST_PSEUDO_REGISTER))
	tem = op0, op0 = op1, op1 = tem;

      gen_reload (out, op0, opnum, type);

      /* If OP0 and OP1 are the same, we can use OUT for OP1.
	 This fixes a problem on the 32K where the stack pointer cannot
	 be used as an operand of an add insn.  */

      if (rtx_equal_p (op0, op1))
	op1 = out;

      insn = emit_insn (gen_add2_insn (out, op1));

      /* If that failed, copy the address register to the reload register.
	 Then add the constant to the reload register.  */

      code = recog_memoized (insn);

      if (code >= 0)
	{
	  insn_extract (insn);
	  /* We want constrain operands to treat this insn strictly in
	     its validity determination, i.e., the way it would after reload
	     has completed.  */
	  if (constrain_operands (code, 1))
	    {
	      /* Add a REG_EQUIV note so that find_equiv_reg can find it.  */
	      REG_NOTES (insn)
		= gen_rtx_EXPR_LIST (REG_EQUIV, in, REG_NOTES (insn));
	      return insn;
	    }
	}

      delete_insns_since (last);

      /* Last resort: load OP1 into OUT first, then add OP0.  */
      gen_reload (out, op1, opnum, type);
      insn = emit_insn (gen_add2_insn (out, op0));
      REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_EQUIV, in, REG_NOTES (insn));
    }

#ifdef SECONDARY_MEMORY_NEEDED
  /* If we need a memory location to do the move, do it that way.  */
  else if (GET_CODE (in) == REG && REGNO (in) < FIRST_PSEUDO_REGISTER
	   && GET_CODE (out) == REG && REGNO (out) < FIRST_PSEUDO_REGISTER
	   && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (REGNO (in)),
				       REGNO_REG_CLASS (REGNO (out)),
				       GET_MODE (out)))
    {
      /* Get the memory to use and rewrite both registers to its mode.  */
      rtx loc = get_secondary_mem (in, GET_MODE (out), opnum, type);

      if (GET_MODE (loc) != GET_MODE (out))
	out = gen_rtx_REG (GET_MODE (loc), REGNO (out));

      if (GET_MODE (loc) != GET_MODE (in))
	in = gen_rtx_REG (GET_MODE (loc), REGNO (in));

      /* Move through the intermediate memory location.  */
      gen_reload (loc, in, opnum, type);
      gen_reload (out, loc, opnum, type);
    }
#endif

  /* If IN is a simple operand, use gen_move_insn.  */
  else if (GET_RTX_CLASS (GET_CODE (in)) == 'o' || GET_CODE (in) == SUBREG)
    emit_insn (gen_move_insn (out, in));

#ifdef HAVE_reload_load_address
  else if (HAVE_reload_load_address)
    emit_insn (gen_reload_load_address (out, in));
#endif

  /* Otherwise, just write (set OUT IN) and hope for the best.  */
  else
    emit_insn (gen_rtx_SET (VOIDmode, out, in));

  /* Return the first insn emitted.
     We can not just return get_last_insn, because there may have
     been multiple instructions emitted.  Also note that gen_move_insn may
     emit more than one insn itself, so we can not assume that there is one
     insn emitted per emit_insn_before call.  */

  return last ? NEXT_INSN (last) : get_insns ();
}
7758 \f
/* Delete a previously made output-reload
   whose result we now believe is not needed.
   First we double-check.

   INSN is the insn now being processed.
   LAST_RELOAD_REG is the hard register number for which we want to delete
   the last output reload.
   J is the reload-number that originally used REG.  The caller has made
   certain that reload J doesn't use REG any longer for input.  */

static void
delete_output_reload (insn, j, last_reload_reg)
     rtx insn;
     int j;
     int last_reload_reg;
{
  /* The insn that stored the reload register back into REG.  */
  rtx output_reload_insn = spill_reg_store[last_reload_reg];
  /* The pseudo (or hard reg) the output reload stored to.  */
  rtx reg = spill_reg_stored_to[last_reload_reg];
  int k;
  int n_occurrences;
  /* Number of occurrences of REG in INSN accounted for by inherited
     reloads; deleting the store is only safe if inheritance accounts
     for every occurrence.  */
  int n_inherited = 0;
  register rtx i1;
  /* Equivalent memory location that may appear in INSN in place of REG.  */
  rtx substed;

  /* Get the raw pseudo-register referred to.  */

  while (GET_CODE (reg) == SUBREG)
    reg = SUBREG_REG (reg);
  substed = reg_equiv_memory_loc[REGNO (reg)];

  /* This is unsafe if the operand occurs more often in the current
     insn than it is inherited.  */
  for (k = n_reloads - 1; k >= 0; k--)
    {
      rtx reg2 = reload_in[k];
      if (! reg2)
	continue;
      if (GET_CODE (reg2) == MEM || reload_override_in[k])
	reg2 = reload_in_reg[k];
#ifdef AUTO_INC_DEC
      if (reload_out[k] && ! reload_out_reg[k])
	reg2 = XEXP (reload_in_reg[k], 0);
#endif
      while (GET_CODE (reg2) == SUBREG)
	reg2 = SUBREG_REG (reg2);
      if (rtx_equal_p (reg2, reg))
	{
	  if (reload_inherited[k] || reload_override_in[k] || k == j)
	    {
	      n_inherited++;
	      /* If this reload also stores back into REG, that output
		 occurrence is covered too.  */
	      reg2 = reload_out_reg[k];
	      if (! reg2)
		continue;
	      while (GET_CODE (reg2) == SUBREG)
		reg2 = XEXP (reg2, 0);
	      if (rtx_equal_p (reg2, reg))
		n_inherited++;
	    }
	  else
	    /* A non-inherited reload reads REG; the store is needed.  */
	    return;
	}
    }
  n_occurrences = count_occurrences (PATTERN (insn), reg);
  if (substed)
    n_occurrences += count_occurrences (PATTERN (insn), substed);
  if (n_occurrences > n_inherited)
    return;

  /* If the pseudo-reg we are reloading is no longer referenced
     anywhere between the store into it and here,
     and no jumps or labels intervene, then the value can get
     here through the reload reg alone.
     Otherwise, give up--return.  */
  for (i1 = NEXT_INSN (output_reload_insn);
       i1 != insn; i1 = NEXT_INSN (i1))
    {
      if (GET_CODE (i1) == CODE_LABEL || GET_CODE (i1) == JUMP_INSN)
	return;
      if ((GET_CODE (i1) == INSN || GET_CODE (i1) == CALL_INSN)
	  && reg_mentioned_p (reg, PATTERN (i1)))
	{
	  /* If this is USE in front of INSN, we only have to check that
	     there are no more references than accounted for by inheritance.  */
	  while (GET_CODE (i1) == INSN && GET_CODE (PATTERN (i1)) == USE)
	    {
	      n_occurrences += rtx_equal_p (reg, XEXP (PATTERN (i1), 0)) != 0;
	      i1 = NEXT_INSN (i1);
	    }
	  if (n_occurrences <= n_inherited && i1 == insn)
	    break;
	  return;
	}
    }

  /* The caller has already checked that REG dies or is set in INSN.
     It has also checked that we are optimizing, and thus some inaccuracies
     in the debugging information are acceptable.
     So we could just delete output_reload_insn.
     But in some cases we can improve the debugging information without
     sacrificing optimization - maybe even improving the code:
     See if the pseudo reg has been completely replaced
     with reload regs.  If so, delete the store insn
     and forget we had a stack slot for the pseudo.  */
  if (reload_out[j] != reload_in[j]
      && REG_N_DEATHS (REGNO (reg)) == 1
      && REG_BASIC_BLOCK (REGNO (reg)) >= 0
      && find_regno_note (insn, REG_DEAD, REGNO (reg)))
    {
      rtx i2;

      /* We know that it was used only between here
	 and the beginning of the current basic block.
	 (We also know that the last use before INSN was
	 the output reload we are thinking of deleting, but never mind that.)
	 Search that range; see if any ref remains.  */
      for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
	{
	  rtx set = single_set (i2);

	  /* Uses which just store in the pseudo don't count,
	     since if they are the only uses, they are dead.  */
	  if (set != 0 && SET_DEST (set) == reg)
	    continue;
	  if (GET_CODE (i2) == CODE_LABEL
	      || GET_CODE (i2) == JUMP_INSN)
	    break;
	  if ((GET_CODE (i2) == INSN || GET_CODE (i2) == CALL_INSN)
	      && reg_mentioned_p (reg, PATTERN (i2)))
	    {
	      /* Some other ref remains; just delete the output reload we
		 know to be dead.  */
	      delete_address_reloads (output_reload_insn, insn);
	      PUT_CODE (output_reload_insn, NOTE);
	      NOTE_SOURCE_FILE (output_reload_insn) = 0;
	      NOTE_LINE_NUMBER (output_reload_insn) = NOTE_INSN_DELETED;
	      return;
	    }
	}

      /* Delete the now-dead stores into this pseudo.  */
      for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
	{
	  rtx set = single_set (i2);

	  if (set != 0 && SET_DEST (set) == reg)
	    {
	      delete_address_reloads (i2, insn);
	      /* This might be a basic block head,
		 thus don't use delete_insn.  */
	      PUT_CODE (i2, NOTE);
	      NOTE_SOURCE_FILE (i2) = 0;
	      NOTE_LINE_NUMBER (i2) = NOTE_INSN_DELETED;
	    }
	  if (GET_CODE (i2) == CODE_LABEL
	      || GET_CODE (i2) == JUMP_INSN)
	    break;
	}

      /* For the debugging info,
	 say the pseudo lives in this reload reg.  */
      reg_renumber[REGNO (reg)] = REGNO (reload_reg_rtx[j]);
      alter_reg (REGNO (reg), -1);
    }
  /* Finally turn the output reload insn itself into a deleted NOTE,
     first removing any address reloads that fed only it.  */
  delete_address_reloads (output_reload_insn, insn);
  PUT_CODE (output_reload_insn, NOTE);
  NOTE_SOURCE_FILE (output_reload_insn) = 0;
  NOTE_LINE_NUMBER (output_reload_insn) = NOTE_INSN_DELETED;

}
7928
7929 /* We are going to delete DEAD_INSN. Recursively delete loads of
7930 reload registers used in DEAD_INSN that are not used till CURRENT_INSN.
7931 CURRENT_INSN is being reloaded, so we have to check its reloads too. */
7932 static void
7933 delete_address_reloads (dead_insn, current_insn)
7934 rtx dead_insn, current_insn;
7935 {
7936 rtx set = single_set (dead_insn);
7937 rtx set2, dst, prev, next;
7938 if (set)
7939 {
7940 rtx dst = SET_DEST (set);
7941 if (GET_CODE (dst) == MEM)
7942 delete_address_reloads_1 (dead_insn, XEXP (dst, 0), current_insn);
7943 }
7944 /* If we deleted the store from a reloaded post_{in,de}c expression,
7945 we can delete the matching adds. */
7946 prev = PREV_INSN (dead_insn);
7947 next = NEXT_INSN (dead_insn);
7948 if (! prev || ! next)
7949 return;
7950 set = single_set (next);
7951 set2 = single_set (prev);
7952 if (! set || ! set2
7953 || GET_CODE (SET_SRC (set)) != PLUS || GET_CODE (SET_SRC (set2)) != PLUS
7954 || GET_CODE (XEXP (SET_SRC (set), 1)) != CONST_INT
7955 || GET_CODE (XEXP (SET_SRC (set2), 1)) != CONST_INT)
7956 return;
7957 dst = SET_DEST (set);
7958 if (! rtx_equal_p (dst, SET_DEST (set2))
7959 || ! rtx_equal_p (dst, XEXP (SET_SRC (set), 0))
7960 || ! rtx_equal_p (dst, XEXP (SET_SRC (set2), 0))
7961 || (INTVAL (XEXP (SET_SRC (set), 1))
7962 != - INTVAL (XEXP (SET_SRC (set2), 1))))
7963 return;
7964 delete_insn (prev);
7965 delete_insn (next);
7966 }
7967
/* Subfunction of delete_address_reloads: process registers found in X.
   For each reload register mentioned in X, find the reload insn that
   loaded it; if that load is now unnecessary, delete it (recursively
   deleting the address reloads that fed it in turn).  */
static void
delete_address_reloads_1 (dead_insn, x, current_insn)
     rtx dead_insn, x, current_insn;
{
  rtx prev, set, dst, i2;
  int i, j;
  enum rtx_code code = GET_CODE (x);

  /* For anything but a bare REG, recurse into the sub-expressions.  */
  if (code != REG)
    {
      char *fmt = GET_RTX_FORMAT (code);
      for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
	{
	  if (fmt[i] == 'e')
	    delete_address_reloads_1 (dead_insn, XEXP (x, i), current_insn);
	  else if (fmt[i] == 'E')
	    {
	      for (j = XVECLEN (x, i) - 1; j >=0; j--)
		delete_address_reloads_1 (dead_insn, XVECEXP (x, i, j),
					  current_insn);
	    }
	}
      return;
    }

  /* Only spill registers can have been loaded by reload insns.  */
  if (spill_reg_order[REGNO (x)] < 0)
    return;

  /* Scan backwards for the insn that sets x.  This might be a way back due
     to inheritance.  */
  for (prev = PREV_INSN (dead_insn); prev; prev = PREV_INSN (prev))
    {
      code = GET_CODE (prev);
      if (code == CODE_LABEL || code == JUMP_INSN)
	return;
      if (GET_RTX_CLASS (code) != 'i')
	continue;
      if (reg_set_p (x, PATTERN (prev)))
	break;
      /* An intervening use of X means the setter is still needed.  */
      if (reg_referenced_p (x, PATTERN (prev)))
	return;
    }
  /* The setter must itself be a reload-generated insn; insns emitted by
     reload have UIDs >= reload_first_uid.  */
  if (! prev || INSN_UID (prev) < reload_first_uid)
    return;
  /* Check that PREV only sets the reload register.  */
  set = single_set (prev);
  if (! set)
    return;
  dst = SET_DEST (set);
  if (GET_CODE (dst) != REG
      || ! rtx_equal_p (dst, x))
    return;
  if (! reg_set_p (dst, PATTERN (dead_insn)))
    {
      /* Check if DST was used in a later insn -
	 it might have been inherited.  */
      for (i2 = NEXT_INSN (dead_insn); i2; i2 = NEXT_INSN (i2))
	{
	  if (GET_CODE (i2) == CODE_LABEL)
	    break;
	  if (GET_RTX_CLASS (GET_CODE (i2)) != 'i')
	    continue;
	  if (reg_referenced_p (dst, PATTERN (i2)))
	    {
	      /* If there is a reference to the register in the current insn,
		 it might be loaded in a non-inherited reload.  If no other
		 reload uses it, that means the register is set before
		 referenced.  */
	      if (i2 == current_insn)
		{
		  for (j = n_reloads - 1; j >= 0; j--)
		    if ((reload_reg_rtx[j] == dst && reload_inherited[j])
			|| reload_override_in[j] == dst)
		      return;
		  for (j = n_reloads - 1; j >= 0; j--)
		    if (reload_in[j] && reload_reg_rtx[j] == dst)
		      break;
		  if (j >= 0)
		    break;
		}
	      return;
	    }
	  if (GET_CODE (i2) == JUMP_INSN)
	    break;
	  if (reg_set_p (dst, PATTERN (i2)))
	    break;
	  /* If DST is still live at CURRENT_INSN, check if it is used for
	     any reload.  */
	  if (i2 == current_insn)
	    {
	      for (j = n_reloads - 1; j >= 0; j--)
		if ((reload_reg_rtx[j] == dst && reload_inherited[j])
		    || reload_override_in[j] == dst)
		  return;
	      /* ??? We can't finish the loop here, because dst might be
		 allocated to a pseudo in this block if no reload in this
		 block needs any of the classes containing DST - see
		 spill_hard_reg.  There is no easy way to tell this, so we
		 have to scan till the end of the basic block.  */
	    }
	}
    }
  /* PREV's load is dead: first delete the address reloads that fed its
     source, then turn PREV itself into a deleted NOTE.  */
  delete_address_reloads_1 (prev, SET_SRC (set), current_insn);
  reg_reloaded_contents[REGNO (dst)] = -1;
  /* Can't use delete_insn here because PREV might be a basic block head.  */
  PUT_CODE (prev, NOTE);
  NOTE_LINE_NUMBER (prev) = NOTE_INSN_DELETED;
  NOTE_SOURCE_FILE (prev) = 0;
}
8078 \f
/* Output reload-insns to reload VALUE into RELOADREG.
   VALUE is an autoincrement or autodecrement RTX whose operand
   is a register or memory location;
   so reloading involves incrementing that location.
   IN is either identical to VALUE, or some cheaper place to reload from.

   INC_AMOUNT is the number to increment or decrement by (always positive).
   This cannot be deduced from VALUE.

   Return the instruction that stores into RELOADREG.  */

static rtx
inc_for_reload (reloadreg, in, value, inc_amount)
     rtx reloadreg;
     rtx in, value;
     int inc_amount;
{
  /* REG or MEM to be copied and incremented.  */
  rtx incloc = XEXP (value, 0);
  /* Nonzero if increment after copying.  */
  int post = (GET_CODE (value) == POST_DEC || GET_CODE (value) == POST_INC);
  rtx last;
  rtx inc;
  rtx add_insn;
  int code;
  /* The insn that stores into RELOADREG; this is what we return.  */
  rtx store;
  /* The actual place to copy from: strip the inc/dec wrapper when IN
     is the VALUE expression itself.  */
  rtx real_in = in == value ? XEXP (in, 0) : in;

  /* No hard register is equivalent to this register after
     inc/dec operation.  If REG_LAST_RELOAD_REG were non-zero,
     we could inc/dec that register as well (maybe even using it for
     the source), but I'm not sure it's worth worrying about.  */
  if (GET_CODE (incloc) == REG)
    reg_last_reload_reg[REGNO (incloc)] = 0;

  /* A decrement is just a negative increment.  */
  if (GET_CODE (value) == PRE_DEC || GET_CODE (value) == POST_DEC)
    inc_amount = - inc_amount;

  inc = GEN_INT (inc_amount);

  /* If this is post-increment, first copy the location to the reload reg.  */
  if (post && real_in != reloadreg)
    emit_insn (gen_move_insn (reloadreg, real_in));

  if (in == value)
    {
      /* See if we can directly increment INCLOC.  Use a method similar to
	 that in gen_reload.  */

      last = get_last_insn ();
      add_insn = emit_insn (gen_rtx_SET (VOIDmode, incloc,
					 gen_rtx_PLUS (GET_MODE (incloc),
						       incloc, inc)));

      code = recog_memoized (add_insn);
      if (code >= 0)
	{
	  insn_extract (add_insn);
	  /* Constrain strictly, the way it would be after reload.  */
	  if (constrain_operands (code, 1))
	    {
	      /* If this is a pre-increment and we have incremented the value
		 where it lives, copy the incremented value to RELOADREG to
		 be used as an address.  */

	      if (! post)
		emit_insn (gen_move_insn (reloadreg, incloc));

	      return add_insn;
	    }
	}
      /* The direct increment was not valid; scrap the attempt.  */
      delete_insns_since (last);
    }

  /* If couldn't do the increment directly, must increment in RELOADREG.
     The way we do this depends on whether this is pre- or post-increment.
     For pre-increment, copy INCLOC to the reload register, increment it
     there, then save back.  */

  if (! post)
    {
      /* NOTE(review): this compares IN, not REAL_IN, against RELOADREG
	 before emitting the copy -- confirm that is intended.  */
      if (in != reloadreg)
	emit_insn (gen_move_insn (reloadreg, real_in));
      emit_insn (gen_add2_insn (reloadreg, inc));
      store = emit_insn (gen_move_insn (incloc, reloadreg));
    }
  else
    {
      /* Postincrement.
	 Because this might be a jump insn or a compare, and because RELOADREG
	 may not be available after the insn in an input reload, we must do
	 the incrementation before the insn being reloaded for.

	 We have already copied IN to RELOADREG.  Increment the copy in
	 RELOADREG, save that back, then decrement RELOADREG so it has
	 the original value.  */

      emit_insn (gen_add2_insn (reloadreg, inc));
      store = emit_insn (gen_move_insn (incloc, reloadreg));
      emit_insn (gen_add2_insn (reloadreg, GEN_INT (-inc_amount)));
    }

  return store;
}
8182 \f
8183 /* Return 1 if we are certain that the constraint-string STRING allows
8184 the hard register REG. Return 0 if we can't be sure of this. */
8185
8186 static int
8187 constraint_accepts_reg_p (string, reg)
8188 char *string;
8189 rtx reg;
8190 {
8191 int value = 0;
8192 int regno = true_regnum (reg);
8193 int c;
8194
8195 /* Initialize for first alternative. */
8196 value = 0;
8197 /* Check that each alternative contains `g' or `r'. */
8198 while (1)
8199 switch (c = *string++)
8200 {
8201 case 0:
8202 /* If an alternative lacks `g' or `r', we lose. */
8203 return value;
8204 case ',':
8205 /* If an alternative lacks `g' or `r', we lose. */
8206 if (value == 0)
8207 return 0;
8208 /* Initialize for next alternative. */
8209 value = 0;
8210 break;
8211 case 'g':
8212 case 'r':
8213 /* Any general reg wins for this alternative. */
8214 if (TEST_HARD_REG_BIT (reg_class_contents[(int) GENERAL_REGS], regno))
8215 value = 1;
8216 break;
8217 default:
8218 /* Any reg in specified class wins for this alternative. */
8219 {
8220 enum reg_class class = REG_CLASS_FROM_LETTER (c);
8221
8222 if (TEST_HARD_REG_BIT (reg_class_contents[(int) class], regno))
8223 value = 1;
8224 }
8225 }
8226 }
8227 \f
8228 /* Return the number of places FIND appears within X, but don't count
8229 an occurrence if some SET_DEST is FIND. */
8230
8231 int
8232 count_occurrences (x, find)
8233 register rtx x, find;
8234 {
8235 register int i, j;
8236 register enum rtx_code code;
8237 register char *format_ptr;
8238 int count;
8239
8240 if (x == find)
8241 return 1;
8242 if (x == 0)
8243 return 0;
8244
8245 code = GET_CODE (x);
8246
8247 switch (code)
8248 {
8249 case REG:
8250 case QUEUED:
8251 case CONST_INT:
8252 case CONST_DOUBLE:
8253 case SYMBOL_REF:
8254 case CODE_LABEL:
8255 case PC:
8256 case CC0:
8257 return 0;
8258
8259 case MEM:
8260 if (GET_CODE (find) == MEM && rtx_equal_p (x, find))
8261 return 1;
8262 break;
8263 case SET:
8264 if (SET_DEST (x) == find)
8265 return count_occurrences (SET_SRC (x), find);
8266 break;
8267
8268 default:
8269 break;
8270 }
8271
8272 format_ptr = GET_RTX_FORMAT (code);
8273 count = 0;
8274
8275 for (i = 0; i < GET_RTX_LENGTH (code); i++)
8276 {
8277 switch (*format_ptr++)
8278 {
8279 case 'e':
8280 count += count_occurrences (XEXP (x, i), find);
8281 break;
8282
8283 case 'E':
8284 if (XVEC (x, i) != NULL)
8285 {
8286 for (j = 0; j < XVECLEN (x, i); j++)
8287 count += count_occurrences (XVECEXP (x, i, j), find);
8288 }
8289 break;
8290 }
8291 }
8292 return count;
8293 }
8294 \f
8295 /* This array holds values which are equivalent to a hard register
8296 during reload_cse_regs. Each array element is an EXPR_LIST of
8297 values. Each time a hard register is set, we set the corresponding
8298 array element to the value. Each time a hard register is copied
8299 into memory, we add the memory location to the corresponding array
8300 element. We don't store values or memory addresses with side
8301 effects in this array.
8302
8303 If the value is a CONST_INT, then the mode of the containing
8304 EXPR_LIST is the mode in which that CONST_INT was referenced.
8305
8306 We sometimes clobber a specific entry in a list. In that case, we
8307 just set XEXP (list-entry, 0) to 0. */
8308
8309 static rtx *reg_values;
8310
8311 /* This is a preallocated REG rtx which we use as a temporary in
8312 reload_cse_invalidate_regno, so that we don't need to allocate a
8313 new one each time through a loop in that function. */
8314
8315 static rtx invalidate_regno_rtx;
8316
/* Invalidate any entries in reg_values which depend on REGNO,
   including those for REGNO itself.  This is called if REGNO is
   changing.  If CLOBBER is true, then always forget anything we
   currently know about REGNO.  MODE is the mode of the assignment to
   REGNO, which is used to determine how many hard registers are being
   changed.  If MODE is VOIDmode, then only REGNO is being changed;
   this is used when invalidating call clobbered registers across a
   call.  */

static void
reload_cse_invalidate_regno (regno, mode, clobber)
     int regno;
     enum machine_mode mode;
     int clobber;
{
  int endregno;
  register int i;

  /* Our callers don't always go through true_regnum; we may see a
     pseudo-register here from a CLOBBER or the like.  We probably
     won't ever see a pseudo-register that has a real register number,
     but we check anyhow for safety.  */
  if (regno >= FIRST_PSEUDO_REGISTER)
    regno = reg_renumber[regno];
  if (regno < 0)
    return;

  /* Compute the range [REGNO, ENDREGNO) of hard registers affected.  */
  if (mode == VOIDmode)
    endregno = regno + 1;
  else
    endregno = regno + HARD_REGNO_NREGS (regno, mode);

  /* When clobbering, drop everything recorded for the affected
     registers themselves.  */
  if (clobber)
    for (i = regno; i < endregno; i++)
      reg_values[i] = 0;

  /* Remove from every register's value list any value that mentions
     one of the changed registers.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      rtx x;

      for (x = reg_values[i]; x; x = XEXP (x, 1))
	{
	  if (XEXP (x, 0) != 0
	      && refers_to_regno_p (regno, endregno, XEXP (x, 0), NULL_PTR))
	    {
	      /* If this is the only entry on the list, clear
		 reg_values[i].  Otherwise, just clear this entry on
		 the list.  */
	      if (XEXP (x, 1) == 0 && x == reg_values[i])
		{
		  reg_values[i] = 0;
		  break;
		}
	      XEXP (x, 0) = 0;
	    }
	}
    }

  /* We must look at earlier registers, in case REGNO is part of a
     multi word value but is not the first register.  If an earlier
     register has a value in a mode which overlaps REGNO, then we must
     invalidate that earlier register.  Note that we do not need to
     check REGNO or later registers (we must not check REGNO itself,
     because we would incorrectly conclude that there was a conflict).  */

  for (i = 0; i < regno; i++)
    {
      rtx x;

      for (x = reg_values[i]; x; x = XEXP (x, 1))
	{
	  if (XEXP (x, 0) != 0)
	    {
	      /* Reuse the preallocated REG rtx to stand for a reference
		 to register I in the mode recorded on this list entry,
		 then test it for overlap with the changed range.  */
	      PUT_MODE (invalidate_regno_rtx, GET_MODE (x));
	      REGNO (invalidate_regno_rtx) = i;
	      if (refers_to_regno_p (regno, endregno, invalidate_regno_rtx,
				     NULL_PTR))
		{
		  reload_cse_invalidate_regno (i, VOIDmode, 1);
		  break;
		}
	    }
	}
    }
}
8402
8403 /* The memory at address MEM_BASE is being changed.
8404 Return whether this change will invalidate VAL. */
8405
8406 static int
8407 reload_cse_mem_conflict_p (mem_base, val)
8408 rtx mem_base;
8409 rtx val;
8410 {
8411 enum rtx_code code;
8412 char *fmt;
8413 int i;
8414
8415 code = GET_CODE (val);
8416 switch (code)
8417 {
8418 /* Get rid of a few simple cases quickly. */
8419 case REG:
8420 case PC:
8421 case CC0:
8422 case SCRATCH:
8423 case CONST:
8424 case CONST_INT:
8425 case CONST_DOUBLE:
8426 case SYMBOL_REF:
8427 case LABEL_REF:
8428 return 0;
8429
8430 case MEM:
8431 if (GET_MODE (mem_base) == BLKmode
8432 || GET_MODE (val) == BLKmode)
8433 return 1;
8434 if (anti_dependence (val, mem_base))
8435 return 1;
8436 /* The address may contain nested MEMs. */
8437 break;
8438
8439 default:
8440 break;
8441 }
8442
8443 fmt = GET_RTX_FORMAT (code);
8444
8445 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
8446 {
8447 if (fmt[i] == 'e')
8448 {
8449 if (reload_cse_mem_conflict_p (mem_base, XEXP (val, i)))
8450 return 1;
8451 }
8452 else if (fmt[i] == 'E')
8453 {
8454 int j;
8455
8456 for (j = 0; j < XVECLEN (val, i); j++)
8457 if (reload_cse_mem_conflict_p (mem_base, XVECEXP (val, i, j)))
8458 return 1;
8459 }
8460 }
8461
8462 return 0;
8463 }
8464
8465 /* Invalidate any entries in reg_values which are changed because of a
8466 store to MEM_RTX. If this is called because of a non-const call
8467 instruction, MEM_RTX is (mem:BLK const0_rtx). */
8468
8469 static void
8470 reload_cse_invalidate_mem (mem_rtx)
8471 rtx mem_rtx;
8472 {
8473 register int i;
8474
8475 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
8476 {
8477 rtx x;
8478
8479 for (x = reg_values[i]; x; x = XEXP (x, 1))
8480 {
8481 if (XEXP (x, 0) != 0
8482 && reload_cse_mem_conflict_p (mem_rtx, XEXP (x, 0)))
8483 {
8484 /* If this is the only entry on the list, clear
8485 reg_values[i]. Otherwise, just clear this entry on
8486 the list. */
8487 if (XEXP (x, 1) == 0 && x == reg_values[i])
8488 {
8489 reg_values[i] = 0;
8490 break;
8491 }
8492 XEXP (x, 0) = 0;
8493 }
8494 }
8495 }
8496 }
8497
8498 /* Invalidate DEST, which is being assigned to or clobbered. The
8499 second parameter exists so that this function can be passed to
8500 note_stores; it is ignored. */
8501
8502 static void
8503 reload_cse_invalidate_rtx (dest, ignore)
8504 rtx dest;
8505 rtx ignore ATTRIBUTE_UNUSED;
8506 {
8507 while (GET_CODE (dest) == STRICT_LOW_PART
8508 || GET_CODE (dest) == SIGN_EXTRACT
8509 || GET_CODE (dest) == ZERO_EXTRACT
8510 || GET_CODE (dest) == SUBREG)
8511 dest = XEXP (dest, 0);
8512
8513 if (GET_CODE (dest) == REG)
8514 reload_cse_invalidate_regno (REGNO (dest), GET_MODE (dest), 1);
8515 else if (GET_CODE (dest) == MEM)
8516 reload_cse_invalidate_mem (dest);
8517 }
8518
/* Do a very simple CSE pass over the hard registers.

   This function detects no-op moves where we happened to assign two
   different pseudo-registers to the same hard register, and then
   copied one to the other.  Reload will generate a useless
   instruction copying a register to itself.

   This function also detects cases where we load a value from memory
   into two different registers, and (if memory is more expensive than
   registers) changes it to simply copy the first register into the
   second register.

   Another optimization is performed that scans the operands of each
   instruction to see whether the value is already available in a
   hard register.  It then replaces the operand with the hard register
   if possible, much like an optional reload would.  */

static void
reload_cse_regs_1 (first)
     rtx first;
{
  char *firstobj;
  rtx callmem;
  register int i;
  rtx insn;

  init_alias_analysis ();

  /* Per-hard-register value lists; lives only for this pass (alloca).  */
  reg_values = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
  bzero ((char *)reg_values, FIRST_PSEUDO_REGISTER * sizeof (rtx));

  /* Create our EXPR_LIST structures on reload_obstack, so that we can
     free them when we are done.  */
  push_obstacks (&reload_obstack, &reload_obstack);
  firstobj = (char *) obstack_alloc (&reload_obstack, 0);

  /* We pass this to reload_cse_invalidate_mem to invalidate all of
     memory for a non-const call instruction.  */
  callmem = gen_rtx_MEM (BLKmode, const0_rtx);

  /* This is used in reload_cse_invalidate_regno to avoid consing a
     new REG in a loop in that function.  */
  invalidate_regno_rtx = gen_rtx_REG (VOIDmode, 0);

  for (insn = first; insn; insn = NEXT_INSN (insn))
    {
      rtx body;

      if (GET_CODE (insn) == CODE_LABEL)
	{
	  /* Forget all the register values at a code label.  We don't
	     try to do anything clever around jumps.  */
	  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	    reg_values[i] = 0;

	  continue;
	}

#ifdef NON_SAVING_SETJMP
      /* A setjmp that does not save registers likewise invalidates
	 everything we have recorded.  */
      if (NON_SAVING_SETJMP && GET_CODE (insn) == NOTE
	  && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
	{
	  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	    reg_values[i] = 0;

	  continue;
	}
#endif

      /* Only real instructions carry patterns we can analyze.  */
      if (GET_RTX_CLASS (GET_CODE (insn)) != 'i')
	continue;

      /* If this is a call instruction, forget anything stored in a
	 call clobbered register, or, if this is not a const call, in
	 memory.  */
      if (GET_CODE (insn) == CALL_INSN)
	{
	  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	    if (call_used_regs[i])
	      reload_cse_invalidate_regno (i, VOIDmode, 1);

	  if (! CONST_CALL_P (insn))
	    reload_cse_invalidate_mem (callmem);
	}

      body = PATTERN (insn);
      if (GET_CODE (body) == SET)
	{
	  int count = 0;
	  if (reload_cse_noop_set_p (body, insn))
	    {
	      /* If this sets the return value of the function, we must keep
		 a USE around, in case this is in a different basic block
		 than the final USE.  Otherwise, we could lose important
		 register liveness information on SMALL_REGISTER_CLASSES
		 machines, where return registers might be used as spills:
		 subsequent passes assume that spill registers are dead at
		 the end of a basic block.  */
	      if (REG_FUNCTION_VALUE_P (SET_DEST (body)))
		{
		  /* Pop to the real obstacks while rewriting the insn.  */
		  pop_obstacks ();
		  PATTERN (insn) = gen_rtx_USE (VOIDmode, SET_DEST (body));
		  INSN_CODE (insn) = -1;
		  REG_NOTES (insn) = NULL_RTX;
		  push_obstacks (&reload_obstack, &reload_obstack);
		}
	      else
		{
		  /* Turn the no-op move into a deleted-insn note.  */
		  PUT_CODE (insn, NOTE);
		  NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
		  NOTE_SOURCE_FILE (insn) = 0;
		}

	      /* We're done with this insn.  */
	      continue;
	    }

	  /* It's not a no-op, but we can try to simplify it.  */
	  count += reload_cse_simplify_set (body, insn);

	  if (count > 0)
	    apply_change_group ();
	  else
	    reload_cse_simplify_operands (insn);

	  reload_cse_record_set (body, body);
	}
      else if (GET_CODE (body) == PARALLEL)
	{
	  int count = 0;
	  rtx value = NULL_RTX;

	  /* If every action in a PARALLEL is a noop, we can delete
	     the entire PARALLEL.  */
	  for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
	    {
	      rtx part = XVECEXP (body, 0, i);
	      if (GET_CODE (part) == SET)
		{
		  if (! reload_cse_noop_set_p (part, insn))
		    break;
		  /* At most one set of a function-value register can be
		     preserved via a USE (see the SET case above).  */
		  if (REG_FUNCTION_VALUE_P (SET_DEST (part)))
		    {
		      if (value)
			break;
		      value = SET_DEST (part);
		    }
		}
	      else if (GET_CODE (part) != CLOBBER)
		break;
	    }
	  /* I < 0 means the scan above ran off the end: all parts were
	     no-op SETs or CLOBBERs, so the insn can go away.  */
	  if (i < 0)
	    {
	      if (value)
		{
		  pop_obstacks ();
		  PATTERN (insn) = gen_rtx_USE (VOIDmode, value);
		  INSN_CODE (insn) = -1;
		  REG_NOTES (insn) = NULL_RTX;
		  push_obstacks (&reload_obstack, &reload_obstack);
		}
	      else
		{
		  PUT_CODE (insn, NOTE);
		  NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
		  NOTE_SOURCE_FILE (insn) = 0;
		}

	      /* We're done with this insn.  */
	      continue;
	    }

	  /* It's not a no-op, but we can try to simplify it.  */
	  for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
	    if (GET_CODE (XVECEXP (body, 0, i)) == SET)
	      count += reload_cse_simplify_set (XVECEXP (body, 0, i), insn);

	  if (count > 0)
	    apply_change_group ();
	  else
	    reload_cse_simplify_operands (insn);

	  /* Look through the PARALLEL and record the values being
	     set, if possible.  Also handle any CLOBBERs.  */
	  for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
	    {
	      rtx x = XVECEXP (body, 0, i);

	      if (GET_CODE (x) == SET)
		reload_cse_record_set (x, body);
	      else
		note_stores (x, reload_cse_invalidate_rtx);
	    }
	}
      else
	/* Some other pattern (e.g. USE, CLOBBER, ASM): just invalidate
	   everything it stores into.  */
	note_stores (body, reload_cse_invalidate_rtx);

#ifdef AUTO_INC_DEC
      /* Clobber any registers which appear in REG_INC notes.  We
	 could keep track of the changes to their values, but it is
	 unlikely to help.  */
      {
	rtx x;

	for (x = REG_NOTES (insn); x; x = XEXP (x, 1))
	  if (REG_NOTE_KIND (x) == REG_INC)
	    reload_cse_invalidate_rtx (XEXP (x, 0), NULL_RTX);
      }
#endif

      /* Look for any CLOBBERs in CALL_INSN_FUNCTION_USAGE, but only
	 after we have processed the insn.  */
      if (GET_CODE (insn) == CALL_INSN)
	{
	  rtx x;

	  for (x = CALL_INSN_FUNCTION_USAGE (insn); x; x = XEXP (x, 1))
	    if (GET_CODE (XEXP (x, 0)) == CLOBBER)
	      reload_cse_invalidate_rtx (XEXP (XEXP (x, 0), 0), NULL_RTX);
	}
    }

  /* Free all the temporary structures we created, and go back to the
     regular obstacks.  */
  obstack_free (&reload_obstack, firstobj);
  pop_obstacks ();
}
8746
/* Call cse / combine like post-reload optimization phases.
   FIRST is the first instruction.  */
void
reload_cse_regs (first)
     rtx first;
{
  /* Run the hard-register CSE pass, then the post-reload combine and
     move2add passes.  */
  reload_cse_regs_1 (first);
  reload_combine ();
  reload_cse_move2add (first);
  /* Run the CSE pass a second time, but only when expensive
     optimizations are enabled -- presumably the two passes above can
     expose further redundancies (TODO confirm).  */
  if (flag_expensive_optimizations)
    reload_cse_regs_1 (first);
}
8759
8760 /* Return whether the values known for REGNO are equal to VAL. MODE
8761 is the mode of the object that VAL is being copied to; this matters
8762 if VAL is a CONST_INT. */
8763
8764 static int
8765 reload_cse_regno_equal_p (regno, val, mode)
8766 int regno;
8767 rtx val;
8768 enum machine_mode mode;
8769 {
8770 rtx x;
8771
8772 if (val == 0)
8773 return 0;
8774
8775 for (x = reg_values[regno]; x; x = XEXP (x, 1))
8776 if (XEXP (x, 0) != 0
8777 && rtx_equal_p (XEXP (x, 0), val)
8778 && (! flag_float_store || GET_CODE (XEXP (x, 0)) != MEM
8779 || GET_MODE_CLASS (GET_MODE (x)) != MODE_FLOAT)
8780 && (GET_CODE (val) != CONST_INT
8781 || mode == GET_MODE (x)
8782 || (GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (x))
8783 /* On a big endian machine if the value spans more than
8784 one register then this register holds the high part of
8785 it and we can't use it.
8786
8787 ??? We should also compare with the high part of the
8788 value. */
8789 && !(WORDS_BIG_ENDIAN
8790 && HARD_REGNO_NREGS (regno, GET_MODE (x)) > 1)
8791 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
8792 GET_MODE_BITSIZE (GET_MODE (x))))))
8793 return 1;
8794
8795 return 0;
8796 }
8797
8798 /* See whether a single set is a noop. SET is the set instruction we
8799 are should check, and INSN is the instruction from which it came. */
8800
8801 static int
8802 reload_cse_noop_set_p (set, insn)
8803 rtx set;
8804 rtx insn;
8805 {
8806 rtx src, dest;
8807 enum machine_mode dest_mode;
8808 int dreg, sreg;
8809 int ret;
8810
8811 src = SET_SRC (set);
8812 dest = SET_DEST (set);
8813 dest_mode = GET_MODE (dest);
8814
8815 if (side_effects_p (src))
8816 return 0;
8817
8818 dreg = true_regnum (dest);
8819 sreg = true_regnum (src);
8820
8821 /* Check for setting a register to itself. In this case, we don't
8822 have to worry about REG_DEAD notes. */
8823 if (dreg >= 0 && dreg == sreg)
8824 return 1;
8825
8826 ret = 0;
8827 if (dreg >= 0)
8828 {
8829 /* Check for setting a register to itself. */
8830 if (dreg == sreg)
8831 ret = 1;
8832
8833 /* Check for setting a register to a value which we already know
8834 is in the register. */
8835 else if (reload_cse_regno_equal_p (dreg, src, dest_mode))
8836 ret = 1;
8837
8838 /* Check for setting a register DREG to another register SREG
8839 where SREG is equal to a value which is already in DREG. */
8840 else if (sreg >= 0)
8841 {
8842 rtx x;
8843
8844 for (x = reg_values[sreg]; x; x = XEXP (x, 1))
8845 {
8846 rtx tmp;
8847
8848 if (XEXP (x, 0) == 0)
8849 continue;
8850
8851 if (dest_mode == GET_MODE (x))
8852 tmp = XEXP (x, 0);
8853 else if (GET_MODE_BITSIZE (dest_mode)
8854 < GET_MODE_BITSIZE (GET_MODE (x)))
8855 tmp = gen_lowpart_common (dest_mode, XEXP (x, 0));
8856 else
8857 continue;
8858
8859 if (tmp
8860 && reload_cse_regno_equal_p (dreg, tmp, dest_mode))
8861 {
8862 ret = 1;
8863 break;
8864 }
8865 }
8866 }
8867 }
8868 else if (GET_CODE (dest) == MEM)
8869 {
8870 /* Check for storing a register to memory when we know that the
8871 register is equivalent to the memory location. */
8872 if (sreg >= 0
8873 && reload_cse_regno_equal_p (sreg, dest, dest_mode)
8874 && ! side_effects_p (dest))
8875 ret = 1;
8876 }
8877
8878 return ret;
8879 }
8880
8881 /* Try to simplify a single SET instruction. SET is the set pattern.
8882 INSN is the instruction it came from.
8883 This function only handles one case: if we set a register to a value
8884 which is not a register, we try to find that value in some other register
8885 and change the set into a register copy. */
8886
8887 static int
8888 reload_cse_simplify_set (set, insn)
8889 rtx set;
8890 rtx insn;
8891 {
8892 int dreg;
8893 rtx src;
8894 enum machine_mode dest_mode;
8895 enum reg_class dclass;
8896 register int i;
8897
8898 dreg = true_regnum (SET_DEST (set));
8899 if (dreg < 0)
8900 return 0;
8901
8902 src = SET_SRC (set);
8903 if (side_effects_p (src) || true_regnum (src) >= 0)
8904 return 0;
8905
8906 dclass = REGNO_REG_CLASS (dreg);
8907
8908 /* If memory loads are cheaper than register copies, don't change them. */
8909 if (GET_CODE (src) == MEM
8910 && MEMORY_MOVE_COST (GET_MODE (src), dclass, 1) < 2)
8911 return 0;
8912
8913 /* If the constant is cheaper than a register, don't change it. */
8914 if (CONSTANT_P (src)
8915 && rtx_cost (src, SET) < 2)
8916 return 0;
8917
8918 dest_mode = GET_MODE (SET_DEST (set));
8919 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
8920 {
8921 if (i != dreg
8922 && REGISTER_MOVE_COST (REGNO_REG_CLASS (i), dclass) == 2
8923 && reload_cse_regno_equal_p (i, src, dest_mode))
8924 {
8925 int validated;
8926
8927 /* Pop back to the real obstacks while changing the insn. */
8928 pop_obstacks ();
8929
8930 validated = validate_change (insn, &SET_SRC (set),
8931 gen_rtx_REG (dest_mode, i), 1);
8932
8933 /* Go back to the obstack we are using for temporary
8934 storage. */
8935 push_obstacks (&reload_obstack, &reload_obstack);
8936
8937 if (validated)
8938 return 1;
8939 }
8940 }
8941 return 0;
8942 }
8943
8944 /* Try to replace operands in INSN with equivalent values that are already
8945 in registers. This can be viewed as optional reloading.
8946
8947 For each non-register operand in the insn, see if any hard regs are
8948 known to be equivalent to that operand. Record the alternatives which
8949 can accept these hard registers. Among all alternatives, select the
8950 ones which are better or equal to the one currently matching, where
8951 "better" is in terms of '?' and '!' constraints. Among the remaining
8952 alternatives, select the one which replaces most operands with
8953 hard registers. */
8954
8955 static int
8956 reload_cse_simplify_operands (insn)
8957 rtx insn;
8958 {
8959 #ifdef REGISTER_CONSTRAINTS
8960 int insn_code_number, n_operands, n_alternatives;
8961 int i,j;
8962
8963 char *constraints[MAX_RECOG_OPERANDS];
8964
8965 /* Vector recording how bad an alternative is. */
8966 int *alternative_reject;
8967 /* Vector recording how many registers can be introduced by choosing
8968 this alternative. */
8969 int *alternative_nregs;
8970 /* Array of vectors recording, for each operand and each alternative,
8971 which hard register to substitute, or -1 if the operand should be
8972 left as it is. */
8973 int *op_alt_regno[MAX_RECOG_OPERANDS];
8974 /* Array of alternatives, sorted in order of decreasing desirability. */
8975 int *alternative_order;
8976 rtx reg = gen_rtx_REG (VOIDmode, -1);
8977
8978 /* Find out some information about this insn. */
8979 insn_code_number = recog_memoized (insn);
8980 /* We don't modify asm instructions. */
8981 if (insn_code_number < 0)
8982 return 0;
8983
8984 n_operands = insn_n_operands[insn_code_number];
8985 n_alternatives = insn_n_alternatives[insn_code_number];
8986
8987 if (n_alternatives == 0 || n_operands == 0)
8988 return 0;
8989 insn_extract (insn);
8990
8991 /* Figure out which alternative currently matches. */
8992 if (! constrain_operands (insn_code_number, 1))
8993 fatal_insn_not_found (insn);
8994
8995 alternative_reject = (int *) alloca (n_alternatives * sizeof (int));
8996 alternative_nregs = (int *) alloca (n_alternatives * sizeof (int));
8997 alternative_order = (int *) alloca (n_alternatives * sizeof (int));
8998 bzero ((char *)alternative_reject, n_alternatives * sizeof (int));
8999 bzero ((char *)alternative_nregs, n_alternatives * sizeof (int));
9000
9001 for (i = 0; i < n_operands; i++)
9002 {
9003 enum machine_mode mode;
9004 int regno;
9005 char *p;
9006
9007 op_alt_regno[i] = (int *) alloca (n_alternatives * sizeof (int));
9008 for (j = 0; j < n_alternatives; j++)
9009 op_alt_regno[i][j] = -1;
9010
9011 p = constraints[i] = insn_operand_constraint[insn_code_number][i];
9012 mode = insn_operand_mode[insn_code_number][i];
9013
9014 /* Add the reject values for each alternative given by the constraints
9015 for this operand. */
9016 j = 0;
9017 while (*p != '\0')
9018 {
9019 char c = *p++;
9020 if (c == ',')
9021 j++;
9022 else if (c == '?')
9023 alternative_reject[j] += 3;
9024 else if (c == '!')
9025 alternative_reject[j] += 300;
9026 }
9027
9028 /* We won't change operands which are already registers. We
9029 also don't want to modify output operands. */
9030 regno = true_regnum (recog_operand[i]);
9031 if (regno >= 0
9032 || constraints[i][0] == '='
9033 || constraints[i][0] == '+')
9034 continue;
9035
9036 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9037 {
9038 int class = (int) NO_REGS;
9039
9040 if (! reload_cse_regno_equal_p (regno, recog_operand[i], mode))
9041 continue;
9042
9043 REGNO (reg) = regno;
9044 PUT_MODE (reg, mode);
9045
9046 /* We found a register equal to this operand. Now look for all
9047 alternatives that can accept this register and have not been
9048 assigned a register they can use yet. */
9049 j = 0;
9050 p = constraints[i];
9051 for (;;)
9052 {
9053 char c = *p++;
9054
9055 switch (c)
9056 {
9057 case '=': case '+': case '?':
9058 case '#': case '&': case '!':
9059 case '*': case '%':
9060 case '0': case '1': case '2': case '3': case '4':
9061 case 'm': case '<': case '>': case 'V': case 'o':
9062 case 'E': case 'F': case 'G': case 'H':
9063 case 's': case 'i': case 'n':
9064 case 'I': case 'J': case 'K': case 'L':
9065 case 'M': case 'N': case 'O': case 'P':
9066 #ifdef EXTRA_CONSTRAINT
9067 case 'Q': case 'R': case 'S': case 'T': case 'U':
9068 #endif
9069 case 'p': case 'X':
9070 /* These don't say anything we care about. */
9071 break;
9072
9073 case 'g': case 'r':
9074 class = reg_class_subunion[(int) class][(int) GENERAL_REGS];
9075 break;
9076
9077 default:
9078 class
9079 = reg_class_subunion[(int) class][(int) REG_CLASS_FROM_LETTER ((unsigned char)c)];
9080 break;
9081
9082 case ',': case '\0':
9083 /* See if REGNO fits this alternative, and set it up as the
9084 replacement register if we don't have one for this
9085 alternative yet and the operand being replaced is not
9086 a cheap CONST_INT. */
9087 if (op_alt_regno[i][j] == -1
9088 && reg_fits_class_p (reg, class, 0, mode)
9089 && (GET_CODE (recog_operand[i]) != CONST_INT
9090 || rtx_cost (recog_operand[i], SET) > rtx_cost (reg, SET)))
9091 {
9092 alternative_nregs[j]++;
9093 op_alt_regno[i][j] = regno;
9094 }
9095 j++;
9096 break;
9097 }
9098
9099 if (c == '\0')
9100 break;
9101 }
9102 }
9103 }
9104
9105 /* Record all alternatives which are better or equal to the currently
9106 matching one in the alternative_order array. */
9107 for (i = j = 0; i < n_alternatives; i++)
9108 if (alternative_reject[i] <= alternative_reject[which_alternative])
9109 alternative_order[j++] = i;
9110 n_alternatives = j;
9111
9112 /* Sort it. Given a small number of alternatives, a dumb algorithm
9113 won't hurt too much. */
9114 for (i = 0; i < n_alternatives - 1; i++)
9115 {
9116 int best = i;
9117 int best_reject = alternative_reject[alternative_order[i]];
9118 int best_nregs = alternative_nregs[alternative_order[i]];
9119 int tmp;
9120
9121 for (j = i + 1; j < n_alternatives; j++)
9122 {
9123 int this_reject = alternative_reject[alternative_order[j]];
9124 int this_nregs = alternative_nregs[alternative_order[j]];
9125
9126 if (this_reject < best_reject
9127 || (this_reject == best_reject && this_nregs < best_nregs))
9128 {
9129 best = j;
9130 best_reject = this_reject;
9131 best_nregs = this_nregs;
9132 }
9133 }
9134
9135 tmp = alternative_order[best];
9136 alternative_order[best] = alternative_order[i];
9137 alternative_order[i] = tmp;
9138 }
9139
9140 /* Substitute the operands as determined by op_alt_regno for the best
9141 alternative. */
9142 j = alternative_order[0];
9143
9144 /* Pop back to the real obstacks while changing the insn. */
9145 pop_obstacks ();
9146
9147 for (i = 0; i < n_operands; i++)
9148 {
9149 enum machine_mode mode = insn_operand_mode[insn_code_number][i];
9150 if (op_alt_regno[i][j] == -1)
9151 continue;
9152
9153 validate_change (insn, recog_operand_loc[i],
9154 gen_rtx_REG (mode, op_alt_regno[i][j]), 1);
9155 }
9156
9157 for (i = insn_n_dups[insn_code_number] - 1; i >= 0; i--)
9158 {
9159 int op = recog_dup_num[i];
9160 enum machine_mode mode = insn_operand_mode[insn_code_number][op];
9161
9162 if (op_alt_regno[op][j] == -1)
9163 continue;
9164
9165 validate_change (insn, recog_dup_loc[i],
9166 gen_rtx_REG (mode, op_alt_regno[op][j]), 1);
9167 }
9168
9169 /* Go back to the obstack we are using for temporary
9170 storage. */
9171 push_obstacks (&reload_obstack, &reload_obstack);
9172
9173 return apply_change_group ();
9174 #else
9175 return 0;
9176 #endif
9177 }
9178
9179 /* These two variables are used to pass information from
9180 reload_cse_record_set to reload_cse_check_clobber. */
9181
9182 static int reload_cse_check_clobbered;
9183 static rtx reload_cse_check_src;
9184
9185 /* See if DEST overlaps with RELOAD_CSE_CHECK_SRC. If it does, set
9186 RELOAD_CSE_CHECK_CLOBBERED. This is called via note_stores. The
9187 second argument, which is passed by note_stores, is ignored. */
9188
9189 static void
9190 reload_cse_check_clobber (dest, ignore)
9191 rtx dest;
9192 rtx ignore ATTRIBUTE_UNUSED;
9193 {
9194 if (reg_overlap_mentioned_p (dest, reload_cse_check_src))
9195 reload_cse_check_clobbered = 1;
9196 }
9197
/* Record the result of a SET instruction.  SET is the set pattern.
   BODY is the pattern of the insn that it came from.  */

static void
reload_cse_record_set (set, body)
     rtx set;
     rtx body;
{
  rtx dest, src, x;
  int dreg, sreg;
  enum machine_mode dest_mode;

  dest = SET_DEST (set);
  src = SET_SRC (set);
  dreg = true_regnum (dest);
  sreg = true_regnum (src);
  dest_mode = GET_MODE (dest);

  /* Some machines don't define AUTO_INC_DEC, but they still use push
     instructions.  We need to catch that case here in order to
     invalidate the stack pointer correctly.  Note that invalidating
     the stack pointer is different from invalidating DEST.  */
  x = dest;
  while (GET_CODE (x) == SUBREG
	 || GET_CODE (x) == ZERO_EXTRACT
	 || GET_CODE (x) == SIGN_EXTRACT
	 || GET_CODE (x) == STRICT_LOW_PART)
    x = XEXP (x, 0);
  if (push_operand (x, GET_MODE (x)))
    {
      reload_cse_invalidate_rtx (stack_pointer_rtx, NULL_RTX);
      reload_cse_invalidate_rtx (dest, NULL_RTX);
      return;
    }

  /* We can only handle an assignment to a register, or a store of a
     register to a memory location.  For other cases, we just clobber
     the destination.  We also have to just clobber if there are side
     effects in SRC or DEST.  */
  if ((dreg < 0 && GET_CODE (dest) != MEM)
      || side_effects_p (src)
      || side_effects_p (dest))
    {
      reload_cse_invalidate_rtx (dest, NULL_RTX);
      return;
    }

#ifdef HAVE_cc0
  /* We don't try to handle values involving CC, because it's a pain
     to keep track of when they have to be invalidated.  */
  if (reg_mentioned_p (cc0_rtx, src)
      || reg_mentioned_p (cc0_rtx, dest))
    {
      reload_cse_invalidate_rtx (dest, NULL_RTX);
      return;
    }
#endif

  /* If BODY is a PARALLEL, then we need to see whether the source of
     SET is clobbered by some other instruction in the PARALLEL.
     reload_cse_check_src/clobbered are file-scope so note_stores can
     see them via reload_cse_check_clobber.  */
  if (GET_CODE (body) == PARALLEL)
    {
      int i;

      for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
	{
	  rtx x;

	  x = XVECEXP (body, 0, i);
	  if (x == set)
	    continue;

	  reload_cse_check_clobbered = 0;
	  reload_cse_check_src = src;
	  note_stores (x, reload_cse_check_clobber);
	  if (reload_cse_check_clobbered)
	    {
	      reload_cse_invalidate_rtx (dest, NULL_RTX);
	      return;
	    }
	}
    }

  if (dreg >= 0)
    {
      int i;

      /* This is an assignment to a register.  Update the value we
	 have stored for the register.  */
      if (sreg >= 0)
	{
	  rtx x;

	  /* This is a copy from one register to another.  Any values
	     which were valid for SREG are now valid for DREG.  If the
	     mode changes, we use gen_lowpart_common to extract only
	     the part of the value that is copied.  */
	  reg_values[dreg] = 0;
	  for (x = reg_values[sreg]; x; x = XEXP (x, 1))
	    {
	      rtx tmp;

	      if (XEXP (x, 0) == 0)
		continue;
	      if (dest_mode == GET_MODE (XEXP (x, 0)))
		tmp = XEXP (x, 0);
	      else if (GET_MODE_BITSIZE (dest_mode)
		       > GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))))
		/* Can't widen a recorded value; skip this entry.  */
		continue;
	      else
		tmp = gen_lowpart_common (dest_mode, XEXP (x, 0));
	      if (tmp)
		reg_values[dreg] = gen_rtx_EXPR_LIST (dest_mode, tmp,
						      reg_values[dreg]);
	    }
	}
      else
	/* SRC is not a register: record it as DREG's single known
	   value, referenced in DEST_MODE.  */
	reg_values[dreg] = gen_rtx_EXPR_LIST (dest_mode, src, NULL_RTX);

      /* We've changed DREG, so invalidate any values held by other
	 registers that depend upon it.  */
      reload_cse_invalidate_regno (dreg, dest_mode, 0);

      /* If this assignment changes more than one hard register,
	 forget anything we know about the others.  */
      for (i = 1; i < HARD_REGNO_NREGS (dreg, dest_mode); i++)
	reg_values[dreg + i] = 0;
    }
  else if (GET_CODE (dest) == MEM)
    {
      /* Invalidate conflicting memory locations.  */
      reload_cse_invalidate_mem (dest);

      /* If we're storing a register to memory, add DEST to the list
	 in REG_VALUES.  */
      if (sreg >= 0 && ! side_effects_p (dest))
	reg_values[sreg] = gen_rtx_EXPR_LIST (dest_mode, dest,
					      reg_values[sreg]);
    }
  else
    {
      /* We should have bailed out earlier.  */
      abort ();
    }
}
9343 \f
/* If reload couldn't use reg+reg+offset addressing, try to use reg+reg
   addressing now.
   This code might also be useful when reload gave up on reg+reg addressing
   because of clashes between the return register and INDEX_REG_CLASS.  */

/* The maximum number of uses of a register we can keep track of to
   replace them with reg+reg addressing.  */
#define RELOAD_COMBINE_MAX_USES 6

/* INSN is the insn where a register has been used, and USEP points to the
   location of the register within the rtl.  */
struct reg_use { rtx insn, *usep; };

/* Per-hard-register tracking state for reload_combine.
   If the register is used in some unknown fashion, USE_INDEX is negative.
   If it is dead, USE_INDEX is RELOAD_COMBINE_MAX_USES, and STORE_RUID
   indicates where it becomes live again.
   Otherwise, USE_INDEX is the index of the last encountered use of the
   register (which is first among these we have seen since we scan backwards);
   the uses recorded so far occupy reg_use[USE_INDEX..RELOAD_COMBINE_MAX_USES-1].
   OFFSET contains the constant offset that is added to the register in
   all encountered uses, and USE_RUID indicates the first encountered, i.e.
   last, of these uses.  */
static struct
{
  struct reg_use reg_use[RELOAD_COMBINE_MAX_USES];
  int use_index;
  rtx offset;
  int store_ruid;
  int use_ruid;
} reg_state[FIRST_PSEUDO_REGISTER];

/* Reverse linear uid.  This is increased in reload_combine while scanning
   the instructions from last to first.  It is used to set last_label_ruid
   and the store_ruid / use_ruid fields in reg_state.  */
static int reload_combine_ruid;
9378
/* Scan the insn stream backwards looking for the pattern

       (set (REGX) (CONST_INT))
       (set (REGX) (PLUS (REGX) (REGY)))
       ...
       ... (MEM (REGX)) ...

   and, when all uses of REGX are known, rewrite it to

       (set (REGZ) (CONST_INT))
       ...
       ... (MEM (PLUS (REGZ) (REGY))) ...

   so that the reg-reg addition is folded into the addresses and the
   addition insn can be deleted.  Uses reg_state / reload_combine_ruid,
   which are maintained here and in reload_combine_note_store /
   reload_combine_note_use.  */
static void
reload_combine ()
{
  rtx insn, set;
  /* Initialized to an empty range (first > last) in case no index
     register exists at all.  */
  int first_index_reg = 1, last_index_reg = 0;
  int i;
  int last_label_ruid;

  /* If reg+reg can be used in offsetable memory addresses, the main chunk of
     reload has already used it where appropriate, so there is no use in
     trying to generate it now.  */
  if (double_reg_address_ok && INDEX_REG_CLASS != NO_REGS)
    return;

  /* To avoid wasting too much time later searching for an index register,
     determine the minimum and maximum index register numbers.  */
  for (i = FIRST_PSEUDO_REGISTER - 1; i >= 0; --i)
    {
      if (TEST_HARD_REG_BIT (reg_class_contents[INDEX_REG_CLASS], i))
	{
	  /* Scanning downwards, so the last hit is the smallest regno.  */
	  if (! last_index_reg)
	    last_index_reg = i;
	  first_index_reg = i;
	}
    }
  /* If no index register is available, we can quit now.  */
  if (first_index_reg > last_index_reg)
    return;

  /* Initialize last_label_ruid, reload_combine_ruid and reg_state.
     Fixed registers are permanently "used in an unknown fashion".  */
  last_label_ruid = reload_combine_ruid = 0;
  for (i = FIRST_PSEUDO_REGISTER - 1; i >= 0; --i)
    {
      if (fixed_regs[i])
	reg_state[i].use_index = -1;
      else
	{
	  reg_state[i].use_index = RELOAD_COMBINE_MAX_USES;
	  reg_state[i].store_ruid = reload_combine_ruid;
	}
    }

  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      rtx note;

      /* We cannot do our optimization across labels.  Invalidating all the use
	 information we have would be costly, so we just note where the label
	 is and then later disable any optimization that would cross it.  */
      if (GET_CODE (insn) == CODE_LABEL)
	last_label_ruid = reload_combine_ruid;
      if (GET_RTX_CLASS (GET_CODE (insn)) != 'i')
	continue;
      reload_combine_ruid++;

      /* Look for (set (REGX) (CONST_INT))
	 (set (REGX) (PLUS (REGX) (REGY)))
	 ...
	 ... (MEM (REGX)) ...
	 and convert it to
	 (set (REGZ) (CONST_INT))
	 ...
	 ... (MEM (PLUS (REGZ) (REGY)))... .

	 First, check that we have (set (REGX) (PLUS (REGX) (REGY)))
	 and that we know all uses of REGX before it dies.  */
      set = single_set (insn);
      if (set != NULL_RTX
	  && GET_CODE (SET_DEST (set)) == REG
	  && (HARD_REGNO_NREGS (REGNO (SET_DEST (set)),
				GET_MODE (SET_DEST (set)))
	      == 1)
	  && GET_CODE (SET_SRC (set)) == PLUS
	  && GET_CODE (XEXP (SET_SRC (set), 1)) == REG
	  && rtx_equal_p (XEXP (SET_SRC (set), 0), SET_DEST (set))
	  /* use_ruid more recent than the last label means no label
	     lies between the addition and the recorded uses.  */
	  && last_label_ruid < reg_state[REGNO (SET_DEST (set))].use_ruid)
	{
	  rtx reg = SET_DEST (set);
	  rtx plus = SET_SRC (set);
	  rtx base = XEXP (plus, 1);
	  rtx prev = prev_nonnote_insn (insn);
	  rtx prev_set = prev ? single_set (prev) : NULL_RTX;
	  int regno = REGNO (reg);
	  rtx const_reg;
	  /* Stays NULL_RTX if no suitable index register is found, which
	     vetoes the transformation below.  */
	  rtx reg_sum = NULL_RTX;

	  /* Now, we need an index register.
	     We'll set index_reg to this index register, const_reg to the
	     register that is to be loaded with the constant
	     (denoted as REGZ in the substitution illustration above),
	     and reg_sum to the register-register that we want to use to
	     substitute uses of REG (typically in MEMs) with.
	     First check REG and BASE for being index registers;
	     we can use them even if they are not dead.  */
	  if (TEST_HARD_REG_BIT (reg_class_contents[INDEX_REG_CLASS], regno)
	      || TEST_HARD_REG_BIT (reg_class_contents[INDEX_REG_CLASS],
				    REGNO (base)))
	    {
	      const_reg = reg;
	      reg_sum = plus;
	    }
	  else
	    {
	      /* Otherwise, look for a free index register.  Since we have
		 checked above that neither REG nor BASE are index registers,
		 if we find anything at all, it will be different from these
		 two registers.  */
	      for (i = first_index_reg; i <= last_index_reg; i++)
		{
		  if (TEST_HARD_REG_BIT (reg_class_contents[INDEX_REG_CLASS], i)
		      /* The candidate must be dead here ...  */
		      && reg_state[i].use_index == RELOAD_COMBINE_MAX_USES
		      /* ... and not stored into between the recorded uses
			 and this point (we scan backwards).  */
		      && reg_state[i].store_ruid <= reg_state[regno].use_ruid
		      && HARD_REGNO_NREGS (i, GET_MODE (reg)) == 1)
		    {
		      rtx index_reg = gen_rtx_REG (GET_MODE (reg), i);
		      const_reg = index_reg;
		      reg_sum = gen_rtx_PLUS (GET_MODE (reg), index_reg, base);
		      break;
		    }
		}
	    }
	  /* The previous insn must be the constant load, all uses of REG
	     must have been recorded (use_index >= 0), and we must have
	     found a usable index register.  */
	  if (prev_set
	      && GET_CODE (SET_SRC (prev_set)) == CONST_INT
	      && rtx_equal_p (SET_DEST (prev_set), reg)
	      && reg_state[regno].use_index >= 0
	      && reg_sum)
	    {
	      int i;

	      /* Change destination register and - if necessary - the
		 constant value in PREV, the constant loading instruction.
		 All changes are queued (in_group == 1) and committed
		 atomically by apply_change_group below.  */
	      validate_change (prev, &SET_DEST (prev_set), const_reg, 1);
	      if (reg_state[regno].offset != const0_rtx)
		validate_change (prev,
				 &SET_SRC (prev_set),
				 GEN_INT (INTVAL (SET_SRC (prev_set))
					  + INTVAL (reg_state[regno].offset)),
				 1);
	      /* Now for every use of REG that we have recorded, replace REG
		 with REG_SUM.  */
	      for (i = reg_state[regno].use_index;
		   i < RELOAD_COMBINE_MAX_USES; i++)
		validate_change (reg_state[regno].reg_use[i].insn,
				 reg_state[regno].reg_use[i].usep,
				 reg_sum, 1);

	      if (apply_change_group ())
		{
		  rtx *np;

		  /* Delete the reg-reg addition.  */
		  PUT_CODE (insn, NOTE);
		  NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
		  NOTE_SOURCE_FILE (insn) = 0;

		  if (reg_state[regno].offset != const0_rtx)
		    {
		      /* Previous REG_EQUIV / REG_EQUAL notes for PREV
			 are now invalid.  */
		      for (np = &REG_NOTES (prev); *np; )
			{
			  if (REG_NOTE_KIND (*np) == REG_EQUAL
			      || REG_NOTE_KIND (*np) == REG_EQUIV)
			    *np = XEXP (*np, 1);
			  else
			    np = &XEXP (*np, 1);
			}
		    }
		  reg_state[regno].use_index = RELOAD_COMBINE_MAX_USES;
		  reg_state[REGNO (const_reg)].store_ruid = reload_combine_ruid;
		  continue;
		}
	    }
	}
      note_stores (PATTERN (insn), reload_combine_note_store);
      if (GET_CODE (insn) == CALL_INSN)
	{
	  rtx link;

	  /* A call clobbers all call-used registers; treat them as
	     freshly stored (dead before the call, scanning backwards).  */
	  for (i = FIRST_PSEUDO_REGISTER - 1; i >= 0; --i)
	    {
	      if (call_used_regs[i])
		{
		  reg_state[i].use_index = RELOAD_COMBINE_MAX_USES;
		  reg_state[i].store_ruid = reload_combine_ruid;
		}
	    }
	  for (link = CALL_INSN_FUNCTION_USAGE (insn); link;
	       link = XEXP (link, 1))
	    {
	      rtx use = XEXP (link, 0);
	      int regno = REGNO (XEXP (use, 0));
	      if (GET_CODE (use) == CLOBBER)
		{
		  reg_state[regno].use_index = RELOAD_COMBINE_MAX_USES;
		  reg_state[regno].store_ruid = reload_combine_ruid;
		}
	      else
		/* A USE: the register is read by the call in a way we
		   cannot rewrite.  */
		reg_state[regno].use_index = -1;
	    }
	}
      if (GET_CODE (insn) == JUMP_INSN)
	{
	  /* Non-spill registers might be used at the call destination in
	     some unknown fashion, so we have to mark the unknown use.  */
	  for (i = FIRST_PSEUDO_REGISTER - 1; i >= 0; --i)
	    {
	      /* NOTE(review): this condition is degenerate -- every hard
		 register is invalidated at every jump.  Presumably a more
		 selective test was intended here; confirm the intended
		 predicate before tightening it.  */
	      if (1)
		reg_state[i].use_index = -1;
	    }
	}
      reload_combine_note_use (&PATTERN (insn), insn);
      for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
	{
	  /* An auto-increment side effect is a use we cannot rewrite.  */
	  if (REG_NOTE_KIND (note) == REG_INC
	      && GET_CODE (XEXP (note, 0)) == REG)
	    reg_state[REGNO (XEXP (note, 0))].use_index = -1;
	}
    }
}
9599
9600 /* Check if DST is a register or a subreg of a register; if it is,
9601 update reg_state[regno].store_ruid and reg_state[regno].use_index
9602 accordingly. Called via note_stores from reload_combine.
9603 The second argument, SET, is ignored. */
9604 static void
9605 reload_combine_note_store (dst, set)
9606 rtx dst, set ATTRIBUTE_UNUSED;
9607 {
9608 int regno = 0;
9609 int i;
9610 unsigned size = GET_MODE_SIZE (GET_MODE (dst));
9611
9612 if (GET_CODE (dst) == SUBREG)
9613 {
9614 regno = SUBREG_WORD (dst);
9615 dst = SUBREG_REG (dst);
9616 }
9617 if (GET_CODE (dst) != REG)
9618 return;
9619 regno += REGNO (dst);
9620 /* note_stores might have stripped a STRICT_LOW_PART, so we have to be
9621 careful with registers / register parts that are not full words. */
9622 if (size < (unsigned) UNITS_PER_WORD)
9623 reg_state[regno].use_index = -1;
9624 else
9625 {
9626 for (i = size / UNITS_PER_WORD - 1 + regno; i >= regno; i--)
9627 {
9628 reg_state[i].store_ruid = reload_combine_ruid;
9629 reg_state[i].use_index = RELOAD_COMBINE_MAX_USES;
9630 }
9631 }
9632 }
9633
9634 /* XP points to a piece of rtl that has to be checked for any uses of
9635 registers.
9636 *XP is the pattern of INSN, or a part of it.
9637 Called from reload_combine, and recursively by itself. */
9638 static void
9639 reload_combine_note_use (xp, insn)
9640 rtx *xp, insn;
9641 {
9642 rtx x = *xp;
9643 enum rtx_code code = x->code;
9644 char *fmt;
9645 int i, j;
9646 rtx offset = const0_rtx; /* For the REG case below. */
9647
9648 switch (code)
9649 {
9650 case SET:
9651 if (GET_CODE (SET_DEST (x)) == REG)
9652 {
9653 reload_combine_note_use (&SET_SRC (x), insn);
9654 return;
9655 }
9656 break;
9657
9658 case CLOBBER:
9659 if (GET_CODE (SET_DEST (x)) == REG)
9660 return;
9661 break;
9662
9663 case PLUS:
9664 /* We are interested in (plus (reg) (const_int)) . */
9665 if (GET_CODE (XEXP (x, 0)) != REG || GET_CODE (XEXP (x, 1)) != CONST_INT)
9666 break;
9667 offset = XEXP (x, 1);
9668 x = XEXP (x, 0);
9669 /* Fall through. */
9670 case REG:
9671 {
9672 int regno = REGNO (x);
9673 int use_index;
9674
9675 /* Some spurious USEs of pseudo registers might remain.
9676 Just ignore them. */
9677 if (regno >= FIRST_PSEUDO_REGISTER)
9678 return;
9679
9680 /* If this register is already used in some unknown fashion, we
9681 can't do anything.
9682 If we decrement the index from zero to -1, we can't store more
9683 uses, so this register becomes used in an unknown fashion. */
9684 use_index = --reg_state[regno].use_index;
9685 if (use_index < 0)
9686 return;
9687
9688 if (use_index != RELOAD_COMBINE_MAX_USES - 1)
9689 {
9690 /* We have found another use for a register that is already
9691 used later. Check if the offsets match; if not, mark the
9692 register as used in an unknown fashion. */
9693 if (! rtx_equal_p (offset, reg_state[regno].offset))
9694 {
9695 reg_state[regno].use_index = -1;
9696 return;
9697 }
9698 }
9699 else
9700 {
9701 /* This is the first use of this register we have seen since we
9702 marked it as dead. */
9703 reg_state[regno].offset = offset;
9704 reg_state[regno].use_ruid = reload_combine_ruid;
9705 }
9706 reg_state[regno].reg_use[use_index].insn = insn;
9707 reg_state[regno].reg_use[use_index].usep = xp;
9708 return;
9709 }
9710
9711 default:
9712 break;
9713 }
9714
9715 /* Recursively process the components of X. */
9716 fmt = GET_RTX_FORMAT (code);
9717 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
9718 {
9719 if (fmt[i] == 'e')
9720 reload_combine_note_use (&XEXP (x, i), insn);
9721 else if (fmt[i] == 'E')
9722 {
9723 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
9724 reload_combine_note_use (&XVECEXP (x, i, j), insn);
9725 }
9726 }
9727 }
9728 \f
/* See if we can reduce the cost of a constant by replacing a move with
   an add.  */
/* We cannot do our optimization across labels.  Invalidating all the
   information about register contents we have would be costly, so we
   use last_label_luid (local variable of reload_cse_move2add) to note
   where the label is and then later disable any optimization that would
   cross it.
   reg_offset[n] / reg_base_reg[n] / reg_mode[n] are only valid if
   reg_set_luid[n] is larger than last_label_luid.  */
static int reg_set_luid[FIRST_PSEUDO_REGISTER];
/* reg_offset[n] has to be CONST_INT for it and reg_base_reg[n] /
   reg_mode[n] to be valid.  (Storing any non-CONST_INT rtx into
   reg_offset[n] therefore serves as an invalidation sentinel.)
   If reg_offset[n] is a CONST_INT and reg_base_reg[n] is negative, register n
   has been set to reg_offset[n] in mode reg_mode[n] .
   If reg_offset[n] is a CONST_INT and reg_base_reg[n] is non-negative,
   register n has been set to the sum of reg_offset[n] and register
   reg_base_reg[n], calculated in mode reg_mode[n] .  */
static rtx reg_offset[FIRST_PSEUDO_REGISTER];
static int reg_base_reg[FIRST_PSEUDO_REGISTER];
static enum machine_mode reg_mode[FIRST_PSEUDO_REGISTER];
/* move2add_luid is linearly increased while scanning the instructions
   from first to last.  It is used to set reg_set_luid in
   reload_cse_move2add and move2add_note_store.  */
static int move2add_luid;
9753
/* Scan the insns from FIRST onward and try to replace a load of a
   constant into a register with an addition to the register's previous
   contents, when the add is cheaper (as judged by rtx_cost).  Register
   contents are tracked in reg_set_luid / reg_offset / reg_base_reg /
   reg_mode, maintained here and by move2add_note_store.  */
static void
reload_cse_move2add (first)
     rtx first;
{
  int i;
  rtx insn;
  int last_label_luid;

  /* Start with no valid information about any register.  */
  for (i = FIRST_PSEUDO_REGISTER-1; i >= 0; i--)
    reg_set_luid[i] = 0;

  last_label_luid = 0;
  move2add_luid = 1;
  for (insn = first; insn; insn = NEXT_INSN (insn), move2add_luid++)
    {
      rtx pat, note;

      /* Information does not survive across a label; see the comment
	 above reg_set_luid.  */
      if (GET_CODE (insn) == CODE_LABEL)
	last_label_luid = move2add_luid;
      if (GET_RTX_CLASS (GET_CODE (insn)) != 'i')
	continue;
      pat = PATTERN (insn);
      /* For simplicity, we only perform this optimization on
	 straightforward SETs.  */
      if (GET_CODE (pat) == SET
	  && GET_CODE (SET_DEST (pat)) == REG)
	{
	  rtx reg = SET_DEST (pat);
	  int regno = REGNO (reg);
	  rtx src = SET_SRC (pat);

	  /* Check if we have valid information on the contents of this
	     register in the mode of REG.  */
	  /* ??? We don't know how zero / sign extension is handled, hence
	     we can't go from a narrower to a wider mode.  */
	  if (reg_set_luid[regno] > last_label_luid
	      && (GET_MODE_SIZE (GET_MODE (reg))
		  <= GET_MODE_SIZE (reg_mode[regno]))
	      && GET_CODE (reg_offset[regno]) == CONST_INT)
	    {
	      /* Try to transform (set (REGX) (CONST_INT A))
				  ...
				  (set (REGX) (CONST_INT B))
		 to
				  (set (REGX) (CONST_INT A))
				  ...
				  (set (REGX) (plus (REGX) (CONST_INT B-A))) */

	      if (GET_CODE (src) == CONST_INT && reg_base_reg[regno] < 0)
		{
		  int success = 0;
		  rtx new_src = GEN_INT (INTVAL (src)
					 - INTVAL (reg_offset[regno]));
		  /* (set (reg) (plus (reg) (const_int 0))) is not canonical;
		     use (set (reg) (reg)) instead.
		     We don't delete this insn, nor do we convert it into a
		     note, to avoid losing register notes or the return
		     value flag.  jump2 already knows how to get rid of
		     no-op moves.  */
		  if (new_src == const0_rtx)
		    success = validate_change (insn, &SET_SRC (pat), reg, 0);
		  else if (rtx_cost (new_src, PLUS) < rtx_cost (src, SET)
			   && have_add2_insn (GET_MODE (reg)))
		    success = validate_change (insn, &PATTERN (insn),
					       gen_add2_insn (reg, new_src), 0);
		  /* Whether or not the change succeeded, the register now
		     holds SRC.  */
		  reg_set_luid[regno] = move2add_luid;
		  reg_mode[regno] = GET_MODE (reg);
		  reg_offset[regno] = src;
		  continue;
		}

	      /* Try to transform (set (REGX) (REGY))
				  (set (REGX) (PLUS (REGX) (CONST_INT A)))
				  ...
				  (set (REGX) (REGY))
				  (set (REGX) (PLUS (REGX) (CONST_INT B)))
		 to
				  (set (REGX) (REGY))
				  (set (REGX) (PLUS (REGX) (CONST_INT A)))
				  ...
				  (set (REGX) (plus (REGX) (CONST_INT B-A))) */
	      else if (GET_CODE (src) == REG
		       && reg_base_reg[regno] == REGNO (src)
		       /* REGY must not have changed since REGX was set
			 from it.  */
		       && reg_set_luid[regno] > reg_set_luid[REGNO (src)])
		{
		  rtx next = next_nonnote_insn (insn);
		  /* SET is only read below when NEXT is non-null, in which
		     case it has been assigned.  */
		  rtx set;
		  if (next)
		    set = single_set (next);
		  if (next
		      && set
		      && SET_DEST (set) == reg
		      && GET_CODE (SET_SRC (set)) == PLUS
		      && XEXP (SET_SRC (set), 0) == reg
		      && GET_CODE (XEXP (SET_SRC (set), 1)) == CONST_INT)
		    {
		      rtx src3 = XEXP (SET_SRC (set), 1);
		      rtx new_src = GEN_INT (INTVAL (src3)
					     - INTVAL (reg_offset[regno]));
		      int success = 0;

		      if (new_src == const0_rtx)
			/* See above why we create (set (reg) (reg)) here.  */
			success
			  = validate_change (next, &SET_SRC (set), reg, 0);
		      /* The cost of the copy we delete is counted as 2,
			 hence the "2 +" on the right-hand side.  */
		      else if ((rtx_cost (new_src, PLUS)
				< 2 + rtx_cost (src3, SET))
			       && have_add2_insn (GET_MODE (reg)))
			success
			  = validate_change (next, &PATTERN (next),
					     gen_add2_insn (reg, new_src), 0);
		      if (success)
			{
			  /* INSN might be the first insn in a basic block
			     if the preceding insn is a conditional jump
			     or a possible-throwing call.  */
			  PUT_CODE (insn, NOTE);
			  NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
			  NOTE_SOURCE_FILE (insn) = 0;
			}
		      /* Skip NEXT; after both insns REGX holds
			 REGY + SRC3.  */
		      insn = next;
		      reg_set_luid[regno] = move2add_luid;
		      reg_mode[regno] = GET_MODE (reg);
		      reg_offset[regno] = src3;
		      continue;
		    }
		}
	    }
	}

      for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
	{
	  if (REG_NOTE_KIND (note) == REG_INC
	      && GET_CODE (XEXP (note, 0)) == REG)
	    {
	      /* Indicate that this register has been recently written to,
		 but the exact contents are not available.  (NOTE is not a
		 CONST_INT, so it acts as an invalidation sentinel.)  */
	      int regno = REGNO (XEXP (note, 0));
	      if (regno < FIRST_PSEUDO_REGISTER)
		{
		  reg_set_luid[regno] = move2add_luid;
		  reg_offset[regno] = note;
		}
	    }
	}
      note_stores (PATTERN (insn), move2add_note_store);
      /* If this is a CALL_INSN, all call used registers are stored with
	 unknown values.  */
      if (GET_CODE (insn) == CALL_INSN)
	{
	  for (i = FIRST_PSEUDO_REGISTER-1; i >= 0; i--)
	    {
	      if (call_used_regs[i])
		{
		  reg_set_luid[i] = move2add_luid;
		  reg_offset[i] = insn;	/* Invalidate contents.  */
		}
	    }
	}
    }
}
9915
/* SET is a SET or CLOBBER that sets DST.
   Update reg_set_luid, reg_offset and reg_base_reg accordingly.
   Called from reload_cse_move2add via note_stores.  */
static void
move2add_note_store (dst, set)
     rtx dst, set;
{
  int regno = 0;
  int i;

  /* The mode is taken from DST before any SUBREG is stripped, i.e. it is
     the mode the store was actually performed in.  */
  enum machine_mode mode = GET_MODE (dst);
  if (GET_CODE (dst) == SUBREG)
    {
      regno = SUBREG_WORD (dst);
      dst = SUBREG_REG (dst);
    }
  if (GET_CODE (dst) != REG)
    return;

  regno += REGNO (dst);

  /* Detailed tracking only for single-register SETs; multi-register
     stores and CLOBBERs just invalidate (else branch below).  */
  if (HARD_REGNO_NREGS (regno, mode) == 1 && GET_CODE (set) == SET)
    {
      rtx src = SET_SRC (set);

      reg_mode[regno] = mode;
      switch (GET_CODE (src))
	{
	case PLUS:
	  {
	    rtx src0 = XEXP (src, 0);
	    if (GET_CODE (src0) == REG)
	      {
		/* If this is (set (REGX) (plus (REGX) ...)) immediately
		   after (set (REGX) (REGY)) -- i.e. base is this very
		   register and the recorded offset is still zero -- keep
		   the old reg_set_luid / base so the copy+add sequence is
		   recognized by reload_cse_move2add; otherwise start a
		   fresh base/luid.  */
		if (REGNO (src0) != regno
		    || reg_offset[regno] != const0_rtx)
		  {
		    reg_base_reg[regno] = REGNO (src0);
		    reg_set_luid[regno] = move2add_luid;
		  }
		reg_offset[regno] = XEXP (src, 1);
		break;
	      }
	    /* plus with a non-REG first operand: nothing we can track.  */
	    reg_set_luid[regno] = move2add_luid;
	    reg_offset[regno] = set;	/* Invalidate contents.  */
	    break;
	  }

	case REG:
	  /* A plain copy: REGX = REGY + 0.  */
	  reg_base_reg[regno] = REGNO (SET_SRC (set));
	  reg_offset[regno] = const0_rtx;
	  reg_set_luid[regno] = move2add_luid;
	  break;

	default:
	  /* A CONST_INT source records a known constant; any other
	     source is a non-CONST_INT rtx and thus invalidates.  */
	  reg_base_reg[regno] = -1;
	  reg_offset[regno] = SET_SRC (set);
	  reg_set_luid[regno] = move2add_luid;
	  break;
	}
    }
  else
    {
      for (i = regno + HARD_REGNO_NREGS (regno, mode) - 1; i >= regno; i--)
	{
	  /* Indicate that this register has been recently written to,
	     but the exact contents are not available.  (DST is not a
	     CONST_INT, so it acts as an invalidation sentinel.)  */
	  reg_set_luid[i] = move2add_luid;
	  reg_offset[i] = dst;
	}
    }
}
This page took 0.503846 seconds and 6 git commands to generate.