1 /* Reload pseudo regs into hard regs for insns that require hard regs.
2 Copyright (C) 1987, 88, 89, 92-97, 1998 Free Software Foundation, Inc.
3
4 This file is part of GNU CC.
5
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
20
21
22 #include "config.h"
23 #include "system.h"
24
25 #include "machmode.h"
26 #include "hard-reg-set.h"
27 #include "rtl.h"
28 #include "obstack.h"
29 #include "insn-config.h"
30 #include "insn-flags.h"
31 #include "insn-codes.h"
32 #include "flags.h"
33 #include "expr.h"
34 #include "regs.h"
35 #include "basic-block.h"
36 #include "reload.h"
37 #include "recog.h"
38 #include "output.h"
39 #include "real.h"
40 #include "toplev.h"
41
42 /* This file contains the reload pass of the compiler, which is
43 run after register allocation has been done. It checks that
44 each insn is valid (operands required to be in registers really
45 are in registers of the proper class) and fixes up invalid ones
46 by copying values temporarily into registers for the insns
47 that need them.
48
49 The results of register allocation are described by the vector
50 reg_renumber; the insns still contain pseudo regs, but reg_renumber
51 can be used to find which hard reg, if any, a pseudo reg is in.
52
53 The technique we always use is to free up a few hard regs that are
54 called ``reload regs'', and for each place where a pseudo reg
55 must be in a hard reg, copy it temporarily into one of the reload regs.
56
57 Reload regs are allocated locally for every instruction that needs
58 reloads. When there are pseudos which are allocated to a register that
59 has been chosen as a reload reg, such pseudos must be ``spilled''.
60 This means that they go to other hard regs, or to stack slots if no other
61 available hard regs can be found. Spilling can invalidate more
62 insns, requiring additional need for reloads, so we must keep checking
63 until the process stabilizes.
64
65 For machines with different classes of registers, we must keep track
66 of the register class needed for each reload, and make sure that
67 we allocate enough reload registers of each class.
68
69 The file reload.c contains the code that checks one insn for
70 validity and reports the reloads that it needs. This file
71 is in charge of scanning the entire rtl code, accumulating the
72 reload needs, spilling, assigning reload registers to use for
73 fixing up each insn, and generating the new insns to copy values
74 into the reload registers. */
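/* Editor's note -- an illustrative sketch, not part of the original file.
   It restates the flow described above as it is realized by reload ()
   later in this file; the names below are the actual functions defined
   or called there:

       reload (first, global, dumpfile)
         record reg_equiv_* info; spill regs that cannot be eliminated;
         loop:
           calculate_needs_all_insns (global);   scan insns, run find_reloads
           find_reload_regs (chain, dumpfile);   pick spill regs per insn
           finish_spills (global, dumpfile);     spill pseudos, retry allocation
           until no additional hard regs are spilled;
         reload_as_needed (global);              emit the actual reload insns

   See the corresponding definitions below for the details.  */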
75
76
77 #ifndef REGISTER_MOVE_COST
78 #define REGISTER_MOVE_COST(x, y) 2
79 #endif
80 \f
81 /* During reload_as_needed, element N contains a REG rtx for the hard reg
82 into which reg N has been reloaded (perhaps for a previous insn). */
83 static rtx *reg_last_reload_reg;
84
85 /* Elt N nonzero if reg_last_reload_reg[N] has been set in this insn
86 for an output reload that stores into reg N. */
87 static char *reg_has_output_reload;
88
89 /* Indicates which hard regs are reload-registers for an output reload
90 in the current insn. */
91 static HARD_REG_SET reg_is_output_reload;
92
93 /* Element N is the constant value to which pseudo reg N is equivalent,
94 or zero if pseudo reg N is not equivalent to a constant.
95 find_reloads looks at this in order to replace pseudo reg N
96 with the constant it stands for. */
97 rtx *reg_equiv_constant;
98
99 /* Element N is a memory location to which pseudo reg N is equivalent,
100 prior to any register elimination (such as frame pointer to stack
101 pointer). Depending on whether or not it is a valid address, this value
102 is transferred to either reg_equiv_address or reg_equiv_mem. */
103 rtx *reg_equiv_memory_loc;
104
105 /* Element N is the address of stack slot to which pseudo reg N is equivalent.
106 This is used when the address is not valid as a memory address
107 (because its displacement is too big for the machine.) */
108 rtx *reg_equiv_address;
109
110 /* Element N is the memory slot to which pseudo reg N is equivalent,
111 or zero if pseudo reg N is not equivalent to a memory slot. */
112 rtx *reg_equiv_mem;
113
114 /* Widest width in which each pseudo reg is referred to (via subreg). */
115 static int *reg_max_ref_width;
116
117 /* Element N is the insn that initialized reg N from its equivalent
118 constant or memory slot. */
119 static rtx *reg_equiv_init;
120
121 /* Vector to remember old contents of reg_renumber before spilling. */
122 static short *reg_old_renumber;
123
124 /* During reload_as_needed, element N contains the last pseudo regno reloaded
125 into hard register N. If that pseudo reg occupied more than one register,
126 reg_reloaded_contents points to that pseudo for each spill register in
127 use; all of these must remain set for an inheritance to occur. */
128 static int reg_reloaded_contents[FIRST_PSEUDO_REGISTER];
129
130 /* During reload_as_needed, element N contains the insn for which
131 hard register N was last used. Its contents are significant only
132 when reg_reloaded_valid is set for this register. */
133 static rtx reg_reloaded_insn[FIRST_PSEUDO_REGISTER];
134
135 /* Indicate if reg_reloaded_insn / reg_reloaded_contents is valid. */
136 static HARD_REG_SET reg_reloaded_valid;
137 /* Indicate if the register was dead at the end of the reload.
138 This is only valid if reg_reloaded_contents is set and valid. */
139 static HARD_REG_SET reg_reloaded_dead;
140
141 /* Number of spill-regs so far; number of valid elements of spill_regs. */
142 static int n_spills;
143
144 /* In parallel with spill_regs, contains REG rtx's for those regs.
145 Holds the last rtx used for any given reg, or 0 if it has never
146 been used for spilling yet. This rtx is reused, provided it has
147 the proper mode. */
148 static rtx spill_reg_rtx[FIRST_PSEUDO_REGISTER];
149
150 /* In parallel with spill_regs, contains nonzero for a spill reg
151 that was stored after the last time it was used.
152 The precise value is the insn generated to do the store. */
153 static rtx spill_reg_store[FIRST_PSEUDO_REGISTER];
154
155 /* This is the register that was stored with spill_reg_store. This is a
156 copy of reload_out / reload_out_reg when the value was stored; if
157 reload_out is a MEM, spill_reg_stored_to will be set to reload_out_reg. */
158 static rtx spill_reg_stored_to[FIRST_PSEUDO_REGISTER];
159
160 /* This table is the inverse mapping of spill_regs:
161 indexed by hard reg number,
162 it contains the position of that reg in spill_regs,
163 or -1 for something that is not in spill_regs.
164
165 ?!? This is no longer accurate. */
166 static short spill_reg_order[FIRST_PSEUDO_REGISTER];
167
168 /* This reg set indicates registers that can't be used as spill registers for
169 the currently processed insn. These are the hard registers which are live
170 during the insn, but not allocated to pseudos, as well as fixed
171 registers. */
172 static HARD_REG_SET bad_spill_regs;
173
174 /* These are the hard registers that can't be used as spill register for any
175 insn. This includes registers used for user variables and registers that
176 we can't eliminate. A register that appears in this set also can't be used
177 to retry register allocation. */
178 static HARD_REG_SET bad_spill_regs_global;
179
180 /* Describes order of use of registers for reloading
181 of spilled pseudo-registers. `n_spills' is the number of
182 elements that are actually valid; new ones are added at the end.
183
184 Both spill_regs and spill_reg_order are used on two occasions:
185 once during find_reload_regs, where they keep track of the spill registers
186 for a single insn, but also during reload_as_needed where they show all
187 the registers ever used by reload. For the latter case, the information
188 is calculated during finish_spills. */
189 static short spill_regs[FIRST_PSEUDO_REGISTER];
190
191 /* This vector of reg sets indicates, for each pseudo, which hard registers
192 may not be used for retrying global allocation because the register was
193 formerly spilled from one of them. If we allowed reallocating a pseudo to
194 a register that it was already allocated to, reload might not
195 terminate. */
196 static HARD_REG_SET *pseudo_previous_regs;
197
198 /* This vector of reg sets indicates, for each pseudo, which hard
199 registers may not be used for retrying global allocation because they
200 are used as spill registers during one of the insns in which the
201 pseudo is live. */
202 static HARD_REG_SET *pseudo_forbidden_regs;
203
204 /* All hard regs that have been used as spill registers for any insn are
205 marked in this set. */
206 static HARD_REG_SET used_spill_regs;
207
208 /* Index of last register assigned as a spill register. We allocate in
209 a round-robin fashion. */
210 static int last_spill_reg;
211
212 /* Describes order of preference for putting regs into spill_regs.
213 Contains the numbers of all the hard regs, in order most preferred first.
214 This order is different for each function.
215 It is set up by order_regs_for_reload.
216 Empty elements at the end contain -1. */
217 static short potential_reload_regs[FIRST_PSEUDO_REGISTER];
218
219 /* Nonzero if indirect addressing is supported on the machine; this means
220 that spilling (REG n) does not require reloading it into a register in
221 order to do (MEM (REG n)) or (MEM (PLUS (REG n) (CONST_INT c))). The
222 value indicates the level of indirect addressing supported, e.g., two
223 means that (MEM (MEM (REG n))) is also valid if (REG n) does not get
224 a hard register. */
225 static char spill_indirect_levels;
226
227 /* Nonzero if indirect addressing is supported when the innermost MEM is
228 of the form (MEM (SYMBOL_REF sym)). It is assumed that the level to
229 which these are valid is the same as spill_indirect_levels, above. */
230 char indirect_symref_ok;
231
232 /* Nonzero if an address (plus (reg frame_pointer) (reg ...)) is valid. */
233 char double_reg_address_ok;
234
235 /* Record the stack slot for each spilled hard register. */
236 static rtx spill_stack_slot[FIRST_PSEUDO_REGISTER];
237
238 /* Width allocated so far for that stack slot. */
239 static int spill_stack_slot_width[FIRST_PSEUDO_REGISTER];
240
241 /* Record which pseudos needed to be spilled. */
242 static regset spilled_pseudos;
243
244 /* First uid used by insns created by reload in this function.
245 Used in find_equiv_reg. */
246 int reload_first_uid;
247
248 /* Flag set by local-alloc or global-alloc if anything is live in
249 a call-clobbered reg across calls. */
250 int caller_save_needed;
251
252 /* Set to 1 while reload_as_needed is operating.
253 Required by some machines to handle any generated moves differently. */
254 int reload_in_progress = 0;
255
256 /* These arrays record the insn_code of insns that may be needed to
257 perform input and output reloads of special objects. They provide a
258 place to pass a scratch register. */
259 enum insn_code reload_in_optab[NUM_MACHINE_MODES];
260 enum insn_code reload_out_optab[NUM_MACHINE_MODES];
261
262 /* This obstack is used for allocation of rtl during register elimination.
263 The allocated storage can be freed once find_reloads has processed the
264 insn. */
265 struct obstack reload_obstack;
266
267 /* Points to the beginning of the reload_obstack. All insn_chain structures
268 are allocated first. */
269 char *reload_startobj;
270
271 /* The point after all insn_chain structures. Used to quickly deallocate
272 memory used while processing one insn. */
273 char *reload_firstobj;
274
275 #define obstack_chunk_alloc xmalloc
276 #define obstack_chunk_free free
277
278 /* List of labels that must never be deleted. */
279 extern rtx forced_labels;
280
281 /* List of insn_chain instructions, one for every insn that reload needs to
282 examine. */
283 struct insn_chain *reload_insn_chain;
284
285 /* List of all insns needing reloads. */
286 static struct insn_chain *insns_need_reload;
287 \f
288 /* This structure is used to record information about register eliminations.
289 Each array entry describes one possible way of eliminating a register
290 in favor of another. If there is more than one way of eliminating a
291 particular register, the most preferred should be specified first. */
292
293 struct elim_table
294 {
295 int from; /* Register number to be eliminated. */
296 int to; /* Register number used as replacement. */
297 int initial_offset; /* Initial difference between values. */
298 int can_eliminate; /* Non-zero if this elimination can be done. */
299 int can_eliminate_previous; /* Value of CAN_ELIMINATE in previous scan over
300 insns made by reload. */
301 int offset; /* Current offset between the two regs. */
302 int previous_offset; /* Offset at end of previous insn. */
303 int ref_outside_mem; /* "to" has been referenced outside a MEM. */
304 rtx from_rtx; /* REG rtx for the register to be eliminated.
305 We cannot simply compare the number since
306 we might then spuriously replace a hard
307 register corresponding to a pseudo
308 assigned to the reg to be eliminated. */
309 rtx to_rtx; /* REG rtx for the replacement. */
310 };
311
312 static struct elim_table * reg_eliminate = 0;
313
314 /* This is an intermediate structure to initialize the table. It has
315 exactly the members provided by ELIMINABLE_REGS. */
316 static struct elim_table_1
317 {
318 int from;
319 int to;
320 } reg_eliminate_1[] =
321
322 /* If a set of eliminable registers was specified, define the table from it.
323 Otherwise, default to the normal case of the frame pointer being
324 replaced by the stack pointer. */
325
326 #ifdef ELIMINABLE_REGS
327 ELIMINABLE_REGS;
328 #else
329 {{ FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM}};
330 #endif
331
332 #define NUM_ELIMINABLE_REGS (sizeof reg_eliminate_1/sizeof reg_eliminate_1[0])
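/* Editor's note -- an illustrative example, not part of the original file.
   ELIMINABLE_REGS comes from the target's header; a typical definition
   supplies several (from, to) pairs, most preferred first, e.g.:

       #define ELIMINABLE_REGS                                \
        {{ ARG_POINTER_REGNUM,   STACK_POINTER_REGNUM},       \
         { ARG_POINTER_REGNUM,   HARD_FRAME_POINTER_REGNUM},  \
         { FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM},       \
         { FRAME_POINTER_REGNUM, HARD_FRAME_POINTER_REGNUM}}

   These pairs become the initial contents of reg_eliminate_1 above.  */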
333
334 /* Record the number of pending eliminations that have an offset not equal
335 to their initial offset. If non-zero, we use a new copy of each
336 replacement result in any insns encountered. */
337 int num_not_at_initial_offset;
338
339 /* Count the number of registers that we may be able to eliminate. */
340 static int num_eliminable;
341
342 /* For each label, we record the offset of each elimination. If we reach
343 a label by more than one path and an offset differs, we cannot do the
344 elimination. This information is indexed by the number of the label.
345 The first table is an array of flags that records whether we have yet
346 encountered a label and the second table is an array of arrays, one
347 entry in the latter array for each elimination. */
348
349 static char *offsets_known_at;
350 static int (*offsets_at)[NUM_ELIMINABLE_REGS];
351
352 /* Number of labels in the current function. */
353
354 static int num_labels;
355
356 struct hard_reg_n_uses
357 {
358 int regno;
359 unsigned int uses;
360 };
361 \f
362 static void maybe_fix_stack_asms PROTO((void));
363 static void calculate_needs_all_insns PROTO((int));
364 static void calculate_needs PROTO((struct insn_chain *));
365 static void find_reload_regs PROTO((struct insn_chain *chain,
366 FILE *));
367 static void find_tworeg_group PROTO((struct insn_chain *, int,
368 FILE *));
369 static void find_group PROTO((struct insn_chain *, int,
370 FILE *));
371 static int possible_group_p PROTO((struct insn_chain *, int));
372 static void count_possible_groups PROTO((struct insn_chain *, int));
373 static int modes_equiv_for_class_p PROTO((enum machine_mode,
374 enum machine_mode,
375 enum reg_class));
376 static void delete_caller_save_insns PROTO((void));
377
378 static void spill_failure PROTO((rtx));
379 static void new_spill_reg PROTO((struct insn_chain *, int, int,
380 int, FILE *));
381 static void maybe_mark_pseudo_spilled PROTO((int));
382 static void delete_dead_insn PROTO((rtx));
383 static void alter_reg PROTO((int, int));
384 static void set_label_offsets PROTO((rtx, rtx, int));
385 static int eliminate_regs_in_insn PROTO((rtx, int));
386 static void update_eliminable_offsets PROTO((void));
387 static void mark_not_eliminable PROTO((rtx, rtx));
388 static void set_initial_elim_offsets PROTO((void));
389 static void verify_initial_elim_offsets PROTO((void));
390 static void set_initial_label_offsets PROTO((void));
391 static void set_offsets_for_label PROTO((rtx));
392 static void init_elim_table PROTO((void));
393 static void update_eliminables PROTO((HARD_REG_SET *));
394 static void spill_hard_reg PROTO((int, FILE *, int));
395 static int finish_spills PROTO((int, FILE *));
396 static void ior_hard_reg_set PROTO((HARD_REG_SET *, HARD_REG_SET *));
397 static void scan_paradoxical_subregs PROTO((rtx));
398 static int hard_reg_use_compare PROTO((const GENERIC_PTR, const GENERIC_PTR));
399 static void count_pseudo PROTO((struct hard_reg_n_uses *, int));
400 static void order_regs_for_reload PROTO((struct insn_chain *));
401 static void reload_as_needed PROTO((int));
402 static void forget_old_reloads_1 PROTO((rtx, rtx));
403 static int reload_reg_class_lower PROTO((const GENERIC_PTR, const GENERIC_PTR));
404 static void mark_reload_reg_in_use PROTO((int, int, enum reload_type,
405 enum machine_mode));
406 static void clear_reload_reg_in_use PROTO((int, int, enum reload_type,
407 enum machine_mode));
408 static int reload_reg_free_p PROTO((int, int, enum reload_type));
409 static int reload_reg_free_before_p PROTO((int, int, enum reload_type, int));
410 static int reload_reg_free_for_value_p PROTO((int, int, enum reload_type, rtx, rtx, int));
411 static int reload_reg_reaches_end_p PROTO((int, int, enum reload_type));
412 static int allocate_reload_reg PROTO((struct insn_chain *, int, int,
413 int));
414 static void choose_reload_regs PROTO((struct insn_chain *));
415 static void merge_assigned_reloads PROTO((rtx));
416 static void emit_reload_insns PROTO((struct insn_chain *));
417 static void delete_output_reload PROTO((rtx, int, int));
418 static void delete_address_reloads PROTO((rtx, rtx));
419 static void delete_address_reloads_1 PROTO((rtx, rtx, rtx));
420 static rtx inc_for_reload PROTO((rtx, rtx, rtx, int));
421 static int constraint_accepts_reg_p PROTO((char *, rtx));
422 static void reload_cse_regs_1 PROTO((rtx));
423 static void reload_cse_invalidate_regno PROTO((int, enum machine_mode, int));
424 static int reload_cse_mem_conflict_p PROTO((rtx, rtx));
425 static void reload_cse_invalidate_mem PROTO((rtx));
426 static void reload_cse_invalidate_rtx PROTO((rtx, rtx));
427 static int reload_cse_regno_equal_p PROTO((int, rtx, enum machine_mode));
428 static int reload_cse_noop_set_p PROTO((rtx, rtx));
429 static int reload_cse_simplify_set PROTO((rtx, rtx));
430 static int reload_cse_simplify_operands PROTO((rtx));
431 static void reload_cse_check_clobber PROTO((rtx, rtx));
432 static void reload_cse_record_set PROTO((rtx, rtx));
433 static void reload_combine PROTO((void));
434 static void reload_combine_note_use PROTO((rtx *, rtx));
435 static void reload_combine_note_store PROTO((rtx, rtx));
436 static void reload_cse_move2add PROTO((rtx));
437 static void move2add_note_store PROTO((rtx, rtx));
438 \f
439 /* Initialize the reload pass once per compilation. */
440
441 void
442 init_reload ()
443 {
444 register int i;
445
446 /* Often (MEM (REG n)) is still valid even if (REG n) is put on the stack.
447 Set spill_indirect_levels to the number of levels such addressing is
448 permitted, zero if it is not permitted at all. */
449
450 register rtx tem
451 = gen_rtx_MEM (Pmode,
452 gen_rtx_PLUS (Pmode,
453 gen_rtx_REG (Pmode, LAST_VIRTUAL_REGISTER + 1),
454 GEN_INT (4)));
455 spill_indirect_levels = 0;
456
457 while (memory_address_p (QImode, tem))
458 {
459 spill_indirect_levels++;
460 tem = gen_rtx_MEM (Pmode, tem);
461 }
462
463 /* See if indirect addressing is valid for (MEM (SYMBOL_REF ...)). */
464
465 tem = gen_rtx_MEM (Pmode, gen_rtx_SYMBOL_REF (Pmode, "foo"));
466 indirect_symref_ok = memory_address_p (QImode, tem);
467
468 /* See if reg+reg is a valid (and offsettable) address. */
469
470 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
471 {
472 tem = gen_rtx_PLUS (Pmode,
473 gen_rtx_REG (Pmode, HARD_FRAME_POINTER_REGNUM),
474 gen_rtx_REG (Pmode, i));
475 /* This way, we make sure that reg+reg is an offsettable address. */
476 tem = plus_constant (tem, 4);
477
478 if (memory_address_p (QImode, tem))
479 {
480 double_reg_address_ok = 1;
481 break;
482 }
483 }
484
485 /* Initialize obstack for our rtl allocation. */
486 gcc_obstack_init (&reload_obstack);
487 reload_startobj = (char *) obstack_alloc (&reload_obstack, 0);
488 }
489
490 /* List of insn chains that are currently unused. */
491 static struct insn_chain *unused_insn_chains = 0;
492
493 /* Allocate an empty insn_chain structure. */
494 struct insn_chain *
495 new_insn_chain ()
496 {
497 struct insn_chain *c;
498
499 if (unused_insn_chains == 0)
500 {
501 c = obstack_alloc (&reload_obstack, sizeof (struct insn_chain));
502 c->live_before = OBSTACK_ALLOC_REG_SET (&reload_obstack);
503 c->live_after = OBSTACK_ALLOC_REG_SET (&reload_obstack);
504 }
505 else
506 {
507 c = unused_insn_chains;
508 unused_insn_chains = c->next;
509 }
510 c->is_caller_save_insn = 0;
511 c->need_operand_change = 0;
512 c->need_reload = 0;
513 c->need_elim = 0;
514 return c;
515 }
516
517 /* Small utility function to set all regs in hard reg set TO which are
518 allocated to pseudos in regset FROM. */
519 void
520 compute_use_by_pseudos (to, from)
521 HARD_REG_SET *to;
522 regset from;
523 {
524 int regno;
525 EXECUTE_IF_SET_IN_REG_SET
526 (from, FIRST_PSEUDO_REGISTER, regno,
527 {
528 int r = reg_renumber[regno];
529 int nregs;
530 if (r < 0)
531 abort ();
532 nregs = HARD_REGNO_NREGS (r, PSEUDO_REGNO_MODE (regno));
533 while (nregs-- > 0)
534 SET_HARD_REG_BIT (*to, r + nregs);
535 });
536 }
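/* Editor's note -- a minimal usage sketch, not from the original file,
   showing how compute_use_by_pseudos is meant to be called (the variable
   names here are hypothetical):

       HARD_REG_SET unavailable;
       CLEAR_HARD_REG_SET (unavailable);
       compute_use_by_pseudos (&unavailable, chain->live_before);

   Afterwards UNAVAILABLE has a bit set for every hard register that is
   currently backing a pseudo live before CHAIN->insn.  */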
537 \f
538 /* Global variables used by reload and its subroutines. */
539
540 /* Set during calculate_needs if an insn needs register elimination. */
541 static int something_needs_elimination;
542 /* Set during calculate_needs if an insn needs an operand changed. */
543 int something_needs_operands_changed;
544
545 /* Nonzero means we couldn't get enough spill regs. */
546 static int failure;
547
548 /* Main entry point for the reload pass.
549
550 FIRST is the first insn of the function being compiled.
551
552 GLOBAL nonzero means we were called from global_alloc
553 and should attempt to reallocate any pseudoregs that we
554 displace from hard regs we will use for reloads.
555 If GLOBAL is zero, we do not have enough information to do that,
556 so any pseudo reg that is spilled must go to the stack.
557
558 DUMPFILE is the global-reg debugging dump file stream, or 0.
559 If it is nonzero, messages are written to it to describe
560 which registers are seized as reload regs, which pseudo regs
561 are spilled from them, and where the pseudo regs are reallocated to.
562
563 Return value is nonzero if reload failed
564 and we must not do any more for this function. */
565
566 int
567 reload (first, global, dumpfile)
568 rtx first;
569 int global;
570 FILE *dumpfile;
571 {
572 register int i;
573 register rtx insn;
574 register struct elim_table *ep;
575
576 /* The two pointers used to track the true location of the memory used
577 for label offsets. */
578 char *real_known_ptr = NULL_PTR;
579 int (*real_at_ptr)[NUM_ELIMINABLE_REGS];
580
581 /* Make sure even insns with volatile mem refs are recognizable. */
582 init_recog ();
583
584 failure = 0;
585
586 reload_firstobj = (char *) obstack_alloc (&reload_obstack, 0);
587
588 /* Make sure that the last insn in the chain
589 is not something that needs reloading. */
590 emit_note (NULL_PTR, NOTE_INSN_DELETED);
591
592 /* Enable find_equiv_reg to distinguish insns made by reload. */
593 reload_first_uid = get_max_uid ();
594
595 #ifdef SECONDARY_MEMORY_NEEDED
596 /* Initialize the secondary memory table. */
597 clear_secondary_mem ();
598 #endif
599
600 /* We don't have a stack slot for any spill reg yet. */
601 bzero ((char *) spill_stack_slot, sizeof spill_stack_slot);
602 bzero ((char *) spill_stack_slot_width, sizeof spill_stack_slot_width);
603
604 /* Initialize the save area information for caller-save, in case some
605 are needed. */
606 init_save_areas ();
607
608 /* Compute which hard registers are now in use
609 as homes for pseudo registers.
610 This is done here rather than (eg) in global_alloc
611 because this point is reached even if not optimizing. */
612 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
613 mark_home_live (i);
614
615 /* A function that receives a nonlocal goto must save all call-saved
616 registers. */
617 if (current_function_has_nonlocal_label)
618 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
619 {
620 if (! call_used_regs[i] && ! fixed_regs[i])
621 regs_ever_live[i] = 1;
622 }
623
624 /* Find all the pseudo registers that didn't get hard regs
625 but do have known equivalent constants or memory slots.
626 These include parameters (known equivalent to parameter slots)
627 and cse'd or loop-moved constant memory addresses.
628
629 Record constant equivalents in reg_equiv_constant
630 so they will be substituted by find_reloads.
631 Record memory equivalents in reg_equiv_memory_loc so they can
632 be substituted eventually by altering the REG-rtx's. */
633
634 reg_equiv_constant = (rtx *) xmalloc (max_regno * sizeof (rtx));
635 bzero ((char *) reg_equiv_constant, max_regno * sizeof (rtx));
636 reg_equiv_memory_loc = (rtx *) xmalloc (max_regno * sizeof (rtx));
637 bzero ((char *) reg_equiv_memory_loc, max_regno * sizeof (rtx));
638 reg_equiv_mem = (rtx *) xmalloc (max_regno * sizeof (rtx));
639 bzero ((char *) reg_equiv_mem, max_regno * sizeof (rtx));
640 reg_equiv_init = (rtx *) xmalloc (max_regno * sizeof (rtx));
641 bzero ((char *) reg_equiv_init, max_regno * sizeof (rtx));
642 reg_equiv_address = (rtx *) xmalloc (max_regno * sizeof (rtx));
643 bzero ((char *) reg_equiv_address, max_regno * sizeof (rtx));
644 reg_max_ref_width = (int *) xmalloc (max_regno * sizeof (int));
645 bzero ((char *) reg_max_ref_width, max_regno * sizeof (int));
646 reg_old_renumber = (short *) xmalloc (max_regno * sizeof (short));
647 bcopy (reg_renumber, reg_old_renumber, max_regno * sizeof (short));
648 pseudo_forbidden_regs
649 = (HARD_REG_SET *) xmalloc (max_regno * sizeof (HARD_REG_SET));
650 pseudo_previous_regs
651 = (HARD_REG_SET *) xmalloc (max_regno * sizeof (HARD_REG_SET));
652
653 CLEAR_HARD_REG_SET (bad_spill_regs_global);
654 bzero ((char *) pseudo_previous_regs, max_regno * sizeof (HARD_REG_SET));
655
656 /* Look for REG_EQUIV notes; record what each pseudo is equivalent to.
657 Also find all paradoxical subregs and find largest such for each pseudo.
658 On machines with small register classes, record hard registers that
659 are used for user variables. These can never be used for spills.
660 Also look for a "constant" NOTE_INSN_SETJMP. This means that all
661 caller-saved registers must be marked live. */
662
663 for (insn = first; insn; insn = NEXT_INSN (insn))
664 {
665 rtx set = single_set (insn);
666
667 if (GET_CODE (insn) == NOTE && CONST_CALL_P (insn)
668 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
669 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
670 if (! call_used_regs[i])
671 regs_ever_live[i] = 1;
672
673 if (set != 0 && GET_CODE (SET_DEST (set)) == REG)
674 {
675 rtx note = find_reg_note (insn, REG_EQUIV, NULL_RTX);
676 if (note
677 #ifdef LEGITIMATE_PIC_OPERAND_P
678 && (! CONSTANT_P (XEXP (note, 0)) || ! flag_pic
679 || LEGITIMATE_PIC_OPERAND_P (XEXP (note, 0)))
680 #endif
681 )
682 {
683 rtx x = XEXP (note, 0);
684 i = REGNO (SET_DEST (set));
685 if (i > LAST_VIRTUAL_REGISTER)
686 {
687 if (GET_CODE (x) == MEM)
688 {
689 /* If the operand is a PLUS, the MEM may be shared,
690 so make sure we have an unshared copy here. */
691 if (GET_CODE (XEXP (x, 0)) == PLUS)
692 x = copy_rtx (x);
693
694 reg_equiv_memory_loc[i] = x;
695 }
696 else if (CONSTANT_P (x))
697 {
698 if (LEGITIMATE_CONSTANT_P (x))
699 reg_equiv_constant[i] = x;
700 else
701 reg_equiv_memory_loc[i]
702 = force_const_mem (GET_MODE (SET_DEST (set)), x);
703 }
704 else
705 continue;
706
707 /* If this register is being made equivalent to a MEM
708 and the MEM is not SET_SRC, the equivalencing insn
709 is one with the MEM as a SET_DEST and it occurs later.
710 So don't mark this insn now. */
711 if (GET_CODE (x) != MEM
712 || rtx_equal_p (SET_SRC (set), x))
713 reg_equiv_init[i] = insn;
714 }
715 }
716 }
717
718 /* If this insn is setting a MEM from a register equivalent to it,
719 this is the equivalencing insn. */
720 else if (set && GET_CODE (SET_DEST (set)) == MEM
721 && GET_CODE (SET_SRC (set)) == REG
722 && reg_equiv_memory_loc[REGNO (SET_SRC (set))]
723 && rtx_equal_p (SET_DEST (set),
724 reg_equiv_memory_loc[REGNO (SET_SRC (set))]))
725 reg_equiv_init[REGNO (SET_SRC (set))] = insn;
726
727 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
728 scan_paradoxical_subregs (PATTERN (insn));
729 }
730
731 init_elim_table ();
732
733 num_labels = max_label_num () - get_first_label_num ();
734
735 /* Allocate the tables used to store offset information at labels. */
736 /* We used to use alloca here, but the size of what it would try to
737 allocate would occasionally cause it to exceed the stack limit and
738 cause a core dump. */
739 real_known_ptr = xmalloc (num_labels);
740 real_at_ptr
741 = (int (*)[NUM_ELIMINABLE_REGS])
742 xmalloc (num_labels * NUM_ELIMINABLE_REGS * sizeof (int));
743
744 offsets_known_at = real_known_ptr - get_first_label_num ();
745 offsets_at
746 = (int (*)[NUM_ELIMINABLE_REGS]) (real_at_ptr - get_first_label_num ());
747
748 /* Alter each pseudo-reg rtx to contain its hard reg number.
749 Assign stack slots to the pseudos that lack hard regs or equivalents.
750 Do not touch virtual registers. */
751
752 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_regno; i++)
753 alter_reg (i, -1);
754
755 /* If we have some registers we think can be eliminated, scan all insns to
756 see if there is an insn that sets one of these registers to something
757 other than itself plus a constant. If so, the register cannot be
758 eliminated. Doing this scan here eliminates an extra pass through the
759 main reload loop in the most common case where register elimination
760 cannot be done. */
761 for (insn = first; insn && num_eliminable; insn = NEXT_INSN (insn))
762 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
763 || GET_CODE (insn) == CALL_INSN)
764 note_stores (PATTERN (insn), mark_not_eliminable);
765
766 #ifndef REGISTER_CONSTRAINTS
767 /* If all the pseudo regs have hard regs,
768 except for those that are never referenced,
769 we know that no reloads are needed. */
770 /* But that is not true if there are register constraints, since
771 in that case some pseudos might be in the wrong kind of hard reg. */
772
773 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
774 if (reg_renumber[i] == -1 && REG_N_REFS (i) != 0)
775 break;
776
777 if (i == max_regno && num_eliminable == 0 && ! caller_save_needed)
778 {
779 free (real_known_ptr);
780 free (real_at_ptr);
781 free (reg_equiv_constant);
782 free (reg_equiv_memory_loc);
783 free (reg_equiv_mem);
784 free (reg_equiv_init);
785 free (reg_equiv_address);
786 free (reg_max_ref_width);
787 free (reg_old_renumber);
788 free (pseudo_previous_regs);
789 free (pseudo_forbidden_regs);
790 return 0;
791 }
792 #endif
793
794 maybe_fix_stack_asms ();
795
796 insns_need_reload = 0;
797 something_needs_elimination = 0;
798
799 /* Initialize to -1, which means take the first spill register. */
800 last_spill_reg = -1;
801
802 spilled_pseudos = ALLOCA_REG_SET ();
803
804 /* Spill any hard regs that we know we can't eliminate. */
805 CLEAR_HARD_REG_SET (used_spill_regs);
806 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
807 if (! ep->can_eliminate)
808 spill_hard_reg (ep->from, dumpfile, 1);
809
810 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
811 if (frame_pointer_needed)
812 spill_hard_reg (HARD_FRAME_POINTER_REGNUM, dumpfile, 1);
813 #endif
814 finish_spills (global, dumpfile);
815
816 /* From now on, we need to emit any moves without making new pseudos. */
817 reload_in_progress = 1;
818
819 /* This loop scans the entire function each go-round
820 and repeats until one repetition spills no additional hard regs. */
821 for (;;)
822 {
823 int something_changed;
824 int did_spill;
825 struct insn_chain *chain;
826
827 HOST_WIDE_INT starting_frame_size;
828
829 /* Round size of stack frame to BIGGEST_ALIGNMENT. This must be done
830 here because the stack size may be a part of the offset computation
831 for register elimination, and there might have been new stack slots
832 created in the last iteration of this loop. */
833 assign_stack_local (BLKmode, 0, 0);
834
835 starting_frame_size = get_frame_size ();
836
837 set_initial_elim_offsets ();
838 set_initial_label_offsets ();
839
840 /* For each pseudo register that has an equivalent location defined,
841 try to eliminate any eliminable registers (such as the frame pointer)
842 assuming initial offsets for the replacement register, which
843 is the normal case.
844
845 If the resulting location is directly addressable, substitute
846 the MEM we just got directly for the old REG.
847
848 If it is not addressable but is a constant or the sum of a hard reg
849 and constant, it is probably not addressable because the constant is
850 out of range. In that case, record the address; we will generate
851 hairy code to compute the address in a register each time it is
852 needed. Similarly if it is a hard register, but one that is not
853 valid as an address register.
854
855 If the location is not addressable, but does not have one of the
856 above forms, assign a stack slot. We have to do this to avoid the
857 potential of producing lots of reloads if, e.g., a location involves
858 a pseudo that didn't get a hard register and has an equivalent memory
859 location that also involves a pseudo that didn't get a hard register.
860
861 Perhaps at some point we will improve reload_when_needed handling
862 so this problem goes away. But that's very hairy. */
863
864 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
865 if (reg_renumber[i] < 0 && reg_equiv_memory_loc[i])
866 {
867 rtx x = eliminate_regs (reg_equiv_memory_loc[i], 0, NULL_RTX);
868
869 if (strict_memory_address_p (GET_MODE (regno_reg_rtx[i]),
870 XEXP (x, 0)))
871 reg_equiv_mem[i] = x, reg_equiv_address[i] = 0;
872 else if (CONSTANT_P (XEXP (x, 0))
873 || (GET_CODE (XEXP (x, 0)) == REG
874 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
875 || (GET_CODE (XEXP (x, 0)) == PLUS
876 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
877 && (REGNO (XEXP (XEXP (x, 0), 0))
878 < FIRST_PSEUDO_REGISTER)
879 && CONSTANT_P (XEXP (XEXP (x, 0), 1))))
880 reg_equiv_address[i] = XEXP (x, 0), reg_equiv_mem[i] = 0;
881 else
882 {
883 /* Make a new stack slot. Then indicate that something
884 changed so we go back and recompute offsets for
885 eliminable registers because the allocation of memory
886 below might change some offset. reg_equiv_{mem,address}
887 will be set up for this pseudo on the next pass around
888 the loop. */
889 reg_equiv_memory_loc[i] = 0;
890 reg_equiv_init[i] = 0;
891 alter_reg (i, -1);
892 }
893 }
894
895 if (caller_save_needed)
896 setup_save_areas ();
897
898 /* If we allocated another stack slot, redo elimination bookkeeping. */
899 if (starting_frame_size != get_frame_size ())
900 continue;
901
902 if (caller_save_needed)
903 {
904 save_call_clobbered_regs ();
905 /* That might have allocated new insn_chain structures. */
906 reload_firstobj = (char *) obstack_alloc (&reload_obstack, 0);
907 }
908
909 calculate_needs_all_insns (global);
910
911 CLEAR_REG_SET (spilled_pseudos);
912 did_spill = 0;
913
914 something_changed = 0;
915
916 /* If we allocated any new memory locations, make another pass
917 since it might have changed elimination offsets. */
918 if (starting_frame_size != get_frame_size ())
919 something_changed = 1;
920
921 {
922 HARD_REG_SET to_spill;
923 CLEAR_HARD_REG_SET (to_spill);
924 update_eliminables (&to_spill);
925 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
926 if (TEST_HARD_REG_BIT (to_spill, i))
927 {
928 spill_hard_reg (i, dumpfile, 1);
929 did_spill = 1;
930 }
931 }
932
933 CLEAR_HARD_REG_SET (used_spill_regs);
934 /* Try to satisfy the needs for each insn. */
935 for (chain = insns_need_reload; chain != 0;
936 chain = chain->next_need_reload)
937 find_reload_regs (chain, dumpfile);
938
939 if (failure)
940 goto failed;
941
942 if (insns_need_reload != 0 || did_spill)
943 something_changed |= finish_spills (global, dumpfile);
944
945 if (! something_changed)
946 break;
947
948 if (caller_save_needed)
949 delete_caller_save_insns ();
950 }
951
952 /* If global-alloc was run, notify it of any register eliminations we have
953 done. */
954 if (global)
955 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
956 if (ep->can_eliminate)
957 mark_elimination (ep->from, ep->to);
958
959 /* If a pseudo has no hard reg, delete the insns that made the equivalence.
960 If that insn didn't set the register (i.e., it copied the register to
961 memory), just delete that insn instead of the equivalencing insn plus
962 anything now dead. If we call delete_dead_insn on that insn, we may
963 delete the insn that actually sets the register if the register dies
964 there and that is incorrect. */
965
966 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
967 if (reg_renumber[i] < 0 && reg_equiv_init[i] != 0
968 && GET_CODE (reg_equiv_init[i]) != NOTE)
969 {
970 if (reg_set_p (regno_reg_rtx[i], PATTERN (reg_equiv_init[i])))
971 delete_dead_insn (reg_equiv_init[i]);
972 else
973 {
974 PUT_CODE (reg_equiv_init[i], NOTE);
975 NOTE_SOURCE_FILE (reg_equiv_init[i]) = 0;
976 NOTE_LINE_NUMBER (reg_equiv_init[i]) = NOTE_INSN_DELETED;
977 }
978 }
979
980 /* Use the reload registers where necessary
981 by generating move instructions to move the must-be-register
982 values into or out of the reload registers. */
983
984 if (insns_need_reload != 0 || something_needs_elimination
985 || something_needs_operands_changed)
986 {
987 int old_frame_size = get_frame_size ();
988
989 reload_as_needed (global);
990
991 if (old_frame_size != get_frame_size ())
992 abort ();
993
994 if (num_eliminable)
995 verify_initial_elim_offsets ();
996 }
997
998 /* If we were able to eliminate the frame pointer, show that it is no
999 longer live at the start of any basic block. If it is live by
1000 virtue of being in a pseudo, that pseudo will be marked live
1001 and hence the frame pointer will be known to be live via that
1002 pseudo. */
1003
1004 if (! frame_pointer_needed)
1005 for (i = 0; i < n_basic_blocks; i++)
1006 CLEAR_REGNO_REG_SET (basic_block_live_at_start[i],
1007 HARD_FRAME_POINTER_REGNUM);
1008
1009 /* Come here (with failure set nonzero) if we can't get enough spill regs
1010 and we decide not to abort about it. */
1011 failed:
1012
1013 reload_in_progress = 0;
1014
1015 /* Now eliminate all pseudo regs by modifying them into
1016 their equivalent memory references.
1017 The REG-rtx's for the pseudos are modified in place,
1018 so all insns that used to refer to them now refer to memory.
1019
1020 For a reg that has a reg_equiv_address, all those insns
1021 were changed by reloading so that no insns refer to it any longer;
1022 but the DECL_RTL of a variable decl may refer to it,
1023 and if so this causes the debugging info to mention the variable. */
1024
1025 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1026 {
1027 rtx addr = 0;
1028 int in_struct = 0;
1029 int is_readonly = 0;
1030
1031 if (reg_equiv_memory_loc[i])
1032 {
1033 in_struct = MEM_IN_STRUCT_P (reg_equiv_memory_loc[i]);
1034 is_readonly = RTX_UNCHANGING_P (reg_equiv_memory_loc[i]);
1035 }
1036
1037 if (reg_equiv_mem[i])
1038 addr = XEXP (reg_equiv_mem[i], 0);
1039
1040 if (reg_equiv_address[i])
1041 addr = reg_equiv_address[i];
1042
1043 if (addr)
1044 {
1045 if (reg_renumber[i] < 0)
1046 {
1047 rtx reg = regno_reg_rtx[i];
1048 XEXP (reg, 0) = addr;
1049 REG_USERVAR_P (reg) = 0;
1050 RTX_UNCHANGING_P (reg) = is_readonly;
1051 MEM_IN_STRUCT_P (reg) = in_struct;
1052 /* We have no alias information about this newly created
1053 MEM. */
1054 MEM_ALIAS_SET (reg) = 0;
1055 PUT_CODE (reg, MEM);
1056 }
1057 else if (reg_equiv_mem[i])
1058 XEXP (reg_equiv_mem[i], 0) = addr;
1059 }
1060 }
1061
1062 /* We've finished reloading. reload_completed must be set before we
1063 perform instruction splitting below. */
1064 reload_completed = 1;
1065
1066 /* Make a pass over all the insns and delete all USEs which we inserted
1067 only to tag a REG_EQUAL note on them. Remove all REG_DEAD and REG_UNUSED
1068 notes. Delete all CLOBBER insns and simplify (subreg (reg)) operands. */
1069
1070 for (insn = first; insn; insn = NEXT_INSN (insn))
1071 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
1072 {
1073 rtx *pnote;
1074
1075 if ((GET_CODE (PATTERN (insn)) == USE
1076 && find_reg_note (insn, REG_EQUAL, NULL_RTX))
1077 || GET_CODE (PATTERN (insn)) == CLOBBER)
1078 {
1079 PUT_CODE (insn, NOTE);
1080 NOTE_SOURCE_FILE (insn) = 0;
1081 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1082 continue;
1083 }
1084
1085 pnote = &REG_NOTES (insn);
1086 while (*pnote != 0)
1087 {
1088 if (REG_NOTE_KIND (*pnote) == REG_DEAD
1089 || REG_NOTE_KIND (*pnote) == REG_UNUSED)
1090 *pnote = XEXP (*pnote, 1);
1091 else
1092 pnote = &XEXP (*pnote, 1);
1093 }
1094
1095 /* And simplify (subreg (reg)) if it appears as an operand. */
1096 cleanup_subreg_operands (insn);
1097
1098 /* If optimizing and we are performing instruction scheduling after
1099 reload, then go ahead and split insns now since we are about to
1100 recompute flow information anyway. */
1101 if (optimize && flag_schedule_insns_after_reload)
1102 {
1103 rtx last, first;
1104
1105 last = try_split (PATTERN (insn), insn, 1);
1106
1107 if (last != insn)
1108 {
1109 PUT_CODE (insn, NOTE);
1110 NOTE_SOURCE_FILE (insn) = 0;
1111 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1112 }
1113 }
1114
1115 }
1116
1117 /* If we are doing stack checking, give a warning if this function's
1118 frame size is larger than we expect. */
1119 if (flag_stack_check && ! STACK_CHECK_BUILTIN)
1120 {
1121 HOST_WIDE_INT size = get_frame_size () + STACK_CHECK_FIXED_FRAME_SIZE;
1122
1123 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1124 if (regs_ever_live[i] && ! fixed_regs[i] && call_used_regs[i])
1125 size += UNITS_PER_WORD;
1126
1127 if (size > STACK_CHECK_MAX_FRAME_SIZE)
1128 warning ("frame size too large for reliable stack checking");
1129 }
1130
1131 /* Indicate that we no longer have known memory locations or constants. */
1132 if (reg_equiv_constant)
1133 free (reg_equiv_constant);
1134 reg_equiv_constant = 0;
1135 if (reg_equiv_memory_loc)
1136 free (reg_equiv_memory_loc);
1137 reg_equiv_memory_loc = 0;
1138
1139 if (real_known_ptr)
1140 free (real_known_ptr);
1141 if (real_at_ptr)
1142 free (real_at_ptr);
1143
1144 free (reg_equiv_mem);
1145 free (reg_equiv_init);
1146 free (reg_equiv_address);
1147 free (reg_max_ref_width);
1148 free (reg_old_renumber);
1149 free (pseudo_previous_regs);
1150 free (pseudo_forbidden_regs);
1151
1152 FREE_REG_SET (spilled_pseudos);
1153
1154 CLEAR_HARD_REG_SET (used_spill_regs);
1155 for (i = 0; i < n_spills; i++)
1156 SET_HARD_REG_BIT (used_spill_regs, spill_regs[i]);
1157
1158 /* Free all the insn_chain structures at once. */
1159 obstack_free (&reload_obstack, reload_startobj);
1160 unused_insn_chains = 0;
1161
1162 return failure;
1163 }
1164
1165 /* Yet another special case. Unfortunately, reg-stack forces people to
1166 write incorrect clobbers in asm statements. These clobbers must not
1167 cause the register to appear in bad_spill_regs, otherwise we'll call
1168 fatal_insn later. We clear the corresponding regnos in the live
1169 register sets to avoid this.
1170 The whole thing is rather sick, I'm afraid. */
1171 static void
1172 maybe_fix_stack_asms ()
1173 {
1174 #ifdef STACK_REGS
1175 char *constraints[MAX_RECOG_OPERANDS];
1176 enum machine_mode operand_mode[MAX_RECOG_OPERANDS];
1177 struct insn_chain *chain;
1178
1179 for (chain = reload_insn_chain; chain != 0; chain = chain->next)
1180 {
1181 int i, noperands;
1182 HARD_REG_SET clobbered, allowed;
1183 rtx pat;
1184
1185 if (GET_RTX_CLASS (GET_CODE (chain->insn)) != 'i'
1186 || (noperands = asm_noperands (PATTERN (chain->insn))) < 0)
1187 continue;
1188 pat = PATTERN (chain->insn);
1189 if (GET_CODE (pat) != PARALLEL)
1190 continue;
1191
1192 CLEAR_HARD_REG_SET (clobbered);
1193 CLEAR_HARD_REG_SET (allowed);
1194
1195 /* First, make a mask of all stack regs that are clobbered. */
1196 for (i = 0; i < XVECLEN (pat, 0); i++)
1197 {
1198 rtx t = XVECEXP (pat, 0, i);
1199 if (GET_CODE (t) == CLOBBER && STACK_REG_P (XEXP (t, 0)))
1200 SET_HARD_REG_BIT (clobbered, REGNO (XEXP (t, 0)));
1201 }
1202
1203 /* Get the operand values and constraints out of the insn. */
1204 decode_asm_operands (pat, recog_operand, recog_operand_loc,
1205 constraints, operand_mode);
1206
1207 /* For every operand, see what registers are allowed. */
1208 for (i = 0; i < noperands; i++)
1209 {
1210 char *p = constraints[i];
1211 /* For every alternative, we compute the class of registers allowed
1212 for reloading in CLS, and merge its contents into the reg set
1213 ALLOWED. */
1214 int cls = (int) NO_REGS;
1215
1216 for (;;)
1217 {
1218 char c = *p++;
1219
1220 if (c == '\0' || c == ',' || c == '#')
1221 {
1222 /* End of one alternative - mark the regs in the current
1223 class, and reset the class. */
1224 IOR_HARD_REG_SET (allowed, reg_class_contents[cls]);
1225 cls = NO_REGS;
1226 if (c == '#')
1227 do {
1228 c = *p++;
1229 } while (c != '\0' && c != ',');
1230 if (c == '\0')
1231 break;
1232 continue;
1233 }
1234
1235 switch (c)
1236 {
1237 case '=': case '+': case '*': case '%': case '?': case '!':
1238 case '0': case '1': case '2': case '3': case '4': case 'm':
1239 case '<': case '>': case 'V': case 'o': case '&': case 'E':
1240 case 'F': case 's': case 'i': case 'n': case 'X': case 'I':
1241 case 'J': case 'K': case 'L': case 'M': case 'N': case 'O':
1242 case 'P':
1243 #ifdef EXTRA_CONSTRAINT
1244 case 'Q': case 'R': case 'S': case 'T': case 'U':
1245 #endif
1246 break;
1247
1248 case 'p':
1249 cls = (int) reg_class_subunion[cls][(int) BASE_REG_CLASS];
1250 break;
1251
1252 case 'g':
1253 case 'r':
1254 cls = (int) reg_class_subunion[cls][(int) GENERAL_REGS];
1255 break;
1256
1257 default:
1258 cls = (int) reg_class_subunion[cls][(int) REG_CLASS_FROM_LETTER (c)];
1259
1260 }
1261 }
1262 }
1263 /* Those of the registers which are clobbered, but allowed by the
1264 constraints, must be usable as reload registers. So clear them
1265 out of the life information. */
1266 AND_HARD_REG_SET (allowed, clobbered);
1267 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1268 if (TEST_HARD_REG_BIT (allowed, i))
1269 {
1270 CLEAR_REGNO_REG_SET (chain->live_before, i);
1271 CLEAR_REGNO_REG_SET (chain->live_after, i);
1272 }
1273 }
1274
1275 #endif
1276 }
1277
1278 \f
1279 /* Walk the chain of insns, and determine for each whether it needs reloads
1280 and/or eliminations. Build the corresponding insns_need_reload list, and
1281 set something_needs_elimination as appropriate. */
1282 static void
1283 calculate_needs_all_insns (global)
1284 int global;
1285 {
1286 struct insn_chain **pprev_reload = &insns_need_reload;
1287 struct insn_chain **pchain;
1288
1289 something_needs_elimination = 0;
1290
1291 for (pchain = &reload_insn_chain; *pchain != 0; pchain = &(*pchain)->next)
1292 {
1293 rtx insn;
1294 struct insn_chain *chain;
1295
1296 chain = *pchain;
1297 insn = chain->insn;
1298
1299 /* If this is a label, a JUMP_INSN, or has REG_NOTES (which might
1300 include REG_LABEL), we need to see what effects this has on the
1301 known offsets at labels. */
1302
1303 if (GET_CODE (insn) == CODE_LABEL || GET_CODE (insn) == JUMP_INSN
1304 || (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
1305 && REG_NOTES (insn) != 0))
1306 set_label_offsets (insn, insn, 0);
1307
1308 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
1309 {
1310 rtx old_body = PATTERN (insn);
1311 int old_code = INSN_CODE (insn);
1312 rtx old_notes = REG_NOTES (insn);
1313 int did_elimination = 0;
1314 int operands_changed = 0;
1315
1316 /* If needed, eliminate any eliminable registers. */
1317 if (num_eliminable)
1318 did_elimination = eliminate_regs_in_insn (insn, 0);
1319
1320 /* Analyze the instruction. */
1321 operands_changed = find_reloads (insn, 0, spill_indirect_levels,
1322 global, spill_reg_order);
1323
1324 /* If a no-op set needs more than one reload, this is likely
1325 to be something that needs input address reloads. We
1326 can't get rid of this cleanly later, and it is of no use
1327 anyway, so discard it now.
1328 We only do this when expensive_optimizations is enabled,
1329 since this complements reload inheritance / output
1330 reload deletion, and it can make debugging harder. */
1331 if (flag_expensive_optimizations && n_reloads > 1)
1332 {
1333 rtx set = single_set (insn);
1334 if (set
1335 && SET_SRC (set) == SET_DEST (set)
1336 && GET_CODE (SET_SRC (set)) == REG
1337 && REGNO (SET_SRC (set)) >= FIRST_PSEUDO_REGISTER)
1338 {
1339 PUT_CODE (insn, NOTE);
1340 NOTE_SOURCE_FILE (insn) = 0;
1341 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1342 continue;
1343 }
1344 }
1345 if (num_eliminable)
1346 update_eliminable_offsets ();
1347
1348 /* Remember for later shortcuts which insns had any reloads or
1349 register eliminations. */
1350 chain->need_elim = did_elimination;
1351 chain->need_reload = n_reloads > 0;
1352 chain->need_operand_change = operands_changed;
1353
1354 /* Discard any register replacements done. */
1355 if (did_elimination)
1356 {
1357 obstack_free (&reload_obstack, reload_firstobj);
1358 PATTERN (insn) = old_body;
1359 INSN_CODE (insn) = old_code;
1360 REG_NOTES (insn) = old_notes;
1361 something_needs_elimination = 1;
1362 }
1363
1364 something_needs_operands_changed |= operands_changed;
1365
1366 if (n_reloads != 0)
1367 {
1368 *pprev_reload = chain;
1369 pprev_reload = &chain->next_need_reload;
1370
1371 calculate_needs (chain);
1372 }
1373 }
1374 }
1375 *pprev_reload = 0;
1376 }
1377
1378 /* Compute the maximum number of additional registers needed by one instruction,
1379 given by CHAIN. Collect information separately for each class of regs.
1380
1381 To compute the number of reload registers of each class needed for an
1382 insn, we must simulate what choose_reload_regs can do. We do this by
1383 splitting an insn into an "input" and an "output" part. RELOAD_OTHER
1384 reloads are used in both. The input part uses those reloads,
1385 RELOAD_FOR_INPUT reloads, which must be live over the entire input section
1386 of reloads, and the maximum of all the RELOAD_FOR_INPUT_ADDRESS and
1387 RELOAD_FOR_OPERAND_ADDRESS reloads, which conflict with the inputs.
1388
1389 The registers needed for output are RELOAD_OTHER and RELOAD_FOR_OUTPUT,
1390 which are live for the entire output portion, and the maximum of all the
1391 RELOAD_FOR_OUTPUT_ADDRESS reloads for each operand.
1392
1393 The total number of registers needed is the maximum of the
1394 inputs and outputs. */
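/* Editor's note -- an illustrative worked example, not from the original
   file. Suppose an insn has, all in the same class and each needing one
   register: two RELOAD_FOR_INPUT reloads, one RELOAD_FOR_OPERAND_ADDRESS
   reload, and one RELOAD_FOR_INPUT_ADDRESS reload. Following the merge
   done below, the input section needs MAX (2 + 1 + 0, MAX (1, 1) + 2) = 3
   registers, the output section needs none, and chain->need records
   3 registers of that class for this insn.  */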
1395
1396 static void
1397 calculate_needs (chain)
1398 struct insn_chain *chain;
1399 {
1400 int i;
1401
1402 /* Each `struct needs' corresponds to one RELOAD_... type. */
1403 struct {
1404 struct needs other;
1405 struct needs input;
1406 struct needs output;
1407 struct needs insn;
1408 struct needs other_addr;
1409 struct needs op_addr;
1410 struct needs op_addr_reload;
1411 struct needs in_addr[MAX_RECOG_OPERANDS];
1412 struct needs in_addr_addr[MAX_RECOG_OPERANDS];
1413 struct needs out_addr[MAX_RECOG_OPERANDS];
1414 struct needs out_addr_addr[MAX_RECOG_OPERANDS];
1415 } insn_needs;
1416
1417 bzero ((char *) chain->group_size, sizeof chain->group_size);
1418 for (i = 0; i < N_REG_CLASSES; i++)
1419 chain->group_mode[i] = VOIDmode;
1420 bzero ((char *) &insn_needs, sizeof insn_needs);
1421
1422 /* Count each reload once in every class
1423 containing the reload's own class. */
1424
1425 for (i = 0; i < n_reloads; i++)
1426 {
1427 register enum reg_class *p;
1428 enum reg_class class = reload_reg_class[i];
1429 int size;
1430 enum machine_mode mode;
1431 struct needs *this_needs;
1432
1433 /* Don't count the dummy reloads, for which one of the
1434 regs mentioned in the insn can be used for reloading.
1435 Don't count optional reloads.
1436 Don't count reloads that got combined with others. */
1437 if (reload_reg_rtx[i] != 0
1438 || reload_optional[i] != 0
1439 || (reload_out[i] == 0 && reload_in[i] == 0
1440 && ! reload_secondary_p[i]))
1441 continue;
1442
1443 mode = reload_inmode[i];
1444 if (GET_MODE_SIZE (reload_outmode[i]) > GET_MODE_SIZE (mode))
1445 mode = reload_outmode[i];
1446 size = CLASS_MAX_NREGS (class, mode);
1447
1448 /* Decide which time-of-use to count this reload for. */
1449 switch (reload_when_needed[i])
1450 {
1451 case RELOAD_OTHER:
1452 this_needs = &insn_needs.other;
1453 break;
1454 case RELOAD_FOR_INPUT:
1455 this_needs = &insn_needs.input;
1456 break;
1457 case RELOAD_FOR_OUTPUT:
1458 this_needs = &insn_needs.output;
1459 break;
1460 case RELOAD_FOR_INSN:
1461 this_needs = &insn_needs.insn;
1462 break;
1463 case RELOAD_FOR_OTHER_ADDRESS:
1464 this_needs = &insn_needs.other_addr;
1465 break;
1466 case RELOAD_FOR_INPUT_ADDRESS:
1467 this_needs = &insn_needs.in_addr[reload_opnum[i]];
1468 break;
1469 case RELOAD_FOR_INPADDR_ADDRESS:
1470 this_needs = &insn_needs.in_addr_addr[reload_opnum[i]];
1471 break;
1472 case RELOAD_FOR_OUTPUT_ADDRESS:
1473 this_needs = &insn_needs.out_addr[reload_opnum[i]];
1474 break;
1475 case RELOAD_FOR_OUTADDR_ADDRESS:
1476 this_needs = &insn_needs.out_addr_addr[reload_opnum[i]];
1477 break;
1478 case RELOAD_FOR_OPERAND_ADDRESS:
1479 this_needs = &insn_needs.op_addr;
1480 break;
1481 case RELOAD_FOR_OPADDR_ADDR:
1482 this_needs = &insn_needs.op_addr_reload;
1483 break;
1484 }
1485
1486 if (size > 1)
1487 {
1488 enum machine_mode other_mode, allocate_mode;
1489
1490 /* Count number of groups needed separately from
1491 number of individual regs needed. */
1492 this_needs->groups[(int) class]++;
1493 p = reg_class_superclasses[(int) class];
1494 while (*p != LIM_REG_CLASSES)
1495 this_needs->groups[(int) *p++]++;
1496
1497 /* Record size and mode of a group of this class. */
1498 /* If more than one size group is needed,
1499 make all groups the largest needed size. */
1500 if (chain->group_size[(int) class] < size)
1501 {
1502 other_mode = chain->group_mode[(int) class];
1503 allocate_mode = mode;
1504
1505 chain->group_size[(int) class] = size;
1506 chain->group_mode[(int) class] = mode;
1507 }
1508 else
1509 {
1510 other_mode = mode;
1511 allocate_mode = chain->group_mode[(int) class];
1512 }
1513
1514 /* Crash if two dissimilar machine modes both need
1515 groups of consecutive regs of the same class. */
1516
1517 if (other_mode != VOIDmode && other_mode != allocate_mode
1518 && ! modes_equiv_for_class_p (allocate_mode,
1519 other_mode, class))
1520 fatal_insn ("Two dissimilar machine modes both need groups of consecutive regs of the same class",
1521 chain->insn);
1522 }
1523 else if (size == 1)
1524 {
1525 this_needs->regs[(unsigned char)reload_nongroup[i]][(int) class] += 1;
1526 p = reg_class_superclasses[(int) class];
1527 while (*p != LIM_REG_CLASSES)
1528 this_needs->regs[(unsigned char)reload_nongroup[i]][(int) *p++] += 1;
1529 }
1530 else
1531 abort ();
1532 }
1533
1534 /* All reloads have been counted for this insn;
1535 now merge the various times of use.
1536 This sets insn_needs, etc., to the maximum total number
1537 of registers needed at any point in this insn. */
1538
1539 for (i = 0; i < N_REG_CLASSES; i++)
1540 {
1541 int j, in_max, out_max;
1542
1543 /* Compute normal and nongroup needs. */
1544 for (j = 0; j <= 1; j++)
1545 {
1546 int k;
1547 for (in_max = 0, out_max = 0, k = 0; k < reload_n_operands; k++)
1548 {
1549 in_max = MAX (in_max,
1550 (insn_needs.in_addr[k].regs[j][i]
1551 + insn_needs.in_addr_addr[k].regs[j][i]));
1552 out_max = MAX (out_max, insn_needs.out_addr[k].regs[j][i]);
1553 out_max = MAX (out_max,
1554 insn_needs.out_addr_addr[k].regs[j][i]);
1555 }
1556
1557 /* RELOAD_FOR_INSN reloads conflict with inputs, outputs,
1558 and operand addresses but not things used to reload
1559 them. Similarly, RELOAD_FOR_OPERAND_ADDRESS reloads
1560 don't conflict with things needed to reload inputs or
1561 outputs. */
1562
1563 in_max = MAX (MAX (insn_needs.op_addr.regs[j][i],
1564 insn_needs.op_addr_reload.regs[j][i]),
1565 in_max);
1566
1567 out_max = MAX (out_max, insn_needs.insn.regs[j][i]);
1568
1569 insn_needs.input.regs[j][i]
1570 = MAX (insn_needs.input.regs[j][i]
1571 + insn_needs.op_addr.regs[j][i]
1572 + insn_needs.insn.regs[j][i],
1573 in_max + insn_needs.input.regs[j][i]);
1574
1575 insn_needs.output.regs[j][i] += out_max;
1576 insn_needs.other.regs[j][i]
1577 += MAX (MAX (insn_needs.input.regs[j][i],
1578 insn_needs.output.regs[j][i]),
1579 insn_needs.other_addr.regs[j][i]);
1580
1581 }
1582
1583 /* Now compute group needs. */
1584 for (in_max = 0, out_max = 0, j = 0; j < reload_n_operands; j++)
1585 {
1586 in_max = MAX (in_max, insn_needs.in_addr[j].groups[i]);
1587 in_max = MAX (in_max, insn_needs.in_addr_addr[j].groups[i]);
1588 out_max = MAX (out_max, insn_needs.out_addr[j].groups[i]);
1589 out_max = MAX (out_max, insn_needs.out_addr_addr[j].groups[i]);
1590 }
1591
1592 in_max = MAX (MAX (insn_needs.op_addr.groups[i],
1593 insn_needs.op_addr_reload.groups[i]),
1594 in_max);
1595 out_max = MAX (out_max, insn_needs.insn.groups[i]);
1596
1597 insn_needs.input.groups[i]
1598 = MAX (insn_needs.input.groups[i]
1599 + insn_needs.op_addr.groups[i]
1600 + insn_needs.insn.groups[i],
1601 in_max + insn_needs.input.groups[i]);
1602
1603 insn_needs.output.groups[i] += out_max;
1604 insn_needs.other.groups[i]
1605 += MAX (MAX (insn_needs.input.groups[i],
1606 insn_needs.output.groups[i]),
1607 insn_needs.other_addr.groups[i]);
1608 }
1609
1610 /* Record the needs for later. */
1611 chain->need = insn_needs.other;
1612 }
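/* Illustrative sketch (not part of the pass): for one class I and one of
   the regs[j][i] counters, the merge above computes, with in_max already
   folded together from the operand-address maxima,

     input  = MAX (input + op_addr + insn, in_max + input)
     output = output + out_max
     other  = other + MAX (MAX (input, output), other_addr)

   so with made-up numbers input = 2, op_addr = 1, insn = 0 and in_max = 2,
   the merged input need is MAX (3, 4) = 4.  The groups[i] counters are
   merged the same way.  */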
1613 \f
1614 /* Find a group of exactly 2 registers.
1615
1616 First try to fill out the group by spilling a single register which
1617 would allow completion of the group.
1618
1619 Then try to create a new group from a pair of registers, neither of
1620 which are explicitly used.
1621
1622 Then try to create a group from any pair of registers. */
1623
1624 static void
1625 find_tworeg_group (chain, class, dumpfile)
1626 struct insn_chain *chain;
1627 int class;
1628 FILE *dumpfile;
1629 {
1630 int i;
1631 /* First, look for a register that will complete a group. */
1632 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1633 {
1634 int j, other;
1635
1636 j = potential_reload_regs[i];
1637 if (j >= 0 && ! TEST_HARD_REG_BIT (bad_spill_regs, j)
1638 && ((j > 0 && (other = j - 1, spill_reg_order[other] >= 0)
1639 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1640 && TEST_HARD_REG_BIT (reg_class_contents[class], other)
1641 && HARD_REGNO_MODE_OK (other, chain->group_mode[class])
1642 && ! TEST_HARD_REG_BIT (chain->counted_for_nongroups, other)
1643 /* We don't want one part of another group.
1644 We could get "two groups" that overlap! */
1645 && ! TEST_HARD_REG_BIT (chain->counted_for_groups, other))
1646 || (j < FIRST_PSEUDO_REGISTER - 1
1647 && (other = j + 1, spill_reg_order[other] >= 0)
1648 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1649 && TEST_HARD_REG_BIT (reg_class_contents[class], other)
1650 && HARD_REGNO_MODE_OK (j, chain->group_mode[class])
1651 && ! TEST_HARD_REG_BIT (chain->counted_for_nongroups, other)
1652 && ! TEST_HARD_REG_BIT (chain->counted_for_groups, other))))
1653 {
1654 register enum reg_class *p;
1655
1656 /* We have found one that will complete a group,
1657 so count off one group as provided. */
1658 chain->need.groups[class]--;
1659 p = reg_class_superclasses[class];
1660 while (*p != LIM_REG_CLASSES)
1661 {
1662 if (chain->group_size [(int) *p] <= chain->group_size [class])
1663 chain->need.groups[(int) *p]--;
1664 p++;
1665 }
1666
1667 /* Indicate both these regs are part of a group. */
1668 SET_HARD_REG_BIT (chain->counted_for_groups, j);
1669 SET_HARD_REG_BIT (chain->counted_for_groups, other);
1670 break;
1671 }
1672 }
1673 /* We can't complete a group, so start one. */
1674 if (i == FIRST_PSEUDO_REGISTER)
1675 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1676 {
1677 int j, k;
1678 j = potential_reload_regs[i];
1679 /* Verify that J+1 is a potential reload reg. */
1680 for (k = 0; k < FIRST_PSEUDO_REGISTER; k++)
1681 if (potential_reload_regs[k] == j + 1)
1682 break;
1683 if (j >= 0 && j + 1 < FIRST_PSEUDO_REGISTER
1684 && k < FIRST_PSEUDO_REGISTER
1685 && spill_reg_order[j] < 0 && spill_reg_order[j + 1] < 0
1686 && TEST_HARD_REG_BIT (reg_class_contents[class], j)
1687 && TEST_HARD_REG_BIT (reg_class_contents[class], j + 1)
1688 && HARD_REGNO_MODE_OK (j, chain->group_mode[class])
1689 && ! TEST_HARD_REG_BIT (chain->counted_for_nongroups, j + 1)
1690 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + 1))
1691 break;
1692 }
1693
1694 /* I should be the index in potential_reload_regs
1695 of the new reload reg we have found. */
1696
1697 new_spill_reg (chain, i, class, 0, dumpfile);
1698 }
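/* As a concrete (made-up) illustration of the first strategy above: with a
   group size of 2, if hard reg 5 is already a spill reg and is not reserved
   for another group or nongroup, and its neighbor reg 4 is still free, in
   the right class and roughly mode-OK, spilling reg 4 completes the group
   {4, 5}; only when no such neighbor exists do we fall back to spilling a
   fresh adjacent pair.  */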
1699
1700 /* Find a group of more than 2 registers.
1701 Look for a sufficient sequence of unspilled registers, and spill them all
1702 at once. */
1703
1704 static void
1705 find_group (chain, class, dumpfile)
1706 struct insn_chain *chain;
1707 int class;
1708 FILE *dumpfile;
1709 {
1710 int i;
1711
1712 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1713 {
1714 int j = potential_reload_regs[i];
1715
1716 if (j >= 0
1717 && j + chain->group_size[class] <= FIRST_PSEUDO_REGISTER
1718 && HARD_REGNO_MODE_OK (j, chain->group_mode[class]))
1719 {
1720 int k;
1721 /* Check each reg in the sequence. */
1722 for (k = 0; k < chain->group_size[class]; k++)
1723 if (! (spill_reg_order[j + k] < 0
1724 && ! TEST_HARD_REG_BIT (bad_spill_regs, j + k)
1725 && TEST_HARD_REG_BIT (reg_class_contents[class], j + k)))
1726 break;
1727 /* We got a full sequence, so spill them all. */
1728 if (k == chain->group_size[class])
1729 {
1730 register enum reg_class *p;
1731 for (k = 0; k < chain->group_size[class]; k++)
1732 {
1733 int idx;
1734 SET_HARD_REG_BIT (chain->counted_for_groups, j + k);
1735 for (idx = 0; idx < FIRST_PSEUDO_REGISTER; idx++)
1736 if (potential_reload_regs[idx] == j + k)
1737 break;
1738 new_spill_reg (chain, idx, class, 0, dumpfile);
1739 }
1740
1741 /* We have found one that will complete a group,
1742 so count off one group as provided. */
1743 chain->need.groups[class]--;
1744 p = reg_class_superclasses[class];
1745 while (*p != LIM_REG_CLASSES)
1746 {
1747 if (chain->group_size [(int) *p]
1748 <= chain->group_size [class])
1749 chain->need.groups[(int) *p]--;
1750 p++;
1751 }
1752 return;
1753 }
1754 }
1755 }
1756 /* There are no groups left. */
1757 spill_failure (chain->insn);
1758 failure = 1;
1759 }
1760
1761 /* If pseudo REG conflicts with one of our reload registers, mark it as
1762 spilled. */
1763 static void
1764 maybe_mark_pseudo_spilled (reg)
1765 int reg;
1766 {
1767 int i;
1768 int r = reg_renumber[reg];
1769 int nregs;
1770
1771 if (r < 0)
1772 abort ();
1773 nregs = HARD_REGNO_NREGS (r, PSEUDO_REGNO_MODE (reg));
1774 for (i = 0; i < n_spills; i++)
1775 if (r <= spill_regs[i] && r + nregs > spill_regs[i])
1776 {
1777 SET_REGNO_REG_SET (spilled_pseudos, reg);
1778 return;
1779 }
1780 }
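/* Illustrative sketch (not part of the pass): the loop above asks whether
   any spill reg falls inside the half-open range [R, R + NREGS) of hard
   regs occupied by the pseudo's home.  Reduced to plain integers the test
   is an interval-membership check; the function name is invented.  */
#if 0
static int
hard_reg_range_contains_p (r, nregs, regno)
     int r, nregs, regno;
{
  return r <= regno && r + nregs > regno;
}
#endif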
1781
1782 /* Find more reload regs to satisfy the remaining need of an insn, which
1783 is given by CHAIN.
1784 Do it by ascending class number, since otherwise a reg
1785 might be spilled for a big class and might fail to count
1786 for a smaller class even though it belongs to that class.
1787
1788 Count spilled regs in `spills', and add entries to
1789 `spill_regs' and `spill_reg_order'.
1790
1791 ??? Note there is a problem here.
1792 When there is a need for a group in a high-numbered class,
1793 and also need for non-group regs that come from a lower class,
1794 the non-group regs are chosen first. If there aren't many regs,
1795 they might leave no room for a group.
1796
1797 This was happening on the 386. To fix it, we added the code
1798 that calls possible_group_p, so that the lower class won't
1799 break up the last possible group.
1800
1801 Really fixing the problem would require changes above
1802 in counting the regs already spilled, and in choose_reload_regs.
1803 It might be hard to avoid introducing bugs there. */
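/* As a concrete (made-up) illustration of the ??? note above: suppose regs
   4 and 5 are the only remaining adjacent pair usable for a group in the
   larger class, while reg 3 can also satisfy a single-register need in a
   smaller class that is processed first.  Taking reg 4 for the single need
   would destroy the only possible group; possible_group_p is what steers
   that choice to reg 3 instead.  */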
1804
1805 static void
1806 find_reload_regs (chain, dumpfile)
1807 struct insn_chain *chain;
1808 FILE *dumpfile;
1809 {
1810 int i, class;
1811 short *group_needs = chain->need.groups;
1812 short *simple_needs = chain->need.regs[0];
1813 short *nongroup_needs = chain->need.regs[1];
1814
1815 if (dumpfile)
1816 fprintf (dumpfile, "Spilling for insn %d.\n", INSN_UID (chain->insn));
1817
1818 /* Compute the order of preference for hard registers to spill.
1819 Store them by decreasing preference in potential_reload_regs. */
1820
1821 order_regs_for_reload (chain);
1822
1823 /* So far, no hard regs have been spilled. */
1824 n_spills = 0;
1825 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1826 spill_reg_order[i] = -1;
1827
1828 CLEAR_HARD_REG_SET (chain->used_spill_regs);
1829 CLEAR_HARD_REG_SET (chain->counted_for_groups);
1830 CLEAR_HARD_REG_SET (chain->counted_for_nongroups);
1831
1832 for (class = 0; class < N_REG_CLASSES; class++)
1833 {
1834 /* First get the groups of registers.
1835 If we got single registers first, we might fragment
1836 possible groups. */
1837 while (group_needs[class] > 0)
1838 {
1839 /* If any single spilled regs happen to form groups,
1840 count them now. Maybe we don't really need
1841 to spill another group. */
1842 count_possible_groups (chain, class);
1843
1844 if (group_needs[class] <= 0)
1845 break;
1846
1847 /* Groups of size 2, the only groups used on most machines,
1848 are treated specially. */
1849 if (chain->group_size[class] == 2)
1850 find_tworeg_group (chain, class, dumpfile);
1851 else
1852 find_group (chain, class, dumpfile);
1853 if (failure)
1854 return;
1855 }
1856
1857 /* Now similarly satisfy all need for single registers. */
1858
1859 while (simple_needs[class] > 0 || nongroup_needs[class] > 0)
1860 {
1861 /* If we spilled enough regs, but they weren't counted
1862 against the non-group need, see if we can count them now.
1863 If so, we can avoid some actual spilling. */
1864 if (simple_needs[class] <= 0 && nongroup_needs[class] > 0)
1865 for (i = 0; i < n_spills; i++)
1866 {
1867 int regno = spill_regs[i];
1868 if (TEST_HARD_REG_BIT (reg_class_contents[class], regno)
1869 && !TEST_HARD_REG_BIT (chain->counted_for_groups, regno)
1870 && !TEST_HARD_REG_BIT (chain->counted_for_nongroups, regno)
1871 && nongroup_needs[class] > 0)
1872 {
1873 register enum reg_class *p;
1874
1875 SET_HARD_REG_BIT (chain->counted_for_nongroups, regno);
1876 nongroup_needs[class]--;
1877 p = reg_class_superclasses[class];
1878 while (*p != LIM_REG_CLASSES)
1879 nongroup_needs[(int) *p++]--;
1880 }
1881 }
1882
1883 if (simple_needs[class] <= 0 && nongroup_needs[class] <= 0)
1884 break;
1885
1886 /* Consider the potential reload regs that aren't
1887 yet in use as reload regs, in order of preference.
1888 Find the most preferred one that's in this class. */
1889
1890 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1891 {
1892 int regno = potential_reload_regs[i];
1893 if (regno >= 0
1894 && TEST_HARD_REG_BIT (reg_class_contents[class], regno)
1895 /* If this reg will not be available for groups,
1896 pick one that does not foreclose possible groups.
1897 This is a kludge, and not very general,
1898 but it should be sufficient to make the 386 work,
1899 and the problem should not occur on machines with
1900 more registers. */
1901 && (nongroup_needs[class] == 0
1902 || possible_group_p (chain, regno)))
1903 break;
1904 }
1905
1906 /* If we couldn't get a register, try to get one even if we
1907 might foreclose possible groups. This may cause problems
1908 later, but that's better than aborting now, since it is
1909 possible that we will, in fact, be able to form the needed
1910 group even with this allocation. */
1911
1912 if (i >= FIRST_PSEUDO_REGISTER
1913 && asm_noperands (chain->insn) < 0)
1914 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1915 if (potential_reload_regs[i] >= 0
1916 && TEST_HARD_REG_BIT (reg_class_contents[class],
1917 potential_reload_regs[i]))
1918 break;
1919
1920 /* I should be the index in potential_reload_regs
1921 of the new reload reg we have found. */
1922
1923 new_spill_reg (chain, i, class, 1, dumpfile);
1924 if (failure)
1925 return;
1926 }
1927 }
1928
1929 /* We know which hard regs to use, now mark the pseudos that live in them
1930 as needing to be kicked out. */
1931 EXECUTE_IF_SET_IN_REG_SET
1932 (chain->live_before, FIRST_PSEUDO_REGISTER, i,
1933 {
1934 maybe_mark_pseudo_spilled (i);
1935 });
1936 EXECUTE_IF_SET_IN_REG_SET
1937 (chain->live_after, FIRST_PSEUDO_REGISTER, i,
1938 {
1939 maybe_mark_pseudo_spilled (i);
1940 });
1941
1942 IOR_HARD_REG_SET (used_spill_regs, chain->used_spill_regs);
1943 }
1944
1945 void
1946 dump_needs (chain, dumpfile)
1947 struct insn_chain *chain;
1948 FILE *dumpfile;
1949 {
1950 static char *reg_class_names[] = REG_CLASS_NAMES;
1951 int i;
1952 struct needs *n = &chain->need;
1953
1954 for (i = 0; i < N_REG_CLASSES; i++)
1955 {
1956 if (n->regs[i][0] > 0)
1957 fprintf (dumpfile,
1958 ";; Need %d reg%s of class %s.\n",
1959 n->regs[i][0], n->regs[i][0] == 1 ? "" : "s",
1960 reg_class_names[i]);
1961 if (n->regs[i][1] > 0)
1962 fprintf (dumpfile,
1963 ";; Need %d nongroup reg%s of class %s.\n",
1964 n->regs[i][1], n->regs[i][1] == 1 ? "" : "s",
1965 reg_class_names[i]);
1966 if (n->groups[i] > 0)
1967 fprintf (dumpfile,
1968 ";; Need %d group%s (%smode) of class %s.\n",
1969 n->groups[i], n->groups[i] == 1 ? "" : "s",
1970 mode_name[(int) chain->group_mode[i]],
1971 reg_class_names[i]);
1972 }
1973 }
1974 \f
1975 /* Delete all insns that were inserted by emit_caller_save_insns during
1976 this iteration. */
1977 static void
1978 delete_caller_save_insns ()
1979 {
1980 struct insn_chain *c = reload_insn_chain;
1981
1982 while (c != 0)
1983 {
1984 while (c != 0 && c->is_caller_save_insn)
1985 {
1986 struct insn_chain *next = c->next;
1987 rtx insn = c->insn;
1988
1989 if (insn == basic_block_head[c->block])
1990 basic_block_head[c->block] = NEXT_INSN (insn);
1991 if (insn == basic_block_end[c->block])
1992 basic_block_end[c->block] = PREV_INSN (insn);
1993 if (c == reload_insn_chain)
1994 reload_insn_chain = next;
1995
1996 if (NEXT_INSN (insn) != 0)
1997 PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
1998 if (PREV_INSN (insn) != 0)
1999 NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
2000
2001 if (next)
2002 next->prev = c->prev;
2003 if (c->prev)
2004 c->prev->next = next;
2005 c->next = unused_insn_chains;
2006 unused_insn_chains = c;
2007 c = next;
2008 }
2009 if (c != 0)
2010 c = c->next;
2011 }
2012 }
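/* Illustrative sketch (not part of the pass): the inner loop above is the
   usual removal of a node from a doubly linked list, done both on the insn
   stream (via PREV_INSN/NEXT_INSN) and on the insn_chain list.  For a
   minimal node type with invented names:  */
#if 0
struct dlist_node { struct dlist_node *prev, *next; };

static struct dlist_node *
dlist_unlink (head, c)
     struct dlist_node *head, *c;
{
  if (c == head)
    head = c->next;
  if (c->next)
    c->next->prev = c->prev;
  if (c->prev)
    c->prev->next = c->next;
  return head;
}
#endif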
2013 \f
2014 /* Nonzero if, after spilling reg REGNO for non-groups,
2015 it will still be possible to find a group if we still need one. */
2016
2017 static int
2018 possible_group_p (chain, regno)
2019 struct insn_chain *chain;
2020 int regno;
2021 {
2022 int i;
2023 int class = (int) NO_REGS;
2024
2025 for (i = 0; i < (int) N_REG_CLASSES; i++)
2026 if (chain->need.groups[i] > 0)
2027 {
2028 class = i;
2029 break;
2030 }
2031
2032 if (class == (int) NO_REGS)
2033 return 1;
2034
2035 /* Consider each pair of consecutive registers. */
2036 for (i = 0; i < FIRST_PSEUDO_REGISTER - 1; i++)
2037 {
2038 /* Ignore pairs that include reg REGNO. */
2039 if (i == regno || i + 1 == regno)
2040 continue;
2041
2042 /* Ignore pairs that are outside the class that needs the group.
2043 ??? Here we fail to handle the case where two different classes
2044 independently need groups. But this never happens with our
2045 current machine descriptions. */
2046 if (! (TEST_HARD_REG_BIT (reg_class_contents[class], i)
2047 && TEST_HARD_REG_BIT (reg_class_contents[class], i + 1)))
2048 continue;
2049
2050 /* A pair of consecutive regs we can still spill does the trick. */
2051 if (spill_reg_order[i] < 0 && spill_reg_order[i + 1] < 0
2052 && ! TEST_HARD_REG_BIT (bad_spill_regs, i)
2053 && ! TEST_HARD_REG_BIT (bad_spill_regs, i + 1))
2054 return 1;
2055
2056 /* A pair of one already spilled and one we can spill does it
2057 provided the one already spilled is not otherwise reserved. */
2058 if (spill_reg_order[i] < 0
2059 && ! TEST_HARD_REG_BIT (bad_spill_regs, i)
2060 && spill_reg_order[i + 1] >= 0
2061 && ! TEST_HARD_REG_BIT (chain->counted_for_groups, i + 1)
2062 && ! TEST_HARD_REG_BIT (chain->counted_for_nongroups, i + 1))
2063 return 1;
2064 if (spill_reg_order[i + 1] < 0
2065 && ! TEST_HARD_REG_BIT (bad_spill_regs, i + 1)
2066 && spill_reg_order[i] >= 0
2067 && ! TEST_HARD_REG_BIT (chain->counted_for_groups, i)
2068 && ! TEST_HARD_REG_BIT (chain->counted_for_nongroups, i))
2069 return 1;
2070 }
2071
2072 return 0;
2073 }
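/* Illustrative sketch (not part of the pass): stripped of the class
   membership test, the scan above reduces to the check below over plain
   flag arrays.  Here spillable[r] stands for "r is neither a spill reg yet
   nor in bad_spill_regs", spilled[r] for "r is already a spill reg" and
   reserved[r] for "r is already counted for a group or nongroup"; all of
   these names are invented for the sketch.  */
#if 0
static int
pair_still_possible_p (spillable, spilled, reserved, n, regno)
     char *spillable, *spilled, *reserved;
     int n, regno;
{
  int i;

  for (i = 0; i < n - 1; i++)
    {
      if (i == regno || i + 1 == regno)
        continue;
      /* Both regs can still be spilled.  */
      if (spillable[i] && spillable[i + 1])
        return 1;
      /* One can still be spilled, the other is already spilled but has
         not been reserved for anything else.  */
      if (spillable[i] && spilled[i + 1] && ! reserved[i + 1])
        return 1;
      if (spillable[i + 1] && spilled[i] && ! reserved[i])
        return 1;
    }
  return 0;
}
#endif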
2074
2075 /* Count any groups of CLASS that can be formed from the registers recently
2076 spilled. */
2077
2078 static void
2079 count_possible_groups (chain, class)
2080 struct insn_chain *chain;
2081 int class;
2082 {
2083 HARD_REG_SET new;
2084 int i, j;
2085
2086 /* Now find all consecutive groups of spilled registers
2087 and mark each group off against the need for such groups.
2088 But don't count them against ordinary need, yet. */
2089
2090 if (chain->group_size[class] == 0)
2091 return;
2092
2093 CLEAR_HARD_REG_SET (new);
2094
2095 /* Make a mask of all the regs that are spill regs in class CLASS. */
2096 for (i = 0; i < n_spills; i++)
2097 {
2098 int regno = spill_regs[i];
2099
2100 if (TEST_HARD_REG_BIT (reg_class_contents[class], regno)
2101 && ! TEST_HARD_REG_BIT (chain->counted_for_groups, regno)
2102 && ! TEST_HARD_REG_BIT (chain->counted_for_nongroups, regno))
2103 SET_HARD_REG_BIT (new, regno);
2104 }
2105
2106 /* Find each consecutive group of them. */
2107 for (i = 0; i < FIRST_PSEUDO_REGISTER && chain->need.groups[class] > 0; i++)
2108 if (TEST_HARD_REG_BIT (new, i)
2109 && i + chain->group_size[class] <= FIRST_PSEUDO_REGISTER
2110 && HARD_REGNO_MODE_OK (i, chain->group_mode[class]))
2111 {
2112 for (j = 1; j < chain->group_size[class]; j++)
2113 if (! TEST_HARD_REG_BIT (new, i + j))
2114 break;
2115
2116 if (j == chain->group_size[class])
2117 {
2118 /* We found a group. Mark it off against this class's need for
2119 groups, and against each superclass too. */
2120 register enum reg_class *p;
2121
2122 chain->need.groups[class]--;
2123 p = reg_class_superclasses[class];
2124 while (*p != LIM_REG_CLASSES)
2125 {
2126 if (chain->group_size [(int) *p] <= chain->group_size [class])
2127 chain->need.groups[(int) *p]--;
2128 p++;
2129 }
2130
2131 /* Don't count these registers again. */
2132 for (j = 0; j < chain->group_size[class]; j++)
2133 SET_HARD_REG_BIT (chain->counted_for_groups, i + j);
2134 }
2135
2136 /* Skip to the last reg in this group. When i is incremented above,
2137 it will then point to the first reg of the next possible group. */
2138 i += j - 1;
2139 }
2140 }
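/* Illustrative sketch (not part of the pass): stripped of the class and
   mode checks, the loop above is a scan for runs of GROUP_SIZE consecutive
   set entries in a mask; mask[] and the function name are invented.  */
#if 0
static int
count_consecutive_runs (mask, n, group_size)
     char *mask;
     int n, group_size;
{
  int i, j, count = 0;

  for (i = 0; i + group_size <= n; i++)
    if (mask[i])
      {
        for (j = 1; j < group_size; j++)
          if (! mask[i + j])
            break;
        if (j == group_size)
          count++;
        /* Skip past the regs just examined; the loop increment then
           moves on to the next candidate start.  */
        i += j - 1;
      }
  return count;
}
#endif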
2141 \f
2142 /* ALLOCATE_MODE is a register mode that needs to be reloaded. OTHER_MODE is
2143 another mode that needs to be reloaded for the same register class CLASS.
2144 If any reg in CLASS allows ALLOCATE_MODE but not OTHER_MODE, fail.
2145 ALLOCATE_MODE will never be smaller than OTHER_MODE.
2146
2147 This code used to also fail if any reg in CLASS allows OTHER_MODE but not
2148 ALLOCATE_MODE. This test is unnecessary, because we will never try to put
2149 something of mode ALLOCATE_MODE into an OTHER_MODE register. Testing this
2150 causes unnecessary failures on machines requiring alignment of register
2151 groups when the two modes are different sizes, because the larger mode has
2152 more strict alignment rules than the smaller mode. */
2153
2154 static int
2155 modes_equiv_for_class_p (allocate_mode, other_mode, class)
2156 enum machine_mode allocate_mode, other_mode;
2157 enum reg_class class;
2158 {
2159 register int regno;
2160 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2161 {
2162 if (TEST_HARD_REG_BIT (reg_class_contents[(int) class], regno)
2163 && HARD_REGNO_MODE_OK (regno, allocate_mode)
2164 && ! HARD_REGNO_MODE_OK (regno, other_mode))
2165 return 0;
2166 }
2167 return 1;
2168 }
2169 \f
2170 /* Handle the failure to find a register to spill.
2171 INSN should be one of the insns which needed this particular spill reg. */
2172
2173 static void
2174 spill_failure (insn)
2175 rtx insn;
2176 {
2177 if (asm_noperands (PATTERN (insn)) >= 0)
2178 error_for_asm (insn, "`asm' needs too many reloads");
2179 else
2180 fatal_insn ("Unable to find a register to spill.", insn);
2181 }
2182
2183 /* Add a new register to the tables of available spill-registers.
2184 CHAIN is the insn for which the register will be used; we decrease the
2185 needs of that insn.
2186 I is the index of this register in potential_reload_regs.
2187 CLASS is the regclass whose need is being satisfied.
2188 NONGROUP is 0 if this register is part of a group.
2189 DUMPFILE is the same as the one that `reload' got. */
2190
2191 static void
2192 new_spill_reg (chain, i, class, nongroup, dumpfile)
2193 struct insn_chain *chain;
2194 int i;
2195 int class;
2196 int nongroup;
2197 FILE *dumpfile;
2198 {
2199 register enum reg_class *p;
2200 int regno = potential_reload_regs[i];
2201
2202 if (i >= FIRST_PSEUDO_REGISTER)
2203 {
2204 spill_failure (chain->insn);
2205 failure = 1;
2206 return;
2207 }
2208
2209 if (TEST_HARD_REG_BIT (bad_spill_regs, regno))
2210 {
2211 static char *reg_class_names[] = REG_CLASS_NAMES;
2212
2213 if (asm_noperands (PATTERN (chain->insn)) < 0)
2214 {
2215 /* The error message is still correct - we know only that it wasn't
2216 an asm statement that caused the problem, but one of the global
2217 registers declared by the user might have screwed us. */
2218 error ("fixed or forbidden register %d (%s) was spilled for class %s.",
2219 regno, reg_names[regno], reg_class_names[class]);
2220 error ("This may be due to a compiler bug or to impossible asm");
2221 error ("statements or clauses.");
2222 fatal_insn ("This is the instruction:", chain->insn);
2223 }
2224 error_for_asm (chain->insn, "Invalid `asm' statement:");
2225 error_for_asm (chain->insn,
2226 "fixed or forbidden register %d (%s) was spilled for class %s.",
2227 regno, reg_names[regno], reg_class_names[class]);
2228 failure = 1;
2229 return;
2230 }
2231
2232 /* Make reg REGNO an additional reload reg. */
2233
2234 potential_reload_regs[i] = -1;
2235 spill_regs[n_spills] = regno;
2236 spill_reg_order[regno] = n_spills;
2237 if (dumpfile)
2238 fprintf (dumpfile, "Spilling reg %d.\n", regno);
2239 SET_HARD_REG_BIT (chain->used_spill_regs, regno);
2240
2241 /* Clear off the needs we just satisfied. */
2242
2243 chain->need.regs[0][class]--;
2244 p = reg_class_superclasses[class];
2245 while (*p != LIM_REG_CLASSES)
2246 chain->need.regs[0][(int) *p++]--;
2247
2248 if (nongroup && chain->need.regs[1][class] > 0)
2249 {
2250 SET_HARD_REG_BIT (chain->counted_for_nongroups, regno);
2251 chain->need.regs[1][class]--;
2252 p = reg_class_superclasses[class];
2253 while (*p != LIM_REG_CLASSES)
2254 chain->need.regs[1][(int) *p++]--;
2255 }
2256
2257 n_spills++;
2258 }
2259 \f
2260 /* Delete an unneeded INSN and any previous insns whose sole purpose is loading
2261 data that is dead in INSN. */
2262
2263 static void
2264 delete_dead_insn (insn)
2265 rtx insn;
2266 {
2267 rtx prev = prev_real_insn (insn);
2268 rtx prev_dest;
2269
2270 /* If the previous insn sets a register that dies in our insn, delete it
2271 too. */
2272 if (prev && GET_CODE (PATTERN (prev)) == SET
2273 && (prev_dest = SET_DEST (PATTERN (prev)), GET_CODE (prev_dest) == REG)
2274 && reg_mentioned_p (prev_dest, PATTERN (insn))
2275 && find_regno_note (insn, REG_DEAD, REGNO (prev_dest))
2276 && ! side_effects_p (SET_SRC (PATTERN (prev))))
2277 delete_dead_insn (prev);
2278
2279 PUT_CODE (insn, NOTE);
2280 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2281 NOTE_SOURCE_FILE (insn) = 0;
2282 }
2283
2284 /* Modify the home of pseudo-reg I.
2285 The new home is present in reg_renumber[I].
2286
2287 FROM_REG may be the hard reg that the pseudo-reg is being spilled from;
2288 or it may be -1, meaning there is none or it is not relevant.
2289 This is used so that all pseudos spilled from a given hard reg
2290 can share one stack slot. */
2291
2292 static void
2293 alter_reg (i, from_reg)
2294 register int i;
2295 int from_reg;
2296 {
2297 /* When outputting an inline function, this can happen
2298 for a reg that isn't actually used. */
2299 if (regno_reg_rtx[i] == 0)
2300 return;
2301
2302 /* If the reg got changed to a MEM at rtl-generation time,
2303 ignore it. */
2304 if (GET_CODE (regno_reg_rtx[i]) != REG)
2305 return;
2306
2307 /* Modify the reg-rtx to contain the new hard reg
2308 number or else to contain its pseudo reg number. */
2309 REGNO (regno_reg_rtx[i])
2310 = reg_renumber[i] >= 0 ? reg_renumber[i] : i;
2311
2312 /* If we have a pseudo that is needed but has no hard reg or equivalent,
2313 allocate a stack slot for it. */
2314
2315 if (reg_renumber[i] < 0
2316 && REG_N_REFS (i) > 0
2317 && reg_equiv_constant[i] == 0
2318 && reg_equiv_memory_loc[i] == 0)
2319 {
2320 register rtx x;
2321 int inherent_size = PSEUDO_REGNO_BYTES (i);
2322 int total_size = MAX (inherent_size, reg_max_ref_width[i]);
2323 int adjust = 0;
2324
2325 /* Each pseudo reg has an inherent size which comes from its own mode,
2326 and a total size which provides room for paradoxical subregs
2327 which refer to the pseudo reg in wider modes.
2328
2329 We can use a slot already allocated if it provides both
2330 enough inherent space and enough total space.
2331 Otherwise, we allocate a new slot, making sure that it has no less
2332 inherent space, and no less total space, than the previous slot. */
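/* For instance (sizes made up): an HImode pseudo that is also referenced
   through a paradoxical SImode subreg has inherent size 2 but total
   size 4, so a 2-byte slot cannot be reused for it, while a leftover
   4-byte SImode slot can.  */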
2333 if (from_reg == -1)
2334 {
2335 /* No known place to spill from => no slot to reuse. */
2336 x = assign_stack_local (GET_MODE (regno_reg_rtx[i]), total_size,
2337 inherent_size == total_size ? 0 : -1);
2338 if (BYTES_BIG_ENDIAN)
2339 /* Cancel the big-endian correction done in assign_stack_local.
2340 Get the address of the beginning of the slot.
2341 This is so we can do a big-endian correction unconditionally
2342 below. */
2343 adjust = inherent_size - total_size;
2344
2345 RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (regno_reg_rtx[i]);
2346 }
2347 /* Reuse a stack slot if possible. */
2348 else if (spill_stack_slot[from_reg] != 0
2349 && spill_stack_slot_width[from_reg] >= total_size
2350 && (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
2351 >= inherent_size))
2352 x = spill_stack_slot[from_reg];
2353 /* Allocate a bigger slot. */
2354 else
2355 {
2356 /* Compute maximum size needed, both for inherent size
2357 and for total size. */
2358 enum machine_mode mode = GET_MODE (regno_reg_rtx[i]);
2359 rtx stack_slot;
2360 if (spill_stack_slot[from_reg])
2361 {
2362 if (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
2363 > inherent_size)
2364 mode = GET_MODE (spill_stack_slot[from_reg]);
2365 if (spill_stack_slot_width[from_reg] > total_size)
2366 total_size = spill_stack_slot_width[from_reg];
2367 }
2368 /* Make a slot with that size. */
2369 x = assign_stack_local (mode, total_size,
2370 inherent_size == total_size ? 0 : -1);
2371 stack_slot = x;
2372 if (BYTES_BIG_ENDIAN)
2373 {
2374 /* Cancel the big-endian correction done in assign_stack_local.
2375 Get the address of the beginning of the slot.
2376 This is so we can do a big-endian correction unconditionally
2377 below. */
2378 adjust = GET_MODE_SIZE (mode) - total_size;
2379 if (adjust)
2380 stack_slot = gen_rtx_MEM (mode_for_size (total_size
2381 * BITS_PER_UNIT,
2382 MODE_INT, 1),
2383 plus_constant (XEXP (x, 0), adjust));
2384 }
2385 spill_stack_slot[from_reg] = stack_slot;
2386 spill_stack_slot_width[from_reg] = total_size;
2387 }
2388
2389 /* On a big endian machine, the "address" of the slot
2390 is the address of the low part that fits its inherent mode. */
2391 if (BYTES_BIG_ENDIAN && inherent_size < total_size)
2392 adjust += (total_size - inherent_size);
2393
2394 /* If we have any adjustment to make, or if the stack slot is the
2395 wrong mode, make a new stack slot. */
2396 if (adjust != 0 || GET_MODE (x) != GET_MODE (regno_reg_rtx[i]))
2397 {
2398 x = gen_rtx_MEM (GET_MODE (regno_reg_rtx[i]),
2399 plus_constant (XEXP (x, 0), adjust));
2400
2401 /* If this was shared among registers, must ensure we never
2402 set it readonly since that can cause scheduling
2403 problems. Note we would only have to do this in the adjustment
2404 case in any event, since the code above doesn't set it. */
2405
2406 if (from_reg == -1)
2407 RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (regno_reg_rtx[i]);
2408 }
2409
2410 /* Save the stack slot for later. */
2411 reg_equiv_memory_loc[i] = x;
2412 }
2413 }
2414
2415 /* Mark the slots in regs_ever_live for the hard regs
2416 used by pseudo-reg number REGNO. */
2417
2418 void
2419 mark_home_live (regno)
2420 int regno;
2421 {
2422 register int i, lim;
2423 i = reg_renumber[regno];
2424 if (i < 0)
2425 return;
2426 lim = i + HARD_REGNO_NREGS (i, PSEUDO_REGNO_MODE (regno));
2427 while (i < lim)
2428 regs_ever_live[i++] = 1;
2429 }
2430 \f
2431 /* This function handles the tracking of elimination offsets around branches.
2432
2433 X is a piece of RTL being scanned.
2434
2435 INSN is the insn that it came from, if any.
2436
2437 INITIAL_P is non-zero if we are to set the offset to be the initial
2438 offset and zero if we are setting the offset of the label to be the
2439 current offset. */
2440
2441 static void
2442 set_label_offsets (x, insn, initial_p)
2443 rtx x;
2444 rtx insn;
2445 int initial_p;
2446 {
2447 enum rtx_code code = GET_CODE (x);
2448 rtx tem;
2449 unsigned int i;
2450 struct elim_table *p;
2451
2452 switch (code)
2453 {
2454 case LABEL_REF:
2455 if (LABEL_REF_NONLOCAL_P (x))
2456 return;
2457
2458 x = XEXP (x, 0);
2459
2460 /* ... fall through ... */
2461
2462 case CODE_LABEL:
2463 /* If we know nothing about this label, set the desired offsets. Note
2464 that this sets the offset at a label to be the offset before a label
2465 if we don't know anything about the label. This is not correct for
2466 the label after a BARRIER, but is the best guess we can make. If
2467 we guessed wrong, we will suppress an elimination that might have
2468 been possible had we been able to guess correctly. */
2469
2470 if (! offsets_known_at[CODE_LABEL_NUMBER (x)])
2471 {
2472 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2473 offsets_at[CODE_LABEL_NUMBER (x)][i]
2474 = (initial_p ? reg_eliminate[i].initial_offset
2475 : reg_eliminate[i].offset);
2476 offsets_known_at[CODE_LABEL_NUMBER (x)] = 1;
2477 }
2478
2479 /* Otherwise, if this is the definition of a label and it is
2480 preceded by a BARRIER, set our offsets to the known offset of
2481 that label. */
2482
2483 else if (x == insn
2484 && (tem = prev_nonnote_insn (insn)) != 0
2485 && GET_CODE (tem) == BARRIER)
2486 set_offsets_for_label (insn);
2487 else
2488 /* If neither of the above cases is true, compare each offset
2489 with those previously recorded and suppress any eliminations
2490 where the offsets disagree. */
2491
2492 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2493 if (offsets_at[CODE_LABEL_NUMBER (x)][i]
2494 != (initial_p ? reg_eliminate[i].initial_offset
2495 : reg_eliminate[i].offset))
2496 reg_eliminate[i].can_eliminate = 0;
2497
2498 return;
2499
2500 case JUMP_INSN:
2501 set_label_offsets (PATTERN (insn), insn, initial_p);
2502
2503 /* ... fall through ... */
2504
2505 case INSN:
2506 case CALL_INSN:
2507 /* Any labels mentioned in REG_LABEL notes can be branched to indirectly
2508 and hence must have all eliminations at their initial offsets. */
2509 for (tem = REG_NOTES (x); tem; tem = XEXP (tem, 1))
2510 if (REG_NOTE_KIND (tem) == REG_LABEL)
2511 set_label_offsets (XEXP (tem, 0), insn, 1);
2512 return;
2513
2514 case ADDR_VEC:
2515 case ADDR_DIFF_VEC:
2516 /* Each of the labels in the address vector must be at their initial
2517 offsets. We want the first field for ADDR_VEC and the second
2518 field for ADDR_DIFF_VEC. */
2519
2520 for (i = 0; i < (unsigned) XVECLEN (x, code == ADDR_DIFF_VEC); i++)
2521 set_label_offsets (XVECEXP (x, code == ADDR_DIFF_VEC, i),
2522 insn, initial_p);
2523 return;
2524
2525 case SET:
2526 /* We only care about setting PC. If the source is not RETURN,
2527 IF_THEN_ELSE, or a label, disable any eliminations not at
2528 their initial offsets. Similarly if any arm of the IF_THEN_ELSE
2529 isn't one of those possibilities. For branches to a label,
2530 call ourselves recursively.
2531
2532 Note that this can disable elimination unnecessarily when we have
2533 a non-local goto since it will look like a non-constant jump to
2534 someplace in the current function. This isn't a significant
2535 problem since such jumps will normally be when all elimination
2536 pairs are back to their initial offsets. */
2537
2538 if (SET_DEST (x) != pc_rtx)
2539 return;
2540
2541 switch (GET_CODE (SET_SRC (x)))
2542 {
2543 case PC:
2544 case RETURN:
2545 return;
2546
2547 case LABEL_REF:
2548 set_label_offsets (XEXP (SET_SRC (x), 0), insn, initial_p);
2549 return;
2550
2551 case IF_THEN_ELSE:
2552 tem = XEXP (SET_SRC (x), 1);
2553 if (GET_CODE (tem) == LABEL_REF)
2554 set_label_offsets (XEXP (tem, 0), insn, initial_p);
2555 else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2556 break;
2557
2558 tem = XEXP (SET_SRC (x), 2);
2559 if (GET_CODE (tem) == LABEL_REF)
2560 set_label_offsets (XEXP (tem, 0), insn, initial_p);
2561 else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2562 break;
2563 return;
2564
2565 default:
2566 break;
2567 }
2568
2569 /* If we reach here, all eliminations must be at their initial
2570 offset because we are doing a jump to a variable address. */
2571 for (p = reg_eliminate; p < &reg_eliminate[NUM_ELIMINABLE_REGS]; p++)
2572 if (p->offset != p->initial_offset)
2573 p->can_eliminate = 0;
2574 break;
2575
2576 default:
2577 break;
2578 }
2579 }
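/* As a concrete (made-up) illustration: if one path reaches a CODE_LABEL
   with the frame-pointer elimination offset at 16 while another path
   reaches the same label with it at 24, the recorded and current offsets
   disagree, so that elimination's can_eliminate is cleared by
   set_label_offsets above.  */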
2580 \f
2581 /* Used for communication between the next two functions to properly share
2582 the vector for an ASM_OPERANDS. */
2583
2584 static struct rtvec_def *old_asm_operands_vec, *new_asm_operands_vec;
2585
2586 /* Scan X and replace any eliminable registers (such as fp) with a
2587 replacement (such as sp), plus an offset.
2588
2589 MEM_MODE is the mode of an enclosing MEM. We need this to know how
2590 much to adjust a register for, e.g., PRE_DEC. Also, if we are inside a
2591 MEM, we are allowed to replace a sum of a register and the constant zero
2592 with the register, which we cannot do outside a MEM. In addition, we need
2593 to record the fact that a register is referenced outside a MEM.
2594
2595 If INSN is an insn, it is the insn containing X. If we replace a REG
2596 in a SET_DEST with an equivalent MEM and INSN is non-zero, write a
2597 CLOBBER of the pseudo after INSN so find_equiv_reg will know that
2598 the REG is being modified.
2599
2600 Alternatively, INSN may be a note (an EXPR_LIST or INSN_LIST).
2601 That's used when we eliminate in expressions stored in notes.
2602 This means, do not set ref_outside_mem even if the reference
2603 is outside of MEMs.
2604
2605 If we see a modification to a register we know about, take the
2606 appropriate action (see case SET, below).
2607
2608 REG_EQUIV_MEM and REG_EQUIV_ADDRESS contain address that have had
2609 replacements done assuming all offsets are at their initial values. If
2610 they are not, or if REG_EQUIV_ADDRESS is nonzero for a pseudo we
2611 encounter, return the actual location so that find_reloads will do
2612 the proper thing. */
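/* As a concrete illustration (offsets made up): when the frame pointer is
   being eliminated in favor of the stack pointer and the elimination's
   previous offset is 16, a bare (reg fp) comes back as
   (plus (reg sp) (const_int 16)), (plus (reg fp) (const_int 4)) comes back
   as (plus (reg sp) (const_int 20)), and inside a MEM
   (plus (reg fp) (const_int -16)) collapses to just (reg sp).  */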
2613
2614 rtx
2615 eliminate_regs (x, mem_mode, insn)
2616 rtx x;
2617 enum machine_mode mem_mode;
2618 rtx insn;
2619 {
2620 enum rtx_code code = GET_CODE (x);
2621 struct elim_table *ep;
2622 int regno;
2623 rtx new;
2624 int i, j;
2625 char *fmt;
2626 int copied = 0;
2627
2628 switch (code)
2629 {
2630 case CONST_INT:
2631 case CONST_DOUBLE:
2632 case CONST:
2633 case SYMBOL_REF:
2634 case CODE_LABEL:
2635 case PC:
2636 case CC0:
2637 case ASM_INPUT:
2638 case ADDR_VEC:
2639 case ADDR_DIFF_VEC:
2640 case RETURN:
2641 return x;
2642
2643 case ADDRESSOF:
2644 /* This is only for the benefit of the debugging backends, which call
2645 eliminate_regs on DECL_RTL; any ADDRESSOFs in the actual insns are
2646 removed after CSE. */
2647 new = eliminate_regs (XEXP (x, 0), 0, insn);
2648 if (GET_CODE (new) == MEM)
2649 return XEXP (new, 0);
2650 return x;
2651
2652 case REG:
2653 regno = REGNO (x);
2654
2655 /* First handle the case where we encounter a bare register that
2656 is eliminable. Replace it with a PLUS. */
2657 if (regno < FIRST_PSEUDO_REGISTER)
2658 {
2659 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2660 ep++)
2661 if (ep->from_rtx == x && ep->can_eliminate)
2662 {
2663 if (! mem_mode
2664 /* Refs inside notes don't count for this purpose. */
2665 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2666 || GET_CODE (insn) == INSN_LIST)))
2667 ep->ref_outside_mem = 1;
2668 return plus_constant (ep->to_rtx, ep->previous_offset);
2669 }
2670
2671 }
2672 return x;
2673
2674 case PLUS:
2675 /* If this is the sum of an eliminable register and a constant, rework
2676 the sum. */
2677 if (GET_CODE (XEXP (x, 0)) == REG
2678 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2679 && CONSTANT_P (XEXP (x, 1)))
2680 {
2681 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2682 ep++)
2683 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2684 {
2685 if (! mem_mode
2686 /* Refs inside notes don't count for this purpose. */
2687 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2688 || GET_CODE (insn) == INSN_LIST)))
2689 ep->ref_outside_mem = 1;
2690
2691 /* The only time we want to replace a PLUS with a REG (this
2692 occurs when the constant operand of the PLUS is the negative
2693 of the offset) is when we are inside a MEM. We won't want
2694 to do so at other times because that would change the
2695 structure of the insn in a way that reload can't handle.
2696 We special-case the commonest situation in
2697 eliminate_regs_in_insn, so just replace a PLUS with a
2698 PLUS here, unless inside a MEM. */
2699 if (mem_mode != 0 && GET_CODE (XEXP (x, 1)) == CONST_INT
2700 && INTVAL (XEXP (x, 1)) == - ep->previous_offset)
2701 return ep->to_rtx;
2702 else
2703 return gen_rtx_PLUS (Pmode, ep->to_rtx,
2704 plus_constant (XEXP (x, 1),
2705 ep->previous_offset));
2706 }
2707
2708 /* If the register is not eliminable, we are done since the other
2709 operand is a constant. */
2710 return x;
2711 }
2712
2713 /* If this is part of an address, we want to bring any constant to the
2714 outermost PLUS. We will do this by doing register replacement in
2715 our operands and seeing if a constant shows up in one of them.
2716
2717 We assume here this is part of an address (or a "load address" insn)
2718 since an eliminable register is not likely to appear in any other
2719 context.
2720
2721 If we have (plus (eliminable) (reg)), we want to produce
2722 (plus (plus (replacement) (reg)) (const)). If this was part of a
2723 normal add insn, (plus (replacement) (reg)) will be pushed as a
2724 reload. This is the desired action. */
2725
2726 {
2727 rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2728 rtx new1 = eliminate_regs (XEXP (x, 1), mem_mode, insn);
2729
2730 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
2731 {
2732 /* If one side is a PLUS and the other side is a pseudo that
2733 didn't get a hard register but has a reg_equiv_constant,
2734 we must replace the constant here since it may no longer
2735 be in the position of any operand. */
2736 if (GET_CODE (new0) == PLUS && GET_CODE (new1) == REG
2737 && REGNO (new1) >= FIRST_PSEUDO_REGISTER
2738 && reg_renumber[REGNO (new1)] < 0
2739 && reg_equiv_constant != 0
2740 && reg_equiv_constant[REGNO (new1)] != 0)
2741 new1 = reg_equiv_constant[REGNO (new1)];
2742 else if (GET_CODE (new1) == PLUS && GET_CODE (new0) == REG
2743 && REGNO (new0) >= FIRST_PSEUDO_REGISTER
2744 && reg_renumber[REGNO (new0)] < 0
2745 && reg_equiv_constant[REGNO (new0)] != 0)
2746 new0 = reg_equiv_constant[REGNO (new0)];
2747
2748 new = form_sum (new0, new1);
2749
2750 /* As above, if we are not inside a MEM we do not want to
2751 turn a PLUS into something else. We might try to do so here
2752 for an addition of 0 if we aren't optimizing. */
2753 if (! mem_mode && GET_CODE (new) != PLUS)
2754 return gen_rtx_PLUS (GET_MODE (x), new, const0_rtx);
2755 else
2756 return new;
2757 }
2758 }
2759 return x;
2760
2761 case MULT:
2762 /* If this is the product of an eliminable register and a
2763 constant, apply the distribute law and move the constant out
2764 so that we have (plus (mult ..) ..). This is needed in order
2765 to keep load-address insns valid. This case is pathological.
2766 We ignore the possibility of overflow here. */
2767 if (GET_CODE (XEXP (x, 0)) == REG
2768 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2769 && GET_CODE (XEXP (x, 1)) == CONST_INT)
2770 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2771 ep++)
2772 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2773 {
2774 if (! mem_mode
2775 /* Refs inside notes don't count for this purpose. */
2776 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2777 || GET_CODE (insn) == INSN_LIST)))
2778 ep->ref_outside_mem = 1;
2779
2780 return
2781 plus_constant (gen_rtx_MULT (Pmode, ep->to_rtx, XEXP (x, 1)),
2782 ep->previous_offset * INTVAL (XEXP (x, 1)));
2783 }
2784
2785 /* ... fall through ... */
2786
2787 case CALL:
2788 case COMPARE:
2789 case MINUS:
2790 case DIV: case UDIV:
2791 case MOD: case UMOD:
2792 case AND: case IOR: case XOR:
2793 case ROTATERT: case ROTATE:
2794 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
2795 case NE: case EQ:
2796 case GE: case GT: case GEU: case GTU:
2797 case LE: case LT: case LEU: case LTU:
2798 {
2799 rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2800 rtx new1
2801 = XEXP (x, 1) ? eliminate_regs (XEXP (x, 1), mem_mode, insn) : 0;
2802
2803 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
2804 return gen_rtx_fmt_ee (code, GET_MODE (x), new0, new1);
2805 }
2806 return x;
2807
2808 case EXPR_LIST:
2809 /* If we have something in XEXP (x, 0), the usual case, eliminate it. */
2810 if (XEXP (x, 0))
2811 {
2812 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2813 if (new != XEXP (x, 0))
2814 x = gen_rtx_EXPR_LIST (REG_NOTE_KIND (x), new, XEXP (x, 1));
2815 }
2816
2817 /* ... fall through ... */
2818
2819 case INSN_LIST:
2820 /* Now do eliminations in the rest of the chain. If this was
2821 an EXPR_LIST, this might result in allocating more memory than is
2822 strictly needed, but it simplifies the code. */
2823 if (XEXP (x, 1))
2824 {
2825 new = eliminate_regs (XEXP (x, 1), mem_mode, insn);
2826 if (new != XEXP (x, 1))
2827 return gen_rtx_fmt_ee (GET_CODE (x), GET_MODE (x), XEXP (x, 0), new);
2828 }
2829 return x;
2830
2831 case PRE_INC:
2832 case POST_INC:
2833 case PRE_DEC:
2834 case POST_DEC:
2835 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2836 if (ep->to_rtx == XEXP (x, 0))
2837 {
2838 int size = GET_MODE_SIZE (mem_mode);
2839
2840 /* If more bytes than MEM_MODE are pushed, account for them. */
2841 #ifdef PUSH_ROUNDING
2842 if (ep->to_rtx == stack_pointer_rtx)
2843 size = PUSH_ROUNDING (size);
2844 #endif
2845 if (code == PRE_DEC || code == POST_DEC)
2846 ep->offset += size;
2847 else
2848 ep->offset -= size;
2849 }
2850
2851 /* Fall through to generic unary operation case. */
2852 case STRICT_LOW_PART:
2853 case NEG: case NOT:
2854 case SIGN_EXTEND: case ZERO_EXTEND:
2855 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
2856 case FLOAT: case FIX:
2857 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
2858 case ABS:
2859 case SQRT:
2860 case FFS:
2861 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2862 if (new != XEXP (x, 0))
2863 return gen_rtx_fmt_e (code, GET_MODE (x), new);
2864 return x;
2865
2866 case SUBREG:
2867 /* Similar to above processing, but preserve SUBREG_WORD.
2868 Convert (subreg (mem)) to (mem) if not paradoxical.
2869 Also, if we have a non-paradoxical (subreg (pseudo)) and the
2870 pseudo didn't get a hard reg, we must replace this with the
2871 eliminated version of the memory location because push_reloads
2872 may do the replacement in certain circumstances. */
2873 if (GET_CODE (SUBREG_REG (x)) == REG
2874 && (GET_MODE_SIZE (GET_MODE (x))
2875 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
2876 && reg_equiv_memory_loc != 0
2877 && reg_equiv_memory_loc[REGNO (SUBREG_REG (x))] != 0)
2878 {
2879 #if 0
2880 new = eliminate_regs (reg_equiv_memory_loc[REGNO (SUBREG_REG (x))],
2881 mem_mode, insn);
2882
2883 /* If we didn't change anything, we must retain the pseudo. */
2884 if (new == reg_equiv_memory_loc[REGNO (SUBREG_REG (x))])
2885 new = SUBREG_REG (x);
2886 else
2887 {
2888 /* In this case, we must show that the pseudo is used in this
2889 insn so that delete_output_reload will do the right thing. */
2890 if (insn != 0 && GET_CODE (insn) != EXPR_LIST
2891 && GET_CODE (insn) != INSN_LIST)
2892 REG_NOTES (emit_insn_before (gen_rtx_USE (VOIDmode,
2893 SUBREG_REG (x)),
2894 insn))
2895 = gen_rtx_EXPR_LIST (REG_EQUAL, new, NULL_RTX);
2896
2897 /* Ensure NEW isn't shared in case we have to reload it. */
2898 new = copy_rtx (new);
2899 }
2900 #else
2901 new = SUBREG_REG (x);
2902 #endif
2903 }
2904 else
2905 new = eliminate_regs (SUBREG_REG (x), mem_mode, insn);
2906
2907 if (new != XEXP (x, 0))
2908 {
2909 int x_size = GET_MODE_SIZE (GET_MODE (x));
2910 int new_size = GET_MODE_SIZE (GET_MODE (new));
2911
2912 if (GET_CODE (new) == MEM
2913 && ((x_size < new_size
2914 #ifdef WORD_REGISTER_OPERATIONS
2915 /* On these machines, combine can create rtl of the form
2916 (set (subreg:m1 (reg:m2 R) 0) ...)
2917 where m1 < m2, and expects something interesting to
2918 happen to the entire word. Moreover, it will use the
2919 (reg:m2 R) later, expecting all bits to be preserved.
2920 So if the number of words is the same, preserve the
2921 subreg so that push_reloads can see it. */
2922 && ! ((x_size-1)/UNITS_PER_WORD == (new_size-1)/UNITS_PER_WORD)
2923 #endif
2924 )
2925 || (x_size == new_size))
2926 )
2927 {
2928 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
2929 enum machine_mode mode = GET_MODE (x);
2930
2931 if (BYTES_BIG_ENDIAN)
2932 offset += (MIN (UNITS_PER_WORD,
2933 GET_MODE_SIZE (GET_MODE (new)))
2934 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
2935
2936 PUT_MODE (new, mode);
2937 XEXP (new, 0) = plus_constant (XEXP (new, 0), offset);
2938 return new;
2939 }
2940 else
2941 return gen_rtx_SUBREG (GET_MODE (x), new, SUBREG_WORD (x));
2942 }
2943
2944 return x;
2945
2946 case USE:
2947 /* If using a register that is the source of an elimination we still
2948 think can be performed, note it cannot be performed since we don't
2949 know how this register is used. */
2950 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2951 if (ep->from_rtx == XEXP (x, 0))
2952 ep->can_eliminate = 0;
2953
2954 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2955 if (new != XEXP (x, 0))
2956 return gen_rtx_fmt_e (code, GET_MODE (x), new);
2957 return x;
2958
2959 case CLOBBER:
2960 /* If clobbering a register that is the replacement register for an
2961 elimination we still think can be performed, note that it cannot
2962 be performed. Otherwise, we need not be concerned about it. */
2963 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2964 if (ep->to_rtx == XEXP (x, 0))
2965 ep->can_eliminate = 0;
2966
2967 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2968 if (new != XEXP (x, 0))
2969 return gen_rtx_fmt_e (code, GET_MODE (x), new);
2970 return x;
2971
2972 case ASM_OPERANDS:
2973 {
2974 rtx *temp_vec;
2975 /* Properly handle sharing input and constraint vectors. */
2976 if (ASM_OPERANDS_INPUT_VEC (x) != old_asm_operands_vec)
2977 {
2978 /* When we come to a new vector not seen before,
2979 scan all its elements; keep the old vector if none
2980 of them changes; otherwise, make a copy. */
2981 old_asm_operands_vec = ASM_OPERANDS_INPUT_VEC (x);
2982 temp_vec = (rtx *) alloca (XVECLEN (x, 3) * sizeof (rtx));
2983 for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
2984 temp_vec[i] = eliminate_regs (ASM_OPERANDS_INPUT (x, i),
2985 mem_mode, insn);
2986
2987 for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
2988 if (temp_vec[i] != ASM_OPERANDS_INPUT (x, i))
2989 break;
2990
2991 if (i == ASM_OPERANDS_INPUT_LENGTH (x))
2992 new_asm_operands_vec = old_asm_operands_vec;
2993 else
2994 new_asm_operands_vec
2995 = gen_rtvec_v (ASM_OPERANDS_INPUT_LENGTH (x), temp_vec);
2996 }
2997
2998 /* If we had to copy the vector, copy the entire ASM_OPERANDS. */
2999 if (new_asm_operands_vec == old_asm_operands_vec)
3000 return x;
3001
3002 new = gen_rtx_ASM_OPERANDS (VOIDmode, ASM_OPERANDS_TEMPLATE (x),
3003 ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
3004 ASM_OPERANDS_OUTPUT_IDX (x),
3005 new_asm_operands_vec,
3006 ASM_OPERANDS_INPUT_CONSTRAINT_VEC (x),
3007 ASM_OPERANDS_SOURCE_FILE (x),
3008 ASM_OPERANDS_SOURCE_LINE (x));
3009 new->volatil = x->volatil;
3010 return new;
3011 }
3012
3013 case SET:
3014 /* Check for setting a register that we know about. */
3015 if (GET_CODE (SET_DEST (x)) == REG)
3016 {
3017 /* See if this is setting the replacement register for an
3018 elimination.
3019
3020 If DEST is the hard frame pointer, we do nothing because we
3021 assume that all assignments to the frame pointer are for
3022 non-local gotos and are being done at a time when they are valid
3023 and do not disturb anything else. Some machines want to
3024 eliminate a fake argument pointer (or even a fake frame pointer)
3025 with either the real frame or the stack pointer. Assignments to
3026 the hard frame pointer must not prevent this elimination. */
3027
3028 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3029 ep++)
3030 if (ep->to_rtx == SET_DEST (x)
3031 && SET_DEST (x) != hard_frame_pointer_rtx)
3032 {
3033 /* If it is being incremented, adjust the offset. Otherwise,
3034 this elimination can't be done. */
3035 rtx src = SET_SRC (x);
3036
3037 if (GET_CODE (src) == PLUS
3038 && XEXP (src, 0) == SET_DEST (x)
3039 && GET_CODE (XEXP (src, 1)) == CONST_INT)
3040 ep->offset -= INTVAL (XEXP (src, 1));
3041 else
3042 ep->can_eliminate = 0;
3043 }
3044
3045 /* Now check to see whether we are assigning to a register that can be
3046 eliminated. If so, it must be as part of a PARALLEL, since we
3047 will not have been called if this is a single SET. So indicate
3048 that we can no longer eliminate this reg. */
3049 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3050 ep++)
3051 if (ep->from_rtx == SET_DEST (x) && ep->can_eliminate)
3052 ep->can_eliminate = 0;
3053 }
3054
3055 /* Now avoid the loop below in this common case. */
3056 {
3057 rtx new0 = eliminate_regs (SET_DEST (x), 0, insn);
3058 rtx new1 = eliminate_regs (SET_SRC (x), 0, insn);
3059
3060 /* If SET_DEST changed from a REG to a MEM and INSN is an insn,
3061 write a CLOBBER insn. */
3062 if (GET_CODE (SET_DEST (x)) == REG && GET_CODE (new0) == MEM
3063 && insn != 0 && GET_CODE (insn) != EXPR_LIST
3064 && GET_CODE (insn) != INSN_LIST)
3065 emit_insn_after (gen_rtx_CLOBBER (VOIDmode, SET_DEST (x)), insn);
3066
3067 if (new0 != SET_DEST (x) || new1 != SET_SRC (x))
3068 return gen_rtx_SET (VOIDmode, new0, new1);
3069 }
3070
3071 return x;
3072
3073 case MEM:
3074 /* This is only for the benefit of the debugging backends, which call
3075 eliminate_regs on DECL_RTL; any ADDRESSOFs in the actual insns are
3076 removed after CSE. */
3077 if (GET_CODE (XEXP (x, 0)) == ADDRESSOF)
3078 return eliminate_regs (XEXP (XEXP (x, 0), 0), 0, insn);
3079
3080 /* Our only special processing is to pass the mode of the MEM to our
3081 recursive call and copy the flags. While we are here, handle this
3082 case more efficiently. */
3083 new = eliminate_regs (XEXP (x, 0), GET_MODE (x), insn);
3084 if (new != XEXP (x, 0))
3085 {
3086 new = gen_rtx_MEM (GET_MODE (x), new);
3087 new->volatil = x->volatil;
3088 new->unchanging = x->unchanging;
3089 new->in_struct = x->in_struct;
3090 return new;
3091 }
3092 else
3093 return x;
3094
3095 default:
3096 break;
3097 }
3098
3099 /* Process each of our operands recursively. If any have changed, make a
3100 copy of the rtx. */
3101 fmt = GET_RTX_FORMAT (code);
3102 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3103 {
3104 if (*fmt == 'e')
3105 {
3106 new = eliminate_regs (XEXP (x, i), mem_mode, insn);
3107 if (new != XEXP (x, i) && ! copied)
3108 {
3109 rtx new_x = rtx_alloc (code);
3110 bcopy ((char *) x, (char *) new_x,
3111 (sizeof (*new_x) - sizeof (new_x->fld)
3112 + sizeof (new_x->fld[0]) * GET_RTX_LENGTH (code)));
3113 x = new_x;
3114 copied = 1;
3115 }
3116 XEXP (x, i) = new;
3117 }
3118 else if (*fmt == 'E')
3119 {
3120 int copied_vec = 0;
3121 for (j = 0; j < XVECLEN (x, i); j++)
3122 {
3123 new = eliminate_regs (XVECEXP (x, i, j), mem_mode, insn);
3124 if (new != XVECEXP (x, i, j) && ! copied_vec)
3125 {
3126 rtvec new_v = gen_rtvec_vv (XVECLEN (x, i),
3127 XVEC (x, i)->elem);
3128 if (! copied)
3129 {
3130 rtx new_x = rtx_alloc (code);
3131 bcopy ((char *) x, (char *) new_x,
3132 (sizeof (*new_x) - sizeof (new_x->fld)
3133 + (sizeof (new_x->fld[0])
3134 * GET_RTX_LENGTH (code))));
3135 x = new_x;
3136 copied = 1;
3137 }
3138 XVEC (x, i) = new_v;
3139 copied_vec = 1;
3140 }
3141 XVECEXP (x, i, j) = new;
3142 }
3143 }
3144 }
3145
3146 return x;
3147 }
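/* Illustrative sketch (not part of the pass): the generic operand loop at
   the end of eliminate_regs is a copy-on-write rewrite -- X is duplicated
   at most once, the first time a rewritten operand differs, and returned
   unchanged otherwise.  In outline, handling only 'e' operands and using
   copy_rtx instead of the shallow rtx_alloc/bcopy above:  */
#if 0
static rtx
rewrite_operands_sketch (x, mem_mode, insn)
     rtx x;
     enum machine_mode mem_mode;
     rtx insn;
{
  register char *fmt = GET_RTX_FORMAT (GET_CODE (x));
  int i, copied = 0;

  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (x)); i++)
    if (fmt[i] == 'e')
      {
        rtx new = eliminate_regs (XEXP (x, i), mem_mode, insn);

        if (new != XEXP (x, i) && ! copied)
          {
            x = copy_rtx (x);
            copied = 1;
          }
        XEXP (x, i) = new;
      }
  return x;
}
#endif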
3148 \f
3149 /* Scan INSN and eliminate all eliminable registers in it.
3150
3151 If REPLACE is nonzero, do the replacement destructively. Also
3152 delete the insn as dead if it is setting an eliminable register.
3153
3154 If REPLACE is zero, do all our allocations in reload_obstack.
3155
3156 If no eliminations were done and this insn doesn't require any elimination
3157 processing (these are not identical conditions: it might be updating sp,
3158 but not referencing fp; this needs to be seen during reload_as_needed so
3159 that the offset between fp and sp can be taken into consideration), zero
3160 is returned. Otherwise, 1 is returned. */
3161
3162 static int
3163 eliminate_regs_in_insn (insn, replace)
3164 rtx insn;
3165 int replace;
3166 {
3167 rtx old_body = PATTERN (insn);
3168 rtx old_set = single_set (insn);
3169 rtx new_body;
3170 int val = 0;
3171 struct elim_table *ep;
3172
3173 if (! replace)
3174 push_obstacks (&reload_obstack, &reload_obstack);
3175
3176 if (old_set != 0 && GET_CODE (SET_DEST (old_set)) == REG
3177 && REGNO (SET_DEST (old_set)) < FIRST_PSEUDO_REGISTER)
3178 {
3179 /* Check for setting an eliminable register. */
3180 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3181 if (ep->from_rtx == SET_DEST (old_set) && ep->can_eliminate)
3182 {
3183 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3184 /* If this is setting the frame pointer register to the
3185 hardware frame pointer register and this is an elimination
3186 that will be done (tested above), this insn is really
3187 adjusting the frame pointer downward to compensate for
3188 the adjustment done before a nonlocal goto. */
3189 if (ep->from == FRAME_POINTER_REGNUM
3190 && ep->to == HARD_FRAME_POINTER_REGNUM)
3191 {
3192 rtx src = SET_SRC (old_set);
3193 int offset, ok = 0;
3194 rtx prev_insn, prev_set;
3195
3196 if (src == ep->to_rtx)
3197 offset = 0, ok = 1;
3198 else if (GET_CODE (src) == PLUS
3199 && GET_CODE (XEXP (src, 0)) == CONST_INT
3200 && XEXP (src, 1) == ep->to_rtx)
3201 offset = INTVAL (XEXP (src, 0)), ok = 1;
3202 else if (GET_CODE (src) == PLUS
3203 && GET_CODE (XEXP (src, 1)) == CONST_INT
3204 && XEXP (src, 0) == ep->to_rtx)
3205 offset = INTVAL (XEXP (src, 1)), ok = 1;
3206 else if ((prev_insn = prev_nonnote_insn (insn)) != 0
3207 && (prev_set = single_set (prev_insn)) != 0
3208 && rtx_equal_p (SET_DEST (prev_set), src))
3209 {
3210 src = SET_SRC (prev_set);
3211 if (src == ep->to_rtx)
3212 offset = 0, ok = 1;
3213 else if (GET_CODE (src) == PLUS
3214 && GET_CODE (XEXP (src, 0)) == CONST_INT
3215 && XEXP (src, 1) == ep->to_rtx)
3216 offset = INTVAL (XEXP (src, 0)), ok = 1;
3217 else if (GET_CODE (src) == PLUS
3218 && GET_CODE (XEXP (src, 1)) == CONST_INT
3219 && XEXP (src, 0) == ep->to_rtx)
3220 offset = INTVAL (XEXP (src, 1)), ok = 1;
3221 }
3222
3223 if (ok)
3224 {
3225 if (replace)
3226 {
3227 rtx src
3228 = plus_constant (ep->to_rtx, offset - ep->offset);
3229
3230 /* First see if this insn remains valid when we
3231 make the change. If not, keep the INSN_CODE
3232 the same and let reload fix it up. */
3233 validate_change (insn, &SET_SRC (old_set), src, 1);
3234 validate_change (insn, &SET_DEST (old_set),
3235 ep->to_rtx, 1);
3236 if (! apply_change_group ())
3237 {
3238 SET_SRC (old_set) = src;
3239 SET_DEST (old_set) = ep->to_rtx;
3240 }
3241 }
3242
3243 val = 1;
3244 goto done;
3245 }
3246 }
3247 #endif
3248
3249 /* In this case this insn isn't serving a useful purpose. We
3250 will delete it in reload_as_needed once we know that this
3251 elimination is, in fact, being done.
3252
3253 If REPLACE isn't set, we can't delete this insn, but needn't
3254 process it since it won't be used unless something changes. */
3255 if (replace)
3256 delete_dead_insn (insn);
3257 val = 1;
3258 goto done;
3259 }
3260
3261 /* Check for (set (reg) (plus (reg from) (offset))) where the offset
3262 in the insn is the negative of the offset in FROM. Substitute
3263 (set (reg) (reg to)) for the insn and change its code.
3264
3265 We have to do this here, rather than in eliminate_regs, so that we can
3266 change the insn code. */
3267
3268 if (GET_CODE (SET_SRC (old_set)) == PLUS
3269 && GET_CODE (XEXP (SET_SRC (old_set), 0)) == REG
3270 && GET_CODE (XEXP (SET_SRC (old_set), 1)) == CONST_INT)
3271 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3272 ep++)
3273 if (ep->from_rtx == XEXP (SET_SRC (old_set), 0)
3274 && ep->can_eliminate)
3275 {
3276 /* We must stop at the first elimination that will be used.
3277 If this one would replace the PLUS with a REG, do it
3278 now. Otherwise, quit the loop and let eliminate_regs
3279 do its normal replacement. */
3280 if (ep->offset == - INTVAL (XEXP (SET_SRC (old_set), 1)))
3281 {
3282 /* We assume here that we don't need a PARALLEL of
3283 any CLOBBERs for this assignment. There's not
3284 much we can do if we do need it. */
3285 PATTERN (insn) = gen_rtx_SET (VOIDmode,
3286 SET_DEST (old_set),
3287 ep->to_rtx);
3288 INSN_CODE (insn) = -1;
3289 val = 1;
3290 goto done;
3291 }
3292
3293 break;
3294 }
3295 }
3296
3297 old_asm_operands_vec = 0;
3298
3299 /* Replace the body of this insn with a substituted form. If we changed
3300 something, return non-zero.
3301
3302 If we are replacing a body that was a (set X (plus Y Z)), try to
3303 re-recognize the insn. We do this in case we had a simple addition
3304 but now can do this as a load-address. This saves an insn in this
3305 common case. */
3306
3307 new_body = eliminate_regs (old_body, 0, replace ? insn : NULL_RTX);
3308 if (new_body != old_body)
3309 {
3310 /* If we aren't replacing things permanently and we changed something,
3311 make another copy to ensure that all the RTL is new. Otherwise
3312 things can go wrong if find_reload swaps commutative operands
3313 and one is inside RTL that has been copied while the other is not. */
3314
3315 /* Don't copy an asm_operands because (1) there's no need and (2)
3316 copy_rtx can't do it properly when there are multiple outputs. */
3317 if (! replace && asm_noperands (old_body) < 0)
3318 new_body = copy_rtx (new_body);
3319
3320 /* If we had a move insn but now we don't, rerecognize it. This will
3321 cause spurious re-recognition if the old move had a PARALLEL since
3322 the new one still will, but we can't call single_set without
3323 having put NEW_BODY into the insn and the re-recognition won't
3324 hurt in this rare case. */
3325 if (old_set != 0
3326 && ((GET_CODE (SET_SRC (old_set)) == REG
3327 && (GET_CODE (new_body) != SET
3328 || GET_CODE (SET_SRC (new_body)) != REG))
3329 /* If this was a load from or store to memory, compare
3330 the MEM in recog_operand to the one in the insn. If they
3331 are not equal, then rerecognize the insn. */
3332 || (old_set != 0
3333 && ((GET_CODE (SET_SRC (old_set)) == MEM
3334 && SET_SRC (old_set) != recog_operand[1])
3335 || (GET_CODE (SET_DEST (old_set)) == MEM
3336 && SET_DEST (old_set) != recog_operand[0])))
3337 /* If this was an add insn before, rerecognize. */
3338 || GET_CODE (SET_SRC (old_set)) == PLUS))
3339 {
3340 if (! validate_change (insn, &PATTERN (insn), new_body, 0))
3341 /* If recognition fails, store the new body anyway.
3342 It's normal to have recognition failures here
3343 due to bizarre memory addresses; reloading will fix them. */
3344 PATTERN (insn) = new_body;
3345 }
3346 else
3347 PATTERN (insn) = new_body;
3348
3349 val = 1;
3350 }
3351
3352 /* Loop through all elimination pairs. See if any have changed.
3353
3354 We also detect cases where register elimination cannot be done,
3355 namely, if a register would be both changed and referenced outside a MEM
3356 in the resulting insn since such an insn is often undefined and, even if
3357 not, we cannot know what meaning will be given to it. Note that it is
3358 valid to have a register used in an address in an insn that changes it
3359 (presumably with a pre- or post-increment or decrement).
3360
3361 If anything changes, return nonzero. */
3362
3363 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3364 {
3365 if (ep->previous_offset != ep->offset && ep->ref_outside_mem)
3366 ep->can_eliminate = 0;
3367
3368 ep->ref_outside_mem = 0;
3369
3370 if (ep->previous_offset != ep->offset)
3371 val = 1;
3372 }
3373
3374 done:
3375 /* If we changed something, perform elimination in REG_NOTES. This is
3376 needed even when REPLACE is zero because a REG_DEAD note might refer
3377 to a register that we eliminate and could cause a different number
3378 of spill registers to be needed in the final reload pass than in
3379 the pre-passes. */
3380 if (val && REG_NOTES (insn) != 0)
3381 REG_NOTES (insn) = eliminate_regs (REG_NOTES (insn), 0, REG_NOTES (insn));
3382
3383 if (! replace)
3384 pop_obstacks ();
3385
3386 return val;
3387 }
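
/* Illustrative sketch, not part of the pass (the helper name is
   hypothetical): the PLUS special case above collapses an insn whose
   constant exactly cancels the accumulated elimination offset into a
   plain register copy.  Written out as a predicate, the test is:  */
#if 0
static int
plus_collapses_to_copy (insn, ep)
     rtx insn;
     struct elim_table *ep;
{
  rtx set = single_set (insn);

  return (set != 0
          && GET_CODE (SET_SRC (set)) == PLUS
          && XEXP (SET_SRC (set), 0) == ep->from_rtx
          && GET_CODE (XEXP (SET_SRC (set), 1)) == CONST_INT
          /* The constant in the insn is the negative of the current
             offset recorded for this elimination.  */
          && ep->offset == - INTVAL (XEXP (SET_SRC (set), 1)));
}
#endif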
3388
3389 /* Loop through all elimination pairs.
3390 Recalculate the number not at initial offset.
3391
3392 Compute the maximum offset (minimum offset if the stack does not
3393 grow downward) for each elimination pair. */
3394
3395 static void
3396 update_eliminable_offsets ()
3397 {
3398 struct elim_table *ep;
3399
3400 num_not_at_initial_offset = 0;
3401 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3402 {
3403 ep->previous_offset = ep->offset;
3404 if (ep->can_eliminate && ep->offset != ep->initial_offset)
3405 num_not_at_initial_offset++;
3406 }
3407 }
3408
3409 /* Given X, a SET or CLOBBER of DEST, if DEST is the target of a register
3410 replacement we currently believe is valid, mark it as not eliminable if X
3411 modifies DEST in any way other than by adding a constant integer to it.
3412
3413 If DEST is the hard frame pointer, we do nothing because we assume that
3414 all assignments to the hard frame pointer are nonlocal gotos and are being
3415 done at a time when they are valid and do not disturb anything else.
3416 Some machines want to eliminate a fake argument pointer with either the
3417 frame or stack pointer. Assignments to the hard frame pointer must not
3418 prevent this elimination.
3419
3420 Called via note_stores from reload before starting its passes to scan
3421 the insns of the function. */
3422
3423 static void
3424 mark_not_eliminable (dest, x)
3425 rtx dest;
3426 rtx x;
3427 {
3428 register unsigned int i;
3429
3430 /* A SUBREG of a hard register here is just changing its mode. We should
3431 not see a SUBREG of an eliminable hard register, but check just in
3432 case. */
3433 if (GET_CODE (dest) == SUBREG)
3434 dest = SUBREG_REG (dest);
3435
3436 if (dest == hard_frame_pointer_rtx)
3437 return;
3438
3439 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3440 if (reg_eliminate[i].can_eliminate && dest == reg_eliminate[i].to_rtx
3441 && (GET_CODE (x) != SET
3442 || GET_CODE (SET_SRC (x)) != PLUS
3443 || XEXP (SET_SRC (x), 0) != dest
3444 || GET_CODE (XEXP (SET_SRC (x), 1)) != CONST_INT))
3445 {
3446 reg_eliminate[i].can_eliminate_previous
3447 = reg_eliminate[i].can_eliminate = 0;
3448 num_eliminable--;
3449 }
3450 }
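
/* Sketch of the rule enforced above, as a stand-alone predicate (the
   helper is hypothetical and kept out of the build): a store X to an
   elimination target DEST is harmless only when it merely adds a
   constant to DEST; any other modification disables the elimination.  */
#if 0
static int
store_preserves_elimination (dest, x)
     rtx dest;
     rtx x;
{
  return (GET_CODE (x) == SET
          && GET_CODE (SET_SRC (x)) == PLUS
          && XEXP (SET_SRC (x), 0) == dest
          && GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT);
}
#endif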
3451
3452 /* Verify that the initial elimination offsets did not change since the
3453 last call to set_initial_elim_offsets. This is used to catch cases
3454 where something illegal happened during reload_as_needed that could
3455 cause incorrect code to be generated if we did not check for it. */
3456 static void
3457 verify_initial_elim_offsets ()
3458 {
3459 int t;
3460
3461 #ifdef ELIMINABLE_REGS
3462 struct elim_table *ep;
3463
3464 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3465 {
3466 INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, t);
3467 if (t != ep->initial_offset)
3468 abort ();
3469 }
3470 #else
3471 INITIAL_FRAME_POINTER_OFFSET (t);
3472 if (t != reg_eliminate[0].initial_offset)
3473 abort ();
3474 #endif
3475 }
3476
3477 /* Reset all offsets on eliminable registers to their initial values. */
3478 static void
3479 set_initial_elim_offsets ()
3480 {
3481 struct elim_table *ep = reg_eliminate;
3482
3483 #ifdef ELIMINABLE_REGS
3484 for (; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3485 {
3486 INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, ep->initial_offset);
3487 ep->previous_offset = ep->offset = ep->initial_offset;
3488 }
3489 #else
3490 INITIAL_FRAME_POINTER_OFFSET (ep->initial_offset);
3491 ep->previous_offset = ep->offset = ep->initial_offset;
3492 #endif
3493
3494 num_not_at_initial_offset = 0;
3495 }
3496
3497 /* Initialize the known label offsets.
3498 Set a known offset for each forced label to be at the initial offset
3499 of each elimination. We do this because we assume that all
3500 computed jumps occur from a location where each elimination is
3501 at its initial offset.
3502 For all other labels, show that we don't know the offsets. */
3503
3504 static void
3505 set_initial_label_offsets ()
3506 {
3507 rtx x;
3508 bzero ((char *) &offsets_known_at[get_first_label_num ()], num_labels);
3509
3510 for (x = forced_labels; x; x = XEXP (x, 1))
3511 if (XEXP (x, 0))
3512 set_label_offsets (XEXP (x, 0), NULL_RTX, 1);
3513 }
3514
3515 /* Set all elimination offsets to the known values for the code label given
3516 by INSN. */
3517 static void
3518 set_offsets_for_label (insn)
3519 rtx insn;
3520 {
3521 int i;
3522 int label_nr = CODE_LABEL_NUMBER (insn);
3523 struct elim_table *ep;
3524
3525 num_not_at_initial_offset = 0;
3526 for (i = 0, ep = reg_eliminate; i < NUM_ELIMINABLE_REGS; ep++, i++)
3527 {
3528 ep->offset = ep->previous_offset = offsets_at[label_nr][i];
3529 if (ep->can_eliminate && ep->offset != ep->initial_offset)
3530 num_not_at_initial_offset++;
3531 }
3532 }
3533
3534 /* See if anything that happened changes which eliminations are valid.
3535 For example, on the Sparc, whether or not the frame pointer can
3536 be eliminated can depend on what registers have been used. We need
3537 not check some conditions again (such as flag_omit_frame_pointer)
3538 since they can't have changed. */
3539
3540 static void
3541 update_eliminables (pset)
3542 HARD_REG_SET *pset;
3543 {
3544 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3545 int previous_frame_pointer_needed = frame_pointer_needed;
3546 #endif
3547 struct elim_table *ep;
3548
3549 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3550 if ((ep->from == HARD_FRAME_POINTER_REGNUM && FRAME_POINTER_REQUIRED)
3551 #ifdef ELIMINABLE_REGS
3552 || ! CAN_ELIMINATE (ep->from, ep->to)
3553 #endif
3554 )
3555 ep->can_eliminate = 0;
3556
3557 /* Look for the case where we have discovered that we can't replace
3558 register A with register B and that means that we will now be
3559 trying to replace register A with register C. This means we can
3560 no longer replace register C with register B and we need to disable
3561 such an elimination, if it exists. This occurs often with A == ap,
3562 B == sp, and C == fp. */
3563
3564 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3565 {
3566 struct elim_table *op;
3567 register int new_to = -1;
3568
3569 if (! ep->can_eliminate && ep->can_eliminate_previous)
3570 {
3571 /* Find the current elimination for ep->from, if there is a
3572 new one. */
3573 for (op = reg_eliminate;
3574 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
3575 if (op->from == ep->from && op->can_eliminate)
3576 {
3577 new_to = op->to;
3578 break;
3579 }
3580
3581 /* See if there is an elimination of NEW_TO -> EP->TO. If so,
3582 disable it. */
3583 for (op = reg_eliminate;
3584 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
3585 if (op->from == new_to && op->to == ep->to)
3586 op->can_eliminate = 0;
3587 }
3588 }
3589
3590 /* See if any registers that we thought we could eliminate the previous
3591 time are no longer eliminable. If so, something has changed and we
3592 must spill the register. Also, recompute the number of eliminable
3593 registers and see if the frame pointer is needed; it is if there is
3594 no elimination of the frame pointer that we can perform. */
3595
3596 frame_pointer_needed = 1;
3597 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3598 {
3599 if (ep->can_eliminate && ep->from == FRAME_POINTER_REGNUM
3600 && ep->to != HARD_FRAME_POINTER_REGNUM)
3601 frame_pointer_needed = 0;
3602
3603 if (! ep->can_eliminate && ep->can_eliminate_previous)
3604 {
3605 ep->can_eliminate_previous = 0;
3606 SET_HARD_REG_BIT (*pset, ep->from);
3607 num_eliminable--;
3608 }
3609 }
3610
3611 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3612 /* If we didn't need a frame pointer last time, but we do now, spill
3613 the hard frame pointer. */
3614 if (frame_pointer_needed && ! previous_frame_pointer_needed)
3615 SET_HARD_REG_BIT (*pset, HARD_FRAME_POINTER_REGNUM);
3616 #endif
3617 }
3618
3619 /* Initialize the table of registers to eliminate. */
3620 static void
3621 init_elim_table ()
3622 {
3623 struct elim_table *ep;
3624 #ifdef ELIMINABLE_REGS
3625 struct elim_table_1 *ep1;
3626 #endif
3627
3628 if (!reg_eliminate)
3629 {
3630 reg_eliminate = (struct elim_table *)
3631 xmalloc (sizeof (struct elim_table) * NUM_ELIMINABLE_REGS);
3632 bzero ((PTR) reg_eliminate,
3633 sizeof (struct elim_table) * NUM_ELIMINABLE_REGS);
3634 }
3635
3636 /* Does this function require a frame pointer? */
3637
3638 frame_pointer_needed = (! flag_omit_frame_pointer
3639 #ifdef EXIT_IGNORE_STACK
3640 /* ?? If EXIT_IGNORE_STACK is set, we will not save
3641 and restore sp for alloca. So we can't eliminate
3642 the frame pointer in that case. At some point,
3643 we should improve this by emitting the
3644 sp-adjusting insns for this case. */
3645 || (current_function_calls_alloca
3646 && EXIT_IGNORE_STACK)
3647 #endif
3648 || FRAME_POINTER_REQUIRED);
3649
3650 num_eliminable = 0;
3651
3652 #ifdef ELIMINABLE_REGS
3653 for (ep = reg_eliminate, ep1 = reg_eliminate_1;
3654 ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++, ep1++)
3655 {
3656 ep->from = ep1->from;
3657 ep->to = ep1->to;
3658 ep->can_eliminate = ep->can_eliminate_previous
3659 = (CAN_ELIMINATE (ep->from, ep->to)
3660 && ! (ep->to == STACK_POINTER_REGNUM && frame_pointer_needed));
3661 }
3662 #else
3663 reg_eliminate[0].from = reg_eliminate_1[0].from;
3664 reg_eliminate[0].to = reg_eliminate_1[0].to;
3665 reg_eliminate[0].can_eliminate = reg_eliminate[0].can_eliminate_previous
3666 = ! frame_pointer_needed;
3667 #endif
3668
3669 /* Count the number of eliminable registers and build the FROM and TO
3670 REG rtx's. Note that code in gen_rtx will cause, e.g.,
3671 gen_rtx (REG, Pmode, STACK_POINTER_REGNUM) to equal stack_pointer_rtx.
3672 We depend on this. */
3673 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3674 {
3675 num_eliminable += ep->can_eliminate;
3676 ep->from_rtx = gen_rtx_REG (Pmode, ep->from);
3677 ep->to_rtx = gen_rtx_REG (Pmode, ep->to);
3678 }
3679 }
3680 \f
3681 /* Kick all pseudos out of hard register REGNO.
3682 If DUMPFILE is nonzero, log actions taken on that file.
3683
3684 If CANT_ELIMINATE is nonzero, it means that we are doing this spill
3685 because we found we can't eliminate some register. In that case, no pseudos
3686 are allowed to be in the register, even if they are only in a block that
3687 doesn't require spill registers, unlike the case when we are spilling this
3688 hard reg to produce another spill register.
3689
3690 Any pseudos kicked out are recorded in the spilled_pseudos set. */
3691
3692 static void
3693 spill_hard_reg (regno, dumpfile, cant_eliminate)
3694 register int regno;
3695 FILE *dumpfile;
3696 int cant_eliminate;
3697 {
3698 register int i;
3699
3700 if (cant_eliminate)
3701 {
3702 SET_HARD_REG_BIT (bad_spill_regs_global, regno);
3703 regs_ever_live[regno] = 1;
3704 }
3705
3706 /* Spill every pseudo reg that was allocated to this reg
3707 or to something that overlaps this reg. */
3708
3709 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
3710 if (reg_renumber[i] >= 0
3711 && reg_renumber[i] <= regno
3712 && (reg_renumber[i]
3713 + HARD_REGNO_NREGS (reg_renumber[i],
3714 PSEUDO_REGNO_MODE (i))
3715 > regno))
3716 SET_REGNO_REG_SET (spilled_pseudos, i);
3717 }
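
/* The overlap test in the loop above, written out as a predicate for
   illustration (hypothetical helper, not compiled): pseudo I, allocated
   starting at reg_renumber[i], overlaps hard register REGNO when REGNO
   falls inside the block of hard regs that the pseudo occupies.  */
#if 0
static int
pseudo_overlaps_hard_reg (i, regno)
     int i;
     int regno;
{
  int first = reg_renumber[i];

  return (first >= 0
          && first <= regno
          && first + HARD_REGNO_NREGS (first, PSEUDO_REGNO_MODE (i)) > regno);
}
#endif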
3718
3719 /* I'm getting weird preprocessor errors if I use IOR_HARD_REG_SET
3720 from within EXECUTE_IF_SET_IN_REG_SET. Hence this awkwardness. */
3721 static void
3722 ior_hard_reg_set (set1, set2)
3723 HARD_REG_SET *set1, *set2;
3724 {
3725 IOR_HARD_REG_SET (*set1, *set2);
3726 }
3727
3728 /* After find_reload_regs has been run for all insns that need reloads,
3729 and/or spill_hard_regs was called, this function is used to actually
3730 spill pseudo registers and try to reallocate them. It also sets up the
3731 spill_regs array for use by choose_reload_regs. */
3732
3733 static int
3734 finish_spills (global, dumpfile)
3735 int global;
3736 FILE *dumpfile;
3737 {
3738 struct insn_chain *chain;
3739 int something_changed = 0;
3740 int i;
3741
3742 /* Build the spill_regs array for the function. */
3743 /* If there are some registers still to eliminate and one of the spill regs
3744 wasn't ever used before, additional stack space may have to be
3745 allocated to store this register. Thus, we may have changed the offset
3746 between the stack and frame pointers, so mark that something has changed.
3747
3748 One might think that we need only set something_changed to 1 if this is a call-used
3749 register. However, the set of registers that must be saved by the
3750 prologue is not identical to the call-used set. For example, the
3751 register used by the call insn for the return PC is a call-used register,
3752 but must be saved by the prologue. */
3753
3754 n_spills = 0;
3755 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3756 if (TEST_HARD_REG_BIT (used_spill_regs, i))
3757 {
3758 spill_reg_order[i] = n_spills;
3759 spill_regs[n_spills++] = i;
3760 if (num_eliminable && ! regs_ever_live[i])
3761 something_changed = 1;
3762 regs_ever_live[i] = 1;
3763 }
3764 else
3765 spill_reg_order[i] = -1;
3766
3767 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
3768 if (REGNO_REG_SET_P (spilled_pseudos, i))
3769 {
3770 /* Record the current hard register the pseudo is allocated to in
3771 pseudo_previous_regs so we avoid reallocating it to the same
3772 hard reg in a later pass. */
3773 if (reg_renumber[i] < 0)
3774 abort ();
3775 SET_HARD_REG_BIT (pseudo_previous_regs[i], reg_renumber[i]);
3776 /* Mark it as no longer having a hard register home. */
3777 reg_renumber[i] = -1;
3778 /* We will need to scan everything again. */
3779 something_changed = 1;
3780 }
3781
3782 /* Retry global register allocation if possible. */
3783 if (global)
3784 {
3785 bzero ((char *) pseudo_forbidden_regs, max_regno * sizeof (HARD_REG_SET));
3786 /* For every insn that needs reloads, set the registers used as spill
3787 regs in pseudo_forbidden_regs for every pseudo live across the
3788 insn. */
3789 for (chain = insns_need_reload; chain; chain = chain->next_need_reload)
3790 {
3791 EXECUTE_IF_SET_IN_REG_SET
3792 (chain->live_before, FIRST_PSEUDO_REGISTER, i,
3793 {
3794 ior_hard_reg_set (pseudo_forbidden_regs + i,
3795 &chain->used_spill_regs);
3796 });
3797 EXECUTE_IF_SET_IN_REG_SET
3798 (chain->live_after, FIRST_PSEUDO_REGISTER, i,
3799 {
3800 ior_hard_reg_set (pseudo_forbidden_regs + i,
3801 &chain->used_spill_regs);
3802 });
3803 }
3804
3805 /* Retry allocating the spilled pseudos. For each reg, merge the
3806 various reg sets that indicate which hard regs can't be used,
3807 and call retry_global_alloc.
3808 We change spill_pseudos here to only contain pseudos that did not
3809 get a new hard register. */
3810 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
3811 if (reg_old_renumber[i] != reg_renumber[i])
3812 {
3813 HARD_REG_SET forbidden;
3814 COPY_HARD_REG_SET (forbidden, bad_spill_regs_global);
3815 IOR_HARD_REG_SET (forbidden, pseudo_forbidden_regs[i]);
3816 IOR_HARD_REG_SET (forbidden, pseudo_previous_regs[i]);
3817 retry_global_alloc (i, forbidden);
3818 if (reg_renumber[i] >= 0)
3819 CLEAR_REGNO_REG_SET (spilled_pseudos, i);
3820 }
3821 }
3822
3823 /* Fix up the register information in the insn chain.
3824 This involves deleting those of the spilled pseudos which did not get
3825 a new hard register home from the live_{before,after} sets. */
3826 for (chain = reload_insn_chain; chain; chain = chain->next)
3827 {
3828 HARD_REG_SET used_by_pseudos;
3829 HARD_REG_SET used_by_pseudos2;
3830
3831 AND_COMPL_REG_SET (chain->live_before, spilled_pseudos);
3832 AND_COMPL_REG_SET (chain->live_after, spilled_pseudos);
3833
3834 /* Mark any unallocated hard regs as available for spills. That
3835 makes inheritance work somewhat better. */
3836 if (chain->need_reload)
3837 {
3838 REG_SET_TO_HARD_REG_SET (used_by_pseudos, chain->live_before);
3839 REG_SET_TO_HARD_REG_SET (used_by_pseudos2, chain->live_after);
3840 IOR_HARD_REG_SET (used_by_pseudos, used_by_pseudos2);
3841
3842 /* Save the old value for the sanity test below. */
3843 COPY_HARD_REG_SET (used_by_pseudos2, chain->used_spill_regs);
3844
3845 compute_use_by_pseudos (&used_by_pseudos, chain->live_before);
3846 compute_use_by_pseudos (&used_by_pseudos, chain->live_after);
3847 COMPL_HARD_REG_SET (chain->used_spill_regs, used_by_pseudos);
3848 AND_HARD_REG_SET (chain->used_spill_regs, used_spill_regs);
3849
3850 /* Make sure we only enlarge the set. */
3851 GO_IF_HARD_REG_SUBSET (used_by_pseudos2, chain->used_spill_regs, ok);
3852 abort ();
3853 ok:;
3854 }
3855 }
3856
3857 /* Let alter_reg modify the reg rtx's for the modified pseudos. */
3858 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
3859 {
3860 int regno = reg_renumber[i];
3861 if (reg_old_renumber[i] == regno)
3862 continue;
3863
3864 alter_reg (i, reg_old_renumber[i]);
3865 reg_old_renumber[i] = regno;
3866 if (dumpfile)
3867 {
3868 if (regno == -1)
3869 fprintf (dumpfile, " Register %d now on stack.\n\n", i);
3870 else
3871 fprintf (dumpfile, " Register %d now in %d.\n\n",
3872 i, reg_renumber[i]);
3873 }
3874 }
3875
3876 return something_changed;
3877 }
3878 \f
3879 /* Find all paradoxical subregs within X and update reg_max_ref_width.
3880 Also mark any hard registers used to store user variables as
3881 forbidden from being used for spill registers. */
3882
3883 static void
3884 scan_paradoxical_subregs (x)
3885 register rtx x;
3886 {
3887 register int i;
3888 register char *fmt;
3889 register enum rtx_code code = GET_CODE (x);
3890
3891 switch (code)
3892 {
3893 case REG:
3894 #if 0
3895 if (SMALL_REGISTER_CLASSES && REGNO (x) < FIRST_PSEUDO_REGISTER
3896 && REG_USERVAR_P (x))
3897 SET_HARD_REG_BIT (bad_spill_regs_global, REGNO (x));
3898 #endif
3899 return;
3900
3901 case CONST_INT:
3902 case CONST:
3903 case SYMBOL_REF:
3904 case LABEL_REF:
3905 case CONST_DOUBLE:
3906 case CC0:
3907 case PC:
3908 case USE:
3909 case CLOBBER:
3910 return;
3911
3912 case SUBREG:
3913 if (GET_CODE (SUBREG_REG (x)) == REG
3914 && GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3915 reg_max_ref_width[REGNO (SUBREG_REG (x))]
3916 = GET_MODE_SIZE (GET_MODE (x));
3917 return;
3918
3919 default:
3920 break;
3921 }
3922
3923 fmt = GET_RTX_FORMAT (code);
3924 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3925 {
3926 if (fmt[i] == 'e')
3927 scan_paradoxical_subregs (XEXP (x, i));
3928 else if (fmt[i] == 'E')
3929 {
3930 register int j;
3931 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3932 scan_paradoxical_subregs (XVECEXP (x, i, j));
3933 }
3934 }
3935 }
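
/* Example of the paradoxical case handled above (mode sizes are
   hypothetical; 4-byte SImode and 1-byte QImode assumed):
   (subreg:SI (reg:QI N) 0) reads more bytes than the inner pseudo's
   mode provides, so reg_max_ref_width[N] is raised to 4 and a stack
   slot wide enough for the SImode access can be allocated if pseudo N
   ends up in memory.  */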
3936 \f
3937 static int
3938 hard_reg_use_compare (p1p, p2p)
3939 const GENERIC_PTR p1p;
3940 const GENERIC_PTR p2p;
3941 {
3942 struct hard_reg_n_uses *p1 = (struct hard_reg_n_uses *)p1p;
3943 struct hard_reg_n_uses *p2 = (struct hard_reg_n_uses *)p2p;
3944 int bad1 = TEST_HARD_REG_BIT (bad_spill_regs, p1->regno);
3945 int bad2 = TEST_HARD_REG_BIT (bad_spill_regs, p2->regno);
3946 if (bad1 && bad2)
3947 return p1->regno - p2->regno;
3948 if (bad1)
3949 return 1;
3950 if (bad2)
3951 return -1;
3952 if (p1->uses > p2->uses)
3953 return 1;
3954 if (p1->uses < p2->uses)
3955 return -1;
3956 /* If regs are equally good, sort by regno,
3957 so that the results of qsort leave nothing to chance. */
3958 return p1->regno - p2->regno;
3959 }
3960
3961 /* Used for communication between order_regs_for_reload and count_pseudo.
3962 Used to avoid counting one pseudo twice. */
3963 static regset pseudos_counted;
3964
3965 /* Update the costs in N_USES, considering that pseudo REG is live. */
3966 static void
3967 count_pseudo (n_uses, reg)
3968 struct hard_reg_n_uses *n_uses;
3969 int reg;
3970 {
3971 int r = reg_renumber[reg];
3972 int nregs;
3973
3974 if (REGNO_REG_SET_P (pseudos_counted, reg))
3975 return;
3976 SET_REGNO_REG_SET (pseudos_counted, reg);
3977
3978 if (r < 0)
3979 abort ();
3980
3981 nregs = HARD_REGNO_NREGS (r, PSEUDO_REGNO_MODE (reg));
3982 while (nregs-- > 0)
3983 n_uses[r++].uses += REG_N_REFS (reg);
3984 }
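
/* Worked example with hypothetical numbers: a DImode pseudo with
   REG_N_REFS == 3 that is allocated to hard regs 4 and 5 adds 3 to both
   hard_reg_n_uses[4].uses and hard_reg_n_uses[5].uses; the
   pseudos_counted set ensures this happens only once even when the
   pseudo is live both before and after the insn.  */
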
3985 /* Choose the order to consider regs for use as reload registers
3986 based on how much trouble would be caused by spilling one.
3987 Store them in order of decreasing preference in potential_reload_regs. */
3988
3989 static void
3990 order_regs_for_reload (chain)
3991 struct insn_chain *chain;
3992 {
3993 register int i;
3994 register int o = 0;
3995 struct hard_reg_n_uses hard_reg_n_uses[FIRST_PSEUDO_REGISTER];
3996
3997 pseudos_counted = ALLOCA_REG_SET ();
3998
3999 COPY_HARD_REG_SET (bad_spill_regs, bad_spill_regs_global);
4000
4001 /* Count number of uses of each hard reg by pseudo regs allocated to it
4002 and then order them by decreasing use. */
4003
4004 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4005 {
4006 int j;
4007
4008 hard_reg_n_uses[i].regno = i;
4009 hard_reg_n_uses[i].uses = 0;
4010
4011 /* Test the various reasons why we can't use a register for
4012 spilling in this insn. */
4013 if (fixed_regs[i]
4014 || REGNO_REG_SET_P (chain->live_before, i)
4015 || REGNO_REG_SET_P (chain->live_after, i))
4016 {
4017 SET_HARD_REG_BIT (bad_spill_regs, i);
4018 continue;
4019 }
4020
4021 /* Now find out which pseudos are allocated to it, and update
4022 hard_reg_n_uses. */
4023 CLEAR_REG_SET (pseudos_counted);
4024
4025 EXECUTE_IF_SET_IN_REG_SET
4026 (chain->live_before, FIRST_PSEUDO_REGISTER, j,
4027 {
4028 count_pseudo (hard_reg_n_uses, j);
4029 });
4030 EXECUTE_IF_SET_IN_REG_SET
4031 (chain->live_after, FIRST_PSEUDO_REGISTER, j,
4032 {
4033 count_pseudo (hard_reg_n_uses, j);
4034 });
4035 }
4036
4037 FREE_REG_SET (pseudos_counted);
4038
4039 /* Prefer registers not so far used, for use in temporary loading.
4040 Among them, if REG_ALLOC_ORDER is defined, use that order.
4041 Otherwise, prefer registers not preserved by calls. */
4042
4043 #ifdef REG_ALLOC_ORDER
4044 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4045 {
4046 int regno = reg_alloc_order[i];
4047
4048 if (hard_reg_n_uses[regno].uses == 0
4049 && ! TEST_HARD_REG_BIT (bad_spill_regs, regno))
4050 potential_reload_regs[o++] = regno;
4051 }
4052 #else
4053 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4054 {
4055 if (hard_reg_n_uses[i].uses == 0 && call_used_regs[i]
4056 && ! TEST_HARD_REG_BIT (bad_spill_regs, i))
4057 potential_reload_regs[o++] = i;
4058 }
4059 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4060 {
4061 if (hard_reg_n_uses[i].uses == 0 && ! call_used_regs[i]
4062 && ! TEST_HARD_REG_BIT (bad_spill_regs, i))
4063 potential_reload_regs[o++] = i;
4064 }
4065 #endif
4066
4067 qsort (hard_reg_n_uses, FIRST_PSEUDO_REGISTER,
4068 sizeof hard_reg_n_uses[0], hard_reg_use_compare);
4069
4070 /* Now add the regs that are already used,
4071 preferring those used less often. The fixed and otherwise forbidden
4072 registers will be at the end of this list. */
4073
4074 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4075 if (hard_reg_n_uses[i].uses != 0
4076 && ! TEST_HARD_REG_BIT (bad_spill_regs, hard_reg_n_uses[i].regno))
4077 potential_reload_regs[o++] = hard_reg_n_uses[i].regno;
4078 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4079 if (TEST_HARD_REG_BIT (bad_spill_regs, hard_reg_n_uses[i].regno))
4080 potential_reload_regs[o++] = hard_reg_n_uses[i].regno;
4081 }
4082 \f
4083 /* Reload pseudo-registers into hard regs around each insn as needed.
4084 Additional register load insns are output before the insn that needs it
4085 and perhaps store insns after insns that modify the reloaded pseudo reg.
4086
4087 reg_last_reload_reg and reg_reloaded_contents keep track of
4088 which registers are already available in reload registers.
4089 We update these for the reloads that we perform,
4090 as the insns are scanned. */
4091
4092 static void
4093 reload_as_needed (live_known)
4094 int live_known;
4095 {
4096 struct insn_chain *chain;
4097 register int i;
4098 rtx x;
4099
4100 bzero ((char *) spill_reg_rtx, sizeof spill_reg_rtx);
4101 bzero ((char *) spill_reg_store, sizeof spill_reg_store);
4102 reg_last_reload_reg = (rtx *) alloca (max_regno * sizeof (rtx));
4103 bzero ((char *) reg_last_reload_reg, max_regno * sizeof (rtx));
4104 reg_has_output_reload = (char *) alloca (max_regno);
4105 CLEAR_HARD_REG_SET (reg_reloaded_valid);
4106
4107 set_initial_elim_offsets ();
4108
4109 for (chain = reload_insn_chain; chain; chain = chain->next)
4110 {
4111 rtx prev;
4112 rtx insn = chain->insn;
4113 rtx old_next = NEXT_INSN (insn);
4114
4115 /* If we pass a label, copy the offsets from the label information
4116 into the current offsets of each elimination. */
4117 if (GET_CODE (insn) == CODE_LABEL)
4118 set_offsets_for_label (insn);
4119
4120 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
4121 {
4122 rtx oldpat = PATTERN (insn);
4123
4124 /* If this is a USE and CLOBBER of a MEM, ensure that any
4125 references to eliminable registers have been removed. */
4126
4127 if ((GET_CODE (PATTERN (insn)) == USE
4128 || GET_CODE (PATTERN (insn)) == CLOBBER)
4129 && GET_CODE (XEXP (PATTERN (insn), 0)) == MEM)
4130 XEXP (XEXP (PATTERN (insn), 0), 0)
4131 = eliminate_regs (XEXP (XEXP (PATTERN (insn), 0), 0),
4132 GET_MODE (XEXP (PATTERN (insn), 0)),
4133 NULL_RTX);
4134
4135 /* If we need to do register elimination processing, do so.
4136 This might delete the insn, in which case we are done. */
4137 if (num_eliminable && chain->need_elim)
4138 {
4139 eliminate_regs_in_insn (insn, 1);
4140 if (GET_CODE (insn) == NOTE)
4141 {
4142 update_eliminable_offsets ();
4143 continue;
4144 }
4145 }
4146
4147 /* If need_elim is nonzero but need_reload is zero, one might think
4148 that we could simply set n_reloads to 0. However, find_reloads
4149 could have done some manipulation of the insn (such as swapping
4150 commutative operands), and these manipulations are lost during
4151 the first pass for every insn that needs register elimination.
4152 So the actions of find_reloads must be redone here. */
4153
4154 if (! chain->need_elim && ! chain->need_reload
4155 && ! chain->need_operand_change)
4156 n_reloads = 0;
4157 /* First find the pseudo regs that must be reloaded for this insn.
4158 This info is returned in the tables reload_... (see reload.h).
4159 Also modify the body of INSN by substituting RELOAD
4160 rtx's for those pseudo regs. */
4161 else
4162 {
4163 bzero (reg_has_output_reload, max_regno);
4164 CLEAR_HARD_REG_SET (reg_is_output_reload);
4165
4166 find_reloads (insn, 1, spill_indirect_levels, live_known,
4167 spill_reg_order);
4168 }
4169
4170 if (num_eliminable && chain->need_elim)
4171 update_eliminable_offsets ();
4172
4173 if (n_reloads > 0)
4174 {
4175 rtx next = NEXT_INSN (insn);
4176 rtx p;
4177
4178 prev = PREV_INSN (insn);
4179
4180 /* Now compute which reload regs to reload them into. Perhaps
4181 reusing reload regs from previous insns, or else output
4182 load insns to reload them. Maybe output store insns too.
4183 Record the choices of reload reg in reload_reg_rtx. */
4184 choose_reload_regs (chain);
4185
4186 /* Merge any reloads that we didn't combine for fear of
4187 increasing the number of spill registers needed but now
4188 discover can be safely merged. */
4189 if (SMALL_REGISTER_CLASSES)
4190 merge_assigned_reloads (insn);
4191
4192 /* Generate the insns to reload operands into or out of
4193 their reload regs. */
4194 emit_reload_insns (chain);
4195
4196 /* Substitute the chosen reload regs from reload_reg_rtx
4197 into the insn's body (or perhaps into the bodies of other
4198 load and store insns that we just made for reloading
4199 and that we moved the structure into). */
4200 subst_reloads ();
4201
4202 /* If this was an ASM, make sure that all the reload insns
4203 we have generated are valid. If not, give an error
4204 and delete them. */
4205
4206 if (asm_noperands (PATTERN (insn)) >= 0)
4207 for (p = NEXT_INSN (prev); p != next; p = NEXT_INSN (p))
4208 if (p != insn && GET_RTX_CLASS (GET_CODE (p)) == 'i'
4209 && (recog_memoized (p) < 0
4210 || (extract_insn (p), ! constrain_operands (1))))
4211 {
4212 error_for_asm (insn,
4213 "`asm' operand requires impossible reload");
4214 PUT_CODE (p, NOTE);
4215 NOTE_SOURCE_FILE (p) = 0;
4216 NOTE_LINE_NUMBER (p) = NOTE_INSN_DELETED;
4217 }
4218 }
4219 /* Any previously reloaded spilled pseudo reg, stored in this insn,
4220 is no longer validly lying around to save a future reload.
4221 Note that this does not detect pseudos that were reloaded
4222 for this insn in order to be stored into
4223 (obeying register constraints). That is correct; such reload
4224 registers ARE still valid. */
4225 note_stores (oldpat, forget_old_reloads_1);
4226
4227 /* There may have been CLOBBER insns placed after INSN. So scan
4228 between INSN and NEXT and use them to forget old reloads. */
4229 for (x = NEXT_INSN (insn); x != old_next; x = NEXT_INSN (x))
4230 if (GET_CODE (x) == INSN && GET_CODE (PATTERN (x)) == CLOBBER)
4231 note_stores (PATTERN (x), forget_old_reloads_1);
4232
4233 #ifdef AUTO_INC_DEC
4234 /* Likewise for regs altered by auto-increment in this insn.
4235 REG_INC notes have been changed by reloading:
4236 find_reloads_address_1 records substitutions for them,
4237 which have been performed by subst_reloads above. */
4238 for (i = n_reloads - 1; i >= 0; i--)
4239 {
4240 rtx in_reg = reload_in_reg[i];
4241 if (in_reg)
4242 {
4243 enum rtx_code code = GET_CODE (in_reg);
4244 /* PRE_INC / PRE_DEC will have the reload register ending up
4245 with the same value as the stack slot, but that doesn't
4246 hold true for POST_INC / POST_DEC. Either we have to
4247 convert the memory access to a true POST_INC / POST_DEC,
4248 or we can't use the reload register for inheritance. */
4249 if ((code == POST_INC || code == POST_DEC)
4250 && TEST_HARD_REG_BIT (reg_reloaded_valid,
4251 REGNO (reload_reg_rtx[i]))
4252 /* Make sure it is the inc/dec pseudo, and not
4253 some other (e.g. output operand) pseudo. */
4254 && (reg_reloaded_contents[REGNO (reload_reg_rtx[i])]
4255 == REGNO (XEXP (in_reg, 0))))
4256
4257 {
4258 rtx reload_reg = reload_reg_rtx[i];
4259 enum machine_mode mode = GET_MODE (reload_reg);
4260 int n = 0;
4261 rtx p;
4262
4263 for (p = PREV_INSN (old_next); p != prev; p = PREV_INSN (p))
4264 {
4265 /* We really want to ignore REG_INC notes here, so
4266 use PATTERN (p) as argument to reg_set_p . */
4267 if (reg_set_p (reload_reg, PATTERN (p)))
4268 break;
4269 n = count_occurrences (PATTERN (p), reload_reg);
4270 if (! n)
4271 continue;
4272 if (n == 1)
4273 n = validate_replace_rtx (reload_reg,
4274 gen_rtx (code, mode,
4275 reload_reg), p);
4276 break;
4277 }
4278 if (n == 1)
4279 REG_NOTES (p) = gen_rtx_EXPR_LIST (REG_INC, reload_reg,
4280 REG_NOTES (p));
4281 else
4282 forget_old_reloads_1 (XEXP (in_reg, 0), NULL_RTX);
4283 }
4284 }
4285 }
4286 #if 0 /* ??? Is this code obsolete now? Need to check carefully. */
4287 /* Likewise for regs altered by auto-increment in this insn.
4288 But note that the reg-notes are not changed by reloading:
4289 they still contain the pseudo-regs, not the spill regs. */
4290 for (x = REG_NOTES (insn); x; x = XEXP (x, 1))
4291 if (REG_NOTE_KIND (x) == REG_INC)
4292 {
4293 /* See if this pseudo reg was reloaded in this insn.
4294 If so, its last-reload info is still valid
4295 because it is based on this insn's reload. */
4296 for (i = 0; i < n_reloads; i++)
4297 if (reload_out[i] == XEXP (x, 0))
4298 break;
4299
4300 if (i == n_reloads)
4301 forget_old_reloads_1 (XEXP (x, 0), NULL_RTX);
4302 }
4303 #endif
4304 #endif
4305 }
4306 /* A reload reg's contents are unknown after a label. */
4307 if (GET_CODE (insn) == CODE_LABEL)
4308 CLEAR_HARD_REG_SET (reg_reloaded_valid);
4309
4310 /* Don't assume a reload reg is still good after a call insn
4311 if it is a call-used reg. */
4312 else if (GET_CODE (insn) == CALL_INSN)
4313 AND_COMPL_HARD_REG_SET (reg_reloaded_valid, call_used_reg_set);
4314
4315 /* In case registers overlap, allow certain insns to invalidate
4316 particular hard registers. */
4317
4318 #ifdef INSN_CLOBBERS_REGNO_P
4319 for (i = 0 ; i < FIRST_PSEUDO_REGISTER; i++)
4320 if (TEST_HARD_REG_BIT (reg_reloaded_valid, i)
4321 && INSN_CLOBBERS_REGNO_P (insn, i))
4322 CLEAR_HARD_REG_BIT (reg_reloaded_valid, i);
4323 #endif
4324
4325 #ifdef USE_C_ALLOCA
4326 alloca (0);
4327 #endif
4328 }
4329 }
4330
4331 /* Discard all record of any value reloaded from X,
4332 or reloaded in X from someplace else;
4333 unless X is an output reload reg of the current insn.
4334
4335 X may be a hard reg (the reload reg)
4336 or it may be a pseudo reg that was reloaded from. */
4337
4338 static void
4339 forget_old_reloads_1 (x, ignored)
4340 rtx x;
4341 rtx ignored ATTRIBUTE_UNUSED;
4342 {
4343 register int regno;
4344 int nr;
4345 int offset = 0;
4346
4347 /* note_stores does give us subregs of hard regs. */
4348 while (GET_CODE (x) == SUBREG)
4349 {
4350 offset += SUBREG_WORD (x);
4351 x = SUBREG_REG (x);
4352 }
4353
4354 if (GET_CODE (x) != REG)
4355 return;
4356
4357 regno = REGNO (x) + offset;
4358
4359 if (regno >= FIRST_PSEUDO_REGISTER)
4360 nr = 1;
4361 else
4362 {
4363 int i;
4364 nr = HARD_REGNO_NREGS (regno, GET_MODE (x));
4365 /* Storing into a spilled-reg invalidates its contents.
4366 This can happen if a block-local pseudo is allocated to that reg
4367 and it wasn't spilled because this block's total need is 0.
4368 Then some insn might have an optional reload and use this reg. */
4369 for (i = 0; i < nr; i++)
4370 /* But don't do this if the reg actually serves as an output
4371 reload reg in the current instruction. */
4372 if (n_reloads == 0
4373 || ! TEST_HARD_REG_BIT (reg_is_output_reload, regno + i))
4374 CLEAR_HARD_REG_BIT (reg_reloaded_valid, regno + i);
4375 }
4376
4377 /* Since value of X has changed,
4378 forget any value previously copied from it. */
4379
4380 while (nr-- > 0)
4381 /* But don't forget a copy if this is the output reload
4382 that establishes the copy's validity. */
4383 if (n_reloads == 0 || reg_has_output_reload[regno + nr] == 0)
4384 reg_last_reload_reg[regno + nr] = 0;
4385 }
4386 \f
4387 /* For each reload, the mode of the reload register. */
4388 static enum machine_mode reload_mode[MAX_RELOADS];
4389
4390 /* For each reload, the largest number of registers it will require. */
4391 static int reload_nregs[MAX_RELOADS];
4392
4393 /* Comparison function for qsort to decide which of two reloads
4394 should be handled first. *P1 and *P2 are the reload numbers. */
4395
4396 static int
4397 reload_reg_class_lower (r1p, r2p)
4398 const GENERIC_PTR r1p;
4399 const GENERIC_PTR r2p;
4400 {
4401 register int r1 = *(short *)r1p, r2 = *(short *)r2p;
4402 register int t;
4403
4404 /* Consider required reloads before optional ones. */
4405 t = reload_optional[r1] - reload_optional[r2];
4406 if (t != 0)
4407 return t;
4408
4409 /* Count all solitary classes before non-solitary ones. */
4410 t = ((reg_class_size[(int) reload_reg_class[r2]] == 1)
4411 - (reg_class_size[(int) reload_reg_class[r1]] == 1));
4412 if (t != 0)
4413 return t;
4414
4415 /* Aside from solitaires, consider all multi-reg groups first. */
4416 t = reload_nregs[r2] - reload_nregs[r1];
4417 if (t != 0)
4418 return t;
4419
4420 /* Consider reloads in order of increasing reg-class number. */
4421 t = (int) reload_reg_class[r1] - (int) reload_reg_class[r2];
4422 if (t != 0)
4423 return t;
4424
4425 /* If reloads are equally urgent, sort by reload number,
4426 so that the results of qsort leave nothing to chance. */
4427 return r1 - r2;
4428 }
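
/* Usage sketch (the array name is assumed, not taken from this file):
   the comparator expects pointers into an array of reload numbers held
   as shorts, e.g.

       short reload_order[MAX_RELOADS];
       ...
       qsort (reload_order, n_reloads, sizeof (short),
              reload_reg_class_lower);  */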
4429 \f
4430 /* The following HARD_REG_SETs indicate when each hard register is
4431 used for a reload of various parts of the current insn. */
4432
4433 /* If reg is in use as a reload reg for a RELOAD_OTHER reload. */
4434 static HARD_REG_SET reload_reg_used;
4435 /* If reg is in use for a RELOAD_FOR_INPUT_ADDRESS reload for operand I. */
4436 static HARD_REG_SET reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
4437 /* If reg is in use for a RELOAD_FOR_INPADDR_ADDRESS reload for operand I. */
4438 static HARD_REG_SET reload_reg_used_in_inpaddr_addr[MAX_RECOG_OPERANDS];
4439 /* If reg is in use for a RELOAD_FOR_OUTPUT_ADDRESS reload for operand I. */
4440 static HARD_REG_SET reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
4441 /* If reg is in use for a RELOAD_FOR_OUTADDR_ADDRESS reload for operand I. */
4442 static HARD_REG_SET reload_reg_used_in_outaddr_addr[MAX_RECOG_OPERANDS];
4443 /* If reg is in use for a RELOAD_FOR_INPUT reload for operand I. */
4444 static HARD_REG_SET reload_reg_used_in_input[MAX_RECOG_OPERANDS];
4445 /* If reg is in use for a RELOAD_FOR_OUTPUT reload for operand I. */
4446 static HARD_REG_SET reload_reg_used_in_output[MAX_RECOG_OPERANDS];
4447 /* If reg is in use for a RELOAD_FOR_OPERAND_ADDRESS reload. */
4448 static HARD_REG_SET reload_reg_used_in_op_addr;
4449 /* If reg is in use for a RELOAD_FOR_OPADDR_ADDR reload. */
4450 static HARD_REG_SET reload_reg_used_in_op_addr_reload;
4451 /* If reg is in use for a RELOAD_FOR_INSN reload. */
4452 static HARD_REG_SET reload_reg_used_in_insn;
4453 /* If reg is in use for a RELOAD_FOR_OTHER_ADDRESS reload. */
4454 static HARD_REG_SET reload_reg_used_in_other_addr;
4455
4456 /* If reg is in use as a reload reg for any sort of reload. */
4457 static HARD_REG_SET reload_reg_used_at_all;
4458
4459 /* If reg is in use as an inherited reload. We just mark the first register
4460 in the group. */
4461 static HARD_REG_SET reload_reg_used_for_inherit;
4462
4463 /* Records which hard regs are allocated to a pseudo during any point of the
4464 current insn. */
4465 static HARD_REG_SET reg_used_by_pseudo;
4466
4467 /* Mark reg REGNO as in use for a reload of the sort spec'd by OPNUM and
4468 TYPE. MODE is used to indicate how many consecutive regs are
4469 actually used. */
4470
4471 static void
4472 mark_reload_reg_in_use (regno, opnum, type, mode)
4473 int regno;
4474 int opnum;
4475 enum reload_type type;
4476 enum machine_mode mode;
4477 {
4478 int nregs = HARD_REGNO_NREGS (regno, mode);
4479 int i;
4480
4481 for (i = regno; i < nregs + regno; i++)
4482 {
4483 switch (type)
4484 {
4485 case RELOAD_OTHER:
4486 SET_HARD_REG_BIT (reload_reg_used, i);
4487 break;
4488
4489 case RELOAD_FOR_INPUT_ADDRESS:
4490 SET_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], i);
4491 break;
4492
4493 case RELOAD_FOR_INPADDR_ADDRESS:
4494 SET_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], i);
4495 break;
4496
4497 case RELOAD_FOR_OUTPUT_ADDRESS:
4498 SET_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], i);
4499 break;
4500
4501 case RELOAD_FOR_OUTADDR_ADDRESS:
4502 SET_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], i);
4503 break;
4504
4505 case RELOAD_FOR_OPERAND_ADDRESS:
4506 SET_HARD_REG_BIT (reload_reg_used_in_op_addr, i);
4507 break;
4508
4509 case RELOAD_FOR_OPADDR_ADDR:
4510 SET_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, i);
4511 break;
4512
4513 case RELOAD_FOR_OTHER_ADDRESS:
4514 SET_HARD_REG_BIT (reload_reg_used_in_other_addr, i);
4515 break;
4516
4517 case RELOAD_FOR_INPUT:
4518 SET_HARD_REG_BIT (reload_reg_used_in_input[opnum], i);
4519 break;
4520
4521 case RELOAD_FOR_OUTPUT:
4522 SET_HARD_REG_BIT (reload_reg_used_in_output[opnum], i);
4523 break;
4524
4525 case RELOAD_FOR_INSN:
4526 SET_HARD_REG_BIT (reload_reg_used_in_insn, i);
4527 break;
4528 }
4529
4530 SET_HARD_REG_BIT (reload_reg_used_at_all, i);
4531 }
4532 }
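
/* For instance (hypothetical register numbers; a two-word DImode is
   assumed): marking regno 6 for a RELOAD_FOR_INPUT reload of operand 0
   in DImode sets bits 6 and 7 in both reload_reg_used_in_input[0] and
   reload_reg_used_at_all, since HARD_REGNO_NREGS (6, DImode) == 2 on
   such a machine.  */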
4533
4534 /* Similarly, but show REGNO is no longer in use for a reload. */
4535
4536 static void
4537 clear_reload_reg_in_use (regno, opnum, type, mode)
4538 int regno;
4539 int opnum;
4540 enum reload_type type;
4541 enum machine_mode mode;
4542 {
4543 int nregs = HARD_REGNO_NREGS (regno, mode);
4544 int start_regno, end_regno;
4545 int i;
4546 /* A complication is that for some reload types, inheritance might
4547 allow multiple reloads of the same type to share a reload register.
4548 We set check_opnum if we have to check only reloads with the same
4549 operand number, and check_any if we have to check all reloads. */
4550 int check_opnum = 0;
4551 int check_any = 0;
4552 HARD_REG_SET *used_in_set;
4553
4554 switch (type)
4555 {
4556 case RELOAD_OTHER:
4557 used_in_set = &reload_reg_used;
4558 break;
4559
4560 case RELOAD_FOR_INPUT_ADDRESS:
4561 used_in_set = &reload_reg_used_in_input_addr[opnum];
4562 break;
4563
4564 case RELOAD_FOR_INPADDR_ADDRESS:
4565 check_opnum = 1;
4566 used_in_set = &reload_reg_used_in_inpaddr_addr[opnum];
4567 break;
4568
4569 case RELOAD_FOR_OUTPUT_ADDRESS:
4570 used_in_set = &reload_reg_used_in_output_addr[opnum];
4571 break;
4572
4573 case RELOAD_FOR_OUTADDR_ADDRESS:
4574 check_opnum = 1;
4575 used_in_set = &reload_reg_used_in_outaddr_addr[opnum];
4576 break;
4577
4578 case RELOAD_FOR_OPERAND_ADDRESS:
4579 used_in_set = &reload_reg_used_in_op_addr;
4580 break;
4581
4582 case RELOAD_FOR_OPADDR_ADDR:
4583 check_any = 1;
4584 used_in_set = &reload_reg_used_in_op_addr_reload;
4585 break;
4586
4587 case RELOAD_FOR_OTHER_ADDRESS:
4588 used_in_set = &reload_reg_used_in_other_addr;
4589 check_any = 1;
4590 break;
4591
4592 case RELOAD_FOR_INPUT:
4593 used_in_set = &reload_reg_used_in_input[opnum];
4594 break;
4595
4596 case RELOAD_FOR_OUTPUT:
4597 used_in_set = &reload_reg_used_in_output[opnum];
4598 break;
4599
4600 case RELOAD_FOR_INSN:
4601 used_in_set = &reload_reg_used_in_insn;
4602 break;
4603 default:
4604 abort ();
4605 }
4606 /* We resolve conflicts with remaining reloads of the same type by
4607 excluding the intervals of reload registers used by them from the
4608 interval of freed reload registers. Since we only keep track of
4609 one set of interval bounds, we might have to exclude somewhat
4610 more than what would be necessary if we used a HARD_REG_SET here.
4611 But this should only happen very infrequently, so there should
4612 be no reason to worry about it. */
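
/* Worked example with hypothetical numbers: suppose we are freeing
   registers 4..7 (REGNO 4, four hard regs). If a surviving reload of
   the same type occupies registers 6 and 7, end_regno is pulled back
   to 6 and only 4 and 5 are cleared; if it instead occupies 3..5,
   start_regno is pushed up to 6 and only 6 and 7 are cleared.  */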
4613
4614 start_regno = regno;
4615 end_regno = regno + nregs;
4616 if (check_opnum || check_any)
4617 {
4618 for (i = n_reloads - 1; i >= 0; i--)
4619 {
4620 if (reload_when_needed[i] == type
4621 && (check_any || reload_opnum[i] == opnum)
4622 && reload_reg_rtx[i])
4623 {
4624 int conflict_start = true_regnum (reload_reg_rtx[i]);
4625 int conflict_end
4626 = (conflict_start
4627 + HARD_REGNO_NREGS (conflict_start, reload_mode[i]));
4628
4629 /* If there is an overlap with the first to-be-freed register,
4630 adjust the interval start. */
4631 if (conflict_start <= start_regno && conflict_end > start_regno)
4632 start_regno = conflict_end;
4633 /* Otherwise, if there is a conflict with one of the other
4634 to-be-freed registers, adjust the interval end. */
4635 if (conflict_start > start_regno && conflict_start < end_regno)
4636 end_regno = conflict_start;
4637 }
4638 }
4639 }
4640 for (i = start_regno; i < end_regno; i++)
4641 CLEAR_HARD_REG_BIT (*used_in_set, i);
4642 }
4643
4644 /* 1 if reg REGNO is free as a reload reg for a reload of the sort
4645 specified by OPNUM and TYPE. */
4646
4647 static int
4648 reload_reg_free_p (regno, opnum, type)
4649 int regno;
4650 int opnum;
4651 enum reload_type type;
4652 {
4653 int i;
4654
4655 /* In use for a RELOAD_OTHER means it's not available for anything. */
4656 if (TEST_HARD_REG_BIT (reload_reg_used, regno))
4657 return 0;
4658
4659 switch (type)
4660 {
4661 case RELOAD_OTHER:
4662 /* In use for anything means we can't use it for RELOAD_OTHER. */
4663 if (TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno)
4664 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4665 || TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
4666 return 0;
4667
4668 for (i = 0; i < reload_n_operands; i++)
4669 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4670 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
4671 || TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4672 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4673 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4674 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4675 return 0;
4676
4677 return 1;
4678
4679 case RELOAD_FOR_INPUT:
4680 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4681 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
4682 return 0;
4683
4684 if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
4685 return 0;
4686
4687 /* If it is used for some other input, can't use it. */
4688 for (i = 0; i < reload_n_operands; i++)
4689 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4690 return 0;
4691
4692 /* If it is used in a later operand's address, can't use it. */
4693 for (i = opnum + 1; i < reload_n_operands; i++)
4694 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4695 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
4696 return 0;
4697
4698 return 1;
4699
4700 case RELOAD_FOR_INPUT_ADDRESS:
4701 /* Can't use a register if it is used for an input address for this
4702 operand or used as an input in an earlier one. */
4703 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], regno)
4704 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], regno))
4705 return 0;
4706
4707 for (i = 0; i < opnum; i++)
4708 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4709 return 0;
4710
4711 return 1;
4712
4713 case RELOAD_FOR_INPADDR_ADDRESS:
4714 /* Can't use a register if it is used for an input address
4715 for this operand or used as an input in an earlier
4716 one. */
4717 if (TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], regno))
4718 return 0;
4719
4720 for (i = 0; i < opnum; i++)
4721 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4722 return 0;
4723
4724 return 1;
4725
4726 case RELOAD_FOR_OUTPUT_ADDRESS:
4727 /* Can't use a register if it is used for an output address for this
4728 operand or used as an output in this or a later operand. */
4729 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno))
4730 return 0;
4731
4732 for (i = opnum; i < reload_n_operands; i++)
4733 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4734 return 0;
4735
4736 return 1;
4737
4738 case RELOAD_FOR_OUTADDR_ADDRESS:
4739 /* Can't use a register if it is used for an output address
4740 for this operand or used as an output in this or a
4741 later operand. */
4742 if (TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], regno))
4743 return 0;
4744
4745 for (i = opnum; i < reload_n_operands; i++)
4746 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4747 return 0;
4748
4749 return 1;
4750
4751 case RELOAD_FOR_OPERAND_ADDRESS:
4752 for (i = 0; i < reload_n_operands; i++)
4753 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4754 return 0;
4755
4756 return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4757 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4758
4759 case RELOAD_FOR_OPADDR_ADDR:
4760 for (i = 0; i < reload_n_operands; i++)
4761 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4762 return 0;
4763
4764 return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno));
4765
4766 case RELOAD_FOR_OUTPUT:
4767 /* This cannot share a register with RELOAD_FOR_INSN reloads, other
4768 outputs, or an operand address for this or an earlier output. */
4769 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
4770 return 0;
4771
4772 for (i = 0; i < reload_n_operands; i++)
4773 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4774 return 0;
4775
4776 for (i = 0; i <= opnum; i++)
4777 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4778 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno))
4779 return 0;
4780
4781 return 1;
4782
4783 case RELOAD_FOR_INSN:
4784 for (i = 0; i < reload_n_operands; i++)
4785 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4786 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4787 return 0;
4788
4789 return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4790 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4791
4792 case RELOAD_FOR_OTHER_ADDRESS:
4793 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4794 }
4795 abort ();
4796 }
4797
4798 /* Return 1 if the value in reload reg REGNO, as used by a reload
4799 needed for the part of the insn specified by OPNUM and TYPE,
4800 is not in use for a reload in any prior part of the insn.
4801
4802 We can assume that the reload reg was already tested for availability
4803 at the time it is needed, and we should not check this again,
4804 in case the reg has already been marked in use.
4805
4806 However, if EQUIV is set, we are checking the availability of a register
4807 holding an equivalence to the value to be loaded into the reload register,
4808 not the availability of the reload register itself.
4809
4810 This is still less stringent than what reload_reg_free_p checks; for
4811 example, compare the checks for RELOAD_OTHER. */
4812
4813 static int
4814 reload_reg_free_before_p (regno, opnum, type, equiv)
4815 int regno;
4816 int opnum;
4817 enum reload_type type;
4818 int equiv;
4819 {
4820 int i;
4821
4822 /* The code to handle EQUIV below is wrong.
4823
4824 If we want to know if a value in a particular reload register is available
4825 at a particular point in time during reloading, we must check *all*
4826 prior reloads to see if they clobber the value.
4827
4828 Note this is significantly different from determining when a register is
4829 free for usage in a reload!
4830
4831 This change is temporary. It will go away. */
4832 if (equiv)
4833 return 0;
4834
4835 switch (type)
4836 {
4837 case RELOAD_FOR_OTHER_ADDRESS:
4838 /* These always come first. */
4839 if (equiv && TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno))
4840 return 0;
4841 return 1;
4842
4843 case RELOAD_OTHER:
4844 if (equiv && TEST_HARD_REG_BIT (reload_reg_used, regno))
4845 return 0;
4846 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4847
4848 /* If this use is for part of the insn,
4849 check the reg is not in use for any prior part. It is tempting
4850 to try to do this by falling through from objects that occur
4851 later in the insn to ones that occur earlier, but that will not
4852 correctly take into account the fact that here we MUST ignore
4853 things that would prevent the register from being allocated in
4854 the first place, since we know that it was allocated. */
4855
4856 case RELOAD_FOR_OUTPUT_ADDRESS:
4857 if (equiv
4858 && TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno))
4859 return 0;
4860 /* Earlier reloads include RELOAD_FOR_OUTADDR_ADDRESS reloads. */
4861 if (TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], regno))
4862 return 0;
4863 /* ... fall through ... */
4864 case RELOAD_FOR_OUTADDR_ADDRESS:
4865 if (equiv
4866 && (TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], regno)
4867 || TEST_HARD_REG_BIT (reload_reg_used, regno)))
4868 return 0;
4869 /* Earlier reloads are for earlier outputs or their addresses,
4870 any RELOAD_FOR_INSN reloads, any inputs or their addresses, or any
4871 RELOAD_FOR_OTHER_ADDRESS reloads (we know it can't conflict with
4872 RELOAD_OTHER). */
4873 for (i = 0; i < opnum; i++)
4874 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4875 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno))
4876 return 0;
4877
4878 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
4879 return 0;
4880
4881 for (i = 0; i < reload_n_operands; i++)
4882 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4883 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
4884 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4885 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4886 return 0;
4887
4888 return (! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno)
4889 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4890 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno)
4891 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4892
4893 case RELOAD_FOR_OUTPUT:
4894 case RELOAD_FOR_INSN:
4895 /* There is no reason to call this function for output reloads, thus
4896 anything we'd put here wouldn't be tested. So just abort. */
4897 abort ();
4898
4899 case RELOAD_FOR_OPERAND_ADDRESS:
4900 if (equiv && TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
4901 return 0;
4902
4903 /* Earlier reloads include RELOAD_FOR_OPADDR_ADDR reloads. */
4904 if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
4905 return 0;
4906
4907 /* ... fall through ... */
4908
4909 case RELOAD_FOR_OPADDR_ADDR:
4910 if (equiv)
4911 {
4912 if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno)
4913 || TEST_HARD_REG_BIT (reload_reg_used, regno))
4914 return 0;
4915 for (i = 0; i < reload_n_operands; i++)
4916 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4917 return 0;
4918 }
4919 /* These can't conflict with inputs, or each other, so all we have to
4920 test is input addresses and the addresses of OTHER items. */
4921
4922 for (i = 0; i < reload_n_operands; i++)
4923 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4924 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
4925 return 0;
4926
4927 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4928
4929 case RELOAD_FOR_INPUT:
4930 if (equiv && TEST_HARD_REG_BIT (reload_reg_used, regno))
4931 return 0;
4932
4933 /* The only things earlier are the address for this and
4934 earlier inputs, other inputs (which we know we don't conflict
4935 with), and addresses of RELOAD_OTHER objects.
4936 We can ignore the conflict with addresses of this operand, since
4937 when we inherit this operand, its address reloads are discarded. */
4938
4939 for (i = 0; i < opnum; i++)
4940 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4941 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
4942 return 0;
4943
4944 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4945
4946 case RELOAD_FOR_INPUT_ADDRESS:
4947 /* Earlier reloads include RELOAD_FOR_INPADDR_ADDRESS reloads. */
4948 if (TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], regno))
4949 return 0;
4950 /* ... fall through ... */
4951 case RELOAD_FOR_INPADDR_ADDRESS:
4952 if (equiv && TEST_HARD_REG_BIT (reload_reg_used, regno))
4953 return 0;
4954
4955 /* Similarly, all we have to check is for use in earlier inputs'
4956 addresses. */
4957 for (i = 0; i < opnum; i++)
4958 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4959 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
4960 return 0;
4961
4962 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4963 }
4964 abort ();
4965 }
4966
4967 /* Return 1 if the value in reload reg REGNO, as used by a reload
4968 needed for the part of the insn specified by OPNUM and TYPE,
4969 is still available in REGNO at the end of the insn.
4970
4971 We can assume that the reload reg was already tested for availability
4972 at the time it is needed, and we should not check this again,
4973 in case the reg has already been marked in use. */
4974
4975 static int
4976 reload_reg_reaches_end_p (regno, opnum, type)
4977 int regno;
4978 int opnum;
4979 enum reload_type type;
4980 {
4981 int i;
4982
4983 switch (type)
4984 {
4985 case RELOAD_OTHER:
4986 /* Since a RELOAD_OTHER reload claims the reg for the entire insn,
4987 its value must reach the end. */
4988 return 1;
4989
4990 /* If this use is for part of the insn,
4991 its value reaches the end if no subsequent part uses the same register.
4992 Just like the above function, don't try to do this with lots
4993 of fallthroughs. */
4994
4995 case RELOAD_FOR_OTHER_ADDRESS:
4996 /* Here we check for everything else, since these don't conflict
4997 with anything else and everything comes later. */
4998
4999 for (i = 0; i < reload_n_operands; i++)
5000 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
5001 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
5002 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno)
5003 || TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
5004 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
5005 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
5006 return 0;
5007
5008 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
5009 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
5010 && ! TEST_HARD_REG_BIT (reload_reg_used, regno));
5011
5012 case RELOAD_FOR_INPUT_ADDRESS:
5013 case RELOAD_FOR_INPADDR_ADDRESS:
5014 /* Similar, except that we check only for this and subsequent inputs
5015 and for the addresses of only subsequent inputs, and we do not need
5016 to check for RELOAD_OTHER objects since they are known not to
5017 conflict. */
5018
5019 for (i = opnum; i < reload_n_operands; i++)
5020 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
5021 return 0;
5022
5023 for (i = opnum + 1; i < reload_n_operands; i++)
5024 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
5025 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
5026 return 0;
5027
5028 for (i = 0; i < reload_n_operands; i++)
5029 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
5030 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
5031 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
5032 return 0;
5033
5034 if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
5035 return 0;
5036
5037 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
5038 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno));
5039
5040 case RELOAD_FOR_INPUT:
5041 /* Similar to input address, except we start at the next operand for
5042 both input and input address and we do not check for
5043 RELOAD_FOR_OPERAND_ADDRESS and RELOAD_FOR_INSN since these
5044 would conflict. */
5045
5046 for (i = opnum + 1; i < reload_n_operands; i++)
5047 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
5048 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
5049 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
5050 return 0;
5051
5052 /* ... fall through ... */
5053
5054 case RELOAD_FOR_OPERAND_ADDRESS:
5055 /* Check outputs and their addresses. */
5056
5057 for (i = 0; i < reload_n_operands; i++)
5058 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
5059 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
5060 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
5061 return 0;
5062
5063 return 1;
5064
5065 case RELOAD_FOR_OPADDR_ADDR:
5066 for (i = 0; i < reload_n_operands; i++)
5067 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
5068 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
5069 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
5070 return 0;
5071
5072 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
5073 && !TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno));
5074
5075 case RELOAD_FOR_INSN:
5076 /* These conflict with outputs and with RELOAD_OTHER reloads. So
5077 we need only check for output addresses. */
5078
5079 opnum = -1;
5080
5081 /* ... fall through ... */
5082
5083 case RELOAD_FOR_OUTPUT:
5084 case RELOAD_FOR_OUTPUT_ADDRESS:
5085 case RELOAD_FOR_OUTADDR_ADDRESS:
5086 /* We already know these can't conflict with a later output. So the
5087 only things to check are later output addresses. */
5088 for (i = opnum + 1; i < reload_n_operands; i++)
5089 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
5090 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno))
5091 return 0;
5092
5093 return 1;
5094 }
5095
5096 abort ();
5097 }
5098 \f
5099 /* Return 1 if the reloads denoted by R1 and R2 cannot share a register.
5100 Return 0 otherwise.
5101
5102 This function uses the same algorithm as reload_reg_free_p above. */
5103
5104 int
5105 reloads_conflict (r1, r2)
5106 int r1, r2;
5107 {
5108 enum reload_type r1_type = reload_when_needed[r1];
5109 enum reload_type r2_type = reload_when_needed[r2];
5110 int r1_opnum = reload_opnum[r1];
5111 int r2_opnum = reload_opnum[r2];
5112
5113 /* RELOAD_OTHER conflicts with everything. */
5114 if (r2_type == RELOAD_OTHER)
5115 return 1;
5116
5117 /* Otherwise, check conflicts differently for each type. */
5118
5119 switch (r1_type)
5120 {
5121 case RELOAD_FOR_INPUT:
5122 return (r2_type == RELOAD_FOR_INSN
5123 || r2_type == RELOAD_FOR_OPERAND_ADDRESS
5124 || r2_type == RELOAD_FOR_OPADDR_ADDR
5125 || r2_type == RELOAD_FOR_INPUT
5126 || ((r2_type == RELOAD_FOR_INPUT_ADDRESS
5127 || r2_type == RELOAD_FOR_INPADDR_ADDRESS)
5128 && r2_opnum > r1_opnum));
5129
5130 case RELOAD_FOR_INPUT_ADDRESS:
5131 return ((r2_type == RELOAD_FOR_INPUT_ADDRESS && r1_opnum == r2_opnum)
5132 || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));
5133
5134 case RELOAD_FOR_INPADDR_ADDRESS:
5135 return ((r2_type == RELOAD_FOR_INPADDR_ADDRESS && r1_opnum == r2_opnum)
5136 || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));
5137
5138 case RELOAD_FOR_OUTPUT_ADDRESS:
5139 return ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS && r2_opnum == r1_opnum)
5140 || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum >= r1_opnum));
5141
5142 case RELOAD_FOR_OUTADDR_ADDRESS:
5143 return ((r2_type == RELOAD_FOR_OUTADDR_ADDRESS && r2_opnum == r1_opnum)
5144 || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum >= r1_opnum));
5145
5146 case RELOAD_FOR_OPERAND_ADDRESS:
5147 return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_INSN
5148 || r2_type == RELOAD_FOR_OPERAND_ADDRESS);
5149
5150 case RELOAD_FOR_OPADDR_ADDR:
5151 return (r2_type == RELOAD_FOR_INPUT
5152 || r2_type == RELOAD_FOR_OPADDR_ADDR);
5153
5154 case RELOAD_FOR_OUTPUT:
5155 return (r2_type == RELOAD_FOR_INSN || r2_type == RELOAD_FOR_OUTPUT
5156 || ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS
5157 || r2_type == RELOAD_FOR_OUTADDR_ADDRESS)
5158 && r2_opnum >= r1_opnum));
5159
5160 case RELOAD_FOR_INSN:
5161 return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_OUTPUT
5162 || r2_type == RELOAD_FOR_INSN
5163 || r2_type == RELOAD_FOR_OPERAND_ADDRESS);
5164
5165 case RELOAD_FOR_OTHER_ADDRESS:
5166 return r2_type == RELOAD_FOR_OTHER_ADDRESS;
5167
5168 case RELOAD_OTHER:
5169 return 1;
5170
5171 default:
5172 abort ();
5173 }
5174 }
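/* A rough usage sketch: a caller that wants to know whether reloads R1
   and R2 may share a register can simply test

	if (! reloads_conflict (r1, r2))
	  ... R1 and R2 may be given the same spill register ...

   For instance, with the cases above, two RELOAD_FOR_INPUT reloads always
   conflict, while a RELOAD_FOR_INPUT_ADDRESS reload for operand 1
   conflicts with a RELOAD_FOR_INPUT reload only when that input belongs
   to operand 0 (r2_opnum < r1_opnum).  */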
5175 \f
5176 /* Vector of reload-numbers showing the order in which the reloads should
5177 be processed. */
5178 short reload_order[MAX_RELOADS];
5179
5180 /* Indexed by reload number, 1 if incoming value
5181 inherited from previous insns. */
5182 char reload_inherited[MAX_RELOADS];
5183
5184 /* For an inherited reload, this is the insn the reload was inherited from,
5185 if we know it. Otherwise, this is 0. */
5186 rtx reload_inheritance_insn[MAX_RELOADS];
5187
5188 /* If non-zero, this is a place to get the value of the reload,
5189 rather than using reload_in. */
5190 rtx reload_override_in[MAX_RELOADS];
5191
5192 /* For each reload, the hard register number of the register used,
5193 or -1 if we did not need a register for this reload. */
5194 int reload_spill_index[MAX_RELOADS];
5195
5196 /* Return 1 if the value in reload reg REGNO, as used by a reload
5197 needed for the part of the insn specified by OPNUM and TYPE,
5198 may be used to load VALUE into it.
5199
5200 Other read-only reloads with the same value do not conflict
5201 unless OUT is non-zero and these other reloads have to live while
5202 output reloads live.
5203
5204 RELOADNUM is the number of the reload we want to load this value for;
5205 a reload does not conflict with itself.
5206
5207 The caller has to make sure that there is no conflict with the return
5208 register. */
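   /* For example, two RELOAD_FOR_INPUT reloads that both read VALUE and
      have no output part may share REGNO: the loop below skips the
      lifetime comparison when reload_in[i] equals VALUE and reload_out[i]
      is zero.  */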
5209 static int
5210 reload_reg_free_for_value_p (regno, opnum, type, value, out, reloadnum)
5211 int regno;
5212 int opnum;
5213 enum reload_type type;
5214 rtx value, out;
5215 int reloadnum;
5216 {
5217 int time1;
5218 int i;
5219
5220 /* We use some pseudo 'time' value to check if the lifetimes of the
5221 new register use would overlap with that of a previous reload
5222 that is not read-only or uses a different value.
5223 The 'time' used doesn't have to be linear in any shape or form, just
5224 monotonic.
5225 Some reload types use different 'buckets' for each operand.
5226 So there are MAX_RECOG_OPERANDS different time values for each
5227 such reload type.
5228 We compute TIME1 as the time when the register for the prospective
5229 new reload ceases to be live, and TIME2 for each existing
5230 reload as the time when the reload register of that reload
5231 becomes live.
5232 Where there is little to be gained by exact lifetime calculations,
5233 we just make conservative assumptions, i.e. a longer lifetime;
5234 this is done in the 'default:' cases. */
5235 switch (type)
5236 {
5237 case RELOAD_FOR_OTHER_ADDRESS:
5238 time1 = 0;
5239 break;
5240 /* For each input, we might have a sequence of RELOAD_FOR_INPADDR_ADDRESS,
5241 RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_INPUT. By adding 0 / 1 / 2,
5242 respectively, to the time values for these, we get distinct time
5243 values. To get distinct time values for each operand, we have to
5244 multiply opnum by at least three. We round that up to four because
5245 multiply by four is often cheaper. */
5246 case RELOAD_FOR_INPADDR_ADDRESS:
5247 time1 = opnum * 4 + 1;
5248 break;
5249 case RELOAD_FOR_INPUT_ADDRESS:
5250 time1 = opnum * 4 + 2;
5251 break;
5252 case RELOAD_FOR_OPADDR_ADDR:
5253 /* opnum * 4 + 3 < opnum * 4 + 4
5254 <= (MAX_RECOG_OPERANDS - 1) * 4 + 4 == MAX_RECOG_OPERANDS * 4 */
5255 time1 = MAX_RECOG_OPERANDS * 4;
5256 break;
5257 case RELOAD_FOR_INPUT:
5258 /* All RELOAD_FOR_INPUT reloads remain live till just before the
5259 instruction is executed. */
5260 time1 = MAX_RECOG_OPERANDS * 4 + 1;
5261 break;
5262 case RELOAD_FOR_OPERAND_ADDRESS:
5263 /* RELOAD_FOR_OPERAND_ADDRESS reloads are live even while the insn
5264 is executed. */
5265 time1 = MAX_RECOG_OPERANDS * 4 + 2;
5266 break;
5267 case RELOAD_FOR_OUTPUT_ADDRESS:
5268 time1 = MAX_RECOG_OPERANDS * 4 + 3 + opnum;
5269 break;
5270 default:
5271 time1 = MAX_RECOG_OPERANDS * 5 + 3;
5272 }
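  /* Worked example, assuming MAX_RECOG_OPERANDS were 30: operand 0 would
     get TIME1 values 1 and 2 for its RELOAD_FOR_INPADDR_ADDRESS and
     RELOAD_FOR_INPUT_ADDRESS reloads, operand 1 would get 5 and 6, every
     RELOAD_FOR_INPUT reload would get 121, RELOAD_FOR_OPADDR_ADDR 120,
     RELOAD_FOR_OPERAND_ADDRESS 122, output addresses would start at 123,
     and the default case would yield 153.  */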
5273
5274 for (i = 0; i < n_reloads; i++)
5275 {
5276 rtx reg = reload_reg_rtx[i];
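	  /* The unsigned subtraction below is the usual single-comparison
	     range check; it is equivalent to
	     true_regnum (reg) <= regno
	     && regno < true_regnum (reg) + HARD_REGNO_NREGS (REGNO (reg),
							      GET_MODE (reg)).  */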
5277 if (reg && GET_CODE (reg) == REG
5278 && ((unsigned) regno - true_regnum (reg)
5279 <= HARD_REGNO_NREGS (REGNO (reg), GET_MODE (reg)) - (unsigned)1)
5280 && i != reloadnum)
5281 {
5282 if (out
5283 && reload_when_needed[i] != RELOAD_FOR_INPUT
5284 && reload_when_needed[i] != RELOAD_FOR_INPUT_ADDRESS
5285 && reload_when_needed[i] != RELOAD_FOR_INPADDR_ADDRESS)
5286 return 0;
5287 if (! reload_in[i] || ! rtx_equal_p (reload_in[i], value)
5288 || reload_out[i])
5289 {
5290 int time2;
5291 switch (reload_when_needed[i])
5292 {
5293 case RELOAD_FOR_OTHER_ADDRESS:
5294 time2 = 0;
5295 break;
5296 case RELOAD_FOR_INPADDR_ADDRESS:
5297 /* find_reloads makes sure that a
5298 RELOAD_FOR_{INP,OP,OUT}ADDR_ADDRESS reload is only used
5299 by at most one - the first -
5300 RELOAD_FOR_{INPUT,OPERAND,OUTPUT}_ADDRESS . If the
5301 address reload is inherited, the address address reload
5302 goes away, so we can ignore this conflict. */
5303 if (type == RELOAD_FOR_INPUT_ADDRESS && reloadnum == i + 1)
5304 continue;
5305 time2 = reload_opnum[i] * 4 + 1;
5306 break;
5307 case RELOAD_FOR_INPUT_ADDRESS:
5308 time2 = reload_opnum[i] * 4 + 2;
5309 break;
5310 case RELOAD_FOR_INPUT:
5311 time2 = reload_opnum[i] * 4 + 3;
5312 break;
5313 case RELOAD_FOR_OPADDR_ADDR:
5314 if (type == RELOAD_FOR_OPERAND_ADDRESS && reloadnum == i + 1)
5315 continue;
5316 time2 = MAX_RECOG_OPERANDS * 4;
5317 break;
5318 case RELOAD_FOR_OPERAND_ADDRESS:
5319 time2 = MAX_RECOG_OPERANDS * 4 + 1;
5320 break;
5321 case RELOAD_FOR_OUTPUT:
5322 /* All RELOAD_FOR_OUTPUT reloads become live just after the
5323 instruction is executed. */
5324 time2 = MAX_RECOG_OPERANDS * 4 + 3;
5325 break;
5326 case RELOAD_FOR_OUTADDR_ADDRESS:
5327 if (type == RELOAD_FOR_OUTPUT_ADDRESS && reloadnum == i + 1)
5328 continue;
5329 /* fall through. */
5330 /* The first RELOAD_FOR_OUTPUT_ADDRESS reload conflicts with the
5331 RELOAD_FOR_OUTPUT reloads, so assign it the same time value. */
5332 case RELOAD_FOR_OUTPUT_ADDRESS:
5333 time2 = MAX_RECOG_OPERANDS * 4 + 3 + reload_opnum[i];
5334 break;
5335 case RELOAD_OTHER:
5336 if (! reload_in[i] || rtx_equal_p (reload_in[i], value))
5337 {
5338 time2 = MAX_RECOG_OPERANDS * 4 + 3;
5339 break;
5340 }
5341 default:
5342 time2 = 0;
5343 }
5344 if (time1 >= time2)
5345 return 0;
5346 }
5347 }
5348 }
5349 return 1;
5350 }
5351
5352 /* Find a spill register to use as a reload register for reload R.
5353 LAST_RELOAD is non-zero if this is the last reload for the insn being
5354 processed.
5355
5356 Set reload_reg_rtx[R] to the register allocated.
5357
5358 If NOERROR is nonzero, we return 1 if successful,
5359 or 0 if we couldn't find a spill reg and we didn't change anything. */
5360
5361 static int
5362 allocate_reload_reg (chain, r, last_reload, noerror)
5363 struct insn_chain *chain;
5364 int r;
5365 int last_reload;
5366 int noerror;
5367 {
5368 rtx insn = chain->insn;
5369 int i, pass, count, regno;
5370 rtx new;
5371
5372 /* If we put this reload ahead, thinking it is a group,
5373 then insist on finding a group. Otherwise we can grab a
5374 reg that some other reload needs.
5375 (That can happen when we have a 68000 DATA_OR_FP_REG
5376 which is a group of data regs or one fp reg.)
5377 We need not be so restrictive if there are no more reloads
5378 for this insn.
5379
5380 ??? Really it would be nicer to have smarter handling
5381 for that kind of reg class, where a problem like this is normal.
5382 Perhaps those classes should be avoided for reloading
5383 by use of more alternatives. */
5384
5385 int force_group = reload_nregs[r] > 1 && ! last_reload;
5386
5387 /* If we want a single register and haven't yet found one,
5388 take any reg in the right class and not in use.
5389 If we want a consecutive group, here is where we look for it.
5390
5391 We use two passes so we can first look for reload regs to
5392 reuse, which are already in use for other reloads in this insn,
5393 and only then use additional registers.
5394 I think that maximizing reuse is needed to make sure we don't
5395 run out of reload regs. Suppose we have three reloads, and
5396 reloads A and B can share regs. These need two regs.
5397 Suppose A and B are given different regs.
5398 That leaves none for C. */
5399 for (pass = 0; pass < 2; pass++)
5400 {
5401 /* I is the index in spill_regs.
5402 We advance it round-robin between insns to use all spill regs
5403 equally, so that inherited reloads have a chance
5404 of leapfrogging each other. Don't do this, however, when we have
5405 group needs and failure would be fatal; if we only have a relatively
5406 small number of spill registers, and more than one of them has
5407 group needs, then by starting in the middle, we may end up
5408 allocating the first one in such a way that we are not left with
5409 sufficient groups to handle the rest. */
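      /* A hypothetical illustration: with four consecutive spill regs
	 r0..r3 and two reloads that each need a pair, starting the search
	 at r1 could hand out r1/r2 and leave only the non-consecutive r0
	 and r3, whereas starting at the beginning leaves r2/r3 intact for
	 the second group.  */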
5410
5411 if (noerror || ! force_group)
5412 i = last_spill_reg;
5413 else
5414 i = -1;
5415
5416 for (count = 0; count < n_spills; count++)
5417 {
5418 int class = (int) reload_reg_class[r];
5419 int regnum;
5420
5421 i++;
5422 if (i >= n_spills)
5423 i -= n_spills;
5424 regnum = spill_regs[i];
5425
5426 if ((reload_reg_free_p (regnum, reload_opnum[r],
5427 reload_when_needed[r])
5428 || (reload_in[r]
5429 /* We check reload_reg_used to make sure we
5430 don't clobber the return register. */
5431 && ! TEST_HARD_REG_BIT (reload_reg_used, regnum)
5432 && reload_reg_free_for_value_p (regnum,
5433 reload_opnum[r],
5434 reload_when_needed[r],
5435 reload_in[r],
5436 reload_out[r], r)))
5437 && TEST_HARD_REG_BIT (reg_class_contents[class], regnum)
5438 && HARD_REGNO_MODE_OK (regnum, reload_mode[r])
5439 /* Look first for regs to share, then for unshared. But
5440 don't share regs used for inherited reloads; they are
5441 the ones we want to preserve. */
5442 && (pass
5443 || (TEST_HARD_REG_BIT (reload_reg_used_at_all,
5444 regnum)
5445 && ! TEST_HARD_REG_BIT (reload_reg_used_for_inherit,
5446 regnum))))
5447 {
5448 int nr = HARD_REGNO_NREGS (regnum, reload_mode[r]);
5449 /* Avoid the problem where spilling a GENERAL_OR_FP_REG
5450 (on 68000) got us two FP regs. If NR is 1,
5451 we would reject both of them. */
5452 if (force_group)
5453 nr = CLASS_MAX_NREGS (reload_reg_class[r], reload_mode[r]);
5454 /* If we need only one reg, we have already won. */
5455 if (nr == 1)
5456 {
5457 /* But reject a single reg if we demand a group. */
5458 if (force_group)
5459 continue;
5460 break;
5461 }
5462 /* Otherwise check that as many consecutive regs as we need
5463 are available here.
5464 Also, don't use for a group registers that are
5465 needed for nongroups. */
5466 if (! TEST_HARD_REG_BIT (chain->counted_for_nongroups, regnum))
5467 while (nr > 1)
5468 {
5469 regno = regnum + nr - 1;
5470 if (!(TEST_HARD_REG_BIT (reg_class_contents[class], regno)
5471 && spill_reg_order[regno] >= 0
5472 && reload_reg_free_p (regno, reload_opnum[r],
5473 reload_when_needed[r])
5474 && ! TEST_HARD_REG_BIT (chain->counted_for_nongroups,
5475 regno)))
5476 break;
5477 nr--;
5478 }
5479 if (nr == 1)
5480 break;
5481 }
5482 }
5483
5484 /* If we found something on pass 1, omit pass 2. */
5485 if (count < n_spills)
5486 break;
5487 }
5488
5489 /* We should have found a spill register by now. */
5490 if (count == n_spills)
5491 {
5492 if (noerror)
5493 return 0;
5494 goto failure;
5495 }
5496
5497 /* I is the index in SPILL_REG_RTX of the reload register we are to
5498 allocate. Get an rtx for it and find its register number. */
5499
5500 new = spill_reg_rtx[i];
5501
5502 if (new == 0 || GET_MODE (new) != reload_mode[r])
5503 spill_reg_rtx[i] = new
5504 = gen_rtx_REG (reload_mode[r], spill_regs[i]);
5505
5506 regno = true_regnum (new);
5507
5508 /* Detect when the reload reg can't hold the reload mode.
5509 This used to be one `if', but the Sequent compiler can't handle that. */
5510 if (HARD_REGNO_MODE_OK (regno, reload_mode[r]))
5511 {
5512 enum machine_mode test_mode = VOIDmode;
5513 if (reload_in[r])
5514 test_mode = GET_MODE (reload_in[r]);
5515 /* If reload_in[r] has VOIDmode, it means we will load it
5516 in whatever mode the reload reg has: to wit, reload_mode[r].
5517 We have already tested that for validity. */
5518 /* Aside from that, we need to test that the expressions
5519 to reload from or into have modes which are valid for this
5520 reload register. Otherwise the reload insns would be invalid. */
5521 if (! (reload_in[r] != 0 && test_mode != VOIDmode
5522 && ! HARD_REGNO_MODE_OK (regno, test_mode)))
5523 if (! (reload_out[r] != 0
5524 && ! HARD_REGNO_MODE_OK (regno, GET_MODE (reload_out[r]))))
5525 {
5526 /* The reg is OK. */
5527 last_spill_reg = i;
5528
5529 /* Mark as in use for this insn the reload regs we use
5530 for this. */
5531 mark_reload_reg_in_use (spill_regs[i], reload_opnum[r],
5532 reload_when_needed[r], reload_mode[r]);
5533
5534 reload_reg_rtx[r] = new;
5535 reload_spill_index[r] = spill_regs[i];
5536 return 1;
5537 }
5538 }
5539
5540 /* The reg is not OK. */
5541 if (noerror)
5542 return 0;
5543
5544 failure:
5545 if (asm_noperands (PATTERN (insn)) < 0)
5546 /* It's the compiler's fault. */
5547 fatal_insn ("Could not find a spill register", insn);
5548
5549 /* It's the user's fault; the operand's mode and constraint
5550 don't match. Disable this reload so we don't crash in final. */
5551 error_for_asm (insn,
5552 "`asm' operand constraint incompatible with operand size");
5553 reload_in[r] = 0;
5554 reload_out[r] = 0;
5555 reload_reg_rtx[r] = 0;
5556 reload_optional[r] = 1;
5557 reload_secondary_p[r] = 1;
5558
5559 return 1;
5560 }
5561 \f
5562 /* Assign hard reg targets for the pseudo-registers we must reload
5563 into hard regs for this insn.
5564 Also output the instructions to copy them in and out of the hard regs.
5565
5566 For machines with register classes, we are responsible for
5567 finding a reload reg in the proper class. */
5568
5569 static void
5570 choose_reload_regs (chain)
5571 struct insn_chain *chain;
5572 {
5573 rtx insn = chain->insn;
5574 register int i, j;
5575 int max_group_size = 1;
5576 enum reg_class group_class = NO_REGS;
5577 int inheritance;
5578 int pass;
5579
5580 rtx save_reload_reg_rtx[MAX_RELOADS];
5581 char save_reload_inherited[MAX_RELOADS];
5582 rtx save_reload_inheritance_insn[MAX_RELOADS];
5583 rtx save_reload_override_in[MAX_RELOADS];
5584 int save_reload_spill_index[MAX_RELOADS];
5585 HARD_REG_SET save_reload_reg_used;
5586 HARD_REG_SET save_reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
5587 HARD_REG_SET save_reload_reg_used_in_inpaddr_addr[MAX_RECOG_OPERANDS];
5588 HARD_REG_SET save_reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
5589 HARD_REG_SET save_reload_reg_used_in_outaddr_addr[MAX_RECOG_OPERANDS];
5590 HARD_REG_SET save_reload_reg_used_in_input[MAX_RECOG_OPERANDS];
5591 HARD_REG_SET save_reload_reg_used_in_output[MAX_RECOG_OPERANDS];
5592 HARD_REG_SET save_reload_reg_used_in_op_addr;
5593 HARD_REG_SET save_reload_reg_used_in_op_addr_reload;
5594 HARD_REG_SET save_reload_reg_used_in_insn;
5595 HARD_REG_SET save_reload_reg_used_in_other_addr;
5596 HARD_REG_SET save_reload_reg_used_at_all;
5597
5598 bzero (reload_inherited, MAX_RELOADS);
5599 bzero ((char *) reload_inheritance_insn, MAX_RELOADS * sizeof (rtx));
5600 bzero ((char *) reload_override_in, MAX_RELOADS * sizeof (rtx));
5601
5602 CLEAR_HARD_REG_SET (reload_reg_used);
5603 CLEAR_HARD_REG_SET (reload_reg_used_at_all);
5604 CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr);
5605 CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr_reload);
5606 CLEAR_HARD_REG_SET (reload_reg_used_in_insn);
5607 CLEAR_HARD_REG_SET (reload_reg_used_in_other_addr);
5608
5609 CLEAR_HARD_REG_SET (reg_used_by_pseudo);
5610 compute_use_by_pseudos (&reg_used_by_pseudo, chain->live_before);
5611 compute_use_by_pseudos (&reg_used_by_pseudo, chain->live_after);
5612
5613 for (i = 0; i < reload_n_operands; i++)
5614 {
5615 CLEAR_HARD_REG_SET (reload_reg_used_in_output[i]);
5616 CLEAR_HARD_REG_SET (reload_reg_used_in_input[i]);
5617 CLEAR_HARD_REG_SET (reload_reg_used_in_input_addr[i]);
5618 CLEAR_HARD_REG_SET (reload_reg_used_in_inpaddr_addr[i]);
5619 CLEAR_HARD_REG_SET (reload_reg_used_in_output_addr[i]);
5620 CLEAR_HARD_REG_SET (reload_reg_used_in_outaddr_addr[i]);
5621 }
5622
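  /* The IOR_COMPL_HARD_REG_SET below ors in the complement of the chain's
     used_spill_regs, i.e. it marks every hard register that is not one of
     this insn's spill registers as already in use, presumably so that
     only those spill registers are treated as free below.  */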
5623 IOR_COMPL_HARD_REG_SET (reload_reg_used, chain->used_spill_regs);
5624
5625 #if 0 /* Not needed, now that we can always retry without inheritance. */
5626 /* See if we have more mandatory reloads than spill regs.
5627 If so, then we cannot risk optimizations that could prevent
5628 reloads from sharing one spill register.
5629
5630 Since we will try finding a better register than reload_reg_rtx
5631 unless it is equal to reload_in or reload_out, count such reloads. */
5632
5633 {
5634 int tem = 0;
5635 for (j = 0; j < n_reloads; j++)
5636 if (! reload_optional[j]
5637 && (reload_in[j] != 0 || reload_out[j] != 0 || reload_secondary_p[j])
5638 && (reload_reg_rtx[j] == 0
5639 || (! rtx_equal_p (reload_reg_rtx[j], reload_in[j])
5640 && ! rtx_equal_p (reload_reg_rtx[j], reload_out[j]))))
5641 tem++;
5642 if (tem > n_spills)
5643 must_reuse = 1;
5644 }
5645 #endif
5646
5647 /* In order to be certain of getting the registers we need,
5648 we must sort the reloads into order of increasing register class.
5649 Then our grabbing of reload registers will parallel the process
5650 that provided the reload registers.
5651
5652 Also note whether any of the reloads wants a consecutive group of regs.
5653 If so, record the maximum size of the group desired and what
5654 register class contains all the groups needed by this insn. */
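  /* As an illustration, if one reload is restricted to a small class such
     as a base-register class while another accepts any general register,
     handling the restricted reload first keeps the flexible one from
     taking the only register the restricted reload could use.  */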
5655
5656 for (j = 0; j < n_reloads; j++)
5657 {
5658 reload_order[j] = j;
5659 reload_spill_index[j] = -1;
5660
5661 reload_mode[j]
5662 = (reload_inmode[j] == VOIDmode
5663 || (GET_MODE_SIZE (reload_outmode[j])
5664 > GET_MODE_SIZE (reload_inmode[j])))
5665 ? reload_outmode[j] : reload_inmode[j];
5666
5667 reload_nregs[j] = CLASS_MAX_NREGS (reload_reg_class[j], reload_mode[j]);
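      /* For example, on a hypothetical 32-bit target an SImode input and
	 a DImode output would give reload_mode[j] == DImode, and for a
	 general-register class reload_nregs[j] would then be 2, i.e. this
	 reload wants a group.  */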
5668
5669 if (reload_nregs[j] > 1)
5670 {
5671 max_group_size = MAX (reload_nregs[j], max_group_size);
5672 group_class = reg_class_superunion[(int)reload_reg_class[j]][(int)group_class];
5673 }
5674
5675 /* If we have already decided to use a certain register,
5676 don't use it in another way. */
5677 if (reload_reg_rtx[j])
5678 mark_reload_reg_in_use (REGNO (reload_reg_rtx[j]), reload_opnum[j],
5679 reload_when_needed[j], reload_mode[j]);
5680 }
5681
5682 if (n_reloads > 1)
5683 qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);
5684
5685 bcopy ((char *) reload_reg_rtx, (char *) save_reload_reg_rtx,
5686 sizeof reload_reg_rtx);
5687 bcopy (reload_inherited, save_reload_inherited, sizeof reload_inherited);
5688 bcopy ((char *) reload_inheritance_insn,
5689 (char *) save_reload_inheritance_insn,
5690 sizeof reload_inheritance_insn);
5691 bcopy ((char *) reload_override_in, (char *) save_reload_override_in,
5692 sizeof reload_override_in);
5693 bcopy ((char *) reload_spill_index, (char *) save_reload_spill_index,
5694 sizeof reload_spill_index);
5695 COPY_HARD_REG_SET (save_reload_reg_used, reload_reg_used);
5696 COPY_HARD_REG_SET (save_reload_reg_used_at_all, reload_reg_used_at_all);
5697 COPY_HARD_REG_SET (save_reload_reg_used_in_op_addr,
5698 reload_reg_used_in_op_addr);
5699
5700 COPY_HARD_REG_SET (save_reload_reg_used_in_op_addr_reload,
5701 reload_reg_used_in_op_addr_reload);
5702
5703 COPY_HARD_REG_SET (save_reload_reg_used_in_insn,
5704 reload_reg_used_in_insn);
5705 COPY_HARD_REG_SET (save_reload_reg_used_in_other_addr,
5706 reload_reg_used_in_other_addr);
5707
5708 for (i = 0; i < reload_n_operands; i++)
5709 {
5710 COPY_HARD_REG_SET (save_reload_reg_used_in_output[i],
5711 reload_reg_used_in_output[i]);
5712 COPY_HARD_REG_SET (save_reload_reg_used_in_input[i],
5713 reload_reg_used_in_input[i]);
5714 COPY_HARD_REG_SET (save_reload_reg_used_in_input_addr[i],
5715 reload_reg_used_in_input_addr[i]);
5716 COPY_HARD_REG_SET (save_reload_reg_used_in_inpaddr_addr[i],
5717 reload_reg_used_in_inpaddr_addr[i]);
5718 COPY_HARD_REG_SET (save_reload_reg_used_in_output_addr[i],
5719 reload_reg_used_in_output_addr[i]);
5720 COPY_HARD_REG_SET (save_reload_reg_used_in_outaddr_addr[i],
5721 reload_reg_used_in_outaddr_addr[i]);
5722 }
5723
5724 /* If -O, try first with inheritance, then turning it off.
5725 If not -O, don't do inheritance.
5726 Using inheritance when not optimizing leads to paradoxes
5727 with fp on the 68k: fp numbers (not NaNs) fail to be equal to themselves
5728 because one side of the comparison might be inherited. */
5729
5730 for (inheritance = optimize > 0; inheritance >= 0; inheritance--)
5731 {
5732 /* Process the reloads in order of preference just found.
5733 Beyond this point, subregs can be found in reload_reg_rtx.
5734
5735 This used to look for an existing reloaded home for all
5736 of the reloads, and only then perform any new reloads.
5737 But that could lose if the reloads were done out of reg-class order
5738 because a later reload with a looser constraint might have an old
5739 home in a register needed by an earlier reload with a tighter constraint.
5740
5741 To solve this, we make two passes over the reloads, in the order
5742 described above. In the first pass we try to inherit a reload
5743 from a previous insn. If there is a later reload that needs a
5744 class that is a proper subset of the class being processed, we must
5745 also allocate a spill register during the first pass.
5746
5747 Then make a second pass over the reloads to allocate any reloads
5748 that haven't been given registers yet. */
5749
5750 CLEAR_HARD_REG_SET (reload_reg_used_for_inherit);
5751
5752 for (j = 0; j < n_reloads; j++)
5753 {
5754 register int r = reload_order[j];
5755
5756 /* Ignore reloads that got marked inoperative. */
5757 if (reload_out[r] == 0 && reload_in[r] == 0
5758 && ! reload_secondary_p[r])
5759 continue;
5760
5761 /* If find_reloads chose to use reload_in or reload_out as a reload
5762 register, we don't need to choose one. Otherwise, try even if it
5763 found one since we might save an insn if we find the value lying
5764 around.
5765 Try also when reload_in is a pseudo without a hard reg. */
5766 if (reload_in[r] != 0 && reload_reg_rtx[r] != 0
5767 && (rtx_equal_p (reload_in[r], reload_reg_rtx[r])
5768 || (rtx_equal_p (reload_out[r], reload_reg_rtx[r])
5769 && GET_CODE (reload_in[r]) != MEM
5770 && true_regnum (reload_in[r]) < FIRST_PSEUDO_REGISTER)))
5771 continue;
5772
5773 #if 0 /* No longer needed for correct operation.
5774 It might give better code, or might not; worth an experiment? */
5775 /* If this is an optional reload, we can't inherit from earlier insns
5776 until we are sure that any non-optional reloads have been allocated.
5777 The following code takes advantage of the fact that optional reloads
5778 are at the end of reload_order. */
5779 if (reload_optional[r] != 0)
5780 for (i = 0; i < j; i++)
5781 if ((reload_out[reload_order[i]] != 0
5782 || reload_in[reload_order[i]] != 0
5783 || reload_secondary_p[reload_order[i]])
5784 && ! reload_optional[reload_order[i]]
5785 && reload_reg_rtx[reload_order[i]] == 0)
5786 allocate_reload_reg (chain, reload_order[i], 0, inheritance);
5787 #endif
5788
5789 /* First see if this pseudo is already available as reloaded
5790 for a previous insn. We cannot try to inherit for reloads
5791 that are smaller than the maximum number of registers needed
5792 for groups unless the register we would allocate cannot be used
5793 for the groups.
5794
5795 We could check here to see if this is a secondary reload for
5796 an object that is already in a register of the desired class.
5797 This would avoid the need for the secondary reload register.
5798 But this is complex because we can't easily determine what
5799 objects might want to be loaded via this reload. So let a
5800 register be allocated here. In `emit_reload_insns' we suppress
5801 one of the loads in the case described above. */
5802
5803 if (inheritance)
5804 {
5805 int word = 0;
5806 register int regno = -1;
5807 enum machine_mode mode;
5808
5809 if (reload_in[r] == 0)
5810 ;
5811 else if (GET_CODE (reload_in[r]) == REG)
5812 {
5813 regno = REGNO (reload_in[r]);
5814 mode = GET_MODE (reload_in[r]);
5815 }
5816 else if (GET_CODE (reload_in_reg[r]) == REG)
5817 {
5818 regno = REGNO (reload_in_reg[r]);
5819 mode = GET_MODE (reload_in_reg[r]);
5820 }
5821 else if (GET_CODE (reload_in_reg[r]) == SUBREG
5822 && GET_CODE (SUBREG_REG (reload_in_reg[r])) == REG)
5823 {
5824 word = SUBREG_WORD (reload_in_reg[r]);
5825 regno = REGNO (SUBREG_REG (reload_in_reg[r]));
5826 if (regno < FIRST_PSEUDO_REGISTER)
5827 regno += word;
5828 mode = GET_MODE (reload_in_reg[r]);
5829 }
5830 #ifdef AUTO_INC_DEC
5831 else if ((GET_CODE (reload_in_reg[r]) == PRE_INC
5832 || GET_CODE (reload_in_reg[r]) == PRE_DEC
5833 || GET_CODE (reload_in_reg[r]) == POST_INC
5834 || GET_CODE (reload_in_reg[r]) == POST_DEC)
5835 && GET_CODE (XEXP (reload_in_reg[r], 0)) == REG)
5836 {
5837 regno = REGNO (XEXP (reload_in_reg[r], 0));
5838 mode = GET_MODE (XEXP (reload_in_reg[r], 0));
5839 reload_out[r] = reload_in[r];
5840 }
5841 #endif
5842 #if 0
5843 /* This won't work, since REGNO can be a pseudo reg number.
5844 Also, it takes much more hair to keep track of all the things
5845 that can invalidate an inherited reload of part of a pseudoreg. */
5846 else if (GET_CODE (reload_in[r]) == SUBREG
5847 && GET_CODE (SUBREG_REG (reload_in[r])) == REG)
5848 regno = REGNO (SUBREG_REG (reload_in[r])) + SUBREG_WORD (reload_in[r]);
5849 #endif
5850
5851 if (regno >= 0 && reg_last_reload_reg[regno] != 0)
5852 {
5853 enum reg_class class = reload_reg_class[r], last_class;
5854 rtx last_reg = reg_last_reload_reg[regno];
5855
5856 i = REGNO (last_reg) + word;
5857 last_class = REGNO_REG_CLASS (i);
5858 if ((GET_MODE_SIZE (GET_MODE (last_reg))
5859 >= GET_MODE_SIZE (mode) + word * UNITS_PER_WORD)
5860 && reg_reloaded_contents[i] == regno
5861 && TEST_HARD_REG_BIT (reg_reloaded_valid, i)
5862 && HARD_REGNO_MODE_OK (i, reload_mode[r])
5863 && (TEST_HARD_REG_BIT (reg_class_contents[(int) class], i)
5864 /* Even if we can't use this register as a reload
5865 register, we might use it for reload_override_in,
5866 if copying it to the desired class is cheap
5867 enough. */
5868 || ((REGISTER_MOVE_COST (last_class, class)
5869 < MEMORY_MOVE_COST (mode, class, 1))
5870 #ifdef SECONDARY_INPUT_RELOAD_CLASS
5871 && (SECONDARY_INPUT_RELOAD_CLASS (class, mode,
5872 last_reg)
5873 == NO_REGS)
5874 #endif
5875 #ifdef SECONDARY_MEMORY_NEEDED
5876 && ! SECONDARY_MEMORY_NEEDED (last_class, class,
5877 mode)
5878 #endif
5879 ))
5880
5881 && (reload_nregs[r] == max_group_size
5882 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) group_class],
5883 i))
5884 && ((reload_reg_free_p (i, reload_opnum[r],
5885 reload_when_needed[r])
5886 && reload_reg_free_before_p (i, reload_opnum[r],
5887 reload_when_needed[r],
5888 0))
5889 || reload_reg_free_for_value_p (i, reload_opnum[r],
5890 reload_when_needed[r],
5891 reload_in[r],
5892 reload_out[r], r)))
5893 {
5894 /* If a group is needed, verify that all the subsequent
5895 registers still have their values intact. */
5896 int nr
5897 = HARD_REGNO_NREGS (i, reload_mode[r]);
5898 int k;
5899
5900 for (k = 1; k < nr; k++)
5901 if (reg_reloaded_contents[i + k] != regno
5902 || ! TEST_HARD_REG_BIT (reg_reloaded_valid, i + k))
5903 break;
5904
5905 if (k == nr)
5906 {
5907 int i1;
5908
5909 last_reg = (GET_MODE (last_reg) == mode
5910 ? last_reg : gen_rtx_REG (mode, i));
5911
5912 /* We found a register that contains the
5913 value we need. If this register is the
5914 same as an `earlyclobber' operand of the
5915 current insn, just mark it as a place to
5916 reload from since we can't use it as the
5917 reload register itself. */
5918
5919 for (i1 = 0; i1 < n_earlyclobbers; i1++)
5920 if (reg_overlap_mentioned_for_reload_p
5921 (reg_last_reload_reg[regno],
5922 reload_earlyclobbers[i1]))
5923 break;
5924
5925 if (i1 != n_earlyclobbers
5926 /* Don't use it if we'd clobber a pseudo reg. */
5927 || (TEST_HARD_REG_BIT (reg_used_by_pseudo, i)
5928 && reload_out[r]
5929 && ! TEST_HARD_REG_BIT (reg_reloaded_dead, i))
5930 /* Don't really use the inherited spill reg
5931 if we need it wider than we've got it. */
5932 || (GET_MODE_SIZE (reload_mode[r])
5933 > GET_MODE_SIZE (mode))
5934 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[r]],
5935 i)
5936
5937 /* If find_reloads chose reload_out as reload
5938 register, stay with it - that leaves the
5939 inherited register for subsequent reloads. */
5940 || (reload_out[r] && reload_reg_rtx[r]
5941 && rtx_equal_p (reload_out[r],
5942 reload_reg_rtx[r])))
5943 {
5944 reload_override_in[r] = last_reg;
5945 reload_inheritance_insn[r]
5946 = reg_reloaded_insn[i];
5947 }
5948 else
5949 {
5950 int k;
5951 /* We can use this as a reload reg. */
5952 /* Mark the register as in use for this part of
5953 the insn. */
5954 mark_reload_reg_in_use (i,
5955 reload_opnum[r],
5956 reload_when_needed[r],
5957 reload_mode[r]);
5958 reload_reg_rtx[r] = last_reg;
5959 reload_inherited[r] = 1;
5960 reload_inheritance_insn[r]
5961 = reg_reloaded_insn[i];
5962 reload_spill_index[r] = i;
5963 for (k = 0; k < nr; k++)
5964 SET_HARD_REG_BIT (reload_reg_used_for_inherit,
5965 i + k);
5966 }
5967 }
5968 }
5969 }
5970 }
5971
5972 /* Here's another way to see if the value is already lying around. */
5973 if (inheritance
5974 && reload_in[r] != 0
5975 && ! reload_inherited[r]
5976 && reload_out[r] == 0
5977 && (CONSTANT_P (reload_in[r])
5978 || GET_CODE (reload_in[r]) == PLUS
5979 || GET_CODE (reload_in[r]) == REG
5980 || GET_CODE (reload_in[r]) == MEM)
5981 && (reload_nregs[r] == max_group_size
5982 || ! reg_classes_intersect_p (reload_reg_class[r], group_class)))
5983 {
5984 register rtx equiv
5985 = find_equiv_reg (reload_in[r], insn, reload_reg_class[r],
5986 -1, NULL_PTR, 0, reload_mode[r]);
5987 int regno;
5988
5989 if (equiv != 0)
5990 {
5991 if (GET_CODE (equiv) == REG)
5992 regno = REGNO (equiv);
5993 else if (GET_CODE (equiv) == SUBREG)
5994 {
5995 /* This must be a SUBREG of a hard register.
5996 Make a new REG since this might be used in an
5997 address and not all machines support SUBREGs
5998 there. */
5999 regno = REGNO (SUBREG_REG (equiv)) + SUBREG_WORD (equiv);
6000 equiv = gen_rtx_REG (reload_mode[r], regno);
6001 }
6002 else
6003 abort ();
6004 }
6005
6006 /* If we found a spill reg, reject it unless it is free
6007 and of the desired class. */
6008 if (equiv != 0
6009 && ((TEST_HARD_REG_BIT (reload_reg_used_at_all, regno)
6010 && ! reload_reg_free_for_value_p (regno, reload_opnum[r],
6011 reload_when_needed[r],
6012 reload_in[r],
6013 reload_out[r], r))
6014 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) reload_reg_class[r]],
6015 regno)))
6016 equiv = 0;
6017
6018 if (equiv != 0 && ! HARD_REGNO_MODE_OK (regno, reload_mode[r]))
6019 equiv = 0;
6020
6021 /* We found a register that contains the value we need.
6022 If this register is the same as an `earlyclobber' operand
6023 of the current insn, just mark it as a place to reload from
6024 since we can't use it as the reload register itself. */
6025
6026 if (equiv != 0)
6027 for (i = 0; i < n_earlyclobbers; i++)
6028 if (reg_overlap_mentioned_for_reload_p (equiv,
6029 reload_earlyclobbers[i]))
6030 {
6031 reload_override_in[r] = equiv;
6032 equiv = 0;
6033 break;
6034 }
6035
6036 /* If the equiv register we have found is explicitly clobbered
6037 in the current insn, it depends on the reload type whether we
6038 can use it, use it for reload_override_in, or not use it at all.
6039 In particular, we then can't use EQUIV for a
6040 RELOAD_FOR_OUTPUT_ADDRESS reload. */
6041
6042 if (equiv != 0 && regno_clobbered_p (regno, insn))
6043 {
6044 switch (reload_when_needed[r])
6045 {
6046 case RELOAD_FOR_OTHER_ADDRESS:
6047 case RELOAD_FOR_INPADDR_ADDRESS:
6048 case RELOAD_FOR_INPUT_ADDRESS:
6049 case RELOAD_FOR_OPADDR_ADDR:
6050 break;
6051 case RELOAD_OTHER:
6052 case RELOAD_FOR_INPUT:
6053 case RELOAD_FOR_OPERAND_ADDRESS:
6054 reload_override_in[r] = equiv;
6055 /* Fall through. */
6056 default:
6057 equiv = 0;
6058 break;
6059 }
6060 }
6061
6062 /* If we found an equivalent reg, say no code need be generated
6063 to load it, and use it as our reload reg. */
6064 if (equiv != 0 && regno != HARD_FRAME_POINTER_REGNUM)
6065 {
6066 int nr = HARD_REGNO_NREGS (regno, reload_mode[r]);
6067 int k;
6068 reload_reg_rtx[r] = equiv;
6069 reload_inherited[r] = 1;
6070
6071 /* If reg_reloaded_valid is not set for this register,
6072 there might be a stale spill_reg_store lying around.
6073 We must clear it, since otherwise emit_reload_insns
6074 might delete the store. */
6075 if (! TEST_HARD_REG_BIT (reg_reloaded_valid, regno))
6076 spill_reg_store[regno] = NULL_RTX;
6077 /* If any of the hard registers in EQUIV are spill
6078 registers, mark them as in use for this insn. */
6079 for (k = 0; k < nr; k++)
6080 {
6081 i = spill_reg_order[regno + k];
6082 if (i >= 0)
6083 {
6084 mark_reload_reg_in_use (regno, reload_opnum[r],
6085 reload_when_needed[r],
6086 reload_mode[r]);
6087 SET_HARD_REG_BIT (reload_reg_used_for_inherit,
6088 regno + k);
6089 }
6090 }
6091 }
6092 }
6093
6094 /* If we found a register to use already, or if this is an optional
6095 reload, we are done. */
6096 if (reload_reg_rtx[r] != 0 || reload_optional[r] != 0)
6097 continue;
6098
6099 #if 0 /* No longer needed for correct operation. Might or might not
6100 give better code on the average. Want to experiment? */
6101
6102 /* See if there is a later reload that has a class different from our
6103 class that intersects our class or that requires fewer registers
6104 than our reload. If so, we must allocate a register to this
6105 reload now, since that reload might inherit a previous reload
6106 and take the only available register in our class. Don't do this
6107 for optional reloads since they will force all previous reloads
6108 to be allocated. Also don't do this for reloads that have been
6109 turned off. */
6110
6111 for (i = j + 1; i < n_reloads; i++)
6112 {
6113 int s = reload_order[i];
6114
6115 if ((reload_in[s] == 0 && reload_out[s] == 0
6116 && ! reload_secondary_p[s])
6117 || reload_optional[s])
6118 continue;
6119
6120 if ((reload_reg_class[s] != reload_reg_class[r]
6121 && reg_classes_intersect_p (reload_reg_class[r],
6122 reload_reg_class[s]))
6123 || reload_nregs[s] < reload_nregs[r])
6124 break;
6125 }
6126
6127 if (i == n_reloads)
6128 continue;
6129
6130 allocate_reload_reg (chain, r, j == n_reloads - 1, inheritance);
6131 #endif
6132 }
6133
6134 /* Now allocate reload registers for anything non-optional that
6135 didn't get one yet. */
6136 for (j = 0; j < n_reloads; j++)
6137 {
6138 register int r = reload_order[j];
6139
6140 /* Ignore reloads that got marked inoperative. */
6141 if (reload_out[r] == 0 && reload_in[r] == 0 && ! reload_secondary_p[r])
6142 continue;
6143
6144 /* Skip reloads that already have a register allocated or are
6145 optional. */
6146 if (reload_reg_rtx[r] != 0 || reload_optional[r])
6147 continue;
6148
6149 if (! allocate_reload_reg (chain, r, j == n_reloads - 1, inheritance))
6150 break;
6151 }
6152
6153 /* If that loop got all the way, we have won. */
6154 if (j == n_reloads)
6155 break;
6156
6157 /* Loop around and try without any inheritance. */
6158 /* First undo everything done by the failed attempt
6159 to allocate with inheritance. */
6160 bcopy ((char *) save_reload_reg_rtx, (char *) reload_reg_rtx,
6161 sizeof reload_reg_rtx);
6162 bcopy ((char *) save_reload_inherited, (char *) reload_inherited,
6163 sizeof reload_inherited);
6164 bcopy ((char *) save_reload_inheritance_insn,
6165 (char *) reload_inheritance_insn,
6166 sizeof reload_inheritance_insn);
6167 bcopy ((char *) save_reload_override_in, (char *) reload_override_in,
6168 sizeof reload_override_in);
6169 bcopy ((char *) save_reload_spill_index, (char *) reload_spill_index,
6170 sizeof reload_spill_index);
6171 COPY_HARD_REG_SET (reload_reg_used, save_reload_reg_used);
6172 COPY_HARD_REG_SET (reload_reg_used_at_all, save_reload_reg_used_at_all);
6173 COPY_HARD_REG_SET (reload_reg_used_in_op_addr,
6174 save_reload_reg_used_in_op_addr);
6175 COPY_HARD_REG_SET (reload_reg_used_in_op_addr_reload,
6176 save_reload_reg_used_in_op_addr_reload);
6177 COPY_HARD_REG_SET (reload_reg_used_in_insn,
6178 save_reload_reg_used_in_insn);
6179 COPY_HARD_REG_SET (reload_reg_used_in_other_addr,
6180 save_reload_reg_used_in_other_addr);
6181
6182 for (i = 0; i < reload_n_operands; i++)
6183 {
6184 COPY_HARD_REG_SET (reload_reg_used_in_input[i],
6185 save_reload_reg_used_in_input[i]);
6186 COPY_HARD_REG_SET (reload_reg_used_in_output[i],
6187 save_reload_reg_used_in_output[i]);
6188 COPY_HARD_REG_SET (reload_reg_used_in_input_addr[i],
6189 save_reload_reg_used_in_input_addr[i]);
6190 COPY_HARD_REG_SET (reload_reg_used_in_inpaddr_addr[i],
6191 save_reload_reg_used_in_inpaddr_addr[i]);
6192 COPY_HARD_REG_SET (reload_reg_used_in_output_addr[i],
6193 save_reload_reg_used_in_output_addr[i]);
6194 COPY_HARD_REG_SET (reload_reg_used_in_outaddr_addr[i],
6195 save_reload_reg_used_in_outaddr_addr[i]);
6196 }
6197 }
6198
6199 /* If we thought we could inherit a reload, because it seemed that
6200 nothing else wanted the same reload register earlier in the insn,
6201 verify that assumption, now that all reloads have been assigned.
6202 Likewise for reloads where reload_override_in has been set. */
6203
6204 /* If doing expensive optimizations, do one preliminary pass that doesn't
6205 cancel any inheritance, but removes reloads that have been needed only
6206 for reloads that we know can be inherited. */
6207 for (pass = flag_expensive_optimizations; pass >= 0; pass--)
6208 {
6209 for (j = 0; j < n_reloads; j++)
6210 {
6211 register int r = reload_order[j];
6212 rtx check_reg;
6213 if (reload_inherited[r] && reload_reg_rtx[r])
6214 check_reg = reload_reg_rtx[r];
6215 else if (reload_override_in[r]
6216 && (GET_CODE (reload_override_in[r]) == REG
6217 || GET_CODE (reload_override_in[r]) == SUBREG))
6218 check_reg = reload_override_in[r];
6219 else
6220 continue;
6221 if (! (reload_reg_free_before_p (true_regnum (check_reg),
6222 reload_opnum[r], reload_when_needed[r],
6223 ! reload_inherited[r])
6224 || reload_reg_free_for_value_p (true_regnum (check_reg),
6225 reload_opnum[r],
6226 reload_when_needed[r],
6227 reload_in[r],
6228 reload_out[r], r)))
6229 {
6230 if (pass)
6231 continue;
6232 reload_inherited[r] = 0;
6233 reload_override_in[r] = 0;
6234 }
6235 /* If we can inherit a RELOAD_FOR_INPUT, or can use a
6236 reload_override_in, then we do not need its related
6237 RELOAD_FOR_INPUT_ADDRESS / RELOAD_FOR_INPADDR_ADDRESS reloads;
6238 likewise for other reload types.
6239 We handle this by removing a reload when its only replacement
6240 is mentioned in reload_in of the reload we are going to inherit.
6241 A special case is auto_inc expressions; even if the input is
6242 inherited, we still need the address for the output. We can
6243 recognize them because they have RELOAD_OUT set but not
6244 RELOAD_OUT_REG.
6245 If we succeed in removing some reload and we are doing a preliminary
6246 pass just to remove such reloads, make another pass, since the
6247 removal of one reload might allow us to inherit another one. */
6248 else if ((! reload_out[r] || reload_out_reg[r])
6249 && remove_address_replacements (reload_in[r]) && pass)
6250 pass = 2;
6251 }
6252 }
6253
6254 /* Now that reload_override_in is known valid,
6255 actually override reload_in. */
6256 for (j = 0; j < n_reloads; j++)
6257 if (reload_override_in[j])
6258 reload_in[j] = reload_override_in[j];
6259
6260 /* If this reload won't be done because it has been cancelled or is
6261 optional and not inherited, clear reload_reg_rtx so other
6262 routines (such as subst_reloads) don't get confused. */
6263 for (j = 0; j < n_reloads; j++)
6264 if (reload_reg_rtx[j] != 0
6265 && ((reload_optional[j] && ! reload_inherited[j])
6266 || (reload_in[j] == 0 && reload_out[j] == 0
6267 && ! reload_secondary_p[j])))
6268 {
6269 int regno = true_regnum (reload_reg_rtx[j]);
6270
6271 if (spill_reg_order[regno] >= 0)
6272 clear_reload_reg_in_use (regno, reload_opnum[j],
6273 reload_when_needed[j], reload_mode[j]);
6274 reload_reg_rtx[j] = 0;
6275 }
6276
6277 /* Record which pseudos and which spill regs have output reloads. */
6278 for (j = 0; j < n_reloads; j++)
6279 {
6280 register int r = reload_order[j];
6281
6282 i = reload_spill_index[r];
6283
6284 /* I is nonneg if this reload uses a register.
6285 If reload_reg_rtx[r] is 0, this is an optional reload
6286 that we opted to ignore. */
6287 if (reload_out_reg[r] != 0 && GET_CODE (reload_out_reg[r]) == REG
6288 && reload_reg_rtx[r] != 0)
6289 {
6290 register int nregno = REGNO (reload_out_reg[r]);
6291 int nr = 1;
6292
6293 if (nregno < FIRST_PSEUDO_REGISTER)
6294 nr = HARD_REGNO_NREGS (nregno, reload_mode[r]);
6295
6296 while (--nr >= 0)
6297 reg_has_output_reload[nregno + nr] = 1;
6298
6299 if (i >= 0)
6300 {
6301 nr = HARD_REGNO_NREGS (i, reload_mode[r]);
6302 while (--nr >= 0)
6303 SET_HARD_REG_BIT (reg_is_output_reload, i + nr);
6304 }
6305
6306 if (reload_when_needed[r] != RELOAD_OTHER
6307 && reload_when_needed[r] != RELOAD_FOR_OUTPUT
6308 && reload_when_needed[r] != RELOAD_FOR_INSN)
6309 abort ();
6310 }
6311 }
6312 }
6313
6314 /* Deallocate the reload register for reload R. This is called from
6315 remove_address_replacements. */
6316 void
6317 deallocate_reload_reg (r)
6318 int r;
6319 {
6320 int regno;
6321
6322 if (! reload_reg_rtx[r])
6323 return;
6324 regno = true_regnum (reload_reg_rtx[r]);
6325 reload_reg_rtx[r] = 0;
6326 if (spill_reg_order[regno] >= 0)
6327 clear_reload_reg_in_use (regno, reload_opnum[r], reload_when_needed[r],
6328 reload_mode[r]);
6329 reload_spill_index[r] = -1;
6330 }
6331 \f
6332 /* If SMALL_REGISTER_CLASSES is non-zero, we may not have merged two
6333 reloads of the same item for fear that we might not have enough reload
6334 registers. However, normally they will get the same reload register
6335 and hence actually need not be loaded twice.
6336
6337 Here we check for the most common case of this phenomenon: when we have
6338 a number of reloads for the same object, each of which was allocated
6339 the same reload_reg_rtx, that reload_reg_rtx is not used for any other
6340 reload, and is not modified in the insn itself. If we find such,
6341 merge all the reloads and set the resulting reload to RELOAD_OTHER.
6342 This will not increase the number of spill registers needed and will
6343 prevent redundant code. */
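   /* A hypothetical example of the situation handled here: if
      find_reloads created two input reloads that both load pseudo 123
      from its stack slot and choose_reload_regs gave them the same reload
      register, the second load would be redundant; merging them into a
      single RELOAD_OTHER reload keeps just one load.  */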
6344
6345 static void
6346 merge_assigned_reloads (insn)
6347 rtx insn;
6348 {
6349 int i, j;
6350
6351 /* Scan all the reloads looking for ones that only load values and
6352 are not already RELOAD_OTHER and ones whose reload_reg_rtx are
6353 assigned and not modified by INSN. */
6354
6355 for (i = 0; i < n_reloads; i++)
6356 {
6357 int conflicting_input = 0;
6358 int max_input_address_opnum = -1;
6359 int min_conflicting_input_opnum = MAX_RECOG_OPERANDS;
6360
6361 if (reload_in[i] == 0 || reload_when_needed[i] == RELOAD_OTHER
6362 || reload_out[i] != 0 || reload_reg_rtx[i] == 0
6363 || reg_set_p (reload_reg_rtx[i], insn))
6364 continue;
6365
6366 /* Look at all other reloads. Ensure that the only use of this
6367 reload_reg_rtx is in a reload that just loads the same value
6368 as we do. Note that any secondary reloads must be of the identical
6369 class since the values, modes, and result registers are the
6370 same, so we need not do anything with any secondary reloads. */
6371
6372 for (j = 0; j < n_reloads; j++)
6373 {
6374 if (i == j || reload_reg_rtx[j] == 0
6375 || ! reg_overlap_mentioned_p (reload_reg_rtx[j],
6376 reload_reg_rtx[i]))
6377 continue;
6378
6379 if (reload_when_needed[j] == RELOAD_FOR_INPUT_ADDRESS
6380 && reload_opnum[j] > max_input_address_opnum)
6381 max_input_address_opnum = reload_opnum[j];
6382
6383 /* If the reload regs aren't exactly the same (e.g., different modes)
6384 or if the values are different, we can't merge this reload.
6385 But if it is an input reload, we might still merge
6386 RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_OTHER_ADDRESS reloads. */
6387
6388 if (! rtx_equal_p (reload_reg_rtx[i], reload_reg_rtx[j])
6389 || reload_out[j] != 0 || reload_in[j] == 0
6390 || ! rtx_equal_p (reload_in[i], reload_in[j]))
6391 {
6392 if (reload_when_needed[j] != RELOAD_FOR_INPUT
6393 || ((reload_when_needed[i] != RELOAD_FOR_INPUT_ADDRESS
6394 || reload_opnum[i] > reload_opnum[j])
6395 && reload_when_needed[i] != RELOAD_FOR_OTHER_ADDRESS))
6396 break;
6397 conflicting_input = 1;
6398 if (min_conflicting_input_opnum > reload_opnum[j])
6399 min_conflicting_input_opnum = reload_opnum[j];
6400 }
6401 }
6402
6403 /* If all is OK, merge the reloads. Only set this to RELOAD_OTHER if
6404 we, in fact, found any matching reloads. */
6405
6406 if (j == n_reloads
6407 && max_input_address_opnum <= min_conflicting_input_opnum)
6408 {
6409 for (j = 0; j < n_reloads; j++)
6410 if (i != j && reload_reg_rtx[j] != 0
6411 && rtx_equal_p (reload_reg_rtx[i], reload_reg_rtx[j])
6412 && (! conflicting_input
6413 || reload_when_needed[j] == RELOAD_FOR_INPUT_ADDRESS
6414 || reload_when_needed[j] == RELOAD_FOR_OTHER_ADDRESS))
6415 {
6416 reload_when_needed[i] = RELOAD_OTHER;
6417 reload_in[j] = 0;
6418 reload_spill_index[j] = -1;
6419 transfer_replacements (i, j);
6420 }
6421
6422 /* If this is now RELOAD_OTHER, look for any reloads that load
6423 parts of this operand and set them to RELOAD_FOR_OTHER_ADDRESS
6424 if they were for inputs, RELOAD_OTHER for outputs. Note that
6425 this test is equivalent to looking for reloads for this operand
6426 number. */
6427
6428 if (reload_when_needed[i] == RELOAD_OTHER)
6429 for (j = 0; j < n_reloads; j++)
6430 if (reload_in[j] != 0
6431 && reload_when_needed[j] != RELOAD_OTHER
6432 && reg_overlap_mentioned_for_reload_p (reload_in[j],
6433 reload_in[i]))
6434 reload_when_needed[j]
6435 = ((reload_when_needed[j] == RELOAD_FOR_INPUT_ADDRESS
6436 || reload_when_needed[j] == RELOAD_FOR_INPADDR_ADDRESS)
6437 ? RELOAD_FOR_OTHER_ADDRESS : RELOAD_OTHER);
6438 }
6439 }
6440 }
6441
6442 \f
6443 /* Output insns to reload values in and out of the chosen reload regs. */
6444
6445 static void
6446 emit_reload_insns (chain)
6447 struct insn_chain *chain;
6448 {
6449 rtx insn = chain->insn;
6450
6451 register int j;
6452 rtx input_reload_insns[MAX_RECOG_OPERANDS];
6453 rtx other_input_address_reload_insns = 0;
6454 rtx other_input_reload_insns = 0;
6455 rtx input_address_reload_insns[MAX_RECOG_OPERANDS];
6456 rtx inpaddr_address_reload_insns[MAX_RECOG_OPERANDS];
6457 rtx output_reload_insns[MAX_RECOG_OPERANDS];
6458 rtx output_address_reload_insns[MAX_RECOG_OPERANDS];
6459 rtx outaddr_address_reload_insns[MAX_RECOG_OPERANDS];
6460 rtx operand_reload_insns = 0;
6461 rtx other_operand_reload_insns = 0;
6462 rtx other_output_reload_insns[MAX_RECOG_OPERANDS];
6463 rtx following_insn = NEXT_INSN (insn);
6464 rtx before_insn = PREV_INSN (insn);
6465 int special;
6466 /* Values to be put in spill_reg_store are put here first. */
6467 rtx new_spill_reg_store[FIRST_PSEUDO_REGISTER];
6468 HARD_REG_SET reg_reloaded_died;
6469
6470 CLEAR_HARD_REG_SET (reg_reloaded_died);
6471
6472 for (j = 0; j < reload_n_operands; j++)
6473 input_reload_insns[j] = input_address_reload_insns[j]
6474 = inpaddr_address_reload_insns[j]
6475 = output_reload_insns[j] = output_address_reload_insns[j]
6476 = outaddr_address_reload_insns[j]
6477 = other_output_reload_insns[j] = 0;
6478
6479 /* Now output the instructions to copy the data into and out of the
6480 reload registers. Do these in the order that the reloads were reported,
6481 since reloads of base and index registers precede reloads of operands
6482 and the operands may need the base and index registers reloaded. */
6483
6484 for (j = 0; j < n_reloads; j++)
6485 {
6486 register rtx old;
6487 rtx oldequiv_reg = 0;
6488 rtx this_reload_insn = 0;
6489 int expect_occurrences = 1;
6490
6491 if (reload_reg_rtx[j]
6492 && REGNO (reload_reg_rtx[j]) < FIRST_PSEUDO_REGISTER)
6493 new_spill_reg_store[REGNO (reload_reg_rtx[j])] = 0;
6494
6495 old = (reload_in[j] && GET_CODE (reload_in[j]) == MEM
6496 ? reload_in_reg[j] : reload_in[j]);
6497
6498 if (old != 0
6499 /* AUTO_INC reloads need to be handled even if inherited. We got an
6500 AUTO_INC reload if reload_out is set but reload_out_reg isn't. */
6501 && (! reload_inherited[j] || (reload_out[j] && ! reload_out_reg[j]))
6502 && ! rtx_equal_p (reload_reg_rtx[j], old)
6503 && reload_reg_rtx[j] != 0)
6504 {
6505 register rtx reloadreg = reload_reg_rtx[j];
6506 rtx oldequiv = 0;
6507 enum machine_mode mode;
6508 rtx *where;
6509
6510 /* Determine the mode to reload in.
6511 This is very tricky because we have three to choose from.
6512 There is the mode the insn operand wants (reload_inmode[J]).
6513 There is the mode of the reload register RELOADREG.
6514 There is the intrinsic mode of the operand, which we could find
6515 by stripping some SUBREGs.
6516 It turns out that RELOADREG's mode is irrelevant:
6517 we can change that arbitrarily.
6518
6519 Consider (SUBREG:SI foo:QI) as an operand that must be SImode;
6520 then the reload reg may not support QImode moves, so use SImode.
6521 If foo is in memory due to spilling a pseudo reg, this is safe,
6522 because the QImode value is in the least significant part of a
6523 slot big enough for a SImode. If foo is some other sort of
6524 memory reference, then it is impossible to reload this case,
6525 so previous passes had better make sure this never happens.
6526
6527 Then consider a one-word union which has SImode and one of its
6528 members is a float, being fetched as (SUBREG:SF union:SI).
6529 We must fetch that as SFmode because we could be loading into
6530 a float-only register. In this case OLD's mode is correct.
6531
6532 Consider an immediate integer: it has VOIDmode. Here we need
6533 to get a mode from something else.
6534
6535 In some cases, there is a fourth mode, the operand's
6536 containing mode. If the insn specifies a containing mode for
6537 this operand, it overrides all others.
6538
6539 I am not sure whether the algorithm here is always right,
6540 but it does the right things in those cases. */
6541
6542 mode = GET_MODE (old);
6543 if (mode == VOIDmode)
6544 mode = reload_inmode[j];
6545
6546 #ifdef SECONDARY_INPUT_RELOAD_CLASS
6547 /* If we need a secondary register for this operation, see if
6548 the value is already in a register in that class. Don't
6549 do this if the secondary register will be used as a scratch
6550 register. */
6551
6552 if (reload_secondary_in_reload[j] >= 0
6553 && reload_secondary_in_icode[j] == CODE_FOR_nothing
6554 && optimize)
6555 oldequiv
6556 = find_equiv_reg (old, insn,
6557 reload_reg_class[reload_secondary_in_reload[j]],
6558 -1, NULL_PTR, 0, mode);
6559 #endif
6560
6561 /* If reloading from memory, see if there is a register
6562 that already holds the same value. If so, reload from there.
6563 We can pass 0 as the reload_reg_p argument because
6564 any other reload has either already been emitted,
6565 in which case find_equiv_reg will see the reload-insn,
6566 or has yet to be emitted, in which case it doesn't matter
6567 because we will use this equiv reg right away. */
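/* A hypothetical example of what this catches (pseudo and register
   numbers are invented): pseudo 104 got no hard register and lives in a
   stack slot, so OLD is a MEM such as

       (mem:SI (plus:SI (reg:SI 6) (const_int -8)))

   If an earlier insn left a copy of that value in, say, (reg:SI 2) and
   nothing has clobbered it since, find_equiv_reg returns (reg:SI 2) and
   we reload from that register instead of from memory.  */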
6568
6569 if (oldequiv == 0 && optimize
6570 && (GET_CODE (old) == MEM
6571 || (GET_CODE (old) == REG
6572 && REGNO (old) >= FIRST_PSEUDO_REGISTER
6573 && reg_renumber[REGNO (old)] < 0)))
6574 oldequiv = find_equiv_reg (old, insn, ALL_REGS,
6575 -1, NULL_PTR, 0, mode);
6576
6577 if (oldequiv)
6578 {
6579 int regno = true_regnum (oldequiv);
6580
6581 /* If OLDEQUIV is a spill register, don't use it for this
6582 if any other reload needs it at an earlier stage of this insn
6583 or at this stage. */
6584 if (spill_reg_order[regno] >= 0
6585 && (! reload_reg_free_p (regno, reload_opnum[j],
6586 reload_when_needed[j])
6587 || ! reload_reg_free_before_p (regno, reload_opnum[j],
6588 reload_when_needed[j], 1)))
6589 oldequiv = 0;
6590
6591 /* If OLDEQUIV is not a spill register,
6592 don't use it if any other reload wants it. */
6593 if (spill_reg_order[regno] < 0)
6594 {
6595 int k;
6596 for (k = 0; k < n_reloads; k++)
6597 if (reload_reg_rtx[k] != 0 && k != j
6598 && reg_overlap_mentioned_for_reload_p (reload_reg_rtx[k],
6599 oldequiv))
6600 {
6601 oldequiv = 0;
6602 break;
6603 }
6604 }
6605
6606 /* If it is no cheaper to copy from OLDEQUIV into the
6607 reload register than it would be to move from memory,
6608 don't use it. Likewise, if we need a secondary register
6609 or memory. */
6610
6611 if (oldequiv != 0
6612 && ((REGNO_REG_CLASS (regno) != reload_reg_class[j]
6613 && (REGISTER_MOVE_COST (REGNO_REG_CLASS (regno),
6614 reload_reg_class[j])
6615 >= MEMORY_MOVE_COST (mode, reload_reg_class[j], 1)))
6616 #ifdef SECONDARY_INPUT_RELOAD_CLASS
6617 || (SECONDARY_INPUT_RELOAD_CLASS (reload_reg_class[j],
6618 mode, oldequiv)
6619 != NO_REGS)
6620 #endif
6621 #ifdef SECONDARY_MEMORY_NEEDED
6622 || SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (regno),
6623 reload_reg_class[j],
6624 mode)
6625 #endif
6626 ))
6627 oldequiv = 0;
6628 }
6629
6630 /* delete_output_reload is only invoked properly if old contains
6631 the original pseudo register. Since this is replaced with a
6632 hard reg when RELOAD_OVERRIDE_IN is set, see if we can
6633 find the pseudo in RELOAD_IN_REG. */
6634 if (oldequiv == 0
6635 && reload_override_in[j]
6636 && GET_CODE (reload_in_reg[j]) == REG)
6637 {
6638 oldequiv = old;
6639 old = reload_in_reg[j];
6640 }
6641 if (oldequiv == 0)
6642 oldequiv = old;
6643 else if (GET_CODE (oldequiv) == REG)
6644 oldequiv_reg = oldequiv;
6645 else if (GET_CODE (oldequiv) == SUBREG)
6646 oldequiv_reg = SUBREG_REG (oldequiv);
6647
6648 /* If we are reloading from a register that was recently stored in
6649 with an output-reload, see if we can prove there was
6650 actually no need to store the old value in it. */
6651
6652 if (optimize && GET_CODE (oldequiv) == REG
6653 && REGNO (oldequiv) < FIRST_PSEUDO_REGISTER
6654 && spill_reg_store[REGNO (oldequiv)]
6655 && GET_CODE (old) == REG
6656 && (dead_or_set_p (insn, spill_reg_stored_to[REGNO (oldequiv)])
6657 || rtx_equal_p (spill_reg_stored_to[REGNO (oldequiv)],
6658 reload_out_reg[j])))
6659 delete_output_reload (insn, j, REGNO (oldequiv));
6660
6661 /* Encapsulate both RELOADREG and OLDEQUIV into that mode,
6662 then load RELOADREG from OLDEQUIV. Note that we cannot use
6663 gen_lowpart_common since it can do the wrong thing when
6664 RELOADREG has a multi-word mode. Note that RELOADREG
6665 must always be a REG here. */
6666
6667 if (GET_MODE (reloadreg) != mode)
6668 reloadreg = gen_rtx_REG (mode, REGNO (reloadreg));
6669 while (GET_CODE (oldequiv) == SUBREG && GET_MODE (oldequiv) != mode)
6670 oldequiv = SUBREG_REG (oldequiv);
6671 if (GET_MODE (oldequiv) != VOIDmode
6672 && mode != GET_MODE (oldequiv))
6673 oldequiv = gen_rtx_SUBREG (mode, oldequiv, 0);
6674
6675 /* Switch to the right place to emit the reload insns. */
6676 switch (reload_when_needed[j])
6677 {
6678 case RELOAD_OTHER:
6679 where = &other_input_reload_insns;
6680 break;
6681 case RELOAD_FOR_INPUT:
6682 where = &input_reload_insns[reload_opnum[j]];
6683 break;
6684 case RELOAD_FOR_INPUT_ADDRESS:
6685 where = &input_address_reload_insns[reload_opnum[j]];
6686 break;
6687 case RELOAD_FOR_INPADDR_ADDRESS:
6688 where = &inpaddr_address_reload_insns[reload_opnum[j]];
6689 break;
6690 case RELOAD_FOR_OUTPUT_ADDRESS:
6691 where = &output_address_reload_insns[reload_opnum[j]];
6692 break;
6693 case RELOAD_FOR_OUTADDR_ADDRESS:
6694 where = &outaddr_address_reload_insns[reload_opnum[j]];
6695 break;
6696 case RELOAD_FOR_OPERAND_ADDRESS:
6697 where = &operand_reload_insns;
6698 break;
6699 case RELOAD_FOR_OPADDR_ADDR:
6700 where = &other_operand_reload_insns;
6701 break;
6702 case RELOAD_FOR_OTHER_ADDRESS:
6703 where = &other_input_address_reload_insns;
6704 break;
6705 default:
6706 abort ();
6707 }
6708
6709 push_to_sequence (*where);
6710 special = 0;
6711
6712 /* Auto-increment addresses must be reloaded in a special way. */
6713 if (reload_out[j] && ! reload_out_reg[j])
6714 {
6715 /* We are not going to bother supporting the case where an
6716 incremented register can't be copied directly from
6717 OLDEQUIV, since this seems highly unlikely. */
6718 if (reload_secondary_in_reload[j] >= 0)
6719 abort ();
6720
6721 if (reload_inherited[j])
6722 oldequiv = reloadreg;
6723
6724 old = XEXP (reload_in_reg[j], 0);
6725
6726 if (optimize && GET_CODE (oldequiv) == REG
6727 && REGNO (oldequiv) < FIRST_PSEUDO_REGISTER
6728 && spill_reg_store[REGNO (oldequiv)]
6729 && GET_CODE (old) == REG
6730 && (dead_or_set_p (insn,
6731 spill_reg_stored_to[REGNO (oldequiv)])
6732 || rtx_equal_p (spill_reg_stored_to[REGNO (oldequiv)],
6733 old)))
6734 delete_output_reload (insn, j, REGNO (oldequiv));
6735
6736 /* Prevent normal processing of this reload. */
6737 special = 1;
6738 /* Output a special code sequence for this case. */
6739 new_spill_reg_store[REGNO (reloadreg)]
6740 = inc_for_reload (reloadreg, oldequiv, reload_out[j],
6741 reload_inc[j]);
6742 }
6743
6744 /* If we are reloading a pseudo-register that was set by the previous
6745 insn, see if we can get rid of that pseudo-register entirely
6746 by redirecting the previous insn into our reload register. */
6747
6748 else if (optimize && GET_CODE (old) == REG
6749 && REGNO (old) >= FIRST_PSEUDO_REGISTER
6750 && dead_or_set_p (insn, old)
6751 /* This is unsafe if some other reload
6752 uses the same reg first. */
6753 && reload_reg_free_before_p (REGNO (reloadreg),
6754 reload_opnum[j],
6755 reload_when_needed[j], 0))
6756 {
6757 rtx temp = PREV_INSN (insn);
6758 while (temp && GET_CODE (temp) == NOTE)
6759 temp = PREV_INSN (temp);
6760 if (temp
6761 && GET_CODE (temp) == INSN
6762 && GET_CODE (PATTERN (temp)) == SET
6763 && SET_DEST (PATTERN (temp)) == old
6764 /* Make sure we can access insn_operand_constraint. */
6765 && asm_noperands (PATTERN (temp)) < 0
6766 /* This is unsafe if prev insn rejects our reload reg. */
6767 && constraint_accepts_reg_p (insn_operand_constraint[recog_memoized (temp)][0],
6768 reloadreg)
6769 /* This is unsafe if operand occurs more than once in current
6770 insn. Perhaps some occurrences aren't reloaded. */
6771 && count_occurrences (PATTERN (insn), old) == 1
6772 /* Don't risk splitting a matching pair of operands. */
6773 && ! reg_mentioned_p (old, SET_SRC (PATTERN (temp))))
6774 {
6775 /* Store into the reload register instead of the pseudo. */
6776 SET_DEST (PATTERN (temp)) = reloadreg;
6777 /* If these are the only uses of the pseudo reg,
6778 pretend for GDB it lives in the reload reg we used. */
6779 if (REG_N_DEATHS (REGNO (old)) == 1
6780 && REG_N_SETS (REGNO (old)) == 1)
6781 {
6782 reg_renumber[REGNO (old)] = REGNO (reload_reg_rtx[j]);
6783 alter_reg (REGNO (old), -1);
6784 }
6785 special = 1;
6786 }
6787 }
6788
6789 /* We can't do that, so output an insn to load RELOADREG. */
6790
6791 if (! special)
6792 {
6793 #ifdef SECONDARY_INPUT_RELOAD_CLASS
6794 rtx second_reload_reg = 0;
6795 enum insn_code icode;
6796
6797 /* If we have a secondary reload, pick up the secondary register
6798 and icode, if any. If OLDEQUIV and OLD are different or
6799 if this is an in-out reload, recompute whether or not we
6800 still need a secondary register and what the icode should
6801 be. If we still need a secondary register and the class or
6802 icode is different, go back to reloading from OLD if using
6803 OLDEQUIV means that we got the wrong type of register. We
6804 cannot have different class or icode due to an in-out reload
6805 because we don't make such reloads when both the input and
6806 output need secondary reload registers. */
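/* A hypothetical illustration (whether this arises depends entirely on
   the target's SECONDARY_INPUT_RELOAD_CLASS): loading a MEM into a
   floating-point reload register might only be doable through an integer
   register.  Then reload_secondary_in_reload[j] names the reload whose
   reload_reg_rtx is that intermediate register, and
   reload_secondary_in_icode[j] is CODE_FOR_nothing unless the target
   supplies a reload_in pattern that uses the register as a scratch.  */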
6807
6808 if (reload_secondary_in_reload[j] >= 0)
6809 {
6810 int secondary_reload = reload_secondary_in_reload[j];
6811 rtx real_oldequiv = oldequiv;
6812 rtx real_old = old;
6813
6814 /* If OLDEQUIV is a pseudo with a MEM, get the real MEM
6815 and similarly for OLD.
6816 See comments in get_secondary_reload in reload.c. */
6817 /* If it is a pseudo that cannot be replaced with its
6818 equivalent MEM, we must fall back to reload_in, which
6819 will have all the necessary substitutions registered. */
6820
6821 if (GET_CODE (oldequiv) == REG
6822 && REGNO (oldequiv) >= FIRST_PSEUDO_REGISTER
6823 && reg_equiv_memory_loc[REGNO (oldequiv)] != 0)
6824 {
6825 if (reg_equiv_address[REGNO (oldequiv)]
6826 || num_not_at_initial_offset)
6827 real_oldequiv = reload_in[j];
6828 else
6829 real_oldequiv = reg_equiv_mem[REGNO (oldequiv)];
6830 }
6831
6832 if (GET_CODE (old) == REG
6833 && REGNO (old) >= FIRST_PSEUDO_REGISTER
6834 && reg_equiv_memory_loc[REGNO (old)] != 0)
6835 {
6836 if (reg_equiv_address[REGNO (old)]
6837 || num_not_at_initial_offset)
6838 real_old = reload_in[j];
6839 else
6840 real_old = reg_equiv_mem[REGNO (old)];
6841 }
6842
6843 second_reload_reg = reload_reg_rtx[secondary_reload];
6844 icode = reload_secondary_in_icode[j];
6845
6846 if ((old != oldequiv && ! rtx_equal_p (old, oldequiv))
6847 || (reload_in[j] != 0 && reload_out[j] != 0))
6848 {
6849 enum reg_class new_class
6850 = SECONDARY_INPUT_RELOAD_CLASS (reload_reg_class[j],
6851 mode, real_oldequiv);
6852
6853 if (new_class == NO_REGS)
6854 second_reload_reg = 0;
6855 else
6856 {
6857 enum insn_code new_icode;
6858 enum machine_mode new_mode;
6859
6860 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) new_class],
6861 REGNO (second_reload_reg)))
6862 oldequiv = old, real_oldequiv = real_old;
6863 else
6864 {
6865 new_icode = reload_in_optab[(int) mode];
6866 if (new_icode != CODE_FOR_nothing
6867 && ((insn_operand_predicate[(int) new_icode][0]
6868 && ! ((*insn_operand_predicate[(int) new_icode][0])
6869 (reloadreg, mode)))
6870 || (insn_operand_predicate[(int) new_icode][1]
6871 && ! ((*insn_operand_predicate[(int) new_icode][1])
6872 (real_oldequiv, mode)))))
6873 new_icode = CODE_FOR_nothing;
6874
6875 if (new_icode == CODE_FOR_nothing)
6876 new_mode = mode;
6877 else
6878 new_mode = insn_operand_mode[(int) new_icode][2];
6879
6880 if (GET_MODE (second_reload_reg) != new_mode)
6881 {
6882 if (!HARD_REGNO_MODE_OK (REGNO (second_reload_reg),
6883 new_mode))
6884 oldequiv = old, real_oldequiv = real_old;
6885 else
6886 second_reload_reg
6887 = gen_rtx_REG (new_mode,
6888 REGNO (second_reload_reg));
6889 }
6890 }
6891 }
6892 }
6893
6894 /* If we still need a secondary reload register, check
6895 to see if it is being used as a scratch or intermediate
6896 register and generate code appropriately. If we need
6897 a scratch register, use REAL_OLDEQUIV since the form of
6898 the insn may depend on the actual address if it is
6899 a MEM. */
6900
6901 if (second_reload_reg)
6902 {
6903 if (icode != CODE_FOR_nothing)
6904 {
6905 emit_insn (GEN_FCN (icode) (reloadreg, real_oldequiv,
6906 second_reload_reg));
6907 special = 1;
6908 }
6909 else
6910 {
6911 /* See if we need a scratch register to load the
6912 intermediate register (a tertiary reload). */
6913 enum insn_code tertiary_icode
6914 = reload_secondary_in_icode[secondary_reload];
6915
6916 if (tertiary_icode != CODE_FOR_nothing)
6917 {
6918 rtx third_reload_reg
6919 = reload_reg_rtx[reload_secondary_in_reload[secondary_reload]];
6920
6921 emit_insn ((GEN_FCN (tertiary_icode)
6922 (second_reload_reg, real_oldequiv,
6923 third_reload_reg)));
6924 }
6925 else
6926 gen_reload (second_reload_reg, real_oldequiv,
6927 reload_opnum[j],
6928 reload_when_needed[j]);
6929
6930 oldequiv = second_reload_reg;
6931 }
6932 }
6933 }
6934 #endif
6935
6936 if (! special && ! rtx_equal_p (reloadreg, oldequiv))
6937 {
6938 rtx real_oldequiv = oldequiv;
6939
6940 if ((GET_CODE (oldequiv) == REG
6941 && REGNO (oldequiv) >= FIRST_PSEUDO_REGISTER
6942 && reg_equiv_memory_loc[REGNO (oldequiv)] != 0)
6943 || (GET_CODE (oldequiv) == SUBREG
6944 && GET_CODE (SUBREG_REG (oldequiv)) == REG
6945 && (REGNO (SUBREG_REG (oldequiv))
6946 >= FIRST_PSEUDO_REGISTER)
6947 && (reg_equiv_memory_loc
6948 [REGNO (SUBREG_REG (oldequiv))] != 0)))
6949 real_oldequiv = reload_in[j];
6950 gen_reload (reloadreg, real_oldequiv, reload_opnum[j],
6951 reload_when_needed[j]);
6952 }
6953
6954 }
6955
6956 this_reload_insn = get_last_insn ();
6957 /* End this sequence. */
6958 *where = get_insns ();
6959 end_sequence ();
6960
6961 /* Update reload_override_in so that delete_address_reloads_1
6962 can see the actual register usage. */
6963 if (oldequiv_reg)
6964 reload_override_in[j] = oldequiv;
6965 }
6966
6967 /* When inheriting a wider reload, we have a MEM in reload_in[j],
6968 e.g. inheriting a SImode output reload for
6969 (mem:HI (plus:SI (reg:SI 14 fp) (const_int 10))) */
6970 if (optimize && reload_inherited[j] && reload_in[j]
6971 && GET_CODE (reload_in[j]) == MEM
6972 && GET_CODE (reload_in_reg[j]) == MEM
6973 && reload_spill_index[j] >= 0
6974 && TEST_HARD_REG_BIT (reg_reloaded_valid, reload_spill_index[j]))
6975 {
6976 expect_occurrences
6977 = count_occurrences (PATTERN (insn), reload_in[j]) == 1 ? 0 : -1;
6978 reload_in[j]
6979 = regno_reg_rtx[reg_reloaded_contents[reload_spill_index[j]]];
6980 }
6981
6982 /* If we are reloading a register that was recently stored in with an
6983 output-reload, see if we can prove there was
6984 actually no need to store the old value in it. */
6985
6986 if (optimize
6987 && (reload_inherited[j] || reload_override_in[j])
6988 && reload_reg_rtx[j]
6989 && GET_CODE (reload_reg_rtx[j]) == REG
6990 && spill_reg_store[REGNO (reload_reg_rtx[j])] != 0
6991 #if 0
6992 /* There doesn't seem to be any reason to restrict this to pseudos
6993 and doing so loses in the case where we are copying from a
6994 register of the wrong class. */
6995 && REGNO (spill_reg_stored_to[REGNO (reload_reg_rtx[j])])
6996 >= FIRST_PSEUDO_REGISTER
6997 #endif
6998 /* The insn might already have some references to stack slots
6999 replaced by MEMs, while reload_out_reg still names the
7000 original pseudo. */
7001 && (dead_or_set_p (insn,
7002 spill_reg_stored_to[REGNO (reload_reg_rtx[j])])
7003 || rtx_equal_p (spill_reg_stored_to[REGNO (reload_reg_rtx[j])],
7004 reload_out_reg[j])))
7005 delete_output_reload (insn, j, REGNO (reload_reg_rtx[j]));
7006
7007 /* Input-reloading is done. Now do output-reloading,
7008 storing the value from the reload-register after the main insn
7009 if reload_out[j] is nonzero.
7010
7011 ??? At some point we need to support handling output reloads of
7012 JUMP_INSNs or insns that set cc0. */
7013
7014 /* If this is an output reload that stores something that is
7015 not loaded in this same reload, see if we can eliminate a previous
7016 store. */
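/* Illustrative scenario (numbers invented): an earlier insn
   output-reloaded pseudo 120 through hard reg 4, so
   reg_last_reload_reg[120] is (reg:SI 4), spill_reg_store[4] is the insn
   that stored it back, and spill_reg_stored_to[4] is (reg:SI 120).  If
   the current insn now stores a fresh value into pseudo 120 without
   reading the old one, that earlier store is dead and
   delete_output_reload can remove it.  */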
7017 {
7018 rtx pseudo = reload_out_reg[j];
7019
7020 if (pseudo
7021 && GET_CODE (pseudo) == REG
7022 && ! rtx_equal_p (reload_in_reg[j], pseudo)
7023 && REGNO (pseudo) >= FIRST_PSEUDO_REGISTER
7024 && reg_last_reload_reg[REGNO (pseudo)])
7025 {
7026 int pseudo_no = REGNO (pseudo);
7027 int last_regno = REGNO (reg_last_reload_reg[pseudo_no]);
7028
7029 /* We don't need to test full validity of last_regno for
7030 inheritance here; we only want to know if the store actually
7031 matches the pseudo. */
7032 if (reg_reloaded_contents[last_regno] == pseudo_no
7033 && spill_reg_store[last_regno]
7034 && rtx_equal_p (pseudo, spill_reg_stored_to[last_regno]))
7035 delete_output_reload (insn, j, last_regno);
7036 }
7037 }
7038
7039 old = reload_out_reg[j];
7040 if (old != 0
7041 && reload_reg_rtx[j] != old
7042 && reload_reg_rtx[j] != 0)
7043 {
7044 register rtx reloadreg = reload_reg_rtx[j];
7045 #ifdef SECONDARY_OUTPUT_RELOAD_CLASS
7046 register rtx second_reloadreg = 0;
7047 #endif
7048 rtx note, p;
7049 enum machine_mode mode;
7050 int special = 0;
7051
7052 /* An output operand that dies right away does need a reload,
7053 but need not be copied from it. Show the new location in the
7054 REG_UNUSED note. */
7055 if ((GET_CODE (old) == REG || GET_CODE (old) == SCRATCH)
7056 && (note = find_reg_note (insn, REG_UNUSED, old)) != 0)
7057 {
7058 XEXP (note, 0) = reload_reg_rtx[j];
7059 continue;
7060 }
7061 /* Likewise for a SUBREG of an operand that dies. */
7062 else if (GET_CODE (old) == SUBREG
7063 && GET_CODE (SUBREG_REG (old)) == REG
7064 && 0 != (note = find_reg_note (insn, REG_UNUSED,
7065 SUBREG_REG (old))))
7066 {
7067 XEXP (note, 0) = gen_lowpart_common (GET_MODE (old),
7068 reload_reg_rtx[j]);
7069 continue;
7070 }
7071 else if (GET_CODE (old) == SCRATCH)
7072 /* If we aren't optimizing, there won't be a REG_UNUSED note,
7073 but we don't want to make an output reload. */
7074 continue;
7075
7076 #if 0
7077 /* Strip off of OLD any size-increasing SUBREGs such as
7078 (SUBREG:SI foo:QI 0). */
7079
7080 while (GET_CODE (old) == SUBREG && SUBREG_WORD (old) == 0
7081 && (GET_MODE_SIZE (GET_MODE (old))
7082 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (old)))))
7083 old = SUBREG_REG (old);
7084 #endif
7085
7086 /* If this is a JUMP_INSN, we can't support output reloads yet. */
7087 if (GET_CODE (insn) == JUMP_INSN)
7088 abort ();
7089
7090 if (reload_when_needed[j] == RELOAD_OTHER)
7091 start_sequence ();
7092 else
7093 push_to_sequence (output_reload_insns[reload_opnum[j]]);
7094
7095 old = reload_out[j];
7096
7097 /* Determine the mode to reload in.
7098 See comments above (for input reloading). */
7099
7100 mode = GET_MODE (old);
7101 if (mode == VOIDmode)
7102 {
7103 /* VOIDmode should never happen for an output. */
7104 if (asm_noperands (PATTERN (insn)) < 0)
7105 /* It's the compiler's fault. */
7106 fatal_insn ("VOIDmode on an output", insn);
7107 error_for_asm (insn, "output operand is constant in `asm'");
7108 /* Prevent crash--use something we know is valid. */
7109 mode = word_mode;
7110 old = gen_rtx_REG (mode, REGNO (reloadreg));
7111 }
7112
7113 if (GET_MODE (reloadreg) != mode)
7114 reloadreg = gen_rtx_REG (mode, REGNO (reloadreg));
7115
7116 #ifdef SECONDARY_OUTPUT_RELOAD_CLASS
7117
7118 /* If we need two reload regs, set RELOADREG to the intermediate
7119 one, since it will be stored into OLD. We might need a secondary
7120 register only for an input reload, so check again here. */
7121
7122 if (reload_secondary_out_reload[j] >= 0)
7123 {
7124 rtx real_old = old;
7125
7126 if (GET_CODE (old) == REG && REGNO (old) >= FIRST_PSEUDO_REGISTER
7127 && reg_equiv_mem[REGNO (old)] != 0)
7128 real_old = reg_equiv_mem[REGNO (old)];
7129
7130 if ((SECONDARY_OUTPUT_RELOAD_CLASS (reload_reg_class[j],
7131 mode, real_old)
7132 != NO_REGS))
7133 {
7134 second_reloadreg = reloadreg;
7135 reloadreg = reload_reg_rtx[reload_secondary_out_reload[j]];
7136
7137 /* See if RELOADREG is to be used as a scratch register
7138 or as an intermediate register. */
7139 if (reload_secondary_out_icode[j] != CODE_FOR_nothing)
7140 {
7141 emit_insn ((GEN_FCN (reload_secondary_out_icode[j])
7142 (real_old, second_reloadreg, reloadreg)));
7143 special = 1;
7144 }
7145 else
7146 {
7147 /* See if we need both a scratch and intermediate reload
7148 register. */
7149
7150 int secondary_reload = reload_secondary_out_reload[j];
7151 enum insn_code tertiary_icode
7152 = reload_secondary_out_icode[secondary_reload];
7153
7154 if (GET_MODE (reloadreg) != mode)
7155 reloadreg = gen_rtx_REG (mode, REGNO (reloadreg));
7156
7157 if (tertiary_icode != CODE_FOR_nothing)
7158 {
7159 rtx third_reloadreg
7160 = reload_reg_rtx[reload_secondary_out_reload[secondary_reload]];
7161 rtx tem;
7162
7163 /* Copy primary reload reg to secondary reload reg
7164 (note that these have been swapped above), then copy
7165 secondary reload reg to OLD using our insn. */
7166
7167 /* If REAL_OLD is a paradoxical SUBREG, remove it
7168 and try to put the opposite SUBREG on
7169 RELOADREG. */
7170 if (GET_CODE (real_old) == SUBREG
7171 && (GET_MODE_SIZE (GET_MODE (real_old))
7172 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (real_old))))
7173 && 0 != (tem = gen_lowpart_common
7174 (GET_MODE (SUBREG_REG (real_old)),
7175 reloadreg)))
7176 real_old = SUBREG_REG (real_old), reloadreg = tem;
7177
7178 gen_reload (reloadreg, second_reloadreg,
7179 reload_opnum[j], reload_when_needed[j]);
7180 emit_insn ((GEN_FCN (tertiary_icode)
7181 (real_old, reloadreg, third_reloadreg)));
7182 special = 1;
7183 }
7184
7185 else
7186 /* Copy between the reload regs here and then to
7187 OUT later. */
7188
7189 gen_reload (reloadreg, second_reloadreg,
7190 reload_opnum[j], reload_when_needed[j]);
7191 }
7192 }
7193 }
7194 #endif
7195
7196 /* Output the last reload insn. */
7197 if (! special)
7198 {
7199 rtx set;
7200
7201 /* Don't output the last reload if OLD is not the dest of
7202 INSN and is in the src and is clobbered by INSN. */
7203 if (! flag_expensive_optimizations
7204 || GET_CODE (old) != REG
7205 || !(set = single_set (insn))
7206 || rtx_equal_p (old, SET_DEST (set))
7207 || !reg_mentioned_p (old, SET_SRC (set))
7208 || !regno_clobbered_p (REGNO (old), insn))
7209 gen_reload (old, reloadreg, reload_opnum[j],
7210 reload_when_needed[j]);
7211 }
7212
7213 /* Look at all insns we emitted, just to be safe. */
7214 for (p = get_insns (); p; p = NEXT_INSN (p))
7215 if (GET_RTX_CLASS (GET_CODE (p)) == 'i')
7216 {
7217 rtx pat = PATTERN (p);
7218
7219 /* If this output reload doesn't come from a spill reg,
7220 clear any memory of reloaded copies of the pseudo reg.
7221 If this output reload comes from a spill reg,
7222 reg_has_output_reload will make this do nothing. */
7223 note_stores (pat, forget_old_reloads_1);
7224
7225 if (reg_mentioned_p (reload_reg_rtx[j], pat))
7226 {
7227 rtx set = single_set (insn);
7228 if (reload_spill_index[j] < 0
7229 && set
7230 && SET_SRC (set) == reload_reg_rtx[j])
7231 {
7232 int src = REGNO (SET_SRC (set));
7233
7234 reload_spill_index[j] = src;
7235 SET_HARD_REG_BIT (reg_is_output_reload, src);
7236 if (find_regno_note (insn, REG_DEAD, src))
7237 SET_HARD_REG_BIT (reg_reloaded_died, src);
7238 }
7239 if (REGNO (reload_reg_rtx[j]) < FIRST_PSEUDO_REGISTER)
7240 {
7241 int s = reload_secondary_out_reload[j];
7242 set = single_set (p);
7243 /* If this reload copies only to the secondary reload
7244 register, the secondary reload does the actual
7245 store. */
7246 if (s >= 0 && set == NULL_RTX)
7247 ; /* We can't tell what function the secondary reload
7248 has and where the actual store to the pseudo is
7249 made; leave new_spill_reg_store alone. */
7250 else if (s >= 0
7251 && SET_SRC (set) == reload_reg_rtx[j]
7252 && SET_DEST (set) == reload_reg_rtx[s])
7253 {
7254 /* Usually the next instruction will be the
7255 secondary reload insn; if we can confirm
7256 that it is, setting new_spill_reg_store to
7257 that insn will allow an extra optimization. */
7258 rtx s_reg = reload_reg_rtx[s];
7259 rtx next = NEXT_INSN (p);
7260 reload_out[s] = reload_out[j];
7261 reload_out_reg[s] = reload_out_reg[j];
7262 set = single_set (next);
7263 if (set && SET_SRC (set) == s_reg
7264 && ! new_spill_reg_store[REGNO (s_reg)])
7265 {
7266 SET_HARD_REG_BIT (reg_is_output_reload,
7267 REGNO (s_reg));
7268 new_spill_reg_store[REGNO (s_reg)] = next;
7269 }
7270 }
7271 else
7272 new_spill_reg_store[REGNO (reload_reg_rtx[j])] = p;
7273 }
7274 }
7275 }
7276
7277 if (reload_when_needed[j] == RELOAD_OTHER)
7278 {
7279 emit_insns (other_output_reload_insns[reload_opnum[j]]);
7280 other_output_reload_insns[reload_opnum[j]] = get_insns ();
7281 }
7282 else
7283 output_reload_insns[reload_opnum[j]] = get_insns ();
7284
7285 end_sequence ();
7286 }
7287 }
7288
7289 /* Now write all the insns we made for reloads in the order expected by
7290 the allocation functions. Prior to the insn being reloaded, we write
7291 the following reloads:
7292
7293 RELOAD_FOR_OTHER_ADDRESS reloads for input addresses.
7294
7295 RELOAD_OTHER reloads.
7296
7297 For each operand, any RELOAD_FOR_INPADDR_ADDRESS reloads followed
7298 by any RELOAD_FOR_INPUT_ADDRESS reloads followed by the
7299 RELOAD_FOR_INPUT reload for the operand.
7300
7301 RELOAD_FOR_OPADDR_ADDRS reloads.
7302
7303 RELOAD_FOR_OPERAND_ADDRESS reloads.
7304
7305 After the insn being reloaded, we write the following:
7306
7307 For each operand, any RELOAD_FOR_OUTADDR_ADDRESS reloads followed
7308 by any RELOAD_FOR_OUTPUT_ADDRESS reload followed by the
7309 RELOAD_FOR_OUTPUT reload, followed by any RELOAD_OTHER output
7310 reloads for the operand. The RELOAD_OTHER output reloads are
7311 output in descending order by reload number. */
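/* A rough picture of the resulting stream for a two-operand insn (purely
   illustrative; empty sequences simply contribute nothing):

       RELOAD_FOR_OTHER_ADDRESS insns
       RELOAD_OTHER input insns
       operand 0: RELOAD_FOR_INPADDR_ADDRESS, RELOAD_FOR_INPUT_ADDRESS,
                  RELOAD_FOR_INPUT insns
       operand 1: likewise
       RELOAD_FOR_OPADDR_ADDR insns
       RELOAD_FOR_OPERAND_ADDRESS insns
       INSN
       operand 0: RELOAD_FOR_OUTADDR_ADDRESS, RELOAD_FOR_OUTPUT_ADDRESS,
                  RELOAD_FOR_OUTPUT, then RELOAD_OTHER output insns
       operand 1: likewise  */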
7312
7313 emit_insns_before (other_input_address_reload_insns, insn);
7314 emit_insns_before (other_input_reload_insns, insn);
7315
7316 for (j = 0; j < reload_n_operands; j++)
7317 {
7318 emit_insns_before (inpaddr_address_reload_insns[j], insn);
7319 emit_insns_before (input_address_reload_insns[j], insn);
7320 emit_insns_before (input_reload_insns[j], insn);
7321 }
7322
7323 emit_insns_before (other_operand_reload_insns, insn);
7324 emit_insns_before (operand_reload_insns, insn);
7325
7326 for (j = 0; j < reload_n_operands; j++)
7327 {
7328 emit_insns_before (outaddr_address_reload_insns[j], following_insn);
7329 emit_insns_before (output_address_reload_insns[j], following_insn);
7330 emit_insns_before (output_reload_insns[j], following_insn);
7331 emit_insns_before (other_output_reload_insns[j], following_insn);
7332 }
7333
7334 /* Keep basic block info up to date. */
7335 if (n_basic_blocks)
7336 {
7337 if (basic_block_head[chain->block] == insn)
7338 basic_block_head[chain->block] = NEXT_INSN (before_insn);
7339 if (basic_block_end[chain->block] == insn)
7340 basic_block_end[chain->block] = PREV_INSN (following_insn);
7341 }
7342
7343 /* For all the spill regs newly reloaded in this instruction,
7344 record what they were reloaded from, so subsequent instructions
7345 can inherit the reloads.
7346
7347 Update spill_reg_store for the reloads of this insn.
7348 Copy the elements that were updated in the loop above. */
7349
7350 for (j = 0; j < n_reloads; j++)
7351 {
7352 register int r = reload_order[j];
7353 register int i = reload_spill_index[r];
7354
7355 /* I is nonneg if this reload used a register.
7356 If reload_reg_rtx[r] is 0, this is an optional reload
7357 that we opted to ignore. */
7358
7359 if (i >= 0 && reload_reg_rtx[r] != 0)
7360 {
7361 int nr
7362 = HARD_REGNO_NREGS (i, GET_MODE (reload_reg_rtx[r]));
7363 int k;
7364 int part_reaches_end = 0;
7365 int all_reaches_end = 1;
7366
7367 /* For a multi-register reload, we need to check if all or part
7368 of the value lives to the end. */
7369 for (k = 0; k < nr; k++)
7370 {
7371 if (reload_reg_reaches_end_p (i + k, reload_opnum[r],
7372 reload_when_needed[r]))
7373 part_reaches_end = 1;
7374 else
7375 all_reaches_end = 0;
7376 }
7377
7378 /* Ignore reloads that don't reach the end of the insn in
7379 its entirety. */
7380 if (all_reaches_end)
7381 {
7382 /* First, clear out memory of what used to be in this spill reg.
7383 If consecutive registers are used, clear them all. */
7384
7385 for (k = 0; k < nr; k++)
7386 CLEAR_HARD_REG_BIT (reg_reloaded_valid, i + k);
7387
7388 /* Maybe the spill reg contains a copy of reload_out. */
7389 if (reload_out[r] != 0
7390 && (GET_CODE (reload_out[r]) == REG
7391 #ifdef AUTO_INC_DEC
7392 || ! reload_out_reg[r]
7393 #endif
7394 || GET_CODE (reload_out_reg[r]) == REG))
7395 {
7396 rtx out = (GET_CODE (reload_out[r]) == REG
7397 ? reload_out[r]
7398 : reload_out_reg[r]
7399 ? reload_out_reg[r]
7400 /* AUTO_INC */ : XEXP (reload_in_reg[r], 0));
7401 register int nregno = REGNO (out);
7402 int nnr = (nregno >= FIRST_PSEUDO_REGISTER ? 1
7403 : HARD_REGNO_NREGS (nregno,
7404 GET_MODE (reload_reg_rtx[r])));
7405
7406 spill_reg_store[i] = new_spill_reg_store[i];
7407 spill_reg_stored_to[i] = out;
7408 reg_last_reload_reg[nregno] = reload_reg_rtx[r];
7409
7410 /* If NREGNO is a hard register, it may occupy more than
7411 one register. If it does, say what is in the
7412 rest of the registers assuming that both registers
7413 agree on how many words the object takes. If not,
7414 invalidate the subsequent registers. */
7415
7416 if (nregno < FIRST_PSEUDO_REGISTER)
7417 for (k = 1; k < nnr; k++)
7418 reg_last_reload_reg[nregno + k]
7419 = (nr == nnr
7420 ? gen_rtx_REG (reg_raw_mode[REGNO (reload_reg_rtx[r]) + k],
7421 REGNO (reload_reg_rtx[r]) + k)
7422 : 0);
7423
7424 /* Now do the inverse operation. */
7425 for (k = 0; k < nr; k++)
7426 {
7427 CLEAR_HARD_REG_BIT (reg_reloaded_dead, i + k);
7428 reg_reloaded_contents[i + k]
7429 = (nregno >= FIRST_PSEUDO_REGISTER || nr != nnr
7430 ? nregno
7431 : nregno + k);
7432 reg_reloaded_insn[i + k] = insn;
7433 SET_HARD_REG_BIT (reg_reloaded_valid, i + k);
7434 }
7435 }
7436
7437 /* Maybe the spill reg contains a copy of reload_in. Only do
7438 something if there will not be an output reload for
7439 the register being reloaded. */
7440 else if (reload_out_reg[r] == 0
7441 && reload_in[r] != 0
7442 && ((GET_CODE (reload_in[r]) == REG
7443 && REGNO (reload_in[r]) >= FIRST_PSEUDO_REGISTER
7444 && ! reg_has_output_reload[REGNO (reload_in[r])])
7445 || (GET_CODE (reload_in_reg[r]) == REG
7446 && ! reg_has_output_reload[REGNO (reload_in_reg[r])]))
7447 && ! reg_set_p (reload_reg_rtx[r], PATTERN (insn)))
7448 {
7449 register int nregno;
7450 int nnr;
7451
7452 if (GET_CODE (reload_in[r]) == REG
7453 && REGNO (reload_in[r]) >= FIRST_PSEUDO_REGISTER)
7454 nregno = REGNO (reload_in[r]);
7455 else if (GET_CODE (reload_in_reg[r]) == REG)
7456 nregno = REGNO (reload_in_reg[r]);
7457 else
7458 nregno = REGNO (XEXP (reload_in_reg[r], 0));
7459
7460 nnr = (nregno >= FIRST_PSEUDO_REGISTER ? 1
7461 : HARD_REGNO_NREGS (nregno,
7462 GET_MODE (reload_reg_rtx[r])));
7463
7464 reg_last_reload_reg[nregno] = reload_reg_rtx[r];
7465
7466 if (nregno < FIRST_PSEUDO_REGISTER)
7467 for (k = 1; k < nnr; k++)
7468 reg_last_reload_reg[nregno + k]
7469 = (nr == nnr
7470 ? gen_rtx_REG (reg_raw_mode[REGNO (reload_reg_rtx[r]) + k],
7471 REGNO (reload_reg_rtx[r]) + k)
7472 : 0);
7473
7474 /* Unless we inherited this reload, show we haven't
7475 recently done a store.
7476 Previous stores of inherited auto_inc expressions
7477 also have to be discarded. */
7478 if (! reload_inherited[r]
7479 || (reload_out[r] && ! reload_out_reg[r]))
7480 spill_reg_store[i] = 0;
7481
7482 for (k = 0; k < nr; k++)
7483 {
7484 CLEAR_HARD_REG_BIT (reg_reloaded_dead, i + k);
7485 reg_reloaded_contents[i + k]
7486 = (nregno >= FIRST_PSEUDO_REGISTER || nr != nnr
7487 ? nregno
7488 : nregno + k);
7489 reg_reloaded_insn[i + k] = insn;
7490 SET_HARD_REG_BIT (reg_reloaded_valid, i + k);
7491 }
7492 }
7493 }
7494
7495 /* However, if part of the reload reaches the end, then we must
7496 invalidate the old info for the part that survives to the end. */
7497 else if (part_reaches_end)
7498 {
7499 for (k = 0; k < nr; k++)
7500 if (reload_reg_reaches_end_p (i + k,
7501 reload_opnum[r],
7502 reload_when_needed[r]))
7503 CLEAR_HARD_REG_BIT (reg_reloaded_valid, i + k);
7504 }
7505 }
7506
7507 /* The following if-statement was #if 0'd in 1.34 (or before...).
7508 It's reenabled in 1.35 because supposedly nothing else
7509 deals with this problem. */
7510
7511 /* If a register gets output-reloaded from a non-spill register,
7512 that invalidates any previous reloaded copy of it.
7513 But forget_old_reloads_1 won't get to see it, because
7514 it thinks only about the original insn. So invalidate it here. */
7515 if (i < 0 && reload_out[r] != 0
7516 && (GET_CODE (reload_out[r]) == REG
7517 || (GET_CODE (reload_out[r]) == MEM
7518 && GET_CODE (reload_out_reg[r]) == REG)))
7519 {
7520 rtx out = (GET_CODE (reload_out[r]) == REG
7521 ? reload_out[r] : reload_out_reg[r]);
7522 register int nregno = REGNO (out);
7523 if (nregno >= FIRST_PSEUDO_REGISTER)
7524 {
7525 rtx src_reg, store_insn;
7526
7527 reg_last_reload_reg[nregno] = 0;
7528
7529 /* If we can find a hard register that is stored, record
7530 the storing insn so that we may delete this insn with
7531 delete_output_reload. */
7532 src_reg = reload_reg_rtx[r];
7533
7534 /* If this is an optional reload, try to find the source reg
7535 from an input reload. */
7536 if (! src_reg)
7537 {
7538 rtx set = single_set (insn);
7539 if (set && SET_DEST (set) == reload_out[r])
7540 {
7541 int k;
7542
7543 src_reg = SET_SRC (set);
7544 store_insn = insn;
7545 for (k = 0; k < n_reloads; k++)
7546 {
7547 if (reload_in[k] == src_reg)
7548 {
7549 src_reg = reload_reg_rtx[k];
7550 break;
7551 }
7552 }
7553 }
7554 }
7555 else
7556 store_insn = new_spill_reg_store[REGNO (src_reg)];
7557 if (src_reg && GET_CODE (src_reg) == REG
7558 && REGNO (src_reg) < FIRST_PSEUDO_REGISTER)
7559 {
7560 int src_regno = REGNO (src_reg);
7561 int nr = HARD_REGNO_NREGS (src_regno, reload_mode[r]);
7562 /* Where to find a death note varies with
7563 PRESERVE_DEATH_INFO_REGNO_P. The condition is not
7564 necessarily checked exactly in the code that moves
7565 notes, so just check both locations. */
7566 rtx note = find_regno_note (insn, REG_DEAD, src_regno);
7567 if (! note)
7568 note = find_regno_note (store_insn, REG_DEAD, src_regno);
7569 while (nr-- > 0)
7570 {
7571 spill_reg_store[src_regno + nr] = store_insn;
7572 spill_reg_stored_to[src_regno + nr] = out;
7573 reg_reloaded_contents[src_regno + nr] = nregno;
7574 reg_reloaded_insn[src_regno + nr] = store_insn;
7575 CLEAR_HARD_REG_BIT (reg_reloaded_dead, src_regno + nr);
7576 SET_HARD_REG_BIT (reg_reloaded_valid, src_regno + nr);
7577 SET_HARD_REG_BIT (reg_is_output_reload, src_regno + nr);
7578 if (note)
7579 SET_HARD_REG_BIT (reg_reloaded_died, src_regno);
7580 else
7581 CLEAR_HARD_REG_BIT (reg_reloaded_died, src_regno);
7582 }
7583 reg_last_reload_reg[nregno] = src_reg;
7584 }
7585 }
7586 else
7587 {
7588 int num_regs = HARD_REGNO_NREGS (nregno, GET_MODE (reload_out[r]));
7589
7590 while (num_regs-- > 0)
7591 reg_last_reload_reg[nregno + num_regs] = 0;
7592 }
7593 }
7594 }
7595 IOR_HARD_REG_SET (reg_reloaded_dead, reg_reloaded_died);
7596 }
7597 \f
7598 /* Emit code to perform a reload from IN (which may be a reload register) to
7599 OUT (which may also be a reload register). IN or OUT is from operand
7600 OPNUM with reload type TYPE.
7601
7602 Returns first insn emitted. */
7603
7604 rtx
7605 gen_reload (out, in, opnum, type)
7606 rtx out;
7607 rtx in;
7608 int opnum;
7609 enum reload_type type;
7610 {
7611 rtx last = get_last_insn ();
7612 rtx tem;
7613
7614 /* If IN is a paradoxical SUBREG, remove it and try to put the
7615 opposite SUBREG on OUT. Likewise for a paradoxical SUBREG on OUT. */
7616 if (GET_CODE (in) == SUBREG
7617 && (GET_MODE_SIZE (GET_MODE (in))
7618 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))))
7619 && (tem = gen_lowpart_common (GET_MODE (SUBREG_REG (in)), out)) != 0)
7620 in = SUBREG_REG (in), out = tem;
7621 else if (GET_CODE (out) == SUBREG
7622 && (GET_MODE_SIZE (GET_MODE (out))
7623 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))))
7624 && (tem = gen_lowpart_common (GET_MODE (SUBREG_REG (out)), in)) != 0)
7625 out = SUBREG_REG (out), in = tem;
7626
7627 /* How to do this reload can get quite tricky. Normally, we are being
7628 asked to reload a simple operand, such as a MEM, a constant, or a pseudo
7629 register that didn't get a hard register. In that case we can just
7630 call emit_move_insn.
7631
7632 We can also be asked to reload a PLUS that adds a register or a MEM to
7633 another register, constant or MEM. This can occur during frame pointer
7634 elimination and while reloading addresses. This case is handled by
7635 trying to emit a single insn to perform the add. If it is not valid,
7636 we use a two insn sequence.
7637
7638 Finally, we could be called to handle an 'o' constraint by putting
7639 an address into a register. In that case, we first try to do this
7640 with a named pattern of "reload_load_address". If no such pattern
7641 exists, we just emit a SET insn and hope for the best (it will normally
7642 be valid on machines that use 'o').
7643
7644 This entire process is made complex because reload will never
7645 process the insns we generate here and so we must ensure that
7646 they will fit their constraints and also by the fact that parts of
7647 IN might be being reloaded separately and replaced with spill registers.
7648 Because of this, we are, in some sense, just guessing the right approach
7649 here. The one listed above seems to work.
7650
7651 ??? At some point, this whole thing needs to be rethought. */
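/* A sketch of the PLUS case (the exact insns depend on the target;
   register numbers are invented): to reload

       (plus:SI (reg:SI 6) (const_int 400))

   into reload register (reg:SI 2), we first try the single insn

       (set (reg:SI 2) (plus:SI (reg:SI 6) (const_int 400)))

   If it is not recognized or fails constrain_operands, we fall back to
   moving (const_int 400) into (reg:SI 2) and then adding (reg:SI 6) to
   it with gen_add2_insn, as done below.  */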
7652
7653 if (GET_CODE (in) == PLUS
7654 && (GET_CODE (XEXP (in, 0)) == REG
7655 || GET_CODE (XEXP (in, 0)) == SUBREG
7656 || GET_CODE (XEXP (in, 0)) == MEM)
7657 && (GET_CODE (XEXP (in, 1)) == REG
7658 || GET_CODE (XEXP (in, 1)) == SUBREG
7659 || CONSTANT_P (XEXP (in, 1))
7660 || GET_CODE (XEXP (in, 1)) == MEM))
7661 {
7662 /* We need to compute the sum of a register or a MEM and another
7663 register, constant, or MEM, and put it into the reload
7664 register. The best possible way of doing this is if the machine
7665 has a three-operand ADD insn that accepts the required operands.
7666
7667 The simplest approach is to try to generate such an insn and see if it
7668 is recognized and matches its constraints. If so, it can be used.
7669
7670 It might be better not to actually emit the insn unless it is valid,
7671 but we need to pass the insn as an operand to `recog' and
7672 `extract_insn' and it is simpler to emit and then delete the insn if
7673 not valid than to dummy things up. */
7674
7675 rtx op0, op1, tem, insn;
7676 int code;
7677
7678 op0 = find_replacement (&XEXP (in, 0));
7679 op1 = find_replacement (&XEXP (in, 1));
7680
7681 /* Since constraint checking is strict, commutativity won't be
7682 checked, so we need to do that here to avoid spurious failure
7683 if the add instruction is two-address and the second operand
7684 of the add is the same as the reload reg, which is frequently
7685 the case. If the insn would be A = B + A, rearrange it so
7686 it will be A = A + B as constrain_operands expects. */
7687
7688 if (GET_CODE (XEXP (in, 1)) == REG
7689 && REGNO (out) == REGNO (XEXP (in, 1)))
7690 tem = op0, op0 = op1, op1 = tem;
7691
7692 if (op0 != XEXP (in, 0) || op1 != XEXP (in, 1))
7693 in = gen_rtx_PLUS (GET_MODE (in), op0, op1);
7694
7695 insn = emit_insn (gen_rtx_SET (VOIDmode, out, in));
7696 code = recog_memoized (insn);
7697
7698 if (code >= 0)
7699 {
7700 extract_insn (insn);
7701 /* We want constrain operands to treat this insn strictly in
7702 its validity determination, i.e., the way it would after reload
7703 has completed. */
7704 if (constrain_operands (1))
7705 return insn;
7706 }
7707
7708 delete_insns_since (last);
7709
7710 /* If that failed, we must use a conservative two-insn sequence.
7711 Use move to copy a constant, MEM, or pseudo register to the reload
7712 register since "move" will be able to handle an arbitrary operand,
7713 unlike add which can't, in general. Then add the registers.
7714
7715 If there is another way to do this for a specific machine, a
7716 DEFINE_PEEPHOLE should be specified that recognizes the sequence
7717 we emit below. */
7718
7719 if (CONSTANT_P (op1) || GET_CODE (op1) == MEM || GET_CODE (op1) == SUBREG
7720 || (GET_CODE (op1) == REG
7721 && REGNO (op1) >= FIRST_PSEUDO_REGISTER))
7722 tem = op0, op0 = op1, op1 = tem;
7723
7724 gen_reload (out, op0, opnum, type);
7725
7726 /* If OP0 and OP1 are the same, we can use OUT for OP1.
7727 This fixes a problem on the 32K where the stack pointer cannot
7728 be used as an operand of an add insn. */
7729
7730 if (rtx_equal_p (op0, op1))
7731 op1 = out;
7732
7733 insn = emit_insn (gen_add2_insn (out, op1));
7734
7735 /* If that failed, copy the address register to the reload register.
7736 Then add the constant to the reload register. */
7737
7738 code = recog_memoized (insn);
7739
7740 if (code >= 0)
7741 {
7742 extract_insn (insn);
7743 /* We want constrain operands to treat this insn strictly in
7744 its validity determination, i.e., the way it would after reload
7745 has completed. */
7746 if (constrain_operands (1))
7747 {
7748 /* Add a REG_EQUIV note so that find_equiv_reg can find it. */
7749 REG_NOTES (insn)
7750 = gen_rtx_EXPR_LIST (REG_EQUIV, in, REG_NOTES (insn));
7751 return insn;
7752 }
7753 }
7754
7755 delete_insns_since (last);
7756
7757 gen_reload (out, op1, opnum, type);
7758 insn = emit_insn (gen_add2_insn (out, op0));
7759 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_EQUIV, in, REG_NOTES (insn));
7760 }
7761
7762 #ifdef SECONDARY_MEMORY_NEEDED
7763 /* If we need a memory location to do the move, do it that way. */
7764 else if (GET_CODE (in) == REG && REGNO (in) < FIRST_PSEUDO_REGISTER
7765 && GET_CODE (out) == REG && REGNO (out) < FIRST_PSEUDO_REGISTER
7766 && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (REGNO (in)),
7767 REGNO_REG_CLASS (REGNO (out)),
7768 GET_MODE (out)))
7769 {
7770 /* Get the memory to use and rewrite both registers to its mode. */
7771 rtx loc = get_secondary_mem (in, GET_MODE (out), opnum, type);
7772
7773 if (GET_MODE (loc) != GET_MODE (out))
7774 out = gen_rtx_REG (GET_MODE (loc), REGNO (out));
7775
7776 if (GET_MODE (loc) != GET_MODE (in))
7777 in = gen_rtx_REG (GET_MODE (loc), REGNO (in));
7778
7779 gen_reload (loc, in, opnum, type);
7780 gen_reload (out, loc, opnum, type);
7781 }
7782 #endif
7783
7784 /* If IN is a simple operand, use gen_move_insn. */
7785 else if (GET_RTX_CLASS (GET_CODE (in)) == 'o' || GET_CODE (in) == SUBREG)
7786 emit_insn (gen_move_insn (out, in));
7787
7788 #ifdef HAVE_reload_load_address
7789 else if (HAVE_reload_load_address)
7790 emit_insn (gen_reload_load_address (out, in));
7791 #endif
7792
7793 /* Otherwise, just write (set OUT IN) and hope for the best. */
7794 else
7795 emit_insn (gen_rtx_SET (VOIDmode, out, in));
7796
7797 /* Return the first insn emitted.
7798 We cannot just return get_last_insn, because there may have
7799 been multiple instructions emitted. Also note that gen_move_insn may
7800 emit more than one insn itself, so we cannot assume that there is one
7801 insn emitted per emit_insn_before call. */
7802
7803 return last ? NEXT_INSN (last) : get_insns ();
7804 }
7805 \f
7806 /* Delete a previously made output-reload
7807 whose result we now believe is not needed.
7808 First we double-check.
7809
7810 INSN is the insn now being processed.
7811 LAST_RELOAD_REG is the hard register number for which we want to delete
7812 the last output reload.
7813 J is the reload-number that originally used REG. The caller has made
7814 certain that reload J doesn't use REG any longer for input. */
7815
7816 static void
7817 delete_output_reload (insn, j, last_reload_reg)
7818 rtx insn;
7819 int j;
7820 int last_reload_reg;
7821 {
7822 rtx output_reload_insn = spill_reg_store[last_reload_reg];
7823 rtx reg = spill_reg_stored_to[last_reload_reg];
7824 int k;
7825 int n_occurrences;
7826 int n_inherited = 0;
7827 register rtx i1;
7828 rtx substed;
7829
7830 /* Get the raw pseudo-register referred to. */
7831
7832 while (GET_CODE (reg) == SUBREG)
7833 reg = SUBREG_REG (reg);
7834 substed = reg_equiv_memory_loc[REGNO (reg)];
7835
7836 /* This is unsafe if the operand occurs more often in the current
7837 insn than it is inherited. */
7838 for (k = n_reloads - 1; k >= 0; k--)
7839 {
7840 rtx reg2 = reload_in[k];
7841 if (! reg2)
7842 continue;
7843 if (GET_CODE (reg2) == MEM || reload_override_in[k])
7844 reg2 = reload_in_reg[k];
7845 #ifdef AUTO_INC_DEC
7846 if (reload_out[k] && ! reload_out_reg[k])
7847 reg2 = XEXP (reload_in_reg[k], 0);
7848 #endif
7849 while (GET_CODE (reg2) == SUBREG)
7850 reg2 = SUBREG_REG (reg2);
7851 if (rtx_equal_p (reg2, reg))
7852 {
7853 if (reload_inherited[k] || reload_override_in[k] || k == j)
7854 {
7855 n_inherited++;
7856 reg2 = reload_out_reg[k];
7857 if (! reg2)
7858 continue;
7859 while (GET_CODE (reg2) == SUBREG)
7860 reg2 = XEXP (reg2, 0);
7861 if (rtx_equal_p (reg2, reg))
7862 n_inherited++;
7863 }
7864 else
7865 return;
7866 }
7867 }
7868 n_occurrences = count_occurrences (PATTERN (insn), reg);
7869 if (substed)
7870 n_occurrences += count_occurrences (PATTERN (insn), substed);
7871 if (n_occurrences > n_inherited)
7872 return;
7873
7874 /* If the pseudo-reg we are reloading is no longer referenced
7875 anywhere between the store into it and here,
7876 and no jumps or labels intervene, then the value can get
7877 here through the reload reg alone.
7878 Otherwise, give up--return. */
7879 for (i1 = NEXT_INSN (output_reload_insn);
7880 i1 != insn; i1 = NEXT_INSN (i1))
7881 {
7882 if (GET_CODE (i1) == CODE_LABEL || GET_CODE (i1) == JUMP_INSN)
7883 return;
7884 if ((GET_CODE (i1) == INSN || GET_CODE (i1) == CALL_INSN)
7885 && reg_mentioned_p (reg, PATTERN (i1)))
7886 {
7887 /* If this is a USE in front of INSN, we only have to check that
7888 there are no more references than accounted for by inheritance. */
7889 while (GET_CODE (i1) == INSN && GET_CODE (PATTERN (i1)) == USE)
7890 {
7891 n_occurrences += rtx_equal_p (reg, XEXP (PATTERN (i1), 0)) != 0;
7892 i1 = NEXT_INSN (i1);
7893 }
7894 if (n_occurrences <= n_inherited && i1 == insn)
7895 break;
7896 return;
7897 }
7898 }
7899
7900 /* The caller has already checked that REG dies or is set in INSN.
7901 It has also checked that we are optimizing, and thus some inaccuracies
7902 in the debugging information are acceptable.
7903 So we could just delete output_reload_insn.
7904 But in some cases we can improve the debugging information without
7905 sacrificing optimization - maybe even improving the code:
7906 See if the pseudo reg has been completely replaced
7907 with reload regs. If so, delete the store insn
7908 and forget we had a stack slot for the pseudo. */
7909 if (reload_out[j] != reload_in[j]
7910 && REG_N_DEATHS (REGNO (reg)) == 1
7911 && REG_BASIC_BLOCK (REGNO (reg)) >= 0
7912 && find_regno_note (insn, REG_DEAD, REGNO (reg)))
7913 {
7914 rtx i2;
7915
7916 /* We know that it was used only between here
7917 and the beginning of the current basic block.
7918 (We also know that the last use before INSN was
7919 the output reload we are thinking of deleting, but never mind that.)
7920 Search that range; see if any ref remains. */
7921 for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
7922 {
7923 rtx set = single_set (i2);
7924
7925 /* Uses which just store in the pseudo don't count,
7926 since if they are the only uses, they are dead. */
7927 if (set != 0 && SET_DEST (set) == reg)
7928 continue;
7929 if (GET_CODE (i2) == CODE_LABEL
7930 || GET_CODE (i2) == JUMP_INSN)
7931 break;
7932 if ((GET_CODE (i2) == INSN || GET_CODE (i2) == CALL_INSN)
7933 && reg_mentioned_p (reg, PATTERN (i2)))
7934 {
7935 /* Some other ref remains; just delete the output reload we
7936 know to be dead. */
7937 delete_address_reloads (output_reload_insn, insn);
7938 PUT_CODE (output_reload_insn, NOTE);
7939 NOTE_SOURCE_FILE (output_reload_insn) = 0;
7940 NOTE_LINE_NUMBER (output_reload_insn) = NOTE_INSN_DELETED;
7941 return;
7942 }
7943 }
7944
7945 /* Delete the now-dead stores into this pseudo. */
7946 for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
7947 {
7948 rtx set = single_set (i2);
7949
7950 if (set != 0 && SET_DEST (set) == reg)
7951 {
7952 delete_address_reloads (i2, insn);
7953 /* This might be a basic block head,
7954 thus don't use delete_insn. */
7955 PUT_CODE (i2, NOTE);
7956 NOTE_SOURCE_FILE (i2) = 0;
7957 NOTE_LINE_NUMBER (i2) = NOTE_INSN_DELETED;
7958 }
7959 if (GET_CODE (i2) == CODE_LABEL
7960 || GET_CODE (i2) == JUMP_INSN)
7961 break;
7962 }
7963
7964 /* For the debugging info,
7965 say the pseudo lives in this reload reg. */
7966 reg_renumber[REGNO (reg)] = REGNO (reload_reg_rtx[j]);
7967 alter_reg (REGNO (reg), -1);
7968 }
7969 delete_address_reloads (output_reload_insn, insn);
7970 PUT_CODE (output_reload_insn, NOTE);
7971 NOTE_SOURCE_FILE (output_reload_insn) = 0;
7972 NOTE_LINE_NUMBER (output_reload_insn) = NOTE_INSN_DELETED;
7973
7974 }
7975
7976 /* We are going to delete DEAD_INSN. Recursively delete loads of
7977 reload registers used in DEAD_INSN that are not used till CURRENT_INSN.
7978 CURRENT_INSN is being reloaded, so we have to check its reloads too. */
7979 static void
7980 delete_address_reloads (dead_insn, current_insn)
7981 rtx dead_insn, current_insn;
7982 {
7983 rtx set = single_set (dead_insn);
7984 rtx set2, dst, prev, next;
7985 if (set)
7986 {
7987 dst = SET_DEST (set);
7988 if (GET_CODE (dst) == MEM)
7989 delete_address_reloads_1 (dead_insn, XEXP (dst, 0), current_insn);
7990 }
7991 /* If we deleted the store from a reloaded post_{in,de}c expression,
7992 we can delete the matching adds. */
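/* Sketch of the pattern recognized below (offsets invented): around the
   store being deleted we may have

       prev:      (set (reg A) (plus (reg A) (const_int -4)))
       dead_insn: (set (mem (reg A)) ...)
       next:      (set (reg A) (plus (reg A) (const_int 4)))

   Once DEAD_INSN is gone the two adds cancel, so both can be deleted.  */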
7993 prev = PREV_INSN (dead_insn);
7994 next = NEXT_INSN (dead_insn);
7995 if (! prev || ! next)
7996 return;
7997 set = single_set (next);
7998 set2 = single_set (prev);
7999 if (! set || ! set2
8000 || GET_CODE (SET_SRC (set)) != PLUS || GET_CODE (SET_SRC (set2)) != PLUS
8001 || GET_CODE (XEXP (SET_SRC (set), 1)) != CONST_INT
8002 || GET_CODE (XEXP (SET_SRC (set2), 1)) != CONST_INT)
8003 return;
8004 dst = SET_DEST (set);
8005 if (! rtx_equal_p (dst, SET_DEST (set2))
8006 || ! rtx_equal_p (dst, XEXP (SET_SRC (set), 0))
8007 || ! rtx_equal_p (dst, XEXP (SET_SRC (set2), 0))
8008 || (INTVAL (XEXP (SET_SRC (set), 1))
8009 != - INTVAL (XEXP (SET_SRC (set2), 1))))
8010 return;
8011 delete_insn (prev);
8012 delete_insn (next);
8013 }
8014
8015 /* Subfunction of delete_address_reloads: process registers found in X. */
8016 static void
8017 delete_address_reloads_1 (dead_insn, x, current_insn)
8018 rtx dead_insn, x, current_insn;
8019 {
8020 rtx prev, set, dst, i2;
8021 int i, j;
8022 enum rtx_code code = GET_CODE (x);
8023
8024 if (code != REG)
8025 {
8026 char *fmt = GET_RTX_FORMAT (code);
8027 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
8028 {
8029 if (fmt[i] == 'e')
8030 delete_address_reloads_1 (dead_insn, XEXP (x, i), current_insn);
8031 else if (fmt[i] == 'E')
8032 {
8033 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
8034 delete_address_reloads_1 (dead_insn, XVECEXP (x, i, j),
8035 current_insn);
8036 }
8037 }
8038 return;
8039 }
8040
8041 if (spill_reg_order[REGNO (x)] < 0)
8042 return;
8043
8044 /* Scan backwards for the insn that sets X. This might be far back
8045 due to inheritance. */
8046 for (prev = PREV_INSN (dead_insn); prev; prev = PREV_INSN (prev))
8047 {
8048 code = GET_CODE (prev);
8049 if (code == CODE_LABEL || code == JUMP_INSN)
8050 return;
8051 if (GET_RTX_CLASS (code) != 'i')
8052 continue;
8053 if (reg_set_p (x, PATTERN (prev)))
8054 break;
8055 if (reg_referenced_p (x, PATTERN (prev)))
8056 return;
8057 }
8058 if (! prev || INSN_UID (prev) < reload_first_uid)
8059 return;
8060 /* Check that PREV only sets the reload register. */
8061 set = single_set (prev);
8062 if (! set)
8063 return;
8064 dst = SET_DEST (set);
8065 if (GET_CODE (dst) != REG
8066 || ! rtx_equal_p (dst, x))
8067 return;
8068 if (! reg_set_p (dst, PATTERN (dead_insn)))
8069 {
8070 /* Check if DST was used in a later insn -
8071 it might have been inherited. */
8072 for (i2 = NEXT_INSN (dead_insn); i2; i2 = NEXT_INSN (i2))
8073 {
8074 if (GET_CODE (i2) == CODE_LABEL)
8075 break;
8076 if (GET_RTX_CLASS (GET_CODE (i2)) != 'i')
8077 continue;
8078 if (reg_referenced_p (dst, PATTERN (i2)))
8079 {
8080 /* If there is a reference to the register in the current insn,
8081 it might be loaded in a non-inherited reload. If no other
8082 reload uses it, that means the register is set before
8083 referenced. */
8084 if (i2 == current_insn)
8085 {
8086 for (j = n_reloads - 1; j >= 0; j--)
8087 if ((reload_reg_rtx[j] == dst && reload_inherited[j])
8088 || reload_override_in[j] == dst)
8089 return;
8090 for (j = n_reloads - 1; j >= 0; j--)
8091 if (reload_in[j] && reload_reg_rtx[j] == dst)
8092 break;
8093 if (j >= 0)
8094 break;
8095 }
8096 return;
8097 }
8098 if (GET_CODE (i2) == JUMP_INSN)
8099 break;
8100 if (reg_set_p (dst, PATTERN (i2)))
8101 break;
8102 /* If DST is still live at CURRENT_INSN, check if it is used for
8103 any reload. */
8104 if (i2 == current_insn)
8105 {
8106 for (j = n_reloads - 1; j >= 0; j--)
8107 if ((reload_reg_rtx[j] == dst && reload_inherited[j])
8108 || reload_override_in[j] == dst)
8109 return;
8110 /* ??? We can't finish the loop here, because dst might be
8111 allocated to a pseudo in this block if no reload in this
8112 block needs any of the classes containing DST - see
8113 spill_hard_reg. There is no easy way to tell this, so we
8114 have to scan till the end of the basic block. */
8115 }
8116 }
8117 }
8118 delete_address_reloads_1 (prev, SET_SRC (set), current_insn);
8119 reg_reloaded_contents[REGNO (dst)] = -1;
8120 /* Can't use delete_insn here because PREV might be a basic block head. */
8121 PUT_CODE (prev, NOTE);
8122 NOTE_LINE_NUMBER (prev) = NOTE_INSN_DELETED;
8123 NOTE_SOURCE_FILE (prev) = 0;
8124 }
8125 \f
8126 /* Output reload-insns to reload VALUE into RELOADREG.
8127 VALUE is an autoincrement or autodecrement RTX whose operand
8128 is a register or memory location;
8129 so reloading involves incrementing that location.
8130 IN is either identical to VALUE, or some cheaper place to reload from.
8131
8132 INC_AMOUNT is the number to increment or decrement by (always positive).
8133 This cannot be deduced from VALUE.
8134
8135 Return the instruction that stores into RELOADREG. */
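/* For illustration (not part of the original comment): with
   VALUE == (post_inc:SI (reg:SI R)) and INC_AMOUNT == 4, R is first
   copied into RELOADREG; then, when IN == VALUE and the target
   recognizes (set R (plus R (const_int 4))), that add is emitted
   directly; otherwise RELOADREG is incremented by 4, stored back into
   R, and decremented by 4 again so it still holds the pre-increment
   value expected by the insn being reloaded.  */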
8136
8137 static rtx
8138 inc_for_reload (reloadreg, in, value, inc_amount)
8139 rtx reloadreg;
8140 rtx in, value;
8141 int inc_amount;
8142 {
8143 /* REG or MEM to be copied and incremented. */
8144 rtx incloc = XEXP (value, 0);
8145 /* Nonzero if increment after copying. */
8146 int post = (GET_CODE (value) == POST_DEC || GET_CODE (value) == POST_INC);
8147 rtx last;
8148 rtx inc;
8149 rtx add_insn;
8150 int code;
8151 rtx store;
8152 rtx real_in = in == value ? XEXP (in, 0) : in;
8153
8154 /* No hard register is equivalent to this register after
8155 inc/dec operation. If REG_LAST_RELOAD_REG were non-zero,
8156 we could inc/dec that register as well (maybe even using it for
8157 the source), but I'm not sure it's worth worrying about. */
8158 if (GET_CODE (incloc) == REG)
8159 reg_last_reload_reg[REGNO (incloc)] = 0;
8160
8161 if (GET_CODE (value) == PRE_DEC || GET_CODE (value) == POST_DEC)
8162 inc_amount = - inc_amount;
8163
8164 inc = GEN_INT (inc_amount);
8165
8166 /* If this is post-increment, first copy the location to the reload reg. */
8167 if (post && real_in != reloadreg)
8168 emit_insn (gen_move_insn (reloadreg, real_in));
8169
8170 if (in == value)
8171 {
8172 /* See if we can directly increment INCLOC. Use a method similar to
8173 that in gen_reload. */
8174
8175 last = get_last_insn ();
8176 add_insn = emit_insn (gen_rtx_SET (VOIDmode, incloc,
8177 gen_rtx_PLUS (GET_MODE (incloc),
8178 incloc, inc)));
8179
8180 code = recog_memoized (add_insn);
8181 if (code >= 0)
8182 {
8183 extract_insn (add_insn);
8184 if (constrain_operands (1))
8185 {
8186 /* If this is a pre-increment and we have incremented the value
8187 where it lives, copy the incremented value to RELOADREG to
8188 be used as an address. */
8189
8190 if (! post)
8191 emit_insn (gen_move_insn (reloadreg, incloc));
8192
8193 return add_insn;
8194 }
8195 }
8196 delete_insns_since (last);
8197 }
8198
8199 /* If we couldn't do the increment directly, we must increment in RELOADREG.
8200 The way we do this depends on whether this is pre- or post-increment.
8201 For pre-increment, copy INCLOC to the reload register, increment it
8202 there, then save back. */
8203
8204 if (! post)
8205 {
8206 if (in != reloadreg)
8207 emit_insn (gen_move_insn (reloadreg, real_in));
8208 emit_insn (gen_add2_insn (reloadreg, inc));
8209 store = emit_insn (gen_move_insn (incloc, reloadreg));
8210 }
8211 else
8212 {
8213 /* Postincrement.
8214 Because this might be a jump insn or a compare, and because RELOADREG
8215 may not be available after the insn in an input reload, we must do
8216 the incrementation before the insn being reloaded for.
8217
8218 We have already copied IN to RELOADREG. Increment the copy in
8219 RELOADREG, save that back, then decrement RELOADREG so it has
8220 the original value. */
8221
8222 emit_insn (gen_add2_insn (reloadreg, inc));
8223 store = emit_insn (gen_move_insn (incloc, reloadreg));
8224 emit_insn (gen_add2_insn (reloadreg, GEN_INT (-inc_amount)));
8225 }
8226
8227 return store;
8228 }
8229 \f
8230 /* Return 1 if we are certain that the constraint-string STRING allows
8231 the hard register REG. Return 0 if we can't be sure of this. */
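/* For example (illustration only): for a general register, a
   constraint string like "r" or "g,r" makes every alternative accept
   the register, so we return 1; a string like "r,m" does not, because
   the memory alternative gives no such guarantee, so we return 0.  */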
8232
8233 static int
8234 constraint_accepts_reg_p (string, reg)
8235 char *string;
8236 rtx reg;
8237 {
8238 int value = 0;
8239 int regno = true_regnum (reg);
8240 int c;
8241
8242 /* Initialize for first alternative. */
8243 value = 0;
8244 /* Check that each alternative contains `g' or `r'. */
8245 while (1)
8246 switch (c = *string++)
8247 {
8248 case 0:
8249 /* If an alternative lacks `g' or `r', we lose. */
8250 return value;
8251 case ',':
8252 /* If an alternative lacks `g' or `r', we lose. */
8253 if (value == 0)
8254 return 0;
8255 /* Initialize for next alternative. */
8256 value = 0;
8257 break;
8258 case 'g':
8259 case 'r':
8260 /* Any general reg wins for this alternative. */
8261 if (TEST_HARD_REG_BIT (reg_class_contents[(int) GENERAL_REGS], regno))
8262 value = 1;
8263 break;
8264 default:
8265 /* Any reg in specified class wins for this alternative. */
8266 {
8267 enum reg_class class = REG_CLASS_FROM_LETTER (c);
8268
8269 if (TEST_HARD_REG_BIT (reg_class_contents[(int) class], regno))
8270 value = 1;
8271 }
8272 }
8273 }
8274 \f
8275 /* Return the number of places FIND appears within X, but don't count
8276 an occurrence if some SET_DEST is FIND. */
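/* For example: if FIND is the (reg:SI 2) rtx appearing in
   (set (reg:SI 2) (plus:SI (reg:SI 2) (const_int 4))), the SET_DEST
   occurrence is not counted and the result is 1.  */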
8277
8278 int
8279 count_occurrences (x, find)
8280 register rtx x, find;
8281 {
8282 register int i, j;
8283 register enum rtx_code code;
8284 register char *format_ptr;
8285 int count;
8286
8287 if (x == find)
8288 return 1;
8289 if (x == 0)
8290 return 0;
8291
8292 code = GET_CODE (x);
8293
8294 switch (code)
8295 {
8296 case REG:
8297 case QUEUED:
8298 case CONST_INT:
8299 case CONST_DOUBLE:
8300 case SYMBOL_REF:
8301 case CODE_LABEL:
8302 case PC:
8303 case CC0:
8304 return 0;
8305
8306 case MEM:
8307 if (GET_CODE (find) == MEM && rtx_equal_p (x, find))
8308 return 1;
8309 break;
8310 case SET:
8311 if (SET_DEST (x) == find)
8312 return count_occurrences (SET_SRC (x), find);
8313 break;
8314
8315 default:
8316 break;
8317 }
8318
8319 format_ptr = GET_RTX_FORMAT (code);
8320 count = 0;
8321
8322 for (i = 0; i < GET_RTX_LENGTH (code); i++)
8323 {
8324 switch (*format_ptr++)
8325 {
8326 case 'e':
8327 count += count_occurrences (XEXP (x, i), find);
8328 break;
8329
8330 case 'E':
8331 if (XVEC (x, i) != NULL)
8332 {
8333 for (j = 0; j < XVECLEN (x, i); j++)
8334 count += count_occurrences (XVECEXP (x, i, j), find);
8335 }
8336 break;
8337 }
8338 }
8339 return count;
8340 }
8341 \f
8342 /* This array holds values which are equivalent to a hard register
8343 during reload_cse_regs. Each array element is an EXPR_LIST of
8344 values. Each time a hard register is set, we set the corresponding
8345 array element to the value. Each time a hard register is copied
8346 into memory, we add the memory location to the corresponding array
8347 element. We don't store values or memory addresses with side
8348 effects in this array.
8349
8350 If the value is a CONST_INT, then the mode of the containing
8351 EXPR_LIST is the mode in which that CONST_INT was referenced.
8352
8353 We sometimes clobber a specific entry in a list. In that case, we
8354 just set XEXP (list-entry, 0) to 0. */
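/* For instance (illustration only): after
     (set (reg:SI 1) (const_int 17))
     (set (mem:SI (reg:SI 6)) (reg:SI 1))
   reg_values[1] would be roughly
     (expr_list:SI (mem:SI (reg:SI 6)) (expr_list:SI (const_int 17) (nil))).  */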
8355
8356 static rtx *reg_values;
8357
8358 /* This is a preallocated REG rtx which we use as a temporary in
8359 reload_cse_invalidate_regno, so that we don't need to allocate a
8360 new one each time through a loop in that function. */
8361
8362 static rtx invalidate_regno_rtx;
8363
8364 /* Invalidate any entries in reg_values which depend on REGNO,
8365 including those for REGNO itself. This is called if REGNO is
8366 changing. If CLOBBER is true, then always forget anything we
8367 currently know about REGNO. MODE is the mode of the assignment to
8368 REGNO, which is used to determine how many hard registers are being
8369 changed. If MODE is VOIDmode, then only REGNO is being changed;
8370 this is used when invalidating call clobbered registers across a
8371 call. */
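/* For example, with CLOBBER nonzero and a DImode assignment to hard
   reg 3 on a machine where DImode needs two registers, reg_values[3]
   and reg_values[4] are cleared, and any value recorded for another
   register that mentions reg 3 or reg 4 is dropped as well.  */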
8372
8373 static void
8374 reload_cse_invalidate_regno (regno, mode, clobber)
8375 int regno;
8376 enum machine_mode mode;
8377 int clobber;
8378 {
8379 int endregno;
8380 register int i;
8381
8382 /* Our callers don't always go through true_regnum; we may see a
8383 pseudo-register here from a CLOBBER or the like. We probably
8384 won't ever see a pseudo-register that has a real register number,
8385 but we check anyway for safety. */
8386 if (regno >= FIRST_PSEUDO_REGISTER)
8387 regno = reg_renumber[regno];
8388 if (regno < 0)
8389 return;
8390
8391 if (mode == VOIDmode)
8392 endregno = regno + 1;
8393 else
8394 endregno = regno + HARD_REGNO_NREGS (regno, mode);
8395
8396 if (clobber)
8397 for (i = regno; i < endregno; i++)
8398 reg_values[i] = 0;
8399
8400 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
8401 {
8402 rtx x;
8403
8404 for (x = reg_values[i]; x; x = XEXP (x, 1))
8405 {
8406 if (XEXP (x, 0) != 0
8407 && refers_to_regno_p (regno, endregno, XEXP (x, 0), NULL_PTR))
8408 {
8409 /* If this is the only entry on the list, clear
8410 reg_values[i]. Otherwise, just clear this entry on
8411 the list. */
8412 if (XEXP (x, 1) == 0 && x == reg_values[i])
8413 {
8414 reg_values[i] = 0;
8415 break;
8416 }
8417 XEXP (x, 0) = 0;
8418 }
8419 }
8420 }
8421
8422 /* We must look at earlier registers, in case REGNO is part of a
8423 multi word value but is not the first register. If an earlier
8424 register has a value in a mode which overlaps REGNO, then we must
8425 invalidate that earlier register. Note that we do not need to
8426 check REGNO or later registers (we must not check REGNO itself,
8427 because we would incorrectly conclude that there was a conflict). */
8428
8429 for (i = 0; i < regno; i++)
8430 {
8431 rtx x;
8432
8433 for (x = reg_values[i]; x; x = XEXP (x, 1))
8434 {
8435 if (XEXP (x, 0) != 0)
8436 {
8437 PUT_MODE (invalidate_regno_rtx, GET_MODE (x));
8438 REGNO (invalidate_regno_rtx) = i;
8439 if (refers_to_regno_p (regno, endregno, invalidate_regno_rtx,
8440 NULL_PTR))
8441 {
8442 reload_cse_invalidate_regno (i, VOIDmode, 1);
8443 break;
8444 }
8445 }
8446 }
8447 }
8448 }
8449
8450 /* The memory at address MEM_BASE is being changed.
8451 Return whether this change will invalidate VAL. */
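/* Roughly: a store through MEM_BASE invalidates any MEM recorded in
   VAL that might overlap it (decided by anti_dependence), and BLKmode
   on either side is treated as conflicting with everything.  */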
8452
8453 static int
8454 reload_cse_mem_conflict_p (mem_base, val)
8455 rtx mem_base;
8456 rtx val;
8457 {
8458 enum rtx_code code;
8459 char *fmt;
8460 int i;
8461
8462 code = GET_CODE (val);
8463 switch (code)
8464 {
8465 /* Get rid of a few simple cases quickly. */
8466 case REG:
8467 case PC:
8468 case CC0:
8469 case SCRATCH:
8470 case CONST:
8471 case CONST_INT:
8472 case CONST_DOUBLE:
8473 case SYMBOL_REF:
8474 case LABEL_REF:
8475 return 0;
8476
8477 case MEM:
8478 if (GET_MODE (mem_base) == BLKmode
8479 || GET_MODE (val) == BLKmode)
8480 return 1;
8481 if (anti_dependence (val, mem_base))
8482 return 1;
8483 /* The address may contain nested MEMs. */
8484 break;
8485
8486 default:
8487 break;
8488 }
8489
8490 fmt = GET_RTX_FORMAT (code);
8491
8492 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
8493 {
8494 if (fmt[i] == 'e')
8495 {
8496 if (reload_cse_mem_conflict_p (mem_base, XEXP (val, i)))
8497 return 1;
8498 }
8499 else if (fmt[i] == 'E')
8500 {
8501 int j;
8502
8503 for (j = 0; j < XVECLEN (val, i); j++)
8504 if (reload_cse_mem_conflict_p (mem_base, XVECEXP (val, i, j)))
8505 return 1;
8506 }
8507 }
8508
8509 return 0;
8510 }
8511
8512 /* Invalidate any entries in reg_values which are changed because of a
8513 store to MEM_RTX. If this is called because of a non-const call
8514 instruction, MEM_RTX is (mem:BLK const0_rtx). */
8515
8516 static void
8517 reload_cse_invalidate_mem (mem_rtx)
8518 rtx mem_rtx;
8519 {
8520 register int i;
8521
8522 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
8523 {
8524 rtx x;
8525
8526 for (x = reg_values[i]; x; x = XEXP (x, 1))
8527 {
8528 if (XEXP (x, 0) != 0
8529 && reload_cse_mem_conflict_p (mem_rtx, XEXP (x, 0)))
8530 {
8531 /* If this is the only entry on the list, clear
8532 reg_values[i]. Otherwise, just clear this entry on
8533 the list. */
8534 if (XEXP (x, 1) == 0 && x == reg_values[i])
8535 {
8536 reg_values[i] = 0;
8537 break;
8538 }
8539 XEXP (x, 0) = 0;
8540 }
8541 }
8542 }
8543 }
8544
8545 /* Invalidate DEST, which is being assigned to or clobbered. The
8546 second parameter exists so that this function can be passed to
8547 note_stores; it is ignored. */
8548
8549 static void
8550 reload_cse_invalidate_rtx (dest, ignore)
8551 rtx dest;
8552 rtx ignore ATTRIBUTE_UNUSED;
8553 {
8554 while (GET_CODE (dest) == STRICT_LOW_PART
8555 || GET_CODE (dest) == SIGN_EXTRACT
8556 || GET_CODE (dest) == ZERO_EXTRACT
8557 || GET_CODE (dest) == SUBREG)
8558 dest = XEXP (dest, 0);
8559
8560 if (GET_CODE (dest) == REG)
8561 reload_cse_invalidate_regno (REGNO (dest), GET_MODE (dest), 1);
8562 else if (GET_CODE (dest) == MEM)
8563 reload_cse_invalidate_mem (dest);
8564 }
8565
8566 /* Do a very simple CSE pass over the hard registers.
8567
8568 This function detects no-op moves where we happened to assign two
8569 different pseudo-registers to the same hard register, and then
8570 copied one to the other. Reload will generate a useless
8571 instruction copying a register to itself.
8572
8573 This function also detects cases where we load a value from memory
8574 into two different registers, and (if memory is more expensive than
8575 registers) changes it to simply copy the first register into the
8576 second register.
8577
8578 Another optimization is performed that scans the operands of each
8579 instruction to see whether the value is already available in a
8580 hard register. It then replaces the operand with the hard register
8581 if possible, much like an optional reload would. */
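/* For example: if pseudos 10 and 11 were both assigned hard reg 3, a
   copy between them becomes (set (reg:SI 3) (reg:SI 3)) after reload;
   reload_cse_noop_set_p detects this below and the insn is turned
   into a deleted-insn note (or into a USE when it sets the function's
   return value).  */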
8582
8583 static void
8584 reload_cse_regs_1 (first)
8585 rtx first;
8586 {
8587 char *firstobj;
8588 rtx callmem;
8589 register int i;
8590 rtx insn;
8591
8592 init_alias_analysis ();
8593
8594 reg_values = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
8595 bzero ((char *)reg_values, FIRST_PSEUDO_REGISTER * sizeof (rtx));
8596
8597 /* Create our EXPR_LIST structures on reload_obstack, so that we can
8598 free them when we are done. */
8599 push_obstacks (&reload_obstack, &reload_obstack);
8600 firstobj = (char *) obstack_alloc (&reload_obstack, 0);
8601
8602 /* We pass this to reload_cse_invalidate_mem to invalidate all of
8603 memory for a non-const call instruction. */
8604 callmem = gen_rtx_MEM (BLKmode, const0_rtx);
8605
8606 /* This is used in reload_cse_invalidate_regno to avoid consing a
8607 new REG in a loop in that function. */
8608 invalidate_regno_rtx = gen_rtx_REG (VOIDmode, 0);
8609
8610 for (insn = first; insn; insn = NEXT_INSN (insn))
8611 {
8612 rtx body;
8613
8614 if (GET_CODE (insn) == CODE_LABEL)
8615 {
8616 /* Forget all the register values at a code label. We don't
8617 try to do anything clever around jumps. */
8618 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
8619 reg_values[i] = 0;
8620
8621 continue;
8622 }
8623
8624 #ifdef NON_SAVING_SETJMP
8625 if (NON_SAVING_SETJMP && GET_CODE (insn) == NOTE
8626 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
8627 {
8628 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
8629 reg_values[i] = 0;
8630
8631 continue;
8632 }
8633 #endif
8634
8635 if (GET_RTX_CLASS (GET_CODE (insn)) != 'i')
8636 continue;
8637
8638 /* If this is a call instruction, forget anything stored in a
8639 call clobbered register, or, if this is not a const call, in
8640 memory. */
8641 if (GET_CODE (insn) == CALL_INSN)
8642 {
8643 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
8644 if (call_used_regs[i])
8645 reload_cse_invalidate_regno (i, VOIDmode, 1);
8646
8647 if (! CONST_CALL_P (insn))
8648 reload_cse_invalidate_mem (callmem);
8649 }
8650
8651 body = PATTERN (insn);
8652 if (GET_CODE (body) == SET)
8653 {
8654 int count = 0;
8655 if (reload_cse_noop_set_p (body, insn))
8656 {
8657 /* If this sets the return value of the function, we must keep
8658 a USE around, in case this is in a different basic block
8659 than the final USE. Otherwise, we could lose important
8660 register liveness information on SMALL_REGISTER_CLASSES
8661 machines, where return registers might be used as spills:
8662 subsequent passes assume that spill registers are dead at
8663 the end of a basic block. */
8664 if (REG_FUNCTION_VALUE_P (SET_DEST (body)))
8665 {
8666 pop_obstacks ();
8667 PATTERN (insn) = gen_rtx_USE (VOIDmode, SET_DEST (body));
8668 INSN_CODE (insn) = -1;
8669 REG_NOTES (insn) = NULL_RTX;
8670 push_obstacks (&reload_obstack, &reload_obstack);
8671 }
8672 else
8673 {
8674 PUT_CODE (insn, NOTE);
8675 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
8676 NOTE_SOURCE_FILE (insn) = 0;
8677 }
8678
8679 /* We're done with this insn. */
8680 continue;
8681 }
8682
8683 /* It's not a no-op, but we can try to simplify it. */
8684 count += reload_cse_simplify_set (body, insn);
8685
8686 if (count > 0)
8687 apply_change_group ();
8688 else
8689 reload_cse_simplify_operands (insn);
8690
8691 reload_cse_record_set (body, body);
8692 }
8693 else if (GET_CODE (body) == PARALLEL)
8694 {
8695 int count = 0;
8696 rtx value = NULL_RTX;
8697
8698 /* If every action in a PARALLEL is a noop, we can delete
8699 the entire PARALLEL. */
8700 for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
8701 {
8702 rtx part = XVECEXP (body, 0, i);
8703 if (GET_CODE (part) == SET)
8704 {
8705 if (! reload_cse_noop_set_p (part, insn))
8706 break;
8707 if (REG_FUNCTION_VALUE_P (SET_DEST (part)))
8708 {
8709 if (value)
8710 break;
8711 value = SET_DEST (part);
8712 }
8713 }
8714 else if (GET_CODE (part) != CLOBBER)
8715 break;
8716 }
8717 if (i < 0)
8718 {
8719 if (value)
8720 {
8721 pop_obstacks ();
8722 PATTERN (insn) = gen_rtx_USE (VOIDmode, value);
8723 INSN_CODE (insn) = -1;
8724 REG_NOTES (insn) = NULL_RTX;
8725 push_obstacks (&reload_obstack, &reload_obstack);
8726 }
8727 else
8728 {
8729 PUT_CODE (insn, NOTE);
8730 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
8731 NOTE_SOURCE_FILE (insn) = 0;
8732 }
8733
8734 /* We're done with this insn. */
8735 continue;
8736 }
8737
8738 /* It's not a no-op, but we can try to simplify it. */
8739 for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
8740 if (GET_CODE (XVECEXP (body, 0, i)) == SET)
8741 count += reload_cse_simplify_set (XVECEXP (body, 0, i), insn);
8742
8743 if (count > 0)
8744 apply_change_group ();
8745 else
8746 reload_cse_simplify_operands (insn);
8747
8748 /* Look through the PARALLEL and record the values being
8749 set, if possible. Also handle any CLOBBERs. */
8750 for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
8751 {
8752 rtx x = XVECEXP (body, 0, i);
8753
8754 if (GET_CODE (x) == SET)
8755 reload_cse_record_set (x, body);
8756 else
8757 note_stores (x, reload_cse_invalidate_rtx);
8758 }
8759 }
8760 else
8761 note_stores (body, reload_cse_invalidate_rtx);
8762
8763 #ifdef AUTO_INC_DEC
8764 /* Clobber any registers which appear in REG_INC notes. We
8765 could keep track of the changes to their values, but it is
8766 unlikely to help. */
8767 {
8768 rtx x;
8769
8770 for (x = REG_NOTES (insn); x; x = XEXP (x, 1))
8771 if (REG_NOTE_KIND (x) == REG_INC)
8772 reload_cse_invalidate_rtx (XEXP (x, 0), NULL_RTX);
8773 }
8774 #endif
8775
8776 /* Look for any CLOBBERs in CALL_INSN_FUNCTION_USAGE, but only
8777 after we have processed the insn. */
8778 if (GET_CODE (insn) == CALL_INSN)
8779 {
8780 rtx x;
8781
8782 for (x = CALL_INSN_FUNCTION_USAGE (insn); x; x = XEXP (x, 1))
8783 if (GET_CODE (XEXP (x, 0)) == CLOBBER)
8784 reload_cse_invalidate_rtx (XEXP (XEXP (x, 0), 0), NULL_RTX);
8785 }
8786 }
8787
8788 /* Free all the temporary structures we created, and go back to the
8789 regular obstacks. */
8790 obstack_free (&reload_obstack, firstobj);
8791 pop_obstacks ();
8792 }
8793
8794 /* Call cse / combine like post-reload optimization phases.
8795 FIRST is the first instruction. */
8796 void
8797 reload_cse_regs (first)
8798 rtx first;
8799 {
8800 reload_cse_regs_1 (first);
8801 reload_combine ();
8802 reload_cse_move2add (first);
8803 if (flag_expensive_optimizations)
8804 reload_cse_regs_1 (first);
8805 }
8806
8807 /* Return whether the values known for REGNO are equal to VAL. MODE
8808 is the mode of the object that VAL is being copied to; this matters
8809 if VAL is a CONST_INT. */
8810
8811 static int
8812 reload_cse_regno_equal_p (regno, val, mode)
8813 int regno;
8814 rtx val;
8815 enum machine_mode mode;
8816 {
8817 rtx x;
8818
8819 if (val == 0)
8820 return 0;
8821
8822 for (x = reg_values[regno]; x; x = XEXP (x, 1))
8823 if (XEXP (x, 0) != 0
8824 && rtx_equal_p (XEXP (x, 0), val)
8825 && (! flag_float_store || GET_CODE (XEXP (x, 0)) != MEM
8826 || GET_MODE_CLASS (GET_MODE (x)) != MODE_FLOAT)
8827 && (GET_CODE (val) != CONST_INT
8828 || mode == GET_MODE (x)
8829 || (GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (x))
8830 /* On a big endian machine if the value spans more than
8831 one register then this register holds the high part of
8832 it and we can't use it.
8833
8834 ??? We should also compare with the high part of the
8835 value. */
8836 && !(WORDS_BIG_ENDIAN
8837 && HARD_REGNO_NREGS (regno, GET_MODE (x)) > 1)
8838 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
8839 GET_MODE_BITSIZE (GET_MODE (x))))))
8840 return 1;
8841
8842 return 0;
8843 }
8844
8845 /* See whether a single set is a noop. SET is the set instruction we
8846 should check, and INSN is the instruction from which it came. */
8847
8848 static int
8849 reload_cse_noop_set_p (set, insn)
8850 rtx set;
8851 rtx insn;
8852 {
8853 rtx src, dest;
8854 enum machine_mode dest_mode;
8855 int dreg, sreg;
8856 int ret;
8857
8858 src = SET_SRC (set);
8859 dest = SET_DEST (set);
8860 dest_mode = GET_MODE (dest);
8861
8862 if (side_effects_p (src))
8863 return 0;
8864
8865 dreg = true_regnum (dest);
8866 sreg = true_regnum (src);
8867
8868 /* Check for setting a register to itself. In this case, we don't
8869 have to worry about REG_DEAD notes. */
8870 if (dreg >= 0 && dreg == sreg)
8871 return 1;
8872
8873 ret = 0;
8874 if (dreg >= 0)
8875 {
8876 /* Check for setting a register to itself. */
8877 if (dreg == sreg)
8878 ret = 1;
8879
8880 /* Check for setting a register to a value which we already know
8881 is in the register. */
8882 else if (reload_cse_regno_equal_p (dreg, src, dest_mode))
8883 ret = 1;
8884
8885 /* Check for setting a register DREG to another register SREG
8886 where SREG is equal to a value which is already in DREG. */
8887 else if (sreg >= 0)
8888 {
8889 rtx x;
8890
8891 for (x = reg_values[sreg]; x; x = XEXP (x, 1))
8892 {
8893 rtx tmp;
8894
8895 if (XEXP (x, 0) == 0)
8896 continue;
8897
8898 if (dest_mode == GET_MODE (x))
8899 tmp = XEXP (x, 0);
8900 else if (GET_MODE_BITSIZE (dest_mode)
8901 < GET_MODE_BITSIZE (GET_MODE (x)))
8902 tmp = gen_lowpart_common (dest_mode, XEXP (x, 0));
8903 else
8904 continue;
8905
8906 if (tmp
8907 && reload_cse_regno_equal_p (dreg, tmp, dest_mode))
8908 {
8909 ret = 1;
8910 break;
8911 }
8912 }
8913 }
8914 }
8915 else if (GET_CODE (dest) == MEM)
8916 {
8917 /* Check for storing a register to memory when we know that the
8918 register is equivalent to the memory location. */
8919 if (sreg >= 0
8920 && reload_cse_regno_equal_p (sreg, dest, dest_mode)
8921 && ! side_effects_p (dest))
8922 ret = 1;
8923 }
8924
8925 return ret;
8926 }
8927
8928 /* Try to simplify a single SET instruction. SET is the set pattern.
8929 INSN is the instruction it came from.
8930 This function only handles one case: if we set a register to a value
8931 which is not a register, we try to find that value in some other register
8932 and change the set into a register copy. */
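/* For instance: if (reg:SI 2) is known to hold (const_int 100) and we
   see (set (reg:SI 3) (const_int 100)), the source can be replaced by
   (reg:SI 2), provided the constant is not already cheaper than a
   register and the register-register move cost is the default 2.  */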
8933
8934 static int
8935 reload_cse_simplify_set (set, insn)
8936 rtx set;
8937 rtx insn;
8938 {
8939 int dreg;
8940 rtx src;
8941 enum machine_mode dest_mode;
8942 enum reg_class dclass;
8943 register int i;
8944
8945 dreg = true_regnum (SET_DEST (set));
8946 if (dreg < 0)
8947 return 0;
8948
8949 src = SET_SRC (set);
8950 if (side_effects_p (src) || true_regnum (src) >= 0)
8951 return 0;
8952
8953 dclass = REGNO_REG_CLASS (dreg);
8954
8955 /* If memory loads are cheaper than register copies, don't change them. */
8956 if (GET_CODE (src) == MEM
8957 && MEMORY_MOVE_COST (GET_MODE (src), dclass, 1) < 2)
8958 return 0;
8959
8960 /* If the constant is cheaper than a register, don't change it. */
8961 if (CONSTANT_P (src)
8962 && rtx_cost (src, SET) < 2)
8963 return 0;
8964
8965 dest_mode = GET_MODE (SET_DEST (set));
8966 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
8967 {
8968 if (i != dreg
8969 && REGISTER_MOVE_COST (REGNO_REG_CLASS (i), dclass) == 2
8970 && reload_cse_regno_equal_p (i, src, dest_mode))
8971 {
8972 int validated;
8973
8974 /* Pop back to the real obstacks while changing the insn. */
8975 pop_obstacks ();
8976
8977 validated = validate_change (insn, &SET_SRC (set),
8978 gen_rtx_REG (dest_mode, i), 1);
8979
8980 /* Go back to the obstack we are using for temporary
8981 storage. */
8982 push_obstacks (&reload_obstack, &reload_obstack);
8983
8984 if (validated)
8985 return 1;
8986 }
8987 }
8988 return 0;
8989 }
8990
8991 /* Try to replace operands in INSN with equivalent values that are already
8992 in registers. This can be viewed as optional reloading.
8993
8994 For each non-register operand in the insn, see if any hard regs are
8995 known to be equivalent to that operand. Record the alternatives which
8996 can accept these hard registers. Among all alternatives, select the
8997 ones which are better or equal to the one currently matching, where
8998 "better" is in terms of '?' and '!' constraints. Among the remaining
8999 alternatives, select the one which replaces most operands with
9000 hard registers. */
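/* For the ranking below, each '?' in a constraint adds 3 and each '!'
   adds 300 to an alternative's reject count; the surviving alternatives
   are then ordered first by reject count and then by how many operands
   they let us replace with hard registers.  */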
9001
9002 static int
9003 reload_cse_simplify_operands (insn)
9004 rtx insn;
9005 {
9006 #ifdef REGISTER_CONSTRAINTS
9007 int i, j;
9008
9009 char *constraints[MAX_RECOG_OPERANDS];
9010
9011 /* Vector recording how bad an alternative is. */
9012 int *alternative_reject;
9013 /* Vector recording how many registers can be introduced by choosing
9014 this alternative. */
9015 int *alternative_nregs;
9016 /* Array of vectors recording, for each operand and each alternative,
9017 which hard register to substitute, or -1 if the operand should be
9018 left as it is. */
9019 int *op_alt_regno[MAX_RECOG_OPERANDS];
9020 /* Array of alternatives, sorted in order of decreasing desirability. */
9021 int *alternative_order;
9022 rtx reg = gen_rtx_REG (VOIDmode, -1);
9023
9024 extract_insn (insn);
9025
9026 if (recog_n_alternatives == 0 || recog_n_operands == 0)
9027 return 0;
9028
9029 /* Figure out which alternative currently matches. */
9030 if (! constrain_operands (1))
9031 fatal_insn_not_found (insn);
9032
9033 alternative_reject = (int *) alloca (recog_n_alternatives * sizeof (int));
9034 alternative_nregs = (int *) alloca (recog_n_alternatives * sizeof (int));
9035 alternative_order = (int *) alloca (recog_n_alternatives * sizeof (int));
9036 bzero ((char *)alternative_reject, recog_n_alternatives * sizeof (int));
9037 bzero ((char *)alternative_nregs, recog_n_alternatives * sizeof (int));
9038
9039 for (i = 0; i < recog_n_operands; i++)
9040 {
9041 enum machine_mode mode;
9042 int regno;
9043 char *p;
9044
9045 op_alt_regno[i] = (int *) alloca (recog_n_alternatives * sizeof (int));
9046 for (j = 0; j < recog_n_alternatives; j++)
9047 op_alt_regno[i][j] = -1;
9048
9049 p = constraints[i] = recog_constraints[i];
9050 mode = recog_operand_mode[i];
9051
9052 /* Add the reject values for each alternative given by the constraints
9053 for this operand. */
9054 j = 0;
9055 while (*p != '\0')
9056 {
9057 char c = *p++;
9058 if (c == ',')
9059 j++;
9060 else if (c == '?')
9061 alternative_reject[j] += 3;
9062 else if (c == '!')
9063 alternative_reject[j] += 300;
9064 }
9065
9066 /* We won't change operands which are already registers. We
9067 also don't want to modify output operands. */
9068 regno = true_regnum (recog_operand[i]);
9069 if (regno >= 0
9070 || constraints[i][0] == '='
9071 || constraints[i][0] == '+')
9072 continue;
9073
9074 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9075 {
9076 int class = (int) NO_REGS;
9077
9078 if (! reload_cse_regno_equal_p (regno, recog_operand[i], mode))
9079 continue;
9080
9081 REGNO (reg) = regno;
9082 PUT_MODE (reg, mode);
9083
9084 /* We found a register equal to this operand. Now look for all
9085 alternatives that can accept this register and have not been
9086 assigned a register they can use yet. */
9087 j = 0;
9088 p = constraints[i];
9089 for (;;)
9090 {
9091 char c = *p++;
9092
9093 switch (c)
9094 {
9095 case '=': case '+': case '?':
9096 case '#': case '&': case '!':
9097 case '*': case '%':
9098 case '0': case '1': case '2': case '3': case '4':
9099 case 'm': case '<': case '>': case 'V': case 'o':
9100 case 'E': case 'F': case 'G': case 'H':
9101 case 's': case 'i': case 'n':
9102 case 'I': case 'J': case 'K': case 'L':
9103 case 'M': case 'N': case 'O': case 'P':
9104 #ifdef EXTRA_CONSTRAINT
9105 case 'Q': case 'R': case 'S': case 'T': case 'U':
9106 #endif
9107 case 'p': case 'X':
9108 /* These don't say anything we care about. */
9109 break;
9110
9111 case 'g': case 'r':
9112 class = reg_class_subunion[(int) class][(int) GENERAL_REGS];
9113 break;
9114
9115 default:
9116 class
9117 = reg_class_subunion[(int) class][(int) REG_CLASS_FROM_LETTER ((unsigned char)c)];
9118 break;
9119
9120 case ',': case '\0':
9121 /* See if REGNO fits this alternative, and set it up as the
9122 replacement register if we don't have one for this
9123 alternative yet and the operand being replaced is not
9124 a cheap CONST_INT. */
9125 if (op_alt_regno[i][j] == -1
9126 && reg_fits_class_p (reg, class, 0, mode)
9127 && (GET_CODE (recog_operand[i]) != CONST_INT
9128 || rtx_cost (recog_operand[i], SET) > rtx_cost (reg, SET)))
9129 {
9130 alternative_nregs[j]++;
9131 op_alt_regno[i][j] = regno;
9132 }
9133 j++;
9134 break;
9135 }
9136
9137 if (c == '\0')
9138 break;
9139 }
9140 }
9141 }
9142
9143 /* Record all alternatives which are better or equal to the currently
9144 matching one in the alternative_order array. */
9145 for (i = j = 0; i < recog_n_alternatives; i++)
9146 if (alternative_reject[i] <= alternative_reject[which_alternative])
9147 alternative_order[j++] = i;
9148 recog_n_alternatives = j;
9149
9150 /* Sort it. Given a small number of alternatives, a dumb algorithm
9151 won't hurt too much. */
9152 for (i = 0; i < recog_n_alternatives - 1; i++)
9153 {
9154 int best = i;
9155 int best_reject = alternative_reject[alternative_order[i]];
9156 int best_nregs = alternative_nregs[alternative_order[i]];
9157 int tmp;
9158
9159 for (j = i + 1; j < recog_n_alternatives; j++)
9160 {
9161 int this_reject = alternative_reject[alternative_order[j]];
9162 int this_nregs = alternative_nregs[alternative_order[j]];
9163
9164 if (this_reject < best_reject
9165 || (this_reject == best_reject && this_nregs > best_nregs))
9166 {
9167 best = j;
9168 best_reject = this_reject;
9169 best_nregs = this_nregs;
9170 }
9171 }
9172
9173 tmp = alternative_order[best];
9174 alternative_order[best] = alternative_order[i];
9175 alternative_order[i] = tmp;
9176 }
9177
9178 /* Substitute the operands as determined by op_alt_regno for the best
9179 alternative. */
9180 j = alternative_order[0];
9181
9182 /* Pop back to the real obstacks while changing the insn. */
9183 pop_obstacks ();
9184
9185 for (i = 0; i < recog_n_operands; i++)
9186 {
9187 enum machine_mode mode = recog_operand_mode[i];
9188 if (op_alt_regno[i][j] == -1)
9189 continue;
9190
9191 validate_change (insn, recog_operand_loc[i],
9192 gen_rtx_REG (mode, op_alt_regno[i][j]), 1);
9193 }
9194
9195 for (i = recog_n_dups - 1; i >= 0; i--)
9196 {
9197 int op = recog_dup_num[i];
9198 enum machine_mode mode = recog_operand_mode[op];
9199
9200 if (op_alt_regno[op][j] == -1)
9201 continue;
9202
9203 validate_change (insn, recog_dup_loc[i],
9204 gen_rtx_REG (mode, op_alt_regno[op][j]), 1);
9205 }
9206
9207 /* Go back to the obstack we are using for temporary
9208 storage. */
9209 push_obstacks (&reload_obstack, &reload_obstack);
9210
9211 return apply_change_group ();
9212 #else
9213 return 0;
9214 #endif
9215 }
9216
9217 /* These two variables are used to pass information from
9218 reload_cse_record_set to reload_cse_check_clobber. */
9219
9220 static int reload_cse_check_clobbered;
9221 static rtx reload_cse_check_src;
9222
9223 /* See if DEST overlaps with RELOAD_CSE_CHECK_SRC. If it does, set
9224 RELOAD_CSE_CHECK_CLOBBERED. This is called via note_stores. The
9225 second argument, which is passed by note_stores, is ignored. */
9226
9227 static void
9228 reload_cse_check_clobber (dest, ignore)
9229 rtx dest;
9230 rtx ignore ATTRIBUTE_UNUSED;
9231 {
9232 if (reg_overlap_mentioned_p (dest, reload_cse_check_src))
9233 reload_cse_check_clobbered = 1;
9234 }
9235
9236 /* Record the result of a SET instruction. SET is the set pattern.
9237 BODY is the pattern of the insn that it came from. */
9238
9239 static void
9240 reload_cse_record_set (set, body)
9241 rtx set;
9242 rtx body;
9243 {
9244 rtx dest, src, x;
9245 int dreg, sreg;
9246 enum machine_mode dest_mode;
9247
9248 dest = SET_DEST (set);
9249 src = SET_SRC (set);
9250 dreg = true_regnum (dest);
9251 sreg = true_regnum (src);
9252 dest_mode = GET_MODE (dest);
9253
9254 /* Some machines don't define AUTO_INC_DEC, but they still use push
9255 instructions. We need to catch that case here in order to
9256 invalidate the stack pointer correctly. Note that invalidating
9257 the stack pointer is different from invalidating DEST. */
9258 x = dest;
9259 while (GET_CODE (x) == SUBREG
9260 || GET_CODE (x) == ZERO_EXTRACT
9261 || GET_CODE (x) == SIGN_EXTRACT
9262 || GET_CODE (x) == STRICT_LOW_PART)
9263 x = XEXP (x, 0);
9264 if (push_operand (x, GET_MODE (x)))
9265 {
9266 reload_cse_invalidate_rtx (stack_pointer_rtx, NULL_RTX);
9267 reload_cse_invalidate_rtx (dest, NULL_RTX);
9268 return;
9269 }
9270
9271 /* We can only handle an assignment to a register, or a store of a
9272 register to a memory location. For other cases, we just clobber
9273 the destination. We also have to just clobber if there are side
9274 effects in SRC or DEST. */
9275 if ((dreg < 0 && GET_CODE (dest) != MEM)
9276 || side_effects_p (src)
9277 || side_effects_p (dest))
9278 {
9279 reload_cse_invalidate_rtx (dest, NULL_RTX);
9280 return;
9281 }
9282
9283 #ifdef HAVE_cc0
9284 /* We don't try to handle values involving CC, because it's a pain
9285 to keep track of when they have to be invalidated. */
9286 if (reg_mentioned_p (cc0_rtx, src)
9287 || reg_mentioned_p (cc0_rtx, dest))
9288 {
9289 reload_cse_invalidate_rtx (dest, NULL_RTX);
9290 return;
9291 }
9292 #endif
9293
9294 /* If BODY is a PARALLEL, then we need to see whether the source of
9295 SET is clobbered by some other instruction in the PARALLEL. */
9296 if (GET_CODE (body) == PARALLEL)
9297 {
9298 int i;
9299
9300 for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
9301 {
9302 rtx x;
9303
9304 x = XVECEXP (body, 0, i);
9305 if (x == set)
9306 continue;
9307
9308 reload_cse_check_clobbered = 0;
9309 reload_cse_check_src = src;
9310 note_stores (x, reload_cse_check_clobber);
9311 if (reload_cse_check_clobbered)
9312 {
9313 reload_cse_invalidate_rtx (dest, NULL_RTX);
9314 return;
9315 }
9316 }
9317 }
9318
9319 if (dreg >= 0)
9320 {
9321 int i;
9322
9323 /* This is an assignment to a register. Update the value we
9324 have stored for the register. */
9325 if (sreg >= 0)
9326 {
9327 rtx x;
9328
9329 /* This is a copy from one register to another. Any values
9330 which were valid for SREG are now valid for DREG. If the
9331 mode changes, we use gen_lowpart_common to extract only
9332 the part of the value that is copied. */
9333 reg_values[dreg] = 0;
9334 for (x = reg_values[sreg]; x; x = XEXP (x, 1))
9335 {
9336 rtx tmp;
9337
9338 if (XEXP (x, 0) == 0)
9339 continue;
9340 if (dest_mode == GET_MODE (XEXP (x, 0)))
9341 tmp = XEXP (x, 0);
9342 else if (GET_MODE_BITSIZE (dest_mode)
9343 > GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))))
9344 continue;
9345 else
9346 tmp = gen_lowpart_common (dest_mode, XEXP (x, 0));
9347 if (tmp)
9348 reg_values[dreg] = gen_rtx_EXPR_LIST (dest_mode, tmp,
9349 reg_values[dreg]);
9350 }
9351 }
9352 else
9353 reg_values[dreg] = gen_rtx_EXPR_LIST (dest_mode, src, NULL_RTX);
9354
9355 /* We've changed DREG, so invalidate any values held by other
9356 registers that depend upon it. */
9357 reload_cse_invalidate_regno (dreg, dest_mode, 0);
9358
9359 /* If this assignment changes more than one hard register,
9360 forget anything we know about the others. */
9361 for (i = 1; i < HARD_REGNO_NREGS (dreg, dest_mode); i++)
9362 reg_values[dreg + i] = 0;
9363 }
9364 else if (GET_CODE (dest) == MEM)
9365 {
9366 /* Invalidate conflicting memory locations. */
9367 reload_cse_invalidate_mem (dest);
9368
9369 /* If we're storing a register to memory, add DEST to the list
9370 in REG_VALUES. */
9371 if (sreg >= 0 && ! side_effects_p (dest))
9372 reg_values[sreg] = gen_rtx_EXPR_LIST (dest_mode, dest,
9373 reg_values[sreg]);
9374 }
9375 else
9376 {
9377 /* We should have bailed out earlier. */
9378 abort ();
9379 }
9380 }
9381 \f
9382 /* If reload couldn't use reg+reg+offset addressing, try to use reg+reg
9383 addressing now.
9384 This code might also be useful when reload gave up on reg+reg addressing
9385 because of clashes between the return register and INDEX_REG_CLASS. */
9386
9387 /* The maximum number of uses of a register we can keep track of to
9388 replace them with reg+reg addressing. */
9389 #define RELOAD_COMBINE_MAX_USES 6
9390
9391 /* INSN is the insn where a register has been used, and USEP points to the
9392 location of the register within the rtl. */
9393 struct reg_use { rtx insn, *usep; };
9394
9395 /* If the register is used in some unknown fashion, USE_INDEX is negative.
9396 If it is dead, USE_INDEX is RELOAD_COMBINE_MAX_USES, and STORE_RUID
9397 indicates where it becomes live again.
9398 Otherwise, USE_INDEX is the index of the last encountered use of the
9399 register (which is first among these we have seen since we scan backwards),
9400 OFFSET contains the constant offset that is added to the register in
9401 all encountered uses, and USE_RUID indicates the first encountered, i.e.
9402 last, of these uses. */
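/* For example: while scanning backwards, a use such as
   (mem:SI (plus:SI (reg:SI 4) (const_int 8))) records in reg_state[4]
   the location of that address expression (so it can be rewritten
   later), the offset (const_int 8), and where in the backward scan it
   was first seen; a later-recorded use with a different offset makes
   the register "used in an unknown fashion" again.  */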
9403 static struct
9404 {
9405 struct reg_use reg_use[RELOAD_COMBINE_MAX_USES];
9406 int use_index;
9407 rtx offset;
9408 int store_ruid;
9409 int use_ruid;
9410 } reg_state[FIRST_PSEUDO_REGISTER];
9411
9412 /* Reverse linear uid. This is increased in reload_combine while scanning
9413 the instructions from last to first. It is used to set last_label_ruid
9414 and the store_ruid / use_ruid fields in reg_state. */
9415 static int reload_combine_ruid;
9416
9417 static void
9418 reload_combine ()
9419 {
9420 rtx insn, set;
9421 int first_index_reg = 1, last_index_reg = 0;
9422 int i;
9423 int last_label_ruid;
9424
9425 /* If reg+reg can be used in offsettable memory addresses, the main chunk of
9426 reload has already used it where appropriate, so there is no use in
9427 trying to generate it now. */
9428 if (double_reg_address_ok && INDEX_REG_CLASS != NO_REGS)
9429 return;
9430
9431 /* To avoid wasting too much time later searching for an index register,
9432 determine the minimum and maximum index register numbers. */
9433 for (i = FIRST_PSEUDO_REGISTER - 1; i >= 0; --i)
9434 {
9435 if (TEST_HARD_REG_BIT (reg_class_contents[INDEX_REG_CLASS], i))
9436 {
9437 if (! last_index_reg)
9438 last_index_reg = i;
9439 first_index_reg = i;
9440 }
9441 }
9442 /* If no index register is available, we can quit now. */
9443 if (first_index_reg > last_index_reg)
9444 return;
9445
9446 /* Initialize last_label_ruid, reload_combine_ruid and reg_state. */
9447 last_label_ruid = reload_combine_ruid = 0;
9448 for (i = FIRST_PSEUDO_REGISTER - 1; i >= 0; --i)
9449 {
9450 if (fixed_regs[i])
9451 reg_state[i].use_index = -1;
9452 else
9453 {
9454 reg_state[i].use_index = RELOAD_COMBINE_MAX_USES;
9455 reg_state[i].store_ruid = reload_combine_ruid;
9456 }
9457 }
9458
9459 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
9460 {
9461 rtx note;
9462
9463 /* We cannot do our optimization across labels. Invalidating all the use
9464 information we have would be costly, so we just note where the label
9465 is and then later disable any optimization that would cross it. */
9466 if (GET_CODE (insn) == CODE_LABEL)
9467 last_label_ruid = reload_combine_ruid;
9468 if (GET_RTX_CLASS (GET_CODE (insn)) != 'i')
9469 continue;
9470 reload_combine_ruid++;
9471
9472 /* Look for (set (REGX) (CONST_INT))
9473 (set (REGX) (PLUS (REGX) (REGY)))
9474 ...
9475 ... (MEM (REGX)) ...
9476 and convert it to
9477 (set (REGZ) (CONST_INT))
9478 ...
9479 ... (MEM (PLUS (REGZ) (REGY)))... .
9480
9481 First, check that we have (set (REGX) (PLUS (REGX) (REGY)))
9482 and that we know all uses of REGX before it dies. */
9483 set = single_set (insn);
9484 if (set != NULL_RTX
9485 && GET_CODE (SET_DEST (set)) == REG
9486 && (HARD_REGNO_NREGS (REGNO (SET_DEST (set)),
9487 GET_MODE (SET_DEST (set)))
9488 == 1)
9489 && GET_CODE (SET_SRC (set)) == PLUS
9490 && GET_CODE (XEXP (SET_SRC (set), 1)) == REG
9491 && rtx_equal_p (XEXP (SET_SRC (set), 0), SET_DEST (set))
9492 && last_label_ruid < reg_state[REGNO (SET_DEST (set))].use_ruid)
9493 {
9494 rtx reg = SET_DEST (set);
9495 rtx plus = SET_SRC (set);
9496 rtx base = XEXP (plus, 1);
9497 rtx prev = prev_nonnote_insn (insn);
9498 rtx prev_set = prev ? single_set (prev) : NULL_RTX;
9499 int regno = REGNO (reg);
9500 rtx const_reg;
9501 rtx reg_sum = NULL_RTX;
9502
9503 /* Now, we need an index register.
9504 We'll set index_reg to this index register, const_reg to the
9505 register that is to be loaded with the constant
9506 (denoted as REGZ in the substitution illustration above),
9507 and reg_sum to the register-register that we want to use to
9508 substitute uses of REG (typically in MEMs) with.
9509 First check REG and BASE for being index registers;
9510 we can use them even if they are not dead. */
9511 if (TEST_HARD_REG_BIT (reg_class_contents[INDEX_REG_CLASS], regno)
9512 || TEST_HARD_REG_BIT (reg_class_contents[INDEX_REG_CLASS],
9513 REGNO (base)))
9514 {
9515 const_reg = reg;
9516 reg_sum = plus;
9517 }
9518 else
9519 {
9520 /* Otherwise, look for a free index register. Since we have
9521 checked above that neither REG nor BASE is an index register,
9522 if we find anything at all, it will be different from these
9523 two registers. */
9524 for (i = first_index_reg; i <= last_index_reg; i++)
9525 {
9526 if (TEST_HARD_REG_BIT (reg_class_contents[INDEX_REG_CLASS], i)
9527 && reg_state[i].use_index == RELOAD_COMBINE_MAX_USES
9528 && reg_state[i].store_ruid <= reg_state[regno].use_ruid
9529 && HARD_REGNO_NREGS (i, GET_MODE (reg)) == 1)
9530 {
9531 rtx index_reg = gen_rtx_REG (GET_MODE (reg), i);
9532 const_reg = index_reg;
9533 reg_sum = gen_rtx_PLUS (GET_MODE (reg), index_reg, base);
9534 break;
9535 }
9536 }
9537 }
9538 if (prev_set
9539 && GET_CODE (SET_SRC (prev_set)) == CONST_INT
9540 && rtx_equal_p (SET_DEST (prev_set), reg)
9541 && reg_state[regno].use_index >= 0
9542 && reg_sum)
9543 {
9544 int i;
9545
9546 /* Change destination register and - if necessary - the
9547 constant value in PREV, the constant loading instruction. */
9548 validate_change (prev, &SET_DEST (prev_set), const_reg, 1);
9549 if (reg_state[regno].offset != const0_rtx)
9550 validate_change (prev,
9551 &SET_SRC (prev_set),
9552 GEN_INT (INTVAL (SET_SRC (prev_set))
9553 + INTVAL (reg_state[regno].offset)),
9554 1);
9555 /* Now for every use of REG that we have recorded, replace REG
9556 with REG_SUM. */
9557 for (i = reg_state[regno].use_index;
9558 i < RELOAD_COMBINE_MAX_USES; i++)
9559 validate_change (reg_state[regno].reg_use[i].insn,
9560 reg_state[regno].reg_use[i].usep,
9561 reg_sum, 1);
9562
9563 if (apply_change_group ())
9564 {
9565 rtx *np;
9566
9567 /* Delete the reg-reg addition. */
9568 PUT_CODE (insn, NOTE);
9569 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
9570 NOTE_SOURCE_FILE (insn) = 0;
9571
9572 if (reg_state[regno].offset != const0_rtx)
9573 {
9574 /* Previous REG_EQUIV / REG_EQUAL notes for PREV
9575 are now invalid. */
9576 for (np = &REG_NOTES (prev); *np; )
9577 {
9578 if (REG_NOTE_KIND (*np) == REG_EQUAL
9579 || REG_NOTE_KIND (*np) == REG_EQUIV)
9580 *np = XEXP (*np, 1);
9581 else
9582 np = &XEXP (*np, 1);
9583 }
9584 }
9585 reg_state[regno].use_index = RELOAD_COMBINE_MAX_USES;
9586 reg_state[REGNO (const_reg)].store_ruid = reload_combine_ruid;
9587 continue;
9588 }
9589 }
9590 }
9591 note_stores (PATTERN (insn), reload_combine_note_store);
9592 if (GET_CODE (insn) == CALL_INSN)
9593 {
9594 rtx link;
9595
9596 for (i = FIRST_PSEUDO_REGISTER - 1; i >= 0; --i)
9597 {
9598 if (call_used_regs[i])
9599 {
9600 reg_state[i].use_index = RELOAD_COMBINE_MAX_USES;
9601 reg_state[i].store_ruid = reload_combine_ruid;
9602 }
9603 }
9604 for (link = CALL_INSN_FUNCTION_USAGE (insn); link;
9605 link = XEXP (link, 1))
9606 {
9607 rtx use = XEXP (link, 0);
9608 int regno = REGNO (XEXP (use, 0));
9609 if (GET_CODE (use) == CLOBBER)
9610 {
9611 reg_state[regno].use_index = RELOAD_COMBINE_MAX_USES;
9612 reg_state[regno].store_ruid = reload_combine_ruid;
9613 }
9614 else
9615 reg_state[regno].use_index = -1;
9616 }
9617 }
9618 if (GET_CODE (insn) == JUMP_INSN)
9619 {
9620 /* Non-spill registers might be used at the jump destination in
9621 some unknown fashion, so we have to mark the unknown use. */
9622 for (i = FIRST_PSEUDO_REGISTER - 1; i >= 0; --i)
9623 reg_state[i].use_index = -1;
9627 }
9628 reload_combine_note_use (&PATTERN (insn), insn);
9629 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
9630 {
9631 if (REG_NOTE_KIND (note) == REG_INC
9632 && GET_CODE (XEXP (note, 0)) == REG)
9633 reg_state[REGNO (XEXP (note, 0))].use_index = -1;
9634 }
9635 }
9636 }
9637
9638 /* Check if DST is a register or a subreg of a register; if it is,
9639 update reg_state[regno].store_ruid and reg_state[regno].use_index
9640 accordingly. Called via note_stores from reload_combine.
9641 The second argument, SET, is ignored. */
9642 static void
9643 reload_combine_note_store (dst, set)
9644 rtx dst, set ATTRIBUTE_UNUSED;
9645 {
9646 int regno = 0;
9647 int i;
9648 unsigned size = GET_MODE_SIZE (GET_MODE (dst));
9649
9650 if (GET_CODE (dst) == SUBREG)
9651 {
9652 regno = SUBREG_WORD (dst);
9653 dst = SUBREG_REG (dst);
9654 }
9655 if (GET_CODE (dst) != REG)
9656 return;
9657 regno += REGNO (dst);
9658 /* note_stores might have stripped a STRICT_LOW_PART, so we have to be
9659 careful with registers / register parts that are not full words. */
9660 if (size < (unsigned) UNITS_PER_WORD)
9661 reg_state[regno].use_index = -1;
9662 else
9663 {
9664 for (i = size / UNITS_PER_WORD - 1 + regno; i >= regno; i--)
9665 {
9666 reg_state[i].store_ruid = reload_combine_ruid;
9667 reg_state[i].use_index = RELOAD_COMBINE_MAX_USES;
9668 }
9669 }
9670 }
9671
9672 /* XP points to a piece of rtl that has to be checked for any uses of
9673 registers.
9674 *XP is the pattern of INSN, or a part of it.
9675 Called from reload_combine, and recursively by itself. */
9676 static void
9677 reload_combine_note_use (xp, insn)
9678 rtx *xp, insn;
9679 {
9680 rtx x = *xp;
9681 enum rtx_code code = GET_CODE (x);
9682 char *fmt;
9683 int i, j;
9684 rtx offset = const0_rtx; /* For the REG case below. */
9685
9686 switch (code)
9687 {
9688 case SET:
9689 if (GET_CODE (SET_DEST (x)) == REG)
9690 {
9691 reload_combine_note_use (&SET_SRC (x), insn);
9692 return;
9693 }
9694 break;
9695
9696 case CLOBBER:
9697 if (GET_CODE (SET_DEST (x)) == REG)
9698 return;
9699 break;
9700
9701 case PLUS:
9702 /* We are interested in (plus (reg) (const_int)) . */
9703 if (GET_CODE (XEXP (x, 0)) != REG || GET_CODE (XEXP (x, 1)) != CONST_INT)
9704 break;
9705 offset = XEXP (x, 1);
9706 x = XEXP (x, 0);
9707 /* Fall through. */
9708 case REG:
9709 {
9710 int regno = REGNO (x);
9711 int use_index;
9712
9713 /* Some spurious USEs of pseudo registers might remain.
9714 Just ignore them. */
9715 if (regno >= FIRST_PSEUDO_REGISTER)
9716 return;
9717
9718 /* If this register is already used in some unknown fashion, we
9719 can't do anything.
9720 If we decrement the index from zero to -1, we can't store more
9721 uses, so this register becomes used in an unknown fashion. */
9722 use_index = --reg_state[regno].use_index;
9723 if (use_index < 0)
9724 return;
9725
9726 if (use_index != RELOAD_COMBINE_MAX_USES - 1)
9727 {
9728 /* We have found another use for a register that is already
9729 used later. Check if the offsets match; if not, mark the
9730 register as used in an unknown fashion. */
9731 if (! rtx_equal_p (offset, reg_state[regno].offset))
9732 {
9733 reg_state[regno].use_index = -1;
9734 return;
9735 }
9736 }
9737 else
9738 {
9739 /* This is the first use of this register we have seen since we
9740 marked it as dead. */
9741 reg_state[regno].offset = offset;
9742 reg_state[regno].use_ruid = reload_combine_ruid;
9743 }
9744 reg_state[regno].reg_use[use_index].insn = insn;
9745 reg_state[regno].reg_use[use_index].usep = xp;
9746 return;
9747 }
9748
9749 default:
9750 break;
9751 }
9752
9753 /* Recursively process the components of X. */
9754 fmt = GET_RTX_FORMAT (code);
9755 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
9756 {
9757 if (fmt[i] == 'e')
9758 reload_combine_note_use (&XEXP (x, i), insn);
9759 else if (fmt[i] == 'E')
9760 {
9761 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
9762 reload_combine_note_use (&XVECEXP (x, i, j), insn);
9763 }
9764 }
9765 }
9766 \f
9767 /* See if we can reduce the cost of a constant by replacing a move with
9768 an add. */
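/* A concrete instance (illustration only):
     (set (reg:SI 1) (const_int 100))
     ...
     (set (reg:SI 1) (const_int 104))
   can have its second move rewritten as
     (set (reg:SI 1) (plus:SI (reg:SI 1) (const_int 4)))
   when the target has an add pattern and the add is cheaper.  */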
9769 /* We cannot do our optimization across labels. Invalidating all the
9770 information about register contents we have would be costly, so we
9771 use last_label_luid (local variable of reload_cse_move2add) to note
9772 where the label is and then later disable any optimization that would
9773 cross it.
9774 reg_offset[n] / reg_base_reg[n] / reg_mode[n] are only valid if
9775 reg_set_luid[n] is larger than last_label_luid. */
9776 static int reg_set_luid[FIRST_PSEUDO_REGISTER];
9777 /* reg_offset[n] has to be CONST_INT for it and reg_base_reg[n] /
9778 reg_mode[n] to be valid.
9779 If reg_offset[n] is a CONST_INT and reg_base_reg[n] is negative, register n
9780 has been set to reg_offset[n] in mode reg_mode[n] .
9781 If reg_offset[n] is a CONST_INT and reg_base_reg[n] is non-negative,
9782 register n has been set to the sum of reg_offset[n] and register
9783 reg_base_reg[n], calculated in mode reg_mode[n] . */
9784 static rtx reg_offset[FIRST_PSEUDO_REGISTER];
9785 static int reg_base_reg[FIRST_PSEUDO_REGISTER];
9786 static enum machine_mode reg_mode[FIRST_PSEUDO_REGISTER];
9787 /* move2add_luid is linearly increased while scanning the instructions
9788 from first to last. It is used to set reg_set_luid in
9789 reload_cse_move2add and move2add_note_store. */
9790 static int move2add_luid;
9791
9792 static void
9793 reload_cse_move2add (first)
9794 rtx first;
9795 {
9796 int i;
9797 rtx insn;
9798 int last_label_luid;
9799
9800 for (i = FIRST_PSEUDO_REGISTER-1; i >= 0; i--)
9801 reg_set_luid[i] = 0;
9802
9803 last_label_luid = 0;
9804 move2add_luid = 1;
9805 for (insn = first; insn; insn = NEXT_INSN (insn), move2add_luid++)
9806 {
9807 rtx pat, note;
9808
9809 if (GET_CODE (insn) == CODE_LABEL)
9810 last_label_luid = move2add_luid;
9811 if (GET_RTX_CLASS (GET_CODE (insn)) != 'i')
9812 continue;
9813 pat = PATTERN (insn);
9814 /* For simplicity, we only perform this optimization on
9815 straightforward SETs. */
9816 if (GET_CODE (pat) == SET
9817 && GET_CODE (SET_DEST (pat)) == REG)
9818 {
9819 rtx reg = SET_DEST (pat);
9820 int regno = REGNO (reg);
9821 rtx src = SET_SRC (pat);
9822
9823 /* Check if we have valid information on the contents of this
9824 register in the mode of REG. */
9825 /* ??? We don't know how zero / sign extension is handled, hence
9826 we can't go from a narrower to a wider mode. */
9827 if (reg_set_luid[regno] > last_label_luid
9828 && (GET_MODE_SIZE (GET_MODE (reg))
9829 <= GET_MODE_SIZE (reg_mode[regno]))
9830 && GET_CODE (reg_offset[regno]) == CONST_INT)
9831 {
9832 /* Try to transform (set (REGX) (CONST_INT A))
9833 ...
9834 (set (REGX) (CONST_INT B))
9835 to
9836 (set (REGX) (CONST_INT A))
9837 ...
9838 (set (REGX) (plus (REGX) (CONST_INT B-A))) */
9839
9840 if (GET_CODE (src) == CONST_INT && reg_base_reg[regno] < 0)
9841 {
9842 int success = 0;
9843 rtx new_src = GEN_INT (INTVAL (src)
9844 - INTVAL (reg_offset[regno]));
9845 /* (set (reg) (plus (reg) (const_int 0))) is not canonical;
9846 use (set (reg) (reg)) instead.
9847 We don't delete this insn, nor do we convert it into a
9848 note, to avoid losing register notes or the return
9849 value flag. jump2 already knows how to get rid of
9850 no-op moves. */
9851 if (new_src == const0_rtx)
9852 success = validate_change (insn, &SET_SRC (pat), reg, 0);
9853 else if (rtx_cost (new_src, PLUS) < rtx_cost (src, SET)
9854 && have_add2_insn (GET_MODE (reg)))
9855 success = validate_change (insn, &PATTERN (insn),
9856 gen_add2_insn (reg, new_src), 0);
9857 reg_set_luid[regno] = move2add_luid;
9858 reg_mode[regno] = GET_MODE (reg);
9859 reg_offset[regno] = src;
9860 continue;
9861 }
9862
9863 /* Try to transform (set (REGX) (REGY))
9864 (set (REGX) (PLUS (REGX) (CONST_INT A)))
9865 ...
9866 (set (REGX) (REGY))
9867 (set (REGX) (PLUS (REGX) (CONST_INT B)))
9868 to
9869 (set (REGX) (REGY))
9870 (set (REGX) (PLUS (REGX) (CONST_INT A)))
9871 ...
9872 (set (REGX) (plus (REGX) (CONST_INT B-A))) */
9873 else if (GET_CODE (src) == REG
9874 && reg_base_reg[regno] == REGNO (src)
9875 && reg_set_luid[regno] > reg_set_luid[REGNO (src)])
9876 {
9877 rtx next = next_nonnote_insn (insn);
9878 rtx set = NULL_RTX;
9879 if (next)
9880 set = single_set (next);
9881 if (next
9882 && set
9883 && SET_DEST (set) == reg
9884 && GET_CODE (SET_SRC (set)) == PLUS
9885 && XEXP (SET_SRC (set), 0) == reg
9886 && GET_CODE (XEXP (SET_SRC (set), 1)) == CONST_INT)
9887 {
9888 rtx src3 = XEXP (SET_SRC (set), 1);
9889 rtx new_src = GEN_INT (INTVAL (src3)
9890 - INTVAL (reg_offset[regno]));
9891 int success = 0;
9892
9893 if (new_src == const0_rtx)
9894 /* See above why we create (set (reg) (reg)) here. */
9895 success
9896 = validate_change (next, &SET_SRC (set), reg, 0);
9897 else if ((rtx_cost (new_src, PLUS)
9898 < 2 + rtx_cost (src3, SET))
9899 && have_add2_insn (GET_MODE (reg)))
9900 success
9901 = validate_change (next, &PATTERN (next),
9902 gen_add2_insn (reg, new_src), 0);
9903 if (success)
9904 {
9905 /* INSN might be the first insn in a basic block
9906 if the preceding insn is a conditional jump
9907 or a possibly-throwing call. */
9908 PUT_CODE (insn, NOTE);
9909 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
9910 NOTE_SOURCE_FILE (insn) = 0;
9911 }
9912 insn = next;
9913 reg_set_luid[regno] = move2add_luid;
9914 reg_mode[regno] = GET_MODE (reg);
9915 reg_offset[regno] = src3;
9916 continue;
9917 }
9918 }
9919 }
9920 }
9921
9922 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
9923 {
9924 if (REG_NOTE_KIND (note) == REG_INC
9925 && GET_CODE (XEXP (note, 0)) == REG)
9926 {
9927 /* Indicate that this register has been recently written to,
9928 but the exact contents are not available. */
9929 int regno = REGNO (XEXP (note, 0));
9930 if (regno < FIRST_PSEUDO_REGISTER)
9931 {
9932 reg_set_luid[regno] = move2add_luid;
9933 reg_offset[regno] = note;
9934 }
9935 }
9936 }
9937 note_stores (PATTERN (insn), move2add_note_store);
9938 /* If this is a CALL_INSN, all call used registers are stored with
9939 unknown values. */
9940 if (GET_CODE (insn) == CALL_INSN)
9941 {
9942 for (i = FIRST_PSEUDO_REGISTER-1; i >= 0; i--)
9943 {
9944 if (call_used_regs[i])
9945 {
9946 reg_set_luid[i] = move2add_luid;
9947 reg_offset[i] = insn; /* Invalidate contents. */
9948 }
9949 }
9950 }
9951 }
9952 }
9953
9954 /* SET is a SET or CLOBBER that sets DST.
9955 Update reg_set_luid, reg_offset and reg_base_reg accordingly.
9956 Called from reload_cse_move2add via note_stores. */
9957 static void
9958 move2add_note_store (dst, set)
9959 rtx dst, set;
9960 {
9961 int regno = 0;
9962 int i;
9963
9964 enum machine_mode mode = GET_MODE (dst);
9965 if (GET_CODE (dst) == SUBREG)
9966 {
9967 regno = SUBREG_WORD (dst);
9968 dst = SUBREG_REG (dst);
9969 }
9970 if (GET_CODE (dst) != REG)
9971 return;
9972
9973 regno += REGNO (dst);
9974
9975 if (HARD_REGNO_NREGS (regno, mode) == 1 && GET_CODE (set) == SET)
9976 {
9977 rtx src = SET_SRC (set);
9978
9979 reg_mode[regno] = mode;
9980 switch (GET_CODE (src))
9981 {
9982 case PLUS:
9983 {
9984 rtx src0 = XEXP (src, 0);
9985 if (GET_CODE (src0) == REG)
9986 {
9987 if (REGNO (src0) != regno
9988 || reg_offset[regno] != const0_rtx)
9989 {
9990 reg_base_reg[regno] = REGNO (src0);
9991 reg_set_luid[regno] = move2add_luid;
9992 }
9993 reg_offset[regno] = XEXP (src, 1);
9994 break;
9995 }
9996 reg_set_luid[regno] = move2add_luid;
9997 reg_offset[regno] = set; /* Invalidate contents. */
9998 break;
9999 }
10000
10001 case REG:
10002 reg_base_reg[regno] = REGNO (SET_SRC (set));
10003 reg_offset[regno] = const0_rtx;
10004 reg_set_luid[regno] = move2add_luid;
10005 break;
10006
10007 default:
10008 reg_base_reg[regno] = -1;
10009 reg_offset[regno] = SET_SRC (set);
10010 reg_set_luid[regno] = move2add_luid;
10011 break;
10012 }
10013 }
10014 else
10015 {
10016 for (i = regno + HARD_REGNO_NREGS (regno, mode) - 1; i >= regno; i--)
10017 {
10018 /* Indicate that this register has been recently written to,
10019 but the exact contents are not available. */
10020 reg_set_luid[i] = move2add_luid;
10021 reg_offset[i] = dst;
10022 }
10023 }
10024 }