/* Reload pseudo regs into hard regs for insns that require hard regs.
   Copyright (C) 1987, 88, 89, 92-97, 1998 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
26 #include "hard-reg-set.h"
29 #include "insn-config.h"
30 #include "insn-flags.h"
31 #include "insn-codes.h"
35 #include "basic-block.h"
/* This file contains the reload pass of the compiler, which is
   run after register allocation has been done.  It checks that
   each insn is valid (operands required to be in registers really
   are in registers of the proper class) and fixes up invalid ones
   by copying values temporarily into registers for the insns
   that need them.

   The results of register allocation are described by the vector
   reg_renumber; the insns still contain pseudo regs, but reg_renumber
   can be used to find which hard reg, if any, a pseudo reg is in.

   The technique we always use is to free up a few hard regs that are
   called ``reload regs'', and for each place where a pseudo reg
   must be in a hard reg, copy it temporarily into one of the reload regs.

   Reload regs are allocated locally for every instruction that needs
   reloads.  When there are pseudos which are allocated to a register that
   has been chosen as a reload reg, such pseudos must be ``spilled''.
   This means that they go to other hard regs, or to stack slots if no other
   available hard regs can be found.  Spilling can invalidate more
   insns, requiring additional need for reloads, so we must keep checking
   until the process stabilizes.

   For machines with different classes of registers, we must keep track
   of the register class needed for each reload, and make sure that
   we allocate enough reload registers of each class.

   The file reload.c contains the code that checks one insn for
   validity and reports the reloads that it needs.  This file
   is in charge of scanning the entire rtl code, accumulating the
   reload needs, spilling, assigning reload registers to use for
   fixing up each insn, and generating the new insns to copy values
   into the reload registers.  */
/* Default cost of moving a value between two register classes, used when
   the target does not provide its own definition.  The lost closing
   `#endif' of this guard is restored here.  */
#ifndef REGISTER_MOVE_COST
#define REGISTER_MOVE_COST(x, y) 2
#endif
81 /* During reload_as_needed, element N contains a REG rtx for the hard reg
82 into which reg N has been reloaded (perhaps for a previous insn). */
83 static rtx
*reg_last_reload_reg
;
85 /* Elt N nonzero if reg_last_reload_reg[N] has been set in this insn
86 for an output reload that stores into reg N. */
87 static char *reg_has_output_reload
;
89 /* Indicates which hard regs are reload-registers for an output reload
90 in the current insn. */
91 static HARD_REG_SET reg_is_output_reload
;
93 /* Element N is the constant value to which pseudo reg N is equivalent,
94 or zero if pseudo reg N is not equivalent to a constant.
95 find_reloads looks at this in order to replace pseudo reg N
96 with the constant it stands for. */
97 rtx
*reg_equiv_constant
;
99 /* Element N is a memory location to which pseudo reg N is equivalent,
100 prior to any register elimination (such as frame pointer to stack
101 pointer). Depending on whether or not it is a valid address, this value
102 is transferred to either reg_equiv_address or reg_equiv_mem. */
103 rtx
*reg_equiv_memory_loc
;
105 /* Element N is the address of stack slot to which pseudo reg N is equivalent.
106 This is used when the address is not valid as a memory address
107 (because its displacement is too big for the machine.) */
108 rtx
*reg_equiv_address
;
110 /* Element N is the memory slot to which pseudo reg N is equivalent,
111 or zero if pseudo reg N is not equivalent to a memory slot. */
114 /* Widest width in which each pseudo reg is referred to (via subreg). */
115 static int *reg_max_ref_width
;
117 /* Element N is the insn that initialized reg N from its equivalent
118 constant or memory slot. */
119 static rtx
*reg_equiv_init
;
121 /* Vector to remember old contents of reg_renumber before spilling. */
122 static short *reg_old_renumber
;
124 /* During reload_as_needed, element N contains the last pseudo regno reloaded
125 into hard register N. If that pseudo reg occupied more than one register,
126 reg_reloaded_contents points to that pseudo for each spill register in
127 use; all of these must remain set for an inheritance to occur. */
128 static int reg_reloaded_contents
[FIRST_PSEUDO_REGISTER
];
130 /* During reload_as_needed, element N contains the insn for which
131 hard register N was last used. Its contents are significant only
132 when reg_reloaded_valid is set for this register. */
133 static rtx reg_reloaded_insn
[FIRST_PSEUDO_REGISTER
];
135 /* Indicate if reg_reloaded_insn / reg_reloaded_contents is valid */
136 static HARD_REG_SET reg_reloaded_valid
;
137 /* Indicate if the register was dead at the end of the reload.
138 This is only valid if reg_reloaded_contents is set and valid. */
139 static HARD_REG_SET reg_reloaded_dead
;
141 /* Number of spill-regs so far; number of valid elements of spill_regs. */
144 /* In parallel with spill_regs, contains REG rtx's for those regs.
145 Holds the last rtx used for any given reg, or 0 if it has never
146 been used for spilling yet. This rtx is reused, provided it has
148 static rtx spill_reg_rtx
[FIRST_PSEUDO_REGISTER
];
150 /* In parallel with spill_regs, contains nonzero for a spill reg
151 that was stored after the last time it was used.
152 The precise value is the insn generated to do the store. */
153 static rtx spill_reg_store
[FIRST_PSEUDO_REGISTER
];
155 /* This is the register that was stored with spill_reg_store. This is a
156 copy of reload_out / reload_out_reg when the value was stored; if
157 reload_out is a MEM, spill_reg_stored_to will be set to reload_out_reg. */
158 static rtx spill_reg_stored_to
[FIRST_PSEUDO_REGISTER
];
160 /* This table is the inverse mapping of spill_regs:
161 indexed by hard reg number,
162 it contains the position of that reg in spill_regs,
163 or -1 for something that is not in spill_regs.
165 ?!? This is no longer accurate. */
166 static short spill_reg_order
[FIRST_PSEUDO_REGISTER
];
168 /* This reg set indicates registers that can't be used as spill registers for
169 the currently processed insn. These are the hard registers which are live
170 during the insn, but not allocated to pseudos, as well as fixed
172 static HARD_REG_SET bad_spill_regs
;
174 /* These are the hard registers that can't be used as spill register for any
175 insn. This includes registers used for user variables and registers that
176 we can't eliminate. A register that appears in this set also can't be used
177 to retry register allocation. */
178 static HARD_REG_SET bad_spill_regs_global
;
180 /* Describes order of use of registers for reloading
181 of spilled pseudo-registers. `n_spills' is the number of
182 elements that are actually valid; new ones are added at the end.
184 Both spill_regs and spill_reg_order are used on two occasions:
185 once during find_reload_regs, where they keep track of the spill registers
186 for a single insn, but also during reload_as_needed where they show all
187 the registers ever used by reload. For the latter case, the information
188 is calculated during finish_spills. */
189 static short spill_regs
[FIRST_PSEUDO_REGISTER
];
191 /* This vector of reg sets indicates, for each pseudo, which hard registers
192 may not be used for retrying global allocation because the register was
193 formerly spilled from one of them. If we allowed reallocating a pseudo to
194 a register that it was already allocated to, reload might not
196 static HARD_REG_SET
*pseudo_previous_regs
;
198 /* This vector of reg sets indicates, for each pseudo, which hard
199 registers may not be used for retrying global allocation because they
200 are used as spill registers during one of the insns in which the
202 static HARD_REG_SET
*pseudo_forbidden_regs
;
204 /* All hard regs that have been used as spill registers for any insn are
205 marked in this set. */
206 static HARD_REG_SET used_spill_regs
;
208 /* Index of last register assigned as a spill register. We allocate in
209 a round-robin fashion. */
210 static int last_spill_reg
;
212 /* Describes order of preference for putting regs into spill_regs.
213 Contains the numbers of all the hard regs, in order most preferred first.
214 This order is different for each function.
215 It is set up by order_regs_for_reload.
216 Empty elements at the end contain -1. */
217 static short potential_reload_regs
[FIRST_PSEUDO_REGISTER
];
219 /* Nonzero if indirect addressing is supported on the machine; this means
220 that spilling (REG n) does not require reloading it into a register in
221 order to do (MEM (REG n)) or (MEM (PLUS (REG n) (CONST_INT c))). The
222 value indicates the level of indirect addressing supported, e.g., two
223 means that (MEM (MEM (REG n))) is also valid if (REG n) does not get
225 static char spill_indirect_levels
;
227 /* Nonzero if indirect addressing is supported when the innermost MEM is
228 of the form (MEM (SYMBOL_REF sym)). It is assumed that the level to
229 which these are valid is the same as spill_indirect_levels, above. */
230 char indirect_symref_ok
;
232 /* Nonzero if an address (plus (reg frame_pointer) (reg ...)) is valid. */
233 char double_reg_address_ok
;
235 /* Record the stack slot for each spilled hard register. */
236 static rtx spill_stack_slot
[FIRST_PSEUDO_REGISTER
];
238 /* Width allocated so far for that stack slot. */
239 static int spill_stack_slot_width
[FIRST_PSEUDO_REGISTER
];
241 /* Record which pseudos needed to be spilled. */
242 static regset spilled_pseudos
;
244 /* First uid used by insns created by reload in this function.
245 Used in find_equiv_reg. */
246 int reload_first_uid
;
248 /* Flag set by local-alloc or global-alloc if anything is live in
249 a call-clobbered reg across calls. */
250 int caller_save_needed
;
252 /* Set to 1 while reload_as_needed is operating.
253 Required by some machines to handle any generated moves differently. */
254 int reload_in_progress
= 0;
256 /* These arrays record the insn_code of insns that may be needed to
257 perform input and output reloads of special objects. They provide a
258 place to pass a scratch register. */
259 enum insn_code reload_in_optab
[NUM_MACHINE_MODES
];
260 enum insn_code reload_out_optab
[NUM_MACHINE_MODES
];
262 /* This obstack is used for allocation of rtl during register elimination.
263 The allocated storage can be freed once find_reloads has processed the
265 struct obstack reload_obstack
;
267 /* Points to the beginning of the reload_obstack. All insn_chain structures
268 are allocated first. */
269 char *reload_startobj
;
271 /* The point after all insn_chain structures. Used to quickly deallocate
272 memory used while processing one insn. */
273 char *reload_firstobj
;
275 #define obstack_chunk_alloc xmalloc
276 #define obstack_chunk_free free
278 /* List of labels that must never be deleted. */
279 extern rtx forced_labels
;
281 /* List of insn_chain instructions, one for every insn that reload needs to
283 struct insn_chain
*reload_insn_chain
;
285 /* List of all insns needing reloads. */
286 static struct insn_chain
*insns_need_reload
;
288 /* This structure is used to record information about register eliminations.
289 Each array entry describes one possible way of eliminating a register
290 in favor of another. If there is more than one way of eliminating a
291 particular register, the most preferred should be specified first. */
293 static struct elim_table
295 int from
; /* Register number to be eliminated. */
296 int to
; /* Register number used as replacement. */
297 int initial_offset
; /* Initial difference between values. */
298 int can_eliminate
; /* Non-zero if this elimination can be done. */
299 int can_eliminate_previous
; /* Value of CAN_ELIMINATE in previous scan over
300 insns made by reload. */
301 int offset
; /* Current offset between the two regs. */
302 int previous_offset
; /* Offset at end of previous insn. */
303 int ref_outside_mem
; /* "to" has been referenced outside a MEM. */
304 rtx from_rtx
; /* REG rtx for the register to be eliminated.
305 We cannot simply compare the number since
306 we might then spuriously replace a hard
307 register corresponding to a pseudo
308 assigned to the reg to be eliminated. */
309 rtx to_rtx
; /* REG rtx for the replacement. */
312 /* If a set of eliminable registers was specified, define the table from it.
313 Otherwise, default to the normal case of the frame pointer being
314 replaced by the stack pointer. */
316 #ifdef ELIMINABLE_REGS
319 {{ FRAME_POINTER_REGNUM
, STACK_POINTER_REGNUM
}};
322 #define NUM_ELIMINABLE_REGS (sizeof reg_eliminate / sizeof reg_eliminate[0])
324 /* Record the number of pending eliminations that have an offset not equal
325 to their initial offset. If non-zero, we use a new copy of each
326 replacement result in any insns encountered. */
327 int num_not_at_initial_offset
;
329 /* Count the number of registers that we may be able to eliminate. */
330 static int num_eliminable
;
332 /* For each label, we record the offset of each elimination. If we reach
333 a label by more than one path and an offset differs, we cannot do the
334 elimination. This information is indexed by the number of the label.
335 The first table is an array of flags that records whether we have yet
336 encountered a label and the second table is an array of arrays, one
337 entry in the latter array for each elimination. */
339 static char *offsets_known_at
;
340 static int (*offsets_at
)[NUM_ELIMINABLE_REGS
];
342 /* Number of labels in the current function. */
344 static int num_labels
;
346 struct hard_reg_n_uses
352 static void maybe_fix_stack_asms
PROTO((void));
353 static void calculate_needs_all_insns
PROTO((int));
354 static void calculate_needs
PROTO((struct insn_chain
*));
355 static void find_reload_regs
PROTO((struct insn_chain
*chain
,
357 static void find_tworeg_group
PROTO((struct insn_chain
*, int,
359 static void find_group
PROTO((struct insn_chain
*, int,
361 static int possible_group_p
PROTO((struct insn_chain
*, int));
362 static void count_possible_groups
PROTO((struct insn_chain
*, int));
363 static int modes_equiv_for_class_p
PROTO((enum machine_mode
,
366 static void delete_caller_save_insns
PROTO((void));
368 static void spill_failure
PROTO((rtx
));
369 static void new_spill_reg
PROTO((struct insn_chain
*, int, int,
371 static void maybe_mark_pseudo_spilled
PROTO((int));
372 static void delete_dead_insn
PROTO((rtx
));
373 static void alter_reg
PROTO((int, int));
374 static void set_label_offsets
PROTO((rtx
, rtx
, int));
375 static int eliminate_regs_in_insn
PROTO((rtx
, int));
376 static void update_eliminable_offsets
PROTO((void));
377 static void mark_not_eliminable
PROTO((rtx
, rtx
));
378 static void set_initial_elim_offsets
PROTO((void));
379 static void verify_initial_elim_offsets
PROTO((void));
380 static void set_initial_label_offsets
PROTO((void));
381 static void set_offsets_for_label
PROTO((rtx
));
382 static void init_elim_table
PROTO((void));
383 static void update_eliminables
PROTO((HARD_REG_SET
*));
384 static void spill_hard_reg
PROTO((int, FILE *, int));
385 static int finish_spills
PROTO((int, FILE *));
386 static void ior_hard_reg_set
PROTO((HARD_REG_SET
*, HARD_REG_SET
*));
387 static void scan_paradoxical_subregs
PROTO((rtx
));
388 static int hard_reg_use_compare
PROTO((const GENERIC_PTR
, const GENERIC_PTR
));
389 static void count_pseudo
PROTO((struct hard_reg_n_uses
*, int));
390 static void order_regs_for_reload
PROTO((struct insn_chain
*));
391 static void reload_as_needed
PROTO((int));
392 static void forget_old_reloads_1
PROTO((rtx
, rtx
));
393 static int reload_reg_class_lower
PROTO((const GENERIC_PTR
, const GENERIC_PTR
));
394 static void mark_reload_reg_in_use
PROTO((int, int, enum reload_type
,
396 static void clear_reload_reg_in_use
PROTO((int, int, enum reload_type
,
398 static int reload_reg_free_p
PROTO((int, int, enum reload_type
));
399 static int reload_reg_free_before_p
PROTO((int, int, enum reload_type
, int));
400 static int reload_reg_free_for_value_p
PROTO((int, int, enum reload_type
, rtx
, rtx
, int));
401 static int reload_reg_reaches_end_p
PROTO((int, int, enum reload_type
));
402 static int allocate_reload_reg
PROTO((struct insn_chain
*, int, int,
404 static void choose_reload_regs
PROTO((struct insn_chain
*));
405 static void merge_assigned_reloads
PROTO((rtx
));
406 static void emit_reload_insns
PROTO((struct insn_chain
*));
407 static void delete_output_reload
PROTO((rtx
, int, int));
408 static void delete_address_reloads
PROTO((rtx
, rtx
));
409 static void delete_address_reloads_1
PROTO((rtx
, rtx
, rtx
));
410 static rtx inc_for_reload
PROTO((rtx
, rtx
, rtx
, int));
411 static int constraint_accepts_reg_p
PROTO((char *, rtx
));
412 static void reload_cse_regs_1
PROTO((rtx
));
413 static void reload_cse_invalidate_regno
PROTO((int, enum machine_mode
, int));
414 static int reload_cse_mem_conflict_p
PROTO((rtx
, rtx
));
415 static void reload_cse_invalidate_mem
PROTO((rtx
));
416 static void reload_cse_invalidate_rtx
PROTO((rtx
, rtx
));
417 static int reload_cse_regno_equal_p
PROTO((int, rtx
, enum machine_mode
));
418 static int reload_cse_noop_set_p
PROTO((rtx
, rtx
));
419 static int reload_cse_simplify_set
PROTO((rtx
, rtx
));
420 static int reload_cse_simplify_operands
PROTO((rtx
));
421 static void reload_cse_check_clobber
PROTO((rtx
, rtx
));
422 static void reload_cse_record_set
PROTO((rtx
, rtx
));
423 static void reload_combine
PROTO((void));
424 static void reload_combine_note_use
PROTO((rtx
*, rtx
));
425 static void reload_combine_note_store
PROTO((rtx
, rtx
));
426 static void reload_cse_move2add
PROTO((rtx
));
427 static void move2add_note_store
PROTO((rtx
, rtx
));
429 /* Initialize the reload pass once per compilation. */
436 /* Often (MEM (REG n)) is still valid even if (REG n) is put on the stack.
437 Set spill_indirect_levels to the number of levels such addressing is
438 permitted, zero if it is not permitted at all. */
441 = gen_rtx_MEM (Pmode
,
443 gen_rtx_REG (Pmode
, LAST_VIRTUAL_REGISTER
+ 1),
445 spill_indirect_levels
= 0;
447 while (memory_address_p (QImode
, tem
))
449 spill_indirect_levels
++;
450 tem
= gen_rtx_MEM (Pmode
, tem
);
453 /* See if indirect addressing is valid for (MEM (SYMBOL_REF ...)). */
455 tem
= gen_rtx_MEM (Pmode
, gen_rtx_SYMBOL_REF (Pmode
, "foo"));
456 indirect_symref_ok
= memory_address_p (QImode
, tem
);
458 /* See if reg+reg is a valid (and offsettable) address. */
460 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
462 tem
= gen_rtx_PLUS (Pmode
,
463 gen_rtx_REG (Pmode
, HARD_FRAME_POINTER_REGNUM
),
464 gen_rtx_REG (Pmode
, i
));
465 /* This way, we make sure that reg+reg is an offsettable address. */
466 tem
= plus_constant (tem
, 4);
468 if (memory_address_p (QImode
, tem
))
470 double_reg_address_ok
= 1;
475 /* Initialize obstack for our rtl allocation. */
476 gcc_obstack_init (&reload_obstack
);
477 reload_startobj
= (char *) obstack_alloc (&reload_obstack
, 0);
480 /* List of insn chains that are currently unused. */
481 static struct insn_chain
*unused_insn_chains
= 0;
483 /* Allocate an empty insn_chain structure. */
487 struct insn_chain
*c
;
489 if (unused_insn_chains
== 0)
491 c
= obstack_alloc (&reload_obstack
, sizeof (struct insn_chain
));
492 c
->live_before
= OBSTACK_ALLOC_REG_SET (&reload_obstack
);
493 c
->live_after
= OBSTACK_ALLOC_REG_SET (&reload_obstack
);
497 c
= unused_insn_chains
;
498 unused_insn_chains
= c
->next
;
500 c
->is_caller_save_insn
= 0;
501 c
->need_operand_change
= 0;
507 /* Small utility function to set all regs in hard reg set TO which are
508 allocated to pseudos in regset FROM. */
510 compute_use_by_pseudos (to
, from
)
515 EXECUTE_IF_SET_IN_REG_SET
516 (from
, FIRST_PSEUDO_REGISTER
, regno
,
518 int r
= reg_renumber
[regno
];
522 nregs
= HARD_REGNO_NREGS (r
, PSEUDO_REGNO_MODE (regno
));
524 SET_HARD_REG_BIT (*to
, r
+ nregs
);
528 /* Global variables used by reload and its subroutines. */
530 /* Set during calculate_needs if an insn needs register elimination. */
531 static int something_needs_elimination
;
532 /* Set during calculate_needs if an insn needs an operand changed. */
533 int something_needs_operands_changed
;
535 /* Nonzero means we couldn't get enough spill regs. */
538 /* Main entry point for the reload pass.
540 FIRST is the first insn of the function being compiled.
542 GLOBAL nonzero means we were called from global_alloc
543 and should attempt to reallocate any pseudoregs that we
544 displace from hard regs we will use for reloads.
545 If GLOBAL is zero, we do not have enough information to do that,
546 so any pseudo reg that is spilled must go to the stack.
548 DUMPFILE is the global-reg debugging dump file stream, or 0.
549 If it is nonzero, messages are written to it to describe
550 which registers are seized as reload regs, which pseudo regs
551 are spilled from them, and where the pseudo regs are reallocated to.
553 Return value is nonzero if reload failed
554 and we must not do any more for this function. */
557 reload (first
, global
, dumpfile
)
564 register struct elim_table
*ep
;
566 /* The two pointers used to track the true location of the memory used
567 for label offsets. */
568 char *real_known_ptr
= NULL_PTR
;
569 int (*real_at_ptr
)[NUM_ELIMINABLE_REGS
];
571 /* Make sure even insns with volatile mem refs are recognizable. */
576 reload_firstobj
= (char *) obstack_alloc (&reload_obstack
, 0);
578 /* Make sure that the last insn in the chain
579 is not something that needs reloading. */
580 emit_note (NULL_PTR
, NOTE_INSN_DELETED
);
582 /* Enable find_equiv_reg to distinguish insns made by reload. */
583 reload_first_uid
= get_max_uid ();
585 #ifdef SECONDARY_MEMORY_NEEDED
586 /* Initialize the secondary memory table. */
587 clear_secondary_mem ();
590 /* We don't have a stack slot for any spill reg yet. */
591 bzero ((char *) spill_stack_slot
, sizeof spill_stack_slot
);
592 bzero ((char *) spill_stack_slot_width
, sizeof spill_stack_slot_width
);
594 /* Initialize the save area information for caller-save, in case some
598 /* Compute which hard registers are now in use
599 as homes for pseudo registers.
600 This is done here rather than (eg) in global_alloc
601 because this point is reached even if not optimizing. */
602 for (i
= FIRST_PSEUDO_REGISTER
; i
< max_regno
; i
++)
605 /* A function that receives a nonlocal goto must save all call-saved
607 if (current_function_has_nonlocal_label
)
608 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
610 if (! call_used_regs
[i
] && ! fixed_regs
[i
])
611 regs_ever_live
[i
] = 1;
614 /* Find all the pseudo registers that didn't get hard regs
615 but do have known equivalent constants or memory slots.
616 These include parameters (known equivalent to parameter slots)
617 and cse'd or loop-moved constant memory addresses.
619 Record constant equivalents in reg_equiv_constant
620 so they will be substituted by find_reloads.
621 Record memory equivalents in reg_mem_equiv so they can
622 be substituted eventually by altering the REG-rtx's. */
624 reg_equiv_constant
= (rtx
*) xmalloc (max_regno
* sizeof (rtx
));
625 bzero ((char *) reg_equiv_constant
, max_regno
* sizeof (rtx
));
626 reg_equiv_memory_loc
= (rtx
*) xmalloc (max_regno
* sizeof (rtx
));
627 bzero ((char *) reg_equiv_memory_loc
, max_regno
* sizeof (rtx
));
628 reg_equiv_mem
= (rtx
*) xmalloc (max_regno
* sizeof (rtx
));
629 bzero ((char *) reg_equiv_mem
, max_regno
* sizeof (rtx
));
630 reg_equiv_init
= (rtx
*) xmalloc (max_regno
* sizeof (rtx
));
631 bzero ((char *) reg_equiv_init
, max_regno
* sizeof (rtx
));
632 reg_equiv_address
= (rtx
*) xmalloc (max_regno
* sizeof (rtx
));
633 bzero ((char *) reg_equiv_address
, max_regno
* sizeof (rtx
));
634 reg_max_ref_width
= (int *) xmalloc (max_regno
* sizeof (int));
635 bzero ((char *) reg_max_ref_width
, max_regno
* sizeof (int));
636 reg_old_renumber
= (short *) xmalloc (max_regno
* sizeof (short));
637 bcopy (reg_renumber
, reg_old_renumber
, max_regno
* sizeof (short));
638 pseudo_forbidden_regs
639 = (HARD_REG_SET
*) xmalloc (max_regno
* sizeof (HARD_REG_SET
));
641 = (HARD_REG_SET
*) xmalloc (max_regno
* sizeof (HARD_REG_SET
));
643 CLEAR_HARD_REG_SET (bad_spill_regs_global
);
644 bzero ((char *) pseudo_previous_regs
, max_regno
* sizeof (HARD_REG_SET
));
646 /* Look for REG_EQUIV notes; record what each pseudo is equivalent to.
647 Also find all paradoxical subregs and find largest such for each pseudo.
648 On machines with small register classes, record hard registers that
649 are used for user variables. These can never be used for spills.
650 Also look for a "constant" NOTE_INSN_SETJMP. This means that all
651 caller-saved registers must be marked live. */
653 for (insn
= first
; insn
; insn
= NEXT_INSN (insn
))
655 rtx set
= single_set (insn
);
657 if (GET_CODE (insn
) == NOTE
&& CONST_CALL_P (insn
)
658 && NOTE_LINE_NUMBER (insn
) == NOTE_INSN_SETJMP
)
659 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
660 if (! call_used_regs
[i
])
661 regs_ever_live
[i
] = 1;
663 if (set
!= 0 && GET_CODE (SET_DEST (set
)) == REG
)
665 rtx note
= find_reg_note (insn
, REG_EQUIV
, NULL_RTX
);
667 #ifdef LEGITIMATE_PIC_OPERAND_P
668 && (! CONSTANT_P (XEXP (note
, 0)) || ! flag_pic
669 || LEGITIMATE_PIC_OPERAND_P (XEXP (note
, 0)))
673 rtx x
= XEXP (note
, 0);
674 i
= REGNO (SET_DEST (set
));
675 if (i
> LAST_VIRTUAL_REGISTER
)
677 if (GET_CODE (x
) == MEM
)
679 /* If the operand is a PLUS, the MEM may be shared,
680 so make sure we have an unshared copy here. */
681 if (GET_CODE (XEXP (x
, 0)) == PLUS
)
684 reg_equiv_memory_loc
[i
] = x
;
686 else if (CONSTANT_P (x
))
688 if (LEGITIMATE_CONSTANT_P (x
))
689 reg_equiv_constant
[i
] = x
;
691 reg_equiv_memory_loc
[i
]
692 = force_const_mem (GET_MODE (SET_DEST (set
)), x
);
697 /* If this register is being made equivalent to a MEM
698 and the MEM is not SET_SRC, the equivalencing insn
699 is one with the MEM as a SET_DEST and it occurs later.
700 So don't mark this insn now. */
701 if (GET_CODE (x
) != MEM
702 || rtx_equal_p (SET_SRC (set
), x
))
703 reg_equiv_init
[i
] = insn
;
708 /* If this insn is setting a MEM from a register equivalent to it,
709 this is the equivalencing insn. */
710 else if (set
&& GET_CODE (SET_DEST (set
)) == MEM
711 && GET_CODE (SET_SRC (set
)) == REG
712 && reg_equiv_memory_loc
[REGNO (SET_SRC (set
))]
713 && rtx_equal_p (SET_DEST (set
),
714 reg_equiv_memory_loc
[REGNO (SET_SRC (set
))]))
715 reg_equiv_init
[REGNO (SET_SRC (set
))] = insn
;
717 if (GET_RTX_CLASS (GET_CODE (insn
)) == 'i')
718 scan_paradoxical_subregs (PATTERN (insn
));
723 num_labels
= max_label_num () - get_first_label_num ();
725 /* Allocate the tables used to store offset information at labels. */
726 /* We used to use alloca here, but the size of what it would try to
727 allocate would occasionally cause it to exceed the stack limit and
728 cause a core dump. */
729 real_known_ptr
= xmalloc (num_labels
);
731 = (int (*)[NUM_ELIMINABLE_REGS
])
732 xmalloc (num_labels
* NUM_ELIMINABLE_REGS
* sizeof (int));
734 offsets_known_at
= real_known_ptr
- get_first_label_num ();
736 = (int (*)[NUM_ELIMINABLE_REGS
]) (real_at_ptr
- get_first_label_num ());
738 /* Alter each pseudo-reg rtx to contain its hard reg number.
739 Assign stack slots to the pseudos that lack hard regs or equivalents.
740 Do not touch virtual registers. */
742 for (i
= LAST_VIRTUAL_REGISTER
+ 1; i
< max_regno
; i
++)
745 /* If we have some registers we think can be eliminated, scan all insns to
746 see if there is an insn that sets one of these registers to something
747 other than itself plus a constant. If so, the register cannot be
748 eliminated. Doing this scan here eliminates an extra pass through the
749 main reload loop in the most common case where register elimination
751 for (insn
= first
; insn
&& num_eliminable
; insn
= NEXT_INSN (insn
))
752 if (GET_CODE (insn
) == INSN
|| GET_CODE (insn
) == JUMP_INSN
753 || GET_CODE (insn
) == CALL_INSN
)
754 note_stores (PATTERN (insn
), mark_not_eliminable
);
756 #ifndef REGISTER_CONSTRAINTS
757 /* If all the pseudo regs have hard regs,
758 except for those that are never referenced,
759 we know that no reloads are needed. */
760 /* But that is not true if there are register constraints, since
761 in that case some pseudos might be in the wrong kind of hard reg. */
763 for (i
= FIRST_PSEUDO_REGISTER
; i
< max_regno
; i
++)
764 if (reg_renumber
[i
] == -1 && REG_N_REFS (i
) != 0)
767 if (i
== max_regno
&& num_eliminable
== 0 && ! caller_save_needed
)
769 free (real_known_ptr
);
771 free (reg_equiv_constant
);
772 free (reg_equiv_memory_loc
);
773 free (reg_equiv_mem
);
774 free (reg_equiv_init
);
775 free (reg_equiv_address
);
776 free (reg_max_ref_width
);
777 free (reg_old_renumber
);
778 free (pseudo_previous_regs
);
779 free (pseudo_forbidden_regs
);
784 maybe_fix_stack_asms ();
786 insns_need_reload
= 0;
787 something_needs_elimination
= 0;
789 /* Initialize to -1, which means take the first spill register. */
792 spilled_pseudos
= ALLOCA_REG_SET ();
794 /* Spill any hard regs that we know we can't eliminate. */
795 CLEAR_HARD_REG_SET (used_spill_regs
);
796 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
]; ep
++)
797 if (! ep
->can_eliminate
)
798 spill_hard_reg (ep
->from
, dumpfile
, 1);
800 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
801 if (frame_pointer_needed
)
802 spill_hard_reg (HARD_FRAME_POINTER_REGNUM
, dumpfile
, 1);
804 finish_spills (global
, dumpfile
);
806 /* From now on, we need to emit any moves without making new pseudos. */
807 reload_in_progress
= 1;
809 /* This loop scans the entire function each go-round
810 and repeats until one repetition spills no additional hard regs. */
813 int something_changed
;
815 struct insn_chain
*chain
;
817 HOST_WIDE_INT starting_frame_size
;
819 /* Round size of stack frame to BIGGEST_ALIGNMENT. This must be done
820 here because the stack size may be a part of the offset computation
821 for register elimination, and there might have been new stack slots
822 created in the last iteration of this loop. */
823 assign_stack_local (BLKmode
, 0, 0);
825 starting_frame_size
= get_frame_size ();
827 set_initial_elim_offsets ();
828 set_initial_label_offsets ();
830 /* For each pseudo register that has an equivalent location defined,
831 try to eliminate any eliminable registers (such as the frame pointer)
832 assuming initial offsets for the replacement register, which
835 If the resulting location is directly addressable, substitute
836 the MEM we just got directly for the old REG.
838 If it is not addressable but is a constant or the sum of a hard reg
839 and constant, it is probably not addressable because the constant is
840 out of range, in that case record the address; we will generate
841 hairy code to compute the address in a register each time it is
842 needed. Similarly if it is a hard register, but one that is not
843 valid as an address register.
845 If the location is not addressable, but does not have one of the
846 above forms, assign a stack slot. We have to do this to avoid the
847 potential of producing lots of reloads if, e.g., a location involves
848 a pseudo that didn't get a hard register and has an equivalent memory
849 location that also involves a pseudo that didn't get a hard register.
851 Perhaps at some point we will improve reload_when_needed handling
852 so this problem goes away. But that's very hairy. */
854 for (i
= FIRST_PSEUDO_REGISTER
; i
< max_regno
; i
++)
855 if (reg_renumber
[i
] < 0 && reg_equiv_memory_loc
[i
])
857 rtx x
= eliminate_regs (reg_equiv_memory_loc
[i
], 0, NULL_RTX
);
859 if (strict_memory_address_p (GET_MODE (regno_reg_rtx
[i
]),
861 reg_equiv_mem
[i
] = x
, reg_equiv_address
[i
] = 0;
862 else if (CONSTANT_P (XEXP (x
, 0))
863 || (GET_CODE (XEXP (x
, 0)) == REG
864 && REGNO (XEXP (x
, 0)) < FIRST_PSEUDO_REGISTER
)
865 || (GET_CODE (XEXP (x
, 0)) == PLUS
866 && GET_CODE (XEXP (XEXP (x
, 0), 0)) == REG
867 && (REGNO (XEXP (XEXP (x
, 0), 0))
868 < FIRST_PSEUDO_REGISTER
)
869 && CONSTANT_P (XEXP (XEXP (x
, 0), 1))))
870 reg_equiv_address
[i
] = XEXP (x
, 0), reg_equiv_mem
[i
] = 0;
873 /* Make a new stack slot. Then indicate that something
874 changed so we go back and recompute offsets for
875 eliminable registers because the allocation of memory
876 below might change some offset. reg_equiv_{mem,address}
877 will be set up for this pseudo on the next pass around
879 reg_equiv_memory_loc
[i
] = 0;
880 reg_equiv_init
[i
] = 0;
885 if (caller_save_needed
)
888 /* If we allocated another stack slot, redo elimination bookkeeping. */
889 if (starting_frame_size
!= get_frame_size ())
892 if (caller_save_needed
)
894 save_call_clobbered_regs ();
895 /* That might have allocated new insn_chain structures. */
896 reload_firstobj
= (char *) obstack_alloc (&reload_obstack
, 0);
899 calculate_needs_all_insns (global
);
901 CLEAR_REG_SET (spilled_pseudos
);
904 something_changed
= 0;
906 /* If we allocated any new memory locations, make another pass
907 since it might have changed elimination offsets. */
908 if (starting_frame_size
!= get_frame_size ())
909 something_changed
= 1;
912 HARD_REG_SET to_spill
;
913 CLEAR_HARD_REG_SET (to_spill
);
914 update_eliminables (&to_spill
);
915 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
916 if (TEST_HARD_REG_BIT (to_spill
, i
))
918 spill_hard_reg (i
, dumpfile
, 1);
923 CLEAR_HARD_REG_SET (used_spill_regs
);
924 /* Try to satisfy the needs for each insn. */
925 for (chain
= insns_need_reload
; chain
!= 0;
926 chain
= chain
->next_need_reload
)
927 find_reload_regs (chain
, dumpfile
);
932 if (insns_need_reload
!= 0 || did_spill
)
933 something_changed
|= finish_spills (global
, dumpfile
);
935 if (! something_changed
)
938 if (caller_save_needed
)
939 delete_caller_save_insns ();
942 /* If global-alloc was run, notify it of any register eliminations we have
945 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
]; ep
++)
946 if (ep
->can_eliminate
)
947 mark_elimination (ep
->from
, ep
->to
);
949 /* If a pseudo has no hard reg, delete the insns that made the equivalence.
950 If that insn didn't set the register (i.e., it copied the register to
951 memory), just delete that insn instead of the equivalencing insn plus
952 anything now dead. If we call delete_dead_insn on that insn, we may
953 delete the insn that actually sets the register if the register dies
954 there and that is incorrect. */
956 for (i
= FIRST_PSEUDO_REGISTER
; i
< max_regno
; i
++)
957 if (reg_renumber
[i
] < 0 && reg_equiv_init
[i
] != 0
958 && GET_CODE (reg_equiv_init
[i
]) != NOTE
)
960 if (reg_set_p (regno_reg_rtx
[i
], PATTERN (reg_equiv_init
[i
])))
961 delete_dead_insn (reg_equiv_init
[i
]);
964 PUT_CODE (reg_equiv_init
[i
], NOTE
);
965 NOTE_SOURCE_FILE (reg_equiv_init
[i
]) = 0;
966 NOTE_LINE_NUMBER (reg_equiv_init
[i
]) = NOTE_INSN_DELETED
;
970 /* Use the reload registers where necessary
971 by generating move instructions to move the must-be-register
972 values into or out of the reload registers. */
974 if (insns_need_reload
!= 0 || something_needs_elimination
975 || something_needs_operands_changed
)
977 int old_frame_size
= get_frame_size ();
979 reload_as_needed (global
);
981 if (old_frame_size
!= get_frame_size ())
985 verify_initial_elim_offsets ();
988 /* If we were able to eliminate the frame pointer, show that it is no
989 longer live at the start of any basic block. If it is live by
990 virtue of being in a pseudo, that pseudo will be marked live
991 and hence the frame pointer will be known to be live via that
994 if (! frame_pointer_needed
)
995 for (i
= 0; i
< n_basic_blocks
; i
++)
996 CLEAR_REGNO_REG_SET (basic_block_live_at_start
[i
],
997 HARD_FRAME_POINTER_REGNUM
);
999 /* Come here (with failure set nonzero) if we can't get enough spill regs
1000 and we decide not to abort about it. */
1003 reload_in_progress
= 0;
1005 /* Now eliminate all pseudo regs by modifying them into
1006 their equivalent memory references.
1007 The REG-rtx's for the pseudos are modified in place,
1008 so all insns that used to refer to them now refer to memory.
1010 For a reg that has a reg_equiv_address, all those insns
1011 were changed by reloading so that no insns refer to it any longer;
1012 but the DECL_RTL of a variable decl may refer to it,
1013 and if so this causes the debugging info to mention the variable. */
1015 for (i
= FIRST_PSEUDO_REGISTER
; i
< max_regno
; i
++)
1019 int is_readonly
= 0;
1021 if (reg_equiv_memory_loc
[i
])
1023 in_struct
= MEM_IN_STRUCT_P (reg_equiv_memory_loc
[i
]);
1024 is_readonly
= RTX_UNCHANGING_P (reg_equiv_memory_loc
[i
]);
1027 if (reg_equiv_mem
[i
])
1028 addr
= XEXP (reg_equiv_mem
[i
], 0);
1030 if (reg_equiv_address
[i
])
1031 addr
= reg_equiv_address
[i
];
1035 if (reg_renumber
[i
] < 0)
1037 rtx reg
= regno_reg_rtx
[i
];
1038 XEXP (reg
, 0) = addr
;
1039 REG_USERVAR_P (reg
) = 0;
1040 RTX_UNCHANGING_P (reg
) = is_readonly
;
1041 MEM_IN_STRUCT_P (reg
) = in_struct
;
1042 /* We have no alias information about this newly created
1044 MEM_ALIAS_SET (reg
) = 0;
1045 PUT_CODE (reg
, MEM
);
1047 else if (reg_equiv_mem
[i
])
1048 XEXP (reg_equiv_mem
[i
], 0) = addr
;
1052 /* Make a pass over all the insns and delete all USEs which we inserted
1053 only to tag a REG_EQUAL note on them. Remove all REG_DEAD and REG_UNUSED
1054 notes. Delete all CLOBBER insns and simplify (subreg (reg)) operands. */
1056 for (insn
= first
; insn
; insn
= NEXT_INSN (insn
))
1057 if (GET_RTX_CLASS (GET_CODE (insn
)) == 'i')
1061 if ((GET_CODE (PATTERN (insn
)) == USE
1062 && find_reg_note (insn
, REG_EQUAL
, NULL_RTX
))
1063 || GET_CODE (PATTERN (insn
)) == CLOBBER
)
1065 PUT_CODE (insn
, NOTE
);
1066 NOTE_SOURCE_FILE (insn
) = 0;
1067 NOTE_LINE_NUMBER (insn
) = NOTE_INSN_DELETED
;
1071 pnote
= ®_NOTES (insn
);
1074 if (REG_NOTE_KIND (*pnote
) == REG_DEAD
1075 || REG_NOTE_KIND (*pnote
) == REG_UNUSED
)
1076 *pnote
= XEXP (*pnote
, 1);
1078 pnote
= &XEXP (*pnote
, 1);
1081 /* And simplify (subreg (reg)) if it appears as an operand. */
1082 cleanup_subreg_operands (insn
);
1085 /* If we are doing stack checking, give a warning if this function's
1086 frame size is larger than we expect. */
1087 if (flag_stack_check
&& ! STACK_CHECK_BUILTIN
)
1089 HOST_WIDE_INT size
= get_frame_size () + STACK_CHECK_FIXED_FRAME_SIZE
;
1091 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1092 if (regs_ever_live
[i
] && ! fixed_regs
[i
] && call_used_regs
[i
])
1093 size
+= UNITS_PER_WORD
;
1095 if (size
> STACK_CHECK_MAX_FRAME_SIZE
)
1096 warning ("frame size too large for reliable stack checking");
1099 /* Indicate that we no longer have known memory locations or constants. */
1100 if (reg_equiv_constant
)
1101 free (reg_equiv_constant
);
1102 reg_equiv_constant
= 0;
1103 if (reg_equiv_memory_loc
)
1104 free (reg_equiv_memory_loc
);
1105 reg_equiv_memory_loc
= 0;
1108 free (real_known_ptr
);
1112 free (reg_equiv_mem
);
1113 free (reg_equiv_init
);
1114 free (reg_equiv_address
);
1115 free (reg_max_ref_width
);
1116 free (reg_old_renumber
);
1117 free (pseudo_previous_regs
);
1118 free (pseudo_forbidden_regs
);
1120 FREE_REG_SET (spilled_pseudos
);
1122 CLEAR_HARD_REG_SET (used_spill_regs
);
1123 for (i
= 0; i
< n_spills
; i
++)
1124 SET_HARD_REG_BIT (used_spill_regs
, spill_regs
[i
]);
1126 /* Free all the insn_chain structures at once. */
1127 obstack_free (&reload_obstack
, reload_startobj
);
1128 unused_insn_chains
= 0;
1133 /* Yet another special case. Unfortunately, reg-stack forces people to
1134 write incorrect clobbers in asm statements. These clobbers must not
1135 cause the register to appear in bad_spill_regs, otherwise we'll call
1136 fatal_insn later. We clear the corresponding regnos in the live
1137 register sets to avoid this.
1138 The whole thing is rather sick, I'm afraid. */
/* Scan the reload_insn_chain for asm insns whose PARALLEL patterns
   CLOBBER stack registers.  For each such insn, compute the set of hard
   regs allowed by the asm's operand constraints; any register that is
   both clobbered and constraint-allowed is cleared from the chain's
   live_before/live_after sets so it can be used as a reload register.
   See the block comment just above this function for the rationale.
   NOTE(review): this chunk looks like a mangled extraction -- logical
   lines are split and the leading integers appear to be line-number
   residue; gaps in those numbers suggest some original lines (braces,
   declarations, the constraint-scanning loop header) are missing.  */
1140 maybe_fix_stack_asms ()
1143 char *constraints
[MAX_RECOG_OPERANDS
];
1144 enum machine_mode operand_mode
[MAX_RECOG_OPERANDS
];
1145 struct insn_chain
*chain
;
/* Walk every insn in the reload chain.  */
1147 for (chain
= reload_insn_chain
; chain
!= 0; chain
= chain
->next
)
1150 HARD_REG_SET clobbered
, allowed
;
/* Skip anything that is not an asm insn with operands.  */
1153 if (GET_RTX_CLASS (GET_CODE (chain
->insn
)) != 'i'
1154 || (noperands
= asm_noperands (PATTERN (chain
->insn
))) < 0)
1156 pat
= PATTERN (chain
->insn
);
/* Only a PARALLEL can carry CLOBBERs alongside the asm body.  */
1157 if (GET_CODE (pat
) != PARALLEL
)
1160 CLEAR_HARD_REG_SET (clobbered
);
1161 CLEAR_HARD_REG_SET (allowed
);
1163 /* First, make a mask of all stack regs that are clobbered. */
1164 for (i
= 0; i
< XVECLEN (pat
, 0); i
++)
1166 rtx t
= XVECEXP (pat
, 0, i
);
1167 if (GET_CODE (t
) == CLOBBER
&& STACK_REG_P (XEXP (t
, 0)))
1168 SET_HARD_REG_BIT (clobbered
, REGNO (XEXP (t
, 0)));
1171 /* Get the operand values and constraints out of the insn. */
1172 decode_asm_operands (pat
, recog_operand
, recog_operand_loc
,
1173 constraints
, operand_mode
);
1175 /* For every operand, see what registers are allowed. */
1176 for (i
= 0; i
< noperands
; i
++)
1178 char *p
= constraints
[i
];
1179 /* For every alternative, we compute the class of registers allowed
1180 for reloading in CLS, and merge its contents into the reg set
1182 int cls
= (int) NO_REGS
;
/* End of constraint string, alternative separator, or '#':
   flush the accumulated class into ALLOWED and start over.  */
1188 if (c
== '\0' || c
== ',' || c
== '#')
1190 /* End of one alternative - mark the regs in the current
1191 class, and reset the class. */
1192 IOR_HARD_REG_SET (allowed
, reg_class_contents
[cls
]);
1197 } while (c
!= '\0' && c
!= ',');
/* Constraint letters that do not name a register class are
   simply skipped (modifiers, numbers, memory/constant letters).  */
1205 case '=': case '+': case '*': case '%': case '?': case '!':
1206 case '0': case '1': case '2': case '3': case '4': case 'm':
1207 case '<': case '>': case 'V': case 'o': case '&': case 'E':
1208 case 'F': case 's': case 'i': case 'n': case 'X': case 'I':
1209 case 'J': case 'K': case 'L': case 'M': case 'N': case 'O':
1211 #ifdef EXTRA_CONSTRAINT
1212 case 'Q': case 'R': case 'S': case 'T': case 'U':
/* Address constraint: merge in the base-register class.  */
1217 cls
= (int) reg_class_subunion
[cls
][(int) BASE_REG_CLASS
];
/* 'r' or 'g': merge in GENERAL_REGS.  */
1222 cls
= (int) reg_class_subunion
[cls
][(int) GENERAL_REGS
];
/* Machine-specific register-class letter.  */
1226 cls
= (int) reg_class_subunion
[cls
][(int) REG_CLASS_FROM_LETTER (c
)];
1231 /* Those of the registers which are clobbered, but allowed by the
1232 constraints, must be usable as reload registers. So clear them
1233 out of the life information. */
1234 AND_HARD_REG_SET (allowed
, clobbered
);
1235 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1236 if (TEST_HARD_REG_BIT (allowed
, i
))
1238 CLEAR_REGNO_REG_SET (chain
->live_before
, i
);
1239 CLEAR_REGNO_REG_SET (chain
->live_after
, i
);
1247 /* Walk the chain of insns, and determine for each whether it needs reloads
1248 and/or eliminations. Build the corresponding insns_need_reload list, and
1249 set something_needs_elimination as appropriate. */
/* Walk the whole reload_insn_chain and, for each real insn, run
   register elimination and find_reloads, record per-insn flags
   (need_elim / need_reload / need_operand_change), link insns that
   need reloads onto the insns_need_reload list, and set the global
   something_needs_elimination / something_needs_operands_changed.
   GLOBAL is passed through to find_reloads -- presumably nonzero when
   global register allocation has run; confirm against the caller.
   NOTE(review): extraction appears to have dropped some original
   lines here (braces, `chain = *pchain', some conditionals).  */
1251 calculate_needs_all_insns (global
)
1254 struct insn_chain
**pprev_reload
= &insns_need_reload
;
1255 struct insn_chain
**pchain
;
1257 something_needs_elimination
= 0;
1259 for (pchain
= &reload_insn_chain
; *pchain
!= 0; pchain
= &(*pchain
)->next
)
1262 struct insn_chain
*chain
;
1267 /* If this is a label, a JUMP_INSN, or has REG_NOTES (which might
1268 include REG_LABEL), we need to see what effects this has on the
1269 known offsets at labels. */
1271 if (GET_CODE (insn
) == CODE_LABEL
|| GET_CODE (insn
) == JUMP_INSN
1272 || (GET_RTX_CLASS (GET_CODE (insn
)) == 'i'
1273 && REG_NOTES (insn
) != 0))
1274 set_label_offsets (insn
, insn
, 0);
1276 if (GET_RTX_CLASS (GET_CODE (insn
)) == 'i')
/* Save the insn's body so it can be restored if elimination
   replacements must be discarded below.  */
1278 rtx old_body
= PATTERN (insn
);
1279 int old_code
= INSN_CODE (insn
);
1280 rtx old_notes
= REG_NOTES (insn
);
1281 int did_elimination
= 0;
1282 int operands_changed
= 0;
1284 /* If needed, eliminate any eliminable registers. */
1286 did_elimination
= eliminate_regs_in_insn (insn
, 0);
1288 /* Analyze the instruction. */
1289 operands_changed
= find_reloads (insn
, 0, spill_indirect_levels
,
1290 global
, spill_reg_order
);
1292 /* If a no-op set needs more than one reload, this is likely
1293 to be something that needs input address reloads. We
1294 can't get rid of this cleanly later, and it is of no use
1295 anyway, so discard it now.
1296 We only do this when expensive_optimizations is enabled,
1297 since this complements reload inheritance / output
1298 reload deletion, and it can make debugging harder. */
1299 if (flag_expensive_optimizations
&& n_reloads
> 1)
1301 rtx set
= single_set (insn
);
1303 && SET_SRC (set
) == SET_DEST (set
)
1304 && GET_CODE (SET_SRC (set
)) == REG
1305 && REGNO (SET_SRC (set
)) >= FIRST_PSEUDO_REGISTER
)
/* Turn the no-op insn into a deleted note.  */
1307 PUT_CODE (insn
, NOTE
);
1308 NOTE_SOURCE_FILE (insn
) = 0;
1309 NOTE_LINE_NUMBER (insn
) = NOTE_INSN_DELETED
;
1314 update_eliminable_offsets ();
1316 /* Remember for later shortcuts which insns had any reloads or
1317 register eliminations. */
1318 chain
->need_elim
= did_elimination
;
1319 chain
->need_reload
= n_reloads
> 0;
1320 chain
->need_operand_change
= operands_changed
;
1322 /* Discard any register replacements done. */
1323 if (did_elimination
)
1325 obstack_free (&reload_obstack
, reload_firstobj
);
1326 PATTERN (insn
) = old_body
;
1327 INSN_CODE (insn
) = old_code
;
1328 REG_NOTES (insn
) = old_notes
;
1329 something_needs_elimination
= 1;
1332 something_needs_operands_changed
|= operands_changed
;
/* Append this insn to the insns_need_reload list and compute
   its register needs.  */
1336 *pprev_reload
= chain
;
1337 pprev_reload
= &chain
->next_need_reload
;
1339 calculate_needs (chain
);
1346 /* Compute the most additional registers needed by one instruction,
1347 given by CHAIN. Collect information separately for each class of regs.
1349 To compute the number of reload registers of each class needed for an
1350 insn, we must simulate what choose_reload_regs can do. We do this by
1351 splitting an insn into an "input" and an "output" part. RELOAD_OTHER
1352 reloads are used in both. The input part uses those reloads,
1353 RELOAD_FOR_INPUT reloads, which must be live over the entire input section
1354 of reloads, and the maximum of all the RELOAD_FOR_INPUT_ADDRESS and
1355 RELOAD_FOR_OPERAND_ADDRESS reloads, which conflict with the inputs.
1357 The registers needed for output are RELOAD_OTHER and RELOAD_FOR_OUTPUT,
1358 which are live for the entire output portion, and the maximum of all the
1359 RELOAD_FOR_OUTPUT_ADDRESS reloads for each operand.
1361 The total number of registers needed is the maximum of the
1362 inputs and outputs. */
/* Compute, for one insn described by CHAIN, how many reload registers
   and register groups of each class it requires, and store the merged
   result in chain->need.  Reloads are bucketed by their time of use
   (RELOAD_FOR_* values), then the buckets are merged into the maximum
   simultaneous requirement.  See the block comment above.
   NOTE(review): mangled extraction -- several original lines (locals
   such as `i'/`size'/`insn_needs', `break' statements after the switch
   cases, group-vs-nongroup branch) appear to be missing here.  */
1365 calculate_needs (chain
)
1366 struct insn_chain
*chain
;
1370 /* Each `struct needs' corresponds to one RELOAD_... type. */
1374 struct needs output
;
1376 struct needs other_addr
;
1377 struct needs op_addr
;
1378 struct needs op_addr_reload
;
1379 struct needs in_addr
[MAX_RECOG_OPERANDS
];
1380 struct needs in_addr_addr
[MAX_RECOG_OPERANDS
];
1381 struct needs out_addr
[MAX_RECOG_OPERANDS
];
1382 struct needs out_addr_addr
[MAX_RECOG_OPERANDS
];
/* Reset the per-chain group bookkeeping and the local counters.  */
1385 bzero ((char *) chain
->group_size
, sizeof chain
->group_size
);
1386 for (i
= 0; i
< N_REG_CLASSES
; i
++)
1387 chain
->group_mode
[i
] = VOIDmode
;
1388 bzero ((char *) &insn_needs
, sizeof insn_needs
);
1390 /* Count each reload once in every class
1391 containing the reload's own class. */
1393 for (i
= 0; i
< n_reloads
; i
++)
1395 register enum reg_class
*p
;
1396 enum reg_class
class = reload_reg_class
[i
];
1398 enum machine_mode mode
;
1399 struct needs
*this_needs
;
1401 /* Don't count the dummy reloads, for which one of the
1402 regs mentioned in the insn can be used for reloading.
1403 Don't count optional reloads.
1404 Don't count reloads that got combined with others. */
1405 if (reload_reg_rtx
[i
] != 0
1406 || reload_optional
[i
] != 0
1407 || (reload_out
[i
] == 0 && reload_in
[i
] == 0
1408 && ! reload_secondary_p
[i
]))
/* Use the wider of the input and output modes to size the reload.  */
1411 mode
= reload_inmode
[i
];
1412 if (GET_MODE_SIZE (reload_outmode
[i
]) > GET_MODE_SIZE (mode
))
1413 mode
= reload_outmode
[i
];
1414 size
= CLASS_MAX_NREGS (class, mode
);
1416 /* Decide which time-of-use to count this reload for. */
1417 switch (reload_when_needed
[i
])
1420 this_needs
= &insn_needs
.other
;
1422 case RELOAD_FOR_INPUT
:
1423 this_needs
= &insn_needs
.input
;
1425 case RELOAD_FOR_OUTPUT
:
1426 this_needs
= &insn_needs
.output
;
1428 case RELOAD_FOR_INSN
:
1429 this_needs
= &insn_needs
.insn
;
1431 case RELOAD_FOR_OTHER_ADDRESS
:
1432 this_needs
= &insn_needs
.other_addr
;
1434 case RELOAD_FOR_INPUT_ADDRESS
:
1435 this_needs
= &insn_needs
.in_addr
[reload_opnum
[i
]];
1437 case RELOAD_FOR_INPADDR_ADDRESS
:
1438 this_needs
= &insn_needs
.in_addr_addr
[reload_opnum
[i
]];
1440 case RELOAD_FOR_OUTPUT_ADDRESS
:
1441 this_needs
= &insn_needs
.out_addr
[reload_opnum
[i
]];
1443 case RELOAD_FOR_OUTADDR_ADDRESS
:
1444 this_needs
= &insn_needs
.out_addr_addr
[reload_opnum
[i
]];
1446 case RELOAD_FOR_OPERAND_ADDRESS
:
1447 this_needs
= &insn_needs
.op_addr
;
1449 case RELOAD_FOR_OPADDR_ADDR
:
1450 this_needs
= &insn_needs
.op_addr_reload
;
/* Multi-register reload: count it as a group need.  */
1456 enum machine_mode other_mode
, allocate_mode
;
1458 /* Count number of groups needed separately from
1459 number of individual regs needed. */
1460 this_needs
->groups
[(int) class]++;
1461 p
= reg_class_superclasses
[(int) class];
1462 while (*p
!= LIM_REG_CLASSES
)
1463 this_needs
->groups
[(int) *p
++]++;
1465 /* Record size and mode of a group of this class. */
1466 /* If more than one size group is needed,
1467 make all groups the largest needed size. */
1468 if (chain
->group_size
[(int) class] < size
)
1470 other_mode
= chain
->group_mode
[(int) class];
1471 allocate_mode
= mode
;
1473 chain
->group_size
[(int) class] = size
;
1474 chain
->group_mode
[(int) class] = mode
;
1479 allocate_mode
= chain
->group_mode
[(int) class];
1482 /* Crash if two dissimilar machine modes both need
1483 groups of consecutive regs of the same class. */
1485 if (other_mode
!= VOIDmode
&& other_mode
!= allocate_mode
1486 && ! modes_equiv_for_class_p (allocate_mode
,
1488 fatal_insn ("Two dissimilar machine modes both need groups of consecutive regs of the same class",
/* Single-register reload: count as a (possibly nongroup) reg
   need in this class and all its superclasses.  */
1493 this_needs
->regs
[(unsigned char)reload_nongroup
[i
]][(int) class] += 1;
1494 p
= reg_class_superclasses
[(int) class];
1495 while (*p
!= LIM_REG_CLASSES
)
1496 this_needs
->regs
[(unsigned char)reload_nongroup
[i
]][(int) *p
++] += 1;
1502 /* All reloads have been counted for this insn;
1503 now merge the various times of use.
1504 This sets insn_needs, etc., to the maximum total number
1505 of registers needed at any point in this insn. */
1507 for (i
= 0; i
< N_REG_CLASSES
; i
++)
1509 int j
, in_max
, out_max
;
1511 /* Compute normal and nongroup needs. */
1512 for (j
= 0; j
<= 1; j
++)
1515 for (in_max
= 0, out_max
= 0, k
= 0; k
< reload_n_operands
; k
++)
1517 in_max
= MAX (in_max
,
1518 (insn_needs
.in_addr
[k
].regs
[j
][i
]
1519 + insn_needs
.in_addr_addr
[k
].regs
[j
][i
]));
1520 out_max
= MAX (out_max
, insn_needs
.out_addr
[k
].regs
[j
][i
]);
1521 out_max
= MAX (out_max
,
1522 insn_needs
.out_addr_addr
[k
].regs
[j
][i
]);
1525 /* RELOAD_FOR_INSN reloads conflict with inputs, outputs,
1526 and operand addresses but not things used to reload
1527 them. Similarly, RELOAD_FOR_OPERAND_ADDRESS reloads
1528 don't conflict with things needed to reload inputs or
1531 in_max
= MAX (MAX (insn_needs
.op_addr
.regs
[j
][i
],
1532 insn_needs
.op_addr_reload
.regs
[j
][i
]),
1535 out_max
= MAX (out_max
, insn_needs
.insn
.regs
[j
][i
]);
1537 insn_needs
.input
.regs
[j
][i
]
1538 = MAX (insn_needs
.input
.regs
[j
][i
]
1539 + insn_needs
.op_addr
.regs
[j
][i
]
1540 + insn_needs
.insn
.regs
[j
][i
],
1541 in_max
+ insn_needs
.input
.regs
[j
][i
]);
1543 insn_needs
.output
.regs
[j
][i
] += out_max
;
1544 insn_needs
.other
.regs
[j
][i
]
1545 += MAX (MAX (insn_needs
.input
.regs
[j
][i
],
1546 insn_needs
.output
.regs
[j
][i
]),
1547 insn_needs
.other_addr
.regs
[j
][i
]);
1551 /* Now compute group needs. */
1552 for (in_max
= 0, out_max
= 0, j
= 0; j
< reload_n_operands
; j
++)
1554 in_max
= MAX (in_max
, insn_needs
.in_addr
[j
].groups
[i
]);
1555 in_max
= MAX (in_max
, insn_needs
.in_addr_addr
[j
].groups
[i
]);
1556 out_max
= MAX (out_max
, insn_needs
.out_addr
[j
].groups
[i
]);
1557 out_max
= MAX (out_max
, insn_needs
.out_addr_addr
[j
].groups
[i
]);
1560 in_max
= MAX (MAX (insn_needs
.op_addr
.groups
[i
],
1561 insn_needs
.op_addr_reload
.groups
[i
]),
1563 out_max
= MAX (out_max
, insn_needs
.insn
.groups
[i
]);
1565 insn_needs
.input
.groups
[i
]
1566 = MAX (insn_needs
.input
.groups
[i
]
1567 + insn_needs
.op_addr
.groups
[i
]
1568 + insn_needs
.insn
.groups
[i
],
1569 in_max
+ insn_needs
.input
.groups
[i
]);
1571 insn_needs
.output
.groups
[i
] += out_max
;
1572 insn_needs
.other
.groups
[i
]
1573 += MAX (MAX (insn_needs
.input
.groups
[i
],
1574 insn_needs
.output
.groups
[i
]),
1575 insn_needs
.other_addr
.groups
[i
]);
1578 /* Record the needs for later. */
1579 chain
->need
= insn_needs
.other
;
1582 /* Find a group of exactly 2 registers.
1584 First try to fill out the group by spilling a single register which
1585 would allow completion of the group.
1587 Then try to create a new group from a pair of registers, neither of
1588 which are explicitly used.
1590 Then try to create a group from any pair of registers. */
/* Satisfy one need for a group of exactly two registers of CLASS for
   the insn described by CHAIN, spilling a new register via
   new_spill_reg.  Strategy (see comment above): first try to complete
   a group using a neighbor of an already-spilled reg; otherwise start
   a fresh pair.  DUMPFILE is passed through to new_spill_reg.
   NOTE(review): mangled extraction -- locals (`i', `j', `other', `k')
   and some braces/`break's appear to have been dropped.  */
1593 find_tworeg_group (chain
, class, dumpfile
)
1594 struct insn_chain
*chain
;
1599 /* First, look for a register that will complete a group. */
1600 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1604 j
= potential_reload_regs
[i
];
/* J completes a pair if either J-1 or J+1 is already a spill reg,
   both regs are in CLASS, the pair supports the group mode, and the
   neighbor is not already claimed for a group or nongroup need.  */
1605 if (j
>= 0 && ! TEST_HARD_REG_BIT (bad_spill_regs
, j
)
1606 && ((j
> 0 && (other
= j
- 1, spill_reg_order
[other
] >= 0)
1607 && TEST_HARD_REG_BIT (reg_class_contents
[class], j
)
1608 && TEST_HARD_REG_BIT (reg_class_contents
[class], other
)
1609 && HARD_REGNO_MODE_OK (other
, chain
->group_mode
[class])
1610 && ! TEST_HARD_REG_BIT (chain
->counted_for_nongroups
, other
)
1611 /* We don't want one part of another group.
1612 We could get "two groups" that overlap! */
1613 && ! TEST_HARD_REG_BIT (chain
->counted_for_groups
, other
))
1614 || (j
< FIRST_PSEUDO_REGISTER
- 1
1615 && (other
= j
+ 1, spill_reg_order
[other
] >= 0)
1616 && TEST_HARD_REG_BIT (reg_class_contents
[class], j
)
1617 && TEST_HARD_REG_BIT (reg_class_contents
[class], other
)
1618 && HARD_REGNO_MODE_OK (j
, chain
->group_mode
[class])
1619 && ! TEST_HARD_REG_BIT (chain
->counted_for_nongroups
, other
)
1620 && ! TEST_HARD_REG_BIT (chain
->counted_for_groups
, other
))))
1622 register enum reg_class
*p
;
1624 /* We have found one that will complete a group,
1625 so count off one group as provided. */
1626 chain
->need
.groups
[class]--;
1627 p
= reg_class_superclasses
[class];
1628 while (*p
!= LIM_REG_CLASSES
)
1630 if (chain
->group_size
[(int) *p
] <= chain
->group_size
[class])
1631 chain
->need
.groups
[(int) *p
]--;
1635 /* Indicate both these regs are part of a group. */
1636 SET_HARD_REG_BIT (chain
->counted_for_groups
, j
);
1637 SET_HARD_REG_BIT (chain
->counted_for_groups
, other
);
1641 /* We can't complete a group, so start one. */
1642 if (i
== FIRST_PSEUDO_REGISTER
)
1643 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1646 j
= potential_reload_regs
[i
];
1647 /* Verify that J+1 is a potential reload reg. */
1648 for (k
= 0; k
< FIRST_PSEUDO_REGISTER
; k
++)
1649 if (potential_reload_regs
[k
] == j
+ 1)
/* Accept J only if both J and J+1 are free, in CLASS, mode-OK,
   and J+1 is neither counted for nongroups nor a bad spill reg.  */
1651 if (j
>= 0 && j
+ 1 < FIRST_PSEUDO_REGISTER
1652 && k
< FIRST_PSEUDO_REGISTER
1653 && spill_reg_order
[j
] < 0 && spill_reg_order
[j
+ 1] < 0
1654 && TEST_HARD_REG_BIT (reg_class_contents
[class], j
)
1655 && TEST_HARD_REG_BIT (reg_class_contents
[class], j
+ 1)
1656 && HARD_REGNO_MODE_OK (j
, chain
->group_mode
[class])
1657 && ! TEST_HARD_REG_BIT (chain
->counted_for_nongroups
, j
+ 1)
1658 && ! TEST_HARD_REG_BIT (bad_spill_regs
, j
+ 1))
1662 /* I should be the index in potential_reload_regs
1663 of the new reload reg we have found. */
1665 new_spill_reg (chain
, i
, class, 0, dumpfile
);
1668 /* Find a group of more than 2 registers.
1669 Look for a sufficient sequence of unspilled registers, and spill them all
/* Satisfy one need for a group of more than two registers of CLASS for
   the insn described by CHAIN: find a run of group_size[class]
   consecutive spillable registers, spill them all via new_spill_reg,
   and decrement the group counts.  Calls spill_failure if no run
   exists.  DUMPFILE is passed through to new_spill_reg.
   NOTE(review): mangled extraction -- part of the eligibility test at
   original line 1685 (the condition head) and some braces appear to
   have been dropped.  */
1673 find_group (chain
, class, dumpfile
)
1674 struct insn_chain
*chain
;
1680 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1682 int j
= potential_reload_regs
[i
];
/* The whole run J .. J+group_size-1 must fit below
   FIRST_PSEUDO_REGISTER and support the group's mode.  */
1685 && j
+ chain
->group_size
[class] <= FIRST_PSEUDO_REGISTER
1686 && HARD_REGNO_MODE_OK (j
, chain
->group_mode
[class]))
1689 /* Check each reg in the sequence. */
1690 for (k
= 0; k
< chain
->group_size
[class]; k
++)
1691 if (! (spill_reg_order
[j
+ k
] < 0
1692 && ! TEST_HARD_REG_BIT (bad_spill_regs
, j
+ k
)
1693 && TEST_HARD_REG_BIT (reg_class_contents
[class], j
+ k
)))
1695 /* We got a full sequence, so spill them all. */
1696 if (k
== chain
->group_size
[class])
1698 register enum reg_class
*p
;
1699 for (k
= 0; k
< chain
->group_size
[class]; k
++)
1702 SET_HARD_REG_BIT (chain
->counted_for_groups
, j
+ k
);
/* new_spill_reg wants the index into potential_reload_regs,
   not the register number itself -- find it.  */
1703 for (idx
= 0; idx
< FIRST_PSEUDO_REGISTER
; idx
++)
1704 if (potential_reload_regs
[idx
] == j
+ k
)
1706 new_spill_reg (chain
, idx
, class, 0, dumpfile
);
1709 /* We have found one that will complete a group,
1710 so count off one group as provided. */
1711 chain
->need
.groups
[class]--;
1712 p
= reg_class_superclasses
[class];
1713 while (*p
!= LIM_REG_CLASSES
)
1715 if (chain
->group_size
[(int) *p
]
1716 <= chain
->group_size
[class])
1717 chain
->need
.groups
[(int) *p
]--;
1724 /* There are no groups left. */
1725 spill_failure (chain
->insn
);
1729 /* If pseudo REG conflicts with one of our reload registers, mark it as
/* If pseudo register REG is allocated to a hard register whose span
   (r .. r+nregs-1) overlaps any chosen spill reg, add REG to the
   spilled_pseudos set so it will be kicked out of that hard reg.
   NOTE(review): the early-out for unallocated pseudos (r < 0) is not
   visible here -- presumably dropped by the extraction; confirm.  */
1732 maybe_mark_pseudo_spilled (reg
)
1736 int r
= reg_renumber
[reg
];
1741 nregs
= HARD_REGNO_NREGS (r
, PSEUDO_REGNO_MODE (reg
));
1742 for (i
= 0; i
< n_spills
; i
++)
/* True when spill_regs[i] lies inside [r, r+nregs).  */
1743 if (r
<= spill_regs
[i
] && r
+ nregs
> spill_regs
[i
])
1745 SET_REGNO_REG_SET (spilled_pseudos
, reg
);
1750 /* Find more reload regs to satisfy the remaining need of an insn, which
1752 Do it by ascending class number, since otherwise a reg
1753 might be spilled for a big class and might fail to count
1754 for a smaller class even though it belongs to that class.
1756 Count spilled regs in `spills', and add entries to
1757 `spill_regs' and `spill_reg_order'.
1759 ??? Note there is a problem here.
1760 When there is a need for a group in a high-numbered class,
1761 and also need for non-group regs that come from a lower class,
1762 the non-group regs are chosen first. If there aren't many regs,
1763 they might leave no room for a group.
1765 This was happening on the 386. To fix it, we added the code
1766 that calls possible_group_p, so that the lower class won't
1767 break up the last possible group.
1769 Really fixing the problem would require changes above
1770 in counting the regs already spilled, and in choose_reload_regs.
1771 It might be hard to avoid introducing bugs there. */
/* Find spill registers to satisfy the remaining needs of the insn
   described by CHAIN (see the long comment above for the algorithm and
   its known group-vs-nongroup ordering problem).  Groups are satisfied
   first, then single and nongroup registers, by ascending class.
   Finally marks the pseudos living in the chosen regs as spilled and
   merges the chain's spill regs into used_spill_regs.
   NOTE(review): mangled extraction -- loop-breaking conditions and
   some braces appear to be missing in places below.  */
1774 find_reload_regs (chain
, dumpfile
)
1775 struct insn_chain
*chain
;
/* Aliases into chain->need: group, ordinary, and nongroup counts.  */
1779 short *group_needs
= chain
->need
.groups
;
1780 short *simple_needs
= chain
->need
.regs
[0];
1781 short *nongroup_needs
= chain
->need
.regs
[1];
1784 fprintf (dumpfile
, "Spilling for insn %d.\n", INSN_UID (chain
->insn
));
1786 /* Compute the order of preference for hard registers to spill.
1787 Store them by decreasing preference in potential_reload_regs. */
1789 order_regs_for_reload (chain
);
1791 /* So far, no hard regs have been spilled. */
1793 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1794 spill_reg_order
[i
] = -1;
1796 CLEAR_HARD_REG_SET (chain
->used_spill_regs
);
1797 CLEAR_HARD_REG_SET (chain
->counted_for_groups
);
1798 CLEAR_HARD_REG_SET (chain
->counted_for_nongroups
);
1800 for (class = 0; class < N_REG_CLASSES
; class++)
1802 /* First get the groups of registers.
1803 If we got single registers first, we might fragment
1805 while (group_needs
[class] > 0)
1807 /* If any single spilled regs happen to form groups,
1808 count them now. Maybe we don't really need
1809 to spill another group. */
1810 count_possible_groups (chain
, class);
1812 if (group_needs
[class] <= 0)
1815 /* Groups of size 2, the only groups used on most machines,
1816 are treated specially. */
1817 if (chain
->group_size
[class] == 2)
1818 find_tworeg_group (chain
, class, dumpfile
);
1820 find_group (chain
, class, dumpfile
);
1825 /* Now similarly satisfy all need for single registers. */
1827 while (simple_needs
[class] > 0 || nongroup_needs
[class] > 0)
1829 /* If we spilled enough regs, but they weren't counted
1830 against the non-group need, see if we can count them now.
1831 If so, we can avoid some actual spilling. */
1832 if (simple_needs
[class] <= 0 && nongroup_needs
[class] > 0)
1833 for (i
= 0; i
< n_spills
; i
++)
1835 int regno
= spill_regs
[i
];
1836 if (TEST_HARD_REG_BIT (reg_class_contents
[class], regno
)
1837 && !TEST_HARD_REG_BIT (chain
->counted_for_groups
, regno
)
1838 && !TEST_HARD_REG_BIT (chain
->counted_for_nongroups
, regno
)
1839 && nongroup_needs
[class] > 0)
1841 register enum reg_class
*p
;
1843 SET_HARD_REG_BIT (chain
->counted_for_nongroups
, regno
);
1844 nongroup_needs
[class]--;
1845 p
= reg_class_superclasses
[class];
1846 while (*p
!= LIM_REG_CLASSES
)
1847 nongroup_needs
[(int) *p
++]--;
1851 if (simple_needs
[class] <= 0 && nongroup_needs
[class] <= 0)
1854 /* Consider the potential reload regs that aren't
1855 yet in use as reload regs, in order of preference.
1856 Find the most preferred one that's in this class. */
1858 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1860 int regno
= potential_reload_regs
[i
];
1862 && TEST_HARD_REG_BIT (reg_class_contents
[class], regno
)
1863 /* If this reg will not be available for groups,
1864 pick one that does not foreclose possible groups.
1865 This is a kludge, and not very general,
1866 but it should be sufficient to make the 386 work,
1867 and the problem should not occur on machines with
1869 && (nongroup_needs
[class] == 0
1870 || possible_group_p (chain
, regno
)))
1874 /* If we couldn't get a register, try to get one even if we
1875 might foreclose possible groups. This may cause problems
1876 later, but that's better than aborting now, since it is
1877 possible that we will, in fact, be able to form the needed
1878 group even with this allocation. */
1880 if (i
>= FIRST_PSEUDO_REGISTER
1881 && asm_noperands (chain
->insn
) < 0)
1882 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1883 if (potential_reload_regs
[i
] >= 0
1884 && TEST_HARD_REG_BIT (reg_class_contents
[class],
1885 potential_reload_regs
[i
]))
1888 /* I should be the index in potential_reload_regs
1889 of the new reload reg we have found. */
1891 new_spill_reg (chain
, i
, class, 1, dumpfile
);
1897 /* We know which hard regs to use, now mark the pseudos that live in them
1898 as needing to be kicked out. */
1899 EXECUTE_IF_SET_IN_REG_SET
1900 (chain
->live_before
, FIRST_PSEUDO_REGISTER
, i
,
1902 maybe_mark_pseudo_spilled (i
);
1904 EXECUTE_IF_SET_IN_REG_SET
1905 (chain
->live_after
, FIRST_PSEUDO_REGISTER
, i
,
1907 maybe_mark_pseudo_spilled (i
);
1910 IOR_HARD_REG_SET (used_spill_regs
, chain
->used_spill_regs
);
/* Write a human-readable summary of CHAIN's register needs (ordinary
   regs, nongroup regs, and groups, per class) to the dump file.
   NOTE(review): the fprintf call heads are not visible in this
   extraction -- only the format strings and arguments remain;
   presumably each format string belongs to an fprintf (dumpfile, ...).  */
1914 dump_needs (chain
, dumpfile
)
1915 struct insn_chain
*chain
;
1918 static char *reg_class_names
[] = REG_CLASS_NAMES
;
1920 struct needs
*n
= &chain
->need
;
1922 for (i
= 0; i
< N_REG_CLASSES
; i
++)
/* Ordinary (regs[i][0]) needs.  */
1924 if (n
->regs
[i
][0] > 0)
1926 ";; Need %d reg%s of class %s.\n",
1927 n
->regs
[i
][0], n
->regs
[i
][0] == 1 ? "" : "s",
1928 reg_class_names
[i
]);
/* Nongroup (regs[i][1]) needs.  */
1929 if (n
->regs
[i
][1] > 0)
1931 ";; Need %d nongroup reg%s of class %s.\n",
1932 n
->regs
[i
][1], n
->regs
[i
][1] == 1 ? "" : "s",
1933 reg_class_names
[i
]);
/* Group needs, with the group's machine mode.  */
1934 if (n
->groups
[i
] > 0)
1936 ";; Need %d group%s (%smode) of class %s.\n",
1937 n
->groups
[i
], n
->groups
[i
] == 1 ? "" : "s",
1938 mode_name
[(int) chain
->group_mode
[i
]],
1939 reg_class_names
[i
]);
1943 /* Delete all insns that were inserted by emit_caller_save_insns during
1946 delete_caller_save_insns ()
1948 struct insn_chain
*c
= reload_insn_chain
;
1952 while (c
!= 0 && c
->is_caller_save_insn
)
1954 struct insn_chain
*next
= c
->next
;
1957 if (insn
== basic_block_head
[c
->block
])
1958 basic_block_head
[c
->block
] = NEXT_INSN (insn
);
1959 if (insn
== basic_block_end
[c
->block
])
1960 basic_block_end
[c
->block
] = PREV_INSN (insn
);
1961 if (c
== reload_insn_chain
)
1962 reload_insn_chain
= next
;
1964 if (NEXT_INSN (insn
) != 0)
1965 PREV_INSN (NEXT_INSN (insn
)) = PREV_INSN (insn
);
1966 if (PREV_INSN (insn
) != 0)
1967 NEXT_INSN (PREV_INSN (insn
)) = NEXT_INSN (insn
);
1970 next
->prev
= c
->prev
;
1972 c
->prev
->next
= next
;
1973 c
->next
= unused_insn_chains
;
1974 unused_insn_chains
= c
;
1982 /* Nonzero if, after spilling reg REGNO for non-groups,
1983 it will still be possible to find a group if we still need one. */
1986 possible_group_p (chain
, regno
)
1987 struct insn_chain
*chain
;
1991 int class = (int) NO_REGS
;
1993 for (i
= 0; i
< (int) N_REG_CLASSES
; i
++)
1994 if (chain
->need
.groups
[i
] > 0)
2000 if (class == (int) NO_REGS
)
2003 /* Consider each pair of consecutive registers. */
2004 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
- 1; i
++)
2006 /* Ignore pairs that include reg REGNO. */
2007 if (i
== regno
|| i
+ 1 == regno
)
2010 /* Ignore pairs that are outside the class that needs the group.
2011 ??? Here we fail to handle the case where two different classes
2012 independently need groups. But this never happens with our
2013 current machine descriptions. */
2014 if (! (TEST_HARD_REG_BIT (reg_class_contents
[class], i
)
2015 && TEST_HARD_REG_BIT (reg_class_contents
[class], i
+ 1)))
2018 /* A pair of consecutive regs we can still spill does the trick. */
2019 if (spill_reg_order
[i
] < 0 && spill_reg_order
[i
+ 1] < 0
2020 && ! TEST_HARD_REG_BIT (bad_spill_regs
, i
)
2021 && ! TEST_HARD_REG_BIT (bad_spill_regs
, i
+ 1))
2024 /* A pair of one already spilled and one we can spill does it
2025 provided the one already spilled is not otherwise reserved. */
2026 if (spill_reg_order
[i
] < 0
2027 && ! TEST_HARD_REG_BIT (bad_spill_regs
, i
)
2028 && spill_reg_order
[i
+ 1] >= 0
2029 && ! TEST_HARD_REG_BIT (chain
->counted_for_groups
, i
+ 1)
2030 && ! TEST_HARD_REG_BIT (chain
->counted_for_nongroups
, i
+ 1))
2032 if (spill_reg_order
[i
+ 1] < 0
2033 && ! TEST_HARD_REG_BIT (bad_spill_regs
, i
+ 1)
2034 && spill_reg_order
[i
] >= 0
2035 && ! TEST_HARD_REG_BIT (chain
->counted_for_groups
, i
)
2036 && ! TEST_HARD_REG_BIT (chain
->counted_for_nongroups
, i
))
2043 /* Count any groups of CLASS that can be formed from the registers recently
2047 count_possible_groups (chain
, class)
2048 struct insn_chain
*chain
;
2054 /* Now find all consecutive groups of spilled registers
2055 and mark each group off against the need for such groups.
2056 But don't count them against ordinary need, yet. */
2058 if (chain
->group_size
[class] == 0)
2061 CLEAR_HARD_REG_SET (new);
2063 /* Make a mask of all the regs that are spill regs in class I. */
2064 for (i
= 0; i
< n_spills
; i
++)
2066 int regno
= spill_regs
[i
];
2068 if (TEST_HARD_REG_BIT (reg_class_contents
[class], regno
)
2069 && ! TEST_HARD_REG_BIT (chain
->counted_for_groups
, regno
)
2070 && ! TEST_HARD_REG_BIT (chain
->counted_for_nongroups
, regno
))
2071 SET_HARD_REG_BIT (new, regno
);
2074 /* Find each consecutive group of them. */
2075 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
&& chain
->need
.groups
[class] > 0; i
++)
2076 if (TEST_HARD_REG_BIT (new, i
)
2077 && i
+ chain
->group_size
[class] <= FIRST_PSEUDO_REGISTER
2078 && HARD_REGNO_MODE_OK (i
, chain
->group_mode
[class]))
2080 for (j
= 1; j
< chain
->group_size
[class]; j
++)
2081 if (! TEST_HARD_REG_BIT (new, i
+ j
))
2084 if (j
== chain
->group_size
[class])
2086 /* We found a group. Mark it off against this class's need for
2087 groups, and against each superclass too. */
2088 register enum reg_class
*p
;
2090 chain
->need
.groups
[class]--;
2091 p
= reg_class_superclasses
[class];
2092 while (*p
!= LIM_REG_CLASSES
)
2094 if (chain
->group_size
[(int) *p
] <= chain
->group_size
[class])
2095 chain
->need
.groups
[(int) *p
]--;
2099 /* Don't count these registers again. */
2100 for (j
= 0; j
< chain
->group_size
[class]; j
++)
2101 SET_HARD_REG_BIT (chain
->counted_for_groups
, i
+ j
);
2104 /* Skip to the last reg in this group. When i is incremented above,
2105 it will then point to the first reg of the next possible group. */
2110 /* ALLOCATE_MODE is a register mode that needs to be reloaded. OTHER_MODE is
2111 another mode that needs to be reloaded for the same register class CLASS.
2112 If any reg in CLASS allows ALLOCATE_MODE but not OTHER_MODE, fail.
2113 ALLOCATE_MODE will never be smaller than OTHER_MODE.
2115 This code used to also fail if any reg in CLASS allows OTHER_MODE but not
2116 ALLOCATE_MODE. This test is unnecessary, because we will never try to put
2117 something of mode ALLOCATE_MODE into an OTHER_MODE register. Testing this
2118 causes unnecessary failures on machines requiring alignment of register
2119 groups when the two modes are different sizes, because the larger mode has
2120 more strict alignment rules than the smaller mode. */
2123 modes_equiv_for_class_p (allocate_mode
, other_mode
, class)
2124 enum machine_mode allocate_mode
, other_mode
;
2125 enum reg_class
class;
2128 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
2130 if (TEST_HARD_REG_BIT (reg_class_contents
[(int) class], regno
)
2131 && HARD_REGNO_MODE_OK (regno
, allocate_mode
)
2132 && ! HARD_REGNO_MODE_OK (regno
, other_mode
))
2138 /* Handle the failure to find a register to spill.
2139 INSN should be one of the insns which needed this particular spill reg. */
2142 spill_failure (insn
)
2145 if (asm_noperands (PATTERN (insn
)) >= 0)
2146 error_for_asm (insn
, "`asm' needs too many reloads");
2148 fatal_insn ("Unable to find a register to spill.", insn
);
2151 /* Add a new register to the tables of available spill-registers.
2152 CHAIN is the insn for which the register will be used; we decrease the
2154 I is the index of this register in potential_reload_regs.
2155 CLASS is the regclass whose need is being satisfied.
2156 NONGROUP is 0 if this register is part of a group.
2157 DUMPFILE is the same as the one that `reload' got. */
2160 new_spill_reg (chain
, i
, class, nongroup
, dumpfile
)
2161 struct insn_chain
*chain
;
2167 register enum reg_class
*p
;
2168 int regno
= potential_reload_regs
[i
];
2170 if (i
>= FIRST_PSEUDO_REGISTER
)
2172 spill_failure (chain
->insn
);
2177 if (TEST_HARD_REG_BIT (bad_spill_regs
, regno
))
2179 static char *reg_class_names
[] = REG_CLASS_NAMES
;
2181 if (asm_noperands (PATTERN (chain
->insn
)) < 0)
2183 /* The error message is still correct - we know only that it wasn't
2184 an asm statement that caused the problem, but one of the global
2185 registers declared by the users might have screwed us. */
2186 error ("fixed or forbidden register %d (%s) was spilled for class %s.",
2187 regno
, reg_names
[regno
], reg_class_names
[class]);
2188 error ("This may be due to a compiler bug or to impossible asm");
2189 error ("statements or clauses.");
2190 fatal_insn ("This is the instruction:", chain
->insn
);
2192 error_for_asm (chain
->insn
, "Invalid `asm' statement:");
2193 error_for_asm (chain
->insn
,
2194 "fixed or forbidden register %d (%s) was spilled for class %s.",
2195 regno
, reg_names
[regno
], reg_class_names
[class]);
2200 /* Make reg REGNO an additional reload reg. */
2202 potential_reload_regs
[i
] = -1;
2203 spill_regs
[n_spills
] = regno
;
2204 spill_reg_order
[regno
] = n_spills
;
2206 fprintf (dumpfile
, "Spilling reg %d.\n", regno
);
2207 SET_HARD_REG_BIT (chain
->used_spill_regs
, regno
);
2209 /* Clear off the needs we just satisfied. */
2211 chain
->need
.regs
[0][class]--;
2212 p
= reg_class_superclasses
[class];
2213 while (*p
!= LIM_REG_CLASSES
)
2214 chain
->need
.regs
[0][(int) *p
++]--;
2216 if (nongroup
&& chain
->need
.regs
[1][class] > 0)
2218 SET_HARD_REG_BIT (chain
->counted_for_nongroups
, regno
);
2219 chain
->need
.regs
[1][class]--;
2220 p
= reg_class_superclasses
[class];
2221 while (*p
!= LIM_REG_CLASSES
)
2222 chain
->need
.regs
[1][(int) *p
++]--;
2228 /* Delete an unneeded INSN and any previous insns who sole purpose is loading
2229 data that is dead in INSN. */
2232 delete_dead_insn (insn
)
2235 rtx prev
= prev_real_insn (insn
);
2238 /* If the previous insn sets a register that dies in our insn, delete it
2240 if (prev
&& GET_CODE (PATTERN (prev
)) == SET
2241 && (prev_dest
= SET_DEST (PATTERN (prev
)), GET_CODE (prev_dest
) == REG
)
2242 && reg_mentioned_p (prev_dest
, PATTERN (insn
))
2243 && find_regno_note (insn
, REG_DEAD
, REGNO (prev_dest
))
2244 && ! side_effects_p (SET_SRC (PATTERN (prev
))))
2245 delete_dead_insn (prev
);
2247 PUT_CODE (insn
, NOTE
);
2248 NOTE_LINE_NUMBER (insn
) = NOTE_INSN_DELETED
;
2249 NOTE_SOURCE_FILE (insn
) = 0;
2252 /* Modify the home of pseudo-reg I.
2253 The new home is present in reg_renumber[I].
2255 FROM_REG may be the hard reg that the pseudo-reg is being spilled from;
2256 or it may be -1, meaning there is none or it is not relevant.
2257 This is used so that all pseudos spilled from a given hard reg
2258 can share one stack slot. */
2261 alter_reg (i
, from_reg
)
2265 /* When outputting an inline function, this can happen
2266 for a reg that isn't actually used. */
2267 if (regno_reg_rtx
[i
] == 0)
2270 /* If the reg got changed to a MEM at rtl-generation time,
2272 if (GET_CODE (regno_reg_rtx
[i
]) != REG
)
2275 /* Modify the reg-rtx to contain the new hard reg
2276 number or else to contain its pseudo reg number. */
2277 REGNO (regno_reg_rtx
[i
])
2278 = reg_renumber
[i
] >= 0 ? reg_renumber
[i
] : i
;
2280 /* If we have a pseudo that is needed but has no hard reg or equivalent,
2281 allocate a stack slot for it. */
2283 if (reg_renumber
[i
] < 0
2284 && REG_N_REFS (i
) > 0
2285 && reg_equiv_constant
[i
] == 0
2286 && reg_equiv_memory_loc
[i
] == 0)
2289 int inherent_size
= PSEUDO_REGNO_BYTES (i
);
2290 int total_size
= MAX (inherent_size
, reg_max_ref_width
[i
]);
2293 /* Each pseudo reg has an inherent size which comes from its own mode,
2294 and a total size which provides room for paradoxical subregs
2295 which refer to the pseudo reg in wider modes.
2297 We can use a slot already allocated if it provides both
2298 enough inherent space and enough total space.
2299 Otherwise, we allocate a new slot, making sure that it has no less
2300 inherent space, and no less total space, then the previous slot. */
2303 /* No known place to spill from => no slot to reuse. */
2304 x
= assign_stack_local (GET_MODE (regno_reg_rtx
[i
]), total_size
,
2305 inherent_size
== total_size
? 0 : -1);
2306 if (BYTES_BIG_ENDIAN
)
2307 /* Cancel the big-endian correction done in assign_stack_local.
2308 Get the address of the beginning of the slot.
2309 This is so we can do a big-endian correction unconditionally
2311 adjust
= inherent_size
- total_size
;
2313 RTX_UNCHANGING_P (x
) = RTX_UNCHANGING_P (regno_reg_rtx
[i
]);
2315 /* Reuse a stack slot if possible. */
2316 else if (spill_stack_slot
[from_reg
] != 0
2317 && spill_stack_slot_width
[from_reg
] >= total_size
2318 && (GET_MODE_SIZE (GET_MODE (spill_stack_slot
[from_reg
]))
2320 x
= spill_stack_slot
[from_reg
];
2321 /* Allocate a bigger slot. */
2324 /* Compute maximum size needed, both for inherent size
2325 and for total size. */
2326 enum machine_mode mode
= GET_MODE (regno_reg_rtx
[i
]);
2328 if (spill_stack_slot
[from_reg
])
2330 if (GET_MODE_SIZE (GET_MODE (spill_stack_slot
[from_reg
]))
2332 mode
= GET_MODE (spill_stack_slot
[from_reg
]);
2333 if (spill_stack_slot_width
[from_reg
] > total_size
)
2334 total_size
= spill_stack_slot_width
[from_reg
];
2336 /* Make a slot with that size. */
2337 x
= assign_stack_local (mode
, total_size
,
2338 inherent_size
== total_size
? 0 : -1);
2340 if (BYTES_BIG_ENDIAN
)
2342 /* Cancel the big-endian correction done in assign_stack_local.
2343 Get the address of the beginning of the slot.
2344 This is so we can do a big-endian correction unconditionally
2346 adjust
= GET_MODE_SIZE (mode
) - total_size
;
2348 stack_slot
= gen_rtx_MEM (mode_for_size (total_size
2351 plus_constant (XEXP (x
, 0), adjust
));
2353 spill_stack_slot
[from_reg
] = stack_slot
;
2354 spill_stack_slot_width
[from_reg
] = total_size
;
2357 /* On a big endian machine, the "address" of the slot
2358 is the address of the low part that fits its inherent mode. */
2359 if (BYTES_BIG_ENDIAN
&& inherent_size
< total_size
)
2360 adjust
+= (total_size
- inherent_size
);
2362 /* If we have any adjustment to make, or if the stack slot is the
2363 wrong mode, make a new stack slot. */
2364 if (adjust
!= 0 || GET_MODE (x
) != GET_MODE (regno_reg_rtx
[i
]))
2366 x
= gen_rtx_MEM (GET_MODE (regno_reg_rtx
[i
]),
2367 plus_constant (XEXP (x
, 0), adjust
));
2369 /* If this was shared among registers, must ensure we never
2370 set it readonly since that can cause scheduling
2371 problems. Note we would only have in this adjustment
2372 case in any event, since the code above doesn't set it. */
2375 RTX_UNCHANGING_P (x
) = RTX_UNCHANGING_P (regno_reg_rtx
[i
]);
2378 /* Save the stack slot for later. */
2379 reg_equiv_memory_loc
[i
] = x
;
2383 /* Mark the slots in regs_ever_live for the hard regs
2384 used by pseudo-reg number REGNO. */
2387 mark_home_live (regno
)
2390 register int i
, lim
;
2391 i
= reg_renumber
[regno
];
2394 lim
= i
+ HARD_REGNO_NREGS (i
, PSEUDO_REGNO_MODE (regno
));
2396 regs_ever_live
[i
++] = 1;
2399 /* This function handles the tracking of elimination offsets around branches.
2401 X is a piece of RTL being scanned.
2403 INSN is the insn that it came from, if any.
2405 INITIAL_P is non-zero if we are to set the offset to be the initial
2406 offset and zero if we are setting the offset of the label to be the
2410 set_label_offsets (x
, insn
, initial_p
)
2415 enum rtx_code code
= GET_CODE (x
);
2418 struct elim_table
*p
;
2423 if (LABEL_REF_NONLOCAL_P (x
))
2428 /* ... fall through ... */
2431 /* If we know nothing about this label, set the desired offsets. Note
2432 that this sets the offset at a label to be the offset before a label
2433 if we don't know anything about the label. This is not correct for
2434 the label after a BARRIER, but is the best guess we can make. If
2435 we guessed wrong, we will suppress an elimination that might have
2436 been possible had we been able to guess correctly. */
2438 if (! offsets_known_at
[CODE_LABEL_NUMBER (x
)])
2440 for (i
= 0; i
< NUM_ELIMINABLE_REGS
; i
++)
2441 offsets_at
[CODE_LABEL_NUMBER (x
)][i
]
2442 = (initial_p
? reg_eliminate
[i
].initial_offset
2443 : reg_eliminate
[i
].offset
);
2444 offsets_known_at
[CODE_LABEL_NUMBER (x
)] = 1;
2447 /* Otherwise, if this is the definition of a label and it is
2448 preceded by a BARRIER, set our offsets to the known offset of
2452 && (tem
= prev_nonnote_insn (insn
)) != 0
2453 && GET_CODE (tem
) == BARRIER
)
2454 set_offsets_for_label (insn
);
2456 /* If neither of the above cases is true, compare each offset
2457 with those previously recorded and suppress any eliminations
2458 where the offsets disagree. */
2460 for (i
= 0; i
< NUM_ELIMINABLE_REGS
; i
++)
2461 if (offsets_at
[CODE_LABEL_NUMBER (x
)][i
]
2462 != (initial_p
? reg_eliminate
[i
].initial_offset
2463 : reg_eliminate
[i
].offset
))
2464 reg_eliminate
[i
].can_eliminate
= 0;
2469 set_label_offsets (PATTERN (insn
), insn
, initial_p
);
2471 /* ... fall through ... */
2475 /* Any labels mentioned in REG_LABEL notes can be branched to indirectly
2476 and hence must have all eliminations at their initial offsets. */
2477 for (tem
= REG_NOTES (x
); tem
; tem
= XEXP (tem
, 1))
2478 if (REG_NOTE_KIND (tem
) == REG_LABEL
)
2479 set_label_offsets (XEXP (tem
, 0), insn
, 1);
2484 /* Each of the labels in the address vector must be at their initial
2485 offsets. We want the first field for ADDR_VEC and the second
2486 field for ADDR_DIFF_VEC. */
2488 for (i
= 0; i
< (unsigned) XVECLEN (x
, code
== ADDR_DIFF_VEC
); i
++)
2489 set_label_offsets (XVECEXP (x
, code
== ADDR_DIFF_VEC
, i
),
2494 /* We only care about setting PC. If the source is not RETURN,
2495 IF_THEN_ELSE, or a label, disable any eliminations not at
2496 their initial offsets. Similarly if any arm of the IF_THEN_ELSE
2497 isn't one of those possibilities. For branches to a label,
2498 call ourselves recursively.
2500 Note that this can disable elimination unnecessarily when we have
2501 a non-local goto since it will look like a non-constant jump to
2502 someplace in the current function. This isn't a significant
2503 problem since such jumps will normally be when all elimination
2504 pairs are back to their initial offsets. */
2506 if (SET_DEST (x
) != pc_rtx
)
2509 switch (GET_CODE (SET_SRC (x
)))
2516 set_label_offsets (XEXP (SET_SRC (x
), 0), insn
, initial_p
);
2520 tem
= XEXP (SET_SRC (x
), 1);
2521 if (GET_CODE (tem
) == LABEL_REF
)
2522 set_label_offsets (XEXP (tem
, 0), insn
, initial_p
);
2523 else if (GET_CODE (tem
) != PC
&& GET_CODE (tem
) != RETURN
)
2526 tem
= XEXP (SET_SRC (x
), 2);
2527 if (GET_CODE (tem
) == LABEL_REF
)
2528 set_label_offsets (XEXP (tem
, 0), insn
, initial_p
);
2529 else if (GET_CODE (tem
) != PC
&& GET_CODE (tem
) != RETURN
)
2537 /* If we reach here, all eliminations must be at their initial
2538 offset because we are doing a jump to a variable address. */
2539 for (p
= reg_eliminate
; p
< ®_eliminate
[NUM_ELIMINABLE_REGS
]; p
++)
2540 if (p
->offset
!= p
->initial_offset
)
2541 p
->can_eliminate
= 0;
2549 /* Used for communication between the next two function to properly share
2550 the vector for an ASM_OPERANDS. */
2552 static struct rtvec_def
*old_asm_operands_vec
, *new_asm_operands_vec
;
2554 /* Scan X and replace any eliminable registers (such as fp) with a
2555 replacement (such as sp), plus an offset.
2557 MEM_MODE is the mode of an enclosing MEM. We need this to know how
2558 much to adjust a register for, e.g., PRE_DEC. Also, if we are inside a
2559 MEM, we are allowed to replace a sum of a register and the constant zero
2560 with the register, which we cannot do outside a MEM. In addition, we need
2561 to record the fact that a register is referenced outside a MEM.
2563 If INSN is an insn, it is the insn containing X. If we replace a REG
2564 in a SET_DEST with an equivalent MEM and INSN is non-zero, write a
2565 CLOBBER of the pseudo after INSN so find_equiv_regs will know that
2566 the REG is being modified.
2568 Alternatively, INSN may be a note (an EXPR_LIST or INSN_LIST).
2569 That's used when we eliminate in expressions stored in notes.
2570 This means, do not set ref_outside_mem even if the reference
2573 If we see a modification to a register we know about, take the
2574 appropriate action (see case SET, below).
2576 REG_EQUIV_MEM and REG_EQUIV_ADDRESS contain address that have had
2577 replacements done assuming all offsets are at their initial values. If
2578 they are not, or if REG_EQUIV_ADDRESS is nonzero for a pseudo we
2579 encounter, return the actual location so that find_reloads will do
2580 the proper thing. */
2583 eliminate_regs (x
, mem_mode
, insn
)
2585 enum machine_mode mem_mode
;
2588 enum rtx_code code
= GET_CODE (x
);
2589 struct elim_table
*ep
;
2612 /* This is only for the benefit of the debugging backends, which call
2613 eliminate_regs on DECL_RTL; any ADDRESSOFs in the actual insns are
2614 removed after CSE. */
2615 new = eliminate_regs (XEXP (x
, 0), 0, insn
);
2616 if (GET_CODE (new) == MEM
)
2617 return XEXP (new, 0);
2623 /* First handle the case where we encounter a bare register that
2624 is eliminable. Replace it with a PLUS. */
2625 if (regno
< FIRST_PSEUDO_REGISTER
)
2627 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
];
2629 if (ep
->from_rtx
== x
&& ep
->can_eliminate
)
2632 /* Refs inside notes don't count for this purpose. */
2633 && ! (insn
!= 0 && (GET_CODE (insn
) == EXPR_LIST
2634 || GET_CODE (insn
) == INSN_LIST
)))
2635 ep
->ref_outside_mem
= 1;
2636 return plus_constant (ep
->to_rtx
, ep
->previous_offset
);
2643 /* If this is the sum of an eliminable register and a constant, rework
2645 if (GET_CODE (XEXP (x
, 0)) == REG
2646 && REGNO (XEXP (x
, 0)) < FIRST_PSEUDO_REGISTER
2647 && CONSTANT_P (XEXP (x
, 1)))
2649 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
];
2651 if (ep
->from_rtx
== XEXP (x
, 0) && ep
->can_eliminate
)
2654 /* Refs inside notes don't count for this purpose. */
2655 && ! (insn
!= 0 && (GET_CODE (insn
) == EXPR_LIST
2656 || GET_CODE (insn
) == INSN_LIST
)))
2657 ep
->ref_outside_mem
= 1;
2659 /* The only time we want to replace a PLUS with a REG (this
2660 occurs when the constant operand of the PLUS is the negative
2661 of the offset) is when we are inside a MEM. We won't want
2662 to do so at other times because that would change the
2663 structure of the insn in a way that reload can't handle.
2664 We special-case the commonest situation in
2665 eliminate_regs_in_insn, so just replace a PLUS with a
2666 PLUS here, unless inside a MEM. */
2667 if (mem_mode
!= 0 && GET_CODE (XEXP (x
, 1)) == CONST_INT
2668 && INTVAL (XEXP (x
, 1)) == - ep
->previous_offset
)
2671 return gen_rtx_PLUS (Pmode
, ep
->to_rtx
,
2672 plus_constant (XEXP (x
, 1),
2673 ep
->previous_offset
));
2676 /* If the register is not eliminable, we are done since the other
2677 operand is a constant. */
2681 /* If this is part of an address, we want to bring any constant to the
2682 outermost PLUS. We will do this by doing register replacement in
2683 our operands and seeing if a constant shows up in one of them.
2685 We assume here this is part of an address (or a "load address" insn)
2686 since an eliminable register is not likely to appear in any other
2689 If we have (plus (eliminable) (reg)), we want to produce
2690 (plus (plus (replacement) (reg) (const))). If this was part of a
2691 normal add insn, (plus (replacement) (reg)) will be pushed as a
2692 reload. This is the desired action. */
2695 rtx new0
= eliminate_regs (XEXP (x
, 0), mem_mode
, insn
);
2696 rtx new1
= eliminate_regs (XEXP (x
, 1), mem_mode
, insn
);
2698 if (new0
!= XEXP (x
, 0) || new1
!= XEXP (x
, 1))
2700 /* If one side is a PLUS and the other side is a pseudo that
2701 didn't get a hard register but has a reg_equiv_constant,
2702 we must replace the constant here since it may no longer
2703 be in the position of any operand. */
2704 if (GET_CODE (new0
) == PLUS
&& GET_CODE (new1
) == REG
2705 && REGNO (new1
) >= FIRST_PSEUDO_REGISTER
2706 && reg_renumber
[REGNO (new1
)] < 0
2707 && reg_equiv_constant
!= 0
2708 && reg_equiv_constant
[REGNO (new1
)] != 0)
2709 new1
= reg_equiv_constant
[REGNO (new1
)];
2710 else if (GET_CODE (new1
) == PLUS
&& GET_CODE (new0
) == REG
2711 && REGNO (new0
) >= FIRST_PSEUDO_REGISTER
2712 && reg_renumber
[REGNO (new0
)] < 0
2713 && reg_equiv_constant
[REGNO (new0
)] != 0)
2714 new0
= reg_equiv_constant
[REGNO (new0
)];
2716 new = form_sum (new0
, new1
);
2718 /* As above, if we are not inside a MEM we do not want to
2719 turn a PLUS into something else. We might try to do so here
2720 for an addition of 0 if we aren't optimizing. */
2721 if (! mem_mode
&& GET_CODE (new) != PLUS
)
2722 return gen_rtx_PLUS (GET_MODE (x
), new, const0_rtx
);
2730 /* If this is the product of an eliminable register and a
2731 constant, apply the distribute law and move the constant out
2732 so that we have (plus (mult ..) ..). This is needed in order
2733 to keep load-address insns valid. This case is pathological.
2734 We ignore the possibility of overflow here. */
2735 if (GET_CODE (XEXP (x
, 0)) == REG
2736 && REGNO (XEXP (x
, 0)) < FIRST_PSEUDO_REGISTER
2737 && GET_CODE (XEXP (x
, 1)) == CONST_INT
)
2738 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
];
2740 if (ep
->from_rtx
== XEXP (x
, 0) && ep
->can_eliminate
)
2743 /* Refs inside notes don't count for this purpose. */
2744 && ! (insn
!= 0 && (GET_CODE (insn
) == EXPR_LIST
2745 || GET_CODE (insn
) == INSN_LIST
)))
2746 ep
->ref_outside_mem
= 1;
2749 plus_constant (gen_rtx_MULT (Pmode
, ep
->to_rtx
, XEXP (x
, 1)),
2750 ep
->previous_offset
* INTVAL (XEXP (x
, 1)));
2753 /* ... fall through ... */
2758 case DIV
: case UDIV
:
2759 case MOD
: case UMOD
:
2760 case AND
: case IOR
: case XOR
:
2761 case ROTATERT
: case ROTATE
:
2762 case ASHIFTRT
: case LSHIFTRT
: case ASHIFT
:
2764 case GE
: case GT
: case GEU
: case GTU
:
2765 case LE
: case LT
: case LEU
: case LTU
:
2767 rtx new0
= eliminate_regs (XEXP (x
, 0), mem_mode
, insn
);
2769 = XEXP (x
, 1) ? eliminate_regs (XEXP (x
, 1), mem_mode
, insn
) : 0;
2771 if (new0
!= XEXP (x
, 0) || new1
!= XEXP (x
, 1))
2772 return gen_rtx_fmt_ee (code
, GET_MODE (x
), new0
, new1
);
2777 /* If we have something in XEXP (x, 0), the usual case, eliminate it. */
2780 new = eliminate_regs (XEXP (x
, 0), mem_mode
, insn
);
2781 if (new != XEXP (x
, 0))
2782 x
= gen_rtx_EXPR_LIST (REG_NOTE_KIND (x
), new, XEXP (x
, 1));
2785 /* ... fall through ... */
2788 /* Now do eliminations in the rest of the chain. If this was
2789 an EXPR_LIST, this might result in allocating more memory than is
2790 strictly needed, but it simplifies the code. */
2793 new = eliminate_regs (XEXP (x
, 1), mem_mode
, insn
);
2794 if (new != XEXP (x
, 1))
2795 return gen_rtx_fmt_ee (GET_CODE (x
), GET_MODE (x
), XEXP (x
, 0), new);
2803 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
]; ep
++)
2804 if (ep
->to_rtx
== XEXP (x
, 0))
2806 int size
= GET_MODE_SIZE (mem_mode
);
2808 /* If more bytes than MEM_MODE are pushed, account for them. */
2809 #ifdef PUSH_ROUNDING
2810 if (ep
->to_rtx
== stack_pointer_rtx
)
2811 size
= PUSH_ROUNDING (size
);
2813 if (code
== PRE_DEC
|| code
== POST_DEC
)
2819 /* Fall through to generic unary operation case. */
2820 case STRICT_LOW_PART
:
2822 case SIGN_EXTEND
: case ZERO_EXTEND
:
2823 case TRUNCATE
: case FLOAT_EXTEND
: case FLOAT_TRUNCATE
:
2824 case FLOAT
: case FIX
:
2825 case UNSIGNED_FIX
: case UNSIGNED_FLOAT
:
2829 new = eliminate_regs (XEXP (x
, 0), mem_mode
, insn
);
2830 if (new != XEXP (x
, 0))
2831 return gen_rtx_fmt_e (code
, GET_MODE (x
), new);
2835 /* Similar to above processing, but preserve SUBREG_WORD.
2836 Convert (subreg (mem)) to (mem) if not paradoxical.
2837 Also, if we have a non-paradoxical (subreg (pseudo)) and the
2838 pseudo didn't get a hard reg, we must replace this with the
2839 eliminated version of the memory location because push_reloads
2840 may do the replacement in certain circumstances. */
2841 if (GET_CODE (SUBREG_REG (x
)) == REG
2842 && (GET_MODE_SIZE (GET_MODE (x
))
2843 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x
))))
2844 && reg_equiv_memory_loc
!= 0
2845 && reg_equiv_memory_loc
[REGNO (SUBREG_REG (x
))] != 0)
2848 new = eliminate_regs (reg_equiv_memory_loc
[REGNO (SUBREG_REG (x
))],
2851 /* If we didn't change anything, we must retain the pseudo. */
2852 if (new == reg_equiv_memory_loc
[REGNO (SUBREG_REG (x
))])
2853 new = SUBREG_REG (x
);
2856 /* In this case, we must show that the pseudo is used in this
2857 insn so that delete_output_reload will do the right thing. */
2858 if (insn
!= 0 && GET_CODE (insn
) != EXPR_LIST
2859 && GET_CODE (insn
) != INSN_LIST
)
2860 REG_NOTES (emit_insn_before (gen_rtx_USE (VOIDmode
,
2863 = gen_rtx_EXPR_LIST (REG_EQUAL
, new, NULL_RTX
);
2865 /* Ensure NEW isn't shared in case we have to reload it. */
2866 new = copy_rtx (new);
2869 new = SUBREG_REG (x
);
2873 new = eliminate_regs (SUBREG_REG (x
), mem_mode
, insn
);
2875 if (new != XEXP (x
, 0))
2877 int x_size
= GET_MODE_SIZE (GET_MODE (x
));
2878 int new_size
= GET_MODE_SIZE (GET_MODE (new));
2880 if (GET_CODE (new) == MEM
2881 && ((x_size
< new_size
2882 #ifdef WORD_REGISTER_OPERATIONS
2883 /* On these machines, combine can create rtl of the form
2884 (set (subreg:m1 (reg:m2 R) 0) ...)
2885 where m1 < m2, and expects something interesting to
2886 happen to the entire word. Moreover, it will use the
2887 (reg:m2 R) later, expecting all bits to be preserved.
2888 So if the number of words is the same, preserve the
2889 subreg so that push_reloads can see it. */
2890 && ! ((x_size
-1)/UNITS_PER_WORD
== (new_size
-1)/UNITS_PER_WORD
)
2893 || (x_size
== new_size
))
2896 int offset
= SUBREG_WORD (x
) * UNITS_PER_WORD
;
2897 enum machine_mode mode
= GET_MODE (x
);
2899 if (BYTES_BIG_ENDIAN
)
2900 offset
+= (MIN (UNITS_PER_WORD
,
2901 GET_MODE_SIZE (GET_MODE (new)))
2902 - MIN (UNITS_PER_WORD
, GET_MODE_SIZE (mode
)));
2904 PUT_MODE (new, mode
);
2905 XEXP (new, 0) = plus_constant (XEXP (new, 0), offset
);
2909 return gen_rtx_SUBREG (GET_MODE (x
), new, SUBREG_WORD (x
));
2915 /* If using a register that is the source of an eliminate we still
2916 think can be performed, note it cannot be performed since we don't
2917 know how this register is used. */
2918 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
]; ep
++)
2919 if (ep
->from_rtx
== XEXP (x
, 0))
2920 ep
->can_eliminate
= 0;
2922 new = eliminate_regs (XEXP (x
, 0), mem_mode
, insn
);
2923 if (new != XEXP (x
, 0))
2924 return gen_rtx_fmt_e (code
, GET_MODE (x
), new);
2928 /* If clobbering a register that is the replacement register for an
2929 elimination we still think can be performed, note that it cannot
2930 be performed. Otherwise, we need not be concerned about it. */
2931 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
]; ep
++)
2932 if (ep
->to_rtx
== XEXP (x
, 0))
2933 ep
->can_eliminate
= 0;
2935 new = eliminate_regs (XEXP (x
, 0), mem_mode
, insn
);
2936 if (new != XEXP (x
, 0))
2937 return gen_rtx_fmt_e (code
, GET_MODE (x
), new);
2943 /* Properly handle sharing input and constraint vectors. */
2944 if (ASM_OPERANDS_INPUT_VEC (x
) != old_asm_operands_vec
)
2946 /* When we come to a new vector not seen before,
2947 scan all its elements; keep the old vector if none
2948 of them changes; otherwise, make a copy. */
2949 old_asm_operands_vec
= ASM_OPERANDS_INPUT_VEC (x
);
2950 temp_vec
= (rtx
*) alloca (XVECLEN (x
, 3) * sizeof (rtx
));
2951 for (i
= 0; i
< ASM_OPERANDS_INPUT_LENGTH (x
); i
++)
2952 temp_vec
[i
] = eliminate_regs (ASM_OPERANDS_INPUT (x
, i
),
2955 for (i
= 0; i
< ASM_OPERANDS_INPUT_LENGTH (x
); i
++)
2956 if (temp_vec
[i
] != ASM_OPERANDS_INPUT (x
, i
))
2959 if (i
== ASM_OPERANDS_INPUT_LENGTH (x
))
2960 new_asm_operands_vec
= old_asm_operands_vec
;
2962 new_asm_operands_vec
2963 = gen_rtvec_v (ASM_OPERANDS_INPUT_LENGTH (x
), temp_vec
);
2966 /* If we had to copy the vector, copy the entire ASM_OPERANDS. */
2967 if (new_asm_operands_vec
== old_asm_operands_vec
)
2970 new = gen_rtx_ASM_OPERANDS (VOIDmode
, ASM_OPERANDS_TEMPLATE (x
),
2971 ASM_OPERANDS_OUTPUT_CONSTRAINT (x
),
2972 ASM_OPERANDS_OUTPUT_IDX (x
),
2973 new_asm_operands_vec
,
2974 ASM_OPERANDS_INPUT_CONSTRAINT_VEC (x
),
2975 ASM_OPERANDS_SOURCE_FILE (x
),
2976 ASM_OPERANDS_SOURCE_LINE (x
));
2977 new->volatil
= x
->volatil
;
2982 /* Check for setting a register that we know about. */
2983 if (GET_CODE (SET_DEST (x
)) == REG
)
2985 /* See if this is setting the replacement register for an
2988 If DEST is the hard frame pointer, we do nothing because we
2989 assume that all assignments to the frame pointer are for
2990 non-local gotos and are being done at a time when they are valid
2991 and do not disturb anything else. Some machines want to
2992 eliminate a fake argument pointer (or even a fake frame pointer)
2993 with either the real frame or the stack pointer. Assignments to
2994 the hard frame pointer must not prevent this elimination. */
2996 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
];
2998 if (ep
->to_rtx
== SET_DEST (x
)
2999 && SET_DEST (x
) != hard_frame_pointer_rtx
)
3001 /* If it is being incremented, adjust the offset. Otherwise,
3002 this elimination can't be done. */
3003 rtx src
= SET_SRC (x
);
3005 if (GET_CODE (src
) == PLUS
3006 && XEXP (src
, 0) == SET_DEST (x
)
3007 && GET_CODE (XEXP (src
, 1)) == CONST_INT
)
3008 ep
->offset
-= INTVAL (XEXP (src
, 1));
3010 ep
->can_eliminate
= 0;
3013 /* Now check to see we are assigning to a register that can be
3014 eliminated. If so, it must be as part of a PARALLEL, since we
3015 will not have been called if this is a single SET. So indicate
3016 that we can no longer eliminate this reg. */
3017 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
];
3019 if (ep
->from_rtx
== SET_DEST (x
) && ep
->can_eliminate
)
3020 ep
->can_eliminate
= 0;
3023 /* Now avoid the loop below in this common case. */
3025 rtx new0
= eliminate_regs (SET_DEST (x
), 0, insn
);
3026 rtx new1
= eliminate_regs (SET_SRC (x
), 0, insn
);
3028 /* If SET_DEST changed from a REG to a MEM and INSN is an insn,
3029 write a CLOBBER insn. */
3030 if (GET_CODE (SET_DEST (x
)) == REG
&& GET_CODE (new0
) == MEM
3031 && insn
!= 0 && GET_CODE (insn
) != EXPR_LIST
3032 && GET_CODE (insn
) != INSN_LIST
)
3033 emit_insn_after (gen_rtx_CLOBBER (VOIDmode
, SET_DEST (x
)), insn
);
3035 if (new0
!= SET_DEST (x
) || new1
!= SET_SRC (x
))
3036 return gen_rtx_SET (VOIDmode
, new0
, new1
);
3042 /* This is only for the benefit of the debugging backends, which call
3043 eliminate_regs on DECL_RTL; any ADDRESSOFs in the actual insns are
3044 removed after CSE. */
3045 if (GET_CODE (XEXP (x
, 0)) == ADDRESSOF
)
3046 return eliminate_regs (XEXP (XEXP (x
, 0), 0), 0, insn
);
3048 /* Our only special processing is to pass the mode of the MEM to our
3049 recursive call and copy the flags. While we are here, handle this
3050 case more efficiently. */
3051 new = eliminate_regs (XEXP (x
, 0), GET_MODE (x
), insn
);
3052 if (new != XEXP (x
, 0))
3054 new = gen_rtx_MEM (GET_MODE (x
), new);
3055 new->volatil
= x
->volatil
;
3056 new->unchanging
= x
->unchanging
;
3057 new->in_struct
= x
->in_struct
;
3067 /* Process each of our operands recursively. If any have changed, make a
3069 fmt
= GET_RTX_FORMAT (code
);
3070 for (i
= 0; i
< GET_RTX_LENGTH (code
); i
++, fmt
++)
3074 new = eliminate_regs (XEXP (x
, i
), mem_mode
, insn
);
3075 if (new != XEXP (x
, i
) && ! copied
)
3077 rtx new_x
= rtx_alloc (code
);
3078 bcopy ((char *) x
, (char *) new_x
,
3079 (sizeof (*new_x
) - sizeof (new_x
->fld
)
3080 + sizeof (new_x
->fld
[0]) * GET_RTX_LENGTH (code
)));
3086 else if (*fmt
== 'E')
3089 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
3091 new = eliminate_regs (XVECEXP (x
, i
, j
), mem_mode
, insn
);
3092 if (new != XVECEXP (x
, i
, j
) && ! copied_vec
)
3094 rtvec new_v
= gen_rtvec_vv (XVECLEN (x
, i
),
3098 rtx new_x
= rtx_alloc (code
);
3099 bcopy ((char *) x
, (char *) new_x
,
3100 (sizeof (*new_x
) - sizeof (new_x
->fld
)
3101 + (sizeof (new_x
->fld
[0])
3102 * GET_RTX_LENGTH (code
))));
3106 XVEC (x
, i
) = new_v
;
3109 XVECEXP (x
, i
, j
) = new;
/* NOTE(review): this region is a line-mangled extraction of the GCC reload
   pass.  Statements are split across physical lines, the embedded numbers
   are original source line numbers (gaps mark lines dropped by the
   extraction, e.g. braces, declarations and the function's return-type
   line), and "&reg_eliminate" was corrupted to "(R)_eliminate" by an HTML
   entity substitution.  The code text below is left byte-identical; only
   annotation comments are added.  */
3117 /* Scan INSN and eliminate all eliminable registers in it.
3119 If REPLACE is nonzero, do the replacement destructively. Also
3120 delete the insn as dead it if it is setting an eliminable register.
3122 If REPLACE is zero, do all our allocations in reload_obstack.
3124 If no eliminations were done and this insn doesn't require any elimination
3125 processing (these are not identical conditions: it might be updating sp,
3126 but not referencing fp; this needs to be seen during reload_as_needed so
3127 that the offset between fp and sp can be taken into consideration), zero
3128 is returned. Otherwise, 1 is returned. */
3131 eliminate_regs_in_insn (insn
, replace
)
3135 rtx old_body
= PATTERN (insn
);
3136 rtx old_set
= single_set (insn
);
3139 struct elim_table
*ep
;
3142 push_obstacks (&reload_obstack
, &reload_obstack
);
/* Special-case a single SET whose destination is a hard register: it may
   be setting the replacement register for an elimination.  */
3144 if (old_set
!= 0 && GET_CODE (SET_DEST (old_set
)) == REG
3145 && REGNO (SET_DEST (old_set
)) < FIRST_PSEUDO_REGISTER
)
3147 /* Check for setting an eliminable register. */
3148 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
]; ep
++)
3149 if (ep
->from_rtx
== SET_DEST (old_set
) && ep
->can_eliminate
)
3151 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3152 /* If this is setting the frame pointer register to the
3153 hardware frame pointer register and this is an elimination
3154 that will be done (tested above), this insn is really
3155 adjusting the frame pointer downward to compensate for
3156 the adjustment done before a nonlocal goto. */
3157 if (ep
->from
== FRAME_POINTER_REGNUM
3158 && ep
->to
== HARD_FRAME_POINTER_REGNUM
)
3160 rtx src
= SET_SRC (old_set
);
3162 rtx prev_insn
, prev_set
;
/* Recognize SRC as TO, TO+const or const+TO (also looking through a
   preceding copy insn) and capture the constant adjustment in OFFSET.
   The declarations of OFFSET and OK fall in a dropped line.  */
3164 if (src
== ep
->to_rtx
)
3166 else if (GET_CODE (src
) == PLUS
3167 && GET_CODE (XEXP (src
, 0)) == CONST_INT
3168 && XEXP (src
, 1) == ep
->to_rtx
)
3169 offset
= INTVAL (XEXP (src
, 0)), ok
= 1;
3170 else if (GET_CODE (src
) == PLUS
3171 && GET_CODE (XEXP (src
, 1)) == CONST_INT
3172 && XEXP (src
, 0) == ep
->to_rtx
)
3173 offset
= INTVAL (XEXP (src
, 1)), ok
= 1;
3174 else if ((prev_insn
= prev_nonnote_insn (insn
)) != 0
3175 && (prev_set
= single_set (prev_insn
)) != 0
3176 && rtx_equal_p (SET_DEST (prev_set
), src
))
3178 src
= SET_SRC (prev_set
);
3179 if (src
== ep
->to_rtx
)
3181 else if (GET_CODE (src
) == PLUS
3182 && GET_CODE (XEXP (src
, 0)) == CONST_INT
3183 && XEXP (src
, 1) == ep
->to_rtx
)
3184 offset
= INTVAL (XEXP (src
, 0)), ok
= 1;
3185 else if (GET_CODE (src
) == PLUS
3186 && GET_CODE (XEXP (src
, 1)) == CONST_INT
3187 && XEXP (src
, 0) == ep
->to_rtx
)
3188 offset
= INTVAL (XEXP (src
, 1)), ok
= 1;
/* NOTE(review): lines 3189-3195 (the "if (ok)" / "if (replace)" wrappers
   and the declaration of the substituted SRC) were dropped by the
   extraction.  */
3196 = plus_constant (ep
->to_rtx
, offset
- ep
->offset
);
3198 /* First see if this insn remains valid when we
3199 make the change. If not, keep the INSN_CODE
3200 the same and let reload fit it up. */
3201 validate_change (insn
, &SET_SRC (old_set
), src
, 1);
3202 validate_change (insn
, &SET_DEST (old_set
),
3204 if (! apply_change_group ())
3206 SET_SRC (old_set
) = src
;
3207 SET_DEST (old_set
) = ep
->to_rtx
;
3217 /* In this case this insn isn't serving a useful purpose. We
3218 will delete it in reload_as_needed once we know that this
3219 elimination is, in fact, being done.
3221 If REPLACE isn't set, we can't delete this insn, but needn't
3222 process it since it won't be used unless something changes. */
3224 delete_dead_insn (insn
);
3229 /* Check for (set (reg) (plus (reg from) (offset))) where the offset
3230 in the insn is the negative of the offset in FROM. Substitute
3231 (set (reg) (reg to)) for the insn and change its code.
3233 We have to do this here, rather than in eliminate_regs, so that we can
3234 change the insn code. */
3236 if (GET_CODE (SET_SRC (old_set
)) == PLUS
3237 && GET_CODE (XEXP (SET_SRC (old_set
), 0)) == REG
3238 && GET_CODE (XEXP (SET_SRC (old_set
), 1)) == CONST_INT
)
3239 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
];
3241 if (ep
->from_rtx
== XEXP (SET_SRC (old_set
), 0)
3242 && ep
->can_eliminate
)
3244 /* We must stop at the first elimination that will be used.
3245 If this one would replace the PLUS with a REG, do it
3246 now. Otherwise, quit the loop and let eliminate_regs
3247 do its normal replacement. */
3248 if (ep
->offset
== - INTVAL (XEXP (SET_SRC (old_set
), 1)))
3250 /* We assume here that we don't need a PARALLEL of
3251 any CLOBBERs for this assignment. There's not
3252 much we can do if we do need it. */
3253 PATTERN (insn
) = gen_rtx_SET (VOIDmode
,
3256 INSN_CODE (insn
) = -1;
3265 old_asm_operands_vec
= 0;
3267 /* Replace the body of this insn with a substituted form. If we changed
3268 something, return non-zero.
3270 If we are replacing a body that was a (set X (plus Y Z)), try to
3271 re-recognize the insn. We do this in case we had a simple addition
3272 but now can do this as a load-address. This saves an insn in this
3275 new_body
= eliminate_regs (old_body
, 0, replace
? insn
: NULL_RTX
);
3276 if (new_body
!= old_body
)
3278 /* If we aren't replacing things permanently and we changed something,
3279 make another copy to ensure that all the RTL is new. Otherwise
3280 things can go wrong if find_reload swaps commutative operands
3281 and one is inside RTL that has been copied while the other is not. */
3283 /* Don't copy an asm_operands because (1) there's no need and (2)
3284 copy_rtx can't do it properly when there are multiple outputs. */
3285 if (! replace
&& asm_noperands (old_body
) < 0)
3286 new_body
= copy_rtx (new_body
);
3288 /* If we had a move insn but now we don't, rerecognize it. This will
3289 cause spurious re-recognition if the old move had a PARALLEL since
3290 the new one still will, but we can't call single_set without
3291 having put NEW_BODY into the insn and the re-recognition won't
3292 hurt in this rare case. */
3294 && ((GET_CODE (SET_SRC (old_set
)) == REG
3295 && (GET_CODE (new_body
) != SET
3296 || GET_CODE (SET_SRC (new_body
)) != REG
))
3297 /* If this was a load from or store to memory, compare
3298 the MEM in recog_operand to the one in the insn. If they
3299 are not equal, then rerecognize the insn. */
3301 && ((GET_CODE (SET_SRC (old_set
)) == MEM
3302 && SET_SRC (old_set
) != recog_operand
[1])
3303 || (GET_CODE (SET_DEST (old_set
)) == MEM
3304 && SET_DEST (old_set
) != recog_operand
[0])))
3305 /* If this was an add insn before, rerecognize. */
3306 || GET_CODE (SET_SRC (old_set
)) == PLUS
))
3308 if (! validate_change (insn
, &PATTERN (insn
), new_body
, 0))
3309 /* If recognition fails, store the new body anyway.
3310 It's normal to have recognition failures here
3311 due to bizarre memory addresses; reloading will fix them. */
3312 PATTERN (insn
) = new_body
;
3315 PATTERN (insn
) = new_body
;
3320 /* Loop through all elimination pairs. See if any have changed.
3322 We also detect a cases where register elimination cannot be done,
3323 namely, if a register would be both changed and referenced outside a MEM
3324 in the resulting insn since such an insn is often undefined and, even if
3325 not, we cannot know what meaning will be given to it. Note that it is
3326 valid to have a register used in an address in an insn that changes it
3327 (presumably with a pre- or post-increment or decrement).
3329 If anything changes, return nonzero. */
3331 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
]; ep
++)
3333 if (ep
->previous_offset
!= ep
->offset
&& ep
->ref_outside_mem
)
3334 ep
->can_eliminate
= 0;
3336 ep
->ref_outside_mem
= 0;
3338 if (ep
->previous_offset
!= ep
->offset
)
3343 /* If we changed something, perform elimination in REG_NOTES. This is
3344 needed even when REPLACE is zero because a REG_DEAD note might refer
3345 to a register that we eliminate and could cause a different number
3346 of spill registers to be needed in the final reload pass than in
3348 if (val
&& REG_NOTES (insn
) != 0)
3349 REG_NOTES (insn
) = eliminate_regs (REG_NOTES (insn
), 0, REG_NOTES (insn
));
/* NOTE(review): the function's tail (pop_obstacks when !REPLACE and
   "return val;") fell in the dropped lines after 3349.  */
3357 /* Loop through all elimination pairs.
3358 Recalculate the number not at initial offset.
3360 Compute the maximum offset (minimum offset if the stack does not
3361 grow downward) for each elimination pair. */
3364 update_eliminable_offsets ()
3366 struct elim_table
*ep
;
3368 num_not_at_initial_offset
= 0;
3369 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
]; ep
++)
3371 ep
->previous_offset
= ep
->offset
;
3372 if (ep
->can_eliminate
&& ep
->offset
!= ep
->initial_offset
)
3373 num_not_at_initial_offset
++;
3377 /* Given X, a SET or CLOBBER of DEST, if DEST is the target of a register
3378 replacement we currently believe is valid, mark it as not eliminable if X
3379 modifies DEST in any way other than by adding a constant integer to it.
3381 If DEST is the frame pointer, we do nothing because we assume that
3382 all assignments to the hard frame pointer are nonlocal gotos and are being
3383 done at a time when they are valid and do not disturb anything else.
3384 Some machines want to eliminate a fake argument pointer with either the
3385 frame or stack pointer. Assignments to the hard frame pointer must not
3386 prevent this elimination.
3388 Called via note_stores from reload before starting its passes to scan
3389 the insns of the function. */
3392 mark_not_eliminable (dest
, x
)
3396 register unsigned int i
;
3398 /* A SUBREG of a hard register here is just changing its mode. We should
3399 not see a SUBREG of an eliminable hard register, but check just in
3401 if (GET_CODE (dest
) == SUBREG
)
3402 dest
= SUBREG_REG (dest
);
3404 if (dest
== hard_frame_pointer_rtx
)
3407 for (i
= 0; i
< NUM_ELIMINABLE_REGS
; i
++)
3408 if (reg_eliminate
[i
].can_eliminate
&& dest
== reg_eliminate
[i
].to_rtx
3409 && (GET_CODE (x
) != SET
3410 || GET_CODE (SET_SRC (x
)) != PLUS
3411 || XEXP (SET_SRC (x
), 0) != dest
3412 || GET_CODE (XEXP (SET_SRC (x
), 1)) != CONST_INT
))
3414 reg_eliminate
[i
].can_eliminate_previous
3415 = reg_eliminate
[i
].can_eliminate
= 0;
3420 /* Verify that the initial elimination offsets did not change since the
3421 last call to set_initial_elim_offsets. This is used to catch cases
3422 where something illegal happened during reload_as_needed that could
3423 cause incorrect code to be generated if we did not check for it. */
3425 verify_initial_elim_offsets ()
3429 #ifdef ELIMINABLE_REGS
3430 struct elim_table
*ep
;
3432 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
]; ep
++)
3434 INITIAL_ELIMINATION_OFFSET (ep
->from
, ep
->to
, t
);
3435 if (t
!= ep
->initial_offset
)
3439 INITIAL_FRAME_POINTER_OFFSET (t
);
3440 if (t
!= reg_eliminate
[0].initial_offset
)
3445 /* Reset all offsets on eliminable registers to their initial values. */
3447 set_initial_elim_offsets ()
3449 struct elim_table
*ep
= reg_eliminate
;
3451 #ifdef ELIMINABLE_REGS
3452 for (; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
]; ep
++)
3454 INITIAL_ELIMINATION_OFFSET (ep
->from
, ep
->to
, ep
->initial_offset
);
3455 ep
->previous_offset
= ep
->offset
= ep
->initial_offset
;
3458 INITIAL_FRAME_POINTER_OFFSET (ep
->initial_offset
);
3459 ep
->previous_offset
= ep
->offset
= ep
->initial_offset
;
3462 num_not_at_initial_offset
= 0;
3465 /* Initialize the known label offsets.
3466 Set a known offset for each forced label to be at the initial offset
3467 of each elimination. We do this because we assume that all
3468 computed jumps occur from a location where each elimination is
3469 at its initial offset.
3470 For all other labels, show that we don't know the offsets. */
3473 set_initial_label_offsets ()
3476 bzero ((char *) &offsets_known_at
[get_first_label_num ()], num_labels
);
3478 for (x
= forced_labels
; x
; x
= XEXP (x
, 1))
3480 set_label_offsets (XEXP (x
, 0), NULL_RTX
, 1);
3483 /* Set all elimination offsets to the known values for the code label given
3486 set_offsets_for_label (insn
)
3490 int label_nr
= CODE_LABEL_NUMBER (insn
);
3491 struct elim_table
*ep
;
3493 num_not_at_initial_offset
= 0;
3494 for (i
= 0, ep
= reg_eliminate
; i
< NUM_ELIMINABLE_REGS
; ep
++, i
++)
3496 ep
->offset
= ep
->previous_offset
= offsets_at
[label_nr
][i
];
3497 if (ep
->can_eliminate
&& ep
->offset
!= ep
->initial_offset
)
3498 num_not_at_initial_offset
++;
3502 /* See if anything that happened changes which eliminations are valid.
3503 For example, on the Sparc, whether or not the frame pointer can
3504 be eliminated can depend on what registers have been used. We need
3505 not check some conditions again (such as flag_omit_frame_pointer)
3506 since they can't have changed. */
3509 update_eliminables (pset
)
3512 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3513 int previous_frame_pointer_needed
= frame_pointer_needed
;
3515 struct elim_table
*ep
;
3517 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
]; ep
++)
3518 if ((ep
->from
== HARD_FRAME_POINTER_REGNUM
&& FRAME_POINTER_REQUIRED
)
3519 #ifdef ELIMINABLE_REGS
3520 || ! CAN_ELIMINATE (ep
->from
, ep
->to
)
3523 ep
->can_eliminate
= 0;
3525 /* Look for the case where we have discovered that we can't replace
3526 register A with register B and that means that we will now be
3527 trying to replace register A with register C. This means we can
3528 no longer replace register C with register B and we need to disable
3529 such an elimination, if it exists. This occurs often with A == ap,
3530 B == sp, and C == fp. */
3532 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
]; ep
++)
3534 struct elim_table
*op
;
3535 register int new_to
= -1;
3537 if (! ep
->can_eliminate
&& ep
->can_eliminate_previous
)
3539 /* Find the current elimination for ep->from, if there is a
3541 for (op
= reg_eliminate
;
3542 op
< ®_eliminate
[NUM_ELIMINABLE_REGS
]; op
++)
3543 if (op
->from
== ep
->from
&& op
->can_eliminate
)
3549 /* See if there is an elimination of NEW_TO -> EP->TO. If so,
3551 for (op
= reg_eliminate
;
3552 op
< ®_eliminate
[NUM_ELIMINABLE_REGS
]; op
++)
3553 if (op
->from
== new_to
&& op
->to
== ep
->to
)
3554 op
->can_eliminate
= 0;
3558 /* See if any registers that we thought we could eliminate the previous
3559 time are no longer eliminable. If so, something has changed and we
3560 must spill the register. Also, recompute the number of eliminable
3561 registers and see if the frame pointer is needed; it is if there is
3562 no elimination of the frame pointer that we can perform. */
3564 frame_pointer_needed
= 1;
3565 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
]; ep
++)
3567 if (ep
->can_eliminate
&& ep
->from
== FRAME_POINTER_REGNUM
3568 && ep
->to
!= HARD_FRAME_POINTER_REGNUM
)
3569 frame_pointer_needed
= 0;
3571 if (! ep
->can_eliminate
&& ep
->can_eliminate_previous
)
3573 ep
->can_eliminate_previous
= 0;
3574 SET_HARD_REG_BIT (*pset
, ep
->from
);
3579 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3580 /* If we didn't need a frame pointer last time, but we do now, spill
3581 the hard frame pointer. */
3582 if (frame_pointer_needed
&& ! previous_frame_pointer_needed
)
3583 SET_HARD_REG_BIT (*pset
, HARD_FRAME_POINTER_REGNUM
);
3587 /* Initialize the table of registers to eliminate. */
3591 struct elim_table
*ep
;
3593 /* Does this function require a frame pointer? */
3595 frame_pointer_needed
= (! flag_omit_frame_pointer
3596 #ifdef EXIT_IGNORE_STACK
3597 /* ?? If EXIT_IGNORE_STACK is set, we will not save
3598 and restore sp for alloca. So we can't eliminate
3599 the frame pointer in that case. At some point,
3600 we should improve this by emitting the
3601 sp-adjusting insns for this case. */
3602 || (current_function_calls_alloca
3603 && EXIT_IGNORE_STACK
)
3605 || FRAME_POINTER_REQUIRED
);
3609 #ifdef ELIMINABLE_REGS
3610 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
]; ep
++)
3612 ep
->can_eliminate
= ep
->can_eliminate_previous
3613 = (CAN_ELIMINATE (ep
->from
, ep
->to
)
3614 && ! (ep
->to
== STACK_POINTER_REGNUM
&& frame_pointer_needed
));
3617 reg_eliminate
[0].can_eliminate
= reg_eliminate
[0].can_eliminate_previous
3618 = ! frame_pointer_needed
;
3621 /* Count the number of eliminable registers and build the FROM and TO
3622 REG rtx's. Note that code in gen_rtx will cause, e.g.,
3623 gen_rtx (REG, Pmode, STACK_POINTER_REGNUM) to equal stack_pointer_rtx.
3624 We depend on this. */
3625 for (ep
= reg_eliminate
; ep
< ®_eliminate
[NUM_ELIMINABLE_REGS
]; ep
++)
3627 num_eliminable
+= ep
->can_eliminate
;
3628 ep
->from_rtx
= gen_rtx_REG (Pmode
, ep
->from
);
3629 ep
->to_rtx
= gen_rtx_REG (Pmode
, ep
->to
);
/* NOTE(review): mangled extraction; code text kept byte-identical, comments
   only.  The function's opening brace, parameter declarations, the
   CANT_ELIMINATE guard around 3654-3655, and the trailing return-value
   computation promised by the doc comment all fell in extraction gaps.  */
3633 /* Kick all pseudos out of hard register REGNO.
3634 If DUMPFILE is nonzero, log actions taken on that file.
3636 If CANT_ELIMINATE is nonzero, it means that we are doing this spill
3637 because we found we can't eliminate some register. In the case, no pseudos
3638 are allowed to be in the register, even if they are only in a block that
3639 doesn't require spill registers, unlike the case when we are spilling this
3640 hard reg to produce another spill register.
3642 Return nonzero if any pseudos needed to be kicked out. */
3645 spill_hard_reg (regno
, dumpfile
, cant_eliminate
)
/* When the spill is forced by a failed elimination, globally forbid REGNO
   as a spill reg and treat it as live for prologue/epilogue purposes.  */
3654 SET_HARD_REG_BIT (bad_spill_regs_global
, regno
);
3655 regs_ever_live
[regno
] = 1;
3658 /* Spill every pseudo reg that was allocated to this reg
3659 or to something that overlaps this reg. */
/* A pseudo overlaps REGNO when its first hard reg is <= REGNO and its
   hard-reg span (HARD_REGNO_NREGS) extends past REGNO; the comparison's
   closing "> regno" fell in a dropped line.  */
3661 for (i
= FIRST_PSEUDO_REGISTER
; i
< max_regno
; i
++)
3662 if (reg_renumber
[i
] >= 0
3663 && reg_renumber
[i
] <= regno
3665 + HARD_REGNO_NREGS (reg_renumber
[i
],
3666 PSEUDO_REGNO_MODE (i
))
3668 SET_REGNO_REG_SET (spilled_pseudos
, i
);
3671 /* I'm getting weird preprocessor errors if I use IOR_HARD_REG_SET
3672 from within EXECUTE_IF_SET_IN_REG_SET. Hence this awkwardness. */
3674 ior_hard_reg_set (set1
, set2
)
3675 HARD_REG_SET
*set1
, *set2
;
3677 IOR_HARD_REG_SET (*set1
, *set2
);
/* NOTE(review): mangled extraction; code text kept byte-identical, comments
   only.  Braces, several guards (e.g. the "if (global)" wrapper around the
   retry-allocation section, the sanity abort after GO_IF_HARD_REG_SUBSET,
   and the dumpfile != 0 tests before the fprintf calls) fell in extraction
   gaps -- confirm against the pristine source.  */
3680 /* After find_reload_regs has been run for all insn that need reloads,
3681 and/or spill_hard_regs was called, this function is used to actually
3682 spill pseudo registers and try to reallocate them. It also sets up the
3683 spill_regs array for use by choose_reload_regs. */
3686 finish_spills (global
, dumpfile
)
3690 struct insn_chain
*chain
;
3691 int something_changed
= 0;
3694 /* Build the spill_regs array for the function. */
3695 /* If there are some registers still to eliminate and one of the spill regs
3696 wasn't ever used before, additional stack space may have to be
3697 allocated to store this register. Thus, we may have changed the offset
3698 between the stack and frame pointers, so mark that something has changed.
3700 One might think that we need only set VAL to 1 if this is a call-used
3701 register. However, the set of registers that must be saved by the
3702 prologue is not identical to the call-used set. For example, the
3703 register used by the call insn for the return PC is a call-used register,
3704 but must be saved by the prologue. */
3707 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
3708 if (TEST_HARD_REG_BIT (used_spill_regs
, i
))
3710 spill_reg_order
[i
] = n_spills
;
3711 spill_regs
[n_spills
++] = i
;
3712 if (num_eliminable
&& ! regs_ever_live
[i
])
3713 something_changed
= 1;
3714 regs_ever_live
[i
] = 1;
3717 spill_reg_order
[i
] = -1;
/* Disconnect every spilled pseudo from its hard register.  */
3719 for (i
= FIRST_PSEUDO_REGISTER
; i
< max_regno
; i
++)
3720 if (REGNO_REG_SET_P (spilled_pseudos
, i
))
3722 /* Record the current hard register the pseudo is allocated to in
3723 pseudo_previous_regs so we avoid reallocating it to the same
3724 hard reg in a later pass. */
3725 if (reg_renumber
[i
] < 0)
3727 SET_HARD_REG_BIT (pseudo_previous_regs
[i
], reg_renumber
[i
]);
3728 /* Mark it as no longer having a hard register home. */
3729 reg_renumber
[i
] = -1;
3730 /* We will need to scan everything again. */
3731 something_changed
= 1;
3734 /* Retry global register allocation if possible. */
3737 bzero ((char *) pseudo_forbidden_regs
, max_regno
* sizeof (HARD_REG_SET
));
3738 /* For every insn that needs reloads, set the registers used as spill
3739 regs in pseudo_forbidden_regs for every pseudo live across the
3741 for (chain
= insns_need_reload
; chain
; chain
= chain
->next_need_reload
)
3743 EXECUTE_IF_SET_IN_REG_SET
3744 (chain
->live_before
, FIRST_PSEUDO_REGISTER
, i
,
3746 ior_hard_reg_set (pseudo_forbidden_regs
+ i
,
3747 &chain
->used_spill_regs
);
3749 EXECUTE_IF_SET_IN_REG_SET
3750 (chain
->live_after
, FIRST_PSEUDO_REGISTER
, i
,
3752 ior_hard_reg_set (pseudo_forbidden_regs
+ i
,
3753 &chain
->used_spill_regs
);
3757 /* Retry allocating the spilled pseudos. For each reg, merge the
3758 various reg sets that indicate which hard regs can't be used,
3759 and call retry_global_alloc.
3760 We change spill_pseudos here to only contain pseudos that did not
3761 get a new hard register. */
3762 for (i
= FIRST_PSEUDO_REGISTER
; i
< max_regno
; i
++)
3763 if (reg_old_renumber
[i
] != reg_renumber
[i
])
3765 HARD_REG_SET forbidden
;
3766 COPY_HARD_REG_SET (forbidden
, bad_spill_regs_global
);
3767 IOR_HARD_REG_SET (forbidden
, pseudo_forbidden_regs
[i
]);
3768 IOR_HARD_REG_SET (forbidden
, pseudo_previous_regs
[i
]);
3769 retry_global_alloc (i
, forbidden
);
3770 if (reg_renumber
[i
] >= 0)
3771 CLEAR_REGNO_REG_SET (spilled_pseudos
, i
);
3775 /* Fix up the register information in the insn chain.
3776 This involves deleting those of the spilled pseudos which did not get
3777 a new hard register home from the live_{before,after} sets. */
3778 for (chain
= reload_insn_chain
; chain
; chain
= chain
->next
)
3780 HARD_REG_SET used_by_pseudos
;
3781 HARD_REG_SET used_by_pseudos2
;
3783 AND_COMPL_REG_SET (chain
->live_before
, spilled_pseudos
);
3784 AND_COMPL_REG_SET (chain
->live_after
, spilled_pseudos
);
3786 /* Mark any unallocated hard regs as available for spills. That
3787 makes inheritance work somewhat better. */
3788 if (chain
->need_reload
)
3790 REG_SET_TO_HARD_REG_SET (used_by_pseudos
, chain
->live_before
);
3791 REG_SET_TO_HARD_REG_SET (used_by_pseudos2
, chain
->live_after
);
3792 IOR_HARD_REG_SET (used_by_pseudos
, used_by_pseudos2
);
3794 /* Save the old value for the sanity test below. */
3795 COPY_HARD_REG_SET (used_by_pseudos2
, chain
->used_spill_regs
);
3797 compute_use_by_pseudos (&used_by_pseudos
, chain
->live_before
);
3798 compute_use_by_pseudos (&used_by_pseudos
, chain
->live_after
);
3799 COMPL_HARD_REG_SET (chain
->used_spill_regs
, used_by_pseudos
);
3800 AND_HARD_REG_SET (chain
->used_spill_regs
, used_spill_regs
);
3802 /* Make sure we only enlarge the set. */
3803 GO_IF_HARD_REG_SUBSET (used_by_pseudos2
, chain
->used_spill_regs
, ok
);
3809 /* Let alter_reg modify the reg rtx's for the modified pseudos. */
3810 for (i
= FIRST_PSEUDO_REGISTER
; i
< max_regno
; i
++)
3812 int regno
= reg_renumber
[i
];
3813 if (reg_old_renumber
[i
] == regno
)
3816 alter_reg (i
, reg_old_renumber
[i
]);
3817 reg_old_renumber
[i
] = regno
;
3821 fprintf (dumpfile
, " Register %d now on stack.\n\n", i
);
3823 fprintf (dumpfile
, " Register %d now in %d.\n\n",
3824 i
, reg_renumber
[i
]);
3828 return something_changed
;
3831 /* Find all paradoxical subregs within X and update reg_max_ref_width.
3832 Also mark any hard registers used to store user variables as
3833 forbidden from being used for spill registers. */
3836 scan_paradoxical_subregs (x
)
3841 register enum rtx_code code
= GET_CODE (x
);
3847 if (SMALL_REGISTER_CLASSES
&& REGNO (x
) < FIRST_PSEUDO_REGISTER
3848 && REG_USERVAR_P (x
))
3849 SET_HARD_REG_BIT (bad_spill_regs_global
, REGNO (x
));
3865 if (GET_CODE (SUBREG_REG (x
)) == REG
3866 && GET_MODE_SIZE (GET_MODE (x
)) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x
))))
3867 reg_max_ref_width
[REGNO (SUBREG_REG (x
))]
3868 = GET_MODE_SIZE (GET_MODE (x
));
3875 fmt
= GET_RTX_FORMAT (code
);
3876 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
3879 scan_paradoxical_subregs (XEXP (x
, i
));
3880 else if (fmt
[i
] == 'E')
3883 for (j
= XVECLEN (x
, i
) - 1; j
>=0; j
--)
3884 scan_paradoxical_subregs (XVECEXP (x
, i
, j
));
3890 hard_reg_use_compare (p1p
, p2p
)
3891 const GENERIC_PTR p1p
;
3892 const GENERIC_PTR p2p
;
3894 struct hard_reg_n_uses
*p1
= (struct hard_reg_n_uses
*)p1p
;
3895 struct hard_reg_n_uses
*p2
= (struct hard_reg_n_uses
*)p2p
;
3896 int bad1
= TEST_HARD_REG_BIT (bad_spill_regs
, p1
->regno
);
3897 int bad2
= TEST_HARD_REG_BIT (bad_spill_regs
, p2
->regno
);
3899 return p1
->regno
- p2
->regno
;
3904 if (p1
->uses
> p2
->uses
)
3906 if (p1
->uses
< p2
->uses
)
3908 /* If regs are equally good, sort by regno,
3909 so that the results of qsort leave nothing to chance. */
3910 return p1
->regno
- p2
->regno
;
3913 /* Used for communication between order_regs_for_reload and count_pseudo.
3914 Used to avoid counting one pseudo twice. */
3915 static regset pseudos_counted
;
3917 /* Update the costs in N_USES, considering that pseudo REG is live. */
3919 count_pseudo (n_uses
, reg
)
3920 struct hard_reg_n_uses
*n_uses
;
3923 int r
= reg_renumber
[reg
];
3926 if (REGNO_REG_SET_P (pseudos_counted
, reg
))
3928 SET_REGNO_REG_SET (pseudos_counted
, reg
);
3933 nregs
= HARD_REGNO_NREGS (r
, PSEUDO_REGNO_MODE (reg
));
3935 n_uses
[r
++].uses
+= REG_N_REFS (reg
);
3937 /* Choose the order to consider regs for use as reload registers
3938 based on how much trouble would be caused by spilling one.
3939 Store them in order of decreasing preference in potential_reload_regs. */
3942 order_regs_for_reload (chain
)
3943 struct insn_chain
*chain
;
3947 struct hard_reg_n_uses hard_reg_n_uses
[FIRST_PSEUDO_REGISTER
];
3949 pseudos_counted
= ALLOCA_REG_SET ();
3951 COPY_HARD_REG_SET (bad_spill_regs
, bad_spill_regs_global
);
3953 /* Count number of uses of each hard reg by pseudo regs allocated to it
3954 and then order them by decreasing use. */
3956 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
3960 hard_reg_n_uses
[i
].regno
= i
;
3961 hard_reg_n_uses
[i
].uses
= 0;
3963 /* Test the various reasons why we can't use a register for
3964 spilling in this insn. */
3966 || REGNO_REG_SET_P (chain
->live_before
, i
)
3967 || REGNO_REG_SET_P (chain
->live_after
, i
))
3969 SET_HARD_REG_BIT (bad_spill_regs
, i
);
3973 /* Now find out which pseudos are allocated to it, and update
3975 CLEAR_REG_SET (pseudos_counted
);
3977 EXECUTE_IF_SET_IN_REG_SET
3978 (chain
->live_before
, FIRST_PSEUDO_REGISTER
, j
,
3980 count_pseudo (hard_reg_n_uses
, j
);
3982 EXECUTE_IF_SET_IN_REG_SET
3983 (chain
->live_after
, FIRST_PSEUDO_REGISTER
, j
,
3985 count_pseudo (hard_reg_n_uses
, j
);
3989 FREE_REG_SET (pseudos_counted
);
3991 /* Prefer registers not so far used, for use in temporary loading.
3992 Among them, if REG_ALLOC_ORDER is defined, use that order.
3993 Otherwise, prefer registers not preserved by calls. */
3995 #ifdef REG_ALLOC_ORDER
3996 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
3998 int regno
= reg_alloc_order
[i
];
4000 if (hard_reg_n_uses
[regno
].uses
== 0
4001 && ! TEST_HARD_REG_BIT (bad_spill_regs
, regno
))
4002 potential_reload_regs
[o
++] = regno
;
4005 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
4007 if (hard_reg_n_uses
[i
].uses
== 0 && call_used_regs
[i
]
4008 && ! TEST_HARD_REG_BIT (bad_spill_regs
, i
))
4009 potential_reload_regs
[o
++] = i
;
4011 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
4013 if (hard_reg_n_uses
[i
].uses
== 0 && ! call_used_regs
[i
]
4014 && ! TEST_HARD_REG_BIT (bad_spill_regs
, i
))
4015 potential_reload_regs
[o
++] = i
;
4019 qsort (hard_reg_n_uses
, FIRST_PSEUDO_REGISTER
,
4020 sizeof hard_reg_n_uses
[0], hard_reg_use_compare
);
4022 /* Now add the regs that are already used,
4023 preferring those used less often. The fixed and otherwise forbidden
4024 registers will be at the end of this list. */
4026 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
4027 if (hard_reg_n_uses
[i
].uses
!= 0
4028 && ! TEST_HARD_REG_BIT (bad_spill_regs
, hard_reg_n_uses
[i
].regno
))
4029 potential_reload_regs
[o
++] = hard_reg_n_uses
[i
].regno
;
4030 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
4031 if (TEST_HARD_REG_BIT (bad_spill_regs
, hard_reg_n_uses
[i
].regno
))
4032 potential_reload_regs
[o
++] = hard_reg_n_uses
[i
].regno
;
4035 /* Reload pseudo-registers into hard regs around each insn as needed.
4036 Additional register load insns are output before the insn that needs it
4037 and perhaps store insns after insns that modify the reloaded pseudo reg.
4039 reg_last_reload_reg and reg_reloaded_contents keep track of
4040 which registers are already available in reload registers.
4041 We update these for the reloads that we perform,
4042 as the insns are scanned. */
4045 reload_as_needed (live_known
)
4048 struct insn_chain
*chain
;
4052 bzero ((char *) spill_reg_rtx
, sizeof spill_reg_rtx
);
4053 bzero ((char *) spill_reg_store
, sizeof spill_reg_store
);
4054 reg_last_reload_reg
= (rtx
*) alloca (max_regno
* sizeof (rtx
));
4055 bzero ((char *) reg_last_reload_reg
, max_regno
* sizeof (rtx
));
4056 reg_has_output_reload
= (char *) alloca (max_regno
);
4057 CLEAR_HARD_REG_SET (reg_reloaded_valid
);
4059 set_initial_elim_offsets ();
4061 for (chain
= reload_insn_chain
; chain
; chain
= chain
->next
)
4064 rtx insn
= chain
->insn
;
4065 rtx old_next
= NEXT_INSN (insn
);
4067 /* If we pass a label, copy the offsets from the label information
4068 into the current offsets of each elimination. */
4069 if (GET_CODE (insn
) == CODE_LABEL
)
4070 set_offsets_for_label (insn
);
4072 else if (GET_RTX_CLASS (GET_CODE (insn
)) == 'i')
4074 rtx oldpat
= PATTERN (insn
);
4076 /* If this is a USE and CLOBBER of a MEM, ensure that any
4077 references to eliminable registers have been removed. */
4079 if ((GET_CODE (PATTERN (insn
)) == USE
4080 || GET_CODE (PATTERN (insn
)) == CLOBBER
)
4081 && GET_CODE (XEXP (PATTERN (insn
), 0)) == MEM
)
4082 XEXP (XEXP (PATTERN (insn
), 0), 0)
4083 = eliminate_regs (XEXP (XEXP (PATTERN (insn
), 0), 0),
4084 GET_MODE (XEXP (PATTERN (insn
), 0)),
4087 /* If we need to do register elimination processing, do so.
4088 This might delete the insn, in which case we are done. */
4089 if (num_eliminable
&& chain
->need_elim
)
4091 eliminate_regs_in_insn (insn
, 1);
4092 if (GET_CODE (insn
) == NOTE
)
4094 update_eliminable_offsets ();
4099 /* If need_elim is nonzero but need_reload is zero, one might think
4100 that we could simply set n_reloads to 0. However, find_reloads
4101 could have done some manipulation of the insn (such as swapping
4102 commutative operands), and these manipulations are lost during
4103 the first pass for every insn that needs register elimination.
4104 So the actions of find_reloads must be redone here. */
4106 if (! chain
->need_elim
&& ! chain
->need_reload
4107 && ! chain
->need_operand_change
)
4109 /* First find the pseudo regs that must be reloaded for this insn.
4110 This info is returned in the tables reload_... (see reload.h).
4111 Also modify the body of INSN by substituting RELOAD
4112 rtx's for those pseudo regs. */
4115 bzero (reg_has_output_reload
, max_regno
);
4116 CLEAR_HARD_REG_SET (reg_is_output_reload
);
4118 find_reloads (insn
, 1, spill_indirect_levels
, live_known
,
4122 if (num_eliminable
&& chain
->need_elim
)
4123 update_eliminable_offsets ();
4127 rtx next
= NEXT_INSN (insn
);
4130 prev
= PREV_INSN (insn
);
4132 /* Now compute which reload regs to reload them into. Perhaps
4133 reusing reload regs from previous insns, or else output
4134 load insns to reload them. Maybe output store insns too.
4135 Record the choices of reload reg in reload_reg_rtx. */
4136 choose_reload_regs (chain
);
4138 /* Merge any reloads that we didn't combine for fear of
4139 increasing the number of spill registers needed but now
4140 discover can be safely merged. */
4141 if (SMALL_REGISTER_CLASSES
)
4142 merge_assigned_reloads (insn
);
4144 /* Generate the insns to reload operands into or out of
4145 their reload regs. */
4146 emit_reload_insns (chain
);
4148 /* Substitute the chosen reload regs from reload_reg_rtx
4149 into the insn's body (or perhaps into the bodies of other
4150 load and store insn that we just made for reloading
4151 and that we moved the structure into). */
4154 /* If this was an ASM, make sure that all the reload insns
4155 we have generated are valid. If not, give an error
4158 if (asm_noperands (PATTERN (insn
)) >= 0)
4159 for (p
= NEXT_INSN (prev
); p
!= next
; p
= NEXT_INSN (p
))
4160 if (p
!= insn
&& GET_RTX_CLASS (GET_CODE (p
)) == 'i'
4161 && (recog_memoized (p
) < 0
4162 || (insn_extract (p
),
4163 ! constrain_operands (INSN_CODE (p
), 1))))
4165 error_for_asm (insn
,
4166 "`asm' operand requires impossible reload");
4168 NOTE_SOURCE_FILE (p
) = 0;
4169 NOTE_LINE_NUMBER (p
) = NOTE_INSN_DELETED
;
4172 /* Any previously reloaded spilled pseudo reg, stored in this insn,
4173 is no longer validly lying around to save a future reload.
4174 Note that this does not detect pseudos that were reloaded
4175 for this insn in order to be stored in
4176 (obeying register constraints). That is correct; such reload
4177 registers ARE still valid. */
4178 note_stores (oldpat
, forget_old_reloads_1
);
4180 /* There may have been CLOBBER insns placed after INSN. So scan
4181 between INSN and NEXT and use them to forget old reloads. */
4182 for (x
= NEXT_INSN (insn
); x
!= old_next
; x
= NEXT_INSN (x
))
4183 if (GET_CODE (x
) == INSN
&& GET_CODE (PATTERN (x
)) == CLOBBER
)
4184 note_stores (PATTERN (x
), forget_old_reloads_1
);
4187 /* Likewise for regs altered by auto-increment in this insn.
4188 REG_INC notes have been changed by reloading:
4189 find_reloads_address_1 records substitutions for them,
4190 which have been performed by subst_reloads above. */
4191 for (i
= n_reloads
- 1; i
>= 0; i
--)
4193 rtx in_reg
= reload_in_reg
[i
];
4196 enum rtx_code code
= GET_CODE (in_reg
);
4197 /* PRE_INC / PRE_DEC will have the reload register ending up
4198 with the same value as the stack slot, but that doesn't
4199 hold true for POST_INC / POST_DEC. Either we have to
4200 convert the memory access to a true POST_INC / POST_DEC,
4201 or we can't use the reload register for inheritance. */
4202 if ((code
== POST_INC
|| code
== POST_DEC
)
4203 && TEST_HARD_REG_BIT (reg_reloaded_valid
,
4204 REGNO (reload_reg_rtx
[i
]))
4205 /* Make sure it is the inc/dec pseudo, and not
4206 some other (e.g. output operand) pseudo. */
4207 && (reg_reloaded_contents
[REGNO (reload_reg_rtx
[i
])]
4208 == REGNO (XEXP (in_reg
, 0))))
4211 rtx reload_reg
= reload_reg_rtx
[i
];
4212 enum machine_mode mode
= GET_MODE (reload_reg
);
4216 for (p
= PREV_INSN (old_next
); p
!= prev
; p
= PREV_INSN (p
))
4218 /* We really want to ignore REG_INC notes here, so
4219 use PATTERN (p) as argument to reg_set_p . */
4220 if (reg_set_p (reload_reg
, PATTERN (p
)))
4222 n
= count_occurrences (PATTERN (p
), reload_reg
);
4226 n
= validate_replace_rtx (reload_reg
,
4227 gen_rtx (code
, mode
,
4232 REG_NOTES (p
) = gen_rtx_EXPR_LIST (REG_INC
, reload_reg
,
4235 forget_old_reloads_1 (XEXP (in_reg
, 0), NULL_RTX
);
4239 #if 0 /* ??? Is this code obsolete now? Need to check carefully. */
4240 /* Likewise for regs altered by auto-increment in this insn.
4241 But note that the reg-notes are not changed by reloading:
4242 they still contain the pseudo-regs, not the spill regs. */
4243 for (x
= REG_NOTES (insn
); x
; x
= XEXP (x
, 1))
4244 if (REG_NOTE_KIND (x
) == REG_INC
)
4246 /* See if this pseudo reg was reloaded in this insn.
4247 If so, its last-reload info is still valid
4248 because it is based on this insn's reload. */
4249 for (i
= 0; i
< n_reloads
; i
++)
4250 if (reload_out
[i
] == XEXP (x
, 0))
4254 forget_old_reloads_1 (XEXP (x
, 0), NULL_RTX
);
4259 /* A reload reg's contents are unknown after a label. */
4260 if (GET_CODE (insn
) == CODE_LABEL
)
4261 CLEAR_HARD_REG_SET (reg_reloaded_valid
);
4263 /* Don't assume a reload reg is still good after a call insn
4264 if it is a call-used reg. */
4265 else if (GET_CODE (insn
) == CALL_INSN
)
4266 AND_COMPL_HARD_REG_SET(reg_reloaded_valid
, call_used_reg_set
);
4268 /* In case registers overlap, allow certain insns to invalidate
4269 particular hard registers. */
4271 #ifdef INSN_CLOBBERS_REGNO_P
4272 for (i
= 0 ; i
< FIRST_PSEUDO_REGISTER
; i
++)
4273 if (TEST_HARD_REG_BIT (reg_reloaded_valid
, i
)
4274 && INSN_CLOBBERS_REGNO_P (insn
, i
))
4275 CLEAR_HARD_REG_BIT (reg_reloaded_valid
, i
);
4284 /* Discard all record of any value reloaded from X,
4285 or reloaded in X from someplace else;
4286 unless X is an output reload reg of the current insn.
4288 X may be a hard reg (the reload reg)
4289 or it may be a pseudo reg that was reloaded from. */
4292 forget_old_reloads_1 (x
, ignored
)
4294 rtx ignored ATTRIBUTE_UNUSED
;
4300 /* note_stores does give us subregs of hard regs. */
4301 while (GET_CODE (x
) == SUBREG
)
4303 offset
+= SUBREG_WORD (x
);
4307 if (GET_CODE (x
) != REG
)
4310 regno
= REGNO (x
) + offset
;
4312 if (regno
>= FIRST_PSEUDO_REGISTER
)
4317 nr
= HARD_REGNO_NREGS (regno
, GET_MODE (x
));
4318 /* Storing into a spilled-reg invalidates its contents.
4319 This can happen if a block-local pseudo is allocated to that reg
4320 and it wasn't spilled because this block's total need is 0.
4321 Then some insn might have an optional reload and use this reg. */
4322 for (i
= 0; i
< nr
; i
++)
4323 /* But don't do this if the reg actually serves as an output
4324 reload reg in the current instruction. */
4326 || ! TEST_HARD_REG_BIT (reg_is_output_reload
, regno
+ i
))
4327 CLEAR_HARD_REG_BIT (reg_reloaded_valid
, regno
+ i
);
4330 /* Since value of X has changed,
4331 forget any value previously copied from it. */
4334 /* But don't forget a copy if this is the output reload
4335 that establishes the copy's validity. */
4336 if (n_reloads
== 0 || reg_has_output_reload
[regno
+ nr
] == 0)
4337 reg_last_reload_reg
[regno
+ nr
] = 0;
4340 /* For each reload, the mode of the reload register. */
4341 static enum machine_mode reload_mode
[MAX_RELOADS
];
4343 /* For each reload, the largest number of registers it will require. */
4344 static int reload_nregs
[MAX_RELOADS
];
4346 /* Comparison function for qsort to decide which of two reloads
4347 should be handled first. *P1 and *P2 are the reload numbers. */
4350 reload_reg_class_lower (r1p
, r2p
)
4351 const GENERIC_PTR r1p
;
4352 const GENERIC_PTR r2p
;
4354 register int r1
= *(short *)r1p
, r2
= *(short *)r2p
;
4357 /* Consider required reloads before optional ones. */
4358 t
= reload_optional
[r1
] - reload_optional
[r2
];
4362 /* Count all solitary classes before non-solitary ones. */
4363 t
= ((reg_class_size
[(int) reload_reg_class
[r2
]] == 1)
4364 - (reg_class_size
[(int) reload_reg_class
[r1
]] == 1));
4368 /* Aside from solitaires, consider all multi-reg groups first. */
4369 t
= reload_nregs
[r2
] - reload_nregs
[r1
];
4373 /* Consider reloads in order of increasing reg-class number. */
4374 t
= (int) reload_reg_class
[r1
] - (int) reload_reg_class
[r2
];
4378 /* If reloads are equally urgent, sort by reload number,
4379 so that the results of qsort leave nothing to chance. */
4383 /* The following HARD_REG_SETs indicate when each hard register is
4384 used for a reload of various parts of the current insn. */
4386 /* If reg is in use as a reload reg for a RELOAD_OTHER reload. */
4387 static HARD_REG_SET reload_reg_used
;
4388 /* If reg is in use for a RELOAD_FOR_INPUT_ADDRESS reload for operand I. */
4389 static HARD_REG_SET reload_reg_used_in_input_addr
[MAX_RECOG_OPERANDS
];
4390 /* If reg is in use for a RELOAD_FOR_INPADDR_ADDRESS reload for operand I. */
4391 static HARD_REG_SET reload_reg_used_in_inpaddr_addr
[MAX_RECOG_OPERANDS
];
4392 /* If reg is in use for a RELOAD_FOR_OUTPUT_ADDRESS reload for operand I. */
4393 static HARD_REG_SET reload_reg_used_in_output_addr
[MAX_RECOG_OPERANDS
];
4394 /* If reg is in use for a RELOAD_FOR_OUTADDR_ADDRESS reload for operand I. */
4395 static HARD_REG_SET reload_reg_used_in_outaddr_addr
[MAX_RECOG_OPERANDS
];
4396 /* If reg is in use for a RELOAD_FOR_INPUT reload for operand I. */
4397 static HARD_REG_SET reload_reg_used_in_input
[MAX_RECOG_OPERANDS
];
4398 /* If reg is in use for a RELOAD_FOR_OUTPUT reload for operand I. */
4399 static HARD_REG_SET reload_reg_used_in_output
[MAX_RECOG_OPERANDS
];
4400 /* If reg is in use for a RELOAD_FOR_OPERAND_ADDRESS reload. */
4401 static HARD_REG_SET reload_reg_used_in_op_addr
;
4402 /* If reg is in use for a RELOAD_FOR_OPADDR_ADDR reload. */
4403 static HARD_REG_SET reload_reg_used_in_op_addr_reload
;
4404 /* If reg is in use for a RELOAD_FOR_INSN reload. */
4405 static HARD_REG_SET reload_reg_used_in_insn
;
4406 /* If reg is in use for a RELOAD_FOR_OTHER_ADDRESS reload. */
4407 static HARD_REG_SET reload_reg_used_in_other_addr
;
4409 /* If reg is in use as a reload reg for any sort of reload. */
4410 static HARD_REG_SET reload_reg_used_at_all
;
4412 /* If reg is use as an inherited reload. We just mark the first register
4414 static HARD_REG_SET reload_reg_used_for_inherit
;
4416 /* Records which hard regs are allocated to a pseudo during any point of the
4418 static HARD_REG_SET reg_used_by_pseudo
;
4420 /* Mark reg REGNO as in use for a reload of the sort spec'd by OPNUM and
4421 TYPE. MODE is used to indicate how many consecutive regs are
4425 mark_reload_reg_in_use (regno
, opnum
, type
, mode
)
4428 enum reload_type type
;
4429 enum machine_mode mode
;
4431 int nregs
= HARD_REGNO_NREGS (regno
, mode
);
4434 for (i
= regno
; i
< nregs
+ regno
; i
++)
4439 SET_HARD_REG_BIT (reload_reg_used
, i
);
4442 case RELOAD_FOR_INPUT_ADDRESS
:
4443 SET_HARD_REG_BIT (reload_reg_used_in_input_addr
[opnum
], i
);
4446 case RELOAD_FOR_INPADDR_ADDRESS
:
4447 SET_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr
[opnum
], i
);
4450 case RELOAD_FOR_OUTPUT_ADDRESS
:
4451 SET_HARD_REG_BIT (reload_reg_used_in_output_addr
[opnum
], i
);
4454 case RELOAD_FOR_OUTADDR_ADDRESS
:
4455 SET_HARD_REG_BIT (reload_reg_used_in_outaddr_addr
[opnum
], i
);
4458 case RELOAD_FOR_OPERAND_ADDRESS
:
4459 SET_HARD_REG_BIT (reload_reg_used_in_op_addr
, i
);
4462 case RELOAD_FOR_OPADDR_ADDR
:
4463 SET_HARD_REG_BIT (reload_reg_used_in_op_addr_reload
, i
);
4466 case RELOAD_FOR_OTHER_ADDRESS
:
4467 SET_HARD_REG_BIT (reload_reg_used_in_other_addr
, i
);
4470 case RELOAD_FOR_INPUT
:
4471 SET_HARD_REG_BIT (reload_reg_used_in_input
[opnum
], i
);
4474 case RELOAD_FOR_OUTPUT
:
4475 SET_HARD_REG_BIT (reload_reg_used_in_output
[opnum
], i
);
4478 case RELOAD_FOR_INSN
:
4479 SET_HARD_REG_BIT (reload_reg_used_in_insn
, i
);
4483 SET_HARD_REG_BIT (reload_reg_used_at_all
, i
);
4487 /* Similarly, but show REGNO is no longer in use for a reload. */
4490 clear_reload_reg_in_use (regno
, opnum
, type
, mode
)
4493 enum reload_type type
;
4494 enum machine_mode mode
;
4496 int nregs
= HARD_REGNO_NREGS (regno
, mode
);
4497 int start_regno
, end_regno
;
4499 /* A complication is that for some reload types, inheritance might
4500 allow multiple reloads of the same types to share a reload register.
4501 We set check_opnum if we have to check only reloads with the same
4502 operand number, and check_any if we have to check all reloads. */
4503 int check_opnum
= 0;
4505 HARD_REG_SET
*used_in_set
;
4510 used_in_set
= &reload_reg_used
;
4513 case RELOAD_FOR_INPUT_ADDRESS
:
4514 used_in_set
= &reload_reg_used_in_input_addr
[opnum
];
4517 case RELOAD_FOR_INPADDR_ADDRESS
:
4519 used_in_set
= &reload_reg_used_in_inpaddr_addr
[opnum
];
4522 case RELOAD_FOR_OUTPUT_ADDRESS
:
4523 used_in_set
= &reload_reg_used_in_output_addr
[opnum
];
4526 case RELOAD_FOR_OUTADDR_ADDRESS
:
4528 used_in_set
= &reload_reg_used_in_outaddr_addr
[opnum
];
4531 case RELOAD_FOR_OPERAND_ADDRESS
:
4532 used_in_set
= &reload_reg_used_in_op_addr
;
4535 case RELOAD_FOR_OPADDR_ADDR
:
4537 used_in_set
= &reload_reg_used_in_op_addr_reload
;
4540 case RELOAD_FOR_OTHER_ADDRESS
:
4541 used_in_set
= &reload_reg_used_in_other_addr
;
4545 case RELOAD_FOR_INPUT
:
4546 used_in_set
= &reload_reg_used_in_input
[opnum
];
4549 case RELOAD_FOR_OUTPUT
:
4550 used_in_set
= &reload_reg_used_in_output
[opnum
];
4553 case RELOAD_FOR_INSN
:
4554 used_in_set
= &reload_reg_used_in_insn
;
4559 /* We resolve conflicts with remaining reloads of the same type by
4560 excluding the intervals of of reload registers by them from the
4561 interval of freed reload registers. Since we only keep track of
4562 one set of interval bounds, we might have to exclude somewhat
4563 more then what would be necessary if we used a HARD_REG_SET here.
4564 But this should only happen very infrequently, so there should
4565 be no reason to worry about it. */
4567 start_regno
= regno
;
4568 end_regno
= regno
+ nregs
;
4569 if (check_opnum
|| check_any
)
4571 for (i
= n_reloads
- 1; i
>= 0; i
--)
4573 if (reload_when_needed
[i
] == type
4574 && (check_any
|| reload_opnum
[i
] == opnum
)
4575 && reload_reg_rtx
[i
])
4577 int conflict_start
= true_regnum (reload_reg_rtx
[i
]);
4580 + HARD_REGNO_NREGS (conflict_start
, reload_mode
[i
]));
4582 /* If there is an overlap with the first to-be-freed register,
4583 adjust the interval start. */
4584 if (conflict_start
<= start_regno
&& conflict_end
> start_regno
)
4585 start_regno
= conflict_end
;
4586 /* Otherwise, if there is a conflict with one of the other
4587 to-be-freed registers, adjust the interval end. */
4588 if (conflict_start
> start_regno
&& conflict_start
< end_regno
)
4589 end_regno
= conflict_start
;
4593 for (i
= start_regno
; i
< end_regno
; i
++)
4594 CLEAR_HARD_REG_BIT (*used_in_set
, i
);
4597 /* 1 if reg REGNO is free as a reload reg for a reload of the sort
4598 specified by OPNUM and TYPE. */
4601 reload_reg_free_p (regno
, opnum
, type
)
4604 enum reload_type type
;
4608 /* In use for a RELOAD_OTHER means it's not available for anything. */
4609 if (TEST_HARD_REG_BIT (reload_reg_used
, regno
))
4615 /* In use for anything means we can't use it for RELOAD_OTHER. */
4616 if (TEST_HARD_REG_BIT (reload_reg_used_in_other_addr
, regno
)
4617 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr
, regno
)
4618 || TEST_HARD_REG_BIT (reload_reg_used_in_insn
, regno
))
4621 for (i
= 0; i
< reload_n_operands
; i
++)
4622 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr
[i
], regno
)
4623 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr
[i
], regno
)
4624 || TEST_HARD_REG_BIT (reload_reg_used_in_output_addr
[i
], regno
)
4625 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr
[i
], regno
)
4626 || TEST_HARD_REG_BIT (reload_reg_used_in_input
[i
], regno
)
4627 || TEST_HARD_REG_BIT (reload_reg_used_in_output
[i
], regno
))
4632 case RELOAD_FOR_INPUT
:
4633 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn
, regno
)
4634 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr
, regno
))
4637 if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload
, regno
))
4640 /* If it is used for some other input, can't use it. */
4641 for (i
= 0; i
< reload_n_operands
; i
++)
4642 if (TEST_HARD_REG_BIT (reload_reg_used_in_input
[i
], regno
))
4645 /* If it is used in a later operand's address, can't use it. */
4646 for (i
= opnum
+ 1; i
< reload_n_operands
; i
++)
4647 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr
[i
], regno
)
4648 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr
[i
], regno
))
4653 case RELOAD_FOR_INPUT_ADDRESS
:
4654 /* Can't use a register if it is used for an input address for this
4655 operand or used as an input in an earlier one. */
4656 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr
[opnum
], regno
)
4657 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr
[opnum
], regno
))
4660 for (i
= 0; i
< opnum
; i
++)
4661 if (TEST_HARD_REG_BIT (reload_reg_used_in_input
[i
], regno
))
4666 case RELOAD_FOR_INPADDR_ADDRESS
:
4667 /* Can't use a register if it is used for an input address
4668 for this operand or used as an input in an earlier
4670 if (TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr
[opnum
], regno
))
4673 for (i
= 0; i
< opnum
; i
++)
4674 if (TEST_HARD_REG_BIT (reload_reg_used_in_input
[i
], regno
))
4679 case RELOAD_FOR_OUTPUT_ADDRESS
:
4680 /* Can't use a register if it is used for an output address for this
4681 operand or used as an output in this or a later operand. */
4682 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr
[opnum
], regno
))
4685 for (i
= opnum
; i
< reload_n_operands
; i
++)
4686 if (TEST_HARD_REG_BIT (reload_reg_used_in_output
[i
], regno
))
4691 case RELOAD_FOR_OUTADDR_ADDRESS
:
4692 /* Can't use a register if it is used for an output address
4693 for this operand or used as an output in this or a
4695 if (TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr
[opnum
], regno
))
4698 for (i
= opnum
; i
< reload_n_operands
; i
++)
4699 if (TEST_HARD_REG_BIT (reload_reg_used_in_output
[i
], regno
))
4704 case RELOAD_FOR_OPERAND_ADDRESS
:
4705 for (i
= 0; i
< reload_n_operands
; i
++)
4706 if (TEST_HARD_REG_BIT (reload_reg_used_in_input
[i
], regno
))
4709 return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn
, regno
)
4710 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr
, regno
));
4712 case RELOAD_FOR_OPADDR_ADDR
:
4713 for (i
= 0; i
< reload_n_operands
; i
++)
4714 if (TEST_HARD_REG_BIT (reload_reg_used_in_input
[i
], regno
))
4717 return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload
, regno
));
4719 case RELOAD_FOR_OUTPUT
:
4720 /* This cannot share a register with RELOAD_FOR_INSN reloads, other
4721 outputs, or an operand address for this or an earlier output. */
4722 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn
, regno
))
4725 for (i
= 0; i
< reload_n_operands
; i
++)
4726 if (TEST_HARD_REG_BIT (reload_reg_used_in_output
[i
], regno
))
4729 for (i
= 0; i
<= opnum
; i
++)
4730 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr
[i
], regno
)
4731 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr
[i
], regno
))
4736 case RELOAD_FOR_INSN
:
4737 for (i
= 0; i
< reload_n_operands
; i
++)
4738 if (TEST_HARD_REG_BIT (reload_reg_used_in_input
[i
], regno
)
4739 || TEST_HARD_REG_BIT (reload_reg_used_in_output
[i
], regno
))
4742 return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn
, regno
)
4743 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr
, regno
));
4745 case RELOAD_FOR_OTHER_ADDRESS
:
4746 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr
, regno
);
4751 /* Return 1 if the value in reload reg REGNO, as used by a reload
4752 needed for the part of the insn specified by OPNUM and TYPE,
4753 is not in use for a reload in any prior part of the insn.
4755 We can assume that the reload reg was already tested for availability
4756 at the time it is needed, and we should not check this again,
4757 in case the reg has already been marked in use.
4759 However, if EQUIV is set, we are checking the availability of a register
4760 holding an equivalence to the value to be loaded into the reload register,
4761 not the availability of the reload register itself.
4763 This is still less stringent than what reload_reg_free_p checks; for
4764 example, compare the checks for RELOAD_OTHER. */
4767 reload_reg_free_before_p (regno
, opnum
, type
, equiv
)
4770 enum reload_type type
;
4775 /* The code to handle EQUIV below is wrong.
4777 If we wnat to know if a value in a particular reload register is available
4778 at a particular point in time during reloading, we must check *all*
4779 prior reloads to see if they clobber the value.
4781 Note this is significantly different from determining when a register is
4782 free for usage in a reload!
4784 This change is temporary. It will go away. */
4790 case RELOAD_FOR_OTHER_ADDRESS
:
4791 /* These always come first. */
4792 if (equiv
&& TEST_HARD_REG_BIT (reload_reg_used_in_other_addr
, regno
))
4797 if (equiv
&& TEST_HARD_REG_BIT (reload_reg_used
, regno
))
4799 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr
, regno
);
4801 /* If this use is for part of the insn,
4802 check the reg is not in use for any prior part. It is tempting
4803 to try to do this by falling through from objecs that occur
4804 later in the insn to ones that occur earlier, but that will not
4805 correctly take into account the fact that here we MUST ignore
4806 things that would prevent the register from being allocated in
4807 the first place, since we know that it was allocated. */
4809 case RELOAD_FOR_OUTPUT_ADDRESS
:
4811 && TEST_HARD_REG_BIT (reload_reg_used_in_output_addr
[opnum
], regno
))
4813 /* Earlier reloads include RELOAD_FOR_OUTADDR_ADDRESS reloads. */
4814 if (TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr
[opnum
], regno
))
4816 /* ... fall through ... */
4817 case RELOAD_FOR_OUTADDR_ADDRESS
:
4819 && (TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr
[opnum
], regno
)
4820 || TEST_HARD_REG_BIT (reload_reg_used
, regno
)))
4822 /* Earlier reloads are for earlier outputs or their addresses,
4823 any RELOAD_FOR_INSN reloads, any inputs or their addresses, or any
4824 RELOAD_FOR_OTHER_ADDRESS reloads (we know it can't conflict with
4826 for (i
= 0; i
< opnum
; i
++)
4827 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr
[i
], regno
)
4828 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr
[i
], regno
))
4831 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn
, regno
))
4834 for (i
= 0; i
< reload_n_operands
; i
++)
4835 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr
[i
], regno
)
4836 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr
[i
], regno
)
4837 || TEST_HARD_REG_BIT (reload_reg_used_in_input
[i
], regno
)
4838 || TEST_HARD_REG_BIT (reload_reg_used_in_output
[i
], regno
))
4841 return (! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr
, regno
)
4842 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn
, regno
)
4843 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload
, regno
)
4844 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr
, regno
));
4846 case RELOAD_FOR_OUTPUT
:
4847 case RELOAD_FOR_INSN
:
4848 /* There is no reason to call this function for output reloads, thus
4849 anything we'd put here wouldn't be tested. So just abort. */
4852 case RELOAD_FOR_OPERAND_ADDRESS
:
4853 if (equiv
&& TEST_HARD_REG_BIT (reload_reg_used_in_op_addr
, regno
))
4856 /* Earlier reloads include RELOAD_FOR_OPADDR_ADDR reloads. */
4857 if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload
, regno
))
4860 /* ... fall through ... */
4862 case RELOAD_FOR_OPADDR_ADDR
:
4865 if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload
, regno
)
4866 || TEST_HARD_REG_BIT (reload_reg_used
, regno
))
4868 for (i
= 0; i
< reload_n_operands
; i
++)
4869 if (TEST_HARD_REG_BIT (reload_reg_used_in_input
[i
], regno
))
4872 /* These can't conflict with inputs, or each other, so all we have to
4873 test is input addresses and the addresses of OTHER items. */
4875 for (i
= 0; i
< reload_n_operands
; i
++)
4876 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr
[i
], regno
)
4877 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr
[i
], regno
))
4880 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr
, regno
);
4882 case RELOAD_FOR_INPUT
:
4883 if (equiv
&& TEST_HARD_REG_BIT (reload_reg_used
, regno
))
4886 /* The only things earlier are the address for this and
4887 earlier inputs, other inputs (which we know we don't conflict
4888 with), and addresses of RELOAD_OTHER objects.
4889 We can ignore the conflict with addresses of this operand, since
4890 when we inherit this operand, its address reloads are discarded. */
4892 for (i
= 0; i
< opnum
; i
++)
4893 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr
[i
], regno
)
4894 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr
[i
], regno
))
4897 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr
, regno
);
4899 case RELOAD_FOR_INPUT_ADDRESS
:
4900 /* Earlier reloads include RELOAD_FOR_INPADDR_ADDRESS reloads. */
4901 if (TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr
[opnum
], regno
))
4903 /* ... fall through ... */
4904 case RELOAD_FOR_INPADDR_ADDRESS
:
4905 if (equiv
&& TEST_HARD_REG_BIT (reload_reg_used
, regno
))
4908 /* Similarly, all we have to check is for use in earlier inputs'
4910 for (i
= 0; i
< opnum
; i
++)
4911 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr
[i
], regno
)
4912 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr
[i
], regno
))
4915 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr
, regno
);
4920 /* Return 1 if the value in reload reg REGNO, as used by a reload
4921 needed for the part of the insn specified by OPNUM and TYPE,
4922 is still available in REGNO at the end of the insn.
4924 We can assume that the reload reg was already tested for availability
4925 at the time it is needed, and we should not check this again,
4926 in case the reg has already been marked in use. */
4929 reload_reg_reaches_end_p (regno
, opnum
, type
)
4932 enum reload_type type
;
4939 /* Since a RELOAD_OTHER reload claims the reg for the entire insn,
4940 its value must reach the end. */
4943 /* If this use is for part of the insn,
4944 its value reaches if no subsequent part uses the same register.
4945 Just like the above function, don't try to do this with lots
4948 case RELOAD_FOR_OTHER_ADDRESS
:
4949 /* Here we check for everything else, since these don't conflict
4950 with anything else and everything comes later. */
4952 for (i
= 0; i
< reload_n_operands
; i
++)
4953 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr
[i
], regno
)
4954 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr
[i
], regno
)
4955 || TEST_HARD_REG_BIT (reload_reg_used_in_output
[i
], regno
)
4956 || TEST_HARD_REG_BIT (reload_reg_used_in_input_addr
[i
], regno
)
4957 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr
[i
], regno
)
4958 || TEST_HARD_REG_BIT (reload_reg_used_in_input
[i
], regno
))
4961 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr
, regno
)
4962 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn
, regno
)
4963 && ! TEST_HARD_REG_BIT (reload_reg_used
, regno
));
4965 case RELOAD_FOR_INPUT_ADDRESS
:
4966 case RELOAD_FOR_INPADDR_ADDRESS
:
4967 /* Similar, except that we check only for this and subsequent inputs
4968 and the address of only subsequent inputs and we do not need
4969 to check for RELOAD_OTHER objects since they are known not to
4972 for (i
= opnum
; i
< reload_n_operands
; i
++)
4973 if (TEST_HARD_REG_BIT (reload_reg_used_in_input
[i
], regno
))
4976 for (i
= opnum
+ 1; i
< reload_n_operands
; i
++)
4977 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr
[i
], regno
)
4978 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr
[i
], regno
))
4981 for (i
= 0; i
< reload_n_operands
; i
++)
4982 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr
[i
], regno
)
4983 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr
[i
], regno
)
4984 || TEST_HARD_REG_BIT (reload_reg_used_in_output
[i
], regno
))
4987 if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload
, regno
))
4990 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr
, regno
)
4991 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn
, regno
));
4993 case RELOAD_FOR_INPUT
:
4994 /* Similar to input address, except we start at the next operand for
4995 both input and input address and we do not check for
4996 RELOAD_FOR_OPERAND_ADDRESS and RELOAD_FOR_INSN since these
4999 for (i
= opnum
+ 1; i
< reload_n_operands
; i
++)
5000 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr
[i
], regno
)
5001 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr
[i
], regno
)
5002 || TEST_HARD_REG_BIT (reload_reg_used_in_input
[i
], regno
))
5005 /* ... fall through ... */
5007 case RELOAD_FOR_OPERAND_ADDRESS
:
5008 /* Check outputs and their addresses. */
5010 for (i
= 0; i
< reload_n_operands
; i
++)
5011 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr
[i
], regno
)
5012 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr
[i
], regno
)
5013 || TEST_HARD_REG_BIT (reload_reg_used_in_output
[i
], regno
))
5018 case RELOAD_FOR_OPADDR_ADDR
:
5019 for (i
= 0; i
< reload_n_operands
; i
++)
5020 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr
[i
], regno
)
5021 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr
[i
], regno
)
5022 || TEST_HARD_REG_BIT (reload_reg_used_in_output
[i
], regno
))
5025 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr
, regno
)
5026 && !TEST_HARD_REG_BIT (reload_reg_used_in_insn
, regno
));
5028 case RELOAD_FOR_INSN
:
5029 /* These conflict with other outputs with RELOAD_OTHER. So
5030 we need only check for output addresses. */
5034 /* ... fall through ... */
5036 case RELOAD_FOR_OUTPUT
:
5037 case RELOAD_FOR_OUTPUT_ADDRESS
:
5038 case RELOAD_FOR_OUTADDR_ADDRESS
:
5039 /* We already know these can't conflict with a later output. So the
5040 only thing to check are later output addresses. */
5041 for (i
= opnum
+ 1; i
< reload_n_operands
; i
++)
5042 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr
[i
], regno
)
5043 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr
[i
], regno
))
5052 /* Return 1 if the reloads denoted by R1 and R2 cannot share a register.
5055 This function uses the same algorithm as reload_reg_free_p above. */
5058 reloads_conflict (r1
, r2
)
5061 enum reload_type r1_type
= reload_when_needed
[r1
];
5062 enum reload_type r2_type
= reload_when_needed
[r2
];
5063 int r1_opnum
= reload_opnum
[r1
];
5064 int r2_opnum
= reload_opnum
[r2
];
5066 /* RELOAD_OTHER conflicts with everything. */
5067 if (r2_type
== RELOAD_OTHER
)
5070 /* Otherwise, check conflicts differently for each type. */
5074 case RELOAD_FOR_INPUT
:
5075 return (r2_type
== RELOAD_FOR_INSN
5076 || r2_type
== RELOAD_FOR_OPERAND_ADDRESS
5077 || r2_type
== RELOAD_FOR_OPADDR_ADDR
5078 || r2_type
== RELOAD_FOR_INPUT
5079 || ((r2_type
== RELOAD_FOR_INPUT_ADDRESS
5080 || r2_type
== RELOAD_FOR_INPADDR_ADDRESS
)
5081 && r2_opnum
> r1_opnum
));
5083 case RELOAD_FOR_INPUT_ADDRESS
:
5084 return ((r2_type
== RELOAD_FOR_INPUT_ADDRESS
&& r1_opnum
== r2_opnum
)
5085 || (r2_type
== RELOAD_FOR_INPUT
&& r2_opnum
< r1_opnum
));
5087 case RELOAD_FOR_INPADDR_ADDRESS
:
5088 return ((r2_type
== RELOAD_FOR_INPADDR_ADDRESS
&& r1_opnum
== r2_opnum
)
5089 || (r2_type
== RELOAD_FOR_INPUT
&& r2_opnum
< r1_opnum
));
5091 case RELOAD_FOR_OUTPUT_ADDRESS
:
5092 return ((r2_type
== RELOAD_FOR_OUTPUT_ADDRESS
&& r2_opnum
== r1_opnum
)
5093 || (r2_type
== RELOAD_FOR_OUTPUT
&& r2_opnum
>= r1_opnum
));
5095 case RELOAD_FOR_OUTADDR_ADDRESS
:
5096 return ((r2_type
== RELOAD_FOR_OUTADDR_ADDRESS
&& r2_opnum
== r1_opnum
)
5097 || (r2_type
== RELOAD_FOR_OUTPUT
&& r2_opnum
>= r1_opnum
));
5099 case RELOAD_FOR_OPERAND_ADDRESS
:
5100 return (r2_type
== RELOAD_FOR_INPUT
|| r2_type
== RELOAD_FOR_INSN
5101 || r2_type
== RELOAD_FOR_OPERAND_ADDRESS
);
5103 case RELOAD_FOR_OPADDR_ADDR
:
5104 return (r2_type
== RELOAD_FOR_INPUT
5105 || r2_type
== RELOAD_FOR_OPADDR_ADDR
);
5107 case RELOAD_FOR_OUTPUT
:
5108 return (r2_type
== RELOAD_FOR_INSN
|| r2_type
== RELOAD_FOR_OUTPUT
5109 || ((r2_type
== RELOAD_FOR_OUTPUT_ADDRESS
5110 || r2_type
== RELOAD_FOR_OUTADDR_ADDRESS
)
5111 && r2_opnum
>= r1_opnum
));
5113 case RELOAD_FOR_INSN
:
5114 return (r2_type
== RELOAD_FOR_INPUT
|| r2_type
== RELOAD_FOR_OUTPUT
5115 || r2_type
== RELOAD_FOR_INSN
5116 || r2_type
== RELOAD_FOR_OPERAND_ADDRESS
);
5118 case RELOAD_FOR_OTHER_ADDRESS
:
5119 return r2_type
== RELOAD_FOR_OTHER_ADDRESS
;
/* Vector of reload-numbers showing the order in which the reloads should
   be processed.  */
short reload_order[MAX_RELOADS];

/* Indexed by reload number, 1 if incoming value
   inherited from previous insns.  */
char reload_inherited[MAX_RELOADS];

/* For an inherited reload, this is the insn the reload was inherited from,
   if we know it.  Otherwise, this is 0.  */
rtx reload_inheritance_insn[MAX_RELOADS];

/* If non-zero, this is a place to get the value of the reload,
   rather than using reload_in.  */
rtx reload_override_in[MAX_RELOADS];

/* For each reload, the hard register number of the register used,
   or -1 if we did not need a register for this reload.  */
int reload_spill_index[MAX_RELOADS];
/* Return 1 if the value in reload reg REGNO, as used by a reload
   needed for the part of the insn specified by OPNUM and TYPE,
   may be used to load VALUE into it.

   Other read-only reloads with the same value do not conflict
   unless OUT is non-zero and these other reloads have to live while
   output reloads live.

   RELOADNUM is the number of the reload we want to load this value for;
   a reload does not conflict with itself.

   The caller has to make sure that there is no conflict with the return
   register.  */
/* NOTE(review): the extraction of this chunk dropped several original
   source lines inside this function (parameter declarations, braces,
   `break' statements and the closing comparison/return logic).  The
   statements below are reproduced as-is; verify against the master
   copy before compiling.  */
reload_reg_free_for_value_p (regno, opnum, type, value, out, reloadnum)
     enum reload_type type;

  /* We use some pseudo 'time' value to check if the lifetimes of the
     new register use would overlap with the one of a previous reload
     that is not read-only or uses a different value.
     The 'time' used doesn't have to be linear in any shape or form, just
     monotonic.
     Some reload types use different 'buckets' for each operand.
     So there are MAX_RECOG_OPERANDS different time values for each
     such reload.

     We compute TIME1 as the time when the register for the prospective
     new reload ceases to be live, and TIME2 for each existing
     reload as the time when that the reload register of that reload
     becomes live.
     Where there is little to be gained by exact lifetime calculations,
     we just make conservative assumptions, i.e. a longer lifetime;
     this is done in the 'default:' cases.  */

    case RELOAD_FOR_OTHER_ADDRESS:

    /* For each input, we might have a sequence of RELOAD_FOR_INPADDR_ADDRESS,
       RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_INPUT.  By adding 0 / 1 / 2 ,
       respectively, to the time values for these, we get distinct time
       values.  To get distinct time values for each operand, we have to
       multiply opnum by at least three.  We round that up to four because
       multiply by four is often cheaper.  */
    case RELOAD_FOR_INPADDR_ADDRESS:
      time1 = opnum * 4 + 1;
    case RELOAD_FOR_INPUT_ADDRESS:
      time1 = opnum * 4 + 2;
    case RELOAD_FOR_OPADDR_ADDR:
      /* opnum * 4 + 3 < opnum * 4 + 4
	 <= (MAX_RECOG_OPERANDS - 1) * 4 + 4 == MAX_RECOG_OPERANDS * 4 */
      time1 = MAX_RECOG_OPERANDS * 4;
    case RELOAD_FOR_INPUT:
      /* All RELOAD_FOR_INPUT reloads remain live till just before the
	 instruction is executed.  */
      time1 = MAX_RECOG_OPERANDS * 4 + 1;
    case RELOAD_FOR_OPERAND_ADDRESS:
      /* RELOAD_FOR_OPERAND_ADDRESS reloads are live even while the insn
	 is executed.  */
      time1 = MAX_RECOG_OPERANDS * 4 + 2;
    case RELOAD_FOR_OUTPUT_ADDRESS:
      time1 = MAX_RECOG_OPERANDS * 4 + 3 + opnum;
      /* Conservative lifetime for the remaining ('default:') reload
	 types, per the comment above.  */
      time1 = MAX_RECOG_OPERANDS * 5 + 3;

  /* Scan the other reloads of this insn for lifetime conflicts with a
     reuse of hard reg REGNO.  */
  for (i = 0; i < n_reloads; i++)
      rtx reg = reload_reg_rtx[i];
      /* Does reload I use a register overlapping REGNO?  The unsigned
	 subtraction performs the range check with a single compare.  */
      if (reg && GET_CODE (reg) == REG
	  && ((unsigned) regno - true_regnum (reg)
	      <= HARD_REGNO_NREGS (REGNO (reg), GET_MODE (reg)) - (unsigned)1)
	  && reload_when_needed[i] != RELOAD_FOR_INPUT
	  && reload_when_needed[i] != RELOAD_FOR_INPUT_ADDRESS
	  && reload_when_needed[i] != RELOAD_FOR_INPADDR_ADDRESS)
      if (! reload_in[i] || ! rtx_equal_p (reload_in[i], value)
      /* Compute TIME2, the moment reload I's register becomes live.  */
      switch (reload_when_needed[i])
	case RELOAD_FOR_OTHER_ADDRESS:
	case RELOAD_FOR_INPADDR_ADDRESS:
	  /* find_reloads makes sure that a
	     RELOAD_FOR_{INP,OP,OUT}ADDR_ADDRESS reload is only used
	     by at most one - the first -
	     RELOAD_FOR_{INPUT,OPERAND,OUTPUT}_ADDRESS .  If the
	     address reload is inherited, the address address reload
	     goes away, so we can ignore this conflict.  */
	  if (type == RELOAD_FOR_INPUT_ADDRESS && reloadnum == i + 1)
	  time2 = reload_opnum[i] * 4 + 1;
	case RELOAD_FOR_INPUT_ADDRESS:
	  time2 = reload_opnum[i] * 4 + 2;
	case RELOAD_FOR_INPUT:
	  time2 = reload_opnum[i] * 4 + 3;
	case RELOAD_FOR_OPADDR_ADDR:
	  if (type == RELOAD_FOR_OPERAND_ADDRESS && reloadnum == i + 1)
	  time2 = MAX_RECOG_OPERANDS * 4;
	case RELOAD_FOR_OPERAND_ADDRESS:
	  time2 = MAX_RECOG_OPERANDS * 4 + 1;
	case RELOAD_FOR_OUTPUT:
	  /* All RELOAD_FOR_OUTPUT reloads become live just after the
	     instruction is executed.  */
	  time2 = MAX_RECOG_OPERANDS * 4 + 3;
	case RELOAD_FOR_OUTADDR_ADDRESS:
	  if (type == RELOAD_FOR_OUTPUT_ADDRESS && reloadnum == i + 1)
	/* The first RELOAD_FOR_OUTPUT_ADDRESS reload conflicts with the
	   RELOAD_FOR_OUTPUT reloads, so assign it the same time value.  */
	case RELOAD_FOR_OUTPUT_ADDRESS:
	  time2 = MAX_RECOG_OPERANDS * 4 + 3 + reload_opnum[i];
      if (! reload_in[i] || rtx_equal_p (reload_in[i], value))
	  time2 = MAX_RECOG_OPERANDS * 4 + 3;
/* Find a spill register to use as a reload register for reload R.
   LAST_RELOAD is non-zero if this is the last reload for the insn being
   processed.

   Set reload_reg_rtx[R] to the register allocated.

   If NOERROR is nonzero, we return 1 if successful,
   or 0 if we couldn't find a spill reg and we didn't change anything.  */
/* NOTE(review): the extraction of this chunk dropped several original
   source lines inside this function (the return type, braces, loop
   headers and some `if' bodies).  The statements below are reproduced
   as-is; verify against the master copy before compiling.  */
allocate_reload_reg (chain, r, last_reload, noerror)
     struct insn_chain *chain;

  rtx insn = chain->insn;
  int i, pass, count, regno;

  /* If we put this reload ahead, thinking it is a group,
     then insist on finding a group.  Otherwise we can grab a
     reg that some other reload needs.
     (That can happen when we have a 68000 DATA_OR_FP_REG
     which is a group of data regs or one fp reg.)
     We need not be so restrictive if there are no more reloads
     for this insn.

     ??? Really it would be nicer to have smarter handling
     for that kind of reg class, where a problem like this is normal.
     Perhaps those classes should be avoided for reloading
     by use of more alternatives.  */

  int force_group = reload_nregs[r] > 1 && ! last_reload;

  /* If we want a single register and haven't yet found one,
     take any reg in the right class and not in use.
     If we want a consecutive group, here is where we look for it.

     We use two passes so we can first look for reload regs to
     reuse, which are already in use for other reloads in this insn,
     and only then use additional registers.
     I think that maximizing reuse is needed to make sure we don't
     run out of reload regs.  Suppose we have three reloads, and
     reloads A and B can share regs.  These need two regs.
     Suppose A and B are given different regs.
     That leaves none for C.  */
  for (pass = 0; pass < 2; pass++)
      /* I is the index in spill_regs.
	 We advance it round-robin between insns to use all spill regs
	 equally, so that inherited reloads have a chance
	 of leapfrogging each other.  Don't do this, however, when we have
	 group needs and failure would be fatal; if we only have a relatively
	 small number of spill registers, and more than one of them has
	 group needs, then by starting in the middle, we may end up
	 allocating the first one in such a way that we are not left with
	 sufficient groups to handle the rest.  */

      if (noerror || ! force_group)

      for (count = 0; count < n_spills; count++)
	  int class = (int) reload_reg_class[r];

	  regnum = spill_regs[i];

	  /* Candidate test: the reg must be free for this reload's
	     lifetime, belong to the needed class and support the mode —
	     or already be shared with a non-inherited reload.  */
	  if ((reload_reg_free_p (regnum, reload_opnum[r],
				  reload_when_needed[r])
	       /* We check reload_reg_used to make sure we
		  don't clobber the return register.  */
	       && ! TEST_HARD_REG_BIT (reload_reg_used, regnum)
	       && reload_reg_free_for_value_p (regnum,
					       reload_when_needed[r],
	       && TEST_HARD_REG_BIT (reg_class_contents[class], regnum)
	       && HARD_REGNO_MODE_OK (regnum, reload_mode[r])
	       /* Look first for regs to share, then for unshared.  But
		  don't share regs used for inherited reloads; they are
		  the ones we want to preserve.  */
	      || (TEST_HARD_REG_BIT (reload_reg_used_at_all,
		  && ! TEST_HARD_REG_BIT (reload_reg_used_for_inherit,
	      int nr = HARD_REGNO_NREGS (regnum, reload_mode[r]);
	      /* Avoid the problem where spilling a GENERAL_OR_FP_REG
		 (on 68000) got us two FP regs.  If NR is 1,
		 we would reject both of them.  */
		nr = CLASS_MAX_NREGS (reload_reg_class[r], reload_mode[r]);
	      /* If we need only one reg, we have already won.  */
	      /* But reject a single reg if we demand a group.  */
	      /* Otherwise check that as many consecutive regs as we need
		 are available here.
		 Also, don't use for a group registers that are
		 needed for nongroups.  */
	      if (! TEST_HARD_REG_BIT (chain->counted_for_nongroups, regnum))
		  regno = regnum + nr - 1;
		  if (!(TEST_HARD_REG_BIT (reg_class_contents[class], regno)
			&& spill_reg_order[regno] >= 0
			&& reload_reg_free_p (regno, reload_opnum[r],
					      reload_when_needed[r])
			&& ! TEST_HARD_REG_BIT (chain->counted_for_nongroups,

      /* If we found something on pass 1, omit pass 2.  */
      if (count < n_spills)

  /* We should have found a spill register by now.  */
  if (count == n_spills)

  /* I is the index in SPILL_REG_RTX of the reload register we are to
     allocate.  Get an rtx for it and find its register number.  */
  new = spill_reg_rtx[i];

  /* Reuse the cached REG rtx only if its mode matches; otherwise make
     a fresh one in reload_mode[r] and cache it.  */
  if (new == 0 || GET_MODE (new) != reload_mode[r])
    spill_reg_rtx[i] = new
      = gen_rtx_REG (reload_mode[r], spill_regs[i]);

  regno = true_regnum (new);

  /* Detect when the reload reg can't hold the reload mode.
     This used to be one `if', but Sequent compiler can't handle that.  */
  if (HARD_REGNO_MODE_OK (regno, reload_mode[r]))
      enum machine_mode test_mode = VOIDmode;
	test_mode = GET_MODE (reload_in[r]);
      /* If reload_in[r] has VOIDmode, it means we will load it
	 in whatever mode the reload reg has: to wit, reload_mode[r].
	 We have already tested that for validity.  */
      /* Aside from that, we need to test that the expressions
	 to reload from or into have modes which are valid for this
	 reload register.  Otherwise the reload insns would be invalid.  */
      if (! (reload_in[r] != 0 && test_mode != VOIDmode
	     && ! HARD_REGNO_MODE_OK (regno, test_mode)))
	if (! (reload_out[r] != 0
	       && ! HARD_REGNO_MODE_OK (regno, GET_MODE (reload_out[r]))))
	    /* The reg is OK.  */
	    /* Mark as in use for this insn the reload regs we use
	       for this.  */
	    mark_reload_reg_in_use (spill_regs[i], reload_opnum[r],
				    reload_when_needed[r], reload_mode[r]);
	    reload_reg_rtx[r] = new;
	    reload_spill_index[r] = spill_regs[i];

  /* The reg is not OK.  */
      if (asm_noperands (PATTERN (insn)) < 0)
	/* It's the compiler's fault.  */
	fatal_insn ("Could not find a spill register", insn);

      /* It's the user's fault; the operand's mode and constraint
	 don't match.  Disable this reload so we don't crash in final.  */
      error_for_asm (insn,
		     "`asm' operand constraint incompatible with operand size");
      reload_reg_rtx[r] = 0;
      reload_optional[r] = 1;
      reload_secondary_p[r] = 1;
5515 /* Assign hard reg targets for the pseudo-registers we must reload
5516 into hard regs for this insn.
5517 Also output the instructions to copy them in and out of the hard regs.
5519 For machines with register classes, we are responsible for
5520 finding a reload reg in the proper class. */
5523 choose_reload_regs (chain
)
5524 struct insn_chain
*chain
;
5526 rtx insn
= chain
->insn
;
5528 int max_group_size
= 1;
5529 enum reg_class group_class
= NO_REGS
;
5533 rtx save_reload_reg_rtx
[MAX_RELOADS
];
5534 char save_reload_inherited
[MAX_RELOADS
];
5535 rtx save_reload_inheritance_insn
[MAX_RELOADS
];
5536 rtx save_reload_override_in
[MAX_RELOADS
];
5537 int save_reload_spill_index
[MAX_RELOADS
];
5538 HARD_REG_SET save_reload_reg_used
;
5539 HARD_REG_SET save_reload_reg_used_in_input_addr
[MAX_RECOG_OPERANDS
];
5540 HARD_REG_SET save_reload_reg_used_in_inpaddr_addr
[MAX_RECOG_OPERANDS
];
5541 HARD_REG_SET save_reload_reg_used_in_output_addr
[MAX_RECOG_OPERANDS
];
5542 HARD_REG_SET save_reload_reg_used_in_outaddr_addr
[MAX_RECOG_OPERANDS
];
5543 HARD_REG_SET save_reload_reg_used_in_input
[MAX_RECOG_OPERANDS
];
5544 HARD_REG_SET save_reload_reg_used_in_output
[MAX_RECOG_OPERANDS
];
5545 HARD_REG_SET save_reload_reg_used_in_op_addr
;
5546 HARD_REG_SET save_reload_reg_used_in_op_addr_reload
;
5547 HARD_REG_SET save_reload_reg_used_in_insn
;
5548 HARD_REG_SET save_reload_reg_used_in_other_addr
;
5549 HARD_REG_SET save_reload_reg_used_at_all
;
5551 bzero (reload_inherited
, MAX_RELOADS
);
5552 bzero ((char *) reload_inheritance_insn
, MAX_RELOADS
* sizeof (rtx
));
5553 bzero ((char *) reload_override_in
, MAX_RELOADS
* sizeof (rtx
));
5555 CLEAR_HARD_REG_SET (reload_reg_used
);
5556 CLEAR_HARD_REG_SET (reload_reg_used_at_all
);
5557 CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr
);
5558 CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr_reload
);
5559 CLEAR_HARD_REG_SET (reload_reg_used_in_insn
);
5560 CLEAR_HARD_REG_SET (reload_reg_used_in_other_addr
);
5562 CLEAR_HARD_REG_SET (reg_used_by_pseudo
);
5563 compute_use_by_pseudos (®_used_by_pseudo
, chain
->live_before
);
5564 compute_use_by_pseudos (®_used_by_pseudo
, chain
->live_after
);
5566 for (i
= 0; i
< reload_n_operands
; i
++)
5568 CLEAR_HARD_REG_SET (reload_reg_used_in_output
[i
]);
5569 CLEAR_HARD_REG_SET (reload_reg_used_in_input
[i
]);
5570 CLEAR_HARD_REG_SET (reload_reg_used_in_input_addr
[i
]);
5571 CLEAR_HARD_REG_SET (reload_reg_used_in_inpaddr_addr
[i
]);
5572 CLEAR_HARD_REG_SET (reload_reg_used_in_output_addr
[i
]);
5573 CLEAR_HARD_REG_SET (reload_reg_used_in_outaddr_addr
[i
]);
5576 IOR_COMPL_HARD_REG_SET (reload_reg_used
, chain
->used_spill_regs
);
5578 #if 0 /* Not needed, now that we can always retry without inheritance. */
5579 /* See if we have more mandatory reloads than spill regs.
5580 If so, then we cannot risk optimizations that could prevent
5581 reloads from sharing one spill register.
5583 Since we will try finding a better register than reload_reg_rtx
5584 unless it is equal to reload_in or reload_out, count such reloads. */
5588 for (j
= 0; j
< n_reloads
; j
++)
5589 if (! reload_optional
[j
]
5590 && (reload_in
[j
] != 0 || reload_out
[j
] != 0 || reload_secondary_p
[j
])
5591 && (reload_reg_rtx
[j
] == 0
5592 || (! rtx_equal_p (reload_reg_rtx
[j
], reload_in
[j
])
5593 && ! rtx_equal_p (reload_reg_rtx
[j
], reload_out
[j
]))))
5600 /* In order to be certain of getting the registers we need,
5601 we must sort the reloads into order of increasing register class.
5602 Then our grabbing of reload registers will parallel the process
5603 that provided the reload registers.
5605 Also note whether any of the reloads wants a consecutive group of regs.
5606 If so, record the maximum size of the group desired and what
5607 register class contains all the groups needed by this insn. */
5609 for (j
= 0; j
< n_reloads
; j
++)
5611 reload_order
[j
] = j
;
5612 reload_spill_index
[j
] = -1;
5615 = (reload_inmode
[j
] == VOIDmode
5616 || (GET_MODE_SIZE (reload_outmode
[j
])
5617 > GET_MODE_SIZE (reload_inmode
[j
])))
5618 ? reload_outmode
[j
] : reload_inmode
[j
];
5620 reload_nregs
[j
] = CLASS_MAX_NREGS (reload_reg_class
[j
], reload_mode
[j
]);
5622 if (reload_nregs
[j
] > 1)
5624 max_group_size
= MAX (reload_nregs
[j
], max_group_size
);
5625 group_class
= reg_class_superunion
[(int)reload_reg_class
[j
]][(int)group_class
];
5628 /* If we have already decided to use a certain register,
5629 don't use it in another way. */
5630 if (reload_reg_rtx
[j
])
5631 mark_reload_reg_in_use (REGNO (reload_reg_rtx
[j
]), reload_opnum
[j
],
5632 reload_when_needed
[j
], reload_mode
[j
]);
5636 qsort (reload_order
, n_reloads
, sizeof (short), reload_reg_class_lower
);
5638 bcopy ((char *) reload_reg_rtx
, (char *) save_reload_reg_rtx
,
5639 sizeof reload_reg_rtx
);
5640 bcopy (reload_inherited
, save_reload_inherited
, sizeof reload_inherited
);
5641 bcopy ((char *) reload_inheritance_insn
,
5642 (char *) save_reload_inheritance_insn
,
5643 sizeof reload_inheritance_insn
);
5644 bcopy ((char *) reload_override_in
, (char *) save_reload_override_in
,
5645 sizeof reload_override_in
);
5646 bcopy ((char *) reload_spill_index
, (char *) save_reload_spill_index
,
5647 sizeof reload_spill_index
);
5648 COPY_HARD_REG_SET (save_reload_reg_used
, reload_reg_used
);
5649 COPY_HARD_REG_SET (save_reload_reg_used_at_all
, reload_reg_used_at_all
);
5650 COPY_HARD_REG_SET (save_reload_reg_used_in_op_addr
,
5651 reload_reg_used_in_op_addr
);
5653 COPY_HARD_REG_SET (save_reload_reg_used_in_op_addr_reload
,
5654 reload_reg_used_in_op_addr_reload
);
5656 COPY_HARD_REG_SET (save_reload_reg_used_in_insn
,
5657 reload_reg_used_in_insn
);
5658 COPY_HARD_REG_SET (save_reload_reg_used_in_other_addr
,
5659 reload_reg_used_in_other_addr
);
5661 for (i
= 0; i
< reload_n_operands
; i
++)
5663 COPY_HARD_REG_SET (save_reload_reg_used_in_output
[i
],
5664 reload_reg_used_in_output
[i
]);
5665 COPY_HARD_REG_SET (save_reload_reg_used_in_input
[i
],
5666 reload_reg_used_in_input
[i
]);
5667 COPY_HARD_REG_SET (save_reload_reg_used_in_input_addr
[i
],
5668 reload_reg_used_in_input_addr
[i
]);
5669 COPY_HARD_REG_SET (save_reload_reg_used_in_inpaddr_addr
[i
],
5670 reload_reg_used_in_inpaddr_addr
[i
]);
5671 COPY_HARD_REG_SET (save_reload_reg_used_in_output_addr
[i
],
5672 reload_reg_used_in_output_addr
[i
]);
5673 COPY_HARD_REG_SET (save_reload_reg_used_in_outaddr_addr
[i
],
5674 reload_reg_used_in_outaddr_addr
[i
]);
5677 /* If -O, try first with inheritance, then turning it off.
5678 If not -O, don't do inheritance.
5679 Using inheritance when not optimizing leads to paradoxes
5680 with fp on the 68k: fp numbers (not NaNs) fail to be equal to themselves
5681 because one side of the comparison might be inherited. */
5683 for (inheritance
= optimize
> 0; inheritance
>= 0; inheritance
--)
5685 /* Process the reloads in order of preference just found.
5686 Beyond this point, subregs can be found in reload_reg_rtx.
5688 This used to look for an existing reloaded home for all
5689 of the reloads, and only then perform any new reloads.
5690 But that could lose if the reloads were done out of reg-class order
5691 because a later reload with a looser constraint might have an old
5692 home in a register needed by an earlier reload with a tighter constraint.
5694 To solve this, we make two passes over the reloads, in the order
5695 described above. In the first pass we try to inherit a reload
5696 from a previous insn. If there is a later reload that needs a
5697 class that is a proper subset of the class being processed, we must
5698 also allocate a spill register during the first pass.
5700 Then make a second pass over the reloads to allocate any reloads
5701 that haven't been given registers yet. */
5703 CLEAR_HARD_REG_SET (reload_reg_used_for_inherit
);
5705 for (j
= 0; j
< n_reloads
; j
++)
5707 register int r
= reload_order
[j
];
5709 /* Ignore reloads that got marked inoperative. */
5710 if (reload_out
[r
] == 0 && reload_in
[r
] == 0
5711 && ! reload_secondary_p
[r
])
5714 /* If find_reloads chose to use reload_in or reload_out as a reload
5715 register, we don't need to chose one. Otherwise, try even if it
5716 found one since we might save an insn if we find the value lying
5718 Try also when reload_in is a pseudo without a hard reg. */
5719 if (reload_in
[r
] != 0 && reload_reg_rtx
[r
] != 0
5720 && (rtx_equal_p (reload_in
[r
], reload_reg_rtx
[r
])
5721 || (rtx_equal_p (reload_out
[r
], reload_reg_rtx
[r
])
5722 && GET_CODE (reload_in
[r
]) != MEM
5723 && true_regnum (reload_in
[r
]) < FIRST_PSEUDO_REGISTER
)))
5726 #if 0 /* No longer needed for correct operation.
5727 It might give better code, or might not; worth an experiment? */
5728 /* If this is an optional reload, we can't inherit from earlier insns
5729 until we are sure that any non-optional reloads have been allocated.
5730 The following code takes advantage of the fact that optional reloads
5731 are at the end of reload_order. */
5732 if (reload_optional
[r
] != 0)
5733 for (i
= 0; i
< j
; i
++)
5734 if ((reload_out
[reload_order
[i
]] != 0
5735 || reload_in
[reload_order
[i
]] != 0
5736 || reload_secondary_p
[reload_order
[i
]])
5737 && ! reload_optional
[reload_order
[i
]]
5738 && reload_reg_rtx
[reload_order
[i
]] == 0)
5739 allocate_reload_reg (chain
, reload_order
[i
], 0, inheritance
);
5742 /* First see if this pseudo is already available as reloaded
5743 for a previous insn. We cannot try to inherit for reloads
5744 that are smaller than the maximum number of registers needed
5745 for groups unless the register we would allocate cannot be used
5748 We could check here to see if this is a secondary reload for
5749 an object that is already in a register of the desired class.
5750 This would avoid the need for the secondary reload register.
5751 But this is complex because we can't easily determine what
5752 objects might want to be loaded via this reload. So let a
5753 register be allocated here. In `emit_reload_insns' we suppress
5754 one of the loads in the case described above. */
5759 register int regno
= -1;
5760 enum machine_mode mode
;
5762 if (reload_in
[r
] == 0)
5764 else if (GET_CODE (reload_in
[r
]) == REG
)
5766 regno
= REGNO (reload_in
[r
]);
5767 mode
= GET_MODE (reload_in
[r
]);
5769 else if (GET_CODE (reload_in_reg
[r
]) == REG
)
5771 regno
= REGNO (reload_in_reg
[r
]);
5772 mode
= GET_MODE (reload_in_reg
[r
]);
5774 else if (GET_CODE (reload_in_reg
[r
]) == SUBREG
5775 && GET_CODE (SUBREG_REG (reload_in_reg
[r
])) == REG
)
5777 word
= SUBREG_WORD (reload_in_reg
[r
]);
5778 regno
= REGNO (SUBREG_REG (reload_in_reg
[r
]));
5779 if (regno
< FIRST_PSEUDO_REGISTER
)
5781 mode
= GET_MODE (reload_in_reg
[r
]);
5784 else if ((GET_CODE (reload_in_reg
[r
]) == PRE_INC
5785 || GET_CODE (reload_in_reg
[r
]) == PRE_DEC
5786 || GET_CODE (reload_in_reg
[r
]) == POST_INC
5787 || GET_CODE (reload_in_reg
[r
]) == POST_DEC
)
5788 && GET_CODE (XEXP (reload_in_reg
[r
], 0)) == REG
)
5790 regno
= REGNO (XEXP (reload_in_reg
[r
], 0));
5791 mode
= GET_MODE (XEXP (reload_in_reg
[r
], 0));
5792 reload_out
[r
] = reload_in
[r
];
5796 /* This won't work, since REGNO can be a pseudo reg number.
5797 Also, it takes much more hair to keep track of all the things
5798 that can invalidate an inherited reload of part of a pseudoreg. */
5799 else if (GET_CODE (reload_in
[r
]) == SUBREG
5800 && GET_CODE (SUBREG_REG (reload_in
[r
])) == REG
)
5801 regno
= REGNO (SUBREG_REG (reload_in
[r
])) + SUBREG_WORD (reload_in
[r
]);
5804 if (regno
>= 0 && reg_last_reload_reg
[regno
] != 0)
5806 enum reg_class
class = reload_reg_class
[r
], last_class
;
5807 rtx last_reg
= reg_last_reload_reg
[regno
];
5809 i
= REGNO (last_reg
) + word
;
5810 last_class
= REGNO_REG_CLASS (i
);
5811 if ((GET_MODE_SIZE (GET_MODE (last_reg
))
5812 >= GET_MODE_SIZE (mode
) + word
* UNITS_PER_WORD
)
5813 && reg_reloaded_contents
[i
] == regno
5814 && TEST_HARD_REG_BIT (reg_reloaded_valid
, i
)
5815 && HARD_REGNO_MODE_OK (i
, reload_mode
[r
])
5816 && (TEST_HARD_REG_BIT (reg_class_contents
[(int) class], i
)
5817 /* Even if we can't use this register as a reload
5818 register, we might use it for reload_override_in,
5819 if copying it to the desired class is cheap
5821 || ((REGISTER_MOVE_COST (last_class
, class)
5822 < MEMORY_MOVE_COST (mode
, class, 1))
5823 #ifdef SECONDARY_INPUT_RELOAD_CLASS
5824 && (SECONDARY_INPUT_RELOAD_CLASS (class, mode
,
5828 #ifdef SECONDARY_MEMORY_NEEDED
5829 && ! SECONDARY_MEMORY_NEEDED (last_class
, class,
5834 && (reload_nregs
[r
] == max_group_size
5835 || ! TEST_HARD_REG_BIT (reg_class_contents
[(int) group_class
],
5837 && ((reload_reg_free_p (i
, reload_opnum
[r
],
5838 reload_when_needed
[r
])
5839 && reload_reg_free_before_p (i
, reload_opnum
[r
],
5840 reload_when_needed
[r
],
5842 || reload_reg_free_for_value_p (i
, reload_opnum
[r
],
5843 reload_when_needed
[r
],
5847 /* If a group is needed, verify that all the subsequent
5848 registers still have their values intact. */
5850 = HARD_REGNO_NREGS (i
, reload_mode
[r
]);
5853 for (k
= 1; k
< nr
; k
++)
5854 if (reg_reloaded_contents
[i
+ k
] != regno
5855 || ! TEST_HARD_REG_BIT (reg_reloaded_valid
, i
+ k
))
5862 last_reg
= (GET_MODE (last_reg
) == mode
5863 ? last_reg
: gen_rtx_REG (mode
, i
));
5865 /* We found a register that contains the
5866 value we need. If this register is the
5867 same as an `earlyclobber' operand of the
5868 current insn, just mark it as a place to
5869 reload from since we can't use it as the
5870 reload register itself. */
5872 for (i1
= 0; i1
< n_earlyclobbers
; i1
++)
5873 if (reg_overlap_mentioned_for_reload_p
5874 (reg_last_reload_reg
[regno
],
5875 reload_earlyclobbers
[i1
]))
5878 if (i1
!= n_earlyclobbers
5879 /* Don't use it if we'd clobber a pseudo reg. */
5880 || (TEST_HARD_REG_BIT (reg_used_by_pseudo
, i
)
5882 && ! TEST_HARD_REG_BIT (reg_reloaded_dead
, i
))
5883 /* Don't really use the inherited spill reg
5884 if we need it wider than we've got it. */
5885 || (GET_MODE_SIZE (reload_mode
[r
])
5886 > GET_MODE_SIZE (mode
))
5887 || ! TEST_HARD_REG_BIT (reg_class_contents
[(int) reload_reg_class
[r
]],
5890 /* If find_reloads chose reload_out as reload
5891 register, stay with it - that leaves the
5892 inherited register for subsequent reloads. */
5893 || (reload_out
[r
] && reload_reg_rtx
[r
]
5894 && rtx_equal_p (reload_out
[r
],
5895 reload_reg_rtx
[r
])))
5897 reload_override_in
[r
] = last_reg
;
5898 reload_inheritance_insn
[r
]
5899 = reg_reloaded_insn
[i
];
5904 /* We can use this as a reload reg. */
5905 /* Mark the register as in use for this part of
5907 mark_reload_reg_in_use (i
,
5909 reload_when_needed
[r
],
5911 reload_reg_rtx
[r
] = last_reg
;
5912 reload_inherited
[r
] = 1;
5913 reload_inheritance_insn
[r
]
5914 = reg_reloaded_insn
[i
];
5915 reload_spill_index
[r
] = i
;
5916 for (k
= 0; k
< nr
; k
++)
5917 SET_HARD_REG_BIT (reload_reg_used_for_inherit
,
5925 /* Here's another way to see if the value is already lying around. */
5927 && reload_in
[r
] != 0
5928 && ! reload_inherited
[r
]
5929 && reload_out
[r
] == 0
5930 && (CONSTANT_P (reload_in
[r
])
5931 || GET_CODE (reload_in
[r
]) == PLUS
5932 || GET_CODE (reload_in
[r
]) == REG
5933 || GET_CODE (reload_in
[r
]) == MEM
)
5934 && (reload_nregs
[r
] == max_group_size
5935 || ! reg_classes_intersect_p (reload_reg_class
[r
], group_class
)))
5938 = find_equiv_reg (reload_in
[r
], insn
, reload_reg_class
[r
],
5939 -1, NULL_PTR
, 0, reload_mode
[r
]);
5944 if (GET_CODE (equiv
) == REG
)
5945 regno
= REGNO (equiv
);
5946 else if (GET_CODE (equiv
) == SUBREG
)
5948 /* This must be a SUBREG of a hard register.
5949 Make a new REG since this might be used in an
5950 address and not all machines support SUBREGs
5952 regno
= REGNO (SUBREG_REG (equiv
)) + SUBREG_WORD (equiv
);
5953 equiv
= gen_rtx_REG (reload_mode
[r
], regno
);
5959 /* If we found a spill reg, reject it unless it is free
5960 and of the desired class. */
5962 && ((TEST_HARD_REG_BIT (reload_reg_used_at_all
, regno
)
5963 && ! reload_reg_free_for_value_p (regno
, reload_opnum
[r
],
5964 reload_when_needed
[r
],
5967 || ! TEST_HARD_REG_BIT (reg_class_contents
[(int) reload_reg_class
[r
]],
5971 if (equiv
!= 0 && ! HARD_REGNO_MODE_OK (regno
, reload_mode
[r
]))
5974 /* We found a register that contains the value we need.
5975 If this register is the same as an `earlyclobber' operand
5976 of the current insn, just mark it as a place to reload from
5977 since we can't use it as the reload register itself. */
5980 for (i
= 0; i
< n_earlyclobbers
; i
++)
5981 if (reg_overlap_mentioned_for_reload_p (equiv
,
5982 reload_earlyclobbers
[i
]))
5984 reload_override_in
[r
] = equiv
;
5989 /* If the equiv register we have found is explicitly clobbered
5990 in the current insn, it depends on the reload type if we
5991 can use it, use it for reload_override_in, or not at all.
5992 In particular, we then can't use EQUIV for a
5993 RELOAD_FOR_OUTPUT_ADDRESS reload. */
5995 if (equiv
!= 0 && regno_clobbered_p (regno
, insn
))
5997 switch (reload_when_needed
[r
])
5999 case RELOAD_FOR_OTHER_ADDRESS
:
6000 case RELOAD_FOR_INPADDR_ADDRESS
:
6001 case RELOAD_FOR_INPUT_ADDRESS
:
6002 case RELOAD_FOR_OPADDR_ADDR
:
6005 case RELOAD_FOR_INPUT
:
6006 case RELOAD_FOR_OPERAND_ADDRESS
:
6007 reload_override_in
[r
] = equiv
;
6015 /* If we found an equivalent reg, say no code need be generated
6016 to load it, and use it as our reload reg. */
6017 if (equiv
!= 0 && regno
!= HARD_FRAME_POINTER_REGNUM
)
6019 int nr
= HARD_REGNO_NREGS (regno
, reload_mode
[r
]);
6021 reload_reg_rtx
[r
] = equiv
;
6022 reload_inherited
[r
] = 1;
6024 /* If reg_reloaded_valid is not set for this register,
6025 there might be a stale spill_reg_store lying around.
6026 We must clear it, since otherwise emit_reload_insns
6027 might delete the store. */
6028 if (! TEST_HARD_REG_BIT (reg_reloaded_valid
, regno
))
6029 spill_reg_store
[regno
] = NULL_RTX
;
6030 /* If any of the hard registers in EQUIV are spill
6031 registers, mark them as in use for this insn. */
6032 for (k
= 0; k
< nr
; k
++)
6034 i
= spill_reg_order
[regno
+ k
];
6037 mark_reload_reg_in_use (regno
, reload_opnum
[r
],
6038 reload_when_needed
[r
],
6040 SET_HARD_REG_BIT (reload_reg_used_for_inherit
,
6047 /* If we found a register to use already, or if this is an optional
6048 reload, we are done. */
6049 if (reload_reg_rtx
[r
] != 0 || reload_optional
[r
] != 0)
6052 #if 0 /* No longer needed for correct operation. Might or might not
6053 give better code on the average. Want to experiment? */
6055 /* See if there is a later reload that has a class different from our
6056 class that intersects our class or that requires less register
6057 than our reload. If so, we must allocate a register to this
6058 reload now, since that reload might inherit a previous reload
6059 and take the only available register in our class. Don't do this
6060 for optional reloads since they will force all previous reloads
6061 to be allocated. Also don't do this for reloads that have been
6064 for (i
= j
+ 1; i
< n_reloads
; i
++)
6066 int s
= reload_order
[i
];
6068 if ((reload_in
[s
] == 0 && reload_out
[s
] == 0
6069 && ! reload_secondary_p
[s
])
6070 || reload_optional
[s
])
6073 if ((reload_reg_class
[s
] != reload_reg_class
[r
]
6074 && reg_classes_intersect_p (reload_reg_class
[r
],
6075 reload_reg_class
[s
]))
6076 || reload_nregs
[s
] < reload_nregs
[r
])
6083 allocate_reload_reg (chain
, r
, j
== n_reloads
- 1, inheritance
);
6087 /* Now allocate reload registers for anything non-optional that
6088 didn't get one yet. */
6089 for (j
= 0; j
< n_reloads
; j
++)
6091 register int r
= reload_order
[j
];
6093 /* Ignore reloads that got marked inoperative. */
6094 if (reload_out
[r
] == 0 && reload_in
[r
] == 0 && ! reload_secondary_p
[r
])
6097 /* Skip reloads that already have a register allocated or are
6099 if (reload_reg_rtx
[r
] != 0 || reload_optional
[r
])
6102 if (! allocate_reload_reg (chain
, r
, j
== n_reloads
- 1, inheritance
))
6106 /* If that loop got all the way, we have won. */
6110 /* Loop around and try without any inheritance. */
6111 /* First undo everything done by the failed attempt
6112 to allocate with inheritance. */
6113 bcopy ((char *) save_reload_reg_rtx
, (char *) reload_reg_rtx
,
6114 sizeof reload_reg_rtx
);
6115 bcopy ((char *) save_reload_inherited
, (char *) reload_inherited
,
6116 sizeof reload_inherited
);
6117 bcopy ((char *) save_reload_inheritance_insn
,
6118 (char *) reload_inheritance_insn
,
6119 sizeof reload_inheritance_insn
);
6120 bcopy ((char *) save_reload_override_in
, (char *) reload_override_in
,
6121 sizeof reload_override_in
);
6122 bcopy ((char *) save_reload_spill_index
, (char *) reload_spill_index
,
6123 sizeof reload_spill_index
);
6124 COPY_HARD_REG_SET (reload_reg_used
, save_reload_reg_used
);
6125 COPY_HARD_REG_SET (reload_reg_used_at_all
, save_reload_reg_used_at_all
);
6126 COPY_HARD_REG_SET (reload_reg_used_in_op_addr
,
6127 save_reload_reg_used_in_op_addr
);
6128 COPY_HARD_REG_SET (reload_reg_used_in_op_addr_reload
,
6129 save_reload_reg_used_in_op_addr_reload
);
6130 COPY_HARD_REG_SET (reload_reg_used_in_insn
,
6131 save_reload_reg_used_in_insn
);
6132 COPY_HARD_REG_SET (reload_reg_used_in_other_addr
,
6133 save_reload_reg_used_in_other_addr
);
6135 for (i
= 0; i
< reload_n_operands
; i
++)
6137 COPY_HARD_REG_SET (reload_reg_used_in_input
[i
],
6138 save_reload_reg_used_in_input
[i
]);
6139 COPY_HARD_REG_SET (reload_reg_used_in_output
[i
],
6140 save_reload_reg_used_in_output
[i
]);
6141 COPY_HARD_REG_SET (reload_reg_used_in_input_addr
[i
],
6142 save_reload_reg_used_in_input_addr
[i
]);
6143 COPY_HARD_REG_SET (reload_reg_used_in_inpaddr_addr
[i
],
6144 save_reload_reg_used_in_inpaddr_addr
[i
]);
6145 COPY_HARD_REG_SET (reload_reg_used_in_output_addr
[i
],
6146 save_reload_reg_used_in_output_addr
[i
]);
6147 COPY_HARD_REG_SET (reload_reg_used_in_outaddr_addr
[i
],
6148 save_reload_reg_used_in_outaddr_addr
[i
]);
6152 /* If we thought we could inherit a reload, because it seemed that
6153 nothing else wanted the same reload register earlier in the insn,
6154 verify that assumption, now that all reloads have been assigned.
6155 Likewise for reloads where reload_override_in has been set. */
6157 /* If doing expensive optimizations, do one preliminary pass that doesn't
6158 cancel any inheritance, but removes reloads that have been needed only
6159 for reloads that we know can be inherited. */
6160 for (pass
= flag_expensive_optimizations
; pass
>= 0; pass
--)
6162 for (j
= 0; j
< n_reloads
; j
++)
6164 register int r
= reload_order
[j
];
6166 if (reload_inherited
[r
] && reload_reg_rtx
[r
])
6167 check_reg
= reload_reg_rtx
[r
];
6168 else if (reload_override_in
[r
]
6169 && (GET_CODE (reload_override_in
[r
]) == REG
6170 || GET_CODE (reload_override_in
[r
]) == SUBREG
))
6171 check_reg
= reload_override_in
[r
];
6174 if (! (reload_reg_free_before_p (true_regnum (check_reg
),
6175 reload_opnum
[r
], reload_when_needed
[r
],
6176 ! reload_inherited
[r
])
6177 || reload_reg_free_for_value_p (true_regnum (check_reg
),
6179 reload_when_needed
[r
],
6185 reload_inherited
[r
] = 0;
6186 reload_override_in
[r
] = 0;
6188 /* If we can inherit a RELOAD_FOR_INPUT, or can use a
6189 reload_override_in, then we do not need its related
6190 RELOAD_FOR_INPUT_ADDRESS / RELOAD_FOR_INPADDR_ADDRESS reloads;
6191 likewise for other reload types.
6192 We handle this by removing a reload when its only replacement
6193 is mentioned in reload_in of the reload we are going to inherit.
6194 A special case are auto_inc expressions; even if the input is
6195 inherited, we still need the address for the output. We can
6196 recognize them because they have RELOAD_OUT set but not
6198 If we succeeded removing some reload and we are doing a preliminary
6199 pass just to remove such reloads, make another pass, since the
6200 removal of one reload might allow us to inherit another one. */
6201 else if ((! reload_out
[r
] || reload_out_reg
[r
])
6202 && remove_address_replacements (reload_in
[r
]) && pass
)
6207 /* Now that reload_override_in is known valid,
6208 actually override reload_in. */
6209 for (j
= 0; j
< n_reloads
; j
++)
6210 if (reload_override_in
[j
])
6211 reload_in
[j
] = reload_override_in
[j
];
6213 /* If this reload won't be done because it has been cancelled or is
6214 optional and not inherited, clear reload_reg_rtx so other
6215 routines (such as subst_reloads) don't get confused. */
6216 for (j
= 0; j
< n_reloads
; j
++)
6217 if (reload_reg_rtx
[j
] != 0
6218 && ((reload_optional
[j
] && ! reload_inherited
[j
])
6219 || (reload_in
[j
] == 0 && reload_out
[j
] == 0
6220 && ! reload_secondary_p
[j
])))
6222 int regno
= true_regnum (reload_reg_rtx
[j
]);
6224 if (spill_reg_order
[regno
] >= 0)
6225 clear_reload_reg_in_use (regno
, reload_opnum
[j
],
6226 reload_when_needed
[j
], reload_mode
[j
]);
6227 reload_reg_rtx
[j
] = 0;
6230 /* Record which pseudos and which spill regs have output reloads. */
6231 for (j
= 0; j
< n_reloads
; j
++)
6233 register int r
= reload_order
[j
];
6235 i
= reload_spill_index
[r
];
6237 /* I is nonneg if this reload uses a register.
6238 If reload_reg_rtx[r] is 0, this is an optional reload
6239 that we opted to ignore. */
6240 if (reload_out_reg
[r
] != 0 && GET_CODE (reload_out_reg
[r
]) == REG
6241 && reload_reg_rtx
[r
] != 0)
6243 register int nregno
= REGNO (reload_out_reg
[r
]);
6246 if (nregno
< FIRST_PSEUDO_REGISTER
)
6247 nr
= HARD_REGNO_NREGS (nregno
, reload_mode
[r
]);
6250 reg_has_output_reload
[nregno
+ nr
] = 1;
6254 nr
= HARD_REGNO_NREGS (i
, reload_mode
[r
]);
6256 SET_HARD_REG_BIT (reg_is_output_reload
, i
+ nr
);
6259 if (reload_when_needed
[r
] != RELOAD_OTHER
6260 && reload_when_needed
[r
] != RELOAD_FOR_OUTPUT
6261 && reload_when_needed
[r
] != RELOAD_FOR_INSN
)
6267 /* Deallocate the reload register for reload R. This is called from
6268 remove_address_replacements. */
6270 deallocate_reload_reg (r
)
6275 if (! reload_reg_rtx
[r
])
6277 regno
= true_regnum (reload_reg_rtx
[r
]);
6278 reload_reg_rtx
[r
] = 0;
6279 if (spill_reg_order
[regno
] >= 0)
6280 clear_reload_reg_in_use (regno
, reload_opnum
[r
], reload_when_needed
[r
],
6282 reload_spill_index
[r
] = -1;
6285 /* If SMALL_REGISTER_CLASSES is non-zero, we may not have merged two
6286 reloads of the same item for fear that we might not have enough reload
6287 registers. However, normally they will get the same reload register
6288 and hence actually need not be loaded twice.
6290 Here we check for the most common case of this phenomenon: when we have
6291 a number of reloads for the same object, each of which were allocated
6292 the same reload_reg_rtx, that reload_reg_rtx is not used for any other
6293 reload, and is not modified in the insn itself. If we find such,
6294 merge all the reloads and set the resulting reload to RELOAD_OTHER.
6295 This will not increase the number of spill registers needed and will
6296 prevent redundant code. */
6299 merge_assigned_reloads (insn
)
6304 /* Scan all the reloads looking for ones that only load values and
6305 are not already RELOAD_OTHER and ones whose reload_reg_rtx are
6306 assigned and not modified by INSN. */
6308 for (i
= 0; i
< n_reloads
; i
++)
6310 int conflicting_input
= 0;
6311 int max_input_address_opnum
= -1;
6312 int min_conflicting_input_opnum
= MAX_RECOG_OPERANDS
;
6314 if (reload_in
[i
] == 0 || reload_when_needed
[i
] == RELOAD_OTHER
6315 || reload_out
[i
] != 0 || reload_reg_rtx
[i
] == 0
6316 || reg_set_p (reload_reg_rtx
[i
], insn
))
6319 /* Look at all other reloads. Ensure that the only use of this
6320 reload_reg_rtx is in a reload that just loads the same value
6321 as we do. Note that any secondary reloads must be of the identical
6322 class since the values, modes, and result registers are the
6323 same, so we need not do anything with any secondary reloads. */
6325 for (j
= 0; j
< n_reloads
; j
++)
6327 if (i
== j
|| reload_reg_rtx
[j
] == 0
6328 || ! reg_overlap_mentioned_p (reload_reg_rtx
[j
],
6332 if (reload_when_needed
[j
] == RELOAD_FOR_INPUT_ADDRESS
6333 && reload_opnum
[j
] > max_input_address_opnum
)
6334 max_input_address_opnum
= reload_opnum
[j
];
6336 /* If the reload regs aren't exactly the same (e.g, different modes)
6337 or if the values are different, we can't merge this reload.
6338 But if it is an input reload, we might still merge
6339 RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_OTHER_ADDRESS reloads. */
6341 if (! rtx_equal_p (reload_reg_rtx
[i
], reload_reg_rtx
[j
])
6342 || reload_out
[j
] != 0 || reload_in
[j
] == 0
6343 || ! rtx_equal_p (reload_in
[i
], reload_in
[j
]))
6345 if (reload_when_needed
[j
] != RELOAD_FOR_INPUT
6346 || ((reload_when_needed
[i
] != RELOAD_FOR_INPUT_ADDRESS
6347 || reload_opnum
[i
] > reload_opnum
[j
])
6348 && reload_when_needed
[i
] != RELOAD_FOR_OTHER_ADDRESS
))
6350 conflicting_input
= 1;
6351 if (min_conflicting_input_opnum
> reload_opnum
[j
])
6352 min_conflicting_input_opnum
= reload_opnum
[j
];
6356 /* If all is OK, merge the reloads. Only set this to RELOAD_OTHER if
6357 we, in fact, found any matching reloads. */
6360 && max_input_address_opnum
<= min_conflicting_input_opnum
)
6362 for (j
= 0; j
< n_reloads
; j
++)
6363 if (i
!= j
&& reload_reg_rtx
[j
] != 0
6364 && rtx_equal_p (reload_reg_rtx
[i
], reload_reg_rtx
[j
])
6365 && (! conflicting_input
6366 || reload_when_needed
[j
] == RELOAD_FOR_INPUT_ADDRESS
6367 || reload_when_needed
[j
] == RELOAD_FOR_OTHER_ADDRESS
))
6369 reload_when_needed
[i
] = RELOAD_OTHER
;
6371 reload_spill_index
[j
] = -1;
6372 transfer_replacements (i
, j
);
6375 /* If this is now RELOAD_OTHER, look for any reloads that load
6376 parts of this operand and set them to RELOAD_FOR_OTHER_ADDRESS
6377 if they were for inputs, RELOAD_OTHER for outputs. Note that
6378 this test is equivalent to looking for reloads for this operand
6381 if (reload_when_needed
[i
] == RELOAD_OTHER
)
6382 for (j
= 0; j
< n_reloads
; j
++)
6383 if (reload_in
[j
] != 0
6384 && reload_when_needed
[i
] != RELOAD_OTHER
6385 && reg_overlap_mentioned_for_reload_p (reload_in
[j
],
6387 reload_when_needed
[j
]
6388 = ((reload_when_needed
[i
] == RELOAD_FOR_INPUT_ADDRESS
6389 || reload_when_needed
[i
] == RELOAD_FOR_INPADDR_ADDRESS
)
6390 ? RELOAD_FOR_OTHER_ADDRESS
: RELOAD_OTHER
);
6396 /* Output insns to reload values in and out of the chosen reload regs. */
6399 emit_reload_insns (chain
)
6400 struct insn_chain
*chain
;
6402 rtx insn
= chain
->insn
;
6405 rtx input_reload_insns
[MAX_RECOG_OPERANDS
];
6406 rtx other_input_address_reload_insns
= 0;
6407 rtx other_input_reload_insns
= 0;
6408 rtx input_address_reload_insns
[MAX_RECOG_OPERANDS
];
6409 rtx inpaddr_address_reload_insns
[MAX_RECOG_OPERANDS
];
6410 rtx output_reload_insns
[MAX_RECOG_OPERANDS
];
6411 rtx output_address_reload_insns
[MAX_RECOG_OPERANDS
];
6412 rtx outaddr_address_reload_insns
[MAX_RECOG_OPERANDS
];
6413 rtx operand_reload_insns
= 0;
6414 rtx other_operand_reload_insns
= 0;
6415 rtx other_output_reload_insns
[MAX_RECOG_OPERANDS
];
6416 rtx following_insn
= NEXT_INSN (insn
);
6417 rtx before_insn
= PREV_INSN (insn
);
6419 /* Values to be put in spill_reg_store are put here first. */
6420 rtx new_spill_reg_store
[FIRST_PSEUDO_REGISTER
];
6421 HARD_REG_SET reg_reloaded_died
;
6423 CLEAR_HARD_REG_SET (reg_reloaded_died
);
6425 for (j
= 0; j
< reload_n_operands
; j
++)
6426 input_reload_insns
[j
] = input_address_reload_insns
[j
]
6427 = inpaddr_address_reload_insns
[j
]
6428 = output_reload_insns
[j
] = output_address_reload_insns
[j
]
6429 = outaddr_address_reload_insns
[j
]
6430 = other_output_reload_insns
[j
] = 0;
6432 /* Now output the instructions to copy the data into and out of the
6433 reload registers. Do these in the order that the reloads were reported,
6434 since reloads of base and index registers precede reloads of operands
6435 and the operands may need the base and index registers reloaded. */
6437 for (j
= 0; j
< n_reloads
; j
++)
6440 rtx oldequiv_reg
= 0;
6441 rtx this_reload_insn
= 0;
6442 int expect_occurrences
= 1;
6444 if (reload_reg_rtx
[j
]
6445 && REGNO (reload_reg_rtx
[j
]) < FIRST_PSEUDO_REGISTER
)
6446 new_spill_reg_store
[REGNO (reload_reg_rtx
[j
])] = 0;
6448 old
= (reload_in
[j
] && GET_CODE (reload_in
[j
]) == MEM
6449 ? reload_in_reg
[j
] : reload_in
[j
]);
6452 /* AUTO_INC reloads need to be handled even if inherited. We got an
6453 AUTO_INC reload if reload_out is set but reload_out_reg isn't. */
6454 && (! reload_inherited
[j
] || (reload_out
[j
] && ! reload_out_reg
[j
]))
6455 && ! rtx_equal_p (reload_reg_rtx
[j
], old
)
6456 && reload_reg_rtx
[j
] != 0)
6458 register rtx reloadreg
= reload_reg_rtx
[j
];
6460 enum machine_mode mode
;
6463 /* Determine the mode to reload in.
6464 This is very tricky because we have three to choose from.
6465 There is the mode the insn operand wants (reload_inmode[J]).
6466 There is the mode of the reload register RELOADREG.
6467 There is the intrinsic mode of the operand, which we could find
6468 by stripping some SUBREGs.
6469 It turns out that RELOADREG's mode is irrelevant:
6470 we can change that arbitrarily.
6472 Consider (SUBREG:SI foo:QI) as an operand that must be SImode;
6473 then the reload reg may not support QImode moves, so use SImode.
6474 If foo is in memory due to spilling a pseudo reg, this is safe,
6475 because the QImode value is in the least significant part of a
6476 slot big enough for a SImode. If foo is some other sort of
6477 memory reference, then it is impossible to reload this case,
6478 so previous passes had better make sure this never happens.
6480 Then consider a one-word union which has SImode and one of its
6481 members is a float, being fetched as (SUBREG:SF union:SI).
6482 We must fetch that as SFmode because we could be loading into
6483 a float-only register. In this case OLD's mode is correct.
6485 Consider an immediate integer: it has VOIDmode. Here we need
6486 to get a mode from something else.
6488 In some cases, there is a fourth mode, the operand's
6489 containing mode. If the insn specifies a containing mode for
6490 this operand, it overrides all others.
6492 I am not sure whether the algorithm here is always right,
6493 but it does the right things in those cases. */
6495 mode
= GET_MODE (old
);
6496 if (mode
== VOIDmode
)
6497 mode
= reload_inmode
[j
];
6499 #ifdef SECONDARY_INPUT_RELOAD_CLASS
6500 /* If we need a secondary register for this operation, see if
6501 the value is already in a register in that class. Don't
6502 do this if the secondary register will be used as a scratch
6505 if (reload_secondary_in_reload
[j
] >= 0
6506 && reload_secondary_in_icode
[j
] == CODE_FOR_nothing
6509 = find_equiv_reg (old
, insn
,
6510 reload_reg_class
[reload_secondary_in_reload
[j
]],
6511 -1, NULL_PTR
, 0, mode
);
6514 /* If reloading from memory, see if there is a register
6515 that already holds the same value. If so, reload from there.
6516 We can pass 0 as the reload_reg_p argument because
6517 any other reload has either already been emitted,
6518 in which case find_equiv_reg will see the reload-insn,
6519 or has yet to be emitted, in which case it doesn't matter
6520 because we will use this equiv reg right away. */
6522 if (oldequiv
== 0 && optimize
6523 && (GET_CODE (old
) == MEM
6524 || (GET_CODE (old
) == REG
6525 && REGNO (old
) >= FIRST_PSEUDO_REGISTER
6526 && reg_renumber
[REGNO (old
)] < 0)))
6527 oldequiv
= find_equiv_reg (old
, insn
, ALL_REGS
,
6528 -1, NULL_PTR
, 0, mode
);
6532 int regno
= true_regnum (oldequiv
);
6534 /* If OLDEQUIV is a spill register, don't use it for this
6535 if any other reload needs it at an earlier stage of this insn
6536 or at this stage. */
6537 if (spill_reg_order
[regno
] >= 0
6538 && (! reload_reg_free_p (regno
, reload_opnum
[j
],
6539 reload_when_needed
[j
])
6540 || ! reload_reg_free_before_p (regno
, reload_opnum
[j
],
6541 reload_when_needed
[j
], 1)))
6544 /* If OLDEQUIV is not a spill register,
6545 don't use it if any other reload wants it. */
6546 if (spill_reg_order
[regno
] < 0)
6549 for (k
= 0; k
< n_reloads
; k
++)
6550 if (reload_reg_rtx
[k
] != 0 && k
!= j
6551 && reg_overlap_mentioned_for_reload_p (reload_reg_rtx
[k
],
6559 /* If it is no cheaper to copy from OLDEQUIV into the
6560 reload register than it would be to move from memory,
6561 don't use it. Likewise, if we need a secondary register
6565 && ((REGNO_REG_CLASS (regno
) != reload_reg_class
[j
]
6566 && (REGISTER_MOVE_COST (REGNO_REG_CLASS (regno
),
6567 reload_reg_class
[j
])
6568 >= MEMORY_MOVE_COST (mode
, reload_reg_class
[j
], 1)))
6569 #ifdef SECONDARY_INPUT_RELOAD_CLASS
6570 || (SECONDARY_INPUT_RELOAD_CLASS (reload_reg_class
[j
],
6574 #ifdef SECONDARY_MEMORY_NEEDED
6575 || SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (regno
),
6576 reload_reg_class
[j
],
6583 /* delete_output_reload is only invoked properly if old contains
6584 the original pseudo register. Since this is replaced with a
6585 hard reg when RELOAD_OVERRIDE_IN is set, see if we can
6586 find the pseudo in RELOAD_IN_REG. */
6588 && reload_override_in
[j
]
6589 && GET_CODE (reload_in_reg
[j
]) == REG
)
6592 old
= reload_in_reg
[j
];
6596 else if (GET_CODE (oldequiv
) == REG
)
6597 oldequiv_reg
= oldequiv
;
6598 else if (GET_CODE (oldequiv
) == SUBREG
)
6599 oldequiv_reg
= SUBREG_REG (oldequiv
);
6601 /* If we are reloading from a register that was recently stored in
6602 with an output-reload, see if we can prove there was
6603 actually no need to store the old value in it. */
6605 if (optimize
&& GET_CODE (oldequiv
) == REG
6606 && REGNO (oldequiv
) < FIRST_PSEUDO_REGISTER
6607 && spill_reg_store
[REGNO (oldequiv
)]
6608 && GET_CODE (old
) == REG
6609 && (dead_or_set_p (insn
, spill_reg_stored_to
[REGNO (oldequiv
)])
6610 || rtx_equal_p (spill_reg_stored_to
[REGNO (oldequiv
)],
6611 reload_out_reg
[j
])))
6612 delete_output_reload (insn
, j
, REGNO (oldequiv
));
6614 /* Encapsulate both RELOADREG and OLDEQUIV into that mode,
6615 then load RELOADREG from OLDEQUIV. Note that we cannot use
6616 gen_lowpart_common since it can do the wrong thing when
6617 RELOADREG has a multi-word mode. Note that RELOADREG
6618 must always be a REG here. */
6620 if (GET_MODE (reloadreg
) != mode
)
6621 reloadreg
= gen_rtx_REG (mode
, REGNO (reloadreg
));
6622 while (GET_CODE (oldequiv
) == SUBREG
&& GET_MODE (oldequiv
) != mode
)
6623 oldequiv
= SUBREG_REG (oldequiv
);
6624 if (GET_MODE (oldequiv
) != VOIDmode
6625 && mode
!= GET_MODE (oldequiv
))
6626 oldequiv
= gen_rtx_SUBREG (mode
, oldequiv
, 0);
6628 /* Switch to the right place to emit the reload insns. */
6629 switch (reload_when_needed
[j
])
6632 where
= &other_input_reload_insns
;
6634 case RELOAD_FOR_INPUT
:
6635 where
= &input_reload_insns
[reload_opnum
[j
]];
6637 case RELOAD_FOR_INPUT_ADDRESS
:
6638 where
= &input_address_reload_insns
[reload_opnum
[j
]];
6640 case RELOAD_FOR_INPADDR_ADDRESS
:
6641 where
= &inpaddr_address_reload_insns
[reload_opnum
[j
]];
6643 case RELOAD_FOR_OUTPUT_ADDRESS
:
6644 where
= &output_address_reload_insns
[reload_opnum
[j
]];
6646 case RELOAD_FOR_OUTADDR_ADDRESS
:
6647 where
= &outaddr_address_reload_insns
[reload_opnum
[j
]];
6649 case RELOAD_FOR_OPERAND_ADDRESS
:
6650 where
= &operand_reload_insns
;
6652 case RELOAD_FOR_OPADDR_ADDR
:
6653 where
= &other_operand_reload_insns
;
6655 case RELOAD_FOR_OTHER_ADDRESS
:
6656 where
= &other_input_address_reload_insns
;
6662 push_to_sequence (*where
);
6665 /* Auto-increment addresses must be reloaded in a special way. */
6666 if (reload_out
[j
] && ! reload_out_reg
[j
])
6668 /* We are not going to bother supporting the case where an
6669 incremented register can't be copied directly from
6670 OLDEQUIV since this seems highly unlikely. */
6671 if (reload_secondary_in_reload
[j
] >= 0)
6674 if (reload_inherited
[j
])
6675 oldequiv
= reloadreg
;
6677 old
= XEXP (reload_in_reg
[j
], 0);
6679 if (optimize
&& GET_CODE (oldequiv
) == REG
6680 && REGNO (oldequiv
) < FIRST_PSEUDO_REGISTER
6681 && spill_reg_store
[REGNO (oldequiv
)]
6682 && GET_CODE (old
) == REG
6683 && (dead_or_set_p (insn
,
6684 spill_reg_stored_to
[REGNO (oldequiv
)])
6685 || rtx_equal_p (spill_reg_stored_to
[REGNO (oldequiv
)],
6687 delete_output_reload (insn
, j
, REGNO (oldequiv
));
6689 /* Prevent normal processing of this reload. */
6691 /* Output a special code sequence for this case. */
6692 new_spill_reg_store
[REGNO (reloadreg
)]
6693 = inc_for_reload (reloadreg
, oldequiv
, reload_out
[j
],
6697 /* If we are reloading a pseudo-register that was set by the previous
6698 insn, see if we can get rid of that pseudo-register entirely
6699 by redirecting the previous insn into our reload register. */
6701 else if (optimize
&& GET_CODE (old
) == REG
6702 && REGNO (old
) >= FIRST_PSEUDO_REGISTER
6703 && dead_or_set_p (insn
, old
)
6704 /* This is unsafe if some other reload
6705 uses the same reg first. */
6706 && reload_reg_free_before_p (REGNO (reloadreg
),
6708 reload_when_needed
[j
], 0))
6710 rtx temp
= PREV_INSN (insn
);
6711 while (temp
&& GET_CODE (temp
) == NOTE
)
6712 temp
= PREV_INSN (temp
);
6714 && GET_CODE (temp
) == INSN
6715 && GET_CODE (PATTERN (temp
)) == SET
6716 && SET_DEST (PATTERN (temp
)) == old
6717 /* Make sure we can access insn_operand_constraint. */
6718 && asm_noperands (PATTERN (temp
)) < 0
6719 /* This is unsafe if prev insn rejects our reload reg. */
6720 && constraint_accepts_reg_p (insn_operand_constraint
[recog_memoized (temp
)][0],
6722 /* This is unsafe if operand occurs more than once in current
6723 insn. Perhaps some occurrences aren't reloaded. */
6724 && count_occurrences (PATTERN (insn
), old
) == 1
6725 /* Don't risk splitting a matching pair of operands. */
6726 && ! reg_mentioned_p (old
, SET_SRC (PATTERN (temp
))))
6728 /* Store into the reload register instead of the pseudo. */
6729 SET_DEST (PATTERN (temp
)) = reloadreg
;
6730 /* If these are the only uses of the pseudo reg,
6731 pretend for GDB it lives in the reload reg we used. */
6732 if (REG_N_DEATHS (REGNO (old
)) == 1
6733 && REG_N_SETS (REGNO (old
)) == 1)
6735 reg_renumber
[REGNO (old
)] = REGNO (reload_reg_rtx
[j
]);
6736 alter_reg (REGNO (old
), -1);
6742 /* We can't do that, so output an insn to load RELOADREG. */
6746 #ifdef SECONDARY_INPUT_RELOAD_CLASS
6747 rtx second_reload_reg
= 0;
6748 enum insn_code icode
;
6750 /* If we have a secondary reload, pick up the secondary register
6751 and icode, if any. If OLDEQUIV and OLD are different or
6752 if this is an in-out reload, recompute whether or not we
6753 still need a secondary register and what the icode should
6754 be. If we still need a secondary register and the class or
6755 icode is different, go back to reloading from OLD if using
6756 OLDEQUIV means that we got the wrong type of register. We
6757 cannot have different class or icode due to an in-out reload
6758 because we don't make such reloads when both the input and
6759 output need secondary reload registers. */
6761 if (reload_secondary_in_reload
[j
] >= 0)
6763 int secondary_reload
= reload_secondary_in_reload
[j
];
6764 rtx real_oldequiv
= oldequiv
;
6767 /* If OLDEQUIV is a pseudo with a MEM, get the real MEM
6768 and similarly for OLD.
6769 See comments in get_secondary_reload in reload.c. */
6770 /* If it is a pseudo that cannot be replaced with its
6771 equivalent MEM, we must fall back to reload_in, which
6772 will have all the necessary substitutions registered. */
6774 if (GET_CODE (oldequiv
) == REG
6775 && REGNO (oldequiv
) >= FIRST_PSEUDO_REGISTER
6776 && reg_equiv_memory_loc
[REGNO (oldequiv
)] != 0)
6778 if (reg_equiv_address
[REGNO (oldequiv
)]
6779 || num_not_at_initial_offset
)
6780 real_oldequiv
= reload_in
[j
];
6782 real_oldequiv
= reg_equiv_mem
[REGNO (oldequiv
)];
6785 if (GET_CODE (old
) == REG
6786 && REGNO (old
) >= FIRST_PSEUDO_REGISTER
6787 && reg_equiv_memory_loc
[REGNO (old
)] != 0)
6789 if (reg_equiv_address
[REGNO (old
)]
6790 || num_not_at_initial_offset
)
6791 real_old
= reload_in
[j
];
6793 real_old
= reg_equiv_mem
[REGNO (old
)];
6796 second_reload_reg
= reload_reg_rtx
[secondary_reload
];
6797 icode
= reload_secondary_in_icode
[j
];
6799 if ((old
!= oldequiv
&& ! rtx_equal_p (old
, oldequiv
))
6800 || (reload_in
[j
] != 0 && reload_out
[j
] != 0))
6802 enum reg_class new_class
6803 = SECONDARY_INPUT_RELOAD_CLASS (reload_reg_class
[j
],
6804 mode
, real_oldequiv
);
6806 if (new_class
== NO_REGS
)
6807 second_reload_reg
= 0;
6810 enum insn_code new_icode
;
6811 enum machine_mode new_mode
;
6813 if (! TEST_HARD_REG_BIT (reg_class_contents
[(int) new_class
],
6814 REGNO (second_reload_reg
)))
6815 oldequiv
= old
, real_oldequiv
= real_old
;
6818 new_icode
= reload_in_optab
[(int) mode
];
6819 if (new_icode
!= CODE_FOR_nothing
6820 && ((insn_operand_predicate
[(int) new_icode
][0]
6821 && ! ((*insn_operand_predicate
[(int) new_icode
][0])
6823 || (insn_operand_predicate
[(int) new_icode
][1]
6824 && ! ((*insn_operand_predicate
[(int) new_icode
][1])
6825 (real_oldequiv
, mode
)))))
6826 new_icode
= CODE_FOR_nothing
;
6828 if (new_icode
== CODE_FOR_nothing
)
6831 new_mode
= insn_operand_mode
[(int) new_icode
][2];
6833 if (GET_MODE (second_reload_reg
) != new_mode
)
6835 if (!HARD_REGNO_MODE_OK (REGNO (second_reload_reg
),
6837 oldequiv
= old
, real_oldequiv
= real_old
;
6840 = gen_rtx_REG (new_mode
,
6841 REGNO (second_reload_reg
));
6847 /* If we still need a secondary reload register, check
6848 to see if it is being used as a scratch or intermediate
6849 register and generate code appropriately. If we need
6850 a scratch register, use REAL_OLDEQUIV since the form of
6851 the insn may depend on the actual address if it is
6854 if (second_reload_reg
)
6856 if (icode
!= CODE_FOR_nothing
)
6858 emit_insn (GEN_FCN (icode
) (reloadreg
, real_oldequiv
,
6859 second_reload_reg
));
6864 /* See if we need a scratch register to load the
6865 intermediate register (a tertiary reload). */
6866 enum insn_code tertiary_icode
6867 = reload_secondary_in_icode
[secondary_reload
];
6869 if (tertiary_icode
!= CODE_FOR_nothing
)
6871 rtx third_reload_reg
6872 = reload_reg_rtx
[reload_secondary_in_reload
[secondary_reload
]];
6874 emit_insn ((GEN_FCN (tertiary_icode
)
6875 (second_reload_reg
, real_oldequiv
,
6876 third_reload_reg
)));
6879 gen_reload (second_reload_reg
, real_oldequiv
,
6881 reload_when_needed
[j
]);
6883 oldequiv
= second_reload_reg
;
6889 if (! special
&& ! rtx_equal_p (reloadreg
, oldequiv
))
6891 rtx real_oldequiv
= oldequiv
;
6893 if ((GET_CODE (oldequiv
) == REG
6894 && REGNO (oldequiv
) >= FIRST_PSEUDO_REGISTER
6895 && reg_equiv_memory_loc
[REGNO (oldequiv
)] != 0)
6896 || (GET_CODE (oldequiv
) == SUBREG
6897 && GET_CODE (SUBREG_REG (oldequiv
)) == REG
6898 && (REGNO (SUBREG_REG (oldequiv
))
6899 >= FIRST_PSEUDO_REGISTER
)
6900 && (reg_equiv_memory_loc
6901 [REGNO (SUBREG_REG (oldequiv
))] != 0)))
6902 real_oldequiv
= reload_in
[j
];
6903 gen_reload (reloadreg
, real_oldequiv
, reload_opnum
[j
],
6904 reload_when_needed
[j
]);
6909 this_reload_insn
= get_last_insn ();
6910 /* End this sequence. */
6911 *where
= get_insns ();
6914 /* Update reload_override_in so that delete_address_reloads_1
6915 can see the actual register usage. */
6917 reload_override_in
[j
] = oldequiv
;
6920 /* When inheriting a wider reload, we have a MEM in reload_in[j],
6921 e.g. inheriting a SImode output reload for
6922 (mem:HI (plus:SI (reg:SI 14 fp) (const_int 10))) */
6923 if (optimize
&& reload_inherited
[j
] && reload_in
[j
]
6924 && GET_CODE (reload_in
[j
]) == MEM
6925 && GET_CODE (reload_in_reg
[j
]) == MEM
6926 && reload_spill_index
[j
] >= 0
6927 && TEST_HARD_REG_BIT (reg_reloaded_valid
, reload_spill_index
[j
]))
6930 = count_occurrences (PATTERN (insn
), reload_in
[j
]) == 1 ? 0 : -1;
6932 = regno_reg_rtx
[reg_reloaded_contents
[reload_spill_index
[j
]]];
6935 /* If we are reloading a register that was recently stored in with an
6936 output-reload, see if we can prove there was
6937 actually no need to store the old value in it. */
6940 && (reload_inherited
[j
] || reload_override_in
[j
])
6941 && reload_reg_rtx
[j
]
6942 && GET_CODE (reload_reg_rtx
[j
]) == REG
6943 && spill_reg_store
[REGNO (reload_reg_rtx
[j
])] != 0
6945 /* There doesn't seem to be any reason to restrict this to pseudos
6946 and doing so loses in the case where we are copying from a
6947 register of the wrong class. */
6948 && REGNO (spill_reg_stored_to
[REGNO (reload_reg_rtx
[j
])])
6949 >= FIRST_PSEUDO_REGISTER
6951 /* The insn might have already some references to stackslots
6952 replaced by MEMs, while reload_out_reg still names the
6954 && (dead_or_set_p (insn
,
6955 spill_reg_stored_to
[REGNO (reload_reg_rtx
[j
])])
6956 || rtx_equal_p (spill_reg_stored_to
[REGNO (reload_reg_rtx
[j
])],
6957 reload_out_reg
[j
])))
6958 delete_output_reload (insn
, j
, REGNO (reload_reg_rtx
[j
]));
6960 /* Input-reloading is done. Now do output-reloading,
6961 storing the value from the reload-register after the main insn
6962 if reload_out[j] is nonzero.
6964 ??? At some point we need to support handling output reloads of
6965 JUMP_INSNs or insns that set cc0. */
6967 /* If this is an output reload that stores something that is
6968 not loaded in this same reload, see if we can eliminate a previous
6971 rtx pseudo
= reload_out_reg
[j
];
6974 && GET_CODE (pseudo
) == REG
6975 && ! rtx_equal_p (reload_in_reg
[j
], pseudo
)
6976 && REGNO (pseudo
) >= FIRST_PSEUDO_REGISTER
6977 && reg_last_reload_reg
[REGNO (pseudo
)])
6979 int pseudo_no
= REGNO (pseudo
);
6980 int last_regno
= REGNO (reg_last_reload_reg
[pseudo_no
]);
6982 /* We don't need to test full validity of last_regno for
6983 inherit here; we only want to know if the store actually
6984 matches the pseudo. */
6985 if (reg_reloaded_contents
[last_regno
] == pseudo_no
6986 && spill_reg_store
[last_regno
]
6987 && rtx_equal_p (pseudo
, spill_reg_stored_to
[last_regno
]))
6988 delete_output_reload (insn
, j
, last_regno
);
6992 old
= reload_out_reg
[j
];
6994 && reload_reg_rtx
[j
] != old
6995 && reload_reg_rtx
[j
] != 0)
6997 register rtx reloadreg
= reload_reg_rtx
[j
];
6998 #ifdef SECONDARY_OUTPUT_RELOAD_CLASS
6999 register rtx second_reloadreg
= 0;
7002 enum machine_mode mode
;
7005 /* An output operand that dies right away does need a reload,
7006 but need not be copied from it. Show the new location in the
7008 if ((GET_CODE (old
) == REG
|| GET_CODE (old
) == SCRATCH
)
7009 && (note
= find_reg_note (insn
, REG_UNUSED
, old
)) != 0)
7011 XEXP (note
, 0) = reload_reg_rtx
[j
];
7014 /* Likewise for a SUBREG of an operand that dies. */
7015 else if (GET_CODE (old
) == SUBREG
7016 && GET_CODE (SUBREG_REG (old
)) == REG
7017 && 0 != (note
= find_reg_note (insn
, REG_UNUSED
,
7020 XEXP (note
, 0) = gen_lowpart_common (GET_MODE (old
),
7024 else if (GET_CODE (old
) == SCRATCH
)
7025 /* If we aren't optimizing, there won't be a REG_UNUSED note,
7026 but we don't want to make an output reload. */
7030 /* Strip off of OLD any size-increasing SUBREGs such as
7031 (SUBREG:SI foo:QI 0). */
7033 while (GET_CODE (old
) == SUBREG
&& SUBREG_WORD (old
) == 0
7034 && (GET_MODE_SIZE (GET_MODE (old
))
7035 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (old
)))))
7036 old
= SUBREG_REG (old
);
7039 /* If is a JUMP_INSN, we can't support output reloads yet. */
7040 if (GET_CODE (insn
) == JUMP_INSN
)
7043 if (reload_when_needed
[j
] == RELOAD_OTHER
)
7046 push_to_sequence (output_reload_insns
[reload_opnum
[j
]]);
7048 old
= reload_out
[j
];
7050 /* Determine the mode to reload in.
7051 See comments above (for input reloading). */
7053 mode
= GET_MODE (old
);
7054 if (mode
== VOIDmode
)
7056 /* VOIDmode should never happen for an output. */
7057 if (asm_noperands (PATTERN (insn
)) < 0)
7058 /* It's the compiler's fault. */
7059 fatal_insn ("VOIDmode on an output", insn
);
7060 error_for_asm (insn
, "output operand is constant in `asm'");
7061 /* Prevent crash--use something we know is valid. */
7063 old
= gen_rtx_REG (mode
, REGNO (reloadreg
));
7066 if (GET_MODE (reloadreg
) != mode
)
7067 reloadreg
= gen_rtx_REG (mode
, REGNO (reloadreg
));
7069 #ifdef SECONDARY_OUTPUT_RELOAD_CLASS
7071 /* If we need two reload regs, set RELOADREG to the intermediate
7072 one, since it will be stored into OLD. We might need a secondary
7073 register only for an input reload, so check again here. */
7075 if (reload_secondary_out_reload
[j
] >= 0)
7079 if (GET_CODE (old
) == REG
&& REGNO (old
) >= FIRST_PSEUDO_REGISTER
7080 && reg_equiv_mem
[REGNO (old
)] != 0)
7081 real_old
= reg_equiv_mem
[REGNO (old
)];
7083 if((SECONDARY_OUTPUT_RELOAD_CLASS (reload_reg_class
[j
],
7087 second_reloadreg
= reloadreg
;
7088 reloadreg
= reload_reg_rtx
[reload_secondary_out_reload
[j
]];
7090 /* See if RELOADREG is to be used as a scratch register
7091 or as an intermediate register. */
7092 if (reload_secondary_out_icode
[j
] != CODE_FOR_nothing
)
7094 emit_insn ((GEN_FCN (reload_secondary_out_icode
[j
])
7095 (real_old
, second_reloadreg
, reloadreg
)));
7100 /* See if we need both a scratch and intermediate reload
7103 int secondary_reload
= reload_secondary_out_reload
[j
];
7104 enum insn_code tertiary_icode
7105 = reload_secondary_out_icode
[secondary_reload
];
7107 if (GET_MODE (reloadreg
) != mode
)
7108 reloadreg
= gen_rtx_REG (mode
, REGNO (reloadreg
));
7110 if (tertiary_icode
!= CODE_FOR_nothing
)
7113 = reload_reg_rtx
[reload_secondary_out_reload
[secondary_reload
]];
7116 /* Copy primary reload reg to secondary reload reg.
7117 (Note that these have been swapped above, then
7118 secondary reload reg to OLD using our insn. */
7120 /* If REAL_OLD is a paradoxical SUBREG, remove it
7121 and try to put the opposite SUBREG on
7123 if (GET_CODE (real_old
) == SUBREG
7124 && (GET_MODE_SIZE (GET_MODE (real_old
))
7125 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (real_old
))))
7126 && 0 != (tem
= gen_lowpart_common
7127 (GET_MODE (SUBREG_REG (real_old
)),
7129 real_old
= SUBREG_REG (real_old
), reloadreg
= tem
;
7131 gen_reload (reloadreg
, second_reloadreg
,
7132 reload_opnum
[j
], reload_when_needed
[j
]);
7133 emit_insn ((GEN_FCN (tertiary_icode
)
7134 (real_old
, reloadreg
, third_reloadreg
)));
7139 /* Copy between the reload regs here and then to
7142 gen_reload (reloadreg
, second_reloadreg
,
7143 reload_opnum
[j
], reload_when_needed
[j
]);
7149 /* Output the last reload insn. */
7154 /* Don't output the last reload if OLD is not the dest of
7155 INSN and is in the src and is clobbered by INSN. */
7156 if (! flag_expensive_optimizations
7157 || GET_CODE (old
) != REG
7158 || !(set
= single_set (insn
))
7159 || rtx_equal_p (old
, SET_DEST (set
))
7160 || !reg_mentioned_p (old
, SET_SRC (set
))
7161 || !regno_clobbered_p (REGNO (old
), insn
))
7162 gen_reload (old
, reloadreg
, reload_opnum
[j
],
7163 reload_when_needed
[j
]);
7166 /* Look at all insns we emitted, just to be safe. */
7167 for (p
= get_insns (); p
; p
= NEXT_INSN (p
))
7168 if (GET_RTX_CLASS (GET_CODE (p
)) == 'i')
7170 rtx pat
= PATTERN (p
);
7172 /* If this output reload doesn't come from a spill reg,
7173 clear any memory of reloaded copies of the pseudo reg.
7174 If this output reload comes from a spill reg,
7175 reg_has_output_reload will make this do nothing. */
7176 note_stores (pat
, forget_old_reloads_1
);
7178 if (reg_mentioned_p (reload_reg_rtx
[j
], pat
))
7180 rtx set
= single_set (insn
);
7181 if (reload_spill_index
[j
] < 0
7183 && SET_SRC (set
) == reload_reg_rtx
[j
])
7185 int src
= REGNO (SET_SRC (set
));
7187 reload_spill_index
[j
] = src
;
7188 SET_HARD_REG_BIT (reg_is_output_reload
, src
);
7189 if (find_regno_note (insn
, REG_DEAD
, src
))
7190 SET_HARD_REG_BIT (reg_reloaded_died
, src
);
7192 if (REGNO (reload_reg_rtx
[j
]) < FIRST_PSEUDO_REGISTER
)
7194 int s
= reload_secondary_out_reload
[j
];
7195 set
= single_set (p
);
7196 /* If this reload copies only to the secondary reload
7197 register, the secondary reload does the actual
7199 if (s
>= 0 && set
== NULL_RTX
)
7200 ; /* We can't tell what function the secondary reload
7201 has and where the actual store to the pseudo is
7202 made; leave new_spill_reg_store alone. */
7204 && SET_SRC (set
) == reload_reg_rtx
[j
]
7205 && SET_DEST (set
) == reload_reg_rtx
[s
])
7207 /* Usually the next instruction will be the
7208 secondary reload insn; if we can confirm
7209 that it is, setting new_spill_reg_store to
7210 that insn will allow an extra optimization. */
7211 rtx s_reg
= reload_reg_rtx
[s
];
7212 rtx next
= NEXT_INSN (p
);
7213 reload_out
[s
] = reload_out
[j
];
7214 reload_out_reg
[s
] = reload_out_reg
[j
];
7215 set
= single_set (next
);
7216 if (set
&& SET_SRC (set
) == s_reg
7217 && ! new_spill_reg_store
[REGNO (s_reg
)])
7219 SET_HARD_REG_BIT (reg_is_output_reload
,
7221 new_spill_reg_store
[REGNO (s_reg
)] = next
;
7225 new_spill_reg_store
[REGNO (reload_reg_rtx
[j
])] = p
;
7230 if (reload_when_needed
[j
] == RELOAD_OTHER
)
7232 emit_insns (other_output_reload_insns
[reload_opnum
[j
]]);
7233 other_output_reload_insns
[reload_opnum
[j
]] = get_insns ();
7236 output_reload_insns
[reload_opnum
[j
]] = get_insns ();
7242 /* Now write all the insns we made for reloads in the order expected by
7243 the allocation functions. Prior to the insn being reloaded, we write
7244 the following reloads:
7246 RELOAD_FOR_OTHER_ADDRESS reloads for input addresses.
7248 RELOAD_OTHER reloads.
7250 For each operand, any RELOAD_FOR_INPADDR_ADDRESS reloads followed
7251 by any RELOAD_FOR_INPUT_ADDRESS reloads followed by the
7252 RELOAD_FOR_INPUT reload for the operand.
7254 RELOAD_FOR_OPADDR_ADDRS reloads.
7256 RELOAD_FOR_OPERAND_ADDRESS reloads.
7258 After the insn being reloaded, we write the following:
7260 For each operand, any RELOAD_FOR_OUTADDR_ADDRESS reloads followed
7261 by any RELOAD_FOR_OUTPUT_ADDRESS reload followed by the
7262 RELOAD_FOR_OUTPUT reload, followed by any RELOAD_OTHER output
7263 reloads for the operand. The RELOAD_OTHER output reloads are
7264 output in descending order by reload number. */
7266 emit_insns_before (other_input_address_reload_insns
, insn
);
7267 emit_insns_before (other_input_reload_insns
, insn
);
7269 for (j
= 0; j
< reload_n_operands
; j
++)
7271 emit_insns_before (inpaddr_address_reload_insns
[j
], insn
);
7272 emit_insns_before (input_address_reload_insns
[j
], insn
);
7273 emit_insns_before (input_reload_insns
[j
], insn
);
7276 emit_insns_before (other_operand_reload_insns
, insn
);
7277 emit_insns_before (operand_reload_insns
, insn
);
7279 for (j
= 0; j
< reload_n_operands
; j
++)
7281 emit_insns_before (outaddr_address_reload_insns
[j
], following_insn
);
7282 emit_insns_before (output_address_reload_insns
[j
], following_insn
);
7283 emit_insns_before (output_reload_insns
[j
], following_insn
);
7284 emit_insns_before (other_output_reload_insns
[j
], following_insn
);
7287 /* Keep basic block info up to date. */
7290 if (basic_block_head
[chain
->block
] == insn
)
7291 basic_block_head
[chain
->block
] = NEXT_INSN (before_insn
);
7292 if (basic_block_end
[chain
->block
] == insn
)
7293 basic_block_end
[chain
->block
] = PREV_INSN (following_insn
);
7296 /* For all the spill regs newly reloaded in this instruction,
7297 record what they were reloaded from, so subsequent instructions
7298 can inherit the reloads.
7300 Update spill_reg_store for the reloads of this insn.
7301 Copy the elements that were updated in the loop above. */
7303 for (j
= 0; j
< n_reloads
; j
++)
7305 register int r
= reload_order
[j
];
7306 register int i
= reload_spill_index
[r
];
7308 /* I is nonneg if this reload used a register.
7309 If reload_reg_rtx[r] is 0, this is an optional reload
7310 that we opted to ignore. */
7312 if (i
>= 0 && reload_reg_rtx
[r
] != 0)
7315 = HARD_REGNO_NREGS (i
, GET_MODE (reload_reg_rtx
[r
]));
7317 int part_reaches_end
= 0;
7318 int all_reaches_end
= 1;
7320 /* For a multi register reload, we need to check if all or part
7321 of the value lives to the end. */
7322 for (k
= 0; k
< nr
; k
++)
7324 if (reload_reg_reaches_end_p (i
+ k
, reload_opnum
[r
],
7325 reload_when_needed
[r
]))
7326 part_reaches_end
= 1;
7328 all_reaches_end
= 0;
7331 /* Ignore reloads that don't reach the end of the insn in
7333 if (all_reaches_end
)
7335 /* First, clear out memory of what used to be in this spill reg.
7336 If consecutive registers are used, clear them all. */
7338 for (k
= 0; k
< nr
; k
++)
7339 CLEAR_HARD_REG_BIT (reg_reloaded_valid
, i
+ k
);
7341 /* Maybe the spill reg contains a copy of reload_out. */
7342 if (reload_out
[r
] != 0
7343 && (GET_CODE (reload_out
[r
]) == REG
7345 || ! reload_out_reg
[r
]
7347 || GET_CODE (reload_out_reg
[r
]) == REG
))
7349 rtx out
= (GET_CODE (reload_out
[r
]) == REG
7353 /* AUTO_INC */ : XEXP (reload_in_reg
[r
], 0));
7354 register int nregno
= REGNO (out
);
7355 int nnr
= (nregno
>= FIRST_PSEUDO_REGISTER
? 1
7356 : HARD_REGNO_NREGS (nregno
,
7357 GET_MODE (reload_reg_rtx
[r
])));
7359 spill_reg_store
[i
] = new_spill_reg_store
[i
];
7360 spill_reg_stored_to
[i
] = out
;
7361 reg_last_reload_reg
[nregno
] = reload_reg_rtx
[r
];
7363 /* If NREGNO is a hard register, it may occupy more than
7364 one register. If it does, say what is in the
7365 rest of the registers assuming that both registers
7366 agree on how many words the object takes. If not,
7367 invalidate the subsequent registers. */
7369 if (nregno
< FIRST_PSEUDO_REGISTER
)
7370 for (k
= 1; k
< nnr
; k
++)
7371 reg_last_reload_reg
[nregno
+ k
]
7373 ? gen_rtx_REG (reg_raw_mode
[REGNO (reload_reg_rtx
[r
]) + k
],
7374 REGNO (reload_reg_rtx
[r
]) + k
)
7377 /* Now do the inverse operation. */
7378 for (k
= 0; k
< nr
; k
++)
7380 CLEAR_HARD_REG_BIT (reg_reloaded_dead
, i
+ k
);
7381 reg_reloaded_contents
[i
+ k
]
7382 = (nregno
>= FIRST_PSEUDO_REGISTER
|| nr
!= nnr
7385 reg_reloaded_insn
[i
+ k
] = insn
;
7386 SET_HARD_REG_BIT (reg_reloaded_valid
, i
+ k
);
7390 /* Maybe the spill reg contains a copy of reload_in. Only do
7391 something if there will not be an output reload for
7392 the register being reloaded. */
7393 else if (reload_out_reg
[r
] == 0
7394 && reload_in
[r
] != 0
7395 && ((GET_CODE (reload_in
[r
]) == REG
7396 && REGNO (reload_in
[r
]) >= FIRST_PSEUDO_REGISTER
7397 && ! reg_has_output_reload
[REGNO (reload_in
[r
])])
7398 || (GET_CODE (reload_in_reg
[r
]) == REG
7399 && ! reg_has_output_reload
[REGNO (reload_in_reg
[r
])]))
7400 && ! reg_set_p (reload_reg_rtx
[r
], PATTERN (insn
)))
7402 register int nregno
;
7405 if (GET_CODE (reload_in
[r
]) == REG
7406 && REGNO (reload_in
[r
]) >= FIRST_PSEUDO_REGISTER
)
7407 nregno
= REGNO (reload_in
[r
]);
7408 else if (GET_CODE (reload_in_reg
[r
]) == REG
)
7409 nregno
= REGNO (reload_in_reg
[r
]);
7411 nregno
= REGNO (XEXP (reload_in_reg
[r
], 0));
7413 nnr
= (nregno
>= FIRST_PSEUDO_REGISTER
? 1
7414 : HARD_REGNO_NREGS (nregno
,
7415 GET_MODE (reload_reg_rtx
[r
])));
7417 reg_last_reload_reg
[nregno
] = reload_reg_rtx
[r
];
7419 if (nregno
< FIRST_PSEUDO_REGISTER
)
7420 for (k
= 1; k
< nnr
; k
++)
7421 reg_last_reload_reg
[nregno
+ k
]
7423 ? gen_rtx_REG (reg_raw_mode
[REGNO (reload_reg_rtx
[r
]) + k
],
7424 REGNO (reload_reg_rtx
[r
]) + k
)
7427 /* Unless we inherited this reload, show we haven't
7428 recently done a store.
7429 Previous stores of inherited auto_inc expressions
7430 also have to be discarded. */
7431 if (! reload_inherited
[r
]
7432 || (reload_out
[r
] && ! reload_out_reg
[r
]))
7433 spill_reg_store
[i
] = 0;
7435 for (k
= 0; k
< nr
; k
++)
7437 CLEAR_HARD_REG_BIT (reg_reloaded_dead
, i
+ k
);
7438 reg_reloaded_contents
[i
+ k
]
7439 = (nregno
>= FIRST_PSEUDO_REGISTER
|| nr
!= nnr
7442 reg_reloaded_insn
[i
+ k
] = insn
;
7443 SET_HARD_REG_BIT (reg_reloaded_valid
, i
+ k
);
7448 /* However, if part of the reload reaches the end, then we must
7449 invalidate the old info for the part that survives to the end. */
7450 else if (part_reaches_end
)
7452 for (k
= 0; k
< nr
; k
++)
7453 if (reload_reg_reaches_end_p (i
+ k
,
7455 reload_when_needed
[r
]))
7456 CLEAR_HARD_REG_BIT (reg_reloaded_valid
, i
+ k
);
7460 /* The following if-statement was #if 0'd in 1.34 (or before...).
7461 It's reenabled in 1.35 because supposedly nothing else
7462 deals with this problem. */
7464 /* If a register gets output-reloaded from a non-spill register,
7465 that invalidates any previous reloaded copy of it.
7466 But forget_old_reloads_1 won't get to see it, because
7467 it thinks only about the original insn. So invalidate it here. */
7468 if (i
< 0 && reload_out
[r
] != 0
7469 && (GET_CODE (reload_out
[r
]) == REG
7470 || (GET_CODE (reload_out
[r
]) == MEM
7471 && GET_CODE (reload_out_reg
[r
]) == REG
)))
7473 rtx out
= (GET_CODE (reload_out
[r
]) == REG
7474 ? reload_out
[r
] : reload_out_reg
[r
]);
7475 register int nregno
= REGNO (out
);
7476 if (nregno
>= FIRST_PSEUDO_REGISTER
)
7478 rtx src_reg
, store_insn
;
7480 reg_last_reload_reg
[nregno
] = 0;
7482 /* If we can find a hard register that is stored, record
7483 the storing insn so that we may delete this insn with
7484 delete_output_reload. */
7485 src_reg
= reload_reg_rtx
[r
];
7487 /* If this is an optional reload, try to find the source reg
7488 from an input reload. */
7491 rtx set
= single_set (insn
);
7492 if (SET_DEST (set
) == reload_out
[r
])
7496 src_reg
= SET_SRC (set
);
7498 for (k
= 0; k
< n_reloads
; k
++)
7500 if (reload_in
[k
] == src_reg
)
7502 src_reg
= reload_reg_rtx
[k
];
7509 store_insn
= new_spill_reg_store
[REGNO (src_reg
)];
7510 if (src_reg
&& GET_CODE (src_reg
) == REG
7511 && REGNO (src_reg
) < FIRST_PSEUDO_REGISTER
)
7513 int src_regno
= REGNO (src_reg
);
7514 int nr
= HARD_REGNO_NREGS (src_regno
, reload_mode
[r
]);
7515 /* The place where to find a death note varies with
7516 PRESERVE_DEATH_INFO_REGNO_P . The condition is not
7517 necessarily checked exactly in the code that moves
7518 notes, so just check both locations. */
7519 rtx note
= find_regno_note (insn
, REG_DEAD
, src_regno
);
7521 note
= find_regno_note (store_insn
, REG_DEAD
, src_regno
);
7524 spill_reg_store
[src_regno
+ nr
] = store_insn
;
7525 spill_reg_stored_to
[src_regno
+ nr
] = out
;
7526 reg_reloaded_contents
[src_regno
+ nr
] = nregno
;
7527 reg_reloaded_insn
[src_regno
+ nr
] = store_insn
;
7528 CLEAR_HARD_REG_BIT (reg_reloaded_dead
, src_regno
+ nr
);
7529 SET_HARD_REG_BIT (reg_reloaded_valid
, src_regno
+ nr
);
7530 SET_HARD_REG_BIT (reg_is_output_reload
, src_regno
+ nr
);
7532 SET_HARD_REG_BIT (reg_reloaded_died
, src_regno
);
7534 CLEAR_HARD_REG_BIT (reg_reloaded_died
, src_regno
);
7536 reg_last_reload_reg
[nregno
] = src_reg
;
7541 int num_regs
= HARD_REGNO_NREGS (nregno
,GET_MODE (reload_out
[r
]));
7543 while (num_regs
-- > 0)
7544 reg_last_reload_reg
[nregno
+ num_regs
] = 0;
7548 IOR_HARD_REG_SET (reg_reloaded_dead
, reg_reloaded_died
);
7551 /* Emit code to perform a reload from IN (which may be a reload register) to
7552 OUT (which may also be a reload register). IN or OUT is from operand
7553 OPNUM with reload type TYPE.
7555 Returns first insn emitted. */
7558 gen_reload (out
, in
, opnum
, type
)
7562 enum reload_type type
;
7564 rtx last
= get_last_insn ();
7567 /* If IN is a paradoxical SUBREG, remove it and try to put the
7568 opposite SUBREG on OUT. Likewise for a paradoxical SUBREG on OUT. */
7569 if (GET_CODE (in
) == SUBREG
7570 && (GET_MODE_SIZE (GET_MODE (in
))
7571 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (in
))))
7572 && (tem
= gen_lowpart_common (GET_MODE (SUBREG_REG (in
)), out
)) != 0)
7573 in
= SUBREG_REG (in
), out
= tem
;
7574 else if (GET_CODE (out
) == SUBREG
7575 && (GET_MODE_SIZE (GET_MODE (out
))
7576 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (out
))))
7577 && (tem
= gen_lowpart_common (GET_MODE (SUBREG_REG (out
)), in
)) != 0)
7578 out
= SUBREG_REG (out
), in
= tem
;
7580 /* How to do this reload can get quite tricky. Normally, we are being
7581 asked to reload a simple operand, such as a MEM, a constant, or a pseudo
7582 register that didn't get a hard register. In that case we can just
7583 call emit_move_insn.
7585 We can also be asked to reload a PLUS that adds a register or a MEM to
7586 another register, constant or MEM. This can occur during frame pointer
7587 elimination and while reloading addresses. This case is handled by
7588 trying to emit a single insn to perform the add. If it is not valid,
7589 we use a two insn sequence.
7591 Finally, we could be called to handle an 'o' constraint by putting
7592 an address into a register. In that case, we first try to do this
7593 with a named pattern of "reload_load_address". If no such pattern
7594 exists, we just emit a SET insn and hope for the best (it will normally
7595 be valid on machines that use 'o').
7597 This entire process is made complex because reload will never
7598 process the insns we generate here and so we must ensure that
7599 they will fit their constraints and also by the fact that parts of
7600 IN might be being reloaded separately and replaced with spill registers.
7601 Because of this, we are, in some sense, just guessing the right approach
7602 here. The one listed above seems to work.
7604 ??? At some point, this whole thing needs to be rethought. */
7606 if (GET_CODE (in
) == PLUS
7607 && (GET_CODE (XEXP (in
, 0)) == REG
7608 || GET_CODE (XEXP (in
, 0)) == SUBREG
7609 || GET_CODE (XEXP (in
, 0)) == MEM
)
7610 && (GET_CODE (XEXP (in
, 1)) == REG
7611 || GET_CODE (XEXP (in
, 1)) == SUBREG
7612 || CONSTANT_P (XEXP (in
, 1))
7613 || GET_CODE (XEXP (in
, 1)) == MEM
))
7615 /* We need to compute the sum of a register or a MEM and another
7616 register, constant, or MEM, and put it into the reload
7617 register. The best possible way of doing this is if the machine
7618 has a three-operand ADD insn that accepts the required operands.
7620 The simplest approach is to try to generate such an insn and see if it
7621 is recognized and matches its constraints. If so, it can be used.
7623 It might be better not to actually emit the insn unless it is valid,
7624 but we need to pass the insn as an operand to `recog' and
7625 `insn_extract' and it is simpler to emit and then delete the insn if
7626 not valid than to dummy things up. */
7628 rtx op0
, op1
, tem
, insn
;
7631 op0
= find_replacement (&XEXP (in
, 0));
7632 op1
= find_replacement (&XEXP (in
, 1));
7634 /* Since constraint checking is strict, commutativity won't be
7635 checked, so we need to do that here to avoid spurious failure
7636 if the add instruction is two-address and the second operand
7637 of the add is the same as the reload reg, which is frequently
7638 the case. If the insn would be A = B + A, rearrange it so
7639 it will be A = A + B as constrain_operands expects. */
7641 if (GET_CODE (XEXP (in
, 1)) == REG
7642 && REGNO (out
) == REGNO (XEXP (in
, 1)))
7643 tem
= op0
, op0
= op1
, op1
= tem
;
7645 if (op0
!= XEXP (in
, 0) || op1
!= XEXP (in
, 1))
7646 in
= gen_rtx_PLUS (GET_MODE (in
), op0
, op1
);
7648 insn
= emit_insn (gen_rtx_SET (VOIDmode
, out
, in
));
7649 code
= recog_memoized (insn
);
7653 insn_extract (insn
);
7654 /* We want constrain operands to treat this insn strictly in
7655 its validity determination, i.e., the way it would after reload
7657 if (constrain_operands (code
, 1))
7661 delete_insns_since (last
);
7663 /* If that failed, we must use a conservative two-insn sequence.
7664 use move to copy constant, MEM, or pseudo register to the reload
7665 register since "move" will be able to handle an arbitrary operand,
7666 unlike add which can't, in general. Then add the registers.
7668 If there is another way to do this for a specific machine, a
7669 DEFINE_PEEPHOLE should be specified that recognizes the sequence
7672 if (CONSTANT_P (op1
) || GET_CODE (op1
) == MEM
|| GET_CODE (op1
) == SUBREG
7673 || (GET_CODE (op1
) == REG
7674 && REGNO (op1
) >= FIRST_PSEUDO_REGISTER
))
7675 tem
= op0
, op0
= op1
, op1
= tem
;
7677 gen_reload (out
, op0
, opnum
, type
);
7679 /* If OP0 and OP1 are the same, we can use OUT for OP1.
7680 This fixes a problem on the 32K where the stack pointer cannot
7681 be used as an operand of an add insn. */
7683 if (rtx_equal_p (op0
, op1
))
7686 insn
= emit_insn (gen_add2_insn (out
, op1
));
7688 /* If that failed, copy the address register to the reload register.
7689 Then add the constant to the reload register. */
7691 code
= recog_memoized (insn
);
7695 insn_extract (insn
);
7696 /* We want constrain operands to treat this insn strictly in
7697 its validity determination, i.e., the way it would after reload
7699 if (constrain_operands (code
, 1))
7701 /* Add a REG_EQUIV note so that find_equiv_reg can find it. */
7703 = gen_rtx_EXPR_LIST (REG_EQUIV
, in
, REG_NOTES (insn
));
7708 delete_insns_since (last
);
7710 gen_reload (out
, op1
, opnum
, type
);
7711 insn
= emit_insn (gen_add2_insn (out
, op0
));
7712 REG_NOTES (insn
) = gen_rtx_EXPR_LIST (REG_EQUIV
, in
, REG_NOTES (insn
));
7715 #ifdef SECONDARY_MEMORY_NEEDED
7716 /* If we need a memory location to do the move, do it that way. */
7717 else if (GET_CODE (in
) == REG
&& REGNO (in
) < FIRST_PSEUDO_REGISTER
7718 && GET_CODE (out
) == REG
&& REGNO (out
) < FIRST_PSEUDO_REGISTER
7719 && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (REGNO (in
)),
7720 REGNO_REG_CLASS (REGNO (out
)),
7723 /* Get the memory to use and rewrite both registers to its mode. */
7724 rtx loc
= get_secondary_mem (in
, GET_MODE (out
), opnum
, type
);
7726 if (GET_MODE (loc
) != GET_MODE (out
))
7727 out
= gen_rtx_REG (GET_MODE (loc
), REGNO (out
));
7729 if (GET_MODE (loc
) != GET_MODE (in
))
7730 in
= gen_rtx_REG (GET_MODE (loc
), REGNO (in
));
7732 gen_reload (loc
, in
, opnum
, type
);
7733 gen_reload (out
, loc
, opnum
, type
);
7737 /* If IN is a simple operand, use gen_move_insn. */
7738 else if (GET_RTX_CLASS (GET_CODE (in
)) == 'o' || GET_CODE (in
) == SUBREG
)
7739 emit_insn (gen_move_insn (out
, in
));
7741 #ifdef HAVE_reload_load_address
7742 else if (HAVE_reload_load_address
)
7743 emit_insn (gen_reload_load_address (out
, in
));
7746 /* Otherwise, just write (set OUT IN) and hope for the best. */
7748 emit_insn (gen_rtx_SET (VOIDmode
, out
, in
));
7750 /* Return the first insn emitted.
7751 We can not just return get_last_insn, because there may have
7752 been multiple instructions emitted. Also note that gen_move_insn may
7753 emit more than one insn itself, so we can not assume that there is one
7754 insn emitted per emit_insn_before call. */
7756 return last
? NEXT_INSN (last
) : get_insns ();
7759 /* Delete a previously made output-reload
7760 whose result we now believe is not needed.
7761 First we double-check.
7763 INSN is the insn now being processed.
7764 LAST_RELOAD_REG is the hard register number for which we want to delete
7765 the last output reload.
7766 J is the reload-number that originally used REG. The caller has made
7767 certain that reload J doesn't use REG any longer for input. */
7770 delete_output_reload (insn
, j
, last_reload_reg
)
7773 int last_reload_reg
;
7775 rtx output_reload_insn
= spill_reg_store
[last_reload_reg
];
7776 rtx reg
= spill_reg_stored_to
[last_reload_reg
];
7779 int n_inherited
= 0;
7783 /* Get the raw pseudo-register referred to. */
7785 while (GET_CODE (reg
) == SUBREG
)
7786 reg
= SUBREG_REG (reg
);
7787 substed
= reg_equiv_memory_loc
[REGNO (reg
)];
7789 /* This is unsafe if the operand occurs more often in the current
7790 insn than it is inherited. */
7791 for (k
= n_reloads
- 1; k
>= 0; k
--)
7793 rtx reg2
= reload_in
[k
];
7796 if (GET_CODE (reg2
) == MEM
|| reload_override_in
[k
])
7797 reg2
= reload_in_reg
[k
];
7799 if (reload_out
[k
] && ! reload_out_reg
[k
])
7800 reg2
= XEXP (reload_in_reg
[k
], 0);
7802 while (GET_CODE (reg2
) == SUBREG
)
7803 reg2
= SUBREG_REG (reg2
);
7804 if (rtx_equal_p (reg2
, reg
))
7806 if (reload_inherited
[k
] || reload_override_in
[k
] || k
== j
)
7809 reg2
= reload_out_reg
[k
];
7812 while (GET_CODE (reg2
) == SUBREG
)
7813 reg2
= XEXP (reg2
, 0);
7814 if (rtx_equal_p (reg2
, reg
))
7821 n_occurrences
= count_occurrences (PATTERN (insn
), reg
);
7823 n_occurrences
+= count_occurrences (PATTERN (insn
), substed
);
7824 if (n_occurrences
> n_inherited
)
7827 /* If the pseudo-reg we are reloading is no longer referenced
7828 anywhere between the store into it and here,
7829 and no jumps or labels intervene, then the value can get
7830 here through the reload reg alone.
7831 Otherwise, give up--return. */
7832 for (i1
= NEXT_INSN (output_reload_insn
);
7833 i1
!= insn
; i1
= NEXT_INSN (i1
))
7835 if (GET_CODE (i1
) == CODE_LABEL
|| GET_CODE (i1
) == JUMP_INSN
)
7837 if ((GET_CODE (i1
) == INSN
|| GET_CODE (i1
) == CALL_INSN
)
7838 && reg_mentioned_p (reg
, PATTERN (i1
)))
7840 /* If this is USE in front of INSN, we only have to check that
7841 there are no more references than accounted for by inheritance. */
7842 while (GET_CODE (i1
) == INSN
&& GET_CODE (PATTERN (i1
)) == USE
)
7844 n_occurrences
+= rtx_equal_p (reg
, XEXP (PATTERN (i1
), 0)) != 0;
7845 i1
= NEXT_INSN (i1
);
7847 if (n_occurrences
<= n_inherited
&& i1
== insn
)
7853 /* The caller has already checked that REG dies or is set in INSN.
7854 It has also checked that we are optimizing, and thus some inaccurancies
7855 in the debugging information are acceptable.
7856 So we could just delete output_reload_insn.
7857 But in some cases we can improve the debugging information without
7858 sacrificing optimization - maybe even improving the code:
7859 See if the pseudo reg has been completely replaced
7860 with reload regs. If so, delete the store insn
7861 and forget we had a stack slot for the pseudo. */
7862 if (reload_out
[j
] != reload_in
[j
]
7863 && REG_N_DEATHS (REGNO (reg
)) == 1
7864 && REG_BASIC_BLOCK (REGNO (reg
)) >= 0
7865 && find_regno_note (insn
, REG_DEAD
, REGNO (reg
)))
7869 /* We know that it was used only between here
7870 and the beginning of the current basic block.
7871 (We also know that the last use before INSN was
7872 the output reload we are thinking of deleting, but never mind that.)
7873 Search that range; see if any ref remains. */
7874 for (i2
= PREV_INSN (insn
); i2
; i2
= PREV_INSN (i2
))
7876 rtx set
= single_set (i2
);
7878 /* Uses which just store in the pseudo don't count,
7879 since if they are the only uses, they are dead. */
7880 if (set
!= 0 && SET_DEST (set
) == reg
)
7882 if (GET_CODE (i2
) == CODE_LABEL
7883 || GET_CODE (i2
) == JUMP_INSN
)
7885 if ((GET_CODE (i2
) == INSN
|| GET_CODE (i2
) == CALL_INSN
)
7886 && reg_mentioned_p (reg
, PATTERN (i2
)))
7888 /* Some other ref remains; just delete the output reload we
7890 delete_address_reloads (output_reload_insn
, insn
);
7891 PUT_CODE (output_reload_insn
, NOTE
);
7892 NOTE_SOURCE_FILE (output_reload_insn
) = 0;
7893 NOTE_LINE_NUMBER (output_reload_insn
) = NOTE_INSN_DELETED
;
7898 /* Delete the now-dead stores into this pseudo. */
7899 for (i2
= PREV_INSN (insn
); i2
; i2
= PREV_INSN (i2
))
7901 rtx set
= single_set (i2
);
7903 if (set
!= 0 && SET_DEST (set
) == reg
)
7905 delete_address_reloads (i2
, insn
);
7906 /* This might be a basic block head,
7907 thus don't use delete_insn. */
7908 PUT_CODE (i2
, NOTE
);
7909 NOTE_SOURCE_FILE (i2
) = 0;
7910 NOTE_LINE_NUMBER (i2
) = NOTE_INSN_DELETED
;
7912 if (GET_CODE (i2
) == CODE_LABEL
7913 || GET_CODE (i2
) == JUMP_INSN
)
7917 /* For the debugging info,
7918 say the pseudo lives in this reload reg. */
7919 reg_renumber
[REGNO (reg
)] = REGNO (reload_reg_rtx
[j
]);
7920 alter_reg (REGNO (reg
), -1);
7922 delete_address_reloads (output_reload_insn
, insn
);
7923 PUT_CODE (output_reload_insn
, NOTE
);
7924 NOTE_SOURCE_FILE (output_reload_insn
) = 0;
7925 NOTE_LINE_NUMBER (output_reload_insn
) = NOTE_INSN_DELETED
;
7929 /* We are going to delete DEAD_INSN. Recursively delete loads of
7930 reload registers used in DEAD_INSN that are not used till CURRENT_INSN.
7931 CURRENT_INSN is being reloaded, so we have to check its reloads too. */
7933 delete_address_reloads (dead_insn
, current_insn
)
7934 rtx dead_insn
, current_insn
;
7936 rtx set
= single_set (dead_insn
);
7937 rtx set2
, dst
, prev
, next
;
7940 rtx dst
= SET_DEST (set
);
7941 if (GET_CODE (dst
) == MEM
)
7942 delete_address_reloads_1 (dead_insn
, XEXP (dst
, 0), current_insn
);
7944 /* If we deleted the store from a reloaded post_{in,de}c expression,
7945 we can delete the matching adds. */
7946 prev
= PREV_INSN (dead_insn
);
7947 next
= NEXT_INSN (dead_insn
);
7948 if (! prev
|| ! next
)
7950 set
= single_set (next
);
7951 set2
= single_set (prev
);
7953 || GET_CODE (SET_SRC (set
)) != PLUS
|| GET_CODE (SET_SRC (set2
)) != PLUS
7954 || GET_CODE (XEXP (SET_SRC (set
), 1)) != CONST_INT
7955 || GET_CODE (XEXP (SET_SRC (set2
), 1)) != CONST_INT
)
7957 dst
= SET_DEST (set
);
7958 if (! rtx_equal_p (dst
, SET_DEST (set2
))
7959 || ! rtx_equal_p (dst
, XEXP (SET_SRC (set
), 0))
7960 || ! rtx_equal_p (dst
, XEXP (SET_SRC (set2
), 0))
7961 || (INTVAL (XEXP (SET_SRC (set
), 1))
7962 != - INTVAL (XEXP (SET_SRC (set2
), 1))))
7968 /* Subfunction of delete_address_reloads: process registers found in X. */
7970 delete_address_reloads_1 (dead_insn
, x
, current_insn
)
7971 rtx dead_insn
, x
, current_insn
;
7973 rtx prev
, set
, dst
, i2
;
7975 enum rtx_code code
= GET_CODE (x
);
7979 char *fmt
= GET_RTX_FORMAT (code
);
7980 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
7983 delete_address_reloads_1 (dead_insn
, XEXP (x
, i
), current_insn
);
7984 else if (fmt
[i
] == 'E')
7986 for (j
= XVECLEN (x
, i
) - 1; j
>=0; j
--)
7987 delete_address_reloads_1 (dead_insn
, XVECEXP (x
, i
, j
),
7994 if (spill_reg_order
[REGNO (x
)] < 0)
7997 /* Scan backwards for the insn that sets x. This might be a way back due
7999 for (prev
= PREV_INSN (dead_insn
); prev
; prev
= PREV_INSN (prev
))
8001 code
= GET_CODE (prev
);
8002 if (code
== CODE_LABEL
|| code
== JUMP_INSN
)
8004 if (GET_RTX_CLASS (code
) != 'i')
8006 if (reg_set_p (x
, PATTERN (prev
)))
8008 if (reg_referenced_p (x
, PATTERN (prev
)))
8011 if (! prev
|| INSN_UID (prev
) < reload_first_uid
)
8013 /* Check that PREV only sets the reload register. */
8014 set
= single_set (prev
);
8017 dst
= SET_DEST (set
);
8018 if (GET_CODE (dst
) != REG
8019 || ! rtx_equal_p (dst
, x
))
8021 if (! reg_set_p (dst
, PATTERN (dead_insn
)))
8023 /* Check if DST was used in a later insn -
8024 it might have been inherited. */
8025 for (i2
= NEXT_INSN (dead_insn
); i2
; i2
= NEXT_INSN (i2
))
8027 if (GET_CODE (i2
) == CODE_LABEL
)
8029 if (GET_RTX_CLASS (GET_CODE (i2
)) != 'i')
8031 if (reg_referenced_p (dst
, PATTERN (i2
)))
8033 /* If there is a reference to the register in the current insn,
8034 it might be loaded in a non-inherited reload. If no other
8035 reload uses it, that means the register is set before
8037 if (i2
== current_insn
)
8039 for (j
= n_reloads
- 1; j
>= 0; j
--)
8040 if ((reload_reg_rtx
[j
] == dst
&& reload_inherited
[j
])
8041 || reload_override_in
[j
] == dst
)
8043 for (j
= n_reloads
- 1; j
>= 0; j
--)
8044 if (reload_in
[j
] && reload_reg_rtx
[j
] == dst
)
8051 if (GET_CODE (i2
) == JUMP_INSN
)
8053 if (reg_set_p (dst
, PATTERN (i2
)))
8055 /* If DST is still live at CURRENT_INSN, check if it is used for
8057 if (i2
== current_insn
)
8059 for (j
= n_reloads
- 1; j
>= 0; j
--)
8060 if ((reload_reg_rtx
[j
] == dst
&& reload_inherited
[j
])
8061 || reload_override_in
[j
] == dst
)
8063 /* ??? We can't finish the loop here, because dst might be
8064 allocated to a pseudo in this block if no reload in this
8065 block needs any of the clsses containing DST - see
8066 spill_hard_reg. There is no easy way to tell this, so we
8067 have to scan till the end of the basic block. */
8071 delete_address_reloads_1 (prev
, SET_SRC (set
), current_insn
);
8072 reg_reloaded_contents
[REGNO (dst
)] = -1;
8073 /* Can't use delete_insn here because PREV might be a basic block head. */
8074 PUT_CODE (prev
, NOTE
);
8075 NOTE_LINE_NUMBER (prev
) = NOTE_INSN_DELETED
;
8076 NOTE_SOURCE_FILE (prev
) = 0;
8079 /* Output reload-insns to reload VALUE into RELOADREG.
8080 VALUE is an autoincrement or autodecrement RTX whose operand
8081 is a register or memory location;
8082 so reloading involves incrementing that location.
8083 IN is either identical to VALUE, or some cheaper place to reload from.
8085 INC_AMOUNT is the number to increment or decrement by (always positive).
8086 This cannot be deduced from VALUE.
8088 Return the instruction that stores into RELOADREG. */
8091 inc_for_reload (reloadreg
, in
, value
, inc_amount
)
8096 /* REG or MEM to be copied and incremented. */
8097 rtx incloc
= XEXP (value
, 0);
8098 /* Nonzero if increment after copying. */
8099 int post
= (GET_CODE (value
) == POST_DEC
|| GET_CODE (value
) == POST_INC
);
8105 rtx real_in
= in
== value
? XEXP (in
, 0) : in
;
8107 /* No hard register is equivalent to this register after
8108 inc/dec operation. If REG_LAST_RELOAD_REG were non-zero,
8109 we could inc/dec that register as well (maybe even using it for
8110 the source), but I'm not sure it's worth worrying about. */
8111 if (GET_CODE (incloc
) == REG
)
8112 reg_last_reload_reg
[REGNO (incloc
)] = 0;
8114 if (GET_CODE (value
) == PRE_DEC
|| GET_CODE (value
) == POST_DEC
)
8115 inc_amount
= - inc_amount
;
8117 inc
= GEN_INT (inc_amount
);
8119 /* If this is post-increment, first copy the location to the reload reg. */
8120 if (post
&& real_in
!= reloadreg
)
8121 emit_insn (gen_move_insn (reloadreg
, real_in
));
8125 /* See if we can directly increment INCLOC. Use a method similar to
8126 that in gen_reload. */
8128 last
= get_last_insn ();
8129 add_insn
= emit_insn (gen_rtx_SET (VOIDmode
, incloc
,
8130 gen_rtx_PLUS (GET_MODE (incloc
),
8133 code
= recog_memoized (add_insn
);
8136 insn_extract (add_insn
);
8137 if (constrain_operands (code
, 1))
8139 /* If this is a pre-increment and we have incremented the value
8140 where it lives, copy the incremented value to RELOADREG to
8141 be used as an address. */
8144 emit_insn (gen_move_insn (reloadreg
, incloc
));
8149 delete_insns_since (last
);
8152 /* If couldn't do the increment directly, must increment in RELOADREG.
8153 The way we do this depends on whether this is pre- or post-increment.
8154 For pre-increment, copy INCLOC to the reload register, increment it
8155 there, then save back. */
8159 if (in
!= reloadreg
)
8160 emit_insn (gen_move_insn (reloadreg
, real_in
));
8161 emit_insn (gen_add2_insn (reloadreg
, inc
));
8162 store
= emit_insn (gen_move_insn (incloc
, reloadreg
));
8167 Because this might be a jump insn or a compare, and because RELOADREG
8168 may not be available after the insn in an input reload, we must do
8169 the incrementation before the insn being reloaded for.
8171 We have already copied IN to RELOADREG. Increment the copy in
8172 RELOADREG, save that back, then decrement RELOADREG so it has
8173 the original value. */
8175 emit_insn (gen_add2_insn (reloadreg
, inc
));
8176 store
= emit_insn (gen_move_insn (incloc
, reloadreg
));
8177 emit_insn (gen_add2_insn (reloadreg
, GEN_INT (-inc_amount
)));
8183 /* Return 1 if we are certain that the constraint-string STRING allows
8184 the hard register REG. Return 0 if we can't be sure of this. */
8187 constraint_accepts_reg_p (string
, reg
)
8192 int regno
= true_regnum (reg
);
8195 /* Initialize for first alternative. */
8197 /* Check that each alternative contains `g' or `r'. */
8199 switch (c
= *string
++)
8202 /* If an alternative lacks `g' or `r', we lose. */
8205 /* If an alternative lacks `g' or `r', we lose. */
8208 /* Initialize for next alternative. */
8213 /* Any general reg wins for this alternative. */
8214 if (TEST_HARD_REG_BIT (reg_class_contents
[(int) GENERAL_REGS
], regno
))
8218 /* Any reg in specified class wins for this alternative. */
8220 enum reg_class
class = REG_CLASS_FROM_LETTER (c
);
8222 if (TEST_HARD_REG_BIT (reg_class_contents
[(int) class], regno
))
8228 /* Return the number of places FIND appears within X, but don't count
8229 an occurrence if some SET_DEST is FIND. */
8232 count_occurrences (x
, find
)
8233 register rtx x
, find
;
8236 register enum rtx_code code
;
8237 register char *format_ptr
;
8245 code
= GET_CODE (x
);
8260 if (GET_CODE (find
) == MEM
&& rtx_equal_p (x
, find
))
8264 if (SET_DEST (x
) == find
)
8265 return count_occurrences (SET_SRC (x
), find
);
8272 format_ptr
= GET_RTX_FORMAT (code
);
8275 for (i
= 0; i
< GET_RTX_LENGTH (code
); i
++)
8277 switch (*format_ptr
++)
8280 count
+= count_occurrences (XEXP (x
, i
), find
);
8284 if (XVEC (x
, i
) != NULL
)
8286 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
8287 count
+= count_occurrences (XVECEXP (x
, i
, j
), find
);
8295 /* This array holds values which are equivalent to a hard register
8296 during reload_cse_regs. Each array element is an EXPR_LIST of
8297 values. Each time a hard register is set, we set the corresponding
8298 array element to the value. Each time a hard register is copied
8299 into memory, we add the memory location to the corresponding array
8300 element. We don't store values or memory addresses with side
8301 effects in this array.
8303 If the value is a CONST_INT, then the mode of the containing
8304 EXPR_LIST is the mode in which that CONST_INT was referenced.
8306 We sometimes clobber a specific entry in a list. In that case, we
8307 just set XEXP (list-entry, 0) to 0. */
8309 static rtx
*reg_values
;
8311 /* This is a preallocated REG rtx which we use as a temporary in
8312 reload_cse_invalidate_regno, so that we don't need to allocate a
8313 new one each time through a loop in that function. */
8315 static rtx invalidate_regno_rtx
;
8317 /* Invalidate any entries in reg_values which depend on REGNO,
8318 including those for REGNO itself. This is called if REGNO is
8319 changing. If CLOBBER is true, then always forget anything we
8320 currently know about REGNO. MODE is the mode of the assignment to
8321 REGNO, which is used to determine how many hard registers are being
8322 changed. If MODE is VOIDmode, then only REGNO is being changed;
8323 this is used when invalidating call clobbered registers across a
8327 reload_cse_invalidate_regno (regno
, mode
, clobber
)
8329 enum machine_mode mode
;
8335 /* Our callers don't always go through true_regnum; we may see a
8336 pseudo-register here from a CLOBBER or the like. We probably
8337 won't ever see a pseudo-register that has a real register number,
8338 for we check anyhow for safety. */
8339 if (regno
>= FIRST_PSEUDO_REGISTER
)
8340 regno
= reg_renumber
[regno
];
8344 if (mode
== VOIDmode
)
8345 endregno
= regno
+ 1;
8347 endregno
= regno
+ HARD_REGNO_NREGS (regno
, mode
);
8350 for (i
= regno
; i
< endregno
; i
++)
8353 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
8357 for (x
= reg_values
[i
]; x
; x
= XEXP (x
, 1))
8359 if (XEXP (x
, 0) != 0
8360 && refers_to_regno_p (regno
, endregno
, XEXP (x
, 0), NULL_PTR
))
8362 /* If this is the only entry on the list, clear
8363 reg_values[i]. Otherwise, just clear this entry on
8365 if (XEXP (x
, 1) == 0 && x
== reg_values
[i
])
8375 /* We must look at earlier registers, in case REGNO is part of a
8376 multi word value but is not the first register. If an earlier
8377 register has a value in a mode which overlaps REGNO, then we must
8378 invalidate that earlier register. Note that we do not need to
8379 check REGNO or later registers (we must not check REGNO itself,
8380 because we would incorrectly conclude that there was a conflict). */
8382 for (i
= 0; i
< regno
; i
++)
8386 for (x
= reg_values
[i
]; x
; x
= XEXP (x
, 1))
8388 if (XEXP (x
, 0) != 0)
8390 PUT_MODE (invalidate_regno_rtx
, GET_MODE (x
));
8391 REGNO (invalidate_regno_rtx
) = i
;
8392 if (refers_to_regno_p (regno
, endregno
, invalidate_regno_rtx
,
8395 reload_cse_invalidate_regno (i
, VOIDmode
, 1);
8403 /* The memory at address MEM_BASE is being changed.
8404 Return whether this change will invalidate VAL. */
8407 reload_cse_mem_conflict_p (mem_base
, val
)
8415 code
= GET_CODE (val
);
8418 /* Get rid of a few simple cases quickly. */
8431 if (GET_MODE (mem_base
) == BLKmode
8432 || GET_MODE (val
) == BLKmode
)
8434 if (anti_dependence (val
, mem_base
))
8436 /* The address may contain nested MEMs. */
8443 fmt
= GET_RTX_FORMAT (code
);
8445 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
8449 if (reload_cse_mem_conflict_p (mem_base
, XEXP (val
, i
)))
8452 else if (fmt
[i
] == 'E')
8456 for (j
= 0; j
< XVECLEN (val
, i
); j
++)
8457 if (reload_cse_mem_conflict_p (mem_base
, XVECEXP (val
, i
, j
)))
8465 /* Invalidate any entries in reg_values which are changed because of a
8466 store to MEM_RTX. If this is called because of a non-const call
8467 instruction, MEM_RTX is (mem:BLK const0_rtx). */
8470 reload_cse_invalidate_mem (mem_rtx
)
8475 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
8479 for (x
= reg_values
[i
]; x
; x
= XEXP (x
, 1))
8481 if (XEXP (x
, 0) != 0
8482 && reload_cse_mem_conflict_p (mem_rtx
, XEXP (x
, 0)))
8484 /* If this is the only entry on the list, clear
8485 reg_values[i]. Otherwise, just clear this entry on
8487 if (XEXP (x
, 1) == 0 && x
== reg_values
[i
])
8498 /* Invalidate DEST, which is being assigned to or clobbered. The
8499 second parameter exists so that this function can be passed to
8500 note_stores; it is ignored. */
8503 reload_cse_invalidate_rtx (dest
, ignore
)
8505 rtx ignore ATTRIBUTE_UNUSED
;
8507 while (GET_CODE (dest
) == STRICT_LOW_PART
8508 || GET_CODE (dest
) == SIGN_EXTRACT
8509 || GET_CODE (dest
) == ZERO_EXTRACT
8510 || GET_CODE (dest
) == SUBREG
)
8511 dest
= XEXP (dest
, 0);
8513 if (GET_CODE (dest
) == REG
)
8514 reload_cse_invalidate_regno (REGNO (dest
), GET_MODE (dest
), 1);
8515 else if (GET_CODE (dest
) == MEM
)
8516 reload_cse_invalidate_mem (dest
);
8519 /* Do a very simple CSE pass over the hard registers.
8521 This function detects no-op moves where we happened to assign two
8522 different pseudo-registers to the same hard register, and then
8523 copied one to the other. Reload will generate a useless
8524 instruction copying a register to itself.
8526 This function also detects cases where we load a value from memory
8527 into two different registers, and (if memory is more expensive than
8528 registers) changes it to simply copy the first register into the
8531 Another optimization is performed that scans the operands of each
8532 instruction to see whether the value is already available in a
8533 hard register. It then replaces the operand with the hard register
8534 if possible, much like an optional reload would. */
8537 reload_cse_regs_1 (first
)
8545 init_alias_analysis ();
8547 reg_values
= (rtx
*) alloca (FIRST_PSEUDO_REGISTER
* sizeof (rtx
));
8548 bzero ((char *)reg_values
, FIRST_PSEUDO_REGISTER
* sizeof (rtx
));
8550 /* Create our EXPR_LIST structures on reload_obstack, so that we can
8551 free them when we are done. */
8552 push_obstacks (&reload_obstack
, &reload_obstack
);
8553 firstobj
= (char *) obstack_alloc (&reload_obstack
, 0);
8555 /* We pass this to reload_cse_invalidate_mem to invalidate all of
8556 memory for a non-const call instruction. */
8557 callmem
= gen_rtx_MEM (BLKmode
, const0_rtx
);
8559 /* This is used in reload_cse_invalidate_regno to avoid consing a
8560 new REG in a loop in that function. */
8561 invalidate_regno_rtx
= gen_rtx_REG (VOIDmode
, 0);
8563 for (insn
= first
; insn
; insn
= NEXT_INSN (insn
))
8567 if (GET_CODE (insn
) == CODE_LABEL
)
8569 /* Forget all the register values at a code label. We don't
8570 try to do anything clever around jumps. */
8571 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
8577 #ifdef NON_SAVING_SETJMP
8578 if (NON_SAVING_SETJMP
&& GET_CODE (insn
) == NOTE
8579 && NOTE_LINE_NUMBER (insn
) == NOTE_INSN_SETJMP
)
8581 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
8588 if (GET_RTX_CLASS (GET_CODE (insn
)) != 'i')
8591 /* If this is a call instruction, forget anything stored in a
8592 call clobbered register, or, if this is not a const call, in
8594 if (GET_CODE (insn
) == CALL_INSN
)
8596 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
8597 if (call_used_regs
[i
])
8598 reload_cse_invalidate_regno (i
, VOIDmode
, 1);
8600 if (! CONST_CALL_P (insn
))
8601 reload_cse_invalidate_mem (callmem
);
8604 body
= PATTERN (insn
);
8605 if (GET_CODE (body
) == SET
)
8608 if (reload_cse_noop_set_p (body
, insn
))
8610 /* If this sets the return value of the function, we must keep
8611 a USE around, in case this is in a different basic block
8612 than the final USE. Otherwise, we could loose important
8613 register lifeness information on SMALL_REGISTER_CLASSES
8614 machines, where return registers might be used as spills:
8615 subsequent passes assume that spill registers are dead at
8616 the end of a basic block. */
8617 if (REG_FUNCTION_VALUE_P (SET_DEST (body
)))
8620 PATTERN (insn
) = gen_rtx_USE (VOIDmode
, SET_DEST (body
));
8621 INSN_CODE (insn
) = -1;
8622 REG_NOTES (insn
) = NULL_RTX
;
8623 push_obstacks (&reload_obstack
, &reload_obstack
);
8627 PUT_CODE (insn
, NOTE
);
8628 NOTE_LINE_NUMBER (insn
) = NOTE_INSN_DELETED
;
8629 NOTE_SOURCE_FILE (insn
) = 0;
8632 /* We're done with this insn. */
8636 /* It's not a no-op, but we can try to simplify it. */
8637 count
+= reload_cse_simplify_set (body
, insn
);
8640 apply_change_group ();
8642 reload_cse_simplify_operands (insn
);
8644 reload_cse_record_set (body
, body
);
8646 else if (GET_CODE (body
) == PARALLEL
)
8649 rtx value
= NULL_RTX
;
8651 /* If every action in a PARALLEL is a noop, we can delete
8652 the entire PARALLEL. */
8653 for (i
= XVECLEN (body
, 0) - 1; i
>= 0; --i
)
8655 rtx part
= XVECEXP (body
, 0, i
);
8656 if (GET_CODE (part
) == SET
)
8658 if (! reload_cse_noop_set_p (part
, insn
))
8660 if (REG_FUNCTION_VALUE_P (SET_DEST (part
)))
8664 value
= SET_DEST (part
);
8667 else if (GET_CODE (part
) != CLOBBER
)
8675 PATTERN (insn
) = gen_rtx_USE (VOIDmode
, value
);
8676 INSN_CODE (insn
) = -1;
8677 REG_NOTES (insn
) = NULL_RTX
;
8678 push_obstacks (&reload_obstack
, &reload_obstack
);
8682 PUT_CODE (insn
, NOTE
);
8683 NOTE_LINE_NUMBER (insn
) = NOTE_INSN_DELETED
;
8684 NOTE_SOURCE_FILE (insn
) = 0;
8687 /* We're done with this insn. */
8691 /* It's not a no-op, but we can try to simplify it. */
8692 for (i
= XVECLEN (body
, 0) - 1; i
>= 0; --i
)
8693 if (GET_CODE (XVECEXP (body
, 0, i
)) == SET
)
8694 count
+= reload_cse_simplify_set (XVECEXP (body
, 0, i
), insn
);
8697 apply_change_group ();
8699 reload_cse_simplify_operands (insn
);
8701 /* Look through the PARALLEL and record the values being
8702 set, if possible. Also handle any CLOBBERs. */
8703 for (i
= XVECLEN (body
, 0) - 1; i
>= 0; --i
)
8705 rtx x
= XVECEXP (body
, 0, i
);
8707 if (GET_CODE (x
) == SET
)
8708 reload_cse_record_set (x
, body
);
8710 note_stores (x
, reload_cse_invalidate_rtx
);
8714 note_stores (body
, reload_cse_invalidate_rtx
);
8717 /* Clobber any registers which appear in REG_INC notes. We
8718 could keep track of the changes to their values, but it is
8719 unlikely to help. */
8723 for (x
= REG_NOTES (insn
); x
; x
= XEXP (x
, 1))
8724 if (REG_NOTE_KIND (x
) == REG_INC
)
8725 reload_cse_invalidate_rtx (XEXP (x
, 0), NULL_RTX
);
8729 /* Look for any CLOBBERs in CALL_INSN_FUNCTION_USAGE, but only
8730 after we have processed the insn. */
8731 if (GET_CODE (insn
) == CALL_INSN
)
8735 for (x
= CALL_INSN_FUNCTION_USAGE (insn
); x
; x
= XEXP (x
, 1))
8736 if (GET_CODE (XEXP (x
, 0)) == CLOBBER
)
8737 reload_cse_invalidate_rtx (XEXP (XEXP (x
, 0), 0), NULL_RTX
);
8741 /* Free all the temporary structures we created, and go back to the
8742 regular obstacks. */
8743 obstack_free (&reload_obstack
, firstobj
);
8747 /* Call cse / combine like post-reload optimization phases.
8748 FIRST is the first instruction. */
8750 reload_cse_regs (first
)
8753 reload_cse_regs_1 (first
);
8755 reload_cse_move2add (first
);
8756 if (flag_expensive_optimizations
)
8757 reload_cse_regs_1 (first
);
8760 /* Return whether the values known for REGNO are equal to VAL. MODE
8761 is the mode of the object that VAL is being copied to; this matters
8762 if VAL is a CONST_INT. */
8765 reload_cse_regno_equal_p (regno
, val
, mode
)
8768 enum machine_mode mode
;
8775 for (x
= reg_values
[regno
]; x
; x
= XEXP (x
, 1))
8776 if (XEXP (x
, 0) != 0
8777 && rtx_equal_p (XEXP (x
, 0), val
)
8778 && (! flag_float_store
|| GET_CODE (XEXP (x
, 0)) != MEM
8779 || GET_MODE_CLASS (GET_MODE (x
)) != MODE_FLOAT
)
8780 && (GET_CODE (val
) != CONST_INT
8781 || mode
== GET_MODE (x
)
8782 || (GET_MODE_SIZE (mode
) < GET_MODE_SIZE (GET_MODE (x
))
8783 /* On a big endian machine if the value spans more than
8784 one register then this register holds the high part of
8785 it and we can't use it.
8787 ??? We should also compare with the high part of the
8789 && !(WORDS_BIG_ENDIAN
8790 && HARD_REGNO_NREGS (regno
, GET_MODE (x
)) > 1)
8791 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode
),
8792 GET_MODE_BITSIZE (GET_MODE (x
))))))
8798 /* See whether a single set is a noop. SET is the set instruction we
8799 are should check, and INSN is the instruction from which it came. */
8802 reload_cse_noop_set_p (set
, insn
)
8807 enum machine_mode dest_mode
;
8811 src
= SET_SRC (set
);
8812 dest
= SET_DEST (set
);
8813 dest_mode
= GET_MODE (dest
);
8815 if (side_effects_p (src
))
8818 dreg
= true_regnum (dest
);
8819 sreg
= true_regnum (src
);
8821 /* Check for setting a register to itself. In this case, we don't
8822 have to worry about REG_DEAD notes. */
8823 if (dreg
>= 0 && dreg
== sreg
)
8829 /* Check for setting a register to itself. */
8833 /* Check for setting a register to a value which we already know
8834 is in the register. */
8835 else if (reload_cse_regno_equal_p (dreg
, src
, dest_mode
))
8838 /* Check for setting a register DREG to another register SREG
8839 where SREG is equal to a value which is already in DREG. */
8844 for (x
= reg_values
[sreg
]; x
; x
= XEXP (x
, 1))
8848 if (XEXP (x
, 0) == 0)
8851 if (dest_mode
== GET_MODE (x
))
8853 else if (GET_MODE_BITSIZE (dest_mode
)
8854 < GET_MODE_BITSIZE (GET_MODE (x
)))
8855 tmp
= gen_lowpart_common (dest_mode
, XEXP (x
, 0));
8860 && reload_cse_regno_equal_p (dreg
, tmp
, dest_mode
))
8868 else if (GET_CODE (dest
) == MEM
)
8870 /* Check for storing a register to memory when we know that the
8871 register is equivalent to the memory location. */
8873 && reload_cse_regno_equal_p (sreg
, dest
, dest_mode
)
8874 && ! side_effects_p (dest
))
8881 /* Try to simplify a single SET instruction. SET is the set pattern.
8882 INSN is the instruction it came from.
8883 This function only handles one case: if we set a register to a value
8884 which is not a register, we try to find that value in some other register
8885 and change the set into a register copy. */
8888 reload_cse_simplify_set (set
, insn
)
8894 enum machine_mode dest_mode
;
8895 enum reg_class dclass
;
8898 dreg
= true_regnum (SET_DEST (set
));
8902 src
= SET_SRC (set
);
8903 if (side_effects_p (src
) || true_regnum (src
) >= 0)
8906 dclass
= REGNO_REG_CLASS (dreg
);
8908 /* If memory loads are cheaper than register copies, don't change them. */
8909 if (GET_CODE (src
) == MEM
8910 && MEMORY_MOVE_COST (GET_MODE (src
), dclass
, 1) < 2)
8913 /* If the constant is cheaper than a register, don't change it. */
8914 if (CONSTANT_P (src
)
8915 && rtx_cost (src
, SET
) < 2)
8918 dest_mode
= GET_MODE (SET_DEST (set
));
8919 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
8922 && REGISTER_MOVE_COST (REGNO_REG_CLASS (i
), dclass
) == 2
8923 && reload_cse_regno_equal_p (i
, src
, dest_mode
))
8927 /* Pop back to the real obstacks while changing the insn. */
8930 validated
= validate_change (insn
, &SET_SRC (set
),
8931 gen_rtx_REG (dest_mode
, i
), 1);
8933 /* Go back to the obstack we are using for temporary
8935 push_obstacks (&reload_obstack
, &reload_obstack
);
8944 /* Try to replace operands in INSN with equivalent values that are already
8945 in registers. This can be viewed as optional reloading.
8947 For each non-register operand in the insn, see if any hard regs are
8948 known to be equivalent to that operand. Record the alternatives which
8949 can accept these hard registers. Among all alternatives, select the
8950 ones which are better or equal to the one currently matching, where
8951 "better" is in terms of '?' and '!' constraints. Among the remaining
8952 alternatives, select the one which replaces most operands with
8956 reload_cse_simplify_operands (insn
)
8959 #ifdef REGISTER_CONSTRAINTS
8960 int insn_code_number
, n_operands
, n_alternatives
;
8963 char *constraints
[MAX_RECOG_OPERANDS
];
8965 /* Vector recording how bad an alternative is. */
8966 int *alternative_reject
;
8967 /* Vector recording how many registers can be introduced by choosing
8968 this alternative. */
8969 int *alternative_nregs
;
8970 /* Array of vectors recording, for each operand and each alternative,
8971 which hard register to substitute, or -1 if the operand should be
8973 int *op_alt_regno
[MAX_RECOG_OPERANDS
];
8974 /* Array of alternatives, sorted in order of decreasing desirability. */
8975 int *alternative_order
;
8976 rtx reg
= gen_rtx_REG (VOIDmode
, -1);
8978 /* Find out some information about this insn. */
8979 insn_code_number
= recog_memoized (insn
);
8980 /* We don't modify asm instructions. */
8981 if (insn_code_number
< 0)
8984 n_operands
= insn_n_operands
[insn_code_number
];
8985 n_alternatives
= insn_n_alternatives
[insn_code_number
];
8987 if (n_alternatives
== 0 || n_operands
== 0)
8989 insn_extract (insn
);
8991 /* Figure out which alternative currently matches. */
8992 if (! constrain_operands (insn_code_number
, 1))
8993 fatal_insn_not_found (insn
);
8995 alternative_reject
= (int *) alloca (n_alternatives
* sizeof (int));
8996 alternative_nregs
= (int *) alloca (n_alternatives
* sizeof (int));
8997 alternative_order
= (int *) alloca (n_alternatives
* sizeof (int));
8998 bzero ((char *)alternative_reject
, n_alternatives
* sizeof (int));
8999 bzero ((char *)alternative_nregs
, n_alternatives
* sizeof (int));
9001 for (i
= 0; i
< n_operands
; i
++)
9003 enum machine_mode mode
;
9007 op_alt_regno
[i
] = (int *) alloca (n_alternatives
* sizeof (int));
9008 for (j
= 0; j
< n_alternatives
; j
++)
9009 op_alt_regno
[i
][j
] = -1;
9011 p
= constraints
[i
] = insn_operand_constraint
[insn_code_number
][i
];
9012 mode
= insn_operand_mode
[insn_code_number
][i
];
9014 /* Add the reject values for each alternative given by the constraints
9015 for this operand. */
9023 alternative_reject
[j
] += 3;
9025 alternative_reject
[j
] += 300;
9028 /* We won't change operands which are already registers. We
9029 also don't want to modify output operands. */
9030 regno
= true_regnum (recog_operand
[i
]);
9032 || constraints
[i
][0] == '='
9033 || constraints
[i
][0] == '+')
9036 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
9038 int class = (int) NO_REGS
;
9040 if (! reload_cse_regno_equal_p (regno
, recog_operand
[i
], mode
))
9043 REGNO (reg
) = regno
;
9044 PUT_MODE (reg
, mode
);
9046 /* We found a register equal to this operand. Now look for all
9047 alternatives that can accept this register and have not been
9048 assigned a register they can use yet. */
9057 case '=': case '+': case '?':
9058 case '#': case '&': case '!':
9060 case '0': case '1': case '2': case '3': case '4':
9061 case 'm': case '<': case '>': case 'V': case 'o':
9062 case 'E': case 'F': case 'G': case 'H':
9063 case 's': case 'i': case 'n':
9064 case 'I': case 'J': case 'K': case 'L':
9065 case 'M': case 'N': case 'O': case 'P':
9066 #ifdef EXTRA_CONSTRAINT
9067 case 'Q': case 'R': case 'S': case 'T': case 'U':
9070 /* These don't say anything we care about. */
9074 class = reg_class_subunion
[(int) class][(int) GENERAL_REGS
];
9079 = reg_class_subunion
[(int) class][(int) REG_CLASS_FROM_LETTER ((unsigned char)c
)];
9082 case ',': case '\0':
9083 /* See if REGNO fits this alternative, and set it up as the
9084 replacement register if we don't have one for this
9085 alternative yet and the operand being replaced is not
9086 a cheap CONST_INT. */
9087 if (op_alt_regno
[i
][j
] == -1
9088 && reg_fits_class_p (reg
, class, 0, mode
)
9089 && (GET_CODE (recog_operand
[i
]) != CONST_INT
9090 || rtx_cost (recog_operand
[i
], SET
) > rtx_cost (reg
, SET
)))
9092 alternative_nregs
[j
]++;
9093 op_alt_regno
[i
][j
] = regno
;
9105 /* Record all alternatives which are better or equal to the currently
9106 matching one in the alternative_order array. */
9107 for (i
= j
= 0; i
< n_alternatives
; i
++)
9108 if (alternative_reject
[i
] <= alternative_reject
[which_alternative
])
9109 alternative_order
[j
++] = i
;
9112 /* Sort it. Given a small number of alternatives, a dumb algorithm
9113 won't hurt too much. */
9114 for (i
= 0; i
< n_alternatives
- 1; i
++)
9117 int best_reject
= alternative_reject
[alternative_order
[i
]];
9118 int best_nregs
= alternative_nregs
[alternative_order
[i
]];
9121 for (j
= i
+ 1; j
< n_alternatives
; j
++)
9123 int this_reject
= alternative_reject
[alternative_order
[j
]];
9124 int this_nregs
= alternative_nregs
[alternative_order
[j
]];
9126 if (this_reject
< best_reject
9127 || (this_reject
== best_reject
&& this_nregs
< best_nregs
))
9130 best_reject
= this_reject
;
9131 best_nregs
= this_nregs
;
9135 tmp
= alternative_order
[best
];
9136 alternative_order
[best
] = alternative_order
[i
];
9137 alternative_order
[i
] = tmp
;
9140 /* Substitute the operands as determined by op_alt_regno for the best
9142 j
= alternative_order
[0];
9144 /* Pop back to the real obstacks while changing the insn. */
9147 for (i
= 0; i
< n_operands
; i
++)
9149 enum machine_mode mode
= insn_operand_mode
[insn_code_number
][i
];
9150 if (op_alt_regno
[i
][j
] == -1)
9153 validate_change (insn
, recog_operand_loc
[i
],
9154 gen_rtx_REG (mode
, op_alt_regno
[i
][j
]), 1);
9157 for (i
= insn_n_dups
[insn_code_number
] - 1; i
>= 0; i
--)
9159 int op
= recog_dup_num
[i
];
9160 enum machine_mode mode
= insn_operand_mode
[insn_code_number
][op
];
9162 if (op_alt_regno
[op
][j
] == -1)
9165 validate_change (insn
, recog_dup_loc
[i
],
9166 gen_rtx_REG (mode
, op_alt_regno
[op
][j
]), 1);
9169 /* Go back to the obstack we are using for temporary
9171 push_obstacks (&reload_obstack
, &reload_obstack
);
9173 return apply_change_group ();
9179 /* These two variables are used to pass information from
9180 reload_cse_record_set to reload_cse_check_clobber. */
9182 static int reload_cse_check_clobbered
;
9183 static rtx reload_cse_check_src
;
9185 /* See if DEST overlaps with RELOAD_CSE_CHECK_SRC. If it does, set
9186 RELOAD_CSE_CHECK_CLOBBERED. This is called via note_stores. The
9187 second argument, which is passed by note_stores, is ignored. */
9190 reload_cse_check_clobber (dest
, ignore
)
9192 rtx ignore ATTRIBUTE_UNUSED
;
9194 if (reg_overlap_mentioned_p (dest
, reload_cse_check_src
))
9195 reload_cse_check_clobbered
= 1;
9198 /* Record the result of a SET instruction. SET is the set pattern.
9199 BODY is the pattern of the insn that it came from. */
9202 reload_cse_record_set (set
, body
)
9208 enum machine_mode dest_mode
;
9210 dest
= SET_DEST (set
);
9211 src
= SET_SRC (set
);
9212 dreg
= true_regnum (dest
);
9213 sreg
= true_regnum (src
);
9214 dest_mode
= GET_MODE (dest
);
9216 /* Some machines don't define AUTO_INC_DEC, but they still use push
9217 instructions. We need to catch that case here in order to
9218 invalidate the stack pointer correctly. Note that invalidating
9219 the stack pointer is different from invalidating DEST. */
9221 while (GET_CODE (x
) == SUBREG
9222 || GET_CODE (x
) == ZERO_EXTRACT
9223 || GET_CODE (x
) == SIGN_EXTRACT
9224 || GET_CODE (x
) == STRICT_LOW_PART
)
9226 if (push_operand (x
, GET_MODE (x
)))
9228 reload_cse_invalidate_rtx (stack_pointer_rtx
, NULL_RTX
);
9229 reload_cse_invalidate_rtx (dest
, NULL_RTX
);
9233 /* We can only handle an assignment to a register, or a store of a
9234 register to a memory location. For other cases, we just clobber
9235 the destination. We also have to just clobber if there are side
9236 effects in SRC or DEST. */
9237 if ((dreg
< 0 && GET_CODE (dest
) != MEM
)
9238 || side_effects_p (src
)
9239 || side_effects_p (dest
))
9241 reload_cse_invalidate_rtx (dest
, NULL_RTX
);
9246 /* We don't try to handle values involving CC, because it's a pain
9247 to keep track of when they have to be invalidated. */
9248 if (reg_mentioned_p (cc0_rtx
, src
)
9249 || reg_mentioned_p (cc0_rtx
, dest
))
9251 reload_cse_invalidate_rtx (dest
, NULL_RTX
);
9256 /* If BODY is a PARALLEL, then we need to see whether the source of
9257 SET is clobbered by some other instruction in the PARALLEL. */
9258 if (GET_CODE (body
) == PARALLEL
)
9262 for (i
= XVECLEN (body
, 0) - 1; i
>= 0; --i
)
9266 x
= XVECEXP (body
, 0, i
);
9270 reload_cse_check_clobbered
= 0;
9271 reload_cse_check_src
= src
;
9272 note_stores (x
, reload_cse_check_clobber
);
9273 if (reload_cse_check_clobbered
)
9275 reload_cse_invalidate_rtx (dest
, NULL_RTX
);
9285 /* This is an assignment to a register. Update the value we
9286 have stored for the register. */
9291 /* This is a copy from one register to another. Any values
9292 which were valid for SREG are now valid for DREG. If the
9293 mode changes, we use gen_lowpart_common to extract only
9294 the part of the value that is copied. */
9295 reg_values
[dreg
] = 0;
9296 for (x
= reg_values
[sreg
]; x
; x
= XEXP (x
, 1))
9300 if (XEXP (x
, 0) == 0)
9302 if (dest_mode
== GET_MODE (XEXP (x
, 0)))
9304 else if (GET_MODE_BITSIZE (dest_mode
)
9305 > GET_MODE_BITSIZE (GET_MODE (XEXP (x
, 0))))
9308 tmp
= gen_lowpart_common (dest_mode
, XEXP (x
, 0));
9310 reg_values
[dreg
] = gen_rtx_EXPR_LIST (dest_mode
, tmp
,
9315 reg_values
[dreg
] = gen_rtx_EXPR_LIST (dest_mode
, src
, NULL_RTX
);
9317 /* We've changed DREG, so invalidate any values held by other
9318 registers that depend upon it. */
9319 reload_cse_invalidate_regno (dreg
, dest_mode
, 0);
9321 /* If this assignment changes more than one hard register,
9322 forget anything we know about the others. */
9323 for (i
= 1; i
< HARD_REGNO_NREGS (dreg
, dest_mode
); i
++)
9324 reg_values
[dreg
+ i
] = 0;
9326 else if (GET_CODE (dest
) == MEM
)
9328 /* Invalidate conflicting memory locations. */
9329 reload_cse_invalidate_mem (dest
);
9331 /* If we're storing a register to memory, add DEST to the list
9333 if (sreg
>= 0 && ! side_effects_p (dest
))
9334 reg_values
[sreg
] = gen_rtx_EXPR_LIST (dest_mode
, dest
,
9339 /* We should have bailed out earlier. */
9344 /* If reload couldn't use reg+reg+offset addressing, try to use reg+reg
9346 This code might also be useful when reload gave up on reg+reg addresssing
9347 because of clashes between the return register and INDEX_REG_CLASS. */
/* The maximum number of uses of a register we can keep track of to
   replace them with reg+reg addressing.  */
#define RELOAD_COMBINE_MAX_USES 6
9353 /* INSN is the insn where a register has been used, and USEP points to the
9354 location of the register within the rtl. */
9355 struct reg_use
{ rtx insn
, *usep
; };
9357 /* If the register is used in some unknown fashion, USE_INDEX is negative.
9358 If it is dead, USE_INDEX is RELOAD_COMBINE_MAX_USES, and STORE_RUID
9359 indicates where it becomes live again.
9360 Otherwise, USE_INDEX is the index of the last encountered use of the
9361 register (which is first among these we have seen since we scan backwards),
9362 OFFSET contains the constant offset that is added to the register in
9363 all encountered uses, and USE_RUID indicates the first encountered, i.e.
9364 last, of these uses. */
9367 struct reg_use reg_use
[RELOAD_COMBINE_MAX_USES
];
9372 } reg_state
[FIRST_PSEUDO_REGISTER
];
/* Reverse linear uid.  This is increased in reload_combine while scanning
   the instructions from last to first.  It is used to set last_label_ruid
   and the store_ruid / use_ruid fields in reg_state.  */
static int reload_combine_ruid;
9383 int first_index_reg
= 1, last_index_reg
= 0;
9385 int last_label_ruid
;
9387 /* If reg+reg can be used in offsetable memory addresses, the main chunk of
9388 reload has already used it where appropriate, so there is no use in
9389 trying to generate it now. */
9390 if (double_reg_address_ok
&& INDEX_REG_CLASS
!= NO_REGS
)
9393 /* To avoid wasting too much time later searching for an index register,
9394 determine the minimum and maximum index register numbers. */
9395 for (i
= FIRST_PSEUDO_REGISTER
- 1; i
>= 0; --i
)
9397 if (TEST_HARD_REG_BIT (reg_class_contents
[INDEX_REG_CLASS
], i
))
9399 if (! last_index_reg
)
9401 first_index_reg
= i
;
9404 /* If no index register is available, we can quit now. */
9405 if (first_index_reg
> last_index_reg
)
9408 /* Initialize last_label_ruid, reload_combine_ruid and reg_state. */
9409 last_label_ruid
= reload_combine_ruid
= 0;
9410 for (i
= FIRST_PSEUDO_REGISTER
- 1; i
>= 0; --i
)
9413 reg_state
[i
].use_index
= -1;
9416 reg_state
[i
].use_index
= RELOAD_COMBINE_MAX_USES
;
9417 reg_state
[i
].store_ruid
= reload_combine_ruid
;
9421 for (insn
= get_last_insn (); insn
; insn
= PREV_INSN (insn
))
9425 /* We cannot do our optimization across labels. Invalidating all the use
9426 information we have would be costly, so we just note where the label
9427 is and then later disable any optimization that would cross it. */
9428 if (GET_CODE (insn
) == CODE_LABEL
)
9429 last_label_ruid
= reload_combine_ruid
;
9430 if (GET_RTX_CLASS (GET_CODE (insn
)) != 'i')
9432 reload_combine_ruid
++;
9434 /* Look for (set (REGX) (CONST_INT))
9435 (set (REGX) (PLUS (REGX) (REGY)))
9437 ... (MEM (REGX)) ...
9439 (set (REGZ) (CONST_INT))
9441 ... (MEM (PLUS (REGZ) (REGY)))... .
9443 First, check that we have (set (REGX) (PLUS (REGX) (REGY)))
9444 and that we know all uses of REGX before it dies. */
9445 set
= single_set (insn
);
9447 && GET_CODE (SET_DEST (set
)) == REG
9448 && (HARD_REGNO_NREGS (REGNO (SET_DEST (set
)),
9449 GET_MODE (SET_DEST (set
)))
9451 && GET_CODE (SET_SRC (set
)) == PLUS
9452 && GET_CODE (XEXP (SET_SRC (set
), 1)) == REG
9453 && rtx_equal_p (XEXP (SET_SRC (set
), 0), SET_DEST (set
))
9454 && last_label_ruid
< reg_state
[REGNO (SET_DEST (set
))].use_ruid
)
9456 rtx reg
= SET_DEST (set
);
9457 rtx plus
= SET_SRC (set
);
9458 rtx base
= XEXP (plus
, 1);
9459 rtx prev
= prev_nonnote_insn (insn
);
9460 rtx prev_set
= prev
? single_set (prev
) : NULL_RTX
;
9461 int regno
= REGNO (reg
);
9463 rtx reg_sum
= NULL_RTX
;
9465 /* Now, we need an index register.
9466 We'll set index_reg to this index register, const_reg to the
9467 register that is to be loaded with the constant
9468 (denoted as REGZ in the substitution illustration above),
9469 and reg_sum to the register-register that we want to use to
9470 substitute uses of REG (typically in MEMs) with.
9471 First check REG and BASE for being index registers;
9472 we can use them even if they are not dead. */
9473 if (TEST_HARD_REG_BIT (reg_class_contents
[INDEX_REG_CLASS
], regno
)
9474 || TEST_HARD_REG_BIT (reg_class_contents
[INDEX_REG_CLASS
],
9482 /* Otherwise, look for a free index register. Since we have
9483 checked above that neither REG nor BASE are index registers,
9484 if we find anything at all, it will be different from these
9486 for (i
= first_index_reg
; i
<= last_index_reg
; i
++)
9488 if (TEST_HARD_REG_BIT (reg_class_contents
[INDEX_REG_CLASS
], i
)
9489 && reg_state
[i
].use_index
== RELOAD_COMBINE_MAX_USES
9490 && reg_state
[i
].store_ruid
<= reg_state
[regno
].use_ruid
9491 && HARD_REGNO_NREGS (i
, GET_MODE (reg
)) == 1)
9493 rtx index_reg
= gen_rtx_REG (GET_MODE (reg
), i
);
9494 const_reg
= index_reg
;
9495 reg_sum
= gen_rtx_PLUS (GET_MODE (reg
), index_reg
, base
);
9501 && GET_CODE (SET_SRC (prev_set
)) == CONST_INT
9502 && rtx_equal_p (SET_DEST (prev_set
), reg
)
9503 && reg_state
[regno
].use_index
>= 0
9508 /* Change destination register and - if necessary - the
9509 constant value in PREV, the constant loading instruction. */
9510 validate_change (prev
, &SET_DEST (prev_set
), const_reg
, 1);
9511 if (reg_state
[regno
].offset
!= const0_rtx
)
9512 validate_change (prev
,
9513 &SET_SRC (prev_set
),
9514 GEN_INT (INTVAL (SET_SRC (prev_set
))
9515 + INTVAL (reg_state
[regno
].offset
)),
9517 /* Now for every use of REG that we have recorded, replace REG
9519 for (i
= reg_state
[regno
].use_index
;
9520 i
< RELOAD_COMBINE_MAX_USES
; i
++)
9521 validate_change (reg_state
[regno
].reg_use
[i
].insn
,
9522 reg_state
[regno
].reg_use
[i
].usep
,
9525 if (apply_change_group ())
9529 /* Delete the reg-reg addition. */
9530 PUT_CODE (insn
, NOTE
);
9531 NOTE_LINE_NUMBER (insn
) = NOTE_INSN_DELETED
;
9532 NOTE_SOURCE_FILE (insn
) = 0;
9534 if (reg_state
[regno
].offset
!= const0_rtx
)
9536 /* Previous REG_EQUIV / REG_EQUAL notes for PREV
9538 for (np
= &REG_NOTES (prev
); *np
; )
9540 if (REG_NOTE_KIND (*np
) == REG_EQUAL
9541 || REG_NOTE_KIND (*np
) == REG_EQUIV
)
9542 *np
= XEXP (*np
, 1);
9544 np
= &XEXP (*np
, 1);
9547 reg_state
[regno
].use_index
= RELOAD_COMBINE_MAX_USES
;
9548 reg_state
[REGNO (const_reg
)].store_ruid
= reload_combine_ruid
;
9553 note_stores (PATTERN (insn
), reload_combine_note_store
);
9554 if (GET_CODE (insn
) == CALL_INSN
)
9558 for (i
= FIRST_PSEUDO_REGISTER
- 1; i
>= 0; --i
)
9560 if (call_used_regs
[i
])
9562 reg_state
[i
].use_index
= RELOAD_COMBINE_MAX_USES
;
9563 reg_state
[i
].store_ruid
= reload_combine_ruid
;
9566 for (link
= CALL_INSN_FUNCTION_USAGE (insn
); link
;
9567 link
= XEXP (link
, 1))
9569 rtx use
= XEXP (link
, 0);
9570 int regno
= REGNO (XEXP (use
, 0));
9571 if (GET_CODE (use
) == CLOBBER
)
9573 reg_state
[regno
].use_index
= RELOAD_COMBINE_MAX_USES
;
9574 reg_state
[regno
].store_ruid
= reload_combine_ruid
;
9577 reg_state
[regno
].use_index
= -1;
9580 if (GET_CODE (insn
) == JUMP_INSN
)
9582 /* Non-spill registers might be used at the call destination in
9583 some unknown fashion, so we have to mark the unknown use. */
9584 for (i
= FIRST_PSEUDO_REGISTER
- 1; i
>= 0; --i
)
9587 reg_state
[i
].use_index
= -1;
9590 reload_combine_note_use (&PATTERN (insn
), insn
);
9591 for (note
= REG_NOTES (insn
); note
; note
= XEXP (note
, 1))
9593 if (REG_NOTE_KIND (note
) == REG_INC
9594 && GET_CODE (XEXP (note
, 0)) == REG
)
9595 reg_state
[REGNO (XEXP (note
, 0))].use_index
= -1;
9600 /* Check if DST is a register or a subreg of a register; if it is,
9601 update reg_state[regno].store_ruid and reg_state[regno].use_index
9602 accordingly. Called via note_stores from reload_combine.
9603 The second argument, SET, is ignored. */
9605 reload_combine_note_store (dst
, set
)
9606 rtx dst
, set ATTRIBUTE_UNUSED
;
9610 unsigned size
= GET_MODE_SIZE (GET_MODE (dst
));
9612 if (GET_CODE (dst
) == SUBREG
)
9614 regno
= SUBREG_WORD (dst
);
9615 dst
= SUBREG_REG (dst
);
9617 if (GET_CODE (dst
) != REG
)
9619 regno
+= REGNO (dst
);
9620 /* note_stores might have stripped a STRICT_LOW_PART, so we have to be
9621 careful with registers / register parts that are not full words. */
9622 if (size
< (unsigned) UNITS_PER_WORD
)
9623 reg_state
[regno
].use_index
= -1;
9626 for (i
= size
/ UNITS_PER_WORD
- 1 + regno
; i
>= regno
; i
--)
9628 reg_state
[i
].store_ruid
= reload_combine_ruid
;
9629 reg_state
[i
].use_index
= RELOAD_COMBINE_MAX_USES
;
9634 /* XP points to a piece of rtl that has to be checked for any uses of
9636 *XP is the pattern of INSN, or a part of it.
9637 Called from reload_combine, and recursively by itself. */
9639 reload_combine_note_use (xp
, insn
)
9643 enum rtx_code code
= x
->code
;
9646 rtx offset
= const0_rtx
; /* For the REG case below. */
9651 if (GET_CODE (SET_DEST (x
)) == REG
)
9653 reload_combine_note_use (&SET_SRC (x
), insn
);
9659 if (GET_CODE (SET_DEST (x
)) == REG
)
9664 /* We are interested in (plus (reg) (const_int)) . */
9665 if (GET_CODE (XEXP (x
, 0)) != REG
|| GET_CODE (XEXP (x
, 1)) != CONST_INT
)
9667 offset
= XEXP (x
, 1);
9672 int regno
= REGNO (x
);
9675 /* Some spurious USEs of pseudo registers might remain.
9676 Just ignore them. */
9677 if (regno
>= FIRST_PSEUDO_REGISTER
)
9680 /* If this register is already used in some unknown fashion, we
9682 If we decrement the index from zero to -1, we can't store more
9683 uses, so this register becomes used in an unknown fashion. */
9684 use_index
= --reg_state
[regno
].use_index
;
9688 if (use_index
!= RELOAD_COMBINE_MAX_USES
- 1)
9690 /* We have found another use for a register that is already
9691 used later. Check if the offsets match; if not, mark the
9692 register as used in an unknown fashion. */
9693 if (! rtx_equal_p (offset
, reg_state
[regno
].offset
))
9695 reg_state
[regno
].use_index
= -1;
9701 /* This is the first use of this register we have seen since we
9702 marked it as dead. */
9703 reg_state
[regno
].offset
= offset
;
9704 reg_state
[regno
].use_ruid
= reload_combine_ruid
;
9706 reg_state
[regno
].reg_use
[use_index
].insn
= insn
;
9707 reg_state
[regno
].reg_use
[use_index
].usep
= xp
;
9715 /* Recursively process the components of X. */
9716 fmt
= GET_RTX_FORMAT (code
);
9717 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
9720 reload_combine_note_use (&XEXP (x
, i
), insn
);
9721 else if (fmt
[i
] == 'E')
9723 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
9724 reload_combine_note_use (&XVECEXP (x
, i
, j
), insn
);
9729 /* See if we can reduce the cost of a constant by replacing a move with
9731 /* We cannot do our optimization across labels. Invalidating all the
9732 information about register contents we have would be costly, so we
9733 use last_label_luid (local variable of reload_cse_move2add) to note
9734 where the label is and then later disable any optimization that would
9736 reg_offset[n] / reg_base_reg[n] / reg_mode[n] are only valid if
9737 reg_set_luid[n] is larger than last_label_luid[n] . */
9738 static int reg_set_luid
[FIRST_PSEUDO_REGISTER
];
9739 /* reg_offset[n] has to be CONST_INT for it and reg_base_reg[n] /
9740 reg_mode[n] to be valid.
9741 If reg_offset[n] is a CONST_INT and reg_base_reg[n] is negative, register n
9742 has been set to reg_offset[n] in mode reg_mode[n] .
9743 If reg_offset[n] is a CONST_INT and reg_base_reg[n] is non-negative,
9744 register n has been set to the sum of reg_offset[n] and register
9745 reg_base_reg[n], calculated in mode reg_mode[n] . */
9746 static rtx reg_offset
[FIRST_PSEUDO_REGISTER
];
9747 static int reg_base_reg
[FIRST_PSEUDO_REGISTER
];
9748 static enum machine_mode reg_mode
[FIRST_PSEUDO_REGISTER
];
9749 /* move2add_luid is linearily increased while scanning the instructions
9750 from first to last. It is used to set reg_set_luid in
9751 reload_cse_move2add and move2add_note_store. */
9752 static int move2add_luid
;
9755 reload_cse_move2add (first
)
9760 int last_label_luid
;
9762 for (i
= FIRST_PSEUDO_REGISTER
-1; i
>= 0; i
--)
9763 reg_set_luid
[i
] = 0;
9765 last_label_luid
= 0;
9767 for (insn
= first
; insn
; insn
= NEXT_INSN (insn
), move2add_luid
++)
9771 if (GET_CODE (insn
) == CODE_LABEL
)
9772 last_label_luid
= move2add_luid
;
9773 if (GET_RTX_CLASS (GET_CODE (insn
)) != 'i')
9775 pat
= PATTERN (insn
);
9776 /* For simplicity, we only perform this optimization on
9777 straightforward SETs. */
9778 if (GET_CODE (pat
) == SET
9779 && GET_CODE (SET_DEST (pat
)) == REG
)
9781 rtx reg
= SET_DEST (pat
);
9782 int regno
= REGNO (reg
);
9783 rtx src
= SET_SRC (pat
);
9785 /* Check if we have valid information on the contents of this
9786 register in the mode of REG. */
9787 /* ??? We don't know how zero / sign extension is handled, hence
9788 we can't go from a narrower to a wider mode. */
9789 if (reg_set_luid
[regno
] > last_label_luid
9790 && (GET_MODE_SIZE (GET_MODE (reg
))
9791 <= GET_MODE_SIZE (reg_mode
[regno
]))
9792 && GET_CODE (reg_offset
[regno
]) == CONST_INT
)
9794 /* Try to transform (set (REGX) (CONST_INT A))
9796 (set (REGX) (CONST_INT B))
9798 (set (REGX) (CONST_INT A))
9800 (set (REGX) (plus (REGX) (CONST_INT B-A))) */
9802 if (GET_CODE (src
) == CONST_INT
&& reg_base_reg
[regno
] < 0)
9805 rtx new_src
= GEN_INT (INTVAL (src
)
9806 - INTVAL (reg_offset
[regno
]));
9807 /* (set (reg) (plus (reg) (const_int 0))) is not canonical;
9808 use (set (reg) (reg)) instead.
9809 We don't delete this insn, nor do we convert it into a
9810 note, to avoid losing register notes or the return
9811 value flag. jump2 already knows how to get rid of
9813 if (new_src
== const0_rtx
)
9814 success
= validate_change (insn
, &SET_SRC (pat
), reg
, 0);
9815 else if (rtx_cost (new_src
, PLUS
) < rtx_cost (src
, SET
)
9816 && have_add2_insn (GET_MODE (reg
)))
9817 success
= validate_change (insn
, &PATTERN (insn
),
9818 gen_add2_insn (reg
, new_src
), 0);
9819 reg_set_luid
[regno
] = move2add_luid
;
9820 reg_mode
[regno
] = GET_MODE (reg
);
9821 reg_offset
[regno
] = src
;
9825 /* Try to transform (set (REGX) (REGY))
9826 (set (REGX) (PLUS (REGX) (CONST_INT A)))
9829 (set (REGX) (PLUS (REGX) (CONST_INT B)))
9832 (set (REGX) (PLUS (REGX) (CONST_INT A)))
9834 (set (REGX) (plus (REGX) (CONST_INT B-A))) */
9835 else if (GET_CODE (src
) == REG
9836 && reg_base_reg
[regno
] == REGNO (src
)
9837 && reg_set_luid
[regno
] > reg_set_luid
[REGNO (src
)])
9839 rtx next
= next_nonnote_insn (insn
);
9842 set
= single_set (next
);
9845 && SET_DEST (set
) == reg
9846 && GET_CODE (SET_SRC (set
)) == PLUS
9847 && XEXP (SET_SRC (set
), 0) == reg
9848 && GET_CODE (XEXP (SET_SRC (set
), 1)) == CONST_INT
)
9850 rtx src3
= XEXP (SET_SRC (set
), 1);
9851 rtx new_src
= GEN_INT (INTVAL (src3
)
9852 - INTVAL (reg_offset
[regno
]));
9855 if (new_src
== const0_rtx
)
9856 /* See above why we create (set (reg) (reg)) here. */
9858 = validate_change (next
, &SET_SRC (set
), reg
, 0);
9859 else if ((rtx_cost (new_src
, PLUS
)
9860 < 2 + rtx_cost (src3
, SET
))
9861 && have_add2_insn (GET_MODE (reg
)))
9863 = validate_change (next
, &PATTERN (next
),
9864 gen_add2_insn (reg
, new_src
), 0);
9867 /* INSN might be the first insn in a basic block
9868 if the preceding insn is a conditional jump
9869 or a possible-throwing call. */
9870 PUT_CODE (insn
, NOTE
);
9871 NOTE_LINE_NUMBER (insn
) = NOTE_INSN_DELETED
;
9872 NOTE_SOURCE_FILE (insn
) = 0;
9875 reg_set_luid
[regno
] = move2add_luid
;
9876 reg_mode
[regno
] = GET_MODE (reg
);
9877 reg_offset
[regno
] = src3
;
9884 for (note
= REG_NOTES (insn
); note
; note
= XEXP (note
, 1))
9886 if (REG_NOTE_KIND (note
) == REG_INC
9887 && GET_CODE (XEXP (note
, 0)) == REG
)
9889 /* Indicate that this register has been recently written to,
9890 but the exact contents are not available. */
9891 int regno
= REGNO (XEXP (note
, 0));
9892 if (regno
< FIRST_PSEUDO_REGISTER
)
9894 reg_set_luid
[regno
] = move2add_luid
;
9895 reg_offset
[regno
] = note
;
9899 note_stores (PATTERN (insn
), move2add_note_store
);
9900 /* If this is a CALL_INSN, all call used registers are stored with
9902 if (GET_CODE (insn
) == CALL_INSN
)
9904 for (i
= FIRST_PSEUDO_REGISTER
-1; i
>= 0; i
--)
9906 if (call_used_regs
[i
])
9908 reg_set_luid
[i
] = move2add_luid
;
9909 reg_offset
[i
] = insn
; /* Invalidate contents. */
9916 /* SET is a SET or CLOBBER that sets DST.
9917 Update reg_set_luid, reg_offset and reg_base_reg accordingly.
9918 Called from reload_cse_move2add via note_stores. */
9920 move2add_note_store (dst
, set
)
9926 enum machine_mode mode
= GET_MODE (dst
);
9927 if (GET_CODE (dst
) == SUBREG
)
9929 regno
= SUBREG_WORD (dst
);
9930 dst
= SUBREG_REG (dst
);
9932 if (GET_CODE (dst
) != REG
)
9935 regno
+= REGNO (dst
);
9937 if (HARD_REGNO_NREGS (regno
, mode
) == 1 && GET_CODE (set
) == SET
)
9939 rtx src
= SET_SRC (set
);
9941 reg_mode
[regno
] = mode
;
9942 switch (GET_CODE (src
))
9946 rtx src0
= XEXP (src
, 0);
9947 if (GET_CODE (src0
) == REG
)
9949 if (REGNO (src0
) != regno
9950 || reg_offset
[regno
] != const0_rtx
)
9952 reg_base_reg
[regno
] = REGNO (src0
);
9953 reg_set_luid
[regno
] = move2add_luid
;
9955 reg_offset
[regno
] = XEXP (src
, 1);
9958 reg_set_luid
[regno
] = move2add_luid
;
9959 reg_offset
[regno
] = set
; /* Invalidate contents. */
9964 reg_base_reg
[regno
] = REGNO (SET_SRC (set
));
9965 reg_offset
[regno
] = const0_rtx
;
9966 reg_set_luid
[regno
] = move2add_luid
;
9970 reg_base_reg
[regno
] = -1;
9971 reg_offset
[regno
] = SET_SRC (set
);
9972 reg_set_luid
[regno
] = move2add_luid
;
9978 for (i
= regno
+ HARD_REGNO_NREGS (regno
, mode
) - 1; i
>= regno
; i
--)
9980 /* Indicate that this register has been recently written to,
9981 but the exact contents are not available. */
9982 reg_set_luid
[i
] = move2add_luid
;
9983 reg_offset
[i
] = dst
;