1 /* Reload pseudo regs into hard regs for insns that require hard regs.
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22 #include "config.h"
23 #include "system.h"
24
25 #include "machmode.h"
26 #include "hard-reg-set.h"
27 #include "rtl.h"
28 #include "tm_p.h"
29 #include "obstack.h"
30 #include "insn-config.h"
31 #include "flags.h"
32 #include "function.h"
33 #include "expr.h"
34 #include "optabs.h"
35 #include "regs.h"
36 #include "basic-block.h"
37 #include "reload.h"
38 #include "recog.h"
39 #include "output.h"
40 #include "cselib.h"
41 #include "real.h"
42 #include "toplev.h"
43 #include "except.h"
44
45 /* This file contains the reload pass of the compiler, which is
46 run after register allocation has been done. It checks that
47 each insn is valid (operands required to be in registers really
48 are in registers of the proper class) and fixes up invalid ones
49 by copying values temporarily into registers for the insns
50 that need them.
51
52 The results of register allocation are described by the vector
53 reg_renumber; the insns still contain pseudo regs, but reg_renumber
54 can be used to find which hard reg, if any, a pseudo reg is in.
55
56 The technique we always use is to free up a few hard regs that are
57 called ``reload regs'', and for each place where a pseudo reg
58 must be in a hard reg, copy it temporarily into one of the reload regs.
59
60 Reload regs are allocated locally for every instruction that needs
61 reloads. When there are pseudos which are allocated to a register that
62 has been chosen as a reload reg, such pseudos must be ``spilled''.
63 This means that they go to other hard regs, or to stack slots if no other
64 available hard regs can be found. Spilling can invalidate more
65    insns, requiring additional reloads, so we must keep checking
66 until the process stabilizes.
67
68 For machines with different classes of registers, we must keep track
69 of the register class needed for each reload, and make sure that
70 we allocate enough reload registers of each class.
71
72 The file reload.c contains the code that checks one insn for
73 validity and reports the reloads that it needs. This file
74 is in charge of scanning the entire rtl code, accumulating the
75 reload needs, spilling, assigning reload registers to use for
76 fixing up each insn, and generating the new insns to copy values
77 into the reload registers. */
78
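/* A concrete sketch of the idea (illustrative only; register numbers and
   the stack slot are invented, not taken from any real target): suppose
   pseudo 105 did not get a hard reg and lives in a stack slot at fp-8,
   and the insn

	(set (reg 105) (plus (reg 105) (reg 106)))

   requires all operands in hard registers.  Reload would pick a reload
   reg, say hard reg 3, and rewrite this as roughly

	(set (reg 3) (mem (plus (reg fp) (const_int -8))))	input reload
	(set (reg 3) (plus (reg 3) (reg 106)))
	(set (mem (plus (reg fp) (const_int -8))) (reg 3))	output reload

   If hard reg 3 was previously allocated to some other pseudo, that
   pseudo is ``spilled'' as described above.  */
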
79 #ifndef REGISTER_MOVE_COST
80 #define REGISTER_MOVE_COST(m, x, y) 2
81 #endif
82
83 #ifndef LOCAL_REGNO
84 #define LOCAL_REGNO(REGNO) 0
85 #endif
86 \f
87 /* During reload_as_needed, element N contains a REG rtx for the hard reg
88 into which reg N has been reloaded (perhaps for a previous insn). */
89 static rtx *reg_last_reload_reg;
90
91 /* Elt N nonzero if reg_last_reload_reg[N] has been set in this insn
92 for an output reload that stores into reg N. */
93 static char *reg_has_output_reload;
94
95 /* Indicates which hard regs are reload-registers for an output reload
96 in the current insn. */
97 static HARD_REG_SET reg_is_output_reload;
98
99 /* Element N is the constant value to which pseudo reg N is equivalent,
100 or zero if pseudo reg N is not equivalent to a constant.
101 find_reloads looks at this in order to replace pseudo reg N
102 with the constant it stands for. */
103 rtx *reg_equiv_constant;
104
105 /* Element N is a memory location to which pseudo reg N is equivalent,
106 prior to any register elimination (such as frame pointer to stack
107 pointer). Depending on whether or not it is a valid address, this value
108 is transferred to either reg_equiv_address or reg_equiv_mem. */
109 rtx *reg_equiv_memory_loc;
110
111 /* Element N is the address of stack slot to which pseudo reg N is equivalent.
112 This is used when the address is not valid as a memory address
113    (because its displacement is too big for the machine).  */
114 rtx *reg_equiv_address;
115
116 /* Element N is the memory slot to which pseudo reg N is equivalent,
117 or zero if pseudo reg N is not equivalent to a memory slot. */
118 rtx *reg_equiv_mem;
119
120 /* Widest width in which each pseudo reg is referred to (via subreg). */
121 static unsigned int *reg_max_ref_width;
122
123 /* Element N is the list of insns that initialized reg N from its equivalent
124 constant or memory slot. */
125 static rtx *reg_equiv_init;
126
127 /* Vector to remember old contents of reg_renumber before spilling. */
128 static short *reg_old_renumber;
129
130 /* During reload_as_needed, element N contains the last pseudo regno reloaded
131 into hard register N. If that pseudo reg occupied more than one register,
132 reg_reloaded_contents points to that pseudo for each spill register in
133 use; all of these must remain set for an inheritance to occur. */
134 static int reg_reloaded_contents[FIRST_PSEUDO_REGISTER];
135
136 /* During reload_as_needed, element N contains the insn for which
137 hard register N was last used. Its contents are significant only
138 when reg_reloaded_valid is set for this register. */
139 static rtx reg_reloaded_insn[FIRST_PSEUDO_REGISTER];
140
141 /* Indicate if reg_reloaded_insn / reg_reloaded_contents is valid.  */
142 static HARD_REG_SET reg_reloaded_valid;
143 /* Indicate if the register was dead at the end of the reload.
144 This is only valid if reg_reloaded_contents is set and valid. */
145 static HARD_REG_SET reg_reloaded_dead;
146
147 /* Number of spill-regs so far; number of valid elements of spill_regs. */
148 static int n_spills;
149
150 /* In parallel with spill_regs, contains REG rtx's for those regs.
151 Holds the last rtx used for any given reg, or 0 if it has never
152 been used for spilling yet. This rtx is reused, provided it has
153 the proper mode. */
154 static rtx spill_reg_rtx[FIRST_PSEUDO_REGISTER];
155
156 /* In parallel with spill_regs, contains nonzero for a spill reg
157 that was stored after the last time it was used.
158 The precise value is the insn generated to do the store. */
159 static rtx spill_reg_store[FIRST_PSEUDO_REGISTER];
160
161 /* This is the register that was stored with spill_reg_store. This is a
162 copy of reload_out / reload_out_reg when the value was stored; if
163 reload_out is a MEM, spill_reg_stored_to will be set to reload_out_reg. */
164 static rtx spill_reg_stored_to[FIRST_PSEUDO_REGISTER];
165
166 /* This table is the inverse mapping of spill_regs:
167 indexed by hard reg number,
168 it contains the position of that reg in spill_regs,
169 or -1 for something that is not in spill_regs.
170
171 ?!? This is no longer accurate. */
172 static short spill_reg_order[FIRST_PSEUDO_REGISTER];
173
174 /* This reg set indicates registers that can't be used as spill registers for
175 the currently processed insn. These are the hard registers which are live
176 during the insn, but not allocated to pseudos, as well as fixed
177 registers. */
178 static HARD_REG_SET bad_spill_regs;
179
180 /* These are the hard registers that can't be used as spill register for any
181 insn. This includes registers used for user variables and registers that
182 we can't eliminate. A register that appears in this set also can't be used
183 to retry register allocation. */
184 static HARD_REG_SET bad_spill_regs_global;
185
186 /* Describes order of use of registers for reloading
187 of spilled pseudo-registers. `n_spills' is the number of
188 elements that are actually valid; new ones are added at the end.
189
190 Both spill_regs and spill_reg_order are used on two occasions:
191 once during find_reload_regs, where they keep track of the spill registers
192 for a single insn, but also during reload_as_needed where they show all
193 the registers ever used by reload. For the latter case, the information
194 is calculated during finish_spills. */
195 static short spill_regs[FIRST_PSEUDO_REGISTER];
196
197 /* This vector of reg sets indicates, for each pseudo, which hard registers
198 may not be used for retrying global allocation because the register was
199 formerly spilled from one of them. If we allowed reallocating a pseudo to
200 a register that it was already allocated to, reload might not
201 terminate. */
202 static HARD_REG_SET *pseudo_previous_regs;
203
204 /* This vector of reg sets indicates, for each pseudo, which hard
205 registers may not be used for retrying global allocation because they
206 are used as spill registers during one of the insns in which the
207 pseudo is live. */
208 static HARD_REG_SET *pseudo_forbidden_regs;
209
210 /* All hard regs that have been used as spill registers for any insn are
211 marked in this set. */
212 static HARD_REG_SET used_spill_regs;
213
214 /* Index of last register assigned as a spill register. We allocate in
215 a round-robin fashion. */
216 static int last_spill_reg;
217
218 /* Nonzero if indirect addressing is supported on the machine; this means
219 that spilling (REG n) does not require reloading it into a register in
220 order to do (MEM (REG n)) or (MEM (PLUS (REG n) (CONST_INT c))). The
221 value indicates the level of indirect addressing supported, e.g., two
222 means that (MEM (MEM (REG n))) is also valid if (REG n) does not get
223 a hard register. */
224 static char spill_indirect_levels;
225
226 /* Nonzero if indirect addressing is supported when the innermost MEM is
227 of the form (MEM (SYMBOL_REF sym)). It is assumed that the level to
228 which these are valid is the same as spill_indirect_levels, above. */
229 char indirect_symref_ok;
230
231 /* Nonzero if an address (plus (reg frame_pointer) (reg ...)) is valid. */
232 char double_reg_address_ok;
233
234 /* Record the stack slot for each spilled hard register. */
235 static rtx spill_stack_slot[FIRST_PSEUDO_REGISTER];
236
237 /* Width allocated so far for that stack slot. */
238 static unsigned int spill_stack_slot_width[FIRST_PSEUDO_REGISTER];
239
240 /* Record which pseudos needed to be spilled. */
241 static regset_head spilled_pseudos;
242
243 /* Used for communication between order_regs_for_reload and count_pseudo.
244 Used to avoid counting one pseudo twice. */
245 static regset_head pseudos_counted;
246
247 /* First uid used by insns created by reload in this function.
248 Used in find_equiv_reg. */
249 int reload_first_uid;
250
251 /* Flag set by local-alloc or global-alloc if anything is live in
252 a call-clobbered reg across calls. */
253 int caller_save_needed;
254
255 /* Set to 1 while reload_as_needed is operating.
256 Required by some machines to handle any generated moves differently. */
257 int reload_in_progress = 0;
258
259 /* These arrays record the insn_code of insns that may be needed to
260 perform input and output reloads of special objects. They provide a
261 place to pass a scratch register. */
262 enum insn_code reload_in_optab[NUM_MACHINE_MODES];
263 enum insn_code reload_out_optab[NUM_MACHINE_MODES];
264
265 /* This obstack is used for allocation of rtl during register elimination.
266 The allocated storage can be freed once find_reloads has processed the
267 insn. */
268 struct obstack reload_obstack;
269
270 /* Points to the beginning of the reload_obstack. All insn_chain structures
271 are allocated first. */
272 char *reload_startobj;
273
274 /* The point after all insn_chain structures. Used to quickly deallocate
275 memory allocated in copy_reloads during calculate_needs_all_insns. */
276 char *reload_firstobj;
277
278 /* This points before all local rtl generated by register elimination.
279 Used to quickly free all memory after processing one insn. */
280 static char *reload_insn_firstobj;
281
282 #define obstack_chunk_alloc xmalloc
283 #define obstack_chunk_free free
284
285 /* List of insn_chain instructions, one for every insn that reload needs to
286 examine. */
287 struct insn_chain *reload_insn_chain;
288
289 #ifdef TREE_CODE
290 extern tree current_function_decl;
291 #else
292 extern union tree_node *current_function_decl;
293 #endif
294
295 /* List of all insns needing reloads. */
296 static struct insn_chain *insns_need_reload;
297 \f
298 /* This structure is used to record information about register eliminations.
299 Each array entry describes one possible way of eliminating a register
300 in favor of another. If there is more than one way of eliminating a
301 particular register, the most preferred should be specified first. */
302
303 struct elim_table
304 {
305 int from; /* Register number to be eliminated. */
306 int to; /* Register number used as replacement. */
307 int initial_offset; /* Initial difference between values. */
308 int can_eliminate; /* Non-zero if this elimination can be done. */
309 int can_eliminate_previous; /* Value of CAN_ELIMINATE in previous scan over
310 insns made by reload. */
311 int offset; /* Current offset between the two regs. */
312 int previous_offset; /* Offset at end of previous insn. */
313 int ref_outside_mem; /* "to" has been referenced outside a MEM. */
314 rtx from_rtx; /* REG rtx for the register to be eliminated.
315 We cannot simply compare the number since
316 we might then spuriously replace a hard
317 register corresponding to a pseudo
318 assigned to the reg to be eliminated. */
319 rtx to_rtx; /* REG rtx for the replacement. */
320 };
321
322 static struct elim_table *reg_eliminate = 0;
323
324 /* This is an intermediate structure to initialize the table. It has
325 exactly the members provided by ELIMINABLE_REGS. */
326 static struct elim_table_1
327 {
328 int from;
329 int to;
330 } reg_eliminate_1[] =
331
332 /* If a set of eliminable registers was specified, define the table from it.
333 Otherwise, default to the normal case of the frame pointer being
334 replaced by the stack pointer. */
335
336 #ifdef ELIMINABLE_REGS
337 ELIMINABLE_REGS;
338 #else
339 {{ FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM}};
340 #endif
341
342 #define NUM_ELIMINABLE_REGS ARRAY_SIZE (reg_eliminate_1)
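
/* For illustration only -- a hypothetical target definition, not part of
   this file: a port that can replace the argument pointer and the frame
   pointer with either the hard frame pointer or the stack pointer might
   provide

	#define ELIMINABLE_REGS					\
	{{ ARG_POINTER_REGNUM,   STACK_POINTER_REGNUM },	\
	 { ARG_POINTER_REGNUM,   HARD_FRAME_POINTER_REGNUM },	\
	 { FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM },	\
	 { FRAME_POINTER_REGNUM, HARD_FRAME_POINTER_REGNUM }}

   which makes NUM_ELIMINABLE_REGS equal 4, with the preferred replacement
   for each register listed first.  */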
343
344 /* Record the number of pending eliminations that have an offset not equal
345 to their initial offset. If non-zero, we use a new copy of each
346 replacement result in any insns encountered. */
347 int num_not_at_initial_offset;
348
349 /* Count the number of registers that we may be able to eliminate. */
350 static int num_eliminable;
351 /* And the number of registers that are equivalent to a constant that
352 can be eliminated to frame_pointer / arg_pointer + constant. */
353 static int num_eliminable_invariants;
354
355 /* For each label, we record the offset of each elimination. If we reach
356 a label by more than one path and an offset differs, we cannot do the
357 elimination. This information is indexed by the number of the label.
358 The first table is an array of flags that records whether we have yet
359 encountered a label and the second table is an array of arrays, one
360 entry in the latter array for each elimination. */
361
362 static char *offsets_known_at;
363 static int (*offsets_at)[NUM_ELIMINABLE_REGS];
364
365 /* Number of labels in the current function. */
366
367 static int num_labels;
368 \f
369 static void replace_pseudos_in_call_usage PARAMS((rtx *,
370 enum machine_mode,
371 rtx));
372 static void maybe_fix_stack_asms PARAMS ((void));
373 static void copy_reloads PARAMS ((struct insn_chain *));
374 static void calculate_needs_all_insns PARAMS ((int));
375 static int find_reg PARAMS ((struct insn_chain *, int));
376 static void find_reload_regs PARAMS ((struct insn_chain *));
377 static void select_reload_regs PARAMS ((void));
378 static void delete_caller_save_insns PARAMS ((void));
379
380 static void spill_failure PARAMS ((rtx, enum reg_class));
381 static void count_spilled_pseudo PARAMS ((int, int, int));
382 static void delete_dead_insn PARAMS ((rtx));
383 static void alter_reg PARAMS ((int, int));
384 static void set_label_offsets PARAMS ((rtx, rtx, int));
385 static void check_eliminable_occurrences PARAMS ((rtx));
386 static void elimination_effects PARAMS ((rtx, enum machine_mode));
387 static int eliminate_regs_in_insn PARAMS ((rtx, int));
388 static void update_eliminable_offsets PARAMS ((void));
389 static void mark_not_eliminable PARAMS ((rtx, rtx, void *));
390 static void set_initial_elim_offsets PARAMS ((void));
391 static void verify_initial_elim_offsets PARAMS ((void));
392 static void set_initial_label_offsets PARAMS ((void));
393 static void set_offsets_for_label PARAMS ((rtx));
394 static void init_elim_table PARAMS ((void));
395 static void update_eliminables PARAMS ((HARD_REG_SET *));
396 static void spill_hard_reg PARAMS ((unsigned int, int));
397 static int finish_spills PARAMS ((int));
398 static void ior_hard_reg_set PARAMS ((HARD_REG_SET *, HARD_REG_SET *));
399 static void scan_paradoxical_subregs PARAMS ((rtx));
400 static void count_pseudo PARAMS ((int));
401 static void order_regs_for_reload PARAMS ((struct insn_chain *));
402 static void reload_as_needed PARAMS ((int));
403 static void forget_old_reloads_1 PARAMS ((rtx, rtx, void *));
404 static int reload_reg_class_lower PARAMS ((const PTR, const PTR));
405 static void mark_reload_reg_in_use PARAMS ((unsigned int, int,
406 enum reload_type,
407 enum machine_mode));
408 static void clear_reload_reg_in_use PARAMS ((unsigned int, int,
409 enum reload_type,
410 enum machine_mode));
411 static int reload_reg_free_p PARAMS ((unsigned int, int,
412 enum reload_type));
413 static int reload_reg_free_for_value_p PARAMS ((int, int, int,
414 enum reload_type,
415 rtx, rtx, int, int));
416 static int free_for_value_p PARAMS ((int, enum machine_mode, int,
417 enum reload_type, rtx, rtx,
418 int, int));
419 static int reload_reg_reaches_end_p PARAMS ((unsigned int, int,
420 enum reload_type));
421 static int allocate_reload_reg PARAMS ((struct insn_chain *, int,
422 int));
423 static int conflicts_with_override PARAMS ((rtx));
424 static void failed_reload PARAMS ((rtx, int));
425 static int set_reload_reg PARAMS ((int, int));
426 static void choose_reload_regs_init PARAMS ((struct insn_chain *, rtx *));
427 static void choose_reload_regs PARAMS ((struct insn_chain *));
428 static void merge_assigned_reloads PARAMS ((rtx));
429 static void emit_input_reload_insns PARAMS ((struct insn_chain *,
430 struct reload *, rtx, int));
431 static void emit_output_reload_insns PARAMS ((struct insn_chain *,
432 struct reload *, int));
433 static void do_input_reload PARAMS ((struct insn_chain *,
434 struct reload *, int));
435 static void do_output_reload PARAMS ((struct insn_chain *,
436 struct reload *, int));
437 static void emit_reload_insns PARAMS ((struct insn_chain *));
438 static void delete_output_reload PARAMS ((rtx, int, int));
439 static void delete_address_reloads PARAMS ((rtx, rtx));
440 static void delete_address_reloads_1 PARAMS ((rtx, rtx, rtx));
441 static rtx inc_for_reload PARAMS ((rtx, rtx, rtx, int));
442 static int constraint_accepts_reg_p PARAMS ((const char *, rtx));
443 static void reload_cse_regs_1 PARAMS ((rtx));
444 static int reload_cse_noop_set_p PARAMS ((rtx));
445 static int reload_cse_simplify_set PARAMS ((rtx, rtx));
446 static int reload_cse_simplify_operands PARAMS ((rtx));
447 static void reload_combine PARAMS ((void));
448 static void reload_combine_note_use PARAMS ((rtx *, rtx));
449 static void reload_combine_note_store PARAMS ((rtx, rtx, void *));
450 static void reload_cse_move2add PARAMS ((rtx));
451 static void move2add_note_store PARAMS ((rtx, rtx, void *));
452 #ifdef AUTO_INC_DEC
453 static void add_auto_inc_notes PARAMS ((rtx, rtx));
454 #endif
455 static void copy_eh_notes PARAMS ((rtx, rtx));
456 static HOST_WIDE_INT sext_for_mode PARAMS ((enum machine_mode,
457 HOST_WIDE_INT));
460 static void reload_cse_delete_noop_set PARAMS ((rtx, rtx));
461 static void reload_cse_simplify PARAMS ((rtx));
462 static void fixup_abnormal_edges PARAMS ((void));
463 extern void dump_needs PARAMS ((struct insn_chain *));
464 \f
465 /* Initialize the reload pass once per compilation. */
466
467 void
468 init_reload ()
469 {
470 register int i;
471
472 /* Often (MEM (REG n)) is still valid even if (REG n) is put on the stack.
473 Set spill_indirect_levels to the number of levels such addressing is
474 permitted, zero if it is not permitted at all. */
475
476 register rtx tem
477 = gen_rtx_MEM (Pmode,
478 gen_rtx_PLUS (Pmode,
479 gen_rtx_REG (Pmode,
480 LAST_VIRTUAL_REGISTER + 1),
481 GEN_INT (4)));
482 spill_indirect_levels = 0;
483
484 while (memory_address_p (QImode, tem))
485 {
486 spill_indirect_levels++;
487 tem = gen_rtx_MEM (Pmode, tem);
488 }
489
490 /* See if indirect addressing is valid for (MEM (SYMBOL_REF ...)). */
491
492 tem = gen_rtx_MEM (Pmode, gen_rtx_SYMBOL_REF (Pmode, "foo"));
493 indirect_symref_ok = memory_address_p (QImode, tem);
494
495 /* See if reg+reg is a valid (and offsettable) address. */
496
497 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
498 {
499 tem = gen_rtx_PLUS (Pmode,
500 gen_rtx_REG (Pmode, HARD_FRAME_POINTER_REGNUM),
501 gen_rtx_REG (Pmode, i));
502
503 /* This way, we make sure that reg+reg is an offsettable address. */
504 tem = plus_constant (tem, 4);
505
506 if (memory_address_p (QImode, tem))
507 {
508 double_reg_address_ok = 1;
509 break;
510 }
511 }
512
513 /* Initialize obstack for our rtl allocation. */
514 gcc_obstack_init (&reload_obstack);
515 reload_startobj = (char *) obstack_alloc (&reload_obstack, 0);
516
517 INIT_REG_SET (&spilled_pseudos);
518 INIT_REG_SET (&pseudos_counted);
519 }
520
521 /* List of insn chains that are currently unused. */
522 static struct insn_chain *unused_insn_chains = 0;
523
524 /* Allocate an empty insn_chain structure. */
525 struct insn_chain *
526 new_insn_chain ()
527 {
528 struct insn_chain *c;
529
530 if (unused_insn_chains == 0)
531 {
532 c = (struct insn_chain *)
533 obstack_alloc (&reload_obstack, sizeof (struct insn_chain));
534 INIT_REG_SET (&c->live_throughout);
535 INIT_REG_SET (&c->dead_or_set);
536 }
537 else
538 {
539 c = unused_insn_chains;
540 unused_insn_chains = c->next;
541 }
542 c->is_caller_save_insn = 0;
543 c->need_operand_change = 0;
544 c->need_reload = 0;
545 c->need_elim = 0;
546 return c;
547 }
548
549 /* Small utility function to set all regs in hard reg set TO which are
550 allocated to pseudos in regset FROM. */
551
552 void
553 compute_use_by_pseudos (to, from)
554 HARD_REG_SET *to;
555 regset from;
556 {
557 unsigned int regno;
558
559 EXECUTE_IF_SET_IN_REG_SET
560 (from, FIRST_PSEUDO_REGISTER, regno,
561 {
562 int r = reg_renumber[regno];
563 int nregs;
564
565 if (r < 0)
566 {
567 /* reload_combine uses the information from
568 BASIC_BLOCK->global_live_at_start, which might still
569 contain registers that have not actually been allocated
570 since they have an equivalence. */
571 if (! reload_completed)
572 abort ();
573 }
574 else
575 {
576 nregs = HARD_REGNO_NREGS (r, PSEUDO_REGNO_MODE (regno));
577 while (nregs-- > 0)
578 SET_HARD_REG_BIT (*to, r + nregs);
579 }
580 });
581 }
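
/* Typical use (an illustrative sketch, not a quote from any caller): given
   the set of pseudos live through an insn_chain element CHAIN, collect the
   hard regs they occupy:

	HARD_REG_SET used;

	CLEAR_HARD_REG_SET (used);
	compute_use_by_pseudos (&used, &chain->live_throughout);

   Note that the function only ORs bits into TO, so callers must clear or
   otherwise initialize the set first.  */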
582
583 /* Replace all pseudos found in LOC with their corresponding
584 equivalences. */
585
586 static void
587 replace_pseudos_in_call_usage (loc, mem_mode, usage)
588 rtx *loc;
589 enum machine_mode mem_mode;
590 rtx usage;
591 {
592 rtx x = *loc;
593 enum rtx_code code;
594 const char *fmt;
595 int i, j;
596
597 if (! x)
598 return;
599
600 code = GET_CODE (x);
601 if (code == REG)
602 {
603 unsigned int regno = REGNO (x);
604
605 if (regno < FIRST_PSEUDO_REGISTER)
606 return;
607
608 x = eliminate_regs (x, mem_mode, usage);
609 if (x != *loc)
610 {
611 *loc = x;
612 replace_pseudos_in_call_usage (loc, mem_mode, usage);
613 return;
614 }
615
616 if (reg_equiv_constant[regno])
617 *loc = reg_equiv_constant[regno];
618 else if (reg_equiv_mem[regno])
619 *loc = reg_equiv_mem[regno];
620 else if (reg_equiv_address[regno])
621 *loc = gen_rtx_MEM (GET_MODE (x), reg_equiv_address[regno]);
622 else if (GET_CODE (regno_reg_rtx[regno]) != REG
623 || REGNO (regno_reg_rtx[regno]) != regno)
624 *loc = regno_reg_rtx[regno];
625 else
626 abort ();
627
628 return;
629 }
630 else if (code == MEM)
631 {
632 replace_pseudos_in_call_usage (& XEXP (x, 0), GET_MODE (x), usage);
633 return;
634 }
635
636 /* Process each of our operands recursively. */
637 fmt = GET_RTX_FORMAT (code);
638 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
639 if (*fmt == 'e')
640 replace_pseudos_in_call_usage (&XEXP (x, i), mem_mode, usage);
641 else if (*fmt == 'E')
642 for (j = 0; j < XVECLEN (x, i); j++)
643 replace_pseudos_in_call_usage (& XVECEXP (x, i, j), mem_mode, usage);
644 }
645
646 \f
647 /* Global variables used by reload and its subroutines. */
648
649 /* Set during calculate_needs if an insn needs register elimination. */
650 static int something_needs_elimination;
651 /* Set during calculate_needs if an insn needs an operand changed. */
652 int something_needs_operands_changed;
653
654 /* Nonzero means we couldn't get enough spill regs. */
655 static int failure;
656
657 /* Main entry point for the reload pass.
658
659 FIRST is the first insn of the function being compiled.
660
661 GLOBAL nonzero means we were called from global_alloc
662 and should attempt to reallocate any pseudoregs that we
663 displace from hard regs we will use for reloads.
664 If GLOBAL is zero, we do not have enough information to do that,
665 so any pseudo reg that is spilled must go to the stack.
666
667 Return value is nonzero if reload failed
668 and we must not do any more for this function. */
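
/* Illustrative note, not a quote from the real call sites (those live in
   the register allocators and toplev.c): reload runs once per function
   after register allocation, roughly

	failed = reload (get_insns (), 0);	(only local-alloc was run)
	failed = reload (get_insns (), 1);	(called via global alloc)

   and a nonzero return means the caller must give up on this function.  */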
669
670 int
671 reload (first, global)
672 rtx first;
673 int global;
674 {
675 register int i;
676 register rtx insn;
677 register struct elim_table *ep;
678
679 /* The two pointers used to track the true location of the memory used
680 for label offsets. */
681 char *real_known_ptr = NULL;
682 int (*real_at_ptr)[NUM_ELIMINABLE_REGS];
683
684 /* Make sure even insns with volatile mem refs are recognizable. */
685 init_recog ();
686
687 failure = 0;
688
689 reload_firstobj = (char *) obstack_alloc (&reload_obstack, 0);
690
691 /* Make sure that the last insn in the chain
692 is not something that needs reloading. */
693 emit_note (NULL, NOTE_INSN_DELETED);
694
695 /* Enable find_equiv_reg to distinguish insns made by reload. */
696 reload_first_uid = get_max_uid ();
697
698 #ifdef SECONDARY_MEMORY_NEEDED
699 /* Initialize the secondary memory table. */
700 clear_secondary_mem ();
701 #endif
702
703 /* We don't have a stack slot for any spill reg yet. */
704 memset ((char *) spill_stack_slot, 0, sizeof spill_stack_slot);
705 memset ((char *) spill_stack_slot_width, 0, sizeof spill_stack_slot_width);
706
707 /* Initialize the save area information for caller-save, in case some
708 are needed. */
709 init_save_areas ();
710
711 /* Compute which hard registers are now in use
712 as homes for pseudo registers.
713 This is done here rather than (eg) in global_alloc
714 because this point is reached even if not optimizing. */
715 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
716 mark_home_live (i);
717
718 /* A function that receives a nonlocal goto must save all call-saved
719 registers. */
720 if (current_function_has_nonlocal_label)
721 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
722 if (! call_used_regs[i] && ! fixed_regs[i] && ! LOCAL_REGNO (i))
723 regs_ever_live[i] = 1;
724
725 /* Find all the pseudo registers that didn't get hard regs
726 but do have known equivalent constants or memory slots.
727 These include parameters (known equivalent to parameter slots)
728 and cse'd or loop-moved constant memory addresses.
729
730 Record constant equivalents in reg_equiv_constant
731 so they will be substituted by find_reloads.
732 Record memory equivalents in reg_mem_equiv so they can
733 be substituted eventually by altering the REG-rtx's. */
734
735 reg_equiv_constant = (rtx *) xcalloc (max_regno, sizeof (rtx));
736 reg_equiv_mem = (rtx *) xcalloc (max_regno, sizeof (rtx));
737 reg_equiv_init = (rtx *) xcalloc (max_regno, sizeof (rtx));
738 reg_equiv_address = (rtx *) xcalloc (max_regno, sizeof (rtx));
739 reg_max_ref_width = (unsigned int *) xcalloc (max_regno, sizeof (int));
740 reg_old_renumber = (short *) xcalloc (max_regno, sizeof (short));
741 memcpy (reg_old_renumber, reg_renumber, max_regno * sizeof (short));
742 pseudo_forbidden_regs
743 = (HARD_REG_SET *) xmalloc (max_regno * sizeof (HARD_REG_SET));
744 pseudo_previous_regs
745 = (HARD_REG_SET *) xcalloc (max_regno, sizeof (HARD_REG_SET));
746
747 CLEAR_HARD_REG_SET (bad_spill_regs_global);
748
749 /* Look for REG_EQUIV notes; record what each pseudo is equivalent to.
750 Also find all paradoxical subregs and find largest such for each pseudo.
751 On machines with small register classes, record hard registers that
752 are used for user variables. These can never be used for spills.
753 Also look for a "constant" REG_SETJMP. This means that all
754 caller-saved registers must be marked live. */
755
756 num_eliminable_invariants = 0;
757 for (insn = first; insn; insn = NEXT_INSN (insn))
758 {
759 rtx set = single_set (insn);
760
761 if (GET_CODE (insn) == CALL_INSN
762 && find_reg_note (insn, REG_SETJMP, NULL))
763 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
764 if (! call_used_regs[i])
765 regs_ever_live[i] = 1;
766
767 if (set != 0 && GET_CODE (SET_DEST (set)) == REG)
768 {
769 rtx note = find_reg_note (insn, REG_EQUIV, NULL_RTX);
770 if (note
771 #ifdef LEGITIMATE_PIC_OPERAND_P
772 && (! function_invariant_p (XEXP (note, 0))
773 || ! flag_pic
774 || LEGITIMATE_PIC_OPERAND_P (XEXP (note, 0)))
775 #endif
776 )
777 {
778 rtx x = XEXP (note, 0);
779 i = REGNO (SET_DEST (set));
780 if (i > LAST_VIRTUAL_REGISTER)
781 {
782 if (GET_CODE (x) == MEM)
783 {
784 /* Always unshare the equivalence, so we can
785 substitute into this insn without touching the
786 equivalence. */
787 reg_equiv_memory_loc[i] = copy_rtx (x);
788 }
789 else if (function_invariant_p (x))
790 {
791 if (GET_CODE (x) == PLUS)
792 {
793 /* This is PLUS of frame pointer and a constant,
794 and might be shared. Unshare it. */
795 reg_equiv_constant[i] = copy_rtx (x);
796 num_eliminable_invariants++;
797 }
798 else if (x == frame_pointer_rtx
799 || x == arg_pointer_rtx)
800 {
801 reg_equiv_constant[i] = x;
802 num_eliminable_invariants++;
803 }
804 else if (LEGITIMATE_CONSTANT_P (x))
805 reg_equiv_constant[i] = x;
806 else
807 reg_equiv_memory_loc[i]
808 = force_const_mem (GET_MODE (SET_DEST (set)), x);
809 }
810 else
811 continue;
812
813 /* If this register is being made equivalent to a MEM
814 and the MEM is not SET_SRC, the equivalencing insn
815 is one with the MEM as a SET_DEST and it occurs later.
816 So don't mark this insn now. */
817 if (GET_CODE (x) != MEM
818 || rtx_equal_p (SET_SRC (set), x))
819 reg_equiv_init[i]
820 = gen_rtx_INSN_LIST (VOIDmode, insn, reg_equiv_init[i]);
821 }
822 }
823 }
824
825 /* If this insn is setting a MEM from a register equivalent to it,
826 this is the equivalencing insn. */
827 else if (set && GET_CODE (SET_DEST (set)) == MEM
828 && GET_CODE (SET_SRC (set)) == REG
829 && reg_equiv_memory_loc[REGNO (SET_SRC (set))]
830 && rtx_equal_p (SET_DEST (set),
831 reg_equiv_memory_loc[REGNO (SET_SRC (set))]))
832 reg_equiv_init[REGNO (SET_SRC (set))]
833 = gen_rtx_INSN_LIST (VOIDmode, insn,
834 reg_equiv_init[REGNO (SET_SRC (set))]);
835
836 if (INSN_P (insn))
837 scan_paradoxical_subregs (PATTERN (insn));
838 }
839
840 init_elim_table ();
841
842 num_labels = max_label_num () - get_first_label_num ();
843
844 /* Allocate the tables used to store offset information at labels. */
845 /* We used to use alloca here, but the size of what it would try to
846 allocate would occasionally cause it to exceed the stack limit and
847 cause a core dump. */
848 real_known_ptr = xmalloc (num_labels);
849 real_at_ptr
850 = (int (*)[NUM_ELIMINABLE_REGS])
851 xmalloc (num_labels * NUM_ELIMINABLE_REGS * sizeof (int));
852
853 offsets_known_at = real_known_ptr - get_first_label_num ();
854 offsets_at
855 = (int (*)[NUM_ELIMINABLE_REGS]) (real_at_ptr - get_first_label_num ());
856
857 /* Alter each pseudo-reg rtx to contain its hard reg number.
858 Assign stack slots to the pseudos that lack hard regs or equivalents.
859 Do not touch virtual registers. */
860
861 for (i = LAST_VIRTUAL_REGISTER + 1; i < max_regno; i++)
862 alter_reg (i, -1);
863
864 /* If we have some registers we think can be eliminated, scan all insns to
865 see if there is an insn that sets one of these registers to something
866 other than itself plus a constant. If so, the register cannot be
867 eliminated. Doing this scan here eliminates an extra pass through the
868 main reload loop in the most common case where register elimination
869 cannot be done. */
870 for (insn = first; insn && num_eliminable; insn = NEXT_INSN (insn))
871 if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
872 || GET_CODE (insn) == CALL_INSN)
873 note_stores (PATTERN (insn), mark_not_eliminable, NULL);
874
875 maybe_fix_stack_asms ();
876
877 insns_need_reload = 0;
878 something_needs_elimination = 0;
879
880 /* Initialize to -1, which means take the first spill register. */
881 last_spill_reg = -1;
882
883 /* Spill any hard regs that we know we can't eliminate. */
884 CLEAR_HARD_REG_SET (used_spill_regs);
885 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
886 if (! ep->can_eliminate)
887 spill_hard_reg (ep->from, 1);
888
889 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
890 if (frame_pointer_needed)
891 spill_hard_reg (HARD_FRAME_POINTER_REGNUM, 1);
892 #endif
893 finish_spills (global);
894
895 /* From now on, we may need to generate moves differently. We may also
896 allow modifications of insns which cause them to not be recognized.
897 Any such modifications will be cleaned up during reload itself. */
898 reload_in_progress = 1;
899
900 /* This loop scans the entire function each go-round
901 and repeats until one repetition spills no additional hard regs. */
902 for (;;)
903 {
904 int something_changed;
905 int did_spill;
906
907 HOST_WIDE_INT starting_frame_size;
908
909 /* Round size of stack frame to stack_alignment_needed. This must be done
910 here because the stack size may be a part of the offset computation
911 for register elimination, and there might have been new stack slots
912 created in the last iteration of this loop. */
913 if (cfun->stack_alignment_needed)
914 assign_stack_local (BLKmode, 0, cfun->stack_alignment_needed);
915
916 starting_frame_size = get_frame_size ();
917
918 set_initial_elim_offsets ();
919 set_initial_label_offsets ();
920
921 /* For each pseudo register that has an equivalent location defined,
922 try to eliminate any eliminable registers (such as the frame pointer)
923 assuming initial offsets for the replacement register, which
924 is the normal case.
925
926 If the resulting location is directly addressable, substitute
927 the MEM we just got directly for the old REG.
928
929 If it is not addressable but is a constant or the sum of a hard reg
930 and constant, it is probably not addressable because the constant is
931 out of range, in that case record the address; we will generate
932 hairy code to compute the address in a register each time it is
933 needed. Similarly if it is a hard register, but one that is not
934 valid as an address register.
935
936 If the location is not addressable, but does not have one of the
937 above forms, assign a stack slot. We have to do this to avoid the
938 potential of producing lots of reloads if, e.g., a location involves
939 a pseudo that didn't get a hard register and has an equivalent memory
940 location that also involves a pseudo that didn't get a hard register.
941
942 Perhaps at some point we will improve reload_when_needed handling
943 so this problem goes away. But that's very hairy. */
944
945 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
946 if (reg_renumber[i] < 0 && reg_equiv_memory_loc[i])
947 {
948 rtx x = eliminate_regs (reg_equiv_memory_loc[i], 0, NULL_RTX);
949
950 if (strict_memory_address_p (GET_MODE (regno_reg_rtx[i]),
951 XEXP (x, 0)))
952 reg_equiv_mem[i] = x, reg_equiv_address[i] = 0;
953 else if (CONSTANT_P (XEXP (x, 0))
954 || (GET_CODE (XEXP (x, 0)) == REG
955 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
956 || (GET_CODE (XEXP (x, 0)) == PLUS
957 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
958 && (REGNO (XEXP (XEXP (x, 0), 0))
959 < FIRST_PSEUDO_REGISTER)
960 && CONSTANT_P (XEXP (XEXP (x, 0), 1))))
961 reg_equiv_address[i] = XEXP (x, 0), reg_equiv_mem[i] = 0;
962 else
963 {
964 /* Make a new stack slot. Then indicate that something
965 changed so we go back and recompute offsets for
966 eliminable registers because the allocation of memory
967 below might change some offset. reg_equiv_{mem,address}
968 will be set up for this pseudo on the next pass around
969 the loop. */
970 reg_equiv_memory_loc[i] = 0;
971 reg_equiv_init[i] = 0;
972 alter_reg (i, -1);
973 }
974 }
975
976 if (caller_save_needed)
977 setup_save_areas ();
978
979 /* If we allocated another stack slot, redo elimination bookkeeping. */
980 if (starting_frame_size != get_frame_size ())
981 continue;
982
983 if (caller_save_needed)
984 {
985 save_call_clobbered_regs ();
986 /* That might have allocated new insn_chain structures. */
987 reload_firstobj = (char *) obstack_alloc (&reload_obstack, 0);
988 }
989
990 calculate_needs_all_insns (global);
991
992 CLEAR_REG_SET (&spilled_pseudos);
993 did_spill = 0;
994
995 something_changed = 0;
996
997 /* If we allocated any new memory locations, make another pass
998 since it might have changed elimination offsets. */
999 if (starting_frame_size != get_frame_size ())
1000 something_changed = 1;
1001
1002 {
1003 HARD_REG_SET to_spill;
1004 CLEAR_HARD_REG_SET (to_spill);
1005 update_eliminables (&to_spill);
1006 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1007 if (TEST_HARD_REG_BIT (to_spill, i))
1008 {
1009 spill_hard_reg (i, 1);
1010 did_spill = 1;
1011
1012 /* Regardless of the state of spills, if we previously had
1013 		     a register that we thought we could eliminate, but can no
1014 		     longer eliminate, we must run another pass.
1015
1016 Consider pseudos which have an entry in reg_equiv_* which
1017 reference an eliminable register. We must make another pass
1018 to update reg_equiv_* so that we do not substitute in the
1019 old value from when we thought the elimination could be
1020 performed. */
1021 something_changed = 1;
1022 }
1023 }
1024
1025 select_reload_regs ();
1026 if (failure)
1027 goto failed;
1028
1029 if (insns_need_reload != 0 || did_spill)
1030 something_changed |= finish_spills (global);
1031
1032 if (! something_changed)
1033 break;
1034
1035 if (caller_save_needed)
1036 delete_caller_save_insns ();
1037
1038 obstack_free (&reload_obstack, reload_firstobj);
1039 }
1040
1041 /* If global-alloc was run, notify it of any register eliminations we have
1042 done. */
1043 if (global)
1044 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
1045 if (ep->can_eliminate)
1046 mark_elimination (ep->from, ep->to);
1047
1048 /* If a pseudo has no hard reg, delete the insns that made the equivalence.
1049 If that insn didn't set the register (i.e., it copied the register to
1050 memory), just delete that insn instead of the equivalencing insn plus
1051 anything now dead. If we call delete_dead_insn on that insn, we may
1052 delete the insn that actually sets the register if the register dies
1053 there and that is incorrect. */
1054
1055 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1056 {
1057 if (reg_renumber[i] < 0 && reg_equiv_init[i] != 0)
1058 {
1059 rtx list;
1060 for (list = reg_equiv_init[i]; list; list = XEXP (list, 1))
1061 {
1062 rtx equiv_insn = XEXP (list, 0);
1063 if (GET_CODE (equiv_insn) == NOTE)
1064 continue;
1065 if (reg_set_p (regno_reg_rtx[i], PATTERN (equiv_insn)))
1066 delete_dead_insn (equiv_insn);
1067 else
1068 {
1069 PUT_CODE (equiv_insn, NOTE);
1070 NOTE_SOURCE_FILE (equiv_insn) = 0;
1071 NOTE_LINE_NUMBER (equiv_insn) = NOTE_INSN_DELETED;
1072 }
1073 }
1074 }
1075 }
1076
1077 /* Use the reload registers where necessary
1078 by generating move instructions to move the must-be-register
1079 values into or out of the reload registers. */
1080
1081 if (insns_need_reload != 0 || something_needs_elimination
1082 || something_needs_operands_changed)
1083 {
1084 HOST_WIDE_INT old_frame_size = get_frame_size ();
1085
1086 reload_as_needed (global);
1087
1088 if (old_frame_size != get_frame_size ())
1089 abort ();
1090
1091 if (num_eliminable)
1092 verify_initial_elim_offsets ();
1093 }
1094
1095 /* If we were able to eliminate the frame pointer, show that it is no
1096      longer live at the start of any basic block.  If it is live by
1097 virtue of being in a pseudo, that pseudo will be marked live
1098 and hence the frame pointer will be known to be live via that
1099 pseudo. */
1100
1101 if (! frame_pointer_needed)
1102 for (i = 0; i < n_basic_blocks; i++)
1103 CLEAR_REGNO_REG_SET (BASIC_BLOCK (i)->global_live_at_start,
1104 HARD_FRAME_POINTER_REGNUM);
1105
1106 /* Come here (with failure set nonzero) if we can't get enough spill regs
1107      and we decide not to abort because of it.  */
1108 failed:
1109
1110 CLEAR_REG_SET (&spilled_pseudos);
1111 reload_in_progress = 0;
1112
1113 /* Now eliminate all pseudo regs by modifying them into
1114 their equivalent memory references.
1115 The REG-rtx's for the pseudos are modified in place,
1116 so all insns that used to refer to them now refer to memory.
1117
1118 For a reg that has a reg_equiv_address, all those insns
1119 were changed by reloading so that no insns refer to it any longer;
1120 but the DECL_RTL of a variable decl may refer to it,
1121 and if so this causes the debugging info to mention the variable. */
1122
1123 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
1124 {
1125 rtx addr = 0;
1126 int in_struct = 0;
1127 int is_scalar = 0;
1128 int is_readonly = 0;
1129
1130 if (reg_equiv_memory_loc[i])
1131 {
1132 in_struct = MEM_IN_STRUCT_P (reg_equiv_memory_loc[i]);
1133 is_scalar = MEM_SCALAR_P (reg_equiv_memory_loc[i]);
1134 is_readonly = RTX_UNCHANGING_P (reg_equiv_memory_loc[i]);
1135 }
1136
1137 if (reg_equiv_mem[i])
1138 addr = XEXP (reg_equiv_mem[i], 0);
1139
1140 if (reg_equiv_address[i])
1141 addr = reg_equiv_address[i];
1142
1143 if (addr)
1144 {
1145 if (reg_renumber[i] < 0)
1146 {
1147 rtx reg = regno_reg_rtx[i];
1148 PUT_CODE (reg, MEM);
1149 XEXP (reg, 0) = addr;
1150 REG_USERVAR_P (reg) = 0;
1151 RTX_UNCHANGING_P (reg) = is_readonly;
1152 MEM_IN_STRUCT_P (reg) = in_struct;
1153 MEM_SCALAR_P (reg) = is_scalar;
1154 /* We have no alias information about this newly created
1155 MEM. */
1156 set_mem_alias_set (reg, 0);
1157 }
1158 else if (reg_equiv_mem[i])
1159 XEXP (reg_equiv_mem[i], 0) = addr;
1160 }
1161 }
1162
1163 /* We must set reload_completed now since the cleanup_subreg_operands call
1164 below will re-recognize each insn and reload may have generated insns
1165 which are only valid during and after reload. */
1166 reload_completed = 1;
1167
1168 /* Make a pass over all the insns and delete all USEs which we inserted
1169 only to tag a REG_EQUAL note on them. Remove all REG_DEAD and REG_UNUSED
1170 notes. Delete all CLOBBER insns that don't refer to the return value
1171 and simplify (subreg (reg)) operands. Also remove all REG_RETVAL and
1172 REG_LIBCALL notes since they are no longer useful or accurate. Strip
1173 and regenerate REG_INC notes that may have been moved around. */
1174
1175 for (insn = first; insn; insn = NEXT_INSN (insn))
1176 if (INSN_P (insn))
1177 {
1178 rtx *pnote;
1179
1180 if (GET_CODE (insn) == CALL_INSN)
1181 replace_pseudos_in_call_usage (& CALL_INSN_FUNCTION_USAGE (insn),
1182 VOIDmode,
1183 CALL_INSN_FUNCTION_USAGE (insn));
1184
1185 if ((GET_CODE (PATTERN (insn)) == USE
1186 && find_reg_note (insn, REG_EQUAL, NULL_RTX))
1187 || (GET_CODE (PATTERN (insn)) == CLOBBER
1188 && (GET_CODE (XEXP (PATTERN (insn), 0)) != REG
1189 || ! REG_FUNCTION_VALUE_P (XEXP (PATTERN (insn), 0)))))
1190 {
1191 delete_insn (insn);
1192 continue;
1193 }
1194
1195 pnote = &REG_NOTES (insn);
1196 while (*pnote != 0)
1197 {
1198 if (REG_NOTE_KIND (*pnote) == REG_DEAD
1199 || REG_NOTE_KIND (*pnote) == REG_UNUSED
1200 || REG_NOTE_KIND (*pnote) == REG_INC
1201 || REG_NOTE_KIND (*pnote) == REG_RETVAL
1202 || REG_NOTE_KIND (*pnote) == REG_LIBCALL)
1203 *pnote = XEXP (*pnote, 1);
1204 else
1205 pnote = &XEXP (*pnote, 1);
1206 }
1207
1208 #ifdef AUTO_INC_DEC
1209 add_auto_inc_notes (insn, PATTERN (insn));
1210 #endif
1211
1212 /* And simplify (subreg (reg)) if it appears as an operand. */
1213 cleanup_subreg_operands (insn);
1214 }
1215
1216 /* If we are doing stack checking, give a warning if this function's
1217 frame size is larger than we expect. */
1218 if (flag_stack_check && ! STACK_CHECK_BUILTIN)
1219 {
1220 HOST_WIDE_INT size = get_frame_size () + STACK_CHECK_FIXED_FRAME_SIZE;
1221 static int verbose_warned = 0;
1222
1223 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1224 if (regs_ever_live[i] && ! fixed_regs[i] && call_used_regs[i])
1225 size += UNITS_PER_WORD;
1226
1227 if (size > STACK_CHECK_MAX_FRAME_SIZE)
1228 {
1229 warning ("frame size too large for reliable stack checking");
1230 if (! verbose_warned)
1231 {
1232 warning ("try reducing the number of local variables");
1233 verbose_warned = 1;
1234 }
1235 }
1236 }
1237
1238 /* Indicate that we no longer have known memory locations or constants. */
1239 if (reg_equiv_constant)
1240 free (reg_equiv_constant);
1241 reg_equiv_constant = 0;
1242 if (reg_equiv_memory_loc)
1243 free (reg_equiv_memory_loc);
1244 reg_equiv_memory_loc = 0;
1245
1246 if (real_known_ptr)
1247 free (real_known_ptr);
1248 if (real_at_ptr)
1249 free (real_at_ptr);
1250
1251 free (reg_equiv_mem);
1252 free (reg_equiv_init);
1253 free (reg_equiv_address);
1254 free (reg_max_ref_width);
1255 free (reg_old_renumber);
1256 free (pseudo_previous_regs);
1257 free (pseudo_forbidden_regs);
1258
1259 CLEAR_HARD_REG_SET (used_spill_regs);
1260 for (i = 0; i < n_spills; i++)
1261 SET_HARD_REG_BIT (used_spill_regs, spill_regs[i]);
1262
1263 /* Free all the insn_chain structures at once. */
1264 obstack_free (&reload_obstack, reload_startobj);
1265 unused_insn_chains = 0;
1266 compute_bb_for_insn (get_max_uid ());
1267 fixup_abnormal_edges ();
1268
1269 return failure;
1270 }
1271
1272 /* Yet another special case. Unfortunately, reg-stack forces people to
1273 write incorrect clobbers in asm statements. These clobbers must not
1274 cause the register to appear in bad_spill_regs, otherwise we'll call
1275 fatal_insn later. We clear the corresponding regnos in the live
1276 register sets to avoid this.
1277 The whole thing is rather sick, I'm afraid. */
1278
1279 static void
1280 maybe_fix_stack_asms ()
1281 {
1282 #ifdef STACK_REGS
1283 const char *constraints[MAX_RECOG_OPERANDS];
1284 enum machine_mode operand_mode[MAX_RECOG_OPERANDS];
1285 struct insn_chain *chain;
1286
1287 for (chain = reload_insn_chain; chain != 0; chain = chain->next)
1288 {
1289 int i, noperands;
1290 HARD_REG_SET clobbered, allowed;
1291 rtx pat;
1292
1293 if (! INSN_P (chain->insn)
1294 || (noperands = asm_noperands (PATTERN (chain->insn))) < 0)
1295 continue;
1296 pat = PATTERN (chain->insn);
1297 if (GET_CODE (pat) != PARALLEL)
1298 continue;
1299
1300 CLEAR_HARD_REG_SET (clobbered);
1301 CLEAR_HARD_REG_SET (allowed);
1302
1303 /* First, make a mask of all stack regs that are clobbered. */
1304 for (i = 0; i < XVECLEN (pat, 0); i++)
1305 {
1306 rtx t = XVECEXP (pat, 0, i);
1307 if (GET_CODE (t) == CLOBBER && STACK_REG_P (XEXP (t, 0)))
1308 SET_HARD_REG_BIT (clobbered, REGNO (XEXP (t, 0)));
1309 }
1310
1311 /* Get the operand values and constraints out of the insn. */
1312 decode_asm_operands (pat, recog_data.operand, recog_data.operand_loc,
1313 constraints, operand_mode);
1314
1315 /* For every operand, see what registers are allowed. */
1316 for (i = 0; i < noperands; i++)
1317 {
1318 const char *p = constraints[i];
1319 /* For every alternative, we compute the class of registers allowed
1320 for reloading in CLS, and merge its contents into the reg set
1321 ALLOWED. */
1322 int cls = (int) NO_REGS;
1323
1324 for (;;)
1325 {
1326 char c = *p++;
1327
1328 if (c == '\0' || c == ',' || c == '#')
1329 {
1330 /* End of one alternative - mark the regs in the current
1331 class, and reset the class. */
1332 IOR_HARD_REG_SET (allowed, reg_class_contents[cls]);
1333 cls = NO_REGS;
1334 if (c == '#')
1335 do {
1336 c = *p++;
1337 } while (c != '\0' && c != ',');
1338 if (c == '\0')
1339 break;
1340 continue;
1341 }
1342
1343 switch (c)
1344 {
1345 case '=': case '+': case '*': case '%': case '?': case '!':
1346 case '0': case '1': case '2': case '3': case '4': case 'm':
1347 case '<': case '>': case 'V': case 'o': case '&': case 'E':
1348 case 'F': case 's': case 'i': case 'n': case 'X': case 'I':
1349 case 'J': case 'K': case 'L': case 'M': case 'N': case 'O':
1350 case 'P':
1351 break;
1352
1353 case 'p':
1354 cls = (int) reg_class_subunion[cls][(int) BASE_REG_CLASS];
1355 break;
1356
1357 case 'g':
1358 case 'r':
1359 cls = (int) reg_class_subunion[cls][(int) GENERAL_REGS];
1360 break;
1361
1362 default:
1363 cls = (int) reg_class_subunion[cls][(int) REG_CLASS_FROM_LETTER (c)];
1364
1365 }
1366 }
1367 }
1368 /* Those of the registers which are clobbered, but allowed by the
1369 constraints, must be usable as reload registers. So clear them
1370 out of the life information. */
1371 AND_HARD_REG_SET (allowed, clobbered);
1372 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1373 if (TEST_HARD_REG_BIT (allowed, i))
1374 {
1375 CLEAR_REGNO_REG_SET (&chain->live_throughout, i);
1376 CLEAR_REGNO_REG_SET (&chain->dead_or_set, i);
1377 }
1378 }
1379
1380 #endif
1381 }
1382 \f
1383 /* Copy the global variables n_reloads and rld into the corresponding elts
1384 of CHAIN. */
1385 static void
1386 copy_reloads (chain)
1387 struct insn_chain *chain;
1388 {
1389 chain->n_reloads = n_reloads;
1390 chain->rld
1391 = (struct reload *) obstack_alloc (&reload_obstack,
1392 n_reloads * sizeof (struct reload));
1393 memcpy (chain->rld, rld, n_reloads * sizeof (struct reload));
1394 reload_insn_firstobj = (char *) obstack_alloc (&reload_obstack, 0);
1395 }
1396
1397 /* Walk the chain of insns, and determine for each whether it needs reloads
1398 and/or eliminations. Build the corresponding insns_need_reload list, and
1399 set something_needs_elimination as appropriate. */
1400 static void
1401 calculate_needs_all_insns (global)
1402 int global;
1403 {
1404 struct insn_chain **pprev_reload = &insns_need_reload;
1405 struct insn_chain *chain, *next = 0;
1406
1407 something_needs_elimination = 0;
1408
1409 reload_insn_firstobj = (char *) obstack_alloc (&reload_obstack, 0);
1410 for (chain = reload_insn_chain; chain != 0; chain = next)
1411 {
1412 rtx insn = chain->insn;
1413
1414 next = chain->next;
1415
1416 /* Clear out the shortcuts. */
1417 chain->n_reloads = 0;
1418 chain->need_elim = 0;
1419 chain->need_reload = 0;
1420 chain->need_operand_change = 0;
1421
1422 /* If this is a label, a JUMP_INSN, or has REG_NOTES (which might
1423 include REG_LABEL), we need to see what effects this has on the
1424 known offsets at labels. */
1425
1426 if (GET_CODE (insn) == CODE_LABEL || GET_CODE (insn) == JUMP_INSN
1427 || (INSN_P (insn) && REG_NOTES (insn) != 0))
1428 set_label_offsets (insn, insn, 0);
1429
1430 if (INSN_P (insn))
1431 {
1432 rtx old_body = PATTERN (insn);
1433 int old_code = INSN_CODE (insn);
1434 rtx old_notes = REG_NOTES (insn);
1435 int did_elimination = 0;
1436 int operands_changed = 0;
1437 rtx set = single_set (insn);
1438
1439 /* Skip insns that only set an equivalence. */
1440 if (set && GET_CODE (SET_DEST (set)) == REG
1441 && reg_renumber[REGNO (SET_DEST (set))] < 0
1442 && reg_equiv_constant[REGNO (SET_DEST (set))])
1443 continue;
1444
1445 /* If needed, eliminate any eliminable registers. */
1446 if (num_eliminable || num_eliminable_invariants)
1447 did_elimination = eliminate_regs_in_insn (insn, 0);
1448
1449 /* Analyze the instruction. */
1450 operands_changed = find_reloads (insn, 0, spill_indirect_levels,
1451 global, spill_reg_order);
1452
1453 /* If a no-op set needs more than one reload, this is likely
1454 to be something that needs input address reloads. We
1455 can't get rid of this cleanly later, and it is of no use
1456 anyway, so discard it now.
1457 We only do this when expensive_optimizations is enabled,
1458 since this complements reload inheritance / output
1459 reload deletion, and it can make debugging harder. */
1460 if (flag_expensive_optimizations && n_reloads > 1)
1461 {
1462 rtx set = single_set (insn);
1463 if (set
1464 && SET_SRC (set) == SET_DEST (set)
1465 && GET_CODE (SET_SRC (set)) == REG
1466 && REGNO (SET_SRC (set)) >= FIRST_PSEUDO_REGISTER)
1467 {
1468 PUT_CODE (insn, NOTE);
1469 NOTE_SOURCE_FILE (insn) = 0;
1470 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1471 /* Delete it from the reload chain */
1472 if (chain->prev)
1473 chain->prev->next = next;
1474 else
1475 reload_insn_chain = next;
1476 if (next)
1477 next->prev = chain->prev;
1478 chain->next = unused_insn_chains;
1479 unused_insn_chains = chain;
1480 continue;
1481 }
1482 }
1483 if (num_eliminable)
1484 update_eliminable_offsets ();
1485
1486 /* Remember for later shortcuts which insns had any reloads or
1487 register eliminations. */
1488 chain->need_elim = did_elimination;
1489 chain->need_reload = n_reloads > 0;
1490 chain->need_operand_change = operands_changed;
1491
1492 /* Discard any register replacements done. */
1493 if (did_elimination)
1494 {
1495 obstack_free (&reload_obstack, reload_insn_firstobj);
1496 PATTERN (insn) = old_body;
1497 INSN_CODE (insn) = old_code;
1498 REG_NOTES (insn) = old_notes;
1499 something_needs_elimination = 1;
1500 }
1501
1502 something_needs_operands_changed |= operands_changed;
1503
1504 if (n_reloads != 0)
1505 {
1506 copy_reloads (chain);
1507 *pprev_reload = chain;
1508 pprev_reload = &chain->next_need_reload;
1509 }
1510 }
1511 }
1512 *pprev_reload = 0;
1513 }
1514 \f
1515 /* Comparison function for qsort to decide which of two reloads
1516 should be handled first. *P1 and *P2 are the reload numbers. */
1517
1518 static int
1519 reload_reg_class_lower (r1p, r2p)
1520 const PTR r1p;
1521 const PTR r2p;
1522 {
1523 register int r1 = *(const short *) r1p, r2 = *(const short *) r2p;
1524 register int t;
1525
1526 /* Consider required reloads before optional ones. */
1527 t = rld[r1].optional - rld[r2].optional;
1528 if (t != 0)
1529 return t;
1530
1531 /* Count all solitary classes before non-solitary ones. */
1532 t = ((reg_class_size[(int) rld[r2].class] == 1)
1533 - (reg_class_size[(int) rld[r1].class] == 1));
1534 if (t != 0)
1535 return t;
1536
1537 /* Aside from solitaires, consider all multi-reg groups first. */
1538 t = rld[r2].nregs - rld[r1].nregs;
1539 if (t != 0)
1540 return t;
1541
1542 /* Consider reloads in order of increasing reg-class number. */
1543 t = (int) rld[r1].class - (int) rld[r2].class;
1544 if (t != 0)
1545 return t;
1546
1547 /* If reloads are equally urgent, sort by reload number,
1548 so that the results of qsort leave nothing to chance. */
1549 return r1 - r2;
1550 }
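
/* reload_reg_class_lower is meant as a qsort comparison function; its user
   in this file sorts reload *numbers* rather than the reloads themselves.
   A minimal sketch of that use (illustrative, not quoted from the caller):

	short order[MAX_RELOADS];
	int i;

	for (i = 0; i < n_reloads; i++)
	  order[i] = i;
	qsort (order, n_reloads, sizeof (short), reload_reg_class_lower);
   */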
1551 \f
1552 /* The cost of spilling each hard reg. */
1553 static int spill_cost[FIRST_PSEUDO_REGISTER];
1554
1555 /* When spilling multiple hard registers, we use SPILL_COST for the first
1556    spilled hard reg and SPILL_ADD_COST for subsequent regs.  SPILL_ADD_COST
1557    is counted only against the first hard reg of a multi-reg pseudo.  */
1558 static int spill_add_cost[FIRST_PSEUDO_REGISTER];
1559
1560 /* Update the spill cost arrays, considering that pseudo REG is live. */
1561
1562 static void
1563 count_pseudo (reg)
1564 int reg;
1565 {
1566 int freq = REG_FREQ (reg);
1567 int r = reg_renumber[reg];
1568 int nregs;
1569
1570 if (REGNO_REG_SET_P (&pseudos_counted, reg)
1571 || REGNO_REG_SET_P (&spilled_pseudos, reg))
1572 return;
1573
1574 SET_REGNO_REG_SET (&pseudos_counted, reg);
1575
1576 if (r < 0)
1577 abort ();
1578
1579 spill_add_cost[r] += freq;
1580
1581 nregs = HARD_REGNO_NREGS (r, PSEUDO_REGNO_MODE (reg));
1582 while (nregs-- > 0)
1583 spill_cost[r + nregs] += freq;
1584 }
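
/* As an illustration (the hard reg numbers and frequency are invented):
   if pseudo REG has REG_FREQ 5 and reg_renumber placed it in the two hard
   regs 8 and 9, count_pseudo adds 5 to spill_add_cost[8] and to both
   spill_cost[8] and spill_cost[9].  find_reg below then charges a
   two-register reload wanting regs 8 and 9 only
   spill_cost[8] + spill_add_cost[9] == 5, so the pseudo is counted once
   even though both of its registers would be taken.  */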
1585
1586 /* Calculate the SPILL_COST and SPILL_ADD_COST arrays and determine the
1587 contents of BAD_SPILL_REGS for the insn described by CHAIN. */
1588
1589 static void
1590 order_regs_for_reload (chain)
1591 struct insn_chain *chain;
1592 {
1593 int i;
1594 HARD_REG_SET used_by_pseudos;
1595 HARD_REG_SET used_by_pseudos2;
1596
1597 COPY_HARD_REG_SET (bad_spill_regs, fixed_reg_set);
1598
1599 memset (spill_cost, 0, sizeof spill_cost);
1600 memset (spill_add_cost, 0, sizeof spill_add_cost);
1601
1602 /* Count number of uses of each hard reg by pseudo regs allocated to it
1603 and then order them by decreasing use. First exclude hard registers
1604 that are live in or across this insn. */
1605
1606 REG_SET_TO_HARD_REG_SET (used_by_pseudos, &chain->live_throughout);
1607 REG_SET_TO_HARD_REG_SET (used_by_pseudos2, &chain->dead_or_set);
1608 IOR_HARD_REG_SET (bad_spill_regs, used_by_pseudos);
1609 IOR_HARD_REG_SET (bad_spill_regs, used_by_pseudos2);
1610
1611 /* Now find out which pseudos are live during this insn, and update
1612 the spill costs of the hard regs they are allocated to. */
1613 CLEAR_REG_SET (&pseudos_counted);
1614
1615 EXECUTE_IF_SET_IN_REG_SET
1616 (&chain->live_throughout, FIRST_PSEUDO_REGISTER, i,
1617 {
1618 count_pseudo (i);
1619 });
1620 EXECUTE_IF_SET_IN_REG_SET
1621 (&chain->dead_or_set, FIRST_PSEUDO_REGISTER, i,
1622 {
1623 count_pseudo (i);
1624 });
1625 CLEAR_REG_SET (&pseudos_counted);
1626 }
1627 \f
1628 /* Vector of reload-numbers showing the order in which the reloads should
1629 be processed. */
1630 static short reload_order[MAX_RELOADS];
1631
1632 /* This is used to keep track of the spill regs used in one insn. */
1633 static HARD_REG_SET used_spill_regs_local;
1634
1635 /* We decided to spill hard register SPILLED, which has a size of
1636 SPILLED_NREGS. Determine how pseudo REG, which is live during the insn,
1637 is affected. We will add it to SPILLED_PSEUDOS if necessary, and we will
1638 update SPILL_COST/SPILL_ADD_COST. */
1639
1640 static void
1641 count_spilled_pseudo (spilled, spilled_nregs, reg)
1642 int spilled, spilled_nregs, reg;
1643 {
1644 int r = reg_renumber[reg];
1645 int nregs = HARD_REGNO_NREGS (r, PSEUDO_REGNO_MODE (reg));
1646
1647 if (REGNO_REG_SET_P (&spilled_pseudos, reg)
1648 || spilled + spilled_nregs <= r || r + nregs <= spilled)
1649 return;
1650
1651 SET_REGNO_REG_SET (&spilled_pseudos, reg);
1652
1653 spill_add_cost[r] -= REG_FREQ (reg);
1654 while (nregs-- > 0)
1655 spill_cost[r + nregs] -= REG_FREQ (reg);
1656 }
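
/* Illustration (all register numbers invented): with SPILLED == 8 and
   SPILLED_NREGS == 2, a pseudo living in hard regs 9 and 10 overlaps the
   spilled window, so count_spilled_pseudo adds it to spilled_pseudos and
   subtracts its frequency from the cost arrays again; a pseudo living in
   regs 10 and 11 satisfies spilled + spilled_nregs <= r and is left
   alone.  */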
1657
1658 /* Find reload register to use for reload number ORDER. */
1659
1660 static int
1661 find_reg (chain, order)
1662 struct insn_chain *chain;
1663 int order;
1664 {
1665 int rnum = reload_order[order];
1666 struct reload *rl = rld + rnum;
1667 int best_cost = INT_MAX;
1668 int best_reg = -1;
1669 unsigned int i, j;
1670 int k;
1671 HARD_REG_SET not_usable;
1672 HARD_REG_SET used_by_other_reload;
1673
1674 COPY_HARD_REG_SET (not_usable, bad_spill_regs);
1675 IOR_HARD_REG_SET (not_usable, bad_spill_regs_global);
1676 IOR_COMPL_HARD_REG_SET (not_usable, reg_class_contents[rl->class]);
1677
1678 CLEAR_HARD_REG_SET (used_by_other_reload);
1679 for (k = 0; k < order; k++)
1680 {
1681 int other = reload_order[k];
1682
1683 if (rld[other].regno >= 0 && reloads_conflict (other, rnum))
1684 for (j = 0; j < rld[other].nregs; j++)
1685 SET_HARD_REG_BIT (used_by_other_reload, rld[other].regno + j);
1686 }
1687
1688 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1689 {
1690 unsigned int regno = i;
1691
1692 if (! TEST_HARD_REG_BIT (not_usable, regno)
1693 && ! TEST_HARD_REG_BIT (used_by_other_reload, regno)
1694 && HARD_REGNO_MODE_OK (regno, rl->mode))
1695 {
1696 int this_cost = spill_cost[regno];
1697 int ok = 1;
1698 unsigned int this_nregs = HARD_REGNO_NREGS (regno, rl->mode);
1699
1700 for (j = 1; j < this_nregs; j++)
1701 {
1702 this_cost += spill_add_cost[regno + j];
1703 if ((TEST_HARD_REG_BIT (not_usable, regno + j))
1704 || TEST_HARD_REG_BIT (used_by_other_reload, regno + j))
1705 ok = 0;
1706 }
1707 if (! ok)
1708 continue;
1709 if (rl->in && GET_CODE (rl->in) == REG && REGNO (rl->in) == regno)
1710 this_cost--;
1711 if (rl->out && GET_CODE (rl->out) == REG && REGNO (rl->out) == regno)
1712 this_cost--;
1713 if (this_cost < best_cost
1714 /* Among registers with equal cost, prefer caller-saved ones, or
1715 use REG_ALLOC_ORDER if it is defined. */
1716 || (this_cost == best_cost
1717 #ifdef REG_ALLOC_ORDER
1718 && (inv_reg_alloc_order[regno]
1719 < inv_reg_alloc_order[best_reg])
1720 #else
1721 && call_used_regs[regno]
1722 && ! call_used_regs[best_reg]
1723 #endif
1724 ))
1725 {
1726 best_reg = regno;
1727 best_cost = this_cost;
1728 }
1729 }
1730 }
1731 if (best_reg == -1)
1732 return 0;
1733
1734 if (rtl_dump_file)
1735 fprintf (rtl_dump_file, "Using reg %d for reload %d\n", best_reg, rnum);
1736
1737 rl->nregs = HARD_REGNO_NREGS (best_reg, rl->mode);
1738 rl->regno = best_reg;
1739
1740 EXECUTE_IF_SET_IN_REG_SET
1741 (&chain->live_throughout, FIRST_PSEUDO_REGISTER, j,
1742 {
1743 count_spilled_pseudo (best_reg, rl->nregs, j);
1744 });
1745
1746 EXECUTE_IF_SET_IN_REG_SET
1747 (&chain->dead_or_set, FIRST_PSEUDO_REGISTER, j,
1748 {
1749 count_spilled_pseudo (best_reg, rl->nregs, j);
1750 });
1751
1752 for (i = 0; i < rl->nregs; i++)
1753 {
1754 if (spill_cost[best_reg + i] != 0
1755 || spill_add_cost[best_reg + i] != 0)
1756 abort ();
1757 SET_HARD_REG_BIT (used_spill_regs_local, best_reg + i);
1758 }
1759 return 1;
1760 }
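
/* To make the cost computation in find_reg concrete (purely illustrative):
   for a two-register reload, a candidate starting at hard reg R is charged
   spill_cost[R] + spill_add_cost[R + 1], minus one for each of rl->in and
   rl->out that is already register R; among candidates of equal cost the
   caller-saved one is preferred, or the one appearing earlier in
   REG_ALLOC_ORDER when that macro is defined.  */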
1761
1762 /* Find more reload regs to satisfy the remaining need of an insn, which
1763 is given by CHAIN.
1764 Do it by ascending class number, since otherwise a reg
1765 might be spilled for a big class and might fail to count
1766 for a smaller class even though it belongs to that class. */
1767
1768 static void
1769 find_reload_regs (chain)
1770 struct insn_chain *chain;
1771 {
1772 int i;
1773
1774 /* In order to be certain of getting the registers we need,
1775 we must sort the reloads into order of increasing register class.
1776 Then our grabbing of reload registers will parallel the process
1777 that provided the reload registers. */
1778 for (i = 0; i < chain->n_reloads; i++)
1779 {
1780 /* Show whether this reload already has a hard reg. */
1781 if (chain->rld[i].reg_rtx)
1782 {
1783 int regno = REGNO (chain->rld[i].reg_rtx);
1784 chain->rld[i].regno = regno;
1785 chain->rld[i].nregs
1786 = HARD_REGNO_NREGS (regno, GET_MODE (chain->rld[i].reg_rtx));
1787 }
1788 else
1789 chain->rld[i].regno = -1;
1790 reload_order[i] = i;
1791 }
1792
1793 n_reloads = chain->n_reloads;
1794 memcpy (rld, chain->rld, n_reloads * sizeof (struct reload));
1795
1796 CLEAR_HARD_REG_SET (used_spill_regs_local);
1797
1798 if (rtl_dump_file)
1799 fprintf (rtl_dump_file, "Spilling for insn %d.\n", INSN_UID (chain->insn));
1800
1801 qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);
1802
1803 /* Compute the order of preference for hard registers to spill. */
1804
1805 order_regs_for_reload (chain);
1806
1807 for (i = 0; i < n_reloads; i++)
1808 {
1809 int r = reload_order[i];
1810
1811 /* Ignore reloads that got marked inoperative. */
1812 if ((rld[r].out != 0 || rld[r].in != 0 || rld[r].secondary_p)
1813 && ! rld[r].optional
1814 && rld[r].regno == -1)
1815 if (! find_reg (chain, i))
1816 {
1817 spill_failure (chain->insn, rld[r].class);
1818 failure = 1;
1819 return;
1820 }
1821 }
1822
1823 COPY_HARD_REG_SET (chain->used_spill_regs, used_spill_regs_local);
1824 IOR_HARD_REG_SET (used_spill_regs, used_spill_regs_local);
1825
1826 memcpy (chain->rld, rld, n_reloads * sizeof (struct reload));
1827 }
1828
1829 static void
1830 select_reload_regs ()
1831 {
1832 struct insn_chain *chain;
1833
1834 /* Try to satisfy the needs for each insn. */
1835 for (chain = insns_need_reload; chain != 0;
1836 chain = chain->next_need_reload)
1837 find_reload_regs (chain);
1838 }
1839 \f
1840 /* Delete all insns that were inserted by emit_caller_save_insns during
1841 this iteration. */
1842 static void
1843 delete_caller_save_insns ()
1844 {
1845 struct insn_chain *c = reload_insn_chain;
1846
1847 while (c != 0)
1848 {
1849 while (c != 0 && c->is_caller_save_insn)
1850 {
1851 struct insn_chain *next = c->next;
1852 rtx insn = c->insn;
1853
1854 if (insn == BLOCK_HEAD (c->block))
1855 BLOCK_HEAD (c->block) = NEXT_INSN (insn);
1856 if (insn == BLOCK_END (c->block))
1857 BLOCK_END (c->block) = PREV_INSN (insn);
1858 if (c == reload_insn_chain)
1859 reload_insn_chain = next;
1860
1861 if (NEXT_INSN (insn) != 0)
1862 PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
1863 if (PREV_INSN (insn) != 0)
1864 NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
1865
1866 if (next)
1867 next->prev = c->prev;
1868 if (c->prev)
1869 c->prev->next = next;
1870 c->next = unused_insn_chains;
1871 unused_insn_chains = c;
1872 c = next;
1873 }
1874 if (c != 0)
1875 c = c->next;
1876 }
1877 }
1878 \f
1879 /* Handle the failure to find a register to spill.
1880 INSN should be one of the insns which needed this particular spill reg. */
1881
1882 static void
1883 spill_failure (insn, class)
1884 rtx insn;
1885 enum reg_class class;
1886 {
1887 static const char *const reg_class_names[] = REG_CLASS_NAMES;
1888 if (asm_noperands (PATTERN (insn)) >= 0)
1889 error_for_asm (insn, "Can't find a register in class `%s' while reloading `asm'.",
1890 reg_class_names[class]);
1891 else
1892 {
1893 error ("Unable to find a register to spill in class `%s'.",
1894 reg_class_names[class]);
1895 fatal_insn ("This is the insn:", insn);
1896 }
1897 }
1898 \f
1899 /* Delete an unneeded INSN and any previous insns whose sole purpose is
1900 loading data that is dead in INSN. */
1901
1902 static void
1903 delete_dead_insn (insn)
1904 rtx insn;
1905 {
1906 rtx prev = prev_real_insn (insn);
1907 rtx prev_dest;
1908
1909 /* If the previous insn sets a register that dies in our insn, delete it
1910 too. */
1911 if (prev && GET_CODE (PATTERN (prev)) == SET
1912 && (prev_dest = SET_DEST (PATTERN (prev)), GET_CODE (prev_dest) == REG)
1913 && reg_mentioned_p (prev_dest, PATTERN (insn))
1914 && find_regno_note (insn, REG_DEAD, REGNO (prev_dest))
1915 && ! side_effects_p (SET_SRC (PATTERN (prev))))
1916 delete_dead_insn (prev);
1917
1918 PUT_CODE (insn, NOTE);
1919 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
1920 NOTE_SOURCE_FILE (insn) = 0;
1921 }
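
/* For instance (the register numbers are invented), if the unneeded insn is

	(set (reg:SI F) (reg:SI 117))

   where F is the frame pointer being eliminated, and the insn before it is

	(set (reg:SI 117) (plus:SI (reg:SI S) (const_int 16)))

   with (reg:SI 117) dying in the unneeded insn, delete_dead_insn turns
   both insns into NOTE_INSN_DELETED notes, since the only purpose of the
   earlier one was to compute a value that is dead here.  */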
1922
1923 /* Modify the home of pseudo-reg I.
1924 The new home is present in reg_renumber[I].
1925
1926 FROM_REG may be the hard reg that the pseudo-reg is being spilled from;
1927 or it may be -1, meaning there is none or it is not relevant.
1928 This is used so that all pseudos spilled from a given hard reg
1929 can share one stack slot. */
1930
1931 static void
1932 alter_reg (i, from_reg)
1933 register int i;
1934 int from_reg;
1935 {
1936 /* When outputting an inline function, this can happen
1937 for a reg that isn't actually used. */
1938 if (regno_reg_rtx[i] == 0)
1939 return;
1940
1941 /* If the reg got changed to a MEM at rtl-generation time,
1942 ignore it. */
1943 if (GET_CODE (regno_reg_rtx[i]) != REG)
1944 return;
1945
1946 /* Modify the reg-rtx to contain the new hard reg
1947 number or else to contain its pseudo reg number. */
1948 REGNO (regno_reg_rtx[i])
1949 = reg_renumber[i] >= 0 ? reg_renumber[i] : i;
1950
1951 /* If we have a pseudo that is needed but has no hard reg or equivalent,
1952 allocate a stack slot for it. */
1953
1954 if (reg_renumber[i] < 0
1955 && REG_N_REFS (i) > 0
1956 && reg_equiv_constant[i] == 0
1957 && reg_equiv_memory_loc[i] == 0)
1958 {
1959 register rtx x;
1960 unsigned int inherent_size = PSEUDO_REGNO_BYTES (i);
1961 unsigned int total_size = MAX (inherent_size, reg_max_ref_width[i]);
1962 int adjust = 0;
1963
1964 /* Each pseudo reg has an inherent size which comes from its own mode,
1965 and a total size which provides room for paradoxical subregs
1966 which refer to the pseudo reg in wider modes.
1967
1968 We can use a slot already allocated if it provides both
1969 enough inherent space and enough total space.
1970 Otherwise, we allocate a new slot, making sure that it has no less
1971 inherent space, and no less total space, than the previous slot. */
1972 if (from_reg == -1)
1973 {
1974 /* No known place to spill from => no slot to reuse. */
1975 x = assign_stack_local (GET_MODE (regno_reg_rtx[i]), total_size,
1976 inherent_size == total_size ? 0 : -1);
1977 if (BYTES_BIG_ENDIAN)
1978 /* Cancel the big-endian correction done in assign_stack_local.
1979 Get the address of the beginning of the slot.
1980 This is so we can do a big-endian correction unconditionally
1981 below. */
1982 adjust = inherent_size - total_size;
1983
1984 RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (regno_reg_rtx[i]);
1985
1986 /* Nothing can alias this slot except this pseudo. */
1987 set_mem_alias_set (x, new_alias_set ());
1988 }
1989
1990 /* Reuse a stack slot if possible. */
1991 else if (spill_stack_slot[from_reg] != 0
1992 && spill_stack_slot_width[from_reg] >= total_size
1993 && (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
1994 >= inherent_size))
1995 x = spill_stack_slot[from_reg];
1996
1997 /* Allocate a bigger slot. */
1998 else
1999 {
2000 /* Compute maximum size needed, both for inherent size
2001 and for total size. */
2002 enum machine_mode mode = GET_MODE (regno_reg_rtx[i]);
2003 rtx stack_slot;
2004
2005 if (spill_stack_slot[from_reg])
2006 {
2007 if (GET_MODE_SIZE (GET_MODE (spill_stack_slot[from_reg]))
2008 > inherent_size)
2009 mode = GET_MODE (spill_stack_slot[from_reg]);
2010 if (spill_stack_slot_width[from_reg] > total_size)
2011 total_size = spill_stack_slot_width[from_reg];
2012 }
2013
2014 /* Make a slot with that size. */
2015 x = assign_stack_local (mode, total_size,
2016 inherent_size == total_size ? 0 : -1);
2017 stack_slot = x;
2018
2019 /* All pseudos mapped to this slot can alias each other. */
2020 if (spill_stack_slot[from_reg])
2021 set_mem_alias_set (x, MEM_ALIAS_SET (spill_stack_slot[from_reg]));
2022 else
2023 set_mem_alias_set (x, new_alias_set ());
2024
2025 if (BYTES_BIG_ENDIAN)
2026 {
2027 /* Cancel the big-endian correction done in assign_stack_local.
2028 Get the address of the beginning of the slot.
2029 This is so we can do a big-endian correction unconditionally
2030 below. */
2031 adjust = GET_MODE_SIZE (mode) - total_size;
2032 if (adjust)
2033 stack_slot = gen_rtx_MEM (mode_for_size (total_size
2034 * BITS_PER_UNIT,
2035 MODE_INT, 1),
2036 plus_constant (XEXP (x, 0), adjust));
2037 }
2038
2039 spill_stack_slot[from_reg] = stack_slot;
2040 spill_stack_slot_width[from_reg] = total_size;
2041 }
2042
2043 /* On a big endian machine, the "address" of the slot
2044 is the address of the low part that fits its inherent mode. */
2045 if (BYTES_BIG_ENDIAN && inherent_size < total_size)
2046 adjust += (total_size - inherent_size);
2047
2048 /* If we have any adjustment to make, or if the stack slot is the
2049 wrong mode, make a new stack slot. */
2050 if (adjust != 0 || GET_MODE (x) != GET_MODE (regno_reg_rtx[i]))
2051 x = adjust_address_nv (x, GET_MODE (regno_reg_rtx[i]), adjust);
2052
2053 /* Save the stack slot for later. */
2054 reg_equiv_memory_loc[i] = x;
2055 }
2056 }
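
/* As an illustration of alter_reg's slot sizing (the modes and sizes are
   only typical values): a pseudo of mode HImode has an inherent size of 2
   bytes, but if it was also referenced through a paradoxical
   (subreg:SI ...) then reg_max_ref_width is 4 and total_size becomes 4.
   A slot recorded in spill_stack_slot[FROM_REG] is reused only if its
   width is at least that total size and its mode covers at least the
   inherent size; otherwise a bigger slot is allocated and remembered for
   later pseudos spilled from the same hard register.  */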
2057
2058 /* Mark the slots in regs_ever_live for the hard regs
2059 used by pseudo-reg number REGNO. */
2060
2061 void
2062 mark_home_live (regno)
2063 int regno;
2064 {
2065 register int i, lim;
2066
2067 i = reg_renumber[regno];
2068 if (i < 0)
2069 return;
2070 lim = i + HARD_REGNO_NREGS (i, PSEUDO_REGNO_MODE (regno));
2071 while (i < lim)
2072 regs_ever_live[i++] = 1;
2073 }
2074 \f
2075 /* This function handles the tracking of elimination offsets around branches.
2076
2077 X is a piece of RTL being scanned.
2078
2079 INSN is the insn that it came from, if any.
2080
2081 INITIAL_P is non-zero if we are to set the offset to be the initial
2082 offset and zero if we are setting the offset of the label to be the
2083 current offset. */
2084
2085 static void
2086 set_label_offsets (x, insn, initial_p)
2087 rtx x;
2088 rtx insn;
2089 int initial_p;
2090 {
2091 enum rtx_code code = GET_CODE (x);
2092 rtx tem;
2093 unsigned int i;
2094 struct elim_table *p;
2095
2096 switch (code)
2097 {
2098 case LABEL_REF:
2099 if (LABEL_REF_NONLOCAL_P (x))
2100 return;
2101
2102 x = XEXP (x, 0);
2103
2104 /* ... fall through ... */
2105
2106 case CODE_LABEL:
2107 /* If we know nothing about this label, set the desired offsets. Note
2108 that this sets the offset at a label to be the offset before a label
2109 if we don't know anything about the label. This is not correct for
2110 the label after a BARRIER, but is the best guess we can make. If
2111 we guessed wrong, we will suppress an elimination that might have
2112 been possible had we been able to guess correctly. */
2113
2114 if (! offsets_known_at[CODE_LABEL_NUMBER (x)])
2115 {
2116 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2117 offsets_at[CODE_LABEL_NUMBER (x)][i]
2118 = (initial_p ? reg_eliminate[i].initial_offset
2119 : reg_eliminate[i].offset);
2120 offsets_known_at[CODE_LABEL_NUMBER (x)] = 1;
2121 }
2122
2123 /* Otherwise, if this is the definition of a label and it is
2124 preceded by a BARRIER, set our offsets to the known offset of
2125 that label. */
2126
2127 else if (x == insn
2128 && (tem = prev_nonnote_insn (insn)) != 0
2129 && GET_CODE (tem) == BARRIER)
2130 set_offsets_for_label (insn);
2131 else
2132 /* If neither of the above cases is true, compare each offset
2133 with those previously recorded and suppress any eliminations
2134 where the offsets disagree. */
2135
2136 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
2137 if (offsets_at[CODE_LABEL_NUMBER (x)][i]
2138 != (initial_p ? reg_eliminate[i].initial_offset
2139 : reg_eliminate[i].offset))
2140 reg_eliminate[i].can_eliminate = 0;
2141
2142 return;
2143
2144 case JUMP_INSN:
2145 set_label_offsets (PATTERN (insn), insn, initial_p);
2146
2147 /* ... fall through ... */
2148
2149 case INSN:
2150 case CALL_INSN:
2151 /* Any labels mentioned in REG_LABEL notes can be branched to indirectly
2152 and hence must have all eliminations at their initial offsets. */
2153 for (tem = REG_NOTES (x); tem; tem = XEXP (tem, 1))
2154 if (REG_NOTE_KIND (tem) == REG_LABEL)
2155 set_label_offsets (XEXP (tem, 0), insn, 1);
2156 return;
2157
2158 case PARALLEL:
2159 case ADDR_VEC:
2160 case ADDR_DIFF_VEC:
2161 /* Each of the labels in the parallel or address vector must be
2162 at their initial offsets. We want the first field for PARALLEL
2163 and ADDR_VEC and the second field for ADDR_DIFF_VEC. */
2164
2165 for (i = 0; i < (unsigned) XVECLEN (x, code == ADDR_DIFF_VEC); i++)
2166 set_label_offsets (XVECEXP (x, code == ADDR_DIFF_VEC, i),
2167 insn, initial_p);
2168 return;
2169
2170 case SET:
2171 /* We only care about setting PC. If the source is not RETURN,
2172 IF_THEN_ELSE, or a label, disable any eliminations not at
2173 their initial offsets. Similarly if any arm of the IF_THEN_ELSE
2174 isn't one of those possibilities. For branches to a label,
2175 call ourselves recursively.
2176
2177 Note that this can disable elimination unnecessarily when we have
2178 a non-local goto since it will look like a non-constant jump to
2179 someplace in the current function. This isn't a significant
2180 problem since such jumps will normally occur when all elimination
2181 pairs are back to their initial offsets. */
2182
2183 if (SET_DEST (x) != pc_rtx)
2184 return;
2185
2186 switch (GET_CODE (SET_SRC (x)))
2187 {
2188 case PC:
2189 case RETURN:
2190 return;
2191
2192 case LABEL_REF:
2193 set_label_offsets (XEXP (SET_SRC (x), 0), insn, initial_p);
2194 return;
2195
2196 case IF_THEN_ELSE:
2197 tem = XEXP (SET_SRC (x), 1);
2198 if (GET_CODE (tem) == LABEL_REF)
2199 set_label_offsets (XEXP (tem, 0), insn, initial_p);
2200 else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2201 break;
2202
2203 tem = XEXP (SET_SRC (x), 2);
2204 if (GET_CODE (tem) == LABEL_REF)
2205 set_label_offsets (XEXP (tem, 0), insn, initial_p);
2206 else if (GET_CODE (tem) != PC && GET_CODE (tem) != RETURN)
2207 break;
2208 return;
2209
2210 default:
2211 break;
2212 }
2213
2214 /* If we reach here, all eliminations must be at their initial
2215 offset because we are doing a jump to a variable address. */
2216 for (p = reg_eliminate; p < &reg_eliminate[NUM_ELIMINABLE_REGS]; p++)
2217 if (p->offset != p->initial_offset)
2218 p->can_eliminate = 0;
2219 break;
2220
2221 default:
2222 break;
2223 }
2224 }
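
/* For instance, when set_label_offsets reaches

	(jump_insn (set (pc) (label_ref L)))

   and nothing is known yet about the code label L, the offsets currently
   in force are recorded as the offsets at L; if another path later
   reaches L with a different offset for some elimination, that
   elimination is marked as impossible, because the code at L cannot be
   correct for two different offsets.  (L stands for an arbitrary label.)  */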
2225 \f
2226 /* Scan X and replace any eliminable registers (such as fp) with a
2227 replacement (such as sp), plus an offset.
2228
2229 MEM_MODE is the mode of an enclosing MEM. We need this to know how
2230 much to adjust a register for, e.g., PRE_DEC. Also, if we are inside a
2231 MEM, we are allowed to replace a sum of a register and the constant zero
2232 with the register, which we cannot do outside a MEM. In addition, we need
2233 to record the fact that a register is referenced outside a MEM.
2234
2235 If INSN is an insn, it is the insn containing X. If we replace a REG
2236 in a SET_DEST with an equivalent MEM and INSN is non-zero, write a
2237 CLOBBER of the pseudo after INSN so find_equiv_regs will know that
2238 the REG is being modified.
2239
2240 Alternatively, INSN may be a note (an EXPR_LIST or INSN_LIST).
2241 That's used when we eliminate in expressions stored in notes.
2242 This means, do not set ref_outside_mem even if the reference
2243 is outside of MEMs.
2244
2245 REG_EQUIV_MEM and REG_EQUIV_ADDRESS contain addresses that have had
2246 replacements done assuming all offsets are at their initial values. If
2247 they are not, or if REG_EQUIV_ADDRESS is nonzero for a pseudo we
2248 encounter, return the actual location so that find_reloads will do
2249 the proper thing. */
2250
2251 rtx
2252 eliminate_regs (x, mem_mode, insn)
2253 rtx x;
2254 enum machine_mode mem_mode;
2255 rtx insn;
2256 {
2257 enum rtx_code code = GET_CODE (x);
2258 struct elim_table *ep;
2259 int regno;
2260 rtx new;
2261 int i, j;
2262 const char *fmt;
2263 int copied = 0;
2264
2265 if (! current_function_decl)
2266 return x;
2267
2268 switch (code)
2269 {
2270 case CONST_INT:
2271 case CONST_DOUBLE:
2272 case CONST:
2273 case SYMBOL_REF:
2274 case CODE_LABEL:
2275 case PC:
2276 case CC0:
2277 case ASM_INPUT:
2278 case ADDR_VEC:
2279 case ADDR_DIFF_VEC:
2280 case RETURN:
2281 return x;
2282
2283 case ADDRESSOF:
2284 /* This is only for the benefit of the debugging backends, which call
2285 eliminate_regs on DECL_RTL; any ADDRESSOFs in the actual insns are
2286 removed after CSE. */
2287 new = eliminate_regs (XEXP (x, 0), 0, insn);
2288 if (GET_CODE (new) == MEM)
2289 return XEXP (new, 0);
2290 return x;
2291
2292 case REG:
2293 regno = REGNO (x);
2294
2295 /* First handle the case where we encounter a bare register that
2296 is eliminable. Replace it with a PLUS. */
2297 if (regno < FIRST_PSEUDO_REGISTER)
2298 {
2299 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2300 ep++)
2301 if (ep->from_rtx == x && ep->can_eliminate)
2302 return plus_constant (ep->to_rtx, ep->previous_offset);
2303
2304 }
2305 else if (reg_renumber[regno] < 0 && reg_equiv_constant
2306 && reg_equiv_constant[regno]
2307 && ! CONSTANT_P (reg_equiv_constant[regno]))
2308 return eliminate_regs (copy_rtx (reg_equiv_constant[regno]),
2309 mem_mode, insn);
2310 return x;
2311
2312 /* You might think handling MINUS in a manner similar to PLUS is a
2313 good idea. It is not. It has been tried multiple times and every
2314 time the change has had to be reverted.
2315
2316 Other parts of reload know a PLUS is special (gen_reload for example)
2317 and require special code to handle a reloaded PLUS operand.
2318
2319 Also consider backends where the flags register is clobbered by a
2320 MINUS, but we can emit a PLUS that does not clobber flags (ia32,
2321 lea instruction comes to mind). If we try to reload a MINUS, we
2322 may kill the flags register that was holding a useful value.
2323
2324 So, please before trying to handle MINUS, consider reload as a
2325 whole instead of this little section as well as the backend issues. */
2326 case PLUS:
2327 /* If this is the sum of an eliminable register and a constant, rework
2328 the sum. */
2329 if (GET_CODE (XEXP (x, 0)) == REG
2330 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2331 && CONSTANT_P (XEXP (x, 1)))
2332 {
2333 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2334 ep++)
2335 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2336 {
2337 /* The only time we want to replace a PLUS with a REG (this
2338 occurs when the constant operand of the PLUS is the negative
2339 of the offset) is when we are inside a MEM. We won't want
2340 to do so at other times because that would change the
2341 structure of the insn in a way that reload can't handle.
2342 We special-case the commonest situation in
2343 eliminate_regs_in_insn, so just replace a PLUS with a
2344 PLUS here, unless inside a MEM. */
2345 if (mem_mode != 0 && GET_CODE (XEXP (x, 1)) == CONST_INT
2346 && INTVAL (XEXP (x, 1)) == - ep->previous_offset)
2347 return ep->to_rtx;
2348 else
2349 return gen_rtx_PLUS (Pmode, ep->to_rtx,
2350 plus_constant (XEXP (x, 1),
2351 ep->previous_offset));
2352 }
2353
2354 /* If the register is not eliminable, we are done since the other
2355 operand is a constant. */
2356 return x;
2357 }
2358
2359 /* If this is part of an address, we want to bring any constant to the
2360 outermost PLUS. We will do this by doing register replacement in
2361 our operands and seeing if a constant shows up in one of them.
2362
2363 Note that there is no risk of modifying the structure of the insn,
2364 since we only get called for its operands, thus we are either
2365 modifying the address inside a MEM, or something like an address
2366 operand of a load-address insn. */
2367
2368 {
2369 rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2370 rtx new1 = eliminate_regs (XEXP (x, 1), mem_mode, insn);
2371
2372 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
2373 {
2374 /* If one side is a PLUS and the other side is a pseudo that
2375 didn't get a hard register but has a reg_equiv_constant,
2376 we must replace the constant here since it may no longer
2377 be in the position of any operand. */
2378 if (GET_CODE (new0) == PLUS && GET_CODE (new1) == REG
2379 && REGNO (new1) >= FIRST_PSEUDO_REGISTER
2380 && reg_renumber[REGNO (new1)] < 0
2381 && reg_equiv_constant != 0
2382 && reg_equiv_constant[REGNO (new1)] != 0)
2383 new1 = reg_equiv_constant[REGNO (new1)];
2384 else if (GET_CODE (new1) == PLUS && GET_CODE (new0) == REG
2385 && REGNO (new0) >= FIRST_PSEUDO_REGISTER
2386 && reg_renumber[REGNO (new0)] < 0
2387 && reg_equiv_constant[REGNO (new0)] != 0)
2388 new0 = reg_equiv_constant[REGNO (new0)];
2389
2390 new = form_sum (new0, new1);
2391
2392 /* As above, if we are not inside a MEM we do not want to
2393 turn a PLUS into something else. We might try to do so here
2394 for an addition of 0 if we aren't optimizing. */
2395 if (! mem_mode && GET_CODE (new) != PLUS)
2396 return gen_rtx_PLUS (GET_MODE (x), new, const0_rtx);
2397 else
2398 return new;
2399 }
2400 }
2401 return x;
2402
2403 case MULT:
2404 /* If this is the product of an eliminable register and a
2405 constant, apply the distributive law and move the constant out
2406 so that we have (plus (mult ..) ..). This is needed in order
2407 to keep load-address insns valid. This case is pathological.
2408 We ignore the possibility of overflow here. */
2409 if (GET_CODE (XEXP (x, 0)) == REG
2410 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2411 && GET_CODE (XEXP (x, 1)) == CONST_INT)
2412 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2413 ep++)
2414 if (ep->from_rtx == XEXP (x, 0) && ep->can_eliminate)
2415 {
2416 if (! mem_mode
2417 /* Refs inside notes don't count for this purpose. */
2418 && ! (insn != 0 && (GET_CODE (insn) == EXPR_LIST
2419 || GET_CODE (insn) == INSN_LIST)))
2420 ep->ref_outside_mem = 1;
2421
2422 return
2423 plus_constant (gen_rtx_MULT (Pmode, ep->to_rtx, XEXP (x, 1)),
2424 ep->previous_offset * INTVAL (XEXP (x, 1)));
2425 }
2426
2427 /* ... fall through ... */
2428
2429 case CALL:
2430 case COMPARE:
2431 /* See comments before PLUS about handling MINUS. */
2432 case MINUS:
2433 case DIV: case UDIV:
2434 case MOD: case UMOD:
2435 case AND: case IOR: case XOR:
2436 case ROTATERT: case ROTATE:
2437 case ASHIFTRT: case LSHIFTRT: case ASHIFT:
2438 case NE: case EQ:
2439 case GE: case GT: case GEU: case GTU:
2440 case LE: case LT: case LEU: case LTU:
2441 {
2442 rtx new0 = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2443 rtx new1
2444 = XEXP (x, 1) ? eliminate_regs (XEXP (x, 1), mem_mode, insn) : 0;
2445
2446 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
2447 return gen_rtx_fmt_ee (code, GET_MODE (x), new0, new1);
2448 }
2449 return x;
2450
2451 case EXPR_LIST:
2452 /* If we have something in XEXP (x, 0), the usual case, eliminate it. */
2453 if (XEXP (x, 0))
2454 {
2455 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2456 if (new != XEXP (x, 0))
2457 {
2458 /* If this is a REG_DEAD note, it is not valid anymore.
2459 Using the eliminated version could result in creating a
2460 REG_DEAD note for the stack or frame pointer. */
2461 if (GET_MODE (x) == REG_DEAD)
2462 return (XEXP (x, 1)
2463 ? eliminate_regs (XEXP (x, 1), mem_mode, insn)
2464 : NULL_RTX);
2465
2466 x = gen_rtx_EXPR_LIST (REG_NOTE_KIND (x), new, XEXP (x, 1));
2467 }
2468 }
2469
2470 /* ... fall through ... */
2471
2472 case INSN_LIST:
2473 /* Now do eliminations in the rest of the chain. If this was
2474 an EXPR_LIST, this might result in allocating more memory than is
2475 strictly needed, but it simplifies the code. */
2476 if (XEXP (x, 1))
2477 {
2478 new = eliminate_regs (XEXP (x, 1), mem_mode, insn);
2479 if (new != XEXP (x, 1))
2480 return gen_rtx_fmt_ee (GET_CODE (x), GET_MODE (x), XEXP (x, 0), new);
2481 }
2482 return x;
2483
2484 case PRE_INC:
2485 case POST_INC:
2486 case PRE_DEC:
2487 case POST_DEC:
2488 case STRICT_LOW_PART:
2489 case NEG: case NOT:
2490 case SIGN_EXTEND: case ZERO_EXTEND:
2491 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
2492 case FLOAT: case FIX:
2493 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
2494 case ABS:
2495 case SQRT:
2496 case FFS:
2497 new = eliminate_regs (XEXP (x, 0), mem_mode, insn);
2498 if (new != XEXP (x, 0))
2499 return gen_rtx_fmt_e (code, GET_MODE (x), new);
2500 return x;
2501
2502 case SUBREG:
2503 /* Similar to above processing, but preserve SUBREG_BYTE.
2504 Convert (subreg (mem)) to (mem) if not paradoxical.
2505 Also, if we have a non-paradoxical (subreg (pseudo)) and the
2506 pseudo didn't get a hard reg, we must replace this with the
2507 eliminated version of the memory location because push_reloads
2508 may do the replacement in certain circumstances. */
2509 if (GET_CODE (SUBREG_REG (x)) == REG
2510 && (GET_MODE_SIZE (GET_MODE (x))
2511 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
2512 && reg_equiv_memory_loc != 0
2513 && reg_equiv_memory_loc[REGNO (SUBREG_REG (x))] != 0)
2514 {
2515 new = SUBREG_REG (x);
2516 }
2517 else
2518 new = eliminate_regs (SUBREG_REG (x), mem_mode, insn);
2519
2520 if (new != SUBREG_REG (x))
2521 {
2522 int x_size = GET_MODE_SIZE (GET_MODE (x));
2523 int new_size = GET_MODE_SIZE (GET_MODE (new));
2524
2525 if (GET_CODE (new) == MEM
2526 && ((x_size < new_size
2527 #ifdef WORD_REGISTER_OPERATIONS
2528 /* On these machines, combine can create rtl of the form
2529 (set (subreg:m1 (reg:m2 R) 0) ...)
2530 where m1 < m2, and expects something interesting to
2531 happen to the entire word. Moreover, it will use the
2532 (reg:m2 R) later, expecting all bits to be preserved.
2533 So if the number of words is the same, preserve the
2534 subreg so that push_reloads can see it. */
2535 && ! ((x_size - 1) / UNITS_PER_WORD
2536 == (new_size - 1) / UNITS_PER_WORD)
2537 #endif
2538 )
2539 || x_size == new_size)
2540 )
2541 {
2542 int offset = SUBREG_BYTE (x);
2543 enum machine_mode mode = GET_MODE (x);
2544
2545 PUT_MODE (new, mode);
2546 XEXP (new, 0) = plus_constant (XEXP (new, 0), offset);
2547 return new;
2548 }
2549 else
2550 return gen_rtx_SUBREG (GET_MODE (x), new, SUBREG_BYTE (x));
2551 }
2552
2553 return x;
2554
2555 case MEM:
2556 /* This is only for the benefit of the debugging backends, which call
2557 eliminate_regs on DECL_RTL; any ADDRESSOFs in the actual insns are
2558 removed after CSE. */
2559 if (GET_CODE (XEXP (x, 0)) == ADDRESSOF)
2560 return eliminate_regs (XEXP (XEXP (x, 0), 0), 0, insn);
2561
2562 /* Our only special processing is to pass the mode of the MEM to our
2563 recursive call and copy the flags. While we are here, handle this
2564 case more efficiently. */
2565 return
2566 replace_equiv_address_nv (x,
2567 eliminate_regs (XEXP (x, 0),
2568 GET_MODE (x), insn));
2569
2570 case USE:
2571 /* Handle insn_list USE that a call to a pure function may generate. */
2572 new = eliminate_regs (XEXP (x, 0), 0, insn);
2573 if (new != XEXP (x, 0))
2574 return gen_rtx_USE (GET_MODE (x), new);
2575 return x;
2576
2577 case CLOBBER:
2578 case ASM_OPERANDS:
2579 case SET:
2580 abort ();
2581
2582 default:
2583 break;
2584 }
2585
2586 /* Process each of our operands recursively. If any have changed, make a
2587 copy of the rtx. */
2588 fmt = GET_RTX_FORMAT (code);
2589 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2590 {
2591 if (*fmt == 'e')
2592 {
2593 new = eliminate_regs (XEXP (x, i), mem_mode, insn);
2594 if (new != XEXP (x, i) && ! copied)
2595 {
2596 rtx new_x = rtx_alloc (code);
2597 memcpy (new_x, x,
2598 (sizeof (*new_x) - sizeof (new_x->fld)
2599 + sizeof (new_x->fld[0]) * GET_RTX_LENGTH (code)));
2600 x = new_x;
2601 copied = 1;
2602 }
2603 XEXP (x, i) = new;
2604 }
2605 else if (*fmt == 'E')
2606 {
2607 int copied_vec = 0;
2608 for (j = 0; j < XVECLEN (x, i); j++)
2609 {
2610 new = eliminate_regs (XVECEXP (x, i, j), mem_mode, insn);
2611 if (new != XVECEXP (x, i, j) && ! copied_vec)
2612 {
2613 rtvec new_v = gen_rtvec_v (XVECLEN (x, i),
2614 XVEC (x, i)->elem);
2615 if (! copied)
2616 {
2617 rtx new_x = rtx_alloc (code);
2618 memcpy (new_x, x,
2619 (sizeof (*new_x) - sizeof (new_x->fld)
2620 + (sizeof (new_x->fld[0])
2621 * GET_RTX_LENGTH (code))));
2622 x = new_x;
2623 copied = 1;
2624 }
2625 XVEC (x, i) = new_v;
2626 copied_vec = 1;
2627 }
2628 XVECEXP (x, i, j) = new;
2629 }
2630 }
2631 }
2632
2633 return x;
2634 }
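
/* A small example of what eliminate_regs does (the offset is invented):
   if the frame pointer F is being eliminated in favour of the stack
   pointer S and the offset currently in force is 16, then

	(mem:SI (plus:SI (reg F) (const_int 4)))

   becomes

	(mem:SI (plus:SI (reg S) (const_int 20)))

   while a bare (reg F) outside a MEM becomes (plus (reg S) (const_int 16)),
   which find_reloads can later load into a register if the insn requires
   a plain register at that position.  */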
2635
2636 /* Scan rtx X for modifications of elimination target registers. Update
2637 the table of eliminables to reflect the changed state. MEM_MODE is
2638 the mode of an enclosing MEM rtx, or VOIDmode if not within a MEM. */
2639
2640 static void
2641 elimination_effects (x, mem_mode)
2642 rtx x;
2643 enum machine_mode mem_mode;
2644
2645 {
2646 enum rtx_code code = GET_CODE (x);
2647 struct elim_table *ep;
2648 int regno;
2649 int i, j;
2650 const char *fmt;
2651
2652 switch (code)
2653 {
2654 case CONST_INT:
2655 case CONST_DOUBLE:
2656 case CONST:
2657 case SYMBOL_REF:
2658 case CODE_LABEL:
2659 case PC:
2660 case CC0:
2661 case ASM_INPUT:
2662 case ADDR_VEC:
2663 case ADDR_DIFF_VEC:
2664 case RETURN:
2665 return;
2666
2667 case ADDRESSOF:
2668 abort ();
2669
2670 case REG:
2671 regno = REGNO (x);
2672
2673 /* First handle the case where we encounter a bare register that
2674 is eliminable; just record whether it is referenced outside a MEM. */
2675 if (regno < FIRST_PSEUDO_REGISTER)
2676 {
2677 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2678 ep++)
2679 if (ep->from_rtx == x && ep->can_eliminate)
2680 {
2681 if (! mem_mode)
2682 ep->ref_outside_mem = 1;
2683 return;
2684 }
2685
2686 }
2687 else if (reg_renumber[regno] < 0 && reg_equiv_constant
2688 && reg_equiv_constant[regno]
2689 && ! CONSTANT_P (reg_equiv_constant[regno]))
2690 elimination_effects (reg_equiv_constant[regno], mem_mode);
2691 return;
2692
2693 case PRE_INC:
2694 case POST_INC:
2695 case PRE_DEC:
2696 case POST_DEC:
2697 case POST_MODIFY:
2698 case PRE_MODIFY:
2699 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2700 if (ep->to_rtx == XEXP (x, 0))
2701 {
2702 int size = GET_MODE_SIZE (mem_mode);
2703
2704 /* If more bytes than MEM_MODE are pushed, account for them. */
2705 #ifdef PUSH_ROUNDING
2706 if (ep->to_rtx == stack_pointer_rtx)
2707 size = PUSH_ROUNDING (size);
2708 #endif
2709 if (code == PRE_DEC || code == POST_DEC)
2710 ep->offset += size;
2711 else if (code == PRE_INC || code == POST_INC)
2712 ep->offset -= size;
2713 else if ((code == PRE_MODIFY || code == POST_MODIFY)
2714 && GET_CODE (XEXP (x, 1)) == PLUS
2715 && XEXP (x, 0) == XEXP (XEXP (x, 1), 0)
2716 && CONSTANT_P (XEXP (XEXP (x, 1), 1)))
2717 ep->offset -= INTVAL (XEXP (XEXP (x, 1), 1));
2718 }
2719
2720 /* These two aren't unary operators. */
2721 if (code == POST_MODIFY || code == PRE_MODIFY)
2722 break;
2723
2724 /* Fall through to generic unary operation case. */
2725 case STRICT_LOW_PART:
2726 case NEG: case NOT:
2727 case SIGN_EXTEND: case ZERO_EXTEND:
2728 case TRUNCATE: case FLOAT_EXTEND: case FLOAT_TRUNCATE:
2729 case FLOAT: case FIX:
2730 case UNSIGNED_FIX: case UNSIGNED_FLOAT:
2731 case ABS:
2732 case SQRT:
2733 case FFS:
2734 elimination_effects (XEXP (x, 0), mem_mode);
2735 return;
2736
2737 case SUBREG:
2738 if (GET_CODE (SUBREG_REG (x)) == REG
2739 && (GET_MODE_SIZE (GET_MODE (x))
2740 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
2741 && reg_equiv_memory_loc != 0
2742 && reg_equiv_memory_loc[REGNO (SUBREG_REG (x))] != 0)
2743 return;
2744
2745 elimination_effects (SUBREG_REG (x), mem_mode);
2746 return;
2747
2748 case USE:
2749 /* If using a register that is the source of an elimination we still
2750 think can be performed, note it cannot be performed since we don't
2751 know how this register is used. */
2752 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2753 if (ep->from_rtx == XEXP (x, 0))
2754 ep->can_eliminate = 0;
2755
2756 elimination_effects (XEXP (x, 0), mem_mode);
2757 return;
2758
2759 case CLOBBER:
2760 /* If clobbering a register that is the replacement register for an
2761 elimination we still think can be performed, note that it cannot
2762 be performed. Otherwise, we need not be concerned about it. */
2763 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2764 if (ep->to_rtx == XEXP (x, 0))
2765 ep->can_eliminate = 0;
2766
2767 elimination_effects (XEXP (x, 0), mem_mode);
2768 return;
2769
2770 case SET:
2771 /* Check for setting a register that we know about. */
2772 if (GET_CODE (SET_DEST (x)) == REG)
2773 {
2774 /* See if this is setting the replacement register for an
2775 elimination.
2776
2777 If DEST is the hard frame pointer, we do nothing because we
2778 assume that all assignments to the frame pointer are for
2779 non-local gotos and are being done at a time when they are valid
2780 and do not disturb anything else. Some machines want to
2781 eliminate a fake argument pointer (or even a fake frame pointer)
2782 with either the real frame or the stack pointer. Assignments to
2783 the hard frame pointer must not prevent this elimination. */
2784
2785 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
2786 ep++)
2787 if (ep->to_rtx == SET_DEST (x)
2788 && SET_DEST (x) != hard_frame_pointer_rtx)
2789 {
2790 /* If it is being incremented, adjust the offset. Otherwise,
2791 this elimination can't be done. */
2792 rtx src = SET_SRC (x);
2793
2794 if (GET_CODE (src) == PLUS
2795 && XEXP (src, 0) == SET_DEST (x)
2796 && GET_CODE (XEXP (src, 1)) == CONST_INT)
2797 ep->offset -= INTVAL (XEXP (src, 1));
2798 else
2799 ep->can_eliminate = 0;
2800 }
2801 }
2802
2803 elimination_effects (SET_DEST (x), 0);
2804 elimination_effects (SET_SRC (x), 0);
2805 return;
2806
2807 case MEM:
2808 if (GET_CODE (XEXP (x, 0)) == ADDRESSOF)
2809 abort ();
2810
2811 /* Our only special processing is to pass the mode of the MEM to our
2812 recursive call. */
2813 elimination_effects (XEXP (x, 0), GET_MODE (x));
2814 return;
2815
2816 default:
2817 break;
2818 }
2819
2820 fmt = GET_RTX_FORMAT (code);
2821 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2822 {
2823 if (*fmt == 'e')
2824 elimination_effects (XEXP (x, i), mem_mode);
2825 else if (*fmt == 'E')
2826 for (j = 0; j < XVECLEN (x, i); j++)
2827 elimination_effects (XVECEXP (x, i, j), mem_mode);
2828 }
2829 }
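
/* Example of the offset tracking in elimination_effects (the mode is
   arbitrary): a push such as

	(set (mem:SI (pre_dec:SI (reg S))) ...)

   where S is the stack pointer makes the PRE_DEC case add
   GET_MODE_SIZE (SImode), rounded by PUSH_ROUNDING when the target
   defines it, to the offset of every elimination whose replacement
   register is S, so that subsequent frame-pointer references are
   rewritten with the larger stack-relative offset.  */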
2830
2831 /* Descend through rtx X and verify that no references to eliminable registers
2832 remain. If any do remain, mark the involved register as not
2833 eliminable. */
2834
2835 static void
2836 check_eliminable_occurrences (x)
2837 rtx x;
2838 {
2839 const char *fmt;
2840 int i;
2841 enum rtx_code code;
2842
2843 if (x == 0)
2844 return;
2845
2846 code = GET_CODE (x);
2847
2848 if (code == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
2849 {
2850 struct elim_table *ep;
2851
2852 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2853 if (ep->from_rtx == x && ep->can_eliminate)
2854 ep->can_eliminate = 0;
2855 return;
2856 }
2857
2858 fmt = GET_RTX_FORMAT (code);
2859 for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2860 {
2861 if (*fmt == 'e')
2862 check_eliminable_occurrences (XEXP (x, i));
2863 else if (*fmt == 'E')
2864 {
2865 int j;
2866 for (j = 0; j < XVECLEN (x, i); j++)
2867 check_eliminable_occurrences (XVECEXP (x, i, j));
2868 }
2869 }
2870 }
2871 \f
2872 /* Scan INSN and eliminate all eliminable registers in it.
2873
2874 If REPLACE is nonzero, do the replacement destructively. Also
2875 delete the insn as dead if it is setting an eliminable register.
2876
2877 If REPLACE is zero, do all our allocations in reload_obstack.
2878
2879 If no eliminations were done and this insn doesn't require any elimination
2880 processing (these are not identical conditions: it might be updating sp,
2881 but not referencing fp; this needs to be seen during reload_as_needed so
2882 that the offset between fp and sp can be taken into consideration), zero
2883 is returned. Otherwise, 1 is returned. */
2884
2885 static int
2886 eliminate_regs_in_insn (insn, replace)
2887 rtx insn;
2888 int replace;
2889 {
2890 int icode = recog_memoized (insn);
2891 rtx old_body = PATTERN (insn);
2892 int insn_is_asm = asm_noperands (old_body) >= 0;
2893 rtx old_set = single_set (insn);
2894 rtx new_body;
2895 int val = 0;
2896 int i, any_changes;
2897 rtx substed_operand[MAX_RECOG_OPERANDS];
2898 rtx orig_operand[MAX_RECOG_OPERANDS];
2899 struct elim_table *ep;
2900
2901 if (! insn_is_asm && icode < 0)
2902 {
2903 if (GET_CODE (PATTERN (insn)) == USE
2904 || GET_CODE (PATTERN (insn)) == CLOBBER
2905 || GET_CODE (PATTERN (insn)) == ADDR_VEC
2906 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC
2907 || GET_CODE (PATTERN (insn)) == ASM_INPUT)
2908 return 0;
2909 abort ();
2910 }
2911
2912 if (old_set != 0 && GET_CODE (SET_DEST (old_set)) == REG
2913 && REGNO (SET_DEST (old_set)) < FIRST_PSEUDO_REGISTER)
2914 {
2915 /* Check for setting an eliminable register. */
2916 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
2917 if (ep->from_rtx == SET_DEST (old_set) && ep->can_eliminate)
2918 {
2919 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2920 /* If this is setting the frame pointer register to the
2921 hardware frame pointer register and this is an elimination
2922 that will be done (tested above), this insn is really
2923 adjusting the frame pointer downward to compensate for
2924 the adjustment done before a nonlocal goto. */
2925 if (ep->from == FRAME_POINTER_REGNUM
2926 && ep->to == HARD_FRAME_POINTER_REGNUM)
2927 {
2928 rtx src = SET_SRC (old_set);
2929 int offset = 0, ok = 0;
2930 rtx prev_insn, prev_set;
2931
2932 if (src == ep->to_rtx)
2933 offset = 0, ok = 1;
2934 else if (GET_CODE (src) == PLUS
2935 && GET_CODE (XEXP (src, 0)) == CONST_INT
2936 && XEXP (src, 1) == ep->to_rtx)
2937 offset = INTVAL (XEXP (src, 0)), ok = 1;
2938 else if (GET_CODE (src) == PLUS
2939 && GET_CODE (XEXP (src, 1)) == CONST_INT
2940 && XEXP (src, 0) == ep->to_rtx)
2941 offset = INTVAL (XEXP (src, 1)), ok = 1;
2942 else if ((prev_insn = prev_nonnote_insn (insn)) != 0
2943 && (prev_set = single_set (prev_insn)) != 0
2944 && rtx_equal_p (SET_DEST (prev_set), src))
2945 {
2946 src = SET_SRC (prev_set);
2947 if (src == ep->to_rtx)
2948 offset = 0, ok = 1;
2949 else if (GET_CODE (src) == PLUS
2950 && GET_CODE (XEXP (src, 0)) == CONST_INT
2951 && XEXP (src, 1) == ep->to_rtx)
2952 offset = INTVAL (XEXP (src, 0)), ok = 1;
2953 else if (GET_CODE (src) == PLUS
2954 && GET_CODE (XEXP (src, 1)) == CONST_INT
2955 && XEXP (src, 0) == ep->to_rtx)
2956 offset = INTVAL (XEXP (src, 1)), ok = 1;
2957 }
2958
2959 if (ok)
2960 {
2961 rtx src
2962 = plus_constant (ep->to_rtx, offset - ep->offset);
2963
2964 new_body = old_body;
2965 if (! replace)
2966 {
2967 new_body = copy_insn (old_body);
2968 if (REG_NOTES (insn))
2969 REG_NOTES (insn) = copy_insn_1 (REG_NOTES (insn));
2970 }
2971 PATTERN (insn) = new_body;
2972 old_set = single_set (insn);
2973
2974 /* First see if this insn remains valid when we
2975 make the change. If not, keep the INSN_CODE
2976 the same and let reload fix it up. */
2977 validate_change (insn, &SET_SRC (old_set), src, 1);
2978 validate_change (insn, &SET_DEST (old_set),
2979 ep->to_rtx, 1);
2980 if (! apply_change_group ())
2981 {
2982 SET_SRC (old_set) = src;
2983 SET_DEST (old_set) = ep->to_rtx;
2984 }
2985
2986 val = 1;
2987 goto done;
2988 }
2989 }
2990 #endif
2991
2992 /* In this case this insn isn't serving a useful purpose. We
2993 will delete it in reload_as_needed once we know that this
2994 elimination is, in fact, being done.
2995
2996 If REPLACE isn't set, we can't delete this insn, but needn't
2997 process it since it won't be used unless something changes. */
2998 if (replace)
2999 {
3000 delete_dead_insn (insn);
3001 return 1;
3002 }
3003 val = 1;
3004 goto done;
3005 }
3006 }
3007
3008 /* We allow one special case which happens to work on all machines we
3009 currently support: a single set with the source being a PLUS of an
3010 eliminable register and a constant. */
3011 if (old_set
3012 && GET_CODE (SET_DEST (old_set)) == REG
3013 && GET_CODE (SET_SRC (old_set)) == PLUS
3014 && GET_CODE (XEXP (SET_SRC (old_set), 0)) == REG
3015 && GET_CODE (XEXP (SET_SRC (old_set), 1)) == CONST_INT
3016 && REGNO (XEXP (SET_SRC (old_set), 0)) < FIRST_PSEUDO_REGISTER)
3017 {
3018 rtx reg = XEXP (SET_SRC (old_set), 0);
3019 int offset = INTVAL (XEXP (SET_SRC (old_set), 1));
3020
3021 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3022 if (ep->from_rtx == reg && ep->can_eliminate)
3023 {
3024 offset += ep->offset;
3025
3026 if (offset == 0)
3027 {
3028 int num_clobbers;
3029 /* We assume here that if we need a PARALLEL with
3030 CLOBBERs for this assignment, we can do with the
3031 MATCH_SCRATCHes that add_clobbers allocates.
3032 There's not much we can do if that doesn't work. */
3033 PATTERN (insn) = gen_rtx_SET (VOIDmode,
3034 SET_DEST (old_set),
3035 ep->to_rtx);
3036 num_clobbers = 0;
3037 INSN_CODE (insn) = recog (PATTERN (insn), insn, &num_clobbers);
3038 if (num_clobbers)
3039 {
3040 rtvec vec = rtvec_alloc (num_clobbers + 1);
3041
3042 vec->elem[0] = PATTERN (insn);
3043 PATTERN (insn) = gen_rtx_PARALLEL (VOIDmode, vec);
3044 add_clobbers (PATTERN (insn), INSN_CODE (insn));
3045 }
3046 if (INSN_CODE (insn) < 0)
3047 abort ();
3048 }
3049 else
3050 {
3051 new_body = old_body;
3052 if (! replace)
3053 {
3054 new_body = copy_insn (old_body);
3055 if (REG_NOTES (insn))
3056 REG_NOTES (insn) = copy_insn_1 (REG_NOTES (insn));
3057 }
3058 PATTERN (insn) = new_body;
3059 old_set = single_set (insn);
3060
3061 XEXP (SET_SRC (old_set), 0) = ep->to_rtx;
3062 XEXP (SET_SRC (old_set), 1) = GEN_INT (offset);
3063 }
3064 val = 1;
3065 /* This can't have an effect on elimination offsets, so skip right
3066 to the end. */
3067 goto done;
3068 }
3069 }
3070
3071 /* Determine the effects of this insn on elimination offsets. */
3072 elimination_effects (old_body, 0);
3073
3074 /* Eliminate all eliminable registers occurring in operands that
3075 can be handled by reload. */
3076 extract_insn (insn);
3077 any_changes = 0;
3078 for (i = 0; i < recog_data.n_operands; i++)
3079 {
3080 orig_operand[i] = recog_data.operand[i];
3081 substed_operand[i] = recog_data.operand[i];
3082
3083 /* For an asm statement, every operand is eliminable. */
3084 if (insn_is_asm || insn_data[icode].operand[i].eliminable)
3085 {
3086 /* Check for setting a register that we know about. */
3087 if (recog_data.operand_type[i] != OP_IN
3088 && GET_CODE (orig_operand[i]) == REG)
3089 {
3090 /* If we are assigning to a register that can be eliminated, it
3091 must be as part of a PARALLEL, since the code above handles
3092 single SETs. We must indicate that we can no longer
3093 eliminate this reg. */
3094 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS];
3095 ep++)
3096 if (ep->from_rtx == orig_operand[i] && ep->can_eliminate)
3097 ep->can_eliminate = 0;
3098 }
3099
3100 substed_operand[i] = eliminate_regs (recog_data.operand[i], 0,
3101 replace ? insn : NULL_RTX);
3102 if (substed_operand[i] != orig_operand[i])
3103 val = any_changes = 1;
3104 /* Terminate the search in check_eliminable_occurrences at
3105 this point. */
3106 *recog_data.operand_loc[i] = 0;
3107
3108 /* If an output operand changed from a REG to a MEM and INSN is an
3109 insn, write a CLOBBER insn. */
3110 if (recog_data.operand_type[i] != OP_IN
3111 && GET_CODE (orig_operand[i]) == REG
3112 && GET_CODE (substed_operand[i]) == MEM
3113 && replace)
3114 emit_insn_after (gen_rtx_CLOBBER (VOIDmode, orig_operand[i]),
3115 insn);
3116 }
3117 }
3118
3119 for (i = 0; i < recog_data.n_dups; i++)
3120 *recog_data.dup_loc[i]
3121 = *recog_data.operand_loc[(int) recog_data.dup_num[i]];
3122
3123 /* If any eliminable registers remain, they aren't eliminable anymore. */
3124 check_eliminable_occurrences (old_body);
3125
3126 /* Substitute the operands; the new values are in the substed_operand
3127 array. */
3128 for (i = 0; i < recog_data.n_operands; i++)
3129 *recog_data.operand_loc[i] = substed_operand[i];
3130 for (i = 0; i < recog_data.n_dups; i++)
3131 *recog_data.dup_loc[i] = substed_operand[(int) recog_data.dup_num[i]];
3132
3133 /* If we are replacing a body that was a (set X (plus Y Z)), try to
3134 re-recognize the insn. We do this in case we had a simple addition
3135 but now can do this as a load-address. This saves an insn in this
3136 common case.
3137 If re-recognition fails, the old insn code number will still be used,
3138 and some register operands may have changed into PLUS expressions.
3139 These will be handled by find_reloads by loading them into a register
3140 again. */
3141
3142 if (val)
3143 {
3144 /* If we aren't replacing things permanently and we changed something,
3145 make another copy to ensure that all the RTL is new. Otherwise
3146 things can go wrong if find_reload swaps commutative operands
3147 and one is inside RTL that has been copied while the other is not. */
3148 new_body = old_body;
3149 if (! replace)
3150 {
3151 new_body = copy_insn (old_body);
3152 if (REG_NOTES (insn))
3153 REG_NOTES (insn) = copy_insn_1 (REG_NOTES (insn));
3154 }
3155 PATTERN (insn) = new_body;
3156
3157 /* If we had a move insn but now we don't, rerecognize it. This will
3158 cause spurious re-recognition if the old move had a PARALLEL since
3159 the new one still will, but we can't call single_set without
3160 having put NEW_BODY into the insn and the re-recognition won't
3161 hurt in this rare case. */
3162 /* ??? Why this huge if statement - why don't we just rerecognize the
3163 thing always? */
3164 if (! insn_is_asm
3165 && old_set != 0
3166 && ((GET_CODE (SET_SRC (old_set)) == REG
3167 && (GET_CODE (new_body) != SET
3168 || GET_CODE (SET_SRC (new_body)) != REG))
3169 /* If this was a load from or store to memory, compare
3170 the MEM in recog_data.operand to the one in the insn.
3171 If they are not equal, then rerecognize the insn. */
3172 || (old_set != 0
3173 && ((GET_CODE (SET_SRC (old_set)) == MEM
3174 && SET_SRC (old_set) != recog_data.operand[1])
3175 || (GET_CODE (SET_DEST (old_set)) == MEM
3176 && SET_DEST (old_set) != recog_data.operand[0])))
3177 /* If this was an add insn before, rerecognize. */
3178 || GET_CODE (SET_SRC (old_set)) == PLUS))
3179 {
3180 int new_icode = recog (PATTERN (insn), insn, 0);
3181 if (new_icode < 0)
3182 INSN_CODE (insn) = icode;
3183 }
3184 }
3185
3186 /* Restore the old body. If there were any changes to it, we made a copy
3187 of it while the changes were still in place, so we'll correctly return
3188 a modified insn below. */
3189 if (! replace)
3190 {
3191 /* Restore the old body. */
3192 for (i = 0; i < recog_data.n_operands; i++)
3193 *recog_data.operand_loc[i] = orig_operand[i];
3194 for (i = 0; i < recog_data.n_dups; i++)
3195 *recog_data.dup_loc[i] = orig_operand[(int) recog_data.dup_num[i]];
3196 }
3197
3198 /* Update all elimination pairs to reflect the status after the current
3199 insn. The changes we make were determined by the earlier call to
3200 elimination_effects.
3201
3202 We also detect cases where register elimination cannot be done,
3203 namely, if a register would be both changed and referenced outside a MEM
3204 in the resulting insn since such an insn is often undefined and, even if
3205 not, we cannot know what meaning will be given to it. Note that it is
3206 valid to have a register used in an address in an insn that changes it
3207 (presumably with a pre- or post-increment or decrement).
3208
3209 If anything changes, return nonzero. */
3210
3211 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3212 {
3213 if (ep->previous_offset != ep->offset && ep->ref_outside_mem)
3214 ep->can_eliminate = 0;
3215
3216 ep->ref_outside_mem = 0;
3217
3218 if (ep->previous_offset != ep->offset)
3219 val = 1;
3220 }
3221
3222 done:
3223 /* If we changed something, perform elimination in REG_NOTES. This is
3224 needed even when REPLACE is zero because a REG_DEAD note might refer
3225 to a register that we eliminate and could cause a different number
3226 of spill registers to be needed in the final reload pass than in
3227 the pre-passes. */
3228 if (val && REG_NOTES (insn) != 0)
3229 REG_NOTES (insn) = eliminate_regs (REG_NOTES (insn), 0, REG_NOTES (insn));
3230
3231 return val;
3232 }
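
/* As an example of the single-set special case in eliminate_regs_in_insn
   (the register number and offsets are invented): with a frame pointer F
   being eliminated to the stack pointer S at a current offset of 8, the insn

	(set (reg:SI 117) (plus:SI (reg F) (const_int 4)))

   is rewritten as

	(set (reg:SI 117) (plus:SI (reg S) (const_int 12)))

   and had the combined offset been zero it would instead have become a
   plain register copy and been re-recognized, with CLOBBERs added if the
   new pattern needs them.  */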
3233
3234 /* Loop through all elimination pairs.
3235 Recalculate the number not at initial offset.
3236
3237 Compute the maximum offset (minimum offset if the stack does not
3238 grow downward) for each elimination pair. */
3239
3240 static void
3241 update_eliminable_offsets ()
3242 {
3243 struct elim_table *ep;
3244
3245 num_not_at_initial_offset = 0;
3246 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3247 {
3248 ep->previous_offset = ep->offset;
3249 if (ep->can_eliminate && ep->offset != ep->initial_offset)
3250 num_not_at_initial_offset++;
3251 }
3252 }
3253
3254 /* Given X, a SET or CLOBBER of DEST, if DEST is the target of a register
3255 replacement we currently believe is valid, mark it as not eliminable if X
3256 modifies DEST in any way other than by adding a constant integer to it.
3257
3258 If DEST is the frame pointer, we do nothing because we assume that
3259 all assignments to the hard frame pointer are nonlocal gotos and are being
3260 done at a time when they are valid and do not disturb anything else.
3261 Some machines want to eliminate a fake argument pointer with either the
3262 frame or stack pointer. Assignments to the hard frame pointer must not
3263 prevent this elimination.
3264
3265 Called via note_stores from reload before starting its passes to scan
3266 the insns of the function. */
3267
3268 static void
3269 mark_not_eliminable (dest, x, data)
3270 rtx dest;
3271 rtx x;
3272 void *data ATTRIBUTE_UNUSED;
3273 {
3274 register unsigned int i;
3275
3276 /* A SUBREG of a hard register here is just changing its mode. We should
3277 not see a SUBREG of an eliminable hard register, but check just in
3278 case. */
3279 if (GET_CODE (dest) == SUBREG)
3280 dest = SUBREG_REG (dest);
3281
3282 if (dest == hard_frame_pointer_rtx)
3283 return;
3284
3285 for (i = 0; i < NUM_ELIMINABLE_REGS; i++)
3286 if (reg_eliminate[i].can_eliminate && dest == reg_eliminate[i].to_rtx
3287 && (GET_CODE (x) != SET
3288 || GET_CODE (SET_SRC (x)) != PLUS
3289 || XEXP (SET_SRC (x), 0) != dest
3290 || GET_CODE (XEXP (SET_SRC (x), 1)) != CONST_INT))
3291 {
3292 reg_eliminate[i].can_eliminate_previous
3293 = reg_eliminate[i].can_eliminate = 0;
3294 num_eliminable--;
3295 }
3296 }
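
/* Concretely (the register number is invented): a store such as
   (set (reg S) (reg:SI 117)), where S is the stack pointer serving as the
   replacement register of some elimination, makes mark_not_eliminable
   disable that elimination, whereas
   (set (reg S) (plus (reg S) (const_int -16))) is tolerated because only
   a constant is being added to S.  */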
3297
3298 /* Verify that the initial elimination offsets did not change since the
3299 last call to set_initial_elim_offsets. This is used to catch cases
3300 where something illegal happened during reload_as_needed that could
3301 cause incorrect code to be generated if we did not check for it. */
3302
3303 static void
3304 verify_initial_elim_offsets ()
3305 {
3306 int t;
3307
3308 #ifdef ELIMINABLE_REGS
3309 struct elim_table *ep;
3310
3311 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3312 {
3313 INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, t);
3314 if (t != ep->initial_offset)
3315 abort ();
3316 }
3317 #else
3318 INITIAL_FRAME_POINTER_OFFSET (t);
3319 if (t != reg_eliminate[0].initial_offset)
3320 abort ();
3321 #endif
3322 }
3323
3324 /* Reset all offsets on eliminable registers to their initial values. */
3325
3326 static void
3327 set_initial_elim_offsets ()
3328 {
3329 struct elim_table *ep = reg_eliminate;
3330
3331 #ifdef ELIMINABLE_REGS
3332 for (; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3333 {
3334 INITIAL_ELIMINATION_OFFSET (ep->from, ep->to, ep->initial_offset);
3335 ep->previous_offset = ep->offset = ep->initial_offset;
3336 }
3337 #else
3338 INITIAL_FRAME_POINTER_OFFSET (ep->initial_offset);
3339 ep->previous_offset = ep->offset = ep->initial_offset;
3340 #endif
3341
3342 num_not_at_initial_offset = 0;
3343 }
3344
3345 /* Initialize the known label offsets.
3346 Set a known offset for each forced label to be at the initial offset
3347 of each elimination. We do this because we assume that all
3348 computed jumps occur from a location where each elimination is
3349 at its initial offset.
3350 For all other labels, show that we don't know the offsets. */
3351
3352 static void
3353 set_initial_label_offsets ()
3354 {
3355 rtx x;
3356 memset ((char *) &offsets_known_at[get_first_label_num ()], 0, num_labels);
3357
3358 for (x = forced_labels; x; x = XEXP (x, 1))
3359 if (XEXP (x, 0))
3360 set_label_offsets (XEXP (x, 0), NULL_RTX, 1);
3361 }
3362
3363 /* Set all elimination offsets to the known values for the code label given
3364 by INSN. */
3365
3366 static void
3367 set_offsets_for_label (insn)
3368 rtx insn;
3369 {
3370 unsigned int i;
3371 int label_nr = CODE_LABEL_NUMBER (insn);
3372 struct elim_table *ep;
3373
3374 num_not_at_initial_offset = 0;
3375 for (i = 0, ep = reg_eliminate; i < NUM_ELIMINABLE_REGS; ep++, i++)
3376 {
3377 ep->offset = ep->previous_offset = offsets_at[label_nr][i];
3378 if (ep->can_eliminate && ep->offset != ep->initial_offset)
3379 num_not_at_initial_offset++;
3380 }
3381 }
3382
3383 /* See if anything that happened changes which eliminations are valid.
3384 For example, on the Sparc, whether or not the frame pointer can
3385 be eliminated can depend on what registers have been used. We need
3386 not check some conditions again (such as flag_omit_frame_pointer)
3387 since they can't have changed. */
3388
3389 static void
3390 update_eliminables (pset)
3391 HARD_REG_SET *pset;
3392 {
3393 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3394 int previous_frame_pointer_needed = frame_pointer_needed;
3395 #endif
3396 struct elim_table *ep;
3397
3398 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3399 if ((ep->from == HARD_FRAME_POINTER_REGNUM && FRAME_POINTER_REQUIRED)
3400 #ifdef ELIMINABLE_REGS
3401 || ! CAN_ELIMINATE (ep->from, ep->to)
3402 #endif
3403 )
3404 ep->can_eliminate = 0;
3405
3406 /* Look for the case where we have discovered that we can't replace
3407 register A with register B and that means that we will now be
3408 trying to replace register A with register C. This means we can
3409 no longer replace register C with register B and we need to disable
3410 such an elimination, if it exists. This occurs often with A == ap,
3411 B == sp, and C == fp. */
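/* Concretely (an illustrative reading of the paragraph above): once the
   ap -> sp elimination has been disabled, ap will instead be eliminated
   to fp, so fp must stay valid for the whole function; any remaining
   fp -> sp elimination therefore has to be disabled too, which is what
   the loop below does.  */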
3412
3413 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3414 {
3415 struct elim_table *op;
3416 register int new_to = -1;
3417
3418 if (! ep->can_eliminate && ep->can_eliminate_previous)
3419 {
3420 /* Find the current elimination for ep->from, if there is a
3421 new one. */
3422 for (op = reg_eliminate;
3423 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
3424 if (op->from == ep->from && op->can_eliminate)
3425 {
3426 new_to = op->to;
3427 break;
3428 }
3429
3430 /* See if there is an elimination of NEW_TO -> EP->TO. If so,
3431 disable it. */
3432 for (op = reg_eliminate;
3433 op < &reg_eliminate[NUM_ELIMINABLE_REGS]; op++)
3434 if (op->from == new_to && op->to == ep->to)
3435 op->can_eliminate = 0;
3436 }
3437 }
3438
3439 /* See if any registers that we thought we could eliminate the previous
3440 time are no longer eliminable. If so, something has changed and we
3441 must spill the register. Also, recompute the number of eliminable
3442 registers and see if the frame pointer is needed; it is if there is
3443 no elimination of the frame pointer that we can perform. */
3444
3445 frame_pointer_needed = 1;
3446 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3447 {
3448 if (ep->can_eliminate && ep->from == FRAME_POINTER_REGNUM
3449 && ep->to != HARD_FRAME_POINTER_REGNUM)
3450 frame_pointer_needed = 0;
3451
3452 if (! ep->can_eliminate && ep->can_eliminate_previous)
3453 {
3454 ep->can_eliminate_previous = 0;
3455 SET_HARD_REG_BIT (*pset, ep->from);
3456 num_eliminable--;
3457 }
3458 }
3459
3460 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3461 /* If we didn't need a frame pointer last time, but we do now, spill
3462 the hard frame pointer. */
3463 if (frame_pointer_needed && ! previous_frame_pointer_needed)
3464 SET_HARD_REG_BIT (*pset, HARD_FRAME_POINTER_REGNUM);
3465 #endif
3466 }
3467
3468 /* Initialize the table of registers to eliminate. */
3469
3470 static void
3471 init_elim_table ()
3472 {
3473 struct elim_table *ep;
3474 #ifdef ELIMINABLE_REGS
3475 struct elim_table_1 *ep1;
3476 #endif
3477
3478 if (!reg_eliminate)
3479 reg_eliminate = (struct elim_table *)
3480 xcalloc (sizeof (struct elim_table), NUM_ELIMINABLE_REGS);
3481
3482 /* Does this function require a frame pointer? */
3483
3484 frame_pointer_needed = (! flag_omit_frame_pointer
3485 #ifdef EXIT_IGNORE_STACK
3486 /* ?? If EXIT_IGNORE_STACK is set, we will not save
3487 and restore sp for alloca. So we can't eliminate
3488 the frame pointer in that case. At some point,
3489 we should improve this by emitting the
3490 sp-adjusting insns for this case. */
3491 || (current_function_calls_alloca
3492 && EXIT_IGNORE_STACK)
3493 #endif
3494 || FRAME_POINTER_REQUIRED);
3495
3496 num_eliminable = 0;
3497
3498 #ifdef ELIMINABLE_REGS
3499 for (ep = reg_eliminate, ep1 = reg_eliminate_1;
3500 ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++, ep1++)
3501 {
3502 ep->from = ep1->from;
3503 ep->to = ep1->to;
3504 ep->can_eliminate = ep->can_eliminate_previous
3505 = (CAN_ELIMINATE (ep->from, ep->to)
3506 && ! (ep->to == STACK_POINTER_REGNUM && frame_pointer_needed));
3507 }
3508 #else
3509 reg_eliminate[0].from = reg_eliminate_1[0].from;
3510 reg_eliminate[0].to = reg_eliminate_1[0].to;
3511 reg_eliminate[0].can_eliminate = reg_eliminate[0].can_eliminate_previous
3512 = ! frame_pointer_needed;
3513 #endif
3514
3515 /* Count the number of eliminable registers and build the FROM and TO
3516 REG rtx's. Note that code in gen_rtx will cause, e.g.,
3517 gen_rtx (REG, Pmode, STACK_POINTER_REGNUM) to equal stack_pointer_rtx.
3518 We depend on this. */
3519 for (ep = reg_eliminate; ep < &reg_eliminate[NUM_ELIMINABLE_REGS]; ep++)
3520 {
3521 num_eliminable += ep->can_eliminate;
3522 ep->from_rtx = gen_rtx_REG (Pmode, ep->from);
3523 ep->to_rtx = gen_rtx_REG (Pmode, ep->to);
3524 }
3525 }
3526 \f
3527 /* Kick all pseudos out of hard register REGNO.
3528
3529 If CANT_ELIMINATE is nonzero, it means that we are doing this spill
3530 because we found we can't eliminate some register. In that case, no pseudos
3531 are allowed to be in the register, even if they are only in a block that
3532 doesn't require spill registers, unlike the case when we are spilling this
3533 hard reg to produce another spill register.
3534
3535 Any pseudos that need to be kicked out are recorded in spilled_pseudos. */
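/* Illustrative sketch (the mode and register numbers are hypothetical):
   on a 32-bit target, a DImode pseudo allocated to hard reg 4 also
   occupies hard reg 5, so spilling hard reg 5 must kick that pseudo out
   even though reg_renumber[] records only reg 4; the overlap test below,
   reg_renumber[i] <= REGNO < reg_renumber[i] + HARD_REGNO_NREGS, catches
   exactly this case.  */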
3536
3537 static void
3538 spill_hard_reg (regno, cant_eliminate)
3539 unsigned int regno;
3540 int cant_eliminate;
3541 {
3542 register int i;
3543
3544 if (cant_eliminate)
3545 {
3546 SET_HARD_REG_BIT (bad_spill_regs_global, regno);
3547 regs_ever_live[regno] = 1;
3548 }
3549
3550 /* Spill every pseudo reg that was allocated to this reg
3551 or to something that overlaps this reg. */
3552
3553 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
3554 if (reg_renumber[i] >= 0
3555 && (unsigned int) reg_renumber[i] <= regno
3556 && ((unsigned int) reg_renumber[i]
3557 + HARD_REGNO_NREGS ((unsigned int) reg_renumber[i],
3558 PSEUDO_REGNO_MODE (i))
3559 > regno))
3560 SET_REGNO_REG_SET (&spilled_pseudos, i);
3561 }
3562
3563 /* I'm getting weird preprocessor errors if I use IOR_HARD_REG_SET
3564 from within EXECUTE_IF_SET_IN_REG_SET. Hence this awkwardness. */
3565
3566 static void
3567 ior_hard_reg_set (set1, set2)
3568 HARD_REG_SET *set1, *set2;
3569 {
3570 IOR_HARD_REG_SET (*set1, *set2);
3571 }
3572
3573 /* After find_reload_regs has been run for all insns that need reloads,
3574 and/or spill_hard_reg was called, this function is used to actually
3575 spill pseudo registers and try to reallocate them. It also sets up the
3576 spill_regs array for use by choose_reload_regs. */
3577
3578 static int
3579 finish_spills (global)
3580 int global;
3581 {
3582 struct insn_chain *chain;
3583 int something_changed = 0;
3584 int i;
3585
3586 /* Build the spill_regs array for the function. */
3587 /* If there are some registers still to eliminate and one of the spill regs
3588 wasn't ever used before, additional stack space may have to be
3589 allocated to store this register. Thus, we may have changed the offset
3590 between the stack and frame pointers, so mark that something has changed.
3591
3592 One might think that we need do this only if this is a call-used
3593 register. However, the set of registers that must be saved by the
3594 prologue is not identical to the call-used set. For example, the
3595 register used by the call insn for the return PC is a call-used register,
3596 but must be saved by the prologue. */
3597
3598 n_spills = 0;
3599 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3600 if (TEST_HARD_REG_BIT (used_spill_regs, i))
3601 {
3602 spill_reg_order[i] = n_spills;
3603 spill_regs[n_spills++] = i;
3604 if (num_eliminable && ! regs_ever_live[i])
3605 something_changed = 1;
3606 regs_ever_live[i] = 1;
3607 }
3608 else
3609 spill_reg_order[i] = -1;
3610
3611 EXECUTE_IF_SET_IN_REG_SET
3612 (&spilled_pseudos, FIRST_PSEUDO_REGISTER, i,
3613 {
3614 /* Record the current hard register the pseudo is allocated to in
3615 pseudo_previous_regs so we avoid reallocating it to the same
3616 hard reg in a later pass. */
3617 if (reg_renumber[i] < 0)
3618 abort ();
3619
3620 SET_HARD_REG_BIT (pseudo_previous_regs[i], reg_renumber[i]);
3621 /* Mark it as no longer having a hard register home. */
3622 reg_renumber[i] = -1;
3623 /* We will need to scan everything again. */
3624 something_changed = 1;
3625 });
3626
3627 /* Retry global register allocation if possible. */
3628 if (global)
3629 {
3630 memset ((char *) pseudo_forbidden_regs, 0, max_regno * sizeof (HARD_REG_SET));
3631 /* For every insn that needs reloads, set the registers used as spill
3632 regs in pseudo_forbidden_regs for every pseudo live across the
3633 insn. */
3634 for (chain = insns_need_reload; chain; chain = chain->next_need_reload)
3635 {
3636 EXECUTE_IF_SET_IN_REG_SET
3637 (&chain->live_throughout, FIRST_PSEUDO_REGISTER, i,
3638 {
3639 ior_hard_reg_set (pseudo_forbidden_regs + i,
3640 &chain->used_spill_regs);
3641 });
3642 EXECUTE_IF_SET_IN_REG_SET
3643 (&chain->dead_or_set, FIRST_PSEUDO_REGISTER, i,
3644 {
3645 ior_hard_reg_set (pseudo_forbidden_regs + i,
3646 &chain->used_spill_regs);
3647 });
3648 }
3649
3650 /* Retry allocating the spilled pseudos. For each reg, merge the
3651 various reg sets that indicate which hard regs can't be used,
3652 and call retry_global_alloc.
3653 We change spilled_pseudos here to only contain pseudos that did not
3654 get a new hard register. */
3655 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
3656 if (reg_old_renumber[i] != reg_renumber[i])
3657 {
3658 HARD_REG_SET forbidden;
3659 COPY_HARD_REG_SET (forbidden, bad_spill_regs_global);
3660 IOR_HARD_REG_SET (forbidden, pseudo_forbidden_regs[i]);
3661 IOR_HARD_REG_SET (forbidden, pseudo_previous_regs[i]);
3662 retry_global_alloc (i, forbidden);
3663 if (reg_renumber[i] >= 0)
3664 CLEAR_REGNO_REG_SET (&spilled_pseudos, i);
3665 }
3666 }
3667
3668 /* Fix up the register information in the insn chain.
3669 This involves deleting those of the spilled pseudos which did not get
3670 a new hard register home from the live_throughout and dead_or_set sets. */
3671 for (chain = reload_insn_chain; chain; chain = chain->next)
3672 {
3673 HARD_REG_SET used_by_pseudos;
3674 HARD_REG_SET used_by_pseudos2;
3675
3676 AND_COMPL_REG_SET (&chain->live_throughout, &spilled_pseudos);
3677 AND_COMPL_REG_SET (&chain->dead_or_set, &spilled_pseudos);
3678
3679 /* Mark any unallocated hard regs as available for spills. That
3680 makes inheritance work somewhat better. */
3681 if (chain->need_reload)
3682 {
3683 REG_SET_TO_HARD_REG_SET (used_by_pseudos, &chain->live_throughout);
3684 REG_SET_TO_HARD_REG_SET (used_by_pseudos2, &chain->dead_or_set);
3685 IOR_HARD_REG_SET (used_by_pseudos, used_by_pseudos2);
3686
3687 /* Save the old value for the sanity test below. */
3688 COPY_HARD_REG_SET (used_by_pseudos2, chain->used_spill_regs);
3689
3690 compute_use_by_pseudos (&used_by_pseudos, &chain->live_throughout);
3691 compute_use_by_pseudos (&used_by_pseudos, &chain->dead_or_set);
3692 COMPL_HARD_REG_SET (chain->used_spill_regs, used_by_pseudos);
3693 AND_HARD_REG_SET (chain->used_spill_regs, used_spill_regs);
3694
3695 /* Make sure we only enlarge the set. */
3696 GO_IF_HARD_REG_SUBSET (used_by_pseudos2, chain->used_spill_regs, ok);
3697 abort ();
3698 ok:;
3699 }
3700 }
3701
3702 /* Let alter_reg modify the reg rtx's for the modified pseudos. */
3703 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
3704 {
3705 int regno = reg_renumber[i];
3706 if (reg_old_renumber[i] == regno)
3707 continue;
3708
3709 alter_reg (i, reg_old_renumber[i]);
3710 reg_old_renumber[i] = regno;
3711 if (rtl_dump_file)
3712 {
3713 if (regno == -1)
3714 fprintf (rtl_dump_file, " Register %d now on stack.\n\n", i);
3715 else
3716 fprintf (rtl_dump_file, " Register %d now in %d.\n\n",
3717 i, reg_renumber[i]);
3718 }
3719 }
3720
3721 return something_changed;
3722 }
3723 \f
3724 /* Find all paradoxical subregs within X and update reg_max_ref_width.
3725 Also mark any hard registers used to store user variables as
3726 forbidden from being used for spill registers. */
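/* A paradoxical SUBREG is one whose outer mode is wider than the inner
   register, e.g. (subreg:DI (reg:SI 65) 0) on a 32-bit target (the
   pseudo number and modes are only an example).  Recording the outer
   mode's size in reg_max_ref_width lets alter_reg make any stack slot
   assigned to the pseudo wide enough for such references.  */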
3727
3728 static void
3729 scan_paradoxical_subregs (x)
3730 register rtx x;
3731 {
3732 register int i;
3733 register const char *fmt;
3734 register enum rtx_code code = GET_CODE (x);
3735
3736 switch (code)
3737 {
3738 case REG:
3739 #if 0
3740 if (SMALL_REGISTER_CLASSES && REGNO (x) < FIRST_PSEUDO_REGISTER
3741 && REG_USERVAR_P (x))
3742 SET_HARD_REG_BIT (bad_spill_regs_global, REGNO (x));
3743 #endif
3744 return;
3745
3746 case CONST_INT:
3747 case CONST:
3748 case SYMBOL_REF:
3749 case LABEL_REF:
3750 case CONST_DOUBLE:
3751 case CC0:
3752 case PC:
3753 case USE:
3754 case CLOBBER:
3755 return;
3756
3757 case SUBREG:
3758 if (GET_CODE (SUBREG_REG (x)) == REG
3759 && GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3760 reg_max_ref_width[REGNO (SUBREG_REG (x))]
3761 = GET_MODE_SIZE (GET_MODE (x));
3762 return;
3763
3764 default:
3765 break;
3766 }
3767
3768 fmt = GET_RTX_FORMAT (code);
3769 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3770 {
3771 if (fmt[i] == 'e')
3772 scan_paradoxical_subregs (XEXP (x, i));
3773 else if (fmt[i] == 'E')
3774 {
3775 register int j;
3776 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3777 scan_paradoxical_subregs (XVECEXP (x, i, j));
3778 }
3779 }
3780 }
3781 \f
3782 /* Reload pseudo-registers into hard regs around each insn as needed.
3783 Additional register load insns are output before the insn that needs it
3784 and perhaps store insns after insns that modify the reloaded pseudo reg.
3785
3786 reg_last_reload_reg and reg_reloaded_contents keep track of
3787 which registers are already available in reload registers.
3788 We update these for the reloads that we perform,
3789 as the insns are scanned. */
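/* For instance (an illustrative sketch with made-up numbers): if one
   insn reloads pseudo 70 into hard reg 9, reg_last_reload_reg[70]
   records reg 9 and reg_reloaded_contents[9] records pseudo 70; a later
   insn that needs pseudo 70 as an input can then inherit the value from
   reg 9 instead of emitting a fresh load, provided reg 9 has not been
   clobbered or reused in the meantime.  */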
3790
3791 static void
3792 reload_as_needed (live_known)
3793 int live_known;
3794 {
3795 struct insn_chain *chain;
3796 #if defined (AUTO_INC_DEC)
3797 register int i;
3798 #endif
3799 rtx x;
3800
3801 memset ((char *) spill_reg_rtx, 0, sizeof spill_reg_rtx);
3802 memset ((char *) spill_reg_store, 0, sizeof spill_reg_store);
3803 reg_last_reload_reg = (rtx *) xcalloc (max_regno, sizeof (rtx));
3804 reg_has_output_reload = (char *) xmalloc (max_regno);
3805 CLEAR_HARD_REG_SET (reg_reloaded_valid);
3806
3807 set_initial_elim_offsets ();
3808
3809 for (chain = reload_insn_chain; chain; chain = chain->next)
3810 {
3811 rtx prev;
3812 rtx insn = chain->insn;
3813 rtx old_next = NEXT_INSN (insn);
3814
3815 /* If we pass a label, copy the offsets from the label information
3816 into the current offsets of each elimination. */
3817 if (GET_CODE (insn) == CODE_LABEL)
3818 set_offsets_for_label (insn);
3819
3820 else if (INSN_P (insn))
3821 {
3822 rtx oldpat = PATTERN (insn);
3823
3824 /* If this is a USE or CLOBBER of a MEM, ensure that any
3825 references to eliminable registers have been removed. */
3826
3827 if ((GET_CODE (PATTERN (insn)) == USE
3828 || GET_CODE (PATTERN (insn)) == CLOBBER)
3829 && GET_CODE (XEXP (PATTERN (insn), 0)) == MEM)
3830 XEXP (XEXP (PATTERN (insn), 0), 0)
3831 = eliminate_regs (XEXP (XEXP (PATTERN (insn), 0), 0),
3832 GET_MODE (XEXP (PATTERN (insn), 0)),
3833 NULL_RTX);
3834
3835 /* If we need to do register elimination processing, do so.
3836 This might delete the insn, in which case we are done. */
3837 if ((num_eliminable || num_eliminable_invariants) && chain->need_elim)
3838 {
3839 eliminate_regs_in_insn (insn, 1);
3840 if (GET_CODE (insn) == NOTE)
3841 {
3842 update_eliminable_offsets ();
3843 continue;
3844 }
3845 }
3846
3847 /* If need_elim is nonzero but need_reload is zero, one might think
3848 that we could simply set n_reloads to 0. However, find_reloads
3849 could have done some manipulation of the insn (such as swapping
3850 commutative operands), and these manipulations are lost during
3851 the first pass for every insn that needs register elimination.
3852 So the actions of find_reloads must be redone here. */
3853
3854 if (! chain->need_elim && ! chain->need_reload
3855 && ! chain->need_operand_change)
3856 n_reloads = 0;
3857 /* First find the pseudo regs that must be reloaded for this insn.
3858 This info is returned in the tables reload_... (see reload.h).
3859 Also modify the body of INSN by substituting RELOAD
3860 rtx's for those pseudo regs. */
3861 else
3862 {
3863 memset (reg_has_output_reload, 0, max_regno);
3864 CLEAR_HARD_REG_SET (reg_is_output_reload);
3865
3866 find_reloads (insn, 1, spill_indirect_levels, live_known,
3867 spill_reg_order);
3868 }
3869
3870 if (n_reloads > 0)
3871 {
3872 rtx next = NEXT_INSN (insn);
3873 rtx p;
3874
3875 prev = PREV_INSN (insn);
3876
3877 /* Now compute which reload regs to reload them into. Perhaps
3878 reusing reload regs from previous insns, or else output
3879 load insns to reload them. Maybe output store insns too.
3880 Record the choices of reload reg in reload_reg_rtx. */
3881 choose_reload_regs (chain);
3882
3883 /* Merge any reloads that we didn't combine for fear of
3884 increasing the number of spill registers needed but now
3885 discover can be safely merged. */
3886 if (SMALL_REGISTER_CLASSES)
3887 merge_assigned_reloads (insn);
3888
3889 /* Generate the insns to reload operands into or out of
3890 their reload regs. */
3891 emit_reload_insns (chain);
3892
3893 /* Substitute the chosen reload regs from reload_reg_rtx
3894 into the insn's body (or perhaps into the bodies of other
3895 load and store insns that we just made for reloading
3896 and that we moved the structure into). */
3897 subst_reloads (insn);
3898
3899 /* If this was an ASM, make sure that all the reload insns
3900 we have generated are valid. If not, give an error
3901 and delete them. */
3902
3903 if (asm_noperands (PATTERN (insn)) >= 0)
3904 for (p = NEXT_INSN (prev); p != next; p = NEXT_INSN (p))
3905 if (p != insn && INSN_P (p)
3906 && (recog_memoized (p) < 0
3907 || (extract_insn (p), ! constrain_operands (1))))
3908 {
3909 error_for_asm (insn,
3910 "`asm' operand requires impossible reload");
3911 PUT_CODE (p, NOTE);
3912 NOTE_SOURCE_FILE (p) = 0;
3913 NOTE_LINE_NUMBER (p) = NOTE_INSN_DELETED;
3914 }
3915 }
3916
3917 if (num_eliminable && chain->need_elim)
3918 update_eliminable_offsets ();
3919
3920 /* Any previously reloaded spilled pseudo reg, stored in this insn,
3921 is no longer validly lying around to save a future reload.
3922 Note that this does not detect pseudos that were reloaded
3923 for this insn in order to be stored into
3924 (obeying register constraints). That is correct; such reload
3925 registers ARE still valid. */
3926 note_stores (oldpat, forget_old_reloads_1, NULL);
3927
3928 /* There may have been CLOBBER insns placed after INSN. So scan
3929 between INSN and OLD_NEXT and use them to forget old reloads. */
3930 for (x = NEXT_INSN (insn); x != old_next; x = NEXT_INSN (x))
3931 if (GET_CODE (x) == INSN && GET_CODE (PATTERN (x)) == CLOBBER)
3932 note_stores (PATTERN (x), forget_old_reloads_1, NULL);
3933
3934 #ifdef AUTO_INC_DEC
3935 /* Likewise for regs altered by auto-increment in this insn.
3936 REG_INC notes have been changed by reloading:
3937 find_reloads_address_1 records substitutions for them,
3938 which have been performed by subst_reloads above. */
3939 for (i = n_reloads - 1; i >= 0; i--)
3940 {
3941 rtx in_reg = rld[i].in_reg;
3942 if (in_reg)
3943 {
3944 enum rtx_code code = GET_CODE (in_reg);
3945 /* PRE_INC / PRE_DEC will have the reload register ending up
3946 with the same value as the stack slot, but that doesn't
3947 hold true for POST_INC / POST_DEC. Either we have to
3948 convert the memory access to a true POST_INC / POST_DEC,
3949 or we can't use the reload register for inheritance. */
3950 if ((code == POST_INC || code == POST_DEC)
3951 && TEST_HARD_REG_BIT (reg_reloaded_valid,
3952 REGNO (rld[i].reg_rtx))
3953 /* Make sure it is the inc/dec pseudo, and not
3954 some other (e.g. output operand) pseudo. */
3955 && (reg_reloaded_contents[REGNO (rld[i].reg_rtx)]
3956 == REGNO (XEXP (in_reg, 0))))
3957
3958 {
3959 rtx reload_reg = rld[i].reg_rtx;
3960 enum machine_mode mode = GET_MODE (reload_reg);
3961 int n = 0;
3962 rtx p;
3963
3964 for (p = PREV_INSN (old_next); p != prev; p = PREV_INSN (p))
3965 {
3966 /* We really want to ignore REG_INC notes here, so
3967 use PATTERN (p) as argument to reg_set_p . */
3968 if (reg_set_p (reload_reg, PATTERN (p)))
3969 break;
3970 n = count_occurrences (PATTERN (p), reload_reg, 0);
3971 if (! n)
3972 continue;
3973 if (n == 1)
3974 {
3975 n = validate_replace_rtx (reload_reg,
3976 gen_rtx (code, mode,
3977 reload_reg),
3978 p);
3979
3980 /* We must also verify that the constraints
3981 are met after the replacement. */
3982 extract_insn (p);
3983 if (n)
3984 n = constrain_operands (1);
3985 else
3986 break;
3987
3988 /* If the constraints were not met, then
3989 undo the replacement. */
3990 if (!n)
3991 {
3992 validate_replace_rtx (gen_rtx (code, mode,
3993 reload_reg),
3994 reload_reg, p);
3995 break;
3996 }
3997
3998 }
3999 break;
4000 }
4001 if (n == 1)
4002 {
4003 REG_NOTES (p)
4004 = gen_rtx_EXPR_LIST (REG_INC, reload_reg,
4005 REG_NOTES (p));
4006 /* Mark this as having an output reload so that the
4007 REG_INC processing code below won't invalidate
4008 the reload for inheritance. */
4009 SET_HARD_REG_BIT (reg_is_output_reload,
4010 REGNO (reload_reg));
4011 reg_has_output_reload[REGNO (XEXP (in_reg, 0))] = 1;
4012 }
4013 else
4014 forget_old_reloads_1 (XEXP (in_reg, 0), NULL_RTX,
4015 NULL);
4016 }
4017 else if ((code == PRE_INC || code == PRE_DEC)
4018 && TEST_HARD_REG_BIT (reg_reloaded_valid,
4019 REGNO (rld[i].reg_rtx))
4020 /* Make sure it is the inc/dec pseudo, and not
4021 some other (e.g. output operand) pseudo. */
4022 && (reg_reloaded_contents[REGNO (rld[i].reg_rtx)]
4023 == REGNO (XEXP (in_reg, 0))))
4024 {
4025 SET_HARD_REG_BIT (reg_is_output_reload,
4026 REGNO (rld[i].reg_rtx));
4027 reg_has_output_reload[REGNO (XEXP (in_reg, 0))] = 1;
4028 }
4029 }
4030 }
4031 /* If a pseudo that got a hard register is auto-incremented,
4032 we must purge records of copying it into pseudos without
4033 hard registers. */
4034 for (x = REG_NOTES (insn); x; x = XEXP (x, 1))
4035 if (REG_NOTE_KIND (x) == REG_INC)
4036 {
4037 /* See if this pseudo reg was reloaded in this insn.
4038 If so, its last-reload info is still valid
4039 because it is based on this insn's reload. */
4040 for (i = 0; i < n_reloads; i++)
4041 if (rld[i].out == XEXP (x, 0))
4042 break;
4043
4044 if (i == n_reloads)
4045 forget_old_reloads_1 (XEXP (x, 0), NULL_RTX, NULL);
4046 }
4047 #endif
4048 }
4049 /* A reload reg's contents are unknown after a label. */
4050 if (GET_CODE (insn) == CODE_LABEL)
4051 CLEAR_HARD_REG_SET (reg_reloaded_valid);
4052
4053 /* Don't assume a reload reg is still good after a call insn
4054 if it is a call-used reg. */
4055 else if (GET_CODE (insn) == CALL_INSN)
4056 AND_COMPL_HARD_REG_SET(reg_reloaded_valid, call_used_reg_set);
4057 }
4058
4059 /* Clean up. */
4060 free (reg_last_reload_reg);
4061 free (reg_has_output_reload);
4062 }
4063
4064 /* Discard all record of any value reloaded from X,
4065 or reloaded in X from someplace else;
4066 unless X is an output reload reg of the current insn.
4067
4068 X may be a hard reg (the reload reg)
4069 or it may be a pseudo reg that was reloaded from. */
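/* For example (pseudo and hard register numbers are hypothetical): if
   pseudo 70 is stored into by the current insn, any record that some
   reload register still holds pseudo 70's old value
   (reg_last_reload_reg[70]) must be dropped, unless that register is
   precisely the output reload that establishes the new copy.  */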
4070
4071 static void
4072 forget_old_reloads_1 (x, ignored, data)
4073 rtx x;
4074 rtx ignored ATTRIBUTE_UNUSED;
4075 void *data ATTRIBUTE_UNUSED;
4076 {
4077 unsigned int regno;
4078 unsigned int nr;
4079 int offset = 0;
4080
4081 /* note_stores does give us subregs of hard regs;
4082 subreg_regno_offset will abort if it is not a hard reg. */
4083 while (GET_CODE (x) == SUBREG)
4084 {
4085 offset += subreg_regno_offset (REGNO (SUBREG_REG (x)),
4086 GET_MODE (SUBREG_REG (x)),
4087 SUBREG_BYTE (x),
4088 GET_MODE (x));
4089 x = SUBREG_REG (x);
4090 }
4091
4092 if (GET_CODE (x) != REG)
4093 return;
4094
4095 regno = REGNO (x) + offset;
4096
4097 if (regno >= FIRST_PSEUDO_REGISTER)
4098 nr = 1;
4099 else
4100 {
4101 unsigned int i;
4102
4103 nr = HARD_REGNO_NREGS (regno, GET_MODE (x));
4104 /* Storing into a spilled-reg invalidates its contents.
4105 This can happen if a block-local pseudo is allocated to that reg
4106 and it wasn't spilled because this block's total need is 0.
4107 Then some insn might have an optional reload and use this reg. */
4108 for (i = 0; i < nr; i++)
4109 /* But don't do this if the reg actually serves as an output
4110 reload reg in the current instruction. */
4111 if (n_reloads == 0
4112 || ! TEST_HARD_REG_BIT (reg_is_output_reload, regno + i))
4113 {
4114 CLEAR_HARD_REG_BIT (reg_reloaded_valid, regno + i);
4115 spill_reg_store[regno + i] = 0;
4116 }
4117 }
4118
4119 /* Since value of X has changed,
4120 forget any value previously copied from it. */
4121
4122 while (nr-- > 0)
4123 /* But don't forget a copy if this is the output reload
4124 that establishes the copy's validity. */
4125 if (n_reloads == 0 || reg_has_output_reload[regno + nr] == 0)
4126 reg_last_reload_reg[regno + nr] = 0;
4127 }
4128 \f
4129 /* The following HARD_REG_SETs indicate when each hard register is
4130 used for a reload of various parts of the current insn. */
4131
4132 /* If reg is unavailable for all reloads. */
4133 static HARD_REG_SET reload_reg_unavailable;
4134 /* If reg is in use as a reload reg for a RELOAD_OTHER reload. */
4135 static HARD_REG_SET reload_reg_used;
4136 /* If reg is in use for a RELOAD_FOR_INPUT_ADDRESS reload for operand I. */
4137 static HARD_REG_SET reload_reg_used_in_input_addr[MAX_RECOG_OPERANDS];
4138 /* If reg is in use for a RELOAD_FOR_INPADDR_ADDRESS reload for operand I. */
4139 static HARD_REG_SET reload_reg_used_in_inpaddr_addr[MAX_RECOG_OPERANDS];
4140 /* If reg is in use for a RELOAD_FOR_OUTPUT_ADDRESS reload for operand I. */
4141 static HARD_REG_SET reload_reg_used_in_output_addr[MAX_RECOG_OPERANDS];
4142 /* If reg is in use for a RELOAD_FOR_OUTADDR_ADDRESS reload for operand I. */
4143 static HARD_REG_SET reload_reg_used_in_outaddr_addr[MAX_RECOG_OPERANDS];
4144 /* If reg is in use for a RELOAD_FOR_INPUT reload for operand I. */
4145 static HARD_REG_SET reload_reg_used_in_input[MAX_RECOG_OPERANDS];
4146 /* If reg is in use for a RELOAD_FOR_OUTPUT reload for operand I. */
4147 static HARD_REG_SET reload_reg_used_in_output[MAX_RECOG_OPERANDS];
4148 /* If reg is in use for a RELOAD_FOR_OPERAND_ADDRESS reload. */
4149 static HARD_REG_SET reload_reg_used_in_op_addr;
4150 /* If reg is in use for a RELOAD_FOR_OPADDR_ADDR reload. */
4151 static HARD_REG_SET reload_reg_used_in_op_addr_reload;
4152 /* If reg is in use for a RELOAD_FOR_INSN reload. */
4153 static HARD_REG_SET reload_reg_used_in_insn;
4154 /* If reg is in use for a RELOAD_FOR_OTHER_ADDRESS reload. */
4155 static HARD_REG_SET reload_reg_used_in_other_addr;
4156
4157 /* If reg is in use as a reload reg for any sort of reload. */
4158 static HARD_REG_SET reload_reg_used_at_all;
4159
4160 /* If reg is in use as an inherited reload. We just mark the first register
4161 in the group. */
4162 static HARD_REG_SET reload_reg_used_for_inherit;
4163
4164 /* Records which hard regs are used in any way, either as explicit use or
4165 by being allocated to a pseudo during any point of the current insn. */
4166 static HARD_REG_SET reg_used_in_insn;
4167
4168 /* Mark reg REGNO as in use for a reload of the sort spec'd by OPNUM and
4169 TYPE. MODE is used to indicate how many consecutive regs are
4170 actually used. */
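/* For instance (a sketch; the numbers depend on the target): marking
   hard reg 10 for a DFmode reload on a machine where DFmode needs two
   word-sized registers sets bits 10 and 11 both in the per-type set
   chosen below and in reload_reg_used_at_all.  */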
4171
4172 static void
4173 mark_reload_reg_in_use (regno, opnum, type, mode)
4174 unsigned int regno;
4175 int opnum;
4176 enum reload_type type;
4177 enum machine_mode mode;
4178 {
4179 unsigned int nregs = HARD_REGNO_NREGS (regno, mode);
4180 unsigned int i;
4181
4182 for (i = regno; i < nregs + regno; i++)
4183 {
4184 switch (type)
4185 {
4186 case RELOAD_OTHER:
4187 SET_HARD_REG_BIT (reload_reg_used, i);
4188 break;
4189
4190 case RELOAD_FOR_INPUT_ADDRESS:
4191 SET_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], i);
4192 break;
4193
4194 case RELOAD_FOR_INPADDR_ADDRESS:
4195 SET_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], i);
4196 break;
4197
4198 case RELOAD_FOR_OUTPUT_ADDRESS:
4199 SET_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], i);
4200 break;
4201
4202 case RELOAD_FOR_OUTADDR_ADDRESS:
4203 SET_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], i);
4204 break;
4205
4206 case RELOAD_FOR_OPERAND_ADDRESS:
4207 SET_HARD_REG_BIT (reload_reg_used_in_op_addr, i);
4208 break;
4209
4210 case RELOAD_FOR_OPADDR_ADDR:
4211 SET_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, i);
4212 break;
4213
4214 case RELOAD_FOR_OTHER_ADDRESS:
4215 SET_HARD_REG_BIT (reload_reg_used_in_other_addr, i);
4216 break;
4217
4218 case RELOAD_FOR_INPUT:
4219 SET_HARD_REG_BIT (reload_reg_used_in_input[opnum], i);
4220 break;
4221
4222 case RELOAD_FOR_OUTPUT:
4223 SET_HARD_REG_BIT (reload_reg_used_in_output[opnum], i);
4224 break;
4225
4226 case RELOAD_FOR_INSN:
4227 SET_HARD_REG_BIT (reload_reg_used_in_insn, i);
4228 break;
4229 }
4230
4231 SET_HARD_REG_BIT (reload_reg_used_at_all, i);
4232 }
4233 }
4234
4235 /* Similarly, but show REGNO is no longer in use for a reload. */
4236
4237 static void
4238 clear_reload_reg_in_use (regno, opnum, type, mode)
4239 unsigned int regno;
4240 int opnum;
4241 enum reload_type type;
4242 enum machine_mode mode;
4243 {
4244 unsigned int nregs = HARD_REGNO_NREGS (regno, mode);
4245 unsigned int start_regno, end_regno, r;
4246 int i;
4247 /* A complication is that for some reload types, inheritance might
4248 allow multiple reloads of the same type to share a reload register.
4249 We set check_opnum if we have to check only reloads with the same
4250 operand number, and check_any if we have to check all reloads. */
4251 int check_opnum = 0;
4252 int check_any = 0;
4253 HARD_REG_SET *used_in_set;
4254
4255 switch (type)
4256 {
4257 case RELOAD_OTHER:
4258 used_in_set = &reload_reg_used;
4259 break;
4260
4261 case RELOAD_FOR_INPUT_ADDRESS:
4262 used_in_set = &reload_reg_used_in_input_addr[opnum];
4263 break;
4264
4265 case RELOAD_FOR_INPADDR_ADDRESS:
4266 check_opnum = 1;
4267 used_in_set = &reload_reg_used_in_inpaddr_addr[opnum];
4268 break;
4269
4270 case RELOAD_FOR_OUTPUT_ADDRESS:
4271 used_in_set = &reload_reg_used_in_output_addr[opnum];
4272 break;
4273
4274 case RELOAD_FOR_OUTADDR_ADDRESS:
4275 check_opnum = 1;
4276 used_in_set = &reload_reg_used_in_outaddr_addr[opnum];
4277 break;
4278
4279 case RELOAD_FOR_OPERAND_ADDRESS:
4280 used_in_set = &reload_reg_used_in_op_addr;
4281 break;
4282
4283 case RELOAD_FOR_OPADDR_ADDR:
4284 check_any = 1;
4285 used_in_set = &reload_reg_used_in_op_addr_reload;
4286 break;
4287
4288 case RELOAD_FOR_OTHER_ADDRESS:
4289 used_in_set = &reload_reg_used_in_other_addr;
4290 check_any = 1;
4291 break;
4292
4293 case RELOAD_FOR_INPUT:
4294 used_in_set = &reload_reg_used_in_input[opnum];
4295 break;
4296
4297 case RELOAD_FOR_OUTPUT:
4298 used_in_set = &reload_reg_used_in_output[opnum];
4299 break;
4300
4301 case RELOAD_FOR_INSN:
4302 used_in_set = &reload_reg_used_in_insn;
4303 break;
4304 default:
4305 abort ();
4306 }
4307 /* We resolve conflicts with remaining reloads of the same type by
4308 excluding the intervals of reload registers used by them from the
4309 interval of freed reload registers. Since we only keep track of
4310 one set of interval bounds, we might have to exclude somewhat
4311 more than what would be necessary if we used a HARD_REG_SET here.
4312 But this should only happen very infrequently, so there should
4313 be no reason to worry about it. */
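/* Worked example (hypothetical register numbers): suppose we are freeing
   hard regs 8..11 (start_regno = 8, end_regno = 12) and another reload
   of the same type still occupies regs 9..10.  The conflict check below
   trims the freed interval to just reg 8, so reg 11 stays marked in use
   even though it does not actually conflict; this is the conservative
   behavior described above.  */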
4314
4315 start_regno = regno;
4316 end_regno = regno + nregs;
4317 if (check_opnum || check_any)
4318 {
4319 for (i = n_reloads - 1; i >= 0; i--)
4320 {
4321 if (rld[i].when_needed == type
4322 && (check_any || rld[i].opnum == opnum)
4323 && rld[i].reg_rtx)
4324 {
4325 unsigned int conflict_start = true_regnum (rld[i].reg_rtx);
4326 unsigned int conflict_end
4327 = (conflict_start
4328 + HARD_REGNO_NREGS (conflict_start, rld[i].mode));
4329
4330 /* If there is an overlap with the first to-be-freed register,
4331 adjust the interval start. */
4332 if (conflict_start <= start_regno && conflict_end > start_regno)
4333 start_regno = conflict_end;
4334 /* Otherwise, if there is a conflict with one of the other
4335 to-be-freed registers, adjust the interval end. */
4336 if (conflict_start > start_regno && conflict_start < end_regno)
4337 end_regno = conflict_start;
4338 }
4339 }
4340 }
4341
4342 for (r = start_regno; r < end_regno; r++)
4343 CLEAR_HARD_REG_BIT (*used_in_set, r);
4344 }
4345
4346 /* 1 if reg REGNO is free as a reload reg for a reload of the sort
4347 specified by OPNUM and TYPE. */
4348
4349 static int
4350 reload_reg_free_p (regno, opnum, type)
4351 unsigned int regno;
4352 int opnum;
4353 enum reload_type type;
4354 {
4355 int i;
4356
4357 /* In use for a RELOAD_OTHER means it's not available for anything. */
4358 if (TEST_HARD_REG_BIT (reload_reg_used, regno)
4359 || TEST_HARD_REG_BIT (reload_reg_unavailable, regno))
4360 return 0;
4361
4362 switch (type)
4363 {
4364 case RELOAD_OTHER:
4365 /* In use for anything means we can't use it for RELOAD_OTHER. */
4366 if (TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno)
4367 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4368 || TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
4369 return 0;
4370
4371 for (i = 0; i < reload_n_operands; i++)
4372 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4373 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
4374 || TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4375 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4376 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4377 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4378 return 0;
4379
4380 return 1;
4381
4382 case RELOAD_FOR_INPUT:
4383 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4384 || TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno))
4385 return 0;
4386
4387 if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
4388 return 0;
4389
4390 /* If it is used for some other input, can't use it. */
4391 for (i = 0; i < reload_n_operands; i++)
4392 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4393 return 0;
4394
4395 /* If it is used in a later operand's address, can't use it. */
4396 for (i = opnum + 1; i < reload_n_operands; i++)
4397 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4398 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
4399 return 0;
4400
4401 return 1;
4402
4403 case RELOAD_FOR_INPUT_ADDRESS:
4404 /* Can't use a register if it is used for an input address for this
4405 operand or used as an input in an earlier one. */
4406 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[opnum], regno)
4407 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], regno))
4408 return 0;
4409
4410 for (i = 0; i < opnum; i++)
4411 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4412 return 0;
4413
4414 return 1;
4415
4416 case RELOAD_FOR_INPADDR_ADDRESS:
4417 /* Can't use a register if it is used for an input address
4418 for this operand or used as an input in an earlier
4419 one. */
4420 if (TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[opnum], regno))
4421 return 0;
4422
4423 for (i = 0; i < opnum; i++)
4424 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4425 return 0;
4426
4427 return 1;
4428
4429 case RELOAD_FOR_OUTPUT_ADDRESS:
4430 /* Can't use a register if it is used for an output address for this
4431 operand or used as an output in this or a later operand. */
4432 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[opnum], regno))
4433 return 0;
4434
4435 for (i = opnum; i < reload_n_operands; i++)
4436 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4437 return 0;
4438
4439 return 1;
4440
4441 case RELOAD_FOR_OUTADDR_ADDRESS:
4442 /* Can't use a register if it is used for an output address
4443 for this operand or used as an output in this or a
4444 later operand. */
4445 if (TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[opnum], regno))
4446 return 0;
4447
4448 for (i = opnum; i < reload_n_operands; i++)
4449 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4450 return 0;
4451
4452 return 1;
4453
4454 case RELOAD_FOR_OPERAND_ADDRESS:
4455 for (i = 0; i < reload_n_operands; i++)
4456 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4457 return 0;
4458
4459 return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4460 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4461
4462 case RELOAD_FOR_OPADDR_ADDR:
4463 for (i = 0; i < reload_n_operands; i++)
4464 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4465 return 0;
4466
4467 return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno));
4468
4469 case RELOAD_FOR_OUTPUT:
4470 /* This cannot share a register with RELOAD_FOR_INSN reloads, other
4471 outputs, or an operand address for this or an earlier output. */
4472 if (TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno))
4473 return 0;
4474
4475 for (i = 0; i < reload_n_operands; i++)
4476 if (TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4477 return 0;
4478
4479 for (i = 0; i <= opnum; i++)
4480 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4481 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno))
4482 return 0;
4483
4484 return 1;
4485
4486 case RELOAD_FOR_INSN:
4487 for (i = 0; i < reload_n_operands; i++)
4488 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno)
4489 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4490 return 0;
4491
4492 return (! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4493 && ! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno));
4494
4495 case RELOAD_FOR_OTHER_ADDRESS:
4496 return ! TEST_HARD_REG_BIT (reload_reg_used_in_other_addr, regno);
4497 }
4498 abort ();
4499 }
4500
4501 /* Return 1 if the value in reload reg REGNO, as used by a reload
4502 needed for the part of the insn specified by OPNUM and TYPE,
4503 is still available in REGNO at the end of the insn.
4504
4505 We can assume that the reload reg was already tested for availability
4506 at the time it is needed, and we should not check this again,
4507 in case the reg has already been marked in use. */
4508
4509 static int
4510 reload_reg_reaches_end_p (regno, opnum, type)
4511 unsigned int regno;
4512 int opnum;
4513 enum reload_type type;
4514 {
4515 int i;
4516
4517 switch (type)
4518 {
4519 case RELOAD_OTHER:
4520 /* Since a RELOAD_OTHER reload claims the reg for the entire insn,
4521 its value must reach the end. */
4522 return 1;
4523
4524 /* If this use is for part of the insn,
4525 its value reaches if no subsequent part uses the same register.
4526 Just like the above function, don't try to do this with lots
4527 of fallthroughs. */
4528
4529 case RELOAD_FOR_OTHER_ADDRESS:
4530 /* Here we check for everything else, since these don't conflict
4531 with anything else and everything comes later. */
4532
4533 for (i = 0; i < reload_n_operands; i++)
4534 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4535 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4536 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno)
4537 || TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4538 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
4539 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4540 return 0;
4541
4542 return (! TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4543 && ! TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4544 && ! TEST_HARD_REG_BIT (reload_reg_used, regno));
4545
4546 case RELOAD_FOR_INPUT_ADDRESS:
4547 case RELOAD_FOR_INPADDR_ADDRESS:
4548 /* Similar, except that we check only for this and subsequent inputs
4549 and the address of only subsequent inputs and we do not need
4550 to check for RELOAD_OTHER objects since they are known not to
4551 conflict. */
4552
4553 for (i = opnum; i < reload_n_operands; i++)
4554 if (TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4555 return 0;
4556
4557 for (i = opnum + 1; i < reload_n_operands; i++)
4558 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4559 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno))
4560 return 0;
4561
4562 for (i = 0; i < reload_n_operands; i++)
4563 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4564 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4565 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4566 return 0;
4567
4568 if (TEST_HARD_REG_BIT (reload_reg_used_in_op_addr_reload, regno))
4569 return 0;
4570
4571 return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4572 && !TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4573 && !TEST_HARD_REG_BIT (reload_reg_used, regno));
4574
4575 case RELOAD_FOR_INPUT:
4576 /* Similar to input address, except we start at the next operand for
4577 both input and input address and we do not check for
4578 RELOAD_FOR_OPERAND_ADDRESS and RELOAD_FOR_INSN since these
4579 would conflict. */
4580
4581 for (i = opnum + 1; i < reload_n_operands; i++)
4582 if (TEST_HARD_REG_BIT (reload_reg_used_in_input_addr[i], regno)
4583 || TEST_HARD_REG_BIT (reload_reg_used_in_inpaddr_addr[i], regno)
4584 || TEST_HARD_REG_BIT (reload_reg_used_in_input[i], regno))
4585 return 0;
4586
4587 /* ... fall through ... */
4588
4589 case RELOAD_FOR_OPERAND_ADDRESS:
4590 /* Check outputs and their addresses. */
4591
4592 for (i = 0; i < reload_n_operands; i++)
4593 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4594 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4595 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4596 return 0;
4597
4598 return (!TEST_HARD_REG_BIT (reload_reg_used, regno));
4599
4600 case RELOAD_FOR_OPADDR_ADDR:
4601 for (i = 0; i < reload_n_operands; i++)
4602 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4603 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno)
4604 || TEST_HARD_REG_BIT (reload_reg_used_in_output[i], regno))
4605 return 0;
4606
4607 return (!TEST_HARD_REG_BIT (reload_reg_used_in_op_addr, regno)
4608 && !TEST_HARD_REG_BIT (reload_reg_used_in_insn, regno)
4609 && !TEST_HARD_REG_BIT (reload_reg_used, regno));
4610
4611 case RELOAD_FOR_INSN:
4612 /* These conflict with other outputs with RELOAD_OTHER. So
4613 we need only check for output addresses. */
4614
4615 opnum = -1;
4616
4617 /* ... fall through ... */
4618
4619 case RELOAD_FOR_OUTPUT:
4620 case RELOAD_FOR_OUTPUT_ADDRESS:
4621 case RELOAD_FOR_OUTADDR_ADDRESS:
4622 /* We already know these can't conflict with a later output. So the
4623 only thing to check are later output addresses. */
4624 for (i = opnum + 1; i < reload_n_operands; i++)
4625 if (TEST_HARD_REG_BIT (reload_reg_used_in_output_addr[i], regno)
4626 || TEST_HARD_REG_BIT (reload_reg_used_in_outaddr_addr[i], regno))
4627 return 0;
4628
4629 return 1;
4630 }
4631
4632 abort ();
4633 }
4634 \f
4635 /* Return 1 if the reloads denoted by R1 and R2 cannot share a register.
4636 Return 0 otherwise.
4637
4638 This function uses the same algorithm as reload_reg_free_p above. */
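/* Example reading of the table below (operand numbers are hypothetical):
   a RELOAD_FOR_INPUT reload for operand 1 conflicts with a
   RELOAD_FOR_INPUT_ADDRESS reload for operand 2, because the later
   operand's address is computed while the earlier input's reload
   register is still live, but not with an input address reload for
   operand 0, which is finished before input 1 is loaded.  */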
4639
4640 int
4641 reloads_conflict (r1, r2)
4642 int r1, r2;
4643 {
4644 enum reload_type r1_type = rld[r1].when_needed;
4645 enum reload_type r2_type = rld[r2].when_needed;
4646 int r1_opnum = rld[r1].opnum;
4647 int r2_opnum = rld[r2].opnum;
4648
4649 /* RELOAD_OTHER conflicts with everything. */
4650 if (r2_type == RELOAD_OTHER)
4651 return 1;
4652
4653 /* Otherwise, check conflicts differently for each type. */
4654
4655 switch (r1_type)
4656 {
4657 case RELOAD_FOR_INPUT:
4658 return (r2_type == RELOAD_FOR_INSN
4659 || r2_type == RELOAD_FOR_OPERAND_ADDRESS
4660 || r2_type == RELOAD_FOR_OPADDR_ADDR
4661 || r2_type == RELOAD_FOR_INPUT
4662 || ((r2_type == RELOAD_FOR_INPUT_ADDRESS
4663 || r2_type == RELOAD_FOR_INPADDR_ADDRESS)
4664 && r2_opnum > r1_opnum));
4665
4666 case RELOAD_FOR_INPUT_ADDRESS:
4667 return ((r2_type == RELOAD_FOR_INPUT_ADDRESS && r1_opnum == r2_opnum)
4668 || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));
4669
4670 case RELOAD_FOR_INPADDR_ADDRESS:
4671 return ((r2_type == RELOAD_FOR_INPADDR_ADDRESS && r1_opnum == r2_opnum)
4672 || (r2_type == RELOAD_FOR_INPUT && r2_opnum < r1_opnum));
4673
4674 case RELOAD_FOR_OUTPUT_ADDRESS:
4675 return ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS && r2_opnum == r1_opnum)
4676 || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum >= r1_opnum));
4677
4678 case RELOAD_FOR_OUTADDR_ADDRESS:
4679 return ((r2_type == RELOAD_FOR_OUTADDR_ADDRESS && r2_opnum == r1_opnum)
4680 || (r2_type == RELOAD_FOR_OUTPUT && r2_opnum >= r1_opnum));
4681
4682 case RELOAD_FOR_OPERAND_ADDRESS:
4683 return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_INSN
4684 || r2_type == RELOAD_FOR_OPERAND_ADDRESS);
4685
4686 case RELOAD_FOR_OPADDR_ADDR:
4687 return (r2_type == RELOAD_FOR_INPUT
4688 || r2_type == RELOAD_FOR_OPADDR_ADDR);
4689
4690 case RELOAD_FOR_OUTPUT:
4691 return (r2_type == RELOAD_FOR_INSN || r2_type == RELOAD_FOR_OUTPUT
4692 || ((r2_type == RELOAD_FOR_OUTPUT_ADDRESS
4693 || r2_type == RELOAD_FOR_OUTADDR_ADDRESS)
4694 && r2_opnum <= r1_opnum));
4695
4696 case RELOAD_FOR_INSN:
4697 return (r2_type == RELOAD_FOR_INPUT || r2_type == RELOAD_FOR_OUTPUT
4698 || r2_type == RELOAD_FOR_INSN
4699 || r2_type == RELOAD_FOR_OPERAND_ADDRESS);
4700
4701 case RELOAD_FOR_OTHER_ADDRESS:
4702 return r2_type == RELOAD_FOR_OTHER_ADDRESS;
4703
4704 case RELOAD_OTHER:
4705 return 1;
4706
4707 default:
4708 abort ();
4709 }
4710 }
4711 \f
4712 /* Indexed by reload number, 1 if incoming value
4713 inherited from previous insns. */
4714 char reload_inherited[MAX_RELOADS];
4715
4716 /* For an inherited reload, this is the insn the reload was inherited from,
4717 if we know it. Otherwise, this is 0. */
4718 rtx reload_inheritance_insn[MAX_RELOADS];
4719
4720 /* If non-zero, this is a place to get the value of the reload,
4721 rather than using reload_in. */
4722 rtx reload_override_in[MAX_RELOADS];
4723
4724 /* For each reload, the hard register number of the register used,
4725 or -1 if we did not need a register for this reload. */
4726 int reload_spill_index[MAX_RELOADS];
4727
4728 /* Subroutine of free_for_value_p, used to check a single register.
4729 START_REGNO is the starting regno of the full reload register
4730 (possibly comprising multiple hard registers) that we are considering. */
4731
4732 static int
4733 reload_reg_free_for_value_p (start_regno, regno, opnum, type, value, out,
4734 reloadnum, ignore_address_reloads)
4735 int start_regno, regno;
4736 int opnum;
4737 enum reload_type type;
4738 rtx value, out;
4739 int reloadnum;
4740 int ignore_address_reloads;
4741 {
4742 int time1;
4743 /* Set if we see an input reload that must not share its reload register
4744 with any new earlyclobber, but might otherwise share the reload
4745 register with an output or input-output reload. */
4746 int check_earlyclobber = 0;
4747 int i;
4748 int copy = 0;
4749
4750 if (TEST_HARD_REG_BIT (reload_reg_unavailable, regno))
4751 return 0;
4752
4753 if (out == const0_rtx)
4754 {
4755 copy = 1;
4756 out = NULL_RTX;
4757 }
4758
4759 /* We use some pseudo 'time' value to check if the lifetimes of the
4760 new register use would overlap with the one of a previous reload
4761 that is not read-only or uses a different value.
4762 The 'time' used doesn't have to be linear in any shape or form, just
4763 monotonic.
4764 Some reload types use different 'buckets' for each operand.
4765 So there are MAX_RECOG_OPERANDS different time values for each
4766 such reload type.
4767 We compute TIME1 as the time when the register for the prospective
4768 new reload ceases to be live, and TIME2 for each existing
4769 reload as the time when the reload register of that reload
4770 becomes live.
4771 Where there is little to be gained by exact lifetime calculations,
4772 we just make conservative assumptions, i.e. a longer lifetime;
4773 this is done in the 'default:' cases. */
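/* Worked example of the time values (illustrative only; the exact
   numbers matter only relative to one another, and MAX_RECOG_OPERANDS
   is assumed to be 30 here): a RELOAD_FOR_INPUT_ADDRESS reload for
   operand 2 gets time1 = 2 * 4 + 3 = 11, a non-copy RELOAD_FOR_INPUT
   reload stays live until MAX_RECOG_OPERANDS * 4 + 3 = 123, and the
   RELOAD_FOR_OUTPUT reloads become live one tick later, at
   MAX_RECOG_OPERANDS * 4 + 4 = 124.  */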
4774 switch (type)
4775 {
4776 case RELOAD_FOR_OTHER_ADDRESS:
4777 /* RELOAD_FOR_OTHER_ADDRESS conflicts with RELOAD_OTHER reloads. */
4778 time1 = copy ? 0 : 1;
4779 break;
4780 case RELOAD_OTHER:
4781 time1 = copy ? 1 : MAX_RECOG_OPERANDS * 5 + 5;
4782 break;
4783 /* For each input, we may have a sequence of RELOAD_FOR_INPADDR_ADDRESS,
4784 RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_INPUT. By adding 0 / 1 / 2 ,
4785 respectively, to the time values for these, we get distinct time
4786 values. To get distinct time values for each operand, we have to
4787 multiply opnum by at least three. We round that up to four because
4788 multiply by four is often cheaper. */
4789 case RELOAD_FOR_INPADDR_ADDRESS:
4790 time1 = opnum * 4 + 2;
4791 break;
4792 case RELOAD_FOR_INPUT_ADDRESS:
4793 time1 = opnum * 4 + 3;
4794 break;
4795 case RELOAD_FOR_INPUT:
4796 /* All RELOAD_FOR_INPUT reloads remain live till the instruction
4797 executes (inclusive). */
4798 time1 = copy ? opnum * 4 + 4 : MAX_RECOG_OPERANDS * 4 + 3;
4799 break;
4800 case RELOAD_FOR_OPADDR_ADDR:
4801 /* opnum * 4 + 4
4802 <= (MAX_RECOG_OPERANDS - 1) * 4 + 4 == MAX_RECOG_OPERANDS * 4 */
4803 time1 = MAX_RECOG_OPERANDS * 4 + 1;
4804 break;
4805 case RELOAD_FOR_OPERAND_ADDRESS:
4806 /* RELOAD_FOR_OPERAND_ADDRESS reloads are live even while the insn
4807 is executed. */
4808 time1 = copy ? MAX_RECOG_OPERANDS * 4 + 2 : MAX_RECOG_OPERANDS * 4 + 3;
4809 break;
4810 case RELOAD_FOR_OUTADDR_ADDRESS:
4811 time1 = MAX_RECOG_OPERANDS * 4 + 4 + opnum;
4812 break;
4813 case RELOAD_FOR_OUTPUT_ADDRESS:
4814 time1 = MAX_RECOG_OPERANDS * 4 + 5 + opnum;
4815 break;
4816 default:
4817 time1 = MAX_RECOG_OPERANDS * 5 + 5;
4818 }
4819
4820 for (i = 0; i < n_reloads; i++)
4821 {
4822 rtx reg = rld[i].reg_rtx;
4823 if (reg && GET_CODE (reg) == REG
4824 && ((unsigned) regno - true_regnum (reg)
4825 <= HARD_REGNO_NREGS (REGNO (reg), GET_MODE (reg)) - (unsigned)1)
4826 && i != reloadnum)
4827 {
4828 rtx other_input = rld[i].in;
4829
4830 /* If the other reload loads the same input value, that
4831 will not cause a conflict only if it's loading it into
4832 the same register. */
4833 if (true_regnum (reg) != start_regno)
4834 other_input = NULL_RTX;
4835 if (! other_input || ! rtx_equal_p (other_input, value)
4836 || rld[i].out || out)
4837 {
4838 int time2;
4839 switch (rld[i].when_needed)
4840 {
4841 case RELOAD_FOR_OTHER_ADDRESS:
4842 time2 = 0;
4843 break;
4844 case RELOAD_FOR_INPADDR_ADDRESS:
4845 /* find_reloads makes sure that a
4846 RELOAD_FOR_{INP,OP,OUT}ADDR_ADDRESS reload is only used
4847 by at most one - the first -
4848 RELOAD_FOR_{INPUT,OPERAND,OUTPUT}_ADDRESS . If the
4849 address reload is inherited, the address address reload
4850 goes away, so we can ignore this conflict. */
4851 if (type == RELOAD_FOR_INPUT_ADDRESS && reloadnum == i + 1
4852 && ignore_address_reloads
4853 /* Unless the RELOAD_FOR_INPUT is an auto_inc expression.
4854 Then the address address is still needed to store
4855 back the new address. */
4856 && ! rld[reloadnum].out)
4857 continue;
4858 /* Likewise, if a RELOAD_FOR_INPUT can inherit a value, its
4859 RELOAD_FOR_INPUT_ADDRESS / RELOAD_FOR_INPADDR_ADDRESS
4860 reloads go away. */
4861 if (type == RELOAD_FOR_INPUT && opnum == rld[i].opnum
4862 && ignore_address_reloads
4863 /* Unless we are reloading an auto_inc expression. */
4864 && ! rld[reloadnum].out)
4865 continue;
4866 time2 = rld[i].opnum * 4 + 2;
4867 break;
4868 case RELOAD_FOR_INPUT_ADDRESS:
4869 if (type == RELOAD_FOR_INPUT && opnum == rld[i].opnum
4870 && ignore_address_reloads
4871 && ! rld[reloadnum].out)
4872 continue;
4873 time2 = rld[i].opnum * 4 + 3;
4874 break;
4875 case RELOAD_FOR_INPUT:
4876 time2 = rld[i].opnum * 4 + 4;
4877 check_earlyclobber = 1;
4878 break;
4879 /* rld[i].opnum * 4 + 4 <= (MAX_RECOG_OPERANDS - 1) * 4 + 4
4880 == MAX_RECOG_OPERANDS * 4 */
4881 case RELOAD_FOR_OPADDR_ADDR:
4882 if (type == RELOAD_FOR_OPERAND_ADDRESS && reloadnum == i + 1
4883 && ignore_address_reloads
4884 && ! rld[reloadnum].out)
4885 continue;
4886 time2 = MAX_RECOG_OPERANDS * 4 + 1;
4887 break;
4888 case RELOAD_FOR_OPERAND_ADDRESS:
4889 time2 = MAX_RECOG_OPERANDS * 4 + 2;
4890 check_earlyclobber = 1;
4891 break;
4892 case RELOAD_FOR_INSN:
4893 time2 = MAX_RECOG_OPERANDS * 4 + 3;
4894 break;
4895 case RELOAD_FOR_OUTPUT:
4896 /* All RELOAD_FOR_OUTPUT reloads become live just after the
4897 instruction is executed. */
4898 time2 = MAX_RECOG_OPERANDS * 4 + 4;
4899 break;
4900 /* The first RELOAD_FOR_OUTADDR_ADDRESS reload conflicts with
4901 the RELOAD_FOR_OUTPUT reloads, so assign it the same time
4902 value. */
4903 case RELOAD_FOR_OUTADDR_ADDRESS:
4904 if (type == RELOAD_FOR_OUTPUT_ADDRESS && reloadnum == i + 1
4905 && ignore_address_reloads
4906 && ! rld[reloadnum].out)
4907 continue;
4908 time2 = MAX_RECOG_OPERANDS * 4 + 4 + rld[i].opnum;
4909 break;
4910 case RELOAD_FOR_OUTPUT_ADDRESS:
4911 time2 = MAX_RECOG_OPERANDS * 4 + 5 + rld[i].opnum;
4912 break;
4913 case RELOAD_OTHER:
4914 /* If there is no conflict in the input part, handle this
4915 like an output reload. */
4916 if (! rld[i].in || rtx_equal_p (other_input, value))
4917 {
4918 time2 = MAX_RECOG_OPERANDS * 4 + 4;
4919 /* Earlyclobbered outputs must conflict with inputs. */
4920 if (earlyclobber_operand_p (rld[i].out))
4921 time2 = MAX_RECOG_OPERANDS * 4 + 3;
4922
4923 break;
4924 }
4925 time2 = 1;
4926 /* RELOAD_OTHER might be live beyond instruction execution,
4927 but this is not obvious when we set time2 = 1. So check
4928 here if there might be a problem with the new reload
4929 clobbering the register used by the RELOAD_OTHER. */
4930 if (out)
4931 return 0;
4932 break;
4933 default:
4934 return 0;
4935 }
4936 if ((time1 >= time2
4937 && (! rld[i].in || rld[i].out
4938 || ! rtx_equal_p (other_input, value)))
4939 || (out && rld[reloadnum].out_reg
4940 && time2 >= MAX_RECOG_OPERANDS * 4 + 3))
4941 return 0;
4942 }
4943 }
4944 }
4945
4946 /* Earlyclobbered outputs must conflict with inputs. */
4947 if (check_earlyclobber && out && earlyclobber_operand_p (out))
4948 return 0;
4949
4950 return 1;
4951 }
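
/* The following is a minimal illustrative sketch, not used by the pass:
   it restates the "time line" encoded by the switch above, in which
   reloads become live in a fixed order - other-address reloads first,
   then the per-operand input address and input reloads, then the
   operand-address reloads and the insn itself, and finally the output
   side.  The constants simply mirror the switch; treat this as
   documentation of that ordering rather than as a definitive API.  */

static int conceptual_reload_time PARAMS ((enum reload_type, int)) ATTRIBUTE_UNUSED;

static int
conceptual_reload_time (type, opnum)
     enum reload_type type;
     int opnum;
{
  switch (type)
    {
    case RELOAD_FOR_OTHER_ADDRESS:
      return 0;
    case RELOAD_FOR_INPADDR_ADDRESS:
      return opnum * 4 + 2;
    case RELOAD_FOR_INPUT_ADDRESS:
      return opnum * 4 + 3;
    case RELOAD_FOR_INPUT:
      return opnum * 4 + 4;
    case RELOAD_FOR_OPADDR_ADDR:
      return MAX_RECOG_OPERANDS * 4 + 1;
    case RELOAD_FOR_OPERAND_ADDRESS:
      return MAX_RECOG_OPERANDS * 4 + 2;
    case RELOAD_FOR_INSN:
      return MAX_RECOG_OPERANDS * 4 + 3;
    case RELOAD_FOR_OUTPUT:
      return MAX_RECOG_OPERANDS * 4 + 4;
    case RELOAD_FOR_OUTADDR_ADDRESS:
      return MAX_RECOG_OPERANDS * 4 + 4 + opnum;
    case RELOAD_FOR_OUTPUT_ADDRESS:
      return MAX_RECOG_OPERANDS * 4 + 5 + opnum;
    default:
      return 1;
    }
}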
4952
4953 /* Return 1 if the value in reload reg REGNO, as used by a reload
4954 needed for the part of the insn specified by OPNUM and TYPE,
4955 may be used to load VALUE into it.
4956
4957 MODE is the mode in which the register is used, this is needed to
4958 determine how many hard regs to test.
4959
4960 Other read-only reloads with the same value do not conflict
4961 unless OUT is non-zero and these other reloads have to live while
4962 output reloads live.
4963 If OUT is CONST0_RTX, this is a special case: it means that the
4964 test should not be for using register REGNO as reload register, but
4965 for copying from register REGNO into the reload register.
4966
4967 RELOADNUM is the number of the reload we want to load this value for;
4968 a reload does not conflict with itself.
4969
4970    When IGNORE_ADDRESS_RELOADS is set, we cannot have conflicts with
4971 reloads that load an address for the very reload we are considering.
4972
4973 The caller has to make sure that there is no conflict with the return
4974 register. */
4975
4976 static int
4977 free_for_value_p (regno, mode, opnum, type, value, out, reloadnum,
4978 ignore_address_reloads)
4979 int regno;
4980 enum machine_mode mode;
4981 int opnum;
4982 enum reload_type type;
4983 rtx value, out;
4984 int reloadnum;
4985 int ignore_address_reloads;
4986 {
4987 int nregs = HARD_REGNO_NREGS (regno, mode);
4988 while (nregs-- > 0)
4989 if (! reload_reg_free_for_value_p (regno, regno + nregs, opnum, type,
4990 value, out, reloadnum,
4991 ignore_address_reloads))
4992 return 0;
4993 return 1;
4994 }
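
/* As a usage sketch: the inheritance code in choose_reload_regs below
   passes const0_rtx for OUT when it only wants to know whether hard reg
   I may be copied from, not used as the reload register itself:

       free_for_value_p (i, rld[r].mode, rld[r].opnum,
			 rld[r].when_needed, rld[r].in, const0_rtx, r, 1)

   This mirrors an actual call further down; it is shown here only to
   illustrate the CONST0_RTX special case documented above.  */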
4995
4996 /* Determine whether the reload reg X overlaps any rtx'es used for
4997 overriding inheritance. Return nonzero if so. */
4998
4999 static int
5000 conflicts_with_override (x)
5001 rtx x;
5002 {
5003 int i;
5004 for (i = 0; i < n_reloads; i++)
5005 if (reload_override_in[i]
5006 && reg_overlap_mentioned_p (x, reload_override_in[i]))
5007 return 1;
5008 return 0;
5009 }
5010 \f
5011 /* Give an error message saying we failed to find a reload for INSN,
5012 and clear out reload R. */
5013 static void
5014 failed_reload (insn, r)
5015 rtx insn;
5016 int r;
5017 {
5018 if (asm_noperands (PATTERN (insn)) < 0)
5019 /* It's the compiler's fault. */
5020 fatal_insn ("Could not find a spill register", insn);
5021
5022 /* It's the user's fault; the operand's mode and constraint
5023 don't match. Disable this reload so we don't crash in final. */
5024 error_for_asm (insn,
5025 "`asm' operand constraint incompatible with operand size");
5026 rld[r].in = 0;
5027 rld[r].out = 0;
5028 rld[r].reg_rtx = 0;
5029 rld[r].optional = 1;
5030 rld[r].secondary_p = 1;
5031 }
5032
5033 /* I is the index in SPILL_REG_RTX of the reload register we are to allocate
5034 for reload R. If it's valid, get an rtx for it. Return nonzero if
5035 successful. */
5036 static int
5037 set_reload_reg (i, r)
5038 int i, r;
5039 {
5040 int regno;
5041 rtx reg = spill_reg_rtx[i];
5042
5043 if (reg == 0 || GET_MODE (reg) != rld[r].mode)
5044 spill_reg_rtx[i] = reg
5045 = gen_rtx_REG (rld[r].mode, spill_regs[i]);
5046
5047 regno = true_regnum (reg);
5048
5049 /* Detect when the reload reg can't hold the reload mode.
5050      This used to be one `if', but the Sequent compiler can't handle that.  */
5051 if (HARD_REGNO_MODE_OK (regno, rld[r].mode))
5052 {
5053 enum machine_mode test_mode = VOIDmode;
5054 if (rld[r].in)
5055 test_mode = GET_MODE (rld[r].in);
5056 /* If rld[r].in has VOIDmode, it means we will load it
5057 in whatever mode the reload reg has: to wit, rld[r].mode.
5058 We have already tested that for validity. */
5059 /* Aside from that, we need to test that the expressions
5060 to reload from or into have modes which are valid for this
5061 reload register. Otherwise the reload insns would be invalid. */
5062 if (! (rld[r].in != 0 && test_mode != VOIDmode
5063 && ! HARD_REGNO_MODE_OK (regno, test_mode)))
5064 if (! (rld[r].out != 0
5065 && ! HARD_REGNO_MODE_OK (regno, GET_MODE (rld[r].out))))
5066 {
5067 /* The reg is OK. */
5068 last_spill_reg = i;
5069
5070 /* Mark as in use for this insn the reload regs we use
5071 for this. */
5072 mark_reload_reg_in_use (spill_regs[i], rld[r].opnum,
5073 rld[r].when_needed, rld[r].mode);
5074
5075 rld[r].reg_rtx = reg;
5076 reload_spill_index[r] = spill_regs[i];
5077 return 1;
5078 }
5079 }
5080 return 0;
5081 }
5082
5083 /* Find a spill register to use as a reload register for reload R.
5084 LAST_RELOAD is non-zero if this is the last reload for the insn being
5085 processed.
5086
5087 Set rld[R].reg_rtx to the register allocated.
5088
5089 We return 1 if successful, or 0 if we couldn't find a spill reg and
5090 we didn't change anything. */
5091
5092 static int
5093 allocate_reload_reg (chain, r, last_reload)
5094 struct insn_chain *chain ATTRIBUTE_UNUSED;
5095 int r;
5096 int last_reload;
5097 {
5098 int i, pass, count;
5099
5100 /* If we put this reload ahead, thinking it is a group,
5101 then insist on finding a group. Otherwise we can grab a
5102 reg that some other reload needs.
5103 (That can happen when we have a 68000 DATA_OR_FP_REG
5104 which is a group of data regs or one fp reg.)
5105 We need not be so restrictive if there are no more reloads
5106 for this insn.
5107
5108 ??? Really it would be nicer to have smarter handling
5109 for that kind of reg class, where a problem like this is normal.
5110 Perhaps those classes should be avoided for reloading
5111 by use of more alternatives. */
5112
5113 int force_group = rld[r].nregs > 1 && ! last_reload;
5114
5115 /* If we want a single register and haven't yet found one,
5116 take any reg in the right class and not in use.
5117 If we want a consecutive group, here is where we look for it.
5118
5119 We use two passes so we can first look for reload regs to
5120 reuse, which are already in use for other reloads in this insn,
5121 and only then use additional registers.
5122 I think that maximizing reuse is needed to make sure we don't
5123 run out of reload regs. Suppose we have three reloads, and
5124 reloads A and B can share regs. These need two regs.
5125 Suppose A and B are given different regs.
5126 That leaves none for C. */
5127 for (pass = 0; pass < 2; pass++)
5128 {
5129 /* I is the index in spill_regs.
5130 We advance it round-robin between insns to use all spill regs
5131 equally, so that inherited reloads have a chance
5132 of leapfrogging each other. */
5133
5134 i = last_spill_reg;
5135
5136 for (count = 0; count < n_spills; count++)
5137 {
5138 int class = (int) rld[r].class;
5139 int regnum;
5140
5141 i++;
5142 if (i >= n_spills)
5143 i -= n_spills;
5144 regnum = spill_regs[i];
5145
5146 if ((reload_reg_free_p (regnum, rld[r].opnum,
5147 rld[r].when_needed)
5148 || (rld[r].in
5149 /* We check reload_reg_used to make sure we
5150 don't clobber the return register. */
5151 && ! TEST_HARD_REG_BIT (reload_reg_used, regnum)
5152 && free_for_value_p (regnum, rld[r].mode, rld[r].opnum,
5153 rld[r].when_needed, rld[r].in,
5154 rld[r].out, r, 1)))
5155 && TEST_HARD_REG_BIT (reg_class_contents[class], regnum)
5156 && HARD_REGNO_MODE_OK (regnum, rld[r].mode)
5157 /* Look first for regs to share, then for unshared. But
5158 don't share regs used for inherited reloads; they are
5159 the ones we want to preserve. */
5160 && (pass
5161 || (TEST_HARD_REG_BIT (reload_reg_used_at_all,
5162 regnum)
5163 && ! TEST_HARD_REG_BIT (reload_reg_used_for_inherit,
5164 regnum))))
5165 {
5166 int nr = HARD_REGNO_NREGS (regnum, rld[r].mode);
5167 /* Avoid the problem where spilling a GENERAL_OR_FP_REG
5168 (on 68000) got us two FP regs. If NR is 1,
5169 we would reject both of them. */
5170 if (force_group)
5171 nr = rld[r].nregs;
5172 /* If we need only one reg, we have already won. */
5173 if (nr == 1)
5174 {
5175 /* But reject a single reg if we demand a group. */
5176 if (force_group)
5177 continue;
5178 break;
5179 }
5180 /* Otherwise check that as many consecutive regs as we need
5181 are available here. */
5182 while (nr > 1)
5183 {
5184 int regno = regnum + nr - 1;
5185 if (!(TEST_HARD_REG_BIT (reg_class_contents[class], regno)
5186 && spill_reg_order[regno] >= 0
5187 && reload_reg_free_p (regno, rld[r].opnum,
5188 rld[r].when_needed)))
5189 break;
5190 nr--;
5191 }
5192 if (nr == 1)
5193 break;
5194 }
5195 }
5196
5197 /* If we found something on pass 1, omit pass 2. */
5198 if (count < n_spills)
5199 break;
5200 }
5201
5202 /* We should have found a spill register by now. */
5203 if (count >= n_spills)
5204 return 0;
5205
5206 /* I is the index in SPILL_REG_RTX of the reload register we are to
5207 allocate. Get an rtx for it and find its register number. */
5208
5209 return set_reload_reg (i, r);
5210 }
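
/* A minimal sketch, not used by the pass, of the round-robin scan in
   allocate_reload_reg above: starting just after LAST_SPILL_REG and
   wrapping around, visit each entry of spill_regs[] once and return the
   index of the first one belonging to CLASS, or -1 if none does.  The
   real code layers many more conditions (freeness, mode, groups, reuse)
   on top of this walk.  */

static int next_spill_reg_in_class PARAMS ((int)) ATTRIBUTE_UNUSED;

static int
next_spill_reg_in_class (class)
     int class;
{
  int i = last_spill_reg;
  int count;

  for (count = 0; count < n_spills; count++)
    {
      i++;
      if (i >= n_spills)
	i -= n_spills;
      if (TEST_HARD_REG_BIT (reg_class_contents[class], spill_regs[i]))
	return i;
    }
  return -1;
}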
5211 \f
5212 /* Initialize all the tables needed to allocate reload registers.
5213 CHAIN is the insn currently being processed; SAVE_RELOAD_REG_RTX
5214 is the array we use to restore the reg_rtx field for every reload. */
5215
5216 static void
5217 choose_reload_regs_init (chain, save_reload_reg_rtx)
5218 struct insn_chain *chain;
5219 rtx *save_reload_reg_rtx;
5220 {
5221 int i;
5222
5223 for (i = 0; i < n_reloads; i++)
5224 rld[i].reg_rtx = save_reload_reg_rtx[i];
5225
5226 memset (reload_inherited, 0, MAX_RELOADS);
5227 memset ((char *) reload_inheritance_insn, 0, MAX_RELOADS * sizeof (rtx));
5228 memset ((char *) reload_override_in, 0, MAX_RELOADS * sizeof (rtx));
5229
5230 CLEAR_HARD_REG_SET (reload_reg_used);
5231 CLEAR_HARD_REG_SET (reload_reg_used_at_all);
5232 CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr);
5233 CLEAR_HARD_REG_SET (reload_reg_used_in_op_addr_reload);
5234 CLEAR_HARD_REG_SET (reload_reg_used_in_insn);
5235 CLEAR_HARD_REG_SET (reload_reg_used_in_other_addr);
5236
5237 CLEAR_HARD_REG_SET (reg_used_in_insn);
5238 {
5239 HARD_REG_SET tmp;
5240 REG_SET_TO_HARD_REG_SET (tmp, &chain->live_throughout);
5241 IOR_HARD_REG_SET (reg_used_in_insn, tmp);
5242 REG_SET_TO_HARD_REG_SET (tmp, &chain->dead_or_set);
5243 IOR_HARD_REG_SET (reg_used_in_insn, tmp);
5244 compute_use_by_pseudos (&reg_used_in_insn, &chain->live_throughout);
5245 compute_use_by_pseudos (&reg_used_in_insn, &chain->dead_or_set);
5246 }
5247
5248 for (i = 0; i < reload_n_operands; i++)
5249 {
5250 CLEAR_HARD_REG_SET (reload_reg_used_in_output[i]);
5251 CLEAR_HARD_REG_SET (reload_reg_used_in_input[i]);
5252 CLEAR_HARD_REG_SET (reload_reg_used_in_input_addr[i]);
5253 CLEAR_HARD_REG_SET (reload_reg_used_in_inpaddr_addr[i]);
5254 CLEAR_HARD_REG_SET (reload_reg_used_in_output_addr[i]);
5255 CLEAR_HARD_REG_SET (reload_reg_used_in_outaddr_addr[i]);
5256 }
5257
5258 COMPL_HARD_REG_SET (reload_reg_unavailable, chain->used_spill_regs);
5259
5260 CLEAR_HARD_REG_SET (reload_reg_used_for_inherit);
5261
5262 for (i = 0; i < n_reloads; i++)
5263 /* If we have already decided to use a certain register,
5264 don't use it in another way. */
5265 if (rld[i].reg_rtx)
5266 mark_reload_reg_in_use (REGNO (rld[i].reg_rtx), rld[i].opnum,
5267 rld[i].when_needed, rld[i].mode);
5268 }
5269
5270 /* Assign hard reg targets for the pseudo-registers we must reload
5271 into hard regs for this insn.
5272 Also output the instructions to copy them in and out of the hard regs.
5273
5274 For machines with register classes, we are responsible for
5275 finding a reload reg in the proper class. */
5276
5277 static void
5278 choose_reload_regs (chain)
5279 struct insn_chain *chain;
5280 {
5281 rtx insn = chain->insn;
5282 register int i, j;
5283 unsigned int max_group_size = 1;
5284 enum reg_class group_class = NO_REGS;
5285 int pass, win, inheritance;
5286
5287 rtx save_reload_reg_rtx[MAX_RELOADS];
5288
5289 /* In order to be certain of getting the registers we need,
5290 we must sort the reloads into order of increasing register class.
5291 Then our grabbing of reload registers will parallel the process
5292 that provided the reload registers.
5293
5294 Also note whether any of the reloads wants a consecutive group of regs.
5295 If so, record the maximum size of the group desired and what
5296 register class contains all the groups needed by this insn. */
5297
5298 for (j = 0; j < n_reloads; j++)
5299 {
5300 reload_order[j] = j;
5301 reload_spill_index[j] = -1;
5302
5303 if (rld[j].nregs > 1)
5304 {
5305 max_group_size = MAX (rld[j].nregs, max_group_size);
5306 group_class
5307 = reg_class_superunion[(int) rld[j].class][(int)group_class];
5308 }
5309
5310 save_reload_reg_rtx[j] = rld[j].reg_rtx;
5311 }
5312
5313 if (n_reloads > 1)
5314 qsort (reload_order, n_reloads, sizeof (short), reload_reg_class_lower);
5315
5316 /* If -O, try first with inheritance, then turning it off.
5317 If not -O, don't do inheritance.
5318 Using inheritance when not optimizing leads to paradoxes
5319 with fp on the 68k: fp numbers (not NaNs) fail to be equal to themselves
5320 because one side of the comparison might be inherited. */
5321 win = 0;
5322 for (inheritance = optimize > 0; inheritance >= 0; inheritance--)
5323 {
5324 choose_reload_regs_init (chain, save_reload_reg_rtx);
5325
5326 /* Process the reloads in order of preference just found.
5327 Beyond this point, subregs can be found in reload_reg_rtx.
5328
5329 This used to look for an existing reloaded home for all of the
5330 reloads, and only then perform any new reloads. But that could lose
5331 if the reloads were done out of reg-class order because a later
5332 reload with a looser constraint might have an old home in a register
5333 needed by an earlier reload with a tighter constraint.
5334
5335 To solve this, we make two passes over the reloads, in the order
5336 described above. In the first pass we try to inherit a reload
5337 from a previous insn. If there is a later reload that needs a
5338 class that is a proper subset of the class being processed, we must
5339 also allocate a spill register during the first pass.
5340
5341 Then make a second pass over the reloads to allocate any reloads
5342 that haven't been given registers yet. */
5343
5344 for (j = 0; j < n_reloads; j++)
5345 {
5346 register int r = reload_order[j];
5347 rtx search_equiv = NULL_RTX;
5348
5349 /* Ignore reloads that got marked inoperative. */
5350 if (rld[r].out == 0 && rld[r].in == 0
5351 && ! rld[r].secondary_p)
5352 continue;
5353
5354 /* If find_reloads chose to use reload_in or reload_out as a reload
5355 	     register, we don't need to choose one.  Otherwise, try even if it
5356 found one since we might save an insn if we find the value lying
5357 around.
5358 Try also when reload_in is a pseudo without a hard reg. */
5359 if (rld[r].in != 0 && rld[r].reg_rtx != 0
5360 && (rtx_equal_p (rld[r].in, rld[r].reg_rtx)
5361 || (rtx_equal_p (rld[r].out, rld[r].reg_rtx)
5362 && GET_CODE (rld[r].in) != MEM
5363 && true_regnum (rld[r].in) < FIRST_PSEUDO_REGISTER)))
5364 continue;
5365
5366 #if 0 /* No longer needed for correct operation.
5367 It might give better code, or might not; worth an experiment? */
5368 /* If this is an optional reload, we can't inherit from earlier insns
5369 until we are sure that any non-optional reloads have been allocated.
5370 The following code takes advantage of the fact that optional reloads
5371 are at the end of reload_order. */
5372 if (rld[r].optional != 0)
5373 for (i = 0; i < j; i++)
5374 if ((rld[reload_order[i]].out != 0
5375 || rld[reload_order[i]].in != 0
5376 || rld[reload_order[i]].secondary_p)
5377 && ! rld[reload_order[i]].optional
5378 && rld[reload_order[i]].reg_rtx == 0)
5379 allocate_reload_reg (chain, reload_order[i], 0);
5380 #endif
5381
5382 /* First see if this pseudo is already available as reloaded
5383 for a previous insn. We cannot try to inherit for reloads
5384 that are smaller than the maximum number of registers needed
5385 for groups unless the register we would allocate cannot be used
5386 for the groups.
5387
5388 We could check here to see if this is a secondary reload for
5389 an object that is already in a register of the desired class.
5390 This would avoid the need for the secondary reload register.
5391 But this is complex because we can't easily determine what
5392 objects might want to be loaded via this reload. So let a
5393 register be allocated here. In `emit_reload_insns' we suppress
5394 one of the loads in the case described above. */
5395
5396 if (inheritance)
5397 {
5398 int byte = 0;
5399 register int regno = -1;
5400 enum machine_mode mode = VOIDmode;
5401
5402 if (rld[r].in == 0)
5403 ;
5404 else if (GET_CODE (rld[r].in) == REG)
5405 {
5406 regno = REGNO (rld[r].in);
5407 mode = GET_MODE (rld[r].in);
5408 }
5409 else if (GET_CODE (rld[r].in_reg) == REG)
5410 {
5411 regno = REGNO (rld[r].in_reg);
5412 mode = GET_MODE (rld[r].in_reg);
5413 }
5414 else if (GET_CODE (rld[r].in_reg) == SUBREG
5415 && GET_CODE (SUBREG_REG (rld[r].in_reg)) == REG)
5416 {
5417 byte = SUBREG_BYTE (rld[r].in_reg);
5418 regno = REGNO (SUBREG_REG (rld[r].in_reg));
5419 if (regno < FIRST_PSEUDO_REGISTER)
5420 regno = subreg_regno (rld[r].in_reg);
5421 mode = GET_MODE (rld[r].in_reg);
5422 }
5423 #ifdef AUTO_INC_DEC
5424 else if ((GET_CODE (rld[r].in_reg) == PRE_INC
5425 || GET_CODE (rld[r].in_reg) == PRE_DEC
5426 || GET_CODE (rld[r].in_reg) == POST_INC
5427 || GET_CODE (rld[r].in_reg) == POST_DEC)
5428 && GET_CODE (XEXP (rld[r].in_reg, 0)) == REG)
5429 {
5430 regno = REGNO (XEXP (rld[r].in_reg, 0));
5431 mode = GET_MODE (XEXP (rld[r].in_reg, 0));
5432 rld[r].out = rld[r].in;
5433 }
5434 #endif
5435 #if 0
5436 /* This won't work, since REGNO can be a pseudo reg number.
5437 Also, it takes much more hair to keep track of all the things
5438 that can invalidate an inherited reload of part of a pseudoreg. */
5439 else if (GET_CODE (rld[r].in) == SUBREG
5440 && GET_CODE (SUBREG_REG (rld[r].in)) == REG)
5441 regno = subreg_regno (rld[r].in);
5442 #endif
5443
5444 if (regno >= 0 && reg_last_reload_reg[regno] != 0)
5445 {
5446 enum reg_class class = rld[r].class, last_class;
5447 rtx last_reg = reg_last_reload_reg[regno];
5448 enum machine_mode need_mode;
5449
5450 i = REGNO (last_reg);
5451 i += subreg_regno_offset (i, GET_MODE (last_reg), byte, mode);
5452 last_class = REGNO_REG_CLASS (i);
5453
5454 if (byte == 0)
5455 need_mode = mode;
5456 else
5457 need_mode
5458 = smallest_mode_for_size (GET_MODE_SIZE (mode) + byte,
5459 GET_MODE_CLASS (mode));
5460
5461 if (
5462 #ifdef CLASS_CANNOT_CHANGE_MODE
5463 (TEST_HARD_REG_BIT
5464 (reg_class_contents[(int) CLASS_CANNOT_CHANGE_MODE], i)
5465 ? ! CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (last_reg),
5466 need_mode)
5467 : (GET_MODE_SIZE (GET_MODE (last_reg))
5468 >= GET_MODE_SIZE (need_mode)))
5469 #else
5470 (GET_MODE_SIZE (GET_MODE (last_reg))
5471 >= GET_MODE_SIZE (need_mode))
5472 #endif
5473 && reg_reloaded_contents[i] == regno
5474 && TEST_HARD_REG_BIT (reg_reloaded_valid, i)
5475 && HARD_REGNO_MODE_OK (i, rld[r].mode)
5476 && (TEST_HARD_REG_BIT (reg_class_contents[(int) class], i)
5477 /* Even if we can't use this register as a reload
5478 register, we might use it for reload_override_in,
5479 if copying it to the desired class is cheap
5480 enough. */
5481 || ((REGISTER_MOVE_COST (mode, last_class, class)
5482 < MEMORY_MOVE_COST (mode, class, 1))
5483 #ifdef SECONDARY_INPUT_RELOAD_CLASS
5484 && (SECONDARY_INPUT_RELOAD_CLASS (class, mode,
5485 last_reg)
5486 == NO_REGS)
5487 #endif
5488 #ifdef SECONDARY_MEMORY_NEEDED
5489 && ! SECONDARY_MEMORY_NEEDED (last_class, class,
5490 mode)
5491 #endif
5492 ))
5493
5494 && (rld[r].nregs == max_group_size
5495 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) group_class],
5496 i))
5497 && free_for_value_p (i, rld[r].mode, rld[r].opnum,
5498 rld[r].when_needed, rld[r].in,
5499 const0_rtx, r, 1))
5500 {
5501 /* If a group is needed, verify that all the subsequent
5502 registers still have their values intact. */
5503 int nr = HARD_REGNO_NREGS (i, rld[r].mode);
5504 int k;
5505
5506 for (k = 1; k < nr; k++)
5507 if (reg_reloaded_contents[i + k] != regno
5508 || ! TEST_HARD_REG_BIT (reg_reloaded_valid, i + k))
5509 break;
5510
5511 if (k == nr)
5512 {
5513 int i1;
5514 int bad_for_class;
5515
5516 last_reg = (GET_MODE (last_reg) == mode
5517 ? last_reg : gen_rtx_REG (mode, i));
5518
5519 bad_for_class = 0;
5520 for (k = 0; k < nr; k++)
5521 bad_for_class |= ! TEST_HARD_REG_BIT (reg_class_contents[(int) rld[r].class],
5522 i+k);
5523
5524 /* We found a register that contains the
5525 value we need. If this register is the
5526 same as an `earlyclobber' operand of the
5527 current insn, just mark it as a place to
5528 reload from since we can't use it as the
5529 reload register itself. */
5530
5531 for (i1 = 0; i1 < n_earlyclobbers; i1++)
5532 if (reg_overlap_mentioned_for_reload_p
5533 (reg_last_reload_reg[regno],
5534 reload_earlyclobbers[i1]))
5535 break;
5536
5537 if (i1 != n_earlyclobbers
5538 || ! (free_for_value_p (i, rld[r].mode,
5539 rld[r].opnum,
5540 rld[r].when_needed, rld[r].in,
5541 rld[r].out, r, 1))
5542 /* Don't use it if we'd clobber a pseudo reg. */
5543 || (TEST_HARD_REG_BIT (reg_used_in_insn, i)
5544 && rld[r].out
5545 && ! TEST_HARD_REG_BIT (reg_reloaded_dead, i))
5546 /* Don't clobber the frame pointer. */
5547 || (i == HARD_FRAME_POINTER_REGNUM
5548 && rld[r].out)
5549 /* Don't really use the inherited spill reg
5550 if we need it wider than we've got it. */
5551 || (GET_MODE_SIZE (rld[r].mode)
5552 > GET_MODE_SIZE (mode))
5553 || bad_for_class
5554
5555 /* If find_reloads chose reload_out as reload
5556 register, stay with it - that leaves the
5557 inherited register for subsequent reloads. */
5558 || (rld[r].out && rld[r].reg_rtx
5559 && rtx_equal_p (rld[r].out, rld[r].reg_rtx)))
5560 {
5561 if (! rld[r].optional)
5562 {
5563 reload_override_in[r] = last_reg;
5564 reload_inheritance_insn[r]
5565 = reg_reloaded_insn[i];
5566 }
5567 }
5568 else
5569 {
5570 int k;
5571 /* We can use this as a reload reg. */
5572 /* Mark the register as in use for this part of
5573 the insn. */
5574 mark_reload_reg_in_use (i,
5575 rld[r].opnum,
5576 rld[r].when_needed,
5577 rld[r].mode);
5578 rld[r].reg_rtx = last_reg;
5579 reload_inherited[r] = 1;
5580 reload_inheritance_insn[r]
5581 = reg_reloaded_insn[i];
5582 reload_spill_index[r] = i;
5583 for (k = 0; k < nr; k++)
5584 SET_HARD_REG_BIT (reload_reg_used_for_inherit,
5585 i + k);
5586 }
5587 }
5588 }
5589 }
5590 }
5591
5592 /* Here's another way to see if the value is already lying around. */
5593 if (inheritance
5594 && rld[r].in != 0
5595 && ! reload_inherited[r]
5596 && rld[r].out == 0
5597 && (CONSTANT_P (rld[r].in)
5598 || GET_CODE (rld[r].in) == PLUS
5599 || GET_CODE (rld[r].in) == REG
5600 || GET_CODE (rld[r].in) == MEM)
5601 && (rld[r].nregs == max_group_size
5602 || ! reg_classes_intersect_p (rld[r].class, group_class)))
5603 search_equiv = rld[r].in;
5604 /* If this is an output reload from a simple move insn, look
5605 if an equivalence for the input is available. */
5606 else if (inheritance && rld[r].in == 0 && rld[r].out != 0)
5607 {
5608 rtx set = single_set (insn);
5609
5610 if (set
5611 && rtx_equal_p (rld[r].out, SET_DEST (set))
5612 && CONSTANT_P (SET_SRC (set)))
5613 search_equiv = SET_SRC (set);
5614 }
5615
5616 if (search_equiv)
5617 {
5618 register rtx equiv
5619 = find_equiv_reg (search_equiv, insn, rld[r].class,
5620 -1, NULL, 0, rld[r].mode);
5621 int regno = 0;
5622
5623 if (equiv != 0)
5624 {
5625 if (GET_CODE (equiv) == REG)
5626 regno = REGNO (equiv);
5627 else if (GET_CODE (equiv) == SUBREG)
5628 {
5629 /* This must be a SUBREG of a hard register.
5630 Make a new REG since this might be used in an
5631 address and not all machines support SUBREGs
5632 there. */
5633 regno = subreg_regno (equiv);
5634 equiv = gen_rtx_REG (rld[r].mode, regno);
5635 }
5636 else
5637 abort ();
5638 }
5639
5640 /* If we found a spill reg, reject it unless it is free
5641 and of the desired class. */
5642 if (equiv != 0
5643 && ((TEST_HARD_REG_BIT (reload_reg_used_at_all, regno)
5644 && ! free_for_value_p (regno, rld[r].mode,
5645 rld[r].opnum, rld[r].when_needed,
5646 rld[r].in, rld[r].out, r, 1))
5647 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) rld[r].class],
5648 regno)))
5649 equiv = 0;
5650
5651 if (equiv != 0 && ! HARD_REGNO_MODE_OK (regno, rld[r].mode))
5652 equiv = 0;
5653
5654 /* We found a register that contains the value we need.
5655 If this register is the same as an `earlyclobber' operand
5656 of the current insn, just mark it as a place to reload from
5657 since we can't use it as the reload register itself. */
5658
5659 if (equiv != 0)
5660 for (i = 0; i < n_earlyclobbers; i++)
5661 if (reg_overlap_mentioned_for_reload_p (equiv,
5662 reload_earlyclobbers[i]))
5663 {
5664 if (! rld[r].optional)
5665 reload_override_in[r] = equiv;
5666 equiv = 0;
5667 break;
5668 }
5669
5670 /* If the equiv register we have found is explicitly clobbered
5671 in the current insn, it depends on the reload type if we
5672 can use it, use it for reload_override_in, or not at all.
5673 In particular, we then can't use EQUIV for a
5674 RELOAD_FOR_OUTPUT_ADDRESS reload. */
5675
5676 if (equiv != 0)
5677 {
5678 if (regno_clobbered_p (regno, insn, rld[r].mode, 0))
5679 switch (rld[r].when_needed)
5680 {
5681 case RELOAD_FOR_OTHER_ADDRESS:
5682 case RELOAD_FOR_INPADDR_ADDRESS:
5683 case RELOAD_FOR_INPUT_ADDRESS:
5684 case RELOAD_FOR_OPADDR_ADDR:
5685 break;
5686 case RELOAD_OTHER:
5687 case RELOAD_FOR_INPUT:
5688 case RELOAD_FOR_OPERAND_ADDRESS:
5689 if (! rld[r].optional)
5690 reload_override_in[r] = equiv;
5691 /* Fall through. */
5692 default:
5693 equiv = 0;
5694 break;
5695 }
5696 else if (regno_clobbered_p (regno, insn, rld[r].mode, 1))
5697 switch (rld[r].when_needed)
5698 {
5699 case RELOAD_FOR_OTHER_ADDRESS:
5700 case RELOAD_FOR_INPADDR_ADDRESS:
5701 case RELOAD_FOR_INPUT_ADDRESS:
5702 case RELOAD_FOR_OPADDR_ADDR:
5703 case RELOAD_FOR_OPERAND_ADDRESS:
5704 case RELOAD_FOR_INPUT:
5705 break;
5706 case RELOAD_OTHER:
5707 if (! rld[r].optional)
5708 reload_override_in[r] = equiv;
5709 /* Fall through. */
5710 default:
5711 equiv = 0;
5712 break;
5713 }
5714 }
5715
5716 /* If we found an equivalent reg, say no code need be generated
5717 to load it, and use it as our reload reg. */
5718 if (equiv != 0 && regno != HARD_FRAME_POINTER_REGNUM)
5719 {
5720 int nr = HARD_REGNO_NREGS (regno, rld[r].mode);
5721 int k;
5722 rld[r].reg_rtx = equiv;
5723 reload_inherited[r] = 1;
5724
5725 /* If reg_reloaded_valid is not set for this register,
5726 there might be a stale spill_reg_store lying around.
5727 We must clear it, since otherwise emit_reload_insns
5728 might delete the store. */
5729 if (! TEST_HARD_REG_BIT (reg_reloaded_valid, regno))
5730 spill_reg_store[regno] = NULL_RTX;
5731 /* If any of the hard registers in EQUIV are spill
5732 registers, mark them as in use for this insn. */
5733 for (k = 0; k < nr; k++)
5734 {
5735 i = spill_reg_order[regno + k];
5736 if (i >= 0)
5737 {
5738 mark_reload_reg_in_use (regno, rld[r].opnum,
5739 rld[r].when_needed,
5740 rld[r].mode);
5741 SET_HARD_REG_BIT (reload_reg_used_for_inherit,
5742 regno + k);
5743 }
5744 }
5745 }
5746 }
5747
5748 /* If we found a register to use already, or if this is an optional
5749 reload, we are done. */
5750 if (rld[r].reg_rtx != 0 || rld[r].optional != 0)
5751 continue;
5752
5753 #if 0
5754 /* No longer needed for correct operation. Might or might
5755 not give better code on the average. Want to experiment? */
5756
5757 /* See if there is a later reload that has a class different from our
5758 	 class that intersects our class or that requires fewer registers
5759 	 than our reload.  If so, we must allocate a register to this
5760 reload now, since that reload might inherit a previous reload
5761 and take the only available register in our class. Don't do this
5762 for optional reloads since they will force all previous reloads
5763 to be allocated. Also don't do this for reloads that have been
5764 turned off. */
5765
5766 for (i = j + 1; i < n_reloads; i++)
5767 {
5768 int s = reload_order[i];
5769
5770 if ((rld[s].in == 0 && rld[s].out == 0
5771 && ! rld[s].secondary_p)
5772 || rld[s].optional)
5773 continue;
5774
5775 if ((rld[s].class != rld[r].class
5776 && reg_classes_intersect_p (rld[r].class,
5777 rld[s].class))
5778 || rld[s].nregs < rld[r].nregs)
5779 break;
5780 }
5781
5782 if (i == n_reloads)
5783 continue;
5784
5785 allocate_reload_reg (chain, r, j == n_reloads - 1);
5786 #endif
5787 }
5788
5789 /* Now allocate reload registers for anything non-optional that
5790 didn't get one yet. */
5791 for (j = 0; j < n_reloads; j++)
5792 {
5793 register int r = reload_order[j];
5794
5795 /* Ignore reloads that got marked inoperative. */
5796 if (rld[r].out == 0 && rld[r].in == 0 && ! rld[r].secondary_p)
5797 continue;
5798
5799 /* Skip reloads that already have a register allocated or are
5800 optional. */
5801 if (rld[r].reg_rtx != 0 || rld[r].optional)
5802 continue;
5803
5804 if (! allocate_reload_reg (chain, r, j == n_reloads - 1))
5805 break;
5806 }
5807
5808 /* If that loop got all the way, we have won. */
5809 if (j == n_reloads)
5810 {
5811 win = 1;
5812 break;
5813 }
5814
5815 /* Loop around and try without any inheritance. */
5816 }
5817
5818 if (! win)
5819 {
5820 /* First undo everything done by the failed attempt
5821 to allocate with inheritance. */
5822 choose_reload_regs_init (chain, save_reload_reg_rtx);
5823
5824 /* Some sanity tests to verify that the reloads found in the first
5825 pass are identical to the ones we have now. */
5826 if (chain->n_reloads != n_reloads)
5827 abort ();
5828
5829 for (i = 0; i < n_reloads; i++)
5830 {
5831 if (chain->rld[i].regno < 0 || chain->rld[i].reg_rtx != 0)
5832 continue;
5833 if (chain->rld[i].when_needed != rld[i].when_needed)
5834 abort ();
5835 for (j = 0; j < n_spills; j++)
5836 if (spill_regs[j] == chain->rld[i].regno)
5837 if (! set_reload_reg (j, i))
5838 failed_reload (chain->insn, i);
5839 }
5840 }
5841
5842 /* If we thought we could inherit a reload, because it seemed that
5843 nothing else wanted the same reload register earlier in the insn,
5844 verify that assumption, now that all reloads have been assigned.
5845 Likewise for reloads where reload_override_in has been set. */
5846
5847 /* If doing expensive optimizations, do one preliminary pass that doesn't
5848 cancel any inheritance, but removes reloads that have been needed only
5849 for reloads that we know can be inherited. */
5850 for (pass = flag_expensive_optimizations; pass >= 0; pass--)
5851 {
5852 for (j = 0; j < n_reloads; j++)
5853 {
5854 register int r = reload_order[j];
5855 rtx check_reg;
5856 if (reload_inherited[r] && rld[r].reg_rtx)
5857 check_reg = rld[r].reg_rtx;
5858 else if (reload_override_in[r]
5859 && (GET_CODE (reload_override_in[r]) == REG
5860 || GET_CODE (reload_override_in[r]) == SUBREG))
5861 check_reg = reload_override_in[r];
5862 else
5863 continue;
5864 if (! free_for_value_p (true_regnum (check_reg), rld[r].mode,
5865 rld[r].opnum, rld[r].when_needed, rld[r].in,
5866 (reload_inherited[r]
5867 ? rld[r].out : const0_rtx),
5868 r, 1))
5869 {
5870 if (pass)
5871 continue;
5872 reload_inherited[r] = 0;
5873 reload_override_in[r] = 0;
5874 }
5875 /* If we can inherit a RELOAD_FOR_INPUT, or can use a
5876 reload_override_in, then we do not need its related
5877 RELOAD_FOR_INPUT_ADDRESS / RELOAD_FOR_INPADDR_ADDRESS reloads;
5878 likewise for other reload types.
5879 We handle this by removing a reload when its only replacement
5880 is mentioned in reload_in of the reload we are going to inherit.
5881 	     A special case is auto_inc expressions; even if the input is
5882 	     inherited, we still need the address for the output.  We can
5883 	     recognize them because they have RELOAD_OUT set to RELOAD_IN.
5884 	     If we succeeded in removing some reload and we are doing a preliminary
5885 pass just to remove such reloads, make another pass, since the
5886 removal of one reload might allow us to inherit another one. */
5887 else if (rld[r].in
5888 && rld[r].out != rld[r].in
5889 && remove_address_replacements (rld[r].in) && pass)
5890 pass = 2;
5891 }
5892 }
5893
5894 /* Now that reload_override_in is known valid,
5895 actually override reload_in. */
5896 for (j = 0; j < n_reloads; j++)
5897 if (reload_override_in[j])
5898 rld[j].in = reload_override_in[j];
5899
5900 /* If this reload won't be done because it has been cancelled or is
5901 optional and not inherited, clear reload_reg_rtx so other
5902 routines (such as subst_reloads) don't get confused. */
5903 for (j = 0; j < n_reloads; j++)
5904 if (rld[j].reg_rtx != 0
5905 && ((rld[j].optional && ! reload_inherited[j])
5906 || (rld[j].in == 0 && rld[j].out == 0
5907 && ! rld[j].secondary_p)))
5908 {
5909 int regno = true_regnum (rld[j].reg_rtx);
5910
5911 if (spill_reg_order[regno] >= 0)
5912 clear_reload_reg_in_use (regno, rld[j].opnum,
5913 rld[j].when_needed, rld[j].mode);
5914 rld[j].reg_rtx = 0;
5915 reload_spill_index[j] = -1;
5916 }
5917
5918 /* Record which pseudos and which spill regs have output reloads. */
5919 for (j = 0; j < n_reloads; j++)
5920 {
5921 register int r = reload_order[j];
5922
5923 i = reload_spill_index[r];
5924
5925 /* I is nonneg if this reload uses a register.
5926 If rld[r].reg_rtx is 0, this is an optional reload
5927 that we opted to ignore. */
5928 if (rld[r].out_reg != 0 && GET_CODE (rld[r].out_reg) == REG
5929 && rld[r].reg_rtx != 0)
5930 {
5931 register int nregno = REGNO (rld[r].out_reg);
5932 int nr = 1;
5933
5934 if (nregno < FIRST_PSEUDO_REGISTER)
5935 nr = HARD_REGNO_NREGS (nregno, rld[r].mode);
5936
5937 while (--nr >= 0)
5938 reg_has_output_reload[nregno + nr] = 1;
5939
5940 if (i >= 0)
5941 {
5942 nr = HARD_REGNO_NREGS (i, rld[r].mode);
5943 while (--nr >= 0)
5944 SET_HARD_REG_BIT (reg_is_output_reload, i + nr);
5945 }
5946
5947 if (rld[r].when_needed != RELOAD_OTHER
5948 && rld[r].when_needed != RELOAD_FOR_OUTPUT
5949 && rld[r].when_needed != RELOAD_FOR_INSN)
5950 abort ();
5951 }
5952 }
5953 }
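
/* A minimal sketch, not called anywhere, of the core inheritance test
   made in choose_reload_regs above: given pseudo REGNO, return the hard
   register that still holds its value from a previous reload, or -1 if
   there is none.  The real code additionally checks modes, register
   classes, groups and conflicts before reusing the register.  */

static int inheritable_hard_reg PARAMS ((int)) ATTRIBUTE_UNUSED;

static int
inheritable_hard_reg (regno)
     int regno;
{
  rtx last_reg = reg_last_reload_reg[regno];
  int i;

  if (last_reg == 0)
    return -1;

  i = REGNO (last_reg);
  if (reg_reloaded_contents[i] == regno
      && TEST_HARD_REG_BIT (reg_reloaded_valid, i))
    return i;

  return -1;
}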
5954
5955 /* Deallocate the reload register for reload R. This is called from
5956 remove_address_replacements. */
5957
5958 void
5959 deallocate_reload_reg (r)
5960 int r;
5961 {
5962 int regno;
5963
5964 if (! rld[r].reg_rtx)
5965 return;
5966 regno = true_regnum (rld[r].reg_rtx);
5967 rld[r].reg_rtx = 0;
5968 if (spill_reg_order[regno] >= 0)
5969 clear_reload_reg_in_use (regno, rld[r].opnum, rld[r].when_needed,
5970 rld[r].mode);
5971 reload_spill_index[r] = -1;
5972 }
5973 \f
5974 /* If SMALL_REGISTER_CLASSES is non-zero, we may not have merged two
5975 reloads of the same item for fear that we might not have enough reload
5976 registers. However, normally they will get the same reload register
5977 and hence actually need not be loaded twice.
5978
5979 Here we check for the most common case of this phenomenon: when we have
5980    a number of reloads for the same object, each of which was allocated
5981 the same reload_reg_rtx, that reload_reg_rtx is not used for any other
5982 reload, and is not modified in the insn itself. If we find such,
5983 merge all the reloads and set the resulting reload to RELOAD_OTHER.
5984 This will not increase the number of spill registers needed and will
5985 prevent redundant code. */
5986
5987 static void
5988 merge_assigned_reloads (insn)
5989 rtx insn;
5990 {
5991 int i, j;
5992
5993 /* Scan all the reloads looking for ones that only load values and
5994 are not already RELOAD_OTHER and ones whose reload_reg_rtx are
5995 assigned and not modified by INSN. */
5996
5997 for (i = 0; i < n_reloads; i++)
5998 {
5999 int conflicting_input = 0;
6000 int max_input_address_opnum = -1;
6001 int min_conflicting_input_opnum = MAX_RECOG_OPERANDS;
6002
6003 if (rld[i].in == 0 || rld[i].when_needed == RELOAD_OTHER
6004 || rld[i].out != 0 || rld[i].reg_rtx == 0
6005 || reg_set_p (rld[i].reg_rtx, insn))
6006 continue;
6007
6008 /* Look at all other reloads. Ensure that the only use of this
6009 reload_reg_rtx is in a reload that just loads the same value
6010 as we do. Note that any secondary reloads must be of the identical
6011 class since the values, modes, and result registers are the
6012 same, so we need not do anything with any secondary reloads. */
6013
6014 for (j = 0; j < n_reloads; j++)
6015 {
6016 if (i == j || rld[j].reg_rtx == 0
6017 || ! reg_overlap_mentioned_p (rld[j].reg_rtx,
6018 rld[i].reg_rtx))
6019 continue;
6020
6021 if (rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS
6022 && rld[j].opnum > max_input_address_opnum)
6023 max_input_address_opnum = rld[j].opnum;
6024
6025 	  /* If the reload regs aren't exactly the same (e.g., different modes)
6026 or if the values are different, we can't merge this reload.
6027 But if it is an input reload, we might still merge
6028 RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_OTHER_ADDRESS reloads. */
6029
6030 if (! rtx_equal_p (rld[i].reg_rtx, rld[j].reg_rtx)
6031 || rld[j].out != 0 || rld[j].in == 0
6032 || ! rtx_equal_p (rld[i].in, rld[j].in))
6033 {
6034 if (rld[j].when_needed != RELOAD_FOR_INPUT
6035 || ((rld[i].when_needed != RELOAD_FOR_INPUT_ADDRESS
6036 || rld[i].opnum > rld[j].opnum)
6037 && rld[i].when_needed != RELOAD_FOR_OTHER_ADDRESS))
6038 break;
6039 conflicting_input = 1;
6040 if (min_conflicting_input_opnum > rld[j].opnum)
6041 min_conflicting_input_opnum = rld[j].opnum;
6042 }
6043 }
6044
6045 /* If all is OK, merge the reloads. Only set this to RELOAD_OTHER if
6046 we, in fact, found any matching reloads. */
6047
6048 if (j == n_reloads
6049 && max_input_address_opnum <= min_conflicting_input_opnum)
6050 {
6051 for (j = 0; j < n_reloads; j++)
6052 if (i != j && rld[j].reg_rtx != 0
6053 && rtx_equal_p (rld[i].reg_rtx, rld[j].reg_rtx)
6054 && (! conflicting_input
6055 || rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS
6056 || rld[j].when_needed == RELOAD_FOR_OTHER_ADDRESS))
6057 {
6058 rld[i].when_needed = RELOAD_OTHER;
6059 rld[j].in = 0;
6060 reload_spill_index[j] = -1;
6061 transfer_replacements (i, j);
6062 }
6063
6064 /* If this is now RELOAD_OTHER, look for any reloads that load
6065 parts of this operand and set them to RELOAD_FOR_OTHER_ADDRESS
6066 if they were for inputs, RELOAD_OTHER for outputs. Note that
6067 this test is equivalent to looking for reloads for this operand
6068 number. */
6069
6070 if (rld[i].when_needed == RELOAD_OTHER)
6071 for (j = 0; j < n_reloads; j++)
6072 if (rld[j].in != 0
6073 && rld[j].when_needed != RELOAD_OTHER
6074 && reg_overlap_mentioned_for_reload_p (rld[j].in,
6075 rld[i].in))
6076 rld[j].when_needed
6077 = ((rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS
6078 || rld[j].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
6079 ? RELOAD_FOR_OTHER_ADDRESS : RELOAD_OTHER);
6080 }
6081 }
6082 }
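
/* A hypothetical illustration of the merge above: suppose rld[0] and
   rld[1] load the same value into the same reload register, i.e.

       rtx_equal_p (rld[0].in, rld[1].in)
       rtx_equal_p (rld[0].reg_rtx, rld[1].reg_rtx)
       rld[0].out == 0 && rld[1].out == 0
       rld[0].when_needed == RELOAD_FOR_INPUT_ADDRESS
       rld[1].when_needed == RELOAD_FOR_INPUT

   Then the loop keeps rld[0], promotes it to RELOAD_OTHER, clears
   rld[1].in and transfers rld[1]'s replacements to rld[0], so only a
   single load of the shared value is emitted.  (The indices are
   arbitrary; this is only meant to make the conditions above concrete.)  */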
6083 \f
6084 /* These arrays are filled by emit_reload_insns and its subroutines. */
6085 static rtx input_reload_insns[MAX_RECOG_OPERANDS];
6086 static rtx other_input_address_reload_insns = 0;
6087 static rtx other_input_reload_insns = 0;
6088 static rtx input_address_reload_insns[MAX_RECOG_OPERANDS];
6089 static rtx inpaddr_address_reload_insns[MAX_RECOG_OPERANDS];
6090 static rtx output_reload_insns[MAX_RECOG_OPERANDS];
6091 static rtx output_address_reload_insns[MAX_RECOG_OPERANDS];
6092 static rtx outaddr_address_reload_insns[MAX_RECOG_OPERANDS];
6093 static rtx operand_reload_insns = 0;
6094 static rtx other_operand_reload_insns = 0;
6095 static rtx other_output_reload_insns[MAX_RECOG_OPERANDS];
6096
6097 /* Values to be put in spill_reg_store are put here first. */
6098 static rtx new_spill_reg_store[FIRST_PSEUDO_REGISTER];
6099 static HARD_REG_SET reg_reloaded_died;
6100
6101 /* Generate insns to perform reload RL, which is for the insn in CHAIN and
6102 has the number J. OLD contains the value to be used as input. */
6103
6104 static void
6105 emit_input_reload_insns (chain, rl, old, j)
6106 struct insn_chain *chain;
6107 struct reload *rl;
6108 rtx old;
6109 int j;
6110 {
6111 rtx insn = chain->insn;
6112 register rtx reloadreg = rl->reg_rtx;
6113 rtx oldequiv_reg = 0;
6114 rtx oldequiv = 0;
6115 int special = 0;
6116 enum machine_mode mode;
6117 rtx *where;
6118
6119 /* Determine the mode to reload in.
6120 This is very tricky because we have three to choose from.
6121 There is the mode the insn operand wants (rl->inmode).
6122 There is the mode of the reload register RELOADREG.
6123 There is the intrinsic mode of the operand, which we could find
6124 by stripping some SUBREGs.
6125 It turns out that RELOADREG's mode is irrelevant:
6126 we can change that arbitrarily.
6127
6128 Consider (SUBREG:SI foo:QI) as an operand that must be SImode;
6129 then the reload reg may not support QImode moves, so use SImode.
6130 If foo is in memory due to spilling a pseudo reg, this is safe,
6131 because the QImode value is in the least significant part of a
6132 slot big enough for a SImode. If foo is some other sort of
6133 memory reference, then it is impossible to reload this case,
6134 so previous passes had better make sure this never happens.
6135
6136      Then consider a one-word union which has SImode and one of whose
6137      members is a float, being fetched as (SUBREG:SF union:SI).
6138 We must fetch that as SFmode because we could be loading into
6139 a float-only register. In this case OLD's mode is correct.
6140
6141 Consider an immediate integer: it has VOIDmode. Here we need
6142 to get a mode from something else.
6143
6144 In some cases, there is a fourth mode, the operand's
6145 containing mode. If the insn specifies a containing mode for
6146 this operand, it overrides all others.
6147
6148 I am not sure whether the algorithm here is always right,
6149 but it does the right things in those cases. */
6150
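  /* Two concrete cases of the above, as a sketch: reloading
     (subreg:SI (reg:QI N)) must be done in SImode even though the
     operand's intrinsic mode is QImode, while reloading (const_int 42),
     whose mode is VOIDmode, falls back to rl->inmode just below.
     (The pseudo number N and the constant are of course arbitrary.)  */
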
6151 mode = GET_MODE (old);
6152 if (mode == VOIDmode)
6153 mode = rl->inmode;
6154
6155 #ifdef SECONDARY_INPUT_RELOAD_CLASS
6156 /* If we need a secondary register for this operation, see if
6157 the value is already in a register in that class. Don't
6158 do this if the secondary register will be used as a scratch
6159 register. */
6160
6161 if (rl->secondary_in_reload >= 0
6162 && rl->secondary_in_icode == CODE_FOR_nothing
6163 && optimize)
6164 oldequiv
6165 = find_equiv_reg (old, insn,
6166 rld[rl->secondary_in_reload].class,
6167 -1, NULL, 0, mode);
6168 #endif
6169
6170 /* If reloading from memory, see if there is a register
6171 that already holds the same value. If so, reload from there.
6172 We can pass 0 as the reload_reg_p argument because
6173 any other reload has either already been emitted,
6174 in which case find_equiv_reg will see the reload-insn,
6175 or has yet to be emitted, in which case it doesn't matter
6176 because we will use this equiv reg right away. */
6177
6178 if (oldequiv == 0 && optimize
6179 && (GET_CODE (old) == MEM
6180 || (GET_CODE (old) == REG
6181 && REGNO (old) >= FIRST_PSEUDO_REGISTER
6182 && reg_renumber[REGNO (old)] < 0)))
6183 oldequiv = find_equiv_reg (old, insn, ALL_REGS, -1, NULL, 0, mode);
6184
6185 if (oldequiv)
6186 {
6187 unsigned int regno = true_regnum (oldequiv);
6188
6189 /* Don't use OLDEQUIV if any other reload changes it at an
6190 earlier stage of this insn or at this stage. */
6191 if (! free_for_value_p (regno, rl->mode, rl->opnum, rl->when_needed,
6192 rl->in, const0_rtx, j, 0))
6193 oldequiv = 0;
6194
6195 /* If it is no cheaper to copy from OLDEQUIV into the
6196 reload register than it would be to move from memory,
6197 don't use it. Likewise, if we need a secondary register
6198 or memory. */
6199
6200 if (oldequiv != 0
6201 && ((REGNO_REG_CLASS (regno) != rl->class
6202 && (REGISTER_MOVE_COST (mode, REGNO_REG_CLASS (regno),
6203 rl->class)
6204 >= MEMORY_MOVE_COST (mode, rl->class, 1)))
6205 #ifdef SECONDARY_INPUT_RELOAD_CLASS
6206 || (SECONDARY_INPUT_RELOAD_CLASS (rl->class,
6207 mode, oldequiv)
6208 != NO_REGS)
6209 #endif
6210 #ifdef SECONDARY_MEMORY_NEEDED
6211 || SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (regno),
6212 rl->class,
6213 mode)
6214 #endif
6215 ))
6216 oldequiv = 0;
6217 }
6218
6219 /* delete_output_reload is only invoked properly if old contains
6220 the original pseudo register. Since this is replaced with a
6221 hard reg when RELOAD_OVERRIDE_IN is set, see if we can
6222 find the pseudo in RELOAD_IN_REG. */
6223 if (oldequiv == 0
6224 && reload_override_in[j]
6225 && GET_CODE (rl->in_reg) == REG)
6226 {
6227 oldequiv = old;
6228 old = rl->in_reg;
6229 }
6230 if (oldequiv == 0)
6231 oldequiv = old;
6232 else if (GET_CODE (oldequiv) == REG)
6233 oldequiv_reg = oldequiv;
6234 else if (GET_CODE (oldequiv) == SUBREG)
6235 oldequiv_reg = SUBREG_REG (oldequiv);
6236
6237 /* If we are reloading from a register that was recently stored in
6238 with an output-reload, see if we can prove there was
6239 actually no need to store the old value in it. */
6240
6241 if (optimize && GET_CODE (oldequiv) == REG
6242 && REGNO (oldequiv) < FIRST_PSEUDO_REGISTER
6243 && spill_reg_store[REGNO (oldequiv)]
6244 && GET_CODE (old) == REG
6245 && (dead_or_set_p (insn, spill_reg_stored_to[REGNO (oldequiv)])
6246 || rtx_equal_p (spill_reg_stored_to[REGNO (oldequiv)],
6247 rl->out_reg)))
6248 delete_output_reload (insn, j, REGNO (oldequiv));
6249
6250 /* Encapsulate both RELOADREG and OLDEQUIV into that mode,
6251 then load RELOADREG from OLDEQUIV. Note that we cannot use
6252 gen_lowpart_common since it can do the wrong thing when
6253 RELOADREG has a multi-word mode. Note that RELOADREG
6254 must always be a REG here. */
6255
6256 if (GET_MODE (reloadreg) != mode)
6257 reloadreg = gen_rtx_REG (mode, REGNO (reloadreg));
6258 while (GET_CODE (oldequiv) == SUBREG && GET_MODE (oldequiv) != mode)
6259 oldequiv = SUBREG_REG (oldequiv);
6260 if (GET_MODE (oldequiv) != VOIDmode
6261 && mode != GET_MODE (oldequiv))
6262 oldequiv = gen_lowpart_SUBREG (mode, oldequiv);
6263
6264 /* Switch to the right place to emit the reload insns. */
6265 switch (rl->when_needed)
6266 {
6267 case RELOAD_OTHER:
6268 where = &other_input_reload_insns;
6269 break;
6270 case RELOAD_FOR_INPUT:
6271 where = &input_reload_insns[rl->opnum];
6272 break;
6273 case RELOAD_FOR_INPUT_ADDRESS:
6274 where = &input_address_reload_insns[rl->opnum];
6275 break;
6276 case RELOAD_FOR_INPADDR_ADDRESS:
6277 where = &inpaddr_address_reload_insns[rl->opnum];
6278 break;
6279 case RELOAD_FOR_OUTPUT_ADDRESS:
6280 where = &output_address_reload_insns[rl->opnum];
6281 break;
6282 case RELOAD_FOR_OUTADDR_ADDRESS:
6283 where = &outaddr_address_reload_insns[rl->opnum];
6284 break;
6285 case RELOAD_FOR_OPERAND_ADDRESS:
6286 where = &operand_reload_insns;
6287 break;
6288 case RELOAD_FOR_OPADDR_ADDR:
6289 where = &other_operand_reload_insns;
6290 break;
6291 case RELOAD_FOR_OTHER_ADDRESS:
6292 where = &other_input_address_reload_insns;
6293 break;
6294 default:
6295 abort ();
6296 }
6297
6298 push_to_sequence (*where);
6299
6300 /* Auto-increment addresses must be reloaded in a special way. */
6301 if (rl->out && ! rl->out_reg)
6302 {
6303 	  /* We are not going to bother supporting the case where an
6304 	     incremented register can't be copied directly from
6305 OLDEQUIV since this seems highly unlikely. */
6306 if (rl->secondary_in_reload >= 0)
6307 abort ();
6308
6309 if (reload_inherited[j])
6310 oldequiv = reloadreg;
6311
6312 old = XEXP (rl->in_reg, 0);
6313
6314 if (optimize && GET_CODE (oldequiv) == REG
6315 && REGNO (oldequiv) < FIRST_PSEUDO_REGISTER
6316 && spill_reg_store[REGNO (oldequiv)]
6317 && GET_CODE (old) == REG
6318 && (dead_or_set_p (insn,
6319 spill_reg_stored_to[REGNO (oldequiv)])
6320 || rtx_equal_p (spill_reg_stored_to[REGNO (oldequiv)],
6321 old)))
6322 delete_output_reload (insn, j, REGNO (oldequiv));
6323
6324 /* Prevent normal processing of this reload. */
6325 special = 1;
6326 /* Output a special code sequence for this case. */
6327 new_spill_reg_store[REGNO (reloadreg)]
6328 = inc_for_reload (reloadreg, oldequiv, rl->out,
6329 rl->inc);
6330 }
6331
6332 /* If we are reloading a pseudo-register that was set by the previous
6333 insn, see if we can get rid of that pseudo-register entirely
6334 by redirecting the previous insn into our reload register. */
6335
6336 else if (optimize && GET_CODE (old) == REG
6337 && REGNO (old) >= FIRST_PSEUDO_REGISTER
6338 && dead_or_set_p (insn, old)
6339 /* This is unsafe if some other reload
6340 uses the same reg first. */
6341 && ! conflicts_with_override (reloadreg)
6342 && free_for_value_p (REGNO (reloadreg), rl->mode, rl->opnum,
6343 rl->when_needed, old, rl->out, j, 0))
6344 {
6345 rtx temp = PREV_INSN (insn);
6346 while (temp && GET_CODE (temp) == NOTE)
6347 temp = PREV_INSN (temp);
6348 if (temp
6349 && GET_CODE (temp) == INSN
6350 && GET_CODE (PATTERN (temp)) == SET
6351 && SET_DEST (PATTERN (temp)) == old
6352 /* Make sure we can access insn_operand_constraint. */
6353 && asm_noperands (PATTERN (temp)) < 0
6354 /* This is unsafe if prev insn rejects our reload reg. */
6355 && constraint_accepts_reg_p (insn_data[recog_memoized (temp)].operand[0].constraint,
6356 reloadreg)
6357 /* This is unsafe if operand occurs more than once in current
6358 insn. Perhaps some occurrences aren't reloaded. */
6359 && count_occurrences (PATTERN (insn), old, 0) == 1
6360 /* Don't risk splitting a matching pair of operands. */
6361 && ! reg_mentioned_p (old, SET_SRC (PATTERN (temp))))
6362 {
6363 /* Store into the reload register instead of the pseudo. */
6364 SET_DEST (PATTERN (temp)) = reloadreg;
6365
6366 /* If the previous insn is an output reload, the source is
6367 a reload register, and its spill_reg_store entry will
6368 contain the previous destination. This is now
6369 invalid. */
6370 if (GET_CODE (SET_SRC (PATTERN (temp))) == REG
6371 && REGNO (SET_SRC (PATTERN (temp))) < FIRST_PSEUDO_REGISTER)
6372 {
6373 spill_reg_store[REGNO (SET_SRC (PATTERN (temp)))] = 0;
6374 spill_reg_stored_to[REGNO (SET_SRC (PATTERN (temp)))] = 0;
6375 }
6376
6377 /* If these are the only uses of the pseudo reg,
6378 pretend for GDB it lives in the reload reg we used. */
6379 if (REG_N_DEATHS (REGNO (old)) == 1
6380 && REG_N_SETS (REGNO (old)) == 1)
6381 {
6382 reg_renumber[REGNO (old)] = REGNO (rl->reg_rtx);
6383 alter_reg (REGNO (old), -1);
6384 }
6385 special = 1;
6386 }
6387 }
6388
6389 /* We can't do that, so output an insn to load RELOADREG. */
6390
6391 #ifdef SECONDARY_INPUT_RELOAD_CLASS
6392 /* If we have a secondary reload, pick up the secondary register
6393 and icode, if any. If OLDEQUIV and OLD are different or
6394 if this is an in-out reload, recompute whether or not we
6395 still need a secondary register and what the icode should
6396 be. If we still need a secondary register and the class or
6397 icode is different, go back to reloading from OLD if using
6398 OLDEQUIV means that we got the wrong type of register. We
6399 cannot have different class or icode due to an in-out reload
6400 because we don't make such reloads when both the input and
6401 output need secondary reload registers. */
6402
6403 if (! special && rl->secondary_in_reload >= 0)
6404 {
6405 rtx second_reload_reg = 0;
6406 int secondary_reload = rl->secondary_in_reload;
6407 rtx real_oldequiv = oldequiv;
6408 rtx real_old = old;
6409 rtx tmp;
6410 enum insn_code icode;
6411
6412 /* If OLDEQUIV is a pseudo with a MEM, get the real MEM
6413 and similarly for OLD.
6414 See comments in get_secondary_reload in reload.c. */
6415 /* If it is a pseudo that cannot be replaced with its
6416 equivalent MEM, we must fall back to reload_in, which
6417 will have all the necessary substitutions registered.
6418 Likewise for a pseudo that can't be replaced with its
6419 equivalent constant.
6420
6421 Take extra care for subregs of such pseudos. Note that
6422 we cannot use reg_equiv_mem in this case because it is
6423 not in the right mode. */
6424
6425 tmp = oldequiv;
6426 if (GET_CODE (tmp) == SUBREG)
6427 tmp = SUBREG_REG (tmp);
6428 if (GET_CODE (tmp) == REG
6429 && REGNO (tmp) >= FIRST_PSEUDO_REGISTER
6430 && (reg_equiv_memory_loc[REGNO (tmp)] != 0
6431 || reg_equiv_constant[REGNO (tmp)] != 0))
6432 {
6433 if (! reg_equiv_mem[REGNO (tmp)]
6434 || num_not_at_initial_offset
6435 || GET_CODE (oldequiv) == SUBREG)
6436 real_oldequiv = rl->in;
6437 else
6438 real_oldequiv = reg_equiv_mem[REGNO (tmp)];
6439 }
6440
6441 tmp = old;
6442 if (GET_CODE (tmp) == SUBREG)
6443 tmp = SUBREG_REG (tmp);
6444 if (GET_CODE (tmp) == REG
6445 && REGNO (tmp) >= FIRST_PSEUDO_REGISTER
6446 && (reg_equiv_memory_loc[REGNO (tmp)] != 0
6447 || reg_equiv_constant[REGNO (tmp)] != 0))
6448 {
6449 if (! reg_equiv_mem[REGNO (tmp)]
6450 || num_not_at_initial_offset
6451 || GET_CODE (old) == SUBREG)
6452 real_old = rl->in;
6453 else
6454 real_old = reg_equiv_mem[REGNO (tmp)];
6455 }
6456
6457 second_reload_reg = rld[secondary_reload].reg_rtx;
6458 icode = rl->secondary_in_icode;
6459
6460 if ((old != oldequiv && ! rtx_equal_p (old, oldequiv))
6461 || (rl->in != 0 && rl->out != 0))
6462 {
6463 enum reg_class new_class
6464 = SECONDARY_INPUT_RELOAD_CLASS (rl->class,
6465 mode, real_oldequiv);
6466
6467 if (new_class == NO_REGS)
6468 second_reload_reg = 0;
6469 else
6470 {
6471 enum insn_code new_icode;
6472 enum machine_mode new_mode;
6473
6474 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) new_class],
6475 REGNO (second_reload_reg)))
6476 oldequiv = old, real_oldequiv = real_old;
6477 else
6478 {
6479 new_icode = reload_in_optab[(int) mode];
6480 if (new_icode != CODE_FOR_nothing
6481 && ((insn_data[(int) new_icode].operand[0].predicate
6482 && ! ((*insn_data[(int) new_icode].operand[0].predicate)
6483 (reloadreg, mode)))
6484 || (insn_data[(int) new_icode].operand[1].predicate
6485 && ! ((*insn_data[(int) new_icode].operand[1].predicate)
6486 (real_oldequiv, mode)))))
6487 new_icode = CODE_FOR_nothing;
6488
6489 if (new_icode == CODE_FOR_nothing)
6490 new_mode = mode;
6491 else
6492 new_mode = insn_data[(int) new_icode].operand[2].mode;
6493
6494 if (GET_MODE (second_reload_reg) != new_mode)
6495 {
6496 if (!HARD_REGNO_MODE_OK (REGNO (second_reload_reg),
6497 new_mode))
6498 oldequiv = old, real_oldequiv = real_old;
6499 else
6500 second_reload_reg
6501 = gen_rtx_REG (new_mode,
6502 REGNO (second_reload_reg));
6503 }
6504 }
6505 }
6506 }
6507
6508 /* If we still need a secondary reload register, check
6509 to see if it is being used as a scratch or intermediate
6510 register and generate code appropriately. If we need
6511 a scratch register, use REAL_OLDEQUIV since the form of
6512 the insn may depend on the actual address if it is
6513 a MEM. */
6514
6515 if (second_reload_reg)
6516 {
6517 if (icode != CODE_FOR_nothing)
6518 {
6519 emit_insn (GEN_FCN (icode) (reloadreg, real_oldequiv,
6520 second_reload_reg));
6521 special = 1;
6522 }
6523 else
6524 {
6525 /* See if we need a scratch register to load the
6526 intermediate register (a tertiary reload). */
6527 enum insn_code tertiary_icode
6528 = rld[secondary_reload].secondary_in_icode;
6529
6530 if (tertiary_icode != CODE_FOR_nothing)
6531 {
6532 rtx third_reload_reg
6533 = rld[rld[secondary_reload].secondary_in_reload].reg_rtx;
6534
6535 emit_insn ((GEN_FCN (tertiary_icode)
6536 (second_reload_reg, real_oldequiv,
6537 third_reload_reg)));
6538 }
6539 else
6540 gen_reload (second_reload_reg, real_oldequiv,
6541 rl->opnum,
6542 rl->when_needed);
6543
6544 oldequiv = second_reload_reg;
6545 }
6546 }
6547 }
6548 #endif
6549
6550 if (! special && ! rtx_equal_p (reloadreg, oldequiv))
6551 {
6552 rtx real_oldequiv = oldequiv;
6553
6554 if ((GET_CODE (oldequiv) == REG
6555 && REGNO (oldequiv) >= FIRST_PSEUDO_REGISTER
6556 && (reg_equiv_memory_loc[REGNO (oldequiv)] != 0
6557 || reg_equiv_constant[REGNO (oldequiv)] != 0))
6558 || (GET_CODE (oldequiv) == SUBREG
6559 && GET_CODE (SUBREG_REG (oldequiv)) == REG
6560 && (REGNO (SUBREG_REG (oldequiv))
6561 >= FIRST_PSEUDO_REGISTER)
6562 && ((reg_equiv_memory_loc
6563 [REGNO (SUBREG_REG (oldequiv))] != 0)
6564 || (reg_equiv_constant
6565 [REGNO (SUBREG_REG (oldequiv))] != 0)))
6566 || (CONSTANT_P (oldequiv)
6567 && PREFERRED_RELOAD_CLASS (oldequiv,
6568 REGNO_REG_CLASS (REGNO (reloadreg))) == NO_REGS))
6569 real_oldequiv = rl->in;
6570 gen_reload (reloadreg, real_oldequiv, rl->opnum,
6571 rl->when_needed);
6572 }
6573
6574 if (flag_non_call_exceptions)
6575 copy_eh_notes (insn, get_insns ());
6576
6577 /* End this sequence. */
6578 *where = get_insns ();
6579 end_sequence ();
6580
6581 /* Update reload_override_in so that delete_address_reloads_1
6582 can see the actual register usage. */
6583 if (oldequiv_reg)
6584 reload_override_in[j] = oldequiv;
6585 }
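
/* A minimal sketch, not used by the pass, of the sequence-accumulation
   idiom in emit_input_reload_insns above: insns for each reload type
   are generated into their own sequence (input_reload_insns[],
   operand_reload_insns, ...) so that they can later be spliced around
   the original insn in the fixed order required by the time line in
   reload_reg_free_for_value_p.  */

static void append_insn_to_sequence PARAMS ((rtx *, rtx)) ATTRIBUTE_UNUSED;

static void
append_insn_to_sequence (where, x)
     rtx *where;
     rtx x;
{
  push_to_sequence (*where);
  emit_insn (x);
  *where = get_insns ();
  end_sequence ();
}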
6586
6587 /* Generate insns for the output reload RL, which is for the insn described
6588 by CHAIN and has the number J. */
6589 static void
6590 emit_output_reload_insns (chain, rl, j)
6591 struct insn_chain *chain;
6592 struct reload *rl;
6593 int j;
6594 {
6595 rtx reloadreg = rl->reg_rtx;
6596 rtx insn = chain->insn;
6597 int special = 0;
6598 rtx old = rl->out;
6599 enum machine_mode mode = GET_MODE (old);
6600 rtx p;
6601
6602 if (rl->when_needed == RELOAD_OTHER)
6603 start_sequence ();
6604 else
6605 push_to_sequence (output_reload_insns[rl->opnum]);
6606
6607 /* Determine the mode to reload in.
6608 See comments above (for input reloading). */
6609
6610 if (mode == VOIDmode)
6611 {
6612 /* VOIDmode should never happen for an output. */
6613 if (asm_noperands (PATTERN (insn)) < 0)
6614 /* It's the compiler's fault. */
6615 fatal_insn ("VOIDmode on an output", insn);
6616 error_for_asm (insn, "output operand is constant in `asm'");
6617 /* Prevent crash--use something we know is valid. */
6618 mode = word_mode;
6619 old = gen_rtx_REG (mode, REGNO (reloadreg));
6620 }
6621
6622 if (GET_MODE (reloadreg) != mode)
6623 reloadreg = gen_rtx_REG (mode, REGNO (reloadreg));
6624
6625 #ifdef SECONDARY_OUTPUT_RELOAD_CLASS
6626
6627 /* If we need two reload regs, set RELOADREG to the intermediate
6628 one, since it will be stored into OLD. We might need a secondary
6629 register only for an input reload, so check again here. */
6630
6631 if (rl->secondary_out_reload >= 0)
6632 {
6633 rtx real_old = old;
6634
6635 if (GET_CODE (old) == REG && REGNO (old) >= FIRST_PSEUDO_REGISTER
6636 && reg_equiv_mem[REGNO (old)] != 0)
6637 real_old = reg_equiv_mem[REGNO (old)];
6638
6639 if ((SECONDARY_OUTPUT_RELOAD_CLASS (rl->class,
6640 mode, real_old)
6641 != NO_REGS))
6642 {
6643 rtx second_reloadreg = reloadreg;
6644 reloadreg = rld[rl->secondary_out_reload].reg_rtx;
6645
6646 /* See if RELOADREG is to be used as a scratch register
6647 or as an intermediate register. */
6648 if (rl->secondary_out_icode != CODE_FOR_nothing)
6649 {
6650 emit_insn ((GEN_FCN (rl->secondary_out_icode)
6651 (real_old, second_reloadreg, reloadreg)));
6652 special = 1;
6653 }
6654 else
6655 {
6656 /* See if we need both a scratch and intermediate reload
6657 register. */
6658
6659 int secondary_reload = rl->secondary_out_reload;
6660 enum insn_code tertiary_icode
6661 = rld[secondary_reload].secondary_out_icode;
6662
6663 if (GET_MODE (reloadreg) != mode)
6664 reloadreg = gen_rtx_REG (mode, REGNO (reloadreg));
6665
6666 if (tertiary_icode != CODE_FOR_nothing)
6667 {
6668 rtx third_reloadreg
6669 = rld[rld[secondary_reload].secondary_out_reload].reg_rtx;
6670 rtx tem;
6671
6672 /* Copy the primary reload reg to the secondary reload reg (note
6673 that these were swapped above), then copy the secondary reload
6674 reg to OLD using our insn. */
6675
6676 /* If REAL_OLD is a paradoxical SUBREG, remove it
6677 and try to put the opposite SUBREG on
6678 RELOADREG. */
6679 if (GET_CODE (real_old) == SUBREG
6680 && (GET_MODE_SIZE (GET_MODE (real_old))
6681 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (real_old))))
6682 && 0 != (tem = gen_lowpart_common
6683 (GET_MODE (SUBREG_REG (real_old)),
6684 reloadreg)))
6685 real_old = SUBREG_REG (real_old), reloadreg = tem;
6686
6687 gen_reload (reloadreg, second_reloadreg,
6688 rl->opnum, rl->when_needed);
6689 emit_insn ((GEN_FCN (tertiary_icode)
6690 (real_old, reloadreg, third_reloadreg)));
6691 special = 1;
6692 }
6693
6694 else
6695 /* Copy between the reload regs here and then to
6696 OUT later. */
6697
6698 gen_reload (reloadreg, second_reloadreg,
6699 rl->opnum, rl->when_needed);
6700 }
6701 }
6702 }
6703 #endif
6704
6705 /* Output the last reload insn. */
6706 if (! special)
6707 {
6708 rtx set;
6709
6710 /* Don't output the last reload if OLD is not the dest of
6711 INSN and is in the src and is clobbered by INSN. */
6712 if (! flag_expensive_optimizations
6713 || GET_CODE (old) != REG
6714 || !(set = single_set (insn))
6715 || rtx_equal_p (old, SET_DEST (set))
6716 || !reg_mentioned_p (old, SET_SRC (set))
6717 || !regno_clobbered_p (REGNO (old), insn, rl->mode, 0))
6718 gen_reload (old, reloadreg, rl->opnum,
6719 rl->when_needed);
6720 }
6721
6722 /* Look at all insns we emitted, just to be safe. */
6723 for (p = get_insns (); p; p = NEXT_INSN (p))
6724 if (INSN_P (p))
6725 {
6726 rtx pat = PATTERN (p);
6727
6728 /* If this output reload doesn't come from a spill reg,
6729 clear any memory of reloaded copies of the pseudo reg.
6730 If this output reload comes from a spill reg,
6731 reg_has_output_reload will make this do nothing. */
6732 note_stores (pat, forget_old_reloads_1, NULL);
6733
6734 if (reg_mentioned_p (rl->reg_rtx, pat))
6735 {
6736 rtx set = single_set (insn);
6737 if (reload_spill_index[j] < 0
6738 && set
6739 && SET_SRC (set) == rl->reg_rtx)
6740 {
6741 int src = REGNO (SET_SRC (set));
6742
6743 reload_spill_index[j] = src;
6744 SET_HARD_REG_BIT (reg_is_output_reload, src);
6745 if (find_regno_note (insn, REG_DEAD, src))
6746 SET_HARD_REG_BIT (reg_reloaded_died, src);
6747 }
6748 if (REGNO (rl->reg_rtx) < FIRST_PSEUDO_REGISTER)
6749 {
6750 int s = rl->secondary_out_reload;
6751 set = single_set (p);
6752 /* If this reload copies only to the secondary reload
6753 register, the secondary reload does the actual
6754 store. */
6755 if (s >= 0 && set == NULL_RTX)
6756 /* We can't tell what function the secondary reload
6757 has and where the actual store to the pseudo is
6758 made; leave new_spill_reg_store alone. */
6759 ;
6760 else if (s >= 0
6761 && SET_SRC (set) == rl->reg_rtx
6762 && SET_DEST (set) == rld[s].reg_rtx)
6763 {
6764 /* Usually the next instruction will be the
6765 secondary reload insn; if we can confirm
6766 that it is, setting new_spill_reg_store to
6767 that insn will allow an extra optimization. */
6768 rtx s_reg = rld[s].reg_rtx;
6769 rtx next = NEXT_INSN (p);
6770 rld[s].out = rl->out;
6771 rld[s].out_reg = rl->out_reg;
6772 set = single_set (next);
6773 if (set && SET_SRC (set) == s_reg
6774 && ! new_spill_reg_store[REGNO (s_reg)])
6775 {
6776 SET_HARD_REG_BIT (reg_is_output_reload,
6777 REGNO (s_reg));
6778 new_spill_reg_store[REGNO (s_reg)] = next;
6779 }
6780 }
6781 else
6782 new_spill_reg_store[REGNO (rl->reg_rtx)] = p;
6783 }
6784 }
6785 }
6786
6787 if (rl->when_needed == RELOAD_OTHER)
6788 {
6789 emit_insns (other_output_reload_insns[rl->opnum]);
6790 other_output_reload_insns[rl->opnum] = get_insns ();
6791 }
6792 else
6793 output_reload_insns[rl->opnum] = get_insns ();
6794
6795 if (flag_non_call_exceptions)
6796 copy_eh_notes (insn, get_insns ());
6797
6798 end_sequence ();
6799 }
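
/* The following is an illustrative sketch only, not code the pass uses:
   plain int assignments stand in for the insns emitted above when an
   output reload needs both an intermediate (secondary) register and a
   tertiary scratch.  The value travels primary -> intermediate -> OLD;
   the scratch is used only inside the tertiary pattern and does not
   affect the value.  The function name is hypothetical.  */

static int
output_reload_chain_sketch (primary_value)
     int primary_value;
{
  int intermediate;
  int old_loc;

  intermediate = primary_value;	/* gen_reload (reloadreg, second_reloadreg, ...) */
  old_loc = intermediate;	/* the tertiary pattern stores into REAL_OLD,
				   using its scratch register only internally */
  return old_loc;		/* equal to primary_value */
}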
6800
6801 /* Do input reloading for reload RL, which is for the insn described by CHAIN
6802 and has the number J. */
6803 static void
6804 do_input_reload (chain, rl, j)
6805 struct insn_chain *chain;
6806 struct reload *rl;
6807 int j;
6808 {
6809 int expect_occurrences = 1;
6810 rtx insn = chain->insn;
6811 rtx old = (rl->in && GET_CODE (rl->in) == MEM
6812 ? rl->in_reg : rl->in);
6813
6814 if (old != 0
6815 /* AUTO_INC reloads need to be handled even if inherited. We got an
6816 AUTO_INC reload if reload_out is set but reload_out_reg isn't. */
6817 && (! reload_inherited[j] || (rl->out && ! rl->out_reg))
6818 && ! rtx_equal_p (rl->reg_rtx, old)
6819 && rl->reg_rtx != 0)
6820 emit_input_reload_insns (chain, rld + j, old, j);
6821
6822 /* When inheriting a wider reload, we have a MEM in rl->in,
6823 e.g. inheriting a SImode output reload for
6824 (mem:HI (plus:SI (reg:SI 14 fp) (const_int 10))) */
6825 if (optimize && reload_inherited[j] && rl->in
6826 && GET_CODE (rl->in) == MEM
6827 && GET_CODE (rl->in_reg) == MEM
6828 && reload_spill_index[j] >= 0
6829 && TEST_HARD_REG_BIT (reg_reloaded_valid, reload_spill_index[j]))
6830 {
6831 expect_occurrences
6832 = count_occurrences (PATTERN (insn), rl->in, 0) == 1 ? 0 : -1;
6833 rl->in = regno_reg_rtx[reg_reloaded_contents[reload_spill_index[j]]];
6834 }
6835
6836 /* If we are reloading a register that was recently stored in with an
6837 output-reload, see if we can prove there was
6838 actually no need to store the old value in it. */
6839
6840 if (optimize
6841 && (reload_inherited[j] || reload_override_in[j])
6842 && rl->reg_rtx
6843 && GET_CODE (rl->reg_rtx) == REG
6844 && spill_reg_store[REGNO (rl->reg_rtx)] != 0
6845 #if 0
6846 /* There doesn't seem to be any reason to restrict this to pseudos
6847 and doing so loses in the case where we are copying from a
6848 register of the wrong class. */
6849 && (REGNO (spill_reg_stored_to[REGNO (rl->reg_rtx)])
6850 >= FIRST_PSEUDO_REGISTER)
6851 #endif
6852 /* The insn might have already some references to stackslots
6853 replaced by MEMs, while reload_out_reg still names the
6854 original pseudo. */
6855 && (dead_or_set_p (insn,
6856 spill_reg_stored_to[REGNO (rl->reg_rtx)])
6857 || rtx_equal_p (spill_reg_stored_to[REGNO (rl->reg_rtx)],
6858 rl->out_reg)))
6859 delete_output_reload (insn, j, REGNO (rl->reg_rtx));
6860 }
6861
6862 /* Do output reloading for reload RL, which is for the insn described by
6863 CHAIN and has the number J.
6864 ??? At some point we need to support handling output reloads of
6865 JUMP_INSNs or insns that set cc0. */
6866 static void
6867 do_output_reload (chain, rl, j)
6868 struct insn_chain *chain;
6869 struct reload *rl;
6870 int j;
6871 {
6872 rtx note, old;
6873 rtx insn = chain->insn;
6874 /* If this is an output reload that stores something that is
6875 not loaded in this same reload, see if we can eliminate a previous
6876 store. */
6877 rtx pseudo = rl->out_reg;
6878
6879 if (pseudo
6880 && GET_CODE (pseudo) == REG
6881 && ! rtx_equal_p (rl->in_reg, pseudo)
6882 && REGNO (pseudo) >= FIRST_PSEUDO_REGISTER
6883 && reg_last_reload_reg[REGNO (pseudo)])
6884 {
6885 int pseudo_no = REGNO (pseudo);
6886 int last_regno = REGNO (reg_last_reload_reg[pseudo_no]);
6887
6888 /* We don't need to test full validity of last_regno for
6889 inherit here; we only want to know if the store actually
6890 matches the pseudo. */
6891 if (TEST_HARD_REG_BIT (reg_reloaded_valid, last_regno)
6892 && reg_reloaded_contents[last_regno] == pseudo_no
6893 && spill_reg_store[last_regno]
6894 && rtx_equal_p (pseudo, spill_reg_stored_to[last_regno]))
6895 delete_output_reload (insn, j, last_regno);
6896 }
6897
6898 old = rl->out_reg;
6899 if (old == 0
6900 || rl->reg_rtx == old
6901 || rl->reg_rtx == 0)
6902 return;
6903
6904 /* An output operand that dies right away does need a reload,
6905 but the value need not be copied back from the reload register into it.
6906 Show the new location in the REG_UNUSED note. */
6907 if ((GET_CODE (old) == REG || GET_CODE (old) == SCRATCH)
6908 && (note = find_reg_note (insn, REG_UNUSED, old)) != 0)
6909 {
6910 XEXP (note, 0) = rl->reg_rtx;
6911 return;
6912 }
6913 /* Likewise for a SUBREG of an operand that dies. */
6914 else if (GET_CODE (old) == SUBREG
6915 && GET_CODE (SUBREG_REG (old)) == REG
6916 && 0 != (note = find_reg_note (insn, REG_UNUSED,
6917 SUBREG_REG (old))))
6918 {
6919 XEXP (note, 0) = gen_lowpart_common (GET_MODE (old),
6920 rl->reg_rtx);
6921 return;
6922 }
6923 else if (GET_CODE (old) == SCRATCH)
6924 /* If we aren't optimizing, there won't be a REG_UNUSED note,
6925 but we don't want to make an output reload. */
6926 return;
6927
6928 /* If INSN is a JUMP_INSN, we can't support output reloads yet. */
6929 if (GET_CODE (insn) == JUMP_INSN)
6930 abort ();
6931
6932 emit_output_reload_insns (chain, rld + j, j);
6933 }
6934
6935 /* Output insns to reload values in and out of the chosen reload regs. */
6936
6937 static void
6938 emit_reload_insns (chain)
6939 struct insn_chain *chain;
6940 {
6941 rtx insn = chain->insn;
6942
6943 register int j;
6944 rtx following_insn = NEXT_INSN (insn);
6945 rtx before_insn = PREV_INSN (insn);
6946
6947 CLEAR_HARD_REG_SET (reg_reloaded_died);
6948
6949 for (j = 0; j < reload_n_operands; j++)
6950 input_reload_insns[j] = input_address_reload_insns[j]
6951 = inpaddr_address_reload_insns[j]
6952 = output_reload_insns[j] = output_address_reload_insns[j]
6953 = outaddr_address_reload_insns[j]
6954 = other_output_reload_insns[j] = 0;
6955 other_input_address_reload_insns = 0;
6956 other_input_reload_insns = 0;
6957 operand_reload_insns = 0;
6958 other_operand_reload_insns = 0;
6959
6960 /* Dump reloads into the dump file. */
6961 if (rtl_dump_file)
6962 {
6963 fprintf (rtl_dump_file, "\nReloads for insn # %d\n", INSN_UID (insn));
6964 debug_reload_to_stream (rtl_dump_file);
6965 }
6966
6967 /* Now output the instructions to copy the data into and out of the
6968 reload registers. Do these in the order that the reloads were reported,
6969 since reloads of base and index registers precede reloads of operands
6970 and the operands may need the base and index registers reloaded. */
6971
6972 for (j = 0; j < n_reloads; j++)
6973 {
6974 if (rld[j].reg_rtx
6975 && REGNO (rld[j].reg_rtx) < FIRST_PSEUDO_REGISTER)
6976 new_spill_reg_store[REGNO (rld[j].reg_rtx)] = 0;
6977
6978 do_input_reload (chain, rld + j, j);
6979 do_output_reload (chain, rld + j, j);
6980 }
6981
6982 /* Now write all the insns we made for reloads in the order expected by
6983 the allocation functions. Prior to the insn being reloaded, we write
6984 the following reloads:
6985
6986 RELOAD_FOR_OTHER_ADDRESS reloads for input addresses.
6987
6988 RELOAD_OTHER reloads.
6989
6990 For each operand, any RELOAD_FOR_INPADDR_ADDRESS reloads followed
6991 by any RELOAD_FOR_INPUT_ADDRESS reloads followed by the
6992 RELOAD_FOR_INPUT reload for the operand.
6993
6994 RELOAD_FOR_OPADDR_ADDRS reloads.
6995
6996 RELOAD_FOR_OPERAND_ADDRESS reloads.
6997
6998 After the insn being reloaded, we write the following:
6999
7000 For each operand, any RELOAD_FOR_OUTADDR_ADDRESS reloads followed
7001 by any RELOAD_FOR_OUTPUT_ADDRESS reload followed by the
7002 RELOAD_FOR_OUTPUT reload, followed by any RELOAD_OTHER output
7003 reloads for the operand. The RELOAD_OTHER output reloads are
7004 output in descending order by reload number. */
7005
7006 emit_insns_before (other_input_address_reload_insns, insn);
7007 emit_insns_before (other_input_reload_insns, insn);
7008
7009 for (j = 0; j < reload_n_operands; j++)
7010 {
7011 emit_insns_before (inpaddr_address_reload_insns[j], insn);
7012 emit_insns_before (input_address_reload_insns[j], insn);
7013 emit_insns_before (input_reload_insns[j], insn);
7014 }
7015
7016 emit_insns_before (other_operand_reload_insns, insn);
7017 emit_insns_before (operand_reload_insns, insn);
7018
7019 for (j = 0; j < reload_n_operands; j++)
7020 {
7021 emit_insns_before (outaddr_address_reload_insns[j], following_insn);
7022 emit_insns_before (output_address_reload_insns[j], following_insn);
7023 emit_insns_before (output_reload_insns[j], following_insn);
7024 emit_insns_before (other_output_reload_insns[j], following_insn);
7025 }
7026
7027 /* Keep basic block info up to date. */
7028 if (n_basic_blocks)
7029 {
7030 if (BLOCK_HEAD (chain->block) == insn)
7031 BLOCK_HEAD (chain->block) = NEXT_INSN (before_insn);
7032 if (BLOCK_END (chain->block) == insn)
7033 BLOCK_END (chain->block) = PREV_INSN (following_insn);
7034 }
7035
7036 /* For all the spill regs newly reloaded in this instruction,
7037 record what they were reloaded from, so subsequent instructions
7038 can inherit the reloads.
7039
7040 Update spill_reg_store for the reloads of this insn.
7041 Copy the elements that were updated in the loop above. */
7042
7043 for (j = 0; j < n_reloads; j++)
7044 {
7045 register int r = reload_order[j];
7046 register int i = reload_spill_index[r];
7047
7048 /* If this is a non-inherited input reload from a pseudo, we must
7049 clear any memory of a previous store to the same pseudo. Only do
7050 something if there will not be an output reload for the pseudo
7051 being reloaded. */
7052 if (rld[r].in_reg != 0
7053 && ! (reload_inherited[r] || reload_override_in[r]))
7054 {
7055 rtx reg = rld[r].in_reg;
7056
7057 if (GET_CODE (reg) == SUBREG)
7058 reg = SUBREG_REG (reg);
7059
7060 if (GET_CODE (reg) == REG
7061 && REGNO (reg) >= FIRST_PSEUDO_REGISTER
7062 && ! reg_has_output_reload[REGNO (reg)])
7063 {
7064 int nregno = REGNO (reg);
7065
7066 if (reg_last_reload_reg[nregno])
7067 {
7068 int last_regno = REGNO (reg_last_reload_reg[nregno]);
7069
7070 if (reg_reloaded_contents[last_regno] == nregno)
7071 spill_reg_store[last_regno] = 0;
7072 }
7073 }
7074 }
7075
7076 /* I is nonnegative if this reload used a register.
7077 If rld[r].reg_rtx is 0, this is an optional reload
7078 that we opted to ignore. */
7079
7080 if (i >= 0 && rld[r].reg_rtx != 0)
7081 {
7082 int nr = HARD_REGNO_NREGS (i, GET_MODE (rld[r].reg_rtx));
7083 int k;
7084 int part_reaches_end = 0;
7085 int all_reaches_end = 1;
7086
7087 /* For a multi register reload, we need to check if all or part
7088 of the value lives to the end. */
7089 for (k = 0; k < nr; k++)
7090 {
7091 if (reload_reg_reaches_end_p (i + k, rld[r].opnum,
7092 rld[r].when_needed))
7093 part_reaches_end = 1;
7094 else
7095 all_reaches_end = 0;
7096 }
7097
7098 /* Ignore reloads that don't reach the end of the insn in their
7099 entirety. */
7100 if (all_reaches_end)
7101 {
7102 /* First, clear out memory of what used to be in this spill reg.
7103 If consecutive registers are used, clear them all. */
7104
7105 for (k = 0; k < nr; k++)
7106 CLEAR_HARD_REG_BIT (reg_reloaded_valid, i + k);
7107
7108 /* Maybe the spill reg contains a copy of reload_out. */
7109 if (rld[r].out != 0
7110 && (GET_CODE (rld[r].out) == REG
7111 #ifdef AUTO_INC_DEC
7112 || ! rld[r].out_reg
7113 #endif
7114 || GET_CODE (rld[r].out_reg) == REG))
7115 {
7116 rtx out = (GET_CODE (rld[r].out) == REG
7117 ? rld[r].out
7118 : rld[r].out_reg
7119 ? rld[r].out_reg
7120 /* AUTO_INC */ : XEXP (rld[r].in_reg, 0));
7121 register int nregno = REGNO (out);
7122 int nnr = (nregno >= FIRST_PSEUDO_REGISTER ? 1
7123 : HARD_REGNO_NREGS (nregno,
7124 GET_MODE (rld[r].reg_rtx)));
7125
7126 spill_reg_store[i] = new_spill_reg_store[i];
7127 spill_reg_stored_to[i] = out;
7128 reg_last_reload_reg[nregno] = rld[r].reg_rtx;
7129
7130 /* If NREGNO is a hard register, it may occupy more than
7131 one register. If it does, say what is in the
7132 rest of the registers assuming that both registers
7133 agree on how many words the object takes. If not,
7134 invalidate the subsequent registers. */
7135
7136 if (nregno < FIRST_PSEUDO_REGISTER)
7137 for (k = 1; k < nnr; k++)
7138 reg_last_reload_reg[nregno + k]
7139 = (nr == nnr
7140 ? gen_rtx_REG (reg_raw_mode[REGNO (rld[r].reg_rtx) + k],
7141 REGNO (rld[r].reg_rtx) + k)
7142 : 0);
7143
7144 /* Now do the inverse operation. */
7145 for (k = 0; k < nr; k++)
7146 {
7147 CLEAR_HARD_REG_BIT (reg_reloaded_dead, i + k);
7148 reg_reloaded_contents[i + k]
7149 = (nregno >= FIRST_PSEUDO_REGISTER || nr != nnr
7150 ? nregno
7151 : nregno + k);
7152 reg_reloaded_insn[i + k] = insn;
7153 SET_HARD_REG_BIT (reg_reloaded_valid, i + k);
7154 }
7155 }
7156
7157 /* Maybe the spill reg contains a copy of reload_in. Only do
7158 something if there will not be an output reload for
7159 the register being reloaded. */
7160 else if (rld[r].out_reg == 0
7161 && rld[r].in != 0
7162 && ((GET_CODE (rld[r].in) == REG
7163 && REGNO (rld[r].in) >= FIRST_PSEUDO_REGISTER
7164 && ! reg_has_output_reload[REGNO (rld[r].in)])
7165 || (GET_CODE (rld[r].in_reg) == REG
7166 && ! reg_has_output_reload[REGNO (rld[r].in_reg)]))
7167 && ! reg_set_p (rld[r].reg_rtx, PATTERN (insn)))
7168 {
7169 register int nregno;
7170 int nnr;
7171
7172 if (GET_CODE (rld[r].in) == REG
7173 && REGNO (rld[r].in) >= FIRST_PSEUDO_REGISTER)
7174 nregno = REGNO (rld[r].in);
7175 else if (GET_CODE (rld[r].in_reg) == REG)
7176 nregno = REGNO (rld[r].in_reg);
7177 else
7178 nregno = REGNO (XEXP (rld[r].in_reg, 0));
7179
7180 nnr = (nregno >= FIRST_PSEUDO_REGISTER ? 1
7181 : HARD_REGNO_NREGS (nregno,
7182 GET_MODE (rld[r].reg_rtx)));
7183
7184 reg_last_reload_reg[nregno] = rld[r].reg_rtx;
7185
7186 if (nregno < FIRST_PSEUDO_REGISTER)
7187 for (k = 1; k < nnr; k++)
7188 reg_last_reload_reg[nregno + k]
7189 = (nr == nnr
7190 ? gen_rtx_REG (reg_raw_mode[REGNO (rld[r].reg_rtx) + k],
7191 REGNO (rld[r].reg_rtx) + k)
7192 : 0);
7193
7194 /* Unless we inherited this reload, show we haven't
7195 recently done a store.
7196 Previous stores of inherited auto_inc expressions
7197 also have to be discarded. */
7198 if (! reload_inherited[r]
7199 || (rld[r].out && ! rld[r].out_reg))
7200 spill_reg_store[i] = 0;
7201
7202 for (k = 0; k < nr; k++)
7203 {
7204 CLEAR_HARD_REG_BIT (reg_reloaded_dead, i + k);
7205 reg_reloaded_contents[i + k]
7206 = (nregno >= FIRST_PSEUDO_REGISTER || nr != nnr
7207 ? nregno
7208 : nregno + k);
7209 reg_reloaded_insn[i + k] = insn;
7210 SET_HARD_REG_BIT (reg_reloaded_valid, i + k);
7211 }
7212 }
7213 }
7214
7215 /* However, if part of the reload reaches the end, then we must
7216 invalidate the old info for the part that survives to the end. */
7217 else if (part_reaches_end)
7218 {
7219 for (k = 0; k < nr; k++)
7220 if (reload_reg_reaches_end_p (i + k,
7221 rld[r].opnum,
7222 rld[r].when_needed))
7223 CLEAR_HARD_REG_BIT (reg_reloaded_valid, i + k);
7224 }
7225 }
7226
7227 /* The following if-statement was #if 0'd in 1.34 (or before...).
7228 It's reenabled in 1.35 because supposedly nothing else
7229 deals with this problem. */
7230
7231 /* If a register gets output-reloaded from a non-spill register,
7232 that invalidates any previous reloaded copy of it.
7233 But forget_old_reloads_1 won't get to see it, because
7234 it thinks only about the original insn. So invalidate it here. */
7235 if (i < 0 && rld[r].out != 0
7236 && (GET_CODE (rld[r].out) == REG
7237 || (GET_CODE (rld[r].out) == MEM
7238 && GET_CODE (rld[r].out_reg) == REG)))
7239 {
7240 rtx out = (GET_CODE (rld[r].out) == REG
7241 ? rld[r].out : rld[r].out_reg);
7242 register int nregno = REGNO (out);
7243 if (nregno >= FIRST_PSEUDO_REGISTER)
7244 {
7245 rtx src_reg, store_insn = NULL_RTX;
7246
7247 reg_last_reload_reg[nregno] = 0;
7248
7249 /* If we can find a hard register that is stored, record
7250 the storing insn so that we may delete this insn with
7251 delete_output_reload. */
7252 src_reg = rld[r].reg_rtx;
7253
7254 /* If this is an optional reload, try to find the source reg
7255 from an input reload. */
7256 if (! src_reg)
7257 {
7258 rtx set = single_set (insn);
7259 if (set && SET_DEST (set) == rld[r].out)
7260 {
7261 int k;
7262
7263 src_reg = SET_SRC (set);
7264 store_insn = insn;
7265 for (k = 0; k < n_reloads; k++)
7266 {
7267 if (rld[k].in == src_reg)
7268 {
7269 src_reg = rld[k].reg_rtx;
7270 break;
7271 }
7272 }
7273 }
7274 }
7275 else
7276 store_insn = new_spill_reg_store[REGNO (src_reg)];
7277 if (src_reg && GET_CODE (src_reg) == REG
7278 && REGNO (src_reg) < FIRST_PSEUDO_REGISTER)
7279 {
7280 int src_regno = REGNO (src_reg);
7281 int nr = HARD_REGNO_NREGS (src_regno, rld[r].mode);
7282 /* The place to find a death note varies with
7283 PRESERVE_DEATH_INFO_REGNO_P. The condition is not
7284 necessarily checked exactly in the code that moves
7285 notes, so just check both locations. */
7286 rtx note = find_regno_note (insn, REG_DEAD, src_regno);
7287 if (! note && store_insn)
7288 note = find_regno_note (store_insn, REG_DEAD, src_regno);
7289 while (nr-- > 0)
7290 {
7291 spill_reg_store[src_regno + nr] = store_insn;
7292 spill_reg_stored_to[src_regno + nr] = out;
7293 reg_reloaded_contents[src_regno + nr] = nregno;
7294 reg_reloaded_insn[src_regno + nr] = store_insn;
7295 CLEAR_HARD_REG_BIT (reg_reloaded_dead, src_regno + nr);
7296 SET_HARD_REG_BIT (reg_reloaded_valid, src_regno + nr);
7297 SET_HARD_REG_BIT (reg_is_output_reload, src_regno + nr);
7298 if (note)
7299 SET_HARD_REG_BIT (reg_reloaded_died, src_regno);
7300 else
7301 CLEAR_HARD_REG_BIT (reg_reloaded_died, src_regno);
7302 }
7303 reg_last_reload_reg[nregno] = src_reg;
7304 }
7305 }
7306 else
7307 {
7308 int num_regs = HARD_REGNO_NREGS (nregno, GET_MODE (rld[r].out));
7309
7310 while (num_regs-- > 0)
7311 reg_last_reload_reg[nregno + num_regs] = 0;
7312 }
7313 }
7314 }
7315 IOR_HARD_REG_SET (reg_reloaded_dead, reg_reloaded_died);
7316 }
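
/* A self-contained sketch, for illustration only, of the emission order
   documented in the comment above: which groups of reload insns are
   placed before and after the insn being reloaded.  It does not touch
   the pass's own data structures; the function name is hypothetical and
   the stdio include is needed only by this sketch.  */

#include <stdio.h>

static void
print_reload_emission_order (n_operands)
     int n_operands;
{
  int j;

  printf ("before the reloaded insn:\n");
  printf ("  RELOAD_FOR_OTHER_ADDRESS reloads\n");
  printf ("  RELOAD_OTHER input reloads\n");
  for (j = 0; j < n_operands; j++)
    printf ("  operand %d: INPADDR_ADDRESS, then INPUT_ADDRESS, then INPUT\n", j);
  printf ("  RELOAD_FOR_OPADDR_ADDRS reloads\n");
  printf ("  RELOAD_FOR_OPERAND_ADDRESS reloads\n");
  printf ("after the reloaded insn:\n");
  for (j = 0; j < n_operands; j++)
    printf ("  operand %d: OUTADDR_ADDRESS, then OUTPUT_ADDRESS, then OUTPUT,\n"
	    "             then RELOAD_OTHER output reloads\n", j);
}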
7317 \f
7318 /* Emit code to perform a reload from IN (which may be a reload register) to
7319 OUT (which may also be a reload register). IN or OUT is from operand
7320 OPNUM with reload type TYPE.
7321
7322 Returns first insn emitted. */
7323
7324 rtx
7325 gen_reload (out, in, opnum, type)
7326 rtx out;
7327 rtx in;
7328 int opnum;
7329 enum reload_type type;
7330 {
7331 rtx last = get_last_insn ();
7332 rtx tem;
7333
7334 /* If IN is a paradoxical SUBREG, remove it and try to put the
7335 opposite SUBREG on OUT. Likewise for a paradoxical SUBREG on OUT. */
7336 if (GET_CODE (in) == SUBREG
7337 && (GET_MODE_SIZE (GET_MODE (in))
7338 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))))
7339 && (tem = gen_lowpart_common (GET_MODE (SUBREG_REG (in)), out)) != 0)
7340 in = SUBREG_REG (in), out = tem;
7341 else if (GET_CODE (out) == SUBREG
7342 && (GET_MODE_SIZE (GET_MODE (out))
7343 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))))
7344 && (tem = gen_lowpart_common (GET_MODE (SUBREG_REG (out)), in)) != 0)
7345 out = SUBREG_REG (out), in = tem;
7346
7347 /* How to do this reload can get quite tricky. Normally, we are being
7348 asked to reload a simple operand, such as a MEM, a constant, or a pseudo
7349 register that didn't get a hard register. In that case we can just
7350 call emit_move_insn.
7351
7352 We can also be asked to reload a PLUS that adds a register or a MEM to
7353 another register, constant or MEM. This can occur during frame pointer
7354 elimination and while reloading addresses. This case is handled by
7355 trying to emit a single insn to perform the add. If it is not valid,
7356 we use a two insn sequence.
7357
7358 Finally, we could be called to handle an 'o' constraint by putting
7359 an address into a register. In that case, we first try to do this
7360 with a named pattern of "reload_load_address". If no such pattern
7361 exists, we just emit a SET insn and hope for the best (it will normally
7362 be valid on machines that use 'o').
7363
7364 This entire process is made complex by two facts: reload will never
7365 process the insns we generate here, so we must ensure that they will
7366 fit their constraints; and parts of IN might be being reloaded
7367 separately and replaced with spill registers. Because of this, we
7368 are, in some sense, just guessing the right approach here. The one
7369 listed above seems to work.
7370
7371 ??? At some point, this whole thing needs to be rethought. */
7372
7373 if (GET_CODE (in) == PLUS
7374 && (GET_CODE (XEXP (in, 0)) == REG
7375 || GET_CODE (XEXP (in, 0)) == SUBREG
7376 || GET_CODE (XEXP (in, 0)) == MEM)
7377 && (GET_CODE (XEXP (in, 1)) == REG
7378 || GET_CODE (XEXP (in, 1)) == SUBREG
7379 || CONSTANT_P (XEXP (in, 1))
7380 || GET_CODE (XEXP (in, 1)) == MEM))
7381 {
7382 /* We need to compute the sum of a register or a MEM and another
7383 register, constant, or MEM, and put it into the reload
7384 register. The best possible way of doing this is if the machine
7385 has a three-operand ADD insn that accepts the required operands.
7386
7387 The simplest approach is to try to generate such an insn and see if it
7388 is recognized and matches its constraints. If so, it can be used.
7389
7390 It might be better not to actually emit the insn unless it is valid,
7391 but we need to pass the insn as an operand to `recog' and
7392 `extract_insn' and it is simpler to emit and then delete the insn if
7393 not valid than to dummy things up. */
7394
7395 rtx op0, op1, tem, insn;
7396 int code;
7397
7398 op0 = find_replacement (&XEXP (in, 0));
7399 op1 = find_replacement (&XEXP (in, 1));
7400
7401 /* Since constraint checking is strict, commutativity won't be
7402 checked, so we need to do that here to avoid spurious failure
7403 if the add instruction is two-address and the second operand
7404 of the add is the same as the reload reg, which is frequently
7405 the case. If the insn would be A = B + A, rearrange it so
7406 it will be A = A + B as constrain_operands expects. */
7407
7408 if (GET_CODE (XEXP (in, 1)) == REG
7409 && REGNO (out) == REGNO (XEXP (in, 1)))
7410 tem = op0, op0 = op1, op1 = tem;
7411
7412 if (op0 != XEXP (in, 0) || op1 != XEXP (in, 1))
7413 in = gen_rtx_PLUS (GET_MODE (in), op0, op1);
7414
7415 insn = emit_insn (gen_rtx_SET (VOIDmode, out, in));
7416 code = recog_memoized (insn);
7417
7418 if (code >= 0)
7419 {
7420 extract_insn (insn);
7421 /* We want constrain operands to treat this insn strictly in
7422 its validity determination, i.e., the way it would after reload
7423 has completed. */
7424 if (constrain_operands (1))
7425 return insn;
7426 }
7427
7428 delete_insns_since (last);
7429
7430 /* If that failed, we must use a conservative two-insn sequence.
7431
7432 Use a move to copy one operand into the reload register. Prefer
7433 to reload a constant, MEM or pseudo since the move patterns can
7434 handle an arbitrary operand. If OP1 is not a constant, MEM or
7435 pseudo and OP1 is not a valid operand for an add instruction, then
7436 reload OP1.
7437
7438 After reloading one of the operands into the reload register, add
7439 the reload register to the output register.
7440
7441 If there is another way to do this for a specific machine, a
7442 DEFINE_PEEPHOLE should be specified that recognizes the sequence
7443 we emit below. */
7444
7445 code = (int) add_optab->handlers[(int) GET_MODE (out)].insn_code;
7446
7447 if (CONSTANT_P (op1) || GET_CODE (op1) == MEM || GET_CODE (op1) == SUBREG
7448 || (GET_CODE (op1) == REG
7449 && REGNO (op1) >= FIRST_PSEUDO_REGISTER)
7450 || (code != CODE_FOR_nothing
7451 && ! ((*insn_data[code].operand[2].predicate)
7452 (op1, insn_data[code].operand[2].mode))))
7453 tem = op0, op0 = op1, op1 = tem;
7454
7455 gen_reload (out, op0, opnum, type);
7456
7457 /* If OP0 and OP1 are the same, we can use OUT for OP1.
7458 This fixes a problem on the 32K where the stack pointer cannot
7459 be used as an operand of an add insn. */
7460
7461 if (rtx_equal_p (op0, op1))
7462 op1 = out;
7463
7464 insn = emit_insn (gen_add2_insn (out, op1));
7465
7466 /* If that failed, copy the address register to the reload register.
7467 Then add the constant to the reload register. */
7468
7469 code = recog_memoized (insn);
7470
7471 if (code >= 0)
7472 {
7473 extract_insn (insn);
7474 /* We want constrain operands to treat this insn strictly in
7475 its validity determination, i.e., the way it would after reload
7476 has completed. */
7477 if (constrain_operands (1))
7478 {
7479 /* Add a REG_EQUIV note so that find_equiv_reg can find it. */
7480 REG_NOTES (insn)
7481 = gen_rtx_EXPR_LIST (REG_EQUIV, in, REG_NOTES (insn));
7482 return insn;
7483 }
7484 }
7485
7486 delete_insns_since (last);
7487
7488 gen_reload (out, op1, opnum, type);
7489 insn = emit_insn (gen_add2_insn (out, op0));
7490 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_EQUIV, in, REG_NOTES (insn));
7491 }
7492
7493 #ifdef SECONDARY_MEMORY_NEEDED
7494 /* If we need a memory location to do the move, do it that way. */
7495 else if (GET_CODE (in) == REG && REGNO (in) < FIRST_PSEUDO_REGISTER
7496 && GET_CODE (out) == REG && REGNO (out) < FIRST_PSEUDO_REGISTER
7497 && SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (REGNO (in)),
7498 REGNO_REG_CLASS (REGNO (out)),
7499 GET_MODE (out)))
7500 {
7501 /* Get the memory to use and rewrite both registers to its mode. */
7502 rtx loc = get_secondary_mem (in, GET_MODE (out), opnum, type);
7503
7504 if (GET_MODE (loc) != GET_MODE (out))
7505 out = gen_rtx_REG (GET_MODE (loc), REGNO (out));
7506
7507 if (GET_MODE (loc) != GET_MODE (in))
7508 in = gen_rtx_REG (GET_MODE (loc), REGNO (in));
7509
7510 gen_reload (loc, in, opnum, type);
7511 gen_reload (out, loc, opnum, type);
7512 }
7513 #endif
7514
7515 /* If IN is a simple operand, use gen_move_insn. */
7516 else if (GET_RTX_CLASS (GET_CODE (in)) == 'o' || GET_CODE (in) == SUBREG)
7517 emit_insn (gen_move_insn (out, in));
7518
7519 #ifdef HAVE_reload_load_address
7520 else if (HAVE_reload_load_address)
7521 emit_insn (gen_reload_load_address (out, in));
7522 #endif
7523
7524 /* Otherwise, just write (set OUT IN) and hope for the best. */
7525 else
7526 emit_insn (gen_rtx_SET (VOIDmode, out, in));
7527
7528 /* Return the first insn emitted.
7529 We cannot just return get_last_insn, because there may have
7530 been multiple instructions emitted. Also note that gen_move_insn may
7531 emit more than one insn itself, so we cannot assume that there is one
7532 insn emitted per emit_insn_before call. */
7533
7534 return last ? NEXT_INSN (last) : get_insns ();
7535 }
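
/* An illustrative sketch, not used by the compiler, of the conservative
   two-insn fallback above when no three-operand add is recognized:
   reload one operand into OUT with a move, then add the other operand
   into OUT with a two-address add.  Plain int arithmetic stands in for
   the emitted RTL; the function name is hypothetical.  */

static int
reload_plus_fallback_sketch (op0, op1)
     int op0;
     int op1;
{
  int out;

  out = op0;			/* gen_reload (out, op0, opnum, type) */
  if (op0 == op1)
    out = out + out;		/* op1 = out; gen_add2_insn (out, op1) */
  else
    out = out + op1;		/* gen_add2_insn (out, op1) */
  return out;			/* now holds op0 + op1 */
}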
7536 \f
7537 /* Delete a previously made output-reload
7538 whose result we now believe is not needed.
7539 First we double-check.
7540
7541 INSN is the insn now being processed.
7542 LAST_RELOAD_REG is the hard register number for which we want to delete
7543 the last output reload.
7544 J is the reload-number that originally used REG. The caller has made
7545 certain that reload J doesn't use REG any longer for input. */
7546
7547 static void
7548 delete_output_reload (insn, j, last_reload_reg)
7549 rtx insn;
7550 int j;
7551 int last_reload_reg;
7552 {
7553 rtx output_reload_insn = spill_reg_store[last_reload_reg];
7554 rtx reg = spill_reg_stored_to[last_reload_reg];
7555 int k;
7556 int n_occurrences;
7557 int n_inherited = 0;
7558 register rtx i1;
7559 rtx substed;
7560
7561 /* Get the raw pseudo-register referred to. */
7562
7563 while (GET_CODE (reg) == SUBREG)
7564 reg = SUBREG_REG (reg);
7565 substed = reg_equiv_memory_loc[REGNO (reg)];
7566
7567 /* This is unsafe if the operand occurs more often in the current
7568 insn than it is inherited. */
7569 for (k = n_reloads - 1; k >= 0; k--)
7570 {
7571 rtx reg2 = rld[k].in;
7572 if (! reg2)
7573 continue;
7574 if (GET_CODE (reg2) == MEM || reload_override_in[k])
7575 reg2 = rld[k].in_reg;
7576 #ifdef AUTO_INC_DEC
7577 if (rld[k].out && ! rld[k].out_reg)
7578 reg2 = XEXP (rld[k].in_reg, 0);
7579 #endif
7580 while (GET_CODE (reg2) == SUBREG)
7581 reg2 = SUBREG_REG (reg2);
7582 if (rtx_equal_p (reg2, reg))
7583 {
7584 if (reload_inherited[k] || reload_override_in[k] || k == j)
7585 {
7586 n_inherited++;
7587 reg2 = rld[k].out_reg;
7588 if (! reg2)
7589 continue;
7590 while (GET_CODE (reg2) == SUBREG)
7591 reg2 = XEXP (reg2, 0);
7592 if (rtx_equal_p (reg2, reg))
7593 n_inherited++;
7594 }
7595 else
7596 return;
7597 }
7598 }
7599 n_occurrences = count_occurrences (PATTERN (insn), reg, 0);
7600 if (substed)
7601 n_occurrences += count_occurrences (PATTERN (insn),
7602 eliminate_regs (substed, 0,
7603 NULL_RTX), 0);
7604 if (n_occurrences > n_inherited)
7605 return;
7606
7607 /* If the pseudo-reg we are reloading is no longer referenced
7608 anywhere between the store into it and here,
7609 and no jumps or labels intervene, then the value can get
7610 here through the reload reg alone.
7611 Otherwise, give up--return. */
7612 for (i1 = NEXT_INSN (output_reload_insn);
7613 i1 != insn; i1 = NEXT_INSN (i1))
7614 {
7615 if (GET_CODE (i1) == CODE_LABEL || GET_CODE (i1) == JUMP_INSN)
7616 return;
7617 if ((GET_CODE (i1) == INSN || GET_CODE (i1) == CALL_INSN)
7618 && reg_mentioned_p (reg, PATTERN (i1)))
7619 {
7620 /* If this is a USE in front of INSN, we only have to check that
7621 there are no more references than accounted for by inheritance. */
7622 while (GET_CODE (i1) == INSN && GET_CODE (PATTERN (i1)) == USE)
7623 {
7624 n_occurrences += rtx_equal_p (reg, XEXP (PATTERN (i1), 0)) != 0;
7625 i1 = NEXT_INSN (i1);
7626 }
7627 if (n_occurrences <= n_inherited && i1 == insn)
7628 break;
7629 return;
7630 }
7631 }
7632
7633 /* The caller has already checked that REG dies or is set in INSN.
7634 It has also checked that we are optimizing, and thus some inaccuracies
7635 in the debugging information are acceptable.
7636 So we could just delete output_reload_insn.
7637 But in some cases we can improve the debugging information without
7638 sacrificing optimization - maybe even improving the code:
7639 See if the pseudo reg has been completely replaced
7640 with reload regs. If so, delete the store insn
7641 and forget we had a stack slot for the pseudo. */
7642 if (rld[j].out != rld[j].in
7643 && REG_N_DEATHS (REGNO (reg)) == 1
7644 && REG_N_SETS (REGNO (reg)) == 1
7645 && REG_BASIC_BLOCK (REGNO (reg)) >= 0
7646 && find_regno_note (insn, REG_DEAD, REGNO (reg)))
7647 {
7648 rtx i2;
7649
7650 /* We know that it was used only between here
7651 and the beginning of the current basic block.
7652 (We also know that the last use before INSN was
7653 the output reload we are thinking of deleting, but never mind that.)
7654 Search that range; see if any ref remains. */
7655 for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
7656 {
7657 rtx set = single_set (i2);
7658
7659 /* Uses which just store in the pseudo don't count,
7660 since if they are the only uses, they are dead. */
7661 if (set != 0 && SET_DEST (set) == reg)
7662 continue;
7663 if (GET_CODE (i2) == CODE_LABEL
7664 || GET_CODE (i2) == JUMP_INSN)
7665 break;
7666 if ((GET_CODE (i2) == INSN || GET_CODE (i2) == CALL_INSN)
7667 && reg_mentioned_p (reg, PATTERN (i2)))
7668 {
7669 /* Some other ref remains; just delete the output reload we
7670 know to be dead. */
7671 delete_address_reloads (output_reload_insn, insn);
7672 PUT_CODE (output_reload_insn, NOTE);
7673 NOTE_SOURCE_FILE (output_reload_insn) = 0;
7674 NOTE_LINE_NUMBER (output_reload_insn) = NOTE_INSN_DELETED;
7675 return;
7676 }
7677 }
7678
7679 /* Delete the now-dead stores into this pseudo. */
7680 for (i2 = PREV_INSN (insn); i2; i2 = PREV_INSN (i2))
7681 {
7682 rtx set = single_set (i2);
7683
7684 if (set != 0 && SET_DEST (set) == reg)
7685 {
7686 delete_address_reloads (i2, insn);
7687 /* This might be a basic block head,
7688 thus don't use delete_insn. */
7689 PUT_CODE (i2, NOTE);
7690 NOTE_SOURCE_FILE (i2) = 0;
7691 NOTE_LINE_NUMBER (i2) = NOTE_INSN_DELETED;
7692 }
7693 if (GET_CODE (i2) == CODE_LABEL
7694 || GET_CODE (i2) == JUMP_INSN)
7695 break;
7696 }
7697
7698 /* For the debugging info,
7699 say the pseudo lives in this reload reg. */
7700 reg_renumber[REGNO (reg)] = REGNO (rld[j].reg_rtx);
7701 alter_reg (REGNO (reg), -1);
7702 }
7703 delete_address_reloads (output_reload_insn, insn);
7704 PUT_CODE (output_reload_insn, NOTE);
7705 NOTE_SOURCE_FILE (output_reload_insn) = 0;
7706 NOTE_LINE_NUMBER (output_reload_insn) = NOTE_INSN_DELETED;
7707
7708 }
7709
7710 /* We are going to delete DEAD_INSN. Recursively delete loads of
7711 reload registers used in DEAD_INSN that are not used till CURRENT_INSN.
7712 CURRENT_INSN is being reloaded, so we have to check its reloads too. */
7713 static void
7714 delete_address_reloads (dead_insn, current_insn)
7715 rtx dead_insn, current_insn;
7716 {
7717 rtx set = single_set (dead_insn);
7718 rtx set2, dst, prev, next;
7719 if (set)
7720 {
7721 rtx dst = SET_DEST (set);
7722 if (GET_CODE (dst) == MEM)
7723 delete_address_reloads_1 (dead_insn, XEXP (dst, 0), current_insn);
7724 }
7725 /* If we deleted the store from a reloaded post_{in,de}c expression,
7726 we can delete the matching adds. */
7727 prev = PREV_INSN (dead_insn);
7728 next = NEXT_INSN (dead_insn);
7729 if (! prev || ! next)
7730 return;
7731 set = single_set (next);
7732 set2 = single_set (prev);
7733 if (! set || ! set2
7734 || GET_CODE (SET_SRC (set)) != PLUS || GET_CODE (SET_SRC (set2)) != PLUS
7735 || GET_CODE (XEXP (SET_SRC (set), 1)) != CONST_INT
7736 || GET_CODE (XEXP (SET_SRC (set2), 1)) != CONST_INT)
7737 return;
7738 dst = SET_DEST (set);
7739 if (! rtx_equal_p (dst, SET_DEST (set2))
7740 || ! rtx_equal_p (dst, XEXP (SET_SRC (set), 0))
7741 || ! rtx_equal_p (dst, XEXP (SET_SRC (set2), 0))
7742 || (INTVAL (XEXP (SET_SRC (set), 1))
7743 != -INTVAL (XEXP (SET_SRC (set2), 1))))
7744 return;
7745 delete_related_insns (prev);
7746 delete_related_insns (next);
7747 }
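
/* A simplified, self-contained restatement of the condition tested just
   above, for illustration only: the add before and the add after a
   deleted auto-inc store may be removed only when both adjust the same
   register, use it as the first addend, and their constant amounts
   cancel exactly.  The struct and function are hypothetical; the real
   code inspects the SETs of PREV and NEXT directly.  */

struct autoinc_add
{
  int dest_regno;		/* register the add writes */
  int addend_regno;		/* register appearing as the first addend */
  int amount;			/* the constant second addend */
};

static int
cancelling_autoinc_adds_p (prev, next)
     const struct autoinc_add *prev;
     const struct autoinc_add *next;
{
  return (prev->dest_regno == next->dest_regno
	  && prev->addend_regno == prev->dest_regno
	  && next->addend_regno == next->dest_regno
	  && next->amount == -prev->amount);
}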
7748
7749 /* Subfunction of delete_address_reloads: process registers found in X. */
7750 static void
7751 delete_address_reloads_1 (dead_insn, x, current_insn)
7752 rtx dead_insn, x, current_insn;
7753 {
7754 rtx prev, set, dst, i2;
7755 int i, j;
7756 enum rtx_code code = GET_CODE (x);
7757
7758 if (code != REG)
7759 {
7760 const char *fmt = GET_RTX_FORMAT (code);
7761 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7762 {
7763 if (fmt[i] == 'e')
7764 delete_address_reloads_1 (dead_insn, XEXP (x, i), current_insn);
7765 else if (fmt[i] == 'E')
7766 {
7767 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
7768 delete_address_reloads_1 (dead_insn, XVECEXP (x, i, j),
7769 current_insn);
7770 }
7771 }
7772 return;
7773 }
7774
7775 if (spill_reg_order[REGNO (x)] < 0)
7776 return;
7777
7778 /* Scan backwards for the insn that sets x. This might be far back, due
7779 to inheritance. */
7780 for (prev = PREV_INSN (dead_insn); prev; prev = PREV_INSN (prev))
7781 {
7782 code = GET_CODE (prev);
7783 if (code == CODE_LABEL || code == JUMP_INSN)
7784 return;
7785 if (GET_RTX_CLASS (code) != 'i')
7786 continue;
7787 if (reg_set_p (x, PATTERN (prev)))
7788 break;
7789 if (reg_referenced_p (x, PATTERN (prev)))
7790 return;
7791 }
7792 if (! prev || INSN_UID (prev) < reload_first_uid)
7793 return;
7794 /* Check that PREV only sets the reload register. */
7795 set = single_set (prev);
7796 if (! set)
7797 return;
7798 dst = SET_DEST (set);
7799 if (GET_CODE (dst) != REG
7800 || ! rtx_equal_p (dst, x))
7801 return;
7802 if (! reg_set_p (dst, PATTERN (dead_insn)))
7803 {
7804 /* Check if DST was used in a later insn -
7805 it might have been inherited. */
7806 for (i2 = NEXT_INSN (dead_insn); i2; i2 = NEXT_INSN (i2))
7807 {
7808 if (GET_CODE (i2) == CODE_LABEL)
7809 break;
7810 if (! INSN_P (i2))
7811 continue;
7812 if (reg_referenced_p (dst, PATTERN (i2)))
7813 {
7814 /* If there is a reference to the register in the current insn,
7815 it might be loaded in a non-inherited reload. If no other
7816 reload uses it, that means the register is set before
7817 referenced. */
7818 if (i2 == current_insn)
7819 {
7820 for (j = n_reloads - 1; j >= 0; j--)
7821 if ((rld[j].reg_rtx == dst && reload_inherited[j])
7822 || reload_override_in[j] == dst)
7823 return;
7824 for (j = n_reloads - 1; j >= 0; j--)
7825 if (rld[j].in && rld[j].reg_rtx == dst)
7826 break;
7827 if (j >= 0)
7828 break;
7829 }
7830 return;
7831 }
7832 if (GET_CODE (i2) == JUMP_INSN)
7833 break;
7834 /* If DST is still live at CURRENT_INSN, check if it is used for
7835 any reload. Note that even if CURRENT_INSN sets DST, we still
7836 have to check the reloads. */
7837 if (i2 == current_insn)
7838 {
7839 for (j = n_reloads - 1; j >= 0; j--)
7840 if ((rld[j].reg_rtx == dst && reload_inherited[j])
7841 || reload_override_in[j] == dst)
7842 return;
7843 /* ??? We can't finish the loop here, because dst might be
7844 allocated to a pseudo in this block if no reload in this
7845 block needs any of the classes containing DST - see
7846 spill_hard_reg. There is no easy way to tell this, so we
7847 have to scan till the end of the basic block. */
7848 }
7849 if (reg_set_p (dst, PATTERN (i2)))
7850 break;
7851 }
7852 }
7853 delete_address_reloads_1 (prev, SET_SRC (set), current_insn);
7854 reg_reloaded_contents[REGNO (dst)] = -1;
7855 /* Can't use delete_insn here because PREV might be a basic block head. */
7856 PUT_CODE (prev, NOTE);
7857 NOTE_LINE_NUMBER (prev) = NOTE_INSN_DELETED;
7858 NOTE_SOURCE_FILE (prev) = 0;
7859 }
7860 \f
7861 /* Output reload-insns to reload VALUE into RELOADREG.
7862 VALUE is an autoincrement or autodecrement RTX whose operand
7863 is a register or memory location;
7864 so reloading involves incrementing that location.
7865 IN is either identical to VALUE, or some cheaper place to reload from.
7866
7867 INC_AMOUNT is the number to increment or decrement by (always positive).
7868 This cannot be deduced from VALUE.
7869
7870 Return the instruction that stores into RELOADREG. */
7871
7872 static rtx
7873 inc_for_reload (reloadreg, in, value, inc_amount)
7874 rtx reloadreg;
7875 rtx in, value;
7876 int inc_amount;
7877 {
7878 /* REG or MEM to be copied and incremented. */
7879 rtx incloc = XEXP (value, 0);
7880 /* Nonzero if increment after copying. */
7881 int post = (GET_CODE (value) == POST_DEC || GET_CODE (value) == POST_INC);
7882 rtx last;
7883 rtx inc;
7884 rtx add_insn;
7885 int code;
7886 rtx store;
7887 rtx real_in = in == value ? XEXP (in, 0) : in;
7888
7889 /* No hard register is equivalent to this register after
7890 inc/dec operation. If REG_LAST_RELOAD_REG were non-zero,
7891 we could inc/dec that register as well (maybe even using it for
7892 the source), but I'm not sure it's worth worrying about. */
7893 if (GET_CODE (incloc) == REG)
7894 reg_last_reload_reg[REGNO (incloc)] = 0;
7895
7896 if (GET_CODE (value) == PRE_DEC || GET_CODE (value) == POST_DEC)
7897 inc_amount = -inc_amount;
7898
7899 inc = GEN_INT (inc_amount);
7900
7901 /* If this is post-increment, first copy the location to the reload reg. */
7902 if (post && real_in != reloadreg)
7903 emit_insn (gen_move_insn (reloadreg, real_in));
7904
7905 if (in == value)
7906 {
7907 /* See if we can directly increment INCLOC. Use a method similar to
7908 that in gen_reload. */
7909
7910 last = get_last_insn ();
7911 add_insn = emit_insn (gen_rtx_SET (VOIDmode, incloc,
7912 gen_rtx_PLUS (GET_MODE (incloc),
7913 incloc, inc)));
7914
7915 code = recog_memoized (add_insn);
7916 if (code >= 0)
7917 {
7918 extract_insn (add_insn);
7919 if (constrain_operands (1))
7920 {
7921 /* If this is a pre-increment and we have incremented the value
7922 where it lives, copy the incremented value to RELOADREG to
7923 be used as an address. */
7924
7925 if (! post)
7926 emit_insn (gen_move_insn (reloadreg, incloc));
7927
7928 return add_insn;
7929 }
7930 }
7931 delete_insns_since (last);
7932 }
7933
7934 /* If we couldn't do the increment directly, we must increment in RELOADREG.
7935 The way we do this depends on whether this is pre- or post-increment.
7936 For pre-increment, copy INCLOC to the reload register, increment it
7937 there, then save back. */
7938
7939 if (! post)
7940 {
7941 if (in != reloadreg)
7942 emit_insn (gen_move_insn (reloadreg, real_in));
7943 emit_insn (gen_add2_insn (reloadreg, inc));
7944 store = emit_insn (gen_move_insn (incloc, reloadreg));
7945 }
7946 else
7947 {
7948 /* Postincrement.
7949 Because this might be a jump insn or a compare, and because RELOADREG
7950 may not be available after the insn in an input reload, we must do
7951 the incrementation before the insn being reloaded for.
7952
7953 We have already copied IN to RELOADREG. Increment the copy in
7954 RELOADREG, save that back, then decrement RELOADREG so it has
7955 the original value. */
7956
7957 emit_insn (gen_add2_insn (reloadreg, inc));
7958 store = emit_insn (gen_move_insn (incloc, reloadreg));
7959 emit_insn (gen_add2_insn (reloadreg, GEN_INT (-inc_amount)));
7960 }
7961
7962 return store;
7963 }
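
/* An illustrative sketch of the post-increment fallback above, using
   plain integers in place of the emitted moves and adds: the location is
   copied into the reload register, incremented there, stored back, and
   the reload register is then adjusted back down so it still holds the
   original (pre-increment) value for use as an address.  Returns 1 if
   that invariant holds; the function name is hypothetical.  */

static int
post_inc_fallback_sketch (location, inc_amount)
     int location;
     int inc_amount;
{
  int incloc = location;
  int reloadreg;

  reloadreg = incloc;		/* copy the location into the reload reg */
  reloadreg += inc_amount;	/* gen_add2_insn (reloadreg, inc) */
  incloc = reloadreg;		/* store the incremented value back */
  reloadreg -= inc_amount;	/* restore the original value */

  return incloc == location + inc_amount && reloadreg == location;
}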
7964 \f
7965 /* Return 1 if we are certain that the constraint-string STRING allows
7966 the hard register REG. Return 0 if we can't be sure of this. */
7967
7968 static int
7969 constraint_accepts_reg_p (string, reg)
7970 const char *string;
7971 rtx reg;
7972 {
7973 int value = 0;
7974 int regno = true_regnum (reg);
7975 int c;
7976
7977 /* Initialize for first alternative. */
7978 value = 0;
7979 /* Check that each alternative contains `g' or `r'. */
7980 while (1)
7981 switch (c = *string++)
7982 {
7983 case 0:
7984 /* If an alternative lacks `g' or `r', we lose. */
7985 return value;
7986 case ',':
7987 /* If an alternative lacks `g' or `r', we lose. */
7988 if (value == 0)
7989 return 0;
7990 /* Initialize for next alternative. */
7991 value = 0;
7992 break;
7993 case 'g':
7994 case 'r':
7995 /* Any general reg wins for this alternative. */
7996 if (TEST_HARD_REG_BIT (reg_class_contents[(int) GENERAL_REGS], regno))
7997 value = 1;
7998 break;
7999 default:
8000 /* Any reg in specified class wins for this alternative. */
8001 {
8002 enum reg_class class = REG_CLASS_FROM_LETTER (c);
8003
8004 if (TEST_HARD_REG_BIT (reg_class_contents[(int) class], regno))
8005 value = 1;
8006 }
8007 }
8008 }
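
/* A simplified, self-contained variant of the scan above, for
   illustration: it only looks for `g' and `r', whereas the real code
   also accepts any machine-specific class letter whose class contains
   the register.  It returns 1 only when every comma-separated
   alternative contains one of those letters, e.g. 1 for "r,g" but 0 for
   "r,m".  The function name is hypothetical.  */

static int
every_alternative_has_general_reg (string)
     const char *string;
{
  int value = 0;
  int c;

  while (1)
    switch (c = *string++)
      {
      case 0:
	return value;
      case ',':
	if (value == 0)
	  return 0;
	value = 0;
	break;
      case 'g':
      case 'r':
	value = 1;
	break;
      }
}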
8009 \f
8010 /* INSN is a no-op; delete it.
8011 If this sets the return value of the function, we must keep a USE around,
8012 in case this is in a different basic block than the final USE. Otherwise,
8013 we could lose important register liveness information on
8014 SMALL_REGISTER_CLASSES machines, where return registers might be used as
8015 spills: subsequent passes assume that spill registers are dead at the end
8016 of a basic block.
8017 VALUE must be the return value in such a case, NULL otherwise. */
8018 static void
8019 reload_cse_delete_noop_set (insn, value)
8020 rtx insn, value;
8021 {
8022 if (value)
8023 {
8024 PATTERN (insn) = gen_rtx_USE (VOIDmode, value);
8025 INSN_CODE (insn) = -1;
8026 REG_NOTES (insn) = NULL_RTX;
8027 }
8028 else
8029 {
8030 PUT_CODE (insn, NOTE);
8031 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
8032 NOTE_SOURCE_FILE (insn) = 0;
8033 }
8034 }
8035
8036 /* See whether a single set SET is a noop. */
8037 static int
8038 reload_cse_noop_set_p (set)
8039 rtx set;
8040 {
8041 return rtx_equal_for_cselib_p (SET_DEST (set), SET_SRC (set));
8042 }
8043
8044 /* Try to simplify INSN. */
8045 static void
8046 reload_cse_simplify (insn)
8047 rtx insn;
8048 {
8049 rtx body = PATTERN (insn);
8050
8051 if (GET_CODE (body) == SET)
8052 {
8053 int count = 0;
8054
8055 /* Simplify even if we may think it is a no-op.
8056 We may think a memory load of a value smaller than WORD_SIZE
8057 is redundant because we haven't taken into account possible
8058 implicit extension. reload_cse_simplify_set() will bring
8059 this out, so it's safer to simplify before we delete. */
8060 count += reload_cse_simplify_set (body, insn);
8061
8062 if (!count && reload_cse_noop_set_p (body))
8063 {
8064 rtx value = SET_DEST (body);
8065 if (! REG_FUNCTION_VALUE_P (SET_DEST (body)))
8066 value = 0;
8067 reload_cse_delete_noop_set (insn, value);
8068 return;
8069 }
8070
8071 if (count > 0)
8072 apply_change_group ();
8073 else
8074 reload_cse_simplify_operands (insn);
8075 }
8076 else if (GET_CODE (body) == PARALLEL)
8077 {
8078 int i;
8079 int count = 0;
8080 rtx value = NULL_RTX;
8081
8082 /* If every action in a PARALLEL is a noop, we can delete
8083 the entire PARALLEL. */
8084 for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
8085 {
8086 rtx part = XVECEXP (body, 0, i);
8087 if (GET_CODE (part) == SET)
8088 {
8089 if (! reload_cse_noop_set_p (part))
8090 break;
8091 if (REG_FUNCTION_VALUE_P (SET_DEST (part)))
8092 {
8093 if (value)
8094 break;
8095 value = SET_DEST (part);
8096 }
8097 }
8098 else if (GET_CODE (part) != CLOBBER)
8099 break;
8100 }
8101
8102 if (i < 0)
8103 {
8104 reload_cse_delete_noop_set (insn, value);
8105 /* We're done with this insn. */
8106 return;
8107 }
8108
8109 /* It's not a no-op, but we can try to simplify it. */
8110 for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
8111 if (GET_CODE (XVECEXP (body, 0, i)) == SET)
8112 count += reload_cse_simplify_set (XVECEXP (body, 0, i), insn);
8113
8114 if (count > 0)
8115 apply_change_group ();
8116 else
8117 reload_cse_simplify_operands (insn);
8118 }
8119 }
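
/* A simplified sketch of the rule applied above to PARALLELs, for
   illustration only: a PARALLEL may be deleted as a no-op only if every
   SET in it copies a value onto itself, and at most one of those SETs
   writes the function's return value, which must then be kept alive via
   a USE.  The struct and function are hypothetical; the real code walks
   the PARALLEL rtx, tolerates CLOBBERs, and uses cselib equivalence.  */

struct noop_parallel_set
{
  int dest_regno;
  int src_regno;
  int sets_return_value;
};

static int
parallel_is_noop_p (sets, n_sets, return_value_regno)
     const struct noop_parallel_set *sets;
     int n_sets;
     int *return_value_regno;
{
  int i;
  int have_value = 0;

  *return_value_regno = -1;
  for (i = n_sets - 1; i >= 0; --i)
    {
      if (sets[i].dest_regno != sets[i].src_regno)
	return 0;
      if (sets[i].sets_return_value)
	{
	  /* More than one return-value set: give up, as above.  */
	  if (have_value)
	    return 0;
	  have_value = 1;
	  *return_value_regno = sets[i].dest_regno;
	}
    }
  return 1;
}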
8120
8121 /* Do a very simple CSE pass over the hard registers.
8122
8123 This function detects no-op moves where we happened to assign two
8124 different pseudo-registers to the same hard register, and then
8125 copied one to the other. Reload will generate a useless
8126 instruction copying a register to itself.
8127
8128 This function also detects cases where we load a value from memory
8129 into two different registers, and (if memory is more expensive than
8130 registers) changes it to simply copy the first register into the
8131 second register.
8132
8133 Another optimization is performed that scans the operands of each
8134 instruction to see whether the value is already available in a
8135 hard register. It then replaces the operand with the hard register
8136 if possible, much like an optional reload would. */
8137
8138 static void
8139 reload_cse_regs_1 (first)
8140 rtx first;
8141 {
8142 rtx insn;
8143
8144 cselib_init ();
8145 init_alias_analysis ();
8146
8147 for (insn = first; insn; insn = NEXT_INSN (insn))
8148 {
8149 if (INSN_P (insn))
8150 reload_cse_simplify (insn);
8151
8152 cselib_process_insn (insn);
8153 }
8154
8155 /* Clean up. */
8156 end_alias_analysis ();
8157 cselib_finish ();
8158 }
8159
8160 /* Call cse- and combine-like post-reload optimization phases.
8161 FIRST is the first instruction. */
8162 void
8163 reload_cse_regs (first)
8164 rtx first;
8165 {
8166 reload_cse_regs_1 (first);
8167 reload_combine ();
8168 reload_cse_move2add (first);
8169 if (flag_expensive_optimizations)
8170 reload_cse_regs_1 (first);
8171 }
8172
8173 /* Try to simplify a single SET instruction. SET is the set pattern.
8174 INSN is the instruction it came from.
8175 This function only handles one case: if we set a register to a value
8176 which is not a register, we try to find that value in some other register
8177 and change the set into a register copy. */
8178
8179 static int
8180 reload_cse_simplify_set (set, insn)
8181 rtx set;
8182 rtx insn;
8183 {
8184 int did_change = 0;
8185 int dreg;
8186 rtx src;
8187 enum reg_class dclass;
8188 int old_cost;
8189 cselib_val *val;
8190 struct elt_loc_list *l;
8191 #ifdef LOAD_EXTEND_OP
8192 enum rtx_code extend_op = NIL;
8193 #endif
8194
8195 dreg = true_regnum (SET_DEST (set));
8196 if (dreg < 0)
8197 return 0;
8198
8199 src = SET_SRC (set);
8200 if (side_effects_p (src) || true_regnum (src) >= 0)
8201 return 0;
8202
8203 dclass = REGNO_REG_CLASS (dreg);
8204
8205 #ifdef LOAD_EXTEND_OP
8206 /* When replacing a memory with a register, we need to honor assumptions
8207 that combine made wrt the contents of sign bits. We'll do this by
8208 generating an extend instruction instead of a reg->reg copy. Thus
8209 the destination must be a register that we can widen. */
8210 if (GET_CODE (src) == MEM
8211 && GET_MODE_BITSIZE (GET_MODE (src)) < BITS_PER_WORD
8212 && (extend_op = LOAD_EXTEND_OP (GET_MODE (src))) != NIL
8213 && GET_CODE (SET_DEST (set)) != REG)
8214 return 0;
8215 #endif
8216
8217 /* If memory loads are cheaper than register copies, don't change them. */
8218 if (GET_CODE (src) == MEM)
8219 old_cost = MEMORY_MOVE_COST (GET_MODE (src), dclass, 1);
8220 else if (CONSTANT_P (src))
8221 old_cost = rtx_cost (src, SET);
8222 else if (GET_CODE (src) == REG)
8223 old_cost = REGISTER_MOVE_COST (GET_MODE (src),
8224 REGNO_REG_CLASS (REGNO (src)), dclass);
8225 else
8226 /* ??? */
8227 old_cost = rtx_cost (src, SET);
8228
8229 val = cselib_lookup (src, GET_MODE (SET_DEST (set)), 0);
8230 if (! val)
8231 return 0;
8232 for (l = val->locs; l; l = l->next)
8233 {
8234 rtx this_rtx = l->loc;
8235 int this_cost;
8236
8237 if (CONSTANT_P (this_rtx) && ! references_value_p (this_rtx, 0))
8238 {
8239 #ifdef LOAD_EXTEND_OP
8240 if (extend_op != NIL)
8241 {
8242 HOST_WIDE_INT this_val;
8243
8244 /* ??? I'm lazy and don't wish to handle CONST_DOUBLE. Other
8245 constants, such as SYMBOL_REF, cannot be extended. */
8246 if (GET_CODE (this_rtx) != CONST_INT)
8247 continue;
8248
8249 this_val = INTVAL (this_rtx);
8250 switch (extend_op)
8251 {
8252 case ZERO_EXTEND:
8253 this_val &= GET_MODE_MASK (GET_MODE (src));
8254 break;
8255 case SIGN_EXTEND:
8256 /* ??? In theory we're already extended. */
8257 if (this_val == trunc_int_for_mode (this_val, GET_MODE (src)))
8258 break;
8259 default:
8260 abort ();
8261 }
8262 this_rtx = GEN_INT (this_val);
8263 }
8264 #endif
8265 this_cost = rtx_cost (this_rtx, SET);
8266 }
8267 else if (GET_CODE (this_rtx) == REG)
8268 {
8269 #ifdef LOAD_EXTEND_OP
8270 if (extend_op != NIL)
8271 {
8272 this_rtx = gen_rtx_fmt_e (extend_op, word_mode, this_rtx);
8273 this_cost = rtx_cost (this_rtx, SET);
8274 }
8275 else
8276 #endif
8277 this_cost = REGISTER_MOVE_COST (GET_MODE (this_rtx),
8278 REGNO_REG_CLASS (REGNO (this_rtx)),
8279 dclass);
8280 }
8281 else
8282 continue;
8283
8284 /* If equal costs, prefer registers over anything else. That
8285 tends to lead to smaller instructions on some machines. */
8286 if (this_cost < old_cost
8287 || (this_cost == old_cost
8288 && GET_CODE (this_rtx) == REG
8289 && GET_CODE (SET_SRC (set)) != REG))
8290 {
8291 #ifdef LOAD_EXTEND_OP
8292 if (GET_MODE_BITSIZE (GET_MODE (SET_DEST (set))) < BITS_PER_WORD
8293 && extend_op != NIL)
8294 {
8295 rtx wide_dest = gen_rtx_REG (word_mode, REGNO (SET_DEST (set)));
8296 ORIGINAL_REGNO (wide_dest) = ORIGINAL_REGNO (SET_DEST (set));
8297 validate_change (insn, &SET_DEST (set), wide_dest, 1);
8298 }
8299 #endif
8300
8301 validate_change (insn, &SET_SRC (set), copy_rtx (this_rtx), 1);
8302 old_cost = this_cost, did_change = 1;
8303 }
8304 }
8305
8306 return did_change;
8307 }
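
/* A small sketch of the replacement rule used above, with the cost
   numbers taken as given rather than computed from the target macros: a
   known-equivalent location replaces the current source only if it is
   strictly cheaper, or equally cheap and a register when the current
   source is not, since register sources tend to give smaller insns.
   The function name and flag parameters are hypothetical.  */

static int
prefer_replacement_p (this_cost, old_cost, this_is_reg, old_src_is_reg)
     int this_cost;
     int old_cost;
     int this_is_reg;
     int old_src_is_reg;
{
  return (this_cost < old_cost
	  || (this_cost == old_cost && this_is_reg && ! old_src_is_reg));
}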
8308
8309 /* Try to replace operands in INSN with equivalent values that are already
8310 in registers. This can be viewed as optional reloading.
8311
8312 For each non-register operand in the insn, see if any hard regs are
8313 known to be equivalent to that operand. Record the alternatives which
8314 can accept these hard registers. Among all alternatives, select the
8315 ones which are better or equal to the one currently matching, where
8316 "better" is in terms of '?' and '!' constraints. Among the remaining
8317 alternatives, select the one which replaces most operands with
8318 hard registers. */
8319
8320 static int
8321 reload_cse_simplify_operands (insn)
8322 rtx insn;
8323 {
8324 int i, j;
8325
8326 /* For each operand, all registers that are equivalent to it. */
8327 HARD_REG_SET equiv_regs[MAX_RECOG_OPERANDS];
8328
8329 const char *constraints[MAX_RECOG_OPERANDS];
8330
8331 /* Vector recording how bad an alternative is. */
8332 int *alternative_reject;
8333 /* Vector recording how many registers can be introduced by choosing
8334 this alternative. */
8335 int *alternative_nregs;
8336 /* Array of vectors recording, for each operand and each alternative,
8337 which hard register to substitute, or -1 if the operand should be
8338 left as it is. */
8339 int *op_alt_regno[MAX_RECOG_OPERANDS];
8340 /* Array of alternatives, sorted in order of decreasing desirability. */
8341 int *alternative_order;
8342 rtx reg = gen_rtx_REG (VOIDmode, -1);
8343
8344 extract_insn (insn);
8345
8346 if (recog_data.n_alternatives == 0 || recog_data.n_operands == 0)
8347 return 0;
8348
8349 /* Figure out which alternative currently matches. */
8350 if (! constrain_operands (1))
8351 fatal_insn_not_found (insn);
8352
8353 alternative_reject = (int *) alloca (recog_data.n_alternatives * sizeof (int));
8354 alternative_nregs = (int *) alloca (recog_data.n_alternatives * sizeof (int));
8355 alternative_order = (int *) alloca (recog_data.n_alternatives * sizeof (int));
8356 memset ((char *)alternative_reject, 0, recog_data.n_alternatives * sizeof (int));
8357 memset ((char *)alternative_nregs, 0, recog_data.n_alternatives * sizeof (int));
8358
8359 /* For each operand, find out which regs are equivalent. */
8360 for (i = 0; i < recog_data.n_operands; i++)
8361 {
8362 cselib_val *v;
8363 struct elt_loc_list *l;
8364
8365 CLEAR_HARD_REG_SET (equiv_regs[i]);
8366
8367 /* cselib blows up on CODE_LABELs. Trying to fix that doesn't seem
8368 right, so avoid the problem here. Likewise if we have a constant
8369 and the insn pattern doesn't tell us the mode we need. */
8370 if (GET_CODE (recog_data.operand[i]) == CODE_LABEL
8371 || (CONSTANT_P (recog_data.operand[i])
8372 && recog_data.operand_mode[i] == VOIDmode))
8373 continue;
8374
8375 v = cselib_lookup (recog_data.operand[i], recog_data.operand_mode[i], 0);
8376 if (! v)
8377 continue;
8378
8379 for (l = v->locs; l; l = l->next)
8380 if (GET_CODE (l->loc) == REG)
8381 SET_HARD_REG_BIT (equiv_regs[i], REGNO (l->loc));
8382 }
8383
8384 for (i = 0; i < recog_data.n_operands; i++)
8385 {
8386 enum machine_mode mode;
8387 int regno;
8388 const char *p;
8389
8390 op_alt_regno[i] = (int *) alloca (recog_data.n_alternatives * sizeof (int));
8391 for (j = 0; j < recog_data.n_alternatives; j++)
8392 op_alt_regno[i][j] = -1;
8393
8394 p = constraints[i] = recog_data.constraints[i];
8395 mode = recog_data.operand_mode[i];
8396
8397 /* Add the reject values for each alternative given by the constraints
8398 for this operand. */
8399 j = 0;
8400 while (*p != '\0')
8401 {
8402 char c = *p++;
8403 if (c == ',')
8404 j++;
8405 else if (c == '?')
8406 alternative_reject[j] += 3;
8407 else if (c == '!')
8408 alternative_reject[j] += 300;
8409 }
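      /* For illustration: with a hypothetical constraint string "r,?m,!i",
	 alternative 0 gets no penalty, alternative 1 gets reject 3, and
	 alternative 2 gets reject 300, so alternative 2 survives only if the
	 currently matching alternative is at least as bad.  */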
8410
8411 /* We won't change operands which are already registers. We
8412 also don't want to modify output operands. */
8413 regno = true_regnum (recog_data.operand[i]);
8414 if (regno >= 0
8415 || constraints[i][0] == '='
8416 || constraints[i][0] == '+')
8417 continue;
8418
8419 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
8420 {
8421 int class = (int) NO_REGS;
8422
8423 if (! TEST_HARD_REG_BIT (equiv_regs[i], regno))
8424 continue;
8425
8426 REGNO (reg) = regno;
8427 PUT_MODE (reg, mode);
8428
8429 /* We found a register equal to this operand. Now look for all
8430 alternatives that can accept this register and have not been
8431 assigned a register they can use yet. */
8432 j = 0;
8433 p = constraints[i];
8434 for (;;)
8435 {
8436 char c = *p++;
8437
8438 switch (c)
8439 {
8440 case '=': case '+': case '?':
8441 case '#': case '&': case '!':
8442 case '*': case '%':
8443 case '0': case '1': case '2': case '3': case '4':
8444 case '5': case '6': case '7': case '8': case '9':
8445 case 'm': case '<': case '>': case 'V': case 'o':
8446 case 'E': case 'F': case 'G': case 'H':
8447 case 's': case 'i': case 'n':
8448 case 'I': case 'J': case 'K': case 'L':
8449 case 'M': case 'N': case 'O': case 'P':
8450 case 'p': case 'X':
8451 /* These don't say anything we care about. */
8452 break;
8453
8454 case 'g': case 'r':
8455 class = reg_class_subunion[(int) class][(int) GENERAL_REGS];
8456 break;
8457
8458 default:
8459 class
8460 = reg_class_subunion[(int) class][(int) REG_CLASS_FROM_LETTER ((unsigned char)c)];
8461 break;
8462
8463 case ',': case '\0':
8464 /* See if REGNO fits this alternative, and set it up as the
8465 replacement register if we don't have one for this
8466 alternative yet and the operand being replaced is not
8467 a cheap CONST_INT. */
8468 if (op_alt_regno[i][j] == -1
8469 && reg_fits_class_p (reg, class, 0, mode)
8470 && (GET_CODE (recog_data.operand[i]) != CONST_INT
8471 || (rtx_cost (recog_data.operand[i], SET)
8472 > rtx_cost (reg, SET))))
8473 {
8474 alternative_nregs[j]++;
8475 op_alt_regno[i][j] = regno;
8476 }
8477 j++;
8478 break;
8479 }
8480
8481 if (c == '\0')
8482 break;
8483 }
8484 }
8485 }
8486
8487 /* Record all alternatives which are better or equal to the currently
8488 matching one in the alternative_order array. */
8489 for (i = j = 0; i < recog_data.n_alternatives; i++)
8490 if (alternative_reject[i] <= alternative_reject[which_alternative])
8491 alternative_order[j++] = i;
8492 recog_data.n_alternatives = j;
8493
8494 /* Sort it. Given a small number of alternatives, a dumb algorithm
8495 won't hurt too much. */
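  /* This is a plain selection sort: a lower reject value comes first, and
     ties are broken in favor of the alternative that would turn more
     operands into hard registers.  For illustration, rejects {3, 0} with
     nregs {1, 2} order the alternatives as 1, 0.  */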
8496 for (i = 0; i < recog_data.n_alternatives - 1; i++)
8497 {
8498 int best = i;
8499 int best_reject = alternative_reject[alternative_order[i]];
8500 int best_nregs = alternative_nregs[alternative_order[i]];
8501 int tmp;
8502
8503 for (j = i + 1; j < recog_data.n_alternatives; j++)
8504 {
8505 int this_reject = alternative_reject[alternative_order[j]];
8506 int this_nregs = alternative_nregs[alternative_order[j]];
8507
8508 if (this_reject < best_reject
8509 || (this_reject == best_reject && this_nregs > best_nregs))
8510 {
8511 best = j;
8512 best_reject = this_reject;
8513 best_nregs = this_nregs;
8514 }
8515 }
8516
8517 tmp = alternative_order[best];
8518 alternative_order[best] = alternative_order[i];
8519 alternative_order[i] = tmp;
8520 }
8521
8522 /* Substitute the operands as determined by op_alt_regno for the best
8523 alternative. */
8524 j = alternative_order[0];
8525
8526 for (i = 0; i < recog_data.n_operands; i++)
8527 {
8528 enum machine_mode mode = recog_data.operand_mode[i];
8529 if (op_alt_regno[i][j] == -1)
8530 continue;
8531
8532 validate_change (insn, recog_data.operand_loc[i],
8533 gen_rtx_REG (mode, op_alt_regno[i][j]), 1);
8534 }
8535
8536 for (i = recog_data.n_dups - 1; i >= 0; i--)
8537 {
8538 int op = recog_data.dup_num[i];
8539 enum machine_mode mode = recog_data.operand_mode[op];
8540
8541 if (op_alt_regno[op][j] == -1)
8542 continue;
8543
8544 validate_change (insn, recog_data.dup_loc[i],
8545 gen_rtx_REG (mode, op_alt_regno[op][j]), 1);
8546 }
8547
8548 return apply_change_group ();
8549 }
8550 \f
8551 /* If reload couldn't use reg+reg+offset addressing, try to use reg+reg
8552 addressing now.
8553 This code might also be useful when reload gave up on reg+reg addressing
8554 because of clashes between the return register and INDEX_REG_CLASS. */
8555
8556 /* The maximum number of uses of a register we can keep track of to
8557 replace them with reg+reg addressing. */
8558 #define RELOAD_COMBINE_MAX_USES 6
8559
8560 /* INSN is the insn where a register has been used, and USEP points to the
8561 location of the register within the rtl. */
8562 struct reg_use { rtx insn, *usep; };
8563
8564 /* If the register is used in some unknown fashion, USE_INDEX is negative.
8565 If it is dead, USE_INDEX is RELOAD_COMBINE_MAX_USES, and STORE_RUID
8566 indicates where it becomes live again.
8567 Otherwise, USE_INDEX is the index of the last encountered use of the
8568 register (which is first among these we have seen since we scan backwards),
8569 OFFSET contains the constant offset that is added to the register in
8570 all encountered uses, and USE_RUID indicates the first encountered, i.e.
8571 last, of these uses.
8572 STORE_RUID is always meaningful if we only want to use a value in a
8573 register in a different place: it denotes the next insn in the insn
8574 stream (i.e. the last encountered) that sets or clobbers the register. */
8575 static struct
8576 {
8577 struct reg_use reg_use[RELOAD_COMBINE_MAX_USES];
8578 int use_index;
8579 rtx offset;
8580 int store_ruid;
8581 int use_ruid;
8582 } reg_state[FIRST_PSEUDO_REGISTER];
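
/* Example of how reg_state evolves (illustrative register numbers): scanning
   backwards, when register 4 is first seen in (plus (reg 4) (const_int 8))
   after being dead, use_index drops from RELOAD_COMBINE_MAX_USES to
   RELOAD_COMBINE_MAX_USES - 1 and offset becomes (const_int 8); each further
   use with the same offset decrements use_index and is recorded in reg_use[];
   a use with a different offset, a multi-word use, or more than
   RELOAD_COMBINE_MAX_USES uses sets use_index to -1, i.e. "used in some
   unknown fashion".  */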
8583
8584 /* Reverse linear uid. This is increased in reload_combine while scanning
8585 the instructions from last to first. It is used to set last_label_ruid
8586 and the store_ruid / use_ruid fields in reg_state. */
8587 static int reload_combine_ruid;
8588
8589 #define LABEL_LIVE(LABEL) \
8590 (label_live[CODE_LABEL_NUMBER (LABEL) - min_labelno])
8591
8592 static void
8593 reload_combine ()
8594 {
8595 rtx insn, set;
8596 int first_index_reg = -1;
8597 int last_index_reg = 0;
8598 int i;
8599 unsigned int r;
8600 int last_label_ruid;
8601 int min_labelno, n_labels;
8602 HARD_REG_SET ever_live_at_start, *label_live;
8603
8604 /* If reg+reg can be used in offsettable memory addresses, the main chunk of
8605 reload has already used it where appropriate, so there is no use in
8606 trying to generate it now. */
8607 if (double_reg_address_ok && INDEX_REG_CLASS != NO_REGS)
8608 return;
8609
8610 /* To avoid wasting too much time later searching for an index register,
8611 determine the minimum and maximum index register numbers. */
8612 for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
8613 if (TEST_HARD_REG_BIT (reg_class_contents[INDEX_REG_CLASS], r))
8614 {
8615 if (first_index_reg == -1)
8616 first_index_reg = r;
8617
8618 last_index_reg = r;
8619 }
8620
8621 /* If no index register is available, we can quit now. */
8622 if (first_index_reg == -1)
8623 return;
8624
8625 /* Set up LABEL_LIVE and EVER_LIVE_AT_START. The register lifetime
8626 information is a bit fuzzy immediately after reload, but it's
8627 still good enough to determine which registers are live at a jump
8628 destination. */
8629 min_labelno = get_first_label_num ();
8630 n_labels = max_label_num () - min_labelno;
8631 label_live = (HARD_REG_SET *) xmalloc (n_labels * sizeof (HARD_REG_SET));
8632 CLEAR_HARD_REG_SET (ever_live_at_start);
8633
8634 for (i = n_basic_blocks - 1; i >= 0; i--)
8635 {
8636 insn = BLOCK_HEAD (i);
8637 if (GET_CODE (insn) == CODE_LABEL)
8638 {
8639 HARD_REG_SET live;
8640
8641 REG_SET_TO_HARD_REG_SET (live,
8642 BASIC_BLOCK (i)->global_live_at_start);
8643 compute_use_by_pseudos (&live,
8644 BASIC_BLOCK (i)->global_live_at_start);
8645 COPY_HARD_REG_SET (LABEL_LIVE (insn), live);
8646 IOR_HARD_REG_SET (ever_live_at_start, live);
8647 }
8648 }
8649
8650 /* Initialize last_label_ruid, reload_combine_ruid and reg_state. */
8651 last_label_ruid = reload_combine_ruid = 0;
8652 for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
8653 {
8654 reg_state[r].store_ruid = reload_combine_ruid;
8655 if (fixed_regs[r])
8656 reg_state[r].use_index = -1;
8657 else
8658 reg_state[r].use_index = RELOAD_COMBINE_MAX_USES;
8659 }
8660
8661 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
8662 {
8663 rtx note;
8664
8665 /* We cannot do our optimization across labels. Invalidating all the use
8666 information we have would be costly, so we just note where the label
8667 is and then later disable any optimization that would cross it. */
8668 if (GET_CODE (insn) == CODE_LABEL)
8669 last_label_ruid = reload_combine_ruid;
8670 else if (GET_CODE (insn) == BARRIER)
8671 for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
8672 if (! fixed_regs[r])
8673 reg_state[r].use_index = RELOAD_COMBINE_MAX_USES;
8674
8675 if (! INSN_P (insn))
8676 continue;
8677
8678 reload_combine_ruid++;
8679
8680 /* Look for (set (REGX) (CONST_INT))
8681 (set (REGX) (PLUS (REGX) (REGY)))
8682 ...
8683 ... (MEM (REGX)) ...
8684 and convert it to
8685 (set (REGZ) (CONST_INT))
8686 ...
8687 ... (MEM (PLUS (REGZ) (REGY)))... .
8688
8689 First, check that we have (set (REGX) (PLUS (REGX) (REGY)))
8690 and that we know all uses of REGX before it dies. */
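
	 Concretely (illustrative register numbers): given
	     (set (reg 1) (const_int 4))
	     (set (reg 1) (plus (reg 1) (reg 2)))
	     ... (mem (reg 1)) ...
	 and a free index register reg 3, the constant load is redirected to
	 reg 3, every recorded use of reg 1 becomes
	 (mem (plus (reg 3) (reg 2))), and the reg-reg addition is deleted.  */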
8691 set = single_set (insn);
8692 if (set != NULL_RTX
8693 && GET_CODE (SET_DEST (set)) == REG
8694 && (HARD_REGNO_NREGS (REGNO (SET_DEST (set)),
8695 GET_MODE (SET_DEST (set)))
8696 == 1)
8697 && GET_CODE (SET_SRC (set)) == PLUS
8698 && GET_CODE (XEXP (SET_SRC (set), 1)) == REG
8699 && rtx_equal_p (XEXP (SET_SRC (set), 0), SET_DEST (set))
8700 && last_label_ruid < reg_state[REGNO (SET_DEST (set))].use_ruid)
8701 {
8702 rtx reg = SET_DEST (set);
8703 rtx plus = SET_SRC (set);
8704 rtx base = XEXP (plus, 1);
8705 rtx prev = prev_nonnote_insn (insn);
8706 rtx prev_set = prev ? single_set (prev) : NULL_RTX;
8707 unsigned int regno = REGNO (reg);
8708 rtx const_reg = NULL_RTX;
8709 rtx reg_sum = NULL_RTX;
8710
8711 /* Now, we need an index register.
8712 We'll set index_reg to this index register, const_reg to the
8713 register that is to be loaded with the constant
8714 (denoted as REGZ in the substitution illustration above),
8715 and reg_sum to the register-register that we want to use to
8716 substitute uses of REG (typically in MEMs) with.
8717 First check REG and BASE for being index registers;
8718 we can use them even if they are not dead. */
8719 if (TEST_HARD_REG_BIT (reg_class_contents[INDEX_REG_CLASS], regno)
8720 || TEST_HARD_REG_BIT (reg_class_contents[INDEX_REG_CLASS],
8721 REGNO (base)))
8722 {
8723 const_reg = reg;
8724 reg_sum = plus;
8725 }
8726 else
8727 {
8728 /* Otherwise, look for a free index register. Since we have
8729 checked above that neither REG nor BASE is an index register,
8730 if we find anything at all, it will be different from these
8731 two registers. */
8732 for (i = first_index_reg; i <= last_index_reg; i++)
8733 {
8734 if (TEST_HARD_REG_BIT (reg_class_contents[INDEX_REG_CLASS],
8735 i)
8736 && reg_state[i].use_index == RELOAD_COMBINE_MAX_USES
8737 && reg_state[i].store_ruid <= reg_state[regno].use_ruid
8738 && HARD_REGNO_NREGS (i, GET_MODE (reg)) == 1)
8739 {
8740 rtx index_reg = gen_rtx_REG (GET_MODE (reg), i);
8741
8742 const_reg = index_reg;
8743 reg_sum = gen_rtx_PLUS (GET_MODE (reg), index_reg, base);
8744 break;
8745 }
8746 }
8747 }
8748
8749 /* Check that PREV_SET is indeed (set (REGX) (CONST_INT)) and that
8750 (REGY), i.e. BASE, is not clobbered before the last use we'll
8751 create. */
8752 if (prev_set != 0
8753 && GET_CODE (SET_SRC (prev_set)) == CONST_INT
8754 && rtx_equal_p (SET_DEST (prev_set), reg)
8755 && reg_state[regno].use_index >= 0
8756 && (reg_state[REGNO (base)].store_ruid
8757 <= reg_state[regno].use_ruid)
8758 && reg_sum != 0)
8759 {
8760 int i;
8761
8762 /* Change destination register and, if necessary, the
8763 constant value in PREV, the constant loading instruction. */
8764 validate_change (prev, &SET_DEST (prev_set), const_reg, 1);
8765 if (reg_state[regno].offset != const0_rtx)
8766 validate_change (prev,
8767 &SET_SRC (prev_set),
8768 GEN_INT (INTVAL (SET_SRC (prev_set))
8769 + INTVAL (reg_state[regno].offset)),
8770 1);
8771
8772 /* Now for every use of REG that we have recorded, replace REG
8773 with REG_SUM. */
8774 for (i = reg_state[regno].use_index;
8775 i < RELOAD_COMBINE_MAX_USES; i++)
8776 validate_change (reg_state[regno].reg_use[i].insn,
8777 reg_state[regno].reg_use[i].usep,
8778 reg_sum, 1);
8779
8780 if (apply_change_group ())
8781 {
8782 rtx *np;
8783
8784 /* Delete the reg-reg addition. */
8785 PUT_CODE (insn, NOTE);
8786 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
8787 NOTE_SOURCE_FILE (insn) = 0;
8788
8789 if (reg_state[regno].offset != const0_rtx)
8790 /* Previous REG_EQUIV / REG_EQUAL notes for PREV
8791 are now invalid. */
8792 for (np = &REG_NOTES (prev); *np;)
8793 {
8794 if (REG_NOTE_KIND (*np) == REG_EQUAL
8795 || REG_NOTE_KIND (*np) == REG_EQUIV)
8796 *np = XEXP (*np, 1);
8797 else
8798 np = &XEXP (*np, 1);
8799 }
8800
8801 reg_state[regno].use_index = RELOAD_COMBINE_MAX_USES;
8802 reg_state[REGNO (const_reg)].store_ruid
8803 = reload_combine_ruid;
8804 continue;
8805 }
8806 }
8807 }
8808
8809 note_stores (PATTERN (insn), reload_combine_note_store, NULL);
8810
8811 if (GET_CODE (insn) == CALL_INSN)
8812 {
8813 rtx link;
8814
8815 for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
8816 if (call_used_regs[r])
8817 {
8818 reg_state[r].use_index = RELOAD_COMBINE_MAX_USES;
8819 reg_state[r].store_ruid = reload_combine_ruid;
8820 }
8821
8822 for (link = CALL_INSN_FUNCTION_USAGE (insn); link;
8823 link = XEXP (link, 1))
8824 {
8825 rtx usage_rtx = XEXP (XEXP (link, 0), 0);
8826 if (GET_CODE (usage_rtx) == REG)
8827 {
8828 unsigned int i;
8829 unsigned int start_reg = REGNO (usage_rtx);
8830 unsigned int num_regs =
8831 HARD_REGNO_NREGS (start_reg, GET_MODE (usage_rtx));
8832 unsigned int end_reg = start_reg + num_regs - 1;
8833 for (i = start_reg; i <= end_reg; i++)
8834 if (GET_CODE (XEXP (link, 0)) == CLOBBER)
8835 {
8836 reg_state[i].use_index = RELOAD_COMBINE_MAX_USES;
8837 reg_state[i].store_ruid = reload_combine_ruid;
8838 }
8839 else
8840 reg_state[i].use_index = -1;
8841 }
8842 }
8843
8844 }
8845 else if (GET_CODE (insn) == JUMP_INSN
8846 && GET_CODE (PATTERN (insn)) != RETURN)
8847 {
8848 /* Non-spill registers might be used at the jump destination in
8849 some unknown fashion, so we have to mark the unknown use. */
8850 HARD_REG_SET *live;
8851
8852 if ((condjump_p (insn) || condjump_in_parallel_p (insn))
8853 && JUMP_LABEL (insn))
8854 live = &LABEL_LIVE (JUMP_LABEL (insn));
8855 else
8856 live = &ever_live_at_start;
8857
8858 for (i = FIRST_PSEUDO_REGISTER - 1; i >= 0; --i)
8859 if (TEST_HARD_REG_BIT (*live, i))
8860 reg_state[i].use_index = -1;
8861 }
8862
8863 reload_combine_note_use (&PATTERN (insn), insn);
8864 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
8865 {
8866 if (REG_NOTE_KIND (note) == REG_INC
8867 && GET_CODE (XEXP (note, 0)) == REG)
8868 {
8869 int regno = REGNO (XEXP (note, 0));
8870
8871 reg_state[regno].store_ruid = reload_combine_ruid;
8872 reg_state[regno].use_index = -1;
8873 }
8874 }
8875 }
8876
8877 free (label_live);
8878 }
8879
8880 /* Check if DST is a register or a subreg of a register; if it is,
8881 update reg_state[regno].store_ruid and reg_state[regno].use_index
8882 accordingly. Called via note_stores from reload_combine. */
8883
8884 static void
8885 reload_combine_note_store (dst, set, data)
8886 rtx dst, set;
8887 void *data ATTRIBUTE_UNUSED;
8888 {
8889 int regno = 0;
8890 int i;
8891 enum machine_mode mode = GET_MODE (dst);
8892
8893 if (GET_CODE (dst) == SUBREG)
8894 {
8895 regno = subreg_regno_offset (REGNO (SUBREG_REG (dst)),
8896 GET_MODE (SUBREG_REG (dst)),
8897 SUBREG_BYTE (dst),
8898 GET_MODE (dst));
8899 dst = SUBREG_REG (dst);
8900 }
8901 if (GET_CODE (dst) != REG)
8902 return;
8903 regno += REGNO (dst);
8904
8905 /* note_stores might have stripped a STRICT_LOW_PART, so we have to be
8906 careful with registers / register parts that are not full words.
8907
8908 Similarly for ZERO_EXTRACT and SIGN_EXTRACT. */
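
  /* For example (illustrative), (set (strict_low_part (reg:QI 3)) ...)
     leaves the upper bits of register 3 unchanged, so register 3 cannot be
     treated as freshly set; it is marked as used in an unknown fashion
     instead (use_index = -1).  */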
8909 if (GET_CODE (set) != SET
8910 || GET_CODE (SET_DEST (set)) == ZERO_EXTRACT
8911 || GET_CODE (SET_DEST (set)) == SIGN_EXTRACT
8912 || GET_CODE (SET_DEST (set)) == STRICT_LOW_PART)
8913 {
8914 for (i = HARD_REGNO_NREGS (regno, mode) - 1 + regno; i >= regno; i--)
8915 {
8916 reg_state[i].use_index = -1;
8917 reg_state[i].store_ruid = reload_combine_ruid;
8918 }
8919 }
8920 else
8921 {
8922 for (i = HARD_REGNO_NREGS (regno, mode) - 1 + regno; i >= regno; i--)
8923 {
8924 reg_state[i].store_ruid = reload_combine_ruid;
8925 reg_state[i].use_index = RELOAD_COMBINE_MAX_USES;
8926 }
8927 }
8928 }
8929
8930 /* XP points to a piece of rtl that has to be checked for any uses of
8931 registers.
8932 *XP is the pattern of INSN, or a part of it.
8933 Called from reload_combine, and recursively by itself. */
8934 static void
8935 reload_combine_note_use (xp, insn)
8936 rtx *xp, insn;
8937 {
8938 rtx x = *xp;
8939 enum rtx_code code = GET_CODE (x);
8940 const char *fmt;
8941 int i, j;
8942 rtx offset = const0_rtx; /* For the REG case below. */
8943
8944 switch (code)
8945 {
8946 case SET:
8947 if (GET_CODE (SET_DEST (x)) == REG)
8948 {
8949 reload_combine_note_use (&SET_SRC (x), insn);
8950 return;
8951 }
8952 break;
8953
8954 case USE:
8955 /* If this is the USE of a return value, we can't change it. */
8956 if (GET_CODE (XEXP (x, 0)) == REG && REG_FUNCTION_VALUE_P (XEXP (x, 0)))
8957 {
8958 /* Mark the return register as used in an unknown fashion. */
8959 rtx reg = XEXP (x, 0);
8960 int regno = REGNO (reg);
8961 int nregs = HARD_REGNO_NREGS (regno, GET_MODE (reg));
8962
8963 while (--nregs >= 0)
8964 reg_state[regno + nregs].use_index = -1;
8965 return;
8966 }
8967 break;
8968
8969 case CLOBBER:
8970 if (GET_CODE (SET_DEST (x)) == REG)
8971 return;
8972 break;
8973
8974 case PLUS:
8975 /* We are interested in (plus (reg) (const_int)) . */
8976 if (GET_CODE (XEXP (x, 0)) != REG
8977 || GET_CODE (XEXP (x, 1)) != CONST_INT)
8978 break;
8979 offset = XEXP (x, 1);
8980 x = XEXP (x, 0);
8981 /* Fall through. */
8982 case REG:
8983 {
8984 int regno = REGNO (x);
8985 int use_index;
8986 int nregs;
8987
8988 /* Some spurious USEs of pseudo registers might remain.
8989 Just ignore them. */
8990 if (regno >= FIRST_PSEUDO_REGISTER)
8991 return;
8992
8993 nregs = HARD_REGNO_NREGS (regno, GET_MODE (x));
8994
8995 /* We can't substitute into multi-hard-reg uses. */
8996 if (nregs > 1)
8997 {
8998 while (--nregs >= 0)
8999 reg_state[regno + nregs].use_index = -1;
9000 return;
9001 }
9002
9003 /* If this register is already used in some unknown fashion, we
9004 can't do anything.
9005 If we decrement the index from zero to -1, we can't store more
9006 uses, so this register becomes used in an unknown fashion. */
9007 use_index = --reg_state[regno].use_index;
9008 if (use_index < 0)
9009 return;
9010
9011 if (use_index != RELOAD_COMBINE_MAX_USES - 1)
9012 {
9013 /* We have found another use for a register that is already
9014 used later. Check if the offsets match; if not, mark the
9015 register as used in an unknown fashion. */
9016 if (! rtx_equal_p (offset, reg_state[regno].offset))
9017 {
9018 reg_state[regno].use_index = -1;
9019 return;
9020 }
9021 }
9022 else
9023 {
9024 /* This is the first use of this register we have seen since we
9025 marked it as dead. */
9026 reg_state[regno].offset = offset;
9027 reg_state[regno].use_ruid = reload_combine_ruid;
9028 }
9029 reg_state[regno].reg_use[use_index].insn = insn;
9030 reg_state[regno].reg_use[use_index].usep = xp;
9031 return;
9032 }
9033
9034 default:
9035 break;
9036 }
9037
9038 /* Recursively process the components of X. */
9039 fmt = GET_RTX_FORMAT (code);
9040 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
9041 {
9042 if (fmt[i] == 'e')
9043 reload_combine_note_use (&XEXP (x, i), insn);
9044 else if (fmt[i] == 'E')
9045 {
9046 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
9047 reload_combine_note_use (&XVECEXP (x, i, j), insn);
9048 }
9049 }
9050 }
9051 \f
9052 /* See if we can reduce the cost of a constant by replacing a move
9053 with an add. We track situations in which a register is set to a
9054 constant or to a register plus a constant. */
9055 /* We cannot do our optimization across labels. Invalidating all the
9056 information about register contents we have would be costly, so we
9057 use move2add_last_label_luid to note where the label is and then
9058 later disable any optimization that would cross it.
9059 reg_offset[n] / reg_base_reg[n] / reg_mode[n] are only valid if
9060 reg_set_luid[n] is greater than move2add_last_label_luid. */
9061 static int reg_set_luid[FIRST_PSEUDO_REGISTER];
9062
9063 /* If reg_base_reg[n] is negative, register n has been set to
9064 reg_offset[n] in mode reg_mode[n] .
9065 If reg_base_reg[n] is non-negative, register n has been set to the
9066 sum of reg_offset[n] and the value of register reg_base_reg[n]
9067 before reg_set_luid[n], calculated in mode reg_mode[n] . */
9068 static HOST_WIDE_INT reg_offset[FIRST_PSEUDO_REGISTER];
9069 static int reg_base_reg[FIRST_PSEUDO_REGISTER];
9070 static enum machine_mode reg_mode[FIRST_PSEUDO_REGISTER];
9071
9072 /* move2add_luid is linearly increased while scanning the instructions
9073 from first to last. It is used to set reg_set_luid in
9074 reload_cse_move2add and move2add_note_store. */
9075 static int move2add_luid;
9076
9077 /* move2add_last_label_luid is set whenever a label is found. Labels
9078 invalidate all previously collected reg_offset data. */
9079 static int move2add_last_label_luid;
9080
9081 /* Truncate VALUE to MODE and sign extend the result. */
9082
9083 static HOST_WIDE_INT
9084 sext_for_mode (mode, value)
9085 enum machine_mode mode;
9086 HOST_WIDE_INT value;
9087 {
9088 HOST_WIDE_INT cval = value & GET_MODE_MASK (mode);
9089 int width = GET_MODE_BITSIZE (mode);
9090
9091 /* If MODE is narrower than HOST_WIDE_INT and CVAL is a negative number,
9092 sign extend it. */
9093 if (width > 0 && width < HOST_BITS_PER_WIDE_INT
9094 && (cval & ((HOST_WIDE_INT) 1 << (width - 1))) != 0)
9095 cval |= (HOST_WIDE_INT) -1 << width;
9096
9097 return cval;
9098 }
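
/* For illustration: with MODE == QImode (8 bits) and VALUE == 0x1ff, CVAL
   becomes 0xff, its sign bit is set, and the result is sign-extended to
   (HOST_WIDE_INT) -1; with VALUE == 0x7f the result is simply 0x7f.  */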
9099
9100 /* ??? We don't know how zero / sign extension is handled, hence we
9101 can't go from a narrower to a wider mode. */
9102 #define MODES_OK_FOR_MOVE2ADD(OUTMODE, INMODE) \
9103 (GET_MODE_SIZE (OUTMODE) == GET_MODE_SIZE (INMODE) \
9104 || (GET_MODE_SIZE (OUTMODE) <= GET_MODE_SIZE (INMODE) \
9105 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (OUTMODE), \
9106 GET_MODE_BITSIZE (INMODE))))
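
/* For example, on a target where TRULY_NOOP_TRUNCATION holds for the mode
   pair, a value tracked in SImode may be reused for a QImode destination
   (the truncation is a no-op), but a value tracked in QImode may never be
   reused for an SImode destination.  */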
9107
9108 static void
9109 reload_cse_move2add (first)
9110 rtx first;
9111 {
9112 int i;
9113 rtx insn;
9114
9115 for (i = FIRST_PSEUDO_REGISTER - 1; i >= 0; i--)
9116 reg_set_luid[i] = 0;
9117
9118 move2add_last_label_luid = 0;
9119 move2add_luid = 2;
9120 for (insn = first; insn; insn = NEXT_INSN (insn), move2add_luid++)
9121 {
9122 rtx pat, note;
9123
9124 if (GET_CODE (insn) == CODE_LABEL)
9125 {
9126 move2add_last_label_luid = move2add_luid;
9127 /* We're going to increment move2add_luid twice after a
9128 label, so that we can use move2add_last_label_luid + 1 as
9129 the luid for constants. */
9130 move2add_luid++;
9131 continue;
9132 }
9133 if (! INSN_P (insn))
9134 continue;
9135 pat = PATTERN (insn);
9136 /* For simplicity, we only perform this optimization on
9137 straightforward SETs. */
9138 if (GET_CODE (pat) == SET
9139 && GET_CODE (SET_DEST (pat)) == REG)
9140 {
9141 rtx reg = SET_DEST (pat);
9142 int regno = REGNO (reg);
9143 rtx src = SET_SRC (pat);
9144
9145 /* Check if we have valid information on the contents of this
9146 register in the mode of REG. */
9147 if (reg_set_luid[regno] > move2add_last_label_luid
9148 && MODES_OK_FOR_MOVE2ADD (GET_MODE (reg), reg_mode[regno]))
9149 {
9150 /* Try to transform (set (REGX) (CONST_INT A))
9151 ...
9152 (set (REGX) (CONST_INT B))
9153 to
9154 (set (REGX) (CONST_INT A))
9155 ...
9156 (set (REGX) (plus (REGX) (CONST_INT B-A))) */
9157
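		  /* For illustration: if REGX was last set to 100 and this
		     insn sets it to 108, NEW_SRC below is (const_int 8) and
		     the move is rewritten as an addition of 8 whenever that
		     is cheaper than loading 108 and an add2 pattern is
		     available.  */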
9158 if (GET_CODE (src) == CONST_INT && reg_base_reg[regno] < 0)
9159 {
9160 int success = 0;
9161 rtx new_src = GEN_INT (sext_for_mode (GET_MODE (reg),
9162 INTVAL (src)
9163 - reg_offset[regno]));
9164 /* (set (reg) (plus (reg) (const_int 0))) is not canonical;
9165 use (set (reg) (reg)) instead.
9166 We don't delete this insn, nor do we convert it into a
9167 note, to avoid losing register notes or the return
9168 value flag. jump2 already knows how to get rid of
9169 no-op moves. */
9170 if (new_src == const0_rtx)
9171 success = validate_change (insn, &SET_SRC (pat), reg, 0);
9172 else if (rtx_cost (new_src, PLUS) < rtx_cost (src, SET)
9173 && have_add2_insn (reg, new_src))
9174 success = validate_change (insn, &PATTERN (insn),
9175 gen_add2_insn (reg, new_src), 0);
9176 reg_set_luid[regno] = move2add_luid;
9177 reg_mode[regno] = GET_MODE (reg);
9178 reg_offset[regno] = INTVAL (src);
9179 continue;
9180 }
9181
9182 /* Try to transform (set (REGX) (REGY))
9183 (set (REGX) (PLUS (REGX) (CONST_INT A)))
9184 ...
9185 (set (REGX) (REGY))
9186 (set (REGX) (PLUS (REGX) (CONST_INT B)))
9187 to
9188 (set (REGX) (REGY))
9189 (set (REGX) (PLUS (REGX) (CONST_INT A)))
9190 ...
9191 (set (REGX) (plus (REGX) (CONST_INT B-A))) */
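	      /* Concretely (illustrative): if REGX is currently known to
		 hold REGY + 4, with both registers tracked relative to the
		 same base, and the move/add pair would compute REGY + 12,
		 the pair is collapsed into a single
		 (set (REGX) (plus (REGX) (const_int 8))) when that is
		 cheaper, and the now-redundant move is deleted.  */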
9192 else if (GET_CODE (src) == REG
9193 && reg_set_luid[regno] == reg_set_luid[REGNO (src)]
9194 && reg_base_reg[regno] == reg_base_reg[REGNO (src)]
9195 && MODES_OK_FOR_MOVE2ADD (GET_MODE (reg),
9196 reg_mode[REGNO (src)]))
9197 {
9198 rtx next = next_nonnote_insn (insn);
9199 rtx set = NULL_RTX;
9200 if (next)
9201 set = single_set (next);
9202 if (set
9203 && SET_DEST (set) == reg
9204 && GET_CODE (SET_SRC (set)) == PLUS
9205 && XEXP (SET_SRC (set), 0) == reg
9206 && GET_CODE (XEXP (SET_SRC (set), 1)) == CONST_INT)
9207 {
9208 rtx src3 = XEXP (SET_SRC (set), 1);
9209 HOST_WIDE_INT added_offset = INTVAL (src3);
9210 HOST_WIDE_INT base_offset = reg_offset[REGNO (src)];
9211 HOST_WIDE_INT regno_offset = reg_offset[regno];
9212 rtx new_src = GEN_INT (sext_for_mode (GET_MODE (reg),
9213 added_offset
9214 + base_offset
9215 - regno_offset));
9216 int success = 0;
9217
9218 if (new_src == const0_rtx)
9219 /* See above why we create (set (reg) (reg)) here. */
9220 success
9221 = validate_change (next, &SET_SRC (set), reg, 0);
9222 else if ((rtx_cost (new_src, PLUS)
9223 < COSTS_N_INSNS (1) + rtx_cost (src3, SET))
9224 && have_add2_insn (reg, new_src))
9225 success
9226 = validate_change (next, &PATTERN (next),
9227 gen_add2_insn (reg, new_src), 0);
9228 if (success)
9229 {
9230 /* INSN might be the first insn in a basic block
9231 if the preceding insn is a conditional jump
9232 or a call that may throw. */
9233 PUT_CODE (insn, NOTE);
9234 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
9235 NOTE_SOURCE_FILE (insn) = 0;
9236 }
9237 insn = next;
9238 reg_mode[regno] = GET_MODE (reg);
9239 reg_offset[regno] = sext_for_mode (GET_MODE (reg),
9240 added_offset
9241 + base_offset);
9242 continue;
9243 }
9244 }
9245 }
9246 }
9247
9248 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
9249 {
9250 if (REG_NOTE_KIND (note) == REG_INC
9251 && GET_CODE (XEXP (note, 0)) == REG)
9252 {
9253 /* Reset the information about this register. */
9254 int regno = REGNO (XEXP (note, 0));
9255 if (regno < FIRST_PSEUDO_REGISTER)
9256 reg_set_luid[regno] = 0;
9257 }
9258 }
9259 note_stores (PATTERN (insn), move2add_note_store, NULL);
9260 /* If this is a CALL_INSN, all call used registers are stored with
9261 unknown values. */
9262 if (GET_CODE (insn) == CALL_INSN)
9263 {
9264 for (i = FIRST_PSEUDO_REGISTER - 1; i >= 0; i--)
9265 {
9266 if (call_used_regs[i])
9267 /* Reset the information about this register. */
9268 reg_set_luid[i] = 0;
9269 }
9270 }
9271 }
9272 }
9273
9274 /* SET is a SET or CLOBBER that sets DST.
9275 Update reg_set_luid, reg_offset and reg_base_reg accordingly.
9276 Called from reload_cse_move2add via note_stores. */
9277
9278 static void
9279 move2add_note_store (dst, set, data)
9280 rtx dst, set;
9281 void *data ATTRIBUTE_UNUSED;
9282 {
9283 unsigned int regno = 0;
9284 unsigned int i;
9285 enum machine_mode mode = GET_MODE (dst);
9286
9287 if (GET_CODE (dst) == SUBREG)
9288 {
9289 regno = subreg_regno_offset (REGNO (SUBREG_REG (dst)),
9290 GET_MODE (SUBREG_REG (dst)),
9291 SUBREG_BYTE (dst),
9292 GET_MODE (dst));
9293 dst = SUBREG_REG (dst);
9294 }
9295
9296 /* Some targets do argument pushes without adding REG_INC notes. */
9297
9298 if (GET_CODE (dst) == MEM)
9299 {
9300 dst = XEXP (dst, 0);
9301 if (GET_CODE (dst) == PRE_INC || GET_CODE (dst) == POST_INC
9302 || GET_CODE (dst) == PRE_DEC || GET_CODE (dst) == POST_DEC)
9303 reg_set_luid[REGNO (XEXP (dst, 0))] = 0;
9304 return;
9305 }
9306 if (GET_CODE (dst) != REG)
9307 return;
9308
9309 regno += REGNO (dst);
9310
9311 if (HARD_REGNO_NREGS (regno, mode) == 1 && GET_CODE (set) == SET
9312 && GET_CODE (SET_DEST (set)) != ZERO_EXTRACT
9313 && GET_CODE (SET_DEST (set)) != SIGN_EXTRACT
9314 && GET_CODE (SET_DEST (set)) != STRICT_LOW_PART)
9315 {
9316 rtx src = SET_SRC (set);
9317 rtx base_reg;
9318 HOST_WIDE_INT offset;
9319 int base_regno;
9320 /* This may be different from mode, if SET_DEST (set) is a
9321 SUBREG. */
9322 enum machine_mode dst_mode = GET_MODE (dst);
9323
9324 switch (GET_CODE (src))
9325 {
9326 case PLUS:
9327 if (GET_CODE (XEXP (src, 0)) == REG)
9328 {
9329 base_reg = XEXP (src, 0);
9330
9331 if (GET_CODE (XEXP (src, 1)) == CONST_INT)
9332 offset = INTVAL (XEXP (src, 1));
9333 else if (GET_CODE (XEXP (src, 1)) == REG
9334 && (reg_set_luid[REGNO (XEXP (src, 1))]
9335 > move2add_last_label_luid)
9336 && (MODES_OK_FOR_MOVE2ADD
9337 (dst_mode, reg_mode[REGNO (XEXP (src, 1))])))
9338 {
9339 if (reg_base_reg[REGNO (XEXP (src, 1))] < 0)
9340 offset = reg_offset[REGNO (XEXP (src, 1))];
9341 /* Maybe the first register is known to be a
9342 constant. */
9343 else if (reg_set_luid[REGNO (base_reg)]
9344 > move2add_last_label_luid
9345 && (MODES_OK_FOR_MOVE2ADD
9346 (dst_mode, reg_mode[REGNO (base_reg)]))
9347 && reg_base_reg[REGNO (base_reg)] < 0)
9348 {
9349 offset = reg_offset[REGNO (base_reg)];
9350 base_reg = XEXP (src, 1);
9351 }
9352 else
9353 goto invalidate;
9354 }
9355 else
9356 goto invalidate;
9357
9358 break;
9359 }
9360
9361 goto invalidate;
9362
9363 case REG:
9364 base_reg = src;
9365 offset = 0;
9366 break;
9367
9368 case CONST_INT:
9369 /* Start tracking the register as a constant. */
9370 reg_base_reg[regno] = -1;
9371 reg_offset[regno] = INTVAL (SET_SRC (set));
9372 /* We assign the same luid to all registers set to constants. */
9373 reg_set_luid[regno] = move2add_last_label_luid + 1;
9374 reg_mode[regno] = mode;
9375 return;
9376
9377 default:
9378 invalidate:
9379 /* Invalidate the contents of the register. */
9380 reg_set_luid[regno] = 0;
9381 return;
9382 }
9383
9384 base_regno = REGNO (base_reg);
9385 /* If information about the base register is not valid, set it
9386 up as a new base register, pretending its value is known
9387 starting from the current insn. */
9388 if (reg_set_luid[base_regno] <= move2add_last_label_luid)
9389 {
9390 reg_base_reg[base_regno] = base_regno;
9391 reg_offset[base_regno] = 0;
9392 reg_set_luid[base_regno] = move2add_luid;
9393 reg_mode[base_regno] = mode;
9394 }
9395 else if (! MODES_OK_FOR_MOVE2ADD (dst_mode,
9396 reg_mode[base_regno]))
9397 goto invalidate;
9398
9399 reg_mode[regno] = mode;
9400
9401 /* Copy base information from our base register. */
9402 reg_set_luid[regno] = reg_set_luid[base_regno];
9403 reg_base_reg[regno] = reg_base_reg[base_regno];
9404
9405 /* Compute the sum of the offsets or constants. */
9406 reg_offset[regno] = sext_for_mode (dst_mode,
9407 offset
9408 + reg_offset[base_regno]);
9409 }
9410 else
9411 {
9412 unsigned int endregno = regno + HARD_REGNO_NREGS (regno, mode);
9413
9414 for (i = regno; i < endregno; i++)
9415 /* Reset the information about this register. */
9416 reg_set_luid[i] = 0;
9417 }
9418 }
9419
9420 #ifdef AUTO_INC_DEC
9421 static void
9422 add_auto_inc_notes (insn, x)
9423 rtx insn;
9424 rtx x;
9425 {
9426 enum rtx_code code = GET_CODE (x);
9427 const char *fmt;
9428 int i, j;
9429
9430 if (code == MEM && auto_inc_p (XEXP (x, 0)))
9431 {
9432 REG_NOTES (insn)
9433 = gen_rtx_EXPR_LIST (REG_INC, XEXP (XEXP (x, 0), 0), REG_NOTES (insn));
9434 return;
9435 }
9436
9437 /* Scan all the operand sub-expressions. */
9438 fmt = GET_RTX_FORMAT (code);
9439 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
9440 {
9441 if (fmt[i] == 'e')
9442 add_auto_inc_notes (insn, XEXP (x, i));
9443 else if (fmt[i] == 'E')
9444 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
9445 add_auto_inc_notes (insn, XVECEXP (x, i, j));
9446 }
9447 }
9448 #endif
9449
9450 /* Copy EH notes from an insn to its reloads. */
9451 static void
9452 copy_eh_notes (insn, x)
9453 rtx insn;
9454 rtx x;
9455 {
9456 rtx eh_note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
9457 if (eh_note)
9458 {
9459 for (; x != 0; x = NEXT_INSN (x))
9460 {
9461 if (may_trap_p (PATTERN (x)))
9462 REG_NOTES (x)
9463 = gen_rtx_EXPR_LIST (REG_EH_REGION, XEXP (eh_note, 0),
9464 REG_NOTES (x));
9465 }
9466 }
9467 }
9468
9469 /* This is used by the reload pass, which emits some instructions after
9470 abnormal calls and thereby moves the basic block end, when in fact it
9471 wants to emit them on the edge. Look for abnormal call edges, search
9472 backward for the proper call and fix the damage.
9473
9474 Instructions that throw exceptions internally are handled similarly. */
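
/* For illustration: if a block ends in a call that has an abnormal or EH
   successor edge, and reload emitted fix-up insns after that call, the loop
   below trims the block so that it ends at the call again and re-emits the
   trailing insns on the fall-through edge, where they belong.  */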
9475 static void
9476 fixup_abnormal_edges ()
9477 {
9478 int i;
9479 bool inserted = false;
9480
9481 for (i = 0; i < n_basic_blocks; i++)
9482 {
9483 basic_block bb = BASIC_BLOCK (i);
9484 edge e;
9485
9486 /* Look for the cases we are interested in - calls or instructions causing
9487 exceptions. */
9488 for (e = bb->succ; e; e = e->succ_next)
9489 {
9490 if (e->flags & EDGE_ABNORMAL_CALL)
9491 break;
9492 if ((e->flags & (EDGE_ABNORMAL | EDGE_EH))
9493 == (EDGE_ABNORMAL | EDGE_EH))
9494 break;
9495 }
9496 if (e && GET_CODE (bb->end) != CALL_INSN && !can_throw_internal (bb->end))
9497 {
9498 rtx insn = bb->end, stop = NEXT_INSN (bb->end);
9499 rtx next;
9500 for (e = bb->succ; e; e = e->succ_next)
9501 if (e->flags & EDGE_FALLTHRU)
9502 break;
9503 /* Get past the new insns generated. Allow notes, as the insns may
9504 already have been deleted. */
9505 while ((GET_CODE (insn) == INSN || GET_CODE (insn) == NOTE)
9506 && !can_throw_internal (insn)
9507 && insn != bb->head)
9508 insn = PREV_INSN (insn);
9509 if (GET_CODE (insn) != CALL_INSN && !can_throw_internal (insn))
9510 abort ();
9511 bb->end = insn;
9512 inserted = true;
9513 insn = NEXT_INSN (insn);
9514 while (insn && insn != stop)
9515 {
9516 next = NEXT_INSN (insn);
9517 if (INSN_P (insn))
9518 {
9519 insert_insn_on_edge (PATTERN (insn), e);
9520 delete_insn (insn);
9521 }
9522 insn = next;
9523 }
9524 }
9525 }
9526 if (inserted)
9527 commit_edge_insertions ();
9528 }