1 /* Allocate registers within a basic block, for GNU compiler.
2 Copyright (C) 1987, 1988, 1991, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22 /* Allocation of hard register numbers to pseudo registers is done in
23 two passes. In this pass we consider only regs that are born and
24 die once within one basic block. We do this one basic block at a
25 time. Then the next pass allocates the registers that remain.
26 Two passes are used because this pass uses methods that work only
27 on linear code, but that do a better job than the general methods
28 used in global_alloc, and more quickly too.
29
30 The assignments made are recorded in the vector reg_renumber
31 whose space is allocated here. The rtl code itself is not altered.
32
33 We assign each instruction in the basic block a number
34 which is its order from the beginning of the block.
35 Then we can represent the lifetime of a pseudo register with
36 a pair of numbers, and check for conflicts easily.
37 We can record the availability of hard registers with a
38 HARD_REG_SET for each instruction. The HARD_REG_SET
39 contains 0 or 1 for each hard reg.
40
41 To avoid register shuffling, we tie registers together when one
42 dies by being copied into another, or dies in an instruction that
43 does arithmetic to produce another. The tied registers are
44 allocated as one. Registers with different reg class preferences
45 can never be tied unless the class preferred by one is a subclass
46 of the one preferred by the other.
47
48 Tying is represented with "quantity numbers".
49 A non-tied register is given a new quantity number.
50 Tied registers have the same quantity number.
51
52 We have provision to exempt registers, even when they are contained
53 within the block, that can be tied to others that are not contained in it.
54 This is so that global_alloc could process them both and tie them then.
55 But this is currently disabled since tying in global_alloc is not
56 yet implemented. */
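/* A small illustration of the scheme above (hypothetical pseudo and insn
   numbers): suppose pseudo 101 is set in insn 2 of a block and dies in
   insn 5 by being copied into pseudo 102, which in turn dies in insn 8.
   Each pseudo's life is the interval between its birth and death, so 101
   lives over insns 2-5 and 102 over insns 5-8.  Because 101 dies by being
   copied into 102, the two are tied: both get the same quantity number,
   and the quantity is allocated a single hard register covering
   insns 2-8.  */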
57
58 /* Pseudos allocated here can be reallocated by global.c if the hard register
59 is used as a spill register. Currently we don't allocate such pseudos
60 here if their preferred class is likely to be used by spills. */
61
62 #include "config.h"
63 #include "system.h"
64 #include "coretypes.h"
65 #include "tm.h"
66 #include "hard-reg-set.h"
67 #include "rtl.h"
68 #include "tm_p.h"
69 #include "flags.h"
70 #include "basic-block.h"
71 #include "regs.h"
72 #include "function.h"
73 #include "insn-config.h"
74 #include "insn-attr.h"
75 #include "recog.h"
76 #include "output.h"
77 #include "toplev.h"
78 #include "except.h"
79 #include "integrate.h"
80 \f
81 /* Next quantity number available for allocation. */
82
83 static int next_qty;
84
85 /* Information we maintain about each quantity. */
86 struct qty
87 {
88 /* The number of refs to quantity Q. */
89
90 int n_refs;
91
92 /* The frequency of uses of quantity Q. */
93
94 int freq;
95
96 /* Insn number (counting from head of basic block)
97 where quantity Q was born. -1 if birth has not been recorded. */
98
99 int birth;
100
101 /* Insn number (counting from head of basic block)
102 where the given quantity died. Due to the way tying is done,
103 and the fact that we consider in this pass only regs that die but once,
104 a quantity can die only once. Each quantity's life span
105 is a set of consecutive insns. -1 if death has not been recorded. */
106
107 int death;
108
109 /* Number of words needed to hold the data in the given quantity.
110 This depends on its machine mode. It is used for these purposes:
111 1. It is used in computing the relative importances of qtys,
112 which determines the order in which we look for regs for them.
113 2. It is used in rules that prevent tying several registers of
114 different sizes in a way that is geometrically impossible
115 (see combine_regs). */
116
117 int size;
118
119 /* Number of times a reg tied to the given qty lives across a CALL_INSN. */
120
121 int n_calls_crossed;
122
123 /* The register number of one pseudo register whose reg_qty value is Q.
124 This register should be the head of the chain
125 maintained in reg_next_in_qty. */
126
127 int first_reg;
128
129 /* Reg class contained in (smaller than) the preferred classes of all
130 the pseudo regs that are tied in the given quantity.
131 This is the preferred class for allocating that quantity. */
132
133 enum reg_class min_class;
134
135 /* Register class within which we allocate the given qty if we can't get
136 its preferred class. */
137
138 enum reg_class alternate_class;
139
140 /* This holds the mode of the registers that are tied to the given qty,
141 or VOIDmode if registers with differing modes are tied together. */
142
143 enum machine_mode mode;
144
145 /* The hard reg number chosen for the given quantity,
146 or -1 if none was found. */
147
148 short phys_reg;
149 };
150
151 static struct qty *qty;
152
153 /* These fields are kept separately to speed up their clearing. */
154
155 /* We maintain two hard register sets that indicate suggested hard registers
156 for each quantity. The first, phys_copy_sugg, contains hard registers
157 that are tied to the quantity by a simple copy. The second contains all
158 hard registers that are tied to the quantity via an arithmetic operation.
159
160 The former register set is given priority for allocation. This tends to
161 eliminate copy insns. */
162
163 /* Element Q is a set of hard registers that are suggested for quantity Q by
164 copy insns. */
165
166 static HARD_REG_SET *qty_phys_copy_sugg;
167
168 /* Element Q is a set of hard registers that are suggested for quantity Q by
169 arithmetic insns. */
170
171 static HARD_REG_SET *qty_phys_sugg;
172
173 /* Element Q is the number of suggested registers in qty_phys_copy_sugg. */
174
175 static short *qty_phys_num_copy_sugg;
176
177 /* Element Q is the number of suggested registers in qty_phys_sugg. */
178
179 static short *qty_phys_num_sugg;
180
181 /* If (REG N) has been assigned a quantity number, is a register number
182 of another register assigned the same quantity number, or -1 for the
183 end of the chain. qty->first_reg points to the head of this chain. */
184
185 static int *reg_next_in_qty;
186
187 /* reg_qty[N] (where N is a pseudo reg number) is the qty number of that reg
188 if it is >= 0,
189 or -1 if this register cannot be allocated by local-alloc,
190 or -2 if not known yet.
191
192 Note that if we see a use or death of pseudo register N with
193 reg_qty[N] == -2, register N must be local to the current block. If
194 it were used in more than one block, we would have reg_qty[N] == -1.
195 This relies on the fact that if reg_basic_block[N] is >= 0, register N
196 will not appear in any other block. We save a considerable number of
197 tests by exploiting this.
198
199 If N is < FIRST_PSEUDO_REGISTER, reg_qty[N] is undefined and should not
200 be referenced. */
201
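/* For example (hypothetical pseudo number): a pseudo 105 that is used in
   only one basic block and dies exactly once starts out at -2; once it is
   assigned a quantity by alloc_qty, reg_qty[105] becomes that quantity
   number.  A pseudo that does not meet those conditions is set to -1 and
   is left for global_alloc to handle.  */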
202 static int *reg_qty;
203
204 /* The offset (in words) of register N within its quantity.
205 This can be nonzero if register N is SImode, and has been tied
206 to a subreg of a DImode register. */
207
208 static char *reg_offset;
209
210 /* Vector of substitutions of register numbers,
211 used to map pseudo regs into hardware regs.
212 This is set up as a result of register allocation.
213 Element N is the hard reg assigned to pseudo reg N,
214 or is -1 if no hard reg was assigned.
215 If N is a hard reg number, element N is N. */
216
217 short *reg_renumber;
218
219 /* Set of hard registers live at the current point in the scan
220 of the instructions in a basic block. */
221
222 static HARD_REG_SET regs_live;
223
224 /* Each set of hard registers indicates registers live at a particular
225 point in the basic block. For N even, regs_live_at[N] says which
226 hard registers are needed *after* insn N/2 (i.e., they may not
227 conflict with the outputs of insn N/2 or the inputs of insn N/2 + 1).
228
229 If an object is to conflict with the inputs of insn J but not the
230 outputs of insn J + 1, we say it is born at index J*2 - 1. Similarly,
231 if it is to conflict with the outputs of insn J but not the inputs of
232 insn J + 1, it is said to die at index J*2 + 1. */
233
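/* Illustration (hypothetical insn number): regs_live_at[10] holds the hard
   registers needed after insn 5, and an object that dies at index 11
   (5*2 + 1) conflicts with the outputs of insn 5 but not with the inputs
   of insn 6.  */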
234 static HARD_REG_SET *regs_live_at;
235
236 /* Communicate local vars `insn_number' and `insn'
237 from `block_alloc' to `reg_is_set', `wipe_dead_reg', and `alloc_qty'. */
238 static int this_insn_number;
239 static rtx this_insn;
240
241 struct equivalence
242 {
243 /* Set when an attempt should be made to replace a register
244 with the associated src_p entry. */
245
246 char replace;
247
248 /* Set when a REG_EQUIV note is found or created. Used to
249 keep track of what memory accesses might be created later,
250 e.g. by reload. */
251
252 rtx replacement;
253
254 rtx *src_p;
255
256 /* Loop depth is used to recognize equivalences which appear
257 to be present within the same loop (or in an inner loop). */
258
259 int loop_depth;
260
261 /* The list of instructions that initialize this register. */
262
263 rtx init_insns;
264 };
265
266 /* reg_equiv[N] (where N is a pseudo reg number) is the equivalence
267 structure for that register. */
268
269 static struct equivalence *reg_equiv;
270
271 /* Nonzero if we recorded an equivalence for a LABEL_REF. */
272 static int recorded_label_ref;
273
274 static void alloc_qty PARAMS ((int, enum machine_mode, int, int));
275 static void validate_equiv_mem_from_store PARAMS ((rtx, rtx, void *));
276 static int validate_equiv_mem PARAMS ((rtx, rtx, rtx));
277 static int equiv_init_varies_p PARAMS ((rtx));
278 static int equiv_init_movable_p PARAMS ((rtx, int));
279 static int contains_replace_regs PARAMS ((rtx));
280 static int memref_referenced_p PARAMS ((rtx, rtx));
281 static int memref_used_between_p PARAMS ((rtx, rtx, rtx));
282 static void update_equiv_regs PARAMS ((void));
283 static void no_equiv PARAMS ((rtx, rtx, void *));
284 static void block_alloc PARAMS ((int));
285 static int qty_sugg_compare PARAMS ((int, int));
286 static int qty_sugg_compare_1 PARAMS ((const void *, const void *));
287 static int qty_compare PARAMS ((int, int));
288 static int qty_compare_1 PARAMS ((const void *, const void *));
289 static int combine_regs PARAMS ((rtx, rtx, int, int, rtx, int));
290 static int reg_meets_class_p PARAMS ((int, enum reg_class));
291 static void update_qty_class PARAMS ((int, int));
292 static void reg_is_set PARAMS ((rtx, rtx, void *));
293 static void reg_is_born PARAMS ((rtx, int));
294 static void wipe_dead_reg PARAMS ((rtx, int));
295 static int find_free_reg PARAMS ((enum reg_class, enum machine_mode,
296 int, int, int, int, int));
297 static void mark_life PARAMS ((int, enum machine_mode, int));
298 static void post_mark_life PARAMS ((int, enum machine_mode, int, int, int));
299 static int no_conflict_p PARAMS ((rtx, rtx, rtx));
300 static int requires_inout PARAMS ((const char *));
301 \f
302 /* Allocate a new quantity (new within current basic block)
303 for register number REGNO which is born at index BIRTH
304 within the block. MODE and SIZE are info on reg REGNO. */
305
306 static void
307 alloc_qty (regno, mode, size, birth)
308 int regno;
309 enum machine_mode mode;
310 int size, birth;
311 {
312 int qtyno = next_qty++;
313
314 reg_qty[regno] = qtyno;
315 reg_offset[regno] = 0;
316 reg_next_in_qty[regno] = -1;
317
318 qty[qtyno].first_reg = regno;
319 qty[qtyno].size = size;
320 qty[qtyno].mode = mode;
321 qty[qtyno].birth = birth;
322 qty[qtyno].n_calls_crossed = REG_N_CALLS_CROSSED (regno);
323 qty[qtyno].min_class = reg_preferred_class (regno);
324 qty[qtyno].alternate_class = reg_alternate_class (regno);
325 qty[qtyno].n_refs = REG_N_REFS (regno);
326 qty[qtyno].freq = REG_FREQ (regno);
327 }
328 \f
329 /* Main entry point of this file. */
330
331 int
332 local_alloc ()
333 {
334 int i;
335 int max_qty;
336 basic_block b;
337
338 /* We need to keep track of whether or not we recorded a LABEL_REF so
339 that we know if the jump optimizer needs to be rerun. */
340 recorded_label_ref = 0;
341
342 /* Leaf functions and non-leaf functions have different needs.
343 If defined, let the machine say what kind of ordering we
344 should use. */
345 #ifdef ORDER_REGS_FOR_LOCAL_ALLOC
346 ORDER_REGS_FOR_LOCAL_ALLOC;
347 #endif
348
349 /* Promote REG_EQUAL notes to REG_EQUIV notes and adjust status of affected
350 registers. */
351 if (optimize)
352 update_equiv_regs ();
353
354 /* This sets the maximum number of quantities we can have. Quantity
355 numbers start at zero and we can have one for each pseudo. */
356 max_qty = (max_regno - FIRST_PSEUDO_REGISTER);
357
358 /* Allocate vectors of temporary data.
359 See the declarations of these variables, above,
360 for what they mean. */
361
362 qty = (struct qty *) xmalloc (max_qty * sizeof (struct qty));
363 qty_phys_copy_sugg
364 = (HARD_REG_SET *) xmalloc (max_qty * sizeof (HARD_REG_SET));
365 qty_phys_num_copy_sugg = (short *) xmalloc (max_qty * sizeof (short));
366 qty_phys_sugg = (HARD_REG_SET *) xmalloc (max_qty * sizeof (HARD_REG_SET));
367 qty_phys_num_sugg = (short *) xmalloc (max_qty * sizeof (short));
368
369 reg_qty = (int *) xmalloc (max_regno * sizeof (int));
370 reg_offset = (char *) xmalloc (max_regno * sizeof (char));
371 reg_next_in_qty = (int *) xmalloc (max_regno * sizeof (int));
372
373 /* Determine which pseudo-registers can be allocated by local-alloc.
374 In general, these are the registers used only in a single block and
375 which only die once.
376
377 We need not be concerned with which block actually uses the register
378 since we will never see it outside that block. */
379
380 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
381 {
382 if (REG_BASIC_BLOCK (i) >= 0 && REG_N_DEATHS (i) == 1)
383 reg_qty[i] = -2;
384 else
385 reg_qty[i] = -1;
386 }
387
388 /* Force loop below to initialize entire quantity array. */
389 next_qty = max_qty;
390
391 /* Allocate each block's local registers, block by block. */
392
393 FOR_EACH_BB (b)
394 {
395 /* NEXT_QTY indicates which elements of the `qty_...'
396 vectors might need to be initialized because they were used
397 for the previous block; it is set to the entire array before
398 block 0. Initialize those, with an explicit loop if there are few,
399 else with memset. Do not initialize vectors that are
400 explicitly set by `alloc_qty'. */
401
402 if (next_qty < 6)
403 {
404 for (i = 0; i < next_qty; i++)
405 {
406 CLEAR_HARD_REG_SET (qty_phys_copy_sugg[i]);
407 qty_phys_num_copy_sugg[i] = 0;
408 CLEAR_HARD_REG_SET (qty_phys_sugg[i]);
409 qty_phys_num_sugg[i] = 0;
410 }
411 }
412 else
413 {
414 #define CLEAR(vector) \
415 memset ((char *) (vector), 0, (sizeof (*(vector))) * next_qty);
416
417 CLEAR (qty_phys_copy_sugg);
418 CLEAR (qty_phys_num_copy_sugg);
419 CLEAR (qty_phys_sugg);
420 CLEAR (qty_phys_num_sugg);
421 }
422
423 next_qty = 0;
424
425 block_alloc (b->index);
426 }
427
428 free (qty);
429 free (qty_phys_copy_sugg);
430 free (qty_phys_num_copy_sugg);
431 free (qty_phys_sugg);
432 free (qty_phys_num_sugg);
433
434 free (reg_qty);
435 free (reg_offset);
436 free (reg_next_in_qty);
437
438 return recorded_label_ref;
439 }
440 \f
441 /* Used for communication between the following two functions: contains
442 a MEM that we wish to ensure remains unchanged. */
443 static rtx equiv_mem;
444
445 /* Set nonzero if EQUIV_MEM is modified. */
446 static int equiv_mem_modified;
447
448 /* If EQUIV_MEM is modified by modifying DEST, indicate that it is modified.
449 Called via note_stores. */
450
451 static void
452 validate_equiv_mem_from_store (dest, set, data)
453 rtx dest;
454 rtx set ATTRIBUTE_UNUSED;
455 void *data ATTRIBUTE_UNUSED;
456 {
457 if ((GET_CODE (dest) == REG
458 && reg_overlap_mentioned_p (dest, equiv_mem))
459 || (GET_CODE (dest) == MEM
460 && true_dependence (dest, VOIDmode, equiv_mem, rtx_varies_p)))
461 equiv_mem_modified = 1;
462 }
463
464 /* Verify that no store between START and the death of REG invalidates
465 MEMREF. MEMREF is invalidated by modifying a register used in MEMREF,
466 by storing into an overlapping memory location, or with a non-const
467 CALL_INSN.
468
469 Return 1 if MEMREF remains valid. */
470
471 static int
472 validate_equiv_mem (start, reg, memref)
473 rtx start;
474 rtx reg;
475 rtx memref;
476 {
477 rtx insn;
478 rtx note;
479
480 equiv_mem = memref;
481 equiv_mem_modified = 0;
482
483 /* If the memory reference has side effects or is volatile, it isn't a
484 valid equivalence. */
485 if (side_effects_p (memref))
486 return 0;
487
488 for (insn = start; insn && ! equiv_mem_modified; insn = NEXT_INSN (insn))
489 {
490 if (! INSN_P (insn))
491 continue;
492
493 if (find_reg_note (insn, REG_DEAD, reg))
494 return 1;
495
496 if (GET_CODE (insn) == CALL_INSN && ! RTX_UNCHANGING_P (memref)
497 && ! CONST_OR_PURE_CALL_P (insn))
498 return 0;
499
500 note_stores (PATTERN (insn), validate_equiv_mem_from_store, NULL);
501
502 /* If a register mentioned in MEMREF is modified via an
503 auto-increment, we lose the equivalence. Do the same if one
504 dies; although we could extend the life, it doesn't seem worth
505 the trouble. */
506
507 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
508 if ((REG_NOTE_KIND (note) == REG_INC
509 || REG_NOTE_KIND (note) == REG_DEAD)
510 && GET_CODE (XEXP (note, 0)) == REG
511 && reg_overlap_mentioned_p (XEXP (note, 0), memref))
512 return 0;
513 }
514
515 return 0;
516 }
517
518 /* Returns nonzero if X might vary; returns zero if X is known to be invariant. */
519
520 static int
521 equiv_init_varies_p (x)
522 rtx x;
523 {
524 RTX_CODE code = GET_CODE (x);
525 int i;
526 const char *fmt;
527
528 switch (code)
529 {
530 case MEM:
531 return ! RTX_UNCHANGING_P (x) || equiv_init_varies_p (XEXP (x, 0));
532
533 case QUEUED:
534 return 1;
535
536 case CONST:
537 case CONST_INT:
538 case CONST_DOUBLE:
539 case CONST_VECTOR:
540 case SYMBOL_REF:
541 case LABEL_REF:
542 return 0;
543
544 case REG:
545 return reg_equiv[REGNO (x)].replace == 0 && rtx_varies_p (x, 0);
546
547 case ASM_OPERANDS:
548 if (MEM_VOLATILE_P (x))
549 return 1;
550
551 /* FALLTHROUGH */
552
553 default:
554 break;
555 }
556
557 fmt = GET_RTX_FORMAT (code);
558 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
559 if (fmt[i] == 'e')
560 {
561 if (equiv_init_varies_p (XEXP (x, i)))
562 return 1;
563 }
564 else if (fmt[i] == 'E')
565 {
566 int j;
567 for (j = 0; j < XVECLEN (x, i); j++)
568 if (equiv_init_varies_p (XVECEXP (x, i, j)))
569 return 1;
570 }
571
572 return 0;
573 }
574
575 /* Returns nonzero if X (used to initialize register REGNO) is movable.
576 X is only movable if the registers it uses have equivalent initializations
577 that appear to be within the same loop (or in an inner loop) and are themselves
578 movable, or if they are not candidates for local_alloc and don't vary. */
579
580 static int
581 equiv_init_movable_p (x, regno)
582 rtx x;
583 int regno;
584 {
585 int i, j;
586 const char *fmt;
587 enum rtx_code code = GET_CODE (x);
588
589 switch (code)
590 {
591 case SET:
592 return equiv_init_movable_p (SET_SRC (x), regno);
593
594 case CC0:
595 case CLOBBER:
596 return 0;
597
598 case PRE_INC:
599 case PRE_DEC:
600 case POST_INC:
601 case POST_DEC:
602 case PRE_MODIFY:
603 case POST_MODIFY:
604 return 0;
605
606 case REG:
607 return (reg_equiv[REGNO (x)].loop_depth >= reg_equiv[regno].loop_depth
608 && reg_equiv[REGNO (x)].replace)
609 || (REG_BASIC_BLOCK (REGNO (x)) < 0 && ! rtx_varies_p (x, 0));
610
611 case UNSPEC_VOLATILE:
612 return 0;
613
614 case ASM_OPERANDS:
615 if (MEM_VOLATILE_P (x))
616 return 0;
617
618 /* FALLTHROUGH */
619
620 default:
621 break;
622 }
623
624 fmt = GET_RTX_FORMAT (code);
625 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
626 switch (fmt[i])
627 {
628 case 'e':
629 if (! equiv_init_movable_p (XEXP (x, i), regno))
630 return 0;
631 break;
632 case 'E':
633 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
634 if (! equiv_init_movable_p (XVECEXP (x, i, j), regno))
635 return 0;
636 break;
637 }
638
639 return 1;
640 }
641
642 /* TRUE if X uses any registers for which reg_equiv[REGNO].replace is true. */
643
644 static int
645 contains_replace_regs (x)
646 rtx x;
647 {
648 int i, j;
649 const char *fmt;
650 enum rtx_code code = GET_CODE (x);
651
652 switch (code)
653 {
654 case CONST_INT:
655 case CONST:
656 case LABEL_REF:
657 case SYMBOL_REF:
658 case CONST_DOUBLE:
659 case CONST_VECTOR:
660 case PC:
661 case CC0:
662 case HIGH:
663 return 0;
664
665 case REG:
666 return reg_equiv[REGNO (x)].replace;
667
668 default:
669 break;
670 }
671
672 fmt = GET_RTX_FORMAT (code);
673 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
674 switch (fmt[i])
675 {
676 case 'e':
677 if (contains_replace_regs (XEXP (x, i)))
678 return 1;
679 break;
680 case 'E':
681 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
682 if (contains_replace_regs (XVECEXP (x, i, j)))
683 return 1;
684 break;
685 }
686
687 return 0;
688 }
689 \f
690 /* TRUE if X references a memory location that would be affected by a store
691 to MEMREF. */
692
693 static int
694 memref_referenced_p (memref, x)
695 rtx x;
696 rtx memref;
697 {
698 int i, j;
699 const char *fmt;
700 enum rtx_code code = GET_CODE (x);
701
702 switch (code)
703 {
704 case CONST_INT:
705 case CONST:
706 case LABEL_REF:
707 case SYMBOL_REF:
708 case CONST_DOUBLE:
709 case CONST_VECTOR:
710 case PC:
711 case CC0:
712 case HIGH:
713 case LO_SUM:
714 return 0;
715
716 case REG:
717 return (reg_equiv[REGNO (x)].replacement
718 && memref_referenced_p (memref,
719 reg_equiv[REGNO (x)].replacement));
720
721 case MEM:
722 if (true_dependence (memref, VOIDmode, x, rtx_varies_p))
723 return 1;
724 break;
725
726 case SET:
727 /* If we are setting a MEM, it doesn't count (its address does), but any
728 other SET_DEST that has a MEM in it is referencing the MEM. */
729 if (GET_CODE (SET_DEST (x)) == MEM)
730 {
731 if (memref_referenced_p (memref, XEXP (SET_DEST (x), 0)))
732 return 1;
733 }
734 else if (memref_referenced_p (memref, SET_DEST (x)))
735 return 1;
736
737 return memref_referenced_p (memref, SET_SRC (x));
738
739 default:
740 break;
741 }
742
743 fmt = GET_RTX_FORMAT (code);
744 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
745 switch (fmt[i])
746 {
747 case 'e':
748 if (memref_referenced_p (memref, XEXP (x, i)))
749 return 1;
750 break;
751 case 'E':
752 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
753 if (memref_referenced_p (memref, XVECEXP (x, i, j)))
754 return 1;
755 break;
756 }
757
758 return 0;
759 }
760
761 /* TRUE if some insn in the range (START, END] references a memory location
762 that would be affected by a store to MEMREF. */
763
764 static int
765 memref_used_between_p (memref, start, end)
766 rtx memref;
767 rtx start;
768 rtx end;
769 {
770 rtx insn;
771
772 for (insn = NEXT_INSN (start); insn != NEXT_INSN (end);
773 insn = NEXT_INSN (insn))
774 if (INSN_P (insn) && memref_referenced_p (memref, PATTERN (insn)))
775 return 1;
776
777 return 0;
778 }
779 \f
780 /* Return nonzero if the rtx X is invariant over the current function. */
781 /* ??? Actually, the places this is used in reload expect exactly what
782 is tested here, and not everything that is function invariant. In
783 particular, the frame pointer and arg pointer are special cased;
784 pic_offset_table_rtx is not, and this will cause aborts when we
785 go to spill these things to memory. */
786
787 int
788 function_invariant_p (x)
789 rtx x;
790 {
791 if (CONSTANT_P (x))
792 return 1;
793 if (x == frame_pointer_rtx || x == arg_pointer_rtx)
794 return 1;
795 if (GET_CODE (x) == PLUS
796 && (XEXP (x, 0) == frame_pointer_rtx || XEXP (x, 0) == arg_pointer_rtx)
797 && CONSTANT_P (XEXP (x, 1)))
798 return 1;
799 return 0;
800 }
801
802 /* Find registers that are equivalent to a single value throughout the
803 compilation (either because they can be referenced in memory or are set once
804 from a single constant). Lower their priority for a register.
805
806 If such a register is only referenced once, try substituting its value
807 into the using insn. If it succeeds, we can eliminate the register
808 completely. */
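/* For example (hypothetical pseudo number): if pseudo 103 is set exactly
   once from a constant, or from a memory location that remains unchanged
   for its whole life, the setting insn gets a REG_EQUIV note.  If 103 is
   then used exactly once, that use may be replaced by the equivalent
   value and the setting insn deleted, eliminating the register.  */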
809
810 static void
811 update_equiv_regs ()
812 {
813 rtx insn;
814 basic_block bb;
815 int loop_depth;
816 regset_head cleared_regs;
817 int clear_regnos = 0;
818
819 reg_equiv = (struct equivalence *) xcalloc (max_regno, sizeof *reg_equiv);
820 INIT_REG_SET (&cleared_regs);
821
822 init_alias_analysis ();
823
824 /* Scan the insns and find which registers have equivalences. Do this
825 in a separate scan of the insns because (due to -fcse-follow-jumps)
826 a register can be set below its use. */
827 FOR_EACH_BB (bb)
828 {
829 loop_depth = bb->loop_depth;
830
831 for (insn = bb->head; insn != NEXT_INSN (bb->end); insn = NEXT_INSN (insn))
832 {
833 rtx note;
834 rtx set;
835 rtx dest, src;
836 int regno;
837
838 if (! INSN_P (insn))
839 continue;
840
841 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
842 if (REG_NOTE_KIND (note) == REG_INC)
843 no_equiv (XEXP (note, 0), note, NULL);
844
845 set = single_set (insn);
846
847 /* If this insn contains more (or less) than a single SET,
848 only mark all destinations as having no known equivalence. */
849 if (set == 0)
850 {
851 note_stores (PATTERN (insn), no_equiv, NULL);
852 continue;
853 }
854 else if (GET_CODE (PATTERN (insn)) == PARALLEL)
855 {
856 int i;
857
858 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
859 {
860 rtx part = XVECEXP (PATTERN (insn), 0, i);
861 if (part != set)
862 note_stores (part, no_equiv, NULL);
863 }
864 }
865
866 dest = SET_DEST (set);
867 src = SET_SRC (set);
868
869 /* If this sets a MEM to the contents of a REG that is only used
870 in a single basic block, see if the register is always equivalent
871 to that memory location and if moving the store from INSN to the
872 insn that set REG is safe. If so, put a REG_EQUIV note on the
873 initializing insn.
874
875 Don't add a REG_EQUIV note if the insn already has one. The existing
876 REG_EQUIV is likely more useful than the one we are adding.
877
878 If one of the regs in the address has reg_equiv[REGNO].replace set,
879 then we can't add this REG_EQUIV note. The reg_equiv[REGNO].replace
880 optimization may move the set of this register immediately before
881 insn, which puts it after reg_equiv[REGNO].init_insns, and hence
882 the mention in the REG_EQUIV note would be to an uninitialized
883 pseudo. */
884 /* ????? This test isn't good enough; we might see a MEM with a use of
885 a pseudo register before we see its setting insn that will cause
886 reg_equiv[].replace for that pseudo to be set.
887 Equivalences to MEMs should be made in another pass, after the
888 reg_equiv[].replace information has been gathered. */
889
890 if (GET_CODE (dest) == MEM && GET_CODE (src) == REG
891 && (regno = REGNO (src)) >= FIRST_PSEUDO_REGISTER
892 && REG_BASIC_BLOCK (regno) >= 0
893 && REG_N_SETS (regno) == 1
894 && reg_equiv[regno].init_insns != 0
895 && reg_equiv[regno].init_insns != const0_rtx
896 && ! find_reg_note (XEXP (reg_equiv[regno].init_insns, 0),
897 REG_EQUIV, NULL_RTX)
898 && ! contains_replace_regs (XEXP (dest, 0)))
899 {
900 rtx init_insn = XEXP (reg_equiv[regno].init_insns, 0);
901 if (validate_equiv_mem (init_insn, src, dest)
902 && ! memref_used_between_p (dest, init_insn, insn))
903 REG_NOTES (init_insn)
904 = gen_rtx_EXPR_LIST (REG_EQUIV, dest, REG_NOTES (init_insn));
905 }
906
907 /* We only handle the case of a pseudo register being set
908 once, or always to the same value. */
909 /* ??? The mn10200 port breaks if we add equivalences for
910 values that need an ADDRESS_REGS register and set them equivalent
911 to a MEM of a pseudo. The actual problem is in the over-conservative
912 handling of INPADDR_ADDRESS / INPUT_ADDRESS / INPUT triples in
913 calculate_needs, but we traditionally work around this problem
914 here by rejecting equivalences when the destination is in a register
915 that's likely spilled. This is fragile, of course, since the
916 preferred class of a pseudo depends on all instructions that set
917 or use it. */
918
919 if (GET_CODE (dest) != REG
920 || (regno = REGNO (dest)) < FIRST_PSEUDO_REGISTER
921 || reg_equiv[regno].init_insns == const0_rtx
922 || (CLASS_LIKELY_SPILLED_P (reg_preferred_class (regno))
923 && GET_CODE (src) == MEM))
924 {
925 /* This might be setting a SUBREG of a pseudo, a pseudo that is
926 also set somewhere else to a constant. */
927 note_stores (set, no_equiv, NULL);
928 continue;
929 }
930
931 note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
932
933 /* cse sometimes generates function invariants, but doesn't put a
934 REG_EQUAL note on the insn. Since this note would be redundant,
935 there's no point creating it earlier than here. */
936 if (! note && ! rtx_varies_p (src, 0))
937 note = set_unique_reg_note (insn, REG_EQUAL, src);
938
939 /* Don't bother considering a REG_EQUAL note containing an EXPR_LIST
940 since it represents a function call. */
941 if (note && GET_CODE (XEXP (note, 0)) == EXPR_LIST)
942 note = NULL_RTX;
943
944 if (REG_N_SETS (regno) != 1
945 && (! note
946 || rtx_varies_p (XEXP (note, 0), 0)
947 || (reg_equiv[regno].replacement
948 && ! rtx_equal_p (XEXP (note, 0),
949 reg_equiv[regno].replacement))))
950 {
951 no_equiv (dest, set, NULL);
952 continue;
953 }
954 /* Record this insn as initializing this register. */
955 reg_equiv[regno].init_insns
956 = gen_rtx_INSN_LIST (VOIDmode, insn, reg_equiv[regno].init_insns);
957
958 /* If this register is known to be equal to a constant, record that
959 it is always equivalent to the constant. */
960 if (note && ! rtx_varies_p (XEXP (note, 0), 0))
961 PUT_MODE (note, (enum machine_mode) REG_EQUIV);
962
963 /* If this insn introduces a "constant" register, decrease the priority
964 of that register. Record this insn if the register is only used once
965 more and the equivalence value is the same as our source.
966
967 The latter condition is checked for two reasons: First, it is an
968 indication that it may be more efficient to actually emit the insn
969 as written (if no registers are available, reload will substitute
970 the equivalence). Secondly, it avoids problems with any registers
971 dying in this insn whose death notes would be missed.
972
973 If we don't have a REG_EQUIV note, see if this insn is loading
974 a register used only in one basic block from a MEM. If so, and the
975 MEM remains unchanged for the life of the register, add a REG_EQUIV
976 note. */
977
978 note = find_reg_note (insn, REG_EQUIV, NULL_RTX);
979
980 if (note == 0 && REG_BASIC_BLOCK (regno) >= 0
981 && GET_CODE (SET_SRC (set)) == MEM
982 && validate_equiv_mem (insn, dest, SET_SRC (set)))
983 REG_NOTES (insn) = note = gen_rtx_EXPR_LIST (REG_EQUIV, SET_SRC (set),
984 REG_NOTES (insn));
985
986 if (note)
987 {
988 int regno = REGNO (dest);
989
990 /* Record whether or not we created a REG_EQUIV note for a LABEL_REF.
991 We might end up substituting the LABEL_REF for uses of the
992 pseudo here or later. That kind of transformation may turn an
993 indirect jump into a direct jump, in which case we must rerun the
994 jump optimizer to ensure that the JUMP_LABEL fields are valid. */
995 if (GET_CODE (XEXP (note, 0)) == LABEL_REF
996 || (GET_CODE (XEXP (note, 0)) == CONST
997 && GET_CODE (XEXP (XEXP (note, 0), 0)) == PLUS
998 && (GET_CODE (XEXP (XEXP (XEXP (note, 0), 0), 0))
999 == LABEL_REF)))
1000 recorded_label_ref = 1;
1001
1002 reg_equiv[regno].replacement = XEXP (note, 0);
1003 reg_equiv[regno].src_p = &SET_SRC (set);
1004 reg_equiv[regno].loop_depth = loop_depth;
1005
1006 /* Don't mess with things live during setjmp. */
1007 if (REG_LIVE_LENGTH (regno) >= 0 && optimize)
1008 {
1009 /* Note that the statement below does not affect the priority
1010 in local-alloc! */
1011 REG_LIVE_LENGTH (regno) *= 2;
1012
1013
1014 /* If the register is referenced exactly twice, meaning it is
1015 set once and used once, indicate that the reference may be
1016 replaced by the equivalence we computed above. Do this
1017 even if the register is only used in one block so that
1018 dependencies can be handled where the last register is
1019 used in a different block (i.e. HIGH / LO_SUM sequences)
1020 and to reduce the number of registers alive across
1021 calls. */
1022
1023 if (REG_N_REFS (regno) == 2
1024 && (rtx_equal_p (XEXP (note, 0), src)
1025 || ! equiv_init_varies_p (src))
1026 && GET_CODE (insn) == INSN
1027 && equiv_init_movable_p (PATTERN (insn), regno))
1028 reg_equiv[regno].replace = 1;
1029 }
1030 }
1031 }
1032 }
1033
1034 /* Now scan all regs killed in an insn to see if any of them are
1035 registers used only once. If so, see if we can replace the
1036 reference with the equivalent form. If we can, delete the
1037 initializing reference and this register will go away. If we
1038 can't replace the reference, and the initializing reference is
1039 within the same loop (or in an inner loop), then move the register
1040 initialization just before the use, so that they are in the same
1041 basic block. */
1042 FOR_EACH_BB_REVERSE (bb)
1043 {
1044 loop_depth = bb->loop_depth;
1045 for (insn = bb->end; insn != PREV_INSN (bb->head); insn = PREV_INSN (insn))
1046 {
1047 rtx link;
1048
1049 if (! INSN_P (insn))
1050 continue;
1051
1052 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1053 {
1054 if (REG_NOTE_KIND (link) == REG_DEAD
1055 /* Make sure this insn still refers to the register. */
1056 && reg_mentioned_p (XEXP (link, 0), PATTERN (insn)))
1057 {
1058 int regno = REGNO (XEXP (link, 0));
1059 rtx equiv_insn;
1060
1061 if (! reg_equiv[regno].replace
1062 || reg_equiv[regno].loop_depth < loop_depth)
1063 continue;
1064
1065 /* reg_equiv[REGNO].replace gets set only when
1066 REG_N_REFS[REGNO] is 2, i.e. the register is set
1067 once and used once. (If it were only set, but not used,
1068 flow would have deleted the setting insns.) Hence
1069 there can only be one insn in reg_equiv[REGNO].init_insns. */
1070 if (reg_equiv[regno].init_insns == NULL_RTX
1071 || XEXP (reg_equiv[regno].init_insns, 1) != NULL_RTX)
1072 abort ();
1073 equiv_insn = XEXP (reg_equiv[regno].init_insns, 0);
1074
1075 /* We may not move instructions that can throw, since
1076 that changes basic block boundaries and we are not
1077 prepared to adjust the CFG to match. */
1078 if (can_throw_internal (equiv_insn))
1079 continue;
1080
1081 if (asm_noperands (PATTERN (equiv_insn)) < 0
1082 && validate_replace_rtx (regno_reg_rtx[regno],
1083 *(reg_equiv[regno].src_p), insn))
1084 {
1085 rtx equiv_link;
1086 rtx last_link;
1087 rtx note;
1088
1089 /* Find the last note. */
1090 for (last_link = link; XEXP (last_link, 1);
1091 last_link = XEXP (last_link, 1))
1092 ;
1093
1094 /* Append the REG_DEAD notes from equiv_insn. */
1095 equiv_link = REG_NOTES (equiv_insn);
1096 while (equiv_link)
1097 {
1098 note = equiv_link;
1099 equiv_link = XEXP (equiv_link, 1);
1100 if (REG_NOTE_KIND (note) == REG_DEAD)
1101 {
1102 remove_note (equiv_insn, note);
1103 XEXP (last_link, 1) = note;
1104 XEXP (note, 1) = NULL_RTX;
1105 last_link = note;
1106 }
1107 }
1108
1109 remove_death (regno, insn);
1110 REG_N_REFS (regno) = 0;
1111 REG_FREQ (regno) = 0;
1112 delete_insn (equiv_insn);
1113
1114 reg_equiv[regno].init_insns
1115 = XEXP (reg_equiv[regno].init_insns, 1);
1116 }
1117 /* Move the initialization of the register to just before
1118 INSN. Update the flow information. */
1119 else if (PREV_INSN (insn) != equiv_insn)
1120 {
1121 rtx new_insn;
1122
1123 new_insn = emit_insn_before (PATTERN (equiv_insn), insn);
1124 REG_NOTES (new_insn) = REG_NOTES (equiv_insn);
1125 REG_NOTES (equiv_insn) = 0;
1126
1127 /* Make sure this insn is recognized before reload begins,
1128 otherwise eliminate_regs_in_insn will abort. */
1129 INSN_CODE (new_insn) = INSN_CODE (equiv_insn);
1130
1131 delete_insn (equiv_insn);
1132
1133 XEXP (reg_equiv[regno].init_insns, 0) = new_insn;
1134
1135 REG_BASIC_BLOCK (regno) = bb->index;
1136 REG_N_CALLS_CROSSED (regno) = 0;
1137 REG_LIVE_LENGTH (regno) = 2;
1138
1139 if (insn == bb->head)
1140 bb->head = PREV_INSN (insn);
1141
1142 /* Remember to clear REGNO from all basic blocks' live
1143 info. */
1144 SET_REGNO_REG_SET (&cleared_regs, regno);
1145 clear_regnos++;
1146 }
1147 }
1148 }
1149 }
1150 }
1151
1152 /* Clear all dead REGNOs from all basic blocks' live info. */
1153 if (clear_regnos)
1154 {
1155 int j;
1156 if (clear_regnos > 8)
1157 {
1158 FOR_EACH_BB (bb)
1159 {
1160 AND_COMPL_REG_SET (bb->global_live_at_start, &cleared_regs);
1161 AND_COMPL_REG_SET (bb->global_live_at_end, &cleared_regs);
1162 }
1163 }
1164 else
1165 EXECUTE_IF_SET_IN_REG_SET (&cleared_regs, 0, j,
1166 {
1167 FOR_EACH_BB (bb)
1168 {
1169 CLEAR_REGNO_REG_SET (bb->global_live_at_start, j);
1170 CLEAR_REGNO_REG_SET (bb->global_live_at_end, j);
1171 }
1172 });
1173 }
1174
1175 /* Clean up. */
1176 end_alias_analysis ();
1177 CLEAR_REG_SET (&cleared_regs);
1178 free (reg_equiv);
1179 }
1180
1181 /* Mark REG as having no known equivalence.
1182 Some instructions might have been processed before and furnished
1183 with REG_EQUIV notes for this register; these notes will have to be
1184 removed.
1185 STORE is the piece of RTL that does the non-constant / conflicting
1186 assignment - a SET, CLOBBER or REG_INC note. It is currently not used,
1187 but needs to be there because this function is called from note_stores. */
1188 static void
1189 no_equiv (reg, store, data)
1190 rtx reg, store ATTRIBUTE_UNUSED;
1191 void *data ATTRIBUTE_UNUSED;
1192 {
1193 int regno;
1194 rtx list;
1195
1196 if (GET_CODE (reg) != REG)
1197 return;
1198 regno = REGNO (reg);
1199 list = reg_equiv[regno].init_insns;
1200 if (list == const0_rtx)
1201 return;
1202 for (; list; list = XEXP (list, 1))
1203 {
1204 rtx insn = XEXP (list, 0);
1205 remove_note (insn, find_reg_note (insn, REG_EQUIV, NULL_RTX));
1206 }
1207 reg_equiv[regno].init_insns = const0_rtx;
1208 reg_equiv[regno].replacement = NULL_RTX;
1209 }
1210 \f
1211 /* Allocate hard regs to the pseudo regs used only within block number B.
1212 Only the pseudos that die but once can be handled. */
1213
1214 static void
1215 block_alloc (b)
1216 int b;
1217 {
1218 int i, q;
1219 rtx insn;
1220 rtx note, hard_reg;
1221 int insn_number = 0;
1222 int insn_count = 0;
1223 int max_uid = get_max_uid ();
1224 int *qty_order;
1225 int no_conflict_combined_regno = -1;
1226
1227 /* Count the instructions in the basic block. */
1228
1229 insn = BLOCK_END (b);
1230 while (1)
1231 {
1232 if (GET_CODE (insn) != NOTE)
1233 if (++insn_count > max_uid)
1234 abort ();
1235 if (insn == BLOCK_HEAD (b))
1236 break;
1237 insn = PREV_INSN (insn);
1238 }
1239
1240 /* +2 to leave room for a post_mark_life at the last insn and for
1241 the birth of a CLOBBER in the first insn. */
1242 regs_live_at = (HARD_REG_SET *) xcalloc ((2 * insn_count + 2),
1243 sizeof (HARD_REG_SET));
1244
1245 /* Initialize table of hardware registers currently live. */
1246
1247 REG_SET_TO_HARD_REG_SET (regs_live, BASIC_BLOCK (b)->global_live_at_start);
1248
1249 /* This loop scans the instructions of the basic block
1250 and assigns quantities to registers.
1251 It computes which registers to tie. */
1252
1253 insn = BLOCK_HEAD (b);
1254 while (1)
1255 {
1256 if (GET_CODE (insn) != NOTE)
1257 insn_number++;
1258
1259 if (INSN_P (insn))
1260 {
1261 rtx link, set;
1262 int win = 0;
1263 rtx r0, r1 = NULL_RTX;
1264 int combined_regno = -1;
1265 int i;
1266
1267 this_insn_number = insn_number;
1268 this_insn = insn;
1269
1270 extract_insn (insn);
1271 which_alternative = -1;
1272
1273 /* Is this insn suitable for tying two registers?
1274 If so, try doing that.
1275 Suitable insns are those with at least two operands and where
1276 operand 0 is an output that is a register that is not
1277 earlyclobber.
1278
1279 We can tie operand 0 with some operand that dies in this insn.
1280 First look for operands that are required to be in the same
1281 register as operand 0. If we find such, only try tying that
1282 operand or one that can be put into that operand if the
1283 operation is commutative. If we don't find an operand
1284 that is required to be in the same register as operand 0,
1285 we can tie with any operand.
1286
1287 Subregs in place of regs are also ok.
1288
1289 If tying is done, WIN is set nonzero. */
1290
1291 if (optimize
1292 && recog_data.n_operands > 1
1293 && recog_data.constraints[0][0] == '='
1294 && recog_data.constraints[0][1] != '&')
1295 {
1296 /* If non-negative, the number of an operand that must match operand 0. */
1297 int must_match_0 = -1;
1298 /* Counts number of alternatives that require a match with
1299 operand 0. */
1300 int n_matching_alts = 0;
1301
1302 for (i = 1; i < recog_data.n_operands; i++)
1303 {
1304 const char *p = recog_data.constraints[i];
1305 int this_match = requires_inout (p);
1306
1307 n_matching_alts += this_match;
1308 if (this_match == recog_data.n_alternatives)
1309 must_match_0 = i;
1310 }
1311
1312 r0 = recog_data.operand[0];
1313 for (i = 1; i < recog_data.n_operands; i++)
1314 {
1315 /* Skip this operand if we found an operand that
1316 must match operand 0 and this operand isn't it
1317 and can't be made to be it by commutativity. */
1318
1319 if (must_match_0 >= 0 && i != must_match_0
1320 && ! (i == must_match_0 + 1
1321 && recog_data.constraints[i-1][0] == '%')
1322 && ! (i == must_match_0 - 1
1323 && recog_data.constraints[i][0] == '%'))
1324 continue;
1325
1326 /* Likewise if each alternative has some operand that
1327 must match operand zero. In that case, skip any
1328 operand that doesn't list operand 0 since we know that
1329 the operand always conflicts with operand 0. We
1330 ignore commutativity in this case to keep things simple. */
1331 if (n_matching_alts == recog_data.n_alternatives
1332 && 0 == requires_inout (recog_data.constraints[i]))
1333 continue;
1334
1335 r1 = recog_data.operand[i];
1336
1337 /* If the operand is an address, find a register in it.
1338 There may be more than one register, but we only try one
1339 of them. */
1340 if (recog_data.constraints[i][0] == 'p'
1341 || EXTRA_ADDRESS_CONSTRAINT (recog_data.constraints[i][0],
1342 recog_data.constraints[i]))
1343 while (GET_CODE (r1) == PLUS || GET_CODE (r1) == MULT)
1344 r1 = XEXP (r1, 0);
1345
1346 /* Avoid making a call-saved register unnecessarily
1347 clobbered. */
1348 hard_reg = get_hard_reg_initial_reg (cfun, r1);
1349 if (hard_reg != NULL_RTX)
1350 {
1351 if (GET_CODE (hard_reg) == REG
1352 && IN_RANGE (REGNO (hard_reg),
1353 0, FIRST_PSEUDO_REGISTER - 1)
1354 && ! call_used_regs[REGNO (hard_reg)])
1355 continue;
1356 }
1357
1358 if (GET_CODE (r0) == REG || GET_CODE (r0) == SUBREG)
1359 {
1360 /* We have two priorities for hard register preferences.
1361 If we have a move insn or an insn whose first input
1362 can only be in the same register as the output, give
1363 priority to an equivalence found from that insn. */
1364 int may_save_copy
1365 = (r1 == recog_data.operand[i] && must_match_0 >= 0);
1366
1367 if (GET_CODE (r1) == REG || GET_CODE (r1) == SUBREG)
1368 win = combine_regs (r1, r0, may_save_copy,
1369 insn_number, insn, 0);
1370 }
1371 if (win)
1372 break;
1373 }
1374 }
1375
1376 /* Recognize an insn sequence with an ultimate result
1377 which can safely overlap one of the inputs.
1378 The sequence begins with a CLOBBER of its result,
1379 and ends with an insn that copies the result to itself
1380 and has a REG_EQUAL note for an equivalent formula.
1381 That note indicates what the inputs are.
1382 The result and the input can overlap if each insn in
1383 the sequence either doesn't mention the input
1384 or has a REG_NO_CONFLICT note to inhibit the conflict.
1385
1386 We do the combining test at the CLOBBER so that the
1387 destination register won't have had a quantity number
1388 assigned, since that would prevent combining. */
1389
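/* Shape of such a sequence (simplified, hypothetical register number):
   (clobber (reg 104))            -- carries a REG_LIBCALL note pointing
                                     at the last insn of the sequence
   ... insns computing into (reg 104), each either not mentioning the
   inputs or carrying a REG_NO_CONFLICT note ...
   (set (reg 104) (reg 104))      -- carries REG_RETVAL and a REG_EQUAL
                                     note giving the formula.  */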
1390 if (optimize
1391 && GET_CODE (PATTERN (insn)) == CLOBBER
1392 && (r0 = XEXP (PATTERN (insn), 0),
1393 GET_CODE (r0) == REG)
1394 && (link = find_reg_note (insn, REG_LIBCALL, NULL_RTX)) != 0
1395 && XEXP (link, 0) != 0
1396 && GET_CODE (XEXP (link, 0)) == INSN
1397 && (set = single_set (XEXP (link, 0))) != 0
1398 && SET_DEST (set) == r0 && SET_SRC (set) == r0
1399 && (note = find_reg_note (XEXP (link, 0), REG_EQUAL,
1400 NULL_RTX)) != 0)
1401 {
1402 if (r1 = XEXP (note, 0), GET_CODE (r1) == REG
1403 /* Check that we have such a sequence. */
1404 && no_conflict_p (insn, r0, r1))
1405 win = combine_regs (r1, r0, 1, insn_number, insn, 1);
1406 else if (GET_RTX_FORMAT (GET_CODE (XEXP (note, 0)))[0] == 'e'
1407 && (r1 = XEXP (XEXP (note, 0), 0),
1408 GET_CODE (r1) == REG || GET_CODE (r1) == SUBREG)
1409 && no_conflict_p (insn, r0, r1))
1410 win = combine_regs (r1, r0, 0, insn_number, insn, 1);
1411
1412 /* Here we care if the operation to be computed is
1413 commutative. */
1414 else if ((GET_CODE (XEXP (note, 0)) == EQ
1415 || GET_CODE (XEXP (note, 0)) == NE
1416 || GET_RTX_CLASS (GET_CODE (XEXP (note, 0))) == 'c')
1417 && (r1 = XEXP (XEXP (note, 0), 1),
1418 (GET_CODE (r1) == REG || GET_CODE (r1) == SUBREG))
1419 && no_conflict_p (insn, r0, r1))
1420 win = combine_regs (r1, r0, 0, insn_number, insn, 1);
1421
1422 /* If we did combine something, show the register number
1423 in question so that we know to ignore its death. */
1424 if (win)
1425 no_conflict_combined_regno = REGNO (r1);
1426 }
1427
1428 /* If registers were just tied, set COMBINED_REGNO
1429 to the number of the register used in this insn
1430 that was tied to the register set in this insn.
1431 This register's qty should not be "killed". */
1432
1433 if (win)
1434 {
1435 while (GET_CODE (r1) == SUBREG)
1436 r1 = SUBREG_REG (r1);
1437 combined_regno = REGNO (r1);
1438 }
1439
1440 /* Mark the death of everything that dies in this instruction,
1441 except for anything that was just combined. */
1442
1443 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1444 if (REG_NOTE_KIND (link) == REG_DEAD
1445 && GET_CODE (XEXP (link, 0)) == REG
1446 && combined_regno != (int) REGNO (XEXP (link, 0))
1447 && (no_conflict_combined_regno != (int) REGNO (XEXP (link, 0))
1448 || ! find_reg_note (insn, REG_NO_CONFLICT,
1449 XEXP (link, 0))))
1450 wipe_dead_reg (XEXP (link, 0), 0);
1451
1452 /* Allocate qty numbers for all registers local to this block
1453 that are born (set) in this instruction.
1454 A pseudo that already has a qty is not changed. */
1455
1456 note_stores (PATTERN (insn), reg_is_set, NULL);
1457
1458 /* If anything is set in this insn and then unused, mark it as dying
1459 after this insn, so it will conflict with our outputs. This
1460 can't match with something that combined, and it doesn't matter
1461 if it did. Do this after the calls to reg_is_set since these
1462 die after, not during, the current insn. */
1463
1464 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1465 if (REG_NOTE_KIND (link) == REG_UNUSED
1466 && GET_CODE (XEXP (link, 0)) == REG)
1467 wipe_dead_reg (XEXP (link, 0), 1);
1468
1469 /* If this is an insn that has a REG_RETVAL note pointing at a
1470 CLOBBER insn, we have reached the end of a REG_NO_CONFLICT
1471 block, so clear any register number that combined within it. */
1472 if ((note = find_reg_note (insn, REG_RETVAL, NULL_RTX)) != 0
1473 && GET_CODE (XEXP (note, 0)) == INSN
1474 && GET_CODE (PATTERN (XEXP (note, 0))) == CLOBBER)
1475 no_conflict_combined_regno = -1;
1476 }
1477
1478 /* Set the registers live after INSN_NUMBER. Note that we never
1479 record the registers live before the block's first insn, since no
1480 pseudos we care about are live before that insn. */
1481
1482 IOR_HARD_REG_SET (regs_live_at[2 * insn_number], regs_live);
1483 IOR_HARD_REG_SET (regs_live_at[2 * insn_number + 1], regs_live);
1484
1485 if (insn == BLOCK_END (b))
1486 break;
1487
1488 insn = NEXT_INSN (insn);
1489 }
1490
1491 /* Now every register that is local to this basic block
1492 should have been given a quantity, or else -1 meaning ignore it.
1493 Every quantity should have a known birth and death.
1494
1495 Order the qtys so we assign them registers in order of the
1496 number of suggested registers they need so we allocate those with
1497 the most restrictive needs first. */
1498
1499 qty_order = (int *) xmalloc (next_qty * sizeof (int));
1500 for (i = 0; i < next_qty; i++)
1501 qty_order[i] = i;
1502
1503 #define EXCHANGE(I1, I2) \
1504 { i = qty_order[I1]; qty_order[I1] = qty_order[I2]; qty_order[I2] = i; }
1505
1506 switch (next_qty)
1507 {
1508 case 3:
1509 /* Make qty_order[2] be the one to allocate last. */
1510 if (qty_sugg_compare (0, 1) > 0)
1511 EXCHANGE (0, 1);
1512 if (qty_sugg_compare (1, 2) > 0)
1513 EXCHANGE (2, 1);
1514
1515 /* ... Fall through ... */
1516 case 2:
1517 /* Put the best one to allocate in qty_order[0]. */
1518 if (qty_sugg_compare (0, 1) > 0)
1519 EXCHANGE (0, 1);
1520
1521 /* ... Fall through ... */
1522
1523 case 1:
1524 case 0:
1525 /* Nothing to do here. */
1526 break;
1527
1528 default:
1529 qsort (qty_order, next_qty, sizeof (int), qty_sugg_compare_1);
1530 }
1531
1532 /* Try to put each quantity in a suggested physical register, if it has one.
1533 This may cause registers to be allocated that otherwise wouldn't be, but
1534 this seems acceptable in local allocation (unlike global allocation). */
1535 for (i = 0; i < next_qty; i++)
1536 {
1537 q = qty_order[i];
1538 if (qty_phys_num_sugg[q] != 0 || qty_phys_num_copy_sugg[q] != 0)
1539 qty[q].phys_reg = find_free_reg (qty[q].min_class, qty[q].mode, q,
1540 0, 1, qty[q].birth, qty[q].death);
1541 else
1542 qty[q].phys_reg = -1;
1543 }
1544
1545 /* Order the qtys so we assign them registers in order of
1546 decreasing length of life. Normally call qsort, but if we
1547 have only a very small number of quantities, sort them ourselves. */
1548
1549 for (i = 0; i < next_qty; i++)
1550 qty_order[i] = i;
1551
1552 #define EXCHANGE(I1, I2) \
1553 { i = qty_order[I1]; qty_order[I1] = qty_order[I2]; qty_order[I2] = i; }
1554
1555 switch (next_qty)
1556 {
1557 case 3:
1558 /* Make qty_order[2] be the one to allocate last. */
1559 if (qty_compare (0, 1) > 0)
1560 EXCHANGE (0, 1);
1561 if (qty_compare (1, 2) > 0)
1562 EXCHANGE (2, 1);
1563
1564 /* ... Fall through ... */
1565 case 2:
1566 /* Put the best one to allocate in qty_order[0]. */
1567 if (qty_compare (0, 1) > 0)
1568 EXCHANGE (0, 1);
1569
1570 /* ... Fall through ... */
1571
1572 case 1:
1573 case 0:
1574 /* Nothing to do here. */
1575 break;
1576
1577 default:
1578 qsort (qty_order, next_qty, sizeof (int), qty_compare_1);
1579 }
1580
1581 /* Now for each qty that is not a hardware register,
1582 look for a hardware register to put it in.
1583 First try the register class that is cheapest for this qty,
1584 if there is more than one class. */
1585
1586 for (i = 0; i < next_qty; i++)
1587 {
1588 q = qty_order[i];
1589 if (qty[q].phys_reg < 0)
1590 {
1591 #ifdef INSN_SCHEDULING
1592 /* These values represent the adjusted lifetime of a qty so
1593 that it conflicts with qtys which appear near the start/end
1594 of this qty's lifetime.
1595
1596 The purpose behind extending the lifetime of this qty is to
1597 discourage the register allocator from creating false
1598 dependencies.
1599
1600 The adjustment value is chosen to indicate that this qty
1601 conflicts with all the qtys in the instructions immediately
1602 before and after the lifetime of this qty.
1603
1604 Experiments have shown that higher values tend to hurt
1605 overall code performance.
1606
1607 If allocation using the extended lifetime fails we will try
1608 again with the qty's unadjusted lifetime. */
1609 int fake_birth = MAX (0, qty[q].birth - 2 + qty[q].birth % 2);
1610 int fake_death = MIN (insn_number * 2 + 1,
1611 qty[q].death + 2 - qty[q].death % 2);
1612 #endif
1613
1614 if (N_REG_CLASSES > 1)
1615 {
1616 #ifdef INSN_SCHEDULING
1617 /* We try to avoid using hard registers allocated to qtys which
1618 are born immediately after this qty or die immediately before
1619 this qty.
1620
1621 This optimization is only appropriate when we will run
1622 a scheduling pass after reload and we are not optimizing
1623 for code size. */
1624 if (flag_schedule_insns_after_reload
1625 && !optimize_size
1626 && !SMALL_REGISTER_CLASSES)
1627 {
1628 qty[q].phys_reg = find_free_reg (qty[q].min_class,
1629 qty[q].mode, q, 0, 0,
1630 fake_birth, fake_death);
1631 if (qty[q].phys_reg >= 0)
1632 continue;
1633 }
1634 #endif
1635 qty[q].phys_reg = find_free_reg (qty[q].min_class,
1636 qty[q].mode, q, 0, 0,
1637 qty[q].birth, qty[q].death);
1638 if (qty[q].phys_reg >= 0)
1639 continue;
1640 }
1641
1642 #ifdef INSN_SCHEDULING
1643 /* Similarly, avoid false dependencies. */
1644 if (flag_schedule_insns_after_reload
1645 && !optimize_size
1646 && !SMALL_REGISTER_CLASSES
1647 && qty[q].alternate_class != NO_REGS)
1648 qty[q].phys_reg = find_free_reg (qty[q].alternate_class,
1649 qty[q].mode, q, 0, 0,
1650 fake_birth, fake_death);
1651 #endif
1652 if (qty[q].alternate_class != NO_REGS)
1653 qty[q].phys_reg = find_free_reg (qty[q].alternate_class,
1654 qty[q].mode, q, 0, 0,
1655 qty[q].birth, qty[q].death);
1656 }
1657 }
1658
1659 /* Now propagate the register assignments
1660 to the pseudo regs belonging to the qtys. */
1661
1662 for (q = 0; q < next_qty; q++)
1663 if (qty[q].phys_reg >= 0)
1664 {
1665 for (i = qty[q].first_reg; i >= 0; i = reg_next_in_qty[i])
1666 reg_renumber[i] = qty[q].phys_reg + reg_offset[i];
1667 }
1668
1669 /* Clean up. */
1670 free (regs_live_at);
1671 free (qty_order);
1672 }
1673 \f
1674 /* Compare two quantities' priority for getting real registers.
1675 We give shorter-lived quantities higher priority.
1676 Quantities with more references are also preferred, as are quantities that
1677 require multiple registers. This prioritization is identical to that
1678 done by global-alloc.
1679
1680 We used to give preference to registers with *longer* lives, but using
1681 the same algorithm in both local- and global-alloc can speed up execution
1682 of some programs by as much as a factor of three! */
1683
1684 /* Note that the quotient will never be bigger than
1685 the value of floor_log2 times the maximum number of
1686 times a register can occur in one insn (surely less than 100)
1687 weighted by frequency (max REG_FREQ_MAX).
1688 Multiplying this by 10000/REG_FREQ_MAX can't overflow.
1689 QTY_CMP_PRI is also used by qty_sugg_compare. */
1690
1691 #define QTY_CMP_PRI(q) \
1692 ((int) (((double) (floor_log2 (qty[q].n_refs) * qty[q].freq * qty[q].size) \
1693 / (qty[q].death - qty[q].birth)) * (10000 / REG_FREQ_MAX)))
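/* For example, with this formula a qty with n_refs == 4 (floor_log2 == 2),
   size == 1, frequency F, and a life span of death - birth == 10 gets
   priority (2 * F * 1 / 10) * (10000 / REG_FREQ_MAX); larger values are
   allocated earlier.  */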
1694
1695 static int
1696 qty_compare (q1, q2)
1697 int q1, q2;
1698 {
1699 return QTY_CMP_PRI (q2) - QTY_CMP_PRI (q1);
1700 }
1701
1702 static int
1703 qty_compare_1 (q1p, q2p)
1704 const void *q1p;
1705 const void *q2p;
1706 {
1707 int q1 = *(const int *) q1p, q2 = *(const int *) q2p;
1708 int tem = QTY_CMP_PRI (q2) - QTY_CMP_PRI (q1);
1709
1710 if (tem != 0)
1711 return tem;
1712
1713 /* If qtys are equally good, sort by qty number,
1714 so that the results of qsort leave nothing to chance. */
1715 return q1 - q2;
1716 }
1717 \f
1718 /* Compare two quantities' priority for getting real registers. This version
1719 is called for quantities that have suggested hard registers. First priority
1720 goes to quantities that have copy preferences, then to those that have
1721 normal preferences. Within those groups, quantities with the lower
1722 number of preferences have the highest priority. Of those, we use the same
1723 algorithm as above. */
1724
1725 #define QTY_CMP_SUGG(q) \
1726 (qty_phys_num_copy_sugg[q] \
1727 ? qty_phys_num_copy_sugg[q] \
1728 : qty_phys_num_sugg[q] * FIRST_PSEUDO_REGISTER)
1729
1730 static int
1731 qty_sugg_compare (q1, q2)
1732 int q1, q2;
1733 {
1734 int tem = QTY_CMP_SUGG (q1) - QTY_CMP_SUGG (q2);
1735
1736 if (tem != 0)
1737 return tem;
1738
1739 return QTY_CMP_PRI (q2) - QTY_CMP_PRI (q1);
1740 }
1741
1742 static int
1743 qty_sugg_compare_1 (q1p, q2p)
1744 const void *q1p;
1745 const void *q2p;
1746 {
1747 int q1 = *(const int *) q1p, q2 = *(const int *) q2p;
1748 int tem = QTY_CMP_SUGG (q1) - QTY_CMP_SUGG (q2);
1749
1750 if (tem != 0)
1751 return tem;
1752
1753 tem = QTY_CMP_PRI (q2) - QTY_CMP_PRI (q1);
1754 if (tem != 0)
1755 return tem;
1756
1757 /* If qtys are equally good, sort by qty number,
1758 so that the results of qsort leave nothing to chance. */
1759 return q1 - q2;
1760 }
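/* Worked example (FIRST_PSEUDO_REGISTER == 64 is an assumed figure): a
   quantity with two copy suggestions scores QTY_CMP_SUGG == 2, one with a
   single arithmetic suggestion but no copy suggestion scores 1 * 64 == 64,
   and one with three arithmetic suggestions scores 192.  Since smaller
   scores sort first, copy-suggested quantities are always allocated before
   merely-suggested ones, and within each group those with fewer candidate
   hard regs go first.  */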
1761
1762 #undef QTY_CMP_SUGG
1763 #undef QTY_CMP_PRI
1764 \f
1765 /* Attempt to combine the two registers (rtx's) USEDREG and SETREG.
1766 Returns 1 if we have done so, or 0 if we cannot.
1767
1768 Combining registers means marking them as having the same quantity
1769 and adjusting the offsets within the quantity if either of
1770 them is a SUBREG.
1771
1772 We don't actually combine a hard reg with a pseudo; instead
1773 we just record the hard reg as the suggestion for the pseudo's quantity.
1774 If we really combined them, we could lose if the pseudo lives
1775 across an insn that clobbers the hard reg (e.g., movstr).
1776
1777 ALREADY_DEAD is nonzero if USEDREG is known to be dead even though
1778 there is no REG_DEAD note on INSN. This occurs during the processing
1779 of REG_NO_CONFLICT blocks.
1780
1781 MAY_SAVE_COPY is nonzero if this insn is simply copying USEDREG to
1782 SETREG or if the input and output must share a register.
1783 In that case, we record a hard reg suggestion in QTY_PHYS_COPY_SUGG.
1784
1785 There are elaborate checks for the validity of combining. */
1786
1787 static int
1788 combine_regs (usedreg, setreg, may_save_copy, insn_number, insn, already_dead)
1789 rtx usedreg, setreg;
1790 int may_save_copy;
1791 int insn_number;
1792 rtx insn;
1793 int already_dead;
1794 {
1795 int ureg, sreg;
1796 int offset = 0;
1797 int usize, ssize;
1798 int sqty;
1799
1800 /* Determine the numbers and sizes of registers being used. If a subreg
1801 is present that does not change the entire register, don't consider
1802 this a copy insn. */
1803
1804 while (GET_CODE (usedreg) == SUBREG)
1805 {
1806 rtx subreg = SUBREG_REG (usedreg);
1807
1808 if (GET_CODE (subreg) == REG)
1809 {
1810 if (GET_MODE_SIZE (GET_MODE (subreg)) > UNITS_PER_WORD)
1811 may_save_copy = 0;
1812
1813 if (REGNO (subreg) < FIRST_PSEUDO_REGISTER)
1814 offset += subreg_regno_offset (REGNO (subreg),
1815 GET_MODE (subreg),
1816 SUBREG_BYTE (usedreg),
1817 GET_MODE (usedreg));
1818 else
1819 offset += (SUBREG_BYTE (usedreg)
1820 / REGMODE_NATURAL_SIZE (GET_MODE (usedreg)));
1821 }
1822
1823 usedreg = subreg;
1824 }
1825
1826 if (GET_CODE (usedreg) != REG)
1827 return 0;
1828
1829 ureg = REGNO (usedreg);
1830 if (ureg < FIRST_PSEUDO_REGISTER)
1831 usize = HARD_REGNO_NREGS (ureg, GET_MODE (usedreg));
1832 else
1833 usize = ((GET_MODE_SIZE (GET_MODE (usedreg))
1834 + (REGMODE_NATURAL_SIZE (GET_MODE (usedreg)) - 1))
1835 / REGMODE_NATURAL_SIZE (GET_MODE (usedreg)));
1836
1837 while (GET_CODE (setreg) == SUBREG)
1838 {
1839 rtx subreg = SUBREG_REG (setreg);
1840
1841 if (GET_CODE (subreg) == REG)
1842 {
1843 if (GET_MODE_SIZE (GET_MODE (subreg)) > UNITS_PER_WORD)
1844 may_save_copy = 0;
1845
1846 if (REGNO (subreg) < FIRST_PSEUDO_REGISTER)
1847 offset -= subreg_regno_offset (REGNO (subreg),
1848 GET_MODE (subreg),
1849 SUBREG_BYTE (setreg),
1850 GET_MODE (setreg));
1851 else
1852 offset -= (SUBREG_BYTE (setreg)
1853 / REGMODE_NATURAL_SIZE (GET_MODE (setreg)));
1854 }
1855
1856 setreg = subreg;
1857 }
1858
1859 if (GET_CODE (setreg) != REG)
1860 return 0;
1861
1862 sreg = REGNO (setreg);
1863 if (sreg < FIRST_PSEUDO_REGISTER)
1864 ssize = HARD_REGNO_NREGS (sreg, GET_MODE (setreg));
1865 else
1866 ssize = ((GET_MODE_SIZE (GET_MODE (setreg))
1867 + (REGMODE_NATURAL_SIZE (GET_MODE (setreg)) - 1))
1868 / REGMODE_NATURAL_SIZE (GET_MODE (setreg)));
1869
1870 /* If UREG is a pseudo-register that hasn't already been assigned a
1871 quantity number, it means that it is not local to this block or dies
1872 more than once. In either event, we can't do anything with it. */
1873 if ((ureg >= FIRST_PSEUDO_REGISTER && reg_qty[ureg] < 0)
1874 /* Do not combine registers unless one fits within the other. */
1875 || (offset > 0 && usize + offset > ssize)
1876 || (offset < 0 && usize + offset < ssize)
1877 /* Do not combine with a smaller already-assigned object
1878 if that smaller object is already combined with something bigger. */
1879 || (ssize > usize && ureg >= FIRST_PSEUDO_REGISTER
1880 && usize < qty[reg_qty[ureg]].size)
1881 /* Can't combine if SREG is not a register we can allocate. */
1882 || (sreg >= FIRST_PSEUDO_REGISTER && reg_qty[sreg] == -1)
1883 /* Don't combine with a pseudo mentioned in a REG_NO_CONFLICT note.
1884 These have already been taken care of. This probably wouldn't
1885 combine anyway, but don't take any chances. */
1886 || (ureg >= FIRST_PSEUDO_REGISTER
1887 && find_reg_note (insn, REG_NO_CONFLICT, usedreg))
1888 /* Don't tie something to itself. In most cases it would make no
1889 difference, but it would screw up if the reg being tied to itself
1890 also dies in this insn. */
1891 || ureg == sreg
1892 /* Don't try to connect two different hardware registers. */
1893 || (ureg < FIRST_PSEUDO_REGISTER && sreg < FIRST_PSEUDO_REGISTER)
1894 /* Don't connect two different machine modes if they have different
1895 implications as to which registers may be used. */
1896 || !MODES_TIEABLE_P (GET_MODE (usedreg), GET_MODE (setreg)))
1897 return 0;
1898
1899 /* Now, if UREG is a hard reg and SREG is a pseudo, record the hard reg in
1900 qty_phys_sugg for the pseudo instead of tying them.
1901
1902 Return "failure" so that the lifespan of UREG is terminated here;
1903 that way the two lifespans will be disjoint and nothing will prevent
1904 the pseudo reg from being given this hard reg. */
1905
1906 if (ureg < FIRST_PSEUDO_REGISTER)
1907 {
1908 /* Allocate a quantity number so we have a place to put our
1909 suggestions. */
1910 if (reg_qty[sreg] == -2)
1911 reg_is_born (setreg, 2 * insn_number);
1912
1913 if (reg_qty[sreg] >= 0)
1914 {
1915 if (may_save_copy
1916 && ! TEST_HARD_REG_BIT (qty_phys_copy_sugg[reg_qty[sreg]], ureg))
1917 {
1918 SET_HARD_REG_BIT (qty_phys_copy_sugg[reg_qty[sreg]], ureg);
1919 qty_phys_num_copy_sugg[reg_qty[sreg]]++;
1920 }
1921 else if (! TEST_HARD_REG_BIT (qty_phys_sugg[reg_qty[sreg]], ureg))
1922 {
1923 SET_HARD_REG_BIT (qty_phys_sugg[reg_qty[sreg]], ureg);
1924 qty_phys_num_sugg[reg_qty[sreg]]++;
1925 }
1926 }
1927 return 0;
1928 }
1929
1930 /* Similarly for SREG a hard register and UREG a pseudo register. */
1931
1932 if (sreg < FIRST_PSEUDO_REGISTER)
1933 {
1934 if (may_save_copy
1935 && ! TEST_HARD_REG_BIT (qty_phys_copy_sugg[reg_qty[ureg]], sreg))
1936 {
1937 SET_HARD_REG_BIT (qty_phys_copy_sugg[reg_qty[ureg]], sreg);
1938 qty_phys_num_copy_sugg[reg_qty[ureg]]++;
1939 }
1940 else if (! TEST_HARD_REG_BIT (qty_phys_sugg[reg_qty[ureg]], sreg))
1941 {
1942 SET_HARD_REG_BIT (qty_phys_sugg[reg_qty[ureg]], sreg);
1943 qty_phys_num_sugg[reg_qty[ureg]]++;
1944 }
1945 return 0;
1946 }
1947
1948 /* At this point we know that SREG and UREG are both pseudos.
1949 Do nothing if SREG already has a quantity or is a register that we
1950 don't allocate. */
1951 if (reg_qty[sreg] >= -1
1952 /* If we are not going to let any regs live across calls,
1953 don't tie a call-crossing reg to a non-call-crossing reg. */
1954 || (current_function_has_nonlocal_label
1955 && ((REG_N_CALLS_CROSSED (ureg) > 0)
1956 != (REG_N_CALLS_CROSSED (sreg) > 0))))
1957 return 0;
1958
1959 /* We don't already know about SREG, so tie it to UREG
1960 if this is the last use of UREG, provided the classes they want
1961 are compatible. */
1962
1963 if ((already_dead || find_regno_note (insn, REG_DEAD, ureg))
1964 && reg_meets_class_p (sreg, qty[reg_qty[ureg]].min_class))
1965 {
1966 /* Add SREG to UREG's quantity. */
1967 sqty = reg_qty[ureg];
1968 reg_qty[sreg] = sqty;
1969 reg_offset[sreg] = reg_offset[ureg] + offset;
1970 reg_next_in_qty[sreg] = qty[sqty].first_reg;
1971 qty[sqty].first_reg = sreg;
1972
1973 /* If SREG's reg class is smaller, set qty[SQTY].min_class. */
1974 update_qty_class (sqty, sreg);
1975
1976 /* Update info about quantity SQTY. */
1977 qty[sqty].n_calls_crossed += REG_N_CALLS_CROSSED (sreg);
1978 qty[sqty].n_refs += REG_N_REFS (sreg);
1979 qty[sqty].freq += REG_FREQ (sreg);
1980 if (usize < ssize)
1981 {
1982 int i;
1983
1984 for (i = qty[sqty].first_reg; i >= 0; i = reg_next_in_qty[i])
1985 reg_offset[i] -= offset;
1986
1987 qty[sqty].size = ssize;
1988 qty[sqty].mode = GET_MODE (setreg);
1989 }
1990 }
1991 else
1992 return 0;
1993
1994 return 1;
1995 }
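/* Sketch of the data structure built above (the pseudo and quantity
   numbers are made up): if pseudo 101 is tied into pseudo 100's quantity
   Q, then roughly

       reg_qty[100] == reg_qty[101] == Q;
       qty[Q].first_reg == 101;
       reg_next_in_qty[101] == 100;
       reg_next_in_qty[100] == -1;

   so the propagation loop at the end of block_alloc,

       for (i = qty[Q].first_reg; i >= 0; i = reg_next_in_qty[i])
         reg_renumber[i] = qty[Q].phys_reg + reg_offset[i];

   renumbers every pseudo in the quantity to the same hard reg, adjusted by
   its SUBREG offset.  */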
1996 \f
1997 /* Return 1 if the preferred class of REG allows it to be tied
1998 to a quantity or register whose class is CLASS.
1999 True if REG's reg class either contains or is contained in CLASS. */
2000
2001 static int
2002 reg_meets_class_p (reg, class)
2003 int reg;
2004 enum reg_class class;
2005 {
2006 enum reg_class rclass = reg_preferred_class (reg);
2007 return (reg_class_subset_p (rclass, class)
2008 || reg_class_subset_p (class, rclass));
2009 }
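/* Example (the class names are only illustrative): if REG prefers
   GENERAL_REGS and CLASS is ALL_REGS, GENERAL_REGS is a subset of ALL_REGS
   and the tie is allowed; if REG prefers a floating-point class and CLASS
   is GENERAL_REGS, neither class contains the other on most targets and
   the tie is rejected.  */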
2010
2011 /* Update the class of QTYNO assuming that REG is being tied to it. */
2012
2013 static void
2014 update_qty_class (qtyno, reg)
2015 int qtyno;
2016 int reg;
2017 {
2018 enum reg_class rclass = reg_preferred_class (reg);
2019 if (reg_class_subset_p (rclass, qty[qtyno].min_class))
2020 qty[qtyno].min_class = rclass;
2021
2022 rclass = reg_alternate_class (reg);
2023 if (reg_class_subset_p (rclass, qty[qtyno].alternate_class))
2024 qty[qtyno].alternate_class = rclass;
2025 }
2026 \f
2027 /* Handle something which alters the value of an rtx REG.
2028
2029 REG is whatever is set or clobbered. SETTER is the rtx that
2030 is modifying the register.
2031
2032 If it is not really a register, we do nothing.
2033 The file-global variables `this_insn' and `this_insn_number'
2034 carry info from `block_alloc'. */
2035
2036 static void
2037 reg_is_set (reg, setter, data)
2038 rtx reg;
2039 rtx setter;
2040 void *data ATTRIBUTE_UNUSED;
2041 {
2042 /* Note that note_stores will only pass us a SUBREG if it is a SUBREG of
2043 a hard register. These may actually not exist any more. */
2044
2045 if (GET_CODE (reg) != SUBREG
2046 && GET_CODE (reg) != REG)
2047 return;
2048
2049 /* Mark this register as being born. If it is used in a CLOBBER, mark
2050 it as being born halfway between the previous insn and this insn so that
2051 it conflicts with our inputs but not the outputs of the previous insn. */
2052
2053 reg_is_born (reg, 2 * this_insn_number - (GET_CODE (setter) == CLOBBER));
2054 }
2055 \f
2056 /* Handle beginning of the life of register REG.
2057 BIRTH is the index at which this is happening. */
2058
2059 static void
2060 reg_is_born (reg, birth)
2061 rtx reg;
2062 int birth;
2063 {
2064 int regno;
2065
2066 if (GET_CODE (reg) == SUBREG)
2067 {
2068 regno = REGNO (SUBREG_REG (reg));
2069 if (regno < FIRST_PSEUDO_REGISTER)
2070 regno = subreg_hard_regno (reg, 1);
2071 }
2072 else
2073 regno = REGNO (reg);
2074
2075 if (regno < FIRST_PSEUDO_REGISTER)
2076 {
2077 mark_life (regno, GET_MODE (reg), 1);
2078
2079 /* If the register was to have been born earlier than the present
2080 insn, mark it as live where it is actually born. */
2081 if (birth < 2 * this_insn_number)
2082 post_mark_life (regno, GET_MODE (reg), 1, birth, 2 * this_insn_number);
2083 }
2084 else
2085 {
2086 if (reg_qty[regno] == -2)
2087 alloc_qty (regno, GET_MODE (reg), PSEUDO_REGNO_SIZE (regno), birth);
2088
2089 /* If this register has a quantity number, show that it isn't dead. */
2090 if (reg_qty[regno] >= 0)
2091 qty[reg_qty[regno]].death = -1;
2092 }
2093 }
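/* Recap of the reg_qty encoding relied on above (as used throughout this
   pass):

       reg_qty[R] == -2   R is a candidate for local allocation but has not
                          been born yet; alloc_qty assigns it a quantity at
                          its birth,
       reg_qty[R] == -1   R is not allocated by local-alloc at all,
       reg_qty[R] >=  0   R already belongs to quantity reg_qty[R].  */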
2094
2095 /* Record the death of REG in the current insn. If OUTPUT_P is nonzero,
2096 REG is an output that is dying (i.e., it is never used), otherwise it
2097 is an input (the normal case).
2098 If OUTPUT_P is 1, then we extend the life past the end of this insn. */
2099
2100 static void
2101 wipe_dead_reg (reg, output_p)
2102 rtx reg;
2103 int output_p;
2104 {
2105 int regno = REGNO (reg);
2106
2107 /* If this insn has multiple results,
2108 and the dead reg is used in one of the results,
2109 extend its life to after this insn,
2110 so it won't get allocated together with any other result of this insn.
2111
2112 It is unsafe to use !single_set here since it will ignore an unused
2113 output. Just because an output is unused does not mean the compiler
2114 can assume the side effect will not occur. Consider if REG appears
2115 in the address of an output and we reload the output. If we allocate
2116 REG to the same hard register as an unused output we could set the hard
2117 register before the output reload insn. */
2118 if (GET_CODE (PATTERN (this_insn)) == PARALLEL
2119 && multiple_sets (this_insn))
2120 {
2121 int i;
2122 for (i = XVECLEN (PATTERN (this_insn), 0) - 1; i >= 0; i--)
2123 {
2124 rtx set = XVECEXP (PATTERN (this_insn), 0, i);
2125 if (GET_CODE (set) == SET
2126 && GET_CODE (SET_DEST (set)) != REG
2127 && !rtx_equal_p (reg, SET_DEST (set))
2128 && reg_overlap_mentioned_p (reg, SET_DEST (set)))
2129 output_p = 1;
2130 }
2131 }
2132
2133 /* If this register is used in an auto-increment address, then extend its
2134 life to after this insn, so that it won't get allocated together with
2135 the result of this insn. */
2136 if (! output_p && find_regno_note (this_insn, REG_INC, regno))
2137 output_p = 1;
2138
2139 if (regno < FIRST_PSEUDO_REGISTER)
2140 {
2141 mark_life (regno, GET_MODE (reg), 0);
2142
2143 /* If a hard register is dying as an output, mark it as in use at
2144 the beginning of this insn (the above statement would cause this
2145 not to happen). */
2146 if (output_p)
2147 post_mark_life (regno, GET_MODE (reg), 1,
2148 2 * this_insn_number, 2 * this_insn_number + 1);
2149 }
2150
2151 else if (reg_qty[regno] >= 0)
2152 qty[reg_qty[regno]].death = 2 * this_insn_number + output_p;
2153 }
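/* Worked example of the 2 * insn_number encoding used above (the insn
   number 7 is made up):

       2*7 - 1 == 13   birth point for a reg set by a CLOBBER, so that it
                       conflicts with insn 7's inputs but not with the
                       outputs of insn 6,
       2*7     == 14   the ordinary birth/death point for insn 7,
       2*7 + 1 == 15   a death pushed past insn 7 (auto-increment uses and
                       unused outputs of a multiple-set insn).

   Giving each insn two ticks lets these "halfway" points be represented
   with plain integers.  */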
2154 \f
2155 /* Find a block of SIZE words of hard regs in reg_class CLASS
2156 that can hold something of machine-mode MODE
2157 (but actually we test only the first of the block for holding MODE)
2158 and still free between insn BORN_INDEX and insn DEAD_INDEX,
2159 and return the number of the first of them.
2160 Return -1 if such a block cannot be found.
2161 If QTYNO crosses calls, insist on a register preserved by calls,
2162 unless ACCEPT_CALL_CLOBBERED is nonzero.
2163
2164 If JUST_TRY_SUGGESTED is nonzero, only try to see if the suggested
2165 register is available. If not, return -1. */
2166
2167 static int
2168 find_free_reg (class, mode, qtyno, accept_call_clobbered, just_try_suggested,
2169 born_index, dead_index)
2170 enum reg_class class;
2171 enum machine_mode mode;
2172 int qtyno;
2173 int accept_call_clobbered;
2174 int just_try_suggested;
2175 int born_index, dead_index;
2176 {
2177 int i, ins;
2178 HARD_REG_SET first_used, used;
2179 #ifdef ELIMINABLE_REGS
2180 static const struct {const int from, to; } eliminables[] = ELIMINABLE_REGS;
2181 #endif
2182
2183 /* Validate our parameters. */
2184 if (born_index < 0 || born_index > dead_index)
2185 abort ();
2186
2187 /* Don't let a pseudo live in a reg across a function call
2188 if we might get a nonlocal goto. */
2189 if (current_function_has_nonlocal_label
2190 && qty[qtyno].n_calls_crossed > 0)
2191 return -1;
2192
2193 if (accept_call_clobbered)
2194 COPY_HARD_REG_SET (used, call_fixed_reg_set);
2195 else if (qty[qtyno].n_calls_crossed == 0)
2196 COPY_HARD_REG_SET (used, fixed_reg_set);
2197 else
2198 COPY_HARD_REG_SET (used, call_used_reg_set);
2199
2200 if (accept_call_clobbered)
2201 IOR_HARD_REG_SET (used, losing_caller_save_reg_set);
2202
2203 for (ins = born_index; ins < dead_index; ins++)
2204 IOR_HARD_REG_SET (used, regs_live_at[ins]);
2205
2206 IOR_COMPL_HARD_REG_SET (used, reg_class_contents[(int) class]);
2207
2208 /* Don't use the frame pointer reg in local-alloc even if
2209 we may omit the frame pointer, because if we do that and then we
2210 need a frame pointer, reload won't know how to move the pseudo
2211 to another hard reg. It can move only regs made by global-alloc.
2212
2213 This is true of any register that can be eliminated. */
2214 #ifdef ELIMINABLE_REGS
2215 for (i = 0; i < (int) ARRAY_SIZE (eliminables); i++)
2216 SET_HARD_REG_BIT (used, eliminables[i].from);
2217 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
2218 /* If FRAME_POINTER_REGNUM is not a real register, then protect the one
2219 that it might be eliminated into. */
2220 SET_HARD_REG_BIT (used, HARD_FRAME_POINTER_REGNUM);
2221 #endif
2222 #else
2223 SET_HARD_REG_BIT (used, FRAME_POINTER_REGNUM);
2224 #endif
2225
2226 #ifdef CANNOT_CHANGE_MODE_CLASS
2227 cannot_change_mode_set_regs (&used, mode, qty[qtyno].first_reg);
2228 #endif
2229
2230 /* Normally, the registers that can be used for the first register in
2231 a multi-register quantity are the same as those that can be used for
2232 subsequent registers. However, if just trying suggested registers,
2233 restrict our consideration to them. If there are copy-suggested
2234 registers, try them. Otherwise, try the arithmetic-suggested
2235 registers. */
2236 COPY_HARD_REG_SET (first_used, used);
2237
2238 if (just_try_suggested)
2239 {
2240 if (qty_phys_num_copy_sugg[qtyno] != 0)
2241 IOR_COMPL_HARD_REG_SET (first_used, qty_phys_copy_sugg[qtyno]);
2242 else
2243 IOR_COMPL_HARD_REG_SET (first_used, qty_phys_sugg[qtyno]);
2244 }
2245
2246 /* If all registers are excluded, we can't do anything. */
2247 GO_IF_HARD_REG_SUBSET (reg_class_contents[(int) ALL_REGS], first_used, fail);
2248
2249 /* If at least one would be suitable, test each hard reg. */
2250
2251 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2252 {
2253 #ifdef REG_ALLOC_ORDER
2254 int regno = reg_alloc_order[i];
2255 #else
2256 int regno = i;
2257 #endif
2258 if (! TEST_HARD_REG_BIT (first_used, regno)
2259 && HARD_REGNO_MODE_OK (regno, mode)
2260 && (qty[qtyno].n_calls_crossed == 0
2261 || accept_call_clobbered
2262 || ! HARD_REGNO_CALL_PART_CLOBBERED (regno, mode)))
2263 {
2264 int j;
2265 int size1 = HARD_REGNO_NREGS (regno, mode);
2266 for (j = 1; j < size1 && ! TEST_HARD_REG_BIT (used, regno + j); j++);
2267 if (j == size1)
2268 {
2269 /* Mark that this register is in use between its birth and death
2270 insns. */
2271 post_mark_life (regno, mode, 1, born_index, dead_index);
2272 return regno;
2273 }
2274 #ifndef REG_ALLOC_ORDER
2275 /* Skip starting points we know will lose. */
2276 i += j;
2277 #endif
2278 }
2279 }
2280
2281 fail:
2282 /* If we are just trying suggested registers, we have just tried copy-
2283 suggested registers, and there are arithmetic-suggested registers,
2284 try them. */
2285
2286 /* If it would be profitable to allocate a call-clobbered register
2287 and save and restore it around calls, do that. */
2288 if (just_try_suggested && qty_phys_num_copy_sugg[qtyno] != 0
2289 && qty_phys_num_sugg[qtyno] != 0)
2290 {
2291 /* Don't try the copy-suggested regs again. */
2292 qty_phys_num_copy_sugg[qtyno] = 0;
2293 return find_free_reg (class, mode, qtyno, accept_call_clobbered, 1,
2294 born_index, dead_index);
2295 }
2296
2297 /* We need not check to see if the current function has nonlocal
2298 labels because we don't put any pseudos that are live over calls in
2299 registers in that case. */
2300
2301 if (! accept_call_clobbered
2302 && flag_caller_saves
2303 && ! just_try_suggested
2304 && qty[qtyno].n_calls_crossed != 0
2305 && CALLER_SAVE_PROFITABLE (qty[qtyno].n_refs,
2306 qty[qtyno].n_calls_crossed))
2307 {
2308 i = find_free_reg (class, mode, qtyno, 1, 0, born_index, dead_index);
2309 if (i >= 0)
2310 caller_save_needed = 1;
2311 return i;
2312 }
2313 return -1;
2314 }
2315 \f
2316 /* Mark that REGNO with machine-mode MODE is live starting from the current
2317 insn (if LIFE is nonzero) or dead starting at the current insn (if LIFE
2318 is zero). */
2319
2320 static void
2321 mark_life (regno, mode, life)
2322 int regno;
2323 enum machine_mode mode;
2324 int life;
2325 {
2326 int j = HARD_REGNO_NREGS (regno, mode);
2327 if (life)
2328 while (--j >= 0)
2329 SET_HARD_REG_BIT (regs_live, regno + j);
2330 else
2331 while (--j >= 0)
2332 CLEAR_HARD_REG_BIT (regs_live, regno + j);
2333 }
2334
2335 /* Mark register number REGNO (with machine-mode MODE) as live (if LIFE
2336 is nonzero) or dead (if LIFE is zero) from insn number BIRTH (inclusive)
2337 to insn number DEATH (exclusive). */
2338
2339 static void
2340 post_mark_life (regno, mode, life, birth, death)
2341 int regno;
2342 enum machine_mode mode;
2343 int life, birth, death;
2344 {
2345 int j = HARD_REGNO_NREGS (regno, mode);
2346 #ifdef HARD_REG_SET
2347 /* Declare it register if it's a scalar. */
2348 register
2349 #endif
2350 HARD_REG_SET this_reg;
2351
2352 CLEAR_HARD_REG_SET (this_reg);
2353 while (--j >= 0)
2354 SET_HARD_REG_BIT (this_reg, regno + j);
2355
2356 if (life)
2357 while (birth < death)
2358 {
2359 IOR_HARD_REG_SET (regs_live_at[birth], this_reg);
2360 birth++;
2361 }
2362 else
2363 while (birth < death)
2364 {
2365 AND_COMPL_HARD_REG_SET (regs_live_at[birth], this_reg);
2366 birth++;
2367 }
2368 }
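/* Illustrative call (all numbers assumed): on a target where hard reg 3 in
   DImode occupies two registers,

       post_mark_life (3, DImode, 1, 10, 14);

   sets bits 3 and 4 in regs_live_at[10] through regs_live_at[13], so
   find_free_reg will treat those hard regs as busy for any quantity whose
   birth/death range overlaps [10, 14).  */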
2369 \f
2370 /* INSN is the CLOBBER insn that starts a REG_NO_CONFLICT block, R0
2371 is the register being clobbered, and R1 is a register being used in
2372 the equivalent expression.
2373
2374 If R1 dies in the block and has a REG_NO_CONFLICT note on every insn
2375 in which it is used, return 1.
2376
2377 Otherwise, return 0. */
2378
2379 static int
2380 no_conflict_p (insn, r0, r1)
2381 rtx insn, r0 ATTRIBUTE_UNUSED, r1;
2382 {
2383 int ok = 0;
2384 rtx note = find_reg_note (insn, REG_LIBCALL, NULL_RTX);
2385 rtx p, last;
2386
2387 /* If R1 is a hard register, return 0 since we handle this case
2388 when we scan the insns that actually use it. */
2389
2390 if (note == 0
2391 || (GET_CODE (r1) == REG && REGNO (r1) < FIRST_PSEUDO_REGISTER)
2392 || (GET_CODE (r1) == SUBREG && GET_CODE (SUBREG_REG (r1)) == REG
2393 && REGNO (SUBREG_REG (r1)) < FIRST_PSEUDO_REGISTER))
2394 return 0;
2395
2396 last = XEXP (note, 0);
2397
2398 for (p = NEXT_INSN (insn); p && p != last; p = NEXT_INSN (p))
2399 if (INSN_P (p))
2400 {
2401 if (find_reg_note (p, REG_DEAD, r1))
2402 ok = 1;
2403
2404 /* There must be a REG_NO_CONFLICT note on every insn, otherwise
2405 some earlier optimization pass has inserted instructions into
2406 the sequence, and it is not safe to perform this optimization.
2407 Note that emit_no_conflict_block always ensures that this is
2408 true when these sequences are created. */
2409 if (! find_reg_note (p, REG_NO_CONFLICT, r1))
2410 return 0;
2411 }
2412
2413 return ok;
2414 }
2415 \f
2416 /* Return the number of alternatives for which the constraint string P
2417 indicates that the operand must be equal to operand 0 and that no register
2418 is acceptable. */
2419
2420 static int
2421 requires_inout (p)
2422 const char *p;
2423 {
2424 char c;
2425 int found_zero = 0;
2426 int reg_allowed = 0;
2427 int num_matching_alts = 0;
2428 int len;
2429
2430 for ( ; (c = *p); p += len)
2431 {
2432 len = CONSTRAINT_LEN (c, p);
2433 switch (c)
2434 {
2435 case '=': case '+': case '?':
2436 case '#': case '&': case '!':
2437 case '*': case '%':
2438 case 'm': case '<': case '>': case 'V': case 'o':
2439 case 'E': case 'F': case 'G': case 'H':
2440 case 's': case 'i': case 'n':
2441 case 'I': case 'J': case 'K': case 'L':
2442 case 'M': case 'N': case 'O': case 'P':
2443 case 'X':
2444 /* These don't say anything we care about. */
2445 break;
2446
2447 case ',':
2448 if (found_zero && ! reg_allowed)
2449 num_matching_alts++;
2450
2451 found_zero = reg_allowed = 0;
2452 break;
2453
2454 case '0':
2455 found_zero = 1;
2456 break;
2457
2458 case '1': case '2': case '3': case '4': case '5':
2459 case '6': case '7': case '8': case '9':
2460 /* Skip the balance of the matching constraint. */
2461 do
2462 p++;
2463 while (ISDIGIT (*p));
2464 len = 0;
2465 break;
2466
2467 default:
2468 if (REG_CLASS_FROM_CONSTRAINT (c, p) == NO_REGS
2469 && !EXTRA_ADDRESS_CONSTRAINT (c, p))
2470 break;
2471 /* FALLTHRU */
2472 case 'p':
2473 case 'g': case 'r':
2474 reg_allowed = 1;
2475 break;
2476 }
2477 }
2478
2479 if (found_zero && ! reg_allowed)
2480 num_matching_alts++;
2481
2482 return num_matching_alts;
2483 }
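/* Worked examples (the constraint strings are illustrative):

       requires_inout ("0")    == 1   the sole alternative must match
                                      operand 0 and allows no register,
       requires_inout ("0,r")  == 1   only the first alternative qualifies,
       requires_inout ("r0")   == 0   a register is also acceptable,
       requires_inout ("=r,0") == 1   modifiers such as '=' are ignored and
                                      the second alternative requires the
                                      match.  */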
2484 \f
2485 void
2486 dump_local_alloc (file)
2487 FILE *file;
2488 {
2489 int i;
2490 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
2491 if (reg_renumber[i] != -1)
2492 fprintf (file, ";; Register %d in %d.\n", i, reg_renumber[i]);
2493 }
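/* Sample dump output (the register numbers are made up):

       ;; Register 112 in 1.
       ;; Register 117 in 2.

   i.e. pseudo 112 was assigned hard reg 1 and pseudo 117 hard reg 2.  */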