/* gcc/global.c -- GCC's global register allocator.
   (Web-viewer navigation lines removed from this extracted copy.)  */
1 /* Allocate registers for pseudo-registers that span basic blocks.
2 Copyright (C) 1987, 1988, 1991, 1994, 1996, 1997, 1998,
3 1999, 2000, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
20 02110-1301, USA. */
21
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "machmode.h"
28 #include "hard-reg-set.h"
29 #include "rtl.h"
30 #include "tm_p.h"
31 #include "flags.h"
32 #include "regs.h"
33 #include "function.h"
34 #include "insn-config.h"
35 #include "recog.h"
36 #include "reload.h"
37 #include "output.h"
38 #include "toplev.h"
39 #include "tree-pass.h"
40 #include "timevar.h"
41 #include "vecprim.h"
42
43 /* This pass of the compiler performs global register allocation.
44 It assigns hard register numbers to all the pseudo registers
45 that were not handled in local_alloc. Assignments are recorded
46 in the vector reg_renumber, not by changing the rtl code.
47 (Such changes are made by final). The entry point is
48 the function global_alloc.
49
50 After allocation is complete, the reload pass is run as a subroutine
51 of this pass, so that when a pseudo reg loses its hard reg due to
52 spilling it is possible to make a second attempt to find a hard
53 reg for it. The reload pass is independent in other respects
54 and it is run even when stupid register allocation is in use.
55
56 1. Assign allocation-numbers (allocnos) to the pseudo-registers
57 still needing allocations and to the pseudo-registers currently
58 allocated by local-alloc which may be spilled by reload.
59 Set up tables reg_allocno and allocno_reg to map
60 reg numbers to allocnos and vice versa.
61 max_allocno gets the number of allocnos in use.
62
63 2. Allocate a max_allocno by max_allocno conflict bit matrix and clear it.
64 Allocate a max_allocno by FIRST_PSEUDO_REGISTER conflict matrix
65 for conflicts between allocnos and explicit hard register use
66 (which includes use of pseudo-registers allocated by local_alloc).
67
68 3. For each basic block
69 walk forward through the block, recording which
70 pseudo-registers and which hardware registers are live.
71 Build the conflict matrix between the pseudo-registers
72 and another of pseudo-registers versus hardware registers.
73 Also record the preferred hardware registers
74 for each pseudo-register.
75
76 4. Sort a table of the allocnos into order of
77 desirability of the variables.
78
79 5. Allocate the variables in that order; each if possible into
80 a preferred register, else into another register. */
81 \f
/* Number of pseudo-registers which are candidates for allocation.  */

static int max_allocno;

/* Indexed by (pseudo) reg number, gives the allocno, or -1
   for pseudo registers which are not to be allocated.  */

static int *reg_allocno;

/* Per-allocno record: everything the allocator needs to know in order
   to rank an allocno and pick a hard register for it.  */
struct allocno
{
  /* The pseudo-register number this allocno stands for.  */
  int reg;
  /* Gives the number of consecutive hard registers needed by that
     pseudo reg.  */
  int size;

  /* Number of calls crossed by each allocno.  */
  int calls_crossed;

  /* Number of calls that might throw crossed by each allocno.  */
  int throwing_calls_crossed;

  /* Number of refs to each allocno.  */
  int n_refs;

  /* Frequency of uses of each allocno.  */
  int freq;

  /* Guess at live length of each allocno.
     This is actually the max of the live lengths of the regs.  */
  int live_length;

  /* Set of hard regs conflicting with allocno N.  */

  HARD_REG_SET hard_reg_conflicts;

  /* Set of hard regs preferred by allocno N.
     This is used to make allocnos go into regs that are copied to or from them,
     when possible, to reduce register shuffling.  */

  HARD_REG_SET hard_reg_preferences;

  /* Similar, but just counts register preferences made in simple copy
     operations, rather than arithmetic.  These are given priority because
     we can always eliminate an insn by using these, but using a register
     in the above list won't always eliminate an insn.  */

  HARD_REG_SET hard_reg_copy_preferences;

  /* Similar to hard_reg_preferences, but includes bits for subsequent
     registers when an allocno is multi-word.  The above variable is used for
     allocation while this is used to build reg_someone_prefers, below.  */

  HARD_REG_SET hard_reg_full_preferences;

  /* Set of hard registers that some later allocno has a preference for.  */

  HARD_REG_SET regs_someone_prefers;

#ifdef STACK_REGS
  /* Set to true if allocno can't be allocated in the stack register.  */
  bool no_stack_reg;
#endif
};
146
/* The allocno table itself, indexed by allocno number (0 .. max_allocno-1).  */
static struct allocno *allocno;

/* A vector of the integers from 0 to max_allocno-1,
   sorted in the order of first-to-be-allocated first.  */

static int *allocno_order;

/* Indexed by (pseudo) reg number, gives the number of another
   lower-numbered pseudo reg which can share a hard reg with this pseudo
   *even if the two pseudos would otherwise appear to conflict*.  */

static int *reg_may_share;

/* Define the number of bits in each element of `conflicts' and what
   type that element has.  We use the largest integer format on the
   host machine.  */

#define INT_BITS HOST_BITS_PER_WIDE_INT
#define INT_TYPE HOST_WIDE_INT

/* max_allocno by max_allocno array of bits,
   recording whether two allocno's conflict (can't go in the same
   hardware register).

   `conflicts' is symmetric after the call to mirror_conflicts.  */

static INT_TYPE *conflicts;

/* Number of ints required to hold max_allocno bits.
   This is the length of a row in `conflicts'.  */

static int allocno_row_words;

/* Two macros to test or store 1 in an element of `conflicts'.  */

#define CONFLICTP(I, J) \
 (conflicts[(I) * allocno_row_words + (unsigned) (J) / INT_BITS]	\
  & ((INT_TYPE) 1 << ((unsigned) (J) % INT_BITS)))

/* For any allocno set in ALLOCNO_SET, set ALLOCNO to that allocno,
   and execute CODE.  Walks the bit vector word by word, shifting each
   word right so the low bit always indicates the current allocno.  */
#define EXECUTE_IF_SET_IN_ALLOCNO_SET(ALLOCNO_SET, ALLOCNO, CODE)	\
do {									\
  int i_;								\
  int allocno_;								\
  INT_TYPE *p_ = (ALLOCNO_SET);						\
									\
  for (i_ = allocno_row_words - 1, allocno_ = 0; i_ >= 0;		\
       i_--, allocno_ += INT_BITS)					\
    {									\
      unsigned INT_TYPE word_ = (unsigned INT_TYPE) *p_++;		\
									\
      for ((ALLOCNO) = allocno_; word_; word_ >>= 1, (ALLOCNO)++)	\
	{								\
	  if (word_ & 1)						\
	    {CODE;}							\
	}								\
    }									\
} while (0)

/* This doesn't work for non-GNU C due to the way CODE is macro expanded.  */
#if 0
/* For any allocno that conflicts with IN_ALLOCNO, set OUT_ALLOCNO to
   the conflicting allocno, and execute CODE.  This macro assumes that
   mirror_conflicts has been run.  */
#define EXECUTE_IF_CONFLICT(IN_ALLOCNO, OUT_ALLOCNO, CODE)\
  EXECUTE_IF_SET_IN_ALLOCNO_SET (conflicts + (IN_ALLOCNO) * allocno_row_words,\
				 OUT_ALLOCNO, (CODE))
#endif

/* Set of hard regs currently live (during scan of all insns).  */

static HARD_REG_SET hard_regs_live;

/* Set of registers that global-alloc isn't supposed to use.  */

static HARD_REG_SET no_global_alloc_regs;

/* Set of registers used so far.  */

static HARD_REG_SET regs_used_so_far;

/* Number of refs to each hard reg, as used by local alloc.
   It is zero for a reg that contains global pseudos or is explicitly used.  */

static int local_reg_n_refs[FIRST_PSEUDO_REGISTER];

/* Frequency of uses of given hard reg.  */
static int local_reg_freq[FIRST_PSEUDO_REGISTER];

/* Guess at live length of each hard reg, as used by local alloc.
   This is actually the sum of the live lengths of the specific regs.  */

static int local_reg_live_length[FIRST_PSEUDO_REGISTER];

/* Set to 1 a bit in a vector TABLE of HARD_REG_SETs, for vector
   element I, and hard register number J.  */

#define SET_REGBIT(TABLE, I, J)  SET_HARD_REG_BIT (allocno[I].TABLE, J)

/* Bit mask for allocnos live at current point in the scan.  */

static INT_TYPE *allocnos_live;

/* Test, set or clear bit number I in allocnos_live,
   a bit vector indexed by allocno.  */

#define SET_ALLOCNO_LIVE(I)				\
  (allocnos_live[(unsigned) (I) / INT_BITS]		\
     |= ((INT_TYPE) 1 << ((unsigned) (I) % INT_BITS)))

#define CLEAR_ALLOCNO_LIVE(I)				\
  (allocnos_live[(unsigned) (I) / INT_BITS]		\
     &= ~((INT_TYPE) 1 << ((unsigned) (I) % INT_BITS)))

/* This is turned off because it doesn't work right for DImode.
   (And it is only used for DImode, so the other cases are worthless.)
   The problem is that it isn't true that there is NO possibility of conflict;
   only that there is no conflict if the two pseudos get the exact same regs.
   If they were allocated with a partial overlap, there would be a conflict.
   We can't safely turn off the conflict unless we have another way to
   prevent the partial overlap.

   Idea: change hard_reg_conflicts so that instead of recording which
   hard regs the allocno may not overlap, it records where the allocno
   may not start.  Change both where it is used and where it is updated.
   Then there is a way to record that (reg:DI 108) may start at 10
   but not at 9 or 11.  There is still the question of how to record
   this semi-conflict between two pseudos.  */
#if 0
/* Reg pairs for which conflict after the current insn
   is inhibited by a REG_NO_CONFLICT note.
   If the table gets full, we ignore any other notes--that is conservative.  */
#define NUM_NO_CONFLICT_PAIRS 4
/* Number of pairs in use in this insn.  */
int n_no_conflict_pairs;
static struct { int allocno1, allocno2;}
  no_conflict_pairs[NUM_NO_CONFLICT_PAIRS];
#endif /* 0 */

/* Record all regs that are set in any one insn.
   Communication from mark_reg_{store,clobber} and global_conflicts.  */

static rtx *regs_set;
static int n_regs_set;

/* All registers that can be eliminated.  */

static HARD_REG_SET eliminable_regset;

/* Forward declarations for the allocator's internal helpers.  */
static int allocno_compare (const void *, const void *);
static void global_conflicts (void);
static void mirror_conflicts (void);
static void expand_preferences (void);
static void prune_preferences (void);
static void find_reg (int, HARD_REG_SET, int, int, int);
static void record_one_conflict (int);
static void record_conflicts (int *, int);
static void mark_reg_store (rtx, rtx, void *);
static void mark_reg_clobber (rtx, rtx, void *);
static void mark_reg_conflicts (rtx);
static void mark_reg_death (rtx);
static void mark_reg_live_nc (int, enum machine_mode);
static void set_preference (rtx, rtx);
static void dump_conflicts (FILE *);
static void reg_becomes_live (rtx, rtx, void *);
static void reg_dies (int, enum machine_mode, struct insn_chain *);

/* Forward declarations for the accurate-live-analysis phase.  */
static void allocate_bb_info (void);
static void free_bb_info (void);
static bool check_earlyclobber (rtx);
static void mark_reg_use_for_earlyclobber_1 (rtx *, void *);
static int mark_reg_use_for_earlyclobber (rtx *, void *);
static void calculate_local_reg_bb_info (void);
static void set_up_bb_rts_numbers (void);
static int rpost_cmp (const void *, const void *);
static void calculate_reg_pav (void);
static void modify_reg_pav (void);
static void make_accurate_live_analysis (void);
326
327 \f
328
/* Perform allocation of pseudo-registers not allocated by local_alloc.

   Sets up the allocno tables, computes conflicts, assigns hard regs in
   priority order, then runs reload as a subroutine.

   Return value is nonzero if reload failed
   and we must not do any more for this function.  */

static int
global_alloc (void)
{
  int retval;
#ifdef ELIMINABLE_REGS
  static const struct {const int from, to; } eliminables[] = ELIMINABLE_REGS;
#endif
  /* Nonzero if this function must keep its frame pointer.  */
  int need_fp
    = (! flag_omit_frame_pointer
       || (current_function_calls_alloca && EXIT_IGNORE_STACK)
       || FRAME_POINTER_REQUIRED);

  size_t i;
  rtx x;

  make_accurate_live_analysis ();

  max_allocno = 0;

  /* A machine may have certain hard registers that
     are safe to use only within a basic block.  */

  CLEAR_HARD_REG_SET (no_global_alloc_regs);

  /* Build the regset of all eliminable registers and show we can't use those
     that we already know won't be eliminated.  */
#ifdef ELIMINABLE_REGS
  for (i = 0; i < ARRAY_SIZE (eliminables); i++)
    {
      bool cannot_elim
	= (! CAN_ELIMINATE (eliminables[i].from, eliminables[i].to)
	   || (eliminables[i].to == STACK_POINTER_REGNUM && need_fp));

      if (!regs_asm_clobbered[eliminables[i].from])
	{
	  SET_HARD_REG_BIT (eliminable_regset, eliminables[i].from);

	  if (cannot_elim)
	    SET_HARD_REG_BIT (no_global_alloc_regs, eliminables[i].from);
	}
      else if (cannot_elim)
	/* An asm clobbers a register we cannot eliminate: hard error.  */
	error ("%s cannot be used in asm here",
	       reg_names[eliminables[i].from]);
      else
	regs_ever_live[eliminables[i].from] = 1;
    }
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
  if (!regs_asm_clobbered[HARD_FRAME_POINTER_REGNUM])
    {
      SET_HARD_REG_BIT (eliminable_regset, HARD_FRAME_POINTER_REGNUM);
      if (need_fp)
	SET_HARD_REG_BIT (no_global_alloc_regs, HARD_FRAME_POINTER_REGNUM);
    }
  else if (need_fp)
    error ("%s cannot be used in asm here",
	   reg_names[HARD_FRAME_POINTER_REGNUM]);
  else
    regs_ever_live[HARD_FRAME_POINTER_REGNUM] = 1;
#endif

#else
  if (!regs_asm_clobbered[FRAME_POINTER_REGNUM])
    {
      SET_HARD_REG_BIT (eliminable_regset, FRAME_POINTER_REGNUM);
      if (need_fp)
	SET_HARD_REG_BIT (no_global_alloc_regs, FRAME_POINTER_REGNUM);
    }
  else if (need_fp)
    error ("%s cannot be used in asm here", reg_names[FRAME_POINTER_REGNUM]);
  else
    regs_ever_live[FRAME_POINTER_REGNUM] = 1;
#endif

  /* Track which registers have already been used.  Start with registers
     explicitly in the rtl, then registers allocated by local register
     allocation.  */

  CLEAR_HARD_REG_SET (regs_used_so_far);
#ifdef LEAF_REGISTERS
  /* If we are doing the leaf function optimization, and this is a leaf
     function, it means that the registers that take work to save are those
     that need a register window.  So prefer the ones that can be used in
     a leaf function.  */
  {
    const char *cheap_regs;
    const char *const leaf_regs = LEAF_REGISTERS;

    if (only_leaf_regs_used () && leaf_function_p ())
      cheap_regs = leaf_regs;
    else
      cheap_regs = call_used_regs;
    for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
      if (regs_ever_live[i] || cheap_regs[i])
	SET_HARD_REG_BIT (regs_used_so_far, i);
  }
#else
  /* We consider registers that do not have to be saved over calls as if
     they were already used since there is no cost in using them.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (regs_ever_live[i] || call_used_regs[i])
      SET_HARD_REG_BIT (regs_used_so_far, i);
#endif

  for (i = FIRST_PSEUDO_REGISTER; i < (size_t) max_regno; i++)
    if (reg_renumber[i] >= 0)
      SET_HARD_REG_BIT (regs_used_so_far, reg_renumber[i]);

  /* Establish mappings from register number to allocation number
     and vice versa.  In the process, count the allocnos.  */

  reg_allocno = XNEWVEC (int, max_regno);

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    reg_allocno[i] = -1;

  /* Initialize the shared-hard-reg mapping
     from the list of pairs that may share.  */
  reg_may_share = XCNEWVEC (int, max_regno);
  for (x = regs_may_share; x; x = XEXP (XEXP (x, 1), 1))
    {
      int r1 = REGNO (XEXP (x, 0));
      int r2 = REGNO (XEXP (XEXP (x, 1), 0));
      /* Always map the higher-numbered reg to the lower one.  */
      if (r1 > r2)
	reg_may_share[r1] = r2;
      else
	reg_may_share[r2] = r1;
    }

  for (i = FIRST_PSEUDO_REGISTER; i < (size_t) max_regno; i++)
    /* Note that reg_live_length[i] < 0 indicates a "constant" reg
       that we are supposed to refrain from putting in a hard reg.
       -2 means do make an allocno but don't allocate it.  */
    if (REG_N_REFS (i) != 0 && REG_LIVE_LENGTH (i) != -1
	/* Don't allocate pseudos that cross calls,
	   if this function receives a nonlocal goto.  */
	&& (! current_function_has_nonlocal_label
	    || REG_N_CALLS_CROSSED (i) == 0))
      {
	/* A reg that may share gets its partner's allocno, so both end
	   up in the same hard register.  */
	if (reg_renumber[i] < 0
	    && reg_may_share[i] && reg_allocno[reg_may_share[i]] >= 0)
	  reg_allocno[i] = reg_allocno[reg_may_share[i]];
	else
	  reg_allocno[i] = max_allocno++;
	gcc_assert (REG_LIVE_LENGTH (i));
      }
    else
      reg_allocno[i] = -1;

  allocno = XCNEWVEC (struct allocno, max_allocno);

  /* Accumulate per-allocno statistics; += because several shared regs
     may map onto one allocno.  */
  for (i = FIRST_PSEUDO_REGISTER; i < (size_t) max_regno; i++)
    if (reg_allocno[i] >= 0)
      {
	int num = reg_allocno[i];
	allocno[num].reg = i;
	allocno[num].size = PSEUDO_REGNO_SIZE (i);
	allocno[num].calls_crossed += REG_N_CALLS_CROSSED (i);
	allocno[num].throwing_calls_crossed
	  += REG_N_THROWING_CALLS_CROSSED (i);
	allocno[num].n_refs += REG_N_REFS (i);
	allocno[num].freq += REG_FREQ (i);
	if (allocno[num].live_length < REG_LIVE_LENGTH (i))
	  allocno[num].live_length = REG_LIVE_LENGTH (i);
      }

  /* Calculate amount of usage of each hard reg by pseudos
     allocated by local-alloc.  This is to see if we want to
     override it.  */
  memset (local_reg_live_length, 0, sizeof local_reg_live_length);
  memset (local_reg_n_refs, 0, sizeof local_reg_n_refs);
  memset (local_reg_freq, 0, sizeof local_reg_freq);
  for (i = FIRST_PSEUDO_REGISTER; i < (size_t) max_regno; i++)
    if (reg_renumber[i] >= 0)
      {
	int regno = reg_renumber[i];
	int endregno = regno + hard_regno_nregs[regno][PSEUDO_REGNO_MODE (i)];
	int j;

	for (j = regno; j < endregno; j++)
	  {
	    local_reg_n_refs[j] += REG_N_REFS (i);
	    local_reg_freq[j] += REG_FREQ (i);
	    local_reg_live_length[j] += REG_LIVE_LENGTH (i);
	  }
      }

  /* We can't override local-alloc for a reg used not just by local-alloc.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (regs_ever_live[i])
      local_reg_n_refs[i] = 0, local_reg_freq[i] = 0;

  allocno_row_words = (max_allocno + INT_BITS - 1) / INT_BITS;

  /* We used to use alloca here, but the size of what it would try to
     allocate would occasionally cause it to exceed the stack limit and
     cause unpredictable core dumps.  Some examples were > 2Mb in size.  */
  conflicts = XCNEWVEC (INT_TYPE, max_allocno * allocno_row_words);

  allocnos_live = XNEWVEC (INT_TYPE, allocno_row_words);

  /* If there is work to be done (at least one reg to allocate),
     perform global conflict analysis and allocate the regs.  */

  if (max_allocno > 0)
    {
      /* Scan all the insns and compute the conflicts among allocnos
	 and between allocnos and hard regs.  */

      global_conflicts ();

      mirror_conflicts ();

      /* Eliminate conflicts between pseudos and eliminable registers.  If
	 the register is not eliminated, the pseudo won't really be able to
	 live in the eliminable register, so the conflict doesn't matter.
	 If we do eliminate the register, the conflict will no longer exist.
	 So in either case, we can ignore the conflict.  Likewise for
	 preferences.  */

      for (i = 0; i < (size_t) max_allocno; i++)
	{
	  AND_COMPL_HARD_REG_SET (allocno[i].hard_reg_conflicts,
				  eliminable_regset);
	  AND_COMPL_HARD_REG_SET (allocno[i].hard_reg_copy_preferences,
				  eliminable_regset);
	  AND_COMPL_HARD_REG_SET (allocno[i].hard_reg_preferences,
				  eliminable_regset);
	}

      /* Try to expand the preferences by merging them between allocnos.  */

      expand_preferences ();

      /* Determine the order to allocate the remaining pseudo registers.  */

      allocno_order = XNEWVEC (int, max_allocno);
      for (i = 0; i < (size_t) max_allocno; i++)
	allocno_order[i] = i;

      /* Default the size to 1, since allocno_compare uses it to divide by.
	 Also convert allocno_live_length of zero to -1.  A length of zero
	 can occur when all the registers for that allocno have reg_live_length
	 equal to -2.  In this case, we want to make an allocno, but not
	 allocate it.  So avoid the divide-by-zero and set it to a low
	 priority.  */

      for (i = 0; i < (size_t) max_allocno; i++)
	{
	  if (allocno[i].size == 0)
	    allocno[i].size = 1;
	  if (allocno[i].live_length == 0)
	    allocno[i].live_length = -1;
	}

      qsort (allocno_order, max_allocno, sizeof (int), allocno_compare);

      prune_preferences ();

      if (dump_file)
	dump_conflicts (dump_file);

      /* Try allocating them, one by one, in that order,
	 except for parameters marked with reg_live_length[regno] == -2.  */

      for (i = 0; i < (size_t) max_allocno; i++)
	if (reg_renumber[allocno[allocno_order[i]].reg] < 0
	    && REG_LIVE_LENGTH (allocno[allocno_order[i]].reg) >= 0)
	  {
	    /* If we have more than one register class,
	       first try allocating in the class that is cheapest
	       for this pseudo-reg.  If that fails, try any reg.  */
	    if (N_REG_CLASSES > 1)
	      {
		find_reg (allocno_order[i], 0, 0, 0, 0);
		if (reg_renumber[allocno[allocno_order[i]].reg] >= 0)
		  continue;
	      }
	    if (reg_alternate_class (allocno[allocno_order[i]].reg) != NO_REGS)
	      find_reg (allocno_order[i], 0, 1, 0, 0);
	  }

      free (allocno_order);
    }

  /* Do the reloads now while the allocno data still exists, so that we can
     try to assign new hard regs to any pseudo regs that are spilled.  */

#if 0 /* We need to eliminate regs even if there is no rtl code,
	 for the sake of debugging information.  */
  if (n_basic_blocks > NUM_FIXED_BLOCKS)
#endif
    {
      build_insn_chain (get_insns ());
      retval = reload (get_insns (), 1);
    }

  /* Clean up.  */
  free (reg_allocno);
  free (reg_may_share);
  free (allocno);
  free (conflicts);
  free (allocnos_live);

  return retval;
}
639
640 /* Sort predicate for ordering the allocnos.
641 Returns -1 (1) if *v1 should be allocated before (after) *v2. */
642
643 static int
644 allocno_compare (const void *v1p, const void *v2p)
645 {
646 int v1 = *(const int *)v1p, v2 = *(const int *)v2p;
647 /* Note that the quotient will never be bigger than
648 the value of floor_log2 times the maximum number of
649 times a register can occur in one insn (surely less than 100)
650 weighted by the frequency (maximally REG_FREQ_MAX).
651 Multiplying this by 10000/REG_FREQ_MAX can't overflow. */
652 int pri1
653 = (((double) (floor_log2 (allocno[v1].n_refs) * allocno[v1].freq)
654 / allocno[v1].live_length)
655 * (10000 / REG_FREQ_MAX) * allocno[v1].size);
656 int pri2
657 = (((double) (floor_log2 (allocno[v2].n_refs) * allocno[v2].freq)
658 / allocno[v2].live_length)
659 * (10000 / REG_FREQ_MAX) * allocno[v2].size);
660 if (pri2 - pri1)
661 return pri2 - pri1;
662
663 /* If regs are equally good, sort by allocno,
664 so that the results of qsort leave nothing to chance. */
665 return v1 - v2;
666 }
667 \f
/* Scan the rtl code and record all conflicts and register preferences in the
   conflict matrices and preference tables.  Walks every basic block forward,
   maintaining the set of live allocnos and hard regs; a conflict is recorded
   whenever a register becomes live while others are live.  */

static void
global_conflicts (void)
{
  unsigned i;
  basic_block b;
  rtx insn;
  int *block_start_allocnos;

  /* Make a vector that mark_reg_{store,clobber} will store in.  */
  regs_set = XNEWVEC (rtx, max_parallel * 2);

  block_start_allocnos = XNEWVEC (int, max_allocno);

  FOR_EACH_BB (b)
    {
      memset (allocnos_live, 0, allocno_row_words * sizeof (INT_TYPE));

      /* Initialize table of registers currently live
	 to the state at the beginning of this basic block.
	 This also marks the conflicts among hard registers
	 and any allocnos that are live.

	 For pseudo-regs, there is only one bit for each one
	 no matter how many hard regs it occupies.
	 This is ok; we know the size from PSEUDO_REGNO_SIZE.
	 For explicit hard regs, we cannot know the size that way
	 since one hard reg can be used with various sizes.
	 Therefore, we must require that all the hard regs
	 implicitly live as part of a multi-word hard reg
	 be explicitly marked in basic_block_live_at_start.  */

      {
	regset old = b->il.rtl->global_live_at_start;
	int ax = 0;
	reg_set_iterator rsi;

	REG_SET_TO_HARD_REG_SET (hard_regs_live, old);
	EXECUTE_IF_SET_IN_REG_SET (old, FIRST_PSEUDO_REGISTER, i, rsi)
	  {
	    int a = reg_allocno[i];
	    if (a >= 0)
	      {
		SET_ALLOCNO_LIVE (a);
		block_start_allocnos[ax++] = a;
	      }
	    else if ((a = reg_renumber[i]) >= 0)
	      /* A pseudo already assigned by local-alloc: mark its hard
		 regs live instead.  */
	      mark_reg_live_nc (a, PSEUDO_REGNO_MODE (i));
	  }

	/* Record that each allocno now live conflicts with each hard reg
	   now live.

	   It is not necessary to mark any conflicts between pseudos at
	   this point, even for pseudos which are live at the start of
	   the basic block.

	   Given two pseudos X and Y and any point in the CFG P.

	   On any path to point P where X and Y are live one of the
	   following conditions must be true:

	     1. X is live at some instruction on the path that
		evaluates Y.

	     2. Y is live at some instruction on the path that
		evaluates X.

	     3. Either X or Y is not evaluated on the path to P
		(i.e. it is used uninitialized) and thus the
		conflict can be ignored.

	   In cases #1 and #2 the conflict will be recorded when we
	   scan the instruction that makes either X or Y become live.  */
	record_conflicts (block_start_allocnos, ax);

	/* Pseudos can't go in stack regs at the start of a basic block that
	   is reached by an abnormal edge.  Likewise for call clobbered regs,
	   because caller-save, fixup_abnormal_edges and possibly the table
	   driven EH machinery are not quite ready to handle such regs live
	   across such edges.  */
	{
	  edge e;
	  edge_iterator ei;

	  /* E is left non-NULL iff some predecessor edge is abnormal.  */
	  FOR_EACH_EDGE (e, ei, b->preds)
	    if (e->flags & EDGE_ABNORMAL)
	      break;

	  if (e != NULL)
	    {
#ifdef STACK_REGS
	      EXECUTE_IF_SET_IN_ALLOCNO_SET (allocnos_live, ax,
					     {
					       allocno[ax].no_stack_reg = 1;
					     });
	      for (ax = FIRST_STACK_REG; ax <= LAST_STACK_REG; ax++)
		record_one_conflict (ax);
#endif

	      /* No need to record conflicts for call clobbered regs if we have
		 nonlocal labels around, as we don't ever try to allocate such
		 regs in this case.  */
	      if (! current_function_has_nonlocal_label)
		for (ax = 0; ax < FIRST_PSEUDO_REGISTER; ax++)
		  if (call_used_regs [ax])
		    record_one_conflict (ax);
	    }
	}
      }

      insn = BB_HEAD (b);

      /* Scan the code of this basic block, noting which allocnos
	 and hard regs are born or die.  When one is born,
	 record a conflict with all others currently live.  */

      while (1)
	{
	  RTX_CODE code = GET_CODE (insn);
	  rtx link;

	  /* Make regs_set an empty set.  */

	  n_regs_set = 0;

	  if (code == INSN || code == CALL_INSN || code == JUMP_INSN)
	    {

#if 0
	      int i = 0;
	      for (link = REG_NOTES (insn);
		   link && i < NUM_NO_CONFLICT_PAIRS;
		   link = XEXP (link, 1))
		if (REG_NOTE_KIND (link) == REG_NO_CONFLICT)
		  {
		    no_conflict_pairs[i].allocno1
		      = reg_allocno[REGNO (SET_DEST (PATTERN (insn)))];
		    no_conflict_pairs[i].allocno2
		      = reg_allocno[REGNO (XEXP (link, 0))];
		    i++;
		  }
#endif /* 0 */

	      /* Mark any registers clobbered by INSN as live,
		 so they conflict with the inputs.  */

	      note_stores (PATTERN (insn), mark_reg_clobber, NULL);

	      /* Mark any registers dead after INSN as dead now.  */

	      for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
		if (REG_NOTE_KIND (link) == REG_DEAD)
		  mark_reg_death (XEXP (link, 0));

	      /* Mark any registers set in INSN as live,
		 and mark them as conflicting with all other live regs.
		 Clobbers are processed again, so they conflict with
		 the registers that are set.  */

	      note_stores (PATTERN (insn), mark_reg_store, NULL);

#ifdef AUTO_INC_DEC
	      for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
		if (REG_NOTE_KIND (link) == REG_INC)
		  mark_reg_store (XEXP (link, 0), NULL_RTX, NULL);
#endif

	      /* If INSN has multiple outputs, then any reg that dies here
		 and is used inside of an output
		 must conflict with the other outputs.

		 It is unsafe to use !single_set here since it will ignore an
		 unused output.  Just because an output is unused does not mean
		 the compiler can assume the side effect will not occur.
		 Consider if REG appears in the address of an output and we
		 reload the output.  If we allocate REG to the same hard
		 register as an unused output we could set the hard register
		 before the output reload insn.  */
	      if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
		for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
		  if (REG_NOTE_KIND (link) == REG_DEAD)
		    {
		      int used_in_output = 0;
		      int i;
		      rtx reg = XEXP (link, 0);

		      for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
			{
			  rtx set = XVECEXP (PATTERN (insn), 0, i);
			  if (GET_CODE (set) == SET
			      && !REG_P (SET_DEST (set))
			      && !rtx_equal_p (reg, SET_DEST (set))
			      && reg_overlap_mentioned_p (reg, SET_DEST (set)))
			    used_in_output = 1;
			}
		      if (used_in_output)
			mark_reg_conflicts (reg);
		    }

	      /* Mark any registers set in INSN and then never used.  */

	      while (n_regs_set-- > 0)
		{
		  rtx note = find_regno_note (insn, REG_UNUSED,
					      REGNO (regs_set[n_regs_set]));
		  if (note)
		    mark_reg_death (XEXP (note, 0));
		}
	    }

	  if (insn == BB_END (b))
	    break;
	  insn = NEXT_INSN (insn);
	}
    }

  /* Clean up.  */
  free (block_start_allocnos);
  free (regs_set);
}
891 /* Expand the preference information by looking for cases where one allocno
892 dies in an insn that sets an allocno. If those two allocnos don't conflict,
893 merge any preferences between those allocnos. */
894
895 static void
896 expand_preferences (void)
897 {
898 rtx insn;
899 rtx link;
900 rtx set;
901
902 /* We only try to handle the most common cases here. Most of the cases
903 where this wins are reg-reg copies. */
904
905 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
906 if (INSN_P (insn)
907 && (set = single_set (insn)) != 0
908 && REG_P (SET_DEST (set))
909 && reg_allocno[REGNO (SET_DEST (set))] >= 0)
910 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
911 if (REG_NOTE_KIND (link) == REG_DEAD
912 && REG_P (XEXP (link, 0))
913 && reg_allocno[REGNO (XEXP (link, 0))] >= 0
914 && ! CONFLICTP (reg_allocno[REGNO (SET_DEST (set))],
915 reg_allocno[REGNO (XEXP (link, 0))]))
916 {
917 int a1 = reg_allocno[REGNO (SET_DEST (set))];
918 int a2 = reg_allocno[REGNO (XEXP (link, 0))];
919
920 if (XEXP (link, 0) == SET_SRC (set))
921 {
922 IOR_HARD_REG_SET (allocno[a1].hard_reg_copy_preferences,
923 allocno[a2].hard_reg_copy_preferences);
924 IOR_HARD_REG_SET (allocno[a2].hard_reg_copy_preferences,
925 allocno[a1].hard_reg_copy_preferences);
926 }
927
928 IOR_HARD_REG_SET (allocno[a1].hard_reg_preferences,
929 allocno[a2].hard_reg_preferences);
930 IOR_HARD_REG_SET (allocno[a2].hard_reg_preferences,
931 allocno[a1].hard_reg_preferences);
932 IOR_HARD_REG_SET (allocno[a1].hard_reg_full_preferences,
933 allocno[a2].hard_reg_full_preferences);
934 IOR_HARD_REG_SET (allocno[a2].hard_reg_full_preferences,
935 allocno[a1].hard_reg_full_preferences);
936 }
937 }
938 \f
/* Prune the preferences for global registers to exclude registers that cannot
   be used.

   Compute `regs_someone_prefers', which is a bitmask of the hard registers
   that are preferred by conflicting registers of lower priority.  If possible,
   we will avoid using these registers.  */

static void
prune_preferences (void)
{
  int i;
  int num;
  /* Maps an allocno number back to its index in allocno_order, so the
     second loop can compare two allocnos' priorities directly.  */
  int *allocno_to_order = XNEWVEC (int, max_allocno);

  /* Scan least most important to most important.
     For each allocno, remove from preferences registers that cannot be used,
     either because of conflicts or register type.  Then compute all registers
     preferred by each lower-priority register that conflicts.  */

  for (i = max_allocno - 1; i >= 0; i--)
    {
      /* TEMP accumulates every hard reg this allocno must not use.  */
      HARD_REG_SET temp;

      num = allocno_order[i];
      allocno_to_order[num] = i;
      COPY_HARD_REG_SET (temp, allocno[num].hard_reg_conflicts);

      if (allocno[num].calls_crossed == 0)
	IOR_HARD_REG_SET (temp, fixed_reg_set);
      else
	IOR_HARD_REG_SET (temp, call_used_reg_set);

      /* Also exclude every register outside the preferred class.  */
      IOR_COMPL_HARD_REG_SET
	(temp,
	 reg_class_contents[(int) reg_preferred_class (allocno[num].reg)]);

      /* Strip the unusable registers from all three preference sets.  */
      AND_COMPL_HARD_REG_SET (allocno[num].hard_reg_preferences, temp);
      AND_COMPL_HARD_REG_SET (allocno[num].hard_reg_copy_preferences, temp);
      AND_COMPL_HARD_REG_SET (allocno[num].hard_reg_full_preferences, temp);
    }

  for (i = max_allocno - 1; i >= 0; i--)
    {
      /* Merge in the preferences of lower-priority registers (they have
	 already been pruned).  If we also prefer some of those registers,
	 don't exclude them unless we are of a smaller size (in which case
	 we want to give the lower-priority allocno the first chance for
	 these registers).  */
      HARD_REG_SET temp, temp2;
      int allocno2;

      num = allocno_order[i];

      CLEAR_HARD_REG_SET (temp);
      CLEAR_HARD_REG_SET (temp2);

      /* Walk NUM's conflict row; ALLOCNO2 ranges over the allocnos that
	 conflict with it.  TEMP collects preferences of same-or-smaller
	 conflicting allocnos, TEMP2 those of larger ones.  */
      EXECUTE_IF_SET_IN_ALLOCNO_SET (conflicts + num * allocno_row_words,
				     allocno2,
	{
	  if (allocno_to_order[allocno2] > i)
	    {
	      if (allocno[allocno2].size <= allocno[num].size)
		IOR_HARD_REG_SET (temp,
				  allocno[allocno2].hard_reg_full_preferences);
	      else
		IOR_HARD_REG_SET (temp2,
				  allocno[allocno2].hard_reg_full_preferences);
	    }
	});

      /* Registers NUM itself fully prefers stay available in the
	 same-size case; larger allocnos' preferences are avoided
	 unconditionally.  */
      AND_COMPL_HARD_REG_SET (temp, allocno[num].hard_reg_full_preferences);
      IOR_HARD_REG_SET (temp, temp2);
      COPY_HARD_REG_SET (allocno[num].regs_someone_prefers, temp);
    }
  free (allocno_to_order);
}
1015 \f
/* Assign a hard register to allocno NUM; look for one that is the beginning
   of a long enough stretch of hard regs none of which conflicts with ALLOCNO.
   The registers in NUM's preference sets (hard_reg_copy_preferences and
   hard_reg_preferences) are tried first.

   LOSERS, if nonzero, is a HARD_REG_SET indicating registers that cannot
   be used for this allocation.

   If ALT_REGS_P is zero, consider only the preferred class of ALLOCNO's reg.
   Otherwise ignore that preferred class and use the alternate class.

   If ACCEPT_CALL_CLOBBERED is nonzero, accept a call-clobbered hard reg that
   will have to be saved and restored at calls.

   RETRYING is nonzero if this is called from retry_global_alloc.

   If we find one, record it in reg_renumber.
   If not, do nothing.  */

static void
find_reg (int num, HARD_REG_SET losers, int alt_regs_p, int accept_call_clobbered, int retrying)
{
  int i, best_reg, pass;
  /* USED1: registers unusable for NUM (wrong class, fixed, losers,
     conflicts, ...).  USED2: like USED1 but without NUM's conflicts;
     consulted by the "kick out local-alloc" fallback below.  USED: the
     working exclusion set for the current scanning pass.  */
  HARD_REG_SET used, used1, used2;

  enum reg_class class = (alt_regs_p
			  ? reg_alternate_class (allocno[num].reg)
			  : reg_preferred_class (allocno[num].reg));
  enum machine_mode mode = PSEUDO_REGNO_MODE (allocno[num].reg);

  /* Seed USED1 with the registers NUM can never live in.  */
  if (accept_call_clobbered)
    COPY_HARD_REG_SET (used1, call_fixed_reg_set);
  else if (allocno[num].calls_crossed == 0)
    COPY_HARD_REG_SET (used1, fixed_reg_set);
  else
    COPY_HARD_REG_SET (used1, call_used_reg_set);

  /* Some registers should not be allocated in global-alloc.  */
  IOR_HARD_REG_SET (used1, no_global_alloc_regs);
  if (losers)
    IOR_HARD_REG_SET (used1, losers);

  /* Exclude everything outside the chosen register class.  */
  IOR_COMPL_HARD_REG_SET (used1, reg_class_contents[(int) class]);
  COPY_HARD_REG_SET (used2, used1);

  IOR_HARD_REG_SET (used1, allocno[num].hard_reg_conflicts);

#ifdef CANNOT_CHANGE_MODE_CLASS
  cannot_change_mode_set_regs (&used1, mode, allocno[num].reg);
#endif

  /* Try each hard reg to see if it fits.  Do this in two passes.
     In the first pass, skip registers that are preferred by some other pseudo
     to give it a better chance of getting one of those registers.  Only if
     we can't get a register when excluding those do we take one of them.
     However, we never allocate a register for the first time in pass 0.  */

  COPY_HARD_REG_SET (used, used1);
  IOR_COMPL_HARD_REG_SET (used, regs_used_so_far);
  IOR_HARD_REG_SET (used, allocno[num].regs_someone_prefers);

  best_reg = -1;
  /* The condition I >= FIRST_PSEUDO_REGISTER means "previous pass found
     nothing": the inner loop only leaves I below that bound via break.  */
  for (i = FIRST_PSEUDO_REGISTER, pass = 0;
       pass <= 1 && i >= FIRST_PSEUDO_REGISTER;
       pass++)
    {
      if (pass == 1)
	COPY_HARD_REG_SET (used, used1);
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	{
#ifdef REG_ALLOC_ORDER
	  int regno = reg_alloc_order[i];
#else
	  int regno = i;
#endif
	  if (! TEST_HARD_REG_BIT (used, regno)
	      && HARD_REGNO_MODE_OK (regno, mode)
	      && (allocno[num].calls_crossed == 0
		  || accept_call_clobbered
		  || ! HARD_REGNO_CALL_PART_CLOBBERED (regno, mode)))
	    {
	      int j;
	      int lim = regno + hard_regno_nregs[regno][mode];
	      /* A multi-register value needs every reg of its stretch
		 free, not just the first.  */
	      for (j = regno + 1;
		   (j < lim
		    && ! TEST_HARD_REG_BIT (used, j));
		   j++);
	      if (j == lim)
		{
		  best_reg = regno;
		  break;
		}
#ifndef REG_ALLOC_ORDER
	      i = j;			/* Skip starting points we know will lose */
#endif
	    }
	}
    }

  /* See if there is a preferred register with the same class as the register
     we allocated above.  Making this restriction prevents register
     preferencing from creating worse register allocation.

     Remove from the preferred registers and conflicting registers.  Note that
     additional conflicts may have been added after `prune_preferences' was
     called.

     First do this for those register with copy preferences, then all
     preferred registers.  */

  AND_COMPL_HARD_REG_SET (allocno[num].hard_reg_copy_preferences, used);
  GO_IF_HARD_REG_SUBSET (allocno[num].hard_reg_copy_preferences,
			 reg_class_contents[(int) NO_REGS], no_copy_prefs);

  if (best_reg >= 0)
    {
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	if (TEST_HARD_REG_BIT (allocno[num].hard_reg_copy_preferences, i)
	    && HARD_REGNO_MODE_OK (i, mode)
	    && (allocno[num].calls_crossed == 0
		|| accept_call_clobbered
		|| ! HARD_REGNO_CALL_PART_CLOBBERED (i, mode))
	    && (REGNO_REG_CLASS (i) == REGNO_REG_CLASS (best_reg)
		|| reg_class_subset_p (REGNO_REG_CLASS (i),
				       REGNO_REG_CLASS (best_reg))
		|| reg_class_subset_p (REGNO_REG_CLASS (best_reg),
				       REGNO_REG_CLASS (i))))
	  {
	    int j;
	    int lim = i + hard_regno_nregs[i][mode];
	    /* Every reg of the candidate stretch must be free and
	       class-compatible with the corresponding reg of BEST_REG's
	       stretch.  */
	    for (j = i + 1;
		 (j < lim
		  && ! TEST_HARD_REG_BIT (used, j)
		  && (REGNO_REG_CLASS (j)
		      == REGNO_REG_CLASS (best_reg + (j - i))
		      || reg_class_subset_p (REGNO_REG_CLASS (j),
					     REGNO_REG_CLASS (best_reg + (j - i)))
		      || reg_class_subset_p (REGNO_REG_CLASS (best_reg + (j - i)),
					     REGNO_REG_CLASS (j))));
		 j++);
	    if (j == lim)
	      {
		best_reg = i;
		/* A copy preference is the strongest kind; skip the
		   plain-preference scan below.  */
		goto no_prefs;
	      }
	  }
    }
no_copy_prefs:

  AND_COMPL_HARD_REG_SET (allocno[num].hard_reg_preferences, used);
  GO_IF_HARD_REG_SUBSET (allocno[num].hard_reg_preferences,
			 reg_class_contents[(int) NO_REGS], no_prefs);

  if (best_reg >= 0)
    {
      /* Same scan as above, but over the plain preference set.  */
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	if (TEST_HARD_REG_BIT (allocno[num].hard_reg_preferences, i)
	    && HARD_REGNO_MODE_OK (i, mode)
	    && (allocno[num].calls_crossed == 0
		|| accept_call_clobbered
		|| ! HARD_REGNO_CALL_PART_CLOBBERED (i, mode))
	    && (REGNO_REG_CLASS (i) == REGNO_REG_CLASS (best_reg)
		|| reg_class_subset_p (REGNO_REG_CLASS (i),
				       REGNO_REG_CLASS (best_reg))
		|| reg_class_subset_p (REGNO_REG_CLASS (best_reg),
				       REGNO_REG_CLASS (i))))
	  {
	    int j;
	    int lim = i + hard_regno_nregs[i][mode];
	    for (j = i + 1;
		 (j < lim
		  && ! TEST_HARD_REG_BIT (used, j)
		  && (REGNO_REG_CLASS (j)
		      == REGNO_REG_CLASS (best_reg + (j - i))
		      || reg_class_subset_p (REGNO_REG_CLASS (j),
					     REGNO_REG_CLASS (best_reg + (j - i)))
		      || reg_class_subset_p (REGNO_REG_CLASS (best_reg + (j - i)),
					     REGNO_REG_CLASS (j))));
		 j++);
	    if (j == lim)
	      {
		best_reg = i;
		break;
	      }
	  }
    }
no_prefs:

  /* If we haven't succeeded yet, try with caller-saves.
     We need not check to see if the current function has nonlocal
     labels because we don't put any pseudos that are live over calls in
     registers in that case.  */

  if (flag_caller_saves && best_reg < 0)
    {
      /* Did not find a register.  If it would be profitable to
	 allocate a call-clobbered register and save and restore it
	 around calls, do that.  Don't do this if it crosses any calls
	 that might throw.  */
      if (! accept_call_clobbered
	  && allocno[num].calls_crossed != 0
	  && allocno[num].throwing_calls_crossed == 0
	  && CALLER_SAVE_PROFITABLE (allocno[num].n_refs,
				     allocno[num].calls_crossed))
	{
	  HARD_REG_SET new_losers;
	  if (! losers)
	    CLEAR_HARD_REG_SET (new_losers);
	  else
	    COPY_HARD_REG_SET (new_losers, losers);

	  IOR_HARD_REG_SET(new_losers, losing_caller_save_reg_set);
	  /* Recurse, this time accepting call-clobbered registers.  */
	  find_reg (num, new_losers, alt_regs_p, 1, retrying);
	  if (reg_renumber[allocno[num].reg] >= 0)
	    {
	      caller_save_needed = 1;
	      return;
	    }
	}
    }

  /* If we haven't succeeded yet,
     see if some hard reg that conflicts with us
     was utilized poorly by local-alloc.
     If so, kick out the regs that were put there by local-alloc
     so we can use it instead.  */
  if (best_reg < 0 && !retrying
      /* Let's not bother with multi-reg allocnos.  */
      && allocno[num].size == 1
      && REG_BASIC_BLOCK (allocno[num].reg) == REG_BLOCK_GLOBAL)
    {
      /* Count from the end, to find the least-used ones first.  */
      for (i = FIRST_PSEUDO_REGISTER - 1; i >= 0; i--)
	{
#ifdef REG_ALLOC_ORDER
	  int regno = reg_alloc_order[i];
#else
	  int regno = i;
#endif

	  if (local_reg_n_refs[regno] != 0
	      /* Don't use a reg no good for this pseudo.  */
	      && ! TEST_HARD_REG_BIT (used2, regno)
	      && HARD_REGNO_MODE_OK (regno, mode)
	      /* The code below assumes that we need only a single
		 register, but the check of allocno[num].size above
		 was not enough.  Sometimes we need more than one
		 register for a single-word value.  */
	      && hard_regno_nregs[regno][mode] == 1
	      && (allocno[num].calls_crossed == 0
		  || accept_call_clobbered
		  || ! HARD_REGNO_CALL_PART_CLOBBERED (regno, mode))
#ifdef CANNOT_CHANGE_MODE_CLASS
	      && ! invalid_mode_change_p (regno, REGNO_REG_CLASS (regno),
					  mode)
#endif
#ifdef STACK_REGS
	      && (!allocno[num].no_stack_reg
		  || regno < FIRST_STACK_REG || regno > LAST_STACK_REG)
#endif
	      )
	    {
	      /* We explicitly evaluate the divide results into temporary
		 variables so as to avoid excess precision problems that occur
		 on an i386-unknown-sysv4.2 (unixware) host.  */

	      double tmp1 = ((double) local_reg_freq[regno] * local_reg_n_refs[regno]
			     / local_reg_live_length[regno]);
	      double tmp2 = ((double) allocno[num].freq * allocno[num].n_refs
			     / allocno[num].live_length);

	      if (tmp1 < tmp2)
		{
		  /* Hard reg REGNO was used less in total by local regs
		     than it would be used by this one allocno!  */
		  int k;
		  if (dump_file)
		    {
		      fprintf (dump_file, "Regno %d better for global %d, ",
			       regno, allocno[num].reg);
		      fprintf (dump_file, "fr:%d, ll:%d, nr:%d ",
			       allocno[num].freq, allocno[num].live_length,
			       allocno[num].n_refs);
		      fprintf (dump_file, "(was: fr:%d, ll:%d, nr:%d)\n",
			       local_reg_freq[regno],
			       local_reg_live_length[regno],
			       local_reg_n_refs[regno]);
		    }

		  /* Evict every local pseudo whose assignment overlaps
		     REGNO; it goes back to the stack.  */
		  for (k = 0; k < max_regno; k++)
		    if (reg_renumber[k] >= 0)
		      {
			int r = reg_renumber[k];
			int endregno
			  = r + hard_regno_nregs[r][PSEUDO_REGNO_MODE (k)];

			if (regno >= r && regno < endregno)
			  {
			    if (dump_file)
			      fprintf (dump_file,
				       "Local Reg %d now on stack\n", k);
			    reg_renumber[k] = -1;
			  }
		      }

		  best_reg = regno;
		  break;
		}
	    }
	}
    }

  /* Did we find a register?  */

  if (best_reg >= 0)
    {
      int lim, j;
      HARD_REG_SET this_reg;

      /* Yes.  Record it as the hard register of this pseudo-reg.  */
      reg_renumber[allocno[num].reg] = best_reg;
      /* Also of any pseudo-regs that share with it.  */
      if (reg_may_share[allocno[num].reg])
	for (j = FIRST_PSEUDO_REGISTER; j < max_regno; j++)
	  if (reg_allocno[j] == num)
	    reg_renumber[j] = best_reg;

      /* Make a set of the hard regs being allocated.  */
      CLEAR_HARD_REG_SET (this_reg);
      lim = best_reg + hard_regno_nregs[best_reg][mode];
      for (j = best_reg; j < lim; j++)
	{
	  SET_HARD_REG_BIT (this_reg, j);
	  SET_HARD_REG_BIT (regs_used_so_far, j);
	  /* This is no longer a reg used just by local regs.  */
	  local_reg_n_refs[j] = 0;
	  local_reg_freq[j] = 0;
	}
      /* For each other pseudo-reg conflicting with this one,
	 mark it as conflicting with the hard regs this one occupies.  */
      lim = num;
      EXECUTE_IF_SET_IN_ALLOCNO_SET (conflicts + lim * allocno_row_words, j,
	{
	  IOR_HARD_REG_SET (allocno[j].hard_reg_conflicts, this_reg);
	});
    }
}
1362 \f
1363 /* Called from `reload' to look for a hard reg to put pseudo reg REGNO in.
1364 Perhaps it had previously seemed not worth a hard reg,
1365 or perhaps its old hard reg has been commandeered for reloads.
1366 FORBIDDEN_REGS indicates certain hard regs that may not be used, even if
1367 they do not appear to be allocated.
1368 If FORBIDDEN_REGS is zero, no regs are forbidden. */
1369
1370 void
1371 retry_global_alloc (int regno, HARD_REG_SET forbidden_regs)
1372 {
1373 int alloc_no = reg_allocno[regno];
1374 if (alloc_no >= 0)
1375 {
1376 /* If we have more than one register class,
1377 first try allocating in the class that is cheapest
1378 for this pseudo-reg. If that fails, try any reg. */
1379 if (N_REG_CLASSES > 1)
1380 find_reg (alloc_no, forbidden_regs, 0, 0, 1);
1381 if (reg_renumber[regno] < 0
1382 && reg_alternate_class (regno) != NO_REGS)
1383 find_reg (alloc_no, forbidden_regs, 1, 0, 1);
1384
1385 /* If we found a register, modify the RTL for the register to
1386 show the hard register, and mark that register live. */
1387 if (reg_renumber[regno] >= 0)
1388 {
1389 REGNO (regno_reg_rtx[regno]) = reg_renumber[regno];
1390 mark_home_live (regno);
1391 }
1392 }
1393 }
1394 \f
1395 /* Record a conflict between register REGNO
1396 and everything currently live.
1397 REGNO must not be a pseudo reg that was allocated
1398 by local_alloc; such numbers must be translated through
1399 reg_renumber before calling here. */
1400
1401 static void
1402 record_one_conflict (int regno)
1403 {
1404 int j;
1405
1406 if (regno < FIRST_PSEUDO_REGISTER)
1407 /* When a hard register becomes live,
1408 record conflicts with live pseudo regs. */
1409 EXECUTE_IF_SET_IN_ALLOCNO_SET (allocnos_live, j,
1410 {
1411 SET_HARD_REG_BIT (allocno[j].hard_reg_conflicts, regno);
1412 });
1413 else
1414 /* When a pseudo-register becomes live,
1415 record conflicts first with hard regs,
1416 then with other pseudo regs. */
1417 {
1418 int ialloc = reg_allocno[regno];
1419 int ialloc_prod = ialloc * allocno_row_words;
1420
1421 IOR_HARD_REG_SET (allocno[ialloc].hard_reg_conflicts, hard_regs_live);
1422 for (j = allocno_row_words - 1; j >= 0; j--)
1423 conflicts[ialloc_prod + j] |= allocnos_live[j];
1424 }
1425 }
1426
1427 /* Record all allocnos currently live as conflicting
1428 with all hard regs currently live.
1429
1430 ALLOCNO_VEC is a vector of LEN allocnos, all allocnos that
1431 are currently live. Their bits are also flagged in allocnos_live. */
1432
1433 static void
1434 record_conflicts (int *allocno_vec, int len)
1435 {
1436 while (--len >= 0)
1437 IOR_HARD_REG_SET (allocno[allocno_vec[len]].hard_reg_conflicts,
1438 hard_regs_live);
1439 }
1440
/* If CONFLICTP (i, j) is true, make sure CONFLICTP (j, i) is also true,
   i.e. symmetrize the conflicts bit matrix in place.  */
static void
mirror_conflicts (void)
{
  int i, j;
  int rw = allocno_row_words;	/* Words per matrix row.  */
  int rwb = rw * INT_BITS;	/* Bits per row (row stride in bits).  */
  INT_TYPE *p = conflicts;	/* Scans the whole matrix word by word.  */
  INT_TYPE *q0 = conflicts, *q1, *q2;
  unsigned INT_TYPE mask;	/* Bit position of the current row's
				   allocno within a conflict word.  */

  /* Outer loop: one iteration per allocno (row).  MASK/Q0 track the
     word and bit that represent this allocno inside any other row,
     advancing Q0 one word each time MASK wraps to zero.  */
  for (i = max_allocno - 1, mask = 1; i >= 0; i--, mask <<= 1)
    {
      if (! mask)
	{
	  mask = 1;
	  q0++;
	}
      /* Walk the words of this row; Q1 tracks the transposed position.  */
      for (j = allocno_row_words - 1, q1 = q0; j >= 0; j--, q1 += rwb)
	{
	  unsigned INT_TYPE word;

	  /* For every bit set in WORD, set the mirrored bit: Q2 advances
	     one full row per bit examined, landing on the row of the
	     allocno that bit denotes; *q2 |= mask records the reverse
	     conflict.  */
	  for (word = (unsigned INT_TYPE) *p++, q2 = q1; word;
	       word >>= 1, q2 += rw)
	    {
	      if (word & 1)
		*q2 |= mask;
	    }
	}
    }
}
1472 \f
1473 /* Handle the case where REG is set by the insn being scanned,
1474 during the forward scan to accumulate conflicts.
1475 Store a 1 in regs_live or allocnos_live for this register, record how many
1476 consecutive hardware registers it actually needs,
1477 and record a conflict with all other registers already live.
1478
1479 Note that even if REG does not remain alive after this insn,
1480 we must mark it here as live, to ensure a conflict between
1481 REG and any other regs set in this insn that really do live.
1482 This is because those other regs could be considered after this.
1483
1484 REG might actually be something other than a register;
1485 if so, we do nothing.
1486
1487 SETTER is 0 if this register was modified by an auto-increment (i.e.,
1488 a REG_INC note was found for it). */
1489
1490 static void
1491 mark_reg_store (rtx reg, rtx setter, void *data ATTRIBUTE_UNUSED)
1492 {
1493 int regno;
1494
1495 if (GET_CODE (reg) == SUBREG)
1496 reg = SUBREG_REG (reg);
1497
1498 if (!REG_P (reg))
1499 return;
1500
1501 regs_set[n_regs_set++] = reg;
1502
1503 if (setter && GET_CODE (setter) != CLOBBER)
1504 set_preference (reg, SET_SRC (setter));
1505
1506 regno = REGNO (reg);
1507
1508 /* Either this is one of the max_allocno pseudo regs not allocated,
1509 or it is or has a hardware reg. First handle the pseudo-regs. */
1510 if (regno >= FIRST_PSEUDO_REGISTER)
1511 {
1512 if (reg_allocno[regno] >= 0)
1513 {
1514 SET_ALLOCNO_LIVE (reg_allocno[regno]);
1515 record_one_conflict (regno);
1516 }
1517 }
1518
1519 if (reg_renumber[regno] >= 0)
1520 regno = reg_renumber[regno];
1521
1522 /* Handle hardware regs (and pseudos allocated to hard regs). */
1523 if (regno < FIRST_PSEUDO_REGISTER && ! fixed_regs[regno])
1524 {
1525 int last = regno + hard_regno_nregs[regno][GET_MODE (reg)];
1526 while (regno < last)
1527 {
1528 record_one_conflict (regno);
1529 SET_HARD_REG_BIT (hard_regs_live, regno);
1530 regno++;
1531 }
1532 }
1533 }
1534 \f
1535 /* Like mark_reg_store except notice just CLOBBERs; ignore SETs. */
1536
1537 static void
1538 mark_reg_clobber (rtx reg, rtx setter, void *data)
1539 {
1540 if (GET_CODE (setter) == CLOBBER)
1541 mark_reg_store (reg, setter, data);
1542 }
1543
1544 /* Record that REG has conflicts with all the regs currently live.
1545 Do not mark REG itself as live. */
1546
1547 static void
1548 mark_reg_conflicts (rtx reg)
1549 {
1550 int regno;
1551
1552 if (GET_CODE (reg) == SUBREG)
1553 reg = SUBREG_REG (reg);
1554
1555 if (!REG_P (reg))
1556 return;
1557
1558 regno = REGNO (reg);
1559
1560 /* Either this is one of the max_allocno pseudo regs not allocated,
1561 or it is or has a hardware reg. First handle the pseudo-regs. */
1562 if (regno >= FIRST_PSEUDO_REGISTER)
1563 {
1564 if (reg_allocno[regno] >= 0)
1565 record_one_conflict (regno);
1566 }
1567
1568 if (reg_renumber[regno] >= 0)
1569 regno = reg_renumber[regno];
1570
1571 /* Handle hardware regs (and pseudos allocated to hard regs). */
1572 if (regno < FIRST_PSEUDO_REGISTER && ! fixed_regs[regno])
1573 {
1574 int last = regno + hard_regno_nregs[regno][GET_MODE (reg)];
1575 while (regno < last)
1576 {
1577 record_one_conflict (regno);
1578 regno++;
1579 }
1580 }
1581 }
1582 \f
1583 /* Mark REG as being dead (following the insn being scanned now).
1584 Store a 0 in regs_live or allocnos_live for this register. */
1585
1586 static void
1587 mark_reg_death (rtx reg)
1588 {
1589 int regno = REGNO (reg);
1590
1591 /* Either this is one of the max_allocno pseudo regs not allocated,
1592 or it is a hardware reg. First handle the pseudo-regs. */
1593 if (regno >= FIRST_PSEUDO_REGISTER)
1594 {
1595 if (reg_allocno[regno] >= 0)
1596 CLEAR_ALLOCNO_LIVE (reg_allocno[regno]);
1597 }
1598
1599 /* For pseudo reg, see if it has been assigned a hardware reg. */
1600 if (reg_renumber[regno] >= 0)
1601 regno = reg_renumber[regno];
1602
1603 /* Handle hardware regs (and pseudos allocated to hard regs). */
1604 if (regno < FIRST_PSEUDO_REGISTER && ! fixed_regs[regno])
1605 {
1606 /* Pseudo regs already assigned hardware regs are treated
1607 almost the same as explicit hardware regs. */
1608 int last = regno + hard_regno_nregs[regno][GET_MODE (reg)];
1609 while (regno < last)
1610 {
1611 CLEAR_HARD_REG_BIT (hard_regs_live, regno);
1612 regno++;
1613 }
1614 }
1615 }
1616
1617 /* Mark hard reg REGNO as currently live, assuming machine mode MODE
1618 for the value stored in it. MODE determines how many consecutive
1619 registers are actually in use. Do not record conflicts;
1620 it is assumed that the caller will do that. */
1621
1622 static void
1623 mark_reg_live_nc (int regno, enum machine_mode mode)
1624 {
1625 int last = regno + hard_regno_nregs[regno][mode];
1626 while (regno < last)
1627 {
1628 SET_HARD_REG_BIT (hard_regs_live, regno);
1629 regno++;
1630 }
1631 }
1632 \f
/* Try to set a preference for an allocno to a hard register.
   We are passed DEST and SRC which are the operands of a SET.  It is known
   that SRC is a register.  If SRC or the first operand of SRC is a register,
   try to set a preference.  If one of the two is a hard register and the other
   is a pseudo-register, mark the preference.

   Note that we are not as aggressive as local-alloc in trying to tie a
   pseudo-register to a hard register.  */

static void
set_preference (rtx dest, rtx src)
{
  unsigned int src_regno, dest_regno;
  /* Amount to add to the hard regno for SRC, or subtract from that for DEST,
     to compensate for subregs in SRC or DEST.  */
  int offset = 0;
  unsigned int i;
  int copy = 1;		/* Nonzero while DEST receives SRC's value as-is.  */

  /* If SRC's first operand is an expression (format code 'e'), look
     through to that operand; the SET is then no longer a plain copy.  */
  if (GET_RTX_FORMAT (GET_CODE (src))[0] == 'e')
    src = XEXP (src, 0), copy = 0;

  /* Get the reg number for both SRC and DEST.
     If neither is a reg, give up.  */

  if (REG_P (src))
    src_regno = REGNO (src);
  else if (GET_CODE (src) == SUBREG && REG_P (SUBREG_REG (src)))
    {
      src_regno = REGNO (SUBREG_REG (src));

      /* For a hard-reg subreg, translate the byte offset into a hard
	 register offset; for a pseudo, count in natural-size words.  */
      if (REGNO (SUBREG_REG (src)) < FIRST_PSEUDO_REGISTER)
	offset += subreg_regno_offset (REGNO (SUBREG_REG (src)),
				       GET_MODE (SUBREG_REG (src)),
				       SUBREG_BYTE (src),
				       GET_MODE (src));
      else
	offset += (SUBREG_BYTE (src)
		   / REGMODE_NATURAL_SIZE (GET_MODE (src)));
    }
  else
    return;

  if (REG_P (dest))
    dest_regno = REGNO (dest);
  else if (GET_CODE (dest) == SUBREG && REG_P (SUBREG_REG (dest)))
    {
      dest_regno = REGNO (SUBREG_REG (dest));

      if (REGNO (SUBREG_REG (dest)) < FIRST_PSEUDO_REGISTER)
	offset -= subreg_regno_offset (REGNO (SUBREG_REG (dest)),
				       GET_MODE (SUBREG_REG (dest)),
				       SUBREG_BYTE (dest),
				       GET_MODE (dest));
      else
	offset -= (SUBREG_BYTE (dest)
		   / REGMODE_NATURAL_SIZE (GET_MODE (dest)));
    }
  else
    return;

  /* Convert either or both to hard reg numbers.  */

  if (reg_renumber[src_regno] >= 0)
    src_regno = reg_renumber[src_regno];

  if (reg_renumber[dest_regno] >= 0)
    dest_regno = reg_renumber[dest_regno];

  /* Now if one is a hard reg and the other is a global pseudo
     then give the other a preference.  */

  if (dest_regno < FIRST_PSEUDO_REGISTER && src_regno >= FIRST_PSEUDO_REGISTER
      && reg_allocno[src_regno] >= 0)
    {
      dest_regno -= offset;
      /* DEST_REGNO is unsigned, so an offset adjustment that leaves the
	 hard register file wraps around and fails this check.  */
      if (dest_regno < FIRST_PSEUDO_REGISTER)
	{
	  if (copy)
	    SET_REGBIT (hard_reg_copy_preferences,
		        reg_allocno[src_regno], dest_regno);

	  SET_REGBIT (hard_reg_preferences,
		      reg_allocno[src_regno], dest_regno);
	  /* Record the full stretch of hard regs DEST occupies.  */
	  for (i = dest_regno;
	       i < dest_regno + hard_regno_nregs[dest_regno][GET_MODE (dest)];
	       i++)
	    SET_REGBIT (hard_reg_full_preferences, reg_allocno[src_regno], i);
	}
    }

  /* Mirror case: SRC is the hard reg and DEST is the global pseudo.  */
  if (src_regno < FIRST_PSEUDO_REGISTER && dest_regno >= FIRST_PSEUDO_REGISTER
      && reg_allocno[dest_regno] >= 0)
    {
      src_regno += offset;
      if (src_regno < FIRST_PSEUDO_REGISTER)
	{
	  if (copy)
	    SET_REGBIT (hard_reg_copy_preferences,
			reg_allocno[dest_regno], src_regno);

	  SET_REGBIT (hard_reg_preferences,
		      reg_allocno[dest_regno], src_regno);
	  for (i = src_regno;
	       i < src_regno + hard_regno_nregs[src_regno][GET_MODE (src)];
	       i++)
	    SET_REGBIT (hard_reg_full_preferences, reg_allocno[dest_regno], i);
	}
    }
}
1743 \f
1744 /* Indicate that hard register number FROM was eliminated and replaced with
1745 an offset from hard register number TO. The status of hard registers live
1746 at the start of a basic block is updated by replacing a use of FROM with
1747 a use of TO. */
1748
1749 void
1750 mark_elimination (int from, int to)
1751 {
1752 basic_block bb;
1753
1754 FOR_EACH_BB (bb)
1755 {
1756 regset r = bb->il.rtl->global_live_at_start;
1757 if (REGNO_REG_SET_P (r, from))
1758 {
1759 CLEAR_REGNO_REG_SET (r, from);
1760 SET_REGNO_REG_SET (r, to);
1761 }
1762 }
1763 }
1764 \f
/* Used for communication between the following functions.  Holds the
   current life information while build_insn_chain scans the insns:
   the set of registers considered live at the insn being processed.  */
static regset live_relevant_regs;
1768
1769 /* Record in live_relevant_regs and REGS_SET that register REG became live.
1770 This is called via note_stores. */
1771 static void
1772 reg_becomes_live (rtx reg, rtx setter ATTRIBUTE_UNUSED, void *regs_set)
1773 {
1774 int regno;
1775
1776 if (GET_CODE (reg) == SUBREG)
1777 reg = SUBREG_REG (reg);
1778
1779 if (!REG_P (reg))
1780 return;
1781
1782 regno = REGNO (reg);
1783 if (regno < FIRST_PSEUDO_REGISTER)
1784 {
1785 int nregs = hard_regno_nregs[regno][GET_MODE (reg)];
1786 while (nregs-- > 0)
1787 {
1788 SET_REGNO_REG_SET (live_relevant_regs, regno);
1789 if (! fixed_regs[regno])
1790 SET_REGNO_REG_SET ((regset) regs_set, regno);
1791 regno++;
1792 }
1793 }
1794 else if (reg_renumber[regno] >= 0)
1795 {
1796 SET_REGNO_REG_SET (live_relevant_regs, regno);
1797 SET_REGNO_REG_SET ((regset) regs_set, regno);
1798 }
1799 }
1800
1801 /* Record in live_relevant_regs that register REGNO died. */
1802 static void
1803 reg_dies (int regno, enum machine_mode mode, struct insn_chain *chain)
1804 {
1805 if (regno < FIRST_PSEUDO_REGISTER)
1806 {
1807 int nregs = hard_regno_nregs[regno][mode];
1808 while (nregs-- > 0)
1809 {
1810 CLEAR_REGNO_REG_SET (live_relevant_regs, regno);
1811 if (! fixed_regs[regno])
1812 SET_REGNO_REG_SET (&chain->dead_or_set, regno);
1813 regno++;
1814 }
1815 }
1816 else
1817 {
1818 CLEAR_REGNO_REG_SET (live_relevant_regs, regno);
1819 if (reg_renumber[regno] >= 0)
1820 SET_REGNO_REG_SET (&chain->dead_or_set, regno);
1821 }
1822 }
1823
/* Walk the insns of the current function and build reload_insn_chain,
   and record register life information.  */
void
build_insn_chain (rtx first)
{
  /* P holds the address where the next chain element will be linked in,
     so the chain ends up in insn order.  */
  struct insn_chain **p = &reload_insn_chain;
  struct insn_chain *prev = 0;
  basic_block b = ENTRY_BLOCK_PTR->next_bb;

  live_relevant_regs = ALLOC_REG_SET (&reg_obstack);

  for (; first; first = NEXT_INSN (first))
    {
      struct insn_chain *c;

      /* At the head of each block, reseed the live set from the block's
	 global_live_at_start, keeping only the "relevant" registers:
	 non-eliminable hard regs, and pseudos that got a hard reg.  */
      if (first == BB_HEAD (b))
	{
	  unsigned i;
	  bitmap_iterator bi;

	  CLEAR_REG_SET (live_relevant_regs);

	  EXECUTE_IF_SET_IN_BITMAP (b->il.rtl->global_live_at_start, 0, i, bi)
	    {
	      if (i < FIRST_PSEUDO_REGISTER
		  ? ! TEST_HARD_REG_BIT (eliminable_regset, i)
		  : reg_renumber[i] >= 0)
		SET_REGNO_REG_SET (live_relevant_regs, i);
	    }
	}

      /* Notes and barriers get no chain element.  */
      if (!NOTE_P (first) && !BARRIER_P (first))
	{
	  c = new_insn_chain ();
	  c->prev = prev;
	  prev = c;
	  *p = c;
	  p = &c->next;
	  c->insn = first;
	  c->block = b->index;

	  if (INSN_P (first))
	    {
	      rtx link;

	      /* Mark the death of everything that dies in this instruction,
		 so live_throughout below excludes the dying registers.  */

	      for (link = REG_NOTES (first); link; link = XEXP (link, 1))
		if (REG_NOTE_KIND (link) == REG_DEAD
		    && REG_P (XEXP (link, 0)))
		  reg_dies (REGNO (XEXP (link, 0)), GET_MODE (XEXP (link, 0)),
			    c);

	      COPY_REG_SET (&c->live_throughout, live_relevant_regs);

	      /* Mark everything born in this instruction as live.  */

	      note_stores (PATTERN (first), reg_becomes_live,
			   &c->dead_or_set);
	    }
	  else
	    COPY_REG_SET (&c->live_throughout, live_relevant_regs);

	  if (INSN_P (first))
	    {
	      rtx link;

	      /* Mark anything that is set in this insn and then unused as dying.  */

	      for (link = REG_NOTES (first); link; link = XEXP (link, 1))
		if (REG_NOTE_KIND (link) == REG_UNUSED
		    && REG_P (XEXP (link, 0)))
		  reg_dies (REGNO (XEXP (link, 0)), GET_MODE (XEXP (link, 0)),
			    c);
	    }
	}

      if (first == BB_END (b))
	b = b->next_bb;

      /* Stop after we pass the end of the last basic block.  Verify that
	 no real insns are after the end of the last basic block.

	 We may want to reorganize the loop somewhat since this test should
	 always be the right exit test.  Allow an ADDR_VEC or ADDR_DIF_VEC if
	 the previous real insn is a JUMP_INSN.  */
      if (b == EXIT_BLOCK_PTR)
	{
#ifdef ENABLE_CHECKING
	  for (first = NEXT_INSN (first); first; first = NEXT_INSN (first))
	    gcc_assert (!INSN_P (first)
			|| GET_CODE (PATTERN (first)) == USE
			|| ((GET_CODE (PATTERN (first)) == ADDR_VEC
			     || GET_CODE (PATTERN (first)) == ADDR_DIFF_VEC)
			    && prev_real_insn (first) != 0
			    && JUMP_P (prev_real_insn (first))));
#endif
	  break;
	}
    }
  FREE_REG_SET (live_relevant_regs);
  *p = 0;
}
1927 \f
1928 /* Print debugging trace information if -dg switch is given,
1929 showing the information on which the allocation decisions are based. */
1930
1931 static void
1932 dump_conflicts (FILE *file)
1933 {
1934 int i;
1935 int has_preferences;
1936 int nregs;
1937 nregs = 0;
1938 for (i = 0; i < max_allocno; i++)
1939 {
1940 if (reg_renumber[allocno[allocno_order[i]].reg] >= 0)
1941 continue;
1942 nregs++;
1943 }
1944 fprintf (file, ";; %d regs to allocate:", nregs);
1945 for (i = 0; i < max_allocno; i++)
1946 {
1947 int j;
1948 if (reg_renumber[allocno[allocno_order[i]].reg] >= 0)
1949 continue;
1950 fprintf (file, " %d", allocno[allocno_order[i]].reg);
1951 for (j = 0; j < max_regno; j++)
1952 if (reg_allocno[j] == allocno_order[i]
1953 && j != allocno[allocno_order[i]].reg)
1954 fprintf (file, "+%d", j);
1955 if (allocno[allocno_order[i]].size != 1)
1956 fprintf (file, " (%d)", allocno[allocno_order[i]].size);
1957 }
1958 fprintf (file, "\n");
1959
1960 for (i = 0; i < max_allocno; i++)
1961 {
1962 int j;
1963 fprintf (file, ";; %d conflicts:", allocno[i].reg);
1964 for (j = 0; j < max_allocno; j++)
1965 if (CONFLICTP (j, i))
1966 fprintf (file, " %d", allocno[j].reg);
1967 for (j = 0; j < FIRST_PSEUDO_REGISTER; j++)
1968 if (TEST_HARD_REG_BIT (allocno[i].hard_reg_conflicts, j))
1969 fprintf (file, " %d", j);
1970 fprintf (file, "\n");
1971
1972 has_preferences = 0;
1973 for (j = 0; j < FIRST_PSEUDO_REGISTER; j++)
1974 if (TEST_HARD_REG_BIT (allocno[i].hard_reg_preferences, j))
1975 has_preferences = 1;
1976
1977 if (! has_preferences)
1978 continue;
1979 fprintf (file, ";; %d preferences:", allocno[i].reg);
1980 for (j = 0; j < FIRST_PSEUDO_REGISTER; j++)
1981 if (TEST_HARD_REG_BIT (allocno[i].hard_reg_preferences, j))
1982 fprintf (file, " %d", j);
1983 fprintf (file, "\n");
1984 }
1985 fprintf (file, "\n");
1986 }
1987
1988 void
1989 dump_global_regs (FILE *file)
1990 {
1991 int i, j;
1992
1993 fprintf (file, ";; Register dispositions:\n");
1994 for (i = FIRST_PSEUDO_REGISTER, j = 0; i < max_regno; i++)
1995 if (reg_renumber[i] >= 0)
1996 {
1997 fprintf (file, "%d in %d ", i, reg_renumber[i]);
1998 if (++j % 6 == 0)
1999 fprintf (file, "\n");
2000 }
2001
2002 fprintf (file, "\n\n;; Hard regs used: ");
2003 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2004 if (regs_ever_live[i])
2005 fprintf (file, " %d", i);
2006 fprintf (file, "\n\n");
2007 }
2008
2009 \f
2010
2011 /* This page contains code to make live information more accurate.
2012 The accurate register liveness at program point P means:
2013 o there is a path from P to usage of the register and the
2014 register is not redefined or killed on the path.
2015 o register at P is partially available, i.e. there is a path from
2016 a register definition to the point P and the register is not
2017 killed (clobbered) on the path
2018
2019 The standard GCC live information means only the first condition.
2020 Without the partial availability, there will be more register
2021 conflicts and as a consequence worse register allocation. The
2022 typical example where the information can be different is a
2023 register initialized in the loop at the basic block preceding the
2024 loop in CFG. */
2025
/* The following structure contains basic block data flow information
   used to calculate partial availability of registers.  It is attached
   to each block through the AUX field (see BB_INFO below).  */

struct bb_info
{
  /* The basic block reverse post-order number; set by
     set_up_bb_rts_numbers and used to sort the worklist.  */
  int rts_number;
  /* Registers used uninitialized in an insn in which there is an
     early clobbered register might get the same hard register.  */
  bitmap earlyclobber;
  /* Registers correspondingly killed (clobbered) and defined but not
     killed afterward in the basic block.  */
  bitmap killed, avloc;
  /* Registers partially available and living (in other words whose
     values were calculated and used) correspondingly at the start
     and end of the basic block.  */
  bitmap live_pavin, live_pavout;
};
2044
/* Macros for accessing data flow information of basic blocks.  The
   info structure is stored in the block's AUX pointer, allocated by
   allocate_bb_info.  */

#define BB_INFO(BB) ((struct bb_info *) (BB)->aux)
#define BB_INFO_BY_INDEX(N) BB_INFO (BASIC_BLOCK(N))
2049
2050 /* The function allocates the info structures of each basic block. It
2051 also initialized LIVE_PAVIN and LIVE_PAVOUT as if all hard
2052 registers were partially available. */
2053
2054 static void
2055 allocate_bb_info (void)
2056 {
2057 int i;
2058 basic_block bb;
2059 struct bb_info *bb_info;
2060 bitmap init;
2061
2062 alloc_aux_for_blocks (sizeof (struct bb_info));
2063 init = BITMAP_ALLOC (NULL);
2064 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2065 bitmap_set_bit (init, i);
2066 FOR_EACH_BB (bb)
2067 {
2068 bb_info = bb->aux;
2069 bb_info->earlyclobber = BITMAP_ALLOC (NULL);
2070 bb_info->avloc = BITMAP_ALLOC (NULL);
2071 bb_info->killed = BITMAP_ALLOC (NULL);
2072 bb_info->live_pavin = BITMAP_ALLOC (NULL);
2073 bb_info->live_pavout = BITMAP_ALLOC (NULL);
2074 bitmap_copy (bb_info->live_pavin, init);
2075 bitmap_copy (bb_info->live_pavout, init);
2076 }
2077 BITMAP_FREE (init);
2078 }
2079
2080 /* The function frees the allocated info of all basic blocks. */
2081
2082 static void
2083 free_bb_info (void)
2084 {
2085 basic_block bb;
2086 struct bb_info *bb_info;
2087
2088 FOR_EACH_BB (bb)
2089 {
2090 bb_info = BB_INFO (bb);
2091 BITMAP_FREE (bb_info->live_pavout);
2092 BITMAP_FREE (bb_info->live_pavin);
2093 BITMAP_FREE (bb_info->killed);
2094 BITMAP_FREE (bb_info->avloc);
2095 BITMAP_FREE (bb_info->earlyclobber);
2096 }
2097 free_aux_for_blocks ();
2098 }
2099
2100 /* The function modifies local info for register REG being changed in
2101 SETTER. DATA is used to pass the current basic block info. */
2102
2103 static void
2104 mark_reg_change (rtx reg, rtx setter, void *data)
2105 {
2106 int regno;
2107 basic_block bb = data;
2108 struct bb_info *bb_info = BB_INFO (bb);
2109
2110 if (GET_CODE (reg) == SUBREG)
2111 reg = SUBREG_REG (reg);
2112
2113 if (!REG_P (reg))
2114 return;
2115
2116 regno = REGNO (reg);
2117 bitmap_set_bit (bb_info->killed, regno);
2118
2119 if (GET_CODE (setter) != CLOBBER)
2120 bitmap_set_bit (bb_info->avloc, regno);
2121 else
2122 bitmap_clear_bit (bb_info->avloc, regno);
2123 }
2124
/* Classes of registers which could be early clobbered in the current
   insn.  Filled by check_earlyclobber, read by
   mark_reg_use_for_earlyclobber; holds at most N_REG_CLASSES
   entries (see calculate_local_reg_bb_info).  */

static VEC(int,heap) *earlyclobber_regclass;
2129
/* This function finds and stores register classes that could be early
   clobbered in INSN.  If any earlyclobber classes are found, the function
   returns TRUE, in all other cases it returns FALSE.  */

static bool
check_earlyclobber (rtx insn)
{
  int opno;
  bool found = false;

  extract_insn (insn);

  /* Rebuild the set of early-clobbered classes from scratch for INSN.  */
  VEC_truncate (int, earlyclobber_regclass, 0);
  for (opno = 0; opno < recog_data.n_operands; opno++)
    {
      char c;
      bool amp_p;
      int i;
      enum reg_class class;
      const char *p = recog_data.constraints[opno];

      class = NO_REGS;
      amp_p = false;
      /* Walk the constraint string one constraint character at a time;
	 P is advanced by CONSTRAINT_LEN at the bottom of the loop.  */
      for (;;)
	{
	  c = *p;
	  switch (c)
	    {
	    case '=': case '+': case '?':
	    case '#': case '!':
	    case '*': case '%':
	    case 'm': case '<': case '>': case 'V': case 'o':
	    case 'E': case 'F': case 'G': case 'H':
	    case 's': case 'i': case 'n':
	    case 'I': case 'J': case 'K': case 'L':
	    case 'M': case 'N': case 'O': case 'P':
	    case 'X':
	    case '0': case '1': case '2': case '3': case '4':
	    case '5': case '6': case '7': case '8': case '9':
	      /* These don't say anything we care about.  */
	      break;

	    case '&':
	      /* Remember that this alternative is early-clobbered; the
		 class is picked up by the other constraint letters.  */
	      amp_p = true;
	      break;
	    case '\0':
	    case ',':
	      /* End of an alternative (or of the whole string): if it
		 was early-clobbered and named a register class, record
		 that class (once) in earlyclobber_regclass.  */
	      if (amp_p && class != NO_REGS)
		{
		  int rc;

		  found = true;
		  for (i = 0;
		       VEC_iterate (int, earlyclobber_regclass, i, rc);
		       i++)
		    {
		      if (rc == (int) class)
			goto found_rc;
		    }

		  /* We use VEC_quick_push here because
		     earlyclobber_regclass holds no more than
		     N_REG_CLASSES elements. */
		  VEC_quick_push (int, earlyclobber_regclass, (int) class);
		found_rc:
		  ;
		}

	      /* Reset per-alternative state.  */
	      amp_p = false;
	      class = NO_REGS;
	      break;

	    case 'r':
	      class = GENERAL_REGS;
	      break;

	    default:
	      /* Machine-specific constraint letters map to a class via
		 the target's REG_CLASS_FROM_CONSTRAINT.  */
	      class = REG_CLASS_FROM_CONSTRAINT (c, p);
	      break;
	    }
	  if (c == '\0')
	    break;
	  p += CONSTRAINT_LEN (c, p);
	}
    }

  return found;
}
2218
2219 /* The function checks that pseudo-register *X has a class
2220 intersecting with the class of pseudo-register could be early
2221 clobbered in the same insn.
2222 This function is a no-op if earlyclobber_regclass is empty. */
2223
2224 static int
2225 mark_reg_use_for_earlyclobber (rtx *x, void *data ATTRIBUTE_UNUSED)
2226 {
2227 enum reg_class pref_class, alt_class;
2228 int i, regno;
2229 basic_block bb = data;
2230 struct bb_info *bb_info = BB_INFO (bb);
2231
2232 if (REG_P (*x) && REGNO (*x) >= FIRST_PSEUDO_REGISTER)
2233 {
2234 int rc;
2235
2236 regno = REGNO (*x);
2237 if (bitmap_bit_p (bb_info->killed, regno)
2238 || bitmap_bit_p (bb_info->avloc, regno))
2239 return 0;
2240 pref_class = reg_preferred_class (regno);
2241 alt_class = reg_alternate_class (regno);
2242 for (i = 0; VEC_iterate (int, earlyclobber_regclass, i, rc); i++)
2243 {
2244 if (reg_classes_intersect_p (rc, pref_class)
2245 || (rc != NO_REGS
2246 && reg_classes_intersect_p (rc, alt_class)))
2247 {
2248 bitmap_set_bit (bb_info->earlyclobber, regno);
2249 break;
2250 }
2251 }
2252 }
2253 return 0;
2254 }
2255
/* The function processes all pseudo-registers in *X with the aid of
   the previous function.  DATA is the basic block whose earlyclobber
   info is updated; it is forwarded unchanged to the callback.  */

static void
mark_reg_use_for_earlyclobber_1 (rtx *x, void *data)
{
  for_each_rtx (x, mark_reg_use_for_earlyclobber, data);
}
2264
2265 /* The function calculates local info for each basic block. */
2266
2267 static void
2268 calculate_local_reg_bb_info (void)
2269 {
2270 basic_block bb;
2271 rtx insn, bound;
2272
2273 /* We know that earlyclobber_regclass holds no more than
2274 N_REG_CLASSES elements. See check_earlyclobber. */
2275 earlyclobber_regclass = VEC_alloc (int, heap, N_REG_CLASSES);
2276 FOR_EACH_BB (bb)
2277 {
2278 bound = NEXT_INSN (BB_END (bb));
2279 for (insn = BB_HEAD (bb); insn != bound; insn = NEXT_INSN (insn))
2280 if (INSN_P (insn))
2281 {
2282 note_stores (PATTERN (insn), mark_reg_change, bb);
2283 if (check_earlyclobber (insn))
2284 note_uses (&PATTERN (insn), mark_reg_use_for_earlyclobber_1, bb);
2285 }
2286 }
2287 VEC_free (int, heap, earlyclobber_regclass);
2288 }
2289
2290 /* The function sets up reverse post-order number of each basic
2291 block. */
2292
2293 static void
2294 set_up_bb_rts_numbers (void)
2295 {
2296 int i;
2297 int *rts_order;
2298
2299 rts_order = XNEWVEC (int, n_basic_blocks - NUM_FIXED_BLOCKS);
2300 post_order_compute (rts_order, false);
2301 for (i = 0; i < n_basic_blocks - NUM_FIXED_BLOCKS; i++)
2302 BB_INFO_BY_INDEX (rts_order [i])->rts_number = i;
2303 free (rts_order);
2304 }
2305
2306 /* Compare function for sorting blocks in reverse postorder. */
2307
2308 static int
2309 rpost_cmp (const void *bb1, const void *bb2)
2310 {
2311 basic_block b1 = *(basic_block *) bb1, b2 = *(basic_block *) bb2;
2312
2313 return BB_INFO (b2)->rts_number - BB_INFO (b1)->rts_number;
2314 }
2315
/* Temporary bitmap used for live_pavin, live_pavout calculation.  */
static bitmap temp_bitmap;

/* The function calculates partial register availability according to
   the following equations:

     bb.live_pavin
       = empty for entry block
       | union (live_pavout of predecessors) & global_live_at_start
     bb.live_pavout = union (bb.live_pavin - bb.killed, bb.avloc)
		      & global_live_at_end

   Iterates a worklist (in reverse post order) until a fixed point is
   reached.  */

static void
calculate_reg_pav (void)
{
  basic_block bb, succ;
  edge e;
  int i, nel;
  VEC(basic_block,heap) *bbs, *new_bbs, *temp;
  basic_block *bb_array;
  sbitmap wset;

  bbs = VEC_alloc (basic_block, heap, n_basic_blocks);
  new_bbs = VEC_alloc (basic_block, heap, n_basic_blocks);
  temp_bitmap = BITMAP_ALLOC (NULL);
  /* Seed the worklist with every basic block.  */
  FOR_EACH_BB (bb)
    {
      VEC_quick_push (basic_block, bbs, bb);
    }
  /* WSET marks blocks already queued for the next round, to avoid
     pushing duplicates onto NEW_BBS.  */
  wset = sbitmap_alloc (n_basic_blocks + 1);
  while (VEC_length (basic_block, bbs))
    {
      bb_array = VEC_address (basic_block, bbs);
      nel = VEC_length (basic_block, bbs);
      /* Visit blocks in reverse post order for faster convergence.  */
      qsort (bb_array, nel, sizeof (basic_block), rpost_cmp);
      sbitmap_zero (wset);
      for (i = 0; i < nel; i++)
	{
	  edge_iterator ei;
	  struct bb_info *bb_info;
	  bitmap bb_live_pavin, bb_live_pavout;

	  bb = bb_array [i];
	  bb_info = BB_INFO (bb);
	  bb_live_pavin = bb_info->live_pavin;
	  bb_live_pavout = bb_info->live_pavout;
	  /* live_pavin |= union of predecessors' live_pavout ...  */
	  FOR_EACH_EDGE (e, ei, bb->preds)
	    {
	      basic_block pred = e->src;

	      if (pred->index != ENTRY_BLOCK)
		bitmap_ior_into (bb_live_pavin, BB_INFO (pred)->live_pavout);
	    }
	  /* ... restricted to the registers live at the block start.  */
	  bitmap_and_into (bb_live_pavin, bb->il.rtl->global_live_at_start);
	  /* live_pavout = (avloc | (live_pavin - killed)) & live at end.  */
	  bitmap_ior_and_compl (temp_bitmap, bb_info->avloc,
				bb_live_pavin, bb_info->killed);
	  bitmap_and_into (temp_bitmap, bb->il.rtl->global_live_at_end);
	  if (! bitmap_equal_p (temp_bitmap, bb_live_pavout))
	    {
	      bitmap_copy (bb_live_pavout, temp_bitmap);
	      /* The output changed, so the successors must be
		 reprocessed in the next round.  */
	      FOR_EACH_EDGE (e, ei, bb->succs)
		{
		  succ = e->dest;
		  if (succ->index != EXIT_BLOCK
		      && !TEST_BIT (wset, succ->index))
		    {
		      SET_BIT (wset, succ->index);
		      VEC_quick_push (basic_block, new_bbs, succ);
		    }
		}
	    }
	}
      /* Swap the current and next worklists.  */
      temp = bbs;
      bbs = new_bbs;
      new_bbs = temp;
      VEC_truncate (basic_block, new_bbs, 0);
    }
  sbitmap_free (wset);
  BITMAP_FREE (temp_bitmap);
  VEC_free (basic_block, heap, new_bbs);
  VEC_free (basic_block, heap, bbs);
}
2398
/* The function modifies partial availability information for two
   special cases to prevent incorrect work of the subsequent passes
   with the accurate live information based on the partial
   availability.  */

static void
modify_reg_pav (void)
{
  basic_block bb;
  struct bb_info *bb_info;
#ifdef STACK_REGS
  int i;
  HARD_REG_SET zero, stack_hard_regs, used;
  bitmap stack_regs;

  CLEAR_HARD_REG_SET (zero);
  CLEAR_HARD_REG_SET (stack_hard_regs);
  for (i = FIRST_STACK_REG; i <= LAST_STACK_REG; i++)
    SET_HARD_REG_BIT(stack_hard_regs, i);
  /* Collect the pseudos whose preferred or alternate class contains
     at least one stack register.  */
  stack_regs = BITMAP_ALLOC (NULL);
  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
    {
      COPY_HARD_REG_SET (used, reg_class_contents[reg_preferred_class (i)]);
      IOR_HARD_REG_SET (used, reg_class_contents[reg_alternate_class (i)]);
      AND_HARD_REG_SET (used, stack_hard_regs);
      /* GO_IF_HARD_REG_EQUAL jumps to SKIP when the intersection is
	 empty, i.e. pseudo I cannot land in a stack register.  */
      GO_IF_HARD_REG_EQUAL(used, zero, skip);
      bitmap_set_bit (stack_regs, i);
    skip:
      ;
    }
#endif
  FOR_EACH_BB (bb)
    {
      bb_info = BB_INFO (bb);

      /* Reload can assign the same hard register to uninitialized
	 pseudo-register and early clobbered pseudo-register in an
	 insn if the pseudo-register is used first time in given BB
	 and not lived at the BB start.  To prevent this we don't
	 change life information for such pseudo-registers.  */
      bitmap_ior_into (bb_info->live_pavin, bb_info->earlyclobber);
#ifdef STACK_REGS
      /* We can not use the same stack register for uninitialized
	 pseudo-register and another living pseudo-register because if the
	 uninitialized pseudo-register dies, subsequent pass reg-stack
	 will be confused (it will believe that the other register
	 dies).  */
      bitmap_ior_into (bb_info->live_pavin, stack_regs);
#endif
    }
#ifdef STACK_REGS
  BITMAP_FREE (stack_regs);
#endif
}
2453
2454 /* The following function makes live information more accurate by
2455 modifying global_live_at_start and global_live_at_end of basic
2456 blocks.
2457
2458 The standard GCC life analysis permits registers to live
2459 uninitialized, for example:
2460
2461 R is never used
2462 .....
2463 Loop:
2464 R is defined
2465 ...
2466 R is used.
2467
2468 With normal life_analysis, R would be live before "Loop:".
2469 The result is that R causes many interferences that do not
2470 serve any purpose.
2471
2472 After the function call a register lives at a program point
2473 only if it is initialized on a path from CFG entry to the
2474 program point. */
2475
2476 static void
2477 make_accurate_live_analysis (void)
2478 {
2479 basic_block bb;
2480 struct bb_info *bb_info;
2481
2482 max_regno = max_reg_num ();
2483 compact_blocks ();
2484 allocate_bb_info ();
2485 calculate_local_reg_bb_info ();
2486 set_up_bb_rts_numbers ();
2487 calculate_reg_pav ();
2488 modify_reg_pav ();
2489 FOR_EACH_BB (bb)
2490 {
2491 bb_info = BB_INFO (bb);
2492
2493 bitmap_and_into (bb->il.rtl->global_live_at_start, bb_info->live_pavin);
2494 bitmap_and_into (bb->il.rtl->global_live_at_end, bb_info->live_pavout);
2495 }
2496 free_bb_info ();
2497 }
2498 /* Run old register allocator. Return TRUE if we must exit
2499 rest_of_compilation upon return. */
2500 static unsigned int
2501 rest_of_handle_global_alloc (void)
2502 {
2503 bool failure;
2504
2505 /* If optimizing, allocate remaining pseudo-regs. Do the reload
2506 pass fixing up any insns that are invalid. */
2507
2508 if (optimize)
2509 failure = global_alloc ();
2510 else
2511 {
2512 build_insn_chain (get_insns ());
2513 failure = reload (get_insns (), 0);
2514 }
2515
2516 if (dump_enabled_p (pass_global_alloc.static_pass_number))
2517 {
2518 timevar_push (TV_DUMP);
2519 dump_global_regs (dump_file);
2520 timevar_pop (TV_DUMP);
2521 }
2522
2523 gcc_assert (reload_completed || failure);
2524 reload_completed = !failure;
2525 return 0;
2526 }
2527
/* Pass descriptor for global register allocation; registered with the
   pass manager, executes rest_of_handle_global_alloc.  */
struct tree_opt_pass pass_global_alloc =
{
  "greg",                               /* name */
  NULL,                                 /* gate */
  rest_of_handle_global_alloc,          /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_GLOBAL_ALLOC,                      /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func |
  TODO_ggc_collect,                     /* todo_flags_finish */
  'g'                                   /* letter */
};
2545
This page took 0.190081 seconds and 5 git commands to generate.