/* gcc/resource.c -- computing resource usage of specific insns
   (from the GCC source repository).  */
1 /* Definitions for computing resource usage of specific insns.
2 Copyright (C) 1999 Free Software Foundation, Inc.
3
4 This file is part of GNU CC.
5
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "toplev.h"
24 #include "rtl.h"
25 #include "tm_p.h"
26 #include "hard-reg-set.h"
27 #include "basic-block.h"
28 #include "function.h"
29 #include "regs.h"
30 #include "flags.h"
31 #include "output.h"
32 #include "resource.h"
33
/* This structure is used to record liveness information at the targets or
   fallthrough insns of branches.  We will most likely need the information
   at targets again, so save them in a hash table rather than recomputing them
   each time.  */

struct target_info
{
  int uid;			/* INSN_UID of target.  */
  struct target_info *next;	/* Next info for same hash bucket.  */
  HARD_REG_SET live_regs;	/* Registers live at target.  */
  int block;			/* Basic block number containing target.  */
  int bb_tick;			/* Generation count of basic block info.  */
};

/* Number of buckets in the hash table of target_info entries; entries are
   placed in bucket INSN_UID mod this prime.  */
#define TARGET_HASH_PRIME 257
49
/* Indicates what resources are required at the beginning of the epilogue.  */
static struct resources start_of_epilogue_needs;

/* Indicates what resources are required at function end.  */
static struct resources end_of_function_needs;

/* Define the hash table itself.  Each bucket is a chain of target_info
   structures linked through their `next' fields.  */
static struct target_info **target_hash_table = NULL;

/* For each basic block, we maintain a generation number of its basic
   block info, which is updated each time we move an insn from the
   target of a jump.  This is the generation number indexed by block
   number.  */

static int *bb_ticks;

/* Marks registers possibly live at the current place being scanned by
   mark_target_live_regs.  Used only by the next two functions.  */

static HARD_REG_SET current_live_regs;

/* Marks registers for which we have seen a REG_DEAD note but no assignment.
   Also only used by the next two functions.  */

static HARD_REG_SET pending_dead_regs;
75 \f
/* Forward declarations of local subroutines.  */
static void update_live_status		PROTO ((rtx, rtx));
static int find_basic_block		PROTO ((rtx));
static rtx next_insn_no_annul		PROTO ((rtx));
static rtx find_dead_or_set_registers	PROTO ((rtx, struct resources*,
						rtx*, int, struct resources,
						struct resources));
82 \f
83 /* Utility function called from mark_target_live_regs via note_stores.
84 It deadens any CLOBBERed registers and livens any SET registers. */
85
86 static void
87 update_live_status (dest, x)
88 rtx dest;
89 rtx x;
90 {
91 int first_regno, last_regno;
92 int i;
93
94 if (GET_CODE (dest) != REG
95 && (GET_CODE (dest) != SUBREG || GET_CODE (SUBREG_REG (dest)) != REG))
96 return;
97
98 if (GET_CODE (dest) == SUBREG)
99 first_regno = REGNO (SUBREG_REG (dest)) + SUBREG_WORD (dest);
100 else
101 first_regno = REGNO (dest);
102
103 last_regno = first_regno + HARD_REGNO_NREGS (first_regno, GET_MODE (dest));
104
105 if (GET_CODE (x) == CLOBBER)
106 for (i = first_regno; i < last_regno; i++)
107 CLEAR_HARD_REG_BIT (current_live_regs, i);
108 else
109 for (i = first_regno; i < last_regno; i++)
110 {
111 SET_HARD_REG_BIT (current_live_regs, i);
112 CLEAR_HARD_REG_BIT (pending_dead_regs, i);
113 }
114 }
115 /* Find the number of the basic block that starts closest to INSN. Return -1
116 if we couldn't find such a basic block. */
117
118 static int
119 find_basic_block (insn)
120 rtx insn;
121 {
122 int i;
123
124 /* Scan backwards to the previous BARRIER. Then see if we can find a
125 label that starts a basic block. Return the basic block number. */
126
127 for (insn = prev_nonnote_insn (insn);
128 insn && GET_CODE (insn) != BARRIER;
129 insn = prev_nonnote_insn (insn))
130 ;
131
132 /* The start of the function is basic block zero. */
133 if (insn == 0)
134 return 0;
135
136 /* See if any of the upcoming CODE_LABELs start a basic block. If we reach
137 anything other than a CODE_LABEL or note, we can't find this code. */
138 for (insn = next_nonnote_insn (insn);
139 insn && GET_CODE (insn) == CODE_LABEL;
140 insn = next_nonnote_insn (insn))
141 {
142 for (i = 0; i < n_basic_blocks; i++)
143 if (insn == BLOCK_HEAD (i))
144 return i;
145 }
146
147 return -1;
148 }
149 \f
150 /* Similar to next_insn, but ignores insns in the delay slots of
151 an annulled branch. */
152
153 static rtx
154 next_insn_no_annul (insn)
155 rtx insn;
156 {
157 if (insn)
158 {
159 /* If INSN is an annulled branch, skip any insns from the target
160 of the branch. */
161 if (INSN_ANNULLED_BRANCH_P (insn)
162 && NEXT_INSN (PREV_INSN (insn)) != insn)
163 while (INSN_FROM_TARGET_P (NEXT_INSN (insn)))
164 insn = NEXT_INSN (insn);
165
166 insn = NEXT_INSN (insn);
167 if (insn && GET_CODE (insn) == INSN
168 && GET_CODE (PATTERN (insn)) == SEQUENCE)
169 insn = XVECEXP (PATTERN (insn), 0, 0);
170 }
171
172 return insn;
173 }
174 \f
/* Given X, some rtl, and RES, a pointer to a `struct resources', mark
   which resources are referenced by the insn.  If INCLUDE_DELAYED_EFFECTS
   is TRUE, resources used by the called routine will be included for
   CALL_INSNs.  */

void
mark_referenced_resources (x, res, include_delayed_effects)
     register rtx x;
     register struct resources *res;
     register int include_delayed_effects;
{
  register enum rtx_code code = GET_CODE (x);
  register int i, j;
  register const char *format_ptr;

  /* Handle leaf items for which we set resource flags.  Also, special-case
     CALL, SET and CLOBBER operators.  */
  switch (code)
    {
    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case PC:
    case SYMBOL_REF:
    case LABEL_REF:
      /* Constants and labels reference no resources.  */
      return;

    case SUBREG:
      if (GET_CODE (SUBREG_REG (x)) != REG)
	mark_referenced_resources (SUBREG_REG (x), res, 0);
      else
	{
	  /* A SUBREG of a hard register references the covered range of
	     hard registers.  */
	  int regno = REGNO (SUBREG_REG (x)) + SUBREG_WORD (x);
	  int last_regno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
	  for (i = regno; i < last_regno; i++)
	    SET_HARD_REG_BIT (res->regs, i);
	}
      return;

    case REG:
      /* Mark every hard register the REG occupies in this mode.  */
      for (i = 0; i < HARD_REGNO_NREGS (REGNO (x), GET_MODE (x)); i++)
	SET_HARD_REG_BIT (res->regs, REGNO (x) + i);
      return;

    case MEM:
      /* If this memory shouldn't change, it really isn't referencing
	 memory.  */
      if (RTX_UNCHANGING_P (x))
	res->unch_memory = 1;
      else
	res->memory = 1;
      res->volatil |= MEM_VOLATILE_P (x);

      /* Mark registers used to access memory.  */
      mark_referenced_resources (XEXP (x, 0), res, 0);
      return;

    case CC0:
      res->cc = 1;
      return;

    case UNSPEC_VOLATILE:
    case ASM_INPUT:
      /* Traditional asm's are always volatile.  */
      res->volatil = 1;
      return;

    case TRAP_IF:
      res->volatil = 1;
      /* Break (rather than return) so the trap condition is scanned by
	 the generic sub-expression walk below.  */
      break;

    case ASM_OPERANDS:
      res->volatil |= MEM_VOLATILE_P (x);

      /* For all ASM_OPERANDS, we must traverse the vector of input operands.
	 We can not just fall through here since then we would be confused
	 by the ASM_INPUT rtx inside ASM_OPERANDS, which do not indicate
	 traditional asms unlike their normal usage.  */

      for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
	mark_referenced_resources (ASM_OPERANDS_INPUT (x, i), res, 0);
      return;

    case CALL:
      /* The first operand will be a (MEM (xxx)) but doesn't really reference
	 memory.  The second operand may be referenced, though.  */
      mark_referenced_resources (XEXP (XEXP (x, 0), 0), res, 0);
      mark_referenced_resources (XEXP (x, 1), res, 0);
      return;

    case SET:
      /* Usually, the first operand of SET is set, not referenced.  But
	 registers used to access memory are referenced.  SET_DEST is
	 also referenced if it is a ZERO_EXTRACT or SIGN_EXTRACT.  */

      mark_referenced_resources (SET_SRC (x), res, 0);

      x = SET_DEST (x);
      if (GET_CODE (x) == SIGN_EXTRACT || GET_CODE (x) == ZERO_EXTRACT)
	mark_referenced_resources (x, res, 0);
      else if (GET_CODE (x) == SUBREG)
	x = SUBREG_REG (x);
      if (GET_CODE (x) == MEM)
	mark_referenced_resources (XEXP (x, 0), res, 0);
      return;

    case CLOBBER:
      /* A CLOBBER only sets its operand; it references nothing.  */
      return;

    case CALL_INSN:
      if (include_delayed_effects)
	{
	  /* A CALL references memory, the frame pointer if it exists, the
	     stack pointer, any global registers and any registers given in
	     USE insns immediately in front of the CALL.

	     However, we may have moved some of the parameter loading insns
	     into the delay slot of this CALL.  If so, the USE's for them
	     don't count and should be skipped.  */
	  rtx insn = PREV_INSN (x);
	  rtx sequence = 0;
	  int seq_size = 0;
	  rtx next = NEXT_INSN (x);
	  int i;

	  /* If we are part of a delay slot sequence, point at the SEQUENCE.  */
	  if (NEXT_INSN (insn) != x)
	    {
	      next = NEXT_INSN (NEXT_INSN (insn));
	      sequence = PATTERN (NEXT_INSN (insn));
	      seq_size = XVECLEN (sequence, 0);
	      if (GET_CODE (sequence) != SEQUENCE)
		abort ();
	    }

	  res->memory = 1;
	  SET_HARD_REG_BIT (res->regs, STACK_POINTER_REGNUM);
	  if (frame_pointer_needed)
	    {
	      SET_HARD_REG_BIT (res->regs, FRAME_POINTER_REGNUM);
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
	      SET_HARD_REG_BIT (res->regs, HARD_FRAME_POINTER_REGNUM);
#endif
	    }

	  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	    if (global_regs[i])
	      SET_HARD_REG_BIT (res->regs, i);

	  /* Check for a NOTE_INSN_SETJMP.  If it exists, then we must
	     assume that this call can need any register.

	     This is done to be more conservative about how we handle setjmp.
	     We assume that they both use and set all registers.  Using all
	     registers ensures that a register will not be considered dead
	     just because it crosses a setjmp call.  A register should be
	     considered dead only if the setjmp call returns non-zero.  */
	  if (next && GET_CODE (next) == NOTE
	      && NOTE_LINE_NUMBER (next) == NOTE_INSN_SETJMP)
	    SET_HARD_REG_SET (res->regs);

	  {
	    rtx link;

	    /* Mark registers USEd by the call unless the same register is
	       set by one of the call's own delay slots (a moved parameter
	       load), in which case the USE does not count.  */
	    for (link = CALL_INSN_FUNCTION_USAGE (x);
		 link;
		 link = XEXP (link, 1))
	      if (GET_CODE (XEXP (link, 0)) == USE)
		{
		  for (i = 1; i < seq_size; i++)
		    {
		      rtx slot_pat = PATTERN (XVECEXP (sequence, 0, i));
		      if (GET_CODE (slot_pat) == SET
			  && rtx_equal_p (SET_DEST (slot_pat),
					  SET_DEST (XEXP (link, 0))))
			break;
		    }
		  if (i >= seq_size)
		    mark_referenced_resources (SET_DEST (XEXP (link, 0)),
					       res, 0);
		}
	  }
	}

      /* ... fall through to other INSN processing ...  */

    case INSN:
    case JUMP_INSN:

#ifdef INSN_REFERENCES_ARE_DELAYED
      if (! include_delayed_effects
	  && INSN_REFERENCES_ARE_DELAYED (x))
	return;
#endif

      /* No special processing, just speed up.  */
      mark_referenced_resources (PATTERN (x), res, include_delayed_effects);
      return;

    default:
      break;
    }

  /* Process each sub-expression and flag what it needs.  */
  format_ptr = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    switch (*format_ptr++)
      {
      case 'e':
	mark_referenced_resources (XEXP (x, i), res, include_delayed_effects);
	break;

      case 'E':
	for (j = 0; j < XVECLEN (x, i); j++)
	  mark_referenced_resources (XVECEXP (x, i, j), res,
				     include_delayed_effects);
	break;
      }
}
394 \f
/* A subroutine of mark_target_live_regs.  Search forward from TARGET
   looking for registers that are set before they are used.  These are dead.
   Stop after passing a few conditional jumps, and/or a small
   number of unconditional branches.

   RES is updated in place: registers found set before use on every path
   followed are removed from RES->regs.  SET and NEEDED (passed by value,
   so each recursion level has its own copy) accumulate the resources set
   and needed along the path scanned so far.  If JUMP_TARGET is nonzero
   and an unconditional branch is followed, *JUMP_TARGET is set to its
   label.  Returns the first unconditional branch followed, or 0 if none
   (or if the jump budget was exhausted).  */

static rtx
find_dead_or_set_registers (target, res, jump_target, jump_count, set, needed)
     rtx target;
     struct resources *res;
     rtx *jump_target;
     int jump_count;
     struct resources set, needed;
{
  HARD_REG_SET scratch;
  rtx insn, next;
  rtx jump_insn = 0;
  int i;

  for (insn = target; insn; insn = next)
    {
      rtx this_jump_insn = insn;

      next = NEXT_INSN (insn);
      switch (GET_CODE (insn))
	{
	case CODE_LABEL:
	  /* After a label, any pending dead registers that weren't yet
	     used can be made dead.  */
	  AND_COMPL_HARD_REG_SET (pending_dead_regs, needed.regs);
	  AND_COMPL_HARD_REG_SET (res->regs, pending_dead_regs);
	  CLEAR_HARD_REG_SET (pending_dead_regs);

	  continue;

	case BARRIER:
	case NOTE:
	  continue;

	case INSN:
	  if (GET_CODE (PATTERN (insn)) == USE)
	    {
	      /* If INSN is a USE made by update_block, we care about the
		 underlying insn.  Any registers set by the underlying insn
		 are live since the insn is being done somewhere else.  */
	      if (GET_RTX_CLASS (GET_CODE (XEXP (PATTERN (insn), 0))) == 'i')
		mark_set_resources (XEXP (PATTERN (insn), 0), res, 0, 1);

	      /* All other USE insns are to be ignored.  */
	      continue;
	    }
	  else if (GET_CODE (PATTERN (insn)) == CLOBBER)
	    continue;
	  else if (GET_CODE (PATTERN (insn)) == SEQUENCE)
	    {
	      /* An unconditional jump can be used to fill the delay slot
		 of a call, so search for a JUMP_INSN in any position.  */
	      for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
		{
		  this_jump_insn = XVECEXP (PATTERN (insn), 0, i);
		  if (GET_CODE (this_jump_insn) == JUMP_INSN)
		    break;
		}
	    }
	  /* Fall through to the common processing below.  */

	default:
	  break;
	}

      if (GET_CODE (this_jump_insn) == JUMP_INSN)
	{
	  /* Budget of 10: an unconditional jump costs 1, a conditional
	     jump costs 5 (see below), so at most one conditional branch
	     is followed.  */
	  if (jump_count++ < 10)
	    {
	      if (simplejump_p (this_jump_insn)
		  || GET_CODE (PATTERN (this_jump_insn)) == RETURN)
		{
		  /* Continue the scan at the branch target.  (For a
		     RETURN, JUMP_LABEL is presumably zero, which ends
		     the scan -- verify.)  Remember the first such jump
		     so the caller can extend the analysis past it.  */
		  next = JUMP_LABEL (this_jump_insn);
		  if (jump_insn == 0)
		    {
		      jump_insn = insn;
		      if (jump_target)
			*jump_target = JUMP_LABEL (this_jump_insn);
		    }
		}
	      else if (condjump_p (this_jump_insn)
		       || condjump_in_parallel_p (this_jump_insn))
		{
		  struct resources target_set, target_res;
		  struct resources fallthrough_res;

		  /* We can handle conditional branches here by following
		     both paths, and then IOR the results of the two paths
		     together, which will give us registers that are dead
		     on both paths.  Since this is expensive, we give it
		     a much higher cost than unconditional branches.  The
		     cost was chosen so that we will follow at most 1
		     conditional branch.  */

		  jump_count += 4;
		  if (jump_count >= 10)
		    break;

		  mark_referenced_resources (insn, &needed, 1);

		  /* For an annulled branch, mark_set_resources ignores slots
		     filled by instructions from the target.  This is correct
		     if the branch is not taken.  Since we are following both
		     paths from the branch, we must also compute correct info
		     if the branch is taken.  We do this by inverting all of
		     the INSN_FROM_TARGET_P bits, calling mark_set_resources,
		     and then inverting the INSN_FROM_TARGET_P bits again.  */

		  if (GET_CODE (PATTERN (insn)) == SEQUENCE
		      && INSN_ANNULLED_BRANCH_P (this_jump_insn))
		    {
		      for (i = 1; i < XVECLEN (PATTERN (insn), 0); i++)
			INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i))
			  = ! INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i));

		      target_set = set;
		      mark_set_resources (insn, &target_set, 0, 1);

		      for (i = 1; i < XVECLEN (PATTERN (insn), 0); i++)
			INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i))
			  = ! INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i));

		      mark_set_resources (insn, &set, 0, 1);
		    }
		  else
		    {
		      mark_set_resources (insn, &set, 0, 1);
		      target_set = set;
		    }

		  /* On each arm, the registers still live are those live
		     here minus the ones set without an intervening use
		     (set & ~needed) on that arm.  */
		  target_res = *res;
		  COPY_HARD_REG_SET (scratch, target_set.regs);
		  AND_COMPL_HARD_REG_SET (scratch, needed.regs);
		  AND_COMPL_HARD_REG_SET (target_res.regs, scratch);

		  fallthrough_res = *res;
		  COPY_HARD_REG_SET (scratch, set.regs);
		  AND_COMPL_HARD_REG_SET (scratch, needed.regs);
		  AND_COMPL_HARD_REG_SET (fallthrough_res.regs, scratch);

		  /* Recurse down both arms; a register live on either arm
		     stays live, so only registers dead on both arms are
		     removed from *RES.  */
		  find_dead_or_set_registers (JUMP_LABEL (this_jump_insn),
					      &target_res, 0, jump_count,
					      target_set, needed);
		  find_dead_or_set_registers (next,
					      &fallthrough_res, 0, jump_count,
					      set, needed);
		  IOR_HARD_REG_SET (fallthrough_res.regs, target_res.regs);
		  AND_HARD_REG_SET (res->regs, fallthrough_res.regs);
		  break;
		}
	      else
		break;
	    }
	  else
	    {
	      /* Don't try this optimization if we expired our jump count
		 above, since that would mean there may be an infinite loop
		 in the function being compiled.  */
	      jump_insn = 0;
	      break;
	    }
	}

      mark_referenced_resources (insn, &needed, 1);
      mark_set_resources (insn, &set, 0, 1);

      /* Registers this insn sets before any use (set & ~needed) are not
	 live at TARGET; drop them from the live set.  */
      COPY_HARD_REG_SET (scratch, set.regs);
      AND_COMPL_HARD_REG_SET (scratch, needed.regs);
      AND_COMPL_HARD_REG_SET (res->regs, scratch);
    }

  return jump_insn;
}
571 \f
/* Given X, a part of an insn, and a pointer to a `struct resources',
   RES, indicate which resources are modified by the insn.  If
   INCLUDE_DELAYED_EFFECTS is nonzero, also mark resources potentially
   set by the called routine.

   If IN_DEST is nonzero, it means we are inside a SET.  Otherwise,
   objects are being referenced instead of set.

   We never mark the insn as modifying the condition code unless it explicitly
   SETs CC0 even though this is not totally correct.  The reason for this is
   that we require a SET of CC0 to immediately precede the reference to CC0.
   So if some other insn sets CC0 as a side-effect, we know it cannot affect
   our computation and thus may be placed in a delay slot.  */

void
mark_set_resources (x, res, in_dest, include_delayed_effects)
     register rtx x;
     register struct resources *res;
     int in_dest;
     int include_delayed_effects;
{
  register enum rtx_code code;
  register int i, j;
  register const char *format_ptr;

 restart:

  code = GET_CODE (x);

  switch (code)
    {
    case NOTE:
    case BARRIER:
    case CODE_LABEL:
    case USE:
    case CONST_INT:
    case CONST_DOUBLE:
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST:
    case PC:
      /* These don't set any resources.  */
      return;

    case CC0:
      if (in_dest)
	res->cc = 1;
      return;

    case CALL_INSN:
      /* Called routine modifies the condition code, memory, any registers
	 that aren't saved across calls, global registers and anything
	 explicitly CLOBBERed immediately after the CALL_INSN.  */

      if (include_delayed_effects)
	{
	  rtx next = NEXT_INSN (x);
	  rtx prev = PREV_INSN (x);
	  rtx link;

	  res->cc = res->memory = 1;
	  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	    if (call_used_regs[i] || global_regs[i])
	      SET_HARD_REG_BIT (res->regs, i);

	  /* If X is part of a delay slot sequence, then NEXT should be
	     the first insn after the sequence.  */
	  if (NEXT_INSN (prev) != x)
	    next = NEXT_INSN (NEXT_INSN (prev));

	  /* Mark anything the call explicitly CLOBBERs.  */
	  for (link = CALL_INSN_FUNCTION_USAGE (x);
	       link; link = XEXP (link, 1))
	    if (GET_CODE (XEXP (link, 0)) == CLOBBER)
	      mark_set_resources (SET_DEST (XEXP (link, 0)), res, 1, 0);

	  /* Check for a NOTE_INSN_SETJMP.  If it exists, then we must
	     assume that this call can clobber any register.  */
	  if (next && GET_CODE (next) == NOTE
	      && NOTE_LINE_NUMBER (next) == NOTE_INSN_SETJMP)
	    SET_HARD_REG_SET (res->regs);
	}

      /* ... and also what its RTL says it modifies, if anything.  */

    case JUMP_INSN:
    case INSN:

	/* An insn consisting of just a CLOBBER (or USE) is just for flow
	   and doesn't actually do anything, so we ignore it.  */

#ifdef INSN_SETS_ARE_DELAYED
      if (! include_delayed_effects
	  && INSN_SETS_ARE_DELAYED (x))
	return;
#endif

      x = PATTERN (x);
      if (GET_CODE (x) != USE && GET_CODE (x) != CLOBBER)
	goto restart;
      return;

    case SET:
      /* If the source of a SET is a CALL, this is actually done by
	 the called routine.  So only include it if we are to include the
	 effects of the calling routine.  */

      mark_set_resources (SET_DEST (x), res,
			  (include_delayed_effects
			   || GET_CODE (SET_SRC (x)) != CALL),
			  0);

      mark_set_resources (SET_SRC (x), res, 0, 0);
      return;

    case CLOBBER:
      mark_set_resources (XEXP (x, 0), res, 1, 0);
      return;

    case SEQUENCE:
      /* Process each insn of a delay-slot sequence, but skip slots filled
	 from the target of an annulling branch (element 0 is the branch):
	 those insns do not execute when the branch is not taken.  */
      for (i = 0; i < XVECLEN (x, 0); i++)
	if (! (INSN_ANNULLED_BRANCH_P (XVECEXP (x, 0, 0))
	       && INSN_FROM_TARGET_P (XVECEXP (x, 0, i))))
	  mark_set_resources (XVECEXP (x, 0, i), res, 0,
			      include_delayed_effects);
      return;

    case POST_INC:
    case PRE_INC:
    case POST_DEC:
    case PRE_DEC:
      /* An auto-modify address sets the register it modifies.  */
      mark_set_resources (XEXP (x, 0), res, 1, 0);
      return;

    case ZERO_EXTRACT:
      /* The extracted location inherits IN_DEST; the position and length
	 operands are only referenced.  */
      mark_set_resources (XEXP (x, 0), res, in_dest, 0);
      mark_set_resources (XEXP (x, 1), res, 0, 0);
      mark_set_resources (XEXP (x, 2), res, 0, 0);
      return;

    case MEM:
      if (in_dest)
	{
	  res->memory = 1;
	  res->unch_memory |= RTX_UNCHANGING_P (x);
	  res->volatil |= MEM_VOLATILE_P (x);
	}

      /* The address is referenced, never set.  */
      mark_set_resources (XEXP (x, 0), res, 0, 0);
      return;

    case SUBREG:
      if (in_dest)
	{
	  if (GET_CODE (SUBREG_REG (x)) != REG)
	    mark_set_resources (SUBREG_REG (x), res,
				in_dest, include_delayed_effects);
	  else
	    {
	      /* A SUBREG of a hard register sets the covered range of
		 hard registers.  */
	      int regno = REGNO (SUBREG_REG (x)) + SUBREG_WORD (x);
	      int last_regno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
	      for (i = regno; i < last_regno; i++)
		SET_HARD_REG_BIT (res->regs, i);
	    }
	}
      return;

    case REG:
      if (in_dest)
	for (i = 0; i < HARD_REGNO_NREGS (REGNO (x), GET_MODE (x)); i++)
	  SET_HARD_REG_BIT (res->regs, REGNO (x) + i);
      return;

    case UNSPEC_VOLATILE:
    case ASM_INPUT:
      /* Traditional asm's are always volatile.  */
      res->volatil = 1;
      return;

    case TRAP_IF:
      res->volatil = 1;
      /* Break so the trap condition is scanned by the generic walk below.  */
      break;

    case ASM_OPERANDS:
      res->volatil |= MEM_VOLATILE_P (x);

      /* For all ASM_OPERANDS, we must traverse the vector of input operands.
	 We can not just fall through here since then we would be confused
	 by the ASM_INPUT rtx inside ASM_OPERANDS, which do not indicate
	 traditional asms unlike their normal usage.  */

      for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
	mark_set_resources (ASM_OPERANDS_INPUT (x, i), res, in_dest, 0);
      return;

    default:
      break;
    }

  /* Process each sub-expression and flag what it needs.  */
  format_ptr = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    switch (*format_ptr++)
      {
      case 'e':
	mark_set_resources (XEXP (x, i), res, in_dest, include_delayed_effects);
	break;

      case 'E':
	for (j = 0; j < XVECLEN (x, i); j++)
	  mark_set_resources (XVECEXP (x, i, j), res, in_dest,
			      include_delayed_effects);
	break;
      }
}
786 \f
787 /* Set the resources that are live at TARGET.
788
789 If TARGET is zero, we refer to the end of the current function and can
790 return our precomputed value.
791
792 Otherwise, we try to find out what is live by consulting the basic block
793 information. This is tricky, because we must consider the actions of
794 reload and jump optimization, which occur after the basic block information
795 has been computed.
796
797 Accordingly, we proceed as follows::
798
799 We find the previous BARRIER and look at all immediately following labels
800 (with no intervening active insns) to see if any of them start a basic
801 block. If we hit the start of the function first, we use block 0.
802
803 Once we have found a basic block and a corresponding first insns, we can
804 accurately compute the live status from basic_block_live_regs and
805 reg_renumber. (By starting at a label following a BARRIER, we are immune
806 to actions taken by reload and jump.) Then we scan all insns between
807 that point and our target. For each CLOBBER (or for call-clobbered regs
808 when we pass a CALL_INSN), mark the appropriate registers are dead. For
809 a SET, mark them as live.
810
811 We have to be careful when using REG_DEAD notes because they are not
812 updated by such things as find_equiv_reg. So keep track of registers
813 marked as dead that haven't been assigned to, and mark them dead at the
814 next CODE_LABEL since reload and jump won't propagate values across labels.
815
816 If we cannot find the start of a basic block (should be a very rare
817 case, if it can happen at all), mark everything as potentially live.
818
819 Next, scan forward from TARGET looking for things set or clobbered
820 before they are used. These are not live.
821
822 Because we can be called many times on the same target, save our results
823 in a hash table indexed by INSN_UID. This is only done if the function
824 init_resource_info () was invoked before we are called. */
825
826 void
827 mark_target_live_regs (insns, target, res)
828 rtx insns;
829 rtx target;
830 struct resources *res;
831 {
832 int b = -1;
833 int i;
834 struct target_info *tinfo = NULL;
835 rtx insn;
836 rtx jump_insn = 0;
837 rtx jump_target;
838 HARD_REG_SET scratch;
839 struct resources set, needed;
840
841 /* Handle end of function. */
842 if (target == 0)
843 {
844 *res = end_of_function_needs;
845 return;
846 }
847
848 /* We have to assume memory is needed, but the CC isn't. */
849 res->memory = 1;
850 res->volatil = res->unch_memory = 0;
851 res->cc = 0;
852
853 /* See if we have computed this value already. */
854 if (target_hash_table != NULL)
855 {
856 for (tinfo = target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME];
857 tinfo; tinfo = tinfo->next)
858 if (tinfo->uid == INSN_UID (target))
859 break;
860
861 /* Start by getting the basic block number. If we have saved
862 information, we can get it from there unless the insn at the
863 start of the basic block has been deleted. */
864 if (tinfo && tinfo->block != -1
865 && ! INSN_DELETED_P (BLOCK_HEAD (tinfo->block)))
866 b = tinfo->block;
867 }
868
869 if (b == -1)
870 b = find_basic_block (target);
871
872 if (target_hash_table != NULL)
873 {
874 if (tinfo)
875 {
876 /* If the information is up-to-date, use it. Otherwise, we will
877 update it below. */
878 if (b == tinfo->block && b != -1 && tinfo->bb_tick == bb_ticks[b])
879 {
880 COPY_HARD_REG_SET (res->regs, tinfo->live_regs);
881 return;
882 }
883 }
884 else
885 {
886 /* Allocate a place to put our results and chain it into the
887 hash table. */
888 tinfo = (struct target_info *) oballoc (sizeof (struct target_info));
889 tinfo->uid = INSN_UID (target);
890 tinfo->block = b;
891 tinfo->next = target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME];
892 target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME] = tinfo;
893 }
894 }
895
896 CLEAR_HARD_REG_SET (pending_dead_regs);
897
898 /* If we found a basic block, get the live registers from it and update
899 them with anything set or killed between its start and the insn before
900 TARGET. Otherwise, we must assume everything is live. */
901 if (b != -1)
902 {
903 regset regs_live = BASIC_BLOCK (b)->global_live_at_start;
904 int j;
905 int regno;
906 rtx start_insn, stop_insn;
907
908 /* Compute hard regs live at start of block -- this is the real hard regs
909 marked live, plus live pseudo regs that have been renumbered to
910 hard regs. */
911
912 REG_SET_TO_HARD_REG_SET (current_live_regs, regs_live);
913
914 EXECUTE_IF_SET_IN_REG_SET
915 (regs_live, FIRST_PSEUDO_REGISTER, i,
916 {
917 if ((regno = reg_renumber[i]) >= 0)
918 for (j = regno;
919 j < regno + HARD_REGNO_NREGS (regno,
920 PSEUDO_REGNO_MODE (i));
921 j++)
922 SET_HARD_REG_BIT (current_live_regs, j);
923 });
924
925 /* Get starting and ending insn, handling the case where each might
926 be a SEQUENCE. */
927 start_insn = (b == 0 ? insns : BLOCK_HEAD (b));
928 stop_insn = target;
929
930 if (GET_CODE (start_insn) == INSN
931 && GET_CODE (PATTERN (start_insn)) == SEQUENCE)
932 start_insn = XVECEXP (PATTERN (start_insn), 0, 0);
933
934 if (GET_CODE (stop_insn) == INSN
935 && GET_CODE (PATTERN (stop_insn)) == SEQUENCE)
936 stop_insn = next_insn (PREV_INSN (stop_insn));
937
938 for (insn = start_insn; insn != stop_insn;
939 insn = next_insn_no_annul (insn))
940 {
941 rtx link;
942 rtx real_insn = insn;
943
944 /* If this insn is from the target of a branch, it isn't going to
945 be used in the sequel. If it is used in both cases, this
946 test will not be true. */
947 if (INSN_FROM_TARGET_P (insn))
948 continue;
949
950 /* If this insn is a USE made by update_block, we care about the
951 underlying insn. */
952 if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == USE
953 && GET_RTX_CLASS (GET_CODE (XEXP (PATTERN (insn), 0))) == 'i')
954 real_insn = XEXP (PATTERN (insn), 0);
955
956 if (GET_CODE (real_insn) == CALL_INSN)
957 {
958 /* CALL clobbers all call-used regs that aren't fixed except
959 sp, ap, and fp. Do this before setting the result of the
960 call live. */
961 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
962 if (call_used_regs[i]
963 && i != STACK_POINTER_REGNUM && i != FRAME_POINTER_REGNUM
964 && i != ARG_POINTER_REGNUM
965 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
966 && i != HARD_FRAME_POINTER_REGNUM
967 #endif
968 #if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
969 && ! (i == ARG_POINTER_REGNUM && fixed_regs[i])
970 #endif
971 #if defined (PIC_OFFSET_TABLE_REGNUM) && !defined (PIC_OFFSET_TABLE_REG_CALL_CLOBBERED)
972 && ! (i == PIC_OFFSET_TABLE_REGNUM && flag_pic)
973 #endif
974 )
975 CLEAR_HARD_REG_BIT (current_live_regs, i);
976
977 /* A CALL_INSN sets any global register live, since it may
978 have been modified by the call. */
979 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
980 if (global_regs[i])
981 SET_HARD_REG_BIT (current_live_regs, i);
982 }
983
984 /* Mark anything killed in an insn to be deadened at the next
985 label. Ignore USE insns; the only REG_DEAD notes will be for
986 parameters. But they might be early. A CALL_INSN will usually
987 clobber registers used for parameters. It isn't worth bothering
988 with the unlikely case when it won't. */
989 if ((GET_CODE (real_insn) == INSN
990 && GET_CODE (PATTERN (real_insn)) != USE
991 && GET_CODE (PATTERN (real_insn)) != CLOBBER)
992 || GET_CODE (real_insn) == JUMP_INSN
993 || GET_CODE (real_insn) == CALL_INSN)
994 {
995 for (link = REG_NOTES (real_insn); link; link = XEXP (link, 1))
996 if (REG_NOTE_KIND (link) == REG_DEAD
997 && GET_CODE (XEXP (link, 0)) == REG
998 && REGNO (XEXP (link, 0)) < FIRST_PSEUDO_REGISTER)
999 {
1000 int first_regno = REGNO (XEXP (link, 0));
1001 int last_regno
1002 = (first_regno
1003 + HARD_REGNO_NREGS (first_regno,
1004 GET_MODE (XEXP (link, 0))));
1005
1006 for (i = first_regno; i < last_regno; i++)
1007 SET_HARD_REG_BIT (pending_dead_regs, i);
1008 }
1009
1010 note_stores (PATTERN (real_insn), update_live_status);
1011
1012 /* If any registers were unused after this insn, kill them.
1013 These notes will always be accurate. */
1014 for (link = REG_NOTES (real_insn); link; link = XEXP (link, 1))
1015 if (REG_NOTE_KIND (link) == REG_UNUSED
1016 && GET_CODE (XEXP (link, 0)) == REG
1017 && REGNO (XEXP (link, 0)) < FIRST_PSEUDO_REGISTER)
1018 {
1019 int first_regno = REGNO (XEXP (link, 0));
1020 int last_regno
1021 = (first_regno
1022 + HARD_REGNO_NREGS (first_regno,
1023 GET_MODE (XEXP (link, 0))));
1024
1025 for (i = first_regno; i < last_regno; i++)
1026 CLEAR_HARD_REG_BIT (current_live_regs, i);
1027 }
1028 }
1029
1030 else if (GET_CODE (real_insn) == CODE_LABEL)
1031 {
1032 /* A label clobbers the pending dead registers since neither
1033 reload nor jump will propagate a value across a label. */
1034 AND_COMPL_HARD_REG_SET (current_live_regs, pending_dead_regs);
1035 CLEAR_HARD_REG_SET (pending_dead_regs);
1036 }
1037
1038 /* The beginning of the epilogue corresponds to the end of the
1039 RTL chain when there are no epilogue insns. Certain resources
1040 are implicitly required at that point. */
1041 else if (GET_CODE (real_insn) == NOTE
1042 && NOTE_LINE_NUMBER (real_insn) == NOTE_INSN_EPILOGUE_BEG)
1043 IOR_HARD_REG_SET (current_live_regs, start_of_epilogue_needs.regs);
1044 }
1045
1046 COPY_HARD_REG_SET (res->regs, current_live_regs);
1047 if (tinfo != NULL)
1048 {
1049 tinfo->block = b;
1050 tinfo->bb_tick = bb_ticks[b];
1051 }
1052 }
1053 else
1054 /* We didn't find the start of a basic block. Assume everything
1055 in use. This should happen only extremely rarely. */
1056 SET_HARD_REG_SET (res->regs);
1057
1058 CLEAR_RESOURCE (&set);
1059 CLEAR_RESOURCE (&needed);
1060
1061 jump_insn = find_dead_or_set_registers (target, res, &jump_target, 0,
1062 set, needed);
1063
1064 /* If we hit an unconditional branch, we have another way of finding out
1065 what is live: we can see what is live at the branch target and include
1066 anything used but not set before the branch. The only things that are
1067 live are those that are live using the above test and the test below. */
1068
1069 if (jump_insn)
1070 {
1071 struct resources new_resources;
1072 rtx stop_insn = next_active_insn (jump_insn);
1073
1074 mark_target_live_regs (insns, next_active_insn (jump_target),
1075 &new_resources);
1076 CLEAR_RESOURCE (&set);
1077 CLEAR_RESOURCE (&needed);
1078
1079 /* Include JUMP_INSN in the needed registers. */
1080 for (insn = target; insn != stop_insn; insn = next_active_insn (insn))
1081 {
1082 mark_referenced_resources (insn, &needed, 1);
1083
1084 COPY_HARD_REG_SET (scratch, needed.regs);
1085 AND_COMPL_HARD_REG_SET (scratch, set.regs);
1086 IOR_HARD_REG_SET (new_resources.regs, scratch);
1087
1088 mark_set_resources (insn, &set, 0, 1);
1089 }
1090
1091 AND_HARD_REG_SET (res->regs, new_resources.regs);
1092 }
1093
1094 if (tinfo != NULL)
1095 {
1096 COPY_HARD_REG_SET (tinfo->live_regs, res->regs);
1097 }
1098 }
1099 \f
/* Initialize the resources required by mark_target_live_regs ().
   This should be invoked before the first call to mark_target_live_regs.

   EPILOGUE_INSN is the first insn of the epilogue (or the last insn of
   the function when there is no epilogue); the insns following it are
   scanned below to account for registers the epilogue sets.  */

void
init_resource_info (epilogue_insn)
     rtx epilogue_insn;
{
  int i;

  /* Indicate what resources are required to be valid at the end of the current
     function.  The condition code never is and memory always is.  If the
     frame pointer is needed, it is and so is the stack pointer unless
     EXIT_IGNORE_STACK is non-zero.  If the frame pointer is not needed, the
     stack pointer is.  Registers used to return the function value are
     needed.  Registers holding global variables are needed.  */

  end_of_function_needs.cc = 0;
  end_of_function_needs.memory = 1;
  end_of_function_needs.unch_memory = 0;
  CLEAR_HARD_REG_SET (end_of_function_needs.regs);

  if (frame_pointer_needed)
    {
      SET_HARD_REG_BIT (end_of_function_needs.regs, FRAME_POINTER_REGNUM);
      /* On targets where the soft and hard frame pointers differ, both
	 must survive to the end of the function.  */
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
      SET_HARD_REG_BIT (end_of_function_needs.regs, HARD_FRAME_POINTER_REGNUM);
#endif
      /* If EXIT_IGNORE_STACK is not defined, this reduces to an
	 unconditional SET of the stack pointer bit.  */
#ifdef EXIT_IGNORE_STACK
      if (! EXIT_IGNORE_STACK
	  || current_function_sp_is_unchanging)
#endif
	SET_HARD_REG_BIT (end_of_function_needs.regs, STACK_POINTER_REGNUM);
    }
  else
    SET_HARD_REG_BIT (end_of_function_needs.regs, STACK_POINTER_REGNUM);

  /* Registers carrying the return value must remain live at exit.  */
  if (current_function_return_rtx != 0)
    mark_referenced_resources (current_function_return_rtx,
			       &end_of_function_needs, 1);

  /* Global registers, and any the target says the epilogue uses, are
     likewise needed at function exit.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (global_regs[i]
#ifdef EPILOGUE_USES
	|| EPILOGUE_USES (i)
#endif
	)
      SET_HARD_REG_BIT (end_of_function_needs.regs, i);

  /* The registers required to be live at the end of the function are
     represented in the flow information as being dead just prior to
     reaching the end of the function.  For example, the return of a value
     might be represented by a USE of the return register immediately
     followed by an unconditional jump to the return label where the
     return label is the end of the RTL chain.  The end of the RTL chain
     is then taken to mean that the return register is live.

     This sequence is no longer maintained when epilogue instructions are
     added to the RTL chain.  To reconstruct the original meaning, the
     start of the epilogue (NOTE_INSN_EPILOGUE_BEG) is regarded as the
     point where these registers become live (start_of_epilogue_needs).
     If epilogue instructions are present, the registers set by those
     instructions won't have been processed by flow.  Thus, those
     registers are additionally required at the end of the RTL chain
     (end_of_function_needs).  */

  start_of_epilogue_needs = end_of_function_needs;

  while ((epilogue_insn = next_nonnote_insn (epilogue_insn)))
    mark_set_resources (epilogue_insn, &end_of_function_needs, 0, 1);

  /* Allocate and initialize the tables used by mark_target_live_regs.
     xcalloc zero-fills, so every hash bucket starts empty and every
     bb_tick generation counter starts at zero.  */
  target_hash_table = (struct target_info **)
    xcalloc (TARGET_HASH_PRIME, sizeof (struct target_info *));
  bb_ticks = (int *) xcalloc (n_basic_blocks, sizeof (int));
}
1175 \f
1176 /* Free up the resources allcated to mark_target_live_regs (). This
1177 should be invoked after the last call to mark_target_live_regs (). */
1178
1179 void
1180 free_resource_info ()
1181 {
1182 if (target_hash_table != NULL)
1183 {
1184 free (target_hash_table);
1185 target_hash_table = NULL;
1186 }
1187
1188 if (bb_ticks != NULL)
1189 {
1190 free (bb_ticks);
1191 bb_ticks = NULL;
1192 }
1193 }
1194 \f
1195 /* Clear any hashed information that we have stored for INSN. */
1196
1197 void
1198 clear_hashed_info_for_insn (insn)
1199 rtx insn;
1200 {
1201 struct target_info *tinfo;
1202
1203 if (target_hash_table != NULL)
1204 {
1205 for (tinfo = target_hash_table[INSN_UID (insn) % TARGET_HASH_PRIME];
1206 tinfo; tinfo = tinfo->next)
1207 if (tinfo->uid == INSN_UID (insn))
1208 break;
1209
1210 if (tinfo)
1211 tinfo->block = -1;
1212 }
1213 }
1214 \f
1215 /* Increment the tick count for the basic block that contains INSN. */
1216
1217 void
1218 incr_ticks_for_insn (insn)
1219 rtx insn;
1220 {
1221 int b = find_basic_block (insn);
1222
1223 if (b != -1)
1224 bb_ticks[b]++;
1225 }
1226 \f
1227 /* Add TRIAL to the set of resources used at the end of the current
1228 function. */
1229 void
1230 mark_end_of_function_resources (trial, include_delayed_effects)
1231 rtx trial;
1232 int include_delayed_effects;
1233 {
1234 mark_referenced_resources (trial, &end_of_function_needs,
1235 include_delayed_effects);
1236 }
1237 \f
1238 /* Try to find a hard register of mode MODE, matching the register class in
1239 CLASS_STR, which is available at the beginning of insn CURRENT_INSN and
1240 remains available until the end of LAST_INSN. LAST_INSN may be NULL_RTX,
1241 in which case the only condition is that the register must be available
1242 before CURRENT_INSN.
1243 Registers that already have bits set in REG_SET will not be considered.
1244
1245 If an appropriate register is available, it will be returned and the
1246 corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
1247 returned. */
1248
1249 rtx
1250 find_free_register (current_insn, last_insn, class_str, mode, reg_set)
1251 rtx current_insn, last_insn;
1252 char *class_str;
1253 int mode;
1254 HARD_REG_SET *reg_set;
1255 {
1256 int i, j;
1257 struct resources used;
1258 unsigned char clet = class_str[0];
1259 enum reg_class class
1260 = (clet == 'r' ? GENERAL_REGS : REG_CLASS_FROM_LETTER (clet));
1261
1262 mark_target_live_regs (get_insns (), current_insn, &used);
1263 if (last_insn)
1264 while (current_insn != last_insn)
1265 {
1266 /* Exclude anything set in this insn. */
1267 mark_set_resources (PATTERN (current_insn), &used, 0, 1);
1268 current_insn = next_nonnote_insn (current_insn);
1269 }
1270
1271
1272 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1273 {
1274 int regno;
1275 int success;
1276
1277 #ifdef REG_ALLOC_ORDER
1278 regno = reg_alloc_order [i];
1279 #else
1280 regno = i;
1281 #endif
1282
1283 /* Don't allocate fixed registers. */
1284 if (fixed_regs[regno])
1285 continue;
1286 /* Make sure the register is of the right class. */
1287 if (! TEST_HARD_REG_BIT (reg_class_contents[class], regno))
1288 continue;
1289 /* And can support the mode we need. */
1290 if (! HARD_REGNO_MODE_OK (regno, mode))
1291 continue;
1292 /* And that we don't create an extra save/restore. */
1293 if (! call_used_regs[regno] && ! regs_ever_live[regno])
1294 continue;
1295
1296 success = 1;
1297 for (j = HARD_REGNO_NREGS (regno, mode) - 1; j >= 0; j--)
1298 {
1299 if (TEST_HARD_REG_BIT (*reg_set, regno + j)
1300 || TEST_HARD_REG_BIT (used.regs, regno + j))
1301 {
1302 success = 0;
1303 break;
1304 }
1305 }
1306 if (success)
1307 {
1308 for (j = HARD_REGNO_NREGS (regno, mode) - 1; j >= 0; j--)
1309 {
1310 SET_HARD_REG_BIT (*reg_set, regno + j);
1311 }
1312 return gen_rtx_REG (mode, regno);
1313 }
1314 }
1315 return NULL_RTX;
1316 }
1317
1318 /* Return true if REG is dead at CURRENT_INSN. */
1319
1320 int
1321 reg_dead_p (current_insn, reg)
1322 rtx current_insn, reg;
1323 {
1324 struct resources used;
1325 int regno, j;
1326
1327 mark_target_live_regs (get_insns (), current_insn, &used);
1328
1329 regno = REGNO (reg);
1330 for (j = HARD_REGNO_NREGS (regno, GET_MODE (reg)) - 1; j >= 0; j--)
1331 {
1332 if (TEST_HARD_REG_BIT (used.regs, regno + j))
1333 return 0;
1334 }
1335
1336 return 1;
1337 }