gcc/rtlanal.c
1 /* Analyze RTL for C-Compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22
23 #include "config.h"
24 #include "system.h"
25 #include "toplev.h"
26 #include "rtl.h"
27 #include "hard-reg-set.h"
28 #include "insn-config.h"
29 #include "recog.h"
30 #include "tm_p.h"
31 #include "flags.h"
32
33 /* Forward declarations */
34 static int global_reg_mentioned_p_1 PARAMS ((rtx *, void *));
35 static void set_of_1 PARAMS ((rtx, rtx, void *));
36 static void insn_dependent_p_1 PARAMS ((rtx, rtx, void *));
37 static int computed_jump_p_1 PARAMS ((rtx));
38 static void parms_set PARAMS ((rtx, rtx, void *));
39
40 /* Bit flags that specify the machine subtype we are compiling for.
41 Bits are tested using macros TARGET_... defined in the tm.h file
42 and set by `-m...' switches. Must be defined in rtlanal.c. */
43
44 int target_flags;
45 \f
46 /* Return 1 if the value of X is unstable
47 (would be different at a different point in the program).
48 The frame pointer, arg pointer, etc. are considered stable
49 (within one function) and so is anything marked `unchanging'. */
50
51 int
52 rtx_unstable_p (x)
53 rtx x;
54 {
55 RTX_CODE code = GET_CODE (x);
56 int i;
57 const char *fmt;
58
59 switch (code)
60 {
61 case MEM:
62 return ! RTX_UNCHANGING_P (x) || rtx_unstable_p (XEXP (x, 0));
63
64 case QUEUED:
65 return 1;
66
67 case ADDRESSOF:
68 case CONST:
69 case CONST_INT:
70 case CONST_DOUBLE:
71 case CONST_VECTOR:
72 case SYMBOL_REF:
73 case LABEL_REF:
74 return 0;
75
76 case REG:
77 /* As in rtx_varies_p, we have to use the actual rtx, not reg number. */
78 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
79 /* The arg pointer varies if it is not a fixed register. */
80 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])
81 || RTX_UNCHANGING_P (x))
82 return 0;
83 #ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
84 /* ??? When call-clobbered, the value is stable modulo the restore
85 that must happen after a call. This currently screws up local-alloc
86 into believing that the restore is not needed. */
87 if (x == pic_offset_table_rtx)
88 return 0;
89 #endif
90 return 1;
91
92 case ASM_OPERANDS:
93 if (MEM_VOLATILE_P (x))
94 return 1;
95
96 /* FALLTHROUGH */
97
98 default:
99 break;
100 }
101
102 fmt = GET_RTX_FORMAT (code);
103 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
104 if (fmt[i] == 'e')
105 {
106 if (rtx_unstable_p (XEXP (x, i)))
107 return 1;
108 }
109 else if (fmt[i] == 'E')
110 {
111 int j;
112 for (j = 0; j < XVECLEN (x, i); j++)
113 if (rtx_unstable_p (XVECEXP (x, i, j)))
114 return 1;
115 }
116
117 return 0;
118 }
119
120 /* Return 1 if X has a value that can vary even between two
121 executions of the program. 0 means X can be compared reliably
122 against certain constants or near-constants.
123 FOR_ALIAS is nonzero if we are called from alias analysis; if it is
124 zero, we are slightly more conservative.
125 The frame pointer and the arg pointer are considered constant. */
126
127 int
128 rtx_varies_p (x, for_alias)
129 rtx x;
130 int for_alias;
131 {
132 RTX_CODE code = GET_CODE (x);
133 int i;
134 const char *fmt;
135
136 switch (code)
137 {
138 case MEM:
139 return ! RTX_UNCHANGING_P (x) || rtx_varies_p (XEXP (x, 0), for_alias);
140
141 case QUEUED:
142 return 1;
143
144 case CONST:
145 case CONST_INT:
146 case CONST_DOUBLE:
147 case CONST_VECTOR:
148 case SYMBOL_REF:
149 case LABEL_REF:
150 return 0;
151
152 case REG:
153 /* Note that we have to test for the actual rtx used for the frame
154 and arg pointers and not just the register number in case we have
155 eliminated the frame and/or arg pointer and are using it
156 for pseudos. */
157 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
158 /* The arg pointer varies if it is not a fixed register. */
159 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
160 return 0;
161 if (x == pic_offset_table_rtx
162 #ifdef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
163 /* ??? When call-clobbered, the value is stable modulo the restore
164 that must happen after a call. This currently screws up
165 local-alloc into believing that the restore is not needed, so we
166 must return 0 only if we are called from alias analysis. */
167 && for_alias
168 #endif
169 )
170 return 0;
171 return 1;
172
173 case LO_SUM:
174 /* The operand 0 of a LO_SUM is considered constant
175 (in fact it is related specifically to operand 1)
176 during alias analysis. */
177 return (! for_alias && rtx_varies_p (XEXP (x, 0), for_alias))
178 || rtx_varies_p (XEXP (x, 1), for_alias);
179
180 case ASM_OPERANDS:
181 if (MEM_VOLATILE_P (x))
182 return 1;
183
184 /* FALLTHROUGH */
185
186 default:
187 break;
188 }
189
190 fmt = GET_RTX_FORMAT (code);
191 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
192 if (fmt[i] == 'e')
193 {
194 if (rtx_varies_p (XEXP (x, i), for_alias))
195 return 1;
196 }
197 else if (fmt[i] == 'E')
198 {
199 int j;
200 for (j = 0; j < XVECLEN (x, i); j++)
201 if (rtx_varies_p (XVECEXP (x, i, j), for_alias))
202 return 1;
203 }
204
205 return 0;
206 }
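
/* Illustrative examples (editorial addition, not part of the original
   source).  With the definitions above, and /u denoting RTX_UNCHANGING_P:

     (reg 100)                      rtx_unstable_p 1   rtx_varies_p 1
     (plus (reg fp) (const_int 8))  rtx_unstable_p 0   rtx_varies_p 0
     (mem (reg 100))                rtx_unstable_p 1   rtx_varies_p 1
     (mem/u (symbol_ref "x"))       rtx_unstable_p 0   rtx_varies_p 0

   Only stable registers, constants, and unchanging memory whose address
   is itself stable are treated as stable / non-varying.  */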
207
208 /* Return 0 if the use of X as an address in a MEM can cause a trap. */
209
210 int
211 rtx_addr_can_trap_p (x)
212 rtx x;
213 {
214 enum rtx_code code = GET_CODE (x);
215
216 switch (code)
217 {
218 case SYMBOL_REF:
219 return SYMBOL_REF_WEAK (x);
220
221 case LABEL_REF:
222 return 0;
223
224 case REG:
225 /* As in rtx_varies_p, we have to use the actual rtx, not reg number. */
226 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
227 || x == stack_pointer_rtx
228 /* The arg pointer varies if it is not a fixed register. */
229 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
230 return 0;
231 /* All of the virtual frame registers are stack references. */
232 if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
233 && REGNO (x) <= LAST_VIRTUAL_REGISTER)
234 return 0;
235 return 1;
236
237 case CONST:
238 return rtx_addr_can_trap_p (XEXP (x, 0));
239
240 case PLUS:
241 /* An address is assumed not to trap if it is an address that can't
242 trap plus a constant integer or it is the pic register plus a
243 constant. */
244 return ! ((! rtx_addr_can_trap_p (XEXP (x, 0))
245 && GET_CODE (XEXP (x, 1)) == CONST_INT)
246 || (XEXP (x, 0) == pic_offset_table_rtx
247 && CONSTANT_P (XEXP (x, 1))));
248
249 case LO_SUM:
250 case PRE_MODIFY:
251 return rtx_addr_can_trap_p (XEXP (x, 1));
252
253 case PRE_DEC:
254 case PRE_INC:
255 case POST_DEC:
256 case POST_INC:
257 case POST_MODIFY:
258 return rtx_addr_can_trap_p (XEXP (x, 0));
259
260 default:
261 break;
262 }
263
264 /* If it isn't one of the cases above, it can cause a trap. */
265 return 1;
266 }
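
/* Illustrative examples (editorial addition): addresses formed from the
   stack or frame pointer plus a constant cannot trap, while an arbitrary
   pseudo or a weak symbol can:

     (plus (reg sp) (const_int 16))   -> 0 (cannot trap)
     (symbol_ref "foo")               -> 0, unless SYMBOL_REF_WEAK is set
     (reg 200)                        -> 1 (may point anywhere)  */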
267
268 /* Return 1 if X refers to a memory location whose address
269 cannot be compared reliably with constant addresses,
270 or if X refers to a BLKmode memory object.
271 FOR_ALIAS is nonzero if we are called from alias analysis; if it is
272 zero, we are slightly more conservative. */
273
274 int
275 rtx_addr_varies_p (x, for_alias)
276 rtx x;
277 int for_alias;
278 {
279 enum rtx_code code;
280 int i;
281 const char *fmt;
282
283 if (x == 0)
284 return 0;
285
286 code = GET_CODE (x);
287 if (code == MEM)
288 return GET_MODE (x) == BLKmode || rtx_varies_p (XEXP (x, 0), for_alias);
289
290 fmt = GET_RTX_FORMAT (code);
291 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
292 if (fmt[i] == 'e')
293 {
294 if (rtx_addr_varies_p (XEXP (x, i), for_alias))
295 return 1;
296 }
297 else if (fmt[i] == 'E')
298 {
299 int j;
300 for (j = 0; j < XVECLEN (x, i); j++)
301 if (rtx_addr_varies_p (XVECEXP (x, i, j), for_alias))
302 return 1;
303 }
304 return 0;
305 }
306 \f
307 /* Return the value of the integer term in X, if one is apparent;
308 otherwise return 0.
309 Only obvious integer terms are detected.
310 This is used in cse.c with the `related_value' field. */
311
312 HOST_WIDE_INT
313 get_integer_term (x)
314 rtx x;
315 {
316 if (GET_CODE (x) == CONST)
317 x = XEXP (x, 0);
318
319 if (GET_CODE (x) == MINUS
320 && GET_CODE (XEXP (x, 1)) == CONST_INT)
321 return - INTVAL (XEXP (x, 1));
322 if (GET_CODE (x) == PLUS
323 && GET_CODE (XEXP (x, 1)) == CONST_INT)
324 return INTVAL (XEXP (x, 1));
325 return 0;
326 }
327
328 /* If X is a constant, return the value sans apparent integer term;
329 otherwise return 0.
330 Only obvious integer terms are detected. */
331
332 rtx
333 get_related_value (x)
334 rtx x;
335 {
336 if (GET_CODE (x) != CONST)
337 return 0;
338 x = XEXP (x, 0);
339 if (GET_CODE (x) == PLUS
340 && GET_CODE (XEXP (x, 1)) == CONST_INT)
341 return XEXP (x, 0);
342 else if (GET_CODE (x) == MINUS
343 && GET_CODE (XEXP (x, 1)) == CONST_INT)
344 return XEXP (x, 0);
345 return 0;
346 }
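
/* Illustrative example (editorial addition): for the constant
   (const (plus (symbol_ref "x") (const_int 8))), get_integer_term
   returns 8 and get_related_value returns (symbol_ref "x"); cse.c uses
   this pair to relate addresses that differ only by a constant offset.  */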
347 \f
348 /* Given a tablejump insn INSN, return the RTL expression for the offset
349 into the jump table. If the offset cannot be determined, then return
350 NULL_RTX.
351
352 If EARLIEST is non-zero, it is a pointer to a place where the earliest
353 insn used in locating the offset is stored. */
354
355 rtx
356 get_jump_table_offset (insn, earliest)
357 rtx insn;
358 rtx *earliest;
359 {
360 rtx label;
361 rtx table;
362 rtx set;
363 rtx old_insn;
364 rtx x;
365 rtx old_x;
366 rtx y;
367 rtx old_y;
368 int i;
369
370 if (GET_CODE (insn) != JUMP_INSN
371 || ! (label = JUMP_LABEL (insn))
372 || ! (table = NEXT_INSN (label))
373 || GET_CODE (table) != JUMP_INSN
374 || (GET_CODE (PATTERN (table)) != ADDR_VEC
375 && GET_CODE (PATTERN (table)) != ADDR_DIFF_VEC)
376 || ! (set = single_set (insn)))
377 return NULL_RTX;
378
379 x = SET_SRC (set);
380
381 /* Some targets (e.g., ARM) emit a tablejump that also
382 contains the out-of-range target. */
383 if (GET_CODE (x) == IF_THEN_ELSE
384 && GET_CODE (XEXP (x, 2)) == LABEL_REF)
385 x = XEXP (x, 1);
386
387 /* Search backwards and locate the expression stored in X. */
388 for (old_x = NULL_RTX; GET_CODE (x) == REG && x != old_x;
389 old_x = x, x = find_last_value (x, &insn, NULL_RTX, 0))
390 ;
391
392 /* If X is an expression using a relative address then strip
393 off the addition / subtraction of PC, PIC_OFFSET_TABLE_REGNUM,
394 or the jump table label. */
395 if (GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC
396 && (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS))
397 {
398 for (i = 0; i < 2; i++)
399 {
400 old_insn = insn;
401 y = XEXP (x, i);
402
403 if (y == pc_rtx || y == pic_offset_table_rtx)
404 break;
405
406 for (old_y = NULL_RTX; GET_CODE (y) == REG && y != old_y;
407 old_y = y, y = find_last_value (y, &old_insn, NULL_RTX, 0))
408 ;
409
410 if ((GET_CODE (y) == LABEL_REF && XEXP (y, 0) == label))
411 break;
412 }
413
414 if (i >= 2)
415 return NULL_RTX;
416
417 x = XEXP (x, 1 - i);
418
419 for (old_x = NULL_RTX; GET_CODE (x) == REG && x != old_x;
420 old_x = x, x = find_last_value (x, &insn, NULL_RTX, 0))
421 ;
422 }
423
424 /* Strip off any sign or zero extension. */
425 if (GET_CODE (x) == SIGN_EXTEND || GET_CODE (x) == ZERO_EXTEND)
426 {
427 x = XEXP (x, 0);
428
429 for (old_x = NULL_RTX; GET_CODE (x) == REG && x != old_x;
430 old_x = x, x = find_last_value (x, &insn, NULL_RTX, 0))
431 ;
432 }
433
434 /* If X isn't a MEM then this isn't a tablejump we understand. */
435 if (GET_CODE (x) != MEM)
436 return NULL_RTX;
437
438 /* Strip off the MEM. */
439 x = XEXP (x, 0);
440
441 for (old_x = NULL_RTX; GET_CODE (x) == REG && x != old_x;
442 old_x = x, x = find_last_value (x, &insn, NULL_RTX, 0))
443 ;
444
445 /* If X isn't a PLUS then this isn't a tablejump we understand. */
446 if (GET_CODE (x) != PLUS)
447 return NULL_RTX;
448
449 /* At this point we should have an expression representing the jump table
450 plus an offset. Examine each operand in order to determine which one
451 represents the jump table. Knowing that tells us that the other operand
452 must represent the offset. */
453 for (i = 0; i < 2; i++)
454 {
455 old_insn = insn;
456 y = XEXP (x, i);
457
458 for (old_y = NULL_RTX; GET_CODE (y) == REG && y != old_y;
459 old_y = y, y = find_last_value (y, &old_insn, NULL_RTX, 0))
460 ;
461
462 if ((GET_CODE (y) == CONST || GET_CODE (y) == LABEL_REF)
463 && reg_mentioned_p (label, y))
464 break;
465 }
466
467 if (i >= 2)
468 return NULL_RTX;
469
470 x = XEXP (x, 1 - i);
471
472 /* Strip off the addition / subtraction of PIC_OFFSET_TABLE_REGNUM. */
473 if (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS)
474 for (i = 0; i < 2; i++)
475 if (XEXP (x, i) == pic_offset_table_rtx)
476 {
477 x = XEXP (x, 1 - i);
478 break;
479 }
480
481 if (earliest)
482 *earliest = insn;
483
484 /* Return the RTL expression representing the offset. */
485 return x;
486 }
487 \f
488 /* A subroutine of global_reg_mentioned_p, returns 1 if *LOC mentions
489 a global register. */
490
491 static int
492 global_reg_mentioned_p_1 (loc, data)
493 rtx *loc;
494 void *data ATTRIBUTE_UNUSED;
495 {
496 int regno;
497 rtx x = *loc;
498
499 if (! x)
500 return 0;
501
502 switch (GET_CODE (x))
503 {
504 case SUBREG:
505 if (GET_CODE (SUBREG_REG (x)) == REG)
506 {
507 if (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER
508 && global_regs[subreg_regno (x)])
509 return 1;
510 return 0;
511 }
512 break;
513
514 case REG:
515 regno = REGNO (x);
516 if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
517 return 1;
518 return 0;
519
520 case SCRATCH:
521 case PC:
522 case CC0:
523 case CONST_INT:
524 case CONST_DOUBLE:
525 case CONST:
526 case LABEL_REF:
527 return 0;
528
529 case CALL:
530 /* A non-constant call might use a global register. */
531 return 1;
532
533 default:
534 break;
535 }
536
537 return 0;
538 }
539
540 /* Returns non-zero if X mentions a global register. */
541
542 int
543 global_reg_mentioned_p (x)
544 rtx x;
545 {
546
547 if (INSN_P (x))
548 {
549 if (GET_CODE (x) == CALL_INSN)
550 {
551 if (! CONST_OR_PURE_CALL_P (x))
552 return 1;
553 x = CALL_INSN_FUNCTION_USAGE (x);
554 if (x == 0)
555 return 0;
556 }
557 else
558 x = PATTERN (x);
559 }
560
561 return for_each_rtx (&x, global_reg_mentioned_p_1, NULL);
562 }
563 \f
564 /* Return the number of places FIND appears within X. If COUNT_DEST is
565 zero, we do not count occurrences inside the destination of a SET. */
566
567 int
568 count_occurrences (x, find, count_dest)
569 rtx x, find;
570 int count_dest;
571 {
572 int i, j;
573 enum rtx_code code;
574 const char *format_ptr;
575 int count;
576
577 if (x == find)
578 return 1;
579
580 code = GET_CODE (x);
581
582 switch (code)
583 {
584 case REG:
585 case CONST_INT:
586 case CONST_DOUBLE:
587 case CONST_VECTOR:
588 case SYMBOL_REF:
589 case CODE_LABEL:
590 case PC:
591 case CC0:
592 return 0;
593
594 case MEM:
595 if (GET_CODE (find) == MEM && rtx_equal_p (x, find))
596 return 1;
597 break;
598
599 case SET:
600 if (SET_DEST (x) == find && ! count_dest)
601 return count_occurrences (SET_SRC (x), find, count_dest);
602 break;
603
604 default:
605 break;
606 }
607
608 format_ptr = GET_RTX_FORMAT (code);
609 count = 0;
610
611 for (i = 0; i < GET_RTX_LENGTH (code); i++)
612 {
613 switch (*format_ptr++)
614 {
615 case 'e':
616 count += count_occurrences (XEXP (x, i), find, count_dest);
617 break;
618
619 case 'E':
620 for (j = 0; j < XVECLEN (x, i); j++)
621 count += count_occurrences (XVECEXP (x, i, j), find, count_dest);
622 break;
623 }
624 }
625 return count;
626 }
627 \f
628 /* Nonzero if register REG appears somewhere within IN.
629 Also works if REG is not a register; in this case it checks
630 for a subexpression of IN that is Lisp "equal" to REG. */
631
632 int
633 reg_mentioned_p (reg, in)
634 rtx reg, in;
635 {
636 const char *fmt;
637 int i;
638 enum rtx_code code;
639
640 if (in == 0)
641 return 0;
642
643 if (reg == in)
644 return 1;
645
646 if (GET_CODE (in) == LABEL_REF)
647 return reg == XEXP (in, 0);
648
649 code = GET_CODE (in);
650
651 switch (code)
652 {
653 /* Compare registers by number. */
654 case REG:
655 return GET_CODE (reg) == REG && REGNO (in) == REGNO (reg);
656
657 /* These codes have no constituent expressions
658 and are unique. */
659 case SCRATCH:
660 case CC0:
661 case PC:
662 return 0;
663
664 case CONST_INT:
665 return GET_CODE (reg) == CONST_INT && INTVAL (in) == INTVAL (reg);
666
667 case CONST_VECTOR:
668 case CONST_DOUBLE:
669 /* These are kept unique for a given value. */
670 return 0;
671
672 default:
673 break;
674 }
675
676 if (GET_CODE (reg) == code && rtx_equal_p (reg, in))
677 return 1;
678
679 fmt = GET_RTX_FORMAT (code);
680
681 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
682 {
683 if (fmt[i] == 'E')
684 {
685 int j;
686 for (j = XVECLEN (in, i) - 1; j >= 0; j--)
687 if (reg_mentioned_p (reg, XVECEXP (in, i, j)))
688 return 1;
689 }
690 else if (fmt[i] == 'e'
691 && reg_mentioned_p (reg, XEXP (in, i)))
692 return 1;
693 }
694 return 0;
695 }
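
/* Minimal usage sketch (editorial addition, not part of the original
   source): a pass that wants a rough "does INSN read the value OTHER
   computes?" test might combine the predicates in this file as below.
   The function name is hypothetical.

     static int
     uses_value_set_by (insn, other)
	  rtx insn, other;
     {
       rtx set = single_set (other);
       return set != 0
	      && reg_mentioned_p (SET_DEST (set), PATTERN (insn));
     }
*/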
696 \f
697 /* Return 1 if in between BEG and END, exclusive of BEG and END, there is
698 no CODE_LABEL insn. */
699
700 int
701 no_labels_between_p (beg, end)
702 rtx beg, end;
703 {
704 rtx p;
705 if (beg == end)
706 return 0;
707 for (p = NEXT_INSN (beg); p != end; p = NEXT_INSN (p))
708 if (GET_CODE (p) == CODE_LABEL)
709 return 0;
710 return 1;
711 }
712
713 /* Return 1 if in between BEG and END, exclusive of BEG and END, there is
714 no JUMP_INSN insn. */
715
716 int
717 no_jumps_between_p (beg, end)
718 rtx beg, end;
719 {
720 rtx p;
721 for (p = NEXT_INSN (beg); p != end; p = NEXT_INSN (p))
722 if (GET_CODE (p) == JUMP_INSN)
723 return 0;
724 return 1;
725 }
726
727 /* Nonzero if register REG is used in an insn between
728 FROM_INSN and TO_INSN (exclusive of those two). */
729
730 int
731 reg_used_between_p (reg, from_insn, to_insn)
732 rtx reg, from_insn, to_insn;
733 {
734 rtx insn;
735
736 if (from_insn == to_insn)
737 return 0;
738
739 for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
740 if (INSN_P (insn)
741 && (reg_overlap_mentioned_p (reg, PATTERN (insn))
742 || (GET_CODE (insn) == CALL_INSN
743 && (find_reg_fusage (insn, USE, reg)
744 || find_reg_fusage (insn, CLOBBER, reg)))))
745 return 1;
746 return 0;
747 }
748 \f
749 /* Nonzero if the old value of X, a register, is referenced in BODY. If X
750 is entirely replaced by a new value and the only use is as a SET_DEST,
751 we do not consider it a reference. */
752
753 int
754 reg_referenced_p (x, body)
755 rtx x;
756 rtx body;
757 {
758 int i;
759
760 switch (GET_CODE (body))
761 {
762 case SET:
763 if (reg_overlap_mentioned_p (x, SET_SRC (body)))
764 return 1;
765
766 /* If the destination is anything other than CC0, PC, a REG or a SUBREG
767 of a REG that occupies all of the REG, the insn references X if
768 it is mentioned in the destination. */
769 if (GET_CODE (SET_DEST (body)) != CC0
770 && GET_CODE (SET_DEST (body)) != PC
771 && GET_CODE (SET_DEST (body)) != REG
772 && ! (GET_CODE (SET_DEST (body)) == SUBREG
773 && GET_CODE (SUBREG_REG (SET_DEST (body))) == REG
774 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (body))))
775 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
776 == ((GET_MODE_SIZE (GET_MODE (SET_DEST (body)))
777 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
778 && reg_overlap_mentioned_p (x, SET_DEST (body)))
779 return 1;
780 return 0;
781
782 case ASM_OPERANDS:
783 for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
784 if (reg_overlap_mentioned_p (x, ASM_OPERANDS_INPUT (body, i)))
785 return 1;
786 return 0;
787
788 case CALL:
789 case USE:
790 case IF_THEN_ELSE:
791 return reg_overlap_mentioned_p (x, body);
792
793 case TRAP_IF:
794 return reg_overlap_mentioned_p (x, TRAP_CONDITION (body));
795
796 case PREFETCH:
797 return reg_overlap_mentioned_p (x, XEXP (body, 0));
798
799 case UNSPEC:
800 case UNSPEC_VOLATILE:
801 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
802 if (reg_overlap_mentioned_p (x, XVECEXP (body, 0, i)))
803 return 1;
804 return 0;
805
806 case PARALLEL:
807 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
808 if (reg_referenced_p (x, XVECEXP (body, 0, i)))
809 return 1;
810 return 0;
811
812 case CLOBBER:
813 if (GET_CODE (XEXP (body, 0)) == MEM)
814 if (reg_overlap_mentioned_p (x, XEXP (XEXP (body, 0), 0)))
815 return 1;
816 return 0;
817
818 case COND_EXEC:
819 if (reg_overlap_mentioned_p (x, COND_EXEC_TEST (body)))
820 return 1;
821 return reg_referenced_p (x, COND_EXEC_CODE (body));
822
823 default:
824 return 0;
825 }
826 }
827
828 /* Nonzero if register REG is referenced in an insn between
829 FROM_INSN and TO_INSN (exclusive of those two). Sets of REG do
830 not count. */
831
832 int
833 reg_referenced_between_p (reg, from_insn, to_insn)
834 rtx reg, from_insn, to_insn;
835 {
836 rtx insn;
837
838 if (from_insn == to_insn)
839 return 0;
840
841 for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
842 if (INSN_P (insn)
843 && (reg_referenced_p (reg, PATTERN (insn))
844 || (GET_CODE (insn) == CALL_INSN
845 && find_reg_fusage (insn, USE, reg))))
846 return 1;
847 return 0;
848 }
849 \f
850 /* Nonzero if register REG is set or clobbered in an insn between
851 FROM_INSN and TO_INSN (exclusive of those two). */
852
853 int
854 reg_set_between_p (reg, from_insn, to_insn)
855 rtx reg, from_insn, to_insn;
856 {
857 rtx insn;
858
859 if (from_insn == to_insn)
860 return 0;
861
862 for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
863 if (INSN_P (insn) && reg_set_p (reg, insn))
864 return 1;
865 return 0;
866 }
867
868 /* Internals of reg_set_between_p.  Return nonzero if REG is set or
clobbered within INSN. */
869 int
870 reg_set_p (reg, insn)
871 rtx reg, insn;
872 {
873 rtx body = insn;
874
875 /* We can be passed an insn or part of one. If we are passed an insn,
876 check if a side-effect of the insn clobbers REG. */
877 if (INSN_P (insn))
878 {
879 if (FIND_REG_INC_NOTE (insn, reg)
880 || (GET_CODE (insn) == CALL_INSN
881 /* We'd like to test call_used_regs here, but rtlanal.c can't
882 reference that variable due to its use in genattrtab. So
883 we'll just be more conservative.
884
885 ??? Unless we could ensure that the CALL_INSN_FUNCTION_USAGE
886 information holds all clobbered registers. */
887 && ((GET_CODE (reg) == REG
888 && REGNO (reg) < FIRST_PSEUDO_REGISTER)
889 || GET_CODE (reg) == MEM
890 || find_reg_fusage (insn, CLOBBER, reg))))
891 return 1;
892
893 body = PATTERN (insn);
894 }
895
896 return set_of (reg, insn) != NULL_RTX;
897 }
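
/* Usage sketch (editorial addition): a typical caller asks whether REG
   still holds at INSN_B the value it held just after INSN_A, e.g.

     if (! reg_set_between_p (reg, insn_a, insn_b))
       ... REG is not assigned anywhere strictly between the two insns ...

   reg_set_p answers the same question for a single insn (or pattern).  */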
898
899 /* Similar to reg_set_between_p, but check all registers in X. Return 0
900 only if none of them are modified between START and END. Do not
901 consider non-registers one way or the other. */
902
903 int
904 regs_set_between_p (x, start, end)
905 rtx x;
906 rtx start, end;
907 {
908 enum rtx_code code = GET_CODE (x);
909 const char *fmt;
910 int i, j;
911
912 switch (code)
913 {
914 case CONST_INT:
915 case CONST_DOUBLE:
916 case CONST_VECTOR:
917 case CONST:
918 case SYMBOL_REF:
919 case LABEL_REF:
920 case PC:
921 case CC0:
922 return 0;
923
924 case REG:
925 return reg_set_between_p (x, start, end);
926
927 default:
928 break;
929 }
930
931 fmt = GET_RTX_FORMAT (code);
932 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
933 {
934 if (fmt[i] == 'e' && regs_set_between_p (XEXP (x, i), start, end))
935 return 1;
936
937 else if (fmt[i] == 'E')
938 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
939 if (regs_set_between_p (XVECEXP (x, i, j), start, end))
940 return 1;
941 }
942
943 return 0;
944 }
945
946 /* Similar to reg_set_between_p, but check all registers in X. Return 0
947 only if none of them are modified between START and END. Return 1 if X
948 contains a MEM not marked unchanging; this routine does no memory aliasing. */
949
950 int
951 modified_between_p (x, start, end)
952 rtx x;
953 rtx start, end;
954 {
955 enum rtx_code code = GET_CODE (x);
956 const char *fmt;
957 int i, j;
958
959 switch (code)
960 {
961 case CONST_INT:
962 case CONST_DOUBLE:
963 case CONST_VECTOR:
964 case CONST:
965 case SYMBOL_REF:
966 case LABEL_REF:
967 return 0;
968
969 case PC:
970 case CC0:
971 return 1;
972
973 case MEM:
974 /* If the memory is not constant, assume it is modified. If it is
975 constant, we still have to check the address. */
976 if (! RTX_UNCHANGING_P (x))
977 return 1;
978 break;
979
980 case REG:
981 return reg_set_between_p (x, start, end);
982
983 default:
984 break;
985 }
986
987 fmt = GET_RTX_FORMAT (code);
988 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
989 {
990 if (fmt[i] == 'e' && modified_between_p (XEXP (x, i), start, end))
991 return 1;
992
993 else if (fmt[i] == 'E')
994 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
995 if (modified_between_p (XVECEXP (x, i, j), start, end))
996 return 1;
997 }
998
999 return 0;
1000 }
1001
1002 /* Similar to reg_set_p, but check all registers in X. Return 0 only if none
1003 of them are modified in INSN. Return 1 if X contains a MEM not marked
1004 unchanging; this routine does no memory aliasing. */
1005
1006 int
1007 modified_in_p (x, insn)
1008 rtx x;
1009 rtx insn;
1010 {
1011 enum rtx_code code = GET_CODE (x);
1012 const char *fmt;
1013 int i, j;
1014
1015 switch (code)
1016 {
1017 case CONST_INT:
1018 case CONST_DOUBLE:
1019 case CONST_VECTOR:
1020 case CONST:
1021 case SYMBOL_REF:
1022 case LABEL_REF:
1023 return 0;
1024
1025 case PC:
1026 case CC0:
1027 return 1;
1028
1029 case MEM:
1030 /* If the memory is not constant, assume it is modified. If it is
1031 constant, we still have to check the address. */
1032 if (! RTX_UNCHANGING_P (x))
1033 return 1;
1034 break;
1035
1036 case REG:
1037 return reg_set_p (x, insn);
1038
1039 default:
1040 break;
1041 }
1042
1043 fmt = GET_RTX_FORMAT (code);
1044 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1045 {
1046 if (fmt[i] == 'e' && modified_in_p (XEXP (x, i), insn))
1047 return 1;
1048
1049 else if (fmt[i] == 'E')
1050 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
1051 if (modified_in_p (XVECEXP (x, i, j), insn))
1052 return 1;
1053 }
1054
1055 return 0;
1056 }
1057
1058 /* Return true if anything in insn X is (anti,output,true) dependent on
1059 anything in insn Y. */
1060
1061 int
1062 insn_dependent_p (x, y)
1063 rtx x, y;
1064 {
1065 rtx tmp;
1066
1067 if (! INSN_P (x) || ! INSN_P (y))
1068 abort ();
1069
1070 tmp = PATTERN (y);
1071 note_stores (PATTERN (x), insn_dependent_p_1, &tmp);
1072 if (tmp == NULL_RTX)
1073 return 1;
1074
1075 tmp = PATTERN (x);
1076 note_stores (PATTERN (y), insn_dependent_p_1, &tmp);
1077 if (tmp == NULL_RTX)
1078 return 1;
1079
1080 return 0;
1081 }
1082
1083 /* A helper routine for insn_dependent_p called through note_stores. */
1084
1085 static void
1086 insn_dependent_p_1 (x, pat, data)
1087 rtx x;
1088 rtx pat ATTRIBUTE_UNUSED;
1089 void *data;
1090 {
1091 rtx * pinsn = (rtx *) data;
1092
1093 if (*pinsn && reg_mentioned_p (x, *pinsn))
1094 *pinsn = NULL_RTX;
1095 }
1096 \f
1097 /* Helper function for set_of. */
1098 struct set_of_data
1099 {
1100 rtx found;
1101 rtx pat;
1102 };
1103
1104 static void
1105 set_of_1 (x, pat, data1)
1106 rtx x;
1107 rtx pat;
1108 void *data1;
1109 {
1110 struct set_of_data *data = (struct set_of_data *) (data1);
1111 if (rtx_equal_p (x, data->pat)
1112 || (GET_CODE (x) != MEM && reg_overlap_mentioned_p (data->pat, x)))
1113 data->found = pat;
1114 }
1115
1116 /* Given an INSN, return a SET or CLOBBER expression that modifies PAT
1117 (either directly or via STRICT_LOW_PART and similar modifiers). */
1118 rtx
1119 set_of (pat, insn)
1120 rtx pat, insn;
1121 {
1122 struct set_of_data data;
1123 data.found = NULL_RTX;
1124 data.pat = pat;
1125 note_stores (INSN_P (insn) ? PATTERN (insn) : insn, set_of_1, &data);
1126 return data.found;
1127 }
1128 \f
1129 /* Given an INSN, return a SET expression if this insn has only a single SET.
1130 It may also have CLOBBERs, USEs, or SETs whose output
1131 will not be used, which we ignore. */
1132
1133 rtx
1134 single_set_2 (insn, pat)
1135 rtx insn, pat;
1136 {
1137 rtx set = NULL;
1138 int set_verified = 1;
1139 int i;
1140
1141 if (GET_CODE (pat) == PARALLEL)
1142 {
1143 for (i = 0; i < XVECLEN (pat, 0); i++)
1144 {
1145 rtx sub = XVECEXP (pat, 0, i);
1146 switch (GET_CODE (sub))
1147 {
1148 case USE:
1149 case CLOBBER:
1150 break;
1151
1152 case SET:
1153 /* We can treat an insn with multiple sets, all but one of which
1154 are dead, as a single-set insn.  In the common case only one
1155 set is present in the pattern, so we want to avoid checking
1156 for REG_UNUSED notes unless necessary.
1157
1158 When we reach a set for the first time, we just assume it is
1159 the single set we are looking for; only when more sets are
1160 found in the insn do we check it.
1161 if (!set_verified)
1162 {
1163 if (find_reg_note (insn, REG_UNUSED, SET_DEST (set))
1164 && !side_effects_p (set))
1165 set = NULL;
1166 else
1167 set_verified = 1;
1168 }
1169 if (!set)
1170 set = sub, set_verified = 0;
1171 else if (!find_reg_note (insn, REG_UNUSED, SET_DEST (sub))
1172 || side_effects_p (sub))
1173 return NULL_RTX;
1174 break;
1175
1176 default:
1177 return NULL_RTX;
1178 }
1179 }
1180 }
1181 return set;
1182 }
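
/* Illustrative note (editorial addition): single_set, the usual entry
   point, is a wrapper around single_set_2 defined elsewhere.  For

     (parallel [(set (reg 100) (reg 101))
                (clobber (scratch))])

   it returns the inner SET, since the CLOBBER is ignored; for a PARALLEL
   containing two SETs, neither of whose results is noted as unused, it
   returns NULL_RTX.  */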
1183
1184 /* Given an INSN, return nonzero if it has more than one SET, else return
1185 zero. */
1186
1187 int
1188 multiple_sets (insn)
1189 rtx insn;
1190 {
1191 int found;
1192 int i;
1193
1194 /* INSN must be an insn. */
1195 if (! INSN_P (insn))
1196 return 0;
1197
1198 /* Only a PARALLEL can have multiple SETs. */
1199 if (GET_CODE (PATTERN (insn)) == PARALLEL)
1200 {
1201 for (i = 0, found = 0; i < XVECLEN (PATTERN (insn), 0); i++)
1202 if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET)
1203 {
1204 /* If we have already found a SET, then return now. */
1205 if (found)
1206 return 1;
1207 else
1208 found = 1;
1209 }
1210 }
1211
1212 /* Either zero or one SET. */
1213 return 0;
1214 }
1215 \f
1216 /* Return nonzero if the destination of SET equals the source
1217 and there are no side effects. */
1218
1219 int
1220 set_noop_p (set)
1221 rtx set;
1222 {
1223 rtx src = SET_SRC (set);
1224 rtx dst = SET_DEST (set);
1225
1226 if (side_effects_p (src) || side_effects_p (dst))
1227 return 0;
1228
1229 if (GET_CODE (dst) == MEM && GET_CODE (src) == MEM)
1230 return rtx_equal_p (dst, src);
1231
1232 if (dst == pc_rtx && src == pc_rtx)
1233 return 1;
1234
1235 if (GET_CODE (dst) == SIGN_EXTRACT
1236 || GET_CODE (dst) == ZERO_EXTRACT)
1237 return rtx_equal_p (XEXP (dst, 0), src)
1238 && ! BYTES_BIG_ENDIAN && XEXP (dst, 2) == const0_rtx;
1239
1240 if (GET_CODE (dst) == STRICT_LOW_PART)
1241 dst = XEXP (dst, 0);
1242
1243 if (GET_CODE (src) == SUBREG && GET_CODE (dst) == SUBREG)
1244 {
1245 if (SUBREG_BYTE (src) != SUBREG_BYTE (dst))
1246 return 0;
1247 src = SUBREG_REG (src);
1248 dst = SUBREG_REG (dst);
1249 }
1250
1251 return (GET_CODE (src) == REG && GET_CODE (dst) == REG
1252 && REGNO (src) == REGNO (dst));
1253 }
1254 \f
1255 /* Return nonzero if an insn consists only of SETs, each of which only sets a
1256 value to itself. */
1257
1258 int
1259 noop_move_p (insn)
1260 rtx insn;
1261 {
1262 rtx pat = PATTERN (insn);
1263
1264 if (INSN_CODE (insn) == NOOP_MOVE_INSN_CODE)
1265 return 1;
1266
1267 /* Insns carrying these notes are useful later on. */
1268 if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
1269 return 0;
1270
1271 /* For now treat an insn with a REG_RETVAL note as a
1272 special insn which should not be considered a no-op. */
1273 if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
1274 return 0;
1275
1276 if (GET_CODE (pat) == SET && set_noop_p (pat))
1277 return 1;
1278
1279 if (GET_CODE (pat) == PARALLEL)
1280 {
1281 int i;
1282 /* If nothing but SETs of registers to themselves,
1283 this insn can also be deleted. */
1284 for (i = 0; i < XVECLEN (pat, 0); i++)
1285 {
1286 rtx tem = XVECEXP (pat, 0, i);
1287
1288 if (GET_CODE (tem) == USE
1289 || GET_CODE (tem) == CLOBBER)
1290 continue;
1291
1292 if (GET_CODE (tem) != SET || ! set_noop_p (tem))
1293 return 0;
1294 }
1295
1296 return 1;
1297 }
1298 return 0;
1299 }
1300 \f
1301
1302 /* Return the last thing that X was assigned from before *PINSN. If VALID_TO
1303 is not NULL_RTX then verify that the object is not modified up to VALID_TO.
1304 If the object was modified, if we hit a partial assignment to X, or hit a
1305 CODE_LABEL first, return X. If we found an assignment, update *PINSN to
1306 point to it. ALLOW_HWREG is set to 1 if hardware registers are allowed to
1307 be the src. */
1308
1309 rtx
1310 find_last_value (x, pinsn, valid_to, allow_hwreg)
1311 rtx x;
1312 rtx *pinsn;
1313 rtx valid_to;
1314 int allow_hwreg;
1315 {
1316 rtx p;
1317
1318 for (p = PREV_INSN (*pinsn); p && GET_CODE (p) != CODE_LABEL;
1319 p = PREV_INSN (p))
1320 if (INSN_P (p))
1321 {
1322 rtx set = single_set (p);
1323 rtx note = find_reg_note (p, REG_EQUAL, NULL_RTX);
1324
1325 if (set && rtx_equal_p (x, SET_DEST (set)))
1326 {
1327 rtx src = SET_SRC (set);
1328
1329 if (note && GET_CODE (XEXP (note, 0)) != EXPR_LIST)
1330 src = XEXP (note, 0);
1331
1332 if ((valid_to == NULL_RTX
1333 || ! modified_between_p (src, PREV_INSN (p), valid_to))
1334 /* Reject hard registers because we don't usually want
1335 to use them; we'd rather use a pseudo. */
1336 && (! (GET_CODE (src) == REG
1337 && REGNO (src) < FIRST_PSEUDO_REGISTER) || allow_hwreg))
1338 {
1339 *pinsn = p;
1340 return src;
1341 }
1342 }
1343
1344 /* If set in non-simple way, we don't have a value. */
1345 if (reg_set_p (x, p))
1346 break;
1347 }
1348
1349 return x;
1350 }
1351 \f
1352 /* Return nonzero if register in range [REGNO, ENDREGNO)
1353 appears either explicitly or implicitly in X
1354 other than being stored into.
1355
1356 References contained within the substructure at LOC do not count.
1357 LOC may be zero, meaning don't ignore anything. */
1358
1359 int
1360 refers_to_regno_p (regno, endregno, x, loc)
1361 unsigned int regno, endregno;
1362 rtx x;
1363 rtx *loc;
1364 {
1365 int i;
1366 unsigned int x_regno;
1367 RTX_CODE code;
1368 const char *fmt;
1369
1370 repeat:
1371 /* The contents of a REG_NONNEG note is always zero, so we must come here
1372 upon repeat in case the last REG_NOTE is a REG_NONNEG note. */
1373 if (x == 0)
1374 return 0;
1375
1376 code = GET_CODE (x);
1377
1378 switch (code)
1379 {
1380 case REG:
1381 x_regno = REGNO (x);
1382
1383 /* If we are modifying the stack, frame, or argument pointer, it will
1384 clobber a virtual register. In fact, we could be more precise,
1385 but it isn't worth it. */
1386 if ((x_regno == STACK_POINTER_REGNUM
1387 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
1388 || x_regno == ARG_POINTER_REGNUM
1389 #endif
1390 || x_regno == FRAME_POINTER_REGNUM)
1391 && regno >= FIRST_VIRTUAL_REGISTER && regno <= LAST_VIRTUAL_REGISTER)
1392 return 1;
1393
1394 return (endregno > x_regno
1395 && regno < x_regno + (x_regno < FIRST_PSEUDO_REGISTER
1396 ? HARD_REGNO_NREGS (x_regno, GET_MODE (x))
1397 : 1));
1398
1399 case SUBREG:
1400 /* If this is a SUBREG of a hard reg, we can see exactly which
1401 registers are being modified. Otherwise, handle normally. */
1402 if (GET_CODE (SUBREG_REG (x)) == REG
1403 && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
1404 {
1405 unsigned int inner_regno = subreg_regno (x);
1406 unsigned int inner_endregno
1407 = inner_regno + (inner_regno < FIRST_PSEUDO_REGISTER
1408 ? HARD_REGNO_NREGS (inner_regno, GET_MODE (x)) : 1);
1409
1410 return endregno > inner_regno && regno < inner_endregno;
1411 }
1412 break;
1413
1414 case CLOBBER:
1415 case SET:
1416 if (&SET_DEST (x) != loc
1417 /* Note setting a SUBREG counts as referring to the REG it is in for
1418 a pseudo but not for hard registers since we can
1419 treat each word individually. */
1420 && ((GET_CODE (SET_DEST (x)) == SUBREG
1421 && loc != &SUBREG_REG (SET_DEST (x))
1422 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG
1423 && REGNO (SUBREG_REG (SET_DEST (x))) >= FIRST_PSEUDO_REGISTER
1424 && refers_to_regno_p (regno, endregno,
1425 SUBREG_REG (SET_DEST (x)), loc))
1426 || (GET_CODE (SET_DEST (x)) != REG
1427 && refers_to_regno_p (regno, endregno, SET_DEST (x), loc))))
1428 return 1;
1429
1430 if (code == CLOBBER || loc == &SET_SRC (x))
1431 return 0;
1432 x = SET_SRC (x);
1433 goto repeat;
1434
1435 default:
1436 break;
1437 }
1438
1439 /* X does not match, so try its subexpressions. */
1440
1441 fmt = GET_RTX_FORMAT (code);
1442 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1443 {
1444 if (fmt[i] == 'e' && loc != &XEXP (x, i))
1445 {
1446 if (i == 0)
1447 {
1448 x = XEXP (x, 0);
1449 goto repeat;
1450 }
1451 else
1452 if (refers_to_regno_p (regno, endregno, XEXP (x, i), loc))
1453 return 1;
1454 }
1455 else if (fmt[i] == 'E')
1456 {
1457 int j;
1458 for (j = XVECLEN (x, i) - 1; j >=0; j--)
1459 if (loc != &XVECEXP (x, i, j)
1460 && refers_to_regno_p (regno, endregno, XVECEXP (x, i, j), loc))
1461 return 1;
1462 }
1463 }
1464 return 0;
1465 }
1466
1467 /* Nonzero if modifying X will affect IN. If X is a register or a SUBREG,
1468 we check if any register number in X conflicts with the relevant register
1469 numbers. If X is a constant, return 0. If X is a MEM, return 1 iff IN
1470 contains a MEM (we don't bother checking for memory addresses that can't
1471 conflict because we expect this to be a rare case). */
1472
1473 int
1474 reg_overlap_mentioned_p (x, in)
1475 rtx x, in;
1476 {
1477 unsigned int regno, endregno;
1478
1479 /* Overly conservative. */
1480 if (GET_CODE (x) == STRICT_LOW_PART)
1481 x = XEXP (x, 0);
1482
1483 /* If either argument is a constant, then modifying X can not affect IN. */
1484 if (CONSTANT_P (x) || CONSTANT_P (in))
1485 return 0;
1486
1487 switch (GET_CODE (x))
1488 {
1489 case SUBREG:
1490 regno = REGNO (SUBREG_REG (x));
1491 if (regno < FIRST_PSEUDO_REGISTER)
1492 regno = subreg_regno (x);
1493 goto do_reg;
1494
1495 case REG:
1496 regno = REGNO (x);
1497 do_reg:
1498 endregno = regno + (regno < FIRST_PSEUDO_REGISTER
1499 ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
1500 return refers_to_regno_p (regno, endregno, in, (rtx*) 0);
1501
1502 case MEM:
1503 {
1504 const char *fmt;
1505 int i;
1506
1507 if (GET_CODE (in) == MEM)
1508 return 1;
1509
1510 fmt = GET_RTX_FORMAT (GET_CODE (in));
1511 for (i = GET_RTX_LENGTH (GET_CODE (in)) - 1; i >= 0; i--)
1512 if (fmt[i] == 'e' && reg_overlap_mentioned_p (x, XEXP (in, i)))
1513 return 1;
1514
1515 return 0;
1516 }
1517
1518 case SCRATCH:
1519 case PC:
1520 case CC0:
1521 return reg_mentioned_p (x, in);
1522
1523 case PARALLEL:
1524 {
1525 int i;
1526
1527 /* If any register in here refers to it we return true. */
1528 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
1529 if (XEXP (XVECEXP (x, 0, i), 0) != 0
1530 && reg_overlap_mentioned_p (XEXP (XVECEXP (x, 0, i), 0), in))
1531 return 1;
1532 return 0;
1533 }
1534
1535 default:
1536 break;
1537 }
1538
1539 abort ();
1540 }
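
/* Illustrative example (editorial addition): unlike reg_mentioned_p,
   which compares registers by number only, reg_overlap_mentioned_p also
   catches partial overlap.  On a hypothetical target where (reg:SI 0)
   occupies one hard register and (reg:DI 0) occupies registers 0 and 1,

     reg_overlap_mentioned_p ((reg:SI 1), (set (reg:SI 2) (reg:DI 0)))

   returns 1, because register 1 is part of the DImode use of register 0,
   while reg_mentioned_p returns 0 for the same arguments.  */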
1541 \f
1542 /* Return the last value to which REG was set prior to INSN. If we can't
1543 find it easily, return 0.
1544
1545 We only return a REG, SUBREG, or constant because it is too hard to
1546 check if a MEM remains unchanged. */
1547
1548 rtx
1549 reg_set_last (x, insn)
1550 rtx x;
1551 rtx insn;
1552 {
1553 rtx orig_insn = insn;
1554
1555 /* Scan backwards from INSN.  Stop when we reach a CODE_LABEL or, if X
1556 is (or might be) a hard register, when we reach a CALL_INSN, since a
1557 call may clobber any hard register.
1558
1559 If we find a set of X, ensure that its SET_SRC remains unchanged
1560 between that set and ORIG_INSN. */
1561
1562 /* The <= below is conservative when REGNO (x) == FIRST_PSEUDO_REGISTER. */
1563 for (;
1564 insn && GET_CODE (insn) != CODE_LABEL
1565 && ! (GET_CODE (insn) == CALL_INSN
1566 && REGNO (x) <= FIRST_PSEUDO_REGISTER);
1567 insn = PREV_INSN (insn))
1568 if (INSN_P (insn))
1569 {
1570 rtx set = set_of (x, insn);
1571 /* OK, this insn modifies our register. See if we understand it. */
1572 if (set)
1573 {
1574 rtx last_value;
1575 if (GET_CODE (set) != SET || SET_DEST (set) != x)
1576 return 0;
1577 last_value = SET_SRC (set);
1578 if (CONSTANT_P (last_value)
1579 || ((GET_CODE (last_value) == REG
1580 || GET_CODE (last_value) == SUBREG)
1581 && ! reg_set_between_p (last_value,
1582 insn, orig_insn)))
1583 return last_value;
1584 else
1585 return 0;
1586 }
1587 }
1588
1589 return 0;
1590 }
1591 \f
1592 /* Call FUN on each register or MEM that is stored into or clobbered by X.
1593 (X would be the pattern of an insn).
1594 FUN receives two arguments:
1595 the REG, MEM, CC0 or PC being stored in or clobbered,
1596 the SET or CLOBBER rtx that does the store.
1597
1598 If the item being stored in or clobbered is a SUBREG of a hard register,
1599 the SUBREG will be passed. */
1600
1601 void
1602 note_stores (x, fun, data)
1603 rtx x;
1604 void (*fun) PARAMS ((rtx, rtx, void *));
1605 void *data;
1606 {
1607 int i;
1608
1609 if (GET_CODE (x) == COND_EXEC)
1610 x = COND_EXEC_CODE (x);
1611
1612 if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER)
1613 {
1614 rtx dest = SET_DEST (x);
1615
1616 while ((GET_CODE (dest) == SUBREG
1617 && (GET_CODE (SUBREG_REG (dest)) != REG
1618 || REGNO (SUBREG_REG (dest)) >= FIRST_PSEUDO_REGISTER))
1619 || GET_CODE (dest) == ZERO_EXTRACT
1620 || GET_CODE (dest) == SIGN_EXTRACT
1621 || GET_CODE (dest) == STRICT_LOW_PART)
1622 dest = XEXP (dest, 0);
1623
1624 /* If we have a PARALLEL, SET_DEST is a list of EXPR_LIST expressions,
1625 each of whose first operand is a register. */
1626 if (GET_CODE (dest) == PARALLEL)
1627 {
1628 for (i = XVECLEN (dest, 0) - 1; i >= 0; i--)
1629 if (XEXP (XVECEXP (dest, 0, i), 0) != 0)
1630 (*fun) (XEXP (XVECEXP (dest, 0, i), 0), x, data);
1631 }
1632 else
1633 (*fun) (dest, x, data);
1634 }
1635
1636 else if (GET_CODE (x) == PARALLEL)
1637 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
1638 note_stores (XVECEXP (x, 0, i), fun, data);
1639 }
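
/* Usage sketch (editorial addition, not part of the original source):
   note_stores is normally used with a small callback plus a data cookie,
   in the style of set_of_1 above.  For example, to record whether some
   register REG is stored into by an insn (the names below are
   hypothetical):

     struct store_info { rtx reg; int found; };

     static void
     record_store (dest, setter, data)
	  rtx dest;
	  rtx setter ATTRIBUTE_UNUSED;
	  void *data;
     {
       struct store_info *info = (struct store_info *) data;
       if (reg_overlap_mentioned_p (info->reg, dest))
	 info->found = 1;
     }

     ...
     struct store_info info;
     info.reg = reg, info.found = 0;
     note_stores (PATTERN (insn), record_store, &info);
*/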
1640 \f
1641 /* Like note_stores, but call FUN for each expression that is being
1642 referenced in PBODY, a pointer to the PATTERN of an insn. We only call
1643 FUN for each expression, not any interior subexpressions. FUN receives a
1644 pointer to the expression and the DATA passed to this function.
1645
1646 Note that this is not quite the same test as that done in reg_referenced_p
1647 since that considers something as being referenced if it is being
1648 partially set, while we do not. */
1649
1650 void
1651 note_uses (pbody, fun, data)
1652 rtx *pbody;
1653 void (*fun) PARAMS ((rtx *, void *));
1654 void *data;
1655 {
1656 rtx body = *pbody;
1657 int i;
1658
1659 switch (GET_CODE (body))
1660 {
1661 case COND_EXEC:
1662 (*fun) (&COND_EXEC_TEST (body), data);
1663 note_uses (&COND_EXEC_CODE (body), fun, data);
1664 return;
1665
1666 case PARALLEL:
1667 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
1668 note_uses (&XVECEXP (body, 0, i), fun, data);
1669 return;
1670
1671 case USE:
1672 (*fun) (&XEXP (body, 0), data);
1673 return;
1674
1675 case ASM_OPERANDS:
1676 for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
1677 (*fun) (&ASM_OPERANDS_INPUT (body, i), data);
1678 return;
1679
1680 case TRAP_IF:
1681 (*fun) (&TRAP_CONDITION (body), data);
1682 return;
1683
1684 case PREFETCH:
1685 (*fun) (&XEXP (body, 0), data);
1686 return;
1687
1688 case UNSPEC:
1689 case UNSPEC_VOLATILE:
1690 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
1691 (*fun) (&XVECEXP (body, 0, i), data);
1692 return;
1693
1694 case CLOBBER:
1695 if (GET_CODE (XEXP (body, 0)) == MEM)
1696 (*fun) (&XEXP (XEXP (body, 0), 0), data);
1697 return;
1698
1699 case SET:
1700 {
1701 rtx dest = SET_DEST (body);
1702
1703 /* For a SET, call FUN on the source, on the address of a MEM
1704 destination, and on the operands of a ZERO_EXTRACT destination. */
1705 (*fun) (&SET_SRC (body), data);
1706
1707 if (GET_CODE (dest) == ZERO_EXTRACT)
1708 {
1709 (*fun) (&XEXP (dest, 1), data);
1710 (*fun) (&XEXP (dest, 2), data);
1711 }
1712
1713 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART)
1714 dest = XEXP (dest, 0);
1715
1716 if (GET_CODE (dest) == MEM)
1717 (*fun) (&XEXP (dest, 0), data);
1718 }
1719 return;
1720
1721 default:
1722 /* All the other possibilities never store. */
1723 (*fun) (pbody, data);
1724 return;
1725 }
1726 }
1727 \f
1728 /* Return nonzero if X's old contents don't survive after INSN.
1729 This will be true if X is (cc0) or if X is a register and
1730 X dies in INSN or because INSN entirely sets X.
1731
1732 "Entirely set" means set directly and not through a SUBREG,
1733 ZERO_EXTRACT or SIGN_EXTRACT, so no trace of the old contents remains.
1734 Likewise, REG_INC does not count.
1735
1736 REG may be a hard or pseudo reg. Renumbering is not taken into account,
1737 but for this use that makes no difference, since regs don't overlap
1738 during their lifetimes. Therefore, this function may be used
1739 at any time after deaths have been computed (in flow.c).
1740
1741 If REG is a hard reg that occupies multiple machine registers, this
1742 function will only return 1 if each of those registers will be replaced
1743 by INSN. */
1744
1745 int
1746 dead_or_set_p (insn, x)
1747 rtx insn;
1748 rtx x;
1749 {
1750 unsigned int regno, last_regno;
1751 unsigned int i;
1752
1753 /* Can't use cc0_rtx below since this file is used by genattrtab.c. */
1754 if (GET_CODE (x) == CC0)
1755 return 1;
1756
1757 if (GET_CODE (x) != REG)
1758 abort ();
1759
1760 regno = REGNO (x);
1761 last_regno = (regno >= FIRST_PSEUDO_REGISTER ? regno
1762 : regno + HARD_REGNO_NREGS (regno, GET_MODE (x)) - 1);
1763
1764 for (i = regno; i <= last_regno; i++)
1765 if (! dead_or_set_regno_p (insn, i))
1766 return 0;
1767
1768 return 1;
1769 }
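
/* Illustrative example (editorial addition): on a hypothetical target
   where (reg:DI 0) occupies hard registers 0 and 1, for the insn

     (set (reg:SI 0) (const_int 0))

   dead_or_set_regno_p (insn, 0) returns 1, but dead_or_set_p of the DI
   register returns 0 unless register 1 also dies there, because only
   part of the DImode value is overwritten.  */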
1770
1771 /* Utility function for dead_or_set_p to check an individual register. Also
1772 called from flow.c. */
1773
1774 int
1775 dead_or_set_regno_p (insn, test_regno)
1776 rtx insn;
1777 unsigned int test_regno;
1778 {
1779 unsigned int regno, endregno;
1780 rtx pattern;
1781
1782 /* See if there is a death note for something that includes TEST_REGNO. */
1783 if (find_regno_note (insn, REG_DEAD, test_regno))
1784 return 1;
1785
1786 if (GET_CODE (insn) == CALL_INSN
1787 && find_regno_fusage (insn, CLOBBER, test_regno))
1788 return 1;
1789
1790 pattern = PATTERN (insn);
1791
1792 if (GET_CODE (pattern) == COND_EXEC)
1793 pattern = COND_EXEC_CODE (pattern);
1794
1795 if (GET_CODE (pattern) == SET)
1796 {
1797 rtx dest = SET_DEST (pattern);
1798
1799 /* A value is totally replaced if it is the destination or the
1800 destination is a SUBREG of REGNO that does not change the number of
1801 words in it. */
1802 if (GET_CODE (dest) == SUBREG
1803 && (((GET_MODE_SIZE (GET_MODE (dest))
1804 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
1805 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
1806 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)))
1807 dest = SUBREG_REG (dest);
1808
1809 if (GET_CODE (dest) != REG)
1810 return 0;
1811
1812 regno = REGNO (dest);
1813 endregno = (regno >= FIRST_PSEUDO_REGISTER ? regno + 1
1814 : regno + HARD_REGNO_NREGS (regno, GET_MODE (dest)));
1815
1816 return (test_regno >= regno && test_regno < endregno);
1817 }
1818 else if (GET_CODE (pattern) == PARALLEL)
1819 {
1820 int i;
1821
1822 for (i = XVECLEN (pattern, 0) - 1; i >= 0; i--)
1823 {
1824 rtx body = XVECEXP (pattern, 0, i);
1825
1826 if (GET_CODE (body) == COND_EXEC)
1827 body = COND_EXEC_CODE (body);
1828
1829 if (GET_CODE (body) == SET || GET_CODE (body) == CLOBBER)
1830 {
1831 rtx dest = SET_DEST (body);
1832
1833 if (GET_CODE (dest) == SUBREG
1834 && (((GET_MODE_SIZE (GET_MODE (dest))
1835 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
1836 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
1837 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)))
1838 dest = SUBREG_REG (dest);
1839
1840 if (GET_CODE (dest) != REG)
1841 continue;
1842
1843 regno = REGNO (dest);
1844 endregno = (regno >= FIRST_PSEUDO_REGISTER ? regno + 1
1845 : regno + HARD_REGNO_NREGS (regno, GET_MODE (dest)));
1846
1847 if (test_regno >= regno && test_regno < endregno)
1848 return 1;
1849 }
1850 }
1851 }
1852
1853 return 0;
1854 }
1855
1856 /* Return the reg-note of kind KIND in insn INSN, if there is one.
1857 If DATUM is nonzero, look for one whose datum is DATUM. */
1858
1859 rtx
1860 find_reg_note (insn, kind, datum)
1861 rtx insn;
1862 enum reg_note kind;
1863 rtx datum;
1864 {
1865 rtx link;
1866
1867 /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN. */
1868 if (! INSN_P (insn))
1869 return 0;
1870
1871 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1872 if (REG_NOTE_KIND (link) == kind
1873 && (datum == 0 || datum == XEXP (link, 0)))
1874 return link;
1875 return 0;
1876 }
1877
1878 /* Return the reg-note of kind KIND in insn INSN which applies to register
1879 number REGNO, if any. Return 0 if there is no such reg-note. Note that
1880 the REGNO of this NOTE need not be REGNO if REGNO is a hard register;
1881 it might be the case that the note overlaps REGNO. */
1882
1883 rtx
1884 find_regno_note (insn, kind, regno)
1885 rtx insn;
1886 enum reg_note kind;
1887 unsigned int regno;
1888 {
1889 rtx link;
1890
1891 /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN. */
1892 if (! INSN_P (insn))
1893 return 0;
1894
1895 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1896 if (REG_NOTE_KIND (link) == kind
1897 /* Verify that it is a register, so that scratch and MEM won't cause a
1898 problem here. */
1899 && GET_CODE (XEXP (link, 0)) == REG
1900 && REGNO (XEXP (link, 0)) <= regno
1901 && ((REGNO (XEXP (link, 0))
1902 + (REGNO (XEXP (link, 0)) >= FIRST_PSEUDO_REGISTER ? 1
1903 : HARD_REGNO_NREGS (REGNO (XEXP (link, 0)),
1904 GET_MODE (XEXP (link, 0)))))
1905 > regno))
1906 return link;
1907 return 0;
1908 }
1909
1910 /* Return a REG_EQUIV or REG_EQUAL note if insn has only a single set and
1911 has such a note. */
1912
1913 rtx
1914 find_reg_equal_equiv_note (insn)
1915 rtx insn;
1916 {
1917 rtx note;
1918
1919 if (single_set (insn) == 0)
1920 return 0;
1921 else if ((note = find_reg_note (insn, REG_EQUIV, NULL_RTX)) != 0)
1922 return note;
1923 else
1924 return find_reg_note (insn, REG_EQUAL, NULL_RTX);
1925 }
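
/* Illustrative example (editorial addition): given an insn whose
   REG_NOTES contain (expr_list:REG_EQUAL (symbol_ref "x") (nil)),
   find_reg_note (insn, REG_EQUAL, NULL_RTX) returns that EXPR_LIST and
   XEXP (note, 0) is the equivalent value.  find_reg_equal_equiv_note
   additionally requires the insn to have a single SET and prefers a
   REG_EQUIV note over a REG_EQUAL one.  */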
1926
1927 /* Return true if DATUM, or any overlap of DATUM, of kind CODE is found
1928 in the CALL_INSN_FUNCTION_USAGE information of INSN. */
1929
1930 int
1931 find_reg_fusage (insn, code, datum)
1932 rtx insn;
1933 enum rtx_code code;
1934 rtx datum;
1935 {
1936 /* If it's not a CALL_INSN, it can't possibly have a
1937 CALL_INSN_FUNCTION_USAGE field, so don't bother checking. */
1938 if (GET_CODE (insn) != CALL_INSN)
1939 return 0;
1940
1941 if (! datum)
1942 abort ();
1943
1944 if (GET_CODE (datum) != REG)
1945 {
1946 rtx link;
1947
1948 for (link = CALL_INSN_FUNCTION_USAGE (insn);
1949 link;
1950 link = XEXP (link, 1))
1951 if (GET_CODE (XEXP (link, 0)) == code
1952 && rtx_equal_p (datum, XEXP (XEXP (link, 0), 0)))
1953 return 1;
1954 }
1955 else
1956 {
1957 unsigned int regno = REGNO (datum);
1958
1959 /* CALL_INSN_FUNCTION_USAGE information cannot contain references
1960 to pseudo registers, so don't bother checking. */
1961
1962 if (regno < FIRST_PSEUDO_REGISTER)
1963 {
1964 unsigned int end_regno
1965 = regno + HARD_REGNO_NREGS (regno, GET_MODE (datum));
1966 unsigned int i;
1967
1968 for (i = regno; i < end_regno; i++)
1969 if (find_regno_fusage (insn, code, i))
1970 return 1;
1971 }
1972 }
1973
1974 return 0;
1975 }
1976
1977 /* Return true if REGNO, or any overlap of REGNO, of kind CODE is found
1978 in the CALL_INSN_FUNCTION_USAGE information of INSN. */
1979
1980 int
1981 find_regno_fusage (insn, code, regno)
1982 rtx insn;
1983 enum rtx_code code;
1984 unsigned int regno;
1985 {
1986 rtx link;
1987
1988 /* CALL_INSN_FUNCTION_USAGE information cannot contain references
1989 to pseudo registers, so don't bother checking. */
1990
1991 if (regno >= FIRST_PSEUDO_REGISTER
1992 || GET_CODE (insn) != CALL_INSN )
1993 return 0;
1994
1995 for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
1996 {
1997 unsigned int regnote;
1998 rtx op, reg;
1999
2000 if (GET_CODE (op = XEXP (link, 0)) == code
2001 && GET_CODE (reg = XEXP (op, 0)) == REG
2002 && (regnote = REGNO (reg)) <= regno
2003 && regnote + HARD_REGNO_NREGS (regnote, GET_MODE (reg)) > regno)
2004 return 1;
2005 }
2006
2007 return 0;
2008 }
2009
2010 /* Return true if INSN is a call to a pure function. */
2011
2012 int
2013 pure_call_p (insn)
2014 rtx insn;
2015 {
2016 rtx link;
2017
2018 if (GET_CODE (insn) != CALL_INSN || ! CONST_OR_PURE_CALL_P (insn))
2019 return 0;
2020
2021 /* Look for the note that differentiates const and pure functions. */
2022 for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
2023 {
2024 rtx u, m;
2025
2026 if (GET_CODE (u = XEXP (link, 0)) == USE
2027 && GET_CODE (m = XEXP (u, 0)) == MEM && GET_MODE (m) == BLKmode
2028 && GET_CODE (XEXP (m, 0)) == SCRATCH)
2029 return 1;
2030 }
2031
2032 return 0;
2033 }
2034 \f
2035 /* Remove register note NOTE from the REG_NOTES of INSN. */
2036
2037 void
2038 remove_note (insn, note)
2039 rtx insn;
2040 rtx note;
2041 {
2042 rtx link;
2043
2044 if (note == NULL_RTX)
2045 return;
2046
2047 if (REG_NOTES (insn) == note)
2048 {
2049 REG_NOTES (insn) = XEXP (note, 1);
2050 return;
2051 }
2052
2053 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
2054 if (XEXP (link, 1) == note)
2055 {
2056 XEXP (link, 1) = XEXP (note, 1);
2057 return;
2058 }
2059
2060 abort ();
2061 }
2062
2063 /* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
2064 return 1 if it is found. A simple equality test is used to determine if
2065 NODE matches. */
2066
2067 int
2068 in_expr_list_p (listp, node)
2069 rtx listp;
2070 rtx node;
2071 {
2072 rtx x;
2073
2074 for (x = listp; x; x = XEXP (x, 1))
2075 if (node == XEXP (x, 0))
2076 return 1;
2077
2078 return 0;
2079 }
2080
2081 /* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
2082 remove that entry from the list if it is found.
2083
2084 A simple equality test is used to determine if NODE matches. */
2085
2086 void
2087 remove_node_from_expr_list (node, listp)
2088 rtx node;
2089 rtx *listp;
2090 {
2091 rtx temp = *listp;
2092 rtx prev = NULL_RTX;
2093
2094 while (temp)
2095 {
2096 if (node == XEXP (temp, 0))
2097 {
2098 /* Splice the node out of the list. */
2099 if (prev)
2100 XEXP (prev, 1) = XEXP (temp, 1);
2101 else
2102 *listp = XEXP (temp, 1);
2103
2104 return;
2105 }
2106
2107 prev = temp;
2108 temp = XEXP (temp, 1);
2109 }
2110 }
2111 \f
2112 /* Nonzero if X contains any volatile instructions. These are instructions
2113 which may cause unpredictable machine state, and thus no
2114 instructions should be moved or combined across them. This includes
2115 only volatile asms and UNSPEC_VOLATILE instructions. */
2116
2117 int
2118 volatile_insn_p (x)
2119 rtx x;
2120 {
2121 RTX_CODE code;
2122
2123 code = GET_CODE (x);
2124 switch (code)
2125 {
2126 case LABEL_REF:
2127 case SYMBOL_REF:
2128 case CONST_INT:
2129 case CONST:
2130 case CONST_DOUBLE:
2131 case CONST_VECTOR:
2132 case CC0:
2133 case PC:
2134 case REG:
2135 case SCRATCH:
2136 case CLOBBER:
2137 case ASM_INPUT:
2138 case ADDR_VEC:
2139 case ADDR_DIFF_VEC:
2140 case CALL:
2141 case MEM:
2142 return 0;
2143
2144 case UNSPEC_VOLATILE:
2145 /* case TRAP_IF: This isn't clear yet. */
2146 return 1;
2147
2148 case ASM_OPERANDS:
2149 if (MEM_VOLATILE_P (x))
2150 return 1;
2151
2152 default:
2153 break;
2154 }
2155
2156 /* Recursively scan the operands of this expression. */
2157
2158 {
2159 const char *fmt = GET_RTX_FORMAT (code);
2160 int i;
2161
2162 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2163 {
2164 if (fmt[i] == 'e')
2165 {
2166 if (volatile_insn_p (XEXP (x, i)))
2167 return 1;
2168 }
2169 else if (fmt[i] == 'E')
2170 {
2171 int j;
2172 for (j = 0; j < XVECLEN (x, i); j++)
2173 if (volatile_insn_p (XVECEXP (x, i, j)))
2174 return 1;
2175 }
2176 }
2177 }
2178 return 0;
2179 }
2180
2181 /* Nonzero if X contains any volatile memory references
2182 UNSPEC_VOLATILE operations or volatile ASM_OPERANDS expressions. */
2183
2184 int
2185 volatile_refs_p (x)
2186 rtx x;
2187 {
2188 RTX_CODE code;
2189
2190 code = GET_CODE (x);
2191 switch (code)
2192 {
2193 case LABEL_REF:
2194 case SYMBOL_REF:
2195 case CONST_INT:
2196 case CONST:
2197 case CONST_DOUBLE:
2198 case CONST_VECTOR:
2199 case CC0:
2200 case PC:
2201 case REG:
2202 case SCRATCH:
2203 case CLOBBER:
2204 case ASM_INPUT:
2205 case ADDR_VEC:
2206 case ADDR_DIFF_VEC:
2207 return 0;
2208
2209 case CALL:
2210 case UNSPEC_VOLATILE:
2211 /* case TRAP_IF: This isn't clear yet. */
2212 return 1;
2213
2214 case MEM:
2215 case ASM_OPERANDS:
2216 if (MEM_VOLATILE_P (x))
2217 return 1;
2218
2219 default:
2220 break;
2221 }
2222
2223 /* Recursively scan the operands of this expression. */
2224
2225 {
2226 const char *fmt = GET_RTX_FORMAT (code);
2227 int i;
2228
2229 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2230 {
2231 if (fmt[i] == 'e')
2232 {
2233 if (volatile_refs_p (XEXP (x, i)))
2234 return 1;
2235 }
2236 else if (fmt[i] == 'E')
2237 {
2238 int j;
2239 for (j = 0; j < XVECLEN (x, i); j++)
2240 if (volatile_refs_p (XVECEXP (x, i, j)))
2241 return 1;
2242 }
2243 }
2244 }
2245 return 0;
2246 }
2247
2248 /* Similar to above, except that it also rejects register pre- and post-
2249 incrementing. */
2250
2251 int
2252 side_effects_p (x)
2253 rtx x;
2254 {
2255 RTX_CODE code;
2256
2257 code = GET_CODE (x);
2258 switch (code)
2259 {
2260 case LABEL_REF:
2261 case SYMBOL_REF:
2262 case CONST_INT:
2263 case CONST:
2264 case CONST_DOUBLE:
2265 case CONST_VECTOR:
2266 case CC0:
2267 case PC:
2268 case REG:
2269 case SCRATCH:
2270 case ASM_INPUT:
2271 case ADDR_VEC:
2272 case ADDR_DIFF_VEC:
2273 return 0;
2274
2275 case CLOBBER:
2276 /* Reject CLOBBER with a non-VOID mode. These are made by combine.c
2277 when some combination can't be done. If we see one, don't think
2278 that we can simplify the expression. */
2279 return (GET_MODE (x) != VOIDmode);
2280
2281 case PRE_INC:
2282 case PRE_DEC:
2283 case POST_INC:
2284 case POST_DEC:
2285 case PRE_MODIFY:
2286 case POST_MODIFY:
2287 case CALL:
2288 case UNSPEC_VOLATILE:
2289 /* case TRAP_IF: This isn't clear yet. */
2290 return 1;
2291
2292 case MEM:
2293 case ASM_OPERANDS:
2294 if (MEM_VOLATILE_P (x))
2295 return 1;
2296
2297 default:
2298 break;
2299 }
2300
2301 /* Recursively scan the operands of this expression. */
2302
2303 {
2304 const char *fmt = GET_RTX_FORMAT (code);
2305 int i;
2306
2307 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2308 {
2309 if (fmt[i] == 'e')
2310 {
2311 if (side_effects_p (XEXP (x, i)))
2312 return 1;
2313 }
2314 else if (fmt[i] == 'E')
2315 {
2316 int j;
2317 for (j = 0; j < XVECLEN (x, i); j++)
2318 if (side_effects_p (XVECEXP (x, i, j)))
2319 return 1;
2320 }
2321 }
2322 }
2323 return 0;
2324 }
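/* Illustrative examples (editorial addition): how the three predicates
   above differ on a few simple expressions, assuming MEM_VOLATILE_P is
   set only where noted.

       expression                     volatile_insn_p  volatile_refs_p  side_effects_p
       (reg:SI 1)                            0                0               0
       (mem:SI (reg:SI 1))                   0                0               0
       volatile (mem:SI (reg:SI 1))          0                1               1
       (post_inc:SI (reg:SI 1))              0                0               1
       volatile asm_operands                 1                1               1

   Only volatile asms and UNSPEC_VOLATILE count as volatile
   instructions; volatile memory references additionally satisfy
   volatile_refs_p, and autoincrements only side_effects_p.  */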
2325 \f
2326 /* Return nonzero if evaluating rtx X might cause a trap. */
2327
2328 int
2329 may_trap_p (x)
2330 rtx x;
2331 {
2332 int i;
2333 enum rtx_code code;
2334 const char *fmt;
2335
2336 if (x == 0)
2337 return 0;
2338 code = GET_CODE (x);
2339 switch (code)
2340 {
2341 /* Handle these cases quickly. */
2342 case CONST_INT:
2343 case CONST_DOUBLE:
2344 case CONST_VECTOR:
2345 case SYMBOL_REF:
2346 case LABEL_REF:
2347 case CONST:
2348 case PC:
2349 case CC0:
2350 case REG:
2351 case SCRATCH:
2352 return 0;
2353
2354 case ASM_INPUT:
2355 case UNSPEC_VOLATILE:
2356 case TRAP_IF:
2357 return 1;
2358
2359 case ASM_OPERANDS:
2360 return MEM_VOLATILE_P (x);
2361
2362 /* Memory ref can trap unless it's a static var or a stack slot. */
2363 case MEM:
2364 return rtx_addr_can_trap_p (XEXP (x, 0));
2365
2366 /* Division by a non-constant might trap. */
2367 case DIV:
2368 case MOD:
2369 case UDIV:
2370 case UMOD:
2371 if (! CONSTANT_P (XEXP (x, 1))
2372 || (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT
2373 && flag_trapping_math))
2374 return 1;
2375 /* This was const0_rtx, but by not using that,
2376 we can link this file into other programs. */
2377 if (GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) == 0)
2378 return 1;
2379 break;
2380
2381 case EXPR_LIST:
2382 /* An EXPR_LIST is used to represent a function call. This
2383 certainly may trap. */
2384 return 1;
2385
2386 case GE:
2387 case GT:
2388 case LE:
2389 case LT:
2390 case COMPARE:
2391 /* Some floating point comparisons may trap. */
2392 if (!flag_trapping_math)
2393 break;
2394 /* ??? There is no machine independent way to check for tests that trap
2395 when COMPARE is used, though many targets do make this distinction.
2396 For instance, sparc uses CCFPE for compares which generate exceptions
2397 and CCFP for compares which do not generate exceptions. */
2398 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
2399 return 1;
2400 /* But often the compare has some CC mode, so check operand
2401 modes as well. */
2402 if (GET_MODE_CLASS (GET_MODE (XEXP (x, 0))) == MODE_FLOAT
2403 || GET_MODE_CLASS (GET_MODE (XEXP (x, 1))) == MODE_FLOAT)
2404 return 1;
2405 break;
2406
2407 case NEG:
2408 case ABS:
2409 /* These operations don't trap even with floating point. */
2410 break;
2411
2412 default:
2413 /* Any floating arithmetic may trap. */
2414 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT
2415 && flag_trapping_math)
2416 return 1;
2417 }
2418
2419 fmt = GET_RTX_FORMAT (code);
2420 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2421 {
2422 if (fmt[i] == 'e')
2423 {
2424 if (may_trap_p (XEXP (x, i)))
2425 return 1;
2426 }
2427 else if (fmt[i] == 'E')
2428 {
2429 int j;
2430 for (j = 0; j < XVECLEN (x, i); j++)
2431 if (may_trap_p (XVECEXP (x, i, j)))
2432 return 1;
2433 }
2434 }
2435 return 0;
2436 }
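/* Illustrative examples (editorial addition): under the rules above,
   and with flag_trapping_math set,

       (div:SI (reg:SI 1) (reg:SI 2))       may trap (non-constant divisor)
       (div:SI (reg:SI 1) (const_int 4))    cannot trap
       (mem:SI (reg:SI 1))                  may trap (address not provably valid)
       (plus:SF (reg:SF 1) (reg:SF 2))      may trap (floating-point arithmetic)
       (plus:SI (reg:SI 1) (const_int 4))   cannot trap

   The MEM case defers to rtx_addr_can_trap_p, defined earlier in this
   file, which knows that stack slots and static variables are safe.  */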
2437 \f
2438 /* Return nonzero if X contains a comparison that is not either EQ or NE,
2439 i.e., an inequality. */
2440
2441 int
2442 inequality_comparisons_p (x)
2443 rtx x;
2444 {
2445 const char *fmt;
2446 int len, i;
2447 enum rtx_code code = GET_CODE (x);
2448
2449 switch (code)
2450 {
2451 case REG:
2452 case SCRATCH:
2453 case PC:
2454 case CC0:
2455 case CONST_INT:
2456 case CONST_DOUBLE:
2457 case CONST_VECTOR:
2458 case CONST:
2459 case LABEL_REF:
2460 case SYMBOL_REF:
2461 return 0;
2462
2463 case LT:
2464 case LTU:
2465 case GT:
2466 case GTU:
2467 case LE:
2468 case LEU:
2469 case GE:
2470 case GEU:
2471 return 1;
2472
2473 default:
2474 break;
2475 }
2476
2477 len = GET_RTX_LENGTH (code);
2478 fmt = GET_RTX_FORMAT (code);
2479
2480 for (i = 0; i < len; i++)
2481 {
2482 if (fmt[i] == 'e')
2483 {
2484 if (inequality_comparisons_p (XEXP (x, i)))
2485 return 1;
2486 }
2487 else if (fmt[i] == 'E')
2488 {
2489 int j;
2490 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2491 if (inequality_comparisons_p (XVECEXP (x, i, j)))
2492 return 1;
2493 }
2494 }
2495
2496 return 0;
2497 }
2498 \f
2499 /* Replace any occurrence of FROM in X with TO.  The function does
2500 not descend into CONST_DOUBLE expressions when replacing.
2501
2502 Note that copying is not done, so X must not be shared unless all copies
2503 are to be modified.  */
2504
2505 rtx
2506 replace_rtx (x, from, to)
2507 rtx x, from, to;
2508 {
2509 int i, j;
2510 const char *fmt;
2511
2512 /* The following prevents infinite loops when we change a MEM inside
2513 a CONST_DOUBLE into the same CONST_DOUBLE.  */
2514 if (x != 0 && GET_CODE (x) == CONST_DOUBLE)
2515 return x;
2516
2517 if (x == from)
2518 return to;
2519
2520 /* Allow this function to make replacements in EXPR_LISTs. */
2521 if (x == 0)
2522 return 0;
2523
2524 if (GET_CODE (x) == SUBREG)
2525 {
2526 rtx new = replace_rtx (SUBREG_REG (x), from, to);
2527
2528 if (GET_CODE (new) == CONST_INT)
2529 {
2530 x = simplify_subreg (GET_MODE (x), new,
2531 GET_MODE (SUBREG_REG (x)),
2532 SUBREG_BYTE (x));
2533 if (! x)
2534 abort ();
2535 }
2536 else
2537 SUBREG_REG (x) = new;
2538
2539 return x;
2540 }
2541 else if (GET_CODE (x) == ZERO_EXTEND)
2542 {
2543 rtx new = replace_rtx (XEXP (x, 0), from, to);
2544
2545 if (GET_CODE (new) == CONST_INT)
2546 {
2547 x = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x),
2548 new, GET_MODE (XEXP (x, 0)));
2549 if (! x)
2550 abort ();
2551 }
2552 else
2553 XEXP (x, 0) = new;
2554
2555 return x;
2556 }
2557
2558 fmt = GET_RTX_FORMAT (GET_CODE (x));
2559 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2560 {
2561 if (fmt[i] == 'e')
2562 XEXP (x, i) = replace_rtx (XEXP (x, i), from, to);
2563 else if (fmt[i] == 'E')
2564 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2565 XVECEXP (x, i, j) = replace_rtx (XVECEXP (x, i, j), from, to);
2566 }
2567
2568 return x;
2569 }
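/* Illustrative sketch (editorial addition): replace_rtx modifies X in
   place, so callers that may hold a shared pattern typically copy it
   first.  For example, a pass substituting one pseudo for another in a
   copied pattern (OLD_REG and NEW_REG are hypothetical locals) might do:

       rtx pat = copy_rtx (PATTERN (insn));
       pat = replace_rtx (pat, old_reg, new_reg);

   The return value must be used, since the toplevel expression itself
   may be replaced or simplified rather than modified in place.  */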
2570 \f
2571 /* Throughout the rtx X, replace many registers according to REG_MAP.
2572 Return the replacement for X (which may be X with altered contents).
2573 REG_MAP[R] is the replacement for register R, or 0 for don't replace.
2574 NREGS is the length of REG_MAP; regs >= NREGS are not mapped.
2575
2576 We only support REG_MAP entries of REG or SUBREG. Also, hard registers
2577 should not be mapped to pseudos or vice versa since validate_change
2578 is not called.
2579
2580 If REPLACE_DEST is 1, replacements are also done in destinations;
2581 otherwise, only sources are replaced. */
2582
2583 rtx
2584 replace_regs (x, reg_map, nregs, replace_dest)
2585 rtx x;
2586 rtx *reg_map;
2587 unsigned int nregs;
2588 int replace_dest;
2589 {
2590 enum rtx_code code;
2591 int i;
2592 const char *fmt;
2593
2594 if (x == 0)
2595 return x;
2596
2597 code = GET_CODE (x);
2598 switch (code)
2599 {
2600 case SCRATCH:
2601 case PC:
2602 case CC0:
2603 case CONST_INT:
2604 case CONST_DOUBLE:
2605 case CONST_VECTOR:
2606 case CONST:
2607 case SYMBOL_REF:
2608 case LABEL_REF:
2609 return x;
2610
2611 case REG:
2612 /* Verify that the register has an entry before trying to access it. */
2613 if (REGNO (x) < nregs && reg_map[REGNO (x)] != 0)
2614 {
2615 /* SUBREGs can't be shared. Always return a copy to ensure that if
2616 this replacement occurs more than once then each instance will
2617 get distinct rtx. */
2618 if (GET_CODE (reg_map[REGNO (x)]) == SUBREG)
2619 return copy_rtx (reg_map[REGNO (x)]);
2620 return reg_map[REGNO (x)];
2621 }
2622 return x;
2623
2624 case SUBREG:
2625 /* Prevent making nested SUBREGs. */
2626 if (GET_CODE (SUBREG_REG (x)) == REG && REGNO (SUBREG_REG (x)) < nregs
2627 && reg_map[REGNO (SUBREG_REG (x))] != 0
2628 && GET_CODE (reg_map[REGNO (SUBREG_REG (x))]) == SUBREG)
2629 {
2630 rtx map_val = reg_map[REGNO (SUBREG_REG (x))];
2631 return simplify_gen_subreg (GET_MODE (x), map_val,
2632 GET_MODE (SUBREG_REG (x)),
2633 SUBREG_BYTE (x));
2634 }
2635 break;
2636
2637 case SET:
2638 if (replace_dest)
2639 SET_DEST (x) = replace_regs (SET_DEST (x), reg_map, nregs, 0);
2640
2641 else if (GET_CODE (SET_DEST (x)) == MEM
2642 || GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
2643 /* Even if we are not to replace destinations, replace a register if it
2644 is CONTAINED in the destination (i.e. the destination is a MEM or a
2645 STRICT_LOW_PART).  */
2646 XEXP (SET_DEST (x), 0) = replace_regs (XEXP (SET_DEST (x), 0),
2647 reg_map, nregs, 0);
2648 else if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
2649 /* Similarly, for ZERO_EXTRACT we replace all operands. */
2650 break;
2651
2652 SET_SRC (x) = replace_regs (SET_SRC (x), reg_map, nregs, 0);
2653 return x;
2654
2655 default:
2656 break;
2657 }
2658
2659 fmt = GET_RTX_FORMAT (code);
2660 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2661 {
2662 if (fmt[i] == 'e')
2663 XEXP (x, i) = replace_regs (XEXP (x, i), reg_map, nregs, replace_dest);
2664 else if (fmt[i] == 'E')
2665 {
2666 int j;
2667 for (j = 0; j < XVECLEN (x, i); j++)
2668 XVECEXP (x, i, j) = replace_regs (XVECEXP (x, i, j), reg_map,
2669 nregs, replace_dest);
2670 }
2671 }
2672 return x;
2673 }
2674
2675 /* A subroutine of computed_jump_p, return 1 if X contains a REG or MEM or
2676 constant that is not in the constant pool and not in the condition
2677 of an IF_THEN_ELSE. */
2678
2679 static int
2680 computed_jump_p_1 (x)
2681 rtx x;
2682 {
2683 enum rtx_code code = GET_CODE (x);
2684 int i, j;
2685 const char *fmt;
2686
2687 switch (code)
2688 {
2689 case LABEL_REF:
2690 case PC:
2691 return 0;
2692
2693 case CONST:
2694 case CONST_INT:
2695 case CONST_DOUBLE:
2696 case CONST_VECTOR:
2697 case SYMBOL_REF:
2698 case REG:
2699 return 1;
2700
2701 case MEM:
2702 return ! (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
2703 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)));
2704
2705 case IF_THEN_ELSE:
2706 return (computed_jump_p_1 (XEXP (x, 1))
2707 || computed_jump_p_1 (XEXP (x, 2)));
2708
2709 default:
2710 break;
2711 }
2712
2713 fmt = GET_RTX_FORMAT (code);
2714 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2715 {
2716 if (fmt[i] == 'e'
2717 && computed_jump_p_1 (XEXP (x, i)))
2718 return 1;
2719
2720 else if (fmt[i] == 'E')
2721 for (j = 0; j < XVECLEN (x, i); j++)
2722 if (computed_jump_p_1 (XVECEXP (x, i, j)))
2723 return 1;
2724 }
2725
2726 return 0;
2727 }
2728
2729 /* Return nonzero if INSN is an indirect jump (aka computed jump).
2730
2731 Tablejumps and casesi insns are not considered indirect jumps;
2732 we can recognize them by a (use (label_ref)). */
2733
2734 int
2735 computed_jump_p (insn)
2736 rtx insn;
2737 {
2738 int i;
2739 if (GET_CODE (insn) == JUMP_INSN)
2740 {
2741 rtx pat = PATTERN (insn);
2742
2743 if (find_reg_note (insn, REG_LABEL, NULL_RTX))
2744 return 0;
2745 else if (GET_CODE (pat) == PARALLEL)
2746 {
2747 int len = XVECLEN (pat, 0);
2748 int has_use_labelref = 0;
2749
2750 for (i = len - 1; i >= 0; i--)
2751 if (GET_CODE (XVECEXP (pat, 0, i)) == USE
2752 && (GET_CODE (XEXP (XVECEXP (pat, 0, i), 0))
2753 == LABEL_REF))
2754 has_use_labelref = 1;
2755
2756 if (! has_use_labelref)
2757 for (i = len - 1; i >= 0; i--)
2758 if (GET_CODE (XVECEXP (pat, 0, i)) == SET
2759 && SET_DEST (XVECEXP (pat, 0, i)) == pc_rtx
2760 && computed_jump_p_1 (SET_SRC (XVECEXP (pat, 0, i))))
2761 return 1;
2762 }
2763 else if (GET_CODE (pat) == SET
2764 && SET_DEST (pat) == pc_rtx
2765 && computed_jump_p_1 (SET_SRC (pat)))
2766 return 1;
2767 }
2768 return 0;
2769 }
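/* Illustrative examples (editorial addition): jump patterns and how
   computed_jump_p classifies them (label numbers are arbitrary).

       (set (pc) (reg:SI 100))                                   computed jump
       (set (pc) (mem:SI (plus:SI (reg) (reg))))                 computed jump
       (set (pc) (label_ref 23))                                 not computed
       (set (pc) (if_then_else (eq ...) (label_ref 23) (pc)))    not computed

   Tablejumps are excluded because their PARALLEL contains a
   (use (label_ref ...)), and jumps carrying a REG_LABEL note are
   excluded explicitly above.  */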
2770
2771 /* Traverse X via depth-first search, calling F for each
2772 sub-expression (including X itself). F is also passed the DATA.
2773 If F returns -1, do not traverse sub-expressions, but continue
2774 traversing the rest of the tree. If F ever returns any other
2775 nonzero value, stop the traversal and return the value returned
2776 by F.  Otherwise, return 0.  This function does not traverse inside
2777 tree structures that contain RTX_EXPRs, or into sub-expressions
2778 whose format code is `0' since it is not known whether or not those
2779 codes are actually RTL.
2780
2781 This routine is very general, and could (should?) be used to
2782 implement many of the other routines in this file. */
2783
2784 int
2785 for_each_rtx (x, f, data)
2786 rtx *x;
2787 rtx_function f;
2788 void *data;
2789 {
2790 int result;
2791 int length;
2792 const char *format;
2793 int i;
2794
2795 /* Call F on X. */
2796 result = (*f) (x, data);
2797 if (result == -1)
2798 /* Do not traverse sub-expressions. */
2799 return 0;
2800 else if (result != 0)
2801 /* Stop the traversal. */
2802 return result;
2803
2804 if (*x == NULL_RTX)
2805 /* There are no sub-expressions. */
2806 return 0;
2807
2808 length = GET_RTX_LENGTH (GET_CODE (*x));
2809 format = GET_RTX_FORMAT (GET_CODE (*x));
2810
2811 for (i = 0; i < length; ++i)
2812 {
2813 switch (format[i])
2814 {
2815 case 'e':
2816 result = for_each_rtx (&XEXP (*x, i), f, data);
2817 if (result != 0)
2818 return result;
2819 break;
2820
2821 case 'V':
2822 case 'E':
2823 if (XVEC (*x, i) != 0)
2824 {
2825 int j;
2826 for (j = 0; j < XVECLEN (*x, i); ++j)
2827 {
2828 result = for_each_rtx (&XVECEXP (*x, i, j), f, data);
2829 if (result != 0)
2830 return result;
2831 }
2832 }
2833 break;
2834
2835 default:
2836 /* Nothing to do. */
2837 break;
2838 }
2839
2840 }
2841
2842 return 0;
2843 }
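/* Illustrative sketch (editorial addition): a minimal callback for
   for_each_rtx.  The names count_regs_1 and count_regs are
   hypothetical and do not exist in GCC; they merely show the calling
   convention (return 0 to keep walking, -1 to skip sub-expressions,
   any other value to stop and propagate it).

       static int
       count_regs_1 (x, data)
            rtx *x;
            void *data;
       {
         if (*x != NULL_RTX && GET_CODE (*x) == REG)
           ++*(int *) data;
         return 0;
       }

       static int
       count_regs (x)
            rtx x;
       {
         int n = 0;
         for_each_rtx (&x, count_regs_1, &n);
         return n;
       }
   */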
2844
2845 /* Searches X for any reference to REGNO, returning the rtx of the
2846 reference found if any. Otherwise, returns NULL_RTX. */
2847
2848 rtx
2849 regno_use_in (regno, x)
2850 unsigned int regno;
2851 rtx x;
2852 {
2853 const char *fmt;
2854 int i, j;
2855 rtx tem;
2856
2857 if (GET_CODE (x) == REG && REGNO (x) == regno)
2858 return x;
2859
2860 fmt = GET_RTX_FORMAT (GET_CODE (x));
2861 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2862 {
2863 if (fmt[i] == 'e')
2864 {
2865 if ((tem = regno_use_in (regno, XEXP (x, i))))
2866 return tem;
2867 }
2868 else if (fmt[i] == 'E')
2869 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2870 if ((tem = regno_use_in (regno, XVECEXP (x, i, j))))
2871 return tem;
2872 }
2873
2874 return NULL_RTX;
2875 }
2876
2877 /* Return a value indicating whether OP, an operand of a commutative
2878 operation, is preferred as the first or second operand. The higher
2879 the value, the stronger the preference for being the first operand.
2880 We use negative values to indicate a preference for the second operand
2881 and positive values for the first operand.  */
2882
2883 int
2884 commutative_operand_precedence (op)
2885 rtx op;
2886 {
2887 /* Constants always come second.  Prefer "nice" constants. */
2888 if (GET_CODE (op) == CONST_INT)
2889 return -5;
2890 if (GET_CODE (op) == CONST_DOUBLE)
2891 return -4;
2892 if (CONSTANT_P (op))
2893 return -3;
2894
2895 /* SUBREGs of objects should come second. */
2896 if (GET_CODE (op) == SUBREG
2897 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (op))) == 'o')
2898 return -2;
2899
2900 /* If only one operand is a `neg', `not',
2901 `mult', `plus', or `minus' expression, it will be the first
2902 operand. */
2903 if (GET_CODE (op) == NEG || GET_CODE (op) == NOT
2904 || GET_CODE (op) == MULT || GET_CODE (op) == PLUS
2905 || GET_CODE (op) == MINUS)
2906 return 2;
2907
2908 /* Complex expressions should come first, so decrease the priority
2909 of objects.  */
2910 if (GET_RTX_CLASS (GET_CODE (op)) == 'o')
2911 return -1;
2912 return 0;
2913 }
2914
2915 /* Return 1 iff it is necessary to swap the operands of a commutative
2916 operation in order to canonicalize the expression.  */
2917
2918 int
2919 swap_commutative_operands_p (x, y)
2920 rtx x, y;
2921 {
2922 return (commutative_operand_precedence (x)
2923 < commutative_operand_precedence (y));
2924 }
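/* Illustrative example (editorial addition): canonicalizing a
   commutative PLUS.  For x = (const_int 4) and y = (reg:SI 1),
   commutative_operand_precedence gives -5 and -1 respectively, so
   swap_commutative_operands_p (x, y) returns 1 and a caller such as
   simplify_gen_binary would emit (plus:SI (reg:SI 1) (const_int 4)),
   with the constant in the canonical second position.  */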
2925
2926 /* Return 1 if X is an autoincrement side effect and the register is
2927 not the stack pointer. */
2928 int
2929 auto_inc_p (x)
2930 rtx x;
2931 {
2932 switch (GET_CODE (x))
2933 {
2934 case PRE_INC:
2935 case POST_INC:
2936 case PRE_DEC:
2937 case POST_DEC:
2938 case PRE_MODIFY:
2939 case POST_MODIFY:
2940 /* There are no REG_INC notes for SP. */
2941 if (XEXP (x, 0) != stack_pointer_rtx)
2942 return 1;
2943 default:
2944 break;
2945 }
2946 return 0;
2947 }
2948
2949 /* Return 1 if the sequence of instructions beginning with FROM and up
2950 to and including TO is safe to move. If NEW_TO is non-NULL, and
2951 the sequence is not already safe to move, but can be easily
2952 extended to a sequence which is safe, then NEW_TO will point to the
2953 end of the extended sequence.
2954
2955 For now, this function only checks that the region contains whole
2956 exception regions, but it could be extended to check additional
2957 conditions as well. */
2958
2959 int
2960 insns_safe_to_move_p (from, to, new_to)
2961 rtx from;
2962 rtx to;
2963 rtx *new_to;
2964 {
2965 int eh_region_count = 0;
2966 int past_to_p = 0;
2967 rtx r = from;
2968
2969 /* By default, assume the end of the region will be what was
2970 suggested. */
2971 if (new_to)
2972 *new_to = to;
2973
2974 while (r)
2975 {
2976 if (GET_CODE (r) == NOTE)
2977 {
2978 switch (NOTE_LINE_NUMBER (r))
2979 {
2980 case NOTE_INSN_EH_REGION_BEG:
2981 ++eh_region_count;
2982 break;
2983
2984 case NOTE_INSN_EH_REGION_END:
2985 if (eh_region_count == 0)
2986 /* This sequence of instructions contains the end of
2987 an exception region, but not the beginning.  Moving
2988 it will cause chaos. */
2989 return 0;
2990
2991 --eh_region_count;
2992 break;
2993
2994 default:
2995 break;
2996 }
2997 }
2998 else if (past_to_p)
2999 /* If we've passed TO, and we see a non-note instruction, we
3000 can't extend the sequence to a movable sequence. */
3001 return 0;
3002
3003 if (r == to)
3004 {
3005 if (!new_to)
3006 /* It's OK to move the sequence if there were matched sets of
3007 exception region notes. */
3008 return eh_region_count == 0;
3009
3010 past_to_p = 1;
3011 }
3012
3013 /* It's OK to move the sequence if there were matched sets of
3014 exception region notes. */
3015 if (past_to_p && eh_region_count == 0)
3016 {
3017 *new_to = r;
3018 return 1;
3019 }
3020
3021 /* Go to the next instruction. */
3022 r = NEXT_INSN (r);
3023 }
3024
3025 return 0;
3026 }
3027
3028 /* Return nonzero if IN contains a piece of rtl whose address is LOC.  */
3029 int
3030 loc_mentioned_in_p (loc, in)
3031 rtx *loc, in;
3032 {
3033 enum rtx_code code = GET_CODE (in);
3034 const char *fmt = GET_RTX_FORMAT (code);
3035 int i, j;
3036
3037 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3038 {
3039 if (loc == &in->fld[i].rtx)
3040 return 1;
3041 if (fmt[i] == 'e')
3042 {
3043 if (loc_mentioned_in_p (loc, XEXP (in, i)))
3044 return 1;
3045 }
3046 else if (fmt[i] == 'E')
3047 for (j = XVECLEN (in, i) - 1; j >= 0; j--)
3048 if (loc_mentioned_in_p (loc, XVECEXP (in, i, j)))
3049 return 1;
3050 }
3051 return 0;
3052 }
3053
3054 /* Given a subreg X, return the bit offset where the subreg begins
3055 (counting from the least significant bit of the reg). */
3056
3057 unsigned int
3058 subreg_lsb (x)
3059 rtx x;
3060 {
3061 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (x));
3062 enum machine_mode mode = GET_MODE (x);
3063 unsigned int bitpos;
3064 unsigned int byte;
3065 unsigned int word;
3066
3067 /* A paradoxical subreg begins at bit position 0. */
3068 if (GET_MODE_BITSIZE (mode) > GET_MODE_BITSIZE (inner_mode))
3069 return 0;
3070
3071 if (WORDS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
3072 /* If the subreg crosses a word boundary ensure that
3073 it also begins and ends on a word boundary. */
3074 if ((SUBREG_BYTE (x) % UNITS_PER_WORD
3075 + GET_MODE_SIZE (mode)) > UNITS_PER_WORD
3076 && (SUBREG_BYTE (x) % UNITS_PER_WORD
3077 || GET_MODE_SIZE (mode) % UNITS_PER_WORD))
3078 abort ();
3079
3080 if (WORDS_BIG_ENDIAN)
3081 word = (GET_MODE_SIZE (inner_mode)
3082 - (SUBREG_BYTE (x) + GET_MODE_SIZE (mode))) / UNITS_PER_WORD;
3083 else
3084 word = SUBREG_BYTE (x) / UNITS_PER_WORD;
3085 bitpos = word * BITS_PER_WORD;
3086
3087 if (BYTES_BIG_ENDIAN)
3088 byte = (GET_MODE_SIZE (inner_mode)
3089 - (SUBREG_BYTE (x) + GET_MODE_SIZE (mode))) % UNITS_PER_WORD;
3090 else
3091 byte = SUBREG_BYTE (x) % UNITS_PER_WORD;
3092 bitpos += byte * BITS_PER_UNIT;
3093
3094 return bitpos;
3095 }
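/* Worked example (editorial addition), assuming a 32-bit target with
   UNITS_PER_WORD == 4 and BITS_PER_UNIT == 8: for
   (subreg:QI (reg:SI 100) 2),

       little-endian (neither WORDS_ nor BYTES_BIG_ENDIAN):
         word = 2 / 4 = 0, byte = 2 % 4 = 2, so the subreg starts
         at bit 16 of the register;

       big-endian (both WORDS_ and BYTES_BIG_ENDIAN):
         word = (4 - (2 + 1)) / 4 = 0, byte = (4 - (2 + 1)) % 4 = 1,
         so the subreg starts at bit 8.

   A paradoxical subreg always reports bit 0, as noted above.  */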
3096
3097 /* This function returns the regno offset of a subreg expression.
3098 xregno - A regno of an inner hard subreg_reg (or what will become one).
3099 xmode - The mode of xregno.
3100 offset - The byte offset.
3101 ymode - The mode of a top level SUBREG (or what may become one).
3102 RETURN - The regno offset which would be used. */
3103 unsigned int
3104 subreg_regno_offset (xregno, xmode, offset, ymode)
3105 unsigned int xregno;
3106 enum machine_mode xmode;
3107 unsigned int offset;
3108 enum machine_mode ymode;
3109 {
3110 int nregs_xmode, nregs_ymode;
3111 int mode_multiple, nregs_multiple;
3112 int y_offset;
3113
3114 if (xregno >= FIRST_PSEUDO_REGISTER)
3115 abort ();
3116
3117 nregs_xmode = HARD_REGNO_NREGS (xregno, xmode);
3118 nregs_ymode = HARD_REGNO_NREGS (xregno, ymode);
3119 if (offset == 0 || nregs_xmode == nregs_ymode)
3120 return 0;
3121
3122 /* The size of YMODE must not be greater than the size of XMODE.  */
3123 mode_multiple = GET_MODE_SIZE (xmode) / GET_MODE_SIZE (ymode);
3124 if (mode_multiple == 0)
3125 abort ();
3126
3127 y_offset = offset / GET_MODE_SIZE (ymode);
3128 nregs_multiple = nregs_xmode / nregs_ymode;
3129 return (y_offset / (mode_multiple / nregs_multiple)) * nregs_ymode;
3130 }
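/* Worked example (editorial addition), for a hypothetical 32-bit
   target where DImode occupies two consecutive hard registers and
   SImode one: subreg_regno_offset (REGNO, DImode, 4, SImode) computes
   nregs_xmode = 2, nregs_ymode = 1, mode_multiple = 8 / 4 = 2,
   y_offset = 4 / 4 = 1 and nregs_multiple = 2, giving an offset of
   one register, i.e. (subreg:SI (reg:DI k) 4) lives in hard register
   k + 1.  */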
3131
3132 /* Return the final regno that a subreg expression refers to. */
3133 unsigned int
3134 subreg_regno (x)
3135 rtx x;
3136 {
3137 unsigned int ret;
3138 rtx subreg = SUBREG_REG (x);
3139 int regno = REGNO (subreg);
3140
3141 ret = regno + subreg_regno_offset (regno,
3142 GET_MODE (subreg),
3143 SUBREG_BYTE (x),
3144 GET_MODE (x));
3145 return ret;
3146 }
3147
3148 struct parms_set_data
3149 {
3150 int nregs;
3151 HARD_REG_SET regs;
3152 };
3153
3154 /* Helper function for noticing stores to parameter registers. */
3155 static void
3156 parms_set (x, pat, data)
3157 rtx x, pat ATTRIBUTE_UNUSED;
3158 void *data;
3159 {
3160 struct parms_set_data *d = data;
3161 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3162 && TEST_HARD_REG_BIT (d->regs, REGNO (x)))
3163 {
3164 CLEAR_HARD_REG_BIT (d->regs, REGNO (x));
3165 d->nregs--;
3166 }
3167 }
3168
3169 /* Look backward for the first parameter to be loaded.
3170 Do not skip BOUNDARY.  */
3171 rtx
3172 find_first_parameter_load (call_insn, boundary)
3173 rtx call_insn, boundary;
3174 {
3175 struct parms_set_data parm;
3176 rtx p, before;
3177
3178 /* Since different machines initialize their parameter registers
3179 in different orders, assume nothing. Collect the set of all
3180 parameter registers. */
3181 CLEAR_HARD_REG_SET (parm.regs);
3182 parm.nregs = 0;
3183 for (p = CALL_INSN_FUNCTION_USAGE (call_insn); p; p = XEXP (p, 1))
3184 if (GET_CODE (XEXP (p, 0)) == USE
3185 && GET_CODE (XEXP (XEXP (p, 0), 0)) == REG)
3186 {
3187 if (REGNO (XEXP (XEXP (p, 0), 0)) >= FIRST_PSEUDO_REGISTER)
3188 abort ();
3189
3190 /* We only care about registers which can hold function
3191 arguments. */
3192 if (!FUNCTION_ARG_REGNO_P (REGNO (XEXP (XEXP (p, 0), 0))))
3193 continue;
3194
3195 SET_HARD_REG_BIT (parm.regs, REGNO (XEXP (XEXP (p, 0), 0)));
3196 parm.nregs++;
3197 }
3198 before = call_insn;
3199
3200 /* Search backward for the first set of a register in this set. */
3201 while (parm.nregs && before != boundary)
3202 {
3203 before = PREV_INSN (before);
3204
3205 /* It is possible that some loads got CSEed from one call to
3206 another. Stop in that case. */
3207 if (GET_CODE (before) == CALL_INSN)
3208 break;
3209
3210 /* Our caller must either ensure that we will find all sets
3211 (in case the code has not been optimized yet), or take care of
3212 possible labels by setting BOUNDARY to the preceding
3213 CODE_LABEL.  */
3214 if (GET_CODE (before) == CODE_LABEL)
3215 {
3216 if (before != boundary)
3217 abort ();
3218 break;
3219 }
3220
3221 if (INSN_P (before))
3222 note_stores (PATTERN (before), parms_set, &parm);
3223 }
3224 return before;
3225 }
3226
3227 /* Return true if we should avoid inserting code between INSN and the
3228 preceding call instruction.  */
3229
3230 bool
3231 keep_with_call_p (insn)
3232 rtx insn;
3233 {
3234 rtx set;
3235
3236 if (INSN_P (insn) && (set = single_set (insn)) != NULL)
3237 {
3238 if (GET_CODE (SET_DEST (set)) == REG
3239 && fixed_regs[REGNO (SET_DEST (set))]
3240 && general_operand (SET_SRC (set), VOIDmode))
3241 return true;
3242 if (GET_CODE (SET_SRC (set)) == REG
3243 && FUNCTION_VALUE_REGNO_P (REGNO (SET_SRC (set)))
3244 && GET_CODE (SET_DEST (set)) == REG
3245 && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER)
3246 return true;
3247 /* There may be a stack pop just after the call and before the store
3248 of the return register. Search for the actual store when deciding
3249 if we can break or not. */
3250 if (SET_DEST (set) == stack_pointer_rtx)
3251 {
3252 rtx i2 = next_nonnote_insn (insn);
3253 if (i2 && keep_with_call_p (i2))
3254 return true;
3255 }
3256 }
3257 return false;
3258 }