1 /* Analyze RTL for C-Compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "toplev.h"
28 #include "rtl.h"
29 #include "hard-reg-set.h"
30 #include "insn-config.h"
31 #include "recog.h"
32 #include "target.h"
33 #include "output.h"
34 #include "tm_p.h"
35 #include "flags.h"
36 #include "basic-block.h"
37 #include "real.h"
38 #include "regs.h"
39
40 /* Forward declarations */
41 static int global_reg_mentioned_p_1 (rtx *, void *);
42 static void set_of_1 (rtx, rtx, void *);
43 static void insn_dependent_p_1 (rtx, rtx, void *);
44 static int rtx_referenced_p_1 (rtx *, void *);
45 static int computed_jump_p_1 (rtx);
46 static void parms_set (rtx, rtx, void *);
47 static bool hoist_test_store (rtx, rtx, regset);
48 static void hoist_update_store (rtx, rtx *, rtx, rtx);
49
50 /* Bit flags that specify the machine subtype we are compiling for.
51 Bits are tested using macros TARGET_... defined in the tm.h file
52 and set by `-m...' switches. Must be defined in rtlanal.c. */
53
54 int target_flags;
55 \f
56 /* Return 1 if the value of X is unstable
57 (would be different at a different point in the program).
58 The frame pointer, arg pointer, etc. are considered stable
59 (within one function) and so is anything marked `unchanging'. */
60
61 int
62 rtx_unstable_p (rtx x)
63 {
64 RTX_CODE code = GET_CODE (x);
65 int i;
66 const char *fmt;
67
68 switch (code)
69 {
70 case MEM:
71 return ! RTX_UNCHANGING_P (x) || rtx_unstable_p (XEXP (x, 0));
72
73 case QUEUED:
74 return 1;
75
76 case ADDRESSOF:
77 case CONST:
78 case CONST_INT:
79 case CONST_DOUBLE:
80 case CONST_VECTOR:
81 case SYMBOL_REF:
82 case LABEL_REF:
83 return 0;
84
85 case REG:
86 /* As in rtx_varies_p, we have to use the actual rtx, not reg number. */
87 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
88 /* The arg pointer varies if it is not a fixed register. */
89 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])
90 || RTX_UNCHANGING_P (x))
91 return 0;
92 #ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
93 /* ??? When call-clobbered, the value is stable modulo the restore
94 that must happen after a call. This currently screws up local-alloc
95 into believing that the restore is not needed. */
96 if (x == pic_offset_table_rtx)
97 return 0;
98 #endif
99 return 1;
100
101 case ASM_OPERANDS:
102 if (MEM_VOLATILE_P (x))
103 return 1;
104
105 /* Fall through. */
106
107 default:
108 break;
109 }
110
111 fmt = GET_RTX_FORMAT (code);
112 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
113 if (fmt[i] == 'e')
114 {
115 if (rtx_unstable_p (XEXP (x, i)))
116 return 1;
117 }
118 else if (fmt[i] == 'E')
119 {
120 int j;
121 for (j = 0; j < XVECLEN (x, i); j++)
122 if (rtx_unstable_p (XVECEXP (x, i, j)))
123 return 1;
124 }
125
126 return 0;
127 }
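
/* Editor's illustrative sketch, not part of the original rtlanal.c: how a
   caller might use rtx_unstable_p.  An unchanging memory reference at a
   stable address can be treated as invariant for the duration of the
   function; anything else must be re-evaluated at each use.  The helper
   name below is hypothetical.  */

static int
example_function_invariant_p (rtx x)
{
  /* Constants trivially qualify; otherwise defer to rtx_unstable_p.  */
  return CONSTANT_P (x) || ! rtx_unstable_p (x);
}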
128
129 /* Return 1 if X has a value that can vary even between two
130 executions of the program. 0 means X can be compared reliably
131 against certain constants or near-constants.
132 FOR_ALIAS is nonzero if we are called from alias analysis; if it is
133 zero, we are slightly more conservative.
134 The frame pointer and the arg pointer are considered constant. */
135
136 int
137 rtx_varies_p (rtx x, int for_alias)
138 {
139 RTX_CODE code;
140 int i;
141 const char *fmt;
142
143 if (!x)
144 return 0;
145
146 code = GET_CODE (x);
147 switch (code)
148 {
149 case MEM:
150 return ! RTX_UNCHANGING_P (x) || rtx_varies_p (XEXP (x, 0), for_alias);
151
152 case QUEUED:
153 return 1;
154
155 case CONST:
156 case CONST_INT:
157 case CONST_DOUBLE:
158 case CONST_VECTOR:
159 case SYMBOL_REF:
160 case LABEL_REF:
161 return 0;
162
163 case ADDRESSOF:
164 /* This will resolve to some offset from the frame pointer. */
165 return 0;
166
167 case REG:
168 /* Note that we have to test for the actual rtx used for the frame
169 and arg pointers and not just the register number in case we have
170 eliminated the frame and/or arg pointer and are using it
171 for pseudos. */
172 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
173 /* The arg pointer varies if it is not a fixed register. */
174 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
175 return 0;
176 if (x == pic_offset_table_rtx
177 #ifdef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
178 /* ??? When call-clobbered, the value is stable modulo the restore
179 that must happen after a call. This currently screws up
180 local-alloc into believing that the restore is not needed, so we
181 must return 0 only if we are called from alias analysis. */
182 && for_alias
183 #endif
184 )
185 return 0;
186 return 1;
187
188 case LO_SUM:
189 /* The operand 0 of a LO_SUM is considered constant
190 (in fact it is related specifically to operand 1)
191 during alias analysis. */
192 return (! for_alias && rtx_varies_p (XEXP (x, 0), for_alias))
193 || rtx_varies_p (XEXP (x, 1), for_alias);
194
195 case ASM_OPERANDS:
196 if (MEM_VOLATILE_P (x))
197 return 1;
198
199 /* Fall through. */
200
201 default:
202 break;
203 }
204
205 fmt = GET_RTX_FORMAT (code);
206 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
207 if (fmt[i] == 'e')
208 {
209 if (rtx_varies_p (XEXP (x, i), for_alias))
210 return 1;
211 }
212 else if (fmt[i] == 'E')
213 {
214 int j;
215 for (j = 0; j < XVECLEN (x, i); j++)
216 if (rtx_varies_p (XVECEXP (x, i, j), for_alias))
217 return 1;
218 }
219
220 return 0;
221 }
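
/* Editor's sketch (hypothetical helper): rtx_varies_p answers a weaker
   question than rtx_unstable_p -- whether X can differ between two
   *executions* of the program.  A caller that wants to compare X against
   constants, and is not inside alias analysis, uses the conservative
   form with FOR_ALIAS == 0.  */

static int
example_comparable_against_constants_p (rtx x)
{
  return ! rtx_varies_p (x, 0);
}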
222
223 /* Return 0 if the use of X as an address in a MEM can cause a trap. */
224
225 int
226 rtx_addr_can_trap_p (rtx x)
227 {
228 enum rtx_code code = GET_CODE (x);
229
230 switch (code)
231 {
232 case SYMBOL_REF:
233 return SYMBOL_REF_WEAK (x);
234
235 case LABEL_REF:
236 return 0;
237
238 case ADDRESSOF:
239 /* This will resolve to some offset from the frame pointer. */
240 return 0;
241
242 case REG:
243 /* As in rtx_varies_p, we have to use the actual rtx, not reg number. */
244 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
245 || x == stack_pointer_rtx
246 /* The arg pointer varies if it is not a fixed register. */
247 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
248 return 0;
249 /* All of the virtual frame registers are stack references. */
250 if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
251 && REGNO (x) <= LAST_VIRTUAL_REGISTER)
252 return 0;
253 return 1;
254
255 case CONST:
256 return rtx_addr_can_trap_p (XEXP (x, 0));
257
258 case PLUS:
259 /* An address is assumed not to trap if it is an address that can't
260 trap plus a constant integer or it is the pic register plus a
261 constant. */
262 return ! ((! rtx_addr_can_trap_p (XEXP (x, 0))
263 && GET_CODE (XEXP (x, 1)) == CONST_INT)
264 || (XEXP (x, 0) == pic_offset_table_rtx
265 && CONSTANT_P (XEXP (x, 1))));
266
267 case LO_SUM:
268 case PRE_MODIFY:
269 return rtx_addr_can_trap_p (XEXP (x, 1));
270
271 case PRE_DEC:
272 case PRE_INC:
273 case POST_DEC:
274 case POST_INC:
275 case POST_MODIFY:
276 return rtx_addr_can_trap_p (XEXP (x, 0));
277
278 default:
279 break;
280 }
281
282 /* If it isn't one of the cases above, it can cause a trap. */
283 return 1;
284 }
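
/* Editor's sketch (hypothetical helper): a typical use of
   rtx_addr_can_trap_p is deciding whether a load may be executed
   speculatively, e.g. when hoisting it above a conditional branch.  */

static int
example_may_hoist_load_p (rtx mem)
{
  return GET_CODE (mem) == MEM
	 && ! MEM_VOLATILE_P (mem)
	 && ! rtx_addr_can_trap_p (XEXP (mem, 0));
}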
285
286 /* Return true if X is an address that is known to not be zero. */
287
288 bool
289 nonzero_address_p (rtx x)
290 {
291 enum rtx_code code = GET_CODE (x);
292
293 switch (code)
294 {
295 case SYMBOL_REF:
296 return !SYMBOL_REF_WEAK (x);
297
298 case LABEL_REF:
299 return true;
300
301 case ADDRESSOF:
302 /* This will resolve to some offset from the frame pointer. */
303 return true;
304
305 case REG:
306 /* As in rtx_varies_p, we have to use the actual rtx, not reg number. */
307 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
308 || x == stack_pointer_rtx
309 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
310 return true;
311 /* All of the virtual frame registers are stack references. */
312 if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
313 && REGNO (x) <= LAST_VIRTUAL_REGISTER)
314 return true;
315 return false;
316
317 case CONST:
318 return nonzero_address_p (XEXP (x, 0));
319
320 case PLUS:
321 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
322 {
323 /* Pointers aren't allowed to wrap. If we've got a register
324 that is known to be a pointer, and a positive offset, then
325 the composite can't be zero. */
326 if (INTVAL (XEXP (x, 1)) > 0
327 && REG_P (XEXP (x, 0))
328 && REG_POINTER (XEXP (x, 0)))
329 return true;
330
331 return nonzero_address_p (XEXP (x, 0));
332 }
333 /* Handle PIC references. */
334 else if (XEXP (x, 0) == pic_offset_table_rtx
335 && CONSTANT_P (XEXP (x, 1)))
336 return true;
337 return false;
338
339 case PRE_MODIFY:
340 /* Similar to the above; allow positive offsets. Further, since
341 auto-inc is only allowed in memories, the register must be a
342 pointer. */
343 if (GET_CODE (XEXP (x, 1)) == CONST_INT
344 && INTVAL (XEXP (x, 1)) > 0)
345 return true;
346 return nonzero_address_p (XEXP (x, 0));
347
348 case PRE_INC:
349 /* Similarly. Further, the offset is always positive. */
350 return true;
351
352 case PRE_DEC:
353 case POST_DEC:
354 case POST_INC:
355 case POST_MODIFY:
356 return nonzero_address_p (XEXP (x, 0));
357
358 case LO_SUM:
359 return nonzero_address_p (XEXP (x, 1));
360
361 default:
362 break;
363 }
364
365 /* If it isn't one of the cases above, it might be zero. */
366 return false;
367 }
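
/* Editor's sketch (hypothetical helper): nonzero_address_p lets a
   simplifier fold a pointer comparison against zero.  Returns the folded
   result, or NULL_RTX when nothing is known.  */

static rtx
example_fold_pointer_eq_zero (rtx addr)
{
  /* (eq ADDR (const_int 0)) is known false if ADDR cannot be zero.  */
  return nonzero_address_p (addr) ? const0_rtx : NULL_RTX;
}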
368
369 /* Return 1 if X refers to a memory location whose address
370 cannot be compared reliably with constant addresses,
371 or if X refers to a BLKmode memory object.
372 FOR_ALIAS is nonzero if we are called from alias analysis; if it is
373 zero, we are slightly more conservative. */
374
375 int
376 rtx_addr_varies_p (rtx x, int for_alias)
377 {
378 enum rtx_code code;
379 int i;
380 const char *fmt;
381
382 if (x == 0)
383 return 0;
384
385 code = GET_CODE (x);
386 if (code == MEM)
387 return GET_MODE (x) == BLKmode || rtx_varies_p (XEXP (x, 0), for_alias);
388
389 fmt = GET_RTX_FORMAT (code);
390 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
391 if (fmt[i] == 'e')
392 {
393 if (rtx_addr_varies_p (XEXP (x, i), for_alias))
394 return 1;
395 }
396 else if (fmt[i] == 'E')
397 {
398 int j;
399 for (j = 0; j < XVECLEN (x, i); j++)
400 if (rtx_addr_varies_p (XVECEXP (x, i, j), for_alias))
401 return 1;
402 }
403 return 0;
404 }
405 \f
406 /* Return the value of the integer term in X, if one is apparent;
407 otherwise return 0.
408 Only obvious integer terms are detected.
409 This is used in cse.c with the `related_value' field. */
410
411 HOST_WIDE_INT
412 get_integer_term (rtx x)
413 {
414 if (GET_CODE (x) == CONST)
415 x = XEXP (x, 0);
416
417 if (GET_CODE (x) == MINUS
418 && GET_CODE (XEXP (x, 1)) == CONST_INT)
419 return - INTVAL (XEXP (x, 1));
420 if (GET_CODE (x) == PLUS
421 && GET_CODE (XEXP (x, 1)) == CONST_INT)
422 return INTVAL (XEXP (x, 1));
423 return 0;
424 }
425
426 /* If X is a constant, return the value sans apparent integer term;
427 otherwise return 0.
428 Only obvious integer terms are detected. */
429
430 rtx
431 get_related_value (rtx x)
432 {
433 if (GET_CODE (x) != CONST)
434 return 0;
435 x = XEXP (x, 0);
436 if (GET_CODE (x) == PLUS
437 && GET_CODE (XEXP (x, 1)) == CONST_INT)
438 return XEXP (x, 0);
439 else if (GET_CODE (x) == MINUS
440 && GET_CODE (XEXP (x, 1)) == CONST_INT)
441 return XEXP (x, 0);
442 return 0;
443 }
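
/* Editor's sketch (hypothetical helper): get_related_value and
   get_integer_term together decompose a constant of the form
   (const (plus SYMBOL (const_int N))) into SYMBOL and N, which is how
   cse.c relates addresses that differ only by an offset.  */

static void
example_split_symbolic_address (rtx x, rtx *base, HOST_WIDE_INT *offset)
{
  *base = get_related_value (x);	/* 0 unless X has the expected form.  */
  *offset = get_integer_term (x);	/* 0 if no obvious integer term.  */
}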
444 \f
445 /* Given a tablejump insn INSN, return the RTL expression for the offset
446 into the jump table. If the offset cannot be determined, then return
447 NULL_RTX.
448
449 If EARLIEST is nonzero, it is a pointer to a place in which to store
450 the earliest insn used in locating the offset. */
451
452 rtx
453 get_jump_table_offset (rtx insn, rtx *earliest)
454 {
455 rtx label;
456 rtx table;
457 rtx set;
458 rtx old_insn;
459 rtx x;
460 rtx old_x;
461 rtx y;
462 rtx old_y;
463 int i;
464
465 if (!tablejump_p (insn, &label, &table) || !(set = single_set (insn)))
466 return NULL_RTX;
467
468 x = SET_SRC (set);
469
470 /* Some targets (eg, ARM) emit a tablejump that also
471 contains the out-of-range target. */
472 if (GET_CODE (x) == IF_THEN_ELSE
473 && GET_CODE (XEXP (x, 2)) == LABEL_REF)
474 x = XEXP (x, 1);
475
476 /* Search backwards and locate the expression stored in X. */
477 for (old_x = NULL_RTX; GET_CODE (x) == REG && x != old_x;
478 old_x = x, x = find_last_value (x, &insn, NULL_RTX, 0))
479 ;
480
481 /* If X is an expression using a relative address then strip
482 off the addition / subtraction of PC, PIC_OFFSET_TABLE_REGNUM,
483 or the jump table label. */
484 if (GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC
485 && (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS))
486 {
487 for (i = 0; i < 2; i++)
488 {
489 old_insn = insn;
490 y = XEXP (x, i);
491
492 if (y == pc_rtx || y == pic_offset_table_rtx)
493 break;
494
495 for (old_y = NULL_RTX; GET_CODE (y) == REG && y != old_y;
496 old_y = y, y = find_last_value (y, &old_insn, NULL_RTX, 0))
497 ;
498
499 if (GET_CODE (y) == LABEL_REF && XEXP (y, 0) == label)
500 break;
501 }
502
503 if (i >= 2)
504 return NULL_RTX;
505
506 x = XEXP (x, 1 - i);
507
508 for (old_x = NULL_RTX; GET_CODE (x) == REG && x != old_x;
509 old_x = x, x = find_last_value (x, &insn, NULL_RTX, 0))
510 ;
511 }
512
513 /* Strip off any sign or zero extension. */
514 if (GET_CODE (x) == SIGN_EXTEND || GET_CODE (x) == ZERO_EXTEND)
515 {
516 x = XEXP (x, 0);
517
518 for (old_x = NULL_RTX; GET_CODE (x) == REG && x != old_x;
519 old_x = x, x = find_last_value (x, &insn, NULL_RTX, 0))
520 ;
521 }
522
523 /* If X isn't a MEM then this isn't a tablejump we understand. */
524 if (GET_CODE (x) != MEM)
525 return NULL_RTX;
526
527 /* Strip off the MEM. */
528 x = XEXP (x, 0);
529
530 for (old_x = NULL_RTX; GET_CODE (x) == REG && x != old_x;
531 old_x = x, x = find_last_value (x, &insn, NULL_RTX, 0))
532 ;
533
534 /* If X isn't a PLUS then this isn't a tablejump we understand. */
535 if (GET_CODE (x) != PLUS)
536 return NULL_RTX;
537
538 /* At this point we should have an expression representing the jump table
539 plus an offset. Examine each operand in order to determine which one
540 represents the jump table. Knowing that tells us that the other operand
541 must represent the offset. */
542 for (i = 0; i < 2; i++)
543 {
544 old_insn = insn;
545 y = XEXP (x, i);
546
547 for (old_y = NULL_RTX; GET_CODE (y) == REG && y != old_y;
548 old_y = y, y = find_last_value (y, &old_insn, NULL_RTX, 0))
549 ;
550
551 if ((GET_CODE (y) == CONST || GET_CODE (y) == LABEL_REF)
552 && reg_mentioned_p (label, y))
553 break;
554 }
555
556 if (i >= 2)
557 return NULL_RTX;
558
559 x = XEXP (x, 1 - i);
560
561 /* Strip off the addition / subtraction of PIC_OFFSET_TABLE_REGNUM. */
562 if (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS)
563 for (i = 0; i < 2; i++)
564 if (XEXP (x, i) == pic_offset_table_rtx)
565 {
566 x = XEXP (x, 1 - i);
567 break;
568 }
569
570 if (earliest)
571 *earliest = insn;
572
573 /* Return the RTL expression representing the offset. */
574 return x;
575 }
576 \f
577 /* A subroutine of global_reg_mentioned_p, returns 1 if *LOC mentions
578 a global register. */
579
580 static int
581 global_reg_mentioned_p_1 (rtx *loc, void *data ATTRIBUTE_UNUSED)
582 {
583 int regno;
584 rtx x = *loc;
585
586 if (! x)
587 return 0;
588
589 switch (GET_CODE (x))
590 {
591 case SUBREG:
592 if (GET_CODE (SUBREG_REG (x)) == REG)
593 {
594 if (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER
595 && global_regs[subreg_regno (x)])
596 return 1;
597 return 0;
598 }
599 break;
600
601 case REG:
602 regno = REGNO (x);
603 if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
604 return 1;
605 return 0;
606
607 case SCRATCH:
608 case PC:
609 case CC0:
610 case CONST_INT:
611 case CONST_DOUBLE:
612 case CONST:
613 case LABEL_REF:
614 return 0;
615
616 case CALL:
617 /* A non-constant call might use a global register. */
618 return 1;
619
620 default:
621 break;
622 }
623
624 return 0;
625 }
626
627 /* Returns nonzero if X mentions a global register. */
628
629 int
630 global_reg_mentioned_p (rtx x)
631 {
632 if (INSN_P (x))
633 {
634 if (GET_CODE (x) == CALL_INSN)
635 {
636 if (! CONST_OR_PURE_CALL_P (x))
637 return 1;
638 x = CALL_INSN_FUNCTION_USAGE (x);
639 if (x == 0)
640 return 0;
641 }
642 else
643 x = PATTERN (x);
644 }
645
646 return for_each_rtx (&x, global_reg_mentioned_p_1, NULL);
647 }
648 \f
649 /* Return the number of places FIND appears within X. If COUNT_DEST is
650 zero, we do not count occurrences inside the destination of a SET. */
651
652 int
653 count_occurrences (rtx x, rtx find, int count_dest)
654 {
655 int i, j;
656 enum rtx_code code;
657 const char *format_ptr;
658 int count;
659
660 if (x == find)
661 return 1;
662
663 code = GET_CODE (x);
664
665 switch (code)
666 {
667 case REG:
668 case CONST_INT:
669 case CONST_DOUBLE:
670 case CONST_VECTOR:
671 case SYMBOL_REF:
672 case CODE_LABEL:
673 case PC:
674 case CC0:
675 return 0;
676
677 case MEM:
678 if (GET_CODE (find) == MEM && rtx_equal_p (x, find))
679 return 1;
680 break;
681
682 case SET:
683 if (SET_DEST (x) == find && ! count_dest)
684 return count_occurrences (SET_SRC (x), find, count_dest);
685 break;
686
687 default:
688 break;
689 }
690
691 format_ptr = GET_RTX_FORMAT (code);
692 count = 0;
693
694 for (i = 0; i < GET_RTX_LENGTH (code); i++)
695 {
696 switch (*format_ptr++)
697 {
698 case 'e':
699 count += count_occurrences (XEXP (x, i), find, count_dest);
700 break;
701
702 case 'E':
703 for (j = 0; j < XVECLEN (x, i); j++)
704 count += count_occurrences (XVECEXP (x, i, j), find, count_dest);
705 break;
706 }
707 }
708 return count;
709 }
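
/* Editor's sketch (hypothetical helper): count_occurrences is handy for
   judging whether substituting FIND throughout an insn is worthwhile,
   e.g. before replacing a register by its known value.  */

static int
example_single_use_in_insn_p (rtx insn, rtx reg)
{
  /* COUNT_DEST == 0: occurrences inside a SET_DEST do not count.  */
  return count_occurrences (PATTERN (insn), reg, 0) == 1;
}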
710 \f
711 /* Nonzero if register REG appears somewhere within IN.
712 Also works if REG is not a register; in this case it checks
713 for a subexpression of IN that is Lisp "equal" to REG. */
714
715 int
716 reg_mentioned_p (rtx reg, rtx in)
717 {
718 const char *fmt;
719 int i;
720 enum rtx_code code;
721
722 if (in == 0)
723 return 0;
724
725 if (reg == in)
726 return 1;
727
728 if (GET_CODE (in) == LABEL_REF)
729 return reg == XEXP (in, 0);
730
731 code = GET_CODE (in);
732
733 switch (code)
734 {
735 /* Compare registers by number. */
736 case REG:
737 return GET_CODE (reg) == REG && REGNO (in) == REGNO (reg);
738
739 /* These codes have no constituent expressions
740 and are unique. */
741 case SCRATCH:
742 case CC0:
743 case PC:
744 return 0;
745
746 case CONST_INT:
747 case CONST_VECTOR:
748 case CONST_DOUBLE:
749 /* These are kept unique for a given value. */
750 return 0;
751
752 default:
753 break;
754 }
755
756 if (GET_CODE (reg) == code && rtx_equal_p (reg, in))
757 return 1;
758
759 fmt = GET_RTX_FORMAT (code);
760
761 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
762 {
763 if (fmt[i] == 'E')
764 {
765 int j;
766 for (j = XVECLEN (in, i) - 1; j >= 0; j--)
767 if (reg_mentioned_p (reg, XVECEXP (in, i, j)))
768 return 1;
769 }
770 else if (fmt[i] == 'e'
771 && reg_mentioned_p (reg, XEXP (in, i)))
772 return 1;
773 }
774 return 0;
775 }
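
/* Editor's sketch (hypothetical helper): reg_mentioned_p is the usual
   test before moving an insn that computes REG, to make sure a candidate
   insertion point does not already involve REG.  */

static int
example_insn_mentions_reg_p (rtx insn, rtx reg)
{
  return INSN_P (insn) && reg_mentioned_p (reg, PATTERN (insn));
}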
776 \f
777 /* Return 1 if in between BEG and END, exclusive of BEG and END, there is
778 no CODE_LABEL insn. */
779
780 int
781 no_labels_between_p (rtx beg, rtx end)
782 {
783 rtx p;
784 if (beg == end)
785 return 0;
786 for (p = NEXT_INSN (beg); p != end; p = NEXT_INSN (p))
787 if (GET_CODE (p) == CODE_LABEL)
788 return 0;
789 return 1;
790 }
791
792 /* Return 1 if in between BEG and END, exclusive of BEG and END, there is
793 no JUMP_INSN insn. */
794
795 int
796 no_jumps_between_p (rtx beg, rtx end)
797 {
798 rtx p;
799 for (p = NEXT_INSN (beg); p != end; p = NEXT_INSN (p))
800 if (GET_CODE (p) == JUMP_INSN)
801 return 0;
802 return 1;
803 }
804
805 /* Nonzero if register REG is used in an insn between
806 FROM_INSN and TO_INSN (exclusive of those two). */
807
808 int
809 reg_used_between_p (rtx reg, rtx from_insn, rtx to_insn)
810 {
811 rtx insn;
812
813 if (from_insn == to_insn)
814 return 0;
815
816 for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
817 if (INSN_P (insn)
818 && (reg_overlap_mentioned_p (reg, PATTERN (insn))
819 || (GET_CODE (insn) == CALL_INSN
820 && (find_reg_fusage (insn, USE, reg)
821 || find_reg_fusage (insn, CLOBBER, reg)))))
822 return 1;
823 return 0;
824 }
825 \f
826 /* Nonzero if the old value of X, a register, is referenced in BODY. If X
827 is entirely replaced by a new value and the only use is as a SET_DEST,
828 we do not consider it a reference. */
829
830 int
831 reg_referenced_p (rtx x, rtx body)
832 {
833 int i;
834
835 switch (GET_CODE (body))
836 {
837 case SET:
838 if (reg_overlap_mentioned_p (x, SET_SRC (body)))
839 return 1;
840
841 /* If the destination is anything other than CC0, PC, a REG or a SUBREG
842 of a REG that occupies all of the REG, the insn references X if
843 it is mentioned in the destination. */
844 if (GET_CODE (SET_DEST (body)) != CC0
845 && GET_CODE (SET_DEST (body)) != PC
846 && GET_CODE (SET_DEST (body)) != REG
847 && ! (GET_CODE (SET_DEST (body)) == SUBREG
848 && GET_CODE (SUBREG_REG (SET_DEST (body))) == REG
849 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (body))))
850 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
851 == ((GET_MODE_SIZE (GET_MODE (SET_DEST (body)))
852 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
853 && reg_overlap_mentioned_p (x, SET_DEST (body)))
854 return 1;
855 return 0;
856
857 case ASM_OPERANDS:
858 for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
859 if (reg_overlap_mentioned_p (x, ASM_OPERANDS_INPUT (body, i)))
860 return 1;
861 return 0;
862
863 case CALL:
864 case USE:
865 case IF_THEN_ELSE:
866 return reg_overlap_mentioned_p (x, body);
867
868 case TRAP_IF:
869 return reg_overlap_mentioned_p (x, TRAP_CONDITION (body));
870
871 case PREFETCH:
872 return reg_overlap_mentioned_p (x, XEXP (body, 0));
873
874 case UNSPEC:
875 case UNSPEC_VOLATILE:
876 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
877 if (reg_overlap_mentioned_p (x, XVECEXP (body, 0, i)))
878 return 1;
879 return 0;
880
881 case PARALLEL:
882 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
883 if (reg_referenced_p (x, XVECEXP (body, 0, i)))
884 return 1;
885 return 0;
886
887 case CLOBBER:
888 if (GET_CODE (XEXP (body, 0)) == MEM)
889 if (reg_overlap_mentioned_p (x, XEXP (XEXP (body, 0), 0)))
890 return 1;
891 return 0;
892
893 case COND_EXEC:
894 if (reg_overlap_mentioned_p (x, COND_EXEC_TEST (body)))
895 return 1;
896 return reg_referenced_p (x, COND_EXEC_CODE (body));
897
898 default:
899 return 0;
900 }
901 }
902
903 /* Nonzero if register REG is referenced in an insn between
904 FROM_INSN and TO_INSN (exclusive of those two). Sets of REG do
905 not count. */
906
907 int
908 reg_referenced_between_p (rtx reg, rtx from_insn, rtx to_insn)
909 {
910 rtx insn;
911
912 if (from_insn == to_insn)
913 return 0;
914
915 for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
916 if (INSN_P (insn)
917 && (reg_referenced_p (reg, PATTERN (insn))
918 || (GET_CODE (insn) == CALL_INSN
919 && find_reg_fusage (insn, USE, reg))))
920 return 1;
921 return 0;
922 }
923 \f
924 /* Nonzero if register REG is set or clobbered in an insn between
925 FROM_INSN and TO_INSN (exclusive of those two). */
926
927 int
928 reg_set_between_p (rtx reg, rtx from_insn, rtx to_insn)
929 {
930 rtx insn;
931
932 if (from_insn == to_insn)
933 return 0;
934
935 for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
936 if (INSN_P (insn) && reg_set_p (reg, insn))
937 return 1;
938 return 0;
939 }
940
941 /* Internals of reg_set_between_p. */
942 int
943 reg_set_p (rtx reg, rtx insn)
944 {
945 /* We can be passed an insn or part of one. If we are passed an insn,
946 check if a side-effect of the insn clobbers REG. */
947 if (INSN_P (insn)
948 && (FIND_REG_INC_NOTE (insn, reg)
949 || (GET_CODE (insn) == CALL_INSN
950 /* We'd like to test call_used_regs here, but rtlanal.c can't
951 reference that variable due to its use in genattrtab. So
952 we'll just be more conservative.
953
954 ??? Unless we could ensure that the CALL_INSN_FUNCTION_USAGE
955 information holds all clobbered registers. */
956 && ((GET_CODE (reg) == REG
957 && REGNO (reg) < FIRST_PSEUDO_REGISTER)
958 || GET_CODE (reg) == MEM
959 || find_reg_fusage (insn, CLOBBER, reg)))))
960 return 1;
961
962 return set_of (reg, insn) != NULL_RTX;
963 }
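
/* Editor's sketch (hypothetical helper): the classic combination of the
   queries above -- an insn can be moved across the range (FROM, TO), or a
   register renamed within it, only if the register is neither used nor
   set in between.  */

static int
example_reg_untouched_between_p (rtx reg, rtx from, rtx to)
{
  return ! reg_used_between_p (reg, from, to)
	 && ! reg_set_between_p (reg, from, to);
}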
964
965 /* Similar to reg_set_between_p, but check all registers in X. Return 0
966 only if none of them are modified between START and END. Do not
967 consider non-registers one way or the other. */
968
969 int
970 regs_set_between_p (rtx x, rtx start, rtx end)
971 {
972 enum rtx_code code = GET_CODE (x);
973 const char *fmt;
974 int i, j;
975
976 switch (code)
977 {
978 case CONST_INT:
979 case CONST_DOUBLE:
980 case CONST_VECTOR:
981 case CONST:
982 case SYMBOL_REF:
983 case LABEL_REF:
984 case PC:
985 case CC0:
986 return 0;
987
988 case REG:
989 return reg_set_between_p (x, start, end);
990
991 default:
992 break;
993 }
994
995 fmt = GET_RTX_FORMAT (code);
996 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
997 {
998 if (fmt[i] == 'e' && regs_set_between_p (XEXP (x, i), start, end))
999 return 1;
1000
1001 else if (fmt[i] == 'E')
1002 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
1003 if (regs_set_between_p (XVECEXP (x, i, j), start, end))
1004 return 1;
1005 }
1006
1007 return 0;
1008 }
1009
1010 /* Similar to reg_set_between_p, but check all registers in X. Return 0
1011 only if none of them are modified between START and END. Return 1 if
1012 X contains a MEM; this routine does use memory aliasing. */
1013
1014 int
1015 modified_between_p (rtx x, rtx start, rtx end)
1016 {
1017 enum rtx_code code = GET_CODE (x);
1018 const char *fmt;
1019 int i, j;
1020 rtx insn;
1021
1022 if (start == end)
1023 return 0;
1024
1025 switch (code)
1026 {
1027 case CONST_INT:
1028 case CONST_DOUBLE:
1029 case CONST_VECTOR:
1030 case CONST:
1031 case SYMBOL_REF:
1032 case LABEL_REF:
1033 return 0;
1034
1035 case PC:
1036 case CC0:
1037 return 1;
1038
1039 case MEM:
1040 if (RTX_UNCHANGING_P (x))
1041 return 0;
1042 if (modified_between_p (XEXP (x, 0), start, end))
1043 return 1;
1044 for (insn = NEXT_INSN (start); insn != end; insn = NEXT_INSN (insn))
1045 if (memory_modified_in_insn_p (x, insn))
1046 return 1;
1047 return 0;
1048 break;
1049
1050 case REG:
1051 return reg_set_between_p (x, start, end);
1052
1053 default:
1054 break;
1055 }
1056
1057 fmt = GET_RTX_FORMAT (code);
1058 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1059 {
1060 if (fmt[i] == 'e' && modified_between_p (XEXP (x, i), start, end))
1061 return 1;
1062
1063 else if (fmt[i] == 'E')
1064 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
1065 if (modified_between_p (XVECEXP (x, i, j), start, end))
1066 return 1;
1067 }
1068
1069 return 0;
1070 }
1071
1072 /* Similar to reg_set_p, but check all registers in X. Return 0 only if none
1073 of them are modified in INSN. Return 1 if X contains a MEM; this routine
1074 does use memory aliasing. */
1075
1076 int
1077 modified_in_p (rtx x, rtx insn)
1078 {
1079 enum rtx_code code = GET_CODE (x);
1080 const char *fmt;
1081 int i, j;
1082
1083 switch (code)
1084 {
1085 case CONST_INT:
1086 case CONST_DOUBLE:
1087 case CONST_VECTOR:
1088 case CONST:
1089 case SYMBOL_REF:
1090 case LABEL_REF:
1091 return 0;
1092
1093 case PC:
1094 case CC0:
1095 return 1;
1096
1097 case MEM:
1098 if (RTX_UNCHANGING_P (x))
1099 return 0;
1100 if (modified_in_p (XEXP (x, 0), insn))
1101 return 1;
1102 if (memory_modified_in_insn_p (x, insn))
1103 return 1;
1104 return 0;
1105 break;
1106
1107 case REG:
1108 return reg_set_p (x, insn);
1109
1110 default:
1111 break;
1112 }
1113
1114 fmt = GET_RTX_FORMAT (code);
1115 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1116 {
1117 if (fmt[i] == 'e' && modified_in_p (XEXP (x, i), insn))
1118 return 1;
1119
1120 else if (fmt[i] == 'E')
1121 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
1122 if (modified_in_p (XVECEXP (x, i, j), insn))
1123 return 1;
1124 }
1125
1126 return 0;
1127 }
1128
1129 /* Return true if anything in insn X is (anti,output,true) dependent on
1130 anything in insn Y. */
1131
1132 int
1133 insn_dependent_p (rtx x, rtx y)
1134 {
1135 rtx tmp;
1136
1137 if (! INSN_P (x) || ! INSN_P (y))
1138 abort ();
1139
1140 tmp = PATTERN (y);
1141 note_stores (PATTERN (x), insn_dependent_p_1, &tmp);
1142 if (tmp == NULL_RTX)
1143 return 1;
1144
1145 tmp = PATTERN (x);
1146 note_stores (PATTERN (y), insn_dependent_p_1, &tmp);
1147 if (tmp == NULL_RTX)
1148 return 1;
1149
1150 return 0;
1151 }
1152
1153 /* A helper routine for insn_dependent_p called through note_stores. */
1154
1155 static void
1156 insn_dependent_p_1 (rtx x, rtx pat ATTRIBUTE_UNUSED, void *data)
1157 {
1158 rtx * pinsn = (rtx *) data;
1159
1160 if (*pinsn && reg_mentioned_p (x, *pinsn))
1161 *pinsn = NULL_RTX;
1162 }
1163 \f
1164 /* Helper function for set_of. */
1165 struct set_of_data
1166 {
1167 rtx found;
1168 rtx pat;
1169 };
1170
1171 static void
1172 set_of_1 (rtx x, rtx pat, void *data1)
1173 {
1174 struct set_of_data *data = (struct set_of_data *) (data1);
1175 if (rtx_equal_p (x, data->pat)
1176 || (GET_CODE (x) != MEM && reg_overlap_mentioned_p (data->pat, x)))
1177 data->found = pat;
1178 }
1179
1180 /* Given an INSN, return a SET or CLOBBER expression that does modify PAT
1181 (either directly or via STRICT_LOW_PART and similar modifiers). */
1182 rtx
1183 set_of (rtx pat, rtx insn)
1184 {
1185 struct set_of_data data;
1186 data.found = NULL_RTX;
1187 data.pat = pat;
1188 note_stores (INSN_P (insn) ? PATTERN (insn) : insn, set_of_1, &data);
1189 return data.found;
1190 }
1191 \f
1192 /* Given an INSN, return a SET expression if this insn has only a single SET.
1193 It may also have CLOBBERs, USEs, or SETs whose outputs
1194 will not be used, which we ignore. */
1195
1196 rtx
1197 single_set_2 (rtx insn, rtx pat)
1198 {
1199 rtx set = NULL;
1200 int set_verified = 1;
1201 int i;
1202
1203 if (GET_CODE (pat) == PARALLEL)
1204 {
1205 for (i = 0; i < XVECLEN (pat, 0); i++)
1206 {
1207 rtx sub = XVECEXP (pat, 0, i);
1208 switch (GET_CODE (sub))
1209 {
1210 case USE:
1211 case CLOBBER:
1212 break;
1213
1214 case SET:
1215 /* We can consider insns having multiple sets, where all
1216 but one are dead, as single set insns. In the common case
1217 only a single set is present in the pattern, so we want
1218 to avoid checking for REG_UNUSED notes unless necessary.
1219
1220 When we reach a set the first time, we just expect it to be
1221 the single set we are looking for; only when more sets are
1222 found in the insn do we check them. */
1223 if (!set_verified)
1224 {
1225 if (find_reg_note (insn, REG_UNUSED, SET_DEST (set))
1226 && !side_effects_p (set))
1227 set = NULL;
1228 else
1229 set_verified = 1;
1230 }
1231 if (!set)
1232 set = sub, set_verified = 0;
1233 else if (!find_reg_note (insn, REG_UNUSED, SET_DEST (sub))
1234 || side_effects_p (sub))
1235 return NULL_RTX;
1236 break;
1237
1238 default:
1239 return NULL_RTX;
1240 }
1241 }
1242 }
1243 return set;
1244 }
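
/* Editor's sketch (hypothetical helper): single_set, a small wrapper in
   rtl.h around single_set_2, is the standard way to grab the one live
   SET of an insn; NULL means the insn is not a simple assignment.  */

static rtx
example_simple_move_source (rtx insn)
{
  rtx set = single_set (insn);

  if (set && GET_CODE (SET_DEST (set)) == REG)
    return SET_SRC (set);
  return NULL_RTX;
}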
1245
1246 /* Given an INSN, return nonzero if it has more than one SET, else return
1247 zero. */
1248
1249 int
1250 multiple_sets (rtx insn)
1251 {
1252 int found;
1253 int i;
1254
1255 /* INSN must be an insn. */
1256 if (! INSN_P (insn))
1257 return 0;
1258
1259 /* Only a PARALLEL can have multiple SETs. */
1260 if (GET_CODE (PATTERN (insn)) == PARALLEL)
1261 {
1262 for (i = 0, found = 0; i < XVECLEN (PATTERN (insn), 0); i++)
1263 if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET)
1264 {
1265 /* If we have already found a SET, then return now. */
1266 if (found)
1267 return 1;
1268 else
1269 found = 1;
1270 }
1271 }
1272
1273 /* Either zero or one SET. */
1274 return 0;
1275 }
1276 \f
1277 /* Return nonzero if the destination of SET equals the source
1278 and there are no side effects. */
1279
1280 int
1281 set_noop_p (rtx set)
1282 {
1283 rtx src = SET_SRC (set);
1284 rtx dst = SET_DEST (set);
1285
1286 if (dst == pc_rtx && src == pc_rtx)
1287 return 1;
1288
1289 if (GET_CODE (dst) == MEM && GET_CODE (src) == MEM)
1290 return rtx_equal_p (dst, src) && !side_effects_p (dst);
1291
1292 if (GET_CODE (dst) == SIGN_EXTRACT
1293 || GET_CODE (dst) == ZERO_EXTRACT)
1294 return rtx_equal_p (XEXP (dst, 0), src)
1295 && ! BYTES_BIG_ENDIAN && XEXP (dst, 2) == const0_rtx
1296 && !side_effects_p (src);
1297
1298 if (GET_CODE (dst) == STRICT_LOW_PART)
1299 dst = XEXP (dst, 0);
1300
1301 if (GET_CODE (src) == SUBREG && GET_CODE (dst) == SUBREG)
1302 {
1303 if (SUBREG_BYTE (src) != SUBREG_BYTE (dst))
1304 return 0;
1305 src = SUBREG_REG (src);
1306 dst = SUBREG_REG (dst);
1307 }
1308
1309 return (GET_CODE (src) == REG && GET_CODE (dst) == REG
1310 && REGNO (src) == REGNO (dst));
1311 }
1312 \f
1313 /* Return nonzero if an insn consists only of SETs, each of which only sets a
1314 value to itself. */
1315
1316 int
1317 noop_move_p (rtx insn)
1318 {
1319 rtx pat = PATTERN (insn);
1320
1321 if (INSN_CODE (insn) == NOOP_MOVE_INSN_CODE)
1322 return 1;
1323
1324 /* Insns carrying these notes are useful later on. */
1325 if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
1326 return 0;
1327
1328 /* For now treat an insn with a REG_RETVAL note as a
1329 special insn which should not be considered a no-op. */
1330 if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
1331 return 0;
1332
1333 if (GET_CODE (pat) == SET && set_noop_p (pat))
1334 return 1;
1335
1336 if (GET_CODE (pat) == PARALLEL)
1337 {
1338 int i;
1339 /* If nothing but SETs of registers to themselves,
1340 this insn can also be deleted. */
1341 for (i = 0; i < XVECLEN (pat, 0); i++)
1342 {
1343 rtx tem = XVECEXP (pat, 0, i);
1344
1345 if (GET_CODE (tem) == USE
1346 || GET_CODE (tem) == CLOBBER)
1347 continue;
1348
1349 if (GET_CODE (tem) != SET || ! set_noop_p (tem))
1350 return 0;
1351 }
1352
1353 return 1;
1354 }
1355 return 0;
1356 }
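
/* Editor's sketch (hypothetical cleanup loop): passes use noop_move_p to
   drop moves that became trivial, e.g. after register allocation.  This
   assumes delete_insn (from cfgrtl.c) is available.  */

static void
example_delete_noop_moves (rtx first)
{
  rtx insn, next;

  for (insn = first; insn != 0; insn = next)
    {
      next = NEXT_INSN (insn);
      if (INSN_P (insn) && noop_move_p (insn))
	delete_insn (insn);
    }
}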
1357 \f
1358
1359 /* Return the last thing that X was assigned from before *PINSN. If VALID_TO
1360 is not NULL_RTX then verify that the object is not modified up to VALID_TO.
1361 If the object was modified, if we hit a partial assignment to X, or hit a
1362 CODE_LABEL first, return X. If we found an assignment, update *PINSN to
1363 point to it. ALLOW_HWREG is set to 1 if hardware registers are allowed to
1364 be the src. */
1365
1366 rtx
1367 find_last_value (rtx x, rtx *pinsn, rtx valid_to, int allow_hwreg)
1368 {
1369 rtx p;
1370
1371 for (p = PREV_INSN (*pinsn); p && GET_CODE (p) != CODE_LABEL;
1372 p = PREV_INSN (p))
1373 if (INSN_P (p))
1374 {
1375 rtx set = single_set (p);
1376 rtx note = find_reg_note (p, REG_EQUAL, NULL_RTX);
1377
1378 if (set && rtx_equal_p (x, SET_DEST (set)))
1379 {
1380 rtx src = SET_SRC (set);
1381
1382 if (note && GET_CODE (XEXP (note, 0)) != EXPR_LIST)
1383 src = XEXP (note, 0);
1384
1385 if ((valid_to == NULL_RTX
1386 || ! modified_between_p (src, PREV_INSN (p), valid_to))
1387 /* Reject hard registers because we don't usually want
1388 to use them; we'd rather use a pseudo. */
1389 && (! (GET_CODE (src) == REG
1390 && REGNO (src) < FIRST_PSEUDO_REGISTER) || allow_hwreg))
1391 {
1392 *pinsn = p;
1393 return src;
1394 }
1395 }
1396
1397 /* If X is set in a non-simple way, we don't have a value. */
1398 if (reg_set_p (x, p))
1399 break;
1400 }
1401
1402 return x;
1403 }
1404 \f
1405 /* Return nonzero if register in range [REGNO, ENDREGNO)
1406 appears either explicitly or implicitly in X
1407 other than being stored into.
1408
1409 References contained within the substructure at LOC do not count.
1410 LOC may be zero, meaning don't ignore anything. */
1411
1412 int
1413 refers_to_regno_p (unsigned int regno, unsigned int endregno, rtx x,
1414 rtx *loc)
1415 {
1416 int i;
1417 unsigned int x_regno;
1418 RTX_CODE code;
1419 const char *fmt;
1420
1421 repeat:
1422 /* The contents of a REG_NONNEG note are always zero, so we must come here
1423 upon repeat in case the last REG_NOTE is a REG_NONNEG note. */
1424 if (x == 0)
1425 return 0;
1426
1427 code = GET_CODE (x);
1428
1429 switch (code)
1430 {
1431 case REG:
1432 x_regno = REGNO (x);
1433
1434 /* If we are modifying the stack, frame, or argument pointer, it will
1435 clobber a virtual register. In fact, we could be more precise,
1436 but it isn't worth it. */
1437 if ((x_regno == STACK_POINTER_REGNUM
1438 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
1439 || x_regno == ARG_POINTER_REGNUM
1440 #endif
1441 || x_regno == FRAME_POINTER_REGNUM)
1442 && regno >= FIRST_VIRTUAL_REGISTER && regno <= LAST_VIRTUAL_REGISTER)
1443 return 1;
1444
1445 return (endregno > x_regno
1446 && regno < x_regno + (x_regno < FIRST_PSEUDO_REGISTER
1447 ? hard_regno_nregs[x_regno][GET_MODE (x)]
1448 : 1));
1449
1450 case SUBREG:
1451 /* If this is a SUBREG of a hard reg, we can see exactly which
1452 registers are being modified. Otherwise, handle normally. */
1453 if (GET_CODE (SUBREG_REG (x)) == REG
1454 && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
1455 {
1456 unsigned int inner_regno = subreg_regno (x);
1457 unsigned int inner_endregno
1458 = inner_regno + (inner_regno < FIRST_PSEUDO_REGISTER
1459 ? hard_regno_nregs[inner_regno][GET_MODE (x)] : 1);
1460
1461 return endregno > inner_regno && regno < inner_endregno;
1462 }
1463 break;
1464
1465 case CLOBBER:
1466 case SET:
1467 if (&SET_DEST (x) != loc
1468 /* Note setting a SUBREG counts as referring to the REG it is in for
1469 a pseudo but not for hard registers since we can
1470 treat each word individually. */
1471 && ((GET_CODE (SET_DEST (x)) == SUBREG
1472 && loc != &SUBREG_REG (SET_DEST (x))
1473 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG
1474 && REGNO (SUBREG_REG (SET_DEST (x))) >= FIRST_PSEUDO_REGISTER
1475 && refers_to_regno_p (regno, endregno,
1476 SUBREG_REG (SET_DEST (x)), loc))
1477 || (GET_CODE (SET_DEST (x)) != REG
1478 && refers_to_regno_p (regno, endregno, SET_DEST (x), loc))))
1479 return 1;
1480
1481 if (code == CLOBBER || loc == &SET_SRC (x))
1482 return 0;
1483 x = SET_SRC (x);
1484 goto repeat;
1485
1486 default:
1487 break;
1488 }
1489
1490 /* X does not match, so try its subexpressions. */
1491
1492 fmt = GET_RTX_FORMAT (code);
1493 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1494 {
1495 if (fmt[i] == 'e' && loc != &XEXP (x, i))
1496 {
1497 if (i == 0)
1498 {
1499 x = XEXP (x, 0);
1500 goto repeat;
1501 }
1502 else
1503 if (refers_to_regno_p (regno, endregno, XEXP (x, i), loc))
1504 return 1;
1505 }
1506 else if (fmt[i] == 'E')
1507 {
1508 int j;
1509 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
1510 if (loc != &XVECEXP (x, i, j)
1511 && refers_to_regno_p (regno, endregno, XVECEXP (x, i, j), loc))
1512 return 1;
1513 }
1514 }
1515 return 0;
1516 }
1517
1518 /* Nonzero if modifying X will affect IN. If X is a register or a SUBREG,
1519 we check if any register number in X conflicts with the relevant register
1520 numbers. If X is a constant, return 0. If X is a MEM, return 1 iff IN
1521 contains a MEM (we don't bother checking for memory addresses that can't
1522 conflict because we expect this to be a rare case). */
1523
1524 int
1525 reg_overlap_mentioned_p (rtx x, rtx in)
1526 {
1527 unsigned int regno, endregno;
1528
1529 /* If either argument is a constant, then modifying X can not
1530 affect IN. Here we test IN; the CONSTANT_P (x) check is
1531 folded into the switch statement below. */
1532 if (CONSTANT_P (in))
1533 return 0;
1534
1535 recurse:
1536 switch (GET_CODE (x))
1537 {
1538 case STRICT_LOW_PART:
1539 case ZERO_EXTRACT:
1540 case SIGN_EXTRACT:
1541 /* Overly conservative. */
1542 x = XEXP (x, 0);
1543 goto recurse;
1544
1545 case SUBREG:
1546 regno = REGNO (SUBREG_REG (x));
1547 if (regno < FIRST_PSEUDO_REGISTER)
1548 regno = subreg_regno (x);
1549 goto do_reg;
1550
1551 case REG:
1552 regno = REGNO (x);
1553 do_reg:
1554 endregno = regno + (regno < FIRST_PSEUDO_REGISTER
1555 ? hard_regno_nregs[regno][GET_MODE (x)] : 1);
1556 return refers_to_regno_p (regno, endregno, in, (rtx*) 0);
1557
1558 case MEM:
1559 {
1560 const char *fmt;
1561 int i;
1562
1563 if (GET_CODE (in) == MEM)
1564 return 1;
1565
1566 fmt = GET_RTX_FORMAT (GET_CODE (in));
1567 for (i = GET_RTX_LENGTH (GET_CODE (in)) - 1; i >= 0; i--)
1568 if (fmt[i] == 'e' && reg_overlap_mentioned_p (x, XEXP (in, i)))
1569 return 1;
1570
1571 return 0;
1572 }
1573
1574 case SCRATCH:
1575 case PC:
1576 case CC0:
1577 return reg_mentioned_p (x, in);
1578
1579 case PARALLEL:
1580 {
1581 int i;
1582
1583 /* If any register in here overlaps anything in IN, we return true. */
1584 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
1585 if (XEXP (XVECEXP (x, 0, i), 0) != 0
1586 && reg_overlap_mentioned_p (XEXP (XVECEXP (x, 0, i), 0), in))
1587 return 1;
1588 return 0;
1589 }
1590
1591 default:
1592 #ifdef ENABLE_CHECKING
1593 if (!CONSTANT_P (x))
1594 abort ();
1595 #endif
1596
1597 return 0;
1598 }
1599 }
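
/* Editor's sketch (hypothetical helper): unlike reg_mentioned_p,
   reg_overlap_mentioned_p catches partial overlaps of multi-word hard
   registers, so it is the right test when asking whether two single-set
   insns can be interchanged.  */

static int
example_sets_independent_p (rtx a, rtx b)
{
  /* Neither destination may overlap anything the other insn mentions.  */
  rtx sa = single_set (a), sb = single_set (b);

  return sa && sb
	 && ! reg_overlap_mentioned_p (SET_DEST (sa), PATTERN (b))
	 && ! reg_overlap_mentioned_p (SET_DEST (sb), PATTERN (a));
}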
1600 \f
1601 /* Return the last value to which REG was set prior to INSN. If we can't
1602 find it easily, return 0.
1603
1604 We only return a REG, SUBREG, or constant because it is too hard to
1605 check if a MEM remains unchanged. */
1606
1607 rtx
1608 reg_set_last (rtx x, rtx insn)
1609 {
1610 rtx orig_insn = insn;
1611
1612 /* Scan backwards from INSN. Stop when we reach a CODE_LABEL, or,
1613 if X is a hard register, when we reach a CALL_INSN.
1614
1615 If we find a set of X, ensure that its SET_SRC remains unchanged
1616 between that set and ORIG_INSN. */
1617
1618 /* We compare with <= here, so the first pseudo register number is
1619 conservatively treated like a hard register. */
1620 for (;
1621 insn && GET_CODE (insn) != CODE_LABEL
1622 && ! (GET_CODE (insn) == CALL_INSN
1623 && REGNO (x) <= FIRST_PSEUDO_REGISTER);
1624 insn = PREV_INSN (insn))
1625 if (INSN_P (insn))
1626 {
1627 rtx set = set_of (x, insn);
1628 /* OK, this insn modifies our register. See if we understand it. */
1629 if (set)
1630 {
1631 rtx last_value;
1632 if (GET_CODE (set) != SET || SET_DEST (set) != x)
1633 return 0;
1634 last_value = SET_SRC (set);
1635 if (CONSTANT_P (last_value)
1636 || ((GET_CODE (last_value) == REG
1637 || GET_CODE (last_value) == SUBREG)
1638 && ! reg_set_between_p (last_value,
1639 insn, orig_insn)))
1640 return last_value;
1641 else
1642 return 0;
1643 }
1644 }
1645
1646 return 0;
1647 }
1648 \f
1649 /* Call FUN on each register or MEM that is stored into or clobbered by X.
1650 (X would be the pattern of an insn).
1651 FUN receives two arguments:
1652 the REG, MEM, CC0 or PC being stored in or clobbered,
1653 the SET or CLOBBER rtx that does the store.
1654
1655 If the item being stored in or clobbered is a SUBREG of a hard register,
1656 the SUBREG will be passed. */
1657
1658 void
1659 note_stores (rtx x, void (*fun) (rtx, rtx, void *), void *data)
1660 {
1661 int i;
1662
1663 if (GET_CODE (x) == COND_EXEC)
1664 x = COND_EXEC_CODE (x);
1665
1666 if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER)
1667 {
1668 rtx dest = SET_DEST (x);
1669
1670 while ((GET_CODE (dest) == SUBREG
1671 && (GET_CODE (SUBREG_REG (dest)) != REG
1672 || REGNO (SUBREG_REG (dest)) >= FIRST_PSEUDO_REGISTER))
1673 || GET_CODE (dest) == ZERO_EXTRACT
1674 || GET_CODE (dest) == SIGN_EXTRACT
1675 || GET_CODE (dest) == STRICT_LOW_PART)
1676 dest = XEXP (dest, 0);
1677
1678 /* If we have a PARALLEL, SET_DEST is a list of EXPR_LIST expressions,
1679 each of whose first operand is a register. */
1680 if (GET_CODE (dest) == PARALLEL)
1681 {
1682 for (i = XVECLEN (dest, 0) - 1; i >= 0; i--)
1683 if (XEXP (XVECEXP (dest, 0, i), 0) != 0)
1684 (*fun) (XEXP (XVECEXP (dest, 0, i), 0), x, data);
1685 }
1686 else
1687 (*fun) (dest, x, data);
1688 }
1689
1690 else if (GET_CODE (x) == PARALLEL)
1691 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
1692 note_stores (XVECEXP (x, 0, i), fun, data);
1693 }
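
/* Editor's sketch: the canonical note_stores idiom -- a small callback
   plus a driver that records every hard register stored or clobbered by
   an insn into a regset.  The names are hypothetical.  */

static void
example_record_hard_reg_store (rtx dest, rtx set ATTRIBUTE_UNUSED,
			       void *data)
{
  regset stored = (regset) data;

  if (GET_CODE (dest) == REG && REGNO (dest) < FIRST_PSEUDO_REGISTER)
    SET_REGNO_REG_SET (stored, REGNO (dest));
}

static void
example_collect_stores (rtx insn, regset stored)
{
  if (INSN_P (insn))
    note_stores (PATTERN (insn), example_record_hard_reg_store, stored);
}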
1694 \f
1695 /* Like note_stores, but call FUN for each expression that is being
1696 referenced in PBODY, a pointer to the PATTERN of an insn. We only call
1697 FUN for each expression, not any interior subexpressions. FUN receives a
1698 pointer to the expression and the DATA passed to this function.
1699
1700 Note that this is not quite the same test as that done in reg_referenced_p
1701 since that considers something as being referenced if it is being
1702 partially set, while we do not. */
1703
1704 void
1705 note_uses (rtx *pbody, void (*fun) (rtx *, void *), void *data)
1706 {
1707 rtx body = *pbody;
1708 int i;
1709
1710 switch (GET_CODE (body))
1711 {
1712 case COND_EXEC:
1713 (*fun) (&COND_EXEC_TEST (body), data);
1714 note_uses (&COND_EXEC_CODE (body), fun, data);
1715 return;
1716
1717 case PARALLEL:
1718 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
1719 note_uses (&XVECEXP (body, 0, i), fun, data);
1720 return;
1721
1722 case USE:
1723 (*fun) (&XEXP (body, 0), data);
1724 return;
1725
1726 case ASM_OPERANDS:
1727 for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
1728 (*fun) (&ASM_OPERANDS_INPUT (body, i), data);
1729 return;
1730
1731 case TRAP_IF:
1732 (*fun) (&TRAP_CONDITION (body), data);
1733 return;
1734
1735 case PREFETCH:
1736 (*fun) (&XEXP (body, 0), data);
1737 return;
1738
1739 case UNSPEC:
1740 case UNSPEC_VOLATILE:
1741 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
1742 (*fun) (&XVECEXP (body, 0, i), data);
1743 return;
1744
1745 case CLOBBER:
1746 if (GET_CODE (XEXP (body, 0)) == MEM)
1747 (*fun) (&XEXP (XEXP (body, 0), 0), data);
1748 return;
1749
1750 case SET:
1751 {
1752 rtx dest = SET_DEST (body);
1753
1754 /* For sets we replace everything in source plus registers in memory
1755 expression in store and operands of a ZERO_EXTRACT. */
1756 (*fun) (&SET_SRC (body), data);
1757
1758 if (GET_CODE (dest) == ZERO_EXTRACT)
1759 {
1760 (*fun) (&XEXP (dest, 1), data);
1761 (*fun) (&XEXP (dest, 2), data);
1762 }
1763
1764 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART)
1765 dest = XEXP (dest, 0);
1766
1767 if (GET_CODE (dest) == MEM)
1768 (*fun) (&XEXP (dest, 0), data);
1769 }
1770 return;
1771
1772 default:
1773 /* All the other possibilities never store. */
1774 (*fun) (pbody, data);
1775 return;
1776 }
1777 }
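
/* Editor's sketch: note_uses walks only the *top-level* used expressions,
   so a callback wanting every used register must still recurse, e.g. with
   for_each_rtx.  Here we just count the top-level uses; the names are
   hypothetical.  */

static void
example_count_use (rtx *px ATTRIBUTE_UNUSED, void *data)
{
  (*(int *) data)++;
}

static int
example_num_top_level_uses (rtx insn)
{
  int n = 0;

  note_uses (&PATTERN (insn), example_count_use, &n);
  return n;
}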
1778 \f
1779 /* Return nonzero if X's old contents don't survive after INSN.
1780 This will be true if X is (cc0) or if X is a register and
1781 X dies in INSN or INSN entirely sets X.
1782
1783 "Entirely set" means set directly and not through a SUBREG,
1784 ZERO_EXTRACT or SIGN_EXTRACT, so no trace of the old contents remains.
1785 Likewise, REG_INC does not count.
1786
1787 REG may be a hard or pseudo reg. Renumbering is not taken into account,
1788 but for this use that makes no difference, since regs don't overlap
1789 during their lifetimes. Therefore, this function may be used
1790 at any time after deaths have been computed (in flow.c).
1791
1792 If REG is a hard reg that occupies multiple machine registers, this
1793 function will only return 1 if each of those registers will be replaced
1794 by INSN. */
1795
1796 int
1797 dead_or_set_p (rtx insn, rtx x)
1798 {
1799 unsigned int regno, last_regno;
1800 unsigned int i;
1801
1802 /* Can't use cc0_rtx below since this file is used by genattrtab.c. */
1803 if (GET_CODE (x) == CC0)
1804 return 1;
1805
1806 if (GET_CODE (x) != REG)
1807 abort ();
1808
1809 regno = REGNO (x);
1810 last_regno = (regno >= FIRST_PSEUDO_REGISTER ? regno
1811 : regno + hard_regno_nregs[regno][GET_MODE (x)] - 1);
1812
1813 for (i = regno; i <= last_regno; i++)
1814 if (! dead_or_set_regno_p (insn, i))
1815 return 0;
1816
1817 return 1;
1818 }
1819
1820 /* Utility function for dead_or_set_p to check an individual register. Also
1821 called from flow.c. */
1822
1823 int
1824 dead_or_set_regno_p (rtx insn, unsigned int test_regno)
1825 {
1826 unsigned int regno, endregno;
1827 rtx pattern;
1828
1829 /* See if there is a death note for something that includes TEST_REGNO. */
1830 if (find_regno_note (insn, REG_DEAD, test_regno))
1831 return 1;
1832
1833 if (GET_CODE (insn) == CALL_INSN
1834 && find_regno_fusage (insn, CLOBBER, test_regno))
1835 return 1;
1836
1837 pattern = PATTERN (insn);
1838
1839 if (GET_CODE (pattern) == COND_EXEC)
1840 pattern = COND_EXEC_CODE (pattern);
1841
1842 if (GET_CODE (pattern) == SET)
1843 {
1844 rtx dest = SET_DEST (pattern);
1845
1846 /* A value is totally replaced if it is the destination or the
1847 destination is a SUBREG of REGNO that does not change the number of
1848 words in it. */
1849 if (GET_CODE (dest) == SUBREG
1850 && (((GET_MODE_SIZE (GET_MODE (dest))
1851 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
1852 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
1853 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)))
1854 dest = SUBREG_REG (dest);
1855
1856 if (GET_CODE (dest) != REG)
1857 return 0;
1858
1859 regno = REGNO (dest);
1860 endregno = (regno >= FIRST_PSEUDO_REGISTER ? regno + 1
1861 : regno + hard_regno_nregs[regno][GET_MODE (dest)]);
1862
1863 return (test_regno >= regno && test_regno < endregno);
1864 }
1865 else if (GET_CODE (pattern) == PARALLEL)
1866 {
1867 int i;
1868
1869 for (i = XVECLEN (pattern, 0) - 1; i >= 0; i--)
1870 {
1871 rtx body = XVECEXP (pattern, 0, i);
1872
1873 if (GET_CODE (body) == COND_EXEC)
1874 body = COND_EXEC_CODE (body);
1875
1876 if (GET_CODE (body) == SET || GET_CODE (body) == CLOBBER)
1877 {
1878 rtx dest = SET_DEST (body);
1879
1880 if (GET_CODE (dest) == SUBREG
1881 && (((GET_MODE_SIZE (GET_MODE (dest))
1882 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
1883 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
1884 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)))
1885 dest = SUBREG_REG (dest);
1886
1887 if (GET_CODE (dest) != REG)
1888 continue;
1889
1890 regno = REGNO (dest);
1891 endregno = (regno >= FIRST_PSEUDO_REGISTER ? regno + 1
1892 : regno + hard_regno_nregs[regno][GET_MODE (dest)]);
1893
1894 if (test_regno >= regno && test_regno < endregno)
1895 return 1;
1896 }
1897 }
1898 }
1899
1900 return 0;
1901 }
1902
1903 /* Return the reg-note of kind KIND in insn INSN, if there is one.
1904 If DATUM is nonzero, look for one whose datum is DATUM. */
1905
1906 rtx
1907 find_reg_note (rtx insn, enum reg_note kind, rtx datum)
1908 {
1909 rtx link;
1910
1911 /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN. */
1912 if (! INSN_P (insn))
1913 return 0;
1914 if (datum == 0)
1915 {
1916 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1917 if (REG_NOTE_KIND (link) == kind)
1918 return link;
1919 return 0;
1920 }
1921
1922 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1923 if (REG_NOTE_KIND (link) == kind && datum == XEXP (link, 0))
1924 return link;
1925 return 0;
1926 }
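
/* Editor's sketch (hypothetical helper): the usual find_reg_note idiom --
   fetching the value an insn is known to compute from its REG_EQUAL
   note, if any.  */

static rtx
example_known_equal_value (rtx insn)
{
  rtx note = find_reg_note (insn, REG_EQUAL, NULL_RTX);

  return note ? XEXP (note, 0) : NULL_RTX;
}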
1927
1928 /* Return the reg-note of kind KIND in insn INSN which applies to register
1929 number REGNO, if any. Return 0 if there is no such reg-note. Note that
1930 the REGNO of this NOTE need not be REGNO if REGNO is a hard register;
1931 it might be the case that the note overlaps REGNO. */
1932
1933 rtx
1934 find_regno_note (rtx insn, enum reg_note kind, unsigned int regno)
1935 {
1936 rtx link;
1937
1938 /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN. */
1939 if (! INSN_P (insn))
1940 return 0;
1941
1942 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1943 if (REG_NOTE_KIND (link) == kind
1944 /* Verify that it is a register, so that scratch and MEM won't cause a
1945 problem here. */
1946 && GET_CODE (XEXP (link, 0)) == REG
1947 && REGNO (XEXP (link, 0)) <= regno
1948 && ((REGNO (XEXP (link, 0))
1949 + (REGNO (XEXP (link, 0)) >= FIRST_PSEUDO_REGISTER ? 1
1950 : hard_regno_nregs[REGNO (XEXP (link, 0))]
1951 [GET_MODE (XEXP (link, 0))]))
1952 > regno))
1953 return link;
1954 return 0;
1955 }
1956
1957 /* Return a REG_EQUIV or REG_EQUAL note if insn has only a single set and
1958 has such a note. */
1959
1960 rtx
1961 find_reg_equal_equiv_note (rtx insn)
1962 {
1963 rtx link;
1964
1965 if (!INSN_P (insn))
1966 return 0;
1967 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1968 if (REG_NOTE_KIND (link) == REG_EQUAL
1969 || REG_NOTE_KIND (link) == REG_EQUIV)
1970 {
1971 if (single_set (insn) == 0)
1972 return 0;
1973 return link;
1974 }
1975 return NULL;
1976 }
1977
1978 /* Return true if DATUM, or any overlap of DATUM, of kind CODE is found
1979 in the CALL_INSN_FUNCTION_USAGE information of INSN. */
1980
1981 int
1982 find_reg_fusage (rtx insn, enum rtx_code code, rtx datum)
1983 {
1984 /* If it's not a CALL_INSN, it can't possibly have a
1985 CALL_INSN_FUNCTION_USAGE field, so don't bother checking. */
1986 if (GET_CODE (insn) != CALL_INSN)
1987 return 0;
1988
1989 if (! datum)
1990 abort ();
1991
1992 if (GET_CODE (datum) != REG)
1993 {
1994 rtx link;
1995
1996 for (link = CALL_INSN_FUNCTION_USAGE (insn);
1997 link;
1998 link = XEXP (link, 1))
1999 if (GET_CODE (XEXP (link, 0)) == code
2000 && rtx_equal_p (datum, XEXP (XEXP (link, 0), 0)))
2001 return 1;
2002 }
2003 else
2004 {
2005 unsigned int regno = REGNO (datum);
2006
2007 /* CALL_INSN_FUNCTION_USAGE information cannot contain references
2008 to pseudo registers, so don't bother checking. */
2009
2010 if (regno < FIRST_PSEUDO_REGISTER)
2011 {
2012 unsigned int end_regno
2013 = regno + hard_regno_nregs[regno][GET_MODE (datum)];
2014 unsigned int i;
2015
2016 for (i = regno; i < end_regno; i++)
2017 if (find_regno_fusage (insn, code, i))
2018 return 1;
2019 }
2020 }
2021
2022 return 0;
2023 }
2024
2025 /* Return true if REGNO, or any overlap of REGNO, of kind CODE is found
2026 in the CALL_INSN_FUNCTION_USAGE information of INSN. */
2027
2028 int
2029 find_regno_fusage (rtx insn, enum rtx_code code, unsigned int regno)
2030 {
2031 rtx link;
2032
2033 /* CALL_INSN_FUNCTION_USAGE information cannot contain references
2034 to pseudo registers, so don't bother checking. */
2035
2036 if (regno >= FIRST_PSEUDO_REGISTER
2037 || GET_CODE (insn) != CALL_INSN)
2038 return 0;
2039
2040 for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
2041 {
2042 unsigned int regnote;
2043 rtx op, reg;
2044
2045 if (GET_CODE (op = XEXP (link, 0)) == code
2046 && GET_CODE (reg = XEXP (op, 0)) == REG
2047 && (regnote = REGNO (reg)) <= regno
2048 && regnote + hard_regno_nregs[regnote][GET_MODE (reg)] > regno)
2049 return 1;
2050 }
2051
2052 return 0;
2053 }
2054
2055 /* Return true if INSN is a call to a pure function. */
2056
2057 int
2058 pure_call_p (rtx insn)
2059 {
2060 rtx link;
2061
2062 if (GET_CODE (insn) != CALL_INSN || ! CONST_OR_PURE_CALL_P (insn))
2063 return 0;
2064
2065 /* Look for the note that differentiates const and pure functions. */
2066 for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
2067 {
2068 rtx u, m;
2069
2070 if (GET_CODE (u = XEXP (link, 0)) == USE
2071 && GET_CODE (m = XEXP (u, 0)) == MEM && GET_MODE (m) == BLKmode
2072 && GET_CODE (XEXP (m, 0)) == SCRATCH)
2073 return 1;
2074 }
2075
2076 return 0;
2077 }
2078 \f
2079 /* Remove register note NOTE from the REG_NOTES of INSN. */
2080
2081 void
2082 remove_note (rtx insn, rtx note)
2083 {
2084 rtx link;
2085
2086 if (note == NULL_RTX)
2087 return;
2088
2089 if (REG_NOTES (insn) == note)
2090 {
2091 REG_NOTES (insn) = XEXP (note, 1);
2092 return;
2093 }
2094
2095 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
2096 if (XEXP (link, 1) == note)
2097 {
2098 XEXP (link, 1) = XEXP (note, 1);
2099 return;
2100 }
2101
2102 abort ();
2103 }
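
/* Editor's sketch (hypothetical helper): find_reg_note and remove_note
   pair naturally when a transformation invalidates recorded knowledge
   about an insn, e.g. after rewriting its SET_SRC.  */

static void
example_drop_equal_notes (rtx insn)
{
  rtx note;

  while ((note = find_reg_note (insn, REG_EQUAL, NULL_RTX)) != 0)
    remove_note (insn, note);
}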
2104
2105 /* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
2106 return 1 if it is found. A simple equality test is used to determine if
2107 NODE matches. */
2108
2109 int
2110 in_expr_list_p (rtx listp, rtx node)
2111 {
2112 rtx x;
2113
2114 for (x = listp; x; x = XEXP (x, 1))
2115 if (node == XEXP (x, 0))
2116 return 1;
2117
2118 return 0;
2119 }
2120
2121 /* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
2122 remove that entry from the list if it is found.
2123
2124 A simple equality test is used to determine if NODE matches. */
2125
2126 void
2127 remove_node_from_expr_list (rtx node, rtx *listp)
2128 {
2129 rtx temp = *listp;
2130 rtx prev = NULL_RTX;
2131
2132 while (temp)
2133 {
2134 if (node == XEXP (temp, 0))
2135 {
2136 /* Splice the node out of the list. */
2137 if (prev)
2138 XEXP (prev, 1) = XEXP (temp, 1);
2139 else
2140 *listp = XEXP (temp, 1);
2141
2142 return;
2143 }
2144
2145 prev = temp;
2146 temp = XEXP (temp, 1);
2147 }
2148 }
2149 \f
2150 /* Nonzero if X contains any volatile instructions. These are instructions
2151 which may cause unpredictable machine state, and thus no
2152 instructions should be moved or combined across them. This includes
2153 only volatile asms and UNSPEC_VOLATILE instructions. */
2154
2155 int
2156 volatile_insn_p (rtx x)
2157 {
2158 RTX_CODE code;
2159
2160 code = GET_CODE (x);
2161 switch (code)
2162 {
2163 case LABEL_REF:
2164 case SYMBOL_REF:
2165 case CONST_INT:
2166 case CONST:
2167 case CONST_DOUBLE:
2168 case CONST_VECTOR:
2169 case CC0:
2170 case PC:
2171 case REG:
2172 case SCRATCH:
2173 case CLOBBER:
2174 case ADDR_VEC:
2175 case ADDR_DIFF_VEC:
2176 case CALL:
2177 case MEM:
2178 return 0;
2179
2180 case UNSPEC_VOLATILE:
2181 /* case TRAP_IF: This isn't clear yet. */
2182 return 1;
2183
2184 case ASM_INPUT:
2185 case ASM_OPERANDS:
2186 if (MEM_VOLATILE_P (x))
2187 return 1;
2188
2189 default:
2190 break;
2191 }
2192
2193 /* Recursively scan the operands of this expression. */
2194
2195 {
2196 const char *fmt = GET_RTX_FORMAT (code);
2197 int i;
2198
2199 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2200 {
2201 if (fmt[i] == 'e')
2202 {
2203 if (volatile_insn_p (XEXP (x, i)))
2204 return 1;
2205 }
2206 else if (fmt[i] == 'E')
2207 {
2208 int j;
2209 for (j = 0; j < XVECLEN (x, i); j++)
2210 if (volatile_insn_p (XVECEXP (x, i, j)))
2211 return 1;
2212 }
2213 }
2214 }
2215 return 0;
2216 }
2217
2218 /* Nonzero if X contains any volatile memory references,
2219 UNSPEC_VOLATILE operations, or volatile ASM_OPERANDS expressions. */
2220
2221 int
2222 volatile_refs_p (rtx x)
2223 {
2224 RTX_CODE code;
2225
2226 code = GET_CODE (x);
2227 switch (code)
2228 {
2229 case LABEL_REF:
2230 case SYMBOL_REF:
2231 case CONST_INT:
2232 case CONST:
2233 case CONST_DOUBLE:
2234 case CONST_VECTOR:
2235 case CC0:
2236 case PC:
2237 case REG:
2238 case SCRATCH:
2239 case CLOBBER:
2240 case ADDR_VEC:
2241 case ADDR_DIFF_VEC:
2242 return 0;
2243
2244 case UNSPEC_VOLATILE:
2245 return 1;
2246
2247 case MEM:
2248 case ASM_INPUT:
2249 case ASM_OPERANDS:
2250 if (MEM_VOLATILE_P (x))
2251 return 1;
2252
2253 default:
2254 break;
2255 }
2256
2257 /* Recursively scan the operands of this expression. */
2258
2259 {
2260 const char *fmt = GET_RTX_FORMAT (code);
2261 int i;
2262
2263 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2264 {
2265 if (fmt[i] == 'e')
2266 {
2267 if (volatile_refs_p (XEXP (x, i)))
2268 return 1;
2269 }
2270 else if (fmt[i] == 'E')
2271 {
2272 int j;
2273 for (j = 0; j < XVECLEN (x, i); j++)
2274 if (volatile_refs_p (XVECEXP (x, i, j)))
2275 return 1;
2276 }
2277 }
2278 }
2279 return 0;
2280 }
2281
2282 /* Similar to above, except that it also rejects register pre- and post-
2283 incrementing. */
2284
2285 int
2286 side_effects_p (rtx x)
2287 {
2288 RTX_CODE code;
2289
2290 code = GET_CODE (x);
2291 switch (code)
2292 {
2293 case LABEL_REF:
2294 case SYMBOL_REF:
2295 case CONST_INT:
2296 case CONST:
2297 case CONST_DOUBLE:
2298 case CONST_VECTOR:
2299 case CC0:
2300 case PC:
2301 case REG:
2302 case SCRATCH:
2303 case ADDR_VEC:
2304 case ADDR_DIFF_VEC:
2305 return 0;
2306
2307 case CLOBBER:
2308 /* Reject CLOBBER with a non-VOID mode. These are made by combine.c
2309 when some combination can't be done. If we see one, don't think
2310 that we can simplify the expression. */
2311 return (GET_MODE (x) != VOIDmode);
2312
2313 case PRE_INC:
2314 case PRE_DEC:
2315 case POST_INC:
2316 case POST_DEC:
2317 case PRE_MODIFY:
2318 case POST_MODIFY:
2319 case CALL:
2320 case UNSPEC_VOLATILE:
2321 /* case TRAP_IF: This isn't clear yet. */
2322 return 1;
2323
2324 case MEM:
2325 case ASM_INPUT:
2326 case ASM_OPERANDS:
2327 if (MEM_VOLATILE_P (x))
2328 return 1;
2329
2330 default:
2331 break;
2332 }
2333
2334 /* Recursively scan the operands of this expression. */
2335
2336 {
2337 const char *fmt = GET_RTX_FORMAT (code);
2338 int i;
2339
2340 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2341 {
2342 if (fmt[i] == 'e')
2343 {
2344 if (side_effects_p (XEXP (x, i)))
2345 return 1;
2346 }
2347 else if (fmt[i] == 'E')
2348 {
2349 int j;
2350 for (j = 0; j < XVECLEN (x, i); j++)
2351 if (side_effects_p (XVECEXP (x, i, j)))
2352 return 1;
2353 }
2354 }
2355 }
2356 return 0;
2357 }
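
/* Illustrative sketch (not part of GCC): a single-set insn whose source
   has no side effects and whose REG destination is noted unused may be
   deleted.  INSN and SET are assumed to be in scope; delete_insn is the
   usual deletion routine.

       if (!side_effects_p (SET_SRC (set))
           && REG_P (SET_DEST (set))
           && find_regno_note (insn, REG_UNUSED, REGNO (SET_DEST (set))))
         delete_insn (insn);
*/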
2358 \f
2359 /* Return nonzero if evaluating rtx X might cause a trap. */
2360
2361 int
2362 may_trap_p (rtx x)
2363 {
2364 int i;
2365 enum rtx_code code;
2366 const char *fmt;
2367
2368 if (x == 0)
2369 return 0;
2370 code = GET_CODE (x);
2371 switch (code)
2372 {
2373 /* Handle these cases quickly. */
2374 case CONST_INT:
2375 case CONST_DOUBLE:
2376 case CONST_VECTOR:
2377 case SYMBOL_REF:
2378 case LABEL_REF:
2379 case CONST:
2380 case PC:
2381 case CC0:
2382 case REG:
2383 case SCRATCH:
2384 return 0;
2385
2386 case ASM_INPUT:
2387 case UNSPEC_VOLATILE:
2388 case TRAP_IF:
2389 return 1;
2390
2391 case ASM_OPERANDS:
2392 return MEM_VOLATILE_P (x);
2393
2394 /* Memory ref can trap unless it's a static var or a stack slot. */
2395 case MEM:
2396 if (MEM_NOTRAP_P (x))
2397 return 0;
2398 return rtx_addr_can_trap_p (XEXP (x, 0));
2399
2400 /* Division by a non-constant might trap. */
2401 case DIV:
2402 case MOD:
2403 case UDIV:
2404 case UMOD:
2405 if (HONOR_SNANS (GET_MODE (x)))
2406 return 1;
2407 if (! CONSTANT_P (XEXP (x, 1))
2408 || (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT
2409 && flag_trapping_math))
2410 return 1;
2411 if (XEXP (x, 1) == const0_rtx)
2412 return 1;
2413 break;
2414
2415 case EXPR_LIST:
2416 /* An EXPR_LIST is used to represent a function call. This
2417 certainly may trap. */
2418 return 1;
2419
2420 case GE:
2421 case GT:
2422 case LE:
2423 case LT:
2424 case COMPARE:
2425 /* Some floating point comparisons may trap. */
2426 if (!flag_trapping_math)
2427 break;
2428 /* ??? There is no machine independent way to check for tests that trap
2429 when COMPARE is used, though many targets do make this distinction.
2430 For instance, sparc uses CCFPE for compares which generate exceptions
2431 and CCFP for compares which do not generate exceptions. */
2432 if (HONOR_NANS (GET_MODE (x)))
2433 return 1;
2434 /* But often the compare has some CC mode, so check operand
2435 modes as well. */
2436 if (HONOR_NANS (GET_MODE (XEXP (x, 0)))
2437 || HONOR_NANS (GET_MODE (XEXP (x, 1))))
2438 return 1;
2439 break;
2440
2441 case EQ:
2442 case NE:
2443 if (HONOR_SNANS (GET_MODE (x)))
2444 return 1;
2445 /* Often comparison is CC mode, so check operand modes. */
2446 if (HONOR_SNANS (GET_MODE (XEXP (x, 0)))
2447 || HONOR_SNANS (GET_MODE (XEXP (x, 1))))
2448 return 1;
2449 break;
2450
2451 case FIX:
2452 /* Conversion of floating point might trap. */
2453 if (flag_trapping_math && HONOR_NANS (GET_MODE (XEXP (x, 0))))
2454 return 1;
2455 break;
2456
2457 case NEG:
2458 case ABS:
2459 /* These operations don't trap even with floating point. */
2460 break;
2461
2462 default:
2463 /* Any floating arithmetic may trap. */
2464 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT
2465 && flag_trapping_math)
2466 return 1;
2467 }
2468
2469 fmt = GET_RTX_FORMAT (code);
2470 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2471 {
2472 if (fmt[i] == 'e')
2473 {
2474 if (may_trap_p (XEXP (x, i)))
2475 return 1;
2476 }
2477 else if (fmt[i] == 'E')
2478 {
2479 int j;
2480 for (j = 0; j < XVECLEN (x, i); j++)
2481 if (may_trap_p (XVECEXP (x, i, j)))
2482 return 1;
2483 }
2484 }
2485 return 0;
2486 }
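
/* Illustrative sketch (not part of GCC): an expression is safe to
   evaluate speculatively (e.g. to hoist above a conditional branch)
   only if it can neither trap nor have side effects.

       if (!may_trap_p (x) && !side_effects_p (x))
         ... X may be evaluated unconditionally ...
*/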
2487 \f
2488 /* Return nonzero if X contains a comparison that is not either EQ or NE,
2489 i.e., an inequality. */
2490
2491 int
2492 inequality_comparisons_p (rtx x)
2493 {
2494 const char *fmt;
2495 int len, i;
2496 enum rtx_code code = GET_CODE (x);
2497
2498 switch (code)
2499 {
2500 case REG:
2501 case SCRATCH:
2502 case PC:
2503 case CC0:
2504 case CONST_INT:
2505 case CONST_DOUBLE:
2506 case CONST_VECTOR:
2507 case CONST:
2508 case LABEL_REF:
2509 case SYMBOL_REF:
2510 return 0;
2511
2512 case LT:
2513 case LTU:
2514 case GT:
2515 case GTU:
2516 case LE:
2517 case LEU:
2518 case GE:
2519 case GEU:
2520 return 1;
2521
2522 default:
2523 break;
2524 }
2525
2526 len = GET_RTX_LENGTH (code);
2527 fmt = GET_RTX_FORMAT (code);
2528
2529 for (i = 0; i < len; i++)
2530 {
2531 if (fmt[i] == 'e')
2532 {
2533 if (inequality_comparisons_p (XEXP (x, i)))
2534 return 1;
2535 }
2536 else if (fmt[i] == 'E')
2537 {
2538 int j;
2539 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2540 if (inequality_comparisons_p (XVECEXP (x, i, j)))
2541 return 1;
2542 }
2543 }
2544
2545 return 0;
2546 }
2547 \f
2548 /* Replace any occurrence of FROM in X with TO. The function does
2549 not descend into CONST_DOUBLEs when replacing.
2550
2551 Note that copying is not done so X must not be shared unless all copies
2552 are to be modified. */
2553
2554 rtx
2555 replace_rtx (rtx x, rtx from, rtx to)
2556 {
2557 int i, j;
2558 const char *fmt;
2559
2560 /* The following prevents loops from occurring when we change a MEM in a
2561 CONST_DOUBLE into the same CONST_DOUBLE. */
2562 if (x != 0 && GET_CODE (x) == CONST_DOUBLE)
2563 return x;
2564
2565 if (x == from)
2566 return to;
2567
2568 /* Allow this function to make replacements in EXPR_LISTs. */
2569 if (x == 0)
2570 return 0;
2571
2572 if (GET_CODE (x) == SUBREG)
2573 {
2574 rtx new = replace_rtx (SUBREG_REG (x), from, to);
2575
2576 if (GET_CODE (new) == CONST_INT)
2577 {
2578 x = simplify_subreg (GET_MODE (x), new,
2579 GET_MODE (SUBREG_REG (x)),
2580 SUBREG_BYTE (x));
2581 if (! x)
2582 abort ();
2583 }
2584 else
2585 SUBREG_REG (x) = new;
2586
2587 return x;
2588 }
2589 else if (GET_CODE (x) == ZERO_EXTEND)
2590 {
2591 rtx new = replace_rtx (XEXP (x, 0), from, to);
2592
2593 if (GET_CODE (new) == CONST_INT)
2594 {
2595 x = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x),
2596 new, GET_MODE (XEXP (x, 0)));
2597 if (! x)
2598 abort ();
2599 }
2600 else
2601 XEXP (x, 0) = new;
2602
2603 return x;
2604 }
2605
2606 fmt = GET_RTX_FORMAT (GET_CODE (x));
2607 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2608 {
2609 if (fmt[i] == 'e')
2610 XEXP (x, i) = replace_rtx (XEXP (x, i), from, to);
2611 else if (fmt[i] == 'E')
2612 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2613 XVECEXP (x, i, j) = replace_rtx (XVECEXP (x, i, j), from, to);
2614 }
2615
2616 return x;
2617 }
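
/* Illustrative sketch (not part of GCC): because replace_rtx modifies
   X in place, copy a shared pattern first.  Here the pseudo REG is
   replaced by the constant 42 throughout a copy of INSN's pattern.

       rtx pat = copy_rtx (PATTERN (insn));

       pat = replace_rtx (pat, reg, GEN_INT (42));
*/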
2618 \f
2619 /* Throughout the rtx X, replace many registers according to REG_MAP.
2620 Return the replacement for X (which may be X with altered contents).
2621 REG_MAP[R] is the replacement for register R, or 0 for don't replace.
2622 NREGS is the length of REG_MAP; regs >= NREGS are not mapped.
2623
2624 We only support REG_MAP entries of REG or SUBREG. Also, hard registers
2625 should not be mapped to pseudos or vice versa since validate_change
2626 is not called.
2627
2628 If REPLACE_DEST is 1, replacements are also done in destinations;
2629 otherwise, only sources are replaced. */
2630
2631 rtx
2632 replace_regs (rtx x, rtx *reg_map, unsigned int nregs, int replace_dest)
2633 {
2634 enum rtx_code code;
2635 int i;
2636 const char *fmt;
2637
2638 if (x == 0)
2639 return x;
2640
2641 code = GET_CODE (x);
2642 switch (code)
2643 {
2644 case SCRATCH:
2645 case PC:
2646 case CC0:
2647 case CONST_INT:
2648 case CONST_DOUBLE:
2649 case CONST_VECTOR:
2650 case CONST:
2651 case SYMBOL_REF:
2652 case LABEL_REF:
2653 return x;
2654
2655 case REG:
2656 /* Verify that the register has an entry before trying to access it. */
2657 if (REGNO (x) < nregs && reg_map[REGNO (x)] != 0)
2658 {
2659 /* SUBREGs can't be shared. Always return a copy to ensure that if
2660 this replacement occurs more than once then each instance will
2661 get distinct rtx. */
2662 if (GET_CODE (reg_map[REGNO (x)]) == SUBREG)
2663 return copy_rtx (reg_map[REGNO (x)]);
2664 return reg_map[REGNO (x)];
2665 }
2666 return x;
2667
2668 case SUBREG:
2669 /* Prevent making nested SUBREGs. */
2670 if (GET_CODE (SUBREG_REG (x)) == REG && REGNO (SUBREG_REG (x)) < nregs
2671 && reg_map[REGNO (SUBREG_REG (x))] != 0
2672 && GET_CODE (reg_map[REGNO (SUBREG_REG (x))]) == SUBREG)
2673 {
2674 rtx map_val = reg_map[REGNO (SUBREG_REG (x))];
2675 return simplify_gen_subreg (GET_MODE (x), map_val,
2676 GET_MODE (SUBREG_REG (x)),
2677 SUBREG_BYTE (x));
2678 }
2679 break;
2680
2681 case SET:
2682 if (replace_dest)
2683 SET_DEST (x) = replace_regs (SET_DEST (x), reg_map, nregs, 0);
2684
2685 else if (GET_CODE (SET_DEST (x)) == MEM
2686 || GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
2687 /* Even if we are not to replace destinations, replace a register if it
2688 is CONTAINED in the destination (i.e. the destination is memory or a
2689 STRICT_LOW_PART). */
2690 XEXP (SET_DEST (x), 0) = replace_regs (XEXP (SET_DEST (x), 0),
2691 reg_map, nregs, 0);
2692 else if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
2693 /* Similarly, for ZERO_EXTRACT we replace all operands. */
2694 break;
2695
2696 SET_SRC (x) = replace_regs (SET_SRC (x), reg_map, nregs, 0);
2697 return x;
2698
2699 default:
2700 break;
2701 }
2702
2703 fmt = GET_RTX_FORMAT (code);
2704 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2705 {
2706 if (fmt[i] == 'e')
2707 XEXP (x, i) = replace_regs (XEXP (x, i), reg_map, nregs, replace_dest);
2708 else if (fmt[i] == 'E')
2709 {
2710 int j;
2711 for (j = 0; j < XVECLEN (x, i); j++)
2712 XVECEXP (x, i, j) = replace_regs (XVECEXP (x, i, j), reg_map,
2713 nregs, replace_dest);
2714 }
2715 }
2716 return x;
2717 }
2718
2719 /* Replace occurrences of the old label in *X with the new one.
2720 DATA is a REPLACE_LABEL_DATA containing the old and new labels. */
2721
2722 int
2723 replace_label (rtx *x, void *data)
2724 {
2725 rtx l = *x;
2726 rtx old_label = ((replace_label_data *) data)->r1;
2727 rtx new_label = ((replace_label_data *) data)->r2;
2728 bool update_label_nuses = ((replace_label_data *) data)->update_label_nuses;
2729
2730 if (l == NULL_RTX)
2731 return 0;
2732
2733 if (GET_CODE (l) == SYMBOL_REF
2734 && CONSTANT_POOL_ADDRESS_P (l))
2735 {
2736 rtx c = get_pool_constant (l);
2737 if (rtx_referenced_p (old_label, c))
2738 {
2739 rtx new_c, new_l;
2740 replace_label_data *d = (replace_label_data *) data;
2741
2742 /* Create a copy of constant C; replace the label inside
2743 but do not update LABEL_NUSES because uses in constant pool
2744 are not counted. */
2745 new_c = copy_rtx (c);
2746 d->update_label_nuses = false;
2747 for_each_rtx (&new_c, replace_label, data);
2748 d->update_label_nuses = update_label_nuses;
2749
2750 /* Add the new constant NEW_C to the constant pool and replace
2751 the old reference to the constant with the new reference. */
2752 new_l = XEXP (force_const_mem (get_pool_mode (l), new_c), 0);
2753 *x = replace_rtx (l, l, new_l);
2754 }
2755 return 0;
2756 }
2757
2758 /* If this is a JUMP_INSN, then we also need to fix the JUMP_LABEL
2759 field. This is not handled by for_each_rtx because it doesn't
2760 handle unprinted ('0') fields. */
2761 if (GET_CODE (l) == JUMP_INSN && JUMP_LABEL (l) == old_label)
2762 JUMP_LABEL (l) = new_label;
2763
2764 if ((GET_CODE (l) == LABEL_REF
2765 || GET_CODE (l) == INSN_LIST)
2766 && XEXP (l, 0) == old_label)
2767 {
2768 XEXP (l, 0) = new_label;
2769 if (update_label_nuses)
2770 {
2771 ++LABEL_NUSES (new_label);
2772 --LABEL_NUSES (old_label);
2773 }
2774 return 0;
2775 }
2776
2777 return 0;
2778 }
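
/* Illustrative sketch (not part of GCC): redirecting every reference
   to OLD_LABEL inside INSN's pattern to NEW_LABEL, keeping the
   LABEL_NUSES counts up to date.

       replace_label_data rld;

       rld.r1 = old_label;
       rld.r2 = new_label;
       rld.update_label_nuses = true;
       for_each_rtx (&PATTERN (insn), replace_label, &rld);
*/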
2779
2780 /* When *BODY is equal to X, or X is directly referenced by *BODY,
2781 return nonzero so that FOR_EACH_RTX stops traversing and returns nonzero
2782 too; otherwise FOR_EACH_RTX continues traversing *BODY. */
2783
2784 static int
2785 rtx_referenced_p_1 (rtx *body, void *x)
2786 {
2787 rtx y = (rtx) x;
2788
2789 if (*body == NULL_RTX)
2790 return y == NULL_RTX;
2791
2792 /* Return true if a label_ref *BODY refers to label Y. */
2793 if (GET_CODE (*body) == LABEL_REF && GET_CODE (y) == CODE_LABEL)
2794 return XEXP (*body, 0) == y;
2795
2796 /* If *BODY is a reference to a pool constant, traverse the constant. */
2797 if (GET_CODE (*body) == SYMBOL_REF
2798 && CONSTANT_POOL_ADDRESS_P (*body))
2799 return rtx_referenced_p (y, get_pool_constant (*body));
2800
2801 /* By default, compare the RTL expressions. */
2802 return rtx_equal_p (*body, y);
2803 }
2804
2805 /* Return true if X is referenced in BODY. */
2806
2807 int
2808 rtx_referenced_p (rtx x, rtx body)
2809 {
2810 return for_each_rtx (&body, rtx_referenced_p_1, x);
2811 }
2812
2813 /* If INSN is a tablejump, return true and store the label (which precedes
2814 the jump table) in *LABELP and the jump table in *TABLEP. LABELP and TABLEP may be NULL. */
2815
2816 bool
2817 tablejump_p (rtx insn, rtx *labelp, rtx *tablep)
2818 {
2819 rtx label, table;
2820
2821 if (GET_CODE (insn) == JUMP_INSN
2822 && (label = JUMP_LABEL (insn)) != NULL_RTX
2823 && (table = next_active_insn (label)) != NULL_RTX
2824 && GET_CODE (table) == JUMP_INSN
2825 && (GET_CODE (PATTERN (table)) == ADDR_VEC
2826 || GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC))
2827 {
2828 if (labelp)
2829 *labelp = label;
2830 if (tablep)
2831 *tablep = table;
2832 return true;
2833 }
2834 return false;
2835 }
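
/* Illustrative sketch (not part of GCC): walking the case targets of a
   tablejump.  The vector operand index depends on whether the table is
   an ADDR_VEC or an ADDR_DIFF_VEC, as in label_is_jump_target_p below.

       rtx label, table;

       if (tablejump_p (insn, &label, &table))
         {
           rtvec vec = XVEC (PATTERN (table),
                             GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC);
           int n_targets = GET_NUM_ELEM (vec);
           ...
         }
*/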
2836
2837 /* A subroutine of computed_jump_p: return 1 if X contains a REG or MEM or
2838 constant that is not in the constant pool and not in the condition
2839 of an IF_THEN_ELSE. */
2840
2841 static int
2842 computed_jump_p_1 (rtx x)
2843 {
2844 enum rtx_code code = GET_CODE (x);
2845 int i, j;
2846 const char *fmt;
2847
2848 switch (code)
2849 {
2850 case LABEL_REF:
2851 case PC:
2852 return 0;
2853
2854 case CONST:
2855 case CONST_INT:
2856 case CONST_DOUBLE:
2857 case CONST_VECTOR:
2858 case SYMBOL_REF:
2859 case REG:
2860 return 1;
2861
2862 case MEM:
2863 return ! (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
2864 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)));
2865
2866 case IF_THEN_ELSE:
2867 return (computed_jump_p_1 (XEXP (x, 1))
2868 || computed_jump_p_1 (XEXP (x, 2)));
2869
2870 default:
2871 break;
2872 }
2873
2874 fmt = GET_RTX_FORMAT (code);
2875 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2876 {
2877 if (fmt[i] == 'e'
2878 && computed_jump_p_1 (XEXP (x, i)))
2879 return 1;
2880
2881 else if (fmt[i] == 'E')
2882 for (j = 0; j < XVECLEN (x, i); j++)
2883 if (computed_jump_p_1 (XVECEXP (x, i, j)))
2884 return 1;
2885 }
2886
2887 return 0;
2888 }
2889
2890 /* Return nonzero if INSN is an indirect jump (aka computed jump).
2891
2892 Tablejumps and casesi insns are not considered indirect jumps;
2893 we can recognize them by a (use (label_ref)). */
2894
2895 int
2896 computed_jump_p (rtx insn)
2897 {
2898 int i;
2899 if (GET_CODE (insn) == JUMP_INSN)
2900 {
2901 rtx pat = PATTERN (insn);
2902
2903 if (find_reg_note (insn, REG_LABEL, NULL_RTX))
2904 return 0;
2905 else if (GET_CODE (pat) == PARALLEL)
2906 {
2907 int len = XVECLEN (pat, 0);
2908 int has_use_labelref = 0;
2909
2910 for (i = len - 1; i >= 0; i--)
2911 if (GET_CODE (XVECEXP (pat, 0, i)) == USE
2912 && (GET_CODE (XEXP (XVECEXP (pat, 0, i), 0))
2913 == LABEL_REF))
2914 has_use_labelref = 1;
2915
2916 if (! has_use_labelref)
2917 for (i = len - 1; i >= 0; i--)
2918 if (GET_CODE (XVECEXP (pat, 0, i)) == SET
2919 && SET_DEST (XVECEXP (pat, 0, i)) == pc_rtx
2920 && computed_jump_p_1 (SET_SRC (XVECEXP (pat, 0, i))))
2921 return 1;
2922 }
2923 else if (GET_CODE (pat) == SET
2924 && SET_DEST (pat) == pc_rtx
2925 && computed_jump_p_1 (SET_SRC (pat)))
2926 return 1;
2927 }
2928 return 0;
2929 }
2930
2931 /* Traverse X via depth-first search, calling F for each
2932 sub-expression (including X itself). F is also passed the DATA.
2933 If F returns -1, do not traverse sub-expressions, but continue
2934 traversing the rest of the tree. If F ever returns any other
2935 nonzero value, stop the traversal, and return the value returned
2936 by F. Otherwise, return 0. This function does not traverse inside
2937 tree structure that contains RTX_EXPRs, or into sub-expressions
2938 whose format code is `0' since it is not known whether or not those
2939 codes are actually RTL.
2940
2941 This routine is very general, and could (should?) be used to
2942 implement many of the other routines in this file. */
2943
2944 int
2945 for_each_rtx (rtx *x, rtx_function f, void *data)
2946 {
2947 int result;
2948 int length;
2949 const char *format;
2950 int i;
2951
2952 /* Call F on X. */
2953 result = (*f) (x, data);
2954 if (result == -1)
2955 /* Do not traverse sub-expressions. */
2956 return 0;
2957 else if (result != 0)
2958 /* Stop the traversal. */
2959 return result;
2960
2961 if (*x == NULL_RTX)
2962 /* There are no sub-expressions. */
2963 return 0;
2964
2965 length = GET_RTX_LENGTH (GET_CODE (*x));
2966 format = GET_RTX_FORMAT (GET_CODE (*x));
2967
2968 for (i = 0; i < length; ++i)
2969 {
2970 switch (format[i])
2971 {
2972 case 'e':
2973 result = for_each_rtx (&XEXP (*x, i), f, data);
2974 if (result != 0)
2975 return result;
2976 break;
2977
2978 case 'V':
2979 case 'E':
2980 if (XVEC (*x, i) != 0)
2981 {
2982 int j;
2983 for (j = 0; j < XVECLEN (*x, i); ++j)
2984 {
2985 result = for_each_rtx (&XVECEXP (*x, i, j), f, data);
2986 if (result != 0)
2987 return result;
2988 }
2989 }
2990 break;
2991
2992 default:
2993 /* Nothing to do. */
2994 break;
2995 }
2996
2997 }
2998
2999 return 0;
3000 }
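
/* Illustrative sketch (not part of GCC): counting the MEM
   sub-expressions of a pattern with for_each_rtx.  The callback always
   returns 0 so that the whole expression gets traversed.

       static int
       count_mems_1 (rtx *x, void *data)
       {
         if (*x != NULL_RTX && GET_CODE (*x) == MEM)
           ++*(int *) data;
         return 0;
       }

       int n_mems = 0;
       for_each_rtx (&pat, count_mems_1, &n_mems);
*/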
3001
3002 /* Searches X for any reference to REGNO, returning the rtx of the
3003 reference found if any. Otherwise, returns NULL_RTX. */
3004
3005 rtx
3006 regno_use_in (unsigned int regno, rtx x)
3007 {
3008 const char *fmt;
3009 int i, j;
3010 rtx tem;
3011
3012 if (GET_CODE (x) == REG && REGNO (x) == regno)
3013 return x;
3014
3015 fmt = GET_RTX_FORMAT (GET_CODE (x));
3016 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
3017 {
3018 if (fmt[i] == 'e')
3019 {
3020 if ((tem = regno_use_in (regno, XEXP (x, i))))
3021 return tem;
3022 }
3023 else if (fmt[i] == 'E')
3024 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3025 if ((tem = regno_use_in (regno , XVECEXP (x, i, j))))
3026 return tem;
3027 }
3028
3029 return NULL_RTX;
3030 }
3031
3032 /* Return a value indicating whether OP, an operand of a commutative
3033 operation, is preferred as the first or second operand. The higher
3034 the value, the stronger the preference for being the first operand.
3035 We use negative values to indicate a preference for the second operand
3036 and positive values for the first operand. */
3037
3038 int
3039 commutative_operand_precedence (rtx op)
3040 {
3041 enum rtx_code code = GET_CODE (op);
3042
3043 /* Constants always come second. Prefer "nice" constants. */
3044 if (code == CONST_INT)
3045 return -7;
3046 if (code == CONST_DOUBLE)
3047 return -6;
3048 op = avoid_constant_pool_reference (op);
3049
3050 switch (GET_RTX_CLASS (code))
3051 {
3052 case RTX_CONST_OBJ:
3053 if (code == CONST_INT)
3054 return -5;
3055 if (code == CONST_DOUBLE)
3056 return -4;
3057 return -3;
3058
3059 case RTX_EXTRA:
3060 /* SUBREGs of objects should come second. */
3061 if (code == SUBREG && OBJECT_P (SUBREG_REG (op)))
3062 return -2;
3063
3064 if (!CONSTANT_P (op))
3065 return 0;
3066 else
3067 /* As for RTX_CONST_OBJ. */
3068 return -3;
3069
3070 case RTX_OBJ:
3071 /* Complex expressions should come first, so decrease the priority
3072 of objects. */
3073 return -1;
3074
3075 case RTX_COMM_ARITH:
3076 /* Prefer operands that are themselves commutative to be first.
3077 This helps to make things linear. In particular,
3078 (and (and (reg) (reg)) (not (reg))) is canonical. */
3079 return 4;
3080
3081 case RTX_BIN_ARITH:
3082 /* If only one operand is a binary expression, it will be the first
3083 operand. In particular, (plus (minus (reg) (reg)) (neg (reg)))
3084 is canonical, although it will usually be further simplified. */
3085 return 2;
3086
3087 case RTX_UNARY:
3088 /* Then prefer NEG and NOT. */
3089 if (code == NEG || code == NOT)
3090 return 1;
3091
3092 default:
3093 return 0;
3094 }
3095 }
3096
3097 /* Return 1 iff it is necessary to swap the operands of a commutative
3098 operation in order to canonicalize the expression. */
3099
3100 int
3101 swap_commutative_operands_p (rtx x, rtx y)
3102 {
3103 return (commutative_operand_precedence (x)
3104 < commutative_operand_precedence (y));
3105 }
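
/* Illustrative sketch (not part of GCC): canonicalizing the operand
   order of a commutative operation, the usual idiom in simplify-rtx.c.

       if (swap_commutative_operands_p (op0, op1))
         {
           rtx tem = op0;
           op0 = op1;
           op1 = tem;
         }
*/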
3106
3107 /* Return 1 if X is an autoincrement side effect and the register is
3108 not the stack pointer. */
3109 int
3110 auto_inc_p (rtx x)
3111 {
3112 switch (GET_CODE (x))
3113 {
3114 case PRE_INC:
3115 case POST_INC:
3116 case PRE_DEC:
3117 case POST_DEC:
3118 case PRE_MODIFY:
3119 case POST_MODIFY:
3120 /* There are no REG_INC notes for SP. */
3121 if (XEXP (x, 0) != stack_pointer_rtx)
3122 return 1;
3123 default:
3124 break;
3125 }
3126 return 0;
3127 }
3128
3129 /* Return 1 if the sequence of instructions beginning with FROM and up
3130 to and including TO is safe to move. If NEW_TO is non-NULL, and
3131 the sequence is not already safe to move, but can be easily
3132 extended to a sequence which is safe, then NEW_TO will point to the
3133 end of the extended sequence.
3134
3135 For now, this function only checks that the region contains whole
3136 exception regions, but it could be extended to check additional
3137 conditions as well. */
3138
3139 int
3140 insns_safe_to_move_p (rtx from, rtx to, rtx *new_to)
3141 {
3142 int eh_region_count = 0;
3143 int past_to_p = 0;
3144 rtx r = from;
3145
3146 /* By default, assume the end of the region will be what was
3147 suggested. */
3148 if (new_to)
3149 *new_to = to;
3150
3151 while (r)
3152 {
3153 if (GET_CODE (r) == NOTE)
3154 {
3155 switch (NOTE_LINE_NUMBER (r))
3156 {
3157 case NOTE_INSN_EH_REGION_BEG:
3158 ++eh_region_count;
3159 break;
3160
3161 case NOTE_INSN_EH_REGION_END:
3162 if (eh_region_count == 0)
3163 /* This sequence of instructions contains the end of
3164 an exception region, but not the beginning. Moving
3165 it will cause chaos. */
3166 return 0;
3167
3168 --eh_region_count;
3169 break;
3170
3171 default:
3172 break;
3173 }
3174 }
3175 else if (past_to_p)
3176 /* If we've passed TO, and we see a non-note instruction, we
3177 can't extend the sequence to a movable sequence. */
3178 return 0;
3179
3180 if (r == to)
3181 {
3182 if (!new_to)
3183 /* It's OK to move the sequence if there were matched sets of
3184 exception region notes. */
3185 return eh_region_count == 0;
3186
3187 past_to_p = 1;
3188 }
3189
3190 /* It's OK to move the sequence if there were matched sets of
3191 exception region notes. */
3192 if (past_to_p && eh_region_count == 0)
3193 {
3194 *new_to = r;
3195 return 1;
3196 }
3197
3198 /* Go to the next instruction. */
3199 r = NEXT_INSN (r);
3200 }
3201
3202 return 0;
3203 }
3204
3205 /* Return nonzero if IN contains a piece of rtl that has the address LOC. */
3206 int
3207 loc_mentioned_in_p (rtx *loc, rtx in)
3208 {
3209 enum rtx_code code = GET_CODE (in);
3210 const char *fmt = GET_RTX_FORMAT (code);
3211 int i, j;
3212
3213 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3214 {
3215 if (loc == &in->u.fld[i].rtx)
3216 return 1;
3217 if (fmt[i] == 'e')
3218 {
3219 if (loc_mentioned_in_p (loc, XEXP (in, i)))
3220 return 1;
3221 }
3222 else if (fmt[i] == 'E')
3223 for (j = XVECLEN (in, i) - 1; j >= 0; j--)
3224 if (loc_mentioned_in_p (loc, XVECEXP (in, i, j)))
3225 return 1;
3226 }
3227 return 0;
3228 }
3229
3230 /* Helper function for subreg_lsb. Given a subreg's OUTER_MODE, INNER_MODE,
3231 and SUBREG_BYTE, return the bit offset where the subreg begins
3232 (counting from the least significant bit of the operand). */
3233
3234 unsigned int
3235 subreg_lsb_1 (enum machine_mode outer_mode,
3236 enum machine_mode inner_mode,
3237 unsigned int subreg_byte)
3238 {
3239 unsigned int bitpos;
3240 unsigned int byte;
3241 unsigned int word;
3242
3243 /* A paradoxical subreg begins at bit position 0. */
3244 if (GET_MODE_BITSIZE (outer_mode) > GET_MODE_BITSIZE (inner_mode))
3245 return 0;
3246
3247 if (WORDS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
3248 /* If the subreg crosses a word boundary ensure that
3249 it also begins and ends on a word boundary. */
3250 if ((subreg_byte % UNITS_PER_WORD
3251 + GET_MODE_SIZE (outer_mode)) > UNITS_PER_WORD
3252 && (subreg_byte % UNITS_PER_WORD
3253 || GET_MODE_SIZE (outer_mode) % UNITS_PER_WORD))
3254 abort ();
3255
3256 if (WORDS_BIG_ENDIAN)
3257 word = (GET_MODE_SIZE (inner_mode)
3258 - (subreg_byte + GET_MODE_SIZE (outer_mode))) / UNITS_PER_WORD;
3259 else
3260 word = subreg_byte / UNITS_PER_WORD;
3261 bitpos = word * BITS_PER_WORD;
3262
3263 if (BYTES_BIG_ENDIAN)
3264 byte = (GET_MODE_SIZE (inner_mode)
3265 - (subreg_byte + GET_MODE_SIZE (outer_mode))) % UNITS_PER_WORD;
3266 else
3267 byte = subreg_byte % UNITS_PER_WORD;
3268 bitpos += byte * BITS_PER_UNIT;
3269
3270 return bitpos;
3271 }
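
/* Worked example (illustrative): on a little-endian 32-bit target
   (BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN == 0, UNITS_PER_WORD == 4),
   the subreg (subreg:SI (reg:DI) 4) yields WORD == 1 and BYTE == 0, so
   subreg_lsb returns bit position 32, i.e. the high half of the DImode
   register.  */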
3272
3273 /* Given a subreg X, return the bit offset where the subreg begins
3274 (counting from the least significant bit of the reg). */
3275
3276 unsigned int
3277 subreg_lsb (rtx x)
3278 {
3279 return subreg_lsb_1 (GET_MODE (x), GET_MODE (SUBREG_REG (x)),
3280 SUBREG_BYTE (x));
3281 }
3282
3283 /* This function returns the regno offset of a subreg expression.
3284 xregno - A regno of an inner hard subreg_reg (or what will become one).
3285 xmode - The mode of xregno.
3286 offset - The byte offset.
3287 ymode - The mode of a top level SUBREG (or what may become one).
3288 RETURN - The regno offset which would be used. */
3289 unsigned int
3290 subreg_regno_offset (unsigned int xregno, enum machine_mode xmode,
3291 unsigned int offset, enum machine_mode ymode)
3292 {
3293 int nregs_xmode, nregs_ymode;
3294 int mode_multiple, nregs_multiple;
3295 int y_offset;
3296
3297 if (xregno >= FIRST_PSEUDO_REGISTER)
3298 abort ();
3299
3300 nregs_xmode = hard_regno_nregs[xregno][xmode];
3301 nregs_ymode = hard_regno_nregs[xregno][ymode];
3302
3303 /* If this is a big endian paradoxical subreg, which uses more actual
3304 hard registers than the original register, we must return a negative
3305 offset so that we find the proper highpart of the register. */
3306 if (offset == 0
3307 && nregs_ymode > nregs_xmode
3308 && (GET_MODE_SIZE (ymode) > UNITS_PER_WORD
3309 ? WORDS_BIG_ENDIAN : BYTES_BIG_ENDIAN))
3310 return nregs_xmode - nregs_ymode;
3311
3312 if (offset == 0 || nregs_xmode == nregs_ymode)
3313 return 0;
3314
3315 /* The size of YMODE must not be greater than the size of XMODE. */
3316 mode_multiple = GET_MODE_SIZE (xmode) / GET_MODE_SIZE (ymode);
3317 if (mode_multiple == 0)
3318 abort ();
3319
3320 y_offset = offset / GET_MODE_SIZE (ymode);
3321 nregs_multiple = nregs_xmode / nregs_ymode;
3322 return (y_offset / (mode_multiple / nregs_multiple)) * nregs_ymode;
3323 }
3324
3325 /* This function returns true when the offset is representable via
3326 subreg_offset in the given regno.
3327 xregno - A regno of an inner hard subreg_reg (or what will become one).
3328 xmode - The mode of xregno.
3329 offset - The byte offset.
3330 ymode - The mode of a top level SUBREG (or what may become one).
3331 RETURN - Whether the offset is representable. */
3332 bool
3333 subreg_offset_representable_p (unsigned int xregno, enum machine_mode xmode,
3334 unsigned int offset, enum machine_mode ymode)
3335 {
3336 int nregs_xmode, nregs_ymode;
3337 int mode_multiple, nregs_multiple;
3338 int y_offset;
3339
3340 if (xregno >= FIRST_PSEUDO_REGISTER)
3341 abort ();
3342
3343 nregs_xmode = hard_regno_nregs[xregno][xmode];
3344 nregs_ymode = hard_regno_nregs[xregno][ymode];
3345
3346 /* Paradoxical subregs are always valid. */
3347 if (offset == 0
3348 && nregs_ymode > nregs_xmode
3349 && (GET_MODE_SIZE (ymode) > UNITS_PER_WORD
3350 ? WORDS_BIG_ENDIAN : BYTES_BIG_ENDIAN))
3351 return true;
3352
3353 /* Lowpart subregs are always valid. */
3354 if (offset == subreg_lowpart_offset (ymode, xmode))
3355 return true;
3356
3357 #ifdef ENABLE_CHECKING
3358 /* This should always pass, otherwise we don't know how to verify the
3359 constraint. These conditions may be relaxed but subreg_offset would
3360 need to be redesigned. */
3361 if (GET_MODE_SIZE (xmode) % GET_MODE_SIZE (ymode)
3362 || GET_MODE_SIZE (ymode) % nregs_ymode
3363 || nregs_xmode % nregs_ymode)
3364 abort ();
3365 #endif
3366
3367 /* The XMODE value can be seen as a vector of NREGS_XMODE
3368 values. The subreg must represent a lowpart of a given field.
3369 Compute which field it is. */
3370 offset -= subreg_lowpart_offset (ymode,
3371 mode_for_size (GET_MODE_BITSIZE (xmode)
3372 / nregs_xmode,
3373 MODE_INT, 0));
3374
3375 /* The size of YMODE must not be greater than the size of XMODE. */
3376 mode_multiple = GET_MODE_SIZE (xmode) / GET_MODE_SIZE (ymode);
3377 if (mode_multiple == 0)
3378 abort ();
3379
3380 y_offset = offset / GET_MODE_SIZE (ymode);
3381 nregs_multiple = nregs_xmode / nregs_ymode;
3382 #ifdef ENABLE_CHECKING
3383 if (offset % GET_MODE_SIZE (ymode)
3384 || mode_multiple % nregs_multiple)
3385 abort ();
3386 #endif
3387 return (!(y_offset % (mode_multiple / nregs_multiple)));
3388 }
3389
3390 /* Return the final regno that a subreg expression refers to. */
3391 unsigned int
3392 subreg_regno (rtx x)
3393 {
3394 unsigned int ret;
3395 rtx subreg = SUBREG_REG (x);
3396 int regno = REGNO (subreg);
3397
3398 ret = regno + subreg_regno_offset (regno,
3399 GET_MODE (subreg),
3400 SUBREG_BYTE (x),
3401 GET_MODE (x));
3402 return ret;
3403 }
3404
3405 struct parms_set_data
3406 {
3407 int nregs;
3408 HARD_REG_SET regs;
3409 };
3410
3411 /* Helper function for noticing stores to parameter registers. */
3412 static void
3413 parms_set (rtx x, rtx pat ATTRIBUTE_UNUSED, void *data)
3414 {
3415 struct parms_set_data *d = data;
3416 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3417 && TEST_HARD_REG_BIT (d->regs, REGNO (x)))
3418 {
3419 CLEAR_HARD_REG_BIT (d->regs, REGNO (x));
3420 d->nregs--;
3421 }
3422 }
3423
3424 /* Look backward for the first parameter to be loaded.
3425 Do not skip past BOUNDARY. */
3426 rtx
3427 find_first_parameter_load (rtx call_insn, rtx boundary)
3428 {
3429 struct parms_set_data parm;
3430 rtx p, before;
3431
3432 /* Since different machines initialize their parameter registers
3433 in different orders, assume nothing. Collect the set of all
3434 parameter registers. */
3435 CLEAR_HARD_REG_SET (parm.regs);
3436 parm.nregs = 0;
3437 for (p = CALL_INSN_FUNCTION_USAGE (call_insn); p; p = XEXP (p, 1))
3438 if (GET_CODE (XEXP (p, 0)) == USE
3439 && GET_CODE (XEXP (XEXP (p, 0), 0)) == REG)
3440 {
3441 if (REGNO (XEXP (XEXP (p, 0), 0)) >= FIRST_PSEUDO_REGISTER)
3442 abort ();
3443
3444 /* We only care about registers which can hold function
3445 arguments. */
3446 if (!FUNCTION_ARG_REGNO_P (REGNO (XEXP (XEXP (p, 0), 0))))
3447 continue;
3448
3449 SET_HARD_REG_BIT (parm.regs, REGNO (XEXP (XEXP (p, 0), 0)));
3450 parm.nregs++;
3451 }
3452 before = call_insn;
3453
3454 /* Search backward for the first set of a register in this set. */
3455 while (parm.nregs && before != boundary)
3456 {
3457 before = PREV_INSN (before);
3458
3459 /* It is possible that some loads got CSEed from one call to
3460 another. Stop in that case. */
3461 if (GET_CODE (before) == CALL_INSN)
3462 break;
3463
3464 /* Our caller must either ensure that we will find all sets
3465 (in case the code has not been optimized yet), or guard against
3466 possible labels by setting BOUNDARY to the preceding
3467 CODE_LABEL. */
3468 if (GET_CODE (before) == CODE_LABEL)
3469 {
3470 if (before != boundary)
3471 abort ();
3472 break;
3473 }
3474
3475 if (INSN_P (before))
3476 note_stores (PATTERN (before), parms_set, &parm);
3477 }
3478 return before;
3479 }
3480
3481 /* Return true if we should avoid inserting code between INSN and the
3482 preceding call instruction. */
3483
3484 bool
3485 keep_with_call_p (rtx insn)
3486 {
3487 rtx set;
3488
3489 if (INSN_P (insn) && (set = single_set (insn)) != NULL)
3490 {
3491 if (GET_CODE (SET_DEST (set)) == REG
3492 && REGNO (SET_DEST (set)) < FIRST_PSEUDO_REGISTER
3493 && fixed_regs[REGNO (SET_DEST (set))]
3494 && general_operand (SET_SRC (set), VOIDmode))
3495 return true;
3496 if (GET_CODE (SET_SRC (set)) == REG
3497 && FUNCTION_VALUE_REGNO_P (REGNO (SET_SRC (set)))
3498 && GET_CODE (SET_DEST (set)) == REG
3499 && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER)
3500 return true;
3501 /* There may be a stack pop just after the call and before the store
3502 of the return register. Search for the actual store when deciding
3503 if we can break or not. */
3504 if (SET_DEST (set) == stack_pointer_rtx)
3505 {
3506 rtx i2 = next_nonnote_insn (insn);
3507 if (i2 && keep_with_call_p (i2))
3508 return true;
3509 }
3510 }
3511 return false;
3512 }
3513
3514 /* Return true when a store to register X can be hoisted to a place where
3515 the LIVE registers are live (LIVE can be NULL when unknown). VAL is the
3516 destination whose value will be used. */
3517
3518 static bool
3519 hoist_test_store (rtx x, rtx val, regset live)
3520 {
3521 if (GET_CODE (x) == SCRATCH)
3522 return true;
3523
3524 if (rtx_equal_p (x, val))
3525 return true;
3526
3527 /* Allow a SUBREG of X provided it does not write just part of a multireg
3528 pseudo; otherwise we would need to update all users to take the hoisted
3529 store into account. The caller may represent that by specifying the whole SUBREG as VAL. */
3530
3531 if (GET_CODE (x) == SUBREG && rtx_equal_p (SUBREG_REG (x), val))
3532 {
3533 if (GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) > UNITS_PER_WORD
3534 && GET_MODE_BITSIZE (GET_MODE (x)) <
3535 GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))))
3536 return false;
3537 return true;
3538 }
3539 if (GET_CODE (x) == SUBREG)
3540 x = SUBREG_REG (x);
3541
3542 /* Anything except a register store is not hoistable. This includes
3543 partial stores to registers. */
3544
3545 if (!REG_P (x))
3546 return false;
3547
3548 /* A pseudo register can always be replaced by another pseudo to avoid
3549 the side effect; for a hard register we must ensure that it is dead.
3550 Eventually we may want to add code to try to turn pseudos into hard
3551 registers, but it is unlikely to be useful. */
3552
3553 if (REGNO (x) < FIRST_PSEUDO_REGISTER)
3554 {
3555 int regno = REGNO (x);
3556 int n = hard_regno_nregs[regno][GET_MODE (x)];
3557
3558 if (!live)
3559 return false;
3560 if (REGNO_REG_SET_P (live, regno))
3561 return false;
3562 while (--n > 0)
3563 if (REGNO_REG_SET_P (live, regno + n))
3564 return false;
3565 }
3566 return true;
3567 }
3568
3569
3570 /* Return true if INSN can be hoisted to a place with LIVE hard registers
3571 (LIVE can be NULL when unknown). VAL is expected to be stored by the insn
3572 and used by the hoisting pass. */
3573
3574 bool
3575 can_hoist_insn_p (rtx insn, rtx val, regset live)
3576 {
3577 rtx pat = PATTERN (insn);
3578 int i;
3579
3580 /* It is probably not worth the complexity to handle multiple-set
3581 stores. */
3582 if (!single_set (insn))
3583 return false;
3584 /* We could move a CALL_INSN, but we would need to check that all
3585 caller-clobbered regs are dead; give up for now. */
3586 if (GET_CODE (insn) == CALL_INSN)
3587 return false;
3588 /* In the future we may handle hoisting of libcall sequences, but
3589 give up for now. */
3590 if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
3591 return false;
3592 switch (GET_CODE (pat))
3593 {
3594 case SET:
3595 if (!hoist_test_store (SET_DEST (pat), val, live))
3596 return false;
3597 break;
3598 case USE:
3599 /* USEs have tricky semantics, so do not move them. */
3600 return false;
3601 break;
3602 case CLOBBER:
3603 if (!hoist_test_store (XEXP (pat, 0), val, live))
3604 return false;
3605 break;
3606 case PARALLEL:
3607 for (i = 0; i < XVECLEN (pat, 0); i++)
3608 {
3609 rtx x = XVECEXP (pat, 0, i);
3610 switch (GET_CODE (x))
3611 {
3612 case SET:
3613 if (!hoist_test_store (SET_DEST (x), val, live))
3614 return false;
3615 break;
3616 case USE:
3617 /* We would need to fix callers to really ensure availability
3618 of all values the insn uses, but for now it is safe to prohibit
3619 hoisting of any insn having such hidden uses. */
3620 return false;
3621 break;
3622 case CLOBBER:
3623 if (!hoist_test_store (SET_DEST (x), val, live))
3624 return false;
3625 break;
3626 default:
3627 break;
3628 }
3629 }
3630 break;
3631 default:
3632 abort ();
3633 }
3634 return true;
3635 }
3636
3637 /* Update a store after hoisting: replace all stores to pseudo registers
3638 by new ones, to avoid clobbering values, except for the store to VAL,
3639 which will be updated to NEW. */
3640
3641 static void
3642 hoist_update_store (rtx insn, rtx *xp, rtx val, rtx new)
3643 {
3644 rtx x = *xp;
3645
3646 if (GET_CODE (x) == SCRATCH)
3647 return;
3648
3649 if (GET_CODE (x) == SUBREG && SUBREG_REG (x) == val)
3650 validate_change (insn, xp,
3651 simplify_gen_subreg (GET_MODE (x), new, GET_MODE (new),
3652 SUBREG_BYTE (x)), 1);
3653 if (rtx_equal_p (x, val))
3654 {
3655 validate_change (insn, xp, new, 1);
3656 return;
3657 }
3658 if (GET_CODE (x) == SUBREG)
3659 {
3660 xp = &SUBREG_REG (x);
3661 x = *xp;
3662 }
3663
3664 if (!REG_P (x))
3665 abort ();
3666
3667 /* We've verified that hard registers are dead, so we may keep the side
3668 effect. Otherwise replace it by a new pseudo. */
3669 if (REGNO (x) >= FIRST_PSEUDO_REGISTER)
3670 validate_change (insn, xp, gen_reg_rtx (GET_MODE (x)), 1);
3671 REG_NOTES (insn)
3672 = alloc_EXPR_LIST (REG_UNUSED, *xp, REG_NOTES (insn));
3673 }
3674
3675 /* Create a copy of INSN after AFTER, replacing the store of VAL with a store
3676 to NEW and replacing every other side effect on a pseudo register with a new pseudo register. */
3677
3678 rtx
3679 hoist_insn_after (rtx insn, rtx after, rtx val, rtx new)
3680 {
3681 rtx pat;
3682 int i;
3683 rtx note;
3684
3685 insn = emit_copy_of_insn_after (insn, after);
3686 pat = PATTERN (insn);
3687
3688 /* Remove REG_UNUSED notes as we will re-emit them. */
3689 while ((note = find_reg_note (insn, REG_UNUSED, NULL_RTX)))
3690 remove_note (insn, note);
3691
3692 /* To make this work, callers must ensure that everything referenced by
3693 REG_EQUAL/REG_EQUIV notes is moved too. Let's just remove them; it is
3694 probably easier. */
3695 while ((note = find_reg_note (insn, REG_EQUAL, NULL_RTX)))
3696 remove_note (insn, note);
3697 while ((note = find_reg_note (insn, REG_EQUIV, NULL_RTX)))
3698 remove_note (insn, note);
3699
3700 /* Remove REG_DEAD notes as they might not be valid anymore in case
3701 we create redundancy. */
3702 while ((note = find_reg_note (insn, REG_DEAD, NULL_RTX)))
3703 remove_note (insn, note);
3704 switch (GET_CODE (pat))
3705 {
3706 case SET:
3707 hoist_update_store (insn, &SET_DEST (pat), val, new);
3708 break;
3709 case USE:
3710 break;
3711 case CLOBBER:
3712 hoist_update_store (insn, &XEXP (pat, 0), val, new);
3713 break;
3714 case PARALLEL:
3715 for (i = 0; i < XVECLEN (pat, 0); i++)
3716 {
3717 rtx x = XVECEXP (pat, 0, i);
3718 switch (GET_CODE (x))
3719 {
3720 case SET:
3721 hoist_update_store (insn, &SET_DEST (x), val, new);
3722 break;
3723 case USE:
3724 break;
3725 case CLOBBER:
3726 hoist_update_store (insn, &SET_DEST (x), val, new);
3727 break;
3728 default:
3729 break;
3730 }
3731 }
3732 break;
3733 default:
3734 abort ();
3735 }
3736 if (!apply_change_group ())
3737 abort ();
3738
3739 return insn;
3740 }
3741
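/* Hoist INSN to the edge E, creating the copy with hoist_insn_after;
   the store of VAL is replaced by a store to NEW.  The copy is queued
   on the edge's pending insn sequence and returned.  */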
3742 rtx
3743 hoist_insn_to_edge (rtx insn, edge e, rtx val, rtx new)
3744 {
3745 rtx new_insn;
3746
3747 /* We cannot insert instructions on an abnormal critical edge.
3748 It will be easier to find the culprit if we die now. */
3749 if ((e->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (e))
3750 abort ();
3751
3752 /* Do not use emit_insn_on_edge as we want to preserve notes and similar
3753 stuff. We may also emit CALL_INSNs and friends. */
3754 if (e->insns == NULL_RTX)
3755 {
3756 start_sequence ();
3757 emit_note (NOTE_INSN_DELETED);
3758 }
3759 else
3760 push_to_sequence (e->insns);
3761
3762 new_insn = hoist_insn_after (insn, get_last_insn (), val, new);
3763
3764 e->insns = get_insns ();
3765 end_sequence ();
3766 return new_insn;
3767 }
3768
3769 /* Return true if LABEL is a target of JUMP_INSN. This applies only
3770 to non-complex jumps. That is, direct unconditional, conditional,
3771 and tablejumps, but not computed jumps or returns. It also does
3772 not apply to the fallthru case of a conditional jump. */
3773
3774 bool
3775 label_is_jump_target_p (rtx label, rtx jump_insn)
3776 {
3777 rtx tmp = JUMP_LABEL (jump_insn);
3778
3779 if (label == tmp)
3780 return true;
3781
3782 if (tablejump_p (jump_insn, NULL, &tmp))
3783 {
3784 rtvec vec = XVEC (PATTERN (tmp),
3785 GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC);
3786 int i, veclen = GET_NUM_ELEM (vec);
3787
3788 for (i = 0; i < veclen; ++i)
3789 if (XEXP (RTVEC_ELT (vec, i), 0) == label)
3790 return true;
3791 }
3792
3793 return false;
3794 }
3795
3796 \f
3797 /* Return an estimate of the cost of computing rtx X.
3798 One use is in cse, to decide which expression to keep in the hash table.
3799 Another is in rtl generation, to pick the cheapest way to multiply.
3800 Other uses like the latter are expected in the future. */
3801
3802 int
3803 rtx_cost (rtx x, enum rtx_code outer_code ATTRIBUTE_UNUSED)
3804 {
3805 int i, j;
3806 enum rtx_code code;
3807 const char *fmt;
3808 int total;
3809
3810 if (x == 0)
3811 return 0;
3812
3813 /* Compute the default costs of certain things.
3814 Note that targetm.rtx_costs can override the defaults. */
3815
3816 code = GET_CODE (x);
3817 switch (code)
3818 {
3819 case MULT:
3820 total = COSTS_N_INSNS (5);
3821 break;
3822 case DIV:
3823 case UDIV:
3824 case MOD:
3825 case UMOD:
3826 total = COSTS_N_INSNS (7);
3827 break;
3828 case USE:
3829 /* Used in loop.c and combine.c as a marker. */
3830 total = 0;
3831 break;
3832 default:
3833 total = COSTS_N_INSNS (1);
3834 }
3835
3836 switch (code)
3837 {
3838 case REG:
3839 return 0;
3840
3841 case SUBREG:
3842 /* If we can't tie these modes, make this expensive. The larger
3843 the mode, the more expensive it is. */
3844 if (! MODES_TIEABLE_P (GET_MODE (x), GET_MODE (SUBREG_REG (x))))
3845 return COSTS_N_INSNS (2
3846 + GET_MODE_SIZE (GET_MODE (x)) / UNITS_PER_WORD);
3847 break;
3848
3849 default:
3850 if (targetm.rtx_costs (x, code, outer_code, &total))
3851 return total;
3852 break;
3853 }
3854
3855 /* Sum the costs of the sub-rtx's, plus the cost of this operation,
3856 which is already in TOTAL. */
3857
3858 fmt = GET_RTX_FORMAT (code);
3859 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3860 if (fmt[i] == 'e')
3861 total += rtx_cost (XEXP (x, i), code);
3862 else if (fmt[i] == 'E')
3863 for (j = 0; j < XVECLEN (x, i); j++)
3864 total += rtx_cost (XVECEXP (x, i, j), code);
3865
3866 return total;
3867 }
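
/* Illustrative sketch (not part of GCC): comparing two candidate forms
   of a SET source by estimated cost, a typical use in CSE-style passes.

       if (rtx_cost (new_src, SET) < rtx_cost (old_src, SET))
         ... prefer NEW_SRC ...
*/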
3868 \f
3869 /* Return the cost of address expression X.
3870 X is expected to be a properly formed address reference. */
3871
3872 int
3873 address_cost (rtx x, enum machine_mode mode)
3874 {
3875 /* The address_cost target hook does not deal with ADDRESSOF nodes. But,
3876 during CSE, such nodes are present. Using an ADDRESSOF node which
3877 refers to the address of a REG is a good thing because we can then
3878 turn (MEM (ADDRESSOF (REG))) into just plain REG. */
3879
3880 if (GET_CODE (x) == ADDRESSOF && REG_P (XEXP ((x), 0)))
3881 return -1;
3882
3883 /* We may be asked for the cost of various unusual addresses, such as the
3884 operands of a push instruction. It is not worthwhile to complicate the
3885 target hook for such cases. */
3886
3887 if (!memory_address_p (mode, x))
3888 return 1000;
3889
3890 return targetm.address_cost (x);
3891 }
3892
3893 /* If the target doesn't override, compute the cost as with arithmetic. */
3894
3895 int
3896 default_address_cost (rtx x)
3897 {
3898 return rtx_cost (x, MEM);
3899 }