]> gcc.gnu.org Git - gcc.git/blob - gcc/config/pyr/pyr.c
entered into RCS
[gcc.git] / gcc / config / pyr / pyr.c
1 /* Subroutines for insn-output.c for Pyramid 90x, 9000, and MIServer Series.
2 Copyright (C) 1989, 1991 Free Software Foundation, Inc.
3
4 This file is part of GNU CC.
5
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
19
20 /* Some output-actions in pyr.md need these. */
21 #include <stdio.h>
22 #include "config.h"
23 #include "rtl.h"
24 #include "regs.h"
25 #include "hard-reg-set.h"
26 #include "real.h"
27 #include "insn-config.h"
28 #include "conditions.h"
29 #include "insn-flags.h"
30 #include "output.h"
31 #include "insn-attr.h"
32 #include "tree.h"
33
34 /*
35 * Do FUNCTION_ARG.
36 * This cannot be defined as a macro on pyramids, because Pyramid Technology's
37 * C compiler dies on (several equivalent definitions of) this macro.
38 * The only way around this cc bug was to make this a function.
39 * While it would be possible to use a macro version for gcc, it seems
40 * more reliable to have a single version of the code.
41 */
42 void *
43 pyr_function_arg(cum, mode, type, named)
44 CUMULATIVE_ARGS cum;
45 enum machine_mode mode;
46 tree type;
47 {
48 return (void *)(FUNCTION_ARG_HELPER (cum, mode,type,named));
49 }
50 \f
51 /* Do the hard part of PARAM_SAFE_FOR_REG_P.
52 * This cannot be defined as a macro on pyramids, because Pyramid Technology's
53 * C compiler dies on (several equivalent definitions of) this macro.
54 * The only way around this cc bug was to make this a function.
55 */
56 int
57 inner_param_safe_helper (type)
58 tree type;
59 {
60 return (INNER_PARAM_SAFE_HELPER(type));
61 }
62 \f
63
64 /* Return 1 if OP is a non-indexed operand of mode MODE.
65 This is either a register reference, a memory reference,
66 or a constant. In the case of a memory reference, the address
67 is checked to make sure it isn't indexed.
68
69 Register and memory references must have mode MODE in order to be valid,
70 but some constants have no machine mode and are valid for any mode.
71
72 If MODE is VOIDmode, OP is checked for validity for whatever mode
73 it has.
74
75 The main use of this function is as a predicate in match_operand
76 expressions in the machine description.
77
78 It is useful to compare this with general_operand(). They should
79 be identical except for one line.
80
81 This function seems necessary because of the non-orthogonality of
82 Pyramid insns.
83 For any 2-operand insn, and any combination of operand modes,
84 if indexing is valid for the isn's second operand, it is invalid
85 for the first operand to be indexed. */
86
87 extern int volatile_ok;
88
int
nonindexed_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  register RTX_CODE code = GET_CODE (op);
  /* Set when a stripped SUBREG could leave a mode-dependent address;
     currently never set — see the #if 0 below.  */
  int mode_altering_drug = 0;

  if (mode == VOIDmode)
    mode = GET_MODE (op);

  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT)
    return 0;

  /* Constants: mode must match (or be VOIDmode) and the constant must
     be legitimate for this target.  */
  if (CONSTANT_P (op))
    return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode)
	    && LEGITIMATE_CONSTANT_P (op));

  /* Except for certain constants with VOIDmode, already checked for,
     OP's mode must match MODE if MODE specifies a mode.  */

  if (GET_MODE (op) != mode)
    return 0;

  /* Strip (possibly nested) SUBREGs and classify what they contain.  */
  while (code == SUBREG)
    {
      op = SUBREG_REG (op);
      code = GET_CODE (op);
#if 0
      /* No longer needed, since (SUBREG (MEM...))
	 will load the MEM into a reload reg in the MEM's own mode.  */
      mode_altering_drug = 1;
#endif
    }
  if (code == REG)
    return 1;
  if (code == CONST_DOUBLE)
    return LEGITIMATE_CONSTANT_P (op);
  if (code == MEM)
    {
      register rtx y = XEXP (op, 0);
      if (! volatile_ok && MEM_VOLATILE_P (op))
	return 0;
      /* Target macro: jumps to `win' if Y is a valid non-indexed
	 address; otherwise falls through to the failure return.  */
      GO_IF_NONINDEXED_ADDRESS (y, win);
    }
  return 0;

 win:
  if (mode_altering_drug)
    return ! mode_dependent_address_p (XEXP (op, 0));
  return 1;
}
144
145 /* Return non-zero if the rtx OP has an immediate component. An
146 immediate component or additive term equal to zero is rejected
147 due to assembler problems. */
148
149 int
150 has_direct_base (op)
151 rtx op;
152 {
153 if ((CONSTANT_ADDRESS_P (op)
154 && op != const0_rtx)
155 || (GET_CODE (op) == PLUS
156 && ((CONSTANT_ADDRESS_P (XEXP (op, 1))
157 && XEXP (op, 1) != const0_rtx)
158 || (CONSTANT_ADDRESS_P (XEXP (op, 0))
159 && XEXP (op, 0) != const0_rtx))))
160 return 1;
161
162 return 0;
163 }
164
165 /* Return zero if the rtx OP has a (scaled) index. */
166
167 int
168 has_index (op)
169 rtx op;
170 {
171 if (GET_CODE (op) == PLUS
172 && (GET_CODE (XEXP (op, 0)) == MULT
173 || (GET_CODE (XEXP (op, 1)) == MULT)))
174 return 1;
175 else
176 return 0;
177 }
178
179 int swap_operands;
180
181 /* weird_memory_memory -- return 1 if OP1 and OP2 can be compared (or
182 exchanged with xchw) with one instruction. If the operands need to
183 be swapped, set the global variable SWAP_OPERANDS. This function
184 silently assumes that both OP0 and OP1 are valid memory references.
185 */
186
187 int
188 weird_memory_memory (op0, op1)
189 rtx op0, op1;
190 {
191 RTX_CODE code0, code1;
192
193 op0 = XEXP (op0, 0);
194 op1 = XEXP (op1, 0);
195 code0 = GET_CODE (op0);
196 code1 = GET_CODE (op1);
197
198 swap_operands = 0;
199
200 if (code1 == REG || code1 == SUBREG)
201 {
202 return 1;
203 }
204 if (code0 == REG || code0 == SUBREG)
205 {
206 swap_operands = 1;
207 return 1;
208 }
209 if (has_direct_base (op0) && has_direct_base (op1))
210 {
211 if (has_index (op1))
212 {
213 if (has_index (op0))
214 return 0;
215 swap_operands = 1;
216 }
217
218 return 1;
219 }
220 return 0;
221 }
222
223 int
224 signed_comparison (x, mode)
225 rtx x;
226 enum machine_mode mode;
227 {
228 return ! TRULY_UNSIGNED_COMPARE_P (GET_CODE (x));
229 }
230
extern rtx force_reg ();
/* Deferred compare/test operands and their mode; set by patterns in
   pyr.md and consumed by extend_and_branch below.  */
rtx test_op0, test_op1;
enum machine_mode test_mode;

/* Sign-extend or zero-extend constant X from FROM_MODE to TO_MODE.  */

rtx
extend_const (x, extop, from_mode, to_mode)
     rtx x;
     RTX_CODE extop;
     enum machine_mode from_mode, to_mode;
{
  int val;
  int negative;
  if (from_mode == to_mode)
    return x;
  /* Only CONST_INT can be extended at compile time.  */
  if (GET_CODE (x) != CONST_INT)
    abort ();
  val = INTVAL (x);
  /* Sign bit of X when viewed in FROM_MODE.  */
  negative = val & (1 << (GET_MODE_BITSIZE (from_mode) - 1));
  /* A full-host-width FROM_MODE would make the shifts below shift by
     the whole word width, which is not defined.  */
  if (GET_MODE_BITSIZE (from_mode) == HOST_BITS_PER_INT)
    abort ();
  /* NOTE(review): (-1) << n assumes two's-complement host arithmetic;
     left-shifting a negative value is undefined in modern C — confirm
     acceptable for the intended host compilers.  */
  if (negative && extop == SIGN_EXTEND)
    val = val | ((-1) << (GET_MODE_BITSIZE (from_mode)));
  else
    val = val & ~((-1) << (GET_MODE_BITSIZE (from_mode)));
  /* Truncate the result to TO_MODE's width unless TO_MODE fills the
     host word (in which case no truncation is needed or possible).  */
  if (GET_MODE_BITSIZE (to_mode) == HOST_BITS_PER_INT)
    return gen_rtx (CONST_INT, VOIDmode, val);
  return gen_rtx (CONST_INT, VOIDmode,
		  val & ~((-1) << (GET_MODE_BITSIZE (to_mode))));
}
262
263 rtx
264 ensure_extended (op, extop, from_mode)
265 rtx op;
266 RTX_CODE extop;
267 enum machine_mode from_mode;
268 {
269 if (GET_CODE (op) == CONST_INT)
270 return extend_const (op, extop, from_mode, SImode);
271 else
272 return force_reg (SImode, gen_rtx (extop, SImode, op));
273 }
274
/* Emit rtl for a branch, as well as any delayed (integer) compare insns.
   The compare insn to perform is determined by the global variables
   test_op0 and test_op1.  */

void
extend_and_branch (extop)
     RTX_CODE extop;
{
  rtx op0, op1;
  RTX_CODE code0, code1;

  /* Consume the deferred operands; clearing them below makes any
     subsequent call a no-op until new operands are posted.  */
  op0 = test_op0, op1 = test_op1;
  if (op0 == 0)
    return;

  code0 = GET_CODE (op0);
  /* code1 is only set — and only used — when op1 is non-null.  */
  if (op1 != 0)
    code1 = GET_CODE (op1);
  test_op0 = test_op1 = 0;

  if (op1 == 0)
    {
      /* A one-operand test: extend it and set cc0 directly.  */
      op0 = ensure_extended (op0, extop, test_mode);
      emit_insn (gen_rtx (SET, VOIDmode, cc0_rtx, op0));
    }
  else
    {
      /* A two-operand compare: decide which operands need widening
	 to SImode before emitting (set cc0 (compare op0 op1)).  */
      if (CONSTANT_P (op0) && CONSTANT_P (op1))
	{
	  op0 = ensure_extended (op0, extop, test_mode);
	  op1 = ensure_extended (op1, extop, test_mode);
	}
      else if (extop == ZERO_EXTEND && test_mode == HImode)
	{
	  /* Pyramids have no unsigned "cmphi" instructions.  We need to
	     zero extend unsigned halfwords into temporary registers.  */
	  op0 = ensure_extended (op0, extop, test_mode);
	  op1 = ensure_extended (op1, extop, test_mode);
	}
      else if (CONSTANT_P (op0))
	{
	  op0 = ensure_extended (op0, extop, test_mode);
	  op1 = ensure_extended (op1, extop, test_mode);
	}
      else if (CONSTANT_P (op1))
	{
	  op1 = ensure_extended (op1, extop, test_mode);
	  op0 = ensure_extended (op0, extop, test_mode);
	}
      else if ((code0 == REG || code0 == SUBREG)
	       && (code1 == REG || code1 == SUBREG))
	{
	  /* I could do this case without extension, by using the virtual
	     register address (but that would lose for global regs).  */
	  op0 = ensure_extended (op0, extop, test_mode);
	  op1 = ensure_extended (op1, extop, test_mode);
	}
      else if (code0 == MEM && code1 == MEM)
	{
	  /* Load into a reg if the address combination can't be handled
	     directly.  */
	  if (! weird_memory_memory (op0, op1))
	    op0 = force_reg (test_mode, op0);
	}

      emit_insn (gen_rtx (SET, VOIDmode, cc0_rtx,
			  gen_rtx (COMPARE, VOIDmode, op0, op1)));
    }
}
344
/* Return non-zero if the two single-word moves with operands[0]
   and operands[1] for the first single-word move, and operands[2]
   and operands[3] for the second single-word move, is possible to
   combine to a double word move.

   The criterion is whether the operands are in consecutive memory cells,
   registers, etc.  */

int
movdi_possible (operands)
     rtx operands[];
{
  int cnst_diff0, cnst_diff1;
  RTX_CODE code0 = GET_CODE (operands[0]);
  RTX_CODE code1 = GET_CODE (operands[1]);

  /* Don't dare to combine (possibly overlapping) memory -> memory moves.  */
  /* It would be possible to detect the cases where we dare, by using
     constant_diff (operands[0], operands[1])!!!  */
  if (code0 == MEM && code1 == MEM)
    return 0;

  /* consecutive_operands returns a small bitmask: 1 when its first
     argument is the higher-addressed of the pair, 2 when the lower,
     3 when either order works (see consecutive_operands).  */
  cnst_diff0 = consecutive_operands (operands[0], operands[2]);
  if (cnst_diff0 == 0)
    return 0;

  cnst_diff1 = consecutive_operands (operands[1], operands[3]);
  if (cnst_diff1 == 0)
    return 0;

  /* Destinations and sources must agree on at least one ordering.  */
  if (cnst_diff0 & cnst_diff1)
    {
      /* The source and destination operands are consecutive.  */

      /* If the first move writes into the source of the second move,
	 we cannot combine.  */
      if ((code0 == REG
	   && reg_overlap_mentioned_p (operands[0], operands[3]))
	  || (code0 == SUBREG
	      && subreg_overlap_mentioned_p (operands[0], operands[3])))
	return 0;

      if (cnst_diff0 & 1)
	/* operands[0],[1] has higher addresses than operands[2],[3].  */
	swap_operands = 0;
      else
	/* operands[0],[1] has lower addresses than operands[2],[3].  */
	swap_operands = 1;
      return 1;
    }
  return 0;
}
397
398 /* Like reg_overlap_mentioned_p, but accepts a subreg rtx instead
399 of a reg. */
400
401 int
402 subreg_overlap_mentioned_p (subreg, x)
403 rtx subreg, x;
404 {
405 rtx reg = SUBREG_REG (subreg);
406 int regno = REGNO (reg) + SUBREG_WORD (subreg);
407 int endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (subreg));
408 return refers_to_regno_p (regno, endregno, x, 0);
409 }
410
411 /* Return 1 if OP0 is a consecutive operand to OP1, 2 if OP1 is a
412 consecutive operand to OP0.
413
414 This function is used to determine if addresses are consecutive,
415 and therefore possible to combine to fewer instructions. */
416
417 int
418 consecutive_operands (op0, op1)
419 rtx op0, op1;
420 {
421 RTX_CODE code0, code1;
422 int cnst_diff;
423 int regno_off0, regno_off1;
424
425 code0 = GET_CODE (op0);
426 code1 = GET_CODE (op1);
427
428 regno_off0 = 0;
429 if (code0 == SUBREG)
430 {
431 if (GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0))) <= UNITS_PER_WORD)
432 return 0;
433 regno_off0 = SUBREG_WORD (op0);
434 op0 = SUBREG_REG (op0);
435 code0 = REG;
436 }
437
438 regno_off1 = 0;
439 if (code1 == SUBREG)
440 {
441 if (GET_MODE_SIZE (GET_MODE (SUBREG_REG (op1))) <= UNITS_PER_WORD)
442 return 0;
443 regno_off1 = SUBREG_WORD (op1);
444 op1 = SUBREG_REG (op1);
445 code1 = REG;
446 }
447
448 if (code0 != code1)
449 return 0;
450
451 switch (code0)
452 {
453 case CONST_INT:
454 /* Cannot permit any symbolic constants, even if the consecutive
455 operand is 0, since a movl really performs sign extension. */
456 if (code1 != CONST_INT)
457 return 0;
458 if ((INTVAL (op0) == 0 && INTVAL (op1) == 0)
459 || (INTVAL (op0) == -1 && INTVAL (op1) == -1))
460 return 3;
461 if ((INTVAL (op0) == 0 && INTVAL (op1) > 0)
462 || (INTVAL (op0) == -1 && INTVAL (op1) < 0))
463 return 2;
464 if ((INTVAL (op1) == 0 && INTVAL (op0) > 0)
465 || (INTVAL (op1) == -1 && INTVAL (op0) < 0))
466 return 1;
467 break;
468
469 case REG:
470 regno_off0 = REGNO (op0) + regno_off0;
471 regno_off1 = REGNO (op1) + regno_off1;
472
473 cnst_diff = regno_off0 - regno_off1;
474 if (cnst_diff == 1)
475 {
476 /* movl with the highest numbered parameter (local) register as
477 source or destination, doesn't wrap to the lowest numbered local
478 (temporary) register. */
479
480 if (regno_off0 % 16 != 0)
481 return 1;
482 else
483 return 0;
484 }
485 else if (cnst_diff == -1)
486 {
487 if (regno_off1 % 16 != 0)
488 return 2;
489 else
490 return 0;
491 }
492 break;
493
494 case MEM:
495 op0 = XEXP (op0, 0);
496 op1 = XEXP (op1, 0);
497 if (GET_CODE (op0) == CONST)
498 op0 = XEXP (op0, 0);
499 if (GET_CODE (op1) == CONST)
500 op1 = XEXP (op1, 0);
501
502 cnst_diff = constant_diff (op0, op1);
503 if (cnst_diff)
504 {
505 if (cnst_diff == 4)
506 return 1;
507 else if (cnst_diff == -4)
508 return 2;
509 }
510 break;
511 }
512 return 0;
513 }
514
515 /* Return the constant difference of the rtx expressions OP0 and OP1,
516 or 0 if they don't have a constant difference.
517
518 This function is used to determine if addresses are consecutive,
519 and therefore possible to combine to fewer instructions. */
520
521 int
522 constant_diff (op0, op1)
523 rtx op0, op1;
524 {
525 RTX_CODE code0, code1;
526 int cnst_diff;
527
528 code0 = GET_CODE (op0);
529 code1 = GET_CODE (op1);
530
531 if (code0 != code1)
532 {
533 if (code0 == PLUS)
534 {
535 if (GET_CODE (XEXP (op0, 1)) == CONST_INT
536 && rtx_equal_p (op1, XEXP (op0, 0)))
537 return INTVAL (XEXP (op0, 1));
538 }
539 else if (code1 == PLUS)
540 {
541 if (GET_CODE (XEXP (op1, 1)) == CONST_INT
542 && rtx_equal_p (op0, XEXP (op1, 0)))
543 return -INTVAL (XEXP (op1, 1));
544 }
545 return 0;
546 }
547
548 if (code0 == CONST_INT)
549 return INTVAL (op0) - INTVAL (op1);
550
551 if (code0 == PLUS)
552 {
553 cnst_diff = constant_diff (XEXP (op0, 0), XEXP (op1, 0));
554 if (cnst_diff)
555 return (rtx_equal_p (XEXP (op0, 1), XEXP (op1, 1)))
556 ? cnst_diff : 0;
557 cnst_diff = constant_diff (XEXP (op0, 1), XEXP (op1, 1));
558 if (cnst_diff)
559 return (rtx_equal_p (XEXP (op0, 0), XEXP (op1, 0)))
560 ? cnst_diff : 0;
561 }
562
563 return 0;
564 }
565
/* Return non-zero if register OP already holds a value sign-extended
   from FROM_MODE at the point of INSN, determined by scanning
   backwards for the insn that last set OP.  */

int
already_sign_extended (insn, from_mode, op)
     rtx insn;
     enum machine_mode from_mode;
     rtx op;
{
  rtx xinsn, xdest, xsrc;

  for (;;)
    {
      insn = PREV_INSN (insn);
      /* Reached the start of the insn chain without finding a set.  */
      if (insn == 0)
	return 0;
      /* Notes and jumps cannot change register contents; skip them.
	 NOTE(review): a CODE_LABEL is not skipped — it fails the
	 GET_CODE (insn) != INSN test below, conservatively stopping
	 the scan where other control flow could join.  */
      if (GET_CODE (insn) == NOTE || GET_CODE (insn) == JUMP_INSN)
	continue;
      /* A call cannot clobber a call-saved register, so the scan may
	 continue past it in that case.  */
      if (GET_CODE (insn) == CALL_INSN && ! call_used_regs[REGNO (op)])
	continue;
      if (GET_CODE (insn) != INSN)
	return 0;
      xinsn = PATTERN (insn);

      /* Anything but a simple SET (parallel, clobber, asm) is treated
	 conservatively.  */
      if (GET_CODE (xinsn) != SET)
	return 0;

      xdest = SET_DEST (xinsn);
      xsrc = SET_SRC (xinsn);

      if (GET_CODE (xdest) == SUBREG)
	abort ();

      if ( ! REG_P (xdest))
	continue;

      /* Found a set of OP: it qualifies if it was an explicit
	 sign-extension from FROM_MODE, or a FROM_MODE memory load —
	 presumably loads sign-extend on this machine; TODO confirm
	 against the md patterns.  */
      if (REGNO (op) == REGNO (xdest)
	  && ((GET_CODE (xsrc) == SIGN_EXTEND
	       && GET_MODE (XEXP (xsrc, 0)) == from_mode)
	      || (GET_CODE (xsrc) == MEM
		  && GET_MODE (xsrc) == from_mode)))
	return 1;

      /* The register is modified by another operation.  */
      if (reg_overlap_mentioned_p (xdest, op))
	return 0;
    }
}
611
/* Return the assembler template for a double-word move from
   operands[1] to operands[0].  May emit the first half itself via
   output_asm_insn and rewrites operands[] in place to do so.  */

char *
output_move_double (operands)
     rtx *operands;
{
  if (GET_CODE (operands[1]) == CONST_DOUBLE)
    {
      if (GET_MODE_CLASS (GET_MODE (operands[1])) == MODE_INT)
	{
	  /* In an integer, the low-order word is in CONST_DOUBLE_LOW.  */
	  rtx const_op = operands[1];
	  /* If the constant is merely a sign-extended single word,
	     one "movl" (which sign-extends) loads both halves.  */
	  if ((CONST_DOUBLE_HIGH (const_op) == 0
	       && CONST_DOUBLE_LOW (const_op) >= 0)
	      || (CONST_DOUBLE_HIGH (const_op) == -1
		  && CONST_DOUBLE_LOW (const_op) < 0))
	    {
	      operands[1] = gen_rtx (CONST_INT, VOIDmode,
				     CONST_DOUBLE_LOW (const_op));
	      return "movl %1,%0";
	    }
	  /* Otherwise emit two "movw"s: high word now, low word into
	     the next higher-numbered register via the returned
	     template.  */
	  operands[1] = gen_rtx (CONST_INT, VOIDmode,
				 CONST_DOUBLE_HIGH (const_op));
	  output_asm_insn ("movw %1,%0", operands);
	  operands[0] = gen_rtx (REG, SImode, REGNO (operands[0]) + 1);
	  operands[1] = gen_rtx (CONST_INT, VOIDmode,
				 CONST_DOUBLE_LOW (const_op));
	  return "movw %1,%0";
	}
      else
	{
	  /* In a real, the low-address word is in CONST_DOUBLE_LOW.  */
	  rtx const_op = operands[1];
	  if ((CONST_DOUBLE_LOW (const_op) == 0
	       && CONST_DOUBLE_HIGH (const_op) >= 0)
	      || (CONST_DOUBLE_LOW (const_op) == -1
		  && CONST_DOUBLE_HIGH (const_op) < 0))
	    {
	      operands[1] = gen_rtx (CONST_INT, VOIDmode,
				     CONST_DOUBLE_HIGH (const_op));
	      return "movl %1,%0";
	    }
	  /* Two "movw"s, low-address word first (opposite word order
	     from the integer case).  */
	  operands[1] = gen_rtx (CONST_INT, VOIDmode,
				 CONST_DOUBLE_LOW (const_op));
	  output_asm_insn ("movw %1,%0", operands);
	  operands[0] = gen_rtx (REG, SImode, REGNO (operands[0]) + 1);
	  operands[1] = gen_rtx (CONST_INT, VOIDmode,
				 CONST_DOUBLE_HIGH (const_op));
	  return "movw %1,%0";
	}
    }

  /* Any non-CONST_DOUBLE source is handled by a single "movl".  */
  return "movl %1,%0";
}
664
665 /* Output a shift insns, after having reduced integer arguments to
666 avoid as warnings. */
667
668 char *
669 output_shift (pattern, op2, mod)
670 char *pattern;
671 rtx op2;
672 int mod;
673 {
674 if (GET_CODE (op2) == CONST_INT)
675 {
676 int cnt = INTVAL (op2) % mod;
677 if (cnt == 0)
678 {
679 cc_status = cc_prev_status;
680 return "";
681 }
682 op2 = gen_rtx (CONST_INT, VOIDmode, cnt);
683 }
684 return pattern;
685 }
686
687 /* Return non-zero if the code of this rtx pattern is a relop. */
688
689 int
690 relop (op, mode)
691 rtx op;
692 enum machine_mode mode;
693 {
694 switch (GET_CODE (op))
695 {
696 case EQ:
697 case NE:
698 case LT:
699 case LE:
700 case GE:
701 case GT:
702 case LTU:
703 case LEU:
704 case GEU:
705 case GTU:
706 return 1;
707 }
708 return 0;
709 }
710
711 void
712 notice_update_cc (EXP, INSN)
713 rtx EXP, INSN;
714 {
715 switch (GET_CODE (EXP))
716 {
717 case SET:
718 switch (GET_CODE (SET_DEST (EXP)))
719 {
720 case CC0:
721 cc_status.mdep = 0;
722 cc_status.flags = 0;
723 cc_status.value1 = 0;
724 cc_status.value2 = SET_SRC (EXP);
725 break;
726
727 case PC:
728 break;
729
730 case REG:
731 switch (GET_CODE (SET_SRC (EXP)))
732 {
733 case CALL:
734 goto call;
735 case MEM:
736 if (GET_MODE (SET_SRC (EXP)) == QImode
737 || GET_MODE (SET_SRC (EXP)) == HImode)
738 {
739 cc_status.mdep = 0;
740 cc_status.flags = CC_NO_OVERFLOW;
741 cc_status.value1 = SET_DEST (EXP);
742 cc_status.value2 = SET_SRC (EXP);
743 break;
744 }
745 /* else: Fall through. */
746 case CONST_INT:
747 case SYMBOL_REF:
748 case LABEL_REF:
749 case CONST:
750 case CONST_DOUBLE:
751 case REG:
752 if (cc_status.value1
753 && reg_overlap_mentioned_p (SET_DEST (EXP),
754 cc_status.value1))
755 cc_status.value1 = 0;
756 if (cc_status.value2
757 && reg_overlap_mentioned_p (SET_DEST (EXP),
758 cc_status.value2))
759 cc_status.value2 = 0;
760 break;
761
762 case UDIV:
763 case UMOD:
764 cc_status.mdep = CC_VALID_FOR_UNSIGNED;
765 cc_status.flags = CC_NO_OVERFLOW;
766 cc_status.value1 = SET_DEST (EXP);
767 cc_status.value2 = SET_SRC (EXP);
768 break;
769 default:
770 cc_status.mdep = 0;
771 cc_status.flags = CC_NO_OVERFLOW;
772 cc_status.value1 = SET_DEST (EXP);
773 cc_status.value2 = SET_SRC (EXP);
774 break;
775 }
776 break;
777
778 case MEM:
779 switch (GET_CODE (SET_SRC (EXP)))
780 {
781 case REG:
782 if (GET_MODE (SET_SRC (EXP)) == QImode
783 || GET_MODE (SET_SRC (EXP)) == HImode)
784 {
785 cc_status.flags = CC_NO_OVERFLOW;
786 cc_status.value1 = SET_DEST (EXP);
787 cc_status.value2 = SET_SRC (EXP);
788 cc_status.mdep = 0;
789 break;
790 }
791 /* else: Fall through. */
792 case CONST_INT:
793 case SYMBOL_REF:
794 case LABEL_REF:
795 case CONST:
796 case CONST_DOUBLE:
797 case MEM:
798 /* Need to forget cc_status about memory positions each
799 time a memory store is made, even if the memory store
800 insns in question doesn't modify the condition codes. */
801 if (cc_status.value1 &&
802 GET_CODE (cc_status.value1) == MEM)
803 cc_status.value1 = 0;
804 if (cc_status.value2 &&
805 GET_CODE (cc_status.value2) == MEM)
806 cc_status.value2 = 0;
807 break;
808 case SIGN_EXTEND:
809 case FLOAT_EXTEND:
810 case FLOAT_TRUNCATE:
811 case FLOAT:
812 case FIX:
813 cc_status.flags = CC_NO_OVERFLOW;
814 cc_status.value1 = SET_DEST (EXP);
815 cc_status.value2 = SET_SRC (EXP);
816 cc_status.mdep = 0;
817 break;
818
819 default:
820 abort ();
821 }
822 break;
823
824 default:
825 abort ();
826 }
827 break;
828
829 case CALL:
830 call:
831 CC_STATUS_INIT;
832 break;
833 /* Do calls preserve the condition codes? (At least forget
834 cc_status expressions if they refer to registers
835 not preserved across calls. Also forget expressions
836 about memory contents.) */
837 if (cc_status.value1
838 && (refers_to_regno_p (PYR_TREG (0), PYR_TREG (15),
839 cc_status.value1, 0)
840 || GET_CODE (cc_status.value1) == MEM))
841 cc_status.value1 = 0;
842 if (cc_status.value2
843 && (refers_to_regno_p (PYR_TREG (0), PYR_TREG (15),
844 cc_status.value2, 0)
845 || GET_CODE (cc_status.value2) == MEM))
846 cc_status.value2 = 0;
847 break;
848
849 default:
850 CC_STATUS_INIT;
851 }
852 }
853
854 void
855 forget_cc_if_dependent (op)
856 rtx op;
857 {
858 cc_status = cc_prev_status;
859 if (cc_status.value1 && reg_overlap_mentioned_p (op, cc_status.value1))
860 cc_status.value1 = 0;
861 if (cc_status.value2 && reg_overlap_mentioned_p (op, cc_status.value2))
862 cc_status.value2 = 0;
863 }
This page took 0.072206 seconds and 5 git commands to generate.