;; GNU C machine description for Pyramid 90x, 9000, MIServer Series.
;; Copyright (C) 1989, 1990, 1995, 1997 Free Software Foundation, Inc.
;; This file is part of GNU CC.
;; GNU CC is free software; you can redistribute it and/or modify
;; it under the terms of the GNU General Public License as published by
;; the Free Software Foundation; either version 2, or (at your option)
;; any later version.
;; GNU CC is distributed in the hope that it will be useful,
;; but WITHOUT ANY WARRANTY; without even the implied warranty of
;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
;; GNU General Public License for more details.
;; You should have received a copy of the GNU General Public License
;; along with GNU CC; see the file COPYING.  If not, write to
;; the Free Software Foundation, 59 Temple Place - Suite 330,
;; Boston, MA 02111-1307, USA.
21 ;; Instruction patterns. When multiple patterns apply,
22 ;; the first one in the file is chosen.
24 ;; See file "rtl.def" for documentation on define_insn, match_*, et. al.
26 ;; cpp macro #define NOTICE_UPDATE_CC in file tm.h handles condition code
27 ;; updates for most instructions.
;; These comments are mostly obsolete.  Written for gcc version 1.XX.
30 ;; * Try using define_insn instead of some peepholes in more places.
31 ;; * Set REG_NOTES:REG_EQUIV for cvt[bh]w loads. This would make the
32 ;; backward scan in sign_extend needless.
33 ;; * Match (pc) (label_ref) case in peephole patterns.
35 ;; "cmpX op1,op2; b{eq,ne} LY; ucmpX op1.op2; b{lt,le,gt,ge} LZ"
37 ;; "ucmpX op1,op2; b{eq,ne} LY; b{lt,le,gt,ge} LZ"
38 ;; by pre-scanning insn and running notice_update_cc for them.
39 ;; * Is it necessary to do copy_rtx in the test and compare patterns?
40 ;; * Fix true frame pointer omission.
41 ;; * Make the jump tables contain branches, not addresses! This would
42 ;; save us one instruction.
43 ;; * Could the complicated scheme for compares be simplified, if we had
44 ;; no named cmpqi or cmphi patterns, and instead anonymous patterns for
45 ;; the less-than-word compare cases pyr can handle???
46 ;; * The jump insn seems to accept more than just IR addressing. Would
47 ;; we win by telling GCC? Or can we use movw into the global reg which
48 ;; is a synonym for pc?
49 ;; * More DImode patterns.
50 ;; * Scan backwards in "zero_extendhisi2", "zero_extendqisi2" to find out
51 ;; if the extension can be omitted.
52 ;; * "divmodsi" with Pyramid "ediv" insn. Is it possible in rtl??
53 ;; * Would "rcsp tmpreg; u?cmp[bh] op1_regdispl(tmpreg),op2" win in
54 ;; comparison with the two extensions and single test generated now?
55 ;; The rcsp insn could be expanded, and moved out of loops by the
;; optimizer, making 1 (64 bit) insn of 3 (32 bit) insns in loops.
57 ;; The rcsp insn could be followed by an add insn, making non-displacement
58 ;; IR addressing sufficient.
60 ;______________________________________________________________________
62 ; Test and Compare Patterns.
63 ;______________________________________________________________________
; The argument for the rather complicated test and compare expansion
; scheme is the irregular Pyramid instructions for these operations.
; 1) Pyramid has different signed and unsigned compares.  2) HImode
; and QImode integers are memory-memory and immediate-memory only.
; 3) Unsigned HImode compares don't exist.  4) Only certain
; combinations of addresses are allowed for memory-memory compares.
71 ; Whenever necessary, in order to fulfill these addressing
72 ; constraints, the compare operands are swapped.
74 (define_expand "tstsi"
76 (match_operand:SI
0 "general_operand" ""))]
77 "" "operands[
0] = force_reg (SImode, operands[
0]);")
81 (compare (match_operand:SI
0 "memory_operand" "m")
82 (match_operand:SI
1 "memory_operand" "m")))]
83 "weird_memory_memory (operands[
0], operands[
1])"
86 rtx br_insn = NEXT_INSN (insn);
89 if (GET_CODE (br_insn) != JUMP_INSN)
91 br_code = GET_CODE (XEXP (XEXP (PATTERN (br_insn),
1),
0));
93 weird_memory_memory (operands[
0], operands[
1]);
97 cc_status.flags = CC_REVERSED;
98 if (TRULY_UNSIGNED_COMPARE_P (br_code))
100 cc_status.mdep = CC_VALID_FOR_UNSIGNED;
101 return
\"ucmpw %
0,%
1\";
103 return
\"cmpw %
0,%
1\";
106 if (TRULY_UNSIGNED_COMPARE_P (br_code))
108 cc_status.mdep = CC_VALID_FOR_UNSIGNED;
109 return
\"ucmpw %
1,%
0\";
111 return
\"cmpw %
1,%
0\";
116 (compare (match_operand:SI
0 "nonimmediate_operand" "r,g")
117 (match_operand:SI
1 "general_operand" "g,r")))]
121 rtx br_insn = NEXT_INSN (insn);
124 if (GET_CODE (br_insn) != JUMP_INSN)
126 br_code = GET_CODE (XEXP (XEXP (PATTERN (br_insn),
1),
0));
128 if (which_alternative !=
0)
130 cc_status.flags = CC_REVERSED;
131 if (TRULY_UNSIGNED_COMPARE_P (br_code))
133 cc_status.mdep = CC_VALID_FOR_UNSIGNED;
134 return
\"ucmpw %
0,%
1\";
136 return
\"cmpw %
0,%
1\";
139 if (TRULY_UNSIGNED_COMPARE_P (br_code))
141 cc_status.mdep = CC_VALID_FOR_UNSIGNED;
142 return
\"ucmpw %
1,%
0\";
144 return
\"cmpw %
1,%
0\";
149 (match_operand:SI
0 "nonimmediate_operand" "r"))]
154 cc_status.flags |= CC_NO_OVERFLOW;
155 return
\"cmpw $
0,%
0\";
157 rtx br_insn = NEXT_INSN (insn);
160 if (GET_CODE (br_insn) != JUMP_INSN)
162 br_code = GET_CODE (XEXP (XEXP (PATTERN (br_insn),
1),
0));
164 if (TRULY_UNSIGNED_COMPARE_P (br_code))
166 cc_status.mdep = CC_VALID_FOR_UNSIGNED;
167 return
\"ucmpw $
0,%
0\";
169 return
\"mtstw %
0,%
0\";
172 (define_expand "cmphi"
174 (compare (match_operand:HI
0 "nonimmediate_operand" "")
175 (match_operand:HI
1 "general_operand" "")))]
179 extern rtx test_op0, test_op1; extern enum machine_mode test_mode;
180 test_op0 = copy_rtx (operands[
0]);
181 test_op1 = copy_rtx (operands[
1]);
186 (define_expand "tsthi"
188 (match_operand:HI
0 "nonimmediate_operand" ""))]
192 extern rtx test_op0; extern enum machine_mode test_mode;
193 test_op0 = copy_rtx (operands[
0]);
200 (compare (match_operand:HI
0 "memory_operand" "m")
201 (match_operand:HI
1 "memory_operand" "m")))]
202 "(!TRULY_UNSIGNED_COMPARE_P (GET_CODE (XEXP (SET_SRC (PATTERN (NEXT_INSN (insn))),
0))))
203 && weird_memory_memory (operands[
0], operands[
1])"
206 rtx br_insn = NEXT_INSN (insn);
208 if (GET_CODE (br_insn) != JUMP_INSN)
211 weird_memory_memory (operands[
0], operands[
1]);
215 cc_status.flags = CC_REVERSED;
216 return
\"cmph %
0,%
1\";
219 return
\"cmph %
1,%
0\";
224 (compare (match_operand:HI
0 "nonimmediate_operand" "r,m")
225 (match_operand:HI
1 "nonimmediate_operand" "m,r")))]
226 "(!TRULY_UNSIGNED_COMPARE_P (GET_CODE (XEXP (SET_SRC (PATTERN (NEXT_INSN (insn))),
0))))
227 && ((GET_CODE (operands[
0]) == MEM) != (GET_CODE (operands[
1]) == MEM))"
230 rtx br_insn = NEXT_INSN (insn);
232 if (GET_CODE (br_insn) != JUMP_INSN)
235 if (which_alternative !=
0)
237 cc_status.flags = CC_REVERSED;
238 return
\"cmph %
0,%
1\";
241 return
\"cmph %
1,%
0\";
244 (define_expand "cmpqi"
246 (compare (match_operand:QI
0 "nonimmediate_operand" "")
247 (match_operand:QI
1 "general_operand" "")))]
251 extern rtx test_op0, test_op1; extern enum machine_mode test_mode;
252 test_op0 = copy_rtx (operands[
0]);
253 test_op1 = copy_rtx (operands[
1]);
258 (define_expand "tstqi"
260 (match_operand:QI
0 "nonimmediate_operand" ""))]
264 extern rtx test_op0; extern enum machine_mode test_mode;
265 test_op0 = copy_rtx (operands[
0]);
272 (compare (match_operand:QI
0 "memory_operand" "m")
273 (match_operand:QI
1 "memory_operand" "m")))]
274 "weird_memory_memory (operands[
0], operands[
1])"
277 rtx br_insn = NEXT_INSN (insn);
280 if (GET_CODE (br_insn) != JUMP_INSN)
282 br_code = GET_CODE (XEXP (XEXP (PATTERN (br_insn),
1),
0));
284 weird_memory_memory (operands[
0], operands[
1]);
288 cc_status.flags = CC_REVERSED;
289 if (TRULY_UNSIGNED_COMPARE_P (br_code))
291 cc_status.mdep = CC_VALID_FOR_UNSIGNED;
292 return
\"ucmpb %
0,%
1\";
294 return
\"cmpb %
0,%
1\";
297 if (TRULY_UNSIGNED_COMPARE_P (br_code))
299 cc_status.mdep = CC_VALID_FOR_UNSIGNED;
300 return
\"ucmpb %
1,%
0\";
302 return
\"cmpb %
1,%
0\";
307 (compare (match_operand:QI
0 "nonimmediate_operand" "r,m")
308 (match_operand:QI
1 "nonimmediate_operand" "m,r")))]
309 "((GET_CODE (operands[
0]) == MEM) != (GET_CODE (operands[
1]) == MEM))"
312 rtx br_insn = NEXT_INSN (insn);
315 if (GET_CODE (br_insn) != JUMP_INSN)
317 br_code = GET_CODE (XEXP (XEXP (PATTERN (br_insn),
1),
0));
319 if (which_alternative !=
0)
321 cc_status.flags = CC_REVERSED;
322 if (TRULY_UNSIGNED_COMPARE_P (br_code))
324 cc_status.mdep = CC_VALID_FOR_UNSIGNED;
325 return
\"ucmpb %
0,%
1\";
327 return
\"cmpb %
0,%
1\";
330 if (TRULY_UNSIGNED_COMPARE_P (br_code))
332 cc_status.mdep = CC_VALID_FOR_UNSIGNED;
333 return
\"ucmpb %
1,%
0\";
335 return
\"cmpb %
1,%
0\";
339 [(set (pc) (if_then_else (gt (cc0) (const_int
0))
340 (label_ref (match_operand
0 "" "")) (pc)))]
341 "" "extend_and_branch (SIGN_EXTEND);")
344 [(set (pc) (if_then_else (lt (cc0) (const_int
0))
345 (label_ref (match_operand
0 "" "")) (pc)))]
346 "" "extend_and_branch (SIGN_EXTEND);")
349 [(set (pc) (if_then_else (ge (cc0) (const_int
0))
350 (label_ref (match_operand
0 "" "")) (pc)))]
351 "" "extend_and_branch (SIGN_EXTEND);")
354 [(set (pc) (if_then_else (le (cc0) (const_int
0))
355 (label_ref (match_operand
0 "" "")) (pc)))]
356 "" "extend_and_branch (SIGN_EXTEND);")
359 [(set (pc) (if_then_else (eq (cc0) (const_int
0))
360 (label_ref (match_operand
0 "" "")) (pc)))]
361 "" "extend_and_branch (SIGN_EXTEND);")
364 [(set (pc) (if_then_else (ne (cc0) (const_int
0))
365 (label_ref (match_operand
0 "" "")) (pc)))]
366 "" "extend_and_branch (SIGN_EXTEND);")
;; Branch on unsigned greater-than.  cc0 has been set by a preceding
;; compare/test expansion; extend_and_branch (ZERO_EXTEND) emits any
;; still-pending zero extension of the sub-word compare operands before
;; generating the branch (see the compare-expansion comments above).
(define_expand "bgtu"
  [(set (pc) (if_then_else (gtu (cc0) (const_int 0))
			   (label_ref (match_operand 0 "" "")) (pc)))]
  "" "extend_and_branch (ZERO_EXTEND);")
;; Branch on unsigned less-than.  cc0 has been set by a preceding
;; compare/test expansion; extend_and_branch (ZERO_EXTEND) emits any
;; still-pending zero extension of the sub-word compare operands before
;; generating the branch.
(define_expand "bltu"
  [(set (pc) (if_then_else (ltu (cc0) (const_int 0))
			   (label_ref (match_operand 0 "" "")) (pc)))]
  "" "extend_and_branch (ZERO_EXTEND);")
;; Branch on unsigned greater-than-or-equal.  cc0 has been set by a
;; preceding compare/test expansion; extend_and_branch (ZERO_EXTEND)
;; emits any still-pending zero extension of the sub-word compare
;; operands before generating the branch.
(define_expand "bgeu"
  [(set (pc) (if_then_else (geu (cc0) (const_int 0))
			   (label_ref (match_operand 0 "" "")) (pc)))]
  "" "extend_and_branch (ZERO_EXTEND);")
;; Branch on unsigned less-than-or-equal.  cc0 has been set by a
;; preceding compare/test expansion; extend_and_branch (ZERO_EXTEND)
;; emits any still-pending zero extension of the sub-word compare
;; operands before generating the branch.
(define_expand "bleu"
  [(set (pc) (if_then_else (leu (cc0) (const_int 0))
			   (label_ref (match_operand 0 "" "")) (pc)))]
  "" "extend_and_branch (ZERO_EXTEND);")
390 (compare (match_operand:DF
0 "register_operand" "r")
391 (match_operand:DF
1 "register_operand" "r")))]
397 (compare (match_operand:SF
0 "register_operand" "r")
398 (match_operand:SF
1 "register_operand" "r")))]
404 (match_operand:DF
0 "register_operand" "r"))]
410 (match_operand:SF
0 "register_operand" "r"))]
414 ;______________________________________________________________________
416 ; Fixed-point Arithmetic.
417 ;______________________________________________________________________
419 (define_insn "addsi3"
420 [(set (match_operand:SI
0 "register_operand" "=r,!r")
421 (plus:SI (match_operand:SI
1 "general_operand" "%
0,r")
422 (match_operand:SI
2 "general_operand" "g,rJ")))]
426 if (which_alternative ==
0)
427 return (GET_CODE (operands[
2]) == CONST_INT && INTVAL (operands[
2]) ==
32
428 ?
\"subw %n2,%
0\" :
\"addw %
2,%
0\");
431 forget_cc_if_dependent (operands[
0]);
432 return
\"mova %a2[%
1*
1],%
0\";
436 (define_insn "subsi3"
437 [(set (match_operand:SI
0 "register_operand" "=r,r")
438 (minus:SI (match_operand:SI
1 "general_operand" "
0,g")
439 (match_operand:SI
2 "general_operand" "g,
0")))]
441 "* return (which_alternative ==
0) ?
\"subw %
2,%
0\" :
\"rsubw %
1,%
0\";")
443 (define_insn "mulsi3"
444 [(set (match_operand:SI
0 "register_operand" "=r")
445 (mult:SI (match_operand:SI
1 "general_operand" "%
0")
446 (match_operand:SI
2 "general_operand" "g")))]
450 (define_insn "divsi3"
451 [(set (match_operand:SI
0 "register_operand" "=r,r")
452 (div:SI (match_operand:SI
1 "general_operand" "
0,g")
453 (match_operand:SI
2 "general_operand" "g,
0")))]
455 "* return (which_alternative ==
0) ?
\"divw %
2,%
0\" :
\"rdivw %
1,%
0\";")
457 (define_insn "udivsi3"
458 [(set (match_operand:SI
0 "register_operand" "=r")
459 (udiv:SI (match_operand:SI
1 "register_operand" "
0")
460 (match_operand:SI
2 "general_operand" "g")))]
464 (define_insn "modsi3"
465 [(set (match_operand:SI
0 "register_operand" "=r")
466 (mod:SI (match_operand:SI
1 "register_operand" "
0")
467 (match_operand:SI
2 "general_operand" "g")))]
471 (define_insn "umodsi3"
472 [(set (match_operand:SI
0 "register_operand" "=r")
473 (umod:SI (match_operand:SI
1 "register_operand" "
0")
474 (match_operand:SI
2 "general_operand" "g")))]
478 (define_insn "negsi2"
479 [(set (match_operand:SI
0 "register_operand" "=r")
480 (neg:SI (match_operand:SI
1 "nonimmediate_operand" "rm")))]
484 (define_insn "one_cmplsi2"
485 [(set (match_operand:SI
0 "register_operand" "=r")
486 (not:SI (match_operand:SI
1 "nonimmediate_operand" "rm")))]
490 (define_insn "abssi2"
491 [(set (match_operand:SI
0 "register_operand" "=r")
492 (abs:SI (match_operand:SI
1 "nonimmediate_operand" "rm")))]
496 ;______________________________________________________________________
498 ; Floating-point Arithmetic.
499 ;______________________________________________________________________
501 (define_insn "adddf3"
502 [(set (match_operand:DF
0 "register_operand" "=r")
503 (plus:DF (match_operand:DF
1 "register_operand" "%
0")
504 (match_operand:DF
2 "register_operand" "r")))]
508 (define_insn "addsf3"
509 [(set (match_operand:SF
0 "register_operand" "=r")
510 (plus:SF (match_operand:SF
1 "register_operand" "%
0")
511 (match_operand:SF
2 "register_operand" "r")))]
515 (define_insn "subdf3"
516 [(set (match_operand:DF
0 "register_operand" "=r")
517 (minus:DF (match_operand:DF
1 "register_operand" "
0")
518 (match_operand:DF
2 "register_operand" "r")))]
522 (define_insn "subsf3"
523 [(set (match_operand:SF
0 "register_operand" "=r")
524 (minus:SF (match_operand:SF
1 "register_operand" "
0")
525 (match_operand:SF
2 "register_operand" "r")))]
529 (define_insn "muldf3"
530 [(set (match_operand:DF
0 "register_operand" "=r")
531 (mult:DF (match_operand:DF
1 "register_operand" "%
0")
532 (match_operand:DF
2 "register_operand" "r")))]
536 (define_insn "mulsf3"
537 [(set (match_operand:SF
0 "register_operand" "=r")
538 (mult:SF (match_operand:SF
1 "register_operand" "%
0")
539 (match_operand:SF
2 "register_operand" "r")))]
543 (define_insn "divdf3"
544 [(set (match_operand:DF
0 "register_operand" "=r")
545 (div:DF (match_operand:DF
1 "register_operand" "
0")
546 (match_operand:DF
2 "register_operand" "r")))]
550 (define_insn "divsf3"
551 [(set (match_operand:SF
0 "register_operand" "=r")
552 (div:SF (match_operand:SF
1 "register_operand" "
0")
553 (match_operand:SF
2 "register_operand" "r")))]
557 (define_insn "negdf2"
558 [(set (match_operand:DF
0 "register_operand" "=r")
559 (neg:DF (match_operand:DF
1 "register_operand" "r")))]
563 (define_insn "negsf2"
564 [(set (match_operand:SF
0 "register_operand" "=r")
565 (neg:SF (match_operand:SF
1 "register_operand" "r")))]
569 (define_insn "absdf2"
570 [(set (match_operand:DF
0 "register_operand" "=r")
571 (abs:DF (match_operand:DF
1 "register_operand" "r")))]
575 (define_insn "abssf2"
576 [(set (match_operand:SF
0 "register_operand" "=r")
577 (abs:SF (match_operand:SF
1 "register_operand" "r")))]
581 ;______________________________________________________________________
583 ; Logical and Shift Instructions.
584 ;______________________________________________________________________
588 (and:SI (match_operand:SI
0 "general_operand" "%r")
589 (match_operand:SI
1 "general_operand" "g")))]
593 cc_status.flags |= CC_NO_OVERFLOW;
594 return
\"bitw %
1,%
0\";
597 (define_insn "andsi3"
598 [(set (match_operand:SI
0 "register_operand" "=r,r")
599 (and:SI (match_operand:SI
1 "general_operand" "%
0,r")
600 (match_operand:SI
2 "general_operand" "g,K")))]
604 if (which_alternative ==
0)
605 return
\"andw %
2,%
0\";
607 cc_status.flags = CC_NOT_NEGATIVE;
608 return (INTVAL (operands[
2]) ==
255
609 ?
\"movzbw %
1,%
0\" :
\"movzhw %
1,%
0\");
613 [(set (match_operand:SI
0 "register_operand" "=r")
614 (and:SI (not:SI (match_operand:SI
1 "general_operand" "g"))
615 (match_operand:SI
2 "register_operand" "
0")))]
619 (define_insn "iorsi3"
620 [(set (match_operand:SI
0 "register_operand" "=r")
621 (ior:SI (match_operand:SI
1 "general_operand" "%
0")
622 (match_operand:SI
2 "general_operand" "g")))]
626 (define_insn "xorsi3"
627 [(set (match_operand:SI
0 "register_operand" "=r")
628 (xor:SI (match_operand:SI
1 "general_operand" "%
0")
629 (match_operand:SI
2 "general_operand" "g")))]
633 ; The arithmetic left shift instructions work strangely on pyramids.
634 ; They fail to modify the sign bit. Therefore, use logic shifts.
636 (define_insn "ashlsi3"
637 [(set (match_operand:SI
0 "register_operand" "=r")
638 (ashift:SI (match_operand:SI
1 "register_operand" "
0")
639 (match_operand:SI
2 "general_operand" "rnm")))]
643 extern char *output_shift ();
644 return output_shift (
\"lshlw %
2,%
0\", operands[
2],
32);
647 (define_insn "ashrsi3"
648 [(set (match_operand:SI
0 "register_operand" "=r")
649 (ashiftrt:SI (match_operand:SI
1 "register_operand" "
0")
650 (match_operand:SI
2 "general_operand" "rnm")))]
654 extern char *output_shift ();
655 return output_shift (
\"ashrw %
2,%
0\", operands[
2],
32);
658 (define_insn "ashrdi3"
659 [(set (match_operand:DI
0 "register_operand" "=r")
660 (ashiftrt:DI (match_operand:DI
1 "register_operand" "
0")
661 (match_operand:SI
2 "general_operand" "rnm")))]
665 extern char *output_shift ();
666 return output_shift (
\"ashrl %
2,%
0\", operands[
2],
64);
669 (define_insn "lshrsi3"
670 [(set (match_operand:SI
0 "register_operand" "=r")
671 (lshiftrt:SI (match_operand:SI
1 "register_operand" "
0")
672 (match_operand:SI
2 "general_operand" "rnm")))]
676 extern char *output_shift ();
677 return output_shift (
\"lshrw %
2,%
0\", operands[
2],
32);
680 (define_insn "rotlsi3"
681 [(set (match_operand:SI
0 "register_operand" "=r")
682 (rotate:SI (match_operand:SI
1 "register_operand" "
0")
683 (match_operand:SI
2 "general_operand" "rnm")))]
687 extern char *output_shift ();
688 return output_shift (
\"rotlw %
2,%
0\", operands[
2],
32);
691 (define_insn "rotrsi3"
692 [(set (match_operand:SI
0 "register_operand" "=r")
693 (rotatert:SI (match_operand:SI
1 "register_operand" "
0")
694 (match_operand:SI
2 "general_operand" "rnm")))]
698 extern char *output_shift ();
699 return output_shift (
\"rotrw %
2,%
0\", operands[
2],
32);
702 ;______________________________________________________________________
704 ; Fixed and Floating Moves.
705 ;______________________________________________________________________
707 ;; If the destination is a memory operand, indexed source operands are
708 ;; disallowed. Big DImode constants are always loaded into a reg pair,
709 ;; although offsettable memory addresses really could be dealt with.
712 [(set (match_operand:DI
0 "memory_operand" "=m")
713 (match_operand:DI
1 "nonindexed_operand" "gF"))]
714 "(GET_CODE (operands[
1]) == CONST_DOUBLE
715 ? ((CONST_DOUBLE_HIGH (operands[
1]) ==
0
716 && CONST_DOUBLE_LOW (operands[
1]) >=
0)
717 || (CONST_DOUBLE_HIGH (operands[
1]) == -
1
718 && CONST_DOUBLE_LOW (operands[
1]) <
0))
722 if (GET_CODE (operands[
1]) == CONST_DOUBLE)
723 operands[
1] = gen_rtx (CONST_INT, VOIDmode,
724 CONST_DOUBLE_LOW (operands[
1]));
725 return
\"movl %
1,%
0\";
728 ;; Force the destination to a register, so all source operands are allowed.
731 [(set (match_operand:DI
0 "general_operand" "=r")
732 (match_operand:DI
1 "general_operand" "gF"))]
736 extern char *output_move_double ();
737 return output_move_double (operands);
740 ;; If the destination is a memory address, indexed source operands are
744 [(set (match_operand:SI
0 "memory_operand" "=m")
745 (match_operand:SI
1 "nonindexed_operand" "g"))]
749 ;; Force the destination to a register, so all source operands are allowed.
752 [(set (match_operand:SI
0 "general_operand" "=r")
753 (match_operand:SI
1 "general_operand" "g"))]
757 ;; If the destination is a memory address, indexed source operands are
761 [(set (match_operand:HI
0 "memory_operand" "=m")
762 (match_operand:HI
1 "nonindexed_operand" "g"))]
766 if (REG_P (operands[
1]))
767 return
\"cvtwh %
1,%
0\"; /* reg -> mem */
769 return
\"movh %
1,%
0\"; /* mem imm -> mem */
772 ;; Force the destination to a register, so all source operands are allowed.
775 [(set (match_operand:HI
0 "general_operand" "=r")
776 (match_operand:HI
1 "general_operand" "g"))]
780 if (GET_CODE (operands[
1]) != MEM)
781 return
\"movw %
1,%
0\"; /* reg imm -> reg */
782 return
\"cvthw %
1,%
0\"; /* mem -> reg */
785 ;; If the destination is a memory address, indexed source operands are
789 [(set (match_operand:QI
0 "memory_operand" "=m")
790 (match_operand:QI
1 "nonindexed_operand" "g"))]
794 if (REG_P (operands[
1]))
795 return
\"cvtwb %
1,%
0\"; /* reg -> mem */
797 return
\"movb %
1,%
0\"; /* mem imm -> mem */
800 ;; Force the destination to a register, so all source operands are allowed.
803 [(set (match_operand:QI
0 "general_operand" "=r")
804 (match_operand:QI
1 "general_operand" "g"))]
808 if (GET_CODE (operands[
1]) != MEM)
809 return
\"movw %
1,%
0\"; /* reg imm -> reg */
810 return
\"cvtbw %
1,%
0\"; /* mem -> reg */
813 ;; If the destination is a memory address, indexed source operands are
817 [(set (match_operand:DF
0 "memory_operand" "=m")
818 (match_operand:DF
1 "nonindexed_operand" "g"))]
819 "GET_CODE (operands[
1]) != CONST_DOUBLE"
822 ;; Force the destination to a register, so all source operands are allowed.
825 [(set (match_operand:DF
0 "general_operand" "=r")
826 (match_operand:DF
1 "general_operand" "gF"))]
830 extern char *output_move_double ();
831 return output_move_double (operands);
834 ;; If the destination is a memory address, indexed source operands are
838 [(set (match_operand:SF
0 "memory_operand" "=m")
839 (match_operand:SF
1 "nonindexed_operand" "g"))]
843 ;; Force the destination to a register, so all source operands are allowed.
846 [(set (match_operand:SF
0 "general_operand" "=r")
847 (match_operand:SF
1 "general_operand" "g"))]
852 [(set (match_operand:SI
0 "register_operand" "=r")
853 (match_operand:QI
1 "address_operand" "p"))]
857 forget_cc_if_dependent (operands[
0]);
858 return
\"mova %a1,%
0\";
861 ;______________________________________________________________________
863 ; Conversion patterns.
864 ;______________________________________________________________________
866 ;; The trunc patterns are used only when non compile-time constants are used.
868 (define_insn "truncsiqi2"
869 [(set (match_operand:QI
0 "register_operand" "=r")
870 (truncate:QI (match_operand:SI
1 "nonimmediate_operand" "rm")))]
874 if (REG_P (operands[
0]) && REG_P (operands[
1])
875 && REGNO (operands[
0]) == REGNO (operands[
1]))
877 cc_status = cc_prev_status;
880 forget_cc_if_dependent (operands[
0]);
881 return
\"movw %
1,%
0\";
884 (define_insn "truncsihi2"
885 [(set (match_operand:HI
0 "register_operand" "=r")
886 (truncate:HI (match_operand:SI
1 "nonimmediate_operand" "rm")))]
890 if (REG_P (operands[
0]) && REG_P (operands[
1])
891 && REGNO (operands[
0]) == REGNO (operands[
1]))
893 cc_status = cc_prev_status;
896 forget_cc_if_dependent (operands[
0]);
897 return
\"movw %
1,%
0\";
900 (define_insn "extendhisi2"
901 [(set (match_operand:SI
0 "general_operand" "=r,m")
902 (sign_extend:SI (match_operand:HI
1 "nonimmediate_operand" "rm,r")))]
906 if (optimize && REG_P (operands[
0]) && REG_P (operands[
1])
907 && REGNO (operands[
0]) == REGNO (operands[
1])
908 && already_sign_extended (insn, HImode, operands[
0]))
910 cc_status = cc_prev_status;
913 return
\"cvthw %
1,%
0\";
916 (define_insn "extendqisi2"
917 [(set (match_operand:SI
0 "general_operand" "=r,m")
918 (sign_extend:SI (match_operand:QI
1 "nonimmediate_operand" "rm,r")))]
922 if (optimize && REG_P (operands[
0]) && REG_P (operands[
1])
923 && REGNO (operands[
0]) == REGNO (operands[
1])
924 && already_sign_extended (insn, QImode, operands[
0]))
926 cc_status = cc_prev_status;
929 return
\"cvtbw %
1,%
0\";
932 ; Pyramid doesn't have insns *called* "cvtbh" or "movzbh".
933 ; But we can cvtbw/movzbw into a register, where there is no distinction
934 ; between words and halfwords.
936 (define_insn "extendqihi2"
937 [(set (match_operand:HI
0 "register_operand" "=r")
938 (sign_extend:HI (match_operand:QI
1 "nonimmediate_operand" "rm")))]
942 (define_insn "zero_extendhisi2"
943 [(set (match_operand:SI
0 "register_operand" "=r")
944 (zero_extend:SI (match_operand:HI
1 "nonimmediate_operand" "rm")))]
948 cc_status.flags = CC_NOT_NEGATIVE;
949 return
\"movzhw %
1,%
0\";
952 (define_insn "zero_extendqisi2"
953 [(set (match_operand:SI
0 "register_operand" "=r")
954 (zero_extend:SI (match_operand:QI
1 "nonimmediate_operand" "rm")))]
958 cc_status.flags = CC_NOT_NEGATIVE;
959 return
\"movzbw %
1,%
0\";
962 (define_insn "zero_extendqihi2"
963 [(set (match_operand:HI
0 "register_operand" "=r")
964 (zero_extend:HI (match_operand:QI
1 "nonimmediate_operand" "rm")))]
968 cc_status.flags = CC_NOT_NEGATIVE;
969 return
\"movzbw %
1,%
0\";
972 (define_insn "extendsfdf2"
973 [(set (match_operand:DF
0 "general_operand" "=&r,m")
974 (float_extend:DF (match_operand:SF
1 "nonimmediate_operand" "rm,r")))]
978 (define_insn "truncdfsf2"
979 [(set (match_operand:SF
0 "general_operand" "=&r,m")
980 (float_truncate:SF (match_operand:DF
1 "nonimmediate_operand" "rm,r")))]
984 (define_insn "floatsisf2"
985 [(set (match_operand:SF
0 "general_operand" "=&r,m")
986 (float:SF (match_operand:SI
1 "nonimmediate_operand" "rm,r")))]
990 (define_insn "floatsidf2"
991 [(set (match_operand:DF
0 "general_operand" "=&r,m")
992 (float:DF (match_operand:SI
1 "nonimmediate_operand" "rm,r")))]
996 (define_insn "fix_truncsfsi2"
997 [(set (match_operand:SI
0 "general_operand" "=&r,m")
998 (fix:SI (fix:SF (match_operand:SF
1 "nonimmediate_operand" "rm,r"))))]
1002 (define_insn "fix_truncdfsi2"
1003 [(set (match_operand:SI
0 "general_operand" "=&r,m")
1004 (fix:SI (fix:DF (match_operand:DF
1 "nonimmediate_operand" "rm,r"))))]
1008 ;______________________________________________________________________
1010 ; Flow Control Patterns.
1011 ;______________________________________________________________________
1013 ;; Prefer "br" to "jump" for unconditional jumps, since it's faster.
1014 ;; (The assembler can manage with out-of-range branches.)
1018 (label_ref (match_operand
0 "" "")))]
1024 (if_then_else (match_operator
0 "relop" [(cc0) (const_int
0)])
1025 (label_ref (match_operand
1 "" ""))
1031 switch (GET_CODE (operands[
0]))
1035 case LT: case LE: case GE: case GT:
1036 if (cc_prev_status.mdep == CC_VALID_FOR_UNSIGNED)
1039 case LTU: case LEU: case GEU: case GTU:
1040 if (cc_prev_status.mdep != CC_VALID_FOR_UNSIGNED)
1045 return
\"b%N0 %l1
\";
1050 (if_then_else (match_operator
0 "relop" [(cc0) (const_int
0)])
1052 (label_ref (match_operand
1 "" ""))))]
1057 switch (GET_CODE (operands[
0]))
1061 case LT: case LE: case GE: case GT:
1062 if (cc_prev_status.mdep == CC_VALID_FOR_UNSIGNED)
1065 case LTU: case LEU: case GEU: case GTU:
1066 if (cc_prev_status.mdep != CC_VALID_FOR_UNSIGNED)
1071 return
\"b%C0 %l1
\";
1075 [(call (match_operand:QI
0 "memory_operand" "m")
1076 (match_operand:SI
1 "immediate_operand" "n"))]
1080 (define_insn "call_value"
1081 [(set (match_operand
0 "" "=r")
1082 (call (match_operand:QI
1 "memory_operand" "m")
1083 (match_operand:SI
2 "immediate_operand" "n")))]
1084 ;; Operand
2 not really used on Pyramid architecture.
1088 (define_insn "return"
1093 if (get_frame_size () + current_function_pretend_args_size
1094 + current_function_args_size !=
0
1095 || current_function_calls_alloca)
1097 int dealloc_size = current_function_pretend_args_size;
1098 if (current_function_pops_args)
1099 dealloc_size += current_function_args_size;
1100 operands[
0] = gen_rtx (CONST_INT, VOIDmode, dealloc_size);
1107 (define_insn "tablejump"
1108 [(set (pc) (match_operand:SI
0 "register_operand" "r"))
1109 (use (label_ref (match_operand
1 "" "")))]
1116 "movw gr0,gr0 # nop")
1118 ;______________________________________________________________________
1120 ; Peep-hole Optimization Patterns.
1121 ;______________________________________________________________________
1123 ;; Optimize fullword move followed by a test of the moved value.
1126 [(set (match_operand:SI
0 "register_operand" "=r")
1127 (match_operand:SI
1 "nonimmediate_operand" "rm"))
1128 (set (cc0) (match_operand:SI
2 "nonimmediate_operand" "rm"))]
1129 "rtx_equal_p (operands[
2], operands[
0])
1130 || rtx_equal_p (operands[
2], operands[
1])"
1132 cc_status.flags |= CC_NO_OVERFLOW;
1133 return
\"mtstw %
1,%
0\";
1136 ;; Optimize loops with an incremented/decremented variable.
1139 [(set (match_operand:SI
0 "register_operand" "=r")
1140 (plus:SI (match_dup
0)
1143 (compare (match_operand:SI
1 "register_operand" "r")
1144 (match_operand:SI
2 "nonmemory_operand" "ri")))
1146 (if_then_else (match_operator:SI
3 "signed_comparison"
1147 [(cc0) (const_int
0)])
1148 (label_ref (match_operand
4 "" ""))
1150 "(GET_CODE (operands[
2]) == CONST_INT
1151 ? (unsigned)INTVAL (operands[
2]) +
32 >=
64
1152 :
1) && (rtx_equal_p (operands[
0], operands[
1])
1153 || rtx_equal_p (operands[
0], operands[
2]))"
1155 if (rtx_equal_p (operands[
0], operands[
1]))
1157 output_asm_insn (
\"dcmpw %
2,%
0\", operands);
1158 return
\"b%N3 %l4
\";
1162 output_asm_insn (
\"dcmpw %
1,%
0\", operands);
1163 return
\"b%R3 %l4
\";
1168 [(set (match_operand:SI
0 "register_operand" "=r")
1169 (plus:SI (match_dup
0)
1172 (compare (match_operand:SI
1 "register_operand" "r")
1173 (match_operand:SI
2 "nonmemory_operand" "ri")))
1175 (if_then_else (match_operator:SI
3 "signed_comparison"
1176 [(cc0) (const_int
0)])
1177 (label_ref (match_operand
4 "" ""))
1179 "(GET_CODE (operands[
2]) == CONST_INT
1180 ? (unsigned)INTVAL (operands[
2]) +
32 >=
64
1181 :
1) && (rtx_equal_p (operands[
0], operands[
1])
1182 || rtx_equal_p (operands[
0], operands[
2]))"
1184 if (rtx_equal_p (operands[
0], operands[
1]))
1186 output_asm_insn (
\"icmpw %
2,%
0\", operands);
1187 return
\"b%N3 %l4
\";
1191 output_asm_insn (
\"icmpw %
1,%
0\", operands);
1192 return
\"b%R3 %l4
\";
1196 ;; Combine two word moves with consecutive operands into one long move.
1197 ;; Also combines immediate moves, if the high-order destination operand
1198 ;; is loaded with
0 or -
1 and the low-order destination operand is loaded
1199 ;; with a constant with the same sign.
1202 [(set (match_operand:SI
0 "general_operand" "=g")
1203 (match_operand:SI
1 "general_operand" "g"))
1204 (set (match_operand:SI
2 "general_operand" "=g")
1205 (match_operand:SI
3 "general_operand" "g"))]
1206 "movdi_possible (operands)"
1209 output_asm_insn (
\"# COMBINE movw %
1,%
0\", operands);
1210 output_asm_insn (
\"# COMBINE movw %
3,%
2\", operands);
1211 movdi_possible (operands);
1212 if (CONSTANT_P (operands[
1]))
1213 return (swap_operands ?
\"movl %
3,%
0\" :
\"movl %
1,%
2\");
1215 return (swap_operands ?
\"movl %
1,%
0\" :
\"movl %
3,%
2\");
1218 ;; Optimize certain tests after memory stores.
1221 [(set (match_operand
0 "memory_operand" "=m")
1222 (match_operand
1 "register_operand" "r"))
1223 (set (match_operand:SI
2 "register_operand" "=r")
1224 (sign_extend:SI (match_dup
1)))
1227 "dead_or_set_p (insn, operands[
2])"
1229 cc_status.flags |= CC_NO_OVERFLOW;
1230 if (GET_MODE (operands[
0]) == QImode)
1231 return
\"cvtwb %
1,%
0\";
1233 return
\"cvtwh %
1,%
0\";
1236 ;______________________________________________________________________
1239 ;______________________________________________________________________
1241 (define_expand "extendsidi2"
1242 [(set (subreg:SI (match_operand:DI
0 "register_operand" "=r")
1)
1243 (match_operand:SI
1 "general_operand" "g"))
1244 (set (subreg:SI (match_dup
0)
0)
1245 (subreg:SI (match_dup
0)
1))
1246 (set (subreg:SI (match_dup
0)
0)
1247 (ashiftrt:SI (subreg:SI (match_dup
0)
0)
1252 (define_insn "adddi3"
1253 [(set (match_operand:DI
0 "register_operand" "=r")
1254 (plus:DI (match_operand:DI
1 "nonmemory_operand" "%
0")
1255 (match_operand:DI
2 "nonmemory_operand" "rF")))]
1261 xoperands[
0] = gen_rtx (REG, SImode, REGNO (operands[
0]) +
1);
1262 if (REG_P (operands[
2]))
1263 xoperands[
1] = gen_rtx (REG, SImode, REGNO (operands[
2]) +
1);
1266 xoperands[
1] = gen_rtx (CONST_INT, VOIDmode,
1267 CONST_DOUBLE_LOW (operands[
2]));
1268 operands[
2] = gen_rtx (CONST_INT, VOIDmode,
1269 CONST_DOUBLE_HIGH (operands[
2]));
1271 output_asm_insn (
\"addw %
1,%
0\", xoperands);
1272 return
\"addwc %
2,%
0\";
1275 (define_insn "subdi3"
1276 [(set (match_operand:DI
0 "register_operand" "=r")
1277 (minus:DI (match_operand:DI
1 "register_operand" "
0")
1278 (match_operand:DI
2 "nonmemory_operand" "rF")))]
1284 xoperands[
0] = gen_rtx (REG, SImode, REGNO (operands[
0]) +
1);
1285 if (REG_P (operands[
2]))
1286 xoperands[
1] = gen_rtx (REG, SImode, REGNO (operands[
2]) +
1);
1289 xoperands[
1] = gen_rtx (CONST_INT, VOIDmode,
1290 CONST_DOUBLE_LOW (operands[
2]));
1291 operands[
2] = gen_rtx (CONST_INT, VOIDmode,
1292 CONST_DOUBLE_HIGH (operands[
2]));
1294 output_asm_insn (
\"subw %
1,%
0\", xoperands);
1295 return
\"subwb %
2,%
0\";
1298 (define_insn "iordi3"
1299 [(set (match_operand:DI
0 "register_operand" "=r")
1300 (ior:DI (match_operand:DI
1 "nonmemory_operand" "%
0")
1301 (match_operand:DI
2 "nonmemory_operand" "rF")))]
1307 xoperands[
0] = gen_rtx (REG, SImode, REGNO (operands[
0]) +
1);
1308 if (REG_P (operands[
2]))
1309 xoperands[
1] = gen_rtx (REG, SImode, REGNO (operands[
2]) +
1);
1312 xoperands[
1] = gen_rtx (CONST_INT, VOIDmode,
1313 CONST_DOUBLE_LOW (operands[
2]));
1314 operands[
2] = gen_rtx (CONST_INT, VOIDmode,
1315 CONST_DOUBLE_HIGH (operands[
2]));
1317 output_asm_insn (
\"orw %
1,%
0\", xoperands);
1318 return
\"orw %
2,%
0\";
1321 (define_insn "anddi3"
1322 [(set (match_operand:DI
0 "register_operand" "=r")
1323 (and:DI (match_operand:DI
1 "nonmemory_operand" "%
0")
1324 (match_operand:DI
2 "nonmemory_operand" "rF")))]
1330 xoperands[
0] = gen_rtx (REG, SImode, REGNO (operands[
0]) +
1);
1331 if (REG_P (operands[
2]))
1332 xoperands[
1] = gen_rtx (REG, SImode, REGNO (operands[
2]) +
1);
1335 xoperands[
1] = gen_rtx (CONST_INT, VOIDmode,
1336 CONST_DOUBLE_LOW (operands[
2]));
1337 operands[
2] = gen_rtx (CONST_INT, VOIDmode,
1338 CONST_DOUBLE_HIGH (operands[
2]));
1340 output_asm_insn (
\"andw %
1,%
0\", xoperands);
1341 return
\"andw %
2,%
0\";
1344 (define_insn "xordi3"
1345 [(set (match_operand:DI
0 "register_operand" "=r")
1346 (xor:DI (match_operand:DI
1 "nonmemory_operand" "%
0")
1347 (match_operand:DI
2 "nonmemory_operand" "rF")))]
1353 xoperands[
0] = gen_rtx (REG, SImode, REGNO (operands[
0]) +
1);
1354 if (REG_P (operands[
2]))
1355 xoperands[
1] = gen_rtx (REG, SImode, REGNO (operands[
2]) +
1);
1358 xoperands[
1] = gen_rtx (CONST_INT, VOIDmode,
1359 CONST_DOUBLE_LOW (operands[
2]));
1360 operands[
2] = gen_rtx (CONST_INT, VOIDmode,
1361 CONST_DOUBLE_HIGH (operands[
2]));
1363 output_asm_insn (
\"xorw %
1,%
0\", xoperands);
1364 return
\"xorw %
2,%
0\";
1367 ;; My version, modelled after Jonathan Stone's and "tablejump" - S.P.
1368 (define_insn "indirect_jump"
1369 [(set (pc) (match_operand:SI
0 "general_operand" "r"))]