;; GNU C machine description for Pyramid 90x, 9000, MIServer Series.
;; Copyright (C) 1989, 1990 Free Software Foundation, Inc.
;; This file is part of GNU CC.

;; GNU CC is free software; you can redistribute it and/or modify
;; it under the terms of the GNU General Public License as published by
;; the Free Software Foundation; either version 2, or (at your option)
;; any later version.
11 ;; GNU CC is distributed in the hope that it will be useful,
12 ;; but WITHOUT ANY WARRANTY; without even the implied warranty of
13 ;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 ;; GNU General Public License for more details.
;; You should have received a copy of the GNU General Public License
;; along with GNU CC; see the file COPYING.  If not, write to
;; the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.
20 ;; Instruction patterns. When multiple patterns apply,
21 ;; the first one in the file is chosen.
23 ;; See file "rtl.def" for documentation on define_insn, match_*, et. al.
25 ;; cpp macro #define NOTICE_UPDATE_CC in file tm.h handles condition code
26 ;; updates for most instructions.
28 ;; * Try using define_insn instead of some peepholes in more places.
29 ;; * Set REG_NOTES:REG_EQUIV for cvt[bh]w loads. This would make the
30 ;; backward scan in sign_extend needless.
31 ;; * Match (pc) (label_ref) case in peephole patterns.
;; "cmpX op1,op2; b{eq,ne} LY; ucmpX op1,op2; b{lt,le,gt,ge} LZ"
;; "ucmpX op1,op2; b{eq,ne} LY; b{lt,le,gt,ge} LZ"
36 ;; by pre-scanning insn and running notice_update_cc for them.
37 ;; * Is it necessary to do copy_rtx in the test and compare patterns?
38 ;; * Fix true frame pointer omission.
39 ;; * Make the jump tables contain branches, not addresses! This would
40 ;; save us one instruction.
41 ;; * Could the complicated scheme for compares be simplified, if we had
42 ;; no named cmpqi or cmphi patterns, and instead anonymous patterns for
43 ;; the less-than-word compare cases pyr can handle???
44 ;; * The jump insn seems to accept more than just IR addressing. Would
45 ;; we win by telling GCC? Or can we use movw into the global reg which
46 ;; is a synonym for pc?
47 ;; * More DImode patterns.
48 ;; * Scan backwards in "zero_extendhisi2", "zero_extendqisi2" to find out
49 ;; if the extension can be omitted.
50 ;; * "divmodsi" with Pyramid "ediv" insn. Is it possible in rtl??
51 ;; * Would "rcsp tmpreg; u?cmp[bh] op1_regdispl(tmpreg),op2" win in
52 ;; comparison with the two extensions and single test generated now?
;;   The rcsp insn could be expanded, and moved out of loops by the
;;   optimizer, making 1 (64 bit) insn of 3 (32 bit) insns in loops.
;; * The rcsp insn could be followed by an add insn, making non-displacement
;;   IR addressing sufficient.
58 ;______________________________________________________________________
60 ; Test and Compare Patterns.
61 ;______________________________________________________________________
; The argument for the rather complicated test and compare expansion
; scheme is the irregular Pyramid instructions for these operations.
; 1) Pyramid has different signed and unsigned compares.  2) HImode
; and QImode integers are memory-memory and immediate-memory only.
; 3) Unsigned HImode compares don't exist.  4) Only certain
; combinations of addresses are allowed for memory-memory compares.
; Whenever necessary, in order to fulfill these addressing
; constraints, the compare operands are swapped.
;; Test a full-word (SImode) value against zero.
;; The operand is forced into a register so the insn that matches the
;; resulting rtl only has to handle register operands.
;; NOTE(review): the "[(set (cc0)" line was lost in extraction and has
;; been reconstructed -- tst expanders conventionally set cc0; confirm
;; against an intact copy of this file.
(define_expand "tstsi"
  [(set (cc0)
	(match_operand:SI 0 "general_operand" ""))]
  ""
  "operands[0] = force_reg (SImode, operands[0]);")
79 (compare (match_operand:SI
0 "memory_operand" "m")
80 (match_operand:SI
1 "memory_operand" "m")))]
81 "weird_memory_memory (operands[
0], operands[
1])"
84 rtx br_insn = NEXT_INSN (insn);
87 if (GET_CODE (br_insn) != JUMP_INSN)
89 br_code = GET_CODE (XEXP (XEXP (PATTERN (br_insn),
1),
0));
91 weird_memory_memory (operands[
0], operands[
1]);
95 cc_status.flags = CC_REVERSED;
96 if (TRULY_UNSIGNED_COMPARE_P (br_code))
98 cc_status.mdep = CC_VALID_FOR_UNSIGNED;
99 return
\"ucmpw %
0,%
1\";
101 return
\"cmpw %
0,%
1\";
104 if (TRULY_UNSIGNED_COMPARE_P (br_code))
106 cc_status.mdep = CC_VALID_FOR_UNSIGNED;
107 return
\"ucmpw %
1,%
0\";
109 return
\"cmpw %
1,%
0\";
114 (compare (match_operand:SI
0 "nonimmediate_operand" "r,g")
115 (match_operand:SI
1 "general_operand" "g,r")))]
119 rtx br_insn = NEXT_INSN (insn);
122 if (GET_CODE (br_insn) != JUMP_INSN)
124 br_code = GET_CODE (XEXP (XEXP (PATTERN (br_insn),
1),
0));
126 if (which_alternative !=
0)
128 cc_status.flags = CC_REVERSED;
129 if (TRULY_UNSIGNED_COMPARE_P (br_code))
131 cc_status.mdep = CC_VALID_FOR_UNSIGNED;
132 return
\"ucmpw %
0,%
1\";
134 return
\"cmpw %
0,%
1\";
137 if (TRULY_UNSIGNED_COMPARE_P (br_code))
139 cc_status.mdep = CC_VALID_FOR_UNSIGNED;
140 return
\"ucmpw %
1,%
0\";
142 return
\"cmpw %
1,%
0\";
147 (match_operand:SI
0 "nonimmediate_operand" "r"))]
152 cc_status.flags |= CC_NO_OVERFLOW;
153 return
\"cmpw $
0,%
0\";
155 rtx br_insn = NEXT_INSN (insn);
158 if (GET_CODE (br_insn) != JUMP_INSN)
160 br_code = GET_CODE (XEXP (XEXP (PATTERN (br_insn),
1),
0));
162 if (TRULY_UNSIGNED_COMPARE_P (br_code))
164 cc_status.mdep = CC_VALID_FOR_UNSIGNED;
165 return
\"ucmpw $
0,%
0\";
167 return
\"mtstw %
0,%
0\";
170 (define_expand "cmphi"
172 (compare (match_operand:HI
0 "nonimmediate_operand" "")
173 (match_operand:HI
1 "general_operand" "")))]
177 extern rtx test_op0, test_op1; extern enum machine_mode test_mode;
178 test_op0 = copy_rtx (operands[
0]);
179 test_op1 = copy_rtx (operands[
1]);
184 (define_expand "tsthi"
186 (match_operand:HI
0 "nonimmediate_operand" ""))]
190 extern rtx test_op0; extern enum machine_mode test_mode;
191 test_op0 = copy_rtx (operands[
0]);
198 (compare (match_operand:HI
0 "memory_operand" "m")
199 (match_operand:HI
1 "memory_operand" "m")))]
200 "weird_memory_memory (operands[
0], operands[
1])"
203 rtx br_insn = NEXT_INSN (insn);
205 if (GET_CODE (br_insn) != JUMP_INSN)
208 weird_memory_memory (operands[
0], operands[
1]);
212 cc_status.flags = CC_REVERSED;
213 return
\"cmph %
0,%
1\";
216 return
\"cmph %
1,%
0\";
221 (compare (match_operand:HI
0 "nonimmediate_operand" "r,m")
222 (match_operand:HI
1 "nonimmediate_operand" "m,r")))]
223 "(GET_CODE (operands[
0]) != GET_CODE (operands[
1]))"
226 rtx br_insn = NEXT_INSN (insn);
228 if (GET_CODE (br_insn) != JUMP_INSN)
231 if (which_alternative !=
0)
233 cc_status.flags = CC_REVERSED;
234 return
\"cmph %
0,%
1\";
237 return
\"cmph %
1,%
0\";
240 (define_expand "cmpqi"
242 (compare (match_operand:QI
0 "nonimmediate_operand" "")
243 (match_operand:QI
1 "general_operand" "")))]
247 extern rtx test_op0, test_op1; extern enum machine_mode test_mode;
248 test_op0 = copy_rtx (operands[
0]);
249 test_op1 = copy_rtx (operands[
1]);
254 (define_expand "tstqi"
256 (match_operand:QI
0 "nonimmediate_operand" ""))]
260 extern rtx test_op0; extern enum machine_mode test_mode;
261 test_op0 = copy_rtx (operands[
0]);
268 (compare (match_operand:QI
0 "memory_operand" "m")
269 (match_operand:QI
1 "memory_operand" "m")))]
270 "weird_memory_memory (operands[
0], operands[
1])"
273 rtx br_insn = NEXT_INSN (insn);
276 if (GET_CODE (br_insn) != JUMP_INSN)
278 br_code = GET_CODE (XEXP (XEXP (PATTERN (br_insn),
1),
0));
280 weird_memory_memory (operands[
0], operands[
1]);
284 cc_status.flags = CC_REVERSED;
285 if (TRULY_UNSIGNED_COMPARE_P (br_code))
287 cc_status.mdep = CC_VALID_FOR_UNSIGNED;
288 return
\"ucmpb %
0,%
1\";
290 return
\"cmpb %
0,%
1\";
293 if (TRULY_UNSIGNED_COMPARE_P (br_code))
295 cc_status.mdep = CC_VALID_FOR_UNSIGNED;
296 return
\"ucmpb %
1,%
0\";
298 return
\"cmpb %
1,%
0\";
303 (compare (match_operand:QI
0 "nonimmediate_operand" "r,m")
304 (match_operand:QI
1 "nonimmediate_operand" "m,r")))]
305 "(GET_CODE (operands[
0]) != GET_CODE (operands[
1]))"
308 rtx br_insn = NEXT_INSN (insn);
311 if (GET_CODE (br_insn) != JUMP_INSN)
313 br_code = GET_CODE (XEXP (XEXP (PATTERN (br_insn),
1),
0));
315 if (which_alternative !=
0)
317 cc_status.flags = CC_REVERSED;
318 if (TRULY_UNSIGNED_COMPARE_P (br_code))
320 cc_status.mdep = CC_VALID_FOR_UNSIGNED;
321 return
\"ucmpb %
0,%
1\";
323 return
\"cmpb %
0,%
1\";
326 if (TRULY_UNSIGNED_COMPARE_P (br_code))
328 cc_status.mdep = CC_VALID_FOR_UNSIGNED;
329 return
\"ucmpb %
1,%
0\";
331 return
\"cmpb %
1,%
0\";
;; Signed greater-than conditional branch.  Narrow (QI/HI) compare
;; operands are sign-extended by extend_and_branch (defined in pyr.c).
;; NOTE(review): the '(define_expand "bgt"' header line was dropped by
;; extraction; reconstructed from the gt rtx code below.
(define_expand "bgt"
  [(set (pc) (if_then_else (gt (cc0) (const_int 0))
			   (label_ref (match_operand 0 "" "")) (pc)))]
  "" "extend_and_branch (SIGN_EXTEND);")
;; Signed less-than conditional branch.  Narrow (QI/HI) compare
;; operands are sign-extended by extend_and_branch (defined in pyr.c).
;; NOTE(review): the '(define_expand "blt"' header line was dropped by
;; extraction; reconstructed from the lt rtx code below.
(define_expand "blt"
  [(set (pc) (if_then_else (lt (cc0) (const_int 0))
			   (label_ref (match_operand 0 "" "")) (pc)))]
  "" "extend_and_branch (SIGN_EXTEND);")
;; Signed greater-or-equal conditional branch.  Narrow (QI/HI) compare
;; operands are sign-extended by extend_and_branch (defined in pyr.c).
;; NOTE(review): the '(define_expand "bge"' header line was dropped by
;; extraction; reconstructed from the ge rtx code below.
(define_expand "bge"
  [(set (pc) (if_then_else (ge (cc0) (const_int 0))
			   (label_ref (match_operand 0 "" "")) (pc)))]
  "" "extend_and_branch (SIGN_EXTEND);")
;; Signed less-or-equal conditional branch.  Narrow (QI/HI) compare
;; operands are sign-extended by extend_and_branch (defined in pyr.c).
;; NOTE(review): the '(define_expand "ble"' header line was dropped by
;; extraction; reconstructed from the le rtx code below.
(define_expand "ble"
  [(set (pc) (if_then_else (le (cc0) (const_int 0))
			   (label_ref (match_operand 0 "" "")) (pc)))]
  "" "extend_and_branch (SIGN_EXTEND);")
;; Equality conditional branch.  Narrow (QI/HI) compare operands are
;; sign-extended by extend_and_branch (defined in pyr.c).
;; NOTE(review): the '(define_expand "beq"' header line was dropped by
;; extraction; reconstructed from the eq rtx code below.
(define_expand "beq"
  [(set (pc) (if_then_else (eq (cc0) (const_int 0))
			   (label_ref (match_operand 0 "" "")) (pc)))]
  "" "extend_and_branch (SIGN_EXTEND);")
;; Inequality conditional branch.  Narrow (QI/HI) compare operands are
;; sign-extended by extend_and_branch (defined in pyr.c).
;; NOTE(review): the '(define_expand "bne"' header line was dropped by
;; extraction; reconstructed from the ne rtx code below.
(define_expand "bne"
  [(set (pc) (if_then_else (ne (cc0) (const_int 0))
			   (label_ref (match_operand 0 "" "")) (pc)))]
  "" "extend_and_branch (SIGN_EXTEND);")
;; Unsigned greater-than conditional branch.
;; Narrow (QI/HI) compare operands are zero-extended by
;; extend_and_branch (defined in pyr.c) before the branch is emitted.
(define_expand "bgtu"
  [(set (pc)
	(if_then_else (gtu (cc0) (const_int 0))
		      (label_ref (match_operand 0 "" ""))
		      (pc)))]
  ""
  "extend_and_branch (ZERO_EXTEND);")
;; Unsigned less-than conditional branch.
;; Narrow (QI/HI) compare operands are zero-extended by
;; extend_and_branch (defined in pyr.c) before the branch is emitted.
(define_expand "bltu"
  [(set (pc)
	(if_then_else (ltu (cc0) (const_int 0))
		      (label_ref (match_operand 0 "" ""))
		      (pc)))]
  ""
  "extend_and_branch (ZERO_EXTEND);")
;; Unsigned greater-or-equal conditional branch.
;; Narrow (QI/HI) compare operands are zero-extended by
;; extend_and_branch (defined in pyr.c) before the branch is emitted.
(define_expand "bgeu"
  [(set (pc)
	(if_then_else (geu (cc0) (const_int 0))
		      (label_ref (match_operand 0 "" ""))
		      (pc)))]
  ""
  "extend_and_branch (ZERO_EXTEND);")
;; Unsigned less-or-equal conditional branch.
;; Narrow (QI/HI) compare operands are zero-extended by
;; extend_and_branch (defined in pyr.c) before the branch is emitted.
(define_expand "bleu"
  [(set (pc)
	(if_then_else (leu (cc0) (const_int 0))
		      (label_ref (match_operand 0 "" ""))
		      (pc)))]
  ""
  "extend_and_branch (ZERO_EXTEND);")
386 (compare (match_operand:DF
0 "register_operand" "r")
387 (match_operand:DF
1 "register_operand" "r")))]
393 (compare (match_operand:SF
0 "register_operand" "r")
394 (match_operand:SF
1 "register_operand" "r")))]
400 (match_operand:DF
0 "register_operand" "r"))]
406 (match_operand:SF
0 "register_operand" "r"))]
410 ;______________________________________________________________________
412 ; Fixed-point Arithmetic.
413 ;______________________________________________________________________
415 (define_insn "addsi3"
416 [(set (match_operand:SI
0 "register_operand" "=r,!r")
417 (plus:SI (match_operand:SI
1 "general_operand" "%
0,r")
418 (match_operand:SI
2 "general_operand" "g,rJ")))]
422 if (which_alternative ==
0)
423 return (GET_CODE (operands[
2]) == CONST_INT && INTVAL (operands[
2]) ==
32
424 ?
\"subw %n2,%
0\" :
\"addw %
2,%
0\");
427 forget_cc_if_dependent (operands[
0]);
428 return
\"mova %a2[%
1*
1],%
0\";
432 (define_insn "subsi3"
433 [(set (match_operand:SI
0 "register_operand" "=r,r")
434 (minus:SI (match_operand:SI
1 "general_operand" "
0,g")
435 (match_operand:SI
2 "general_operand" "g,
0")))]
437 "* return (which_alternative ==
0) ?
\"subw %
2,%
0\" :
\"rsubw %
1,%
0\";")
439 (define_insn "mulsi3"
440 [(set (match_operand:SI
0 "register_operand" "=r")
441 (mult:SI (match_operand:SI
1 "general_operand" "%
0")
442 (match_operand:SI
2 "general_operand" "g")))]
446 (define_insn "divsi3"
447 [(set (match_operand:SI
0 "register_operand" "=r,r")
448 (div:SI (match_operand:SI
1 "general_operand" "
0,g")
449 (match_operand:SI
2 "general_operand" "g,
0")))]
451 "* return (which_alternative ==
0) ?
\"divw %
2,%
0\" :
\"rdivw %
1,%
0\";")
453 (define_insn "udivsi3"
454 [(set (match_operand:SI
0 "register_operand" "=r")
455 (udiv:SI (match_operand:SI
1 "register_operand" "
0")
456 (match_operand:SI
2 "general_operand" "g")))]
460 (define_insn "modsi3"
461 [(set (match_operand:SI
0 "register_operand" "=r")
462 (mod:SI (match_operand:SI
1 "register_operand" "
0")
463 (match_operand:SI
2 "general_operand" "g")))]
467 (define_insn "umodsi3"
468 [(set (match_operand:SI
0 "register_operand" "=r")
469 (umod:SI (match_operand:SI
1 "register_operand" "
0")
470 (match_operand:SI
2 "general_operand" "g")))]
474 (define_insn "negsi2"
475 [(set (match_operand:SI
0 "register_operand" "=r")
476 (neg:SI (match_operand:SI
1 "nonimmediate_operand" "rm")))]
480 (define_insn "one_cmplsi2"
481 [(set (match_operand:SI
0 "register_operand" "=r")
482 (not:SI (match_operand:SI
1 "nonimmediate_operand" "rm")))]
486 (define_insn "abssi2"
487 [(set (match_operand:SI
0 "register_operand" "=r")
488 (abs:SI (match_operand:SI
1 "nonimmediate_operand" "rm")))]
492 ;______________________________________________________________________
494 ; Floating-point Arithmetic.
495 ;______________________________________________________________________
497 (define_insn "adddf3"
498 [(set (match_operand:DF
0 "register_operand" "=r")
499 (plus:DF (match_operand:DF
1 "register_operand" "%
0")
500 (match_operand:DF
2 "register_operand" "r")))]
504 (define_insn "addsf3"
505 [(set (match_operand:SF
0 "register_operand" "=r")
506 (plus:SF (match_operand:SF
1 "register_operand" "%
0")
507 (match_operand:SF
2 "register_operand" "r")))]
511 (define_insn "subdf3"
512 [(set (match_operand:DF
0 "register_operand" "=r")
513 (minus:DF (match_operand:DF
1 "register_operand" "
0")
514 (match_operand:DF
2 "register_operand" "r")))]
518 (define_insn "subsf3"
519 [(set (match_operand:SF
0 "register_operand" "=r")
520 (minus:SF (match_operand:SF
1 "register_operand" "
0")
521 (match_operand:SF
2 "register_operand" "r")))]
525 (define_insn "muldf3"
526 [(set (match_operand:DF
0 "register_operand" "=r")
527 (mult:DF (match_operand:DF
1 "register_operand" "%
0")
528 (match_operand:DF
2 "register_operand" "r")))]
532 (define_insn "mulsf3"
533 [(set (match_operand:SF
0 "register_operand" "=r")
534 (mult:SF (match_operand:SF
1 "register_operand" "%
0")
535 (match_operand:SF
2 "register_operand" "r")))]
539 (define_insn "divdf3"
540 [(set (match_operand:DF
0 "register_operand" "=r")
541 (div:DF (match_operand:DF
1 "register_operand" "
0")
542 (match_operand:DF
2 "register_operand" "r")))]
546 (define_insn "divsf3"
547 [(set (match_operand:SF
0 "register_operand" "=r")
548 (div:SF (match_operand:SF
1 "register_operand" "
0")
549 (match_operand:SF
2 "register_operand" "r")))]
553 (define_insn "negdf2"
554 [(set (match_operand:DF
0 "register_operand" "=r")
555 (neg:DF (match_operand:DF
1 "register_operand" "r")))]
559 (define_insn "negsf2"
560 [(set (match_operand:SF
0 "register_operand" "=r")
561 (neg:SF (match_operand:SF
1 "register_operand" "r")))]
565 (define_insn "absdf2"
566 [(set (match_operand:DF
0 "register_operand" "=r")
567 (abs:DF (match_operand:DF
1 "register_operand" "r")))]
571 (define_insn "abssf2"
572 [(set (match_operand:SF
0 "register_operand" "=r")
573 (abs:SF (match_operand:SF
1 "register_operand" "r")))]
577 ;______________________________________________________________________
579 ; Logical and Shift Instructions.
580 ;______________________________________________________________________
584 (and:SI (match_operand:SI
0 "general_operand" "%r")
585 (match_operand:SI
1 "general_operand" "g")))]
589 cc_status.flags |= CC_NO_OVERFLOW;
590 return
\"bitw %
1,%
0\";
593 (define_insn "andsi3"
594 [(set (match_operand:SI
0 "register_operand" "=r,r")
595 (and:SI (match_operand:SI
1 "general_operand" "%
0,r")
596 (match_operand:SI
2 "general_operand" "g,K")))]
600 if (which_alternative ==
0)
601 return
\"andw %
2,%
0\";
603 cc_status.flags = CC_NOT_NEGATIVE;
604 return (INTVAL (operands[
2]) ==
255
605 ?
\"movzbw %
1,%
0\" :
\"movzhw %
1,%
0\");
609 [(set (match_operand:SI
0 "register_operand" "=r")
610 (and:SI (not:SI (match_operand:SI
1 "general_operand" "g"))
611 (match_operand:SI
2 "register_operand" "
0")))]
615 (define_insn "iorsi3"
616 [(set (match_operand:SI
0 "register_operand" "=r")
617 (ior:SI (match_operand:SI
1 "general_operand" "%
0")
618 (match_operand:SI
2 "general_operand" "g")))]
622 (define_insn "xorsi3"
623 [(set (match_operand:SI
0 "register_operand" "=r")
624 (xor:SI (match_operand:SI
1 "general_operand" "%
0")
625 (match_operand:SI
2 "general_operand" "g")))]
629 ; The arithmetic left shift instructions work strangely on pyramids.
630 ; They fail to modify the sign bit. Therefore, use logic shifts.
632 (define_insn "ashlsi3"
633 [(set (match_operand:SI
0 "register_operand" "=r")
634 (ashift:SI (match_operand:SI
1 "register_operand" "
0")
635 (match_operand:SI
2 "general_operand" "rnm")))]
637 "* return output_shift (
\"lshlw %
2,%
0\", operands[
2],
32); ")
639 (define_insn "ashrsi3"
640 [(set (match_operand:SI
0 "register_operand" "=r")
641 (ashiftrt:SI (match_operand:SI
1 "register_operand" "
0")
642 (match_operand:SI
2 "general_operand" "rnm")))]
644 "* return output_shift (
\"ashrw %
2,%
0\", operands[
2],
32); ")
646 (define_insn "ashrdi3"
647 [(set (match_operand:DI
0 "register_operand" "=r")
648 (ashiftrt:DI (match_operand:DI
1 "register_operand" "
0")
649 (match_operand:SI
2 "general_operand" "rnm")))]
651 "* return output_shift (
\"ashrl %
2,%
0\", operands[
2],
64); ")
653 (define_insn "lshrsi3"
654 [(set (match_operand:SI
0 "register_operand" "=r")
655 (lshiftrt:SI (match_operand:SI
1 "register_operand" "
0")
656 (match_operand:SI
2 "general_operand" "rnm")))]
658 "* return output_shift (
\"lshrw %
2,%
0\", operands[
2],
32); ")
660 (define_insn "rotlsi3"
661 [(set (match_operand:SI
0 "register_operand" "=r")
662 (rotate:SI (match_operand:SI
1 "register_operand" "
0")
663 (match_operand:SI
2 "general_operand" "rnm")))]
665 "* return output_shift (
\"rotlw %
2,%
0\", operands[
2],
32); ")
667 (define_insn "rotrsi3"
668 [(set (match_operand:SI
0 "register_operand" "=r")
669 (rotatert:SI (match_operand:SI
1 "register_operand" "
0")
670 (match_operand:SI
2 "general_operand" "rnm")))]
672 "* return output_shift (
\"rotrw %
2,%
0\", operands[
2],
32); ")
674 ;______________________________________________________________________
676 ; Fixed and Floating Moves.
677 ;______________________________________________________________________
679 ;; If the destination is a memory operand, indexed source operands are
680 ;; disallowed. Big DImode constants are always loaded into a reg pair,
681 ;; although offsettable memory addresses really could be dealt with.
684 [(set (match_operand:DI
0 "memory_operand" "=m")
685 (match_operand:DI
1 "nonindexed_operand" "gF"))]
686 "(GET_CODE (operands[
1]) == CONST_DOUBLE
687 ? ((CONST_DOUBLE_HIGH (operands[
1]) ==
0
688 && CONST_DOUBLE_LOW (operands[
1]) >=
0)
689 || (CONST_DOUBLE_HIGH (operands[
1]) == -
1
690 && CONST_DOUBLE_LOW (operands[
1]) <
0))
694 if (GET_CODE (operands[
1]) == CONST_DOUBLE)
695 operands[
1] = gen_rtx (CONST_INT, VOIDmode,
696 CONST_DOUBLE_LOW (operands[
1]));
697 return
\"movl %
1,%
0\";
700 ;; Force the destination to a register, so all source operands are allowed.
703 [(set (match_operand:DI
0 "general_operand" "=r")
704 (match_operand:DI
1 "general_operand" "gF"))]
706 "* return output_move_double (operands); ")
708 ;; If the destination is a memory address, indexed source operands are
712 [(set (match_operand:SI
0 "memory_operand" "=m")
713 (match_operand:SI
1 "nonindexed_operand" "g"))]
717 ;; Force the destination to a register, so all source operands are allowed.
720 [(set (match_operand:SI
0 "general_operand" "=r")
721 (match_operand:SI
1 "general_operand" "g"))]
725 ;; If the destination is a memory address, indexed source operands are
729 [(set (match_operand:HI
0 "memory_operand" "=m")
730 (match_operand:HI
1 "nonindexed_operand" "g"))]
734 if (REG_P (operands[
1]))
735 return
\"cvtwh %
1,%
0\"; /* reg -> mem */
737 return
\"movh %
1,%
0\"; /* mem imm -> mem */
740 ;; Force the destination to a register, so all source operands are allowed.
743 [(set (match_operand:HI
0 "general_operand" "=r")
744 (match_operand:HI
1 "general_operand" "g"))]
748 if (GET_CODE (operands[
1]) != MEM)
749 return
\"movw %
1,%
0\"; /* reg imm -> reg */
750 return
\"cvthw %
1,%
0\"; /* mem -> reg */
753 ;; If the destination is a memory address, indexed source operands are
757 [(set (match_operand:QI
0 "memory_operand" "=m")
758 (match_operand:QI
1 "nonindexed_operand" "g"))]
762 if (REG_P (operands[
1]))
763 return
\"cvtwb %
1,%
0\"; /* reg -> mem */
765 return
\"movb %
1,%
0\"; /* mem imm -> mem */
768 ;; Force the destination to a register, so all source operands are allowed.
771 [(set (match_operand:QI
0 "general_operand" "=r")
772 (match_operand:QI
1 "general_operand" "g"))]
776 if (GET_CODE (operands[
1]) != MEM)
777 return
\"movw %
1,%
0\"; /* reg imm -> reg */
778 return
\"cvtbw %
1,%
0\"; /* mem -> reg */
781 ;; If the destination is a memory address, indexed source operands are
785 [(set (match_operand:DF
0 "memory_operand" "=m")
786 (match_operand:DF
1 "nonindexed_operand" "g"))]
787 "GET_CODE (operands[
1]) != CONST_DOUBLE"
790 ;; Force the destination to a register, so all source operands are allowed.
793 [(set (match_operand:DF
0 "general_operand" "=r")
794 (match_operand:DF
1 "general_operand" "gF"))]
796 "* return output_move_double (operands); ")
798 ;; If the destination is a memory address, indexed source operands are
802 [(set (match_operand:SF
0 "memory_operand" "=m")
803 (match_operand:SF
1 "nonindexed_operand" "g"))]
807 ;; Force the destination to a register, so all source operands are allowed.
810 [(set (match_operand:SF
0 "general_operand" "=r")
811 (match_operand:SF
1 "general_operand" "g"))]
816 [(set (match_operand:SI
0 "register_operand" "=r")
817 (match_operand:QI
1 "address_operand" "p"))]
821 forget_cc_if_dependent (operands[
0]);
822 return
\"mova %a1,%
0\";
825 ;______________________________________________________________________
827 ; Conversion patterns.
828 ;______________________________________________________________________
830 ;; The trunc patterns are used only when non compile-time constants are used.
832 (define_insn "truncsiqi2"
833 [(set (match_operand:QI
0 "register_operand" "=r")
834 (truncate:QI (match_operand:SI
1 "nonimmediate_operand" "rm")))]
838 if (REG_P (operands[
0]) && REG_P (operands[
1])
839 && REGNO (operands[
0]) == REGNO (operands[
1]))
841 cc_status = cc_prev_status;
844 forget_cc_if_dependent (operands[
0]);
845 return
\"movw %
1,%
0\";
848 (define_insn "truncsihi2"
849 [(set (match_operand:HI
0 "register_operand" "=r")
850 (truncate:HI (match_operand:SI
1 "nonimmediate_operand" "rm")))]
854 if (REG_P (operands[
0]) && REG_P (operands[
1])
855 && REGNO (operands[
0]) == REGNO (operands[
1]))
857 cc_status = cc_prev_status;
860 forget_cc_if_dependent (operands[
0]);
861 return
\"movw %
1,%
0\";
864 (define_insn "extendhisi2"
865 [(set (match_operand:SI
0 "general_operand" "=r,m")
866 (sign_extend:SI (match_operand:HI
1 "nonimmediate_operand" "rm,r")))]
871 if (optimize && REG_P (operands[
0]) && REG_P (operands[
1])
872 && REGNO (operands[
0]) == REGNO (operands[
1])
873 && already_sign_extended (insn, HImode, operands[
0]))
875 cc_status = cc_prev_status;
878 return
\"cvthw %
1,%
0\";
881 (define_insn "extendqisi2"
882 [(set (match_operand:SI
0 "general_operand" "=r,m")
883 (sign_extend:SI (match_operand:QI
1 "nonimmediate_operand" "rm,r")))]
888 if (optimize && REG_P (operands[
0]) && REG_P (operands[
1])
889 && REGNO (operands[
0]) == REGNO (operands[
1])
890 && already_sign_extended (insn, QImode, operands[
0]))
892 cc_status = cc_prev_status;
895 return
\"cvtbw %
1,%
0\";
898 ; Pyramid doesn't have insns *called* "cvtbh" or "movzbh".
899 ; But we can cvtbw/movzbw into a register, where there is no distinction
900 ; between words and halfwords.
902 (define_insn "extendqihi2"
903 [(set (match_operand:HI
0 "register_operand" "=r")
904 (sign_extend:HI (match_operand:QI
1 "nonimmediate_operand" "rm")))]
908 (define_insn "zero_extendhisi2"
909 [(set (match_operand:SI
0 "register_operand" "=r")
910 (zero_extend:SI (match_operand:HI
1 "nonimmediate_operand" "rm")))]
914 cc_status.flags = CC_NOT_NEGATIVE;
915 return
\"movzhw %
1,%
0\";
918 (define_insn "zero_extendqisi2"
919 [(set (match_operand:SI
0 "register_operand" "=r")
920 (zero_extend:SI (match_operand:QI
1 "nonimmediate_operand" "rm")))]
924 cc_status.flags = CC_NOT_NEGATIVE;
925 return
\"movzbw %
1,%
0\";
928 (define_insn "zero_extendqihi2"
929 [(set (match_operand:HI
0 "register_operand" "=r")
930 (zero_extend:HI (match_operand:QI
1 "nonimmediate_operand" "rm")))]
934 cc_status.flags = CC_NOT_NEGATIVE;
935 return
\"movzbw %
1,%
0\";
938 (define_insn "extendsfdf2"
939 [(set (match_operand:DF
0 "general_operand" "=&r,m")
940 (float_extend:DF (match_operand:SF
1 "nonimmediate_operand" "rm,r")))]
944 (define_insn "truncdfsf2"
945 [(set (match_operand:SF
0 "general_operand" "=&r,m")
946 (float_truncate:SF (match_operand:DF
1 "nonimmediate_operand" "rm,r")))]
950 (define_insn "floatsisf2"
951 [(set (match_operand:SF
0 "general_operand" "=&r,m")
952 (float:SF (match_operand:SI
1 "nonimmediate_operand" "rm,r")))]
956 (define_insn "floatsidf2"
957 [(set (match_operand:DF
0 "general_operand" "=&r,m")
958 (float:DF (match_operand:SI
1 "nonimmediate_operand" "rm,r")))]
962 (define_insn "fix_truncsfsi2"
963 [(set (match_operand:SI
0 "general_operand" "=&r,m")
964 (fix:SI (fix:SF (match_operand:SF
1 "nonimmediate_operand" "rm,r"))))]
968 (define_insn "fix_truncdfsi2"
969 [(set (match_operand:SI
0 "general_operand" "=&r,m")
970 (fix:SI (fix:DF (match_operand:DF
1 "nonimmediate_operand" "rm,r"))))]
974 ;______________________________________________________________________
976 ; Flow Control Patterns.
977 ;______________________________________________________________________
979 ;; Prefer "br" to "jump" for unconditional jumps, since it's faster.
980 ;; (The assembler can manage with out-of-range branches.)
984 (label_ref (match_operand
0 "" "")))]
990 (if_then_else (match_operator
0 "relop" [(cc0) (const_int
0)])
991 (label_ref (match_operand
1 "" ""))
998 switch (GET_CODE (operands[
0]))
1002 case LT: case LE: case GE: case GT:
1003 if (cc_prev_status.mdep == CC_VALID_FOR_UNSIGNED)
1006 case LTU: case LEU: case GEU: case GTU:
1007 if (cc_prev_status.mdep != CC_VALID_FOR_UNSIGNED)
1012 return
\"b%N0 %l1
\";
1017 (if_then_else (match_operator
0 "relop" [(cc0) (const_int
0)])
1019 (label_ref (match_operand
1 "" ""))))]
1023 extern int optimize;
1025 switch (GET_CODE (operands[
0]))
1029 case LT: case LE: case GE: case GT:
1030 if (cc_prev_status.mdep == CC_VALID_FOR_UNSIGNED)
1033 case LTU: case LEU: case GEU: case GTU:
1034 if (cc_prev_status.mdep != CC_VALID_FOR_UNSIGNED)
1039 return
\"b%C0 %l1
\";
1043 [(call (match_operand:QI
0 "memory_operand" "m")
1044 (match_operand:SI
1 "immediate_operand" "n"))]
1048 (define_insn "call_value"
1049 [(set (match_operand
0 "" "=r")
1050 (call (match_operand:QI
1 "memory_operand" "m")
1051 (match_operand:SI
2 "immediate_operand" "n")))]
1052 ;; Operand
2 not really used on Pyramid architecture.
1056 (define_insn "return"
1061 if (get_frame_size () + current_function_pretend_args_size
1062 + current_function_args_size !=
0
1063 || current_function_calls_alloca)
1065 int dealloc_size = current_function_pretend_args_size;
1066 if (current_function_pops_args)
1067 dealloc_size += current_function_args_size;
1068 operands[
0] = gen_rtx (CONST_INT, VOIDmode, dealloc_size);
1075 (define_insn "tablejump"
1076 [(set (pc) (match_operand:SI
0 "register_operand" "r"))
1077 (use (label_ref (match_operand
1 "" "")))]
1084 "movw gr0,gr0 # nop")
1086 ;______________________________________________________________________
1088 ; Peep-hole Optimization Patterns.
1089 ;______________________________________________________________________
1091 ;; Optimize fullword move followed by a test of the moved value.
1094 [(set (match_operand:SI
0 "register_operand" "=r")
1095 (match_operand:SI
1 "nonimmediate_operand" "rm"))
1096 (set (cc0) (match_operand:SI
2 "nonimmediate_operand" "rm"))]
1097 "rtx_equal_p (operands[
2], operands[
0])
1098 || rtx_equal_p (operands[
2], operands[
1])"
1100 cc_status.flags |= CC_NO_OVERFLOW;
1101 return
\"mtstw %
1,%
0\";
1104 ;; Same for HI and QI mode move-test as well.
1107 [(set (match_operand:HI
0 "register_operand" "=r")
1108 (match_operand:HI
1 "nonimmediate_operand" "rm"))
1109 (set (match_operand:SI
2 "register_operand" "=r")
1110 (sign_extend:SI (match_operand:HI
3 "nonimmediate_operand" "rm")))
1111 (set (cc0) (match_dup
2))]
1112 "dead_or_set_p (insn, operands[
2])
1113 && (rtx_equal_p (operands[
3], operands[
0])
1114 || rtx_equal_p (operands[
3], operands[
1]))"
1116 cc_status.flags |= CC_NO_OVERFLOW;
1117 return
\"cvthw %
1,%
0\";
1121 [(set (match_operand:QI
0 "register_operand" "=r")
1122 (match_operand:QI
1 "nonimmediate_operand" "rm"))
1123 (set (match_operand:SI
2 "register_operand" "=r")
1124 (sign_extend:SI (match_operand:QI
3 "nonimmediate_operand" "rm")))
1125 (set (cc0) (match_dup
2))]
1126 "dead_or_set_p (insn, operands[
2])
1127 && (rtx_equal_p (operands[
3], operands[
0])
1128 || rtx_equal_p (operands[
3], operands[
1]))"
1130 cc_status.flags |= CC_NO_OVERFLOW;
1131 return
\"cvtbw %
1,%
0\";
1134 ;; Optimize loops with an incremented/decremented variable.
1137 [(set (match_operand:SI
0 "register_operand" "=r")
1138 (plus:SI (match_dup
0)
1141 (compare (match_operand:SI
1 "register_operand" "r")
1142 (match_operand:SI
2 "nonmemory_operand" "ri")))
1144 (if_then_else (match_operator:SI
3 "signed_comparison"
1145 [(cc0) (const_int
0)])
1146 (label_ref (match_operand
4 "" ""))
1148 "(GET_CODE (operands[
2]) == CONST_INT
1149 ? (unsigned)INTVAL (operands[
2]) +
32 >=
64
1150 :
1) && (rtx_equal_p (operands[
0], operands[
1])
1151 || rtx_equal_p (operands[
0], operands[
2]))"
1153 if (rtx_equal_p (operands[
0], operands[
1]))
1155 output_asm_insn (
\"dcmpw %
2,%
0\", operands);
1156 return
\"b%N3 %l4
\";
1160 output_asm_insn (
\"dcmpw %
1,%
0\", operands);
1161 return
\"b%R3 %l4
\";
1166 [(set (match_operand:SI
0 "register_operand" "=r")
1167 (plus:SI (match_dup
0)
1170 (compare (match_operand:SI
1 "register_operand" "r")
1171 (match_operand:SI
2 "nonmemory_operand" "ri")))
1173 (if_then_else (match_operator:SI
3 "signed_comparison"
1174 [(cc0) (const_int
0)])
1175 (label_ref (match_operand
4 "" ""))
1177 "(GET_CODE (operands[
2]) == CONST_INT
1178 ? (unsigned)INTVAL (operands[
2]) +
32 >=
64
1179 :
1) && (rtx_equal_p (operands[
0], operands[
1])
1180 || rtx_equal_p (operands[
0], operands[
2]))"
1182 if (rtx_equal_p (operands[
0], operands[
1]))
1184 output_asm_insn (
\"icmpw %
2,%
0\", operands);
1185 return
\"b%N3 %l4
\";
1189 output_asm_insn (
\"icmpw %
1,%
0\", operands);
1190 return
\"b%R3 %l4
\";
1194 ;; Combine two word moves with consecutive operands into one long move.
1195 ;; Also combines immediate moves, if the high-order destination operand
1196 ;; is loaded with
0 or -
1 and the low-order destination operand is loaded
1197 ;; with a constant with the same sign.
1200 [(set (match_operand:SI
0 "general_operand" "=g")
1201 (match_operand:SI
1 "general_operand" "g"))
1202 (set (match_operand:SI
2 "general_operand" "=g")
1203 (match_operand:SI
3 "general_operand" "g"))]
1204 "movdi_possible (operands)"
1207 output_asm_insn (
\"# COMBINE movw %
1,%
0\", operands);
1208 output_asm_insn (
\"# COMBINE movw %
3,%
2\", operands);
1209 movdi_possible (operands);
1210 if (CONSTANT_P (operands[
1]))
1211 return (swap_operands ?
\"movl %
3,%
0\" :
\"movl %
1,%
2\");
1213 return (swap_operands ?
\"movl %
1,%
0\" :
\"movl %
3,%
2\");
1216 ;; Optimize certain tests after memory stores.
1219 [(set (match_operand
0 "memory_operand" "=m")
1220 (match_operand
1 "register_operand" "r"))
1221 (set (match_operand:SI
2 "register_operand" "=r")
1222 (sign_extend:SI (match_dup
1)))
1225 "dead_or_set_p (insn, operands[
2])"
1227 cc_status.flags |= CC_NO_OVERFLOW;
1228 if (GET_MODE (operands[
0]) == QImode)
1229 return
\"cvtwb %
1,%
0\";
1231 return
\"cvtwh %
1,%
0\";
1234 ;______________________________________________________________________
1237 ;______________________________________________________________________
1239 (define_expand "extendsidi2"
1240 [(set (subreg:SI (match_operand:DI
0 "register_operand" "=r")
1)
1241 (match_operand:SI
1 "general_operand" "g"))
1242 (set (subreg:SI (match_dup
0)
0)
1243 (subreg:SI (match_dup
0)
1))
1244 (set (subreg:SI (match_dup
0)
0)
1245 (ashiftrt:SI (subreg:SI (match_dup
0)
0)
1250 (define_insn "adddi3"
1251 [(set (match_operand:DI
0 "register_operand" "=r")
1252 (plus:DI (match_operand:DI
1 "nonmemory_operand" "%
0")
1253 (match_operand:DI
2 "nonmemory_operand" "rF")))]
1259 xoperands[
0] = gen_rtx (REG, SImode, REGNO (operands[
0]) +
1);
1260 if (REG_P (operands[
2]))
1261 xoperands[
1] = gen_rtx (REG, SImode, REGNO (operands[
2]) +
1);
1264 xoperands[
1] = gen_rtx (CONST_INT, VOIDmode,
1265 CONST_DOUBLE_LOW (operands[
2]));
1266 operands[
2] = gen_rtx (CONST_INT, VOIDmode,
1267 CONST_DOUBLE_HIGH (operands[
2]));
1269 output_asm_insn (
\"addw %
1,%
0\", xoperands);
1270 return
\"addwc %
2,%
0\";
1273 (define_insn "subdi3"
1274 [(set (match_operand:DI
0 "register_operand" "=r")
1275 (minus:DI (match_operand:DI
1 "register_operand" "
0")
1276 (match_operand:DI
2 "nonmemory_operand" "rF")))]
1282 xoperands[
0] = gen_rtx (REG, SImode, REGNO (operands[
0]) +
1);
1283 if (REG_P (operands[
2]))
1284 xoperands[
1] = gen_rtx (REG, SImode, REGNO (operands[
2]) +
1);
1287 xoperands[
1] = gen_rtx (CONST_INT, VOIDmode,
1288 CONST_DOUBLE_LOW (operands[
2]));
1289 operands[
2] = gen_rtx (CONST_INT, VOIDmode,
1290 CONST_DOUBLE_HIGH (operands[
2]));
1292 output_asm_insn (
\"subw %
1,%
0\", xoperands);
1293 return
\"subwb %
2,%
0\";
1296 (define_insn "iordi3"
1297 [(set (match_operand:DI
0 "register_operand" "=r")
1298 (ior:DI (match_operand:DI
1 "nonmemory_operand" "%
0")
1299 (match_operand:DI
2 "nonmemory_operand" "rF")))]
1305 xoperands[
0] = gen_rtx (REG, SImode, REGNO (operands[
0]) +
1);
1306 if (REG_P (operands[
2]))
1307 xoperands[
1] = gen_rtx (REG, SImode, REGNO (operands[
2]) +
1);
1310 xoperands[
1] = gen_rtx (CONST_INT, VOIDmode,
1311 CONST_DOUBLE_LOW (operands[
2]));
1312 operands[
2] = gen_rtx (CONST_INT, VOIDmode,
1313 CONST_DOUBLE_HIGH (operands[
2]));
1315 output_asm_insn (
\"orw %
1,%
0\", xoperands);
1316 return
\"orw %
2,%
0\";
1319 (define_insn "anddi3"
1320 [(set (match_operand:DI
0 "register_operand" "=r")
1321 (and:DI (match_operand:DI
1 "nonmemory_operand" "%
0")
1322 (match_operand:DI
2 "nonmemory_operand" "rF")))]
1328 xoperands[
0] = gen_rtx (REG, SImode, REGNO (operands[
0]) +
1);
1329 if (REG_P (operands[
2]))
1330 xoperands[
1] = gen_rtx (REG, SImode, REGNO (operands[
2]) +
1);
1333 xoperands[
1] = gen_rtx (CONST_INT, VOIDmode,
1334 CONST_DOUBLE_LOW (operands[
2]));
1335 operands[
2] = gen_rtx (CONST_INT, VOIDmode,
1336 CONST_DOUBLE_HIGH (operands[
2]));
1338 output_asm_insn (
\"andw %
1,%
0\", xoperands);
1339 return
\"andw %
2,%
0\";
1342 (define_insn "xordi3"
1343 [(set (match_operand:DI
0 "register_operand" "=r")
1344 (xor:DI (match_operand:DI
1 "nonmemory_operand" "%
0")
1345 (match_operand:DI
2 "nonmemory_operand" "rF")))]
1351 xoperands[
0] = gen_rtx (REG, SImode, REGNO (operands[
0]) +
1);
1352 if (REG_P (operands[
2]))
1353 xoperands[
1] = gen_rtx (REG, SImode, REGNO (operands[
2]) +
1);
1356 xoperands[
1] = gen_rtx (CONST_INT, VOIDmode,
1357 CONST_DOUBLE_LOW (operands[
2]));
1358 operands[
2] = gen_rtx (CONST_INT, VOIDmode,
1359 CONST_DOUBLE_HIGH (operands[
2]));
1361 output_asm_insn (
\"xorw %
1,%
0\", xoperands);
1362 return
\"xorw %
2,%
0\";
1365 ;; My version, modelled after Jonathan Stone's and "tablejump" - S.P.
1366 (define_insn "indirect_jump"
1367 [(set (pc) (match_operand:SI
0 "general_operand" "r"))]
1371 ;;- Local variables:
1373 ;;- comment-start: ";;- "
1374 ;;- eval: (set-syntax-table (copy-sequence (syntax-table)))
1375 ;;- eval: (modify-syntax-entry ?] ")[")
1376 ;;- eval: (modify-syntax-entry ?{ "(}")
1377 ;;- eval: (modify-syntax-entry ?} "){")