;; GNU C machine description for Pyramid 90x, 9000, MIServer Series
;; Copyright (C) 1989, 1990 Free Software Foundation, Inc.
4 ;; This file is part of GNU CC.
6 ;; GNU CC is free software; you can redistribute it and/or modify
7 ;; it under the terms of the GNU General Public License as published by
;; the Free Software Foundation; either version 2, or (at your option)
;; any later version.
11 ;; GNU CC is distributed in the hope that it will be useful,
12 ;; but WITHOUT ANY WARRANTY; without even the implied warranty of
13 ;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 ;; GNU General Public License for more details.
16 ;; You should have received a copy of the GNU General Public License
;; along with GNU CC; see the file COPYING.  If not, write to
;; the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.
20 ;; Instruction patterns. When multiple patterns apply,
21 ;; the first one in the file is chosen.
;; See file "rtl.def" for documentation on define_insn, match_*, et al.
25 ;; cpp macro #define NOTICE_UPDATE_CC in file tm.h handles condition code
26 ;; updates for most instructions.
28 ;; * Try using define_insn instead of some peepholes in more places.
29 ;; * Set REG_NOTES:REG_EQUIV for cvt[bh]w loads. This would make the
30 ;; backward scan in sign_extend needless.
31 ;; * Match (pc) (label_ref) case in peephole patterns.
;; "cmpX op1,op2; b{eq,ne} LY; ucmpX op1,op2; b{lt,le,gt,ge} LZ"
35 ;; "ucmpX op1,op2; b{eq,ne} LY; b{lt,le,gt,ge} LZ"
36 ;; by pre-scanning insn and running notice_update_cc for them.
37 ;; * Is it necessary to do copy_rtx in the test and compare patterns?
38 ;; * Fix true frame pointer omission.
39 ;; * Make the jump tables contain branches, not addresses! This would
40 ;; save us one instruction.
41 ;; * Could the complicated scheme for compares be simplified, if we had
42 ;; no named cmpqi or cmphi patterns, and instead anonymous patterns for
43 ;; the less-than-word compare cases pyr can handle???
44 ;; * The jump insn seems to accept more than just IR addressing. Would
45 ;; we win by telling GCC? Or can we use movw into the global reg which
46 ;; is a synonym for pc?
47 ;; * More DImode patterns.
48 ;; * Scan backwards in "zero_extendhisi2", "zero_extendqisi2" to find out
49 ;; if the extension can be omitted.
50 ;; * "divmodsi" with Pyramid "ediv" insn. Is it possible in rtl??
51 ;; * Would "rcsp tmpreg; u?cmp[bh] op1_regdispl(tmpreg),op2" win in
52 ;; comparison with the two extensions and single test generated now?
53 ;; The rcsp insn could be expanded, and moved out of loops by the
;; optimizer, making 1 (64 bit) insn of 3 (32 bit) insns in loops.
55 ;; The rcsp insn could be followed by an add insn, making non-displacement
56 ;; IR addressing sufficient.
58 ;______________________________________________________________________
60 ; Test and Compare Patterns.
61 ;______________________________________________________________________
63 ; The argument for the rather complicated test and compare expansion
64 ; scheme, is the irregular pyramid instructions for these operations.
; 1) Pyramid has different signed and unsigned compares.  2) HImode
; and QImode integers are memory-memory and immediate-memory only.  3)
; Unsigned HImode compares don't exist.  4) Only certain
; combinations of addresses are allowed for memory-memory compares.
69 ; Whenever necessary, in order to fulfill these addressing
70 ; constraints, the compare operands are swapped.
72 (define_expand "tstsi"
74 (match_operand:SI
0 "general_operand" ""))]
75 "" "operands[
0] = force_reg (SImode, operands[
0]);")
79 (compare (match_operand:SI
0 "memory_operand" "m")
80 (match_operand:SI
1 "memory_operand" "m")))]
81 "weird_memory_memory (operands[
0], operands[
1])"
84 rtx br_insn = NEXT_INSN (insn);
87 extern int swap_operands;
88 if (GET_CODE (br_insn) != JUMP_INSN)
90 br_code = GET_CODE (XEXP (XEXP (PATTERN (br_insn),
1),
0));
92 weird_memory_memory (operands[
0], operands[
1]);
96 cc_status.flags = CC_REVERSED;
97 if (TRULY_UNSIGNED_COMPARE_P (br_code))
99 cc_status.mdep = CC_VALID_FOR_UNSIGNED;
100 return
\"ucmpw %
0,%
1\";
102 return
\"cmpw %
0,%
1\";
105 if (TRULY_UNSIGNED_COMPARE_P (br_code))
107 cc_status.mdep = CC_VALID_FOR_UNSIGNED;
108 return
\"ucmpw %
1,%
0\";
110 return
\"cmpw %
1,%
0\";
115 (compare (match_operand:SI
0 "nonimmediate_operand" "r,g")
116 (match_operand:SI
1 "general_operand" "g,r")))]
120 rtx br_insn = NEXT_INSN (insn);
123 if (GET_CODE (br_insn) != JUMP_INSN)
125 br_code = GET_CODE (XEXP (XEXP (PATTERN (br_insn),
1),
0));
127 if (which_alternative !=
0)
129 cc_status.flags = CC_REVERSED;
130 if (TRULY_UNSIGNED_COMPARE_P (br_code))
132 cc_status.mdep = CC_VALID_FOR_UNSIGNED;
133 return
\"ucmpw %
0,%
1\";
135 return
\"cmpw %
0,%
1\";
138 if (TRULY_UNSIGNED_COMPARE_P (br_code))
140 cc_status.mdep = CC_VALID_FOR_UNSIGNED;
141 return
\"ucmpw %
1,%
0\";
143 return
\"cmpw %
1,%
0\";
148 (match_operand:SI
0 "nonimmediate_operand" "r"))]
153 cc_status.flags |= CC_NO_OVERFLOW;
154 return
\"cmpw $
0,%
0\";
156 rtx br_insn = NEXT_INSN (insn);
159 if (GET_CODE (br_insn) != JUMP_INSN)
161 br_code = GET_CODE (XEXP (XEXP (PATTERN (br_insn),
1),
0));
163 if (TRULY_UNSIGNED_COMPARE_P (br_code))
165 cc_status.mdep = CC_VALID_FOR_UNSIGNED;
166 return
\"ucmpw $
0,%
0\";
168 return
\"mtstw %
0,%
0\";
171 (define_expand "cmphi"
173 (compare (match_operand:HI
0 "nonimmediate_operand" "")
174 (match_operand:HI
1 "general_operand" "")))]
178 extern rtx test_op0, test_op1; extern enum machine_mode test_mode;
179 test_op0 = copy_rtx (operands[
0]);
180 test_op1 = copy_rtx (operands[
1]);
185 (define_expand "tsthi"
187 (match_operand:HI
0 "nonimmediate_operand" ""))]
191 extern rtx test_op0; extern enum machine_mode test_mode;
192 test_op0 = copy_rtx (operands[
0]);
199 (compare (match_operand:HI
0 "memory_operand" "m")
200 (match_operand:HI
1 "memory_operand" "m")))]
201 "weird_memory_memory (operands[
0], operands[
1])"
204 extern int swap_operands;
205 rtx br_insn = NEXT_INSN (insn);
207 if (GET_CODE (br_insn) != JUMP_INSN)
210 weird_memory_memory (operands[
0], operands[
1]);
214 cc_status.flags = CC_REVERSED;
215 return
\"cmph %
0,%
1\";
218 return
\"cmph %
1,%
0\";
223 (compare (match_operand:HI
0 "nonimmediate_operand" "r,m")
224 (match_operand:HI
1 "nonimmediate_operand" "m,r")))]
225 "(GET_CODE (operands[
0]) != GET_CODE (operands[
1]))"
228 rtx br_insn = NEXT_INSN (insn);
230 if (GET_CODE (br_insn) != JUMP_INSN)
233 if (which_alternative !=
0)
235 cc_status.flags = CC_REVERSED;
236 return
\"cmph %
0,%
1\";
239 return
\"cmph %
1,%
0\";
242 (define_expand "cmpqi"
244 (compare (match_operand:QI
0 "nonimmediate_operand" "")
245 (match_operand:QI
1 "general_operand" "")))]
249 extern rtx test_op0, test_op1; extern enum machine_mode test_mode;
250 test_op0 = copy_rtx (operands[
0]);
251 test_op1 = copy_rtx (operands[
1]);
256 (define_expand "tstqi"
258 (match_operand:QI
0 "nonimmediate_operand" ""))]
262 extern rtx test_op0; extern enum machine_mode test_mode;
263 test_op0 = copy_rtx (operands[
0]);
270 (compare (match_operand:QI
0 "memory_operand" "m")
271 (match_operand:QI
1 "memory_operand" "m")))]
272 "weird_memory_memory (operands[
0], operands[
1])"
275 extern int swap_operands;
276 rtx br_insn = NEXT_INSN (insn);
279 if (GET_CODE (br_insn) != JUMP_INSN)
281 br_code = GET_CODE (XEXP (XEXP (PATTERN (br_insn),
1),
0));
283 weird_memory_memory (operands[
0], operands[
1]);
287 cc_status.flags = CC_REVERSED;
288 if (TRULY_UNSIGNED_COMPARE_P (br_code))
290 cc_status.mdep = CC_VALID_FOR_UNSIGNED;
291 return
\"ucmpb %
0,%
1\";
293 return
\"cmpb %
0,%
1\";
296 if (TRULY_UNSIGNED_COMPARE_P (br_code))
298 cc_status.mdep = CC_VALID_FOR_UNSIGNED;
299 return
\"ucmpb %
1,%
0\";
301 return
\"cmpb %
1,%
0\";
306 (compare (match_operand:QI
0 "nonimmediate_operand" "r,m")
307 (match_operand:QI
1 "nonimmediate_operand" "m,r")))]
308 "(GET_CODE (operands[
0]) != GET_CODE (operands[
1]))"
311 rtx br_insn = NEXT_INSN (insn);
314 if (GET_CODE (br_insn) != JUMP_INSN)
316 br_code = GET_CODE (XEXP (XEXP (PATTERN (br_insn),
1),
0));
318 if (which_alternative !=
0)
320 cc_status.flags = CC_REVERSED;
321 if (TRULY_UNSIGNED_COMPARE_P (br_code))
323 cc_status.mdep = CC_VALID_FOR_UNSIGNED;
324 return
\"ucmpb %
0,%
1\";
326 return
\"cmpb %
0,%
1\";
329 if (TRULY_UNSIGNED_COMPARE_P (br_code))
331 cc_status.mdep = CC_VALID_FOR_UNSIGNED;
332 return
\"ucmpb %
1,%
0\";
334 return
\"cmpb %
1,%
0\";
338 [(set (pc) (if_then_else (gt (cc0) (const_int
0))
339 (label_ref (match_operand
0 "" "")) (pc)))]
340 "" "extend_and_branch (SIGN_EXTEND);")
343 [(set (pc) (if_then_else (lt (cc0) (const_int
0))
344 (label_ref (match_operand
0 "" "")) (pc)))]
345 "" "extend_and_branch (SIGN_EXTEND);")
348 [(set (pc) (if_then_else (ge (cc0) (const_int
0))
349 (label_ref (match_operand
0 "" "")) (pc)))]
350 "" "extend_and_branch (SIGN_EXTEND);")
353 [(set (pc) (if_then_else (le (cc0) (const_int
0))
354 (label_ref (match_operand
0 "" "")) (pc)))]
355 "" "extend_and_branch (SIGN_EXTEND);")
358 [(set (pc) (if_then_else (eq (cc0) (const_int
0))
359 (label_ref (match_operand
0 "" "")) (pc)))]
360 "" "extend_and_branch (SIGN_EXTEND);")
363 [(set (pc) (if_then_else (ne (cc0) (const_int
0))
364 (label_ref (match_operand
0 "" "")) (pc)))]
365 "" "extend_and_branch (SIGN_EXTEND);")
367 (define_expand "bgtu"
368 [(set (pc) (if_then_else (gtu (cc0) (const_int
0))
369 (label_ref (match_operand
0 "" "")) (pc)))]
370 "" "extend_and_branch (ZERO_EXTEND);")
372 (define_expand "bltu"
373 [(set (pc) (if_then_else (ltu (cc0) (const_int
0))
374 (label_ref (match_operand
0 "" "")) (pc)))]
375 "" "extend_and_branch (ZERO_EXTEND);")
377 (define_expand "bgeu"
378 [(set (pc) (if_then_else (geu (cc0) (const_int
0))
379 (label_ref (match_operand
0 "" "")) (pc)))]
380 "" "extend_and_branch (ZERO_EXTEND);")
382 (define_expand "bleu"
383 [(set (pc) (if_then_else (leu (cc0) (const_int
0))
384 (label_ref (match_operand
0 "" "")) (pc)))]
385 "" "extend_and_branch (ZERO_EXTEND);")
389 (compare (match_operand:DF
0 "register_operand" "r")
390 (match_operand:DF
1 "register_operand" "r")))]
396 (compare (match_operand:SF
0 "register_operand" "r")
397 (match_operand:SF
1 "register_operand" "r")))]
403 (match_operand:DF
0 "register_operand" "r"))]
409 (match_operand:SF
0 "register_operand" "r"))]
413 ;______________________________________________________________________
415 ; Fixed-point Arithmetic.
416 ;______________________________________________________________________
418 (define_insn "addsi3"
419 [(set (match_operand:SI
0 "register_operand" "=r,!r")
420 (plus:SI (match_operand:SI
1 "general_operand" "%
0,r")
421 (match_operand:SI
2 "general_operand" "g,rJ")))]
425 if (which_alternative ==
0)
426 return (GET_CODE (operands[
2]) == CONST_INT && INTVAL (operands[
2]) ==
32
427 ?
\"subw %n2,%
0\" :
\"addw %
2,%
0\");
430 forget_cc_if_dependent (operands[
0]);
431 return
\"mova %a2[%
1*
1],%
0\";
435 (define_insn "subsi3"
436 [(set (match_operand:SI
0 "register_operand" "=r,r")
437 (minus:SI (match_operand:SI
1 "general_operand" "
0,g")
438 (match_operand:SI
2 "general_operand" "g,
0")))]
440 "* return (which_alternative ==
0) ?
\"subw %
2,%
0\" :
\"rsubw %
1,%
0\";")
442 (define_insn "mulsi3"
443 [(set (match_operand:SI
0 "register_operand" "=r")
444 (mult:SI (match_operand:SI
1 "general_operand" "%
0")
445 (match_operand:SI
2 "general_operand" "g")))]
449 (define_insn "divsi3"
450 [(set (match_operand:SI
0 "register_operand" "=r,r")
451 (div:SI (match_operand:SI
1 "general_operand" "
0,g")
452 (match_operand:SI
2 "general_operand" "g,
0")))]
454 "* return (which_alternative ==
0) ?
\"divw %
2,%
0\" :
\"rdivw %
1,%
0\";")
456 (define_insn "udivsi3"
457 [(set (match_operand:SI
0 "register_operand" "=r")
458 (udiv:SI (match_operand:SI
1 "register_operand" "
0")
459 (match_operand:SI
2 "general_operand" "g")))]
463 (define_insn "modsi3"
464 [(set (match_operand:SI
0 "register_operand" "=r")
465 (mod:SI (match_operand:SI
1 "register_operand" "
0")
466 (match_operand:SI
2 "general_operand" "g")))]
470 (define_insn "umodsi3"
471 [(set (match_operand:SI
0 "register_operand" "=r")
472 (umod:SI (match_operand:SI
1 "register_operand" "
0")
473 (match_operand:SI
2 "general_operand" "g")))]
477 (define_insn "negsi2"
478 [(set (match_operand:SI
0 "register_operand" "=r")
479 (neg:SI (match_operand:SI
1 "nonimmediate_operand" "rm")))]
483 (define_insn "one_cmplsi2"
484 [(set (match_operand:SI
0 "register_operand" "=r")
485 (not:SI (match_operand:SI
1 "nonimmediate_operand" "rm")))]
489 (define_insn "abssi2"
490 [(set (match_operand:SI
0 "register_operand" "=r")
491 (abs:SI (match_operand:SI
1 "nonimmediate_operand" "rm")))]
495 ;______________________________________________________________________
497 ; Floating-point Arithmetic.
498 ;______________________________________________________________________
500 (define_insn "adddf3"
501 [(set (match_operand:DF
0 "register_operand" "=r")
502 (plus:DF (match_operand:DF
1 "register_operand" "%
0")
503 (match_operand:DF
2 "register_operand" "r")))]
507 (define_insn "addsf3"
508 [(set (match_operand:SF
0 "register_operand" "=r")
509 (plus:SF (match_operand:SF
1 "register_operand" "%
0")
510 (match_operand:SF
2 "register_operand" "r")))]
514 (define_insn "subdf3"
515 [(set (match_operand:DF
0 "register_operand" "=r")
516 (minus:DF (match_operand:DF
1 "register_operand" "
0")
517 (match_operand:DF
2 "register_operand" "r")))]
521 (define_insn "subsf3"
522 [(set (match_operand:SF
0 "register_operand" "=r")
523 (minus:SF (match_operand:SF
1 "register_operand" "
0")
524 (match_operand:SF
2 "register_operand" "r")))]
528 (define_insn "muldf3"
529 [(set (match_operand:DF
0 "register_operand" "=r")
530 (mult:DF (match_operand:DF
1 "register_operand" "%
0")
531 (match_operand:DF
2 "register_operand" "r")))]
535 (define_insn "mulsf3"
536 [(set (match_operand:SF
0 "register_operand" "=r")
537 (mult:SF (match_operand:SF
1 "register_operand" "%
0")
538 (match_operand:SF
2 "register_operand" "r")))]
542 (define_insn "divdf3"
543 [(set (match_operand:DF
0 "register_operand" "=r")
544 (div:DF (match_operand:DF
1 "register_operand" "
0")
545 (match_operand:DF
2 "register_operand" "r")))]
549 (define_insn "divsf3"
550 [(set (match_operand:SF
0 "register_operand" "=r")
551 (div:SF (match_operand:SF
1 "register_operand" "
0")
552 (match_operand:SF
2 "register_operand" "r")))]
556 (define_insn "negdf2"
557 [(set (match_operand:DF
0 "register_operand" "=r")
558 (neg:DF (match_operand:DF
1 "register_operand" "r")))]
562 (define_insn "negsf2"
563 [(set (match_operand:SF
0 "register_operand" "=r")
564 (neg:SF (match_operand:SF
1 "register_operand" "r")))]
568 (define_insn "absdf2"
569 [(set (match_operand:DF
0 "register_operand" "=r")
570 (abs:DF (match_operand:DF
1 "register_operand" "r")))]
574 (define_insn "abssf2"
575 [(set (match_operand:SF
0 "register_operand" "=r")
576 (abs:SF (match_operand:SF
1 "register_operand" "r")))]
580 ;______________________________________________________________________
582 ; Logical and Shift Instructions.
583 ;______________________________________________________________________
587 (and:SI (match_operand:SI
0 "general_operand" "%r")
588 (match_operand:SI
1 "general_operand" "g")))]
592 cc_status.flags |= CC_NO_OVERFLOW;
593 return
\"bitw %
1,%
0\";
596 (define_insn "andsi3"
597 [(set (match_operand:SI
0 "register_operand" "=r,r")
598 (and:SI (match_operand:SI
1 "general_operand" "%
0,r")
599 (match_operand:SI
2 "general_operand" "g,K")))]
603 if (which_alternative ==
0)
604 return
\"andw %
2,%
0\";
606 cc_status.flags = CC_NOT_NEGATIVE;
607 return (INTVAL (operands[
2]) ==
255
608 ?
\"movzbw %
1,%
0\" :
\"movzhw %
1,%
0\");
612 [(set (match_operand:SI
0 "register_operand" "=r")
613 (and:SI (not:SI (match_operand:SI
1 "general_operand" "g"))
614 (match_operand:SI
2 "register_operand" "
0")))]
618 (define_insn "iorsi3"
619 [(set (match_operand:SI
0 "register_operand" "=r")
620 (ior:SI (match_operand:SI
1 "general_operand" "%
0")
621 (match_operand:SI
2 "general_operand" "g")))]
625 (define_insn "xorsi3"
626 [(set (match_operand:SI
0 "register_operand" "=r")
627 (xor:SI (match_operand:SI
1 "general_operand" "%
0")
628 (match_operand:SI
2 "general_operand" "g")))]
632 ; The arithmetic left shift instructions work strangely on pyramids.
633 ; They fail to modify the sign bit. Therefore, use logic shifts.
635 (define_insn "ashlsi3"
636 [(set (match_operand:SI
0 "register_operand" "=r")
637 (ashift:SI (match_operand:SI
1 "register_operand" "
0")
638 (match_operand:SI
2 "general_operand" "rnm")))]
640 "* return output_shift (
\"lshlw %
2,%
0\", operands[
2],
32); ")
642 (define_insn "ashrsi3"
643 [(set (match_operand:SI
0 "register_operand" "=r")
644 (ashiftrt:SI (match_operand:SI
1 "register_operand" "
0")
645 (match_operand:SI
2 "general_operand" "rnm")))]
647 "* return output_shift (
\"ashrw %
2,%
0\", operands[
2],
32); ")
649 (define_insn "ashrdi3"
650 [(set (match_operand:DI
0 "register_operand" "=r")
651 (ashiftrt:DI (match_operand:DI
1 "register_operand" "
0")
652 (match_operand:SI
2 "general_operand" "rnm")))]
654 "* return output_shift (
\"ashrl %
2,%
0\", operands[
2],
64); ")
656 (define_insn "lshrsi3"
657 [(set (match_operand:SI
0 "register_operand" "=r")
658 (lshiftrt:SI (match_operand:SI
1 "register_operand" "
0")
659 (match_operand:SI
2 "general_operand" "rnm")))]
661 "* return output_shift (
\"lshrw %
2,%
0\", operands[
2],
32); ")
663 (define_insn "rotlsi3"
664 [(set (match_operand:SI
0 "register_operand" "=r")
665 (rotate:SI (match_operand:SI
1 "register_operand" "
0")
666 (match_operand:SI
2 "general_operand" "rnm")))]
668 "* return output_shift (
\"rotlw %
2,%
0\", operands[
2],
32); ")
670 (define_insn "rotrsi3"
671 [(set (match_operand:SI
0 "register_operand" "=r")
672 (rotatert:SI (match_operand:SI
1 "register_operand" "
0")
673 (match_operand:SI
2 "general_operand" "rnm")))]
675 "* return output_shift (
\"rotrw %
2,%
0\", operands[
2],
32); ")
677 ;______________________________________________________________________
679 ; Fixed and Floating Moves.
680 ;______________________________________________________________________
682 ;; If the destination is a memory operand, indexed source operands are
683 ;; disallowed. Big DImode constants are always loaded into a reg pair,
684 ;; although offsettable memory addresses really could be dealt with.
687 [(set (match_operand:DI
0 "memory_operand" "=m")
688 (match_operand:DI
1 "nonindexed_operand" "gF"))]
689 "(GET_CODE (operands[
1]) == CONST_DOUBLE
690 ? ((CONST_DOUBLE_HIGH (operands[
1]) ==
0
691 && CONST_DOUBLE_LOW (operands[
1]) >=
0)
692 || (CONST_DOUBLE_HIGH (operands[
1]) == -
1
693 && CONST_DOUBLE_LOW (operands[
1]) <
0))
697 if (GET_CODE (operands[
1]) == CONST_DOUBLE)
698 operands[
1] = gen_rtx (CONST_INT, VOIDmode,
699 CONST_DOUBLE_LOW (operands[
1]));
700 return
\"movl %
1,%
0\";
703 ;; Force the destination to a register, so all source operands are allowed.
706 [(set (match_operand:DI
0 "general_operand" "=r")
707 (match_operand:DI
1 "general_operand" "gF"))]
709 "* return output_move_double (operands); ")
711 ;; If the destination is a memory address, indexed source operands are
715 [(set (match_operand:SI
0 "memory_operand" "=m")
716 (match_operand:SI
1 "nonindexed_operand" "g"))]
720 ;; Force the destination to a register, so all source operands are allowed.
723 [(set (match_operand:SI
0 "general_operand" "=r")
724 (match_operand:SI
1 "general_operand" "g"))]
728 ;; If the destination is a memory address, indexed source operands are
732 [(set (match_operand:HI
0 "memory_operand" "=m")
733 (match_operand:HI
1 "nonindexed_operand" "g"))]
737 if (REG_P (operands[
1]))
738 return
\"cvtwh %
1,%
0\"; /* reg -> mem */
740 return
\"movh %
1,%
0\"; /* mem imm -> mem */
743 ;; Force the destination to a register, so all source operands are allowed.
746 [(set (match_operand:HI
0 "general_operand" "=r")
747 (match_operand:HI
1 "general_operand" "g"))]
751 if (GET_CODE (operands[
1]) != MEM)
752 return
\"movw %
1,%
0\"; /* reg imm -> reg */
753 return
\"cvthw %
1,%
0\"; /* mem -> reg */
756 ;; If the destination is a memory address, indexed source operands are
760 [(set (match_operand:QI
0 "memory_operand" "=m")
761 (match_operand:QI
1 "nonindexed_operand" "g"))]
765 if (REG_P (operands[
1]))
766 return
\"cvtwb %
1,%
0\"; /* reg -> mem */
768 return
\"movb %
1,%
0\"; /* mem imm -> mem */
771 ;; Force the destination to a register, so all source operands are allowed.
774 [(set (match_operand:QI
0 "general_operand" "=r")
775 (match_operand:QI
1 "general_operand" "g"))]
779 if (GET_CODE (operands[
1]) != MEM)
780 return
\"movw %
1,%
0\"; /* reg imm -> reg */
781 return
\"cvtbw %
1,%
0\"; /* mem -> reg */
784 ;; If the destination is a memory address, indexed source operands are
788 [(set (match_operand:DF
0 "memory_operand" "=m")
789 (match_operand:DF
1 "nonindexed_operand" "g"))]
790 "GET_CODE (operands[
1]) != CONST_DOUBLE"
793 ;; Force the destination to a register, so all source operands are allowed.
796 [(set (match_operand:DF
0 "general_operand" "=r")
797 (match_operand:DF
1 "general_operand" "gF"))]
799 "* return output_move_double (operands); ")
801 ;; If the destination is a memory address, indexed source operands are
805 [(set (match_operand:SF
0 "memory_operand" "=m")
806 (match_operand:SF
1 "nonindexed_operand" "g"))]
810 ;; Force the destination to a register, so all source operands are allowed.
813 [(set (match_operand:SF
0 "general_operand" "=r")
814 (match_operand:SF
1 "general_operand" "g"))]
819 [(set (match_operand:SI
0 "register_operand" "=r")
820 (match_operand:QI
1 "address_operand" "p"))]
824 forget_cc_if_dependent (operands[
0]);
825 return
\"mova %a1,%
0\";
828 ;______________________________________________________________________
830 ; Conversion patterns.
831 ;______________________________________________________________________
833 ;; The trunc patterns are used only when non compile-time constants are used.
835 (define_insn "truncsiqi2"
836 [(set (match_operand:QI
0 "register_operand" "=r")
837 (truncate:QI (match_operand:SI
1 "nonimmediate_operand" "rm")))]
841 if (REG_P (operands[
0]) && REG_P (operands[
1])
842 && REGNO (operands[
0]) == REGNO (operands[
1]))
844 cc_status = cc_prev_status;
847 forget_cc_if_dependent (operands[
0]);
848 return
\"movw %
1,%
0\";
851 (define_insn "truncsihi2"
852 [(set (match_operand:HI
0 "register_operand" "=r")
853 (truncate:HI (match_operand:SI
1 "nonimmediate_operand" "rm")))]
857 if (REG_P (operands[
0]) && REG_P (operands[
1])
858 && REGNO (operands[
0]) == REGNO (operands[
1]))
860 cc_status = cc_prev_status;
863 forget_cc_if_dependent (operands[
0]);
864 return
\"movw %
1,%
0\";
867 (define_insn "extendhisi2"
868 [(set (match_operand:SI
0 "general_operand" "=r,m")
869 (sign_extend:SI (match_operand:HI
1 "nonimmediate_operand" "rm,r")))]
874 if (optimize && REG_P (operands[
0]) && REG_P (operands[
1])
875 && REGNO (operands[
0]) == REGNO (operands[
1])
876 && already_sign_extended (insn, HImode, operands[
0]))
878 cc_status = cc_prev_status;
881 return
\"cvthw %
1,%
0\";
884 (define_insn "extendqisi2"
885 [(set (match_operand:SI
0 "general_operand" "=r,m")
886 (sign_extend:SI (match_operand:QI
1 "nonimmediate_operand" "rm,r")))]
891 if (optimize && REG_P (operands[
0]) && REG_P (operands[
1])
892 && REGNO (operands[
0]) == REGNO (operands[
1])
893 && already_sign_extended (insn, QImode, operands[
0]))
895 cc_status = cc_prev_status;
898 return
\"cvtbw %
1,%
0\";
901 ; Pyramid doesn't have insns *called* "cvtbh" or "movzbh".
902 ; But we can cvtbw/movzbw into a register, where there is no distinction
903 ; between words and halfwords.
905 (define_insn "extendqihi2"
906 [(set (match_operand:HI
0 "register_operand" "=r")
907 (sign_extend:HI (match_operand:QI
1 "nonimmediate_operand" "rm")))]
911 (define_insn "zero_extendhisi2"
912 [(set (match_operand:SI
0 "register_operand" "=r")
913 (zero_extend:SI (match_operand:HI
1 "nonimmediate_operand" "rm")))]
917 cc_status.flags = CC_NOT_NEGATIVE;
918 return
\"movzhw %
1,%
0\";
921 (define_insn "zero_extendqisi2"
922 [(set (match_operand:SI
0 "register_operand" "=r")
923 (zero_extend:SI (match_operand:QI
1 "nonimmediate_operand" "rm")))]
927 cc_status.flags = CC_NOT_NEGATIVE;
928 return
\"movzbw %
1,%
0\";
931 (define_insn "zero_extendqihi2"
932 [(set (match_operand:HI
0 "register_operand" "=r")
933 (zero_extend:HI (match_operand:QI
1 "nonimmediate_operand" "rm")))]
937 cc_status.flags = CC_NOT_NEGATIVE;
938 return
\"movzbw %
1,%
0\";
941 (define_insn "extendsfdf2"
942 [(set (match_operand:DF
0 "general_operand" "=&r,m")
943 (float_extend:DF (match_operand:SF
1 "nonimmediate_operand" "rm,r")))]
947 (define_insn "truncdfsf2"
948 [(set (match_operand:SF
0 "general_operand" "=&r,m")
949 (float_truncate:SF (match_operand:DF
1 "nonimmediate_operand" "rm,r")))]
953 (define_insn "floatsisf2"
954 [(set (match_operand:SF
0 "general_operand" "=&r,m")
955 (float:SF (match_operand:SI
1 "nonimmediate_operand" "rm,r")))]
959 (define_insn "floatsidf2"
960 [(set (match_operand:DF
0 "general_operand" "=&r,m")
961 (float:DF (match_operand:SI
1 "nonimmediate_operand" "rm,r")))]
965 (define_insn "fix_truncsfsi2"
966 [(set (match_operand:SI
0 "general_operand" "=&r,m")
967 (fix:SI (fix:SF (match_operand:SF
1 "nonimmediate_operand" "rm,r"))))]
971 (define_insn "fix_truncdfsi2"
972 [(set (match_operand:SI
0 "general_operand" "=&r,m")
973 (fix:SI (fix:DF (match_operand:DF
1 "nonimmediate_operand" "rm,r"))))]
977 ;______________________________________________________________________
979 ; Flow Control Patterns.
980 ;______________________________________________________________________
982 ;; Prefer "br" to "jump" for unconditional jumps, since it's faster.
983 ;; (The assembler can manage with out-of-range branches.)
987 (label_ref (match_operand
0 "" "")))]
993 (if_then_else (match_operator
0 "relop" [(cc0) (const_int
0)])
994 (label_ref (match_operand
1 "" ""))
1001 switch (GET_CODE (operands[
0]))
1005 case LT: case LE: case GE: case GT:
1006 if (cc_prev_status.mdep == CC_VALID_FOR_UNSIGNED)
1009 case LTU: case LEU: case GEU: case GTU:
1010 if (cc_prev_status.mdep != CC_VALID_FOR_UNSIGNED)
1015 return
\"b%N0 %l1
\";
1020 (if_then_else (match_operator
0 "relop" [(cc0) (const_int
0)])
1022 (label_ref (match_operand
1 "" ""))))]
1026 extern int optimize;
1028 switch (GET_CODE (operands[
0]))
1032 case LT: case LE: case GE: case GT:
1033 if (cc_prev_status.mdep == CC_VALID_FOR_UNSIGNED)
1036 case LTU: case LEU: case GEU: case GTU:
1037 if (cc_prev_status.mdep != CC_VALID_FOR_UNSIGNED)
1042 return
\"b%C0 %l1
\";
1046 [(call (match_operand:QI
0 "memory_operand" "m")
1047 (match_operand:SI
1 "immediate_operand" "n"))]
1051 (define_insn "call_value"
1052 [(set (match_operand
0 "" "=r")
1053 (call (match_operand:QI
1 "memory_operand" "m")
1054 (match_operand:SI
2 "immediate_operand" "n")))]
;; Operand 2 not really used on Pyramid architecture.
1059 (define_insn "return"
1064 if (get_frame_size () + current_function_pretend_args_size
1065 + current_function_args_size !=
0
1066 || current_function_calls_alloca)
1068 int dealloc_size = current_function_pretend_args_size;
1069 if (current_function_pops_args)
1070 dealloc_size += current_function_args_size;
1071 operands[
0] = gen_rtx (CONST_INT, VOIDmode, dealloc_size);
1078 (define_insn "tablejump"
1079 [(set (pc) (match_operand:SI
0 "register_operand" "r"))
1080 (use (label_ref (match_operand
1 "" "")))]
1087 "movw gr0,gr0 # nop")
1089 ;______________________________________________________________________
1091 ; Peep-hole Optimization Patterns.
1092 ;______________________________________________________________________
1094 ;; Optimize fullword move followed by a test of the moved value.
1097 [(set (match_operand:SI
0 "register_operand" "=r")
1098 (match_operand:SI
1 "nonimmediate_operand" "rm"))
1099 (set (cc0) (match_operand:SI
2 "nonimmediate_operand" "rm"))]
1100 "rtx_equal_p (operands[
2], operands[
0])
1101 || rtx_equal_p (operands[
2], operands[
1])"
1103 cc_status.flags |= CC_NO_OVERFLOW;
1104 return
\"mtstw %
1,%
0\";
1107 ;; Same for HI and QI mode move-test as well.
1110 [(set (match_operand:HI
0 "register_operand" "=r")
1111 (match_operand:HI
1 "nonimmediate_operand" "rm"))
1112 (set (match_operand:SI
2 "register_operand" "=r")
1113 (sign_extend:SI (match_operand:HI
3 "nonimmediate_operand" "rm")))
1114 (set (cc0) (match_dup
2))]
1115 "dead_or_set_p (insn, operands[
2])
1116 && (rtx_equal_p (operands[
3], operands[
0])
1117 || rtx_equal_p (operands[
3], operands[
1]))"
1119 cc_status.flags |= CC_NO_OVERFLOW;
1120 return
\"cvthw %
1,%
0\";
1124 [(set (match_operand:QI
0 "register_operand" "=r")
1125 (match_operand:QI
1 "nonimmediate_operand" "rm"))
1126 (set (match_operand:SI
2 "register_operand" "=r")
1127 (sign_extend:SI (match_operand:QI
3 "nonimmediate_operand" "rm")))
1128 (set (cc0) (match_dup
2))]
1129 "dead_or_set_p (insn, operands[
2])
1130 && (rtx_equal_p (operands[
3], operands[
0])
1131 || rtx_equal_p (operands[
3], operands[
1]))"
1133 cc_status.flags |= CC_NO_OVERFLOW;
1134 return
\"cvtbw %
1,%
0\";
1137 ;; Optimize loops with an incremented/decremented variable.
1140 [(set (match_operand:SI
0 "register_operand" "=r")
1141 (plus:SI (match_dup
0)
1144 (compare (match_operand:SI
1 "register_operand" "r")
1145 (match_operand:SI
2 "nonmemory_operand" "ri")))
1147 (if_then_else (match_operator:SI
3 "signed_comparison"
1148 [(cc0) (const_int
0)])
1149 (label_ref (match_operand
4 "" ""))
1151 "(GET_CODE (operands[
2]) == CONST_INT
1152 ? (unsigned)INTVAL (operands[
2]) +
32 >=
64
1153 :
1) && (rtx_equal_p (operands[
0], operands[
1])
1154 || rtx_equal_p (operands[
0], operands[
2]))"
1156 if (rtx_equal_p (operands[
0], operands[
1]))
1158 output_asm_insn (
\"dcmpw %
2,%
0\", operands);
1159 return
\"b%N3 %l4
\";
1163 output_asm_insn (
\"dcmpw %
1,%
0\", operands);
1164 return
\"b%R3 %l4
\";
1169 [(set (match_operand:SI
0 "register_operand" "=r")
1170 (plus:SI (match_dup
0)
1173 (compare (match_operand:SI
1 "register_operand" "r")
1174 (match_operand:SI
2 "nonmemory_operand" "ri")))
1176 (if_then_else (match_operator:SI
3 "signed_comparison"
1177 [(cc0) (const_int
0)])
1178 (label_ref (match_operand
4 "" ""))
1180 "(GET_CODE (operands[
2]) == CONST_INT
1181 ? (unsigned)INTVAL (operands[
2]) +
32 >=
64
1182 :
1) && (rtx_equal_p (operands[
0], operands[
1])
1183 || rtx_equal_p (operands[
0], operands[
2]))"
1185 if (rtx_equal_p (operands[
0], operands[
1]))
1187 output_asm_insn (
\"icmpw %
2,%
0\", operands);
1188 return
\"b%N3 %l4
\";
1192 output_asm_insn (
\"icmpw %
1,%
0\", operands);
1193 return
\"b%R3 %l4
\";
1197 ;; Combine two word moves with consecutive operands into one long move.
1198 ;; Also combines immediate moves, if the high-order destination operand
;; is loaded with 0 or -1 and the low-order destination operand is loaded
;; with a constant with the same sign.
1203 [(set (match_operand:SI
0 "general_operand" "=g")
1204 (match_operand:SI
1 "general_operand" "g"))
1205 (set (match_operand:SI
2 "general_operand" "=g")
1206 (match_operand:SI
3 "general_operand" "g"))]
1207 "movdi_possible (operands)"
1210 extern int swap_operands;
1211 output_asm_insn (
\"# COMBINE movw %
1,%
0\", operands);
1212 output_asm_insn (
\"# COMBINE movw %
3,%
2\", operands);
1213 movdi_possible (operands);
1214 if (CONSTANT_P (operands[
1]))
1215 return (swap_operands) ?
\"movl %
3,%
0\" :
\"movl %
1,%
2\";
1217 return (swap_operands) ?
\"movl %
1,%
0\" :
\"movl %
3,%
2\";
1220 ;; Optimize certain tests after memory stores.
1223 [(set (match_operand
0 "memory_operand" "=m")
1224 (match_operand
1 "register_operand" "r"))
1225 (set (match_operand:SI
2 "register_operand" "=r")
1226 (sign_extend:SI (match_dup
1)))
1229 "dead_or_set_p (insn, operands[
2])"
1231 cc_status.flags |= CC_NO_OVERFLOW;
1232 if (GET_MODE (operands[
0]) == QImode)
1233 return
\"cvtwb %
1,%
0\";
1235 return
\"cvtwh %
1,%
0\";
;______________________________________________________________________
;	Double-word (DImode) operations.
;______________________________________________________________________
1243 (define_expand "extendsidi2"
1244 [(set (subreg:SI (match_operand:DI
0 "register_operand" "=r")
1)
1245 (match_operand:SI
1 "general_operand" "g"))
1246 (set (subreg:SI (match_dup
0)
0)
1247 (subreg:SI (match_dup
0)
1))
1248 (set (subreg:SI (match_dup
0)
0)
1249 (ashiftrt:SI (subreg:SI (match_dup
0)
0)
1254 (define_insn "adddi3"
1255 [(set (match_operand:DI
0 "register_operand" "=r")
1256 (plus:DI (match_operand:DI
1 "nonmemory_operand" "%
0")
1257 (match_operand:DI
2 "nonmemory_operand" "rF")))]
1263 xoperands[
0] = gen_rtx (REG, SImode, REGNO (operands[
0]) +
1);
1264 if (REG_P (operands[
2]))
1265 xoperands[
1] = gen_rtx (REG, SImode, REGNO (operands[
2]) +
1);
1268 xoperands[
1] = gen_rtx (CONST_INT, VOIDmode,
1269 CONST_DOUBLE_LOW (operands[
2]));
1270 operands[
2] = gen_rtx (CONST_INT, VOIDmode,
1271 CONST_DOUBLE_HIGH (operands[
2]));
1273 output_asm_insn (
\"addw %
1,%
0\", xoperands);
1274 return
\"addwc %
2,%
0\";
1277 (define_insn "subdi3"
1278 [(set (match_operand:DI
0 "register_operand" "=r")
1279 (minus:DI (match_operand:DI
1 "register_operand" "
0")
1280 (match_operand:DI
2 "nonmemory_operand" "rF")))]
1286 xoperands[
0] = gen_rtx (REG, SImode, REGNO (operands[
0]) +
1);
1287 if (REG_P (operands[
2]))
1288 xoperands[
1] = gen_rtx (REG, SImode, REGNO (operands[
2]) +
1);
1291 xoperands[
1] = gen_rtx (CONST_INT, VOIDmode,
1292 CONST_DOUBLE_LOW (operands[
2]));
1293 operands[
2] = gen_rtx (CONST_INT, VOIDmode,
1294 CONST_DOUBLE_HIGH (operands[
2]));
1296 output_asm_insn (
\"subw %
1,%
0\", xoperands);
1297 return
\"subwb %
2,%
0\";
1300 (define_insn "iordi3"
1301 [(set (match_operand:DI
0 "register_operand" "=r")
1302 (ior:DI (match_operand:DI
1 "nonmemory_operand" "%
0")
1303 (match_operand:DI
2 "nonmemory_operand" "rF")))]
1309 xoperands[
0] = gen_rtx (REG, SImode, REGNO (operands[
0]) +
1);
1310 if (REG_P (operands[
2]))
1311 xoperands[
1] = gen_rtx (REG, SImode, REGNO (operands[
2]) +
1);
1314 xoperands[
1] = gen_rtx (CONST_INT, VOIDmode,
1315 CONST_DOUBLE_LOW (operands[
2]));
1316 operands[
2] = gen_rtx (CONST_INT, VOIDmode,
1317 CONST_DOUBLE_HIGH (operands[
2]));
1319 output_asm_insn (
\"orw %
1,%
0\", xoperands);
1320 return
\"orw %
2,%
0\";
1323 (define_insn "anddi3"
1324 [(set (match_operand:DI
0 "register_operand" "=r")
1325 (and:DI (match_operand:DI
1 "nonmemory_operand" "%
0")
1326 (match_operand:DI
2 "nonmemory_operand" "rF")))]
1332 xoperands[
0] = gen_rtx (REG, SImode, REGNO (operands[
0]) +
1);
1333 if (REG_P (operands[
2]))
1334 xoperands[
1] = gen_rtx (REG, SImode, REGNO (operands[
2]) +
1);
1337 xoperands[
1] = gen_rtx (CONST_INT, VOIDmode,
1338 CONST_DOUBLE_LOW (operands[
2]));
1339 operands[
2] = gen_rtx (CONST_INT, VOIDmode,
1340 CONST_DOUBLE_HIGH (operands[
2]));
1342 output_asm_insn (
\"andw %
1,%
0\", xoperands);
1343 return
\"andw %
2,%
0\";
1346 (define_insn "xordi3"
1347 [(set (match_operand:DI
0 "register_operand" "=r")
1348 (xor:DI (match_operand:DI
1 "nonmemory_operand" "%
0")
1349 (match_operand:DI
2 "nonmemory_operand" "rF")))]
1355 xoperands[
0] = gen_rtx (REG, SImode, REGNO (operands[
0]) +
1);
1356 if (REG_P (operands[
2]))
1357 xoperands[
1] = gen_rtx (REG, SImode, REGNO (operands[
2]) +
1);
1360 xoperands[
1] = gen_rtx (CONST_INT, VOIDmode,
1361 CONST_DOUBLE_LOW (operands[
2]));
1362 operands[
2] = gen_rtx (CONST_INT, VOIDmode,
1363 CONST_DOUBLE_HIGH (operands[
2]));
1365 output_asm_insn (
\"xorw %
1,%
0\", xoperands);
1366 return
\"xorw %
2,%
0\";
;; My version, modelled after Jonathan Stone's and "tablejump" - S.P.
1370 (define_insn "indirect_jump"
1371 [(set (pc) (match_operand:SI
0 "general_operand" "r"))]
;;- Local variables:
;;- comment-start: ";;- "
;;- eval: (set-syntax-table (copy-sequence (syntax-table)))
;;- eval: (modify-syntax-entry ?] ")[")
;;- eval: (modify-syntax-entry ?{ "(}")
;;- eval: (modify-syntax-entry ?} "){")
;;- End: