;; gcc.gnu.org Git - gcc.git/blame - gcc/config/pyr/pyr.md
;; *** empty log message ***
;; [gcc.git] / gcc / config / pyr / pyr.md
;; CommitLineData
;; 573ade84
;; RK
1;; GNU C machine description for Pyramid 90x, 9000, MIServer Series
2;; Copyright (C) 1989, 1990 Free Software Foundation, Inc.
3
4;; This file is part of GNU CC.
5
6;; GNU CC is free software; you can redistribute it and/or modify
7;; it under the terms of the GNU General Public License as published by
8;; the Free Software Foundation; either version 2, or (at your option)
9;; any later version.
10
11;; GNU CC is distributed in the hope that it will be useful,
12;; but WITHOUT ANY WARRANTY; without even the implied warranty of
13;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14;; GNU General Public License for more details.
15
16;; You should have received a copy of the GNU General Public License
17;; along with GNU CC; see the file COPYING. If not, write to
18;; the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.
19
20;; Instruction patterns. When multiple patterns apply,
21;; the first one in the file is chosen.
22;;
23;; See file "rtl.def" for documentation on define_insn, match_*, et. al.
24;;
25;; cpp macro #define NOTICE_UPDATE_CC in file tm.h handles condition code
26;; updates for most instructions.
27\f
dc2a006b 28;; These comments are mostly obsolete. Written for gcc version 1.XX.
573ade84
RK
29;; * Try using define_insn instead of some peepholes in more places.
30;; * Set REG_NOTES:REG_EQUIV for cvt[bh]w loads. This would make the
31;; backward scan in sign_extend needless.
32;; * Match (pc) (label_ref) case in peephole patterns.
33;; * Should optimize
34;; "cmpX op1,op2; b{eq,ne} LY; ucmpX op1.op2; b{lt,le,gt,ge} LZ"
35;; to
36;; "ucmpX op1,op2; b{eq,ne} LY; b{lt,le,gt,ge} LZ"
37;; by pre-scanning insn and running notice_update_cc for them.
38;; * Is it necessary to do copy_rtx in the test and compare patterns?
39;; * Fix true frame pointer omission.
40;; * Make the jump tables contain branches, not addresses! This would
41;; save us one instruction.
f2d45bd7 42;; * Could the complicated scheme for compares be simplified, if we had
573ade84
RK
43;; no named cmpqi or cmphi patterns, and instead anonymous patterns for
44;; the less-than-word compare cases pyr can handle???
45;; * The jump insn seems to accept more than just IR addressing. Would
46;; we win by telling GCC? Or can we use movw into the global reg which
47;; is a synonym for pc?
48;; * More DImode patterns.
49;; * Scan backwards in "zero_extendhisi2", "zero_extendqisi2" to find out
50;; if the extension can be omitted.
51;; * "divmodsi" with Pyramid "ediv" insn. Is it possible in rtl??
52;; * Would "rcsp tmpreg; u?cmp[bh] op1_regdispl(tmpreg),op2" win in
53;; comparison with the two extensions and single test generated now?
54;; The rcsp insn could be expanded, and moved out of loops by the
55;; optimizer, making 1 (64 bit) insn of 3 (32 bit) insns in loops.
56;; The rcsp insn could be followed by an add insn, making non-displacement
57;; IR addressing sufficient.
58
59;______________________________________________________________________
60;
61; Test and Compare Patterns.
62;______________________________________________________________________
63
64; The argument for the rather complicated test and compare expansion
65; scheme, is the irregular pyramid instructions for these operations.
66; 1) Pyramid has different signed and unsigned compares. 2) HImode
67; and QImode integers are memory-memory and immediate-memory only. 3)
68; Unsigned HImode compares doesn't exist. 4) Only certain
69; combinations of addresses are allowed for memory-memory compares.
70; Whenever necessary, in order to fulfill these addressing
71; constraints, the compare operands are swapped.
72
;; tstsi: test an SImode value against zero.  Forces the operand into a
;; register so that the anonymous register-test pattern below will match.
 73(define_expand "tstsi"
 74 [(set (cc0)
 75 (match_operand:SI 0 "general_operand" ""))]
 76 "" "operands[0] = force_reg (SImode, operands[0]);")
77
;; Memory-memory SImode compare; matches only address pairs accepted by
;; weird_memory_memory (defined in pyr.c; presumably it also sets the
;; global `swap_operands' -- confirm against pyr.c).  The template peeks
;; at the following jump insn to pick signed cmpw vs unsigned ucmpw, and
;; records CC_REVERSED when the operands had to be swapped so
;; NOTICE_UPDATE_CC / the branch printer can compensate.
 78(define_insn ""
 79 [(set (cc0)
 80 (compare (match_operand:SI 0 "memory_operand" "m")
 81 (match_operand:SI 1 "memory_operand" "m")))]
 82 "weird_memory_memory (operands[0], operands[1])"
 83 "*
 84{
 85 rtx br_insn = NEXT_INSN (insn);
 86 RTX_CODE br_code;
 87
 88 if (GET_CODE (br_insn) != JUMP_INSN)
 89 abort();
 90 br_code = GET_CODE (XEXP (XEXP (PATTERN (br_insn), 1), 0));
 91
 92 weird_memory_memory (operands[0], operands[1]);
 93
 94 if (swap_operands)
 95 {
 96 cc_status.flags = CC_REVERSED;
 97 if (TRULY_UNSIGNED_COMPARE_P (br_code))
 98 {
 99 cc_status.mdep = CC_VALID_FOR_UNSIGNED;
 100 return \"ucmpw %0,%1\";
 101 }
 102 return \"cmpw %0,%1\";
 103 }
 104
 105 if (TRULY_UNSIGNED_COMPARE_P (br_code))
 106 {
 107 cc_status.mdep = CC_VALID_FOR_UNSIGNED;
 108 return \"ucmpw %1,%0\";
 109 }
 110 return \"cmpw %1,%0\";
 111}")
112
;; cmpsi: general SImode compare.  Alternative 1 (reg on the right) swaps
;; the operand order in the emitted insn and flags CC_REVERSED.  Like the
;; pattern above, the following branch's code decides cmpw vs ucmpw, and
;; cc_status.mdep notes when the flags are only valid for unsigned tests.
 113(define_insn "cmpsi"
 114 [(set (cc0)
 115 (compare (match_operand:SI 0 "nonimmediate_operand" "r,g")
 116 (match_operand:SI 1 "general_operand" "g,r")))]
 117 ""
 118 "*
 119{
 120 rtx br_insn = NEXT_INSN (insn);
 121 RTX_CODE br_code;
 122
 123 if (GET_CODE (br_insn) != JUMP_INSN)
 124 abort();
 125 br_code = GET_CODE (XEXP (XEXP (PATTERN (br_insn), 1), 0));
 126
 127 if (which_alternative != 0)
 128 {
 129 cc_status.flags = CC_REVERSED;
 130 if (TRULY_UNSIGNED_COMPARE_P (br_code))
 131 {
 132 cc_status.mdep = CC_VALID_FOR_UNSIGNED;
 133 return \"ucmpw %0,%1\";
 134 }
 135 return \"cmpw %0,%1\";
 136 }
 137
 138 if (TRULY_UNSIGNED_COMPARE_P (br_code))
 139 {
 140 cc_status.mdep = CC_VALID_FOR_UNSIGNED;
 141 return \"ucmpw %1,%0\";
 142 }
 143 return \"cmpw %1,%0\";
 144}")
145
;; Register test-against-zero (the insn that tstsi expands to).  Emits
;; ucmpw $0 when the following branch is unsigned, otherwise mtstw.
;; The #if 0 block is dead code kept by the original author.
 146(define_insn ""
 147 [(set (cc0)
f2d45bd7 148 (match_operand:SI 0 "nonimmediate_operand" "r"))]
573ade84
RK
 149 ""
 150 "*
 151{
 152#if 0
 153 cc_status.flags |= CC_NO_OVERFLOW;
 154 return \"cmpw $0,%0\";
 155#endif
 156 rtx br_insn = NEXT_INSN (insn);
 157 RTX_CODE br_code;
 158
 159 if (GET_CODE (br_insn) != JUMP_INSN)
 160 abort();
 161 br_code = GET_CODE (XEXP (XEXP (PATTERN (br_insn), 1), 0));
 162
 163 if (TRULY_UNSIGNED_COMPARE_P (br_code))
 164 {
 165 cc_status.mdep = CC_VALID_FOR_UNSIGNED;
 166 return \"ucmpw $0,%0\";
 167 }
 168 return \"mtstw %0,%0\";
 169}")
170
;; cmphi: HImode compares are deferred -- the operands and mode are stashed
;; in globals (test_op0/test_op1/test_mode, defined in pyr.c) and DONE
;; emits nothing.  Presumably extend_and_branch (see the b* expanders
;; below) materializes the actual compare -- confirm against pyr.c.
 171(define_expand "cmphi"
 172 [(set (cc0)
 173 (compare (match_operand:HI 0 "nonimmediate_operand" "")
 174 (match_operand:HI 1 "general_operand" "")))]
 175 ""
 176 "
 177{
 178 extern rtx test_op0, test_op1; extern enum machine_mode test_mode;
 179 test_op0 = copy_rtx (operands[0]);
 180 test_op1 = copy_rtx (operands[1]);
 181 test_mode = HImode;
 182 DONE;
 183}")
184
;; tsthi: HImode test, deferred exactly like cmphi (only test_op0 is set;
;; the branch expander later consumes it).
 185(define_expand "tsthi"
 186 [(set (cc0)
f2d45bd7 187 (match_operand:HI 0 "nonimmediate_operand" ""))]
573ade84
RK
 188 ""
 189 "
 190{
 191 extern rtx test_op0; extern enum machine_mode test_mode;
 192 test_op0 = copy_rtx (operands[0]);
 193 test_mode = HImode;
 194 DONE;
 195}")
196
;; Memory-memory HImode compare.  Signed branches only (the insn condition
;; rejects unsigned follow-up branches, since unsigned HImode compares do
;; not exist on the Pyramid -- see the header comment).  May swap operands
;; per weird_memory_memory, recording CC_REVERSED.
 197(define_insn ""
 198 [(set (cc0)
 199 (compare (match_operand:HI 0 "memory_operand" "m")
 200 (match_operand:HI 1 "memory_operand" "m")))]
dc2a006b
RS
 201 "(!TRULY_UNSIGNED_COMPARE_P (GET_CODE (XEXP (SET_SRC (PATTERN (NEXT_INSN (insn))), 0))))
 202 && weird_memory_memory (operands[0], operands[1])"
573ade84
RK
 203 "*
 204{
 205 rtx br_insn = NEXT_INSN (insn);
 206
 207 if (GET_CODE (br_insn) != JUMP_INSN)
 208 abort();
 209
 210 weird_memory_memory (operands[0], operands[1]);
 211
 212 if (swap_operands)
 213 {
 214 cc_status.flags = CC_REVERSED;
 215 return \"cmph %0,%1\";
 216 }
 217
 218 return \"cmph %1,%0\";
 219}")
220
;; Register/memory HImode compare (one operand in a register, the other in
;; memory -- the GET_CODE inequality in the condition enforces that).
;; Signed branches only; alternative 1 swaps and sets CC_REVERSED.
 221(define_insn ""
 222 [(set (cc0)
 223 (compare (match_operand:HI 0 "nonimmediate_operand" "r,m")
 224 (match_operand:HI 1 "nonimmediate_operand" "m,r")))]
dc2a006b
RS
 225 "(!TRULY_UNSIGNED_COMPARE_P (GET_CODE (XEXP (SET_SRC (PATTERN (NEXT_INSN (insn))), 0))))
 226 && (GET_CODE (operands[0]) != GET_CODE (operands[1]))"
573ade84
RK
 227 "*
 228{
 229 rtx br_insn = NEXT_INSN (insn);
 230
 231 if (GET_CODE (br_insn) != JUMP_INSN)
 232 abort();
 233
 234 if (which_alternative != 0)
 235 {
 236 cc_status.flags = CC_REVERSED;
 237 return \"cmph %0,%1\";
 238 }
 239
 240 return \"cmph %1,%0\";
 241}")
242
;; cmpqi: QImode compare, deferred through the test_op0/test_op1/test_mode
;; globals exactly like cmphi above.
 243(define_expand "cmpqi"
 244 [(set (cc0)
 245 (compare (match_operand:QI 0 "nonimmediate_operand" "")
 246 (match_operand:QI 1 "general_operand" "")))]
 247 ""
 248 "
 249{
 250 extern rtx test_op0, test_op1; extern enum machine_mode test_mode;
 251 test_op0 = copy_rtx (operands[0]);
 252 test_op1 = copy_rtx (operands[1]);
 253 test_mode = QImode;
 254 DONE;
 255}")
256
;; tstqi: QImode test, deferred exactly like tsthi.
 257(define_expand "tstqi"
 258 [(set (cc0)
f2d45bd7 259 (match_operand:QI 0 "nonimmediate_operand" ""))]
573ade84
RK
 260 ""
 261 "
 262{
 263 extern rtx test_op0; extern enum machine_mode test_mode;
 264 test_op0 = copy_rtx (operands[0]);
 265 test_mode = QImode;
 266 DONE;
 267}")
268
;; Memory-memory QImode compare.  Byte-for-byte the same scheme as the
;; SImode mem-mem pattern above, but emits cmpb/ucmpb (unsigned byte
;; compares DO exist, unlike HImode).
 269(define_insn ""
 270 [(set (cc0)
 271 (compare (match_operand:QI 0 "memory_operand" "m")
 272 (match_operand:QI 1 "memory_operand" "m")))]
 273 "weird_memory_memory (operands[0], operands[1])"
 274 "*
 275{
 276 rtx br_insn = NEXT_INSN (insn);
 277 RTX_CODE br_code;
 278
 279 if (GET_CODE (br_insn) != JUMP_INSN)
 280 abort();
 281 br_code = GET_CODE (XEXP (XEXP (PATTERN (br_insn), 1), 0));
 282
 283 weird_memory_memory (operands[0], operands[1]);
 284
 285 if (swap_operands)
 286 {
 287 cc_status.flags = CC_REVERSED;
 288 if (TRULY_UNSIGNED_COMPARE_P (br_code))
 289 {
 290 cc_status.mdep = CC_VALID_FOR_UNSIGNED;
 291 return \"ucmpb %0,%1\";
 292 }
 293 return \"cmpb %0,%1\";
 294 }
 295
 296 if (TRULY_UNSIGNED_COMPARE_P (br_code))
 297 {
 298 cc_status.mdep = CC_VALID_FOR_UNSIGNED;
 299 return \"ucmpb %1,%0\";
 300 }
 301 return \"cmpb %1,%0\";
 302}")
303
;; Register/memory QImode compare; one operand must be a register and the
;; other memory (enforced by the GET_CODE inequality).  Alternative 1
;; swaps operands and sets CC_REVERSED; the following branch picks
;; cmpb vs ucmpb.
 304(define_insn ""
 305 [(set (cc0)
 306 (compare (match_operand:QI 0 "nonimmediate_operand" "r,m")
 307 (match_operand:QI 1 "nonimmediate_operand" "m,r")))]
 308 "(GET_CODE (operands[0]) != GET_CODE (operands[1]))"
 309 "*
 310{
 311 rtx br_insn = NEXT_INSN (insn);
 312 RTX_CODE br_code;
 313
 314 if (GET_CODE (br_insn) != JUMP_INSN)
 315 abort();
 316 br_code = GET_CODE (XEXP (XEXP (PATTERN (br_insn), 1), 0));
 317
 318 if (which_alternative != 0)
 319 {
 320 cc_status.flags = CC_REVERSED;
 321 if (TRULY_UNSIGNED_COMPARE_P (br_code))
 322 {
 323 cc_status.mdep = CC_VALID_FOR_UNSIGNED;
 324 return \"ucmpb %0,%1\";
 325 }
 326 return \"cmpb %0,%1\";
 327 }
 328
 329 if (TRULY_UNSIGNED_COMPARE_P (br_code))
 330 {
 331 cc_status.mdep = CC_VALID_FOR_UNSIGNED;
 332 return \"ucmpb %1,%0\";
 333 }
 334 return \"cmpb %1,%0\";
 335}")
336
;; Conditional-branch expanders.  Each first calls extend_and_branch
;; (pyr.c), which presumably emits any compare deferred by
;; cmphi/cmpqi/tsthi/tstqi via the test_op* globals, widening the operands
;; with sign extension for the signed conditions and zero extension for
;; the unsigned ones -- confirm against pyr.c.
 337(define_expand "bgt"
 338 [(set (pc) (if_then_else (gt (cc0) (const_int 0))
 339 (label_ref (match_operand 0 "" "")) (pc)))]
 340 "" "extend_and_branch (SIGN_EXTEND);")
 341
 342(define_expand "blt"
 343 [(set (pc) (if_then_else (lt (cc0) (const_int 0))
 344 (label_ref (match_operand 0 "" "")) (pc)))]
 345 "" "extend_and_branch (SIGN_EXTEND);")
 346
 347(define_expand "bge"
 348 [(set (pc) (if_then_else (ge (cc0) (const_int 0))
 349 (label_ref (match_operand 0 "" "")) (pc)))]
 350 "" "extend_and_branch (SIGN_EXTEND);")
 351
 352(define_expand "ble"
 353 [(set (pc) (if_then_else (le (cc0) (const_int 0))
 354 (label_ref (match_operand 0 "" "")) (pc)))]
 355 "" "extend_and_branch (SIGN_EXTEND);")
 356
 357(define_expand "beq"
 358 [(set (pc) (if_then_else (eq (cc0) (const_int 0))
 359 (label_ref (match_operand 0 "" "")) (pc)))]
 360 "" "extend_and_branch (SIGN_EXTEND);")
 361
 362(define_expand "bne"
 363 [(set (pc) (if_then_else (ne (cc0) (const_int 0))
 364 (label_ref (match_operand 0 "" "")) (pc)))]
 365 "" "extend_and_branch (SIGN_EXTEND);")
 366
 367(define_expand "bgtu"
 368 [(set (pc) (if_then_else (gtu (cc0) (const_int 0))
 369 (label_ref (match_operand 0 "" "")) (pc)))]
 370 "" "extend_and_branch (ZERO_EXTEND);")
 371
 372(define_expand "bltu"
 373 [(set (pc) (if_then_else (ltu (cc0) (const_int 0))
 374 (label_ref (match_operand 0 "" "")) (pc)))]
 375 "" "extend_and_branch (ZERO_EXTEND);")
 376
 377(define_expand "bgeu"
 378 [(set (pc) (if_then_else (geu (cc0) (const_int 0))
 379 (label_ref (match_operand 0 "" "")) (pc)))]
 380 "" "extend_and_branch (ZERO_EXTEND);")
 381
 382(define_expand "bleu"
 383 [(set (pc) (if_then_else (leu (cc0) (const_int 0))
 384 (label_ref (match_operand 0 "" "")) (pc)))]
 385 "" "extend_and_branch (ZERO_EXTEND);")
386
;; Floating-point compares and tests.  Note the reversed operand order in
;; the compare templates (%1,%0): the machine compares in the opposite
;; direction from the RTL, matching NOTICE_UPDATE_CC's expectations.
 387(define_insn "cmpdf"
 388 [(set (cc0)
 389 (compare (match_operand:DF 0 "register_operand" "r")
 390 (match_operand:DF 1 "register_operand" "r")))]
 391 ""
 392 "cmpd %1,%0")
 393
 394(define_insn "cmpsf"
 395 [(set (cc0)
 396 (compare (match_operand:SF 0 "register_operand" "r")
 397 (match_operand:SF 1 "register_operand" "r")))]
 398 ""
 399 "cmpf %1,%0")
 400
 401(define_insn "tstdf"
 402 [(set (cc0)
 403 (match_operand:DF 0 "register_operand" "r"))]
 404 ""
 405 "mtstd %0,%0")
 406
 407(define_insn "tstsf"
 408 [(set (cc0)
 409 (match_operand:SF 0 "register_operand" "r"))]
 410 ""
 411 "mtstf %0,%0")
412\f
413;______________________________________________________________________
414;
415; Fixed-point Arithmetic.
416;______________________________________________________________________
417
;; addsi3.  Alternative 0: plain addw into the destination register.
;; NOTE(review): the CONST_INT == 32 special case emits \"subw %n2\"
;; (subtract the negated constant, i.e. subw $-32) -- presumably an
;; immediate-encoding quirk of the Pyramid; verify before touching.
;; Alternative 1 (three-address form, discouraged by `!') synthesizes the
;; add with the address-mode of mova; since mova does not set the
;; condition codes like addw would, forget_cc_if_dependent invalidates
;; any cached cc_status involving the destination.
 418(define_insn "addsi3"
 419 [(set (match_operand:SI 0 "register_operand" "=r,!r")
 420 (plus:SI (match_operand:SI 1 "general_operand" "%0,r")
 421 (match_operand:SI 2 "general_operand" "g,rJ")))]
 422 ""
 423 "*
 424{
 425 if (which_alternative == 0)
 426 return (GET_CODE (operands[2]) == CONST_INT && INTVAL (operands[2]) == 32
 427 ? \"subw %n2,%0\" : \"addw %2,%0\");
 428 else
 429 {
 430 forget_cc_if_dependent (operands[0]);
 431 return \"mova %a2[%1*1],%0\";
 432 }
 433}")
434
;; subsi3: two-operand subtract.  Alternative 0 subtracts into op0 (= op1);
;; alternative 1 uses the reverse-subtract insn when op2 is the register.
 435(define_insn "subsi3"
 436 [(set (match_operand:SI 0 "register_operand" "=r,r")
 437 (minus:SI (match_operand:SI 1 "general_operand" "0,g")
 438 (match_operand:SI 2 "general_operand" "g,0")))]
 439 ""
 440 "* return (which_alternative == 0) ? \"subw %2,%0\" : \"rsubw %1,%0\";")
441
;; SImode multiply, divide and remainder.  divsi3 has a reverse-divide
;; alternative (rdivw) for when the divisor is the tied register; the
;; unsigned and modulo forms require op1 in a register.
 442(define_insn "mulsi3"
 443 [(set (match_operand:SI 0 "register_operand" "=r")
 444 (mult:SI (match_operand:SI 1 "general_operand" "%0")
 445 (match_operand:SI 2 "general_operand" "g")))]
 446 ""
 447 "mulw %2,%0")
 448
 449(define_insn "divsi3"
 450 [(set (match_operand:SI 0 "register_operand" "=r,r")
 451 (div:SI (match_operand:SI 1 "general_operand" "0,g")
 452 (match_operand:SI 2 "general_operand" "g,0")))]
 453 ""
 454 "* return (which_alternative == 0) ? \"divw %2,%0\" : \"rdivw %1,%0\";")
 455
 456(define_insn "udivsi3"
 457 [(set (match_operand:SI 0 "register_operand" "=r")
 458 (udiv:SI (match_operand:SI 1 "register_operand" "0")
 459 (match_operand:SI 2 "general_operand" "g")))]
 460 ""
 461 "udivw %2,%0")
 462
 463(define_insn "modsi3"
 464 [(set (match_operand:SI 0 "register_operand" "=r")
 465 (mod:SI (match_operand:SI 1 "register_operand" "0")
 466 (match_operand:SI 2 "general_operand" "g")))]
 467 ""
 468 "modw %2,%0")
 469
 470(define_insn "umodsi3"
 471 [(set (match_operand:SI 0 "register_operand" "=r")
 472 (umod:SI (match_operand:SI 1 "register_operand" "0")
 473 (match_operand:SI 2 "general_operand" "g")))]
 474 ""
 475 "umodw %2,%0")
476
;; SImode unary ops: negate, bitwise complement, absolute value.  All use
;; the move-form insns (mnegw/mcomw/mabsw), so the source may be memory.
 477(define_insn "negsi2"
 478 [(set (match_operand:SI 0 "register_operand" "=r")
 479 (neg:SI (match_operand:SI 1 "nonimmediate_operand" "rm")))]
 480 ""
 481 "mnegw %1,%0")
 482
 483(define_insn "one_cmplsi2"
 484 [(set (match_operand:SI 0 "register_operand" "=r")
 485 (not:SI (match_operand:SI 1 "nonimmediate_operand" "rm")))]
 486 ""
 487 "mcomw %1,%0")
 488
 489(define_insn "abssi2"
 490 [(set (match_operand:SI 0 "register_operand" "=r")
 491 (abs:SI (match_operand:SI 1 "nonimmediate_operand" "rm")))]
 492 ""
 493 "mabsw %1,%0")
494\f
495;______________________________________________________________________
496;
497; Floating-point Arithmetic.
498;______________________________________________________________________
499
;; Floating-point arithmetic: register-register only, two-operand form
;; with the destination tied to operand 1.  `d' suffix = DFmode (double),
;; `f' suffix = SFmode (single); unary ops use the move forms mneg*/mabs*.
 500(define_insn "adddf3"
 501 [(set (match_operand:DF 0 "register_operand" "=r")
 502 (plus:DF (match_operand:DF 1 "register_operand" "%0")
 503 (match_operand:DF 2 "register_operand" "r")))]
 504 ""
 505 "addd %2,%0")
 506
 507(define_insn "addsf3"
 508 [(set (match_operand:SF 0 "register_operand" "=r")
 509 (plus:SF (match_operand:SF 1 "register_operand" "%0")
 510 (match_operand:SF 2 "register_operand" "r")))]
 511 ""
 512 "addf %2,%0")
 513
 514(define_insn "subdf3"
 515 [(set (match_operand:DF 0 "register_operand" "=r")
 516 (minus:DF (match_operand:DF 1 "register_operand" "0")
 517 (match_operand:DF 2 "register_operand" "r")))]
 518 ""
 519 "subd %2,%0")
 520
 521(define_insn "subsf3"
 522 [(set (match_operand:SF 0 "register_operand" "=r")
 523 (minus:SF (match_operand:SF 1 "register_operand" "0")
 524 (match_operand:SF 2 "register_operand" "r")))]
 525 ""
 526 "subf %2,%0")
 527
 528(define_insn "muldf3"
 529 [(set (match_operand:DF 0 "register_operand" "=r")
 530 (mult:DF (match_operand:DF 1 "register_operand" "%0")
 531 (match_operand:DF 2 "register_operand" "r")))]
 532 ""
 533 "muld %2,%0")
 534
 535(define_insn "mulsf3"
 536 [(set (match_operand:SF 0 "register_operand" "=r")
 537 (mult:SF (match_operand:SF 1 "register_operand" "%0")
 538 (match_operand:SF 2 "register_operand" "r")))]
 539 ""
 540 "mulf %2,%0")
 541
 542(define_insn "divdf3"
 543 [(set (match_operand:DF 0 "register_operand" "=r")
 544 (div:DF (match_operand:DF 1 "register_operand" "0")
 545 (match_operand:DF 2 "register_operand" "r")))]
 546 ""
 547 "divd %2,%0")
 548
 549(define_insn "divsf3"
 550 [(set (match_operand:SF 0 "register_operand" "=r")
 551 (div:SF (match_operand:SF 1 "register_operand" "0")
 552 (match_operand:SF 2 "register_operand" "r")))]
 553 ""
 554 "divf %2,%0")
 555
 556(define_insn "negdf2"
 557 [(set (match_operand:DF 0 "register_operand" "=r")
 558 (neg:DF (match_operand:DF 1 "register_operand" "r")))]
 559 ""
 560 "mnegd %1,%0")
 561
 562(define_insn "negsf2"
 563 [(set (match_operand:SF 0 "register_operand" "=r")
 564 (neg:SF (match_operand:SF 1 "register_operand" "r")))]
 565 ""
 566 "mnegf %1,%0")
 567
 568(define_insn "absdf2"
 569 [(set (match_operand:DF 0 "register_operand" "=r")
 570 (abs:DF (match_operand:DF 1 "register_operand" "r")))]
 571 ""
 572 "mabsd %1,%0")
 573
 574(define_insn "abssf2"
 575 [(set (match_operand:SF 0 "register_operand" "=r")
 576 (abs:SF (match_operand:SF 1 "register_operand" "r")))]
 577 ""
 578 "mabsf %1,%0")
579\f
580;______________________________________________________________________
581;
582; Logical and Shift Instructions.
583;______________________________________________________________________
584
;; Logical operations.  The first (anonymous) pattern sets cc0 from an AND
;; without storing the result -- the Pyramid bit-test insn.  andsi3's
;; second alternative recognizes the masks 255/65535 (constraint `K') and
;; uses the zero-extending moves instead of an and; those clear the sign,
;; so CC_NOT_NEGATIVE is recorded.  bicw implements and-with-complement.
 585(define_insn ""
 586 [(set (cc0)
 587 (and:SI (match_operand:SI 0 "general_operand" "%r")
 588 (match_operand:SI 1 "general_operand" "g")))]
 589 ""
 590 "*
 591{
 592 cc_status.flags |= CC_NO_OVERFLOW;
 593 return \"bitw %1,%0\";
 594}")
 595
 596(define_insn "andsi3"
 597 [(set (match_operand:SI 0 "register_operand" "=r,r")
 598 (and:SI (match_operand:SI 1 "general_operand" "%0,r")
 599 (match_operand:SI 2 "general_operand" "g,K")))]
 600 ""
 601 "*
 602{
 603 if (which_alternative == 0)
 604 return \"andw %2,%0\";
 605
 606 cc_status.flags = CC_NOT_NEGATIVE;
 607 return (INTVAL (operands[2]) == 255
 608 ? \"movzbw %1,%0\" : \"movzhw %1,%0\");
 609}")
 610
 611(define_insn ""
 612 [(set (match_operand:SI 0 "register_operand" "=r")
 613 (and:SI (not:SI (match_operand:SI 1 "general_operand" "g"))
 614 (match_operand:SI 2 "register_operand" "0")))]
 615 ""
 616 "bicw %1,%0")
 617
 618(define_insn "iorsi3"
 619 [(set (match_operand:SI 0 "register_operand" "=r")
 620 (ior:SI (match_operand:SI 1 "general_operand" "%0")
 621 (match_operand:SI 2 "general_operand" "g")))]
 622 ""
 623 "orw %2,%0")
 624
 625(define_insn "xorsi3"
 626 [(set (match_operand:SI 0 "register_operand" "=r")
 627 (xor:SI (match_operand:SI 1 "general_operand" "%0")
 628 (match_operand:SI 2 "general_operand" "g")))]
 629 ""
 630 "xorw %2,%0")
631
632; The arithmetic left shift instructions work strangely on pyramids.
633; They fail to modify the sign bit. Therefore, use logic shifts.
634
;; Shifts and rotates.  All funnel through output_shift (pyr.c), passing
;; the shift count operand and the operand width in bits; per the comment
;; above, arithmetic LEFT shifts are avoided (they don't touch the sign
;; bit on the Pyramid), so ashlsi3 emits the logical lshlw.
 635(define_insn "ashlsi3"
 636 [(set (match_operand:SI 0 "register_operand" "=r")
 637 (ashift:SI (match_operand:SI 1 "register_operand" "0")
 638 (match_operand:SI 2 "general_operand" "rnm")))]
 639 ""
 640 "* return output_shift (\"lshlw %2,%0\", operands[2], 32); ")
 641
 642(define_insn "ashrsi3"
 643 [(set (match_operand:SI 0 "register_operand" "=r")
 644 (ashiftrt:SI (match_operand:SI 1 "register_operand" "0")
 645 (match_operand:SI 2 "general_operand" "rnm")))]
 646 ""
 647 "* return output_shift (\"ashrw %2,%0\", operands[2], 32); ")
 648
 649(define_insn "ashrdi3"
 650 [(set (match_operand:DI 0 "register_operand" "=r")
 651 (ashiftrt:DI (match_operand:DI 1 "register_operand" "0")
 652 (match_operand:SI 2 "general_operand" "rnm")))]
 653 ""
 654 "* return output_shift (\"ashrl %2,%0\", operands[2], 64); ")
 655
 656(define_insn "lshrsi3"
 657 [(set (match_operand:SI 0 "register_operand" "=r")
 658 (lshiftrt:SI (match_operand:SI 1 "register_operand" "0")
 659 (match_operand:SI 2 "general_operand" "rnm")))]
 660 ""
 661 "* return output_shift (\"lshrw %2,%0\", operands[2], 32); ")
 662
 663(define_insn "rotlsi3"
 664 [(set (match_operand:SI 0 "register_operand" "=r")
 665 (rotate:SI (match_operand:SI 1 "register_operand" "0")
 666 (match_operand:SI 2 "general_operand" "rnm")))]
 667 ""
 668 "* return output_shift (\"rotlw %2,%0\", operands[2], 32); ")
 669
 670(define_insn "rotrsi3"
 671 [(set (match_operand:SI 0 "register_operand" "=r")
 672 (rotatert:SI (match_operand:SI 1 "register_operand" "0")
 673 (match_operand:SI 2 "general_operand" "rnm")))]
 674 ""
 675 "* return output_shift (\"rotrw %2,%0\", operands[2], 32); ")
676\f
677;______________________________________________________________________
678;
679; Fixed and Floating Moves.
680;______________________________________________________________________
681
682;; If the destination is a memory operand, indexed source operands are
683;; disallowed. Big DImode constants are always loaded into a reg pair,
f2d45bd7 684;; although offsettable memory addresses really could be dealt with.
573ade84
RK
685
;; DImode store to memory.  A CONST_DOUBLE source is only accepted when it
;; sign-extends from its low word (high word all zeros with non-negative
;; low, or all ones with negative low), in which case the low word alone
;; is emitted as a CONST_INT for the movl template.
 686(define_insn ""
 687 [(set (match_operand:DI 0 "memory_operand" "=m")
 688 (match_operand:DI 1 "nonindexed_operand" "gF"))]
 689 "(GET_CODE (operands[1]) == CONST_DOUBLE
 690 ? ((CONST_DOUBLE_HIGH (operands[1]) == 0
 691 && CONST_DOUBLE_LOW (operands[1]) >= 0)
 692 || (CONST_DOUBLE_HIGH (operands[1]) == -1
 693 && CONST_DOUBLE_LOW (operands[1]) < 0))
 694 : 1)"
 695 "*
 696{
 697 if (GET_CODE (operands[1]) == CONST_DOUBLE)
 698 operands[1] = gen_rtx (CONST_INT, VOIDmode,
 699 CONST_DOUBLE_LOW (operands[1]));
 700 return \"movl %1,%0\";
 701}")
702
703;; Force the destination to a register, so all source operands are allowed.
704
;; movdi: general DImode move into a register pair; output_move_double
;; (pyr.c) picks the actual insn sequence.
 705(define_insn "movdi"
 706 [(set (match_operand:DI 0 "general_operand" "=r")
 707 (match_operand:DI 1 "general_operand" "gF"))]
 708 ""
 709 "* return output_move_double (operands); ")
710
711;; If the destination is a memory address, indexed source operands are
712;; disallowed.
713
;; SImode store to memory; indexed sources are excluded (nonindexed_operand)
;; because memory-destination moves cannot take them.
 714(define_insn ""
 715 [(set (match_operand:SI 0 "memory_operand" "=m")
 716 (match_operand:SI 1 "nonindexed_operand" "g"))]
 717 ""
 718 "movw %1,%0")
719
720;; Force the destination to a register, so all source operands are allowed.
721
;; movsi: general SImode move into a register (so any source is allowed).
 722(define_insn "movsi"
 723 [(set (match_operand:SI 0 "general_operand" "=r")
 724 (match_operand:SI 1 "general_operand" "g"))]
 725 ""
 726 "movw %1,%0")
727
728;; If the destination is a memory address, indexed source operands are
729;; disallowed.
730
;; HImode store to memory.  Registers hold full words, so reg -> mem must
;; narrow with cvtwh; mem/immediate sources use the plain halfword move.
 731(define_insn ""
 732 [(set (match_operand:HI 0 "memory_operand" "=m")
 733 (match_operand:HI 1 "nonindexed_operand" "g"))]
 734 ""
 735 "*
 736{
 737 if (REG_P (operands[1]))
 738 return \"cvtwh %1,%0\"; /* reg -> mem */
 739 else
 740 return \"movh %1,%0\"; /* mem imm -> mem */
 741}")
742
743;; Force the destination to a register, so all source operands are allowed.
744
;; movhi: HImode load into a register.  A memory source is widened with
;; cvthw (registers have no halfword sub-parts); reg/immediate sources
;; move as full words.
 745(define_insn "movhi"
 746 [(set (match_operand:HI 0 "general_operand" "=r")
 747 (match_operand:HI 1 "general_operand" "g"))]
 748 ""
 749 "*
 750{
 751 if (GET_CODE (operands[1]) != MEM)
 752 return \"movw %1,%0\"; /* reg imm -> reg */
 753 return \"cvthw %1,%0\"; /* mem -> reg */
 754}")
755
756;; If the destination is a memory address, indexed source operands are
757;; disallowed.
758
;; QImode store to memory: cvtwb narrows a register source, movb handles
;; memory/immediate sources.  Mirrors the HImode pattern above.
 759(define_insn ""
 760 [(set (match_operand:QI 0 "memory_operand" "=m")
 761 (match_operand:QI 1 "nonindexed_operand" "g"))]
 762 ""
 763 "*
 764{
 765 if (REG_P (operands[1]))
 766 return \"cvtwb %1,%0\"; /* reg -> mem */
 767 else
 768 return \"movb %1,%0\"; /* mem imm -> mem */
 769}")
770
771;; Force the destination to a register, so all source operands are allowed.
772
;; movqi: QImode load into a register; memory sources widen via cvtbw.
 773(define_insn "movqi"
 774 [(set (match_operand:QI 0 "general_operand" "=r")
 775 (match_operand:QI 1 "general_operand" "g"))]
 776 ""
 777 "*
 778{
 779 if (GET_CODE (operands[1]) != MEM)
 780 return \"movw %1,%0\"; /* reg imm -> reg */
 781 return \"cvtbw %1,%0\"; /* mem -> reg */
 782}")
783
784;; If the destination is a memory address, indexed source operands are
785;; disallowed.
786
;; DFmode store to memory; CONST_DOUBLE sources are refused (they must go
;; through a register pair via movdf below).
 787(define_insn ""
 788 [(set (match_operand:DF 0 "memory_operand" "=m")
 789 (match_operand:DF 1 "nonindexed_operand" "g"))]
 790 "GET_CODE (operands[1]) != CONST_DOUBLE"
 791 "movl %1,%0")
792
793;; Force the destination to a register, so all source operands are allowed.
794
;; movdf: general DFmode move into a register pair via output_move_double.
 795(define_insn "movdf"
 796 [(set (match_operand:DF 0 "general_operand" "=r")
 797 (match_operand:DF 1 "general_operand" "gF"))]
 798 ""
 799 "* return output_move_double (operands); ")
800
801;; If the destination is a memory address, indexed source operands are
802;; disallowed.
803
;; SFmode store to memory (single word, so plain movw suffices).
 804(define_insn ""
 805 [(set (match_operand:SF 0 "memory_operand" "=m")
 806 (match_operand:SF 1 "nonindexed_operand" "g"))]
 807 ""
 808 "movw %1,%0")
809
810;; Force the destination to a register, so all source operands are allowed.
811
;; movsf: general SFmode move into a register.
 812(define_insn "movsf"
 813 [(set (match_operand:SF 0 "general_operand" "=r")
 814 (match_operand:SF 1 "general_operand" "g"))]
 815 ""
 816 "movw %1,%0")
817
;; Load-effective-address: mova computes the address of operand 1 into a
;; register.  mova does not set the condition codes the way a move would,
;; so any cc_status depending on the destination is invalidated.
 818(define_insn ""
 819 [(set (match_operand:SI 0 "register_operand" "=r")
 820 (match_operand:QI 1 "address_operand" "p"))]
 821 ""
 822 "*
 823{
 824 forget_cc_if_dependent (operands[0]);
 825 return \"mova %a1,%0\";
 826}")
827\f
828;______________________________________________________________________
829;
830; Conversion patterns.
831;______________________________________________________________________
832
833;; The trunc patterns are used only when non compile-time constants are used.
834
;; truncsiqi2: SImode -> QImode truncation.  In a register the narrow value
;; is just the low bits, so a same-register truncation emits nothing (and
;; preserves the previous cc state); otherwise a plain word move suffices.
 835(define_insn "truncsiqi2"
 836 [(set (match_operand:QI 0 "register_operand" "=r")
 837 (truncate:QI (match_operand:SI 1 "nonimmediate_operand" "rm")))]
 838 ""
 839 "*
 840{
 841 if (REG_P (operands[0]) && REG_P (operands[1])
 842 && REGNO (operands[0]) == REGNO (operands[1]))
 843 {
 844 cc_status = cc_prev_status;
 845 return \"\";
 846 }
 847 forget_cc_if_dependent (operands[0]);
 848 return \"movw %1,%0\";
 849}")
850
;; truncsihi2: SImode -> HImode truncation; identical scheme to
;; truncsiqi2 above.
 851(define_insn "truncsihi2"
 852 [(set (match_operand:HI 0 "register_operand" "=r")
 853 (truncate:HI (match_operand:SI 1 "nonimmediate_operand" "rm")))]
 854 ""
 855 "*
 856{
 857 if (REG_P (operands[0]) && REG_P (operands[1])
 858 && REGNO (operands[0]) == REGNO (operands[1]))
 859 {
 860 cc_status = cc_prev_status;
 861 return \"\";
 862 }
 863 forget_cc_if_dependent (operands[0]);
 864 return \"movw %1,%0\";
 865}")
866
;; extendhisi2: sign-extend HImode to SImode with cvthw.  When optimizing,
;; a same-register extension is dropped entirely if already_sign_extended
;; (pyr.c) proves the register already holds a sign-extended value (this
;; is the backward scan mentioned in the header notes).
 867(define_insn "extendhisi2"
 868 [(set (match_operand:SI 0 "general_operand" "=r,m")
 869 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "rm,r")))]
 870 ""
 871 "*
 872{
 873 extern int optimize;
 874 if (optimize && REG_P (operands[0]) && REG_P (operands[1])
 875 && REGNO (operands[0]) == REGNO (operands[1])
 876 && already_sign_extended (insn, HImode, operands[0]))
 877 {
 878 cc_status = cc_prev_status;
 879 return \"\";
 880 }
 881 return \"cvthw %1,%0\";
 882}")
883
;; extendqisi2: sign-extend QImode to SImode with cvtbw; same
;; already-sign-extended elision as extendhisi2.
 884(define_insn "extendqisi2"
 885 [(set (match_operand:SI 0 "general_operand" "=r,m")
 886 (sign_extend:SI (match_operand:QI 1 "nonimmediate_operand" "rm,r")))]
 887 ""
 888 "*
 889{
 890 extern int optimize;
 891 if (optimize && REG_P (operands[0]) && REG_P (operands[1])
 892 && REGNO (operands[0]) == REGNO (operands[1])
 893 && already_sign_extended (insn, QImode, operands[0]))
 894 {
 895 cc_status = cc_prev_status;
 896 return \"\";
 897 }
 898 return \"cvtbw %1,%0\";
 899}")
900
901; Pyramid doesn't have insns *called* "cvtbh" or "movzbh".
902; But we can cvtbw/movzbw into a register, where there is no distinction
903; between words and halfwords.
904
;; extendqihi2: QImode -> HImode sign extension.  Registers make no
;; distinction between words and halfwords, so cvtbw works here too
;; (see the comment preceding this pattern).
 905(define_insn "extendqihi2"
 906 [(set (match_operand:HI 0 "register_operand" "=r")
 907 (sign_extend:HI (match_operand:QI 1 "nonimmediate_operand" "rm")))]
 908 ""
 909 "cvtbw %1,%0")
910
;; Zero extensions via movzhw/movzbw.  The result can never be negative,
;; so CC_NOT_NEGATIVE is recorded for NOTICE_UPDATE_CC.
 911(define_insn "zero_extendhisi2"
 912 [(set (match_operand:SI 0 "register_operand" "=r")
 913 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "rm")))]
 914 ""
 915 "*
 916{
 917 cc_status.flags = CC_NOT_NEGATIVE;
 918 return \"movzhw %1,%0\";
 919}")
 920
 921(define_insn "zero_extendqisi2"
 922 [(set (match_operand:SI 0 "register_operand" "=r")
 923 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "rm")))]
 924 ""
 925 "*
 926{
 927 cc_status.flags = CC_NOT_NEGATIVE;
 928 return \"movzbw %1,%0\";
 929}")
 930
 931(define_insn "zero_extendqihi2"
 932 [(set (match_operand:HI 0 "register_operand" "=r")
 933 (zero_extend:HI (match_operand:QI 1 "nonimmediate_operand" "rm")))]
 934 ""
 935 "*
 936{
 937 cc_status.flags = CC_NOT_NEGATIVE;
 938 return \"movzbw %1,%0\";
 939}")
940
;; Float <-> float and int <-> float conversions (cvtXY = convert X to Y:
;; f=single, d=double, w=word).  `&r' earlyclobbers the register
;; alternative because source and destination widths differ.
 941(define_insn "extendsfdf2"
 942 [(set (match_operand:DF 0 "general_operand" "=&r,m")
 943 (float_extend:DF (match_operand:SF 1 "nonimmediate_operand" "rm,r")))]
 944 ""
 945 "cvtfd %1,%0")
 946
 947(define_insn "truncdfsf2"
 948 [(set (match_operand:SF 0 "general_operand" "=&r,m")
 949 (float_truncate:SF (match_operand:DF 1 "nonimmediate_operand" "rm,r")))]
 950 ""
 951 "cvtdf %1,%0")
 952
 953(define_insn "floatsisf2"
 954 [(set (match_operand:SF 0 "general_operand" "=&r,m")
 955 (float:SF (match_operand:SI 1 "nonimmediate_operand" "rm,r")))]
 956 ""
 957 "cvtwf %1,%0")
 958
 959(define_insn "floatsidf2"
 960 [(set (match_operand:DF 0 "general_operand" "=&r,m")
 961 (float:DF (match_operand:SI 1 "nonimmediate_operand" "rm,r")))]
 962 ""
 963 "cvtwd %1,%0")
 964
 965(define_insn "fix_truncsfsi2"
 966 [(set (match_operand:SI 0 "general_operand" "=&r,m")
 967 (fix:SI (fix:SF (match_operand:SF 1 "nonimmediate_operand" "rm,r"))))]
 968 ""
 969 "cvtfw %1,%0")
 970
 971(define_insn "fix_truncdfsi2"
 972 [(set (match_operand:SI 0 "general_operand" "=&r,m")
 973 (fix:SI (fix:DF (match_operand:DF 1 "nonimmediate_operand" "rm,r"))))]
 974 ""
 975 "cvtdw %1,%0")
976\f
977;______________________________________________________________________
978;
979; Flow Control Patterns.
980;______________________________________________________________________
981
982;; Prefer "br" to "jump" for unconditional jumps, since it's faster.
983;; (The assembler can manage with out-of-range branches.)
984
;; jump: unconditional branch.  br is preferred over jump for speed
;; (see the comment above); the assembler handles out-of-range targets.
 985(define_insn "jump"
 986 [(set (pc)
 987 (label_ref (match_operand 0 "" "")))]
 988 ""
 989 "br %l0")
990
;; Conditional branch, branch-if-true form; emits b%N0 (branch on the
;; condition) to the label.  When optimizing it cross-checks the condition
;; against cc_prev_status.mdep: a signed test on unsigned-only flags (or
;; vice versa) returns 0.  NOTE(review): returning 0 from an output
;; template presumably aborts compilation / signals an internal error
;; rather than emitting anything -- verify against this GCC version's
;; final.c before relying on it.
 991(define_insn ""
 992 [(set (pc)
 993 (if_then_else (match_operator 0 "relop" [(cc0) (const_int 0)])
 994 (label_ref (match_operand 1 "" ""))
 995 (pc)))]
 996 ""
 997 "*
 998{
 999 extern int optimize;
 1000 if (optimize)
 1001 switch (GET_CODE (operands[0]))
 1002 {
 1003 case EQ: case NE:
 1004 break;
 1005 case LT: case LE: case GE: case GT:
 1006 if (cc_prev_status.mdep == CC_VALID_FOR_UNSIGNED)
 1007 return 0;
 1008 break;
 1009 case LTU: case LEU: case GEU: case GTU:
 1010 if (cc_prev_status.mdep != CC_VALID_FOR_UNSIGNED)
 1011 return 0;
 1012 break;
 1013 }
 1014
 1015 return \"b%N0 %l1\";
 1016}")
1017
;; Conditional branch, fall-through-if-true form (arms of the if_then_else
;; swapped): emits b%C0, the inverted condition, to the label.  Same
;; signed/unsigned cc validity check as the pattern above.
 1018(define_insn ""
 1019 [(set (pc)
 1020 (if_then_else (match_operator 0 "relop" [(cc0) (const_int 0)])
 1021 (pc)
 1022 (label_ref (match_operand 1 "" ""))))]
 1023 ""
 1024 "*
 1025{
 1026 extern int optimize;
 1027 if (optimize)
 1028 switch (GET_CODE (operands[0]))
 1029 {
 1030 case EQ: case NE:
 1031 break;
 1032 case LT: case LE: case GE: case GT:
 1033 if (cc_prev_status.mdep == CC_VALID_FOR_UNSIGNED)
 1034 return 0;
 1035 break;
 1036 case LTU: case LEU: case GEU: case GTU:
 1037 if (cc_prev_status.mdep != CC_VALID_FOR_UNSIGNED)
 1038 return 0;
 1039 break;
 1040 }
 1041
 1042 return \"b%C0 %l1\";
 1043}")
1044
;; Call patterns.  The argument-bytes operand (1 resp. 2) is matched but
;; unused in the output, as the original comment notes.
 1045(define_insn "call"
 1046 [(call (match_operand:QI 0 "memory_operand" "m")
 1047 (match_operand:SI 1 "immediate_operand" "n"))]
 1048 ""
 1049 "call %0")
 1050
 1051(define_insn "call_value"
 1052 [(set (match_operand 0 "" "=r")
 1053 (call (match_operand:QI 1 "memory_operand" "m")
 1054 (match_operand:SI 2 "immediate_operand" "n")))]
 1055 ;; Operand 2 not really used on Pyramid architecture.
 1056 ""
 1057 "call %1")
1058
;; return: function epilogue.  Uses plain ret when nothing was allocated;
;; otherwise retd with the number of bytes to deallocate (pretend args,
;; plus the argument block when this function pops its own args).
 1059(define_insn "return"
 1060 [(return)]
 1061 ""
 1062 "*
 1063{
 1064 if (get_frame_size () + current_function_pretend_args_size
 1065 + current_function_args_size != 0
 1066 || current_function_calls_alloca)
 1067 {
 1068 int dealloc_size = current_function_pretend_args_size;
 1069 if (current_function_pops_args)
 1070 dealloc_size += current_function_args_size;
 1071 operands[0] = gen_rtx (CONST_INT, VOIDmode, dealloc_size);
 1072 return \"retd %0\";
 1073 }
 1074 else
 1075 return \"ret\";
 1076}")
1077
;; Dispatch-table jump through a register.  The (use (label_ref ...))
;; keeps the table's label from being deleted as unreferenced.
(define_insn "tablejump"
  [(set (pc) (match_operand:SI 0 "register_operand" "r"))
   (use (label_ref (match_operand 1 "" "")))]
  ""
  "jump (%0)")
1083
;; No-operation: a register-to-register move with no visible effect.
(define_insn "nop"
  [(const_int 0)]
  ""
  "movw gr0,gr0 # nop")
1088\f
1089;______________________________________________________________________
1090;
1091; Peep-hole Optimization Patterns.
1092;______________________________________________________________________
1093
1094;; Optimize fullword move followed by a test of the moved value.
1095
;; Fold a fullword move and the following test of the moved value into
;; one mtstw (move-and-test) instruction.  The condition accepts a test
;; of either the destination or the (still equal) source.
(define_peephole
  [(set (match_operand:SI 0 "register_operand" "=r")
	(match_operand:SI 1 "nonimmediate_operand" "rm"))
   (set (cc0) (match_operand:SI 2 "nonimmediate_operand" "rm"))]
  "rtx_equal_p (operands[2], operands[0])
   || rtx_equal_p (operands[2], operands[1])"
  "*
  /* A plain move cannot overflow; tell NOTICE_UPDATE_CC that the
     overflow flag is meaningless after this insn.  */
  cc_status.flags |= CC_NO_OVERFLOW;
  return \"mtstw %1,%0\";
")
1106
1107;; Same for HI and QI mode move-test as well.
1108
;; Fold a halfword move, its sign-extension to a word, and the test of
;; the extended value into a single cvthw.  Valid only when the extended
;; copy (operand 2) is dead afterwards, and the extended source is the
;; moved value (either side of the move).
(define_peephole
  [(set (match_operand:HI 0 "register_operand" "=r")
	(match_operand:HI 1 "nonimmediate_operand" "rm"))
   (set (match_operand:SI 2 "register_operand" "=r")
	(sign_extend:SI (match_operand:HI 3 "nonimmediate_operand" "rm")))
   (set (cc0) (match_dup 2))]
  "dead_or_set_p (insn, operands[2])
   && (rtx_equal_p (operands[3], operands[0])
       || rtx_equal_p (operands[3], operands[1]))"
  "*
  /* The conversion sets the condition codes; no overflow possible.  */
  cc_status.flags |= CC_NO_OVERFLOW;
  return \"cvthw %1,%0\";
")
1122
;; Same as the preceding peephole, but for a QImode (byte) move:
;; move + sign-extend + test collapses to one cvtbw.
(define_peephole
  [(set (match_operand:QI 0 "register_operand" "=r")
	(match_operand:QI 1 "nonimmediate_operand" "rm"))
   (set (match_operand:SI 2 "register_operand" "=r")
	(sign_extend:SI (match_operand:QI 3 "nonimmediate_operand" "rm")))
   (set (cc0) (match_dup 2))]
  "dead_or_set_p (insn, operands[2])
   && (rtx_equal_p (operands[3], operands[0])
       || rtx_equal_p (operands[3], operands[1]))"
  "*
  /* The conversion sets the condition codes; no overflow possible.  */
  cc_status.flags |= CC_NO_OVERFLOW;
  return \"cvtbw %1,%0\";
")
1136
1137;; Optimize loops with an incremented/decremented variable.
1138
;; Loop peephole: a decrement of a register followed by a signed compare
;; and conditional branch becomes one dcmpw (decrement-and-compare) plus
;; the branch.  A constant comparand must satisfy INTVAL+32 in [0,63],
;; i.e. fit in [-32,31] -- presumably the immediate range dcmpw encodes.
(define_peephole
  [(set (match_operand:SI 0 "register_operand" "=r")
	(plus:SI (match_dup 0)
		 (const_int -1)))
   (set (cc0)
	(compare (match_operand:SI 1 "register_operand" "r")
		 (match_operand:SI 2 "nonmemory_operand" "ri")))
   (set (pc)
	(if_then_else (match_operator:SI 3 "signed_comparison"
				         [(cc0) (const_int 0)])
		      (label_ref (match_operand 4 "" ""))
		      (pc)))]
  "(GET_CODE (operands[2]) == CONST_INT
    ? (unsigned)INTVAL (operands[2]) + 32 >= 64
    : 1) && (rtx_equal_p (operands[0], operands[1])
	     || rtx_equal_p (operands[0], operands[2]))"
  "*
  /* b%N3 branches on the condition as written; b%R3 on the condition
     with its operands swapped, needed when the decremented register is
     the second comparand (see PRINT_OPERAND in pyr.c -- confirm).  */
  if (rtx_equal_p (operands[0], operands[1]))
    {
      output_asm_insn (\"dcmpw %2,%0\", operands);
      return \"b%N3 %l4\";
    }
  else
    {
      output_asm_insn (\"dcmpw %1,%0\", operands);
      return \"b%R3 %l4\";
    }
")
1167
;; Mirror of the preceding peephole for an INCREMENTED loop variable:
;; increment + signed compare + branch becomes one icmpw
;; (increment-and-compare) plus the branch.  Same constant range limit.
(define_peephole
  [(set (match_operand:SI 0 "register_operand" "=r")
	(plus:SI (match_dup 0)
		 (const_int 1)))
   (set (cc0)
	(compare (match_operand:SI 1 "register_operand" "r")
		 (match_operand:SI 2 "nonmemory_operand" "ri")))
   (set (pc)
	(if_then_else (match_operator:SI 3 "signed_comparison"
				         [(cc0) (const_int 0)])
		      (label_ref (match_operand 4 "" ""))
		      (pc)))]
  "(GET_CODE (operands[2]) == CONST_INT
    ? (unsigned)INTVAL (operands[2]) + 32 >= 64
    : 1) && (rtx_equal_p (operands[0], operands[1])
	     || rtx_equal_p (operands[0], operands[2]))"
  "*
  /* b%N3: condition as written; b%R3: operands swapped, used when the
     incremented register is the second comparand.  */
  if (rtx_equal_p (operands[0], operands[1]))
    {
      output_asm_insn (\"icmpw %2,%0\", operands);
      return \"b%N3 %l4\";
    }
  else
    {
      output_asm_insn (\"icmpw %1,%0\", operands);
      return \"b%R3 %l4\";
    }
")
1196
1197;; Combine two word moves with consecutive operands into one long move.
1198;; Also combines immediate moves, if the high-order destination operand
1199;; is loaded with 0 or -1 and the low-order destination operand is loaded
1200;; with a constant with the same sign.
1201
;; Combine two consecutive word moves into one long (movl) move, as
;; described in the comment above.  (Git-blame annotations that had
;; leaked into this pattern's body have been removed.)
(define_peephole
  [(set (match_operand:SI 0 "general_operand" "=g")
	(match_operand:SI 1 "general_operand" "g"))
   (set (match_operand:SI 2 "general_operand" "=g")
	(match_operand:SI 3 "general_operand" "g"))]
  "movdi_possible (operands)"
  "*
{
  /* Leave a trace of the two word moves that were combined.  */
  output_asm_insn (\"# COMBINE movw %1,%0\", operands);
  output_asm_insn (\"# COMBINE movw %3,%2\", operands);
  /* Called again for its side effect: presumably movdi_possible sets
     the global swap_operands consulted below (its boolean result was
     already checked by the pattern condition) -- confirm in pyr.c.  */
  movdi_possible (operands);
  if (CONSTANT_P (operands[1]))
    return (swap_operands ? \"movl %3,%0\" : \"movl %1,%2\");

  return (swap_operands ? \"movl %1,%0\" : \"movl %3,%2\");
}")
1218
1219;; Optimize certain tests after memory stores.
1220
;; After a narrowing store of a register, a sign-extension of that
;; register plus a test of the result can be done by the converting
;; store itself (cvtwb/cvtwh set the condition codes).  Operand 2 (the
;; extended copy) must be dead afterwards.
(define_peephole
  [(set (match_operand 0 "memory_operand" "=m")
	(match_operand 1 "register_operand" "r"))
   (set (match_operand:SI 2 "register_operand" "=r")
	(sign_extend:SI (match_dup 1)))
   (set (cc0)
	(match_dup 2))]
  "dead_or_set_p (insn, operands[2])"
  "*
  cc_status.flags |= CC_NO_OVERFLOW;
  /* The store's mode selects the conversion width: byte or halfword.  */
  if (GET_MODE (operands[0]) == QImode)
    return \"cvtwb %1,%0\";
  else
    return \"cvtwh %1,%0\";
")
1236\f
1237;______________________________________________________________________
1238;
1239; DImode Patterns.
1240;______________________________________________________________________
1241
;; Sign-extend SImode to DImode in three word operations: load the
;; low-order word (subreg 1), copy it into the high-order word
;; (subreg 0), then arithmetic-shift the high word right by 31 so it
;; becomes a replication of the sign bit.
(define_expand "extendsidi2"
  [(set (subreg:SI (match_operand:DI 0 "register_operand" "=r") 1)
	(match_operand:SI 1 "general_operand" "g"))
   (set (subreg:SI (match_dup 0) 0)
	(subreg:SI (match_dup 0) 1))
   (set (subreg:SI (match_dup 0) 0)
	(ashiftrt:SI (subreg:SI (match_dup 0) 0)
		     (const_int 31)))]
  ""
  "")
1252
;; 64-bit add.  Operand 1 shares the destination register pair
;; (constraint "0"); operand 2 is another pair or a CONST_DOUBLE.
;; Emits addw on the low-order words, then addwc (add with carry)
;; on the high-order words.
(define_insn "adddi3"
  [(set (match_operand:DI 0 "register_operand" "=r")
	(plus:DI (match_operand:DI 1 "nonmemory_operand" "%0")
		 (match_operand:DI 2 "nonmemory_operand" "rF")))]
  ""
  "*
{
  rtx xoperands[2];
  CC_STATUS_INIT;
  /* xoperands[] address the low-order halves: register REGNO+1, or the
     low word of the constant.  */
  xoperands[0] = gen_rtx (REG, SImode, REGNO (operands[0]) + 1);
  if (REG_P (operands[2]))
    xoperands[1] = gen_rtx (REG, SImode, REGNO (operands[2]) + 1);
  else
    {
      xoperands[1] = gen_rtx (CONST_INT, VOIDmode,
			      CONST_DOUBLE_LOW (operands[2]));
      /* Replace operands[2] by its high word for the second insn.  */
      operands[2] = gen_rtx (CONST_INT, VOIDmode,
			     CONST_DOUBLE_HIGH (operands[2]));
    }
  output_asm_insn (\"addw %1,%0\", xoperands);
  return \"addwc %2,%0\";
}")
1275
;; 64-bit subtract.  Same two-word scheme as adddi3: subw on the
;; low-order words, then subwb (subtract with borrow) on the high words.
(define_insn "subdi3"
  [(set (match_operand:DI 0 "register_operand" "=r")
	(minus:DI (match_operand:DI 1 "register_operand" "0")
		  (match_operand:DI 2 "nonmemory_operand" "rF")))]
  ""
  "*
{
  rtx xoperands[2];
  CC_STATUS_INIT;
  /* xoperands[] address the low-order halves: register REGNO+1, or the
     low word of the constant.  */
  xoperands[0] = gen_rtx (REG, SImode, REGNO (operands[0]) + 1);
  if (REG_P (operands[2]))
    xoperands[1] = gen_rtx (REG, SImode, REGNO (operands[2]) + 1);
  else
    {
      xoperands[1] = gen_rtx (CONST_INT, VOIDmode,
			      CONST_DOUBLE_LOW (operands[2]));
      /* Replace operands[2] by its high word for the second insn.  */
      operands[2] = gen_rtx (CONST_INT, VOIDmode,
			     CONST_DOUBLE_HIGH (operands[2]));
    }
  output_asm_insn (\"subw %1,%0\", xoperands);
  return \"subwb %2,%0\";
}")
1298
;; 64-bit inclusive-or, done as two independent word ORs (bitwise ops
;; need no carry between the halves).
(define_insn "iordi3"
  [(set (match_operand:DI 0 "register_operand" "=r")
	(ior:DI (match_operand:DI 1 "nonmemory_operand" "%0")
		(match_operand:DI 2 "nonmemory_operand" "rF")))]
  ""
  "*
{
  rtx xoperands[2];
  CC_STATUS_INIT;
  /* xoperands[] address the low-order halves: register REGNO+1, or the
     low word of the constant.  */
  xoperands[0] = gen_rtx (REG, SImode, REGNO (operands[0]) + 1);
  if (REG_P (operands[2]))
    xoperands[1] = gen_rtx (REG, SImode, REGNO (operands[2]) + 1);
  else
    {
      xoperands[1] = gen_rtx (CONST_INT, VOIDmode,
			      CONST_DOUBLE_LOW (operands[2]));
      /* Replace operands[2] by its high word for the second insn.  */
      operands[2] = gen_rtx (CONST_INT, VOIDmode,
			     CONST_DOUBLE_HIGH (operands[2]));
    }
  output_asm_insn (\"orw %1,%0\", xoperands);
  return \"orw %2,%0\";
}")
1321
;; 64-bit and, done as two independent word ANDs.
(define_insn "anddi3"
  [(set (match_operand:DI 0 "register_operand" "=r")
	(and:DI (match_operand:DI 1 "nonmemory_operand" "%0")
		(match_operand:DI 2 "nonmemory_operand" "rF")))]
  ""
  "*
{
  rtx xoperands[2];
  CC_STATUS_INIT;
  /* xoperands[] address the low-order halves: register REGNO+1, or the
     low word of the constant.  */
  xoperands[0] = gen_rtx (REG, SImode, REGNO (operands[0]) + 1);
  if (REG_P (operands[2]))
    xoperands[1] = gen_rtx (REG, SImode, REGNO (operands[2]) + 1);
  else
    {
      xoperands[1] = gen_rtx (CONST_INT, VOIDmode,
			      CONST_DOUBLE_LOW (operands[2]));
      /* Replace operands[2] by its high word for the second insn.  */
      operands[2] = gen_rtx (CONST_INT, VOIDmode,
			     CONST_DOUBLE_HIGH (operands[2]));
    }
  output_asm_insn (\"andw %1,%0\", xoperands);
  return \"andw %2,%0\";
}")
1344
;; 64-bit exclusive-or, done as two independent word XORs.
(define_insn "xordi3"
  [(set (match_operand:DI 0 "register_operand" "=r")
	(xor:DI (match_operand:DI 1 "nonmemory_operand" "%0")
		(match_operand:DI 2 "nonmemory_operand" "rF")))]
  ""
  "*
{
  rtx xoperands[2];
  CC_STATUS_INIT;
  /* xoperands[] address the low-order halves: register REGNO+1, or the
     low word of the constant.  */
  xoperands[0] = gen_rtx (REG, SImode, REGNO (operands[0]) + 1);
  if (REG_P (operands[2]))
    xoperands[1] = gen_rtx (REG, SImode, REGNO (operands[2]) + 1);
  else
    {
      xoperands[1] = gen_rtx (CONST_INT, VOIDmode,
			      CONST_DOUBLE_LOW (operands[2]));
      /* Replace operands[2] by its high word for the second insn.  */
      operands[2] = gen_rtx (CONST_INT, VOIDmode,
			     CONST_DOUBLE_HIGH (operands[2]));
    }
  output_asm_insn (\"xorw %1,%0\", xoperands);
  return \"xorw %2,%0\";
}")
;; Jump to the address held in a register.
;; Modelled after Jonathan Stone's version and "tablejump" above. - S.P.
(define_insn "indirect_jump"
  [(set (pc) (match_operand:SI 0 "general_operand" "r"))]
  ""
  "jump (%0)")
1373\f
1374;;- Local variables:
1375;;- mode:emacs-lisp
1376;;- comment-start: ";;- "
1377;;- eval: (set-syntax-table (copy-sequence (syntax-table)))
1378;;- eval: (modify-syntax-entry ?] ")[")
1379;;- eval: (modify-syntax-entry ?{ "(}")
1380;;- eval: (modify-syntax-entry ?} "){")
1381;;- End: