;; Source: gcc.gnu.org Git (gcc.git), file gcc/config/pyr/pyr.md
;; (Related ChangeLog note: rtlanal.c (dead_or_set_regno_p): Ignore
;; REG_DEAD notes after reload completes.)
;; GNU C machine description for Pyramid 90x, 9000, MIServer Series
;; Copyright (C) 1989, 1990, 1995, 1997 Free Software Foundation, Inc.

;; This file is part of GNU CC.

;; GNU CC is free software; you can redistribute it and/or modify
;; it under the terms of the GNU General Public License as published by
;; the Free Software Foundation; either version 2, or (at your option)
;; any later version.

;; GNU CC is distributed in the hope that it will be useful,
;; but WITHOUT ANY WARRANTY; without even the implied warranty of
;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
;; GNU General Public License for more details.

;; You should have received a copy of the GNU General Public License
;; along with GNU CC; see the file COPYING.  If not, write to
;; the Free Software Foundation, 59 Temple Place - Suite 330,
;; Boston, MA 02111-1307, USA.

;; Instruction patterns.  When multiple patterns apply,
;; the first one in the file is chosen.
;;
;; See file "rtl.def" for documentation on define_insn, match_*, et. al.
;;
;; cpp macro #define NOTICE_UPDATE_CC in file tm.h handles condition code
;; updates for most instructions.

;; These comments are mostly obsolete.  Written for gcc version 1.XX.
;; * Try using define_insn instead of some peepholes in more places.
;; * Set REG_NOTES:REG_EQUIV for cvt[bh]w loads.  This would make the
;;   backward scan in sign_extend needless.
;; * Match (pc) (label_ref) case in peephole patterns.
;; * Should optimize
;;   "cmpX op1,op2; b{eq,ne} LY; ucmpX op1.op2; b{lt,le,gt,ge} LZ"
;;   to
;;   "ucmpX op1,op2; b{eq,ne} LY; b{lt,le,gt,ge} LZ"
;;   by pre-scanning insn and running notice_update_cc for them.
;; * Is it necessary to do copy_rtx in the test and compare patterns?
;; * Fix true frame pointer omission.
;; * Make the jump tables contain branches, not addresses!  This would
;;   save us one instruction.
;; * Could the complicated scheme for compares be simplified, if we had
;;   no named cmpqi or cmphi patterns, and instead anonymous patterns for
;;   the less-than-word compare cases pyr can handle???
;; * The jump insn seems to accept more than just IR addressing.  Would
;;   we win by telling GCC?  Or can we use movw into the global reg which
;;   is a synonym for pc?
;; * More DImode patterns.
;; * Scan backwards in "zero_extendhisi2", "zero_extendqisi2" to find out
;;   if the extension can be omitted.
;; * "divmodsi" with Pyramid "ediv" insn.  Is it possible in rtl??
;; * Would "rcsp tmpreg; u?cmp[bh] op1_regdispl(tmpreg),op2" win in
;;   comparison with the two extensions and single test generated now?
;;   The rcsp insn could be expanded, and moved out of loops by the
;;   optimizer, making 1 (64 bit) insn of 3 (32 bit) insns in loops.
;;   The rcsp insn could be followed by an add insn, making non-displacement
;;   IR addressing sufficient.
;______________________________________________________________________
;
;	Test and Compare Patterns.
;______________________________________________________________________

; The argument for the rather complicated test and compare expansion
; scheme, is the irregular pyramid instructions for these operations.
; 1) Pyramid has different signed and unsigned compares.  2) HImode
; and QImode integers are memory-memory and immediate-memory only.  3)
; Unsigned HImode compares doesn't exist.  4) Only certain
; combinations of addresses are allowed for memory-memory compares.
; Whenever necessary, in order to fulfill these addressing
; constraints, the compare operands are swapped.
;; tstsi: test an SImode value against zero.
;; Force the operand into a register; a following anonymous pattern
;; then emits the actual test instruction.
(define_expand "tstsi"
  [(set (cc0)
	(match_operand:SI 0 "general_operand" ""))]
  "" "operands[0] = force_reg (SImode, operands[0]);")
78
;; Memory-memory SImode compare.  Legal only for the address
;; combinations accepted by weird_memory_memory, which may request an
;; operand swap via the global `swap_operands'.  The signedness of the
;; immediately following branch selects cmpw vs. ucmpw.
(define_insn ""
  [(set (cc0)
	(compare (match_operand:SI 0 "memory_operand" "m")
		 (match_operand:SI 1 "memory_operand" "m")))]
  "weird_memory_memory (operands[0], operands[1])"
  "*
{
  rtx br_insn = NEXT_INSN (insn);
  RTX_CODE br_code;

  /* The compare must be followed directly by the conditional branch
     that consumes cc0.  */
  if (GET_CODE (br_insn) != JUMP_INSN)
    abort();
  br_code = GET_CODE (XEXP (XEXP (PATTERN (br_insn), 1), 0));

  /* Recompute the swap decision; sets `swap_operands'.  */
  weird_memory_memory (operands[0], operands[1]);

  if (swap_operands)
    {
      cc_status.flags = CC_REVERSED;
      if (TRULY_UNSIGNED_COMPARE_P (br_code))
	{
	  cc_status.mdep = CC_VALID_FOR_UNSIGNED;
	  return \"ucmpw %0,%1\";
	}
      return \"cmpw %0,%1\";
    }

  if (TRULY_UNSIGNED_COMPARE_P (br_code))
    {
      cc_status.mdep = CC_VALID_FOR_UNSIGNED;
      return \"ucmpw %1,%0\";
    }
  return \"cmpw %1,%0\";
}")
113
;; cmpsi: general SImode compare.  Alternative 1 has the operands in
;; reversed positions, so the condition code is marked CC_REVERSED.
;; The signedness of the following branch selects cmpw vs. ucmpw.
(define_insn "cmpsi"
  [(set (cc0)
	(compare (match_operand:SI 0 "nonimmediate_operand" "r,g")
		 (match_operand:SI 1 "general_operand" "g,r")))]
  ""
  "*
{
  rtx br_insn = NEXT_INSN (insn);
  RTX_CODE br_code;

  if (GET_CODE (br_insn) != JUMP_INSN)
    abort();
  br_code = GET_CODE (XEXP (XEXP (PATTERN (br_insn), 1), 0));

  if (which_alternative != 0)
    {
      cc_status.flags = CC_REVERSED;
      if (TRULY_UNSIGNED_COMPARE_P (br_code))
	{
	  cc_status.mdep = CC_VALID_FOR_UNSIGNED;
	  return \"ucmpw %0,%1\";
	}
      return \"cmpw %0,%1\";
    }

  if (TRULY_UNSIGNED_COMPARE_P (br_code))
    {
      cc_status.mdep = CC_VALID_FOR_UNSIGNED;
      return \"ucmpw %1,%0\";
    }
  return \"cmpw %1,%0\";
}")
146
;; Register test against zero (emitted by the tstsi expander).
;; Unsigned conditions use "ucmpw $0,%0"; signed ones use mtstw.
;; (An old "#if 0" block returning cmpw $0,%0 has been removed.)
(define_insn ""
  [(set (cc0)
	(match_operand:SI 0 "nonimmediate_operand" "r"))]
  ""
  "*
{
  rtx br_insn = NEXT_INSN (insn);
  RTX_CODE br_code;

  if (GET_CODE (br_insn) != JUMP_INSN)
    abort();
  br_code = GET_CODE (XEXP (XEXP (PATTERN (br_insn), 1), 0));

  if (TRULY_UNSIGNED_COMPARE_P (br_code))
    {
      cc_status.mdep = CC_VALID_FOR_UNSIGNED;
      return \"ucmpw $0,%0\";
    }
  return \"mtstw %0,%0\";
}")
171
;; cmphi: HImode compares are deferred.  The operands are saved in the
;; globals test_op0/test_op1 and the branch expander (extend_and_branch)
;; later emits the real compare with the proper extension.
(define_expand "cmphi"
  [(set (cc0)
	(compare (match_operand:HI 0 "nonimmediate_operand" "")
		 (match_operand:HI 1 "general_operand" "")))]
  ""
  "
{
  extern rtx test_op0, test_op1;  extern enum machine_mode test_mode;
  test_op0 = copy_rtx (operands[0]);
  test_op1 = copy_rtx (operands[1]);
  test_mode = HImode;
  DONE;
}")
185
;; tsthi: deferred like cmphi; extend_and_branch emits the real test.
(define_expand "tsthi"
  [(set (cc0)
	(match_operand:HI 0 "nonimmediate_operand" ""))]
  ""
  "
{
  extern rtx test_op0;  extern enum machine_mode test_mode;
  test_op0 = copy_rtx (operands[0]);
  test_mode = HImode;
  DONE;
}")
197
;; Memory-memory HImode compare.  Unsigned HImode compares don't exist
;; on the Pyramid, so this only matches when the following branch is a
;; signed condition.
(define_insn ""
  [(set (cc0)
	(compare (match_operand:HI 0 "memory_operand" "m")
		 (match_operand:HI 1 "memory_operand" "m")))]
  "(!TRULY_UNSIGNED_COMPARE_P (GET_CODE (XEXP (SET_SRC (PATTERN (NEXT_INSN (insn))), 0))))
   && weird_memory_memory (operands[0], operands[1])"
  "*
{
  rtx br_insn = NEXT_INSN (insn);

  if (GET_CODE (br_insn) != JUMP_INSN)
    abort();

  weird_memory_memory (operands[0], operands[1]);

  if (swap_operands)
    {
      cc_status.flags = CC_REVERSED;
      return \"cmph %0,%1\";
    }

  return \"cmph %1,%0\";
}")
221
;; Register-memory HImode compare (exactly one operand in memory);
;; signed conditions only, as above.
(define_insn ""
  [(set (cc0)
	(compare (match_operand:HI 0 "nonimmediate_operand" "r,m")
		 (match_operand:HI 1 "nonimmediate_operand" "m,r")))]
  "(!TRULY_UNSIGNED_COMPARE_P (GET_CODE (XEXP (SET_SRC (PATTERN (NEXT_INSN (insn))), 0))))
   && ((GET_CODE (operands[0]) == MEM) != (GET_CODE (operands[1]) == MEM))"
  "*
{
  rtx br_insn = NEXT_INSN (insn);

  if (GET_CODE (br_insn) != JUMP_INSN)
    abort();

  if (which_alternative != 0)
    {
      cc_status.flags = CC_REVERSED;
      return \"cmph %0,%1\";
    }

  return \"cmph %1,%0\";
}")
243
;; cmpqi: deferred like cmphi (see above); extend_and_branch finishes.
(define_expand "cmpqi"
  [(set (cc0)
	(compare (match_operand:QI 0 "nonimmediate_operand" "")
		 (match_operand:QI 1 "general_operand" "")))]
  ""
  "
{
  extern rtx test_op0, test_op1;  extern enum machine_mode test_mode;
  test_op0 = copy_rtx (operands[0]);
  test_op1 = copy_rtx (operands[1]);
  test_mode = QImode;
  DONE;
}")
257
;; tstqi: deferred like tsthi; extend_and_branch emits the real test.
(define_expand "tstqi"
  [(set (cc0)
	(match_operand:QI 0 "nonimmediate_operand" ""))]
  ""
  "
{
  extern rtx test_op0;  extern enum machine_mode test_mode;
  test_op0 = copy_rtx (operands[0]);
  test_mode = QImode;
  DONE;
}")
269
;; Memory-memory QImode compare.  Unlike HImode, an unsigned byte
;; compare (ucmpb) exists, chosen from the following branch's code.
(define_insn ""
  [(set (cc0)
	(compare (match_operand:QI 0 "memory_operand" "m")
		 (match_operand:QI 1 "memory_operand" "m")))]
  "weird_memory_memory (operands[0], operands[1])"
  "*
{
  rtx br_insn = NEXT_INSN (insn);
  RTX_CODE br_code;

  if (GET_CODE (br_insn) != JUMP_INSN)
    abort();
  br_code = GET_CODE (XEXP (XEXP (PATTERN (br_insn), 1), 0));

  weird_memory_memory (operands[0], operands[1]);

  if (swap_operands)
    {
      cc_status.flags = CC_REVERSED;
      if (TRULY_UNSIGNED_COMPARE_P (br_code))
	{
	  cc_status.mdep = CC_VALID_FOR_UNSIGNED;
	  return \"ucmpb %0,%1\";
	}
      return \"cmpb %0,%1\";
    }

  if (TRULY_UNSIGNED_COMPARE_P (br_code))
    {
      cc_status.mdep = CC_VALID_FOR_UNSIGNED;
      return \"ucmpb %1,%0\";
    }
  return \"cmpb %1,%0\";
}")
304
;; Register-memory QImode compare (exactly one operand in memory).
(define_insn ""
  [(set (cc0)
	(compare (match_operand:QI 0 "nonimmediate_operand" "r,m")
		 (match_operand:QI 1 "nonimmediate_operand" "m,r")))]
  "((GET_CODE (operands[0]) == MEM) != (GET_CODE (operands[1]) == MEM))"
  "*
{
  rtx br_insn = NEXT_INSN (insn);
  RTX_CODE br_code;

  if (GET_CODE (br_insn) != JUMP_INSN)
    abort();
  br_code = GET_CODE (XEXP (XEXP (PATTERN (br_insn), 1), 0));

  if (which_alternative != 0)
    {
      cc_status.flags = CC_REVERSED;
      if (TRULY_UNSIGNED_COMPARE_P (br_code))
	{
	  cc_status.mdep = CC_VALID_FOR_UNSIGNED;
	  return \"ucmpb %0,%1\";
	}
      return \"cmpb %0,%1\";
    }

  if (TRULY_UNSIGNED_COMPARE_P (br_code))
    {
      cc_status.mdep = CC_VALID_FOR_UNSIGNED;
      return \"ucmpb %1,%0\";
    }
  return \"cmpb %1,%0\";
}")
337
;; Conditional branch expanders.  extend_and_branch first emits any
;; compare deferred by cmphi/cmpqi/tsthi/tstqi, sign-extending the
;; operands for signed conditions and zero-extending for unsigned.
(define_expand "bgt"
  [(set (pc) (if_then_else (gt (cc0) (const_int 0))
			   (label_ref (match_operand 0 "" "")) (pc)))]
  "" "extend_and_branch (SIGN_EXTEND);")

(define_expand "blt"
  [(set (pc) (if_then_else (lt (cc0) (const_int 0))
			   (label_ref (match_operand 0 "" "")) (pc)))]
  "" "extend_and_branch (SIGN_EXTEND);")

(define_expand "bge"
  [(set (pc) (if_then_else (ge (cc0) (const_int 0))
			   (label_ref (match_operand 0 "" "")) (pc)))]
  "" "extend_and_branch (SIGN_EXTEND);")

(define_expand "ble"
  [(set (pc) (if_then_else (le (cc0) (const_int 0))
			   (label_ref (match_operand 0 "" "")) (pc)))]
  "" "extend_and_branch (SIGN_EXTEND);")

(define_expand "beq"
  [(set (pc) (if_then_else (eq (cc0) (const_int 0))
			   (label_ref (match_operand 0 "" "")) (pc)))]
  "" "extend_and_branch (SIGN_EXTEND);")

(define_expand "bne"
  [(set (pc) (if_then_else (ne (cc0) (const_int 0))
			   (label_ref (match_operand 0 "" "")) (pc)))]
  "" "extend_and_branch (SIGN_EXTEND);")

(define_expand "bgtu"
  [(set (pc) (if_then_else (gtu (cc0) (const_int 0))
			   (label_ref (match_operand 0 "" "")) (pc)))]
  "" "extend_and_branch (ZERO_EXTEND);")

(define_expand "bltu"
  [(set (pc) (if_then_else (ltu (cc0) (const_int 0))
			   (label_ref (match_operand 0 "" "")) (pc)))]
  "" "extend_and_branch (ZERO_EXTEND);")

(define_expand "bgeu"
  [(set (pc) (if_then_else (geu (cc0) (const_int 0))
			   (label_ref (match_operand 0 "" "")) (pc)))]
  "" "extend_and_branch (ZERO_EXTEND);")

(define_expand "bleu"
  [(set (pc) (if_then_else (leu (cc0) (const_int 0))
			   (label_ref (match_operand 0 "" "")) (pc)))]
  "" "extend_and_branch (ZERO_EXTEND);")
387
;; Floating-point compares and tests; register operands only.
(define_insn "cmpdf"
  [(set (cc0)
	(compare (match_operand:DF 0 "register_operand" "r")
		 (match_operand:DF 1 "register_operand" "r")))]
  ""
  "cmpd %1,%0")

(define_insn "cmpsf"
  [(set (cc0)
	(compare (match_operand:SF 0 "register_operand" "r")
		 (match_operand:SF 1 "register_operand" "r")))]
  ""
  "cmpf %1,%0")

(define_insn "tstdf"
  [(set (cc0)
	(match_operand:DF 0 "register_operand" "r"))]
  ""
  "mtstd %0,%0")

(define_insn "tstsf"
  [(set (cc0)
	(match_operand:SF 0 "register_operand" "r"))]
  ""
  "mtstf %0,%0")
\f
;______________________________________________________________________
;
;	Fixed-point Arithmetic.
;______________________________________________________________________
;; addsi3.  Alternative 1 uses mova (address computation) to do a
;; three-operand add; mova does not set the condition codes, hence
;; forget_cc_if_dependent.
;; NOTE(review): the INTVAL comparison below reads "== 32" in this
;; copy; confirm against an authoritative source (a negative constant
;; would better motivate the subw %n2 form).
(define_insn "addsi3"
  [(set (match_operand:SI 0 "register_operand" "=r,!r")
	(plus:SI (match_operand:SI 1 "general_operand" "%0,r")
		 (match_operand:SI 2 "general_operand" "g,rJ")))]
  ""
  "*
{
  if (which_alternative == 0)
    return (GET_CODE (operands[2]) == CONST_INT && INTVAL (operands[2]) == 32
	    ? \"subw %n2,%0\" : \"addw %2,%0\");
  else
    {
      forget_cc_if_dependent (operands[0]);
      return \"mova %a2[%1*1],%0\";
    }
}")
435
;; subsi3/divsi3 offer a reversed alternative (rsubw/rdivw) so either
;; operand may share the destination register.
(define_insn "subsi3"
  [(set (match_operand:SI 0 "register_operand" "=r,r")
	(minus:SI (match_operand:SI 1 "general_operand" "0,g")
		  (match_operand:SI 2 "general_operand" "g,0")))]
  ""
  "* return (which_alternative == 0) ? \"subw %2,%0\" : \"rsubw %1,%0\";")

(define_insn "mulsi3"
  [(set (match_operand:SI 0 "register_operand" "=r")
	(mult:SI (match_operand:SI 1 "general_operand" "%0")
		 (match_operand:SI 2 "general_operand" "g")))]
  ""
  "mulw %2,%0")

(define_insn "divsi3"
  [(set (match_operand:SI 0 "register_operand" "=r,r")
	(div:SI (match_operand:SI 1 "general_operand" "0,g")
		(match_operand:SI 2 "general_operand" "g,0")))]
  ""
  "* return (which_alternative == 0) ? \"divw %2,%0\" : \"rdivw %1,%0\";")

(define_insn "udivsi3"
  [(set (match_operand:SI 0 "register_operand" "=r")
	(udiv:SI (match_operand:SI 1 "register_operand" "0")
		 (match_operand:SI 2 "general_operand" "g")))]
  ""
  "udivw %2,%0")

(define_insn "modsi3"
  [(set (match_operand:SI 0 "register_operand" "=r")
	(mod:SI (match_operand:SI 1 "register_operand" "0")
		(match_operand:SI 2 "general_operand" "g")))]
  ""
  "modw %2,%0")

(define_insn "umodsi3"
  [(set (match_operand:SI 0 "register_operand" "=r")
	(umod:SI (match_operand:SI 1 "register_operand" "0")
		 (match_operand:SI 2 "general_operand" "g")))]
  ""
  "umodw %2,%0")

(define_insn "negsi2"
  [(set (match_operand:SI 0 "register_operand" "=r")
	(neg:SI (match_operand:SI 1 "nonimmediate_operand" "rm")))]
  ""
  "mnegw %1,%0")

(define_insn "one_cmplsi2"
  [(set (match_operand:SI 0 "register_operand" "=r")
	(not:SI (match_operand:SI 1 "nonimmediate_operand" "rm")))]
  ""
  "mcomw %1,%0")

(define_insn "abssi2"
  [(set (match_operand:SI 0 "register_operand" "=r")
	(abs:SI (match_operand:SI 1 "nonimmediate_operand" "rm")))]
  ""
  "mabsw %1,%0")
\f
;______________________________________________________________________
;
;	Floating-point Arithmetic.
;______________________________________________________________________
;; Two-address floating-point arithmetic; operand 1 must share the
;; destination register, so commutative ops mark it "%0".
(define_insn "adddf3"
  [(set (match_operand:DF 0 "register_operand" "=r")
	(plus:DF (match_operand:DF 1 "register_operand" "%0")
		 (match_operand:DF 2 "register_operand" "r")))]
  ""
  "addd %2,%0")

(define_insn "addsf3"
  [(set (match_operand:SF 0 "register_operand" "=r")
	(plus:SF (match_operand:SF 1 "register_operand" "%0")
		 (match_operand:SF 2 "register_operand" "r")))]
  ""
  "addf %2,%0")

(define_insn "subdf3"
  [(set (match_operand:DF 0 "register_operand" "=r")
	(minus:DF (match_operand:DF 1 "register_operand" "0")
		  (match_operand:DF 2 "register_operand" "r")))]
  ""
  "subd %2,%0")

(define_insn "subsf3"
  [(set (match_operand:SF 0 "register_operand" "=r")
	(minus:SF (match_operand:SF 1 "register_operand" "0")
		  (match_operand:SF 2 "register_operand" "r")))]
  ""
  "subf %2,%0")

(define_insn "muldf3"
  [(set (match_operand:DF 0 "register_operand" "=r")
	(mult:DF (match_operand:DF 1 "register_operand" "%0")
		 (match_operand:DF 2 "register_operand" "r")))]
  ""
  "muld %2,%0")

(define_insn "mulsf3"
  [(set (match_operand:SF 0 "register_operand" "=r")
	(mult:SF (match_operand:SF 1 "register_operand" "%0")
		 (match_operand:SF 2 "register_operand" "r")))]
  ""
  "mulf %2,%0")

(define_insn "divdf3"
  [(set (match_operand:DF 0 "register_operand" "=r")
	(div:DF (match_operand:DF 1 "register_operand" "0")
		(match_operand:DF 2 "register_operand" "r")))]
  ""
  "divd %2,%0")

(define_insn "divsf3"
  [(set (match_operand:SF 0 "register_operand" "=r")
	(div:SF (match_operand:SF 1 "register_operand" "0")
		(match_operand:SF 2 "register_operand" "r")))]
  ""
  "divf %2,%0")

(define_insn "negdf2"
  [(set (match_operand:DF 0 "register_operand" "=r")
	(neg:DF (match_operand:DF 1 "register_operand" "r")))]
  ""
  "mnegd %1,%0")

(define_insn "negsf2"
  [(set (match_operand:SF 0 "register_operand" "=r")
	(neg:SF (match_operand:SF 1 "register_operand" "r")))]
  ""
  "mnegf %1,%0")

(define_insn "absdf2"
  [(set (match_operand:DF 0 "register_operand" "=r")
	(abs:DF (match_operand:DF 1 "register_operand" "r")))]
  ""
  "mabsd %1,%0")

(define_insn "abssf2"
  [(set (match_operand:SF 0 "register_operand" "=r")
	(abs:SF (match_operand:SF 1 "register_operand" "r")))]
  ""
  "mabsf %1,%0")
\f
;______________________________________________________________________
;
;	Logical and Shift Instructions.
;______________________________________________________________________
;; Bit test: AND for condition codes only, no result stored.
(define_insn ""
  [(set (cc0)
	(and:SI (match_operand:SI 0 "general_operand" "%r")
		(match_operand:SI 1 "general_operand" "g")))]
  ""
  "*
{
  cc_status.flags |= CC_NO_OVERFLOW;
  return \"bitw %1,%0\";
}")

;; andsi3.  Alternative 1 handles AND with 0xff/0xffff (constraint K)
;; as a zero-extending move, which leaves cc "not negative".
(define_insn "andsi3"
  [(set (match_operand:SI 0 "register_operand" "=r,r")
	(and:SI (match_operand:SI 1 "general_operand" "%0,r")
		(match_operand:SI 2 "general_operand" "g,K")))]
  ""
  "*
{
  if (which_alternative == 0)
    return \"andw %2,%0\";

  cc_status.flags = CC_NOT_NEGATIVE;
  return (INTVAL (operands[2]) == 255
	  ? \"movzbw %1,%0\" : \"movzhw %1,%0\");
}")

;; Bit-clear: dest &= ~src.
(define_insn ""
  [(set (match_operand:SI 0 "register_operand" "=r")
	(and:SI (not:SI (match_operand:SI 1 "general_operand" "g"))
		(match_operand:SI 2 "register_operand" "0")))]
  ""
  "bicw %1,%0")

(define_insn "iorsi3"
  [(set (match_operand:SI 0 "register_operand" "=r")
	(ior:SI (match_operand:SI 1 "general_operand" "%0")
		(match_operand:SI 2 "general_operand" "g")))]
  ""
  "orw %2,%0")

(define_insn "xorsi3"
  [(set (match_operand:SI 0 "register_operand" "=r")
	(xor:SI (match_operand:SI 1 "general_operand" "%0")
		(match_operand:SI 2 "general_operand" "g")))]
  ""
  "xorw %2,%0")
632
; The arithmetic left shift instructions work strangely on pyramids.
; They fail to modify the sign bit.  Therefore, use logic shifts.
; output_shift handles out-of-range and variable shift counts.

(define_insn "ashlsi3"
  [(set (match_operand:SI 0 "register_operand" "=r")
	(ashift:SI (match_operand:SI 1 "register_operand" "0")
		   (match_operand:SI 2 "general_operand" "rnm")))]
  ""
  "*
{
  extern char *output_shift ();
  return output_shift (\"lshlw %2,%0\", operands[2], 32);
}")

(define_insn "ashrsi3"
  [(set (match_operand:SI 0 "register_operand" "=r")
	(ashiftrt:SI (match_operand:SI 1 "register_operand" "0")
		     (match_operand:SI 2 "general_operand" "rnm")))]
  ""
  "*
{
  extern char *output_shift ();
  return output_shift (\"ashrw %2,%0\", operands[2], 32);
}")

(define_insn "ashrdi3"
  [(set (match_operand:DI 0 "register_operand" "=r")
	(ashiftrt:DI (match_operand:DI 1 "register_operand" "0")
		     (match_operand:SI 2 "general_operand" "rnm")))]
  ""
  "*
{
  extern char *output_shift ();
  return output_shift (\"ashrl %2,%0\", operands[2], 64);
}")

(define_insn "lshrsi3"
  [(set (match_operand:SI 0 "register_operand" "=r")
	(lshiftrt:SI (match_operand:SI 1 "register_operand" "0")
		     (match_operand:SI 2 "general_operand" "rnm")))]
  ""
  "*
{
  extern char *output_shift ();
  return output_shift (\"lshrw %2,%0\", operands[2], 32);
}")

(define_insn "rotlsi3"
  [(set (match_operand:SI 0 "register_operand" "=r")
	(rotate:SI (match_operand:SI 1 "register_operand" "0")
		   (match_operand:SI 2 "general_operand" "rnm")))]
  ""
  "*
{
  extern char *output_shift ();
  return output_shift (\"rotlw %2,%0\", operands[2], 32);
}")

(define_insn "rotrsi3"
  [(set (match_operand:SI 0 "register_operand" "=r")
	(rotatert:SI (match_operand:SI 1 "register_operand" "0")
		     (match_operand:SI 2 "general_operand" "rnm")))]
  ""
  "*
{
  extern char *output_shift ();
  return output_shift (\"rotrw %2,%0\", operands[2], 32);
}")
\f
;______________________________________________________________________
;
;	Fixed and Floating Moves.
;______________________________________________________________________
;; If the destination is a memory operand, indexed source operands are
;; disallowed.  Big DImode constants are always loaded into a reg pair,
;; although offsettable memory addresses really could be dealt with.
;; The condition accepts only CONST_DOUBLEs that fit a sign-extended
;; 32-bit word, so the low word alone can be stored with movl.

(define_insn ""
  [(set (match_operand:DI 0 "memory_operand" "=m")
	(match_operand:DI 1 "nonindexed_operand" "gF"))]
  "(GET_CODE (operands[1]) == CONST_DOUBLE
    ? ((CONST_DOUBLE_HIGH (operands[1]) == 0
	&& CONST_DOUBLE_LOW (operands[1]) >= 0)
       || (CONST_DOUBLE_HIGH (operands[1]) == -1
	   && CONST_DOUBLE_LOW (operands[1]) < 0))
    : 1)"
  "*
{
  if (GET_CODE (operands[1]) == CONST_DOUBLE)
    operands[1] = gen_rtx (CONST_INT, VOIDmode,
			   CONST_DOUBLE_LOW (operands[1]));
  return \"movl %1,%0\";
}")
727
;; Force the destination to a register, so all source operands are allowed.

(define_insn "movdi"
  [(set (match_operand:DI 0 "general_operand" "=r")
	(match_operand:DI 1 "general_operand" "gF"))]
  ""
  "*
{
  extern char *output_move_double ();
  return output_move_double (operands);
}")
739
;; If the destination is a memory address, indexed source operands are
;; disallowed.

(define_insn ""
  [(set (match_operand:SI 0 "memory_operand" "=m")
	(match_operand:SI 1 "nonindexed_operand" "g"))]
  ""
  "movw %1,%0")

;; Force the destination to a register, so all source operands are allowed.

(define_insn "movsi"
  [(set (match_operand:SI 0 "general_operand" "=r")
	(match_operand:SI 1 "general_operand" "g"))]
  ""
  "movw %1,%0")
756
;; If the destination is a memory address, indexed source operands are
;; disallowed.

(define_insn ""
  [(set (match_operand:HI 0 "memory_operand" "=m")
	(match_operand:HI 1 "nonindexed_operand" "g"))]
  ""
  "*
{
  if (REG_P (operands[1]))
    return \"cvtwh %1,%0\";		/* reg -> mem */
  else
    return \"movh %1,%0\";		/* mem imm -> mem */
}")

;; Force the destination to a register, so all source operands are allowed.

(define_insn "movhi"
  [(set (match_operand:HI 0 "general_operand" "=r")
	(match_operand:HI 1 "general_operand" "g"))]
  ""
  "*
{
  if (GET_CODE (operands[1]) != MEM)
    return \"movw %1,%0\";		/* reg imm -> reg */
  return \"cvthw %1,%0\";		/* mem -> reg */
}")
784
;; If the destination is a memory address, indexed source operands are
;; disallowed.

(define_insn ""
  [(set (match_operand:QI 0 "memory_operand" "=m")
	(match_operand:QI 1 "nonindexed_operand" "g"))]
  ""
  "*
{
  if (REG_P (operands[1]))
    return \"cvtwb %1,%0\";		/* reg -> mem */
  else
    return \"movb %1,%0\";		/* mem imm -> mem */
}")

;; Force the destination to a register, so all source operands are allowed.

(define_insn "movqi"
  [(set (match_operand:QI 0 "general_operand" "=r")
	(match_operand:QI 1 "general_operand" "g"))]
  ""
  "*
{
  if (GET_CODE (operands[1]) != MEM)
    return \"movw %1,%0\";		/* reg imm -> reg */
  return \"cvtbw %1,%0\";		/* mem -> reg */
}")
812
;; If the destination is a memory address, indexed source operands are
;; disallowed.

(define_insn ""
  [(set (match_operand:DF 0 "memory_operand" "=m")
	(match_operand:DF 1 "nonindexed_operand" "g"))]
  "GET_CODE (operands[1]) != CONST_DOUBLE"
  "movl %1,%0")

;; Force the destination to a register, so all source operands are allowed.

(define_insn "movdf"
  [(set (match_operand:DF 0 "general_operand" "=r")
	(match_operand:DF 1 "general_operand" "gF"))]
  ""
  "*
{
  extern char *output_move_double ();
  return output_move_double (operands);
}")
833
;; If the destination is a memory address, indexed source operands are
;; disallowed.

(define_insn ""
  [(set (match_operand:SF 0 "memory_operand" "=m")
	(match_operand:SF 1 "nonindexed_operand" "g"))]
  ""
  "movw %1,%0")

;; Force the destination to a register, so all source operands are allowed.

(define_insn "movsf"
  [(set (match_operand:SF 0 "general_operand" "=r")
	(match_operand:SF 1 "general_operand" "g"))]
  ""
  "movw %1,%0")
850
;; Load an effective address into a register.  mova does not set the
;; condition codes, hence forget_cc_if_dependent.
(define_insn ""
  [(set (match_operand:SI 0 "register_operand" "=r")
	(match_operand:QI 1 "address_operand" "p"))]
  ""
  "*
{
  forget_cc_if_dependent (operands[0]);
  return \"mova %a1,%0\";
}")
\f
;______________________________________________________________________
;
;	Conversion patterns.
;______________________________________________________________________
;; The trunc patterns are used only when non compile-time constants are used.
;; A same-register truncation is a no-op; otherwise a plain word move
;; suffices (which clobbers cc, hence forget_cc_if_dependent).

(define_insn "truncsiqi2"
  [(set (match_operand:QI 0 "register_operand" "=r")
	(truncate:QI (match_operand:SI 1 "nonimmediate_operand" "rm")))]
  ""
  "*
{
  if (REG_P (operands[0]) && REG_P (operands[1])
      && REGNO (operands[0]) == REGNO (operands[1]))
    {
      cc_status = cc_prev_status;
      return \"\";
    }
  forget_cc_if_dependent (operands[0]);
  return \"movw %1,%0\";
}")

(define_insn "truncsihi2"
  [(set (match_operand:HI 0 "register_operand" "=r")
	(truncate:HI (match_operand:SI 1 "nonimmediate_operand" "rm")))]
  ""
  "*
{
  if (REG_P (operands[0]) && REG_P (operands[1])
      && REGNO (operands[0]) == REGNO (operands[1]))
    {
      cc_status = cc_prev_status;
      return \"\";
    }
  forget_cc_if_dependent (operands[0]);
  return \"movw %1,%0\";
}")
899
;; Sign extensions.  already_sign_extended scans backward to prove the
;; register already holds a sign-extended value, in which case the
;; extension is omitted entirely.

(define_insn "extendhisi2"
  [(set (match_operand:SI 0 "general_operand" "=r,m")
	(sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "rm,r")))]
  ""
  "*
{
  if (optimize && REG_P (operands[0]) && REG_P (operands[1])
      && REGNO (operands[0]) == REGNO (operands[1])
      && already_sign_extended (insn, HImode, operands[0]))
    {
      cc_status = cc_prev_status;
      return \"\";
    }
  return \"cvthw %1,%0\";
}")

(define_insn "extendqisi2"
  [(set (match_operand:SI 0 "general_operand" "=r,m")
	(sign_extend:SI (match_operand:QI 1 "nonimmediate_operand" "rm,r")))]
  ""
  "*
{
  if (optimize && REG_P (operands[0]) && REG_P (operands[1])
      && REGNO (operands[0]) == REGNO (operands[1])
      && already_sign_extended (insn, QImode, operands[0]))
    {
      cc_status = cc_prev_status;
      return \"\";
    }
  return \"cvtbw %1,%0\";
}")
931
; Pyramid doesn't have insns *called* "cvtbh" or "movzbh".
; But we can cvtbw/movzbw into a register, where there is no distinction
; between words and halfwords.

(define_insn "extendqihi2"
  [(set (match_operand:HI 0 "register_operand" "=r")
	(sign_extend:HI (match_operand:QI 1 "nonimmediate_operand" "rm")))]
  ""
  "cvtbw %1,%0")

;; Zero extensions always clear the sign bit, so cc is "not negative".

(define_insn "zero_extendhisi2"
  [(set (match_operand:SI 0 "register_operand" "=r")
	(zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "rm")))]
  ""
  "*
{
  cc_status.flags = CC_NOT_NEGATIVE;
  return \"movzhw %1,%0\";
}")

(define_insn "zero_extendqisi2"
  [(set (match_operand:SI 0 "register_operand" "=r")
	(zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "rm")))]
  ""
  "*
{
  cc_status.flags = CC_NOT_NEGATIVE;
  return \"movzbw %1,%0\";
}")

(define_insn "zero_extendqihi2"
  [(set (match_operand:HI 0 "register_operand" "=r")
	(zero_extend:HI (match_operand:QI 1 "nonimmediate_operand" "rm")))]
  ""
  "*
{
  cc_status.flags = CC_NOT_NEGATIVE;
  return \"movzbw %1,%0\";
}")
971
;; Floating-point format and int<->float conversions.  The register
;; alternatives are earlyclobbered since source and destination may
;; differ in size.

(define_insn "extendsfdf2"
  [(set (match_operand:DF 0 "general_operand" "=&r,m")
	(float_extend:DF (match_operand:SF 1 "nonimmediate_operand" "rm,r")))]
  ""
  "cvtfd %1,%0")

(define_insn "truncdfsf2"
  [(set (match_operand:SF 0 "general_operand" "=&r,m")
	(float_truncate:SF (match_operand:DF 1 "nonimmediate_operand" "rm,r")))]
  ""
  "cvtdf %1,%0")

(define_insn "floatsisf2"
  [(set (match_operand:SF 0 "general_operand" "=&r,m")
	(float:SF (match_operand:SI 1 "nonimmediate_operand" "rm,r")))]
  ""
  "cvtwf %1,%0")

(define_insn "floatsidf2"
  [(set (match_operand:DF 0 "general_operand" "=&r,m")
	(float:DF (match_operand:SI 1 "nonimmediate_operand" "rm,r")))]
  ""
  "cvtwd %1,%0")

(define_insn "fix_truncsfsi2"
  [(set (match_operand:SI 0 "general_operand" "=&r,m")
	(fix:SI (fix:SF (match_operand:SF 1 "nonimmediate_operand" "rm,r"))))]
  ""
  "cvtfw %1,%0")

(define_insn "fix_truncdfsi2"
  [(set (match_operand:SI 0 "general_operand" "=&r,m")
	(fix:SI (fix:DF (match_operand:DF 1 "nonimmediate_operand" "rm,r"))))]
  ""
  "cvtdw %1,%0")
\f
;______________________________________________________________________
;
;	Flow Control Patterns.
;______________________________________________________________________
;; Prefer "br" to "jump" for unconditional jumps, since it's faster.
;; (The assembler can manage with out-of-range branches.)

(define_insn "jump"
  [(set (pc)
	(label_ref (match_operand 0 "" "")))]
  ""
  "br %l0")
1021
;; Conditional branch.  Returning 0 aborts: it means the preceding
;; compare's signedness (recorded in cc_prev_status.mdep) disagrees
;; with the branch condition, which the compare patterns should have
;; prevented.
(define_insn ""
  [(set (pc)
	(if_then_else (match_operator 0 "relop" [(cc0) (const_int 0)])
		      (label_ref (match_operand 1 "" ""))
		      (pc)))]
  ""
  "*
{
  if (optimize)
    switch (GET_CODE (operands[0]))
      {
      case EQ: case NE:
	break;
      case LT: case LE: case GE: case GT:
	if (cc_prev_status.mdep == CC_VALID_FOR_UNSIGNED)
	  return 0;
	break;
      case LTU: case LEU: case GEU: case GTU:
	if (cc_prev_status.mdep != CC_VALID_FOR_UNSIGNED)
	  return 0;
	break;
      }

  return \"b%N0 %l1\";
}")
1047
;; Inverted conditional branch (falls through to the label arm);
;; same signedness sanity check as the pattern above, but emits the
;; complemented condition (%C0).
(define_insn ""
  [(set (pc)
	(if_then_else (match_operator 0 "relop" [(cc0) (const_int 0)])
		      (pc)
		      (label_ref (match_operand 1 "" ""))))]
  ""
  "*
{
  if (optimize)
    switch (GET_CODE (operands[0]))
      {
      case EQ: case NE:
	break;
      case LT: case LE: case GE: case GT:
	if (cc_prev_status.mdep == CC_VALID_FOR_UNSIGNED)
	  return 0;
	break;
      case LTU: case LEU: case GEU: case GTU:
	if (cc_prev_status.mdep != CC_VALID_FOR_UNSIGNED)
	  return 0;
	break;
      }

  return \"b%C0 %l1\";
}")
1073
;; Call with no value returned.  Operand 0 is the call address as a
;; QImode memory reference (the era's convention for call targets);
;; operand 1 is the argument-bytes count, matched but not used in the
;; output (see the note on call_value below).
(define_insn "call"
  [(call (match_operand:QI 0 "memory_operand" "m")
	 (match_operand:SI 1 "immediate_operand" "n"))]
  ""
  "call %0")
1079
;; Call returning a value in operand 0 (a register).  Operand 1 is
;; the QImode memory call target, operand 2 the argument-bytes count.
(define_insn "call_value"
  [(set (match_operand 0 "" "=r")
	(call (match_operand:QI 1 "memory_operand" "m")
	      (match_operand:SI 2 "immediate_operand" "n")))]
  ;; Operand 2 not really used on Pyramid architecture.
  ""
  "call %1")
1087
;; Function epilogue/return.  When the function has a frame, pretend
;; args, incoming args, or calls alloca, emit "retd n" which returns
;; and deallocates n bytes; otherwise plain "ret".  The deallocation
;; amount always includes the pretend-args size, and additionally the
;; incoming-args size when this function pops its own arguments.
;; operands[0] is synthesized on the fly as a CONST_INT holding that
;; byte count (old-style gen_rtx CONST_INT of this GCC vintage).
(define_insn "return"
  [(return)]
  ""
  "*
{
  if (get_frame_size () + current_function_pretend_args_size
      + current_function_args_size != 0
      || current_function_calls_alloca)
    {
      int dealloc_size = current_function_pretend_args_size;
      if (current_function_pops_args)
	dealloc_size += current_function_args_size;
      operands[0] = gen_rtx (CONST_INT, VOIDmode, dealloc_size);
      return \"retd %0\";
    }
  else
    return \"ret\";
}")
1106
;; Jump through a register (dispatch-table jump).  The (use
;; (label_ref ...)) keeps the jump table's label alive so the table
;; is not deleted as dead code.
(define_insn "tablejump"
  [(set (pc) (match_operand:SI 0 "register_operand" "r"))
   (use (label_ref (match_operand 1 "" "")))]
  ""
  "jump (%0)")
1112
;; No-op: move gr0 to itself (the Pyramid has no dedicated nop
;; mnemonic here, so a self-move is used and commented as such in
;; the assembly output).
(define_insn "nop"
  [(const_int 0)]
  ""
  "movw gr0,gr0 # nop")
1117 \f
1118 ;______________________________________________________________________
1119 ;
1120 ; Peep-hole Optimization Patterns.
1121 ;______________________________________________________________________
1122
1123 ;; Optimize fullword move followed by a test of the moved value.
1124
;; Peephole: fullword register move immediately followed by a cc0
;; test of either the moved value's source or destination.  Replaced
;; by "mtstw", which presumably moves and sets the condition codes in
;; one instruction (confirm mnemonic semantics in the Pyramid manual).
;; CC_NO_OVERFLOW is set because a move cannot overflow, so overflow-
;; sensitive branch conditions remain usable.
(define_peephole
  [(set (match_operand:SI 0 "register_operand" "=r")
	(match_operand:SI 1 "nonimmediate_operand" "rm"))
   (set (cc0) (match_operand:SI 2 "nonimmediate_operand" "rm"))]
  "rtx_equal_p (operands[2], operands[0])
   || rtx_equal_p (operands[2], operands[1])"
  "*
  cc_status.flags |= CC_NO_OVERFLOW;
  return \"mtstw %1,%0\";
")
1135
1136 ;; Optimize loops with an incremented/decremented variable.
1137
;; Peephole: loop ending of the form "reg -= 1; compare; signed
;; branch", where the compared register (or the compare's second
;; operand) is the decremented register.  Collapsed into "dcmpw" —
;; presumably decrement-and-compare in one instruction — followed by
;; the branch.  When operand 0 is the *first* compare operand the
;; branch condition is emitted via %N3; when it is the second, the
;; operands are swapped in the dcmpw and the condition is emitted
;; via %R3 (presumably the reversed/swapped-operand form — confirm
;; both print_operand codes in pyr.c).
;;
;; NOTE(review): the CONST_INT guard accepts only
;; (unsigned) INTVAL + 32 >= 64, i.e. constants *outside* [-32, 63-32)
;; — the opposite of a typical fits-in-immediate test.  Verify this
;; against dcmpw's immediate encoding; it looks inverted.
(define_peephole
  [(set (match_operand:SI 0 "register_operand" "=r")
	(plus:SI (match_dup 0)
		 (const_int -1)))
   (set (cc0)
	(compare (match_operand:SI 1 "register_operand" "r")
		 (match_operand:SI 2 "nonmemory_operand" "ri")))
   (set (pc)
	(if_then_else (match_operator:SI 3 "signed_comparison"
				      [(cc0) (const_int 0)])
		      (label_ref (match_operand 4 "" ""))
		      (pc)))]
  "(GET_CODE (operands[2]) == CONST_INT
    ? (unsigned)INTVAL (operands[2]) + 32 >= 64
    : 1) && (rtx_equal_p (operands[0], operands[1])
	     || rtx_equal_p (operands[0], operands[2]))"
  "*
  if (rtx_equal_p (operands[0], operands[1]))
    {
      output_asm_insn (\"dcmpw %2,%0\", operands);
      return \"b%N3 %l4\";
    }
  else
    {
      output_asm_insn (\"dcmpw %1,%0\", operands);
      return \"b%R3 %l4\";
    }
")
1166
;; Peephole: mirror of the decrement-loop pattern above for "reg +=
;; 1; compare; signed branch", collapsed into "icmpw" (presumably
;; increment-and-compare).  Operand-order and %N3/%R3 handling are
;; identical to the dcmpw case; the same NOTE(review) about the
;; seemingly inverted CONST_INT range guard applies here too.
(define_peephole
  [(set (match_operand:SI 0 "register_operand" "=r")
	(plus:SI (match_dup 0)
		 (const_int 1)))
   (set (cc0)
	(compare (match_operand:SI 1 "register_operand" "r")
		 (match_operand:SI 2 "nonmemory_operand" "ri")))
   (set (pc)
	(if_then_else (match_operator:SI 3 "signed_comparison"
				      [(cc0) (const_int 0)])
		      (label_ref (match_operand 4 "" ""))
		      (pc)))]
  "(GET_CODE (operands[2]) == CONST_INT
    ? (unsigned)INTVAL (operands[2]) + 32 >= 64
    : 1) && (rtx_equal_p (operands[0], operands[1])
	     || rtx_equal_p (operands[0], operands[2]))"
  "*
  if (rtx_equal_p (operands[0], operands[1]))
    {
      output_asm_insn (\"icmpw %2,%0\", operands);
      return \"b%N3 %l4\";
    }
  else
    {
      output_asm_insn (\"icmpw %1,%0\", operands);
      return \"b%R3 %l4\";
    }
")
1195
1196 ;; Combine two word moves with consecutive operands into one long move.
1197 ;; Also combines immediate moves, if the high-order destination operand
1198 ;; is loaded with 0 or -1 and the low-order destination operand is loaded
1199 ;; with a constant with the same sign.
1200
;; Peephole: fuse two adjacent word moves into one "movl" when
;; movdi_possible (defined in pyr.c) says the operands are
;; consecutive.  The two "# COMBINE" lines are assembly comments
;; left in the output for debugging.
;;
;; NOTE(review): movdi_possible is called a second time purely for
;; its side effects — presumably it (re)sets the global
;; `swap_operands` consulted below; confirm in pyr.c.  For the
;; immediate case (CONSTANT_P) the low-order destination/source pair
;; is used; otherwise the high-order pair.
(define_peephole
  [(set (match_operand:SI 0 "general_operand" "=g")
	(match_operand:SI 1 "general_operand" "g"))
   (set (match_operand:SI 2 "general_operand" "=g")
	(match_operand:SI 3 "general_operand" "g"))]
  "movdi_possible (operands)"
  "*
{
  output_asm_insn (\"# COMBINE movw %1,%0\", operands);
  output_asm_insn (\"# COMBINE movw %3,%2\", operands);
  movdi_possible (operands);
  if (CONSTANT_P (operands[1]))
    return (swap_operands ? \"movl %3,%0\" : \"movl %1,%2\");

  return (swap_operands ? \"movl %1,%0\" : \"movl %3,%2\");
}")
1217
1218 ;; Optimize certain tests after memory stores.
1219
;; Peephole: narrow store + sign-extend of the stored register + test
;; of the extension, where the extended copy (operand 2) dies here.
;; All three collapse into a single convert-and-store ("cvtwb" for a
;; QImode destination, "cvtwh" otherwise — i.e. HImode), which
;; evidently sets the condition codes as a side effect.
;; CC_NO_OVERFLOW for the same reason as the move+test peephole.
(define_peephole
  [(set (match_operand 0 "memory_operand" "=m")
	(match_operand 1 "register_operand" "r"))
   (set (match_operand:SI 2 "register_operand" "=r")
	(sign_extend:SI (match_dup 1)))
   (set (cc0)
	(match_dup 2))]
  "dead_or_set_p (insn, operands[2])"
  "*
  cc_status.flags |= CC_NO_OVERFLOW;
  if (GET_MODE (operands[0]) == QImode)
    return \"cvtwb %1,%0\";
  else
    return \"cvtwh %1,%0\";
")
1235 \f
1236 ;______________________________________________________________________
1237 ;
1238 ; DImode Patterns.
1239 ;______________________________________________________________________
1240
;; Sign-extend SImode to DImode, expanded as three moves:
;;   1. put the source in the low word (subreg 1 of the DI register),
;;   2. copy the low word into the high word (subreg 0),
;;   3. arithmetic-shift the high word right by 31 to replicate the
;;      sign bit.
;; Subreg 1 being the *low* word matches the word ordering the DImode
;; arithmetic patterns below assume (low half at REGNO + 1).
(define_expand "extendsidi2"
  [(set (subreg:SI (match_operand:DI 0 "register_operand" "=r") 1)
	(match_operand:SI 1 "general_operand" "g"))
   (set (subreg:SI (match_dup 0) 0)
	(subreg:SI (match_dup 0) 1))
   (set (subreg:SI (match_dup 0) 0)
	(ashiftrt:SI (subreg:SI (match_dup 0) 0)
		     (const_int 31)))]
  ""
  "")
1251
;; 64-bit add, emitted as "addw" on the low words followed by "addwc"
;; (add with carry) on the high words.  The low half of a DImode
;; register pair lives at REGNO + 1 — established by the explicit
;; gen_rtx (REG, ... + 1) used for the first (low-word) add and by
;; CONST_DOUBLE_LOW feeding the same slot in the immediate case.
;; CC_STATUS_INIT: the resulting flags describe only the high-word
;; add, so any cached cc0 knowledge is invalidated.  "%0" forces
;; operand 1 into the output register; "%" makes the plus commutative
;; for reload.
(define_insn "adddi3"
  [(set (match_operand:DI 0 "register_operand" "=r")
	(plus:DI (match_operand:DI 1 "nonmemory_operand" "%0")
		 (match_operand:DI 2 "nonmemory_operand" "rF")))]
  ""
  "*
{
  rtx xoperands[2];
  CC_STATUS_INIT;
  xoperands[0] = gen_rtx (REG, SImode, REGNO (operands[0]) + 1);
  if (REG_P (operands[2]))
    xoperands[1] = gen_rtx (REG, SImode, REGNO (operands[2]) + 1);
  else
    {
      xoperands[1] = gen_rtx (CONST_INT, VOIDmode,
			      CONST_DOUBLE_LOW (operands[2]));
      operands[2] = gen_rtx (CONST_INT, VOIDmode,
			     CONST_DOUBLE_HIGH (operands[2]));
    }
  output_asm_insn (\"addw %1,%0\", xoperands);
  return \"addwc %2,%0\";
}")
1274
;; 64-bit subtract: "subw" on the low words then "subwb" (presumably
;; subtract-with-borrow) on the high words.  Same REGNO + 1 low-word
;; convention and CC_STATUS_INIT rationale as adddi3; no "%" here
;; since subtraction is not commutative, and operand 1 is tied to the
;; output with constraint "0".
(define_insn "subdi3"
  [(set (match_operand:DI 0 "register_operand" "=r")
	(minus:DI (match_operand:DI 1 "register_operand" "0")
		  (match_operand:DI 2 "nonmemory_operand" "rF")))]
  ""
  "*
{
  rtx xoperands[2];
  CC_STATUS_INIT;
  xoperands[0] = gen_rtx (REG, SImode, REGNO (operands[0]) + 1);
  if (REG_P (operands[2]))
    xoperands[1] = gen_rtx (REG, SImode, REGNO (operands[2]) + 1);
  else
    {
      xoperands[1] = gen_rtx (CONST_INT, VOIDmode,
			      CONST_DOUBLE_LOW (operands[2]));
      operands[2] = gen_rtx (CONST_INT, VOIDmode,
			     CONST_DOUBLE_HIGH (operands[2]));
    }
  output_asm_insn (\"subw %1,%0\", xoperands);
  return \"subwb %2,%0\";
}")
1297
;; 64-bit inclusive-or: "orw" on the low words, "orw" on the high
;; words (bitwise ops carry nothing between halves, so the same
;; mnemonic is used twice).  Same REGNO + 1 low-word convention,
;; CONST_DOUBLE splitting, and CC_STATUS_INIT as the other DImode
;; patterns in this section.
(define_insn "iordi3"
  [(set (match_operand:DI 0 "register_operand" "=r")
	(ior:DI (match_operand:DI 1 "nonmemory_operand" "%0")
		(match_operand:DI 2 "nonmemory_operand" "rF")))]
  ""
  "*
{
  rtx xoperands[2];
  CC_STATUS_INIT;
  xoperands[0] = gen_rtx (REG, SImode, REGNO (operands[0]) + 1);
  if (REG_P (operands[2]))
    xoperands[1] = gen_rtx (REG, SImode, REGNO (operands[2]) + 1);
  else
    {
      xoperands[1] = gen_rtx (CONST_INT, VOIDmode,
			      CONST_DOUBLE_LOW (operands[2]));
      operands[2] = gen_rtx (CONST_INT, VOIDmode,
			     CONST_DOUBLE_HIGH (operands[2]));
    }
  output_asm_insn (\"orw %1,%0\", xoperands);
  return \"orw %2,%0\";
}")
1320
;; 64-bit bitwise-and: "andw" applied independently to the low and
;; high word pairs.  Structure identical to iordi3 above.
(define_insn "anddi3"
  [(set (match_operand:DI 0 "register_operand" "=r")
	(and:DI (match_operand:DI 1 "nonmemory_operand" "%0")
		(match_operand:DI 2 "nonmemory_operand" "rF")))]
  ""
  "*
{
  rtx xoperands[2];
  CC_STATUS_INIT;
  xoperands[0] = gen_rtx (REG, SImode, REGNO (operands[0]) + 1);
  if (REG_P (operands[2]))
    xoperands[1] = gen_rtx (REG, SImode, REGNO (operands[2]) + 1);
  else
    {
      xoperands[1] = gen_rtx (CONST_INT, VOIDmode,
			      CONST_DOUBLE_LOW (operands[2]));
      operands[2] = gen_rtx (CONST_INT, VOIDmode,
			     CONST_DOUBLE_HIGH (operands[2]));
    }
  output_asm_insn (\"andw %1,%0\", xoperands);
  return \"andw %2,%0\";
}")
1343
;; 64-bit exclusive-or: "xorw" applied independently to the low and
;; high word pairs.  Structure identical to iordi3/anddi3 above.
(define_insn "xordi3"
  [(set (match_operand:DI 0 "register_operand" "=r")
	(xor:DI (match_operand:DI 1 "nonmemory_operand" "%0")
		(match_operand:DI 2 "nonmemory_operand" "rF")))]
  ""
  "*
{
  rtx xoperands[2];
  CC_STATUS_INIT;
  xoperands[0] = gen_rtx (REG, SImode, REGNO (operands[0]) + 1);
  if (REG_P (operands[2]))
    xoperands[1] = gen_rtx (REG, SImode, REGNO (operands[2]) + 1);
  else
    {
      xoperands[1] = gen_rtx (CONST_INT, VOIDmode,
			      CONST_DOUBLE_LOW (operands[2]));
      operands[2] = gen_rtx (CONST_INT, VOIDmode,
			     CONST_DOUBLE_HIGH (operands[2]));
    }
  output_asm_insn (\"xorw %1,%0\", xoperands);
  return \"xorw %2,%0\";
}")
1366
1367 ;; My version, modelled after Jonathan Stone's and "tablejump" - S.P.
;; Indirect jump through a register; same "jump (%0)" output as
;; tablejump but with no (use (label_ref ...)), since there is no
;; dispatch table to keep alive.
(define_insn "indirect_jump"
  [(set (pc) (match_operand:SI 0 "general_operand" "r"))]
  ""
  "jump (%0)")
;; (stray HTML footer from the web-scraped git blob removed)