1 /* Subroutines used by or related to instruction recognition.
2 Copyright (C) 1987, 1988, 91-98, 1999 Free Software Foundation, Inc.
3
4 This file is part of GNU CC.
5
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
20
21
22 #include "config.h"
23 #include "system.h"
24 #include "rtl.h"
25 #include "insn-config.h"
26 #include "insn-attr.h"
27 #include "insn-flags.h"
28 #include "insn-codes.h"
29 #include "recog.h"
30 #include "regs.h"
31 #include "hard-reg-set.h"
32 #include "flags.h"
33 #include "real.h"
34 #include "toplev.h"
35
36 #ifndef STACK_PUSH_CODE
37 #ifdef STACK_GROWS_DOWNWARD
38 #define STACK_PUSH_CODE PRE_DEC
39 #else
40 #define STACK_PUSH_CODE PRE_INC
41 #endif
42 #endif
43
44 static void validate_replace_rtx_1 PROTO((rtx *, rtx, rtx, rtx));
45 static rtx *find_single_use_1 PROTO((rtx, rtx *));
46 static rtx *find_constant_term_loc PROTO((rtx *));
47 static int insn_invalid_p PROTO((rtx));
48
49 /* Nonzero means allow operands to be volatile.
50 This should be 0 if you are generating rtl, such as if you are calling
51 the functions in optabs.c and expmed.c (most of the time).
52 This should be 1 if all valid insns need to be recognized,
53 such as in regclass.c and final.c and reload.c.
54
55 init_recog and init_recog_no_volatile are responsible for setting this. */
56
57 int volatile_ok;
58
59 /* The next variables are set up by extract_insn. The first four of them
60 are also set up during insn_extract. */
61
62 /* Indexed by N, gives value of operand N. */
63 rtx recog_operand[MAX_RECOG_OPERANDS];
64
65 /* Indexed by N, gives location where operand N was found. */
66 rtx *recog_operand_loc[MAX_RECOG_OPERANDS];
67
68 /* Indexed by N, gives location where the Nth duplicate-appearance of
69 an operand was found. This is something that matched MATCH_DUP. */
70 rtx *recog_dup_loc[MAX_RECOG_OPERANDS];
71
72 /* Indexed by N, gives the operand number that was duplicated in the
73 Nth duplicate-appearance of an operand. */
74 char recog_dup_num[MAX_RECOG_OPERANDS];
75
76 /* The number of operands of the insn. */
77 int recog_n_operands;
78
79 /* The number of MATCH_DUPs in the insn. */
80 int recog_n_dups;
81
82 /* The number of alternatives in the constraints for the insn. */
83 int recog_n_alternatives;
84
85 /* Indexed by N, gives the mode of operand N. */
86 enum machine_mode recog_operand_mode[MAX_RECOG_OPERANDS];
87
88 /* Indexed by N, gives the constraint string for operand N. */
89 char *recog_constraints[MAX_RECOG_OPERANDS];
90
91 /* Indexed by N, gives the type (in, out, inout) for operand N. */
92 enum op_type recog_op_type[MAX_RECOG_OPERANDS];
93
94 #ifndef REGISTER_CONSTRAINTS
95 /* Indexed by N, nonzero if operand N should be an address. */
96 char recog_operand_address_p[MAX_RECOG_OPERANDS];
97 #endif
98
99 /* Contains a vector of operand_alternative structures for every operand.
100 Set up by preprocess_constraints. */
101 struct operand_alternative recog_op_alt[MAX_RECOG_OPERANDS][MAX_RECOG_ALTERNATIVES];
102
103 /* On return from `constrain_operands', indicate which alternative
104 was satisfied. */
105
106 int which_alternative;
107
108 /* Nonzero after end of reload pass.
109 Set to 1 or 0 by toplev.c.
110 Controls the significance of (SUBREG (MEM)). */
111
112 int reload_completed;
113
114 /* Initialize data used by the function `recog'.
115 This must be called once in the compilation of a function
116 before any insn recognition may be done in the function. */
117
118 void
119 init_recog_no_volatile ()
120 {
121 volatile_ok = 0;
122 }
123
124 void
125 init_recog ()
126 {
127 volatile_ok = 1;
128 }
129
130 /* Try recognizing the instruction INSN,
131 and return the code number that results.
132 Remember the code so that repeated calls do not
133 need to spend time re-recognizing it.
134
135 This function is the normal interface to instruction recognition.
136 The automatically-generated function `recog' is normally called
137 through this one. (The only exception is in combine.c.) */
138
139 int
140 recog_memoized (insn)
141 rtx insn;
142 {
143 if (INSN_CODE (insn) < 0)
144 INSN_CODE (insn) = recog (PATTERN (insn), insn, NULL_PTR);
145 return INSN_CODE (insn);
146 }
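
/* Usage sketch (editorial addition, not part of the original source):
   most passes simply test the cached code number, e.g.

     if (recog_memoized (insn) < 0)
       ... INSN matches no pattern in the machine description ...

   The result is stored in INSN_CODE (insn), so repeated queries on the
   same insn are cheap.  */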
147 \f
148 /* Check that X is an insn-body for an `asm' with operands
149 and that the operands mentioned in it are legitimate. */
150
151 int
152 check_asm_operands (x)
153 rtx x;
154 {
155 int noperands;
156 rtx *operands;
157 char **constraints;
158 int i;
159
160 /* Post-reload, be stricter: the operands must also satisfy their constraints. */
161 if (reload_completed)
162 {
163 /* ??? Doh! We've not got the wrapping insn. Cook one up. */
164 extract_insn (make_insn_raw (x));
165 constrain_operands (1);
166 return which_alternative >= 0;
167 }
168
169 noperands = asm_noperands (x);
170 if (noperands < 0)
171 return 0;
172 if (noperands == 0)
173 return 1;
174
175 operands = (rtx *) alloca (noperands * sizeof (rtx));
176 constraints = (char **) alloca (noperands * sizeof (char *));
177
178 decode_asm_operands (x, operands, NULL_PTR, constraints, NULL_PTR);
179
180 for (i = 0; i < noperands; i++)
181 {
182 char *c = constraints[i];
183 if (ISDIGIT ((unsigned char)c[0]))
184 c = constraints[c[0] - '0'];
185
186 if (! asm_operand_ok (operands[i], c))
187 return 0;
188 }
189
190 return 1;
191 }
192 \f
193 /* Static data for the next two routines. */
194
195 typedef struct change_t
196 {
197 rtx object;
198 int old_code;
199 rtx *loc;
200 rtx old;
201 } change_t;
202
203 static change_t *changes;
204 static int changes_allocated;
205
206 static int num_changes = 0;
207
208 /* Validate a proposed change to OBJECT. LOC is the location in the rtl
209 at which NEW will be placed. If OBJECT is zero, no validation is done;
210 the change is simply made.
211
212 Two types of objects are supported: If OBJECT is a MEM, memory_address_p
213 will be called with the address and mode as parameters. If OBJECT is
214 an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
215 the change in place.
216
217 IN_GROUP is non-zero if this is part of a group of changes that must be
218 performed as a group. In that case, the changes will be stored. The
219 function `apply_change_group' will validate and apply the changes.
220
221 If IN_GROUP is zero, this is a single change. Try to recognize the insn
222 or validate the memory reference with the change applied. If the result
223 is not valid for the machine, suppress the change and return zero.
224 Otherwise, perform the change and return 1. */
225
226 int
227 validate_change (object, loc, new, in_group)
228 rtx object;
229 rtx *loc;
230 rtx new;
231 int in_group;
232 {
233 rtx old = *loc;
234
235 if (old == new || rtx_equal_p (old, new))
236 return 1;
237
238 if (in_group == 0 && num_changes != 0)
239 abort ();
240
241 *loc = new;
242
243 /* Save the information describing this change. */
244 if (num_changes >= changes_allocated)
245 {
246 if (changes_allocated == 0)
247 /* This value allows for repeated substitutions inside complex
248 indexed addresses, or changes in up to 5 insns. */
249 changes_allocated = MAX_RECOG_OPERANDS * 5;
250 else
251 changes_allocated *= 2;
252
253 changes =
254 (change_t*) xrealloc (changes,
255 sizeof (change_t) * changes_allocated);
256 }
257
258 changes[num_changes].object = object;
259 changes[num_changes].loc = loc;
260 changes[num_changes].old = old;
261
262 if (object && GET_CODE (object) != MEM)
263 {
264 /* Set INSN_CODE to force rerecognition of insn. Save old code in
265 case invalid. */
266 changes[num_changes].old_code = INSN_CODE (object);
267 INSN_CODE (object) = -1;
268 }
269
270 num_changes++;
271
272 /* If we are making a group of changes, return 1. Otherwise, validate the
273 change group we made. */
274
275 if (in_group)
276 return 1;
277 else
278 return apply_change_group ();
279 }
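
/* Usage sketch (editorial addition; assumes PATTERN (INSN) is a SET and
   NEW_SRC is some replacement rtx):

     if (validate_change (insn, &SET_SRC (PATTERN (insn)), new_src, 0))
       ... the change was kept and INSN re-recognized ...
     else
       ... the change was rejected and the old rtl restored ...

   Passing 1 for IN_GROUP instead only queues the change; the caller must
   finish with apply_change_group, which validates the whole group and
   either keeps it or cancels every queued change.  */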
280
281 /* This subroutine of apply_change_group verifies whether the changes to INSN
282 were valid; i.e. whether INSN can still be recognized. */
283
284 static int
285 insn_invalid_p (insn)
286 rtx insn;
287 {
288 int icode = recog_memoized (insn);
289 int is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;
290
291 if (is_asm && ! check_asm_operands (PATTERN (insn)))
292 return 1;
293 if (! is_asm && icode < 0)
294 return 1;
295
296 /* After reload, verify that all constraints are satisfied. */
297 if (reload_completed)
298 {
299 extract_insn (insn);
300
301 if (! constrain_operands (1))
302 return 1;
303 }
304
305 return 0;
306 }
307
308 /* Apply a group of changes previously issued with `validate_change'.
309 Return 1 if all changes are valid, zero otherwise. */
310
311 int
312 apply_change_group ()
313 {
314 int i;
315
316 /* The changes have been applied and all INSN_CODEs have been reset to force
317 rerecognition.
318
319 The changes are valid if we aren't given an object, or if we are
320 given a MEM and it is still a valid address, or if this is an insn
321 and it is recognized. In the latter case, if reload has completed,
322 we also require that the operands meet the constraints for
323 the insn. */
324
325 for (i = 0; i < num_changes; i++)
326 {
327 rtx object = changes[i].object;
328
329 if (object == 0)
330 continue;
331
332 if (GET_CODE (object) == MEM)
333 {
334 if (! memory_address_p (GET_MODE (object), XEXP (object, 0)))
335 break;
336 }
337 else if (insn_invalid_p (object))
338 {
339 rtx pat = PATTERN (object);
340
341 /* Perhaps we couldn't recognize the insn because there were
342 extra CLOBBERs at the end. If so, try to re-recognize
343 without the last CLOBBER (later iterations will cause each of
344 them to be eliminated, in turn). But don't do this if we
345 have an ASM_OPERANDS. */
346 if (GET_CODE (pat) == PARALLEL
347 && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
348 && asm_noperands (PATTERN (object)) < 0)
349 {
350 rtx newpat;
351
352 if (XVECLEN (pat, 0) == 2)
353 newpat = XVECEXP (pat, 0, 0);
354 else
355 {
356 int j;
357
358 newpat = gen_rtx_PARALLEL (VOIDmode,
359 gen_rtvec (XVECLEN (pat, 0) - 1));
360 for (j = 0; j < XVECLEN (newpat, 0); j++)
361 XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
362 }
363
364 /* Add a new change to this group to replace the pattern
365 with this new pattern. Then consider this change
366 as having succeeded. The change we added will
367 cause the entire call to fail if things remain invalid.
368
369 Note that this can lose if a later change than the one
370 we are processing specified &XVECEXP (PATTERN (object), 0, X)
371 but this shouldn't occur. */
372
373 validate_change (object, &PATTERN (object), newpat, 1);
374 }
375 else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
376 /* If this insn is a CLOBBER or USE, it is always valid, but is
377 never recognized. */
378 continue;
379 else
380 break;
381 }
382 }
383
384 if (i == num_changes)
385 {
386 num_changes = 0;
387 return 1;
388 }
389 else
390 {
391 cancel_changes (0);
392 return 0;
393 }
394 }
395
396 /* Return the number of changes so far in the current group. */
397
398 int
399 num_validated_changes ()
400 {
401 return num_changes;
402 }
403
404 /* Retract the changes numbered NUM and up. */
405
406 void
407 cancel_changes (num)
408 int num;
409 {
410 int i;
411
412 /* Back out all the changes. Do this in the reverse of the order in
413 which they were made. */
414 for (i = num_changes - 1; i >= num; i--)
415 {
416 *changes[i].loc = changes[i].old;
417 if (changes[i].object && GET_CODE (changes[i].object) != MEM)
418 INSN_CODE (changes[i].object) = changes[i].old_code;
419 }
420 num_changes = num;
421 }
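
/* Sketch of the grouped-change protocol (editorial addition; X, NEW0 and
   NEW1 are placeholders):

     int start = num_validated_changes ();
     validate_change (insn, &XEXP (x, 0), new0, 1);
     validate_change (insn, &XEXP (x, 1), new1, 1);
     if (! apply_change_group ())
       ... both changes were backed out because INSN no longer matched ...

   Alternatively, cancel_changes (start) discards only the changes queued
   since START without attempting any validation.  */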
422
423 /* Replace every occurrence of FROM in X with TO. Mark each change with
424 validate_change passing OBJECT. */
425
426 static void
427 validate_replace_rtx_1 (loc, from, to, object)
428 rtx *loc;
429 rtx from, to, object;
430 {
431 register int i, j;
432 register char *fmt;
433 register rtx x = *loc;
434 enum rtx_code code = GET_CODE (x);
435
436 /* X matches FROM if it is the same rtx or they are both referring to the
437 same register in the same mode. Avoid calling rtx_equal_p unless the
438 operands look similar. */
439
440 if (x == from
441 || (GET_CODE (x) == REG && GET_CODE (from) == REG
442 && GET_MODE (x) == GET_MODE (from)
443 && REGNO (x) == REGNO (from))
444 || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
445 && rtx_equal_p (x, from)))
446 {
447 validate_change (object, loc, to, 1);
448 return;
449 }
450
451 /* For commutative or comparison operations, try replacing each argument
452 separately and see whether we made any changes. If so, put a constant
453 argument last. */
454 if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == 'c')
455 {
456 int prev_changes = num_changes;
457
458 validate_replace_rtx_1 (&XEXP (x, 0), from, to, object);
459 validate_replace_rtx_1 (&XEXP (x, 1), from, to, object);
460 if (prev_changes != num_changes && CONSTANT_P (XEXP (x, 0)))
461 {
462 validate_change (object, loc,
463 gen_rtx_fmt_ee (GET_RTX_CLASS (code) == 'c' ? code
464 : swap_condition (code),
465 GET_MODE (x), XEXP (x, 1),
466 XEXP (x, 0)),
467 1);
468 x = *loc;
469 code = GET_CODE (x);
470 }
471 }
472
473 /* Note that if CODE's RTX_CLASS is "c" or "<" we will have already
474 done the substitution; otherwise we won't have. */
475
476 switch (code)
477 {
478 case PLUS:
479 /* If we have a PLUS whose second operand is now a CONST_INT, use
480 plus_constant to try to simplify it. */
481 if (GET_CODE (XEXP (x, 1)) == CONST_INT && XEXP (x, 1) == to)
482 validate_change (object, loc, plus_constant (XEXP (x, 0), INTVAL (to)),
483 1);
484 return;
485
486 case MINUS:
487 if (GET_CODE (to) == CONST_INT && XEXP (x, 1) == from)
488 {
489 validate_change (object, loc,
490 plus_constant (XEXP (x, 0), - INTVAL (to)),
491 1);
492 return;
493 }
494 break;
495
496 case ZERO_EXTEND:
497 case SIGN_EXTEND:
498 /* In these cases, the operation to be performed depends on the mode
499 of the operand. If we are replacing the operand with a VOIDmode
500 constant, we lose the information. So try to simplify the operation
501 in that case. If it fails, substitute in something that we know
502 won't be recognized. */
503 if (GET_MODE (to) == VOIDmode
504 && (XEXP (x, 0) == from
505 || (GET_CODE (XEXP (x, 0)) == REG && GET_CODE (from) == REG
506 && GET_MODE (XEXP (x, 0)) == GET_MODE (from)
507 && REGNO (XEXP (x, 0)) == REGNO (from))))
508 {
509 rtx new = simplify_unary_operation (code, GET_MODE (x), to,
510 GET_MODE (from));
511 if (new == 0)
512 new = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
513
514 validate_change (object, loc, new, 1);
515 return;
516 }
517 break;
518
519 case SUBREG:
520 /* If we have a SUBREG of a register that we are replacing and we are
521 replacing it with a MEM, make a new MEM and try replacing the
522 SUBREG with it. Don't do this if the MEM has a mode-dependent address
523 or if we would be widening it. */
524
525 if (SUBREG_REG (x) == from
526 && GET_CODE (from) == REG
527 && GET_CODE (to) == MEM
528 && ! mode_dependent_address_p (XEXP (to, 0))
529 && ! MEM_VOLATILE_P (to)
530 && GET_MODE_SIZE (GET_MODE (x)) <= GET_MODE_SIZE (GET_MODE (to)))
531 {
532 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
533 enum machine_mode mode = GET_MODE (x);
534 rtx new;
535
536 if (BYTES_BIG_ENDIAN)
537 offset += (MIN (UNITS_PER_WORD,
538 GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
539 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
540
541 new = gen_rtx_MEM (mode, plus_constant (XEXP (to, 0), offset));
542 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (to);
543 MEM_COPY_ATTRIBUTES (new, to);
544 validate_change (object, loc, new, 1);
545 return;
546 }
547 break;
548
549 case ZERO_EXTRACT:
550 case SIGN_EXTRACT:
551 /* If we are replacing a register with memory, try to change the memory
552 to be the mode required for memory in extract operations (this isn't
553 likely to be an insertion operation; if it were, nothing bad will
554 happen; we might just fail in some cases). */
555
556 if (XEXP (x, 0) == from && GET_CODE (from) == REG && GET_CODE (to) == MEM
557 && GET_CODE (XEXP (x, 1)) == CONST_INT
558 && GET_CODE (XEXP (x, 2)) == CONST_INT
559 && ! mode_dependent_address_p (XEXP (to, 0))
560 && ! MEM_VOLATILE_P (to))
561 {
562 enum machine_mode wanted_mode = VOIDmode;
563 enum machine_mode is_mode = GET_MODE (to);
564 int pos = INTVAL (XEXP (x, 2));
565
566 #ifdef HAVE_extzv
567 if (code == ZERO_EXTRACT)
568 {
569 wanted_mode = insn_operand_mode[(int) CODE_FOR_extzv][1];
570 if (wanted_mode == VOIDmode)
571 wanted_mode = word_mode;
572 }
573 #endif
574 #ifdef HAVE_extv
575 if (code == SIGN_EXTRACT)
576 {
577 wanted_mode = insn_operand_mode[(int) CODE_FOR_extv][1];
578 if (wanted_mode == VOIDmode)
579 wanted_mode = word_mode;
580 }
581 #endif
582
583 /* If we have a narrower mode, we can do something. */
584 if (wanted_mode != VOIDmode
585 && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
586 {
587 int offset = pos / BITS_PER_UNIT;
588 rtx newmem;
589
590 /* If the bytes and bits are counted differently, we
591 must adjust the offset. */
592 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
593 offset = (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode)
594 - offset);
595
596 pos %= GET_MODE_BITSIZE (wanted_mode);
597
598 newmem = gen_rtx_MEM (wanted_mode,
599 plus_constant (XEXP (to, 0), offset));
600 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (to);
601 MEM_COPY_ATTRIBUTES (newmem, to);
602
603 validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
604 validate_change (object, &XEXP (x, 0), newmem, 1);
605 }
606 }
607
608 break;
609
610 default:
611 break;
612 }
613
614 /* For commutative or comparison operations we've already performed
615 replacements. Don't try to perform them again. */
616 if (GET_RTX_CLASS (code) != '<' && GET_RTX_CLASS (code) != 'c')
617 {
618 fmt = GET_RTX_FORMAT (code);
619 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
620 {
621 if (fmt[i] == 'e')
622 validate_replace_rtx_1 (&XEXP (x, i), from, to, object);
623 else if (fmt[i] == 'E')
624 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
625 validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object);
626 }
627 }
628 }
629
630 /* Try replacing every occurrence of FROM in INSN with TO. After all
631 changes have been made, validate by seeing if INSN is still valid. */
632
633 int
634 validate_replace_rtx (from, to, insn)
635 rtx from, to, insn;
636 {
637 validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
638 return apply_change_group ();
639 }
640
641 /* Try replacing every occurrence of FROM in INSN with TO; the changes
642 are only queued here and must be validated with apply_change_group. */
643
644 void
645 validate_replace_rtx_group (from, to, insn)
646 rtx from, to, insn;
647 {
648 validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
649 }
650
651 /* Try replacing every occurrence of FROM in INSN with TO, avoiding
652 SET_DESTs. After all changes have been made, validate by seeing if
653 INSN is still valid. */
654
655 int
656 validate_replace_src (from, to, insn)
657 rtx from, to, insn;
658 {
659 if ((GET_CODE (insn) != INSN && GET_CODE (insn) != JUMP_INSN)
660 || GET_CODE (PATTERN (insn)) != SET)
661 abort ();
662
663 validate_replace_rtx_1 (&SET_SRC (PATTERN (insn)), from, to, insn);
664 if (GET_CODE (SET_DEST (PATTERN (insn))) == MEM)
665 validate_replace_rtx_1 (&XEXP (SET_DEST (PATTERN (insn)), 0),
666 from, to, insn);
667 return apply_change_group ();
668 }
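
/* Illustrative sketch (editorial addition; PSEUDO is a placeholder reg):
   replace every use of a pseudo with a constant and keep the result only
   if INSN still matches some pattern:

     if (validate_replace_rtx (pseudo, GEN_INT (4), insn))
       ... INSN now uses (const_int 4) wherever PSEUDO appeared ...

   validate_replace_rtx_group queues the same replacements without
   validating, so several insns can be rewritten and then checked with a
   single apply_change_group call.  */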
669 \f
670 #ifdef HAVE_cc0
671 /* Return 1 if the insn using CC0 set by INSN does not contain
672 any ordered tests applied to the condition codes.
673 EQ and NE tests do not count. */
674
675 int
676 next_insn_tests_no_inequality (insn)
677 rtx insn;
678 {
679 register rtx next = next_cc0_user (insn);
680
681 /* If there is no next insn, we have to take the conservative choice. */
682 if (next == 0)
683 return 0;
684
685 return ((GET_CODE (next) == JUMP_INSN
686 || GET_CODE (next) == INSN
687 || GET_CODE (next) == CALL_INSN)
688 && ! inequality_comparisons_p (PATTERN (next)));
689 }
690
691 #if 0 /* This is useless since the insn that sets the cc's
692 must be followed immediately by the use of them. */
693 /* Return 1 if the CC value set up by INSN is not used. */
694
695 int
696 next_insns_test_no_inequality (insn)
697 rtx insn;
698 {
699 register rtx next = NEXT_INSN (insn);
700
701 for (; next != 0; next = NEXT_INSN (next))
702 {
703 if (GET_CODE (next) == CODE_LABEL
704 || GET_CODE (next) == BARRIER)
705 return 1;
706 if (GET_CODE (next) == NOTE)
707 continue;
708 if (inequality_comparisons_p (PATTERN (next)))
709 return 0;
710 if (sets_cc0_p (PATTERN (next)) == 1)
711 return 1;
712 if (! reg_mentioned_p (cc0_rtx, PATTERN (next)))
713 return 1;
714 }
715 return 1;
716 }
717 #endif
718 #endif
719 \f
720 /* This is used by find_single_use to locate an rtx that contains exactly one
721 use of DEST, which is typically either a REG or CC0. It returns a
722 pointer to the innermost rtx expression containing DEST. Appearances of
723 DEST that are being used to totally replace it are not counted. */
724
725 static rtx *
726 find_single_use_1 (dest, loc)
727 rtx dest;
728 rtx *loc;
729 {
730 rtx x = *loc;
731 enum rtx_code code = GET_CODE (x);
732 rtx *result = 0;
733 rtx *this_result;
734 int i;
735 char *fmt;
736
737 switch (code)
738 {
739 case CONST_INT:
740 case CONST:
741 case LABEL_REF:
742 case SYMBOL_REF:
743 case CONST_DOUBLE:
744 case CLOBBER:
745 return 0;
746
747 case SET:
748 /* If the destination is anything other than CC0, PC, a REG or a SUBREG
749 of a REG that occupies all of the REG, the insn uses DEST if
750 it is mentioned in the destination or the source. Otherwise, we
751 need only check the source. */
752 if (GET_CODE (SET_DEST (x)) != CC0
753 && GET_CODE (SET_DEST (x)) != PC
754 && GET_CODE (SET_DEST (x)) != REG
755 && ! (GET_CODE (SET_DEST (x)) == SUBREG
756 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG
757 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
758 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
759 == ((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
760 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))))
761 break;
762
763 return find_single_use_1 (dest, &SET_SRC (x));
764
765 case MEM:
766 case SUBREG:
767 return find_single_use_1 (dest, &XEXP (x, 0));
768
769 default:
770 break;
771 }
772
773 /* If it wasn't one of the common cases above, check each expression and
774 vector of this code. Look for a unique usage of DEST. */
775
776 fmt = GET_RTX_FORMAT (code);
777 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
778 {
779 if (fmt[i] == 'e')
780 {
781 if (dest == XEXP (x, i)
782 || (GET_CODE (dest) == REG && GET_CODE (XEXP (x, i)) == REG
783 && REGNO (dest) == REGNO (XEXP (x, i))))
784 this_result = loc;
785 else
786 this_result = find_single_use_1 (dest, &XEXP (x, i));
787
788 if (result == 0)
789 result = this_result;
790 else if (this_result)
791 /* Duplicate usage. */
792 return 0;
793 }
794 else if (fmt[i] == 'E')
795 {
796 int j;
797
798 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
799 {
800 if (XVECEXP (x, i, j) == dest
801 || (GET_CODE (dest) == REG
802 && GET_CODE (XVECEXP (x, i, j)) == REG
803 && REGNO (XVECEXP (x, i, j)) == REGNO (dest)))
804 this_result = loc;
805 else
806 this_result = find_single_use_1 (dest, &XVECEXP (x, i, j));
807
808 if (result == 0)
809 result = this_result;
810 else if (this_result)
811 return 0;
812 }
813 }
814 }
815
816 return result;
817 }
818 \f
819 /* See if DEST, produced in INSN, is used only a single time in the
820 sequel. If so, return a pointer to the innermost rtx expression in which
821 it is used.
822
823 If PLOC is non-zero, *PLOC is set to the insn containing the single use.
824
825 This routine will usually return zero, either before flow is called (because
826 there will be no LOG_LINKS notes) or after reload (because the REG_DEAD
827 note can't be trusted).
828
829 If DEST is cc0_rtx, we look only at the next insn. In that case, we don't
830 care about REG_DEAD notes or LOG_LINKS.
831
832 Otherwise, we find the single use by finding an insn that has a
833 LOG_LINKS pointing at INSN and has a REG_DEAD note for DEST. If DEST is
834 referenced only once in that insn, we know that insn must be the first
835 and last insn referencing DEST. */
836
837 rtx *
838 find_single_use (dest, insn, ploc)
839 rtx dest;
840 rtx insn;
841 rtx *ploc;
842 {
843 rtx next;
844 rtx *result;
845 rtx link;
846
847 #ifdef HAVE_cc0
848 if (dest == cc0_rtx)
849 {
850 next = NEXT_INSN (insn);
851 if (next == 0
852 || (GET_CODE (next) != INSN && GET_CODE (next) != JUMP_INSN))
853 return 0;
854
855 result = find_single_use_1 (dest, &PATTERN (next));
856 if (result && ploc)
857 *ploc = next;
858 return result;
859 }
860 #endif
861
862 if (reload_completed || reload_in_progress || GET_CODE (dest) != REG)
863 return 0;
864
865 for (next = next_nonnote_insn (insn);
866 next != 0 && GET_CODE (next) != CODE_LABEL;
867 next = next_nonnote_insn (next))
868 if (GET_RTX_CLASS (GET_CODE (next)) == 'i' && dead_or_set_p (next, dest))
869 {
870 for (link = LOG_LINKS (next); link; link = XEXP (link, 1))
871 if (XEXP (link, 0) == insn)
872 break;
873
874 if (link)
875 {
876 result = find_single_use_1 (dest, &PATTERN (next));
877 if (ploc)
878 *ploc = next;
879 return result;
880 }
881 }
882
883 return 0;
884 }
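
/* Illustrative sketch (editorial addition): a hypothetical combine-style
   caller that substitutes into the sole user of DEST:

     rtx use_insn;
     rtx *use_loc = find_single_use (dest, insn, &use_insn);

     if (use_loc != 0)
       ... *use_loc is the innermost expression in USE_INSN that uses
           DEST, a natural place to try substituting the value computed
           by INSN (validated with the routines above) ...  */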
885 \f
886 /* Return 1 if OP is a valid general operand for machine mode MODE.
887 This is either a register reference, a memory reference,
888 or a constant. In the case of a memory reference, the address
889 is checked for general validity for the target machine.
890
891 Register and memory references must have mode MODE in order to be valid,
892 but some constants have no machine mode and are valid for any mode.
893
894 If MODE is VOIDmode, OP is checked for validity for whatever mode
895 it has.
896
897 The main use of this function is as a predicate in match_operand
898 expressions in the machine description.
899
900 For an explanation of this function's behavior for registers of
901 class NO_REGS, see the comment for `register_operand'. */
902
903 int
904 general_operand (op, mode)
905 register rtx op;
906 enum machine_mode mode;
907 {
908 register enum rtx_code code = GET_CODE (op);
909 int mode_altering_drug = 0;
910
911 if (mode == VOIDmode)
912 mode = GET_MODE (op);
913
914 /* Don't accept CONST_INT or anything similar
915 if the caller wants something floating. */
916 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
917 && GET_MODE_CLASS (mode) != MODE_INT
918 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
919 return 0;
920
921 if (CONSTANT_P (op))
922 return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode)
923 #ifdef LEGITIMATE_PIC_OPERAND_P
924 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
925 #endif
926 && LEGITIMATE_CONSTANT_P (op));
927
928 /* Except for certain constants with VOIDmode, already checked for,
929 OP's mode must match MODE if MODE specifies a mode. */
930
931 if (GET_MODE (op) != mode)
932 return 0;
933
934 if (code == SUBREG)
935 {
936 #ifdef INSN_SCHEDULING
937 /* On machines that have insn scheduling, we want all memory
938 references to be explicit, so outlaw paradoxical SUBREGs. */
939 if (GET_CODE (SUBREG_REG (op)) == MEM
940 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op))))
941 return 0;
942 #endif
943
944 op = SUBREG_REG (op);
945 code = GET_CODE (op);
946 #if 0
947 /* No longer needed, since (SUBREG (MEM...))
948 will load the MEM into a reload reg in the MEM's own mode. */
949 mode_altering_drug = 1;
950 #endif
951 }
952
953 if (code == REG)
954 /* A register whose class is NO_REGS is not a general operand. */
955 return (REGNO (op) >= FIRST_PSEUDO_REGISTER
956 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS);
957
958 if (code == MEM)
959 {
960 register rtx y = XEXP (op, 0);
961 if (! volatile_ok && MEM_VOLATILE_P (op))
962 return 0;
963 if (GET_CODE (y) == ADDRESSOF)
964 return 1;
965 /* Use the mem's mode, since it will be reloaded thus. */
966 mode = GET_MODE (op);
967 GO_IF_LEGITIMATE_ADDRESS (mode, y, win);
968 }
969
970 /* Pretend this is an operand for now; we'll run force_operand
971 on its replacement in fixup_var_refs_1. */
972 if (code == ADDRESSOF)
973 return 1;
974
975 return 0;
976
977 win:
978 if (mode_altering_drug)
979 return ! mode_dependent_address_p (XEXP (op, 0));
980 return 1;
981 }
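
/* Example of the intended use (editorial addition; the pattern below is
   schematic and belongs to no particular port): in a machine description,
   general_operand appears as the predicate of a match_operand, e.g.

     (define_insn "..."
       [(set (match_operand:SI 0 "general_operand" "=g")
             (match_operand:SI 1 "general_operand" "g"))]
       ...)

   and the generated recognizer then calls general_operand (op, SImode)
   on each candidate operand.  */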
982 \f
983 /* Return 1 if OP is a valid memory address for a memory reference
984 of mode MODE.
985
986 The main use of this function is as a predicate in match_operand
987 expressions in the machine description. */
988
989 int
990 address_operand (op, mode)
991 register rtx op;
992 enum machine_mode mode;
993 {
994 return memory_address_p (mode, op);
995 }
996
997 /* Return 1 if OP is a register reference of mode MODE.
998 If MODE is VOIDmode, accept a register in any mode.
999
1000 The main use of this function is as a predicate in match_operand
1001 expressions in the machine description.
1002
1003 As a special exception, registers whose class is NO_REGS are
1004 not accepted by `register_operand'. The reason for this change
1005 is to allow the representation of special architecture artifacts
1006 (such as a condition code register) without extending the rtl
1007 definitions. Since registers of class NO_REGS cannot be used
1008 as registers in any case where register classes are examined,
1009 it is most consistent to keep this function from accepting them. */
1010
1011 int
1012 register_operand (op, mode)
1013 register rtx op;
1014 enum machine_mode mode;
1015 {
1016 if (GET_MODE (op) != mode && mode != VOIDmode)
1017 return 0;
1018
1019 if (GET_CODE (op) == SUBREG)
1020 {
1021 /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
1022 because it is guaranteed to be reloaded into one.
1023 Just make sure the MEM is valid in itself.
1024 (Ideally, (SUBREG (MEM)...) should not exist after reload,
1025 but currently it does result from (SUBREG (REG)...) where the
1026 reg went on the stack.) */
1027 if (! reload_completed && GET_CODE (SUBREG_REG (op)) == MEM)
1028 return general_operand (op, mode);
1029
1030 #ifdef CLASS_CANNOT_CHANGE_SIZE
1031 if (GET_CODE (SUBREG_REG (op)) == REG
1032 && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER
1033 && TEST_HARD_REG_BIT (reg_class_contents[(int) CLASS_CANNOT_CHANGE_SIZE],
1034 REGNO (SUBREG_REG (op)))
1035 && (GET_MODE_SIZE (mode)
1036 != GET_MODE_SIZE (GET_MODE (SUBREG_REG (op))))
1037 && GET_MODE_CLASS (GET_MODE (SUBREG_REG (op))) != MODE_COMPLEX_INT
1038 && GET_MODE_CLASS (GET_MODE (SUBREG_REG (op))) != MODE_COMPLEX_FLOAT)
1039 return 0;
1040 #endif
1041
1042 op = SUBREG_REG (op);
1043 }
1044
1045 /* We don't consider registers whose class is NO_REGS
1046 to be a register operand. */
1047 return (GET_CODE (op) == REG
1048 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
1049 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
1050 }
1051
1052 /* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
1053 or a hard register. */
1054
1055 int
1056 scratch_operand (op, mode)
1057 register rtx op;
1058 enum machine_mode mode;
1059 {
1060 return (GET_MODE (op) == mode
1061 && (GET_CODE (op) == SCRATCH
1062 || (GET_CODE (op) == REG
1063 && REGNO (op) < FIRST_PSEUDO_REGISTER)));
1064 }
1065
1066 /* Return 1 if OP is a valid immediate operand for mode MODE.
1067
1068 The main use of this function is as a predicate in match_operand
1069 expressions in the machine description. */
1070
1071 int
1072 immediate_operand (op, mode)
1073 register rtx op;
1074 enum machine_mode mode;
1075 {
1076 /* Don't accept CONST_INT or anything similar
1077 if the caller wants something floating. */
1078 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1079 && GET_MODE_CLASS (mode) != MODE_INT
1080 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1081 return 0;
1082
1083 return (CONSTANT_P (op)
1084 && (GET_MODE (op) == mode || mode == VOIDmode
1085 || GET_MODE (op) == VOIDmode)
1086 #ifdef LEGITIMATE_PIC_OPERAND_P
1087 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
1088 #endif
1089 && LEGITIMATE_CONSTANT_P (op));
1090 }
1091
1092 /* Returns 1 if OP is an operand that is a CONST_INT. */
1093
1094 int
1095 const_int_operand (op, mode)
1096 register rtx op;
1097 enum machine_mode mode ATTRIBUTE_UNUSED;
1098 {
1099 return GET_CODE (op) == CONST_INT;
1100 }
1101
1102 /* Returns 1 if OP is an operand that is a constant integer or constant
1103 floating-point number. */
1104
1105 int
1106 const_double_operand (op, mode)
1107 register rtx op;
1108 enum machine_mode mode;
1109 {
1110 /* Don't accept CONST_INT or anything similar
1111 if the caller wants something floating. */
1112 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1113 && GET_MODE_CLASS (mode) != MODE_INT
1114 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1115 return 0;
1116
1117 return ((GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT)
1118 && (mode == VOIDmode || GET_MODE (op) == mode
1119 || GET_MODE (op) == VOIDmode));
1120 }
1121
1122 /* Return 1 if OP is a general operand that is not an immediate operand. */
1123
1124 int
1125 nonimmediate_operand (op, mode)
1126 register rtx op;
1127 enum machine_mode mode;
1128 {
1129 return (general_operand (op, mode) && ! CONSTANT_P (op));
1130 }
1131
1132 /* Return 1 if OP is a register reference or immediate value of mode MODE. */
1133
1134 int
1135 nonmemory_operand (op, mode)
1136 register rtx op;
1137 enum machine_mode mode;
1138 {
1139 if (CONSTANT_P (op))
1140 {
1141 /* Don't accept CONST_INT or anything similar
1142 if the caller wants something floating. */
1143 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
1144 && GET_MODE_CLASS (mode) != MODE_INT
1145 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
1146 return 0;
1147
1148 return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode)
1149 #ifdef LEGITIMATE_PIC_OPERAND_P
1150 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
1151 #endif
1152 && LEGITIMATE_CONSTANT_P (op));
1153 }
1154
1155 if (GET_MODE (op) != mode && mode != VOIDmode)
1156 return 0;
1157
1158 if (GET_CODE (op) == SUBREG)
1159 {
1160 /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
1161 because it is guaranteed to be reloaded into one.
1162 Just make sure the MEM is valid in itself.
1163 (Ideally, (SUBREG (MEM)...) should not exist after reload,
1164 but currently it does result from (SUBREG (REG)...) where the
1165 reg went on the stack.) */
1166 if (! reload_completed && GET_CODE (SUBREG_REG (op)) == MEM)
1167 return general_operand (op, mode);
1168 op = SUBREG_REG (op);
1169 }
1170
1171 /* We don't consider registers whose class is NO_REGS
1172 to be a register operand. */
1173 return (GET_CODE (op) == REG
1174 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
1175 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
1176 }
1177
1178 /* Return 1 if OP is a valid operand that stands for pushing a
1179 value of mode MODE onto the stack.
1180
1181 The main use of this function is as a predicate in match_operand
1182 expressions in the machine description. */
1183
1184 int
1185 push_operand (op, mode)
1186 rtx op;
1187 enum machine_mode mode;
1188 {
1189 if (GET_CODE (op) != MEM)
1190 return 0;
1191
1192 if (GET_MODE (op) != mode)
1193 return 0;
1194
1195 op = XEXP (op, 0);
1196
1197 if (GET_CODE (op) != STACK_PUSH_CODE)
1198 return 0;
1199
1200 return XEXP (op, 0) == stack_pointer_rtx;
1201 }
1202
1203 /* Return 1 if ADDR is a valid memory address for mode MODE. */
1204
1205 int
1206 memory_address_p (mode, addr)
1207 enum machine_mode mode;
1208 register rtx addr;
1209 {
1210 if (GET_CODE (addr) == ADDRESSOF)
1211 return 1;
1212
1213 GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
1214 return 0;
1215
1216 win:
1217 return 1;
1218 }
1219
1220 /* Return 1 if OP is a valid memory reference with mode MODE,
1221 including a valid address.
1222
1223 The main use of this function is as a predicate in match_operand
1224 expressions in the machine description. */
1225
1226 int
1227 memory_operand (op, mode)
1228 register rtx op;
1229 enum machine_mode mode;
1230 {
1231 rtx inner;
1232
1233 if (! reload_completed)
1234 /* Note that no SUBREG is a memory operand before the end of the reload pass,
1235 because (SUBREG (MEM...)) forces reloading into a register. */
1236 return GET_CODE (op) == MEM && general_operand (op, mode);
1237
1238 if (mode != VOIDmode && GET_MODE (op) != mode)
1239 return 0;
1240
1241 inner = op;
1242 if (GET_CODE (inner) == SUBREG)
1243 inner = SUBREG_REG (inner);
1244
1245 return (GET_CODE (inner) == MEM && general_operand (op, mode));
1246 }
1247
1248 /* Return 1 if OP is a valid indirect memory reference with mode MODE;
1249 that is, a memory reference whose address is a general_operand. */
1250
1251 int
1252 indirect_operand (op, mode)
1253 register rtx op;
1254 enum machine_mode mode;
1255 {
1256 /* Before reload, a SUBREG isn't in memory (see memory_operand, above). */
1257 if (! reload_completed
1258 && GET_CODE (op) == SUBREG && GET_CODE (SUBREG_REG (op)) == MEM)
1259 {
1260 register int offset = SUBREG_WORD (op) * UNITS_PER_WORD;
1261 rtx inner = SUBREG_REG (op);
1262
1263 if (BYTES_BIG_ENDIAN)
1264 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (op)))
1265 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (inner))));
1266
1267 if (mode != VOIDmode && GET_MODE (op) != mode)
1268 return 0;
1269
1270 /* The only way that we can have a general_operand as the resulting
1271 address is if OFFSET is zero and the address already is an operand
1272 or if the address is (plus Y (const_int -OFFSET)) and Y is an
1273 operand. */
1274
1275 return ((offset == 0 && general_operand (XEXP (inner, 0), Pmode))
1276 || (GET_CODE (XEXP (inner, 0)) == PLUS
1277 && GET_CODE (XEXP (XEXP (inner, 0), 1)) == CONST_INT
1278 && INTVAL (XEXP (XEXP (inner, 0), 1)) == -offset
1279 && general_operand (XEXP (XEXP (inner, 0), 0), Pmode)));
1280 }
1281
1282 return (GET_CODE (op) == MEM
1283 && memory_operand (op, mode)
1284 && general_operand (XEXP (op, 0), Pmode));
1285 }
1286
1287 /* Return 1 if this is a comparison operator. This allows the use of
1288 MATCH_OPERATOR to recognize all the branch insns. */
1289
1290 int
1291 comparison_operator (op, mode)
1292 register rtx op;
1293 enum machine_mode mode;
1294 {
1295 return ((mode == VOIDmode || GET_MODE (op) == mode)
1296 && GET_RTX_CLASS (GET_CODE (op)) == '<');
1297 }
1298 \f
1299 /* If BODY is an insn body that uses ASM_OPERANDS,
1300 return the number of operands (both input and output) in the insn.
1301 Otherwise return -1. */
1302
1303 int
1304 asm_noperands (body)
1305 rtx body;
1306 {
1307 if (GET_CODE (body) == ASM_OPERANDS)
1308 /* No output operands: return number of input operands. */
1309 return ASM_OPERANDS_INPUT_LENGTH (body);
1310 if (GET_CODE (body) == SET && GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
1311 /* Single output operand: BODY is (set OUTPUT (asm_operands ...)). */
1312 return ASM_OPERANDS_INPUT_LENGTH (SET_SRC (body)) + 1;
1313 else if (GET_CODE (body) == PARALLEL
1314 && GET_CODE (XVECEXP (body, 0, 0)) == SET
1315 && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
1316 {
1317 /* Multiple output operands, or 1 output plus some clobbers:
1318 body is [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...]. */
1319 int i;
1320 int n_sets;
1321
1322 /* Count backwards through CLOBBERs to determine number of SETs. */
1323 for (i = XVECLEN (body, 0); i > 0; i--)
1324 {
1325 if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
1326 break;
1327 if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
1328 return -1;
1329 }
1330
1331 /* N_SETS is now number of output operands. */
1332 n_sets = i;
1333
1334 /* Verify that all the SETs we have
1335 came from a single original asm_operands insn
1336 (so that invalid combinations are blocked). */
1337 for (i = 0; i < n_sets; i++)
1338 {
1339 rtx elt = XVECEXP (body, 0, i);
1340 if (GET_CODE (elt) != SET)
1341 return -1;
1342 if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
1343 return -1;
1344 /* If these ASM_OPERANDS rtx's came from different original insns
1345 then they aren't allowed together. */
1346 if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
1347 != ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (body, 0, 0))))
1348 return -1;
1349 }
1350 return (ASM_OPERANDS_INPUT_LENGTH (SET_SRC (XVECEXP (body, 0, 0)))
1351 + n_sets);
1352 }
1353 else if (GET_CODE (body) == PARALLEL
1354 && GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
1355 {
1356 /* 0 outputs, but some clobbers:
1357 body is [(asm_operands ...) (clobber (reg ...))...]. */
1358 int i;
1359
1360 /* Make sure all the other parallel things really are clobbers. */
1361 for (i = XVECLEN (body, 0) - 1; i > 0; i--)
1362 if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
1363 return -1;
1364
1365 return ASM_OPERANDS_INPUT_LENGTH (XVECEXP (body, 0, 0));
1366 }
1367 else
1368 return -1;
1369 }
1370
1371 /* Assuming BODY is an insn body that uses ASM_OPERANDS,
1372 copy its operands (both input and output) into the vector OPERANDS,
1373 the locations of the operands within the insn into the vector OPERAND_LOCS,
1374 and the constraints for the operands into CONSTRAINTS.
1375 Write the modes of the operands into MODES.
1376 Return the assembler-template.
1377
1378 If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
1379 we don't store that info. */
1380
1381 char *
1382 decode_asm_operands (body, operands, operand_locs, constraints, modes)
1383 rtx body;
1384 rtx *operands;
1385 rtx **operand_locs;
1386 char **constraints;
1387 enum machine_mode *modes;
1388 {
1389 register int i;
1390 int noperands;
1391 char *template = 0;
1392
1393 if (GET_CODE (body) == SET && GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
1394 {
1395 rtx asmop = SET_SRC (body);
1396 /* Single output operand: BODY is (set OUTPUT (asm_operands ....)). */
1397
1398 noperands = ASM_OPERANDS_INPUT_LENGTH (asmop) + 1;
1399
1400 for (i = 1; i < noperands; i++)
1401 {
1402 if (operand_locs)
1403 operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i - 1);
1404 if (operands)
1405 operands[i] = ASM_OPERANDS_INPUT (asmop, i - 1);
1406 if (constraints)
1407 constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i - 1);
1408 if (modes)
1409 modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i - 1);
1410 }
1411
1412 /* The output is in the SET.
1413 Its constraint is in the ASM_OPERANDS itself. */
1414 if (operands)
1415 operands[0] = SET_DEST (body);
1416 if (operand_locs)
1417 operand_locs[0] = &SET_DEST (body);
1418 if (constraints)
1419 constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
1420 if (modes)
1421 modes[0] = GET_MODE (SET_DEST (body));
1422 template = ASM_OPERANDS_TEMPLATE (asmop);
1423 }
1424 else if (GET_CODE (body) == ASM_OPERANDS)
1425 {
1426 rtx asmop = body;
1427 /* No output operands: BODY is (asm_operands ....). */
1428
1429 noperands = ASM_OPERANDS_INPUT_LENGTH (asmop);
1430
1431 /* The input operands are found in the 1st element vector. */
1432 /* Constraints for inputs are in the 2nd element vector. */
1433 for (i = 0; i < noperands; i++)
1434 {
1435 if (operand_locs)
1436 operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
1437 if (operands)
1438 operands[i] = ASM_OPERANDS_INPUT (asmop, i);
1439 if (constraints)
1440 constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
1441 if (modes)
1442 modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
1443 }
1444 template = ASM_OPERANDS_TEMPLATE (asmop);
1445 }
1446 else if (GET_CODE (body) == PARALLEL
1447 && GET_CODE (XVECEXP (body, 0, 0)) == SET)
1448 {
1449 rtx asmop = SET_SRC (XVECEXP (body, 0, 0));
1450 int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs. */
1451 int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);
1452 int nout = 0; /* Does not include CLOBBERs. */
1453
1454 /* At least one output, plus some CLOBBERs. */
1455
1456 /* The outputs are in the SETs.
1457 Their constraints are in the ASM_OPERANDS itself. */
1458 for (i = 0; i < nparallel; i++)
1459 {
1460 if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
1461 break; /* Past last SET */
1462
1463 if (operands)
1464 operands[i] = SET_DEST (XVECEXP (body, 0, i));
1465 if (operand_locs)
1466 operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
1467 if (constraints)
1468 constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
1469 if (modes)
1470 modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
1471 nout++;
1472 }
1473
1474 for (i = 0; i < nin; i++)
1475 {
1476 if (operand_locs)
1477 operand_locs[i + nout] = &ASM_OPERANDS_INPUT (asmop, i);
1478 if (operands)
1479 operands[i + nout] = ASM_OPERANDS_INPUT (asmop, i);
1480 if (constraints)
1481 constraints[i + nout] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
1482 if (modes)
1483 modes[i + nout] = ASM_OPERANDS_INPUT_MODE (asmop, i);
1484 }
1485
1486 template = ASM_OPERANDS_TEMPLATE (asmop);
1487 }
1488 else if (GET_CODE (body) == PARALLEL
1489 && GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
1490 {
1491 /* No outputs, but some CLOBBERs. */
1492
1493 rtx asmop = XVECEXP (body, 0, 0);
1494 int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);
1495
1496 for (i = 0; i < nin; i++)
1497 {
1498 if (operand_locs)
1499 operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
1500 if (operands)
1501 operands[i] = ASM_OPERANDS_INPUT (asmop, i);
1502 if (constraints)
1503 constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
1504 if (modes)
1505 modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
1506 }
1507
1508 template = ASM_OPERANDS_TEMPLATE (asmop);
1509 }
1510
1511 return template;
1512 }
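
/* Usage sketch (editorial addition), modeled on check_asm_operands above:

     int n = asm_noperands (body);
     if (n > 0)
       {
         rtx *ops = (rtx *) alloca (n * sizeof (rtx));
         char **cons = (char **) alloca (n * sizeof (char *));
         decode_asm_operands (body, ops, NULL_PTR, cons, NULL_PTR);
         ... ops[i] and cons[i] now describe operand I ...
       }  */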
1513
1514 /* Check whether an asm_operand matches its constraints. */
1515
1516 int
1517 asm_operand_ok (op, constraint)
1518 rtx op;
1519 const char *constraint;
1520 {
1521 /* Use constrain_operands after reload. */
1522 if (reload_completed)
1523 abort ();
1524
1525 while (*constraint)
1526 {
1527 switch (*constraint++)
1528 {
1529 case '=':
1530 case '+':
1531 case '*':
1532 case '%':
1533 case '?':
1534 case '!':
1535 case '#':
1536 case '&':
1537 case ',':
1538 break;
1539
1540 case '0': case '1': case '2': case '3': case '4':
1541 case '5': case '6': case '7': case '8': case '9':
1542 /* Our caller is supposed to have given us the proper
1543 matching constraint. */
1544 /* abort (); */
1545 break;
1546
1547 case 'p':
1548 if (address_operand (op, VOIDmode))
1549 return 1;
1550 break;
1551
1552 case 'm':
1553 case 'V': /* non-offsettable */
1554 if (memory_operand (op, VOIDmode))
1555 return 1;
1556 break;
1557
1558 case 'o': /* offsettable */
1559 if (offsettable_nonstrict_memref_p (op))
1560 return 1;
1561 break;
1562
1563 case '<':
1564 if (GET_CODE (op) == MEM
1565 && (GET_CODE (XEXP (op, 0)) == PRE_DEC
1566 || GET_CODE (XEXP (op, 0)) == POST_DEC))
1567 return 1;
1568 break;
1569
1570 case '>':
1571 if (GET_CODE (op) == MEM
1572 && (GET_CODE (XEXP (op, 0)) == PRE_INC
1573 || GET_CODE (XEXP (op, 0)) == POST_INC))
1574 return 1;
1575 break;
1576
1577 case 'E':
1578 #ifndef REAL_ARITHMETIC
1579 /* Match any floating double constant, but only if
1580 we can examine the bits of it reliably. */
1581 if ((HOST_FLOAT_FORMAT != TARGET_FLOAT_FORMAT
1582 || HOST_BITS_PER_WIDE_INT != BITS_PER_WORD)
1583 && GET_MODE (op) != VOIDmode && ! flag_pretend_float)
1584 break;
1585 #endif
1586 /* FALLTHRU */
1587
1588 case 'F':
1589 if (GET_CODE (op) == CONST_DOUBLE)
1590 return 1;
1591 break;
1592
1593 case 'G':
1594 if (GET_CODE (op) == CONST_DOUBLE
1595 && CONST_DOUBLE_OK_FOR_LETTER_P (op, 'G'))
1596 return 1;
1597 break;
1598 case 'H':
1599 if (GET_CODE (op) == CONST_DOUBLE
1600 && CONST_DOUBLE_OK_FOR_LETTER_P (op, 'H'))
1601 return 1;
1602 break;
1603
1604 case 's':
1605 if (GET_CODE (op) == CONST_INT
1606 || (GET_CODE (op) == CONST_DOUBLE
1607 && GET_MODE (op) == VOIDmode))
1608 break;
1609 /* FALLTHRU */
1610
1611 case 'i':
1612 if (CONSTANT_P (op)
1613 #ifdef LEGITIMATE_PIC_OPERAND_P
1614 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
1615 #endif
1616 )
1617 return 1;
1618 break;
1619
1620 case 'n':
1621 if (GET_CODE (op) == CONST_INT
1622 || (GET_CODE (op) == CONST_DOUBLE
1623 && GET_MODE (op) == VOIDmode))
1624 return 1;
1625 break;
1626
1627 case 'I':
1628 if (GET_CODE (op) == CONST_INT
1629 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'I'))
1630 return 1;
1631 break;
1632 case 'J':
1633 if (GET_CODE (op) == CONST_INT
1634 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'J'))
1635 return 1;
1636 break;
1637 case 'K':
1638 if (GET_CODE (op) == CONST_INT
1639 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'K'))
1640 return 1;
1641 break;
1642 case 'L':
1643 if (GET_CODE (op) == CONST_INT
1644 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'))
1645 return 1;
1646 break;
1647 case 'M':
1648 if (GET_CODE (op) == CONST_INT
1649 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'M'))
1650 return 1;
1651 break;
1652 case 'N':
1653 if (GET_CODE (op) == CONST_INT
1654 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'N'))
1655 return 1;
1656 break;
1657 case 'O':
1658 if (GET_CODE (op) == CONST_INT
1659 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'O'))
1660 return 1;
1661 break;
1662 case 'P':
1663 if (GET_CODE (op) == CONST_INT
1664 && CONST_OK_FOR_LETTER_P (INTVAL (op), 'P'))
1665 return 1;
1666 break;
1667
1668 case 'X':
1669 return 1;
1670
1671 case 'g':
1672 if (general_operand (op, VOIDmode))
1673 return 1;
1674 break;
1675
1676 #ifdef EXTRA_CONSTRAINT
1677 case 'Q':
1678 if (EXTRA_CONSTRAINT (op, 'Q'))
1679 return 1;
1680 break;
1681 case 'R':
1682 if (EXTRA_CONSTRAINT (op, 'R'))
1683 return 1;
1684 break;
1685 case 'S':
1686 if (EXTRA_CONSTRAINT (op, 'S'))
1687 return 1;
1688 break;
1689 case 'T':
1690 if (EXTRA_CONSTRAINT (op, 'T'))
1691 return 1;
1692 break;
1693 case 'U':
1694 if (EXTRA_CONSTRAINT (op, 'U'))
1695 return 1;
1696 break;
1697 #endif
1698
1699 case 'r':
1700 default:
1701 if (GET_MODE (op) == BLKmode)
1702 break;
1703 if (register_operand (op, VOIDmode))
1704 return 1;
1705 break;
1706 }
1707 }
1708
1709 return 0;
1710 }
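
/* Illustrative note (editorial addition): check_asm_operands above is the
   canonical caller.  For example, asm_operand_ok (op, "rmi") returns 1
   before reload when OP is an acceptable register, memory, or immediate
   operand.  */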
1711 \f
1712 /* Given an rtx *P, if it is a sum containing an integer constant term,
1713 return the location (type rtx *) of the pointer to that constant term.
1714 Otherwise, return a null pointer. */
1715
1716 static rtx *
1717 find_constant_term_loc (p)
1718 rtx *p;
1719 {
1720 register rtx *tem;
1721 register enum rtx_code code = GET_CODE (*p);
1722
1723 /* If *P IS such a constant term, P is its location. */
1724
1725 if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
1726 || code == CONST)
1727 return p;
1728
1729 /* Otherwise, if not a sum, it has no constant term. */
1730
1731 if (GET_CODE (*p) != PLUS)
1732 return 0;
1733
1734 /* If both summands are constant, return the location of the whole sum. */
1735
1736 if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
1737 && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
1738 return p;
1739
1740 /* Otherwise, check each summand for containing a constant term. */
1741
1742 if (XEXP (*p, 0) != 0)
1743 {
1744 tem = find_constant_term_loc (&XEXP (*p, 0));
1745 if (tem != 0)
1746 return tem;
1747 }
1748
1749 if (XEXP (*p, 1) != 0)
1750 {
1751 tem = find_constant_term_loc (&XEXP (*p, 1));
1752 if (tem != 0)
1753 return tem;
1754 }
1755
1756 return 0;
1757 }
1758 \f
1759 /* Return 1 if OP is a memory reference
1760 whose address contains no side effects
1761 and remains valid after the addition
1762 of a positive integer less than the
1763 size of the object being referenced.
1764
1765 We assume that the original address is valid and do not check it.
1766
1767 This uses strict_memory_address_p as a subroutine, so
1768 don't use it before reload. */
1769
1770 int
1771 offsettable_memref_p (op)
1772 rtx op;
1773 {
1774 return ((GET_CODE (op) == MEM)
1775 && offsettable_address_p (1, GET_MODE (op), XEXP (op, 0)));
1776 }
1777
1778 /* Similar, but don't require a strictly valid mem ref:
1779 consider pseudo-regs valid as index or base regs. */
1780
1781 int
1782 offsettable_nonstrict_memref_p (op)
1783 rtx op;
1784 {
1785 return ((GET_CODE (op) == MEM)
1786 && offsettable_address_p (0, GET_MODE (op), XEXP (op, 0)));
1787 }
1788
1789 /* Return 1 if Y is a memory address which contains no side effects
1790 and would remain valid after the addition of a positive integer
1791 less than the size of an object of mode MODE.
1792
1793 We assume that the original address is valid and do not check it.
1794 We do check that it is valid for narrower modes.
1795
1796 If STRICTP is nonzero, we require a strictly valid address,
1797 for the sake of use in reload.c. */
1798
1799 int
1800 offsettable_address_p (strictp, mode, y)
1801 int strictp;
1802 enum machine_mode mode;
1803 register rtx y;
1804 {
1805 register enum rtx_code ycode = GET_CODE (y);
1806 register rtx z;
1807 rtx y1 = y;
1808 rtx *y2;
1809 int (*addressp) () = (strictp ? strict_memory_address_p : memory_address_p);
1810
1811 if (CONSTANT_ADDRESS_P (y))
1812 return 1;
1813
1814 /* Adjusting an offsettable address involves changing to a narrower mode.
1815 Make sure that's OK. */
1816
1817 if (mode_dependent_address_p (y))
1818 return 0;
1819
1820 /* If the expression contains a constant term,
1821 see if it remains valid when max possible offset is added. */
1822
1823 if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
1824 {
1825 int good;
1826
1827 y1 = *y2;
1828 *y2 = plus_constant (*y2, GET_MODE_SIZE (mode) - 1);
1829 /* Use QImode because an odd displacement may be automatically invalid
1830 for any wider mode. But it should be valid for a single byte. */
1831 good = (*addressp) (QImode, y);
1832
1833 /* In any case, restore old contents of memory. */
1834 *y2 = y1;
1835 return good;
1836 }
1837
1838 if (ycode == PRE_DEC || ycode == PRE_INC
1839 || ycode == POST_DEC || ycode == POST_INC)
1840 return 0;
1841
1842 /* The offset added here is chosen as the maximum offset that
1843 any instruction could need to add when operating on something
1844 of the specified mode. We assume that if Y and Y+c are
1845 valid addresses then so is Y+d for all 0<d<c. */
1846
1847 z = plus_constant_for_output (y, GET_MODE_SIZE (mode) - 1);
1848
1849 /* Use QImode because an odd displacement may be automatically invalid
1850 for any wider mode. But it should be valid for a single byte. */
1851 return (*addressp) (QImode, z);
1852 }
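
/* Worked example (editorial addition; assumes an ordinary byte-addressed
   target where GET_MODE_SIZE (SImode) is 4): offsettable_address_p
   (0, SImode, y) checks whether Y plus 4 - 1 = 3, the largest offset an
   SImode access could need, is still a valid QImode address according to
   memory_address_p (nonstrictly, since STRICTP is 0); any constant term
   temporarily rewritten in Y is restored before returning.  */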
1853
1854 /* Return 1 if ADDR is an address-expression whose effect depends
1855 on the mode of the memory reference it is used in.
1856
1857 Autoincrement addressing is a typical example of mode-dependence
1858 because the amount of the increment depends on the mode. */
1859
1860 int
1861 mode_dependent_address_p (addr)
1862 rtx addr ATTRIBUTE_UNUSED; /* Maybe used in GO_IF_MODE_DEPENDENT_ADDRESS. */
1863 {
1864 GO_IF_MODE_DEPENDENT_ADDRESS (addr, win);
1865 return 0;
1866 /* Label `win' might (not) be used via GO_IF_MODE_DEPENDENT_ADDRESS. */
1867 win: ATTRIBUTE_UNUSED_LABEL
1868 return 1;
1869 }
1870
1871 /* Return 1 if OP is a general operand
1872 other than a memory ref with a mode dependent address. */
1873
1874 int
1875 mode_independent_operand (op, mode)
1876 enum machine_mode mode;
1877 rtx op;
1878 {
1879 rtx addr;
1880
1881 if (! general_operand (op, mode))
1882 return 0;
1883
1884 if (GET_CODE (op) != MEM)
1885 return 1;
1886
1887 addr = XEXP (op, 0);
1888 GO_IF_MODE_DEPENDENT_ADDRESS (addr, lose);
1889 return 1;
1890 /* Label `lose' might (not) be used via GO_IF_MODE_DEPENDENT_ADDRESS. */
1891 lose: ATTRIBUTE_UNUSED_LABEL
1892 return 0;
1893 }
1894
1895 /* Given an operand OP that is a valid memory reference
1896 which satisfies offsettable_memref_p,
1897 return a new memory reference whose address has been adjusted by OFFSET.
1898 OFFSET should be positive and less than the size of the object referenced.
1899 */
1900
1901 rtx
1902 adj_offsettable_operand (op, offset)
1903 rtx op;
1904 int offset;
1905 {
1906 register enum rtx_code code = GET_CODE (op);
1907
1908 if (code == MEM)
1909 {
1910 register rtx y = XEXP (op, 0);
1911 register rtx new;
1912
1913 if (CONSTANT_ADDRESS_P (y))
1914 {
1915 new = gen_rtx_MEM (GET_MODE (op), plus_constant_for_output (y, offset));
1916 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (op);
1917 return new;
1918 }
1919
1920 if (GET_CODE (y) == PLUS)
1921 {
1922 rtx z = y;
1923 register rtx *const_loc;
1924
1925 op = copy_rtx (op);
1926 z = XEXP (op, 0);
1927 const_loc = find_constant_term_loc (&z);
1928 if (const_loc)
1929 {
1930 *const_loc = plus_constant_for_output (*const_loc, offset);
1931 return op;
1932 }
1933 }
1934
1935 new = gen_rtx_MEM (GET_MODE (op), plus_constant_for_output (y, offset));
1936 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (op);
1937 return new;
1938 }
1939 abort ();
1940 }
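
/* A usage sketch, illustrative only: an output routine in a back end
   might address the second word of an offsettable multi-word memory
   operand this way.  The function name and the assembler template
   string are hypothetical, not part of any real port.  */
#if 0
static char *
output_high_word_load (operands)
     rtx *operands;
{
  /* An `o' constraint guarantees offsettable_memref_p (operands[1]).  */
  operands[1] = adj_offsettable_operand (operands[1], UNITS_PER_WORD);
  return "mov %1,%0";	/* hypothetical template */
}
#endif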
1941 \f
1942 /* Analyze INSN and compute the variables recog_n_operands, recog_n_dups,
1943 recog_n_alternatives, recog_operand, recog_operand_loc, recog_constraints,
1944 recog_operand_mode, recog_dup_loc and recog_dup_num.
1945 If REGISTER_CONSTRAINTS is not defined, also compute
1946 recog_operand_address_p. */
1947 void
1948 extract_insn (insn)
1949 rtx insn;
1950 {
1951 int i;
1952 int icode;
1953 int noperands;
1954 rtx body = PATTERN (insn);
1955
1956 recog_n_operands = 0;
1957 recog_n_alternatives = 0;
1958 recog_n_dups = 0;
1959
1960 switch (GET_CODE (body))
1961 {
1962 case USE:
1963 case CLOBBER:
1964 case ASM_INPUT:
1965 case ADDR_VEC:
1966 case ADDR_DIFF_VEC:
1967 return;
1968
1969 case SET:
1970 case PARALLEL:
1971 case ASM_OPERANDS:
1972 recog_n_operands = noperands = asm_noperands (body);
1973 if (noperands >= 0)
1974 {
1975 /* This insn is an `asm' with operands. */
1976
1977 /* expand_asm_operands makes sure there aren't too many operands. */
1978 if (noperands > MAX_RECOG_OPERANDS)
1979 abort ();
1980
1981 /* Now get the operand values and constraints out of the insn. */
1982 decode_asm_operands (body, recog_operand, recog_operand_loc,
1983 recog_constraints, recog_operand_mode);
1984 if (noperands > 0)
1985 {
1986 char *p = recog_constraints[0];
1987 recog_n_alternatives = 1;
1988 while (*p)
1989 recog_n_alternatives += (*p++ == ',');
1990 }
1991 #ifndef REGISTER_CONSTRAINTS
1992 bzero (recog_operand_address_p, sizeof recog_operand_address_p);
1993 #endif
1994 break;
1995 }
1996
1997 /* FALLTHROUGH */
1998
1999 default:
2000 /* Ordinary insn: recognize it, get the operands via insn_extract
2001 and get the constraints. */
2002
2003 icode = recog_memoized (insn);
2004 if (icode < 0)
2005 fatal_insn_not_found (insn);
2006
2007 recog_n_operands = noperands = insn_n_operands[icode];
2008 recog_n_alternatives = insn_n_alternatives[icode];
2009 recog_n_dups = insn_n_dups[icode];
2010
2011 insn_extract (insn);
2012
2013 for (i = 0; i < noperands; i++)
2014 {
2015 #ifdef REGISTER_CONSTRAINTS
2016 recog_constraints[i] = insn_operand_constraint[icode][i];
2017 #else
2018 recog_operand_address_p[i] = insn_operand_address_p[icode][i];
2019 #endif
2020 recog_operand_mode[i] = insn_operand_mode[icode][i];
2021 }
2022 }
2023 for (i = 0; i < noperands; i++)
2024 recog_op_type[i] = (recog_constraints[i][0] == '=' ? OP_OUT
2025 : recog_constraints[i][0] == '+' ? OP_INOUT
2026 : OP_IN);
2027
2028 if (recog_n_alternatives > MAX_RECOG_ALTERNATIVES)
2029 abort ();
2030 }
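
/* A usage sketch, illustrative only: a pass could use the globals
   filled in by extract_insn like this to count the hard-register
   input operands of INSN.  The function itself is hypothetical.  */
#if 0
static int
count_hard_reg_inputs (insn)
     rtx insn;
{
  int i, count = 0;

  extract_insn (insn);
  for (i = 0; i < recog_n_operands; i++)
    if (recog_op_type[i] == OP_IN
	&& GET_CODE (recog_operand[i]) == REG
	&& REGNO (recog_operand[i]) < FIRST_PSEUDO_REGISTER)
      count++;
  return count;
}
#endif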
2031
2032 /* After calling extract_insn, you can use this function to extract some
2033 information from the constraint strings into a more usable form.
2034 The collected data is stored in recog_op_alt. */
2035 void
2036 preprocess_constraints ()
2037 {
2038 int i;
2039
2040 for (i = 0; i < recog_n_operands; i++)
2041 {
2042 int j;
2043 struct operand_alternative *op_alt;
2044 char *p = recog_constraints[i];
2045
2046 op_alt = recog_op_alt[i];
2047
2048 for (j = 0; j < recog_n_alternatives; j++)
2049 {
2050 op_alt[j].class = NO_REGS;
2051 op_alt[j].constraint = p;
2052 op_alt[j].matches = -1;
2053 op_alt[j].matched = -1;
2054
2055 if (*p == '\0' || *p == ',')
2056 {
2057 op_alt[j].anything_ok = 1;
2058 continue;
2059 }
2060
2061 for (;;)
2062 {
2063 char c = *p++;
2064 if (c == '#')
2065 do
2066 c = *p++;
2067 while (c != ',' && c != '\0');
2068 if (c == ',' || c == '\0')
2069 break;
2070
2071 switch (c)
2072 {
2073 case '=': case '+': case '*': case '%':
2074 case 'E': case 'F': case 'G': case 'H':
2075 case 's': case 'i': case 'n':
2076 case 'I': case 'J': case 'K': case 'L':
2077 case 'M': case 'N': case 'O': case 'P':
2078 #ifdef EXTRA_CONSTRAINT
2079 case 'Q': case 'R': case 'S': case 'T': case 'U':
2080 #endif
2081 /* These don't say anything we care about. */
2082 break;
2083
2084 case '?':
2085 op_alt[j].reject += 6;
2086 break;
2087 case '!':
2088 op_alt[j].reject += 600;
2089 break;
2090 case '&':
2091 op_alt[j].earlyclobber = 1;
2092 break;
2093
2094 case '0': case '1': case '2': case '3': case '4':
2095 case '5': case '6': case '7': case '8': case '9':
2096 op_alt[j].matches = c - '0';
2097 recog_op_alt[op_alt[j].matches][j].matched = i;
2098 break;
2099
2100 case 'm':
2101 op_alt[j].memory_ok = 1;
2102 break;
2103 case '<':
2104 op_alt[j].decmem_ok = 1;
2105 break;
2106 case '>':
2107 op_alt[j].incmem_ok = 1;
2108 break;
2109 case 'V':
2110 op_alt[j].nonoffmem_ok = 1;
2111 break;
2112 case 'o':
2113 op_alt[j].offmem_ok = 1;
2114 break;
2115 case 'X':
2116 op_alt[j].anything_ok = 1;
2117 break;
2118
2119 case 'p':
2120 op_alt[j].class = reg_class_subunion[(int) op_alt[j].class][(int) BASE_REG_CLASS];
2121 break;
2122
2123 case 'g': case 'r':
2124 op_alt[j].class = reg_class_subunion[(int) op_alt[j].class][(int) GENERAL_REGS];
2125 break;
2126
2127 default:
2128 op_alt[j].class = reg_class_subunion[(int) op_alt[j].class][(int) REG_CLASS_FROM_LETTER ((unsigned char)c)];
2129 break;
2130 }
2131 }
2132 }
2133 }
2134 }
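
/* A usage sketch, illustrative only: once extract_insn and
   preprocess_constraints have run, recog_op_alt can answer questions
   such as whether operand OPNO may be a memory reference in at least
   one alternative.  The helper is hypothetical.  */
#if 0
static int
operand_allows_memory_p (opno)
     int opno;
{
  int j;

  for (j = 0; j < recog_n_alternatives; j++)
    if (recog_op_alt[opno][j].memory_ok
	|| recog_op_alt[opno][j].offmem_ok
	|| recog_op_alt[opno][j].anything_ok)
      return 1;
  return 0;
}
#endif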
2135
2136 #ifdef REGISTER_CONSTRAINTS
2137
2138 /* Check the operands of an insn against the insn's operand constraints
2139 and return 1 if they are valid.
2140 The information about the insn's operands, constraints, operand modes
2141 etc. is obtained from the global variables set up by extract_insn.
2142
2143 WHICH_ALTERNATIVE is set to a number which indicates which
2144 alternative of constraints was matched: 0 for the first alternative,
2145 1 for the next, etc.
2146
2147 In addition, when two operands are required to match
2148 and it happens that the output operand is (reg) while the
2149 input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
2150 make the output operand look like the input.
2151 This is because the output operand is the one the template will print.
2152
2153 This is used in final, just before printing the assembler code and by
2154 the routines that determine an insn's attribute.
2155
2156 If STRICT is positive, it means that we have been
2157 called after reload has been completed. In that case, we must
2158 do all checks strictly. If it is zero, it means that we have been called
2159 before reload has completed. In that case, we first try to see if we can
2160 find an alternative that matches strictly. If not, we try again, this
2161 time assuming that reload will fix up the insn. This provides a "best
2162 guess" for the alternative and is used to compute attributes of insns prior
2163 to reload. A negative value of STRICT is used for this internal call. */
2164
2165 struct funny_match
2166 {
2167 int this, other;
2168 };
2169
2170 int
2171 constrain_operands (strict)
2172 int strict;
2173 {
2174 char *constraints[MAX_RECOG_OPERANDS];
2175 int matching_operands[MAX_RECOG_OPERANDS];
2176 int earlyclobber[MAX_RECOG_OPERANDS];
2177 register int c;
2178
2179 struct funny_match funny_match[MAX_RECOG_OPERANDS];
2180 int funny_match_index;
2181
2182 if (recog_n_operands == 0 || recog_n_alternatives == 0)
2183 return 1;
2184
2185 for (c = 0; c < recog_n_operands; c++)
2186 {
2187 constraints[c] = recog_constraints[c];
2188 matching_operands[c] = -1;
2189 }
2190
2191 which_alternative = 0;
2192
2193 while (which_alternative < recog_n_alternatives)
2194 {
2195 register int opno;
2196 int lose = 0;
2197 funny_match_index = 0;
2198
2199 for (opno = 0; opno < recog_n_operands; opno++)
2200 {
2201 register rtx op = recog_operand[opno];
2202 enum machine_mode mode = GET_MODE (op);
2203 register char *p = constraints[opno];
2204 int offset = 0;
2205 int win = 0;
2206 int val;
2207
2208 earlyclobber[opno] = 0;
2209
2210 /* A unary operator may be accepted by the predicate, but it
2211 is irrelevant for matching constraints. */
2212 if (GET_RTX_CLASS (GET_CODE (op)) == '1')
2213 op = XEXP (op, 0);
2214
2215 if (GET_CODE (op) == SUBREG)
2216 {
2217 if (GET_CODE (SUBREG_REG (op)) == REG
2218 && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
2219 offset = SUBREG_WORD (op);
2220 op = SUBREG_REG (op);
2221 }
2222
2223 /* An empty constraint or empty alternative
2224 allows anything which matched the pattern. */
2225 if (*p == 0 || *p == ',')
2226 win = 1;
2227
2228 while (*p && (c = *p++) != ',')
2229 switch (c)
2230 {
2231 case '?':
2232 case '!':
2233 case '*':
2234 case '%':
2235 case '=':
2236 case '+':
2237 break;
2238
2239 case '#':
2240 /* Ignore rest of this alternative as far as
2241 constraint checking is concerned. */
2242 while (*p && *p != ',')
2243 p++;
2244 break;
2245
2246 case '&':
2247 earlyclobber[opno] = 1;
2248 break;
2249
2250 case '0':
2251 case '1':
2252 case '2':
2253 case '3':
2254 case '4':
2255 /* This operand must be the same as a previous one.
2256 This kind of constraint is used for instructions such
2257 as add when they take only two operands.
2258
2259 Note that the lower-numbered operand is passed first.
2260
2261 If we are not testing strictly, assume that this constraint
2262 will be satisfied. */
2263 if (strict < 0)
2264 val = 1;
2265 else
2266 val = operands_match_p (recog_operand[c - '0'],
2267 recog_operand[opno]);
2268
2269 matching_operands[opno] = c - '0';
2270 matching_operands[c - '0'] = opno;
2271
2272 if (val != 0)
2273 win = 1;
2274 /* If output is *x and input is *--x,
2275 arrange later to change the output to *--x as well,
2276 since the output op is the one that will be printed. */
2277 if (val == 2 && strict > 0)
2278 {
2279 funny_match[funny_match_index].this = opno;
2280 funny_match[funny_match_index++].other = c - '0';
2281 }
2282 break;
2283
2284 case 'p':
2285 /* p is used for address_operands. When we are called by
2286 gen_reload, no one will have checked that the address is
2287 strictly valid, i.e., that all pseudos requiring hard regs
2288 have gotten them. */
2289 if (strict <= 0
2290 || (strict_memory_address_p (recog_operand_mode[opno],
2291 op)))
2292 win = 1;
2293 break;
2294
2295 /* No need to check general_operand again;
2296 it was done in insn-recog.c. */
2297 case 'g':
2298 /* Anything goes unless it is a REG and really has a hard reg
2299 but the hard reg is not in the class GENERAL_REGS. */
2300 if (strict < 0
2301 || GENERAL_REGS == ALL_REGS
2302 || GET_CODE (op) != REG
2303 || (reload_in_progress
2304 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2305 || reg_fits_class_p (op, GENERAL_REGS, offset, mode))
2306 win = 1;
2307 break;
2308
2309 case 'r':
2310 if (strict < 0
2311 || (strict == 0
2312 && GET_CODE (op) == REG
2313 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2314 || (strict == 0 && GET_CODE (op) == SCRATCH)
2315 || (GET_CODE (op) == REG
2316 && ((GENERAL_REGS == ALL_REGS
2317 && REGNO (op) < FIRST_PSEUDO_REGISTER)
2318 || reg_fits_class_p (op, GENERAL_REGS,
2319 offset, mode))))
2320 win = 1;
2321 break;
2322
2323 case 'X':
2324 /* This is used for a MATCH_SCRATCH in the cases when
2325 we don't actually need anything. So anything goes
2326 any time. */
2327 win = 1;
2328 break;
2329
2330 case 'm':
2331 if (GET_CODE (op) == MEM
2332 /* Before reload, accept what reload can turn into mem. */
2333 || (strict < 0 && CONSTANT_P (op))
2334 /* During reload, accept a pseudo.  */
2335 || (reload_in_progress && GET_CODE (op) == REG
2336 && REGNO (op) >= FIRST_PSEUDO_REGISTER))
2337 win = 1;
2338 break;
2339
2340 case '<':
2341 if (GET_CODE (op) == MEM
2342 && (GET_CODE (XEXP (op, 0)) == PRE_DEC
2343 || GET_CODE (XEXP (op, 0)) == POST_DEC))
2344 win = 1;
2345 break;
2346
2347 case '>':
2348 if (GET_CODE (op) == MEM
2349 && (GET_CODE (XEXP (op, 0)) == PRE_INC
2350 || GET_CODE (XEXP (op, 0)) == POST_INC))
2351 win = 1;
2352 break;
2353
2354 case 'E':
2355 #ifndef REAL_ARITHMETIC
2356 /* Match any CONST_DOUBLE, but only if
2357 we can examine the bits of it reliably. */
2358 if ((HOST_FLOAT_FORMAT != TARGET_FLOAT_FORMAT
2359 || HOST_BITS_PER_WIDE_INT != BITS_PER_WORD)
2360 && GET_MODE (op) != VOIDmode && ! flag_pretend_float)
2361 break;
2362 #endif
2363 if (GET_CODE (op) == CONST_DOUBLE)
2364 win = 1;
2365 break;
2366
2367 case 'F':
2368 if (GET_CODE (op) == CONST_DOUBLE)
2369 win = 1;
2370 break;
2371
2372 case 'G':
2373 case 'H':
2374 if (GET_CODE (op) == CONST_DOUBLE
2375 && CONST_DOUBLE_OK_FOR_LETTER_P (op, c))
2376 win = 1;
2377 break;
2378
2379 case 's':
2380 if (GET_CODE (op) == CONST_INT
2381 || (GET_CODE (op) == CONST_DOUBLE
2382 && GET_MODE (op) == VOIDmode))
2383 break;
2384 case 'i':	/* `s' falls through to here unless OP is a numeric constant.  */
2385 if (CONSTANT_P (op))
2386 win = 1;
2387 break;
2388
2389 case 'n':
2390 if (GET_CODE (op) == CONST_INT
2391 || (GET_CODE (op) == CONST_DOUBLE
2392 && GET_MODE (op) == VOIDmode))
2393 win = 1;
2394 break;
2395
2396 case 'I':
2397 case 'J':
2398 case 'K':
2399 case 'L':
2400 case 'M':
2401 case 'N':
2402 case 'O':
2403 case 'P':
2404 if (GET_CODE (op) == CONST_INT
2405 && CONST_OK_FOR_LETTER_P (INTVAL (op), c))
2406 win = 1;
2407 break;
2408
2409 #ifdef EXTRA_CONSTRAINT
2410 case 'Q':
2411 case 'R':
2412 case 'S':
2413 case 'T':
2414 case 'U':
2415 if (EXTRA_CONSTRAINT (op, c))
2416 win = 1;
2417 break;
2418 #endif
2419
2420 case 'V':
2421 if (GET_CODE (op) == MEM
2422 && ((strict > 0 && ! offsettable_memref_p (op))
2423 || (strict < 0
2424 && !(CONSTANT_P (op) || GET_CODE (op) == MEM))
2425 || (reload_in_progress
2426 && !(GET_CODE (op) == REG
2427 && REGNO (op) >= FIRST_PSEUDO_REGISTER))))
2428 win = 1;
2429 break;
2430
2431 case 'o':
2432 if ((strict > 0 && offsettable_memref_p (op))
2433 || (strict == 0 && offsettable_nonstrict_memref_p (op))
2434 /* Before reload, accept what reload can handle. */
2435 || (strict < 0
2436 && (CONSTANT_P (op) || GET_CODE (op) == MEM))
2437 /* During reload, accept a pseudo.  */
2438 || (reload_in_progress && GET_CODE (op) == REG
2439 && REGNO (op) >= FIRST_PSEUDO_REGISTER))
2440 win = 1;
2441 break;
2442
2443 default:
2444 if (strict < 0
2445 || (strict == 0
2446 && GET_CODE (op) == REG
2447 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2448 || (strict == 0 && GET_CODE (op) == SCRATCH)
2449 || (GET_CODE (op) == REG
2450 && reg_fits_class_p (op, REG_CLASS_FROM_LETTER (c),
2451 offset, mode)))
2452 win = 1;
2453 }
2454
2455 constraints[opno] = p;
2456 /* If this operand did not win somehow,
2457 this alternative loses. */
2458 if (! win)
2459 lose = 1;
2460 }
2461 /* This alternative won; the operands are ok.
2462 Change whichever operands this alternative says to change. */
2463 if (! lose)
2464 {
2465 int opno, eopno;
2466
2467 /* See if any earlyclobber operand conflicts with some other
2468 operand. */
2469
2470 if (strict > 0)
2471 for (eopno = 0; eopno < recog_n_operands; eopno++)
2472 /* Ignore earlyclobber operands now in memory,
2473 because we would often report failure when we have
2474 two memory operands, one of which was formerly a REG. */
2475 if (earlyclobber[eopno]
2476 && GET_CODE (recog_operand[eopno]) == REG)
2477 for (opno = 0; opno < recog_n_operands; opno++)
2478 if ((GET_CODE (recog_operand[opno]) == MEM
2479 || recog_op_type[opno] != OP_OUT)
2480 && opno != eopno
2481 /* Ignore things like match_operator operands. */
2482 && *recog_constraints[opno] != 0
2483 && ! (matching_operands[opno] == eopno
2484 && operands_match_p (recog_operand[opno],
2485 recog_operand[eopno]))
2486 && ! safe_from_earlyclobber (recog_operand[opno],
2487 recog_operand[eopno]))
2488 lose = 1;
2489
2490 if (! lose)
2491 {
2492 while (--funny_match_index >= 0)
2493 {
2494 recog_operand[funny_match[funny_match_index].other]
2495 = recog_operand[funny_match[funny_match_index].this];
2496 }
2497
2498 return 1;
2499 }
2500 }
2501
2502 which_alternative++;
2503 }
2504
2505 /* If we are about to reject this, but we are not to test strictly,
2506 try a very loose test. Only return failure if it fails also. */
2507 if (strict == 0)
2508 return constrain_operands (-1);
2509 else
2510 return 0;
2511 }
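
/* A usage sketch, illustrative only: the typical sequence for checking
   an insn strictly after reload and learning which constraint
   alternative matched, similar to what final.c does before output.
   The wrapper function is hypothetical.  */
#if 0
static int
check_insn_after_reload (insn)
     rtx insn;
{
  extract_insn (insn);
  if (! constrain_operands (1))
    fatal_insn_not_found (insn);
  return which_alternative;
}
#endif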
2512
2513 /* Return 1 iff OPERAND (assumed to be a REG rtx)
2514 is a hard reg in class CLASS when its regno is offset by OFFSET
2515 and changed to mode MODE.
2516 If REG occupies multiple hard regs, all of them must be in CLASS. */
2517
2518 int
2519 reg_fits_class_p (operand, class, offset, mode)
2520 rtx operand;
2521 register enum reg_class class;
2522 int offset;
2523 enum machine_mode mode;
2524 {
2525 register int regno = REGNO (operand);
2526 if (regno < FIRST_PSEUDO_REGISTER
2527 && TEST_HARD_REG_BIT (reg_class_contents[(int) class],
2528 regno + offset))
2529 {
2530 register int sr;
2531 regno += offset;
2532 for (sr = HARD_REGNO_NREGS (regno, mode) - 1;
2533 sr > 0; sr--)
2534 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) class],
2535 regno + sr))
2536 break;
2537 return sr == 0;
2538 }
2539
2540 return 0;
2541 }
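
/* A usage sketch, illustrative only: the kind of test the `r' case in
   constrain_operands performs for a hard register operand.  The
   predicate name is hypothetical.  */
#if 0
static int
hard_general_reg_p (op, mode)
     rtx op;
     enum machine_mode mode;
{
  return (GET_CODE (op) == REG
	  && REGNO (op) < FIRST_PSEUDO_REGISTER
	  && reg_fits_class_p (op, GENERAL_REGS, 0, mode));
}
#endif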
2542
2543 #endif /* REGISTER_CONSTRAINTS */