2055cea7 1/* Subroutines used by or related to instruction recognition.
9e4223f2 2 Copyright (C) 1987, 1988, 1991, 1992 Free Software Foundation, Inc.
3
4This file is part of GNU CC.
5
6GNU CC is free software; you can redistribute it and/or modify
7it under the terms of the GNU General Public License as published by
8the Free Software Foundation; either version 2, or (at your option)
9any later version.
10
11GNU CC is distributed in the hope that it will be useful,
12but WITHOUT ANY WARRANTY; without even the implied warranty of
13MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14GNU General Public License for more details.
15
16You should have received a copy of the GNU General Public License
17along with GNU CC; see the file COPYING. If not, write to
18the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
19
20
21#include "config.h"
22#include "rtl.h"
23#include <stdio.h>
24#include "insn-config.h"
25#include "insn-attr.h"
26#include "insn-flags.h"
27#include "insn-codes.h"
28#include "recog.h"
29#include "regs.h"
30#include "hard-reg-set.h"
31#include "flags.h"
32#include "real.h"
33
34#ifndef STACK_PUSH_CODE
35#ifdef STACK_GROWS_DOWNWARD
36#define STACK_PUSH_CODE PRE_DEC
37#else
38#define STACK_PUSH_CODE PRE_INC
39#endif
40#endif
41
42/* Import from final.c: */
43extern rtx alter_subreg ();
44
45int strict_memory_address_p ();
46int memory_address_p ();
47
48/* Nonzero means allow operands to be volatile.
49 This should be 0 if you are generating rtl, such as if you are calling
50 the functions in optabs.c and expmed.c (most of the time).
51 This should be 1 if all valid insns need to be recognized,
52 such as in regclass.c and final.c and reload.c.
53
54 init_recog and init_recog_no_volatile are responsible for setting this. */
55
56int volatile_ok;
57
58/* On return from `constrain_operands', indicate which alternative
59 was satisfied. */
60
61int which_alternative;
62
63/* Nonzero after end of reload pass.
64 Set to 1 or 0 by toplev.c.
65 Controls the significance of (SUBREG (MEM)). */
66
67int reload_completed;
68
69/* Initialize data used by the function `recog'.
70 This must be called once in the compilation of a function
71 before any insn recognition may be done in the function. */
72
73void
74init_recog_no_volatile ()
75{
76 volatile_ok = 0;
77}
78
e0069e43 79void
80init_recog ()
81{
82 volatile_ok = 1;
83}
84
85/* Try recognizing the instruction INSN,
86 and return the code number that results.
9e4223f2 87 Remember the code so that repeated calls do not
88 need to spend the time for actual rerecognition.
89
90 This function is the normal interface to instruction recognition.
91 The automatically-generated function `recog' is normally called
92 through this one. (The only exception is in combine.c.) */
93
94int
95recog_memoized (insn)
96 rtx insn;
97{
98 if (INSN_CODE (insn) < 0)
9e4223f2 99 INSN_CODE (insn) = recog (PATTERN (insn), insn, NULL_PTR);
100 return INSN_CODE (insn);
101}
102\f
103/* Check that X is an insn-body for an `asm' with operands
104 and that the operands mentioned in it are legitimate. */
105
106int
107check_asm_operands (x)
108 rtx x;
109{
110 int noperands = asm_noperands (x);
111 rtx *operands;
112 int i;
113
114 if (noperands < 0)
115 return 0;
116 if (noperands == 0)
117 return 1;
118
119 operands = (rtx *) alloca (noperands * sizeof (rtx));
9e4223f2 120 decode_asm_operands (x, operands, NULL_PTR, NULL_PTR, NULL_PTR);
121
122 for (i = 0; i < noperands; i++)
123 if (!general_operand (operands[i], VOIDmode))
124 return 0;
125
126 return 1;
127}
128\f
129/* Static data for the next two routines.
130
131 The maximum number of changes supported is defined as the maximum
132 number of operands times 5. This allows for repeated substitutions
 133 inside a complex indexed address, or, alternatively, changes in up
134 to 5 insns. */
135
136#define MAX_CHANGE_LOCS (MAX_RECOG_OPERANDS * 5)
137
138static rtx change_objects[MAX_CHANGE_LOCS];
139static int change_old_codes[MAX_CHANGE_LOCS];
140static rtx *change_locs[MAX_CHANGE_LOCS];
141static rtx change_olds[MAX_CHANGE_LOCS];
142
143static int num_changes = 0;
144
 145/* Validate a proposed change to OBJECT. LOC is the location in the rtl
146 at which NEW will be placed. If OBJECT is zero, no validation is done,
147 the change is simply made.
148
149 Two types of objects are supported: If OBJECT is a MEM, memory_address_p
150 will be called with the address and mode as parameters. If OBJECT is
151 an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
152 the change in place.
153
154 IN_GROUP is non-zero if this is part of a group of changes that must be
155 performed as a group. In that case, the changes will be stored. The
156 function `apply_change_group' will validate and apply the changes.
157
158 If IN_GROUP is zero, this is a single change. Try to recognize the insn
159 or validate the memory reference with the change applied. If the result
160 is not valid for the machine, suppress the change and return zero.
161 Otherwise, perform the change and return 1. */
162
163int
164validate_change (object, loc, new, in_group)
165 rtx object;
166 rtx *loc;
167 rtx new;
168 int in_group;
169{
170 rtx old = *loc;
171
172 if (old == new || rtx_equal_p (old, new))
173 return 1;
174
175 if (num_changes >= MAX_CHANGE_LOCS
176 || (in_group == 0 && num_changes != 0))
177 abort ();
178
179 *loc = new;
180
181 /* Save the information describing this change. */
182 change_objects[num_changes] = object;
183 change_locs[num_changes] = loc;
184 change_olds[num_changes] = old;
185
186 if (object && GET_CODE (object) != MEM)
187 {
188 /* Set INSN_CODE to force rerecognition of insn. Save old code in
189 case invalid. */
190 change_old_codes[num_changes] = INSN_CODE (object);
191 INSN_CODE (object) = -1;
192 }
193
194 num_changes++;
195
196 /* If we are making a group of changes, return 1. Otherwise, validate the
197 change group we made. */
198
199 if (in_group)
200 return 1;
201 else
202 return apply_change_group ();
203}
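
/* Illustrative usage sketch, not part of the original source: a caller that
   wants to rewrite two locations inside an insn atomically can queue both
   changes with IN_GROUP nonzero and let `apply_change_group' accept or back
   them out as a unit.  The names `insn', `x', `new0', `new1' and `give_up'
   below are hypothetical.

      validate_change (insn, &XEXP (x, 0), new0, 1);
      validate_change (insn, &XEXP (x, 1), new1, 1);
      if (! apply_change_group ())
        give_up ();

   If `apply_change_group' returns zero, both changes have been backed out
   and the insn is unmodified.  Passing 0 for IN_GROUP instead makes a
   single change that is validated, and if invalid undone, immediately.  */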
204
205/* Apply a group of changes previously issued with `validate_change'.
206 Return 1 if all changes are valid, zero otherwise. */
207
208int
209apply_change_group ()
210{
211 int i;
212
213 /* The changes have been applied and all INSN_CODEs have been reset to force
214 rerecognition.
215
216 The changes are valid if we aren't given an object, or if we are
 217 given a MEM and it still is a valid address, or if this is an insn
218 and it is recognized. In the latter case, if reload has completed,
219 we also require that the operands meet the constraints for
220 the insn. We do not allow modifying an ASM_OPERANDS after reload
221 has completed because verifying the constraints is too difficult. */
222
223 for (i = 0; i < num_changes; i++)
224 {
225 rtx object = change_objects[i];
226
227 if (object == 0)
228 continue;
229
230 if (GET_CODE (object) == MEM)
231 {
232 if (! memory_address_p (GET_MODE (object), XEXP (object, 0)))
233 break;
234 }
235 else if ((recog_memoized (object) < 0
236 && (asm_noperands (PATTERN (object)) < 0
237 || ! check_asm_operands (PATTERN (object))
238 || reload_completed))
239 || (reload_completed
240 && (insn_extract (object),
241 ! constrain_operands (INSN_CODE (object), 1))))
242 {
243 rtx pat = PATTERN (object);
244
245 /* Perhaps we couldn't recognize the insn because there were
246 extra CLOBBERs at the end. If so, try to re-recognize
247 without the last CLOBBER (later iterations will cause each of
248 them to be eliminated, in turn). But don't do this if we
 249 have an ASM_OPERANDS. */
250 if (GET_CODE (pat) == PARALLEL
251 && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
252 && asm_noperands (PATTERN (object)) < 0)
253 {
254 rtx newpat;
255
256 if (XVECLEN (pat, 0) == 2)
257 newpat = XVECEXP (pat, 0, 0);
258 else
259 {
260 int j;
261
262 newpat = gen_rtx (PARALLEL, VOIDmode,
263 gen_rtvec (XVECLEN (pat, 0) - 1));
264 for (j = 0; j < XVECLEN (newpat, 0); j++)
265 XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
266 }
267
268 /* Add a new change to this group to replace the pattern
269 with this new pattern. Then consider this change
270 as having succeeded. The change we added will
271 cause the entire call to fail if things remain invalid.
272
273 Note that this can lose if a later change than the one
274 we are processing specified &XVECEXP (PATTERN (object), 0, X)
275 but this shouldn't occur. */
276
277 validate_change (object, &PATTERN (object), newpat, 1);
278 }
279 else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
280 /* If this insn is a CLOBBER or USE, it is always valid, but is
281 never recognized. */
282 continue;
283 else
284 break;
285 }
286 }
287
288 if (i == num_changes)
289 {
290 num_changes = 0;
291 return 1;
292 }
293 else
294 {
295 cancel_changes (0);
296 return 0;
297 }
298}
299
300/* Return the number of changes so far in the current group. */
301
302int
303num_validated_changes ()
304{
305 return num_changes;
306}
307
308/* Retract the changes numbered NUM and up. */
309
310void
311cancel_changes (num)
312 int num;
313{
314 int i;
315
316 /* Back out all the changes. Do this in the opposite order in which
317 they were made. */
318 for (i = num_changes - 1; i >= num; i--)
319 {
320 *change_locs[i] = change_olds[i];
321 if (change_objects[i] && GET_CODE (change_objects[i]) != MEM)
322 INSN_CODE (change_objects[i]) = change_old_codes[i];
323 }
324 num_changes = num;
325}
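
/* Illustrative usage sketch, not part of the original source: together,
   `num_validated_changes' and `cancel_changes' let a caller queue further
   changes speculatively on top of an existing group and retract only the
   speculative ones.  `insn', `loc', `new_rtx' and `still_profitable' below
   are hypothetical.

      int start = num_validated_changes ();
      validate_change (insn, loc, new_rtx, 1);
      if (! still_profitable ())
        cancel_changes (start);

   Cancelling back to START leaves any changes queued before it pending.  */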
326
327/* Replace every occurrence of FROM in X with TO. Mark each change with
328 validate_change passing OBJECT. */
329
330static void
331validate_replace_rtx_1 (loc, from, to, object)
332 rtx *loc;
333 rtx from, to, object;
334{
335 register int i, j;
336 register char *fmt;
337 register rtx x = *loc;
338 enum rtx_code code = GET_CODE (x);
339
340 /* X matches FROM if it is the same rtx or they are both referring to the
341 same register in the same mode. Avoid calling rtx_equal_p unless the
342 operands look similar. */
343
344 if (x == from
345 || (GET_CODE (x) == REG && GET_CODE (from) == REG
346 && GET_MODE (x) == GET_MODE (from)
347 && REGNO (x) == REGNO (from))
348 || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
349 && rtx_equal_p (x, from)))
350 {
351 validate_change (object, loc, to, 1);
352 return;
353 }
354
355 /* For commutative or comparison operations, try replacing each argument
356 separately and seeing if we made any changes. If so, put a constant
 357 argument last. */
358 if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == 'c')
359 {
360 int prev_changes = num_changes;
361
362 validate_replace_rtx_1 (&XEXP (x, 0), from, to, object);
363 validate_replace_rtx_1 (&XEXP (x, 1), from, to, object);
364 if (prev_changes != num_changes && CONSTANT_P (XEXP (x, 0)))
365 {
366 validate_change (object, loc,
367 gen_rtx (GET_RTX_CLASS (code) == 'c' ? code
368 : swap_condition (code),
369 GET_MODE (x), XEXP (x, 1), XEXP (x, 0)),
370 1);
371 x = *loc;
372 code = GET_CODE (x);
373 }
374 }
375
376 switch (code)
377 {
378 case PLUS:
 379 /* If we have a PLUS whose second operand is now a CONST_INT, use
380 plus_constant to try to simplify it. */
381 if (GET_CODE (XEXP (x, 1)) == CONST_INT && XEXP (x, 1) == to)
382 validate_change (object, loc,
383 plus_constant (XEXP (x, 0), INTVAL (XEXP (x, 1))), 1);
384 return;
385
386 case ZERO_EXTEND:
387 case SIGN_EXTEND:
388 /* In these cases, the operation to be performed depends on the mode
389 of the operand. If we are replacing the operand with a VOIDmode
390 constant, we lose the information. So try to simplify the operation
391 in that case. If it fails, substitute in something that we know
6dc42e49 392 won't be recognized. */
393 if (GET_MODE (to) == VOIDmode
394 && (XEXP (x, 0) == from
395 || (GET_CODE (XEXP (x, 0)) == REG && GET_CODE (from) == REG
396 && GET_MODE (XEXP (x, 0)) == GET_MODE (from)
397 && REGNO (XEXP (x, 0)) == REGNO (from))))
398 {
399 rtx new = simplify_unary_operation (code, GET_MODE (x), to,
400 GET_MODE (from));
401 if (new == 0)
402 new = gen_rtx (CLOBBER, GET_MODE (x), const0_rtx);
403
404 validate_change (object, loc, new, 1);
405 return;
406 }
407 break;
408
409 case SUBREG:
410 /* If we have a SUBREG of a register that we are replacing and we are
411 replacing it with a MEM, make a new MEM and try replacing the
412 SUBREG with it. Don't do this if the MEM has a mode-dependent address
413 or if we would be widening it. */
414
415 if (SUBREG_REG (x) == from
416 && GET_CODE (from) == REG
417 && GET_CODE (to) == MEM
418 && ! mode_dependent_address_p (XEXP (to, 0))
419 && ! MEM_VOLATILE_P (to)
420 && GET_MODE_SIZE (GET_MODE (x)) <= GET_MODE_SIZE (GET_MODE (to)))
421 {
422 int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
423 enum machine_mode mode = GET_MODE (x);
424 rtx new;
425
426#if BYTES_BIG_ENDIAN
427 offset += (MIN (UNITS_PER_WORD,
428 GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
429 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));
430#endif
431
432 new = gen_rtx (MEM, mode, plus_constant (XEXP (to, 0), offset));
433 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (to);
434 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (to);
435 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (to);
436 validate_change (object, loc, new, 1);
437 return;
438 }
439 break;
440
441 case ZERO_EXTRACT:
442 case SIGN_EXTRACT:
443 /* If we are replacing a register with memory, try to change the memory
444 to be the mode required for memory in extract operations (this isn't
445 likely to be an insertion operation; if it was, nothing bad will
446 happen, we might just fail in some cases). */
447
448 if (XEXP (x, 0) == from && GET_CODE (from) == REG && GET_CODE (to) == MEM
449 && GET_CODE (XEXP (x, 1)) == CONST_INT
450 && GET_CODE (XEXP (x, 2)) == CONST_INT
451 && ! mode_dependent_address_p (XEXP (to, 0))
452 && ! MEM_VOLATILE_P (to))
453 {
454 enum machine_mode wanted_mode = VOIDmode;
455 enum machine_mode is_mode = GET_MODE (to);
456 int width = INTVAL (XEXP (x, 1));
457 int pos = INTVAL (XEXP (x, 2));
458
459#ifdef HAVE_extzv
460 if (code == ZERO_EXTRACT)
461 wanted_mode = insn_operand_mode[(int) CODE_FOR_extzv][1];
462#endif
463#ifdef HAVE_extv
464 if (code == SIGN_EXTRACT)
465 wanted_mode = insn_operand_mode[(int) CODE_FOR_extv][1];
466#endif
467
6dc42e49 468 /* If we have a narrower mode, we can do something. */
469 if (wanted_mode != VOIDmode
470 && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
471 {
472 int offset = pos / BITS_PER_UNIT;
473 rtx newmem;
474
475 /* If the bytes and bits are counted differently, we
476 must adjust the offset. */
477#if BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN
478 offset = (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode)
479 - offset);
480#endif
481
482 pos %= GET_MODE_BITSIZE (wanted_mode);
483
484 newmem = gen_rtx (MEM, wanted_mode,
485 plus_constant (XEXP (to, 0), offset));
486 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (to);
487 MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (to);
488 MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (to);
489
9e4223f2 490 validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
491 validate_change (object, &XEXP (x, 0), newmem, 1);
492 }
493 }
494
495 break;
496 }
497
498 fmt = GET_RTX_FORMAT (code);
499 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
500 {
501 if (fmt[i] == 'e')
502 validate_replace_rtx_1 (&XEXP (x, i), from, to, object);
503 else if (fmt[i] == 'E')
504 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
505 validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object);
506 }
507}
508
509/* Try replacing every occurrence of FROM in INSN with TO. After all
510 changes have been made, validate by seeing if INSN is still valid. */
511
512int
513validate_replace_rtx (from, to, insn)
514 rtx from, to, insn;
515{
516 validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
517 return apply_change_group ();
518}
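
/* Illustrative usage sketch, not part of the original source: replacing
   every use of a pseudo register with a known constant in a single insn,
   keeping the insn only if it is still recognizable.  `reg', `insn' and
   `keep_old_form' are hypothetical.

      if (! validate_replace_rtx (reg, GEN_INT (42), insn))
        keep_old_form ();

   A zero return means the substituted insn failed to be recognized and
   every occurrence of `reg' was restored.  */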
519\f
520#ifdef HAVE_cc0
521/* Return 1 if the insn using CC0 set by INSN does not contain
522 any ordered tests applied to the condition codes.
523 EQ and NE tests do not count. */
524
525int
526next_insn_tests_no_inequality (insn)
527 rtx insn;
528{
529 register rtx next = next_cc0_user (insn);
530
531 /* If there is no next insn, we have to take the conservative choice. */
532 if (next == 0)
533 return 0;
534
535 return ((GET_CODE (next) == JUMP_INSN
536 || GET_CODE (next) == INSN
537 || GET_CODE (next) == CALL_INSN)
538 && ! inequality_comparisons_p (PATTERN (next)));
539}
540
541#if 0 /* This is useless since the insn that sets the cc's
542 must be followed immediately by the use of them. */
543/* Return 1 if the CC value set up by INSN is not used. */
544
545int
546next_insns_test_no_inequality (insn)
547 rtx insn;
548{
549 register rtx next = NEXT_INSN (insn);
550
551 for (; next != 0; next = NEXT_INSN (next))
552 {
553 if (GET_CODE (next) == CODE_LABEL
554 || GET_CODE (next) == BARRIER)
555 return 1;
556 if (GET_CODE (next) == NOTE)
557 continue;
558 if (inequality_comparisons_p (PATTERN (next)))
559 return 0;
560 if (sets_cc0_p (PATTERN (next)) == 1)
561 return 1;
562 if (! reg_mentioned_p (cc0_rtx, PATTERN (next)))
563 return 1;
564 }
565 return 1;
566}
567#endif
568#endif
569\f
570/* This is used by find_single_use to locate an rtx that contains exactly one
571 use of DEST, which is typically either a REG or CC0. It returns a
572 pointer to the innermost rtx expression containing DEST. Appearances of
573 DEST that are being used to totally replace it are not counted. */
574
575static rtx *
576find_single_use_1 (dest, loc)
577 rtx dest;
578 rtx *loc;
579{
580 rtx x = *loc;
581 enum rtx_code code = GET_CODE (x);
582 rtx *result = 0;
583 rtx *this_result;
584 int i;
585 char *fmt;
586
587 switch (code)
588 {
589 case CONST_INT:
590 case CONST:
591 case LABEL_REF:
592 case SYMBOL_REF:
593 case CONST_DOUBLE:
594 case CLOBBER:
595 return 0;
596
597 case SET:
598 /* If the destination is anything other than CC0, PC, a REG or a SUBREG
599 of a REG that occupies all of the REG, the insn uses DEST if
600 it is mentioned in the destination or the source. Otherwise, we
601 need just check the source. */
602 if (GET_CODE (SET_DEST (x)) != CC0
603 && GET_CODE (SET_DEST (x)) != PC
604 && GET_CODE (SET_DEST (x)) != REG
605 && ! (GET_CODE (SET_DEST (x)) == SUBREG
606 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG
607 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
608 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
609 == ((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
610 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))))
611 break;
612
613 return find_single_use_1 (dest, &SET_SRC (x));
614
615 case MEM:
616 case SUBREG:
617 return find_single_use_1 (dest, &XEXP (x, 0));
618 }
619
620 /* If it wasn't one of the common cases above, check each expression and
621 vector of this code. Look for a unique usage of DEST. */
622
623 fmt = GET_RTX_FORMAT (code);
624 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
625 {
626 if (fmt[i] == 'e')
627 {
628 if (dest == XEXP (x, i)
629 || (GET_CODE (dest) == REG && GET_CODE (XEXP (x, i)) == REG
630 && REGNO (dest) == REGNO (XEXP (x, i))))
631 this_result = loc;
632 else
633 this_result = find_single_use_1 (dest, &XEXP (x, i));
634
635 if (result == 0)
636 result = this_result;
637 else if (this_result)
638 /* Duplicate usage. */
639 return 0;
640 }
641 else if (fmt[i] == 'E')
642 {
643 int j;
644
645 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
646 {
647 if (XVECEXP (x, i, j) == dest
648 || (GET_CODE (dest) == REG
649 && GET_CODE (XVECEXP (x, i, j)) == REG
650 && REGNO (XVECEXP (x, i, j)) == REGNO (dest)))
651 this_result = loc;
652 else
653 this_result = find_single_use_1 (dest, &XVECEXP (x, i, j));
654
655 if (result == 0)
656 result = this_result;
657 else if (this_result)
658 return 0;
659 }
660 }
661 }
662
663 return result;
664}
665\f
666/* See if DEST, produced in INSN, is used only a single time in the
667 sequel. If so, return a pointer to the innermost rtx expression in which
668 it is used.
669
670 If PLOC is non-zero, *PLOC is set to the insn containing the single use.
671
 672 This routine will usually return zero either before flow is called (because
673 there will be no LOG_LINKS notes) or after reload (because the REG_DEAD
674 note can't be trusted).
675
676 If DEST is cc0_rtx, we look only at the next insn. In that case, we don't
677 care about REG_DEAD notes or LOG_LINKS.
678
679 Otherwise, we find the single use by finding an insn that has a
680 LOG_LINKS pointing at INSN and has a REG_DEAD note for DEST. If DEST is
681 only referenced once in that insn, we know that it must be the first
682 and last insn referencing DEST. */
683
684rtx *
685find_single_use (dest, insn, ploc)
686 rtx dest;
687 rtx insn;
688 rtx *ploc;
689{
690 rtx next;
691 rtx *result;
692 rtx link;
693
694#ifdef HAVE_cc0
695 if (dest == cc0_rtx)
696 {
697 next = NEXT_INSN (insn);
698 if (next == 0
699 || (GET_CODE (next) != INSN && GET_CODE (next) != JUMP_INSN))
700 return 0;
701
702 result = find_single_use_1 (dest, &PATTERN (next));
703 if (result && ploc)
704 *ploc = next;
705 return result;
706 }
707#endif
708
709 if (reload_completed || reload_in_progress || GET_CODE (dest) != REG)
710 return 0;
711
712 for (next = next_nonnote_insn (insn);
713 next != 0 && GET_CODE (next) != CODE_LABEL;
714 next = next_nonnote_insn (next))
715 if (GET_RTX_CLASS (GET_CODE (next)) == 'i' && dead_or_set_p (next, dest))
716 {
717 for (link = LOG_LINKS (next); link; link = XEXP (link, 1))
718 if (XEXP (link, 0) == insn)
719 break;
720
721 if (link)
722 {
723 result = find_single_use_1 (dest, &PATTERN (next));
724 if (ploc)
725 *ploc = next;
726 return result;
727 }
728 }
729
730 return 0;
731}
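
/* Illustrative usage sketch, not part of the original source: a combine-like
   pass can ask whether the register set by an insn is read in exactly one
   later insn before rewriting that use.  `dest' and `insn' are hypothetical.

      rtx use_insn;
      rtx *usep = find_single_use (dest, insn, &use_insn);

   If `usep' is nonzero, *usep is the innermost expression in `use_insn'
   that mentions `dest', and `use_insn' is the only insn that uses it.  */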
732\f
733/* Return 1 if OP is a valid general operand for machine mode MODE.
734 This is either a register reference, a memory reference,
735 or a constant. In the case of a memory reference, the address
736 is checked for general validity for the target machine.
737
738 Register and memory references must have mode MODE in order to be valid,
739 but some constants have no machine mode and are valid for any mode.
740
741 If MODE is VOIDmode, OP is checked for validity for whatever mode
742 it has.
743
744 The main use of this function is as a predicate in match_operand
745 expressions in the machine description.
746
6dc42e49 747 For an explanation of this function's behavior for registers of
748 class NO_REGS, see the comment for `register_operand'. */
749
750int
751general_operand (op, mode)
752 register rtx op;
753 enum machine_mode mode;
754{
755 register enum rtx_code code = GET_CODE (op);
756 int mode_altering_drug = 0;
757
758 if (mode == VOIDmode)
759 mode = GET_MODE (op);
760
761 /* Don't accept CONST_INT or anything similar
762 if the caller wants something floating. */
763 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
764 && GET_MODE_CLASS (mode) != MODE_INT
765 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
766 return 0;
767
768 if (CONSTANT_P (op))
769 return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode)
770#ifdef LEGITIMATE_PIC_OPERAND_P
771 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
772#endif
773 && LEGITIMATE_CONSTANT_P (op));
774
775 /* Except for certain constants with VOIDmode, already checked for,
776 OP's mode must match MODE if MODE specifies a mode. */
777
778 if (GET_MODE (op) != mode)
779 return 0;
780
781 if (code == SUBREG)
782 {
783#ifdef INSN_SCHEDULING
784 /* On machines that have insn scheduling, we want all memory
 785 references to be explicit, so outlaw paradoxical SUBREGs. */
786 if (GET_CODE (SUBREG_REG (op)) == MEM
787 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op))))
788 return 0;
789#endif
790
791 op = SUBREG_REG (op);
792 code = GET_CODE (op);
793#if 0
794 /* No longer needed, since (SUBREG (MEM...))
795 will load the MEM into a reload reg in the MEM's own mode. */
796 mode_altering_drug = 1;
797#endif
798 }
799
800 if (code == REG)
801 /* A register whose class is NO_REGS is not a general operand. */
802 return (REGNO (op) >= FIRST_PSEUDO_REGISTER
803 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS);
804
805 if (code == MEM)
806 {
807 register rtx y = XEXP (op, 0);
808 if (! volatile_ok && MEM_VOLATILE_P (op))
809 return 0;
810 /* Use the mem's mode, since it will be reloaded thus. */
811 mode = GET_MODE (op);
812 GO_IF_LEGITIMATE_ADDRESS (mode, y, win);
813 }
814 return 0;
815
816 win:
817 if (mode_altering_drug)
818 return ! mode_dependent_address_p (XEXP (op, 0));
819 return 1;
820}
821\f
822/* Return 1 if OP is a valid memory address for a memory reference
823 of mode MODE.
824
825 The main use of this function is as a predicate in match_operand
826 expressions in the machine description. */
827
828int
829address_operand (op, mode)
830 register rtx op;
831 enum machine_mode mode;
832{
833 return memory_address_p (mode, op);
834}
835
836/* Return 1 if OP is a register reference of mode MODE.
837 If MODE is VOIDmode, accept a register in any mode.
838
839 The main use of this function is as a predicate in match_operand
840 expressions in the machine description.
841
842 As a special exception, registers whose class is NO_REGS are
843 not accepted by `register_operand'. The reason for this change
844 is to allow the representation of special architecture artifacts
845 (such as a condition code register) without extending the rtl
846 definitions. Since registers of class NO_REGS cannot be used
847 as registers in any case where register classes are examined,
848 it is most consistent to keep this function from accepting them. */
849
850int
851register_operand (op, mode)
852 register rtx op;
853 enum machine_mode mode;
854{
855 if (GET_MODE (op) != mode && mode != VOIDmode)
856 return 0;
857
858 if (GET_CODE (op) == SUBREG)
859 {
860 /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
861 because it is guaranteed to be reloaded into one.
862 Just make sure the MEM is valid in itself.
863 (Ideally, (SUBREG (MEM)...) should not exist after reload,
864 but currently it does result from (SUBREG (REG)...) where the
865 reg went on the stack.) */
866 if (! reload_completed && GET_CODE (SUBREG_REG (op)) == MEM)
867 return general_operand (op, mode);
868 op = SUBREG_REG (op);
869 }
870
871 /* We don't consider registers whose class is NO_REGS
872 to be a register operand. */
873 return (GET_CODE (op) == REG
874 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
875 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
876}
877
878/* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
879 or a hard register. */
880
881int
882scratch_operand (op, mode)
883 register rtx op;
884 enum machine_mode mode;
885{
886 return (GET_MODE (op) == mode
887 && (GET_CODE (op) == SCRATCH
888 || (GET_CODE (op) == REG
889 && REGNO (op) < FIRST_PSEUDO_REGISTER)));
890}
891
892/* Return 1 if OP is a valid immediate operand for mode MODE.
893
894 The main use of this function is as a predicate in match_operand
895 expressions in the machine description. */
896
897int
898immediate_operand (op, mode)
899 register rtx op;
900 enum machine_mode mode;
901{
902 /* Don't accept CONST_INT or anything similar
903 if the caller wants something floating. */
904 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
905 && GET_MODE_CLASS (mode) != MODE_INT
906 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
907 return 0;
908
909 return (CONSTANT_P (op)
910 && (GET_MODE (op) == mode || mode == VOIDmode
911 || GET_MODE (op) == VOIDmode)
912#ifdef LEGITIMATE_PIC_OPERAND_P
913 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
914#endif
915 && LEGITIMATE_CONSTANT_P (op));
916}
917
918/* Returns 1 if OP is an operand that is a CONST_INT. */
919
920int
921const_int_operand (op, mode)
922 register rtx op;
923 enum machine_mode mode;
924{
925 return GET_CODE (op) == CONST_INT;
926}
927
928/* Returns 1 if OP is an operand that is a constant integer or constant
929 floating-point number. */
930
931int
932const_double_operand (op, mode)
933 register rtx op;
934 enum machine_mode mode;
935{
936 /* Don't accept CONST_INT or anything similar
937 if the caller wants something floating. */
938 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
939 && GET_MODE_CLASS (mode) != MODE_INT
940 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
941 return 0;
942
943 return ((GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT)
944 && (mode == VOIDmode || GET_MODE (op) == mode
945 || GET_MODE (op) == VOIDmode));
946}
947
948/* Return 1 if OP is a general operand that is not an immediate operand. */
949
950int
951nonimmediate_operand (op, mode)
952 register rtx op;
953 enum machine_mode mode;
954{
955 return (general_operand (op, mode) && ! CONSTANT_P (op));
956}
957
958/* Return 1 if OP is a register reference or immediate value of mode MODE. */
959
960int
961nonmemory_operand (op, mode)
962 register rtx op;
963 enum machine_mode mode;
964{
965 if (CONSTANT_P (op))
966 {
967 /* Don't accept CONST_INT or anything similar
968 if the caller wants something floating. */
969 if (GET_MODE (op) == VOIDmode && mode != VOIDmode
970 && GET_MODE_CLASS (mode) != MODE_INT
971 && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
972 return 0;
973
974 return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode)
975#ifdef LEGITIMATE_PIC_OPERAND_P
976 && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
977#endif
978 && LEGITIMATE_CONSTANT_P (op));
979 }
980
981 if (GET_MODE (op) != mode && mode != VOIDmode)
982 return 0;
983
984 if (GET_CODE (op) == SUBREG)
985 {
986 /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
987 because it is guaranteed to be reloaded into one.
988 Just make sure the MEM is valid in itself.
989 (Ideally, (SUBREG (MEM)...) should not exist after reload,
990 but currently it does result from (SUBREG (REG)...) where the
991 reg went on the stack.) */
992 if (! reload_completed && GET_CODE (SUBREG_REG (op)) == MEM)
993 return general_operand (op, mode);
994 op = SUBREG_REG (op);
995 }
996
997 /* We don't consider registers whose class is NO_REGS
998 to be a register operand. */
999 return (GET_CODE (op) == REG
1000 && (REGNO (op) >= FIRST_PSEUDO_REGISTER
1001 || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
1002}
1003
1004/* Return 1 if OP is a valid operand that stands for pushing a
1005 value of mode MODE onto the stack.
1006
1007 The main use of this function is as a predicate in match_operand
1008 expressions in the machine description. */
1009
1010int
1011push_operand (op, mode)
1012 rtx op;
1013 enum machine_mode mode;
1014{
1015 if (GET_CODE (op) != MEM)
1016 return 0;
1017
1018 if (GET_MODE (op) != mode)
1019 return 0;
1020
1021 op = XEXP (op, 0);
1022
1023 if (GET_CODE (op) != STACK_PUSH_CODE)
1024 return 0;
1025
1026 return XEXP (op, 0) == stack_pointer_rtx;
1027}
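
/* Illustrative sketch, not part of the original source: on a machine whose
   stack grows downward, STACK_PUSH_CODE is PRE_DEC and `push_operand'
   accepts a memory reference of the form

      (mem:SI (pre_dec:SI (reg sp)))

   which could be built and tested roughly as

      rtx addr = gen_rtx (PRE_DEC, Pmode, stack_pointer_rtx);
      rtx mem = gen_rtx (MEM, SImode, addr);
      int ok = push_operand (mem, SImode);

   `ok' is then nonzero: the MEM stands for pushing an SImode value.  */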
1028
1029/* Return 1 if ADDR is a valid memory address for mode MODE. */
1030
1031int
1032memory_address_p (mode, addr)
1033 enum machine_mode mode;
1034 register rtx addr;
1035{
1036 GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
1037 return 0;
1038
1039 win:
1040 return 1;
1041}
1042
1043/* Return 1 if OP is a valid memory reference with mode MODE,
1044 including a valid address.
1045
1046 The main use of this function is as a predicate in match_operand
1047 expressions in the machine description. */
1048
1049int
1050memory_operand (op, mode)
1051 register rtx op;
1052 enum machine_mode mode;
1053{
1054 rtx inner;
1055
1056 if (! reload_completed)
1057 /* Note that no SUBREG is a memory operand before end of reload pass,
1058 because (SUBREG (MEM...)) forces reloading into a register. */
1059 return GET_CODE (op) == MEM && general_operand (op, mode);
1060
1061 if (mode != VOIDmode && GET_MODE (op) != mode)
1062 return 0;
1063
1064 inner = op;
1065 if (GET_CODE (inner) == SUBREG)
1066 inner = SUBREG_REG (inner);
1067
1068 return (GET_CODE (inner) == MEM && general_operand (op, mode));
1069}
1070
1071/* Return 1 if OP is a valid indirect memory reference with mode MODE;
1072 that is, a memory reference whose address is a general_operand. */
1073
1074int
1075indirect_operand (op, mode)
1076 register rtx op;
1077 enum machine_mode mode;
1078{
1079 /* Before reload, a SUBREG isn't in memory (see memory_operand, above). */
1080 if (! reload_completed
1081 && GET_CODE (op) == SUBREG && GET_CODE (SUBREG_REG (op)) == MEM)
1082 {
1083 register int offset = SUBREG_WORD (op) * UNITS_PER_WORD;
1084 rtx inner = SUBREG_REG (op);
1085
1086#if BYTES_BIG_ENDIAN
1087 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (op)))
1088 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (inner))));
1089#endif
1090
1091 /* The only way that we can have a general_operand as the resulting
1092 address is if OFFSET is zero and the address already is an operand
1093 or if the address is (plus Y (const_int -OFFSET)) and Y is an
1094 operand. */
1095
1096 return ((offset == 0 && general_operand (XEXP (inner, 0), Pmode))
1097 || (GET_CODE (XEXP (inner, 0)) == PLUS
1098 && GET_CODE (XEXP (XEXP (inner, 0), 1)) == CONST_INT
1099 && INTVAL (XEXP (XEXP (inner, 0), 1)) == -offset
1100 && general_operand (XEXP (XEXP (inner, 0), 0), Pmode)));
1101 }
1102
1103 return (GET_CODE (op) == MEM
1104 && memory_operand (op, mode)
1105 && general_operand (XEXP (op, 0), Pmode));
1106}
1107
1108/* Return 1 if this is a comparison operator. This allows the use of
1109 MATCH_OPERATOR to recognize all the branch insns. */
1110
1111int
1112comparison_operator (op, mode)
1113 register rtx op;
1114 enum machine_mode mode;
1115{
1116 return ((mode == VOIDmode || GET_MODE (op) == mode)
1117 && GET_RTX_CLASS (GET_CODE (op)) == '<');
1118}
1119\f
1120/* If BODY is an insn body that uses ASM_OPERANDS,
1121 return the number of operands (both input and output) in the insn.
1122 Otherwise return -1. */
1123
1124int
1125asm_noperands (body)
1126 rtx body;
1127{
1128 if (GET_CODE (body) == ASM_OPERANDS)
1129 /* No output operands: return number of input operands. */
1130 return ASM_OPERANDS_INPUT_LENGTH (body);
1131 if (GET_CODE (body) == SET && GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
1132 /* Single output operand: BODY is (set OUTPUT (asm_operands ...)). */
1133 return ASM_OPERANDS_INPUT_LENGTH (SET_SRC (body)) + 1;
1134 else if (GET_CODE (body) == PARALLEL
1135 && GET_CODE (XVECEXP (body, 0, 0)) == SET
1136 && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
1137 {
1138 /* Multiple output operands, or 1 output plus some clobbers:
1139 body is [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...]. */
1140 int i;
1141 int n_sets;
1142
1143 /* Count backwards through CLOBBERs to determine number of SETs. */
1144 for (i = XVECLEN (body, 0); i > 0; i--)
1145 {
1146 if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
1147 break;
1148 if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
1149 return -1;
1150 }
1151
1152 /* N_SETS is now number of output operands. */
1153 n_sets = i;
1154
1155 /* Verify that all the SETs we have
1156 came from a single original asm_operands insn
1157 (so that invalid combinations are blocked). */
1158 for (i = 0; i < n_sets; i++)
1159 {
1160 rtx elt = XVECEXP (body, 0, i);
1161 if (GET_CODE (elt) != SET)
1162 return -1;
1163 if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
1164 return -1;
1165 /* If these ASM_OPERANDS rtx's came from different original insns
1166 then they aren't allowed together. */
1167 if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
1168 != ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (body, 0, 0))))
1169 return -1;
1170 }
1171 return (ASM_OPERANDS_INPUT_LENGTH (SET_SRC (XVECEXP (body, 0, 0)))
1172 + n_sets);
1173 }
1174 else if (GET_CODE (body) == PARALLEL
1175 && GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
1176 {
1177 /* 0 outputs, but some clobbers:
1178 body is [(asm_operands ...) (clobber (reg ...))...]. */
1179 int i;
1180
1181 /* Make sure all the other parallel things really are clobbers. */
1182 for (i = XVECLEN (body, 0) - 1; i > 0; i--)
1183 if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
1184 return -1;
1185
1186 return ASM_OPERANDS_INPUT_LENGTH (XVECEXP (body, 0, 0));
1187 }
1188 else
1189 return -1;
1190}
1191
1192/* Assuming BODY is an insn body that uses ASM_OPERANDS,
1193 copy its operands (both input and output) into the vector OPERANDS,
1194 the locations of the operands within the insn into the vector OPERAND_LOCS,
1195 and the constraints for the operands into CONSTRAINTS.
1196 Write the modes of the operands into MODES.
1197 Return the assembler-template.
1198
1199 If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
1200 we don't store that info. */
1201
1202char *
1203decode_asm_operands (body, operands, operand_locs, constraints, modes)
1204 rtx body;
1205 rtx *operands;
1206 rtx **operand_locs;
1207 char **constraints;
1208 enum machine_mode *modes;
1209{
1210 register int i;
1211 int noperands;
1212 char *template = 0;
1213
1214 if (GET_CODE (body) == SET && GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
1215 {
1216 rtx asmop = SET_SRC (body);
1217 /* Single output operand: BODY is (set OUTPUT (asm_operands ....)). */
1218
1219 noperands = ASM_OPERANDS_INPUT_LENGTH (asmop) + 1;
1220
1221 for (i = 1; i < noperands; i++)
1222 {
1223 if (operand_locs)
1224 operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i - 1);
1225 if (operands)
1226 operands[i] = ASM_OPERANDS_INPUT (asmop, i - 1);
1227 if (constraints)
1228 constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i - 1);
1229 if (modes)
1230 modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i - 1);
1231 }
1232
1233 /* The output is in the SET.
1234 Its constraint is in the ASM_OPERANDS itself. */
1235 if (operands)
1236 operands[0] = SET_DEST (body);
1237 if (operand_locs)
1238 operand_locs[0] = &SET_DEST (body);
1239 if (constraints)
1240 constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
1241 if (modes)
1242 modes[0] = GET_MODE (SET_DEST (body));
1243 template = ASM_OPERANDS_TEMPLATE (asmop);
1244 }
1245 else if (GET_CODE (body) == ASM_OPERANDS)
1246 {
1247 rtx asmop = body;
1248 /* No output operands: BODY is (asm_operands ....). */
1249
1250 noperands = ASM_OPERANDS_INPUT_LENGTH (asmop);
1251
1252 /* The input operands are found in the 1st element vector. */
1253 /* Constraints for inputs are in the 2nd element vector. */
1254 for (i = 0; i < noperands; i++)
1255 {
1256 if (operand_locs)
1257 operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
1258 if (operands)
1259 operands[i] = ASM_OPERANDS_INPUT (asmop, i);
1260 if (constraints)
1261 constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
1262 if (modes)
1263 modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
1264 }
1265 template = ASM_OPERANDS_TEMPLATE (asmop);
1266 }
1267 else if (GET_CODE (body) == PARALLEL
1268 && GET_CODE (XVECEXP (body, 0, 0)) == SET)
1269 {
1270 rtx asmop = SET_SRC (XVECEXP (body, 0, 0));
1271 int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs. */
1272 int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);
1273 int nout = 0; /* Does not include CLOBBERs. */
1274
1275 /* At least one output, plus some CLOBBERs. */
1276
1277 /* The outputs are in the SETs.
1278 Their constraints are in the ASM_OPERANDS itself. */
1279 for (i = 0; i < nparallel; i++)
1280 {
1281 if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
1282 break; /* Past last SET */
1283
1284 if (operands)
1285 operands[i] = SET_DEST (XVECEXP (body, 0, i));
1286 if (operand_locs)
1287 operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
1288 if (constraints)
1289 constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
1290 if (modes)
1291 modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
1292 nout++;
1293 }
1294
1295 for (i = 0; i < nin; i++)
1296 {
1297 if (operand_locs)
1298 operand_locs[i + nout] = &ASM_OPERANDS_INPUT (asmop, i);
1299 if (operands)
1300 operands[i + nout] = ASM_OPERANDS_INPUT (asmop, i);
1301 if (constraints)
1302 constraints[i + nout] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
1303 if (modes)
1304 modes[i + nout] = ASM_OPERANDS_INPUT_MODE (asmop, i);
1305 }
1306
1307 template = ASM_OPERANDS_TEMPLATE (asmop);
1308 }
1309 else if (GET_CODE (body) == PARALLEL
1310 && GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
1311 {
1312 /* No outputs, but some CLOBBERs. */
1313
1314 rtx asmop = XVECEXP (body, 0, 0);
1315 int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);
1316
1317 for (i = 0; i < nin; i++)
1318 {
1319 if (operand_locs)
1320 operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
1321 if (operands)
1322 operands[i] = ASM_OPERANDS_INPUT (asmop, i);
1323 if (constraints)
1324 constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
1325 if (modes)
1326 modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
1327 }
1328
1329 template = ASM_OPERANDS_TEMPLATE (asmop);
1330 }
1331
1332 return template;
1333}
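
/* Illustrative usage sketch, not part of the original source: callers such
   as `check_asm_operands' above size their vectors with `asm_noperands'
   and then extract the operands.  `body' is hypothetical and N is assumed
   to be positive.

      int n = asm_noperands (body);
      rtx *ops = (rtx *) alloca (n * sizeof (rtx));
      char **cons = (char **) alloca (n * sizeof (char *));
      char *templ = decode_asm_operands (body, ops, NULL_PTR, cons, NULL_PTR);

   Afterwards `templ' is the assembler template and, for each operand I
   (outputs first, then inputs), ops[I] is the operand and cons[I] its
   constraint string.  */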
1334\f
1335/* Given an rtx *P, if it is a sum containing an integer constant term,
1336 return the location (type rtx *) of the pointer to that constant term.
1337 Otherwise, return a null pointer. */
1338
1339static rtx *
1340find_constant_term_loc (p)
1341 rtx *p;
1342{
1343 register rtx *tem;
1344 register enum rtx_code code = GET_CODE (*p);
1345
1346 /* If *P IS such a constant term, P is its location. */
1347
1348 if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
1349 || code == CONST)
1350 return p;
1351
1352 /* Otherwise, if not a sum, it has no constant term. */
1353
1354 if (GET_CODE (*p) != PLUS)
1355 return 0;
1356
1357 /* If one of the summands is constant, return its location. */
1358
1359 if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
1360 && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
1361 return p;
1362
1363 /* Otherwise, check each summand for containing a constant term. */
1364
1365 if (XEXP (*p, 0) != 0)
1366 {
1367 tem = find_constant_term_loc (&XEXP (*p, 0));
1368 if (tem != 0)
1369 return tem;
1370 }
1371
1372 if (XEXP (*p, 1) != 0)
1373 {
1374 tem = find_constant_term_loc (&XEXP (*p, 1));
1375 if (tem != 0)
1376 return tem;
1377 }
1378
1379 return 0;
1380}
1381\f
1382/* Return 1 if OP is a memory reference
1383 whose address contains no side effects
1384 and remains valid after the addition
1385 of a positive integer less than the
1386 size of the object being referenced.
1387
1388 We assume that the original address is valid and do not check it.
1389
1390 This uses strict_memory_address_p as a subroutine, so
1391 don't use it before reload. */
1392
1393int
1394offsettable_memref_p (op)
1395 rtx op;
1396{
1397 return ((GET_CODE (op) == MEM)
1398 && offsettable_address_p (1, GET_MODE (op), XEXP (op, 0)));
1399}
1400
1401/* Similar, but don't require a strictly valid mem ref:
1402 consider pseudo-regs valid as index or base regs. */
1403
1404int
1405offsettable_nonstrict_memref_p (op)
1406 rtx op;
1407{
1408 return ((GET_CODE (op) == MEM)
1409 && offsettable_address_p (0, GET_MODE (op), XEXP (op, 0)));
1410}
1411
1412/* Return 1 if Y is a memory address which contains no side effects
1413 and would remain valid after the addition of a positive integer
1414 less than the size of that mode.
1415
1416 We assume that the original address is valid and do not check it.
1417 We do check that it is valid for narrower modes.
1418
1419 If STRICTP is nonzero, we require a strictly valid address,
1420 for the sake of use in reload.c. */
1421
1422int
1423offsettable_address_p (strictp, mode, y)
1424 int strictp;
1425 enum machine_mode mode;
1426 register rtx y;
1427{
1428 register enum rtx_code ycode = GET_CODE (y);
1429 register rtx z;
1430 rtx y1 = y;
1431 rtx *y2;
1432 int (*addressp) () = (strictp ? strict_memory_address_p : memory_address_p);
1433
1434 if (CONSTANT_ADDRESS_P (y))
1435 return 1;
1436
1437 /* Adjusting an offsettable address involves changing to a narrower mode.
1438 Make sure that's OK. */
1439
1440 if (mode_dependent_address_p (y))
1441 return 0;
1442
1443 /* If the expression contains a constant term,
1444 see if it remains valid when max possible offset is added. */
1445
1446 if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
1447 {
1448 int good;
1449
1450 y1 = *y2;
1451 *y2 = plus_constant (*y2, GET_MODE_SIZE (mode) - 1);
1452 /* Use QImode because an odd displacement may be automatically invalid
1453 for any wider mode. But it should be valid for a single byte. */
1454 good = (*addressp) (QImode, y);
1455
1456 /* In any case, restore old contents of memory. */
1457 *y2 = y1;
1458 return good;
1459 }
1460
1461 if (ycode == PRE_DEC || ycode == PRE_INC
1462 || ycode == POST_DEC || ycode == POST_INC)
1463 return 0;
1464
1465 /* The offset added here is chosen as the maximum offset that
1466 any instruction could need to add when operating on something
1467 of the specified mode. We assume that if Y and Y+c are
1468 valid addresses then so is Y+d for all 0<d<c. */
1469
1470 z = plus_constant_for_output (y, GET_MODE_SIZE (mode) - 1);
1471
1472 /* Use QImode because an odd displacement may be automatically invalid
1473 for any wider mode. But it should be valid for a single byte. */
1474 return (*addressp) (QImode, z);
1475}
1476
1477/* Return 1 if ADDR is an address-expression whose effect depends
1478 on the mode of the memory reference it is used in.
1479
1480 Autoincrement addressing is a typical example of mode-dependence
1481 because the amount of the increment depends on the mode. */
1482
1483int
1484mode_dependent_address_p (addr)
1485 rtx addr;
1486{
1487 GO_IF_MODE_DEPENDENT_ADDRESS (addr, win);
1488 return 0;
1489 win:
1490 return 1;
1491}
1492
1493/* Return 1 if OP is a general operand
1494 other than a memory ref with a mode dependent address. */
1495
1496int
1497mode_independent_operand (op, mode)
1498 enum machine_mode mode;
1499 rtx op;
1500{
1501 rtx addr;
1502
1503 if (! general_operand (op, mode))
1504 return 0;
1505
1506 if (GET_CODE (op) != MEM)
1507 return 1;
1508
1509 addr = XEXP (op, 0);
1510 GO_IF_MODE_DEPENDENT_ADDRESS (addr, lose);
1511 return 1;
1512 lose:
1513 return 0;
1514}
1515
1516/* Given an operand OP that is a valid memory reference
1517 which satisfies offsettable_memref_p,
1518 return a new memory reference whose address has been adjusted by OFFSET.
1519 OFFSET should be positive and less than the size of the object referenced.
1520*/
1521
1522rtx
1523adj_offsettable_operand (op, offset)
1524 rtx op;
1525 int offset;
1526{
1527 register enum rtx_code code = GET_CODE (op);
1528
1529 if (code == MEM)
1530 {
1531 register rtx y = XEXP (op, 0);
1532 register rtx new;
1533
1534 if (CONSTANT_ADDRESS_P (y))
1535 {
1536 new = gen_rtx (MEM, GET_MODE (op), plus_constant_for_output (y, offset));
1537 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (op);
1538 return new;
1539 }
1540
1541 if (GET_CODE (y) == PLUS)
1542 {
1543 rtx z = y;
1544 register rtx *const_loc;
1545
1546 op = copy_rtx (op);
1547 z = XEXP (op, 0);
1548 const_loc = find_constant_term_loc (&z);
1549 if (const_loc)
1550 {
1551 *const_loc = plus_constant_for_output (*const_loc, offset);
1552 return op;
1553 }
1554 }
1555
1556 new = gen_rtx (MEM, GET_MODE (op), plus_constant_for_output (y, offset));
1557 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (op);
1558 return new;
1559 }
1560 abort ();
1561}
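
/* Illustrative usage sketch, not part of the original source: addressing
   the second word of a multi-word memory operand.  `op' and `highpart'
   are hypothetical.

      rtx highpart;
      if (offsettable_memref_p (op))
        highpart = adj_offsettable_operand (op, UNITS_PER_WORD);

   `highpart' then refers to the same object, UNITS_PER_WORD bytes further
   on, and the adjusted address is what the offsettable check vouched for.  */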
1562\f
1563#ifdef REGISTER_CONSTRAINTS
1564
1565/* Check the operands of an insn (found in recog_operands)
1566 against the insn's operand constraints (found via INSN_CODE_NUM)
1567 and return 1 if they are valid.
1568
1569 WHICH_ALTERNATIVE is set to a number which indicates which
1570 alternative of constraints was matched: 0 for the first alternative,
1571 1 for the next, etc.
1572
 1573 In addition, when two operands are matched
1574 and it happens that the output operand is (reg) while the
1575 input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
1576 make the output operand look like the input.
1577 This is because the output operand is the one the template will print.
1578
1579 This is used in final, just before printing the assembler code and by
1580 the routines that determine an insn's attribute.
1581
1582 If STRICT is a positive non-zero value, it means that we have been
1583 called after reload has been completed. In that case, we must
1584 do all checks strictly. If it is zero, it means that we have been called
1585 before reload has completed. In that case, we first try to see if we can
1586 find an alternative that matches strictly. If not, we try again, this
1587 time assuming that reload will fix up the insn. This provides a "best
1588 guess" for the alternative and is used to compute attributes of insns prior
1589 to reload. A negative value of STRICT is used for this internal call. */
1590
1591struct funny_match
1592{
1593 int this, other;
1594};
1595
1596int
1597constrain_operands (insn_code_num, strict)
1598 int insn_code_num;
1599 int strict;
1600{
1601 char *constraints[MAX_RECOG_OPERANDS];
1602 int matching_operands[MAX_RECOG_OPERANDS];
1603 enum op_type {OP_IN, OP_OUT, OP_INOUT} op_types[MAX_RECOG_OPERANDS];
1604 int earlyclobber[MAX_RECOG_OPERANDS];
1605 register int c;
1606 int noperands = insn_n_operands[insn_code_num];
1607
1608 struct funny_match funny_match[MAX_RECOG_OPERANDS];
1609 int funny_match_index;
1610 int nalternatives = insn_n_alternatives[insn_code_num];
1611
1612 if (noperands == 0 || nalternatives == 0)
1613 return 1;
1614
1615 for (c = 0; c < noperands; c++)
1616 {
1617 constraints[c] = insn_operand_constraint[insn_code_num][c];
1618 matching_operands[c] = -1;
1619 op_types[c] = OP_IN;
1620 }
1621
1622 which_alternative = 0;
1623
1624 while (which_alternative < nalternatives)
1625 {
1626 register int opno;
1627 int lose = 0;
1628 funny_match_index = 0;
1629
1630 for (opno = 0; opno < noperands; opno++)
1631 {
1632 register rtx op = recog_operand[opno];
1633 enum machine_mode mode = GET_MODE (op);
1634 register char *p = constraints[opno];
1635 int offset = 0;
1636 int win = 0;
1637 int val;
1638
1639 earlyclobber[opno] = 0;
1640
1641 if (GET_CODE (op) == SUBREG)
1642 {
1643 if (GET_CODE (SUBREG_REG (op)) == REG
1644 && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
1645 offset = SUBREG_WORD (op);
1646 op = SUBREG_REG (op);
1647 }
1648
1649 /* An empty constraint or empty alternative
1650 allows anything which matched the pattern. */
1651 if (*p == 0 || *p == ',')
1652 win = 1;
1653
1654 while (*p && (c = *p++) != ',')
1655 switch (c)
1656 {
1657 case '?':
1658 case '#':
1659 case '!':
1660 case '*':
1661 case '%':
1662 break;
1663
1664 case '=':
1665 op_types[opno] = OP_OUT;
1666 break;
1667
1668 case '+':
1669 op_types[opno] = OP_INOUT;
1670 break;
1671
1672 case '&':
1673 earlyclobber[opno] = 1;
1674 break;
1675
1676 case '0':
1677 case '1':
1678 case '2':
1679 case '3':
1680 case '4':
1681 /* This operand must be the same as a previous one.
1682 This kind of constraint is used for instructions such
1683 as add when they take only two operands.
1684
1685 Note that the lower-numbered operand is passed first.
1686
1687 If we are not testing strictly, assume that this constraint
1688 will be satisfied. */
1689 if (strict < 0)
1690 val = 1;
1691 else
1692 val = operands_match_p (recog_operand[c - '0'],
1693 recog_operand[opno]);
1694
1695 matching_operands[opno] = c - '0';
1696 matching_operands[c - '0'] = opno;
1697
1698 if (val != 0)
1699 win = 1;
1700 /* If output is *x and input is *--x,
1701 arrange later to change the output to *--x as well,
1702 since the output op is the one that will be printed. */
1703 if (val == 2 && strict > 0)
1704 {
1705 funny_match[funny_match_index].this = opno;
1706 funny_match[funny_match_index++].other = c - '0';
1707 }
1708 break;
1709
1710 case 'p':
1711 /* p is used for address_operands. When we are called by
1712 gen_input_reload, no one will have checked that the
1713 address is strictly valid, i.e., that all pseudos
1714 requiring hard regs have gotten them. */
1715 if (strict <= 0
1716 || (strict_memory_address_p
1717 (insn_operand_mode[insn_code_num][opno], op)))
1718 win = 1;
1719 break;
1720
1721 /* No need to check general_operand again;
1722 it was done in insn-recog.c. */
1723 case 'g':
1724 /* Anything goes unless it is a REG and really has a hard reg
1725 but the hard reg is not in the class GENERAL_REGS. */
1726 if (strict < 0
1727 || GENERAL_REGS == ALL_REGS
1728 || GET_CODE (op) != REG
1729 || (reload_in_progress
1730 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
1731 || reg_fits_class_p (op, GENERAL_REGS, offset, mode))
1732 win = 1;
1733 break;
1734
1735 case 'r':
1736 if (strict < 0
1737 || (strict == 0
1738 && GET_CODE (op) == REG
1739 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
1740 || (strict == 0 && GET_CODE (op) == SCRATCH)
1741 || (GET_CODE (op) == REG
1742 && (GENERAL_REGS == ALL_REGS
1743 || reg_fits_class_p (op, GENERAL_REGS,
1744 offset, mode))))
1745 win = 1;
1746 break;
1747
1748 case 'X':
1749 /* This is used for a MATCH_SCRATCH in the cases when we
1750 don't actually need anything. So anything goes any time. */
1751 win = 1;
1752 break;
1753
1754 case 'm':
1755 if (GET_CODE (op) == MEM
1756 /* Before reload, accept what reload can turn into mem. */
1757 || (strict < 0 && CONSTANT_P (op))
1758 /* During reload, accept a pseudo */
1759 || (reload_in_progress && GET_CODE (op) == REG
1760 && REGNO (op) >= FIRST_PSEUDO_REGISTER))
1761 win = 1;
1762 break;
1763
1764 case '<':
1765 if (GET_CODE (op) == MEM
1766 && (GET_CODE (XEXP (op, 0)) == PRE_DEC
1767 || GET_CODE (XEXP (op, 0)) == POST_DEC))
1768 win = 1;
1769 break;
1770
1771 case '>':
1772 if (GET_CODE (op) == MEM
1773 && (GET_CODE (XEXP (op, 0)) == PRE_INC
1774 || GET_CODE (XEXP (op, 0)) == POST_INC))
1775 win = 1;
1776 break;
1777
1778 case 'E':
1779 /* Match any CONST_DOUBLE, but only if
1780 we can examine the bits of it reliably. */
1781 if ((HOST_FLOAT_FORMAT != TARGET_FLOAT_FORMAT
9e4223f2 1782 || HOST_BITS_PER_WIDE_INT != BITS_PER_WORD)
d1b765a5 1783 && GET_MODE (op) != VOIDmode && ! flag_pretend_float)
1784 break;
1785 if (GET_CODE (op) == CONST_DOUBLE)
1786 win = 1;
1787 break;
1788
1789 case 'F':
1790 if (GET_CODE (op) == CONST_DOUBLE)
1791 win = 1;
1792 break;
1793
1794 case 'G':
1795 case 'H':
1796 if (GET_CODE (op) == CONST_DOUBLE
1797 && CONST_DOUBLE_OK_FOR_LETTER_P (op, c))
1798 win = 1;
1799 break;
1800
1801 case 's':
1802 if (GET_CODE (op) == CONST_INT
1803 || (GET_CODE (op) == CONST_DOUBLE
1804 && GET_MODE (op) == VOIDmode))
1805 break;
1806 case 'i':
1807 if (CONSTANT_P (op))
1808 win = 1;
1809 break;
1810
1811 case 'n':
1812 if (GET_CODE (op) == CONST_INT
1813 || (GET_CODE (op) == CONST_DOUBLE
1814 && GET_MODE (op) == VOIDmode))
1815 win = 1;
1816 break;
1817
1818 case 'I':
1819 case 'J':
1820 case 'K':
1821 case 'L':
1822 case 'M':
1823 case 'N':
1824 case 'O':
1825 case 'P':
1826 if (GET_CODE (op) == CONST_INT
1827 && CONST_OK_FOR_LETTER_P (INTVAL (op), c))
1828 win = 1;
1829 break;
1830
1831#ifdef EXTRA_CONSTRAINT
1832 case 'Q':
1833 case 'R':
1834 case 'S':
1835 case 'T':
1836 case 'U':
1837 if (EXTRA_CONSTRAINT (op, c))
1838 win = 1;
1839 break;
1840#endif
1841
1842 case 'V':
1843 if (GET_CODE (op) == MEM
1844 && ! offsettable_memref_p (op))
1845 win = 1;
1846 break;
1847
1848 case 'o':
1849 if ((strict > 0 && offsettable_memref_p (op))
1850 || (strict == 0 && offsettable_nonstrict_memref_p (op))
1851 /* Before reload, accept what reload can handle. */
1852 || (strict < 0
1853 && (CONSTANT_P (op) || GET_CODE (op) == MEM))
1854 /* During reload, accept a pseudo */
1855 || (reload_in_progress && GET_CODE (op) == REG
1856 && REGNO (op) >= FIRST_PSEUDO_REGISTER))
1857 win = 1;
1858 break;
1859
1860 default:
1861 if (strict < 0
1862 || (strict == 0
1863 && GET_CODE (op) == REG
1864 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
1865 || (strict == 0 && GET_CODE (op) == SCRATCH)
1866 || (GET_CODE (op) == REG
1867 && reg_fits_class_p (op, REG_CLASS_FROM_LETTER (c),
1868 offset, mode)))
1869 win = 1;
1870 }
1871
1872 constraints[opno] = p;
1873 /* If this operand did not win somehow,
1874 this alternative loses. */
1875 if (! win)
1876 lose = 1;
1877 }
1878 /* This alternative won; the operands are ok.
1879 Change whichever operands this alternative says to change. */
1880 if (! lose)
1881 {
1882 int opno, eopno;
1883
1884 /* See if any earlyclobber operand conflicts with some other
1885 operand. */
1886
1887 if (strict > 0)
1888 for (eopno = 0; eopno < noperands; eopno++)
1889 /* Ignore earlyclobber operands now in memory,
1890 because we would often report failure when we have
1891 two memory operands, one of which was formerly a REG. */
1892 if (earlyclobber[eopno]
1893 && GET_CODE (recog_operand[eopno]) == REG)
1894 for (opno = 0; opno < noperands; opno++)
1895 if ((GET_CODE (recog_operand[opno]) == MEM
1896 || op_types[opno] != OP_OUT)
1897 && opno != eopno
1898 && constraints[opno] != 0
1899 && ! (matching_operands[opno] == eopno
1900 && rtx_equal_p (recog_operand[opno],
1901 recog_operand[eopno]))
1902 && ! safe_from_earlyclobber (recog_operand[opno],
1903 recog_operand[eopno]))
1904 lose = 1;
1905
1906 if (! lose)
2055cea7 1907 {
9e21be9d
RK
1908 while (--funny_match_index >= 0)
1909 {
1910 recog_operand[funny_match[funny_match_index].other]
1911 = recog_operand[funny_match[funny_match_index].this];
1912 }
1913
1914 return 1;
2055cea7 1915 }
2055cea7
RK
1916 }
1917
1918 which_alternative++;
1919 }
1920
1921 /* If we are about to reject this, but we are not to test strictly,
1922 try a very loose test. Only return failure if it fails also. */
1923 if (strict == 0)
1924 return constrain_operands (insn_code_num, -1);
1925 else
1926 return 0;
1927}
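
/* Illustrative usage sketch, not part of the original source: after reload,
   a pass that needs to know which constraint alternative an insn satisfies
   (for example, to choose an output template or compute an attribute) can
   do the following; `insn' and `alt' are hypothetical.

      insn_extract (insn);
      if (constrain_operands (INSN_CODE (insn), 1))
        alt = which_alternative;

   `alt' then holds the index, starting at 0, of the alternative that
   matched.  */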
1928
1929/* Return 1 iff OPERAND (assumed to be a REG rtx)
 1930 is a hard reg in class CLASS when its regno is offset by OFFSET
1931 and changed to mode MODE.
1932 If REG occupies multiple hard regs, all of them must be in CLASS. */
1933
1934int
1935reg_fits_class_p (operand, class, offset, mode)
1936 rtx operand;
1937 register enum reg_class class;
1938 int offset;
1939 enum machine_mode mode;
1940{
1941 register int regno = REGNO (operand);
1942 if (regno < FIRST_PSEUDO_REGISTER
1943 && TEST_HARD_REG_BIT (reg_class_contents[(int) class],
1944 regno + offset))
1945 {
1946 register int sr;
1947 regno += offset;
1948 for (sr = HARD_REGNO_NREGS (regno, mode) - 1;
1949 sr > 0; sr--)
1950 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) class],
1951 regno + sr))
1952 break;
1953 return sr == 0;
1954 }
1955
1956 return 0;
1957}
1958
1959#endif /* REGISTER_CONSTRAINTS */