1 /* Optimize by combining instructions for GNU compiler.
2 Copyright (C) 1987, 1988, 1992 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
21 /* This module is essentially the "combiner" phase of the U. of Arizona
22 Portable Optimizer, but redone to work on our list-structured
23 representation for RTL instead of their string representation.
25 The LOG_LINKS of each insn identify the most recent assignment
26 to each REG used in the insn. It is a list of previous insns,
27 each of which contains a SET for a REG that is used in this insn
28 and not used or set in between. LOG_LINKs never cross basic blocks.
29 They were set up by the preceding pass (lifetime analysis).
31 We try to combine each pair of insns joined by a logical link.
32 We also try to combine triples of insns A, B and C when
33 C has a link back to B and B has a link back to A.
35 LOG_LINKS does not have links for use of the CC0. They don't
36 need to, because the insn that sets the CC0 is always immediately
37 before the insn that tests it. So we always regard a branch
38 insn as having a logical link to the preceding insn. The same is true
39 for an insn explicitly using CC0.
41 We check (with use_crosses_set_p) to avoid combining in such a way
42 as to move a computation to a place where its value would be different.
44 Combination is done by mathematically substituting the previous
45 insn(s) values for the regs they set into the expressions in
46 the later insns that refer to these regs. If the result is a valid insn
47 for our target machine, according to the machine description,
48 we install it, delete the earlier insns, and update the data flow
49 information (LOG_LINKS and REG_NOTES) for what we did.
51 There are a few exceptions where the dataflow information created by
52 flow.c isn't completely updated:
54 - reg_live_length is not updated
55 - reg_n_refs is not adjusted in the rare case when a register is
56 no longer required in a computation
57 - there are extremely rare cases (see distribute_regnotes) when a
59 - a LOG_LINKS entry that refers to an insn with multiple SETs may be
60 removed because there is no way to know which register it was
63 To simplify substitution, we combine only when the earlier insn(s)
64 consist of only a single assignment. To simplify updating afterward,
65 we never combine when a subroutine call appears in the middle.
67 Since we do not represent assignments to CC0 explicitly except when that
68 is all an insn does, there is no LOG_LINKS entry in an insn that uses
69 the condition code for the insn that set the condition code.
70 Fortunately, these two insns must be consecutive.
71 Therefore, every JUMP_INSN is taken to have an implicit logical link
72 to the preceding insn. This is not quite right, since non-jumps can
73 also use the condition code; but in practice such insns would not
84 #include "basic-block.h"
85 #include "insn-config.h"
86 #include "insn-flags.h"
87 #include "insn-codes.h"
88 #include "insn-attr.h"
92 /* It is not safe to use ordinary gen_lowpart in combine.
93 Use gen_lowpart_for_combine instead. See comments there. */
/* Redefining the name to an otherwise-undefined identifier makes any
   accidental use of gen_lowpart in this file fail loudly at compile or
   link time instead of silently misbehaving. */
94 #define gen_lowpart dont_use_gen_lowpart_you_dummy
96 /* Number of attempts to combine instructions in this function. */
98 static int combine_attempts
;
100 /* Number of attempts that got as far as substitution in this function. */
102 static int combine_merges
;
104 /* Number of instructions combined with added SETs in this function. */
106 static int combine_extras
;
108 /* Number of instructions combined in this function. */
110 static int combine_successes
;
112 /* Totals over entire compilation. */
114 static int total_attempts
, total_merges
, total_extras
, total_successes
;
116 /* Vector mapping INSN_UIDs to cuids.
117 The cuids are like uids but increase monononically always.
118 Combine always uses cuids so that it can compare them.
119 But actually renumbering the uids, which we used to do,
120 proves to be a bad idea because it makes it hard to compare
121 the dumps produced by earlier passes with those from later passes. */
123 static int *uid_cuid
;
125 /* Get the cuid of an insn. */
127 #define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])
129 /* Maximum register number, which is the size of the tables below. */
131 static int combine_max_regno
;
133 /* Record last point of death of (hard or pseudo) register n. */
135 static rtx
*reg_last_death
;
137 /* Record last point of modification of (hard or pseudo) register n. */
139 static rtx
*reg_last_set
;
141 /* Record the cuid of the last insn that invalidated memory
142 (anything that writes memory, and subroutine calls, but not pushes). */
144 static int mem_last_set
;
146 /* Record the cuid of the last CALL_INSN
147 so we can tell whether a potential combination crosses any calls. */
149 static int last_call_cuid
;
151 /* When `subst' is called, this is the insn that is being modified
152 (by combining in a previous insn). The PATTERN of this insn
153 is still the old pattern partially modified and it should not be
154 looked at, but this may be used to examine the successors of the insn
155 to judge whether a simplification is valid. */
157 static rtx subst_insn
;
159 /* This is the lowest CUID that `subst' is currently dealing with.
160 get_last_value will not return a value if the register was set at or
161 after this CUID. If not for this mechanism, we could get confused if
162 I2 or I1 in try_combine were an insn that used the old value of a register
163 to obtain a new value. In that case, we might erroneously get the
164 new value of the register when we wanted the old one. */
166 static int subst_low_cuid
;
168 /* This is the value of undobuf.num_undo when we started processing this
169 substitution. This will prevent gen_rtx_combine from re-used a piece
170 from the previous expression. Doing so can produce circular rtl
173 static int previous_num_undos
;
175 /* The next group of arrays allows the recording of the last value assigned
176 to (hard or pseudo) register n. We use this information to see if a
177 operation being processed is redundant given the a prior operation peformed
178 on the register. For example, an `and' with a constant is redundant if
179 all the zero bits are already known to be turned off.
181 We use an approach similar to that used by cse, but change it in the
184 (1) We do not want to reinitialize at each label.
185 (2) It is useful, but not critical, to know the actual value assigned
186 to a register. Often just its form is helpful.
188 Therefore, we maintain the following arrays:
190 reg_last_set_value the last value assigned
191 reg_last_set_label records the value of label_tick when the
192 register was assigned
193 reg_last_set_table_tick records the value of label_tick when a
194 value using the register is assigned
195 reg_last_set_invalid set to non-zero when it is not valid
196 to use the value of this register in some
199 To understand the usage of these tables, it is important to understand
200 the distinction between the value in reg_last_set_value being valid
201 and the register being validly contained in some other expression in the
204 Entry I in reg_last_set_value is valid if it is non-zero, and either
205 reg_n_sets[i] is 1 or reg_last_set_label[i] == label_tick.
207 Register I may validly appear in any expression returned for the value
208 of another register if reg_n_sets[i] is 1. It may also appear in the
209 value for register J if reg_last_set_label[i] < reg_last_set_label[j] or
210 reg_last_set_invalid[j] is zero.
212 If an expression is found in the table containing a register which may
213 not validly appear in an expression, the register is replaced by
214 something that won't match, (clobber (const_int 0)).
216 reg_last_set_invalid[i] is set non-zero when register I is being assigned
217 to and reg_last_set_table_tick[i] == label_tick. */
219 /* Record last value assigned to (hard or pseudo) register n. */
221 static rtx
*reg_last_set_value
;
223 /* Record the value of label_tick when the value for register n is placed in
224 reg_last_set_value[n]. */
226 static short *reg_last_set_label
;
228 /* Record the value of label_tick when an expression involving register n
229 is placed in reg_last_set_value. */
231 static short *reg_last_set_table_tick
;
233 /* Set non-zero if references to register n in expressions should not be
236 static char *reg_last_set_invalid
;
238 /* Incremented for each label. */
240 static short label_tick
;
242 /* Some registers that are set more than once and used in more than one
243 basic block are nevertheless always set in similar ways. For example,
244 a QImode register may be loaded from memory in two places on a machine
245 where byte loads zero extend.
247 We record in the following array what we know about the significant
248 bits of a register, specifically which bits are known to be zero.
250 If an entry is zero, it means that we don't know anything special. */
252 static int *reg_significant
;
254 /* Mode used to compute significance in reg_significant. It is the largest
255 integer mode that can fit in HOST_BITS_PER_INT. */
257 static enum machine_mode significant_mode
;
259 /* Nonzero when reg_significant can be safely used. It is zero while
260 computing reg_significant. This prevents propagating values based
261 on previously set values, which can be incorrect if a variable
262 is modified in a loop. */
264 static int significant_valid
;
266 /* Record one modification to rtl structure
267 to be undone by storing old_contents into *where.
268 is_int is 1 if the contents are an int. */
/* NOTE(review): the definitions of `struct undo' and `struct undo_int'
   that this comment describes (original lines ~269-283) are missing from
   this extraction. */
284 /* Record a bunch of changes to be undone, up to MAX_UNDO of them.
285 num_undo says how many are currently recorded.
287 storage is nonzero if we must undo the allocation of new storage.
288 The value of storage is what to pass to obfree.
290 other_insn is nonzero if we have modified some other insn in the process
291 of working on subst_insn. It must be verified too. */
/* NOTE(review): the two members below are fragments of the `struct undobuf'
   definition; the definition of MAX_UNDO, the struct's opening, and its
   other members (num_undo, storage, other_insn) are missing from this
   extraction.  Do not edit without recovering the full original text. */
299 struct undo undo
[MAX_UNDO
];
303 static struct undobuf undobuf
;
305 /* Substitute NEWVAL, an rtx expression, into INTO, a place in some
306 insn. The substitution can be undone by undo_all. If INTO is already
307 set to NEWVAL, do not record this change. */
/* NOTE(review): this macro is incomplete as extracted -- the brace after
   the `if', the store of NEWVAL into INTO, and the closing `} while (0)'
   are missing.  Recover the original text before editing or compiling. */
309 #define SUBST(INTO, NEWVAL) \
310 do { if (undobuf.num_undo < MAX_UNDO) \
312 undobuf.undo[undobuf.num_undo].where = &INTO; \
313 undobuf.undo[undobuf.num_undo].old_contents = INTO; \
314 undobuf.undo[undobuf.num_undo].is_int = 0; \
316 if (undobuf.undo[undobuf.num_undo].old_contents != INTO) \
317 undobuf.num_undo++; \
321 /* Similar to SUBST, but NEWVAL is an int. INTO will normally be an XINT
323 Note that substitution for the value of a CONST_INT is not safe. */
/* NOTE(review): this macro is incomplete as extracted -- the declaration
   of the local `u' (a struct undo_int *), the store of NEWVAL into INTO,
   and the closing `} while (0)' are missing.  Recover the original text
   before editing or compiling. */
325 #define SUBST_INT(INTO, NEWVAL) \
326 do { if (undobuf.num_undo < MAX_UNDO) \
329 = (struct undo_int *)&undobuf.undo[undobuf.num_undo]; \
330 u->where = (int *) &INTO; \
331 u->old_contents = INTO; \
334 if (u->old_contents != INTO) \
335 undobuf.num_undo++; \
/* Number of times the pseudo being substituted for
   was found and replaced.  */
static int n_occurrences;
344 static void set_significant ();
345 static void move_deaths ();
347 static void record_value_for_reg ();
348 static void record_dead_and_set_regs ();
349 static int use_crosses_set_p ();
350 static rtx
try_combine ();
351 static rtx
*find_split_point ();
353 static void undo_all ();
354 static int reg_dead_at_p ();
355 static rtx
expand_compound_operation ();
356 static rtx
expand_field_assignment ();
357 static rtx
make_extraction ();
358 static int get_pos_from_mask ();
359 static rtx
make_field_assignment ();
360 static rtx
make_compound_operation ();
361 static rtx
apply_distributive_law ();
362 static rtx
simplify_and_const_int ();
363 static unsigned significant_bits ();
364 static int merge_outer_ops ();
365 static rtx
simplify_shift_const ();
366 static int recog_for_combine ();
367 static rtx
gen_lowpart_for_combine ();
368 static rtx
gen_rtx_combine ();
369 static rtx
gen_binary ();
370 static rtx
gen_unary ();
371 static enum rtx_code
simplify_comparison ();
372 static int reversible_comparison_p ();
373 static int get_last_value_validate ();
374 static rtx
get_last_value ();
375 static void distribute_notes ();
376 static void distribute_links ();
378 /* Main entry point for combiner. F is the first insn of the function.
379 NREGS is the first unused pseudo-reg number. */
/* NOTE(review): this function as extracted is missing many interior lines
   (parameter declarations, braces, several initializations, the `retry:'
   label, and the `goto retry;' after each successful try_combine).  The
   text below is preserved verbatim; recover the full original before
   attempting to compile or modify it. */
382 combine_instructions (f
, nregs
)
/* NOTE(review): the declarations `rtx f; int nregs;', the opening brace,
   and `register int i;' are absent here -- `i' is used below. */
386 register rtx insn
, next
, prev
;
388 register rtx links
, nextlinks
;
390 combine_attempts
= 0;
/* NOTE(review): resets of combine_merges and combine_extras appear to be
   missing between these two statements. */
393 combine_successes
= 0;
395 combine_max_regno
= nregs
;
397 reg_last_death
= (rtx
*) alloca (nregs
* sizeof (rtx
));
398 reg_last_set
= (rtx
*) alloca (nregs
* sizeof (rtx
));
399 reg_last_set_value
= (rtx
*) alloca (nregs
* sizeof (rtx
));
400 reg_last_set_table_tick
= (short *) alloca (nregs
* sizeof (short));
401 reg_last_set_label
= (short *) alloca (nregs
* sizeof (short));
/* NOTE(review): `sizeof (short)' below looks like a copy-paste slip --
   the array element type is char; this over-allocates (harmless) but
   should probably be sizeof (char) to match the bzero below. */
402 reg_last_set_invalid
= (char *) alloca (nregs
* sizeof (short));
403 reg_significant
= (int *) alloca (nregs
* sizeof (int));
405 bzero (reg_last_death
, nregs
* sizeof (rtx
));
406 bzero (reg_last_set
, nregs
* sizeof (rtx
));
407 bzero (reg_last_set_value
, nregs
* sizeof (rtx
));
408 bzero (reg_last_set_table_tick
, nregs
* sizeof (short));
/* NOTE(review): no bzero of reg_last_set_label is visible here -- confirm
   against the original whether it was cleared elsewhere. */
409 bzero (reg_last_set_invalid
, nregs
* sizeof (char));
410 bzero (reg_significant
, nregs
* sizeof (int));
412 init_recog_no_volatile ();
414 /* Compute maximum uid value so uid_cuid can be allocated. */
416 for (insn
= f
, i
= 0; insn
; insn
= NEXT_INSN (insn
))
417 if (INSN_UID (insn
) > i
)
/* NOTE(review): the loop body (presumably `i = INSN_UID (insn);') is
   missing from the extraction. */
420 uid_cuid
= (int *) alloca ((i
+ 1) * sizeof (int));
422 significant_mode
= mode_for_size (HOST_BITS_PER_INT
, MODE_INT
, 0);
424 /* Don't use reg_significant when computing it. This can cause problems
425 when, for example, we have j <<= 1 in a loop. */
427 significant_valid
= 0;
429 /* Compute the mapping from uids to cuids.
430 Cuids are numbers assigned to insns, like uids,
431 except that cuids increase monotonically through the code.
433 Scan all SETs and see if we can deduce anything about what
434 bits are significant for some registers. */
436 for (insn
= f
, i
= 0; insn
; insn
= NEXT_INSN (insn
))
438 INSN_CUID (insn
) = ++i
;
439 if (GET_RTX_CLASS (GET_CODE (insn
)) == 'i')
440 note_stores (PATTERN (insn
), set_significant
);
443 significant_valid
= 1;
445 /* Now scan all the insns in forward order. */
/* NOTE(review): the initializations of label_tick, last_call_cuid and
   mem_last_set, and the `next = 0;' at the top of the loop body, are
   missing from the extraction. */
451 for (insn
= f
; insn
; insn
= next
? next
: NEXT_INSN (insn
))
455 if (GET_CODE (insn
) == CODE_LABEL
)
/* NOTE(review): the statement for the CODE_LABEL case (presumably
   `label_tick++;') is missing from the extraction. */
458 else if (GET_CODE (insn
) == INSN
459 || GET_CODE (insn
) == CALL_INSN
460 || GET_CODE (insn
) == JUMP_INSN
)
462 /* Try this insn with each insn it links back to. */
464 for (links
= LOG_LINKS (insn
); links
; links
= XEXP (links
, 1))
465 if ((next
= try_combine (insn
, XEXP (links
, 0), 0)) != 0)
468 /* Try each sequence of three linked insns ending with this one. */
470 for (links
= LOG_LINKS (insn
); links
; links
= XEXP (links
, 1))
471 for (nextlinks
= LOG_LINKS (XEXP (links
, 0)); nextlinks
;
472 nextlinks
= XEXP (nextlinks
, 1))
473 if ((next
= try_combine (insn
, XEXP (links
, 0),
474 XEXP (nextlinks
, 0))) != 0)
478 /* Try to combine a jump insn that uses CC0
479 with a preceding insn that sets CC0, and maybe with its
480 logical predecessor as well.
481 This is how we make decrement-and-branch insns.
482 We need this special code because data flow connections
483 via CC0 do not get entered in LOG_LINKS. */
485 if (GET_CODE (insn
) == JUMP_INSN
486 && (prev
= prev_nonnote_insn (insn
)) != 0
487 && GET_CODE (prev
) == INSN
488 && sets_cc0_p (PATTERN (prev
)))
490 if ((next
= try_combine (insn
, prev
, 0)) != 0)
493 for (nextlinks
= LOG_LINKS (prev
); nextlinks
;
494 nextlinks
= XEXP (nextlinks
, 1))
495 if ((next
= try_combine (insn
, prev
,
496 XEXP (nextlinks
, 0))) != 0)
500 /* Do the same for an insn that explicitly references CC0. */
501 if (GET_CODE (insn
) == INSN
502 && (prev
= prev_nonnote_insn (insn
)) != 0
503 && GET_CODE (prev
) == INSN
504 && sets_cc0_p (PATTERN (prev
))
505 && GET_CODE (PATTERN (insn
)) == SET
506 && reg_mentioned_p (cc0_rtx
, SET_SRC (PATTERN (insn
))))
508 if ((next
= try_combine (insn
, prev
, 0)) != 0)
511 for (nextlinks
= LOG_LINKS (prev
); nextlinks
;
512 nextlinks
= XEXP (nextlinks
, 1))
513 if ((next
= try_combine (insn
, prev
,
514 XEXP (nextlinks
, 0))) != 0)
518 /* Finally, see if any of the insns that this insn links to
519 explicitly references CC0. If so, try this insn, that insn,
520 and its predecessor if it sets CC0. */
521 for (links
= LOG_LINKS (insn
); links
; links
= XEXP (links
, 1))
522 if (GET_CODE (XEXP (links
, 0)) == INSN
523 && GET_CODE (PATTERN (XEXP (links
, 0))) == SET
524 && reg_mentioned_p (cc0_rtx
, SET_SRC (PATTERN (XEXP (links
, 0))))
525 && (prev
= prev_nonnote_insn (XEXP (links
, 0))) != 0
526 && GET_CODE (prev
) == INSN
527 && sets_cc0_p (PATTERN (prev
))
528 && (next
= try_combine (insn
, XEXP (links
, 0), prev
)) != 0)
532 /* Try combining an insn with two different insns whose results it
   uses.  (NOTE(review): the original closing of this comment is missing
   from the extraction; terminated here.) */
534 for (links
= LOG_LINKS (insn
); links
; links
= XEXP (links
, 1))
535 for (nextlinks
= XEXP (links
, 1); nextlinks
;
536 nextlinks
= XEXP (nextlinks
, 1))
537 if ((next
= try_combine (insn
, XEXP (links
, 0),
538 XEXP (nextlinks
, 0))) != 0)
541 if (GET_CODE (insn
) != NOTE
)
542 record_dead_and_set_regs (insn
);
/* NOTE(review): the `retry:' label and the closing braces of the scan
   loop are missing from the extraction. */
549 total_attempts
+= combine_attempts
;
550 total_merges
+= combine_merges
;
551 total_extras
+= combine_extras
;
552 total_successes
+= combine_successes
;
555 /* Called via note_stores. If X is a pseudo that is used in more than
556 one basic block, is narrower than HOST_BITS_PER_INT, and is being
557 set, record what bits are significant. If we are clobbering X,
558 ignore this "set" because the clobbered value won't be used.
560 If we are setting only a portion of X and we can't figure out what
561 portion, assume all bits will be used since we don't know what will
   be used.  (NOTE(review): the original end of this comment is missing
   from the extraction; terminated here.) */
/* NOTE(review): this function as extracted is missing its parameter
   declarations (rtx x; rtx set;), braces, the `return;' for the CLOBBER
   case, and the `else' before the final assignment.  Text preserved
   verbatim. */
565 set_significant (x
, set
)
569 if (GET_CODE (x
) == REG
570 && REGNO (x
) >= FIRST_PSEUDO_REGISTER
571 && reg_n_sets
[REGNO (x
)] > 1
572 && reg_basic_block
[REGNO (x
)] < 0
573 && GET_MODE_BITSIZE (GET_MODE (x
)) <= HOST_BITS_PER_INT
)
575 if (GET_CODE (set
) == CLOBBER
)
578 /* If this is a complex assignment, see if we can convert it into a
   simpler form.  (NOTE(review): the original closing of this comment is
   missing from the extraction; terminated here.) */
580 set
= expand_field_assignment (set
);
581 if (SET_DEST (set
) == x
)
582 reg_significant
[REGNO (x
)]
583 |= significant_bits (SET_SRC (set
), significant_mode
);
/* Only part of X was set: per the header comment, assume all bits may be
   used.  (NOTE(review): the `else' keyword appears to be missing from
   the extraction here.) */
585 reg_significant
[REGNO (x
)] = GET_MODE_MASK (GET_MODE (x
));
589 /* See if INSN can be combined into I3. PRED and SUCC are optionally
590 insns that were previously combined into I3 or that will be combined
591 into the merger of INSN and I3.
593 Return 0 if the combination is not allowed for any reason.
595 If the combination is allowed, *PDEST will be set to the single
596 destination of INSN and *PSRC to the single source, and this function
   returns non-zero.  (NOTE(review): the original end of this comment is
   missing from the extraction; terminated here.) */
/* NOTE(review): this function as extracted is missing many interior lines
   -- parameter declarations, local declarations (`int i;', `rtx link;',
   `rtx p;'), braces, `break'/`return 0;' statements in the switch, and
   the final `*pdest = dest; *psrc = src; return 1;'.  Text preserved
   verbatim; recover the full original before modifying. */
600 can_combine_p (insn
, i3
, pred
, succ
, pdest
, psrc
)
607 rtx set
= 0, src
, dest
;
609 int all_adjacent
= (succ
? (next_active_insn (insn
) == succ
610 && next_active_insn (succ
) == i3
)
611 : next_active_insn (insn
) == i3
);
613 /* Can combine only if previous insn is a SET of a REG, a SUBREG or CC0.
614 or a PARALLEL consisting of such a SET and CLOBBERs.
616 If INSN has CLOBBER parallel parts, ignore them for our processing.
617 By definition, these happen during the execution of the insn. When it
618 is merged with another insn, all bets are off. If they are, in fact,
619 needed and aren't also supplied in I3, they may be added by
620 recog_for_combine. Otherwise, it won't match.
622 We can also ignore a SET whose SET_DEST is mentioned in a REG_UNUSED
625 Get the source and destination of INSN. If more than one, can't
   combine.  (NOTE(review): the original closing of this comment is
   missing from the extraction; terminated here.) */
628 if (GET_CODE (PATTERN (insn
)) == SET
)
629 set
= PATTERN (insn
);
630 else if (GET_CODE (PATTERN (insn
)) == PARALLEL
631 && GET_CODE (XVECEXP (PATTERN (insn
), 0, 0)) == SET
)
633 for (i
= 0; i
< XVECLEN (PATTERN (insn
), 0); i
++)
635 rtx elt
= XVECEXP (PATTERN (insn
), 0, i
);
637 switch (GET_CODE (elt
))
639 /* We can ignore CLOBBERs. */
644 /* Ignore SETs whose result isn't used but not those that
645 have side-effects. */
646 if (find_reg_note (insn
, REG_UNUSED
, SET_DEST (elt
))
647 && ! side_effects_p (elt
))
650 /* If we have already found a SET, this is a second one and
651 so we cannot combine with this insn. */
659 /* Anything else means we can't combine. */
/* NOTE(review): the start of the following `if' (original lines ~661-664,
   testing `set == 0 || ...') is missing from the extraction; the two
   lines below are its tail. */
665 /* If SET_SRC is an ASM_OPERANDS we can't throw away these CLOBBERs,
666 so don't do anything with it. */
667 || GET_CODE (SET_SRC (set
)) == ASM_OPERANDS
)
676 set
= expand_field_assignment (set
);
677 src
= SET_SRC (set
), dest
= SET_DEST (set
);
679 /* Don't eliminate a store in the stack pointer. */
680 if (dest
== stack_pointer_rtx
681 /* Don't install a subreg involving two modes not tieable.
682 It can worsen register allocation, and can even make invalid reload
683 insns, since the reg inside may need to be copied from in the
684 outside mode, and that may be invalid if it is an fp reg copied in
   integer mode.  (NOTE(review): the original closing of this comment is
   missing from the extraction; terminated here.) */
686 || (GET_CODE (src
) == SUBREG
687 && ! MODES_TIEABLE_P (GET_MODE (src
), GET_MODE (SUBREG_REG (src
))))
688 /* If we couldn't eliminate a field assignment, we can't combine. */
689 || GET_CODE (dest
) == ZERO_EXTRACT
|| GET_CODE (dest
) == STRICT_LOW_PART
690 /* Don't combine with an insn that sets a register to itself if it has
691 a REG_EQUAL note. This may be part of a REG_NO_CONFLICT sequence. */
692 || (rtx_equal_p (src
, dest
) && find_reg_note (insn
, REG_EQUAL
, 0))
693 /* Can't merge a function call. */
694 || GET_CODE (src
) == CALL
695 /* Don't substitute into an incremented register. */
696 || FIND_REG_INC_NOTE (i3
, dest
)
697 || (succ
&& FIND_REG_INC_NOTE (succ
, dest
))
698 /* Don't combine the end of a libcall into anything. */
699 || find_reg_note (insn
, REG_RETVAL
, 0)
700 /* Make sure that DEST is not used after SUCC but before I3. */
701 || (succ
&& ! all_adjacent
702 && reg_used_between_p (dest
, succ
, i3
))
703 /* Make sure that the value that is to be substituted for the register
704 does not use any registers whose values alter in between. However,
705 If the insns are adjacent, a use can't cross a set even though we
706 think it might (this can happen for a sequence of insns each setting
707 the same destination; reg_last_set of that register might point to
708 a NOTE). Also, don't move a volatile asm across any other insns. */
/* NOTE(review): the opening of this disjunct (original line ~709,
   presumably `|| (! all_adjacent') is missing from the extraction. */
710 && (use_crosses_set_p (src
, INSN_CUID (insn
))
711 || (GET_CODE (src
) == ASM_OPERANDS
&& MEM_VOLATILE_P (src
))))
712 /* If there is a REG_NO_CONFLICT note for DEST in I3 or SUCC, we get
713 better register allocation by not doing the combine. */
714 || find_reg_note (i3
, REG_NO_CONFLICT
, dest
)
715 || (succ
&& find_reg_note (succ
, REG_NO_CONFLICT
, dest
))
716 /* Don't combine across a CALL_INSN, because that would possibly
717 change whether the life span of some REGs crosses calls or not,
718 and it is a pain to update that information.
719 Exception: if source is a constant, moving it later can't hurt.
720 Accept that special case, because it helps -fforce-addr a lot. */
721 || (INSN_CUID (insn
) < last_call_cuid
&& ! CONSTANT_P (src
)))
/* NOTE(review): the `return 0;' that follows this large condition is
   missing from the extraction. */
724 /* DEST must either be a REG or CC0. */
725 if (GET_CODE (dest
) == REG
)
727 /* If register alignment is being enforced for multi-word items in all
728 cases except for parameters, it is possible to have a register copy
729 insn referencing a hard register that is not allowed to contain the
730 mode being copied and which would not be valid as an operand of most
731 insns. Eliminate this problem by not combining with such an insn.
733 Also, on some machines we don't want to extend the life of a hard
   register.  (NOTE(review): the original closing of this comment is
   missing from the extraction; terminated here.) */
736 if (GET_CODE (src
) == REG
737 && ((REGNO (dest
) < FIRST_PSEUDO_REGISTER
738 && ! HARD_REGNO_MODE_OK (REGNO (dest
), GET_MODE (dest
)))
739 #ifdef SMALL_REGISTER_CLASSES
740 /* Don't extend the life of a hard register. */
741 || REGNO (src
) < FIRST_PSEUDO_REGISTER
743 || (REGNO (src
) < FIRST_PSEUDO_REGISTER
744 && ! HARD_REGNO_MODE_OK (REGNO (src
), GET_MODE (src
)))
/* NOTE(review): the `#else' before the preceding alternative, the
   closing `#endif', and the `return 0;' for this case are missing from
   the extraction. */
749 else if (GET_CODE (dest
) != CC0
)
752 /* Don't substitute for a register intended as a clobberable operand. */
753 if (GET_CODE (PATTERN (i3
)) == PARALLEL
)
754 for (i
= XVECLEN (PATTERN (i3
), 0) - 1; i
>= 0; i
--)
755 if (GET_CODE (XVECEXP (PATTERN (i3
), 0, i
)) == CLOBBER
756 && rtx_equal_p (XEXP (XVECEXP (PATTERN (i3
), 0, i
), 0), dest
))
759 /* If INSN contains anything volatile, or is an `asm' (whether volatile
760 or not), reject, unless nothing volatile comes between it and I3,
761 with the exception of SUCC. */
763 if (GET_CODE (src
) == ASM_OPERANDS
|| volatile_refs_p (src
))
764 for (p
= NEXT_INSN (insn
); p
!= i3
; p
= NEXT_INSN (p
))
765 if (GET_RTX_CLASS (GET_CODE (p
)) == 'i'
766 && p
!= succ
&& volatile_refs_p (PATTERN (p
)))
769 /* If INSN or I2 contains an autoincrement or autodecrement,
770 make sure that register is not used between there and I3,
771 and not already used in I3 either.
772 Also insist that I3 not be a jump; if it were one
773 and the incremented register were spilled, we would lose. */
776 for (link
= REG_NOTES (insn
); link
; link
= XEXP (link
, 1))
777 if (REG_NOTE_KIND (link
) == REG_INC
778 && (GET_CODE (i3
) == JUMP_INSN
779 || reg_used_between_p (XEXP (link
, 0), insn
, i3
)
780 || reg_overlap_mentioned_p (XEXP (link
, 0), PATTERN (i3
))))
785 /* Don't combine an insn that follows a CC0-setting insn.
786 An insn that uses CC0 must not be separated from the one that sets it.
787 We do, however, allow I2 to follow a CC0-setting insn if that insn
788 is passed as I1; in that case it will be deleted also.
789 We also allow combining in this case if all the insns are adjacent
790 because that would leave the two CC0 insns adjacent as well.
791 It would be more logical to test whether CC0 occurs inside I1 or I2,
792 but that would be much slower, and this ought to be equivalent. */
794 p
= prev_nonnote_insn (insn
);
795 if (p
&& p
!= pred
&& GET_CODE (p
) == INSN
&& sets_cc0_p (PATTERN (p
))
800 /* If we get here, we have passed all the tests and the combination is
   to be performed.  (NOTE(review): the original closing of this comment,
   and the function's final statements setting *PDEST/*PSRC and returning
   1, are missing from the extraction.) */
809 /* LOC is the location within I3 that contains its pattern or the component
810 of a PARALLEL of the pattern. We validate that it is valid for combining.
812 One problem is if I3 modifies its output, as opposed to replacing it
813 entirely, we can't allow the output to contain I2DEST or I1DEST as doing
814 so would produce an insn that is not equivalent to the original insns.
818 (set (reg:DI 101) (reg:DI 100))
819 (set (subreg:SI (reg:DI 101) 0) <foo>)
821 This is NOT equivalent to:
823 (parallel [(set (subreg:SI (reg:DI 100) 0) <foo>)
824 (set (reg:DI 101) (reg:DI 100))])
826 Not only does this modify 100 (in which case it might still be valid
827 if 100 were dead in I2), it sets 101 to the ORIGINAL value of 100.
829 We can also run into a problem if I2 sets a register that I1
830 uses and I1 gets directly substituted into I3 (not via I2). In that
831 case, we would be getting the wrong value of I2DEST into I3, so we
832 must reject the combination. This case occurs when I2 and I1 both
833 feed into I3, rather than when I1 feeds into I2, which feeds into I3.
834 If I1_NOT_IN_SRC is non-zero, it means that finding I1 in the source
835 of a SET must prevent combination from occurring.
837 On machines where SMALL_REGISTER_CLASSES is defined, we don't combine
838 if the destination of a SET is a hard register.
840 Before doing the above check, we first try to expand a field assignment
841 into a set of logical operations.
843 If PI3_DEST_KILLED is non-zero, it is a pointer to a location in which
844 we place a register that is both set and used within I3. If more than one
845 such register is detected, we fail.
847 Return 1 if the combination is valid, zero otherwise. */
/* NOTE(review): this function as extracted is missing its parameter
   declarations, braces, the local `rtx x = *loc;', and its `return 0;'
   and `return 1;' statements.  Text preserved verbatim; recover the full
   original before modifying. */
850 combinable_i3pat (i3
, loc
, i2dest
, i1dest
, i1_not_in_src
, pi3dest_killed
)
860 if (GET_CODE (x
) == SET
)
862 rtx set
= expand_field_assignment (x
);
863 rtx dest
= SET_DEST (set
);
864 rtx src
= SET_SRC (set
);
865 rtx inner_dest
= dest
, inner_src
= src
;
869 while (GET_CODE (inner_dest
) == STRICT_LOW_PART
870 || GET_CODE (inner_dest
) == SUBREG
871 || GET_CODE (inner_dest
) == ZERO_EXTRACT
)
872 inner_dest
= XEXP (inner_dest
, 0);
874 /* We probably don't need this any more now that LIMIT_RELOAD_CLASS
   exists.  (NOTE(review): the original continuation and closing of this
   comment are missing from the extraction; terminated here.) */
877 while (GET_CODE (inner_src
) == STRICT_LOW_PART
878 || GET_CODE (inner_src
) == SUBREG
879 || GET_CODE (inner_src
) == ZERO_EXTRACT
)
880 inner_src
= XEXP (inner_src
, 0);
882 /* If it is better that two different modes keep two different pseudos,
883 avoid combining them. This avoids producing the following pattern
885 (set (subreg:SI (reg/v:QI 21) 0)
886 (lshiftrt:SI (reg/v:SI 20)
888 If that were made, reload could not handle the pair of
889 reg 20/21, since it would try to get any GENERAL_REGS
890 but some of them don't handle QImode. */
892 if (rtx_equal_p (inner_src
, i2dest
)
893 && GET_CODE (inner_dest
) == REG
894 && ! MODES_TIEABLE_P (GET_MODE (i2dest
), GET_MODE (inner_dest
)))
/* NOTE(review): the `return 0;' for this rejection is missing from the
   extraction. */
898 /* Check for the case where I3 modifies its output, as
   discussed above.  (NOTE(review): the original closing of this comment
   is missing from the extraction; terminated here.) */
900 if ((inner_dest
!= dest
901 && (reg_overlap_mentioned_p (i2dest
, inner_dest
)
902 || (i1dest
&& reg_overlap_mentioned_p (i1dest
, inner_dest
))))
903 #ifdef SMALL_REGISTER_CLASSES
904 || (GET_CODE (inner_dest
) == REG
905 && REGNO (inner_dest
) < FIRST_PSEUDO_REGISTER
)
907 || (i1_not_in_src
&& reg_overlap_mentioned_p (i1dest
, src
)))
/* NOTE(review): the `#endif' matching the #ifdef above and the
   `return 0;' for this rejection are missing from the extraction. */
910 /* If DEST is used in I3, it is being killed in this insn,
911 so record that for later. */
912 if (pi3dest_killed
&& GET_CODE (dest
) == REG
913 && reg_referenced_p (dest
, PATTERN (i3
)))
/* NOTE(review): the check that *pi3dest_killed is not already set (which
   fails the combination if a second such register is found) is missing
   from the extraction. */
918 *pi3dest_killed
= dest
;
922 else if (GET_CODE (x
) == PARALLEL
)
926 for (i
= 0; i
< XVECLEN (x
, 0); i
++)
927 if (! combinable_i3pat (i3
, &XVECEXP (x
, 0, i
), i2dest
, i1dest
,
928 i1_not_in_src
, pi3dest_killed
))
/* NOTE(review): the `return 0;' for a failing element, the final
   `return 1;', and the closing brace are missing from the extraction. */
935 /* Try to combine the insns I1 and I2 into I3.
936 Here I1 and I2 appear earlier than I3.
937 I1 can be zero; then we combine just I2 into I3.
939 If we are combining three insns and the resulting insn is not recognized,
940 try splitting it into two insns. If that happens, I2 and I3 are retained
941 and I1 is pseudo-deleted by turning it into a NOTE. Otherwise, I1 and I2
944 If we created two insns, return I2; otherwise return I3.
945 Return 0 if the combination does not work. Then nothing is changed. */
948 try_combine (i3
, i2
, i1
)
949 register rtx i3
, i2
, i1
;
951 /* New patterns for I3 and I3, respectively. */
952 rtx newpat
, newi2pat
= 0;
953 /* Indicates need to preserve SET in I1 or I2 in I3 if it is not dead. */
954 int added_sets_1
, added_sets_2
;
955 /* Total number of SETs to put into I3. */
957 /* Nonzero is I2's body now appears in I3. */
959 /* INSN_CODEs for new I3, new I2, and user of condition code. */
960 int insn_code_number
, i2_code_number
, other_code_number
;
961 /* Contains I3 if the destination of I3 is used in its source, which means
962 that the old life of I3 is being killed. If that usage is placed into
963 I2 and not in I3, a REG_DEAD note must be made. */
964 rtx i3dest_killed
= 0;
965 /* SET_DEST and SET_SRC of I2 and I1. */
966 rtx i2dest
, i2src
, i1dest
= 0, i1src
= 0;
967 /* PATTERN (I2), or a copy of it in certain cases. */
969 /* Indicates if I2DEST or I1DEST is in I2SRC or I1_SRC. */
970 int i2dest_in_i2src
, i1dest_in_i1src
= 0, i2dest_in_i1src
= 0;
972 /* Notes that must be added to REG_NOTES in I3 and I2. */
973 rtx new_i3_notes
, new_i2_notes
;
980 /* If any of I1, I2, and I3 isn't really an insn, we can't do anything.
981 This can occur when flow deletes an insn that it has merged into an
982 auto-increment address. We also can't do anything if I3 has a
983 REG_LIBCALL note since we don't want to disrupt the contiguity of a
986 if (GET_RTX_CLASS (GET_CODE (i3
)) != 'i'
987 || GET_RTX_CLASS (GET_CODE (i2
)) != 'i'
988 || (i1
&& GET_RTX_CLASS (GET_CODE (i1
)) != 'i')
989 || find_reg_note (i3
, REG_LIBCALL
, 0))
994 undobuf
.num_undo
= previous_num_undos
= 0;
995 undobuf
.other_insn
= 0;
997 /* Save the current high-water-mark so we can free storage if we didn't
998 accept this combination. */
999 undobuf
.storage
= (char *) oballoc (0);
1001 /* If I1 and I2 both feed I3, they can be in any order. To simplify the
1002 code below, set I1 to be the earlier of the two insns. */
1003 if (i1
&& INSN_CUID (i1
) > INSN_CUID (i2
))
1004 temp
= i1
, i1
= i2
, i2
= temp
;
1006 /* First check for one important special-case that the code below will
1007 not handle. Namely, the case where I1 is zero, I2 has multiple sets,
1008 and I3 is a SET whose SET_SRC is a SET_DEST in I2. In that case,
1009 we may be able to replace that destination with the destination of I3.
1010 This occurs in the common code where we compute both a quotient and
1011 remainder into a structure, in which case we want to do the computation
1012 directly into the structure to avoid register-register copies.
1014 We make very conservative checks below and only try to handle the
1015 most common cases of this. For example, we only handle the case
1016 where I2 and I3 are adjacent to avoid making difficult register
1019 if (i1
== 0 && GET_CODE (i3
) == INSN
&& GET_CODE (PATTERN (i3
)) == SET
1020 && GET_CODE (SET_SRC (PATTERN (i3
))) == REG
1021 && REGNO (SET_SRC (PATTERN (i3
))) >= FIRST_PSEUDO_REGISTER
1022 #ifdef SMALL_REGISTER_CLASSES
1023 && (GET_CODE (SET_DEST (PATTERN (i3
))) != REG
1024 || REGNO (SET_DEST (PATTERN (i3
))) >= FIRST_PSEUDO_REGISTER
)
1026 && find_reg_note (i3
, REG_DEAD
, SET_SRC (PATTERN (i3
)))
1027 && GET_CODE (PATTERN (i2
)) == PARALLEL
1028 && ! side_effects_p (SET_DEST (PATTERN (i3
)))
1029 && ! reg_overlap_mentioned_p (SET_SRC (PATTERN (i3
)),
1030 SET_DEST (PATTERN (i3
)))
1031 && next_real_insn (i2
) == i3
)
1032 for (i
= 0; i
< XVECLEN (PATTERN (i2
), 0); i
++)
1033 if (SET_DEST (XVECEXP (PATTERN (i2
), 0, i
)) == SET_SRC (PATTERN (i3
)))
1038 subst_low_cuid
= INSN_CUID (i2
);
1041 i2dest
= SET_SRC (PATTERN (i3
));
1043 /* Replace the dest in I2 with our dest and make the resulting
1044 insn the new pattern for I3. Then skip to where we
1045 validate the pattern. Everything was set up above. */
1046 SUBST (SET_DEST (XVECEXP (PATTERN (i2
), 0, i
)),
1047 SET_DEST (PATTERN (i3
)));
1049 newpat
= PATTERN (i2
);
1050 goto validate_replacement
;
1054 /* If we have no I1 and I2 looks like:
1055 (parallel [(set (reg:CC X) (compare:CC OP (const_int 0)))
1057 make up a dummy I1 that is
1060 (set (reg:CC X) (compare:CC Y (const_int 0)))
1062 (We can ignore any trailing CLOBBERs.)
1064 This undoes a previous combination and allows us to match a branch-and-
1067 if (i1
== 0 && GET_CODE (PATTERN (i2
)) == PARALLEL
1068 && XVECLEN (PATTERN (i2
), 0) >= 2
1069 && GET_CODE (XVECEXP (PATTERN (i2
), 0, 0)) == SET
1070 && (GET_MODE_CLASS (GET_MODE (SET_DEST (XVECEXP (PATTERN (i2
), 0, 0))))
1072 && GET_CODE (SET_SRC (XVECEXP (PATTERN (i2
), 0, 0))) == COMPARE
1073 && XEXP (SET_SRC (XVECEXP (PATTERN (i2
), 0, 0)), 1) == const0_rtx
1074 && GET_CODE (XVECEXP (PATTERN (i2
), 0, 1)) == SET
1075 && GET_CODE (SET_DEST (XVECEXP (PATTERN (i2
), 0, 1))) == REG
1076 && rtx_equal_p (XEXP (SET_SRC (XVECEXP (PATTERN (i2
), 0, 0)), 0),
1077 SET_SRC (XVECEXP (PATTERN (i2
), 0, 1))))
1079 for (i
= XVECLEN (PATTERN (i2
), 0) - 1; i
>= 2; i
--)
1080 if (GET_CODE (XVECEXP (PATTERN (i2
), 0, i
)) != CLOBBER
)
1085 /* We make I1 with the same INSN_UID as I2. This gives it
1086 the same INSN_CUID for value tracking. Our fake I1 will
1087 never appear in the insn stream so giving it the same INSN_UID
1088 as I2 will not cause a problem. */
1090 i1
= gen_rtx (INSN
, VOIDmode
, INSN_UID (i2
), 0, i2
,
1091 XVECEXP (PATTERN (i2
), 0, 1), -1, 0, 0);
1093 SUBST (PATTERN (i2
), XVECEXP (PATTERN (i2
), 0, 0));
1094 SUBST (XEXP (SET_SRC (PATTERN (i2
)), 0),
1095 SET_DEST (PATTERN (i1
)));
1100 /* Verify that I2 and I1 are valid for combining. */
1101 if (! can_combine_p (i2
, i3
, i1
, 0, &i2dest
, &i2src
)
1102 || (i1
&& ! can_combine_p (i1
, i3
, 0, i2
, &i1dest
, &i1src
)))
1108 /* Record whether I2DEST is used in I2SRC and similarly for the other
1109 cases. Knowing this will help in register status updating below. */
1110 i2dest_in_i2src
= reg_overlap_mentioned_p (i2dest
, i2src
);
1111 i1dest_in_i1src
= i1
&& reg_overlap_mentioned_p (i1dest
, i1src
);
1112 i2dest_in_i1src
= i1
&& reg_overlap_mentioned_p (i2dest
, i1src
);
1114 /* See if I1 directly feeds into I3. It does if I1dest is not used
1116 i1_feeds_i3
= i1
&& ! reg_overlap_mentioned_p (i1dest
, i2src
);
1118 /* Ensure that I3's pattern can be the destination of combines. */
1119 if (! combinable_i3pat (i3
, &PATTERN (i3
), i2dest
, i1dest
,
1120 i1
&& i2dest_in_i1src
&& i1_feeds_i3
,
1127 /* If I3 has an inc, then give up if I1 or I2 uses the reg that is inc'd.
1128 We used to do this EXCEPT in one case: I3 has a post-inc in an
1129 output operand. However, that exception can give rise to insns like
1131 which is a famous insn on the PDP-11 where the value of r3 used as the
1132 source was model-dependent. Avoid this sort of thing. */
1135 if (!(GET_CODE (PATTERN (i3
)) == SET
1136 && GET_CODE (SET_SRC (PATTERN (i3
))) == REG
1137 && GET_CODE (SET_DEST (PATTERN (i3
))) == MEM
1138 && (GET_CODE (XEXP (SET_DEST (PATTERN (i3
)), 0)) == POST_INC
1139 || GET_CODE (XEXP (SET_DEST (PATTERN (i3
)), 0)) == POST_DEC
)))
1140 /* It's not the exception. */
1143 for (link
= REG_NOTES (i3
); link
; link
= XEXP (link
, 1))
1144 if (REG_NOTE_KIND (link
) == REG_INC
1145 && (reg_overlap_mentioned_p (XEXP (link
, 0), PATTERN (i2
))
1147 && reg_overlap_mentioned_p (XEXP (link
, 0), PATTERN (i1
)))))
1154 /* See if the SETs in I1 or I2 need to be kept around in the merged
1155 instruction: whenever the value set there is still needed past I3.
1156 For the SETs in I2, this is easy: we see if I2DEST dies or is set in I3.
1158 For the SET in I1, we have two cases: If I1 and I2 independently
1159 feed into I3, the set in I1 needs to be kept around if I1DEST dies
1160 or is set in I3. Otherwise (if I1 feeds I2 which feeds I3), the set
1161 in I1 needs to be kept around unless I1DEST dies or is set in either
1162 I2 or I3. We can distinguish these cases by seeing if I2SRC mentions
1163 I1DEST. If so, we know I1 feeds into I2. */
1165 added_sets_2
= ! dead_or_set_p (i3
, i2dest
);
1168 = i1
&& ! (i1_feeds_i3
? dead_or_set_p (i3
, i1dest
)
1169 : (dead_or_set_p (i3
, i1dest
) || dead_or_set_p (i2
, i1dest
)));
1171 /* If the set in I2 needs to be kept around, we must make a copy of
1172 PATTERN (I2), so that when we substitute I1SRC for I1DEST in
1173 PATTERN (I2), we are only substituting for the original I1DEST, not into
1174 an already-substituted copy. This also prevents making self-referential
1175 rtx. If I2 is a PARALLEL, we just need the piece that assigns I2SRC to
1178 i2pat
= (GET_CODE (PATTERN (i2
)) == PARALLEL
1179 ? gen_rtx (SET
, VOIDmode
, i2dest
, i2src
)
1183 i2pat
= copy_rtx (i2pat
);
1187 /* Substitute in the latest insn for the regs set by the earlier ones. */
1189 maxreg
= max_reg_num ();
1192 subst_low_cuid
= i1
? INSN_CUID (i1
) : INSN_CUID (i2
);
1194 /* It is possible that the source of I2 or I1 may be performing an
1195 unneeded operation, such as a ZERO_EXTEND of something that is known
1196 to have the high part zero. Handle that case by letting subst look at
1197 the innermost one of them.
1199 Another way to do this would be to have a function that tries to
1200 simplify a single insn instead of merging two or more insns. We don't
1201 do this because of the potential of infinite loops and because
1202 of the potential extra memory required. However, doing it the way
1203 we are is a bit of a kludge and doesn't catch all cases.
1205 But only do this if -fexpensive-optimizations since it slows things down
1206 and doesn't usually win. */
1208 if (flag_expensive_optimizations
)
1210 /* Pass pc_rtx so no substitutions are done, just simplifications.
1211 The cases that we are interested in here do not involve the few
1212 cases where is_replaced is checked. */
1214 i1src
= subst (i1src
, pc_rtx
, pc_rtx
, 0, 0);
1216 i2src
= subst (i2src
, pc_rtx
, pc_rtx
, 0, 0);
1218 previous_num_undos
= undobuf
.num_undo
;
1222 /* Many machines that don't use CC0 have insns that can both perform an
1223 arithmetic operation and set the condition code. These operations will
1224 be represented as a PARALLEL with the first element of the vector
1225 being a COMPARE of an arithmetic operation with the constant zero.
1226 The second element of the vector will set some pseudo to the result
1227 of the same arithmetic operation. If we simplify the COMPARE, we won't
1228 match such a pattern and so will generate an extra insn. Here we test
1229 for this case, where both the comparison and the operation result are
1230 needed, and make the PARALLEL by just replacing I2DEST in I3SRC with
1231 I2SRC. Later we will make the PARALLEL that contains I2. */
1233 if (i1
== 0 && added_sets_2
&& GET_CODE (PATTERN (i3
)) == SET
1234 && GET_CODE (SET_SRC (PATTERN (i3
))) == COMPARE
1235 && XEXP (SET_SRC (PATTERN (i3
)), 1) == const0_rtx
1236 && rtx_equal_p (XEXP (SET_SRC (PATTERN (i3
)), 0), i2dest
))
1239 enum machine_mode compare_mode
;
1241 newpat
= PATTERN (i3
);
1242 SUBST (XEXP (SET_SRC (newpat
), 0), i2src
);
1246 #ifdef EXTRA_CC_MODES
1247 /* See if a COMPARE with the operand we substituted in should be done
1248 with the mode that is currently being used. If not, do the same
1249 processing we do in `subst' for a SET; namely, if the destination
1250 is used only once, try to replace it with a register of the proper
1251 mode and also replace the COMPARE. */
1252 if (undobuf
.other_insn
== 0
1253 && (cc_use
= find_single_use (SET_DEST (newpat
), i3
,
1254 &undobuf
.other_insn
))
1255 && ((compare_mode
= SELECT_CC_MODE (GET_CODE (*cc_use
), i2src
))
1256 != GET_MODE (SET_DEST (newpat
))))
1258 int regno
= REGNO (SET_DEST (newpat
));
1259 rtx new_dest
= gen_rtx (REG
, compare_mode
, regno
);
1261 if (regno
< FIRST_PSEUDO_REGISTER
1262 || (reg_n_sets
[regno
] == 1 && ! added_sets_2
1263 && ! REG_USERVAR_P (SET_DEST (newpat
))))
1265 if (regno
>= FIRST_PSEUDO_REGISTER
)
1266 SUBST (regno_reg_rtx
[regno
], new_dest
);
1268 SUBST (SET_DEST (newpat
), new_dest
);
1269 SUBST (XEXP (*cc_use
, 0), new_dest
);
1270 SUBST (SET_SRC (newpat
),
1271 gen_rtx_combine (COMPARE
, compare_mode
,
1272 i2src
, const0_rtx
));
1275 undobuf
.other_insn
= 0;
1282 n_occurrences
= 0; /* `subst' counts here */
1284 /* If I1 feeds into I2 (not into I3) and I1DEST is in I1SRC, we
1285 need to make a unique copy of I2SRC each time we substitute it
1286 to avoid self-referential rtl. */
1288 newpat
= subst (PATTERN (i3
), i2dest
, i2src
, 0,
1289 ! i1_feeds_i3
&& i1dest_in_i1src
);
1290 previous_num_undos
= undobuf
.num_undo
;
1292 /* Record whether i2's body now appears within i3's body. */
1293 i2_is_used
= n_occurrences
;
1296 /* If we already got a failure, don't try to do more. Otherwise,
1297 try to substitute in I1 if we have it. */
1299 if (i1
&& GET_CODE (newpat
) != CLOBBER
)
1301 /* Before we can do this substitution, we must redo the test done
1302 above (see detailed comments there) that ensures that I1DEST
1303 isn't mentioned in any SETs in NEWPAT that are field assignments. */
1305 if (! combinable_i3pat (0, &newpat
, i1dest
, 0, 0, 0))
1312 newpat
= subst (newpat
, i1dest
, i1src
, 0, 0);
1313 previous_num_undos
= undobuf
.num_undo
;
1316 /* Fail if an autoincrement side-effect has been duplicated. */
1317 if ((i2_is_used
> 1 && FIND_REG_INC_NOTE (i2
, 0) != 0)
1318 || (i1
!= 0 && n_occurrences
> 1 && FIND_REG_INC_NOTE (i1
, 0) != 0)
1319 /* Fail if we tried to make a new register (we used to abort, but there's
1320 really no reason to). */
1321 || max_reg_num () != maxreg
1322 /* Fail if we couldn't do something and have a CLOBBER. */
1323 || GET_CODE (newpat
) == CLOBBER
)
1329 /* If the actions of the earlier insns must be kept
1330 in addition to substituting them into the latest one,
1331 we must make a new PARALLEL for the latest insn
1332 to hold the additional SETs. */
1334 if (added_sets_1
|| added_sets_2
)
1338 if (GET_CODE (newpat
) == PARALLEL
)
1340 rtvec old
= XVEC (newpat
, 0);
1341 total_sets
= XVECLEN (newpat
, 0) + added_sets_1
+ added_sets_2
;
1342 newpat
= gen_rtx (PARALLEL
, VOIDmode
, rtvec_alloc (total_sets
));
1343 bcopy (&old
->elem
[0], &XVECEXP (newpat
, 0, 0),
1344 sizeof (old
->elem
[0]) * old
->num_elem
);
1349 total_sets
= 1 + added_sets_1
+ added_sets_2
;
1350 newpat
= gen_rtx (PARALLEL
, VOIDmode
, rtvec_alloc (total_sets
));
1351 XVECEXP (newpat
, 0, 0) = old
;
1355 XVECEXP (newpat
, 0, --total_sets
)
1356 = (GET_CODE (PATTERN (i1
)) == PARALLEL
1357 ? gen_rtx (SET
, VOIDmode
, i1dest
, i1src
) : PATTERN (i1
));
1361 /* If there is no I1, use I2's body as is. We used to also not do
1362 the subst call below if I2 was substituted into I3,
1363 but that could lose a simplification. */
1365 XVECEXP (newpat
, 0, --total_sets
) = i2pat
;
1367 /* See comment where i2pat is assigned. */
1368 XVECEXP (newpat
, 0, --total_sets
)
1369 = subst (i2pat
, i1dest
, i1src
, 0, 0);
1373 /* We come here when we are replacing a destination in I2 with the
1374 destination of I3. */
1375 validate_replacement
:
1377 /* Is the result of combination a valid instruction? */
1378 insn_code_number
= recog_for_combine (&newpat
, i3
, &new_i3_notes
);
1380 /* If the result isn't valid, see if it is a PARALLEL of two SETs where
1381 the second SET's destination is a register that is unused. In that case,
1382 we just need the first SET. This can occur when simplifying a divmod
1383 insn. We *must* test for this case here because the code below that
1384 splits two independent SETs doesn't handle this case correctly when it
1385 updates the register status. Also check the case where the first
1386 SET's destination is unused. That would not cause incorrect code, but
1387 does cause an unneeded insn to remain. */
1389 if (insn_code_number
< 0 && GET_CODE (newpat
) == PARALLEL
1390 && XVECLEN (newpat
, 0) == 2
1391 && GET_CODE (XVECEXP (newpat
, 0, 0)) == SET
1392 && GET_CODE (XVECEXP (newpat
, 0, 1)) == SET
1393 && GET_CODE (SET_DEST (XVECEXP (newpat
, 0, 1))) == REG
1394 && find_reg_note (i3
, REG_UNUSED
, SET_DEST (XVECEXP (newpat
, 0, 1)))
1395 && ! side_effects_p (SET_SRC (XVECEXP (newpat
, 0, 1)))
1396 && asm_noperands (newpat
) < 0)
1398 newpat
= XVECEXP (newpat
, 0, 0);
1399 insn_code_number
= recog_for_combine (&newpat
, i3
, &new_i3_notes
);
1402 else if (insn_code_number
< 0 && GET_CODE (newpat
) == PARALLEL
1403 && XVECLEN (newpat
, 0) == 2
1404 && GET_CODE (XVECEXP (newpat
, 0, 0)) == SET
1405 && GET_CODE (XVECEXP (newpat
, 0, 1)) == SET
1406 && GET_CODE (SET_DEST (XVECEXP (newpat
, 0, 0))) == REG
1407 && find_reg_note (i3
, REG_UNUSED
, SET_DEST (XVECEXP (newpat
, 0, 0)))
1408 && ! side_effects_p (SET_SRC (XVECEXP (newpat
, 0, 0)))
1409 && asm_noperands (newpat
) < 0)
1411 newpat
= XVECEXP (newpat
, 0, 1);
1412 insn_code_number
= recog_for_combine (&newpat
, i3
, &new_i3_notes
);
1415 /* If we were combining three insns and the result is a simple SET
1416 with no ASM_OPERANDS that wasn't recognized, try to split it into two
1418 if (i1
&& insn_code_number
< 0 && GET_CODE (newpat
) == SET
1419 && asm_noperands (newpat
) < 0)
1421 rtx
*split
= find_split_point (&newpat
);
1423 /* If we can split it and use I2DEST, go ahead and see if that
1424 helps things be recognized. Verify that none of the registers
1425 are set between I2 and I3. */
1428 && GET_CODE (i2dest
) == REG
1430 /* We need I2DEST in the proper mode. If it is a hard register
1431 or the only use of a pseudo, we can change its mode. */
1432 && (GET_MODE (*split
) == GET_MODE (i2dest
)
1433 || GET_MODE (*split
) == VOIDmode
1434 || REGNO (i2dest
) < FIRST_PSEUDO_REGISTER
1435 || (reg_n_sets
[REGNO (i2dest
)] == 1 && ! added_sets_2
1436 && ! REG_USERVAR_P (i2dest
)))
1437 && (next_real_insn (i2
) == i3
1438 || ! use_crosses_set_p (*split
, INSN_CUID (i2
)))
1439 /* We can't overwrite I2DEST if its value is still used by
1441 && ! reg_referenced_p (i2dest
, newpat
))
1443 rtx newdest
= i2dest
;
1445 /* Get NEWDEST as a register in the proper mode. We have already
1446 validated that we can do this. */
1447 if (GET_MODE (i2dest
) != GET_MODE (*split
)
1448 && GET_MODE (*split
) != VOIDmode
)
1450 newdest
= gen_rtx (REG
, GET_MODE (*split
), REGNO (i2dest
));
1452 if (REGNO (i2dest
) >= FIRST_PSEUDO_REGISTER
)
1453 SUBST (regno_reg_rtx
[REGNO (i2dest
)], newdest
);
1456 /* If *SPLIT is a (mult FOO (const_int pow2)), convert it to
1457 an ASHIFT. This can occur if it was inside a PLUS and hence
1458 appeared to be a memory address. This is a kludge. */
1459 if (GET_CODE (*split
) == MULT
1460 && GET_CODE (XEXP (*split
, 1)) == CONST_INT
1461 && (i
= exact_log2 (INTVAL (XEXP (*split
, 1)))) >= 0)
1462 SUBST (*split
, gen_rtx_combine (ASHIFT
, GET_MODE (*split
),
1464 gen_rtx (CONST_INT
, VOIDmode
, i
)));
1466 #ifdef INSN_SCHEDULING
1467 /* If *SPLIT is a paradoxical SUBREG, when we split it, it should
1468 be written as a ZERO_EXTEND. */
1469 if (GET_CODE (*split
) == SUBREG
1470 && GET_CODE (SUBREG_REG (*split
)) == MEM
)
1471 SUBST (*split
, gen_rtx_combine (ZERO_EXTEND
, GET_MODE (*split
),
1475 newi2pat
= gen_rtx_combine (SET
, VOIDmode
, newdest
, *split
);
1476 SUBST (*split
, newdest
);
1477 i2_code_number
= recog_for_combine (&newi2pat
, i2
, &new_i2_notes
);
1478 if (i2_code_number
>= 0)
1479 insn_code_number
= recog_for_combine (&newpat
, i3
, &new_i3_notes
);
1483 /* Check for a case where we loaded from memory in a narrow mode and
1484 then sign extended it, but we need both registers. In that case,
1485 we have a PARALLEL with both loads from the same memory location.
1486 We can split this into a load from memory followed by a register-register
1487 copy. This saves at least one insn, more if register allocation can
1488 eliminate the copy. */
1490 else if (i1
&& insn_code_number
< 0 && asm_noperands (newpat
) < 0
1491 && GET_CODE (newpat
) == PARALLEL
1492 && XVECLEN (newpat
, 0) == 2
1493 && GET_CODE (XVECEXP (newpat
, 0, 0)) == SET
1494 && GET_CODE (SET_SRC (XVECEXP (newpat
, 0, 0))) == SIGN_EXTEND
1495 && GET_CODE (XVECEXP (newpat
, 0, 1)) == SET
1496 && rtx_equal_p (SET_SRC (XVECEXP (newpat
, 0, 1)),
1497 XEXP (SET_SRC (XVECEXP (newpat
, 0, 0)), 0))
1498 && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat
, 0, 1)),
1500 && GET_CODE (SET_DEST (XVECEXP (newpat
, 0, 1))) != ZERO_EXTRACT
1501 && GET_CODE (SET_DEST (XVECEXP (newpat
, 0, 1))) != STRICT_LOW_PART
1502 && ! reg_overlap_mentioned_p (SET_DEST (XVECEXP (newpat
, 0, 1)),
1503 SET_SRC (XVECEXP (newpat
, 0, 1)))
1504 && ! find_reg_note (i3
, REG_UNUSED
,
1505 SET_DEST (XVECEXP (newpat
, 0, 0))))
1507 newi2pat
= XVECEXP (newpat
, 0, 0);
1508 newpat
= XVECEXP (newpat
, 0, 1);
1509 SUBST (SET_SRC (newpat
),
1510 gen_lowpart_for_combine (GET_MODE (SET_SRC (newpat
)),
1511 SET_DEST (newi2pat
)));
1512 i2_code_number
= recog_for_combine (&newi2pat
, i2
, &new_i2_notes
);
1513 if (i2_code_number
>= 0)
1514 insn_code_number
= recog_for_combine (&newpat
, i3
, &new_i3_notes
);
1517 /* Similarly, check for a case where we have a PARALLEL of two independent
1518 SETs but we started with three insns. In this case, we can do the sets
1519 as two separate insns. This case occurs when some SET allows two
1520 other insns to combine, but the destination of that SET is still live. */
1522 else if (i1
&& insn_code_number
< 0 && asm_noperands (newpat
) < 0
1523 && GET_CODE (newpat
) == PARALLEL
1524 && XVECLEN (newpat
, 0) == 2
1525 && GET_CODE (XVECEXP (newpat
, 0, 0)) == SET
1526 && GET_CODE (SET_DEST (XVECEXP (newpat
, 0, 0))) != ZERO_EXTRACT
1527 && GET_CODE (SET_DEST (XVECEXP (newpat
, 0, 0))) != STRICT_LOW_PART
1528 && GET_CODE (XVECEXP (newpat
, 0, 1)) == SET
1529 && GET_CODE (SET_DEST (XVECEXP (newpat
, 0, 1))) != ZERO_EXTRACT
1530 && GET_CODE (SET_DEST (XVECEXP (newpat
, 0, 1))) != STRICT_LOW_PART
1531 && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat
, 0, 1)),
1533 /* Don't pass sets with (USE (MEM ...)) dests to the following. */
1534 && GET_CODE (SET_DEST (XVECEXP (newpat
, 0, 1))) != USE
1535 && GET_CODE (SET_DEST (XVECEXP (newpat
, 0, 0))) != USE
1536 && ! reg_referenced_p (SET_DEST (XVECEXP (newpat
, 0, 1)),
1537 XVECEXP (newpat
, 0, 0))
1538 && ! reg_referenced_p (SET_DEST (XVECEXP (newpat
, 0, 0)),
1539 XVECEXP (newpat
, 0, 1)))
1541 newi2pat
= XVECEXP (newpat
, 0, 1);
1542 newpat
= XVECEXP (newpat
, 0, 0);
1544 i2_code_number
= recog_for_combine (&newi2pat
, i2
, &new_i2_notes
);
1545 if (i2_code_number
>= 0)
1546 insn_code_number
= recog_for_combine (&newpat
, i3
, &new_i3_notes
);
1549 /* If it still isn't recognized, fail and change things back the way they
1551 if ((insn_code_number
< 0
1552 /* Is the result a reasonable ASM_OPERANDS? */
1553 && (! check_asm_operands (newpat
) || added_sets_1
|| added_sets_2
)))
1559 /* If we had to change another insn, make sure it is valid also. */
1560 if (undobuf
.other_insn
)
1562 rtx other_notes
= REG_NOTES (undobuf
.other_insn
);
1563 rtx other_pat
= PATTERN (undobuf
.other_insn
);
1564 rtx new_other_notes
;
1567 other_code_number
= recog_for_combine (&other_pat
, undobuf
.other_insn
,
1570 if (other_code_number
< 0 && ! check_asm_operands (other_pat
))
1576 PATTERN (undobuf
.other_insn
) = other_pat
;
1578 /* If any of the notes in OTHER_INSN were REG_UNUSED, ensure that they
1579 are still valid. Then add any non-duplicate notes added by
1580 recog_for_combine. */
1581 for (note
= REG_NOTES (undobuf
.other_insn
); note
; note
= next
)
1583 next
= XEXP (note
, 1);
1585 if (REG_NOTE_KIND (note
) == REG_UNUSED
1586 && ! reg_set_p (XEXP (note
, 0), PATTERN (undobuf
.other_insn
)))
1587 remove_note (undobuf
.other_insn
, note
);
1590 distribute_notes (new_other_notes
, undobuf
.other_insn
,
1591 undobuf
.other_insn
, 0, 0, 0);
1594 /* We now know that we can do this combination. Merge the insns and
1595 update the status of registers and LOG_LINKS. */
1598 rtx i3notes
, i2notes
, i1notes
= 0;
1599 rtx i3links
, i2links
, i1links
= 0;
1601 int all_adjacent
= (next_real_insn (i2
) == i3
1602 && (i1
== 0 || next_real_insn (i1
) == i2
));
1604 /* Compute which registers we expect to eliminate. */
1605 rtx elim_i2
= (newi2pat
|| i2dest_in_i2src
|| i2dest_in_i1src
1607 rtx elim_i1
= i1
== 0 || i1dest_in_i1src
? 0 : i1dest
;
1609 /* Get the old REG_NOTES and LOG_LINKS from all our insns and
1611 i3notes
= REG_NOTES (i3
), i3links
= LOG_LINKS (i3
);
1612 i2notes
= REG_NOTES (i2
), i2links
= LOG_LINKS (i2
);
1614 i1notes
= REG_NOTES (i1
), i1links
= LOG_LINKS (i1
);
1616 /* Ensure that we do not have something that should not be shared but
1617 occurs multiple times in the new insns. Check this by first
1618 resetting all the `used' flags and then copying anything that is shared. */
1620 reset_used_flags (i3notes
);
1621 reset_used_flags (i2notes
);
1622 reset_used_flags (i1notes
);
1623 reset_used_flags (newpat
);
1624 reset_used_flags (newi2pat
);
1625 if (undobuf
.other_insn
)
1626 reset_used_flags (PATTERN (undobuf
.other_insn
));
1628 i3notes
= copy_rtx_if_shared (i3notes
);
1629 i2notes
= copy_rtx_if_shared (i2notes
);
1630 i1notes
= copy_rtx_if_shared (i1notes
);
1631 newpat
= copy_rtx_if_shared (newpat
);
1632 newi2pat
= copy_rtx_if_shared (newi2pat
);
1633 if (undobuf
.other_insn
)
1634 reset_used_flags (PATTERN (undobuf
.other_insn
));
1636 INSN_CODE (i3
) = insn_code_number
;
1637 PATTERN (i3
) = newpat
;
1638 if (undobuf
.other_insn
)
1639 INSN_CODE (undobuf
.other_insn
) = other_code_number
;
1641 /* We had one special case above where I2 had more than one set and
1642 we replaced a destination of one of those sets with the destination
1643 of I3. In that case, we have to update LOG_LINKS of insns later
1644 in this basic block. Note that this (expensive) case is rare. */
1646 if (GET_CODE (PATTERN (i2
)) == PARALLEL
)
1647 for (i
= 0; i
< XVECLEN (PATTERN (i2
), 0); i
++)
1648 if (GET_CODE (SET_DEST (XVECEXP (PATTERN (i2
), 0, i
))) == REG
1649 && SET_DEST (XVECEXP (PATTERN (i2
), 0, i
)) != i2dest
1650 && ! find_reg_note (i2
, REG_UNUSED
,
1651 SET_DEST (XVECEXP (PATTERN (i2
), 0, i
))))
1655 for (insn
= NEXT_INSN (i2
); insn
; insn
= NEXT_INSN (insn
))
1657 if (insn
!= i3
&& GET_RTX_CLASS (GET_CODE (insn
)) == 'i')
1658 for (link
= LOG_LINKS (insn
); link
; link
= XEXP (link
, 1))
1659 if (XEXP (link
, 0) == i2
)
1660 XEXP (link
, 0) = i3
;
1662 if (GET_CODE (insn
) == CODE_LABEL
1663 || GET_CODE (insn
) == JUMP_INSN
)
1675 INSN_CODE (i2
) = i2_code_number
;
1676 PATTERN (i2
) = newi2pat
;
1680 PUT_CODE (i2
, NOTE
);
1681 NOTE_LINE_NUMBER (i2
) = NOTE_INSN_DELETED
;
1682 NOTE_SOURCE_FILE (i2
) = 0;
1689 PUT_CODE (i1
, NOTE
);
1690 NOTE_LINE_NUMBER (i1
) = NOTE_INSN_DELETED
;
1691 NOTE_SOURCE_FILE (i1
) = 0;
1694 /* Get death notes for everything that is now used in either I3 or
1695 I2 and used to die in a previous insn. */
1697 move_deaths (newpat
, i1
? INSN_CUID (i1
) : INSN_CUID (i2
), i3
, &midnotes
);
1699 move_deaths (newi2pat
, INSN_CUID (i1
), i2
, &midnotes
);
1701 /* Distribute all the LOG_LINKS and REG_NOTES from I1, I2, and I3. */
1703 distribute_notes (i3notes
, i3
, i3
, newi2pat
? i2
: 0, elim_i2
, elim_i1
);
1705 distribute_notes (i2notes
, i2
, i3
, newi2pat
? i2
: 0, elim_i2
, elim_i1
);
1707 distribute_notes (i1notes
, i1
, i3
, newi2pat
? i2
: 0, elim_i2
, elim_i1
);
1709 distribute_notes (midnotes
, 0, i3
, newi2pat
? i2
: 0, elim_i2
, elim_i1
);
1711 /* Distribute any notes added to I2 or I3 by recog_for_combine. We
1712 know these are REG_UNUSED and want them to go to the desired insn,
1713 so we always pass it as i3. */
1714 if (newi2pat
&& new_i2_notes
)
1715 distribute_notes (new_i2_notes
, i2
, i2
, 0, 0, 0);
1717 distribute_notes (new_i3_notes
, i3
, i3
, 0, 0, 0);
1719 /* If I3DEST was used in I3SRC, it really died in I3. We may need to
1720 put a REG_DEAD note for it somewhere. Similarly for I2 and I1. */
1722 distribute_notes (gen_rtx (EXPR_LIST
, REG_DEAD
, i3dest_killed
, 0),
1723 0, i3
, newi2pat
? i2
: 0, 0, 0);
1724 if (i2dest_in_i2src
)
1725 distribute_notes (gen_rtx (EXPR_LIST
, REG_DEAD
, i2dest
, 0),
1726 0, i3
, newi2pat
? i2
: 0, 0, 0);
1727 if (i1dest_in_i1src
)
1728 distribute_notes (gen_rtx (EXPR_LIST
, REG_DEAD
, i1dest
, 0),
1729 0, i3
, newi2pat
? i2
: 0, 0, 0);
1731 distribute_links (i3links
);
1732 distribute_links (i2links
);
1733 distribute_links (i1links
);
1735 if (GET_CODE (i2dest
) == REG
)
1737 /* The insn that previously set this register doesn't exist, and
1738 this life of the register may not exist either. Show that
1739 we don't know its value any more. If we don't do this and
1740 I2 set the register to a value that depended on its old
1741 contents, we will get confused. If this insn is used, things
1742 will be set correctly in combine_instructions. */
1743 record_value_for_reg (i2dest
, 0, 0);
1745 /* If the reg formerly set in I2 died only once and that was in I3,
1746 zero its use count so it won't make `reload' do any work. */
1747 if (! added_sets_2
&& newi2pat
== 0)
1749 regno
= REGNO (i2dest
);
1750 reg_n_sets
[regno
]--;
1751 if (reg_n_sets
[regno
] == 0
1752 && ! (basic_block_live_at_start
[0][regno
/ HOST_BITS_PER_INT
]
1753 & (1 << (regno
% HOST_BITS_PER_INT
))))
1754 reg_n_refs
[regno
] = 0;
1758 if (i1
&& GET_CODE (i1dest
) == REG
)
1760 record_value_for_reg (i1dest
, 0, 0);
1761 regno
= REGNO (i1dest
);
1764 reg_n_sets
[regno
]--;
1765 if (reg_n_sets
[regno
] == 0
1766 && ! (basic_block_live_at_start
[0][regno
/ HOST_BITS_PER_INT
]
1767 & (1 << (regno
% HOST_BITS_PER_INT
))))
1768 reg_n_refs
[regno
] = 0;
1772 /* If I3 is now an unconditional jump, ensure that it has a
1773 BARRIER following it since it may have initially been a
1774 conditional jump. */
1776 if ((GET_CODE (newpat
) == RETURN
|| simplejump_p (i3
))
1777 && GET_CODE (next_nonnote_insn (i3
)) != BARRIER
)
1778 emit_barrier_after (i3
);
1781 combine_successes
++;
1783 return newi2pat
? i2
: i3
;
1786 /* Undo all the modifications recorded in undobuf. */
1792 if (undobuf
.num_undo
> MAX_UNDO
)
1793 undobuf
.num_undo
= MAX_UNDO
;
1794 for (i
= undobuf
.num_undo
- 1; i
>= 0; i
--)
1795 *undobuf
.undo
[i
].where
= undobuf
.undo
[i
].old_contents
;
1797 obfree (undobuf
.storage
);
1798 undobuf
.num_undo
= 0;
1801 /* Find the innermost point within the rtx at LOC, possibly LOC itself,
1802 where we have an arithmetic expression and return that point.
1804 try_combine will call this function to see if an insn can be split into
1808 find_split_point (loc
)
1812 enum rtx_code code
= GET_CODE (x
);
1814 int len
= 0, pos
, unsignedp
;
1817 /* First special-case some codes. */
1821 #ifdef INSN_SCHEDULING
1822 /* If we are making a paradoxical SUBREG invalid, it becomes a split
1824 if (GET_CODE (SUBREG_REG (x
)) == MEM
)
1827 return find_split_point (&SUBREG_REG (x
));
1831 /* If we have (mem (const ..)) or (mem (symbol_ref ...)), split it
1832 using LO_SUM and HIGH. */
1833 if (GET_CODE (XEXP (x
, 0)) == CONST
1834 || GET_CODE (XEXP (x
, 0)) == SYMBOL_REF
)
1837 gen_rtx_combine (LO_SUM
, Pmode
,
1838 gen_rtx_combine (HIGH
, Pmode
, XEXP (x
, 0)),
1840 return &XEXP (XEXP (x
, 0), 0);
1847 /* If SET_DEST is CC0 and SET_SRC is not an operand, a COMPARE, or a
1848 ZERO_EXTRACT, the most likely reason why this doesn't match is that
1849 we need to put the operand into a register. So split at that
1852 if (SET_DEST (x
) == cc0_rtx
1853 && GET_CODE (SET_SRC (x
)) != COMPARE
1854 && GET_CODE (SET_SRC (x
)) != ZERO_EXTRACT
1855 && GET_RTX_CLASS (GET_CODE (SET_SRC (x
))) != 'o'
1856 && ! (GET_CODE (SET_SRC (x
)) == SUBREG
1857 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (SET_SRC (x
)))) == 'o'))
1858 return &SET_SRC (x
);
1861 /* See if we can split SET_SRC as it stands. */
1862 split
= find_split_point (&SET_SRC (x
));
1863 if (split
&& split
!= &SET_SRC (x
))
1866 /* See if this is a bitfield assignment with everything constant. If
1867 so, this is an IOR of an AND, so split it into that. */
1868 if (GET_CODE (SET_DEST (x
)) == ZERO_EXTRACT
1869 && (GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x
), 0)))
1870 <= HOST_BITS_PER_INT
)
1871 && GET_CODE (XEXP (SET_DEST (x
), 1)) == CONST_INT
1872 && GET_CODE (XEXP (SET_DEST (x
), 2)) == CONST_INT
1873 && GET_CODE (SET_SRC (x
)) == CONST_INT
1874 && ((INTVAL (XEXP (SET_DEST (x
), 1))
1875 + INTVAL (XEXP (SET_DEST (x
), 2)))
1876 <= GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x
), 0))))
1877 && ! side_effects_p (XEXP (SET_DEST (x
), 0)))
1879 int pos
= INTVAL (XEXP (SET_DEST (x
), 2));
1880 int len
= INTVAL (XEXP (SET_DEST (x
), 1));
1881 int src
= INTVAL (SET_SRC (x
));
1882 rtx dest
= XEXP (SET_DEST (x
), 0);
1883 enum machine_mode mode
= GET_MODE (dest
);
1884 unsigned int mask
= (1 << len
) - 1;
1887 pos
= GET_MODE_BITSIZE (mode
) - len
- pos
;
1892 gen_binary (IOR
, mode
, dest
,
1893 gen_rtx (CONST_INT
, VOIDmode
, src
<< pos
)));
1896 gen_binary (IOR
, mode
,
1897 gen_binary (AND
, mode
, dest
,
1898 gen_rtx (CONST_INT
, VOIDmode
,
1900 & GET_MODE_MASK (mode
)))),
1901 gen_rtx (CONST_INT
, VOIDmode
, src
<< pos
)));
1903 SUBST (SET_DEST (x
), dest
);
1905 split
= find_split_point (&SET_SRC (x
));
1906 if (split
&& split
!= &SET_SRC (x
))
1910 /* Otherwise, see if this is an operation that we can split into two.
1911 If so, try to split that. */
1912 code
= GET_CODE (SET_SRC (x
));
1917 inner
= XEXP (SET_SRC (x
), 0);
1919 len
= GET_MODE_BITSIZE (GET_MODE (inner
));
1925 if (GET_CODE (XEXP (SET_SRC (x
), 1)) == CONST_INT
1926 && GET_CODE (XEXP (SET_SRC (x
), 2)) == CONST_INT
)
1928 inner
= XEXP (SET_SRC (x
), 0);
1929 len
= INTVAL (XEXP (SET_SRC (x
), 1));
1930 pos
= INTVAL (XEXP (SET_SRC (x
), 2));
1933 pos
= GET_MODE_BITSIZE (GET_MODE (inner
)) - len
- pos
;
1935 unsignedp
= (code
== ZERO_EXTRACT
);
1940 if (len
&& pos
>= 0 && pos
+ len
<= GET_MODE_BITSIZE (GET_MODE (inner
)))
1942 enum machine_mode mode
= GET_MODE (SET_SRC (x
));
1944 if (unsignedp
&& len
< HOST_BITS_PER_INT
)
1949 gen_rtx_combine (LSHIFTRT
, mode
,
1950 gen_lowpart_for_combine (mode
, inner
),
1951 gen_rtx (CONST_INT
, VOIDmode
, pos
)),
1952 gen_rtx (CONST_INT
, VOIDmode
, (1 << len
) - 1)));
1954 split
= find_split_point (&SET_SRC (x
));
1955 if (split
&& split
!= &SET_SRC (x
))
1963 gen_rtx_combine (ASHIFT
, mode
,
1964 gen_lowpart_for_combine (mode
, inner
),
1965 gen_rtx (CONST_INT
, VOIDmode
,
1966 (GET_MODE_BITSIZE (mode
)
1968 gen_rtx (CONST_INT
, VOIDmode
,
1969 GET_MODE_BITSIZE (mode
) - len
)));
1971 split
= find_split_point (&SET_SRC (x
));
1972 if (split
&& split
!= &SET_SRC (x
))
1977 /* See if this is a simple operation with a constant as the second
1978 operand. It might be that this constant is out of range and hence
1979 could be used as a split point. */
1980 if ((GET_RTX_CLASS (GET_CODE (SET_SRC (x
))) == '2'
1981 || GET_RTX_CLASS (GET_CODE (SET_SRC (x
))) == 'c'
1982 || GET_RTX_CLASS (GET_CODE (SET_SRC (x
))) == '<')
1983 && CONSTANT_P (XEXP (SET_SRC (x
), 1))
1984 && (GET_RTX_CLASS (GET_CODE (XEXP (SET_SRC (x
), 0))) == 'o'
1985 || (GET_CODE (XEXP (SET_SRC (x
), 0)) == SUBREG
1986 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (SET_SRC (x
), 0))))
1988 return &XEXP (SET_SRC (x
), 1);
1990 /* Finally, see if this is a simple operation with its first operand
1991 not in a register. The operation might require this operand in a
1992 register, so return it as a split point. We can always do this
1993 because if the first operand were another operation, we would have
1994 already found it as a split point. */
1995 if ((GET_RTX_CLASS (GET_CODE (SET_SRC (x
))) == '2'
1996 || GET_RTX_CLASS (GET_CODE (SET_SRC (x
))) == 'c'
1997 || GET_RTX_CLASS (GET_CODE (SET_SRC (x
))) == '<'
1998 || GET_RTX_CLASS (GET_CODE (SET_SRC (x
))) == '1')
1999 && ! register_operand (XEXP (SET_SRC (x
), 0), VOIDmode
))
2000 return &XEXP (SET_SRC (x
), 0);
2006 /* We write NOR as (and (not A) (not B)), but if we don't have a NOR,
2007 it is better to write this as (not (ior A B)) so we can split it.
2008 Similarly for IOR. */
2009 if (GET_CODE (XEXP (x
, 0)) == NOT
&& GET_CODE (XEXP (x
, 1)) == NOT
)
2012 gen_rtx_combine (NOT
, GET_MODE (x
),
2013 gen_rtx_combine (code
== IOR
? AND
: IOR
,
2015 XEXP (XEXP (x
, 0), 0),
2016 XEXP (XEXP (x
, 1), 0))));
2017 return find_split_point (loc
);
2020 /* Many RISC machines have a large set of logical insns. If the
2021 second operand is a NOT, put it first so we will try to split the
2022 other operand first. */
2023 if (GET_CODE (XEXP (x
, 1)) == NOT
)
2025 rtx tem
= XEXP (x
, 0);
2026 SUBST (XEXP (x
, 0), XEXP (x
, 1));
2027 SUBST (XEXP (x
, 1), tem
);
2032 /* Otherwise, select our actions depending on our rtx class. */
2033 switch (GET_RTX_CLASS (code
))
2035 case 'b': /* This is ZERO_EXTRACT and SIGN_EXTRACT. */
2037 split
= find_split_point (&XEXP (x
, 2));
2040 /* ... fall through ... */
2044 split
= find_split_point (&XEXP (x
, 1));
2047 /* ... fall through ... */
2049 /* Some machines have (and (shift ...) ...) insns. If X is not
2050 an AND, but XEXP (X, 0) is, use it as our split point. */
2051 if (GET_CODE (x
) != AND
&& GET_CODE (XEXP (x
, 0)) == AND
)
2052 return &XEXP (x
, 0);
2054 split
= find_split_point (&XEXP (x
, 0));
2060 /* Otherwise, we don't have a split point. */
2064 /* Throughout X, replace FROM with TO, and return the result.
2065 The result is TO if X is FROM;
2066 otherwise the result is X, but its contents may have been modified.
2067 If they were modified, a record was made in undobuf so that
2068 undo_all will (among other things) return X to its original state.
2070 If the number of changes necessary is too much to record to undo,
2071 the excess changes are not made, so the result is invalid.
2072 The changes already made can still be undone.
2073 undobuf.num_undo is incremented for such changes, so by testing that
2074 the caller can tell whether the result is valid.
2076 `n_occurrences' is incremented each time FROM is replaced.
2078 IN_DEST is non-zero if we are processing the SET_DEST of a SET.
2080 UNIQUE_COPY is non-zero if each substitution must be unique. We do this
2081 by copying if `n_occurrences' is non-zero. */
2084 subst (x
, from
, to
, in_dest
, unique_copy
)
2085 register rtx x
, from
, to
;
2090 register int len
, i
;
2091 register enum rtx_code code
= GET_CODE (x
), orig_code
= code
;
2093 enum machine_mode mode
= GET_MODE (x
);
2094 enum machine_mode op0_mode
= VOIDmode
;
2099 /* FAKE_EXTEND_SAFE_P (MODE, FROM) is 1 if (subreg:MODE FROM 0) is a safe
2100 replacement for (zero_extend:MODE FROM) or (sign_extend:MODE FROM).
2101 If it is 0, that cannot be done. We can now do this for any MEM
2102 because (SUBREG (MEM...)) is guaranteed to cause the MEM to be reloaded.
2103 If not for that, MEM's would very rarely be safe. */
2105 /* Reject MODEs bigger than a word, because we might not be able
2106 to reference a two-register group starting with an arbitrary register
2107 (and currently gen_lowpart might crash for a SUBREG). */
2109 #define FAKE_EXTEND_SAFE_P(MODE, FROM) \
2110 (GET_MODE_SIZE (MODE) <= UNITS_PER_WORD)
2112 /* Two expressions are equal if they are identical copies of a shared
2113 RTX or if they are both registers with the same register number
2116 #define COMBINE_RTX_EQUAL_P(X,Y) \
2118 || (GET_CODE (X) == REG && GET_CODE (Y) == REG \
2119 && REGNO (X) == REGNO (Y) && GET_MODE (X) == GET_MODE (Y)))
2121 if (! in_dest
&& COMBINE_RTX_EQUAL_P (x
, from
))
2124 return (unique_copy
&& n_occurrences
> 1 ? copy_rtx (to
) : to
);
2127 /* If X and FROM are the same register but different modes, they will
2128 not have been seen as equal above. However, flow.c will make a
2129 LOG_LINKS entry for that case. If we do nothing, we will try to
2130 rerecognize our original insn and, when it succeeds, we will
2131 delete the feeding insn, which is incorrect.
2133 So force this insn not to match in this (rare) case. */
2134 if (! in_dest
&& code
== REG
&& GET_CODE (from
) == REG
2135 && REGNO (x
) == REGNO (from
))
2136 return gen_rtx (CLOBBER
, GET_MODE (x
), const0_rtx
);
2138 /* If this is an object, we are done unless it is a MEM or LO_SUM, both
2139 of which may contain things that can be combined. */
2140 if (code
!= MEM
&& code
!= LO_SUM
&& GET_RTX_CLASS (code
) == 'o')
2143 /* It is possible to have a subexpression appear twice in the insn.
2144 Suppose that FROM is a register that appears within TO.
2145 Then, after that subexpression has been scanned once by `subst',
2146 the second time it is scanned, TO may be found. If we were
2147 to scan TO here, we would find FROM within it and create a
2148 self-referent rtl structure which is completely wrong. */
2149 if (COMBINE_RTX_EQUAL_P (x
, to
))
2152 len
= GET_RTX_LENGTH (code
);
2153 fmt
= GET_RTX_FORMAT (code
);
2155 /* We don't need to process a SET_DEST that is a register, CC0, or PC, so
2156 set up to skip this common case. All other cases where we want to
2157 suppress replacing something inside a SET_SRC are handled via the
2160 && (GET_CODE (SET_DEST (x
)) == REG
2161 || GET_CODE (SET_DEST (x
)) == CC0
2162 || GET_CODE (SET_DEST (x
)) == PC
))
2165 /* Get the mode of operand 0 in case X is now a SIGN_EXTEND of a constant. */
2167 op0_mode
= GET_MODE (XEXP (x
, 0));
2169 for (i
= 0; i
< len
; i
++)
2174 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
2177 if (COMBINE_RTX_EQUAL_P (XVECEXP (x
, i
, j
), from
))
2179 new = (unique_copy
&& n_occurrences
? copy_rtx (to
) : to
);
2184 new = subst (XVECEXP (x
, i
, j
), from
, to
, 0, unique_copy
);
2186 /* If this substitution failed, this whole thing fails. */
2187 if (GET_CODE (new) == CLOBBER
&& XEXP (new, 0) == const0_rtx
)
2191 SUBST (XVECEXP (x
, i
, j
), new);
2194 else if (fmt
[i
] == 'e')
2198 if (COMBINE_RTX_EQUAL_P (XEXP (x
, i
), from
))
2200 new = (unique_copy
&& n_occurrences
? copy_rtx (to
) : to
);
2204 /* If we are in a SET_DEST, suppress most cases unless we
2205 have gone inside a MEM, in which case we want to
2206 simplify the address. We assume here that things that
2207 are actually part of the destination have their inner
2208 parts in the first expression. This is true for SUBREG,
2209 STRICT_LOW_PART, and ZERO_EXTRACT, which are the only
2210 things aside from REG and MEM that should appear in a
2212 new = subst (XEXP (x
, i
), from
, to
,
2214 && (code
== SUBREG
|| code
== STRICT_LOW_PART
2215 || code
== ZERO_EXTRACT
))
2217 && i
== 0), unique_copy
);
2219 /* If we found that we will have to reject this combination,
2220 indicate that by returning the CLOBBER ourselves, rather than
2221 an expression containing it. This will speed things up as
2222 well as prevent accidents where two CLOBBERs are considered
2223 to be equal, thus producing an incorrect simplification. */
2225 if (GET_CODE (new) == CLOBBER
&& XEXP (new, 0) == const0_rtx
)
2228 SUBST (XEXP (x
, i
), new);
2232 /* If this is a commutative operation, put a constant last and a complex
2233 expression first. We don't need to do this for comparisons here. */
2234 if (GET_RTX_CLASS (code
) == 'c'
2235 && ((CONSTANT_P (XEXP (x
, 0)) && GET_CODE (XEXP (x
, 1)) != CONST_INT
)
2236 || (GET_RTX_CLASS (GET_CODE (XEXP (x
, 0))) == 'o'
2237 && GET_RTX_CLASS (GET_CODE (XEXP (x
, 1))) != 'o')
2238 || (GET_CODE (XEXP (x
, 0)) == SUBREG
2239 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x
, 0)))) == 'o'
2240 && GET_RTX_CLASS (GET_CODE (XEXP (x
, 1))) != 'o')))
2243 SUBST (XEXP (x
, 0), XEXP (x
, 1));
2244 SUBST (XEXP (x
, 1), temp
);
2247 /* Try to fold this expression in case we have constants that weren't
2250 switch (GET_RTX_CLASS (code
))
2253 temp
= simplify_unary_operation (code
, mode
, XEXP (x
, 0), op0_mode
);
2256 temp
= simplify_relational_operation (code
, op0_mode
,
2257 XEXP (x
, 0), XEXP (x
, 1));
2261 temp
= simplify_binary_operation (code
, mode
, XEXP (x
, 0), XEXP (x
, 1));
2265 temp
= simplify_ternary_operation (code
, mode
, op0_mode
, XEXP (x
, 0),
2266 XEXP (x
, 1), XEXP (x
, 2));
2273 /* We come back to here if we have replaced the expression with one of
2274 a different code and it is likely that further simplification will be
2279 /* If we have restarted more than 4 times, we are probably looping, so
2281 if (++n_restarts
> 4)
2284 code
= GET_CODE (x
);
2286 /* First see if we can apply the inverse distributive law. */
2287 if (code
== PLUS
|| code
== MINUS
|| code
== IOR
|| code
== XOR
)
2289 x
= apply_distributive_law (x
);
2290 code
= GET_CODE (x
);
2293 /* If CODE is an associative operation not otherwise handled, see if we
2294 can associate some operands. This can win if they are constants or
2295 if they are logically related (i.e. (a & b) & a. */
2296 if ((code
== PLUS
|| code
== MINUS
2297 || code
== MULT
|| code
== AND
|| code
== IOR
|| code
== XOR
2298 || code
== DIV
|| code
== UDIV
2299 || code
== SMAX
|| code
== SMIN
|| code
== UMAX
|| code
== UMIN
)
2300 && GET_MODE_CLASS (mode
) == MODE_INT
)
2302 if (GET_CODE (XEXP (x
, 0)) == code
)
2304 rtx other
= XEXP (XEXP (x
, 0), 0);
2305 rtx inner_op0
= XEXP (XEXP (x
, 0), 1);
2306 rtx inner_op1
= XEXP (x
, 1);
2309 /* Make sure we pass the constant operand if any as the second
2310 one if this is a commutative operation. */
2311 if (CONSTANT_P (inner_op0
) && GET_RTX_CLASS (code
) == 'c')
2313 rtx tem
= inner_op0
;
2314 inner_op0
= inner_op1
;
2317 inner
= simplify_binary_operation (code
== MINUS
? PLUS
2318 : code
== DIV
? MULT
2319 : code
== UDIV
? MULT
2321 mode
, inner_op0
, inner_op1
);
2323 /* For commutative operations, try the other pair if that one
2325 if (inner
== 0 && GET_RTX_CLASS (code
) == 'c')
2327 other
= XEXP (XEXP (x
, 0), 1);
2328 inner
= simplify_binary_operation (code
, mode
,
2329 XEXP (XEXP (x
, 0), 0),
2335 x
= gen_binary (code
, mode
, other
, inner
);
2342 /* A little bit of algebraic simplification here. */
2346 /* Ensure that our address has any ASHIFTs converted to MULT in case
2347 address-recognizing predicates are called later. */
2348 temp
= make_compound_operation (XEXP (x
, 0), MEM
);
2349 SUBST (XEXP (x
, 0), temp
);
2353 /* (subreg:A (mem:B X) N) becomes a modified MEM unless the SUBREG
2354 is paradoxical. If we can't do that safely, then it becomes
2355 something nonsensical so that this combination won't take place. */
2357 if (GET_CODE (SUBREG_REG (x
)) == MEM
2358 && (GET_MODE_SIZE (mode
)
2359 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x
)))))
2361 rtx inner
= SUBREG_REG (x
);
2362 int endian_offset
= 0;
2363 /* Don't change the mode of the MEM
2364 if that would change the meaning of the address. */
2365 if (MEM_VOLATILE_P (SUBREG_REG (x
))
2366 || mode_dependent_address_p (XEXP (inner
, 0)))
2367 return gen_rtx (CLOBBER
, mode
, const0_rtx
);
2369 #if BYTES_BIG_ENDIAN
2370 if (GET_MODE_SIZE (mode
) < UNITS_PER_WORD
)
2371 endian_offset
+= UNITS_PER_WORD
- GET_MODE_SIZE (mode
);
2372 if (GET_MODE_SIZE (GET_MODE (inner
)) < UNITS_PER_WORD
)
2373 endian_offset
-= UNITS_PER_WORD
- GET_MODE_SIZE (GET_MODE (inner
));
2375 /* Note if the plus_constant doesn't make a valid address
2376 then this combination won't be accepted. */
2377 x
= gen_rtx (MEM
, mode
,
2378 plus_constant (XEXP (inner
, 0),
2379 (SUBREG_WORD (x
) * UNITS_PER_WORD
2381 MEM_VOLATILE_P (x
) = MEM_VOLATILE_P (inner
);
2382 RTX_UNCHANGING_P (x
) = RTX_UNCHANGING_P (inner
);
2383 MEM_IN_STRUCT_P (x
) = MEM_IN_STRUCT_P (inner
);
2387 /* If we are in a SET_DEST, these other cases can't apply. */
2391 /* Changing mode twice with SUBREG => just change it once,
2392 or not at all if changing back to starting mode. */
2393 if (GET_CODE (SUBREG_REG (x
)) == SUBREG
)
2395 if (mode
== GET_MODE (SUBREG_REG (SUBREG_REG (x
)))
2396 && SUBREG_WORD (x
) == 0 && SUBREG_WORD (SUBREG_REG (x
)) == 0)
2397 return SUBREG_REG (SUBREG_REG (x
));
2399 SUBST_INT (SUBREG_WORD (x
),
2400 SUBREG_WORD (x
) + SUBREG_WORD (SUBREG_REG (x
)));
2401 SUBST (SUBREG_REG (x
), SUBREG_REG (SUBREG_REG (x
)));
2404 /* SUBREG of a hard register => just change the register number
2405 and/or mode. If the hard register is not valid in that mode,
2406 suppress this combination. */
2408 if (GET_CODE (SUBREG_REG (x
)) == REG
2409 && REGNO (SUBREG_REG (x
)) < FIRST_PSEUDO_REGISTER
)
2411 if (HARD_REGNO_MODE_OK (REGNO (SUBREG_REG (x
)) + SUBREG_WORD (x
),
2413 return gen_rtx (REG
, mode
,
2414 REGNO (SUBREG_REG (x
)) + SUBREG_WORD (x
));
2416 return gen_rtx (CLOBBER
, mode
, const0_rtx
);
2419 /* For a constant, try to pick up the part we want. Handle a full
2420 word and low-order part. */
2422 if (CONSTANT_P (SUBREG_REG (x
)) && op0_mode
!= VOIDmode
2423 && GET_MODE_SIZE (mode
) == UNITS_PER_WORD
2424 && GET_MODE_CLASS (mode
) == MODE_INT
)
2426 temp
= operand_subword (SUBREG_REG (x
), SUBREG_WORD (x
),
2432 if (CONSTANT_P (SUBREG_REG (x
)) && subreg_lowpart_p (x
))
2433 return gen_lowpart_for_combine (mode
, SUBREG_REG (x
));
2435 /* If we are narrowing the object, we need to see if we can simplify
2436 the expression for the object knowing that we only need the
2437 low-order bits. We do this by computing an AND of the object
2438 with only the bits we care about. That will produce any needed
2439 simplifications. If the resulting computation is just the
2440 AND with the significant bits, our operand is the first operand
2441 of the AND. Otherwise, it is the resulting expression. */
2442 if (GET_MODE_SIZE (mode
) < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x
)))
2443 && subreg_lowpart_p (x
)
2444 && GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x
))) <= HOST_BITS_PER_INT
)
2446 temp
= simplify_and_const_int (0, GET_MODE (SUBREG_REG (x
)),
2447 SUBREG_REG (x
), GET_MODE_MASK (mode
));
2448 if (GET_CODE (temp
) == AND
&& GET_CODE (XEXP (temp
, 1)) == CONST_INT
2449 && INTVAL (XEXP (temp
, 1)) == GET_MODE_MASK (mode
))
2450 temp
= XEXP (temp
, 0);
2451 return gen_lowpart_for_combine (mode
, temp
);
2457 /* (not (plus X -1)) can become (neg X). */
2458 if (GET_CODE (XEXP (x
, 0)) == PLUS
2459 && XEXP (XEXP (x
, 0), 1) == constm1_rtx
)
2461 x
= gen_rtx_combine (NEG
, mode
, XEXP (XEXP (x
, 0), 0));
2465 /* Similarly, (not (neg X)) is (plus X -1). */
2466 if (GET_CODE (XEXP (x
, 0)) == NEG
)
2468 x
= gen_rtx_combine (PLUS
, mode
, XEXP (XEXP (x
, 0), 0), constm1_rtx
);
2472 /* (not (ashift 1 X)) is (rotate ~1 X). We used to do this for operands
2473 other than 1, but that is not valid. We could do a similar
2474 simplification for (not (lshiftrt C X)) where C is just the sign bit,
2475 but this doesn't seem common enough to bother with. */
2476 if (GET_CODE (XEXP (x
, 0)) == ASHIFT
2477 && XEXP (XEXP (x
, 0), 0) == const1_rtx
)
2479 x
= gen_rtx (ROTATE
, mode
, gen_unary (NOT
, mode
, const1_rtx
),
2480 XEXP (XEXP (x
, 0), 1));
2484 if (GET_CODE (XEXP (x
, 0)) == SUBREG
2485 && subreg_lowpart_p (XEXP (x
, 0))
2486 && (GET_MODE_SIZE (GET_MODE (XEXP (x
, 0)))
2487 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (x
, 0)))))
2488 && GET_CODE (SUBREG_REG (XEXP (x
, 0))) == ASHIFT
2489 && XEXP (SUBREG_REG (XEXP (x
, 0)), 0) == const1_rtx
)
2491 enum machine_mode inner_mode
= GET_MODE (SUBREG_REG (XEXP (x
, 0)));
2493 x
= gen_rtx (ROTATE
, inner_mode
,
2494 gen_unary (NOT
, inner_mode
, const1_rtx
),
2495 XEXP (SUBREG_REG (XEXP (x
, 0)), 1));
2496 x
= gen_lowpart_for_combine (mode
, x
);
2500 #if STORE_FLAG_VALUE == -1
2501 /* (not (comparison foo bar)) can be done by reversing the comparison
2503 if (GET_RTX_CLASS (GET_CODE (XEXP (x
, 0))) == '<'
2504 && reversible_comparison_p (XEXP (x
, 0)))
2505 return gen_rtx_combine (reverse_condition (GET_CODE (XEXP (x
, 0))),
2506 mode
, XEXP (XEXP (x
, 0), 0),
2507 XEXP (XEXP (x
, 0), 1));
2510 /* Apply De Morgan's laws to reduce number of patterns for machines
2511 with negating logical insns (and-not, nand, etc.). If result has
2512 only one NOT, put it first, since that is how the patterns are
2515 if (GET_CODE (XEXP (x
, 0)) == IOR
|| GET_CODE (XEXP (x
, 0)) == AND
)
2517 rtx in1
= XEXP (XEXP (x
, 0), 0), in2
= XEXP (XEXP (x
, 0), 1);
2519 if (GET_CODE (in1
) == NOT
)
2520 in1
= XEXP (in1
, 0);
2522 in1
= gen_rtx_combine (NOT
, GET_MODE (in1
), in1
);
2524 if (GET_CODE (in2
) == NOT
)
2525 in2
= XEXP (in2
, 0);
2526 else if (GET_CODE (in2
) == CONST_INT
2527 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_INT
)
2528 in2
= gen_rtx (CONST_INT
, VOIDmode
,
2529 GET_MODE_MASK (mode
) & ~ INTVAL (in2
));
2531 in2
= gen_rtx_combine (NOT
, GET_MODE (in2
), in2
);
2533 if (GET_CODE (in2
) == NOT
)
2536 in2
= in1
; in1
= tem
;
2539 x
= gen_rtx_combine (GET_CODE (XEXP (x
, 0)) == IOR
? AND
: IOR
,
2546 /* (neg (plus X 1)) can become (not X). */
2547 if (GET_CODE (XEXP (x
, 0)) == PLUS
2548 && XEXP (XEXP (x
, 0), 1) == const1_rtx
)
2550 x
= gen_rtx_combine (NOT
, mode
, XEXP (XEXP (x
, 0), 0));
2554 /* Similarly, (neg (not X)) is (plus X 1). */
2555 if (GET_CODE (XEXP (x
, 0)) == NOT
)
2557 x
= gen_rtx_combine (PLUS
, mode
, XEXP (XEXP (x
, 0), 0), const1_rtx
);
2561 /* (neg (abs X)) is X if X is a value known to be either -1 or 0. */
2562 if (GET_CODE (XEXP (x
, 0)) == ABS
2563 && ((GET_CODE (XEXP (XEXP (x
, 0), 0)) == SIGN_EXTRACT
2564 && XEXP (XEXP (XEXP (x
, 0), 0), 1) == const1_rtx
)
2565 || (GET_CODE (XEXP (XEXP (x
, 0), 0)) == ASHIFTRT
2566 && GET_CODE (XEXP (XEXP (XEXP (x
, 0), 0), 1)) == CONST_INT
2567 && (INTVAL (XEXP (XEXP (XEXP (x
, 0), 0), 1))
2568 == GET_MODE_BITSIZE (mode
) - 1))
2569 || ((temp
= get_last_value (XEXP (XEXP (x
, 0), 0))) != 0
2570 && ((GET_CODE (temp
) == SIGN_EXTRACT
2571 && XEXP (temp
, 1) == const1_rtx
)
2572 || (GET_CODE (temp
) == ASHIFTRT
2573 && GET_CODE (XEXP (temp
, 1)) == CONST_INT
2574 && (INTVAL (XEXP (temp
, 1))
2575 == GET_MODE_BITSIZE (mode
) - 1))))))
2576 return XEXP (XEXP (x
, 0), 0);
2578 /* (neg (minus X Y)) can become (minus Y X). */
2579 if (GET_CODE (XEXP (x
, 0)) == MINUS
2580 && (GET_MODE_CLASS (mode
) != MODE_FLOAT
2581 /* x-y != -(y-x) with IEEE floating point. */
2582 || TARGET_FLOAT_FORMAT
!= IEEE_FLOAT_FORMAT
))
2584 x
= gen_binary (MINUS
, mode
, XEXP (XEXP (x
, 0), 1),
2585 XEXP (XEXP (x
, 0), 0));
2589 /* NEG commutes with ASHIFT since it is multiplication. Only do this
2590 if we can then eliminate the NEG (e.g.,
2591 if the operand is a constant). */
2593 if (GET_CODE (XEXP (x
, 0)) == ASHIFT
)
2595 temp
= simplify_unary_operation (NEG
, mode
,
2596 XEXP (XEXP (x
, 0), 0), mode
);
2599 SUBST (XEXP (XEXP (x
, 0), 0), temp
);
2604 temp
= expand_compound_operation (XEXP (x
, 0));
2606 /* For C equal to the width of MODE minus 1, (neg (ashiftrt X C)) can be
2607 replaced by (lshiftrt X C). This will convert
2608 (neg (sign_extract X 1 Y)) to (zero_extract X 1 Y). */
2610 if (GET_CODE (temp
) == ASHIFTRT
2611 && GET_CODE (XEXP (temp
, 1)) == CONST_INT
2612 && INTVAL (XEXP (temp
, 1)) == GET_MODE_BITSIZE (mode
) - 1)
2614 x
= simplify_shift_const (temp
, LSHIFTRT
, mode
, XEXP (temp
, 0),
2615 INTVAL (XEXP (temp
, 1)));
2619 /* If X has only a single bit significant, say, bit I, convert
2620 (neg X) to (ashiftrt (ashift X C-I) C-I) where C is the bitsize of
2621 MODE minus 1. This will convert (neg (zero_extract X 1 Y)) to
2622 (sign_extract X 1 Y). But only do this if TEMP isn't a register
2623 or a SUBREG of one since we'd be making the expression more
2624 complex if it was just a register. */
2626 if (GET_CODE (temp
) != REG
2627 && ! (GET_CODE (temp
) == SUBREG
2628 && GET_CODE (SUBREG_REG (temp
)) == REG
)
2629 && (i
= exact_log2 (significant_bits (temp
, mode
))) >= 0)
2631 rtx temp1
= simplify_shift_const
2633 simplify_shift_const (0, ASHIFT
, mode
, temp
,
2634 GET_MODE_BITSIZE (mode
) - 1 - i
),
2635 GET_MODE_BITSIZE (mode
) - 1 - i
);
2637 /* If all we did was surround TEMP with the two shifts, we
2638 haven't improved anything, so don't use it. Otherwise,
2639 we are better off with TEMP1. */
2640 if (GET_CODE (temp1
) != ASHIFTRT
2641 || GET_CODE (XEXP (temp1
, 0)) != ASHIFT
2642 || XEXP (XEXP (temp1
, 0), 0) != temp
)
2650 case FLOAT_TRUNCATE
:
2651 /* (float_truncate:SF (float_extend:DF foo:SF)) = foo:SF. */
2652 if (GET_CODE (XEXP (x
, 0)) == FLOAT_EXTEND
2653 && GET_MODE (XEXP (XEXP (x
, 0), 0)) == mode
)
2654 return XEXP (XEXP (x
, 0), 0);
2659 /* Convert (compare FOO (const_int 0)) to FOO unless we aren't
2660 using cc0, in which case we want to leave it as a COMPARE
2661 so we can distinguish it from a register-register-copy. */
2662 if (XEXP (x
, 1) == const0_rtx
)
2665 /* In IEEE floating point, x-0 is not the same as x. */
2666 if ((TARGET_FLOAT_FORMAT
!= IEEE_FLOAT_FORMAT
2667 || GET_MODE_CLASS (GET_MODE (XEXP (x
, 0))) == MODE_INT
)
2668 && XEXP (x
, 1) == CONST0_RTX (GET_MODE (XEXP (x
, 0))))
2674 /* (const (const X)) can become (const X). Do it this way rather than
2675 returning the inner CONST since CONST can be shared with a
2677 if (GET_CODE (XEXP (x
, 0)) == CONST
)
2678 SUBST (XEXP (x
, 0), XEXP (XEXP (x
, 0), 0));
2683 /* Convert (lo_sum (high FOO) FOO) to FOO. This is necessary so we
2684 can add in an offset. find_split_point will split this address up
2685 again if it doesn't match. */
2686 if (GET_CODE (XEXP (x
, 0)) == HIGH
2687 && rtx_equal_p (XEXP (XEXP (x
, 0), 0), XEXP (x
, 1)))
2693 /* If we have (plus (plus (A const) B)), associate it so that CONST is
2694 outermost. That's because that's the way indexed addresses are
2695 supposed to appear. This code used to check many more cases, but
2696 they are now checked elsewhere. */
2697 if (GET_CODE (XEXP (x
, 0)) == PLUS
2698 && CONSTANT_ADDRESS_P (XEXP (XEXP (x
, 0), 1)))
2699 return gen_binary (PLUS
, mode
,
2700 gen_binary (PLUS
, mode
, XEXP (XEXP (x
, 0), 0),
2702 XEXP (XEXP (x
, 0), 1));
2704 /* (plus (xor (and <foo> (const_int pow2 - 1)) <c>) <-c>)
2705 when c is (const_int (pow2 + 1) / 2) is a sign extension of a
2706 bit-field and can be replaced by either a sign_extend or a
2707 sign_extract. The `and' may be a zero_extend. */
2708 if (GET_CODE (XEXP (x
, 0)) == XOR
2709 && GET_CODE (XEXP (x
, 1)) == CONST_INT
2710 && GET_CODE (XEXP (XEXP (x
, 0), 1)) == CONST_INT
2711 && INTVAL (XEXP (x
, 1)) == - INTVAL (XEXP (XEXP (x
, 0), 1))
2712 && (i
= exact_log2 (INTVAL (XEXP (XEXP (x
, 0), 1)))) >= 0
2713 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_INT
2714 && ((GET_CODE (XEXP (XEXP (x
, 0), 0)) == AND
2715 && GET_CODE (XEXP (XEXP (XEXP (x
, 0), 0), 1)) == CONST_INT
2716 && (INTVAL (XEXP (XEXP (XEXP (x
, 0), 0), 1))
2717 == (1 << (i
+ 1)) - 1))
2718 || (GET_CODE (XEXP (XEXP (x
, 0), 0)) == ZERO_EXTEND
2719 && (GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (XEXP (x
, 0), 0), 0)))
2722 x
= simplify_shift_const
2724 simplify_shift_const (0, ASHIFT
, mode
,
2725 XEXP (XEXP (XEXP (x
, 0), 0), 0),
2726 GET_MODE_BITSIZE (mode
) - (i
+ 1)),
2727 GET_MODE_BITSIZE (mode
) - (i
+ 1));
2731 /* If only the low-order bit of X is significant, (plus x -1)
2732 can become (ashiftrt (ashift (xor x 1) C) C) where C is
2733 the bitsize of the mode - 1. This allows simplification of
2734 "a = (b & 8) == 0;" */
2735 if (XEXP (x
, 1) == constm1_rtx
2736 && GET_CODE (XEXP (x
, 0)) != REG
2737 && ! (GET_CODE (XEXP (x
,0)) == SUBREG
2738 && GET_CODE (SUBREG_REG (XEXP (x
, 0))) == REG
)
2739 && significant_bits (XEXP (x
, 0), mode
) == 1)
2741 x
= simplify_shift_const
2743 simplify_shift_const (0, ASHIFT
, mode
,
2744 gen_rtx_combine (XOR
, mode
,
2745 XEXP (x
, 0), const1_rtx
),
2746 GET_MODE_BITSIZE (mode
) - 1),
2747 GET_MODE_BITSIZE (mode
) - 1);
2753 /* (minus <foo> (and <foo> (const_int -pow2))) becomes
2754 (and <foo> (const_int pow2-1)) */
2755 if (GET_CODE (XEXP (x
, 1)) == AND
2756 && GET_CODE (XEXP (XEXP (x
, 1), 1)) == CONST_INT
2757 && exact_log2 (- INTVAL (XEXP (XEXP (x
, 1), 1))) >= 0
2758 && rtx_equal_p (XEXP (XEXP (x
, 1), 0), XEXP (x
, 0)))
2760 x
= simplify_and_const_int (0, mode
, XEXP (x
, 0),
2761 - INTVAL (XEXP (XEXP (x
, 1), 1)) - 1);
2767 /* If we have (mult (plus A B) C), apply the distributive law and then
2768 the inverse distributive law to see if things simplify. This
2769 occurs mostly in addresses, often when unrolling loops. */
2771 if (GET_CODE (XEXP (x
, 0)) == PLUS
)
2773 x
= apply_distributive_law
2774 (gen_binary (PLUS
, mode
,
2775 gen_binary (MULT
, mode
,
2776 XEXP (XEXP (x
, 0), 0), XEXP (x
, 1)),
2777 gen_binary (MULT
, mode
,
2778 XEXP (XEXP (x
, 0), 1), XEXP (x
, 1))));
2780 if (GET_CODE (x
) != MULT
)
2784 /* If this is multiplication by a power of two and its first operand is
2785 a shift, treat the multiply as a shift to allow the shifts to
2786 possibly combine. */
2787 if (GET_CODE (XEXP (x
, 1)) == CONST_INT
2788 && (i
= exact_log2 (INTVAL (XEXP (x
, 1)))) >= 0
2789 && (GET_CODE (XEXP (x
, 0)) == ASHIFT
2790 || GET_CODE (XEXP (x
, 0)) == LSHIFTRT
2791 || GET_CODE (XEXP (x
, 0)) == ASHIFTRT
2792 || GET_CODE (XEXP (x
, 0)) == ROTATE
2793 || GET_CODE (XEXP (x
, 0)) == ROTATERT
))
2795 x
= simplify_shift_const (0, ASHIFT
, mode
, XEXP (x
, 0), i
);
2799 /* Convert (mult (ashift (const_int 1) A) B) to (ashift B A). */
2800 if (GET_CODE (XEXP (x
, 0)) == ASHIFT
2801 && XEXP (XEXP (x
, 0), 0) == const1_rtx
)
2802 return gen_rtx_combine (ASHIFT
, mode
, XEXP (x
, 1),
2803 XEXP (XEXP (x
, 0), 1));
2807 /* If this is a divide by a power of two, treat it as a shift if
2808 its first operand is a shift. */
2809 if (GET_CODE (XEXP (x
, 1)) == CONST_INT
2810 && (i
= exact_log2 (INTVAL (XEXP (x
, 1)))) >= 0
2811 && (GET_CODE (XEXP (x
, 0)) == ASHIFT
2812 || GET_CODE (XEXP (x
, 0)) == LSHIFTRT
2813 || GET_CODE (XEXP (x
, 0)) == ASHIFTRT
2814 || GET_CODE (XEXP (x
, 0)) == ROTATE
2815 || GET_CODE (XEXP (x
, 0)) == ROTATERT
))
2817 x
= simplify_shift_const (0, LSHIFTRT
, mode
, XEXP (x
, 0), i
);
2823 case GT
: case GTU
: case GE
: case GEU
:
2824 case LT
: case LTU
: case LE
: case LEU
:
2825 /* If the first operand is a condition code, we can't do anything
2827 if (GET_CODE (XEXP (x
, 0)) == COMPARE
2828 || (GET_MODE_CLASS (GET_MODE (XEXP (x
, 0))) != MODE_CC
2830 && XEXP (x
, 0) != cc0_rtx
2834 rtx op0
= XEXP (x
, 0);
2835 rtx op1
= XEXP (x
, 1);
2836 enum rtx_code new_code
;
2838 if (GET_CODE (op0
) == COMPARE
)
2839 op1
= XEXP (op0
, 1), op0
= XEXP (op0
, 0);
2841 /* Simplify our comparison, if possible. */
2842 new_code
= simplify_comparison (code
, &op0
, &op1
);
2844 #if STORE_FLAG_VALUE == 1
2845 /* If STORE_FLAG_VALUE is 1, we can convert (ne x 0) to simply X
2846 if only the low-order bit is significant in X (such as when
2847 X is a ZERO_EXTRACT of one bit. Similarly, we can convert
2849 if (new_code
== NE
&& mode
!= VOIDmode
2850 && op1
== const0_rtx
2851 && significant_bits (op0
, GET_MODE (op0
)) == 1)
2852 return gen_lowpart_for_combine (mode
, op0
);
2853 else if (new_code
== EQ
&& mode
!= VOIDmode
2854 && op1
== const0_rtx
2855 && significant_bits (op0
, GET_MODE (op0
)) == 1)
2856 return gen_rtx_combine (XOR
, mode
,
2857 gen_lowpart_for_combine (mode
, op0
),
2861 #if STORE_FLAG_VALUE == -1
2862 /* If STORE_FLAG_VALUE is -1, we can convert (ne x 0)
2863 to (neg x) if only the low-order bit of X is significant.
2864 This converts (ne (zero_extract X 1 Y) 0) to
2865 (sign_extract X 1 Y). */
2866 if (new_code
== NE
&& mode
!= VOIDmode
2867 && op1
== const0_rtx
2868 && significant_bits (op0
, GET_MODE (op0
)) == 1)
2870 x
= gen_rtx_combine (NEG
, mode
,
2871 gen_lowpart_for_combine (mode
, op0
));
2876 /* If STORE_FLAG_VALUE says to just test the sign bit and X has just
2877 one significant bit, we can convert (ne x 0) to (ashift x c)
2878 where C puts the bit in the sign bit. Remove any AND with
2879 STORE_FLAG_VALUE when we are done, since we are only going to
2880 test the sign bit. */
2881 if (new_code
== NE
&& mode
!= VOIDmode
2882 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_INT
2883 && STORE_FLAG_VALUE
== 1 << (GET_MODE_BITSIZE (mode
) - 1)
2884 && op1
== const0_rtx
2885 && mode
== GET_MODE (op0
)
2886 && (i
= exact_log2 (significant_bits (op0
, GET_MODE (op0
)))) >= 0)
2888 x
= simplify_shift_const (0, ASHIFT
, mode
, op0
,
2889 GET_MODE_BITSIZE (mode
) - 1 - i
);
2890 if (GET_CODE (x
) == AND
&& XEXP (x
, 1) == const_true_rtx
)
2896 /* If the code changed, return a whole new comparison. */
2897 if (new_code
!= code
)
2898 return gen_rtx_combine (new_code
, mode
, op0
, op1
);
2900 /* Otherwise, keep this operation, but maybe change its operands.
2901 This also converts (ne (compare FOO BAR) 0) to (ne FOO BAR). */
2902 SUBST (XEXP (x
, 0), op0
);
2903 SUBST (XEXP (x
, 1), op1
);
2908 /* If we have (if_then_else FOO (pc) (label_ref BAR)) and FOO can be
2909 reversed, do so to avoid needing two sets of patterns for
2910 subtract-and-branch insns. */
2911 if (XEXP (x
, 1) == pc_rtx
&& reversible_comparison_p (XEXP (x
, 0)))
2914 gen_rtx_combine (reverse_condition (GET_CODE (XEXP (x
, 0))),
2915 GET_MODE (XEXP (x
, 0)),
2916 XEXP (XEXP (x
, 0), 0),
2917 XEXP (XEXP (x
, 0), 1)));
2918 SUBST (XEXP (x
, 1), XEXP (x
, 2));
2919 SUBST (XEXP (x
, 2), pc_rtx
);
2927 /* If we are processing SET_DEST, we are done. */
2931 x
= expand_compound_operation (x
);
2932 if (GET_CODE (x
) != code
)
2937 /* (set (pc) (return)) gets written as (return). */
2938 if (GET_CODE (SET_DEST (x
)) == PC
&& GET_CODE (SET_SRC (x
)) == RETURN
)
2941 /* Convert this into a field assignment operation, if possible. */
2942 x
= make_field_assignment (x
);
2944 /* If we have (set x (subreg:m1 (op:m2 ...) 0)) with OP being some
2945 operation, and X being a REG or (subreg (reg)), we may be able to
2946 convert this to (set (subreg:m2 x) (op)).
2948 We can always do this if M1 is narrower than M2 because that
2949 means that we only care about the low bits of the result.
2951 However, on most machines (those with BYTE_LOADS_ZERO_EXTEND
2952 not defined), we cannot perform a narrower operation than
2953 requested since the high-order bits will be undefined. On
2954 machines where BYTE_LOADS_ZERO_EXTEND is defined, however, this
2955 transformation is safe as long as M1 and M2 have the same number
2958 if (GET_CODE (SET_SRC (x
)) == SUBREG
2959 && subreg_lowpart_p (SET_SRC (x
))
2960 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (SET_SRC (x
)))) != 'o'
2961 && (((GET_MODE_SIZE (GET_MODE (SET_SRC (x
))) + (UNITS_PER_WORD
- 1))
2963 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_SRC (x
))))
2964 + (UNITS_PER_WORD
- 1)) / UNITS_PER_WORD
))
2965 #ifndef BYTE_LOADS_ZERO_EXTEND
2966 && (GET_MODE_SIZE (GET_MODE (SET_SRC (x
)))
2967 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_SRC (x
)))))
2969 && (GET_CODE (SET_DEST (x
)) == REG
2970 || (GET_CODE (SET_DEST (x
)) == SUBREG
2971 && GET_CODE (SUBREG_REG (SET_DEST (x
))) == REG
)))
2973 /* Get the object that will be the SUBREG_REG of the
2974 SUBREG we are making. Note that SUBREG_WORD will always
2975 be zero because this will either be a paradoxical SUBREG
2976 or a SUBREG with the same number of words on the outside and
2978 rtx object
= (GET_CODE (SET_DEST (x
)) == REG
? SET_DEST (x
)
2979 : SUBREG_REG (SET_DEST (x
)));
2981 SUBST (SET_DEST (x
),
2982 gen_rtx (SUBREG
, GET_MODE (SUBREG_REG (SET_SRC (x
))),
2984 SUBST (SET_SRC (x
), SUBREG_REG (SET_SRC (x
)));
2987 /* If we are setting CC0 or if the source is a COMPARE, look for the
2988 use of the comparison result and try to simplify it unless we already
2989 have used undobuf.other_insn. */
2990 if ((GET_CODE (SET_SRC (x
)) == COMPARE
2992 || SET_DEST (x
) == cc0_rtx
2995 && (cc_use
= find_single_use (SET_DEST (x
), subst_insn
,
2997 && (undobuf
.other_insn
== 0 || other_insn
== undobuf
.other_insn
)
2998 && GET_RTX_CLASS (GET_CODE (*cc_use
)) == '<'
2999 && XEXP (*cc_use
, 0) == SET_DEST (x
))
3001 enum rtx_code old_code
= GET_CODE (*cc_use
);
3002 enum rtx_code new_code
;
3004 int other_changed
= 0;
3005 enum machine_mode compare_mode
= GET_MODE (SET_DEST (x
));
3007 if (GET_CODE (SET_SRC (x
)) == COMPARE
)
3008 op0
= XEXP (SET_SRC (x
), 0), op1
= XEXP (SET_SRC (x
), 1);
3010 op0
= SET_SRC (x
), op1
= const0_rtx
;
3012 /* Simplify our comparison, if possible. */
3013 new_code
= simplify_comparison (old_code
, &op0
, &op1
);
3015 #if !defined (HAVE_cc0) && defined (EXTRA_CC_MODES)
3016 /* If this machine has CC modes other than CCmode, check to see
3017 if we need to use a different CC mode here. */
3018 compare_mode
= SELECT_CC_MODE (new_code
, op0
);
3020 /* If the mode changed, we have to change SET_DEST, the mode
3021 in the compare, and the mode in the place SET_DEST is used.
3022 If SET_DEST is a hard register, just build new versions with
3023 the proper mode. If it is a pseudo, we lose unless it is the only
3024 time we set the pseudo, in which case we can safely change
3026 if (compare_mode
!= GET_MODE (SET_DEST (x
)))
3028 int regno
= REGNO (SET_DEST (x
));
3029 rtx new_dest
= gen_rtx (REG
, compare_mode
, regno
);
3031 if (regno
< FIRST_PSEUDO_REGISTER
3032 || (reg_n_sets
[regno
] == 1
3033 && ! REG_USERVAR_P (SET_DEST (x
))))
3035 if (regno
>= FIRST_PSEUDO_REGISTER
)
3036 SUBST (regno_reg_rtx
[regno
], new_dest
);
3038 SUBST (SET_DEST (x
), new_dest
);
3039 SUBST (XEXP (*cc_use
, 0), new_dest
);
3045 /* If the code changed, we have to build a new comparison
3046 in undobuf.other_insn. */
3047 if (new_code
!= old_code
)
3051 SUBST (*cc_use
, gen_rtx_combine (new_code
, GET_MODE (*cc_use
),
3052 SET_DEST (x
), const0_rtx
));
3054 /* If the only change we made was to change an EQ into an
3055 NE or vice versa, OP0 has only one significant bit,
3056 and OP1 is zero, check if changing the user of the condition
3057 code will produce a valid insn. If it won't, we can keep
3058 the original code in that insn by surrounding our operation
3061 if (((old_code
== NE
&& new_code
== EQ
)
3062 || (old_code
== EQ
&& new_code
== NE
))
3063 && ! other_changed
&& op1
== const0_rtx
3064 && GET_MODE_BITSIZE (GET_MODE (op0
)) <= HOST_BITS_PER_INT
3065 && (exact_log2 (mask
= significant_bits (op0
,
3069 rtx pat
= PATTERN (other_insn
), note
= 0;
3071 if ((recog_for_combine (&pat
, undobuf
.other_insn
, ¬e
) < 0
3072 && ! check_asm_operands (pat
)))
3074 PUT_CODE (*cc_use
, old_code
);
3077 op0
= gen_binary (XOR
, GET_MODE (op0
), op0
,
3078 gen_rtx (CONST_INT
, VOIDmode
, mask
));
3086 undobuf
.other_insn
= other_insn
;
3089 /* If we are now comparing against zero, change our source if
3090 needed. If we do not use cc0, we always have a COMPARE. */
3091 if (op1
== const0_rtx
&& SET_DEST (x
) == cc0_rtx
)
3092 SUBST (SET_SRC (x
), op0
);
3096 /* Otherwise, if we didn't previously have a COMPARE in the
3097 correct mode, we need one. */
3098 if (GET_CODE (SET_SRC (x
)) != COMPARE
3099 || GET_MODE (SET_SRC (x
)) != compare_mode
)
3100 SUBST (SET_SRC (x
), gen_rtx_combine (COMPARE
, compare_mode
,
3104 /* Otherwise, update the COMPARE if needed. */
3105 SUBST (XEXP (SET_SRC (x
), 0), op0
);
3106 SUBST (XEXP (SET_SRC (x
), 1), op1
);
3111 /* Get SET_SRC in a form where we have placed back any
3112 compound expressions. Then do the checks below. */
3113 temp
= make_compound_operation (SET_SRC (x
), SET
);
3114 SUBST (SET_SRC (x
), temp
);
3117 #ifdef BYTE_LOADS_ZERO_EXTEND
3118 /* If we have (set FOO (subreg:M (mem:N BAR) 0)) with
3119 M wider than N, this would require a paradoxical subreg.
3120 Replace the subreg with a zero_extend to avoid the reload that
3121 would otherwise be required. */
3122 if (GET_CODE (SET_SRC (x
)) == SUBREG
3123 && subreg_lowpart_p (SET_SRC (x
))
3124 && SUBREG_WORD (SET_SRC (x
)) == 0
3125 && (GET_MODE_SIZE (GET_MODE (SET_SRC (x
)))
3126 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_SRC (x
)))))
3127 && GET_CODE (SUBREG_REG (SET_SRC (x
))) == MEM
)
3128 SUBST (SET_SRC (x
), gen_rtx_combine (ZERO_EXTEND
,
3129 GET_MODE (SET_SRC (x
)),
3130 XEXP (SET_SRC (x
), 0)));
3136 if (GET_CODE (XEXP (x
, 1)) == CONST_INT
)
3138 x
= simplify_and_const_int (x
, mode
, XEXP (x
, 0),
3139 INTVAL (XEXP (x
, 1)));
3141 /* If we have (ior (and (X C1) C2)) and the next restart would be
3142 the last, simplify this by making C1 as small as possible
3144 if (n_restarts
>= 3 && GET_CODE (x
) == IOR
3145 && GET_CODE (XEXP (x
, 0)) == AND
3146 && GET_CODE (XEXP (XEXP (x
, 0), 1)) == CONST_INT
3147 && GET_CODE (XEXP (x
, 1)) == CONST_INT
)
3149 temp
= gen_binary (AND
, mode
, XEXP (XEXP (x
, 0), 0),
3150 gen_rtx (CONST_INT
, VOIDmode
,
3151 (INTVAL (XEXP (XEXP (x
, 0), 1))
3152 & ~ INTVAL (XEXP (x
, 1)))));
3153 return gen_binary (IOR
, mode
, temp
, XEXP (x
, 1));
3156 if (GET_CODE (x
) != AND
)
3160 /* Convert (A | B) & A to A. */
3161 if (GET_CODE (XEXP (x
, 0)) == IOR
3162 && (rtx_equal_p (XEXP (XEXP (x
, 0), 0), XEXP (x
, 1))
3163 || rtx_equal_p (XEXP (XEXP (x
, 0), 1), XEXP (x
, 1)))
3164 && ! side_effects_p (XEXP (XEXP (x
, 0), 0))
3165 && ! side_effects_p (XEXP (XEXP (x
, 0), 1)))
3168 /* Convert (A ^ B) & A to A & (~ B) since the latter is often a single
3169 insn (and may simplify more). */
3170 else if (GET_CODE (XEXP (x
, 0)) == XOR
3171 && rtx_equal_p (XEXP (XEXP (x
, 0), 0), XEXP (x
, 1))
3172 && ! side_effects_p (XEXP (x
, 1)))
3174 x
= gen_binary (AND
, mode
,
3175 gen_unary (NOT
, mode
, XEXP (XEXP (x
, 0), 1)),
3179 else if (GET_CODE (XEXP (x
, 0)) == XOR
3180 && rtx_equal_p (XEXP (XEXP (x
, 0), 1), XEXP (x
, 1))
3181 && ! side_effects_p (XEXP (x
, 1)))
3183 x
= gen_binary (AND
, mode
,
3184 gen_unary (NOT
, mode
, XEXP (XEXP (x
, 0), 0)),
3189 /* Similarly for (~ (A ^ B)) & A. */
3190 else if (GET_CODE (XEXP (x
, 0)) == NOT
3191 && GET_CODE (XEXP (XEXP (x
, 0), 0)) == XOR
3192 && rtx_equal_p (XEXP (XEXP (XEXP (x
, 0), 0), 0), XEXP (x
, 1))
3193 && ! side_effects_p (XEXP (x
, 1)))
3195 x
= gen_binary (AND
, mode
, XEXP (XEXP (XEXP (x
, 0), 0), 1),
3199 else if (GET_CODE (XEXP (x
, 0)) == NOT
3200 && GET_CODE (XEXP (XEXP (x
, 0), 0)) == XOR
3201 && rtx_equal_p (XEXP (XEXP (XEXP (x
, 0), 0), 1), XEXP (x
, 1))
3202 && ! side_effects_p (XEXP (x
, 1)))
3204 x
= gen_binary (AND
, mode
, XEXP (XEXP (XEXP (x
, 0), 0), 0),
3209 /* In the follow group of tests (and those in case IOR below),
3210 we start with some combination of logical operations and apply
3211 the distributive law followed by the inverse distributive law.
3212 Most of the time, this results in no change. However, if some of
3213 the operands are the same or inverses of each other, simplifications
3216 For example, (and (ior A B) (not B)) can occur as the result of
3217 expanding a bit field assignment. When we apply the distributive
3218 law to this, we get (ior (and (A (not B))) (and (B (not B)))),
3219 which then simplifies to (and (A (not B))). */
3221 /* If we have (and (ior A B) C), apply the distributive law and then
3222 the inverse distributive law to see if things simplify. */
3224 if (GET_CODE (XEXP (x
, 0)) == IOR
|| GET_CODE (XEXP (x
, 0)) == XOR
)
3226 x
= apply_distributive_law
3227 (gen_binary (GET_CODE (XEXP (x
, 0)), mode
,
3228 gen_binary (AND
, mode
,
3229 XEXP (XEXP (x
, 0), 0), XEXP (x
, 1)),
3230 gen_binary (AND
, mode
,
3231 XEXP (XEXP (x
, 0), 1), XEXP (x
, 1))));
3232 if (GET_CODE (x
) != AND
)
3236 if (GET_CODE (XEXP (x
, 1)) == IOR
|| GET_CODE (XEXP (x
, 1)) == XOR
)
3238 x
= apply_distributive_law
3239 (gen_binary (GET_CODE (XEXP (x
, 1)), mode
,
3240 gen_binary (AND
, mode
,
3241 XEXP (XEXP (x
, 1), 0), XEXP (x
, 0)),
3242 gen_binary (AND
, mode
,
3243 XEXP (XEXP (x
, 1), 1), XEXP (x
, 0))));
3244 if (GET_CODE (x
) != AND
)
3248 /* Similarly, taking advantage of the fact that
3249 (and (not A) (xor B C)) == (xor (ior A B) (ior A C)) */
3251 if (GET_CODE (XEXP (x
, 0)) == NOT
&& GET_CODE (XEXP (x
, 1)) == XOR
)
3253 x
= apply_distributive_law
3254 (gen_binary (XOR
, mode
,
3255 gen_binary (IOR
, mode
, XEXP (XEXP (x
, 0), 0),
3256 XEXP (XEXP (x
, 1), 0)),
3257 gen_binary (IOR
, mode
, XEXP (XEXP (x
, 0), 0),
3258 XEXP (XEXP (x
, 1), 1))));
3259 if (GET_CODE (x
) != AND
)
3263 else if (GET_CODE (XEXP (x
, 1)) == NOT
&& GET_CODE (XEXP (x
, 0)) == XOR
)
3265 x
= apply_distributive_law
3266 (gen_binary (XOR
, mode
,
3267 gen_binary (IOR
, mode
, XEXP (XEXP (x
, 1), 0),
3268 XEXP (XEXP (x
, 0), 0)),
3269 gen_binary (IOR
, mode
, XEXP (XEXP (x
, 1), 0),
3270 XEXP (XEXP (x
, 0), 1))));
3271 if (GET_CODE (x
) != AND
)
3277 /* Convert (A & B) | A to A. */
3278 if (GET_CODE (XEXP (x
, 0)) == AND
3279 && (rtx_equal_p (XEXP (XEXP (x
, 0), 0), XEXP (x
, 1))
3280 || rtx_equal_p (XEXP (XEXP (x
, 0), 1), XEXP (x
, 1)))
3281 && ! side_effects_p (XEXP (XEXP (x
, 0), 0))
3282 && ! side_effects_p (XEXP (XEXP (x
, 0), 1)))
3285 /* If we have (ior (and A B) C), apply the distributive law and then
3286 the inverse distributive law to see if things simplify. */
3288 if (GET_CODE (XEXP (x
, 0)) == AND
)
3290 x
= apply_distributive_law
3291 (gen_binary (AND
, mode
,
3292 gen_binary (IOR
, mode
,
3293 XEXP (XEXP (x
, 0), 0), XEXP (x
, 1)),
3294 gen_binary (IOR
, mode
,
3295 XEXP (XEXP (x
, 0), 1), XEXP (x
, 1))));
3297 if (GET_CODE (x
) != IOR
)
3301 if (GET_CODE (XEXP (x
, 1)) == AND
)
3303 x
= apply_distributive_law
3304 (gen_binary (AND
, mode
,
3305 gen_binary (IOR
, mode
,
3306 XEXP (XEXP (x
, 1), 0), XEXP (x
, 0)),
3307 gen_binary (IOR
, mode
,
3308 XEXP (XEXP (x
, 1), 1), XEXP (x
, 0))));
3310 if (GET_CODE (x
) != IOR
)
3314 /* Convert (ior (ashift A CX) (lshiftrt A CY)) where CX+CY equals the
3315 mode size to (rotate A CX). */
3317 if (((GET_CODE (XEXP (x
, 0)) == ASHIFT
3318 && GET_CODE (XEXP (x
, 1)) == LSHIFTRT
)
3319 || (GET_CODE (XEXP (x
, 1)) == ASHIFT
3320 && GET_CODE (XEXP (x
, 0)) == LSHIFTRT
))
3321 && rtx_equal_p (XEXP (XEXP (x
, 0), 0), XEXP (XEXP (x
, 1), 0))
3322 && GET_CODE (XEXP (XEXP (x
, 0), 1)) == CONST_INT
3323 && GET_CODE (XEXP (XEXP (x
, 1), 1)) == CONST_INT
3324 && (INTVAL (XEXP (XEXP (x
, 0), 1)) + INTVAL (XEXP (XEXP (x
, 1), 1))
3325 == GET_MODE_BITSIZE (mode
)))
3329 if (GET_CODE (XEXP (x
, 0)) == ASHIFT
)
3330 shift_count
= XEXP (XEXP (x
, 0), 1);
3332 shift_count
= XEXP (XEXP (x
, 1), 1);
3333 x
= gen_rtx (ROTATE
, mode
, XEXP (XEXP (x
, 0), 0), shift_count
);
3339 /* Convert (XOR (NOT x) (NOT y)) to (XOR x y).
3340 Also convert (XOR (NOT x) y) to (NOT (XOR x y)), similarly for
3343 int num_negated
= 0;
3344 rtx in1
= XEXP (x
, 0), in2
= XEXP (x
, 1);
3346 if (GET_CODE (in1
) == NOT
)
3347 num_negated
++, in1
= XEXP (in1
, 0);
3348 if (GET_CODE (in2
) == NOT
)
3349 num_negated
++, in2
= XEXP (in2
, 0);
3351 if (num_negated
== 2)
3353 SUBST (XEXP (x
, 0), XEXP (XEXP (x
, 0), 0));
3354 SUBST (XEXP (x
, 1), XEXP (XEXP (x
, 1), 0));
3356 else if (num_negated
== 1)
3357 return gen_rtx_combine (NOT
, mode
,
3358 gen_rtx_combine (XOR
, mode
, in1
, in2
));
3361 /* Convert (xor (and A B) B) to (and (not A) B). The latter may
3362 correspond to a machine insn or result in further simplifications
3363 if B is a constant. */
3365 if (GET_CODE (XEXP (x
, 0)) == AND
3366 && rtx_equal_p (XEXP (XEXP (x
, 0), 1), XEXP (x
, 1))
3367 && ! side_effects_p (XEXP (x
, 1)))
3369 x
= gen_binary (AND
, mode
,
3370 gen_unary (NOT
, mode
, XEXP (XEXP (x
, 0), 0)),
3374 else if (GET_CODE (XEXP (x
, 0)) == AND
3375 && rtx_equal_p (XEXP (XEXP (x
, 0), 0), XEXP (x
, 1))
3376 && ! side_effects_p (XEXP (x
, 1)))
3378 x
= gen_binary (AND
, mode
,
3379 gen_unary (NOT
, mode
, XEXP (XEXP (x
, 0), 1)),
3385 #if STORE_FLAG_VALUE == 1
3386 /* (xor (comparison foo bar) (const_int 1)) can become the reversed
3388 if (XEXP (x
, 1) == const1_rtx
3389 && GET_RTX_CLASS (GET_CODE (XEXP (x
, 0))) == '<'
3390 && reversible_comparison_p (XEXP (x
, 0)))
3391 return gen_rtx_combine (reverse_condition (GET_CODE (XEXP (x
, 0))),
3392 mode
, XEXP (XEXP (x
, 0), 0),
3393 XEXP (XEXP (x
, 0), 1));
3396 /* (xor (comparison foo bar) (const_int sign-bit))
3397 when STORE_FLAG_VALUE is the sign bit. */
3398 if (GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_INT
3399 && STORE_FLAG_VALUE
== 1 << (GET_MODE_BITSIZE (mode
) - 1)
3400 && XEXP (x
, 1) == const_true_rtx
3401 && GET_RTX_CLASS (GET_CODE (XEXP (x
, 0))) == '<'
3402 && reversible_comparison_p (XEXP (x
, 0)))
3403 return gen_rtx_combine (reverse_condition (GET_CODE (XEXP (x
, 0))),
3404 mode
, XEXP (XEXP (x
, 0), 0),
3405 XEXP (XEXP (x
, 0), 1));
3409 /* (abs (neg <foo>)) -> (abs <foo>) */
3410 if (GET_CODE (XEXP (x
, 0)) == NEG
)
3411 SUBST (XEXP (x
, 0), XEXP (XEXP (x
, 0), 0));
3413 /* If operand is something known to be positive, ignore the ABS. */
3414 if (GET_CODE (XEXP (x
, 0)) == FFS
|| GET_CODE (XEXP (x
, 0)) == ABS
3415 || (GET_MODE_BITSIZE (GET_MODE (XEXP (x
, 0))) <= HOST_BITS_PER_INT
3416 && ((significant_bits (XEXP (x
, 0), GET_MODE (XEXP (x
, 0)))
3417 & (1 << (GET_MODE_BITSIZE (GET_MODE (XEXP (x
, 0))) - 1)))
3422 /* If operand is known to be only -1 or 0, convert ABS to NEG. */
3423 if ((GET_CODE (XEXP (x
, 0)) == SIGN_EXTRACT
3424 && XEXP (XEXP (x
, 0), 1) == const1_rtx
)
3425 || (GET_CODE (XEXP (x
, 0)) == ASHIFTRT
3426 && GET_CODE (XEXP (XEXP (x
, 0), 1)) == CONST_INT
3427 && INTVAL (XEXP (XEXP (x
, 0), 1)) == GET_MODE_BITSIZE (mode
) - 1)
3428 || ((temp
= get_last_value (XEXP (x
, 0))) != 0
3429 && ((GET_CODE (temp
) == SIGN_EXTRACT
3430 && XEXP (temp
, 1) == const1_rtx
)
3431 || (GET_CODE (temp
) == ASHIFTRT
3432 && GET_CODE (XEXP (temp
, 1)) == CONST_INT
3433 && (INTVAL (XEXP (temp
, 1))
3434 == GET_MODE_BITSIZE (mode
) - 1)))))
3436 x
= gen_rtx_combine (NEG
, mode
, XEXP (x
, 0));
3442 /* (float (sign_extend <X>)) = (float <X>). */
3443 if (GET_CODE (XEXP (x
, 0)) == SIGN_EXTEND
)
3444 SUBST (XEXP (x
, 0), XEXP (XEXP (x
, 0), 0));
3453 #ifdef SHIFT_COUNT_TRUNCATED
3454 /* (*shift <X> (sign_extend <Y>)) = (*shift <X> <Y>) (most machines).
3455 True for all kinds of shifts and also for zero_extend. */
3456 if ((GET_CODE (XEXP (x
, 1)) == SIGN_EXTEND
3457 || GET_CODE (XEXP (x
, 1)) == ZERO_EXTEND
)
3458 && FAKE_EXTEND_SAFE_P (mode
, XEXP (XEXP (x
, 1), 0)))
3460 /* This is a perverse SUBREG, wider than its base. */
3461 gen_lowpart_for_combine (mode
, XEXP (XEXP (x
, 1), 0)));
3463 /* tege: Change (bitshifts ... (and ... mask), c)
3464 to (bitshifts ... c) if mask just masks the bits the bitshift
3465 insns do automatically on this machine. */
3466 if (GET_CODE (XEXP (x
, 1)) == AND
3467 && GET_CODE (XEXP (XEXP (x
, 1), 1)) == CONST_INT
3468 && (~ INTVAL (XEXP (XEXP (x
, 1), 1)) & GET_MODE_MASK (mode
)) == 0)
3469 SUBST (XEXP (x
, 1), XEXP (XEXP (x
, 1), 0));
3472 /* If this is a shift by a constant amount, simplify it. */
3473 if (GET_CODE (XEXP (x
, 1)) == CONST_INT
)
3475 x
= simplify_shift_const (x
, code
, mode
, XEXP (x
, 0),
3476 INTVAL (XEXP (x
, 1)));
3477 if (GET_CODE (x
) != code
)
3486 /* We consider ZERO_EXTRACT, SIGN_EXTRACT, and SIGN_EXTEND as "compound
3487 operations" because they can be replaced with two more basic operations.
3488 ZERO_EXTEND is also considered "compound" because it can be replaced with
3489 an AND operation, which is simpler, though only one operation.
3491 The function expand_compound_operation is called with an rtx expression
3492 and will convert it to the appropriate shifts and AND operations,
3493 simplifying at each stage.
3495 The function make_compound_operation is called to convert an expression
3496 consisting of shifts and ANDs into the equivalent compound expression.
3497 It is the inverse of this function, loosely speaking. */
3500 expand_compound_operation (x
)
3508 switch (GET_CODE (x
))
3513 /* If we somehow managed to end up with (sign/zero_extend (const_int x)),
3514 just return the CONST_INT. We can't know how much masking to do
3516 if (GET_CODE (XEXP (x
, 0)) == CONST_INT
)
3519 if (! FAKE_EXTEND_SAFE_P (GET_MODE (XEXP (x
, 0)), XEXP (x
, 0)))
3522 len
= GET_MODE_BITSIZE (GET_MODE (XEXP (x
, 0)));
3523 /* If the inner object has VOIDmode (the only way this can happen
3524 is if it is an ASM_OPERANDS), we can't do anything since we don't
3525 know how much masking to do. */
3534 /* If the operand is a CLOBBER, just return it. */
3535 if (GET_CODE (XEXP (x
, 0)) == CLOBBER
)
3538 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
3539 || GET_CODE (XEXP (x
, 2)) != CONST_INT
3540 || GET_MODE (XEXP (x
, 0)) == VOIDmode
)
3543 len
= INTVAL (XEXP (x
, 1));
3544 pos
= INTVAL (XEXP (x
, 2));
3546 /* If this goes outside the object being extracted, replace the object
3547 with a (use (mem ...)) construct that only combine understands
3548 and is used only for this purpose. */
3549 if (len
+ pos
> GET_MODE_BITSIZE (GET_MODE (XEXP (x
, 0))))
3550 SUBST (XEXP (x
, 0), gen_rtx (USE
, GET_MODE (x
), XEXP (x
, 0)));
3553 pos
= GET_MODE_BITSIZE (GET_MODE (XEXP (x
, 0))) - len
- pos
;
3561 /* If we reach here, we want to return a pair of shifts. The inner
3562 shift is a left shift of BITSIZE - POS - LEN bits. The outer
3563 shift is a right shift of BITSIZE - LEN bits. It is arithmetic or
3564 logical depending on the value of UNSIGNEDP.
3566 If this was a ZERO_EXTEND or ZERO_EXTRACT, this pair of shifts will be
3567 converted into an AND of a shift.
3569 We must check for the case where the left shift would have a negative
3570 count. This can happen in a case like (x >> 31) & 255 on machines
3571 that can't shift by a constant. On those machines, we would first
3572 combine the shift with the AND to produce a variable-position
3573 extraction. Then the constant of 31 would be substituted in to produce
3574 such a position. */
3576 modewidth
= GET_MODE_BITSIZE (GET_MODE (x
));
3577 if (modewidth
>= pos
- len
)
3578 tem
= simplify_shift_const (0, unsignedp
? LSHIFTRT
: ASHIFTRT
,
3580 simplify_shift_const (0, ASHIFT
, GET_MODE (x
),
3582 modewidth
- pos
- len
),
3585 else if (unsignedp
&& len
< HOST_BITS_PER_INT
)
3586 tem
= simplify_and_const_int (0, GET_MODE (x
),
3587 simplify_shift_const (0, LSHIFTRT
,
3592 /* Any other cases we can't handle. */
3596 /* If we couldn't do this for some reason, return the original
3598 if (GET_CODE (tem
) == CLOBBER
)
3604 /* X is a SET which contains an assignment of one object into
3605 a part of another (such as a bit-field assignment, STRICT_LOW_PART,
3606 or certain SUBREGS). If possible, convert it into a series of
3609 We half-heartedly support variable positions, but do not at all
3610 support variable lengths. */
3613 expand_field_assignment (x
)
3617 rtx pos
; /* Always counts from low bit. */
3620 enum machine_mode compute_mode
;
3622 /* Loop until we find something we can't simplify. */
3625 if (GET_CODE (SET_DEST (x
)) == STRICT_LOW_PART
3626 && GET_CODE (XEXP (SET_DEST (x
), 0)) == SUBREG
)
3628 inner
= SUBREG_REG (XEXP (SET_DEST (x
), 0));
3629 len
= GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x
), 0)));
3632 else if (GET_CODE (SET_DEST (x
)) == ZERO_EXTRACT
3633 && GET_CODE (XEXP (SET_DEST (x
), 1)) == CONST_INT
)
3635 inner
= XEXP (SET_DEST (x
), 0);
3636 len
= INTVAL (XEXP (SET_DEST (x
), 1));
3637 pos
= XEXP (SET_DEST (x
), 2);
3639 /* If the position is constant and spans the width of INNER,
3640 surround INNER with a USE to indicate this. */
3641 if (GET_CODE (pos
) == CONST_INT
3642 && INTVAL (pos
) + len
> GET_MODE_BITSIZE (GET_MODE (inner
)))
3643 inner
= gen_rtx (USE
, GET_MODE (SET_DEST (x
)), inner
);
3646 if (GET_CODE (pos
) == CONST_INT
)
3647 pos
= gen_rtx (CONST_INT
, VOIDmode
,
3648 (GET_MODE_BITSIZE (GET_MODE (inner
)) - len
3650 else if (GET_CODE (pos
) == MINUS
3651 && GET_CODE (XEXP (pos
, 1)) == CONST_INT
3652 && (INTVAL (XEXP (pos
, 1))
3653 == GET_MODE_BITSIZE (GET_MODE (inner
)) - len
))
3654 /* If position is ADJUST - X, new position is X. */
3655 pos
= XEXP (pos
, 0);
3657 pos
= gen_binary (MINUS
, GET_MODE (pos
),
3658 gen_rtx (CONST_INT
, VOIDmode
,
3659 (GET_MODE_BITSIZE (GET_MODE (inner
))
3664 /* A SUBREG between two modes that occupy the same numbers of words
3665 can be done by moving the SUBREG to the source. */
3666 else if (GET_CODE (SET_DEST (x
)) == SUBREG
3667 && (((GET_MODE_SIZE (GET_MODE (SET_DEST (x
)))
3668 + (UNITS_PER_WORD
- 1)) / UNITS_PER_WORD
)
3669 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x
))))
3670 + (UNITS_PER_WORD
- 1)) / UNITS_PER_WORD
)))
3672 x
= gen_rtx (SET
, VOIDmode
, SUBREG_REG (SET_DEST (x
)),
3673 gen_lowpart_for_combine (GET_MODE (SUBREG_REG (SET_DEST (x
))),
3680 while (GET_CODE (inner
) == SUBREG
&& subreg_lowpart_p (inner
))
3681 inner
= SUBREG_REG (inner
);
3683 compute_mode
= GET_MODE (inner
);
3685 /* Compute a mask of LEN bits, if we can do this on the host machine. */
3686 if (len
< HOST_BITS_PER_INT
)
3687 mask
= gen_rtx (CONST_INT
, VOIDmode
, (1 << len
) - 1);
3691 /* Now compute the equivalent expression. Make a copy of INNER
3692 for the SET_DEST in case it is a MEM into which we will substitute;
3693 we don't want shared RTL in that case. */
3694 x
= gen_rtx (SET
, VOIDmode
, copy_rtx (inner
),
3695 gen_binary (IOR
, compute_mode
,
3696 gen_binary (AND
, compute_mode
,
3697 gen_unary (NOT
, compute_mode
,
3702 gen_binary (ASHIFT
, compute_mode
,
3703 gen_binary (AND
, compute_mode
,
3704 gen_lowpart_for_combine
3714 /* Return an RTX for a reference to LEN bits of INNER. POS is the starting
3715 bit position (counted from the LSB) if >= 0; otherwise POS_RTX represents
3716 the starting bit position.
3718 INNER may be a USE. This will occur when we started with a bitfield
3719 that went outside the boundary of the object in memory, which is
3720 allowed on most machines. To isolate this case, we produce a USE
3721 whose mode is wide enough and surround the MEM with it. The only
3722 code that understands the USE is this routine. If it is not removed,
3723 it will cause the resulting insn not to match.
3725 UNSIGNEDP is non-zero for an unsigned reference and zero for a
3728 IN_DEST is non-zero if this is a reference in the destination of a
3729 SET. This is used when a ZERO_ or SIGN_EXTRACT isn't needed. If non-zero,
3730 a STRICT_LOW_PART will be used, if zero, ZERO_EXTEND or SIGN_EXTEND will
3733 IN_COMPARE is non-zero if we are in a COMPARE. This means that a
3734 ZERO_EXTRACT should be built even for bits starting at bit 0.
3736 MODE is the desired mode of the result (if IN_DEST == 0). */
3739 make_extraction (mode
, inner
, pos
, pos_rtx
, len
,
3740 unsignedp
, in_dest
, in_compare
)
3741 enum machine_mode mode
;
3747 int in_dest
, in_compare
;
3749 enum machine_mode is_mode
= GET_MODE (inner
);
3750 enum machine_mode inner_mode
;
3751 enum machine_mode wanted_mem_mode
= byte_mode
;
3752 enum machine_mode pos_mode
= word_mode
;
3753 enum machine_mode extraction_mode
= word_mode
;
3754 enum machine_mode tmode
= mode_for_size (len
, MODE_INT
, 1);
3758 /* Get some information about INNER and get the innermost object. */
3759 if (GET_CODE (inner
) == USE
)
3760 /* We don't need to adjust the position because we set up the USE
3761 to pretend that it was a full-word object. */
3762 spans_byte
= 1, inner
= XEXP (inner
, 0);
3763 else if (GET_CODE (inner
) == SUBREG
&& subreg_lowpart_p (inner
))
3764 inner
= SUBREG_REG (inner
);
3766 inner_mode
= GET_MODE (inner
);
3768 if (pos_rtx
&& GET_CODE (pos_rtx
) == CONST_INT
)
3769 pos
= INTVAL (pos_rtx
);
3771 /* See if this can be done without an extraction. We never can if the
3772 width of the field is not the same as that of some integer mode. For
3773 registers, we can only avoid the extraction if the position is at the
3774 low-order bit and this is either not in the destination or we have the
3775 appropriate STRICT_LOW_PART operation available.
3777 For MEM, we can avoid an extract if the field starts on an appropriate
3778 boundary and we can change the mode of the memory reference. However,
3779 we cannot directly access the MEM if we have a USE and the underlying
3780 MEM is not TMODE. This combination means that MEM was being used in a
3781 context where bits outside its mode were being referenced; that is only
3782 valid in bit-field insns. */
3784 if (tmode
!= BLKmode
3785 && ! (spans_byte
&& inner_mode
!= tmode
)
3786 && ((pos
== 0 && GET_CODE (inner
) == REG
3788 || (movstrict_optab
->handlers
[(int) tmode
].insn_code
3789 != CODE_FOR_nothing
)))
3790 || (GET_CODE (inner
) == MEM
&& pos
>= 0
3791 #ifdef STRICT_ALIGNMENT
3792 && (pos
% GET_MODE_ALIGNMENT (tmode
)) == 0
3794 && (pos
% BITS_PER_UNIT
) == 0
3796 /* We can't do this if we are widening INNER_MODE (it
3797 may not be aligned, for one thing). */
3798 && GET_MODE_BITSIZE (inner_mode
) >= GET_MODE_BITSIZE (tmode
)
3799 && (inner_mode
== tmode
3800 || (! mode_dependent_address_p (XEXP (inner
, 0))
3801 && ! MEM_VOLATILE_P (inner
))))))
3803 int offset
= pos
/ BITS_PER_UNIT
;
3805 /* If INNER is a MEM, make a new MEM that encompasses just the desired
3806 field. If the original and current mode are the same, we need not
3807 adjust the offset. Otherwise, we do if bytes big endian.
3809 If INNER is not a MEM, get a piece consisting of the just the field
3810 of interest (in this case INNER must be a REG and POS must be 0). */
3812 if (GET_CODE (inner
) == MEM
)
3814 #if BYTES_BIG_ENDIAN
3815 if (inner_mode
!= tmode
)
3816 offset
= (GET_MODE_SIZE (inner_mode
)
3817 - GET_MODE_SIZE (tmode
) - offset
);
3820 new = gen_rtx (MEM
, tmode
, plus_constant (XEXP (inner
, 0), offset
));
3821 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (inner
);
3822 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (inner
);
3823 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (inner
);
3826 new = gen_lowpart_for_combine (tmode
, inner
);
3828 /* If this extraction is going into the destination of a SET,
3829 make a STRICT_LOW_PART unless we made a MEM. */
3832 return (GET_CODE (new) == MEM
? new
3833 : gen_rtx_combine (STRICT_LOW_PART
, VOIDmode
, new));
3835 /* Otherwise, sign- or zero-extend unless we already are in the
3838 return (mode
== tmode
? new
3839 : gen_rtx_combine (unsignedp
? ZERO_EXTEND
: SIGN_EXTEND
,
3843 /* Unless this is in a COMPARE or we have a funny memory reference,
3844 don't do anything with field extracts starting at the low-order
3845 bit since they are simple AND operations. */
3846 if (pos
== 0 && ! in_dest
&& ! in_compare
&& ! spans_byte
)
3849 /* Get the mode to use should INNER be a MEM, the mode for the position,
3850 and the mode for the result. */
3854 wanted_mem_mode
= insn_operand_mode
[(int) CODE_FOR_insv
][0];
3855 pos_mode
= insn_operand_mode
[(int) CODE_FOR_insv
][2];
3856 extraction_mode
= insn_operand_mode
[(int) CODE_FOR_insv
][3];
3861 if (! in_dest
&& unsignedp
)
3863 wanted_mem_mode
= insn_operand_mode
[(int) CODE_FOR_extzv
][1];
3864 pos_mode
= insn_operand_mode
[(int) CODE_FOR_extzv
][3];
3865 extraction_mode
= insn_operand_mode
[(int) CODE_FOR_extzv
][0];
3870 if (! in_dest
&& ! unsignedp
)
3872 wanted_mem_mode
= insn_operand_mode
[(int) CODE_FOR_extv
][1];
3873 pos_mode
= insn_operand_mode
[(int) CODE_FOR_extv
][3];
3874 extraction_mode
= insn_operand_mode
[(int) CODE_FOR_extv
][0];
3878 /* Never narrow an object, since that might not be safe. */
3880 if (mode
!= VOIDmode
3881 && GET_MODE_SIZE (extraction_mode
) < GET_MODE_SIZE (mode
))
3882 extraction_mode
= mode
;
3884 if (pos_rtx
&& GET_MODE (pos_rtx
) != VOIDmode
3885 && GET_MODE_SIZE (pos_mode
) < GET_MODE_SIZE (GET_MODE (pos_rtx
)))
3886 pos_mode
= GET_MODE (pos_rtx
);
3888 /* If this is not from memory or we have to change the mode of memory and
3889 cannot, the desired mode is EXTRACTION_MODE. */
3890 if (GET_CODE (inner
) != MEM
3891 || (inner_mode
!= wanted_mem_mode
3892 && (mode_dependent_address_p (XEXP (inner
, 0))
3893 || MEM_VOLATILE_P (inner
))))
3894 wanted_mem_mode
= extraction_mode
;
3897 /* If position is constant, compute new position. Otherwise, build
3900 pos
= (MAX (GET_MODE_BITSIZE (is_mode
), GET_MODE_BITSIZE (wanted_mem_mode
))
3904 = gen_rtx_combine (MINUS
, GET_MODE (pos_rtx
),
3905 gen_rtx (CONST_INT
, VOIDmode
,
3906 (MAX (GET_MODE_BITSIZE (is_mode
),
3907 GET_MODE_BITSIZE (wanted_mem_mode
))
3911 /* If INNER has a wider mode, make it smaller. If this is a constant
3912 extract, try to adjust the byte to point to the byte containing
3914 if (wanted_mem_mode
!= VOIDmode
3915 && GET_MODE_SIZE (wanted_mem_mode
) < GET_MODE_SIZE (is_mode
)
3916 && ((GET_CODE (inner
) == MEM
3917 && (inner_mode
== wanted_mem_mode
3918 || (! mode_dependent_address_p (XEXP (inner
, 0))
3919 && ! MEM_VOLATILE_P (inner
))))))
3923 /* The computations below will be correct if the machine is big
3924 endian in both bits and bytes or little endian in bits and bytes.
3925 If it is mixed, we must adjust. */
3927 #if BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN
3928 if (! spans_byte
&& is_mode
!= wanted_mem_mode
)
3929 offset
= (GET_MODE_SIZE (is_mode
)
3930 - GET_MODE_SIZE (wanted_mem_mode
) - offset
);
3933 /* If bytes are big endian and we had a paradoxical SUBREG, we must
3934 adjust OFFSET to compensate. */
3935 #if BYTES_BIG_ENDIAN
3937 && GET_MODE_SIZE (inner_mode
) < GET_MODE_SIZE (is_mode
))
3938 offset
-= GET_MODE_SIZE (is_mode
) - GET_MODE_SIZE (inner_mode
);
3941 /* If this is a constant position, we can move to the desired byte. */
3944 offset
+= pos
/ BITS_PER_UNIT
;
3945 pos
%= GET_MODE_BITSIZE (wanted_mem_mode
);
3948 if (offset
!= 0 || inner_mode
!= wanted_mem_mode
)
3950 rtx newmem
= gen_rtx (MEM
, wanted_mem_mode
,
3951 plus_constant (XEXP (inner
, 0), offset
));
3952 RTX_UNCHANGING_P (newmem
) = RTX_UNCHANGING_P (inner
);
3953 MEM_VOLATILE_P (newmem
) = MEM_VOLATILE_P (inner
);
3954 MEM_IN_STRUCT_P (newmem
) = MEM_IN_STRUCT_P (inner
);
3959 /* If INNER is not memory, we can always get it into the proper mode. */
3960 else if (GET_CODE (inner
) != MEM
)
3961 inner
= gen_lowpart_for_combine (extraction_mode
, inner
);
3963 /* Adjust mode of POS_RTX, if needed. If we want a wider mode, we
3964 have to zero extend. Otherwise, we can just use a SUBREG. */
3966 && GET_MODE_SIZE (pos_mode
) > GET_MODE_SIZE (GET_MODE (pos_rtx
)))
3967 pos_rtx
= gen_rtx_combine (ZERO_EXTEND
, pos_mode
, pos_rtx
);
3969 && GET_MODE_SIZE (pos_mode
) < GET_MODE_SIZE (GET_MODE (pos_rtx
)))
3970 pos_rtx
= gen_lowpart_for_combine (pos_mode
, pos_rtx
);
3972 /* Make POS_RTX unless we already have it and it is correct. */
3973 if (pos_rtx
== 0 || (pos
>= 0 && INTVAL (pos_rtx
) != pos
))
3974 pos_rtx
= gen_rtx (CONST_INT
, VOIDmode
, pos
);
3976 /* Make the required operation. See if we can use existing rtx. */
3977 new = gen_rtx_combine (unsignedp
? ZERO_EXTRACT
: SIGN_EXTRACT
,
3978 extraction_mode
, inner
,
3979 gen_rtx (CONST_INT
, VOIDmode
, len
), pos_rtx
);
3981 new = gen_lowpart_for_combine (mode
, new);
3986 /* Look at the expression rooted at X. Look for expressions
3987 equivalent to ZERO_EXTRACT, SIGN_EXTRACT, ZERO_EXTEND, SIGN_EXTEND.
3988 Form these expressions.
3990 Return the new rtx, usually just X.
3992 Also, for machines like the Vax that don't have logical shift insns,
3993 try to convert logical to arithmetic shift operations in cases where
3994 they are equivalent. This undoes the canonicalizations to logical
3995 shifts done elsewhere.
3997 We try, as much as possible, to re-use rtl expressions to save memory.
3999 IN_CODE says what kind of expression we are processing. Normally, it is
4000 SET. In a memory address (inside a MEM or PLUS, the latter being a
4001 kludge), it is MEM. When processing the arguments of a comparison
4002 or a COMPARE against zero, it is COMPARE. */
/* make_compound_operation: walk X looking for shift-and-mask patterns that
   are really compound operations (ZERO_EXTRACT, SIGN_EXTRACT, ZERO_EXTEND,
   SIGN_EXTEND) and rewrite them via make_extraction; also converts shifts
   inside addresses into multiplications, and logical right shifts into
   arithmetic ones when the sign bit is known zero.  IN_CODE gives the
   surrounding context (SET, MEM inside an address, or COMPARE); recursion
   over the operands uses NEXT_CODE computed below.

   NOTE(review): this span is a line-mangled extract — statements are split
   across lines and interior source lines (declarations of X/NEW/FMT/I,
   switch and case labels, braces, the final return) were dropped by the
   extraction.  Text is left byte-identical; restore from a pristine copy
   of combine.c before compiling.  */
4005 make_compound_operation (x
, in_code
)
4007 enum rtx_code in_code
;
4009 enum rtx_code code
= GET_CODE (x
);
4010 enum machine_mode mode
= GET_MODE (x
);
4011 int mode_width
= GET_MODE_BITSIZE (mode
);
4012 enum rtx_code next_code
;
4017 /* Select the code to be used in recursive calls. Once we are inside an
4018 address, we stay there. If we have a comparison, set to COMPARE,
4019 but once inside, go back to our default of SET. */
4021 next_code
= (code
== MEM
|| code
== PLUS
? MEM
4022 : ((code
== COMPARE
|| GET_RTX_CLASS (code
) == '<')
4023 && XEXP (x
, 1) == const0_rtx
) ? COMPARE
4024 : in_code
== COMPARE
? SET
: in_code
);
4026 /* Process depending on the code of this operation. If NEW is set
4027 non-zero, it will be returned. */
4033 /* Convert shifts by constants into multiplications if inside
4035 if (in_code
== MEM
&& GET_CODE (XEXP (x
, 1)) == CONST_INT
4036 && INTVAL (XEXP (x
, 1)) < HOST_BITS_PER_INT
4037 && INTVAL (XEXP (x
, 1)) >= 0)
4038 new = gen_rtx_combine (MULT
, mode
, XEXP (x
, 0),
4039 gen_rtx (CONST_INT
, VOIDmode
,
4040 1 << INTVAL (XEXP (x
, 1))));
4044 /* If the second operand is not a constant, we can't do anything
4046 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
4049 /* If the constant is a power of two minus one and the first operand
4050 is a logical right shift, make an extraction. */
4051 if (GET_CODE (XEXP (x
, 0)) == LSHIFTRT
4052 && (i
= exact_log2 (INTVAL (XEXP (x
, 1)) + 1)) >= 0)
4053 new = make_extraction (mode
, XEXP (XEXP (x
, 0), 0), -1,
4054 XEXP (XEXP (x
, 0), 1), i
, 1,
4055 0, in_code
== COMPARE
);
4057 /* Same as previous, but for (subreg (lshiftrt ...)) in first op. */
4058 else if (GET_CODE (XEXP (x
, 0)) == SUBREG
4059 && subreg_lowpart_p (XEXP (x
, 0))
4060 && GET_CODE (SUBREG_REG (XEXP (x
, 0))) == LSHIFTRT
4061 && (i
= exact_log2 (INTVAL (XEXP (x
, 1)) + 1)) >= 0)
4062 new = make_extraction (GET_MODE (SUBREG_REG (XEXP (x
, 0))),
4063 XEXP (SUBREG_REG (XEXP (x
, 0)), 0), -1,
4064 XEXP (SUBREG_REG (XEXP (x
, 0)), 1), i
, 1,
4065 0, in_code
== COMPARE
);
4068 /* One machines without logical shifts, if the operand of the AND is
4069 a logical shift and our mask turns off all the propagated sign
4070 bits, we can replace the logical shift with an arithmetic shift. */
4082 && GET_CODE (XEXP (x
, 0)) == LSHIFTRT
4083 && GET_CODE (XEXP (XEXP (x
, 0), 1)) == CONST_INT
4084 && INTVAL (XEXP (XEXP (x
, 0), 1)) >= 0
4085 && INTVAL (XEXP (XEXP (x
, 0), 1)) < HOST_BITS_PER_INT
4086 && mode_width
<= HOST_BITS_PER_INT
)
4088 unsigned mask
= GET_MODE_MASK (mode
);
4090 mask
>>= INTVAL (XEXP (XEXP (x
, 0), 1));
4091 if ((INTVAL (XEXP (x
, 1)) & ~mask
) == 0)
4093 gen_rtx_combine (ASHIFTRT
, mode
, XEXP (XEXP (x
, 0), 0),
4094 XEXP (XEXP (x
, 0), 1)));
4097 /* If the constant is one less than a power of two, this might be
4098 representable by an extraction even if no shift is present.
4099 If it doesn't end up being a ZERO_EXTEND, we will ignore it unless
4100 we are in a COMPARE. */
4101 else if ((i
= exact_log2 (INTVAL (XEXP (x
, 1)) + 1)) >= 0)
4102 new = make_extraction (mode
, XEXP (x
, 0), 0, 0, i
, 1,
4103 0, in_code
== COMPARE
);
4105 /* If we are in a comparison and this is an AND with a power of two,
4106 convert this into the appropriate bit extract. */
4107 else if (in_code
== COMPARE
4108 && (i
= exact_log2 (INTVAL (XEXP (x
, 1)))) >= 0)
4109 new = make_extraction (mode
, XEXP (x
, 0), i
, 0, 1, 1, 0, 1);
4114 /* If the sign bit is known to be zero, replace this with an
4115 arithmetic shift. */
4127 && mode_width
<= HOST_BITS_PER_INT
4128 && (significant_bits (XEXP (x
, 0), mode
)
4129 & (1 << (mode_width
- 1))) == 0)
4131 new = gen_rtx_combine (ASHIFTRT
, mode
, XEXP (x
, 0), XEXP (x
, 1));
4135 /* ... fall through ... */
4138 /* If we have (ashiftrt (ashift foo C1) C2) with C2 >= C1,
4139 this is a SIGN_EXTRACT. */
4140 if (GET_CODE (XEXP (x
, 1)) == CONST_INT
4141 && GET_CODE (XEXP (x
, 0)) == ASHIFT
4142 && GET_CODE (XEXP (XEXP (x
, 0), 1)) == CONST_INT
4143 && INTVAL (XEXP (x
, 1)) >= INTVAL (XEXP (XEXP (x
, 0), 1)))
4144 new = make_extraction (mode
, XEXP (XEXP (x
, 0), 0),
4145 (INTVAL (XEXP (x
, 1))
4146 - INTVAL (XEXP (XEXP (x
, 0), 1))),
4147 0, mode_width
- INTVAL (XEXP (x
, 1)),
4148 code
== LSHIFTRT
, 0, in_code
== COMPARE
);
4155 code
= GET_CODE (x
);
4158 /* Now recursively process each operand of this operation. */
4159 fmt
= GET_RTX_FORMAT (code
);
4160 for (i
= 0; i
< GET_RTX_LENGTH (code
); i
++)
4163 new = make_compound_operation (XEXP (x
, i
), next_code
);
4164 SUBST (XEXP (x
, i
), new);
4170 /* Given M see if it is a value that would select a field of bits
4171 within an item, but not the entire word. Return -1 if not.
4172 Otherwise, return the starting position of the field, where 0 is the
4175 *PLEN is set to the length of the field. */
/* get_pos_from_mask: given mask M, decide whether it selects a contiguous
   field of bits (a shifted "power of two minus one").  On success the
   field's starting bit position is returned and *PLEN receives its length.
   Per the header comment above, -1 is returned when M is not such a mask.

   NOTE(review): line-mangled extract; the parameter declarations, the
   early-exit checks and the return statements between original lines
   4178-4195 were dropped.  Text left byte-identical.  */
4178 get_pos_from_mask (m
, plen
)
4182 /* Get the bit number of the first 1 bit from the right, -1 if none. */
4183 int pos
= exact_log2 (m
& - m
);
4188 /* Now shift off the low-order zero bits and see if we have a power of
4190 *plen
= exact_log2 ((m
>> pos
) + 1);
4198 /* See if X, a SET operation, can be rewritten as a bit-field assignment.
4199 Return that assignment if so.
4201 We only handle the most common cases. */
/* make_field_assignment: try to rewrite the SET rtx X as a bit-field
   assignment (a SET of a ZERO_EXTRACT built by make_extraction), handling
   the common shapes: clearing one bit, setting one bit, storing a constant
   into a fixed field, and storing a shifted variable into a fixed field.
   On success a fresh (SET assign src) is returned via gen_rtx_combine.

   NOTE(review): line-mangled extract with interior source lines missing
   (declarations of ASSIGN/POS/LEN, braces, fall-through returns).
   NOTE(review): at original line 4306 the surviving text reads
   `((c1 % ~c2) & GET_MODE_MASK (...)) == 0` — `%` makes no sense for the
   stated "masks must be complements" test; presumably garbled from `^`.
   Confirm against a pristine combine.c before fixing.  */
4204 make_field_assignment (x
)
4207 rtx dest
= SET_DEST (x
);
4208 rtx src
= SET_SRC (x
);
4211 /* If SRC was (and (not (ashift (const_int 1) POS)) DEST), this is
4212 a clear of a one-bit field. We will have changed it to
4213 (and (rotate (const_int -2) POS) DEST), so check for that. Also check
4216 if (GET_CODE (src
) == AND
&& GET_CODE (XEXP (src
, 0)) == ROTATE
4217 && GET_CODE (XEXP (XEXP (src
, 0), 0)) == CONST_INT
4218 && INTVAL (XEXP (XEXP (src
, 0), 0)) == -2
4219 && rtx_equal_p (dest
, XEXP (src
, 1)))
4221 assign
= make_extraction (VOIDmode
, dest
, -1, XEXP (XEXP (src
, 0), 1),
4226 else if (GET_CODE (src
) == AND
&& GET_CODE (XEXP (src
, 0)) == SUBREG
4227 && subreg_lowpart_p (XEXP (src
, 0))
4228 && (GET_MODE_SIZE (GET_MODE (XEXP (src
, 0)))
4229 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (src
, 0)))))
4230 && GET_CODE (SUBREG_REG (XEXP (src
, 0))) == ROTATE
4231 && INTVAL (XEXP (SUBREG_REG (XEXP (src
, 0)), 0)) == -2
4232 && rtx_equal_p (dest
, XEXP (src
, 1)))
4234 assign
= make_extraction (VOIDmode
, dest
, -1,
4235 XEXP (SUBREG_REG (XEXP (src
, 0)), 1),
4240 /* If SRC is (ior (ashift (const_int 1) POS DEST)), this is a set of a
4242 else if (GET_CODE (src
) == IOR
&& GET_CODE (XEXP (src
, 0)) == ASHIFT
4243 && XEXP (XEXP (src
, 0), 0) == const1_rtx
4244 && rtx_equal_p (dest
, XEXP (src
, 1)))
4246 assign
= make_extraction (VOIDmode
, dest
, -1, XEXP (XEXP (src
, 0), 1),
4251 /* The common case of a constant assignment into a constant-position
4252 field looks like (ior (and DEST C1) C2). We clear the bits in C1
4253 that are present in C2 and C1 must then be the complement of a mask
4254 that selects a field. */
4256 else if (GET_CODE (src
) == IOR
&& GET_CODE (XEXP (src
, 1)) == CONST_INT
4257 && GET_CODE (XEXP (src
, 0)) == AND
4258 && GET_CODE (XEXP (XEXP (src
, 0), 1)) == CONST_INT
4259 && GET_MODE_BITSIZE (GET_MODE (dest
)) <= HOST_BITS_PER_INT
4260 && rtx_equal_p (XEXP (XEXP (src
, 0), 0), dest
))
4262 unsigned c1
= INTVAL (XEXP (XEXP (src
, 0), 1));
4263 unsigned c2
= INTVAL (XEXP (src
, 1));
4268 c1
= (~ c1
) & GET_MODE_MASK (GET_MODE (dest
));
4269 if ((pos
= get_pos_from_mask (c1
, &len
)) >= 0)
4271 assign
= make_extraction (VOIDmode
, dest
, pos
, 0, len
, 1, 1, 0);
4272 src
= gen_rtx (CONST_INT
, VOIDmode
, c2
>> pos
);
4276 /* Finally, see if this is an assignment of a varying item into a fixed
4277 field. This looks like (ior (and DEST C1) (and (ashift SRC POS) C2)),
4278 but we have to allow for the operands to be in either order. */
4280 else if (GET_CODE (src
) == IOR
&& GET_CODE (XEXP (src
, 0)) == AND
4281 && GET_CODE (XEXP (src
, 1)) == AND
4282 && GET_MODE_BITSIZE (GET_MODE (dest
)) <= HOST_BITS_PER_INT
)
4286 /* Set MASK to the (and DEST C1) and OTHER to the mask of the shift. */
4287 if (GET_CODE (XEXP (XEXP (src
, 0), 0)) == ASHIFT
)
4288 mask
= XEXP (src
, 1), other
= XEXP (src
, 0);
4289 else if (GET_CODE (XEXP (XEXP (src
, 1), 0)) == ASHIFT
)
4290 mask
= XEXP (src
, 0), other
= XEXP (src
, 1);
4294 if (rtx_equal_p (XEXP (mask
, 0), dest
)
4295 && GET_CODE (XEXP (mask
, 1)) == CONST_INT
4296 && GET_CODE (XEXP (other
, 1)) == CONST_INT
4297 && GET_CODE (XEXP (XEXP (other
, 0), 1)) == CONST_INT
)
4299 unsigned c1
= INTVAL (XEXP (mask
, 1));
4300 unsigned c2
= INTVAL (XEXP (other
, 1));
4303 /* The two masks must be complements within the relevant mode,
4304 C2 must select a field, and the shift must move to that
4306 if (((c1
% ~c2
) & GET_MODE_MASK (GET_MODE (dest
))) == 0
4307 && (pos
= get_pos_from_mask (c2
, &len
)) >= 0
4308 && pos
== INTVAL (XEXP (XEXP (other
, 0), 1)))
4310 assign
= make_extraction (VOIDmode
, dest
, pos
, 0, len
, 1, 1, 0);
4311 src
= XEXP (XEXP (other
, 0), 0);
4317 return gen_rtx_combine (SET
, VOIDmode
, assign
, src
);
4322 /* See if X is of the form (+ (* a c) (* b c)) and convert to (* (+ a b) c)
/* apply_distributive_law: if X has the form (OP (INNER a c) (INNER b c))
   where OP is IOR/AND/XOR/PLUS/MINUS and the inner operations share the
   common operand C, rewrite it as (INNER (OP a b) c), recursing on the new
   inner operand.  Handles the commutative orderings of C, a SUBREG inner
   code, and the special case (a ^ b) | (a ^ c) noted below.

   NOTE(review): line-mangled extract; the switch over INNER_CODE, its case
   labels, several `return x;` exits and the declaration of TEM were
   dropped by the extraction.  Text left byte-identical.  */
4326 apply_distributive_law (x
)
4329 enum rtx_code code
= GET_CODE (x
);
4330 rtx lhs
, rhs
, other
;
4332 enum rtx_code inner_code
;
4334 /* The outer operation can only be one of the following: */
4335 if (code
!= IOR
&& code
!= AND
&& code
!= XOR
4336 && code
!= PLUS
&& code
!= MINUS
)
4339 lhs
= XEXP (x
, 0), rhs
= XEXP (x
, 1);
4341 /* If either operand is a primitive or a complex SUBREG,
4342 we can't do anything. */
4343 if (GET_RTX_CLASS (GET_CODE (lhs
)) == 'o'
4344 || GET_RTX_CLASS (GET_CODE (rhs
)) == 'o'
4345 || (GET_CODE (lhs
) == SUBREG
4346 && (! subreg_lowpart_p (lhs
)
4347 || (GET_MODE_SIZE (GET_MODE (lhs
))
4348 >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs
))))))
4349 || (GET_CODE (rhs
) == SUBREG
4350 && (! subreg_lowpart_p (rhs
)
4351 || (GET_MODE_SIZE (GET_MODE (rhs
))
4352 >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (rhs
)))))))
4355 lhs
= expand_compound_operation (lhs
);
4356 rhs
= expand_compound_operation (rhs
);
4357 inner_code
= GET_CODE (lhs
);
4358 if (inner_code
!= GET_CODE (rhs
))
4361 /* See if the inner and outer operations distribute. */
4368 /* These all distribute except over PLUS. */
4369 if (code
== PLUS
|| code
== MINUS
)
4374 if (code
!= PLUS
&& code
!= MINUS
)
4380 /* These are also multiplies, so they distribute over everything. */
4384 /* This distributes over all operations, provided the inner modes
4385 are the same, but we produce the result slightly differently. */
4386 if (GET_MODE (SUBREG_REG (lhs
)) != GET_MODE (SUBREG_REG (rhs
)))
4389 tem
= gen_binary (code
, GET_MODE (SUBREG_REG (lhs
)),
4390 SUBREG_REG (lhs
), SUBREG_REG (rhs
));
4391 return gen_lowpart_for_combine (GET_MODE (x
), tem
);
4397 /* Set LHS and RHS to the inner operands (A and B in the example
4398 above) and set OTHER to the common operand (C in the example).
4399 These is only one way to do this unless the inner operation is
4401 if (GET_RTX_CLASS (inner_code
) == 'c'
4402 && rtx_equal_p (XEXP (lhs
, 0), XEXP (rhs
, 0)))
4403 other
= XEXP (lhs
, 0), lhs
= XEXP (lhs
, 1), rhs
= XEXP (rhs
, 1);
4404 else if (GET_RTX_CLASS (inner_code
) == 'c'
4405 && rtx_equal_p (XEXP (lhs
, 0), XEXP (rhs
, 1)))
4406 other
= XEXP (lhs
, 0), lhs
= XEXP (lhs
, 1), rhs
= XEXP (rhs
, 0);
4407 else if (GET_RTX_CLASS (inner_code
) == 'c'
4408 && rtx_equal_p (XEXP (lhs
, 1), XEXP (rhs
, 0)))
4409 other
= XEXP (lhs
, 1), lhs
= XEXP (lhs
, 0), rhs
= XEXP (rhs
, 1);
4410 else if (rtx_equal_p (XEXP (lhs
, 1), XEXP (rhs
, 1)))
4411 other
= XEXP (lhs
, 1), lhs
= XEXP (lhs
, 0), rhs
= XEXP (rhs
, 0);
4415 /* Form the new inner operation, seeing if it simplifies first. */
4416 tem
= gen_binary (code
, GET_MODE (x
), lhs
, rhs
);
4418 /* There is one exception to the general way of distributing:
4419 (a ^ b) | (a ^ c) -> (~a) & (b ^ c) */
4420 if (code
== XOR
&& inner_code
== IOR
)
4423 other
= gen_unary (NOT
, GET_MODE (x
), other
);
4426 /* We may be able to continuing distributing the result, so call
4427 ourselves recursively on the inner operation before forming the
4428 outer operation, which we return. */
4429 return gen_binary (inner_code
, GET_MODE (x
),
4430 apply_distributive_law (tem
), other
);
4433 /* We have X, a logical `and' of VAROP with the constant CONSTOP, to be done
4436 Return an equivalent form, if different from X. Otherwise, return X. If
4437 X is zero, we are to always construct the equivalent form. */
/* simplify_and_const_int: simplify (and VAROP CONSTOP) in MODE, using
   knowledge of which bits of VAROP are significant (possibly non-zero).
   Loops peeling structure off VAROP (SUBREGs, ANDs, shifts, IOR/XOR with
   shifts, NE against zero, PLUS masking), then returns a constant, VAROP
   itself, or an AND rtx — reusing X when its shape already matches.
   X may be zero, meaning the result must always be constructed fresh.

   NOTE(review): line-mangled extract; case labels of the switch, braces,
   `continue`/`break` statements and several declarations were dropped.
   Text left byte-identical.
   NOTE(review): at original lines 4546-4548 the surviving text reads
   `(INTVAL (...) & ~ significant_bits (...) == 0)` — in C, `==` binds
   tighter than `&`, so this tests `x & (sig == 0)`, not `(x & ~sig) == 0`
   as the surrounding code pattern (cf. original line 4484) suggests was
   intended.  Confirm against a pristine combine.c.  */
4440 simplify_and_const_int (x
, mode
, varop
, constop
)
4442 enum machine_mode mode
;
4446 register enum machine_mode tmode
;
4448 unsigned significant
;
4450 /* There is a large class of optimizations based on the principle that
4451 some operations produce results where certain bits are known to be zero,
4452 and hence are not significant to the AND. For example, if we have just
4453 done a left shift of one bit, the low-order bit is known to be zero and
4454 hence an AND with a mask of ~1 would not do anything.
4456 At the end of the following loop, we set:
4458 VAROP to be the item to be AND'ed with;
4459 CONSTOP to the constant value to AND it with. */
4463 /* If we ever encounter a mode wider than the host machine's word
4464 size, we can't compute the masks accurately, so give up. */
4465 if (GET_MODE_BITSIZE (GET_MODE (varop
)) > HOST_BITS_PER_INT
)
4468 /* Unless one of the cases below does a `continue',
4469 a `break' will be executed to exit the loop. */
4471 switch (GET_CODE (varop
))
4474 /* If VAROP is a (clobber (const_int)), return it since we know
4475 we are generating something that won't match. */
4478 #if ! BITS_BIG_ENDIAN
4480 /* VAROP is a (use (mem ..)) that was made from a bit-field
4481 extraction that spanned the boundary of the MEM. If we are
4482 now masking so it is within that boundary, we don't need the
4484 if ((constop
& ~ GET_MODE_MASK (GET_MODE (XEXP (varop
, 0)))) == 0)
4486 varop
= XEXP (varop
, 0);
4493 if (subreg_lowpart_p (varop
)
4494 /* We can ignore the effect this SUBREG if it narrows the mode
4495 or, on machines where byte operations zero extend, if the
4496 constant masks to zero all the bits the mode doesn't have. */
4497 && ((GET_MODE_SIZE (GET_MODE (varop
))
4498 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop
))))
4499 #ifdef BYTE_LOADS_ZERO_EXTEND
4501 & GET_MODE_MASK (GET_MODE (varop
))
4502 & ~ GET_MODE_MASK (GET_MODE (SUBREG_REG (varop
)))))
4506 varop
= SUBREG_REG (varop
);
4515 /* Try to expand these into a series of shifts and then work
4516 with that result. If we can't, for example, if the extract
4517 isn't at a fixed position, give up. */
4518 temp
= expand_compound_operation (varop
);
4527 if (GET_CODE (XEXP (varop
, 1)) == CONST_INT
)
4529 constop
&= INTVAL (XEXP (varop
, 1));
4530 varop
= XEXP (varop
, 0);
4537 /* If VAROP is (ior (lshiftrt FOO C1) C2), try to commute the IOR and
4538 LSHIFT so we end up with an (and (lshiftrt (ior ...) ...) ...)
4539 operation which may be a bitfield extraction. */
4541 if (GET_CODE (XEXP (varop
, 0)) == LSHIFTRT
4542 && GET_CODE (XEXP (XEXP (varop
, 0), 1)) == CONST_INT
4543 && INTVAL (XEXP (XEXP (varop
, 0), 1)) >= 0
4544 && INTVAL (XEXP (XEXP (varop
, 0), 1)) < HOST_BITS_PER_INT
4545 && GET_CODE (XEXP (varop
, 1)) == CONST_INT
4546 && (INTVAL (XEXP (varop
, 1))
4547 & ~ significant_bits (XEXP (varop
, 0),
4548 GET_MODE (varop
)) == 0))
4550 temp
= gen_rtx (CONST_INT
, VOIDmode
,
4551 ((INTVAL (XEXP (varop
, 1)) & constop
)
4552 << INTVAL (XEXP (XEXP (varop
, 0), 1))));
4553 temp
= gen_binary (GET_CODE (varop
), GET_MODE (varop
),
4554 XEXP (XEXP (varop
, 0), 0), temp
);
4555 varop
= gen_rtx_combine (LSHIFTRT
, GET_MODE (varop
),
4556 temp
, XEXP (varop
, 1));
4560 /* Apply the AND to both branches of the IOR or XOR, then try to
4561 apply the distributive law. This may eliminate operations
4562 if either branch can be simplified because of the AND.
4563 It may also make some cases more complex, but those cases
4564 probably won't match a pattern either with or without this. */
4566 gen_lowpart_for_combine
4567 (mode
, apply_distributive_law
4569 (GET_CODE (varop
), GET_MODE (varop
),
4570 simplify_and_const_int (0, GET_MODE (varop
),
4571 XEXP (varop
, 0), constop
),
4572 simplify_and_const_int (0, GET_MODE (varop
),
4573 XEXP (varop
, 1), constop
))));
4576 /* (and (not FOO)) is (and (xor FOO CONST_OP)) so if FOO is an
4577 LSHIFTRT we can do the same as above. */
4579 if (GET_CODE (XEXP (varop
, 0)) == LSHIFTRT
4580 && GET_CODE (XEXP (XEXP (varop
, 0), 1)) == CONST_INT
4581 && INTVAL (XEXP (XEXP (varop
, 0), 1)) >= 0
4582 && INTVAL (XEXP (XEXP (varop
, 0), 1)) < HOST_BITS_PER_INT
)
4584 temp
= gen_rtx (CONST_INT
, VOIDmode
,
4585 constop
<< INTVAL (XEXP (XEXP (varop
, 0), 1)));
4586 temp
= gen_binary (XOR
, GET_MODE (varop
),
4587 XEXP (XEXP (varop
, 0), 0), temp
);
4588 varop
= gen_rtx_combine (LSHIFTRT
, GET_MODE (varop
),
4589 temp
, XEXP (XEXP (varop
, 0), 1));
4595 /* If we are just looking for the sign bit, we don't need this
4596 shift at all, even if it has a variable count. */
4597 if (constop
== 1 << (GET_MODE_BITSIZE (GET_MODE (varop
)) - 1))
4599 varop
= XEXP (varop
, 0);
4603 /* If this is a shift by a constant, get a mask that contains
4604 those bits that are not copies of the sign bit. We then have
4605 two cases: If CONSTOP only includes those bits, this can be
4606 a logical shift, which may allow simplifications. If CONSTOP
4607 is a single-bit field not within those bits, we are requesting
4608 a copy of the sign bit and hence can shift the sign bit to
4609 the appropriate location. */
4610 if (GET_CODE (XEXP (varop
, 1)) == CONST_INT
4611 && INTVAL (XEXP (varop
, 1)) >= 0
4612 && INTVAL (XEXP (varop
, 1)) < HOST_BITS_PER_INT
)
4616 significant
= GET_MODE_MASK (GET_MODE (varop
));
4617 significant
>>= INTVAL (XEXP (varop
, 1));
4619 if ((constop
& ~significant
) == 0
4620 || (i
= exact_log2 (constop
)) >= 0)
4622 varop
= simplify_shift_const
4623 (varop
, LSHIFTRT
, GET_MODE (varop
), XEXP (varop
, 0),
4624 i
< 0 ? INTVAL (XEXP (varop
, 1))
4625 : GET_MODE_BITSIZE (GET_MODE (varop
)) - 1 - i
);
4626 if (GET_CODE (varop
) != ASHIFTRT
)
4631 /* If our mask is 1, convert this to a LSHIFTRT. This can be done
4632 even if the shift count isn't a constant. */
4634 varop
= gen_rtx_combine (LSHIFTRT
, GET_MODE (varop
),
4635 XEXP (varop
, 0), XEXP (varop
, 1));
4639 /* (and (ne FOO 0) CONST) can be (and FOO CONST) if CONST is
4640 included in STORE_FLAG_VALUE and FOO has no significant bits
4642 if ((constop
& ~ STORE_FLAG_VALUE
) == 0
4643 && XEXP (varop
, 0) == const0_rtx
4644 && (significant_bits (XEXP (varop
, 0), mode
) & ~ constop
) == 0)
4646 varop
= XEXP (varop
, 0);
4652 /* In (and (plus FOO C1) M), if M is a mask that just turns off
4653 low-order bits (as in an alignment operation) and FOO is already
4654 aligned to that boundary, we can convert remove this AND
4655 and possibly the PLUS if it is now adding zero. */
4656 if (GET_CODE (XEXP (varop
, 1)) == CONST_INT
4657 && exact_log2 (-constop
) >= 0
4658 && (significant_bits (XEXP (varop
, 0), mode
) & ~ constop
) == 0)
4660 varop
= plus_constant (XEXP (varop
, 0),
4661 INTVAL (XEXP (varop
, 1)) & constop
);
4666 /* ... fall through ... */
4669 /* In (and (plus (and FOO M1) BAR) M2), if M1 and M2 are one
4670 less than powers of two and M2 is narrower than M1, we can
4671 eliminate the inner AND. This occurs when incrementing
4674 if (GET_CODE (XEXP (varop
, 0)) == ZERO_EXTRACT
4675 || GET_CODE (XEXP (varop
, 0)) == ZERO_EXTEND
)
4676 SUBST (XEXP (varop
, 0),
4677 expand_compound_operation (XEXP (varop
, 0)));
4679 if (GET_CODE (XEXP (varop
, 0)) == AND
4680 && GET_CODE (XEXP (XEXP (varop
, 0), 1)) == CONST_INT
4681 && exact_log2 (constop
+ 1) >= 0
4682 && exact_log2 (INTVAL (XEXP (XEXP (varop
, 0), 1)) + 1) >= 0
4683 && (~ INTVAL (XEXP (XEXP (varop
, 0), 1)) & constop
) == 0)
4684 SUBST (XEXP (varop
, 0), XEXP (XEXP (varop
, 0), 0));
4691 /* If we have reached a constant, this whole thing is constant. */
4692 if (GET_CODE (varop
) == CONST_INT
)
4693 return gen_rtx (CONST_INT
, VOIDmode
, constop
& INTVAL (varop
));
4695 /* See what bits are significant in VAROP. */
4696 significant
= significant_bits (varop
, mode
);
4698 /* Turn off all bits in the constant that are known to already be zero.
4699 Thus, if the AND isn't needed at all, we will have CONSTOP == SIGNIFICANT
4700 which is tested below. */
4702 constop
&= significant
;
4704 /* If we don't have any bits left, return zero. */
4708 /* Get VAROP in MODE. Try to get a SUBREG if not. Don't make a new SUBREG
4709 if we already had one (just check for the simplest cases). */
4710 if (x
&& GET_CODE (XEXP (x
, 0)) == SUBREG
4711 && GET_MODE (XEXP (x
, 0)) == mode
4712 && SUBREG_REG (XEXP (x
, 0)) == varop
)
4713 varop
= XEXP (x
, 0);
4715 varop
= gen_lowpart_for_combine (mode
, varop
);
4717 /* If we can't make the SUBREG, try to return what we were given. */
4718 if (GET_CODE (varop
) == CLOBBER
)
4719 return x
? x
: varop
;
4721 /* If we are only masking insignificant bits, return VAROP. */
4722 if (constop
== significant
)
4725 /* Otherwise, return an AND. See how much, if any, of X we can use. */
4726 else if (x
== 0 || GET_CODE (x
) != AND
|| GET_MODE (x
) != mode
)
4727 x
= gen_rtx_combine (AND
, mode
, varop
,
4728 gen_rtx (CONST_INT
, VOIDmode
, constop
));
4732 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
4733 || INTVAL (XEXP (x
, 1)) != constop
)
4734 SUBST (XEXP (x
, 1), gen_rtx (CONST_INT
, VOIDmode
, constop
));
4736 SUBST (XEXP (x
, 0), varop
);
4742 /* Given an expression, X, compute which bits in X can be non-zero.
4743 We don't care about bits outside of those defined in MODE.
4745 For most X this is simply GET_MODE_MASK (GET_MODE (MODE)), but if X is
4746 a shift, AND, or zero_extract, we can do better. */
/* significant_bits: return a mask of the bits of X that may be non-zero,
   restricted to MODE.  Walks the expression: registers use recorded
   reg_significant data or last-known values, memory honors
   BYTE_LOADS_ZERO_EXTEND, comparisons honor STORE_FLAG_VALUE, and
   arithmetic (PLUS/MINUS/MULT/DIV/MOD...) propagates high/low zero-bit
   counts; shifts by constants shift the operand's mask accordingly.

   NOTE(review): line-mangled extract; the switch's case labels, several
   `break`s, `#endif`s, declarations (CODE, TEM, INNER_SIG, OUTER,
   RESULT_LOW) and the final return were dropped by the extraction.
   Text left byte-identical.  */
4749 significant_bits (x
, mode
)
4751 enum machine_mode mode
;
4753 unsigned significant
= GET_MODE_MASK (mode
);
4756 int mode_width
= GET_MODE_BITSIZE (mode
);
4759 /* If X is wider than MODE, use its mode instead. */
4760 if (GET_MODE_BITSIZE (GET_MODE (x
)) > mode_width
)
4762 mode
= GET_MODE (x
);
4763 significant
= GET_MODE_MASK (mode
);
4764 mode_width
= GET_MODE_BITSIZE (mode
);
4767 if (mode_width
> HOST_BITS_PER_INT
)
4768 /* Our only callers in this case look for single bit values. So
4769 just return the mode mask. Those tests will then be false. */
4772 code
= GET_CODE (x
);
4776 #ifdef STACK_BOUNDARY
4777 /* If this is the stack pointer, we may know something about its
4778 alignment. If PUSH_ROUNDING is defined, it is possible for the
4779 stack to be momentarily aligned only to that amount, so we pick
4780 the least alignment. */
4782 if (x
== stack_pointer_rtx
)
4784 int sp_alignment
= STACK_BOUNDARY
/ BITS_PER_UNIT
;
4786 #ifdef PUSH_ROUNDING
4787 sp_alignment
= MIN (PUSH_ROUNDING (1), sp_alignment
);
4790 return significant
& ~ (sp_alignment
- 1);
4794 /* If X is a register whose value we can find, use that value.
4795 Otherwise, use the previously-computed significant bits for this
4798 tem
= get_last_value (x
);
4800 return significant_bits (tem
, mode
);
4801 else if (significant_valid
&& reg_significant
[REGNO (x
)])
4802 return reg_significant
[REGNO (x
)] & significant
;
4809 #ifdef BYTE_LOADS_ZERO_EXTEND
4811 /* In many, if not most, RISC machines, reading a byte from memory
4812 zeros the rest of the register. Noticing that fact saves a lot
4813 of extra zero-extends. */
4814 significant
&= GET_MODE_MASK (GET_MODE (x
));
4818 #if STORE_FLAG_VALUE == 1
4826 /* A comparison operation only sets the bits given by its mode. The
4827 rest are set undefined. */
4828 if (GET_MODE_SIZE (GET_MODE (x
)) < mode_width
)
4829 significant
|= (GET_MODE_MASK (mode
) & ~ GET_MODE_MASK (GET_MODE (x
)));
4833 #if STORE_FLAG_VALUE == -1
4835 if (GET_RTX_CLASS (GET_CODE (XEXP (x
, 0))) == '<'
4836 || ((tem
= get_last_value (XEXP (x
, 0))) != 0
4837 && GET_RTX_CLASS (GET_CODE (tem
)) == '<'))
4840 if (GET_MODE_SIZE (GET_MODE (x
)) < mode_width
)
4841 significant
|= (GET_MODE_MASK (mode
) & ~ GET_MODE_MASK (GET_MODE (x
)));
4846 significant
&= (significant_bits (XEXP (x
, 0), mode
)
4847 & GET_MODE_MASK (mode
));
4851 significant
&= significant_bits (XEXP (x
, 0), mode
);
4852 if (GET_MODE (XEXP (x
, 0)) != VOIDmode
)
4853 significant
&= GET_MODE_MASK (GET_MODE (XEXP (x
, 0)));
4857 /* If the sign bit is known clear, this is the same as ZERO_EXTEND.
4858 Otherwise, show all the bits in the outer mode but not the inner
4860 inner_sig
= significant_bits (XEXP (x
, 0), mode
);
4861 if (GET_MODE (XEXP (x
, 0)) != VOIDmode
)
4863 inner_sig
&= GET_MODE_MASK (GET_MODE (XEXP (x
, 0)));
4865 (1 << (GET_MODE_BITSIZE (GET_MODE (XEXP (x
, 0))) - 1)))
4866 inner_sig
|= (GET_MODE_MASK (mode
)
4867 & ~ GET_MODE_MASK (GET_MODE (XEXP (x
, 0))));
4870 significant
&= inner_sig
;
4874 significant
&= (significant_bits (XEXP (x
, 0), mode
)
4875 & significant_bits (XEXP (x
, 1), mode
));
4880 significant
&= (significant_bits (XEXP (x
, 0), mode
)
4881 | significant_bits (XEXP (x
, 1), mode
));
4884 case PLUS
: case MINUS
:
4886 case DIV
: case UDIV
:
4887 case MOD
: case UMOD
:
4888 /* We can apply the rules of arithmetic to compute the number of
4889 high- and low-order zero bits of these operations. We start by
4890 computing the width (position of the highest-order non-zero bit)
4891 and the number of low-order zero bits for each value. */
4893 unsigned sig0
= significant_bits (XEXP (x
, 0), mode
);
4894 unsigned sig1
= significant_bits (XEXP (x
, 1), mode
);
4895 int width0
= floor_log2 (sig0
) + 1;
4896 int width1
= floor_log2 (sig1
) + 1;
4897 int low0
= floor_log2 (sig0
& -sig0
);
4898 int low1
= floor_log2 (sig1
& -sig1
);
4899 int op0_maybe_minusp
= (sig0
& (1 << (mode_width
- 1)));
4900 int op1_maybe_minusp
= (sig1
& (1 << (mode_width
- 1)));
4901 int result_width
= mode_width
;
4907 result_width
= MAX (width0
, width1
) + 1;
4908 result_low
= MIN (low0
, low1
);
4911 result_low
= MIN (low0
, low1
);
4914 result_width
= width0
+ width1
;
4915 result_low
= low0
+ low1
;
4918 if (! op0_maybe_minusp
&& ! op1_maybe_minusp
)
4919 result_width
= width0
;
4922 result_width
= width0
;
4925 if (! op0_maybe_minusp
&& ! op1_maybe_minusp
)
4926 result_width
= MIN (width0
, width1
);
4927 result_low
= MIN (low0
, low1
);
4930 result_width
= MIN (width0
, width1
);
4931 result_low
= MIN (low0
, low1
);
4935 if (result_width
< mode_width
)
4936 significant
&= (1 << result_width
) - 1;
4939 significant
&= ~ ((1 << result_low
) - 1);
4944 if (GET_CODE (XEXP (x
, 1)) == CONST_INT
4945 && INTVAL (XEXP (x
, 1)) < HOST_BITS_PER_INT
)
4946 significant
&= (1 << INTVAL (XEXP (x
, 1))) - 1;
4950 /* If the inner mode is a single word for both the host and target
4951 machines, we can compute this from which bits of the inner
4952 object are known significant. */
4953 if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x
))) <= BITS_PER_WORD
4954 && GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x
))) <= HOST_BITS_PER_INT
)
4956 significant
&= significant_bits (SUBREG_REG (x
), mode
);
4957 #ifndef BYTE_LOADS_ZERO_EXTEND
4958 /* On many CISC machines, accessing an object in a wider mode
4959 causes the high-order bits to become undefined. So they are
4960 not known to be zero. */
4961 if (GET_MODE_SIZE (GET_MODE (x
))
4962 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x
))))
4963 significant
|= (GET_MODE_MASK (GET_MODE (x
))
4964 & ~ GET_MODE_MASK (GET_MODE (SUBREG_REG (x
))));
4974 /* The significant bits are in two classes: any bits within MODE
4975 that aren't in GET_MODE (x) are always significant. The rest of the
4976 significant bits are those that are significant in the operand of
4977 the shift when shifted the appropriate number of bits. This
4978 shows that high-order bits are cleared by the right shift and
4979 low-order bits by left shifts. */
4980 if (GET_CODE (XEXP (x
, 1)) == CONST_INT
4981 && INTVAL (XEXP (x
, 1)) >= 0
4982 && INTVAL (XEXP (x
, 1)) < HOST_BITS_PER_INT
)
4984 enum machine_mode inner_mode
= GET_MODE (x
);
4985 int width
= GET_MODE_BITSIZE (inner_mode
);
4986 int count
= INTVAL (XEXP (x
, 1));
4987 unsigned mode_mask
= GET_MODE_MASK (inner_mode
);
4988 unsigned op_significant
= significant_bits (XEXP (x
, 0), mode
);
4989 unsigned inner
= op_significant
& mode_mask
;
4992 if (mode_width
> width
)
4993 outer
= (op_significant
& significant
& ~ mode_mask
);
4995 if (code
== LSHIFTRT
)
4997 else if (code
== ASHIFTRT
)
5001 /* If the sign bit was significant at before the shift, we
5002 need to mark all the places it could have been copied to
5003 by the shift significant. */
5004 if (inner
& (1 << (width
- 1 - count
)))
5005 inner
|= ((1 << count
) - 1) << (width
- count
);
5007 else if (code
== LSHIFT
|| code
== ASHIFT
)
5010 inner
= ((inner
<< (count
% width
)
5011 | (inner
>> (width
- (count
% width
)))) & mode_mask
);
5013 significant
&= (outer
| inner
);
5018 /* This is at most the number of bits in the mode. */
5019 significant
= (1 << (floor_log2 (mode_width
) + 1)) - 1;
5026 /* This function is called from `simplify_shift_const' to merge two
5027 outer operations. Specifically, we have already found that we need
5028 to perform operation *POP0 with constant *PCONST0 at the outermost
5029 position. We would now like to also perform OP1 with constant CONST1
5030 (with *POP0 being done last).
5032 Return 1 if we can do the operation and update *POP0 and *PCONST0 with
5033 the resulting operation. *PCOMP_P is set to 1 if we would need to
5034 complement the innermost operand, otherwise it is unchanged.
5036 MODE is the mode in which the operation will be done. No bits outside
5037 the width of this mode matter. It is assumed that the width of this mode
5038 is smaller than or equal to HOST_BITS_PER_INT.
5040 If *POP0 or OP1 are NIL, it means no operation is required. Only NEG, PLUS,
5041 IOR, XOR, and AND are supported. We may set *POP0 to SET if the proper
5042 result is simply *PCONST0.
5044 If the resulting operation cannot be expressed as one operation, we
5045 return 0 and do not change *POP0, *PCONST0, and *PCOMP_P. */
/* merge_outer_ops: per the header comment above, try to fold the pending
   outer operation *POP0/*PCONST0 with a second operation OP1/CONST1 (only
   NIL, NEG, PLUS, IOR, XOR, AND are handled) into a single operation,
   updating *POP0/*PCONST0 and setting *PCOMP_P when the innermost operand
   must be complemented.  All arithmetic is masked to MODE's width.

   NOTE(review): line-mangled extract; the switch on OP0, several case
   labels, `return 0;`/`return 1;` exits and the closing brace were
   dropped by the extraction.  Text left byte-identical.  */
5048 merge_outer_ops (pop0
, pconst0
, op1
, const1
, mode
, pcomp_p
)
5049 enum rtx_code
*pop0
;
5053 enum machine_mode mode
;
5056 enum rtx_code op0
= *pop0
;
5057 int const0
= *pconst0
;
5059 const0
&= GET_MODE_MASK (mode
);
5060 const1
&= GET_MODE_MASK (mode
);
5062 /* If OP0 is an AND, clear unimportant bits in CONST1. */
5066 /* If OP0 or OP1 is NIL, this is easy. Similarly if they are the same or
5069 if (op1
== NIL
|| op0
== SET
)
5072 else if (op0
== NIL
)
5073 op0
= op1
, const0
= const1
;
5075 else if (op0
== op1
)
5097 /* Otherwise, if either is a PLUS or NEG, we can't do anything. */
5098 else if (op0
== PLUS
|| op1
== PLUS
|| op0
== NEG
|| op1
== NEG
)
5101 /* If the two constants aren't the same, we can't do anything. The
5102 remaining six cases can all be done. */
5103 else if (const0
!= const1
)
5111 /* (a & b) | b == b */
5113 else /* op1 == XOR */
5114 /* (a ^ b) | b == a | b */
5120 /* (a & b) ^ b == (~a) & b */
5121 op0
= AND
, *pcomp_p
= 1;
5122 else /* op1 == IOR */
5123 /* (a | b) ^ b == a & ~b */
5124 op0
= AND
, *pconst0
= ~ const0
;
5129 /* (a | b) & b == b */
5131 else /* op1 == XOR */
5132 /* (a ^ b) & b) == (~a) & b */
5137 /* Check for NO-OP cases. */
5138 const0
&= GET_MODE_MASK (mode
);
5140 && (op0
== IOR
|| op0
== XOR
|| op0
== PLUS
))
5142 else if (const0
== 0 && op0
== AND
)
5144 else if (const0
== GET_MODE_MASK (mode
) && op0
== AND
)
5153 /* Simplify a shift of VAROP by COUNT bits. CODE says what kind of shift.
5154 The result of the shift is RESULT_MODE. X, if non-zero, is an expression
5155 that we started with.
5157 The shift is normally computed in the widest mode we find in VAROP, as
5158 long as it isn't a different number of words than RESULT_MODE. Exceptions
5159 are ASHIFTRT and ROTATE, which are always done in their original mode. */
/* NOTE(review): this extraction is missing many interior lines (the
   embedded original line numbers have large gaps, e.g. 5185 -> 5190,
   5237 -> 5240); several `if' bodies, braces, and `case' labels are
   not visible.  Text is preserved verbatim; only comments were added
   or repaired.  */
5162 simplify_shift_const (x
, code
, result_mode
, varop
, count
)
5165 enum machine_mode result_mode
;
/* Remember the originally requested shift, since CODE/COUNT may be
   rewritten while simplifying.  */
5169 enum rtx_code orig_code
= code
;
5170 int orig_count
= count
;
5171 enum machine_mode mode
= result_mode
;
5172 enum machine_mode shift_mode
, tmode
;
5174 = (GET_MODE_SIZE (mode
) + (UNITS_PER_WORD
- 1)) / UNITS_PER_WORD
;
5175 /* We form (outer_op (code varop count) (outer_const)). */
5176 enum rtx_code outer_op
= NIL
;
5179 int complement_p
= 0;
5182 /* If we were given an invalid count, don't do anything except exactly
5183 what was requested. */
5185 if (count
< 0 || count
> GET_MODE_BITSIZE (mode
))
5190 return gen_rtx (code
, mode
, varop
, gen_rtx (CONST_INT
, VOIDmode
, count
));
5193 /* Unless one of the branches of the `if' in this loop does a `continue',
5194 we will `break' the loop after the `if'. */
5198 /* If we have an operand of (clobber (const_int 0)), just return that
5200 if (GET_CODE (varop
) == CLOBBER
)
5203 /* If we discovered we had to complement VAROP, leave. Making a NOT
5204 here would cause an infinite loop. */
5208 /* Convert ROTATERT to ROTATE. */
5209 if (code
== ROTATERT
)
5210 code
= ROTATE
, count
= GET_MODE_BITSIZE (result_mode
) - count
;
5212 /* Canonicalize LSHIFT to ASHIFT. */
5216 /* We need to determine what mode we will do the shift in. If the
5217 shift is a ASHIFTRT or ROTATE, we must always do it in the mode it
5218 was originally done in. Otherwise, we can do it in MODE, the widest
5219 mode encountered. */
5220 shift_mode
= (code
== ASHIFTRT
|| code
== ROTATE
? result_mode
: mode
);
5222 /* Handle cases where the count is greater than the size of the mode
5223 minus 1. For ASHIFT, use the size minus one as the count (this can
5224 occur when simplifying (lshiftrt (ashiftrt ..))). For rotates,
5225 take the count modulo the size. For other shifts, the result is
5228 Since these shifts are being produced by the compiler by combining
5229 multiple operations, each of which are defined, we know what the
5230 result is supposed to be. */
5232 if (count
> GET_MODE_BITSIZE (shift_mode
) - 1)
5234 if (code
== ASHIFTRT
)
5235 count
= GET_MODE_BITSIZE (shift_mode
) - 1;
5236 else if (code
== ROTATE
|| code
== ROTATERT
)
5237 count
%= GET_MODE_BITSIZE (shift_mode
);
5240 /* We can't simply return zero because there may be an
5248 /* Negative counts are invalid and should not have been made (a
5249 programmer-specified negative count should have been handled
5254 /* We simplify the tests below and elsewhere by converting
5255 ASHIFTRT to LSHIFTRT if we know the sign bit is clear.
5256 `make_compound_operation' will convert it to a ASHIFTRT for
5257 those machines (such as Vax) that don't have a LSHIFTRT. */
5258 if (GET_MODE_BITSIZE (shift_mode
) <= HOST_BITS_PER_INT
5260 && (significant_bits (varop
, shift_mode
)
5261 & (1 << (GET_MODE_BITSIZE (shift_mode
) - 1))) == 0)
/* Dispatch on the rtx code of the quantity being shifted.  */
5264 switch (GET_CODE (varop
))
5270 new = expand_compound_operation (varop
);
5279 /* If we have (xshiftrt (mem ...) C) and C is MODE_WIDTH
5280 minus the width of a smaller mode, we can do this with a
5281 SIGN_EXTEND or ZERO_EXTEND from the narrower memory location. */
5282 if ((code
== ASHIFTRT
|| code
== LSHIFTRT
)
5283 && ! mode_dependent_address_p (XEXP (varop
, 0))
5284 && ! MEM_VOLATILE_P (varop
)
5285 && (tmode
= mode_for_size (GET_MODE_BITSIZE (mode
) - count
,
5286 MODE_INT
, 1)) != BLKmode
)
5288 #if BYTES_BIG_ENDIAN
5289 new = gen_rtx (MEM
, tmode
, XEXP (varop
, 0));
5291 new = gen_rtx (MEM
, tmode
,
5292 plus_constant (XEXP (varop
, 0),
5293 count
/ BITS_PER_UNIT
));
/* Propagate the memory attributes of the original MEM.  */
5294 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (varop
);
5295 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (varop
);
5296 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (varop
);
5298 varop
= gen_rtx_combine (code
== ASHIFTRT
? SIGN_EXTEND
5299 : ZERO_EXTEND
, mode
, new);
5306 /* Similar to the case above, except that we can only do this if
5307 the resulting mode is the same as that of the underlying
5308 MEM and adjust the address depending on the *bits* endianness
5309 because of the way that bit-field extract insns are defined. */
5310 if ((code
== ASHIFTRT
|| code
== LSHIFTRT
)
5311 && (tmode
= mode_for_size (GET_MODE_BITSIZE (mode
) - count
,
5312 MODE_INT
, 1)) != BLKmode
5313 && tmode
== GET_MODE (XEXP (varop
, 0)))
5316 new = XEXP (varop
, 0);
5318 new = copy_rtx (XEXP (varop
, 0));
5319 SUBST (XEXP (new, 0),
5320 plus_constant (XEXP (new, 0),
5321 count
/ BITS_PER_UNIT
));
5324 varop
= gen_rtx_combine (code
== ASHIFTRT
? SIGN_EXTEND
5325 : ZERO_EXTEND
, mode
, new);
5332 /* If VAROP is a SUBREG, strip it as long as the inner operand has
5333 the same number of words as what we've seen so far. Then store
5334 the widest mode in MODE. */
5335 if (SUBREG_WORD (varop
) == 0
5336 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop
)))
5337 + (UNITS_PER_WORD
- 1)) / UNITS_PER_WORD
)
5340 varop
= SUBREG_REG (varop
);
5341 if (GET_MODE_SIZE (GET_MODE (varop
)) > GET_MODE_SIZE (mode
))
5342 mode
= GET_MODE (varop
);
5348 /* Some machines use MULT instead of ASHIFT because MULT
5349 is cheaper. But it is still better on those machines to
5350 merge two shifts into one. */
5351 if (GET_CODE (XEXP (varop
, 1)) == CONST_INT
5352 && exact_log2 (INTVAL (XEXP (varop
, 1))) >= 0)
5354 varop
= gen_binary (ASHIFT
, GET_MODE (varop
), XEXP (varop
, 0),
5355 gen_rtx (CONST_INT
, VOIDmode
,
5356 exact_log2 (INTVAL (XEXP (varop
, 1)))));
5362 /* Similar, for when divides are cheaper. */
5363 if (GET_CODE (XEXP (varop
, 1)) == CONST_INT
5364 && exact_log2 (INTVAL (XEXP (varop
, 1))) >= 0)
5366 varop
= gen_binary (LSHIFTRT
, GET_MODE (varop
), XEXP (varop
, 0),
5367 gen_rtx (CONST_INT
, VOIDmode
,
5368 exact_log2 (INTVAL (XEXP (varop
, 1)))));
5374 /* If we are extracting just the sign bit of an arithmetic right
5375 shift, that shift is not needed. */
5376 if (code
== LSHIFTRT
&& count
== GET_MODE_BITSIZE (result_mode
) - 1)
5378 varop
= XEXP (varop
, 0);
5382 /* ... fall through ... */
5388 /* Here we have two nested shifts. The result is usually the
5389 AND of a new shift with a mask. We compute the result below. */
5390 if (GET_CODE (XEXP (varop
, 1)) == CONST_INT
5391 && INTVAL (XEXP (varop
, 1)) >= 0
5392 && INTVAL (XEXP (varop
, 1)) < GET_MODE_BITSIZE (GET_MODE (varop
))
5393 && GET_MODE_BITSIZE (result_mode
) <= HOST_BITS_PER_INT
5394 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_INT
)
5396 enum rtx_code first_code
= GET_CODE (varop
);
5397 int first_count
= INTVAL (XEXP (varop
, 1));
5402 if (first_code
== LSHIFT
)
5403 first_code
= ASHIFT
;
5405 /* We have one common special case. We can't do any merging if
5406 the inner code is an ASHIFTRT of a smaller mode. However, if
5407 we have (ashift:M1 (subreg:M1 (ashiftrt:M2 FOO C1) 0) C2)
5408 with C2 == GET_MODE_BITSIZE (M1) - GET_MODE_BITSIZE (M2),
5409 we can convert it to
5410 (ashiftrt:M1 (ashift:M1 (and:M1 (subreg:M1 FOO 0 C2) C3) C1).
5411 This simplifies certain SIGN_EXTEND operations. */
5412 if (code
== ASHIFT
&& first_code
== ASHIFTRT
5413 && (GET_MODE_BITSIZE (result_mode
)
5414 - GET_MODE_BITSIZE (GET_MODE (varop
))) == count
)
5416 /* C3 has the low-order C1 bits zero. */
5418 mask
= GET_MODE_MASK (mode
) & ~ ((1 << first_count
) - 1);
5420 varop
= simplify_and_const_int (0, result_mode
,
5421 XEXP (varop
, 0), mask
);
5422 varop
= simplify_shift_const (0, ASHIFT
, result_mode
,
5424 count
= first_count
;
5429 /* If this was (ashiftrt (ashift foo C1) C2) and we know
5430 something about FOO's previous value, we may be able to
5431 optimize this even though the code below can't handle this
5434 If FOO has J high-order bits equal to the sign bit with
5435 J > C1, then we can convert this to either an ASHIFT or
5436 a ASHIFTRT depending on the two counts.
5438 We cannot do this if VAROP's mode is not SHIFT_MODE. */
5440 if (code
== ASHIFTRT
&& first_code
== ASHIFT
5441 && GET_MODE (varop
) == shift_mode
5442 && (inner
= get_last_value (XEXP (varop
, 0))) != 0)
5444 if ((GET_CODE (inner
) == CONST_INT
5445 && (INTVAL (inner
) >> (HOST_BITS_PER_INT
- (first_count
+ 1)) == 0
5446 || (INTVAL (inner
) >> (HOST_BITS_PER_INT
- (first_count
+ 1)) == -1)))
5447 || (GET_CODE (inner
) == SIGN_EXTEND
5448 && ((GET_MODE_BITSIZE (GET_MODE (inner
))
5449 - GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (inner
))))
5451 || (GET_CODE (inner
) == ASHIFTRT
5452 && GET_CODE (XEXP (inner
, 1)) == CONST_INT
5453 && INTVAL (XEXP (inner
, 1)) >= first_count
))
5455 count
-= first_count
;
5457 count
= - count
, code
= ASHIFT
;
5458 varop
= XEXP (varop
, 0);
5463 /* There are some cases we can't do. If CODE is ASHIFTRT,
5464 we can only do this if FIRST_CODE is also ASHIFTRT.
5466 We can't do the case when CODE is ROTATE and FIRST_CODE is
5469 If the mode of this shift is not the mode of the outer shift,
5470 we can't do this if either shift is ASHIFTRT or ROTATE.
5472 Finally, we can't do any of these if the mode is too wide
5473 unless the codes are the same.
5475 Handle the case where the shift codes are the same
5478 if (code
== first_code
)
5480 if (GET_MODE (varop
) != result_mode
5481 && (code
== ASHIFTRT
|| code
== ROTATE
))
5484 count
+= first_count
;
5485 varop
= XEXP (varop
, 0);
5489 if (code
== ASHIFTRT
5490 || (code
== ROTATE
&& first_code
== ASHIFTRT
)
5491 || GET_MODE_BITSIZE (mode
) > HOST_BITS_PER_INT
5492 || (GET_MODE (varop
) != result_mode
5493 && (first_code
== ASHIFTRT
|| first_code
== ROTATE
5494 || code
== ROTATE
)))
5497 /* To compute the mask to apply after the shift, shift the
5498 significant bits of the inner shift the same way the
5499 outer shift will. */
5501 mask_rtx
= gen_rtx (CONST_INT
, VOIDmode
,
5502 significant_bits (varop
, GET_MODE (varop
)));
5505 = simplify_binary_operation (code
, result_mode
, mask_rtx
,
5506 gen_rtx (CONST_INT
, VOIDmode
,
5509 /* Give up if we can't compute an outer operation to use. */
5511 || GET_CODE (mask_rtx
) != CONST_INT
5512 || ! merge_outer_ops (&outer_op
, &outer_const
, AND
,
5514 result_mode
, &complement_p
))
5517 /* If the shifts are in the same direction, we add the
5518 counts. Otherwise, we subtract them. */
5519 if ((code
== ASHIFTRT
|| code
== LSHIFTRT
)
5520 == (first_code
== ASHIFTRT
|| first_code
== LSHIFTRT
))
5521 count
+= first_count
;
5523 count
-= first_count
;
5525 /* If COUNT is positive, the new shift is usually CODE,
5526 except for the two exceptions below, in which case it is
5527 FIRST_CODE. If the count is negative, FIRST_CODE should
5530 && ((first_code
== ROTATE
&& code
== ASHIFT
)
5531 || (first_code
== ASHIFTRT
&& code
== LSHIFTRT
)))
5534 code
= first_code
, count
= - count
;
5536 varop
= XEXP (varop
, 0);
5540 /* If we have (A << B << C) for any shift, we can convert this to
5541 (A << C << B). This wins if A is a constant. Only try this if
5542 B is not a constant. */
5544 else if (GET_CODE (varop
) == code
5545 && GET_CODE (XEXP (varop
, 1)) != CONST_INT
5547 = simplify_binary_operation (code
, mode
,
5553 varop
= gen_rtx_combine (code
, mode
, new, XEXP (varop
, 1));
5560 /* Make this fit the case below. */
5561 varop
= gen_rtx_combine (XOR
, mode
, XEXP (varop
, 0),
5562 gen_rtx (CONST_INT
, VOIDmode
,
5563 GET_MODE_MASK (mode
)));
5569 /* If we have (xshiftrt (ior (plus X (const_int -1)) X) C)
5570 with C the size of VAROP - 1 and the shift is logical if
5571 STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
5572 we have an (le X 0) operation. If we have an arithmetic shift
5573 and STORE_FLAG_VALUE is 1 or we have a logical shift with
5574 STORE_FLAG_VALUE of -1, we have a (neg (le X 0)) operation. */
5576 if (GET_CODE (varop
) == IOR
&& GET_CODE (XEXP (varop
, 0)) == PLUS
5577 && XEXP (XEXP (varop
, 0), 1) == constm1_rtx
5578 && (STORE_FLAG_VALUE
== 1 || STORE_FLAG_VALUE
== -1)
5579 && (code
== LSHIFTRT
|| code
== ASHIFTRT
)
5580 && count
== GET_MODE_BITSIZE (GET_MODE (varop
)) - 1
5581 && rtx_equal_p (XEXP (XEXP (varop
, 0), 0), XEXP (varop
, 1)))
5584 varop
= gen_rtx_combine (LE
, GET_MODE (varop
), XEXP (varop
, 1),
5587 if (STORE_FLAG_VALUE
== 1 ? code
== ASHIFTRT
: code
== LSHIFTRT
)
5588 varop
= gen_rtx_combine (NEG
, GET_MODE (varop
), varop
);
5593 /* If we have (shift (logical)), move the logical to the outside
5594 to allow it to possibly combine with another logical and the
5595 shift to combine with another shift. This also canonicalizes to
5596 what a ZERO_EXTRACT looks like. Also, some machines have
5597 (and (shift)) insns. */
5599 if (GET_CODE (XEXP (varop
, 1)) == CONST_INT
5600 && (new = simplify_binary_operation (code
, result_mode
,
5605 && merge_outer_ops (&outer_op
, &outer_const
, GET_CODE (varop
),
5606 INTVAL (new), result_mode
, &complement_p
))
5608 varop
= XEXP (varop
, 0);
5612 /* If we can't do that, try to simplify the shift in each arm of the
5613 logical expression, make a new logical expression, and apply
5614 the inverse distributive law. */
5616 rtx lhs
= simplify_shift_const (0, code
, result_mode
,
5617 XEXP (varop
, 0), count
);
5618 rtx rhs
= simplify_shift_const (0, code
, result_mode
,
5619 XEXP (varop
, 1), count
);
5621 varop
= gen_binary (GET_CODE (varop
), result_mode
, lhs
, rhs
);
5622 varop
= apply_distributive_law (varop
);
5629 /* convert (lshift (eq FOO 0) C) to (xor FOO 1) if STORE_FLAG_VALUE
5630 says that the sign bit can be tested, FOO has mode MODE, C is
5631 GET_MODE_BITSIZE (MODE) - 1, and FOO has only the low-order bit
5634 && XEXP (varop
, 1) == const0_rtx
5635 && GET_MODE (XEXP (varop
, 0)) == result_mode
5636 && count
== GET_MODE_BITSIZE (result_mode
) - 1
5637 && GET_MODE_BITSIZE (result_mode
) <= HOST_BITS_PER_INT
5638 && ((STORE_FLAG_VALUE
5639 & (1 << (GET_MODE_BITSIZE (result_mode
) - 1))))
5640 && significant_bits (XEXP (varop
, 0), result_mode
) == 1
5641 && merge_outer_ops (&outer_op
, &outer_const
, XOR
, 1,
5642 result_mode
, &complement_p
))
5644 varop
= XEXP (varop
, 0);
5651 /* If we are doing an arithmetic right shift of something known
5652 to be -1 or 0, we don't need the shift. */
5653 if (code
== ASHIFTRT
5654 && significant_bits (XEXP (varop
, 0), result_mode
) == 1)
5660 /* NEG commutes with ASHIFT since it is multiplication. Move the
5661 NEG outside to allow shifts to combine. */
5663 && merge_outer_ops (&outer_op
, &outer_const
, NEG
, 0,
5664 result_mode
, &complement_p
))
5666 varop
= XEXP (varop
, 0);
5672 /* Similar to case above. If X is 0 or 1 then X - 1 is -1 or 0. */
5673 if (XEXP (varop
, 1) == constm1_rtx
&& code
== ASHIFTRT
5674 && significant_bits (XEXP (varop
, 0), result_mode
) == 1)
5680 /* If we have the same operands as above but we are shifting the
5681 sign bit into the low-order bit, we are exclusive-or'ing
5682 the operand of the PLUS with a one. */
5683 if (code
== LSHIFTRT
&& count
== GET_MODE_BITSIZE (result_mode
) - 1
5684 && XEXP (varop
, 1) == constm1_rtx
5685 && significant_bits (XEXP (varop
, 0), result_mode
) == 1
5686 && merge_outer_ops (&outer_op
, &outer_const
, XOR
, 1,
5687 result_mode
, &complement_p
))
5690 varop
= XEXP (varop
, 0);
5694 /* (ashift (plus foo C) N) is (plus (ashift foo N) C'). */
5696 && GET_CODE (XEXP (varop
, 1)) == CONST_INT
5697 && (new = simplify_binary_operation (ASHIFT
, result_mode
,
5702 && merge_outer_ops (&outer_op
, &outer_const
, PLUS
,
5703 INTVAL (new), result_mode
, &complement_p
))
5705 varop
= XEXP (varop
, 0);
5711 /* If we have (xshiftrt (minus (ashiftrt X C) X) C)
5712 with C the size of VAROP - 1 and the shift is logical if
5713 STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
5714 we have a (gt X 0) operation. If the shift is arithmetic with
5715 STORE_FLAG_VALUE of 1 or logical with STORE_FLAG_VALUE == -1,
5716 we have a (neg (gt X 0)) operation. */
5718 if (GET_CODE (XEXP (varop
, 0)) == ASHIFTRT
5719 && count
== GET_MODE_BITSIZE (GET_MODE (varop
)) - 1
5720 && (STORE_FLAG_VALUE
== 1 || STORE_FLAG_VALUE
== -1)
5721 && (code
== LSHIFTRT
|| code
== ASHIFTRT
)
5722 && GET_CODE (XEXP (XEXP (varop
, 0), 1)) == CONST_INT
5723 && INTVAL (XEXP (XEXP (varop
, 0), 1)) == count
5724 && rtx_equal_p (XEXP (XEXP (varop
, 0), 0), XEXP (varop
, 1)))
5727 varop
= gen_rtx_combine (GT
, GET_MODE (varop
), XEXP (varop
, 1),
5730 if (STORE_FLAG_VALUE
== 1 ? code
== ASHIFTRT
: code
== LSHIFTRT
)
5731 varop
= gen_rtx_combine (NEG
, GET_MODE (varop
), varop
);
5741 /* We need to determine what mode to do the shift in. If the shift is
5742 a ASHIFTRT or ROTATE, we must always do it in the mode it was originally
5743 done in. Otherwise, we can do it in MODE, the widest mode encountered.
5744 The code we care about is that of the shift that will actually be done,
5745 not the shift that was originally requested. */
5746 shift_mode
= (code
== ASHIFTRT
|| code
== ROTATE
? result_mode
: mode
);
5748 /* We have now finished analyzing the shift. The result should be
5749 a shift of type CODE with SHIFT_MODE shifting VAROP COUNT places. If
5750 OUTER_OP is non-NIL, it is an operation that needs to be applied
5751 to the result of the shift. OUTER_CONST is the relevant constant,
5752 but we must turn off all bits turned off in the shift.
5754 If we were passed a value for X, see if we can use any pieces of
5755 it. If not, make new rtx. */
5757 if (x
&& GET_RTX_CLASS (GET_CODE (x
)) == '2'
5758 && GET_CODE (XEXP (x
, 1)) == CONST_INT
5759 && INTVAL (XEXP (x
, 1)) == count
)
5760 const_rtx
= XEXP (x
, 1);
5762 const_rtx
= gen_rtx (CONST_INT
, VOIDmode
, count
);
5764 if (x
&& GET_CODE (XEXP (x
, 0)) == SUBREG
5765 && GET_MODE (XEXP (x
, 0)) == shift_mode
5766 && SUBREG_REG (XEXP (x
, 0)) == varop
)
5767 varop
= XEXP (x
, 0);
5768 else if (GET_MODE (varop
) != shift_mode
)
5769 varop
= gen_lowpart_for_combine (shift_mode
, varop
);
5771 /* If we can't make the SUBREG, try to return what we were given. */
5772 if (GET_CODE (varop
) == CLOBBER
)
5773 return x
? x
: varop
;
5775 new = simplify_binary_operation (code
, shift_mode
, varop
, const_rtx
);
/* Reuse X in place when its code and mode already match; otherwise
   build a fresh shift rtx.  */
5780 if (x
== 0 || GET_CODE (x
) != code
|| GET_MODE (x
) != shift_mode
)
5781 x
= gen_rtx_combine (code
, shift_mode
, varop
, const_rtx
);
5783 SUBST (XEXP (x
, 0), varop
);
5784 SUBST (XEXP (x
, 1), const_rtx
);
5787 /* If we were doing a LSHIFTRT in a wider mode than it was originally,
5788 turn off all the bits that the shift would have turned off. */
5789 if (orig_code
== LSHIFTRT
&& result_mode
!= shift_mode
)
5790 x
= simplify_and_const_int (0, shift_mode
, x
,
5791 GET_MODE_MASK (result_mode
) >> orig_count
);
5793 /* Do the remainder of the processing in RESULT_MODE. */
5794 x
= gen_lowpart_for_combine (result_mode
, x
);
5796 /* If COMPLEMENT_P is set, we have to complement X before doing the outer
5799 x
= gen_unary (NOT
, result_mode
, x
);
/* Finally apply any deferred outer operation recorded by
   merge_outer_ops.  */
5801 if (outer_op
!= NIL
)
5803 if (GET_MODE_BITSIZE (result_mode
) < HOST_BITS_PER_INT
)
5804 outer_const
&= GET_MODE_MASK (result_mode
);
5806 if (outer_op
== AND
)
5807 x
= simplify_and_const_int (0, result_mode
, x
, outer_const
);
5808 else if (outer_op
== SET
)
5809 /* This means that we have determined that the result is
5810 equivalent to a constant. This should be rare. */
5811 x
= gen_rtx (CONST_INT
, VOIDmode
, outer_const
);
5812 else if (GET_RTX_CLASS (outer_op
) == '1')
5813 x
= gen_unary (outer_op
, result_mode
, x
);
5815 x
= gen_binary (outer_op
, result_mode
, x
,
5816 gen_rtx (CONST_INT
, VOIDmode
, outer_const
));
5822 /* Like recog, but we receive the address of a pointer to a new pattern.
5823 We try to match the rtx that the pointer points to.
5824 If that fails, we may try to modify or replace the pattern,
5825 storing the replacement into the same pointer object.
5827 Modifications include deletion or addition of CLOBBERs.
5829 PNOTES is a pointer to a location where any REG_UNUSED notes added for
5830 the CLOBBERs are placed.
5832 The value is the final insn code from the pattern ultimately matched,
/* NOTE(review): interior lines are missing from this extraction (embedded
   line numbers jump, e.g. 5843 -> 5847, 5864 -> 5868); text preserved
   verbatim, comments only added or repaired.  */
5836 recog_for_combine (pnewpat
, insn
, pnotes
)
5841 register rtx pat
= *pnewpat
;
5842 int insn_code_number
;
5843 int num_clobbers_to_add
= 0;
5847 /* Is the result of combination a valid instruction? */
5848 insn_code_number
= recog (pat
, insn
, &num_clobbers_to_add
);
5850 /* If it isn't, there is the possibility that we previously had an insn
5851 that clobbered some register as a side effect, but the combined
5852 insn doesn't need to do that. So try once more without the clobbers
5853 unless this represents an ASM insn. */
5855 if (insn_code_number
< 0 && ! check_asm_operands (pat
)
5856 && GET_CODE (pat
) == PARALLEL
)
/* Compact the PARALLEL by copying the non-CLOBBER elements down.  */
5860 for (pos
= 0, i
= 0; i
< XVECLEN (pat
, 0); i
++)
5861 if (GET_CODE (XVECEXP (pat
, 0, i
)) != CLOBBER
)
5864 SUBST (XVECEXP (pat
, 0, pos
), XVECEXP (pat
, 0, i
));
5868 SUBST_INT (XVECLEN (pat
, 0), pos
);
5871 pat
= XVECEXP (pat
, 0, 0);
5873 insn_code_number
= recog (pat
, insn
, &num_clobbers_to_add
);
5876 /* If we had any clobbers to add, make a new pattern that contains
5877 them. Then check to make sure that all of them are dead. */
5878 if (num_clobbers_to_add
)
5880 rtx newpat
= gen_rtx (PARALLEL
, VOIDmode
,
5881 gen_rtvec (GET_CODE (pat
) == PARALLEL
5882 ? XVECLEN (pat
, 0) + num_clobbers_to_add
5883 : num_clobbers_to_add
+ 1));
5885 if (GET_CODE (pat
) == PARALLEL
)
5886 for (i
= 0; i
< XVECLEN (pat
, 0); i
++)
5887 XVECEXP (newpat
, 0, i
) = XVECEXP (pat
, 0, i
);
5889 XVECEXP (newpat
, 0, 0) = pat
;
5891 add_clobbers (newpat
, insn_code_number
);
/* For each clobber just added, record a REG_UNUSED note if it
   clobbers a register that is dead here.  */
5893 for (i
= XVECLEN (newpat
, 0) - num_clobbers_to_add
;
5894 i
< XVECLEN (newpat
, 0); i
++)
5896 if (GET_CODE (XEXP (XVECEXP (newpat
, 0, i
), 0)) == REG
5897 && ! reg_dead_at_p (XEXP (XVECEXP (newpat
, 0, i
), 0), insn
))
5899 notes
= gen_rtx (EXPR_LIST
, REG_UNUSED
,
5900 XEXP (XVECEXP (newpat
, 0, i
), 0), notes
);
5908 return insn_code_number
;
5911 /* Like gen_lowpart but for use by combine. In combine it is not possible
5912 to create any new pseudoregs. However, it is safe to create
5913 invalid memory addresses, because combine will try to recognize
5914 them and all they will do is make the combine attempt fail.
5916 If for some reason this cannot do its job, an rtx
5917 (clobber (const_int 0)) is returned.
5918 An insn containing that will not be recognized. */
/* NOTE(review): interior lines are missing from this extraction
   (embedded line numbers jump, e.g. 5929 -> 5932); text preserved
   verbatim, comments only added or repaired.  */
5923 gen_lowpart_for_combine (mode
, x
)
5924 enum machine_mode mode
;
5929 if (GET_MODE (x
) == mode
)
/* Refuse multiword results: combine cannot create new pseudos.  */
5932 if (GET_MODE_SIZE (mode
) > UNITS_PER_WORD
)
5933 return gen_rtx (CLOBBER
, GET_MODE (x
), const0_rtx
);
5935 /* X might be a paradoxical (subreg (mem)). In that case, gen_lowpart
5936 won't know what to do. So we will strip off the SUBREG here and
5937 process normally. */
5938 if (GET_CODE (x
) == SUBREG
&& GET_CODE (SUBREG_REG (x
)) == MEM
)
5941 if (GET_MODE (x
) == mode
)
5945 result
= gen_lowpart_common (mode
, x
);
5949 if (GET_CODE (x
) == MEM
)
5951 register int offset
= 0;
5954 /* Refuse to work on a volatile memory ref or one with a mode-dependent
5956 if (MEM_VOLATILE_P (x
) || mode_dependent_address_p (XEXP (x
, 0)))
5957 return gen_rtx (CLOBBER
, GET_MODE (x
), const0_rtx
);
5959 /* If we want to refer to something bigger than the original memref,
5960 generate a perverse subreg instead. That will force a reload
5961 of the original memref X. */
5962 if (GET_MODE_SIZE (GET_MODE (x
)) < GET_MODE_SIZE (mode
))
5963 return gen_rtx (SUBREG
, mode
, x
, 0);
5965 #if WORDS_BIG_ENDIAN
5966 offset
= (MAX (GET_MODE_SIZE (GET_MODE (x
)), UNITS_PER_WORD
)
5967 - MAX (GET_MODE_SIZE (mode
), UNITS_PER_WORD
));
5969 #if BYTES_BIG_ENDIAN
5970 /* Adjust the address so that the address-after-the-data
5972 offset
-= (MIN (UNITS_PER_WORD
, GET_MODE_SIZE (mode
))
5973 - MIN (UNITS_PER_WORD
, GET_MODE_SIZE (GET_MODE (x
))));
5975 new = gen_rtx (MEM
, mode
, plus_constant (XEXP (x
, 0), offset
));
/* Carry over the memory attributes of X.  */
5976 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x
);
5977 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x
);
5978 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x
);
5982 /* If X is a comparison operator, rewrite it in a new mode. This
5983 probably won't match, but may allow further simplifications. */
5984 else if (GET_RTX_CLASS (GET_CODE (x
)) == '<')
5985 return gen_rtx_combine (GET_CODE (x
), mode
, XEXP (x
, 0), XEXP (x
, 1));
5987 /* If we couldn't simplify X any other way, just enclose it in a
5988 SUBREG. Normally, this SUBREG won't match, but some patterns may
5989 include an explicit SUBREG or we may simplify it further in combine. */
5991 return gen_rtx (SUBREG
, mode
, x
, 0);
5994 /* Make an rtx expression. This is a subset of gen_rtx and only supports
5995 expressions of 1, 2, or 3 operands, each of which are rtx expressions.
5997 If the identical expression was previously in the insn (in the undobuf),
5998 it will be returned. Only if it is not found will a new expression
/* NOTE(review): interior lines are missing from this extraction
   (embedded line numbers jump, e.g. 6019 -> 6021, 6053 -> 6056);
   text preserved verbatim, comments only added.  */
6003 gen_rtx_combine (va_alist
)
6008 enum machine_mode mode
;
/* Pull the rtx code and mode off the (old-style) varargs list, then
   look up the arity and operand format for that code.  */
6016 code
= va_arg (p
, enum rtx_code
);
6017 mode
= va_arg (p
, enum machine_mode
);
6018 n_args
= GET_RTX_LENGTH (code
);
6019 fmt
= GET_RTX_FORMAT (code
);
6021 if (n_args
== 0 || n_args
> 3)
6024 /* Get each arg and verify that it is supposed to be an expression. */
6025 for (j
= 0; j
< n_args
; j
++)
6030 args
[j
] = va_arg (p
, rtx
);
6033 /* See if this is in undobuf. Be sure we don't use objects that came
6034 from another insn; this could produce circular rtl structures. */
6036 for (i
= previous_num_undos
; i
< undobuf
.num_undo
; i
++)
6037 if (!undobuf
.undo
[i
].is_int
6038 && GET_CODE (undobuf
.undo
[i
].old_contents
) == code
6039 && GET_MODE (undobuf
.undo
[i
].old_contents
) == mode
)
6041 for (j
= 0; j
< n_args
; j
++)
6042 if (XEXP (undobuf
.undo
[i
].old_contents
, j
) != args
[j
])
/* All operands matched an undone expression: reuse it.  */
6046 return undobuf
.undo
[i
].old_contents
;
6049 /* Otherwise make a new rtx. We know we have 1, 2, or 3 args.
6050 Use rtx_alloc instead of gen_rtx because it's faster on RISC. */
6051 rt
= rtx_alloc (code
);
6052 PUT_MODE (rt
, mode
);
6053 XEXP (rt
, 0) = args
[0];
6056 XEXP (rt
, 1) = args
[1];
6058 XEXP (rt
, 2) = args
[2];
6063 /* These routines make binary and unary operations by first seeing if they
6064 fold; if not, a new expression is allocated. */
/* Make a binary rtx of CODE in MODE, folding first if possible.
   NOTE(review): interior lines are missing from this extraction
   (embedded line numbers jump, e.g. 6082 -> 6087); text preserved
   verbatim, comments only added.  */
6067 gen_binary (code
, mode
, op0
, op1
)
6069 enum machine_mode mode
;
/* Comparisons fold through simplify_relational_operation, using the
   mode of whichever operand has one; everything else folds through
   simplify_binary_operation.  */
6074 if (GET_RTX_CLASS (code
) == '<')
6076 enum machine_mode op_mode
= GET_MODE (op0
);
6077 if (op_mode
== VOIDmode
)
6078 op_mode
= GET_MODE (op1
);
6079 result
= simplify_relational_operation (code
, op_mode
, op0
, op1
);
6082 result
= simplify_binary_operation (code
, mode
, op0
, op1
);
6087 /* Put complex operands first and constants second. */
6088 if (GET_RTX_CLASS (code
) == 'c'
6089 && ((CONSTANT_P (op0
) && GET_CODE (op1
) != CONST_INT
)
6090 || (GET_RTX_CLASS (GET_CODE (op0
)) == 'o'
6091 && GET_RTX_CLASS (GET_CODE (op1
)) != 'o')
6092 || (GET_CODE (op0
) == SUBREG
6093 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (op0
))) == 'o'
6094 && GET_RTX_CLASS (GET_CODE (op1
)) != 'o')))
6095 return gen_rtx_combine (code
, mode
, op1
, op0
);
6097 return gen_rtx_combine (code
, mode
, op0
, op1
);
/* Make a unary rtx of CODE in MODE, folding first if possible.
   NOTE(review): interior lines are missing from this extraction
   (embedded line numbers jump, 6106 -> 6111); text preserved
   verbatim, comments only added.  */
6101 gen_unary (code
, mode
, op0
)
6103 enum machine_mode mode
;
6106 rtx result
= simplify_unary_operation (code
, mode
, op0
, mode
);
/* No fold was possible; allocate (or reuse from undobuf) a new rtx.  */
6111 return gen_rtx_combine (code
, mode
, op0
);
6114 /* Simplify a comparison between *POP0 and *POP1 where CODE is the
6115 comparison code that will be tested.
6117 The result is a possibly different comparison code to use. *POP0 and
6118 *POP1 may be updated.
6120 It is possible that we might detect that a comparison is either always
6121 true or always false. However, we do not perform general constant
6122 folding in combine, so this knowledge isn't useful. Such tautologies
6123 should have been detected earlier. Hence we ignore all such cases. */
6125 static enum rtx_code
6126 simplify_comparison (code
, pop0
, pop1
)
6135 enum machine_mode mode
, tmode
;
6137 /* Try a few ways of applying the same transformation to both operands. */
6140 /* If both operands are the same constant shift, see if we can ignore the
6141 shift. We can if the shift is a rotate or if the bits shifted out of
6142 this shift are not significant for either input and if the type of
6143 comparison is compatible with the shift. */
6144 if (GET_CODE (op0
) == GET_CODE (op1
)
6145 && GET_MODE_BITSIZE (GET_MODE (op0
)) <= HOST_BITS_PER_INT
6146 && ((GET_CODE (op0
) == ROTATE
&& (code
== NE
|| code
== EQ
))
6147 || ((GET_CODE (op0
) == LSHIFTRT
6148 || GET_CODE (op0
) == ASHIFT
|| GET_CODE (op0
) == LSHIFT
)
6149 && (code
!= GT
&& code
!= LT
&& code
!= GE
&& code
!= LE
))
6150 || (GET_CODE (op0
) == ASHIFTRT
6151 && (code
!= GTU
&& code
!= LTU
6152 && code
!= GEU
&& code
!= GEU
)))
6153 && GET_CODE (XEXP (op0
, 1)) == CONST_INT
6154 && INTVAL (XEXP (op0
, 1)) >= 0
6155 && INTVAL (XEXP (op0
, 1)) < HOST_BITS_PER_INT
6156 && XEXP (op0
, 1) == XEXP (op1
, 1))
6158 enum machine_mode mode
= GET_MODE (op0
);
6159 unsigned mask
= GET_MODE_MASK (mode
);
6160 int shift_count
= INTVAL (XEXP (op0
, 1));
6162 if (GET_CODE (op0
) == LSHIFTRT
|| GET_CODE (op0
) == ASHIFTRT
)
6163 mask
&= (mask
>> shift_count
) << shift_count
;
6164 else if (GET_CODE (op0
) == ASHIFT
|| GET_CODE (op0
) == LSHIFT
)
6165 mask
= (mask
& (mask
<< shift_count
)) >> shift_count
;
6167 if ((significant_bits (XEXP (op0
, 0), mode
) & ~ mask
) == 0
6168 && (significant_bits (XEXP (op1
, 0), mode
) & ~ mask
) == 0)
6169 op0
= XEXP (op0
, 0), op1
= XEXP (op1
, 0);
6174 /* If both operands are AND's of a paradoxical SUBREG by constant, the
6175 SUBREGs are of the same mode, and, in both cases, the AND would
6176 be redundant if the comparison was done in the narrower mode,
6177 do the comparison in the narrower mode (e.g., we are AND'ing with 1
6178 and the operand's significant bits are 0xffffff01; in that case if
6179 we only care about QImode, we don't need the AND). This case occurs
6180 if the output mode of an scc insn is not SImode and
6181 STORE_FLAG_VALUE == 1 (e.g., the 386). */
6183 else if (GET_CODE (op0
) == AND
&& GET_CODE (op1
) == AND
6184 && GET_CODE (XEXP (op0
, 1)) == CONST_INT
6185 && GET_CODE (XEXP (op1
, 1)) == CONST_INT
6186 && GET_CODE (XEXP (op0
, 0)) == SUBREG
6187 && GET_CODE (XEXP (op1
, 0)) == SUBREG
6188 && (GET_MODE_SIZE (GET_MODE (XEXP (op0
, 0)))
6189 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (op0
, 0)))))
6190 && (GET_MODE (SUBREG_REG (XEXP (op0
, 0)))
6191 == GET_MODE (SUBREG_REG (XEXP (op1
, 0))))
6192 && (significant_bits (SUBREG_REG (XEXP (op0
, 0)),
6193 GET_MODE (SUBREG_REG (XEXP (op0
, 0))))
6194 & ~ INTVAL (XEXP (op0
, 1))) == 0
6195 && (significant_bits (SUBREG_REG (XEXP (op1
, 0)),
6196 GET_MODE (SUBREG_REG (XEXP (op1
, 0))))
6197 & ~ INTVAL (XEXP (op1
, 1))) == 0)
6199 op0
= SUBREG_REG (XEXP (op0
, 0));
6200 op1
= SUBREG_REG (XEXP (op1
, 0));
6202 /* the resulting comparison is always unsigned since we masked off
6203 the original sign bit. */
6204 code
= unsigned_condition (code
);
6210 /* If the first operand is a constant, swap the operands and adjust the
6211 comparison code appropriately. */
6212 if (CONSTANT_P (op0
))
6214 tem
= op0
, op0
= op1
, op1
= tem
;
6215 code
= swap_condition (code
);
6218 /* We now enter a loop during which we will try to simplify the comparison.
6219 For the most part, we only are concerned with comparisons with zero,
6220 but some things may really be comparisons with zero but not start
6221 out looking that way. */
6223 while (GET_CODE (op1
) == CONST_INT
)
6225 enum machine_mode mode
= GET_MODE (op0
);
6226 int mode_width
= GET_MODE_BITSIZE (mode
);
6227 unsigned mask
= GET_MODE_MASK (mode
);
6228 int equality_comparison_p
;
6229 int sign_bit_comparison_p
;
6230 int unsigned_comparison_p
;
6233 /* We only want to handle integral modes. This catches VOIDmode,
6234 CCmode, and the floating-point modes. An exception is that we
6235 can handle VOIDmode if OP0 is a COMPARE or a comparison
6238 if (GET_MODE_CLASS (mode
) != MODE_INT
6239 && ! (mode
== VOIDmode
6240 && (GET_CODE (op0
) == COMPARE
6241 || GET_RTX_CLASS (GET_CODE (op0
)) == '<')))
6244 /* Get the constant we are comparing against and turn off all bits
6245 not on in our mode. */
6246 const_op
= INTVAL (op1
);
6247 if (mode_width
<= HOST_BITS_PER_INT
)
6248 const_op
&= GET_MODE_MASK (mode
);
6250 /* If we are comparing against a constant power of two and the value
6251 being compared has only that single significant bit (e.g., it was
6252 `and'ed with that bit), we can replace this with a comparison
6255 && (code
== EQ
|| code
== NE
|| code
== GE
|| code
== GEU
6256 || code
== LT
|| code
== LTU
)
6257 && mode_width
<= HOST_BITS_PER_INT
6258 && exact_log2 (const_op
) >= 0
6259 && significant_bits (op0
, mode
) == const_op
)
6261 code
= (code
== EQ
|| code
== GE
|| code
== GEU
? NE
: EQ
);
6262 op1
= const0_rtx
, const_op
= 0;
6265 /* Do some canonicalizations based on the comparison code. We prefer
6266 comparisons against zero and then prefer equality comparisons. */
6271 /* < 1 is equivalent to <= 0 */
6277 /* ... fall through to LE case below. */
6283 /* <= -1 is equivalent to < 0 */
6284 if (op1
== constm1_rtx
)
6285 op1
= const0_rtx
, const_op
= 0, code
= LT
;
6287 /* If we are doing a <= 0 comparison on a value known to have
6288 a zero sign bit, we can replace this with == 0. */
6289 else if (const_op
== 0
6290 && mode_width
<= HOST_BITS_PER_INT
6291 && (significant_bits (op0
, mode
)
6292 & (1 << (mode_width
- 1))) == 0)
6297 /* >= 1 is equivalent to > 0. */
6303 /* ... fall through to GT below. */
6309 /* > -1 is equivalent to >= 0. */
6310 if (op1
== constm1_rtx
)
6311 op1
= const0_rtx
, const_op
= 0, code
= GE
;
6313 /* If we are doing a > 0 comparison on a value known to have
6314 a zero sign bit, we can replace this with != 0. */
6315 else if (const_op
== 0
6316 && mode_width
<= HOST_BITS_PER_INT
6317 && (significant_bits (op0
, mode
)
6318 & (1 << (mode_width
- 1))) == 0)
6323 /* unsigned >= 1 is equivalent to != 0 */
6325 op1
= const0_rtx
, const_op
= 0, code
= NE
;
6329 /* unsigned < 1 is equivalent to == 0 */
6331 op1
= const0_rtx
, const_op
= 0, code
= EQ
;
6335 /* unsigned <= 0 is equivalent to == 0 */
6341 /* unsigned > 0 is equivalent to != 0 */
6347 /* Compute some predicates to simplify code below. */
6349 equality_comparison_p
= (code
== EQ
|| code
== NE
);
6350 sign_bit_comparison_p
= ((code
== LT
|| code
== GE
) && const_op
== 0);
6351 unsigned_comparison_p
= (code
== LTU
|| code
== LEU
|| code
== GTU
6354 /* Now try cases based on the opcode of OP0. If none of the cases
6355 does a "continue", we exit this loop immediately after the
6358 switch (GET_CODE (op0
))
6361 /* If we are extracting a single bit from a variable position in
6362 a constant that has only a single bit set and are comparing it
6363 with zero, we can convert this into an equality comparison
6364 between the position and the location of the single bit. We can't
6365 do this if bit endian and we don't have an extzv since we then
6366 can't know what mode to use for the endianness adjustment. */
6368 #if ! BITS_BIG_ENDIAN || defined (HAVE_extzv)
6369 if (GET_CODE (XEXP (op0
, 0)) == CONST_INT
6370 && XEXP (op0
, 1) == const1_rtx
6371 && equality_comparison_p
&& const_op
== 0
6372 && (i
= exact_log2 (INTVAL (XEXP (op0
, 0)))) >= 0)
6375 i
= (GET_MODE_BITSIZE
6376 (insn_operand_mode
[(int) CODE_FOR_extzv
][1]) - 1 - i
);
6379 op0
= XEXP (op0
, 2);
6380 op1
= gen_rtx (CONST_INT
, VOIDmode
, i
);
6383 /* Result is nonzero iff shift count is equal to I. */
6384 code
= reverse_condition (code
);
6389 /* ... fall through ... */
6392 tem
= expand_compound_operation (op0
);
6401 /* If testing for equality, we can take the NOT of the constant. */
6402 if (equality_comparison_p
6403 && (tem
= simplify_unary_operation (NOT
, mode
, op1
, mode
)) != 0)
6405 op0
= XEXP (op0
, 0);
6410 /* If just looking at the sign bit, reverse the sense of the
6412 if (sign_bit_comparison_p
)
6414 op0
= XEXP (op0
, 0);
6415 code
= (code
== GE
? LT
: GE
);
6421 /* If testing for equality, we can take the NEG of the constant. */
6422 if (equality_comparison_p
6423 && (tem
= simplify_unary_operation (NEG
, mode
, op1
, mode
)) != 0)
6425 op0
= XEXP (op0
, 0);
6430 /* The remaining cases only apply to comparisons with zero. */
6434 /* When X is ABS or is known positive,
6435 (neg X) is < 0 if and only if X != 0. */
6437 if (sign_bit_comparison_p
6438 && (GET_CODE (XEXP (op0
, 0)) == ABS
6439 || (mode_width
<= HOST_BITS_PER_INT
6440 && (significant_bits (XEXP (op0
, 0), mode
)
6441 & (1 << (mode_width
- 1))) == 0)))
6443 op0
= XEXP (op0
, 0);
6444 code
= (code
== LT
? NE
: EQ
);
6448 /* If we have NEG of something that is the result of a
6449 SIGN_EXTEND, SIGN_EXTRACT, or ASHIFTRT, we know that the
6450 two high-order bits must be the same and hence that
6451 "(-a) < 0" is equivalent to "a > 0". Otherwise, we can't
6453 if (GET_CODE (XEXP (op0
, 0)) == SIGN_EXTEND
6454 || (GET_CODE (XEXP (op0
, 0)) == SIGN_EXTRACT
6455 && GET_CODE (XEXP (XEXP (op0
, 0), 1)) == CONST_INT
6456 && (INTVAL (XEXP (XEXP (op0
, 0), 1))
6457 < GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (op0
, 0), 0)))))
6458 || (GET_CODE (XEXP (op0
, 0)) == ASHIFTRT
6459 && GET_CODE (XEXP (XEXP (op0
, 0), 1)) == CONST_INT
6460 && XEXP (XEXP (op0
, 0), 1) != const0_rtx
)
6461 || ((tem
= get_last_value (XEXP (op0
, 0))) != 0
6462 && (GET_CODE (tem
) == SIGN_EXTEND
6463 || (GET_CODE (tem
) == SIGN_EXTRACT
6464 && GET_CODE (XEXP (tem
, 1)) == CONST_INT
6465 && (INTVAL (XEXP (tem
, 1))
6466 < GET_MODE_BITSIZE (GET_MODE (XEXP (tem
, 0)))))
6467 || (GET_CODE (tem
) == ASHIFTRT
6468 && GET_CODE (XEXP (tem
, 1)) == CONST_INT
6469 && XEXP (tem
, 1) != const0_rtx
))))
6471 op0
= XEXP (op0
, 0);
6472 code
= swap_condition (code
);
6478 /* If we are testing equality and our count is a constant, we
6479 can perform the inverse operation on our RHS. */
6480 if (equality_comparison_p
&& GET_CODE (XEXP (op0
, 1)) == CONST_INT
6481 && (tem
= simplify_binary_operation (ROTATERT
, mode
,
6482 op1
, XEXP (op0
, 1))) != 0)
6484 op0
= XEXP (op0
, 0);
6489 /* If we are doing a < 0 or >= 0 comparison, it means we are testing
6490 a particular bit. Convert it to an AND of a constant of that
6491 bit. This will be converted into a ZERO_EXTRACT. */
6492 if (const_op
== 0 && sign_bit_comparison_p
6493 && GET_CODE (XEXP (op0
, 1)) == CONST_INT
6494 && mode_width
<= HOST_BITS_PER_INT
)
6496 op0
= simplify_and_const_int (0, mode
, XEXP (op0
, 0),
6497 1 << (mode_width
- 1
6498 - INTVAL (XEXP (op0
, 1))));
6499 code
= (code
== LT
? NE
: EQ
);
6503 /* ... fall through ... */
6506 /* ABS is ignorable inside an equality comparison with zero. */
6507 if (const_op
== 0 && equality_comparison_p
)
6509 op0
= XEXP (op0
, 0);
6516 /* Can simplify (compare (zero/sign_extend FOO) CONST)
6517 to (compare FOO CONST) if CONST fits in FOO's mode and we
6518 are either testing inequality or have an unsigned comparison
6519 with ZERO_EXTEND or a signed comparison with SIGN_EXTEND. */
6520 if (! unsigned_comparison_p
6521 && (GET_MODE_BITSIZE (GET_MODE (XEXP (op0
, 0)))
6522 <= HOST_BITS_PER_INT
)
6523 && ((unsigned) const_op
6524 < (1 << (GET_MODE_BITSIZE (GET_MODE (XEXP (op0
, 0))) - 1))))
6526 op0
= XEXP (op0
, 0);
6532 /* If the inner mode is smaller and we are extracting the low
6533 part, we can treat the SUBREG as if it were a ZERO_EXTEND. */
6534 if (! subreg_lowpart_p (op0
)
6535 || GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0
))) >= mode_width
)
6538 /* ... fall through ... */
6541 if ((unsigned_comparison_p
|| equality_comparison_p
)
6542 && (GET_MODE_BITSIZE (GET_MODE (XEXP (op0
, 0)))
6543 <= HOST_BITS_PER_INT
)
6544 && ((unsigned) const_op
6545 < GET_MODE_MASK (GET_MODE (XEXP (op0
, 0)))))
6547 op0
= XEXP (op0
, 0);
6553 /* (eq (plus X C1) C2) -> (eq X (minus C2 C1)). We can only do
6554 this for equality comparisons due to pathalogical cases involving
6556 if (equality_comparison_p
&& GET_CODE (XEXP (op0
, 1)) == CONST_INT
6557 && (tem
= simplify_binary_operation (MINUS
, mode
, op1
,
6558 XEXP (op0
, 1))) != 0)
6560 op0
= XEXP (op0
, 0);
6565 /* (plus (abs X) (const_int -1)) is < 0 if and only if X == 0. */
6566 if (const_op
== 0 && XEXP (op0
, 1) == constm1_rtx
6567 && GET_CODE (XEXP (op0
, 0)) == ABS
&& sign_bit_comparison_p
)
6569 op0
= XEXP (XEXP (op0
, 0), 0);
6570 code
= (code
== LT
? EQ
: NE
);
6576 /* The sign bit of (minus (ashiftrt X C) X), where C is the number
6577 of bits in X minus 1, is one iff X > 0. */
6578 if (sign_bit_comparison_p
&& GET_CODE (XEXP (op0
, 0)) == ASHIFTRT
6579 && GET_CODE (XEXP (XEXP (op0
, 0), 1)) == CONST_INT
6580 && INTVAL (XEXP (XEXP (op0
, 0), 1)) == mode_width
- 1
6581 && rtx_equal_p (XEXP (XEXP (op0
, 0), 0), XEXP (op0
, 1)))
6583 op0
= XEXP (op0
, 1);
6584 code
= (code
== GE
? LE
: GT
);
6590 /* (eq (xor A B) C) -> (eq A (xor B C)). This is a simplification
6591 if C is zero or B is a constant. */
6592 if (equality_comparison_p
6593 && 0 != (tem
= simplify_binary_operation (XOR
, mode
,
6594 XEXP (op0
, 1), op1
)))
6596 op0
= XEXP (op0
, 0);
6603 case LT
: case LTU
: case LE
: case LEU
:
6604 case GT
: case GTU
: case GE
: case GEU
:
6605 /* We can't do anything if OP0 is a condition code value, rather
6606 than an actual data value. */
6609 || XEXP (op0
, 0) == cc0_rtx
6611 || GET_MODE_CLASS (GET_MODE (XEXP (op0
, 0))) == MODE_CC
)
6614 /* Get the two operands being compared. */
6615 if (GET_CODE (XEXP (op0
, 0)) == COMPARE
)
6616 tem
= XEXP (XEXP (op0
, 0), 0), tem1
= XEXP (XEXP (op0
, 0), 1);
6618 tem
= XEXP (op0
, 0), tem1
= XEXP (op0
, 1);
6620 /* Check for the cases where we simply want the result of the
6621 earlier test or the opposite of that result. */
6623 || (code
== EQ
&& reversible_comparison_p (op0
))
6624 || (GET_MODE_BITSIZE (GET_MODE (op0
)) <= HOST_BITS_PER_INT
6625 && (STORE_FLAG_VALUE
6626 & (1 << (GET_MODE_BITSIZE (GET_MODE (op0
)) - 1)))
6628 || (code
== GE
&& reversible_comparison_p (op0
)))))
6630 code
= (code
== LT
|| code
== NE
6631 ? GET_CODE (op0
) : reverse_condition (GET_CODE (op0
)));
6632 op0
= tem
, op1
= tem1
;
6638 /* The sign bit of (ior (plus X (const_int -1)) X) is non-zero
6640 if (sign_bit_comparison_p
&& GET_CODE (XEXP (op0
, 0)) == PLUS
6641 && XEXP (XEXP (op0
, 0), 1) == constm1_rtx
6642 && rtx_equal_p (XEXP (XEXP (op0
, 0), 0), XEXP (op0
, 1)))
6644 op0
= XEXP (op0
, 1);
6645 code
= (code
== GE
? GT
: LE
);
6651 /* Convert (and (xshift 1 X) Y) to (and (lshiftrt Y X) 1). This
6652 will be converted to a ZERO_EXTRACT later. */
6653 if (const_op
== 0 && equality_comparison_p
6654 && (GET_CODE (XEXP (op0
, 0)) == ASHIFT
6655 || GET_CODE (XEXP (op0
, 0)) == LSHIFT
)
6656 && XEXP (XEXP (op0
, 0), 0) == const1_rtx
)
6658 op0
= simplify_and_const_int
6659 (op0
, mode
, gen_rtx_combine (LSHIFTRT
, mode
,
6661 XEXP (XEXP (op0
, 0), 1)),
6666 /* If we are comparing (and (lshiftrt X C1) C2) for equality with
6667 zero and X is a comparison and C1 and C2 describe only bits set
6668 in STORE_FLAG_VALUE, we can compare with X. */
6669 if (const_op
== 0 && equality_comparison_p
6670 && mode_width
<= HOST_BITS_PER_INT
6671 && GET_CODE (XEXP (op0
, 1)) == CONST_INT
6672 && GET_CODE (XEXP (op0
, 0)) == LSHIFTRT
6673 && GET_CODE (XEXP (XEXP (op0
, 0), 1)) == CONST_INT
6674 && INTVAL (XEXP (XEXP (op0
, 0), 1)) >= 0
6675 && INTVAL (XEXP (XEXP (op0
, 0), 1)) < HOST_BITS_PER_INT
)
6677 mask
= ((INTVAL (XEXP (op0
, 1)) & GET_MODE_MASK (mode
))
6678 << INTVAL (XEXP (XEXP (op0
, 0), 1)));
6679 if ((~ STORE_FLAG_VALUE
& mask
) == 0
6680 && (GET_RTX_CLASS (GET_CODE (XEXP (XEXP (op0
, 0), 0))) == '<'
6681 || ((tem
= get_last_value (XEXP (XEXP (op0
, 0), 0))) != 0
6682 && GET_RTX_CLASS (GET_CODE (tem
)) == '<')))
6684 op0
= XEXP (XEXP (op0
, 0), 0);
6689 /* If we are doing an equality comparison of an AND of a bit equal
6690 to the sign bit, replace this with a LT or GE comparison of
6691 the underlying value. */
6692 if (equality_comparison_p
6694 && GET_CODE (XEXP (op0
, 1)) == CONST_INT
6695 && mode_width
<= HOST_BITS_PER_INT
6696 && ((INTVAL (XEXP (op0
, 1)) & GET_MODE_MASK (mode
))
6697 == 1 << (mode_width
- 1)))
6699 op0
= XEXP (op0
, 0);
6700 code
= (code
== EQ
? GE
: LT
);
6704 /* If this AND operation is really a ZERO_EXTEND from a narrower
6705 mode, the constant fits within that mode, and this is either an
6706 equality or unsigned comparison, try to do this comparison in
6707 the narrower mode. */
6708 if ((equality_comparison_p
|| unsigned_comparison_p
)
6709 && GET_CODE (XEXP (op0
, 1)) == CONST_INT
6710 && (i
= exact_log2 ((INTVAL (XEXP (op0
, 1))
6711 & GET_MODE_MASK (mode
))
6713 && const_op
>> i
== 0
6714 && (tmode
= mode_for_size (i
, MODE_INT
, 1)) != BLKmode
)
6716 op0
= gen_lowpart_for_combine (tmode
, XEXP (op0
, 0));
6723 /* If we have (compare (xshift FOO N) (const_int C)) and
6724 the high order N bits of FOO (N+1 if an inequality comparison)
6725 are not significant, we can do this by comparing FOO with C
6726 shifted right N bits so long as the low-order N bits of C are
6728 if (GET_CODE (XEXP (op0
, 1)) == CONST_INT
6729 && INTVAL (XEXP (op0
, 1)) >= 0
6730 && ((INTVAL (XEXP (op0
, 1)) + ! equality_comparison_p
)
6731 < HOST_BITS_PER_INT
)
6732 && (const_op
& ~ ((1 << INTVAL (XEXP (op0
, 1))) - 1)) == 0
6733 && mode_width
<= HOST_BITS_PER_INT
6734 && (significant_bits (XEXP (op0
, 0), mode
)
6735 & ~ (mask
>> (INTVAL (XEXP (op0
, 1))
6736 + ! equality_comparison_p
))) == 0)
6738 const_op
>>= INTVAL (XEXP (op0
, 1));
6739 op1
= gen_rtx (CONST_INT
, VOIDmode
, const_op
);
6740 op0
= XEXP (op0
, 0);
6744 /* If we are doing an LT or GE comparison, it means we are testing
6745 a particular bit. Convert it to the appropriate AND. */
6746 if (const_op
== 0 && sign_bit_comparison_p
6747 && GET_CODE (XEXP (op0
, 1)) == CONST_INT
6748 && mode_width
<= HOST_BITS_PER_INT
)
6750 op0
= simplify_and_const_int (0, mode
, XEXP (op0
, 0),
6751 1 << ( mode_width
- 1
6752 - INTVAL (XEXP (op0
, 1))));
6753 code
= (code
== LT
? NE
: EQ
);
6759 /* If OP0 is a sign extension and CODE is not an unsigned comparison,
6760 do the comparison in a narrower mode. */
6761 if (! unsigned_comparison_p
6762 && GET_CODE (XEXP (op0
, 1)) == CONST_INT
6763 && GET_CODE (XEXP (op0
, 0)) == ASHIFT
6764 && XEXP (op0
, 1) == XEXP (XEXP (op0
, 0), 1)
6765 && (tmode
= mode_for_size (mode_width
- INTVAL (XEXP (op0
, 1)),
6766 MODE_INT
, 1)) != VOIDmode
6767 && ((unsigned) const_op
<= GET_MODE_MASK (tmode
)
6768 || (unsigned) - const_op
<= GET_MODE_MASK (tmode
)))
6770 op0
= gen_lowpart_for_combine (tmode
, XEXP (XEXP (op0
, 0), 0));
6774 /* ... fall through ... */
6776 /* If we have (compare (xshiftrt FOO N) (const_int C)) and
6777 the low order N bits of FOO are not significant, we can do this
6778 by comparing FOO with C shifted left N bits so long as no
6780 if (GET_CODE (XEXP (op0
, 1)) == CONST_INT
6781 && INTVAL (XEXP (op0
, 1)) >= 0
6782 && INTVAL (XEXP (op0
, 1)) < HOST_BITS_PER_INT
6783 && mode_width
<= HOST_BITS_PER_INT
6784 && (significant_bits (XEXP (op0
, 0), mode
)
6785 & ((1 << INTVAL (XEXP (op0
, 1))) - 1)) == 0
6787 || (floor_log2 (const_op
) + INTVAL (XEXP (op0
, 1))
6790 const_op
<<= INTVAL (XEXP (op0
, 1));
6791 op1
= gen_rtx (CONST_INT
, VOIDmode
, const_op
);
6792 op0
= XEXP (op0
, 0);
6796 /* If we are using this shift to extract just the sign bit, we
6797 can replace this with an LT or GE comparison. */
6799 && (equality_comparison_p
|| sign_bit_comparison_p
)
6800 && GET_CODE (XEXP (op0
, 1)) == CONST_INT
6801 && INTVAL (XEXP (op0
, 1)) == mode_width
- 1)
6803 op0
= XEXP (op0
, 0);
6804 code
= (code
== NE
|| code
== GT
? LT
: GE
);
6813 /* Now make any compound operations involved in this comparison. Then,
6814 check for an outmost SUBREG on OP0 that isn't doing anything or is
6815 paradoxical. The latter case can only occur when it is known that the
6816 "extra" bits will be zero. Therefore, it is safe to remove the SUBREG.
6817 We can never remove a SUBREG for a non-equality comparison because the
6818 sign bit is in a different place in the underlying object. */
6820 op0
= make_compound_operation (op0
, op1
== const0_rtx
? COMPARE
: SET
);
6821 op1
= make_compound_operation (op1
, SET
);
6823 if (GET_CODE (op0
) == SUBREG
&& subreg_lowpart_p (op0
)
6824 && GET_MODE_CLASS (GET_MODE (op0
)) == MODE_INT
6825 && (code
== NE
|| code
== EQ
)
6826 && ((GET_MODE_SIZE (GET_MODE (op0
))
6827 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0
))))))
6829 op0
= SUBREG_REG (op0
);
6830 op1
= gen_lowpart_for_combine (GET_MODE (op0
), op1
);
6833 else if (GET_CODE (op0
) == SUBREG
&& subreg_lowpart_p (op0
)
6834 && GET_MODE_CLASS (GET_MODE (op0
)) == MODE_INT
6835 && (code
== NE
|| code
== EQ
)
6836 && GET_MODE_BITSIZE (GET_MODE (op0
)) <= HOST_BITS_PER_INT
6837 && (significant_bits (SUBREG_REG (op0
), GET_MODE (SUBREG_REG (op0
)))
6838 & ~ GET_MODE_MASK (GET_MODE (op0
))) == 0
6839 && (tem
= gen_lowpart_for_combine (GET_MODE (SUBREG_REG (op0
)),
6841 (significant_bits (tem
, GET_MODE (SUBREG_REG (op0
)))
6842 & ~ GET_MODE_MASK (GET_MODE (op0
))) == 0))
6843 op0
= SUBREG_REG (op0
), op1
= tem
;
6845 /* We now do the opposite procedure: Some machines don't have compare
6846 insns in all modes. If OP0's mode is an integer mode smaller than a
6847 word and we can't do a compare in that mode, see if there is a larger
6848 mode for which we can do the compare and where the only significant
6849 bits in OP0 and OP1 are those in the narrower mode. We can do
6850 this if this is an equality comparison, in which case we can
6851 merely widen the operation, or if we are testing the sign bit, in
6852 which case we can explicitly put in the test. */
6854 mode
= GET_MODE (op0
);
6855 if (mode
!= VOIDmode
&& GET_MODE_CLASS (mode
) == MODE_INT
6856 && GET_MODE_SIZE (mode
) < UNITS_PER_WORD
6857 && cmp_optab
->handlers
[(int) mode
].insn_code
== CODE_FOR_nothing
)
6858 for (tmode
= GET_MODE_WIDER_MODE (mode
);
6859 tmode
!= VOIDmode
&& GET_MODE_BITSIZE (tmode
) <= HOST_BITS_PER_INT
;
6860 tmode
= GET_MODE_WIDER_MODE (tmode
))
6861 if (cmp_optab
->handlers
[(int) tmode
].insn_code
!= CODE_FOR_nothing
6862 && (significant_bits (op0
, tmode
) & ~ GET_MODE_MASK (mode
)) == 0
6863 && (significant_bits (op1
, tmode
) & ~ GET_MODE_MASK (mode
)) == 0
6864 && (code
== EQ
|| code
== NE
6865 || (op1
== const0_rtx
&& (code
== LT
|| code
== GE
)
6866 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_INT
)))
6868 op0
= gen_lowpart_for_combine (tmode
, op0
);
6869 op1
= gen_lowpart_for_combine (tmode
, op1
);
6871 if (code
== LT
|| code
== GE
)
6873 op0
= gen_binary (AND
, tmode
, op0
,
6874 gen_rtx (CONST_INT
, VOIDmode
,
6875 1 << (GET_MODE_BITSIZE (mode
) - 1)));
6876 code
= (code
== LT
) ? NE
: EQ
;
6888 /* Return 1 if we know that X, a comparison operation, is not operating
6889 on a floating-point value or is EQ or NE, meaning that we can safely
6893 reversible_comparison_p (x
)
6896 if (TARGET_FLOAT_FORMAT
!= IEEE_FLOAT_FORMAT
6897 || GET_CODE (x
) == NE
|| GET_CODE (x
) == EQ
)
6900 switch (GET_MODE_CLASS (GET_MODE (XEXP (x
, 0))))
6906 x
= get_last_value (XEXP (x
, 0));
6907 return (x
&& GET_CODE (x
) == COMPARE
6908 && GET_MODE_CLASS (GET_MODE (XEXP (x
, 0))) == MODE_INT
);
6914 /* Utility function for following routine. Called when X is part of a value
6915 being stored into reg_last_set_value. Sets reg_last_set_table_tick
6916 for each register mentioned. Similar to mention_regs in cse.c */
6919 update_table_tick (x
)
6922 register enum rtx_code code
= GET_CODE (x
);
6923 register char *fmt
= GET_RTX_FORMAT (code
);
6928 int regno
= REGNO (x
);
6929 int endregno
= regno
+ (regno
< FIRST_PSEUDO_REGISTER
6930 ? HARD_REGNO_NREGS (regno
, GET_MODE (x
)) : 1);
6932 for (i
= regno
; i
< endregno
; i
++)
6933 reg_last_set_table_tick
[i
] = label_tick
;
6938 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
6939 /* Note that we can't have an "E" in values stored; see
6940 get_last_value_validate. */
6942 update_table_tick (XEXP (x
, i
));
6945 /* Record that REG is set to VALUE in insn INSN. If VALUE is zero, we
6946 are saying that the register is clobbered and we no longer know its
6947 value. If INSN is zero, don't update reg_last_set; this call is normally
6948 done with VALUE also zero to invalidate the register. */
6951 record_value_for_reg (reg
, insn
, value
)
6956 int regno
= REGNO (reg
);
6957 int endregno
= regno
+ (regno
< FIRST_PSEUDO_REGISTER
6958 ? HARD_REGNO_NREGS (regno
, GET_MODE (reg
)) : 1);
6961 /* If VALUE contains REG and we have a previous value for REG, substitute
6962 the previous value. */
6963 if (value
&& insn
&& reg_overlap_mentioned_p (reg
, value
))
6967 /* Set things up so get_last_value is allowed to see anything set up to
6969 subst_low_cuid
= INSN_CUID (insn
);
6970 tem
= get_last_value (reg
);
6973 value
= replace_rtx (copy_rtx (value
), reg
, tem
);
6976 /* For each register modified, show we don't know its value, that
6977 its value has been updated, and that we don't know the location of
6978 the death of the register. */
6979 for (i
= regno
; i
< endregno
; i
++)
6982 reg_last_set
[i
] = insn
;
6983 reg_last_set_value
[i
] = 0;
6984 reg_last_death
[i
] = 0;
6987 /* Mark registers that are being referenced in this value. */
6989 update_table_tick (value
);
6991 /* Now update the status of each register being set.
6992 If someone is using this register in this block, set this register
6993 to invalid since we will get confused between the two lives in this
6994 basic block. This makes using this register always invalid. In cse, we
6995 scan the table to invalidate all entries using this register, but this
6996 is too much work for us. */
6998 for (i
= regno
; i
< endregno
; i
++)
7000 reg_last_set_label
[i
] = label_tick
;
7001 if (value
&& reg_last_set_table_tick
[i
] == label_tick
)
7002 reg_last_set_invalid
[i
] = 1;
7004 reg_last_set_invalid
[i
] = 0;
7007 /* The value being assigned might refer to X (like in "x++;"). In that
7008 case, we must replace it with (clobber (const_int 0)) to prevent
7010 if (value
&& ! get_last_value_validate (&value
,
7011 reg_last_set_label
[regno
], 0))
7013 value
= copy_rtx (value
);
7014 if (! get_last_value_validate (&value
, reg_last_set_label
[regno
], 1))
7018 /* For the main register being modified, update the value. */
7019 reg_last_set_value
[regno
] = value
;
7023 /* Used for communication between the following two routines. */
7024 static rtx record_dead_insn
;
7026 /* Called via note_stores from record_dead_and_set_regs to handle one
7027 SET or CLOBBER in an insn. */
7030 record_dead_and_set_regs_1 (dest
, setter
)
7033 if (GET_CODE (dest
) == REG
)
7035 /* If we are setting the whole register, we know its value. Otherwise
7036 show that we don't know the value. We can handle SUBREG in
7038 if (GET_CODE (setter
) == SET
&& dest
== SET_DEST (setter
))
7039 record_value_for_reg (dest
, record_dead_insn
, SET_SRC (setter
));
7040 else if (GET_CODE (setter
) == SET
7041 && GET_CODE (SET_DEST (setter
)) == SUBREG
7042 && SUBREG_REG (SET_DEST (setter
)) == dest
7043 && subreg_lowpart_p (SET_DEST (setter
)))
7044 record_value_for_reg
7045 (dest
, record_dead_insn
,
7046 gen_lowpart_for_combine (GET_MODE (SET_DEST (setter
)),
7049 record_value_for_reg (dest
, record_dead_insn
, 0);
7051 else if (GET_CODE (dest
) == MEM
7052 /* Ignore pushes, they clobber nothing. */
7053 && ! push_operand (dest
, GET_MODE (dest
)))
7054 mem_last_set
= INSN_CUID (record_dead_insn
);
7057 /* Update the records of when each REG was most recently set or killed
7058 for the things done by INSN. This is the last thing done in processing
7059 INSN in the combiner loop.
7061 We update reg_last_set, reg_last_set_value, reg_last_death, and also the
7062 similar information mem_last_set (which insn most recently modified memory)
7063 and last_call_cuid (which insn was the most recent subroutine call). */
7066 record_dead_and_set_regs (insn
)
7070 for (link
= REG_NOTES (insn
); link
; link
= XEXP (link
, 1))
7072 if (REG_NOTE_KIND (link
) == REG_DEAD
)
7073 reg_last_death
[REGNO (XEXP (link
, 0))] = insn
;
7074 else if (REG_NOTE_KIND (link
) == REG_INC
)
7075 record_value_for_reg (XEXP (link
, 0), insn
, 0);
7078 if (GET_CODE (insn
) == CALL_INSN
)
7079 last_call_cuid
= mem_last_set
= INSN_CUID (insn
);
7081 record_dead_insn
= insn
;
7082 note_stores (PATTERN (insn
), record_dead_and_set_regs_1
);
7085 /* Utility routine for the following function. Verify that all the registers
7086 mentioned in *LOC are valid when *LOC was part of a value set when
7087 label_tick == TICK. Return 0 if some are not.
7089 If REPLACE is non-zero, replace the invalid reference with
7090 (clobber (const_int 0)) and return 1. This replacement is useful because
7091 we often can get useful information about the form of a value (e.g., if
7092 it was produced by a shift that always produces -1 or 0) even though
7093 we don't know exactly what registers it was produced from. */
7096 get_last_value_validate (loc
, tick
, replace
)
7102 char *fmt
= GET_RTX_FORMAT (GET_CODE (x
));
7103 int len
= GET_RTX_LENGTH (GET_CODE (x
));
7106 if (GET_CODE (x
) == REG
)
7108 int regno
= REGNO (x
);
7109 int endregno
= regno
+ (regno
< FIRST_PSEUDO_REGISTER
7110 ? HARD_REGNO_NREGS (regno
, GET_MODE (x
)) : 1);
7113 for (j
= regno
; j
< endregno
; j
++)
7114 if (reg_last_set_invalid
[j
]
7115 /* If this is a pseudo-register that was only set once, it is
7117 || (! (regno
>= FIRST_PSEUDO_REGISTER
&& reg_n_sets
[regno
] == 1)
7118 && reg_last_set_label
[j
] > tick
))
7121 *loc
= gen_rtx (CLOBBER
, GET_MODE (x
), const0_rtx
);
7128 for (i
= 0; i
< len
; i
++)
7130 && get_last_value_validate (&XEXP (x
, i
), tick
, replace
) == 0)
7131 /* Don't bother with these. They shouldn't occur anyway. */
7135 /* If we haven't found a reason for it to be invalid, it is valid. */
7139 /* Get the last value assigned to X, if known. Some registers
7140 in the value may be replaced with (clobber (const_int 0)) if their value
7141 is known longer known reliably. */
7150 /* If this is a non-paradoxical SUBREG, get the value of its operand and
7151 then convert it to the desired mode. If this is a paradoxical SUBREG,
7152 we cannot predict what values the "extra" bits might have. */
7153 if (GET_CODE (x
) == SUBREG
7154 && subreg_lowpart_p (x
)
7155 && (GET_MODE_SIZE (GET_MODE (x
))
7156 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x
))))
7157 && (value
= get_last_value (SUBREG_REG (x
))) != 0)
7158 return gen_lowpart_for_combine (GET_MODE (x
), value
);
7160 if (GET_CODE (x
) != REG
)
7164 value
= reg_last_set_value
[regno
];
7166 /* If we don't have a value, it isn't for this basic block, or if it was
7167 set in a later insn that the ones we are processing, return 0. */
7170 || (reg_n_sets
[regno
] != 1
7171 && (reg_last_set_label
[regno
] != label_tick
7172 || INSN_CUID (reg_last_set
[regno
]) >= subst_low_cuid
)))
7175 /* If the value has all its register valid, return it. */
7176 if (get_last_value_validate (&value
, reg_last_set_label
[regno
], 0))
7179 /* Otherwise, make a copy and replace any invalid register with
7180 (clobber (const_int 0)). If that fails for some reason, return 0. */
7182 value
= copy_rtx (value
);
7183 if (get_last_value_validate (&value
, reg_last_set_label
[regno
], 1))
7189 /* Return nonzero if expression X refers to a REG or to memory
7190 that is set in an instruction more recent than FROM_CUID. */
7193 use_crosses_set_p (x
, from_cuid
)
7199 register enum rtx_code code
= GET_CODE (x
);
7203 register int regno
= REGNO (x
);
7204 #ifdef PUSH_ROUNDING
7205 /* Don't allow uses of the stack pointer to be moved,
7206 because we don't know whether the move crosses a push insn. */
7207 if (regno
== STACK_POINTER_REGNUM
)
7210 return (reg_last_set
[regno
]
7211 && INSN_CUID (reg_last_set
[regno
]) > from_cuid
);
7214 if (code
== MEM
&& mem_last_set
> from_cuid
)
7217 fmt
= GET_RTX_FORMAT (code
);
7219 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
7224 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
7225 if (use_crosses_set_p (XVECEXP (x
, i
, j
), from_cuid
))
7228 else if (fmt
[i
] == 'e'
7229 && use_crosses_set_p (XEXP (x
, i
), from_cuid
))
7235 /* Define three variables used for communication between the following
7238 static int reg_dead_regno
, reg_dead_endregno
;
7239 static int reg_dead_flag
;
7241 /* Function called via note_stores from reg_dead_at_p.
7243 If DEST is within [reg_dead_rengno, reg_dead_endregno), set
7244 reg_dead_flag to 1 if X is a CLOBBER and to -1 it is a SET. */
7247 reg_dead_at_p_1 (dest
, x
)
7251 int regno
, endregno
;
7253 if (GET_CODE (dest
) != REG
)
7256 regno
= REGNO (dest
);
7257 endregno
= regno
+ (regno
< FIRST_PSEUDO_REGISTER
7258 ? HARD_REGNO_NREGS (regno
, GET_MODE (dest
)) : 1);
7260 if (reg_dead_endregno
> regno
&& reg_dead_regno
< endregno
)
7261 reg_dead_flag
= (GET_CODE (x
) == CLOBBER
) ? 1 : -1;
7264 /* Return non-zero if REG is known to be dead at INSN.
7266 We scan backwards from INSN. If we hit a REG_DEAD note or a CLOBBER
7267 referencing REG, it is dead. If we hit a SET referencing REG, it is
7268 live. Otherwise, see if it is live or dead at the start of the basic
7272 reg_dead_at_p (reg
, insn
)
7278 /* Set variables for reg_dead_at_p_1. */
7279 reg_dead_regno
= REGNO (reg
);
7280 reg_dead_endregno
= reg_dead_regno
+ (reg_dead_regno
< FIRST_PSEUDO_REGISTER
7281 ? HARD_REGNO_NREGS (reg_dead_regno
,
7287 /* Scan backwards until we find a REG_DEAD note, SET, CLOBBER, label, or
7288 beginning of function. */
7289 for (; insn
&& GET_CODE (insn
) != CODE_LABEL
;
7290 insn
= prev_nonnote_insn (insn
))
7292 note_stores (PATTERN (insn
), reg_dead_at_p_1
);
7294 return reg_dead_flag
== 1 ? 1 : 0;
7296 if (find_regno_note (insn
, REG_DEAD
, reg_dead_regno
))
7300 /* Get the basic block number that we were in. */
7305 for (block
= 0; block
< n_basic_blocks
; block
++)
7306 if (insn
== basic_block_head
[block
])
7309 if (block
== n_basic_blocks
)
7313 for (i
= reg_dead_regno
; i
< reg_dead_endregno
; i
++)
7314 if (basic_block_live_at_start
[block
][i
/ HOST_BITS_PER_INT
]
7315 & (1 << (i
% HOST_BITS_PER_INT
)))
7321 /* Remove register number REGNO from the dead registers list of INSN.
7323 Return the note used to record the death, if there was one. */
7326 remove_death (regno
, insn
)
7330 register rtx note
= find_regno_note (insn
, REG_DEAD
, regno
);
7333 remove_note (insn
, note
);
7338 /* For each register (hardware or pseudo) used within expression X, if its
7339 death is in an instruction with cuid between FROM_CUID (inclusive) and
7340 TO_INSN (exclusive), put a REG_DEAD note for that register in the
7341 list headed by PNOTES.
7343 This is done when X is being merged by combination into TO_INSN. These
7344 notes will then be distributed as needed. */
7347 move_deaths (x
, from_cuid
, to_insn
, pnotes
)
7354 register int len
, i
;
7355 register enum rtx_code code
= GET_CODE (x
);
7359 register int regno
= REGNO (x
);
7360 register rtx where_dead
= reg_last_death
[regno
];
7362 if (where_dead
&& INSN_CUID (where_dead
) >= from_cuid
7363 && INSN_CUID (where_dead
) < INSN_CUID (to_insn
))
7365 rtx note
= remove_death (regno
, reg_last_death
[regno
]);
7367 /* It is possible for the call above to return 0. This can occur
7368 when reg_last_death points to I2 or I1 that we combined with.
7369 In that case make a new note. */
7373 XEXP (note
, 1) = *pnotes
;
7377 *pnotes
= gen_rtx (EXPR_LIST
, REG_DEAD
, x
, *pnotes
);
7383 else if (GET_CODE (x
) == SET
)
7385 rtx dest
= SET_DEST (x
);
7387 move_deaths (SET_SRC (x
), from_cuid
, to_insn
, pnotes
);
7389 if (GET_CODE (dest
) == ZERO_EXTRACT
)
7391 move_deaths (XEXP (dest
, 1), from_cuid
, to_insn
, pnotes
);
7392 move_deaths (XEXP (dest
, 2), from_cuid
, to_insn
, pnotes
);
7395 while (GET_CODE (dest
) == ZERO_EXTRACT
|| GET_CODE (dest
) == SUBREG
7396 || GET_CODE (dest
) == STRICT_LOW_PART
)
7397 dest
= XEXP (dest
, 0);
7399 if (GET_CODE (dest
) == MEM
)
7400 move_deaths (XEXP (dest
, 0), from_cuid
, to_insn
, pnotes
);
7404 else if (GET_CODE (x
) == CLOBBER
)
7407 len
= GET_RTX_LENGTH (code
);
7408 fmt
= GET_RTX_FORMAT (code
);
7410 for (i
= 0; i
< len
; i
++)
7415 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
7416 move_deaths (XVECEXP (x
, i
, j
), from_cuid
, to_insn
, pnotes
);
7418 else if (fmt
[i
] == 'e')
7419 move_deaths (XEXP (x
, i
), from_cuid
, to_insn
, pnotes
);
7423 /* Return 1 if REG is the target of a bit-field assignment in BODY, the
7424 pattern of an insn. */
7427 reg_bitfield_target_p (reg
, body
)
7433 if (GET_CODE (body
) == SET
)
7434 return ((GET_CODE (SET_DEST (body
)) == ZERO_EXTRACT
7435 && reg
== XEXP (SET_DEST (body
), 0))
7436 || (GET_CODE (SET_DEST (body
)) == STRICT_LOW_PART
7437 && reg
== SUBREG_REG (XEXP (SET_DEST (body
), 0))));
7439 else if (GET_CODE (body
) == PARALLEL
)
7440 for (i
= XVECLEN (body
, 0) - 1; i
>= 0; i
--)
7441 if (reg_bitfield_target_p (reg
, XVECEXP (body
, 0, i
)))
7447 /* Given a chain of REG_NOTES originally from FROM_INSN, try to place them
7448 as appropriate. I3 and I2 are the insns resulting from the combination
7449 insns including FROM (I2 may be zero).
7451 ELIM_I2 and ELIM_I1 are either zero or registers that we know will
7452 not need REG_DEAD notes because they are being substituted for. This
7453 saves searching in the most common cases.
7455 Each note in the list is either ignored or placed on some insns, depending
7456 on the type of note. */
7459 distribute_notes (notes
, from_insn
, i3
, i2
, elim_i2
, elim_i1
)
7463 rtx elim_i2
, elim_i1
;
7465 rtx note
, next_note
;
7468 for (note
= notes
; note
; note
= next_note
)
7470 rtx place
= 0, place2
= 0;
7472 /* If this NOTE references a pseudo register, ensure it references
7473 the latest copy of that register. */
7474 if (XEXP (note
, 0) && GET_CODE (XEXP (note
, 0)) == REG
7475 && REGNO (XEXP (note
, 0)) >= FIRST_PSEUDO_REGISTER
)
7476 XEXP (note
, 0) = regno_reg_rtx
[REGNO (XEXP (note
, 0))];
7478 next_note
= XEXP (note
, 1);
7479 switch (REG_NOTE_KIND (note
))
7482 /* If this register is set or clobbered in I3, put the note there
7483 unless there is one already. */
7484 if (reg_set_p (XEXP (note
, 0), PATTERN (i3
)))
7486 if (! (GET_CODE (XEXP (note
, 0)) == REG
7487 ? find_regno_note (i3
, REG_UNUSED
, REGNO (XEXP (note
, 0)))
7488 : find_reg_note (i3
, REG_UNUSED
, XEXP (note
, 0))))
7491 /* Otherwise, if this register is used by I3, then this register
7492 now dies here, so we must put a REG_DEAD note here unless there
7494 else if (reg_referenced_p (XEXP (note
, 0), PATTERN (i3
))
7495 && ! (GET_CODE (XEXP (note
, 0)) == REG
7496 ? find_regno_note (i3
, REG_DEAD
, REGNO (XEXP (note
, 0)))
7497 : find_reg_note (i3
, REG_DEAD
, XEXP (note
, 0))))
7499 PUT_REG_NOTE_KIND (note
, REG_DEAD
);
7507 /* These notes say something about results of an insn. We can
7508 only support them if they used to be on I3 in which case they
7509 remain on I3. Otherwise they are ignored. */
7510 if (from_insn
== i3
)
7515 case REG_NO_CONFLICT
:
7517 /* These notes say something about how a register is used. They must
7518 be present on any use of the register in I2 or I3. */
7519 if (reg_mentioned_p (XEXP (note
, 0), PATTERN (i3
)))
7522 if (i2
&& reg_mentioned_p (XEXP (note
, 0), PATTERN (i2
)))
7532 /* It is too much trouble to try to see if this note is still
7533 correct in all situations. It is better to simply delete it. */
7537 /* If the insn previously containing this note still exists,
7538 put it back where it was. Otherwise move it to the previous
7539 insn. Adjust the corresponding REG_LIBCALL note. */
7540 if (GET_CODE (from_insn
) != NOTE
)
7544 tem
= find_reg_note (XEXP (note
, 0), REG_LIBCALL
, 0);
7545 place
= prev_real_insn (from_insn
);
7547 XEXP (tem
, 0) = place
;
7552 /* This is handled similarly to REG_RETVAL. */
7553 if (GET_CODE (from_insn
) != NOTE
)
7557 tem
= find_reg_note (XEXP (note
, 0), REG_RETVAL
, 0);
7558 place
= next_real_insn (from_insn
);
7560 XEXP (tem
, 0) = place
;
7565 /* If the register is used as an input in I3, it dies there.
7566 Similarly for I2, if it is non-zero and adjacent to I3.
7568 If the register is not used as an input in either I3 or I2
7569 and it is not one of the registers we were supposed to eliminate,
7570 there are two possibilities. We might have a non-adjacent I2
7571 or we might have somehow eliminated an additional register
7572 from a computation. For example, we might have had A & B where
7573 we discover that B will always be zero. In this case we will
7574 eliminate the reference to A.
7576 In both cases, we must search to see if we can find a previous
7577 use of A and put the death note there. */
7579 if (reg_referenced_p (XEXP (note
, 0), PATTERN (i3
)))
7581 else if (i2
!= 0 && next_nonnote_insn (i2
) == i3
7582 && reg_referenced_p (XEXP (note
, 0), PATTERN (i2
)))
7585 if (XEXP (note
, 0) == elim_i2
|| XEXP (note
, 0) == elim_i1
)
7589 for (tem
= prev_nonnote_insn (i3
);
7590 tem
&& (GET_CODE (tem
) == INSN
7591 || GET_CODE (tem
) == CALL_INSN
);
7592 tem
= prev_nonnote_insn (tem
))
7594 /* If the register is being set at TEM, see if that is all
7595 TEM is doing. If so, delete TEM. Otherwise, make this
7596 into a REG_UNUSED note instead. */
7597 if (reg_set_p (XEXP (note
, 0), PATTERN (tem
)))
7599 rtx set
= single_set (tem
);
7601 if (set
!= 0 && ! side_effects_p (SET_SRC (set
)))
7603 /* Move the notes and links of TEM elsewhere.
7604 This might delete other dead insns recursively.
7605 First set the pattern to something that won't use
7608 PATTERN (tem
) = pc_rtx
;
7610 distribute_notes (REG_NOTES (tem
), tem
, tem
, 0, 0, 0);
7611 distribute_links (LOG_LINKS (tem
));
7613 PUT_CODE (tem
, NOTE
);
7614 NOTE_LINE_NUMBER (tem
) = NOTE_INSN_DELETED
;
7615 NOTE_SOURCE_FILE (tem
) = 0;
7619 PUT_REG_NOTE_KIND (note
, REG_UNUSED
);
7621 /* If there isn't already a REG_UNUSED note, put one
7623 if (! find_regno_note (tem
, REG_UNUSED
,
7624 REGNO (XEXP (note
, 0))))
7629 else if (reg_referenced_p (XEXP (note
, 0), PATTERN (tem
)))
7636 /* If the register is set or already dead at PLACE, we needn't do
7637 anything with this note if it is still a REG_DEAD note.
7639 Note that we cannot use just `dead_or_set_p' here since we can
7640 convert an assignment to a register into a bit-field assignment.
7641 Therefore, we must also omit the note if the register is the
7642 target of a bitfield assignment. */
7644 if (place
&& REG_NOTE_KIND (note
) == REG_DEAD
)
7646 int regno
= REGNO (XEXP (note
, 0));
7648 if (dead_or_set_p (place
, XEXP (note
, 0))
7649 || reg_bitfield_target_p (XEXP (note
, 0), PATTERN (place
)))
7651 /* Unless the register previously died in PLACE, clear
7652 reg_last_death. [I no longer understand why this is
7654 if (reg_last_death
[regno
] != place
)
7655 reg_last_death
[regno
] = 0;
7659 reg_last_death
[regno
] = place
;
7661 /* If this is a death note for a hard reg that is occupying
7662 multiple registers, ensure that we are still using all
7663 parts of the object. If we find a piece of the object
7664 that is unused, we must add a USE for that piece before
7665 PLACE and put the appropriate REG_DEAD note on it.
7667 An alternative would be to put a REG_UNUSED for the pieces
7668 on the insn that set the register, but that can't be done if
7669 it is not in the same block. It is simpler, though less
7670 efficient, to add the USE insns. */
7672 if (place
&& regno
< FIRST_PSEUDO_REGISTER
7673 && HARD_REGNO_NREGS (regno
, GET_MODE (XEXP (note
, 0))) > 1)
7676 = regno
+ HARD_REGNO_NREGS (regno
,
7677 GET_MODE (XEXP (note
, 0)));
7681 for (i
= regno
; i
< endregno
; i
++)
7682 if (! refers_to_regno_p (i
, i
+ 1, PATTERN (place
), 0))
7684 rtx piece
= gen_rtx (REG
, word_mode
, i
);
7686 = emit_insn_before (gen_rtx (USE
, VOIDmode
, piece
),
7689 REG_NOTES (use_insn
)
7690 = gen_rtx (EXPR_LIST
, REG_DEAD
, piece
,
7691 REG_NOTES (use_insn
));
7696 /* Put only REG_DEAD notes for pieces that are
7697 still used and that are not already dead or set. */
7699 for (i
= regno
; i
< endregno
; i
++)
7701 rtx piece
= gen_rtx (REG
, word_mode
, i
);
7703 if (reg_referenced_p (piece
, PATTERN (place
))
7704 && ! dead_or_set_p (place
, piece
)
7705 && ! reg_bitfield_target_p (piece
,
7707 REG_NOTES (place
) = gen_rtx (EXPR_LIST
, REG_DEAD
,
7719 /* Any other notes should not be present at this point in the
7726 XEXP (note
, 1) = REG_NOTES (place
);
7727 REG_NOTES (place
) = note
;
7731 REG_NOTES (place2
) = gen_rtx (GET_CODE (note
), REG_NOTE_KIND (note
),
7732 XEXP (note
, 0), REG_NOTES (place2
));
7736 /* Similarly to above, distribute the LOG_LINKS that used to be present on
7737 I3, I2, and I1 to new locations. */
7740 distribute_links (links
)
7743 rtx link
, next_link
;
7745 for (link
= links
; link
; link
= next_link
)
7751 next_link
= XEXP (link
, 1);
7753 /* If the insn that this link points to is a NOTE or isn't a single
7754 set, ignore it. In the latter case, it isn't clear what we
7755 can do other than ignore the link, since we can't tell which
7756 register it was for. Such links wouldn't be used by combine
7759 It is not possible for the destination of the target of the link to
7760 have been changed by combine. The only potential of this is if we
7761 replace I3, I2, and I1 by I3 and I2. But in that case the
7762 destination of I2 also remains unchanged. */
7764 if (GET_CODE (XEXP (link
, 0)) == NOTE
7765 || (set
= single_set (XEXP (link
, 0))) == 0)
7768 reg
= SET_DEST (set
);
7769 while (GET_CODE (reg
) == SUBREG
|| GET_CODE (reg
) == ZERO_EXTRACT
7770 || GET_CODE (reg
) == SIGN_EXTRACT
7771 || GET_CODE (reg
) == STRICT_LOW_PART
)
7772 reg
= XEXP (reg
, 0);
7774 /* A LOG_LINK is defined as being placed on the first insn that uses
7775 a register and points to the insn that sets the register. Start
7776 searching at the next insn after the target of the link and stop
7777 when we reach a set of the register or the end of the basic block.
7779 Note that this correctly handles the link that used to point from
7780 I3 to I2. Also note that not much seaching is typically done here
7781 since most links don't point very far away. */
7783 for (insn
= NEXT_INSN (XEXP (link
, 0));
7784 (insn
&& GET_CODE (insn
) != CODE_LABEL
7785 && GET_CODE (PREV_INSN (insn
)) != JUMP_INSN
);
7786 insn
= NEXT_INSN (insn
))
7787 if (GET_RTX_CLASS (GET_CODE (insn
)) == 'i'
7788 && reg_overlap_mentioned_p (reg
, PATTERN (insn
)))
7790 if (reg_referenced_p (reg
, PATTERN (insn
)))
7795 /* If we found a place to put the link, place it there unless there
7796 is already a link to the same insn as LINK at that point. */
7802 for (link2
= LOG_LINKS (place
); link2
; link2
= XEXP (link2
, 1))
7803 if (XEXP (link2
, 0) == XEXP (link
, 0))
7808 XEXP (link
, 1) = LOG_LINKS (place
);
7809 LOG_LINKS (place
) = link
;
7816 dump_combine_stats (file
)
7821 ";; Combiner statistics: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n\n",
7822 combine_attempts
, combine_merges
, combine_extras
, combine_successes
);
7826 dump_combine_total_stats (file
)
7831 "\n;; Combiner totals: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n",
7832 total_attempts
, total_merges
, total_extras
, total_successes
);