/* Optimize by combining instructions for GNU compiler.
   Copyright (C) 1987, 1988, 1992 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.  */

/* This module is essentially the "combiner" phase of the U. of Arizona
   Portable Optimizer, but redone to work on our list-structured
   representation for RTL instead of their string representation.

   The LOG_LINKS of each insn identify the most recent assignment
   to each REG used in the insn.  It is a list of previous insns,
   each of which contains a SET for a REG that is used in this insn
   and not used or set in between.  LOG_LINKs never cross basic blocks.
   They were set up by the preceding pass (lifetime analysis).

   We try to combine each pair of insns joined by a logical link.
   We also try to combine triples of insns A, B and C when
   C has a link back to B and B has a link back to A.

   LOG_LINKS does not have links for uses of CC0.  It does not need
   them, because the insn that sets CC0 is always immediately before
   the insn that tests it.  So we always regard a branch
   insn as having a logical link to the preceding insn.  The same is true
   for an insn explicitly using CC0.

   We check (with use_crosses_set_p) to avoid combining in such a way
   as to move a computation to a place where its value would be different.

   Combination is done by mathematically substituting the previous
   insn(s) values for the regs they set into the expressions in
   the later insns that refer to these regs.  If the result is a valid insn
   for our target machine, according to the machine description,
   we install it, delete the earlier insns, and update the data flow
   information (LOG_LINKS and REG_NOTES) for what we did.

   There are a few exceptions where the dataflow information created by
   flow.c is not completely updated:

   - reg_live_length is not updated
   - reg_n_refs is not adjusted in the rare case when a register is
     no longer required in a computation
   - there are extremely rare cases (see distribute_regnotes) when a
     REG_DEAD note is lost
   - a LOG_LINKS entry that refers to an insn with multiple SETs may be
     removed because there is no way to know which register it was
     linking

   To simplify substitution, we combine only when the earlier insn(s)
   consist of only a single assignment.  To simplify updating afterward,
   we never combine when a subroutine call appears in the middle.

   Since we do not represent assignments to CC0 explicitly except when that
   is all an insn does, there is no LOG_LINKS entry in an insn that uses
   the condition code for the insn that set the condition code.
   Fortunately, these two insns must be consecutive.
   Therefore, every JUMP_INSN is taken to have an implicit logical link
   to the preceding insn.  This is not quite right, since non-jumps can
   also use the condition code; but in practice such insns would not
   combine anyway.  */

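/* As a concrete illustration of the substitution described above (a
   hypothetical example, not drawn from any particular machine
   description): given the linked pair

	I2: (set (reg:SI 60) (plus:SI (reg:SI 61) (const_int 4)))
	I3: (set (mem:SI (reg:SI 60)) (const_int 0))

   where (reg:SI 60) dies in I3, substituting I2's source into I3 yields

	(set (mem:SI (plus:SI (reg:SI 61) (const_int 4))) (const_int 0))

   which replaces both insns only if it matches some insn pattern in the
   target's machine description.  */
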
#include "config.h"
#include "gvarargs.h"
#include "rtl.h"
#include "flags.h"
#include "regs.h"
#include "expr.h"
#include "basic-block.h"
#include "insn-config.h"
#include "insn-flags.h"
#include "insn-codes.h"
#include "insn-attr.h"
#include "recog.h"
#include "real.h"
#include <stdio.h>

/* It is not safe to use ordinary gen_lowpart in combine.
   Use gen_lowpart_for_combine instead.  See comments there.  */
#define gen_lowpart dont_use_gen_lowpart_you_dummy

/* Number of attempts to combine instructions in this function.  */

static int combine_attempts;

/* Number of attempts that got as far as substitution in this function.  */

static int combine_merges;

/* Number of instructions combined with added SETs in this function.  */

static int combine_extras;

/* Number of instructions combined in this function.  */

static int combine_successes;

/* Totals over entire compilation.  */

static int total_attempts, total_merges, total_extras, total_successes;
\f
/* Vector mapping INSN_UIDs to cuids.
   The cuids are like uids but always increase monotonically.
   Combine always uses cuids so that it can compare them.
   But actually renumbering the uids, which we used to do,
   proves to be a bad idea because it makes it hard to compare
   the dumps produced by earlier passes with those from later passes.  */

static int *uid_cuid;

/* Get the cuid of an insn.  */

#define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])

/* Maximum register number, which is the size of the tables below.  */

static int combine_max_regno;

/* Record last point of death of (hard or pseudo) register n.  */

static rtx *reg_last_death;

/* Record last point of modification of (hard or pseudo) register n.  */

static rtx *reg_last_set;

/* Record the cuid of the last insn that invalidated memory
   (anything that writes memory, and subroutine calls, but not pushes).  */

static int mem_last_set;

/* Record the cuid of the last CALL_INSN
   so we can tell whether a potential combination crosses any calls.  */

static int last_call_cuid;

/* When `subst' is called, this is the insn that is being modified
   (by combining in a previous insn).  The PATTERN of this insn
   is still the old pattern partially modified and it should not be
   looked at, but this may be used to examine the successors of the insn
   to judge whether a simplification is valid.  */

static rtx subst_insn;

/* This is the lowest CUID that `subst' is currently dealing with.
   get_last_value will not return a value if the register was set at or
   after this CUID.  If not for this mechanism, we could get confused if
   I2 or I1 in try_combine were an insn that used the old value of a register
   to obtain a new value.  In that case, we might erroneously get the
   new value of the register when we wanted the old one.  */

static int subst_low_cuid;

/* This is the value of undobuf.num_undo when we started processing this
   substitution.  This will prevent gen_rtx_combine from re-using a piece
   from the previous expression.  Doing so can produce circular rtl
   structures.  */

static int previous_num_undos;
\f
/* The next group of arrays allows the recording of the last value assigned
   to (hard or pseudo) register n.  We use this information to see if an
   operation being processed is redundant given a prior operation performed
   on the register.  For example, an `and' with a constant is redundant if
   all the zero bits are already known to be turned off.

   We use an approach similar to that used by cse, but change it in the
   following ways:

   (1) We do not want to reinitialize at each label.
   (2) It is useful, but not critical, to know the actual value assigned
       to a register.  Often just its form is helpful.

   Therefore, we maintain the following arrays:

   reg_last_set_value           the last value assigned
   reg_last_set_label           records the value of label_tick when the
                                register was assigned
   reg_last_set_table_tick      records the value of label_tick when a
                                value using the register is assigned
   reg_last_set_invalid         set to non-zero when it is not valid
                                to use the value of this register in some
                                register's value

   To understand the usage of these tables, it is important to understand
   the distinction between the value in reg_last_set_value being valid
   and the register being validly contained in some other expression in the
   table.

   Entry I in reg_last_set_value is valid if it is non-zero, and either
   reg_n_sets[i] is 1 or reg_last_set_label[i] == label_tick.

   Register I may validly appear in any expression returned for the value
   of another register if reg_n_sets[i] is 1.  It may also appear in the
   value for register J if reg_last_set_label[i] < reg_last_set_label[j] or
   reg_last_set_invalid[j] is zero.

   If an expression is found in the table containing a register which may
   not validly appear in an expression, the register is replaced by
   something that won't match, (clobber (const_int 0)).

   reg_last_set_invalid[i] is set non-zero when register I is being assigned
   to and reg_last_set_table_tick[i] == label_tick.  */

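/* A hypothetical illustration of these rules: suppose the value recorded
   for (reg 65) is (plus (reg 64) (const_int 1)), and (reg 64) is then
   assigned again while reg_last_set_table_tick[64] == label_tick.  Then
   reg_last_set_invalid[64] becomes non-zero, and an expression fetched
   from the table that mentions (reg 64) must have it replaced by
   (clobber (const_int 0)) so that the stale value cannot match.  */
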
/* Record last value assigned to (hard or pseudo) register n.  */

static rtx *reg_last_set_value;

/* Record the value of label_tick when the value for register n is placed in
   reg_last_set_value[n].  */

static short *reg_last_set_label;

/* Record the value of label_tick when an expression involving register n
   is placed in reg_last_set_value.  */

static short *reg_last_set_table_tick;

/* Set non-zero if references to register n in expressions should not be
   used.  */

static char *reg_last_set_invalid;

/* Incremented for each label.  */

static short label_tick;

/* Some registers that are set more than once and used in more than one
   basic block are nevertheless always set in similar ways.  For example,
   a QImode register may be loaded from memory in two places on a machine
   where byte loads zero extend.

   We record in the following array what we know about the significant
   bits of a register, specifically which bits are known to be zero.

   If an entry is zero, it means that we don't know anything special.  */

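/* For example (hypothetical), if an SImode pseudo N is set in two
   different basic blocks, each time by zero-extending a byte load, then
   reg_significant[N] == 0xff: the upper 24 bits are known to be zero in
   every set, so a later (and:SI (reg:SI N) (const_int 255)) is redundant
   and can be removed.  */
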
static HOST_WIDE_INT *reg_significant;

/* Mode used to compute significance in reg_significant.  It is the largest
   integer mode that can fit in HOST_BITS_PER_WIDE_INT.  */

static enum machine_mode significant_mode;

/* Nonzero if we know that a register has some leading bits that are always
   equal to the sign bit.  */

static char *reg_sign_bit_copies;

/* Nonzero when reg_significant and reg_sign_bit_copies can be safely used.
   It is zero while computing them.  This prevents propagating values based
   on previously set values, which can be incorrect if a variable
   is modified in a loop.  */

static int significant_valid;
\f
/* Record one modification to rtl structure
   to be undone by storing old_contents into *where.
   is_int is 1 if the contents are an int.  */

struct undo
{
  int is_int;
  union {rtx rtx; int i;} old_contents;
  union {rtx *rtx; int *i;} where;
};

/* Record a bunch of changes to be undone, up to MAX_UNDO of them.
   num_undo says how many are currently recorded.

   storage is nonzero if we must undo the allocation of new storage.
   The value of storage is what to pass to obfree.

   other_insn is nonzero if we have modified some other insn in the process
   of working on subst_insn.  It must be verified too.  */

#define MAX_UNDO 50

struct undobuf
{
  int num_undo;
  char *storage;
  struct undo undo[MAX_UNDO];
  rtx other_insn;
};

static struct undobuf undobuf;

/* Substitute NEWVAL, an rtx expression, into INTO, a place in some
   insn.  The substitution can be undone by undo_all.  If INTO is already
   set to NEWVAL, do not record this change.  Because computing NEWVAL might
   also call SUBST, we have to compute it before we put anything into
   the undo table.  */

#define SUBST(INTO, NEWVAL)  \
 do { rtx _new = (NEWVAL);                                              \
      if (undobuf.num_undo < MAX_UNDO)                                  \
        {                                                               \
          undobuf.undo[undobuf.num_undo].is_int = 0;                    \
          undobuf.undo[undobuf.num_undo].where.rtx = &INTO;             \
          undobuf.undo[undobuf.num_undo].old_contents.rtx = INTO;       \
          INTO = _new;                                                  \
          if (undobuf.undo[undobuf.num_undo].old_contents.rtx != INTO)  \
            undobuf.num_undo++;                                         \
        }                                                               \
    } while (0)

/* Similar to SUBST, but NEWVAL is an int.  INTO will normally be an XINT
   expression.
   Note that substitution for the value of a CONST_INT is not safe.  */

#define SUBST_INT(INTO, NEWVAL)  \
 do { if (undobuf.num_undo < MAX_UNDO)                                  \
        {                                                               \
          undobuf.undo[undobuf.num_undo].is_int = 1;                    \
          undobuf.undo[undobuf.num_undo].where.i = (int *) &INTO;       \
          undobuf.undo[undobuf.num_undo].old_contents.i = INTO;         \
          INTO = NEWVAL;                                                \
          if (undobuf.undo[undobuf.num_undo].old_contents.i != INTO)    \
            undobuf.num_undo++;                                         \
        }                                                               \
    } while (0)
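
/* A minimal usage sketch of the two macros above (hypothetical, for
   illustration only; `pat' and `new_src' are made-up names):

	SUBST (SET_SRC (pat), new_src);    ... change is recorded in undobuf
	... try to recognize the result ...
	undo_all ();                       ... SET_SRC (pat) is restored

   Every change made through SUBST or SUBST_INT remains undoable until
   the combination is accepted or undo_all discards it.  */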

/* Number of times the pseudo being substituted for
   was found and replaced.  */

static int n_occurrences;

static void set_significant ();
static void move_deaths ();
rtx remove_death ();
static void record_value_for_reg ();
static void record_dead_and_set_regs ();
static int use_crosses_set_p ();
static rtx try_combine ();
static rtx *find_split_point ();
static rtx subst ();
static void undo_all ();
static int reg_dead_at_p ();
static rtx expand_compound_operation ();
static rtx expand_field_assignment ();
static rtx make_extraction ();
static int get_pos_from_mask ();
static rtx force_to_mode ();
static rtx make_field_assignment ();
static rtx make_compound_operation ();
static rtx apply_distributive_law ();
static rtx simplify_and_const_int ();
static unsigned HOST_WIDE_INT significant_bits ();
static int num_sign_bit_copies ();
static int merge_outer_ops ();
static rtx simplify_shift_const ();
static int recog_for_combine ();
static rtx gen_lowpart_for_combine ();
static rtx gen_rtx_combine ();
static rtx gen_binary ();
static rtx gen_unary ();
static enum rtx_code simplify_comparison ();
static int reversible_comparison_p ();
static int get_last_value_validate ();
static rtx get_last_value ();
static void distribute_notes ();
static void distribute_links ();
\f
/* Main entry point for combiner.  F is the first insn of the function.
   NREGS is the first unused pseudo-reg number.  */

void
combine_instructions (f, nregs)
     rtx f;
     int nregs;
{
  register rtx insn, next, prev;
  register int i;
  register rtx links, nextlinks;

  combine_attempts = 0;
  combine_merges = 0;
  combine_extras = 0;
  combine_successes = 0;

  combine_max_regno = nregs;

  reg_last_death = (rtx *) alloca (nregs * sizeof (rtx));
  reg_last_set = (rtx *) alloca (nregs * sizeof (rtx));
  reg_last_set_value = (rtx *) alloca (nregs * sizeof (rtx));
  reg_last_set_table_tick = (short *) alloca (nregs * sizeof (short));
  reg_last_set_label = (short *) alloca (nregs * sizeof (short));
  reg_last_set_invalid = (char *) alloca (nregs * sizeof (char));
  reg_significant = (HOST_WIDE_INT *) alloca (nregs * sizeof (HOST_WIDE_INT));
  reg_sign_bit_copies = (char *) alloca (nregs * sizeof (char));

  bzero (reg_last_death, nregs * sizeof (rtx));
  bzero (reg_last_set, nregs * sizeof (rtx));
  bzero (reg_last_set_value, nregs * sizeof (rtx));
  bzero (reg_last_set_table_tick, nregs * sizeof (short));
  bzero (reg_last_set_invalid, nregs * sizeof (char));
  bzero (reg_significant, nregs * sizeof (HOST_WIDE_INT));
  bzero (reg_sign_bit_copies, nregs * sizeof (char));

  init_recog_no_volatile ();

  /* Compute maximum uid value so uid_cuid can be allocated.  */

  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    if (INSN_UID (insn) > i)
      i = INSN_UID (insn);

  uid_cuid = (int *) alloca ((i + 1) * sizeof (int));

  significant_mode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);

  /* Don't use reg_significant when computing it.  This can cause problems
     when, for example, we have j <<= 1 in a loop.  */

  significant_valid = 0;

  /* Compute the mapping from uids to cuids.
     Cuids are numbers assigned to insns, like uids,
     except that cuids increase monotonically through the code.

     Scan all SETs and see if we can deduce anything about what
     bits are significant for some registers.  */

  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    {
      INSN_CUID (insn) = ++i;
      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	note_stores (PATTERN (insn), set_significant);
    }

  significant_valid = 1;

  /* Now scan all the insns in forward order.  */

  label_tick = 1;
  last_call_cuid = 0;
  mem_last_set = 0;

  for (insn = f; insn; insn = next ? next : NEXT_INSN (insn))
    {
      next = 0;

      if (GET_CODE (insn) == CODE_LABEL)
	label_tick++;

      else if (GET_CODE (insn) == INSN
	       || GET_CODE (insn) == CALL_INSN
	       || GET_CODE (insn) == JUMP_INSN)
	{
	  /* Try this insn with each insn it links back to.  */

	  for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
	    if ((next = try_combine (insn, XEXP (links, 0), NULL_RTX)) != 0)
	      goto retry;

	  /* Try each sequence of three linked insns ending with this one.  */

	  for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
	    for (nextlinks = LOG_LINKS (XEXP (links, 0)); nextlinks;
		 nextlinks = XEXP (nextlinks, 1))
	      if ((next = try_combine (insn, XEXP (links, 0),
				       XEXP (nextlinks, 0))) != 0)
		goto retry;

#ifdef HAVE_cc0
	  /* Try to combine a jump insn that uses CC0
	     with a preceding insn that sets CC0, and maybe with its
	     logical predecessor as well.
	     This is how we make decrement-and-branch insns.
	     We need this special code because data flow connections
	     via CC0 do not get entered in LOG_LINKS.  */

	  if (GET_CODE (insn) == JUMP_INSN
	      && (prev = prev_nonnote_insn (insn)) != 0
	      && GET_CODE (prev) == INSN
	      && sets_cc0_p (PATTERN (prev)))
	    {
	      if ((next = try_combine (insn, prev, NULL_RTX)) != 0)
		goto retry;

	      for (nextlinks = LOG_LINKS (prev); nextlinks;
		   nextlinks = XEXP (nextlinks, 1))
		if ((next = try_combine (insn, prev,
					 XEXP (nextlinks, 0))) != 0)
		  goto retry;
	    }

	  /* Do the same for an insn that explicitly references CC0.  */
	  if (GET_CODE (insn) == INSN
	      && (prev = prev_nonnote_insn (insn)) != 0
	      && GET_CODE (prev) == INSN
	      && sets_cc0_p (PATTERN (prev))
	      && GET_CODE (PATTERN (insn)) == SET
	      && reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (insn))))
	    {
	      if ((next = try_combine (insn, prev, NULL_RTX)) != 0)
		goto retry;

	      for (nextlinks = LOG_LINKS (prev); nextlinks;
		   nextlinks = XEXP (nextlinks, 1))
		if ((next = try_combine (insn, prev,
					 XEXP (nextlinks, 0))) != 0)
		  goto retry;
	    }

	  /* Finally, see if any of the insns that this insn links to
	     explicitly references CC0.  If so, try this insn, that insn,
	     and its predecessor if it sets CC0.  */
	  for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
	    if (GET_CODE (XEXP (links, 0)) == INSN
		&& GET_CODE (PATTERN (XEXP (links, 0))) == SET
		&& reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (XEXP (links, 0))))
		&& (prev = prev_nonnote_insn (XEXP (links, 0))) != 0
		&& GET_CODE (prev) == INSN
		&& sets_cc0_p (PATTERN (prev))
		&& (next = try_combine (insn, XEXP (links, 0), prev)) != 0)
	      goto retry;
#endif

	  /* Try combining an insn with two different insns whose results it
	     uses.  */
	  for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
	    for (nextlinks = XEXP (links, 1); nextlinks;
		 nextlinks = XEXP (nextlinks, 1))
	      if ((next = try_combine (insn, XEXP (links, 0),
				       XEXP (nextlinks, 0))) != 0)
		goto retry;

	  if (GET_CODE (insn) != NOTE)
	    record_dead_and_set_regs (insn);

	retry:
	  ;
	}
    }

  total_attempts += combine_attempts;
  total_merges += combine_merges;
  total_extras += combine_extras;
  total_successes += combine_successes;
}
\f
/* Called via note_stores.  If X is a pseudo that is used in more than
   one basic block, is narrower than HOST_BITS_PER_WIDE_INT, and is being
   set, record what bits are significant.  If we are clobbering X,
   ignore this "set" because the clobbered value won't be used.

   If we are setting only a portion of X and we can't figure out what
   portion, assume all bits will be used since we don't know what will
   be happening.

   Similarly, set how many bits of X are known to be copies of the sign bit
   at all locations in the function.  This is the smallest number implied
   by any set of X.  */

static void
set_significant (x, set)
     rtx x;
     rtx set;
{
  int num;

  if (GET_CODE (x) == REG
      && REGNO (x) >= FIRST_PSEUDO_REGISTER
      && reg_n_sets[REGNO (x)] > 1
      && reg_basic_block[REGNO (x)] < 0
      && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (set) == CLOBBER)
	return;

      /* If this is a complex assignment, see if we can convert it into a
	 simple assignment.  */
      set = expand_field_assignment (set);
      if (SET_DEST (set) == x)
	{
	  reg_significant[REGNO (x)]
	    |= significant_bits (SET_SRC (set), significant_mode);
	  num = num_sign_bit_copies (SET_SRC (set), GET_MODE (x));
	  if (reg_sign_bit_copies[REGNO (x)] == 0
	      || reg_sign_bit_copies[REGNO (x)] > num)
	    reg_sign_bit_copies[REGNO (x)] = num;
	}
      else
	{
	  reg_significant[REGNO (x)] = GET_MODE_MASK (GET_MODE (x));
	  reg_sign_bit_copies[REGNO (x)] = 0;
	}
    }
}
\f
/* See if INSN can be combined into I3.  PRED and SUCC are optionally
   insns that were previously combined into I3 or that will be combined
   into the merger of INSN and I3.

   Return 0 if the combination is not allowed for any reason.

   If the combination is allowed, *PDEST will be set to the single
   destination of INSN and *PSRC to the single source, and this function
   will return 1.  */

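/* For instance (hypothetical RTL, for illustration only): if INSN is

	(set (reg:SI 60) (plus:SI (reg:SI 61) (const_int 4)))

   and the combination is allowed, *PDEST is set to (reg:SI 60) and
   *PSRC to the PLUS expression.  */
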
static int
can_combine_p (insn, i3, pred, succ, pdest, psrc)
     rtx insn;
     rtx i3;
     rtx pred, succ;
     rtx *pdest, *psrc;
{
  int i;
  rtx set = 0, src, dest;
  rtx p, link;
  int all_adjacent = (succ ? (next_active_insn (insn) == succ
			      && next_active_insn (succ) == i3)
		      : next_active_insn (insn) == i3);

  /* Can combine only if previous insn is a SET of a REG, a SUBREG or CC0,
     or a PARALLEL consisting of such a SET and CLOBBERs.

     If INSN has CLOBBER parallel parts, ignore them for our processing.
     By definition, these happen during the execution of the insn.  When it
     is merged with another insn, all bets are off.  If they are, in fact,
     needed and aren't also supplied in I3, they may be added by
     recog_for_combine.  Otherwise, it won't match.

     We can also ignore a SET whose SET_DEST is mentioned in a REG_UNUSED
     note.

     Get the source and destination of INSN.  If more than one, can't
     combine.  */

  if (GET_CODE (PATTERN (insn)) == SET)
    set = PATTERN (insn);
  else if (GET_CODE (PATTERN (insn)) == PARALLEL
	   && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
    {
      for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
	{
	  rtx elt = XVECEXP (PATTERN (insn), 0, i);

	  switch (GET_CODE (elt))
	    {
	      /* We can ignore CLOBBERs.  */
	    case CLOBBER:
	      break;

	    case SET:
	      /* Ignore SETs whose result isn't used but not those that
		 have side-effects.  */
	      if (find_reg_note (insn, REG_UNUSED, SET_DEST (elt))
		  && ! side_effects_p (elt))
		break;

	      /* If we have already found a SET, this is a second one and
		 so we cannot combine with this insn.  */
	      if (set)
		return 0;

	      set = elt;
	      break;

	    default:
	      /* Anything else means we can't combine.  */
	      return 0;
	    }
	}

      if (set == 0
	  /* If SET_SRC is an ASM_OPERANDS we can't throw away these CLOBBERs,
	     so don't do anything with it.  */
	  || GET_CODE (SET_SRC (set)) == ASM_OPERANDS)
	return 0;
    }
  else
    return 0;

  if (set == 0)
    return 0;

  set = expand_field_assignment (set);
  src = SET_SRC (set), dest = SET_DEST (set);

  /* Don't eliminate a store in the stack pointer.  */
  if (dest == stack_pointer_rtx
      /* Don't install a subreg involving two modes not tieable.
	 It can worsen register allocation, and can even make invalid reload
	 insns, since the reg inside may need to be copied from in the
	 outside mode, and that may be invalid if it is an fp reg copied in
	 integer mode.  As a special exception, we can allow this if
	 I3 is simply copying DEST, a REG, to CC0.  */
      || (GET_CODE (src) == SUBREG
	  && ! MODES_TIEABLE_P (GET_MODE (src), GET_MODE (SUBREG_REG (src)))
#ifdef HAVE_cc0
	  && ! (GET_CODE (i3) == INSN && GET_CODE (PATTERN (i3)) == SET
		&& SET_DEST (PATTERN (i3)) == cc0_rtx
		&& GET_CODE (dest) == REG && dest == SET_SRC (PATTERN (i3)))
#endif
	  )
      /* If we couldn't eliminate a field assignment, we can't combine.  */
      || GET_CODE (dest) == ZERO_EXTRACT || GET_CODE (dest) == STRICT_LOW_PART
      /* Don't combine with an insn that sets a register to itself if it has
	 a REG_EQUAL note.  This may be part of a REG_NO_CONFLICT sequence.  */
      || (rtx_equal_p (src, dest) && find_reg_note (insn, REG_EQUAL, NULL_RTX))
      /* Can't merge a function call.  */
      || GET_CODE (src) == CALL
      /* Don't substitute into an incremented register.  */
      || FIND_REG_INC_NOTE (i3, dest)
      || (succ && FIND_REG_INC_NOTE (succ, dest))
      /* Don't combine the end of a libcall into anything.  */
      || find_reg_note (insn, REG_RETVAL, NULL_RTX)
      /* Make sure that DEST is not used after SUCC but before I3.  */
      || (succ && ! all_adjacent
	  && reg_used_between_p (dest, succ, i3))
      /* Make sure that the value that is to be substituted for the register
	 does not use any registers whose values alter in between.  However,
	 if the insns are adjacent, a use can't cross a set even though we
	 think it might (this can happen for a sequence of insns each setting
	 the same destination; reg_last_set of that register might point to
	 a NOTE).  Also, don't move a volatile asm across any other insns.  */
      || (! all_adjacent
	  && (use_crosses_set_p (src, INSN_CUID (insn))
	      || (GET_CODE (src) == ASM_OPERANDS && MEM_VOLATILE_P (src))))
      /* If there is a REG_NO_CONFLICT note for DEST in I3 or SUCC, we get
	 better register allocation by not doing the combine.  */
      || find_reg_note (i3, REG_NO_CONFLICT, dest)
      || (succ && find_reg_note (succ, REG_NO_CONFLICT, dest))
      /* Don't combine across a CALL_INSN, because that would possibly
	 change whether the life span of some REGs crosses calls or not,
	 and it is a pain to update that information.
	 Exception: if source is a constant, moving it later can't hurt.
	 Accept that special case, because it helps -fforce-addr a lot.  */
      || (INSN_CUID (insn) < last_call_cuid && ! CONSTANT_P (src)))
    return 0;

  /* DEST must either be a REG or CC0.  */
  if (GET_CODE (dest) == REG)
    {
      /* If register alignment is being enforced for multi-word items in all
	 cases except for parameters, it is possible to have a register copy
	 insn referencing a hard register that is not allowed to contain the
	 mode being copied and which would not be valid as an operand of most
	 insns.  Eliminate this problem by not combining with such an insn.

	 Also, on some machines we don't want to extend the life of a hard
	 register.  */

      if (GET_CODE (src) == REG
	  && ((REGNO (dest) < FIRST_PSEUDO_REGISTER
	       && ! HARD_REGNO_MODE_OK (REGNO (dest), GET_MODE (dest)))
#ifdef SMALL_REGISTER_CLASSES
	      /* Don't extend the life of a hard register.  */
	      || REGNO (src) < FIRST_PSEUDO_REGISTER
#else
	      || (REGNO (src) < FIRST_PSEUDO_REGISTER
		  && ! HARD_REGNO_MODE_OK (REGNO (src), GET_MODE (src)))
#endif
	      ))
	return 0;
    }
  else if (GET_CODE (dest) != CC0)
    return 0;

  /* Don't substitute for a register intended as a clobberable operand.  */
  if (GET_CODE (PATTERN (i3)) == PARALLEL)
    for (i = XVECLEN (PATTERN (i3), 0) - 1; i >= 0; i--)
      if (GET_CODE (XVECEXP (PATTERN (i3), 0, i)) == CLOBBER
	  && rtx_equal_p (XEXP (XVECEXP (PATTERN (i3), 0, i), 0), dest))
	return 0;

  /* If INSN contains anything volatile, or is an `asm' (whether volatile
     or not), reject, unless nothing volatile comes between it and I3,
     with the exception of SUCC.  */

  if (GET_CODE (src) == ASM_OPERANDS || volatile_refs_p (src))
    for (p = NEXT_INSN (insn); p != i3; p = NEXT_INSN (p))
      if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
	  && p != succ && volatile_refs_p (PATTERN (p)))
	return 0;

  /* If INSN or I2 contains an autoincrement or autodecrement,
     make sure that register is not used between there and I3,
     and not already used in I3 either.
     Also insist that I3 not be a jump; if it were one
     and the incremented register were spilled, we would lose.  */

#ifdef AUTO_INC_DEC
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == REG_INC
	&& (GET_CODE (i3) == JUMP_INSN
	    || reg_used_between_p (XEXP (link, 0), insn, i3)
	    || reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i3))))
      return 0;
#endif

#ifdef HAVE_cc0
  /* Don't combine an insn that follows a CC0-setting insn.
     An insn that uses CC0 must not be separated from the one that sets it.
     We do, however, allow I2 to follow a CC0-setting insn if that insn
     is passed as I1; in that case it will be deleted also.
     We also allow combining in this case if all the insns are adjacent
     because that would leave the two CC0 insns adjacent as well.
     It would be more logical to test whether CC0 occurs inside I1 or I2,
     but that would be much slower, and this ought to be equivalent.  */

  p = prev_nonnote_insn (insn);
  if (p && p != pred && GET_CODE (p) == INSN && sets_cc0_p (PATTERN (p))
      && ! all_adjacent)
    return 0;
#endif

  /* If we get here, we have passed all the tests and the combination is
     to be allowed.  */

  *pdest = dest;
  *psrc = src;

  return 1;
}
\f
/* LOC is the location within I3 that contains its pattern or the component
   of a PARALLEL of the pattern.  We validate that it is valid for combining.

   One problem is that, if I3 modifies its output as opposed to replacing it
   entirely, we can't allow the output to contain I2DEST or I1DEST, since
   doing so would produce an insn that is not equivalent to the original
   insns.

   Consider:

	 (set (reg:DI 101) (reg:DI 100))
	 (set (subreg:SI (reg:DI 101) 0) <foo>)

   This is NOT equivalent to:

	 (parallel [(set (subreg:SI (reg:DI 100) 0) <foo>)
		    (set (reg:DI 101) (reg:DI 100))])

   Not only does this modify 100 (in which case it might still be valid
   if 100 were dead in I2), it sets 101 to the ORIGINAL value of 100.

   We can also run into a problem if I2 sets a register that I1
   uses and I1 gets directly substituted into I3 (not via I2).  In that
   case, we would be getting the wrong value of I2DEST into I3, so we
   must reject the combination.  This case occurs when I2 and I1 both
   feed into I3, rather than when I1 feeds into I2, which feeds into I3.
   If I1_NOT_IN_SRC is non-zero, it means that finding I1 in the source
   of a SET must prevent combination from occurring.

   On machines where SMALL_REGISTER_CLASSES is defined, we don't combine
   if the destination of a SET is a hard register.

   Before doing the above check, we first try to expand a field assignment
   into a set of logical operations.

   If PI3_DEST_KILLED is non-zero, it is a pointer to a location in which
   we place a register that is both set and used within I3.  If more than one
   such register is detected, we fail.

   Return 1 if the combination is valid, zero otherwise.  */

static int
combinable_i3pat (i3, loc, i2dest, i1dest, i1_not_in_src, pi3dest_killed)
     rtx i3;
     rtx *loc;
     rtx i2dest;
     rtx i1dest;
     int i1_not_in_src;
     rtx *pi3dest_killed;
{
  rtx x = *loc;

  if (GET_CODE (x) == SET)
    {
      rtx set = expand_field_assignment (x);
      rtx dest = SET_DEST (set);
      rtx src = SET_SRC (set);
      rtx inner_dest = dest, inner_src = src;

      SUBST (*loc, set);

      while (GET_CODE (inner_dest) == STRICT_LOW_PART
	     || GET_CODE (inner_dest) == SUBREG
	     || GET_CODE (inner_dest) == ZERO_EXTRACT)
	inner_dest = XEXP (inner_dest, 0);

      /* We probably don't need this any more now that LIMIT_RELOAD_CLASS
	 was added.  */
#if 0
      while (GET_CODE (inner_src) == STRICT_LOW_PART
	     || GET_CODE (inner_src) == SUBREG
	     || GET_CODE (inner_src) == ZERO_EXTRACT)
	inner_src = XEXP (inner_src, 0);

      /* If it is better that two different modes keep two different pseudos,
	 avoid combining them.  This avoids producing the following pattern
	 on a 386:
	 (set (subreg:SI (reg/v:QI 21) 0)
	      (lshiftrt:SI (reg/v:SI 20)
			   (const_int 24)))
	 If that were made, reload could not handle the pair of
	 reg 20/21, since it would try to get any GENERAL_REGS
	 but some of them don't handle QImode.  */

      if (rtx_equal_p (inner_src, i2dest)
	  && GET_CODE (inner_dest) == REG
	  && ! MODES_TIEABLE_P (GET_MODE (i2dest), GET_MODE (inner_dest)))
	return 0;
#endif

      /* Check for the case where I3 modifies its output, as
	 discussed above.  */
      if ((inner_dest != dest
	   && (reg_overlap_mentioned_p (i2dest, inner_dest)
	       || (i1dest && reg_overlap_mentioned_p (i1dest, inner_dest))))
	  /* This is the same test done in can_combine_p except that we
	     allow a hard register with SMALL_REGISTER_CLASSES if SRC is a
	     CALL operation.  */
	  || (GET_CODE (inner_dest) == REG
	      && REGNO (inner_dest) < FIRST_PSEUDO_REGISTER
#ifdef SMALL_REGISTER_CLASSES
	      && GET_CODE (src) != CALL
#else
	      && ! HARD_REGNO_MODE_OK (REGNO (inner_dest),
				       GET_MODE (inner_dest))
#endif
	      )
	  || (i1_not_in_src && reg_overlap_mentioned_p (i1dest, src)))
	return 0;

      /* If DEST is used in I3, it is being killed in this insn,
	 so record that for later.  */
      if (pi3dest_killed && GET_CODE (dest) == REG
	  && reg_referenced_p (dest, PATTERN (i3)))
	{
	  if (*pi3dest_killed)
	    return 0;

	  *pi3dest_killed = dest;
	}
    }

  else if (GET_CODE (x) == PARALLEL)
    {
      int i;

      for (i = 0; i < XVECLEN (x, 0); i++)
	if (! combinable_i3pat (i3, &XVECEXP (x, 0, i), i2dest, i1dest,
				i1_not_in_src, pi3dest_killed))
	  return 0;
    }

  return 1;
}
\f
/* Try to combine the insns I1 and I2 into I3.
   Here I1 and I2 appear earlier than I3.
   I1 can be zero; then we combine just I2 into I3.

   If we are combining three insns and the resulting insn is not recognized,
   try splitting it into two insns.  If that happens, I2 and I3 are retained
   and I1 is pseudo-deleted by turning it into a NOTE.  Otherwise, I1 and I2
   are pseudo-deleted.

   If we created two insns, return I2; otherwise return I3.
   Return 0 if the combination does not work.  Then nothing is changed.  */

static rtx
try_combine (i3, i2, i1)
     register rtx i3, i2, i1;
{
  /* New patterns for I3 and I2, respectively.  */
  rtx newpat, newi2pat = 0;
  /* Indicates need to preserve SET in I1 or I2 in I3 if it is not dead.  */
  int added_sets_1, added_sets_2;
  /* Total number of SETs to put into I3.  */
  int total_sets;
  /* Nonzero if I2's body now appears in I3.  */
  int i2_is_used;
  /* INSN_CODEs for new I3, new I2, and user of condition code.  */
  int insn_code_number, i2_code_number, other_code_number;
  /* Contains I3 if the destination of I3 is used in its source, which means
     that the old life of I3 is being killed.  If that usage is placed into
     I2 and not in I3, a REG_DEAD note must be made.  */
  rtx i3dest_killed = 0;
  /* SET_DEST and SET_SRC of I2 and I1.  */
  rtx i2dest, i2src, i1dest = 0, i1src = 0;
  /* PATTERN (I2), or a copy of it in certain cases.  */
  rtx i2pat;
  /* Indicates if I2DEST or I1DEST is in I2SRC or I1SRC.  */
  int i2dest_in_i2src, i1dest_in_i1src = 0, i2dest_in_i1src = 0;
  int i1_feeds_i3 = 0;
  /* Notes that must be added to REG_NOTES in I3 and I2.  */
  rtx new_i3_notes, new_i2_notes;

  int maxreg;
  rtx temp;
  register rtx link;
  int i;

  /* If any of I1, I2, and I3 isn't really an insn, we can't do anything.
     This can occur when flow deletes an insn that it has merged into an
     auto-increment address.  We also can't do anything if I3 has a
     REG_LIBCALL note since we don't want to disrupt the contiguity of a
     libcall.  */

  if (GET_RTX_CLASS (GET_CODE (i3)) != 'i'
      || GET_RTX_CLASS (GET_CODE (i2)) != 'i'
      || (i1 && GET_RTX_CLASS (GET_CODE (i1)) != 'i')
      || find_reg_note (i3, REG_LIBCALL, NULL_RTX))
    return 0;

  combine_attempts++;

  undobuf.num_undo = previous_num_undos = 0;
  undobuf.other_insn = 0;

  /* Save the current high-water-mark so we can free storage if we didn't
     accept this combination.  */
  undobuf.storage = (char *) oballoc (0);

  /* If I1 and I2 both feed I3, they can be in any order.  To simplify the
     code below, set I1 to be the earlier of the two insns.  */
  if (i1 && INSN_CUID (i1) > INSN_CUID (i2))
    temp = i1, i1 = i2, i2 = temp;

  /* First check for one important special-case that the code below will
     not handle.  Namely, the case where I1 is zero, I2 has multiple sets,
     and I3 is a SET whose SET_SRC is a SET_DEST in I2.  In that case,
     we may be able to replace that destination with the destination of I3.
     This occurs in the common case where we compute both a quotient and
     remainder into a structure, in which case we want to do the computation
     directly into the structure to avoid register-register copies.

     We make very conservative checks below and only try to handle the
     most common cases of this.  For example, we only handle the case
     where I2 and I3 are adjacent to avoid making difficult register
     usage tests.  */

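  /* A hypothetical shape of that special case (illustration only):

	I2: (parallel [(set (reg:SI 60) (div:SI (reg:SI 62) (reg:SI 63)))
		       (set (reg:SI 61) (mod:SI (reg:SI 62) (reg:SI 63)))])
	I3: (set (mem:SI (reg:SI 65)) (reg:SI 61))

     Here (reg:SI 61) dies in I3, so its SET_DEST in I2 can be replaced
     by (mem:SI (reg:SI 65)), computing the remainder directly into the
     structure slot.  */
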
  if (i1 == 0 && GET_CODE (i3) == INSN && GET_CODE (PATTERN (i3)) == SET
      && GET_CODE (SET_SRC (PATTERN (i3))) == REG
      && REGNO (SET_SRC (PATTERN (i3))) >= FIRST_PSEUDO_REGISTER
#ifdef SMALL_REGISTER_CLASSES
      && (GET_CODE (SET_DEST (PATTERN (i3))) != REG
	  || REGNO (SET_DEST (PATTERN (i3))) >= FIRST_PSEUDO_REGISTER)
#endif
      && find_reg_note (i3, REG_DEAD, SET_SRC (PATTERN (i3)))
      && GET_CODE (PATTERN (i2)) == PARALLEL
      && ! side_effects_p (SET_DEST (PATTERN (i3)))
      /* If the dest of I3 is a ZERO_EXTRACT or STRICT_LOW_PART, the code
	 below would need to check what is inside (and reg_overlap_mentioned_p
	 doesn't support those codes anyway).  Don't allow those destinations;
	 the resulting insn isn't likely to be recognized anyway.  */
      && GET_CODE (SET_DEST (PATTERN (i3))) != ZERO_EXTRACT
      && GET_CODE (SET_DEST (PATTERN (i3))) != STRICT_LOW_PART
      && ! reg_overlap_mentioned_p (SET_SRC (PATTERN (i3)),
				    SET_DEST (PATTERN (i3)))
      && next_real_insn (i2) == i3)
    {
      rtx p2 = PATTERN (i2);

      /* Make sure that the destination of I3,
	 which we are going to substitute into one output of I2,
	 is not used within another output of I2.  We must avoid making this:
	 (parallel [(set (mem (reg 69)) ...)
		    (set (reg 69) ...)])
	 which is not well-defined as to order of actions.
	 (Besides, reload can't handle output reloads for this.)

	 The problem can also happen if the dest of I3 is a memory ref,
	 if another dest in I2 is an indirect memory ref.  */
      for (i = 0; i < XVECLEN (p2, 0); i++)
	if (GET_CODE (XVECEXP (p2, 0, i)) == SET
	    && reg_overlap_mentioned_p (SET_DEST (PATTERN (i3)),
					SET_DEST (XVECEXP (p2, 0, i))))
	  break;

      if (i == XVECLEN (p2, 0))
	for (i = 0; i < XVECLEN (p2, 0); i++)
	  if (SET_DEST (XVECEXP (p2, 0, i)) == SET_SRC (PATTERN (i3)))
	    {
	      combine_merges++;

	      subst_insn = i3;
	      subst_low_cuid = INSN_CUID (i2);

	      added_sets_2 = 0;
	      i2dest = SET_SRC (PATTERN (i3));

	      /* Replace the dest in I2 with our dest and make the resulting
		 insn the new pattern for I3.  Then skip to where we
		 validate the pattern.  Everything was set up above.  */
	      SUBST (SET_DEST (XVECEXP (p2, 0, i)),
		     SET_DEST (PATTERN (i3)));

	      newpat = p2;
	      goto validate_replacement;
	    }
    }

#ifndef HAVE_cc0
  /* If we have no I1 and I2 looks like:
	(parallel [(set (reg:CC X) (compare:CC OP (const_int 0)))
		   (set Y OP)])
     make up a dummy I1 that is
	(set Y OP)
     and change I2 to be
	(set (reg:CC X) (compare:CC Y (const_int 0)))

     (We can ignore any trailing CLOBBERs.)

     This undoes a previous combination and allows us to match a branch-and-
     decrement insn.  */

  if (i1 == 0 && GET_CODE (PATTERN (i2)) == PARALLEL
      && XVECLEN (PATTERN (i2), 0) >= 2
      && GET_CODE (XVECEXP (PATTERN (i2), 0, 0)) == SET
      && (GET_MODE_CLASS (GET_MODE (SET_DEST (XVECEXP (PATTERN (i2), 0, 0))))
	  == MODE_CC)
      && GET_CODE (SET_SRC (XVECEXP (PATTERN (i2), 0, 0))) == COMPARE
      && XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 1) == const0_rtx
      && GET_CODE (XVECEXP (PATTERN (i2), 0, 1)) == SET
      && GET_CODE (SET_DEST (XVECEXP (PATTERN (i2), 0, 1))) == REG
      && rtx_equal_p (XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 0),
		      SET_SRC (XVECEXP (PATTERN (i2), 0, 1))))
    {
      for (i = XVECLEN (PATTERN (i2), 0) - 1; i >= 2; i--)
	if (GET_CODE (XVECEXP (PATTERN (i2), 0, i)) != CLOBBER)
	  break;

      if (i == 1)
	{
	  /* We make I1 with the same INSN_UID as I2.  This gives it
	     the same INSN_CUID for value tracking.  Our fake I1 will
	     never appear in the insn stream so giving it the same INSN_UID
	     as I2 will not cause a problem.  */

	  i1 = gen_rtx (INSN, VOIDmode, INSN_UID (i2), 0, i2,
			XVECEXP (PATTERN (i2), 0, 1), -1, 0, 0);

	  SUBST (PATTERN (i2), XVECEXP (PATTERN (i2), 0, 0));
	  SUBST (XEXP (SET_SRC (PATTERN (i2)), 0),
		 SET_DEST (PATTERN (i1)));
	}
    }
#endif

  /* Verify that I2 and I1 are valid for combining.  */
  if (! can_combine_p (i2, i3, i1, NULL_RTX, &i2dest, &i2src)
      || (i1 && ! can_combine_p (i1, i3, NULL_RTX, i2, &i1dest, &i1src)))
    {
      undo_all ();
      return 0;
    }

  /* Record whether I2DEST is used in I2SRC and similarly for the other
     cases.  Knowing this will help in register status updating below.  */
  i2dest_in_i2src = reg_overlap_mentioned_p (i2dest, i2src);
  i1dest_in_i1src = i1 && reg_overlap_mentioned_p (i1dest, i1src);
  i2dest_in_i1src = i1 && reg_overlap_mentioned_p (i2dest, i1src);

  /* See if I1 directly feeds into I3.  It does if I1DEST is not used
     in I2SRC.  */
  i1_feeds_i3 = i1 && ! reg_overlap_mentioned_p (i1dest, i2src);

  /* Ensure that I3's pattern can be the destination of combines.  */
  if (! combinable_i3pat (i3, &PATTERN (i3), i2dest, i1dest,
			  i1 && i2dest_in_i1src && i1_feeds_i3,
			  &i3dest_killed))
    {
      undo_all ();
      return 0;
    }

  /* If I3 has an inc, then give up if I1 or I2 uses the reg that is inc'd.
     We used to do this EXCEPT in one case: I3 has a post-inc in an
     output operand.  However, that exception can give rise to insns like
	mov r3,(r3)+
     which is a famous insn on the PDP-11 where the value of r3 used as the
     source was model-dependent.  Avoid this sort of thing.  */

#if 0
  if (!(GET_CODE (PATTERN (i3)) == SET
	&& GET_CODE (SET_SRC (PATTERN (i3))) == REG
	&& GET_CODE (SET_DEST (PATTERN (i3))) == MEM
	&& (GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_INC
	    || GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_DEC)))
    /* It's not the exception.  */
#endif
#ifdef AUTO_INC_DEC
  for (link = REG_NOTES (i3); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == REG_INC
	&& (reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i2))
	    || (i1 != 0
		&& reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i1)))))
      {
	undo_all ();
	return 0;
      }
#endif

  /* See if the SETs in I1 or I2 need to be kept around in the merged
     instruction: whenever the value set there is still needed past I3.
     For the SETs in I2, this is easy: we see if I2DEST dies or is set in I3.

     For the SET in I1, we have two cases:  If I1 and I2 independently
     feed into I3, the set in I1 needs to be kept around if I1DEST dies
     or is set in I3.  Otherwise (if I1 feeds I2 which feeds I3), the set
     in I1 needs to be kept around unless I1DEST dies or is set in either
     I2 or I3.  We can distinguish these cases by seeing if I2SRC mentions
     I1DEST.  If so, we know I1 feeds into I2.  */

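  /* For instance (hypothetical), if I2 is
     (set (reg:SI 60) (plus:SI (reg:SI 61) (const_int 4))) and an insn
     after I3 still uses (reg:SI 60), then I2DEST neither dies nor is set
     in I3, so the merged pattern must keep I2's SET as well.  */
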
  added_sets_2 = ! dead_or_set_p (i3, i2dest);

  added_sets_1
    = i1 && ! (i1_feeds_i3 ? dead_or_set_p (i3, i1dest)
	       : (dead_or_set_p (i3, i1dest) || dead_or_set_p (i2, i1dest)));

  /* If the set in I2 needs to be kept around, we must make a copy of
     PATTERN (I2), so that when we substitute I1SRC for I1DEST in
     PATTERN (I2), we are only substituting for the original I1DEST, not into
     an already-substituted copy.  This also prevents making self-referential
     rtx.  If I2 is a PARALLEL, we just need the piece that assigns I2SRC to
     I2DEST.  */

  i2pat = (GET_CODE (PATTERN (i2)) == PARALLEL
	   ? gen_rtx (SET, VOIDmode, i2dest, i2src)
	   : PATTERN (i2));

  if (added_sets_2)
    i2pat = copy_rtx (i2pat);

  combine_merges++;

  /* Substitute in the latest insn for the regs set by the earlier ones.  */

  maxreg = max_reg_num ();

  subst_insn = i3;

  /* It is possible that the source of I2 or I1 may be performing an
     unneeded operation, such as a ZERO_EXTEND of something that is known
     to have the high part zero.  Handle that case by letting subst look at
     the innermost one of them.

     Another way to do this would be to have a function that tries to
     simplify a single insn instead of merging two or more insns.  We don't
     do this because of the potential of infinite loops and because
     of the potential extra memory required.  However, doing it the way
     we are is a bit of a kludge and doesn't catch all cases.

     But only do this if -fexpensive-optimizations since it slows things down
     and doesn't usually win.  */

  if (flag_expensive_optimizations)
    {
      /* Pass pc_rtx so no substitutions are done, just simplifications.
	 The cases that we are interested in here do not involve the few
	 cases where is_replaced is checked.  */
      if (i1)
	{
	  subst_low_cuid = INSN_CUID (i1);
	  i1src = subst (i1src, pc_rtx, pc_rtx, 0, 0);
	}
      else
	{
	  subst_low_cuid = INSN_CUID (i2);
	  i2src = subst (i2src, pc_rtx, pc_rtx, 0, 0);
	}

      previous_num_undos = undobuf.num_undo;
    }

#ifndef HAVE_cc0
  /* Many machines that don't use CC0 have insns that can both perform an
     arithmetic operation and set the condition code.  These operations will
     be represented as a PARALLEL with the first element of the vector
     being a COMPARE of an arithmetic operation with the constant zero.
     The second element of the vector will set some pseudo to the result
     of the same arithmetic operation.  If we simplify the COMPARE, we won't
     match such a pattern and so will generate an extra insn.  Here we test
     for this case, where both the comparison and the operation result are
     needed, and make the PARALLEL by just replacing I2DEST in I3SRC with
     I2SRC.  Later we will make the PARALLEL that contains I2.  */

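  /* Hypothetically, such a PARALLEL might look like

	(parallel [(set (reg:CC 70)
			(compare:CC (plus:SI (reg:SI 60) (reg:SI 61))
				    (const_int 0)))
		   (set (reg:SI 62) (plus:SI (reg:SI 60) (reg:SI 61)))])

     (illustration only; the exact form depends on the machine
     description).  */
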
  if (i1 == 0 && added_sets_2 && GET_CODE (PATTERN (i3)) == SET
      && GET_CODE (SET_SRC (PATTERN (i3))) == COMPARE
      && XEXP (SET_SRC (PATTERN (i3)), 1) == const0_rtx
      && rtx_equal_p (XEXP (SET_SRC (PATTERN (i3)), 0), i2dest))
    {
      rtx *cc_use;
      enum machine_mode compare_mode;

      newpat = PATTERN (i3);
      SUBST (XEXP (SET_SRC (newpat), 0), i2src);

      i2_is_used = 1;

#ifdef EXTRA_CC_MODES
      /* See if a COMPARE with the operand we substituted in should be done
	 with the mode that is currently being used.  If not, do the same
	 processing we do in `subst' for a SET; namely, if the destination
	 is used only once, try to replace it with a register of the proper
	 mode and also replace the COMPARE.  */
      if (undobuf.other_insn == 0
	  && (cc_use = find_single_use (SET_DEST (newpat), i3,
					&undobuf.other_insn))
	  && ((compare_mode = SELECT_CC_MODE (GET_CODE (*cc_use),
					      i2src, const0_rtx))
	      != GET_MODE (SET_DEST (newpat))))
	{
	  int regno = REGNO (SET_DEST (newpat));
	  rtx new_dest = gen_rtx (REG, compare_mode, regno);

	  if (regno < FIRST_PSEUDO_REGISTER
	      || (reg_n_sets[regno] == 1 && ! added_sets_2
		  && ! REG_USERVAR_P (SET_DEST (newpat))))
	    {
	      if (regno >= FIRST_PSEUDO_REGISTER)
		SUBST (regno_reg_rtx[regno], new_dest);

	      SUBST (SET_DEST (newpat), new_dest);
	      SUBST (XEXP (*cc_use, 0), new_dest);
	      SUBST (SET_SRC (newpat),
		     gen_rtx_combine (COMPARE, compare_mode,
				      i2src, const0_rtx));
	    }
	  else
	    undobuf.other_insn = 0;
	}
#endif
    }
  else
#endif
    {
      n_occurrences = 0;		/* `subst' counts here */

      /* If I1 feeds into I2 (not into I3) and I1DEST is in I1SRC, we
	 need to make a unique copy of I2SRC each time we substitute it
	 to avoid self-referential rtl.  */

      subst_low_cuid = INSN_CUID (i2);
      newpat = subst (PATTERN (i3), i2dest, i2src, 0,
		      ! i1_feeds_i3 && i1dest_in_i1src);
      previous_num_undos = undobuf.num_undo;

      /* Record whether i2's body now appears within i3's body.  */
      i2_is_used = n_occurrences;
    }

  /* If we already got a failure, don't try to do more.  Otherwise,
     try to substitute in I1 if we have it.  */

  if (i1 && GET_CODE (newpat) != CLOBBER)
    {
      /* Before we can do this substitution, we must redo the test done
	 above (see detailed comments there) that ensures that I1DEST
	 isn't mentioned in any SETs in NEWPAT that are field assignments.  */

      if (! combinable_i3pat (NULL_RTX, &newpat, i1dest, NULL_RTX,
			      0, NULL_PTR))
	{
	  undo_all ();
	  return 0;
	}

      n_occurrences = 0;
      subst_low_cuid = INSN_CUID (i1);
      newpat = subst (newpat, i1dest, i1src, 0, 0);
      previous_num_undos = undobuf.num_undo;
    }

  /* Fail if an autoincrement side-effect has been duplicated.  Be careful
     to count all the ways that I2SRC and I1SRC can be used.  */
  if ((FIND_REG_INC_NOTE (i2, NULL_RTX) != 0
       && i2_is_used + added_sets_2 > 1)
      || (i1 != 0 && FIND_REG_INC_NOTE (i1, NULL_RTX) != 0
	  && (n_occurrences + added_sets_1 + (added_sets_2 && ! i1_feeds_i3)
	      > 1))
      /* Fail if we tried to make a new register (we used to abort, but there's
	 really no reason to).  */
      || max_reg_num () != maxreg
      /* Fail if we couldn't do something and have a CLOBBER.  */
      || GET_CODE (newpat) == CLOBBER)
    {
      undo_all ();
      return 0;
    }

1402 /* If the actions of the earlier insns must be kept
1403 in addition to substituting them into the latest one,
1404 we must make a new PARALLEL for the latest insn
1405 to hold additional the SETs. */
1406
1407 if (added_sets_1 || added_sets_2)
1408 {
1409 combine_extras++;
1410
1411 if (GET_CODE (newpat) == PARALLEL)
1412 {
1413 rtvec old = XVEC (newpat, 0);
1414 total_sets = XVECLEN (newpat, 0) + added_sets_1 + added_sets_2;
1415 newpat = gen_rtx (PARALLEL, VOIDmode, rtvec_alloc (total_sets));
1416 bcopy (&old->elem[0], &XVECEXP (newpat, 0, 0),
1417 sizeof (old->elem[0]) * old->num_elem);
1418 }
1419 else
1420 {
1421 rtx old = newpat;
1422 total_sets = 1 + added_sets_1 + added_sets_2;
1423 newpat = gen_rtx (PARALLEL, VOIDmode, rtvec_alloc (total_sets));
1424 XVECEXP (newpat, 0, 0) = old;
1425 }
1426
1427 if (added_sets_1)
1428 XVECEXP (newpat, 0, --total_sets)
1429 = (GET_CODE (PATTERN (i1)) == PARALLEL
1430 ? gen_rtx (SET, VOIDmode, i1dest, i1src) : PATTERN (i1));
1431
1432 if (added_sets_2)
1433 {
1434 /* If there is no I1, use I2's body as is. We used to also not do
1435 the subst call below if I2 was substituted into I3,
1436 but that could lose a simplification. */
1437 if (i1 == 0)
1438 XVECEXP (newpat, 0, --total_sets) = i2pat;
1439 else
1440 /* See comment where i2pat is assigned. */
1441 XVECEXP (newpat, 0, --total_sets)
1442 = subst (i2pat, i1dest, i1src, 0, 0);
1443 }
1444 }
1445
1446 /* We come here when we are replacing a destination in I2 with the
1447 destination of I3. */
1448 validate_replacement:
1449
1450 /* Is the result of combination a valid instruction? */
1451 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
1452
1453 /* If the result isn't valid, see if it is a PARALLEL of two SETs where
1454 the second SET's destination is a register that is unused. In that case,
1455 we just need the first SET. This can occur when simplifying a divmod
1456 insn. We *must* test for this case here because the code below that
1457 splits two independent SETs doesn't handle this case correctly when it
1458 updates the register status. Also check the case where the first
1459 SET's destination is unused. That would not cause incorrect code, but
1460 does cause an unneeded insn to remain. */
1461
1462 if (insn_code_number < 0 && GET_CODE (newpat) == PARALLEL
1463 && XVECLEN (newpat, 0) == 2
1464 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
1465 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
1466 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) == REG
1467 && find_reg_note (i3, REG_UNUSED, SET_DEST (XVECEXP (newpat, 0, 1)))
1468 && ! side_effects_p (SET_SRC (XVECEXP (newpat, 0, 1)))
1469 && asm_noperands (newpat) < 0)
1470 {
1471 newpat = XVECEXP (newpat, 0, 0);
1472 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
1473 }
1474
1475 else if (insn_code_number < 0 && GET_CODE (newpat) == PARALLEL
1476 && XVECLEN (newpat, 0) == 2
1477 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
1478 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
1479 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) == REG
1480 && find_reg_note (i3, REG_UNUSED, SET_DEST (XVECEXP (newpat, 0, 0)))
1481 && ! side_effects_p (SET_SRC (XVECEXP (newpat, 0, 0)))
1482 && asm_noperands (newpat) < 0)
1483 {
1484 newpat = XVECEXP (newpat, 0, 1);
1485 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
1486 }
1487
1488 /* See if this is an XOR. If so, perhaps the problem is that the
1489 constant is out of range. Replace it with a complemented XOR with
1490 a complemented constant; it might be in range. */
1491
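 /* For example, on a 32-bit target (xor r1 (const_int -10)) may fail to
    match if -10 is not a legal immediate, while the equivalent
    (not (xor r1 (const_int 9))) might match, since ~(-10) == 9.  */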
1492 else if (insn_code_number < 0 && GET_CODE (newpat) == SET
1493 && GET_CODE (SET_SRC (newpat)) == XOR
1494 && GET_CODE (XEXP (SET_SRC (newpat), 1)) == CONST_INT
1495 && ((temp = simplify_unary_operation (NOT,
1496 GET_MODE (SET_SRC (newpat)),
1497 XEXP (SET_SRC (newpat), 1),
1498 GET_MODE (SET_SRC (newpat))))
1499 != 0))
1500 {
1501 enum machine_mode i_mode = GET_MODE (SET_SRC (newpat));
1502 rtx pat
1503 = gen_rtx_combine (SET, VOIDmode, SET_DEST (newpat),
1504 gen_unary (NOT, i_mode,
1505 gen_binary (XOR, i_mode,
1506 XEXP (SET_SRC (newpat), 0),
1507 temp)));
1508
1509 insn_code_number = recog_for_combine (&pat, i3, &new_i3_notes);
1510 if (insn_code_number >= 0)
1511 newpat = pat;
1512 }
1513
1514 /* If we were combining three insns and the result is a simple SET
1515 with no ASM_OPERANDS that wasn't recognized, try to split it into two
1516 insns. There are two ways to do this. It can be split using a
1517 machine-specific method (like when you have an addition of a large
1518 constant) or by combine in the function find_split_point. */
1519
1520 if (i1 && insn_code_number < 0 && GET_CODE (newpat) == SET
1521 && asm_noperands (newpat) < 0)
1522 {
1523 rtx m_split, *split;
1524 rtx ni2dest = i2dest;
1525
1526 /* See if the MD file can split NEWPAT. If it can't, see if letting it
1527 use I2DEST as a scratch register will help. In the latter case,
1528 convert I2DEST to the mode of the source of NEWPAT if we can. */
1529
1530 m_split = split_insns (newpat, i3);
1531 if (m_split == 0)
1532 {
1533 /* If I2DEST is a hard register or the only use of a pseudo,
1534 we can change its mode. */
1535 if (GET_MODE (SET_DEST (newpat)) != GET_MODE (i2dest)
1536 && GET_MODE (SET_DEST (newpat)) != VOIDmode
1537 && GET_CODE (i2dest) == REG
1538 && (REGNO (i2dest) < FIRST_PSEUDO_REGISTER
1539 || (reg_n_sets[REGNO (i2dest)] == 1 && ! added_sets_2
1540 && ! REG_USERVAR_P (i2dest))))
1541 ni2dest = gen_rtx (REG, GET_MODE (SET_DEST (newpat)),
1542 REGNO (i2dest));
1543
1544 m_split = split_insns (gen_rtx (PARALLEL, VOIDmode,
1545 gen_rtvec (2, newpat,
1546 gen_rtx (CLOBBER,
1547 VOIDmode,
1548 ni2dest))),
1549 i3);
1550 }
1551
1552 if (m_split && GET_CODE (m_split) == SEQUENCE
1553 && XVECLEN (m_split, 0) == 2
1554 && (next_real_insn (i2) == i3
1555 || ! use_crosses_set_p (PATTERN (XVECEXP (m_split, 0, 0)),
1556 INSN_CUID (i2))))
1557 {
1558 rtx newi3pat = PATTERN (XVECEXP (m_split, 0, 1));
1559 newi2pat = PATTERN (XVECEXP (m_split, 0, 0));
1560
1561 /* In case we changed the mode of I2DEST, replace it in the
1562 pseudo-register table here. We can't do it above in case this
1563 code doesn't get executed and we do a split the other way. */
1564
1565 if (REGNO (i2dest) >= FIRST_PSEUDO_REGISTER)
1566 SUBST (regno_reg_rtx[REGNO (i2dest)], ni2dest);
1567
1568 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
1569 if (i2_code_number >= 0)
1570 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
1571
1572 if (insn_code_number >= 0)
1573 newpat = newi3pat;
1574
1575 /* It is possible that both insns now set the destination of I3.
1576 If so, we must show an extra use of it and update
1577 reg_significant. */
1578
1579 if (insn_code_number >= 0 && GET_CODE (SET_DEST (newpat)) == REG
1580 && GET_CODE (SET_DEST (newi2pat)) == REG
1581 && REGNO (SET_DEST (newpat)) == REGNO (SET_DEST (newi2pat)))
1582 {
1583 reg_n_sets[REGNO (SET_DEST (newpat))]++;
1584 set_significant (SET_DEST (newi2pat), newi2pat);
1585 set_significant (SET_DEST (newpat), newpat);
1586 }
1587 }
1588
1589 /* If we can split it and use I2DEST, go ahead and see if that
1590 helps things be recognized. Verify that none of the registers
1591 are set between I2 and I3. */
1592 if (insn_code_number < 0 && (split = find_split_point (&newpat, i3)) != 0
1593#ifdef HAVE_cc0
1594 && GET_CODE (i2dest) == REG
1595#endif
1596 /* We need I2DEST in the proper mode. If it is a hard register
1597 or the only use of a pseudo, we can change its mode. */
1598 && (GET_MODE (*split) == GET_MODE (i2dest)
1599 || GET_MODE (*split) == VOIDmode
1600 || REGNO (i2dest) < FIRST_PSEUDO_REGISTER
1601 || (reg_n_sets[REGNO (i2dest)] == 1 && ! added_sets_2
1602 && ! REG_USERVAR_P (i2dest)))
1603 && (next_real_insn (i2) == i3
1604 || ! use_crosses_set_p (*split, INSN_CUID (i2)))
1605 /* We can't overwrite I2DEST if its value is still used by
1606 NEWPAT. */
1607 && ! reg_referenced_p (i2dest, newpat))
1608 {
1609 rtx newdest = i2dest;
1610
1611 /* Get NEWDEST as a register in the proper mode. We have already
1612 validated that we can do this. */
1613 if (GET_MODE (i2dest) != GET_MODE (*split)
1614 && GET_MODE (*split) != VOIDmode)
1615 {
1616 newdest = gen_rtx (REG, GET_MODE (*split), REGNO (i2dest));
1617
1618 if (REGNO (i2dest) >= FIRST_PSEUDO_REGISTER)
1619 SUBST (regno_reg_rtx[REGNO (i2dest)], newdest);
1620 }
1621
1622 /* If *SPLIT is a (mult FOO (const_int pow2)), convert it to
1623 an ASHIFT. This can occur if it was inside a PLUS and hence
1624 appeared to be a memory address. This is a kludge. */
1625 if (GET_CODE (*split) == MULT
1626 && GET_CODE (XEXP (*split, 1)) == CONST_INT
1627 && (i = exact_log2 (INTVAL (XEXP (*split, 1)))) >= 0)
1628 SUBST (*split, gen_rtx_combine (ASHIFT, GET_MODE (*split),
1629 XEXP (*split, 0), GEN_INT (i)));
1630
1631#ifdef INSN_SCHEDULING
1632 /* If *SPLIT is a paradoxical SUBREG, when we split it, it should
1633 be written as a ZERO_EXTEND. */
1634 if (GET_CODE (*split) == SUBREG
1635 && GET_CODE (SUBREG_REG (*split)) == MEM)
1636 SUBST (*split, gen_rtx_combine (ZERO_EXTEND, GET_MODE (*split),
1637 XEXP (*split, 0)));
1638#endif
1639
1640 newi2pat = gen_rtx_combine (SET, VOIDmode, newdest, *split);
1641 SUBST (*split, newdest);
1642 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
1643 if (i2_code_number >= 0)
1644 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
1645 }
1646 }
1647
1648 /* Check for a case where we loaded from memory in a narrow mode and
1649 then sign extended it, but we need both registers. In that case,
1650 we have a PARALLEL with both loads from the same memory location.
1651 We can split this into a load from memory followed by a register-register
1652 copy. This saves at least one insn, more if register allocation can
1653 eliminate the copy. */
1654
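 /* E.g., (parallel [(set r1 (sign_extend:SI (mem:HI addr)))
                     (set r2 (mem:HI addr))])
    becomes the extending load into r1 followed by the register copy
    (set r2 (subreg:HI r1 0)).  */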
1655 else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0
1656 && GET_CODE (newpat) == PARALLEL
1657 && XVECLEN (newpat, 0) == 2
1658 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
1659 && GET_CODE (SET_SRC (XVECEXP (newpat, 0, 0))) == SIGN_EXTEND
1660 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
1661 && rtx_equal_p (SET_SRC (XVECEXP (newpat, 0, 1)),
1662 XEXP (SET_SRC (XVECEXP (newpat, 0, 0)), 0))
1663 && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 1)),
1664 INSN_CUID (i2))
1665 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT
1666 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART
1667 && ! reg_overlap_mentioned_p (SET_DEST (XVECEXP (newpat, 0, 1)),
1668 SET_SRC (XVECEXP (newpat, 0, 1)))
1669 && ! find_reg_note (i3, REG_UNUSED,
1670 SET_DEST (XVECEXP (newpat, 0, 0))))
1671 {
1672 newi2pat = XVECEXP (newpat, 0, 0);
1673 newpat = XVECEXP (newpat, 0, 1);
1674 SUBST (SET_SRC (newpat),
1675 gen_lowpart_for_combine (GET_MODE (SET_SRC (newpat)),
1676 SET_DEST (newi2pat)));
1677 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
1678 if (i2_code_number >= 0)
1679 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
1680
1681 if (insn_code_number >= 0)
1682 {
1683 rtx insn;
1684 rtx link;
1685
1686 /* If we will be able to accept this, we have made a change to the
1687 destination of I3. This can invalidate a LOG_LINKS pointing
1688 to I3. No other part of combine.c makes such a transformation.
1689
1690 The new I3 will have a destination that was previously the
1691 destination of I1 or I2 and which was used in I2 or I3. Call
1692 distribute_links to make a LOG_LINK from the next use of
1693 that destination. */
1694
1695 PATTERN (i3) = newpat;
1696 distribute_links (gen_rtx (INSN_LIST, VOIDmode, i3, NULL_RTX));
1697
1698 /* I3 now uses what used to be its destination and which is
1699 now I2's destination. That means we need a LOG_LINK from
1700 I3 to I2. But we used to have one, so we still will.
1701
1702 However, some later insn might be using I2's dest and have
1703 a LOG_LINK pointing at I3. We must remove this link.
1704 The simplest way to remove the link is to point it at I1,
1705 which we know will be a NOTE. */
1706
1707 for (insn = NEXT_INSN (i3);
1708 insn && GET_CODE (insn) != CODE_LABEL
1709 && GET_CODE (PREV_INSN (insn)) != JUMP_INSN;
1710 insn = NEXT_INSN (insn))
1711 {
1712 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
1713 && reg_referenced_p (SET_DEST (newi2pat), PATTERN (insn)))
1714 {
1715 for (link = LOG_LINKS (insn); link;
1716 link = XEXP (link, 1))
1717 if (XEXP (link, 0) == i3)
1718 XEXP (link, 0) = i1;
1719
1720 break;
1721 }
1722 }
1723 }
1724 }
1725
1726 /* Similarly, check for a case where we have a PARALLEL of two independent
1727 SETs but we started with three insns. In this case, we can do the sets
1728 as two separate insns. This case occurs when some SET allows two
1729 other insns to combine, but the destination of that SET is still live. */
1730
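 /* E.g., (parallel [(set r1 (plus:SI r2 r3)) (set r4 (neg:SI r5))])
    where neither destination is referenced by the other SET can simply
    be emitted as two separate insns.  */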
1731 else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0
1732 && GET_CODE (newpat) == PARALLEL
1733 && XVECLEN (newpat, 0) == 2
1734 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
1735 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != ZERO_EXTRACT
1736 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != STRICT_LOW_PART
1737 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
1738 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT
1739 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART
1740 && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 1)),
1741 INSN_CUID (i2))
1742 /* Don't pass sets with (USE (MEM ...)) dests to the following. */
1743 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != USE
1744 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != USE
1745 && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 1)),
1746 XVECEXP (newpat, 0, 0))
1747 && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 0)),
1748 XVECEXP (newpat, 0, 1)))
1749 {
1750 newi2pat = XVECEXP (newpat, 0, 1);
1751 newpat = XVECEXP (newpat, 0, 0);
1752
1753 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
1754 if (i2_code_number >= 0)
1755 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
1756 }
1757
1758 /* If it still isn't recognized, fail and change things back the way they
1759 were. */
1760 if ((insn_code_number < 0
1761 /* Is the result a reasonable ASM_OPERANDS? */
1762 && (! check_asm_operands (newpat) || added_sets_1 || added_sets_2)))
1763 {
1764 undo_all ();
1765 return 0;
1766 }
1767
1768 /* If we had to change another insn, make sure it is valid also. */
1769 if (undobuf.other_insn)
1770 {
1771 rtx other_notes = REG_NOTES (undobuf.other_insn);
1772 rtx other_pat = PATTERN (undobuf.other_insn);
1773 rtx new_other_notes;
1774 rtx note, next;
1775
1776 other_code_number = recog_for_combine (&other_pat, undobuf.other_insn,
1777 &new_other_notes);
1778
1779 if (other_code_number < 0 && ! check_asm_operands (other_pat))
1780 {
1781 undo_all ();
1782 return 0;
1783 }
1784
1785 PATTERN (undobuf.other_insn) = other_pat;
1786
1787 /* If any of the notes in OTHER_INSN were REG_UNUSED, ensure that they
1788 are still valid. Then add any non-duplicate notes added by
1789 recog_for_combine. */
1790 for (note = REG_NOTES (undobuf.other_insn); note; note = next)
1791 {
1792 next = XEXP (note, 1);
1793
1794 if (REG_NOTE_KIND (note) == REG_UNUSED
1795 && ! reg_set_p (XEXP (note, 0), PATTERN (undobuf.other_insn)))
1796 remove_note (undobuf.other_insn, note);
1797 }
1798
1799 distribute_notes (new_other_notes, undobuf.other_insn,
1800 undobuf.other_insn, NULL_RTX, NULL_RTX, NULL_RTX);
1801 }
1802
1803 /* We now know that we can do this combination. Merge the insns and
1804 update the status of registers and LOG_LINKS. */
1805
1806 {
1807 rtx i3notes, i2notes, i1notes = 0;
1808 rtx i3links, i2links, i1links = 0;
1809 rtx midnotes = 0;
1810 int all_adjacent = (next_real_insn (i2) == i3
1811 && (i1 == 0 || next_real_insn (i1) == i2));
1812 register int regno;
1813 /* Compute which registers we expect to eliminate. */
1814 rtx elim_i2 = (newi2pat || i2dest_in_i2src || i2dest_in_i1src
1815 ? 0 : i2dest);
1816 rtx elim_i1 = i1 == 0 || i1dest_in_i1src ? 0 : i1dest;
1817
1818 /* Get the old REG_NOTES and LOG_LINKS from all our insns and
1819 clear them. */
1820 i3notes = REG_NOTES (i3), i3links = LOG_LINKS (i3);
1821 i2notes = REG_NOTES (i2), i2links = LOG_LINKS (i2);
1822 if (i1)
1823 i1notes = REG_NOTES (i1), i1links = LOG_LINKS (i1);
1824
1825 /* Ensure that we do not have something that should not be shared but
1826 occurs multiple times in the new insns. Check this by first
1827 resetting all the `used' flags and then copying anything that is shared. */
1828
1829 reset_used_flags (i3notes);
1830 reset_used_flags (i2notes);
1831 reset_used_flags (i1notes);
1832 reset_used_flags (newpat);
1833 reset_used_flags (newi2pat);
1834 if (undobuf.other_insn)
1835 reset_used_flags (PATTERN (undobuf.other_insn));
1836
1837 i3notes = copy_rtx_if_shared (i3notes);
1838 i2notes = copy_rtx_if_shared (i2notes);
1839 i1notes = copy_rtx_if_shared (i1notes);
1840 newpat = copy_rtx_if_shared (newpat);
1841 newi2pat = copy_rtx_if_shared (newi2pat);
1842 if (undobuf.other_insn)
1843 PATTERN (undobuf.other_insn) = copy_rtx_if_shared (PATTERN (undobuf.other_insn));
1844
1845 INSN_CODE (i3) = insn_code_number;
1846 PATTERN (i3) = newpat;
1847 if (undobuf.other_insn)
1848 INSN_CODE (undobuf.other_insn) = other_code_number;
1849
1850 /* We had one special case above where I2 had more than one set and
1851 we replaced a destination of one of those sets with the destination
1852 of I3. In that case, we have to update LOG_LINKS of insns later
1853 in this basic block. Note that this (expensive) case is rare. */
1854
1855 if (GET_CODE (PATTERN (i2)) == PARALLEL)
1856 for (i = 0; i < XVECLEN (PATTERN (i2), 0); i++)
1857 if (GET_CODE (SET_DEST (XVECEXP (PATTERN (i2), 0, i))) == REG
1858 && SET_DEST (XVECEXP (PATTERN (i2), 0, i)) != i2dest
1859 && ! find_reg_note (i2, REG_UNUSED,
1860 SET_DEST (XVECEXP (PATTERN (i2), 0, i))))
1861 {
1862 register rtx insn;
1863
1864 for (insn = NEXT_INSN (i2); insn; insn = NEXT_INSN (insn))
1865 {
1866 if (insn != i3 && GET_RTX_CLASS (GET_CODE (insn)) == 'i')
1867 for (link = LOG_LINKS (insn); link; link = XEXP (link, 1))
1868 if (XEXP (link, 0) == i2)
1869 XEXP (link, 0) = i3;
1870
1871 if (GET_CODE (insn) == CODE_LABEL
1872 || GET_CODE (insn) == JUMP_INSN)
1873 break;
1874 }
1875 }
1876
1877 LOG_LINKS (i3) = 0;
1878 REG_NOTES (i3) = 0;
1879 LOG_LINKS (i2) = 0;
1880 REG_NOTES (i2) = 0;
1881
1882 if (newi2pat)
1883 {
1884 INSN_CODE (i2) = i2_code_number;
1885 PATTERN (i2) = newi2pat;
1886 }
1887 else
1888 {
1889 PUT_CODE (i2, NOTE);
1890 NOTE_LINE_NUMBER (i2) = NOTE_INSN_DELETED;
1891 NOTE_SOURCE_FILE (i2) = 0;
1892 }
1893
1894 if (i1)
1895 {
1896 LOG_LINKS (i1) = 0;
1897 REG_NOTES (i1) = 0;
1898 PUT_CODE (i1, NOTE);
1899 NOTE_LINE_NUMBER (i1) = NOTE_INSN_DELETED;
1900 NOTE_SOURCE_FILE (i1) = 0;
1901 }
1902
1903 /* Get death notes for everything that is now used in either I3 or
1904 I2 and used to die in a previous insn. */
1905
1906 move_deaths (newpat, i1 ? INSN_CUID (i1) : INSN_CUID (i2), i3, &midnotes);
1907 if (newi2pat)
1908 move_deaths (newi2pat, INSN_CUID (i1), i2, &midnotes);
1909
1910 /* Distribute all the LOG_LINKS and REG_NOTES from I1, I2, and I3. */
1911 if (i3notes)
1912 distribute_notes (i3notes, i3, i3, newi2pat ? i2 : NULL_RTX,
1913 elim_i2, elim_i1);
1914 if (i2notes)
1915 distribute_notes (i2notes, i2, i3, newi2pat ? i2 : NULL_RTX,
1916 elim_i2, elim_i1);
1917 if (i1notes)
1918 distribute_notes (i1notes, i1, i3, newi2pat ? i2 : NULL_RTX,
1919 elim_i2, elim_i1);
1920 if (midnotes)
1921 distribute_notes (midnotes, NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
1922 elim_i2, elim_i1);
1923
1924 /* Distribute any notes added to I2 or I3 by recog_for_combine. We
1925 know these are REG_UNUSED and want them to go to the desired insn,
1926 so we always pass it as i3. */
1927 if (newi2pat && new_i2_notes)
1928 distribute_notes (new_i2_notes, i2, i2, NULL_RTX, NULL_RTX, NULL_RTX);
1929 if (new_i3_notes)
1930 distribute_notes (new_i3_notes, i3, i3, NULL_RTX, NULL_RTX, NULL_RTX);
1931
1932 /* If I3DEST was used in I3SRC, it really died in I3. We may need to
1933 put a REG_DEAD note for it somewhere. Similarly for I2 and I1. */
1934
1935 if (i3dest_killed)
1936 distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i3dest_killed, NULL_RTX),
1937 NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
1938 NULL_RTX, NULL_RTX);
1939
1940 /* For I2 and I1, we have to be careful. If NEWI2PAT exists and sets
1941 I2DEST or I1DEST, the death must be somewhere before I2, not I3. If
1942 we passed I3 in that case, it might delete I2. */
1943
1944 if (i2dest_in_i2src)
1945 {
1946 if (newi2pat && reg_set_p (i2dest, newi2pat))
1947 distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i2dest, NULL_RTX),
1948 NULL_RTX, i2, NULL_RTX, NULL_RTX, NULL_RTX);
1949 else
1950 distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i2dest, NULL_RTX),
1951 NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
1952 NULL_RTX, NULL_RTX);
1953 }
1954
1955 if (i1dest_in_i1src)
1956 {
1957 if (newi2pat && reg_set_p (i1dest, newi2pat))
1958 distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i1dest, NULL_RTX),
1959 NULL_RTX, i2, NULL_RTX, NULL_RTX, NULL_RTX);
1960 else
1961 distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i1dest, NULL_RTX),
1962 NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
1963 NULL_RTX, NULL_RTX);
1964 }
1965
1966 distribute_links (i3links);
1967 distribute_links (i2links);
1968 distribute_links (i1links);
1969
1970 if (GET_CODE (i2dest) == REG)
1971 {
1972 rtx link;
1973 rtx i2_insn = 0, i2_val = 0, set;
1974
1975 /* The insn that used to set this register doesn't exist, and
1976 this life of the register may not exist either. See if one of
1977 I3's links points to an insn that sets I2DEST. If it does,
1978 that is now the last known value for I2DEST. If we don't update
1979 this and I2 set the register to a value that depended on its old
1980 contents, we will get confused. If this insn is used, things
1981 will be set correctly in combine_instructions. */
1982
1983 for (link = LOG_LINKS (i3); link; link = XEXP (link, 1))
1984 if ((set = single_set (XEXP (link, 0))) != 0
1985 && rtx_equal_p (i2dest, SET_DEST (set)))
1986 i2_insn = XEXP (link, 0), i2_val = SET_SRC (set);
1987
1988 record_value_for_reg (i2dest, i2_insn, i2_val);
1989
1990 /* If the reg formerly set in I2 died only once and that was in I3,
1991 zero its use count so it won't make `reload' do any work. */
1992 if (! added_sets_2 && newi2pat == 0)
1993 {
1994 regno = REGNO (i2dest);
1995 reg_n_sets[regno]--;
1996 if (reg_n_sets[regno] == 0
1997 && ! (basic_block_live_at_start[0][regno / REGSET_ELT_BITS]
1998 & ((REGSET_ELT_TYPE) 1 << (regno % REGSET_ELT_BITS))))
1999 reg_n_refs[regno] = 0;
2000 }
2001 }
2002
2003 if (i1 && GET_CODE (i1dest) == REG)
2004 {
2005 rtx link;
2006 rtx i1_insn = 0, i1_val = 0, set;
2007
2008 for (link = LOG_LINKS (i3); link; link = XEXP (link, 1))
2009 if ((set = single_set (XEXP (link, 0))) != 0
2010 && rtx_equal_p (i1dest, SET_DEST (set)))
2011 i1_insn = XEXP (link, 0), i1_val = SET_SRC (set);
2012
2013 record_value_for_reg (i1dest, i1_insn, i1_val);
2014
2015 regno = REGNO (i1dest);
2016 if (! added_sets_1)
2017 {
2018 reg_n_sets[regno]--;
2019 if (reg_n_sets[regno] == 0
2020 && ! (basic_block_live_at_start[0][regno / REGSET_ELT_BITS]
2021 & ((REGSET_ELT_TYPE) 1 << (regno % REGSET_ELT_BITS))))
2022 reg_n_refs[regno] = 0;
2023 }
2024 }
2025
2026 /* If I3 is now an unconditional jump, ensure that it has a
2027 BARRIER following it since it may have initially been a
2028 conditional jump. */
2029
2030 if ((GET_CODE (newpat) == RETURN || simplejump_p (i3))
2031 && GET_CODE (next_nonnote_insn (i3)) != BARRIER)
2032 emit_barrier_after (i3);
2033 }
2034
2035 combine_successes++;
2036
2037 return newi2pat ? i2 : i3;
2038}
2039\f
2040/* Undo all the modifications recorded in undobuf. */
2041
2042static void
2043undo_all ()
2044{
2045 register int i;
2046 if (undobuf.num_undo > MAX_UNDO)
2047 undobuf.num_undo = MAX_UNDO;
2048 for (i = undobuf.num_undo - 1; i >= 0; i--)
2049 {
2050 if (undobuf.undo[i].is_int)
2051 *undobuf.undo[i].where.i = undobuf.undo[i].old_contents.i;
2052 else
2053 *undobuf.undo[i].where.rtx = undobuf.undo[i].old_contents.rtx;
2054
2055 }
2056
2057 obfree (undobuf.storage);
2058 undobuf.num_undo = 0;
2059}
2060\f
2061/* Find the innermost point within the rtx at LOC, possibly LOC itself,
2062 where we have an arithmetic expression and return that point. LOC will
2063 be inside INSN.
2064
2065 try_combine will call this function to see if an insn can be split into
2066 two insns. */
2067
2068static rtx *
2069find_split_point (loc, insn)
2070 rtx *loc;
2071 rtx insn;
2072{
2073 rtx x = *loc;
2074 enum rtx_code code = GET_CODE (x);
2075 rtx *split;
2076 int len = 0, pos, unsignedp;
2077 rtx inner;
2078
2079 /* First special-case some codes. */
2080 switch (code)
2081 {
2082 case SUBREG:
2083#ifdef INSN_SCHEDULING
2084 /* If we are making a paradoxical SUBREG invalid, it becomes a split
2085 point. */
2086 if (GET_CODE (SUBREG_REG (x)) == MEM)
2087 return loc;
2088#endif
2089 return find_split_point (&SUBREG_REG (x), insn);
2090
2091 case MEM:
2092#ifdef HAVE_lo_sum
2093 /* If we have (mem (const ..)) or (mem (symbol_ref ...)), split it
2094 using LO_SUM and HIGH. */
2095 if (GET_CODE (XEXP (x, 0)) == CONST
2096 || GET_CODE (XEXP (x, 0)) == SYMBOL_REF)
2097 {
2098 SUBST (XEXP (x, 0),
2099 gen_rtx_combine (LO_SUM, Pmode,
2100 gen_rtx_combine (HIGH, Pmode, XEXP (x, 0)),
2101 XEXP (x, 0)));
2102 return &XEXP (XEXP (x, 0), 0);
2103 }
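	  /* E.g., (mem (symbol_ref "x")) becomes
	     (mem (lo_sum (high (symbol_ref "x")) (symbol_ref "x"))),
	     the usual RISC high/lo_sum addressing idiom, with the split
	     point at the HIGH operand.  */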
2104#endif
2105
2106 /* If we have a PLUS whose second operand is a constant and the
2107 address is not valid, perhaps we can split it up using
2108 the machine-specific way to split large constants. We use
2109 the first pseudo-reg (one of the virtual regs) as a placeholder;
2110 it will not remain in the result. */
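 /* E.g., (mem (plus r1 (const_int 0x12345))) with a displacement too
    large for an address offset might be split by the MD into a
    high-part set and a low-part sum that do form a valid address.  */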
2111 if (GET_CODE (XEXP (x, 0)) == PLUS
2112 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
2113 && ! memory_address_p (GET_MODE (x), XEXP (x, 0)))
2114 {
2115 rtx reg = regno_reg_rtx[FIRST_PSEUDO_REGISTER];
2116 rtx seq = split_insns (gen_rtx (SET, VOIDmode, reg, XEXP (x, 0)),
2117 subst_insn);
2118
2119 /* This should have produced two insns, each of which sets our
2120 placeholder. If the source of the second is a valid address,
2121 we can put both sources together and make a split point
2122 in the middle. */
2123
2124 if (seq && XVECLEN (seq, 0) == 2
2125 && GET_CODE (XVECEXP (seq, 0, 0)) == INSN
2126 && GET_CODE (PATTERN (XVECEXP (seq, 0, 0))) == SET
2127 && SET_DEST (PATTERN (XVECEXP (seq, 0, 0))) == reg
2128 && ! reg_mentioned_p (reg,
2129 SET_SRC (PATTERN (XVECEXP (seq, 0, 0))))
2130 && GET_CODE (XVECEXP (seq, 0, 1)) == INSN
2131 && GET_CODE (PATTERN (XVECEXP (seq, 0, 1))) == SET
2132 && SET_DEST (PATTERN (XVECEXP (seq, 0, 1))) == reg
2133 && memory_address_p (GET_MODE (x),
2134 SET_SRC (PATTERN (XVECEXP (seq, 0, 1)))))
2135 {
2136 rtx src1 = SET_SRC (PATTERN (XVECEXP (seq, 0, 0)));
2137 rtx src2 = SET_SRC (PATTERN (XVECEXP (seq, 0, 1)));
2138
2139 /* Replace the placeholder in SRC2 with SRC1. If we can
2140 find where in SRC2 it was placed, that can become our
2141 split point and we can replace this address with SRC2.
2142 Just try two obvious places. */
2143
2144 src2 = replace_rtx (src2, reg, src1);
2145 split = 0;
2146 if (XEXP (src2, 0) == src1)
2147 split = &XEXP (src2, 0);
2148 else if (GET_RTX_FORMAT (GET_CODE (XEXP (src2, 0)))[0] == 'e'
2149 && XEXP (XEXP (src2, 0), 0) == src1)
2150 split = &XEXP (XEXP (src2, 0), 0);
2151
2152 if (split)
2153 {
2154 SUBST (XEXP (x, 0), src2);
2155 return split;
2156 }
2157 }
2158 }
2159 break;
2160
2161 case SET:
2162#ifdef HAVE_cc0
2163 /* If SET_DEST is CC0 and SET_SRC is not an operand, a COMPARE, or a
2164 ZERO_EXTRACT, the most likely reason why this doesn't match is that
2165 we need to put the operand into a register. So split at that
2166 point. */
2167
2168 if (SET_DEST (x) == cc0_rtx
2169 && GET_CODE (SET_SRC (x)) != COMPARE
2170 && GET_CODE (SET_SRC (x)) != ZERO_EXTRACT
2171 && GET_RTX_CLASS (GET_CODE (SET_SRC (x))) != 'o'
2172 && ! (GET_CODE (SET_SRC (x)) == SUBREG
2173 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (SET_SRC (x)))) == 'o'))
2174 return &SET_SRC (x);
2175#endif
2176
2177 /* See if we can split SET_SRC as it stands. */
2178 split = find_split_point (&SET_SRC (x), insn);
2179 if (split && split != &SET_SRC (x))
2180 return split;
2181
2182 /* See if this is a bitfield assignment with everything constant. If
2183 so, this is an IOR of an AND, so split it into that. */
2184 if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
2185 && (GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
2186 <= HOST_BITS_PER_WIDE_INT)
2187 && GET_CODE (XEXP (SET_DEST (x), 1)) == CONST_INT
2188 && GET_CODE (XEXP (SET_DEST (x), 2)) == CONST_INT
2189 && GET_CODE (SET_SRC (x)) == CONST_INT
2190 && ((INTVAL (XEXP (SET_DEST (x), 1))
2191 + INTVAL (XEXP (SET_DEST (x), 2)))
2192 <= GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0))))
2193 && ! side_effects_p (XEXP (SET_DEST (x), 0)))
2194 {
2195 int pos = INTVAL (XEXP (SET_DEST (x), 2));
2196 int len = INTVAL (XEXP (SET_DEST (x), 1));
2197 int src = INTVAL (SET_SRC (x));
2198 rtx dest = XEXP (SET_DEST (x), 0);
2199 enum machine_mode mode = GET_MODE (dest);
2200 unsigned HOST_WIDE_INT mask = ((HOST_WIDE_INT) 1 << len) - 1;
2201
2202#if BITS_BIG_ENDIAN
2203 pos = GET_MODE_BITSIZE (mode) - len - pos;
2204#endif
2205
2206 if (src == mask)
2207 SUBST (SET_SRC (x),
2208 gen_binary (IOR, mode, dest, GEN_INT (src << pos)));
2209 else
2210 SUBST (SET_SRC (x),
2211 gen_binary (IOR, mode,
2212 gen_binary (AND, mode, dest,
2213 GEN_INT (~ (mask << pos)
2214 & GET_MODE_MASK (mode))),
2215 GEN_INT (src << pos)));
2216
2217 SUBST (SET_DEST (x), dest);
2218
2219 split = find_split_point (&SET_SRC (x), insn);
2220 if (split && split != &SET_SRC (x))
2221 return split;
2222 }
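 /* E.g., with little-endian bit numbering,
      (set (zero_extract r1 (const_int 2) (const_int 4)) (const_int 1))
    becomes an AND with ~(3 << 4) followed by an IOR with (1 << 4):
      (set r1 (ior (and r1 (const_int -49)) (const_int 16))).  */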
2223
2224 /* Otherwise, see if this is an operation that we can split into two.
2225 If so, try to split that. */
2226 code = GET_CODE (SET_SRC (x));
2227
2228 switch (code)
2229 {
2230 case AND:
2231 /* If we are AND'ing with a large constant that is only a single
2232 bit and the result is only being used in a context where we
2233 need to know if it is zero or non-zero, replace it with a bit
2234 extraction. This will avoid the large constant, which might
2235 have taken more than one insn to make. If the constant were
2236 not a valid argument to the AND but took only one insn to make,
2237 this is no worse, but if it took more than one insn, it will
2238 be better. */
2239
2240 if (GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
2241 && GET_CODE (XEXP (SET_SRC (x), 0)) == REG
2242 && (pos = exact_log2 (INTVAL (XEXP (SET_SRC (x), 1)))) >= 7
2243 && GET_CODE (SET_DEST (x)) == REG
2244 && (split = find_single_use (SET_DEST (x), insn, NULL_PTR)) != 0
2245 && (GET_CODE (*split) == EQ || GET_CODE (*split) == NE)
2246 && XEXP (*split, 0) == SET_DEST (x)
2247 && XEXP (*split, 1) == const0_rtx)
2248 {
2249 SUBST (SET_SRC (x),
2250 make_extraction (GET_MODE (SET_DEST (x)),
2251 XEXP (SET_SRC (x), 0),
2252 pos, NULL_RTX, 1, 1, 0, 0));
2253 return find_split_point (loc, insn);
2254 }
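 /* E.g., (set r1 (and r2 (const_int 0x8000))), where r1 is only
    compared against zero, becomes the one-bit extraction
    (set r1 (zero_extract r2 (const_int 1) (const_int 15))),
    avoiding the 0x8000 immediate.  */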
2255 break;
2256
2257 case SIGN_EXTEND:
2258 inner = XEXP (SET_SRC (x), 0);
2259 pos = 0;
2260 len = GET_MODE_BITSIZE (GET_MODE (inner));
2261 unsignedp = 0;
2262 break;
2263
2264 case SIGN_EXTRACT:
2265 case ZERO_EXTRACT:
2266 if (GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
2267 && GET_CODE (XEXP (SET_SRC (x), 2)) == CONST_INT)
2268 {
2269 inner = XEXP (SET_SRC (x), 0);
2270 len = INTVAL (XEXP (SET_SRC (x), 1));
2271 pos = INTVAL (XEXP (SET_SRC (x), 2));
2272
2273#if BITS_BIG_ENDIAN
2274 pos = GET_MODE_BITSIZE (GET_MODE (inner)) - len - pos;
2275#endif
2276 unsignedp = (code == ZERO_EXTRACT);
2277 }
2278 break;
2279 }
2280
2281 if (len && pos >= 0 && pos + len <= GET_MODE_BITSIZE (GET_MODE (inner)))
2282 {
2283 enum machine_mode mode = GET_MODE (SET_SRC (x));
2284
2285 /* For unsigned, we have a choice of a shift followed by an
2286 AND or two shifts. Use two shifts for field sizes where the
2287 constant might be too large. We assume here that we can
2288 always at least get 8-bit constants in an AND insn, which is
2289 true for every current RISC. */
2290
2291 if (unsignedp && len <= 8)
2292 {
2293 SUBST (SET_SRC (x),
2294 gen_rtx_combine
2295 (AND, mode,
2296 gen_rtx_combine (LSHIFTRT, mode,
2297 gen_lowpart_for_combine (mode, inner),
2298 GEN_INT (pos)),
2299 GEN_INT (((HOST_WIDE_INT) 1 << len) - 1)));
2300
2301 split = find_split_point (&SET_SRC (x), insn);
2302 if (split && split != &SET_SRC (x))
2303 return split;
2304 }
2305 else
2306 {
2307 SUBST (SET_SRC (x),
2308 gen_rtx_combine
2309 (unsignedp ? LSHIFTRT : ASHIFTRT, mode,
2310 gen_rtx_combine (ASHIFT, mode,
2311 gen_lowpart_for_combine (mode, inner),
2312 GEN_INT (GET_MODE_BITSIZE (mode)
2313 - len - pos)),
2314 GEN_INT (GET_MODE_BITSIZE (mode) - len)));
2315
2316 split = find_split_point (&SET_SRC (x), insn);
2317 if (split && split != &SET_SRC (x))
2318 return split;
2319 }
2320 }
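 /* E.g., sign-extending a QImode value to a 32-bit SImode gives
      (ashiftrt (ashift X (const_int 24)) (const_int 24)),
    where X is the low part of the inner value; the shifts then offer
    further split points.  */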
2321
2322 /* See if this is a simple operation with a constant as the second
2323 operand. It might be that this constant is out of range and hence
2324 could be used as a split point. */
2325 if ((GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '2'
2326 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == 'c'
2327 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '<')
2328 && CONSTANT_P (XEXP (SET_SRC (x), 1))
2329 && (GET_RTX_CLASS (GET_CODE (XEXP (SET_SRC (x), 0))) == 'o'
2330 || (GET_CODE (XEXP (SET_SRC (x), 0)) == SUBREG
2331 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (SET_SRC (x), 0))))
2332 == 'o'))))
2333 return &XEXP (SET_SRC (x), 1);
2334
2335 /* Finally, see if this is a simple operation with its first operand
2336 not in a register. The operation might require this operand in a
2337 register, so return it as a split point. We can always do this
2338 because if the first operand were another operation, we would have
2339 already found it as a split point. */
2340 if ((GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '2'
2341 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == 'c'
2342 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '<'
2343 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '1')
2344 && ! register_operand (XEXP (SET_SRC (x), 0), VOIDmode))
2345 return &XEXP (SET_SRC (x), 0);
2346
2347 return 0;
2348
2349 case AND:
2350 case IOR:
2351 /* We write NOR as (and (not A) (not B)), but if we don't have a NOR,
2352 it is better to write this as (not (ior A B)) so we can split it.
2353 Similarly for IOR. */
2354 if (GET_CODE (XEXP (x, 0)) == NOT && GET_CODE (XEXP (x, 1)) == NOT)
2355 {
2356 SUBST (*loc,
2357 gen_rtx_combine (NOT, GET_MODE (x),
2358 gen_rtx_combine (code == IOR ? AND : IOR,
2359 GET_MODE (x),
2360 XEXP (XEXP (x, 0), 0),
2361 XEXP (XEXP (x, 1), 0))));
2362 return find_split_point (loc, insn);
2363 }
2364
2365 /* Many RISC machines have a large set of logical insns. If the
2366 second operand is a NOT, put it first so we will try to split the
2367 other operand first. */
2368 if (GET_CODE (XEXP (x, 1)) == NOT)
2369 {
2370 rtx tem = XEXP (x, 0);
2371 SUBST (XEXP (x, 0), XEXP (x, 1));
2372 SUBST (XEXP (x, 1), tem);
2373 }
2374 break;
2375 }
2376
2377 /* Otherwise, select our actions depending on our rtx class. */
2378 switch (GET_RTX_CLASS (code))
2379 {
2380 case 'b': /* This is ZERO_EXTRACT and SIGN_EXTRACT. */
2381 case '3':
2382 split = find_split_point (&XEXP (x, 2), insn);
2383 if (split)
2384 return split;
2385 /* ... fall through ... */
2386 case '2':
2387 case 'c':
2388 case '<':
2389 split = find_split_point (&XEXP (x, 1), insn);
2390 if (split)
2391 return split;
2392 /* ... fall through ... */
2393 case '1':
2394 /* Some machines have (and (shift ...) ...) insns. If X is not
2395 an AND, but XEXP (X, 0) is, use it as our split point. */
2396 if (GET_CODE (x) != AND && GET_CODE (XEXP (x, 0)) == AND)
2397 return &XEXP (x, 0);
2398
2399 split = find_split_point (&XEXP (x, 0), insn);
2400 if (split)
2401 return split;
2402 return loc;
2403 }
2404
2405 /* Otherwise, we don't have a split point. */
2406 return 0;
2407}
2408\f
2409/* Throughout X, replace FROM with TO, and return the result.
2410 The result is TO if X is FROM;
2411 otherwise the result is X, but its contents may have been modified.
2412 If they were modified, a record was made in undobuf so that
2413 undo_all will (among other things) return X to its original state.
2414
2415 If the number of changes necessary is too much to record to undo,
2416 the excess changes are not made, so the result is invalid.
2417 The changes already made can still be undone.
2418 undobuf.num_undo is incremented for such changes, so by testing that
2419 the caller can tell whether the result is valid.
2420
2421 `n_occurrences' is incremented each time FROM is replaced.
2422
2423 IN_DEST is non-zero if we are processing the SET_DEST of a SET.
2424
2425 UNIQUE_COPY is non-zero if each substitution must be unique. We do this
2426 by copying if `n_occurrences' is non-zero. */
2427
2428static rtx
2429subst (x, from, to, in_dest, unique_copy)
2430 register rtx x, from, to;
2431 int in_dest;
2432 int unique_copy;
2433{
2434 register char *fmt;
2435 register int len, i;
2436 register enum rtx_code code = GET_CODE (x), orig_code = code;
2437 rtx temp;
2438 enum machine_mode mode = GET_MODE (x);
2439 enum machine_mode op0_mode = VOIDmode;
2440 rtx other_insn;
2441 rtx *cc_use;
2442 int n_restarts = 0;
2443
2444/* FAKE_EXTEND_SAFE_P (MODE, FROM) is 1 if (subreg:MODE FROM 0) is a safe
2445 replacement for (zero_extend:MODE FROM) or (sign_extend:MODE FROM).
2446 If it is 0, that cannot be done. We can now do this for any MEM
2447 because (SUBREG (MEM...)) is guaranteed to cause the MEM to be reloaded.
2448 If not for that, MEM's would very rarely be safe. */
2449
2450/* Reject MODEs bigger than a word, because we might not be able
2451 to reference a two-register group starting with an arbitrary register
2452 (and currently gen_lowpart might crash for a SUBREG). */
2453
2454#define FAKE_EXTEND_SAFE_P(MODE, FROM) \
2455 (GET_MODE_SIZE (MODE) <= UNITS_PER_WORD)
2456
2457/* Two expressions are equal if they are identical copies of a shared
2458 RTX or if they are both registers with the same register number
2459 and mode. */
2460
2461#define COMBINE_RTX_EQUAL_P(X,Y) \
2462 ((X) == (Y) \
2463 || (GET_CODE (X) == REG && GET_CODE (Y) == REG \
2464 && REGNO (X) == REGNO (Y) && GET_MODE (X) == GET_MODE (Y)))
2465
2466 if (! in_dest && COMBINE_RTX_EQUAL_P (x, from))
2467 {
2468 n_occurrences++;
2469 return (unique_copy && n_occurrences > 1 ? copy_rtx (to) : to);
2470 }
2471
2472 /* If X and FROM are the same register but different modes, they will
2473 not have been seen as equal above. However, flow.c will make a
2474 LOG_LINKS entry for that case. If we do nothing, we will try to
2475 rerecognize our original insn and, when it succeeds, we will
2476 delete the feeding insn, which is incorrect.
2477
2478 So force this insn not to match in this (rare) case. */
2479 if (! in_dest && code == REG && GET_CODE (from) == REG
2480 && REGNO (x) == REGNO (from))
2481 return gen_rtx (CLOBBER, GET_MODE (x), const0_rtx);
2482
2483 /* If this is an object, we are done unless it is a MEM or LO_SUM, both
2484 of which may contain things that can be combined. */
2485 if (code != MEM && code != LO_SUM && GET_RTX_CLASS (code) == 'o')
2486 return x;
2487
2488 /* It is possible to have a subexpression appear twice in the insn.
2489 Suppose that FROM is a register that appears within TO.
2490 Then, after that subexpression has been scanned once by `subst',
2491 the second time it is scanned, TO may be found. If we were
2492 to scan TO here, we would find FROM within it and create a
2493 self-referent rtl structure which is completely wrong. */
2494 if (COMBINE_RTX_EQUAL_P (x, to))
2495 return to;
2496
2497 len = GET_RTX_LENGTH (code);
2498 fmt = GET_RTX_FORMAT (code);
2499
2500 /* We don't need to process a SET_DEST that is a register, CC0, or PC, so
2501 set up to skip this common case. All other cases where we want to
2502 suppress replacing something inside a SET_SRC are handled via the
2503 IN_DEST operand. */
2504 if (code == SET
2505 && (GET_CODE (SET_DEST (x)) == REG
2506 || GET_CODE (SET_DEST (x)) == CC0
2507 || GET_CODE (SET_DEST (x)) == PC))
2508 fmt = "ie";
2509
2510 /* Get the mode of operand 0 in case X is now a SIGN_EXTEND of a constant. */
2511 if (fmt[0] == 'e')
2512 op0_mode = GET_MODE (XEXP (x, 0));
2513
2514 for (i = 0; i < len; i++)
2515 {
2516 if (fmt[i] == 'E')
2517 {
2518 register int j;
2519 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2520 {
2521 register rtx new;
2522 if (COMBINE_RTX_EQUAL_P (XVECEXP (x, i, j), from))
2523 {
2524 new = (unique_copy && n_occurrences ? copy_rtx (to) : to);
2525 n_occurrences++;
2526 }
2527 else
2528 {
2529 new = subst (XVECEXP (x, i, j), from, to, 0, unique_copy);
2530
2531 /* If this substitution failed, this whole thing fails. */
2532 if (GET_CODE (new) == CLOBBER && XEXP (new, 0) == const0_rtx)
2533 return new;
2534 }
2535
2536 SUBST (XVECEXP (x, i, j), new);
2537 }
2538 }
2539 else if (fmt[i] == 'e')
2540 {
2541 register rtx new;
2542
2543 if (COMBINE_RTX_EQUAL_P (XEXP (x, i), from))
2544 {
2545 new = (unique_copy && n_occurrences ? copy_rtx (to) : to);
2546 n_occurrences++;
2547 }
2548 else
2549 /* If we are in a SET_DEST, suppress most cases unless we
2550 have gone inside a MEM, in which case we want to
2551 simplify the address. We assume here that things that
2552 are actually part of the destination have their inner
2553 parts in the first expression. This is true for SUBREG,
2554 STRICT_LOW_PART, and ZERO_EXTRACT, which are the only
2555 things aside from REG and MEM that should appear in a
2556 SET_DEST. */
2557 new = subst (XEXP (x, i), from, to,
2558 (((in_dest
2559 && (code == SUBREG || code == STRICT_LOW_PART
2560 || code == ZERO_EXTRACT))
2561 || code == SET)
2562 && i == 0), unique_copy);
2563
2564 /* If we found that we will have to reject this combination,
2565 indicate that by returning the CLOBBER ourselves, rather than
2566 an expression containing it. This will speed things up as
2567 well as prevent accidents where two CLOBBERs are considered
2568 to be equal, thus producing an incorrect simplification. */
2569
2570 if (GET_CODE (new) == CLOBBER && XEXP (new, 0) == const0_rtx)
2571 return new;
2572
2573 SUBST (XEXP (x, i), new);
2574 }
2575 }
2576
2577 /* We come back to here if we have replaced the expression with one of
2578 a different code and it is likely that further simplification will be
2579 possible. */
2580
2581 restart:
2582
2583 /* If we have restarted more than 4 times, we are probably looping, so
2584 give up. */
2585 if (++n_restarts > 4)
2586 return x;
2587
2588 /* If we are restarting at all, it means that we no longer know the
2589 original mode of operand 0 (since we have probably changed the
2590 form of X). */
2591
2592 if (n_restarts > 1)
2593 op0_mode = VOIDmode;
2594
2595 code = GET_CODE (x);
2596
2597 /* If this is a commutative operation, put a constant last and a complex
2598 expression first. We don't need to do this for comparisons here. */
2599 if (GET_RTX_CLASS (code) == 'c'
2600 && ((CONSTANT_P (XEXP (x, 0)) && GET_CODE (XEXP (x, 1)) != CONST_INT)
2601 || (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == 'o'
2602 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) != 'o')
2603 || (GET_CODE (XEXP (x, 0)) == SUBREG
2604 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0)))) == 'o'
2605 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) != 'o')))
2606 {
2607 temp = XEXP (x, 0);
2608 SUBST (XEXP (x, 0), XEXP (x, 1));
2609 SUBST (XEXP (x, 1), temp);
2610 }
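  /* E.g., (plus (const_int 4) r1) is rewritten here as
     (plus r1 (const_int 4)), the canonical order that the code below
     and the machine description patterns expect.  */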
2611
2612 /* If this is a simple operation applied to an IF_THEN_ELSE, try
2613 applying it to the arms of the IF_THEN_ELSE. This often simplifies
2614 things. Don't deal with operations that change modes here. */
2615
2616 if ((GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c')
2617 && GET_CODE (XEXP (x, 0)) == IF_THEN_ELSE)
2618 {
2619 /* Don't do this by using SUBST inside X since we might be messing
2620 up a shared expression. */
2621 rtx cond = XEXP (XEXP (x, 0), 0);
2622 rtx t_arm = subst (gen_binary (code, mode, XEXP (XEXP (x, 0), 1),
2623 XEXP (x, 1)),
2624 pc_rtx, pc_rtx, 0);
2625 rtx f_arm = subst (gen_binary (code, mode, XEXP (XEXP (x, 0), 2),
2626 XEXP (x, 1)),
2627 pc_rtx, pc_rtx, 0);
2628
2629
2630 x = gen_rtx (IF_THEN_ELSE, mode, cond, t_arm, f_arm);
2631 goto restart;
2632 }
2633
2634 else if (GET_RTX_CLASS (code) == '1'
2635 && GET_CODE (XEXP (x, 0)) == IF_THEN_ELSE
2636 && GET_MODE (XEXP (x, 0)) == mode)
2637 {
2638 rtx cond = XEXP (XEXP (x, 0), 0);
2639 rtx t_arm = subst (gen_unary (code, mode, XEXP (XEXP (x, 0), 1)),
2640 pc_rtx, pc_rtx, 0);
2641 rtx f_arm = subst (gen_unary (code, mode, XEXP (XEXP (x, 0), 2)),
2642 pc_rtx, pc_rtx, 0);
2643
2644 x = gen_rtx_combine (IF_THEN_ELSE, mode, cond, t_arm, f_arm);
2645 goto restart;
2646 }
2647
2648 /* Try to fold this expression in case we have constants that weren't
2649 present before. */
2650 temp = 0;
2651 switch (GET_RTX_CLASS (code))
2652 {
2653 case '1':
2654 temp = simplify_unary_operation (code, mode, XEXP (x, 0), op0_mode);
2655 break;
2656 case '<':
2657 temp = simplify_relational_operation (code, op0_mode,
2658 XEXP (x, 0), XEXP (x, 1));
2659#ifdef FLOAT_STORE_FLAG_VALUE
2660 if (temp != 0 && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
2661 temp = ((temp == const0_rtx) ? CONST0_RTX (GET_MODE (x))
2662 : immed_real_const_1 (FLOAT_STORE_FLAG_VALUE, GET_MODE (x)));
2663#endif
2664 break;
2665 case 'c':
2666 case '2':
2667 temp = simplify_binary_operation (code, mode, XEXP (x, 0), XEXP (x, 1));
2668 break;
2669 case 'b':
2670 case '3':
2671 temp = simplify_ternary_operation (code, mode, op0_mode, XEXP (x, 0),
2672 XEXP (x, 1), XEXP (x, 2));
2673 break;
2674 }
2675
2676 if (temp)
2677 x = temp, code = GET_CODE (temp);
2678
2679 /* First see if we can apply the inverse distributive law. */
2680 if (code == PLUS || code == MINUS || code == IOR || code == XOR)
2681 {
2682 x = apply_distributive_law (x);
2683 code = GET_CODE (x);
2684 }
2685
2686 /* If CODE is an associative operation not otherwise handled, see if we
2687 can associate some operands. This can win if they are constants or
2688 if they are logically related (i.e. (a & b) & a). */
2689 if ((code == PLUS || code == MINUS
2690 || code == MULT || code == AND || code == IOR || code == XOR
2691 || code == DIV || code == UDIV
2692 || code == SMAX || code == SMIN || code == UMAX || code == UMIN)
2693 && GET_MODE_CLASS (mode) == MODE_INT)
2694 {
2695 if (GET_CODE (XEXP (x, 0)) == code)
2696 {
2697 rtx other = XEXP (XEXP (x, 0), 0);
2698 rtx inner_op0 = XEXP (XEXP (x, 0), 1);
2699 rtx inner_op1 = XEXP (x, 1);
2700 rtx inner;
2701
2702 /* Make sure we pass the constant operand if any as the second
2703 one if this is a commutative operation. */
2704 if (CONSTANT_P (inner_op0) && GET_RTX_CLASS (code) == 'c')
2705 {
2706 rtx tem = inner_op0;
2707 inner_op0 = inner_op1;
2708 inner_op1 = tem;
2709 }
2710 inner = simplify_binary_operation (code == MINUS ? PLUS
2711 : code == DIV ? MULT
2712 : code == UDIV ? MULT
2713 : code,
2714 mode, inner_op0, inner_op1);
2715
2716 /* For commutative operations, try the other pair if that one
2717 didn't simplify. */
2718 if (inner == 0 && GET_RTX_CLASS (code) == 'c')
2719 {
2720 other = XEXP (XEXP (x, 0), 1);
2721 inner = simplify_binary_operation (code, mode,
2722 XEXP (XEXP (x, 0), 0),
2723 XEXP (x, 1));
2724 }
2725
2726 if (inner)
2727 {
2728 x = gen_binary (code, mode, other, inner);
2729 goto restart;
2730
2731 }
2732 }
2733 }
2734
2735 /* A little bit of algebraic simplification here. */
2736 switch (code)
2737 {
2738 case MEM:
2739 /* Ensure that our address has any ASHIFTs converted to MULT in case
2740 address-recognizing predicates are called later. */
2741 temp = make_compound_operation (XEXP (x, 0), MEM);
2742 SUBST (XEXP (x, 0), temp);
2743 break;
2744
2745 case SUBREG:
2746 /* (subreg:A (mem:B X) N) becomes a modified MEM unless the SUBREG
2747 is paradoxical. If we can't do that safely, then it becomes
2748 something nonsensical so that this combination won't take place. */
2749
2750 if (GET_CODE (SUBREG_REG (x)) == MEM
2751 && (GET_MODE_SIZE (mode)
2752 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
2753 {
2754 rtx inner = SUBREG_REG (x);
2755 int endian_offset = 0;
2756 /* Don't change the mode of the MEM
2757 if that would change the meaning of the address. */
2758 if (MEM_VOLATILE_P (SUBREG_REG (x))
2759 || mode_dependent_address_p (XEXP (inner, 0)))
2760 return gen_rtx (CLOBBER, mode, const0_rtx);
2761
2762#if BYTES_BIG_ENDIAN
2763 if (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
2764 endian_offset += UNITS_PER_WORD - GET_MODE_SIZE (mode);
2765 if (GET_MODE_SIZE (GET_MODE (inner)) < UNITS_PER_WORD)
2766 endian_offset -= UNITS_PER_WORD - GET_MODE_SIZE (GET_MODE (inner));
2767#endif
2768 /* Note if the plus_constant doesn't make a valid address
2769 then this combination won't be accepted. */
2770 x = gen_rtx (MEM, mode,
2771 plus_constant (XEXP (inner, 0),
2772 (SUBREG_WORD (x) * UNITS_PER_WORD
2773 + endian_offset)));
2774 MEM_VOLATILE_P (x) = MEM_VOLATILE_P (inner);
2775 RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (inner);
2776 MEM_IN_STRUCT_P (x) = MEM_IN_STRUCT_P (inner);
2777 return x;
2778 }
2779
2780 /* If we are in a SET_DEST, these other cases can't apply. */
2781 if (in_dest)
2782 return x;
2783
2784 /* Changing mode twice with SUBREG => just change it once,
2785 or not at all if changing back to starting mode. */
2786 if (GET_CODE (SUBREG_REG (x)) == SUBREG)
2787 {
2788 if (mode == GET_MODE (SUBREG_REG (SUBREG_REG (x)))
2789 && SUBREG_WORD (x) == 0 && SUBREG_WORD (SUBREG_REG (x)) == 0)
2790 return SUBREG_REG (SUBREG_REG (x));
2791
2792 SUBST_INT (SUBREG_WORD (x),
2793 SUBREG_WORD (x) + SUBREG_WORD (SUBREG_REG (x)));
2794 SUBST (SUBREG_REG (x), SUBREG_REG (SUBREG_REG (x)));
2795 }
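      /* E.g., (subreg:QI (subreg:HI (reg:SI 65) 0) 0) is collapsed to
	 (subreg:QI (reg:SI 65) 0); changing back to the original SImode
	 would yield (reg:SI 65) itself.  */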
2796
2797 /* SUBREG of a hard register => just change the register number
2798 and/or mode. If the hard register is not valid in that mode,
2799 suppress this combination. */
2800
2801 if (GET_CODE (SUBREG_REG (x)) == REG
2802 && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
2803 {
2804 if (HARD_REGNO_MODE_OK (REGNO (SUBREG_REG (x)) + SUBREG_WORD (x),
2805 mode))
2806 return gen_rtx (REG, mode,
2807 REGNO (SUBREG_REG (x)) + SUBREG_WORD (x));
2808 else
2809 return gen_rtx (CLOBBER, mode, const0_rtx);
2810 }
2811
2812 /* For a constant, try to pick up the part we want. Handle a full
2813 word and low-order part. Only do this if we are narrowing
2814 the constant; if it is being widened, we have no idea what
2815 the extra bits will have been set to. */
2816
2817 if (CONSTANT_P (SUBREG_REG (x)) && op0_mode != VOIDmode
2818 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
2819 && GET_MODE_SIZE (op0_mode) < UNITS_PER_WORD
2820 && GET_MODE_CLASS (mode) == MODE_INT)
2821 {
2822 temp = operand_subword (SUBREG_REG (x), SUBREG_WORD (x),
2823 0, op0_mode);
2824 if (temp)
2825 return temp;
2826 }
2827
2828 if (CONSTANT_P (SUBREG_REG (x)) && subreg_lowpart_p (x)
2829 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (op0_mode))
2830 return gen_lowpart_for_combine (mode, SUBREG_REG (x));
2831
2832 /* If we are narrowing the object, we need to see if we can simplify
2833 the expression for the object knowing that we only need the
2834 low-order bits. */
2835
2836 if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
2837 && subreg_lowpart_p (x))
2838 return force_to_mode (SUBREG_REG (x), mode, GET_MODE_BITSIZE (mode),
2839 NULL_RTX);
2840 break;
2841
2842 case NOT:
2843 /* (not (plus X -1)) can become (neg X). */
2844 if (GET_CODE (XEXP (x, 0)) == PLUS
2845 && XEXP (XEXP (x, 0), 1) == constm1_rtx)
2846 {
2847 x = gen_rtx_combine (NEG, mode, XEXP (XEXP (x, 0), 0));
2848 goto restart;
2849 }
2850
2851 /* Similarly, (not (neg X)) is (plus X -1). */
2852 if (GET_CODE (XEXP (x, 0)) == NEG)
2853 {
2854 x = gen_rtx_combine (PLUS, mode, XEXP (XEXP (x, 0), 0), constm1_rtx);
2855 goto restart;
2856 }
2857
2858 /* (not (xor X C)) for C constant is (xor X D) with D = ~ C. */
2859 if (GET_CODE (XEXP (x, 0)) == XOR
2860 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
2861 && (temp = simplify_unary_operation (NOT, mode,
2862 XEXP (XEXP (x, 0), 1),
2863 mode)) != 0)
2864 {
2865 SUBST (XEXP (XEXP (x, 0), 1), temp);
2866 return XEXP (x, 0);
2867 }
2868
2869 /* (not (ashift 1 X)) is (rotate ~1 X). We used to do this for operands
2870 other than 1, but that is not valid. We could do a similar
2871 simplification for (not (lshiftrt C X)) where C is just the sign bit,
2872 but this doesn't seem common enough to bother with. */
2873 if (GET_CODE (XEXP (x, 0)) == ASHIFT
2874 && XEXP (XEXP (x, 0), 0) == const1_rtx)
2875 {
2876 x = gen_rtx (ROTATE, mode, gen_unary (NOT, mode, const1_rtx),
2877 XEXP (XEXP (x, 0), 1));
2878 goto restart;
2879 }
2880
2881 if (GET_CODE (XEXP (x, 0)) == SUBREG
2882 && subreg_lowpart_p (XEXP (x, 0))
2883 && (GET_MODE_SIZE (GET_MODE (XEXP (x, 0)))
2884 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (x, 0)))))
2885 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == ASHIFT
2886 && XEXP (SUBREG_REG (XEXP (x, 0)), 0) == const1_rtx)
2887 {
2888 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (XEXP (x, 0)));
2889
2890 x = gen_rtx (ROTATE, inner_mode,
2891 gen_unary (NOT, inner_mode, const1_rtx),
2892 XEXP (SUBREG_REG (XEXP (x, 0)), 1));
2893 x = gen_lowpart_for_combine (mode, x);
2894 goto restart;
2895 }
2896
2897#if STORE_FLAG_VALUE == -1
2898 /* (not (comparison foo bar)) can be done by reversing the comparison
2899 code if valid. */
2900 if (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
2901 && reversible_comparison_p (XEXP (x, 0)))
2902 return gen_rtx_combine (reverse_condition (GET_CODE (XEXP (x, 0))),
2903 mode, XEXP (XEXP (x, 0), 0),
2904 XEXP (XEXP (x, 0), 1));
2905#endif
2906
2907 /* Apply De Morgan's laws to reduce number of patterns for machines
2908 with negating logical insns (and-not, nand, etc.). If result has
2909 only one NOT, put it first, since that is how the patterns are
2910 coded. */
2911
2912 if (GET_CODE (XEXP (x, 0)) == IOR || GET_CODE (XEXP (x, 0)) == AND)
2913 {
2914 rtx in1 = XEXP (XEXP (x, 0), 0), in2 = XEXP (XEXP (x, 0), 1);
2915
2916 if (GET_CODE (in1) == NOT)
2917 in1 = XEXP (in1, 0);
2918 else
2919 in1 = gen_rtx_combine (NOT, GET_MODE (in1), in1);
2920
2921 if (GET_CODE (in2) == NOT)
2922 in2 = XEXP (in2, 0);
2923 else if (GET_CODE (in2) == CONST_INT
2924 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
2925 in2 = GEN_INT (GET_MODE_MASK (mode) & ~ INTVAL (in2));
2926 else
2927 in2 = gen_rtx_combine (NOT, GET_MODE (in2), in2);
2928
2929 if (GET_CODE (in2) == NOT)
2930 {
2931 rtx tem = in2;
2932 in2 = in1; in1 = tem;
2933 }
2934
2935 x = gen_rtx_combine (GET_CODE (XEXP (x, 0)) == IOR ? AND : IOR,
2936 mode, in1, in2);
2937 goto restart;
2938 }
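	  /* E.g., (not (and r1 r2)) becomes (ior (not r1) (not r2)),
	     which machines with negating logical insns can often match
	     directly.  */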
2939 break;
2940
2941 case NEG:
2942 /* (neg (plus X 1)) can become (not X). */
2943 if (GET_CODE (XEXP (x, 0)) == PLUS
2944 && XEXP (XEXP (x, 0), 1) == const1_rtx)
2945 {
2946 x = gen_rtx_combine (NOT, mode, XEXP (XEXP (x, 0), 0));
2947 goto restart;
2948 }
2949
2950 /* Similarly, (neg (not X)) is (plus X 1). */
2951 if (GET_CODE (XEXP (x, 0)) == NOT)
2952 {
2953 x = gen_rtx_combine (PLUS, mode, XEXP (XEXP (x, 0), 0), const1_rtx);
2954 goto restart;
2955 }
2956
2957 /* (neg (minus X Y)) can become (minus Y X). */
2958 if (GET_CODE (XEXP (x, 0)) == MINUS
2959 && (GET_MODE_CLASS (mode) != MODE_FLOAT
2960 /* x-y != -(y-x) with IEEE floating point. */
2961 || TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT))
2962 {
2963 x = gen_binary (MINUS, mode, XEXP (XEXP (x, 0), 1),
2964 XEXP (XEXP (x, 0), 0));
2965 goto restart;
2966 }
2967
2968 /* (neg (xor A 1)) is (plus A -1) if A is known to be either 0 or 1. */
2969 if (GET_CODE (XEXP (x, 0)) == XOR && XEXP (XEXP (x, 0), 1) == const1_rtx
2970 && significant_bits (XEXP (XEXP (x, 0), 0), mode) == 1)
2971 {
2972 x = gen_binary (PLUS, mode, XEXP (XEXP (x, 0), 0), constm1_rtx);
2973 goto restart;
2974 }
2975
2976 /* NEG commutes with ASHIFT since it is multiplication. Only do this
2977 if we can then eliminate the NEG (e.g.,
2978 if the operand is a constant). */
2979
2980 if (GET_CODE (XEXP (x, 0)) == ASHIFT)
2981 {
2982 temp = simplify_unary_operation (NEG, mode,
2983 XEXP (XEXP (x, 0), 0), mode);
2984 if (temp)
2985 {
2986 SUBST (XEXP (XEXP (x, 0), 0), temp);
2987 return XEXP (x, 0);
2988 }
2989 }
2990
2991 temp = expand_compound_operation (XEXP (x, 0));
2992
2993 /* For C equal to the width of MODE minus 1, (neg (ashiftrt X C)) can be
2994 replaced by (lshiftrt X C). This will convert
2995 (neg (sign_extract X 1 Y)) to (zero_extract X 1 Y). */
2996
2997 if (GET_CODE (temp) == ASHIFTRT
2998 && GET_CODE (XEXP (temp, 1)) == CONST_INT
2999 && INTVAL (XEXP (temp, 1)) == GET_MODE_BITSIZE (mode) - 1)
3000 {
3001 x = simplify_shift_const (temp, LSHIFTRT, mode, XEXP (temp, 0),
3002 INTVAL (XEXP (temp, 1)));
3003 goto restart;
3004 }
3005
3006 /* If X has only a single bit significant, say, bit I, convert
3007 (neg X) to (ashiftrt (ashift X C-I) C-I) where C is the bitsize of
3008 MODE minus 1. This will convert (neg (zero_extract X 1 Y)) to
3009 (sign_extract X 1 Y). But only do this if TEMP isn't a register
3010 or a SUBREG of one since we'd be making the expression more
3011 complex if it was just a register. */
3012
3013 if (GET_CODE (temp) != REG
3014 && ! (GET_CODE (temp) == SUBREG
3015 && GET_CODE (SUBREG_REG (temp)) == REG)
3016 && (i = exact_log2 (significant_bits (temp, mode))) >= 0)
3017 {
3018 rtx temp1 = simplify_shift_const
3019 (NULL_RTX, ASHIFTRT, mode,
3020 simplify_shift_const (NULL_RTX, ASHIFT, mode, temp,
3021 GET_MODE_BITSIZE (mode) - 1 - i),
3022 GET_MODE_BITSIZE (mode) - 1 - i);
3023
3024 /* If all we did was surround TEMP with the two shifts, we
3025 haven't improved anything, so don't use it. Otherwise,
3026 we are better off with TEMP1. */
3027 if (GET_CODE (temp1) != ASHIFTRT
3028 || GET_CODE (XEXP (temp1, 0)) != ASHIFT
3029 || XEXP (XEXP (temp1, 0), 0) != temp)
3030 {
3031 x = temp1;
3032 goto restart;
3033 }
3034 }
3035 break;
3036
3037 case FLOAT_TRUNCATE:
3038 /* (float_truncate:SF (float_extend:DF foo:SF)) = foo:SF. */
3039 if (GET_CODE (XEXP (x, 0)) == FLOAT_EXTEND
3040 && GET_MODE (XEXP (XEXP (x, 0), 0)) == mode)
3041 return XEXP (XEXP (x, 0), 0);
3042 break;
3043
3044#ifdef HAVE_cc0
3045 case COMPARE:
3046 /* Convert (compare FOO (const_int 0)) to FOO unless we aren't
3047 using cc0, in which case we want to leave it as a COMPARE
3048 so we can distinguish it from a register-register-copy. */
3049 if (XEXP (x, 1) == const0_rtx)
3050 return XEXP (x, 0);
3051
3052 /* In IEEE floating point, x-0 is not the same as x. */
3053 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3054 || GET_MODE_CLASS (GET_MODE (XEXP (x, 0))) == MODE_INT)
3055 && XEXP (x, 1) == CONST0_RTX (GET_MODE (XEXP (x, 0))))
3056 return XEXP (x, 0);
3057 break;
3058#endif
3059
3060 case CONST:
3061 /* (const (const X)) can become (const X). Do it this way rather than
3062 returning the inner CONST since CONST can be shared with a
3063 REG_EQUAL note. */
3064 if (GET_CODE (XEXP (x, 0)) == CONST)
3065 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
3066 break;
3067
3068#ifdef HAVE_lo_sum
3069 case LO_SUM:
3070 /* Convert (lo_sum (high FOO) FOO) to FOO. This is necessary so we
3071 can add in an offset. find_split_point will split this address up
3072 again if it doesn't match. */
3073 if (GET_CODE (XEXP (x, 0)) == HIGH
3074 && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1)))
3075 return XEXP (x, 1);
3076 break;
3077#endif
3078
3079 case PLUS:
3080 /* If we have (plus (plus A const) B), associate it so that CONST is
3081 outermost. That's because that's the way indexed addresses are
3082 supposed to appear. This code used to check many more cases, but
3083 they are now checked elsewhere. */
3084 if (GET_CODE (XEXP (x, 0)) == PLUS
3085 && CONSTANT_ADDRESS_P (XEXP (XEXP (x, 0), 1)))
3086 return gen_binary (PLUS, mode,
3087 gen_binary (PLUS, mode, XEXP (XEXP (x, 0), 0),
3088 XEXP (x, 1)),
3089 XEXP (XEXP (x, 0), 1));
3090
3091 /* (plus (xor (and <foo> (const_int pow2 - 1)) <c>) <-c>)
3092 when c is (const_int (pow2 + 1) / 2) is a sign extension of a
3093 bit-field and can be replaced by either a sign_extend or a
3094 sign_extract. The `and' may be a zero_extend. */
3095 if (GET_CODE (XEXP (x, 0)) == XOR
3096 && GET_CODE (XEXP (x, 1)) == CONST_INT
3097 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3098 && INTVAL (XEXP (x, 1)) == - INTVAL (XEXP (XEXP (x, 0), 1))
3099 && (i = exact_log2 (INTVAL (XEXP (XEXP (x, 0), 1)))) >= 0
3100 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3101 && ((GET_CODE (XEXP (XEXP (x, 0), 0)) == AND
3102 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
3103 && (INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1))
3104 == ((HOST_WIDE_INT) 1 << (i + 1)) - 1))
3105 || (GET_CODE (XEXP (XEXP (x, 0), 0)) == ZERO_EXTEND
3106 && (GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)))
3107 == i + 1))))
3108 {
3109 x = simplify_shift_const
3110 (NULL_RTX, ASHIFTRT, mode,
3111 simplify_shift_const (NULL_RTX, ASHIFT, mode,
3112 XEXP (XEXP (XEXP (x, 0), 0), 0),
3113 GET_MODE_BITSIZE (mode) - (i + 1)),
3114 GET_MODE_BITSIZE (mode) - (i + 1));
3115 goto restart;
3116 }
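/* This is the source-level sign-extension idiom: with an (i+1)-bit
   field, XORing with the sign bit and subtracting it back sign-extends
   the field.  A hypothetical 8-bit-in-32 sketch (the name and widths
   are illustrative only):

   int32_t sext8 (uint32_t x)
   { return (int32_t) (((x & 0xff) ^ 0x80) - 0x80); }

   Here the XOR constant is 0x80 and the PLUS constant is -0x80; the
   shift pair built above computes the same value.  */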
3117
3118 /* If only the low-order bit of X is significant, (plus x -1)
3119 can become (ashiftrt (ashift (xor x 1) C) C) where C is
3120 the bitsize of the mode - 1. This allows simplification of
3121 "a = (b & 8) == 0;" */
3122 if (XEXP (x, 1) == constm1_rtx
3123 && GET_CODE (XEXP (x, 0)) != REG
3124 && ! (GET_CODE (XEXP (x,0)) == SUBREG
3125 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG)
3126 && significant_bits (XEXP (x, 0), mode) == 1)
3127 {
3128 x = simplify_shift_const
3129 (NULL_RTX, ASHIFTRT, mode,
3130 simplify_shift_const (NULL_RTX, ASHIFT, mode,
3131 gen_rtx_combine (XOR, mode,
3132 XEXP (x, 0), const1_rtx),
3133 GET_MODE_BITSIZE (mode) - 1),
3134 GET_MODE_BITSIZE (mode) - 1);
3135 goto restart;
3136 }
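/* Worked example of the "a = (b & 8) == 0;" case, assuming a 32-bit
   mode: the comparison result x is 0 or 1, so (plus x -1) is -1 or 0.
   (xor x 1) inverts x, the ashift moves the bit into the sign
   position, and the ashiftrt replicates it, producing the same
   -1-or-0 result.  */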
3137
3138 /* If we are adding two things that have no bits in common, convert
3139 the addition into an IOR. This will often be further simplified,
3140 for example in cases like ((a & 1) + (a & 2)), which can
3141 become a & 3. */
3142
3143 if ((significant_bits (XEXP (x, 0), mode)
3144 & significant_bits (XEXP (x, 1), mode)) == 0)
3145 {
3146 x = gen_binary (IOR, mode, XEXP (x, 0), XEXP (x, 1));
3147 goto restart;
3148 }
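/* For instance, in ((a & 1) + (a & 2)) the significant-bit masks 1
   and 2 do not intersect, so no carry is possible and the sum equals
   ((a & 1) | (a & 2)), which later simplifications fold to (a & 3).  */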
3149 break;
3150
3151 case MINUS:
3152 /* (minus <foo> (and <foo> (const_int -pow2))) becomes
3153 (and <foo> (const_int pow2-1)) */
3154 if (GET_CODE (XEXP (x, 1)) == AND
3155 && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
3156 && exact_log2 (- INTVAL (XEXP (XEXP (x, 1), 1))) >= 0
3157 && rtx_equal_p (XEXP (XEXP (x, 1), 0), XEXP (x, 0)))
3158 {
3159 x = simplify_and_const_int (NULL_RTX, mode, XEXP (x, 0),
3160 - INTVAL (XEXP (XEXP (x, 1), 1)) - 1);
3161 goto restart;
3162 }
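/* Worked instance: for <foo> == 13 and pow2 == 8, (and 13 -8) == 8
   and 13 - 8 == 5 == (and 13 7); subtracting the aligned part leaves
   just the bits selected by the mask pow2-1.  */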
3163 break;
3164
3165 case MULT:
3166 /* If we have (mult (plus A B) C), apply the distributive law and then
3167 the inverse distributive law to see if things simplify. This
3168 occurs mostly in addresses, often when unrolling loops. */
3169
3170 if (GET_CODE (XEXP (x, 0)) == PLUS)
3171 {
3172 x = apply_distributive_law
3173 (gen_binary (PLUS, mode,
3174 gen_binary (MULT, mode,
3175 XEXP (XEXP (x, 0), 0), XEXP (x, 1)),
3176 gen_binary (MULT, mode,
3177 XEXP (XEXP (x, 0), 1), XEXP (x, 1))));
3178
3179 if (GET_CODE (x) != MULT)
3180 goto restart;
3181 }
3182
3183 /* If this is multiplication by a power of two and its first operand is
3184 a shift, treat the multiply as a shift to allow the shifts to
3185 possibly combine. */
3186 if (GET_CODE (XEXP (x, 1)) == CONST_INT
3187 && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0
3188 && (GET_CODE (XEXP (x, 0)) == ASHIFT
3189 || GET_CODE (XEXP (x, 0)) == LSHIFTRT
3190 || GET_CODE (XEXP (x, 0)) == ASHIFTRT
3191 || GET_CODE (XEXP (x, 0)) == ROTATE
3192 || GET_CODE (XEXP (x, 0)) == ROTATERT))
3193 {
3194 x = simplify_shift_const (NULL_RTX, ASHIFT, mode, XEXP (x, 0), i);
3195 goto restart;
3196 }
3197
3198 /* Convert (mult (ashift (const_int 1) A) B) to (ashift B A). */
3199 if (GET_CODE (XEXP (x, 0)) == ASHIFT
3200 && XEXP (XEXP (x, 0), 0) == const1_rtx)
3201 return gen_rtx_combine (ASHIFT, mode, XEXP (x, 1),
3202 XEXP (XEXP (x, 0), 1));
3203 break;
3204
3205 case UDIV:
3206 /* If this is a divide by a power of two, treat it as a shift if
3207 its first operand is a shift. */
3208 if (GET_CODE (XEXP (x, 1)) == CONST_INT
3209 && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0
3210 && (GET_CODE (XEXP (x, 0)) == ASHIFT
3211 || GET_CODE (XEXP (x, 0)) == LSHIFTRT
3212 || GET_CODE (XEXP (x, 0)) == ASHIFTRT
3213 || GET_CODE (XEXP (x, 0)) == ROTATE
3214 || GET_CODE (XEXP (x, 0)) == ROTATERT))
3215 {
3216 x = simplify_shift_const (NULL_RTX, LSHIFTRT, mode, XEXP (x, 0), i);
3217 goto restart;
3218 }
3219 break;
3220
3221 case EQ: case NE:
3222 case GT: case GTU: case GE: case GEU:
3223 case LT: case LTU: case LE: case LEU:
3224 /* If the first operand is a condition code, we can't do anything
3225 with it. */
3226 if (GET_CODE (XEXP (x, 0)) == COMPARE
3227 || (GET_MODE_CLASS (GET_MODE (XEXP (x, 0))) != MODE_CC
3228#ifdef HAVE_cc0
3229 && XEXP (x, 0) != cc0_rtx
3230#endif
3231 ))
3232 {
3233 rtx op0 = XEXP (x, 0);
3234 rtx op1 = XEXP (x, 1);
3235 enum rtx_code new_code;
3236
3237 if (GET_CODE (op0) == COMPARE)
3238 op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);
3239
3240 /* Simplify our comparison, if possible. */
3241 new_code = simplify_comparison (code, &op0, &op1);
3242
3243#if STORE_FLAG_VALUE == 1
3244 /* If STORE_FLAG_VALUE is 1, we can convert (ne x 0) to simply X
3245 if only the low-order bit is significant in X (such as when
3246 X is a ZERO_EXTRACT of one bit).  Similarly, we can convert
3247 EQ to (xor X 1). */
3248 if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
3249 && op1 == const0_rtx
3250 && significant_bits (op0, GET_MODE (op0)) == 1)
3251 return gen_lowpart_for_combine (mode, op0);
3252 else if (new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
3253 && op1 == const0_rtx
3254 && significant_bits (op0, GET_MODE (op0)) == 1)
3255 return gen_rtx_combine (XOR, mode,
3256 gen_lowpart_for_combine (mode, op0),
3257 const1_rtx);
3258#endif
3259
3260#if STORE_FLAG_VALUE == -1
3261 /* If STORE_FLAG_VALUE is -1, we can convert (ne x 0)
3262 to (neg x) if only the low-order bit of X is significant.
3263 This converts (ne (zero_extract X 1 Y) 0) to
3264 (sign_extract X 1 Y). */
3265 if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
3266 && op1 == const0_rtx
3267 && significant_bits (op0, GET_MODE (op0)) == 1)
3268 {
3269 x = gen_rtx_combine (NEG, mode,
3270 gen_lowpart_for_combine (mode, op0));
3271 goto restart;
3272 }
3273#endif
3274
3275 /* If STORE_FLAG_VALUE says to just test the sign bit and X has just
3276 one significant bit, we can convert (ne x 0) to (ashift x c)
3277 where C puts the bit in the sign bit. Remove any AND with
3278 STORE_FLAG_VALUE when we are done, since we are only going to
3279 test the sign bit. */
3280 if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
3281 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3282 && (STORE_FLAG_VALUE
3283 == (HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (mode) - 1))
3284 && op1 == const0_rtx
3285 && mode == GET_MODE (op0)
3286 && (i = exact_log2 (significant_bits (op0, GET_MODE (op0)))) >= 0)
3287 {
3288 x = simplify_shift_const (NULL_RTX, ASHIFT, mode, op0,
3289 GET_MODE_BITSIZE (mode) - 1 - i);
3290 if (GET_CODE (x) == AND && XEXP (x, 1) == const_true_rtx)
3291 return XEXP (x, 0);
3292 else
3293 return x;
3294 }
3295
3296 /* If the code changed, return a whole new comparison. */
3297 if (new_code != code)
3298 return gen_rtx_combine (new_code, mode, op0, op1);
3299
3300 /* Otherwise, keep this operation, but maybe change its operands.
3301 This also converts (ne (compare FOO BAR) 0) to (ne FOO BAR). */
3302 SUBST (XEXP (x, 0), op0);
3303 SUBST (XEXP (x, 1), op1);
3304 }
3305 break;
3306
3307 case IF_THEN_ELSE:
3308 /* If we are testing a register for equality, see if that register is
3309 used in one of the arms. If so, and we know something about its
3310 value in that arm, try to simplify it. */
3311
3312 if ((GET_CODE (XEXP (x, 0)) == EQ || GET_CODE (XEXP (x, 0)) == NE)
3313 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG)
3314 {
3315 /* Get the value being compared and the value it has on the equal
3316 branch. */
3317 HOST_WIDE_INT sig;
3318 rtx from = XEXP (XEXP (x, 0), 0);
3319 rtx val_if_eq = XEXP (XEXP (x, 0), 1);
3320 rtx val_if_ne = from;
3321 int is_eq = (GET_CODE (XEXP (x, 0)) == EQ);
3322
3323 /* If we are comparing against zero and the expression being tested
3324 has only a single significant bit, that bit is its value when it is
3325 not equal to zero.  Similarly if it is known to be -1 or 0. */
3326
3327 if (val_if_eq == const0_rtx
3328 && exact_log2 (sig = significant_bits (from,
3329 GET_MODE (from))) >= 0)
3330 val_if_ne = GEN_INT (sig);
3331 else if (val_if_eq == const0_rtx
3332 && (num_sign_bit_copies (from, GET_MODE (from))
3333 == GET_MODE_BITSIZE (GET_MODE (from))))
3334 val_if_ne = constm1_rtx;
3335
3336 /* Now simplify an arm if we know the value of the register
3337 in the branch and it is used in the arm.  Be careful due to
3338 the potential of locally-shared RTL. */
3339
3340 if ((is_eq || val_if_ne != from)
3341 && reg_mentioned_p (from, XEXP (x, 1)))
3342 SUBST (XEXP (x, 1), subst (copy_rtx (XEXP (x, 1)), from,
3343 is_eq ? val_if_eq : val_if_ne, 0));
3344
3345 if ((! is_eq || val_if_ne != from)
3346 && reg_mentioned_p (from, XEXP (x, 2)))
3347 SUBST (XEXP (x, 2), subst (XEXP (x, 2), from,
3348 is_eq ? val_if_ne : val_if_eq, 0));
3349 }
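/* For example, (if_then_else (eq R 0) (plus R 4) B) can become
   (if_then_else (eq R 0) (plus 0 4) B), since R is known to be zero
   in the arm taken on equality; the copy_rtx call above guards
   against modifying RTL shared with the other arm.  */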
3350
3351 /* If we have (if_then_else FOO (pc) (label_ref BAR)) and FOO can be
3352 reversed, do so to avoid needing two sets of patterns for
3353 subtract-and-branch insns. Similarly if we have a constant in that
3354 position. */
3355 if ((XEXP (x, 1) == pc_rtx || GET_CODE (XEXP (x, 1)) == CONST_INT)
3356 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
3357 && reversible_comparison_p (XEXP (x, 0)))
3358 {
3359 SUBST (XEXP (x, 0),
3360 gen_binary (reverse_condition (GET_CODE (XEXP (x, 0))),
3361 GET_MODE (XEXP (x, 0)),
3362 XEXP (XEXP (x, 0), 0), XEXP (XEXP (x, 0), 1)));
3363
3364 temp = XEXP (x, 1);
3365 SUBST (XEXP (x, 1), XEXP (x, 2));
3366 SUBST (XEXP (x, 2), temp);
3367 }
3368 break;
3369
3370 case ZERO_EXTRACT:
3371 case SIGN_EXTRACT:
3372 case ZERO_EXTEND:
3373 case SIGN_EXTEND:
3374 /* If we are processing SET_DEST, we are done. */
3375 if (in_dest)
3376 return x;
3377
3378 x = expand_compound_operation (x);
3379 if (GET_CODE (x) != code)
3380 goto restart;
3381 break;
3382
3383 case SET:
3384 /* (set (pc) (return)) gets written as (return). */
3385 if (GET_CODE (SET_DEST (x)) == PC && GET_CODE (SET_SRC (x)) == RETURN)
3386 return SET_SRC (x);
3387
3388 /* Convert this into a field assignment operation, if possible. */
3389 x = make_field_assignment (x);
3390
3391 /* If we are setting CC0 or if the source is a COMPARE, look for the
3392 use of the comparison result and try to simplify it unless we already
3393 have used undobuf.other_insn. */
3394 if ((GET_CODE (SET_SRC (x)) == COMPARE
3395#ifdef HAVE_cc0
3396 || SET_DEST (x) == cc0_rtx
3397#endif
3398 )
3399 && (cc_use = find_single_use (SET_DEST (x), subst_insn,
3400 &other_insn)) != 0
3401 && (undobuf.other_insn == 0 || other_insn == undobuf.other_insn)
3402 && GET_RTX_CLASS (GET_CODE (*cc_use)) == '<'
3403 && XEXP (*cc_use, 0) == SET_DEST (x))
3404 {
3405 enum rtx_code old_code = GET_CODE (*cc_use);
3406 enum rtx_code new_code;
3407 rtx op0, op1;
3408 int other_changed = 0;
3409 enum machine_mode compare_mode = GET_MODE (SET_DEST (x));
3410
3411 if (GET_CODE (SET_SRC (x)) == COMPARE)
3412 op0 = XEXP (SET_SRC (x), 0), op1 = XEXP (SET_SRC (x), 1);
3413 else
3414 op0 = SET_SRC (x), op1 = const0_rtx;
3415
3416 /* Simplify our comparison, if possible. */
3417 new_code = simplify_comparison (old_code, &op0, &op1);
3418
3419#if !defined (HAVE_cc0) && defined (EXTRA_CC_MODES)
3420 /* If this machine has CC modes other than CCmode, check to see
3421 if we need to use a different CC mode here. */
3422 compare_mode = SELECT_CC_MODE (new_code, op0, op1);
3423
3424 /* If the mode changed, we have to change SET_DEST, the mode
3425 in the compare, and the mode in the place SET_DEST is used.
3426 If SET_DEST is a hard register, just build new versions with
3427 the proper mode. If it is a pseudo, we lose unless this is the
3428 only time we set the pseudo, in which case we can safely change
3429 its mode. */
3430 if (compare_mode != GET_MODE (SET_DEST (x)))
3431 {
3432 int regno = REGNO (SET_DEST (x));
3433 rtx new_dest = gen_rtx (REG, compare_mode, regno);
3434
3435 if (regno < FIRST_PSEUDO_REGISTER
3436 || (reg_n_sets[regno] == 1
3437 && ! REG_USERVAR_P (SET_DEST (x))))
3438 {
3439 if (regno >= FIRST_PSEUDO_REGISTER)
3440 SUBST (regno_reg_rtx[regno], new_dest);
3441
3442 SUBST (SET_DEST (x), new_dest);
3443 SUBST (XEXP (*cc_use, 0), new_dest);
3444 other_changed = 1;
3445 }
3446 }
3447#endif
3448
3449 /* If the code changed, we have to build a new comparison
3450 in undobuf.other_insn. */
3451 if (new_code != old_code)
3452 {
3453 unsigned mask;
3454
3455 SUBST (*cc_use, gen_rtx_combine (new_code, GET_MODE (*cc_use),
3456 SET_DEST (x), const0_rtx));
3457
3458 /* If the only change we made was to change an EQ into an
3459 NE or vice versa, OP0 has only one significant bit,
3460 and OP1 is zero, check if changing the user of the condition
3461 code will produce a valid insn. If it won't, we can keep
3462 the original code in that insn by surrounding our operation
3463 with an XOR. */
3464
3465 if (((old_code == NE && new_code == EQ)
3466 || (old_code == EQ && new_code == NE))
3467 && ! other_changed && op1 == const0_rtx
3468 && (GET_MODE_BITSIZE (GET_MODE (op0))
3469 <= HOST_BITS_PER_WIDE_INT)
3470 && (exact_log2 (mask = significant_bits (op0,
3471 GET_MODE (op0)))
3472 >= 0))
3473 {
3474 rtx pat = PATTERN (other_insn), note = 0;
3475
3476 if ((recog_for_combine (&pat, undobuf.other_insn, &note) < 0
3477 && ! check_asm_operands (pat)))
3478 {
3479 PUT_CODE (*cc_use, old_code);
3480 other_insn = 0;
3481
3482 op0 = gen_binary (XOR, GET_MODE (op0), op0,
3483 GEN_INT (mask));
3484 }
3485 }
3486
3487 other_changed = 1;
3488 }
3489
3490 if (other_changed)
3491 undobuf.other_insn = other_insn;
3492
3493#ifdef HAVE_cc0
3494 /* If we are now comparing against zero, change our source if
3495 needed. If we do not use cc0, we always have a COMPARE. */
3496 if (op1 == const0_rtx && SET_DEST (x) == cc0_rtx)
3497 SUBST (SET_SRC (x), op0);
3498 else
3499#endif
3500
3501 /* Otherwise, if we didn't previously have a COMPARE in the
3502 correct mode, we need one. */
3503 if (GET_CODE (SET_SRC (x)) != COMPARE
3504 || GET_MODE (SET_SRC (x)) != compare_mode)
3505 SUBST (SET_SRC (x), gen_rtx_combine (COMPARE, compare_mode,
3506 op0, op1));
3507 else
3508 {
3509 /* Otherwise, update the COMPARE if needed. */
3510 SUBST (XEXP (SET_SRC (x), 0), op0);
3511 SUBST (XEXP (SET_SRC (x), 1), op1);
3512 }
3513 }
3514 else
3515 {
3516 /* Get SET_SRC in a form where we have placed back any
3517 compound expressions. Then do the checks below. */
3518 temp = make_compound_operation (SET_SRC (x), SET);
3519 SUBST (SET_SRC (x), temp);
3520 }
3521
3522 /* If we have (set x (subreg:m1 (op:m2 ...) 0)) with OP being some
3523 operation, and X being a REG or (subreg (reg)), we may be able to
3524 convert this to (set (subreg:m2 x) (op)).
3525
3526 We can always do this if M1 is narrower than M2 because that
3527 means that we only care about the low bits of the result.
3528
3529 However, on most machines (those with BYTE_LOADS_ZERO_EXTEND
3530 not defined), we cannot perform a narrower operation than
3531 requested since the high-order bits will be undefined.  On
3532 machines where BYTE_LOADS_ZERO_EXTEND is defined, however, this
3533 transformation is safe as long as M1 and M2 have the same number
3534 of words. */
3535
3536 if (GET_CODE (SET_SRC (x)) == SUBREG
3537 && subreg_lowpart_p (SET_SRC (x))
3538 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (SET_SRC (x)))) != 'o'
3539 && (((GET_MODE_SIZE (GET_MODE (SET_SRC (x))) + (UNITS_PER_WORD - 1))
3540 / UNITS_PER_WORD)
3541 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_SRC (x))))
3542 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
3543#ifndef BYTE_LOADS_ZERO_EXTEND
3544 && (GET_MODE_SIZE (GET_MODE (SET_SRC (x)))
3545 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_SRC (x)))))
3546#endif
3547 && (GET_CODE (SET_DEST (x)) == REG
3548 || (GET_CODE (SET_DEST (x)) == SUBREG
3549 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG)))
3550 {
3551 SUBST (SET_DEST (x),
3552 gen_lowpart_for_combine (GET_MODE (SUBREG_REG (SET_SRC (x))),
3553 SET_DEST (x)));
3554 SUBST (SET_SRC (x), SUBREG_REG (SET_SRC (x)));
3555 }
3556
3557#ifdef BYTE_LOADS_ZERO_EXTEND
3558 /* If we have (set FOO (subreg:M (mem:N BAR) 0)) with
3559 M wider than N, this would require a paradoxical subreg.
3560 Replace the subreg with a zero_extend to avoid the reload that
3561 would otherwise be required. */
3562 if (GET_CODE (SET_SRC (x)) == SUBREG
3563 && subreg_lowpart_p (SET_SRC (x))
3564 && SUBREG_WORD (SET_SRC (x)) == 0
3565 && (GET_MODE_SIZE (GET_MODE (SET_SRC (x)))
3566 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_SRC (x)))))
3567 && GET_CODE (SUBREG_REG (SET_SRC (x))) == MEM)
3568 SUBST (SET_SRC (x), gen_rtx_combine (ZERO_EXTEND,
3569 GET_MODE (SET_SRC (x)),
3570 XEXP (SET_SRC (x), 0)));
3571#endif
3572
3573 break;
3574
3575 case AND:
3576 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
3577 {
3578 x = simplify_and_const_int (x, mode, XEXP (x, 0),
3579 INTVAL (XEXP (x, 1)));
3580
3581 /* If we have (ior (and X C1) C2) and the next restart would be
3582 the last, simplify this by making C1 as small as possible
3583 and then exit. */
3584 if (n_restarts >= 3 && GET_CODE (x) == IOR
3585 && GET_CODE (XEXP (x, 0)) == AND
3586 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3587 && GET_CODE (XEXP (x, 1)) == CONST_INT)
3588 {
3589 temp = gen_binary (AND, mode, XEXP (XEXP (x, 0), 0),
3590 GEN_INT (INTVAL (XEXP (XEXP (x, 0), 1))
3591 & ~ INTVAL (XEXP (x, 1))));
3592 return gen_binary (IOR, mode, temp, XEXP (x, 1));
3593 }
3594
3595 if (GET_CODE (x) != AND)
3596 goto restart;
3597 }
3598
3599 /* Convert (A | B) & A to A. */
3600 if (GET_CODE (XEXP (x, 0)) == IOR
3601 && (rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1))
3602 || rtx_equal_p (XEXP (XEXP (x, 0), 1), XEXP (x, 1)))
3603 && ! side_effects_p (XEXP (XEXP (x, 0), 0))
3604 && ! side_effects_p (XEXP (XEXP (x, 0), 1)))
3605 return XEXP (x, 1);
3606
3607 /* Convert (A ^ B) & A to A & (~ B) since the latter is often a single
3608 insn (and may simplify more). */
3609 else if (GET_CODE (XEXP (x, 0)) == XOR
3610 && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1))
3611 && ! side_effects_p (XEXP (x, 1)))
3612 {
3613 x = gen_binary (AND, mode,
3614 gen_unary (NOT, mode, XEXP (XEXP (x, 0), 1)),
3615 XEXP (x, 1));
3616 goto restart;
3617 }
3618 else if (GET_CODE (XEXP (x, 0)) == XOR
3619 && rtx_equal_p (XEXP (XEXP (x, 0), 1), XEXP (x, 1))
3620 && ! side_effects_p (XEXP (x, 1)))
3621 {
3622 x = gen_binary (AND, mode,
3623 gen_unary (NOT, mode, XEXP (XEXP (x, 0), 0)),
3624 XEXP (x, 1));
3625 goto restart;
3626 }
3627
3628 /* Similarly for (~ (A ^ B)) & A. */
3629 else if (GET_CODE (XEXP (x, 0)) == NOT
3630 && GET_CODE (XEXP (XEXP (x, 0), 0)) == XOR
3631 && rtx_equal_p (XEXP (XEXP (XEXP (x, 0), 0), 0), XEXP (x, 1))
3632 && ! side_effects_p (XEXP (x, 1)))
3633 {
3634 x = gen_binary (AND, mode, XEXP (XEXP (XEXP (x, 0), 0), 1),
3635 XEXP (x, 1));
3636 goto restart;
3637 }
3638 else if (GET_CODE (XEXP (x, 0)) == NOT
3639 && GET_CODE (XEXP (XEXP (x, 0), 0)) == XOR
3640 && rtx_equal_p (XEXP (XEXP (XEXP (x, 0), 0), 1), XEXP (x, 1))
3641 && ! side_effects_p (XEXP (x, 1)))
3642 {
3643 x = gen_binary (AND, mode, XEXP (XEXP (XEXP (x, 0), 0), 0),
3644 XEXP (x, 1));
3645 goto restart;
3646 }
3647
3648#ifdef HAVE_conditional_move
3649
3650 /* If we have (and A B) with A not an object but that is known to
3651 be -1 or 0, this is equivalent to the expression
3652 (if_then_else (ne A (const_int 0)) B (const_int 0))
3653 We make this conversion because it may allow further
3654 simplifications and then allow use of conditional move insns. */
3655
3656 if (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) != 'o'
3657 && ! (GET_CODE (XEXP (x, 0)) == SUBREG
3658 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0)))) == 'o')
3659 && (num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
3660 == GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))))
3661 {
3662 rtx op0 = XEXP (x, 0);
3663 rtx op1 = const0_rtx;
3664 enum rtx_code comp_code
3665 = simplify_comparison (NE, &op0, &op1);
3666
3667 x = gen_rtx_combine (IF_THEN_ELSE, mode,
3668 gen_binary (comp_code, VOIDmode, op0, op1),
3669 XEXP (x, 1), const0_rtx);
3670 goto restart;
3671 }
3672#endif
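/* Example, with A known to be 0 or -1: (and A B) is B when A == -1
   and 0 when A == 0, which is exactly
   (if_then_else (ne A 0) B (const_int 0)).  */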
3673
3674 /* In the following group of tests (and those in case IOR below),
3675 we start with some combination of logical operations and apply
3676 the distributive law followed by the inverse distributive law.
3677 Most of the time, this results in no change. However, if some of
3678 the operands are the same or inverses of each other, simplifications
3679 will result.
3680
3681 For example, (and (ior A B) (not B)) can occur as the result of
3682 expanding a bit field assignment. When we apply the distributive
3683 law to this, we get (ior (and A (not B)) (and B (not B))),
3684 which then simplifies to (and A (not B)). */
3685
3686 /* If we have (and (ior A B) C), apply the distributive law and then
3687 the inverse distributive law to see if things simplify. */
3688
3689 if (GET_CODE (XEXP (x, 0)) == IOR || GET_CODE (XEXP (x, 0)) == XOR)
3690 {
3691 x = apply_distributive_law
3692 (gen_binary (GET_CODE (XEXP (x, 0)), mode,
3693 gen_binary (AND, mode,
3694 XEXP (XEXP (x, 0), 0), XEXP (x, 1)),
3695 gen_binary (AND, mode,
3696 XEXP (XEXP (x, 0), 1), XEXP (x, 1))));
3697 if (GET_CODE (x) != AND)
3698 goto restart;
3699 }
3700
3701 if (GET_CODE (XEXP (x, 1)) == IOR || GET_CODE (XEXP (x, 1)) == XOR)
3702 {
3703 x = apply_distributive_law
3704 (gen_binary (GET_CODE (XEXP (x, 1)), mode,
3705 gen_binary (AND, mode,
3706 XEXP (XEXP (x, 1), 0), XEXP (x, 0)),
3707 gen_binary (AND, mode,
3708 XEXP (XEXP (x, 1), 1), XEXP (x, 0))));
3709 if (GET_CODE (x) != AND)
3710 goto restart;
3711 }
3712
3713 /* Similarly, taking advantage of the fact that
3714 (and (not A) (xor B C)) == (xor (ior A B) (ior A C)) */
3715
3716 if (GET_CODE (XEXP (x, 0)) == NOT && GET_CODE (XEXP (x, 1)) == XOR)
3717 {
3718 x = apply_distributive_law
3719 (gen_binary (XOR, mode,
3720 gen_binary (IOR, mode, XEXP (XEXP (x, 0), 0),
3721 XEXP (XEXP (x, 1), 0)),
3722 gen_binary (IOR, mode, XEXP (XEXP (x, 0), 0),
3723 XEXP (XEXP (x, 1), 1))));
3724 if (GET_CODE (x) != AND)
3725 goto restart;
3726 }
3727
3728 else if (GET_CODE (XEXP (x, 1)) == NOT && GET_CODE (XEXP (x, 0)) == XOR)
3729 {
3730 x = apply_distributive_law
3731 (gen_binary (XOR, mode,
3732 gen_binary (IOR, mode, XEXP (XEXP (x, 1), 0),
3733 XEXP (XEXP (x, 0), 0)),
3734 gen_binary (IOR, mode, XEXP (XEXP (x, 1), 0),
3735 XEXP (XEXP (x, 0), 1))));
3736 if (GET_CODE (x) != AND)
3737 goto restart;
3738 }
3739 break;
3740
3741 case IOR:
3742 /* (ior A C) is C if all significant bits of A are on in C. */
3743 if (GET_CODE (XEXP (x, 1)) == CONST_INT
3744 && (significant_bits (XEXP (x, 0), mode)
3745 & ~ INTVAL (XEXP (x, 1))) == 0)
3746 return XEXP (x, 1);
3747
3748 /* Convert (A & B) | A to A. */
3749 if (GET_CODE (XEXP (x, 0)) == AND
3750 && (rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1))
3751 || rtx_equal_p (XEXP (XEXP (x, 0), 1), XEXP (x, 1)))
3752 && ! side_effects_p (XEXP (XEXP (x, 0), 0))
3753 && ! side_effects_p (XEXP (XEXP (x, 0), 1)))
3754 return XEXP (x, 1);
3755
3756 /* If we have (ior (and A B) C), apply the distributive law and then
3757 the inverse distributive law to see if things simplify. */
3758
3759 if (GET_CODE (XEXP (x, 0)) == AND)
3760 {
3761 x = apply_distributive_law
3762 (gen_binary (AND, mode,
3763 gen_binary (IOR, mode,
3764 XEXP (XEXP (x, 0), 0), XEXP (x, 1)),
3765 gen_binary (IOR, mode,
3766 XEXP (XEXP (x, 0), 1), XEXP (x, 1))));
3767
3768 if (GET_CODE (x) != IOR)
3769 goto restart;
3770 }
3771
3772 if (GET_CODE (XEXP (x, 1)) == AND)
3773 {
3774 x = apply_distributive_law
3775 (gen_binary (AND, mode,
3776 gen_binary (IOR, mode,
3777 XEXP (XEXP (x, 1), 0), XEXP (x, 0)),
3778 gen_binary (IOR, mode,
3779 XEXP (XEXP (x, 1), 1), XEXP (x, 0))));
3780
3781 if (GET_CODE (x) != IOR)
3782 goto restart;
3783 }
3784
3785 /* Convert (ior (ashift A CX) (lshiftrt A CY)) where CX+CY equals the
3786 mode size to (rotate A CX). */
3787
3788 if (((GET_CODE (XEXP (x, 0)) == ASHIFT
3789 && GET_CODE (XEXP (x, 1)) == LSHIFTRT)
3790 || (GET_CODE (XEXP (x, 1)) == ASHIFT
3791 && GET_CODE (XEXP (x, 0)) == LSHIFTRT))
3792 && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (XEXP (x, 1), 0))
3793 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3794 && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
3795 && (INTVAL (XEXP (XEXP (x, 0), 1)) + INTVAL (XEXP (XEXP (x, 1), 1))
3796 == GET_MODE_BITSIZE (mode)))
3797 {
3798 rtx shift_count;
3799
3800 if (GET_CODE (XEXP (x, 0)) == ASHIFT)
3801 shift_count = XEXP (XEXP (x, 0), 1);
3802 else
3803 shift_count = XEXP (XEXP (x, 1), 1);
3804 x = gen_rtx (ROTATE, mode, XEXP (XEXP (x, 0), 0), shift_count);
3805 goto restart;
3806 }
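/* This matches the portable C rotate idiom.  A hypothetical 32-bit
   sketch (the name is illustrative only):

   uint32_t rotl3 (uint32_t x)
   { return (x << 3) | (x >> 29); }

   Here CX == 3 and CY == 29 sum to the mode bitsize, so the IOR of
   the two shifts is recognized as (rotate x 3).  */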
3807 break;
3808
3809 case XOR:
3810 /* Convert (XOR (NOT x) (NOT y)) to (XOR x y).
3811 Also convert (XOR (NOT x) y) to (NOT (XOR x y)), similarly for
3812 (NOT y). */
3813 {
3814 int num_negated = 0;
3815 rtx in1 = XEXP (x, 0), in2 = XEXP (x, 1);
3816
3817 if (GET_CODE (in1) == NOT)
3818 num_negated++, in1 = XEXP (in1, 0);
3819 if (GET_CODE (in2) == NOT)
3820 num_negated++, in2 = XEXP (in2, 0);
3821
3822 if (num_negated == 2)
3823 {
3824 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
3825 SUBST (XEXP (x, 1), XEXP (XEXP (x, 1), 0));
3826 }
3827 else if (num_negated == 1)
3828 {
3829 x = gen_unary (NOT, mode,
3830 gen_binary (XOR, mode, in1, in2));
3831 goto restart;
3832 }
3833 }
3834
3835 /* Convert (xor (and A B) B) to (and (not A) B). The latter may
3836 correspond to a machine insn or result in further simplifications
3837 if B is a constant. */
3838
3839 if (GET_CODE (XEXP (x, 0)) == AND
3840 && rtx_equal_p (XEXP (XEXP (x, 0), 1), XEXP (x, 1))
3841 && ! side_effects_p (XEXP (x, 1)))
3842 {
3843 x = gen_binary (AND, mode,
3844 gen_unary (NOT, mode, XEXP (XEXP (x, 0), 0)),
3845 XEXP (x, 1));
3846 goto restart;
3847 }
3848 else if (GET_CODE (XEXP (x, 0)) == AND
3849 && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1))
3850 && ! side_effects_p (XEXP (x, 1)))
3851 {
3852 x = gen_binary (AND, mode,
3853 gen_unary (NOT, mode, XEXP (XEXP (x, 0), 1)),
3854 XEXP (x, 1));
3855 goto restart;
3856 }
3857
3858
3859#if STORE_FLAG_VALUE == 1
3860 /* (xor (comparison foo bar) (const_int 1)) can become the reversed
3861 comparison. */
3862 if (XEXP (x, 1) == const1_rtx
3863 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
3864 && reversible_comparison_p (XEXP (x, 0)))
3865 return gen_rtx_combine (reverse_condition (GET_CODE (XEXP (x, 0))),
3866 mode, XEXP (XEXP (x, 0), 0),
3867 XEXP (XEXP (x, 0), 1));
3868#endif
3869
3870 /* (xor (comparison foo bar) (const_int sign-bit))
3871 when STORE_FLAG_VALUE is the sign bit. */
3872 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3873 && (STORE_FLAG_VALUE
3874 == (HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (mode) - 1))
3875 && XEXP (x, 1) == const_true_rtx
3876 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
3877 && reversible_comparison_p (XEXP (x, 0)))
3878 return gen_rtx_combine (reverse_condition (GET_CODE (XEXP (x, 0))),
3879 mode, XEXP (XEXP (x, 0), 0),
3880 XEXP (XEXP (x, 0), 1));
3881 break;
3882
3883 case ABS:
3884 /* (abs (neg <foo>)) -> (abs <foo>) */
3885 if (GET_CODE (XEXP (x, 0)) == NEG)
3886 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
3887
3888 /* If operand is something known to be positive, ignore the ABS. */
3889 if (GET_CODE (XEXP (x, 0)) == FFS || GET_CODE (XEXP (x, 0)) == ABS
3890 || ((GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
3891 <= HOST_BITS_PER_WIDE_INT)
3892 && ((significant_bits (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
3893 & ((HOST_WIDE_INT) 1
3894 << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1)))
3895 == 0)))
3896 return XEXP (x, 0);
3897
3898
3899 /* If operand is known to be only -1 or 0, convert ABS to NEG. */
3900 if (num_sign_bit_copies (XEXP (x, 0), mode) == GET_MODE_BITSIZE (mode))
3901 {
3902 x = gen_rtx_combine (NEG, mode, XEXP (x, 0));
3903 goto restart;
3904 }
3905 break;
3906
3907 case FFS:
3908 /* (ffs (*_extend <X>)) = (ffs <X>) */
3909 if (GET_CODE (XEXP (x, 0)) == SIGN_EXTEND
3910 || GET_CODE (XEXP (x, 0)) == ZERO_EXTEND)
3911 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
3912 break;
3913
3914 case FLOAT:
3915 /* (float (sign_extend <X>)) = (float <X>). */
3916 if (GET_CODE (XEXP (x, 0)) == SIGN_EXTEND)
3917 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
3918 break;
3919
3920 case LSHIFT:
3921 case ASHIFT:
3922 case LSHIFTRT:
3923 case ASHIFTRT:
3924 case ROTATE:
3925 case ROTATERT:
3926 /* If this is a shift by a constant amount, simplify it. */
3927 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
3928 {
3929 x = simplify_shift_const (x, code, mode, XEXP (x, 0),
3930 INTVAL (XEXP (x, 1)));
3931 if (GET_CODE (x) != code)
3932 goto restart;
3933 }
3934
3935#ifdef SHIFT_COUNT_TRUNCATED
3936 else if (GET_CODE (XEXP (x, 1)) != REG)
3937 SUBST (XEXP (x, 1),
3938 force_to_mode (XEXP (x, 1), GET_MODE (x),
3939 exact_log2 (GET_MODE_BITSIZE (GET_MODE (x))),
3940 NULL_RTX));
3941#endif
3942
3943 break;
3944 }
3945
3946 return x;
3947}
3948\f
3949/* We consider ZERO_EXTRACT, SIGN_EXTRACT, and SIGN_EXTEND as "compound
3950 operations" because they can be replaced with two more basic operations.
3951 ZERO_EXTEND is also considered "compound" because it can be replaced with
3952 an AND operation, which is simpler, though only one operation.
3953
3954 The function expand_compound_operation is called with an rtx expression
3955 and will convert it to the appropriate shifts and AND operations,
3956 simplifying at each stage.
3957
3958 The function make_compound_operation is called to convert an expression
3959 consisting of shifts and ANDs into the equivalent compound expression.
3960 It is the inverse of this function, loosely speaking. */
3961
3962static rtx
3963expand_compound_operation (x)
3964 rtx x;
3965{
3966 int pos = 0, len;
3967 int unsignedp = 0;
3968 int modewidth;
3969 rtx tem;
3970
3971 switch (GET_CODE (x))
3972 {
3973 case ZERO_EXTEND:
3974 unsignedp = 1;
3975 case SIGN_EXTEND:
3976 /* We can't necessarily use a const_int for a multiword mode;
3977 it depends on implicitly extending the value.
3978 Since we don't know the right way to extend it,
3979 we can't tell whether the implicit way is right.
3980
3981 Even for a mode that is no wider than a const_int,
3982 we can't win, because we need to sign extend one of its bits through
3983 the rest of it, and we don't know which bit. */
3984 if (GET_CODE (XEXP (x, 0)) == CONST_INT)
3985 return x;
3986
3987 if (! FAKE_EXTEND_SAFE_P (GET_MODE (XEXP (x, 0)), XEXP (x, 0)))
3988 return x;
3989
3990 len = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)));
3991 /* If the inner object has VOIDmode (the only way this can happen
3992 is if it is an ASM_OPERANDS), we can't do anything since we don't
3993 know how much masking to do. */
3994 if (len == 0)
3995 return x;
3996
3997 break;
3998
3999 case ZERO_EXTRACT:
4000 unsignedp = 1;
4001 case SIGN_EXTRACT:
4002 /* If the operand is a CLOBBER, just return it. */
4003 if (GET_CODE (XEXP (x, 0)) == CLOBBER)
4004 return XEXP (x, 0);
4005
4006 if (GET_CODE (XEXP (x, 1)) != CONST_INT
4007 || GET_CODE (XEXP (x, 2)) != CONST_INT
4008 || GET_MODE (XEXP (x, 0)) == VOIDmode)
4009 return x;
4010
4011 len = INTVAL (XEXP (x, 1));
4012 pos = INTVAL (XEXP (x, 2));
4013
4014 /* If this goes outside the object being extracted, replace the object
4015 with a (use (mem ...)) construct that only combine understands
4016 and is used only for this purpose. */
4017 if (len + pos > GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))))
4018 SUBST (XEXP (x, 0), gen_rtx (USE, GET_MODE (x), XEXP (x, 0)));
4019
4020#if BITS_BIG_ENDIAN
4021 pos = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - len - pos;
4022#endif
4023 break;
4024
4025 default:
4026 return x;
4027 }
4028
4029 /* If we reach here, we want to return a pair of shifts. The inner
4030 shift is a left shift of BITSIZE - POS - LEN bits. The outer
4031 shift is a right shift of BITSIZE - LEN bits. It is arithmetic or
4032 logical depending on the value of UNSIGNEDP.
4033
4034 If this was a ZERO_EXTEND or ZERO_EXTRACT, this pair of shifts will be
4035 converted into an AND of a shift.
4036
4037 We must check for the case where the left shift would have a negative
4038 count. This can happen in a case like (x >> 31) & 255 on machines
4039 that can't shift by a constant. On those machines, we would first
4040 combine the shift with the AND to produce a variable-position
4041 extraction. Then the constant of 31 would be substituted in to produce
4042 such a position. */
4043
4044 modewidth = GET_MODE_BITSIZE (GET_MODE (x));
4045 if (modewidth >= pos + len)
4046 tem = simplify_shift_const (NULL_RTX, unsignedp ? LSHIFTRT : ASHIFTRT,
4047 GET_MODE (x),
4048 simplify_shift_const (NULL_RTX, ASHIFT,
4049 GET_MODE (x),
4050 XEXP (x, 0),
4051 modewidth - pos - len),
4052 modewidth - len);
4053
4054 else if (unsignedp && len < HOST_BITS_PER_WIDE_INT)
4055 tem = simplify_and_const_int (NULL_RTX, GET_MODE (x),
4056 simplify_shift_const (NULL_RTX, LSHIFTRT,
4057 GET_MODE (x),
4058 XEXP (x, 0), pos),
4059 ((HOST_WIDE_INT) 1 << len) - 1);
4060 else
4061 /* Any other cases we can't handle. */
4062 return x;
4063
4064
4065 /* If we couldn't do this for some reason, return the original
4066 expression. */
4067 if (GET_CODE (tem) == CLOBBER)
4068 return x;
4069
4070 return tem;
4071}
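/* A source-level sketch of the shift pair built above, assuming a
   32-bit mode, an 8-bit field at bit POS, and an arithmetic right
   shift for signed values (the function name is illustrative only):

   int32_t extract_signed_8 (int32_t x, int pos)
   { return ((int32_t) ((uint32_t) x << (24 - pos))) >> 24; }

   The unsigned case uses a logical right shift instead, which is why
   a ZERO_EXTEND or ZERO_EXTRACT can end up as a single AND.  */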
4072\f
4073/* X is a SET which contains an assignment of one object into
4074 a part of another (such as a bit-field assignment, STRICT_LOW_PART,
4075 or certain SUBREGS). If possible, convert it into a series of
4076 logical operations.
4077
4078 We half-heartedly support variable positions, but do not at all
4079 support variable lengths. */
4080
4081static rtx
4082expand_field_assignment (x)
4083 rtx x;
4084{
4085 rtx inner;
4086 rtx pos; /* Always counts from low bit. */
4087 int len;
4088 rtx mask;
4089 enum machine_mode compute_mode;
4090
4091 /* Loop until we find something we can't simplify. */
4092 while (1)
4093 {
4094 if (GET_CODE (SET_DEST (x)) == STRICT_LOW_PART
4095 && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG)
4096 {
4097 inner = SUBREG_REG (XEXP (SET_DEST (x), 0));
4098 len = GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)));
4099 pos = const0_rtx;
4100 }
4101 else if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
4102 && GET_CODE (XEXP (SET_DEST (x), 1)) == CONST_INT)
4103 {
4104 inner = XEXP (SET_DEST (x), 0);
4105 len = INTVAL (XEXP (SET_DEST (x), 1));
4106 pos = XEXP (SET_DEST (x), 2);
4107
4108 /* If the position is constant and spans the width of INNER,
4109 surround INNER with a USE to indicate this. */
4110 if (GET_CODE (pos) == CONST_INT
4111 && INTVAL (pos) + len > GET_MODE_BITSIZE (GET_MODE (inner)))
4112 inner = gen_rtx (USE, GET_MODE (SET_DEST (x)), inner);
4113
4114#if BITS_BIG_ENDIAN
4115 if (GET_CODE (pos) == CONST_INT)
4116 pos = GEN_INT (GET_MODE_BITSIZE (GET_MODE (inner)) - len
4117 - INTVAL (pos));
4118 else if (GET_CODE (pos) == MINUS
4119 && GET_CODE (XEXP (pos, 1)) == CONST_INT
4120 && (INTVAL (XEXP (pos, 1))
4121 == GET_MODE_BITSIZE (GET_MODE (inner)) - len))
4122 /* If position is ADJUST - X, new position is X. */
4123 pos = XEXP (pos, 0);
4124 else
4125 pos = gen_binary (MINUS, GET_MODE (pos),
4126 GEN_INT (GET_MODE_BITSIZE (GET_MODE (inner))
4127 - len),
4128 pos);
4129#endif
4130 }
4131
4132 /* A SUBREG between two modes that occupy the same numbers of words
4133 can be done by moving the SUBREG to the source. */
4134 else if (GET_CODE (SET_DEST (x)) == SUBREG
4135 && (((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
4136 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
4137 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
4138 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
4139 {
4140 x = gen_rtx (SET, VOIDmode, SUBREG_REG (SET_DEST (x)),
4141 gen_lowpart_for_combine (GET_MODE (SUBREG_REG (SET_DEST (x))),
4142 SET_SRC (x)));
4143 continue;
4144 }
4145 else
4146 break;
4147
4148 while (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner))
4149 inner = SUBREG_REG (inner);
4150
4151 compute_mode = GET_MODE (inner);
4152
4153 /* Compute a mask of LEN bits, if we can do this on the host machine. */
4154 if (len < HOST_BITS_PER_WIDE_INT)
4155 mask = GEN_INT (((HOST_WIDE_INT) 1 << len) - 1);
4156 else
4157 break;
4158
4159 /* Now compute the equivalent expression. Make a copy of INNER
4160 for the SET_DEST in case it is a MEM into which we will substitute;
4161 we don't want shared RTL in that case. */
4162 x = gen_rtx (SET, VOIDmode, copy_rtx (inner),
4163 gen_binary (IOR, compute_mode,
4164 gen_binary (AND, compute_mode,
4165 gen_unary (NOT, compute_mode,
4166 gen_binary (ASHIFT,
4167 compute_mode,
4168 mask, pos)),
4169 inner),
4170 gen_binary (ASHIFT, compute_mode,
4171 gen_binary (AND, compute_mode,
4172 gen_lowpart_for_combine
4173 (compute_mode,
4174 SET_SRC (x)),
4175 mask),
4176 pos)));
4177 }
4178
4179 return x;
4180}
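/* A source-level view of the expression built above, assuming a
   constant position and length (the helper name is illustrative
   only):

   uint32_t store_field (uint32_t dst, uint32_t src, int pos, uint32_t mask)
   { return (dst & ~(mask << pos)) | ((src & mask) << pos); }

   i.e. the IOR of the destination with the field masked out and the
   masked, shifted source.  */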
4181\f
4182/* Return an RTX for a reference to LEN bits of INNER. POS is the starting
4183 bit position (counted from the LSB) if >= 0; otherwise POS_RTX represents
4184 the starting bit position.
4185
4186 INNER may be a USE. This will occur when we started with a bitfield
4187 that went outside the boundary of the object in memory, which is
4188 allowed on most machines. To isolate this case, we produce a USE
4189 whose mode is wide enough and surround the MEM with it. The only
4190 code that understands the USE is this routine. If it is not removed,
4191 it will cause the resulting insn not to match.
4192
4193 UNSIGNEDP is non-zero for an unsigned reference and zero for a
4194 signed reference.
4195
4196 IN_DEST is non-zero if this is a reference in the destination of a
4197 SET. This is used when a ZERO_ or SIGN_EXTRACT isn't needed. If non-zero,
4198 a STRICT_LOW_PART will be used, if zero, ZERO_EXTEND or SIGN_EXTEND will
4199 be used.
4200
4201 IN_COMPARE is non-zero if we are in a COMPARE. This means that a
4202 ZERO_EXTRACT should be built even for bits starting at bit 0.
4203
4204 MODE is the desired mode of the result (if IN_DEST == 0). */
4205
4206static rtx
4207make_extraction (mode, inner, pos, pos_rtx, len,
4208 unsignedp, in_dest, in_compare)
4209 enum machine_mode mode;
4210 rtx inner;
4211 int pos;
4212 rtx pos_rtx;
4213 int len;
4214 int unsignedp;
4215 int in_dest, in_compare;
4216{
4217 enum machine_mode is_mode = GET_MODE (inner);
4218 enum machine_mode inner_mode;
4219 enum machine_mode wanted_mem_mode = byte_mode;
4220 enum machine_mode pos_mode = word_mode;
4221 enum machine_mode extraction_mode = word_mode;
4222 enum machine_mode tmode = mode_for_size (len, MODE_INT, 1);
4223 int spans_byte = 0;
4224 rtx new = 0;
4225
4226 /* Get some information about INNER and get the innermost object. */
4227 if (GET_CODE (inner) == USE)
4228 /* We don't need to adjust the position because we set up the USE
4229 to pretend that it was a full-word object. */
4230 spans_byte = 1, inner = XEXP (inner, 0);
4231 else if (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner))
4232 inner = SUBREG_REG (inner);
4233
4234 inner_mode = GET_MODE (inner);
4235
4236 if (pos_rtx && GET_CODE (pos_rtx) == CONST_INT)
4237 pos = INTVAL (pos_rtx);
4238
4239 /* See if this can be done without an extraction. We never can if the
4240 width of the field is not the same as that of some integer mode. For
4241 registers, we can only avoid the extraction if the position is at the
4242 low-order bit and this is either not in the destination or we have the
4243 appropriate STRICT_LOW_PART operation available.
4244
4245 For MEM, we can avoid an extract if the field starts on an appropriate
4246 boundary and we can change the mode of the memory reference. However,
4247 we cannot directly access the MEM if we have a USE and the underlying
4248 MEM is not TMODE. This combination means that MEM was being used in a
4249 context where bits outside its mode were being referenced; that is only
4250 valid in bit-field insns. */
4251
4252 if (tmode != BLKmode
4253 && ! (spans_byte && inner_mode != tmode)
4254 && ((pos == 0 && GET_CODE (inner) != MEM
4255 && (! in_dest
4256 || (GET_CODE (inner) == REG
4257 && (movstrict_optab->handlers[(int) tmode].insn_code
4258 != CODE_FOR_nothing))))
4259 || (GET_CODE (inner) == MEM && pos >= 0
4260 && (pos
4261 % (STRICT_ALIGNMENT ? GET_MODE_ALIGNMENT (tmode)
4262 : BITS_PER_UNIT)) == 0
4263 /* We can't do this if we are widening INNER_MODE (it
4264 may not be aligned, for one thing). */
4265 && GET_MODE_BITSIZE (inner_mode) >= GET_MODE_BITSIZE (tmode)
4266 && (inner_mode == tmode
4267 || (! mode_dependent_address_p (XEXP (inner, 0))
4268 && ! MEM_VOLATILE_P (inner))))))
4269 {
4270 int offset = pos / BITS_PER_UNIT;
4271
4272 /* If INNER is a MEM, make a new MEM that encompasses just the desired
4273 field. If the original and current mode are the same, we need not
4274 adjust the offset.  Otherwise, we do if bytes are big endian.
4275
4276 If INNER is not a MEM, get a piece consisting of just the field
4277 of interest (in this case POS must be 0). */
4278
4279 if (GET_CODE (inner) == MEM)
4280 {
4281#if BYTES_BIG_ENDIAN
4282 if (inner_mode != tmode)
4283 offset = (GET_MODE_SIZE (inner_mode)
4284 - GET_MODE_SIZE (tmode) - offset);
4285#endif
4286
4287 new = gen_rtx (MEM, tmode, plus_constant (XEXP (inner, 0), offset));
4288 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (inner);
4289 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (inner);
4290 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (inner);
4291 }
4292 else if (GET_CODE (inner) == REG)
4293 /* We can't call gen_lowpart_for_combine here since we always want
4294 a SUBREG and it would sometimes return a new hard register. */
4295 new = gen_rtx (SUBREG, tmode, inner,
4296 (WORDS_BIG_ENDIAN
4297 && GET_MODE_SIZE (is_mode) > UNITS_PER_WORD)
4298 ? (((GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (tmode))
4299 / UNITS_PER_WORD))
4300 : 0);
4301 else
4302 new = force_to_mode (inner, tmode, len, NULL_RTX);
4303
4304 /* If this extraction is going into the destination of a SET,
4305 make a STRICT_LOW_PART unless we made a MEM. */
4306
4307 if (in_dest)
4308 return (GET_CODE (new) == MEM ? new
4309 : (GET_CODE (new) != SUBREG
4310 ? gen_rtx (CLOBBER, tmode, const0_rtx)
4311 : gen_rtx_combine (STRICT_LOW_PART, VOIDmode, new)));
4312
4313 /* Otherwise, sign- or zero-extend unless we already are in the
4314 proper mode. */
4315
4316 return (mode == tmode ? new
4317 : gen_rtx_combine (unsignedp ? ZERO_EXTEND : SIGN_EXTEND,
4318 mode, new));
4319 }
4320
4321 /* Unless this is in a COMPARE or we have a funny memory reference,
4322 don't do anything with field extracts starting at the low-order
4323 bit since they are simple AND operations. */
4324 if (pos == 0 && ! in_dest && ! in_compare && ! spans_byte)
4325 return 0;
4326
4327 /* Get the mode to use should INNER be a MEM, the mode for the position,
4328 and the mode for the result. */
4329#ifdef HAVE_insv
4330 if (in_dest)
4331 {
4332 wanted_mem_mode = insn_operand_mode[(int) CODE_FOR_insv][0];
4333 pos_mode = insn_operand_mode[(int) CODE_FOR_insv][2];
4334 extraction_mode = insn_operand_mode[(int) CODE_FOR_insv][3];
4335 }
4336#endif
4337
4338#ifdef HAVE_extzv
4339 if (! in_dest && unsignedp)
4340 {
4341 wanted_mem_mode = insn_operand_mode[(int) CODE_FOR_extzv][1];
4342 pos_mode = insn_operand_mode[(int) CODE_FOR_extzv][3];
4343 extraction_mode = insn_operand_mode[(int) CODE_FOR_extzv][0];
4344 }
4345#endif
4346
4347#ifdef HAVE_extv
4348 if (! in_dest && ! unsignedp)
4349 {
4350 wanted_mem_mode = insn_operand_mode[(int) CODE_FOR_extv][1];
4351 pos_mode = insn_operand_mode[(int) CODE_FOR_extv][3];
4352 extraction_mode = insn_operand_mode[(int) CODE_FOR_extv][0];
4353 }
4354#endif
4355
4356 /* Never narrow an object, since that might not be safe. */
4357
4358 if (mode != VOIDmode
4359 && GET_MODE_SIZE (extraction_mode) < GET_MODE_SIZE (mode))
4360 extraction_mode = mode;
4361
4362 if (pos_rtx && GET_MODE (pos_rtx) != VOIDmode
4363 && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx)))
4364 pos_mode = GET_MODE (pos_rtx);
4365
4366 /* If this is not from memory or we have to change the mode of memory and
4367 cannot, the desired mode is EXTRACTION_MODE. */
4368 if (GET_CODE (inner) != MEM
4369 || (inner_mode != wanted_mem_mode
4370 && (mode_dependent_address_p (XEXP (inner, 0))
4371 || MEM_VOLATILE_P (inner))))
4372 wanted_mem_mode = extraction_mode;
4373
4374#if BITS_BIG_ENDIAN
4375 /* If position is constant, compute new position. Otherwise, build
4376 subtraction. */
4377 if (pos >= 0)
4378 pos = (MAX (GET_MODE_BITSIZE (is_mode), GET_MODE_BITSIZE (wanted_mem_mode))
4379 - len - pos);
4380 else
4381 pos_rtx
4382 = gen_rtx_combine (MINUS, GET_MODE (pos_rtx),
4383 GEN_INT (MAX (GET_MODE_BITSIZE (is_mode),
4384 GET_MODE_BITSIZE (wanted_mem_mode))
4385 - len),
4386 pos_rtx);
4387#endif
4388
4389 /* If INNER has a wider mode, make it smaller. If this is a constant
4390 extract, try to adjust the byte to point to the byte containing
4391 the value. */
4392 if (wanted_mem_mode != VOIDmode
4393 && GET_MODE_SIZE (wanted_mem_mode) < GET_MODE_SIZE (is_mode)
4394 && ((GET_CODE (inner) == MEM
4395 && (inner_mode == wanted_mem_mode
4396 || (! mode_dependent_address_p (XEXP (inner, 0))
4397 && ! MEM_VOLATILE_P (inner))))))
4398 {
4399 int offset = 0;
4400
4401 /* The computations below will be correct if the machine is big
4402 endian in both bits and bytes or little endian in bits and bytes.
4403 If it is mixed, we must adjust. */
4404
4405#if BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN
4406 if (! spans_byte && is_mode != wanted_mem_mode)
4407 offset = (GET_MODE_SIZE (is_mode)
4408 - GET_MODE_SIZE (wanted_mem_mode) - offset);
4409#endif
4410
4411 /* If bytes are big endian and we had a paradoxical SUBREG, we must
4412 adjust OFFSET to compensate. */
4413#if BYTES_BIG_ENDIAN
4414 if (! spans_byte
4415 && GET_MODE_SIZE (inner_mode) < GET_MODE_SIZE (is_mode))
4416 offset -= GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (inner_mode);
4417#endif
4418
4419 /* If this is a constant position, we can move to the desired byte. */
4420 if (pos >= 0)
4421 {
4422 offset += pos / BITS_PER_UNIT;
4423 pos %= GET_MODE_BITSIZE (wanted_mem_mode);
4424 }
4425
4426 if (offset != 0 || inner_mode != wanted_mem_mode)
4427 {
4428 rtx newmem = gen_rtx (MEM, wanted_mem_mode,
4429 plus_constant (XEXP (inner, 0), offset));
4430 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (inner);
4431 MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (inner);
4432 MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (inner);
4433 inner = newmem;
4434 }
4435 }
4436
4437 /* If INNER is not memory, we can always get it into the proper mode. */
4438 else if (GET_CODE (inner) != MEM)
4439 inner = force_to_mode (inner, extraction_mode,
4440 (pos < 0 ? GET_MODE_BITSIZE (extraction_mode)
4441 : len + pos),
4442 NULL_RTX);
4443
4444 /* Adjust mode of POS_RTX, if needed. If we want a wider mode, we
4445 have to zero extend. Otherwise, we can just use a SUBREG. */
4446 if (pos < 0
4447 && GET_MODE_SIZE (pos_mode) > GET_MODE_SIZE (GET_MODE (pos_rtx)))
4448 pos_rtx = gen_rtx_combine (ZERO_EXTEND, pos_mode, pos_rtx);
4449 else if (pos < 0
4450 && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx)))
4451 pos_rtx = gen_lowpart_for_combine (pos_mode, pos_rtx);
4452
4453 /* Make POS_RTX unless we already have it and it is correct. */
4454 if (pos_rtx == 0 || (pos >= 0 && INTVAL (pos_rtx) != pos))
4455 pos_rtx = GEN_INT (pos);
4456
4457 /* Make the required operation. See if we can use existing rtx. */
4458 new = gen_rtx_combine (unsignedp ? ZERO_EXTRACT : SIGN_EXTRACT,
4459 extraction_mode, inner, GEN_INT (len), pos_rtx);
4460 if (! in_dest)
4461 new = gen_lowpart_for_combine (mode, new);
4462
4463 return new;
4464}
4465\f
4466/* Look at the expression rooted at X. Look for expressions
4467 equivalent to ZERO_EXTRACT, SIGN_EXTRACT, ZERO_EXTEND, SIGN_EXTEND.
4468 Form these expressions.
4469
4470 Return the new rtx, usually just X.
4471
4472 Also, for machines like the Vax that don't have logical shift insns,
4473 try to convert logical to arithmetic shift operations in cases where
4474 they are equivalent. This undoes the canonicalizations to logical
4475 shifts done elsewhere.
4476
4477 We try, as much as possible, to re-use rtl expressions to save memory.
4478
4479 IN_CODE says what kind of expression we are processing. Normally, it is
4480 SET.  In a memory address (inside a MEM, PLUS or MINUS, the latter two
4481 being kludges), it is MEM. When processing the arguments of a comparison
4482 or a COMPARE against zero, it is COMPARE. */
4483
4484static rtx
4485make_compound_operation (x, in_code)
4486 rtx x;
4487 enum rtx_code in_code;
4488{
4489 enum rtx_code code = GET_CODE (x);
4490 enum machine_mode mode = GET_MODE (x);
4491 int mode_width = GET_MODE_BITSIZE (mode);
4492 enum rtx_code next_code;
4493 int i, count;
4494 rtx new = 0;
4495 char *fmt;
4496
4497 /* Select the code to be used in recursive calls. Once we are inside an
4498 address, we stay there. If we have a comparison, set to COMPARE,
4499 but once inside, go back to our default of SET. */
4500
4501 next_code = (code == MEM || code == PLUS || code == MINUS ? MEM
4502 : ((code == COMPARE || GET_RTX_CLASS (code) == '<')
4503 && XEXP (x, 1) == const0_rtx) ? COMPARE
4504 : in_code == COMPARE ? SET : in_code);
4505
4506 /* Process depending on the code of this operation. If NEW is set
4507 non-zero, it will be returned. */
4508
4509 switch (code)
4510 {
4511 case ASHIFT:
4512 case LSHIFT:
4513 /* Convert shifts by constants into multiplications if inside
4514 an address. */
4515 if (in_code == MEM && GET_CODE (XEXP (x, 1)) == CONST_INT
4516 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
4517 && INTVAL (XEXP (x, 1)) >= 0)
4518 new = gen_rtx_combine (MULT, mode, XEXP (x, 0),
4519 GEN_INT ((HOST_WIDE_INT) 1
4520 << INTVAL (XEXP (x, 1))));
4521 break;
4522
4523 case AND:
4524 /* If the second operand is not a constant, we can't do anything
4525 with it. */
4526 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
4527 break;
4528
4529 /* If the constant is a power of two minus one and the first operand
4530 is a logical right shift, make an extraction. */
4531 if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
4532 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
4533 new = make_extraction (mode, XEXP (XEXP (x, 0), 0), -1,
4534 XEXP (XEXP (x, 0), 1), i, 1,
4535 0, in_code == COMPARE);
4536
4537 /* Same as previous, but for (subreg (lshiftrt ...)) in first op. */
4538 else if (GET_CODE (XEXP (x, 0)) == SUBREG
4539 && subreg_lowpart_p (XEXP (x, 0))
4540 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == LSHIFTRT
4541 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
4542 new = make_extraction (GET_MODE (SUBREG_REG (XEXP (x, 0))),
4543 XEXP (SUBREG_REG (XEXP (x, 0)), 0), -1,
4544 XEXP (SUBREG_REG (XEXP (x, 0)), 1), i, 1,
4545 0, in_code == COMPARE);
4546
4547
4548 /* If we have (and (rotate X C) M) and C is larger than the number
4549 of bits in M, this is an extraction. */
4550
4551 else if (GET_CODE (XEXP (x, 0)) == ROTATE
4552 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4553 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0
4554 && i <= INTVAL (XEXP (XEXP (x, 0), 1)))
4555 new = make_extraction (mode, XEXP (XEXP (x, 0), 0),
4556 (GET_MODE_BITSIZE (mode)
4557 - INTVAL (XEXP (XEXP (x, 0), 1))),
4558 NULL_RTX, i, 1, 0, in_code == COMPARE);
4559
4560 /* On machines without logical shifts, if the operand of the AND is
4561 a logical shift and our mask turns off all the propagated sign
4562 bits, we can replace the logical shift with an arithmetic shift. */
4563 else if (ashr_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
4564 && (lshr_optab->handlers[(int) mode].insn_code
4565 == CODE_FOR_nothing)
4566 && GET_CODE (XEXP (x, 0)) == LSHIFTRT
4567 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4568 && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
4569 && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT
4570 && mode_width <= HOST_BITS_PER_WIDE_INT)
4571 {
4572 unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
4573
4574 mask >>= INTVAL (XEXP (XEXP (x, 0), 1));
4575 if ((INTVAL (XEXP (x, 1)) & ~mask) == 0)
4576 SUBST (XEXP (x, 0),
4577 gen_rtx_combine (ASHIFTRT, mode, XEXP (XEXP (x, 0), 0),
4578 XEXP (XEXP (x, 0), 1)));
4579 }
4580
4581 /* If the constant is one less than a power of two, this might be
4582 representable by an extraction even if no shift is present.
4583 If it doesn't end up being a ZERO_EXTEND, we will ignore it unless
4584 we are in a COMPARE. */
4585 else if ((i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
4586 new = make_extraction (mode, XEXP (x, 0), 0, NULL_RTX, i, 1,
4587 0, in_code == COMPARE);
4588
4589 /* If we are in a comparison and this is an AND with a power of two,
4590 convert this into the appropriate bit extract. */
4591 else if (in_code == COMPARE
4592 && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0)
4593 new = make_extraction (mode, XEXP (x, 0), i, NULL_RTX, 1, 1, 0, 1);
4594
4595 break;
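/* For instance, when comparing (and X 8) against zero, the power of
   two 8 selects a single bit, so the AND is rewritten as a one-bit
   ZERO_EXTRACT at position 3, which bit-test patterns can match.  */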
4596
4597 case LSHIFTRT:
4598 /* If the sign bit is known to be zero, replace this with an
4599 arithmetic shift. */
4600 if (ashr_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing
4601 && lshr_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
4602 && mode_width <= HOST_BITS_PER_WIDE_INT
4603 && (significant_bits (XEXP (x, 0), mode)
4604 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
4605 {
4606 new = gen_rtx_combine (ASHIFTRT, mode, XEXP (x, 0), XEXP (x, 1));
4607 break;
4608 }
4609
4610 /* ... fall through ... */
4611
4612 case ASHIFTRT:
4613 /* If we have (ashiftrt (ashift foo C1) C2) with C2 >= C1,
4614 this is a SIGN_EXTRACT. */
4615 if (GET_CODE (XEXP (x, 1)) == CONST_INT
4616 && GET_CODE (XEXP (x, 0)) == ASHIFT
4617 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4618 && INTVAL (XEXP (x, 1)) >= INTVAL (XEXP (XEXP (x, 0), 1)))
4619 new = make_extraction (mode, XEXP (XEXP (x, 0), 0),
4620 (INTVAL (XEXP (x, 1))
4621 - INTVAL (XEXP (XEXP (x, 0), 1))),
4622 NULL_RTX, mode_width - INTVAL (XEXP (x, 1)),
4623 code == LSHIFTRT, 0, in_code == COMPARE);
4624
4625 /* Similarly if we have (ashiftrt (OP (ashift foo C1) C3) C2). In these
4626 cases, we are better off returning a SIGN_EXTEND of the operation. */
4627
4628 if (GET_CODE (XEXP (x, 1)) == CONST_INT
4629 && (GET_CODE (XEXP (x, 0)) == IOR || GET_CODE (XEXP (x, 0)) == AND
4630 || GET_CODE (XEXP (x, 0)) == XOR
4631 || GET_CODE (XEXP (x, 0)) == PLUS)
4632 && GET_CODE (XEXP (XEXP (x, 0), 0)) == ASHIFT
4633 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
4634 && INTVAL (XEXP (x, 1)) >= INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1))
4635 && INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1)) < HOST_BITS_PER_WIDE_INT
4636 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4637 && (INTVAL (XEXP (XEXP (x, 0), 1))
4638 & (((HOST_WIDE_INT) 1
4639 << INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1))) - 1)) == 0)
4640 {
4641 HOST_WIDE_INT newop1
4642 = (INTVAL (XEXP (XEXP (x, 0), 1))
4643 >> INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1)));
4644
4645 new = make_extraction (mode,
4646 gen_binary (GET_CODE (XEXP (x, 0)), mode,
4647 XEXP (XEXP (XEXP (x, 0), 0), 0),
4648 GEN_INT (newop1)),
4649 (INTVAL (XEXP (x, 1))
4650 - INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1))),
4651 NULL_RTX, mode_width - INTVAL (XEXP (x, 1)),
4652 code == LSHIFTRT, 0, in_code == COMPARE);
4653 }
4654
4655 /* Similarly for (ashiftrt (neg (ashift FOO C1)) C2). */
4656 if (GET_CODE (XEXP (x, 1)) == CONST_INT
4657 && GET_CODE (XEXP (x, 0)) == NEG
4658 && GET_CODE (XEXP (XEXP (x, 0), 0)) == ASHIFT
4659 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
4660 && INTVAL (XEXP (x, 1)) >= INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1)))
4661 new = make_extraction (mode,
4662 gen_unary (GET_CODE (XEXP (x, 0)), mode,
4663 XEXP (XEXP (XEXP (x, 0), 0), 0)),
4664 (INTVAL (XEXP (x, 1))
4665 - INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1))),
4666 NULL_RTX, mode_width - INTVAL (XEXP (x, 1)),
4667 code == LSHIFTRT, 0, in_code == COMPARE);
4668 break;
4669 }
4670
4671 if (new)
4672 {
4673 x = gen_lowpart_for_combine (mode, new);
4674 code = GET_CODE (x);
4675 }
4676
4677 /* Now recursively process each operand of this operation. */
4678 fmt = GET_RTX_FORMAT (code);
4679 for (i = 0; i < GET_RTX_LENGTH (code); i++)
4680 if (fmt[i] == 'e')
4681 {
4682 new = make_compound_operation (XEXP (x, i), next_code);
4683 SUBST (XEXP (x, i), new);
4684 }
4685
4686 return x;
4687}
4688\f
4689/* Given M, see if it is a value that would select a field of bits
4690 within an item, but not the entire word. Return -1 if not.
4691 Otherwise, return the starting position of the field, where 0 is the
4692 low-order bit.
4693
4694 *PLEN is set to the length of the field. */
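/* For example, m == 0x1f8 (binary 111111000) selects a six-bit field
starting at bit 3: m & -m == 8 gives pos == 3, and (m >> 3) + 1 == 64
is a power of two, so *PLEN is set to 6. A mask with a hole in it,
such as 0x1a8, fails the second test and returns -1. */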
4695
4696static int
4697get_pos_from_mask (m, plen)
4698 unsigned HOST_WIDE_INT m;
4699 int *plen;
4700{
4701 /* Get the bit number of the first 1 bit from the right, -1 if none. */
4702 int pos = exact_log2 (m & - m);
4703
4704 if (pos < 0)
4705 return -1;
4706
4707 /* Now shift off the low-order zero bits and see if we have a power of
4708 two minus 1. */
4709 *plen = exact_log2 ((m >> pos) + 1);
4710
4711 if (*plen <= 0)
4712 return -1;
4713
4714 return pos;
4715}
4716\f
4717/* Rewrite X so that it is an expression in MODE. We only care about the
4718 low-order BITS bits so we can ignore AND operations that just clear
4719 higher-order bits.
4720
4721 Also, if REG is non-zero and X is a register equal in value to REG,
4722 replace X with REG. */
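/* A minimal sketch of the intent, with hypothetical operands: asking
for the low 8 bits of (and:SI X (const_int 255)) in QImode can drop
the AND entirely, since it only clears bits above the eight bits we
care about. */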
4723
4724static rtx
4725force_to_mode (x, mode, bits, reg)
4726 rtx x;
4727 enum machine_mode mode;
4728 int bits;
4729 rtx reg;
4730{
4731 enum rtx_code code = GET_CODE (x);
4732 enum machine_mode op_mode = mode;
4733
4734 /* If X is narrower than MODE or if BITS is larger than the size of MODE,
4735 just get X in the proper mode. */
4736
4737 if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (mode)
4738 || bits > GET_MODE_BITSIZE (mode))
4739 return gen_lowpart_for_combine (mode, x);
4740
4741 switch (code)
4742 {
4743 case SIGN_EXTEND:
4744 case ZERO_EXTEND:
4745 case ZERO_EXTRACT:
4746 case SIGN_EXTRACT:
4747 x = expand_compound_operation (x);
4748 if (GET_CODE (x) != code)
4749 return force_to_mode (x, mode, bits, reg);
4750 break;
4751
4752 case REG:
4753 if (reg != 0 && (rtx_equal_p (get_last_value (reg), x)
4754 || rtx_equal_p (reg, get_last_value (x))))
4755 x = reg;
4756 break;
4757
4758 case CONST_INT:
4759 if (bits < HOST_BITS_PER_WIDE_INT)
4760 x = GEN_INT (INTVAL (x) & (((HOST_WIDE_INT) 1 << bits) - 1));
4761 return x;
4762
4763 case SUBREG:
4764 /* Ignore low-order SUBREGs. */
4765 if (subreg_lowpart_p (x))
4766 return force_to_mode (SUBREG_REG (x), mode, bits, reg);
4767 break;
4768
4769 case AND:
4770 /* If this is an AND with a constant, handle it here. Otherwise, we
4771 fall through to do the general binary case. */
4772
4773 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
4774 {
4775 HOST_WIDE_INT mask = INTVAL (XEXP (x, 1));
4776 int len = exact_log2 (mask + 1);
4777 rtx op = XEXP (x, 0);
4778
4779 /* If this is masking some low-order bits, we may be able to
4780 impose a stricter constraint on what bits of the operand are
4781 required. */
4782
4783 op = force_to_mode (op, mode, len > 0 ? MIN (len, bits) : bits,
4784 reg);
4785
4786 if (bits < HOST_BITS_PER_WIDE_INT)
4787 mask &= ((HOST_WIDE_INT) 1 << bits) - 1;
4788
4789 /* If we have no AND in MODE, use the original mode for the
4790 operation. */
4791
4792 if (and_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
4793 op_mode = GET_MODE (x);
4794
4795 x = simplify_and_const_int (x, op_mode, op, mask);
4796
4797 /* If X is still an AND, see if it is an AND with a mask that
4798 is just some low-order bits. If so, and it is BITS wide (it
4799 can't be wider), we don't need it. */
4800
4801 if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT
4802 && bits < HOST_BITS_PER_WIDE_INT
4803 && INTVAL (XEXP (x, 1)) == ((HOST_WIDE_INT) 1 << bits) - 1)
4804 x = XEXP (x, 0);
4805
4806 break;
4807 }
4808
4809 /* ... fall through ... */
4810
4811 case PLUS:
4812 case MINUS:
4813 case MULT:
4814 case IOR:
4815 case XOR:
4816 /* For most binary operations, just propagate into the operation and
4817 change the mode if we have an operation of that mode. */
4818
4819 if ((code == PLUS
4820 && add_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
4821 || (code == MINUS
4822 && sub_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
4823 || (code == MULT && (smul_optab->handlers[(int) mode].insn_code
4824 == CODE_FOR_nothing))
4825 || (code == AND
4826 && and_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
4827 || (code == IOR
4828 && ior_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
4829 || (code == XOR && (xor_optab->handlers[(int) mode].insn_code
4830 == CODE_FOR_nothing)))
4831 op_mode = GET_MODE (x);
4832
4833 x = gen_binary (code, op_mode,
4834 gen_lowpart_for_combine (op_mode,
4835 force_to_mode (XEXP (x, 0),
4836 mode, bits,
4837 reg)),
4838 gen_lowpart_for_combine (op_mode,
4839 force_to_mode (XEXP (x, 1),
4840 mode, bits,
4841 reg)));
4842 break;
4843
4844 case ASHIFT:
4845 case LSHIFT:
4846 /* For left shifts, do the same, but just for the first operand.
4847 If the shift count is a constant, we need even fewer bits of the
4848 first operand. */
4849
4850 if (GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) < bits)
4851 bits -= INTVAL (XEXP (x, 1));
4852
4853 if ((code == ASHIFT
4854 && ashl_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
4855 || (code == LSHIFT && (lshl_optab->handlers[(int) mode].insn_code
4856 == CODE_FOR_nothing)))
4857 op_mode = GET_MODE (x);
4858
4859 x = gen_binary (code, op_mode,
4860 gen_lowpart_for_combine (op_mode,
4861 force_to_mode (XEXP (x, 0),
4862 mode, bits,
4863 reg)),
4864 XEXP (x, 1));
4865 break;
4866
4867 case LSHIFTRT:
4868 /* Here we can only do something if the shift count is a constant and
4869 the count plus BITS is no larger than the width of MODE; in that
4870 case, we can do the shift in MODE. */
4871
4872 if (GET_CODE (XEXP (x, 1)) == CONST_INT
4873 && INTVAL (XEXP (x, 1)) + bits <= GET_MODE_BITSIZE (mode))
4874 {
4875 rtx inner = force_to_mode (XEXP (x, 0), mode,
4876 bits + INTVAL (XEXP (x, 1)), reg);
4877
4878 if (lshr_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
4879 op_mode = GET_MODE (x);
4880
4881 x = gen_binary (LSHIFTRT, op_mode,
4882 gen_lowpart_for_combine (op_mode, inner),
4883 XEXP (x, 1));
4884 }
4885 break;
4886
4887 case ASHIFTRT:
4888 /* If this is a sign-extension operation that just affects bits
4889 we don't care about, remove it. */
4890
4891 if (GET_CODE (XEXP (x, 1)) == CONST_INT
4892 && INTVAL (XEXP (x, 1)) >= 0
4893 && INTVAL (XEXP (x, 1)) <= GET_MODE_BITSIZE (GET_MODE (x)) - bits
4894 && GET_CODE (XEXP (x, 0)) == ASHIFT
4895 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4896 && INTVAL (XEXP (XEXP (x, 0), 1)) == INTVAL (XEXP (x, 1)))
4897 return force_to_mode (XEXP (XEXP (x, 0), 0), mode, bits, reg);
4898 break;
4899
4900 case NEG:
4901 case NOT:
4902 if ((code == NEG
4903 && neg_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
4904 || (code == NOT && (one_cmpl_optab->handlers[(int) mode].insn_code
4905 == CODE_FOR_nothing)))
4906 op_mode = GET_MODE (x);
4907
4908 /* Handle these similarly to the way we handle most binary operations. */
4909 x = gen_unary (code, op_mode,
4910 gen_lowpart_for_combine (op_mode,
4911 force_to_mode (XEXP (x, 0), mode,
4912 bits, reg)));
4913 break;
4914
4915 case IF_THEN_ELSE:
4916 /* We have no way of knowing if the IF_THEN_ELSE can itself be
4917 written in a narrower mode. We play it safe and do not do so. */
4918
4919 SUBST (XEXP (x, 1),
4920 gen_lowpart_for_combine (GET_MODE (x),
4921 force_to_mode (XEXP (x, 1), mode,
4922 bits, reg)));
4923 SUBST (XEXP (x, 2),
4924 gen_lowpart_for_combine (GET_MODE (x),
4925 force_to_mode (XEXP (x, 2), mode,
4926 bits, reg)));
4927 break;
4928 }
4929
4930 /* Ensure we return a value of the proper mode. */
4931 return gen_lowpart_for_combine (mode, x);
4932}
4933\f
4934/* See if X, a SET operation, can be rewritten as a bit-field assignment.
4935 Return that assignment if so.
4936
4937 We only handle the most common cases. */
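/* One illustrative case, with hypothetical operands: setting a single
bit, (set Z (ior (ashift (const_int 1) POS) Z)), is rewritten below
as (set (zero_extract Z (const_int 1) POS) (const_int 1)). */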
4938
4939static rtx
4940make_field_assignment (x)
4941 rtx x;
4942{
4943 rtx dest = SET_DEST (x);
4944 rtx src = SET_SRC (x);
4945 rtx ourdest;
4946 rtx assign;
4947 HOST_WIDE_INT c1;
4948 int pos, len;
4949 rtx other;
4950 enum machine_mode mode;
4951
4952 /* If SRC was (and (not (ashift (const_int 1) POS)) DEST), this is
4953 a clear of a one-bit field. We will have changed it to
4954 (and (rotate (const_int -2) POS) DEST), so check for that. Also check
4955 for a SUBREG. */
4956
4957 if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == ROTATE
4958 && GET_CODE (XEXP (XEXP (src, 0), 0)) == CONST_INT
4959 && INTVAL (XEXP (XEXP (src, 0), 0)) == -2
4960 && (rtx_equal_p (dest, XEXP (src, 1))
4961 || rtx_equal_p (dest, get_last_value (XEXP (src, 1)))
4962 || rtx_equal_p (get_last_value (dest), XEXP (src, 1))))
4963 {
4964 assign = make_extraction (VOIDmode, dest, -1, XEXP (XEXP (src, 0), 1),
4965 1, 1, 1, 0);
4966 return gen_rtx (SET, VOIDmode, assign, const0_rtx);
4967 }
4968
4969 else if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == SUBREG
4970 && subreg_lowpart_p (XEXP (src, 0))
4971 && (GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
4972 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (src, 0)))))
4973 && GET_CODE (SUBREG_REG (XEXP (src, 0))) == ROTATE
4974 && INTVAL (XEXP (SUBREG_REG (XEXP (src, 0)), 0)) == -2
4975 && (rtx_equal_p (dest, XEXP (src, 1))
4976 || rtx_equal_p (dest, get_last_value (XEXP (src, 1)))
4977 || rtx_equal_p (get_last_value (dest), XEXP (src, 1))))
4978 {
4979 assign = make_extraction (VOIDmode, dest, -1,
4980 XEXP (SUBREG_REG (XEXP (src, 0)), 1),
4981 1, 1, 1, 0);
4982 return gen_rtx (SET, VOIDmode, assign, const0_rtx);
4983 }
4984
4985 /* If SRC is (ior (ashift (const_int 1) POS) DEST), this is a set of a
4986 one-bit field. */
4987 else if (GET_CODE (src) == IOR && GET_CODE (XEXP (src, 0)) == ASHIFT
4988 && XEXP (XEXP (src, 0), 0) == const1_rtx
4989 && (rtx_equal_p (dest, XEXP (src, 1))
4990 || rtx_equal_p (dest, get_last_value (XEXP (src, 1)))
4991 || rtx_equal_p (get_last_value (dest), XEXP (src, 1))))
4992 {
4993 assign = make_extraction (VOIDmode, dest, -1, XEXP (XEXP (src, 0), 1),
4994 1, 1, 1, 0);
4995 return gen_rtx (SET, VOIDmode, assign, const1_rtx);
4996 }
4997
4998 /* The other case we handle is assignments into a constant-position
4999 field. They look like (ior (and DEST C1) OTHER). If C1 represents
5000 a mask that has all one bits except for a group of zero bits and
5001 OTHER is known to have zeros where C1 has ones, this is such an
5002 assignment. Compute the position and length from C1. Shift OTHER
5003 to the appropriate position, force it to the required mode, and
5004 make the extraction. Check for the AND in both operands. */
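/* For example, with hypothetical 32-bit operands: in
(ior (and DEST (const_int 0xffffff00)) OTHER), ~C1 is 0xff, so the
field is eight bits at position 0; if OTHER is known to be zero
outside those eight bits, this becomes an assignment to
(zero_extract DEST (const_int 8) (const_int 0)). */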
5005
5006 if (GET_CODE (src) == IOR && GET_CODE (XEXP (src, 0)) == AND
5007 && GET_CODE (XEXP (XEXP (src, 0), 1)) == CONST_INT
5008 && (rtx_equal_p (XEXP (XEXP (src, 0), 0), dest)
5009 || rtx_equal_p (XEXP (XEXP (src, 0), 0), get_last_value (dest))
5010 || rtx_equal_p (get_last_value (XEXP (XEXP (src, 0), 0)), dest)))
5011 c1 = INTVAL (XEXP (XEXP (src, 0), 1)), other = XEXP (src, 1);
5012 else if (GET_CODE (src) == IOR && GET_CODE (XEXP (src, 1)) == AND
5013 && GET_CODE (XEXP (XEXP (src, 1), 1)) == CONST_INT
5014 && (rtx_equal_p (XEXP (XEXP (src, 1), 0), dest)
5015 || rtx_equal_p (XEXP (XEXP (src, 1), 0), get_last_value (dest))
5016 || rtx_equal_p (get_last_value (XEXP (XEXP (src, 1), 0)),
5017 dest)))
5018 c1 = INTVAL (XEXP (XEXP (src, 1), 1)), other = XEXP (src, 0);
5019 else
5020 return x;
5021
5022 pos = get_pos_from_mask (~c1, &len);
5023 if (pos < 0 || pos + len > GET_MODE_BITSIZE (GET_MODE (dest))
5024 || (c1 & significant_bits (other, GET_MODE (other))) != 0)
5025 return x;
5026
5027 assign = make_extraction (VOIDmode, dest, pos, NULL_RTX, len, 1, 1, 0);
5028
5029 /* The mode to use for the source is the mode of the assignment, or of
5030 what is inside a possible STRICT_LOW_PART. */
5031 mode = (GET_CODE (assign) == STRICT_LOW_PART
5032 ? GET_MODE (XEXP (assign, 0)) : GET_MODE (assign));
5033
5034 /* Shift OTHER right POS places and make it the source, restricting it
5035 to the proper length and mode. */
5036
5037 src = force_to_mode (simplify_shift_const (NULL_RTX, LSHIFTRT,
5038 GET_MODE (src), other, pos),
5039 mode, len, dest);
5040
5041 return gen_rtx_combine (SET, VOIDmode, assign, src);
5042}
5043\f
5044/* See if X is of the form (+ (* a c) (* b c)) and convert to (* (+ a b) c)
5045 if so. */
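/* For instance, (ior (and A C) (and B C)) becomes (and (ior A B) C);
pulling the common operand C outward may let the inner IOR simplify. */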
5046
5047static rtx
5048apply_distributive_law (x)
5049 rtx x;
5050{
5051 enum rtx_code code = GET_CODE (x);
5052 rtx lhs, rhs, other;
5053 rtx tem;
5054 enum rtx_code inner_code;
5055
5056 /* The outer operation can only be one of the following: */
5057 if (code != IOR && code != AND && code != XOR
5058 && code != PLUS && code != MINUS)
5059 return x;
5060
5061 lhs = XEXP (x, 0), rhs = XEXP (x, 1);
5062
5063 /* If either operand is a primitive we can't do anything, so get out fast. */
5064 if (GET_RTX_CLASS (GET_CODE (lhs)) == 'o'
5065 || GET_RTX_CLASS (GET_CODE (rhs)) == 'o')
5066 return x;
5067
5068 lhs = expand_compound_operation (lhs);
5069 rhs = expand_compound_operation (rhs);
5070 inner_code = GET_CODE (lhs);
5071 if (inner_code != GET_CODE (rhs))
5072 return x;
5073
5074 /* See if the inner and outer operations distribute. */
5075 switch (inner_code)
5076 {
5077 case LSHIFTRT:
5078 case ASHIFTRT:
5079 case AND:
5080 case IOR:
5081 /* These all distribute except over PLUS. */
5082 if (code == PLUS || code == MINUS)
5083 return x;
5084 break;
5085
5086 case MULT:
5087 if (code != PLUS && code != MINUS)
5088 return x;
5089 break;
5090
5091 case ASHIFT:
5092 case LSHIFT:
5093 /* These are also multiplies, so they distribute over everything. */
5094 break;
5095
5096 case SUBREG:
5097 /* Non-paradoxical SUBREGs distribute over all operations, provided
5098 the inner modes and word numbers are the same, this is an extraction
5099 of a low-order part, we don't convert an fp operation to int or
5100 vice versa, and we would not be converting a single-word
5101 operation into a multi-word operation. The latter test is not
5102 required, but it prevents generating unneeded multi-word operations.
5103 Some of the previous tests are redundant given the latter test, but
5104 are retained because they are required for correctness.
5105
5106 We produce the result slightly differently in this case. */
5107
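/* A sketch of this case with hypothetical modes: for
(plus (subreg:QI (reg:SI A) 0) (subreg:QI (reg:SI B) 0)) we form
(plus:SI A B) in the inner mode and return its low part. */
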
5108 if (GET_MODE (SUBREG_REG (lhs)) != GET_MODE (SUBREG_REG (rhs))
5109 || SUBREG_WORD (lhs) != SUBREG_WORD (rhs)
5110 || ! subreg_lowpart_p (lhs)
2b4bd1bc
JW
5111 || (GET_MODE_CLASS (GET_MODE (lhs))
5112 != GET_MODE_CLASS (GET_MODE (SUBREG_REG (lhs))))
5113 || (GET_MODE_SIZE (GET_MODE (lhs))
5114 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs))))
5115 || GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs))) > UNITS_PER_WORD)
5116 return x;
5117
5118 tem = gen_binary (code, GET_MODE (SUBREG_REG (lhs)),
5119 SUBREG_REG (lhs), SUBREG_REG (rhs));
5120 return gen_lowpart_for_combine (GET_MODE (x), tem);
5121
5122 default:
5123 return x;
5124 }
5125
5126 /* Set LHS and RHS to the inner operands (A and B in the example
5127 above) and set OTHER to the common operand (C in the example).
5128 There is only one way to do this unless the inner operation is
5129 commutative. */
5130 if (GET_RTX_CLASS (inner_code) == 'c'
5131 && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 0)))
5132 other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 1);
5133 else if (GET_RTX_CLASS (inner_code) == 'c'
5134 && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 1)))
5135 other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 0);
5136 else if (GET_RTX_CLASS (inner_code) == 'c'
5137 && rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 0)))
5138 other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 1);
5139 else if (rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 1)))
5140 other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 0);
5141 else
5142 return x;
5143
5144 /* Form the new inner operation, seeing if it simplifies first. */
5145 tem = gen_binary (code, GET_MODE (x), lhs, rhs);
5146
5147 /* There is one exception to the general way of distributing:
5148 (a | c) ^ (b | c) -> (a ^ b) & ~c */
5149 if (code == XOR && inner_code == IOR)
5150 {
5151 inner_code = AND;
5152 other = gen_unary (NOT, GET_MODE (x), other);
5153 }
5154
5155 /* We may be able to continue distributing the result, so call
5156 ourselves recursively on the inner operation before forming the
5157 outer operation, which we return. */
5158 return gen_binary (inner_code, GET_MODE (x),
5159 apply_distributive_law (tem), other);
5160}
5161\f
5162/* We have X, a logical `and' of VAROP with the constant CONSTOP, to be done
5163 in MODE.
5164
5165 Return an equivalent form, if different from X. Otherwise, return X. If
5166 X is zero, we are to always construct the equivalent form. */
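/* A small sketch of the principle, with a hypothetical operand: in
(and (ashift X (const_int 1)) (const_int 1)) the low-order bit of
the shift is known to be zero, so the result folds to (const_int 0);
with a mask of -2 instead, the AND disappears entirely. */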
5167
5168static rtx
5169simplify_and_const_int (x, mode, varop, constop)
5170 rtx x;
5171 enum machine_mode mode;
5172 rtx varop;
5173 unsigned HOST_WIDE_INT constop;
5174{
5175 register enum machine_mode tmode;
5176 register rtx temp;
5177 unsigned HOST_WIDE_INT significant;
5178
5179 /* There is a large class of optimizations based on the principle that
5180 some operations produce results where certain bits are known to be zero,
5181 and hence are not significant to the AND. For example, if we have just
5182 done a left shift of one bit, the low-order bit is known to be zero and
5183 hence an AND with a mask of ~1 would not do anything.
5184
5185 At the end of the following loop, we set:
5186
5187 VAROP to be the item to be AND'ed with;
5188 CONSTOP to the constant value to AND it with. */
5189
5190 while (1)
5191 {
5192 /* If we ever encounter a mode wider than the host machine's widest
5193 integer size, we can't compute the masks accurately, so give up. */
5194 if (GET_MODE_BITSIZE (GET_MODE (varop)) > HOST_BITS_PER_WIDE_INT)
5195 break;
5196
5197 /* Unless one of the cases below does a `continue',
5198 a `break' will be executed to exit the loop. */
5199
5200 switch (GET_CODE (varop))
5201 {
5202 case CLOBBER:
5203 /* If VAROP is a (clobber (const_int)), return it since we know
5204 we are generating something that won't match. */
5205 return varop;
5206
5207#if ! BITS_BIG_ENDIAN
5208 case USE:
5209 /* VAROP is a (use (mem ..)) that was made from a bit-field
5210 extraction that spanned the boundary of the MEM. If we are
5211 now masking so it is within that boundary, we don't need the
5212 USE any more. */
5213 if ((constop & ~ GET_MODE_MASK (GET_MODE (XEXP (varop, 0)))) == 0)
5214 {
5215 varop = XEXP (varop, 0);
5216 continue;
5217 }
5218 break;
5219#endif
5220
5221 case SUBREG:
5222 if (subreg_lowpart_p (varop)
5223 /* We can ignore the effect of this SUBREG if it narrows the mode
5224 or, on machines where byte operations zero extend, if the
5225 constant masks to zero all the bits the mode doesn't have. */
5226 && ((GET_MODE_SIZE (GET_MODE (varop))
5227 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop))))
5228#ifdef BYTE_LOADS_ZERO_EXTEND
5229 || (0 == (constop
5230 & GET_MODE_MASK (GET_MODE (varop))
5231 & ~ GET_MODE_MASK (GET_MODE (SUBREG_REG (varop)))))
5232#endif
5233 ))
5234 {
5235 varop = SUBREG_REG (varop);
5236 continue;
5237 }
5238 break;
5239
5240 case ZERO_EXTRACT:
5241 case SIGN_EXTRACT:
5242 case ZERO_EXTEND:
5243 case SIGN_EXTEND:
5244 /* Try to expand these into a series of shifts and then work
5245 with that result. If we can't, for example, if the extract
5246 isn't at a fixed position, give up. */
5247 temp = expand_compound_operation (varop);
5248 if (temp != varop)
5249 {
5250 varop = temp;
5251 continue;
5252 }
5253 break;
5254
5255 case AND:
5256 if (GET_CODE (XEXP (varop, 1)) == CONST_INT)
5257 {
5258 constop &= INTVAL (XEXP (varop, 1));
5259 varop = XEXP (varop, 0);
5260 continue;
5261 }
5262 break;
5263
5264 case IOR:
5265 case XOR:
5266 /* If VAROP is (ior (lshiftrt FOO C1) C2), try to commute the IOR and
5267 LSHIFTRT so we end up with an (and (lshiftrt (ior ...) ...) ...)
5268 operation which may be a bitfield extraction. */
5269
5270 if (GET_CODE (XEXP (varop, 0)) == LSHIFTRT
5271 && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT
5272 && INTVAL (XEXP (XEXP (varop, 0), 1)) >= 0
5273 && INTVAL (XEXP (XEXP (varop, 0), 1)) < HOST_BITS_PER_WIDE_INT
5274 && GET_CODE (XEXP (varop, 1)) == CONST_INT
5275 && ((INTVAL (XEXP (varop, 1))
5276 & ~ significant_bits (XEXP (varop, 0),
5277 GET_MODE (varop))) == 0))
5278 {
5279 temp = GEN_INT ((INTVAL (XEXP (varop, 1)) & constop)
5280 << INTVAL (XEXP (XEXP (varop, 0), 1)));
5281 temp = gen_binary (GET_CODE (varop), GET_MODE (varop),
5282 XEXP (XEXP (varop, 0), 0), temp);
5283 varop = gen_rtx_combine (LSHIFTRT, GET_MODE (varop),
5284 temp, XEXP (varop, 1));
5285 continue;
5286 }
5287
5288 /* Apply the AND to both branches of the IOR or XOR, then try to
5289 apply the distributive law. This may eliminate operations
5290 if either branch can be simplified because of the AND.
5291 It may also make some cases more complex, but those cases
5292 probably won't match a pattern either with or without this. */
5293 return
5294 gen_lowpart_for_combine
5295 (mode, apply_distributive_law
5296 (gen_rtx_combine
5297 (GET_CODE (varop), GET_MODE (varop),
5298 simplify_and_const_int (NULL_RTX, GET_MODE (varop),
5299 XEXP (varop, 0), constop),
5300 simplify_and_const_int (NULL_RTX, GET_MODE (varop),
5301 XEXP (varop, 1), constop))));
5302
5303 case NOT:
5304 /* (and (not FOO) CONST_OP) is (and (xor FOO CONST_OP) CONST_OP), so if FOO is an
5305 LSHIFTRT we can do the same as above. */
5306
5307 if (GET_CODE (XEXP (varop, 0)) == LSHIFTRT
5308 && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT
5309 && INTVAL (XEXP (XEXP (varop, 0), 1)) >= 0
5310 && INTVAL (XEXP (XEXP (varop, 0), 1)) < HOST_BITS_PER_WIDE_INT)
5311 {
5312 temp = GEN_INT (constop << INTVAL (XEXP (XEXP (varop, 0), 1)));
5313 temp = gen_binary (XOR, GET_MODE (varop),
5314 XEXP (XEXP (varop, 0), 0), temp);
5315 varop = gen_rtx_combine (LSHIFTRT, GET_MODE (varop),
5316 temp, XEXP (XEXP (varop, 0), 1));
5317 continue;
5318 }
5319 break;
5320
5321 case ASHIFTRT:
5322 /* If we are just looking for the sign bit, we don't need this
5323 shift at all, even if it has a variable count. */
5324 if (constop == ((HOST_WIDE_INT) 1
5325 << (GET_MODE_BITSIZE (GET_MODE (varop)) - 1)))
5326 {
5327 varop = XEXP (varop, 0);
5328 continue;
5329 }
5330
5331 /* If this is a shift by a constant, get a mask that contains
5332 those bits that are not copies of the sign bit. We then have
5333 two cases: If CONSTOP only includes those bits, this can be
5334 a logical shift, which may allow simplifications. If CONSTOP
5335 is a single-bit field not within those bits, we are requesting
5336 a copy of the sign bit and hence can shift the sign bit to
5337 the appropriate location. */
5338 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
5339 && INTVAL (XEXP (varop, 1)) >= 0
5340 && INTVAL (XEXP (varop, 1)) < HOST_BITS_PER_WIDE_INT)
5341 {
5342 int i = -1;
5343
5344 significant = GET_MODE_MASK (GET_MODE (varop));
5345 significant >>= INTVAL (XEXP (varop, 1));
5346
5347 if ((constop & ~significant) == 0
5348 || (i = exact_log2 (constop)) >= 0)
5349 {
5350 varop = simplify_shift_const
5351 (varop, LSHIFTRT, GET_MODE (varop), XEXP (varop, 0),
5352 i < 0 ? INTVAL (XEXP (varop, 1))
5353 : GET_MODE_BITSIZE (GET_MODE (varop)) - 1 - i);
5354 if (GET_CODE (varop) != ASHIFTRT)
5355 continue;
5356 }
5357 }
5358
5359 /* If our mask is 1, convert this to a LSHIFTRT. This can be done
5360 even if the shift count isn't a constant. */
5361 if (constop == 1)
5362 varop = gen_rtx_combine (LSHIFTRT, GET_MODE (varop),
5363 XEXP (varop, 0), XEXP (varop, 1));
5364 break;
5365
5366 case NE:
5367 /* (and (ne FOO 0) CONST) can be (and FOO CONST) if CONST is
5368 included in STORE_FLAG_VALUE and FOO has no significant bits
5369 not in CONST. */
5370 if ((constop & ~ STORE_FLAG_VALUE) == 0
5371 && XEXP (varop, 1) == const0_rtx
5372 && (significant_bits (XEXP (varop, 0), mode) & ~ constop) == 0)
5373 {
5374 varop = XEXP (varop, 0);
5375 continue;
5376 }
5377 break;
5378
5379 case PLUS:
5380 /* In (and (plus FOO C1) M), if M is a mask that just turns off
5381 low-order bits (as in an alignment operation) and FOO is already
5382 aligned to that boundary, we can remove this AND
5383 and possibly the PLUS if it is now adding zero. */
5384 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
5385 && exact_log2 (-constop) >= 0
5386 && (significant_bits (XEXP (varop, 0), mode) & ~ constop) == 0)
5387 {
5388 varop = plus_constant (XEXP (varop, 0),
5389 INTVAL (XEXP (varop, 1)) & constop);
5390 constop = ~0;
5391 break;
5392 }
5393
5394 /* ... fall through ... */
5395
5396 case MINUS:
5397 /* In (and (plus (and FOO M1) BAR) M2), if M1 and M2 are one
5398 less than powers of two and M2 is narrower than M1, we can
5399 eliminate the inner AND. This occurs when incrementing
5400 bit fields. */
5401
5402 if (GET_CODE (XEXP (varop, 0)) == ZERO_EXTRACT
5403 || GET_CODE (XEXP (varop, 0)) == ZERO_EXTEND)
5404 SUBST (XEXP (varop, 0),
5405 expand_compound_operation (XEXP (varop, 0)));
5406
5407 if (GET_CODE (XEXP (varop, 0)) == AND
5408 && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT
5409 && exact_log2 (constop + 1) >= 0
5410 && exact_log2 (INTVAL (XEXP (XEXP (varop, 0), 1)) + 1) >= 0
5411 && (~ INTVAL (XEXP (XEXP (varop, 0), 1)) & constop) == 0)
5412 SUBST (XEXP (varop, 0), XEXP (XEXP (varop, 0), 0));
5413 break;
5414 }
5415
5416 break;
5417 }
5418
5419 /* If we have reached a constant, this whole thing is constant. */
5420 if (GET_CODE (varop) == CONST_INT)
5421 return GEN_INT (constop & INTVAL (varop));
5422
5423 /* See what bits are significant in VAROP. */
5424 significant = significant_bits (varop, mode);
5425
5426 /* Turn off all bits in the constant that are known to already be zero.
5427 Thus, if the AND isn't needed at all, we will have CONSTOP == SIGNIFICANT
5428 which is tested below. */
5429
5430 constop &= significant;
5431
5432 /* If we don't have any bits left, return zero. */
5433 if (constop == 0)
5434 return const0_rtx;
5435
5436 /* Get VAROP in MODE. Try to get a SUBREG if not. Don't make a new SUBREG
5437 if we already had one (just check for the simplest cases). */
5438 if (x && GET_CODE (XEXP (x, 0)) == SUBREG
5439 && GET_MODE (XEXP (x, 0)) == mode
5440 && SUBREG_REG (XEXP (x, 0)) == varop)
5441 varop = XEXP (x, 0);
5442 else
5443 varop = gen_lowpart_for_combine (mode, varop);
5444
5445 /* If we can't make the SUBREG, try to return what we were given. */
5446 if (GET_CODE (varop) == CLOBBER)
5447 return x ? x : varop;
5448
5449 /* If we are only masking insignificant bits, return VAROP. */
5450 if (constop == significant)
5451 x = varop;
5452
5453 /* Otherwise, return an AND. See how much, if any, of X we can use. */
5454 else if (x == 0 || GET_CODE (x) != AND || GET_MODE (x) != mode)
5455 x = gen_rtx_combine (AND, mode, varop, GEN_INT (constop));
5456
5457 else
5458 {
5459 if (GET_CODE (XEXP (x, 1)) != CONST_INT
5460 || INTVAL (XEXP (x, 1)) != constop)
5461 SUBST (XEXP (x, 1), GEN_INT (constop));
5462
5463 SUBST (XEXP (x, 0), varop);
5464 }
5465
5466 return x;
5467}
5468\f
5469/* Given an expression, X, compute which bits in X can be non-zero.
5470 We don't care about bits outside of those defined in MODE.
5471
5472 For most X this is simply GET_MODE_MASK (MODE), but if X is
5473 a shift, AND, or zero_extract, we can do better. */
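/* For instance, in hypothetical SImode cases: significant_bits of
(and X (const_int 12)) is 12, and significant_bits of
(lshiftrt X (const_int 28)) is 15, since the shift clears the upper
28 bits. */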
5474
5475 static unsigned HOST_WIDE_INT
5476significant_bits (x, mode)
5477 rtx x;
5478 enum machine_mode mode;
5479{
5480 unsigned HOST_WIDE_INT significant = GET_MODE_MASK (mode);
5481 unsigned HOST_WIDE_INT inner_sig;
5482 enum rtx_code code;
5483 int mode_width = GET_MODE_BITSIZE (mode);
5484 rtx tem;
5485
5486 /* If X is wider than MODE, use its mode instead. */
5487 if (GET_MODE_BITSIZE (GET_MODE (x)) > mode_width)
5488 {
5489 mode = GET_MODE (x);
5490 significant = GET_MODE_MASK (mode);
5491 mode_width = GET_MODE_BITSIZE (mode);
5492 }
5493
5494 if (mode_width > HOST_BITS_PER_WIDE_INT)
5495 /* Our only callers in this case look for single bit values. So
5496 just return the mode mask. Those tests will then be false. */
5497 return significant;
5498
5499 code = GET_CODE (x);
5500 switch (code)
5501 {
5502 case REG:
5503#ifdef STACK_BOUNDARY
5504 /* If this is the stack pointer, we may know something about its
5505 alignment. If PUSH_ROUNDING is defined, it is possible for the
5506 stack to be momentarily aligned only to that amount, so we pick
5507 the least alignment. */
5508
5509 if (x == stack_pointer_rtx)
5510 {
5511 int sp_alignment = STACK_BOUNDARY / BITS_PER_UNIT;
5512
5513#ifdef PUSH_ROUNDING
5514 sp_alignment = MIN (PUSH_ROUNDING (1), sp_alignment);
5515#endif
5516
5517 return significant & ~ (sp_alignment - 1);
5518 }
5519#endif
5520
5521 /* If X is a register whose value we can find, use that value.
5522 Otherwise, use the previously-computed significant bits for this
5523 register. */
5524
5525 tem = get_last_value (x);
5526 if (tem)
5527 return significant_bits (tem, mode);
5528 else if (significant_valid && reg_significant[REGNO (x)])
5529 return reg_significant[REGNO (x)] & significant;
5530 else
5531 return significant;
5532
5533 case CONST_INT:
5534 return INTVAL (x);
5535
5536#ifdef BYTE_LOADS_ZERO_EXTEND
5537 case MEM:
5538 /* In many, if not most, RISC machines, reading a byte from memory
5539 zeros the rest of the register. Noticing that fact saves a lot
5540 of extra zero-extends. */
5541 significant &= GET_MODE_MASK (GET_MODE (x));
5542 break;
5543#endif
5544
5545#if STORE_FLAG_VALUE == 1
5546 case EQ: case NE:
5547 case GT: case GTU:
5548 case LT: case LTU:
5549 case GE: case GEU:
5550 case LE: case LEU:
5551
5552 if (GET_MODE_CLASS (mode) == MODE_INT)
5553 significant = 1;
5554
5555 /* A comparison operation only sets the bits given by its mode. The
5556 rest are left undefined. */
5557 if (GET_MODE_SIZE (GET_MODE (x)) < mode_width)
5558 significant |= (GET_MODE_MASK (mode) & ~ GET_MODE_MASK (GET_MODE (x)));
5559 break;
5560#endif
5561
5562 case NEG:
5563 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
5564 == GET_MODE_BITSIZE (GET_MODE (x)))
5565 significant = 1;
5566
5567 if (GET_MODE_SIZE (GET_MODE (x)) < mode_width)
5568 significant |= (GET_MODE_MASK (mode) & ~ GET_MODE_MASK (GET_MODE (x)));
5569 break;
5570
5571 case ABS:
5572 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
5573 == GET_MODE_BITSIZE (GET_MODE (x)))
5574 significant = 1;
5575 break;
5576
5577 case TRUNCATE:
5578 significant &= (significant_bits (XEXP (x, 0), mode)
5579 & GET_MODE_MASK (mode));
5580 break;
5581
5582 case ZERO_EXTEND:
5583 significant &= significant_bits (XEXP (x, 0), mode);
5584 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
5585 significant &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
5586 break;
5587
5588 case SIGN_EXTEND:
5589 /* If the sign bit is known clear, this is the same as ZERO_EXTEND.
5590 Otherwise, show that all the bits in the outer mode but not in the
5591 inner mode may be non-zero. */
5592 inner_sig = significant_bits (XEXP (x, 0), mode);
5593 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
5594 {
5595 inner_sig &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
5596 if (inner_sig &
5597 (((HOST_WIDE_INT) 1
5598 << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1))))
5599 inner_sig |= (GET_MODE_MASK (mode)
5600 & ~ GET_MODE_MASK (GET_MODE (XEXP (x, 0))));
5601 }
5602
5603 significant &= inner_sig;
5604 break;
5605
5606 case AND:
5607 significant &= (significant_bits (XEXP (x, 0), mode)
5608 & significant_bits (XEXP (x, 1), mode));
5609 break;
5610
5611 case XOR: case IOR:
5612 case UMIN: case UMAX: case SMIN: case SMAX:
5613 significant &= (significant_bits (XEXP (x, 0), mode)
5614 | significant_bits (XEXP (x, 1), mode));
5615 break;
5616
5617 case PLUS: case MINUS:
5618 case MULT:
5619 case DIV: case UDIV:
5620 case MOD: case UMOD:
5621 /* We can apply the rules of arithmetic to compute the number of
5622 high- and low-order zero bits of these operations. We start by
5623 computing the width (position of the highest-order non-zero bit)
5624 and the number of low-order zero bits for each value. */
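/* A worked instance with hypothetical values: if sig0 == 0x0c and
sig1 == 0x30, then width0 == 4, width1 == 6, low0 == 2, and
low1 == 4. For PLUS this gives result_width == 7 and
result_low == 2: the sum fits in seven bits and is a multiple
of four. */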
5625 {
5626 unsigned HOST_WIDE_INT sig0 = significant_bits (XEXP (x, 0), mode);
5627 unsigned HOST_WIDE_INT sig1 = significant_bits (XEXP (x, 1), mode);
5628 int width0 = floor_log2 (sig0) + 1;
5629 int width1 = floor_log2 (sig1) + 1;
5630 int low0 = floor_log2 (sig0 & -sig0);
5631 int low1 = floor_log2 (sig1 & -sig1);
5632 int op0_maybe_minusp = ((sig0 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) != 0);
5633 int op1_maybe_minusp = ((sig1 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) != 0);
5634 int result_width = mode_width;
5635 int result_low = 0;
5636
5637 switch (code)
5638 {
5639 case PLUS:
5640 result_width = MAX (width0, width1) + 1;
5641 result_low = MIN (low0, low1);
5642 break;
5643 case MINUS:
5644 result_low = MIN (low0, low1);
5645 break;
5646 case MULT:
5647 result_width = width0 + width1;
5648 result_low = low0 + low1;
5649 break;
5650 case DIV:
5651 if (! op0_maybe_minusp && ! op1_maybe_minusp)
5652 result_width = width0;
5653 break;
5654 case UDIV:
5655 result_width = width0;
5656 break;
5657 case MOD:
5658 if (! op0_maybe_minusp && ! op1_maybe_minusp)
5659 result_width = MIN (width0, width1);
5660 result_low = MIN (low0, low1);
5661 break;
5662 case UMOD:
5663 result_width = MIN (width0, width1);
5664 result_low = MIN (low0, low1);
5665 break;
5666 }
5667
5668 if (result_width < mode_width)
5669 significant &= ((HOST_WIDE_INT) 1 << result_width) - 1;
5670
5671 if (result_low > 0)
5672 significant &= ~ (((HOST_WIDE_INT) 1 << result_low) - 1);
5673 }
5674 break;
5675
5676 case ZERO_EXTRACT:
5677 if (GET_CODE (XEXP (x, 1)) == CONST_INT
5678 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
5679 significant &= ((HOST_WIDE_INT) 1 << INTVAL (XEXP (x, 1))) - 1;
5680 break;
5681
5682 case SUBREG:
5683 /* If the inner mode is a single word for both the host and target
5684 machines, we can compute this from which bits of the inner
5685 object are known significant. */
5686 if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) <= BITS_PER_WORD
5687 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
5688 <= HOST_BITS_PER_WIDE_INT))
5689 {
5690 significant &= significant_bits (SUBREG_REG (x), mode);
5691#ifndef BYTE_LOADS_ZERO_EXTEND
5692 /* On many CISC machines, accessing an object in a wider mode
5693 causes the high-order bits to become undefined. So they are
5694 not known to be zero. */
5695 if (GET_MODE_SIZE (GET_MODE (x))
5696 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
5697 significant |= (GET_MODE_MASK (GET_MODE (x))
5698 & ~ GET_MODE_MASK (GET_MODE (SUBREG_REG (x))));
5699#endif
5700 }
5701 break;
5702
5703 case ASHIFTRT:
5704 case LSHIFTRT:
5705 case ASHIFT:
5706 case LSHIFT:
5707 case ROTATE:
5708 /* The significant bits are in two classes: any bits within MODE
5709 that aren't in GET_MODE (x) are always significant. The rest of the
5710 significant bits are those that are significant in the operand of
5711 the shift when shifted the appropriate number of bits. This
5712 shows that high-order bits are cleared by the right shift and
5713 low-order bits by left shifts. */
5714 if (GET_CODE (XEXP (x, 1)) == CONST_INT
5715 && INTVAL (XEXP (x, 1)) >= 0
5716 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
5717 {
5718 enum machine_mode inner_mode = GET_MODE (x);
5719 int width = GET_MODE_BITSIZE (inner_mode);
5720 int count = INTVAL (XEXP (x, 1));
5721 unsigned HOST_WIDE_INT mode_mask = GET_MODE_MASK (inner_mode);
5722 unsigned HOST_WIDE_INT op_significant
5723 = significant_bits (XEXP (x, 0), mode);
5724 unsigned HOST_WIDE_INT inner = op_significant & mode_mask;
5725 unsigned HOST_WIDE_INT outer = 0;
5726
5727 if (mode_width > width)
5728 outer = (op_significant & significant & ~ mode_mask);
5729
5730 if (code == LSHIFTRT)
5731 inner >>= count;
5732 else if (code == ASHIFTRT)
5733 {
5734 inner >>= count;
5735
5736 /* If the sign bit was significant before the shift, we need
5737 to mark as significant all the places it could have been
5738 copied to by the shift. */
5739 if (inner & ((HOST_WIDE_INT) 1 << (width - 1 - count)))
5740 inner |= (((HOST_WIDE_INT) 1 << count) - 1) << (width - count);
5741 }
5742 else if (code == LSHIFT || code == ASHIFT)
5743 inner <<= count;
5744 else
5745 inner = ((inner << (count % width)
5746 | (inner >> (width - (count % width)))) & mode_mask);
5747
5748 significant &= (outer | inner);
5749 }
5750 break;
5751
5752 case FFS:
5753 /* This is at most the number of bits in the mode. */
5754 significant = ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width) + 1)) - 1;
5755 break;
5756
5757 case IF_THEN_ELSE:
5758 significant &= (significant_bits (XEXP (x, 1), mode)
5759 | significant_bits (XEXP (x, 2), mode));
5760 break;
5761 }
5762
5763 return significant;
5764}
5765\f
5766/* Return the number of bits at the high-order end of X that are known to
5767 be equal to the sign bit. This number will always be between 1 and
5768 the number of bits in the mode of X. MODE is the mode to be used
5769 if X is VOIDmode. */
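/* For example, (const_int -4) in a 32-bit mode: the complement,
masked, is 3, and floor_log2 (3) == 1, so there are
32 - 1 - 1 == 30 sign bit copies (bits 31 down to 2 are all
copies of the sign bit). */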
5770
5771static int
5772num_sign_bit_copies (x, mode)
5773 rtx x;
5774 enum machine_mode mode;
5775{
5776 enum rtx_code code = GET_CODE (x);
5777 int bitwidth;
5778 int num0, num1, result;
5779 unsigned HOST_WIDE_INT sig;
5780 rtx tem;
5781
5782 /* If we weren't given a mode, use the mode of X. If the mode is still
5783 VOIDmode, we don't know anything. */
5784
5785 if (mode == VOIDmode)
5786 mode = GET_MODE (x);
5787
5788 if (mode == VOIDmode)
5789 return 0;
5790
5791 bitwidth = GET_MODE_BITSIZE (mode);
5792
5793 switch (code)
5794 {
5795 case REG:
5796 if (significant_valid && reg_sign_bit_copies[REGNO (x)] != 0)
5797 return reg_sign_bit_copies[REGNO (x)];
5798
5799 tem = get_last_value (x);
5800 if (tem != 0)
5801 return num_sign_bit_copies (tem, mode);
5802 break;
5803
5804 case CONST_INT:
5805 /* If the constant is negative, take its 1's complement and remask.
5806 Then see how many zero bits we have. */
5807 sig = INTVAL (x) & GET_MODE_MASK (mode);
5808 if (sig & ((HOST_WIDE_INT) 1 << (bitwidth - 1)))
5809 sig = (~ sig) & GET_MODE_MASK (mode);
5810
5811 return (sig == 0 ? bitwidth : bitwidth - floor_log2 (sig) - 1);
5812
5813 case SUBREG:
5814 /* For a smaller object, just ignore the high bits. */
5815 if (bitwidth <= GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))))
5816 {
5817 num0 = num_sign_bit_copies (SUBREG_REG (x), VOIDmode);
5818 return MAX (1, (num0
5819 - (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
5820 - bitwidth)));
5821 }
5822 break;
5823
5824 case SIGN_EXTRACT:
5825 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
5826 return MAX (1, bitwidth - INTVAL (XEXP (x, 1)));
5827 break;
5828
5829 case SIGN_EXTEND:
5830 return (bitwidth - GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
5831 + num_sign_bit_copies (XEXP (x, 0), VOIDmode));
5832
5833 case TRUNCATE:
5834 /* For a smaller object, just ignore the high bits. */
5835 num0 = num_sign_bit_copies (XEXP (x, 0), VOIDmode);
5836 return MAX (1, (num0 - (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
5837 - bitwidth)));
5838
5839 case NOT:
5840 return num_sign_bit_copies (XEXP (x, 0), mode);
5841
5842 case ROTATE: case ROTATERT:
5843 /* If we are rotating left by a number of bits less than the number
5844 of sign bit copies, we can just subtract that amount from the
5845 number. */
5846 if (GET_CODE (XEXP (x, 1)) == CONST_INT
5847 && INTVAL (XEXP (x, 1)) >= 0 && INTVAL (XEXP (x, 1)) < bitwidth)
5848 {
5849 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
5850 return MAX (1, num0 - (code == ROTATE ? INTVAL (XEXP (x, 1))
5851 : bitwidth - INTVAL (XEXP (x, 1))));
5852 }
5853 break;
5854
5855 case NEG:
5856 /* In general, this subtracts one sign bit copy. But if the value
5857 is known to be positive, the number of sign bit copies is the
5858 same as that of the input. Finally, if the input has just one
5859 significant bit, all the bits are copies of the sign bit. */
5860 sig = significant_bits (XEXP (x, 0), mode);
5861 if (sig == 1)
5862 return bitwidth;
5863
5864 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
5865 if (num0 > 1
5866 && (((HOST_WIDE_INT) 1 << (bitwidth - 1)) & sig))
5867 num0--;
5868
5869 return num0;
5870
5871 case IOR: case AND: case XOR:
5872 case SMIN: case SMAX: case UMIN: case UMAX:
5873 /* Logical operations will preserve the number of sign-bit copies.
5874 MIN and MAX operations always return one of the operands. */
5875 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
5876 num1 = num_sign_bit_copies (XEXP (x, 1), mode);
5877 return MIN (num0, num1);
5878
5879 case PLUS: case MINUS:
5880 /* For addition and subtraction, we can have a 1-bit carry. However,
5881 if we are subtracting 1 from a positive number, there will not
5882 be such a carry. Furthermore, if the positive number is known to
5883 be 0 or 1, we know the result is either -1 or 0. */
5884
5885 if (code == PLUS && XEXP (x, 1) == constm1_rtx)
5886 {
5887 sig = significant_bits (XEXP (x, 0), mode);
5888 if ((((HOST_WIDE_INT) 1 << (bitwidth - 1)) & sig) == 0)
5889 return (sig == 1 || sig == 0 ? bitwidth
5890 : bitwidth - floor_log2 (sig));
5891 }
5892
5893 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
5894 num1 = num_sign_bit_copies (XEXP (x, 1), mode);
5895 return MAX (1, MIN (num0, num1) - 1);
5896
5897 case MULT:
5898 /* The number of bits of the product is the sum of the number of
5899 bits of both terms. However, unless one of the terms is known
5900 to be positive, we must allow for an additional bit since negating
5901 a negative number can remove one sign bit copy. */
5902
5903 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
5904 num1 = num_sign_bit_copies (XEXP (x, 1), mode);
5905
5906 result = bitwidth - (bitwidth - num0) - (bitwidth - num1);
5907 if (result > 0
5908 && ((significant_bits (XEXP (x, 0), mode)
5909 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
5910 && ((significant_bits (XEXP (x, 1), mode)
5911 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
5912 result--;
5913
5914 return MAX (1, result);
5915
5916 case UDIV:
5917 /* The result must be <= the first operand. */
5918 return num_sign_bit_copies (XEXP (x, 0), mode);
5919
5920 case UMOD:
5921 /* The result must be <= the second operand. */
5922 return num_sign_bit_copies (XEXP (x, 1), mode);
5923
5924 case DIV:
5925 /* Similar to unsigned division, except that we have to worry about
5926 the case where the divisor is negative, in which case we have
5927 to add 1. */
5928 result = num_sign_bit_copies (XEXP (x, 0), mode);
5929 if (result > 1
5930 && (significant_bits (XEXP (x, 1), mode)
5931 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
5932 result--;
5933
5934 return result;
5935
5936 case MOD:
5937 result = num_sign_bit_copies (XEXP (x, 1), mode);
5938 if (result > 1
5939 && (significant_bits (XEXP (x, 1), mode)
5940 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
5941 result--;
5942
5943 return result;
5944
5945 case ASHIFTRT:
5946 /* Shifts by a constant add to the number of bits equal to the
5947 sign bit. */
5948 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
5949 if (GET_CODE (XEXP (x, 1)) == CONST_INT
5950 && INTVAL (XEXP (x, 1)) > 0)
5951 num0 = MIN (bitwidth, num0 + INTVAL (XEXP (x, 1)));
5952
5953 return num0;
5954
5955 case ASHIFT:
5956 case LSHIFT:
5957 /* Left shifts destroy copies. */
5958 if (GET_CODE (XEXP (x, 1)) != CONST_INT
5959 || INTVAL (XEXP (x, 1)) < 0
5960 || INTVAL (XEXP (x, 1)) >= bitwidth)
5961 return 1;
5962
5963 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
5964 return MAX (1, num0 - INTVAL (XEXP (x, 1)));
5965
5966 case IF_THEN_ELSE:
5967 num0 = num_sign_bit_copies (XEXP (x, 1), mode);
5968 num1 = num_sign_bit_copies (XEXP (x, 2), mode);
5969 return MIN (num0, num1);
5970
5971#if STORE_FLAG_VALUE == -1
5972 case EQ: case NE: case GE: case GT: case LE: case LT:
5973 case GEU: case GTU: case LEU: case LTU:
5974 return bitwidth;
5975#endif
5976 }
5977
5978 /* If we haven't been able to figure it out by one of the above rules,
5979 see if some of the high-order bits are known to be zero. If so,
5980 count those bits and return one less than that amount. */
5981
5982 sig = significant_bits (x, mode);
5983 return sig == GET_MODE_MASK (mode) ? 1 : bitwidth - floor_log2 (sig) - 1;
5984}
5985\f
5986/* This function is called from `simplify_shift_const' to merge two
5987 outer operations. Specifically, we have already found that we need
5988 to perform operation *POP0 with constant *PCONST0 at the outermost
5989 position. We would now like to also perform OP1 with constant CONST1
5990 (with *POP0 being done last).
5991
5992 Return 1 if we can do the operation and update *POP0 and *PCONST0 with
5993 the resulting operation. *PCOMP_P is set to 1 if we would need to
5994 complement the innermost operand, otherwise it is unchanged.
5995
5996 MODE is the mode in which the operation will be done. No bits outside
5997 the width of this mode matter. It is assumed that the width of this mode
5998 is smaller than or equal to HOST_BITS_PER_WIDE_INT.
5999
6000 If *POP0 or OP1 is NIL, it means no operation is required. Only NEG, PLUS,
6001 IOR, XOR, and AND are supported. We may set *POP0 to SET if the proper
6002 result is simply *PCONST0.
6003
6004 If the resulting operation cannot be expressed as one operation, we
6005 return 0 and do not change *POP0, *PCONST0, and *PCOMP_P. */
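/* One sketch of its use: if *POP0 is IOR with constant C and OP1 is
AND with the same constant C, then ((a & C) | C) is just C, so the
table below sets *POP0 to SET. */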
6006
6007static int
6008merge_outer_ops (pop0, pconst0, op1, const1, mode, pcomp_p)
6009 enum rtx_code *pop0;
6010 HOST_WIDE_INT *pconst0;
6011 enum rtx_code op1;
6012 HOST_WIDE_INT const1;
6013 enum machine_mode mode;
6014 int *pcomp_p;
6015{
6016 enum rtx_code op0 = *pop0;
6017 HOST_WIDE_INT const0 = *pconst0;
6018
6019 const0 &= GET_MODE_MASK (mode);
6020 const1 &= GET_MODE_MASK (mode);
6021
6022 /* If OP0 is an AND, clear unimportant bits in CONST1. */
6023 if (op0 == AND)
6024 const1 &= const0;
6025
6026 /* If OP0 or OP1 is NIL, this is easy. Similarly if they are the same or
6027 if OP0 is SET. */
6028
6029 if (op1 == NIL || op0 == SET)
6030 return 1;
6031
6032 else if (op0 == NIL)
6033 op0 = op1, const0 = const1;
6034
6035 else if (op0 == op1)
6036 {
6037 switch (op0)
6038 {
6039 case AND:
6040 const0 &= const1;
6041 break;
6042 case IOR:
6043 const0 |= const1;
6044 break;
6045 case XOR:
6046 const0 ^= const1;
6047 break;
6048 case PLUS:
6049 const0 += const1;
6050 break;
6051 case NEG:
6052 op0 = NIL;
6053 break;
6054 }
6055 }
6056
6057 /* Otherwise, if either is a PLUS or NEG, we can't do anything. */
6058 else if (op0 == PLUS || op1 == PLUS || op0 == NEG || op1 == NEG)
6059 return 0;
6060
6061 /* If the two constants aren't the same, we can't do anything. The
6062 remaining six cases can all be done. */
6063 else if (const0 != const1)
6064 return 0;
6065
6066 else
6067 switch (op0)
6068 {
6069 case IOR:
6070 if (op1 == AND)
6071 /* (a & b) | b == b */
6072 op0 = SET;
6073 else /* op1 == XOR */
6074 /* (a ^ b) | b == a | b */
6075 ;
6076 break;
6077
6078 case XOR:
6079 if (op1 == AND)
6080 /* (a & b) ^ b == (~a) & b */
6081 op0 = AND, *pcomp_p = 1;
6082 else /* op1 == IOR */
6083 /* (a | b) ^ b == a & ~b */
6084 op0 = AND, *pconst0 = ~ const0;
6085 break;
6086
6087 case AND:
6088 if (op1 == IOR)
6089 /* (a | b) & b == b */
6090 op0 = SET;
6091 else /* op1 == XOR */
6092 /* (a ^ b) & b == (~a) & b */
6093 *pcomp_p = 1;
6094 break;
6095 }
6096
6097 /* Check for NO-OP cases. */
6098 const0 &= GET_MODE_MASK (mode);
6099 if (const0 == 0
6100 && (op0 == IOR || op0 == XOR || op0 == PLUS))
6101 op0 = NIL;
6102 else if (const0 == 0 && op0 == AND)
6103 op0 = SET;
6104 else if (const0 == GET_MODE_MASK (mode) && op0 == AND)
6105 op0 = NIL;
6106
6107 *pop0 = op0;
6108 *pconst0 = const0;
6109
6110 return 1;
6111}
6112\f
6113/* Simplify a shift of VAROP by COUNT bits. CODE says what kind of shift.
6114 The result of the shift is RESULT_MODE. X, if non-zero, is an expression
6115 that we started with.
6116
6117 The shift is normally computed in the widest mode we find in VAROP, as
6118 long as it isn't a different number of words than RESULT_MODE. Exceptions
6119 are ASHIFTRT and ROTATE, which are always done in their original mode. */
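/* A small sketch with hypothetical operands: simplifying
(lshiftrt (mult X (const_int 8)) (const_int 3)) lets the MULT case
below rewrite the multiply as (ashift X (const_int 3)), after which
the opposite shifts can be merged. */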
6120
6121static rtx
6122simplify_shift_const (x, code, result_mode, varop, count)
6123 rtx x;
6124 enum rtx_code code;
6125 enum machine_mode result_mode;
6126 rtx varop;
6127 int count;
6128{
6129 enum rtx_code orig_code = code;
6130 int orig_count = count;
6131 enum machine_mode mode = result_mode;
6132 enum machine_mode shift_mode, tmode;
6133 int mode_words
6134 = (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
6135 /* We form (outer_op (code varop count) (outer_const)). */
6136 enum rtx_code outer_op = NIL;
6137 HOST_WIDE_INT outer_const;
6138 rtx const_rtx;
6139 int complement_p = 0;
6140 rtx new;
6141
6142 /* If we were given an invalid count, don't do anything except exactly
6143 what was requested. */
6144
6145 if (count < 0 || count > GET_MODE_BITSIZE (mode))
6146 {
6147 if (x)
6148 return x;
6149
6150 return gen_rtx (code, mode, varop, GEN_INT (count));
6151 }
6152
6153 /* Unless one of the branches of the `if' in this loop does a `continue',
6154 we will `break' the loop after the `if'. */
6155
6156 while (count != 0)
6157 {
6158 /* If we have an operand of (clobber (const_int 0)), just return that
6159 value. */
6160 if (GET_CODE (varop) == CLOBBER)
6161 return varop;
6162
6163 /* If we discovered we had to complement VAROP, leave. Making a NOT
6164 here would cause an infinite loop. */
6165 if (complement_p)
6166 break;
6167
6168 /* Convert ROTATERT to ROTATE. */
6169 if (code == ROTATERT)
6170 code = ROTATE, count = GET_MODE_BITSIZE (result_mode) - count;
6171
6172 /* Canonicalize LSHIFT to ASHIFT. */
6173 if (code == LSHIFT)
6174 code = ASHIFT;
6175
6176 /* We need to determine what mode we will do the shift in. If the
6177 shift is an ASHIFTRT or ROTATE, we must always do it in the mode it
6178 was originally done in. Otherwise, we can do it in MODE, the widest
6179 mode encountered. */
6180 shift_mode = (code == ASHIFTRT || code == ROTATE ? result_mode : mode);
6181
6182 /* Handle cases where the count is greater than the size of the mode
6183 minus 1. For ASHIFT, use the size minus one as the count (this can
6184 occur when simplifying (lshiftrt (ashiftrt ..))). For rotates,
6185 take the count modulo the size. For other shifts, the result is
6186 zero.
6187
6188 Since these shifts are being produced by the compiler by combining
6189 multiple operations, each of which are defined, we know what the
6190 result is supposed to be. */
6191
6192 if (count > GET_MODE_BITSIZE (shift_mode) - 1)
6193 {
6194 if (code == ASHIFTRT)
6195 count = GET_MODE_BITSIZE (shift_mode) - 1;
6196 else if (code == ROTATE || code == ROTATERT)
6197 count %= GET_MODE_BITSIZE (shift_mode);
6198 else
6199 {
6200 /* We can't simply return zero because there may be an
6201 outer op. */
6202 varop = const0_rtx;
6203 count = 0;
6204 break;
6205 }
6206 }
6207
6208 /* Negative counts are invalid and should not have been made (a
6209 programmer-specified negative count should have been handled
6210 above). */
6211 else if (count < 0)
6212 abort ();
6213
6214 /* An arithmetic right shift of a quantity known to be -1 or 0
6215 is a no-op. */
6216 if (code == ASHIFTRT
6217 && (num_sign_bit_copies (varop, shift_mode)
6218 == GET_MODE_BITSIZE (shift_mode)))
6219 {
6220 count = 0;
6221 break;
6222 }
6223
6224 /* We simplify the tests below and elsewhere by converting
6225 ASHIFTRT to LSHIFTRT if we know the sign bit is clear.
6226 `make_compound_operation' will convert it to an ASHIFTRT for
6227 those machines (such as Vax) that don't have an LSHIFTRT. */
6228 if (GET_MODE_BITSIZE (shift_mode) <= HOST_BITS_PER_WIDE_INT
6229 && code == ASHIFTRT
6230 && ((significant_bits (varop, shift_mode)
6231 & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (shift_mode) - 1)))
6232 == 0))
6233 code = LSHIFTRT;
6234
6235 switch (GET_CODE (varop))
6236 {
6237 case SIGN_EXTEND:
6238 case ZERO_EXTEND:
6239 case SIGN_EXTRACT:
6240 case ZERO_EXTRACT:
6241 new = expand_compound_operation (varop);
6242 if (new != varop)
6243 {
6244 varop = new;
6245 continue;
6246 }
6247 break;
6248
6249 case MEM:
6250 /* If we have (xshiftrt (mem ...) C) and C is MODE_WIDTH
6251 minus the width of a smaller mode, we can do this with a
6252 SIGN_EXTEND or ZERO_EXTEND from the narrower memory location. */
6253 if ((code == ASHIFTRT || code == LSHIFTRT)
6254 && ! mode_dependent_address_p (XEXP (varop, 0))
6255 && ! MEM_VOLATILE_P (varop)
6256 && (tmode = mode_for_size (GET_MODE_BITSIZE (mode) - count,
6257 MODE_INT, 1)) != BLKmode)
6258 {
6259#if BYTES_BIG_ENDIAN
6260 new = gen_rtx (MEM, tmode, XEXP (varop, 0));
6261#else
6262 new = gen_rtx (MEM, tmode,
6263 plus_constant (XEXP (varop, 0),
6264 count / BITS_PER_UNIT));
6265 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (varop);
6266 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (varop);
6267 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (varop);
6268#endif
6269 varop = gen_rtx_combine (code == ASHIFTRT ? SIGN_EXTEND
6270 : ZERO_EXTEND, mode, new);
6271 count = 0;
6272 continue;
6273 }
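	  /* Worked example (illustrative commentary, added): on a
	     little-endian target, (lshiftrt:SI (mem:SI ADDR) 16) is
	     (zero_extend:SI (mem:HI (plus ADDR 2))): the upper half of
	     the word is simply loaded from its own address and extended.  */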
6274 break;
6275
6276 case USE:
6277 /* Similar to the case above, except that we can only do this if
6278 the resulting mode is the same as that of the underlying
6279 MEM; we adjust the address depending on the *bits* endianness
6280 because of the way that bit-field extract insns are defined. */
6281 if ((code == ASHIFTRT || code == LSHIFTRT)
6282 && (tmode = mode_for_size (GET_MODE_BITSIZE (mode) - count,
6283 MODE_INT, 1)) != BLKmode
6284 && tmode == GET_MODE (XEXP (varop, 0)))
6285 {
6286#if BITS_BIG_ENDIAN
6287 new = XEXP (varop, 0);
6288#else
6289 new = copy_rtx (XEXP (varop, 0));
6290 SUBST (XEXP (new, 0),
6291 plus_constant (XEXP (new, 0),
6292 count / BITS_PER_UNIT));
6293#endif
6294
6295 varop = gen_rtx_combine (code == ASHIFTRT ? SIGN_EXTEND
6296 : ZERO_EXTEND, mode, new);
6297 count = 0;
6298 continue;
6299 }
6300 break;
6301
6302 case SUBREG:
6303 /* If VAROP is a SUBREG, strip it as long as the inner operand has
6304 the same number of words as what we've seen so far. Then store
6305 the widest mode in MODE. */
6306 if (SUBREG_WORD (varop) == 0
6307 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop)))
6308 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
6309 == mode_words))
6310 {
6311 varop = SUBREG_REG (varop);
6312 if (GET_MODE_SIZE (GET_MODE (varop)) > GET_MODE_SIZE (mode))
6313 mode = GET_MODE (varop);
6314 continue;
6315 }
6316 break;
6317
6318 case MULT:
6319 /* Some machines use MULT instead of ASHIFT because MULT
6320 is cheaper. But it is still better on those machines to
6321 merge two shifts into one. */
6322 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
6323 && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0)
6324 {
6325 varop = gen_binary (ASHIFT, GET_MODE (varop), XEXP (varop, 0),
6326 GEN_INT (exact_log2 (INTVAL (XEXP (varop, 1)))));
6327 continue;
6328 }
6329 break;
6330
6331 case UDIV:
6332 /* Similar, for when divides are cheaper. */
6333 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
6334 && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0)
6335 {
6336 varop = gen_binary (LSHIFTRT, GET_MODE (varop), XEXP (varop, 0),
6337 GEN_INT (exact_log2 (INTVAL (XEXP (varop, 1)))));
6338 continue;
6339 }
6340 break;
6341
6342 case ASHIFTRT:
6343 /* If we are extracting just the sign bit of an arithmetic right
6344 shift, that shift is not needed. */
6345 if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1)
6346 {
6347 varop = XEXP (varop, 0);
6348 continue;
6349 }
6350
6351 /* ... fall through ... */
6352
6353 case LSHIFTRT:
6354 case ASHIFT:
6355 case LSHIFT:
6356 case ROTATE:
6357 /* Here we have two nested shifts. The result is usually the
6358 AND of a new shift with a mask. We compute the result below. */
6359 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
6360 && INTVAL (XEXP (varop, 1)) >= 0
6361 && INTVAL (XEXP (varop, 1)) < GET_MODE_BITSIZE (GET_MODE (varop))
6362 && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
6363 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
6364 {
6365 enum rtx_code first_code = GET_CODE (varop);
6366 int first_count = INTVAL (XEXP (varop, 1));
6367 unsigned HOST_WIDE_INT mask;
6368 rtx mask_rtx;
6369 rtx inner;
6370
6371 if (first_code == LSHIFT)
6372 first_code = ASHIFT;
6373
6374 /* We have one common special case. We can't do any merging if
6375 the inner code is an ASHIFTRT of a smaller mode. However, if
6376 we have (ashift:M1 (subreg:M1 (ashiftrt:M2 FOO C1) 0) C2)
6377 with C2 == GET_MODE_BITSIZE (M1) - GET_MODE_BITSIZE (M2),
6378 we can convert it to
6379 (ashiftrt:M1 (ashift:M1 (and:M1 (subreg:M1 FOO 0) C3) C2) C1).
6380 This simplifies certain SIGN_EXTEND operations. */
6381 if (code == ASHIFT && first_code == ASHIFTRT
6382 && (GET_MODE_BITSIZE (result_mode)
6383 - GET_MODE_BITSIZE (GET_MODE (varop))) == count)
6384 {
6385 /* C3 has the low-order C1 bits zero. */
6386
6387 mask = (GET_MODE_MASK (mode)
6388 & ~ (((HOST_WIDE_INT) 1 << first_count) - 1));
6389
6390 varop = simplify_and_const_int (NULL_RTX, result_mode,
6391 XEXP (varop, 0), mask);
6392 varop = simplify_shift_const (NULL_RTX, ASHIFT, result_mode,
6393 varop, count);
6394 count = first_count;
6395 code = ASHIFTRT;
6396 continue;
6397 }
6398
6399 /* If this was (ashiftrt (ashift foo C1) C2) and FOO has more
6400 than C1 high-order bits equal to the sign bit, we can convert
6401 this to either an ASHIFT or an ASHIFTRT depending on the
6402 two counts.
6403
6404 We cannot do this if VAROP's mode is not SHIFT_MODE. */
6405
6406 if (code == ASHIFTRT && first_code == ASHIFT
6407 && GET_MODE (varop) == shift_mode
6408 && (num_sign_bit_copies (XEXP (varop, 0), shift_mode)
6409 > first_count))
6410 {
6411 count -= first_count;
6412 if (count < 0)
6413 count = - count, code = ASHIFT;
6414 varop = XEXP (varop, 0);
6415 continue;
6416 }
6417
6418 /* There are some cases we can't do. If CODE is ASHIFTRT,
6419 we can only do this if FIRST_CODE is also ASHIFTRT.
6420
6421 We can't do the case when CODE is ROTATE and FIRST_CODE is
6422 ASHIFTRT.
6423
6424 If the mode of this shift is not the mode of the outer shift,
6425 we can't do this if either shift is ASHIFTRT or ROTATE.
6426
6427 Finally, we can't do any of these if the mode is too wide
6428 unless the codes are the same.
6429
6430 Handle the case where the shift codes are the same
6431 first. */
6432
6433 if (code == first_code)
6434 {
6435 if (GET_MODE (varop) != result_mode
6436 && (code == ASHIFTRT || code == ROTATE))
6437 break;
6438
6439 count += first_count;
6440 varop = XEXP (varop, 0);
6441 continue;
6442 }
6443
6444 if (code == ASHIFTRT
6445 || (code == ROTATE && first_code == ASHIFTRT)
6446 || GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT
6447 || (GET_MODE (varop) != result_mode
6448 && (first_code == ASHIFTRT || first_code == ROTATE
6449 || code == ROTATE)))
6450 break;
6451
6452 /* To compute the mask to apply after the shift, shift the
6453 significant bits of the inner shift the same way the
6454 outer shift will. */
6455
6456 mask_rtx = GEN_INT (significant_bits (varop, GET_MODE (varop)));
6457
6458 mask_rtx
6459 = simplify_binary_operation (code, result_mode, mask_rtx,
6460 GEN_INT (count));
6461
6462 /* Give up if we can't compute an outer operation to use. */
6463 if (mask_rtx == 0
6464 || GET_CODE (mask_rtx) != CONST_INT
6465 || ! merge_outer_ops (&outer_op, &outer_const, AND,
6466 INTVAL (mask_rtx),
6467 result_mode, &complement_p))
6468 break;
6469
6470 /* If the shifts are in the same direction, we add the
6471 counts. Otherwise, we subtract them. */
6472 if ((code == ASHIFTRT || code == LSHIFTRT)
6473 == (first_code == ASHIFTRT || first_code == LSHIFTRT))
6474 count += first_count;
6475 else
6476 count -= first_count;
6477
6478 /* If COUNT is positive, the new shift is usually CODE,
6479 except for the two exceptions below, in which case it is
6480 FIRST_CODE. If the count is negative, FIRST_CODE should
6481 always be used. */
6482 if (count > 0
6483 && ((first_code == ROTATE && code == ASHIFT)
6484 || (first_code == ASHIFTRT && code == LSHIFTRT)))
6485 code = first_code;
6486 else if (count < 0)
6487 code = first_code, count = - count;
6488
6489 varop = XEXP (varop, 0);
6490 continue;
6491 }
6492
6493 /* If we have (A << B << C) for any shift, we can convert this to
6494 (A << C << B). This wins if A is a constant. Only try this if
6495 B is not a constant. */
6496
6497 else if (GET_CODE (varop) == code
6498 && GET_CODE (XEXP (varop, 1)) != CONST_INT
6499 && 0 != (new
6500 = simplify_binary_operation (code, mode,
6501 XEXP (varop, 0),
6502 GEN_INT (count))))
6503 {
6504 varop = gen_rtx_combine (code, mode, new, XEXP (varop, 1));
6505 count = 0;
6506 continue;
6507 }
6508 break;
6509
6510 case NOT:
6511 /* Make this fit the case below. */
6512 varop = gen_rtx_combine (XOR, mode, XEXP (varop, 0),
6513 GEN_INT (GET_MODE_MASK (mode)));
6514 continue;
6515
6516 case IOR:
6517 case AND:
6518 case XOR:
6519 /* If we have (xshiftrt (ior (plus X (const_int -1)) X) C)
6520 with C the size of VAROP - 1 and the shift is logical if
6521 STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
6522 we have an (le X 0) operation. If we have an arithmetic shift
6523 and STORE_FLAG_VALUE is 1 or we have a logical shift with
6524 STORE_FLAG_VALUE of -1, we have a (neg (le X 0)) operation. */
6525
6526 if (GET_CODE (varop) == IOR && GET_CODE (XEXP (varop, 0)) == PLUS
6527 && XEXP (XEXP (varop, 0), 1) == constm1_rtx
6528 && (STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
6529 && (code == LSHIFTRT || code == ASHIFTRT)
6530 && count == GET_MODE_BITSIZE (GET_MODE (varop)) - 1
6531 && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
6532 {
6533 count = 0;
6534 varop = gen_rtx_combine (LE, GET_MODE (varop), XEXP (varop, 1),
6535 const0_rtx);
6536
6537 if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
6538 varop = gen_rtx_combine (NEG, GET_MODE (varop), varop);
6539
6540 continue;
6541 }
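	  /* Worked example (illustrative commentary, added): in SImode with
	     STORE_FLAG_VALUE == 1, (lshiftrt (ior (plus X -1) X) 31)
	     extracts the sign bit of (X - 1) | X, which is set exactly when
	     X <= 0, so the whole expression is (le X 0).  */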
6542
6543 /* If we have (shift (logical)), move the logical to the outside
6544 to allow it to possibly combine with another logical and the
6545 shift to combine with another shift. This also canonicalizes to
6546 what a ZERO_EXTRACT looks like. Also, some machines have
6547 (and (shift)) insns. */
6548
6549 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
6550 && (new = simplify_binary_operation (code, result_mode,
6551 XEXP (varop, 1),
6552 GEN_INT (count))) != 0
6553 && merge_outer_ops (&outer_op, &outer_const, GET_CODE (varop),
6554 INTVAL (new), result_mode, &complement_p))
6555 {
6556 varop = XEXP (varop, 0);
6557 continue;
6558 }
6559
6560 /* If we can't do that, try to simplify the shift in each arm of the
6561 logical expression, make a new logical expression, and apply
6562 the inverse distributive law. */
6563 {
6564 rtx lhs = simplify_shift_const (NULL_RTX, code, result_mode,
6565 XEXP (varop, 0), count);
6566 rtx rhs = simplify_shift_const (NULL_RTX, code, result_mode,
6567 XEXP (varop, 1), count);
6568
6569 varop = gen_binary (GET_CODE (varop), result_mode, lhs, rhs);
6570 varop = apply_distributive_law (varop);
6571
6572 count = 0;
6573 }
6574 break;
6575
6576 case EQ:
6577 /* Convert (lshift (eq FOO 0) C) to (xor FOO 1) if STORE_FLAG_VALUE
6578 says that the sign bit can be tested, FOO has mode MODE, C is
6579 GET_MODE_BITSIZE (MODE) - 1, and FOO has only the low-order bit
6580 significant. */
6581 if (code == LSHIFT
6582 && XEXP (varop, 1) == const0_rtx
6583 && GET_MODE (XEXP (varop, 0)) == result_mode
6584 && count == GET_MODE_BITSIZE (result_mode) - 1
6585 && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
6586 && ((STORE_FLAG_VALUE
6587 & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (result_mode) - 1))))
6588 && significant_bits (XEXP (varop, 0), result_mode) == 1
6589 && merge_outer_ops (&outer_op, &outer_const, XOR,
6590 (HOST_WIDE_INT) 1, result_mode,
6591 &complement_p))
6592 {
6593 varop = XEXP (varop, 0);
6594 count = 0;
6595 continue;
6596 }
6597 break;
6598
6599 case NEG:
6600 /* (lshiftrt (neg A) C) where A is either 0 or 1 and C is one less
6601 than the number of bits in the mode is equivalent to A. */
6602 if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1
6603 && significant_bits (XEXP (varop, 0), result_mode) == 1)
6604 {
6605 varop = XEXP (varop, 0);
6606 count = 0;
6607 continue;
6608 }
6609
6610 /* NEG commutes with ASHIFT since it is multiplication. Move the
6611 NEG outside to allow shifts to combine. */
6612 if (code == ASHIFT
6613 && merge_outer_ops (&outer_op, &outer_const, NEG,
6614 (HOST_WIDE_INT) 0, result_mode,
6615 &complement_p))
6616 {
6617 varop = XEXP (varop, 0);
6618 continue;
6619 }
6620 break;
6621
6622 case PLUS:
6623 /* (lshiftrt (plus A -1) C) where A is either 0 or 1 and C
6624 is one less than the number of bits in the mode is
6625 equivalent to (xor A 1). */
6626 if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1
6627 && XEXP (varop, 1) == constm1_rtx
6628 && significant_bits (XEXP (varop, 0), result_mode) == 1
6629 && merge_outer_ops (&outer_op, &outer_const, XOR,
6630 (HOST_WIDE_INT) 1, result_mode,
6631 &complement_p))
6632 {
6633 count = 0;
6634 varop = XEXP (varop, 0);
6635 continue;
6636 }
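	  /* Worked example (illustrative commentary, added): with A known
	     to be 0 or 1 in SImode, (lshiftrt (plus A -1) 31) yields 1 for
	     A == 0 (0 - 1 == -1 has the sign bit set) and 0 for A == 1,
	     which is exactly (xor A 1).  */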
6637
6638 /* If we have (xshiftrt (plus FOO BAR) C), and the only bits
6639 significant in BAR are those being shifted out and those
6640 bits are known zero in FOO, we can replace the PLUS with FOO.
6641 Similarly in the other operand order. This code occurs when
6642 we are computing the size of a variable-size array. */
6643
6644 if ((code == ASHIFTRT || code == LSHIFTRT)
6645 && count < HOST_BITS_PER_WIDE_INT
6646 && significant_bits (XEXP (varop, 1), result_mode) >> count == 0
6647 && (significant_bits (XEXP (varop, 1), result_mode)
6648 & significant_bits (XEXP (varop, 0), result_mode)) == 0)
6649 {
6650 varop = XEXP (varop, 0);
6651 continue;
6652 }
6653 else if ((code == ASHIFTRT || code == LSHIFTRT)
6654 && count < HOST_BITS_PER_WIDE_INT
6655 && 0 == (significant_bits (XEXP (varop, 0), result_mode)
6656 >> count)
6657 && 0 == (significant_bits (XEXP (varop, 0), result_mode)
6658 & significant_bits (XEXP (varop, 1),
6659 result_mode)))
6660 {
6661 varop = XEXP (varop, 1);
6662 continue;
6663 }
6664
6665 /* (ashift (plus foo C) N) is (plus (ashift foo N) C'). */
6666 if (code == ASHIFT
6667 && GET_CODE (XEXP (varop, 1)) == CONST_INT
6668 && (new = simplify_binary_operation (ASHIFT, result_mode,
6669 XEXP (varop, 1),
6670 GEN_INT (count))) != 0
6671 && merge_outer_ops (&outer_op, &outer_const, PLUS,
6672 INTVAL (new), result_mode, &complement_p))
6673 {
6674 varop = XEXP (varop, 0);
6675 continue;
6676 }
6677 break;
6678
6679 case MINUS:
6680 /* If we have (xshiftrt (minus (ashiftrt X C) X) C)
6681 with C the size of VAROP - 1 and the shift is logical if
6682 STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
6683 we have a (gt X 0) operation. If the shift is arithmetic with
6684 STORE_FLAG_VALUE of 1 or logical with STORE_FLAG_VALUE == -1,
6685 we have a (neg (gt X 0)) operation. */
6686
6687 if (GET_CODE (XEXP (varop, 0)) == ASHIFTRT
6688 && count == GET_MODE_BITSIZE (GET_MODE (varop)) - 1
6689 && (STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
6690 && (code == LSHIFTRT || code == ASHIFTRT)
6691 && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT
6692 && INTVAL (XEXP (XEXP (varop, 0), 1)) == count
6693 && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
6694 {
6695 count = 0;
6696 varop = gen_rtx_combine (GT, GET_MODE (varop), XEXP (varop, 1),
6697 const0_rtx);
6698
6699 if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
6700 varop = gen_rtx_combine (NEG, GET_MODE (varop), varop);
6701
6702 continue;
6703 }
6704 break;
6705 }
6706
6707 break;
6708 }
6709
6710 /* We need to determine what mode to do the shift in. If the shift is
6711 an ASHIFTRT or ROTATE, we must always do it in the mode it was originally
6712 done in. Otherwise, we can do it in MODE, the widest mode encountered.
6713 The code we care about is that of the shift that will actually be done,
6714 not the shift that was originally requested. */
6715 shift_mode = (code == ASHIFTRT || code == ROTATE ? result_mode : mode);
6716
6717 /* We have now finished analyzing the shift. The result should be
6718 a shift of type CODE with SHIFT_MODE shifting VAROP COUNT places. If
6719 OUTER_OP is non-NIL, it is an operation that needs to be applied
6720 to the result of the shift. OUTER_CONST is the relevant constant,
6721 but we must turn off all bits turned off in the shift.
6722
6723 If we were passed a value for X, see if we can use any pieces of
6724 it. If not, make new rtx. */
6725
6726 if (x && GET_RTX_CLASS (GET_CODE (x)) == '2'
6727 && GET_CODE (XEXP (x, 1)) == CONST_INT
6728 && INTVAL (XEXP (x, 1)) == count)
6729 const_rtx = XEXP (x, 1);
6730 else
6731 const_rtx = GEN_INT (count);
6732
6733 if (x && GET_CODE (XEXP (x, 0)) == SUBREG
6734 && GET_MODE (XEXP (x, 0)) == shift_mode
6735 && SUBREG_REG (XEXP (x, 0)) == varop)
6736 varop = XEXP (x, 0);
6737 else if (GET_MODE (varop) != shift_mode)
6738 varop = gen_lowpart_for_combine (shift_mode, varop);
6739
6740 /* If we can't make the SUBREG, try to return what we were given. */
6741 if (GET_CODE (varop) == CLOBBER)
6742 return x ? x : varop;
6743
6744 new = simplify_binary_operation (code, shift_mode, varop, const_rtx);
6745 if (new != 0)
6746 x = new;
6747 else
6748 {
6749 if (x == 0 || GET_CODE (x) != code || GET_MODE (x) != shift_mode)
6750 x = gen_rtx_combine (code, shift_mode, varop, const_rtx);
6751
6752 SUBST (XEXP (x, 0), varop);
6753 SUBST (XEXP (x, 1), const_rtx);
6754 }
6755
6756 /* If we were doing a LSHIFTRT in a wider mode than it was originally,
6757 turn off all the bits that the shift would have turned off. */
6758 if (orig_code == LSHIFTRT && result_mode != shift_mode)
6759 x = simplify_and_const_int (NULL_RTX, shift_mode, x,
6760 GET_MODE_MASK (result_mode) >> orig_count);
6761
6762 /* Do the remainder of the processing in RESULT_MODE. */
6763 x = gen_lowpart_for_combine (result_mode, x);
6764
6765 /* If COMPLEMENT_P is set, we have to complement X before doing the outer
6766 operation. */
6767 if (complement_p)
6768 x = gen_unary (NOT, result_mode, x);
6769
6770 if (outer_op != NIL)
6771 {
6772 if (GET_MODE_BITSIZE (result_mode) < HOST_BITS_PER_WIDE_INT
6773 outer_const &= GET_MODE_MASK (result_mode);
6774
6775 if (outer_op == AND)
6776 x = simplify_and_const_int (NULL_RTX, result_mode, x, outer_const);
6777 else if (outer_op == SET)
6778 /* This means that we have determined that the result is
6779 equivalent to a constant. This should be rare. */
6780 x = GEN_INT (outer_const);
6781 else if (GET_RTX_CLASS (outer_op) == '1')
6782 x = gen_unary (outer_op, result_mode, x);
6783 else
6784 x = gen_binary (outer_op, result_mode, x, GEN_INT (outer_const));
6785 }
6786
6787 return x;
6788}
6789\f
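/* Illustrative sketch (added commentary, not part of combine.c; compile
   separately): the nested-shift merging above rests on two facts about
   logical shifts in a fixed width.  Shifts in the same direction add
   their counts, and an opposite-direction pair is a shift by the
   difference combined with an AND mask, computed by shifting an
   all-ones value the same way the pair would.  Assumes 32-bit
   unsigned int and in-range counts.  */

#include <assert.h>

static unsigned int
merge_same_direction (unsigned int x, int c1, int c2)
{
  /* (x >> c1) >> c2 == x >> (c1 + c2), provided c1 + c2 < 32.  */
  return x >> (c1 + c2);
}

static unsigned int
merge_opposite (unsigned int x, int c1, int c2)
{
  /* (x << c1) >> c2, with c1 >= c2, is a left shift by c1 - c2
     followed by a mask of the bits the pair can actually produce.  */
  unsigned int mask = (0xffffffffu << c1) >> c2;

  return (x << (c1 - c2)) & mask;
}

int
main (void)
{
  unsigned int x = 0xdeadbeefu;

  assert (((x >> 3) >> 7) == merge_same_direction (x, 3, 7));
  assert (((x << 8) >> 5) == merge_opposite (x, 8, 5));
  return 0;
}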
6790/* Like recog, but we receive the address of a pointer to a new pattern.
6791 We try to match the rtx that the pointer points to.
6792 If that fails, we may try to modify or replace the pattern,
6793 storing the replacement into the same pointer object.
6794
6795 Modifications include deletion or addition of CLOBBERs.
6796
6797 PNOTES is a pointer to a location where any REG_UNUSED notes added for
6798 the CLOBBERs are placed.
6799
6800 The value is the final insn code from the pattern ultimately matched,
6801 or -1. */
6802
6803static int
6804recog_for_combine (pnewpat, insn, pnotes)
6805 rtx *pnewpat;
6806 rtx insn;
6807 rtx *pnotes;
6808{
6809 register rtx pat = *pnewpat;
6810 int insn_code_number;
6811 int num_clobbers_to_add = 0;
6812 int i;
6813 rtx notes = 0;
6814
6815 /* Is the result of combination a valid instruction? */
6816 insn_code_number = recog (pat, insn, &num_clobbers_to_add);
6817
6818 /* If it isn't, there is the possibility that we previously had an insn
6819 that clobbered some register as a side effect, but the combined
6820 insn doesn't need to do that. So try once more without the clobbers
6821 unless this represents an ASM insn. */
6822
6823 if (insn_code_number < 0 && ! check_asm_operands (pat)
6824 && GET_CODE (pat) == PARALLEL)
6825 {
6826 int pos;
6827
6828 for (pos = 0, i = 0; i < XVECLEN (pat, 0); i++)
6829 if (GET_CODE (XVECEXP (pat, 0, i)) != CLOBBER)
6830 {
6831 if (i != pos)
6832 SUBST (XVECEXP (pat, 0, pos), XVECEXP (pat, 0, i));
6833 pos++;
6834 }
6835
6836 SUBST_INT (XVECLEN (pat, 0), pos);
6837
6838 if (pos == 1)
6839 pat = XVECEXP (pat, 0, 0);
6840
6841 insn_code_number = recog (pat, insn, &num_clobbers_to_add);
6842 }
6843
6844 /* If we had any clobbers to add, make a new pattern that contains
6845 them. Then check to make sure that all of them are dead. */
6846 if (num_clobbers_to_add)
6847 {
6848 rtx newpat = gen_rtx (PARALLEL, VOIDmode,
6849 gen_rtvec (GET_CODE (pat) == PARALLEL
6850 ? XVECLEN (pat, 0) + num_clobbers_to_add
6851 : num_clobbers_to_add + 1));
6852
6853 if (GET_CODE (pat) == PARALLEL)
6854 for (i = 0; i < XVECLEN (pat, 0); i++)
6855 XVECEXP (newpat, 0, i) = XVECEXP (pat, 0, i);
6856 else
6857 XVECEXP (newpat, 0, 0) = pat;
6858
6859 add_clobbers (newpat, insn_code_number);
6860
6861 for (i = XVECLEN (newpat, 0) - num_clobbers_to_add;
6862 i < XVECLEN (newpat, 0); i++)
6863 {
6864 if (GET_CODE (XEXP (XVECEXP (newpat, 0, i), 0)) == REG
6865 && ! reg_dead_at_p (XEXP (XVECEXP (newpat, 0, i), 0), insn))
6866 return -1;
6867 notes = gen_rtx (EXPR_LIST, REG_UNUSED,
6868 XEXP (XVECEXP (newpat, 0, i), 0), notes);
6869 }
6870 pat = newpat;
6871 }
6872
6873 *pnewpat = pat;
6874 *pnotes = notes;
6875
6876 return insn_code_number;
6877}
6878\f
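/* Illustrative sketch (added commentary, not part of combine.c; compile
   separately): the CLOBBER-stripping loop above uses a classic in-place
   compaction idiom, with a write index POS trailing the read index I.
   The same idiom on a plain array; the names below are hypothetical.  */

#include <stdio.h>

static int
compact_nonzero (int *v, int n)
{
  int i, pos = 0;

  for (i = 0; i < n; i++)
    if (v[i] != 0)		/* keep, like the non-CLOBBER elements */
      {
	if (i != pos)
	  v[pos] = v[i];
	pos++;
      }

  return pos;			/* new length, like SUBST_INT (XVECLEN (pat, 0), pos) */
}

int
main (void)
{
  int v[] = { 4, 0, 8, 0, 15 };
  int i, n = compact_nonzero (v, 5);

  for (i = 0; i < n; i++)
    printf ("%d ", v[i]);	/* prints: 4 8 15 */
  printf ("\n");
  return 0;
}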
6879/* Like gen_lowpart but for use by combine. In combine it is not possible
6880 to create any new pseudoregs. However, it is safe to create
6881 invalid memory addresses, because combine will try to recognize
6882 them and all they will do is make the combine attempt fail.
6883
6884 If for some reason this cannot do its job, an rtx
6885 (clobber (const_int 0)) is returned.
6886 An insn containing that will not be recognized. */
6887
6888#undef gen_lowpart
6889
6890static rtx
6891gen_lowpart_for_combine (mode, x)
6892 enum machine_mode mode;
6893 register rtx x;
6894{
6895 rtx result;
6896
6897 if (GET_MODE (x) == mode)
6898 return x;
6899
6900 if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
6901 return gen_rtx (CLOBBER, GET_MODE (x), const0_rtx);
6902
6903 /* X might be a paradoxical (subreg (mem)). In that case, gen_lowpart
6904 won't know what to do. So we will strip off the SUBREG here and
6905 process normally. */
6906 if (GET_CODE (x) == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
6907 {
6908 x = SUBREG_REG (x);
6909 if (GET_MODE (x) == mode)
6910 return x;
6911 }
6912
6913 result = gen_lowpart_common (mode, x);
6914 if (result)
6915 return result;
6916
6917 if (GET_CODE (x) == MEM)
6918 {
6919 register int offset = 0;
6920 rtx new;
6921
6922 /* Refuse to work on a volatile memory ref or one with a mode-dependent
6923 address. */
6924 if (MEM_VOLATILE_P (x) || mode_dependent_address_p (XEXP (x, 0)))
6925 return gen_rtx (CLOBBER, GET_MODE (x), const0_rtx);
6926
6927 /* If we want to refer to something bigger than the original memref,
6928 generate a perverse subreg instead. That will force a reload
6929 of the original memref X. */
6930 if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (mode))
6931 return gen_rtx (SUBREG, mode, x, 0);
6932
6933#if WORDS_BIG_ENDIAN
6934 offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
6935 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
6936#endif
6937#if BYTES_BIG_ENDIAN
6938 /* Adjust the address so that the address-after-the-data
6939 is unchanged. */
6940 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
6941 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
6942#endif
6943 new = gen_rtx (MEM, mode, plus_constant (XEXP (x, 0), offset));
6944 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
6945 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x);
6946 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
6947 return new;
6948 }
6949
6950 /* If X is a comparison operator, rewrite it in a new mode. This
6951 probably won't match, but may allow further simplifications. */
6952 else if (GET_RTX_CLASS (GET_CODE (x)) == '<')
6953 return gen_rtx_combine (GET_CODE (x), mode, XEXP (x, 0), XEXP (x, 1));
6954
6955 /* If we couldn't simplify X any other way, just enclose it in a
6956 SUBREG. Normally, this SUBREG won't match, but some patterns may
6957 include an explicit SUBREG or we may simplify it further in combine. */
6958 else
6959 {
6960 int word = 0;
6961
6962 if (WORDS_BIG_ENDIAN && GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD)
6963 word = ((GET_MODE_SIZE (GET_MODE (x))
6964 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD))
6965 / UNITS_PER_WORD);
6966 return gen_rtx (SUBREG, mode, x, word);
6967 }
6968}
6969\f
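/* Illustrative sketch (added commentary, not part of combine.c; compile
   separately): the endianness arithmetic in gen_lowpart_for_combine
   above keeps the address-after-the-data fixed when narrowing a memory
   reference.  A standalone version of the same offset computation; all
   parameter names are hypothetical and sizes are in bytes.  */

#include <stdio.h>

#define XMAX(a, b) ((a) > (b) ? (a) : (b))
#define XMIN(a, b) ((a) < (b) ? (a) : (b))

static int
lowpart_byte_offset (int inner_size, int outer_size, int units_per_word,
		     int words_big_endian, int bytes_big_endian)
{
  int offset = 0;

  if (words_big_endian)
    offset = (XMAX (inner_size, units_per_word)
	      - XMAX (outer_size, units_per_word));
  if (bytes_big_endian)
    offset -= (XMIN (units_per_word, outer_size)
	       - XMIN (units_per_word, inner_size));

  return offset;
}

int
main (void)
{
  /* Narrowing a 4-byte reference to 1 byte on a fully big-endian
     machine with 4-byte words: the low part is the last byte.  */
  printf ("%d\n", lowpart_byte_offset (4, 1, 4, 1, 1));	/* prints 3 */
  return 0;
}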
6970/* Make an rtx expression. This is a subset of gen_rtx and only supports
6971 expressions of 1, 2, or 3 operands, each of which are rtx expressions.
6972
6973 If the identical expression was previously in the insn (in the undobuf),
6974 it will be returned. Only if it is not found will a new expression
6975 be made. */
6976
6977/*VARARGS2*/
6978static rtx
6979gen_rtx_combine (va_alist)
6980 va_dcl
6981{
6982 va_list p;
6983 enum rtx_code code;
6984 enum machine_mode mode;
6985 int n_args;
6986 rtx args[3];
6987 int i, j;
6988 char *fmt;
6989 rtx rt;
6990
6991 va_start (p);
6992 code = va_arg (p, enum rtx_code);
6993 mode = va_arg (p, enum machine_mode);
6994 n_args = GET_RTX_LENGTH (code);
6995 fmt = GET_RTX_FORMAT (code);
6996
6997 if (n_args == 0 || n_args > 3)
6998 abort ();
6999
7000 /* Get each arg and verify that it is supposed to be an expression. */
7001 for (j = 0; j < n_args; j++)
7002 {
7003 if (*fmt++ != 'e')
7004 abort ();
7005
7006 args[j] = va_arg (p, rtx);
7007 }
7008
7009 /* See if this is in undobuf. Be sure we don't use objects that came
7010 from another insn; this could produce circular rtl structures. */
7011
7012 for (i = previous_num_undos; i < undobuf.num_undo; i++)
7013 if (!undobuf.undo[i].is_int
7014 && GET_CODE (undobuf.undo[i].old_contents.rtx) == code
7015 && GET_MODE (undobuf.undo[i].old_contents.rtx) == mode)
7016 {
7017 for (j = 0; j < n_args; j++)
7018 if (XEXP (undobuf.undo[i].old_contents.rtx, j) != args[j])
7019 break;
7020
7021 if (j == n_args)
7022 return undobuf.undo[i].old_contents.rtx;
7023 }
7024
7025 /* Otherwise make a new rtx. We know we have 1, 2, or 3 args.
7026 Use rtx_alloc instead of gen_rtx because it's faster on RISC. */
7027 rt = rtx_alloc (code);
7028 PUT_MODE (rt, mode);
7029 XEXP (rt, 0) = args[0];
7030 if (n_args > 1)
7031 {
7032 XEXP (rt, 1) = args[1];
7033 if (n_args > 2)
7034 XEXP (rt, 2) = args[2];
7035 }
7036 return rt;
7037}
7038
7039/* These routines make binary and unary operations by first seeing if they
7040 fold; if not, a new expression is allocated. */
7041
7042static rtx
7043gen_binary (code, mode, op0, op1)
7044 enum rtx_code code;
7045 enum machine_mode mode;
7046 rtx op0, op1;
7047{
7048 rtx result;
7049
7050 if (GET_RTX_CLASS (code) == '<')
7051 {
7052 enum machine_mode op_mode = GET_MODE (op0);
7053 if (op_mode == VOIDmode)
7054 op_mode = GET_MODE (op1);
7055 result = simplify_relational_operation (code, op_mode, op0, op1);
7056 }
7057 else
7058 result = simplify_binary_operation (code, mode, op0, op1);
7059
7060 if (result)
7061 return result;
7062
7063 /* Put complex operands first and constants second. */
7064 if (GET_RTX_CLASS (code) == 'c'
7065 && ((CONSTANT_P (op0) && GET_CODE (op1) != CONST_INT)
7066 || (GET_RTX_CLASS (GET_CODE (op0)) == 'o'
7067 && GET_RTX_CLASS (GET_CODE (op1)) != 'o')
7068 || (GET_CODE (op0) == SUBREG
7069 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (op0))) == 'o'
7070 && GET_RTX_CLASS (GET_CODE (op1)) != 'o')))
7071 return gen_rtx_combine (code, mode, op1, op0);
7072
7073 return gen_rtx_combine (code, mode, op0, op1);
7074}
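/* For example (illustrative, added): gen_binary (PLUS, SImode,
   const1_rtx, R), with R a register and no fold applicable, is
   reordered by the test above and comes back as
   (plus:SI R (const_int 1)), the canonical operand order that the
   recognizers expect.  */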
7075
7076static rtx
7077gen_unary (code, mode, op0)
7078 enum rtx_code code;
7079 enum machine_mode mode;
7080 rtx op0;
7081{
7082 rtx result = simplify_unary_operation (code, mode, op0, mode);
7083
7084 if (result)
7085 return result;
7086
7087 return gen_rtx_combine (code, mode, op0);
7088}
7089\f
7090/* Simplify a comparison between *POP0 and *POP1 where CODE is the
7091 comparison code that will be tested.
7092
7093 The result is a possibly different comparison code to use. *POP0 and
7094 *POP1 may be updated.
7095
7096 It is possible that we might detect that a comparison is either always
7097 true or always false. However, we do not perform general constant
7098 folding in combine, so this knowledge isn't useful. Such tautologies
7099 should have been detected earlier. Hence we ignore all such cases. */
7100
7101static enum rtx_code
7102simplify_comparison (code, pop0, pop1)
7103 enum rtx_code code;
7104 rtx *pop0;
7105 rtx *pop1;
7106{
7107 rtx op0 = *pop0;
7108 rtx op1 = *pop1;
7109 rtx tem, tem1;
7110 int i;
7111 enum machine_mode mode, tmode;
7112
7113 /* Try a few ways of applying the same transformation to both operands. */
7114 while (1)
7115 {
7116 /* If both operands are the same constant shift, see if we can ignore the
7117 shift. We can if the shift is a rotate or if the bits shifted out of
7118 this shift are not significant for either input and if the type of
7119 comparison is compatible with the shift. */
7120 if (GET_CODE (op0) == GET_CODE (op1)
7121 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
7122 && ((GET_CODE (op0) == ROTATE && (code == NE || code == EQ))
7123 || ((GET_CODE (op0) == LSHIFTRT
7124 || GET_CODE (op0) == ASHIFT || GET_CODE (op0) == LSHIFT)
7125 && (code != GT && code != LT && code != GE && code != LE))
7126 || (GET_CODE (op0) == ASHIFTRT
7127 && (code != GTU && code != LTU
7128 && code != GEU && code != LEU)))
7129 && GET_CODE (XEXP (op0, 1)) == CONST_INT
7130 && INTVAL (XEXP (op0, 1)) >= 0
7131 && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
7132 && XEXP (op0, 1) == XEXP (op1, 1))
7133 {
7134 enum machine_mode mode = GET_MODE (op0);
7135 unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
7136 int shift_count = INTVAL (XEXP (op0, 1));
7137
7138 if (GET_CODE (op0) == LSHIFTRT || GET_CODE (op0) == ASHIFTRT)
7139 mask &= (mask >> shift_count) << shift_count;
7140 else if (GET_CODE (op0) == ASHIFT || GET_CODE (op0) == LSHIFT)
7141 mask = (mask & (mask << shift_count)) >> shift_count;
7142
7143 if ((significant_bits (XEXP (op0, 0), mode) & ~ mask) == 0
7144 && (significant_bits (XEXP (op1, 0), mode) & ~ mask) == 0)
7145 op0 = XEXP (op0, 0), op1 = XEXP (op1, 0);
7146 else
7147 break;
7148 }
7149
7150 /* If both operands are AND's of a paradoxical SUBREG by constant, the
7151 SUBREGs are of the same mode, and, in both cases, the AND would
7152 be redundant if the comparison was done in the narrower mode,
7153 do the comparison in the narrower mode (e.g., we are AND'ing with 1
7154 and the operand's significant bits are 0xffffff01; in that case if
7155 we only care about QImode, we don't need the AND). This case occurs
7156 if the output mode of an scc insn is not SImode and
7157 STORE_FLAG_VALUE == 1 (e.g., the 386). */
7158
7159 else if (GET_CODE (op0) == AND && GET_CODE (op1) == AND
7160 && GET_CODE (XEXP (op0, 1)) == CONST_INT
7161 && GET_CODE (XEXP (op1, 1)) == CONST_INT
7162 && GET_CODE (XEXP (op0, 0)) == SUBREG
7163 && GET_CODE (XEXP (op1, 0)) == SUBREG
7164 && (GET_MODE_SIZE (GET_MODE (XEXP (op0, 0)))
7165 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (op0, 0)))))
7166 && (GET_MODE (SUBREG_REG (XEXP (op0, 0)))
7167 == GET_MODE (SUBREG_REG (XEXP (op1, 0))))
7168 && (significant_bits (SUBREG_REG (XEXP (op0, 0)),
7169 GET_MODE (SUBREG_REG (XEXP (op0, 0))))
7170 & ~ INTVAL (XEXP (op0, 1))) == 0
7171 && (significant_bits (SUBREG_REG (XEXP (op1, 0)),
7172 GET_MODE (SUBREG_REG (XEXP (op1, 0))))
7173 & ~ INTVAL (XEXP (op1, 1))) == 0)
7174 {
7175 op0 = SUBREG_REG (XEXP (op0, 0));
7176 op1 = SUBREG_REG (XEXP (op1, 0));
7177
7178 /* The resulting comparison is always unsigned since we masked off
7179 the original sign bit. */
7180 code = unsigned_condition (code);
7181 }
7182 else
7183 break;
7184 }
7185
7186 /* If the first operand is a constant, swap the operands and adjust the
7187 comparison code appropriately. */
7188 if (CONSTANT_P (op0))
7189 {
7190 tem = op0, op0 = op1, op1 = tem;
7191 code = swap_condition (code);
7192 }
7193
7194 /* We now enter a loop during which we will try to simplify the comparison.
7195 For the most part, we only are concerned with comparisons with zero,
7196 but some things may really be comparisons with zero but not start
7197 out looking that way. */
7198
7199 while (GET_CODE (op1) == CONST_INT)
7200 {
7201 enum machine_mode mode = GET_MODE (op0);
7202 int mode_width = GET_MODE_BITSIZE (mode);
7203 unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
7204 int equality_comparison_p;
7205 int sign_bit_comparison_p;
7206 int unsigned_comparison_p;
7207 HOST_WIDE_INT const_op;
7208
7209 /* We only want to handle integral modes. This catches VOIDmode,
7210 CCmode, and the floating-point modes. An exception is that we
7211 can handle VOIDmode if OP0 is a COMPARE or a comparison
7212 operation. */
7213
7214 if (GET_MODE_CLASS (mode) != MODE_INT
7215 && ! (mode == VOIDmode
7216 && (GET_CODE (op0) == COMPARE
7217 || GET_RTX_CLASS (GET_CODE (op0)) == '<')))
7218 break;
7219
7220 /* Get the constant we are comparing against and turn off all bits
7221 not on in our mode. */
7222 const_op = INTVAL (op1);
7223 if (mode_width <= HOST_BITS_PER_WIDE_INT)
7224 const_op &= mask;
7225
7226 /* If we are comparing against a constant power of two and the value
7227 being compared has only that single significant bit (e.g., it was
7228 `and'ed with that bit), we can replace this with a comparison
7229 with zero. */
7230 if (const_op
7231 && (code == EQ || code == NE || code == GE || code == GEU
7232 || code == LT || code == LTU)
7233 && mode_width <= HOST_BITS_PER_WIDE_INT
7234 && exact_log2 (const_op) >= 0
7235 && significant_bits (op0, mode) == const_op)
7236 {
7237 code = (code == EQ || code == GE || code == GEU ? NE : EQ);
7238 op1 = const0_rtx, const_op = 0;
7239 }
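	  /* Worked example (illustrative commentary, added): if OP0 is
	     (and X 8), its only possible values are 0 and 8, so
	     (eq OP0 8) holds exactly when OP0 is nonzero and becomes
	     (ne OP0 0).  */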
7240
7241 /* Similarly, if we are comparing a value known to be either -1 or
7242 0 with -1, change it to the opposite comparison against zero. */
7243
7244 if (const_op == -1
7245 && (code == EQ || code == NE || code == GT || code == LE
7246 || code == GEU || code == LTU)
7247 && num_sign_bit_copies (op0, mode) == mode_width)
7248 {
7249 code = (code == EQ || code == LE || code == GEU ? NE : EQ);
7250 op1 = const0_rtx, const_op = 0;
7251 }
7252
7253 /* Do some canonicalizations based on the comparison code. We prefer
7254 comparisons against zero and then prefer equality comparisons.
7255 If we can reduce the size of a constant, we will do that too. */
7256
7257 switch (code)
7258 {
7259 case LT:
7260 /* < C is equivalent to <= (C - 1). */
7261 if (const_op > 0)
7262 {
7263 const_op -= 1;
7264 op1 = GEN_INT (const_op);
7265 code = LE;
7266 /* ... fall through to LE case below. */
7267 }
7268 else
7269 break;
7270
7271 case LE:
7272 /* <= C is equivalent to < (C + 1); we do this for C < 0 */
7273 if (const_op < 0)
7274 {
7275 const_op += 1;
7276 op1 = GEN_INT (const_op);
7277 code = LT;
7278 }
7279
7280 /* If we are doing a <= 0 comparison on a value known to have
7281 a zero sign bit, we can replace this with == 0. */
7282 else if (const_op == 0
7283 && mode_width <= HOST_BITS_PER_WIDE_INT
7284 && (significant_bits (op0, mode)
7285 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
7286 code = EQ;
7287 break;
7288
7289 case GE:
7290 /* >= C is equivalent to > (C - 1). */
7291 if (const_op > 0)
7292 {
7293 const_op -= 1;
7294 op1 = GEN_INT (const_op);
7295 code = GT;
7296 /* ... fall through to GT below. */
7297 }
7298 else
7299 break;
7300
7301 case GT:
7302 /* > C is equivalent to >= (C + 1); we do this for C < 0. */
7303 if (const_op < 0)
7304 {
7305 const_op += 1;
7306 op1 = GEN_INT (const_op);
7307 code = GE;
7308 }
7309
7310 /* If we are doing a > 0 comparison on a value known to have
7311 a zero sign bit, we can replace this with != 0. */
7312 else if (const_op == 0
7313 && mode_width <= HOST_BITS_PER_WIDE_INT
7314 && (significant_bits (op0, mode)
7315 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
7316 code = NE;
7317 break;
7318
7319 case LTU:
7320 /* < C is equivalent to <= (C - 1). */
7321 if (const_op > 0)
7322 {
7323 const_op -= 1;
7324 op1 = GEN_INT (const_op);
7325 code = LEU;
7326 /* ... fall through ... */
7327 }
7328
7329 /* (unsigned) < 0x80000000 is equivalent to >= 0. */
7330 else if (const_op == (HOST_WIDE_INT) 1 << (mode_width - 1))
7331 {
7332 const_op = 0, op1 = const0_rtx;
7333 code = GE;
7334 break;
7335 }
7336 else
7337 break;
7338
7339 case LEU:
7340 /* unsigned <= 0 is equivalent to == 0 */
7341 if (const_op == 0)
7342 code = EQ;
7343
7344 /* (unsigned) <= 0x7fffffff is equivalent to >= 0. */
7345 else if (const_op == ((HOST_WIDE_INT) 1 << (mode_width - 1)) - 1)
7346 {
7347 const_op = 0, op1 = const0_rtx;
7348 code = GE;
7349 }
7350 break;
7351
7352 case GEU:
7353 /* >= C is equivalent to > (C - 1). */
7354 if (const_op > 1)
7355 {
7356 const_op -= 1;
7357 op1 = GEN_INT (const_op);
7358 code = GTU;
7359 /* ... fall through ... */
7360 }
7361
7362 /* (unsigned) >= 0x80000000 is equivalent to < 0. */
7363 else if (const_op == (HOST_WIDE_INT) 1 << (mode_width - 1))
7364 {
7365 const_op = 0, op1 = const0_rtx;
7366 code = LT;
7367 }
7368 else
7369 break;
7370
7371 case GTU:
7372 /* unsigned > 0 is equivalent to != 0 */
7373 if (const_op == 0)
7374 code = NE;
7375
7376 /* (unsigned) > 0x7fffffff is equivalent to < 0. */
7377 else if (const_op == ((HOST_WIDE_INT) 1 << (mode_width - 1)) - 1)
7378 {
7379 const_op = 0, op1 = const0_rtx;
7380 code = LT;
7381 }
7382 break;
7383 }
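      /* Illustrative examples of the canonicalizations above (added):
	 (ltu X 8) becomes (leu X 7), (geu X 8) becomes (gtu X 7), and in
	 a 32-bit mode (ltu X 0x80000000) becomes (ge X 0), turning an
	 unsigned test of the sign bit into a signed one.  */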
7384
7385 /* Compute some predicates to simplify code below. */
7386
7387 equality_comparison_p = (code == EQ || code == NE);
7388 sign_bit_comparison_p = ((code == LT || code == GE) && const_op == 0);
7389 unsigned_comparison_p = (code == LTU || code == LEU || code == GTU
7390 || code == LEU);
7391
7392 /* Now try cases based on the opcode of OP0. If none of the cases
7393 does a "continue", we exit this loop immediately after the
7394 switch. */
7395
7396 switch (GET_CODE (op0))
7397 {
7398 case ZERO_EXTRACT:
7399 /* If we are extracting a single bit from a variable position in
7400 a constant that has only a single bit set and are comparing it
7401 with zero, we can convert this into an equality comparison
7402 between the position and the location of the single bit. We can't
7403 do this if the bits are big-endian and we don't have an extzv, since we then
7404 can't know what mode to use for the endianness adjustment. */
7405
7406#if ! BITS_BIG_ENDIAN || defined (HAVE_extzv)
7407 if (GET_CODE (XEXP (op0, 0)) == CONST_INT
7408 && XEXP (op0, 1) == const1_rtx
7409 && equality_comparison_p && const_op == 0
7410 && (i = exact_log2 (INTVAL (XEXP (op0, 0)))) >= 0)
7411 {
7412#if BITS_BIG_ENDIAN
7413 i = (GET_MODE_BITSIZE
7414 (insn_operand_mode[(int) CODE_FOR_extzv][1]) - 1 - i);
7415#endif
7416
7417 op0 = XEXP (op0, 2);
7418 op1 = GEN_INT (i);
7419 const_op = i;
7420
7421 /* Result is nonzero iff shift count is equal to I. */
7422 code = reverse_condition (code);
7423 continue;
7424 }
7425#endif
7426
7427 /* ... fall through ... */
7428
7429 case SIGN_EXTRACT:
7430 tem = expand_compound_operation (op0);
7431 if (tem != op0)
7432 {
7433 op0 = tem;
7434 continue;
7435 }
7436 break;
7437
7438 case NOT:
7439 /* If testing for equality, we can take the NOT of the constant. */
7440 if (equality_comparison_p
7441 && (tem = simplify_unary_operation (NOT, mode, op1, mode)) != 0)
7442 {
7443 op0 = XEXP (op0, 0);
7444 op1 = tem;
7445 continue;
7446 }
7447
7448 /* If just looking at the sign bit, reverse the sense of the
7449 comparison. */
7450 if (sign_bit_comparison_p)
7451 {
7452 op0 = XEXP (op0, 0);
7453 code = (code == GE ? LT : GE);
7454 continue;
7455 }
7456 break;
7457
7458 case NEG:
7459 /* If testing for equality, we can take the NEG of the constant. */
7460 if (equality_comparison_p
7461 && (tem = simplify_unary_operation (NEG, mode, op1, mode)) != 0)
7462 {
7463 op0 = XEXP (op0, 0);
7464 op1 = tem;
7465 continue;
7466 }
7467
7468 /* The remaining cases only apply to comparisons with zero. */
7469 if (const_op != 0)
7470 break;
7471
7472 /* When X is ABS or is known positive,
7473 (neg X) is < 0 if and only if X != 0. */
7474
7475 if (sign_bit_comparison_p
7476 && (GET_CODE (XEXP (op0, 0)) == ABS
7477 || (mode_width <= HOST_BITS_PER_WIDE_INT
7478 && (significant_bits (XEXP (op0, 0), mode)
7479 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)))
7480 {
7481 op0 = XEXP (op0, 0);
7482 code = (code == LT ? NE : EQ);
7483 continue;
7484 }
7485
7486 /* If we have NEG of something that is the result of a
7487 SIGN_EXTEND, SIGN_EXTRACT, or ASHIFTRT, we know that the
7488 two high-order bits must be the same and hence that
7489 "(-a) < 0" is equivalent to "a > 0". Otherwise, we can't
7490 do this. */
7491 if (GET_CODE (XEXP (op0, 0)) == SIGN_EXTEND
7492 || (GET_CODE (XEXP (op0, 0)) == SIGN_EXTRACT
7493 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
7494 && (INTVAL (XEXP (XEXP (op0, 0), 1))
7495 < GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (op0, 0), 0)))))
7496 || (GET_CODE (XEXP (op0, 0)) == ASHIFTRT
7497 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
7498 && XEXP (XEXP (op0, 0), 1) != const0_rtx)
7499 || ((tem = get_last_value (XEXP (op0, 0))) != 0
7500 && (GET_CODE (tem) == SIGN_EXTEND
7501 || (GET_CODE (tem) == SIGN_EXTRACT
7502 && GET_CODE (XEXP (tem, 1)) == CONST_INT
7503 && (INTVAL (XEXP (tem, 1))
7504 < GET_MODE_BITSIZE (GET_MODE (XEXP (tem, 0)))))
7505 || (GET_CODE (tem) == ASHIFTRT
7506 && GET_CODE (XEXP (tem, 1)) == CONST_INT
7507 && XEXP (tem, 1) != const0_rtx))))
7508 {
7509 op0 = XEXP (op0, 0);
7510 code = swap_condition (code);
7511 continue;
7512 }
7513 break;
7514
7515 case ROTATE:
7516 /* If we are testing equality and our count is a constant, we
7517 can perform the inverse operation on our RHS. */
7518 if (equality_comparison_p && GET_CODE (XEXP (op0, 1)) == CONST_INT
7519 && (tem = simplify_binary_operation (ROTATERT, mode,
7520 op1, XEXP (op0, 1))) != 0)
7521 {
7522 op0 = XEXP (op0, 0);
7523 op1 = tem;
7524 continue;
7525 }
7526
7527 /* If we are doing a < 0 or >= 0 comparison, it means we are testing
7528 a particular bit. Convert it to an AND of a constant of that
7529 bit. This will be converted into a ZERO_EXTRACT. */
7530 if (const_op == 0 && sign_bit_comparison_p
7531 && GET_CODE (XEXP (op0, 1)) == CONST_INT
7532 && mode_width <= HOST_BITS_PER_WIDE_INT)
7533 {
7534 op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
7535 ((HOST_WIDE_INT) 1
7536 << (mode_width - 1
7537 - INTVAL (XEXP (op0, 1)))));
7538 code = (code == LT ? NE : EQ);
7539 continue;
7540 }
7541
7542 /* ... fall through ... */
7543
7544 case ABS:
7545 /* ABS is ignorable inside an equality comparison with zero. */
7546 if (const_op == 0 && equality_comparison_p)
7547 {
7548 op0 = XEXP (op0, 0);
7549 continue;
7550 }
7551 break;
7552
7553
7554 case SIGN_EXTEND:
7555 /* Can simplify (compare (zero/sign_extend FOO) CONST)
7556 to (compare FOO CONST) if CONST fits in FOO's mode and we
7557 are either testing inequality or have an unsigned comparison
7558 with ZERO_EXTEND or a signed comparison with SIGN_EXTEND. */
7559 if (! unsigned_comparison_p
7560 && (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
7561 <= HOST_BITS_PER_WIDE_INT)
7562 && ((unsigned HOST_WIDE_INT) const_op
7563 < (((HOST_WIDE_INT) 1
7564 << (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0))) - 1)))))
7565 {
7566 op0 = XEXP (op0, 0);
7567 continue;
7568 }
7569 break;
7570
7571 case SUBREG:
7572 /* Check for the case where we are comparing A - C1 with C2,
7573 both constants are smaller than 1/2 the maximum positive
7574 value in MODE, and the comparison is equality or unsigned.
7575 In that case, if A is either zero-extended to MODE or has
7576 sufficient sign bits so that the high-order bit in MODE
7577 is a copy of the sign in the inner mode, we can prove that it is
7578 safe to do the operation in the wider mode. This simplifies
7579 many range checks. */
7580
7581 if (mode_width <= HOST_BITS_PER_WIDE_INT
7582 && subreg_lowpart_p (op0)
7583 && GET_CODE (SUBREG_REG (op0)) == PLUS
7584 && GET_CODE (XEXP (SUBREG_REG (op0), 1)) == CONST_INT
7585 && INTVAL (XEXP (SUBREG_REG (op0), 1)) < 0
7586 && (- INTVAL (XEXP (SUBREG_REG (op0), 1))
7587 < GET_MODE_MASK (mode) / 2)
7588 && (unsigned) const_op < GET_MODE_MASK (mode) / 2
7589 && (0 == (significant_bits (XEXP (SUBREG_REG (op0), 0),
7590 GET_MODE (SUBREG_REG (op0)))
7591 & ~ GET_MODE_MASK (mode))
7592 || (num_sign_bit_copies (XEXP (SUBREG_REG (op0), 0),
7593 GET_MODE (SUBREG_REG (op0)))
7594 > (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0)))
7595 - GET_MODE_BITSIZE (mode)))))
7596 {
7597 op0 = SUBREG_REG (op0);
7598 continue;
7599 }
7600
7601 /* If the inner mode is narrower and we are extracting the low part,
7602 we can treat the SUBREG as if it were a ZERO_EXTEND. */
7603 if (subreg_lowpart_p (op0)
7604 && GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0))) < mode_width)
7605 /* Fall through */ ;
7606 else
7607 break;
7608
7609 /* ... fall through ... */
7610
7611 case ZERO_EXTEND:
7612 if ((unsigned_comparison_p || equality_comparison_p)
7613 && (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
7614 <= HOST_BITS_PER_WIDE_INT)
7615 && ((unsigned HOST_WIDE_INT) const_op
7616 < GET_MODE_MASK (GET_MODE (XEXP (op0, 0)))))
7617 {
7618 op0 = XEXP (op0, 0);
7619 continue;
7620 }
7621 break;
7622
7623 case PLUS:
7624 /* (eq (plus X C1) C2) -> (eq X (minus C2 C1)). We can only do
7625 this for equality comparisons due to pathological cases involving
7626 overflows. */
7627 if (equality_comparison_p && GET_CODE (XEXP (op0, 1)) == CONST_INT
7628 && (tem = simplify_binary_operation (MINUS, mode, op1,
7629 XEXP (op0, 1))) != 0)
7630 {
7631 op0 = XEXP (op0, 0);
7632 op1 = tem;
7633 continue;
7634 }
7635
7636 /* (plus (abs X) (const_int -1)) is < 0 if and only if X == 0. */
7637 if (const_op == 0 && XEXP (op0, 1) == constm1_rtx
7638 && GET_CODE (XEXP (op0, 0)) == ABS && sign_bit_comparison_p)
7639 {
7640 op0 = XEXP (XEXP (op0, 0), 0);
7641 code = (code == LT ? EQ : NE);
7642 continue;
7643 }
7644 break;
7645
7646 case MINUS:
7647 /* The sign bit of (minus (ashiftrt X C) X), where C is the number
7648 of bits in X minus 1, is one iff X > 0. */
7649 if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == ASHIFTRT
7650 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
7651 && INTVAL (XEXP (XEXP (op0, 0), 1)) == mode_width - 1
7652 && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
7653 {
7654 op0 = XEXP (op0, 1);
7655 code = (code == GE ? LE : GT);
7656 continue;
7657 }
7658 break;
7659
7660 case XOR:
7661 /* (eq (xor A B) C) -> (eq A (xor B C)). This is a simplification
7662 if C is zero or B is a constant. */
7663 if (equality_comparison_p
7664 && 0 != (tem = simplify_binary_operation (XOR, mode,
7665 XEXP (op0, 1), op1)))
7666 {
7667 op0 = XEXP (op0, 0);
7668 op1 = tem;
7669 continue;
7670 }
7671 break;
7672
7673 case EQ: case NE:
7674 case LT: case LTU: case LE: case LEU:
7675 case GT: case GTU: case GE: case GEU:
7676 /* We can't do anything if OP0 is a condition code value, rather
7677 than an actual data value. */
7678 if (const_op != 0
7679#ifdef HAVE_cc0
7680 || XEXP (op0, 0) == cc0_rtx
7681#endif
7682 || GET_MODE_CLASS (GET_MODE (XEXP (op0, 0))) == MODE_CC)
7683 break;
7684
7685 /* Get the two operands being compared. */
7686 if (GET_CODE (XEXP (op0, 0)) == COMPARE)
7687 tem = XEXP (XEXP (op0, 0), 0), tem1 = XEXP (XEXP (op0, 0), 1);
7688 else
7689 tem = XEXP (op0, 0), tem1 = XEXP (op0, 1);
7690
7691 /* Check for the cases where we simply want the result of the
7692 earlier test or the opposite of that result. */
7693 if (code == NE
7694 || (code == EQ && reversible_comparison_p (op0))
7695 || (GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
7696 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7697 && (STORE_FLAG_VALUE
7698 & (((HOST_WIDE_INT) 1
7699 << (GET_MODE_BITSIZE (GET_MODE (op0)) - 1))))
7700 && (code == LT
7701 || (code == GE && reversible_comparison_p (op0)))))
7702 {
7703 code = (code == LT || code == NE
7704 ? GET_CODE (op0) : reverse_condition (GET_CODE (op0)));
7705 op0 = tem, op1 = tem1;
7706 continue;
7707 }
7708 break;
7709
7710 case IOR:
7711 /* The sign bit of (ior (plus X (const_int -1)) X) is non-zero
7712 iff X <= 0. */
7713 if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == PLUS
7714 && XEXP (XEXP (op0, 0), 1) == constm1_rtx
7715 && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
7716 {
7717 op0 = XEXP (op0, 1);
7718 code = (code == GE ? GT : LE);
7719 continue;
7720 }
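	  /* Worked example (illustrative commentary, added): the sign bit
	     of (X - 1) | X is set when X == 0 (since 0 - 1 == -1) and when
	     X < 0, i.e. exactly when X <= 0; so a sign-bit test of the IOR
	     tests (le X 0), and its reverse tests (gt X 0).  */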
7721 break;
7722
7723 case AND:
7724 /* Convert (and (xshift 1 X) Y) to (and (lshiftrt Y X) 1). This
7725 will be converted to a ZERO_EXTRACT later. */
7726 if (const_op == 0 && equality_comparison_p
7727 && (GET_CODE (XEXP (op0, 0)) == ASHIFT
7728 || GET_CODE (XEXP (op0, 0)) == LSHIFT)
7729 && XEXP (XEXP (op0, 0), 0) == const1_rtx)
7730 {
7731 op0 = simplify_and_const_int
7732 (op0, mode, gen_rtx_combine (LSHIFTRT, mode,
7733 XEXP (op0, 1),
7734 XEXP (XEXP (op0, 0), 1)),
7735 (HOST_WIDE_INT) 1);
7736 continue;
7737 }
7738
7739 /* If we are comparing (and (lshiftrt X C1) C2) for equality with
7740 zero and X is a comparison and C1 and C2 describe only bits set
7741 in STORE_FLAG_VALUE, we can compare with X. */
7742 if (const_op == 0 && equality_comparison_p
7743 && mode_width <= HOST_BITS_PER_WIDE_INT
7744 && GET_CODE (XEXP (op0, 1)) == CONST_INT
7745 && GET_CODE (XEXP (op0, 0)) == LSHIFTRT
7746 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
7747 && INTVAL (XEXP (XEXP (op0, 0), 1)) >= 0
7748 && INTVAL (XEXP (XEXP (op0, 0), 1)) < HOST_BITS_PER_WIDE_INT)
7749 {
7750 mask = ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode))
7751 << INTVAL (XEXP (XEXP (op0, 0), 1)));
7752 if ((~ STORE_FLAG_VALUE & mask) == 0
7753 && (GET_RTX_CLASS (GET_CODE (XEXP (XEXP (op0, 0), 0))) == '<'
7754 || ((tem = get_last_value (XEXP (XEXP (op0, 0), 0))) != 0
7755 && GET_RTX_CLASS (GET_CODE (tem)) == '<')))
7756 {
7757 op0 = XEXP (XEXP (op0, 0), 0);
7758 continue;
7759 }
7760 }
7761
7762 /* If we are doing an equality comparison of an AND of a bit equal
7763 to the sign bit, replace this with a LT or GE comparison of
7764 the underlying value. */
7765 if (equality_comparison_p
7766 && const_op == 0
7767 && GET_CODE (XEXP (op0, 1)) == CONST_INT
7768 && mode_width <= HOST_BITS_PER_WIDE_INT
7769 && ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode))
7770 == (HOST_WIDE_INT) 1 << (mode_width - 1)))
7771 {
7772 op0 = XEXP (op0, 0);
7773 code = (code == EQ ? GE : LT);
7774 continue;
7775 }
7776
7777 /* If this AND operation is really a ZERO_EXTEND from a narrower
7778 mode, the constant fits within that mode, and this is either an
7779 equality or unsigned comparison, try to do this comparison in
7780 the narrower mode. */
7781 if ((equality_comparison_p || unsigned_comparison_p)
7782 && GET_CODE (XEXP (op0, 1)) == CONST_INT
7783 && (i = exact_log2 ((INTVAL (XEXP (op0, 1))
7784 & GET_MODE_MASK (mode))
7785 + 1)) >= 0
7786 && const_op >> i == 0
7787 && (tmode = mode_for_size (i, MODE_INT, 1)) != BLKmode)
7788 {
7789 op0 = gen_lowpart_for_combine (tmode, XEXP (op0, 0));
7790 continue;
7791 }
7792 break;
7793
7794 case ASHIFT:
7795 case LSHIFT:
7796 /* If we have (compare (xshift FOO N) (const_int C)) and
7797 the high order N bits of FOO (N+1 if an inequality comparison)
7798 are not significant, we can do this by comparing FOO with C
7799 shifted right N bits so long as the low-order N bits of C are
7800 zero. */
7801 if (GET_CODE (XEXP (op0, 1)) == CONST_INT
7802 && INTVAL (XEXP (op0, 1)) >= 0
7803 && ((INTVAL (XEXP (op0, 1)) + ! equality_comparison_p)
7804 < HOST_BITS_PER_WIDE_INT)
7805 && ((const_op
7806 & ~ (((HOST_WIDE_INT) 1
7807 << INTVAL (XEXP (op0, 1))) - 1)) == 0)
7808 && mode_width <= HOST_BITS_PER_WIDE_INT
7809 && (significant_bits (XEXP (op0, 0), mode)
7810 & ~ (mask >> (INTVAL (XEXP (op0, 1))
7811 + ! equality_comparison_p))) == 0)
7812 {
7813 const_op >>= INTVAL (XEXP (op0, 1));
7814 op1 = GEN_INT (const_op);
7815 op0 = XEXP (op0, 0);
7816 continue;
7817 }
7818
7819 /* If we are doing a sign bit comparison, it means we are testing
7820 a particular bit. Convert it to the appropriate AND. */
7821 if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 1)) == CONST_INT
7822 && mode_width <= HOST_BITS_PER_WIDE_INT)
7823 {
7824 op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
7825 ((HOST_WIDE_INT) 1
7826 << (mode_width - 1
7827 - INTVAL (XEXP (op0, 1)))));
7828 code = (code == LT ? NE : EQ);
7829 continue;
7830 }
7831
7832 /* If this is an equality comparison with zero and we are shifting
7833 the low bit to the sign bit, we can convert this to an AND of the
7834 low-order bit. */
7835 if (const_op == 0 && equality_comparison_p
7836 && GET_CODE (XEXP (op0, 1)) == CONST_INT
7837 && INTVAL (XEXP (op0, 1)) == mode_width - 1)
7838 {
7839 op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
7840 (HOST_WIDE_INT) 1);
7841 continue;
7842 }
7843 break;
7844
7845 case ASHIFTRT:
7846 /* If this is an equality comparison with zero, we can do this
7847 as a logical shift, which might be much simpler. */
7848 if (equality_comparison_p && const_op == 0
7849 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
7850 {
7851 op0 = simplify_shift_const (NULL_RTX, LSHIFTRT, mode,
7852 XEXP (op0, 0),
7853 INTVAL (XEXP (op0, 1)));
7854 continue;
7855 }
7856
230d793d
RS
7857 /* If OP0 is a sign extension and CODE is not an unsigned comparison,
7858 do the comparison in a narrower mode. */
7859 if (! unsigned_comparison_p
7860 && GET_CODE (XEXP (op0, 1)) == CONST_INT
7861 && GET_CODE (XEXP (op0, 0)) == ASHIFT
7862 && XEXP (op0, 1) == XEXP (XEXP (op0, 0), 1)
7863 && (tmode = mode_for_size (mode_width - INTVAL (XEXP (op0, 1)),
7864 MODE_INT, 1)) != VOIDmode
7865 && ((unsigned HOST_WIDE_INT) const_op <= GET_MODE_MASK (tmode)
7866 || ((unsigned HOST_WIDE_INT) - const_op
7867 <= GET_MODE_MASK (tmode))))
7868 {
7869 op0 = gen_lowpart_for_combine (tmode, XEXP (XEXP (op0, 0), 0));
7870 continue;
7871 }
7872
7873 /* ... fall through ... */
7874 case LSHIFTRT:
7875 /* If we have (compare (xshiftrt FOO N) (const_int C)) and
7876 the low order N bits of FOO are not significant, we can do this
7877 by comparing FOO with C shifted left N bits so long as no
7878 overflow occurs. */
7879 if (GET_CODE (XEXP (op0, 1)) == CONST_INT
7880 && INTVAL (XEXP (op0, 1)) >= 0
7881 && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
7882 && mode_width <= HOST_BITS_PER_WIDE_INT
7883 && (significant_bits (XEXP (op0, 0), mode)
7884 & (((HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1))) - 1)) == 0
7885 && (const_op == 0
7886 || (floor_log2 (const_op) + INTVAL (XEXP (op0, 1))
7887 < mode_width)))
7888 {
7889 const_op <<= INTVAL (XEXP (op0, 1));
 7890	      op1 = GEN_INT (const_op);
7891 op0 = XEXP (op0, 0);
7892 continue;
7893 }
7894
7895 /* If we are using this shift to extract just the sign bit, we
7896 can replace this with an LT or GE comparison. */
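	  /* E.g., in a 32-bit mode, (NE (lshiftrt FOO (const_int 31))
	     (const_int 0)) extracts just the sign bit of FOO and so is
	     simply (LT FOO (const_int 0)).  */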
7897 if (const_op == 0
7898 && (equality_comparison_p || sign_bit_comparison_p)
7899 && GET_CODE (XEXP (op0, 1)) == CONST_INT
7900 && INTVAL (XEXP (op0, 1)) == mode_width - 1)
7901 {
7902 op0 = XEXP (op0, 0);
7903 code = (code == NE || code == GT ? LT : GE);
7904 continue;
7905 }
7906 break;
7907 }
7908
7909 break;
7910 }
7911
7912 /* Now make any compound operations involved in this comparison. Then,
 7913   check for an outermost SUBREG on OP0 that isn't doing anything or is
7914 paradoxical. The latter case can only occur when it is known that the
7915 "extra" bits will be zero. Therefore, it is safe to remove the SUBREG.
7916 We can never remove a SUBREG for a non-equality comparison because the
7917 sign bit is in a different place in the underlying object. */
7918
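  /* E.g., (eq (subreg:SI (reg:HI FOO) 0) (const_int 0)) can be tested as
     (eq (reg:HI FOO) (const_int 0)), since the "extra" bits of the
     paradoxical SUBREG are known to be zero here.  */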
7919 op0 = make_compound_operation (op0, op1 == const0_rtx ? COMPARE : SET);
7920 op1 = make_compound_operation (op1, SET);
7921
7922 if (GET_CODE (op0) == SUBREG && subreg_lowpart_p (op0)
7923 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7924 && (code == NE || code == EQ)
7925 && ((GET_MODE_SIZE (GET_MODE (op0))
7926 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0))))))
7927 {
7928 op0 = SUBREG_REG (op0);
7929 op1 = gen_lowpart_for_combine (GET_MODE (op0), op1);
7930 }
7931
7932 else if (GET_CODE (op0) == SUBREG && subreg_lowpart_p (op0)
7933 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7934 && (code == NE || code == EQ)
 7935	   && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
7936 && (significant_bits (SUBREG_REG (op0), GET_MODE (SUBREG_REG (op0)))
7937 & ~ GET_MODE_MASK (GET_MODE (op0))) == 0
7938 && (tem = gen_lowpart_for_combine (GET_MODE (SUBREG_REG (op0)),
7939 op1),
7940 (significant_bits (tem, GET_MODE (SUBREG_REG (op0)))
7941 & ~ GET_MODE_MASK (GET_MODE (op0))) == 0))
7942 op0 = SUBREG_REG (op0), op1 = tem;
7943
7944 /* We now do the opposite procedure: Some machines don't have compare
7945 insns in all modes. If OP0's mode is an integer mode smaller than a
7946 word and we can't do a compare in that mode, see if there is a larger
7947 mode for which we can do the compare. There are a number of cases in
7948 which we can use the wider mode. */
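  /* E.g., on a machine with SImode but no HImode compare,
     (eq (reg:HI FOO) (reg:HI BAR)) can be done as an SImode comparison
     when the significant bits of both operands fit in HImode, or, for a
     signed comparison, when both operands are known to be sign-extended.  */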
7949
7950 mode = GET_MODE (op0);
7951 if (mode != VOIDmode && GET_MODE_CLASS (mode) == MODE_INT
7952 && GET_MODE_SIZE (mode) < UNITS_PER_WORD
7953 && cmp_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
7954 for (tmode = GET_MODE_WIDER_MODE (mode);
7955 (tmode != VOIDmode
7956 && GET_MODE_BITSIZE (tmode) <= HOST_BITS_PER_WIDE_INT);
 7957	 tmode = GET_MODE_WIDER_MODE (tmode))
 7958      if (cmp_optab->handlers[(int) tmode].insn_code != CODE_FOR_nothing)
 7959	{
7960 /* If the only significant bits in OP0 and OP1 are those in the
7961 narrower mode and this is an equality or unsigned comparison,
7962 we can use the wider mode. Similarly for sign-extended
7963 values and equality or signed comparisons. */
7964 if (((code == EQ || code == NE
7965 || code == GEU || code == GTU || code == LEU || code == LTU)
7966 && ((significant_bits (op0, tmode) & ~ GET_MODE_MASK (mode))
7967 == 0)
7968 && ((significant_bits (op1, tmode) & ~ GET_MODE_MASK (mode))
7969 == 0))
7970 || ((code == EQ || code == NE
7971 || code == GE || code == GT || code == LE || code == LT)
7972 && (num_sign_bit_copies (op0, tmode)
 7973		  > GET_MODE_BITSIZE (tmode) - GET_MODE_BITSIZE (mode))
 7974	      && (num_sign_bit_copies (op1, tmode)
 7975		  > GET_MODE_BITSIZE (tmode) - GET_MODE_BITSIZE (mode))))
7976 {
7977 op0 = gen_lowpart_for_combine (tmode, op0);
7978 op1 = gen_lowpart_for_combine (tmode, op1);
7979 break;
7980 }
 7981
7982 /* If this is a test for negative, we can make an explicit
7983 test of the sign bit. */
7984
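	  /* E.g., with no HImode compare, (LT (reg:HI FOO) (const_int 0))
	     becomes (NE (AND (subreg:SI (reg:HI FOO) 0) (const_int 32768))
	     (const_int 0)) computed in SImode.  */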
7985 if (op1 == const0_rtx && (code == LT || code == GE)
7986 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
 7987	    {
7988 op0 = gen_binary (AND, tmode,
7989 gen_lowpart_for_combine (tmode, op0),
7990 GEN_INT ((HOST_WIDE_INT) 1
7991 << (GET_MODE_BITSIZE (mode) - 1)));
 7992	      code = (code == LT) ? NE : EQ;
 7993	      break;
 7994	    }
7995 }
7996
7997 *pop0 = op0;
7998 *pop1 = op1;
7999
8000 return code;
8001}
8002\f
8003/* Return 1 if we know that X, a comparison operation, is not operating
8004   on a floating-point value or is EQ or NE, meaning that we can safely
8005   reverse it (an IEEE inequality cannot be reversed if a NaN is involved). */
8006
8007static int
8008reversible_comparison_p (x)
8009 rtx x;
8010{
8011 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
8012 || GET_CODE (x) == NE || GET_CODE (x) == EQ)
8013 return 1;
8014
8015 switch (GET_MODE_CLASS (GET_MODE (XEXP (x, 0))))
8016 {
8017 case MODE_INT:
8018 return 1;
8019
8020 case MODE_CC:
8021 x = get_last_value (XEXP (x, 0));
8022 return (x && GET_CODE (x) == COMPARE
8023 && GET_MODE_CLASS (GET_MODE (XEXP (x, 0))) == MODE_INT);
8024 }
8025
8026 return 0;
8027}
8028\f
8029/* Utility function for following routine. Called when X is part of a value
8030 being stored into reg_last_set_value. Sets reg_last_set_table_tick
8031 for each register mentioned. Similar to mention_regs in cse.c */
8032
8033static void
8034update_table_tick (x)
8035 rtx x;
8036{
8037 register enum rtx_code code = GET_CODE (x);
8038 register char *fmt = GET_RTX_FORMAT (code);
8039 register int i;
8040
8041 if (code == REG)
8042 {
8043 int regno = REGNO (x);
8044 int endregno = regno + (regno < FIRST_PSEUDO_REGISTER
8045 ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
8046
8047 for (i = regno; i < endregno; i++)
8048 reg_last_set_table_tick[i] = label_tick;
8049
8050 return;
8051 }
8052
8053 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
8054 /* Note that we can't have an "E" in values stored; see
8055 get_last_value_validate. */
8056 if (fmt[i] == 'e')
8057 update_table_tick (XEXP (x, i));
8058}
8059
8060/* Record that REG is set to VALUE in insn INSN. If VALUE is zero, we
8061 are saying that the register is clobbered and we no longer know its
8062 value. If INSN is zero, don't update reg_last_set; this call is normally
8063 done with VALUE also zero to invalidate the register. */
8064
8065static void
8066record_value_for_reg (reg, insn, value)
8067 rtx reg;
8068 rtx insn;
8069 rtx value;
8070{
8071 int regno = REGNO (reg);
8072 int endregno = regno + (regno < FIRST_PSEUDO_REGISTER
8073 ? HARD_REGNO_NREGS (regno, GET_MODE (reg)) : 1);
8074 int i;
8075
8076 /* If VALUE contains REG and we have a previous value for REG, substitute
8077 the previous value. */
8078 if (value && insn && reg_overlap_mentioned_p (reg, value))
8079 {
8080 rtx tem;
8081
8082 /* Set things up so get_last_value is allowed to see anything set up to
8083 our insn. */
8084 subst_low_cuid = INSN_CUID (insn);
8085 tem = get_last_value (reg);
8086
8087 if (tem)
8088 value = replace_rtx (copy_rtx (value), reg, tem);
8089 }
8090
8091 /* For each register modified, show we don't know its value, that
8092 its value has been updated, and that we don't know the location of
8093 the death of the register. */
8094 for (i = regno; i < endregno; i ++)
8095 {
8096 if (insn)
8097 reg_last_set[i] = insn;
8098 reg_last_set_value[i] = 0;
8099 reg_last_death[i] = 0;
8100 }
8101
8102 /* Mark registers that are being referenced in this value. */
8103 if (value)
8104 update_table_tick (value);
8105
8106  /* Now update the status of each register being set.
8107     If this register is already used in this basic block, mark it
8108     invalid, since we would otherwise confuse the register's two live
8109     ranges within the block; as a result, every later use of this
8110     register's recorded value is rejected. In cse, we scan the table to
8111     invalidate all entries using this register, but this is too much work for us. */
8112
8113 for (i = regno; i < endregno; i++)
8114 {
8115 reg_last_set_label[i] = label_tick;
8116 if (value && reg_last_set_table_tick[i] == label_tick)
8117 reg_last_set_invalid[i] = 1;
8118 else
8119 reg_last_set_invalid[i] = 0;
8120 }
8121
8122 /* The value being assigned might refer to X (like in "x++;"). In that
8123 case, we must replace it with (clobber (const_int 0)) to prevent
8124 infinite loops. */
8125 if (value && ! get_last_value_validate (&value,
8126 reg_last_set_label[regno], 0))
8127 {
8128 value = copy_rtx (value);
8129 if (! get_last_value_validate (&value, reg_last_set_label[regno], 1))
8130 value = 0;
8131 }
8132
8133 /* For the main register being modified, update the value. */
8134 reg_last_set_value[regno] = value;
8135
8136}
8137
8138/* Used for communication between the following two routines. */
8139static rtx record_dead_insn;
8140
8141/* Called via note_stores from record_dead_and_set_regs to handle one
8142 SET or CLOBBER in an insn. */
8143
8144static void
8145record_dead_and_set_regs_1 (dest, setter)
8146 rtx dest, setter;
8147{
8148 if (GET_CODE (dest) == REG)
8149 {
8150 /* If we are setting the whole register, we know its value. Otherwise
8151 show that we don't know the value. We can handle SUBREG in
8152 some cases. */
8153 if (GET_CODE (setter) == SET && dest == SET_DEST (setter))
8154 record_value_for_reg (dest, record_dead_insn, SET_SRC (setter));
8155 else if (GET_CODE (setter) == SET
8156 && GET_CODE (SET_DEST (setter)) == SUBREG
8157 && SUBREG_REG (SET_DEST (setter)) == dest
8158 && subreg_lowpart_p (SET_DEST (setter)))
8159 record_value_for_reg (dest, record_dead_insn,
8160 gen_lowpart_for_combine (GET_MODE (dest),
8161 SET_SRC (setter)));
8162      else
8163	record_value_for_reg (dest, record_dead_insn, NULL_RTX);
8164 }
8165 else if (GET_CODE (dest) == MEM
8166 /* Ignore pushes, they clobber nothing. */
8167 && ! push_operand (dest, GET_MODE (dest)))
8168 mem_last_set = INSN_CUID (record_dead_insn);
8169}
8170
8171/* Update the records of when each REG was most recently set or killed
8172 for the things done by INSN. This is the last thing done in processing
8173 INSN in the combiner loop.
8174
8175 We update reg_last_set, reg_last_set_value, reg_last_death, and also the
8176 similar information mem_last_set (which insn most recently modified memory)
8177 and last_call_cuid (which insn was the most recent subroutine call). */
8178
8179static void
8180record_dead_and_set_regs (insn)
8181 rtx insn;
8182{
8183 register rtx link;
8184 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
8185 {
8186 if (REG_NOTE_KIND (link) == REG_DEAD)
8187 reg_last_death[REGNO (XEXP (link, 0))] = insn;
8188 else if (REG_NOTE_KIND (link) == REG_INC)
8189	record_value_for_reg (XEXP (link, 0), insn, NULL_RTX);
8190 }
8191
8192 if (GET_CODE (insn) == CALL_INSN)
8193 last_call_cuid = mem_last_set = INSN_CUID (insn);
8194
8195 record_dead_insn = insn;
8196 note_stores (PATTERN (insn), record_dead_and_set_regs_1);
8197}
8198\f
8199/* Utility routine for the following function. Verify that all the registers
8200 mentioned in *LOC are valid when *LOC was part of a value set when
8201 label_tick == TICK. Return 0 if some are not.
8202
8203 If REPLACE is non-zero, replace the invalid reference with
8204 (clobber (const_int 0)) and return 1. This replacement is useful because
8205 we often can get useful information about the form of a value (e.g., if
8206 it was produced by a shift that always produces -1 or 0) even though
8207 we don't know exactly what registers it was produced from. */
8208
8209static int
8210get_last_value_validate (loc, tick, replace)
8211 rtx *loc;
8212 int tick;
8213 int replace;
8214{
8215 rtx x = *loc;
8216 char *fmt = GET_RTX_FORMAT (GET_CODE (x));
8217 int len = GET_RTX_LENGTH (GET_CODE (x));
8218 int i;
8219
8220 if (GET_CODE (x) == REG)
8221 {
8222 int regno = REGNO (x);
8223 int endregno = regno + (regno < FIRST_PSEUDO_REGISTER
8224 ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
8225 int j;
8226
8227 for (j = regno; j < endregno; j++)
8228 if (reg_last_set_invalid[j]
8229 /* If this is a pseudo-register that was only set once, it is
8230 always valid. */
8231 || (! (regno >= FIRST_PSEUDO_REGISTER && reg_n_sets[regno] == 1)
8232 && reg_last_set_label[j] > tick))
8233 {
8234 if (replace)
8235 *loc = gen_rtx (CLOBBER, GET_MODE (x), const0_rtx);
8236 return replace;
8237 }
8238
8239 return 1;
8240 }
8241
8242 for (i = 0; i < len; i++)
8243 if ((fmt[i] == 'e'
8244 && get_last_value_validate (&XEXP (x, i), tick, replace) == 0)
8245 /* Don't bother with these. They shouldn't occur anyway. */
8246 || fmt[i] == 'E')
8247 return 0;
8248
8249 /* If we haven't found a reason for it to be invalid, it is valid. */
8250 return 1;
8251}
8252
8253/* Get the last value assigned to X, if known. Some registers
8254 in the value may be replaced with (clobber (const_int 0)) if their value
8255   is no longer known reliably. */
8256
8257static rtx
8258get_last_value (x)
8259 rtx x;
8260{
8261 int regno;
8262 rtx value;
8263
8264 /* If this is a non-paradoxical SUBREG, get the value of its operand and
8265 then convert it to the desired mode. If this is a paradoxical SUBREG,
8266 we cannot predict what values the "extra" bits might have. */
8267 if (GET_CODE (x) == SUBREG
8268 && subreg_lowpart_p (x)
8269 && (GET_MODE_SIZE (GET_MODE (x))
8270 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
8271 && (value = get_last_value (SUBREG_REG (x))) != 0)
8272 return gen_lowpart_for_combine (GET_MODE (x), value);
8273
8274 if (GET_CODE (x) != REG)
8275 return 0;
8276
8277 regno = REGNO (x);
8278 value = reg_last_set_value[regno];
8279
8280  /* If we don't have a value or if it isn't for this basic block, return 0. */
8281
8282 if (value == 0
8283 || (reg_n_sets[regno] != 1
8284	  && (reg_last_set_label[regno] != label_tick)))
8285 return 0;
8286
8287  /* If the value was set in a later insn than the ones we are processing,
8288 we can't use it, but make a quick check to see if the previous insn
8289 set it to something. This is commonly the case when the same pseudo
8290 is used by repeated insns. */
8291
8292 if (reg_n_sets[regno] != 1
8293 && INSN_CUID (reg_last_set[regno]) >= subst_low_cuid)
8294 {
8295 rtx insn, set;
8296
8297      for (insn = prev_nonnote_insn (subst_insn);
8298	   insn && INSN_CUID (insn) >= subst_low_cuid;
8299	   insn = prev_nonnote_insn (insn))
8300 ;
8301
8302 if (insn
8303 && (set = single_set (insn)) != 0
8304 && rtx_equal_p (SET_DEST (set), x))
8305 {
8306 value = SET_SRC (set);
8307
8308 /* Make sure that VALUE doesn't reference X. Replace any
8309	     explicit references with a CLOBBER. If there are any remaining
8310 references (rare), don't use the value. */
8311
8312 if (reg_mentioned_p (x, value))
8313 value = replace_rtx (copy_rtx (value), x,
8314 gen_rtx (CLOBBER, GET_MODE (x), const0_rtx));
8315
8316 if (reg_overlap_mentioned_p (x, value))
8317 return 0;
8318 }
8319 else
8320 return 0;
8321 }
8322
8323 /* If the value has all its registers valid, return it. */
8324 if (get_last_value_validate (&value, reg_last_set_label[regno], 0))
8325 return value;
8326
8327 /* Otherwise, make a copy and replace any invalid register with
8328 (clobber (const_int 0)). If that fails for some reason, return 0. */
8329
8330 value = copy_rtx (value);
8331 if (get_last_value_validate (&value, reg_last_set_label[regno], 1))
8332 return value;
8333
8334 return 0;
8335}
8336\f
8337/* Return nonzero if expression X refers to a REG or to memory
8338 that is set in an instruction more recent than FROM_CUID. */
8339
8340static int
8341use_crosses_set_p (x, from_cuid)
8342 register rtx x;
8343 int from_cuid;
8344{
8345 register char *fmt;
8346 register int i;
8347 register enum rtx_code code = GET_CODE (x);
8348
8349 if (code == REG)
8350 {
8351 register int regno = REGNO (x);
8352#ifdef PUSH_ROUNDING
8353 /* Don't allow uses of the stack pointer to be moved,
8354 because we don't know whether the move crosses a push insn. */
8355 if (regno == STACK_POINTER_REGNUM)
8356 return 1;
8357#endif
8358 return (reg_last_set[regno]
8359 && INSN_CUID (reg_last_set[regno]) > from_cuid);
8360 }
8361
8362 if (code == MEM && mem_last_set > from_cuid)
8363 return 1;
8364
8365 fmt = GET_RTX_FORMAT (code);
8366
8367 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
8368 {
8369 if (fmt[i] == 'E')
8370 {
8371 register int j;
8372 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
8373 if (use_crosses_set_p (XVECEXP (x, i, j), from_cuid))
8374 return 1;
8375 }
8376 else if (fmt[i] == 'e'
8377 && use_crosses_set_p (XEXP (x, i), from_cuid))
8378 return 1;
8379 }
8380 return 0;
8381}
8382\f
8383/* Define three variables used for communication between the following
8384 routines. */
8385
8386static int reg_dead_regno, reg_dead_endregno;
8387static int reg_dead_flag;
8388
8389/* Function called via note_stores from reg_dead_at_p.
8390
8391   If DEST is within [reg_dead_regno, reg_dead_endregno), set
8392   reg_dead_flag to 1 if X is a CLOBBER and to -1 if it is a SET. */
8393
8394static void
8395reg_dead_at_p_1 (dest, x)
8396 rtx dest;
8397 rtx x;
8398{
8399 int regno, endregno;
8400
8401 if (GET_CODE (dest) != REG)
8402 return;
8403
8404 regno = REGNO (dest);
8405 endregno = regno + (regno < FIRST_PSEUDO_REGISTER
8406 ? HARD_REGNO_NREGS (regno, GET_MODE (dest)) : 1);
8407
8408 if (reg_dead_endregno > regno && reg_dead_regno < endregno)
8409 reg_dead_flag = (GET_CODE (x) == CLOBBER) ? 1 : -1;
8410}
8411
8412/* Return non-zero if REG is known to be dead at INSN.
8413
8414 We scan backwards from INSN. If we hit a REG_DEAD note or a CLOBBER
8415 referencing REG, it is dead. If we hit a SET referencing REG, it is
8416 live. Otherwise, see if it is live or dead at the start of the basic
8417 block we are in. */
8418
8419static int
8420reg_dead_at_p (reg, insn)
8421 rtx reg;
8422 rtx insn;
8423{
8424 int block, i;
8425
8426 /* Set variables for reg_dead_at_p_1. */
8427 reg_dead_regno = REGNO (reg);
8428 reg_dead_endregno = reg_dead_regno + (reg_dead_regno < FIRST_PSEUDO_REGISTER
8429 ? HARD_REGNO_NREGS (reg_dead_regno,
8430 GET_MODE (reg))
8431 : 1);
8432
8433 reg_dead_flag = 0;
8434
8435 /* Scan backwards until we find a REG_DEAD note, SET, CLOBBER, label, or
8436 beginning of function. */
8437 for (; insn && GET_CODE (insn) != CODE_LABEL;
8438 insn = prev_nonnote_insn (insn))
8439 {
8440 note_stores (PATTERN (insn), reg_dead_at_p_1);
8441 if (reg_dead_flag)
8442 return reg_dead_flag == 1 ? 1 : 0;
8443
8444 if (find_regno_note (insn, REG_DEAD, reg_dead_regno))
8445 return 1;
8446 }
8447
8448 /* Get the basic block number that we were in. */
8449 if (insn == 0)
8450 block = 0;
8451 else
8452 {
8453 for (block = 0; block < n_basic_blocks; block++)
8454 if (insn == basic_block_head[block])
8455 break;
8456
8457 if (block == n_basic_blocks)
8458 return 0;
8459 }
8460
8461 for (i = reg_dead_regno; i < reg_dead_endregno; i++)
8462 if (basic_block_live_at_start[block][i / REGSET_ELT_BITS]
8463 & ((REGSET_ELT_TYPE) 1 << (i % REGSET_ELT_BITS)))
8464 return 0;
8465
8466 return 1;
8467}
8468\f
8469/* Remove register number REGNO from the dead registers list of INSN.
8470
8471 Return the note used to record the death, if there was one. */
8472
8473rtx
8474remove_death (regno, insn)
8475 int regno;
8476 rtx insn;
8477{
8478 register rtx note = find_regno_note (insn, REG_DEAD, regno);
8479
8480 if (note)
8481 remove_note (insn, note);
8482
8483 return note;
8484}
8485
8486/* For each register (hardware or pseudo) used within expression X, if its
8487 death is in an instruction with cuid between FROM_CUID (inclusive) and
8488 TO_INSN (exclusive), put a REG_DEAD note for that register in the
8489 list headed by PNOTES.
8490
8491 This is done when X is being merged by combination into TO_INSN. These
8492 notes will then be distributed as needed. */
8493
8494static void
8495move_deaths (x, from_cuid, to_insn, pnotes)
8496 rtx x;
8497 int from_cuid;
8498 rtx to_insn;
8499 rtx *pnotes;
8500{
8501 register char *fmt;
8502 register int len, i;
8503 register enum rtx_code code = GET_CODE (x);
8504
8505 if (code == REG)
8506 {
8507 register int regno = REGNO (x);
8508 register rtx where_dead = reg_last_death[regno];
8509
8510 if (where_dead && INSN_CUID (where_dead) >= from_cuid
8511 && INSN_CUID (where_dead) < INSN_CUID (to_insn))
8512 {
8513 rtx note = remove_death (regno, reg_last_death[regno]);
8514
8515 /* It is possible for the call above to return 0. This can occur
8516 when reg_last_death points to I2 or I1 that we combined with.
8517 In that case make a new note. */
8518
8519 if (note)
8520 {
8521 XEXP (note, 1) = *pnotes;
8522 *pnotes = note;
8523 }
8524 else
8525 *pnotes = gen_rtx (EXPR_LIST, REG_DEAD, x, *pnotes);
8526 }
8527
8528 return;
8529 }
8530
8531 else if (GET_CODE (x) == SET)
8532 {
8533 rtx dest = SET_DEST (x);
8534
8535 move_deaths (SET_SRC (x), from_cuid, to_insn, pnotes);
8536
8537 /* In the case of a ZERO_EXTRACT, a STRICT_LOW_PART, or a SUBREG
8538 that accesses one word of a multi-word item, some
8539	 piece of every register in the expression is used by
8540 this insn, so remove any old death. */
8541
8542 if (GET_CODE (dest) == ZERO_EXTRACT
8543 || GET_CODE (dest) == STRICT_LOW_PART
8544 || (GET_CODE (dest) == SUBREG
8545 && (((GET_MODE_SIZE (GET_MODE (dest))
8546 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
8547 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
8548 + UNITS_PER_WORD - 1) / UNITS_PER_WORD))))
8549	{
8550 move_deaths (dest, from_cuid, to_insn, pnotes);
8551 return;
8552 }
8553
8554 /* If this is some other SUBREG, we know it replaces the entire
8555 value, so use that as the destination. */
8556 if (GET_CODE (dest) == SUBREG)
8557 dest = SUBREG_REG (dest);
8558
8559 /* If this is a MEM, adjust deaths of anything used in the address.
8560 For a REG (the only other possibility), the entire value is
8561 being replaced so the old value is not used in this insn. */
8562
8563 if (GET_CODE (dest) == MEM)
8564 move_deaths (XEXP (dest, 0), from_cuid, to_insn, pnotes);
8565 return;
8566 }
8567
8568 else if (GET_CODE (x) == CLOBBER)
8569 return;
8570
8571 len = GET_RTX_LENGTH (code);
8572 fmt = GET_RTX_FORMAT (code);
8573
8574 for (i = 0; i < len; i++)
8575 {
8576 if (fmt[i] == 'E')
8577 {
8578 register int j;
8579 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
8580 move_deaths (XVECEXP (x, i, j), from_cuid, to_insn, pnotes);
8581 }
8582 else if (fmt[i] == 'e')
8583 move_deaths (XEXP (x, i), from_cuid, to_insn, pnotes);
8584 }
8585}
8586\f
8587/* Return 1 if X is the target of a bit-field assignment in BODY, the
8588 pattern of an insn. X must be a REG. */
8589
8590static int
8591reg_bitfield_target_p (x, body)
8592 rtx x;
8593 rtx body;
8594{
8595 int i;
8596
8597 if (GET_CODE (body) == SET)
8598 {
8599 rtx dest = SET_DEST (body);
8600 rtx target;
8601 int regno, tregno, endregno, endtregno;
8602
8603 if (GET_CODE (dest) == ZERO_EXTRACT)
8604 target = XEXP (dest, 0);
8605 else if (GET_CODE (dest) == STRICT_LOW_PART)
8606 target = SUBREG_REG (XEXP (dest, 0));
8607 else
8608 return 0;
8609
8610 if (GET_CODE (target) == SUBREG)
8611 target = SUBREG_REG (target);
8612
8613 if (GET_CODE (target) != REG)
8614 return 0;
8615
8616 tregno = REGNO (target), regno = REGNO (x);
8617 if (tregno >= FIRST_PSEUDO_REGISTER || regno >= FIRST_PSEUDO_REGISTER)
8618 return target == x;
8619
8620 endtregno = tregno + HARD_REGNO_NREGS (tregno, GET_MODE (target));
8621 endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
8622
8623 return endregno > tregno && regno < endtregno;
8624 }
8625
8626 else if (GET_CODE (body) == PARALLEL)
8627 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
8628      if (reg_bitfield_target_p (x, XVECEXP (body, 0, i)))
8629 return 1;
8630
8631 return 0;
8632}
8633\f
8634/* Given a chain of REG_NOTES originally from FROM_INSN, try to place them
8635 as appropriate. I3 and I2 are the insns resulting from the combination
8636 insns including FROM (I2 may be zero).
8637
8638 ELIM_I2 and ELIM_I1 are either zero or registers that we know will
8639 not need REG_DEAD notes because they are being substituted for. This
8640 saves searching in the most common cases.
8641
8642 Each note in the list is either ignored or placed on some insns, depending
8643 on the type of note. */
8644
8645static void
8646distribute_notes (notes, from_insn, i3, i2, elim_i2, elim_i1)
8647 rtx notes;
8648 rtx from_insn;
8649 rtx i3, i2;
8650 rtx elim_i2, elim_i1;
8651{
8652 rtx note, next_note;
8653 rtx tem;
8654
8655 for (note = notes; note; note = next_note)
8656 {
8657 rtx place = 0, place2 = 0;
8658
8659 /* If this NOTE references a pseudo register, ensure it references
8660 the latest copy of that register. */
8661 if (XEXP (note, 0) && GET_CODE (XEXP (note, 0)) == REG
8662 && REGNO (XEXP (note, 0)) >= FIRST_PSEUDO_REGISTER)
8663 XEXP (note, 0) = regno_reg_rtx[REGNO (XEXP (note, 0))];
8664
8665 next_note = XEXP (note, 1);
8666 switch (REG_NOTE_KIND (note))
8667 {
8668 case REG_UNUSED:
8669 /* If this register is set or clobbered in I3, put the note there
8670 unless there is one already. */
8671 if (reg_set_p (XEXP (note, 0), PATTERN (i3)))
8672 {
8673 if (! (GET_CODE (XEXP (note, 0)) == REG
8674 ? find_regno_note (i3, REG_UNUSED, REGNO (XEXP (note, 0)))
8675 : find_reg_note (i3, REG_UNUSED, XEXP (note, 0))))
8676 place = i3;
8677 }
8678 /* Otherwise, if this register is used by I3, then this register
8679 now dies here, so we must put a REG_DEAD note here unless there
8680 is one already. */
8681 else if (reg_referenced_p (XEXP (note, 0), PATTERN (i3))
8682 && ! (GET_CODE (XEXP (note, 0)) == REG
8683 ? find_regno_note (i3, REG_DEAD, REGNO (XEXP (note, 0)))
8684 : find_reg_note (i3, REG_DEAD, XEXP (note, 0))))
8685 {
8686 PUT_REG_NOTE_KIND (note, REG_DEAD);
8687 place = i3;
8688 }
8689 break;
8690
8691 case REG_EQUAL:
8692 case REG_EQUIV:
8693 case REG_NONNEG:
8694 /* These notes say something about results of an insn. We can
8695 only support them if they used to be on I3 in which case they
8696 remain on I3. Otherwise they are ignored.
8697
8698 If the note refers to an expression that is not a constant, we
8699 must also ignore the note since we cannot tell whether the
8700 equivalence is still true. It might be possible to do
8701 slightly better than this (we only have a problem if I2DEST
8702 or I1DEST is present in the expression), but it doesn't
8703 seem worth the trouble. */
8704
8705 if (from_insn == i3
8706 && (XEXP (note, 0) == 0 || CONSTANT_P (XEXP (note, 0))))
8707 place = i3;
8708 break;
8709
8710 case REG_INC:
8711 case REG_NO_CONFLICT:
8712 case REG_LABEL:
8713 /* These notes say something about how a register is used. They must
8714 be present on any use of the register in I2 or I3. */
8715 if (reg_mentioned_p (XEXP (note, 0), PATTERN (i3)))
8716 place = i3;
8717
8718 if (i2 && reg_mentioned_p (XEXP (note, 0), PATTERN (i2)))
8719 {
8720 if (place)
8721 place2 = i2;
8722 else
8723 place = i2;
8724 }
8725 break;
8726
8727 case REG_WAS_0:
8728 /* It is too much trouble to try to see if this note is still
8729 correct in all situations. It is better to simply delete it. */
8730 break;
8731
8732 case REG_RETVAL:
8733 /* If the insn previously containing this note still exists,
8734 put it back where it was. Otherwise move it to the previous
8735 insn. Adjust the corresponding REG_LIBCALL note. */
8736 if (GET_CODE (from_insn) != NOTE)
8737 place = from_insn;
8738 else
8739 {
8740	      tem = find_reg_note (XEXP (note, 0), REG_LIBCALL, NULL_RTX);
8741 place = prev_real_insn (from_insn);
8742 if (tem && place)
8743 XEXP (tem, 0) = place;
8744 }
8745 break;
8746
8747 case REG_LIBCALL:
8748 /* This is handled similarly to REG_RETVAL. */
8749 if (GET_CODE (from_insn) != NOTE)
8750 place = from_insn;
8751 else
8752 {
8753	      tem = find_reg_note (XEXP (note, 0), REG_RETVAL, NULL_RTX);
8754 place = next_real_insn (from_insn);
8755 if (tem && place)
8756 XEXP (tem, 0) = place;
8757 }
8758 break;
8759
8760 case REG_DEAD:
8761 /* If the register is used as an input in I3, it dies there.
8762 Similarly for I2, if it is non-zero and adjacent to I3.
8763
8764 If the register is not used as an input in either I3 or I2
8765 and it is not one of the registers we were supposed to eliminate,
8766 there are two possibilities. We might have a non-adjacent I2
8767 or we might have somehow eliminated an additional register
8768 from a computation. For example, we might have had A & B where
8769 we discover that B will always be zero. In this case we will
8770 eliminate the reference to A.
8771
8772 In both cases, we must search to see if we can find a previous
8773 use of A and put the death note there. */
8774
8775 if (reg_referenced_p (XEXP (note, 0), PATTERN (i3)))
8776 place = i3;
8777 else if (i2 != 0 && next_nonnote_insn (i2) == i3
8778 && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
8779 place = i2;
8780
8781 if (XEXP (note, 0) == elim_i2 || XEXP (note, 0) == elim_i1)
8782 break;
8783
8784 /* If the register is used in both I2 and I3 and it dies in I3,
8785 we might have added another reference to it. If reg_n_refs
8786 was 2, bump it to 3. This has to be correct since the
8787 register must have been set somewhere. The reason this is
8788 done is because local-alloc.c treats 2 references as a
8789 special case. */
8790
8791 if (place == i3 && i2 != 0 && GET_CODE (XEXP (note, 0)) == REG
8792	      && reg_n_refs[REGNO (XEXP (note, 0))] == 2
8793 && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
8794 reg_n_refs[REGNO (XEXP (note, 0))] = 3;
8795
8796 if (place == 0)
8797 for (tem = prev_nonnote_insn (i3);
8798 tem && (GET_CODE (tem) == INSN
8799 || GET_CODE (tem) == CALL_INSN);
8800 tem = prev_nonnote_insn (tem))
8801 {
8802 /* If the register is being set at TEM, see if that is all
8803 TEM is doing. If so, delete TEM. Otherwise, make this
8804 into a REG_UNUSED note instead. */
8805 if (reg_set_p (XEXP (note, 0), PATTERN (tem)))
8806 {
8807 rtx set = single_set (tem);
8808
8809 /* Verify that it was the set, and not a clobber that
8810 modified the register. */
8811
8812 if (set != 0 && ! side_effects_p (SET_SRC (set))
8813 && rtx_equal_p (XEXP (note, 0), SET_DEST (set)))
8814 {
8815 /* Move the notes and links of TEM elsewhere.
8816 This might delete other dead insns recursively.
8817 First set the pattern to something that won't use
8818 any register. */
8819
8820 PATTERN (tem) = pc_rtx;
8821
8822 distribute_notes (REG_NOTES (tem), tem, tem,
8823 NULL_RTX, NULL_RTX, NULL_RTX);
8824 distribute_links (LOG_LINKS (tem));
8825
8826 PUT_CODE (tem, NOTE);
8827 NOTE_LINE_NUMBER (tem) = NOTE_INSN_DELETED;
8828 NOTE_SOURCE_FILE (tem) = 0;
8829 }
8830 else
8831 {
8832 PUT_REG_NOTE_KIND (note, REG_UNUSED);
8833
8834 /* If there isn't already a REG_UNUSED note, put one
8835 here. */
8836 if (! find_regno_note (tem, REG_UNUSED,
8837 REGNO (XEXP (note, 0))))
8838 place = tem;
8839 break;
8840 }
8841 }
8842 else if (reg_referenced_p (XEXP (note, 0), PATTERN (tem)))
8843 {
8844 place = tem;
8845 break;
8846 }
8847 }
8848
8849 /* If the register is set or already dead at PLACE, we needn't do
8850 anything with this note if it is still a REG_DEAD note.
8851
8852 Note that we cannot use just `dead_or_set_p' here since we can
8853 convert an assignment to a register into a bit-field assignment.
8854 Therefore, we must also omit the note if the register is the
8855 target of a bitfield assignment. */
8856
8857 if (place && REG_NOTE_KIND (note) == REG_DEAD)
8858 {
8859 int regno = REGNO (XEXP (note, 0));
8860
8861 if (dead_or_set_p (place, XEXP (note, 0))
8862 || reg_bitfield_target_p (XEXP (note, 0), PATTERN (place)))
8863 {
8864 /* Unless the register previously died in PLACE, clear
8865 reg_last_death. [I no longer understand why this is
8866 being done.] */
8867 if (reg_last_death[regno] != place)
8868 reg_last_death[regno] = 0;
8869 place = 0;
8870 }
8871 else
8872 reg_last_death[regno] = place;
8873
8874 /* If this is a death note for a hard reg that is occupying
8875 multiple registers, ensure that we are still using all
8876 parts of the object. If we find a piece of the object
8877 that is unused, we must add a USE for that piece before
8878 PLACE and put the appropriate REG_DEAD note on it.
8879
8880 An alternative would be to put a REG_UNUSED for the pieces
8881 on the insn that set the register, but that can't be done if
8882 it is not in the same block. It is simpler, though less
8883 efficient, to add the USE insns. */
8884
8885 if (place && regno < FIRST_PSEUDO_REGISTER
8886 && HARD_REGNO_NREGS (regno, GET_MODE (XEXP (note, 0))) > 1)
8887 {
8888 int endregno
8889 = regno + HARD_REGNO_NREGS (regno,
8890 GET_MODE (XEXP (note, 0)));
8891 int all_used = 1;
8892 int i;
8893
8894 for (i = regno; i < endregno; i++)
8895 if (! refers_to_regno_p (i, i + 1, PATTERN (place), 0))
8896 {
8897 rtx piece = gen_rtx (REG, word_mode, i);
8898 rtx p;
8899
8900 /* See if we already placed a USE note for this
8901 register in front of PLACE. */
8902 for (p = place;
8903 GET_CODE (PREV_INSN (p)) == INSN
8904 && GET_CODE (PATTERN (PREV_INSN (p))) == USE;
8905 p = PREV_INSN (p))
8906 if (rtx_equal_p (piece,
8907 XEXP (PATTERN (PREV_INSN (p)), 0)))
8908 {
8909 p = 0;
8910 break;
8911 }
8912
8913 if (p)
8914 {
8915 rtx use_insn
8916 = emit_insn_before (gen_rtx (USE, VOIDmode,
8917 piece),
8918 p);
8919 REG_NOTES (use_insn)
8920 = gen_rtx (EXPR_LIST, REG_DEAD, piece,
8921 REG_NOTES (use_insn));
8922 }
8923
8924		  all_used = 0;
8925 }
8926
8927 if (! all_used)
8928 {
8929 /* Put only REG_DEAD notes for pieces that are
8930 still used and that are not already dead or set. */
8931
8932 for (i = regno; i < endregno; i++)
8933 {
8934 rtx piece = gen_rtx (REG, word_mode, i);
8935
8936 if (reg_referenced_p (piece, PATTERN (place))
8937 && ! dead_or_set_p (place, piece)
8938 && ! reg_bitfield_target_p (piece,
8939 PATTERN (place)))
8940 REG_NOTES (place) = gen_rtx (EXPR_LIST, REG_DEAD,
8941 piece,
8942 REG_NOTES (place));
8943 }
8944
8945 place = 0;
8946 }
8947 }
8948 }
8949 break;
8950
8951 default:
8952 /* Any other notes should not be present at this point in the
8953 compilation. */
8954 abort ();
8955 }
8956
8957 if (place)
8958 {
8959 XEXP (note, 1) = REG_NOTES (place);
8960 REG_NOTES (place) = note;
8961 }
8962
8963 if (place2)
8964 REG_NOTES (place2) = gen_rtx (GET_CODE (note), REG_NOTE_KIND (note),
8965 XEXP (note, 0), REG_NOTES (place2));
8966 }
8967}
8968\f
8969/* Similarly to above, distribute the LOG_LINKS that used to be present on
8970 I3, I2, and I1 to new locations. This is also called in one case to
8971 add a link pointing at I3 when I3's destination is changed. */
8972
8973static void
8974distribute_links (links)
8975 rtx links;
8976{
8977 rtx link, next_link;
8978
8979 for (link = links; link; link = next_link)
8980 {
8981 rtx place = 0;
8982 rtx insn;
8983 rtx set, reg;
8984
8985 next_link = XEXP (link, 1);
8986
8987 /* If the insn that this link points to is a NOTE or isn't a single
8988 set, ignore it. In the latter case, it isn't clear what we
8989 can do other than ignore the link, since we can't tell which
8990 register it was for. Such links wouldn't be used by combine
8991 anyway.
8992
8993 It is not possible for the destination of the target of the link to
8994	 have been changed by combine. The only way this could happen is if we
8995 replace I3, I2, and I1 by I3 and I2. But in that case the
8996 destination of I2 also remains unchanged. */
8997
8998 if (GET_CODE (XEXP (link, 0)) == NOTE
8999 || (set = single_set (XEXP (link, 0))) == 0)
9000 continue;
9001
9002 reg = SET_DEST (set);
9003 while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
9004 || GET_CODE (reg) == SIGN_EXTRACT
9005 || GET_CODE (reg) == STRICT_LOW_PART)
9006 reg = XEXP (reg, 0);
9007
9008 /* A LOG_LINK is defined as being placed on the first insn that uses
9009 a register and points to the insn that sets the register. Start
9010 searching at the next insn after the target of the link and stop
9011 when we reach a set of the register or the end of the basic block.
9012
9013 Note that this correctly handles the link that used to point from
9014	 I3 to I2. Also note that not much searching is typically done here
9015 since most links don't point very far away. */
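      /* For example, a link on I3 that pointed at the old I2 ends up on
	 the first insn after I2 that still uses the register I2 set.  */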
9016
9017 for (insn = NEXT_INSN (XEXP (link, 0));
9018 (insn && GET_CODE (insn) != CODE_LABEL
9019 && GET_CODE (PREV_INSN (insn)) != JUMP_INSN);
9020 insn = NEXT_INSN (insn))
9021 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
9022 && reg_overlap_mentioned_p (reg, PATTERN (insn)))
9023 {
9024 if (reg_referenced_p (reg, PATTERN (insn)))
9025 place = insn;
9026 break;
9027 }
9028
9029 /* If we found a place to put the link, place it there unless there
9030 is already a link to the same insn as LINK at that point. */
9031
9032 if (place)
9033 {
9034 rtx link2;
9035
9036 for (link2 = LOG_LINKS (place); link2; link2 = XEXP (link2, 1))
9037 if (XEXP (link2, 0) == XEXP (link, 0))
9038 break;
9039
9040 if (link2 == 0)
9041 {
9042 XEXP (link, 1) = LOG_LINKS (place);
9043 LOG_LINKS (place) = link;
9044 }
9045 }
9046 }
9047}
9048\f
9049void
9050dump_combine_stats (file)
9051 FILE *file;
9052{
9053 fprintf
9054 (file,
9055 ";; Combiner statistics: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n\n",
9056 combine_attempts, combine_merges, combine_extras, combine_successes);
9057}
9058
9059void
9060dump_combine_total_stats (file)
9061 FILE *file;
9062{
9063 fprintf
9064 (file,
9065 "\n;; Combiner totals: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n",
9066 total_attempts, total_merges, total_extras, total_successes);
9067}