/* Optimize by combining instructions for GNU compiler.
   Copyright (C) 1987, 1988, 1992 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.  */

/* This module is essentially the "combiner" phase of the U. of Arizona
   Portable Optimizer, but redone to work on our list-structured
   representation for RTL instead of their string representation.

   The LOG_LINKS of each insn identify the most recent assignment
   to each REG used in the insn.  It is a list of previous insns,
   each of which contains a SET for a REG that is used in this insn
   and not used or set in between.  LOG_LINKs never cross basic blocks.
   They were set up by the preceding pass (lifetime analysis).

   We try to combine each pair of insns joined by a logical link.
   We also try to combine triples of insns A, B and C when
   C has a link back to B and B has a link back to A.

   LOG_LINKS does not have links for uses of CC0.  They don't
   need to, because the insn that sets CC0 is always immediately
   before the insn that tests it.  So we always regard a branch
   insn as having a logical link to the preceding insn.  The same is true
   for an insn explicitly using CC0.

   We check (with use_crosses_set_p) to avoid combining in such a way
   as to move a computation to a place where its value would be different.

   Combination is done by mathematically substituting the previous
   insn(s) values for the regs they set into the expressions in
   the later insns that refer to these regs.  If the result is a valid insn
   for our target machine, according to the machine description,
   we install it, delete the earlier insns, and update the data flow
   information (LOG_LINKS and REG_NOTES) for what we did.

   There are a few exceptions where the dataflow information created by
   flow.c isn't completely updated:

   - reg_live_length is not updated
   - reg_n_refs is not adjusted in the rare case when a register is
     no longer required in a computation
   - there are extremely rare cases (see distribute_notes) when a
     REG_DEAD note is lost
   - a LOG_LINKS entry that refers to an insn with multiple SETs may be
     removed because there is no way to know which register it was
     linking

   To simplify substitution, we combine only when the earlier insn(s)
   consist of only a single assignment.  To simplify updating afterward,
   we never combine when a subroutine call appears in the middle.

   Since we do not represent assignments to CC0 explicitly except when that
   is all an insn does, there is no LOG_LINKS entry in an insn that uses
   the condition code for the insn that set the condition code.
   Fortunately, these two insns must be consecutive.
   Therefore, every JUMP_INSN is taken to have an implicit logical link
   to the preceding insn.  This is not quite right, since non-jumps can
   also use the condition code; but in practice such insns would not
   combine anyway.  */
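
/* An illustrative sketch of such a substitution (hypothetical RTL, not
   taken from any particular target): given the linked pair

	(set (reg:SI 100) (plus:SI (reg:SI 99) (const_int 4)))
	(set (mem:SI (reg:SI 100)) (reg:SI 98))

   where (reg 100) is used only by the second insn, substituting produces

	(set (mem:SI (plus:SI (reg:SI 99) (const_int 4))) (reg:SI 98))

   which, if the machine description recognizes it, replaces both insns.  */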

#include "config.h"
#include "gvarargs.h"
#include "rtl.h"
#include "flags.h"
#include "regs.h"
#include "expr.h"
#include "basic-block.h"
#include "insn-config.h"
#include "insn-flags.h"
#include "insn-codes.h"
#include "insn-attr.h"
#include "recog.h"
#include "real.h"
#include <stdio.h>

/* It is not safe to use ordinary gen_lowpart in combine.
   Use gen_lowpart_for_combine instead.  See comments there.  */
#define gen_lowpart dont_use_gen_lowpart_you_dummy

/* Number of attempts to combine instructions in this function.  */

static int combine_attempts;

/* Number of attempts that got as far as substitution in this function.  */

static int combine_merges;

/* Number of instructions combined with added SETs in this function.  */

static int combine_extras;

/* Number of instructions combined in this function.  */

static int combine_successes;

/* Totals over entire compilation.  */

static int total_attempts, total_merges, total_extras, total_successes;

/* Vector mapping INSN_UIDs to cuids.
   The cuids are like uids but always increase monotonically.
   Combine always uses cuids so that it can compare them.
   But actually renumbering the uids, which we used to do,
   proves to be a bad idea because it makes it hard to compare
   the dumps produced by earlier passes with those from later passes.  */

static int *uid_cuid;

/* Get the cuid of an insn.  */

#define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])

/* Maximum register number, which is the size of the tables below.  */

static int combine_max_regno;

/* Record last point of death of (hard or pseudo) register n.  */

static rtx *reg_last_death;

/* Record last point of modification of (hard or pseudo) register n.  */

static rtx *reg_last_set;

/* Record the cuid of the last insn that invalidated memory
   (anything that writes memory, and subroutine calls, but not pushes).  */

static int mem_last_set;

/* Record the cuid of the last CALL_INSN
   so we can tell whether a potential combination crosses any calls.  */

static int last_call_cuid;

/* When `subst' is called, this is the insn that is being modified
   (by combining in a previous insn).  The PATTERN of this insn
   is still the old pattern partially modified and it should not be
   looked at, but this may be used to examine the successors of the insn
   to judge whether a simplification is valid.  */

static rtx subst_insn;

/* This is the lowest CUID that `subst' is currently dealing with.
   get_last_value will not return a value if the register was set at or
   after this CUID.  If not for this mechanism, we could get confused if
   I2 or I1 in try_combine were an insn that used the old value of a register
   to obtain a new value.  In that case, we might erroneously get the
   new value of the register when we wanted the old one.  */

static int subst_low_cuid;

/* This is the value of undobuf.num_undo when we started processing this
   substitution.  This will prevent gen_rtx_combine from re-using a piece
   from the previous expression.  Doing so can produce circular rtl
   structures.  */

static int previous_num_undos;

/* The next group of arrays allows the recording of the last value assigned
   to (hard or pseudo) register n.  We use this information to see if an
   operation being processed is redundant given a prior operation performed
   on the register.  For example, an `and' with a constant is redundant if
   all the zero bits are already known to be turned off.

   We use an approach similar to that used by cse, but change it in the
   following ways:

   (1) We do not want to reinitialize at each label.
   (2) It is useful, but not critical, to know the actual value assigned
       to a register.  Often just its form is helpful.

   Therefore, we maintain the following arrays:

   reg_last_set_value		the last value assigned
   reg_last_set_label		records the value of label_tick when the
				register was assigned
   reg_last_set_table_tick	records the value of label_tick when a
				value using the register is assigned
   reg_last_set_invalid		set to non-zero when it is not valid
				to use the value of this register in some
				register's value

   To understand the usage of these tables, it is important to understand
   the distinction between the value in reg_last_set_value being valid
   and the register being validly contained in some other expression in the
   table.

   Entry I in reg_last_set_value is valid if it is non-zero, and either
   reg_n_sets[i] is 1 or reg_last_set_label[i] == label_tick.

   Register I may validly appear in any expression returned for the value
   of another register if reg_n_sets[i] is 1.  It may also appear in the
   value for register J if reg_last_set_label[i] < reg_last_set_label[j] or
   reg_last_set_invalid[j] is zero.

   If an expression is found in the table containing a register which may
   not validly appear in an expression, the register is replaced by
   something that won't match, (clobber (const_int 0)).

   reg_last_set_invalid[i] is set non-zero when register I is being assigned
   to and reg_last_set_table_tick[i] == label_tick.  */
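
/* A hypothetical example of the redundancy test described above: if
   reg_last_set_value[65] is (zero_extend:SI (mem:QI (reg 70))), then a
   later (and:SI (reg 65) (const_int 255)) can be simplified to just
   (reg 65), since the upper bits are already known to be zero.  */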

/* Record last value assigned to (hard or pseudo) register n.  */

static rtx *reg_last_set_value;

/* Record the value of label_tick when the value for register n is placed in
   reg_last_set_value[n].  */

static short *reg_last_set_label;

/* Record the value of label_tick when an expression involving register n
   is placed in reg_last_set_value.  */

static short *reg_last_set_table_tick;

/* Set non-zero if references to register n in expressions should not be
   used.  */

static char *reg_last_set_invalid;

/* Incremented for each label.  */

static short label_tick;

/* Some registers that are set more than once and used in more than one
   basic block are nevertheless always set in similar ways.  For example,
   a QImode register may be loaded from memory in two places on a machine
   where byte loads zero extend.

   We record in the following array what we know about the significant
   bits of a register, specifically which bits are known to be zero.

   If an entry is zero, it means that we don't know anything special.  */

static HOST_WIDE_INT *reg_significant;

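/* For example (a sketch, not taken from any particular machine
   description): if every SET of pseudo 65 has the form

	(set (reg:SI 65) (zero_extend:SI (mem:QI ...)))

   then reg_significant[65] will be 255; only the low 8 bits are ever
   significant.  */
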
/* Mode used to compute significance in reg_significant.  It is the largest
   integer mode that can fit in HOST_BITS_PER_WIDE_INT.  */

static enum machine_mode significant_mode;

/* Nonzero if we know that a register has some leading bits that are always
   equal to the sign bit.  */

static char *reg_sign_bit_copies;

/* Nonzero when reg_significant and reg_sign_bit_copies can be safely used.
   It is zero while computing them and after combine has completed.  The
   former condition prevents propagating values based on previously set
   values, which can be incorrect if a variable is modified in a loop.  */

static int significant_valid;

/* Record one modification to rtl structure
   to be undone by storing old_contents into *where.
   is_int is 1 if the contents are an int.  */

struct undo
{
  int is_int;
  union {rtx rtx; int i;} old_contents;
  union {rtx *rtx; int *i;} where;
};

/* Record a bunch of changes to be undone, up to MAX_UNDO of them.
   num_undo says how many are currently recorded.

   storage is nonzero if we must undo the allocation of new storage.
   The value of storage is what to pass to obfree.

   other_insn is nonzero if we have modified some other insn in the process
   of working on subst_insn.  It must be verified too.  */

#define MAX_UNDO 50

struct undobuf
{
  int num_undo;
  char *storage;
  struct undo undo[MAX_UNDO];
  rtx other_insn;
};

static struct undobuf undobuf;

/* Substitute NEWVAL, an rtx expression, into INTO, a place in some
   insn.  The substitution can be undone by undo_all.  If INTO is already
   set to NEWVAL, do not record this change.  Because computing NEWVAL might
   also call SUBST, we have to compute it before we put anything into
   the undo table.  */

#define SUBST(INTO, NEWVAL)  \
 do { rtx _new = (NEWVAL);                                              \
      if (undobuf.num_undo < MAX_UNDO)                                  \
        {                                                               \
          undobuf.undo[undobuf.num_undo].is_int = 0;                    \
          undobuf.undo[undobuf.num_undo].where.rtx = &INTO;             \
          undobuf.undo[undobuf.num_undo].old_contents.rtx = INTO;       \
          INTO = _new;                                                  \
          if (undobuf.undo[undobuf.num_undo].old_contents.rtx != INTO)  \
            undobuf.num_undo++;                                         \
        }                                                               \
    } while (0)

/* Similar to SUBST, but NEWVAL is an int.  INTO will normally be an XINT
   expression.
   Note that substitution for the value of a CONST_INT is not safe.  */

#define SUBST_INT(INTO, NEWVAL)  \
 do { if (undobuf.num_undo < MAX_UNDO)                                  \
        {                                                               \
          undobuf.undo[undobuf.num_undo].is_int = 1;                    \
          undobuf.undo[undobuf.num_undo].where.i = (int *) &INTO;       \
          undobuf.undo[undobuf.num_undo].old_contents.i = INTO;         \
          INTO = NEWVAL;                                                \
          if (undobuf.undo[undobuf.num_undo].old_contents.i != INTO)    \
            undobuf.num_undo++;                                         \
        }                                                               \
    } while (0)
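
/* A hypothetical usage sketch for the two macros above: to replace the
   source of a SET while keeping the change undoable,

	SUBST (SET_SRC (pat), new_src);

   and then either keep the result or back out every recorded change with
   undo_all ().  NEW_SRC here is a stand-in for whatever rtx the caller
   computed; it is evaluated once, before the undo entry is recorded.  */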

/* Number of times the pseudo being substituted for
   was found and replaced.  */

static int n_occurrences;

static void set_significant ();
static void move_deaths ();
rtx remove_death ();
static void record_value_for_reg ();
static void record_dead_and_set_regs ();
static int use_crosses_set_p ();
static rtx try_combine ();
static rtx *find_split_point ();
static rtx subst ();
static void undo_all ();
static int reg_dead_at_p ();
static rtx expand_compound_operation ();
static rtx expand_field_assignment ();
static rtx make_extraction ();
static int get_pos_from_mask ();
static rtx force_to_mode ();
static rtx known_cond ();
static rtx make_field_assignment ();
static rtx make_compound_operation ();
static rtx apply_distributive_law ();
static rtx simplify_and_const_int ();
static unsigned HOST_WIDE_INT significant_bits ();
static int num_sign_bit_copies ();
static int merge_outer_ops ();
static rtx simplify_shift_const ();
static int recog_for_combine ();
static rtx gen_lowpart_for_combine ();
static rtx gen_rtx_combine ();
static rtx gen_binary ();
static rtx gen_unary ();
static enum rtx_code simplify_comparison ();
static int reversible_comparison_p ();
static int get_last_value_validate ();
static rtx get_last_value ();
static void distribute_notes ();
static void distribute_links ();

/* Main entry point for combiner.  F is the first insn of the function.
   NREGS is the first unused pseudo-reg number.  */

void
combine_instructions (f, nregs)
     rtx f;
     int nregs;
{
  register rtx insn, next, prev;
  register int i;
  register rtx links, nextlinks;

  combine_attempts = 0;
  combine_merges = 0;
  combine_extras = 0;
  combine_successes = 0;

  combine_max_regno = nregs;

  reg_last_death = (rtx *) alloca (nregs * sizeof (rtx));
  reg_last_set = (rtx *) alloca (nregs * sizeof (rtx));
  reg_last_set_value = (rtx *) alloca (nregs * sizeof (rtx));
  reg_last_set_table_tick = (short *) alloca (nregs * sizeof (short));
  reg_last_set_label = (short *) alloca (nregs * sizeof (short));
  reg_last_set_invalid = (char *) alloca (nregs * sizeof (char));
  reg_significant = (HOST_WIDE_INT *) alloca (nregs * sizeof (HOST_WIDE_INT));
  reg_sign_bit_copies = (char *) alloca (nregs * sizeof (char));

  bzero (reg_last_death, nregs * sizeof (rtx));
  bzero (reg_last_set, nregs * sizeof (rtx));
  bzero (reg_last_set_value, nregs * sizeof (rtx));
  bzero (reg_last_set_table_tick, nregs * sizeof (short));
  bzero (reg_last_set_invalid, nregs * sizeof (char));
  bzero (reg_significant, nregs * sizeof (HOST_WIDE_INT));
  bzero (reg_sign_bit_copies, nregs * sizeof (char));

  init_recog_no_volatile ();

  /* Compute maximum uid value so uid_cuid can be allocated.  */

  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    if (INSN_UID (insn) > i)
      i = INSN_UID (insn);

  uid_cuid = (int *) alloca ((i + 1) * sizeof (int));

  significant_mode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);

  /* Don't use reg_significant when computing it.  This can cause problems
     when, for example, we have j <<= 1 in a loop.  */

  significant_valid = 0;

  /* Compute the mapping from uids to cuids.
     Cuids are numbers assigned to insns, like uids,
     except that cuids increase monotonically through the code.

     Scan all SETs and see if we can deduce anything about what
     bits are significant for some registers.  */

  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    {
      INSN_CUID (insn) = ++i;
      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	note_stores (PATTERN (insn), set_significant);
    }

  significant_valid = 1;

  /* Now scan all the insns in forward order.  */

  label_tick = 1;
  last_call_cuid = 0;
  mem_last_set = 0;

  for (insn = f; insn; insn = next ? next : NEXT_INSN (insn))
    {
      next = 0;

      if (GET_CODE (insn) == CODE_LABEL)
	label_tick++;

      else if (GET_CODE (insn) == INSN
	       || GET_CODE (insn) == CALL_INSN
	       || GET_CODE (insn) == JUMP_INSN)
	{
	  /* Try this insn with each insn it links back to.  */

	  for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
	    if ((next = try_combine (insn, XEXP (links, 0), NULL_RTX)) != 0)
	      goto retry;

	  /* Try each sequence of three linked insns ending with this one.  */

	  for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
	    for (nextlinks = LOG_LINKS (XEXP (links, 0)); nextlinks;
		 nextlinks = XEXP (nextlinks, 1))
	      if ((next = try_combine (insn, XEXP (links, 0),
				       XEXP (nextlinks, 0))) != 0)
		goto retry;

#ifdef HAVE_cc0
	  /* Try to combine a jump insn that uses CC0
	     with a preceding insn that sets CC0, and maybe with its
	     logical predecessor as well.
	     This is how we make decrement-and-branch insns.
	     We need this special code because data flow connections
	     via CC0 do not get entered in LOG_LINKS.  */

	  if (GET_CODE (insn) == JUMP_INSN
	      && (prev = prev_nonnote_insn (insn)) != 0
	      && GET_CODE (prev) == INSN
	      && sets_cc0_p (PATTERN (prev)))
	    {
	      if ((next = try_combine (insn, prev, NULL_RTX)) != 0)
		goto retry;

	      for (nextlinks = LOG_LINKS (prev); nextlinks;
		   nextlinks = XEXP (nextlinks, 1))
		if ((next = try_combine (insn, prev,
					 XEXP (nextlinks, 0))) != 0)
		  goto retry;
	    }

	  /* Do the same for an insn that explicitly references CC0.  */
	  if (GET_CODE (insn) == INSN
	      && (prev = prev_nonnote_insn (insn)) != 0
	      && GET_CODE (prev) == INSN
	      && sets_cc0_p (PATTERN (prev))
	      && GET_CODE (PATTERN (insn)) == SET
	      && reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (insn))))
	    {
	      if ((next = try_combine (insn, prev, NULL_RTX)) != 0)
		goto retry;

	      for (nextlinks = LOG_LINKS (prev); nextlinks;
		   nextlinks = XEXP (nextlinks, 1))
		if ((next = try_combine (insn, prev,
					 XEXP (nextlinks, 0))) != 0)
		  goto retry;
	    }

	  /* Finally, see if any of the insns that this insn links to
	     explicitly references CC0.  If so, try this insn, that insn,
	     and its predecessor if it sets CC0.  */
	  for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
	    if (GET_CODE (XEXP (links, 0)) == INSN
		&& GET_CODE (PATTERN (XEXP (links, 0))) == SET
		&& reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (XEXP (links, 0))))
		&& (prev = prev_nonnote_insn (XEXP (links, 0))) != 0
		&& GET_CODE (prev) == INSN
		&& sets_cc0_p (PATTERN (prev))
		&& (next = try_combine (insn, XEXP (links, 0), prev)) != 0)
	      goto retry;
#endif

	  /* Try combining an insn with two different insns whose results it
	     uses.  */
	  for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
	    for (nextlinks = XEXP (links, 1); nextlinks;
		 nextlinks = XEXP (nextlinks, 1))
	      if ((next = try_combine (insn, XEXP (links, 0),
				       XEXP (nextlinks, 0))) != 0)
		goto retry;

	  if (GET_CODE (insn) != NOTE)
	    record_dead_and_set_regs (insn);

	retry:
	  ;
	}
    }

  total_attempts += combine_attempts;
  total_merges += combine_merges;
  total_extras += combine_extras;
  total_successes += combine_successes;

  significant_valid = 0;
}

/* Called via note_stores.  If X is a pseudo that is used in more than
   one basic block, is narrower than HOST_BITS_PER_WIDE_INT, and is being
   set, record what bits are significant.  If we are clobbering X,
   ignore this "set" because the clobbered value won't be used.

   If we are setting only a portion of X and we can't figure out what
   portion, assume all bits will be used since we don't know what will
   be happening.

   Similarly, set how many bits of X are known to be copies of the sign bit
   at all locations in the function.  This is the smallest number implied
   by any set of X.  */

static void
set_significant (x, set)
     rtx x;
     rtx set;
{
  int num;

  if (GET_CODE (x) == REG
      && REGNO (x) >= FIRST_PSEUDO_REGISTER
      && reg_n_sets[REGNO (x)] > 1
      && reg_basic_block[REGNO (x)] < 0
      && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (set) == CLOBBER)
	return;

      /* If this is a complex assignment, see if we can convert it into a
	 simple assignment.  */
      set = expand_field_assignment (set);
      if (SET_DEST (set) == x)
	{
	  reg_significant[REGNO (x)]
	    |= significant_bits (SET_SRC (set), significant_mode);
	  num = num_sign_bit_copies (SET_SRC (set), GET_MODE (x));
	  if (reg_sign_bit_copies[REGNO (x)] == 0
	      || reg_sign_bit_copies[REGNO (x)] > num)
	    reg_sign_bit_copies[REGNO (x)] = num;
	}
      else
	{
	  reg_significant[REGNO (x)] = GET_MODE_MASK (GET_MODE (x));
	  reg_sign_bit_copies[REGNO (x)] = 0;
	}
    }
}
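
/* A sketch of what the sign-bit tracking records (hypothetical RTL): if
   the only SET of pseudo 66 is

	(set (reg:SI 66) (sign_extend:SI (mem:QI ...)))

   then the top 25 bits of (reg 66) all equal the sign bit, so
   reg_sign_bit_copies[66] will be 25 (assuming 32-bit SImode).  */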

/* See if INSN can be combined into I3.  PRED and SUCC are optionally
   insns that were previously combined into I3 or that will be combined
   into the merger of INSN and I3.

   Return 0 if the combination is not allowed for any reason.

   If the combination is allowed, *PDEST will be set to the single
   destination of INSN and *PSRC to the single source, and this function
   will return 1.  */

static int
can_combine_p (insn, i3, pred, succ, pdest, psrc)
     rtx insn;
     rtx i3;
     rtx pred, succ;
     rtx *pdest, *psrc;
{
  int i;
  rtx set = 0, src, dest;
  rtx p, link;
  int all_adjacent = (succ ? (next_active_insn (insn) == succ
			      && next_active_insn (succ) == i3)
			   : next_active_insn (insn) == i3);

  /* Can combine only if previous insn is a SET of a REG, a SUBREG or CC0,
     or a PARALLEL consisting of such a SET and CLOBBERs.

     If INSN has CLOBBER parallel parts, ignore them for our processing.
     By definition, these happen during the execution of the insn.  When it
     is merged with another insn, all bets are off.  If they are, in fact,
     needed and aren't also supplied in I3, they may be added by
     recog_for_combine.  Otherwise, it won't match.

     We can also ignore a SET whose SET_DEST is mentioned in a REG_UNUSED
     note.

     Get the source and destination of INSN.  If more than one, can't
     combine.  */

  if (GET_CODE (PATTERN (insn)) == SET)
    set = PATTERN (insn);
  else if (GET_CODE (PATTERN (insn)) == PARALLEL
	   && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
    {
      for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
	{
	  rtx elt = XVECEXP (PATTERN (insn), 0, i);

	  switch (GET_CODE (elt))
	    {
	    /* We can ignore CLOBBERs.  */
	    case CLOBBER:
	      break;

	    case SET:
	      /* Ignore SETs whose result isn't used but not those that
		 have side-effects.  */
	      if (find_reg_note (insn, REG_UNUSED, SET_DEST (elt))
		  && ! side_effects_p (elt))
		break;

	      /* If we have already found a SET, this is a second one and
		 so we cannot combine with this insn.  */
	      if (set)
		return 0;

	      set = elt;
	      break;

	    default:
	      /* Anything else means we can't combine.  */
	      return 0;
	    }
	}

      if (set == 0
	  /* If SET_SRC is an ASM_OPERANDS we can't throw away these CLOBBERs,
	     so don't do anything with it.  */
	  || GET_CODE (SET_SRC (set)) == ASM_OPERANDS)
	return 0;
    }
  else
    return 0;

  if (set == 0)
    return 0;

  set = expand_field_assignment (set);
  src = SET_SRC (set), dest = SET_DEST (set);

  /* Don't eliminate a store in the stack pointer.  */
  if (dest == stack_pointer_rtx
      /* Don't install a subreg involving two modes not tieable.
	 It can worsen register allocation, and can even make invalid reload
	 insns, since the reg inside may need to be copied from in the
	 outside mode, and that may be invalid if it is an fp reg copied in
	 integer mode.  As a special exception, we can allow this if
	 I3 is simply copying DEST, a REG, to CC0.  */
      || (GET_CODE (src) == SUBREG
	  && ! MODES_TIEABLE_P (GET_MODE (src), GET_MODE (SUBREG_REG (src)))
#ifdef HAVE_cc0
	  && ! (GET_CODE (i3) == INSN && GET_CODE (PATTERN (i3)) == SET
		&& SET_DEST (PATTERN (i3)) == cc0_rtx
		&& GET_CODE (dest) == REG && dest == SET_SRC (PATTERN (i3)))
#endif
	  )
      /* If we couldn't eliminate a field assignment, we can't combine.  */
      || GET_CODE (dest) == ZERO_EXTRACT || GET_CODE (dest) == STRICT_LOW_PART
      /* Don't combine with an insn that sets a register to itself if it has
	 a REG_EQUAL note.  This may be part of a REG_NO_CONFLICT sequence.  */
      || (rtx_equal_p (src, dest) && find_reg_note (insn, REG_EQUAL, NULL_RTX))
      /* Can't merge a function call.  */
      || GET_CODE (src) == CALL
      /* Don't substitute into an incremented register.  */
      || FIND_REG_INC_NOTE (i3, dest)
      || (succ && FIND_REG_INC_NOTE (succ, dest))
      /* Don't combine the end of a libcall into anything.  */
      || find_reg_note (insn, REG_RETVAL, NULL_RTX)
      /* Make sure that DEST is not used after SUCC but before I3.  */
      || (succ && ! all_adjacent
	  && reg_used_between_p (dest, succ, i3))
      /* Make sure that the value that is to be substituted for the register
	 does not use any registers whose values alter in between.  However,
	 if the insns are adjacent, a use can't cross a set even though we
	 think it might (this can happen for a sequence of insns each setting
	 the same destination; reg_last_set of that register might point to
	 a NOTE).  Also, don't move a volatile asm across any other insns.  */
      || (! all_adjacent
	  && (use_crosses_set_p (src, INSN_CUID (insn))
	      || (GET_CODE (src) == ASM_OPERANDS && MEM_VOLATILE_P (src))))
      /* If there is a REG_NO_CONFLICT note for DEST in I3 or SUCC, we get
	 better register allocation by not doing the combine.  */
      || find_reg_note (i3, REG_NO_CONFLICT, dest)
      || (succ && find_reg_note (succ, REG_NO_CONFLICT, dest))
      /* Don't combine across a CALL_INSN, because that would possibly
	 change whether the life span of some REGs crosses calls or not,
	 and it is a pain to update that information.
	 Exception: if source is a constant, moving it later can't hurt.
	 Accept that special case, because it helps -fforce-addr a lot.  */
      || (INSN_CUID (insn) < last_call_cuid && ! CONSTANT_P (src)))
    return 0;

  /* DEST must either be a REG or CC0.  */
  if (GET_CODE (dest) == REG)
    {
      /* If register alignment is being enforced for multi-word items in all
	 cases except for parameters, it is possible to have a register copy
	 insn referencing a hard register that is not allowed to contain the
	 mode being copied and which would not be valid as an operand of most
	 insns.  Eliminate this problem by not combining with such an insn.

	 Also, on some machines we don't want to extend the life of a hard
	 register.  */

      if (GET_CODE (src) == REG
	  && ((REGNO (dest) < FIRST_PSEUDO_REGISTER
	       && ! HARD_REGNO_MODE_OK (REGNO (dest), GET_MODE (dest)))
#ifdef SMALL_REGISTER_CLASSES
	      /* Don't extend the life of a hard register.  */
	      || REGNO (src) < FIRST_PSEUDO_REGISTER
#else
	      || (REGNO (src) < FIRST_PSEUDO_REGISTER
		  && ! HARD_REGNO_MODE_OK (REGNO (src), GET_MODE (src)))
#endif
	      ))
	return 0;
    }
  else if (GET_CODE (dest) != CC0)
    return 0;

  /* Don't substitute for a register intended as a clobberable operand.
     Similarly, don't substitute an expression containing a register that
     will be clobbered in I3.  */
  if (GET_CODE (PATTERN (i3)) == PARALLEL)
    for (i = XVECLEN (PATTERN (i3), 0) - 1; i >= 0; i--)
      if (GET_CODE (XVECEXP (PATTERN (i3), 0, i)) == CLOBBER
	  && (reg_overlap_mentioned_p (XEXP (XVECEXP (PATTERN (i3), 0, i), 0),
				       src)
	      || rtx_equal_p (XEXP (XVECEXP (PATTERN (i3), 0, i), 0), dest)))
	return 0;

  /* If INSN contains anything volatile, or is an `asm' (whether volatile
     or not), reject, unless nothing volatile comes between it and I3,
     with the exception of SUCC.  */

  if (GET_CODE (src) == ASM_OPERANDS || volatile_refs_p (src))
    for (p = NEXT_INSN (insn); p != i3; p = NEXT_INSN (p))
      if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
	  && p != succ && volatile_refs_p (PATTERN (p)))
	return 0;

  /* If INSN or I2 contains an autoincrement or autodecrement,
     make sure that register is not used between there and I3,
     and not already used in I3 either.
     Also insist that I3 not be a jump; if it were one
     and the incremented register were spilled, we would lose.  */

#ifdef AUTO_INC_DEC
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == REG_INC
	&& (GET_CODE (i3) == JUMP_INSN
	    || reg_used_between_p (XEXP (link, 0), insn, i3)
	    || reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i3))))
      return 0;
#endif

#ifdef HAVE_cc0
  /* Don't combine an insn that follows a CC0-setting insn.
     An insn that uses CC0 must not be separated from the one that sets it.
     We do, however, allow I2 to follow a CC0-setting insn if that insn
     is passed as I1; in that case it will be deleted also.
     We also allow combining in this case if all the insns are adjacent
     because that would leave the two CC0 insns adjacent as well.
     It would be more logical to test whether CC0 occurs inside I1 or I2,
     but that would be much slower, and this ought to be equivalent.  */

  p = prev_nonnote_insn (insn);
  if (p && p != pred && GET_CODE (p) == INSN && sets_cc0_p (PATTERN (p))
      && ! all_adjacent)
    return 0;
#endif

  /* If we get here, we have passed all the tests and the combination is
     to be allowed.  */

  *pdest = dest;
  *psrc = src;

  return 1;
}

/* LOC is the location within I3 that contains its pattern or the component
   of a PARALLEL of the pattern.  We validate that it is valid for combining.

   One problem is if I3 modifies its output, as opposed to replacing it
   entirely, we can't allow the output to contain I2DEST or I1DEST as doing
   so would produce an insn that is not equivalent to the original insns.

   Consider:

	 (set (reg:DI 101) (reg:DI 100))
	 (set (subreg:SI (reg:DI 101) 0) <foo>)

   This is NOT equivalent to:

	 (parallel [(set (subreg:SI (reg:DI 100) 0) <foo>)
		    (set (reg:DI 101) (reg:DI 100))])

   Not only does this modify 100 (in which case it might still be valid
   if 100 were dead in I2), it sets 101 to the ORIGINAL value of 100.

   We can also run into a problem if I2 sets a register that I1
   uses and I1 gets directly substituted into I3 (not via I2).  In that
   case, we would be getting the wrong value of I2DEST into I3, so we
   must reject the combination.  This case occurs when I2 and I1 both
   feed into I3, rather than when I1 feeds into I2, which feeds into I3.
   If I1_NOT_IN_SRC is non-zero, it means that finding I1 in the source
   of a SET must prevent combination from occurring.

   On machines where SMALL_REGISTER_CLASSES is defined, we don't combine
   if the destination of a SET is a hard register.

   Before doing the above check, we first try to expand a field assignment
   into a set of logical operations.

   If PI3DEST_KILLED is non-zero, it is a pointer to a location in which
   we place a register that is both set and used within I3.  If more than one
   such register is detected, we fail.

   Return 1 if the combination is valid, zero otherwise.  */

static int
combinable_i3pat (i3, loc, i2dest, i1dest, i1_not_in_src, pi3dest_killed)
     rtx i3;
     rtx *loc;
     rtx i2dest;
     rtx i1dest;
     int i1_not_in_src;
     rtx *pi3dest_killed;
{
  rtx x = *loc;

  if (GET_CODE (x) == SET)
    {
      rtx set = expand_field_assignment (x);
      rtx dest = SET_DEST (set);
      rtx src = SET_SRC (set);
      rtx inner_dest = dest, inner_src = src;

      SUBST (*loc, set);

      while (GET_CODE (inner_dest) == STRICT_LOW_PART
	     || GET_CODE (inner_dest) == SUBREG
	     || GET_CODE (inner_dest) == ZERO_EXTRACT)
	inner_dest = XEXP (inner_dest, 0);

      /* We probably don't need this any more now that LIMIT_RELOAD_CLASS
	 was added.  */
#if 0
      while (GET_CODE (inner_src) == STRICT_LOW_PART
	     || GET_CODE (inner_src) == SUBREG
	     || GET_CODE (inner_src) == ZERO_EXTRACT)
	inner_src = XEXP (inner_src, 0);

      /* If it is better that two different modes keep two different pseudos,
	 avoid combining them.  This avoids producing the following pattern
	 on a 386:
	  (set (subreg:SI (reg/v:QI 21) 0)
	       (lshiftrt:SI (reg/v:SI 20)
			    (const_int 24)))
	 If that were made, reload could not handle the pair of
	 reg 20/21, since it would try to get any GENERAL_REGS
	 but some of them don't handle QImode.  */

      if (rtx_equal_p (inner_src, i2dest)
	  && GET_CODE (inner_dest) == REG
	  && ! MODES_TIEABLE_P (GET_MODE (i2dest), GET_MODE (inner_dest)))
	return 0;
#endif

      /* Check for the case where I3 modifies its output, as
	 discussed above.  */
      if ((inner_dest != dest
	   && (reg_overlap_mentioned_p (i2dest, inner_dest)
	       || (i1dest && reg_overlap_mentioned_p (i1dest, inner_dest))))
	  /* This is the same test done in can_combine_p except that we
	     allow a hard register with SMALL_REGISTER_CLASSES if SRC is a
	     CALL operation.  */
	  || (GET_CODE (inner_dest) == REG
	      && REGNO (inner_dest) < FIRST_PSEUDO_REGISTER
#ifdef SMALL_REGISTER_CLASSES
	      && GET_CODE (src) != CALL
#else
	      && ! HARD_REGNO_MODE_OK (REGNO (inner_dest),
				       GET_MODE (inner_dest))
#endif
	      )

	  || (i1_not_in_src && reg_overlap_mentioned_p (i1dest, src)))
	return 0;

      /* If DEST is used in I3, it is being killed in this insn,
	 so record that for later.  */
      if (pi3dest_killed && GET_CODE (dest) == REG
	  && reg_referenced_p (dest, PATTERN (i3)))
	{
	  if (*pi3dest_killed)
	    return 0;

	  *pi3dest_killed = dest;
	}
    }

  else if (GET_CODE (x) == PARALLEL)
    {
      int i;

      for (i = 0; i < XVECLEN (x, 0); i++)
	if (! combinable_i3pat (i3, &XVECEXP (x, 0, i), i2dest, i1dest,
				i1_not_in_src, pi3dest_killed))
	  return 0;
    }

  return 1;
}

/* Try to combine the insns I1 and I2 into I3.
   Here I1 and I2 appear earlier than I3.
   I1 can be zero; then we combine just I2 into I3.

   If we are combining three insns and the resulting insn is not recognized,
   try splitting it into two insns.  If that happens, I2 and I3 are retained
   and I1 is pseudo-deleted by turning it into a NOTE.  Otherwise, I1 and I2
   are pseudo-deleted.

   If we created two insns, return I2; otherwise return I3.
   Return 0 if the combination does not work.  Then nothing is changed.  */

static rtx
try_combine (i3, i2, i1)
     register rtx i3, i2, i1;
{
  /* New patterns for I3 and I2, respectively.  */
  rtx newpat, newi2pat = 0;
  /* Indicates need to preserve SET in I1 or I2 in I3 if it is not dead.  */
  int added_sets_1, added_sets_2;
  /* Total number of SETs to put into I3.  */
  int total_sets;
  /* Nonzero if I2's body now appears in I3.  */
  int i2_is_used;
  /* INSN_CODEs for new I3, new I2, and user of condition code.  */
  int insn_code_number, i2_code_number, other_code_number;
  /* Contains I3 if the destination of I3 is used in its source, which means
     that the old life of I3 is being killed.  If that usage is placed into
     I2 and not in I3, a REG_DEAD note must be made.  */
  rtx i3dest_killed = 0;
  /* SET_DEST and SET_SRC of I2 and I1.  */
  rtx i2dest, i2src, i1dest = 0, i1src = 0;
  /* PATTERN (I2), or a copy of it in certain cases.  */
  rtx i2pat;
  /* Indicates if I2DEST or I1DEST is in I2SRC or I1SRC.  */
  int i2dest_in_i2src, i1dest_in_i1src = 0, i2dest_in_i1src = 0;
  int i1_feeds_i3 = 0;
  /* Notes that must be added to REG_NOTES in I3 and I2.  */
  rtx new_i3_notes, new_i2_notes;

  int maxreg;
  rtx temp;
  register rtx link;
  int i;

  /* If any of I1, I2, and I3 isn't really an insn, we can't do anything.
     This can occur when flow deletes an insn that it has merged into an
     auto-increment address.  We also can't do anything if I3 has a
     REG_LIBCALL note since we don't want to disrupt the contiguity of a
     libcall.  */

  if (GET_RTX_CLASS (GET_CODE (i3)) != 'i'
      || GET_RTX_CLASS (GET_CODE (i2)) != 'i'
      || (i1 && GET_RTX_CLASS (GET_CODE (i1)) != 'i')
      || find_reg_note (i3, REG_LIBCALL, NULL_RTX))
    return 0;

  combine_attempts++;

  undobuf.num_undo = previous_num_undos = 0;
  undobuf.other_insn = 0;

  /* Save the current high-water-mark so we can free storage if we didn't
     accept this combination.  */
  undobuf.storage = (char *) oballoc (0);

  /* If I1 and I2 both feed I3, they can be in any order.  To simplify the
     code below, set I1 to be the earlier of the two insns.  */
  if (i1 && INSN_CUID (i1) > INSN_CUID (i2))
    temp = i1, i1 = i2, i2 = temp;

  /* First check for one important special-case that the code below will
     not handle.  Namely, the case where I1 is zero, I2 has multiple sets,
     and I3 is a SET whose SET_SRC is a SET_DEST in I2.  In that case,
     we may be able to replace that destination with the destination of I3.
     This occurs in the common code where we compute both a quotient and
     remainder into a structure, in which case we want to do the computation
     directly into the structure to avoid register-register copies.

     We make very conservative checks below and only try to handle the
     most common cases of this.  For example, we only handle the case
     where I2 and I3 are adjacent to avoid making difficult register
     usage tests.  */

  if (i1 == 0 && GET_CODE (i3) == INSN && GET_CODE (PATTERN (i3)) == SET
      && GET_CODE (SET_SRC (PATTERN (i3))) == REG
      && REGNO (SET_SRC (PATTERN (i3))) >= FIRST_PSEUDO_REGISTER
#ifdef SMALL_REGISTER_CLASSES
      && (GET_CODE (SET_DEST (PATTERN (i3))) != REG
	  || REGNO (SET_DEST (PATTERN (i3))) >= FIRST_PSEUDO_REGISTER)
#endif
      && find_reg_note (i3, REG_DEAD, SET_SRC (PATTERN (i3)))
      && GET_CODE (PATTERN (i2)) == PARALLEL
      && ! side_effects_p (SET_DEST (PATTERN (i3)))
      /* If the dest of I3 is a ZERO_EXTRACT or STRICT_LOW_PART, the code
	 below would need to check what is inside (and reg_overlap_mentioned_p
	 doesn't support those codes anyway).  Don't allow those destinations;
	 the resulting insn isn't likely to be recognized anyway.  */
      && GET_CODE (SET_DEST (PATTERN (i3))) != ZERO_EXTRACT
      && GET_CODE (SET_DEST (PATTERN (i3))) != STRICT_LOW_PART
      && ! reg_overlap_mentioned_p (SET_SRC (PATTERN (i3)),
				    SET_DEST (PATTERN (i3)))
      && next_real_insn (i2) == i3)
    {
      rtx p2 = PATTERN (i2);

      /* Make sure that the destination of I3,
	 which we are going to substitute into one output of I2,
	 is not used within another output of I2.  We must avoid making this:
	 (parallel [(set (mem (reg 69)) ...)
		    (set (reg 69) ...)])
	 which is not well-defined as to order of actions.
	 (Besides, reload can't handle output reloads for this.)

	 The problem can also happen if the dest of I3 is a memory ref,
	 if another dest in I2 is an indirect memory ref.  */
      for (i = 0; i < XVECLEN (p2, 0); i++)
	if (GET_CODE (XVECEXP (p2, 0, i)) == SET
	    && reg_overlap_mentioned_p (SET_DEST (PATTERN (i3)),
					SET_DEST (XVECEXP (p2, 0, i))))
	  break;

      if (i == XVECLEN (p2, 0))
	for (i = 0; i < XVECLEN (p2, 0); i++)
	  if (SET_DEST (XVECEXP (p2, 0, i)) == SET_SRC (PATTERN (i3)))
	    {
	      combine_merges++;

	      subst_insn = i3;
	      subst_low_cuid = INSN_CUID (i2);

	      added_sets_2 = 0;
	      i2dest = SET_SRC (PATTERN (i3));

	      /* Replace the dest in I2 with our dest and make the resulting
		 insn the new pattern for I3.  Then skip to where we
		 validate the pattern.  Everything was set up above.  */
	      SUBST (SET_DEST (XVECEXP (p2, 0, i)),
		     SET_DEST (PATTERN (i3)));

	      newpat = p2;
	      goto validate_replacement;
	    }
    }

#ifndef HAVE_cc0
  /* If we have no I1 and I2 looks like:
	(parallel [(set (reg:CC X) (compare:CC OP (const_int 0)))
		   (set Y OP)])
     make up a dummy I1 that is
	(set Y OP)
     and change I2 to be
	(set (reg:CC X) (compare:CC Y (const_int 0)))

     (We can ignore any trailing CLOBBERs.)

     This undoes a previous combination and allows us to match a branch-and-
     decrement insn.  */

  if (i1 == 0 && GET_CODE (PATTERN (i2)) == PARALLEL
      && XVECLEN (PATTERN (i2), 0) >= 2
      && GET_CODE (XVECEXP (PATTERN (i2), 0, 0)) == SET
      && (GET_MODE_CLASS (GET_MODE (SET_DEST (XVECEXP (PATTERN (i2), 0, 0))))
	  == MODE_CC)
      && GET_CODE (SET_SRC (XVECEXP (PATTERN (i2), 0, 0))) == COMPARE
      && XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 1) == const0_rtx
      && GET_CODE (XVECEXP (PATTERN (i2), 0, 1)) == SET
      && GET_CODE (SET_DEST (XVECEXP (PATTERN (i2), 0, 1))) == REG
      && rtx_equal_p (XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 0),
		      SET_SRC (XVECEXP (PATTERN (i2), 0, 1))))
    {
      for (i = XVECLEN (PATTERN (i2), 0) - 1; i >= 2; i--)
	if (GET_CODE (XVECEXP (PATTERN (i2), 0, i)) != CLOBBER)
	  break;

      if (i == 1)
	{
	  /* We make I1 with the same INSN_UID as I2.  This gives it
	     the same INSN_CUID for value tracking.  Our fake I1 will
	     never appear in the insn stream so giving it the same INSN_UID
	     as I2 will not cause a problem.  */

	  i1 = gen_rtx (INSN, VOIDmode, INSN_UID (i2), 0, i2,
			XVECEXP (PATTERN (i2), 0, 1), -1, 0, 0);

	  SUBST (PATTERN (i2), XVECEXP (PATTERN (i2), 0, 0));
	  SUBST (XEXP (SET_SRC (PATTERN (i2)), 0),
		 SET_DEST (PATTERN (i1)));
	}
    }
#endif

  /* Verify that I2 and I1 are valid for combining.  */
  if (! can_combine_p (i2, i3, i1, NULL_RTX, &i2dest, &i2src)
      || (i1 && ! can_combine_p (i1, i3, NULL_RTX, i2, &i1dest, &i1src)))
    {
      undo_all ();
      return 0;
    }

  /* Record whether I2DEST is used in I2SRC and similarly for the other
     cases.  Knowing this will help in register status updating below.  */
  i2dest_in_i2src = reg_overlap_mentioned_p (i2dest, i2src);
  i1dest_in_i1src = i1 && reg_overlap_mentioned_p (i1dest, i1src);
  i2dest_in_i1src = i1 && reg_overlap_mentioned_p (i2dest, i1src);

  /* See if I1 directly feeds into I3.  It does if I1DEST is not used
     in I2SRC.  */
  i1_feeds_i3 = i1 && ! reg_overlap_mentioned_p (i1dest, i2src);

  /* Ensure that I3's pattern can be the destination of combines.  */
  if (! combinable_i3pat (i3, &PATTERN (i3), i2dest, i1dest,
			  i1 && i2dest_in_i1src && i1_feeds_i3,
			  &i3dest_killed))
    {
      undo_all ();
      return 0;
    }

  /* If I3 has an inc, then give up if I1 or I2 uses the reg that is inc'd.
     We used to do this EXCEPT in one case: I3 has a post-inc in an
     output operand.  However, that exception can give rise to insns like
	mov r3,(r3)+
     which is a famous insn on the PDP-11 where the value of r3 used as the
     source was model-dependent.  Avoid this sort of thing.  */

#if 0
  if (!(GET_CODE (PATTERN (i3)) == SET
	&& GET_CODE (SET_SRC (PATTERN (i3))) == REG
	&& GET_CODE (SET_DEST (PATTERN (i3))) == MEM
	&& (GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_INC
	    || GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_DEC)))
    /* It's not the exception.  */
#endif
#ifdef AUTO_INC_DEC
    for (link = REG_NOTES (i3); link; link = XEXP (link, 1))
      if (REG_NOTE_KIND (link) == REG_INC
	  && (reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i2))
	      || (i1 != 0
		  && reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i1)))))
	{
	  undo_all ();
	  return 0;
	}
#endif

  /* See if the SETs in I1 or I2 need to be kept around in the merged
     instruction: whenever the value set there is still needed past I3.
     For the SETs in I2, this is easy: we see if I2DEST dies or is set in I3.

     For the SET in I1, we have two cases:  If I1 and I2 independently
     feed into I3, the set in I1 needs to be kept around if I1DEST dies
     or is set in I3.  Otherwise (if I1 feeds I2 which feeds I3), the set
     in I1 needs to be kept around unless I1DEST dies or is set in either
     I2 or I3.  We can distinguish these cases by seeing if I2SRC mentions
     I1DEST.  If so, we know I1 feeds into I2.  */

  added_sets_2 = ! dead_or_set_p (i3, i2dest);

  added_sets_1
    = i1 && ! (i1_feeds_i3 ? dead_or_set_p (i3, i1dest)
	       : (dead_or_set_p (i3, i1dest) || dead_or_set_p (i2, i1dest)));

  /* If the set in I2 needs to be kept around, we must make a copy of
     PATTERN (I2), so that when we substitute I1SRC for I1DEST in
     PATTERN (I2), we are only substituting for the original I1DEST, not into
     an already-substituted copy.  This also prevents making self-referential
     rtx.  If I2 is a PARALLEL, we just need the piece that assigns I2SRC to
     I2DEST.  */

  i2pat = (GET_CODE (PATTERN (i2)) == PARALLEL
	   ? gen_rtx (SET, VOIDmode, i2dest, i2src)
	   : PATTERN (i2));

  if (added_sets_2)
    i2pat = copy_rtx (i2pat);

  combine_merges++;

  /* Substitute in the latest insn for the regs set by the earlier ones.  */

  maxreg = max_reg_num ();

  subst_insn = i3;

  /* It is possible that the source of I2 or I1 may be performing an
     unneeded operation, such as a ZERO_EXTEND of something that is known
     to have the high part zero.  Handle that case by letting subst look at
     the innermost one of them.

     Another way to do this would be to have a function that tries to
     simplify a single insn instead of merging two or more insns.  We don't
     do this because of the potential of infinite loops and because
     of the potential extra memory required.  However, doing it the way
     we are is a bit of a kludge and doesn't catch all cases.

     But only do this if -fexpensive-optimizations since it slows things down
     and doesn't usually win.  */

  if (flag_expensive_optimizations)
    {
      /* Pass pc_rtx so no substitutions are done, just simplifications.
	 The cases that we are interested in here do not involve the few
	 cases where is_replaced is checked.  */
      if (i1)
	{
	  subst_low_cuid = INSN_CUID (i1);
	  i1src = subst (i1src, pc_rtx, pc_rtx, 0, 0);
	}
      else
	{
	  subst_low_cuid = INSN_CUID (i2);
	  i2src = subst (i2src, pc_rtx, pc_rtx, 0, 0);
	}

      previous_num_undos = undobuf.num_undo;
    }

#ifndef HAVE_cc0
  /* Many machines that don't use CC0 have insns that can both perform an
     arithmetic operation and set the condition code.  These operations will
     be represented as a PARALLEL with the first element of the vector
     being a COMPARE of an arithmetic operation with the constant zero.
     The second element of the vector will set some pseudo to the result
     of the same arithmetic operation.  If we simplify the COMPARE, we won't
     match such a pattern and so will generate an extra insn.  Here we test
     for this case, where both the comparison and the operation result are
     needed, and make the PARALLEL by just replacing I2DEST in I3SRC with
     I2SRC.  Later we will make the PARALLEL that contains I2.  */
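
  /* For instance (an illustrative sketch, not from any real port): if I2 is
	(set (reg:SI 67) (plus:SI (reg:SI 65) (reg:SI 66)))
     and I3 is
	(set (reg:CC 70) (compare:CC (reg:SI 67) (const_int 0)))
     with (reg:SI 67) still needed afterward, the desired result is
	(parallel [(set (reg:CC 70)
			(compare:CC (plus:SI (reg:SI 65) (reg:SI 66))
				    (const_int 0)))
		   (set (reg:SI 67) (plus:SI (reg:SI 65) (reg:SI 66)))])  */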
1304
1305 if (i1 == 0 && added_sets_2 && GET_CODE (PATTERN (i3)) == SET
1306 && GET_CODE (SET_SRC (PATTERN (i3))) == COMPARE
1307 && XEXP (SET_SRC (PATTERN (i3)), 1) == const0_rtx
1308 && rtx_equal_p (XEXP (SET_SRC (PATTERN (i3)), 0), i2dest))
1309 {
1310 rtx *cc_use;
1311 enum machine_mode compare_mode;
1312
1313 newpat = PATTERN (i3);
1314 SUBST (XEXP (SET_SRC (newpat), 0), i2src);
1315
1316 i2_is_used = 1;
1317
1318#ifdef EXTRA_CC_MODES
1319 /* See if a COMPARE with the operand we substituted in should be done
1320 with the mode that is currently being used. If not, do the same
1321 processing we do in `subst' for a SET; namely, if the destination
1322 is used only once, try to replace it with a register of the proper
1323 mode and also replace the COMPARE. */
1324 if (undobuf.other_insn == 0
1325 && (cc_use = find_single_use (SET_DEST (newpat), i3,
1326 &undobuf.other_insn))
77fa0940
RK
1327 && ((compare_mode = SELECT_CC_MODE (GET_CODE (*cc_use),
1328 i2src, const0_rtx))
230d793d
RS
1329 != GET_MODE (SET_DEST (newpat))))
1330 {
1331 int regno = REGNO (SET_DEST (newpat));
1332 rtx new_dest = gen_rtx (REG, compare_mode, regno);
1333
1334 if (regno < FIRST_PSEUDO_REGISTER
1335 || (reg_n_sets[regno] == 1 && ! added_sets_2
1336 && ! REG_USERVAR_P (SET_DEST (newpat))))
1337 {
1338 if (regno >= FIRST_PSEUDO_REGISTER)
1339 SUBST (regno_reg_rtx[regno], new_dest);
1340
1341 SUBST (SET_DEST (newpat), new_dest);
1342 SUBST (XEXP (*cc_use, 0), new_dest);
1343 SUBST (SET_SRC (newpat),
1344 gen_rtx_combine (COMPARE, compare_mode,
1345 i2src, const0_rtx));
1346 }
1347 else
1348 undobuf.other_insn = 0;
1349 }
1350#endif
1351 }
1352 else
1353#endif
1354 {
1355 n_occurrences = 0; /* `subst' counts here */
1356
1357 /* If I1 feeds into I2 (not into I3) and I1DEST is in I1SRC, we
1358 need to make a unique copy of I2SRC each time we substitute it
1359 to avoid self-referential rtl. */
1360
d0ab8cd3 1361 subst_low_cuid = INSN_CUID (i2);
230d793d
RS
1362 newpat = subst (PATTERN (i3), i2dest, i2src, 0,
1363 ! i1_feeds_i3 && i1dest_in_i1src);
1364 previous_num_undos = undobuf.num_undo;
1365
1366 /* Record whether i2's body now appears within i3's body. */
1367 i2_is_used = n_occurrences;
1368 }
1369
1370 /* If we already got a failure, don't try to do more. Otherwise,
1371 try to substitute in I1 if we have it. */
1372
1373 if (i1 && GET_CODE (newpat) != CLOBBER)
1374 {
1375 /* Before we can do this substitution, we must redo the test done
1376 above (see detailed comments there) that ensures that I1DEST
1377 isn't mentioned in any SETs in NEWPAT that are field assignments. */
1378
5f4f0e22
CH
1379 if (! combinable_i3pat (NULL_RTX, &newpat, i1dest, NULL_RTX,
1380 0, NULL_PTR))
230d793d
RS
1381 {
1382 undo_all ();
1383 return 0;
1384 }
1385
1386 n_occurrences = 0;
d0ab8cd3 1387 subst_low_cuid = INSN_CUID (i1);
230d793d
RS
1388 newpat = subst (newpat, i1dest, i1src, 0, 0);
1389 previous_num_undos = undobuf.num_undo;
1390 }
1391
916f14f1
RK
1392 /* Fail if an autoincrement side-effect has been duplicated. Be careful
1393 to count all the ways that I2SRC and I1SRC can be used. */
5f4f0e22 1394 if ((FIND_REG_INC_NOTE (i2, NULL_RTX) != 0
916f14f1 1395 && i2_is_used + added_sets_2 > 1)
5f4f0e22 1396 || (i1 != 0 && FIND_REG_INC_NOTE (i1, NULL_RTX) != 0
916f14f1
RK
1397 && (n_occurrences + added_sets_1 + (added_sets_2 && ! i1_feeds_i3)
1398 > 1))
230d793d
RS
1399 /* Fail if we tried to make a new register (we used to abort, but there's
1400 really no reason to). */
1401 || max_reg_num () != maxreg
1402 /* Fail if we couldn't do something and have a CLOBBER. */
1403 || GET_CODE (newpat) == CLOBBER)
1404 {
1405 undo_all ();
1406 return 0;
1407 }
1408
1409 /* If the actions of the earlier insns must be kept
1410 in addition to substituting them into the latest one,
1411 we must make a new PARALLEL for the latest insn
1412 to hold additional the SETs. */
1413
1414 if (added_sets_1 || added_sets_2)
1415 {
1416 combine_extras++;
1417
1418 if (GET_CODE (newpat) == PARALLEL)
1419 {
1420 rtvec old = XVEC (newpat, 0);
1421 total_sets = XVECLEN (newpat, 0) + added_sets_1 + added_sets_2;
1422 newpat = gen_rtx (PARALLEL, VOIDmode, rtvec_alloc (total_sets));
1423 bcopy (&old->elem[0], &XVECEXP (newpat, 0, 0),
1424 sizeof (old->elem[0]) * old->num_elem);
1425 }
1426 else
1427 {
1428 rtx old = newpat;
1429 total_sets = 1 + added_sets_1 + added_sets_2;
1430 newpat = gen_rtx (PARALLEL, VOIDmode, rtvec_alloc (total_sets));
1431 XVECEXP (newpat, 0, 0) = old;
1432 }
1433
1434 if (added_sets_1)
1435 XVECEXP (newpat, 0, --total_sets)
1436 = (GET_CODE (PATTERN (i1)) == PARALLEL
1437 ? gen_rtx (SET, VOIDmode, i1dest, i1src) : PATTERN (i1));
1438
1439 if (added_sets_2)
1440 {
1441 /* If there is no I1, use I2's body as is. We used to also not do
1442 the subst call below if I2 was substituted into I3,
1443 but that could lose a simplification. */
1444 if (i1 == 0)
1445 XVECEXP (newpat, 0, --total_sets) = i2pat;
1446 else
1447 /* See comment where i2pat is assigned. */
1448 XVECEXP (newpat, 0, --total_sets)
1449 = subst (i2pat, i1dest, i1src, 0, 0);
1450 }
1451 }
1452
1453 /* We come here when we are replacing a destination in I2 with the
1454 destination of I3. */
1455 validate_replacement:
1456
1457 /* Is the result of combination a valid instruction? */
1458 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
1459
1460 /* If the result isn't valid, see if it is a PARALLEL of two SETs where
1461 the second SET's destination is a register that is unused. In that case,
1462 we just need the first SET. This can occur when simplifying a divmod
1463 insn. We *must* test for this case here because the code below that
1464 splits two independent SETs doesn't handle this case correctly when it
1465 updates the register status. Also check the case where the first
1466 SET's destination is unused. That would not cause incorrect code, but
1467 does cause an unneeded insn to remain. */
1468
1469 if (insn_code_number < 0 && GET_CODE (newpat) == PARALLEL
1470 && XVECLEN (newpat, 0) == 2
1471 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
1472 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
1473 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) == REG
1474 && find_reg_note (i3, REG_UNUSED, SET_DEST (XVECEXP (newpat, 0, 1)))
1475 && ! side_effects_p (SET_SRC (XVECEXP (newpat, 0, 1)))
1476 && asm_noperands (newpat) < 0)
1477 {
1478 newpat = XVECEXP (newpat, 0, 0);
1479 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
1480 }
1481
1482 else if (insn_code_number < 0 && GET_CODE (newpat) == PARALLEL
1483 && XVECLEN (newpat, 0) == 2
1484 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
1485 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
1486 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) == REG
1487 && find_reg_note (i3, REG_UNUSED, SET_DEST (XVECEXP (newpat, 0, 0)))
1488 && ! side_effects_p (SET_SRC (XVECEXP (newpat, 0, 0)))
1489 && asm_noperands (newpat) < 0)
1490 {
1491 newpat = XVECEXP (newpat, 0, 1);
1492 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
1493 }
1494
d0ab8cd3
RK
1495 /* See if this is an XOR. If so, perhaps the problem is that the
1496 constant is out of range. Replace it with a complemented XOR with
1497 a complemented constant; it might be in range. */
1498
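 /* For example (constants chosen only for illustration), if
 (set Y (xor:SI X (const_int -16))) is rejected because -16 is out of
 range, we try (set Y (not:SI (xor:SI X (const_int 15)))) instead;
 ~(-16) == 15, so the two forms compute the same value. */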
1499 else if (insn_code_number < 0 && GET_CODE (newpat) == SET
1500 && GET_CODE (SET_SRC (newpat)) == XOR
1501 && GET_CODE (XEXP (SET_SRC (newpat), 1)) == CONST_INT
1502 && ((temp = simplify_unary_operation (NOT,
1503 GET_MODE (SET_SRC (newpat)),
1504 XEXP (SET_SRC (newpat), 1),
1505 GET_MODE (SET_SRC (newpat))))
1506 != 0))
1507 {
1508 enum machine_mode i_mode = GET_MODE (SET_SRC (newpat));
1509 rtx pat
1510 = gen_rtx_combine (SET, VOIDmode, SET_DEST (newpat),
1511 gen_unary (NOT, i_mode,
1512 gen_binary (XOR, i_mode,
1513 XEXP (SET_SRC (newpat), 0),
1514 temp)));
1515
1516 insn_code_number = recog_for_combine (&pat, i3, &new_i3_notes);
1517 if (insn_code_number >= 0)
1518 newpat = pat;
1519 }
1520
230d793d
RS
1521 /* If we were combining three insns and the result is a simple SET
1522 with no ASM_OPERANDS that wasn't recognized, try to split it into two
916f14f1
RK
 1523 insns. There are two ways to do this. It can be split using a
 1524 machine-specific method (like when you have an addition of a large
 1525 constant) or by combine itself, in the function find_split_point. */
1526
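 /* A sketch of the machine-specific case, with hypothetical operands:
 on a typical RISC target,
 (set (reg:SI 100) (plus:SI (reg:SI 101) (const_int 70000)))
 may not be a valid insn, but the MD splitter can rewrite it as a
 load of the high part of the constant followed by an add of the
 low part. */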
230d793d
RS
1527 if (i1 && insn_code_number < 0 && GET_CODE (newpat) == SET
1528 && asm_noperands (newpat) < 0)
1529 {
916f14f1 1530 rtx m_split, *split;
42495ca0 1531 rtx ni2dest = i2dest;
916f14f1
RK
1532
1533 /* See if the MD file can split NEWPAT. If it can't, see if letting it
42495ca0
RK
1534 use I2DEST as a scratch register will help. In the latter case,
1535 convert I2DEST to the mode of the source of NEWPAT if we can. */
916f14f1
RK
1536
1537 m_split = split_insns (newpat, i3);
1538 if (m_split == 0)
42495ca0
RK
1539 {
1540 /* If I2DEST is a hard register or the only use of a pseudo,
1541 we can change its mode. */
1542 if (GET_MODE (SET_DEST (newpat)) != GET_MODE (i2dest)
02f4ada4 1543 && GET_MODE (SET_DEST (newpat)) != VOIDmode
60654f77 1544 && GET_CODE (i2dest) == REG
42495ca0
RK
1545 && (REGNO (i2dest) < FIRST_PSEUDO_REGISTER
1546 || (reg_n_sets[REGNO (i2dest)] == 1 && ! added_sets_2
1547 && ! REG_USERVAR_P (i2dest))))
1548 ni2dest = gen_rtx (REG, GET_MODE (SET_DEST (newpat)),
1549 REGNO (i2dest));
1550
1551 m_split = split_insns (gen_rtx (PARALLEL, VOIDmode,
1552 gen_rtvec (2, newpat,
1553 gen_rtx (CLOBBER,
1554 VOIDmode,
1555 ni2dest))),
1556 i3);
1557 }
916f14f1
RK
1558
1559 if (m_split && GET_CODE (m_split) == SEQUENCE
3f508eca
RK
1560 && XVECLEN (m_split, 0) == 2
1561 && (next_real_insn (i2) == i3
1562 || ! use_crosses_set_p (PATTERN (XVECEXP (m_split, 0, 0)),
1563 INSN_CUID (i2))))
916f14f1 1564 {
1a26b032 1565 rtx i2set, i3set;
d0ab8cd3 1566 rtx newi3pat = PATTERN (XVECEXP (m_split, 0, 1));
916f14f1 1567 newi2pat = PATTERN (XVECEXP (m_split, 0, 0));
916f14f1 1568
e4ba89be
RK
1569 i3set = single_set (XVECEXP (m_split, 0, 1));
1570 i2set = single_set (XVECEXP (m_split, 0, 0));
1a26b032 1571
42495ca0
RK
1572 /* In case we changed the mode of I2DEST, replace it in the
1573 pseudo-register table here. We can't do it above in case this
1574 code doesn't get executed and we do a split the other way. */
1575
1576 if (REGNO (i2dest) >= FIRST_PSEUDO_REGISTER)
1577 SUBST (regno_reg_rtx[REGNO (i2dest)], ni2dest);
1578
916f14f1 1579 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
1a26b032
RK
1580
1581 /* If I2 or I3 has multiple SETs, we won't know how to track
1582 register status, so don't use these insns. */
1583
1584 if (i2_code_number >= 0 && i2set && i3set)
8888fada
RK
1585 insn_code_number = recog_for_combine (&newi3pat, i3,
1586 &new_i3_notes);
c767f54b 1587
d0ab8cd3
RK
1588 if (insn_code_number >= 0)
1589 newpat = newi3pat;
1590
c767f54b 1591 /* It is possible that both insns now set the destination of I3.
22609cbf 1592 If so, we must show an extra use of it. */
c767f54b 1593
1a26b032
RK
1594 if (insn_code_number >= 0 && GET_CODE (SET_DEST (i3set)) == REG
1595 && GET_CODE (SET_DEST (i2set)) == REG
1596 && REGNO (SET_DEST (i3set)) == REGNO (SET_DEST (i2set)))
22609cbf 1597 reg_n_sets[REGNO (SET_DEST (i2set))]++;
916f14f1 1598 }
230d793d
RS
1599
1600 /* If we can split it and use I2DEST, go ahead and see if that
1601 helps things be recognized. Verify that none of the registers
1602 are set between I2 and I3. */
d0ab8cd3 1603 if (insn_code_number < 0 && (split = find_split_point (&newpat, i3)) != 0
230d793d
RS
1604#ifdef HAVE_cc0
1605 && GET_CODE (i2dest) == REG
1606#endif
1607 /* We need I2DEST in the proper mode. If it is a hard register
1608 or the only use of a pseudo, we can change its mode. */
1609 && (GET_MODE (*split) == GET_MODE (i2dest)
1610 || GET_MODE (*split) == VOIDmode
1611 || REGNO (i2dest) < FIRST_PSEUDO_REGISTER
1612 || (reg_n_sets[REGNO (i2dest)] == 1 && ! added_sets_2
1613 && ! REG_USERVAR_P (i2dest)))
1614 && (next_real_insn (i2) == i3
1615 || ! use_crosses_set_p (*split, INSN_CUID (i2)))
1616 /* We can't overwrite I2DEST if its value is still used by
1617 NEWPAT. */
1618 && ! reg_referenced_p (i2dest, newpat))
1619 {
1620 rtx newdest = i2dest;
1621
1622 /* Get NEWDEST as a register in the proper mode. We have already
1623 validated that we can do this. */
1624 if (GET_MODE (i2dest) != GET_MODE (*split)
1625 && GET_MODE (*split) != VOIDmode)
1626 {
1627 newdest = gen_rtx (REG, GET_MODE (*split), REGNO (i2dest));
1628
1629 if (REGNO (i2dest) >= FIRST_PSEUDO_REGISTER)
1630 SUBST (regno_reg_rtx[REGNO (i2dest)], newdest);
1631 }
1632
1633 /* If *SPLIT is a (mult FOO (const_int pow2)), convert it to
1634 an ASHIFT. This can occur if it was inside a PLUS and hence
1635 appeared to be a memory address. This is a kludge. */
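 /* For example, (mult FOO (const_int 8)) becomes
 (ashift FOO (const_int 3)), since exact_log2 (8) == 3. */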
1636 if (GET_CODE (*split) == MULT
1637 && GET_CODE (XEXP (*split, 1)) == CONST_INT
1638 && (i = exact_log2 (INTVAL (XEXP (*split, 1)))) >= 0)
1639 SUBST (*split, gen_rtx_combine (ASHIFT, GET_MODE (*split),
5f4f0e22 1640 XEXP (*split, 0), GEN_INT (i)));
230d793d
RS
1641
1642#ifdef INSN_SCHEDULING
1643 /* If *SPLIT is a paradoxical SUBREG, when we split it, it should
1644 be written as a ZERO_EXTEND. */
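 /* For example, (subreg:SI (mem:QI ADDR) 0) is rewritten here as
 (zero_extend:SI (mem:QI ADDR)). */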
1645 if (GET_CODE (*split) == SUBREG
1646 && GET_CODE (SUBREG_REG (*split)) == MEM)
1647 SUBST (*split, gen_rtx_combine (ZERO_EXTEND, GET_MODE (*split),
1648 XEXP (*split, 0)));
1649#endif
1650
1651 newi2pat = gen_rtx_combine (SET, VOIDmode, newdest, *split);
1652 SUBST (*split, newdest);
1653 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
1654 if (i2_code_number >= 0)
1655 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
1656 }
1657 }
1658
1659 /* Check for a case where we loaded from memory in a narrow mode and
1660 then sign extended it, but we need both registers. In that case,
1661 we have a PARALLEL with both loads from the same memory location.
1662 We can split this into a load from memory followed by a register-register
1663 copy. This saves at least one insn, more if register allocation can
1664 eliminate the copy. */
1665
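 /* A sketch with hypothetical registers: from
 (parallel [(set (reg:SI 100) (sign_extend:SI (mem:HI ADDR)))
 (set (reg:HI 101) (mem:HI ADDR))])
 we let I2 do the extended load and turn I3 into a copy of the
 low part of (reg:SI 100) into (reg:HI 101). */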
1666 else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0
1667 && GET_CODE (newpat) == PARALLEL
1668 && XVECLEN (newpat, 0) == 2
1669 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
1670 && GET_CODE (SET_SRC (XVECEXP (newpat, 0, 0))) == SIGN_EXTEND
1671 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
1672 && rtx_equal_p (SET_SRC (XVECEXP (newpat, 0, 1)),
1673 XEXP (SET_SRC (XVECEXP (newpat, 0, 0)), 0))
1674 && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 1)),
1675 INSN_CUID (i2))
1676 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT
1677 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART
1678 && ! reg_overlap_mentioned_p (SET_DEST (XVECEXP (newpat, 0, 1)),
1679 SET_SRC (XVECEXP (newpat, 0, 1)))
1680 && ! find_reg_note (i3, REG_UNUSED,
1681 SET_DEST (XVECEXP (newpat, 0, 0))))
1682 {
472fbdd1
RK
1683 rtx ni2dest;
1684
230d793d 1685 newi2pat = XVECEXP (newpat, 0, 0);
472fbdd1 1686 ni2dest = SET_DEST (XVECEXP (newpat, 0, 0));
230d793d
RS
1687 newpat = XVECEXP (newpat, 0, 1);
1688 SUBST (SET_SRC (newpat),
472fbdd1 1689 gen_lowpart_for_combine (GET_MODE (SET_SRC (newpat)), ni2dest));
230d793d
RS
1690 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
1691 if (i2_code_number >= 0)
1692 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
5089e22e
RS
1693
1694 if (insn_code_number >= 0)
1695 {
1696 rtx insn;
1697 rtx link;
1698
1699 /* If we will be able to accept this, we have made a change to the
1700 destination of I3. This can invalidate a LOG_LINKS pointing
1701 to I3. No other part of combine.c makes such a transformation.
1702
1703 The new I3 will have a destination that was previously the
 1704 destination of I1 or I2 and which was used in I2 or I3. Call
1705 distribute_links to make a LOG_LINK from the next use of
1706 that destination. */
1707
1708 PATTERN (i3) = newpat;
5f4f0e22 1709 distribute_links (gen_rtx (INSN_LIST, VOIDmode, i3, NULL_RTX));
5089e22e
RS
1710
1711 /* I3 now uses what used to be its destination and which is
1712 now I2's destination. That means we need a LOG_LINK from
1713 I3 to I2. But we used to have one, so we still will.
1714
1715 However, some later insn might be using I2's dest and have
1716 a LOG_LINK pointing at I3. We must remove this link.
1717 The simplest way to remove the link is to point it at I1,
1718 which we know will be a NOTE. */
1719
1720 for (insn = NEXT_INSN (i3);
1721 insn && GET_CODE (insn) != CODE_LABEL
1722 && GET_CODE (PREV_INSN (insn)) != JUMP_INSN;
1723 insn = NEXT_INSN (insn))
1724 {
1725 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
472fbdd1 1726 && reg_referenced_p (ni2dest, PATTERN (insn)))
5089e22e
RS
1727 {
1728 for (link = LOG_LINKS (insn); link;
1729 link = XEXP (link, 1))
1730 if (XEXP (link, 0) == i3)
1731 XEXP (link, 0) = i1;
1732
1733 break;
1734 }
1735 }
1736 }
230d793d
RS
1737 }
1738
1739 /* Similarly, check for a case where we have a PARALLEL of two independent
1740 SETs but we started with three insns. In this case, we can do the sets
1741 as two separate insns. This case occurs when some SET allows two
1742 other insns to combine, but the destination of that SET is still live. */
1743
1744 else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0
1745 && GET_CODE (newpat) == PARALLEL
1746 && XVECLEN (newpat, 0) == 2
1747 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
1748 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != ZERO_EXTRACT
1749 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != STRICT_LOW_PART
1750 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
1751 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT
1752 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART
1753 && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 1)),
1754 INSN_CUID (i2))
1755 /* Don't pass sets with (USE (MEM ...)) dests to the following. */
1756 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != USE
1757 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != USE
1758 && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 1)),
1759 XVECEXP (newpat, 0, 0))
1760 && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 0)),
1761 XVECEXP (newpat, 0, 1)))
1762 {
1763 newi2pat = XVECEXP (newpat, 0, 1);
1764 newpat = XVECEXP (newpat, 0, 0);
1765
1766 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
1767 if (i2_code_number >= 0)
1768 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
1769 }
1770
1771 /* If it still isn't recognized, fail and change things back the way they
1772 were. */
1773 if ((insn_code_number < 0
1774 /* Is the result a reasonable ASM_OPERANDS? */
1775 && (! check_asm_operands (newpat) || added_sets_1 || added_sets_2)))
1776 {
1777 undo_all ();
1778 return 0;
1779 }
1780
1781 /* If we had to change another insn, make sure it is valid also. */
1782 if (undobuf.other_insn)
1783 {
1784 rtx other_notes = REG_NOTES (undobuf.other_insn);
1785 rtx other_pat = PATTERN (undobuf.other_insn);
1786 rtx new_other_notes;
1787 rtx note, next;
1788
1789 other_code_number = recog_for_combine (&other_pat, undobuf.other_insn,
1790 &new_other_notes);
1791
1792 if (other_code_number < 0 && ! check_asm_operands (other_pat))
1793 {
1794 undo_all ();
1795 return 0;
1796 }
1797
1798 PATTERN (undobuf.other_insn) = other_pat;
1799
1800 /* If any of the notes in OTHER_INSN were REG_UNUSED, ensure that they
1801 are still valid. Then add any non-duplicate notes added by
1802 recog_for_combine. */
1803 for (note = REG_NOTES (undobuf.other_insn); note; note = next)
1804 {
1805 next = XEXP (note, 1);
1806
1807 if (REG_NOTE_KIND (note) == REG_UNUSED
1808 && ! reg_set_p (XEXP (note, 0), PATTERN (undobuf.other_insn)))
1a26b032
RK
1809 {
1810 if (GET_CODE (XEXP (note, 0)) == REG)
1811 reg_n_deaths[REGNO (XEXP (note, 0))]--;
1812
1813 remove_note (undobuf.other_insn, note);
1814 }
230d793d
RS
1815 }
1816
1a26b032
RK
1817 for (note = new_other_notes; note; note = XEXP (note, 1))
1818 if (GET_CODE (XEXP (note, 0)) == REG)
1819 reg_n_deaths[REGNO (XEXP (note, 0))]++;
1820
230d793d 1821 distribute_notes (new_other_notes, undobuf.other_insn,
5f4f0e22 1822 undobuf.other_insn, NULL_RTX, NULL_RTX, NULL_RTX);
230d793d
RS
1823 }
1824
1825 /* We now know that we can do this combination. Merge the insns and
1826 update the status of registers and LOG_LINKS. */
1827
1828 {
1829 rtx i3notes, i2notes, i1notes = 0;
1830 rtx i3links, i2links, i1links = 0;
1831 rtx midnotes = 0;
1832 int all_adjacent = (next_real_insn (i2) == i3
1833 && (i1 == 0 || next_real_insn (i1) == i2));
1834 register int regno;
1835 /* Compute which registers we expect to eliminate. */
1836 rtx elim_i2 = (newi2pat || i2dest_in_i2src || i2dest_in_i1src
1837 ? 0 : i2dest);
1838 rtx elim_i1 = i1 == 0 || i1dest_in_i1src ? 0 : i1dest;
1839
1840 /* Get the old REG_NOTES and LOG_LINKS from all our insns and
1841 clear them. */
1842 i3notes = REG_NOTES (i3), i3links = LOG_LINKS (i3);
1843 i2notes = REG_NOTES (i2), i2links = LOG_LINKS (i2);
1844 if (i1)
1845 i1notes = REG_NOTES (i1), i1links = LOG_LINKS (i1);
1846
1847 /* Ensure that we do not have something that should not be shared but
1848 occurs multiple times in the new insns. Check this by first
5089e22e 1849 resetting all the `used' flags and then copying anything that is shared. */
230d793d
RS
1850
1851 reset_used_flags (i3notes);
1852 reset_used_flags (i2notes);
1853 reset_used_flags (i1notes);
1854 reset_used_flags (newpat);
1855 reset_used_flags (newi2pat);
1856 if (undobuf.other_insn)
1857 reset_used_flags (PATTERN (undobuf.other_insn));
1858
1859 i3notes = copy_rtx_if_shared (i3notes);
1860 i2notes = copy_rtx_if_shared (i2notes);
1861 i1notes = copy_rtx_if_shared (i1notes);
1862 newpat = copy_rtx_if_shared (newpat);
1863 newi2pat = copy_rtx_if_shared (newi2pat);
1864 if (undobuf.other_insn)
 1865 PATTERN (undobuf.other_insn) = copy_rtx_if_shared (PATTERN (undobuf.other_insn));
1866
1867 INSN_CODE (i3) = insn_code_number;
1868 PATTERN (i3) = newpat;
1869 if (undobuf.other_insn)
1870 INSN_CODE (undobuf.other_insn) = other_code_number;
1871
1872 /* We had one special case above where I2 had more than one set and
1873 we replaced a destination of one of those sets with the destination
1874 of I3. In that case, we have to update LOG_LINKS of insns later
1875 in this basic block. Note that this (expensive) case is rare. */
1876
1877 if (GET_CODE (PATTERN (i2)) == PARALLEL)
1878 for (i = 0; i < XVECLEN (PATTERN (i2), 0); i++)
1879 if (GET_CODE (SET_DEST (XVECEXP (PATTERN (i2), 0, i))) == REG
1880 && SET_DEST (XVECEXP (PATTERN (i2), 0, i)) != i2dest
1881 && ! find_reg_note (i2, REG_UNUSED,
1882 SET_DEST (XVECEXP (PATTERN (i2), 0, i))))
1883 {
1884 register rtx insn;
1885
1886 for (insn = NEXT_INSN (i2); insn; insn = NEXT_INSN (insn))
1887 {
1888 if (insn != i3 && GET_RTX_CLASS (GET_CODE (insn)) == 'i')
1889 for (link = LOG_LINKS (insn); link; link = XEXP (link, 1))
1890 if (XEXP (link, 0) == i2)
1891 XEXP (link, 0) = i3;
1892
1893 if (GET_CODE (insn) == CODE_LABEL
1894 || GET_CODE (insn) == JUMP_INSN)
1895 break;
1896 }
1897 }
1898
1899 LOG_LINKS (i3) = 0;
1900 REG_NOTES (i3) = 0;
1901 LOG_LINKS (i2) = 0;
1902 REG_NOTES (i2) = 0;
1903
1904 if (newi2pat)
1905 {
1906 INSN_CODE (i2) = i2_code_number;
1907 PATTERN (i2) = newi2pat;
1908 }
1909 else
1910 {
1911 PUT_CODE (i2, NOTE);
1912 NOTE_LINE_NUMBER (i2) = NOTE_INSN_DELETED;
1913 NOTE_SOURCE_FILE (i2) = 0;
1914 }
1915
1916 if (i1)
1917 {
1918 LOG_LINKS (i1) = 0;
1919 REG_NOTES (i1) = 0;
1920 PUT_CODE (i1, NOTE);
1921 NOTE_LINE_NUMBER (i1) = NOTE_INSN_DELETED;
1922 NOTE_SOURCE_FILE (i1) = 0;
1923 }
1924
1925 /* Get death notes for everything that is now used in either I3 or
1926 I2 and used to die in a previous insn. */
1927
1928 move_deaths (newpat, i1 ? INSN_CUID (i1) : INSN_CUID (i2), i3, &midnotes);
1929 if (newi2pat)
1930 move_deaths (newi2pat, INSN_CUID (i1), i2, &midnotes);
1931
1932 /* Distribute all the LOG_LINKS and REG_NOTES from I1, I2, and I3. */
1933 if (i3notes)
5f4f0e22
CH
1934 distribute_notes (i3notes, i3, i3, newi2pat ? i2 : NULL_RTX,
1935 elim_i2, elim_i1);
230d793d 1936 if (i2notes)
5f4f0e22
CH
1937 distribute_notes (i2notes, i2, i3, newi2pat ? i2 : NULL_RTX,
1938 elim_i2, elim_i1);
230d793d 1939 if (i1notes)
5f4f0e22
CH
1940 distribute_notes (i1notes, i1, i3, newi2pat ? i2 : NULL_RTX,
1941 elim_i2, elim_i1);
230d793d 1942 if (midnotes)
5f4f0e22
CH
1943 distribute_notes (midnotes, NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
1944 elim_i2, elim_i1);
230d793d
RS
1945
1946 /* Distribute any notes added to I2 or I3 by recog_for_combine. We
1947 know these are REG_UNUSED and want them to go to the desired insn,
1a26b032
RK
 1948 so we always pass it as I3. We have not counted the notes in
1949 reg_n_deaths yet, so we need to do so now. */
1950
230d793d 1951 if (newi2pat && new_i2_notes)
1a26b032
RK
1952 {
1953 for (temp = new_i2_notes; temp; temp = XEXP (temp, 1))
1954 if (GET_CODE (XEXP (temp, 0)) == REG)
1955 reg_n_deaths[REGNO (XEXP (temp, 0))]++;
1956
1957 distribute_notes (new_i2_notes, i2, i2, NULL_RTX, NULL_RTX, NULL_RTX);
1958 }
1959
230d793d 1960 if (new_i3_notes)
1a26b032
RK
1961 {
1962 for (temp = new_i3_notes; temp; temp = XEXP (temp, 1))
1963 if (GET_CODE (XEXP (temp, 0)) == REG)
1964 reg_n_deaths[REGNO (XEXP (temp, 0))]++;
1965
1966 distribute_notes (new_i3_notes, i3, i3, NULL_RTX, NULL_RTX, NULL_RTX);
1967 }
230d793d
RS
1968
1969 /* If I3DEST was used in I3SRC, it really died in I3. We may need to
1a26b032
RK
1970 put a REG_DEAD note for it somewhere. Similarly for I2 and I1.
1971 Show an additional death due to the REG_DEAD note we make here. If
1972 we discard it in distribute_notes, we will decrement it again. */
d0ab8cd3 1973
230d793d 1974 if (i3dest_killed)
1a26b032
RK
1975 {
1976 if (GET_CODE (i3dest_killed) == REG)
1977 reg_n_deaths[REGNO (i3dest_killed)]++;
1978
1979 distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i3dest_killed,
1980 NULL_RTX),
1981 NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
1982 NULL_RTX, NULL_RTX);
1983 }
58c8c593
RK
1984
1985 /* For I2 and I1, we have to be careful. If NEWI2PAT exists and sets
1986 I2DEST or I1DEST, the death must be somewhere before I2, not I3. If
1987 we passed I3 in that case, it might delete I2. */
1988
230d793d 1989 if (i2dest_in_i2src)
58c8c593 1990 {
1a26b032
RK
1991 if (GET_CODE (i2dest) == REG)
1992 reg_n_deaths[REGNO (i2dest)]++;
1993
58c8c593
RK
1994 if (newi2pat && reg_set_p (i2dest, newi2pat))
1995 distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i2dest, NULL_RTX),
1996 NULL_RTX, i2, NULL_RTX, NULL_RTX, NULL_RTX);
1997 else
1998 distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i2dest, NULL_RTX),
1999 NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
2000 NULL_RTX, NULL_RTX);
2001 }
2002
230d793d 2003 if (i1dest_in_i1src)
58c8c593 2004 {
1a26b032
RK
2005 if (GET_CODE (i1dest) == REG)
2006 reg_n_deaths[REGNO (i1dest)]++;
2007
58c8c593
RK
2008 if (newi2pat && reg_set_p (i1dest, newi2pat))
2009 distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i1dest, NULL_RTX),
2010 NULL_RTX, i2, NULL_RTX, NULL_RTX, NULL_RTX);
2011 else
2012 distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i1dest, NULL_RTX),
2013 NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
2014 NULL_RTX, NULL_RTX);
2015 }
230d793d
RS
2016
2017 distribute_links (i3links);
2018 distribute_links (i2links);
2019 distribute_links (i1links);
2020
2021 if (GET_CODE (i2dest) == REG)
2022 {
d0ab8cd3
RK
2023 rtx link;
2024 rtx i2_insn = 0, i2_val = 0, set;
2025
2026 /* The insn that used to set this register doesn't exist, and
2027 this life of the register may not exist either. See if one of
2028 I3's links points to an insn that sets I2DEST. If it does,
2029 that is now the last known value for I2DEST. If we don't update
2030 this and I2 set the register to a value that depended on its old
230d793d
RS
 2031 contents, we will get confused. If this insn is used, things
2032 will be set correctly in combine_instructions. */
d0ab8cd3
RK
2033
2034 for (link = LOG_LINKS (i3); link; link = XEXP (link, 1))
2035 if ((set = single_set (XEXP (link, 0))) != 0
2036 && rtx_equal_p (i2dest, SET_DEST (set)))
2037 i2_insn = XEXP (link, 0), i2_val = SET_SRC (set);
2038
2039 record_value_for_reg (i2dest, i2_insn, i2_val);
230d793d
RS
2040
2041 /* If the reg formerly set in I2 died only once and that was in I3,
2042 zero its use count so it won't make `reload' do any work. */
2043 if (! added_sets_2 && newi2pat == 0)
2044 {
2045 regno = REGNO (i2dest);
2046 reg_n_sets[regno]--;
2047 if (reg_n_sets[regno] == 0
5f4f0e22
CH
2048 && ! (basic_block_live_at_start[0][regno / REGSET_ELT_BITS]
2049 & ((REGSET_ELT_TYPE) 1 << (regno % REGSET_ELT_BITS))))
230d793d
RS
2050 reg_n_refs[regno] = 0;
2051 }
2052 }
2053
2054 if (i1 && GET_CODE (i1dest) == REG)
2055 {
d0ab8cd3
RK
2056 rtx link;
2057 rtx i1_insn = 0, i1_val = 0, set;
2058
2059 for (link = LOG_LINKS (i3); link; link = XEXP (link, 1))
2060 if ((set = single_set (XEXP (link, 0))) != 0
2061 && rtx_equal_p (i1dest, SET_DEST (set)))
2062 i1_insn = XEXP (link, 0), i1_val = SET_SRC (set);
2063
2064 record_value_for_reg (i1dest, i1_insn, i1_val);
2065
230d793d
RS
2066 regno = REGNO (i1dest);
2067 if (! added_sets_1)
2068 {
2069 reg_n_sets[regno]--;
2070 if (reg_n_sets[regno] == 0
5f4f0e22
CH
2071 && ! (basic_block_live_at_start[0][regno / REGSET_ELT_BITS]
2072 & ((REGSET_ELT_TYPE) 1 << (regno % REGSET_ELT_BITS))))
230d793d
RS
2073 reg_n_refs[regno] = 0;
2074 }
2075 }
2076
22609cbf
RK
2077 /* Update reg_significant et al for any changes that may have been made
2078 to this insn. */
2079
2080 note_stores (newpat, set_significant);
2081 if (newi2pat)
2082 note_stores (newi2pat, set_significant);
2083
230d793d
RS
2084 /* If I3 is now an unconditional jump, ensure that it has a
2085 BARRIER following it since it may have initially been a
2086 conditional jump. */
2087
2088 if ((GET_CODE (newpat) == RETURN || simplejump_p (i3))
2089 && GET_CODE (next_nonnote_insn (i3)) != BARRIER)
2090 emit_barrier_after (i3);
2091 }
2092
2093 combine_successes++;
2094
2095 return newi2pat ? i2 : i3;
2096}
2097\f
2098/* Undo all the modifications recorded in undobuf. */
2099
2100static void
2101undo_all ()
2102{
2103 register int i;
2104 if (undobuf.num_undo > MAX_UNDO)
2105 undobuf.num_undo = MAX_UNDO;
2106 for (i = undobuf.num_undo - 1; i >= 0; i--)
7c046e4e
RK
2107 {
2108 if (undobuf.undo[i].is_int)
2109 *undobuf.undo[i].where.i = undobuf.undo[i].old_contents.i;
2110 else
2111 *undobuf.undo[i].where.rtx = undobuf.undo[i].old_contents.rtx;
2112
2113 }
230d793d
RS
2114
2115 obfree (undobuf.storage);
2116 undobuf.num_undo = 0;
2117}
2118\f
2119/* Find the innermost point within the rtx at LOC, possibly LOC itself,
d0ab8cd3
RK
2120 where we have an arithmetic expression and return that point. LOC will
2121 be inside INSN.
230d793d
RS
2122
2123 try_combine will call this function to see if an insn can be split into
2124 two insns. */
2125
2126static rtx *
d0ab8cd3 2127find_split_point (loc, insn)
230d793d 2128 rtx *loc;
d0ab8cd3 2129 rtx insn;
230d793d
RS
2130{
2131 rtx x = *loc;
2132 enum rtx_code code = GET_CODE (x);
2133 rtx *split;
2134 int len = 0, pos, unsignedp;
2135 rtx inner;
2136
2137 /* First special-case some codes. */
2138 switch (code)
2139 {
2140 case SUBREG:
2141#ifdef INSN_SCHEDULING
2142 /* If we are making a paradoxical SUBREG invalid, it becomes a split
2143 point. */
2144 if (GET_CODE (SUBREG_REG (x)) == MEM)
2145 return loc;
2146#endif
d0ab8cd3 2147 return find_split_point (&SUBREG_REG (x), insn);
230d793d 2148
230d793d 2149 case MEM:
916f14f1 2150#ifdef HAVE_lo_sum
230d793d
RS
2151 /* If we have (mem (const ..)) or (mem (symbol_ref ...)), split it
2152 using LO_SUM and HIGH. */
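 /* For example, (mem (symbol_ref "foo")) becomes
 (mem (lo_sum (high (symbol_ref "foo")) (symbol_ref "foo"))),
 and the HIGH is returned as the split point. */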
2153 if (GET_CODE (XEXP (x, 0)) == CONST
2154 || GET_CODE (XEXP (x, 0)) == SYMBOL_REF)
2155 {
2156 SUBST (XEXP (x, 0),
2157 gen_rtx_combine (LO_SUM, Pmode,
2158 gen_rtx_combine (HIGH, Pmode, XEXP (x, 0)),
2159 XEXP (x, 0)));
2160 return &XEXP (XEXP (x, 0), 0);
2161 }
230d793d
RS
2162#endif
2163
916f14f1
RK
2164 /* If we have a PLUS whose second operand is a constant and the
 2165 address is not valid, perhaps we can split it up using
 2166 the machine-specific way to split large constants. We use
d0ab8cd3 2167 the first pseudo-reg (one of the virtual regs) as a placeholder;
916f14f1
RK
2168 it will not remain in the result. */
2169 if (GET_CODE (XEXP (x, 0)) == PLUS
2170 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
2171 && ! memory_address_p (GET_MODE (x), XEXP (x, 0)))
2172 {
2173 rtx reg = regno_reg_rtx[FIRST_PSEUDO_REGISTER];
2174 rtx seq = split_insns (gen_rtx (SET, VOIDmode, reg, XEXP (x, 0)),
2175 subst_insn);
2176
2177 /* This should have produced two insns, each of which sets our
2178 placeholder. If the source of the second is a valid address,
 2179 we can put both sources together and make a split point
2180 in the middle. */
2181
2182 if (seq && XVECLEN (seq, 0) == 2
2183 && GET_CODE (XVECEXP (seq, 0, 0)) == INSN
2184 && GET_CODE (PATTERN (XVECEXP (seq, 0, 0))) == SET
2185 && SET_DEST (PATTERN (XVECEXP (seq, 0, 0))) == reg
2186 && ! reg_mentioned_p (reg,
2187 SET_SRC (PATTERN (XVECEXP (seq, 0, 0))))
2188 && GET_CODE (XVECEXP (seq, 0, 1)) == INSN
2189 && GET_CODE (PATTERN (XVECEXP (seq, 0, 1))) == SET
2190 && SET_DEST (PATTERN (XVECEXP (seq, 0, 1))) == reg
2191 && memory_address_p (GET_MODE (x),
2192 SET_SRC (PATTERN (XVECEXP (seq, 0, 1)))))
2193 {
2194 rtx src1 = SET_SRC (PATTERN (XVECEXP (seq, 0, 0)));
2195 rtx src2 = SET_SRC (PATTERN (XVECEXP (seq, 0, 1)));
2196
2197 /* Replace the placeholder in SRC2 with SRC1. If we can
2198 find where in SRC2 it was placed, that can become our
2199 split point and we can replace this address with SRC2.
2200 Just try two obvious places. */
2201
2202 src2 = replace_rtx (src2, reg, src1);
2203 split = 0;
2204 if (XEXP (src2, 0) == src1)
2205 split = &XEXP (src2, 0);
2206 else if (GET_RTX_FORMAT (GET_CODE (XEXP (src2, 0)))[0] == 'e'
2207 && XEXP (XEXP (src2, 0), 0) == src1)
2208 split = &XEXP (XEXP (src2, 0), 0);
2209
2210 if (split)
2211 {
2212 SUBST (XEXP (x, 0), src2);
2213 return split;
2214 }
2215 }
1a26b032
RK
2216
2217 /* If that didn't work, perhaps the first operand is complex and
2218 needs to be computed separately, so make a split point there.
2219 This will occur on machines that just support REG + CONST
2220 and have a constant moved through some previous computation. */
2221
2222 else if (GET_RTX_CLASS (GET_CODE (XEXP (XEXP (x, 0), 0))) != 'o'
2223 && ! (GET_CODE (XEXP (XEXP (x, 0), 0)) == SUBREG
2224 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (XEXP (x, 0), 0))))
2225 == 'o')))
2226 return &XEXP (XEXP (x, 0), 0);
916f14f1
RK
2227 }
2228 break;
2229
230d793d
RS
2230 case SET:
2231#ifdef HAVE_cc0
2232 /* If SET_DEST is CC0 and SET_SRC is not an operand, a COMPARE, or a
2233 ZERO_EXTRACT, the most likely reason why this doesn't match is that
2234 we need to put the operand into a register. So split at that
2235 point. */
2236
2237 if (SET_DEST (x) == cc0_rtx
2238 && GET_CODE (SET_SRC (x)) != COMPARE
2239 && GET_CODE (SET_SRC (x)) != ZERO_EXTRACT
2240 && GET_RTX_CLASS (GET_CODE (SET_SRC (x))) != 'o'
2241 && ! (GET_CODE (SET_SRC (x)) == SUBREG
2242 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (SET_SRC (x)))) == 'o'))
2243 return &SET_SRC (x);
2244#endif
2245
2246 /* See if we can split SET_SRC as it stands. */
d0ab8cd3 2247 split = find_split_point (&SET_SRC (x), insn);
230d793d
RS
2248 if (split && split != &SET_SRC (x))
2249 return split;
2250
2251 /* See if this is a bitfield assignment with everything constant. If
2252 so, this is an IOR of an AND, so split it into that. */
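 /* For example (a hypothetical assignment, with BITS_BIG_ENDIAN == 0),
 storing 3 into a 2-bit field at bit 4 of D,
 (set (zero_extract D (const_int 2) (const_int 4)) (const_int 3)),
 becomes (set D (ior D (const_int 48))), since the source fills the
 whole field; otherwise an AND mask would be needed as well. */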
2253 if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
2254 && (GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
5f4f0e22 2255 <= HOST_BITS_PER_WIDE_INT)
230d793d
RS
2256 && GET_CODE (XEXP (SET_DEST (x), 1)) == CONST_INT
2257 && GET_CODE (XEXP (SET_DEST (x), 2)) == CONST_INT
2258 && GET_CODE (SET_SRC (x)) == CONST_INT
2259 && ((INTVAL (XEXP (SET_DEST (x), 1))
2260 + INTVAL (XEXP (SET_DEST (x), 2)))
2261 <= GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0))))
2262 && ! side_effects_p (XEXP (SET_DEST (x), 0)))
2263 {
2264 int pos = INTVAL (XEXP (SET_DEST (x), 2));
2265 int len = INTVAL (XEXP (SET_DEST (x), 1));
2266 int src = INTVAL (SET_SRC (x));
2267 rtx dest = XEXP (SET_DEST (x), 0);
2268 enum machine_mode mode = GET_MODE (dest);
5f4f0e22 2269 unsigned HOST_WIDE_INT mask = ((HOST_WIDE_INT) 1 << len) - 1;
230d793d
RS
2270
2271#if BITS_BIG_ENDIAN
2272 pos = GET_MODE_BITSIZE (mode) - len - pos;
2273#endif
2274
2275 if (src == mask)
2276 SUBST (SET_SRC (x),
5f4f0e22 2277 gen_binary (IOR, mode, dest, GEN_INT (src << pos)));
230d793d
RS
2278 else
2279 SUBST (SET_SRC (x),
2280 gen_binary (IOR, mode,
2281 gen_binary (AND, mode, dest,
5f4f0e22
CH
2282 GEN_INT (~ (mask << pos)
2283 & GET_MODE_MASK (mode))),
2284 GEN_INT (src << pos)));
230d793d
RS
2285
2286 SUBST (SET_DEST (x), dest);
2287
d0ab8cd3 2288 split = find_split_point (&SET_SRC (x), insn);
230d793d
RS
2289 if (split && split != &SET_SRC (x))
2290 return split;
2291 }
2292
2293 /* Otherwise, see if this is an operation that we can split into two.
2294 If so, try to split that. */
2295 code = GET_CODE (SET_SRC (x));
2296
2297 switch (code)
2298 {
d0ab8cd3
RK
2299 case AND:
2300 /* If we are AND'ing with a large constant that is only a single
2301 bit and the result is only being used in a context where we
2302 need to know if it is zero or non-zero, replace it with a bit
2303 extraction. This will avoid the large constant, which might
2304 have taken more than one insn to make. If the constant were
2305 not a valid argument to the AND but took only one insn to make,
2306 this is no worse, but if it took more than one insn, it will
2307 be better. */
2308
2309 if (GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
2310 && GET_CODE (XEXP (SET_SRC (x), 0)) == REG
2311 && (pos = exact_log2 (INTVAL (XEXP (SET_SRC (x), 1)))) >= 7
2312 && GET_CODE (SET_DEST (x)) == REG
2313 && (split = find_single_use (SET_DEST (x), insn, NULL_PTR)) != 0
2314 && (GET_CODE (*split) == EQ || GET_CODE (*split) == NE)
2315 && XEXP (*split, 0) == SET_DEST (x)
2316 && XEXP (*split, 1) == const0_rtx)
2317 {
2318 SUBST (SET_SRC (x),
2319 make_extraction (GET_MODE (SET_DEST (x)),
2320 XEXP (SET_SRC (x), 0),
2321 pos, NULL_RTX, 1, 1, 0, 0));
2322 return find_split_point (loc, insn);
2323 }
2324 break;
2325
230d793d
RS
2326 case SIGN_EXTEND:
2327 inner = XEXP (SET_SRC (x), 0);
2328 pos = 0;
2329 len = GET_MODE_BITSIZE (GET_MODE (inner));
2330 unsignedp = 0;
2331 break;
2332
2333 case SIGN_EXTRACT:
2334 case ZERO_EXTRACT:
2335 if (GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
2336 && GET_CODE (XEXP (SET_SRC (x), 2)) == CONST_INT)
2337 {
2338 inner = XEXP (SET_SRC (x), 0);
2339 len = INTVAL (XEXP (SET_SRC (x), 1));
2340 pos = INTVAL (XEXP (SET_SRC (x), 2));
2341
2342#if BITS_BIG_ENDIAN
2343 pos = GET_MODE_BITSIZE (GET_MODE (inner)) - len - pos;
2344#endif
2345 unsignedp = (code == ZERO_EXTRACT);
2346 }
2347 break;
2348 }
2349
2350 if (len && pos >= 0 && pos + len <= GET_MODE_BITSIZE (GET_MODE (inner)))
2351 {
2352 enum machine_mode mode = GET_MODE (SET_SRC (x));
2353
d0ab8cd3
RK
2354 /* For unsigned, we have a choice of a shift followed by an
2355 AND or two shifts. Use two shifts for field sizes where the
2356 constant might be too large. We assume here that we can
2357 always at least get 8-bit constants in an AND insn, which is
2358 true for every current RISC. */
2359
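 /* For example, a 5-bit unsigned extraction at bit 3 becomes
 (and (lshiftrt INNER (const_int 3)) (const_int 31)), while a
 signed extraction uses (ashiftrt (ashift INNER ...) ...). */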
2360 if (unsignedp && len <= 8)
230d793d
RS
2361 {
2362 SUBST (SET_SRC (x),
2363 gen_rtx_combine
2364 (AND, mode,
2365 gen_rtx_combine (LSHIFTRT, mode,
2366 gen_lowpart_for_combine (mode, inner),
5f4f0e22
CH
2367 GEN_INT (pos)),
2368 GEN_INT (((HOST_WIDE_INT) 1 << len) - 1)));
230d793d 2369
d0ab8cd3 2370 split = find_split_point (&SET_SRC (x), insn);
230d793d
RS
2371 if (split && split != &SET_SRC (x))
2372 return split;
2373 }
2374 else
2375 {
2376 SUBST (SET_SRC (x),
2377 gen_rtx_combine
d0ab8cd3 2378 (unsignedp ? LSHIFTRT : ASHIFTRT, mode,
230d793d
RS
2379 gen_rtx_combine (ASHIFT, mode,
2380 gen_lowpart_for_combine (mode, inner),
5f4f0e22
CH
2381 GEN_INT (GET_MODE_BITSIZE (mode)
2382 - len - pos)),
2383 GEN_INT (GET_MODE_BITSIZE (mode) - len)));
230d793d 2384
d0ab8cd3 2385 split = find_split_point (&SET_SRC (x), insn);
230d793d
RS
2386 if (split && split != &SET_SRC (x))
2387 return split;
2388 }
2389 }
2390
2391 /* See if this is a simple operation with a constant as the second
2392 operand. It might be that this constant is out of range and hence
2393 could be used as a split point. */
2394 if ((GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '2'
2395 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == 'c'
2396 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '<')
2397 && CONSTANT_P (XEXP (SET_SRC (x), 1))
2398 && (GET_RTX_CLASS (GET_CODE (XEXP (SET_SRC (x), 0))) == 'o'
2399 || (GET_CODE (XEXP (SET_SRC (x), 0)) == SUBREG
2400 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (SET_SRC (x), 0))))
2401 == 'o'))))
2402 return &XEXP (SET_SRC (x), 1);
2403
2404 /* Finally, see if this is a simple operation with its first operand
2405 not in a register. The operation might require this operand in a
2406 register, so return it as a split point. We can always do this
2407 because if the first operand were another operation, we would have
2408 already found it as a split point. */
2409 if ((GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '2'
2410 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == 'c'
2411 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '<'
2412 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '1')
2413 && ! register_operand (XEXP (SET_SRC (x), 0), VOIDmode))
2414 return &XEXP (SET_SRC (x), 0);
2415
2416 return 0;
2417
2418 case AND:
2419 case IOR:
2420 /* We write NOR as (and (not A) (not B)), but if we don't have a NOR,
2421 it is better to write this as (not (ior A B)) so we can split it.
2422 Similarly for IOR. */
2423 if (GET_CODE (XEXP (x, 0)) == NOT && GET_CODE (XEXP (x, 1)) == NOT)
2424 {
2425 SUBST (*loc,
2426 gen_rtx_combine (NOT, GET_MODE (x),
2427 gen_rtx_combine (code == IOR ? AND : IOR,
2428 GET_MODE (x),
2429 XEXP (XEXP (x, 0), 0),
2430 XEXP (XEXP (x, 1), 0))));
d0ab8cd3 2431 return find_split_point (loc, insn);
230d793d
RS
2432 }
2433
2434 /* Many RISC machines have a large set of logical insns. If the
2435 second operand is a NOT, put it first so we will try to split the
2436 other operand first. */
2437 if (GET_CODE (XEXP (x, 1)) == NOT)
2438 {
2439 rtx tem = XEXP (x, 0);
2440 SUBST (XEXP (x, 0), XEXP (x, 1));
2441 SUBST (XEXP (x, 1), tem);
2442 }
2443 break;
2444 }
2445
2446 /* Otherwise, select our actions depending on our rtx class. */
2447 switch (GET_RTX_CLASS (code))
2448 {
2449 case 'b': /* This is ZERO_EXTRACT and SIGN_EXTRACT. */
2450 case '3':
d0ab8cd3 2451 split = find_split_point (&XEXP (x, 2), insn);
230d793d
RS
2452 if (split)
2453 return split;
2454 /* ... fall through ... */
2455 case '2':
2456 case 'c':
2457 case '<':
d0ab8cd3 2458 split = find_split_point (&XEXP (x, 1), insn);
230d793d
RS
2459 if (split)
2460 return split;
2461 /* ... fall through ... */
2462 case '1':
2463 /* Some machines have (and (shift ...) ...) insns. If X is not
2464 an AND, but XEXP (X, 0) is, use it as our split point. */
2465 if (GET_CODE (x) != AND && GET_CODE (XEXP (x, 0)) == AND)
2466 return &XEXP (x, 0);
2467
d0ab8cd3 2468 split = find_split_point (&XEXP (x, 0), insn);
230d793d
RS
2469 if (split)
2470 return split;
2471 return loc;
2472 }
2473
2474 /* Otherwise, we don't have a split point. */
2475 return 0;
2476}
2477\f
2478/* Throughout X, replace FROM with TO, and return the result.
2479 The result is TO if X is FROM;
2480 otherwise the result is X, but its contents may have been modified.
2481 If they were modified, a record was made in undobuf so that
2482 undo_all will (among other things) return X to its original state.
2483
 2484 If the number of changes necessary is too great to record for undoing,
 2485 the excess changes are not made, so the result is invalid.
 2486 The changes already made can still be undone.
 2487 undobuf.num_undo is incremented for such changes, so by testing that,
 2488 the caller can tell whether the result is valid.
2489
2490 `n_occurrences' is incremented each time FROM is replaced.
2491
2492 IN_DEST is non-zero if we are processing the SET_DEST of a SET.
2493
5089e22e 2494 UNIQUE_COPY is non-zero if each substitution must be unique. We do this
230d793d
RS
2495 by copying if `n_occurrences' is non-zero. */
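 /* For example, the call subst (i2pat, i1dest, i1src, 0, 0) made in
 try_combine above replaces every use of I1DEST within I2's pattern
 by I1SRC, recording each change in undobuf. */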
2496
2497static rtx
2498subst (x, from, to, in_dest, unique_copy)
2499 register rtx x, from, to;
2500 int in_dest;
2501 int unique_copy;
2502{
2503 register char *fmt;
2504 register int len, i;
2505 register enum rtx_code code = GET_CODE (x), orig_code = code;
2506 rtx temp;
2507 enum machine_mode mode = GET_MODE (x);
2508 enum machine_mode op0_mode = VOIDmode;
2509 rtx other_insn;
2510 rtx *cc_use;
2511 int n_restarts = 0;
2512
2513/* FAKE_EXTEND_SAFE_P (MODE, FROM) is 1 if (subreg:MODE FROM 0) is a safe
2514 replacement for (zero_extend:MODE FROM) or (sign_extend:MODE FROM).
2515 If it is 0, that cannot be done. We can now do this for any MEM
2516 because (SUBREG (MEM...)) is guaranteed to cause the MEM to be reloaded.
2517 If not for that, MEM's would very rarely be safe. */
2518
2519/* Reject MODEs bigger than a word, because we might not be able
2520 to reference a two-register group starting with an arbitrary register
2521 (and currently gen_lowpart might crash for a SUBREG). */
2522
2523#define FAKE_EXTEND_SAFE_P(MODE, FROM) \
2524 (GET_MODE_SIZE (MODE) <= UNITS_PER_WORD)
2525
2526/* Two expressions are equal if they are identical copies of a shared
2527 RTX or if they are both registers with the same register number
2528 and mode. */
2529
2530#define COMBINE_RTX_EQUAL_P(X,Y) \
2531 ((X) == (Y) \
2532 || (GET_CODE (X) == REG && GET_CODE (Y) == REG \
2533 && REGNO (X) == REGNO (Y) && GET_MODE (X) == GET_MODE (Y)))
2534
2535 if (! in_dest && COMBINE_RTX_EQUAL_P (x, from))
2536 {
2537 n_occurrences++;
2538 return (unique_copy && n_occurrences > 1 ? copy_rtx (to) : to);
2539 }
2540
2541 /* If X and FROM are the same register but different modes, they will
2542 not have been seen as equal above. However, flow.c will make a
2543 LOG_LINKS entry for that case. If we do nothing, we will try to
2544 rerecognize our original insn and, when it succeeds, we will
2545 delete the feeding insn, which is incorrect.
2546
2547 So force this insn not to match in this (rare) case. */
2548 if (! in_dest && code == REG && GET_CODE (from) == REG
2549 && REGNO (x) == REGNO (from))
2550 return gen_rtx (CLOBBER, GET_MODE (x), const0_rtx);
2551
2552 /* If this is an object, we are done unless it is a MEM or LO_SUM, both
2553 of which may contain things that can be combined. */
2554 if (code != MEM && code != LO_SUM && GET_RTX_CLASS (code) == 'o')
2555 return x;
2556
2557 /* It is possible to have a subexpression appear twice in the insn.
2558 Suppose that FROM is a register that appears within TO.
2559 Then, after that subexpression has been scanned once by `subst',
2560 the second time it is scanned, TO may be found. If we were
2561 to scan TO here, we would find FROM within it and create a
2562 self-referent rtl structure which is completely wrong. */
2563 if (COMBINE_RTX_EQUAL_P (x, to))
2564 return to;
2565
2566 len = GET_RTX_LENGTH (code);
2567 fmt = GET_RTX_FORMAT (code);
2568
2569 /* We don't need to process a SET_DEST that is a register, CC0, or PC, so
2570 set up to skip this common case. All other cases where we want to
2571 suppress replacing something inside a SET_SRC are handled via the
2572 IN_DEST operand. */
2573 if (code == SET
2574 && (GET_CODE (SET_DEST (x)) == REG
2575 || GET_CODE (SET_DEST (x)) == CC0
2576 || GET_CODE (SET_DEST (x)) == PC))
2577 fmt = "ie";
2578
2579 /* Get the mode of operand 0 in case X is now a SIGN_EXTEND of a constant. */
2580 if (fmt[0] == 'e')
2581 op0_mode = GET_MODE (XEXP (x, 0));
2582
2583 for (i = 0; i < len; i++)
2584 {
2585 if (fmt[i] == 'E')
2586 {
2587 register int j;
2588 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2589 {
2590 register rtx new;
2591 if (COMBINE_RTX_EQUAL_P (XVECEXP (x, i, j), from))
2592 {
2593 new = (unique_copy && n_occurrences ? copy_rtx (to) : to);
2594 n_occurrences++;
2595 }
2596 else
2597 {
2598 new = subst (XVECEXP (x, i, j), from, to, 0, unique_copy);
2599
2600 /* If this substitution failed, this whole thing fails. */
2601 if (GET_CODE (new) == CLOBBER && XEXP (new, 0) == const0_rtx)
2602 return new;
2603 }
2604
2605 SUBST (XVECEXP (x, i, j), new);
2606 }
2607 }
2608 else if (fmt[i] == 'e')
2609 {
2610 register rtx new;
2611
2612 if (COMBINE_RTX_EQUAL_P (XEXP (x, i), from))
2613 {
2614 new = (unique_copy && n_occurrences ? copy_rtx (to) : to);
2615 n_occurrences++;
2616 }
2617 else
2618 /* If we are in a SET_DEST, suppress most cases unless we
2619 have gone inside a MEM, in which case we want to
2620 simplify the address. We assume here that things that
2621 are actually part of the destination have their inner
2622 parts in the first expression. This is true for SUBREG,
2623 STRICT_LOW_PART, and ZERO_EXTRACT, which are the only
2624 things aside from REG and MEM that should appear in a
2625 SET_DEST. */
2626 new = subst (XEXP (x, i), from, to,
2627 (((in_dest
2628 && (code == SUBREG || code == STRICT_LOW_PART
2629 || code == ZERO_EXTRACT))
2630 || code == SET)
2631 && i == 0), unique_copy);
2632
2633 /* If we found that we will have to reject this combination,
2634 indicate that by returning the CLOBBER ourselves, rather than
2635 an expression containing it. This will speed things up as
2636 well as prevent accidents where two CLOBBERs are considered
2637 to be equal, thus producing an incorrect simplification. */
2638
2639 if (GET_CODE (new) == CLOBBER && XEXP (new, 0) == const0_rtx)
2640 return new;
2641
2642 SUBST (XEXP (x, i), new);
2643 }
2644 }
2645
d0ab8cd3
RK
2646 /* We come back to here if we have replaced the expression with one of
2647 a different code and it is likely that further simplification will be
2648 possible. */
2649
2650 restart:
2651
eeb43d32
RK
2652 /* If we have restarted more than 4 times, we are probably looping, so
2653 give up. */
2654 if (++n_restarts > 4)
2655 return x;
2656
2657 /* If we are restarting at all, it means that we no longer know the
2658 original mode of operand 0 (since we have probably changed the
2659 form of X). */
2660
2661 if (n_restarts > 1)
2662 op0_mode = VOIDmode;
2663
d0ab8cd3
RK
2664 code = GET_CODE (x);
2665
230d793d
RS
2666 /* If this is a commutative operation, put a constant last and a complex
2667 expression first. We don't need to do this for comparisons here. */
2668 if (GET_RTX_CLASS (code) == 'c'
2669 && ((CONSTANT_P (XEXP (x, 0)) && GET_CODE (XEXP (x, 1)) != CONST_INT)
2670 || (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == 'o'
2671 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) != 'o')
2672 || (GET_CODE (XEXP (x, 0)) == SUBREG
2673 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0)))) == 'o'
2674 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) != 'o')))
2675 {
2676 temp = XEXP (x, 0);
2677 SUBST (XEXP (x, 0), XEXP (x, 1));
2678 SUBST (XEXP (x, 1), temp);
2679 }
2680
22609cbf
RK
2681 /* If this is a PLUS, MINUS, or MULT, and the first operand is the
2682 sign extension of a PLUS with a constant, reverse the order of the sign
 2683 extension and the addition. Note that this is not the same as the original
2684 code, but overflow is undefined for signed values. Also note that the
2685 PLUS will have been partially moved "inside" the sign-extension, so that
2686 the first operand of X will really look like:
2687 (ashiftrt (plus (ashift A C4) C5) C4).
2688 We convert this to
 2689 (plus (ashiftrt (ashift A C4) C4) C5'), where C5' is C5 >> C4,
2690 and replace the first operand of X with that expression. Later parts
2691 of this function may simplify the expression further.
2692
2693 For example, if we start with (mult (sign_extend (plus A C1)) C2),
2694 we swap the SIGN_EXTEND and PLUS. Later code will apply the
 2695 distributive law to produce (plus (mult (sign_extend A) C2) C3).
2696
2697 We do this to simplify address expressions. */
2698
2699 if ((code == PLUS || code == MINUS || code == MULT)
2700 && GET_CODE (XEXP (x, 0)) == ASHIFTRT
2701 && GET_CODE (XEXP (XEXP (x, 0), 0)) == PLUS
2702 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == ASHIFT
2703 && GET_CODE (XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 1)) == CONST_INT
2704 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
2705 && XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 1) == XEXP (XEXP (x, 0), 1)
2706 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
2707 && (temp = simplify_binary_operation (ASHIFTRT, mode,
2708 XEXP (XEXP (XEXP (x, 0), 0), 1),
2709 XEXP (XEXP (x, 0), 1))) != 0)
2710 {
2711 rtx new
2712 = simplify_shift_const (NULL_RTX, ASHIFT, mode,
2713 XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 0),
2714 INTVAL (XEXP (XEXP (x, 0), 1)));
2715
2716 new = simplify_shift_const (NULL_RTX, ASHIFTRT, mode, new,
2717 INTVAL (XEXP (XEXP (x, 0), 1)));
2718
2719 SUBST (XEXP (x, 0), gen_binary (PLUS, mode, new, temp));
2720 }
2721
d0ab8cd3
RK
2722 /* If this is a simple operation applied to an IF_THEN_ELSE, try
2723 applying it to the arms of the IF_THEN_ELSE. This often simplifies
2724 things. Don't deal with operations that change modes here. */
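 /* For example, (plus (if_then_else COND A B) X) becomes
 (if_then_else COND (plus A X) (plus B X)), and each arm can then
 be simplified on its own. */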
2725
2726 if ((GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c')
2727 && GET_CODE (XEXP (x, 0)) == IF_THEN_ELSE)
2728 {
58744483
RK
2729 /* Don't do this by using SUBST inside X since we might be messing
2730 up a shared expression. */
2731 rtx cond = XEXP (XEXP (x, 0), 0);
2732 rtx t_arm = subst (gen_binary (code, mode, XEXP (XEXP (x, 0), 1),
2733 XEXP (x, 1)),
1a26b032 2734 pc_rtx, pc_rtx, 0, 0);
58744483
RK
2735 rtx f_arm = subst (gen_binary (code, mode, XEXP (XEXP (x, 0), 2),
2736 XEXP (x, 1)),
1a26b032 2737 pc_rtx, pc_rtx, 0, 0);
58744483
RK
2738
2739
2740 x = gen_rtx (IF_THEN_ELSE, mode, cond, t_arm, f_arm);
d0ab8cd3
RK
2741 goto restart;
2742 }
2743
2744 else if (GET_RTX_CLASS (code) == '1'
2745 && GET_CODE (XEXP (x, 0)) == IF_THEN_ELSE
2746 && GET_MODE (XEXP (x, 0)) == mode)
2747 {
58744483
RK
2748 rtx cond = XEXP (XEXP (x, 0), 0);
2749 rtx t_arm = subst (gen_unary (code, mode, XEXP (XEXP (x, 0), 1)),
1a26b032 2750 pc_rtx, pc_rtx, 0, 0);
58744483 2751 rtx f_arm = subst (gen_unary (code, mode, XEXP (XEXP (x, 0), 2)),
1a26b032 2752 pc_rtx, pc_rtx, 0, 0);
58744483
RK
2753
2754 x = gen_rtx_combine (IF_THEN_ELSE, mode, cond, t_arm, f_arm);
d0ab8cd3
RK
2755 goto restart;
2756 }
2757
230d793d
RS
2758 /* Try to fold this expression in case we have constants that weren't
2759 present before. */
2760 temp = 0;
2761 switch (GET_RTX_CLASS (code))
2762 {
2763 case '1':
2764 temp = simplify_unary_operation (code, mode, XEXP (x, 0), op0_mode);
2765 break;
2766 case '<':
2767 temp = simplify_relational_operation (code, op0_mode,
2768 XEXP (x, 0), XEXP (x, 1));
77fa0940
RK
2769#ifdef FLOAT_STORE_FLAG_VALUE
2770 if (temp != 0 && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
2771 temp = ((temp == const0_rtx) ? CONST0_RTX (GET_MODE (x))
2772 : immed_real_const_1 (FLOAT_STORE_FLAG_VALUE, GET_MODE (x)));
2773#endif
230d793d
RS
2774 break;
2775 case 'c':
2776 case '2':
2777 temp = simplify_binary_operation (code, mode, XEXP (x, 0), XEXP (x, 1));
2778 break;
2779 case 'b':
2780 case '3':
2781 temp = simplify_ternary_operation (code, mode, op0_mode, XEXP (x, 0),
2782 XEXP (x, 1), XEXP (x, 2));
2783 break;
2784 }
2785
2786 if (temp)
d0ab8cd3 2787 x = temp, code = GET_CODE (temp);
230d793d 2788
230d793d
RS
2789 /* First see if we can apply the inverse distributive law. */
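 /* For example, (ior (and A C) (and B C)) can become
 (and (ior A B) C). */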
2790 if (code == PLUS || code == MINUS || code == IOR || code == XOR)
2791 {
2792 x = apply_distributive_law (x);
2793 code = GET_CODE (x);
2794 }
2795
2796 /* If CODE is an associative operation not otherwise handled, see if we
2797 can associate some operands. This can win if they are constants or
 2798 if they are logically related (i.e., (a & b) & a). */
2799 if ((code == PLUS || code == MINUS
2800 || code == MULT || code == AND || code == IOR || code == XOR
2801 || code == DIV || code == UDIV
2802 || code == SMAX || code == SMIN || code == UMAX || code == UMIN)
2803 && GET_MODE_CLASS (mode) == MODE_INT)
2804 {
2805 if (GET_CODE (XEXP (x, 0)) == code)
2806 {
2807 rtx other = XEXP (XEXP (x, 0), 0);
2808 rtx inner_op0 = XEXP (XEXP (x, 0), 1);
2809 rtx inner_op1 = XEXP (x, 1);
2810 rtx inner;
2811
2812 /* Make sure we pass the constant operand if any as the second
2813 one if this is a commutative operation. */
2814 if (CONSTANT_P (inner_op0) && GET_RTX_CLASS (code) == 'c')
2815 {
2816 rtx tem = inner_op0;
2817 inner_op0 = inner_op1;
2818 inner_op1 = tem;
2819 }
2820 inner = simplify_binary_operation (code == MINUS ? PLUS
2821 : code == DIV ? MULT
2822 : code == UDIV ? MULT
2823 : code,
2824 mode, inner_op0, inner_op1);
2825
2826 /* For commutative operations, try the other pair if that one
2827 didn't simplify. */
2828 if (inner == 0 && GET_RTX_CLASS (code) == 'c')
2829 {
2830 other = XEXP (XEXP (x, 0), 1);
2831 inner = simplify_binary_operation (code, mode,
2832 XEXP (XEXP (x, 0), 0),
2833 XEXP (x, 1));
2834 }
2835
2836 if (inner)
2837 {
2838 x = gen_binary (code, mode, other, inner);
2839 goto restart;
2840
2841 }
2842 }
2843 }
2844
2845 /* A little bit of algebraic simplification here. */
2846 switch (code)
2847 {
2848 case MEM:
2849 /* Ensure that our address has any ASHIFTs converted to MULT in case
2850 address-recognizing predicates are called later. */
2851 temp = make_compound_operation (XEXP (x, 0), MEM);
2852 SUBST (XEXP (x, 0), temp);
2853 break;
2854
2855 case SUBREG:
2856 /* (subreg:A (mem:B X) N) becomes a modified MEM unless the SUBREG
2857 is paradoxical. If we can't do that safely, then it becomes
2858 something nonsensical so that this combination won't take place. */
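 /* For example, with BYTES_BIG_ENDIAN == 0,
 (subreg:QI (mem:SI ADDR) 0) becomes (mem:QI ADDR), provided the
 MEM is not volatile and ADDR is not mode-dependent. */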
2859
2860 if (GET_CODE (SUBREG_REG (x)) == MEM
2861 && (GET_MODE_SIZE (mode)
2862 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
2863 {
2864 rtx inner = SUBREG_REG (x);
2865 int endian_offset = 0;
2866 /* Don't change the mode of the MEM
2867 if that would change the meaning of the address. */
2868 if (MEM_VOLATILE_P (SUBREG_REG (x))
2869 || mode_dependent_address_p (XEXP (inner, 0)))
2870 return gen_rtx (CLOBBER, mode, const0_rtx);
2871
2872#if BYTES_BIG_ENDIAN
2873 if (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
2874 endian_offset += UNITS_PER_WORD - GET_MODE_SIZE (mode);
2875 if (GET_MODE_SIZE (GET_MODE (inner)) < UNITS_PER_WORD)
2876 endian_offset -= UNITS_PER_WORD - GET_MODE_SIZE (GET_MODE (inner));
2877#endif
2878 /* Note if the plus_constant doesn't make a valid address
2879 then this combination won't be accepted. */
2880 x = gen_rtx (MEM, mode,
2881 plus_constant (XEXP (inner, 0),
2882 (SUBREG_WORD (x) * UNITS_PER_WORD
2883 + endian_offset)));
2884 MEM_VOLATILE_P (x) = MEM_VOLATILE_P (inner);
2885 RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (inner);
2886 MEM_IN_STRUCT_P (x) = MEM_IN_STRUCT_P (inner);
2887 return x;
2888 }
2889
2890 /* If we are in a SET_DEST, these other cases can't apply. */
2891 if (in_dest)
2892 return x;
2893
2894 /* Changing mode twice with SUBREG => just change it once,
2895 or not at all if changing back to starting mode. */
2896 if (GET_CODE (SUBREG_REG (x)) == SUBREG)
2897 {
2898 if (mode == GET_MODE (SUBREG_REG (SUBREG_REG (x)))
2899 && SUBREG_WORD (x) == 0 && SUBREG_WORD (SUBREG_REG (x)) == 0)
2900 return SUBREG_REG (SUBREG_REG (x));
2901
2902 SUBST_INT (SUBREG_WORD (x),
2903 SUBREG_WORD (x) + SUBREG_WORD (SUBREG_REG (x)));
2904 SUBST (SUBREG_REG (x), SUBREG_REG (SUBREG_REG (x)));
2905 }
2906
2907 /* SUBREG of a hard register => just change the register number
2908 and/or mode. If the hard register is not valid in that mode,
26ecfc76
RK
2909 suppress this combination. If the hard register is the stack,
2910 frame, or argument pointer, leave this as a SUBREG. */
230d793d
RS
2911
2912 if (GET_CODE (SUBREG_REG (x)) == REG
26ecfc76
RK
2913 && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER
2914 && REGNO (SUBREG_REG (x)) != FRAME_POINTER_REGNUM
2915#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
2916 && REGNO (SUBREG_REG (x)) != ARG_POINTER_REGNUM
2917#endif
2918 && REGNO (SUBREG_REG (x)) != STACK_POINTER_REGNUM)
230d793d
RS
2919 {
2920 if (HARD_REGNO_MODE_OK (REGNO (SUBREG_REG (x)) + SUBREG_WORD (x),
2921 mode))
2922 return gen_rtx (REG, mode,
2923 REGNO (SUBREG_REG (x)) + SUBREG_WORD (x));
2924 else
2925 return gen_rtx (CLOBBER, mode, const0_rtx);
2926 }
2927
2928 /* For a constant, try to pick up the part we want. Handle a full
a4bde0b1
RK
2929 word and low-order part. Only do this if we are narrowing
2930 the constant; if it is being widened, we have no idea what
2931 the extra bits will have been set to. */
230d793d
RS
2932
2933 if (CONSTANT_P (SUBREG_REG (x)) && op0_mode != VOIDmode
2934 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
a4bde0b1 2935 && GET_MODE_SIZE (op0_mode) < UNITS_PER_WORD
230d793d
RS
2936 && GET_MODE_CLASS (mode) == MODE_INT)
2937 {
2938 temp = operand_subword (SUBREG_REG (x), SUBREG_WORD (x),
5f4f0e22 2939 0, op0_mode);
230d793d
RS
2940 if (temp)
2941 return temp;
2942 }
2943
a4bde0b1
RK
2944 if (CONSTANT_P (SUBREG_REG (x)) && subreg_lowpart_p (x)
2945 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (op0_mode))
230d793d
RS
2946 return gen_lowpart_for_combine (mode, SUBREG_REG (x));
2947
2948 /* If we are narrowing the object, we need to see if we can simplify
2949 the expression for the object knowing that we only need the
d0ab8cd3
RK
2950 low-order bits. */
2951
230d793d 2952 if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
d0ab8cd3
RK
2953 && subreg_lowpart_p (x))
2954 return force_to_mode (SUBREG_REG (x), mode, GET_MODE_BITSIZE (mode),
2955 NULL_RTX);
230d793d
RS
2956 break;
2957
2958 case NOT:
2959 /* (not (plus X -1)) can become (neg X). */
2960 if (GET_CODE (XEXP (x, 0)) == PLUS
2961 && XEXP (XEXP (x, 0), 1) == constm1_rtx)
2962 {
2963 x = gen_rtx_combine (NEG, mode, XEXP (XEXP (x, 0), 0));
2964 goto restart;
2965 }
2966
2967 /* Similarly, (not (neg X)) is (plus X -1). */
2968 if (GET_CODE (XEXP (x, 0)) == NEG)
2969 {
2970 x = gen_rtx_combine (PLUS, mode, XEXP (XEXP (x, 0), 0), constm1_rtx);
2971 goto restart;
2972 }
2973
d0ab8cd3
RK
2974 /* (not (xor X C)) for C constant is (xor X D) with D = ~ C. */
2975 if (GET_CODE (XEXP (x, 0)) == XOR
2976 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
2977 && (temp = simplify_unary_operation (NOT, mode,
2978 XEXP (XEXP (x, 0), 1),
2979 mode)) != 0)
2980 {
2981 SUBST (XEXP (XEXP (x, 0), 1), temp);
2982 return XEXP (x, 0);
2983 }
2984
230d793d
RS
2985 /* (not (ashift 1 X)) is (rotate ~1 X). We used to do this for operands
2986 other than 1, but that is not valid. We could do a similar
2987 simplification for (not (lshiftrt C X)) where C is just the sign bit,
2988 but this doesn't seem common enough to bother with. */
2989 if (GET_CODE (XEXP (x, 0)) == ASHIFT
2990 && XEXP (XEXP (x, 0), 0) == const1_rtx)
2991 {
2992 x = gen_rtx (ROTATE, mode, gen_unary (NOT, mode, const1_rtx),
2993 XEXP (XEXP (x, 0), 1));
2994 goto restart;
2995 }
2996
2997 if (GET_CODE (XEXP (x, 0)) == SUBREG
2998 && subreg_lowpart_p (XEXP (x, 0))
2999 && (GET_MODE_SIZE (GET_MODE (XEXP (x, 0)))
3000 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (x, 0)))))
3001 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == ASHIFT
3002 && XEXP (SUBREG_REG (XEXP (x, 0)), 0) == const1_rtx)
3003 {
3004 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (XEXP (x, 0)));
3005
3006 x = gen_rtx (ROTATE, inner_mode,
3007 gen_unary (NOT, inner_mode, const1_rtx),
3008 XEXP (SUBREG_REG (XEXP (x, 0)), 1));
3009 x = gen_lowpart_for_combine (mode, x);
3010 goto restart;
3011 }
3012
3013#if STORE_FLAG_VALUE == -1
3014 /* (not (comparison foo bar)) can be done by reversing the comparison
3015 code if valid. */
3016 if (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
3017 && reversible_comparison_p (XEXP (x, 0)))
3018 return gen_rtx_combine (reverse_condition (GET_CODE (XEXP (x, 0))),
3019 mode, XEXP (XEXP (x, 0), 0),
3020 XEXP (XEXP (x, 0), 1));
3021#endif
3022
3023 /* Apply De Morgan's laws to reduce number of patterns for machines
3024 with negating logical insns (and-not, nand, etc.). If result has
3025 only one NOT, put it first, since that is how the patterns are
3026 coded. */
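      /* Example (editorial addition): (not (and A (not B))) becomes, by
	 De Morgan, (ior (not A) B); only one operand keeps a NOT, and the
	 code below places it first so the result matches and-not and
	 or-not patterns.  */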
3027
3028 if (GET_CODE (XEXP (x, 0)) == IOR || GET_CODE (XEXP (x, 0)) == AND)
3029 {
3030 rtx in1 = XEXP (XEXP (x, 0), 0), in2 = XEXP (XEXP (x, 0), 1);
3031
3032 if (GET_CODE (in1) == NOT)
3033 in1 = XEXP (in1, 0);
3034 else
3035 in1 = gen_rtx_combine (NOT, GET_MODE (in1), in1);
3036
3037 if (GET_CODE (in2) == NOT)
3038 in2 = XEXP (in2, 0);
3039 else if (GET_CODE (in2) == CONST_INT
3040 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3041 in2 = GEN_INT (GET_MODE_MASK (mode) & ~ INTVAL (in2));
3042 else
3043 in2 = gen_rtx_combine (NOT, GET_MODE (in2), in2);
3044
3045 if (GET_CODE (in2) == NOT)
3046 {
3047 rtx tem = in2;
3048 in2 = in1; in1 = tem;
3049 }
3050
3051 x = gen_rtx_combine (GET_CODE (XEXP (x, 0)) == IOR ? AND : IOR,
3052 mode, in1, in2);
3053 goto restart;
3054 }
3055 break;
3056
3057 case NEG:
3058 /* (neg (plus X 1)) can become (not X). */
3059 if (GET_CODE (XEXP (x, 0)) == PLUS
3060 && XEXP (XEXP (x, 0), 1) == const1_rtx)
3061 {
3062 x = gen_rtx_combine (NOT, mode, XEXP (XEXP (x, 0), 0));
3063 goto restart;
3064 }
3065
3066 /* Similarly, (neg (not X)) is (plus X 1). */
3067 if (GET_CODE (XEXP (x, 0)) == NOT)
3068 {
3069 x = gen_rtx_combine (PLUS, mode, XEXP (XEXP (x, 0), 0), const1_rtx);
3070 goto restart;
3071 }
3072
3073 /* (neg (minus X Y)) can become (minus Y X). */
3074 if (GET_CODE (XEXP (x, 0)) == MINUS
3075 && (GET_MODE_CLASS (mode) != MODE_FLOAT
3076 /* x-y != -(y-x) with IEEE floating point. */
3077 || TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT))
3078 {
3079 x = gen_binary (MINUS, mode, XEXP (XEXP (x, 0), 1),
3080 XEXP (XEXP (x, 0), 0));
3081 goto restart;
3082 }
3083
3084 /* (neg (xor A 1)) is (plus A -1) if A is known to be either 0 or 1. */
3085 if (GET_CODE (XEXP (x, 0)) == XOR && XEXP (XEXP (x, 0), 1) == const1_rtx
3086 && significant_bits (XEXP (XEXP (x, 0), 0), mode) == 1)
3087 {
3088 x = gen_binary (PLUS, mode, XEXP (XEXP (x, 0), 0), constm1_rtx);
3089 goto restart;
3090 }
3091
3092 /* NEG commutes with ASHIFT since it is multiplication. Only do this
3093 if we can then eliminate the NEG (e.g.,
3094 if the operand is a constant). */
3095
3096 if (GET_CODE (XEXP (x, 0)) == ASHIFT)
3097 {
3098 temp = simplify_unary_operation (NEG, mode,
3099 XEXP (XEXP (x, 0), 0), mode);
3100 if (temp)
3101 {
3102 SUBST (XEXP (XEXP (x, 0), 0), temp);
3103 return XEXP (x, 0);
3104 }
3105 }
3106
3107 temp = expand_compound_operation (XEXP (x, 0));
3108
3109 /* For C equal to the width of MODE minus 1, (neg (ashiftrt X C)) can be
3110 replaced by (lshiftrt X C). This will convert
3111 (neg (sign_extract X 1 Y)) to (zero_extract X 1 Y). */
3112
3113 if (GET_CODE (temp) == ASHIFTRT
3114 && GET_CODE (XEXP (temp, 1)) == CONST_INT
3115 && INTVAL (XEXP (temp, 1)) == GET_MODE_BITSIZE (mode) - 1)
3116 {
3117 x = simplify_shift_const (temp, LSHIFTRT, mode, XEXP (temp, 0),
3118 INTVAL (XEXP (temp, 1)));
3119 goto restart;
3120 }
3121
3122 /* If X has only a single bit significant, say, bit I, convert
3123 (neg X) to (ashiftrt (ashift X C-I) C-I) where C is the bitsize of
3124 MODE minus 1. This will convert (neg (zero_extract X 1 Y)) to
3125 (sign_extract X 1 Y). But only do this if TEMP isn't a register
3126 or a SUBREG of one since we'd be making the expression more
3127 complex if it was just a register. */
3128
3129 if (GET_CODE (temp) != REG
3130 && ! (GET_CODE (temp) == SUBREG
3131 && GET_CODE (SUBREG_REG (temp)) == REG)
3132 && (i = exact_log2 (significant_bits (temp, mode))) >= 0)
3133 {
3134 rtx temp1 = simplify_shift_const
3135 (NULL_RTX, ASHIFTRT, mode,
3136 simplify_shift_const (NULL_RTX, ASHIFT, mode, temp,
3137 GET_MODE_BITSIZE (mode) - 1 - i),
3138 GET_MODE_BITSIZE (mode) - 1 - i);
3139
3140 /* If all we did was surround TEMP with the two shifts, we
3141 haven't improved anything, so don't use it. Otherwise,
3142 we are better off with TEMP1. */
3143 if (GET_CODE (temp1) != ASHIFTRT
3144 || GET_CODE (XEXP (temp1, 0)) != ASHIFT
3145 || XEXP (XEXP (temp1, 0), 0) != temp)
3146 {
3147 x = temp1;
3148 goto restart;
3149 }
3150 }
3151 break;
3152
3153 case FLOAT_TRUNCATE:
3154 /* (float_truncate:SF (float_extend:DF foo:SF)) = foo:SF. */
3155 if (GET_CODE (XEXP (x, 0)) == FLOAT_EXTEND
3156 && GET_MODE (XEXP (XEXP (x, 0), 0)) == mode)
3157 return XEXP (XEXP (x, 0), 0);
3158 break;
3159
3160#ifdef HAVE_cc0
3161 case COMPARE:
3162 /* Convert (compare FOO (const_int 0)) to FOO unless we aren't
3163 using cc0, in which case we want to leave it as a COMPARE
3164 so we can distinguish it from a register-register-copy. */
3165 if (XEXP (x, 1) == const0_rtx)
3166 return XEXP (x, 0);
3167
3168 /* In IEEE floating point, x-0 is not the same as x. */
3169 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3170 || GET_MODE_CLASS (GET_MODE (XEXP (x, 0))) == MODE_INT)
3171 && XEXP (x, 1) == CONST0_RTX (GET_MODE (XEXP (x, 0))))
3172 return XEXP (x, 0);
3173 break;
3174#endif
3175
3176 case CONST:
3177 /* (const (const X)) can become (const X). Do it this way rather than
3178 returning the inner CONST since CONST can be shared with a
3179 REG_EQUAL note. */
3180 if (GET_CODE (XEXP (x, 0)) == CONST)
3181 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
3182 break;
3183
3184#ifdef HAVE_lo_sum
3185 case LO_SUM:
3186 /* Convert (lo_sum (high FOO) FOO) to FOO. This is necessary so we
3187 can add in an offset. find_split_point will split this address up
3188 again if it doesn't match. */
3189 if (GET_CODE (XEXP (x, 0)) == HIGH
3190 && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1)))
3191 return XEXP (x, 1);
3192 break;
3193#endif
3194
3195 case PLUS:
3196       /* If we have (plus (plus A const) B), associate it so that CONST is
3197 outermost. That's because that's the way indexed addresses are
3198 supposed to appear. This code used to check many more cases, but
3199 they are now checked elsewhere. */
3200 if (GET_CODE (XEXP (x, 0)) == PLUS
3201 && CONSTANT_ADDRESS_P (XEXP (XEXP (x, 0), 1)))
3202 return gen_binary (PLUS, mode,
3203 gen_binary (PLUS, mode, XEXP (XEXP (x, 0), 0),
3204 XEXP (x, 1)),
3205 XEXP (XEXP (x, 0), 1));
3206
3207 /* (plus (xor (and <foo> (const_int pow2 - 1)) <c>) <-c>)
3208	 when c is (const_int (pow2 / 2)) is a sign extension of a
3209 bit-field and can be replaced by either a sign_extend or a
3210 sign_extract. The `and' may be a zero_extend. */
3211 if (GET_CODE (XEXP (x, 0)) == XOR
3212 && GET_CODE (XEXP (x, 1)) == CONST_INT
3213 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3214 && INTVAL (XEXP (x, 1)) == - INTVAL (XEXP (XEXP (x, 0), 1))
3215 && (i = exact_log2 (INTVAL (XEXP (XEXP (x, 0), 1)))) >= 0
3216	  && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3217 && ((GET_CODE (XEXP (XEXP (x, 0), 0)) == AND
3218 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
3219 && (INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1))
3220		   == ((HOST_WIDE_INT) 1 << (i + 1)) - 1))
3221 || (GET_CODE (XEXP (XEXP (x, 0), 0)) == ZERO_EXTEND
3222 && (GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)))
3223 == i + 1))))
3224 {
3225 x = simplify_shift_const
3226 (NULL_RTX, ASHIFTRT, mode,
3227 simplify_shift_const (NULL_RTX, ASHIFT, mode,
3228 XEXP (XEXP (XEXP (x, 0), 0), 0),
3229 GET_MODE_BITSIZE (mode) - (i + 1)),
3230 GET_MODE_BITSIZE (mode) - (i + 1));
3231 goto restart;
3232 }
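	  /* Worked instance (editorial addition): with i == 3 this matches
	     (plus (xor (and X (const_int 15)) (const_int 8)) (const_int -8));
	     for a 4-bit field value F, (F ^ 8) - 8 is F when F < 8 and
	     F - 16 otherwise, i.e. the field's sign extension, which the
	     shift pair just built implements.  */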
3233
3234 /* If only the low-order bit of X is significant, (plus x -1)
3235 can become (ashiftrt (ashift (xor x 1) C) C) where C is
3236 the bitsize of the mode - 1. This allows simplification of
3237 "a = (b & 8) == 0;" */
3238 if (XEXP (x, 1) == constm1_rtx
3239 && GET_CODE (XEXP (x, 0)) != REG
3240 && ! (GET_CODE (XEXP (x,0)) == SUBREG
3241 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG)
3242 && significant_bits (XEXP (x, 0), mode) == 1)
3243 {
3244 x = simplify_shift_const
3245 (NULL_RTX, ASHIFTRT, mode,
3246 simplify_shift_const (NULL_RTX, ASHIFT, mode,
3247 gen_rtx_combine (XOR, mode,
3248 XEXP (x, 0), const1_rtx),
3249 GET_MODE_BITSIZE (mode) - 1),
3250 GET_MODE_BITSIZE (mode) - 1);
3251 goto restart;
3252 }
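	  /* Worked example (editorial addition), assuming 32-bit SImode:
	     when only bit 0 of X is significant, (plus X (const_int -1))
	     becomes (ashiftrt (ashift (xor X (const_int 1)) (const_int 31))
	     (const_int 31)); X = 1 yields 0 and X = 0 yields -1, matching
	     X - 1 in both cases.  */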
3253
3254 /* If we are adding two things that have no bits in common, convert
3255 the addition into an IOR. This will often be further simplified,
3256 for example in cases like ((a & 1) + (a & 2)), which can
3257 become a & 3. */
3258
3259 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3260 && (significant_bits (XEXP (x, 0), mode)
3261 & significant_bits (XEXP (x, 1), mode)) == 0)
3262 {
3263 x = gen_binary (IOR, mode, XEXP (x, 0), XEXP (x, 1));
3264 goto restart;
3265 }
3266 break;
3267
3268 case MINUS:
3269 /* (minus <foo> (and <foo> (const_int -pow2))) becomes
3270 (and <foo> (const_int pow2-1)) */
3271 if (GET_CODE (XEXP (x, 1)) == AND
3272 && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
3273 && exact_log2 (- INTVAL (XEXP (XEXP (x, 1), 1))) >= 0
3274 && rtx_equal_p (XEXP (XEXP (x, 1), 0), XEXP (x, 0)))
3275 {
3276	  x = simplify_and_const_int (NULL_RTX, mode, XEXP (x, 0),
3277 - INTVAL (XEXP (XEXP (x, 1), 1)) - 1);
3278 goto restart;
3279 }
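	  /* Concrete instance (editorial addition):
	     (minus X (and X (const_int -8))) becomes
	     (and X (const_int 7)), i.e. x - (x & ~7) == x & 7.  */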
3280 break;
3281
3282 case MULT:
3283 /* If we have (mult (plus A B) C), apply the distributive law and then
3284 the inverse distributive law to see if things simplify. This
3285 occurs mostly in addresses, often when unrolling loops. */
3286
3287 if (GET_CODE (XEXP (x, 0)) == PLUS)
3288 {
3289 x = apply_distributive_law
3290 (gen_binary (PLUS, mode,
3291 gen_binary (MULT, mode,
3292 XEXP (XEXP (x, 0), 0), XEXP (x, 1)),
3293 gen_binary (MULT, mode,
3294 XEXP (XEXP (x, 0), 1), XEXP (x, 1))));
3295
3296 if (GET_CODE (x) != MULT)
3297 goto restart;
3298 }
3299
3300 /* If this is multiplication by a power of two and its first operand is
3301 a shift, treat the multiply as a shift to allow the shifts to
3302 possibly combine. */
3303 if (GET_CODE (XEXP (x, 1)) == CONST_INT
3304 && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0
3305 && (GET_CODE (XEXP (x, 0)) == ASHIFT
3306 || GET_CODE (XEXP (x, 0)) == LSHIFTRT
3307 || GET_CODE (XEXP (x, 0)) == ASHIFTRT
3308 || GET_CODE (XEXP (x, 0)) == ROTATE
3309 || GET_CODE (XEXP (x, 0)) == ROTATERT))
3310 {
3311	  x = simplify_shift_const (NULL_RTX, ASHIFT, mode, XEXP (x, 0), i);
3312 goto restart;
3313 }
3314
3315 /* Convert (mult (ashift (const_int 1) A) B) to (ashift B A). */
3316 if (GET_CODE (XEXP (x, 0)) == ASHIFT
3317 && XEXP (XEXP (x, 0), 0) == const1_rtx)
3318 return gen_rtx_combine (ASHIFT, mode, XEXP (x, 1),
3319 XEXP (XEXP (x, 0), 1));
3320 break;
3321
3322 case UDIV:
3323 /* If this is a divide by a power of two, treat it as a shift if
3324 its first operand is a shift. */
3325 if (GET_CODE (XEXP (x, 1)) == CONST_INT
3326 && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0
3327 && (GET_CODE (XEXP (x, 0)) == ASHIFT
3328 || GET_CODE (XEXP (x, 0)) == LSHIFTRT
3329 || GET_CODE (XEXP (x, 0)) == ASHIFTRT
3330 || GET_CODE (XEXP (x, 0)) == ROTATE
3331 || GET_CODE (XEXP (x, 0)) == ROTATERT))
3332 {
3333	  x = simplify_shift_const (NULL_RTX, LSHIFTRT, mode, XEXP (x, 0), i);
3334 goto restart;
3335 }
3336 break;
3337
3338 case EQ: case NE:
3339 case GT: case GTU: case GE: case GEU:
3340 case LT: case LTU: case LE: case LEU:
3341 /* If the first operand is a condition code, we can't do anything
3342 with it. */
3343 if (GET_CODE (XEXP (x, 0)) == COMPARE
3344 || (GET_MODE_CLASS (GET_MODE (XEXP (x, 0))) != MODE_CC
3345#ifdef HAVE_cc0
3346 && XEXP (x, 0) != cc0_rtx
3347#endif
3348 ))
3349 {
3350 rtx op0 = XEXP (x, 0);
3351 rtx op1 = XEXP (x, 1);
3352 enum rtx_code new_code;
3353
3354 if (GET_CODE (op0) == COMPARE)
3355 op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);
3356
3357 /* Simplify our comparison, if possible. */
3358 new_code = simplify_comparison (code, &op0, &op1);
3359
3360#if STORE_FLAG_VALUE == 1
3361 /* If STORE_FLAG_VALUE is 1, we can convert (ne x 0) to simply X
3362 if only the low-order bit is significant in X (such as when
3363	 X is a ZERO_EXTRACT of one bit).  Similarly, we can convert
3364 EQ to (xor X 1). */
3365	  if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
3366 && op1 == const0_rtx
3367 && significant_bits (op0, GET_MODE (op0)) == 1)
3368 return gen_lowpart_for_combine (mode, op0);
3369	  else if (new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
3370 && op1 == const0_rtx
3371 && significant_bits (op0, GET_MODE (op0)) == 1)
3372 return gen_rtx_combine (XOR, mode,
3373 gen_lowpart_for_combine (mode, op0),
3374 const1_rtx);
3375#endif
3376
3377#if STORE_FLAG_VALUE == -1
3378 /* If STORE_FLAG_VALUE is -1, we can convert (ne x 0)
3379 to (neg x) if only the low-order bit of X is significant.
3380 This converts (ne (zero_extract X 1 Y) 0) to
3381 (sign_extract X 1 Y). */
3382	  if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
3383 && op1 == const0_rtx
3384 && significant_bits (op0, GET_MODE (op0)) == 1)
3385 {
3386 x = gen_rtx_combine (NEG, mode,
3387 gen_lowpart_for_combine (mode, op0));
3388 goto restart;
3389 }
3390#endif
3391
3392 /* If STORE_FLAG_VALUE says to just test the sign bit and X has just
3393 one significant bit, we can convert (ne x 0) to (ashift x c)
3394 where C puts the bit in the sign bit. Remove any AND with
3395 STORE_FLAG_VALUE when we are done, since we are only going to
3396 test the sign bit. */
3397	  if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
3398 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3399 && (STORE_FLAG_VALUE
3400 == (HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (mode) - 1))
3401 && op1 == const0_rtx
3402 && mode == GET_MODE (op0)
3403 && (i = exact_log2 (significant_bits (op0, GET_MODE (op0)))) >= 0)
3404 {
3405	      x = simplify_shift_const (NULL_RTX, ASHIFT, mode, op0,
3406 GET_MODE_BITSIZE (mode) - 1 - i);
3407 if (GET_CODE (x) == AND && XEXP (x, 1) == const_true_rtx)
3408 return XEXP (x, 0);
3409 else
3410 return x;
3411 }
3412
3413 /* If the code changed, return a whole new comparison. */
3414 if (new_code != code)
3415 return gen_rtx_combine (new_code, mode, op0, op1);
3416
3417 /* Otherwise, keep this operation, but maybe change its operands.
3418 This also converts (ne (compare FOO BAR) 0) to (ne FOO BAR). */
3419 SUBST (XEXP (x, 0), op0);
3420 SUBST (XEXP (x, 1), op1);
3421 }
3422 break;
3423
3424 case IF_THEN_ELSE:
3425 /* Sometimes we can simplify the arm of an IF_THEN_ELSE if a register
3426 used in it is being compared against certain values. Get the
3427 true and false comparisons and see if that says anything about the
3428 value of each arm. */
3429
3430 if (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
3431 && reversible_comparison_p (XEXP (x, 0))
3432 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG)
3433 {
3434 HOST_WIDE_INT sig;
3435 rtx from = XEXP (XEXP (x, 0), 0);
3436 enum rtx_code true_code = GET_CODE (XEXP (x, 0));
3437 enum rtx_code false_code = reverse_condition (true_code);
3438 rtx true_val = XEXP (XEXP (x, 0), 1);
3439 rtx false_val = true_val;
3440 rtx true_arm = XEXP (x, 1);
3441 rtx false_arm = XEXP (x, 2);
3442 int swapped = 0;
3443
3444 /* If FALSE_CODE is EQ, swap the codes and arms. */
3445
3446 if (false_code == EQ)
3447 {
3448 swapped = 1, true_code = EQ, false_code = NE;
3449 true_arm = XEXP (x, 2), false_arm = XEXP (x, 1);
3450 }
3451
3452 /* If we are comparing against zero and the expression being tested
3453 has only a single significant bit, that is its value when it is
3454 not equal to zero. Similarly if it is known to be -1 or 0. */
3455
3456	  if (true_code == EQ && true_val == const0_rtx
3457 && exact_log2 (sig = significant_bits (from,
3458 GET_MODE (from))) >= 0)
3459 false_code = EQ, false_val = GEN_INT (sig);
3460 else if (true_code == EQ && true_val == const0_rtx
3461 && (num_sign_bit_copies (from, GET_MODE (from))
3462 == GET_MODE_BITSIZE (GET_MODE (from))))
3463	    false_code = EQ, false_val = constm1_rtx;
3464
3465 /* Now simplify an arm if we know the value of the register
3466	     in the branch and it is used in the arm.  Be careful due to
3467 the potential of locally-shared RTL. */
3468
3469 if (reg_mentioned_p (from, true_arm))
3470 true_arm = subst (known_cond (copy_rtx (true_arm), true_code,
3471 from, true_val),
3472 pc_rtx, pc_rtx, 0, 0);
3473 if (reg_mentioned_p (from, false_arm))
3474 false_arm = subst (known_cond (copy_rtx (false_arm), false_code,
3475 from, false_val),
3476 pc_rtx, pc_rtx, 0, 0);
3477
3478 SUBST (XEXP (x, 1), swapped ? false_arm : true_arm);
3479 SUBST (XEXP (x, 2), swapped ? true_arm : false_arm);
3480 }
3481
3482 /* If we have (if_then_else FOO (pc) (label_ref BAR)) and FOO can be
3483 reversed, do so to avoid needing two sets of patterns for
3484	 subtract-and-branch insns.  Similarly if we have a constant in that
3485 position or if the third operand is the same as the first operand
3486 of the comparison. */
3487
3488 if (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
3489 && reversible_comparison_p (XEXP (x, 0))
3490 && (XEXP (x, 1) == pc_rtx || GET_CODE (XEXP (x, 1)) == CONST_INT
3491 || rtx_equal_p (XEXP (x, 2), XEXP (XEXP (x, 0), 0))))
3492 {
3493 SUBST (XEXP (x, 0),
3494 gen_binary (reverse_condition (GET_CODE (XEXP (x, 0))),
3495 GET_MODE (XEXP (x, 0)),
3496 XEXP (XEXP (x, 0), 0), XEXP (XEXP (x, 0), 1)));
3497
3498 temp = XEXP (x, 1);
3499	  SUBST (XEXP (x, 1), XEXP (x, 2));
3500	  SUBST (XEXP (x, 2), temp);
3501	}
3502
3503 /* If the two arms are identical, we don't need the comparison. */
3504
3505 if (rtx_equal_p (XEXP (x, 1), XEXP (x, 2))
3506 && ! side_effects_p (XEXP (x, 0)))
3507 return XEXP (x, 1);
3508
3509 /* Look for cases where we have (abs x) or (neg (abs X)). */
3510
3511 if (GET_MODE_CLASS (mode) == MODE_INT
3512 && GET_CODE (XEXP (x, 2)) == NEG
3513 && rtx_equal_p (XEXP (x, 1), XEXP (XEXP (x, 2), 0))
3514 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
3515 && rtx_equal_p (XEXP (x, 1), XEXP (XEXP (x, 0), 0))
3516 && ! side_effects_p (XEXP (x, 1)))
3517 switch (GET_CODE (XEXP (x, 0)))
3518 {
3519 case GT:
3520 case GE:
3521 x = gen_unary (ABS, mode, XEXP (x, 1));
3522 goto restart;
3523 case LT:
3524 case LE:
3525 x = gen_unary (NEG, mode, gen_unary (ABS, mode, XEXP (x, 1)));
3526 goto restart;
3527 }
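      /* E.g. (editorial addition): (if_then_else (ge X (const_int 0))
	 X (neg X)) just became (abs X); the LT and LE forms become
	 (neg (abs X)).  */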
3528
3529 /* Look for MIN or MAX. */
3530
3531 if (GET_MODE_CLASS (mode) == MODE_INT
3532 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
3533 && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1))
3534 && rtx_equal_p (XEXP (XEXP (x, 0), 1), XEXP (x, 2))
3535 && ! side_effects_p (XEXP (x, 0)))
3536 switch (GET_CODE (XEXP (x, 0)))
3537 {
3538 case GE:
3539 case GT:
3540 x = gen_binary (SMAX, mode, XEXP (x, 1), XEXP (x, 2));
3541 goto restart;
3542 case LE:
3543 case LT:
3544 x = gen_binary (SMIN, mode, XEXP (x, 1), XEXP (x, 2));
3545 goto restart;
3546 case GEU:
3547 case GTU:
3548 x = gen_binary (UMAX, mode, XEXP (x, 1), XEXP (x, 2));
3549 goto restart;
3550 case LEU:
3551 case LTU:
3552 x = gen_binary (UMIN, mode, XEXP (x, 1), XEXP (x, 2));
3553 goto restart;
3554 }
3555
3556 /* If we have something like (if_then_else (ne A 0) (OP X C) X),
3557 A is known to be either 0 or 1, and OP is an identity when its
3558 second operand is zero, this can be done as (OP X (mult A C)).
3559 Similarly if A is known to be 0 or -1 and also similarly if we have
3560 a ZERO_EXTEND or SIGN_EXTEND as long as X is already extended (so
3561 we don't destroy it). */
3562
3563 if (mode != VOIDmode
3564 && (GET_CODE (XEXP (x, 0)) == EQ || GET_CODE (XEXP (x, 0)) == NE)
3565 && XEXP (XEXP (x, 0), 1) == const0_rtx
3566 && (significant_bits (XEXP (XEXP (x, 0), 0), mode) == 1
3567 || (num_sign_bit_copies (XEXP (XEXP (x, 0), 0), mode)
3568 == GET_MODE_BITSIZE (mode))))
3569 {
3570 rtx nz = make_compound_operation (GET_CODE (XEXP (x, 0)) == NE
3571 ? XEXP (x, 1) : XEXP (x, 2));
3572 rtx z = GET_CODE (XEXP (x, 0)) == NE ? XEXP (x, 2) : XEXP (x, 1);
3573 rtx dir = (significant_bits (XEXP (XEXP (x, 0), 0), mode) == 1
3574 ? const1_rtx : constm1_rtx);
3575 rtx c = 0;
3576 enum machine_mode m = mode;
3577	  enum rtx_code op, extend_op = 0;
3578
3579 if ((GET_CODE (nz) == PLUS || GET_CODE (nz) == MINUS
3580 || GET_CODE (nz) == IOR || GET_CODE (nz) == XOR
3581 || GET_CODE (nz) == ASHIFT
3582 || GET_CODE (nz) == LSHIFTRT || GET_CODE (nz) == ASHIFTRT)
3583 && rtx_equal_p (XEXP (nz, 0), z))
3584 c = XEXP (nz, 1), op = GET_CODE (nz);
3585 else if (GET_CODE (nz) == SIGN_EXTEND
3586 && (GET_CODE (XEXP (nz, 0)) == PLUS
3587 || GET_CODE (XEXP (nz, 0)) == MINUS
3588 || GET_CODE (XEXP (nz, 0)) == IOR
3589 || GET_CODE (XEXP (nz, 0)) == XOR
3590 || GET_CODE (XEXP (nz, 0)) == ASHIFT
3591 || GET_CODE (XEXP (nz, 0)) == LSHIFTRT
3592 || GET_CODE (XEXP (nz, 0)) == ASHIFTRT)
3593 && GET_CODE (XEXP (XEXP (nz, 0), 0)) == SUBREG
3594 && subreg_lowpart_p (XEXP (XEXP (nz, 0), 0))
3595 && rtx_equal_p (SUBREG_REG (XEXP (XEXP (nz, 0), 0)), z)
3596 && (num_sign_bit_copies (z, GET_MODE (z))
3597 >= (GET_MODE_BITSIZE (mode)
3598 - GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (nz, 0), 0))))))
3599 {
3600 c = XEXP (XEXP (nz, 0), 1);
3601 op = GET_CODE (XEXP (nz, 0));
3602 extend_op = SIGN_EXTEND;
3603 m = GET_MODE (XEXP (nz, 0));
3604 }
3605 else if (GET_CODE (nz) == ZERO_EXTEND
3606 && (GET_CODE (XEXP (nz, 0)) == PLUS
3607 || GET_CODE (XEXP (nz, 0)) == MINUS
3608 || GET_CODE (XEXP (nz, 0)) == IOR
3609 || GET_CODE (XEXP (nz, 0)) == XOR
3610 || GET_CODE (XEXP (nz, 0)) == ASHIFT
3611 || GET_CODE (XEXP (nz, 0)) == LSHIFTRT
3612 || GET_CODE (XEXP (nz, 0)) == ASHIFTRT)
3613 && GET_CODE (XEXP (XEXP (nz, 0), 0)) == SUBREG
3614 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3615 && subreg_lowpart_p (XEXP (XEXP (nz, 0), 0))
3616 && rtx_equal_p (SUBREG_REG (XEXP (XEXP (nz, 0), 0)), z)
3617 && ((significant_bits (z, GET_MODE (z))
3618 & ~ GET_MODE_MASK (GET_MODE (XEXP (XEXP (nz, 0), 0))))
3619 == 0))
3620 {
3621 c = XEXP (XEXP (nz, 0), 1);
3622 op = GET_CODE (XEXP (nz, 0));
3623 extend_op = ZERO_EXTEND;
3624 m = GET_MODE (XEXP (nz, 0));
3625 }
3626
3627 if (c && ! side_effects_p (c) && ! side_effects_p (z))
3628 {
3629 temp
3630 = gen_binary (MULT, m,
3631 gen_lowpart_for_combine (m,
3632 XEXP (XEXP (x, 0), 0)),
3633 gen_binary (MULT, m, c, dir));
3634
3635 temp = gen_binary (op, m, gen_lowpart_for_combine (m, z), temp);
3636
3637	      if (extend_op != 0)
3638 temp = gen_unary (extend_op, mode, temp);
3639
3640 return temp;
3641 }
3642 }
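      /* Example (editorial addition): with A known to be 0 or 1,
	 (if_then_else (ne A (const_int 0)) (plus X (const_int 4)) X)
	 becomes (plus X (mult A (const_int 4))); the product is 4 when
	 A is 1 and 0 when A is 0, and PLUS is an identity on zero.  */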
3643 break;
3644
3645 case ZERO_EXTRACT:
3646 case SIGN_EXTRACT:
3647 case ZERO_EXTEND:
3648 case SIGN_EXTEND:
3649 /* If we are processing SET_DEST, we are done. */
3650 if (in_dest)
3651 return x;
3652
3653 x = expand_compound_operation (x);
3654 if (GET_CODE (x) != code)
3655 goto restart;
3656 break;
3657
3658 case SET:
3659 /* (set (pc) (return)) gets written as (return). */
3660 if (GET_CODE (SET_DEST (x)) == PC && GET_CODE (SET_SRC (x)) == RETURN)
3661 return SET_SRC (x);
3662
3663 /* Convert this into a field assignment operation, if possible. */
3664 x = make_field_assignment (x);
3665
3666 /* If we are setting CC0 or if the source is a COMPARE, look for the
3667 use of the comparison result and try to simplify it unless we already
3668 have used undobuf.other_insn. */
3669 if ((GET_CODE (SET_SRC (x)) == COMPARE
3670#ifdef HAVE_cc0
3671 || SET_DEST (x) == cc0_rtx
3672#endif
3673 )
3674 && (cc_use = find_single_use (SET_DEST (x), subst_insn,
3675 &other_insn)) != 0
3676 && (undobuf.other_insn == 0 || other_insn == undobuf.other_insn)
3677 && GET_RTX_CLASS (GET_CODE (*cc_use)) == '<'
3678 && XEXP (*cc_use, 0) == SET_DEST (x))
3679 {
3680 enum rtx_code old_code = GET_CODE (*cc_use);
3681 enum rtx_code new_code;
3682 rtx op0, op1;
3683 int other_changed = 0;
3684 enum machine_mode compare_mode = GET_MODE (SET_DEST (x));
3685
3686 if (GET_CODE (SET_SRC (x)) == COMPARE)
3687 op0 = XEXP (SET_SRC (x), 0), op1 = XEXP (SET_SRC (x), 1);
3688 else
3689 op0 = SET_SRC (x), op1 = const0_rtx;
3690
3691 /* Simplify our comparison, if possible. */
3692 new_code = simplify_comparison (old_code, &op0, &op1);
3693
3694#if !defined (HAVE_cc0) && defined (EXTRA_CC_MODES)
3695 /* If this machine has CC modes other than CCmode, check to see
3696 if we need to use a different CC mode here. */
3697	  compare_mode = SELECT_CC_MODE (new_code, op0, op1);
3698
3699 /* If the mode changed, we have to change SET_DEST, the mode
3700 in the compare, and the mode in the place SET_DEST is used.
3701 If SET_DEST is a hard register, just build new versions with
3702	     the proper mode.  If it is a pseudo, we lose unless this is the only
3703	     time we set the pseudo, in which case we can safely change
3704 its mode. */
3705 if (compare_mode != GET_MODE (SET_DEST (x)))
3706 {
3707 int regno = REGNO (SET_DEST (x));
3708 rtx new_dest = gen_rtx (REG, compare_mode, regno);
3709
3710 if (regno < FIRST_PSEUDO_REGISTER
3711 || (reg_n_sets[regno] == 1
3712 && ! REG_USERVAR_P (SET_DEST (x))))
3713 {
3714 if (regno >= FIRST_PSEUDO_REGISTER)
3715 SUBST (regno_reg_rtx[regno], new_dest);
3716
3717 SUBST (SET_DEST (x), new_dest);
3718 SUBST (XEXP (*cc_use, 0), new_dest);
3719 other_changed = 1;
3720 }
3721 }
3722#endif
3723
3724 /* If the code changed, we have to build a new comparison
3725 in undobuf.other_insn. */
3726 if (new_code != old_code)
3727 {
3728 unsigned mask;
3729
3730 SUBST (*cc_use, gen_rtx_combine (new_code, GET_MODE (*cc_use),
3731 SET_DEST (x), const0_rtx));
3732
3733 /* If the only change we made was to change an EQ into an
3734 NE or vice versa, OP0 has only one significant bit,
3735 and OP1 is zero, check if changing the user of the condition
3736 code will produce a valid insn. If it won't, we can keep
3737 the original code in that insn by surrounding our operation
3738 with an XOR. */
3739
3740 if (((old_code == NE && new_code == EQ)
3741 || (old_code == EQ && new_code == NE))
3742 && ! other_changed && op1 == const0_rtx
3743 && (GET_MODE_BITSIZE (GET_MODE (op0))
3744 <= HOST_BITS_PER_WIDE_INT)
3745 && (exact_log2 (mask = significant_bits (op0,
3746 GET_MODE (op0)))
3747 >= 0))
3748 {
3749 rtx pat = PATTERN (other_insn), note = 0;
3750
3751 if ((recog_for_combine (&pat, undobuf.other_insn, &note) < 0
3752 && ! check_asm_operands (pat)))
3753 {
3754 PUT_CODE (*cc_use, old_code);
3755 other_insn = 0;
3756
3757 op0 = gen_binary (XOR, GET_MODE (op0), op0,
3758				    GEN_INT (mask));
3759 }
3760 }
3761
3762 other_changed = 1;
3763 }
3764
3765 if (other_changed)
3766 undobuf.other_insn = other_insn;
3767
3768#ifdef HAVE_cc0
3769 /* If we are now comparing against zero, change our source if
3770 needed. If we do not use cc0, we always have a COMPARE. */
3771 if (op1 == const0_rtx && SET_DEST (x) == cc0_rtx)
3772 SUBST (SET_SRC (x), op0);
3773 else
3774#endif
3775
3776 /* Otherwise, if we didn't previously have a COMPARE in the
3777 correct mode, we need one. */
3778 if (GET_CODE (SET_SRC (x)) != COMPARE
3779 || GET_MODE (SET_SRC (x)) != compare_mode)
3780 SUBST (SET_SRC (x), gen_rtx_combine (COMPARE, compare_mode,
3781 op0, op1));
3782 else
3783 {
3784 /* Otherwise, update the COMPARE if needed. */
3785 SUBST (XEXP (SET_SRC (x), 0), op0);
3786 SUBST (XEXP (SET_SRC (x), 1), op1);
3787 }
3788 }
3789 else
3790 {
3791 /* Get SET_SRC in a form where we have placed back any
3792 compound expressions. Then do the checks below. */
3793 temp = make_compound_operation (SET_SRC (x), SET);
3794 SUBST (SET_SRC (x), temp);
3795 }
3796
3797 /* If we have (set x (subreg:m1 (op:m2 ...) 0)) with OP being some
3798 operation, and X being a REG or (subreg (reg)), we may be able to
3799 convert this to (set (subreg:m2 x) (op)).
3800
3801 We can always do this if M1 is narrower than M2 because that
3802 means that we only care about the low bits of the result.
3803
3804	 However, on most machines (those with BYTE_LOADS_ZERO_EXTEND
3805	 and BYTE_LOADS_SIGN_EXTEND not defined), we cannot perform a
3806	 narrower operation than requested since the high-order bits will
3807	 be undefined.  On machines where BYTE_LOADS_*_EXTEND is defined,
3808	 however, this transformation is safe as long as M1 and M2 have
3809	 the same number of words.  */
3810
3811 if (GET_CODE (SET_SRC (x)) == SUBREG
3812 && subreg_lowpart_p (SET_SRC (x))
3813 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (SET_SRC (x)))) != 'o'
3814 && (((GET_MODE_SIZE (GET_MODE (SET_SRC (x))) + (UNITS_PER_WORD - 1))
3815 / UNITS_PER_WORD)
3816 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_SRC (x))))
3817 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
3818#if ! defined(BYTE_LOADS_ZERO_EXTEND) && ! defined (BYTE_LOADS_SIGN_EXTEND)
3819 && (GET_MODE_SIZE (GET_MODE (SET_SRC (x)))
3820 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_SRC (x)))))
3821#endif
3822 && (GET_CODE (SET_DEST (x)) == REG
3823 || (GET_CODE (SET_DEST (x)) == SUBREG
3824 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG)))
3825 {
3826	  SUBST (SET_DEST (x),
3827 gen_lowpart_for_combine (GET_MODE (SUBREG_REG (SET_SRC (x))),
3828 SET_DEST (x)));
3829 SUBST (SET_SRC (x), SUBREG_REG (SET_SRC (x)));
3830 }
3831
3832#ifdef BYTE_LOADS_ZERO_EXTEND
3833 /* If we have (set FOO (subreg:M (mem:N BAR) 0)) with
3834 M wider than N, this would require a paradoxical subreg.
3835 Replace the subreg with a zero_extend to avoid the reload that
3836 would otherwise be required. */
3837 if (GET_CODE (SET_SRC (x)) == SUBREG
3838 && subreg_lowpart_p (SET_SRC (x))
3839 && SUBREG_WORD (SET_SRC (x)) == 0
3840 && (GET_MODE_SIZE (GET_MODE (SET_SRC (x)))
3841 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_SRC (x)))))
3842 && GET_CODE (SUBREG_REG (SET_SRC (x))) == MEM)
3843 SUBST (SET_SRC (x), gen_rtx_combine (ZERO_EXTEND,
3844 GET_MODE (SET_SRC (x)),
3845 XEXP (SET_SRC (x), 0)));
3846#endif
3847
3848#ifndef HAVE_conditional_move
3849
3850 /* If we don't have a conditional move, SET_SRC is an IF_THEN_ELSE,
3851 and we are comparing an item known to be 0 or -1 against 0, use a
3852 logical operation instead. Check for one of the arms being an IOR
3853 of the other arm with some value. We compute three terms to be
3854 IOR'ed together. In practice, at most two will be nonzero. Then
3855 we do the IOR's. */
3856
3857 if (GET_CODE (SET_DEST (x)) != PC
3858 && GET_CODE (SET_SRC (x)) == IF_THEN_ELSE
3859 && (GET_CODE (XEXP (SET_SRC (x), 0)) == EQ
3860 || GET_CODE (XEXP (SET_SRC (x), 0)) == NE)
3861 && XEXP (XEXP (SET_SRC (x), 0), 1) == const0_rtx
3862 && (num_sign_bit_copies (XEXP (XEXP (SET_SRC (x), 0), 0),
3863 GET_MODE (XEXP (XEXP (SET_SRC (x), 0), 0)))
3864 == GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (SET_SRC (x), 0), 0))))
3865 && ! side_effects_p (SET_SRC (x)))
3866 {
3867 rtx true = (GET_CODE (XEXP (SET_SRC (x), 0)) == NE
3868 ? XEXP (SET_SRC (x), 1) : XEXP (SET_SRC (x), 2));
3869 rtx false = (GET_CODE (XEXP (SET_SRC (x), 0)) == NE
3870 ? XEXP (SET_SRC (x), 2) : XEXP (SET_SRC (x), 1));
3871 rtx term1 = const0_rtx, term2, term3;
3872
3873 if (GET_CODE (true) == IOR && rtx_equal_p (XEXP (true, 0), false))
3874 term1 = false, true = XEXP (true, 1), false = const0_rtx;
3875 else if (GET_CODE (true) == IOR
3876 && rtx_equal_p (XEXP (true, 1), false))
3877 term1 = false, true = XEXP (true, 0), false = const0_rtx;
3878 else if (GET_CODE (false) == IOR
3879 && rtx_equal_p (XEXP (false, 0), true))
3880 term1 = true, false = XEXP (false, 1), true = const0_rtx;
3881 else if (GET_CODE (false) == IOR
3882 && rtx_equal_p (XEXP (false, 1), true))
3883 term1 = true, false = XEXP (false, 0), true = const0_rtx;
3884
3885 term2 = gen_binary (AND, GET_MODE (SET_SRC (x)),
3886 XEXP (XEXP (SET_SRC (x), 0), 0), true);
3887 term3 = gen_binary (AND, GET_MODE (SET_SRC (x)),
3888 gen_unary (NOT, GET_MODE (SET_SRC (x)),
3889 XEXP (XEXP (SET_SRC (x), 0), 0)),
3890 false);
3891
3892 SUBST (SET_SRC (x),
3893 gen_binary (IOR, GET_MODE (SET_SRC (x)),
3894 gen_binary (IOR, GET_MODE (SET_SRC (x)),
3895 term1, term2),
3896 term3));
3897 }
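      /* Worked case (editorial addition): with A known to be 0 or -1, a
	 source (if_then_else (ne A (const_int 0)) (ior B C) B) gives
	 term1 = B, term2 = (and A C) and term3 = (and (not A)
	 (const_int 0)), so the result is conceptually
	 (ior (ior B (and A C)) (const_int 0)), which simplifies to
	 (ior B (and A C)).  */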
3898#endif
3899 break;
3900
3901 case AND:
3902 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
3903 {
3904 x = simplify_and_const_int (x, mode, XEXP (x, 0),
3905 INTVAL (XEXP (x, 1)));
3906
3907	  /* If we have (ior (and X C1) C2) and the next restart would be
3908 the last, simplify this by making C1 as small as possible
3909 and then exit. */
3910 if (n_restarts >= 3 && GET_CODE (x) == IOR
3911 && GET_CODE (XEXP (x, 0)) == AND
3912 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3913 && GET_CODE (XEXP (x, 1)) == CONST_INT)
3914 {
3915 temp = gen_binary (AND, mode, XEXP (XEXP (x, 0), 0),
3916 GEN_INT (INTVAL (XEXP (XEXP (x, 0), 1))
3917 & ~ INTVAL (XEXP (x, 1))));
3918 return gen_binary (IOR, mode, temp, XEXP (x, 1));
3919 }
3920
3921 if (GET_CODE (x) != AND)
3922 goto restart;
3923 }
3924
3925 /* Convert (A | B) & A to A. */
3926 if (GET_CODE (XEXP (x, 0)) == IOR
3927 && (rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1))
3928 || rtx_equal_p (XEXP (XEXP (x, 0), 1), XEXP (x, 1)))
3929 && ! side_effects_p (XEXP (XEXP (x, 0), 0))
3930 && ! side_effects_p (XEXP (XEXP (x, 0), 1)))
3931 return XEXP (x, 1);
3932
3933 /* Convert (A ^ B) & A to A & (~ B) since the latter is often a single
3934 insn (and may simplify more). */
3935 else if (GET_CODE (XEXP (x, 0)) == XOR
3936 && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1))
3937 && ! side_effects_p (XEXP (x, 1)))
3938 {
3939 x = gen_binary (AND, mode,
3940 gen_unary (NOT, mode, XEXP (XEXP (x, 0), 1)),
3941 XEXP (x, 1));
3942 goto restart;
3943 }
3944 else if (GET_CODE (XEXP (x, 0)) == XOR
3945 && rtx_equal_p (XEXP (XEXP (x, 0), 1), XEXP (x, 1))
3946 && ! side_effects_p (XEXP (x, 1)))
3947 {
3948 x = gen_binary (AND, mode,
3949 gen_unary (NOT, mode, XEXP (XEXP (x, 0), 0)),
3950 XEXP (x, 1));
3951 goto restart;
3952 }
3953
3954 /* Similarly for (~ (A ^ B)) & A. */
3955 else if (GET_CODE (XEXP (x, 0)) == NOT
3956 && GET_CODE (XEXP (XEXP (x, 0), 0)) == XOR
3957 && rtx_equal_p (XEXP (XEXP (XEXP (x, 0), 0), 0), XEXP (x, 1))
3958 && ! side_effects_p (XEXP (x, 1)))
3959 {
3960 x = gen_binary (AND, mode, XEXP (XEXP (XEXP (x, 0), 0), 1),
3961 XEXP (x, 1));
3962 goto restart;
3963 }
3964 else if (GET_CODE (XEXP (x, 0)) == NOT
3965 && GET_CODE (XEXP (XEXP (x, 0), 0)) == XOR
3966 && rtx_equal_p (XEXP (XEXP (XEXP (x, 0), 0), 1), XEXP (x, 1))
3967 && ! side_effects_p (XEXP (x, 1)))
3968 {
3969 x = gen_binary (AND, mode, XEXP (XEXP (XEXP (x, 0), 0), 0),
3970 XEXP (x, 1));
3971 goto restart;
3972 }
3973
3974 /* If we have (and A B) with A not an object but that is known to
3975 be -1 or 0, this is equivalent to the expression
3976 (if_then_else (ne A (const_int 0)) B (const_int 0))
3977 We make this conversion because it may allow further
3978 simplifications and then allow use of conditional move insns.
3979 If the machine doesn't have condition moves, code in case SET
3980 will convert the IF_THEN_ELSE back to the logical operation.
3981 We build the IF_THEN_ELSE here in case further simplification
3982 is possible (e.g., we can convert it to ABS). */
3983
3984 if (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) != 'o'
3985 && ! (GET_CODE (XEXP (x, 0)) == SUBREG
3986 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0)))) == 'o')
3987 && (num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
3988 == GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))))
3989 {
3990 rtx op0 = XEXP (x, 0);
3991 rtx op1 = const0_rtx;
3992 enum rtx_code comp_code
3993 = simplify_comparison (NE, &op0, &op1);
3994
3995 x = gen_rtx_combine (IF_THEN_ELSE, mode,
3996 gen_binary (comp_code, VOIDmode, op0, op1),
3997 XEXP (x, 1), const0_rtx);
3998 goto restart;
3999 }
4000
4001 /* In the following group of tests (and those in case IOR below),
4002 we start with some combination of logical operations and apply
4003 the distributive law followed by the inverse distributive law.
4004 Most of the time, this results in no change. However, if some of
4005 the operands are the same or inverses of each other, simplifications
4006 will result.
4007
4008 For example, (and (ior A B) (not B)) can occur as the result of
4009 expanding a bit field assignment. When we apply the distributive
4010	 law to this, we get (ior (and A (not B)) (and B (not B))),
4011	 which then simplifies to (and A (not B)).  */
4012
4013 /* If we have (and (ior A B) C), apply the distributive law and then
4014 the inverse distributive law to see if things simplify. */
4015
4016 if (GET_CODE (XEXP (x, 0)) == IOR || GET_CODE (XEXP (x, 0)) == XOR)
4017 {
4018 x = apply_distributive_law
4019 (gen_binary (GET_CODE (XEXP (x, 0)), mode,
4020 gen_binary (AND, mode,
4021 XEXP (XEXP (x, 0), 0), XEXP (x, 1)),
4022 gen_binary (AND, mode,
4023 XEXP (XEXP (x, 0), 1), XEXP (x, 1))));
4024 if (GET_CODE (x) != AND)
4025 goto restart;
4026 }
4027
4028 if (GET_CODE (XEXP (x, 1)) == IOR || GET_CODE (XEXP (x, 1)) == XOR)
4029 {
4030 x = apply_distributive_law
4031 (gen_binary (GET_CODE (XEXP (x, 1)), mode,
4032 gen_binary (AND, mode,
4033 XEXP (XEXP (x, 1), 0), XEXP (x, 0)),
4034 gen_binary (AND, mode,
4035 XEXP (XEXP (x, 1), 1), XEXP (x, 0))));
4036 if (GET_CODE (x) != AND)
4037 goto restart;
4038 }
4039
4040 /* Similarly, taking advantage of the fact that
4041 (and (not A) (xor B C)) == (xor (ior A B) (ior A C)) */
4042
4043 if (GET_CODE (XEXP (x, 0)) == NOT && GET_CODE (XEXP (x, 1)) == XOR)
4044 {
4045 x = apply_distributive_law
4046 (gen_binary (XOR, mode,
4047 gen_binary (IOR, mode, XEXP (XEXP (x, 0), 0),
4048 XEXP (XEXP (x, 1), 0)),
4049 gen_binary (IOR, mode, XEXP (XEXP (x, 0), 0),
4050 XEXP (XEXP (x, 1), 1))));
4051 if (GET_CODE (x) != AND)
4052 goto restart;
4053 }
4054
4055 else if (GET_CODE (XEXP (x, 1)) == NOT && GET_CODE (XEXP (x, 0)) == XOR)
4056 {
4057 x = apply_distributive_law
4058 (gen_binary (XOR, mode,
4059 gen_binary (IOR, mode, XEXP (XEXP (x, 1), 0),
4060 XEXP (XEXP (x, 0), 0)),
4061 gen_binary (IOR, mode, XEXP (XEXP (x, 1), 0),
4062 XEXP (XEXP (x, 0), 1))));
4063 if (GET_CODE (x) != AND)
4064 goto restart;
4065 }
4066 break;
4067
4068 case IOR:
4069 /* (ior A C) is C if all significant bits of A are on in C. */
4070 if (GET_CODE (XEXP (x, 1)) == CONST_INT
4071	  && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4072 && (significant_bits (XEXP (x, 0), mode)
4073 & ~ INTVAL (XEXP (x, 1))) == 0)
4074 return XEXP (x, 1);
4075
4076 /* Convert (A & B) | A to A. */
4077 if (GET_CODE (XEXP (x, 0)) == AND
4078 && (rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1))
4079 || rtx_equal_p (XEXP (XEXP (x, 0), 1), XEXP (x, 1)))
4080 && ! side_effects_p (XEXP (XEXP (x, 0), 0))
4081 && ! side_effects_p (XEXP (XEXP (x, 0), 1)))
4082 return XEXP (x, 1);
4083
4084 /* If we have (ior (and A B) C), apply the distributive law and then
4085 the inverse distributive law to see if things simplify. */
4086
4087 if (GET_CODE (XEXP (x, 0)) == AND)
4088 {
4089 x = apply_distributive_law
4090 (gen_binary (AND, mode,
4091 gen_binary (IOR, mode,
4092 XEXP (XEXP (x, 0), 0), XEXP (x, 1)),
4093 gen_binary (IOR, mode,
4094 XEXP (XEXP (x, 0), 1), XEXP (x, 1))));
4095
4096 if (GET_CODE (x) != IOR)
4097 goto restart;
4098 }
4099
4100 if (GET_CODE (XEXP (x, 1)) == AND)
4101 {
4102 x = apply_distributive_law
4103 (gen_binary (AND, mode,
4104 gen_binary (IOR, mode,
4105 XEXP (XEXP (x, 1), 0), XEXP (x, 0)),
4106 gen_binary (IOR, mode,
4107 XEXP (XEXP (x, 1), 1), XEXP (x, 0))));
4108
4109 if (GET_CODE (x) != IOR)
4110 goto restart;
4111 }
4112
4113 /* Convert (ior (ashift A CX) (lshiftrt A CY)) where CX+CY equals the
4114 mode size to (rotate A CX). */
4115
4116 if (((GET_CODE (XEXP (x, 0)) == ASHIFT
4117 && GET_CODE (XEXP (x, 1)) == LSHIFTRT)
4118 || (GET_CODE (XEXP (x, 1)) == ASHIFT
4119 && GET_CODE (XEXP (x, 0)) == LSHIFTRT))
4120 && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (XEXP (x, 1), 0))
4121 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4122 && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
4123 && (INTVAL (XEXP (XEXP (x, 0), 1)) + INTVAL (XEXP (XEXP (x, 1), 1))
4124 == GET_MODE_BITSIZE (mode)))
4125 {
4126 rtx shift_count;
4127
4128 if (GET_CODE (XEXP (x, 0)) == ASHIFT)
4129 shift_count = XEXP (XEXP (x, 0), 1);
4130 else
4131 shift_count = XEXP (XEXP (x, 1), 1);
4132 x = gen_rtx (ROTATE, mode, XEXP (XEXP (x, 0), 0), shift_count);
4133 goto restart;
4134 }
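      /* Example (editorial addition), assuming 32-bit SImode:
	 (ior (ashift A (const_int 24)) (lshiftrt A (const_int 8)))
	 has 24 + 8 equal to the mode size, so it becomes
	 (rotate A (const_int 24)).  */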
4135 break;
4136
4137 case XOR:
4138 /* Convert (XOR (NOT x) (NOT y)) to (XOR x y).
4139 Also convert (XOR (NOT x) y) to (NOT (XOR x y)), similarly for
4140 (NOT y). */
4141 {
4142 int num_negated = 0;
4143 rtx in1 = XEXP (x, 0), in2 = XEXP (x, 1);
4144
4145 if (GET_CODE (in1) == NOT)
4146 num_negated++, in1 = XEXP (in1, 0);
4147 if (GET_CODE (in2) == NOT)
4148 num_negated++, in2 = XEXP (in2, 0);
4149
4150 if (num_negated == 2)
4151 {
4152 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4153 SUBST (XEXP (x, 1), XEXP (XEXP (x, 1), 0));
4154 }
4155 else if (num_negated == 1)
4156 {
4157 x = gen_unary (NOT, mode,
4158 gen_binary (XOR, mode, in1, in2));
4159 goto restart;
4160 }
4161 }
4162
4163 /* Convert (xor (and A B) B) to (and (not A) B). The latter may
4164 correspond to a machine insn or result in further simplifications
4165 if B is a constant. */
4166
4167 if (GET_CODE (XEXP (x, 0)) == AND
4168 && rtx_equal_p (XEXP (XEXP (x, 0), 1), XEXP (x, 1))
4169 && ! side_effects_p (XEXP (x, 1)))
4170 {
4171 x = gen_binary (AND, mode,
4172 gen_unary (NOT, mode, XEXP (XEXP (x, 0), 0)),
4173 XEXP (x, 1));
4174 goto restart;
4175 }
4176 else if (GET_CODE (XEXP (x, 0)) == AND
4177 && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1))
4178 && ! side_effects_p (XEXP (x, 1)))
4179 {
4180 x = gen_binary (AND, mode,
4181 gen_unary (NOT, mode, XEXP (XEXP (x, 0), 1)),
4182 XEXP (x, 1));
4183 goto restart;
4184 }
4185
4186
4187#if STORE_FLAG_VALUE == 1
4188 /* (xor (comparison foo bar) (const_int 1)) can become the reversed
4189 comparison. */
4190 if (XEXP (x, 1) == const1_rtx
4191 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
4192 && reversible_comparison_p (XEXP (x, 0)))
4193 return gen_rtx_combine (reverse_condition (GET_CODE (XEXP (x, 0))),
4194 mode, XEXP (XEXP (x, 0), 0),
4195 XEXP (XEXP (x, 0), 1));
4196#endif
4197
4198 /* (xor (comparison foo bar) (const_int sign-bit))
4199 when STORE_FLAG_VALUE is the sign bit. */
4200 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4201 && (STORE_FLAG_VALUE
4202 == (HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (mode) - 1))
4203 && XEXP (x, 1) == const_true_rtx
4204 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
4205 && reversible_comparison_p (XEXP (x, 0)))
4206 return gen_rtx_combine (reverse_condition (GET_CODE (XEXP (x, 0))),
4207 mode, XEXP (XEXP (x, 0), 0),
4208 XEXP (XEXP (x, 0), 1));
4209 break;
4210
4211 case ABS:
4212 /* (abs (neg <foo>)) -> (abs <foo>) */
4213 if (GET_CODE (XEXP (x, 0)) == NEG)
4214 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4215
4216 /* If operand is something known to be positive, ignore the ABS. */
4217 if (GET_CODE (XEXP (x, 0)) == FFS || GET_CODE (XEXP (x, 0)) == ABS
4218 || ((GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
4219 <= HOST_BITS_PER_WIDE_INT)
4220	      && ((significant_bits (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
4221 & ((HOST_WIDE_INT) 1
4222 << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1)))
4223 == 0)))
4224 return XEXP (x, 0);
4225
4226
4227 /* If operand is known to be only -1 or 0, convert ABS to NEG. */
4228       if (num_sign_bit_copies (XEXP (x, 0), mode) == GET_MODE_BITSIZE (mode))
4229 {
4230 x = gen_rtx_combine (NEG, mode, XEXP (x, 0));
4231 goto restart;
4232 }
4233 break;
4234
4235 case FFS:
4236 /* (ffs (*_extend <X>)) = (ffs <X>) */
4237 if (GET_CODE (XEXP (x, 0)) == SIGN_EXTEND
4238 || GET_CODE (XEXP (x, 0)) == ZERO_EXTEND)
4239 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4240 break;
4241
4242 case FLOAT:
4243 /* (float (sign_extend <X>)) = (float <X>). */
4244 if (GET_CODE (XEXP (x, 0)) == SIGN_EXTEND)
4245 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4246 break;
4247
4248 case LSHIFT:
4249 case ASHIFT:
4250 case LSHIFTRT:
4251 case ASHIFTRT:
4252 case ROTATE:
4253 case ROTATERT:
230d793d
RS
4254 /* If this is a shift by a constant amount, simplify it. */
4255 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
4256 {
4257 x = simplify_shift_const (x, code, mode, XEXP (x, 0),
4258 INTVAL (XEXP (x, 1)));
4259 if (GET_CODE (x) != code)
4260 goto restart;
4261 }
4262
4263#ifdef SHIFT_COUNT_TRUNCATED
4264 else if (GET_CODE (XEXP (x, 1)) != REG)
4265 SUBST (XEXP (x, 1),
4266 force_to_mode (XEXP (x, 1), GET_MODE (x),
4267 exact_log2 (GET_MODE_BITSIZE (GET_MODE (x))),
4268			      NULL_RTX));
4269#endif
4270
4271 break;
4272 }
4273
4274 return x;
4275}
4276\f
4277/* We consider ZERO_EXTRACT, SIGN_EXTRACT, and SIGN_EXTEND as "compound
4278 operations" because they can be replaced with two more basic operations.
4279 ZERO_EXTEND is also considered "compound" because it can be replaced with
4280 an AND operation, which is simpler, though only one operation.
4281
4282 The function expand_compound_operation is called with an rtx expression
4283 and will convert it to the appropriate shifts and AND operations,
4284 simplifying at each stage.
4285
4286 The function make_compound_operation is called to convert an expression
4287 consisting of shifts and ANDs into the equivalent compound expression.
4288 It is the inverse of this function, loosely speaking. */
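/* Example (editorial sketch, assuming 32-bit SImode and 8-bit QImode):
   expand_compound_operation conceptually rewrites
   (sign_extend:SI (subreg:QI X 0)) as
   (ashiftrt:SI (ashift:SI X (const_int 24)) (const_int 24)), and the
   corresponding zero_extend as shifts that simplify to
   (and:SI X (const_int 255)); make_compound_operation performs the
   reverse rewrite.  */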
4289
4290static rtx
4291expand_compound_operation (x)
4292 rtx x;
4293{
4294 int pos = 0, len;
4295 int unsignedp = 0;
4296 int modewidth;
4297 rtx tem;
4298
4299 switch (GET_CODE (x))
4300 {
4301 case ZERO_EXTEND:
4302 unsignedp = 1;
4303 case SIGN_EXTEND:
4304 /* We can't necessarily use a const_int for a multiword mode;
4305 it depends on implicitly extending the value.
4306 Since we don't know the right way to extend it,
4307 we can't tell whether the implicit way is right.
4308
4309 Even for a mode that is no wider than a const_int,
4310 we can't win, because we need to sign extend one of its bits through
4311 the rest of it, and we don't know which bit. */
4312       if (GET_CODE (XEXP (x, 0)) == CONST_INT)
4313	 return x;
4314
4315 if (! FAKE_EXTEND_SAFE_P (GET_MODE (XEXP (x, 0)), XEXP (x, 0)))
4316 return x;
4317
4318 len = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)));
4319 /* If the inner object has VOIDmode (the only way this can happen
4320	 is if it is an ASM_OPERANDS), we can't do anything since we don't
4321 know how much masking to do. */
4322 if (len == 0)
4323 return x;
4324
4325 break;
4326
4327 case ZERO_EXTRACT:
4328 unsignedp = 1;
4329 case SIGN_EXTRACT:
4330 /* If the operand is a CLOBBER, just return it. */
4331 if (GET_CODE (XEXP (x, 0)) == CLOBBER)
4332 return XEXP (x, 0);
4333
4334 if (GET_CODE (XEXP (x, 1)) != CONST_INT
4335 || GET_CODE (XEXP (x, 2)) != CONST_INT
4336 || GET_MODE (XEXP (x, 0)) == VOIDmode)
4337 return x;
4338
4339 len = INTVAL (XEXP (x, 1));
4340 pos = INTVAL (XEXP (x, 2));
4341
4342 /* If this goes outside the object being extracted, replace the object
4343 with a (use (mem ...)) construct that only combine understands
4344 and is used only for this purpose. */
4345 if (len + pos > GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))))
4346 SUBST (XEXP (x, 0), gen_rtx (USE, GET_MODE (x), XEXP (x, 0)));
4347
4348#if BITS_BIG_ENDIAN
4349 pos = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - len - pos;
4350#endif
4351 break;
4352
4353 default:
4354 return x;
4355 }
4356
4357 /* If we reach here, we want to return a pair of shifts. The inner
4358 shift is a left shift of BITSIZE - POS - LEN bits. The outer
4359 shift is a right shift of BITSIZE - LEN bits. It is arithmetic or
4360 logical depending on the value of UNSIGNEDP.
4361
4362 If this was a ZERO_EXTEND or ZERO_EXTRACT, this pair of shifts will be
4363 converted into an AND of a shift.
4364
4365 We must check for the case where the left shift would have a negative
4366 count. This can happen in a case like (x >> 31) & 255 on machines
4367 that can't shift by a constant. On those machines, we would first
4368 combine the shift with the AND to produce a variable-position
4369 extraction. Then the constant of 31 would be substituted in to produce
4370 a such a position. */
4371
4372 modewidth = GET_MODE_BITSIZE (GET_MODE (x));
4373   if (modewidth >= pos + len)
4374     tem = simplify_shift_const (NULL_RTX, unsignedp ? LSHIFTRT : ASHIFTRT,
4375				GET_MODE (x),
4376 simplify_shift_const (NULL_RTX, ASHIFT,
4377 GET_MODE (x),
4378 XEXP (x, 0),
4379 modewidth - pos - len),
4380 modewidth - len);
4381
4382 else if (unsignedp && len < HOST_BITS_PER_WIDE_INT)
4383 tem = simplify_and_const_int (NULL_RTX, GET_MODE (x),
4384 simplify_shift_const (NULL_RTX, LSHIFTRT,
4385 GET_MODE (x),
4386 XEXP (x, 0), pos),
4387				  ((HOST_WIDE_INT) 1 << len) - 1);
4388 else
4389 /* Any other cases we can't handle. */
4390 return x;
4391
4392
4393 /* If we couldn't do this for some reason, return the original
4394 expression. */
4395 if (GET_CODE (tem) == CLOBBER)
4396 return x;
4397
4398 return tem;
4399}
4400\f
4401/* X is a SET which contains an assignment of one object into
4402 a part of another (such as a bit-field assignment, STRICT_LOW_PART,
4403 or certain SUBREGS). If possible, convert it into a series of
4404 logical operations.
4405
4406 We half-heartedly support variable positions, but do not at all
4407 support variable lengths. */
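/* Worked example (editorial addition): for
   (set (zero_extract:SI R (const_int 8) (const_int 4)) SRC), the loop
   below uses MASK = 255 and POS = 4 to build
   (set R (ior:SI (and:SI (not:SI (ashift:SI (const_int 255)
					     (const_int 4))) R)
		  (ashift:SI (and:SI SRC (const_int 255))
			     (const_int 4)))),
   i.e. clear the field, then OR in the masked, shifted source.  */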
4408
4409static rtx
4410expand_field_assignment (x)
4411 rtx x;
4412{
4413 rtx inner;
4414 rtx pos; /* Always counts from low bit. */
4415 int len;
4416 rtx mask;
4417 enum machine_mode compute_mode;
4418
4419 /* Loop until we find something we can't simplify. */
4420 while (1)
4421 {
4422 if (GET_CODE (SET_DEST (x)) == STRICT_LOW_PART
4423 && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG)
4424 {
4425 inner = SUBREG_REG (XEXP (SET_DEST (x), 0));
4426 len = GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)));
4427 pos = const0_rtx;
4428 }
4429 else if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
4430 && GET_CODE (XEXP (SET_DEST (x), 1)) == CONST_INT)
4431 {
4432 inner = XEXP (SET_DEST (x), 0);
4433 len = INTVAL (XEXP (SET_DEST (x), 1));
4434 pos = XEXP (SET_DEST (x), 2);
4435
4436 /* If the position is constant and spans the width of INNER,
4437 surround INNER with a USE to indicate this. */
4438 if (GET_CODE (pos) == CONST_INT
4439 && INTVAL (pos) + len > GET_MODE_BITSIZE (GET_MODE (inner)))
4440 inner = gen_rtx (USE, GET_MODE (SET_DEST (x)), inner);
4441
4442#if BITS_BIG_ENDIAN
4443 if (GET_CODE (pos) == CONST_INT)
4444 pos = GEN_INT (GET_MODE_BITSIZE (GET_MODE (inner)) - len
4445 - INTVAL (pos));
4446 else if (GET_CODE (pos) == MINUS
4447 && GET_CODE (XEXP (pos, 1)) == CONST_INT
4448 && (INTVAL (XEXP (pos, 1))
4449 == GET_MODE_BITSIZE (GET_MODE (inner)) - len))
4450 /* If position is ADJUST - X, new position is X. */
4451 pos = XEXP (pos, 0);
4452 else
4453 pos = gen_binary (MINUS, GET_MODE (pos),
4454 GEN_INT (GET_MODE_BITSIZE (GET_MODE (inner))
4455 - len),
4456 pos);
4457#endif
4458 }
4459
4460 /* A SUBREG between two modes that occupy the same numbers of words
4461 can be done by moving the SUBREG to the source. */
4462 else if (GET_CODE (SET_DEST (x)) == SUBREG
4463 && (((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
4464 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
4465 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
4466 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
4467 {
4468 x = gen_rtx (SET, VOIDmode, SUBREG_REG (SET_DEST (x)),
4469 gen_lowpart_for_combine (GET_MODE (SUBREG_REG (SET_DEST (x))),
4470 SET_SRC (x)));
4471 continue;
4472 }
4473 else
4474 break;
4475
4476 while (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner))
4477 inner = SUBREG_REG (inner);
4478
4479 compute_mode = GET_MODE (inner);
4480
4481 /* Compute a mask of LEN bits, if we can do this on the host machine. */
4482 if (len < HOST_BITS_PER_WIDE_INT)
4483 mask = GEN_INT (((HOST_WIDE_INT) 1 << len) - 1);
4484 else
4485 break;
4486
4487 /* Now compute the equivalent expression. Make a copy of INNER
4488 for the SET_DEST in case it is a MEM into which we will substitute;
4489 we don't want shared RTL in that case. */
4490 x = gen_rtx (SET, VOIDmode, copy_rtx (inner),
4491 gen_binary (IOR, compute_mode,
4492 gen_binary (AND, compute_mode,
4493 gen_unary (NOT, compute_mode,
4494 gen_binary (ASHIFT,
4495 compute_mode,
4496 mask, pos)),
4497 inner),
4498 gen_binary (ASHIFT, compute_mode,
4499 gen_binary (AND, compute_mode,
4500 gen_lowpart_for_combine
4501 (compute_mode,
4502 SET_SRC (x)),
4503 mask),
4504 pos)));
4505 }
4506
4507 return x;
4508}
4509\f
4510/* Return an RTX for a reference to LEN bits of INNER. POS is the starting
4511 bit position (counted from the LSB) if >= 0; otherwise POS_RTX represents
4512 the starting bit position.
4513
4514 INNER may be a USE. This will occur when we started with a bitfield
4515 that went outside the boundary of the object in memory, which is
4516 allowed on most machines. To isolate this case, we produce a USE
4517 whose mode is wide enough and surround the MEM with it. The only
4518 code that understands the USE is this routine. If it is not removed,
4519 it will cause the resulting insn not to match.
4520
4521 UNSIGNEDP is non-zero for an unsigned reference and zero for a
4522 signed reference.
4523
4524 IN_DEST is non-zero if this is a reference in the destination of a
4525 SET. This is used when a ZERO_ or SIGN_EXTRACT isn't needed. If non-zero,
4526 a STRICT_LOW_PART will be used, if zero, ZERO_EXTEND or SIGN_EXTEND will
4527 be used.
4528
4529 IN_COMPARE is non-zero if we are in a COMPARE. This means that a
4530 ZERO_EXTRACT should be built even for bits starting at bit 0.
4531
4532 MODE is the desired mode of the result (if IN_DEST == 0). */
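/* Example call (editorial addition; the argument values are hypothetical):
   make_extraction (SImode, R, 8, NULL_RTX, 8, 1, 0, 0) requests an
   unsigned 8-bit field of R starting at bit 8 and would typically
   return (zero_extract:SI R (const_int 8) (const_int 8)), or a plain
   shift-and-mask form when no real extraction is needed.  */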
4533
4534static rtx
4535make_extraction (mode, inner, pos, pos_rtx, len,
4536 unsignedp, in_dest, in_compare)
4537 enum machine_mode mode;
4538 rtx inner;
4539 int pos;
4540 rtx pos_rtx;
4541 int len;
4542 int unsignedp;
4543 int in_dest, in_compare;
4544{
4545 /* This mode describes the size of the storage area
4546 to fetch the overall value from. Within that, we
4547 ignore the POS lowest bits, etc. */
4548 enum machine_mode is_mode = GET_MODE (inner);
4549 enum machine_mode inner_mode;
4550 enum machine_mode wanted_mem_mode = byte_mode;
4551 enum machine_mode pos_mode = word_mode;
4552 enum machine_mode extraction_mode = word_mode;
4553 enum machine_mode tmode = mode_for_size (len, MODE_INT, 1);
4554 int spans_byte = 0;
4555 rtx new = 0;
4556
4557 /* Get some information about INNER and get the innermost object. */
4558 if (GET_CODE (inner) == USE)
4559     /* (use:SI (mem:QI foo)) stands for (mem:SI foo).  */
4560 /* We don't need to adjust the position because we set up the USE
4561 to pretend that it was a full-word object. */
4562 spans_byte = 1, inner = XEXP (inner, 0);
4563 else if (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner))
4564 {
4565 /* If going from (subreg:SI (mem:QI ...)) to (mem:QI ...),
4566 consider just the QI as the memory to extract from.
4567 The subreg adds or removes high bits; its mode is
4568 irrelevant to the meaning of this extraction,
4569 since POS and LEN count from the lsb. */
4570 if (GET_CODE (SUBREG_REG (inner)) == MEM)
4571 is_mode = GET_MODE (SUBREG_REG (inner));
4572 inner = SUBREG_REG (inner);
4573 }
4574
4575 inner_mode = GET_MODE (inner);
4576
4577 if (pos_rtx && GET_CODE (pos_rtx) == CONST_INT)
4578 pos = INTVAL (pos_rtx);
4579
4580 /* See if this can be done without an extraction. We never can if the
4581 width of the field is not the same as that of some integer mode. For
4582 registers, we can only avoid the extraction if the position is at the
4583 low-order bit and this is either not in the destination or we have the
4584 appropriate STRICT_LOW_PART operation available.
4585
4586 For MEM, we can avoid an extract if the field starts on an appropriate
4587 boundary and we can change the mode of the memory reference. However,
4588 we cannot directly access the MEM if we have a USE and the underlying
4589 MEM is not TMODE. This combination means that MEM was being used in a
4590 context where bits outside its mode were being referenced; that is only
4591 valid in bit-field insns. */
4592
4593 if (tmode != BLKmode
4594 && ! (spans_byte && inner_mode != tmode)
4595 && ((pos == 0 && GET_CODE (inner) != MEM
4596 && (! in_dest
4597 || (GET_CODE (inner) == REG
4598 && (movstrict_optab->handlers[(int) tmode].insn_code
4599 != CODE_FOR_nothing))))
4600 || (GET_CODE (inner) == MEM && pos >= 0
4601 && (pos
4602 % (STRICT_ALIGNMENT ? GET_MODE_ALIGNMENT (tmode)
4603 : BITS_PER_UNIT)) == 0
4604 /* We can't do this if we are widening INNER_MODE (it
4605 may not be aligned, for one thing). */
4606 && GET_MODE_BITSIZE (inner_mode) >= GET_MODE_BITSIZE (tmode)
4607 && (inner_mode == tmode
4608 || (! mode_dependent_address_p (XEXP (inner, 0))
4609 && ! MEM_VOLATILE_P (inner))))))
4610 {
4611 /* If INNER is a MEM, make a new MEM that encompasses just the desired
4612 field. If the original and current mode are the same, we need not
4613 adjust the offset. Otherwise, we do so if bytes are big-endian.
4614
4615 If INNER is not a MEM, get a piece consisting of just the field
4616 of interest (in this case POS must be 0). */
4617
4618 if (GET_CODE (inner) == MEM)
4619 {
4620 int offset;
4621 /* POS counts from lsb, but make OFFSET count in memory order. */
4622 if (BYTES_BIG_ENDIAN)
4623 offset = (GET_MODE_BITSIZE (is_mode) - len - pos) / BITS_PER_UNIT;
4624 else
4625 offset = pos / BITS_PER_UNIT;
4626
4627 new = gen_rtx (MEM, tmode, plus_constant (XEXP (inner, 0), offset));
4628 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (inner);
4629 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (inner);
4630 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (inner);
4631 }
4632 else if (GET_CODE (inner) == REG)
4633 /* We can't call gen_lowpart_for_combine here since we always want
4634 a SUBREG and it would sometimes return a new hard register. */
4635 new = gen_rtx (SUBREG, tmode, inner,
4636 (WORDS_BIG_ENDIAN
4637 && GET_MODE_SIZE (inner_mode) > UNITS_PER_WORD
4638 ? ((GET_MODE_SIZE (inner_mode) - GET_MODE_SIZE (tmode))
4639 / UNITS_PER_WORD)
4640 : 0));
4641 else
4642 new = force_to_mode (inner, tmode, len, NULL_RTX);
4643
4644 /* If this extraction is going into the destination of a SET,
4645 make a STRICT_LOW_PART unless we made a MEM. */
4646
4647 if (in_dest)
4648 return (GET_CODE (new) == MEM ? new
4649 : (GET_CODE (new) != SUBREG
4650 ? gen_rtx (CLOBBER, tmode, const0_rtx)
4651 : gen_rtx_combine (STRICT_LOW_PART, VOIDmode, new)));
4652
4653 /* Otherwise, sign- or zero-extend unless we already are in the
4654 proper mode. */
4655
4656 return (mode == tmode ? new
4657 : gen_rtx_combine (unsignedp ? ZERO_EXTEND : SIGN_EXTEND,
4658 mode, new));
4659 }
4660
4661 /* Unless this is a COMPARE or we have a funny memory reference,
4662 don't do anything with zero-extending field extracts starting at
4663 the low-order bit since they are simple AND operations. */
4664 if (pos == 0 && ! in_dest && ! in_compare && ! spans_byte && unsignedp)
4665 return 0;
4666
4667 /* Get the mode to use should INNER be a MEM, the mode for the position,
4668 and the mode for the result. */
4669#ifdef HAVE_insv
4670 if (in_dest)
4671 {
4672 wanted_mem_mode = insn_operand_mode[(int) CODE_FOR_insv][0];
4673 pos_mode = insn_operand_mode[(int) CODE_FOR_insv][2];
4674 extraction_mode = insn_operand_mode[(int) CODE_FOR_insv][3];
4675 }
4676#endif
4677
4678#ifdef HAVE_extzv
4679 if (! in_dest && unsignedp)
4680 {
4681 wanted_mem_mode = insn_operand_mode[(int) CODE_FOR_extzv][1];
4682 pos_mode = insn_operand_mode[(int) CODE_FOR_extzv][3];
4683 extraction_mode = insn_operand_mode[(int) CODE_FOR_extzv][0];
4684 }
4685#endif
4686
4687#ifdef HAVE_extv
4688 if (! in_dest && ! unsignedp)
4689 {
4690 wanted_mem_mode = insn_operand_mode[(int) CODE_FOR_extv][1];
4691 pos_mode = insn_operand_mode[(int) CODE_FOR_extv][3];
4692 extraction_mode = insn_operand_mode[(int) CODE_FOR_extv][0];
4693 }
4694#endif
4695
4696 /* Never narrow an object, since that might not be safe. */
4697
4698 if (mode != VOIDmode
4699 && GET_MODE_SIZE (extraction_mode) < GET_MODE_SIZE (mode))
4700 extraction_mode = mode;
4701
4702 if (pos_rtx && GET_MODE (pos_rtx) != VOIDmode
4703 && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx)))
4704 pos_mode = GET_MODE (pos_rtx);
4705
4706 /* If this is not from memory or we have to change the mode of memory and
4707 cannot, the desired mode is EXTRACTION_MODE. */
4708 if (GET_CODE (inner) != MEM
4709 || (inner_mode != wanted_mem_mode
4710 && (mode_dependent_address_p (XEXP (inner, 0))
4711 || MEM_VOLATILE_P (inner))))
4712 wanted_mem_mode = extraction_mode;
4713
4714#if BITS_BIG_ENDIAN
4715 /* If position is constant, compute new position. Otherwise, build
4716 subtraction. */
4717 if (pos >= 0)
4718 pos = (MAX (GET_MODE_BITSIZE (is_mode), GET_MODE_BITSIZE (wanted_mem_mode))
4719 - len - pos);
4720 else
4721 pos_rtx
4722 = gen_rtx_combine (MINUS, GET_MODE (pos_rtx),
4723 GEN_INT (MAX (GET_MODE_BITSIZE (is_mode),
4724 GET_MODE_BITSIZE (wanted_mem_mode))
4725 - len),
4726 pos_rtx);
4727#endif
4728
4729 /* If INNER has a wider mode, make it smaller. If this is a constant
4730 extract, try to adjust the byte to point to the byte containing
4731 the value. */
4732 if (wanted_mem_mode != VOIDmode
4733 && GET_MODE_SIZE (wanted_mem_mode) < GET_MODE_SIZE (is_mode)
4734 && ((GET_CODE (inner) == MEM
4735 && (inner_mode == wanted_mem_mode
4736 || (! mode_dependent_address_p (XEXP (inner, 0))
4737 && ! MEM_VOLATILE_P (inner))))))
4738 {
4739 int offset = 0;
4740
4741 /* The computations below will be correct if the machine is big
4742 endian in both bits and bytes or little endian in bits and bytes.
4743 If it is mixed, we must adjust. */
4744
4745#if BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN
4746 if (! spans_byte && is_mode != wanted_mem_mode)
4747 offset = (GET_MODE_SIZE (is_mode)
4748 - GET_MODE_SIZE (wanted_mem_mode) - offset);
4749#endif
4750
4751 /* If bytes are big endian and we had a paradoxical SUBREG, we must
4752 adjust OFFSET to compensate. */
4753#if BYTES_BIG_ENDIAN
4754 if (! spans_byte
4755 && GET_MODE_SIZE (inner_mode) < GET_MODE_SIZE (is_mode))
4756 offset -= GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (inner_mode);
4757#endif
4758
4759 /* If this is a constant position, we can move to the desired byte. */
4760 if (pos >= 0)
4761 {
4762 offset += pos / BITS_PER_UNIT;
4763 pos %= GET_MODE_BITSIZE (wanted_mem_mode);
4764 }
4765
4766 if (offset != 0 || inner_mode != wanted_mem_mode)
4767 {
4768 rtx newmem = gen_rtx (MEM, wanted_mem_mode,
4769 plus_constant (XEXP (inner, 0), offset));
4770 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (inner);
4771 MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (inner);
4772 MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (inner);
4773 inner = newmem;
4774 }
4775 }
4776
4777 /* If INNER is not memory, we can always get it into the proper mode. */
4778 else if (GET_CODE (inner) != MEM)
4779 inner = force_to_mode (inner, extraction_mode,
4780 (pos < 0 ? GET_MODE_BITSIZE (extraction_mode)
4781 : len + pos),
4782 NULL_RTX);
4783
4784 /* Adjust mode of POS_RTX, if needed. If we want a wider mode, we
4785 have to zero extend. Otherwise, we can just use a SUBREG. */
4786 if (pos < 0
4787 && GET_MODE_SIZE (pos_mode) > GET_MODE_SIZE (GET_MODE (pos_rtx)))
4788 pos_rtx = gen_rtx_combine (ZERO_EXTEND, pos_mode, pos_rtx);
4789 else if (pos < 0
4790 && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx)))
4791 pos_rtx = gen_lowpart_for_combine (pos_mode, pos_rtx);
4792
4793 /* Make POS_RTX unless we already have it and it is correct. */
4794 if (pos_rtx == 0 || (pos >= 0 && INTVAL (pos_rtx) != pos))
4795 pos_rtx = GEN_INT (pos);
4796
4797 /* Make the required operation. See if we can use existing rtx. */
4798 new = gen_rtx_combine (unsignedp ? ZERO_EXTRACT : SIGN_EXTRACT,
4799 extraction_mode, inner, GEN_INT (len), pos_rtx);
4800 if (! in_dest)
4801 new = gen_lowpart_for_combine (mode, new);
4802
4803 return new;
4804}
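/* Illustrative sketch, not part of the original source: the semantics
   of the ZERO_EXTRACT / SIGN_EXTRACT rtx built by make_extraction, in
   plain C.  POS counts from the least significant bit; LEN is assumed
   to satisfy 0 < LEN < the number of bits in a long, and the signed
   case relies on arithmetic right shift of negative values, as the
   surrounding code does on these targets.  */
#if 0
static long
extraction_demo (unsigned long word, int pos, int len, int unsignedp)
{
  int width = sizeof (long) * 8;
  unsigned long field = (word >> pos) & ((1UL << len) - 1);

  if (unsignedp)
    return (long) field;		/* ZERO_EXTRACT */

  /* SIGN_EXTRACT: move the field to the top and shift back down so
     its high bit is replicated through the result.  */
  return (long) (field << (width - len)) >> (width - len);
}
#endif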
4805\f
4806/* Look at the expression rooted at X. Look for expressions
4807 equivalent to ZERO_EXTRACT, SIGN_EXTRACT, ZERO_EXTEND, SIGN_EXTEND.
4808 Form these expressions.
4809
4810 Return the new rtx, usually just X.
4811
4812 Also, for machines like the Vax that don't have logical shift insns,
4813 try to convert logical to arithmetic shift operations in cases where
4814 they are equivalent. This undoes the canonicalizations to logical
4815 shifts done elsewhere.
4816
4817 We try, as much as possible, to re-use rtl expressions to save memory.
4818
4819 IN_CODE says what kind of expression we are processing. Normally, it is
4820 SET. In a memory address (inside a MEM, PLUS or MINUS, the latter two
4821 being kludges), it is MEM. When processing the arguments of a comparison
4822 or a COMPARE against zero, it is COMPARE. */
4823
4824static rtx
4825make_compound_operation (x, in_code)
4826 rtx x;
4827 enum rtx_code in_code;
4828{
4829 enum rtx_code code = GET_CODE (x);
4830 enum machine_mode mode = GET_MODE (x);
4831 int mode_width = GET_MODE_BITSIZE (mode);
4832 enum rtx_code next_code;
4833 int i, count;
4834 rtx new = 0;
4835 char *fmt;
4836
4837 /* Select the code to be used in recursive calls. Once we are inside an
4838 address, we stay there. If we have a comparison, set to COMPARE,
4839 but once inside, go back to our default of SET. */
4840
4841 next_code = (code == MEM || code == PLUS || code == MINUS ? MEM
4842 : ((code == COMPARE || GET_RTX_CLASS (code) == '<')
4843 && XEXP (x, 1) == const0_rtx) ? COMPARE
4844 : in_code == COMPARE ? SET : in_code);
4845
4846 /* Process depending on the code of this operation. If NEW is set
4847 non-zero, it will be returned. */
4848
4849 switch (code)
4850 {
4851 case ASHIFT:
4852 case LSHIFT:
4853 /* Convert shifts by constants into multiplications if inside
4854 an address. */
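/* For instance (illustrative, not in the original comment):
   (ashift X 2) inside an address becomes (mult X 4), which is the
   form address-mode patterns expect. */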
4855 if (in_code == MEM && GET_CODE (XEXP (x, 1)) == CONST_INT
4856 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
4857 && INTVAL (XEXP (x, 1)) >= 0)
4858 new = gen_rtx_combine (MULT, mode, XEXP (x, 0),
4859 GEN_INT ((HOST_WIDE_INT) 1
4860 << INTVAL (XEXP (x, 1))));
4861 break;
4862
4863 case AND:
4864 /* If the second operand is not a constant, we can't do anything
4865 with it. */
4866 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
4867 break;
4868
4869 /* If the constant is a power of two minus one and the first operand
4870 is a logical right shift, make an extraction. */
4871 if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
4872 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
4873 new = make_extraction (mode, XEXP (XEXP (x, 0), 0), -1,
4874 XEXP (XEXP (x, 0), 1), i, 1,
4875 0, in_code == COMPARE);
4876
4877 /* Same as previous, but for (subreg (lshiftrt ...)) in first op. */
4878 else if (GET_CODE (XEXP (x, 0)) == SUBREG
4879 && subreg_lowpart_p (XEXP (x, 0))
4880 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == LSHIFTRT
4881 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
4882 new = make_extraction (GET_MODE (SUBREG_REG (XEXP (x, 0))),
4883 XEXP (SUBREG_REG (XEXP (x, 0)), 0), -1,
4884 XEXP (SUBREG_REG (XEXP (x, 0)), 1), i, 1,
4885 0, in_code == COMPARE);
4886
4887
4888 /* If we have (and (rotate X C) M) and C is larger than the number
4889 of bits in M, this is an extraction. */
4890
4891 else if (GET_CODE (XEXP (x, 0)) == ROTATE
4892 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4893 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0
4894 && i <= INTVAL (XEXP (XEXP (x, 0), 1)))
4895 new = make_extraction (mode, XEXP (XEXP (x, 0), 0),
4896 (GET_MODE_BITSIZE (mode)
4897 - INTVAL (XEXP (XEXP (x, 0), 1))),
4898 NULL_RTX, i, 1, 0, in_code == COMPARE);
4899
4900 /* On machines without logical shifts, if the operand of the AND is
4901 a logical shift and our mask turns off all the propagated sign
4902 bits, we can replace the logical shift with an arithmetic shift. */
4903 else if (ashr_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
4904 && (lshr_optab->handlers[(int) mode].insn_code
4905 == CODE_FOR_nothing)
4906 && GET_CODE (XEXP (x, 0)) == LSHIFTRT
4907 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4908 && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
4909 && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT
4910 && mode_width <= HOST_BITS_PER_WIDE_INT)
4911 {
4912 unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
4913
4914 mask >>= INTVAL (XEXP (XEXP (x, 0), 1));
4915 if ((INTVAL (XEXP (x, 1)) & ~mask) == 0)
4916 SUBST (XEXP (x, 0),
4917 gen_rtx_combine (ASHIFTRT, mode, XEXP (XEXP (x, 0), 0),
4918 XEXP (XEXP (x, 0), 1)));
4919 }
4920
4921 /* If the constant is one less than a power of two, this might be
4922 representable by an extraction even if no shift is present.
4923 If it doesn't end up being a ZERO_EXTEND, we will ignore it unless
4924 we are in a COMPARE. */
4925 else if ((i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
4926 new = make_extraction (mode, XEXP (x, 0), 0, NULL_RTX, i, 1,
4927 0, in_code == COMPARE);
4928
4929 /* If we are in a comparison and this is an AND with a power of two,
4930 convert this into the appropriate bit extract. */
4931 else if (in_code == COMPARE
4932 && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0)
4933 new = make_extraction (mode, XEXP (x, 0), i, NULL_RTX, 1, 1, 0, 1);
4934
4935 break;
4936
4937 case LSHIFTRT:
4938 /* If the sign bit is known to be zero, replace this with an
4939 arithmetic shift. */
4940 if (ashr_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing
4941 && lshr_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
4942 && mode_width <= HOST_BITS_PER_WIDE_INT
4943 && (significant_bits (XEXP (x, 0), mode)
4944 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
4945 {
4946 new = gen_rtx_combine (ASHIFTRT, mode, XEXP (x, 0), XEXP (x, 1));
4947 break;
4948 }
4949
4950 /* ... fall through ... */
4951
4952 case ASHIFTRT:
4953 /* If we have (ashiftrt (ashift foo C1) C2) with C2 >= C1,
4954 this is a SIGN_EXTRACT. */
4955 if (GET_CODE (XEXP (x, 1)) == CONST_INT
4956 && GET_CODE (XEXP (x, 0)) == ASHIFT
4957 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4958 && INTVAL (XEXP (x, 1)) >= INTVAL (XEXP (XEXP (x, 0), 1)))
4959 new = make_extraction (mode, XEXP (XEXP (x, 0), 0),
4960 (INTVAL (XEXP (x, 1))
4961 - INTVAL (XEXP (XEXP (x, 0), 1))),
4962 NULL_RTX, mode_width - INTVAL (XEXP (x, 1)),
4963 code == LSHIFTRT, 0, in_code == COMPARE);
4964
4965 /* Similarly if we have (ashiftrt (OP (ashift foo C1) C3) C2). In these
4966 cases, we are better off returning a SIGN_EXTEND of the operation. */
4967
4968 if (GET_CODE (XEXP (x, 1)) == CONST_INT
4969 && (GET_CODE (XEXP (x, 0)) == IOR || GET_CODE (XEXP (x, 0)) == AND
4970 || GET_CODE (XEXP (x, 0)) == XOR
4971 || GET_CODE (XEXP (x, 0)) == PLUS)
4972 && GET_CODE (XEXP (XEXP (x, 0), 0)) == ASHIFT
4973 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
4974 && INTVAL (XEXP (x, 1)) >= INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1))
4975 && INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1)) < HOST_BITS_PER_WIDE_INT
4976 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4977 && (INTVAL (XEXP (XEXP (x, 0), 1))
4978 & (((HOST_WIDE_INT) 1
4979 << INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1))) - 1)) == 0)
4980 {
4981 HOST_WIDE_INT newop1
4982 = (INTVAL (XEXP (XEXP (x, 0), 1))
4983 >> INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1)));
4984
4985 new = make_extraction (mode,
4986 gen_binary (GET_CODE (XEXP (x, 0)), mode,
4987 XEXP (XEXP (XEXP (x, 0), 0), 0),
4988 GEN_INT (newop1)),
4989 (INTVAL (XEXP (x, 1))
4990 - INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1))),
4991 NULL_RTX, mode_width - INTVAL (XEXP (x, 1)),
4992 code == LSHIFTRT, 0, in_code == COMPARE);
4993 }
4994
4995 /* Similarly for (ashiftrt (neg (ashift FOO C1)) C2). */
4996 if (GET_CODE (XEXP (x, 1)) == CONST_INT
4997 && GET_CODE (XEXP (x, 0)) == NEG
4998 && GET_CODE (XEXP (XEXP (x, 0), 0)) == ASHIFT
4999 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
5000 && INTVAL (XEXP (x, 1)) >= INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1)))
5001 new = make_extraction (mode,
5002 gen_unary (GET_CODE (XEXP (x, 0)), mode,
5003 XEXP (XEXP (XEXP (x, 0), 0), 0)),
5004 (INTVAL (XEXP (x, 1))
5005 - INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1))),
5006 NULL_RTX, mode_width - INTVAL (XEXP (x, 1)),
5007 code == LSHIFTRT, 0, in_code == COMPARE);
5008 break;
5009 }
5010
5011 if (new)
5012 {
5013 x = gen_lowpart_for_combine (mode, new);
5014 code = GET_CODE (x);
5015 }
5016
5017 /* Now recursively process each operand of this operation. */
5018 fmt = GET_RTX_FORMAT (code);
5019 for (i = 0; i < GET_RTX_LENGTH (code); i++)
5020 if (fmt[i] == 'e')
5021 {
5022 new = make_compound_operation (XEXP (x, i), next_code);
5023 SUBST (XEXP (x, i), new);
5024 }
5025
5026 return x;
5027}
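/* Illustrative sketch, not part of the original source: two of the
   identities make_compound_operation relies on, stated in plain C.  */
#if 0
static void
compound_operation_demo (unsigned long x)
{
  /* A shift by a constant is a multiplication by a power of two:
     (ashift X 2) == (mult X 4).  */
  unsigned long by_shift = x << 2, by_mult = x * 4;

  /* An AND of a logical right shift with 2**n - 1 is a field
     extraction: (and (lshiftrt X 3) 15) == (zero_extract X 4 3).  */
  unsigned long field = (x >> 3) & 15;

  (void) by_shift, (void) by_mult, (void) field;
}
#endif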
5028\f
5029 /* Given M, see if it is a value that would select a field of bits
5030 within an item, but not the entire word. Return -1 if not.
5031 Otherwise, return the starting position of the field, where 0 is the
5032 low-order bit.
5033
5034 *PLEN is set to the length of the field. */
5035
5036static int
5037get_pos_from_mask (m, plen)
5038 unsigned HOST_WIDE_INT m;
5039 int *plen;
5040{
5041 /* Get the bit number of the first 1 bit from the right, -1 if none. */
5042 int pos = exact_log2 (m & - m);
5043
5044 if (pos < 0)
5045 return -1;
5046
5047 /* Now shift off the low-order zero bits and see if we have a power of
5048 two minus 1. */
5049 *plen = exact_log2 ((m >> pos) + 1);
5050
5051 if (*plen <= 0)
5052 return -1;
5053
5054 return pos;
5055}
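/* Illustrative sketch, not part of the original source: a standalone
   version of the computation above.  For m == 0x0f0 it yields
   pos == 4 and *plen == 4; for a non-contiguous mask such as 0x0f1
   it returns -1.  */
#if 0
static int
get_pos_from_mask_demo (unsigned long m, int *plen)
{
  int pos = 0, len = 0;

  if (m == 0)
    return -1;
  while ((m & 1) == 0)		/* Skip the low-order zero bits.  */
    m >>= 1, pos++;
  while (m & 1)			/* Count the contiguous run of ones.  */
    m >>= 1, len++;
  if (m != 0)			/* More one bits: not a single field.  */
    return -1;

  *plen = len;
  return pos;
}
#endif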
5056\f
5057/* Rewrite X so that it is an expression in MODE. We only care about the
5058 low-order BITS bits so we can ignore AND operations that just clear
5059 higher-order bits.
5060
5061 Also, if REG is non-zero and X is a register equal in value to REG,
5062 replace X with REG. */
5063
5064static rtx
5065force_to_mode (x, mode, bits, reg)
5066 rtx x;
5067 enum machine_mode mode;
5068 int bits;
5069 rtx reg;
5070{
5071 enum rtx_code code = GET_CODE (x);
5072 enum machine_mode op_mode = mode;
5073
5074 /* If X is narrower than MODE or if BITS is larger than the size of MODE,
5075 just get X in the proper mode. */
5076
5077 if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (mode)
5078 || bits > GET_MODE_BITSIZE (mode))
5079 return gen_lowpart_for_combine (mode, x);
5080
5081 switch (code)
5082 {
5083 case SIGN_EXTEND:
5084 case ZERO_EXTEND:
5085 case ZERO_EXTRACT:
5086 case SIGN_EXTRACT:
5087 x = expand_compound_operation (x);
5088 if (GET_CODE (x) != code)
5089 return force_to_mode (x, mode, bits, reg);
5090 break;
5091
5092 case REG:
5093 if (reg != 0 && (rtx_equal_p (get_last_value (reg), x)
5094 || rtx_equal_p (reg, get_last_value (x))))
5095 x = reg;
5096 break;
5097
5098 case CONST_INT:
5099 if (bits < HOST_BITS_PER_WIDE_INT)
5100 x = GEN_INT (INTVAL (x) & (((HOST_WIDE_INT) 1 << bits) - 1));
5101 return x;
5102
5103 case SUBREG:
5104 /* Ignore low-order SUBREGs. */
5105 if (subreg_lowpart_p (x))
5106 return force_to_mode (SUBREG_REG (x), mode, bits, reg);
5107 break;
5108
5109 case AND:
5110 /* If this is an AND with a constant, handle it here. Otherwise, we
5111 fall through to do the general binary case. */
5112
5113 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
5114 {
5115 HOST_WIDE_INT mask = INTVAL (XEXP (x, 1));
5116 int len = exact_log2 (mask + 1);
5117 rtx op = XEXP (x, 0);
5118
5119 /* If this is masking some low-order bits, we may be able to
5120 impose a stricter constraint on what bits of the operand are
5121 required. */
5122
5123 op = force_to_mode (op, mode, len > 0 ? MIN (len, bits) : bits,
5124 reg);
5125
5126 if (bits < HOST_BITS_PER_WIDE_INT)
5127 mask &= ((HOST_WIDE_INT) 1 << bits) - 1;
5128
5129 /* If we have no AND in MODE, use the original mode for the
5130 operation. */
5131
5132 if (and_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
5133 op_mode = GET_MODE (x);
5134
5135 x = simplify_and_const_int (x, op_mode, op, mask);
5136
5137 /* If X is still an AND, see if it is an AND with a mask that
5138 is just some low-order bits. If so, and it is BITS wide (it
5139 can't be wider), we don't need it. */
5140
5141 if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT
5142 && bits < HOST_BITS_PER_WIDE_INT
5143 && INTVAL (XEXP (x, 1)) == ((HOST_WIDE_INT) 1 << bits) - 1)
5144 x = XEXP (x, 0);
5145
5146 break;
5147 }
5148
5149 /* ... fall through ... */
5150
5151 case PLUS:
5152 case MINUS:
5153 case MULT:
5154 case IOR:
5155 case XOR:
5156 /* For most binary operations, just propagate into the operation and
5157 change the mode if we have an operation of that mode. */
5158
5159 if ((code == PLUS
5160 && add_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
5161 || (code == MINUS
5162 && sub_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
5163 || (code == MULT && (smul_optab->handlers[(int) mode].insn_code
5164 == CODE_FOR_nothing))
5165 || (code == AND
5166 && and_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
5167 || (code == IOR
5168 && ior_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
5169 || (code == XOR && (xor_optab->handlers[(int) mode].insn_code
5170 == CODE_FOR_nothing)))
5171 op_mode = GET_MODE (x);
5172
5173 x = gen_binary (code, op_mode,
5174 gen_lowpart_for_combine (op_mode,
5175 force_to_mode (XEXP (x, 0),
5176 mode, bits,
5177 reg)),
5178 gen_lowpart_for_combine (op_mode,
5179 force_to_mode (XEXP (x, 1),
5180 mode, bits,
5181 reg)));
5182 break;
5183
5184 case ASHIFT:
5185 case LSHIFT:
5186 /* For left shifts, do the same, but just for the first operand.
5187 If the shift count is a constant, we need even fewer bits of the
5188 first operand. */
5189
5190 if (GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) < bits)
5191 bits -= INTVAL (XEXP (x, 1));
5192
5193 if ((code == ASHIFT
5194 && ashl_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
5195 || (code == LSHIFT && (lshl_optab->handlers[(int) mode].insn_code
5196 == CODE_FOR_nothing)))
5197 op_mode = GET_MODE (x);
5198
5199 x = gen_binary (code, op_mode,
5200 gen_lowpart_for_combine (op_mode,
5201 force_to_mode (XEXP (x, 0),
5202 mode, bits,
5203 reg)),
5204 XEXP (x, 1));
5205 break;
5206
5207 case LSHIFTRT:
5208 /* Here we can only do something if the shift count is a constant.
5209 If it is, and the count plus BITS is no larger than the width of
5210 MODE, we can do the shift in MODE. */
5211
5212 if (GET_CODE (XEXP (x, 1)) == CONST_INT
5213 && INTVAL (XEXP (x, 1)) + bits <= GET_MODE_BITSIZE (mode))
5214 {
5215 rtx inner = force_to_mode (XEXP (x, 0), mode,
5216 bits + INTVAL (XEXP (x, 1)), reg);
5217
5218 if (lshr_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
5219 op_mode = GET_MODE (x);
5220
5221 x = gen_binary (LSHIFTRT, op_mode,
5222 gen_lowpart_for_combine (op_mode, inner),
5223 XEXP (x, 1));
5224 }
5225 break;
5226
5227 case ASHIFTRT:
5228 /* If this is a sign-extension operation that just affects bits
5229 we don't care about, remove it. */
5230
5231 if (GET_CODE (XEXP (x, 1)) == CONST_INT
5232 && INTVAL (XEXP (x, 1)) >= 0
5233 && INTVAL (XEXP (x, 1)) <= GET_MODE_BITSIZE (GET_MODE (x)) - bits
5234 && GET_CODE (XEXP (x, 0)) == ASHIFT
5235 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
5236 && INTVAL (XEXP (XEXP (x, 0), 1)) == INTVAL (XEXP (x, 1)))
5237 return force_to_mode (XEXP (XEXP (x, 0), 0), mode, bits, reg);
5238 break;
5239
5240 case NEG:
5241 case NOT:
5242 if ((code == NEG
5243 && neg_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
5244 || (code == NOT && (one_cmpl_optab->handlers[(int) mode].insn_code
5245 == CODE_FOR_nothing)))
5246 op_mode = GET_MODE (x);
5247
5248 /* Handle these similarly to the way we handle most binary operations. */
5249 x = gen_unary (code, op_mode,
5250 gen_lowpart_for_combine (op_mode,
5251 force_to_mode (XEXP (x, 0), mode,
5252 bits, reg)));
5253 break;
5254
5255 case IF_THEN_ELSE:
5256 /* We have no way of knowing if the IF_THEN_ELSE can itself be
5257 written in a narrower mode. We play it safe and do not do so. */
5258
5259 SUBST (XEXP (x, 1),
5260 gen_lowpart_for_combine (GET_MODE (x),
5261 force_to_mode (XEXP (x, 1), mode,
5262 bits, reg)));
5263 SUBST (XEXP (x, 2),
5264 gen_lowpart_for_combine (GET_MODE (x),
5265 force_to_mode (XEXP (x, 2), mode,
5266 bits, reg)));
5267 break;
5268 }
5269
5270 /* Ensure we return a value of the proper mode. */
5271 return gen_lowpart_for_combine (mode, x);
5272}
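/* Illustrative sketch, not part of the original source: the kind of
   rewriting force_to_mode performs, in C terms.  When the context
   only needs the low 8 bits of a value, an explicit mask of those
   bits is redundant and can be dropped.  */
#if 0
static unsigned char
force_to_mode_demo (unsigned long x)
{
  /* (unsigned char) (x & 0xff) and (unsigned char) x agree on every
     bit the narrow mode has, so the AND contributes nothing.  */
  return (unsigned char) (x & 0xff);	/* == (unsigned char) x */
}
#endif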
5273\f
5274/* Return the value of expression X given the fact that condition COND
5275 is known to be true when applied to REG as its first operand and VAL
5276 as its second. X is known to not be shared and so can be modified in
5277 place.
5278
5279 We only handle the simplest cases, and specifically those cases that
5280 arise with IF_THEN_ELSE expressions. */
5281
5282static rtx
5283known_cond (x, cond, reg, val)
5284 rtx x;
5285 enum rtx_code cond;
5286 rtx reg, val;
5287{
5288 enum rtx_code code = GET_CODE (x);
5289 rtx new, temp;
5290 char *fmt;
5291 int i, j;
5292
5293 if (side_effects_p (x))
5294 return x;
5295
5296 if (cond == EQ && rtx_equal_p (x, reg))
5297 return val;
5298
5299 /* If X is (abs REG) and we know something about REG's relationship
5300 with zero, we may be able to simplify this. */
5301
5302 if (code == ABS && rtx_equal_p (XEXP (x, 0), reg) && val == const0_rtx)
5303 switch (cond)
5304 {
5305 case GE: case GT: case EQ:
5306 return XEXP (x, 0);
5307 case LT: case LE:
5308 return gen_unary (NEG, GET_MODE (XEXP (x, 0)), XEXP (x, 0));
5309 }
5310
5311 /* The only other cases we handle are MIN, MAX, and comparisons if the
5312 operands are the same as REG and VAL. */
5313
5314 else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == 'c')
5315 {
5316 if (rtx_equal_p (XEXP (x, 0), val))
5317 cond = swap_condition (cond), temp = val, val = reg, reg = temp;
5318
5319 if (rtx_equal_p (XEXP (x, 0), reg) && rtx_equal_p (XEXP (x, 1), val))
5320 {
5321 if (GET_RTX_CLASS (code) == '<')
5322 return (comparison_dominates_p (cond, code) ? const_true_rtx
5323 : (comparison_dominates_p (cond,
5324 reverse_condition (code))
5325 ? const0_rtx : x));
5326
5327 else if (code == SMAX || code == SMIN
5328 || code == UMIN || code == UMAX)
5329 {
5330 int unsignedp = (code == UMIN || code == UMAX);
5331
5332 if (code == SMAX || code == UMAX)
5333 cond = reverse_condition (cond);
5334
5335 switch (cond)
5336 {
5337 case GE: case GT:
5338 return unsignedp ? x : XEXP (x, 1);
5339 case LE: case LT:
5340 return unsignedp ? x : XEXP (x, 0);
5341 case GEU: case GTU:
5342 return unsignedp ? XEXP (x, 1) : x;
5343 case LEU: case LTU:
5344 return unsignedp ? XEXP (x, 0) : x;
5345 }
5346 }
5347 }
5348 }
5349
5350 fmt = GET_RTX_FORMAT (code);
5351 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5352 {
5353 if (fmt[i] == 'e')
5354 SUBST (XEXP (x, i), known_cond (XEXP (x, i), cond, reg, val));
5355 else if (fmt[i] == 'E')
5356 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
5357 SUBST (XVECEXP (x, i, j), known_cond (XVECEXP (x, i, j),
5358 cond, reg, val));
5359 }
5360
5361 return x;
5362}
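/* Illustrative sketch, not part of the original source: the ABS
   simplification handled above, in C.  On a path where X >= 0 is
   known, (abs X) is just X; on a path where X < 0 is known, it is
   (neg X).  */
#if 0
static long
known_cond_demo (long x)
{
  if (x >= 0)
    return x;		/* (abs X) simplifies to X here */
  else
    return -x;		/* ... and to (neg X) here */
}
#endif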
5363\f
5364/* See if X, a SET operation, can be rewritten as a bit-field assignment.
5365 Return that assignment if so.
5366
5367 We only handle the most common cases. */
5368
5369static rtx
5370make_field_assignment (x)
5371 rtx x;
5372{
5373 rtx dest = SET_DEST (x);
5374 rtx src = SET_SRC (x);
5375 rtx ourdest;
5376 rtx assign;
5377 HOST_WIDE_INT c1;
5378 int pos, len;
5379 rtx other;
5380 enum machine_mode mode;
5381
5382 /* If SRC was (and (not (ashift (const_int 1) POS)) DEST), this is
5383 a clear of a one-bit field. We will have changed it to
5384 (and (rotate (const_int -2) POS) DEST), so check for that. Also check
5385 for a SUBREG. */
5386
5387 if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == ROTATE
5388 && GET_CODE (XEXP (XEXP (src, 0), 0)) == CONST_INT
5389 && INTVAL (XEXP (XEXP (src, 0), 0)) == -2
5390 && (rtx_equal_p (dest, XEXP (src, 1))
5391 || rtx_equal_p (dest, get_last_value (XEXP (src, 1)))
5392 || rtx_equal_p (get_last_value (dest), XEXP (src, 1))))
5393 {
5394 assign = make_extraction (VOIDmode, dest, -1, XEXP (XEXP (src, 0), 1),
5395 1, 1, 1, 0);
5396 return gen_rtx (SET, VOIDmode, assign, const0_rtx);
5397 }
5398
5399 else if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == SUBREG
5400 && subreg_lowpart_p (XEXP (src, 0))
5401 && (GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
5402 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (src, 0)))))
5403 && GET_CODE (SUBREG_REG (XEXP (src, 0))) == ROTATE
5404 && INTVAL (XEXP (SUBREG_REG (XEXP (src, 0)), 0)) == -2
5405 && (rtx_equal_p (dest, XEXP (src, 1))
5406 || rtx_equal_p (dest, get_last_value (XEXP (src, 1)))
5407 || rtx_equal_p (get_last_value (dest), XEXP (src, 1))))
5408 {
5409 assign = make_extraction (VOIDmode, dest, -1,
5410 XEXP (SUBREG_REG (XEXP (src, 0)), 1),
5411 1, 1, 1, 0);
5412 return gen_rtx (SET, VOIDmode, assign, const0_rtx);
5413 }
5414
5415 /* If SRC is (ior (ashift (const_int 1) POS) DEST), this is a set of a
5416 one-bit field. */
5417 else if (GET_CODE (src) == IOR && GET_CODE (XEXP (src, 0)) == ASHIFT
5418 && XEXP (XEXP (src, 0), 0) == const1_rtx
5419 && (rtx_equal_p (dest, XEXP (src, 1))
5420 || rtx_equal_p (dest, get_last_value (XEXP (src, 1)))
5421 || rtx_equal_p (get_last_value (dest), XEXP (src, 1))))
5422 {
5423 assign = make_extraction (VOIDmode, dest, -1, XEXP (XEXP (src, 0), 1),
5424 1, 1, 1, 0);
5425 return gen_rtx (SET, VOIDmode, assign, const1_rtx);
5426 }
5427
5428 /* The other case we handle is assignments into a constant-position
5429 field. They look like (ior (and DEST C1) OTHER). If C1 represents
5430 a mask that has all one bits except for a group of zero bits and
5431 OTHER is known to have zeros where C1 has ones, this is such an
5432 assignment. Compute the position and length from C1. Shift OTHER
5433 to the appropriate position, force it to the required mode, and
5434 make the extraction. Check for the AND in both operands. */
5435
5436 if (GET_CODE (src) == IOR && GET_CODE (XEXP (src, 0)) == AND
5437 && GET_CODE (XEXP (XEXP (src, 0), 1)) == CONST_INT
5438 && (rtx_equal_p (XEXP (XEXP (src, 0), 0), dest)
5439 || rtx_equal_p (XEXP (XEXP (src, 0), 0), get_last_value (dest))
5440 || rtx_equal_p (get_last_value (XEXP (XEXP (src, 0), 0)), dest)))
5441 c1 = INTVAL (XEXP (XEXP (src, 0), 1)), other = XEXP (src, 1);
5442 else if (GET_CODE (src) == IOR && GET_CODE (XEXP (src, 1)) == AND
5443 && GET_CODE (XEXP (XEXP (src, 1), 1)) == CONST_INT
5444 && (rtx_equal_p (XEXP (XEXP (src, 1), 0), dest)
5445 || rtx_equal_p (XEXP (XEXP (src, 1), 0), get_last_value (dest))
5446 || rtx_equal_p (get_last_value (XEXP (XEXP (src, 1), 0)),
5447 dest)))
5448 c1 = INTVAL (XEXP (XEXP (src, 1), 1)), other = XEXP (src, 0);
5449 else
5450 return x;
5451
5452 pos = get_pos_from_mask (~c1, &len);
5453 if (pos < 0 || pos + len > GET_MODE_BITSIZE (GET_MODE (dest))
5454 || (GET_MODE_BITSIZE (GET_MODE (other)) <= HOST_BITS_PER_WIDE_INT
5455 && (c1 & significant_bits (other, GET_MODE (other))) != 0))
5456 return x;
5457
5458 assign = make_extraction (VOIDmode, dest, pos, NULL_RTX, len, 1, 1, 0);
5459
5460 /* The mode to use for the source is the mode of the assignment, or of
5461 what is inside a possible STRICT_LOW_PART. */
5462 mode = (GET_CODE (assign) == STRICT_LOW_PART
5463 ? GET_MODE (XEXP (assign, 0)) : GET_MODE (assign));
5464
5465 /* Shift OTHER right POS places and make it the source, restricting it
5466 to the proper length and mode. */
5467
5468 src = force_to_mode (simplify_shift_const (NULL_RTX, LSHIFTRT,
5469 GET_MODE (src), other, pos),
5470 mode, len, dest);
5471
5472 return gen_rtx_combine (SET, VOIDmode, assign, src);
5473}
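/* Illustrative sketch, not part of the original source: the one-bit
   forms recognized above correspond to these C idioms for bit POS of
   DEST.  (rotate (const_int -2) POS) is the mask with a single zero
   at POS, i.e. ~(1 << POS).  */
#if 0
static void
field_assignment_demo (unsigned long *dest, int pos)
{
  *dest &= ~(1UL << pos);	/* (and (rotate -2 POS) DEST): clear */
  *dest |= 1UL << pos;		/* (ior (ashift 1 POS) DEST): set    */
}
#endif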
5474\f
5475/* See if X is of the form (+ (* a c) (* b c)) and convert to (* (+ a b) c)
5476 if so. */
5477
5478static rtx
5479apply_distributive_law (x)
5480 rtx x;
5481{
5482 enum rtx_code code = GET_CODE (x);
5483 rtx lhs, rhs, other;
5484 rtx tem;
5485 enum rtx_code inner_code;
5486
5487 /* The outer operation can only be one of the following: */
5488 if (code != IOR && code != AND && code != XOR
5489 && code != PLUS && code != MINUS)
5490 return x;
5491
5492 lhs = XEXP (x, 0), rhs = XEXP (x, 1);
5493
5494 /* If either operand is a primitive we can't do anything, so get out fast. */
5495 if (GET_RTX_CLASS (GET_CODE (lhs)) == 'o'
5496 || GET_RTX_CLASS (GET_CODE (rhs)) == 'o')
5497 return x;
5498
5499 lhs = expand_compound_operation (lhs);
5500 rhs = expand_compound_operation (rhs);
5501 inner_code = GET_CODE (lhs);
5502 if (inner_code != GET_CODE (rhs))
5503 return x;
5504
5505 /* See if the inner and outer operations distribute. */
5506 switch (inner_code)
5507 {
5508 case LSHIFTRT:
5509 case ASHIFTRT:
5510 case AND:
5511 case IOR:
5512 /* These all distribute except over PLUS. */
5513 if (code == PLUS || code == MINUS)
5514 return x;
5515 break;
5516
5517 case MULT:
5518 if (code != PLUS && code != MINUS)
5519 return x;
5520 break;
5521
5522 case ASHIFT:
5523 case LSHIFT:
5524 /* These are also multiplies, so they distribute over everything. */
5525 break;
5526
5527 case SUBREG:
5528 /* A non-paradoxical SUBREG distributes over all operations, provided
5529 the inner modes and word numbers are the same, this is an extraction
5530 of a low-order part, we don't convert an fp operation to int or
5531 vice versa, and we would not be converting a single-word
5532 operation into a multi-word operation. The latter test is not
5533 required, but it prevents generating unneeded multi-word operations.
5534 Some of the previous tests are redundant given the latter test, but
5535 are retained because they are required for correctness.
5536
5537 We produce the result slightly differently in this case. */
5538
5539 if (GET_MODE (SUBREG_REG (lhs)) != GET_MODE (SUBREG_REG (rhs))
5540 || SUBREG_WORD (lhs) != SUBREG_WORD (rhs)
5541 || ! subreg_lowpart_p (lhs)
5542 || (GET_MODE_CLASS (GET_MODE (lhs))
5543 != GET_MODE_CLASS (GET_MODE (SUBREG_REG (lhs))))
5544 || (GET_MODE_SIZE (GET_MODE (lhs))
5545 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs))))
5546 || GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs))) > UNITS_PER_WORD)
5547 return x;
5548
5549 tem = gen_binary (code, GET_MODE (SUBREG_REG (lhs)),
5550 SUBREG_REG (lhs), SUBREG_REG (rhs));
5551 return gen_lowpart_for_combine (GET_MODE (x), tem);
5552
5553 default:
5554 return x;
5555 }
5556
5557 /* Set LHS and RHS to the inner operands (A and B in the example
5558 above) and set OTHER to the common operand (C in the example).
5559 There is only one way to do this unless the inner operation is
5560 commutative. */
5561 if (GET_RTX_CLASS (inner_code) == 'c'
5562 && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 0)))
5563 other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 1);
5564 else if (GET_RTX_CLASS (inner_code) == 'c'
5565 && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 1)))
5566 other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 0);
5567 else if (GET_RTX_CLASS (inner_code) == 'c'
5568 && rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 0)))
5569 other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 1);
5570 else if (rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 1)))
5571 other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 0);
5572 else
5573 return x;
5574
5575 /* Form the new inner operation, seeing if it simplifies first. */
5576 tem = gen_binary (code, GET_MODE (x), lhs, rhs);
5577
5578 /* There is one exception to the general way of distributing:
5579 (a | b) ^ (a | c) -> (~a) & (b ^ c) */
5580 if (code == XOR && inner_code == IOR)
5581 {
5582 inner_code = AND;
5583 other = gen_unary (NOT, GET_MODE (x), other);
5584 }
5585
5586 /* We may be able to continue distributing the result, so call
5587 ourselves recursively on the inner operation before forming the
5588 outer operation, which we return. */
5589 return gen_binary (inner_code, GET_MODE (x),
5590 apply_distributive_law (tem), other);
5591}
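/* Illustrative sketch, not part of the original source: the two
   identities used above, checked bitwise in C.  The general pattern
   is (A op C) outer (B op C) == (A outer B) op C when "op"
   distributes over "outer"; the special case rewrites an XOR of
   IORs as (~A) & (B ^ C).  */
#if 0
#include <assert.h>

static void
distributive_law_demo (unsigned long a, unsigned long b, unsigned long c)
{
  assert (((a & c) | (b & c)) == ((a | b) & c));
  assert (((a | b) ^ (a | c)) == (~a & (b ^ c)));
}
#endif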
5592\f
5593/* We have X, a logical `and' of VAROP with the constant CONSTOP, to be done
5594 in MODE.
5595
5596 Return an equivalent form, if different from X. Otherwise, return X. If
5597 X is zero, we are to always construct the equivalent form. */
5598
5599static rtx
5600simplify_and_const_int (x, mode, varop, constop)
5601 rtx x;
5602 enum machine_mode mode;
5603 rtx varop;
5604 unsigned HOST_WIDE_INT constop;
5605{
5606 register enum machine_mode tmode;
5607 register rtx temp;
5608 unsigned HOST_WIDE_INT significant;
5609
5610 /* There is a large class of optimizations based on the principle that
5611 some operations produce results where certain bits are known to be zero,
5612 and hence are not significant to the AND. For example, if we have just
5613 done a left shift of one bit, the low-order bit is known to be zero and
5614 hence an AND with a mask of ~1 would not do anything.
5615
5616 At the end of the following loop, we set:
5617
5618 VAROP to be the item to be AND'ed with;
5619 CONSTOP to the constant value to AND it with. */
5620
5621 while (1)
5622 {
5623 /* If we ever encounter a mode wider than the host machine's widest
5624 integer size, we can't compute the masks accurately, so give up. */
5625 if (GET_MODE_BITSIZE (GET_MODE (varop)) > HOST_BITS_PER_WIDE_INT)
5626 break;
5627
5628 /* Unless one of the cases below does a `continue',
5629 a `break' will be executed to exit the loop. */
5630
5631 switch (GET_CODE (varop))
5632 {
5633 case CLOBBER:
5634 /* If VAROP is a (clobber (const_int)), return it since we know
5635 we are generating something that won't match. */
5636 return varop;
5637
5638#if ! BITS_BIG_ENDIAN
5639 case USE:
5640 /* VAROP is a (use (mem ..)) that was made from a bit-field
5641 extraction that spanned the boundary of the MEM. If we are
5642 now masking so it is within that boundary, we don't need the
5643 USE any more. */
5644 if ((constop & ~ GET_MODE_MASK (GET_MODE (XEXP (varop, 0)))) == 0)
5645 {
5646 varop = XEXP (varop, 0);
5647 continue;
5648 }
5649 break;
5650#endif
5651
5652 case SUBREG:
5653 if (subreg_lowpart_p (varop)
5654 /* We can ignore the effect of this SUBREG if it narrows the mode
5655 or, on machines where byte operations extend, if the
5656 constant masks to zero all the bits the mode doesn't have. */
5657 && ((GET_MODE_SIZE (GET_MODE (varop))
5658 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop))))
5659 #if defined(BYTE_LOADS_ZERO_EXTEND) || defined(BYTE_LOADS_SIGN_EXTEND)
5660 || (0 == (constop
5661 & GET_MODE_MASK (GET_MODE (varop))
5662 & ~ GET_MODE_MASK (GET_MODE (SUBREG_REG (varop)))))
5663#endif
5664 ))
5665 {
5666 varop = SUBREG_REG (varop);
5667 continue;
5668 }
5669 break;
5670
5671 case ZERO_EXTRACT:
5672 case SIGN_EXTRACT:
5673 case ZERO_EXTEND:
5674 case SIGN_EXTEND:
5675 /* Try to expand these into a series of shifts and then work
5676 with that result. If we can't, for example, if the extract
5677 isn't at a fixed position, give up. */
5678 temp = expand_compound_operation (varop);
5679 if (temp != varop)
5680 {
5681 varop = temp;
5682 continue;
5683 }
5684 break;
5685
5686 case AND:
5687 if (GET_CODE (XEXP (varop, 1)) == CONST_INT)
5688 {
5689 constop &= INTVAL (XEXP (varop, 1));
5690 varop = XEXP (varop, 0);
5691 continue;
5692 }
5693 break;
5694
5695 case IOR:
5696 case XOR:
5697 /* If VAROP is (ior (lshiftrt FOO C1) C2), try to commute the IOR and
5698 LSHIFT so we end up with an (and (lshiftrt (ior ...) ...) ...)
5699 operation which may be a bitfield extraction. */
5700
5701 if (GET_CODE (XEXP (varop, 0)) == LSHIFTRT
5702 && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT
5703 && INTVAL (XEXP (XEXP (varop, 0), 1)) >= 0
5704 && INTVAL (XEXP (XEXP (varop, 0), 1)) < HOST_BITS_PER_WIDE_INT
5705 && GET_CODE (XEXP (varop, 1)) == CONST_INT
5706 && ((INTVAL (XEXP (varop, 1))
5707 & ~ significant_bits (XEXP (varop, 0),
5708 GET_MODE (varop))) == 0))
5709 {
5710 temp = GEN_INT ((INTVAL (XEXP (varop, 1)) & constop)
5711 << INTVAL (XEXP (XEXP (varop, 0), 1)));
5712 temp = gen_binary (GET_CODE (varop), GET_MODE (varop),
5713 XEXP (XEXP (varop, 0), 0), temp);
5714 varop = gen_rtx_combine (LSHIFTRT, GET_MODE (varop),
5715 temp, XEXP (varop, 1));
5716 continue;
5717 }
5718
5719 /* Apply the AND to both branches of the IOR or XOR, then try to
5720 apply the distributive law. This may eliminate operations
5721 if either branch can be simplified because of the AND.
5722 It may also make some cases more complex, but those cases
5723 probably won't match a pattern either with or without this. */
5724 return
5725 gen_lowpart_for_combine
5726 (mode, apply_distributive_law
5727 (gen_rtx_combine
5728 (GET_CODE (varop), GET_MODE (varop),
5729 simplify_and_const_int (NULL_RTX, GET_MODE (varop),
5730 XEXP (varop, 0), constop),
5731 simplify_and_const_int (NULL_RTX, GET_MODE (varop),
5732 XEXP (varop, 1), constop))));
5733
5734 case NOT:
5735 /* (and (not FOO)) is (and (xor FOO CONST_OP)) so if FOO is an
5736 LSHIFTRT we can do the same as above. */
5737
5738 if (GET_CODE (XEXP (varop, 0)) == LSHIFTRT
5739 && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT
5740 && INTVAL (XEXP (XEXP (varop, 0), 1)) >= 0
5741 && INTVAL (XEXP (XEXP (varop, 0), 1)) < HOST_BITS_PER_WIDE_INT)
5742 {
5743 temp = GEN_INT (constop << INTVAL (XEXP (XEXP (varop, 0), 1)));
5744 temp = gen_binary (XOR, GET_MODE (varop),
5745 XEXP (XEXP (varop, 0), 0), temp);
5746 varop = gen_rtx_combine (LSHIFTRT, GET_MODE (varop),
5747 temp, XEXP (XEXP (varop, 0), 1));
5748 continue;
5749 }
5750 break;
5751
5752 case ASHIFTRT:
5753 /* If we are just looking for the sign bit, we don't need this
5754 shift at all, even if it has a variable count. */
5755 if (constop == ((HOST_WIDE_INT) 1
5756 << (GET_MODE_BITSIZE (GET_MODE (varop)) - 1)))
5757 {
5758 varop = XEXP (varop, 0);
5759 continue;
5760 }
5761
5762 /* If this is a shift by a constant, get a mask that contains
5763 those bits that are not copies of the sign bit. We then have
5764 two cases: If CONSTOP only includes those bits, this can be
5765 a logical shift, which may allow simplifications. If CONSTOP
5766 is a single-bit field not within those bits, we are requesting
5767 a copy of the sign bit and hence can shift the sign bit to
5768 the appropriate location. */
5769 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
5770 && INTVAL (XEXP (varop, 1)) >= 0
5771 && INTVAL (XEXP (varop, 1)) < HOST_BITS_PER_WIDE_INT)
5772 {
5773 int i = -1;
5774
5775 significant = GET_MODE_MASK (GET_MODE (varop));
5776 significant >>= INTVAL (XEXP (varop, 1));
5777
5778 if ((constop & ~significant) == 0
5779 || (i = exact_log2 (constop)) >= 0)
5780 {
5781 varop = simplify_shift_const
5782 (varop, LSHIFTRT, GET_MODE (varop), XEXP (varop, 0),
5783 i < 0 ? INTVAL (XEXP (varop, 1))
5784 : GET_MODE_BITSIZE (GET_MODE (varop)) - 1 - i);
5785 if (GET_CODE (varop) != ASHIFTRT)
5786 continue;
5787 }
5788 }
5789
5790 /* If our mask is 1, convert this to a LSHIFTRT. This can be done
5791 even if the shift count isn't a constant. */
5792 if (constop == 1)
5793 varop = gen_rtx_combine (LSHIFTRT, GET_MODE (varop),
5794 XEXP (varop, 0), XEXP (varop, 1));
5795 break;
5796
5797 case NE:
5798 /* (and (ne FOO 0) CONST) can be (and FOO CONST) if CONST is
5799 included in STORE_FLAG_VALUE and FOO has no significant bits
5800 not in CONST. */
5801 if ((constop & ~ STORE_FLAG_VALUE) == 0
5802 && XEXP (varop, 1) == const0_rtx
5803 && (significant_bits (XEXP (varop, 0), mode) & ~ constop) == 0)
5804 {
5805 varop = XEXP (varop, 0);
5806 continue;
5807 }
5808 break;
5809
5810 case PLUS:
5811 /* In (and (plus FOO C1) M), if M is a mask that just turns off
5812 low-order bits (as in an alignment operation) and FOO is already
5813 aligned to that boundary, we can remove this AND
5814 and possibly the PLUS if it is now adding zero. */
5815 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
5816 && exact_log2 (-constop) >= 0
5817 && (significant_bits (XEXP (varop, 0), mode) & ~ constop) == 0)
5818 {
5819 varop = plus_constant (XEXP (varop, 0),
5820 INTVAL (XEXP (varop, 1)) & constop);
5821 constop = ~0;
5822 break;
5823 }
5824
5825 /* ... fall through ... */
5826
5827 case MINUS:
5828 /* In (and (plus (and FOO M1) BAR) M2), if M1 and M2 are one
5829 less than powers of two and M2 is narrower than M1, we can
5830 eliminate the inner AND. This occurs when incrementing
5831 bit fields. */
5832
5833 if (GET_CODE (XEXP (varop, 0)) == ZERO_EXTRACT
5834 || GET_CODE (XEXP (varop, 0)) == ZERO_EXTEND)
5835 SUBST (XEXP (varop, 0),
5836 expand_compound_operation (XEXP (varop, 0)));
5837
5838 if (GET_CODE (XEXP (varop, 0)) == AND
5839 && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT
5840 && exact_log2 (constop + 1) >= 0
5841 && exact_log2 (INTVAL (XEXP (XEXP (varop, 0), 1)) + 1) >= 0
5842 && (~ INTVAL (XEXP (XEXP (varop, 0), 1)) & constop) == 0)
5843 SUBST (XEXP (varop, 0), XEXP (XEXP (varop, 0), 0));
5844 break;
5845 }
5846
5847 break;
5848 }
5849
5850 /* If we have reached a constant, this whole thing is constant. */
5851 if (GET_CODE (varop) == CONST_INT)
5852 return GEN_INT (constop & INTVAL (varop));
5853
5854 /* See what bits are significant in VAROP. */
5855 significant = significant_bits (varop, mode);
5856
5857 /* Turn off all bits in the constant that are known to already be zero.
5858 Thus, if the AND isn't needed at all, we will have CONSTOP == SIGNIFICANT
5859 which is tested below. */
5860
5861 constop &= significant;
5862
5863 /* If we don't have any bits left, return zero. */
5864 if (constop == 0)
5865 return const0_rtx;
5866
5867 /* Get VAROP in MODE. Try to get a SUBREG if not. Don't make a new SUBREG
5868 if we already had one (just check for the simplest cases). */
5869 if (x && GET_CODE (XEXP (x, 0)) == SUBREG
5870 && GET_MODE (XEXP (x, 0)) == mode
5871 && SUBREG_REG (XEXP (x, 0)) == varop)
5872 varop = XEXP (x, 0);
5873 else
5874 varop = gen_lowpart_for_combine (mode, varop);
5875
5876 /* If we can't make the SUBREG, try to return what we were given. */
5877 if (GET_CODE (varop) == CLOBBER)
5878 return x ? x : varop;
5879
5880 /* If we are only masking insignificant bits, return VAROP. */
5881 if (constop == significant)
5882 x = varop;
5883
5884 /* Otherwise, return an AND. See how much, if any, of X we can use. */
5885 else if (x == 0 || GET_CODE (x) != AND || GET_MODE (x) != mode)
5886 x = gen_rtx_combine (AND, mode, varop, GEN_INT (constop));
5887
5888 else
5889 {
5890 if (GET_CODE (XEXP (x, 1)) != CONST_INT
5891 || INTVAL (XEXP (x, 1)) != constop)
5892 SUBST (XEXP (x, 1), GEN_INT (constop));
5893
5894 SUBST (XEXP (x, 0), varop);
5895 }
5896
5897 return x;
5898}
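/* Illustrative sketch, not part of the original source: the guiding
   principle of the loop above.  A left shift by one leaves bit 0
   known zero, so a subsequent AND with ~1 can be deleted.  */
#if 0
#include <assert.h>

static void
simplify_and_demo (unsigned long x)
{
  assert (((x << 1) & ~1UL) == (x << 1));
}
#endif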
5899\f
5900/* Given an expression, X, compute which bits in X can be non-zero.
5901 We don't care about bits outside of those defined in MODE.
5902
5903 For most X this is simply GET_MODE_MASK (MODE), but if X is
5904 a shift, AND, or zero_extract, we can do better. */
5905
5906 static unsigned HOST_WIDE_INT
5907significant_bits (x, mode)
5908 rtx x;
5909 enum machine_mode mode;
5910{
5911 unsigned HOST_WIDE_INT significant = GET_MODE_MASK (mode);
5912 unsigned HOST_WIDE_INT inner_sig;
5913 enum rtx_code code;
5914 int mode_width = GET_MODE_BITSIZE (mode);
5915 rtx tem;
5916
5917 /* If X is wider than MODE, use its mode instead. */
5918 if (GET_MODE_BITSIZE (GET_MODE (x)) > mode_width)
5919 {
5920 mode = GET_MODE (x);
5921 significant = GET_MODE_MASK (mode);
5922 mode_width = GET_MODE_BITSIZE (mode);
5923 }
5924
5925 if (mode_width > HOST_BITS_PER_WIDE_INT)
5926 /* Our only callers in this case look for single bit values. So
5927 just return the mode mask. Those tests will then be false. */
5928 return significant;
5929
5930 code = GET_CODE (x);
5931 switch (code)
5932 {
5933 case REG:
5934#ifdef STACK_BOUNDARY
5935 /* If this is the stack pointer, we may know something about its
5936 alignment. If PUSH_ROUNDING is defined, it is possible for the
5937 stack to be momentarily aligned only to that amount, so we pick
5938 the least alignment. */
5939
5940 if (x == stack_pointer_rtx)
5941 {
5942 int sp_alignment = STACK_BOUNDARY / BITS_PER_UNIT;
5943
5944#ifdef PUSH_ROUNDING
5945 sp_alignment = MIN (PUSH_ROUNDING (1), sp_alignment);
5946#endif
5947
5948 return significant & ~ (sp_alignment - 1);
5949 }
5950#endif
5951
5952 /* If X is a register whose value we can find, use that value.
5953 Otherwise, use the previously-computed significant bits for this
5954 register. */
5955
5956 tem = get_last_value (x);
5957 if (tem)
5958 return significant_bits (tem, mode);
5959 else if (significant_valid && reg_significant[REGNO (x)])
5960 return reg_significant[REGNO (x)] & significant;
5961 else
5962 return significant;
5963
5964 case CONST_INT:
5965 return INTVAL (x);
5966
5967#ifdef BYTE_LOADS_ZERO_EXTEND
5968 case MEM:
5969 /* In many, if not most, RISC machines, reading a byte from memory
5970 zeros the rest of the register. Noticing that fact saves a lot
5971 of extra zero-extends. */
5972 significant &= GET_MODE_MASK (GET_MODE (x));
5973 break;
5974#endif
5975
5976#if STORE_FLAG_VALUE == 1
5977 case EQ: case NE:
5978 case GT: case GTU:
5979 case LT: case LTU:
5980 case GE: case GEU:
5981 case LE: case LEU:
5982
5983 if (GET_MODE_CLASS (mode) == MODE_INT)
5984 significant = 1;
5985
5986 /* A comparison operation only sets the bits given by its mode. The
5987 rest are set undefined. */
5988 if (GET_MODE_SIZE (GET_MODE (x)) < mode_width)
5989 significant |= (GET_MODE_MASK (mode) & ~ GET_MODE_MASK (GET_MODE (x)));
5990 break;
5991#endif
5992
5993 case NEG:
5994 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
5995 == GET_MODE_BITSIZE (GET_MODE (x)))
5996 significant = 1;
5997
5998 if (GET_MODE_SIZE (GET_MODE (x)) < mode_width)
5999 significant |= (GET_MODE_MASK (mode) & ~ GET_MODE_MASK (GET_MODE (x)));
6000 break;
6001
6002 case ABS:
6003 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
6004 == GET_MODE_BITSIZE (GET_MODE (x)))
6005 significant = 1;
6006 break;
6007
6008 case TRUNCATE:
6009 significant &= (significant_bits (XEXP (x, 0), mode)
6010 & GET_MODE_MASK (mode));
6011 break;
6012
6013 case ZERO_EXTEND:
6014 significant &= significant_bits (XEXP (x, 0), mode);
6015 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
6016 significant &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
6017 break;
6018
6019 case SIGN_EXTEND:
6020 /* If the sign bit is known clear, this is the same as ZERO_EXTEND.
6021 Otherwise, show that all the bits in the outer mode but not in
6022 the inner mode may be non-zero. */
6023 inner_sig = significant_bits (XEXP (x, 0), mode);
6024 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
6025 {
6026 inner_sig &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
6027 if (inner_sig &
6028 (((HOST_WIDE_INT) 1
6029 << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1))))
6030 inner_sig |= (GET_MODE_MASK (mode)
6031 & ~ GET_MODE_MASK (GET_MODE (XEXP (x, 0))));
6032 }
6033
6034 significant &= inner_sig;
6035 break;
6036
6037 case AND:
6038 significant &= (significant_bits (XEXP (x, 0), mode)
6039 & significant_bits (XEXP (x, 1), mode));
6040 break;
6041
6042 case XOR: case IOR:
6043 case UMIN: case UMAX: case SMIN: case SMAX:
6044 significant &= (significant_bits (XEXP (x, 0), mode)
6045 | significant_bits (XEXP (x, 1), mode));
6046 break;
6047
6048 case PLUS: case MINUS:
6049 case MULT:
6050 case DIV: case UDIV:
6051 case MOD: case UMOD:
6052 /* We can apply the rules of arithmetic to compute the number of
6053 high- and low-order zero bits of these operations. We start by
6054 computing the width (position of the highest-order non-zero bit)
6055 and the number of low-order zero bits for each value. */
6056 {
6057 unsigned HOST_WIDE_INT sig0 = significant_bits (XEXP (x, 0), mode);
6058 unsigned HOST_WIDE_INT sig1 = significant_bits (XEXP (x, 1), mode);
6059 int width0 = floor_log2 (sig0) + 1;
6060 int width1 = floor_log2 (sig1) + 1;
6061 int low0 = floor_log2 (sig0 & -sig0);
6062 int low1 = floor_log2 (sig1 & -sig1);
6063 int op0_maybe_minusp = ((sig0 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) != 0);
6064 int op1_maybe_minusp = ((sig1 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) != 0);
6065 int result_width = mode_width;
6066 int result_low = 0;
6067
6068 switch (code)
6069 {
6070 case PLUS:
6071 result_width = MAX (width0, width1) + 1;
6072 result_low = MIN (low0, low1);
6073 break;
6074 case MINUS:
6075 result_low = MIN (low0, low1);
6076 break;
6077 case MULT:
6078 result_width = width0 + width1;
6079 result_low = low0 + low1;
6080 break;
6081 case DIV:
6082 if (! op0_maybe_minusp && ! op1_maybe_minusp)
6083 result_width = width0;
6084 break;
6085 case UDIV:
6086 result_width = width0;
6087 break;
6088 case MOD:
6089 if (! op0_maybe_minusp && ! op1_maybe_minusp)
6090 result_width = MIN (width0, width1);
6091 result_low = MIN (low0, low1);
6092 break;
6093 case UMOD:
6094 result_width = MIN (width0, width1);
6095 result_low = MIN (low0, low1);
6096 break;
6097 }
6098
6099 if (result_width < mode_width)
6100 significant &= ((HOST_WIDE_INT) 1 << result_width) - 1;
6101
6102 if (result_low > 0)
6103 significant &= ~ (((HOST_WIDE_INT) 1 << result_low) - 1);
6104 }
6105 break;
6106
6107 case ZERO_EXTRACT:
6108 if (GET_CODE (XEXP (x, 1)) == CONST_INT
6109 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
6110 significant &= ((HOST_WIDE_INT) 1 << INTVAL (XEXP (x, 1))) - 1;
6111 break;
6112
6113 case SUBREG:
6114 /* If this is a SUBREG formed for a promoted variable that has
6115 been zero-extended, we know that at least the high-order bits
6116 are zero, though others might be too. */
6117
6118 if (SUBREG_PROMOTED_VAR_P (x) && SUBREG_PROMOTED_UNSIGNED_P (x))
6119 significant = (GET_MODE_MASK (GET_MODE (x))
6120 & significant_bits (SUBREG_REG (x), GET_MODE (x)));
6121
6122 /* If the inner mode is a single word for both the host and target
6123 machines, we can compute this from which bits of the inner
6124 object are known significant. */
6125 if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) <= BITS_PER_WORD
6126 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
6127 <= HOST_BITS_PER_WIDE_INT))
6128 {
6129 significant &= significant_bits (SUBREG_REG (x), mode);
6130 #if ! defined(BYTE_LOADS_ZERO_EXTEND) && ! defined(BYTE_LOADS_SIGN_EXTEND)
6131 /* On many CISC machines, accessing an object in a wider mode
6132 causes the high-order bits to become undefined. So they are
6133 not known to be zero. */
6134 if (GET_MODE_SIZE (GET_MODE (x))
6135 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
6136 significant |= (GET_MODE_MASK (GET_MODE (x))
6137 & ~ GET_MODE_MASK (GET_MODE (SUBREG_REG (x))));
6138#endif
6139 }
6140 break;
6141
6142 case ASHIFTRT:
6143 case LSHIFTRT:
6144 case ASHIFT:
6145 case LSHIFT:
6146 case ROTATE:
6147 /* The significant bits are in two classes: any bits within MODE
6148 that aren't in GET_MODE (x) are always significant. The rest of the
6149 significant bits are those that are significant in the operand of
6150 the shift when shifted the appropriate number of bits. This
6151 shows that high-order bits are cleared by the right shift and
6152 low-order bits by left shifts. */
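/* Annotation (not in the original source): e.g., an LSHIFTRT by 4 of a
   QImode value whose significant bits are 0b11111111 gives
   inner == 0b00001111, so the four high-order bits of the result are
   known to be zero.  */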
6153 if (GET_CODE (XEXP (x, 1)) == CONST_INT
6154 && INTVAL (XEXP (x, 1)) >= 0
6155 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
6156 {
6157 enum machine_mode inner_mode = GET_MODE (x);
6158 int width = GET_MODE_BITSIZE (inner_mode);
6159 int count = INTVAL (XEXP (x, 1));
6160 unsigned HOST_WIDE_INT mode_mask = GET_MODE_MASK (inner_mode);
6161 unsigned HOST_WIDE_INT op_significant
6162 = significant_bits (XEXP (x, 0), mode);
6163 unsigned HOST_WIDE_INT inner = op_significant & mode_mask;
6164 unsigned HOST_WIDE_INT outer = 0;
6165
6166 if (mode_width > width)
6167 outer = (op_significant & significant & ~ mode_mask);
6168
6169 if (code == LSHIFTRT)
6170 inner >>= count;
6171 else if (code == ASHIFTRT)
6172 {
6173 inner >>= count;
6174
6175 /* If the sign bit was significant before the shift, we need
6176 to mark as significant all the places it could have been
6177 copied to by the shift. */
6178 if (inner & ((HOST_WIDE_INT) 1 << (width - 1 - count)))
6179 inner |= (((HOST_WIDE_INT) 1 << count) - 1) << (width - count);
6180 }
6181 else if (code == LSHIFT || code == ASHIFT)
6182 inner <<= count;
6183 else
6184 inner = ((inner << (count % width)
6185 | (inner >> (width - (count % width)))) & mode_mask);
6186
6187 significant &= (outer | inner);
6188 }
6189 break;
6190
6191 case FFS:
6192 /* This is at most the number of bits in the mode. */
6193 significant = ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width) + 1)) - 1;
6194 break;
6195
6196 case IF_THEN_ELSE:
6197 significant &= (significant_bits (XEXP (x, 1), mode)
6198 | significant_bits (XEXP (x, 2), mode));
6199 break;
6200 }
6201
6202 return significant;
6203}
6204\f
6205/* Return the number of bits at the high-order end of X that are known to
6206 be equal to the sign bit. This number will always be between 1 and
6207 the number of bits in the mode of X. MODE is the mode to be used
6208 if X is VOIDmode. */
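/* Annotation (not in the original source): in a 32-bit mode,
   (const_int -4) is ...11111100; the CONST_INT case below complements it
   to 3 and returns 32 - floor_log2 (3) - 1 == 30, i.e. the 30 high-order
   bits all equal the sign bit.  */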
6209
6210static int
6211num_sign_bit_copies (x, mode)
6212 rtx x;
6213 enum machine_mode mode;
6214{
6215 enum rtx_code code = GET_CODE (x);
6216 int bitwidth;
6217 int num0, num1, result;
6218 unsigned HOST_WIDE_INT sig;
6219 rtx tem;
6220
6221 /* If we weren't given a mode, use the mode of X. If the mode is still
6222 VOIDmode, we don't know anything. */
6223
6224 if (mode == VOIDmode)
6225 mode = GET_MODE (x);
6226
6227 if (mode == VOIDmode)
6228 return 0;
6229
6230 bitwidth = GET_MODE_BITSIZE (mode);
6231
6232 switch (code)
6233 {
6234 case REG:
6235 if (significant_valid && reg_sign_bit_copies[REGNO (x)] != 0)
6236 return reg_sign_bit_copies[REGNO (x)];
6237
6238 tem = get_last_value (x);
6239 if (tem != 0)
6240 return num_sign_bit_copies (tem, mode);
6241 break;
6242
6243#ifdef BYTE_LOADS_SIGN_EXTEND
6244 case MEM:
6245 /* Some RISC machines sign-extend all loads of smaller than a word. */
6246 return MAX (1, bitwidth - GET_MODE_BITSIZE (GET_MODE (x)) + 1);
6247#endif
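/* Annotation (not in the original source): on such a machine a QImode
   load used in a 32-bit mode yields 32 - 8 + 1 == 25 here, since bit 7
   of the loaded byte is replicated through bit 31.  */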
6248
6249 case CONST_INT:
6250 /* If the constant is negative, take its 1's complement and remask.
6251 Then see how many zero bits we have. */
6252 sig = INTVAL (x) & GET_MODE_MASK (mode);
6253 if (bitwidth <= HOST_BITS_PER_WIDE_INT
6254 && (sig & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
6255 sig = (~ sig) & GET_MODE_MASK (mode);
6256
6257 return (sig == 0 ? bitwidth : bitwidth - floor_log2 (sig) - 1);
6258
6259 case SUBREG:
6260 /* If this is a SUBREG for a promoted object that is sign-extended
6261 and we are looking at it in a wider mode, we know that at least the
6262 high-order bits are known to be sign bit copies. */
6263
6264 if (SUBREG_PROMOTED_VAR_P (x) && ! SUBREG_PROMOTED_UNSIGNED_P (x))
6265 return (GET_MODE_BITSIZE (mode) - GET_MODE_BITSIZE (GET_MODE (x))
6266 + num_sign_bit_copies (SUBREG_REG (x), GET_MODE (x)));
6267
6268 /* For a smaller object, just ignore the high bits. */
6269 if (bitwidth <= GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))))
6270 {
6271 num0 = num_sign_bit_copies (SUBREG_REG (x), VOIDmode);
6272 return MAX (1, (num0
6273 - (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
6274 - bitwidth)));
6275 }
6276
6277#if defined(BYTE_LOADS_ZERO_EXTEND) || defined(BYTE_LOADS_SIGN_EXTEND)
6278 /* For paradoxical SUBREGs, just look inside since, on machines with
6279 one of these defined, we assume that operations are actually
6280 performed on the full register. Note that we are passing MODE
6281 to the recursive call, so the number of sign bit copies will
6282 remain relative to that mode, not the inner mode. */
6283
6284 if (GET_MODE_SIZE (GET_MODE (x))
6285 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
6286 return num_sign_bit_copies (SUBREG_REG (x), mode);
6287#endif
6288
6289 break;
6290
6291 case SIGN_EXTRACT:
6292 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
6293 return MAX (1, bitwidth - INTVAL (XEXP (x, 1)));
6294 break;
6295
6296 case SIGN_EXTEND:
6297 return (bitwidth - GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
6298 + num_sign_bit_copies (XEXP (x, 0), VOIDmode));
6299
6300 case TRUNCATE:
6301 /* For a smaller object, just ignore the high bits. */
6302 num0 = num_sign_bit_copies (XEXP (x, 0), VOIDmode);
6303 return MAX (1, (num0 - (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
6304 - bitwidth)));
6305
6306 case NOT:
6307 return num_sign_bit_copies (XEXP (x, 0), mode);
6308
6309 case ROTATE: case ROTATERT:
6310 /* If we are rotating left by a number of bits less than the number
6311 of sign bit copies, we can just subtract that amount from the
6312 number. */
6313 if (GET_CODE (XEXP (x, 1)) == CONST_INT
6314 && INTVAL (XEXP (x, 1)) >= 0 && INTVAL (XEXP (x, 1)) < bitwidth)
6315 {
6316 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
6317 return MAX (1, num0 - (code == ROTATE ? INTVAL (XEXP (x, 1))
6318 : bitwidth - INTVAL (XEXP (x, 1))));
6319 }
6320 break;
6321
6322 case NEG:
6323 /* In general, this subtracts one sign bit copy. But if the value
6324 is known to be positive, the number of sign bit copies is the
6325 same as that of the input. Finally, if the input has just one
6326 significant bit, all the bits are copies of the sign bit. */
6327 sig = significant_bits (XEXP (x, 0), mode);
6328 if (sig == 1)
6329 return bitwidth;
6330
6331 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
6332 if (num0 > 1
6333 && bitwidth <= HOST_BITS_PER_WIDE_INT
6334 && (((HOST_WIDE_INT) 1 << (bitwidth - 1)) & sig))
6335 num0--;
6336
6337 return num0;
6338
6339 case IOR: case AND: case XOR:
6340 case SMIN: case SMAX: case UMIN: case UMAX:
6341 /* Logical operations will preserve the number of sign-bit copies.
6342 MIN and MAX operations always return one of the operands. */
6343 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
6344 num1 = num_sign_bit_copies (XEXP (x, 1), mode);
6345 return MIN (num0, num1);
6346
6347 case PLUS: case MINUS:
6348 /* For addition and subtraction, we can have a 1-bit carry. However,
6349 if we are subtracting 1 from a positive number, there will not
6350 be such a carry. Furthermore, if the positive number is known to
6351 be 0 or 1, we know the result is either -1 or 0. */
6352
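/* Annotation (not in the original source): e.g., if X is known to be
   0 or 1, then X + (-1) is 0 or -1, every bit of which is a copy of
   the sign bit, so the test below returns BITWIDTH.  */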
6353 if (code == PLUS && XEXP (x, 1) == constm1_rtx
6354 /* Don't do this if XEXP (x, 0) is a paradoxical subreg
6355 because in principle we don't know what the high bits are. */
6356 && !(GET_CODE (XEXP (x, 0)) == SUBREG
6357 && (GET_MODE_SIZE (GET_MODE (XEXP (XEXP (x, 0), 0)))
6358 < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))))
6359 {
6360 sig = significant_bits (XEXP (x, 0), mode);
6361 if ((((HOST_WIDE_INT) 1 << (bitwidth - 1)) & sig) == 0)
6362 return (sig == 1 || sig == 0 ? bitwidth
6363 : bitwidth - floor_log2 (sig) - 1);
6364 }
6365
6366 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
6367 num1 = num_sign_bit_copies (XEXP (x, 1), mode);
6368 return MAX (1, MIN (num0, num1) - 1);
6369
6370 case MULT:
6371 /* The number of bits of the product is the sum of the number of
6372 bits of both terms. However, unless one of the terms is known
6373 to be positive, we must allow for an additional bit since negating
6374 a negative number can remove one sign bit copy. */
6375
6376 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
6377 num1 = num_sign_bit_copies (XEXP (x, 1), mode);
6378
6379 result = bitwidth - (bitwidth - num0) - (bitwidth - num1);
6380 if (result > 0
6381 && bitwidth <= HOST_BITS_PER_WIDE_INT
6382 && ((significant_bits (XEXP (x, 0), mode)
6383 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
6384 && ((significant_bits (XEXP (x, 1), mode)
6385 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
6386 result--;
6387
6388 return MAX (1, result);
6389
6390 case UDIV:
6391 /* The result must be <= the first operand. */
6392 return num_sign_bit_copies (XEXP (x, 0), mode);
6393
6394 case UMOD:
6395 /* The result must be <= the second operand. */
6396 return num_sign_bit_copies (XEXP (x, 1), mode);
6397
6398 case DIV:
6399 /* Similar to unsigned division, except that we have to worry about
6400 the case where the divisor is negative, in which case we have
6401 to add 1. */
6402 result = num_sign_bit_copies (XEXP (x, 0), mode);
6403 if (result > 1
6404 && bitwidth <= HOST_BITS_PER_WIDE_INT
6405 && (significant_bits (XEXP (x, 1), mode)
6406 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
6407 result--;
6408
6409 return result;
6410
6411 case MOD:
6412 result = num_sign_bit_copies (XEXP (x, 1), mode);
6413 if (result > 1
6414 && bitwidth <= HOST_BITS_PER_WIDE_INT
6415 && (significant_bits (XEXP (x, 1), mode)
6416 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
6417 result--;
6418
6419 return result;
6420
6421 case ASHIFTRT:
6422 /* Shifts by a constant add to the number of bits equal to the
6423 sign bit. */
6424 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
6425 if (GET_CODE (XEXP (x, 1)) == CONST_INT
6426 && INTVAL (XEXP (x, 1)) > 0)
6427 num0 = MIN (bitwidth, num0 + INTVAL (XEXP (x, 1)));
6428
6429 return num0;
6430
6431 case ASHIFT:
6432 case LSHIFT:
6433 /* Left shifts destroy copies. */
6434 if (GET_CODE (XEXP (x, 1)) != CONST_INT
6435 || INTVAL (XEXP (x, 1)) < 0
6436 || INTVAL (XEXP (x, 1)) >= bitwidth)
6437 return 1;
6438
6439 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
6440 return MAX (1, num0 - INTVAL (XEXP (x, 1)));
6441
6442 case IF_THEN_ELSE:
6443 num0 = num_sign_bit_copies (XEXP (x, 1), mode);
6444 num1 = num_sign_bit_copies (XEXP (x, 2), mode);
6445 return MIN (num0, num1);
6446
6447#if STORE_FLAG_VALUE == -1
6448 case EQ: case NE: case GE: case GT: case LE: case LT:
6449 case GEU: case GTU: case LEU: case LTU:
6450 return bitwidth;
6451#endif
6452 }
6453
6454 /* If we haven't been able to figure it out by one of the above rules,
6455 see if some of the high-order bits are known to be zero. If so,
6456 count those bits and return one less than that amount. If we can't
6457 safely compute the mask for this mode, always return BITWIDTH. */
6458
6459 if (bitwidth > HOST_BITS_PER_WIDE_INT)
6460 return bitwidth;
6461
6462 sig = significant_bits (x, mode);
6463 return sig == GET_MODE_MASK (mode) ? 1 : bitwidth - floor_log2 (sig) - 1;
6464}
6465\f
6466/* Return the number of "extended" bits there are in X, when interpreted
6467 as a quantity in MODE whose signedness is indicated by UNSIGNEDP. For
6468 unsigned quantities, this is the number of high-order zero bits.
6469 For signed quantities, this is the number of copies of the sign bit
6470 minus 1. In both cases, this function returns the number of "spare"
6471 bits. For example, if two quantities for which this function returns
6472 at least 1 are added, the addition is known not to overflow.
6473
6474 This function will always return 0 unless called during combine, which
6475 implies that it must be called from a define_split. */
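/* Annotation (not in the original source): e.g., if X is known to fit in
   8 unsigned bits in SImode, the unsigned branch below returns
   32 - 1 - 7 == 24 spare bits, so adding two such values cannot
   overflow.  */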
6476
6477int
6478extended_count (x, mode, unsignedp)
6479 rtx x;
6480 enum machine_mode mode;
6481 int unsignedp;
6482{
6483 if (significant_valid == 0)
6484 return 0;
6485
6486 return (unsignedp
6487 ? (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
6488 && (GET_MODE_BITSIZE (mode) - 1
6489 - floor_log2 (significant_bits (x, mode))))
6490 : num_sign_bit_copies (x, mode) - 1);
6491}
6492\f
6493/* This function is called from `simplify_shift_const' to merge two
6494 outer operations. Specifically, we have already found that we need
6495 to perform operation *POP0 with constant *PCONST0 at the outermost
6496 position. We would now like to also perform OP1 with constant CONST1
6497 (with *POP0 being done last).
6498
6499 Return 1 if we can do the operation and update *POP0 and *PCONST0 with
6500 the resulting operation. *PCOMP_P is set to 1 if we would need to
6501 complement the innermost operand, otherwise it is unchanged.
6502
6503 MODE is the mode in which the operation will be done. No bits outside
6504 the width of this mode matter. It is assumed that the width of this mode
6505 is smaller than or equal to HOST_BITS_PER_WIDE_INT.
6506
6507 If *POP0 or OP1 are NIL, it means no operation is required. Only NEG, PLUS,
6508 IOR, XOR, and AND are supported. We may set *POP0 to SET if the proper
6509 result is simply *PCONST0.
6510
6511 If the resulting operation cannot be expressed as one operation, we
6512 return 0 and do not change *POP0, *PCONST0, and *PCOMP_P. */
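/* Annotation (not in the original source): e.g., merging *POP0 == IOR,
   *PCONST0 == 6 with OP1 == AND, CONST1 == 6 uses the identity
   (a & 6) | 6 == 6 from the table below, so *POP0 becomes SET, *PCONST0
   stays 6, and the function returns 1.  */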
6513
6514static int
6515merge_outer_ops (pop0, pconst0, op1, const1, mode, pcomp_p)
6516 enum rtx_code *pop0;
6517 HOST_WIDE_INT *pconst0;
6518 enum rtx_code op1;
6519 HOST_WIDE_INT const1;
6520 enum machine_mode mode;
6521 int *pcomp_p;
6522{
6523 enum rtx_code op0 = *pop0;
6524 HOST_WIDE_INT const0 = *pconst0;
6525
6526 const0 &= GET_MODE_MASK (mode);
6527 const1 &= GET_MODE_MASK (mode);
6528
6529 /* If OP0 is an AND, clear unimportant bits in CONST1. */
6530 if (op0 == AND)
6531 const1 &= const0;
6532
6533 /* If OP0 or OP1 is NIL, this is easy. Similarly if they are the same or
6534 if OP0 is SET. */
6535
6536 if (op1 == NIL || op0 == SET)
6537 return 1;
6538
6539 else if (op0 == NIL)
6540 op0 = op1, const0 = const1;
6541
6542 else if (op0 == op1)
6543 {
6544 switch (op0)
6545 {
6546 case AND:
6547 const0 &= const1;
6548 break;
6549 case IOR:
6550 const0 |= const1;
6551 break;
6552 case XOR:
6553 const0 ^= const1;
6554 break;
6555 case PLUS:
6556 const0 += const1;
6557 break;
6558 case NEG:
6559 op0 = NIL;
6560 break;
6561 }
6562 }
6563
6564 /* Otherwise, if either is a PLUS or NEG, we can't do anything. */
6565 else if (op0 == PLUS || op1 == PLUS || op0 == NEG || op1 == NEG)
6566 return 0;
6567
6568 /* If the two constants aren't the same, we can't do anything. The
6569 remaining six cases can all be done. */
6570 else if (const0 != const1)
6571 return 0;
6572
6573 else
6574 switch (op0)
6575 {
6576 case IOR:
6577 if (op1 == AND)
6578 /* (a & b) | b == b */
6579 op0 = SET;
6580 else /* op1 == XOR */
6581 /* (a ^ b) | b == a | b */
6582 ;
6583 break;
6584
6585 case XOR:
6586 if (op1 == AND)
6587 /* (a & b) ^ b == (~a) & b */
6588 op0 = AND, *pcomp_p = 1;
6589 else /* op1 == IOR */
6590 /* (a | b) ^ b == a & ~b */
6591 op0 = AND, *pconst0 = ~ const0;
6592 break;
6593
6594 case AND:
6595 if (op1 == IOR)
6596 /* (a | b) & b == b */
6597 op0 = SET;
6598 else /* op1 == XOR */
6599 /* (a ^ b) & b == (~a) & b */
6600 *pcomp_p = 1;
6601 break;
6602 }
6603
6604 /* Check for NO-OP cases. */
6605 const0 &= GET_MODE_MASK (mode);
6606 if (const0 == 0
6607 && (op0 == IOR || op0 == XOR || op0 == PLUS))
6608 op0 = NIL;
6609 else if (const0 == 0 && op0 == AND)
6610 op0 = SET;
6611 else if (const0 == GET_MODE_MASK (mode) && op0 == AND)
6612 op0 = NIL;
6613
6614 *pop0 = op0;
6615 *pconst0 = const0;
6616
6617 return 1;
6618}
6619\f
6620/* Simplify a shift of VAROP by COUNT bits. CODE says what kind of shift.
6621 The result of the shift is RESULT_MODE. X, if non-zero, is an expression
6622 that we started with.
6623
6624 The shift is normally computed in the widest mode we find in VAROP, as
6625 long as it isn't a different number of words than RESULT_MODE. Exceptions
6626 are ASHIFTRT and ROTATE, which are always done in their original mode. */
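/* Annotation (not in the original source): e.g., two logical right shifts
   in the same mode merge by adding their counts, so
   (lshiftrt (lshiftrt X 2) 3) becomes (lshiftrt X 5), with any mask that
   becomes necessary handled through merge_outer_ops above.  */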
6627
6628static rtx
6629simplify_shift_const (x, code, result_mode, varop, count)
6630 rtx x;
6631 enum rtx_code code;
6632 enum machine_mode result_mode;
6633 rtx varop;
6634 int count;
6635{
6636 enum rtx_code orig_code = code;
6637 int orig_count = count;
6638 enum machine_mode mode = result_mode;
6639 enum machine_mode shift_mode, tmode;
6640 int mode_words
6641 = (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
6642 /* We form (outer_op (code varop count) (outer_const)). */
6643 enum rtx_code outer_op = NIL;
6644 HOST_WIDE_INT outer_const;
6645 rtx const_rtx;
6646 int complement_p = 0;
6647 rtx new;
6648
6649 /* If we were given an invalid count, don't do anything except exactly
6650 what was requested. */
6651
6652 if (count < 0 || count > GET_MODE_BITSIZE (mode))
6653 {
6654 if (x)
6655 return x;
6656
6657 return gen_rtx (code, mode, varop, GEN_INT (count));
6658 }
6659
6660 /* Unless one of the branches of the `if' in this loop does a `continue',
6661 we will `break' the loop after the `if'. */
6662
6663 while (count != 0)
6664 {
6665 /* If we have an operand of (clobber (const_int 0)), just return that
6666 value. */
6667 if (GET_CODE (varop) == CLOBBER)
6668 return varop;
6669
6670 /* If we discovered we had to complement VAROP, leave. Making a NOT
6671 here would cause an infinite loop. */
6672 if (complement_p)
6673 break;
6674
6675 /* Convert ROTATERT to ROTATE. */
6676 if (code == ROTATERT)
6677 code = ROTATE, count = GET_MODE_BITSIZE (result_mode) - count;
6678
6679 /* Canonicalize LSHIFT to ASHIFT. */
6680 if (code == LSHIFT)
6681 code = ASHIFT;
6682
6683 /* We need to determine what mode we will do the shift in. If the
6684 shift is an ASHIFTRT or ROTATE, we must always do it in the mode it
6685 was originally done in. Otherwise, we can do it in MODE, the widest
6686 mode encountered. */
6687 shift_mode = (code == ASHIFTRT || code == ROTATE ? result_mode : mode);
6688
6689 /* Handle cases where the count is greater than the size of the mode
6690 minus 1. For ASHIFT, use the size minus one as the count (this can
6691 occur when simplifying (lshiftrt (ashiftrt ..))). For rotates,
6692 take the count modulo the size. For other shifts, the result is
6693 zero.
6694
6695 Since these shifts are being produced by the compiler by combining
6696 multiple operations, each of which are defined, we know what the
6697 result is supposed to be. */
6698
6699 if (count > GET_MODE_BITSIZE (shift_mode) - 1)
6700 {
6701 if (code == ASHIFTRT)
6702 count = GET_MODE_BITSIZE (shift_mode) - 1;
6703 else if (code == ROTATE || code == ROTATERT)
6704 count %= GET_MODE_BITSIZE (shift_mode);
6705 else
6706 {
6707 /* We can't simply return zero because there may be an
6708 outer op. */
6709 varop = const0_rtx;
6710 count = 0;
6711 break;
6712 }
6713 }
6714
6715 /* Negative counts are invalid and should not have been made (a
6716 programmer-specified negative count should have been handled
6717 above). */
6718 else if (count < 0)
6719 abort ();
6720
6721 /* An arithmetic right shift of a quantity known to be -1 or 0
6722 is a no-op. */
6723 if (code == ASHIFTRT
6724 && (num_sign_bit_copies (varop, shift_mode)
6725 == GET_MODE_BITSIZE (shift_mode)))
6726 {
6727 count = 0;
6728 break;
6729 }
6730
6731 /* We simplify the tests below and elsewhere by converting
6732 ASHIFTRT to LSHIFTRT if we know the sign bit is clear.
6733 `make_compound_operation' will convert it to an ASHIFTRT for
6734 those machines (such as Vax) that don't have a LSHIFTRT. */
6735 if (GET_MODE_BITSIZE (shift_mode) <= HOST_BITS_PER_WIDE_INT
6736 && code == ASHIFTRT
6737 && ((significant_bits (varop, shift_mode)
6738 & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (shift_mode) - 1)))
6739 == 0))
6740 code = LSHIFTRT;
6741
6742 switch (GET_CODE (varop))
6743 {
6744 case SIGN_EXTEND:
6745 case ZERO_EXTEND:
6746 case SIGN_EXTRACT:
6747 case ZERO_EXTRACT:
6748 new = expand_compound_operation (varop);
6749 if (new != varop)
6750 {
6751 varop = new;
6752 continue;
6753 }
6754 break;
6755
6756 case MEM:
6757 /* If we have (xshiftrt (mem ...) C) and C is MODE_WIDTH
6758 minus the width of a smaller mode, we can do this with a
6759 SIGN_EXTEND or ZERO_EXTEND from the narrower memory location. */
6760 if ((code == ASHIFTRT || code == LSHIFTRT)
6761 && ! mode_dependent_address_p (XEXP (varop, 0))
6762 && ! MEM_VOLATILE_P (varop)
6763 && (tmode = mode_for_size (GET_MODE_BITSIZE (mode) - count,
6764 MODE_INT, 1)) != BLKmode)
6765 {
6766#if BYTES_BIG_ENDIAN
6767 new = gen_rtx (MEM, tmode, XEXP (varop, 0));
6768#else
6769 new = gen_rtx (MEM, tmode,
6770 plus_constant (XEXP (varop, 0),
6771 count / BITS_PER_UNIT));
6772 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (varop);
6773 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (varop);
6774 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (varop);
6775#endif
6776 varop = gen_rtx_combine (code == ASHIFTRT ? SIGN_EXTEND
6777 : ZERO_EXTEND, mode, new);
6778 count = 0;
6779 continue;
6780 }
6781 break;
6782
6783 case USE:
6784 /* Similar to the case above, except that we can only do this if
6785 the resulting mode is the same as that of the underlying
6786 MEM and adjust the address depending on the *bits* endianness
6787 because of the way that bit-field extract insns are defined. */
6788 if ((code == ASHIFTRT || code == LSHIFTRT)
6789 && (tmode = mode_for_size (GET_MODE_BITSIZE (mode) - count,
6790 MODE_INT, 1)) != BLKmode
6791 && tmode == GET_MODE (XEXP (varop, 0)))
6792 {
6793#if BITS_BIG_ENDIAN
6794 new = XEXP (varop, 0);
6795#else
6796 new = copy_rtx (XEXP (varop, 0));
6797 SUBST (XEXP (new, 0),
6798 plus_constant (XEXP (new, 0),
6799 count / BITS_PER_UNIT));
6800#endif
6801
6802 varop = gen_rtx_combine (code == ASHIFTRT ? SIGN_EXTEND
6803 : ZERO_EXTEND, mode, new);
6804 count = 0;
6805 continue;
6806 }
6807 break;
6808
6809 case SUBREG:
6810 /* If VAROP is a SUBREG, strip it as long as the inner operand has
6811 the same number of words as what we've seen so far. Then store
6812 the widest mode in MODE. */
6813 if (subreg_lowpart_p (varop)
6814 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop)))
6815 > GET_MODE_SIZE (GET_MODE (varop)))
6816 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop)))
6817 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
6818 == mode_words))
6819 {
6820 varop = SUBREG_REG (varop);
6821 if (GET_MODE_SIZE (GET_MODE (varop)) > GET_MODE_SIZE (mode))
6822 mode = GET_MODE (varop);
6823 continue;
6824 }
6825 break;
6826
6827 case MULT:
6828 /* Some machines use MULT instead of ASHIFT because MULT
6829 is cheaper. But it is still better on those machines to
6830 merge two shifts into one. */
6831 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
6832 && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0)
6833 {
6834 varop = gen_binary (ASHIFT, GET_MODE (varop), XEXP (varop, 0),
6835 GEN_INT (exact_log2 (INTVAL (XEXP (varop, 1)))));
6836 continue;
6837 }
6838 break;
6839
6840 case UDIV:
6841 /* Similar, for when divides are cheaper. */
6842 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
6843 && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0)
6844 {
6845 varop = gen_binary (LSHIFTRT, GET_MODE (varop), XEXP (varop, 0),
6846 GEN_INT (exact_log2 (INTVAL (XEXP (varop, 1)))));
6847 continue;
6848 }
6849 break;
6850
6851 case ASHIFTRT:
6852 /* If we are extracting just the sign bit of an arithmetic right
6853 shift, that shift is not needed. */
6854 if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1)
6855 {
6856 varop = XEXP (varop, 0);
6857 continue;
6858 }
6859
6860 /* ... fall through ... */
6861
6862 case LSHIFTRT:
6863 case ASHIFT:
6864 case LSHIFT:
6865 case ROTATE:
6866 /* Here we have two nested shifts. The result is usually the
6867 AND of a new shift with a mask. We compute the result below. */
6868 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
6869 && INTVAL (XEXP (varop, 1)) >= 0
6870 && INTVAL (XEXP (varop, 1)) < GET_MODE_BITSIZE (GET_MODE (varop))
6871 && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
6872 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
6873 {
6874 enum rtx_code first_code = GET_CODE (varop);
6875 int first_count = INTVAL (XEXP (varop, 1));
6876 unsigned HOST_WIDE_INT mask;
6877 rtx mask_rtx;
6878 rtx inner;
6879
6880 if (first_code == LSHIFT)
6881 first_code = ASHIFT;
6882
6883 /* We have one common special case. We can't do any merging if
6884 the inner code is an ASHIFTRT of a smaller mode. However, if
6885 we have (ashift:M1 (subreg:M1 (ashiftrt:M2 FOO C1) 0) C2)
6886 with C2 == GET_MODE_BITSIZE (M1) - GET_MODE_BITSIZE (M2),
6887 we can convert it to
6888 (ashiftrt:M1 (ashift:M1 (and:M1 (subreg:M1 FOO 0) C3) C2) C1).
6889 This simplifies certain SIGN_EXTEND operations. */
6890 if (code == ASHIFT && first_code == ASHIFTRT
6891 && (GET_MODE_BITSIZE (result_mode)
6892 - GET_MODE_BITSIZE (GET_MODE (varop))) == count)
6893 {
6894 /* C3 has the low-order C1 bits zero. */
6895
6896 mask = (GET_MODE_MASK (mode)
6897 & ~ (((HOST_WIDE_INT) 1 << first_count) - 1));
6898
6899 varop = simplify_and_const_int (NULL_RTX, result_mode,
6900 XEXP (varop, 0), mask);
6901 varop = simplify_shift_const (NULL_RTX, ASHIFT, result_mode,
6902 varop, count);
6903 count = first_count;
6904 code = ASHIFTRT;
6905 continue;
6906 }
6907
6908 /* If this was (ashiftrt (ashift foo C1) C2) and FOO has more
6909 than C1 high-order bits equal to the sign bit, we can convert
6910 this to either an ASHIFT or an ASHIFTRT depending on the
6911 two counts.
6912
6913 We cannot do this if VAROP's mode is not SHIFT_MODE. */
6914
6915 if (code == ASHIFTRT && first_code == ASHIFT
6916 && GET_MODE (varop) == shift_mode
6917 && (num_sign_bit_copies (XEXP (varop, 0), shift_mode)
6918 > first_count))
6919 {
6920 count -= first_count;
6921 if (count < 0)
6922 count = - count, code = ASHIFT;
6923 varop = XEXP (varop, 0);
6924 continue;
6925 }
6926
6927 /* There are some cases we can't do. If CODE is ASHIFTRT,
6928 we can only do this if FIRST_CODE is also ASHIFTRT.
6929
6930 We can't do the case when CODE is ROTATE and FIRST_CODE is
6931 ASHIFTRT.
6932
6933 If the mode of this shift is not the mode of the outer shift,
6934 we can't do this if either shift is ASHIFTRT or ROTATE.
6935
6936 Finally, we can't do any of these if the mode is too wide
6937 unless the codes are the same.
6938
6939 Handle the case where the shift codes are the same
6940 first. */
6941
6942 if (code == first_code)
6943 {
6944 if (GET_MODE (varop) != result_mode
6945 && (code == ASHIFTRT || code == ROTATE))
6946 break;
6947
6948 count += first_count;
6949 varop = XEXP (varop, 0);
6950 continue;
6951 }
6952
6953 if (code == ASHIFTRT
6954 || (code == ROTATE && first_code == ASHIFTRT)
6955 || GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT
6956 || (GET_MODE (varop) != result_mode
6957 && (first_code == ASHIFTRT || first_code == ROTATE
6958 || code == ROTATE)))
6959 break;
6960
6961 /* To compute the mask to apply after the shift, shift the
6962 significant bits of the inner shift the same way the
6963 outer shift will. */
6964
6965 mask_rtx = GEN_INT (significant_bits (varop, GET_MODE (varop)));
6966
6967 mask_rtx
6968 = simplify_binary_operation (code, result_mode, mask_rtx,
6969 GEN_INT (count));
6970
6971 /* Give up if we can't compute an outer operation to use. */
6972 if (mask_rtx == 0
6973 || GET_CODE (mask_rtx) != CONST_INT
6974 || ! merge_outer_ops (&outer_op, &outer_const, AND,
6975 INTVAL (mask_rtx),
6976 result_mode, &complement_p))
6977 break;
6978
6979 /* If the shifts are in the same direction, we add the
6980 counts. Otherwise, we subtract them. */
6981 if ((code == ASHIFTRT || code == LSHIFTRT)
6982 == (first_code == ASHIFTRT || first_code == LSHIFTRT))
6983 count += first_count;
6984 else
6985 count -= first_count;
6986
6987 /* If COUNT is positive, the new shift is usually CODE,
6988 except for the two exceptions below, in which case it is
6989 FIRST_CODE. If the count is negative, FIRST_CODE should
6990 always be used. */
6991 if (count > 0
6992 && ((first_code == ROTATE && code == ASHIFT)
6993 || (first_code == ASHIFTRT && code == LSHIFTRT)))
6994 code = first_code;
6995 else if (count < 0)
6996 code = first_code, count = - count;
6997
6998 varop = XEXP (varop, 0);
6999 continue;
7000 }
7001
7002 /* If we have (A << B << C) for any shift, we can convert this to
7003 (A << C << B). This wins if A is a constant. Only try this if
7004 B is not a constant. */
7005
7006 else if (GET_CODE (varop) == code
7007 && GET_CODE (XEXP (varop, 1)) != CONST_INT
7008 && 0 != (new
7009 = simplify_binary_operation (code, mode,
7010 XEXP (varop, 0),
7011 GEN_INT (count))))
7012 {
7013 varop = gen_rtx_combine (code, mode, new, XEXP (varop, 1));
7014 count = 0;
7015 continue;
7016 }
7017 break;
7018
7019 case NOT:
7020 /* Make this fit the case below. */
7021 varop = gen_rtx_combine (XOR, mode, XEXP (varop, 0),
7022 GEN_INT (GET_MODE_MASK (mode)));
7023 continue;
7024
7025 case IOR:
7026 case AND:
7027 case XOR:
7028 /* If we have (xshiftrt (ior (plus X (const_int -1)) X) C)
7029 with C the size of VAROP - 1 and the shift is logical if
7030 STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
7031 we have an (le X 0) operation. If we have an arithmetic shift
7032 and STORE_FLAG_VALUE is 1 or we have a logical shift with
7033 STORE_FLAG_VALUE of -1, we have a (neg (le X 0)) operation. */
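/* Annotation (not in the original source): a spot check of the claim
   above with STORE_FLAG_VALUE == -1 and an arithmetic shift: for X == 5,
   (5 - 1) | 5 == 5 has a clear sign bit, so the shift gives 0 (false);
   for X == 0 or X == -3 the IOR has its sign bit set and the shift gives
   -1 (true), matching (le X 0).  */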
7034
7035 if (GET_CODE (varop) == IOR && GET_CODE (XEXP (varop, 0)) == PLUS
7036 && XEXP (XEXP (varop, 0), 1) == constm1_rtx
7037 && (STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
7038 && (code == LSHIFTRT || code == ASHIFTRT)
7039 && count == GET_MODE_BITSIZE (GET_MODE (varop)) - 1
7040 && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
7041 {
7042 count = 0;
7043 varop = gen_rtx_combine (LE, GET_MODE (varop), XEXP (varop, 1),
7044 const0_rtx);
7045
7046 if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
7047 varop = gen_rtx_combine (NEG, GET_MODE (varop), varop);
7048
7049 continue;
7050 }
7051
7052 /* If we have (shift (logical)), move the logical to the outside
7053 to allow it to possibly combine with another logical and the
7054 shift to combine with another shift. This also canonicalizes to
7055 what a ZERO_EXTRACT looks like. Also, some machines have
7056 (and (shift)) insns. */
7057
7058 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
7059 && (new = simplify_binary_operation (code, result_mode,
7060 XEXP (varop, 1),
7061 GEN_INT (count))) != 0
7062 && merge_outer_ops (&outer_op, &outer_const, GET_CODE (varop),
7063 INTVAL (new), result_mode, &complement_p))
7064 {
7065 varop = XEXP (varop, 0);
7066 continue;
7067 }
7068
7069 /* If we can't do that, try to simplify the shift in each arm of the
7070 logical expression, make a new logical expression, and apply
7071 the inverse distributive law. */
7072 {
7073 rtx lhs = simplify_shift_const (NULL_RTX, code, result_mode,
7074 XEXP (varop, 0), count);
7075 rtx rhs = simplify_shift_const (NULL_RTX, code, result_mode,
7076 XEXP (varop, 1), count);
7077
7078 varop = gen_binary (GET_CODE (varop), result_mode, lhs, rhs);
7079 varop = apply_distributive_law (varop);
7080
7081 count = 0;
7082 }
7083 break;
7084
7085 case EQ:
7086 /* convert (lshift (eq FOO 0) C) to (xor FOO 1) if STORE_FLAG_VALUE
7087 says that the sign bit can be tested, FOO has mode MODE, C is
7088 GET_MODE_BITSIZE (MODE) - 1, and FOO has only the low-order bit
7089 significant. */
7090 if (code == LSHIFT
7091 && XEXP (varop, 1) == const0_rtx
7092 && GET_MODE (XEXP (varop, 0)) == result_mode
7093 && count == GET_MODE_BITSIZE (result_mode) - 1
7094 && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
7095 && ((STORE_FLAG_VALUE
7096 & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (result_mode) - 1))))
7097 && significant_bits (XEXP (varop, 0), result_mode) == 1
7098 && merge_outer_ops (&outer_op, &outer_const, XOR,
7099 (HOST_WIDE_INT) 1, result_mode,
7100 &complement_p))
7101 {
7102 varop = XEXP (varop, 0);
7103 count = 0;
7104 continue;
7105 }
7106 break;
7107
7108 case NEG:
7109 /* (lshiftrt (neg A) C) where A is either 0 or 1 and C is one less
7110 than the number of bits in the mode is equivalent to A. */
7111 if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1
7112 && significant_bits (XEXP (varop, 0), result_mode) == 1)
7113 {
7114 varop = XEXP (varop, 0);
7115 count = 0;
7116 continue;
7117 }
7118
7119 /* NEG commutes with ASHIFT since it is multiplication. Move the
7120 NEG outside to allow shifts to combine. */
7121 if (code == ASHIFT
7122 && merge_outer_ops (&outer_op, &outer_const, NEG,
7123 (HOST_WIDE_INT) 0, result_mode,
7124 &complement_p))
7125 {
7126 varop = XEXP (varop, 0);
7127 continue;
7128 }
7129 break;
7130
7131 case PLUS:
7132 /* (lshiftrt (plus A -1) C) where A is either 0 or 1 and C
7133 is one less than the number of bits in the mode is
7134 equivalent to (xor A 1). */
7135 if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1
7136 && XEXP (varop, 1) == constm1_rtx
7137 && significant_bits (XEXP (varop, 0), result_mode) == 1
7138 && merge_outer_ops (&outer_op, &outer_const, XOR,
7139 (HOST_WIDE_INT) 1, result_mode,
7140 &complement_p))
7141 {
7142 count = 0;
7143 varop = XEXP (varop, 0);
7144 continue;
7145 }
7146
7147 /* If we have (xshiftrt (plus FOO BAR) C), and the only bits
7148 significant in BAR are those being shifted out and those
7149 bits are known zero in FOO, we can replace the PLUS with FOO.
7150 Similarly in the other operand order. This code occurs when
7151 we are computing the size of a variable-size array. */
7152
7153 if ((code == ASHIFTRT || code == LSHIFTRT)
7154 && count < HOST_BITS_PER_WIDE_INT
7155 && significant_bits (XEXP (varop, 1), result_mode) >> count == 0
7156 && (significant_bits (XEXP (varop, 1), result_mode)
7157 & significant_bits (XEXP (varop, 0), result_mode)) == 0)
7158 {
7159 varop = XEXP (varop, 0);
7160 continue;
7161 }
7162 else if ((code == ASHIFTRT || code == LSHIFTRT)
7163 && count < HOST_BITS_PER_WIDE_INT
7164 && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
7165 && 0 == (significant_bits (XEXP (varop, 0), result_mode)
7166 >> count)
7167 && 0 == (significant_bits (XEXP (varop, 0), result_mode)
7168 & significant_bits (XEXP (varop, 1),
7169 result_mode)))
7170 {
7171 varop = XEXP (varop, 1);
7172 continue;
7173 }
7174
7175 /* (ashift (plus foo C) N) is (plus (ashift foo N) C'). */
7176 if (code == ASHIFT
7177 && GET_CODE (XEXP (varop, 1)) == CONST_INT
7178 && (new = simplify_binary_operation (ASHIFT, result_mode,
7179 XEXP (varop, 1),
7180 GEN_INT (count))) != 0
7181 && merge_outer_ops (&outer_op, &outer_const, PLUS,
7182 INTVAL (new), result_mode, &complement_p))
7183 {
7184 varop = XEXP (varop, 0);
7185 continue;
7186 }
7187 break;
7188
7189 case MINUS:
7190 /* If we have (xshiftrt (minus (ashiftrt X C) X) C)
7191 with C the size of VAROP - 1 and the shift is logical if
7192 STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
7193 we have a (gt X 0) operation. If the shift is arithmetic with
7194 STORE_FLAG_VALUE of 1 or logical with STORE_FLAG_VALUE == -1,
7195 we have a (neg (gt X 0)) operation. */
7196
7197 if (GET_CODE (XEXP (varop, 0)) == ASHIFTRT
7198 && count == GET_MODE_BITSIZE (GET_MODE (varop)) - 1
7199 && (STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
7200 && (code == LSHIFTRT || code == ASHIFTRT)
7201 && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT
7202 && INTVAL (XEXP (XEXP (varop, 0), 1)) == count
7203 && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
7204 {
7205 count = 0;
7206 varop = gen_rtx_combine (GT, GET_MODE (varop), XEXP (varop, 1),
7207 const0_rtx);
7208
7209 if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
7210 varop = gen_rtx_combine (NEG, GET_MODE (varop), varop);
7211
7212 continue;
7213 }
7214 break;
7215 }
7216
7217 break;
7218 }
7219
7220 /* We need to determine what mode to do the shift in. If the shift is
7221 an ASHIFTRT or ROTATE, we must always do it in the mode it was originally
7222 done in. Otherwise, we can do it in MODE, the widest mode encountered.
7223 The code we care about is that of the shift that will actually be done,
7224 not the shift that was originally requested. */
7225 shift_mode = (code == ASHIFTRT || code == ROTATE ? result_mode : mode);
7226
7227 /* We have now finished analyzing the shift. The result should be
7228 a shift of type CODE with SHIFT_MODE shifting VAROP COUNT places. If
7229 OUTER_OP is non-NIL, it is an operation that needs to be applied
7230 to the result of the shift. OUTER_CONST is the relevant constant,
7231 but we must turn off all bits turned off in the shift.
7232
7233 If we were passed a value for X, see if we can use any pieces of
7234 it. If not, make new rtx. */
7235
7236 if (x && GET_RTX_CLASS (GET_CODE (x)) == '2'
7237 && GET_CODE (XEXP (x, 1)) == CONST_INT
7238 && INTVAL (XEXP (x, 1)) == count)
7239 const_rtx = XEXP (x, 1);
7240 else
7241 const_rtx = GEN_INT (count);
7242
7243 if (x && GET_CODE (XEXP (x, 0)) == SUBREG
7244 && GET_MODE (XEXP (x, 0)) == shift_mode
7245 && SUBREG_REG (XEXP (x, 0)) == varop)
7246 varop = XEXP (x, 0);
7247 else if (GET_MODE (varop) != shift_mode)
7248 varop = gen_lowpart_for_combine (shift_mode, varop);
7249
7250 /* If we can't make the SUBREG, try to return what we were given. */
7251 if (GET_CODE (varop) == CLOBBER)
7252 return x ? x : varop;
7253
7254 new = simplify_binary_operation (code, shift_mode, varop, const_rtx);
7255 if (new != 0)
7256 x = new;
7257 else
7258 {
7259 if (x == 0 || GET_CODE (x) != code || GET_MODE (x) != shift_mode)
7260 x = gen_rtx_combine (code, shift_mode, varop, const_rtx);
7261
7262 SUBST (XEXP (x, 0), varop);
7263 SUBST (XEXP (x, 1), const_rtx);
7264 }
7265
7266 /* If we were doing a LSHIFTRT in a wider mode than it was originally,
7267 turn off all the bits that the shift would have turned off. */
7268 if (orig_code == LSHIFTRT && result_mode != shift_mode)
7269 x = simplify_and_const_int (NULL_RTX, shift_mode, x,
7270 GET_MODE_MASK (result_mode) >> orig_count);
7271
7272 /* Do the remainder of the processing in RESULT_MODE. */
7273 x = gen_lowpart_for_combine (result_mode, x);
7274
7275 /* If COMPLEMENT_P is set, we have to complement X before doing the outer
7276 operation. */
7277 if (complement_p)
7278 x = gen_unary (NOT, result_mode, x);
7279
7280 if (outer_op != NIL)
7281 {
7282 if (GET_MODE_BITSIZE (result_mode) < HOST_BITS_PER_WIDE_INT)
7283 outer_const &= GET_MODE_MASK (result_mode);
7284
7285 if (outer_op == AND)
7286 x = simplify_and_const_int (NULL_RTX, result_mode, x, outer_const);
7287 else if (outer_op == SET)
7288 /* This means that we have determined that the result is
7289 equivalent to a constant. This should be rare. */
7290 x = GEN_INT (outer_const);
7291 else if (GET_RTX_CLASS (outer_op) == '1')
7292 x = gen_unary (outer_op, result_mode, x);
7293 else
7294 x = gen_binary (outer_op, result_mode, x, GEN_INT (outer_const));
7295 }
7296
7297 return x;
7298}
7299\f
7300/* Like recog, but we receive the address of a pointer to a new pattern.
7301 We try to match the rtx that the pointer points to.
7302 If that fails, we may try to modify or replace the pattern,
7303 storing the replacement into the same pointer object.
7304
7305 Modifications include deletion or addition of CLOBBERs.
7306
7307 PNOTES is a pointer to a location where any REG_UNUSED notes added for
7308 the CLOBBERs are placed.
7309
7310 The value is the final insn code from the pattern ultimately matched,
7311 or -1. */
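/* Annotation (not in the original source): a typical call is
     insn_code = recog_for_combine (&newpat, i3, &new_notes);
   where a negative return means the combination does not match, and
   NEWPAT may come back wrapped in a PARALLEL with CLOBBERs added.  */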
7312
7313static int
7314recog_for_combine (pnewpat, insn, pnotes)
7315 rtx *pnewpat;
7316 rtx insn;
7317 rtx *pnotes;
7318{
7319 register rtx pat = *pnewpat;
7320 int insn_code_number;
7321 int num_clobbers_to_add = 0;
7322 int i;
7323 rtx notes = 0;
7324
7325 /* Is the result of combination a valid instruction? */
7326 insn_code_number = recog (pat, insn, &num_clobbers_to_add);
7327
7328 /* If it isn't, there is the possibility that we previously had an insn
7329 that clobbered some register as a side effect, but the combined
7330 insn doesn't need to do that. So try once more without the clobbers
7331 unless this represents an ASM insn. */
7332
7333 if (insn_code_number < 0 && ! check_asm_operands (pat)
7334 && GET_CODE (pat) == PARALLEL)
7335 {
7336 int pos;
7337
7338 for (pos = 0, i = 0; i < XVECLEN (pat, 0); i++)
7339 if (GET_CODE (XVECEXP (pat, 0, i)) != CLOBBER)
7340 {
7341 if (i != pos)
7342 SUBST (XVECEXP (pat, 0, pos), XVECEXP (pat, 0, i));
7343 pos++;
7344 }
7345
7346 SUBST_INT (XVECLEN (pat, 0), pos);
7347
7348 if (pos == 1)
7349 pat = XVECEXP (pat, 0, 0);
7350
7351 insn_code_number = recog (pat, insn, &num_clobbers_to_add);
7352 }
7353
7354 /* If we had any clobbers to add, make a new pattern that contains
7355 them. Then check to make sure that all of them are dead. */
7356 if (num_clobbers_to_add)
7357 {
7358 rtx newpat = gen_rtx (PARALLEL, VOIDmode,
7359 gen_rtvec (GET_CODE (pat) == PARALLEL
7360 ? XVECLEN (pat, 0) + num_clobbers_to_add
7361 : num_clobbers_to_add + 1));
7362
7363 if (GET_CODE (pat) == PARALLEL)
7364 for (i = 0; i < XVECLEN (pat, 0); i++)
7365 XVECEXP (newpat, 0, i) = XVECEXP (pat, 0, i);
7366 else
7367 XVECEXP (newpat, 0, 0) = pat;
7368
7369 add_clobbers (newpat, insn_code_number);
7370
7371 for (i = XVECLEN (newpat, 0) - num_clobbers_to_add;
7372 i < XVECLEN (newpat, 0); i++)
7373 {
7374 if (GET_CODE (XEXP (XVECEXP (newpat, 0, i), 0)) == REG
7375 && ! reg_dead_at_p (XEXP (XVECEXP (newpat, 0, i), 0), insn))
7376 return -1;
7377 notes = gen_rtx (EXPR_LIST, REG_UNUSED,
7378 XEXP (XVECEXP (newpat, 0, i), 0), notes);
7379 }
7380 pat = newpat;
7381 }
7382
7383 *pnewpat = pat;
7384 *pnotes = notes;
7385
7386 return insn_code_number;
7387}
7388\f
7389/* Like gen_lowpart but for use by combine. In combine it is not possible
7390 to create any new pseudoregs. However, it is safe to create
7391 invalid memory addresses, because combine will try to recognize
7392 them and all they will do is make the combine attempt fail.
7393
7394 If for some reason this cannot do its job, an rtx
7395 (clobber (const_int 0)) is returned.
7396 An insn containing that will not be recognized. */
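/* Annotation (not in the original source): e.g.,
   gen_lowpart_for_combine (QImode, x) returns the low byte of X when it
   can be expressed without new pseudos (a narrower MEM at an adjusted
   offset, a SUBREG, etc.); callers must be prepared for the
   (clobber (const_int 0)) failure value instead.  */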
7397
7398#undef gen_lowpart
7399
7400static rtx
7401gen_lowpart_for_combine (mode, x)
7402 enum machine_mode mode;
7403 register rtx x;
7404{
7405 rtx result;
7406
7407 if (GET_MODE (x) == mode)
7408 return x;
7409
7410 if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
7411 return gen_rtx (CLOBBER, GET_MODE (x), const0_rtx);
7412
7413 /* X might be a paradoxical (subreg (mem)). In that case, gen_lowpart
7414 won't know what to do. So we will strip off the SUBREG here and
7415 process normally. */
7416 if (GET_CODE (x) == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
7417 {
7418 x = SUBREG_REG (x);
7419 if (GET_MODE (x) == mode)
7420 return x;
7421 }
7422
7423 result = gen_lowpart_common (mode, x);
7424 if (result)
7425 return result;
7426
7427 if (GET_CODE (x) == MEM)
7428 {
7429 register int offset = 0;
7430 rtx new;
7431
7432 /* Refuse to work on a volatile memory ref or one with a mode-dependent
7433 address. */
7434 if (MEM_VOLATILE_P (x) || mode_dependent_address_p (XEXP (x, 0)))
7435 return gen_rtx (CLOBBER, GET_MODE (x), const0_rtx);
7436
7437 /* If we want to refer to something bigger than the original memref,
7438 generate a perverse subreg instead. That will force a reload
7439 of the original memref X. */
7440 if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (mode))
7441 return gen_rtx (SUBREG, mode, x, 0);
7442
7443#if WORDS_BIG_ENDIAN
7444 offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
7445 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
7446#endif
7447#if BYTES_BIG_ENDIAN
7448 /* Adjust the address so that the address-after-the-data
7449 is unchanged. */
7450 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
7451 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
7452#endif
7453 new = gen_rtx (MEM, mode, plus_constant (XEXP (x, 0), offset));
7454 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
7455 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x);
7456 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
7457 return new;
7458 }
7459
7460 /* If X is a comparison operator, rewrite it in a new mode. This
7461 probably won't match, but may allow further simplifications. */
7462 else if (GET_RTX_CLASS (GET_CODE (x)) == '<')
7463 return gen_rtx_combine (GET_CODE (x), mode, XEXP (x, 0), XEXP (x, 1));
7464
7465 /* If we couldn't simplify X any other way, just enclose it in a
7466 SUBREG. Normally, this SUBREG won't match, but some patterns may
7467 include an explicit SUBREG or we may simplify it further in combine. */
7468 else
7469 {
7470 int word = 0;
7471
7472 if (WORDS_BIG_ENDIAN && GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD)
7473 word = ((GET_MODE_SIZE (GET_MODE (x))
7474 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD))
7475 / UNITS_PER_WORD);
7476 return gen_rtx (SUBREG, mode, x, word);
7477 }
7478}
7479\f
7480/* Make an rtx expression. This is a subset of gen_rtx and only supports
7481 expressions of 1, 2, or 3 operands, each of which are rtx expressions.
7482
7483 If the identical expression was previously in the insn (in the undobuf),
7484 it will be returned. Only if it is not found will a new expression
7485 be made. */
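/* Annotation (not in the original source): a typical use is
     gen_rtx_combine (PLUS, SImode, a, b);
   which returns a matching (plus:SI a b) recorded in undobuf if one
   exists, and otherwise allocates a fresh rtx.  */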
7486
7487/*VARARGS2*/
7488static rtx
7489gen_rtx_combine (va_alist)
7490 va_dcl
7491{
7492 va_list p;
7493 enum rtx_code code;
7494 enum machine_mode mode;
7495 int n_args;
7496 rtx args[3];
7497 int i, j;
7498 char *fmt;
7499 rtx rt;
7500
7501 va_start (p);
7502 code = va_arg (p, enum rtx_code);
7503 mode = va_arg (p, enum machine_mode);
7504 n_args = GET_RTX_LENGTH (code);
7505 fmt = GET_RTX_FORMAT (code);
7506
7507 if (n_args == 0 || n_args > 3)
7508 abort ();
7509
7510 /* Get each arg and verify that it is supposed to be an expression. */
7511 for (j = 0; j < n_args; j++)
7512 {
7513 if (*fmt++ != 'e')
7514 abort ();
7515
7516 args[j] = va_arg (p, rtx);
7517 }
7518
7519 /* See if this is in undobuf. Be sure we don't use objects that came
7520 from another insn; this could produce circular rtl structures. */
7521
7522 for (i = previous_num_undos; i < undobuf.num_undo; i++)
7523 if (!undobuf.undo[i].is_int
7524 && GET_CODE (undobuf.undo[i].old_contents.rtx) == code
7525 && GET_MODE (undobuf.undo[i].old_contents.rtx) == mode)
7526 {
7527 for (j = 0; j < n_args; j++)
7528 if (XEXP (undobuf.undo[i].old_contents.rtx, j) != args[j])
7529 break;
7530
7531 if (j == n_args)
7532 return undobuf.undo[i].old_contents.rtx;
7533 }
7534
7535 /* Otherwise make a new rtx. We know we have 1, 2, or 3 args.
7536 Use rtx_alloc instead of gen_rtx because it's faster on RISC. */
7537 rt = rtx_alloc (code);
7538 PUT_MODE (rt, mode);
7539 XEXP (rt, 0) = args[0];
7540 if (n_args > 1)
7541 {
7542 XEXP (rt, 1) = args[1];
7543 if (n_args > 2)
7544 XEXP (rt, 2) = args[2];
7545 }
7546 return rt;
7547}
7548
7549/* These routines make binary and unary operations by first seeing if they
7550 fold; if not, a new expression is allocated. */
7551
7552static rtx
7553gen_binary (code, mode, op0, op1)
7554 enum rtx_code code;
7555 enum machine_mode mode;
7556 rtx op0, op1;
7557{
7558 rtx result;
7559 rtx tem;
7560
7561 if (GET_RTX_CLASS (code) == 'c'
7562 && (GET_CODE (op0) == CONST_INT
7563 || (CONSTANT_P (op0) && GET_CODE (op1) != CONST_INT)))
7564 tem = op0, op0 = op1, op1 = tem;
7565
7566 if (GET_RTX_CLASS (code) == '<')
7567 {
7568 enum machine_mode op_mode = GET_MODE (op0);
7569 if (op_mode == VOIDmode)
7570 op_mode = GET_MODE (op1);
7571 result = simplify_relational_operation (code, op_mode, op0, op1);
7572 }
7573 else
7574 result = simplify_binary_operation (code, mode, op0, op1);
7575
7576 if (result)
7577 return result;
7578
7579 /* Put complex operands first and constants second. */
7580 if (GET_RTX_CLASS (code) == 'c'
7581 && ((CONSTANT_P (op0) && GET_CODE (op1) != CONST_INT)
7582 || (GET_RTX_CLASS (GET_CODE (op0)) == 'o'
7583 && GET_RTX_CLASS (GET_CODE (op1)) != 'o')
7584 || (GET_CODE (op0) == SUBREG
7585 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (op0))) == 'o'
7586 && GET_RTX_CLASS (GET_CODE (op1)) != 'o')))
7587 return gen_rtx_combine (code, mode, op1, op0);
7588
7589 return gen_rtx_combine (code, mode, op0, op1);
7590}
7591
7592static rtx
7593gen_unary (code, mode, op0)
7594 enum rtx_code code;
7595 enum machine_mode mode;
7596 rtx op0;
7597{
7598 rtx result = simplify_unary_operation (code, mode, op0, mode);
7599
7600 if (result)
7601 return result;
7602
7603 return gen_rtx_combine (code, mode, op0);
7604}
7605\f
7606/* Simplify a comparison between *POP0 and *POP1 where CODE is the
7607 comparison code that will be tested.
7608
7609 The result is a possibly different comparison code to use. *POP0 and
7610 *POP1 may be updated.
7611
7612 It is possible that we might detect that a comparison is either always
7613 true or always false. However, we do not perform general constant
7614 folding in combine, so this knowledge isn't useful. Such tautologies
7615 should have been detected earlier. Hence we ignore all such cases. */
7616
7617static enum rtx_code
7618simplify_comparison (code, pop0, pop1)
7619 enum rtx_code code;
7620 rtx *pop0;
7621 rtx *pop1;
7622{
7623 rtx op0 = *pop0;
7624 rtx op1 = *pop1;
7625 rtx tem, tem1;
7626 int i;
7627 enum machine_mode mode, tmode;
7628
7629 /* Try a few ways of applying the same transformation to both operands. */
7630 while (1)
7631 {
7632 /* If both operands are the same constant shift, see if we can ignore the
7633 shift. We can if the shift is a rotate or if the bits shifted out of
7634 this shift are not significant for either input and if the type of
7635 comparison is compatible with the shift. */
7636 if (GET_CODE (op0) == GET_CODE (op1)
7637 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
7638 && ((GET_CODE (op0) == ROTATE && (code == NE || code == EQ))
7639 || ((GET_CODE (op0) == LSHIFTRT
7640 || GET_CODE (op0) == ASHIFT || GET_CODE (op0) == LSHIFT)
7641 && (code != GT && code != LT && code != GE && code != LE))
7642 || (GET_CODE (op0) == ASHIFTRT
7643 && (code != GTU && code != LTU
7644 && code != GEU && code != LEU)))
7645 && GET_CODE (XEXP (op0, 1)) == CONST_INT
7646 && INTVAL (XEXP (op0, 1)) >= 0
7647 && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
7648 && XEXP (op0, 1) == XEXP (op1, 1))
7649 {
7650 enum machine_mode mode = GET_MODE (op0);
7651 unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
7652 int shift_count = INTVAL (XEXP (op0, 1));
7653
7654 if (GET_CODE (op0) == LSHIFTRT || GET_CODE (op0) == ASHIFTRT)
7655 mask &= (mask >> shift_count) << shift_count;
7656 else if (GET_CODE (op0) == ASHIFT || GET_CODE (op0) == LSHIFT)
7657 mask = (mask & (mask << shift_count)) >> shift_count;
7658
7659 if ((significant_bits (XEXP (op0, 0), mode) & ~ mask) == 0
7660 && (significant_bits (XEXP (op1, 0), mode) & ~ mask) == 0)
7661 op0 = XEXP (op0, 0), op1 = XEXP (op1, 0);
7662 else
7663 break;
7664 }
7665
7666 /* If both operands are AND's of a paradoxical SUBREG by constant, the
7667 SUBREGs are of the same mode, and, in both cases, the AND would
7668 be redundant if the comparison was done in the narrower mode,
7669 do the comparison in the narrower mode (e.g., we are AND'ing with 1
7670 and the operand's significant bits are 0xffffff01; in that case if
7671 we only care about QImode, we don't need the AND). This case occurs
7672 if the output mode of an scc insn is not SImode and
7673 STORE_FLAG_VALUE == 1 (e.g., the 386). */
7674
7675 else if (GET_CODE (op0) == AND && GET_CODE (op1) == AND
7676 && GET_CODE (XEXP (op0, 1)) == CONST_INT
7677 && GET_CODE (XEXP (op1, 1)) == CONST_INT
7678 && GET_CODE (XEXP (op0, 0)) == SUBREG
7679 && GET_CODE (XEXP (op1, 0)) == SUBREG
7680 && (GET_MODE_SIZE (GET_MODE (XEXP (op0, 0)))
7681 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (op0, 0)))))
7682 && (GET_MODE (SUBREG_REG (XEXP (op0, 0)))
7683 == GET_MODE (SUBREG_REG (XEXP (op1, 0))))
7684 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (XEXP (op0, 0))))
7685 <= HOST_BITS_PER_WIDE_INT)
7686 && (significant_bits (SUBREG_REG (XEXP (op0, 0)),
7687 GET_MODE (SUBREG_REG (XEXP (op0, 0))))
7688 & ~ INTVAL (XEXP (op0, 1))) == 0
7689 && (significant_bits (SUBREG_REG (XEXP (op1, 0)),
7690 GET_MODE (SUBREG_REG (XEXP (op1, 0))))
7691 & ~ INTVAL (XEXP (op1, 1))) == 0)
7692 {
7693 op0 = SUBREG_REG (XEXP (op0, 0));
7694 op1 = SUBREG_REG (XEXP (op1, 0));
7695
7696 /* The resulting comparison is always unsigned since we masked off
7697 the original sign bit. */
7698 code = unsigned_condition (code);
7699 }
7700 else
7701 break;
7702 }
7703
7704 /* If the first operand is a constant, swap the operands and adjust the
7705 comparison code appropriately. */
7706 if (CONSTANT_P (op0))
7707 {
7708 tem = op0, op0 = op1, op1 = tem;
7709 code = swap_condition (code);
7710 }
7711
7712 /* We now enter a loop during which we will try to simplify the comparison.
7713 For the most part, we only are concerned with comparisons with zero,
7714 but some things may really be comparisons with zero but not start
7715 out looking that way. */
7716
7717 while (GET_CODE (op1) == CONST_INT)
7718 {
7719 enum machine_mode mode = GET_MODE (op0);
7720 int mode_width = GET_MODE_BITSIZE (mode);
7721 unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
7722 int equality_comparison_p;
7723 int sign_bit_comparison_p;
7724 int unsigned_comparison_p;
7725 HOST_WIDE_INT const_op;
7726
7727 /* We only want to handle integral modes. This catches VOIDmode,
7728 CCmode, and the floating-point modes. An exception is that we
7729 can handle VOIDmode if OP0 is a COMPARE or a comparison
7730 operation. */
7731
7732 if (GET_MODE_CLASS (mode) != MODE_INT
7733 && ! (mode == VOIDmode
7734 && (GET_CODE (op0) == COMPARE
7735 || GET_RTX_CLASS (GET_CODE (op0)) == '<')))
7736 break;
7737
7738 /* Get the constant we are comparing against and turn off all bits
7739 not on in our mode. */
7740 const_op = INTVAL (op1);
7741 if (mode_width <= HOST_BITS_PER_WIDE_INT)
7742 const_op &= mask;
7743
7744 /* If we are comparing against a constant power of two and the value
7745 being compared has only that single significant bit (e.g., it was
7746 `and'ed with that bit), we can replace this with a comparison
7747 with zero. */
7748 if (const_op
7749 && (code == EQ || code == NE || code == GE || code == GEU
7750 || code == LT || code == LTU)
7751 && mode_width <= HOST_BITS_PER_WIDE_INT
7752 && exact_log2 (const_op) >= 0
7753 && significant_bits (op0, mode) == const_op)
7754 {
7755 code = (code == EQ || code == GE || code == GEU ? NE : EQ);
7756 op1 = const0_rtx, const_op = 0;
7757 }
7758
7759 /* Similarly, if we are comparing a value known to be either -1 or
7760 0 with -1, change it to the opposite comparison against zero. */
7761
7762 if (const_op == -1
7763 && (code == EQ || code == NE || code == GT || code == LE
7764 || code == GEU || code == LTU)
7765 && num_sign_bit_copies (op0, mode) == mode_width)
7766 {
7767 code = (code == EQ || code == LE || code == GEU ? NE : EQ);
7768 op1 = const0_rtx, const_op = 0;
7769 }
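/* Illustrative sketch (not part of combine.c): when every bit of X is a
   copy of its sign bit, X is either 0 or -1, so a comparison with -1 is
   the opposite of the corresponding comparison with zero.  */
#include <assert.h>
static void all_sign_copies_example (int y)
{
  int x = (y != 0) ? -1 : 0;	/* x is known to be 0 or -1 */
  assert ((x == -1) == (x != 0));
  assert ((x >  -1) == (x == 0));
}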
7770
230d793d 7771 /* Do some canonicalizations based on the comparison code. We prefer
4803a34a
RK
7772 comparisons against zero and then prefer equality comparisons.
7773 If we can reduce the size of a constant, we will do that too. */
230d793d
RS
7774
7775 switch (code)
7776 {
7777 case LT:
4803a34a
RK
7778	  /* < C is equivalent to <= (C - 1).  */
7779 if (const_op > 0)
230d793d 7780 {
4803a34a 7781 const_op -= 1;
5f4f0e22 7782 op1 = GEN_INT (const_op);
230d793d
RS
7783 code = LE;
7784 /* ... fall through to LE case below. */
7785 }
7786 else
7787 break;
7788
7789 case LE:
4803a34a
RK
7790	  /* <= C is equivalent to < (C + 1); we do this for C < 0.  */
7791 if (const_op < 0)
7792 {
7793 const_op += 1;
5f4f0e22 7794 op1 = GEN_INT (const_op);
4803a34a
RK
7795 code = LT;
7796 }
230d793d
RS
7797
7798 /* If we are doing a <= 0 comparison on a value known to have
7799 a zero sign bit, we can replace this with == 0. */
7800 else if (const_op == 0
5f4f0e22 7801 && mode_width <= HOST_BITS_PER_WIDE_INT
230d793d 7802 && (significant_bits (op0, mode)
5f4f0e22 7803 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
230d793d
RS
7804 code = EQ;
7805 break;
7806
7807 case GE:
4803a34a
RK
7808 /* >= C is equivalent to > (C - 1). */
7809 if (const_op > 0)
230d793d 7810 {
4803a34a 7811 const_op -= 1;
5f4f0e22 7812 op1 = GEN_INT (const_op);
230d793d
RS
7813 code = GT;
7814 /* ... fall through to GT below. */
7815 }
7816 else
7817 break;
7818
7819 case GT:
4803a34a
RK
7820	  /* > C is equivalent to >= (C + 1); we do this for C < 0.  */
7821 if (const_op < 0)
7822 {
7823 const_op += 1;
5f4f0e22 7824 op1 = GEN_INT (const_op);
4803a34a
RK
7825 code = GE;
7826 }
230d793d
RS
7827
7828 /* If we are doing a > 0 comparison on a value known to have
7829 a zero sign bit, we can replace this with != 0. */
7830 else if (const_op == 0
5f4f0e22 7831 && mode_width <= HOST_BITS_PER_WIDE_INT
230d793d 7832 && (significant_bits (op0, mode)
5f4f0e22 7833 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
230d793d
RS
7834 code = NE;
7835 break;
7836
230d793d 7837 case LTU:
4803a34a
RK
7838 /* < C is equivalent to <= (C - 1). */
7839 if (const_op > 0)
7840 {
7841 const_op -= 1;
5f4f0e22 7842 op1 = GEN_INT (const_op);
4803a34a
RK
7843 code = LEU;
7844 /* ... fall through ... */
7845 }
d0ab8cd3
RK
7846
7847 /* (unsigned) < 0x80000000 is equivalent to >= 0. */
7848 else if (const_op == (HOST_WIDE_INT) 1 << (mode_width - 1))
7849 {
7850 const_op = 0, op1 = const0_rtx;
7851 code = GE;
7852 break;
7853 }
4803a34a
RK
7854 else
7855 break;
230d793d
RS
7856
7857 case LEU:
7858 /* unsigned <= 0 is equivalent to == 0 */
7859 if (const_op == 0)
7860 code = EQ;
d0ab8cd3
RK
7861
7862 /* (unsigned) <= 0x7fffffff is equivalent to >= 0. */
7863 else if (const_op == ((HOST_WIDE_INT) 1 << (mode_width - 1)) - 1)
7864 {
7865 const_op = 0, op1 = const0_rtx;
7866 code = GE;
7867 }
230d793d
RS
7868 break;
7869
4803a34a
RK
7870 case GEU:
7871	  /* >= C is equivalent to > (C - 1).  */
7872 if (const_op > 1)
7873 {
7874 const_op -= 1;
5f4f0e22 7875 op1 = GEN_INT (const_op);
4803a34a
RK
7876 code = GTU;
7877 /* ... fall through ... */
7878 }
d0ab8cd3
RK
7879
7880 /* (unsigned) >= 0x80000000 is equivalent to < 0. */
7881 else if (const_op == (HOST_WIDE_INT) 1 << (mode_width - 1))
7882 {
7883 const_op = 0, op1 = const0_rtx;
7884 code = LT;
7885 }
4803a34a
RK
7886 else
7887 break;
7888
230d793d
RS
7889 case GTU:
7890 /* unsigned > 0 is equivalent to != 0 */
7891 if (const_op == 0)
7892 code = NE;
d0ab8cd3
RK
7893
7894 /* (unsigned) > 0x7fffffff is equivalent to < 0. */
7895 else if (const_op == ((HOST_WIDE_INT) 1 << (mode_width - 1)) - 1)
7896 {
7897 const_op = 0, op1 = const0_rtx;
7898 code = LT;
7899 }
230d793d
RS
7900 break;
7901 }
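/* Illustrative sketch (not part of combine.c; assumes the usual
   two's-complement conversion between unsigned and int): the boundary
   constants handled above turn unsigned comparisons into sign tests.  */
#include <assert.h>
static void canonicalize_example (unsigned u)
{
  assert ((u <  0x80000000u) == ((int) u >= 0));  /* LTU 1<<31 -> GE 0 */
  assert ((u >  0x7fffffffu) == ((int) u <  0));  /* GTU (1<<31)-1 -> LT 0 */
  assert ((u <= 0x7fffffffu) == ((int) u >= 0));  /* LEU (1<<31)-1 -> GE 0 */
}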
7902
7903 /* Compute some predicates to simplify code below. */
7904
7905 equality_comparison_p = (code == EQ || code == NE);
7906 sign_bit_comparison_p = ((code == LT || code == GE) && const_op == 0);
7907 unsigned_comparison_p = (code == LTU || code == LEU || code == GTU
7908 || code == LEU);
7909
7910 /* Now try cases based on the opcode of OP0. If none of the cases
7911 does a "continue", we exit this loop immediately after the
7912 switch. */
7913
7914 switch (GET_CODE (op0))
7915 {
7916 case ZERO_EXTRACT:
7917 /* If we are extracting a single bit from a variable position in
7918 a constant that has only a single bit set and are comparing it
7919 with zero, we can convert this into an equality comparison
7920 between the position and the location of the single bit. We can't
7921	     do this if bits are big-endian and we don't have an extzv, since we then
7922 can't know what mode to use for the endianness adjustment. */
7923
7924#if ! BITS_BIG_ENDIAN || defined (HAVE_extzv)
7925 if (GET_CODE (XEXP (op0, 0)) == CONST_INT
7926 && XEXP (op0, 1) == const1_rtx
7927 && equality_comparison_p && const_op == 0
7928 && (i = exact_log2 (INTVAL (XEXP (op0, 0)))) >= 0)
7929 {
7930#if BITS_BIG_ENDIAN
7931 i = (GET_MODE_BITSIZE
7932 (insn_operand_mode[(int) CODE_FOR_extzv][1]) - 1 - i);
7933#endif
7934
7935 op0 = XEXP (op0, 2);
5f4f0e22 7936 op1 = GEN_INT (i);
230d793d
RS
7937 const_op = i;
7938
7939 /* Result is nonzero iff shift count is equal to I. */
7940 code = reverse_condition (code);
7941 continue;
7942 }
7943#endif
7944
7945 /* ... fall through ... */
7946
7947 case SIGN_EXTRACT:
7948 tem = expand_compound_operation (op0);
7949 if (tem != op0)
7950 {
7951 op0 = tem;
7952 continue;
7953 }
7954 break;
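/* Illustrative sketch (not part of combine.c): extracting one bit of a
   constant with a single bit set is nonzero exactly when the variable
   position equals the location of that bit, so the test against zero
   becomes an equality test on the position.  */
#include <assert.h>
static void extract_bit_example (unsigned pos)
{
  unsigned c = 1u << 5;		/* single bit at position 5 */
  if (pos < 32)
    assert ((((c >> pos) & 1) == 0) == (pos != 5));
}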
7955
7956 case NOT:
7957 /* If testing for equality, we can take the NOT of the constant. */
7958 if (equality_comparison_p
7959 && (tem = simplify_unary_operation (NOT, mode, op1, mode)) != 0)
7960 {
7961 op0 = XEXP (op0, 0);
7962 op1 = tem;
7963 continue;
7964 }
7965
7966 /* If just looking at the sign bit, reverse the sense of the
7967 comparison. */
7968 if (sign_bit_comparison_p)
7969 {
7970 op0 = XEXP (op0, 0);
7971 code = (code == GE ? LT : GE);
7972 continue;
7973 }
7974 break;
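/* Illustrative sketch (not part of combine.c): for an equality the NOT
   moves onto the constant, and for a pure sign-bit test the NOT simply
   flips the sense of the comparison.  */
#include <assert.h>
static void not_example (int x, int c)
{
  assert ((~x == c) == (x == ~c));
  assert ((~x <  0) == (x >= 0));
}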
7975
7976 case NEG:
7977 /* If testing for equality, we can take the NEG of the constant. */
7978 if (equality_comparison_p
7979 && (tem = simplify_unary_operation (NEG, mode, op1, mode)) != 0)
7980 {
7981 op0 = XEXP (op0, 0);
7982 op1 = tem;
7983 continue;
7984 }
7985
7986 /* The remaining cases only apply to comparisons with zero. */
7987 if (const_op != 0)
7988 break;
7989
7990 /* When X is ABS or is known positive,
7991 (neg X) is < 0 if and only if X != 0. */
7992
7993 if (sign_bit_comparison_p
7994 && (GET_CODE (XEXP (op0, 0)) == ABS
5f4f0e22 7995 || (mode_width <= HOST_BITS_PER_WIDE_INT
230d793d 7996 && (significant_bits (XEXP (op0, 0), mode)
5f4f0e22 7997 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)))
230d793d
RS
7998 {
7999 op0 = XEXP (op0, 0);
8000 code = (code == LT ? NE : EQ);
8001 continue;
8002 }
8003
8004 /* If we have NEG of something that is the result of a
8005 SIGN_EXTEND, SIGN_EXTRACT, or ASHIFTRT, we know that the
8006 two high-order bits must be the same and hence that
8007 "(-a) < 0" is equivalent to "a > 0". Otherwise, we can't
8008 do this. */
8009 if (GET_CODE (XEXP (op0, 0)) == SIGN_EXTEND
8010 || (GET_CODE (XEXP (op0, 0)) == SIGN_EXTRACT
8011 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
8012 && (INTVAL (XEXP (XEXP (op0, 0), 1))
8013 < GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (op0, 0), 0)))))
8014 || (GET_CODE (XEXP (op0, 0)) == ASHIFTRT
8015 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
8016 && XEXP (XEXP (op0, 0), 1) != const0_rtx)
8017 || ((tem = get_last_value (XEXP (op0, 0))) != 0
8018 && (GET_CODE (tem) == SIGN_EXTEND
8019 || (GET_CODE (tem) == SIGN_EXTRACT
8020 && GET_CODE (XEXP (tem, 1)) == CONST_INT
8021 && (INTVAL (XEXP (tem, 1))
8022 < GET_MODE_BITSIZE (GET_MODE (XEXP (tem, 0)))))
8023 || (GET_CODE (tem) == ASHIFTRT
8024 && GET_CODE (XEXP (tem, 1)) == CONST_INT
8025 && XEXP (tem, 1) != const0_rtx))))
8026 {
8027 op0 = XEXP (op0, 0);
8028 code = swap_condition (code);
8029 continue;
8030 }
8031 break;
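/* Illustrative sketch (not part of combine.c): when X is known
   nonnegative, (neg X) < 0 exactly when X != 0; and when X is a
   sign-extension, negating it cannot overflow, so the comparison can
   safely be swapped.  */
#include <assert.h>
static void neg_example (int y, short s)
{
  int x = y & 0x7fffffff;	/* sign bit known to be zero */
  assert ((-x < 0) == (x != 0));
  assert ((-(int) s < 0) == ((int) s > 0));	/* "(-a) < 0" == "a > 0" */
}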
8032
8033 case ROTATE:
8034 /* If we are testing equality and our count is a constant, we
8035 can perform the inverse operation on our RHS. */
8036 if (equality_comparison_p && GET_CODE (XEXP (op0, 1)) == CONST_INT
8037 && (tem = simplify_binary_operation (ROTATERT, mode,
8038 op1, XEXP (op0, 1))) != 0)
8039 {
8040 op0 = XEXP (op0, 0);
8041 op1 = tem;
8042 continue;
8043 }
8044
8045 /* If we are doing a < 0 or >= 0 comparison, it means we are testing
8046 a particular bit. Convert it to an AND of a constant of that
8047 bit. This will be converted into a ZERO_EXTRACT. */
8048 if (const_op == 0 && sign_bit_comparison_p
8049 && GET_CODE (XEXP (op0, 1)) == CONST_INT
5f4f0e22 8050 && mode_width <= HOST_BITS_PER_WIDE_INT)
230d793d 8051 {
5f4f0e22
CH
8052 op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
8053 ((HOST_WIDE_INT) 1
8054 << (mode_width - 1
8055 - INTVAL (XEXP (op0, 1)))));
230d793d
RS
8056 code = (code == LT ? NE : EQ);
8057 continue;
8058 }
8059
8060 /* ... fall through ... */
8061
8062 case ABS:
8063 /* ABS is ignorable inside an equality comparison with zero. */
8064 if (const_op == 0 && equality_comparison_p)
8065 {
8066 op0 = XEXP (op0, 0);
8067 continue;
8068 }
8069 break;
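/* Illustrative sketch (not part of combine.c; the rotate helpers are
   hypothetical names for a 32-bit word): a rotate inside an equality is
   undone by rotating the constant the other way, and ABS is transparent
   in an equality comparison with zero.  */
#include <assert.h>
#include <stdlib.h>
#include <limits.h>
static unsigned rotl32 (unsigned v, unsigned n) { return (v << n) | (v >> (32 - n)); }
static unsigned rotr32 (unsigned v, unsigned n) { return (v >> n) | (v << (32 - n)); }
static void rotate_abs_example (unsigned x, unsigned c, int s)
{
  assert ((rotl32 (x, 3) == c) == (x == rotr32 (c, 3)));
  if (s != INT_MIN)		/* abs (INT_MIN) would overflow */
    assert ((abs (s) == 0) == (s == 0));
}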
8070
8071
8072 case SIGN_EXTEND:
8073 /* Can simplify (compare (zero/sign_extend FOO) CONST)
8074 to (compare FOO CONST) if CONST fits in FOO's mode and we
8075 are either testing inequality or have an unsigned comparison
8076 with ZERO_EXTEND or a signed comparison with SIGN_EXTEND. */
8077 if (! unsigned_comparison_p
8078 && (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
5f4f0e22
CH
8079 <= HOST_BITS_PER_WIDE_INT)
8080 && ((unsigned HOST_WIDE_INT) const_op
8081 < (((HOST_WIDE_INT) 1
8082 << (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0))) - 1)))))
230d793d
RS
8083 {
8084 op0 = XEXP (op0, 0);
8085 continue;
8086 }
8087 break;
8088
8089 case SUBREG:
a687e897
RK
8090 /* Check for the case where we are comparing A - C1 with C2,
8091	 both constants are smaller than 1/2 the maximum positive
8092 value in MODE, and the comparison is equality or unsigned.
8093 In that case, if A is either zero-extended to MODE or has
8094 sufficient sign bits so that the high-order bit in MODE
8095 is a copy of the sign in the inner mode, we can prove that it is
8096 safe to do the operation in the wider mode. This simplifies
8097 many range checks. */
8098
8099 if (mode_width <= HOST_BITS_PER_WIDE_INT
8100 && subreg_lowpart_p (op0)
8101 && GET_CODE (SUBREG_REG (op0)) == PLUS
8102 && GET_CODE (XEXP (SUBREG_REG (op0), 1)) == CONST_INT
8103 && INTVAL (XEXP (SUBREG_REG (op0), 1)) < 0
8104 && (- INTVAL (XEXP (SUBREG_REG (op0), 1))
8105 < GET_MODE_MASK (mode) / 2)
8106 && (unsigned) const_op < GET_MODE_MASK (mode) / 2
8107 && (0 == (significant_bits (XEXP (SUBREG_REG (op0), 0),
8108 GET_MODE (SUBREG_REG (op0)))
8109 & ~ GET_MODE_MASK (mode))
8110 || (num_sign_bit_copies (XEXP (SUBREG_REG (op0), 0),
8111 GET_MODE (SUBREG_REG (op0)))
8112 > (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0)))
8113 - GET_MODE_BITSIZE (mode)))))
8114 {
8115 op0 = SUBREG_REG (op0);
8116 continue;
8117 }
8118
fe0cf571
RK
8119 /* If the inner mode is narrower and we are extracting the low part,
8120 we can treat the SUBREG as if it were a ZERO_EXTEND. */
8121 if (subreg_lowpart_p (op0)
89f1c7f2
RS
8122 && GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0))) < mode_width)
8123 /* Fall through */ ;
8124 else
230d793d
RS
8125 break;
8126
8127 /* ... fall through ... */
8128
8129 case ZERO_EXTEND:
8130 if ((unsigned_comparison_p || equality_comparison_p)
8131 && (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
5f4f0e22
CH
8132 <= HOST_BITS_PER_WIDE_INT)
8133 && ((unsigned HOST_WIDE_INT) const_op
230d793d
RS
8134 < GET_MODE_MASK (GET_MODE (XEXP (op0, 0)))))
8135 {
8136 op0 = XEXP (op0, 0);
8137 continue;
8138 }
8139 break;
8140
8141 case PLUS:
8142 /* (eq (plus X C1) C2) -> (eq X (minus C2 C1)). We can only do
5089e22e 8143 this for equality comparisons due to pathological cases involving
230d793d
RS
8144 overflows. */
8145 if (equality_comparison_p && GET_CODE (XEXP (op0, 1)) == CONST_INT
8146 && (tem = simplify_binary_operation (MINUS, mode, op1,
8147 XEXP (op0, 1))) != 0)
8148 {
8149 op0 = XEXP (op0, 0);
8150 op1 = tem;
8151 continue;
8152 }
8153
8154 /* (plus (abs X) (const_int -1)) is < 0 if and only if X == 0. */
8155 if (const_op == 0 && XEXP (op0, 1) == constm1_rtx
8156 && GET_CODE (XEXP (op0, 0)) == ABS && sign_bit_comparison_p)
8157 {
8158 op0 = XEXP (XEXP (op0, 0), 0);
8159 code = (code == LT ? EQ : NE);
8160 continue;
8161 }
8162 break;
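/* Illustrative sketch (not part of combine.c; unsigned arithmetic so
   wraparound is well defined): moving the constant across an equality
   is always safe, but the same move under an ordered comparison can be
   defeated by wraparound, which is why only equality is handled.  */
#include <assert.h>
static void plus_example (void)
{
  unsigned x = 0xffffffffu;
  assert ((x + 1 == 0) == (x == 0u - 1));  /* (eq (plus X 1) 0) -> (eq X -1) */
  assert ((x + 1 <= 5) != (x <= 4));	   /* the ordered version breaks here */
}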
8163
8164 case MINUS:
8165 /* The sign bit of (minus (ashiftrt X C) X), where C is the number
8166 of bits in X minus 1, is one iff X > 0. */
8167 if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == ASHIFTRT
8168 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
8169 && INTVAL (XEXP (XEXP (op0, 0), 1)) == mode_width - 1
8170 && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
8171 {
8172 op0 = XEXP (op0, 1);
8173 code = (code == GE ? LE : GT);
8174 continue;
8175 }
8176 break;
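/* Illustrative sketch (not part of combine.c; assumes an arithmetic
   right shift of a negative int): with C == mode_width - 1, (x >> C) is
   0 or -1, and ((x >> C) - x) is negative exactly when x > 0.  */
#include <assert.h>
static void minus_example (int x)
{
  assert ((((x >> 31) - x) < 0) == (x > 0));
}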
8177
8178 case XOR:
8179 /* (eq (xor A B) C) -> (eq A (xor B C)). This is a simplification
8180 if C is zero or B is a constant. */
8181 if (equality_comparison_p
8182 && 0 != (tem = simplify_binary_operation (XOR, mode,
8183 XEXP (op0, 1), op1)))
8184 {
8185 op0 = XEXP (op0, 0);
8186 op1 = tem;
8187 continue;
8188 }
8189 break;
8190
8191 case EQ: case NE:
8192 case LT: case LTU: case LE: case LEU:
8193 case GT: case GTU: case GE: case GEU:
8194 /* We can't do anything if OP0 is a condition code value, rather
8195 than an actual data value. */
8196 if (const_op != 0
8197#ifdef HAVE_cc0
8198 || XEXP (op0, 0) == cc0_rtx
8199#endif
8200 || GET_MODE_CLASS (GET_MODE (XEXP (op0, 0))) == MODE_CC)
8201 break;
8202
8203 /* Get the two operands being compared. */
8204 if (GET_CODE (XEXP (op0, 0)) == COMPARE)
8205 tem = XEXP (XEXP (op0, 0), 0), tem1 = XEXP (XEXP (op0, 0), 1);
8206 else
8207 tem = XEXP (op0, 0), tem1 = XEXP (op0, 1);
8208
8209 /* Check for the cases where we simply want the result of the
8210 earlier test or the opposite of that result. */
8211 if (code == NE
8212 || (code == EQ && reversible_comparison_p (op0))
5f4f0e22 8213 || (GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
3f508eca 8214 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
230d793d 8215 && (STORE_FLAG_VALUE
5f4f0e22
CH
8216 & (((HOST_WIDE_INT) 1
8217 << (GET_MODE_BITSIZE (GET_MODE (op0)) - 1))))
230d793d
RS
8218 && (code == LT
8219 || (code == GE && reversible_comparison_p (op0)))))
8220 {
8221 code = (code == LT || code == NE
8222 ? GET_CODE (op0) : reverse_condition (GET_CODE (op0)));
8223 op0 = tem, op1 = tem1;
8224 continue;
8225 }
8226 break;
8227
8228 case IOR:
8229 /* The sign bit of (ior (plus X (const_int -1)) X) is non-zero
8230 iff X <= 0. */
8231 if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == PLUS
8232 && XEXP (XEXP (op0, 0), 1) == constm1_rtx
8233 && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
8234 {
8235 op0 = XEXP (op0, 1);
8236 code = (code == GE ? GT : LE);
8237 continue;
8238 }
8239 break;
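/* Illustrative sketch (not part of combine.c; the subtraction is done in
   unsigned arithmetic so x == INT_MIN stays well defined, and the result
   is converted back assuming two's complement): the sign bit of
   ((x - 1) | x) is set exactly when x <= 0.  */
#include <assert.h>
static void ior_example (int x)
{
  int v = (int) (((unsigned) x - 1u) | (unsigned) x);
  assert ((v < 0) == (x <= 0));
}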
8240
8241 case AND:
8242 /* Convert (and (xshift 1 X) Y) to (and (lshiftrt Y X) 1). This
8243 will be converted to a ZERO_EXTRACT later. */
8244 if (const_op == 0 && equality_comparison_p
8245 && (GET_CODE (XEXP (op0, 0)) == ASHIFT
8246 || GET_CODE (XEXP (op0, 0)) == LSHIFT)
8247 && XEXP (XEXP (op0, 0), 0) == const1_rtx)
8248 {
8249 op0 = simplify_and_const_int
8250 (op0, mode, gen_rtx_combine (LSHIFTRT, mode,
8251 XEXP (op0, 1),
8252 XEXP (XEXP (op0, 0), 1)),
5f4f0e22 8253 (HOST_WIDE_INT) 1);
230d793d
RS
8254 continue;
8255 }
8256
8257 /* If we are comparing (and (lshiftrt X C1) C2) for equality with
8258 zero and X is a comparison and C1 and C2 describe only bits set
8259 in STORE_FLAG_VALUE, we can compare with X. */
8260 if (const_op == 0 && equality_comparison_p
5f4f0e22 8261 && mode_width <= HOST_BITS_PER_WIDE_INT
230d793d
RS
8262 && GET_CODE (XEXP (op0, 1)) == CONST_INT
8263 && GET_CODE (XEXP (op0, 0)) == LSHIFTRT
8264 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
8265 && INTVAL (XEXP (XEXP (op0, 0), 1)) >= 0
5f4f0e22 8266 && INTVAL (XEXP (XEXP (op0, 0), 1)) < HOST_BITS_PER_WIDE_INT)
230d793d
RS
8267 {
8268 mask = ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode))
8269 << INTVAL (XEXP (XEXP (op0, 0), 1)));
8270 if ((~ STORE_FLAG_VALUE & mask) == 0
8271 && (GET_RTX_CLASS (GET_CODE (XEXP (XEXP (op0, 0), 0))) == '<'
8272 || ((tem = get_last_value (XEXP (XEXP (op0, 0), 0))) != 0
8273 && GET_RTX_CLASS (GET_CODE (tem)) == '<')))
8274 {
8275 op0 = XEXP (XEXP (op0, 0), 0);
8276 continue;
8277 }
8278 }
8279
8280 /* If we are doing an equality comparison of an AND of a bit equal
8281 to the sign bit, replace this with a LT or GE comparison of
8282 the underlying value. */
8283 if (equality_comparison_p
8284 && const_op == 0
8285 && GET_CODE (XEXP (op0, 1)) == CONST_INT
5f4f0e22 8286 && mode_width <= HOST_BITS_PER_WIDE_INT
230d793d 8287 && ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode))
5f4f0e22 8288 == (HOST_WIDE_INT) 1 << (mode_width - 1)))
230d793d
RS
8289 {
8290 op0 = XEXP (op0, 0);
8291 code = (code == EQ ? GE : LT);
8292 continue;
8293 }
8294
8295 /* If this AND operation is really a ZERO_EXTEND from a narrower
8296 mode, the constant fits within that mode, and this is either an
8297 equality or unsigned comparison, try to do this comparison in
8298 the narrower mode. */
8299 if ((equality_comparison_p || unsigned_comparison_p)
8300 && GET_CODE (XEXP (op0, 1)) == CONST_INT
8301 && (i = exact_log2 ((INTVAL (XEXP (op0, 1))
8302 & GET_MODE_MASK (mode))
8303 + 1)) >= 0
8304 && const_op >> i == 0
8305 && (tmode = mode_for_size (i, MODE_INT, 1)) != BLKmode)
8306 {
8307 op0 = gen_lowpart_for_combine (tmode, XEXP (op0, 0));
8308 continue;
8309 }
8310 break;
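/* Illustrative sketch (not part of combine.c; assumes 8-bit bytes and
   two's-complement conversion): testing the AND of the sign bit for
   zero is just a sign test, and an AND that acts as a ZERO_EXTEND lets
   the comparison move to the narrower mode.  */
#include <assert.h>
static void and_example (unsigned x)
{
  assert (((x & 0x80000000u) == 0) == ((int) x >= 0));
  assert (((x & 0xffu) == 200u) == ((unsigned char) x == 200));
}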
8311
8312 case ASHIFT:
8313 case LSHIFT:
8314 /* If we have (compare (xshift FOO N) (const_int C)) and
8315 the high order N bits of FOO (N+1 if an inequality comparison)
8316 are not significant, we can do this by comparing FOO with C
8317 shifted right N bits so long as the low-order N bits of C are
8318 zero. */
8319 if (GET_CODE (XEXP (op0, 1)) == CONST_INT
8320 && INTVAL (XEXP (op0, 1)) >= 0
8321 && ((INTVAL (XEXP (op0, 1)) + ! equality_comparison_p)
5f4f0e22
CH
8322 < HOST_BITS_PER_WIDE_INT)
8323 && ((const_op
1a26b032 8324 & ((HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1))) - 1) == 0)
5f4f0e22 8325 && mode_width <= HOST_BITS_PER_WIDE_INT
230d793d
RS
8326 && (significant_bits (XEXP (op0, 0), mode)
8327 & ~ (mask >> (INTVAL (XEXP (op0, 1))
8328 + ! equality_comparison_p))) == 0)
8329 {
8330 const_op >>= INTVAL (XEXP (op0, 1));
5f4f0e22 8331 op1 = GEN_INT (const_op);
230d793d
RS
8332 op0 = XEXP (op0, 0);
8333 continue;
8334 }
8335
dfbe1b2f 8336 /* If we are doing a sign bit comparison, it means we are testing
230d793d 8337 a particular bit. Convert it to the appropriate AND. */
dfbe1b2f 8338 if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 1)) == CONST_INT
5f4f0e22 8339 && mode_width <= HOST_BITS_PER_WIDE_INT)
230d793d 8340 {
5f4f0e22
CH
8341 op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
8342 ((HOST_WIDE_INT) 1
8343 << (mode_width - 1
8344 - INTVAL (XEXP (op0, 1)))));
230d793d
RS
8345 code = (code == LT ? NE : EQ);
8346 continue;
8347 }
dfbe1b2f
RK
8348
8349	  /* If this is an equality comparison with zero and we are shifting
8350 the low bit to the sign bit, we can convert this to an AND of the
8351 low-order bit. */
8352 if (const_op == 0 && equality_comparison_p
8353 && GET_CODE (XEXP (op0, 1)) == CONST_INT
8354 && INTVAL (XEXP (op0, 1)) == mode_width - 1)
8355 {
5f4f0e22
CH
8356 op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
8357 (HOST_WIDE_INT) 1);
dfbe1b2f
RK
8358 continue;
8359 }
230d793d
RS
8360 break;
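/* Illustrative sketch (not part of combine.c): when the bits shifted
   out the top are known to be zero, the constant can be shifted right
   instead, and shifting the low bit into the sign position is just a
   test of that bit.  */
#include <assert.h>
static void shift_example (unsigned y)
{
  unsigned x = y & 0x00ffffffu;	/* top 8 bits known to be zero */
  assert (((x << 8) == 0x12300u) == (x == 0x123u));
  assert (((y << 31) == 0) == ((y & 1) == 0));
}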
8361
8362 case ASHIFTRT:
d0ab8cd3
RK
8363 /* If this is an equality comparison with zero, we can do this
8364 as a logical shift, which might be much simpler. */
8365 if (equality_comparison_p && const_op == 0
8366 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
8367 {
8368 op0 = simplify_shift_const (NULL_RTX, LSHIFTRT, mode,
8369 XEXP (op0, 0),
8370 INTVAL (XEXP (op0, 1)));
8371 continue;
8372 }
8373
230d793d
RS
8374 /* If OP0 is a sign extension and CODE is not an unsigned comparison,
8375 do the comparison in a narrower mode. */
8376 if (! unsigned_comparison_p
8377 && GET_CODE (XEXP (op0, 1)) == CONST_INT
8378 && GET_CODE (XEXP (op0, 0)) == ASHIFT
8379 && XEXP (op0, 1) == XEXP (XEXP (op0, 0), 1)
8380 && (tmode = mode_for_size (mode_width - INTVAL (XEXP (op0, 1)),
22331794 8381 MODE_INT, 1)) != BLKmode
5f4f0e22
CH
8382 && ((unsigned HOST_WIDE_INT) const_op <= GET_MODE_MASK (tmode)
8383 || ((unsigned HOST_WIDE_INT) - const_op
8384 <= GET_MODE_MASK (tmode))))
230d793d
RS
8385 {
8386 op0 = gen_lowpart_for_combine (tmode, XEXP (XEXP (op0, 0), 0));
8387 continue;
8388 }
8389
8390 /* ... fall through ... */
8391 case LSHIFTRT:
8392 /* If we have (compare (xshiftrt FOO N) (const_int C)) and
8393 the low order N bits of FOO are not significant, we can do this
8394 by comparing FOO with C shifted left N bits so long as no
8395 overflow occurs. */
8396 if (GET_CODE (XEXP (op0, 1)) == CONST_INT
8397 && INTVAL (XEXP (op0, 1)) >= 0
5f4f0e22
CH
8398 && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
8399 && mode_width <= HOST_BITS_PER_WIDE_INT
230d793d 8400 && (significant_bits (XEXP (op0, 0), mode)
5f4f0e22 8401 & (((HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1))) - 1)) == 0
230d793d
RS
8402 && (const_op == 0
8403 || (floor_log2 (const_op) + INTVAL (XEXP (op0, 1))
8404 < mode_width)))
8405 {
8406 const_op <<= INTVAL (XEXP (op0, 1));
5f4f0e22 8407 op1 = GEN_INT (const_op);
230d793d
RS
8408 op0 = XEXP (op0, 0);
8409 continue;
8410 }
8411
8412 /* If we are using this shift to extract just the sign bit, we
8413 can replace this with an LT or GE comparison. */
8414 if (const_op == 0
8415 && (equality_comparison_p || sign_bit_comparison_p)
8416 && GET_CODE (XEXP (op0, 1)) == CONST_INT
8417 && INTVAL (XEXP (op0, 1)) == mode_width - 1)
8418 {
8419 op0 = XEXP (op0, 0);
8420 code = (code == NE || code == GT ? LT : GE);
8421 continue;
8422 }
8423 break;
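/* Illustrative sketch (not part of combine.c; assumes two's-complement
   conversion of unsigned to int): a logical right shift by
   mode_width - 1 leaves exactly the sign bit, so comparing the result
   with zero is a sign test.  */
#include <assert.h>
static void lshiftrt_example (unsigned u)
{
  assert (((u >> 31) != 0) == ((int) u <  0));
  assert (((u >> 31) == 0) == ((int) u >= 0));
}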
8424 }
8425
8426 break;
8427 }
8428
8429 /* Now make any compound operations involved in this comparison. Then,
8430	 check for an outermost SUBREG on OP0 that isn't doing anything or is
8431 paradoxical. The latter case can only occur when it is known that the
8432 "extra" bits will be zero. Therefore, it is safe to remove the SUBREG.
8433 We can never remove a SUBREG for a non-equality comparison because the
8434 sign bit is in a different place in the underlying object. */
8435
8436 op0 = make_compound_operation (op0, op1 == const0_rtx ? COMPARE : SET);
8437 op1 = make_compound_operation (op1, SET);
8438
8439 if (GET_CODE (op0) == SUBREG && subreg_lowpart_p (op0)
8440 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
8441 && (code == NE || code == EQ)
8442 && ((GET_MODE_SIZE (GET_MODE (op0))
8443 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0))))))
8444 {
8445 op0 = SUBREG_REG (op0);
8446 op1 = gen_lowpart_for_combine (GET_MODE (op0), op1);
8447 }
8448
8449 else if (GET_CODE (op0) == SUBREG && subreg_lowpart_p (op0)
8450 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
8451 && (code == NE || code == EQ)
ac49a949
RS
8452 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0)))
8453 <= HOST_BITS_PER_WIDE_INT)
230d793d
RS
8454 && (significant_bits (SUBREG_REG (op0), GET_MODE (SUBREG_REG (op0)))
8455 & ~ GET_MODE_MASK (GET_MODE (op0))) == 0
8456 && (tem = gen_lowpart_for_combine (GET_MODE (SUBREG_REG (op0)),
8457 op1),
8458 (significant_bits (tem, GET_MODE (SUBREG_REG (op0)))
8459 & ~ GET_MODE_MASK (GET_MODE (op0))) == 0))
8460 op0 = SUBREG_REG (op0), op1 = tem;
8461
8462 /* We now do the opposite procedure: Some machines don't have compare
8463 insns in all modes. If OP0's mode is an integer mode smaller than a
8464 word and we can't do a compare in that mode, see if there is a larger
a687e897
RK
8465 mode for which we can do the compare. There are a number of cases in
8466 which we can use the wider mode. */
230d793d
RS
8467
8468 mode = GET_MODE (op0);
8469 if (mode != VOIDmode && GET_MODE_CLASS (mode) == MODE_INT
8470 && GET_MODE_SIZE (mode) < UNITS_PER_WORD
8471 && cmp_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
8472 for (tmode = GET_MODE_WIDER_MODE (mode);
5f4f0e22
CH
8473 (tmode != VOIDmode
8474 && GET_MODE_BITSIZE (tmode) <= HOST_BITS_PER_WIDE_INT);
230d793d 8475 tmode = GET_MODE_WIDER_MODE (tmode))
a687e897 8476 if (cmp_optab->handlers[(int) tmode].insn_code != CODE_FOR_nothing)
230d793d 8477 {
a687e897
RK
8478 /* If the only significant bits in OP0 and OP1 are those in the
8479 narrower mode and this is an equality or unsigned comparison,
8480 we can use the wider mode. Similarly for sign-extended
8481 values and equality or signed comparisons. */
8482 if (((code == EQ || code == NE
8483 || code == GEU || code == GTU || code == LEU || code == LTU)
8484 && ((significant_bits (op0, tmode) & ~ GET_MODE_MASK (mode))
8485 == 0)
8486 && ((significant_bits (op1, tmode) & ~ GET_MODE_MASK (mode))
8487 == 0))
8488 || ((code == EQ || code == NE
8489 || code == GE || code == GT || code == LE || code == LT)
8490 && (num_sign_bit_copies (op0, tmode)
58744483 8491 > GET_MODE_BITSIZE (tmode) - GET_MODE_BITSIZE (mode))
a687e897 8492 && (num_sign_bit_copies (op1, tmode)
58744483 8493 > GET_MODE_BITSIZE (tmode) - GET_MODE_BITSIZE (mode))))
a687e897
RK
8494 {
8495 op0 = gen_lowpart_for_combine (tmode, op0);
8496 op1 = gen_lowpart_for_combine (tmode, op1);
8497 break;
8498 }
230d793d 8499
a687e897
RK
8500 /* If this is a test for negative, we can make an explicit
8501 test of the sign bit. */
8502
8503 if (op1 == const0_rtx && (code == LT || code == GE)
8504 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
230d793d 8505 {
a687e897
RK
8506 op0 = gen_binary (AND, tmode,
8507 gen_lowpart_for_combine (tmode, op0),
5f4f0e22
CH
8508 GEN_INT ((HOST_WIDE_INT) 1
8509 << (GET_MODE_BITSIZE (mode) - 1)));
230d793d 8510 code = (code == LT) ? NE : EQ;
a687e897 8511 break;
230d793d 8512 }
230d793d
RS
8513 }
8514
8515 *pop0 = op0;
8516 *pop1 = op1;
8517
8518 return code;
8519}
8520\f
8521/* Return 1 if we know that X, a comparison operation, is not operating
8522 on a floating-point value or is EQ or NE, meaning that we can safely
8523 reverse it. */
8524
8525static int
8526reversible_comparison_p (x)
8527 rtx x;
8528{
8529 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
8530 || GET_CODE (x) == NE || GET_CODE (x) == EQ)
8531 return 1;
8532
8533 switch (GET_MODE_CLASS (GET_MODE (XEXP (x, 0))))
8534 {
8535 case MODE_INT:
8536 return 1;
8537
8538 case MODE_CC:
8539 x = get_last_value (XEXP (x, 0));
8540 return (x && GET_CODE (x) == COMPARE
8541 && GET_MODE_CLASS (GET_MODE (XEXP (x, 0))) == MODE_INT);
8542 }
8543
8544 return 0;
8545}
8546\f
8547/* Utility function for the following routine. Called when X is part of a value
8548 being stored into reg_last_set_value. Sets reg_last_set_table_tick
8549 for each register mentioned. Similar to mention_regs in cse.c. */
8550
8551static void
8552update_table_tick (x)
8553 rtx x;
8554{
8555 register enum rtx_code code = GET_CODE (x);
8556 register char *fmt = GET_RTX_FORMAT (code);
8557 register int i;
8558
8559 if (code == REG)
8560 {
8561 int regno = REGNO (x);
8562 int endregno = regno + (regno < FIRST_PSEUDO_REGISTER
8563 ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
8564
8565 for (i = regno; i < endregno; i++)
8566 reg_last_set_table_tick[i] = label_tick;
8567
8568 return;
8569 }
8570
8571 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
8572 /* Note that we can't have an "E" in values stored; see
8573 get_last_value_validate. */
8574 if (fmt[i] == 'e')
8575 update_table_tick (XEXP (x, i));
8576}
8577
8578/* Record that REG is set to VALUE in insn INSN. If VALUE is zero, we
8579 are saying that the register is clobbered and we no longer know its
8580 value. If INSN is zero, don't update reg_last_set; this call is normally
8581 done with VALUE also zero to invalidate the register. */
8582
8583static void
8584record_value_for_reg (reg, insn, value)
8585 rtx reg;
8586 rtx insn;
8587 rtx value;
8588{
8589 int regno = REGNO (reg);
8590 int endregno = regno + (regno < FIRST_PSEUDO_REGISTER
8591 ? HARD_REGNO_NREGS (regno, GET_MODE (reg)) : 1);
8592 int i;
8593
8594 /* If VALUE contains REG and we have a previous value for REG, substitute
8595 the previous value. */
8596 if (value && insn && reg_overlap_mentioned_p (reg, value))
8597 {
8598 rtx tem;
8599
8600 /* Set things up so get_last_value is allowed to see anything set up to
8601 our insn. */
8602 subst_low_cuid = INSN_CUID (insn);
8603 tem = get_last_value (reg);
8604
8605 if (tem)
8606 value = replace_rtx (copy_rtx (value), reg, tem);
8607 }
8608
8609 /* For each register modified, show we don't know its value, that
8610 its value has been updated, and that we don't know the location of
8611 the death of the register. */
8612 for (i = regno; i < endregno; i ++)
8613 {
8614 if (insn)
8615 reg_last_set[i] = insn;
8616 reg_last_set_value[i] = 0;
8617 reg_last_death[i] = 0;
8618 }
8619
8620 /* Mark registers that are being referenced in this value. */
8621 if (value)
8622 update_table_tick (value);
8623
8624 /* Now update the status of each register being set.
8625 If someone is using this register in this block, set this register
8626 to invalid since we will get confused between the two lives in this
8627 basic block. This makes using this register always invalid. In cse, we
8628 scan the table to invalidate all entries using this register, but this
8629 is too much work for us. */
8630
8631 for (i = regno; i < endregno; i++)
8632 {
8633 reg_last_set_label[i] = label_tick;
8634 if (value && reg_last_set_table_tick[i] == label_tick)
8635 reg_last_set_invalid[i] = 1;
8636 else
8637 reg_last_set_invalid[i] = 0;
8638 }
8639
8640 /* The value being assigned might refer to X (like in "x++;"). In that
8641 case, we must replace it with (clobber (const_int 0)) to prevent
8642 infinite loops. */
8643 if (value && ! get_last_value_validate (&value,
8644 reg_last_set_label[regno], 0))
8645 {
8646 value = copy_rtx (value);
8647 if (! get_last_value_validate (&value, reg_last_set_label[regno], 1))
8648 value = 0;
8649 }
8650
8651 /* For the main register being modified, update the value. */
8652 reg_last_set_value[regno] = value;
8653
8654}
8655
8656/* Used for communication between the following two routines. */
8657static rtx record_dead_insn;
8658
8659/* Called via note_stores from record_dead_and_set_regs to handle one
8660 SET or CLOBBER in an insn. */
8661
8662static void
8663record_dead_and_set_regs_1 (dest, setter)
8664 rtx dest, setter;
8665{
8666 if (GET_CODE (dest) == REG)
8667 {
8668 /* If we are setting the whole register, we know its value. Otherwise
8669 show that we don't know the value. We can handle SUBREG in
8670 some cases. */
8671 if (GET_CODE (setter) == SET && dest == SET_DEST (setter))
8672 record_value_for_reg (dest, record_dead_insn, SET_SRC (setter));
8673 else if (GET_CODE (setter) == SET
8674 && GET_CODE (SET_DEST (setter)) == SUBREG
8675 && SUBREG_REG (SET_DEST (setter)) == dest
8676 && subreg_lowpart_p (SET_DEST (setter)))
d0ab8cd3
RK
8677 record_value_for_reg (dest, record_dead_insn,
8678 gen_lowpart_for_combine (GET_MODE (dest),
8679 SET_SRC (setter)));
230d793d 8680 else
5f4f0e22 8681 record_value_for_reg (dest, record_dead_insn, NULL_RTX);
230d793d
RS
8682 }
8683 else if (GET_CODE (dest) == MEM
8684 /* Ignore pushes, they clobber nothing. */
8685 && ! push_operand (dest, GET_MODE (dest)))
8686 mem_last_set = INSN_CUID (record_dead_insn);
8687}
8688
8689/* Update the records of when each REG was most recently set or killed
8690 for the things done by INSN. This is the last thing done in processing
8691 INSN in the combiner loop.
8692
8693 We update reg_last_set, reg_last_set_value, reg_last_death, and also the
8694 similar information mem_last_set (which insn most recently modified memory)
8695 and last_call_cuid (which insn was the most recent subroutine call). */
8696
8697static void
8698record_dead_and_set_regs (insn)
8699 rtx insn;
8700{
8701 register rtx link;
8702 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
8703 {
8704 if (REG_NOTE_KIND (link) == REG_DEAD)
8705 reg_last_death[REGNO (XEXP (link, 0))] = insn;
8706 else if (REG_NOTE_KIND (link) == REG_INC)
5f4f0e22 8707 record_value_for_reg (XEXP (link, 0), insn, NULL_RTX);
230d793d
RS
8708 }
8709
8710 if (GET_CODE (insn) == CALL_INSN)
8711 last_call_cuid = mem_last_set = INSN_CUID (insn);
8712
8713 record_dead_insn = insn;
8714 note_stores (PATTERN (insn), record_dead_and_set_regs_1);
8715}
8716\f
8717/* Utility routine for the following function. Verify that all the registers
8718 mentioned in *LOC are valid when *LOC was part of a value set when
8719 label_tick == TICK. Return 0 if some are not.
8720
8721 If REPLACE is non-zero, replace the invalid reference with
8722 (clobber (const_int 0)) and return 1. This replacement is useful because
8723 we often can get useful information about the form of a value (e.g., if
8724 it was produced by a shift that always produces -1 or 0) even though
8725 we don't know exactly what registers it was produced from. */
8726
8727static int
8728get_last_value_validate (loc, tick, replace)
8729 rtx *loc;
8730 int tick;
8731 int replace;
8732{
8733 rtx x = *loc;
8734 char *fmt = GET_RTX_FORMAT (GET_CODE (x));
8735 int len = GET_RTX_LENGTH (GET_CODE (x));
8736 int i;
8737
8738 if (GET_CODE (x) == REG)
8739 {
8740 int regno = REGNO (x);
8741 int endregno = regno + (regno < FIRST_PSEUDO_REGISTER
8742 ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
8743 int j;
8744
8745 for (j = regno; j < endregno; j++)
8746 if (reg_last_set_invalid[j]
8747 /* If this is a pseudo-register that was only set once, it is
8748 always valid. */
8749 || (! (regno >= FIRST_PSEUDO_REGISTER && reg_n_sets[regno] == 1)
8750 && reg_last_set_label[j] > tick))
8751 {
8752 if (replace)
8753 *loc = gen_rtx (CLOBBER, GET_MODE (x), const0_rtx);
8754 return replace;
8755 }
8756
8757 return 1;
8758 }
8759
8760 for (i = 0; i < len; i++)
8761 if ((fmt[i] == 'e'
8762 && get_last_value_validate (&XEXP (x, i), tick, replace) == 0)
8763 /* Don't bother with these. They shouldn't occur anyway. */
8764 || fmt[i] == 'E')
8765 return 0;
8766
8767 /* If we haven't found a reason for it to be invalid, it is valid. */
8768 return 1;
8769}
8770
8771/* Get the last value assigned to X, if known. Some registers
8772 in the value may be replaced with (clobber (const_int 0)) if their value
8773 is no longer known reliably. */
8774
8775static rtx
8776get_last_value (x)
8777 rtx x;
8778{
8779 int regno;
8780 rtx value;
8781
8782 /* If this is a non-paradoxical SUBREG, get the value of its operand and
8783 then convert it to the desired mode. If this is a paradoxical SUBREG,
8784 we cannot predict what values the "extra" bits might have. */
8785 if (GET_CODE (x) == SUBREG
8786 && subreg_lowpart_p (x)
8787 && (GET_MODE_SIZE (GET_MODE (x))
8788 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
8789 && (value = get_last_value (SUBREG_REG (x))) != 0)
8790 return gen_lowpart_for_combine (GET_MODE (x), value);
8791
8792 if (GET_CODE (x) != REG)
8793 return 0;
8794
8795 regno = REGNO (x);
8796 value = reg_last_set_value[regno];
8797
d0ab8cd3 8798 /* If we don't have a value or if it isn't for this basic block, return 0. */
230d793d
RS
8799
8800 if (value == 0
8801 || (reg_n_sets[regno] != 1
d0ab8cd3 8802 && (reg_last_set_label[regno] != label_tick)))
230d793d
RS
8803 return 0;
8804
d0ab8cd3
RK
8805 /* If the value was set in a later insn than the ones we are processing,
8806 we can't use it, but make a quick check to see if the previous insn
8807 set it to something. This is commonly the case when the same pseudo
8808 is used by repeated insns. */
8809
8810 if (reg_n_sets[regno] != 1
8811 && INSN_CUID (reg_last_set[regno]) >= subst_low_cuid)
8812 {
8813 rtx insn, set;
8814
2fc9c644 8815 for (insn = prev_nonnote_insn (subst_insn);
d0ab8cd3 8816 insn && INSN_CUID (insn) >= subst_low_cuid;
2fc9c644 8817 insn = prev_nonnote_insn (insn))
d0ab8cd3
RK
8818 ;
8819
8820 if (insn
8821 && (set = single_set (insn)) != 0
8822 && rtx_equal_p (SET_DEST (set), x))
8823 {
8824 value = SET_SRC (set);
8825
8826 /* Make sure that VALUE doesn't reference X. Replace any
8827	 explicit references with a CLOBBER. If there are any remaining
8828 references (rare), don't use the value. */
8829
8830 if (reg_mentioned_p (x, value))
8831 value = replace_rtx (copy_rtx (value), x,
8832 gen_rtx (CLOBBER, GET_MODE (x), const0_rtx));
8833
8834 if (reg_overlap_mentioned_p (x, value))
8835 return 0;
8836 }
8837 else
8838 return 0;
8839 }
8840
8841 /* If the value has all its registers valid, return it. */
230d793d
RS
8842 if (get_last_value_validate (&value, reg_last_set_label[regno], 0))
8843 return value;
8844
8845 /* Otherwise, make a copy and replace any invalid register with
8846 (clobber (const_int 0)). If that fails for some reason, return 0. */
8847
8848 value = copy_rtx (value);
8849 if (get_last_value_validate (&value, reg_last_set_label[regno], 1))
8850 return value;
8851
8852 return 0;
8853}
8854\f
8855/* Return nonzero if expression X refers to a REG or to memory
8856 that is set in an instruction more recent than FROM_CUID. */
8857
8858static int
8859use_crosses_set_p (x, from_cuid)
8860 register rtx x;
8861 int from_cuid;
8862{
8863 register char *fmt;
8864 register int i;
8865 register enum rtx_code code = GET_CODE (x);
8866
8867 if (code == REG)
8868 {
8869 register int regno = REGNO (x);
8870#ifdef PUSH_ROUNDING
8871 /* Don't allow uses of the stack pointer to be moved,
8872 because we don't know whether the move crosses a push insn. */
8873 if (regno == STACK_POINTER_REGNUM)
8874 return 1;
8875#endif
8876 return (reg_last_set[regno]
8877 && INSN_CUID (reg_last_set[regno]) > from_cuid);
8878 }
8879
8880 if (code == MEM && mem_last_set > from_cuid)
8881 return 1;
8882
8883 fmt = GET_RTX_FORMAT (code);
8884
8885 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
8886 {
8887 if (fmt[i] == 'E')
8888 {
8889 register int j;
8890 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
8891 if (use_crosses_set_p (XVECEXP (x, i, j), from_cuid))
8892 return 1;
8893 }
8894 else if (fmt[i] == 'e'
8895 && use_crosses_set_p (XEXP (x, i), from_cuid))
8896 return 1;
8897 }
8898 return 0;
8899}
8900\f
8901/* Define three variables used for communication between the following
8902 routines. */
8903
8904static int reg_dead_regno, reg_dead_endregno;
8905static int reg_dead_flag;
8906
8907/* Function called via note_stores from reg_dead_at_p.
8908
8909 If DEST is within [reg_dead_regno, reg_dead_endregno), set
8910 reg_dead_flag to 1 if X is a CLOBBER and to -1 if it is a SET. */
8911
8912static void
8913reg_dead_at_p_1 (dest, x)
8914 rtx dest;
8915 rtx x;
8916{
8917 int regno, endregno;
8918
8919 if (GET_CODE (dest) != REG)
8920 return;
8921
8922 regno = REGNO (dest);
8923 endregno = regno + (regno < FIRST_PSEUDO_REGISTER
8924 ? HARD_REGNO_NREGS (regno, GET_MODE (dest)) : 1);
8925
8926 if (reg_dead_endregno > regno && reg_dead_regno < endregno)
8927 reg_dead_flag = (GET_CODE (x) == CLOBBER) ? 1 : -1;
8928}
8929
8930/* Return non-zero if REG is known to be dead at INSN.
8931
8932 We scan backwards from INSN. If we hit a REG_DEAD note or a CLOBBER
8933 referencing REG, it is dead. If we hit a SET referencing REG, it is
8934 live. Otherwise, see if it is live or dead at the start of the basic
8935 block we are in. */
8936
8937static int
8938reg_dead_at_p (reg, insn)
8939 rtx reg;
8940 rtx insn;
8941{
8942 int block, i;
8943
8944 /* Set variables for reg_dead_at_p_1. */
8945 reg_dead_regno = REGNO (reg);
8946 reg_dead_endregno = reg_dead_regno + (reg_dead_regno < FIRST_PSEUDO_REGISTER
8947 ? HARD_REGNO_NREGS (reg_dead_regno,
8948 GET_MODE (reg))
8949 : 1);
8950
8951 reg_dead_flag = 0;
8952
8953 /* Scan backwards until we find a REG_DEAD note, SET, CLOBBER, label, or
8954 beginning of function. */
8955 for (; insn && GET_CODE (insn) != CODE_LABEL;
8956 insn = prev_nonnote_insn (insn))
8957 {
8958 note_stores (PATTERN (insn), reg_dead_at_p_1);
8959 if (reg_dead_flag)
8960 return reg_dead_flag == 1 ? 1 : 0;
8961
8962 if (find_regno_note (insn, REG_DEAD, reg_dead_regno))
8963 return 1;
8964 }
8965
8966 /* Get the basic block number that we were in. */
8967 if (insn == 0)
8968 block = 0;
8969 else
8970 {
8971 for (block = 0; block < n_basic_blocks; block++)
8972 if (insn == basic_block_head[block])
8973 break;
8974
8975 if (block == n_basic_blocks)
8976 return 0;
8977 }
8978
8979 for (i = reg_dead_regno; i < reg_dead_endregno; i++)
5f4f0e22
CH
8980 if (basic_block_live_at_start[block][i / REGSET_ELT_BITS]
8981 & ((REGSET_ELT_TYPE) 1 << (i % REGSET_ELT_BITS)))
230d793d
RS
8982 return 0;
8983
8984 return 1;
8985}
8986\f
8987/* Remove register number REGNO from the dead registers list of INSN.
8988
8989 Return the note used to record the death, if there was one. */
8990
8991rtx
8992remove_death (regno, insn)
8993 int regno;
8994 rtx insn;
8995{
8996 register rtx note = find_regno_note (insn, REG_DEAD, regno);
8997
8998 if (note)
1a26b032
RK
8999 {
9000 reg_n_deaths[regno]--;
9001 remove_note (insn, note);
9002 }
230d793d
RS
9003
9004 return note;
9005}
9006
9007/* For each register (hardware or pseudo) used within expression X, if its
9008 death is in an instruction with cuid between FROM_CUID (inclusive) and
9009 TO_INSN (exclusive), put a REG_DEAD note for that register in the
9010 list headed by PNOTES.
9011
9012 This is done when X is being merged by combination into TO_INSN. These
9013 notes will then be distributed as needed. */
9014
9015static void
9016move_deaths (x, from_cuid, to_insn, pnotes)
9017 rtx x;
9018 int from_cuid;
9019 rtx to_insn;
9020 rtx *pnotes;
9021{
9022 register char *fmt;
9023 register int len, i;
9024 register enum rtx_code code = GET_CODE (x);
9025
9026 if (code == REG)
9027 {
9028 register int regno = REGNO (x);
9029 register rtx where_dead = reg_last_death[regno];
9030
9031 if (where_dead && INSN_CUID (where_dead) >= from_cuid
9032 && INSN_CUID (where_dead) < INSN_CUID (to_insn))
9033 {
9034 rtx note = remove_death (regno, reg_last_death[regno]);
9035
9036 /* It is possible for the call above to return 0. This can occur
9037 when reg_last_death points to I2 or I1 that we combined with.
9038 In that case make a new note. */
9039
9040 if (note)
9041 {
9042 XEXP (note, 1) = *pnotes;
9043 *pnotes = note;
9044 }
9045 else
9046 *pnotes = gen_rtx (EXPR_LIST, REG_DEAD, x, *pnotes);
1a26b032
RK
9047
9048 reg_n_deaths[regno]++;
230d793d
RS
9049 }
9050
9051 return;
9052 }
9053
9054 else if (GET_CODE (x) == SET)
9055 {
9056 rtx dest = SET_DEST (x);
9057
9058 move_deaths (SET_SRC (x), from_cuid, to_insn, pnotes);
9059
a7c99304
RK
9060 /* In the case of a ZERO_EXTRACT, a STRICT_LOW_PART, or a SUBREG
9061	 that accesses one word of a multi-word item, some
9062	 piece of every register in the expression is used by
9063 this insn, so remove any old death. */
9064
9065 if (GET_CODE (dest) == ZERO_EXTRACT
9066 || GET_CODE (dest) == STRICT_LOW_PART
9067 || (GET_CODE (dest) == SUBREG
9068 && (((GET_MODE_SIZE (GET_MODE (dest))
9069 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
9070 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
9071 + UNITS_PER_WORD - 1) / UNITS_PER_WORD))))
230d793d 9072 {
a7c99304
RK
9073 move_deaths (dest, from_cuid, to_insn, pnotes);
9074 return;
230d793d
RS
9075 }
9076
a7c99304
RK
9077 /* If this is some other SUBREG, we know it replaces the entire
9078 value, so use that as the destination. */
9079 if (GET_CODE (dest) == SUBREG)
9080 dest = SUBREG_REG (dest);
9081
9082 /* If this is a MEM, adjust deaths of anything used in the address.
9083 For a REG (the only other possibility), the entire value is
9084 being replaced so the old value is not used in this insn. */
230d793d
RS
9085
9086 if (GET_CODE (dest) == MEM)
9087 move_deaths (XEXP (dest, 0), from_cuid, to_insn, pnotes);
9088 return;
9089 }
9090
9091 else if (GET_CODE (x) == CLOBBER)
9092 return;
9093
9094 len = GET_RTX_LENGTH (code);
9095 fmt = GET_RTX_FORMAT (code);
9096
9097 for (i = 0; i < len; i++)
9098 {
9099 if (fmt[i] == 'E')
9100 {
9101 register int j;
9102 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
9103 move_deaths (XVECEXP (x, i, j), from_cuid, to_insn, pnotes);
9104 }
9105 else if (fmt[i] == 'e')
9106 move_deaths (XEXP (x, i), from_cuid, to_insn, pnotes);
9107 }
9108}
9109\f
a7c99304
RK
9110/* Return 1 if X is the target of a bit-field assignment in BODY, the
9111 pattern of an insn. X must be a REG. */
230d793d
RS
9112
9113static int
a7c99304
RK
9114reg_bitfield_target_p (x, body)
9115 rtx x;
230d793d
RS
9116 rtx body;
9117{
9118 int i;
9119
9120 if (GET_CODE (body) == SET)
a7c99304
RK
9121 {
9122 rtx dest = SET_DEST (body);
9123 rtx target;
9124 int regno, tregno, endregno, endtregno;
9125
9126 if (GET_CODE (dest) == ZERO_EXTRACT)
9127 target = XEXP (dest, 0);
9128 else if (GET_CODE (dest) == STRICT_LOW_PART)
9129 target = SUBREG_REG (XEXP (dest, 0));
9130 else
9131 return 0;
9132
9133 if (GET_CODE (target) == SUBREG)
9134 target = SUBREG_REG (target);
9135
9136 if (GET_CODE (target) != REG)
9137 return 0;
9138
9139 tregno = REGNO (target), regno = REGNO (x);
9140 if (tregno >= FIRST_PSEUDO_REGISTER || regno >= FIRST_PSEUDO_REGISTER)
9141 return target == x;
9142
9143 endtregno = tregno + HARD_REGNO_NREGS (tregno, GET_MODE (target));
9144 endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
9145
9146 return endregno > tregno && regno < endtregno;
9147 }
230d793d
RS
9148
9149 else if (GET_CODE (body) == PARALLEL)
9150 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
a7c99304 9151 if (reg_bitfield_target_p (x, XVECEXP (body, 0, i)))
230d793d
RS
9152 return 1;
9153
9154 return 0;
9155}
9156\f
9157/* Given a chain of REG_NOTES originally from FROM_INSN, try to place them
9158 as appropriate. I3 and I2 are the insns resulting from the combination
9159 insns including FROM (I2 may be zero).
9160
9161 ELIM_I2 and ELIM_I1 are either zero or registers that we know will
9162 not need REG_DEAD notes because they are being substituted for. This
9163 saves searching in the most common cases.
9164
9165 Each note in the list is either ignored or placed on some insns, depending
9166 on the type of note. */
9167
9168static void
9169distribute_notes (notes, from_insn, i3, i2, elim_i2, elim_i1)
9170 rtx notes;
9171 rtx from_insn;
9172 rtx i3, i2;
9173 rtx elim_i2, elim_i1;
9174{
9175 rtx note, next_note;
9176 rtx tem;
9177
9178 for (note = notes; note; note = next_note)
9179 {
9180 rtx place = 0, place2 = 0;
9181
9182 /* If this NOTE references a pseudo register, ensure it references
9183 the latest copy of that register. */
9184 if (XEXP (note, 0) && GET_CODE (XEXP (note, 0)) == REG
9185 && REGNO (XEXP (note, 0)) >= FIRST_PSEUDO_REGISTER)
9186 XEXP (note, 0) = regno_reg_rtx[REGNO (XEXP (note, 0))];
9187
9188 next_note = XEXP (note, 1);
9189 switch (REG_NOTE_KIND (note))
9190 {
9191 case REG_UNUSED:
9192 /* If this register is set or clobbered in I3, put the note there
9193 unless there is one already. */
9194 if (reg_set_p (XEXP (note, 0), PATTERN (i3)))
9195 {
9196 if (! (GET_CODE (XEXP (note, 0)) == REG
9197 ? find_regno_note (i3, REG_UNUSED, REGNO (XEXP (note, 0)))
9198 : find_reg_note (i3, REG_UNUSED, XEXP (note, 0))))
9199 place = i3;
9200 }
9201 /* Otherwise, if this register is used by I3, then this register
9202 now dies here, so we must put a REG_DEAD note here unless there
9203 is one already. */
9204 else if (reg_referenced_p (XEXP (note, 0), PATTERN (i3))
9205 && ! (GET_CODE (XEXP (note, 0)) == REG
9206 ? find_regno_note (i3, REG_DEAD, REGNO (XEXP (note, 0)))
9207 : find_reg_note (i3, REG_DEAD, XEXP (note, 0))))
9208 {
9209 PUT_REG_NOTE_KIND (note, REG_DEAD);
9210 place = i3;
9211 }
9212 break;
9213
9214 case REG_EQUAL:
9215 case REG_EQUIV:
9216 case REG_NONNEG:
9217 /* These notes say something about results of an insn. We can
9218 only support them if they used to be on I3 in which case they
a687e897
RK
9219 remain on I3. Otherwise they are ignored.
9220
9221 If the note refers to an expression that is not a constant, we
9222 must also ignore the note since we cannot tell whether the
9223 equivalence is still true. It might be possible to do
9224 slightly better than this (we only have a problem if I2DEST
9225 or I1DEST is present in the expression), but it doesn't
9226 seem worth the trouble. */
9227
9228 if (from_insn == i3
9229 && (XEXP (note, 0) == 0 || CONSTANT_P (XEXP (note, 0))))
230d793d
RS
9230 place = i3;
9231 break;
9232
9233 case REG_INC:
9234 case REG_NO_CONFLICT:
9235 case REG_LABEL:
9236 /* These notes say something about how a register is used. They must
9237 be present on any use of the register in I2 or I3. */
9238 if (reg_mentioned_p (XEXP (note, 0), PATTERN (i3)))
9239 place = i3;
9240
9241 if (i2 && reg_mentioned_p (XEXP (note, 0), PATTERN (i2)))
9242 {
9243 if (place)
9244 place2 = i2;
9245 else
9246 place = i2;
9247 }
9248 break;
9249
9250 case REG_WAS_0:
9251 /* It is too much trouble to try to see if this note is still
9252 correct in all situations. It is better to simply delete it. */
9253 break;
9254
9255 case REG_RETVAL:
9256 /* If the insn previously containing this note still exists,
9257 put it back where it was. Otherwise move it to the previous
9258 insn. Adjust the corresponding REG_LIBCALL note. */
9259 if (GET_CODE (from_insn) != NOTE)
9260 place = from_insn;
9261 else
9262 {
5f4f0e22 9263 tem = find_reg_note (XEXP (note, 0), REG_LIBCALL, NULL_RTX);
230d793d
RS
9264 place = prev_real_insn (from_insn);
9265 if (tem && place)
9266 XEXP (tem, 0) = place;
9267 }
9268 break;
9269
9270 case REG_LIBCALL:
9271 /* This is handled similarly to REG_RETVAL. */
9272 if (GET_CODE (from_insn) != NOTE)
9273 place = from_insn;
9274 else
9275 {
5f4f0e22 9276 tem = find_reg_note (XEXP (note, 0), REG_RETVAL, NULL_RTX);
230d793d
RS
9277 place = next_real_insn (from_insn);
9278 if (tem && place)
9279 XEXP (tem, 0) = place;
9280 }
9281 break;
9282
9283 case REG_DEAD:
9284 /* If the register is used as an input in I3, it dies there.
9285 Similarly for I2, if it is non-zero and adjacent to I3.
9286
9287 If the register is not used as an input in either I3 or I2
9288 and it is not one of the registers we were supposed to eliminate,
9289 there are two possibilities. We might have a non-adjacent I2
9290 or we might have somehow eliminated an additional register
9291 from a computation. For example, we might have had A & B where
9292 we discover that B will always be zero. In this case we will
9293 eliminate the reference to A.
9294
9295 In both cases, we must search to see if we can find a previous
9296 use of A and put the death note there. */
9297
9298 if (reg_referenced_p (XEXP (note, 0), PATTERN (i3)))
9299 place = i3;
9300 else if (i2 != 0 && next_nonnote_insn (i2) == i3
9301 && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
9302 place = i2;
9303
9304 if (XEXP (note, 0) == elim_i2 || XEXP (note, 0) == elim_i1)
9305 break;
9306
510dd77e
RK
9307 /* If the register is used in both I2 and I3 and it dies in I3,
9308 we might have added another reference to it. If reg_n_refs
9309 was 2, bump it to 3. This has to be correct since the
9310 register must have been set somewhere. The reason this is
9311 done is because local-alloc.c treats 2 references as a
9312 special case. */
9313
9314 if (place == i3 && i2 != 0 && GET_CODE (XEXP (note, 0)) == REG
9315 && reg_n_refs[REGNO (XEXP (note, 0))]== 2
9316 && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
9317 reg_n_refs[REGNO (XEXP (note, 0))] = 3;
9318
230d793d
RS
9319 if (place == 0)
9320 for (tem = prev_nonnote_insn (i3);
9321 tem && (GET_CODE (tem) == INSN
9322 || GET_CODE (tem) == CALL_INSN);
9323 tem = prev_nonnote_insn (tem))
9324 {
9325 /* If the register is being set at TEM, see if that is all
9326 TEM is doing. If so, delete TEM. Otherwise, make this
9327 into a REG_UNUSED note instead. */
9328 if (reg_set_p (XEXP (note, 0), PATTERN (tem)))
9329 {
9330 rtx set = single_set (tem);
9331
5089e22e
RS
9332 /* Verify that it was the set, and not a clobber that
9333 modified the register. */
9334
9335 if (set != 0 && ! side_effects_p (SET_SRC (set))
9336 && rtx_equal_p (XEXP (note, 0), SET_DEST (set)))
230d793d
RS
9337 {
9338 /* Move the notes and links of TEM elsewhere.
9339 This might delete other dead insns recursively.
9340 First set the pattern to something that won't use
9341 any register. */
9342
9343 PATTERN (tem) = pc_rtx;
9344
5f4f0e22
CH
9345 distribute_notes (REG_NOTES (tem), tem, tem,
9346 NULL_RTX, NULL_RTX, NULL_RTX);
230d793d
RS
9347 distribute_links (LOG_LINKS (tem));
9348
9349 PUT_CODE (tem, NOTE);
9350 NOTE_LINE_NUMBER (tem) = NOTE_INSN_DELETED;
9351 NOTE_SOURCE_FILE (tem) = 0;
9352 }
9353 else
9354 {
9355 PUT_REG_NOTE_KIND (note, REG_UNUSED);
9356
9357 /* If there isn't already a REG_UNUSED note, put one
9358 here. */
9359 if (! find_regno_note (tem, REG_UNUSED,
9360 REGNO (XEXP (note, 0))))
9361 place = tem;
9362 break;
9363 }
9364 }
9365 else if (reg_referenced_p (XEXP (note, 0), PATTERN (tem)))
9366 {
9367 place = tem;
9368 break;
9369 }
9370 }
9371
9372 /* If the register is set or already dead at PLACE, we needn't do
9373 anything with this note if it is still a REG_DEAD note.
9374
9375 Note that we cannot use just `dead_or_set_p' here since we can
9376 convert an assignment to a register into a bit-field assignment.
9377 Therefore, we must also omit the note if the register is the
9378 target of a bitfield assignment. */
9379
9380 if (place && REG_NOTE_KIND (note) == REG_DEAD)
9381 {
9382 int regno = REGNO (XEXP (note, 0));
9383
9384 if (dead_or_set_p (place, XEXP (note, 0))
9385 || reg_bitfield_target_p (XEXP (note, 0), PATTERN (place)))
9386 {
9387 /* Unless the register previously died in PLACE, clear
9388 reg_last_death. [I no longer understand why this is
9389 being done.] */
9390 if (reg_last_death[regno] != place)
9391 reg_last_death[regno] = 0;
9392 place = 0;
9393 }
9394 else
9395 reg_last_death[regno] = place;
9396
9397 /* If this is a death note for a hard reg that is occupying
9398 multiple registers, ensure that we are still using all
9399 parts of the object. If we find a piece of the object
9400 that is unused, we must add a USE for that piece before
9401 PLACE and put the appropriate REG_DEAD note on it.
9402
9403 An alternative would be to put a REG_UNUSED for the pieces
9404 on the insn that set the register, but that can't be done if
9405 it is not in the same block. It is simpler, though less
9406 efficient, to add the USE insns. */
9407
9408 if (place && regno < FIRST_PSEUDO_REGISTER
9409 && HARD_REGNO_NREGS (regno, GET_MODE (XEXP (note, 0))) > 1)
9410 {
9411 int endregno
9412 = regno + HARD_REGNO_NREGS (regno,
9413 GET_MODE (XEXP (note, 0)));
9414 int all_used = 1;
9415 int i;
9416
9417 for (i = regno; i < endregno; i++)
9418 if (! refers_to_regno_p (i, i + 1, PATTERN (place), 0))
9419 {
9420 rtx piece = gen_rtx (REG, word_mode, i);
28f6d3af
RK
9421 rtx p;
9422
9423 /* See if we already placed a USE note for this
9424 register in front of PLACE. */
9425 for (p = place;
9426 GET_CODE (PREV_INSN (p)) == INSN
9427 && GET_CODE (PATTERN (PREV_INSN (p))) == USE;
9428 p = PREV_INSN (p))
9429 if (rtx_equal_p (piece,
9430 XEXP (PATTERN (PREV_INSN (p)), 0)))
9431 {
9432 p = 0;
9433 break;
9434 }
9435
9436 if (p)
9437 {
9438 rtx use_insn
9439 = emit_insn_before (gen_rtx (USE, VOIDmode,
9440 piece),
9441 p);
9442 REG_NOTES (use_insn)
9443 = gen_rtx (EXPR_LIST, REG_DEAD, piece,
9444 REG_NOTES (use_insn));
9445 }
230d793d 9446
5089e22e 9447 all_used = 0;
230d793d
RS
9448 }
9449
9450 if (! all_used)
9451 {
9452 /* Put only REG_DEAD notes for pieces that are
9453 still used and that are not already dead or set. */
9454
9455 for (i = regno; i < endregno; i++)
9456 {
9457 rtx piece = gen_rtx (REG, word_mode, i);
9458
9459 if (reg_referenced_p (piece, PATTERN (place))
9460 && ! dead_or_set_p (place, piece)
9461 && ! reg_bitfield_target_p (piece,
9462 PATTERN (place)))
9463 REG_NOTES (place) = gen_rtx (EXPR_LIST, REG_DEAD,
9464 piece,
9465 REG_NOTES (place));
9466 }
9467
9468 place = 0;
9469 }
9470 }
9471 }
9472 break;
9473
9474 default:
9475 /* Any other notes should not be present at this point in the
9476 compilation. */
9477 abort ();
9478 }
9479
9480 if (place)
9481 {
9482 XEXP (note, 1) = REG_NOTES (place);
9483 REG_NOTES (place) = note;
9484 }
1a26b032
RK
9485 else if ((REG_NOTE_KIND (note) == REG_DEAD
9486 || REG_NOTE_KIND (note) == REG_UNUSED)
9487 && GET_CODE (XEXP (note, 0)) == REG)
9488 reg_n_deaths[REGNO (XEXP (note, 0))]--;
230d793d
RS
9489
9490 if (place2)
1a26b032
RK
9491 {
9492 if ((REG_NOTE_KIND (note) == REG_DEAD
9493 || REG_NOTE_KIND (note) == REG_UNUSED)
9494 && GET_CODE (XEXP (note, 0)) == REG)
9495 reg_n_deaths[REGNO (XEXP (note, 0))]++;
9496
9497 REG_NOTES (place2) = gen_rtx (GET_CODE (note), REG_NOTE_KIND (note),
9498 XEXP (note, 0), REG_NOTES (place2));
9499 }
230d793d
RS
9500 }
9501}
9502\f
9503/* Similarly to above, distribute the LOG_LINKS that used to be present on
5089e22e
RS
9504 I3, I2, and I1 to new locations. This is also called in one case to
9505 add a link pointing at I3 when I3's destination is changed. */
230d793d
RS
9506
9507static void
9508distribute_links (links)
9509 rtx links;
9510{
9511 rtx link, next_link;
9512
9513 for (link = links; link; link = next_link)
9514 {
9515 rtx place = 0;
9516 rtx insn;
9517 rtx set, reg;
9518
9519 next_link = XEXP (link, 1);
9520
9521 /* If the insn that this link points to is a NOTE or isn't a single
9522 set, ignore it. In the latter case, it isn't clear what we
9523 can do other than ignore the link, since we can't tell which
9524 register it was for. Such links wouldn't be used by combine
9525 anyway.
9526
9527 It is not possible for the destination of the target of the link to
9528	 have been changed by combine. The only potential for this is if we
9529 replace I3, I2, and I1 by I3 and I2. But in that case the
9530 destination of I2 also remains unchanged. */
9531
9532 if (GET_CODE (XEXP (link, 0)) == NOTE
9533 || (set = single_set (XEXP (link, 0))) == 0)
9534 continue;
9535
9536 reg = SET_DEST (set);
9537 while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
9538 || GET_CODE (reg) == SIGN_EXTRACT
9539 || GET_CODE (reg) == STRICT_LOW_PART)
9540 reg = XEXP (reg, 0);
9541
9542 /* A LOG_LINK is defined as being placed on the first insn that uses
9543 a register and points to the insn that sets the register. Start
9544 searching at the next insn after the target of the link and stop
9545 when we reach a set of the register or the end of the basic block.
9546
9547 Note that this correctly handles the link that used to point from
5089e22e 9548 I3 to I2. Also note that not much searching is typically done here
230d793d
RS
9549 since most links don't point very far away. */
9550
9551 for (insn = NEXT_INSN (XEXP (link, 0));
9552 (insn && GET_CODE (insn) != CODE_LABEL
9553 && GET_CODE (PREV_INSN (insn)) != JUMP_INSN);
9554 insn = NEXT_INSN (insn))
9555 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
9556 && reg_overlap_mentioned_p (reg, PATTERN (insn)))
9557 {
9558 if (reg_referenced_p (reg, PATTERN (insn)))
9559 place = insn;
9560 break;
9561 }
9562
9563 /* If we found a place to put the link, place it there unless there
9564 is already a link to the same insn as LINK at that point. */
9565
9566 if (place)
9567 {
9568 rtx link2;
9569
9570 for (link2 = LOG_LINKS (place); link2; link2 = XEXP (link2, 1))
9571 if (XEXP (link2, 0) == XEXP (link, 0))
9572 break;
9573
9574 if (link2 == 0)
9575 {
9576 XEXP (link, 1) = LOG_LINKS (place);
9577 LOG_LINKS (place) = link;
9578 }
9579 }
9580 }
9581}
9582\f
9583void
9584dump_combine_stats (file)
9585 FILE *file;
9586{
9587 fprintf
9588 (file,
9589 ";; Combiner statistics: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n\n",
9590 combine_attempts, combine_merges, combine_extras, combine_successes);
9591}
9592
9593void
9594dump_combine_total_stats (file)
9595 FILE *file;
9596{
9597 fprintf
9598 (file,
9599 "\n;; Combiner totals: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n",
9600 total_attempts, total_merges, total_extras, total_successes);
9601}