/* Optimize by combining instructions for GNU compiler.
   Copyright (C) 1987, 88, 92-98, 1999 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */

/* This module is essentially the "combiner" phase of the U. of Arizona
   Portable Optimizer, but redone to work on our list-structured
   representation for RTL instead of their string representation.

   The LOG_LINKS of each insn identify the most recent assignment
   to each REG used in the insn.  LOG_LINKS is a list of previous insns,
   each of which contains a SET for a REG that is used in this insn
   and not used or set in between.  LOG_LINKS never cross basic blocks.
   They were set up by the preceding pass (lifetime analysis).

   We try to combine each pair of insns joined by a logical link.
   We also try to combine triples of insns A, B and C when
   C has a link back to B and B has a link back to A.

   There are no LOG_LINKS for uses of CC0; none are needed, because
   the insn that sets CC0 is always immediately before the insn that
   tests it.  So we always regard a branch insn as having a logical
   link to the preceding insn.  The same is true for an insn
   explicitly using CC0.

   We check (with use_crosses_set_p) to avoid combining in such a way
   as to move a computation to a place where its value would be different.

   Combination is done by mathematically substituting the previous
   insn(s) values for the regs they set into the expressions in
   the later insns that refer to these regs.  If the result is a valid insn
   for our target machine, according to the machine description,
   we install it, delete the earlier insns, and update the data flow
   information (LOG_LINKS and REG_NOTES) for what we did.

   There are a few exceptions where the dataflow information created by
   flow.c isn't completely updated:

   - reg_live_length is not updated
   - reg_n_refs is not adjusted in the rare case when a register is
     no longer required in a computation
   - there are extremely rare cases (see distribute_regnotes) when a
     REG_DEAD note is lost
   - a LOG_LINKS entry that refers to an insn with multiple SETs may be
     removed because there is no way to know which register it was
     linking

   To simplify substitution, we combine only when the earlier insn(s)
   consist of only a single assignment.  To simplify updating afterward,
   we never combine when a subroutine call appears in the middle.

   Since we do not represent assignments to CC0 explicitly except when that
   is all an insn does, there is no LOG_LINKS entry in an insn that uses
   the condition code for the insn that set the condition code.
   Fortunately, these two insns must be consecutive.
   Therefore, every JUMP_INSN is taken to have an implicit logical link
   to the preceding insn.  This is not quite right, since non-jumps can
   also use the condition code; but in practice such insns would not
   combine anyway.  */
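
/* A minimal illustration of the substitution described above (an editor's
   sketch, not taken from the original sources): given the two linked insns

	(set (reg:SI 100) (plus:SI (reg:SI 99) (const_int 4)))
	(set (mem:SI (reg:SI 100)) (const_int 0))

   combine substitutes the SET_SRC of the first insn into the second,
   yielding

	(set (mem:SI (plus:SI (reg:SI 99) (const_int 4))) (const_int 0))

   and keeps the result only if the machine description recognizes it,
   in which case the first insn is deleted.  */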

#include "config.h"
#include "system.h"
#include "rtl.h"
#include "tm_p.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "insn-config.h"
#include "function.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "insn-flags.h"
#include "insn-codes.h"
#include "insn-attr.h"
#include "recog.h"
#include "real.h"
#include "toplev.h"

/* It is not safe to use ordinary gen_lowpart in combine.
   Use gen_lowpart_for_combine instead.  See comments there.  */
#define gen_lowpart dont_use_gen_lowpart_you_dummy

/* Number of attempts to combine instructions in this function.  */

static int combine_attempts;

/* Number of attempts that got as far as substitution in this function.  */

static int combine_merges;

/* Number of instructions combined with added SETs in this function.  */

static int combine_extras;

/* Number of instructions combined in this function.  */

static int combine_successes;

/* Totals over entire compilation.  */

static int total_attempts, total_merges, total_extras, total_successes;

/* Define a default value for REVERSIBLE_CC_MODE.
   We can never assume that a condition code mode is safe to reverse unless
   the md tells us so.  */
#ifndef REVERSIBLE_CC_MODE
#define REVERSIBLE_CC_MODE(MODE) 0
#endif
\f
/* Vector mapping INSN_UIDs to cuids.
   The cuids are like uids but always increase monotonically.
   Combine always uses cuids so that it can compare them.
   But actually renumbering the uids, which we used to do,
   proves to be a bad idea because it makes it hard to compare
   the dumps produced by earlier passes with those from later passes.  */

static int *uid_cuid;
static int max_uid_cuid;

/* Get the cuid of an insn.  */

#define INSN_CUID(INSN) \
  (INSN_UID (INSN) > max_uid_cuid ? insn_cuid (INSN) : uid_cuid[INSN_UID (INSN)])

/* Maximum register number, which is the size of the tables below.  */

static int combine_max_regno;

/* Record last point of death of (hard or pseudo) register n.  */

static rtx *reg_last_death;

/* Record last point of modification of (hard or pseudo) register n.  */

static rtx *reg_last_set;

/* Record the cuid of the last insn that invalidated memory
   (anything that writes memory, and subroutine calls, but not pushes).  */

static int mem_last_set;

/* Record the cuid of the last CALL_INSN
   so we can tell whether a potential combination crosses any calls.  */

static int last_call_cuid;

/* When `subst' is called, this is the insn that is being modified
   (by combining in a previous insn).  The PATTERN of this insn
   is still the old pattern partially modified and it should not be
   looked at, but this may be used to examine the successors of the insn
   to judge whether a simplification is valid.  */

static rtx subst_insn;

/* This is an insn that belongs before subst_insn, but is not currently
   on the insn chain.  */

static rtx subst_prev_insn;

/* This is the lowest CUID that `subst' is currently dealing with.
   get_last_value will not return a value if the register was set at or
   after this CUID.  If not for this mechanism, we could get confused if
   I2 or I1 in try_combine were an insn that used the old value of a register
   to obtain a new value.  In that case, we might erroneously get the
   new value of the register when we wanted the old one.  */

static int subst_low_cuid;

/* This contains any hard registers that are used in newpat; reg_dead_at_p
   must consider all these registers to be always live.  */

static HARD_REG_SET newpat_used_regs;

/* This is an insn to which a LOG_LINKS entry has been added.  If this
   insn is earlier than I2 or I3, combine should rescan starting at
   that location.  */

static rtx added_links_insn;

/* Basic block number of the block in which we are performing combines.  */
static int this_basic_block;

/* A bitmap indicating which blocks had registers go dead at entry.
   After combine, we'll need to re-do global life analysis with
   those blocks as starting points.  */
static sbitmap refresh_blocks;
static int need_refresh;
\f
/* The next group of arrays allows the recording of the last value assigned
   to (hard or pseudo) register n.  We use this information to see if an
   operation being processed is redundant given a prior operation performed
   on the register.  For example, an `and' with a constant is redundant if
   all the zero bits are already known to be turned off.

   We use an approach similar to that used by cse, but change it in the
   following ways:

   (1) We do not want to reinitialize at each label.
   (2) It is useful, but not critical, to know the actual value assigned
       to a register.  Often just its form is helpful.

   Therefore, we maintain the following arrays:

   reg_last_set_value		the last value assigned
   reg_last_set_label		records the value of label_tick when the
				register was assigned
   reg_last_set_table_tick	records the value of label_tick when a
				value using the register is assigned
   reg_last_set_invalid		set to non-zero when it is not valid
				to use the value of this register in some
				register's value

   To understand the usage of these tables, it is important to understand
   the distinction between the value in reg_last_set_value being valid
   and the register being validly contained in some other expression in the
   table.

   Entry I in reg_last_set_value is valid if it is non-zero, and either
   reg_n_sets[i] is 1 or reg_last_set_label[i] == label_tick.

   Register I may validly appear in any expression returned for the value
   of another register if reg_n_sets[i] is 1.  It may also appear in the
   value for register J if reg_last_set_label[i] < reg_last_set_label[j] or
   reg_last_set_invalid[j] is zero.

   If an expression is found in the table containing a register which may
   not validly appear in an expression, the register is replaced by
   something that won't match, (clobber (const_int 0)).

   reg_last_set_invalid[i] is set non-zero when register I is being assigned
   to and reg_last_set_table_tick[i] == label_tick.  */

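/* Worked example (an editor's illustration, not from the original file):
   if the only value ever assigned to pseudo 100 is known to have nonzero
   bits only in its low byte, then a later

	(and:SI (reg:SI 100) (const_int 255))

   is redundant and can be replaced by (reg:SI 100) itself, since every bit
   the AND would clear is already known to be zero.  */
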
/* Record last value assigned to (hard or pseudo) register n.  */

static rtx *reg_last_set_value;

/* Record the value of label_tick when the value for register n is placed in
   reg_last_set_value[n].  */

static int *reg_last_set_label;

/* Record the value of label_tick when an expression involving register n
   is placed in reg_last_set_value.  */

static int *reg_last_set_table_tick;

/* Set non-zero if references to register n in expressions should not be
   used.  */

static char *reg_last_set_invalid;

/* Incremented for each label.  */

static int label_tick;

/* Some registers that are set more than once and used in more than one
   basic block are nevertheless always set in similar ways.  For example,
   a QImode register may be loaded from memory in two places on a machine
   where byte loads zero extend.

   We record in the following array what we know about the nonzero
   bits of a register, specifically which bits are known to be zero.

   If an entry is zero, it means that we don't know anything special.  */

static unsigned HOST_WIDE_INT *reg_nonzero_bits;

/* Mode used to compute significance in reg_nonzero_bits.  It is the largest
   integer mode that can fit in HOST_BITS_PER_WIDE_INT.  */

static enum machine_mode nonzero_bits_mode;

/* Nonzero if we know that a register has some leading bits that are always
   equal to the sign bit.  */

static char *reg_sign_bit_copies;

/* Nonzero when reg_nonzero_bits and reg_sign_bit_copies can be safely used.
   It is zero while computing them and after combine has completed.  This
   former test prevents propagating values based on previously set values,
   which can be incorrect if a variable is modified in a loop.  */

static int nonzero_sign_valid;

/* These arrays are maintained in parallel with reg_last_set_value
   and are used to store the mode in which the register was last set,
   the bits that were known to be zero when it was last set, and the
   number of sign bit copies it was known to have when it was last set.  */

static enum machine_mode *reg_last_set_mode;
static unsigned HOST_WIDE_INT *reg_last_set_nonzero_bits;
static char *reg_last_set_sign_bit_copies;
\f
/* Record one modification to rtl structure
   to be undone by storing old_contents into *where.
   is_int is 1 if the contents are an int.  */

struct undo
{
  struct undo *next;
  int is_int;
  union {rtx r; int i;} old_contents;
  union {rtx *r; int *i;} where;
};

/* Record a bunch of changes to be undone, up to MAX_UNDO of them.
   num_undo says how many are currently recorded.

   storage is nonzero if we must undo the allocation of new storage.
   The value of storage is what to pass to obfree.

   other_insn is nonzero if we have modified some other insn in the process
   of working on subst_insn.  It must be verified too.

   previous_undos is the value of undobuf.undos when we started processing
   this substitution.  This will prevent gen_rtx_combine from re-using a
   piece from the previous expression.  Doing so can produce circular rtl
   structures.  */

struct undobuf
{
  char *storage;
  struct undo *undos;
  struct undo *frees;
  struct undo *previous_undos;
  rtx other_insn;
};

static struct undobuf undobuf;

/* Number of times the pseudo being substituted for
   was found and replaced.  */

static int n_occurrences;

static void do_SUBST			PROTO((rtx *, rtx));
static void do_SUBST_INT		PROTO((int *, int));
static void init_reg_last_arrays	PROTO((void));
static void setup_incoming_promotions	PROTO((void));
static void set_nonzero_bits_and_sign_copies  PROTO((rtx, rtx, void *));
static int can_combine_p		PROTO((rtx, rtx, rtx, rtx, rtx *, rtx *));
static int sets_function_arg_p		PROTO((rtx));
static int combinable_i3pat		PROTO((rtx, rtx *, rtx, rtx, int, rtx *));
static int contains_muldiv		PROTO((rtx));
static rtx try_combine			PROTO((rtx, rtx, rtx));
static void undo_all			PROTO((void));
static void undo_commit			PROTO((void));
static rtx *find_split_point		PROTO((rtx *, rtx));
static rtx subst			PROTO((rtx, rtx, rtx, int, int));
static rtx combine_simplify_rtx		PROTO((rtx, enum machine_mode, int, int));
static rtx simplify_if_then_else	PROTO((rtx));
static rtx simplify_set			PROTO((rtx));
static rtx simplify_logical		PROTO((rtx, int));
static rtx expand_compound_operation	PROTO((rtx));
static rtx expand_field_assignment	PROTO((rtx));
static rtx make_extraction		PROTO((enum machine_mode, rtx, int, rtx, int,
					       int, int, int));
static rtx extract_left_shift		PROTO((rtx, int));
static rtx make_compound_operation	PROTO((rtx, enum rtx_code));
static int get_pos_from_mask		PROTO((unsigned HOST_WIDE_INT, int *));
static rtx force_to_mode		PROTO((rtx, enum machine_mode,
					       unsigned HOST_WIDE_INT, rtx, int));
static rtx if_then_else_cond		PROTO((rtx, rtx *, rtx *));
static rtx known_cond			PROTO((rtx, enum rtx_code, rtx, rtx));
static int rtx_equal_for_field_assignment_p PROTO((rtx, rtx));
static rtx make_field_assignment	PROTO((rtx));
static rtx apply_distributive_law	PROTO((rtx));
static rtx simplify_and_const_int	PROTO((rtx, enum machine_mode, rtx,
					       unsigned HOST_WIDE_INT));
static unsigned HOST_WIDE_INT nonzero_bits  PROTO((rtx, enum machine_mode));
static int num_sign_bit_copies		PROTO((rtx, enum machine_mode));
static int merge_outer_ops		PROTO((enum rtx_code *, HOST_WIDE_INT *,
					       enum rtx_code, HOST_WIDE_INT,
					       enum machine_mode, int *));
static rtx simplify_shift_const	PROTO((rtx, enum rtx_code, enum machine_mode,
					       rtx, int));
static int recog_for_combine		PROTO((rtx *, rtx, rtx *));
static rtx gen_lowpart_for_combine	PROTO((enum machine_mode, rtx));
static rtx gen_rtx_combine PVPROTO((enum rtx_code code, enum machine_mode mode,
				    ...));
static rtx gen_binary			PROTO((enum rtx_code, enum machine_mode,
					       rtx, rtx));
static rtx gen_unary			PROTO((enum rtx_code, enum machine_mode,
					       enum machine_mode, rtx));
static enum rtx_code simplify_comparison  PROTO((enum rtx_code, rtx *, rtx *));
static int reversible_comparison_p	PROTO((rtx));
static void update_table_tick		PROTO((rtx));
static void record_value_for_reg	PROTO((rtx, rtx, rtx));
static void check_promoted_subreg	PROTO((rtx, rtx));
static void record_dead_and_set_regs_1	PROTO((rtx, rtx, void *));
static void record_dead_and_set_regs	PROTO((rtx));
static int get_last_value_validate	PROTO((rtx *, rtx, int, int));
static rtx get_last_value		PROTO((rtx));
static int use_crosses_set_p		PROTO((rtx, int));
static void reg_dead_at_p_1		PROTO((rtx, rtx, void *));
static int reg_dead_at_p		PROTO((rtx, rtx));
static void move_deaths			PROTO((rtx, rtx, int, rtx, rtx *));
static int reg_bitfield_target_p	PROTO((rtx, rtx));
static void distribute_notes		PROTO((rtx, rtx, rtx, rtx, rtx, rtx));
static void distribute_links		PROTO((rtx));
static void mark_used_regs_combine	PROTO((rtx));
static int insn_cuid			PROTO((rtx));
\f
/* Substitute NEWVAL, an rtx expression, into INTO, a place in some
   insn.  The substitution can be undone by undo_all.  If INTO is already
   set to NEWVAL, do not record this change.  Because computing NEWVAL might
   also call SUBST, we have to compute it before we put anything into
   the undo table.  */

static void
do_SUBST (into, newval)
     rtx *into, newval;
{
  struct undo *buf;
  rtx oldval = *into;

  if (oldval == newval)
    return;

  if (undobuf.frees)
    buf = undobuf.frees, undobuf.frees = buf->next;
  else
    buf = (struct undo *) xmalloc (sizeof (struct undo));

  buf->is_int = 0;
  buf->where.r = into;
  buf->old_contents.r = oldval;
  *into = newval;

  buf->next = undobuf.undos, undobuf.undos = buf;
}

#define SUBST(INTO, NEWVAL)	do_SUBST (&(INTO), (NEWVAL))
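
/* Illustrative usage sketch (an editor's addition; `recognized' and
   `new_rtx' are hypothetical names, not variables in this file):

     SUBST (XEXP (x, 0), new_rtx);
     ...
     if (! recognized)
       undo_all ();

   Each SUBST logs one struct undo on undobuf.undos, so any number of
   tentative rewrites can be backed out in LIFO order; undo_all restores
   every *where from its saved old_contents.  */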

/* Similar to SUBST, but NEWVAL is an int expression.  Note that substitution
   for the value of a HOST_WIDE_INT value (including CONST_INT) is
   not safe.  */

static void
do_SUBST_INT (into, newval)
     int *into, newval;
{
  struct undo *buf;
  int oldval = *into;

  if (oldval == newval)
    return;

  if (undobuf.frees)
    buf = undobuf.frees, undobuf.frees = buf->next;
  else
    buf = (struct undo *) xmalloc (sizeof (struct undo));

  buf->is_int = 1;
  buf->where.i = into;
  buf->old_contents.i = oldval;
  *into = newval;

  buf->next = undobuf.undos, undobuf.undos = buf;
}

#define SUBST_INT(INTO, NEWVAL)	do_SUBST_INT (&(INTO), (NEWVAL))
\f
/* Main entry point for combiner.  F is the first insn of the function.
   NREGS is the first unused pseudo-reg number.  */

void
combine_instructions (f, nregs)
     rtx f;
     int nregs;
{
  register rtx insn, next;
#ifdef HAVE_cc0
  register rtx prev;
#endif
  register int i;
  register rtx links, nextlinks;

  combine_attempts = 0;
  combine_merges = 0;
  combine_extras = 0;
  combine_successes = 0;

  combine_max_regno = nregs;

  reg_nonzero_bits = ((unsigned HOST_WIDE_INT *)
		      xcalloc (nregs, sizeof (unsigned HOST_WIDE_INT)));
  reg_sign_bit_copies = (char *) xcalloc (nregs, sizeof (char));

  reg_last_death = (rtx *) xmalloc (nregs * sizeof (rtx));
  reg_last_set = (rtx *) xmalloc (nregs * sizeof (rtx));
  reg_last_set_value = (rtx *) xmalloc (nregs * sizeof (rtx));
  reg_last_set_table_tick = (int *) xmalloc (nregs * sizeof (int));
  reg_last_set_label = (int *) xmalloc (nregs * sizeof (int));
  reg_last_set_invalid = (char *) xmalloc (nregs * sizeof (char));
  reg_last_set_mode
    = (enum machine_mode *) xmalloc (nregs * sizeof (enum machine_mode));
  reg_last_set_nonzero_bits
    = (unsigned HOST_WIDE_INT *) xmalloc (nregs * sizeof (HOST_WIDE_INT));
  reg_last_set_sign_bit_copies
    = (char *) xmalloc (nregs * sizeof (char));

  init_reg_last_arrays ();

  init_recog_no_volatile ();

  /* Compute maximum uid value so uid_cuid can be allocated.  */

  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    if (INSN_UID (insn) > i)
      i = INSN_UID (insn);

  uid_cuid = (int *) xmalloc ((i + 1) * sizeof (int));
  max_uid_cuid = i;

  nonzero_bits_mode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);

  /* Don't use reg_nonzero_bits when computing it.  This can cause problems
     when, for example, we have j <<= 1 in a loop.  */

  nonzero_sign_valid = 0;

  /* Compute the mapping from uids to cuids.
     Cuids are numbers assigned to insns, like uids,
     except that cuids increase monotonically through the code.

     Scan all SETs and see if we can deduce anything about what
     bits are known to be zero for some registers and how many copies
     of the sign bit are known to exist for those registers.

     Also set any known values so that we can use them while searching
     for what bits are known to be set.  */

  label_tick = 1;

  /* We need to initialize it here, because record_dead_and_set_regs may call
     get_last_value.  */
  subst_prev_insn = NULL_RTX;

  setup_incoming_promotions ();

  refresh_blocks = sbitmap_alloc (n_basic_blocks);
  sbitmap_zero (refresh_blocks);
  need_refresh = 0;

  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    {
      uid_cuid[INSN_UID (insn)] = ++i;
      subst_low_cuid = i;
      subst_insn = insn;

      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	{
	  note_stores (PATTERN (insn), set_nonzero_bits_and_sign_copies,
		       NULL);
	  record_dead_and_set_regs (insn);

#ifdef AUTO_INC_DEC
	  for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
	    if (REG_NOTE_KIND (links) == REG_INC)
	      set_nonzero_bits_and_sign_copies (XEXP (links, 0), NULL_RTX,
						NULL);
#endif
	}

      if (GET_CODE (insn) == CODE_LABEL)
	label_tick++;
    }

  nonzero_sign_valid = 1;

  /* Now scan all the insns in forward order.  */

  this_basic_block = -1;
  label_tick = 1;
  last_call_cuid = 0;
  mem_last_set = 0;
  init_reg_last_arrays ();
  setup_incoming_promotions ();

  for (insn = f; insn; insn = next ? next : NEXT_INSN (insn))
    {
      next = 0;

      /* If INSN starts a new basic block, update our basic block number.  */
      if (this_basic_block + 1 < n_basic_blocks
	  && BLOCK_HEAD (this_basic_block + 1) == insn)
	this_basic_block++;

      if (GET_CODE (insn) == CODE_LABEL)
	label_tick++;

      else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	{
	  /* See if we know about function return values before this
	     insn based upon SUBREG flags.  */
	  check_promoted_subreg (insn, PATTERN (insn));

	  /* Try this insn with each insn it links back to.  */

	  for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
	    if ((next = try_combine (insn, XEXP (links, 0), NULL_RTX)) != 0)
	      goto retry;

	  /* Try each sequence of three linked insns ending with this one.  */

	  for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
	    for (nextlinks = LOG_LINKS (XEXP (links, 0)); nextlinks;
		 nextlinks = XEXP (nextlinks, 1))
	      if ((next = try_combine (insn, XEXP (links, 0),
				       XEXP (nextlinks, 0))) != 0)
		goto retry;

#ifdef HAVE_cc0
	  /* Try to combine a jump insn that uses CC0
	     with a preceding insn that sets CC0, and maybe with its
	     logical predecessor as well.
	     This is how we make decrement-and-branch insns.
	     We need this special code because data flow connections
	     via CC0 do not get entered in LOG_LINKS.  */

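	  /* Sketch of the shape involved (an editor's illustration, not
	     from the original sources): a typical CC0 pair is

		(set (cc0) (compare (reg:SI 100) (const_int 1)))
		(set (pc) (if_then_else (ge (cc0) (const_int 0))
					(label_ref 27) (pc)))

	     and combining it, possibly together with a preceding decrement
	     of reg 100, lets targets that provide a decrement-and-branch
	     pattern match the whole sequence as one insn.  */
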
	  if (GET_CODE (insn) == JUMP_INSN
	      && (prev = prev_nonnote_insn (insn)) != 0
	      && GET_CODE (prev) == INSN
	      && sets_cc0_p (PATTERN (prev)))
	    {
	      if ((next = try_combine (insn, prev, NULL_RTX)) != 0)
		goto retry;

	      for (nextlinks = LOG_LINKS (prev); nextlinks;
		   nextlinks = XEXP (nextlinks, 1))
		if ((next = try_combine (insn, prev,
					 XEXP (nextlinks, 0))) != 0)
		  goto retry;
	    }

	  /* Do the same for an insn that explicitly references CC0.  */
	  if (GET_CODE (insn) == INSN
	      && (prev = prev_nonnote_insn (insn)) != 0
	      && GET_CODE (prev) == INSN
	      && sets_cc0_p (PATTERN (prev))
	      && GET_CODE (PATTERN (insn)) == SET
	      && reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (insn))))
	    {
	      if ((next = try_combine (insn, prev, NULL_RTX)) != 0)
		goto retry;

	      for (nextlinks = LOG_LINKS (prev); nextlinks;
		   nextlinks = XEXP (nextlinks, 1))
		if ((next = try_combine (insn, prev,
					 XEXP (nextlinks, 0))) != 0)
		  goto retry;
	    }

	  /* Finally, see if any of the insns that this insn links to
	     explicitly references CC0.  If so, try this insn, that insn,
	     and its predecessor if it sets CC0.  */
	  for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
	    if (GET_CODE (XEXP (links, 0)) == INSN
		&& GET_CODE (PATTERN (XEXP (links, 0))) == SET
		&& reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (XEXP (links, 0))))
		&& (prev = prev_nonnote_insn (XEXP (links, 0))) != 0
		&& GET_CODE (prev) == INSN
		&& sets_cc0_p (PATTERN (prev))
		&& (next = try_combine (insn, XEXP (links, 0), prev)) != 0)
	      goto retry;
#endif

	  /* Try combining an insn with two different insns whose results it
	     uses.  */
	  for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
	    for (nextlinks = XEXP (links, 1); nextlinks;
		 nextlinks = XEXP (nextlinks, 1))
	      if ((next = try_combine (insn, XEXP (links, 0),
				       XEXP (nextlinks, 0))) != 0)
		goto retry;

	  if (GET_CODE (insn) != NOTE)
	    record_dead_and_set_regs (insn);

	retry:
	  ;
	}
    }

  if (need_refresh)
    {
      compute_bb_for_insn (get_max_uid ());
      update_life_info (refresh_blocks, UPDATE_LIFE_GLOBAL_RM_NOTES,
			PROP_DEATH_NOTES);
    }

  /* Clean up.  */
  sbitmap_free (refresh_blocks);
  free (reg_nonzero_bits);
  free (reg_sign_bit_copies);
  free (reg_last_death);
  free (reg_last_set);
  free (reg_last_set_value);
  free (reg_last_set_table_tick);
  free (reg_last_set_label);
  free (reg_last_set_invalid);
  free (reg_last_set_mode);
  free (reg_last_set_nonzero_bits);
  free (reg_last_set_sign_bit_copies);
  free (uid_cuid);

  {
    struct undo *undo, *next;
    for (undo = undobuf.frees; undo; undo = next)
      {
	next = undo->next;
	free (undo);
      }
    undobuf.frees = 0;
  }

  total_attempts += combine_attempts;
  total_merges += combine_merges;
  total_extras += combine_extras;
  total_successes += combine_successes;

  nonzero_sign_valid = 0;

  /* Make recognizer allow volatile MEMs again.  */
  init_recog ();
}

/* Wipe the reg_last_xxx arrays in preparation for another pass.  */

static void
init_reg_last_arrays ()
{
  int nregs = combine_max_regno;

  bzero ((char *) reg_last_death, nregs * sizeof (rtx));
  bzero ((char *) reg_last_set, nregs * sizeof (rtx));
  bzero ((char *) reg_last_set_value, nregs * sizeof (rtx));
  bzero ((char *) reg_last_set_table_tick, nregs * sizeof (int));
  bzero ((char *) reg_last_set_label, nregs * sizeof (int));
  bzero (reg_last_set_invalid, nregs * sizeof (char));
  bzero ((char *) reg_last_set_mode, nregs * sizeof (enum machine_mode));
  bzero ((char *) reg_last_set_nonzero_bits, nregs * sizeof (HOST_WIDE_INT));
  bzero (reg_last_set_sign_bit_copies, nregs * sizeof (char));
}
\f
/* Set up any promoted values for incoming argument registers.  */

static void
setup_incoming_promotions ()
{
#ifdef PROMOTE_FUNCTION_ARGS
  int regno;
  rtx reg;
  enum machine_mode mode;
  int unsignedp;
  rtx first = get_insns ();

#ifndef OUTGOING_REGNO
#define OUTGOING_REGNO(N) N
#endif
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    /* Check whether this register can hold an incoming pointer
       argument.  FUNCTION_ARG_REGNO_P tests outgoing register
       numbers, so translate if necessary due to register windows.  */
    if (FUNCTION_ARG_REGNO_P (OUTGOING_REGNO (regno))
	&& (reg = promoted_input_arg (regno, &mode, &unsignedp)) != 0)
      {
	record_value_for_reg
	  (reg, first, gen_rtx_fmt_e ((unsignedp ? ZERO_EXTEND
				       : SIGN_EXTEND),
				      GET_MODE (reg),
				      gen_rtx_CLOBBER (mode, const0_rtx)));
      }
#endif
}
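
/* Note on the value recorded above (an editor's explanation, not in the
   original): for an argument promoted from, say, QImode to SImode, the
   recorded value has the shape

	(zero_extend:SI (clobber:QI (const_int 0)))

   i.e. "some unknown QImode value, known to arrive zero-extended" (or
   sign-extended when UNSIGNEDP is false), which is exactly the form that
   later nonzero-bits and sign-bit-copies queries can exploit.  */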
\f
/* Called via note_stores.  If X is a pseudo that is narrower than
   HOST_BITS_PER_WIDE_INT and is being set, record what bits are known zero.

   If we are setting only a portion of X and we can't figure out what
   portion, assume all bits will be used since we don't know what will
   be happening.

   Similarly, set how many bits of X are known to be copies of the sign bit
   at all locations in the function.  This is the smallest number implied
   by any set of X.  */

static void
set_nonzero_bits_and_sign_copies (x, set, data)
     rtx x;
     rtx set;
     void *data ATTRIBUTE_UNUSED;
{
  int num;

  if (GET_CODE (x) == REG
      && REGNO (x) >= FIRST_PSEUDO_REGISTER
      /* If this register is undefined at the start of the file, we can't
	 say what its contents were.  */
      && ! REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, REGNO (x))
      && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (set == 0 || GET_CODE (set) == CLOBBER)
	{
	  reg_nonzero_bits[REGNO (x)] = GET_MODE_MASK (GET_MODE (x));
	  reg_sign_bit_copies[REGNO (x)] = 1;
	  return;
	}

      /* If this is a complex assignment, see if we can convert it into a
	 simple assignment.  */
      set = expand_field_assignment (set);

      /* If this is a simple assignment, or we have a paradoxical SUBREG,
	 set what we know about X.  */

      if (SET_DEST (set) == x
	  || (GET_CODE (SET_DEST (set)) == SUBREG
	      && (GET_MODE_SIZE (GET_MODE (SET_DEST (set)))
		  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (set)))))
	      && SUBREG_REG (SET_DEST (set)) == x))
	{
	  rtx src = SET_SRC (set);

#ifdef SHORT_IMMEDIATES_SIGN_EXTEND
	  /* If X is narrower than a word and SRC is a non-negative
	     constant that would appear negative in the mode of X,
	     sign-extend it for use in reg_nonzero_bits because some
	     machines (maybe most) will actually do the sign-extension
	     and this is the conservative approach.

	     ??? For 2.5, try to tighten up the MD files in this regard
	     instead of this kludge.  */

	  if (GET_MODE_BITSIZE (GET_MODE (x)) < BITS_PER_WORD
	      && GET_CODE (src) == CONST_INT
	      && INTVAL (src) > 0
	      && 0 != (INTVAL (src)
		       & ((HOST_WIDE_INT) 1
			  << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
	    src = GEN_INT (INTVAL (src)
			   | ((HOST_WIDE_INT) (-1)
			      << GET_MODE_BITSIZE (GET_MODE (x))));
#endif
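
	  /* Worked example (an editor's addition): for a QImode X and
	     SRC = (const_int 128), bit 7 is set, so SRC is rewritten as
	     GEN_INT (128 | (-1 << 8)) = (const_int -128); its nonzero
	     bits then include all the high bits, matching what hardware
	     sign-extension of the byte 0x80 would produce.  */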

	  reg_nonzero_bits[REGNO (x)]
	    |= nonzero_bits (src, nonzero_bits_mode);
	  num = num_sign_bit_copies (SET_SRC (set), GET_MODE (x));
	  if (reg_sign_bit_copies[REGNO (x)] == 0
	      || reg_sign_bit_copies[REGNO (x)] > num)
	    reg_sign_bit_copies[REGNO (x)] = num;
	}
      else
	{
	  reg_nonzero_bits[REGNO (x)] = GET_MODE_MASK (GET_MODE (x));
	  reg_sign_bit_copies[REGNO (x)] = 1;
	}
    }
}
\f
/* See if INSN can be combined into I3.  PRED and SUCC are optionally
   insns that were previously combined into I3 or that will be combined
   into the merger of INSN and I3.

   Return 0 if the combination is not allowed for any reason.

   If the combination is allowed, *PDEST will be set to the single
   destination of INSN and *PSRC to the single source, and this function
   will return 1.  */

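/* Calling-convention sketch (an editor's addition; the surrounding
   variables are hypothetical):

     rtx dest, src;
     if (can_combine_p (i2, i3, i1, NULL_RTX, &dest, &src))
       ... it is safe to substitute SRC for DEST when merging into I3 ...

   try_combine is the caller in this file, passing the neighboring insns
   of a three-insn combination as PRED and SUCC.  */
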
static int
can_combine_p (insn, i3, pred, succ, pdest, psrc)
     rtx insn;
     rtx i3;
     rtx pred ATTRIBUTE_UNUSED;
     rtx succ;
     rtx *pdest, *psrc;
{
  int i;
  rtx set = 0, src, dest;
  rtx p;
#ifdef AUTO_INC_DEC
  rtx link;
#endif
  int all_adjacent = (succ ? (next_active_insn (insn) == succ
			      && next_active_insn (succ) == i3)
		      : next_active_insn (insn) == i3);

  /* Can combine only if previous insn is a SET of a REG, a SUBREG or CC0,
     or a PARALLEL consisting of such a SET and CLOBBERs.

     If INSN has CLOBBER parallel parts, ignore them for our processing.
     By definition, these happen during the execution of the insn.  When it
     is merged with another insn, all bets are off.  If they are, in fact,
     needed and aren't also supplied in I3, they may be added by
     recog_for_combine.  Otherwise, it won't match.

     We can also ignore a SET whose SET_DEST is mentioned in a REG_UNUSED
     note.

     Get the source and destination of INSN.  If more than one, can't
     combine.  */

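  /* For illustration (an editor's addition, not from the original
     sources), a combinable INSN with ignorable CLOBBER parts might be

	(parallel [(set (reg:SI 100) (plus:SI (reg:SI 99) (const_int 1)))
		   (clobber (scratch:SI))])

     where only the SET participates in the merge; the CLOBBER, if still
     needed, is re-supplied by recog_for_combine.  */
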
  if (GET_CODE (PATTERN (insn)) == SET)
    set = PATTERN (insn);
  else if (GET_CODE (PATTERN (insn)) == PARALLEL
	   && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
    {
      for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
	{
	  rtx elt = XVECEXP (PATTERN (insn), 0, i);

	  switch (GET_CODE (elt))
	    {
	    /* This is important to combine floating point insns
	       for the SH4 port.  */
	    case USE:
	      /* Combining an isolated USE doesn't make sense.
		 We depend here on combinable_i3pat to reject them.  */
	      /* The code below this loop only verifies that the inputs of
		 the SET in INSN do not change.  We call reg_set_between_p
		 to verify that the REG in the USE does not change between
		 I3 and INSN.
		 If the USE in INSN was for a pseudo register, the matching
		 insn pattern will likely match any register; combining this
		 with any other USE would only be safe if we knew that the
		 used registers have identical values, or if there was
		 something to tell them apart, e.g. different modes.  For
		 now, we forgo such complicated tests and simply disallow
		 combining of USEs of pseudo registers with any other USE.  */
	      if (GET_CODE (XEXP (elt, 0)) == REG
		  && GET_CODE (PATTERN (i3)) == PARALLEL)
		{
		  rtx i3pat = PATTERN (i3);
		  int i = XVECLEN (i3pat, 0) - 1;
		  int regno = REGNO (XEXP (elt, 0));
		  do
		    {
		      rtx i3elt = XVECEXP (i3pat, 0, i);
		      if (GET_CODE (i3elt) == USE
			  && GET_CODE (XEXP (i3elt, 0)) == REG
			  && (REGNO (XEXP (i3elt, 0)) == regno
			      ? reg_set_between_p (XEXP (elt, 0),
						   PREV_INSN (insn), i3)
			      : regno >= FIRST_PSEUDO_REGISTER))
			return 0;
		    }
		  while (--i >= 0);
		}
	      break;

	    /* We can ignore CLOBBERs.  */
	    case CLOBBER:
	      break;

	    case SET:
	      /* Ignore SETs whose result isn't used but not those that
		 have side-effects.  */
	      if (find_reg_note (insn, REG_UNUSED, SET_DEST (elt))
		  && ! side_effects_p (elt))
		break;

	      /* If we have already found a SET, this is a second one and
		 so we cannot combine with this insn.  */
	      if (set)
		return 0;

	      set = elt;
	      break;

	    default:
	      /* Anything else means we can't combine.  */
	      return 0;
	    }
	}

      if (set == 0
	  /* If SET_SRC is an ASM_OPERANDS we can't throw away these CLOBBERs,
	     so don't do anything with it.  */
	  || GET_CODE (SET_SRC (set)) == ASM_OPERANDS)
	return 0;
    }
  else
    return 0;

  if (set == 0)
    return 0;

  set = expand_field_assignment (set);
  src = SET_SRC (set), dest = SET_DEST (set);

  /* Don't eliminate a store in the stack pointer.  */
  if (dest == stack_pointer_rtx
      /* If we couldn't eliminate a field assignment, we can't combine.  */
      || GET_CODE (dest) == ZERO_EXTRACT || GET_CODE (dest) == STRICT_LOW_PART
      /* Don't combine with an insn that sets a register to itself if it has
	 a REG_EQUAL note.  This may be part of a REG_NO_CONFLICT sequence.  */
      || (rtx_equal_p (src, dest) && find_reg_note (insn, REG_EQUAL, NULL_RTX))
      /* Can't merge a function call.  */
      || GET_CODE (src) == CALL
      /* Don't eliminate a function call argument.  */
      || (GET_CODE (i3) == CALL_INSN
	  && (find_reg_fusage (i3, USE, dest)
	      || (GET_CODE (dest) == REG
		  && REGNO (dest) < FIRST_PSEUDO_REGISTER
		  && global_regs[REGNO (dest)])))
      /* Don't substitute into an incremented register.  */
      || FIND_REG_INC_NOTE (i3, dest)
      || (succ && FIND_REG_INC_NOTE (succ, dest))
#if 0
      /* Don't combine the end of a libcall into anything.  */
      /* ??? This gives worse code, and appears to be unnecessary, since no
	 pass after flow uses REG_LIBCALL/REG_RETVAL notes.  Local-alloc does
	 use REG_RETVAL notes for noconflict blocks, but other code here
	 makes sure that those insns don't disappear.  */
      || find_reg_note (insn, REG_RETVAL, NULL_RTX)
#endif
      /* Make sure that DEST is not used after SUCC but before I3.  */
      || (succ && ! all_adjacent
	  && reg_used_between_p (dest, succ, i3))
      /* Make sure that the value that is to be substituted for the register
	 does not use any registers whose values alter in between.  However,
	 if the insns are adjacent, a use can't cross a set even though we
	 think it might (this can happen for a sequence of insns each setting
	 the same destination; reg_last_set of that register might point to
	 a NOTE).  If INSN has a REG_EQUIV note, the register is always
	 equivalent to the memory so the substitution is valid even if there
	 are intervening stores.  Also, don't move a volatile asm or
	 UNSPEC_VOLATILE across any other insns.  */
      || (! all_adjacent
	  && (((GET_CODE (src) != MEM
		|| ! find_reg_note (insn, REG_EQUIV, src))
	       && use_crosses_set_p (src, INSN_CUID (insn)))
	      || (GET_CODE (src) == ASM_OPERANDS && MEM_VOLATILE_P (src))
	      || GET_CODE (src) == UNSPEC_VOLATILE))
      /* If there is a REG_NO_CONFLICT note for DEST in I3 or SUCC, we get
	 better register allocation by not doing the combine.  */
      || find_reg_note (i3, REG_NO_CONFLICT, dest)
      || (succ && find_reg_note (succ, REG_NO_CONFLICT, dest))
      /* Don't combine across a CALL_INSN, because that would possibly
	 change whether the life span of some REGs crosses calls or not,
	 and it is a pain to update that information.
	 Exception: if source is a constant, moving it later can't hurt.
	 Accept that special case, because it helps -fforce-addr a lot.  */
      || (INSN_CUID (insn) < last_call_cuid && ! CONSTANT_P (src)))
    return 0;

  /* DEST must either be a REG or CC0.  */
  if (GET_CODE (dest) == REG)
    {
      /* If register alignment is being enforced for multi-word items in all
	 cases except for parameters, it is possible to have a register copy
	 insn referencing a hard register that is not allowed to contain the
	 mode being copied and which would not be valid as an operand of most
	 insns.  Eliminate this problem by not combining with such an insn.

	 Also, on some machines we don't want to extend the life of a hard
	 register.

	 This is the same test done in can_combine_p except that we don't
	 test if SRC is a CALL operation to permit a hard register with
	 SMALL_REGISTER_CLASSES, and that we have to take all_adjacent
	 into account.  */

      if (GET_CODE (src) == REG
	  && ((REGNO (dest) < FIRST_PSEUDO_REGISTER
	       && ! HARD_REGNO_MODE_OK (REGNO (dest), GET_MODE (dest)))
	      /* Don't extend the life of a hard register unless it is
		 a user variable (if we have few registers) or it can't
		 fit into the desired register (meaning something special
		 is going on).
		 Also avoid substituting a return register into I3, because
		 reload can't handle a conflict with constraints of other
		 inputs.  */
	      || (REGNO (src) < FIRST_PSEUDO_REGISTER
		  && (! HARD_REGNO_MODE_OK (REGNO (src), GET_MODE (src))
		      || (SMALL_REGISTER_CLASSES
			  && ((! all_adjacent && ! REG_USERVAR_P (src))
			      || (FUNCTION_VALUE_REGNO_P (REGNO (src))
				  && ! REG_USERVAR_P (src))))))))
	return 0;
    }
  else if (GET_CODE (dest) != CC0)
    return 0;

  /* Don't substitute for a register intended as a clobberable operand.
     Similarly, don't substitute an expression containing a register that
     will be clobbered in I3.  */
  if (GET_CODE (PATTERN (i3)) == PARALLEL)
    for (i = XVECLEN (PATTERN (i3), 0) - 1; i >= 0; i--)
      if (GET_CODE (XVECEXP (PATTERN (i3), 0, i)) == CLOBBER
	  && (reg_overlap_mentioned_p (XEXP (XVECEXP (PATTERN (i3), 0, i), 0),
				       src)
	      || rtx_equal_p (XEXP (XVECEXP (PATTERN (i3), 0, i), 0), dest)))
	return 0;

  /* If INSN contains anything volatile, or is an `asm' (whether volatile
     or not), reject, unless nothing volatile comes between it and I3.  */

  if (GET_CODE (src) == ASM_OPERANDS || volatile_refs_p (src))
    {
      /* Make sure succ doesn't contain a volatile reference.  */
      if (succ != 0 && volatile_refs_p (PATTERN (succ)))
	return 0;

      for (p = NEXT_INSN (insn); p != i3; p = NEXT_INSN (p))
	if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
	    && p != succ && volatile_refs_p (PATTERN (p)))
	  return 0;
    }

  /* If INSN is an asm, and DEST is a hard register, reject, since it has
     to be an explicit register variable, and was chosen for a reason.  */

  if (GET_CODE (src) == ASM_OPERANDS
      && GET_CODE (dest) == REG && REGNO (dest) < FIRST_PSEUDO_REGISTER)
    return 0;

  /* If there are any volatile insns between INSN and I3, reject, because
     they might affect machine state.  */

  for (p = NEXT_INSN (insn); p != i3; p = NEXT_INSN (p))
    if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
	&& p != succ && volatile_insn_p (PATTERN (p)))
      return 0;

  /* If INSN or I2 contains an autoincrement or autodecrement,
     make sure that register is not used between there and I3,
     and not already used in I3 either.
     Also insist that I3 not be a jump; if it were one
     and the incremented register were spilled, we would lose.  */

#ifdef AUTO_INC_DEC
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == REG_INC
	&& (GET_CODE (i3) == JUMP_INSN
	    || reg_used_between_p (XEXP (link, 0), insn, i3)
	    || reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i3))))
      return 0;
#endif

#ifdef HAVE_cc0
  /* Don't combine an insn that follows a CC0-setting insn.
     An insn that uses CC0 must not be separated from the one that sets it.
     We do, however, allow I2 to follow a CC0-setting insn if that insn
     is passed as I1; in that case it will be deleted also.
     We also allow combining in this case if all the insns are adjacent
     because that would leave the two CC0 insns adjacent as well.
     It would be more logical to test whether CC0 occurs inside I1 or I2,
     but that would be much slower, and this ought to be equivalent.  */

  p = prev_nonnote_insn (insn);
  if (p && p != pred && GET_CODE (p) == INSN && sets_cc0_p (PATTERN (p))
      && ! all_adjacent)
    return 0;
#endif

  /* If we get here, we have passed all the tests and the combination is
     to be allowed.  */

  *pdest = dest;
  *psrc = src;

  return 1;
}
\f
/* Check if PAT is an insn - or a part of it - used to set up an
   argument for a function in a hard register.  */

static int
sets_function_arg_p (pat)
     rtx pat;
{
  int i;
  rtx inner_dest;

  switch (GET_CODE (pat))
    {
    case INSN:
      return sets_function_arg_p (PATTERN (pat));

    case PARALLEL:
      for (i = XVECLEN (pat, 0); --i >= 0;)
	if (sets_function_arg_p (XVECEXP (pat, 0, i)))
	  return 1;

      break;

    case SET:
      inner_dest = SET_DEST (pat);
      while (GET_CODE (inner_dest) == STRICT_LOW_PART
	     || GET_CODE (inner_dest) == SUBREG
	     || GET_CODE (inner_dest) == ZERO_EXTRACT)
	inner_dest = XEXP (inner_dest, 0);

      return (GET_CODE (inner_dest) == REG
	      && REGNO (inner_dest) < FIRST_PSEUDO_REGISTER
	      && FUNCTION_ARG_REGNO_P (REGNO (inner_dest)));

    default:
      break;
    }

  return 0;
}

/* LOC is the location within I3 that contains its pattern or the component
   of a PARALLEL of the pattern.  We validate that it is valid for combining.

   One problem is that if I3 modifies its output, as opposed to replacing it
   entirely, we can't allow the output to contain I2DEST or I1DEST, as doing
   so would produce an insn that is not equivalent to the original insns.

   Consider:

	 (set (reg:DI 101) (reg:DI 100))
	 (set (subreg:SI (reg:DI 101) 0) <foo>)

   This is NOT equivalent to:

	 (parallel [(set (subreg:SI (reg:DI 100) 0) <foo>)
		    (set (reg:DI 101) (reg:DI 100))])

   Not only does this modify 100 (in which case it might still be valid
   if 100 were dead in I2), it sets 101 to the ORIGINAL value of 100.

   We can also run into a problem if I2 sets a register that I1
   uses and I1 gets directly substituted into I3 (not via I2).  In that
   case, we would be getting the wrong value of I2DEST into I3, so we
   must reject the combination.  This case occurs when I2 and I1 both
   feed into I3, rather than when I1 feeds into I2, which feeds into I3.
   If I1_NOT_IN_SRC is non-zero, it means that finding I1 in the source
   of a SET must prevent combination from occurring.

   On machines where SMALL_REGISTER_CLASSES is non-zero, we don't combine
   if the destination of a SET is a hard register that isn't a user
   variable.

   Before doing the above check, we first try to expand a field assignment
   into a set of logical operations.

   If PI3_DEST_KILLED is non-zero, it is a pointer to a location in which
   we place a register that is both set and used within I3.  If more than one
   such register is detected, we fail.

   Return 1 if the combination is valid, zero otherwise.  */

static int
combinable_i3pat (i3, loc, i2dest, i1dest, i1_not_in_src, pi3dest_killed)
     rtx i3;
     rtx *loc;
     rtx i2dest;
     rtx i1dest;
     int i1_not_in_src;
     rtx *pi3dest_killed;
{
  rtx x = *loc;

  if (GET_CODE (x) == SET)
    {
      rtx set = expand_field_assignment (x);
      rtx dest = SET_DEST (set);
      rtx src = SET_SRC (set);
      rtx inner_dest = dest;

#if 0
      rtx inner_src = src;
#endif

      SUBST (*loc, set);

      while (GET_CODE (inner_dest) == STRICT_LOW_PART
	     || GET_CODE (inner_dest) == SUBREG
	     || GET_CODE (inner_dest) == ZERO_EXTRACT)
	inner_dest = XEXP (inner_dest, 0);

      /* We probably don't need this any more now that LIMIT_RELOAD_CLASS
	 was added.  */
#if 0
      while (GET_CODE (inner_src) == STRICT_LOW_PART
	     || GET_CODE (inner_src) == SUBREG
	     || GET_CODE (inner_src) == ZERO_EXTRACT)
	inner_src = XEXP (inner_src, 0);

      /* If it is better that two different modes keep two different pseudos,
	 avoid combining them.  This avoids producing the following pattern
	 on a 386:
	  (set (subreg:SI (reg/v:QI 21) 0)
	       (lshiftrt:SI (reg/v:SI 20)
		   (const_int 24)))
	 If that were made, reload could not handle the pair of
	 reg 20/21, since it would try to get any GENERAL_REGS
	 but some of them don't handle QImode.  */

      if (rtx_equal_p (inner_src, i2dest)
	  && GET_CODE (inner_dest) == REG
	  && ! MODES_TIEABLE_P (GET_MODE (i2dest), GET_MODE (inner_dest)))
	return 0;
#endif

      /* Check for the case where I3 modifies its output, as
	 discussed above.  */
      if ((inner_dest != dest
	   && (reg_overlap_mentioned_p (i2dest, inner_dest)
	       || (i1dest && reg_overlap_mentioned_p (i1dest, inner_dest))))

	  /* This is the same test done in can_combine_p except that we
	     allow a hard register with SMALL_REGISTER_CLASSES if SRC is a
	     CALL operation.  Moreover, we can't test all_adjacent; we don't
	     have to, since this instruction will stay in place, thus we are
	     not considering increasing the lifetime of INNER_DEST.

	     Also, if this insn sets a function argument, combining it with
	     something that might need a spill could clobber a previous
	     function argument; the all_adjacent test in can_combine_p also
	     checks this; here, we do a more specific test for this case.  */

	  || (GET_CODE (inner_dest) == REG
	      && REGNO (inner_dest) < FIRST_PSEUDO_REGISTER
	      && (! HARD_REGNO_MODE_OK (REGNO (inner_dest),
					GET_MODE (inner_dest))
		  || (SMALL_REGISTER_CLASSES && GET_CODE (src) != CALL
		      && ! REG_USERVAR_P (inner_dest)
		      && (FUNCTION_VALUE_REGNO_P (REGNO (inner_dest))
			  || (FUNCTION_ARG_REGNO_P (REGNO (inner_dest))
			      && i3 != 0
			      && sets_function_arg_p (prev_nonnote_insn (i3)))))))
	  || (i1_not_in_src && reg_overlap_mentioned_p (i1dest, src)))
	return 0;

      /* If DEST is used in I3, it is being killed in this insn,
	 so record that for later.
	 Never add REG_DEAD notes for the FRAME_POINTER_REGNUM or the
	 STACK_POINTER_REGNUM, since these are always considered to be
	 live.  Similarly for ARG_POINTER_REGNUM if it is fixed.  */
      if (pi3dest_killed && GET_CODE (dest) == REG
	  && reg_referenced_p (dest, PATTERN (i3))
	  && REGNO (dest) != FRAME_POINTER_REGNUM
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
	  && REGNO (dest) != HARD_FRAME_POINTER_REGNUM
#endif
#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
	  && (REGNO (dest) != ARG_POINTER_REGNUM
	      || ! fixed_regs [REGNO (dest)])
#endif
	  && REGNO (dest) != STACK_POINTER_REGNUM)
	{
	  if (*pi3dest_killed)
	    return 0;

	  *pi3dest_killed = dest;
	}
    }

  else if (GET_CODE (x) == PARALLEL)
    {
      int i;

      for (i = 0; i < XVECLEN (x, 0); i++)
	if (! combinable_i3pat (i3, &XVECEXP (x, 0, i), i2dest, i1dest,
				i1_not_in_src, pi3dest_killed))
	  return 0;
    }

  return 1;
}
\f
/* Return 1 if X is an arithmetic expression that contains a multiplication
   or division.  We don't count multiplications by powers of two here.  */

static int
contains_muldiv (x)
     rtx x;
{
  switch (GET_CODE (x))
    {
    case MOD:  case DIV:  case UMOD:  case UDIV:
      return 1;

    case MULT:
      return ! (GET_CODE (XEXP (x, 1)) == CONST_INT
		&& exact_log2 (INTVAL (XEXP (x, 1))) >= 0);
    default:
      switch (GET_RTX_CLASS (GET_CODE (x)))
	{
	case 'c':  case '<':  case '2':
	  return contains_muldiv (XEXP (x, 0))
	    || contains_muldiv (XEXP (x, 1));

	case '1':
	  return contains_muldiv (XEXP (x, 0));

	default:
	  return 0;
	}
    }
}
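
/* Examples of the distinction above (an editor's addition):

     (mult:SI (reg:SI 100) (const_int 8))   -> 0, since 8 is a power of two
					        and becomes a cheap shift
     (mult:SI (reg:SI 100) (const_int 6))   -> 1, a real multiply
     (udiv:SI (reg:SI 100) (reg:SI 101))    -> 1, any division counts  */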
1416\f
230d793d
RS
1417/* Try to combine the insns I1 and I2 into I3.
1418 Here I1 and I2 appear earlier than I3.
1419 I1 can be zero; then we combine just I2 into I3.
1420
1421 It we are combining three insns and the resulting insn is not recognized,
1422 try splitting it into two insns. If that happens, I2 and I3 are retained
1423 and I1 is pseudo-deleted by turning it into a NOTE. Otherwise, I1 and I2
1424 are pseudo-deleted.
1425
abe6e52f
RK
1426 Return 0 if the combination does not work. Then nothing is changed.
1427 If we did the combination, return the insn at which combine should
1428 resume scanning. */
230d793d
RS
1429
1430static rtx
1431try_combine (i3, i2, i1)
1432 register rtx i3, i2, i1;
1433{
1434 /* New patterns for I3 and I3, respectively. */
1435 rtx newpat, newi2pat = 0;
1436 /* Indicates need to preserve SET in I1 or I2 in I3 if it is not dead. */
1437 int added_sets_1, added_sets_2;
1438 /* Total number of SETs to put into I3. */
1439 int total_sets;
1440 /* Nonzero is I2's body now appears in I3. */
1441 int i2_is_used;
1442 /* INSN_CODEs for new I3, new I2, and user of condition code. */
6a651371 1443 int insn_code_number, i2_code_number = 0, other_code_number = 0;
230d793d
RS
1444 /* Contains I3 if the destination of I3 is used in its source, which means
1445 that the old life of I3 is being killed. If that usage is placed into
1446 I2 and not in I3, a REG_DEAD note must be made. */
1447 rtx i3dest_killed = 0;
1448 /* SET_DEST and SET_SRC of I2 and I1. */
1449 rtx i2dest, i2src, i1dest = 0, i1src = 0;
1450 /* PATTERN (I2), or a copy of it in certain cases. */
1451 rtx i2pat;
1452 /* Indicates if I2DEST or I1DEST is in I2SRC or I1_SRC. */
c4e861e8 1453 int i2dest_in_i2src = 0, i1dest_in_i1src = 0, i2dest_in_i1src = 0;
230d793d
RS
1454 int i1_feeds_i3 = 0;
1455 /* Notes that must be added to REG_NOTES in I3 and I2. */
1456 rtx new_i3_notes, new_i2_notes;
176c9e6b
JW
1457 /* Notes that we substituted I3 into I2 instead of the normal case. */
1458 int i3_subst_into_i2 = 0;
df7d75de
RK
1459 /* Notes that I1, I2 or I3 is a MULT operation. */
1460 int have_mult = 0;
230d793d
RS
1461
1462 int maxreg;
1463 rtx temp;
1464 register rtx link;
1465 int i;
1466
  /* If any of I1, I2, and I3 isn't really an insn, we can't do anything.
     This can occur when flow deletes an insn that it has merged into an
     auto-increment address.  We also can't do anything if I3 has a
     REG_LIBCALL note since we don't want to disrupt the contiguity of a
     libcall.  */

  if (GET_RTX_CLASS (GET_CODE (i3)) != 'i'
      || GET_RTX_CLASS (GET_CODE (i2)) != 'i'
      || (i1 && GET_RTX_CLASS (GET_CODE (i1)) != 'i')
#if 0
      /* ??? This gives worse code, and appears to be unnecessary, since no
	 pass after flow uses REG_LIBCALL/REG_RETVAL notes.  */
      || find_reg_note (i3, REG_LIBCALL, NULL_RTX)
#endif
      )
    return 0;

  combine_attempts++;

  undobuf.other_insn = 0;

  /* Save the current high-water-mark so we can free storage if we didn't
     accept this combination.  */
  undobuf.storage = (char *) oballoc (0);

  /* Reset the hard register usage information.  */
  CLEAR_HARD_REG_SET (newpat_used_regs);

  /* If I1 and I2 both feed I3, they can be in any order.  To simplify the
     code below, set I1 to be the earlier of the two insns.  */
  if (i1 && INSN_CUID (i1) > INSN_CUID (i2))
    temp = i1, i1 = i2, i2 = temp;

  added_links_insn = 0;

  /* First check for one important special-case that the code below will
     not handle.  Namely, the case where I1 is zero, I2 has multiple sets,
     and I3 is a SET whose SET_SRC is a SET_DEST in I2.  In that case,
     we may be able to replace that destination with the destination of I3.
     This occurs in the common code where we compute both a quotient and
     remainder into a structure, in which case we want to do the computation
     directly into the structure to avoid register-register copies.

     We make very conservative checks below and only try to handle the
     most common cases of this.  For example, we only handle the case
     where I2 and I3 are adjacent to avoid making difficult register
     usage tests.  */

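  /* As an illustrative sketch (register numbers are made up), I2 and I3
     might be:

	 I2: (parallel [(set (reg 70) (div:SI (reg 68) (reg 69)))
			(set (reg 71) (mod:SI (reg 68) (reg 69)))])
	 I3: (set (mem:SI (reg 72)) (reg 71))

     and we would rewrite the second SET in I2 to store its result
     directly into (mem:SI (reg 72)).  */
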
  if (i1 == 0 && GET_CODE (i3) == INSN && GET_CODE (PATTERN (i3)) == SET
      && GET_CODE (SET_SRC (PATTERN (i3))) == REG
      && REGNO (SET_SRC (PATTERN (i3))) >= FIRST_PSEUDO_REGISTER
      && (! SMALL_REGISTER_CLASSES
	  || (GET_CODE (SET_DEST (PATTERN (i3))) != REG
	      || REGNO (SET_DEST (PATTERN (i3))) >= FIRST_PSEUDO_REGISTER
	      || REG_USERVAR_P (SET_DEST (PATTERN (i3)))))
      && find_reg_note (i3, REG_DEAD, SET_SRC (PATTERN (i3)))
      && GET_CODE (PATTERN (i2)) == PARALLEL
      && ! side_effects_p (SET_DEST (PATTERN (i3)))
      /* If the dest of I3 is a ZERO_EXTRACT or STRICT_LOW_PART, the code
	 below would need to check what is inside (and reg_overlap_mentioned_p
	 doesn't support those codes anyway).  Don't allow those destinations;
	 the resulting insn isn't likely to be recognized anyway.  */
      && GET_CODE (SET_DEST (PATTERN (i3))) != ZERO_EXTRACT
      && GET_CODE (SET_DEST (PATTERN (i3))) != STRICT_LOW_PART
      && ! reg_overlap_mentioned_p (SET_SRC (PATTERN (i3)),
				    SET_DEST (PATTERN (i3)))
      && next_real_insn (i2) == i3)
    {
      rtx p2 = PATTERN (i2);

      /* Make sure that the destination of I3,
	 which we are going to substitute into one output of I2,
	 is not used within another output of I2.  We must avoid making this:
	 (parallel [(set (mem (reg 69)) ...)
		    (set (reg 69) ...)])
	 which is not well-defined as to order of actions.
	 (Besides, reload can't handle output reloads for this.)

	 The problem can also happen if the dest of I3 is a memory ref,
	 if another dest in I2 is an indirect memory ref.  */
      for (i = 0; i < XVECLEN (p2, 0); i++)
	if ((GET_CODE (XVECEXP (p2, 0, i)) == SET
	     || GET_CODE (XVECEXP (p2, 0, i)) == CLOBBER)
	    && reg_overlap_mentioned_p (SET_DEST (PATTERN (i3)),
					SET_DEST (XVECEXP (p2, 0, i))))
	  break;

      if (i == XVECLEN (p2, 0))
	for (i = 0; i < XVECLEN (p2, 0); i++)
	  if ((GET_CODE (XVECEXP (p2, 0, i)) == SET
	       || GET_CODE (XVECEXP (p2, 0, i)) == CLOBBER)
	      && SET_DEST (XVECEXP (p2, 0, i)) == SET_SRC (PATTERN (i3)))
	    {
	      combine_merges++;

	      subst_insn = i3;
	      subst_low_cuid = INSN_CUID (i2);

	      added_sets_2 = added_sets_1 = 0;
	      i2dest = SET_SRC (PATTERN (i3));

	      /* Replace the dest in I2 with our dest and make the resulting
		 insn the new pattern for I3.  Then skip to where we
		 validate the pattern.  Everything was set up above.  */
	      SUBST (SET_DEST (XVECEXP (p2, 0, i)),
		     SET_DEST (PATTERN (i3)));

	      newpat = p2;
	      i3_subst_into_i2 = 1;
	      goto validate_replacement;
	    }
    }

  /* If I2 is setting a double-word pseudo to a constant and I3 is setting
     one of those words to another constant, merge them by making a new
     constant.  */
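  /* An illustrative sketch (assuming 32-bit words and a low-part SUBREG):

	 I2: (set (reg:DI 70) (const_int 0))
	 I3: (set (subreg:SI (reg:DI 70) 0) (const_int 5))

     merges into a single (set (reg:DI 70) (const_int 5)).  */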
  if (i1 == 0
      && (temp = single_set (i2)) != 0
      && (GET_CODE (SET_SRC (temp)) == CONST_INT
	  || GET_CODE (SET_SRC (temp)) == CONST_DOUBLE)
      && GET_CODE (SET_DEST (temp)) == REG
      && GET_MODE_CLASS (GET_MODE (SET_DEST (temp))) == MODE_INT
      && GET_MODE_SIZE (GET_MODE (SET_DEST (temp))) == 2 * UNITS_PER_WORD
      && GET_CODE (PATTERN (i3)) == SET
      && GET_CODE (SET_DEST (PATTERN (i3))) == SUBREG
      && SUBREG_REG (SET_DEST (PATTERN (i3))) == SET_DEST (temp)
      && GET_MODE_CLASS (GET_MODE (SET_DEST (PATTERN (i3)))) == MODE_INT
      && GET_MODE_SIZE (GET_MODE (SET_DEST (PATTERN (i3)))) == UNITS_PER_WORD
      && GET_CODE (SET_SRC (PATTERN (i3))) == CONST_INT)
    {
      HOST_WIDE_INT lo, hi;

      if (GET_CODE (SET_SRC (temp)) == CONST_INT)
	lo = INTVAL (SET_SRC (temp)), hi = lo < 0 ? -1 : 0;
      else
	{
	  lo = CONST_DOUBLE_LOW (SET_SRC (temp));
	  hi = CONST_DOUBLE_HIGH (SET_SRC (temp));
	}

      if (subreg_lowpart_p (SET_DEST (PATTERN (i3))))
	lo = INTVAL (SET_SRC (PATTERN (i3)));
      else
	hi = INTVAL (SET_SRC (PATTERN (i3)));

      combine_merges++;
      subst_insn = i3;
      subst_low_cuid = INSN_CUID (i2);
      added_sets_2 = added_sets_1 = 0;
      i2dest = SET_DEST (temp);

      SUBST (SET_SRC (temp),
	     immed_double_const (lo, hi, GET_MODE (SET_DEST (temp))));

      newpat = PATTERN (i2);
      i3_subst_into_i2 = 1;
      goto validate_replacement;
    }

#ifndef HAVE_cc0
  /* If we have no I1 and I2 looks like:
	(parallel [(set (reg:CC X) (compare:CC OP (const_int 0)))
		   (set Y OP)])
     make up a dummy I1 that is
	(set Y OP)
     and change I2 to be
	(set (reg:CC X) (compare:CC Y (const_int 0)))

     (We can ignore any trailing CLOBBERs.)

     This undoes a previous combination and allows us to match a branch-and-
     decrement insn.  */

  if (i1 == 0 && GET_CODE (PATTERN (i2)) == PARALLEL
      && XVECLEN (PATTERN (i2), 0) >= 2
      && GET_CODE (XVECEXP (PATTERN (i2), 0, 0)) == SET
      && (GET_MODE_CLASS (GET_MODE (SET_DEST (XVECEXP (PATTERN (i2), 0, 0))))
	  == MODE_CC)
      && GET_CODE (SET_SRC (XVECEXP (PATTERN (i2), 0, 0))) == COMPARE
      && XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 1) == const0_rtx
      && GET_CODE (XVECEXP (PATTERN (i2), 0, 1)) == SET
      && GET_CODE (SET_DEST (XVECEXP (PATTERN (i2), 0, 1))) == REG
      && rtx_equal_p (XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 0),
		      SET_SRC (XVECEXP (PATTERN (i2), 0, 1))))
    {
      for (i = XVECLEN (PATTERN (i2), 0) - 1; i >= 2; i--)
	if (GET_CODE (XVECEXP (PATTERN (i2), 0, i)) != CLOBBER)
	  break;

      if (i == 1)
	{
	  /* We make I1 with the same INSN_UID as I2.  This gives it
	     the same INSN_CUID for value tracking.  Our fake I1 will
	     never appear in the insn stream so giving it the same INSN_UID
	     as I2 will not cause a problem.  */

	  subst_prev_insn = i1
	    = gen_rtx_INSN (VOIDmode, INSN_UID (i2), NULL_RTX, i2,
			    XVECEXP (PATTERN (i2), 0, 1), -1, NULL_RTX,
			    NULL_RTX);

	  SUBST (PATTERN (i2), XVECEXP (PATTERN (i2), 0, 0));
	  SUBST (XEXP (SET_SRC (PATTERN (i2)), 0),
		 SET_DEST (PATTERN (i1)));
	}
    }
#endif

  /* Verify that I2 and I1 are valid for combining.  */
  if (! can_combine_p (i2, i3, i1, NULL_RTX, &i2dest, &i2src)
      || (i1 && ! can_combine_p (i1, i3, NULL_RTX, i2, &i1dest, &i1src)))
    {
      undo_all ();
      return 0;
    }

  /* Record whether I2DEST is used in I2SRC and similarly for the other
     cases.  Knowing this will help in register status updating below.  */
  i2dest_in_i2src = reg_overlap_mentioned_p (i2dest, i2src);
  i1dest_in_i1src = i1 && reg_overlap_mentioned_p (i1dest, i1src);
  i2dest_in_i1src = i1 && reg_overlap_mentioned_p (i2dest, i1src);

  /* See if I1 directly feeds into I3.  It does if I1DEST is not used
     in I2SRC.  */
  i1_feeds_i3 = i1 && ! reg_overlap_mentioned_p (i1dest, i2src);

  /* Ensure that I3's pattern can be the destination of combines.  */
  if (! combinable_i3pat (i3, &PATTERN (i3), i2dest, i1dest,
			  i1 && i2dest_in_i1src && i1_feeds_i3,
			  &i3dest_killed))
    {
      undo_all ();
      return 0;
    }

  /* See if any of the insns is a MULT operation.  Unless one is, we will
     reject a combination that is, since it must be slower.  Be conservative
     here.  */
  if (GET_CODE (i2src) == MULT
      || (i1 != 0 && GET_CODE (i1src) == MULT)
      || (GET_CODE (PATTERN (i3)) == SET
	  && GET_CODE (SET_SRC (PATTERN (i3))) == MULT))
    have_mult = 1;

  /* If I3 has an inc, then give up if I1 or I2 uses the reg that is inc'd.
     We used to do this EXCEPT in one case: I3 has a post-inc in an
     output operand.  However, that exception can give rise to insns like
	mov r3,(r3)+
     which is a famous insn on the PDP-11 where the value of r3 used as the
     source was model-dependent.  Avoid this sort of thing.  */

#if 0
  if (!(GET_CODE (PATTERN (i3)) == SET
	&& GET_CODE (SET_SRC (PATTERN (i3))) == REG
	&& GET_CODE (SET_DEST (PATTERN (i3))) == MEM
	&& (GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_INC
	    || GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_DEC)))
    /* It's not the exception.  */
#endif
#ifdef AUTO_INC_DEC
    for (link = REG_NOTES (i3); link; link = XEXP (link, 1))
      if (REG_NOTE_KIND (link) == REG_INC
	  && (reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i2))
	      || (i1 != 0
		  && reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i1)))))
	{
	  undo_all ();
	  return 0;
	}
#endif

  /* See if the SETs in I1 or I2 need to be kept around in the merged
     instruction: whenever the value set there is still needed past I3.
     For the SETs in I2, this is easy: we see if I2DEST dies or is set in I3.

     For the SET in I1, we have two cases:  If I1 and I2 independently
     feed into I3, the set in I1 needs to be kept around if I1DEST dies
     or is set in I3.  Otherwise (if I1 feeds I2 which feeds I3), the set
     in I1 needs to be kept around unless I1DEST dies or is set in either
     I2 or I3.  We can distinguish these cases by seeing if I2SRC mentions
     I1DEST.  If so, we know I1 feeds into I2.  */

  added_sets_2 = ! dead_or_set_p (i3, i2dest);

  added_sets_1
    = i1 && ! (i1_feeds_i3 ? dead_or_set_p (i3, i1dest)
	       : (dead_or_set_p (i3, i1dest) || dead_or_set_p (i2, i1dest)));

  /* If the set in I2 needs to be kept around, we must make a copy of
     PATTERN (I2), so that when we substitute I1SRC for I1DEST in
     PATTERN (I2), we are only substituting for the original I1DEST, not into
     an already-substituted copy.  This also prevents making self-referential
     rtx.  If I2 is a PARALLEL, we just need the piece that assigns I2SRC to
     I2DEST.  */

  i2pat = (GET_CODE (PATTERN (i2)) == PARALLEL
	   ? gen_rtx_SET (VOIDmode, i2dest, i2src)
	   : PATTERN (i2));

  if (added_sets_2)
    i2pat = copy_rtx (i2pat);

  combine_merges++;

  /* Substitute in the latest insn for the regs set by the earlier ones.  */

  maxreg = max_reg_num ();

  subst_insn = i3;

  /* It is possible that the source of I2 or I1 may be performing an
     unneeded operation, such as a ZERO_EXTEND of something that is known
     to have the high part zero.  Handle that case by letting subst look at
     the innermost one of them.

     Another way to do this would be to have a function that tries to
     simplify a single insn instead of merging two or more insns.  We don't
     do this because of the potential of infinite loops and because
     of the potential extra memory required.  However, doing it the way
     we are is a bit of a kludge and doesn't catch all cases.

     But only do this if -fexpensive-optimizations since it slows things down
     and doesn't usually win.  */

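  /* An illustrative case: I2SRC might be
     (zero_extend:SI (subreg:HI (reg:SI 68) 0)) at a point where the high
     bits of (reg:SI 68) are known to be zero, in which case letting subst
     simplify it first reduces I2SRC to just (reg:SI 68).  */
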
  if (flag_expensive_optimizations)
    {
      /* Pass pc_rtx so no substitutions are done, just simplifications.
	 The cases that we are interested in here do not involve the few
	 cases where is_replaced is checked.  */
      if (i1)
	{
	  subst_low_cuid = INSN_CUID (i1);
	  i1src = subst (i1src, pc_rtx, pc_rtx, 0, 0);
	}
      else
	{
	  subst_low_cuid = INSN_CUID (i2);
	  i2src = subst (i2src, pc_rtx, pc_rtx, 0, 0);
	}

      undobuf.previous_undos = undobuf.undos;
    }

#ifndef HAVE_cc0
  /* Many machines that don't use CC0 have insns that can both perform an
     arithmetic operation and set the condition code.  These operations will
     be represented as a PARALLEL with the first element of the vector
     being a COMPARE of an arithmetic operation with the constant zero.
     The second element of the vector will set some pseudo to the result
     of the same arithmetic operation.  If we simplify the COMPARE, we won't
     match such a pattern and so will generate an extra insn.  Here we test
     for this case, where both the comparison and the operation result are
     needed, and make the PARALLEL by just replacing I2DEST in I3SRC with
     I2SRC.  Later we will make the PARALLEL that contains I2.  */

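  /* Such an insn might look like this (an illustrative sketch; the mode
     and register numbers are made up):

	 (parallel [(set (reg:CC 24)
			 (compare:CC (plus:SI (reg 68) (reg 69))
				     (const_int 0)))
		    (set (reg:SI 70) (plus:SI (reg 68) (reg 69)))])  */
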
  if (i1 == 0 && added_sets_2 && GET_CODE (PATTERN (i3)) == SET
      && GET_CODE (SET_SRC (PATTERN (i3))) == COMPARE
      && XEXP (SET_SRC (PATTERN (i3)), 1) == const0_rtx
      && rtx_equal_p (XEXP (SET_SRC (PATTERN (i3)), 0), i2dest))
    {
#ifdef EXTRA_CC_MODES
      rtx *cc_use;
      enum machine_mode compare_mode;
#endif

      newpat = PATTERN (i3);
      SUBST (XEXP (SET_SRC (newpat), 0), i2src);

      i2_is_used = 1;

#ifdef EXTRA_CC_MODES
      /* See if a COMPARE with the operand we substituted in should be done
	 with the mode that is currently being used.  If not, do the same
	 processing we do in `subst' for a SET; namely, if the destination
	 is used only once, try to replace it with a register of the proper
	 mode and also replace the COMPARE.  */
      if (undobuf.other_insn == 0
	  && (cc_use = find_single_use (SET_DEST (newpat), i3,
					&undobuf.other_insn))
	  && ((compare_mode = SELECT_CC_MODE (GET_CODE (*cc_use),
					      i2src, const0_rtx))
	      != GET_MODE (SET_DEST (newpat))))
	{
	  int regno = REGNO (SET_DEST (newpat));
	  rtx new_dest = gen_rtx_REG (compare_mode, regno);

	  if (regno < FIRST_PSEUDO_REGISTER
	      || (REG_N_SETS (regno) == 1 && ! added_sets_2
		  && ! REG_USERVAR_P (SET_DEST (newpat))))
	    {
	      if (regno >= FIRST_PSEUDO_REGISTER)
		SUBST (regno_reg_rtx[regno], new_dest);

	      SUBST (SET_DEST (newpat), new_dest);
	      SUBST (XEXP (*cc_use, 0), new_dest);
	      SUBST (SET_SRC (newpat),
		     gen_rtx_combine (COMPARE, compare_mode,
				      i2src, const0_rtx));
	    }
	  else
	    undobuf.other_insn = 0;
	}
#endif
    }
  else
#endif
    {
      n_occurrences = 0;		/* `subst' counts here */

      /* If I1 feeds into I2 (not into I3) and I1DEST is in I1SRC, we
	 need to make a unique copy of I2SRC each time we substitute it
	 to avoid self-referential rtl.  */

      subst_low_cuid = INSN_CUID (i2);
      newpat = subst (PATTERN (i3), i2dest, i2src, 0,
		      ! i1_feeds_i3 && i1dest_in_i1src);
      undobuf.previous_undos = undobuf.undos;

      /* Record whether i2's body now appears within i3's body.  */
      i2_is_used = n_occurrences;
    }

  /* If we already got a failure, don't try to do more.  Otherwise,
     try to substitute in I1 if we have it.  */

  if (i1 && GET_CODE (newpat) != CLOBBER)
    {
      /* Before we can do this substitution, we must redo the test done
	 above (see detailed comments there) that ensures that I1DEST
	 isn't mentioned in any SETs in NEWPAT that are field assignments.  */

      if (! combinable_i3pat (NULL_RTX, &newpat, i1dest, NULL_RTX,
			      0, NULL_PTR))
	{
	  undo_all ();
	  return 0;
	}

      n_occurrences = 0;
      subst_low_cuid = INSN_CUID (i1);
      newpat = subst (newpat, i1dest, i1src, 0, 0);
      undobuf.previous_undos = undobuf.undos;
    }

  /* Fail if an autoincrement side-effect has been duplicated.  Be careful
     to count all the ways that I2SRC and I1SRC can be used.  */
  if ((FIND_REG_INC_NOTE (i2, NULL_RTX) != 0
       && i2_is_used + added_sets_2 > 1)
      || (i1 != 0 && FIND_REG_INC_NOTE (i1, NULL_RTX) != 0
	  && (n_occurrences + added_sets_1 + (added_sets_2 && ! i1_feeds_i3)
	      > 1))
      /* Fail if we tried to make a new register (we used to abort, but there's
	 really no reason to).  */
      || max_reg_num () != maxreg
      /* Fail if we couldn't do something and have a CLOBBER.  */
      || GET_CODE (newpat) == CLOBBER
      /* Fail if this new pattern is a MULT and we didn't have one before
	 at the outer level.  */
      || (GET_CODE (newpat) == SET && GET_CODE (SET_SRC (newpat)) == MULT
	  && ! have_mult))
    {
      undo_all ();
      return 0;
    }

  /* If the actions of the earlier insns must be kept
     in addition to substituting them into the latest one,
     we must make a new PARALLEL for the latest insn
     to hold the additional SETs.  */

  if (added_sets_1 || added_sets_2)
    {
      combine_extras++;

      if (GET_CODE (newpat) == PARALLEL)
	{
	  rtvec old = XVEC (newpat, 0);
	  total_sets = XVECLEN (newpat, 0) + added_sets_1 + added_sets_2;
	  newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total_sets));
	  bcopy ((char *) &old->elem[0], (char *) XVEC (newpat, 0)->elem,
		 sizeof (old->elem[0]) * old->num_elem);
	}
      else
	{
	  rtx old = newpat;
	  total_sets = 1 + added_sets_1 + added_sets_2;
	  newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total_sets));
	  XVECEXP (newpat, 0, 0) = old;
	}

      if (added_sets_1)
	XVECEXP (newpat, 0, --total_sets)
	  = (GET_CODE (PATTERN (i1)) == PARALLEL
	     ? gen_rtx_SET (VOIDmode, i1dest, i1src) : PATTERN (i1));

      if (added_sets_2)
	{
	  /* If there is no I1, use I2's body as is.  We used to also not do
	     the subst call below if I2 was substituted into I3,
	     but that could lose a simplification.  */
	  if (i1 == 0)
	    XVECEXP (newpat, 0, --total_sets) = i2pat;
	  else
	    /* See comment where i2pat is assigned.  */
	    XVECEXP (newpat, 0, --total_sets)
	      = subst (i2pat, i1dest, i1src, 0, 0);
	}
    }

  /* We come here when we are replacing a destination in I2 with the
     destination of I3.  */
 validate_replacement:

  /* Note which hard regs this insn has as inputs.  */
  mark_used_regs_combine (newpat);

  /* Is the result of combination a valid instruction?  */
  insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);

  /* If the result isn't valid, see if it is a PARALLEL of two SETs where
     the second SET's destination is a register that is unused.  In that case,
     we just need the first SET.  This can occur when simplifying a divmod
     insn.  We *must* test for this case here because the code below that
     splits two independent SETs doesn't handle this case correctly when it
     updates the register status.  Also check the case where the first
     SET's destination is unused.  That would not cause incorrect code, but
     does cause an unneeded insn to remain.  */

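  /* For instance (an illustrative sketch), if NEWPAT is

	 (parallel [(set (reg 70) (div:SI (reg 68) (reg 69)))
		    (set (reg 71) (mod:SI (reg 68) (reg 69)))])

     and I3 has a REG_UNUSED note for (reg 71), only the first SET is
     needed, so we retry recognition with it alone.  */
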
  if (insn_code_number < 0 && GET_CODE (newpat) == PARALLEL
      && XVECLEN (newpat, 0) == 2
      && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
      && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
      && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) == REG
      && find_reg_note (i3, REG_UNUSED, SET_DEST (XVECEXP (newpat, 0, 1)))
      && ! side_effects_p (SET_SRC (XVECEXP (newpat, 0, 1)))
      && asm_noperands (newpat) < 0)
    {
      newpat = XVECEXP (newpat, 0, 0);
      insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
    }

  else if (insn_code_number < 0 && GET_CODE (newpat) == PARALLEL
	   && XVECLEN (newpat, 0) == 2
	   && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
	   && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) == REG
	   && find_reg_note (i3, REG_UNUSED, SET_DEST (XVECEXP (newpat, 0, 0)))
	   && ! side_effects_p (SET_SRC (XVECEXP (newpat, 0, 0)))
	   && asm_noperands (newpat) < 0)
    {
      newpat = XVECEXP (newpat, 0, 1);
      insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
    }

  /* If we were combining three insns and the result is a simple SET
     with no ASM_OPERANDS that wasn't recognized, try to split it into two
     insns.  There are two ways to do this.  It can be split using a
     machine-specific method (like when you have an addition of a large
     constant) or by combine in the function find_split_point.  */

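  /* For instance (an illustrative sketch; whether this arises is
     target-specific), on a machine whose add insn accepts only small
     immediates,

	 (set (reg 70) (plus:SI (reg 68) (const_int 123456)))

     might be split into an insn that loads the constant into I2DEST
     followed by the addition.  */
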
  if (i1 && insn_code_number < 0 && GET_CODE (newpat) == SET
      && asm_noperands (newpat) < 0)
    {
      rtx m_split, *split;
      rtx ni2dest = i2dest;

      /* See if the MD file can split NEWPAT.  If it can't, see if letting it
	 use I2DEST as a scratch register will help.  In the latter case,
	 convert I2DEST to the mode of the source of NEWPAT if we can.  */

      m_split = split_insns (newpat, i3);

      /* We can only use I2DEST as a scratch reg if it doesn't overlap any
	 inputs of NEWPAT.  */

      /* ??? If I2DEST is not safe, and I1DEST exists, then it would be
	 possible to try that as a scratch reg.  This would require adding
	 more code to make it work though.  */

      if (m_split == 0 && ! reg_overlap_mentioned_p (ni2dest, newpat))
	{
	  /* If I2DEST is a hard register or the only use of a pseudo,
	     we can change its mode.  */
	  if (GET_MODE (SET_DEST (newpat)) != GET_MODE (i2dest)
	      && GET_MODE (SET_DEST (newpat)) != VOIDmode
	      && GET_CODE (i2dest) == REG
	      && (REGNO (i2dest) < FIRST_PSEUDO_REGISTER
		  || (REG_N_SETS (REGNO (i2dest)) == 1 && ! added_sets_2
		      && ! REG_USERVAR_P (i2dest))))
	    ni2dest = gen_rtx_REG (GET_MODE (SET_DEST (newpat)),
				   REGNO (i2dest));

	  m_split = split_insns (gen_rtx_PARALLEL
				 (VOIDmode,
				  gen_rtvec (2, newpat,
					     gen_rtx_CLOBBER (VOIDmode,
							      ni2dest))),
				 i3);
	}

      if (m_split && GET_CODE (m_split) == SEQUENCE
	  && XVECLEN (m_split, 0) == 2
	  && (next_real_insn (i2) == i3
	      || ! use_crosses_set_p (PATTERN (XVECEXP (m_split, 0, 0)),
				      INSN_CUID (i2))))
	{
	  rtx i2set, i3set;
	  rtx newi3pat = PATTERN (XVECEXP (m_split, 0, 1));
	  newi2pat = PATTERN (XVECEXP (m_split, 0, 0));

	  i3set = single_set (XVECEXP (m_split, 0, 1));
	  i2set = single_set (XVECEXP (m_split, 0, 0));

	  /* In case we changed the mode of I2DEST, replace it in the
	     pseudo-register table here.  We can't do it above in case this
	     code doesn't get executed and we do a split the other way.  */

	  if (REGNO (i2dest) >= FIRST_PSEUDO_REGISTER)
	    SUBST (regno_reg_rtx[REGNO (i2dest)], ni2dest);

	  i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);

	  /* If I2 or I3 has multiple SETs, we won't know how to track
	     register status, so don't use these insns.  If I2's destination
	     is used between I2 and I3, we also can't use these insns.  */

	  if (i2_code_number >= 0 && i2set && i3set
	      && (next_real_insn (i2) == i3
		  || ! reg_used_between_p (SET_DEST (i2set), i2, i3)))
	    insn_code_number = recog_for_combine (&newi3pat, i3,
						  &new_i3_notes);
	  if (insn_code_number >= 0)
	    newpat = newi3pat;

	  /* It is possible that both insns now set the destination of I3.
	     If so, we must show an extra use of it.  */

	  if (insn_code_number >= 0)
	    {
	      rtx new_i3_dest = SET_DEST (i3set);
	      rtx new_i2_dest = SET_DEST (i2set);

	      while (GET_CODE (new_i3_dest) == ZERO_EXTRACT
		     || GET_CODE (new_i3_dest) == STRICT_LOW_PART
		     || GET_CODE (new_i3_dest) == SUBREG)
		new_i3_dest = XEXP (new_i3_dest, 0);

	      while (GET_CODE (new_i2_dest) == ZERO_EXTRACT
		     || GET_CODE (new_i2_dest) == STRICT_LOW_PART
		     || GET_CODE (new_i2_dest) == SUBREG)
		new_i2_dest = XEXP (new_i2_dest, 0);

	      if (GET_CODE (new_i3_dest) == REG
		  && GET_CODE (new_i2_dest) == REG
		  && REGNO (new_i3_dest) == REGNO (new_i2_dest))
		REG_N_SETS (REGNO (new_i2_dest))++;
	    }
	}

      /* If we can split it and use I2DEST, go ahead and see if that
	 helps things be recognized.  Verify that none of the registers
	 are set between I2 and I3.  */
      if (insn_code_number < 0 && (split = find_split_point (&newpat, i3)) != 0
#ifdef HAVE_cc0
	  && GET_CODE (i2dest) == REG
#endif
	  /* We need I2DEST in the proper mode.  If it is a hard register
	     or the only use of a pseudo, we can change its mode.  */
	  && (GET_MODE (*split) == GET_MODE (i2dest)
	      || GET_MODE (*split) == VOIDmode
	      || REGNO (i2dest) < FIRST_PSEUDO_REGISTER
	      || (REG_N_SETS (REGNO (i2dest)) == 1 && ! added_sets_2
		  && ! REG_USERVAR_P (i2dest)))
	  && (next_real_insn (i2) == i3
	      || ! use_crosses_set_p (*split, INSN_CUID (i2)))
	  /* We can't overwrite I2DEST if its value is still used by
	     NEWPAT.  */
	  && ! reg_referenced_p (i2dest, newpat))
	{
	  rtx newdest = i2dest;
	  enum rtx_code split_code = GET_CODE (*split);
	  enum machine_mode split_mode = GET_MODE (*split);

	  /* Get NEWDEST as a register in the proper mode.  We have already
	     validated that we can do this.  */
	  if (GET_MODE (i2dest) != split_mode && split_mode != VOIDmode)
	    {
	      newdest = gen_rtx_REG (split_mode, REGNO (i2dest));

	      if (REGNO (i2dest) >= FIRST_PSEUDO_REGISTER)
		SUBST (regno_reg_rtx[REGNO (i2dest)], newdest);
	    }

	  /* If *SPLIT is a (mult FOO (const_int pow2)), convert it to
	     an ASHIFT.  This can occur if it was inside a PLUS and hence
	     appeared to be a memory address.  This is a kludge.  */
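	  /* E.g. (mult:SI (reg 68) (const_int 8)) becomes
	     (ashift:SI (reg 68) (const_int 3)).  */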
	  if (split_code == MULT
	      && GET_CODE (XEXP (*split, 1)) == CONST_INT
	      && (i = exact_log2 (INTVAL (XEXP (*split, 1)))) >= 0)
	    {
	      SUBST (*split, gen_rtx_combine (ASHIFT, split_mode,
					      XEXP (*split, 0), GEN_INT (i)));
	      /* Update split_code because we may not have a multiply
		 anymore.  */
	      split_code = GET_CODE (*split);
	    }

#ifdef INSN_SCHEDULING
	  /* If *SPLIT is a paradoxical SUBREG, when we split it, it should
	     be written as a ZERO_EXTEND.  */
	  if (split_code == SUBREG && GET_CODE (SUBREG_REG (*split)) == MEM)
	    SUBST (*split, gen_rtx_combine (ZERO_EXTEND, split_mode,
					    XEXP (*split, 0)));
#endif

	  newi2pat = gen_rtx_combine (SET, VOIDmode, newdest, *split);
	  SUBST (*split, newdest);
	  i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);

	  /* If the split point was a MULT and we didn't have one before,
	     don't use one now.  */
	  if (i2_code_number >= 0 && ! (split_code == MULT && ! have_mult))
	    insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
	}
    }

  /* Check for a case where we loaded from memory in a narrow mode and
     then sign extended it, but we need both registers.  In that case,
     we have a PARALLEL with both loads from the same memory location.
     We can split this into a load from memory followed by a register-register
     copy.  This saves at least one insn, more if register allocation can
     eliminate the copy.

     We cannot do this if the destination of the second assignment is
     a register that we have already assumed is zero-extended.  Similarly
     for a SUBREG of such a register.  */

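  /* The shape handled here is, illustratively,

	 (parallel [(set (reg 70) (sign_extend:SI (mem:HI (reg 68))))
		    (set (reg 71) (mem:HI (reg 68)))])

     which becomes the extending load alone in I2 and a copy of the low
     part of (reg 70) into (reg 71) in I3.  */
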
  else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0
	   && GET_CODE (newpat) == PARALLEL
	   && XVECLEN (newpat, 0) == 2
	   && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
	   && GET_CODE (SET_SRC (XVECEXP (newpat, 0, 0))) == SIGN_EXTEND
	   && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
	   && rtx_equal_p (SET_SRC (XVECEXP (newpat, 0, 1)),
			   XEXP (SET_SRC (XVECEXP (newpat, 0, 0)), 0))
	   && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 1)),
				   INSN_CUID (i2))
	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT
	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART
	   && ! (temp = SET_DEST (XVECEXP (newpat, 0, 1)),
		 (GET_CODE (temp) == REG
		  && reg_nonzero_bits[REGNO (temp)] != 0
		  && GET_MODE_BITSIZE (GET_MODE (temp)) < BITS_PER_WORD
		  && GET_MODE_BITSIZE (GET_MODE (temp)) < HOST_BITS_PER_INT
		  && (reg_nonzero_bits[REGNO (temp)]
		      != GET_MODE_MASK (word_mode))))
	   && ! (GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) == SUBREG
		 && (temp = SUBREG_REG (SET_DEST (XVECEXP (newpat, 0, 1))),
		     (GET_CODE (temp) == REG
		      && reg_nonzero_bits[REGNO (temp)] != 0
		      && GET_MODE_BITSIZE (GET_MODE (temp)) < BITS_PER_WORD
		      && GET_MODE_BITSIZE (GET_MODE (temp)) < HOST_BITS_PER_INT
		      && (reg_nonzero_bits[REGNO (temp)]
			  != GET_MODE_MASK (word_mode)))))
	   && ! reg_overlap_mentioned_p (SET_DEST (XVECEXP (newpat, 0, 1)),
					 SET_SRC (XVECEXP (newpat, 0, 1)))
	   && ! find_reg_note (i3, REG_UNUSED,
			       SET_DEST (XVECEXP (newpat, 0, 0))))
    {
      rtx ni2dest;

      newi2pat = XVECEXP (newpat, 0, 0);
      ni2dest = SET_DEST (XVECEXP (newpat, 0, 0));
      newpat = XVECEXP (newpat, 0, 1);
      SUBST (SET_SRC (newpat),
	     gen_lowpart_for_combine (GET_MODE (SET_SRC (newpat)), ni2dest));
      i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);

      if (i2_code_number >= 0)
	insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);

      if (insn_code_number >= 0)
	{
	  rtx insn;
	  rtx link;

	  /* If we will be able to accept this, we have made a change to the
	     destination of I3.  This can invalidate a LOG_LINKS pointing
	     to I3.  No other part of combine.c makes such a transformation.

	     The new I3 will have a destination that was previously the
	     destination of I1 or I2 and which was used in i2 or I3.  Call
	     distribute_links to make a LOG_LINK from the next use of
	     that destination.  */

	  PATTERN (i3) = newpat;
	  distribute_links (gen_rtx_INSN_LIST (VOIDmode, i3, NULL_RTX));

	  /* I3 now uses what used to be its destination and which is
	     now I2's destination.  That means we need a LOG_LINK from
	     I3 to I2.  But we used to have one, so we still will.

	     However, some later insn might be using I2's dest and have
	     a LOG_LINK pointing at I3.  We must remove this link.
	     The simplest way to remove the link is to point it at I1,
	     which we know will be a NOTE.  */

	  for (insn = NEXT_INSN (i3);
	       insn && (this_basic_block == n_basic_blocks - 1
			|| insn != BLOCK_HEAD (this_basic_block + 1));
	       insn = NEXT_INSN (insn))
	    {
	      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
		  && reg_referenced_p (ni2dest, PATTERN (insn)))
		{
		  for (link = LOG_LINKS (insn); link;
		       link = XEXP (link, 1))
		    if (XEXP (link, 0) == i3)
		      XEXP (link, 0) = i1;

		  break;
		}
	    }
	}
    }

  /* Similarly, check for a case where we have a PARALLEL of two independent
     SETs but we started with three insns.  In this case, we can do the sets
     as two separate insns.  This case occurs when some SET allows two
     other insns to combine, but the destination of that SET is still live.  */

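  /* For instance (an illustrative sketch), a PARALLEL such as

	 (parallel [(set (reg 70) (plus:SI (reg 68) (reg 69)))
		    (set (reg 71) (minus:SI (reg 68) (reg 69)))])

     where neither SET references the other's destination can simply be
     emitted as a new I2 and a new I3.  */
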
  else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0
	   && GET_CODE (newpat) == PARALLEL
	   && XVECLEN (newpat, 0) == 2
	   && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != ZERO_EXTRACT
	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != STRICT_LOW_PART
	   && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT
	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART
	   && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 1)),
				   INSN_CUID (i2))
	   /* Don't pass sets with (USE (MEM ...)) dests to the following.  */
	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != USE
	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != USE
	   && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 1)),
				  XVECEXP (newpat, 0, 0))
	   && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 0)),
				  XVECEXP (newpat, 0, 1))
	   && ! (contains_muldiv (SET_SRC (XVECEXP (newpat, 0, 0)))
		 && contains_muldiv (SET_SRC (XVECEXP (newpat, 0, 1)))))
    {
      /* Normally, it doesn't matter which of the two is done first,
	 but it does if one references cc0.  In that case, it has to
	 be first.  */
#ifdef HAVE_cc0
      if (reg_referenced_p (cc0_rtx, XVECEXP (newpat, 0, 0)))
	{
	  newi2pat = XVECEXP (newpat, 0, 0);
	  newpat = XVECEXP (newpat, 0, 1);
	}
      else
#endif
	{
	  newi2pat = XVECEXP (newpat, 0, 1);
	  newpat = XVECEXP (newpat, 0, 0);
	}

      i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);

      if (i2_code_number >= 0)
	insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
    }

  /* If it still isn't recognized, fail and change things back the way they
     were.  */
  if ((insn_code_number < 0
       /* Is the result a reasonable ASM_OPERANDS?  */
       && (! check_asm_operands (newpat) || added_sets_1 || added_sets_2)))
    {
      undo_all ();
      return 0;
    }

  /* If we had to change another insn, make sure it is valid also.  */
  if (undobuf.other_insn)
    {
      rtx other_pat = PATTERN (undobuf.other_insn);
      rtx new_other_notes;
      rtx note, next;

      CLEAR_HARD_REG_SET (newpat_used_regs);

      other_code_number = recog_for_combine (&other_pat, undobuf.other_insn,
					     &new_other_notes);

      if (other_code_number < 0 && ! check_asm_operands (other_pat))
	{
	  undo_all ();
	  return 0;
	}

      PATTERN (undobuf.other_insn) = other_pat;

      /* If any of the notes in OTHER_INSN were REG_UNUSED, ensure that they
	 are still valid.  Then add any non-duplicate notes added by
	 recog_for_combine.  */
      for (note = REG_NOTES (undobuf.other_insn); note; note = next)
	{
	  next = XEXP (note, 1);

	  if (REG_NOTE_KIND (note) == REG_UNUSED
	      && ! reg_set_p (XEXP (note, 0), PATTERN (undobuf.other_insn)))
	    {
	      if (GET_CODE (XEXP (note, 0)) == REG)
		REG_N_DEATHS (REGNO (XEXP (note, 0)))--;

	      remove_note (undobuf.other_insn, note);
	    }
	}

      for (note = new_other_notes; note; note = XEXP (note, 1))
	if (GET_CODE (XEXP (note, 0)) == REG)
	  REG_N_DEATHS (REGNO (XEXP (note, 0)))++;

      distribute_notes (new_other_notes, undobuf.other_insn,
			undobuf.other_insn, NULL_RTX, NULL_RTX, NULL_RTX);
    }

  /* We now know that we can do this combination.  Merge the insns and
     update the status of registers and LOG_LINKS.  */

  {
    rtx i3notes, i2notes, i1notes = 0;
    rtx i3links, i2links, i1links = 0;
    rtx midnotes = 0;
    register int regno;
    /* Compute which registers we expect to eliminate.  newi2pat may be setting
       either i3dest or i2dest, so we must check it.  Also, i1dest may be the
       same as i3dest, in which case newi2pat may be setting i1dest.  */
    rtx elim_i2 = ((newi2pat && reg_set_p (i2dest, newi2pat))
		   || i2dest_in_i2src || i2dest_in_i1src
		   ? 0 : i2dest);
    rtx elim_i1 = (i1 == 0 || i1dest_in_i1src
		   || (newi2pat && reg_set_p (i1dest, newi2pat))
		   ? 0 : i1dest);

    /* Get the old REG_NOTES and LOG_LINKS from all our insns and
       clear them.  */
    i3notes = REG_NOTES (i3), i3links = LOG_LINKS (i3);
    i2notes = REG_NOTES (i2), i2links = LOG_LINKS (i2);
    if (i1)
      i1notes = REG_NOTES (i1), i1links = LOG_LINKS (i1);

    /* Ensure that we do not have something that should not be shared but
       occurs multiple times in the new insns.  Check this by first
       resetting all the `used' flags and then copying anything that is
       shared.  */

    reset_used_flags (i3notes);
    reset_used_flags (i2notes);
    reset_used_flags (i1notes);
    reset_used_flags (newpat);
    reset_used_flags (newi2pat);
    if (undobuf.other_insn)
      reset_used_flags (PATTERN (undobuf.other_insn));

    i3notes = copy_rtx_if_shared (i3notes);
    i2notes = copy_rtx_if_shared (i2notes);
    i1notes = copy_rtx_if_shared (i1notes);
    newpat = copy_rtx_if_shared (newpat);
    newi2pat = copy_rtx_if_shared (newi2pat);
    if (undobuf.other_insn)
      reset_used_flags (PATTERN (undobuf.other_insn));

    INSN_CODE (i3) = insn_code_number;
    PATTERN (i3) = newpat;
    if (undobuf.other_insn)
      INSN_CODE (undobuf.other_insn) = other_code_number;

    /* We had one special case above where I2 had more than one set and
       we replaced a destination of one of those sets with the destination
       of I3.  In that case, we have to update LOG_LINKS of insns later
       in this basic block.  Note that this (expensive) case is rare.

       Also, in this case, we must pretend that all REG_NOTEs for I2
       actually came from I3, so that REG_UNUSED notes from I2 will be
       properly handled.  */

    if (i3_subst_into_i2)
      {
	if (GET_CODE (PATTERN (i2)) == PARALLEL)
	  {
	    for (i = 0; i < XVECLEN (PATTERN (i2), 0); i++)
	      if (GET_CODE (SET_DEST (XVECEXP (PATTERN (i2), 0, i))) == REG
		  && SET_DEST (XVECEXP (PATTERN (i2), 0, i)) != i2dest
		  && ! find_reg_note (i2, REG_UNUSED,
				      SET_DEST (XVECEXP (PATTERN (i2), 0, i))))
		for (temp = NEXT_INSN (i2);
		     temp && (this_basic_block == n_basic_blocks - 1
			      || BLOCK_HEAD (this_basic_block) != temp);
		     temp = NEXT_INSN (temp))
		  if (temp != i3 && GET_RTX_CLASS (GET_CODE (temp)) == 'i')
		    for (link = LOG_LINKS (temp); link; link = XEXP (link, 1))
		      if (XEXP (link, 0) == i2)
			XEXP (link, 0) = i3;
	  }

	if (i3notes)
	  {
	    rtx link = i3notes;
	    while (XEXP (link, 1))
	      link = XEXP (link, 1);
	    XEXP (link, 1) = i2notes;
	  }
	else
	  i3notes = i2notes;
	i2notes = 0;
      }

    LOG_LINKS (i3) = 0;
    REG_NOTES (i3) = 0;
    LOG_LINKS (i2) = 0;
    REG_NOTES (i2) = 0;

    if (newi2pat)
      {
	INSN_CODE (i2) = i2_code_number;
	PATTERN (i2) = newi2pat;
      }
    else
      {
	PUT_CODE (i2, NOTE);
	NOTE_LINE_NUMBER (i2) = NOTE_INSN_DELETED;
	NOTE_SOURCE_FILE (i2) = 0;
      }

    if (i1)
      {
	LOG_LINKS (i1) = 0;
	REG_NOTES (i1) = 0;
	PUT_CODE (i1, NOTE);
	NOTE_LINE_NUMBER (i1) = NOTE_INSN_DELETED;
	NOTE_SOURCE_FILE (i1) = 0;
      }

    /* Get death notes for everything that is now used in either I3 or
       I2 and used to die in a previous insn.  If we built two new
       patterns, move from I1 to I2 then I2 to I3 so that we get the
       proper movement on registers that I2 modifies.  */

    if (newi2pat)
      {
	move_deaths (newi2pat, NULL_RTX, INSN_CUID (i1), i2, &midnotes);
	move_deaths (newpat, newi2pat, INSN_CUID (i1), i3, &midnotes);
      }
    else
      move_deaths (newpat, NULL_RTX, i1 ? INSN_CUID (i1) : INSN_CUID (i2),
		   i3, &midnotes);

    /* Distribute all the LOG_LINKS and REG_NOTES from I1, I2, and I3.  */
    if (i3notes)
      distribute_notes (i3notes, i3, i3, newi2pat ? i2 : NULL_RTX,
			elim_i2, elim_i1);
    if (i2notes)
      distribute_notes (i2notes, i2, i3, newi2pat ? i2 : NULL_RTX,
			elim_i2, elim_i1);
    if (i1notes)
      distribute_notes (i1notes, i1, i3, newi2pat ? i2 : NULL_RTX,
			elim_i2, elim_i1);
    if (midnotes)
      distribute_notes (midnotes, NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
			elim_i2, elim_i1);

    /* Distribute any notes added to I2 or I3 by recog_for_combine.  We
       know these are REG_UNUSED and want them to go to the desired insn,
       so we always pass it as i3.  We have not counted the notes in
       reg_n_deaths yet, so we need to do so now.  */

    if (newi2pat && new_i2_notes)
      {
	for (temp = new_i2_notes; temp; temp = XEXP (temp, 1))
	  if (GET_CODE (XEXP (temp, 0)) == REG)
	    REG_N_DEATHS (REGNO (XEXP (temp, 0)))++;

	distribute_notes (new_i2_notes, i2, i2, NULL_RTX, NULL_RTX, NULL_RTX);
      }

    if (new_i3_notes)
      {
	for (temp = new_i3_notes; temp; temp = XEXP (temp, 1))
	  if (GET_CODE (XEXP (temp, 0)) == REG)
	    REG_N_DEATHS (REGNO (XEXP (temp, 0)))++;

	distribute_notes (new_i3_notes, i3, i3, NULL_RTX, NULL_RTX, NULL_RTX);
      }

    /* If I3DEST was used in I3SRC, it really died in I3.  We may need to
       put a REG_DEAD note for it somewhere.  If NEWI2PAT exists and sets
       I3DEST, the death must be somewhere before I2, not I3.  If we passed I3
       in that case, it might delete I2.  Similarly for I2 and I1.
       Show an additional death due to the REG_DEAD note we make here.  If
       we discard it in distribute_notes, we will decrement it again.  */

    if (i3dest_killed)
      {
	if (GET_CODE (i3dest_killed) == REG)
	  REG_N_DEATHS (REGNO (i3dest_killed))++;

	if (newi2pat && reg_set_p (i3dest_killed, newi2pat))
	  distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i3dest_killed,
					       NULL_RTX),
			    NULL_RTX, i2, NULL_RTX, elim_i2, elim_i1);
	else
	  distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i3dest_killed,
					       NULL_RTX),
			    NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
			    elim_i2, elim_i1);
      }

    if (i2dest_in_i2src)
      {
	if (GET_CODE (i2dest) == REG)
	  REG_N_DEATHS (REGNO (i2dest))++;

	if (newi2pat && reg_set_p (i2dest, newi2pat))
	  distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i2dest, NULL_RTX),
			    NULL_RTX, i2, NULL_RTX, NULL_RTX, NULL_RTX);
	else
	  distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i2dest, NULL_RTX),
			    NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
			    NULL_RTX, NULL_RTX);
      }

    if (i1dest_in_i1src)
      {
	if (GET_CODE (i1dest) == REG)
	  REG_N_DEATHS (REGNO (i1dest))++;

	if (newi2pat && reg_set_p (i1dest, newi2pat))
	  distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i1dest, NULL_RTX),
			    NULL_RTX, i2, NULL_RTX, NULL_RTX, NULL_RTX);
	else
	  distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i1dest, NULL_RTX),
			    NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
			    NULL_RTX, NULL_RTX);
      }

    distribute_links (i3links);
    distribute_links (i2links);
    distribute_links (i1links);

    if (GET_CODE (i2dest) == REG)
      {
	rtx link;
	rtx i2_insn = 0, i2_val = 0, set;

	/* The insn that used to set this register doesn't exist, and
	   this life of the register may not exist either.  See if one of
	   I3's links points to an insn that sets I2DEST.  If it does,
	   that is now the last known value for I2DEST.  If we don't update
	   this and I2 set the register to a value that depended on its old
	   contents, we will get confused.  If this insn is used, things
	   will be set correctly in combine_instructions.  */

	for (link = LOG_LINKS (i3); link; link = XEXP (link, 1))
	  if ((set = single_set (XEXP (link, 0))) != 0
	      && rtx_equal_p (i2dest, SET_DEST (set)))
	    i2_insn = XEXP (link, 0), i2_val = SET_SRC (set);

	record_value_for_reg (i2dest, i2_insn, i2_val);

	/* If the reg formerly set in I2 died only once and that was in I3,
	   zero its use count so it won't make `reload' do any work.  */
	if (! added_sets_2
	    && (newi2pat == 0 || ! reg_mentioned_p (i2dest, newi2pat))
	    && ! i2dest_in_i2src)
	  {
	    regno = REGNO (i2dest);
	    REG_N_SETS (regno)--;
	  }
      }

    if (i1 && GET_CODE (i1dest) == REG)
      {
	rtx link;
	rtx i1_insn = 0, i1_val = 0, set;

	for (link = LOG_LINKS (i3); link; link = XEXP (link, 1))
	  if ((set = single_set (XEXP (link, 0))) != 0
	      && rtx_equal_p (i1dest, SET_DEST (set)))
	    i1_insn = XEXP (link, 0), i1_val = SET_SRC (set);

	record_value_for_reg (i1dest, i1_insn, i1_val);

	regno = REGNO (i1dest);
	if (! added_sets_1 && ! i1dest_in_i1src)
	  REG_N_SETS (regno)--;
      }

    /* Update reg_nonzero_bits et al for any changes that may have been made
       to this insn.  */

    note_stores (newpat, set_nonzero_bits_and_sign_copies, NULL);
    if (newi2pat)
      note_stores (newi2pat, set_nonzero_bits_and_sign_copies, NULL);

    /* If I3 is now an unconditional jump, ensure that it has a
       BARRIER following it since it may have initially been a
       conditional jump.  It may also be the last nonnote insn.  */

    if ((GET_CODE (newpat) == RETURN || simplejump_p (i3))
	&& ((temp = next_nonnote_insn (i3)) == NULL_RTX
	    || GET_CODE (temp) != BARRIER))
      emit_barrier_after (i3);
  }

  combine_successes++;
  undo_commit ();

  /* Clear this here, so that subsequent get_last_value calls are not
     affected.  */
  subst_prev_insn = NULL_RTX;

  if (added_links_insn
      && (newi2pat == 0 || INSN_CUID (added_links_insn) < INSN_CUID (i2))
      && INSN_CUID (added_links_insn) < INSN_CUID (i3))
    return added_links_insn;
  else
    return newi2pat ? i2 : i3;
}
\f
/* Undo all the modifications recorded in undobuf.  */

static void
undo_all ()
{
  struct undo *undo, *next;

  for (undo = undobuf.undos; undo; undo = next)
    {
      next = undo->next;
      if (undo->is_int)
	*undo->where.i = undo->old_contents.i;
      else
	*undo->where.r = undo->old_contents.r;

      undo->next = undobuf.frees;
      undobuf.frees = undo;
    }

  obfree (undobuf.storage);
  undobuf.undos = undobuf.previous_undos = 0;

  /* Clear this here, so that subsequent get_last_value calls are not
     affected.  */
  subst_prev_insn = NULL_RTX;
}

/* We've committed to accepting the changes we made.  Move all
   of the undos to the free list.  */

static void
undo_commit ()
{
  struct undo *undo, *next;

  for (undo = undobuf.undos; undo; undo = next)
    {
      next = undo->next;
      undo->next = undobuf.frees;
      undobuf.frees = undo;
    }
  undobuf.undos = undobuf.previous_undos = 0;
}

\f
/* Find the innermost point within the rtx at LOC, possibly LOC itself,
   where we have an arithmetic expression and return that point.  LOC will
   be inside INSN.

   try_combine will call this function to see if an insn can be split into
   two insns.  */

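/* For example (an illustrative sketch), given the unrecognizable insn
   (set (reg 70) (plus:SI (mult:SI (reg 68) (const_int 3)) (reg 69))),
   the returned split point might be the MULT subexpression, which
   try_combine can then compute separately into I2DEST.  */
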
static rtx *
find_split_point (loc, insn)
     rtx *loc;
     rtx insn;
{
  rtx x = *loc;
  enum rtx_code code = GET_CODE (x);
  rtx *split;
  int len = 0, pos = 0, unsignedp = 0;
  rtx inner = NULL_RTX;

  /* First special-case some codes.  */
  switch (code)
    {
    case SUBREG:
#ifdef INSN_SCHEDULING
      /* If we are making a paradoxical SUBREG invalid, it becomes a split
	 point.  */
      if (GET_CODE (SUBREG_REG (x)) == MEM)
	return loc;
#endif
      return find_split_point (&SUBREG_REG (x), insn);

    case MEM:
#ifdef HAVE_lo_sum
      /* If we have (mem (const ..)) or (mem (symbol_ref ...)), split it
	 using LO_SUM and HIGH.  */
      if (GET_CODE (XEXP (x, 0)) == CONST
	  || GET_CODE (XEXP (x, 0)) == SYMBOL_REF)
	{
	  SUBST (XEXP (x, 0),
		 gen_rtx_combine (LO_SUM, Pmode,
				  gen_rtx_combine (HIGH, Pmode, XEXP (x, 0)),
				  XEXP (x, 0)));
	  return &XEXP (XEXP (x, 0), 0);
	}
#endif

      /* If we have a PLUS whose second operand is a constant and the
	 address is not valid, perhaps we can split it up using
	 the machine-specific way to split large constants.  We use
	 the first pseudo-reg (one of the virtual regs) as a placeholder;
	 it will not remain in the result.  */
      if (GET_CODE (XEXP (x, 0)) == PLUS
	  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
	  && ! memory_address_p (GET_MODE (x), XEXP (x, 0)))
	{
	  rtx reg = regno_reg_rtx[FIRST_PSEUDO_REGISTER];
	  rtx seq = split_insns (gen_rtx_SET (VOIDmode, reg, XEXP (x, 0)),
				 subst_insn);

	  /* This should have produced two insns, each of which sets our
	     placeholder.  If the source of the second is a valid address,
	     we can put both sources together and make a split point
	     in the middle.  */

	  if (seq && XVECLEN (seq, 0) == 2
	      && GET_CODE (XVECEXP (seq, 0, 0)) == INSN
	      && GET_CODE (PATTERN (XVECEXP (seq, 0, 0))) == SET
	      && SET_DEST (PATTERN (XVECEXP (seq, 0, 0))) == reg
	      && ! reg_mentioned_p (reg,
				    SET_SRC (PATTERN (XVECEXP (seq, 0, 0))))
	      && GET_CODE (XVECEXP (seq, 0, 1)) == INSN
	      && GET_CODE (PATTERN (XVECEXP (seq, 0, 1))) == SET
	      && SET_DEST (PATTERN (XVECEXP (seq, 0, 1))) == reg
	      && memory_address_p (GET_MODE (x),
				   SET_SRC (PATTERN (XVECEXP (seq, 0, 1)))))
	    {
	      rtx src1 = SET_SRC (PATTERN (XVECEXP (seq, 0, 0)));
	      rtx src2 = SET_SRC (PATTERN (XVECEXP (seq, 0, 1)));

	      /* Replace the placeholder in SRC2 with SRC1.  If we can
		 find where in SRC2 it was placed, that can become our
		 split point and we can replace this address with SRC2.
		 Just try two obvious places.  */

	      src2 = replace_rtx (src2, reg, src1);
	      split = 0;
	      if (XEXP (src2, 0) == src1)
		split = &XEXP (src2, 0);
	      else if (GET_RTX_FORMAT (GET_CODE (XEXP (src2, 0)))[0] == 'e'
		       && XEXP (XEXP (src2, 0), 0) == src1)
		split = &XEXP (XEXP (src2, 0), 0);

	      if (split)
		{
		  SUBST (XEXP (x, 0), src2);
		  return split;
		}
	    }

	  /* If that didn't work, perhaps the first operand is complex and
	     needs to be computed separately, so make a split point there.
	     This will occur on machines that just support REG + CONST
	     and have a constant moved through some previous computation.  */

	  else if (GET_RTX_CLASS (GET_CODE (XEXP (XEXP (x, 0), 0))) != 'o'
		   && ! (GET_CODE (XEXP (XEXP (x, 0), 0)) == SUBREG
			 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG
						      (XEXP (XEXP (x, 0), 0))))
			     == 'o')))
	    return &XEXP (XEXP (x, 0), 0);
	}
      break;

    case SET:
#ifdef HAVE_cc0
      /* If SET_DEST is CC0 and SET_SRC is not an operand, a COMPARE, or a
	 ZERO_EXTRACT, the most likely reason why this doesn't match is that
	 we need to put the operand into a register.  So split at that
	 point.  */

      if (SET_DEST (x) == cc0_rtx
	  && GET_CODE (SET_SRC (x)) != COMPARE
	  && GET_CODE (SET_SRC (x)) != ZERO_EXTRACT
	  && GET_RTX_CLASS (GET_CODE (SET_SRC (x))) != 'o'
	  && ! (GET_CODE (SET_SRC (x)) == SUBREG
		&& GET_RTX_CLASS (GET_CODE (SUBREG_REG (SET_SRC (x)))) == 'o'))
	return &SET_SRC (x);
#endif

      /* See if we can split SET_SRC as it stands.  */
      split = find_split_point (&SET_SRC (x), insn);
      if (split && split != &SET_SRC (x))
	return split;

      /* See if we can split SET_DEST as it stands.  */
      split = find_split_point (&SET_DEST (x), insn);
      if (split && split != &SET_DEST (x))
	return split;

      /* See if this is a bitfield assignment with everything constant.  If
	 so, this is an IOR of an AND, so split it into that.  */
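      /* E.g. (an illustrative sketch, with little-endian bit numbering):

	     (set (zero_extract:SI (reg 68) (const_int 8) (const_int 8))
		  (const_int 37))

	 becomes

	     (set (reg 68) (ior:SI (and:SI (reg 68) (const_int -65281))
				   (const_int 9472)))

	 since 37 << 8 == 9472 and ~(255 << 8) == -65281.  */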
      if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
	  && (GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
	      <= HOST_BITS_PER_WIDE_INT)
	  && GET_CODE (XEXP (SET_DEST (x), 1)) == CONST_INT
	  && GET_CODE (XEXP (SET_DEST (x), 2)) == CONST_INT
	  && GET_CODE (SET_SRC (x)) == CONST_INT
	  && ((INTVAL (XEXP (SET_DEST (x), 1))
	       + INTVAL (XEXP (SET_DEST (x), 2)))
	      <= GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0))))
	  && ! side_effects_p (XEXP (SET_DEST (x), 0)))
	{
	  int pos = INTVAL (XEXP (SET_DEST (x), 2));
	  int len = INTVAL (XEXP (SET_DEST (x), 1));
	  int src = INTVAL (SET_SRC (x));
	  rtx dest = XEXP (SET_DEST (x), 0);
	  enum machine_mode mode = GET_MODE (dest);
	  unsigned HOST_WIDE_INT mask = ((HOST_WIDE_INT) 1 << len) - 1;

	  if (BITS_BIG_ENDIAN)
	    pos = GET_MODE_BITSIZE (mode) - len - pos;

	  if ((unsigned HOST_WIDE_INT) src == mask)
	    SUBST (SET_SRC (x),
		   gen_binary (IOR, mode, dest, GEN_INT (src << pos)));
	  else
	    SUBST (SET_SRC (x),
		   gen_binary (IOR, mode,
			       gen_binary (AND, mode, dest,
					   GEN_INT (~ (mask << pos)
						    & GET_MODE_MASK (mode))),
			       GEN_INT (src << pos)));

	  SUBST (SET_DEST (x), dest);

	  split = find_split_point (&SET_SRC (x), insn);
	  if (split && split != &SET_SRC (x))
	    return split;
	}

2922 /* Otherwise, see if this is an operation that we can split into two.
2923 If so, try to split that. */
2924 code = GET_CODE (SET_SRC (x));
2925
2926 switch (code)
2927 {
d0ab8cd3
RK
2928 case AND:
2929 /* If we are AND'ing with a large constant that is only a single
2930 bit and the result is only being used in a context where we
2931 need to know if it is zero or non-zero, replace it with a bit
2932 extraction. This will avoid the large constant, which might
2933 have taken more than one insn to make. If the constant were
2934 not a valid argument to the AND but took only one insn to make,
2935 this is no worse, but if it took more than one insn, it will
2936 be better. */
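 /* For example, if register D is set by (and R (const_int 32768)) and is
 used only in (ne D (const_int 0)), the AND is rewritten below as
 (zero_extract R (const_int 1) (const_int 15)), avoiding the 0x8000
 constant. */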
2937
2938 if (GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
2939 && GET_CODE (XEXP (SET_SRC (x), 0)) == REG
2940 && (pos = exact_log2 (INTVAL (XEXP (SET_SRC (x), 1)))) >= 7
2941 && GET_CODE (SET_DEST (x)) == REG
2942 && (split = find_single_use (SET_DEST (x), insn, NULL_PTR)) != 0
2943 && (GET_CODE (*split) == EQ || GET_CODE (*split) == NE)
2944 && XEXP (*split, 0) == SET_DEST (x)
2945 && XEXP (*split, 1) == const0_rtx)
2946 {
76184def
DE
2947 rtx extraction = make_extraction (GET_MODE (SET_DEST (x)),
2948 XEXP (SET_SRC (x), 0),
2949 pos, NULL_RTX, 1, 1, 0, 0);
2950 if (extraction != 0)
2951 {
2952 SUBST (SET_SRC (x), extraction);
2953 return find_split_point (loc, insn);
2954 }
d0ab8cd3
RK
2955 }
2956 break;
2957
1a6ec070
RK
2958 case NE:
2959 /* If STORE_FLAG_VALUE is -1, this is (NE X 0), and if only one bit of X
2960 can possibly be nonzero, this can be converted into a NEG of a shift. */
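 /* For example, if nonzero_bits shows that only bit 3 of X can be set,
 (set D (ne X (const_int 0))) becomes (set D (neg (lshiftrt X (const_int 3)))),
 which yields 0 or -1 as STORE_FLAG_VALUE requires. */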
2961 if (STORE_FLAG_VALUE == -1 && XEXP (SET_SRC (x), 1) == const0_rtx
2962 && GET_MODE (SET_SRC (x)) == GET_MODE (XEXP (SET_SRC (x), 0))
4eb2cb10 2963 && 1 <= (pos = exact_log2
1a6ec070
RK
2964 (nonzero_bits (XEXP (SET_SRC (x), 0),
2965 GET_MODE (XEXP (SET_SRC (x), 0))))))
2966 {
2967 enum machine_mode mode = GET_MODE (XEXP (SET_SRC (x), 0));
2968
2969 SUBST (SET_SRC (x),
2970 gen_rtx_combine (NEG, mode,
2971 gen_rtx_combine (LSHIFTRT, mode,
2972 XEXP (SET_SRC (x), 0),
4eb2cb10 2973 GEN_INT (pos))));
1a6ec070
RK
2974
2975 split = find_split_point (&SET_SRC (x), insn);
2976 if (split && split != &SET_SRC (x))
2977 return split;
2978 }
2979 break;
2980
230d793d
RS
2981 case SIGN_EXTEND:
2982 inner = XEXP (SET_SRC (x), 0);
101c1a3d
JL
2983
2984 /* We can't optimize if either mode is a partial integer
2985 mode as we don't know how many bits are significant
2986 in those modes. */
2987 if (GET_MODE_CLASS (GET_MODE (inner)) == MODE_PARTIAL_INT
2988 || GET_MODE_CLASS (GET_MODE (SET_SRC (x))) == MODE_PARTIAL_INT)
2989 break;
2990
230d793d
RS
2991 pos = 0;
2992 len = GET_MODE_BITSIZE (GET_MODE (inner));
2993 unsignedp = 0;
2994 break;
2995
2996 case SIGN_EXTRACT:
2997 case ZERO_EXTRACT:
2998 if (GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
2999 && GET_CODE (XEXP (SET_SRC (x), 2)) == CONST_INT)
3000 {
3001 inner = XEXP (SET_SRC (x), 0);
3002 len = INTVAL (XEXP (SET_SRC (x), 1));
3003 pos = INTVAL (XEXP (SET_SRC (x), 2));
3004
f76b9db2
ILT
3005 if (BITS_BIG_ENDIAN)
3006 pos = GET_MODE_BITSIZE (GET_MODE (inner)) - len - pos;
230d793d
RS
3007 unsignedp = (code == ZERO_EXTRACT);
3008 }
3009 break;
e9a25f70
JL
3010
3011 default:
3012 break;
230d793d
RS
3013 }
3014
3015 if (len && pos >= 0 && pos + len <= GET_MODE_BITSIZE (GET_MODE (inner)))
3016 {
3017 enum machine_mode mode = GET_MODE (SET_SRC (x));
3018
d0ab8cd3
RK
3019 /* For unsigned, we have a choice of a shift followed by an
3020 AND or two shifts. Use two shifts for field sizes where the
3021 constant might be too large. We assume here that we can
3022 always at least get 8-bit constants in an AND insn, which is
3023 true for every current RISC. */
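 /* For example, on a 32-bit machine an unsigned 4-bit field at bit 2
 becomes (and (lshiftrt X 2) 15), while an unsigned 12-bit field at
 bit 2 becomes (lshiftrt (ashift X 18) 20), avoiding the 4095 mask
 constant. */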
3024
3025 if (unsignedp && len <= 8)
230d793d
RS
3026 {
3027 SUBST (SET_SRC (x),
3028 gen_rtx_combine
3029 (AND, mode,
3030 gen_rtx_combine (LSHIFTRT, mode,
3031 gen_lowpart_for_combine (mode, inner),
5f4f0e22
CH
3032 GEN_INT (pos)),
3033 GEN_INT (((HOST_WIDE_INT) 1 << len) - 1)));
230d793d 3034
d0ab8cd3 3035 split = find_split_point (&SET_SRC (x), insn);
230d793d
RS
3036 if (split && split != &SET_SRC (x))
3037 return split;
3038 }
3039 else
3040 {
3041 SUBST (SET_SRC (x),
3042 gen_rtx_combine
d0ab8cd3 3043 (unsignedp ? LSHIFTRT : ASHIFTRT, mode,
230d793d
RS
3044 gen_rtx_combine (ASHIFT, mode,
3045 gen_lowpart_for_combine (mode, inner),
5f4f0e22
CH
3046 GEN_INT (GET_MODE_BITSIZE (mode)
3047 - len - pos)),
3048 GEN_INT (GET_MODE_BITSIZE (mode) - len)));
230d793d 3049
d0ab8cd3 3050 split = find_split_point (&SET_SRC (x), insn);
230d793d
RS
3051 if (split && split != &SET_SRC (x))
3052 return split;
3053 }
3054 }
3055
3056 /* See if this is a simple operation with a constant as the second
3057 operand. It might be that this constant is out of range and hence
3058 could be used as a split point. */
3059 if ((GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '2'
3060 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == 'c'
3061 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '<')
3062 && CONSTANT_P (XEXP (SET_SRC (x), 1))
3063 && (GET_RTX_CLASS (GET_CODE (XEXP (SET_SRC (x), 0))) == 'o'
3064 || (GET_CODE (XEXP (SET_SRC (x), 0)) == SUBREG
3065 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (SET_SRC (x), 0))))
3066 == 'o'))))
3067 return &XEXP (SET_SRC (x), 1);
3068
3069 /* Finally, see if this is a simple operation with its first operand
3070 not in a register. The operation might require this operand in a
3071 register, so return it as a split point. We can always do this
3072 because if the first operand were another operation, we would have
3073 already found it as a split point. */
3074 if ((GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '2'
3075 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == 'c'
3076 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '<'
3077 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '1')
3078 && ! register_operand (XEXP (SET_SRC (x), 0), VOIDmode))
3079 return &XEXP (SET_SRC (x), 0);
3080
3081 return 0;
3082
3083 case AND:
3084 case IOR:
3085 /* We write NOR as (and (not A) (not B)), but if we don't have a NOR,
3086 it is better to write this as (not (ior A B)) so we can split it.
3087 Similarly for IOR. */
3088 if (GET_CODE (XEXP (x, 0)) == NOT && GET_CODE (XEXP (x, 1)) == NOT)
3089 {
3090 SUBST (*loc,
3091 gen_rtx_combine (NOT, GET_MODE (x),
3092 gen_rtx_combine (code == IOR ? AND : IOR,
3093 GET_MODE (x),
3094 XEXP (XEXP (x, 0), 0),
3095 XEXP (XEXP (x, 1), 0))));
d0ab8cd3 3096 return find_split_point (loc, insn);
230d793d
RS
3097 }
3098
3099 /* Many RISC machines have a large set of logical insns. If the
3100 second operand is a NOT, put it first so we will try to split the
3101 other operand first. */
3102 if (GET_CODE (XEXP (x, 1)) == NOT)
3103 {
3104 rtx tem = XEXP (x, 0);
3105 SUBST (XEXP (x, 0), XEXP (x, 1));
3106 SUBST (XEXP (x, 1), tem);
3107 }
3108 break;
e9a25f70
JL
3109
3110 default:
3111 break;
230d793d
RS
3112 }
3113
3114 /* Otherwise, select our actions depending on our rtx class. */
3115 switch (GET_RTX_CLASS (code))
3116 {
3117 case 'b': /* This is ZERO_EXTRACT and SIGN_EXTRACT. */
3118 case '3':
d0ab8cd3 3119 split = find_split_point (&XEXP (x, 2), insn);
230d793d
RS
3120 if (split)
3121 return split;
0f41302f 3122 /* ... fall through ... */
230d793d
RS
3123 case '2':
3124 case 'c':
3125 case '<':
d0ab8cd3 3126 split = find_split_point (&XEXP (x, 1), insn);
230d793d
RS
3127 if (split)
3128 return split;
0f41302f 3129 /* ... fall through ... */
230d793d
RS
3130 case '1':
3131 /* Some machines have (and (shift ...) ...) insns. If X is not
3132 an AND, but XEXP (X, 0) is, use it as our split point. */
3133 if (GET_CODE (x) != AND && GET_CODE (XEXP (x, 0)) == AND)
3134 return &XEXP (x, 0);
3135
d0ab8cd3 3136 split = find_split_point (&XEXP (x, 0), insn);
230d793d
RS
3137 if (split)
3138 return split;
3139 return loc;
3140 }
3141
3142 /* Otherwise, we don't have a split point. */
3143 return 0;
3144}
3145\f
3146/* Throughout X, replace FROM with TO, and return the result.
3147 The result is TO if X is FROM;
3148 otherwise the result is X, but its contents may have been modified.
3149 If they were modified, a record was made in undobuf so that
3150 undo_all will (among other things) return X to its original state.
3151
3152 If the number of changes necessary is too much to record to undo,
3153 the excess changes are not made, so the result is invalid.
3154 The changes already made can still be undone.
3155 undobuf.num_undo is incremented for such changes, so by testing that count
3156 the caller can tell whether the result is valid.
3157
3158 `n_occurrences' is incremented each time FROM is replaced.
3159
3160 IN_DEST is non-zero if we are processing the SET_DEST of a SET.
3161
5089e22e 3162 UNIQUE_COPY is non-zero if each substitution must be unique. We do this
230d793d
RS
3163 by copying if `n_occurrences' is non-zero. */
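 /* For example, when combining (set R1 (mult R2 R3)) into a following
 (set R4 (plus R1 R5)), subst is called with X the PLUS pattern,
 FROM = R1 and TO = (mult R2 R3), producing
 (set R4 (plus (mult R2 R3) R5)). */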
3164
3165static rtx
3166subst (x, from, to, in_dest, unique_copy)
3167 register rtx x, from, to;
3168 int in_dest;
3169 int unique_copy;
3170{
f24ad0e4 3171 register enum rtx_code code = GET_CODE (x);
230d793d 3172 enum machine_mode op0_mode = VOIDmode;
6f7d635c 3173 register const char *fmt;
8079805d
RK
3174 register int len, i;
3175 rtx new;
230d793d
RS
3176
3177/* Two expressions are equal if they are identical copies of a shared
3178 RTX or if they are both registers with the same register number
3179 and mode. */
3180
3181#define COMBINE_RTX_EQUAL_P(X,Y) \
3182 ((X) == (Y) \
3183 || (GET_CODE (X) == REG && GET_CODE (Y) == REG \
3184 && REGNO (X) == REGNO (Y) && GET_MODE (X) == GET_MODE (Y)))
3185
3186 if (! in_dest && COMBINE_RTX_EQUAL_P (x, from))
3187 {
3188 n_occurrences++;
3189 return (unique_copy && n_occurrences > 1 ? copy_rtx (to) : to);
3190 }
3191
3192 /* If X and FROM are the same register but different modes, they will
3193 not have been seen as equal above. However, flow.c will make a
3194 LOG_LINKS entry for that case. If we do nothing, we will try to
3195 rerecognize our original insn and, when it succeeds, we will
3196 delete the feeding insn, which is incorrect.
3197
3198 So force this insn not to match in this (rare) case. */
3199 if (! in_dest && code == REG && GET_CODE (from) == REG
3200 && REGNO (x) == REGNO (from))
38a448ca 3201 return gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
230d793d
RS
3202
3203 /* If this is an object, we are done unless it is a MEM or LO_SUM, both
3204 of which may contain things that can be combined. */
3205 if (code != MEM && code != LO_SUM && GET_RTX_CLASS (code) == 'o')
3206 return x;
3207
3208 /* It is possible to have a subexpression appear twice in the insn.
3209 Suppose that FROM is a register that appears within TO.
3210 Then, after that subexpression has been scanned once by `subst',
3211 the second time it is scanned, TO may be found. If we were
3212 to scan TO here, we would find FROM within it and create a
3213 self-referent rtl structure which is completely wrong. */
3214 if (COMBINE_RTX_EQUAL_P (x, to))
3215 return to;
3216
4f4b3679
RH
3217 /* Parallel asm_operands need special attention because all of the
3218 inputs are shared across the arms. Furthermore, unsharing the
3219 rtl results in recognition failures. Failure to handle this case
3220 specially can result in circular rtl.
3221
3222 Solve this by doing a normal pass across the first entry of the
3223 parallel, and only processing the SET_DESTs of the subsequent
3224 entries. Ug. */
3225
3226 if (code == PARALLEL
3227 && GET_CODE (XVECEXP (x, 0, 0)) == SET
3228 && GET_CODE (SET_SRC (XVECEXP (x, 0, 0))) == ASM_OPERANDS)
230d793d 3229 {
4f4b3679
RH
3230 new = subst (XVECEXP (x, 0, 0), from, to, 0, unique_copy);
3231
3232 /* If this substitution failed, this whole thing fails. */
3233 if (GET_CODE (new) == CLOBBER
3234 && XEXP (new, 0) == const0_rtx)
3235 return new;
3236
3237 SUBST (XVECEXP (x, 0, 0), new);
3238
3239 for (i = XVECLEN (x, 0) - 1; i >= 1; i--)
230d793d 3240 {
4f4b3679
RH
3241 rtx dest = SET_DEST (XVECEXP (x, 0, i));
3242
3243 if (GET_CODE (dest) != REG
3244 && GET_CODE (dest) != CC0
3245 && GET_CODE (dest) != PC)
230d793d 3246 {
4f4b3679 3247 new = subst (dest, from, to, 0, unique_copy);
230d793d 3248
4f4b3679
RH
3249 /* If this substitution failed, this whole thing fails. */
3250 if (GET_CODE (new) == CLOBBER
3251 && XEXP (new, 0) == const0_rtx)
3252 return new;
230d793d 3253
4f4b3679 3254 SUBST (SET_DEST (XVECEXP (x, 0, i)), new);
230d793d
RS
3255 }
3256 }
4f4b3679
RH
3257 }
3258 else
3259 {
3260 len = GET_RTX_LENGTH (code);
3261 fmt = GET_RTX_FORMAT (code);
3262
3263 /* We don't need to process a SET_DEST that is a register, CC0,
3264 or PC, so set up to skip this common case. All other cases
3265 where we want to suppress replacing something inside a
3266 SET_SRC are handled via the IN_DEST operand. */
3267 if (code == SET
3268 && (GET_CODE (SET_DEST (x)) == REG
3269 || GET_CODE (SET_DEST (x)) == CC0
3270 || GET_CODE (SET_DEST (x)) == PC))
3271 fmt = "ie";
3272
3273 /* Get the mode of operand 0 in case X is now a SIGN_EXTEND of a
3274 constant. */
3275 if (fmt[0] == 'e')
3276 op0_mode = GET_MODE (XEXP (x, 0));
3277
3278 for (i = 0; i < len; i++)
230d793d 3279 {
4f4b3679 3280 if (fmt[i] == 'E')
230d793d 3281 {
4f4b3679
RH
3282 register int j;
3283 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3284 {
3285 if (COMBINE_RTX_EQUAL_P (XVECEXP (x, i, j), from))
3286 {
3287 new = (unique_copy && n_occurrences
3288 ? copy_rtx (to) : to);
3289 n_occurrences++;
3290 }
3291 else
3292 {
3293 new = subst (XVECEXP (x, i, j), from, to, 0,
3294 unique_copy);
3295
3296 /* If this substitution failed, this whole thing
3297 fails. */
3298 if (GET_CODE (new) == CLOBBER
3299 && XEXP (new, 0) == const0_rtx)
3300 return new;
3301 }
3302
3303 SUBST (XVECEXP (x, i, j), new);
3304 }
3305 }
3306 else if (fmt[i] == 'e')
3307 {
3308 if (COMBINE_RTX_EQUAL_P (XEXP (x, i), from))
3309 {
3310 /* In general, don't install a subreg involving two
3311 modes not tieable. It can worsen register
3312 allocation, and can even make invalid reload
3313 insns, since the reg inside may need to be copied
3314 from in the outside mode, and that may be invalid
3315 if it is an fp reg copied in integer mode.
3316
3317 We allow two exceptions to this: It is valid if
3318 it is inside another SUBREG and the mode of that
3319 SUBREG and the mode of the inside of TO is
3320 tieable and it is valid if X is a SET that copies
3321 FROM to CC0. */
3322
3323 if (GET_CODE (to) == SUBREG
3324 && ! MODES_TIEABLE_P (GET_MODE (to),
3325 GET_MODE (SUBREG_REG (to)))
3326 && ! (code == SUBREG
3327 && MODES_TIEABLE_P (GET_MODE (x),
3328 GET_MODE (SUBREG_REG (to))))
42301240 3329#ifdef HAVE_cc0
4f4b3679 3330 && ! (code == SET && i == 1 && XEXP (x, 0) == cc0_rtx)
42301240 3331#endif
4f4b3679
RH
3332 )
3333 return gen_rtx_CLOBBER (VOIDmode, const0_rtx);
42301240 3334
4f4b3679
RH
3335 new = (unique_copy && n_occurrences ? copy_rtx (to) : to);
3336 n_occurrences++;
3337 }
3338 else
3339 /* If we are in a SET_DEST, suppress most cases unless we
3340 have gone inside a MEM, in which case we want to
3341 simplify the address. We assume here that things that
3342 are actually part of the destination have their inner
3343 parts in the first expression. This is true for SUBREG,
3344 STRICT_LOW_PART, and ZERO_EXTRACT, which are the only
3345 things aside from REG and MEM that should appear in a
3346 SET_DEST. */
3347 new = subst (XEXP (x, i), from, to,
3348 (((in_dest
3349 && (code == SUBREG || code == STRICT_LOW_PART
3350 || code == ZERO_EXTRACT))
3351 || code == SET)
3352 && i == 0), unique_copy);
3353
3354 /* If we found that we will have to reject this combination,
3355 indicate that by returning the CLOBBER ourselves, rather than
3356 an expression containing it. This will speed things up as
3357 well as prevent accidents where two CLOBBERs are considered
3358 to be equal, thus producing an incorrect simplification. */
3359
3360 if (GET_CODE (new) == CLOBBER && XEXP (new, 0) == const0_rtx)
3361 return new;
3362
3363 SUBST (XEXP (x, i), new);
230d793d 3364 }
230d793d
RS
3365 }
3366 }
3367
8079805d
RK
3368 /* Try to simplify X. If the simplification changed the code, it is likely
3369 that further simplification will help, so loop, but limit the number
3370 of repetitions that will be performed. */
3371
3372 for (i = 0; i < 4; i++)
3373 {
3374 /* If X is sufficiently simple, don't bother trying to do anything
3375 with it. */
3376 if (code != CONST_INT && code != REG && code != CLOBBER)
31ec4e5e 3377 x = combine_simplify_rtx (x, op0_mode, i == 3, in_dest);
d0ab8cd3 3378
8079805d
RK
3379 if (GET_CODE (x) == code)
3380 break;
d0ab8cd3 3381
8079805d 3382 code = GET_CODE (x);
eeb43d32 3383
8079805d
RK
3384 /* We no longer know the original mode of operand 0 since we
3385 have changed the form of X) */
3386 op0_mode = VOIDmode;
3387 }
eeb43d32 3388
8079805d
RK
3389 return x;
3390}
3391\f
3392/* Simplify X, a piece of RTL. We just operate on the expression at the
3393 outer level; call `subst' to simplify recursively. Return the new
3394 expression.
3395
3396 OP0_MODE is the original mode of XEXP (x, 0); LAST is nonzero if this
3397 will be the last iteration even if an expression with a code different from
3398 X is returned; IN_DEST is nonzero if we are inside a SET_DEST. */
eeb43d32 3399
8079805d 3400static rtx
31ec4e5e 3401combine_simplify_rtx (x, op0_mode, last, in_dest)
8079805d
RK
3402 rtx x;
3403 enum machine_mode op0_mode;
3404 int last;
3405 int in_dest;
3406{
3407 enum rtx_code code = GET_CODE (x);
3408 enum machine_mode mode = GET_MODE (x);
3409 rtx temp;
3410 int i;
d0ab8cd3 3411
230d793d
RS
3412 /* If this is a commutative operation, put a constant last and a complex
3413 expression first. We don't need to do this for comparisons here. */
3414 if (GET_RTX_CLASS (code) == 'c'
3415 && ((CONSTANT_P (XEXP (x, 0)) && GET_CODE (XEXP (x, 1)) != CONST_INT)
3416 || (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == 'o'
3417 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) != 'o')
3418 || (GET_CODE (XEXP (x, 0)) == SUBREG
3419 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0)))) == 'o'
3420 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) != 'o')))
3421 {
3422 temp = XEXP (x, 0);
3423 SUBST (XEXP (x, 0), XEXP (x, 1));
3424 SUBST (XEXP (x, 1), temp);
3425 }
3426
22609cbf
RK
3427 /* If this is a PLUS, MINUS, or MULT, and the first operand is the
3428 sign extension of a PLUS with a constant, reverse the order of the sign
3429 extension and the addition. Note that this is not the same as the original
3430 code, but overflow is undefined for signed values. Also note that the
3431 PLUS will have been partially moved "inside" the sign-extension, so that
3432 the first operand of X will really look like:
3433 (ashiftrt (plus (ashift A C4) C5) C4).
3434 We convert this to
3435 (plus (ashiftrt (ashift A C4) C4) (ashiftrt C5 C4))
3436 and replace the first operand of X with that expression. Later parts
3437 of this function may simplify the expression further.
3438
3439 For example, if we start with (mult (sign_extend (plus A C1)) C2),
3440 we swap the SIGN_EXTEND and PLUS. Later code will apply the
3441 distributive law to produce (plus (mult (sign_extend X) C1) C3).
3442
3443 We do this to simplify address expressions. */
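 /* For instance, when A is a QImode value extended to SImode, C4 is 24:
 (ashiftrt (plus (ashift A 24) (const_int 33554432)) 24), where
 33554432 is 2<<24, becomes
 (plus (ashiftrt (ashift A 24) 24) (const_int 2)). */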
3444
3445 if ((code == PLUS || code == MINUS || code == MULT)
3446 && GET_CODE (XEXP (x, 0)) == ASHIFTRT
3447 && GET_CODE (XEXP (XEXP (x, 0), 0)) == PLUS
3448 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == ASHIFT
3449 && GET_CODE (XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 1)) == CONST_INT
3450 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3451 && XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 1) == XEXP (XEXP (x, 0), 1)
3452 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
3453 && (temp = simplify_binary_operation (ASHIFTRT, mode,
3454 XEXP (XEXP (XEXP (x, 0), 0), 1),
3455 XEXP (XEXP (x, 0), 1))) != 0)
3456 {
3457 rtx new
3458 = simplify_shift_const (NULL_RTX, ASHIFT, mode,
3459 XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 0),
3460 INTVAL (XEXP (XEXP (x, 0), 1)));
3461
3462 new = simplify_shift_const (NULL_RTX, ASHIFTRT, mode, new,
3463 INTVAL (XEXP (XEXP (x, 0), 1)));
3464
3465 SUBST (XEXP (x, 0), gen_binary (PLUS, mode, new, temp));
3466 }
3467
d0ab8cd3
RK
3468 /* If this is a simple operation applied to an IF_THEN_ELSE, try
3469 applying it to the arms of the IF_THEN_ELSE. This often simplifies
abe6e52f
RK
3470 things. Check for cases where both arms are testing the same
3471 condition.
3472
3473 Don't do anything if all operands are very simple. */
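 /* For example, (plus (if_then_else COND A B) K) can become
 (if_then_else COND (plus A K) (plus B K)); and if the arms then
 collapse to STORE_FLAG_VALUE and zero, the result is just the
 comparison itself. */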
3474
3475 if (((GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c'
3476 || GET_RTX_CLASS (code) == '<')
3477 && ((GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) != 'o'
3478 && ! (GET_CODE (XEXP (x, 0)) == SUBREG
3479 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0))))
3480 == 'o')))
3481 || (GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) != 'o'
3482 && ! (GET_CODE (XEXP (x, 1)) == SUBREG
3483 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 1))))
3484 == 'o')))))
3485 || (GET_RTX_CLASS (code) == '1'
3486 && ((GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) != 'o'
3487 && ! (GET_CODE (XEXP (x, 0)) == SUBREG
3488 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0))))
3489 == 'o'))))))
d0ab8cd3 3490 {
abe6e52f
RK
3491 rtx cond, true, false;
3492
3493 cond = if_then_else_cond (x, &true, &false);
0802d516
RK
3494 if (cond != 0
3495 /* If everything is a comparison, what we have is highly unlikely
3496 to be simpler, so don't use it. */
3497 && ! (GET_RTX_CLASS (code) == '<'
3498 && (GET_RTX_CLASS (GET_CODE (true)) == '<'
3499 || GET_RTX_CLASS (GET_CODE (false)) == '<')))
abe6e52f
RK
3500 {
3501 rtx cop1 = const0_rtx;
3502 enum rtx_code cond_code = simplify_comparison (NE, &cond, &cop1);
3503
15448afc
RK
3504 if (cond_code == NE && GET_RTX_CLASS (GET_CODE (cond)) == '<')
3505 return x;
3506
9210df58
RK
3507 /* Simplify the alternative arms; this may collapse the true and
3508 false arms to store-flag values. */
3509 true = subst (true, pc_rtx, pc_rtx, 0, 0);
3510 false = subst (false, pc_rtx, pc_rtx, 0, 0);
3511
3512 /* Restarting if we generate a store-flag expression will cause
3513 us to loop. Just drop through in this case. */
3514
abe6e52f
RK
3515 /* If the result values are STORE_FLAG_VALUE and zero, we can
3516 just make the comparison operation. */
3517 if (true == const_true_rtx && false == const0_rtx)
3518 x = gen_binary (cond_code, mode, cond, cop1);
3519 else if (true == const0_rtx && false == const_true_rtx)
3520 x = gen_binary (reverse_condition (cond_code), mode, cond, cop1);
3521
3522 /* Likewise, we can make the negate of a comparison operation
3523 if the result values are - STORE_FLAG_VALUE and zero. */
3524 else if (GET_CODE (true) == CONST_INT
3525 && INTVAL (true) == - STORE_FLAG_VALUE
3526 && false == const0_rtx)
0c1c8ea6 3527 x = gen_unary (NEG, mode, mode,
abe6e52f
RK
3528 gen_binary (cond_code, mode, cond, cop1));
3529 else if (GET_CODE (false) == CONST_INT
3530 && INTVAL (false) == - STORE_FLAG_VALUE
3531 && true == const0_rtx)
0c1c8ea6 3532 x = gen_unary (NEG, mode, mode,
abe6e52f
RK
3533 gen_binary (reverse_condition (cond_code),
3534 mode, cond, cop1));
3535 else
38a448ca
RH
3536 return gen_rtx_IF_THEN_ELSE (mode,
3537 gen_binary (cond_code, VOIDmode,
3538 cond, cop1),
3539 true, false);
5109d49f 3540
9210df58
RK
3541 code = GET_CODE (x);
3542 op0_mode = VOIDmode;
abe6e52f 3543 }
d0ab8cd3
RK
3544 }
3545
230d793d
RS
3546 /* Try to fold this expression in case we have constants that weren't
3547 present before. */
3548 temp = 0;
3549 switch (GET_RTX_CLASS (code))
3550 {
3551 case '1':
3552 temp = simplify_unary_operation (code, mode, XEXP (x, 0), op0_mode);
3553 break;
3554 case '<':
3555 temp = simplify_relational_operation (code, op0_mode,
3556 XEXP (x, 0), XEXP (x, 1));
77fa0940
RK
3557#ifdef FLOAT_STORE_FLAG_VALUE
3558 if (temp != 0 && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
3559 temp = ((temp == const0_rtx) ? CONST0_RTX (GET_MODE (x))
3560 : immed_real_const_1 (FLOAT_STORE_FLAG_VALUE, GET_MODE (x)));
3561#endif
230d793d
RS
3562 break;
3563 case 'c':
3564 case '2':
3565 temp = simplify_binary_operation (code, mode, XEXP (x, 0), XEXP (x, 1));
3566 break;
3567 case 'b':
3568 case '3':
3569 temp = simplify_ternary_operation (code, mode, op0_mode, XEXP (x, 0),
3570 XEXP (x, 1), XEXP (x, 2));
3571 break;
3572 }
3573
3574 if (temp)
d0ab8cd3 3575 x = temp, code = GET_CODE (temp);
230d793d 3576
230d793d 3577 /* First see if we can apply the inverse distributive law. */
224eeff2
RK
3578 if (code == PLUS || code == MINUS
3579 || code == AND || code == IOR || code == XOR)
230d793d
RS
3580 {
3581 x = apply_distributive_law (x);
3582 code = GET_CODE (x);
3583 }
3584
3585 /* If CODE is an associative operation not otherwise handled, see if we
3586 can associate some operands. This can win if they are constants or
3587 if they are logically related (i.e. (a & b) & a). */
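 /* For example, (plus (plus A (const_int 3)) (const_int 5)) becomes
 (plus A (const_int 8)), and (and (and A B) A) becomes (and B A)
 because the inner (and A A) simplifies to A. */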
3588 if ((code == PLUS || code == MINUS
3589 || code == MULT || code == AND || code == IOR || code == XOR
3590 || code == DIV || code == UDIV
3591 || code == SMAX || code == SMIN || code == UMAX || code == UMIN)
3ad2180a 3592 && INTEGRAL_MODE_P (mode))
230d793d
RS
3593 {
3594 if (GET_CODE (XEXP (x, 0)) == code)
3595 {
3596 rtx other = XEXP (XEXP (x, 0), 0);
3597 rtx inner_op0 = XEXP (XEXP (x, 0), 1);
3598 rtx inner_op1 = XEXP (x, 1);
3599 rtx inner;
3600
3601 /* Make sure we pass the constant operand if any as the second
3602 one if this is a commutative operation. */
3603 if (CONSTANT_P (inner_op0) && GET_RTX_CLASS (code) == 'c')
3604 {
3605 rtx tem = inner_op0;
3606 inner_op0 = inner_op1;
3607 inner_op1 = tem;
3608 }
3609 inner = simplify_binary_operation (code == MINUS ? PLUS
3610 : code == DIV ? MULT
3611 : code == UDIV ? MULT
3612 : code,
3613 mode, inner_op0, inner_op1);
3614
3615 /* For commutative operations, try the other pair if that one
3616 didn't simplify. */
3617 if (inner == 0 && GET_RTX_CLASS (code) == 'c')
3618 {
3619 other = XEXP (XEXP (x, 0), 1);
3620 inner = simplify_binary_operation (code, mode,
3621 XEXP (XEXP (x, 0), 0),
3622 XEXP (x, 1));
3623 }
3624
3625 if (inner)
8079805d 3626 return gen_binary (code, mode, other, inner);
230d793d
RS
3627 }
3628 }
3629
3630 /* A little bit of algebraic simplification here. */
3631 switch (code)
3632 {
3633 case MEM:
3634 /* Ensure that our address has any ASHIFTs converted to MULT in case
3635 address-recognizing predicates are called later. */
3636 temp = make_compound_operation (XEXP (x, 0), MEM);
3637 SUBST (XEXP (x, 0), temp);
3638 break;
3639
3640 case SUBREG:
3641 /* (subreg:A (mem:B X) N) becomes a modified MEM unless the SUBREG
3642 is paradoxical. If we can't do that safely, then it becomes
3643 something nonsensical so that this combination won't take place. */
3644
3645 if (GET_CODE (SUBREG_REG (x)) == MEM
3646 && (GET_MODE_SIZE (mode)
3647 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
3648 {
3649 rtx inner = SUBREG_REG (x);
3650 int endian_offset = 0;
3651 /* Don't change the mode of the MEM
3652 if that would change the meaning of the address. */
3653 if (MEM_VOLATILE_P (SUBREG_REG (x))
3654 || mode_dependent_address_p (XEXP (inner, 0)))
38a448ca 3655 return gen_rtx_CLOBBER (mode, const0_rtx);
230d793d 3656
f76b9db2
ILT
3657 if (BYTES_BIG_ENDIAN)
3658 {
3659 if (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
3660 endian_offset += UNITS_PER_WORD - GET_MODE_SIZE (mode);
3661 if (GET_MODE_SIZE (GET_MODE (inner)) < UNITS_PER_WORD)
3662 endian_offset -= (UNITS_PER_WORD
3663 - GET_MODE_SIZE (GET_MODE (inner)));
3664 }
230d793d
RS
3665 /* Note if the plus_constant doesn't make a valid address
3666 then this combination won't be accepted. */
38a448ca
RH
3667 x = gen_rtx_MEM (mode,
3668 plus_constant (XEXP (inner, 0),
3669 (SUBREG_WORD (x) * UNITS_PER_WORD
3670 + endian_offset)));
230d793d 3671 RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (inner);
c6df88cb 3672 MEM_COPY_ATTRIBUTES (x, inner);
230d793d
RS
3673 return x;
3674 }
3675
3676 /* If we are in a SET_DEST, these other cases can't apply. */
3677 if (in_dest)
3678 return x;
3679
3680 /* Changing mode twice with SUBREG => just change it once,
3681 or not at all if changing back to starting mode. */
3682 if (GET_CODE (SUBREG_REG (x)) == SUBREG)
3683 {
3684 if (mode == GET_MODE (SUBREG_REG (SUBREG_REG (x)))
3685 && SUBREG_WORD (x) == 0 && SUBREG_WORD (SUBREG_REG (x)) == 0)
3686 return SUBREG_REG (SUBREG_REG (x));
3687
3688 SUBST_INT (SUBREG_WORD (x),
3689 SUBREG_WORD (x) + SUBREG_WORD (SUBREG_REG (x)));
3690 SUBST (SUBREG_REG (x), SUBREG_REG (SUBREG_REG (x)));
3691 }
3692
3693 /* SUBREG of a hard register => just change the register number
3694 and/or mode. If the hard register is not valid in that mode,
26ecfc76
RK
3695 suppress this combination. If the hard register is the stack,
3696 frame, or argument pointer, leave this as a SUBREG. */
230d793d
RS
3697
3698 if (GET_CODE (SUBREG_REG (x)) == REG
26ecfc76
RK
3699 && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER
3700 && REGNO (SUBREG_REG (x)) != FRAME_POINTER_REGNUM
6d7096b0
DE
3701#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3702 && REGNO (SUBREG_REG (x)) != HARD_FRAME_POINTER_REGNUM
3703#endif
26ecfc76
RK
3704#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3705 && REGNO (SUBREG_REG (x)) != ARG_POINTER_REGNUM
3706#endif
3707 && REGNO (SUBREG_REG (x)) != STACK_POINTER_REGNUM)
230d793d
RS
3708 {
3709 if (HARD_REGNO_MODE_OK (REGNO (SUBREG_REG (x)) + SUBREG_WORD (x),
3710 mode))
38a448ca
RH
3711 return gen_rtx_REG (mode,
3712 REGNO (SUBREG_REG (x)) + SUBREG_WORD (x));
230d793d 3713 else
38a448ca 3714 return gen_rtx_CLOBBER (mode, const0_rtx);
230d793d
RS
3715 }
3716
3717 /* For a constant, try to pick up the part we want. Handle a full
a4bde0b1
RK
3718 word and low-order part. Only do this if we are narrowing
3719 the constant; if it is being widened, we have no idea what
3720 the extra bits will have been set to. */
230d793d
RS
3721
3722 if (CONSTANT_P (SUBREG_REG (x)) && op0_mode != VOIDmode
3723 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
3c99d5ff 3724 && GET_MODE_SIZE (op0_mode) > UNITS_PER_WORD
230d793d
RS
3725 && GET_MODE_CLASS (mode) == MODE_INT)
3726 {
3727 temp = operand_subword (SUBREG_REG (x), SUBREG_WORD (x),
5f4f0e22 3728 0, op0_mode);
230d793d
RS
3729 if (temp)
3730 return temp;
3731 }
3732
19808e22
RS
3733 /* If we want a subreg of a constant, at offset 0,
3734 take the low bits. On a little-endian machine, that's
3735 always valid. On a big-endian machine, it's valid
3c99d5ff 3736 only if the constant's mode fits in one word. Note that we
61b1bece 3737 cannot use subreg_lowpart_p since SUBREG_REG may be VOIDmode. */
3c99d5ff
RK
3738 if (CONSTANT_P (SUBREG_REG (x))
3739 && ((GET_MODE_SIZE (op0_mode) <= UNITS_PER_WORD
3740 || ! WORDS_BIG_ENDIAN)
3741 ? SUBREG_WORD (x) == 0
3742 : (SUBREG_WORD (x)
3743 == ((GET_MODE_SIZE (op0_mode)
3744 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD))
3745 / UNITS_PER_WORD)))
f82da7d2 3746 && GET_MODE_SIZE (mode) <= GET_MODE_SIZE (op0_mode)
f76b9db2
ILT
3747 && (! WORDS_BIG_ENDIAN
3748 || GET_MODE_BITSIZE (op0_mode) <= BITS_PER_WORD))
230d793d
RS
3749 return gen_lowpart_for_combine (mode, SUBREG_REG (x));
3750
b65c1b5b
RK
3751 /* A paradoxical SUBREG of a VOIDmode constant is the same constant,
3752 since we are saying that the high bits don't matter. */
3753 if (CONSTANT_P (SUBREG_REG (x)) && GET_MODE (SUBREG_REG (x)) == VOIDmode
3754 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (op0_mode))
54f3b5c2
R
3755 {
3756 if (GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) > UNITS_PER_WORD
3757 && (WORDS_BIG_ENDIAN || SUBREG_WORD (x) != 0))
3758 return operand_subword (SUBREG_REG (x), SUBREG_WORD (x), 0, mode);
3759 return SUBREG_REG (x);
3760 }
b65c1b5b 3761
87e3e0c1
RK
3762 /* Note that we cannot do any narrowing for non-constants since
3763 we might have been counting on using the fact that some bits were
3764 zero. We now do this in the SET. */
3765
230d793d
RS
3766 break;
3767
3768 case NOT:
3769 /* (not (plus X -1)) can become (neg X). */
3770 if (GET_CODE (XEXP (x, 0)) == PLUS
3771 && XEXP (XEXP (x, 0), 1) == constm1_rtx)
8079805d 3772 return gen_rtx_combine (NEG, mode, XEXP (XEXP (x, 0), 0));
230d793d
RS
3773
3774 /* Similarly, (not (neg X)) is (plus X -1). */
3775 if (GET_CODE (XEXP (x, 0)) == NEG)
8079805d
RK
3776 return gen_rtx_combine (PLUS, mode, XEXP (XEXP (x, 0), 0),
3777 constm1_rtx);
230d793d 3778
d0ab8cd3
RK
3779 /* (not (xor X C)) for C constant is (xor X D) with D = ~ C. */
3780 if (GET_CODE (XEXP (x, 0)) == XOR
3781 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3782 && (temp = simplify_unary_operation (NOT, mode,
3783 XEXP (XEXP (x, 0), 1),
3784 mode)) != 0)
787745f5 3785 return gen_binary (XOR, mode, XEXP (XEXP (x, 0), 0), temp);
d0ab8cd3 3786
230d793d
RS
3787 /* (not (ashift 1 X)) is (rotate ~1 X). We used to do this for operands
3788 other than 1, but that is not valid. We could do a similar
3789 simplification for (not (lshiftrt C X)) where C is just the sign bit,
3790 but this doesn't seem common enough to bother with. */
3791 if (GET_CODE (XEXP (x, 0)) == ASHIFT
3792 && XEXP (XEXP (x, 0), 0) == const1_rtx)
38a448ca
RH
3793 return gen_rtx_ROTATE (mode, gen_unary (NOT, mode, mode, const1_rtx),
3794 XEXP (XEXP (x, 0), 1));
230d793d
RS
3795
3796 if (GET_CODE (XEXP (x, 0)) == SUBREG
3797 && subreg_lowpart_p (XEXP (x, 0))
3798 && (GET_MODE_SIZE (GET_MODE (XEXP (x, 0)))
3799 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (x, 0)))))
3800 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == ASHIFT
3801 && XEXP (SUBREG_REG (XEXP (x, 0)), 0) == const1_rtx)
3802 {
3803 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (XEXP (x, 0)));
3804
38a448ca
RH
3805 x = gen_rtx_ROTATE (inner_mode,
3806 gen_unary (NOT, inner_mode, inner_mode,
3807 const1_rtx),
3808 XEXP (SUBREG_REG (XEXP (x, 0)), 1));
8079805d 3809 return gen_lowpart_for_combine (mode, x);
230d793d
RS
3810 }
3811
0802d516
RK
3812 /* If STORE_FLAG_VALUE is -1, (not (comparison foo bar)) can be done by
3813 reversing the comparison code if valid. */
3814 if (STORE_FLAG_VALUE == -1
3815 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
230d793d
RS
3816 && reversible_comparison_p (XEXP (x, 0)))
3817 return gen_rtx_combine (reverse_condition (GET_CODE (XEXP (x, 0))),
3818 mode, XEXP (XEXP (x, 0), 0),
3819 XEXP (XEXP (x, 0), 1));
500c518b
RK
3820
3821 /* (ashiftrt foo C) where C is the number of bits in FOO minus 1
0802d516
RK
3822 is (lt foo (const_int 0)) if STORE_FLAG_VALUE is -1, so we can
3823 perform the above simplification. */
500c518b 3824
0802d516 3825 if (STORE_FLAG_VALUE == -1
500c518b 3826 && GET_CODE (XEXP (x, 0)) == ASHIFTRT
500c518b
RK
3828 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3829 && INTVAL (XEXP (XEXP (x, 0), 1)) == GET_MODE_BITSIZE (mode) - 1)
3830 return gen_rtx_combine (GE, mode, XEXP (XEXP (x, 0), 0), const0_rtx);
230d793d
RS
3831
3832 /* Apply De Morgan's laws to reduce number of patterns for machines
3833 with negating logical insns (and-not, nand, etc.). If result has
3834 only one NOT, put it first, since that is how the patterns are
3835 coded. */
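 /* For example, in QImode, (not (ior A (const_int 5))) becomes
 (and (not A) (const_int 0xfa)), since ~5 is 0xfa in an 8-bit mode. */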
3836
3837 if (GET_CODE (XEXP (x, 0)) == IOR || GET_CODE (XEXP (x, 0)) == AND)
3838 {
3839 rtx in1 = XEXP (XEXP (x, 0), 0), in2 = XEXP (XEXP (x, 0), 1);
3840
3841 if (GET_CODE (in1) == NOT)
3842 in1 = XEXP (in1, 0);
3843 else
3844 in1 = gen_rtx_combine (NOT, GET_MODE (in1), in1);
3845
3846 if (GET_CODE (in2) == NOT)
3847 in2 = XEXP (in2, 0);
3848 else if (GET_CODE (in2) == CONST_INT
5f4f0e22
CH
3849 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3850 in2 = GEN_INT (GET_MODE_MASK (mode) & ~ INTVAL (in2));
230d793d
RS
3851 else
3852 in2 = gen_rtx_combine (NOT, GET_MODE (in2), in2);
3853
3854 if (GET_CODE (in2) == NOT)
3855 {
3856 rtx tem = in2;
3857 in2 = in1; in1 = tem;
3858 }
3859
8079805d
RK
3860 return gen_rtx_combine (GET_CODE (XEXP (x, 0)) == IOR ? AND : IOR,
3861 mode, in1, in2);
230d793d
RS
3862 }
3863 break;
3864
3865 case NEG:
3866 /* (neg (plus X 1)) can become (not X). */
3867 if (GET_CODE (XEXP (x, 0)) == PLUS
3868 && XEXP (XEXP (x, 0), 1) == const1_rtx)
8079805d 3869 return gen_rtx_combine (NOT, mode, XEXP (XEXP (x, 0), 0));
230d793d
RS
3870
3871 /* Similarly, (neg (not X)) is (plus X 1). */
3872 if (GET_CODE (XEXP (x, 0)) == NOT)
8079805d 3873 return plus_constant (XEXP (XEXP (x, 0), 0), 1);
230d793d 3874
230d793d
RS
3875 /* (neg (minus X Y)) can become (minus Y X). */
3876 if (GET_CODE (XEXP (x, 0)) == MINUS
3ad2180a 3877 && (! FLOAT_MODE_P (mode)
0f41302f 3878 /* x-y != -(y-x) with IEEE floating point. */
7e2a0d8e
RK
3879 || TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3880 || flag_fast_math))
8079805d
RK
3881 return gen_binary (MINUS, mode, XEXP (XEXP (x, 0), 1),
3882 XEXP (XEXP (x, 0), 0));
230d793d 3883
0f41302f 3884 /* (neg (xor A 1)) is (plus A -1) if A is known to be either 0 or 1. */
d0ab8cd3 3885 if (GET_CODE (XEXP (x, 0)) == XOR && XEXP (XEXP (x, 0), 1) == const1_rtx
951553af 3886 && nonzero_bits (XEXP (XEXP (x, 0), 0), mode) == 1)
8079805d 3887 return gen_binary (PLUS, mode, XEXP (XEXP (x, 0), 0), constm1_rtx);
d0ab8cd3 3888
230d793d
RS
3889 /* NEG commutes with ASHIFT since it is multiplication. Only do this
3890 if we can then eliminate the NEG (e.g.,
3891 if the operand is a constant). */
3892
3893 if (GET_CODE (XEXP (x, 0)) == ASHIFT)
3894 {
3895 temp = simplify_unary_operation (NEG, mode,
3896 XEXP (XEXP (x, 0), 0), mode);
3897 if (temp)
3898 {
3899 SUBST (XEXP (XEXP (x, 0), 0), temp);
3900 return XEXP (x, 0);
3901 }
3902 }
3903
3904 temp = expand_compound_operation (XEXP (x, 0));
3905
3906 /* For C equal to the width of MODE minus 1, (neg (ashiftrt X C)) can be
3907 replaced by (lshiftrt X C). This will convert
3908 (neg (sign_extract X 1 Y)) to (zero_extract X 1 Y). */
3909
3910 if (GET_CODE (temp) == ASHIFTRT
3911 && GET_CODE (XEXP (temp, 1)) == CONST_INT
3912 && INTVAL (XEXP (temp, 1)) == GET_MODE_BITSIZE (mode) - 1)
8079805d
RK
3913 return simplify_shift_const (temp, LSHIFTRT, mode, XEXP (temp, 0),
3914 INTVAL (XEXP (temp, 1)));
230d793d 3915
951553af 3916 /* If X has only a single bit that might be nonzero, say, bit I, convert
230d793d
RS
3917 (neg X) to (ashiftrt (ashift X C-I) C-I) where C is the bitsize of
3918 MODE minus 1. This will convert (neg (zero_extract X 1 Y)) to
3919 (sign_extract X 1 Y). But only do this if TEMP isn't a register
3920 or a SUBREG of one since we'd be making the expression more
3921 complex if it was just a register. */
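 /* For example, in SImode, if only bit 0 of TEMP can be nonzero,
 (neg TEMP) becomes (ashiftrt (ashift TEMP 31) 31), which is 0 when
 TEMP is 0 and -1 when TEMP is 1. */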
3922
3923 if (GET_CODE (temp) != REG
3924 && ! (GET_CODE (temp) == SUBREG
3925 && GET_CODE (SUBREG_REG (temp)) == REG)
951553af 3926 && (i = exact_log2 (nonzero_bits (temp, mode))) >= 0)
230d793d
RS
3927 {
3928 rtx temp1 = simplify_shift_const
5f4f0e22
CH
3929 (NULL_RTX, ASHIFTRT, mode,
3930 simplify_shift_const (NULL_RTX, ASHIFT, mode, temp,
230d793d
RS
3931 GET_MODE_BITSIZE (mode) - 1 - i),
3932 GET_MODE_BITSIZE (mode) - 1 - i);
3933
3934 /* If all we did was surround TEMP with the two shifts, we
3935 haven't improved anything, so don't use it. Otherwise,
3936 we are better off with TEMP1. */
3937 if (GET_CODE (temp1) != ASHIFTRT
3938 || GET_CODE (XEXP (temp1, 0)) != ASHIFT
3939 || XEXP (XEXP (temp1, 0), 0) != temp)
8079805d 3940 return temp1;
230d793d
RS
3941 }
3942 break;
3943
2ca9ae17 3944 case TRUNCATE:
e30fb98f
JL
3945 /* We can't handle truncation to a partial integer mode here
3946 because we don't know the real bitsize of the partial
3947 integer mode. */
3948 if (GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
3949 break;
3950
80608e27
JL
3951 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3952 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
3953 GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))))
2ca9ae17
JW
3954 SUBST (XEXP (x, 0),
3955 force_to_mode (XEXP (x, 0), GET_MODE (XEXP (x, 0)),
3956 GET_MODE_MASK (mode), NULL_RTX, 0));
0f13a422
ILT
3957
3958 /* (truncate:SI ({sign,zero}_extend:DI foo:SI)) == foo:SI. */
3959 if ((GET_CODE (XEXP (x, 0)) == SIGN_EXTEND
3960 || GET_CODE (XEXP (x, 0)) == ZERO_EXTEND)
3961 && GET_MODE (XEXP (XEXP (x, 0), 0)) == mode)
3962 return XEXP (XEXP (x, 0), 0);
3963
3964 /* (truncate:SI (OP:DI ({sign,zero}_extend:DI foo:SI))) is
3965 (OP:SI foo:SI) if OP is NEG or ABS. */
3966 if ((GET_CODE (XEXP (x, 0)) == ABS
3967 || GET_CODE (XEXP (x, 0)) == NEG)
3968 && (GET_CODE (XEXP (XEXP (x, 0), 0)) == SIGN_EXTEND
3969 || GET_CODE (XEXP (XEXP (x, 0), 0)) == ZERO_EXTEND)
3970 && GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == mode)
3971 return gen_unary (GET_CODE (XEXP (x, 0)), mode, mode,
3972 XEXP (XEXP (XEXP (x, 0), 0), 0));
3973
3974 /* (truncate:SI (subreg:DI (truncate:SI X) 0)) is
3975 (truncate:SI x). */
3976 if (GET_CODE (XEXP (x, 0)) == SUBREG
3977 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == TRUNCATE
3978 && subreg_lowpart_p (XEXP (x, 0)))
3979 return SUBREG_REG (XEXP (x, 0));
3980
3981 /* If we know that the value is already truncated, we can
14a774a9
RK
3982 replace the TRUNCATE with a SUBREG if TRULY_NOOP_TRUNCATION
3983 is nonzero for the corresponding modes. But don't do this
3984 for an (LSHIFTRT (MULT ...)) since this will cause problems
3985 with the umulXi3_highpart patterns. */
6a992214
JL
3986 if (TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
3987 GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))))
3988 && num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
14a774a9
RK
3989 >= GET_MODE_BITSIZE (mode) + 1
3990 && ! (GET_CODE (XEXP (x, 0)) == LSHIFTRT
3991 && GET_CODE (XEXP (XEXP (x, 0), 0)) == MULT))
0f13a422
ILT
3992 return gen_lowpart_for_combine (mode, XEXP (x, 0));
3993
3994 /* A truncate of a comparison can be replaced with a subreg if
3995 STORE_FLAG_VALUE permits. This is like the previous test,
3996 but it works even if the comparison is done in a mode larger
3997 than HOST_BITS_PER_WIDE_INT. */
3998 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3999 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
4000 && ((HOST_WIDE_INT) STORE_FLAG_VALUE &~ GET_MODE_MASK (mode)) == 0)
4001 return gen_lowpart_for_combine (mode, XEXP (x, 0));
4002
4003 /* Similarly, a truncate of a register whose value is a
4004 comparison can be replaced with a subreg if STORE_FLAG_VALUE
4005 permits. */
4006 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4007 && ((HOST_WIDE_INT) STORE_FLAG_VALUE &~ GET_MODE_MASK (mode)) == 0
4008 && (temp = get_last_value (XEXP (x, 0)))
4009 && GET_RTX_CLASS (GET_CODE (temp)) == '<')
4010 return gen_lowpart_for_combine (mode, XEXP (x, 0));
4011
2ca9ae17
JW
4012 break;
4013
230d793d
RS
4014 case FLOAT_TRUNCATE:
4015 /* (float_truncate:SF (float_extend:DF foo:SF)) = foo:SF. */
4016 if (GET_CODE (XEXP (x, 0)) == FLOAT_EXTEND
4017 && GET_MODE (XEXP (XEXP (x, 0), 0)) == mode)
4018 return XEXP (XEXP (x, 0), 0);
4635f748
RK
4019
4020 /* (float_truncate:SF (OP:DF (float_extend:DF foo:sf))) is
4021 (OP:SF foo:SF) if OP is NEG or ABS. */
4022 if ((GET_CODE (XEXP (x, 0)) == ABS
4023 || GET_CODE (XEXP (x, 0)) == NEG)
4024 && GET_CODE (XEXP (XEXP (x, 0), 0)) == FLOAT_EXTEND
4025 && GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == mode)
0c1c8ea6
RK
4026 return gen_unary (GET_CODE (XEXP (x, 0)), mode, mode,
4027 XEXP (XEXP (XEXP (x, 0), 0), 0));
1d12df72
RK
4028
4029 /* (float_truncate:SF (subreg:DF (float_truncate:SF X) 0))
4030 is (float_truncate:SF x). */
4031 if (GET_CODE (XEXP (x, 0)) == SUBREG
4032 && subreg_lowpart_p (XEXP (x, 0))
4033 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == FLOAT_TRUNCATE)
4034 return SUBREG_REG (XEXP (x, 0));
230d793d
RS
4035 break;
4036
4037#ifdef HAVE_cc0
4038 case COMPARE:
4039 /* Convert (compare FOO (const_int 0)) to FOO unless we aren't
4040 using cc0, in which case we want to leave it as a COMPARE
4041 so we can distinguish it from a register-register-copy. */
4042 if (XEXP (x, 1) == const0_rtx)
4043 return XEXP (x, 0);
4044
4045 /* In IEEE floating point, x-0 is not the same as x. */
4046 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
7e2a0d8e
RK
4047 || ! FLOAT_MODE_P (GET_MODE (XEXP (x, 0)))
4048 || flag_fast_math)
230d793d
RS
4049 && XEXP (x, 1) == CONST0_RTX (GET_MODE (XEXP (x, 0))))
4050 return XEXP (x, 0);
4051 break;
4052#endif
4053
4054 case CONST:
4055 /* (const (const X)) can become (const X). Do it this way rather than
4056 returning the inner CONST since CONST can be shared with a
4057 REG_EQUAL note. */
4058 if (GET_CODE (XEXP (x, 0)) == CONST)
4059 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4060 break;
4061
4062#ifdef HAVE_lo_sum
4063 case LO_SUM:
4064 /* Convert (lo_sum (high FOO) FOO) to FOO. This is necessary so we
4065 can add in an offset. find_split_point will split this address up
4066 again if it doesn't match. */
4067 if (GET_CODE (XEXP (x, 0)) == HIGH
4068 && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1)))
4069 return XEXP (x, 1);
4070 break;
4071#endif
4072
4073 case PLUS:
4074 /* If we have (plus (plus (A const) B)), associate it so that CONST is
4075 outermost. That's because that's the way indexed addresses are
4076 supposed to appear. This code used to check many more cases, but
4077 they are now checked elsewhere. */
4078 if (GET_CODE (XEXP (x, 0)) == PLUS
4079 && CONSTANT_ADDRESS_P (XEXP (XEXP (x, 0), 1)))
4080 return gen_binary (PLUS, mode,
4081 gen_binary (PLUS, mode, XEXP (XEXP (x, 0), 0),
4082 XEXP (x, 1)),
4083 XEXP (XEXP (x, 0), 1));
4084
4085 /* (plus (xor (and <foo> (const_int pow2 - 1)) <c>) <-c>)
4086 when c is (const_int pow2 / 2) is a sign extension of a
4087 bit-field and can be replaced by either a sign_extend or a
e6380233
JL
4088 sign_extract. The `and' may be a zero_extend and the two
4089 <c>, -<c> constants may be reversed. */
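 /* For example, in SImode, (plus (xor (and R (const_int 7)) (const_int 4))
 (const_int -4)) sign-extends the low 3 bits of R and becomes
 (ashiftrt (ashift R 29) 29). */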
230d793d
RS
4090 if (GET_CODE (XEXP (x, 0)) == XOR
4091 && GET_CODE (XEXP (x, 1)) == CONST_INT
4092 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4093 && INTVAL (XEXP (x, 1)) == - INTVAL (XEXP (XEXP (x, 0), 1))
e6380233
JL
4094 && ((i = exact_log2 (INTVAL (XEXP (XEXP (x, 0), 1)))) >= 0
4095 || (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0)
5f4f0e22 4096 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
230d793d
RS
4097 && ((GET_CODE (XEXP (XEXP (x, 0), 0)) == AND
4098 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
4099 && (INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1))
5f4f0e22 4100 == ((HOST_WIDE_INT) 1 << (i + 1)) - 1))
230d793d
RS
4101 || (GET_CODE (XEXP (XEXP (x, 0), 0)) == ZERO_EXTEND
4102 && (GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)))
4103 == i + 1))))
8079805d
RK
4104 return simplify_shift_const
4105 (NULL_RTX, ASHIFTRT, mode,
4106 simplify_shift_const (NULL_RTX, ASHIFT, mode,
4107 XEXP (XEXP (XEXP (x, 0), 0), 0),
4108 GET_MODE_BITSIZE (mode) - (i + 1)),
4109 GET_MODE_BITSIZE (mode) - (i + 1));
230d793d 4110
bc0776c6
RK
4111 /* (plus (comparison A B) C) can become (neg (rev-comp A B)) if
4112 C is 1 and STORE_FLAG_VALUE is -1 or if C is -1 and STORE_FLAG_VALUE
4113 is 1. This produces better code than the alternative immediately
4114 below. */
4115 if (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
4116 && reversible_comparison_p (XEXP (x, 0))
4117 && ((STORE_FLAG_VALUE == -1 && XEXP (x, 1) == const1_rtx)
4118 || (STORE_FLAG_VALUE == 1 && XEXP (x, 1) == constm1_rtx)))
8079805d 4119 return
0c1c8ea6 4120 gen_unary (NEG, mode, mode,
8079805d
RK
4121 gen_binary (reverse_condition (GET_CODE (XEXP (x, 0))),
4122 mode, XEXP (XEXP (x, 0), 0),
4123 XEXP (XEXP (x, 0), 1)));
bc0776c6
RK
4124
4125 /* If only the low-order bit of X is possibly nonzero, (plus x -1)
230d793d
RS
4126 can become (ashiftrt (ashift (xor x 1) C) C) where C is
4127 the bitsize of the mode - 1. This allows simplification of
4128 "a = (b & 8) == 0;" */
4129 if (XEXP (x, 1) == constm1_rtx
4130 && GET_CODE (XEXP (x, 0)) != REG
4131 && ! (GET_CODE (XEXP (x,0)) == SUBREG
4132 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG)
951553af 4133 && nonzero_bits (XEXP (x, 0), mode) == 1)
8079805d
RK
4134 return simplify_shift_const (NULL_RTX, ASHIFTRT, mode,
4135 simplify_shift_const (NULL_RTX, ASHIFT, mode,
4136 gen_rtx_combine (XOR, mode,
4137 XEXP (x, 0), const1_rtx),
4138 GET_MODE_BITSIZE (mode) - 1),
4139 GET_MODE_BITSIZE (mode) - 1);
02f4ada4
RK
4140
4141 /* If we are adding two things that have no bits in common, convert
4142 the addition into an IOR. This will often be further simplified,
4143 for example in cases like ((a & 1) + (a & 2)), which can
4144 become a & 3. */
4145
ac49a949 4146 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
951553af
RK
4147 && (nonzero_bits (XEXP (x, 0), mode)
4148 & nonzero_bits (XEXP (x, 1), mode)) == 0)
8079805d 4149 return gen_binary (IOR, mode, XEXP (x, 0), XEXP (x, 1));
230d793d
RS
4150 break;
4151
4152 case MINUS:
0802d516
RK
4153 /* If STORE_FLAG_VALUE is 1, (minus 1 (comparison foo bar)) can be done
4154 by reversing the comparison code if valid. */
4155 if (STORE_FLAG_VALUE == 1
4156 && XEXP (x, 0) == const1_rtx
5109d49f
RK
4157 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<'
4158 && reversible_comparison_p (XEXP (x, 1)))
4159 return gen_binary (reverse_condition (GET_CODE (XEXP (x, 1))),
4160 mode, XEXP (XEXP (x, 1), 0),
4161 XEXP (XEXP (x, 1), 1));
5109d49f 4162
230d793d
RS
4163 /* (minus <foo> (and <foo> (const_int -pow2))) becomes
4164 (and <foo> (const_int pow2-1)) */
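 /* For example, (minus A (and A (const_int -16))) becomes
 (and A (const_int 15)), keeping just the low four bits of A. */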
4165 if (GET_CODE (XEXP (x, 1)) == AND
4166 && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
4167 && exact_log2 (- INTVAL (XEXP (XEXP (x, 1), 1))) >= 0
4168 && rtx_equal_p (XEXP (XEXP (x, 1), 0), XEXP (x, 0)))
8079805d
RK
4169 return simplify_and_const_int (NULL_RTX, mode, XEXP (x, 0),
4170 - INTVAL (XEXP (XEXP (x, 1), 1)) - 1);
7bef8680
RK
4171
4172 /* Canonicalize (minus A (plus B C)) to (minus (minus A B) C) for
4173 integers. */
4174 if (GET_CODE (XEXP (x, 1)) == PLUS && INTEGRAL_MODE_P (mode))
8079805d
RK
4175 return gen_binary (MINUS, mode,
4176 gen_binary (MINUS, mode, XEXP (x, 0),
4177 XEXP (XEXP (x, 1), 0)),
4178 XEXP (XEXP (x, 1), 1));
230d793d
RS
4179 break;
4180
4181 case MULT:
4182 /* If we have (mult (plus A B) C), apply the distributive law and then
4183 the inverse distributive law to see if things simplify. This
4184 occurs mostly in addresses, often when unrolling loops. */
4185
4186 if (GET_CODE (XEXP (x, 0)) == PLUS)
4187 {
4188 x = apply_distributive_law
4189 (gen_binary (PLUS, mode,
4190 gen_binary (MULT, mode,
4191 XEXP (XEXP (x, 0), 0), XEXP (x, 1)),
4192 gen_binary (MULT, mode,
3749f4ca
BS
4193 XEXP (XEXP (x, 0), 1),
4194 copy_rtx (XEXP (x, 1)))));
230d793d
RS
4195
4196 if (GET_CODE (x) != MULT)
8079805d 4197 return x;
230d793d 4198 }
230d793d
RS
4199 break;
4200
4201 case UDIV:
4202 /* If this is a divide by a power of two, treat it as a shift if
4203 its first operand is a shift. */
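 /* For example, (udiv (lshiftrt X (const_int 1)) (const_int 8)) becomes
 (lshiftrt X (const_int 1)) shifted right by 3 more bits, which
 simplify_shift_const can merge into (lshiftrt X (const_int 4)). */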
4204 if (GET_CODE (XEXP (x, 1)) == CONST_INT
4205 && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0
4206 && (GET_CODE (XEXP (x, 0)) == ASHIFT
4207 || GET_CODE (XEXP (x, 0)) == LSHIFTRT
4208 || GET_CODE (XEXP (x, 0)) == ASHIFTRT
4209 || GET_CODE (XEXP (x, 0)) == ROTATE
4210 || GET_CODE (XEXP (x, 0)) == ROTATERT))
8079805d 4211 return simplify_shift_const (NULL_RTX, LSHIFTRT, mode, XEXP (x, 0), i);
230d793d
RS
4212 break;
4213
4214 case EQ: case NE:
4215 case GT: case GTU: case GE: case GEU:
4216 case LT: case LTU: case LE: case LEU:
4217 /* If the first operand is a condition code, we can't do anything
4218 with it. */
4219 if (GET_CODE (XEXP (x, 0)) == COMPARE
4220 || (GET_MODE_CLASS (GET_MODE (XEXP (x, 0))) != MODE_CC
4221#ifdef HAVE_cc0
4222 && XEXP (x, 0) != cc0_rtx
4223#endif
4224 ))
4225 {
4226 rtx op0 = XEXP (x, 0);
4227 rtx op1 = XEXP (x, 1);
4228 enum rtx_code new_code;
4229
4230 if (GET_CODE (op0) == COMPARE)
4231 op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);
4232
4233 /* Simplify our comparison, if possible. */
4234 new_code = simplify_comparison (code, &op0, &op1);
4235
230d793d 4236 /* If STORE_FLAG_VALUE is 1, we can convert (ne x 0) to simply X
951553af 4237 if only the low-order bit is possibly nonzero in X (such as when
5109d49f
RK
4238 X is a ZERO_EXTRACT of one bit). Similarly, we can convert EQ to
4239 (xor X 1) or (minus 1 X); we use the former. Finally, if X is
4240 known to be either 0 or -1, NE becomes a NEG and EQ becomes
4241 (plus X 1).
4242
4243 Remove any ZERO_EXTRACT we made when thinking this was a
4244 comparison. It may now be simpler to use, e.g., an AND. If a
4245 ZERO_EXTRACT is indeed appropriate, it will be placed back by
4246 the call to make_compound_operation in the SET case. */
4247
0802d516
RK
4248 if (STORE_FLAG_VALUE == 1
4249 && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
4250 && op1 == const0_rtx && nonzero_bits (op0, mode) == 1)
818b11b9
RK
4251 return gen_lowpart_for_combine (mode,
4252 expand_compound_operation (op0));
5109d49f 4253
0802d516
RK
4254 else if (STORE_FLAG_VALUE == 1
4255 && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
5109d49f
RK
4256 && op1 == const0_rtx
4257 && (num_sign_bit_copies (op0, mode)
4258 == GET_MODE_BITSIZE (mode)))
4259 {
4260 op0 = expand_compound_operation (op0);
0c1c8ea6 4261 return gen_unary (NEG, mode, mode,
8079805d 4262 gen_lowpart_for_combine (mode, op0));
5109d49f
RK
4263 }
4264
0802d516
RK
4265 else if (STORE_FLAG_VALUE == 1
4266 && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
230d793d 4267 && op1 == const0_rtx
5109d49f 4268 && nonzero_bits (op0, mode) == 1)
818b11b9
RK
4269 {
4270 op0 = expand_compound_operation (op0);
8079805d
RK
4271 return gen_binary (XOR, mode,
4272 gen_lowpart_for_combine (mode, op0),
4273 const1_rtx);
5109d49f 4274 }
818b11b9 4275
0802d516
RK
4276 else if (STORE_FLAG_VALUE == 1
4277 && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
5109d49f
RK
4278 && op1 == const0_rtx
4279 && (num_sign_bit_copies (op0, mode)
4280 == GET_MODE_BITSIZE (mode)))
4281 {
4282 op0 = expand_compound_operation (op0);
8079805d 4283 return plus_constant (gen_lowpart_for_combine (mode, op0), 1);
818b11b9 4284 }
230d793d 4285
5109d49f
RK
4286 /* If STORE_FLAG_VALUE is -1, we have cases similar to
4287 those above. */
0802d516
RK
4288 if (STORE_FLAG_VALUE == -1
4289 && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
230d793d 4290 && op1 == const0_rtx
5109d49f
RK
4291 && (num_sign_bit_copies (op0, mode)
4292 == GET_MODE_BITSIZE (mode)))
4293 return gen_lowpart_for_combine (mode,
4294 expand_compound_operation (op0));
4295
0802d516
RK
4296 else if (STORE_FLAG_VALUE == -1
4297 && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
5109d49f
RK
4298 && op1 == const0_rtx
4299 && nonzero_bits (op0, mode) == 1)
4300 {
4301 op0 = expand_compound_operation (op0);
0c1c8ea6 4302 return gen_unary (NEG, mode, mode,
8079805d 4303 gen_lowpart_for_combine (mode, op0));
5109d49f
RK
4304 }
4305
0802d516
RK
4306 else if (STORE_FLAG_VALUE == -1
4307 && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
5109d49f
RK
4308 && op1 == const0_rtx
4309 && (num_sign_bit_copies (op0, mode)
4310 == GET_MODE_BITSIZE (mode)))
230d793d 4311 {
818b11b9 4312 op0 = expand_compound_operation (op0);
0c1c8ea6 4313 return gen_unary (NOT, mode, mode,
8079805d 4314 gen_lowpart_for_combine (mode, op0));
5109d49f
RK
4315 }
4316
4317 /* If X is 0/1, (eq X 0) is X-1. */
0802d516
RK
4318 else if (STORE_FLAG_VALUE == -1
4319 && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
5109d49f
RK
4320 && op1 == const0_rtx
4321 && nonzero_bits (op0, mode) == 1)
4322 {
4323 op0 = expand_compound_operation (op0);
8079805d 4324 return plus_constant (gen_lowpart_for_combine (mode, op0), -1);
230d793d 4325 }
230d793d
RS
4326
4327 /* If STORE_FLAG_VALUE says to just test the sign bit and X has just
951553af
RK
4328 one bit that might be nonzero, we can convert (ne x 0) to
4329 (ashift x C) where C puts the bit in the sign bit. Remove any
4330 AND with STORE_FLAG_VALUE when we are done, since we are only
4331 going to test the sign bit. */
3f508eca 4332 if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
5f4f0e22 4333 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
0802d516 4334 && ((STORE_FLAG_VALUE & GET_MODE_MASK (mode))
e51712db 4335 == (unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE(mode)-1))
230d793d
RS
4336 && op1 == const0_rtx
4337 && mode == GET_MODE (op0)
5109d49f 4338 && (i = exact_log2 (nonzero_bits (op0, mode))) >= 0)
230d793d 4339 {
818b11b9
RK
4340 x = simplify_shift_const (NULL_RTX, ASHIFT, mode,
4341 expand_compound_operation (op0),
230d793d
RS
4342 GET_MODE_BITSIZE (mode) - 1 - i);
4343 if (GET_CODE (x) == AND && XEXP (x, 1) == const_true_rtx)
4344 return XEXP (x, 0);
4345 else
4346 return x;
4347 }
4348
4349 /* If the code changed, return a whole new comparison. */
4350 if (new_code != code)
4351 return gen_rtx_combine (new_code, mode, op0, op1);
4352
4353 /* Otherwise, keep this operation, but maybe change its operands.
4354 This also converts (ne (compare FOO BAR) 0) to (ne FOO BAR). */
4355 SUBST (XEXP (x, 0), op0);
4356 SUBST (XEXP (x, 1), op1);
4357 }
4358 break;

    case IF_THEN_ELSE:
      return simplify_if_then_else (x);

    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
    case ZERO_EXTEND:
    case SIGN_EXTEND:
      /* If we are processing SET_DEST, we are done.  */
      if (in_dest)
        return x;

      return expand_compound_operation (x);

    case SET:
      return simplify_set (x);

    case AND:
    case IOR:
    case XOR:
      return simplify_logical (x, last);

    case ABS:
      /* (abs (neg <foo>)) -> (abs <foo>) */
      if (GET_CODE (XEXP (x, 0)) == NEG)
        SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));

      /* If the mode of the operand is VOIDmode (i.e. if it is ASM_OPERANDS),
         do nothing.  */
      if (GET_MODE (XEXP (x, 0)) == VOIDmode)
        break;

      /* If the operand is something known to be positive, ignore the ABS.  */
      if (GET_CODE (XEXP (x, 0)) == FFS || GET_CODE (XEXP (x, 0)) == ABS
          || ((GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
               <= HOST_BITS_PER_WIDE_INT)
              && ((nonzero_bits (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
                   & ((HOST_WIDE_INT) 1
                      << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1)))
                  == 0)))
        return XEXP (x, 0);

      /* If the operand is known to be only -1 or 0, convert ABS to NEG.  */
      if (num_sign_bit_copies (XEXP (x, 0), mode) == GET_MODE_BITSIZE (mode))
        return gen_rtx_combine (NEG, mode, XEXP (x, 0));

      break;

    case FFS:
      /* (ffs (*_extend <X>)) = (ffs <X>) */
      if (GET_CODE (XEXP (x, 0)) == SIGN_EXTEND
          || GET_CODE (XEXP (x, 0)) == ZERO_EXTEND)
        SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
      break;

    case FLOAT:
      /* (float (sign_extend <X>)) = (float <X>).  */
      if (GET_CODE (XEXP (x, 0)) == SIGN_EXTEND)
        SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
      break;

    case ASHIFT:
    case LSHIFTRT:
    case ASHIFTRT:
    case ROTATE:
    case ROTATERT:
      /* If this is a shift by a constant amount, simplify it.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
        return simplify_shift_const (x, code, mode, XEXP (x, 0),
                                     INTVAL (XEXP (x, 1)));

#ifdef SHIFT_COUNT_TRUNCATED
      else if (SHIFT_COUNT_TRUNCATED && GET_CODE (XEXP (x, 1)) != REG)
        SUBST (XEXP (x, 1),
               force_to_mode (XEXP (x, 1), GET_MODE (x),
                              ((HOST_WIDE_INT) 1
                               << exact_log2 (GET_MODE_BITSIZE (GET_MODE (x))))
                              - 1,
                              NULL_RTX, 0));
#endif

      break;

    default:
      break;
    }

  return x;
}
\f
/* Simplify X, an IF_THEN_ELSE expression.  Return the new expression.  */

static rtx
simplify_if_then_else (x)
     rtx x;
{
  enum machine_mode mode = GET_MODE (x);
  rtx cond = XEXP (x, 0);
  rtx true = XEXP (x, 1);
  rtx false = XEXP (x, 2);
  enum rtx_code true_code = GET_CODE (cond);
  int comparison_p = GET_RTX_CLASS (true_code) == '<';
  rtx temp;
  int i;

  /* Simplify storing of the truth value.  */
  if (comparison_p && true == const_true_rtx && false == const0_rtx)
    return gen_binary (true_code, mode, XEXP (cond, 0), XEXP (cond, 1));

  /* Also when the truth value has to be reversed.  */
  if (comparison_p && reversible_comparison_p (cond)
      && true == const0_rtx && false == const_true_rtx)
    return gen_binary (reverse_condition (true_code),
                       mode, XEXP (cond, 0), XEXP (cond, 1));
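  /* Illustrative example: with STORE_FLAG_VALUE == 1,
     (if_then_else (lt A B) (const_int 1) (const_int 0)) collapses to
     (lt A B), and with the arms interchanged it becomes (ge A B).  */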

  /* Sometimes we can simplify the arm of an IF_THEN_ELSE if a register used
     in it is being compared against certain values.  Get the true and false
     comparisons and see if that says anything about the value of each arm.  */

  if (comparison_p && reversible_comparison_p (cond)
      && GET_CODE (XEXP (cond, 0)) == REG)
    {
      HOST_WIDE_INT nzb;
      rtx from = XEXP (cond, 0);
      enum rtx_code false_code = reverse_condition (true_code);
      rtx true_val = XEXP (cond, 1);
      rtx false_val = true_val;
      int swapped = 0;

      /* If FALSE_CODE is EQ, swap the codes and arms.  */

      if (false_code == EQ)
        {
          swapped = 1, true_code = EQ, false_code = NE;
          temp = true, true = false, false = temp;
        }

      /* If we are comparing against zero and the expression being tested has
         only a single bit that might be nonzero, that is its value when it is
         not equal to zero.  Similarly if it is known to be -1 or 0.  */

      if (true_code == EQ && true_val == const0_rtx
          && exact_log2 (nzb = nonzero_bits (from, GET_MODE (from))) >= 0)
        false_code = EQ, false_val = GEN_INT (nzb);
      else if (true_code == EQ && true_val == const0_rtx
               && (num_sign_bit_copies (from, GET_MODE (from))
                   == GET_MODE_BITSIZE (GET_MODE (from))))
        false_code = EQ, false_val = constm1_rtx;

      /* Now simplify an arm if we know the value of the register in the
         branch and it is used in the arm.  Be careful due to the potential
         of locally-shared RTL.  */

      if (reg_mentioned_p (from, true))
        true = subst (known_cond (copy_rtx (true), true_code, from, true_val),
                      pc_rtx, pc_rtx, 0, 0);
      if (reg_mentioned_p (from, false))
        false = subst (known_cond (copy_rtx (false), false_code,
                                   from, false_val),
                       pc_rtx, pc_rtx, 0, 0);

      SUBST (XEXP (x, 1), swapped ? false : true);
      SUBST (XEXP (x, 2), swapped ? true : false);

      true = XEXP (x, 1), false = XEXP (x, 2), true_code = GET_CODE (cond);
    }

  /* If we have (if_then_else FOO (pc) (label_ref BAR)) and FOO can be
     reversed, do so to avoid needing two sets of patterns for
     subtract-and-branch insns.  Similarly if we have a constant in the true
     arm, the false arm is the same as the first operand of the comparison, or
     the false arm is more complicated than the true arm.  */

  if (comparison_p && reversible_comparison_p (cond)
      && (true == pc_rtx
          || (CONSTANT_P (true)
              && GET_CODE (false) != CONST_INT && false != pc_rtx)
          || true == const0_rtx
          || (GET_RTX_CLASS (GET_CODE (true)) == 'o'
              && GET_RTX_CLASS (GET_CODE (false)) != 'o')
          || (GET_CODE (true) == SUBREG
              && GET_RTX_CLASS (GET_CODE (SUBREG_REG (true))) == 'o'
              && GET_RTX_CLASS (GET_CODE (false)) != 'o')
          || reg_mentioned_p (true, false)
          || rtx_equal_p (false, XEXP (cond, 0))))
    {
      true_code = reverse_condition (true_code);
      SUBST (XEXP (x, 0),
             gen_binary (true_code, GET_MODE (cond), XEXP (cond, 0),
                         XEXP (cond, 1)));

      SUBST (XEXP (x, 1), false);
      SUBST (XEXP (x, 2), true);

      temp = true, true = false, false = temp, cond = XEXP (x, 0);

      /* It is possible that the conditional has been simplified out.  */
      true_code = GET_CODE (cond);
      comparison_p = GET_RTX_CLASS (true_code) == '<';
    }

  /* If the two arms are identical, we don't need the comparison.  */

  if (rtx_equal_p (true, false) && ! side_effects_p (cond))
    return true;

  /* Convert a == b ? b : a to "a".  */
  if (true_code == EQ && ! side_effects_p (cond)
      && rtx_equal_p (XEXP (cond, 0), false)
      && rtx_equal_p (XEXP (cond, 1), true))
    return false;
  else if (true_code == NE && ! side_effects_p (cond)
           && rtx_equal_p (XEXP (cond, 0), true)
           && rtx_equal_p (XEXP (cond, 1), false))
    return true;

  /* Look for cases where we have (abs x) or (neg (abs X)).  */

  if (GET_MODE_CLASS (mode) == MODE_INT
      && GET_CODE (false) == NEG
      && rtx_equal_p (true, XEXP (false, 0))
      && comparison_p
      && rtx_equal_p (true, XEXP (cond, 0))
      && ! side_effects_p (true))
    switch (true_code)
      {
      case GT:
      case GE:
        return gen_unary (ABS, mode, mode, true);
      case LT:
      case LE:
        return gen_unary (NEG, mode, mode, gen_unary (ABS, mode, mode, true));
      default:
        break;
      }
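  /* Illustrative example: (if_then_else (ge X 0) X (neg X)) becomes
     (abs X), while (if_then_else (lt X 0) X (neg X)) becomes
     (neg (abs X)).  */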

  /* Look for MIN or MAX.  */

  if ((! FLOAT_MODE_P (mode) || flag_fast_math)
      && comparison_p
      && rtx_equal_p (XEXP (cond, 0), true)
      && rtx_equal_p (XEXP (cond, 1), false)
      && ! side_effects_p (cond))
    switch (true_code)
      {
      case GE:
      case GT:
        return gen_binary (SMAX, mode, true, false);
      case LE:
      case LT:
        return gen_binary (SMIN, mode, true, false);
      case GEU:
      case GTU:
        return gen_binary (UMAX, mode, true, false);
      case LEU:
      case LTU:
        return gen_binary (UMIN, mode, true, false);
      default:
        break;
      }
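  /* Illustrative example: (if_then_else (gt A B) A B) becomes (smax A B)
     and (if_then_else (ltu A B) A B) becomes (umin A B).  */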

  /* If we have (if_then_else COND (OP Z C1) Z) and OP is an identity when its
     second operand is zero, this can be done as (OP Z (mult COND C2)) where
     C2 = C1 * STORE_FLAG_VALUE.  Similarly if OP has an outer ZERO_EXTEND or
     SIGN_EXTEND as long as Z is already extended (so we don't destroy it).
     We can do this kind of thing in some cases when STORE_FLAG_VALUE is
     neither 1 nor -1, but it isn't worth checking for.  */
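  /* Illustrative example: with STORE_FLAG_VALUE == 1,
     (if_then_else COND (plus Z (const_int 4)) Z) can be rewritten as
     (plus Z (mult COND (const_int 4))), since COND evaluates to 1 or 0.  */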

  if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
      && comparison_p && mode != VOIDmode && ! side_effects_p (x))
    {
      rtx t = make_compound_operation (true, SET);
      rtx f = make_compound_operation (false, SET);
      rtx cond_op0 = XEXP (cond, 0);
      rtx cond_op1 = XEXP (cond, 1);
      enum rtx_code op = NIL, extend_op = NIL;
      enum machine_mode m = mode;
      rtx z = 0, c1 = NULL_RTX;

      if ((GET_CODE (t) == PLUS || GET_CODE (t) == MINUS
           || GET_CODE (t) == IOR || GET_CODE (t) == XOR
           || GET_CODE (t) == ASHIFT
           || GET_CODE (t) == LSHIFTRT || GET_CODE (t) == ASHIFTRT)
          && rtx_equal_p (XEXP (t, 0), f))
        c1 = XEXP (t, 1), op = GET_CODE (t), z = f;

      /* If an identity-zero op is commutative, check whether there
         would be a match if we swapped the operands.  */
      else if ((GET_CODE (t) == PLUS || GET_CODE (t) == IOR
                || GET_CODE (t) == XOR)
               && rtx_equal_p (XEXP (t, 1), f))
        c1 = XEXP (t, 0), op = GET_CODE (t), z = f;
      else if (GET_CODE (t) == SIGN_EXTEND
               && (GET_CODE (XEXP (t, 0)) == PLUS
                   || GET_CODE (XEXP (t, 0)) == MINUS
                   || GET_CODE (XEXP (t, 0)) == IOR
                   || GET_CODE (XEXP (t, 0)) == XOR
                   || GET_CODE (XEXP (t, 0)) == ASHIFT
                   || GET_CODE (XEXP (t, 0)) == LSHIFTRT
                   || GET_CODE (XEXP (t, 0)) == ASHIFTRT)
               && GET_CODE (XEXP (XEXP (t, 0), 0)) == SUBREG
               && subreg_lowpart_p (XEXP (XEXP (t, 0), 0))
               && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 0)), f)
               && (num_sign_bit_copies (f, GET_MODE (f))
                   > (GET_MODE_BITSIZE (mode)
                      - GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (t, 0), 0))))))
        {
          c1 = XEXP (XEXP (t, 0), 1); z = f; op = GET_CODE (XEXP (t, 0));
          extend_op = SIGN_EXTEND;
          m = GET_MODE (XEXP (t, 0));
        }
      else if (GET_CODE (t) == SIGN_EXTEND
               && (GET_CODE (XEXP (t, 0)) == PLUS
                   || GET_CODE (XEXP (t, 0)) == IOR
                   || GET_CODE (XEXP (t, 0)) == XOR)
               && GET_CODE (XEXP (XEXP (t, 0), 1)) == SUBREG
               && subreg_lowpart_p (XEXP (XEXP (t, 0), 1))
               && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 1)), f)
               && (num_sign_bit_copies (f, GET_MODE (f))
                   > (GET_MODE_BITSIZE (mode)
                      - GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (t, 0), 1))))))
        {
          c1 = XEXP (XEXP (t, 0), 0); z = f; op = GET_CODE (XEXP (t, 0));
          extend_op = SIGN_EXTEND;
          m = GET_MODE (XEXP (t, 0));
        }
      else if (GET_CODE (t) == ZERO_EXTEND
               && (GET_CODE (XEXP (t, 0)) == PLUS
                   || GET_CODE (XEXP (t, 0)) == MINUS
                   || GET_CODE (XEXP (t, 0)) == IOR
                   || GET_CODE (XEXP (t, 0)) == XOR
                   || GET_CODE (XEXP (t, 0)) == ASHIFT
                   || GET_CODE (XEXP (t, 0)) == LSHIFTRT
                   || GET_CODE (XEXP (t, 0)) == ASHIFTRT)
               && GET_CODE (XEXP (XEXP (t, 0), 0)) == SUBREG
               && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
               && subreg_lowpart_p (XEXP (XEXP (t, 0), 0))
               && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 0)), f)
               && ((nonzero_bits (f, GET_MODE (f))
                    & ~ GET_MODE_MASK (GET_MODE (XEXP (XEXP (t, 0), 0))))
                   == 0))
        {
          c1 = XEXP (XEXP (t, 0), 1); z = f; op = GET_CODE (XEXP (t, 0));
          extend_op = ZERO_EXTEND;
          m = GET_MODE (XEXP (t, 0));
        }
      else if (GET_CODE (t) == ZERO_EXTEND
               && (GET_CODE (XEXP (t, 0)) == PLUS
                   || GET_CODE (XEXP (t, 0)) == IOR
                   || GET_CODE (XEXP (t, 0)) == XOR)
               && GET_CODE (XEXP (XEXP (t, 0), 1)) == SUBREG
               && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
               && subreg_lowpart_p (XEXP (XEXP (t, 0), 1))
               && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 1)), f)
               && ((nonzero_bits (f, GET_MODE (f))
                    & ~ GET_MODE_MASK (GET_MODE (XEXP (XEXP (t, 0), 1))))
                   == 0))
        {
          c1 = XEXP (XEXP (t, 0), 0); z = f; op = GET_CODE (XEXP (t, 0));
          extend_op = ZERO_EXTEND;
          m = GET_MODE (XEXP (t, 0));
        }

      if (z)
        {
          temp = subst (gen_binary (true_code, m, cond_op0, cond_op1),
                        pc_rtx, pc_rtx, 0, 0);
          temp = gen_binary (MULT, m, temp,
                             gen_binary (MULT, m, c1, const_true_rtx));
          temp = subst (temp, pc_rtx, pc_rtx, 0, 0);
          temp = gen_binary (op, m, gen_lowpart_for_combine (m, z), temp);

          if (extend_op != NIL)
            temp = gen_unary (extend_op, mode, m, temp);

          return temp;
        }
    }

  /* If we have (if_then_else (ne A 0) C1 0) and either A is known to be 0 or
     1 and C1 is a single bit or A is known to be 0 or -1 and C1 is the
     negation of a single bit, we can convert this operation to a shift.  We
     can actually do this more generally, but it doesn't seem worth it.  */
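  /* Illustrative example: if A is known to be 0 or 1,
     (if_then_else (ne A 0) (const_int 8) (const_int 0)) becomes
     (ashift A (const_int 3)).  */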

  if (true_code == NE && XEXP (cond, 1) == const0_rtx
      && false == const0_rtx && GET_CODE (true) == CONST_INT
      && ((1 == nonzero_bits (XEXP (cond, 0), mode)
           && (i = exact_log2 (INTVAL (true))) >= 0)
          || ((num_sign_bit_copies (XEXP (cond, 0), mode)
               == GET_MODE_BITSIZE (mode))
              && (i = exact_log2 (- INTVAL (true))) >= 0)))
    return
      simplify_shift_const (NULL_RTX, ASHIFT, mode,
                            gen_lowpart_for_combine (mode, XEXP (cond, 0)), i);

  return x;
}
\f
/* Simplify X, a SET expression.  Return the new expression.  */

static rtx
simplify_set (x)
     rtx x;
{
  rtx src = SET_SRC (x);
  rtx dest = SET_DEST (x);
  enum machine_mode mode
    = GET_MODE (src) != VOIDmode ? GET_MODE (src) : GET_MODE (dest);
  rtx other_insn;
  rtx *cc_use;

  /* (set (pc) (return)) gets written as (return).  */
  if (GET_CODE (dest) == PC && GET_CODE (src) == RETURN)
    return src;

  /* Now that we know for sure which bits of SRC we are using, see if we can
     simplify the expression for the object knowing that we only need the
     low-order bits.  */

  if (GET_MODE_CLASS (mode) == MODE_INT)
    {
      src = force_to_mode (src, mode, GET_MODE_MASK (mode), NULL_RTX, 0);
      SUBST (SET_SRC (x), src);
    }

  /* If we are setting CC0 or if the source is a COMPARE, look for the use of
     the comparison result and try to simplify it unless we already have used
     undobuf.other_insn.  */
  if ((GET_CODE (src) == COMPARE
#ifdef HAVE_cc0
       || dest == cc0_rtx
#endif
       )
      && (cc_use = find_single_use (dest, subst_insn, &other_insn)) != 0
      && (undobuf.other_insn == 0 || other_insn == undobuf.other_insn)
      && GET_RTX_CLASS (GET_CODE (*cc_use)) == '<'
      && rtx_equal_p (XEXP (*cc_use, 0), dest))
    {
      enum rtx_code old_code = GET_CODE (*cc_use);
      enum rtx_code new_code;
      rtx op0, op1;
      int other_changed = 0;
      enum machine_mode compare_mode = GET_MODE (dest);

      if (GET_CODE (src) == COMPARE)
        op0 = XEXP (src, 0), op1 = XEXP (src, 1);
      else
        op0 = src, op1 = const0_rtx;

      /* Simplify our comparison, if possible.  */
      new_code = simplify_comparison (old_code, &op0, &op1);

#ifdef EXTRA_CC_MODES
      /* If this machine has CC modes other than CCmode, check to see if we
         need to use a different CC mode here.  */
      compare_mode = SELECT_CC_MODE (new_code, op0, op1);
#endif /* EXTRA_CC_MODES */

#if !defined (HAVE_cc0) && defined (EXTRA_CC_MODES)
      /* If the mode changed, we have to change SET_DEST, the mode in the
         compare, and the mode in the place SET_DEST is used.  If SET_DEST is
         a hard register, just build new versions with the proper mode.  If it
         is a pseudo, we lose unless it is the only time we set the pseudo, in
         which case we can safely change its mode.  */
      if (compare_mode != GET_MODE (dest))
        {
          int regno = REGNO (dest);
          rtx new_dest = gen_rtx_REG (compare_mode, regno);

          if (regno < FIRST_PSEUDO_REGISTER
              || (REG_N_SETS (regno) == 1 && ! REG_USERVAR_P (dest)))
            {
              if (regno >= FIRST_PSEUDO_REGISTER)
                SUBST (regno_reg_rtx[regno], new_dest);

              SUBST (SET_DEST (x), new_dest);
              SUBST (XEXP (*cc_use, 0), new_dest);
              other_changed = 1;

              dest = new_dest;
            }
        }
#endif

      /* If the code changed, we have to build a new comparison in
         undobuf.other_insn.  */
      if (new_code != old_code)
        {
          unsigned HOST_WIDE_INT mask;

          SUBST (*cc_use, gen_rtx_combine (new_code, GET_MODE (*cc_use),
                                           dest, const0_rtx));

          /* If the only change we made was to change an EQ into an NE or
             vice versa, OP0 has only one bit that might be nonzero, and OP1
             is zero, check if changing the user of the condition code will
             produce a valid insn.  If it won't, we can keep the original code
             in that insn by surrounding our operation with an XOR.  */

          if (((old_code == NE && new_code == EQ)
               || (old_code == EQ && new_code == NE))
              && ! other_changed && op1 == const0_rtx
              && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
              && exact_log2 (mask = nonzero_bits (op0, GET_MODE (op0))) >= 0)
            {
              rtx pat = PATTERN (other_insn), note = 0;

              if ((recog_for_combine (&pat, other_insn, &note) < 0
                   && ! check_asm_operands (pat)))
                {
                  PUT_CODE (*cc_use, old_code);
                  other_insn = 0;

                  op0 = gen_binary (XOR, GET_MODE (op0), op0, GEN_INT (mask));
                }
            }

          other_changed = 1;
        }

      if (other_changed)
        undobuf.other_insn = other_insn;

#ifdef HAVE_cc0
      /* If we are now comparing against zero, change our source if
         needed.  If we do not use cc0, we always have a COMPARE.  */
      if (op1 == const0_rtx && dest == cc0_rtx)
        {
          SUBST (SET_SRC (x), op0);
          src = op0;
        }
      else
#endif

      /* Otherwise, if we didn't previously have a COMPARE in the
         correct mode, we need one.  */
      if (GET_CODE (src) != COMPARE || GET_MODE (src) != compare_mode)
        {
          SUBST (SET_SRC (x),
                 gen_rtx_combine (COMPARE, compare_mode, op0, op1));
          src = SET_SRC (x);
        }
      else
        {
          /* Otherwise, update the COMPARE if needed.  */
          SUBST (XEXP (src, 0), op0);
          SUBST (XEXP (src, 1), op1);
        }
    }
  else
    {
      /* Get SET_SRC in a form where we have placed back any
         compound expressions.  Then do the checks below.  */
      src = make_compound_operation (src, SET);
      SUBST (SET_SRC (x), src);
    }

  /* If we have (set x (subreg:m1 (op:m2 ...) 0)) with OP being some
     operation, and X being a REG or (subreg (reg)), we may be able to
     convert this to (set (subreg:m2 x) (op)).

     We can always do this if M1 is narrower than M2 because that means that
     we only care about the low bits of the result.

     However, on machines without WORD_REGISTER_OPERATIONS defined, we cannot
     perform a narrower operation than requested since the high-order bits
     will be undefined.  On machines where it is defined, this transformation
     is safe as long as M1 and M2 have the same number of words.  */

  if (GET_CODE (src) == SUBREG && subreg_lowpart_p (src)
      && GET_RTX_CLASS (GET_CODE (SUBREG_REG (src))) != 'o'
      && (((GET_MODE_SIZE (GET_MODE (src)) + (UNITS_PER_WORD - 1))
           / UNITS_PER_WORD)
          == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
               + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
#ifndef WORD_REGISTER_OPERATIONS
      && (GET_MODE_SIZE (GET_MODE (src))
          < GET_MODE_SIZE (GET_MODE (SUBREG_REG (src))))
#endif
#ifdef CLASS_CANNOT_CHANGE_SIZE
      && ! (GET_CODE (dest) == REG && REGNO (dest) < FIRST_PSEUDO_REGISTER
            && (TEST_HARD_REG_BIT
                (reg_class_contents[(int) CLASS_CANNOT_CHANGE_SIZE],
                 REGNO (dest)))
            && (GET_MODE_SIZE (GET_MODE (src))
                != GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))))
#endif
      && (GET_CODE (dest) == REG
          || (GET_CODE (dest) == SUBREG
              && GET_CODE (SUBREG_REG (dest)) == REG)))
    {
      SUBST (SET_DEST (x),
             gen_lowpart_for_combine (GET_MODE (SUBREG_REG (src)),
                                      dest));
      SUBST (SET_SRC (x), SUBREG_REG (src));

      src = SET_SRC (x), dest = SET_DEST (x);
    }

#ifdef LOAD_EXTEND_OP
  /* If we have (set FOO (subreg:M (mem:N BAR) 0)) with M wider than N, this
     would require a paradoxical subreg.  Replace the subreg with a
     zero_extend to avoid the reload that would otherwise be required.  */
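  /* Illustrative example: on a target whose HImode loads zero-extend,
     (set FOO (subreg:SI (mem:HI BAR) 0)) is rewritten as
     (set FOO (zero_extend:SI (mem:HI BAR))).  */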

  if (GET_CODE (src) == SUBREG && subreg_lowpart_p (src)
      && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (src))) != NIL
      && SUBREG_WORD (src) == 0
      && (GET_MODE_SIZE (GET_MODE (src))
          > GET_MODE_SIZE (GET_MODE (SUBREG_REG (src))))
      && GET_CODE (SUBREG_REG (src)) == MEM)
    {
      SUBST (SET_SRC (x),
             gen_rtx_combine (LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (src))),
                              GET_MODE (src), XEXP (src, 0)));

      src = SET_SRC (x);
    }
#endif

  /* If we don't have a conditional move, SET_SRC is an IF_THEN_ELSE, and we
     are comparing an item known to be 0 or -1 against 0, use a logical
     operation instead.  Check for one of the arms being an IOR of the other
     arm with some value.  We compute three terms to be IOR'ed together.  In
     practice, at most two will be nonzero.  Then we do the IOR's.  */
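  /* Illustrative example: if A is known to be 0 or -1,
     (if_then_else (ne A 0) B C) can be computed as
     (ior (and A B) (and (not A) C)).  */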

  if (GET_CODE (dest) != PC
      && GET_CODE (src) == IF_THEN_ELSE
      && GET_MODE_CLASS (GET_MODE (src)) == MODE_INT
      && (GET_CODE (XEXP (src, 0)) == EQ || GET_CODE (XEXP (src, 0)) == NE)
      && XEXP (XEXP (src, 0), 1) == const0_rtx
      && GET_MODE (src) == GET_MODE (XEXP (XEXP (src, 0), 0))
#ifdef HAVE_conditional_move
      && ! can_conditionally_move_p (GET_MODE (src))
#endif
      && (num_sign_bit_copies (XEXP (XEXP (src, 0), 0),
                               GET_MODE (XEXP (XEXP (src, 0), 0)))
          == GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (src, 0), 0))))
      && ! side_effects_p (src))
    {
      rtx true = (GET_CODE (XEXP (src, 0)) == NE
                  ? XEXP (src, 1) : XEXP (src, 2));
      rtx false = (GET_CODE (XEXP (src, 0)) == NE
                   ? XEXP (src, 2) : XEXP (src, 1));
      rtx term1 = const0_rtx, term2, term3;

      if (GET_CODE (true) == IOR && rtx_equal_p (XEXP (true, 0), false))
        term1 = false, true = XEXP (true, 1), false = const0_rtx;
      else if (GET_CODE (true) == IOR
               && rtx_equal_p (XEXP (true, 1), false))
        term1 = false, true = XEXP (true, 0), false = const0_rtx;
      else if (GET_CODE (false) == IOR
               && rtx_equal_p (XEXP (false, 0), true))
        term1 = true, false = XEXP (false, 1), true = const0_rtx;
      else if (GET_CODE (false) == IOR
               && rtx_equal_p (XEXP (false, 1), true))
        term1 = true, false = XEXP (false, 0), true = const0_rtx;

      term2 = gen_binary (AND, GET_MODE (src), XEXP (XEXP (src, 0), 0), true);
      term3 = gen_binary (AND, GET_MODE (src),
                          gen_unary (NOT, GET_MODE (src), GET_MODE (src),
                                     XEXP (XEXP (src, 0), 0)),
                          false);

      SUBST (SET_SRC (x),
             gen_binary (IOR, GET_MODE (src),
                         gen_binary (IOR, GET_MODE (src), term1, term2),
                         term3));

      src = SET_SRC (x);
    }

#ifdef HAVE_conditional_arithmetic
  /* If we have conditional arithmetic and the operand of a SET is
     a conditional expression, replace this with an IF_THEN_ELSE.
     We can have either a conditional expression or a MULT of that
     expression with a constant.  */
  if ((GET_RTX_CLASS (GET_CODE (src)) == '1'
       || GET_RTX_CLASS (GET_CODE (src)) == '2'
       || GET_RTX_CLASS (GET_CODE (src)) == 'c')
      && (GET_RTX_CLASS (GET_CODE (XEXP (src, 0))) == '<'
          || (GET_CODE (XEXP (src, 0)) == MULT
              && GET_RTX_CLASS (GET_CODE (XEXP (XEXP (src, 0), 0))) == '<'
              && GET_CODE (XEXP (XEXP (src, 0), 1)) == CONST_INT)))
    {
      rtx cond = XEXP (src, 0);
      rtx true_val = const1_rtx;
      rtx false_arm, true_arm;

      if (GET_CODE (cond) == MULT)
        {
          true_val = XEXP (cond, 1);
          cond = XEXP (cond, 0);
        }

      if (GET_RTX_CLASS (GET_CODE (src)) == '1')
        {
          true_arm = gen_unary (GET_CODE (src), GET_MODE (src),
                                GET_MODE (XEXP (src, 0)), true_val);
          false_arm = gen_unary (GET_CODE (src), GET_MODE (src),
                                 GET_MODE (XEXP (src, 0)), const0_rtx);
        }
      else
        {
          true_arm = gen_binary (GET_CODE (src), GET_MODE (src),
                                 true_val, XEXP (src, 1));
          false_arm = gen_binary (GET_CODE (src), GET_MODE (src),
                                  const0_rtx, XEXP (src, 1));
        }

      /* Canonicalize if true_arm is the simpler one.  */
      if (GET_RTX_CLASS (GET_CODE (true_arm)) == 'o'
          && GET_RTX_CLASS (GET_CODE (false_arm)) != 'o'
          && reversible_comparison_p (cond))
        {
          rtx temp = true_arm;

          true_arm = false_arm;
          false_arm = temp;

          cond = gen_rtx_combine (reverse_condition (GET_CODE (cond)),
                                  GET_MODE (cond), XEXP (cond, 0),
                                  XEXP (cond, 1));
        }

      src = gen_rtx_combine (IF_THEN_ELSE, GET_MODE (src),
                             gen_rtx_combine (GET_CODE (cond), VOIDmode,
                                              XEXP (cond, 0),
                                              XEXP (cond, 1)),
                             true_arm, false_arm);
      SUBST (SET_SRC (x), src);
    }
#endif

  /* If either SRC or DEST is a CLOBBER of (const_int 0), make this
     whole thing fail.  */
  if (GET_CODE (src) == CLOBBER && XEXP (src, 0) == const0_rtx)
    return src;
  else if (GET_CODE (dest) == CLOBBER && XEXP (dest, 0) == const0_rtx)
    return dest;
  else
    /* Convert this into a field assignment operation, if possible.  */
    return make_field_assignment (x);
}
\f
/* Simplify X, an AND, IOR, or XOR operation, and return the simplified
   result.  LAST is nonzero if this is the last retry.  */

static rtx
simplify_logical (x, last)
     rtx x;
     int last;
{
  enum machine_mode mode = GET_MODE (x);
  rtx op0 = XEXP (x, 0);
  rtx op1 = XEXP (x, 1);

  switch (GET_CODE (x))
    {
    case AND:
      /* Convert (A ^ B) & A to A & (~ B) since the latter is often a single
         insn (and may simplify more).  */
      if (GET_CODE (op0) == XOR
          && rtx_equal_p (XEXP (op0, 0), op1)
          && ! side_effects_p (op1))
        x = gen_binary (AND, mode,
                        gen_unary (NOT, mode, mode, XEXP (op0, 1)), op1);

      if (GET_CODE (op0) == XOR
          && rtx_equal_p (XEXP (op0, 1), op1)
          && ! side_effects_p (op1))
        x = gen_binary (AND, mode,
                        gen_unary (NOT, mode, mode, XEXP (op0, 0)), op1);

      /* Similarly for (~ (A ^ B)) & A.  */
      if (GET_CODE (op0) == NOT
          && GET_CODE (XEXP (op0, 0)) == XOR
          && rtx_equal_p (XEXP (XEXP (op0, 0), 0), op1)
          && ! side_effects_p (op1))
        x = gen_binary (AND, mode, XEXP (XEXP (op0, 0), 1), op1);

      if (GET_CODE (op0) == NOT
          && GET_CODE (XEXP (op0, 0)) == XOR
          && rtx_equal_p (XEXP (XEXP (op0, 0), 1), op1)
          && ! side_effects_p (op1))
        x = gen_binary (AND, mode, XEXP (XEXP (op0, 0), 0), op1);

      /* We can call simplify_and_const_int only if we don't lose
         any (sign) bits when converting INTVAL (op1) to
         "unsigned HOST_WIDE_INT".  */
      if (GET_CODE (op1) == CONST_INT
          && (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
              || INTVAL (op1) > 0))
        {
          x = simplify_and_const_int (x, mode, op0, INTVAL (op1));

          /* If we have (ior (and (X C1) C2)) and the next restart would be
             the last, simplify this by making C1 as small as possible
             and then exit.  */
          if (last
              && GET_CODE (x) == IOR && GET_CODE (op0) == AND
              && GET_CODE (XEXP (op0, 1)) == CONST_INT
              && GET_CODE (op1) == CONST_INT)
            return gen_binary (IOR, mode,
                               gen_binary (AND, mode, XEXP (op0, 0),
                                           GEN_INT (INTVAL (XEXP (op0, 1))
                                                    & ~ INTVAL (op1))), op1);

          if (GET_CODE (x) != AND)
            return x;

          if (GET_RTX_CLASS (GET_CODE (x)) == 'c'
              || GET_RTX_CLASS (GET_CODE (x)) == '2')
            op0 = XEXP (x, 0), op1 = XEXP (x, 1);
        }

      /* Convert (A | B) & A to A.  */
      if (GET_CODE (op0) == IOR
          && (rtx_equal_p (XEXP (op0, 0), op1)
              || rtx_equal_p (XEXP (op0, 1), op1))
          && ! side_effects_p (XEXP (op0, 0))
          && ! side_effects_p (XEXP (op0, 1)))
        return op1;

      /* In the following group of tests (and those in case IOR below),
         we start with some combination of logical operations and apply
         the distributive law followed by the inverse distributive law.
         Most of the time, this results in no change.  However, if some of
         the operands are the same or inverses of each other, simplifications
         will result.

         For example, (and (ior A B) (not B)) can occur as the result of
         expanding a bit field assignment.  When we apply the distributive
         law to this, we get (ior (and (A (not B))) (and (B (not B)))),
         which then simplifies to (and (A (not B))).

         If we have (and (ior A B) C), apply the distributive law and then
         the inverse distributive law to see if things simplify.  */

      if (GET_CODE (op0) == IOR || GET_CODE (op0) == XOR)
        {
          x = apply_distributive_law
            (gen_binary (GET_CODE (op0), mode,
                         gen_binary (AND, mode, XEXP (op0, 0), op1),
                         gen_binary (AND, mode, XEXP (op0, 1),
                                     copy_rtx (op1))));
          if (GET_CODE (x) != AND)
            return x;
        }

      if (GET_CODE (op1) == IOR || GET_CODE (op1) == XOR)
        return apply_distributive_law
          (gen_binary (GET_CODE (op1), mode,
                       gen_binary (AND, mode, XEXP (op1, 0), op0),
                       gen_binary (AND, mode, XEXP (op1, 1),
                                   copy_rtx (op0))));

      /* Similarly, taking advantage of the fact that
         (and (not A) (xor B C)) == (xor (ior A B) (ior A C))  */

      if (GET_CODE (op0) == NOT && GET_CODE (op1) == XOR)
        return apply_distributive_law
          (gen_binary (XOR, mode,
                       gen_binary (IOR, mode, XEXP (op0, 0), XEXP (op1, 0)),
                       gen_binary (IOR, mode, copy_rtx (XEXP (op0, 0)),
                                   XEXP (op1, 1))));

      else if (GET_CODE (op1) == NOT && GET_CODE (op0) == XOR)
        return apply_distributive_law
          (gen_binary (XOR, mode,
                       gen_binary (IOR, mode, XEXP (op1, 0), XEXP (op0, 0)),
                       gen_binary (IOR, mode, copy_rtx (XEXP (op1, 0)),
                                   XEXP (op0, 1))));
      break;

    case IOR:
      /* (ior A C) is C if all bits of A that might be nonzero are on in C.  */
      if (GET_CODE (op1) == CONST_INT
          && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
          && (nonzero_bits (op0, mode) & ~ INTVAL (op1)) == 0)
        return op1;

      /* Convert (A & B) | A to A.  */
      if (GET_CODE (op0) == AND
          && (rtx_equal_p (XEXP (op0, 0), op1)
              || rtx_equal_p (XEXP (op0, 1), op1))
          && ! side_effects_p (XEXP (op0, 0))
          && ! side_effects_p (XEXP (op0, 1)))
        return op1;

      /* If we have (ior (and A B) C), apply the distributive law and then
         the inverse distributive law to see if things simplify.  */

      if (GET_CODE (op0) == AND)
        {
          x = apply_distributive_law
            (gen_binary (AND, mode,
                         gen_binary (IOR, mode, XEXP (op0, 0), op1),
                         gen_binary (IOR, mode, XEXP (op0, 1),
                                     copy_rtx (op1))));

          if (GET_CODE (x) != IOR)
            return x;
        }

      if (GET_CODE (op1) == AND)
        {
          x = apply_distributive_law
            (gen_binary (AND, mode,
                         gen_binary (IOR, mode, XEXP (op1, 0), op0),
                         gen_binary (IOR, mode, XEXP (op1, 1),
                                     copy_rtx (op0))));

          if (GET_CODE (x) != IOR)
            return x;
        }

      /* Convert (ior (ashift A CX) (lshiftrt A CY)) where CX+CY equals the
         mode size to (rotate A CX).  */

      if (((GET_CODE (op0) == ASHIFT && GET_CODE (op1) == LSHIFTRT)
           || (GET_CODE (op1) == ASHIFT && GET_CODE (op0) == LSHIFTRT))
          && rtx_equal_p (XEXP (op0, 0), XEXP (op1, 0))
          && GET_CODE (XEXP (op0, 1)) == CONST_INT
          && GET_CODE (XEXP (op1, 1)) == CONST_INT
          && (INTVAL (XEXP (op0, 1)) + INTVAL (XEXP (op1, 1))
              == GET_MODE_BITSIZE (mode)))
        return gen_rtx_ROTATE (mode, XEXP (op0, 0),
                               (GET_CODE (op0) == ASHIFT
                                ? XEXP (op0, 1) : XEXP (op1, 1)));
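      /* Illustrative example: in SImode,
         (ior (ashift A (const_int 24)) (lshiftrt A (const_int 8)))
         becomes (rotate A (const_int 24)).  */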
230d793d 5288
71923da7
RK
5289 /* If OP0 is (ashiftrt (plus ...) C), it might actually be
5290 a (sign_extend (plus ...)). If so, OP1 is a CONST_INT, and the PLUS
5291 does not affect any of the bits in OP1, it can really be done
5292 as a PLUS and we can associate. We do this by seeing if OP1
5293 can be safely shifted left C bits. */
5294 if (GET_CODE (op1) == CONST_INT && GET_CODE (op0) == ASHIFTRT
5295 && GET_CODE (XEXP (op0, 0)) == PLUS
5296 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
5297 && GET_CODE (XEXP (op0, 1)) == CONST_INT
5298 && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT)
5299 {
5300 int count = INTVAL (XEXP (op0, 1));
5301 HOST_WIDE_INT mask = INTVAL (op1) << count;
5302
5303 if (mask >> count == INTVAL (op1)
5304 && (mask & nonzero_bits (XEXP (op0, 0), mode)) == 0)
5305 {
5306 SUBST (XEXP (XEXP (op0, 0), 1),
5307 GEN_INT (INTVAL (XEXP (XEXP (op0, 0), 1)) | mask));
5308 return op0;
5309 }
5310 }
230d793d
RS
5311 break;

    case XOR:
      /* If we are XORing two things that have no bits in common,
         convert them into an IOR.  This helps to detect rotation encoded
         using those methods and possibly other simplifications.  */

      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
          && (nonzero_bits (op0, mode)
              & nonzero_bits (op1, mode)) == 0)
        return (gen_binary (IOR, mode, op0, op1));
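      /* Illustrative example: if nonzero_bits shows OP0 can only have bits
         0-3 set and OP1 only bits 4-7, then (xor OP0 OP1) is equivalent to
         (ior OP0 OP1).  */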

      /* Convert (XOR (NOT x) (NOT y)) to (XOR x y).
         Also convert (XOR (NOT x) y) to (NOT (XOR x y)), similarly for
         (NOT y).  */
      {
        int num_negated = 0;

        if (GET_CODE (op0) == NOT)
          num_negated++, op0 = XEXP (op0, 0);
        if (GET_CODE (op1) == NOT)
          num_negated++, op1 = XEXP (op1, 0);

        if (num_negated == 2)
          {
            SUBST (XEXP (x, 0), op0);
            SUBST (XEXP (x, 1), op1);
          }
        else if (num_negated == 1)
          return gen_unary (NOT, mode, mode,
                            gen_binary (XOR, mode, op0, op1));
      }

      /* Convert (xor (and A B) B) to (and (not A) B).  The latter may
         correspond to a machine insn or result in further simplifications
         if B is a constant.  */

      if (GET_CODE (op0) == AND
          && rtx_equal_p (XEXP (op0, 1), op1)
          && ! side_effects_p (op1))
        return gen_binary (AND, mode,
                           gen_unary (NOT, mode, mode, XEXP (op0, 0)),
                           op1);

      else if (GET_CODE (op0) == AND
               && rtx_equal_p (XEXP (op0, 0), op1)
               && ! side_effects_p (op1))
        return gen_binary (AND, mode,
                           gen_unary (NOT, mode, mode, XEXP (op0, 1)),
                           op1);

      /* (xor (comparison foo bar) (const_int 1)) can become the reversed
         comparison if STORE_FLAG_VALUE is 1.  */
      if (STORE_FLAG_VALUE == 1
          && op1 == const1_rtx
          && GET_RTX_CLASS (GET_CODE (op0)) == '<'
          && reversible_comparison_p (op0))
        return gen_rtx_combine (reverse_condition (GET_CODE (op0)),
                                mode, XEXP (op0, 0), XEXP (op0, 1));

      /* (lshiftrt foo C) where C is the number of bits in FOO minus 1
         is (lt foo (const_int 0)), so we can perform the above
         simplification if STORE_FLAG_VALUE is 1.  */

      if (STORE_FLAG_VALUE == 1
          && op1 == const1_rtx
          && GET_CODE (op0) == LSHIFTRT
          && GET_CODE (XEXP (op0, 1)) == CONST_INT
          && INTVAL (XEXP (op0, 1)) == GET_MODE_BITSIZE (mode) - 1)
        return gen_rtx_combine (GE, mode, XEXP (op0, 0), const0_rtx);

      /* (xor (comparison foo bar) (const_int sign-bit))
         when STORE_FLAG_VALUE is the sign bit.  */
      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
          && ((STORE_FLAG_VALUE & GET_MODE_MASK (mode))
              == (unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (mode) - 1))
          && op1 == const_true_rtx
          && GET_RTX_CLASS (GET_CODE (op0)) == '<'
          && reversible_comparison_p (op0))
        return gen_rtx_combine (reverse_condition (GET_CODE (op0)),
                                mode, XEXP (op0, 0), XEXP (op0, 1));

      break;

    default:
      abort ();
    }

  return x;
}
\f
/* We consider ZERO_EXTRACT, SIGN_EXTRACT, and SIGN_EXTEND as "compound
   operations" because they can be replaced with two more basic operations.
   ZERO_EXTEND is also considered "compound" because it can be replaced with
   an AND operation, which is simpler, though only one operation.

   The function expand_compound_operation is called with an rtx expression
   and will convert it to the appropriate shifts and AND operations,
   simplifying at each stage.

   The function make_compound_operation is called to convert an expression
   consisting of shifts and ANDs into the equivalent compound expression.
   It is the inverse of this function, loosely speaking.  */

static rtx
expand_compound_operation (x)
     rtx x;
{
  int pos = 0, len;
  int unsignedp = 0;
  int modewidth;
  rtx tem;

  switch (GET_CODE (x))
    {
    case ZERO_EXTEND:
      unsignedp = 1;
    case SIGN_EXTEND:
      /* We can't necessarily use a const_int for a multiword mode;
         it depends on implicitly extending the value.
         Since we don't know the right way to extend it,
         we can't tell whether the implicit way is right.

         Even for a mode that is no wider than a const_int,
         we can't win, because we need to sign extend one of its bits through
         the rest of it, and we don't know which bit.  */
      if (GET_CODE (XEXP (x, 0)) == CONST_INT)
        return x;

      /* Return if (subreg:MODE FROM 0) is not a safe replacement for
         (zero_extend:MODE FROM) or (sign_extend:MODE FROM).  It is for any MEM
         because (SUBREG (MEM...)) is guaranteed to cause the MEM to be
         reloaded.  If not for that, MEM's would very rarely be safe.

         Reject MODEs bigger than a word, because we might not be able
         to reference a two-register group starting with an arbitrary register
         (and currently gen_lowpart might crash for a SUBREG).  */

      if (GET_MODE_SIZE (GET_MODE (XEXP (x, 0))) > UNITS_PER_WORD)
        return x;

      len = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)));
      /* If the inner object has VOIDmode (the only way this can happen
         is if it is an ASM_OPERANDS), we can't do anything since we don't
         know how much masking to do.  */
      if (len == 0)
        return x;

      break;

    case ZERO_EXTRACT:
      unsignedp = 1;
    case SIGN_EXTRACT:
      /* If the operand is a CLOBBER, just return it.  */
      if (GET_CODE (XEXP (x, 0)) == CLOBBER)
        return XEXP (x, 0);

      if (GET_CODE (XEXP (x, 1)) != CONST_INT
          || GET_CODE (XEXP (x, 2)) != CONST_INT
          || GET_MODE (XEXP (x, 0)) == VOIDmode)
        return x;

      len = INTVAL (XEXP (x, 1));
      pos = INTVAL (XEXP (x, 2));

      /* If this goes outside the object being extracted, replace the object
         with a (use (mem ...)) construct that only combine understands
         and is used only for this purpose.  */
      if (len + pos > GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))))
        SUBST (XEXP (x, 0), gen_rtx_USE (GET_MODE (x), XEXP (x, 0)));

      if (BITS_BIG_ENDIAN)
        pos = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - len - pos;

      break;

    default:
      return x;
    }
  /* Convert sign extension to zero extension, if we know that the high
     bit is not set, as this is easier to optimize.  It will be converted
     back to the cheaper alternative in make_extraction.  */
  if (GET_CODE (x) == SIGN_EXTEND
      && (GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
          && ((nonzero_bits (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
               & ~ (((unsigned HOST_WIDE_INT)
                     GET_MODE_MASK (GET_MODE (XEXP (x, 0))))
                    >> 1))
              == 0)))
    {
      rtx temp = gen_rtx_ZERO_EXTEND (GET_MODE (x), XEXP (x, 0));
      return expand_compound_operation (temp);
    }

  /* We can optimize some special cases of ZERO_EXTEND.  */
  if (GET_CODE (x) == ZERO_EXTEND)
    {
      /* (zero_extend:DI (truncate:SI foo:DI)) is just foo:DI if we
         know that the last value didn't have any inappropriate bits
         set.  */
      if (GET_CODE (XEXP (x, 0)) == TRUNCATE
          && GET_MODE (XEXP (XEXP (x, 0), 0)) == GET_MODE (x)
          && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
          && (nonzero_bits (XEXP (XEXP (x, 0), 0), GET_MODE (x))
              & ~ GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
        return XEXP (XEXP (x, 0), 0);

      /* Likewise for (zero_extend:DI (subreg:SI foo:DI 0)).  */
      if (GET_CODE (XEXP (x, 0)) == SUBREG
          && GET_MODE (SUBREG_REG (XEXP (x, 0))) == GET_MODE (x)
          && subreg_lowpart_p (XEXP (x, 0))
          && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
          && (nonzero_bits (SUBREG_REG (XEXP (x, 0)), GET_MODE (x))
              & ~ GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
        return SUBREG_REG (XEXP (x, 0));

      /* (zero_extend:DI (truncate:SI foo:DI)) is just foo:DI when foo
         is a comparison and STORE_FLAG_VALUE permits.  This is like
         the first case, but it works even when GET_MODE (x) is larger
         than HOST_WIDE_INT.  */
      if (GET_CODE (XEXP (x, 0)) == TRUNCATE
          && GET_MODE (XEXP (XEXP (x, 0), 0)) == GET_MODE (x)
          && GET_RTX_CLASS (GET_CODE (XEXP (XEXP (x, 0), 0))) == '<'
          && (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
              <= HOST_BITS_PER_WIDE_INT)
          && ((HOST_WIDE_INT) STORE_FLAG_VALUE
              & ~ GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
        return XEXP (XEXP (x, 0), 0);

      /* Likewise for (zero_extend:DI (subreg:SI foo:DI 0)).  */
      if (GET_CODE (XEXP (x, 0)) == SUBREG
          && GET_MODE (SUBREG_REG (XEXP (x, 0))) == GET_MODE (x)
          && subreg_lowpart_p (XEXP (x, 0))
          && GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0)))) == '<'
          && (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
              <= HOST_BITS_PER_WIDE_INT)
          && ((HOST_WIDE_INT) STORE_FLAG_VALUE
              & ~ GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
        return SUBREG_REG (XEXP (x, 0));
    }

  /* If we reach here, we want to return a pair of shifts.  The inner
     shift is a left shift of BITSIZE - POS - LEN bits.  The outer
     shift is a right shift of BITSIZE - LEN bits.  It is arithmetic or
     logical depending on the value of UNSIGNEDP.

     If this was a ZERO_EXTEND or ZERO_EXTRACT, this pair of shifts will be
     converted into an AND of a shift.

     We must check for the case where the left shift would have a negative
     count.  This can happen in a case like (x >> 31) & 255 on machines
     that can't shift by a constant.  On those machines, we would first
     combine the shift with the AND to produce a variable-position
     extraction.  Then the constant of 31 would be substituted in to produce
     such a position.  */
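  /* Illustrative example: in SImode, (sign_extract:SI X 8 0) expands to
     (ashiftrt:SI (ashift:SI X 24) 24), and (zero_extend:SI Y:QI) expands
     to a shift pair that then simplifies to (and:SI Y 255).  */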

  modewidth = GET_MODE_BITSIZE (GET_MODE (x));
  if (modewidth >= pos + len)
    tem = simplify_shift_const (NULL_RTX, unsignedp ? LSHIFTRT : ASHIFTRT,
                                GET_MODE (x),
                                simplify_shift_const (NULL_RTX, ASHIFT,
                                                      GET_MODE (x),
                                                      XEXP (x, 0),
                                                      modewidth - pos - len),
                                modewidth - len);

  else if (unsignedp && len < HOST_BITS_PER_WIDE_INT)
    tem = simplify_and_const_int (NULL_RTX, GET_MODE (x),
                                  simplify_shift_const (NULL_RTX, LSHIFTRT,
                                                        GET_MODE (x),
                                                        XEXP (x, 0), pos),
                                  ((HOST_WIDE_INT) 1 << len) - 1);
  else
    /* Any other cases we can't handle.  */
    return x;

  /* If we couldn't do this for some reason, return the original
     expression.  */
  if (GET_CODE (tem) == CLOBBER)
    return x;

  return tem;
}
\f
/* X is a SET which contains an assignment of one object into
   a part of another (such as a bit-field assignment, STRICT_LOW_PART,
   or certain SUBREGS).  If possible, convert it into a series of
   logical operations.

   We half-heartedly support variable positions, but do not at all
   support variable lengths.  */

static rtx
expand_field_assignment (x)
     rtx x;
{
  rtx inner;
  rtx pos;                      /* Always counts from low bit.  */
  int len;
  rtx mask;
  enum machine_mode compute_mode;

  /* Loop until we find something we can't simplify.  */
  while (1)
    {
      if (GET_CODE (SET_DEST (x)) == STRICT_LOW_PART
          && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG)
        {
          inner = SUBREG_REG (XEXP (SET_DEST (x), 0));
          len = GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)));
          pos = GEN_INT (BITS_PER_WORD * SUBREG_WORD (XEXP (SET_DEST (x), 0)));
        }
      else if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
               && GET_CODE (XEXP (SET_DEST (x), 1)) == CONST_INT)
        {
          inner = XEXP (SET_DEST (x), 0);
          len = INTVAL (XEXP (SET_DEST (x), 1));
          pos = XEXP (SET_DEST (x), 2);

          /* If the position is constant and spans the width of INNER,
             surround INNER with a USE to indicate this.  */
          if (GET_CODE (pos) == CONST_INT
              && INTVAL (pos) + len > GET_MODE_BITSIZE (GET_MODE (inner)))
            inner = gen_rtx_USE (GET_MODE (SET_DEST (x)), inner);

          if (BITS_BIG_ENDIAN)
            {
              if (GET_CODE (pos) == CONST_INT)
                pos = GEN_INT (GET_MODE_BITSIZE (GET_MODE (inner)) - len
                               - INTVAL (pos));
              else if (GET_CODE (pos) == MINUS
                       && GET_CODE (XEXP (pos, 1)) == CONST_INT
                       && (INTVAL (XEXP (pos, 1))
                           == GET_MODE_BITSIZE (GET_MODE (inner)) - len))
                /* If position is ADJUST - X, new position is X.  */
                pos = XEXP (pos, 0);
              else
                pos = gen_binary (MINUS, GET_MODE (pos),
                                  GEN_INT (GET_MODE_BITSIZE (GET_MODE (inner))
                                           - len),
                                  pos);
            }
        }

      /* A SUBREG between two modes that occupy the same numbers of words
         can be done by moving the SUBREG to the source.  */
      else if (GET_CODE (SET_DEST (x)) == SUBREG
               && (((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
                     + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
                   == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
                        + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
        {
          x = gen_rtx_SET (VOIDmode, SUBREG_REG (SET_DEST (x)),
                           gen_lowpart_for_combine
                           (GET_MODE (SUBREG_REG (SET_DEST (x))),
                            SET_SRC (x)));
          continue;
        }
      else
        break;

      while (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner))
        inner = SUBREG_REG (inner);

      compute_mode = GET_MODE (inner);

      /* Don't attempt bitwise arithmetic on non-integral modes.  */
      if (! INTEGRAL_MODE_P (compute_mode))
        {
          enum machine_mode imode;

          /* Something is probably seriously wrong if this matches.  */
          if (! FLOAT_MODE_P (compute_mode))
            break;

          /* Try to find an integral mode to pun with.  */
          imode = mode_for_size (GET_MODE_BITSIZE (compute_mode), MODE_INT, 0);
          if (imode == BLKmode)
            break;

          compute_mode = imode;
          inner = gen_lowpart_for_combine (imode, inner);
        }

      /* Compute a mask of LEN bits, if we can do this on the host machine.  */
      if (len < HOST_BITS_PER_WIDE_INT)
        mask = GEN_INT (((HOST_WIDE_INT) 1 << len) - 1);
      else
        break;

      /* Now compute the equivalent expression.  Make a copy of INNER
         for the SET_DEST in case it is a MEM into which we will substitute;
         we don't want shared RTL in that case.  */
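      /* For illustration, the replacement built below is the usual
         read-modify-write form:
         INNER = (INNER & ~(MASK << POS)) | ((SRC & MASK) << POS).  */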
      x = gen_rtx_SET
        (VOIDmode, copy_rtx (inner),
         gen_binary (IOR, compute_mode,
                     gen_binary (AND, compute_mode,
                                 gen_unary (NOT, compute_mode,
                                            compute_mode,
                                            gen_binary (ASHIFT,
                                                        compute_mode,
                                                        mask, pos)),
                                 inner),
                     gen_binary (ASHIFT, compute_mode,
                                 gen_binary (AND, compute_mode,
                                             gen_lowpart_for_combine
                                             (compute_mode, SET_SRC (x)),
                                             mask),
                                 pos)));
    }

  return x;
}
\f
/* Return an RTX for a reference to LEN bits of INNER.  If POS_RTX is nonzero,
   it is an RTX that represents a variable starting position; otherwise,
   POS is the (constant) starting bit position (counted from the LSB).

   INNER may be a USE.  This will occur when we started with a bitfield
   that went outside the boundary of the object in memory, which is
   allowed on most machines.  To isolate this case, we produce a USE
   whose mode is wide enough and surround the MEM with it.  The only
   code that understands the USE is this routine.  If it is not removed,
   it will cause the resulting insn not to match.

   UNSIGNEDP is non-zero for an unsigned reference and zero for a
   signed reference.

   IN_DEST is non-zero if this is a reference in the destination of a
   SET.  This is used when a ZERO_ or SIGN_EXTRACT isn't needed.  If non-zero,
   a STRICT_LOW_PART will be used, if zero, ZERO_EXTEND or SIGN_EXTEND will
   be used.

   IN_COMPARE is non-zero if we are in a COMPARE.  This means that a
   ZERO_EXTRACT should be built even for bits starting at bit 0.

   MODE is the desired mode of the result (if IN_DEST == 0).

   The result is an RTX for the extraction or NULL_RTX if the target
   can't handle it.  */

static rtx
make_extraction (mode, inner, pos, pos_rtx, len,
                 unsignedp, in_dest, in_compare)
     enum machine_mode mode;
     rtx inner;
     int pos;
     rtx pos_rtx;
     int len;
     int unsignedp;
     int in_dest, in_compare;
{
  /* This mode describes the size of the storage area
     to fetch the overall value from.  Within that, we
     ignore the POS lowest bits, etc.  */
  enum machine_mode is_mode = GET_MODE (inner);
  enum machine_mode inner_mode;
  enum machine_mode wanted_inner_mode = byte_mode;
  enum machine_mode wanted_inner_reg_mode = word_mode;
  enum machine_mode pos_mode = word_mode;
  enum machine_mode extraction_mode = word_mode;
  enum machine_mode tmode = mode_for_size (len, MODE_INT, 1);
  int spans_byte = 0;
  rtx new = 0;
  rtx orig_pos_rtx = pos_rtx;
  int orig_pos;

  /* Get some information about INNER and get the innermost object.  */
  if (GET_CODE (inner) == USE)
    /* (use:SI (mem:QI foo)) stands for (mem:SI foo).  */
    /* We don't need to adjust the position because we set up the USE
       to pretend that it was a full-word object.  */
    spans_byte = 1, inner = XEXP (inner, 0);
  else if (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner))
    {
      /* If going from (subreg:SI (mem:QI ...)) to (mem:QI ...),
         consider just the QI as the memory to extract from.
         The subreg adds or removes high bits; its mode is
         irrelevant to the meaning of this extraction,
         since POS and LEN count from the lsb.  */
      if (GET_CODE (SUBREG_REG (inner)) == MEM)
        is_mode = GET_MODE (SUBREG_REG (inner));
      inner = SUBREG_REG (inner);
    }

  inner_mode = GET_MODE (inner);

  if (pos_rtx && GET_CODE (pos_rtx) == CONST_INT)
    pos = INTVAL (pos_rtx), pos_rtx = 0;

  /* See if this can be done without an extraction.  We never can if the
     width of the field is not the same as that of some integer mode.  For
     registers, we can only avoid the extraction if the position is at the
     low-order bit and this is either not in the destination or we have the
     appropriate STRICT_LOW_PART operation available.

     For MEM, we can avoid an extract if the field starts on an appropriate
     boundary and we can change the mode of the memory reference.  However,
     we cannot directly access the MEM if we have a USE and the underlying
     MEM is not TMODE.  This combination means that MEM was being used in a
     context where bits outside its mode were being referenced; that is only
     valid in bit-field insns.  */

  if (tmode != BLKmode
      && ! (spans_byte && inner_mode != tmode)
      && ((pos_rtx == 0 && (pos % BITS_PER_WORD) == 0
           && GET_CODE (inner) != MEM
           && (! in_dest
               || (GET_CODE (inner) == REG
                   && (movstrict_optab->handlers[(int) tmode].insn_code
                       != CODE_FOR_nothing))))
          || (GET_CODE (inner) == MEM && pos_rtx == 0
              && (pos
                  % (STRICT_ALIGNMENT ? GET_MODE_ALIGNMENT (tmode)
                     : BITS_PER_UNIT)) == 0
              /* We can't do this if we are widening INNER_MODE (it
                 may not be aligned, for one thing).  */
              && GET_MODE_BITSIZE (inner_mode) >= GET_MODE_BITSIZE (tmode)
              && (inner_mode == tmode
                  || (! mode_dependent_address_p (XEXP (inner, 0))
                      && ! MEM_VOLATILE_P (inner))))))
    {
      /* If INNER is a MEM, make a new MEM that encompasses just the desired
         field.  If the original and current mode are the same, we need not
         adjust the offset.  Otherwise, we do if bytes big endian.

         If INNER is not a MEM, get a piece consisting of just the field
         of interest (in this case POS % BITS_PER_WORD must be 0).  */

      if (GET_CODE (inner) == MEM)
        {
          int offset;
          /* POS counts from lsb, but make OFFSET count in memory order.  */
          if (BYTES_BIG_ENDIAN)
            offset = (GET_MODE_BITSIZE (is_mode) - len - pos) / BITS_PER_UNIT;
          else
            offset = pos / BITS_PER_UNIT;

          new = gen_rtx_MEM (tmode, plus_constant (XEXP (inner, 0), offset));
          RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (inner);
          MEM_COPY_ATTRIBUTES (new, inner);
        }
      else if (GET_CODE (inner) == REG)
        {
          /* We can't call gen_lowpart_for_combine here since we always want
             a SUBREG and it would sometimes return a new hard register.  */
          if (tmode != inner_mode)
            new = gen_rtx_SUBREG (tmode, inner,
                                  (WORDS_BIG_ENDIAN
                                   && (GET_MODE_SIZE (inner_mode)
                                       > UNITS_PER_WORD)
                                   ? (((GET_MODE_SIZE (inner_mode)
                                        - GET_MODE_SIZE (tmode))
                                       / UNITS_PER_WORD)
                                      - pos / BITS_PER_WORD)
                                   : pos / BITS_PER_WORD));
          else
            new = inner;
        }
      else
        new = force_to_mode (inner, tmode,
                             len >= HOST_BITS_PER_WIDE_INT
                             ? GET_MODE_MASK (tmode)
                             : ((HOST_WIDE_INT) 1 << len) - 1,
                             NULL_RTX, 0);

      /* If this extraction is going into the destination of a SET,
         make a STRICT_LOW_PART unless we made a MEM.  */

      if (in_dest)
        return (GET_CODE (new) == MEM ? new
                : (GET_CODE (new) != SUBREG
                   ? gen_rtx_CLOBBER (tmode, const0_rtx)
                   : gen_rtx_combine (STRICT_LOW_PART, VOIDmode, new)));
230d793d 5886
0f808b6f
JH
5887 if (mode == tmode)
5888 return new;
5889
5890 /* If we know that no extraneous bits are set, and that the high
5891 bit is not set, convert the extraction to the cheaper of
5892 sign and zero extension, that are equivalent in these cases. */
5893 if (flag_expensive_optimizations
5894 && (GET_MODE_BITSIZE (tmode) <= HOST_BITS_PER_WIDE_INT
5895 && ((nonzero_bits (new, tmode)
5896 & ~ (((unsigned HOST_WIDE_INT)
5897 GET_MODE_MASK (tmode))
5898 >> 1))
5899 == 0)))
5900 {
5901 rtx temp = gen_rtx_ZERO_EXTEND (mode, new);
5902 rtx temp1 = gen_rtx_SIGN_EXTEND (mode, new);
5903
5904 /* Prefer ZERO_EXTENSION, since it gives more information to
5905 backends. */
5906 if (rtx_cost (temp, SET) < rtx_cost (temp1, SET))
5907 return temp;
5908 return temp1;
5909 }
5910
230d793d
RS
5911 /* Otherwise, sign- or zero-extend unless we already are in the
5912 proper mode. */
5913
0f808b6f
JH
5914 return (gen_rtx_combine (unsignedp ? ZERO_EXTEND : SIGN_EXTEND,
5915 mode, new));
230d793d
RS
5916 }
5917
cc471082
RS
5918 /* Unless this is a COMPARE or we have a funny memory reference,
5919 don't do anything with zero-extending field extracts starting at
5920 the low-order bit since they are simple AND operations. */
8999a12e
RK
5921 if (pos_rtx == 0 && pos == 0 && ! in_dest
5922 && ! in_compare && ! spans_byte && unsignedp)
230d793d
RS
5923 return 0;
5924
c5c76735
JL
5925 /* Unless we are allowed to span bytes or INNER is not MEM, reject this if
5926 we would be spanning bytes or if the position is not a constant and the
5927 length is not 1. In all other cases, we would only be going outside
5928 our object in cases when an original shift would have been
e7373556 5929 undefined. */
c5c76735 5930 if (! spans_byte && GET_CODE (inner) == MEM
e7373556
RK
5931 && ((pos_rtx == 0 && pos + len > GET_MODE_BITSIZE (is_mode))
5932 || (pos_rtx != 0 && len != 1)))
5933 return 0;
5934
d7cd794f 5935 /* Get the mode to use should INNER not be a MEM, the mode for the position,
230d793d
RS
5936 and the mode for the result. */
5937#ifdef HAVE_insv
5938 if (in_dest)
5939 {
0d8e55d8 5940 wanted_inner_reg_mode
a995e389
RH
5941 = insn_data[(int) CODE_FOR_insv].operand[0].mode;
5942 if (wanted_inner_reg_mode == VOIDmode)
5943 wanted_inner_reg_mode = word_mode;
5944
5945 pos_mode = insn_data[(int) CODE_FOR_insv].operand[2].mode;
5946 if (pos_mode == VOIDmode)
5947 pos_mode = word_mode;
5948
5949 extraction_mode = insn_data[(int) CODE_FOR_insv].operand[3].mode;
5950 if (extraction_mode == VOIDmode)
5951 extraction_mode = word_mode;
230d793d
RS
5952 }
5953#endif
5954
5955#ifdef HAVE_extzv
5956 if (! in_dest && unsignedp)
5957 {
0d8e55d8 5958 wanted_inner_reg_mode
a995e389
RH
5959 = insn_data[(int) CODE_FOR_extzv].operand[1].mode;
5960 if (wanted_inner_reg_mode == VOIDmode)
5961 wanted_inner_reg_mode = word_mode;
5962
5963 pos_mode = insn_data[(int) CODE_FOR_extzv].operand[3].mode;
5964 if (pos_mode == VOIDmode)
5965 pos_mode = word_mode;
5966
5967 extraction_mode = insn_data[(int) CODE_FOR_extzv].operand[0].mode;
5968 if (extraction_mode == VOIDmode)
5969 extraction_mode = word_mode;
230d793d
RS
5970 }
5971#endif
5972
5973#ifdef HAVE_extv
5974 if (! in_dest && ! unsignedp)
5975 {
0d8e55d8 5976 wanted_inner_reg_mode
a995e389
RH
5977 = insn_data[(int) CODE_FOR_extv].operand[1].mode;
5978 if (wanted_inner_reg_mode == VOIDmode)
5979 wanted_inner_reg_mode = word_mode;
5980
5981 pos_mode = insn_data[(int) CODE_FOR_extv].operand[3].mode;
5982 if (pos_mode == VOIDmode)
5983 pos_mode = word_mode;
5984
5985 extraction_mode = insn_data[(int) CODE_FOR_extv].operand[0].mode;
5986 if (extraction_mode == VOIDmode)
5987 extraction_mode = word_mode;
230d793d
RS
5988 }
5989#endif
5990
5991 /* Never narrow an object, since that might not be safe. */
5992
5993 if (mode != VOIDmode
5994 && GET_MODE_SIZE (extraction_mode) < GET_MODE_SIZE (mode))
5995 extraction_mode = mode;
5996
5997 if (pos_rtx && GET_MODE (pos_rtx) != VOIDmode
5998 && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx)))
5999 pos_mode = GET_MODE (pos_rtx);
6000
d7cd794f
RK
6001 /* If this is not from memory, the desired mode is wanted_inner_reg_mode;
6002 if we have to change the mode of memory and cannot, the desired mode is
6003 EXTRACTION_MODE. */
6004 if (GET_CODE (inner) != MEM)
6005 wanted_inner_mode = wanted_inner_reg_mode;
6006 else if (inner_mode != wanted_inner_mode
6007 && (mode_dependent_address_p (XEXP (inner, 0))
6008 || MEM_VOLATILE_P (inner)))
6009 wanted_inner_mode = extraction_mode;
230d793d 6010
6139ff20
RK
6011 orig_pos = pos;
6012
f76b9db2
ILT
6013 if (BITS_BIG_ENDIAN)
6014 {
cf54c2cd
DE
6015 /* POS is passed as if BITS_BIG_ENDIAN == 0, so we need to convert it to
6016 BITS_BIG_ENDIAN style. If position is constant, compute new
6017 position. Otherwise, build subtraction.
6018 Note that POS is relative to the mode of the original argument.
6019 If it's a MEM we need to recompute POS relative to that.
6020 However, if we're extracting from (or inserting into) a register,
6021 we want to recompute POS relative to wanted_inner_mode. */
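      /* Worked example (added commentary): for a MEM with a 32-bit
         IS_MODE, LEN == 8 and POS == 0, the field ends up at bit
         32 - 8 - 0 == 24 once positions count from the msb.  */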
      int width = (GET_CODE (inner) == MEM
                   ? GET_MODE_BITSIZE (is_mode)
                   : GET_MODE_BITSIZE (wanted_inner_mode));

      if (pos_rtx == 0)
        pos = width - len - pos;
      else
        pos_rtx
          = gen_rtx_combine (MINUS, GET_MODE (pos_rtx),
                             GEN_INT (width - len), pos_rtx);
      /* POS may be less than 0 now, but we check for that below.
         Note that it can only be less than 0 if GET_CODE (inner) != MEM.  */
    }

  /* If INNER has a wider mode, make it smaller.  If this is a constant
     extract, try to adjust the byte to point to the byte containing
     the value.  */
  if (wanted_inner_mode != VOIDmode
      && GET_MODE_SIZE (wanted_inner_mode) < GET_MODE_SIZE (is_mode)
      && ((GET_CODE (inner) == MEM
           && (inner_mode == wanted_inner_mode
               || (! mode_dependent_address_p (XEXP (inner, 0))
                   && ! MEM_VOLATILE_P (inner))))))
    {
      int offset = 0;

      /* The computations below will be correct if the machine is big
         endian in both bits and bytes or little endian in bits and bytes.
         If it is mixed, we must adjust.  */

      /* If bytes are big endian and we had a paradoxical SUBREG, we must
         adjust OFFSET to compensate.  */
      if (BYTES_BIG_ENDIAN
          && ! spans_byte
          && GET_MODE_SIZE (inner_mode) < GET_MODE_SIZE (is_mode))
        offset -= GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (inner_mode);

      /* If this is a constant position, we can move to the desired byte.  */
      if (pos_rtx == 0)
        {
          offset += pos / BITS_PER_UNIT;
          pos %= GET_MODE_BITSIZE (wanted_inner_mode);
        }

      if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN
          && ! spans_byte
          && is_mode != wanted_inner_mode)
        offset = (GET_MODE_SIZE (is_mode)
                  - GET_MODE_SIZE (wanted_inner_mode) - offset);

      if (offset != 0 || inner_mode != wanted_inner_mode)
        {
          rtx newmem = gen_rtx_MEM (wanted_inner_mode,
                                    plus_constant (XEXP (inner, 0), offset));
          RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (inner);
          MEM_COPY_ATTRIBUTES (newmem, inner);
          inner = newmem;
        }
    }

  /* If INNER is not memory, we can always get it into the proper mode.  If we
     are changing its mode, POS must be a constant and smaller than the size
     of the new mode.  */
  else if (GET_CODE (inner) != MEM)
    {
      if (GET_MODE (inner) != wanted_inner_mode
          && (pos_rtx != 0
              || orig_pos + len > GET_MODE_BITSIZE (wanted_inner_mode)))
        return 0;

      inner = force_to_mode (inner, wanted_inner_mode,
                             pos_rtx
                             || len + orig_pos >= HOST_BITS_PER_WIDE_INT
                             ? GET_MODE_MASK (wanted_inner_mode)
                             : (((HOST_WIDE_INT) 1 << len) - 1) << orig_pos,
                             NULL_RTX, 0);
    }

  /* Adjust mode of POS_RTX, if needed.  If we want a wider mode, we
     have to zero extend.  Otherwise, we can just use a SUBREG.  */
  if (pos_rtx != 0
      && GET_MODE_SIZE (pos_mode) > GET_MODE_SIZE (GET_MODE (pos_rtx)))
    {
      rtx temp = gen_rtx_combine (ZERO_EXTEND, pos_mode, pos_rtx);

      /* If we know that no extraneous bits are set, and that the high
         bit is not set, convert the extraction to the cheaper of
         SIGN_EXTEND and ZERO_EXTEND, which are equivalent in these
         cases.  */
      if (flag_expensive_optimizations
          && (GET_MODE_BITSIZE (GET_MODE (pos_rtx)) <= HOST_BITS_PER_WIDE_INT
              && ((nonzero_bits (pos_rtx, GET_MODE (pos_rtx))
                   & ~ (((unsigned HOST_WIDE_INT)
                         GET_MODE_MASK (GET_MODE (pos_rtx)))
                        >> 1))
                  == 0)))
        {
          rtx temp1 = gen_rtx_SIGN_EXTEND (pos_mode, pos_rtx);

          /* Prefer ZERO_EXTEND, since it gives more information to
             backends.  */
          if (rtx_cost (temp1, SET) < rtx_cost (temp, SET))
            temp = temp1;
        }
      pos_rtx = temp;
    }
  else if (pos_rtx != 0
           && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx)))
    pos_rtx = gen_lowpart_for_combine (pos_mode, pos_rtx);

  /* Make POS_RTX unless we already have it and it is correct.  If we don't
     have a POS_RTX but we do have an ORIG_POS_RTX, the latter must
     be a CONST_INT.  */
  if (pos_rtx == 0 && orig_pos_rtx != 0 && INTVAL (orig_pos_rtx) == pos)
    pos_rtx = orig_pos_rtx;

  else if (pos_rtx == 0)
    pos_rtx = GEN_INT (pos);

  /* Make the required operation.  See if we can use existing rtx.  */
  new = gen_rtx_combine (unsignedp ? ZERO_EXTRACT : SIGN_EXTRACT,
                         extraction_mode, inner, GEN_INT (len), pos_rtx);
  if (! in_dest)
    new = gen_lowpart_for_combine (mode, new);

  return new;
}
\f
/* See if X contains an ASHIFT of COUNT or more bits that can be commuted
   with any other operations in X.  Return X without that shift if so.  */
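/* Illustrative sketch (added commentary): with COUNT == 2,
   (plus:SI (ashift:SI X (const_int 2)) (const_int 20))
   can absorb the shift, since 20 has its low two bits clear;
   we return (plus:SI X (const_int 5)).  */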
static rtx
extract_left_shift (x, count)
     rtx x;
     int count;
{
  enum rtx_code code = GET_CODE (x);
  enum machine_mode mode = GET_MODE (x);
  rtx tem;

  switch (code)
    {
    case ASHIFT:
      /* This is the shift itself.  If it is wide enough, we will return
         either the value being shifted if the shift count is equal to
         COUNT or a shift for the difference.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
          && INTVAL (XEXP (x, 1)) >= count)
        return simplify_shift_const (NULL_RTX, ASHIFT, mode, XEXP (x, 0),
                                     INTVAL (XEXP (x, 1)) - count);
      break;

    case NEG:  case NOT:
      if ((tem = extract_left_shift (XEXP (x, 0), count)) != 0)
        return gen_unary (code, mode, mode, tem);

      break;

    case PLUS:  case IOR:  case XOR:  case AND:
      /* If we can safely shift this constant and we find the inner shift,
         make a new operation.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
          && (INTVAL (XEXP (x, 1)) & (((HOST_WIDE_INT) 1 << count) - 1)) == 0
          && (tem = extract_left_shift (XEXP (x, 0), count)) != 0)
        return gen_binary (code, mode, tem,
                           GEN_INT (INTVAL (XEXP (x, 1)) >> count));

      break;

    default:
      break;
    }

  return 0;
}
\f
/* Look at the expression rooted at X.  Look for expressions
   equivalent to ZERO_EXTRACT, SIGN_EXTRACT, ZERO_EXTEND, SIGN_EXTEND.
   Form these expressions.

   Return the new rtx, usually just X.

   Also, for machines like the Vax that don't have logical shift insns,
   try to convert logical to arithmetic shift operations in cases where
   they are equivalent.  This undoes the canonicalizations to logical
   shifts done elsewhere.

   We try, as much as possible, to re-use rtl expressions to save memory.

   IN_CODE says what kind of expression we are processing.  Normally, it is
   SET.  In a memory address (inside a MEM, PLUS or MINUS, the latter two
   being kludges), it is MEM.  When processing the arguments of a comparison
   or a COMPARE against zero, it is COMPARE.  */
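/* A typical case (added commentary): (and:SI (lshiftrt:SI X (const_int 8))
   (const_int 255)) is recognized in the AND case below and typically
   rewritten as (zero_extract:SI X (const_int 8) (const_int 8)).  */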
static rtx
make_compound_operation (x, in_code)
     rtx x;
     enum rtx_code in_code;
{
  enum rtx_code code = GET_CODE (x);
  enum machine_mode mode = GET_MODE (x);
  int mode_width = GET_MODE_BITSIZE (mode);
  rtx rhs, lhs;
  enum rtx_code next_code;
  int i;
  rtx new = 0;
  rtx tem;
  const char *fmt;

  /* Select the code to be used in recursive calls.  Once we are inside an
     address, we stay there.  If we have a comparison, set to COMPARE,
     but once inside, go back to our default of SET.  */

  next_code = (code == MEM || code == PLUS || code == MINUS ? MEM
               : ((code == COMPARE || GET_RTX_CLASS (code) == '<')
                  && XEXP (x, 1) == const0_rtx) ? COMPARE
               : in_code == COMPARE ? SET : in_code);

  /* Process depending on the code of this operation.  If NEW is set
     non-zero, it will be returned.  */

  switch (code)
    {
    case ASHIFT:
      /* Convert shifts by constants into multiplications if inside
         an address.  */
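      /* E.g. inside a MEM, (ashift:SI X (const_int 2)) becomes
         (mult:SI X (const_int 4)), the canonical form for addresses.  */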
      if (in_code == MEM && GET_CODE (XEXP (x, 1)) == CONST_INT
          && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
          && INTVAL (XEXP (x, 1)) >= 0)
        {
          new = make_compound_operation (XEXP (x, 0), next_code);
          new = gen_rtx_combine (MULT, mode, new,
                                 GEN_INT ((HOST_WIDE_INT) 1
                                          << INTVAL (XEXP (x, 1))));
        }
      break;

    case AND:
      /* If the second operand is not a constant, we can't do anything
         with it.  */
      if (GET_CODE (XEXP (x, 1)) != CONST_INT)
        break;

      /* If the constant is a power of two minus one and the first operand
         is a logical right shift, make an extraction.  */
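      /* E.g. (and (lshiftrt X (const_int 12)) (const_int 63)):
         63 == (1 << 6) - 1, so this selects a 6-bit field at bit 12.  */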
      if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
          && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
        {
          new = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code);
          new = make_extraction (mode, new, 0, XEXP (XEXP (x, 0), 1), i, 1,
                                 0, in_code == COMPARE);
        }

      /* Same as previous, but for (subreg (lshiftrt ...)) in first op.  */
      else if (GET_CODE (XEXP (x, 0)) == SUBREG
               && subreg_lowpart_p (XEXP (x, 0))
               && GET_CODE (SUBREG_REG (XEXP (x, 0))) == LSHIFTRT
               && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
        {
          new = make_compound_operation (XEXP (SUBREG_REG (XEXP (x, 0)), 0),
                                         next_code);
          new = make_extraction (GET_MODE (SUBREG_REG (XEXP (x, 0))), new, 0,
                                 XEXP (SUBREG_REG (XEXP (x, 0)), 1), i, 1,
                                 0, in_code == COMPARE);
        }
      /* Same as previous, but for (xor/ior (lshiftrt...) (lshiftrt...)).  */
      else if ((GET_CODE (XEXP (x, 0)) == XOR
                || GET_CODE (XEXP (x, 0)) == IOR)
               && GET_CODE (XEXP (XEXP (x, 0), 0)) == LSHIFTRT
               && GET_CODE (XEXP (XEXP (x, 0), 1)) == LSHIFTRT
               && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
        {
          /* Apply the distributive law, and then try to make extractions.  */
          new = gen_rtx_combine (GET_CODE (XEXP (x, 0)), mode,
                                 gen_rtx_AND (mode, XEXP (XEXP (x, 0), 0),
                                              XEXP (x, 1)),
                                 gen_rtx_AND (mode, XEXP (XEXP (x, 0), 1),
                                              XEXP (x, 1)));
          new = make_compound_operation (new, in_code);
        }

      /* If we have (and (rotate X C) M) and C is larger than the number
         of bits in M, this is an extraction.  */

      else if (GET_CODE (XEXP (x, 0)) == ROTATE
               && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
               && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0
               && i <= INTVAL (XEXP (XEXP (x, 0), 1)))
        {
          new = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code);
          new = make_extraction (mode, new,
                                 (GET_MODE_BITSIZE (mode)
                                  - INTVAL (XEXP (XEXP (x, 0), 1))),
                                 NULL_RTX, i, 1, 0, in_code == COMPARE);
        }

      /* On machines without logical shifts, if the operand of the AND is
         a logical shift and our mask turns off all the propagated sign
         bits, we can replace the logical shift with an arithmetic shift.  */
      else if (ashr_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
               && (lshr_optab->handlers[(int) mode].insn_code
                   == CODE_FOR_nothing)
               && GET_CODE (XEXP (x, 0)) == LSHIFTRT
               && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
               && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
               && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT
               && mode_width <= HOST_BITS_PER_WIDE_INT)
        {
          unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);

          mask >>= INTVAL (XEXP (XEXP (x, 0), 1));
          if ((INTVAL (XEXP (x, 1)) & ~mask) == 0)
            SUBST (XEXP (x, 0),
                   gen_rtx_combine (ASHIFTRT, mode,
                                    make_compound_operation (XEXP (XEXP (x, 0),
                                                                   0),
                                                             next_code),
                                    XEXP (XEXP (x, 0), 1)));
        }

      /* If the constant is one less than a power of two, this might be
         representable by an extraction even if no shift is present.
         If it doesn't end up being a ZERO_EXTEND, we will ignore it unless
         we are in a COMPARE.  */
      else if ((i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
        new = make_extraction (mode,
                               make_compound_operation (XEXP (x, 0),
                                                        next_code),
                               0, NULL_RTX, i, 1, 0, in_code == COMPARE);

      /* If we are in a comparison and this is an AND with a power of two,
         convert this into the appropriate bit extract.  */
      else if (in_code == COMPARE
               && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0)
        new = make_extraction (mode,
                               make_compound_operation (XEXP (x, 0),
                                                        next_code),
                               i, NULL_RTX, 1, 1, 0, 1);

      break;

    case LSHIFTRT:
      /* If the sign bit is known to be zero, replace this with an
         arithmetic shift.  */
      if (ashr_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing
          && lshr_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
          && mode_width <= HOST_BITS_PER_WIDE_INT
          && (nonzero_bits (XEXP (x, 0), mode)
              & ((unsigned HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
        {
          new = gen_rtx_combine (ASHIFTRT, mode,
                                 make_compound_operation (XEXP (x, 0),
                                                          next_code),
                                 XEXP (x, 1));
          break;
        }

      /* ... fall through ...  */

    case ASHIFTRT:
      lhs = XEXP (x, 0);
      rhs = XEXP (x, 1);

      /* If we have (ashiftrt (ashift foo C1) C2) with C2 >= C1,
         this is a SIGN_EXTRACT.  */
      if (GET_CODE (rhs) == CONST_INT
          && GET_CODE (lhs) == ASHIFT
          && GET_CODE (XEXP (lhs, 1)) == CONST_INT
          && INTVAL (rhs) >= INTVAL (XEXP (lhs, 1)))
        {
          new = make_compound_operation (XEXP (lhs, 0), next_code);
          new = make_extraction (mode, new,
                                 INTVAL (rhs) - INTVAL (XEXP (lhs, 1)),
                                 NULL_RTX, mode_width - INTVAL (rhs),
                                 code == LSHIFTRT, 0, in_code == COMPARE);
        }

      /* See if we have operations between an ASHIFTRT and an ASHIFT.
         If so, try to merge the shifts into a SIGN_EXTEND.  We could
         also do this for some cases of SIGN_EXTRACT, but it doesn't
         seem worth the effort; the case checked for occurs on Alpha.  */

      if (GET_RTX_CLASS (GET_CODE (lhs)) != 'o'
          && ! (GET_CODE (lhs) == SUBREG
                && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (lhs))) == 'o'))
          && GET_CODE (rhs) == CONST_INT
          && INTVAL (rhs) < HOST_BITS_PER_WIDE_INT
          && (new = extract_left_shift (lhs, INTVAL (rhs))) != 0)
        new = make_extraction (mode, make_compound_operation (new, next_code),
                               0, NULL_RTX, mode_width - INTVAL (rhs),
                               code == LSHIFTRT, 0, in_code == COMPARE);

      break;

    case SUBREG:
      /* Call ourselves recursively on the inner expression.  If we are
         narrowing the object and it has a different RTL code from
         what it originally did, do this SUBREG as a force_to_mode.  */

      tem = make_compound_operation (SUBREG_REG (x), in_code);
      if (GET_CODE (tem) != GET_CODE (SUBREG_REG (x))
          && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (tem))
          && subreg_lowpart_p (x))
        {
          rtx newer = force_to_mode (tem, mode,
                                     GET_MODE_MASK (mode), NULL_RTX, 0);

          /* If we have something other than a SUBREG, we might have
             done an expansion, so rerun ourselves.  */
          if (GET_CODE (newer) != SUBREG)
            newer = make_compound_operation (newer, in_code);

          return newer;
        }

      /* If this is a paradoxical subreg, and the new code is a sign or
         zero extension, omit the subreg and widen the extension.  If it
         is a regular subreg, we can still get rid of the subreg by not
         widening so much, or in fact removing the extension entirely.  */
      if ((GET_CODE (tem) == SIGN_EXTEND
           || GET_CODE (tem) == ZERO_EXTEND)
          && subreg_lowpart_p (x))
        {
          if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (tem))
              || (GET_MODE_SIZE (mode) >
                  GET_MODE_SIZE (GET_MODE (XEXP (tem, 0)))))
            tem = gen_rtx_combine (GET_CODE (tem), mode, XEXP (tem, 0));
          else
            tem = gen_lowpart_for_combine (mode, XEXP (tem, 0));
          return tem;
        }
      break;

    default:
      break;
    }

  if (new)
    {
      x = gen_lowpart_for_combine (mode, new);
      code = GET_CODE (x);
    }

  /* Now recursively process each operand of this operation.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    if (fmt[i] == 'e')
      {
        new = make_compound_operation (XEXP (x, i), next_code);
        SUBST (XEXP (x, i), new);
      }

  return x;
}
\f
/* Given M see if it is a value that would select a field of bits
   within an item, but not the entire word.  Return -1 if not.
   Otherwise, return the starting position of the field, where 0 is the
   low-order bit.

   *PLEN is set to the length of the field.  */
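/* Worked example (added commentary): for M == 0x0f0, the lowest set bit is
   at position 4 and (M >> 4) + 1 == 0x10 is a power of 2, so we return 4
   with *PLEN == 4.  M == 0x110 fails the second test and returns -1.  */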
static int
get_pos_from_mask (m, plen)
     unsigned HOST_WIDE_INT m;
     int *plen;
{
  /* Get the bit number of the first 1 bit from the right, -1 if none.  */
  int pos = exact_log2 (m & - m);

  if (pos < 0)
    return -1;

  /* Now shift off the low-order zero bits and see if we have a power of
     two minus 1.  */
  *plen = exact_log2 ((m >> pos) + 1);

  if (*plen <= 0)
    return -1;

  return pos;
}
\f
/* See if X can be simplified knowing that we will only refer to it in
   MODE and will only refer to those bits that are nonzero in MASK.
   If other bits are being computed or if masking operations are done
   that select a superset of the bits in MASK, they can sometimes be
   ignored.

   Return a possibly simplified expression, but always convert X to
   MODE.  If X is a CONST_INT, AND the CONST_INT with MASK.

   Also, if REG is non-zero and X is a register equal in value to REG,
   replace X with REG.

   If JUST_SELECT is nonzero, don't optimize by noticing that bits in MASK
   are all off in X.  This is used when X will be complemented, by either
   NOT, NEG, or XOR.  */
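/* For instance (added commentary), forcing (const_int 0x1ff) with
   MASK == 0xff yields (const_int 0xff), and an (and X (const_int C))
   whose constant C equals MASK is deleted as redundant in the AND
   case below.  */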
static rtx
force_to_mode (x, mode, mask, reg, just_select)
     rtx x;
     enum machine_mode mode;
     unsigned HOST_WIDE_INT mask;
     rtx reg;
     int just_select;
{
  enum rtx_code code = GET_CODE (x);
  int next_select = just_select || code == XOR || code == NOT || code == NEG;
  enum machine_mode op_mode;
  unsigned HOST_WIDE_INT fuller_mask, nonzero;
  rtx op0, op1, temp;

  /* If this is a CALL or ASM_OPERANDS, don't do anything.  Some of the
     code below will do the wrong thing since the mode of such an
     expression is VOIDmode.

     Also do nothing if X is a CLOBBER; this can happen if X was
     the return value from a call to gen_lowpart_for_combine.  */
  if (code == CALL || code == ASM_OPERANDS || code == CLOBBER)
    return x;

  /* We want to perform the operation in its present mode unless we know
     that the operation is valid in MODE, in which case we do the operation
     in MODE.  */
  op_mode = ((GET_MODE_CLASS (mode) == GET_MODE_CLASS (GET_MODE (x))
              && code_to_optab[(int) code] != 0
              && (code_to_optab[(int) code]->handlers[(int) mode].insn_code
                  != CODE_FOR_nothing))
             ? mode : GET_MODE (x));

  /* It is not valid to do a right-shift in a narrower mode
     than the one it came in with.  */
  if ((code == LSHIFTRT || code == ASHIFTRT)
      && GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (GET_MODE (x)))
    op_mode = GET_MODE (x);

  /* Truncate MASK to fit OP_MODE.  */
  if (op_mode)
    mask &= GET_MODE_MASK (op_mode);

  /* When we have an arithmetic operation, or a shift whose count we
     do not know, we need to assume that all bits up to the highest-order
     bit in MASK will be needed.  This is how we form such a mask.  */
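  /* Worked example (added commentary): if MASK is 0x09, its highest set
     bit is bit 3, so FULLER_MASK becomes (1 << 4) - 1 == 0x0f; carries
     from the low-order bits can propagate as far as bit 3.  */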
  if (op_mode)
    fuller_mask = (GET_MODE_BITSIZE (op_mode) >= HOST_BITS_PER_WIDE_INT
                   ? GET_MODE_MASK (op_mode)
                   : ((HOST_WIDE_INT) 1 << (floor_log2 (mask) + 1)) - 1);
  else
    fuller_mask = ~ (HOST_WIDE_INT) 0;

  /* Determine what bits of X are guaranteed to be (non)zero.  */
  nonzero = nonzero_bits (x, mode);

  /* If none of the bits in X are needed, return a zero.  */
  if (! just_select && (nonzero & mask) == 0)
    return const0_rtx;

  /* If X is a CONST_INT, return a new one.  Do this here since the
     test below will fail.  */
  if (GET_CODE (x) == CONST_INT)
    {
      HOST_WIDE_INT cval = INTVAL (x) & mask;
      int width = GET_MODE_BITSIZE (mode);

      /* If MODE is narrower than HOST_WIDE_INT and CVAL is a negative
         number, sign extend it.  */
      if (width > 0 && width < HOST_BITS_PER_WIDE_INT
          && (cval & ((HOST_WIDE_INT) 1 << (width - 1))) != 0)
        cval |= (HOST_WIDE_INT) -1 << width;

      return GEN_INT (cval);
    }

  /* If X is narrower than MODE and we want all the bits in X's mode, just
     get X in the proper mode.  */
  if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (mode)
      && (GET_MODE_MASK (GET_MODE (x)) & ~ mask) == 0)
    return gen_lowpart_for_combine (mode, x);

  /* If we aren't changing the mode, X is not a SUBREG, and all zero bits in
     MASK are already known to be zero in X, we need not do anything.  */
  if (GET_MODE (x) == mode && code != SUBREG && (~ mask & nonzero) == 0)
    return x;

  switch (code)
    {
    case CLOBBER:
      /* If X is a (clobber (const_int)), return it since we know we are
         generating something that won't match.  */
      return x;

    case USE:
      /* X is a (use (mem ..)) that was made from a bit-field extraction that
         spanned the boundary of the MEM.  If we are now masking so it is
         within that boundary, we don't need the USE any more.  */
      if (! BITS_BIG_ENDIAN
          && (mask & ~ GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
        return force_to_mode (XEXP (x, 0), mode, mask, reg, next_select);
      break;

    case SIGN_EXTEND:
    case ZERO_EXTEND:
    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      x = expand_compound_operation (x);
      if (GET_CODE (x) != code)
        return force_to_mode (x, mode, mask, reg, next_select);
      break;

    case REG:
      if (reg != 0 && (rtx_equal_p (get_last_value (reg), x)
                       || rtx_equal_p (reg, get_last_value (x))))
        x = reg;
      break;

    case SUBREG:
      if (subreg_lowpart_p (x)
          /* We can ignore the effect of this SUBREG if it narrows the mode or
             if the constant masks to zero all the bits the mode doesn't
             have.  */
          && ((GET_MODE_SIZE (GET_MODE (x))
               < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
              || (0 == (mask
                        & GET_MODE_MASK (GET_MODE (x))
                        & ~ GET_MODE_MASK (GET_MODE (SUBREG_REG (x)))))))
        return force_to_mode (SUBREG_REG (x), mode, mask, reg, next_select);
      break;

    case AND:
      /* If this is an AND with a constant, convert it into an AND
         whose constant is the AND of that constant with MASK.  If it
         remains an AND of MASK, delete it since it is redundant.  */

      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
        {
          x = simplify_and_const_int (x, op_mode, XEXP (x, 0),
                                      mask & INTVAL (XEXP (x, 1)));

          /* If X is still an AND, see if it is an AND with a mask that
             is just some low-order bits.  If so, and it is MASK, we don't
             need it.  */

          if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT
              && (unsigned HOST_WIDE_INT) INTVAL (XEXP (x, 1)) == mask)
            x = XEXP (x, 0);

          /* If it remains an AND, try making another AND with the bits
             in the mode mask that aren't in MASK turned on.  If the
             constant in the AND is wide enough, this might make a
             cheaper constant.  */

          if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT
              && GET_MODE_MASK (GET_MODE (x)) != mask
              && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT)
            {
              HOST_WIDE_INT cval = (INTVAL (XEXP (x, 1))
                                    | (GET_MODE_MASK (GET_MODE (x)) & ~ mask));
              int width = GET_MODE_BITSIZE (GET_MODE (x));
              rtx y;

              /* If MODE is narrower than HOST_WIDE_INT and CVAL is a negative
                 number, sign extend it.  */
              if (width > 0 && width < HOST_BITS_PER_WIDE_INT
                  && (cval & ((HOST_WIDE_INT) 1 << (width - 1))) != 0)
                cval |= (HOST_WIDE_INT) -1 << width;

              y = gen_binary (AND, GET_MODE (x), XEXP (x, 0), GEN_INT (cval));
              if (rtx_cost (y, SET) < rtx_cost (x, SET))
                x = y;
            }

          break;
        }

      goto binop;

    case PLUS:
      /* In (and (plus FOO C1) M), if M is a mask that just turns off
         low-order bits (as in an alignment operation) and FOO is already
         aligned to that boundary, mask C1 to that boundary as well.
         This may eliminate that PLUS and, later, the AND.  */
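      /* Worked example (added commentary): in
         (and (plus FOO (const_int 9)) (const_int -8)) with FOO known to be
         8-byte aligned, SMASK == ~7, so C1 == 9 masks down to 8 below and
         the PLUS may later combine away.  */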
      {
        int width = GET_MODE_BITSIZE (mode);
        unsigned HOST_WIDE_INT smask = mask;

        /* If MODE is narrower than HOST_WIDE_INT and mask is a negative
           number, sign extend it.  */

        if (width < HOST_BITS_PER_WIDE_INT
            && (smask & ((HOST_WIDE_INT) 1 << (width - 1))) != 0)
          smask |= (HOST_WIDE_INT) -1 << width;

        if (GET_CODE (XEXP (x, 1)) == CONST_INT
            && exact_log2 (- smask) >= 0)
          {
#ifdef STACK_BIAS
            if (STACK_BIAS
                && (XEXP (x, 0) == stack_pointer_rtx
                    || XEXP (x, 0) == frame_pointer_rtx))
              {
                int sp_alignment = STACK_BOUNDARY / BITS_PER_UNIT;
                unsigned HOST_WIDE_INT sp_mask = GET_MODE_MASK (mode);

                sp_mask &= ~ (sp_alignment - 1);
                if ((sp_mask & ~ smask) == 0
                    && ((INTVAL (XEXP (x, 1)) - STACK_BIAS) & ~ smask) != 0)
                  return force_to_mode (plus_constant (XEXP (x, 0),
                                                       ((INTVAL (XEXP (x, 1)) -
                                                         STACK_BIAS) & smask)
                                                       + STACK_BIAS),
                                        mode, smask, reg, next_select);
              }
#endif
            if ((nonzero_bits (XEXP (x, 0), mode) & ~ smask) == 0
                && (INTVAL (XEXP (x, 1)) & ~ smask) != 0)
              return force_to_mode (plus_constant (XEXP (x, 0),
                                                   (INTVAL (XEXP (x, 1))
                                                    & smask)),
                                    mode, smask, reg, next_select);
          }
      }

      /* ... fall through ...  */

    case MINUS:
    case MULT:
      /* For PLUS, MINUS and MULT, we need any bits less significant than the
         most significant bit in MASK since carries from those bits will
         affect the bits we are interested in.  */
      mask = fuller_mask;
      goto binop;

    case IOR:
    case XOR:
      /* If X is (ior (lshiftrt FOO C1) C2), try to commute the IOR and
         LSHIFTRT so we end up with an (and (lshiftrt (ior ...) ...) ...)
         operation which may be a bitfield extraction.  Ensure that the
         constant we form is not wider than the mode of X.  */
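      /* E.g. (ior (lshiftrt FOO (const_int 8)) (const_int 15)) commutes to
         (lshiftrt (ior FOO (const_int 0xf00)) (const_int 8)), assuming MASK
         retains the low four bits; constants here are illustrative.  */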
      if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
          && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
          && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
          && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT
          && GET_CODE (XEXP (x, 1)) == CONST_INT
          && ((INTVAL (XEXP (XEXP (x, 0), 1))
               + floor_log2 (INTVAL (XEXP (x, 1))))
              < GET_MODE_BITSIZE (GET_MODE (x)))
          && (INTVAL (XEXP (x, 1))
              & ~ nonzero_bits (XEXP (x, 0), GET_MODE (x))) == 0)
        {
          temp = GEN_INT ((INTVAL (XEXP (x, 1)) & mask)
                          << INTVAL (XEXP (XEXP (x, 0), 1)));
          temp = gen_binary (GET_CODE (x), GET_MODE (x),
                             XEXP (XEXP (x, 0), 0), temp);
          x = gen_binary (LSHIFTRT, GET_MODE (x), temp,
                          XEXP (XEXP (x, 0), 1));
          return force_to_mode (x, mode, mask, reg, next_select);
        }

    binop:
      /* For most binary operations, just propagate into the operation and
         change the mode if we have an operation of that mode.  */

      op0 = gen_lowpart_for_combine (op_mode,
                                     force_to_mode (XEXP (x, 0), mode, mask,
                                                    reg, next_select));
      op1 = gen_lowpart_for_combine (op_mode,
                                     force_to_mode (XEXP (x, 1), mode, mask,
                                                    reg, next_select));

      /* If OP1 is a CONST_INT and X is an IOR or XOR, clear bits outside
         MASK since OP1 might have been sign-extended but we never want
         to turn on extra bits, since combine might have previously relied
         on them being off.  */
      if (GET_CODE (op1) == CONST_INT && (code == IOR || code == XOR)
          && (INTVAL (op1) & mask) != 0)
        op1 = GEN_INT (INTVAL (op1) & mask);

      if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
        x = gen_binary (code, op_mode, op0, op1);
      break;

    case ASHIFT:
      /* For left shifts, do the same, but just for the first operand.
         However, we cannot do anything with shifts where we cannot
         guarantee that the counts are smaller than the size of the mode
         because such a count will have a different meaning in a
         wider mode.  */

      if (! (GET_CODE (XEXP (x, 1)) == CONST_INT
             && INTVAL (XEXP (x, 1)) >= 0
             && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (mode))
          && ! (GET_MODE (XEXP (x, 1)) != VOIDmode
                && (nonzero_bits (XEXP (x, 1), GET_MODE (XEXP (x, 1)))
                    < (unsigned HOST_WIDE_INT) GET_MODE_BITSIZE (mode))))
        break;

      /* If the shift count is a constant and we can do arithmetic in
         the mode of the shift, refine which bits we need.  Otherwise, use the
         conservative form of the mask.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
          && INTVAL (XEXP (x, 1)) >= 0
          && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (op_mode)
          && GET_MODE_BITSIZE (op_mode) <= HOST_BITS_PER_WIDE_INT)
        mask >>= INTVAL (XEXP (x, 1));
      else
        mask = fuller_mask;

      op0 = gen_lowpart_for_combine (op_mode,
                                     force_to_mode (XEXP (x, 0), op_mode,
                                                    mask, reg, next_select));

      if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0))
        x = gen_binary (code, op_mode, op0, XEXP (x, 1));
      break;

    case LSHIFTRT:
      /* Here we can only do something if the shift count is a constant,
         this shift constant is valid for the host, and we can do arithmetic
         in OP_MODE.  */

      if (GET_CODE (XEXP (x, 1)) == CONST_INT
          && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
          && GET_MODE_BITSIZE (op_mode) <= HOST_BITS_PER_WIDE_INT)
        {
          rtx inner = XEXP (x, 0);
          unsigned HOST_WIDE_INT inner_mask;

          /* Select the mask of the bits we need for the shift operand.  */
          inner_mask = mask << INTVAL (XEXP (x, 1));

          /* We can only change the mode of the shift if we can do arithmetic
             in the mode of the shift and INNER_MASK is no wider than the
             width of OP_MODE.  */
          if (GET_MODE_BITSIZE (op_mode) > HOST_BITS_PER_WIDE_INT
              || (inner_mask & ~ GET_MODE_MASK (op_mode)) != 0)
            op_mode = GET_MODE (x);

          inner = force_to_mode (inner, op_mode, inner_mask, reg, next_select);

          if (GET_MODE (x) != op_mode || inner != XEXP (x, 0))
            x = gen_binary (LSHIFTRT, op_mode, inner, XEXP (x, 1));
        }

      /* If we have (and (lshiftrt FOO C1) C2) where the combination of the
         shift and AND produces only copies of the sign bit (C2 is one less
         than a power of two), we can do this with just a shift.  */

      if (GET_CODE (x) == LSHIFTRT
          && GET_CODE (XEXP (x, 1)) == CONST_INT
          && ((INTVAL (XEXP (x, 1))
               + num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0))))
              >= GET_MODE_BITSIZE (GET_MODE (x)))
          && exact_log2 (mask + 1) >= 0
          && (num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
              >= exact_log2 (mask + 1)))
        x = gen_binary (LSHIFTRT, GET_MODE (x), XEXP (x, 0),
                        GEN_INT (GET_MODE_BITSIZE (GET_MODE (x))
                                 - exact_log2 (mask + 1)));

      goto shiftrt;

    case ASHIFTRT:
      /* If we are just looking for the sign bit, we don't need this shift at
         all, even if it has a variable count.  */
      if (GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
          && (mask == ((unsigned HOST_WIDE_INT) 1
                       << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
        return force_to_mode (XEXP (x, 0), mode, mask, reg, next_select);

      /* If this is a shift by a constant, get a mask that contains those bits
         that are not copies of the sign bit.  We then have two cases:  If
         MASK only includes those bits, this can be a logical shift, which may
         allow simplifications.  If MASK is a single-bit field not within
         those bits, we are requesting a copy of the sign bit and hence can
         shift the sign bit to the appropriate location.  */

      if (GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) >= 0
          && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
        {
          int i = -1;

          /* If the considered data is wider than HOST_WIDE_INT, we can't
             represent a mask for all its bits in a single scalar.
             But we only care about the lower bits, so calculate these.  */

          if (GET_MODE_BITSIZE (GET_MODE (x)) > HOST_BITS_PER_WIDE_INT)
            {
              nonzero = ~ (HOST_WIDE_INT) 0;

              /* GET_MODE_BITSIZE (GET_MODE (x)) - INTVAL (XEXP (x, 1))
                 is the number of bits a full-width mask would have set.
                 We need only shift if these are fewer than nonzero can
                 hold.  If not, we must keep all bits set in nonzero.  */

              if (GET_MODE_BITSIZE (GET_MODE (x)) - INTVAL (XEXP (x, 1))
                  < HOST_BITS_PER_WIDE_INT)
                nonzero >>= INTVAL (XEXP (x, 1))
                            + HOST_BITS_PER_WIDE_INT
                            - GET_MODE_BITSIZE (GET_MODE (x));
            }
          else
            {
              nonzero = GET_MODE_MASK (GET_MODE (x));
              nonzero >>= INTVAL (XEXP (x, 1));
            }

          if ((mask & ~ nonzero) == 0
              || (i = exact_log2 (mask)) >= 0)
            {
              x = simplify_shift_const
                (x, LSHIFTRT, GET_MODE (x), XEXP (x, 0),
                 i < 0 ? INTVAL (XEXP (x, 1))
                 : GET_MODE_BITSIZE (GET_MODE (x)) - 1 - i);

              if (GET_CODE (x) != ASHIFTRT)
                return force_to_mode (x, mode, mask, reg, next_select);
            }
        }

      /* If MASK is 1, convert this to a LSHIFTRT.  This can be done
         even if the shift count isn't a constant.  */
      if (mask == 1)
        x = gen_binary (LSHIFTRT, GET_MODE (x), XEXP (x, 0), XEXP (x, 1));

    shiftrt:

      /* If this is a zero- or sign-extension operation that just affects bits
         we don't care about, remove it.  Be sure the call above returned
         something that is still a shift.  */

      if ((GET_CODE (x) == LSHIFTRT || GET_CODE (x) == ASHIFTRT)
          && GET_CODE (XEXP (x, 1)) == CONST_INT
          && INTVAL (XEXP (x, 1)) >= 0
          && (INTVAL (XEXP (x, 1))
              <= GET_MODE_BITSIZE (GET_MODE (x)) - (floor_log2 (mask) + 1))
          && GET_CODE (XEXP (x, 0)) == ASHIFT
          && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
          && INTVAL (XEXP (XEXP (x, 0), 1)) == INTVAL (XEXP (x, 1)))
        return force_to_mode (XEXP (XEXP (x, 0), 0), mode, mask,
                              reg, next_select);

      break;

    case ROTATE:
    case ROTATERT:
      /* If the shift count is constant and we can do computations
         in the mode of X, compute where the bits we care about are.
         Otherwise, we can't do anything.  Don't change the mode of
         the shift or propagate MODE into the shift, though.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
          && INTVAL (XEXP (x, 1)) >= 0)
        {
          temp = simplify_binary_operation (code == ROTATE ? ROTATERT : ROTATE,
                                            GET_MODE (x), GEN_INT (mask),
                                            XEXP (x, 1));
          if (temp && GET_CODE (temp) == CONST_INT)
            SUBST (XEXP (x, 0),
                   force_to_mode (XEXP (x, 0), GET_MODE (x),
                                  INTVAL (temp), reg, next_select));
        }
      break;

    case NEG:
      /* If we just want the low-order bit, the NEG isn't needed since it
         won't change the low-order bit.  */
      if (mask == 1)
        return force_to_mode (XEXP (x, 0), mode, mask, reg, just_select);

      /* We need any bits less significant than the most significant bit in
         MASK since carries from those bits will affect the bits we are
         interested in.  */
      mask = fuller_mask;
      goto unop;

    case NOT:
      /* (not FOO) is (xor FOO CONST), so if FOO is an LSHIFTRT, we can do the
         same as the XOR case above.  Ensure that the constant we form is not
         wider than the mode of X.  */

      if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
          && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
          && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
          && (INTVAL (XEXP (XEXP (x, 0), 1)) + floor_log2 (mask)
              < GET_MODE_BITSIZE (GET_MODE (x)))
          && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT)
        {
          temp = GEN_INT (mask << INTVAL (XEXP (XEXP (x, 0), 1)));
          temp = gen_binary (XOR, GET_MODE (x), XEXP (XEXP (x, 0), 0), temp);
          x = gen_binary (LSHIFTRT, GET_MODE (x), temp, XEXP (XEXP (x, 0), 1));

          return force_to_mode (x, mode, mask, reg, next_select);
        }

      /* (and (not FOO) CONST) is (not (or FOO (not CONST))), so we must
         use the full mask inside the NOT.  */
      mask = fuller_mask;

    unop:
      op0 = gen_lowpart_for_combine (op_mode,
                                     force_to_mode (XEXP (x, 0), mode, mask,
                                                    reg, next_select));
      if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0))
        x = gen_unary (code, op_mode, op_mode, op0);
      break;

    case NE:
      /* (and (ne FOO 0) CONST) can be (and FOO CONST) if CONST is included
         in STORE_FLAG_VALUE and FOO has a single bit that might be nonzero,
         which is equal to STORE_FLAG_VALUE.  */
      if ((mask & ~ STORE_FLAG_VALUE) == 0 && XEXP (x, 1) == const0_rtx
          && exact_log2 (nonzero_bits (XEXP (x, 0), mode)) >= 0
          && nonzero_bits (XEXP (x, 0), mode) == STORE_FLAG_VALUE)
        return force_to_mode (XEXP (x, 0), mode, mask, reg, next_select);

      break;

    case IF_THEN_ELSE:
      /* We have no way of knowing if the IF_THEN_ELSE can itself be
         written in a narrower mode.  We play it safe and do not do so.  */

      SUBST (XEXP (x, 1),
             gen_lowpart_for_combine (GET_MODE (x),
                                      force_to_mode (XEXP (x, 1), mode,
                                                     mask, reg, next_select)));
      SUBST (XEXP (x, 2),
             gen_lowpart_for_combine (GET_MODE (x),
                                      force_to_mode (XEXP (x, 2), mode,
                                                     mask, reg, next_select)));
      break;

    default:
      break;
    }

  /* Ensure we return a value of the proper mode.  */
  return gen_lowpart_for_combine (mode, x);
}
\f
/* Return nonzero if X is an expression that has one of two values depending on
   whether some other value is zero or nonzero.  In that case, we return the
   value that is being tested, *PTRUE is set to the value if the rtx being
   returned has a nonzero value, and *PFALSE is set to the other alternative.

   If we return zero, we set *PTRUE and *PFALSE to X.  */
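/* For instance (added commentary), (ne:SI (reg:SI A) (const_int 0)) is
   itself such an expression: the first case below returns (reg:SI A) with
   *PTRUE == const_true_rtx and *PFALSE == const0_rtx.  */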
static rtx
if_then_else_cond (x, ptrue, pfalse)
     rtx x;
     rtx *ptrue, *pfalse;
{
  enum machine_mode mode = GET_MODE (x);
  enum rtx_code code = GET_CODE (x);
  int size = GET_MODE_BITSIZE (mode);
  rtx cond0, cond1, true0, true1, false0, false1;
  unsigned HOST_WIDE_INT nz;

  /* If we are comparing a value against zero, we are done.  */
  if ((code == NE || code == EQ)
      && GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) == 0)
    {
      *ptrue = (code == NE) ? const_true_rtx : const0_rtx;
      *pfalse = (code == NE) ? const0_rtx : const_true_rtx;
      return XEXP (x, 0);
    }

  /* If this is a unary operation whose operand has one of two values, apply
     our opcode to compute those values.  */
  else if (GET_RTX_CLASS (code) == '1'
           && (cond0 = if_then_else_cond (XEXP (x, 0), &true0, &false0)) != 0)
    {
      *ptrue = gen_unary (code, mode, GET_MODE (XEXP (x, 0)), true0);
      *pfalse = gen_unary (code, mode, GET_MODE (XEXP (x, 0)), false0);
      return cond0;
    }

  /* If this is a COMPARE, do nothing, since the IF_THEN_ELSE we would
     make can't possibly match and would suppress other optimizations.  */
  else if (code == COMPARE)
    ;

  /* If this is a binary operation, see if either side has only one of two
     values.  If either one does or if both do and they are conditional on
     the same value, compute the new true and false values.  */
  else if (GET_RTX_CLASS (code) == 'c' || GET_RTX_CLASS (code) == '2'
           || GET_RTX_CLASS (code) == '<')
    {
      cond0 = if_then_else_cond (XEXP (x, 0), &true0, &false0);
      cond1 = if_then_else_cond (XEXP (x, 1), &true1, &false1);

      if ((cond0 != 0 || cond1 != 0)
          && ! (cond0 != 0 && cond1 != 0 && ! rtx_equal_p (cond0, cond1)))
        {
          /* If if_then_else_cond returned zero, then true/false are the
             same rtl.  We must copy one of them to prevent invalid rtl
             sharing.  */
          if (cond0 == 0)
            true0 = copy_rtx (true0);
          else if (cond1 == 0)
            true1 = copy_rtx (true1);

          *ptrue = gen_binary (code, mode, true0, true1);
          *pfalse = gen_binary (code, mode, false0, false1);
          return cond0 ? cond0 : cond1;
        }

      /* See if we have PLUS, IOR, XOR, MINUS or UMAX, where one of the
         operands is zero when the other is non-zero, and vice-versa,
         and STORE_FLAG_VALUE is 1 or -1.  */

      if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
          && (code == PLUS || code == IOR || code == XOR || code == MINUS
              || code == UMAX)
          && GET_CODE (XEXP (x, 0)) == MULT && GET_CODE (XEXP (x, 1)) == MULT)
        {
          rtx op0 = XEXP (XEXP (x, 0), 1);
          rtx op1 = XEXP (XEXP (x, 1), 1);

          cond0 = XEXP (XEXP (x, 0), 0);
          cond1 = XEXP (XEXP (x, 1), 0);

          if (GET_RTX_CLASS (GET_CODE (cond0)) == '<'
              && GET_RTX_CLASS (GET_CODE (cond1)) == '<'
              && reversible_comparison_p (cond1)
              && ((GET_CODE (cond0) == reverse_condition (GET_CODE (cond1))
                   && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 0))
                   && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 1)))
                  || ((swap_condition (GET_CODE (cond0))
                       == reverse_condition (GET_CODE (cond1)))
                      && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 1))
                      && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 0))))
              && ! side_effects_p (x))
            {
              *ptrue = gen_binary (MULT, mode, op0, const_true_rtx);
              *pfalse = gen_binary (MULT, mode,
                                    (code == MINUS
                                     ? gen_unary (NEG, mode, mode, op1) : op1),
                                    const_true_rtx);
              return cond0;
            }
        }

      /* Similarly for MULT, AND and UMIN, except that for these the result
         is always zero.  */
      if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
          && (code == MULT || code == AND || code == UMIN)
          && GET_CODE (XEXP (x, 0)) == MULT && GET_CODE (XEXP (x, 1)) == MULT)
        {
          cond0 = XEXP (XEXP (x, 0), 0);
          cond1 = XEXP (XEXP (x, 1), 0);

          if (GET_RTX_CLASS (GET_CODE (cond0)) == '<'
              && GET_RTX_CLASS (GET_CODE (cond1)) == '<'
              && reversible_comparison_p (cond1)
              && ((GET_CODE (cond0) == reverse_condition (GET_CODE (cond1))
                   && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 0))
                   && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 1)))
                  || ((swap_condition (GET_CODE (cond0))
                       == reverse_condition (GET_CODE (cond1)))
                      && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 1))
                      && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 0))))
              && ! side_effects_p (x))
            {
              *ptrue = *pfalse = const0_rtx;
              return cond0;
            }
        }
    }

  else if (code == IF_THEN_ELSE)
    {
      /* If we have IF_THEN_ELSE already, extract the condition and
         canonicalize it if it is NE or EQ.  */
      cond0 = XEXP (x, 0);
      *ptrue = XEXP (x, 1), *pfalse = XEXP (x, 2);
      if (GET_CODE (cond0) == NE && XEXP (cond0, 1) == const0_rtx)
        return XEXP (cond0, 0);
      else if (GET_CODE (cond0) == EQ && XEXP (cond0, 1) == const0_rtx)
        {
          *ptrue = XEXP (x, 2), *pfalse = XEXP (x, 1);
          return XEXP (cond0, 0);
        }
      else
        return cond0;
    }

  /* If X is a normal SUBREG with both inner and outer modes integral,
     we can narrow both the true and false values of the inner expression,
     if there is a condition.  */
  else if (code == SUBREG && GET_MODE_CLASS (mode) == MODE_INT
           && GET_MODE_CLASS (GET_MODE (SUBREG_REG (x))) == MODE_INT
           && GET_MODE_SIZE (mode) <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
           && 0 != (cond0 = if_then_else_cond (SUBREG_REG (x),
                                               &true0, &false0)))
    {
      if (GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) > UNITS_PER_WORD
          && (WORDS_BIG_ENDIAN || SUBREG_WORD (x) != 0))
        {
          true0 = operand_subword (true0, SUBREG_WORD (x), 0, mode);
          false0 = operand_subword (false0, SUBREG_WORD (x), 0, mode);
        }
      *ptrue = force_to_mode (true0, mode, GET_MODE_MASK (mode), NULL_RTX, 0);
      *pfalse
        = force_to_mode (false0, mode, GET_MODE_MASK (mode), NULL_RTX, 0);

      return cond0;
    }

  /* If X is a constant, this isn't special and will cause confusions
     if we treat it as such.  Likewise if it is equivalent to a constant.  */
  else if (CONSTANT_P (x)
           || ((cond0 = get_last_value (x)) != 0 && CONSTANT_P (cond0)))
    ;

  /* If X is known to be either 0 or -1, those are the true and
     false values when testing X.  */
  else if (num_sign_bit_copies (x, mode) == size)
    {
      *ptrue = constm1_rtx, *pfalse = const0_rtx;
      return x;
    }

  /* Likewise for 0 or a single bit.  */
  else if (exact_log2 (nz = nonzero_bits (x, mode)) >= 0)
    {
      *ptrue = GEN_INT (nz), *pfalse = const0_rtx;
      return x;
    }

  /* Otherwise fail; show no condition with true and false values the same.  */
  *ptrue = *pfalse = x;
  return 0;
}
\f
/* Return the value of expression X given the fact that condition COND
   is known to be true when applied to REG as its first operand and VAL
   as its second.  X is known to not be shared and so can be modified in
   place.

   We only handle the simplest cases, and specifically those cases that
   arise with IF_THEN_ELSE expressions.  */
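/* For instance (added commentary), if COND is GE with REG == R and
   VAL == const0_rtx, the ABS case below simplifies (abs R) to R; with
   COND == LT it becomes (neg R).  */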
7264static rtx
7265known_cond (x, cond, reg, val)
7266 rtx x;
7267 enum rtx_code cond;
7268 rtx reg, val;
7269{
7270 enum rtx_code code = GET_CODE (x);
f24ad0e4 7271 rtx temp;
6f7d635c 7272 const char *fmt;
1a26b032
RK
7273 int i, j;
7274
7275 if (side_effects_p (x))
7276 return x;
7277
7278 if (cond == EQ && rtx_equal_p (x, reg))
7279 return val;
7280
7281 /* If X is (abs REG) and we know something about REG's relationship
7282 with zero, we may be able to simplify this. */
7283
7284 if (code == ABS && rtx_equal_p (XEXP (x, 0), reg) && val == const0_rtx)
7285 switch (cond)
7286 {
7287 case GE: case GT: case EQ:
7288 return XEXP (x, 0);
7289 case LT: case LE:
0c1c8ea6
RK
7290 return gen_unary (NEG, GET_MODE (XEXP (x, 0)), GET_MODE (XEXP (x, 0)),
7291 XEXP (x, 0));
e9a25f70
JL
7292 default:
7293 break;
1a26b032
RK
7294 }
7295
7296 /* The only other cases we handle are MIN, MAX, and comparisons if the
7297 operands are the same as REG and VAL. */
7298
7299 else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == 'c')
7300 {
7301 if (rtx_equal_p (XEXP (x, 0), val))
7302 cond = swap_condition (cond), temp = val, val = reg, reg = temp;
7303
7304 if (rtx_equal_p (XEXP (x, 0), reg) && rtx_equal_p (XEXP (x, 1), val))
7305 {
7306 if (GET_RTX_CLASS (code) == '<')
7307 return (comparison_dominates_p (cond, code) ? const_true_rtx
7308 : (comparison_dominates_p (cond,
7309 reverse_condition (code))
7310 ? const0_rtx : x));
7311
7312 else if (code == SMAX || code == SMIN
7313 || code == UMIN || code == UMAX)
7314 {
7315 int unsignedp = (code == UMIN || code == UMAX);
7316
7317 if (code == SMAX || code == UMAX)
7318 cond = reverse_condition (cond);
7319
7320 switch (cond)
7321 {
7322 case GE: case GT:
7323 return unsignedp ? x : XEXP (x, 1);
7324 case LE: case LT:
7325 return unsignedp ? x : XEXP (x, 0);
7326 case GEU: case GTU:
7327 return unsignedp ? XEXP (x, 1) : x;
7328 case LEU: case LTU:
7329 return unsignedp ? XEXP (x, 0) : x;
7330 default:
7331 break;
7332 }
7333 }
7334 }
7335 }
7336
7337 fmt = GET_RTX_FORMAT (code);
7338 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7339 {
7340 if (fmt[i] == 'e')
7341 SUBST (XEXP (x, i), known_cond (XEXP (x, i), cond, reg, val));
7342 else if (fmt[i] == 'E')
7343 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
7344 SUBST (XVECEXP (x, i, j), known_cond (XVECEXP (x, i, j),
7345 cond, reg, val));
7346 }
7347
7348 return x;
7349}
7350\f
7351/* See if X and Y are equal for the purposes of seeing if we can rewrite an
7352 assignment as a field assignment. */
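/* Illustrative example (invented for exposition): X == (mem:SI A) and
   Y == (subreg:SI (mem:QI A) 0) compare equal here when the QImode MEM
   is the low part of the SImode MEM; a SUBREG of a MEM is necessarily
   paradoxical. */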
7353
7354static int
7355rtx_equal_for_field_assignment_p (x, y)
7356 rtx x;
7357 rtx y;
7358{
7359 if (x == y || rtx_equal_p (x, y))
7360 return 1;
7361
7362 if (x == 0 || y == 0 || GET_MODE (x) != GET_MODE (y))
7363 return 0;
7364
7365 /* Check for a paradoxical SUBREG of a MEM compared with the MEM.
7366 Note that all SUBREGs of MEM are paradoxical; otherwise they
7367 would have been rewritten. */
7368 if (GET_CODE (x) == MEM && GET_CODE (y) == SUBREG
7369 && GET_CODE (SUBREG_REG (y)) == MEM
7370 && rtx_equal_p (SUBREG_REG (y),
7371 gen_lowpart_for_combine (GET_MODE (SUBREG_REG (y)), x)))
7372 return 1;
7373
7374 if (GET_CODE (y) == MEM && GET_CODE (x) == SUBREG
7375 && GET_CODE (SUBREG_REG (x)) == MEM
7376 && rtx_equal_p (SUBREG_REG (x),
7377 gen_lowpart_for_combine (GET_MODE (SUBREG_REG (x)), y)))
7378 return 1;
7379
7380 /* We used to see if get_last_value of X and Y were the same but that's
7381 not correct. In one direction, we'll cause the assignment to have
7382 the wrong destination and in the other case, we'll import a register into this
7383 insn that might already have been dead. So fail if none of the
7384 above cases are true. */
7385 return 0;
7386}
7387\f
7388/* See if X, a SET operation, can be rewritten as a bit-field assignment.
7389 Return that assignment if so.
7390
7391 We only handle the most common cases. */
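/* Illustrative example (registers invented for exposition):
   (set (reg:SI 60) (ior:SI (ashift:SI (const_int 1) (reg:SI 61))
                            (reg:SI 60)))
   sets one bit of (reg:SI 60) and is rewritten below as
   (set (zero_extract:SI (reg:SI 60) (const_int 1) (reg:SI 61))
        (const_int 1)). */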
7392
7393static rtx
7394make_field_assignment (x)
7395 rtx x;
7396{
7397 rtx dest = SET_DEST (x);
7398 rtx src = SET_SRC (x);
7399 rtx assign;
7400 rtx rhs, lhs;
7401 HOST_WIDE_INT c1;
7402 int pos, len;
7403 rtx other;
7404 enum machine_mode mode;
7405
7406 /* If SRC was (and (not (ashift (const_int 1) POS)) DEST), this is
7407 a clear of a one-bit field. We will have changed it to
7408 (and (rotate (const_int -2) POS) DEST), so check for that. Also check
7409 for a SUBREG. */
7410
7411 if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == ROTATE
7412 && GET_CODE (XEXP (XEXP (src, 0), 0)) == CONST_INT
7413 && INTVAL (XEXP (XEXP (src, 0), 0)) == -2
7414 && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
7415 {
7416 assign = make_extraction (VOIDmode, dest, 0, XEXP (XEXP (src, 0), 1),
7417 1, 1, 1, 0);
7418 if (assign != 0)
7419 return gen_rtx_SET (VOIDmode, assign, const0_rtx);
7420 return x;
7421 }
7422
7423 else if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == SUBREG
7424 && subreg_lowpart_p (XEXP (src, 0))
7425 && (GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
7426 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (src, 0)))))
7427 && GET_CODE (SUBREG_REG (XEXP (src, 0))) == ROTATE
7428 && INTVAL (XEXP (SUBREG_REG (XEXP (src, 0)), 0)) == -2
7429 && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
7430 {
7431 assign = make_extraction (VOIDmode, dest, 0,
7432 XEXP (SUBREG_REG (XEXP (src, 0)), 1),
7433 1, 1, 1, 0);
7434 if (assign != 0)
7435 return gen_rtx_SET (VOIDmode, assign, const0_rtx);
7436 return x;
7437 }
7438
7439 /* If SRC is (ior (ashift (const_int 1) POS) DEST), this is a set of a
7440 one-bit field. */
7441 else if (GET_CODE (src) == IOR && GET_CODE (XEXP (src, 0)) == ASHIFT
7442 && XEXP (XEXP (src, 0), 0) == const1_rtx
7443 && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
7444 {
7445 assign = make_extraction (VOIDmode, dest, 0, XEXP (XEXP (src, 0), 1),
7446 1, 1, 1, 0);
7447 if (assign != 0)
7448 return gen_rtx_SET (VOIDmode, assign, const1_rtx);
7449 return x;
7450 }
7451
7452 /* The other case we handle is assignments into a constant-position
7453 field. They look like (ior/xor (and DEST C1) OTHER). If C1 represents
7454 a mask that has all one bits except for a group of zero bits and
7455 OTHER is known to have zeros where C1 has ones, this is such an
7456 assignment. Compute the position and length from C1. Shift OTHER
7457 to the appropriate position, force it to the required mode, and
7458 make the extraction. Check for the AND in both operands. */
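/* Illustrative example (constants invented for exposition): in QImode,
   C1 == 0xe7 gives (~ C1) & 0xff == 0x18, so get_pos_from_mask yields
   pos == 3, len == 2; if OTHER is known to be zero outside bits 3..4,
   the SET becomes a two-bit field assignment at position 3. */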
7459
7460 if (GET_CODE (src) != IOR && GET_CODE (src) != XOR)
7461 return x;
7462
7463 rhs = expand_compound_operation (XEXP (src, 0));
7464 lhs = expand_compound_operation (XEXP (src, 1));
7465
7466 if (GET_CODE (rhs) == AND
7467 && GET_CODE (XEXP (rhs, 1)) == CONST_INT
7468 && rtx_equal_for_field_assignment_p (XEXP (rhs, 0), dest))
7469 c1 = INTVAL (XEXP (rhs, 1)), other = lhs;
7470 else if (GET_CODE (lhs) == AND
7471 && GET_CODE (XEXP (lhs, 1)) == CONST_INT
7472 && rtx_equal_for_field_assignment_p (XEXP (lhs, 0), dest))
7473 c1 = INTVAL (XEXP (lhs, 1)), other = rhs;
7474 else
7475 return x;
7476
7477 pos = get_pos_from_mask ((~ c1) & GET_MODE_MASK (GET_MODE (dest)), &len);
7478 if (pos < 0 || pos + len > GET_MODE_BITSIZE (GET_MODE (dest))
7479 || GET_MODE_BITSIZE (GET_MODE (dest)) > HOST_BITS_PER_WIDE_INT
7480 || (c1 & nonzero_bits (other, GET_MODE (dest))) != 0)
7481 return x;
7482
7483 assign = make_extraction (VOIDmode, dest, pos, NULL_RTX, len, 1, 1, 0);
7484 if (assign == 0)
7485 return x;
7486
7487 /* The mode to use for the source is the mode of the assignment, or of
7488 what is inside a possible STRICT_LOW_PART. */
7489 mode = (GET_CODE (assign) == STRICT_LOW_PART
7490 ? GET_MODE (XEXP (assign, 0)) : GET_MODE (assign));
7491
7492 /* Shift OTHER right POS places and make it the source, restricting it
7493 to the proper length and mode. */
7494
7495 src = force_to_mode (simplify_shift_const (NULL_RTX, LSHIFTRT,
7496 GET_MODE (src), other, pos),
7497 mode,
7498 GET_MODE_BITSIZE (mode) >= HOST_BITS_PER_WIDE_INT
7499 ? GET_MODE_MASK (mode)
7500 : ((HOST_WIDE_INT) 1 << len) - 1,
7501 dest, 0);
7502
7503 return gen_rtx_combine (SET, VOIDmode, assign, src);
7504}
7505\f
7506/* See if X is of the form (+ (* a c) (* b c)) and convert to (* (+ a b) c)
7507 if so. */
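/* Illustrative example (registers invented for exposition):
   (plus:SI (mult:SI (reg:SI 60) (reg:SI 62))
            (mult:SI (reg:SI 61) (reg:SI 62)))
   becomes (mult:SI (plus:SI (reg:SI 60) (reg:SI 61)) (reg:SI 62)). */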
7508
7509static rtx
7510apply_distributive_law (x)
7511 rtx x;
7512{
7513 enum rtx_code code = GET_CODE (x);
7514 rtx lhs, rhs, other;
7515 rtx tem;
7516 enum rtx_code inner_code;
7517
7518 /* Distributivity is not true for floating point.
7519 It can change the value. So don't do it.
7520 -- rms and moshier@world.std.com. */
7521 if (FLOAT_MODE_P (GET_MODE (x)))
7522 return x;
7523
7524 /* The outer operation can only be one of the following: */
7525 if (code != IOR && code != AND && code != XOR
7526 && code != PLUS && code != MINUS)
7527 return x;
7528
7529 lhs = XEXP (x, 0), rhs = XEXP (x, 1);
7530
7531 /* If either operand is a primitive we can't do anything, so get out
7532 fast. */
7533 if (GET_RTX_CLASS (GET_CODE (lhs)) == 'o'
7534 || GET_RTX_CLASS (GET_CODE (rhs)) == 'o')
7535 return x;
7536
7537 lhs = expand_compound_operation (lhs);
7538 rhs = expand_compound_operation (rhs);
7539 inner_code = GET_CODE (lhs);
7540 if (inner_code != GET_CODE (rhs))
7541 return x;
7542
7543 /* See if the inner and outer operations distribute. */
7544 switch (inner_code)
7545 {
7546 case LSHIFTRT:
7547 case ASHIFTRT:
7548 case AND:
7549 case IOR:
7550 /* These all distribute except over PLUS. */
7551 if (code == PLUS || code == MINUS)
7552 return x;
7553 break;
7554
7555 case MULT:
7556 if (code != PLUS && code != MINUS)
7557 return x;
7558 break;
7559
7560 case ASHIFT:
7561 /* This is also a multiply, so it distributes over everything. */
7562 break;
7563
7564 case SUBREG:
7565 /* Non-paradoxical SUBREGs distribute over all operations, provided
7566 the inner modes and word numbers are the same, this is an extraction
7567 of a low-order part, we don't convert an fp operation to int or
7568 vice versa, and we would not be converting a single-word
7569 operation into a multi-word operation. The latter test is not
7570 required, but it prevents generating unneeded multi-word operations.
7571 Some of the previous tests are redundant given the latter test, but
7572 are retained because they are required for correctness.
7573
7574 We produce the result slightly differently in this case. */
7575
7576 if (GET_MODE (SUBREG_REG (lhs)) != GET_MODE (SUBREG_REG (rhs))
7577 || SUBREG_WORD (lhs) != SUBREG_WORD (rhs)
7578 || ! subreg_lowpart_p (lhs)
7579 || (GET_MODE_CLASS (GET_MODE (lhs))
7580 != GET_MODE_CLASS (GET_MODE (SUBREG_REG (lhs))))
7581 || (GET_MODE_SIZE (GET_MODE (lhs))
7582 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs))))
7583 || GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs))) > UNITS_PER_WORD)
7584 return x;
7585
7586 tem = gen_binary (code, GET_MODE (SUBREG_REG (lhs)),
7587 SUBREG_REG (lhs), SUBREG_REG (rhs));
7588 return gen_lowpart_for_combine (GET_MODE (x), tem);
7589
7590 default:
7591 return x;
7592 }
7593
7594 /* Set LHS and RHS to the inner operands (A and B in the example
7595 above) and set OTHER to the common operand (C in the example).
7596 There is only one way to do this unless the inner operation is
7597 commutative. */
7598 if (GET_RTX_CLASS (inner_code) == 'c'
7599 && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 0)))
7600 other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 1);
7601 else if (GET_RTX_CLASS (inner_code) == 'c'
7602 && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 1)))
7603 other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 0);
7604 else if (GET_RTX_CLASS (inner_code) == 'c'
7605 && rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 0)))
7606 other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 1);
7607 else if (rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 1)))
7608 other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 0);
7609 else
7610 return x;
7611
7612 /* Form the new inner operation, seeing if it simplifies first. */
7613 tem = gen_binary (code, GET_MODE (x), lhs, rhs);
7614
7615 /* There is one exception to the general way of distributing:
7616 (a | b) ^ (a | c) -> (~a) & (b ^ c) */
7617 if (code == XOR && inner_code == IOR)
7618 {
7619 inner_code = AND;
7620 other = gen_unary (NOT, GET_MODE (x), GET_MODE (x), other);
7621 }
7622
7623 /* We may be able to continue distributing the result, so call
7624 ourselves recursively on the inner operation before forming the
7625 outer operation, which we return. */
7626 return gen_binary (inner_code, GET_MODE (x),
7627 apply_distributive_law (tem), other);
7628}
7629\f
7630/* We have X, a logical `and' of VAROP with the constant CONSTOP, to be done
7631 in MODE.
7632
7633 Return an equivalent form, if different from X. Otherwise, return X. If
7634 X is zero, we are to always construct the equivalent form. */
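/* Illustrative example (invented for exposition): with MODE == SImode,
   VAROP == (neg:SI X) where X is known to be 0 or 1, and CONSTOP == 4,
   the NEG rule below produces (ashift:SI X (const_int 2)). */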
7635
7636static rtx
7637simplify_and_const_int (x, mode, varop, constop)
7638 rtx x;
7639 enum machine_mode mode;
7640 rtx varop;
7641 unsigned HOST_WIDE_INT constop;
7642 {
7643 unsigned HOST_WIDE_INT nonzero;
7644 int i;
7645
7646 /* Simplify VAROP knowing that we will be only looking at some of the
7647 bits in it. */
7648 varop = force_to_mode (varop, mode, constop, NULL_RTX, 0);
7649
7650 /* If VAROP is a CLOBBER, we will fail so return it; if it is a
7651 CONST_INT, we are done. */
7652 if (GET_CODE (varop) == CLOBBER || GET_CODE (varop) == CONST_INT)
7653 return varop;
7654
7655 /* See what bits may be nonzero in VAROP. Unlike the general case of
7656 a call to nonzero_bits, here we don't care about bits outside
7657 MODE. */
7658
7659 nonzero = nonzero_bits (varop, mode) & GET_MODE_MASK (mode);
7660 nonzero = trunc_int_for_mode (nonzero, mode);
7661
7662 /* Turn off all bits in the constant that are known to already be zero.
7663 Thus, if the AND isn't needed at all, we will have CONSTOP == NONZERO_BITS
7664 which is tested below. */
7665
7666 constop &= nonzero;
7667
7668 /* If we don't have any bits left, return zero. */
7669 if (constop == 0)
7670 return const0_rtx;
7671
7672 /* If VAROP is a NEG of something known to be zero or 1 and CONSTOP is
7673 a power of two, we can replace this with an ASHIFT. */
7674 if (GET_CODE (varop) == NEG && nonzero_bits (XEXP (varop, 0), mode) == 1
7675 && (i = exact_log2 (constop)) >= 0)
7676 return simplify_shift_const (NULL_RTX, ASHIFT, mode, XEXP (varop, 0), i);
7677
7678 /* If VAROP is an IOR or XOR, apply the AND to both branches of the IOR
7679 or XOR, then try to apply the distributive law. This may eliminate
7680 operations if either branch can be simplified because of the AND.
7681 It may also make some cases more complex, but those cases probably
7682 won't match a pattern either with or without this. */
7683
7684 if (GET_CODE (varop) == IOR || GET_CODE (varop) == XOR)
7685 return
7686 gen_lowpart_for_combine
7687 (mode,
7688 apply_distributive_law
7689 (gen_binary (GET_CODE (varop), GET_MODE (varop),
7690 simplify_and_const_int (NULL_RTX, GET_MODE (varop),
7691 XEXP (varop, 0), constop),
7692 simplify_and_const_int (NULL_RTX, GET_MODE (varop),
7693 XEXP (varop, 1), constop))));
7694
7695 /* Get VAROP in MODE. Try to get a SUBREG if not. Don't make a new SUBREG
7696 if we already had one (just check for the simplest cases). */
7697 if (x && GET_CODE (XEXP (x, 0)) == SUBREG
7698 && GET_MODE (XEXP (x, 0)) == mode
7699 && SUBREG_REG (XEXP (x, 0)) == varop)
7700 varop = XEXP (x, 0);
7701 else
7702 varop = gen_lowpart_for_combine (mode, varop);
7703
7704 /* If we can't make the SUBREG, try to return what we were given. */
7705 if (GET_CODE (varop) == CLOBBER)
7706 return x ? x : varop;
7707
7708 /* If we are only masking insignificant bits, return VAROP. */
7709 if (constop == nonzero)
7710 x = varop;
7711
7712 /* Otherwise, return an AND. See how much, if any, of X we can use. */
7713 else if (x == 0 || GET_CODE (x) != AND || GET_MODE (x) != mode)
7714 x = gen_binary (AND, mode, varop, GEN_INT (constop));
7715
7716 else
7717 {
7718 if (GET_CODE (XEXP (x, 1)) != CONST_INT
7719 || (unsigned HOST_WIDE_INT) INTVAL (XEXP (x, 1)) != constop)
7720 SUBST (XEXP (x, 1), GEN_INT (constop));
7721
7722 SUBST (XEXP (x, 0), varop);
7723 }
7724
7725 return x;
7726}
7727\f
7728/* We let num_sign_bit_copies recur into nonzero_bits as that is useful.
7729 We don't let nonzero_bits recur into num_sign_bit_copies, because that
7730 is less useful. We can't allow both, because that results in exponential
7731 run time recursion. There is a nullstone testcase that triggered
7732 this. This macro avoids accidental uses of num_sign_bit_copies. */
7733#define num_sign_bit_copies()
7734
7735/* Given an expression, X, compute which bits in X can be non-zero.
7736 We don't care about bits outside of those defined in MODE.
7737
7738 For most X this is simply GET_MODE_MASK (GET_MODE (MODE)), but if X is
7739 a shift, AND, or zero_extract, we can do better. */
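/* Illustrative example: in SImode, (and:SI X (const_int 255)) has
   nonzero bits within 0xff, so (ashift:SI (and:SI X (const_int 255))
   (const_int 4)) has nonzero bits within 0xff0. */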
7740
7741static unsigned HOST_WIDE_INT
7742nonzero_bits (x, mode)
7743 rtx x;
7744 enum machine_mode mode;
7745{
7746 unsigned HOST_WIDE_INT nonzero = GET_MODE_MASK (mode);
7747 unsigned HOST_WIDE_INT inner_nz;
7748 enum rtx_code code;
7749 int mode_width = GET_MODE_BITSIZE (mode);
7750 rtx tem;
7751
7752 /* For floating-point values, assume all bits are needed. */
7753 if (FLOAT_MODE_P (GET_MODE (x)) || FLOAT_MODE_P (mode))
7754 return nonzero;
7755
230d793d
RS
7756 /* If X is wider than MODE, use its mode instead. */
7757 if (GET_MODE_BITSIZE (GET_MODE (x)) > mode_width)
7758 {
7759 mode = GET_MODE (x);
7760 nonzero = GET_MODE_MASK (mode);
7761 mode_width = GET_MODE_BITSIZE (mode);
7762 }
7763
7764 if (mode_width > HOST_BITS_PER_WIDE_INT)
7765 /* Our only callers in this case look for single bit values. So
7766 just return the mode mask. Those tests will then be false. */
7767 return nonzero;
7768
7769#ifndef WORD_REGISTER_OPERATIONS
7770 /* If MODE is wider than X, but both are a single word for both the host
7771 and target machines, we can compute this from which bits of the
7772 object might be nonzero in its own mode, taking into account the fact
7773 that on many CISC machines, accessing an object in a wider mode
7774 causes the high-order bits to become undefined. So they are
7775 not known to be zero. */
7776
7777 if (GET_MODE (x) != VOIDmode && GET_MODE (x) != mode
7778 && GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD
7779 && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
7780 && GET_MODE_BITSIZE (mode) > GET_MODE_BITSIZE (GET_MODE (x)))
7781 {
7782 nonzero &= nonzero_bits (x, GET_MODE (x));
7783 nonzero |= GET_MODE_MASK (mode) & ~ GET_MODE_MASK (GET_MODE (x));
7784 return nonzero;
7785 }
7786#endif
7787
7788 code = GET_CODE (x);
7789 switch (code)
7790 {
7791 case REG:
7792#ifdef POINTERS_EXTEND_UNSIGNED
7793 /* If pointers extend unsigned and this is a pointer in Pmode, say that
7794 all the bits above ptr_mode are known to be zero. */
7795 if (POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
7796 && REGNO_POINTER_FLAG (REGNO (x)))
7797 nonzero &= GET_MODE_MASK (ptr_mode);
7798#endif
7799
7800#ifdef STACK_BOUNDARY
7801 /* If this is the stack pointer, we may know something about its
7802 alignment. If PUSH_ROUNDING is defined, it is possible for the
7803 stack to be momentarily aligned only to that amount, so we pick
7804 the least alignment. */
7805
7806 /* We can't check for arg_pointer_rtx here, because it is not
7807 guaranteed to have as much alignment as the stack pointer.
7808 In particular, in the Irix6 n64 ABI, the stack has 128 bit
7809 alignment but the argument pointer has only 64 bit alignment. */
7810
7811 if ((x == frame_pointer_rtx
7812 || x == stack_pointer_rtx
7813 || x == hard_frame_pointer_rtx
7814 || (REGNO (x) >= FIRST_VIRTUAL_REGISTER
7815 && REGNO (x) <= LAST_VIRTUAL_REGISTER))
7816#ifdef STACK_BIAS
7817 && !STACK_BIAS
7818#endif
7819 )
7820 {
7821 int sp_alignment = STACK_BOUNDARY / BITS_PER_UNIT;
7822
7823#ifdef PUSH_ROUNDING
7824 if (REGNO (x) == STACK_POINTER_REGNUM)
7825 sp_alignment = MIN (PUSH_ROUNDING (1), sp_alignment);
7826#endif
7827
7828 /* We must return here, otherwise we may get a worse result from
7829 one of the choices below. There is nothing useful below as
7830 far as the stack pointer is concerned. */
7831 return nonzero &= ~ (sp_alignment - 1);
7832 }
7833#endif
7834
7835 /* If X is a register whose nonzero bits value is current, use it.
7836 Otherwise, if X is a register whose value we can find, use that
7837 value. Otherwise, use the previously-computed global nonzero bits
7838 for this register. */
7839
7840 if (reg_last_set_value[REGNO (x)] != 0
7841 && reg_last_set_mode[REGNO (x)] == mode
7842 && (reg_last_set_label[REGNO (x)] == label_tick
7843 || (REGNO (x) >= FIRST_PSEUDO_REGISTER
7844 && REG_N_SETS (REGNO (x)) == 1
7845 && ! REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start,
7846 REGNO (x))))
7847 && INSN_CUID (reg_last_set[REGNO (x)]) < subst_low_cuid)
7848 return reg_last_set_nonzero_bits[REGNO (x)];
7849
7850 tem = get_last_value (x);
7851
7852 if (tem)
7853 {
7854#ifdef SHORT_IMMEDIATES_SIGN_EXTEND
7855 /* If X is narrower than MODE and TEM is a non-negative
7856 constant that would appear negative in the mode of X,
7857 sign-extend it for use in reg_nonzero_bits because some
7858 machines (maybe most) will actually do the sign-extension
7859 and this is the conservative approach.
7860
7861 ??? For 2.5, try to tighten up the MD files in this regard
7862 instead of this kludge. */
7863
7864 if (GET_MODE_BITSIZE (GET_MODE (x)) < mode_width
7865 && GET_CODE (tem) == CONST_INT
7866 && INTVAL (tem) > 0
7867 && 0 != (INTVAL (tem)
7868 & ((HOST_WIDE_INT) 1
7869 << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
7870 tem = GEN_INT (INTVAL (tem)
7871 | ((HOST_WIDE_INT) (-1)
7872 << GET_MODE_BITSIZE (GET_MODE (x))));
7873#endif
7874 return nonzero_bits (tem, mode);
7875 }
7876 else if (nonzero_sign_valid && reg_nonzero_bits[REGNO (x)])
7877 return reg_nonzero_bits[REGNO (x)] & nonzero;
7878 else
7879 return nonzero;
7880
7881 case CONST_INT:
7882#ifdef SHORT_IMMEDIATES_SIGN_EXTEND
7883 /* If X is negative in MODE, sign-extend the value. */
7884 if (INTVAL (x) > 0 && mode_width < BITS_PER_WORD
7885 && 0 != (INTVAL (x) & ((HOST_WIDE_INT) 1 << (mode_width - 1))))
7886 return (INTVAL (x) | ((HOST_WIDE_INT) (-1) << mode_width));
7887#endif
7888
7889 return INTVAL (x);
7890
7891 case MEM:
7892#ifdef LOAD_EXTEND_OP
7893 /* In many, if not most, RISC machines, reading a byte from memory
7894 zeros the rest of the register. Noticing that fact saves a lot
7895 of extra zero-extends. */
7896 if (LOAD_EXTEND_OP (GET_MODE (x)) == ZERO_EXTEND)
7897 nonzero &= GET_MODE_MASK (GET_MODE (x));
7898#endif
7899 break;
7900
7901 case EQ: case NE:
7902 case GT: case GTU:
7903 case LT: case LTU:
7904 case GE: case GEU:
7905 case LE: case LEU:
7906
7907 /* If this produces an integer result, we know which bits are set.
7908 Code here used to clear bits outside the mode of X, but that is
7909 now done above. */
7910
7911 if (GET_MODE_CLASS (mode) == MODE_INT
7912 && mode_width <= HOST_BITS_PER_WIDE_INT)
7913 nonzero = STORE_FLAG_VALUE;
7914 break;
7915
7916 case NEG:
7917#if 0
7918 /* Disabled to avoid exponential mutual recursion between nonzero_bits
7919 and num_sign_bit_copies. */
7920 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
7921 == GET_MODE_BITSIZE (GET_MODE (x)))
7922 nonzero = 1;
7923#endif
7924
7925 if (GET_MODE_SIZE (GET_MODE (x)) < mode_width)
7926 nonzero |= (GET_MODE_MASK (mode) & ~ GET_MODE_MASK (GET_MODE (x)));
7927 break;
7928
7929 case ABS:
7930#if 0
7931 /* Disabled to avoid exponential mutual recursion between nonzero_bits
7932 and num_sign_bit_copies. */
7933 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
7934 == GET_MODE_BITSIZE (GET_MODE (x)))
7935 nonzero = 1;
7936#endif
7937 break;
7938
7939 case TRUNCATE:
7940 nonzero &= (nonzero_bits (XEXP (x, 0), mode) & GET_MODE_MASK (mode));
7941 break;
7942
7943 case ZERO_EXTEND:
7944 nonzero &= nonzero_bits (XEXP (x, 0), mode);
7945 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
7946 nonzero &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
7947 break;
7948
7949 case SIGN_EXTEND:
7950 /* If the sign bit is known clear, this is the same as ZERO_EXTEND.
7951 Otherwise, show all the bits in the outer mode but not the inner
7952 may be non-zero. */
7953 inner_nz = nonzero_bits (XEXP (x, 0), mode);
7954 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
7955 {
7956 inner_nz &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
7957 if (inner_nz
7958 & (((HOST_WIDE_INT) 1
7959 << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1))))
7960 inner_nz |= (GET_MODE_MASK (mode)
7961 & ~ GET_MODE_MASK (GET_MODE (XEXP (x, 0))));
7962 }
7963
7964 nonzero &= inner_nz;
7965 break;
7966
7967 case AND:
7968 nonzero &= (nonzero_bits (XEXP (x, 0), mode)
7969 & nonzero_bits (XEXP (x, 1), mode));
7970 break;
7971
7972 case XOR: case IOR:
7973 case UMIN: case UMAX: case SMIN: case SMAX:
7974 nonzero &= (nonzero_bits (XEXP (x, 0), mode)
7975 | nonzero_bits (XEXP (x, 1), mode));
7976 break;
7977
7978 case PLUS: case MINUS:
7979 case MULT:
7980 case DIV: case UDIV:
7981 case MOD: case UMOD:
7982 /* We can apply the rules of arithmetic to compute the number of
7983 high- and low-order zero bits of these operations. We start by
7984 computing the width (position of the highest-order non-zero bit)
7985 and the number of low-order zero bits for each value. */
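 /* Illustrative example (values invented for exposition): if
 NZ0 == 0x0c (width 4, two low zeros) and NZ1 == 0x30 (width 6,
 four low zeros), a PLUS has result_width 7 and result_low 2,
 so its nonzero bits lie within 0x7c. */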
7986 {
7987 unsigned HOST_WIDE_INT nz0 = nonzero_bits (XEXP (x, 0), mode);
7988 unsigned HOST_WIDE_INT nz1 = nonzero_bits (XEXP (x, 1), mode);
7989 int width0 = floor_log2 (nz0) + 1;
7990 int width1 = floor_log2 (nz1) + 1;
7991 int low0 = floor_log2 (nz0 & -nz0);
7992 int low1 = floor_log2 (nz1 & -nz1);
7993 HOST_WIDE_INT op0_maybe_minusp
7994 = (nz0 & ((HOST_WIDE_INT) 1 << (mode_width - 1)));
7995 HOST_WIDE_INT op1_maybe_minusp
7996 = (nz1 & ((HOST_WIDE_INT) 1 << (mode_width - 1)));
7997 int result_width = mode_width;
7998 int result_low = 0;
7999
8000 switch (code)
8001 {
8002 case PLUS:
8003#ifdef STACK_BIAS
8004 if (STACK_BIAS
8005 && (XEXP (x, 0) == stack_pointer_rtx
8006 || XEXP (x, 0) == frame_pointer_rtx)
8007 && GET_CODE (XEXP (x, 1)) == CONST_INT)
8008 {
8009 int sp_alignment = STACK_BOUNDARY / BITS_PER_UNIT;
8010
8011 nz0 = (GET_MODE_MASK (mode) & ~ (sp_alignment - 1));
8012 nz1 = INTVAL (XEXP (x, 1)) - STACK_BIAS;
8013 width0 = floor_log2 (nz0) + 1;
8014 width1 = floor_log2 (nz1) + 1;
8015 low0 = floor_log2 (nz0 & -nz0);
8016 low1 = floor_log2 (nz1 & -nz1);
8017 }
8018#endif
8019 result_width = MAX (width0, width1) + 1;
8020 result_low = MIN (low0, low1);
8021 break;
8022 case MINUS:
8023 result_low = MIN (low0, low1);
8024 break;
8025 case MULT:
8026 result_width = width0 + width1;
8027 result_low = low0 + low1;
8028 break;
8029 case DIV:
8030 if (! op0_maybe_minusp && ! op1_maybe_minusp)
8031 result_width = width0;
8032 break;
8033 case UDIV:
8034 result_width = width0;
8035 break;
8036 case MOD:
8037 if (! op0_maybe_minusp && ! op1_maybe_minusp)
8038 result_width = MIN (width0, width1);
8039 result_low = MIN (low0, low1);
8040 break;
8041 case UMOD:
8042 result_width = MIN (width0, width1);
8043 result_low = MIN (low0, low1);
8044 break;
8045 default:
8046 abort ();
8047 }
8048
8049 if (result_width < mode_width)
8050 nonzero &= ((HOST_WIDE_INT) 1 << result_width) - 1;
8051
8052 if (result_low > 0)
8053 nonzero &= ~ (((HOST_WIDE_INT) 1 << result_low) - 1);
8054 }
8055 break;
8056
8057 case ZERO_EXTRACT:
8058 if (GET_CODE (XEXP (x, 1)) == CONST_INT
8059 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
8060 nonzero &= ((HOST_WIDE_INT) 1 << INTVAL (XEXP (x, 1))) - 1;
8061 break;
8062
8063 case SUBREG:
8064 /* If this is a SUBREG formed for a promoted variable that has
8065 been zero-extended, we know that at least the high-order bits
8066 are zero, though others might be too. */
8067
8068 if (SUBREG_PROMOTED_VAR_P (x) && SUBREG_PROMOTED_UNSIGNED_P (x))
8069 nonzero = (GET_MODE_MASK (GET_MODE (x))
8070 & nonzero_bits (SUBREG_REG (x), GET_MODE (x)));
8071
8072 /* If the inner mode is a single word for both the host and target
8073 machines, we can compute this from which bits of the inner
8074 object might be nonzero. */
8075 if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) <= BITS_PER_WORD
8076 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
8077 <= HOST_BITS_PER_WIDE_INT))
8078 {
8079 nonzero &= nonzero_bits (SUBREG_REG (x), mode);
8080
8081#if defined (WORD_REGISTER_OPERATIONS) && defined (LOAD_EXTEND_OP)
8082 /* If this is a typical RISC machine, we only have to worry
8083 about the way loads are extended. */
8084 if (LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND
8085 ? (nonzero
8086 & (1L << (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) - 1)))
8087 : LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) != ZERO_EXTEND)
8088#endif
8089 {
8090 /* On many CISC machines, accessing an object in a wider mode
8091 causes the high-order bits to become undefined. So they are
8092 not known to be zero. */
8093 if (GET_MODE_SIZE (GET_MODE (x))
8094 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
8095 nonzero |= (GET_MODE_MASK (GET_MODE (x))
8096 & ~ GET_MODE_MASK (GET_MODE (SUBREG_REG (x))));
8097 }
8098 }
8099 break;
8100
8101 case ASHIFTRT:
8102 case LSHIFTRT:
8103 case ASHIFT:
8104 case ROTATE:
8105 /* The nonzero bits are in two classes: any bits within MODE
8106 that aren't in GET_MODE (x) are always significant. The rest of the
8107 nonzero bits are those that are significant in the operand of
8108 the shift when shifted the appropriate number of bits. This
8109 shows that high-order bits are cleared by the right shift and
8110 low-order bits by left shifts. */
8111 if (GET_CODE (XEXP (x, 1)) == CONST_INT
8112 && INTVAL (XEXP (x, 1)) >= 0
8113 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
8114 {
8115 enum machine_mode inner_mode = GET_MODE (x);
8116 int width = GET_MODE_BITSIZE (inner_mode);
8117 int count = INTVAL (XEXP (x, 1));
8118 unsigned HOST_WIDE_INT mode_mask = GET_MODE_MASK (inner_mode);
8119 unsigned HOST_WIDE_INT op_nonzero = nonzero_bits (XEXP (x, 0), mode);
8120 unsigned HOST_WIDE_INT inner = op_nonzero & mode_mask;
8121 unsigned HOST_WIDE_INT outer = 0;
8122
8123 if (mode_width > width)
8124 outer = (op_nonzero & nonzero & ~ mode_mask);
8125
8126 if (code == LSHIFTRT)
8127 inner >>= count;
8128 else if (code == ASHIFTRT)
8129 {
8130 inner >>= count;
8131
8132 /* If the sign bit may have been nonzero before the shift, we
8133 need to mark all the places it could have been copied to
8134 by the shift as possibly nonzero. */
8135 if (inner & ((HOST_WIDE_INT) 1 << (width - 1 - count)))
8136 inner |= (((HOST_WIDE_INT) 1 << count) - 1) << (width - count);
8137 }
8138 else if (code == ASHIFT)
8139 inner <<= count;
8140 else
8141 inner = ((inner << (count % width)
8142 | (inner >> (width - (count % width)))) & mode_mask);
8143
8144 nonzero &= (outer | inner);
8145 }
8146 break;
8147
8148 case FFS:
8149 /* This is at most the number of bits in the mode. */
8150 nonzero = ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width) + 1)) - 1;
8151 break;
8152
8153 case IF_THEN_ELSE:
8154 nonzero &= (nonzero_bits (XEXP (x, 1), mode)
8155 | nonzero_bits (XEXP (x, 2), mode));
8156 break;
8157
8158 default:
8159 break;
8160 }
8161
8162 return nonzero;
8163}
8164
8165/* See the macro definition above. */
8166#undef num_sign_bit_copies
8167\f
8168/* Return the number of bits at the high-order end of X that are known to
8169 be equal to the sign bit. X will be used in mode MODE; if MODE is
8170 VOIDmode, X will be used in its own mode. The returned value will always
8171 be between 1 and the number of bits in MODE. */
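/* Illustrative example: for (sign_extend:SI (reg:QI 60)), the
   SIGN_EXTEND case below returns 32 - 8 + num_sign_bit_copies of the
   QImode operand, i.e. at least 25 sign bit copies on a 32-bit
   target. */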
8172
8173static int
8174num_sign_bit_copies (x, mode)
8175 rtx x;
8176 enum machine_mode mode;
8177{
8178 enum rtx_code code = GET_CODE (x);
8179 int bitwidth;
8180 int num0, num1, result;
8181 unsigned HOST_WIDE_INT nonzero;
8182 rtx tem;
8183
8184 /* If we weren't given a mode, use the mode of X. If the mode is still
8185 VOIDmode, we don't know anything. Likewise if one of the modes is
8186 floating-point. */
8187
8188 if (mode == VOIDmode)
8189 mode = GET_MODE (x);
8190
8191 if (mode == VOIDmode || FLOAT_MODE_P (mode) || FLOAT_MODE_P (GET_MODE (x)))
8192 return 1;
8193
8194 bitwidth = GET_MODE_BITSIZE (mode);
8195
8196 /* For a smaller object, just ignore the high bits. */
8197 if (bitwidth < GET_MODE_BITSIZE (GET_MODE (x)))
8198 return MAX (1, (num_sign_bit_copies (x, GET_MODE (x))
8199 - (GET_MODE_BITSIZE (GET_MODE (x)) - bitwidth)));
8200
8201 if (GET_MODE (x) != VOIDmode && bitwidth > GET_MODE_BITSIZE (GET_MODE (x)))
8202 {
8203#ifndef WORD_REGISTER_OPERATIONS
8204 /* If this machine does not do all register operations on the entire
8205 register and MODE is wider than the mode of X, we can say nothing
8206 at all about the high-order bits. */
8207 return 1;
8208#else
8209 /* Likewise on machines that do, if the mode of the object is smaller
8210 than a word and loads of that size don't sign extend, we can say
8211 nothing about the high order bits. */
8212 if (GET_MODE_BITSIZE (GET_MODE (x)) < BITS_PER_WORD
8213#ifdef LOAD_EXTEND_OP
8214 && LOAD_EXTEND_OP (GET_MODE (x)) != SIGN_EXTEND
8215#endif
8216 )
8217 return 1;
8218#endif
8219 }
8220
8221 switch (code)
8222 {
8223 case REG:
8224
8225#ifdef POINTERS_EXTEND_UNSIGNED
8226 /* If pointers extend signed and this is a pointer in Pmode, say that
8227 all the bits above ptr_mode are known to be sign bit copies. */
8228 if (! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode && mode == Pmode
8229 && REGNO_POINTER_FLAG (REGNO (x)))
8230 return GET_MODE_BITSIZE (Pmode) - GET_MODE_BITSIZE (ptr_mode) + 1;
8231#endif
8232
8233 if (reg_last_set_value[REGNO (x)] != 0
8234 && reg_last_set_mode[REGNO (x)] == mode
8235 && (reg_last_set_label[REGNO (x)] == label_tick
8236 || (REGNO (x) >= FIRST_PSEUDO_REGISTER
8237 && REG_N_SETS (REGNO (x)) == 1
8238 && ! REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start,
8239 REGNO (x))))
8240 && INSN_CUID (reg_last_set[REGNO (x)]) < subst_low_cuid)
8241 return reg_last_set_sign_bit_copies[REGNO (x)];
8242
8243 tem = get_last_value (x);
8244 if (tem != 0)
8245 return num_sign_bit_copies (tem, mode);
8246
8247 if (nonzero_sign_valid && reg_sign_bit_copies[REGNO (x)] != 0)
8248 return reg_sign_bit_copies[REGNO (x)];
8249 break;
8250
8251 case MEM:
8252#ifdef LOAD_EXTEND_OP
8253 /* Some RISC machines sign-extend all loads smaller than a word. */
8254 if (LOAD_EXTEND_OP (GET_MODE (x)) == SIGN_EXTEND)
8255 return MAX (1, bitwidth - GET_MODE_BITSIZE (GET_MODE (x)) + 1);
8256#endif
8257 break;
8258
8259 case CONST_INT:
8260 /* If the constant is negative, take its 1's complement and remask.
8261 Then see how many zero bits we have. */
8262 nonzero = INTVAL (x) & GET_MODE_MASK (mode);
8263 if (bitwidth <= HOST_BITS_PER_WIDE_INT
8264 && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
8265 nonzero = (~ nonzero) & GET_MODE_MASK (mode);
8266
8267 return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
8268
8269 case SUBREG:
8270 /* If this is a SUBREG for a promoted object that is sign-extended
8271 and we are looking at it in a wider mode, we know that at least the
8272 high-order bits are known to be sign bit copies. */
8273
8274 if (SUBREG_PROMOTED_VAR_P (x) && ! SUBREG_PROMOTED_UNSIGNED_P (x))
8275 return MAX (bitwidth - GET_MODE_BITSIZE (GET_MODE (x)) + 1,
8276 num_sign_bit_copies (SUBREG_REG (x), mode));
8277
8278 /* For a smaller object, just ignore the high bits. */
8279 if (bitwidth <= GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))))
8280 {
8281 num0 = num_sign_bit_copies (SUBREG_REG (x), VOIDmode);
8282 return MAX (1, (num0
8283 - (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
8284 - bitwidth)));
8285 }
8286
8287#ifdef WORD_REGISTER_OPERATIONS
8288#ifdef LOAD_EXTEND_OP
8289 /* For paradoxical SUBREGs on machines where all register operations
8290 affect the entire register, just look inside. Note that we are
8291 passing MODE to the recursive call, so the number of sign bit copies
8292 will remain relative to that mode, not the inner mode. */
8293
8294 /* This works only if loads sign extend. Otherwise, if we get a
8295 reload for the inner part, it may be loaded from the stack, and
8296 then we lose all sign bit copies that existed before the store
8297 to the stack. */
8298
8299 if ((GET_MODE_SIZE (GET_MODE (x))
8300 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
8301 && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND)
457816e2 8302 return num_sign_bit_copies (SUBREG_REG (x), mode);
2aec5b7a 8303#endif
457816e2 8304#endif
d0ab8cd3
RK
8305 break;
8306
8307 case SIGN_EXTRACT:
8308 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
8309 return MAX (1, bitwidth - INTVAL (XEXP (x, 1)));
8310 break;
8311
8312 case SIGN_EXTEND:
8313 return (bitwidth - GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
8314 + num_sign_bit_copies (XEXP (x, 0), VOIDmode));
8315
8316 case TRUNCATE:
0f41302f 8317 /* For a smaller object, just ignore the high bits. */
d0ab8cd3
RK
8318 num0 = num_sign_bit_copies (XEXP (x, 0), VOIDmode);
8319 return MAX (1, (num0 - (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
8320 - bitwidth)));
8321
8322 case NOT:
8323 return num_sign_bit_copies (XEXP (x, 0), mode);
8324
8325 case ROTATE: case ROTATERT:
8326 /* If we are rotating left by a number of bits less than the number
8327 of sign bit copies, we can just subtract that amount from the
8328 number. */
8329 if (GET_CODE (XEXP (x, 1)) == CONST_INT
8330 && INTVAL (XEXP (x, 1)) >= 0 && INTVAL (XEXP (x, 1)) < bitwidth)
8331 {
8332 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
8333 return MAX (1, num0 - (code == ROTATE ? INTVAL (XEXP (x, 1))
8334 : bitwidth - INTVAL (XEXP (x, 1))));
8335 }
8336 break;
8337
8338 case NEG:
8339 /* In general, this subtracts one sign bit copy. But if the value
8340 is known to be positive, the number of sign bit copies is the
951553af
RK
8341 same as that of the input. Finally, if the input has just one bit
8342 that might be nonzero, all the bits are copies of the sign bit. */
70186b34
BS
8343 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
8344 if (bitwidth > HOST_BITS_PER_WIDE_INT)
8345 return num0 > 1 ? num0 - 1 : 1;
8346
951553af
RK
8347 nonzero = nonzero_bits (XEXP (x, 0), mode);
8348 if (nonzero == 1)
d0ab8cd3
RK
8349 return bitwidth;
8350
d0ab8cd3 8351 if (num0 > 1
951553af 8352 && (((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero))
d0ab8cd3
RK
8353 num0--;
8354
8355 return num0;
8356
8357 case IOR: case AND: case XOR:
8358 case SMIN: case SMAX: case UMIN: case UMAX:
8359 /* Logical operations will preserve the number of sign-bit copies.
8360 MIN and MAX operations always return one of the operands. */
8361 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
8362 num1 = num_sign_bit_copies (XEXP (x, 1), mode);
8363 return MIN (num0, num1);
8364
8365 case PLUS: case MINUS:
8366 /* For addition and subtraction, we can have a 1-bit carry. However,
8367 if we are subtracting 1 from a positive number, there will not
8368 be such a carry. Furthermore, if the positive number is known to
8369 be 0 or 1, we know the result is either -1 or 0. */
8370
8371 if (code == PLUS && XEXP (x, 1) == constm1_rtx
8372 && bitwidth <= HOST_BITS_PER_WIDE_INT)
8373 {
8374 nonzero = nonzero_bits (XEXP (x, 0), mode);
8375 if ((((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero) == 0)
8376 return (nonzero == 1 || nonzero == 0 ? bitwidth
8377 : bitwidth - floor_log2 (nonzero) - 1);
8378 }
8379
8380 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
8381 num1 = num_sign_bit_copies (XEXP (x, 1), mode);
8382 return MAX (1, MIN (num0, num1) - 1);
8383
8384 case MULT:
8385 /* The number of bits of the product is the sum of the number of
8386 bits of both terms. However, unless one of the terms is known
8387 to be positive, we must allow for an additional bit since negating
8388 a negative number can remove one sign bit copy. */
8389
8390 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
8391 num1 = num_sign_bit_copies (XEXP (x, 1), mode);
8392
8393 result = bitwidth - (bitwidth - num0) - (bitwidth - num1);
8394 if (result > 0
8395 && (bitwidth > HOST_BITS_PER_WIDE_INT
8396 || (((nonzero_bits (XEXP (x, 0), mode)
8397 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
8398 && ((nonzero_bits (XEXP (x, 1), mode)
8399 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))))
8400 result--;
8401
8402 return MAX (1, result);
8403
8404 case UDIV:
8405 /* The result must be <= the first operand. If the first operand
8406 has the high bit set, we know nothing about the number of sign
8407 bit copies. */
8408 if (bitwidth > HOST_BITS_PER_WIDE_INT)
8409 return 1;
8410 else if ((nonzero_bits (XEXP (x, 0), mode)
8411 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
8412 return 1;
8413 else
8414 return num_sign_bit_copies (XEXP (x, 0), mode);
8415
8416 case UMOD:
8417 /* The result must be <= the second operand. */
8418 return num_sign_bit_copies (XEXP (x, 1), mode);
8419
8420 case DIV:
8421 /* Similar to unsigned division, except that we have to worry about
8422 the case where the divisor is negative, in which case we have
8423 to add 1. */
8424 result = num_sign_bit_copies (XEXP (x, 0), mode);
8425 if (result > 1
8426 && (bitwidth > HOST_BITS_PER_WIDE_INT
8427 || (nonzero_bits (XEXP (x, 1), mode)
8428 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
8429 result--;
8430
8431 return result;
8432
8433 case MOD:
8434 result = num_sign_bit_copies (XEXP (x, 1), mode);
8435 if (result > 1
8436 && (bitwidth > HOST_BITS_PER_WIDE_INT
8437 || (nonzero_bits (XEXP (x, 1), mode)
8438 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
8439 result--;
8440
8441 return result;
8442
8443 case ASHIFTRT:
8444 /* Shifts by a constant add to the number of bits equal to the
8445 sign bit. */
8446 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
8447 if (GET_CODE (XEXP (x, 1)) == CONST_INT
8448 && INTVAL (XEXP (x, 1)) > 0)
8449 num0 = MIN (bitwidth, num0 + INTVAL (XEXP (x, 1)));
8450
8451 return num0;
8452
8453 case ASHIFT:
d0ab8cd3
RK
8454 /* Left shifts destroy copies. */
8455 if (GET_CODE (XEXP (x, 1)) != CONST_INT
8456 || INTVAL (XEXP (x, 1)) < 0
8457 || INTVAL (XEXP (x, 1)) >= bitwidth)
8458 return 1;
8459
8460 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
8461 return MAX (1, num0 - INTVAL (XEXP (x, 1)));
8462
8463 case IF_THEN_ELSE:
8464 num0 = num_sign_bit_copies (XEXP (x, 1), mode);
8465 num1 = num_sign_bit_copies (XEXP (x, 2), mode);
8466 return MIN (num0, num1);
8467
8468 case EQ: case NE: case GE: case GT: case LE: case LT:
8469 case GEU: case GTU: case LEU: case LTU:
8470 if (STORE_FLAG_VALUE == -1)
8471 return bitwidth;
8472 break;
8473
8474 default:
8475 break;
8476 }
8477
8478 /* If we haven't been able to figure it out by one of the above rules,
8479 see if some of the high-order bits are known to be zero. If so,
8480 count those bits and return one less than that amount. If we can't
8481 safely compute the mask for this mode, always return BITWIDTH. */
8482
8483 if (bitwidth > HOST_BITS_PER_WIDE_INT)
8484 return 1;
8485
8486 nonzero = nonzero_bits (x, mode);
8487 return (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))
8488 ? 1 : bitwidth - floor_log2 (nonzero) - 1);
8489}
8490\f
8491/* Return the number of "extended" bits there are in X, when interpreted
8492 as a quantity in MODE whose signedness is indicated by UNSIGNEDP. For
8493 unsigned quantities, this is the number of high-order zero bits.
8494 For signed quantities, this is the number of copies of the sign bit
8495 minus 1. In both case, this function returns the number of "spare"
8496 bits. For example, if two quantities for which this function returns
8497 at least 1 are added, the addition is known not to overflow.
8498
8499 This function will always return 0 unless called during combine, which
8500 implies that it must be called from a define_split. */
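/* Illustrative example: for X == (zero_extend:SI (reg:QI 60)) with
   UNSIGNEDP nonzero on a 32-bit target, nonzero_bits is within 0xff,
   so this returns 31 - 7 == 24 spare bits. */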
8501
8502int
8503extended_count (x, mode, unsignedp)
8504 rtx x;
8505 enum machine_mode mode;
8506 int unsignedp;
8507{
8508 if (nonzero_sign_valid == 0)
8509 return 0;
8510
8511 return (unsignedp
ac49a949
RS
8512 ? (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
8513 && (GET_MODE_BITSIZE (mode) - 1
951553af 8514 - floor_log2 (nonzero_bits (x, mode))))
1a26b032
RK
8515 : num_sign_bit_copies (x, mode) - 1);
8516}
8517\f
230d793d
RS
8518/* This function is called from `simplify_shift_const' to merge two
8519 outer operations. Specifically, we have already found that we need
8520 to perform operation *POP0 with constant *PCONST0 at the outermost
8521 position. We would now like to also perform OP1 with constant CONST1
8522 (with *POP0 being done last).
8523
8524 Return 1 if we can do the operation and update *POP0 and *PCONST0 with
8525 the resulting operation. *PCOMP_P is set to 1 if we would need to
8526 complement the innermost operand, otherwise it is unchanged.
8527
8528 MODE is the mode in which the operation will be done. No bits outside
8529 the width of this mode matter. It is assumed that the width of this mode
5f4f0e22 8530 is smaller than or equal to HOST_BITS_PER_WIDE_INT.
230d793d
RS
8531
8532 If *POP0 or OP1 are NIL, it means no operation is required. Only NEG, PLUS,
8533 IOR, XOR, and AND are supported. We may set *POP0 to SET if the proper
8534 result is simply *PCONST0.
8535
8536 If the resulting operation cannot be expressed as one operation, we
8537 return 0 and do not change *POP0, *PCONST0, and *PCOMP_P. */
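/* Illustrative example: if *POP0 is XOR and OP1 is AND with the same
   constant C, then (a & C) ^ C == (~a) & C, so *POP0 becomes AND and
   *PCOMP_P is set to complement the innermost operand. */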
8538
8539static int
8540merge_outer_ops (pop0, pconst0, op1, const1, mode, pcomp_p)
8541 enum rtx_code *pop0;
8542 HOST_WIDE_INT *pconst0;
230d793d 8543 enum rtx_code op1;
8544 HOST_WIDE_INT const1;
8545 enum machine_mode mode;
8546 int *pcomp_p;
8547{
8548 enum rtx_code op0 = *pop0;
8549 HOST_WIDE_INT const0 = *pconst0;
8550
8551 const0 &= GET_MODE_MASK (mode);
8552 const1 &= GET_MODE_MASK (mode);
8553
8554 /* If OP0 is an AND, clear unimportant bits in CONST1. */
8555 if (op0 == AND)
8556 const1 &= const0;
8557
8558 /* If OP0 or OP1 is NIL, this is easy. Similarly if they are the same or
8559 if OP0 is SET. */
8560
8561 if (op1 == NIL || op0 == SET)
8562 return 1;
8563
8564 else if (op0 == NIL)
8565 op0 = op1, const0 = const1;
8566
8567 else if (op0 == op1)
8568 {
8569 switch (op0)
8570 {
8571 case AND:
8572 const0 &= const1;
8573 break;
8574 case IOR:
8575 const0 |= const1;
8576 break;
8577 case XOR:
8578 const0 ^= const1;
8579 break;
8580 case PLUS:
8581 const0 += const1;
8582 break;
8583 case NEG:
8584 op0 = NIL;
8585 break;
8586 default:
8587 break;
8588 }
8589 }
8590
8591 /* Otherwise, if either is a PLUS or NEG, we can't do anything. */
8592 else if (op0 == PLUS || op1 == PLUS || op0 == NEG || op1 == NEG)
8593 return 0;
8594
8595 /* If the two constants aren't the same, we can't do anything. The
8596 remaining six cases can all be done. */
8597 else if (const0 != const1)
8598 return 0;
8599
8600 else
8601 switch (op0)
8602 {
8603 case IOR:
8604 if (op1 == AND)
8605 /* (a & b) | b == b */
8606 op0 = SET;
8607 else /* op1 == XOR */
8608 /* (a ^ b) | b == a | b */
8609 {;}
8610 break;
8611
8612 case XOR:
8613 if (op1 == AND)
8614 /* (a & b) ^ b == (~a) & b */
8615 op0 = AND, *pcomp_p = 1;
8616 else /* op1 == IOR */
8617 /* (a | b) ^ b == a & ~b */
8618 op0 = AND, *pconst0 = ~ const0;
8619 break;
8620
8621 case AND:
8622 if (op1 == IOR)
8623 /* (a | b) & b == b */
8624 op0 = SET;
8625 else /* op1 == XOR */
8626 /* (a ^ b) & b == (~a) & b */
8627 *pcomp_p = 1;
8628 break;
8629 default:
8630 break;
8631 }
8632
8633 /* Check for NO-OP cases. */
8634 const0 &= GET_MODE_MASK (mode);
8635 if (const0 == 0
8636 && (op0 == IOR || op0 == XOR || op0 == PLUS))
8637 op0 = NIL;
8638 else if (const0 == 0 && op0 == AND)
8639 op0 = SET;
8640 else if ((unsigned HOST_WIDE_INT) const0 == GET_MODE_MASK (mode)
8641 && op0 == AND)
8642 op0 = NIL;
8643
8644 /* ??? Slightly redundant with the above mask, but not entirely.
8645 Moving this above means we'd have to sign-extend the mode mask
8646 for the final test. */
8647 const0 = trunc_int_for_mode (const0, mode);
8648
8649 *pop0 = op0;
8650 *pconst0 = const0;
8651
8652 return 1;
8653}
8654\f
8655/* Simplify a shift of VAROP by COUNT bits. CODE says what kind of shift.
8656 The result of the shift is RESULT_MODE. X, if non-zero, is an expression
8657 that we started with.
8658
8659 The shift is normally computed in the widest mode we find in VAROP, as
8660 long as it isn't a different number of words than RESULT_MODE. Exceptions
8661 are ASHIFTRT and ROTATE, which are always done in their original mode. */
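/* Illustrative example: in SImode, the nested-shift code below turns
   (lshiftrt:SI (ashift:SI X (const_int 3)) (const_int 3)) into
   (and:SI X (const_int 536870911)), i.e. it clears the top three
   bits. */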
8662
8663static rtx
8664simplify_shift_const (x, code, result_mode, varop, count)
8665 rtx x;
8666 enum rtx_code code;
8667 enum machine_mode result_mode;
8668 rtx varop;
8669 int count;
8670{
8671 enum rtx_code orig_code = code;
8672 int orig_count = count;
8673 enum machine_mode mode = result_mode;
8674 enum machine_mode shift_mode, tmode;
8675 int mode_words
8676 = (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
8677 /* We form (outer_op (code varop count) (outer_const)). */
8678 enum rtx_code outer_op = NIL;
8679 HOST_WIDE_INT outer_const = 0;
8680 rtx const_rtx;
8681 int complement_p = 0;
8682 rtx new;
8683
8684 /* If we were given an invalid count, don't do anything except exactly
8685 what was requested. */
8686
8687 if (count < 0 || count > GET_MODE_BITSIZE (mode))
8688 {
8689 if (x)
8690 return x;
8691
8692 return gen_rtx_fmt_ee (code, mode, varop, GEN_INT (count));
8693 }
8694
8695 /* Unless one of the branches of the `if' in this loop does a `continue',
8696 we will `break' the loop after the `if'. */
8697
8698 while (count != 0)
8699 {
8700 /* If we have an operand of (clobber (const_int 0)), just return that
8701 value. */
8702 if (GET_CODE (varop) == CLOBBER)
8703 return varop;
8704
8705 /* If we discovered we had to complement VAROP, leave. Making a NOT
8706 here would cause an infinite loop. */
8707 if (complement_p)
8708 break;
8709
8710 /* Convert ROTATERT to ROTATE. */
8711 if (code == ROTATERT)
8712 code = ROTATE, count = GET_MODE_BITSIZE (result_mode) - count;
8713
8714 /* We need to determine what mode we will do the shift in. If the
8715 shift is a right shift or a ROTATE, we must always do it in the mode
8716 it was originally done in. Otherwise, we can do it in MODE, the
8717 widest mode encountered. */
8718 shift_mode
8719 = (code == ASHIFTRT || code == LSHIFTRT || code == ROTATE
8720 ? result_mode : mode);
8721
8722 /* Handle cases where the count is greater than the size of the mode
8723 minus 1. For ASHIFT, use the size minus one as the count (this can
8724 occur when simplifying (lshiftrt (ashiftrt ..))). For rotates,
8725 take the count modulo the size. For other shifts, the result is
8726 zero.
8727
8728 Since these shifts are being produced by the compiler by combining
8729 multiple operations, each of which are defined, we know what the
8730 result is supposed to be. */
8731
8732 if (count > GET_MODE_BITSIZE (shift_mode) - 1)
8733 {
8734 if (code == ASHIFTRT)
8735 count = GET_MODE_BITSIZE (shift_mode) - 1;
8736 else if (code == ROTATE || code == ROTATERT)
8737 count %= GET_MODE_BITSIZE (shift_mode);
8738 else
8739 {
8740 /* We can't simply return zero because there may be an
8741 outer op. */
8742 varop = const0_rtx;
8743 count = 0;
8744 break;
8745 }
8746 }
8747
8748 /* Negative counts are invalid and should not have been made (a
8749 programmer-specified negative count should have been handled
8750 above). */
8751 else if (count < 0)
8752 abort ();
8753
8754 /* An arithmetic right shift of a quantity known to be -1 or 0
8755 is a no-op. */
8756 if (code == ASHIFTRT
8757 && (num_sign_bit_copies (varop, shift_mode)
8758 == GET_MODE_BITSIZE (shift_mode)))
8759 {
8760 count = 0;
8761 break;
8762 }
8763
8764 /* If we are doing an arithmetic right shift and discarding all but
8765 the sign bit copies, this is equivalent to doing a shift by the
8766 bitsize minus one. Convert it into that shift because it will often
8767 allow other simplifications. */
8768
8769 if (code == ASHIFTRT
8770 && (count + num_sign_bit_copies (varop, shift_mode)
8771 >= GET_MODE_BITSIZE (shift_mode)))
8772 count = GET_MODE_BITSIZE (shift_mode) - 1;
8773
8774 /* We simplify the tests below and elsewhere by converting
8775 ASHIFTRT to LSHIFTRT if we know the sign bit is clear.
8776 `make_compound_operation' will convert it to an ASHIFTRT for
8777 those machines (such as Vax) that don't have an LSHIFTRT. */
8778 if (GET_MODE_BITSIZE (shift_mode) <= HOST_BITS_PER_WIDE_INT
8779 && code == ASHIFTRT
8780 && ((nonzero_bits (varop, shift_mode)
8781 & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (shift_mode) - 1)))
8782 == 0))
8783 code = LSHIFTRT;
8784
8785 switch (GET_CODE (varop))
8786 {
8787 case SIGN_EXTEND:
8788 case ZERO_EXTEND:
8789 case SIGN_EXTRACT:
8790 case ZERO_EXTRACT:
8791 new = expand_compound_operation (varop);
8792 if (new != varop)
8793 {
8794 varop = new;
8795 continue;
8796 }
8797 break;
8798
8799 case MEM:
8800 /* If we have (xshiftrt (mem ...) C) and C is MODE_WIDTH
8801 minus the width of a smaller mode, we can do this with a
8802 SIGN_EXTEND or ZERO_EXTEND from the narrower memory location. */
8803 if ((code == ASHIFTRT || code == LSHIFTRT)
8804 && ! mode_dependent_address_p (XEXP (varop, 0))
8805 && ! MEM_VOLATILE_P (varop)
8806 && (tmode = mode_for_size (GET_MODE_BITSIZE (mode) - count,
8807 MODE_INT, 1)) != BLKmode)
8808 {
8809 if (BYTES_BIG_ENDIAN)
8810 new = gen_rtx_MEM (tmode, XEXP (varop, 0));
8811 else
8812 new = gen_rtx_MEM (tmode,
8813 plus_constant (XEXP (varop, 0),
8814 count / BITS_PER_UNIT));
8815 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (varop);
8816 MEM_COPY_ATTRIBUTES (new, varop);
8817 varop = gen_rtx_combine (code == ASHIFTRT ? SIGN_EXTEND
8818 : ZERO_EXTEND, mode, new);
8819 count = 0;
8820 continue;
8821 }
8822 break;
8823
8824 case USE:
8825 /* Similar to the case above, except that we can only do this if
8826 the resulting mode is the same as that of the underlying
8827 MEM and adjust the address depending on the *bits* endianness
8828 because of the way that bit-field extract insns are defined. */
8829 if ((code == ASHIFTRT || code == LSHIFTRT)
8830 && (tmode = mode_for_size (GET_MODE_BITSIZE (mode) - count,
8831 MODE_INT, 1)) != BLKmode
8832 && tmode == GET_MODE (XEXP (varop, 0)))
8833 {
8834 if (BITS_BIG_ENDIAN)
8835 new = XEXP (varop, 0);
8836 else
8837 {
8838 new = copy_rtx (XEXP (varop, 0));
8839 SUBST (XEXP (new, 0),
8840 plus_constant (XEXP (new, 0),
8841 count / BITS_PER_UNIT));
8842 }
8843
8844 varop = gen_rtx_combine (code == ASHIFTRT ? SIGN_EXTEND
8845 : ZERO_EXTEND, mode, new);
8846 count = 0;
8847 continue;
8848 }
8849 break;
8850
8851 case SUBREG:
8852 /* If VAROP is a SUBREG, strip it as long as the inner operand has
8853 the same number of words as what we've seen so far. Then store
8854 the widest mode in MODE. */
8855 if (subreg_lowpart_p (varop)
8856 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop)))
8857 > GET_MODE_SIZE (GET_MODE (varop)))
8858 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop)))
8859 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
8860 == mode_words))
8861 {
8862 varop = SUBREG_REG (varop);
8863 if (GET_MODE_SIZE (GET_MODE (varop)) > GET_MODE_SIZE (mode))
8864 mode = GET_MODE (varop);
8865 continue;
8866 }
8867 break;
8868
8869 case MULT:
8870 /* Some machines use MULT instead of ASHIFT because MULT
8871 is cheaper. But it is still better on those machines to
8872 merge two shifts into one. */
8873 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
8874 && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0)
8875 {
8876 varop = gen_binary (ASHIFT, GET_MODE (varop), XEXP (varop, 0),
8877 GEN_INT (exact_log2 (INTVAL (XEXP (varop, 1)))));
8878 continue;
8879 }
8880 break;
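/* For example (illustrative): in (lshiftrt:SI (mult:SI X 8) 2),
   8 is 1 << 3, so VAROP is rewritten as (ashift:SI X 3) and the
   nested-shift code below can then merge the two shifts.  */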
8881
8882 case UDIV:
8883 /* Similar, for when divides are cheaper. */
8884 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
8885 && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0)
8886 {
8887 varop = gen_binary (LSHIFTRT, GET_MODE (varop), XEXP (varop, 0),
8888 GEN_INT (exact_log2 (INTVAL (XEXP (varop, 1)))));
8889 continue;
8890 }
8891 break;
8892
8893 case ASHIFTRT:
8894 /* If we are extracting just the sign bit of an arithmetic right
8895 shift, that shift is not needed. */
8896 if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1)
8897 {
8898 varop = XEXP (varop, 0);
8899 continue;
8900 }
8901
8902 /* ... fall through ... */
8903
8904 case LSHIFTRT:
8905 case ASHIFT:
8906 case ROTATE:
8907 /* Here we have two nested shifts. The result is usually the
8908 AND of a new shift with a mask. We compute the result below. */
8909 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
8910 && INTVAL (XEXP (varop, 1)) >= 0
8911 && INTVAL (XEXP (varop, 1)) < GET_MODE_BITSIZE (GET_MODE (varop))
8912 && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
8913 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
8914 {
8915 enum rtx_code first_code = GET_CODE (varop);
8916 int first_count = INTVAL (XEXP (varop, 1));
8917 unsigned HOST_WIDE_INT mask;
8918 rtx mask_rtx;
8919
8920 /* We have one common special case. We can't do any merging if
8921 the inner code is an ASHIFTRT of a smaller mode. However, if
8922 we have (ashift:M1 (subreg:M1 (ashiftrt:M2 FOO C1) 0) C2)
8923 with C2 == GET_MODE_BITSIZE (M1) - GET_MODE_BITSIZE (M2),
8924 we can convert it to
8925 (ashiftrt:M1 (ashift:M1 (and:M1 (subreg:M1 FOO 0) C3) C2) C1).
8926 This simplifies certain SIGN_EXTEND operations. */
8927 if (code == ASHIFT && first_code == ASHIFTRT
8928 && (GET_MODE_BITSIZE (result_mode)
8929 - GET_MODE_BITSIZE (GET_MODE (varop))) == count)
8930 {
8931 /* C3 has the low-order C1 bits zero. */
8932
8933 mask = (GET_MODE_MASK (mode)
8934 & ~ (((HOST_WIDE_INT) 1 << first_count) - 1));
8935
8936 varop = simplify_and_const_int (NULL_RTX, result_mode,
8937 XEXP (varop, 0), mask);
8938 varop = simplify_shift_const (NULL_RTX, ASHIFT, result_mode,
8939 varop, count);
8940 count = first_count;
8941 code = ASHIFTRT;
8942 continue;
8943 }
8944
8945 /* If this was (ashiftrt (ashift foo C1) C2) and FOO has more
8946 than C1 high-order bits equal to the sign bit, we can convert
8947 this to either an ASHIFT or an ASHIFTRT depending on the
8948 two counts.
8949
8950 We cannot do this if VAROP's mode is not SHIFT_MODE. */
8951
8952 if (code == ASHIFTRT && first_code == ASHIFT
8953 && GET_MODE (varop) == shift_mode
8954 && (num_sign_bit_copies (XEXP (varop, 0), shift_mode)
8955 > first_count))
230d793d 8956 {
8957 count -= first_count;
8958 if (count < 0)
8959 count = - count, code = ASHIFT;
8960 varop = XEXP (varop, 0);
8961 continue;
8962 }
8963
8964 /* There are some cases we can't do. If CODE is ASHIFTRT,
8965 we can only do this if FIRST_CODE is also ASHIFTRT.
8966
8967 We can't do the case when CODE is ROTATE and FIRST_CODE is
8968 ASHIFTRT.
8969
8970 If the mode of this shift is not the mode of the outer shift,
8971 we can't do this if either shift is a right shift or ROTATE.
8972
8973 Finally, we can't do any of these if the mode is too wide
8974 unless the codes are the same.
8975
8976 Handle the case where the shift codes are the same
8977 first. */
8978
8979 if (code == first_code)
8980 {
8981 if (GET_MODE (varop) != result_mode
8982 && (code == ASHIFTRT || code == LSHIFTRT
8983 || code == ROTATE))
8984 break;
8985
8986 count += first_count;
8987 varop = XEXP (varop, 0);
8988 continue;
8989 }
8990
8991 if (code == ASHIFTRT
8992 || (code == ROTATE && first_code == ASHIFTRT)
8993 || GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT
8994 || (GET_MODE (varop) != result_mode
8995 && (first_code == ASHIFTRT || first_code == LSHIFTRT
8996 || first_code == ROTATE
8997 || code == ROTATE)))
8998 break;
8999
9000 /* To compute the mask to apply after the shift, shift the
9001 nonzero bits of the inner shift the same way the
9002 outer shift will. */
9003
9004 mask_rtx = GEN_INT (nonzero_bits (varop, GET_MODE (varop)));
9005
9006 mask_rtx
9007 = simplify_binary_operation (code, result_mode, mask_rtx,
9008 GEN_INT (count));
9009
9010 /* Give up if we can't compute an outer operation to use. */
9011 if (mask_rtx == 0
9012 || GET_CODE (mask_rtx) != CONST_INT
9013 || ! merge_outer_ops (&outer_op, &outer_const, AND,
9014 INTVAL (mask_rtx),
9015 result_mode, &complement_p))
9016 break;
9017
9018 /* If the shifts are in the same direction, we add the
9019 counts. Otherwise, we subtract them. */
9020 if ((code == ASHIFTRT || code == LSHIFTRT)
9021 == (first_code == ASHIFTRT || first_code == LSHIFTRT))
9022 count += first_count;
9023 else
9024 count -= first_count;
9025
9026 /* If COUNT is positive, the new shift is usually CODE,
9027 except for the two exceptions below, in which case it is
9028 FIRST_CODE. If the count is negative, FIRST_CODE should
9029 always be used. */
9030 if (count > 0
9031 && ((first_code == ROTATE && code == ASHIFT)
9032 || (first_code == ASHIFTRT && code == LSHIFTRT)))
9033 code = first_code;
9034 else if (count < 0)
9035 code = first_code, count = - count;
9036
9037 varop = XEXP (varop, 0);
9038 continue;
9039 }
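/* A worked example of the merge above (illustrative, SImode):
   for (ashift:SI (lshiftrt:SI X 4) 2) the directions differ, so
   COUNT becomes 2 - 4 = -2, flipping CODE to LSHIFTRT with COUNT
   2; the shifted nonzero-bits mask 0x0fffffff << 2 = 0x3ffffffc
   is applied outside, giving (and:SI (lshiftrt:SI X 2)
   0x3ffffffc).  */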
9040
9041 /* If we have (A << B << C) for any shift, we can convert this to
9042 (A << C << B). This wins if A is a constant. Only try this if
9043 B is not a constant. */
9044
9045 else if (GET_CODE (varop) == code
9046 && GET_CODE (XEXP (varop, 1)) != CONST_INT
9047 && 0 != (new
9048 = simplify_binary_operation (code, mode,
9049 XEXP (varop, 0),
9050 GEN_INT (count))))
9051 {
9052 varop = gen_rtx_combine (code, mode, new, XEXP (varop, 1));
9053 count = 0;
9054 continue;
9055 }
9056 break;
9057
9058 case NOT:
9059 /* Make this fit the case below. */
9060 varop = gen_rtx_combine (XOR, mode, XEXP (varop, 0),
9061 GEN_INT (GET_MODE_MASK (mode)));
9062 continue;
9063
9064 case IOR:
9065 case AND:
9066 case XOR:
9067 /* If we have (xshiftrt (ior (plus X (const_int -1)) X) C)
9068 with C the size of VAROP - 1 and the shift is logical if
9069 STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
9070 we have an (le X 0) operation. If we have an arithmetic shift
9071 and STORE_FLAG_VALUE is 1 or we have a logical shift with
9072 STORE_FLAG_VALUE of -1, we have a (neg (le X 0)) operation. */
9073
9074 if (GET_CODE (varop) == IOR && GET_CODE (XEXP (varop, 0)) == PLUS
9075 && XEXP (XEXP (varop, 0), 1) == constm1_rtx
9076 && (STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
9077 && (code == LSHIFTRT || code == ASHIFTRT)
9078 && count == GET_MODE_BITSIZE (GET_MODE (varop)) - 1
9079 && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
9080 {
9081 count = 0;
9082 varop = gen_rtx_combine (LE, GET_MODE (varop), XEXP (varop, 1),
9083 const0_rtx);
9084
9085 if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
9086 varop = gen_rtx_combine (NEG, GET_MODE (varop), varop);
9087
9088 continue;
9089 }
9090
9091 /* If we have (shift (logical)), move the logical to the outside
9092 to allow it to possibly combine with another logical and the
9093 shift to combine with another shift. This also canonicalizes to
9094 what a ZERO_EXTRACT looks like. Also, some machines have
9095 (and (shift)) insns. */
9096
9097 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
9098 && (new = simplify_binary_operation (code, result_mode,
9099 XEXP (varop, 1),
9100 GEN_INT (count))) != 0
9101 && GET_CODE (new) == CONST_INT
9102 && merge_outer_ops (&outer_op, &outer_const, GET_CODE (varop),
9103 INTVAL (new), result_mode, &complement_p))
9104 {
9105 varop = XEXP (varop, 0);
9106 continue;
9107 }
9108
9109 /* If we can't do that, try to simplify the shift in each arm of the
9110 logical expression, make a new logical expression, and apply
9111 the inverse distributive law. */
9112 {
9113 rtx lhs = simplify_shift_const (NULL_RTX, code, shift_mode,
9114 XEXP (varop, 0), count);
9115 rtx rhs = simplify_shift_const (NULL_RTX, code, shift_mode,
9116 XEXP (varop, 1), count);
9117
9118 varop = gen_binary (GET_CODE (varop), shift_mode, lhs, rhs);
9119 varop = apply_distributive_law (varop);
9120
9121 count = 0;
9122 }
9123 break;
9124
9125 case EQ:
9126 /* Convert (lshiftrt (eq FOO 0) C) to (xor FOO 1) if STORE_FLAG_VALUE
9127 says that the sign bit can be tested, FOO has mode MODE, C is
9128 GET_MODE_BITSIZE (MODE) - 1, and FOO has only its low-order bit
9129 that may be nonzero. */
9130 if (code == LSHIFTRT
9131 && XEXP (varop, 1) == const0_rtx
9132 && GET_MODE (XEXP (varop, 0)) == result_mode
9133 && count == GET_MODE_BITSIZE (result_mode) - 1
9134 && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
9135 && ((STORE_FLAG_VALUE
9136 & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (result_mode) - 1))))
9137 && nonzero_bits (XEXP (varop, 0), result_mode) == 1
9138 && merge_outer_ops (&outer_op, &outer_const, XOR,
9139 (HOST_WIDE_INT) 1, result_mode,
9140 &complement_p))
9141 {
9142 varop = XEXP (varop, 0);
9143 count = 0;
9144 continue;
9145 }
9146 break;
9147
9148 case NEG:
9149 /* (lshiftrt (neg A) C) where A is either 0 or 1 and C is one less
9150 than the number of bits in the mode is equivalent to A. */
9151 if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1
9152 && nonzero_bits (XEXP (varop, 0), result_mode) == 1)
9153 {
9154 varop = XEXP (varop, 0);
9155 count = 0;
9156 continue;
9157 }
9158
9159 /* NEG commutes with ASHIFT since it is multiplication. Move the
9160 NEG outside to allow shifts to combine. */
9161 if (code == ASHIFT
9162 && merge_outer_ops (&outer_op, &outer_const, NEG,
9163 (HOST_WIDE_INT) 0, result_mode,
9164 &complement_p))
9165 {
9166 varop = XEXP (varop, 0);
9167 continue;
9168 }
9169 break;
9170
9171 case PLUS:
9172 /* (lshiftrt (plus A -1) C) where A is either 0 or 1 and C
9173 is one less than the number of bits in the mode is
9174 equivalent to (xor A 1). */
9175 if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1
9176 && XEXP (varop, 1) == constm1_rtx
9177 && nonzero_bits (XEXP (varop, 0), result_mode) == 1
9178 && merge_outer_ops (&outer_op, &outer_const, XOR,
9179 (HOST_WIDE_INT) 1, result_mode,
9180 &complement_p))
9181 {
9182 count = 0;
9183 varop = XEXP (varop, 0);
9184 continue;
9185 }
9186
9187 /* If we have (xshiftrt (plus FOO BAR) C), and the only bits
9188 that might be nonzero in BAR are those being shifted out and those
9189 bits are known zero in FOO, we can replace the PLUS with FOO.
9190 Similarly in the other operand order. This code occurs when
9191 we are computing the size of a variable-size array. */
9192
9193 if ((code == ASHIFTRT || code == LSHIFTRT)
9194 && count < HOST_BITS_PER_WIDE_INT
9195 && nonzero_bits (XEXP (varop, 1), result_mode) >> count == 0
9196 && (nonzero_bits (XEXP (varop, 1), result_mode)
9197 & nonzero_bits (XEXP (varop, 0), result_mode)) == 0)
9198 {
9199 varop = XEXP (varop, 0);
9200 continue;
9201 }
9202 else if ((code == ASHIFTRT || code == LSHIFTRT)
9203 && count < HOST_BITS_PER_WIDE_INT
9204 && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
9205 && 0 == (nonzero_bits (XEXP (varop, 0), result_mode)
9206 >> count)
9207 && 0 == (nonzero_bits (XEXP (varop, 0), result_mode)
9208 & nonzero_bits (XEXP (varop, 1),
9209 result_mode)))
9210 {
9211 varop = XEXP (varop, 1);
9212 continue;
9213 }
9214
9215 /* (ashift (plus foo C) N) is (plus (ashift foo N) C'). */
9216 if (code == ASHIFT
9217 && GET_CODE (XEXP (varop, 1)) == CONST_INT
9218 && (new = simplify_binary_operation (ASHIFT, result_mode,
9219 XEXP (varop, 1),
9220 GEN_INT (count))) != 0
9221 && GET_CODE (new) == CONST_INT
9222 && merge_outer_ops (&outer_op, &outer_const, PLUS,
9223 INTVAL (new), result_mode, &complement_p))
9224 {
9225 varop = XEXP (varop, 0);
9226 continue;
9227 }
9228 break;
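/* For example (illustrative): in (ashift:SI (plus:SI X 3) 2) the
   constant is shifted too, giving an outer PLUS of 3 << 2 = 12,
   so the result is (plus:SI (ashift:SI X 2) 12).  */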
9229
9230 case MINUS:
9231 /* If we have (xshiftrt (minus (ashiftrt X C) X) C)
9232 with C the size of VAROP - 1 and the shift is logical if
9233 STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
9234 we have a (gt X 0) operation. If the shift is arithmetic with
9235 STORE_FLAG_VALUE of 1 or logical with STORE_FLAG_VALUE == -1,
9236 we have a (neg (gt X 0)) operation. */
9237
9238 if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
9239 && GET_CODE (XEXP (varop, 0)) == ASHIFTRT
9240 && count == GET_MODE_BITSIZE (GET_MODE (varop)) - 1
9241 && (code == LSHIFTRT || code == ASHIFTRT)
9242 && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT
9243 && INTVAL (XEXP (XEXP (varop, 0), 1)) == count
9244 && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
9245 {
9246 count = 0;
9247 varop = gen_rtx_combine (GT, GET_MODE (varop), XEXP (varop, 1),
9248 const0_rtx);
9249
9250 if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
9251 varop = gen_rtx_combine (NEG, GET_MODE (varop), varop);
9252
9253 continue;
9254 }
9255 break;
9256
9257 case TRUNCATE:
9258 /* Change (lshiftrt (truncate (lshiftrt))) to (truncate (lshiftrt))
9259 if the truncate does not affect the value. */
9260 if (code == LSHIFTRT
9261 && GET_CODE (XEXP (varop, 0)) == LSHIFTRT
9262 && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT
9263 && (INTVAL (XEXP (XEXP (varop, 0), 1))
9264 >= (GET_MODE_BITSIZE (GET_MODE (XEXP (varop, 0)))
9265 - GET_MODE_BITSIZE (GET_MODE (varop)))))
9266 {
9267 rtx varop_inner = XEXP (varop, 0);
9268
9269 varop_inner = gen_rtx_combine (LSHIFTRT,
9270 GET_MODE (varop_inner),
9271 XEXP (varop_inner, 0),
9272 GEN_INT (count + INTVAL (XEXP (varop_inner, 1))));
9273 varop = gen_rtx_combine (TRUNCATE, GET_MODE (varop),
9274 varop_inner);
9275 count = 0;
9276 continue;
9277 }
9278 break;
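/* A worked example (illustrative, DImode inner operand): in
   (lshiftrt:SI (truncate:SI (lshiftrt:DI Y 32)) 3) the truncation
   only discards bits the inner shift already moved out, so this
   becomes (truncate:SI (lshiftrt:DI Y 35)).  */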
9279
9280 default:
9281 break;
9282 }
9283
9284 break;
9285 }
9286
9287 /* We need to determine what mode to do the shift in. If the shift is
9288 a right shift or ROTATE, we must always do it in the mode it was
9289 originally done in. Otherwise, we can do it in MODE, the widest mode
9290 encountered. The code we care about is that of the shift that will
9291 actually be done, not the shift that was originally requested. */
9292 shift_mode
9293 = (code == ASHIFTRT || code == LSHIFTRT || code == ROTATE
9294 ? result_mode : mode);
9295
9296 /* We have now finished analyzing the shift. The result should be
9297 a shift of type CODE with SHIFT_MODE shifting VAROP COUNT places. If
9298 OUTER_OP is non-NIL, it is an operation that needs to be applied
9299 to the result of the shift. OUTER_CONST is the relevant constant,
9300 but we must turn off all bits turned off in the shift.
9301
9302 If we were passed a value for X, see if we can use any pieces of
9303 it. If not, make new rtx. */
9304
9305 if (x && GET_RTX_CLASS (GET_CODE (x)) == '2'
9306 && GET_CODE (XEXP (x, 1)) == CONST_INT
9307 && INTVAL (XEXP (x, 1)) == count)
9308 const_rtx = XEXP (x, 1);
9309 else
9310 const_rtx = GEN_INT (count);
9311
9312 if (x && GET_CODE (XEXP (x, 0)) == SUBREG
9313 && GET_MODE (XEXP (x, 0)) == shift_mode
9314 && SUBREG_REG (XEXP (x, 0)) == varop)
9315 varop = XEXP (x, 0);
9316 else if (GET_MODE (varop) != shift_mode)
9317 varop = gen_lowpart_for_combine (shift_mode, varop);
9318
9319 /* If we can't make the SUBREG, try to return what we were given. */
9320 if (GET_CODE (varop) == CLOBBER)
9321 return x ? x : varop;
9322
9323 new = simplify_binary_operation (code, shift_mode, varop, const_rtx);
9324 if (new != 0)
9325 x = new;
9326 else
9327 {
9328 if (x == 0 || GET_CODE (x) != code || GET_MODE (x) != shift_mode)
9329 x = gen_rtx_combine (code, shift_mode, varop, const_rtx);
9330
9331 SUBST (XEXP (x, 0), varop);
9332 SUBST (XEXP (x, 1), const_rtx);
9333 }
9334
9335 /* If we have an outer operation and we just made a shift, it is
9336 possible that we could have simplified the shift were it not
9337 for the outer operation. So try to do the simplification
9338 recursively. */
9339
9340 if (outer_op != NIL && GET_CODE (x) == code
9341 && GET_CODE (XEXP (x, 1)) == CONST_INT)
9342 x = simplify_shift_const (x, code, shift_mode, XEXP (x, 0),
9343 INTVAL (XEXP (x, 1)));
9344
9345 /* If we were doing a LSHIFTRT in a wider mode than it was originally,
9346 turn off all the bits that the shift would have turned off. */
9347 if (orig_code == LSHIFTRT && result_mode != shift_mode)
9348 x = simplify_and_const_int (NULL_RTX, shift_mode, x,
9349 GET_MODE_MASK (result_mode) >> orig_count);
9350
9351 /* Do the remainder of the processing in RESULT_MODE. */
9352 x = gen_lowpart_for_combine (result_mode, x);
9353
9354 /* If COMPLEMENT_P is set, we have to complement X before doing the outer
9355 operation. */
9356 if (complement_p)
9357 x = gen_unary (NOT, result_mode, result_mode, x);
9358
9359 if (outer_op != NIL)
9360 {
9361 if (GET_MODE_BITSIZE (result_mode) < HOST_BITS_PER_WIDE_INT)
9362 outer_const = trunc_int_for_mode (outer_const, result_mode);
9363
9364 if (outer_op == AND)
9365 x = simplify_and_const_int (NULL_RTX, result_mode, x, outer_const);
9366 else if (outer_op == SET)
9367 /* This means that we have determined that the result is
9368 equivalent to a constant. This should be rare. */
9369 x = GEN_INT (outer_const);
9370 else if (GET_RTX_CLASS (outer_op) == '1')
9371 x = gen_unary (outer_op, result_mode, result_mode, x);
9372 else
9373 x = gen_binary (outer_op, result_mode, x, GEN_INT (outer_const));
9374 }
9375
9376 return x;
9377}
9378\f
9379/* Like recog, but we receive the address of a pointer to a new pattern.
9380 We try to match the rtx that the pointer points to.
9381 If that fails, we may try to modify or replace the pattern,
9382 storing the replacement into the same pointer object.
9383
9384 Modifications include deletion or addition of CLOBBERs.
9385
9386 PNOTES is a pointer to a location where any REG_UNUSED notes added for
9387 the CLOBBERs are placed.
9388
9389 The value is the final insn code from the pattern ultimately matched,
9390 or -1. */
9391
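/* For instance (illustrative): if the combined pattern only matches
   a machine insn that also clobbers a scratch register, recog
   reports the missing CLOBBER; the code below adds it, accepts the
   match only if the clobbered register is dead here, and records a
   REG_UNUSED note for it.  */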
9392static int
9393 recog_for_combine (pnewpat, insn, pnotes)
9394 rtx *pnewpat;
9395 rtx insn;
9396 rtx *pnotes;
9397{
9398 register rtx pat = *pnewpat;
9399 int insn_code_number;
9400 int num_clobbers_to_add = 0;
9401 int i;
9402 rtx notes = 0;
9403
9404 /* If PAT is a PARALLEL, check to see if it contains the CLOBBER
9405 we use to indicate that something didn't match. If we find such a
9406 thing, force rejection. */
9407 if (GET_CODE (pat) == PARALLEL)
9408 for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
9409 if (GET_CODE (XVECEXP (pat, 0, i)) == CLOBBER
9410 && XEXP (XVECEXP (pat, 0, i), 0) == const0_rtx)
9411 return -1;
9412
9413 /* Is the result of combination a valid instruction? */
9414 insn_code_number = recog (pat, insn, &num_clobbers_to_add);
9415
9416 /* If it isn't, there is the possibility that we previously had an insn
9417 that clobbered some register as a side effect, but the combined
9418 insn doesn't need to do that. So try once more without the clobbers
9419 unless this represents an ASM insn. */
9420
9421 if (insn_code_number < 0 && ! check_asm_operands (pat)
9422 && GET_CODE (pat) == PARALLEL)
9423 {
9424 int pos;
9425
9426 for (pos = 0, i = 0; i < XVECLEN (pat, 0); i++)
9427 if (GET_CODE (XVECEXP (pat, 0, i)) != CLOBBER)
9428 {
9429 if (i != pos)
9430 SUBST (XVECEXP (pat, 0, pos), XVECEXP (pat, 0, i));
9431 pos++;
9432 }
9433
9434 SUBST_INT (XVECLEN (pat, 0), pos);
9435
9436 if (pos == 1)
9437 pat = XVECEXP (pat, 0, 0);
9438
9439 insn_code_number = recog (pat, insn, &num_clobbers_to_add);
9440 }
9441
9442 /* If we had any clobbers to add, make a new pattern that contains
9443 them. Then check to make sure that all of them are dead. */
9444 if (num_clobbers_to_add)
9445 {
9446 rtx newpat = gen_rtx_PARALLEL (VOIDmode,
9447 gen_rtvec (GET_CODE (pat) == PARALLEL
9448 ? (XVECLEN (pat, 0)
9449 + num_clobbers_to_add)
9450 : num_clobbers_to_add + 1));
9451
9452 if (GET_CODE (pat) == PARALLEL)
9453 for (i = 0; i < XVECLEN (pat, 0); i++)
9454 XVECEXP (newpat, 0, i) = XVECEXP (pat, 0, i);
9455 else
9456 XVECEXP (newpat, 0, 0) = pat;
9457
9458 add_clobbers (newpat, insn_code_number);
9459
9460 for (i = XVECLEN (newpat, 0) - num_clobbers_to_add;
9461 i < XVECLEN (newpat, 0); i++)
9462 {
9463 if (GET_CODE (XEXP (XVECEXP (newpat, 0, i), 0)) == REG
9464 && ! reg_dead_at_p (XEXP (XVECEXP (newpat, 0, i), 0), insn))
9465 return -1;
9466 notes = gen_rtx_EXPR_LIST (REG_UNUSED,
9467 XEXP (XVECEXP (newpat, 0, i), 0), notes);
9468 }
9469 pat = newpat;
9470 }
9471
9472 *pnewpat = pat;
9473 *pnotes = notes;
9474
9475 return insn_code_number;
9476}
9477\f
9478/* Like gen_lowpart but for use by combine. In combine it is not possible
9479 to create any new pseudoregs. However, it is safe to create
9480 invalid memory addresses, because combine will try to recognize
9481 them and all they will do is make the combine attempt fail.
9482
9483 If for some reason this cannot do its job, an rtx
9484 (clobber (const_int 0)) is returned.
9485 An insn containing that will not be recognized. */
9486
9487#undef gen_lowpart
9488
9489static rtx
9490gen_lowpart_for_combine (mode, x)
9491 enum machine_mode mode;
9492 register rtx x;
9493{
9494 rtx result;
9495
9496 if (GET_MODE (x) == mode)
9497 return x;
9498
9499 /* We can only support MODE being wider than a word if X is a
9500 constant integer or has a mode the same size. */
9501
9502 if (GET_MODE_SIZE (mode) > UNITS_PER_WORD
9503 && ! ((GET_MODE (x) == VOIDmode
9504 && (GET_CODE (x) == CONST_INT
9505 || GET_CODE (x) == CONST_DOUBLE))
9506 || GET_MODE_SIZE (GET_MODE (x)) == GET_MODE_SIZE (mode)))
9507 return gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
9508
9509 /* X might be a paradoxical (subreg (mem)). In that case, gen_lowpart
9510 won't know what to do. So we will strip off the SUBREG here and
9511 process normally. */
9512 if (GET_CODE (x) == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
9513 {
9514 x = SUBREG_REG (x);
9515 if (GET_MODE (x) == mode)
9516 return x;
9517 }
9518
9519 result = gen_lowpart_common (mode, x);
9520 if (result != 0
9521 && GET_CODE (result) == SUBREG
9522 && GET_CODE (SUBREG_REG (result)) == REG
9523 && REGNO (SUBREG_REG (result)) >= FIRST_PSEUDO_REGISTER
9524 && (GET_MODE_SIZE (GET_MODE (result))
9525 != GET_MODE_SIZE (GET_MODE (SUBREG_REG (result)))))
9526 REG_CHANGES_SIZE (REGNO (SUBREG_REG (result))) = 1;
9527
9528 if (result)
9529 return result;
9530
9531 if (GET_CODE (x) == MEM)
9532 {
9533 register int offset = 0;
9534 rtx new;
9535
9536 /* Refuse to work on a volatile memory ref or one with a mode-dependent
9537 address. */
9538 if (MEM_VOLATILE_P (x) || mode_dependent_address_p (XEXP (x, 0)))
9539 return gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
9540
9541 /* If we want to refer to something bigger than the original memref,
9542 generate a perverse subreg instead. That will force a reload
9543 of the original memref X. */
9544 if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (mode))
9545 return gen_rtx_SUBREG (mode, x, 0);
9546
9547 if (WORDS_BIG_ENDIAN)
9548 offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
9549 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
9550
9551 if (BYTES_BIG_ENDIAN)
9552 {
9553 /* Adjust the address so that the address-after-the-data is
9554 unchanged. */
9555 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
9556 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
9557 }
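/* A worked example (illustrative; 32-bit words, 8-bit units):
   taking the QImode low part of a SImode MEM leaves OFFSET at 0 on
   a little-endian machine; with BYTES_BIG_ENDIAN the adjustment is
   0 - (1 - 4) = 3, so the low byte comes from address + 3 and the
   address-after-the-data is unchanged.  */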
9558 new = gen_rtx_MEM (mode, plus_constant (XEXP (x, 0), offset));
9559 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
9560 MEM_COPY_ATTRIBUTES (new, x);
9561 return new;
9562 }
9563
9564 /* If X is a comparison operator, rewrite it in a new mode. This
9565 probably won't match, but may allow further simplifications. */
9566 else if (GET_RTX_CLASS (GET_CODE (x)) == '<')
9567 return gen_rtx_combine (GET_CODE (x), mode, XEXP (x, 0), XEXP (x, 1));
9568
9569 /* If we couldn't simplify X any other way, just enclose it in a
9570 SUBREG. Normally, this SUBREG won't match, but some patterns may
9571 include an explicit SUBREG or we may simplify it further in combine. */
9572 else
9573 {
9574 int word = 0;
9575
9576 if (WORDS_BIG_ENDIAN && GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD)
9577 word = ((GET_MODE_SIZE (GET_MODE (x))
9578 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD))
9579 / UNITS_PER_WORD);
9580 return gen_rtx_SUBREG (mode, x, word);
9581 }
9582}
9583\f
9584/* Make an rtx expression. This is a subset of gen_rtx and only supports
9585 expressions of 1, 2, or 3 operands, each of which are rtx expressions.
9586
9587 If the identical expression was previously in the insn (in the undobuf),
9588 it will be returned. Only if it is not found will a new expression
9589 be made. */
9590
9591/*VARARGS2*/
9592static rtx
9593 gen_rtx_combine VPROTO((enum rtx_code code, enum machine_mode mode, ...))
9594 {
9595 #ifndef ANSI_PROTOTYPES
9596 enum rtx_code code;
9597 enum machine_mode mode;
9598#endif
9599 va_list p;
9600 int n_args;
9601 rtx args[3];
9602 int j;
9603 const char *fmt;
9604 rtx rt;
9605 struct undo *undo;
9606
9607 VA_START (p, mode);
9608
9609 #ifndef ANSI_PROTOTYPES
9610 code = va_arg (p, enum rtx_code);
9611 mode = va_arg (p, enum machine_mode);
9612#endif
9613
9614 n_args = GET_RTX_LENGTH (code);
9615 fmt = GET_RTX_FORMAT (code);
9616
9617 if (n_args == 0 || n_args > 3)
9618 abort ();
9619
9620 /* Get each arg and verify that it is supposed to be an expression. */
9621 for (j = 0; j < n_args; j++)
9622 {
9623 if (*fmt++ != 'e')
9624 abort ();
9625
9626 args[j] = va_arg (p, rtx);
9627 }
9628
9629 va_end (p);
9630
9631 /* See if this is in undobuf. Be sure we don't use objects that came
9632 from another insn; this could produce circular rtl structures. */
9633
9634 for (undo = undobuf.undos; undo != undobuf.previous_undos; undo = undo->next)
9635 if (!undo->is_int
9636 && GET_CODE (undo->old_contents.r) == code
9637 && GET_MODE (undo->old_contents.r) == mode)
9638 {
9639 for (j = 0; j < n_args; j++)
9640 if (XEXP (undo->old_contents.r, j) != args[j])
9641 break;
9642
9643 if (j == n_args)
9644 return undo->old_contents.r;
9645 }
9646
9647 /* Otherwise make a new rtx. We know we have 1, 2, or 3 args.
9648 Use rtx_alloc instead of gen_rtx because it's faster on RISC. */
9649 rt = rtx_alloc (code);
9650 PUT_MODE (rt, mode);
9651 XEXP (rt, 0) = args[0];
9652 if (n_args > 1)
9653 {
9654 XEXP (rt, 1) = args[1];
9655 if (n_args > 2)
9656 XEXP (rt, 2) = args[2];
9657 }
9658 return rt;
9659}
9660
9661/* These routines make binary and unary operations by first seeing if they
9662 fold; if not, a new expression is allocated. */
9663
9664static rtx
9665gen_binary (code, mode, op0, op1)
9666 enum rtx_code code;
9667 enum machine_mode mode;
9668 rtx op0, op1;
9669{
9670 rtx result;
9671 rtx tem;
9672
9673 if (GET_RTX_CLASS (code) == 'c'
9674 && (GET_CODE (op0) == CONST_INT
9675 || (CONSTANT_P (op0) && GET_CODE (op1) != CONST_INT)))
9676 tem = op0, op0 = op1, op1 = tem;
9677
9678 if (GET_RTX_CLASS (code) == '<')
9679 {
9680 enum machine_mode op_mode = GET_MODE (op0);
9681
9682 /* Strip the COMPARE from (REL_OP (compare X Y) 0) to get
9683 just (REL_OP X Y). */
9684 if (GET_CODE (op0) == COMPARE && op1 == const0_rtx)
9685 {
9686 op1 = XEXP (op0, 1);
9687 op0 = XEXP (op0, 0);
9688 op_mode = GET_MODE (op0);
9689 }
9690
9691 if (op_mode == VOIDmode)
9692 op_mode = GET_MODE (op1);
9693 result = simplify_relational_operation (code, op_mode, op0, op1);
9694 }
9695 else
9696 result = simplify_binary_operation (code, mode, op0, op1);
9697
9698 if (result)
9699 return result;
9700
9701 /* Put complex operands first and constants second. */
9702 if (GET_RTX_CLASS (code) == 'c'
9703 && ((CONSTANT_P (op0) && GET_CODE (op1) != CONST_INT)
9704 || (GET_RTX_CLASS (GET_CODE (op0)) == 'o'
9705 && GET_RTX_CLASS (GET_CODE (op1)) != 'o')
9706 || (GET_CODE (op0) == SUBREG
9707 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (op0))) == 'o'
9708 && GET_RTX_CLASS (GET_CODE (op1)) != 'o')))
9709 return gen_rtx_combine (code, mode, op1, op0);
9710
9711 /* If we are turning off bits already known off in OP0, we need not do
9712 an AND. */
9713 else if (code == AND && GET_CODE (op1) == CONST_INT
9714 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
9715 && (nonzero_bits (op0, mode) & ~ INTVAL (op1)) == 0)
9716 return op0;
9717
9718 return gen_rtx_combine (code, mode, op0, op1);
9719}
9720
9721static rtx
9722 gen_unary (code, mode, op0_mode, op0)
9723 enum rtx_code code;
9724 enum machine_mode mode, op0_mode;
9725 rtx op0;
9726{
9727 rtx result = simplify_unary_operation (code, mode, op0, op0_mode);
9728
9729 if (result)
9730 return result;
9731
9732 return gen_rtx_combine (code, mode, op0);
9733}
9734\f
9735/* Simplify a comparison between *POP0 and *POP1 where CODE is the
9736 comparison code that will be tested.
9737
9738 The result is a possibly different comparison code to use. *POP0 and
9739 *POP1 may be updated.
9740
9741 It is possible that we might detect that a comparison is either always
9742 true or always false. However, we do not perform general constant
9743 folding in combine, so this knowledge isn't useful. Such tautologies
9744 should have been detected earlier. Hence we ignore all such cases. */
9745
9746static enum rtx_code
9747simplify_comparison (code, pop0, pop1)
9748 enum rtx_code code;
9749 rtx *pop0;
9750 rtx *pop1;
9751{
9752 rtx op0 = *pop0;
9753 rtx op1 = *pop1;
9754 rtx tem, tem1;
9755 int i;
9756 enum machine_mode mode, tmode;
9757
9758 /* Try a few ways of applying the same transformation to both operands. */
9759 while (1)
9760 {
9761#ifndef WORD_REGISTER_OPERATIONS
9762 /* The test below this one won't handle SIGN_EXTENDs on these machines,
9763 so check specially. */
9764 if (code != GTU && code != GEU && code != LTU && code != LEU
9765 && GET_CODE (op0) == ASHIFTRT && GET_CODE (op1) == ASHIFTRT
9766 && GET_CODE (XEXP (op0, 0)) == ASHIFT
9767 && GET_CODE (XEXP (op1, 0)) == ASHIFT
9768 && GET_CODE (XEXP (XEXP (op0, 0), 0)) == SUBREG
9769 && GET_CODE (XEXP (XEXP (op1, 0), 0)) == SUBREG
9770 && (GET_MODE (SUBREG_REG (XEXP (XEXP (op0, 0), 0)))
9771 == GET_MODE (SUBREG_REG (XEXP (XEXP (op1, 0), 0))))
9772 && GET_CODE (XEXP (op0, 1)) == CONST_INT
9773 && GET_CODE (XEXP (op1, 1)) == CONST_INT
9774 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
9775 && GET_CODE (XEXP (XEXP (op1, 0), 1)) == CONST_INT
9776 && INTVAL (XEXP (op0, 1)) == INTVAL (XEXP (op1, 1))
9777 && INTVAL (XEXP (op0, 1)) == INTVAL (XEXP (XEXP (op0, 0), 1))
9778 && INTVAL (XEXP (op0, 1)) == INTVAL (XEXP (XEXP (op1, 0), 1))
9779 && (INTVAL (XEXP (op0, 1))
9780 == (GET_MODE_BITSIZE (GET_MODE (op0))
9781 - (GET_MODE_BITSIZE
9782 (GET_MODE (SUBREG_REG (XEXP (XEXP (op0, 0), 0))))))))
9783 {
9784 op0 = SUBREG_REG (XEXP (XEXP (op0, 0), 0));
9785 op1 = SUBREG_REG (XEXP (XEXP (op1, 0), 0));
9786 }
9787#endif
9788
9789 /* If both operands are the same constant shift, see if we can ignore the
9790 shift. We can if the shift is a rotate or if the bits shifted out of
9791 this shift are known to be zero for both inputs and if the type of
9792 comparison is compatible with the shift. */
9793 if (GET_CODE (op0) == GET_CODE (op1)
9794 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
9795 && ((GET_CODE (op0) == ROTATE && (code == NE || code == EQ))
9796 || ((GET_CODE (op0) == LSHIFTRT || GET_CODE (op0) == ASHIFT)
9797 && (code != GT && code != LT && code != GE && code != LE))
9798 || (GET_CODE (op0) == ASHIFTRT
9799 && (code != GTU && code != LTU
9800 && code != GEU && code != GEU)))
9801 && GET_CODE (XEXP (op0, 1)) == CONST_INT
9802 && INTVAL (XEXP (op0, 1)) >= 0
9803 && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
9804 && XEXP (op0, 1) == XEXP (op1, 1))
9805 {
9806 enum machine_mode mode = GET_MODE (op0);
9807 unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
9808 int shift_count = INTVAL (XEXP (op0, 1));
9809
9810 if (GET_CODE (op0) == LSHIFTRT || GET_CODE (op0) == ASHIFTRT)
9811 mask &= (mask >> shift_count) << shift_count;
9812 else if (GET_CODE (op0) == ASHIFT)
9813 mask = (mask & (mask << shift_count)) >> shift_count;
9814
9815 if ((nonzero_bits (XEXP (op0, 0), mode) & ~ mask) == 0
9816 && (nonzero_bits (XEXP (op1, 0), mode) & ~ mask) == 0)
9817 op0 = XEXP (op0, 0), op1 = XEXP (op1, 0);
9818 else
9819 break;
9820 }
9821
9822 /* If both operands are AND's of a paradoxical SUBREG by constant, the
9823 SUBREGs are of the same mode, and, in both cases, the AND would
9824 be redundant if the comparison was done in the narrower mode,
9825 do the comparison in the narrower mode (e.g., we are AND'ing with 1
9826 and the operand's possibly nonzero bits are 0xffffff01; in that case
9827 if we only care about QImode, we don't need the AND). This case
9828 occurs if the output mode of an scc insn is not SImode and
9829 STORE_FLAG_VALUE == 1 (e.g., the 386).
9830
9831 Similarly, check for a case where the AND's are ZERO_EXTEND
9832 operations from some narrower mode even though a SUBREG is not
9833 present. */
9834
9835 else if (GET_CODE (op0) == AND && GET_CODE (op1) == AND
9836 && GET_CODE (XEXP (op0, 1)) == CONST_INT
9837 && GET_CODE (XEXP (op1, 1)) == CONST_INT)
9838 {
9839 rtx inner_op0 = XEXP (op0, 0);
9840 rtx inner_op1 = XEXP (op1, 0);
9841 HOST_WIDE_INT c0 = INTVAL (XEXP (op0, 1));
9842 HOST_WIDE_INT c1 = INTVAL (XEXP (op1, 1));
9843 int changed = 0;
9844
9845 if (GET_CODE (inner_op0) == SUBREG && GET_CODE (inner_op1) == SUBREG
9846 && (GET_MODE_SIZE (GET_MODE (inner_op0))
9847 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (inner_op0))))
9848 && (GET_MODE (SUBREG_REG (inner_op0))
9849 == GET_MODE (SUBREG_REG (inner_op1)))
729a2bc6 9850 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (inner_op0)))
7e4dc511 9851 <= HOST_BITS_PER_WIDE_INT)
01c82bbb 9852 && (0 == ((~c0) & nonzero_bits (SUBREG_REG (inner_op0),
729a2bc6 9853 GET_MODE (SUBREG_REG (inner_op0)))))
01c82bbb
RK
9854 && (0 == ((~c1) & nonzero_bits (SUBREG_REG (inner_op1),
9855 GET_MODE (SUBREG_REG (inner_op1))))))
9856 {
9857 op0 = SUBREG_REG (inner_op0);
9858 op1 = SUBREG_REG (inner_op1);
9859
9860 /* The resulting comparison is always unsigned since we masked
9861 off the original sign bit. */
9862 code = unsigned_condition (code);
9863
9864 changed = 1;
9865 }
9866
9867 else if (c0 == c1)
9868 for (tmode = GET_CLASS_NARROWEST_MODE
9869 (GET_MODE_CLASS (GET_MODE (op0)));
9870 tmode != GET_MODE (op0); tmode = GET_MODE_WIDER_MODE (tmode))
9871 if ((unsigned HOST_WIDE_INT) c0 == GET_MODE_MASK (tmode))
9872 {
9873 op0 = gen_lowpart_for_combine (tmode, inner_op0);
9874 op1 = gen_lowpart_for_combine (tmode, inner_op1);
9875 code = unsigned_condition (code);
9876 changed = 1;
9877 break;
9878 }
9879
9880 if (! changed)
9881 break;
9882 }
9883
9884 /* If both operands are NOT, we can strip off the outer operation
9885 and adjust the comparison code for swapped operands; similarly for
9886 NEG, except that this must be an equality comparison. */
9887 else if ((GET_CODE (op0) == NOT && GET_CODE (op1) == NOT)
9888 || (GET_CODE (op0) == NEG && GET_CODE (op1) == NEG
9889 && (code == EQ || code == NE)))
9890 op0 = XEXP (op0, 0), op1 = XEXP (op1, 0), code = swap_condition (code);
9891
9892 else
9893 break;
9894 }
9895
9896 /* If the first operand is a constant, swap the operands and adjust the
9897 comparison code appropriately, but don't do this if the second operand
9898 is already a constant integer. */
9899 if (CONSTANT_P (op0) && GET_CODE (op1) != CONST_INT)
9900 {
9901 tem = op0, op0 = op1, op1 = tem;
9902 code = swap_condition (code);
9903 }
9904
9905 /* We now enter a loop during which we will try to simplify the comparison.
9906 For the most part, we only are concerned with comparisons with zero,
9907 but some things may really be comparisons with zero but not start
9908 out looking that way. */
9909
9910 while (GET_CODE (op1) == CONST_INT)
9911 {
9912 enum machine_mode mode = GET_MODE (op0);
9913 int mode_width = GET_MODE_BITSIZE (mode);
9914 unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
9915 int equality_comparison_p;
9916 int sign_bit_comparison_p;
9917 int unsigned_comparison_p;
9918 HOST_WIDE_INT const_op;
9919
9920 /* We only want to handle integral modes. This catches VOIDmode,
9921 CCmode, and the floating-point modes. An exception is that we
9922 can handle VOIDmode if OP0 is a COMPARE or a comparison
9923 operation. */
9924
9925 if (GET_MODE_CLASS (mode) != MODE_INT
9926 && ! (mode == VOIDmode
9927 && (GET_CODE (op0) == COMPARE
9928 || GET_RTX_CLASS (GET_CODE (op0)) == '<')))
9929 break;
9930
9931 /* Get the constant we are comparing against and turn off all bits
9932 not on in our mode. */
9933 const_op = INTVAL (op1);
9934 if (mode_width <= HOST_BITS_PER_WIDE_INT)
9935 const_op &= mask;
9936
9937 /* If we are comparing against a constant power of two and the value
9938 being compared can only have that single bit nonzero (e.g., it was
9939 `and'ed with that bit), we can replace this with a comparison
9940 with zero. */
9941 if (const_op
9942 && (code == EQ || code == NE || code == GE || code == GEU
9943 || code == LT || code == LTU)
9944 && mode_width <= HOST_BITS_PER_WIDE_INT
9945 && exact_log2 (const_op) >= 0
9946 && nonzero_bits (op0, mode) == (unsigned HOST_WIDE_INT) const_op)
9947 {
9948 code = (code == EQ || code == GE || code == GEU ? NE : EQ);
9949 op1 = const0_rtx, const_op = 0;
9950 }
9951
9952 /* Similarly, if we are comparing a value known to be either -1 or
9953 0 with -1, change it to the opposite comparison against zero. */
9954
9955 if (const_op == -1
9956 && (code == EQ || code == NE || code == GT || code == LE
9957 || code == GEU || code == LTU)
9958 && num_sign_bit_copies (op0, mode) == mode_width)
9959 {
9960 code = (code == EQ || code == LE || code == GEU ? NE : EQ);
9961 op1 = const0_rtx, const_op = 0;
9962 }
9963
230d793d 9964 /* Do some canonicalizations based on the comparison code. We prefer
9965 comparisons against zero and then prefer equality comparisons.
9966 If we can reduce the size of a constant, we will do that too. */
230d793d
RS
9967
9968 switch (code)
9969 {
9970 case LT:
9971 /* < C is equivalent to <= (C - 1) */
9972 if (const_op > 0)
230d793d 9973 {
4803a34a 9974 const_op -= 1;
5f4f0e22 9975 op1 = GEN_INT (const_op);
230d793d
RS
9976 code = LE;
9977 /* ... fall through to LE case below. */
9978 }
9979 else
9980 break;
9981
9982 case LE:
9983 /* <= C is equivalent to < (C + 1); we do this for C < 0 */
9984 if (const_op < 0)
9985 {
9986 const_op += 1;
9987 op1 = GEN_INT (const_op);
9988 code = LT;
9989 }
9990
9991 /* If we are doing a <= 0 comparison on a value known to have
9992 a zero sign bit, we can replace this with == 0. */
9993 else if (const_op == 0
9994 && mode_width <= HOST_BITS_PER_WIDE_INT
9995 && (nonzero_bits (op0, mode)
9996 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
9997 code = EQ;
9998 break;
9999
10000 case GE:
10001 /* >= C is equivalent to > (C - 1). */
10002 if (const_op > 0)
10003 {
10004 const_op -= 1;
10005 op1 = GEN_INT (const_op);
10006 code = GT;
10007 /* ... fall through to GT below. */
10008 }
10009 else
10010 break;
10011
10012 case GT:
10013 /* > C is equivalent to >= (C + 1); we do this for C < 0*/
10014 if (const_op < 0)
10015 {
10016 const_op += 1;
10017 op1 = GEN_INT (const_op);
10018 code = GE;
10019 }
10020
10021 /* If we are doing a > 0 comparison on a value known to have
10022 a zero sign bit, we can replace this with != 0. */
10023 else if (const_op == 0
10024 && mode_width <= HOST_BITS_PER_WIDE_INT
10025 && (nonzero_bits (op0, mode)
10026 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
10027 code = NE;
10028 break;
10029
230d793d 10030 case LTU:
10031 /* < C is equivalent to <= (C - 1). */
10032 if (const_op > 0)
10033 {
10034 const_op -= 1;
10035 op1 = GEN_INT (const_op);
10036 code = LEU;
10037 /* ... fall through ... */
10038 }
10039
10040 /* (unsigned) < 0x80000000 is equivalent to >= 0. */
10041 else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
10042 && (const_op == (HOST_WIDE_INT) 1 << (mode_width - 1)))
10043 {
10044 const_op = 0, op1 = const0_rtx;
10045 code = GE;
10046 break;
10047 }
10048 else
10049 break;
10050
10051 case LEU:
10052 /* unsigned <= 0 is equivalent to == 0 */
10053 if (const_op == 0)
10054 code = EQ;
d0ab8cd3 10055
10056 /* (unsigned) <= 0x7fffffff is equivalent to >= 0. */
10057 else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
10058 && (const_op == ((HOST_WIDE_INT) 1 << (mode_width - 1)) - 1))
10059 {
10060 const_op = 0, op1 = const0_rtx;
10061 code = GE;
10062 }
10063 break;
10064
10065 case GEU:
10066 /* >= C is equivalent to < (C - 1). */
10067 if (const_op > 1)
10068 {
10069 const_op -= 1;
10070 op1 = GEN_INT (const_op);
10071 code = GTU;
10072 /* ... fall through ... */
10073 }
10074
10075 /* (unsigned) >= 0x80000000 is equivalent to < 0. */
10076 else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
10077 && (const_op == (HOST_WIDE_INT) 1 << (mode_width - 1)))
10078 {
10079 const_op = 0, op1 = const0_rtx;
10080 code = LT;
10081 break;
10082 }
10083 else
10084 break;
10085
10086 case GTU:
10087 /* unsigned > 0 is equivalent to != 0 */
10088 if (const_op == 0)
10089 code = NE;
10090
10091 /* (unsigned) > 0x7fffffff is equivalent to < 0. */
10092 else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
10093 && (const_op == ((HOST_WIDE_INT) 1 << (mode_width - 1)) - 1))
10094 {
10095 const_op = 0, op1 = const0_rtx;
10096 code = LT;
10097 }
10098 break;
10099
10100 default:
10101 break;
10102 }
10103
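/* Two illustrative examples of the canonicalizations above
   (SImode assumed): (lt X (const_int 5)) becomes
   (le X (const_int 4)), and (ltu X (const_int 0x80000000)), which
   just tests the sign bit, becomes (ge X (const_int 0)).  */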
10104 /* Compute some predicates to simplify code below. */
10105
10106 equality_comparison_p = (code == EQ || code == NE);
10107 sign_bit_comparison_p = ((code == LT || code == GE) && const_op == 0);
10108 unsigned_comparison_p = (code == LTU || code == LEU || code == GTU
10109 || code == LEU);
10110
10111 /* If this is a sign bit comparison and we can do arithmetic in
10112 MODE, say that we will only be needing the sign bit of OP0. */
10113 if (sign_bit_comparison_p
10114 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
10115 op0 = force_to_mode (op0, mode,
10116 ((HOST_WIDE_INT) 1
10117 << (GET_MODE_BITSIZE (mode) - 1)),
10118 NULL_RTX, 0);
10119
10120 /* Now try cases based on the opcode of OP0. If none of the cases
10121 does a "continue", we exit this loop immediately after the
10122 switch. */
10123
10124 switch (GET_CODE (op0))
10125 {
10126 case ZERO_EXTRACT:
10127 /* If we are extracting a single bit from a variable position in
10128 a constant that has only a single bit set and are comparing it
10129 with zero, we can convert this into an equality comparison
10130 between the position and the location of the single bit. */
10131
10132 if (GET_CODE (XEXP (op0, 0)) == CONST_INT
10133 && XEXP (op0, 1) == const1_rtx
10134 && equality_comparison_p && const_op == 0
10135 && (i = exact_log2 (INTVAL (XEXP (op0, 0)))) >= 0)
10136 {
10137 if (BITS_BIG_ENDIAN)
10138 {
10139 #ifdef HAVE_extzv
10140 mode = insn_data[(int) CODE_FOR_extzv].operand[1].mode;
10141 if (mode == VOIDmode)
10142 mode = word_mode;
10143 i = (GET_MODE_BITSIZE (mode) - 1 - i);
10144 #else
10145 i = BITS_PER_WORD - 1 - i;
10146 #endif
10147 }
10148
10149 op0 = XEXP (op0, 2);
10150 op1 = GEN_INT (i);
10151 const_op = i;
10152
10153 /* Result is nonzero iff shift count is equal to I. */
10154 code = reverse_condition (code);
10155 continue;
10156 }
10157
10158 /* ... fall through ... */
10159
10160 case SIGN_EXTRACT:
10161 tem = expand_compound_operation (op0);
10162 if (tem != op0)
10163 {
10164 op0 = tem;
10165 continue;
10166 }
10167 break;
10168
10169 case NOT:
10170 /* If testing for equality, we can take the NOT of the constant. */
10171 if (equality_comparison_p
10172 && (tem = simplify_unary_operation (NOT, mode, op1, mode)) != 0)
10173 {
10174 op0 = XEXP (op0, 0);
10175 op1 = tem;
10176 continue;
10177 }
10178
10179 /* If just looking at the sign bit, reverse the sense of the
10180 comparison. */
10181 if (sign_bit_comparison_p)
10182 {
10183 op0 = XEXP (op0, 0);
10184 code = (code == GE ? LT : GE);
10185 continue;
10186 }
10187 break;
10188
10189 case NEG:
10190 /* If testing for equality, we can take the NEG of the constant. */
10191 if (equality_comparison_p
10192 && (tem = simplify_unary_operation (NEG, mode, op1, mode)) != 0)
10193 {
10194 op0 = XEXP (op0, 0);
10195 op1 = tem;
10196 continue;
10197 }
10198
10199 /* The remaining cases only apply to comparisons with zero. */
10200 if (const_op != 0)
10201 break;
10202
10203 /* When X is ABS or is known positive,
10204 (neg X) is < 0 if and only if X != 0. */
10205
10206 if (sign_bit_comparison_p
10207 && (GET_CODE (XEXP (op0, 0)) == ABS
10208 || (mode_width <= HOST_BITS_PER_WIDE_INT
10209 && (nonzero_bits (XEXP (op0, 0), mode)
10210 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)))
10211 {
10212 op0 = XEXP (op0, 0);
10213 code = (code == LT ? NE : EQ);
10214 continue;
10215 }
10216
10217 /* If we have NEG of something whose two high-order bits are the
10218 same, we know that "(-a) < 0" is equivalent to "a > 0". */
10219 if (num_sign_bit_copies (op0, mode) >= 2)
10220 {
10221 op0 = XEXP (op0, 0);
10222 code = swap_condition (code);
10223 continue;
10224 }
10225 break;
10226
10227 case ROTATE:
10228 /* If we are testing equality and our count is a constant, we
10229 can perform the inverse operation on our RHS. */
10230 if (equality_comparison_p && GET_CODE (XEXP (op0, 1)) == CONST_INT
10231 && (tem = simplify_binary_operation (ROTATERT, mode,
10232 op1, XEXP (op0, 1))) != 0)
10233 {
10234 op0 = XEXP (op0, 0);
10235 op1 = tem;
10236 continue;
10237 }
10238
10239 /* If we are doing a < 0 or >= 0 comparison, it means we are testing
10240 a particular bit. Convert it to an AND of a constant of that
10241 bit. This will be converted into a ZERO_EXTRACT. */
10242 if (const_op == 0 && sign_bit_comparison_p
10243 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10244 && mode_width <= HOST_BITS_PER_WIDE_INT)
10245 {
10246 op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
10247 ((HOST_WIDE_INT) 1
10248 << (mode_width - 1
10249 - INTVAL (XEXP (op0, 1)))));
10250 code = (code == LT ? NE : EQ);
10251 continue;
10252 }
10253
10254 /* ... fall through ... */
10255
10256 case ABS:
10257 /* ABS is ignorable inside an equality comparison with zero. */
10258 if (const_op == 0 && equality_comparison_p)
10259 {
10260 op0 = XEXP (op0, 0);
10261 continue;
10262 }
10263 break;
10264
10265
10266 case SIGN_EXTEND:
10267 /* Can simplify (compare (zero/sign_extend FOO) CONST)
10268 to (compare FOO CONST) if CONST fits in FOO's mode and we
10269 are either testing inequality or have an unsigned comparison
10270 with ZERO_EXTEND or a signed comparison with SIGN_EXTEND. */
10271 if (! unsigned_comparison_p
10272 && (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
10273 <= HOST_BITS_PER_WIDE_INT)
10274 && ((unsigned HOST_WIDE_INT) const_op
10275 < (((unsigned HOST_WIDE_INT) 1
10276 << (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0))) - 1)))))
10277 {
10278 op0 = XEXP (op0, 0);
10279 continue;
10280 }
10281 break;
10282
10283 case SUBREG:
10284 /* Check for the case where we are comparing A - C1 with C2,
10285 both constants are smaller than 1/2 the maximum positive
10286 value in MODE, and the comparison is equality or unsigned.
10287 In that case, if A is either zero-extended to MODE or has
10288 sufficient sign bits so that the high-order bit in MODE
10289 is a copy of the sign in the inner mode, we can prove that it is
10290 safe to do the operation in the wider mode. This simplifies
10291 many range checks. */
10292
10293 if (mode_width <= HOST_BITS_PER_WIDE_INT
10294 && subreg_lowpart_p (op0)
10295 && GET_CODE (SUBREG_REG (op0)) == PLUS
10296 && GET_CODE (XEXP (SUBREG_REG (op0), 1)) == CONST_INT
10297 && INTVAL (XEXP (SUBREG_REG (op0), 1)) < 0
10298 && (- INTVAL (XEXP (SUBREG_REG (op0), 1))
10299 < (HOST_WIDE_INT)(GET_MODE_MASK (mode) / 2))
10300 && (unsigned HOST_WIDE_INT) const_op < GET_MODE_MASK (mode) / 2
10301 && (0 == (nonzero_bits (XEXP (SUBREG_REG (op0), 0),
10302 GET_MODE (SUBREG_REG (op0)))
10303 & ~ GET_MODE_MASK (mode))
10304 || (num_sign_bit_copies (XEXP (SUBREG_REG (op0), 0),
10305 GET_MODE (SUBREG_REG (op0)))
10306 > (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0)))
10307 - GET_MODE_BITSIZE (mode)))))
10308 {
10309 op0 = SUBREG_REG (op0);
10310 continue;
10311 }
10312
10313 /* If the inner mode is narrower and we are extracting the low part,
10314 we can treat the SUBREG as if it were a ZERO_EXTEND. */
10315 if (subreg_lowpart_p (op0)
10316 && GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0))) < mode_width)
10317 /* Fall through */ ;
10318 else
10319 break;
10320
10321 /* ... fall through ... */
10322
10323 case ZERO_EXTEND:
10324 if ((unsigned_comparison_p || equality_comparison_p)
10325 && (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
10326 <= HOST_BITS_PER_WIDE_INT)
10327 && ((unsigned HOST_WIDE_INT) const_op
10328 < GET_MODE_MASK (GET_MODE (XEXP (op0, 0)))))
10329 {
10330 op0 = XEXP (op0, 0);
10331 continue;
10332 }
10333 break;
10334
10335 case PLUS:
10336 /* (eq (plus X A) B) -> (eq X (minus B A)). We can only do
10337 this for equality comparisons due to pathological cases involving
10338 overflows. */
10339 if (equality_comparison_p
10340 && 0 != (tem = simplify_binary_operation (MINUS, mode,
10341 op1, XEXP (op0, 1))))
10342 {
10343 op0 = XEXP (op0, 0);
10344 op1 = tem;
10345 continue;
10346 }
10347
10348 /* (plus (abs X) (const_int -1)) is < 0 if and only if X == 0. */
10349 if (const_op == 0 && XEXP (op0, 1) == constm1_rtx
10350 && GET_CODE (XEXP (op0, 0)) == ABS && sign_bit_comparison_p)
10351 {
10352 op0 = XEXP (XEXP (op0, 0), 0);
10353 code = (code == LT ? EQ : NE);
10354 continue;
10355 }
10356 break;
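/* For example (illustrative): (eq (plus:SI X (const_int 4))
   (const_int 7)) simplifies to (eq X (const_int 3)) by the
   transformation above.  */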
10357
10358 case MINUS:
10359 /* (op (minus A B) 0) -> (op A B) */
10360 if (op1 == const0_rtx)
10361 {
10362 op1 = XEXP (op0, 1);
10363 op0 = XEXP (op0, 0);
10364 continue;
10365 }
10366
10367 /* (eq (minus A B) C) -> (eq A (plus B C)) or
10368 (eq B (minus A C)), whichever simplifies. We can only do
10369 this for equality comparisons due to pathological cases involving
10370 overflows. */
10371 if (equality_comparison_p
10372 && 0 != (tem = simplify_binary_operation (PLUS, mode,
10373 XEXP (op0, 1), op1)))
10374 {
10375 op0 = XEXP (op0, 0);
10376 op1 = tem;
10377 continue;
10378 }
10379
10380 if (equality_comparison_p
10381 && 0 != (tem = simplify_binary_operation (MINUS, mode,
10382 XEXP (op0, 0), op1)))
10383 {
10384 op0 = XEXP (op0, 1);
10385 op1 = tem;
10386 continue;
10387 }
10388
10389 /* The sign bit of (minus (ashiftrt X C) X), where C is the number
10390 of bits in X minus 1, is one iff X > 0. */
10391 if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == ASHIFTRT
10392 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
10393 && INTVAL (XEXP (XEXP (op0, 0), 1)) == mode_width - 1
10394 && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
10395 {
10396 op0 = XEXP (op0, 1);
10397 code = (code == GE ? LE : GT);
10398 continue;
10399 }
10400 break;
10401
10402 case XOR:
10403 /* (eq (xor A B) C) -> (eq A (xor B C)). This is a simplification
10404 if C is zero or B is a constant. */
10405 if (equality_comparison_p
10406 && 0 != (tem = simplify_binary_operation (XOR, mode,
10407 XEXP (op0, 1), op1)))
10408 {
10409 op0 = XEXP (op0, 0);
10410 op1 = tem;
10411 continue;
10412 }
10413 break;
10414
10415 case EQ: case NE:
10416 case LT: case LTU: case LE: case LEU:
10417 case GT: case GTU: case GE: case GEU:
10418 /* We can't do anything if OP0 is a condition code value, rather
10419 than an actual data value. */
10420 if (const_op != 0
10421#ifdef HAVE_cc0
10422 || XEXP (op0, 0) == cc0_rtx
10423#endif
10424 || GET_MODE_CLASS (GET_MODE (XEXP (op0, 0))) == MODE_CC)
10425 break;
10426
10427 /* Get the two operands being compared. */
10428 if (GET_CODE (XEXP (op0, 0)) == COMPARE)
10429 tem = XEXP (XEXP (op0, 0), 0), tem1 = XEXP (XEXP (op0, 0), 1);
10430 else
10431 tem = XEXP (op0, 0), tem1 = XEXP (op0, 1);
10432
10433 /* Check for the cases where we simply want the result of the
10434 earlier test or the opposite of that result. */
10435 if (code == NE
10436 || (code == EQ && reversible_comparison_p (op0))
10437 || (GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
10438 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
10439 && (STORE_FLAG_VALUE
10440 & (((HOST_WIDE_INT) 1
10441 << (GET_MODE_BITSIZE (GET_MODE (op0)) - 1))))
10442 && (code == LT
10443 || (code == GE && reversible_comparison_p (op0)))))
10444 {
10445 code = (code == LT || code == NE
10446 ? GET_CODE (op0) : reverse_condition (GET_CODE (op0)));
10447 op0 = tem, op1 = tem1;
10448 continue;
10449 }
10450 break;
10451
10452 case IOR:
10453 /* The sign bit of (ior (plus X (const_int -1)) X) is non-zero
10454 iff X <= 0. */
10455 if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == PLUS
10456 && XEXP (XEXP (op0, 0), 1) == constm1_rtx
10457 && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
10458 {
10459 op0 = XEXP (op0, 1);
10460 code = (code == GE ? GT : LE);
10461 continue;
10462 }
10463 break;
10464
10465 case AND:
10466 /* Convert (and (xshift 1 X) Y) to (and (lshiftrt Y X) 1). This
10467 will be converted to a ZERO_EXTRACT later. */
10468 if (const_op == 0 && equality_comparison_p
10469 && GET_CODE (XEXP (op0, 0)) == ASHIFT
10470 && XEXP (XEXP (op0, 0), 0) == const1_rtx)
10471 {
10472 op0 = simplify_and_const_int
10473 (op0, mode, gen_rtx_combine (LSHIFTRT, mode,
10474 XEXP (op0, 1),
10475 XEXP (XEXP (op0, 0), 1)),
10476 (HOST_WIDE_INT) 1);
10477 continue;
10478 }
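 /* Illustrative example (added commentary):
    (eq (and (ashift (const_int 1) X) Y) (const_int 0)) tests bit X of Y;
    rewriting it as (eq (and (lshiftrt Y X) (const_int 1)) (const_int 0))
    puts it in the shape that is later recognized as a ZERO_EXTRACT.  */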
10479
10480 /* If we are comparing (and (lshiftrt X C1) C2) for equality with
10481 zero and X is a comparison and C1 and C2 describe only bits set
10482 in STORE_FLAG_VALUE, we can compare with X. */
10483 if (const_op == 0 && equality_comparison_p
10484 && mode_width <= HOST_BITS_PER_WIDE_INT
10485 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10486 && GET_CODE (XEXP (op0, 0)) == LSHIFTRT
10487 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
10488 && INTVAL (XEXP (XEXP (op0, 0), 1)) >= 0
10489 && INTVAL (XEXP (XEXP (op0, 0), 1)) < HOST_BITS_PER_WIDE_INT)
10490 {
10491 mask = ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode))
10492 << INTVAL (XEXP (XEXP (op0, 0), 1)));
10493 if ((~ STORE_FLAG_VALUE & mask) == 0
10494 && (GET_RTX_CLASS (GET_CODE (XEXP (XEXP (op0, 0), 0))) == '<'
10495 || ((tem = get_last_value (XEXP (XEXP (op0, 0), 0))) != 0
10496 && GET_RTX_CLASS (GET_CODE (tem)) == '<')))
10497 {
10498 op0 = XEXP (XEXP (op0, 0), 0);
10499 continue;
10500 }
10501 }
10502
10503 /* If we are doing an equality comparison of an AND of a bit equal
10504 to the sign bit, replace this with a LT or GE comparison of
10505 the underlying value. */
10506 if (equality_comparison_p
10507 && const_op == 0
10508 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10509 && mode_width <= HOST_BITS_PER_WIDE_INT
10510 && ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode))
10511 == (unsigned HOST_WIDE_INT) 1 << (mode_width - 1)))
10512 {
10513 op0 = XEXP (op0, 0);
10514 code = (code == EQ ? GE : LT);
10515 continue;
10516 }
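 /* Worked instance (added commentary): in a 32-bit mode the constant
    (HOST_WIDE_INT) 1 << 31 is exactly the sign bit, so
    (eq (and X (const_int -2147483648)) (const_int 0)) asks whether the
    sign bit is clear, i.e. (ge X (const_int 0)); NE becomes LT.  */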
10517
10518 /* If this AND operation is really a ZERO_EXTEND from a narrower
10519 mode, the constant fits within that mode, and this is either an
10520 equality or unsigned comparison, try to do this comparison in
10521 the narrower mode. */
10522 if ((equality_comparison_p || unsigned_comparison_p)
10523 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10524 && (i = exact_log2 ((INTVAL (XEXP (op0, 1))
10525 & GET_MODE_MASK (mode))
10526 + 1)) >= 0
10527 && const_op >> i == 0
10528 && (tmode = mode_for_size (i, MODE_INT, 1)) != BLKmode)
10529 {
10530 op0 = gen_lowpart_for_combine (tmode, XEXP (op0, 0));
10531 continue;
10532 }
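 /* Illustrative example (added commentary): for
    (ltu (and (reg:SI 100) (const_int 255)) (const_int 10)) the mask 255
    is 2**8 - 1, so i == 8, and 10 >> 8 == 0; the comparison can
    therefore be done on the QImode low part of the register.  */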
10533
10534 /* If this is (and:M1 (subreg:M2 X 0) (const_int C1)) where C1 fits
10535 in both M1 and M2 and the SUBREG is either paradoxical or
10536 represents the low part, permute the SUBREG and the AND and
10537 try again. */
10538 if (GET_CODE (XEXP (op0, 0)) == SUBREG
10539 && (0
10540#ifdef WORD_REGISTER_OPERATIONS
10541 || ((mode_width
10542 > (GET_MODE_BITSIZE
10543 (GET_MODE (SUBREG_REG (XEXP (op0, 0))))))
10544 && mode_width <= BITS_PER_WORD)
10545#endif
10546 || ((mode_width
10547 <= (GET_MODE_BITSIZE
10548 (GET_MODE (SUBREG_REG (XEXP (op0, 0))))))
10549 && subreg_lowpart_p (XEXP (op0, 0))))
10550#ifndef WORD_REGISTER_OPERATIONS
10551 /* It is unsafe to commute the AND into the SUBREG if the SUBREG
10552 is paradoxical and WORD_REGISTER_OPERATIONS is not defined.
10553 As originally written the upper bits have a defined value
10554 due to the AND operation. However, if we commute the AND
10555 inside the SUBREG then they no longer have defined values
10556 and the meaning of the code has been changed. */
10557 && (GET_MODE_SIZE (GET_MODE (XEXP (op0, 0)))
10558 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (op0, 0)))))
10559#endif
10560 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10561 && mode_width <= HOST_BITS_PER_WIDE_INT
10562 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (XEXP (op0, 0))))
10563 <= HOST_BITS_PER_WIDE_INT)
10564 && (INTVAL (XEXP (op0, 1)) & ~ mask) == 0
10565 && 0 == (~ GET_MODE_MASK (GET_MODE (SUBREG_REG (XEXP (op0, 0))))
10566 & INTVAL (XEXP (op0, 1)))
10567 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (op0, 1)) != mask
10568 && ((unsigned HOST_WIDE_INT) INTVAL (XEXP (op0, 1))
10569 != GET_MODE_MASK (GET_MODE (SUBREG_REG (XEXP (op0, 0))))))
10570
10571 {
10572 op0
10573 = gen_lowpart_for_combine
10574 (mode,
10575 gen_binary (AND, GET_MODE (SUBREG_REG (XEXP (op0, 0))),
10576 SUBREG_REG (XEXP (op0, 0)), XEXP (op0, 1)));
10577 continue;
10578 }
10579
10580 break;
10581
10582 case ASHIFT:
10583 /* If we have (compare (ashift FOO N) (const_int C)) and
10584 the high order N bits of FOO (N+1 if an inequality comparison)
10585 are known to be zero, we can do this by comparing FOO with C
10586 shifted right N bits so long as the low-order N bits of C are
10587 zero. */
10588 if (GET_CODE (XEXP (op0, 1)) == CONST_INT
10589 && INTVAL (XEXP (op0, 1)) >= 0
10590 && ((INTVAL (XEXP (op0, 1)) + ! equality_comparison_p)
10591 < HOST_BITS_PER_WIDE_INT)
10592 && ((const_op
10593 & (((HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1))) - 1)) == 0)
10594 && mode_width <= HOST_BITS_PER_WIDE_INT
10595 && (nonzero_bits (XEXP (op0, 0), mode)
10596 & ~ (mask >> (INTVAL (XEXP (op0, 1))
10597 + ! equality_comparison_p))) == 0)
10598 {
10599 /* We must perform a logical shift, not an arithmetic one,
10600 as we want the top N bits of C to be zero. */
10601 unsigned HOST_WIDE_INT temp = const_op & GET_MODE_MASK (mode);
10602
10603 temp >>= INTVAL (XEXP (op0, 1));
10604 op1 = GEN_INT (trunc_int_for_mode (temp, mode));
10605 op0 = XEXP (op0, 0);
10606 continue;
10607 }
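 /* Worked instance (added commentary): if the top two bits of X are
    known zero, (eq (ashift X (const_int 2)) (const_int 12)) becomes
    (eq X (const_int 3)); the low two bits of 12 are zero, so shifting
    the constant right logically by 2 loses no information.  */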
10608
10609 /* If we are doing a sign bit comparison, it means we are testing
10610 a particular bit. Convert it to the appropriate AND. */
10611 if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 1)) == CONST_INT
10612 && mode_width <= HOST_BITS_PER_WIDE_INT)
10613 {
10614 op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
10615 ((HOST_WIDE_INT) 1
10616 << (mode_width - 1
10617 - INTVAL (XEXP (op0, 1)))));
10618 code = (code == LT ? NE : EQ);
10619 continue;
10620 }
10621
10622 /* If this is an equality comparison with zero and we are shifting
10623 the low bit to the sign bit, we can convert this to an AND of the
10624 low-order bit. */
10625 if (const_op == 0 && equality_comparison_p
10626 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10627 && INTVAL (XEXP (op0, 1)) == mode_width - 1)
10628 {
10629 op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
10630 (HOST_WIDE_INT) 1);
10631 continue;
10632 }
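 /* Illustrative example (added commentary): with mode_width == 32,
    (eq (ashift X (const_int 31)) (const_int 0)) depends only on the low
    bit of X, so it becomes (eq (and X (const_int 1)) (const_int 0)).  */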
10633 break;
10634
10635 case ASHIFTRT:
10636 /* If this is an equality comparison with zero, we can do this
10637 as a logical shift, which might be much simpler. */
10638 if (equality_comparison_p && const_op == 0
10639 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
10640 {
10641 op0 = simplify_shift_const (NULL_RTX, LSHIFTRT, mode,
10642 XEXP (op0, 0),
10643 INTVAL (XEXP (op0, 1)));
10644 continue;
10645 }
10646
10647 /* If OP0 is a sign extension and CODE is not an unsigned comparison,
10648 do the comparison in a narrower mode. */
10649 if (! unsigned_comparison_p
10650 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10651 && GET_CODE (XEXP (op0, 0)) == ASHIFT
10652 && XEXP (op0, 1) == XEXP (XEXP (op0, 0), 1)
10653 && (tmode = mode_for_size (mode_width - INTVAL (XEXP (op0, 1)),
10654 MODE_INT, 1)) != BLKmode
10655 && ((unsigned HOST_WIDE_INT) const_op <= GET_MODE_MASK (tmode)
10656 || ((unsigned HOST_WIDE_INT) - const_op
10657 <= GET_MODE_MASK (tmode))))
10658 {
10659 op0 = gen_lowpart_for_combine (tmode, XEXP (XEXP (op0, 0), 0));
10660 continue;
10661 }
10662
10663 /* Likewise if OP0 is a PLUS of a sign extension with a
10664 constant, which is usually represented with the PLUS
10665 between the shifts. */
10666 if (! unsigned_comparison_p
10667 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10668 && GET_CODE (XEXP (op0, 0)) == PLUS
10669 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
10670 && GET_CODE (XEXP (XEXP (op0, 0), 0)) == ASHIFT
10671 && XEXP (op0, 1) == XEXP (XEXP (XEXP (op0, 0), 0), 1)
10672 && (tmode = mode_for_size (mode_width - INTVAL (XEXP (op0, 1)),
10673 MODE_INT, 1)) != BLKmode
10674 && ((unsigned HOST_WIDE_INT) const_op <= GET_MODE_MASK (tmode)
10675 || ((unsigned HOST_WIDE_INT) - const_op
10676 <= GET_MODE_MASK (tmode))))
10677 {
10678 rtx inner = XEXP (XEXP (XEXP (op0, 0), 0), 0);
10679 rtx add_const = XEXP (XEXP (op0, 0), 1);
10680 rtx new_const = gen_binary (ASHIFTRT, GET_MODE (op0), add_const,
10681 XEXP (op0, 1));
10682
10683 op0 = gen_binary (PLUS, tmode,
10684 gen_lowpart_for_combine (tmode, inner),
10685 new_const);
10686 continue;
10687 }
10688
10689 /* ... fall through ... */
10690 case LSHIFTRT:
10691 /* If we have (compare (xshiftrt FOO N) (const_int C)) and
10692 the low order N bits of FOO are known to be zero, we can do this
10693 by comparing FOO with C shifted left N bits so long as no
10694 overflow occurs. */
10695 if (GET_CODE (XEXP (op0, 1)) == CONST_INT
10696 && INTVAL (XEXP (op0, 1)) >= 0
10697 && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
10698 && mode_width <= HOST_BITS_PER_WIDE_INT
10699 && (nonzero_bits (XEXP (op0, 0), mode)
10700 & (((HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1))) - 1)) == 0
10701 && (const_op == 0
10702 || (floor_log2 (const_op) + INTVAL (XEXP (op0, 1))
10703 < mode_width)))
10704 {
10705 const_op <<= INTVAL (XEXP (op0, 1));
10706 op1 = GEN_INT (const_op);
10707 op0 = XEXP (op0, 0);
10708 continue;
10709 }
10710
10711 /* If we are using this shift to extract just the sign bit, we
10712 can replace this with an LT or GE comparison. */
10713 if (const_op == 0
10714 && (equality_comparison_p || sign_bit_comparison_p)
10715 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10716 && INTVAL (XEXP (op0, 1)) == mode_width - 1)
10717 {
10718 op0 = XEXP (op0, 0);
10719 code = (code == NE || code == GT ? LT : GE);
10720 continue;
10721 }
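 /* Illustrative example (added commentary): in a 32-bit mode,
    (ne (lshiftrt X (const_int 31)) (const_int 0)) extracts just the
    sign bit and is therefore (lt X (const_int 0)); EQ likewise
    becomes GE.  */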
10722 break;
10723
10724 default:
10725 break;
10726 }
10727
10728 break;
10729 }
10730
10731 /* Now make any compound operations involved in this comparison. Then,
10732 check for an outermost SUBREG on OP0 that is not doing anything or is
10733 paradoxical. The latter case can only occur when it is known that the
10734 "extra" bits will be zero. Therefore, it is safe to remove the SUBREG.
10735 We can never remove a SUBREG for a non-equality comparison because the
10736 sign bit is in a different place in the underlying object. */
10737
10738 op0 = make_compound_operation (op0, op1 == const0_rtx ? COMPARE : SET);
10739 op1 = make_compound_operation (op1, SET);
10740
10741 if (GET_CODE (op0) == SUBREG && subreg_lowpart_p (op0)
10742 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
10743 && (code == NE || code == EQ)
10744 && ((GET_MODE_SIZE (GET_MODE (op0))
10745 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0))))))
10746 {
10747 op0 = SUBREG_REG (op0);
10748 op1 = gen_lowpart_for_combine (GET_MODE (op0), op1);
10749 }
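/* Illustrative example (added commentary): for an equality test such as
   (eq (subreg:DI (reg:SI 100) 0) (const_int 0)) where the paradoxical
   SUBREG's extra bits are known zero, the SUBREG is dropped and the
   comparison is done on (reg:SI 100), with OP1 narrowed to match.  */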
10750
10751 else if (GET_CODE (op0) == SUBREG && subreg_lowpart_p (op0)
10752 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
10753 && (code == NE || code == EQ)
10754 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0)))
10755 <= HOST_BITS_PER_WIDE_INT)
10756 && (nonzero_bits (SUBREG_REG (op0), GET_MODE (SUBREG_REG (op0)))
10757 & ~ GET_MODE_MASK (GET_MODE (op0))) == 0
10758 && (tem = gen_lowpart_for_combine (GET_MODE (SUBREG_REG (op0)),
10759 op1),
10760 (nonzero_bits (tem, GET_MODE (SUBREG_REG (op0)))
10761 & ~ GET_MODE_MASK (GET_MODE (op0))) == 0))
10762 op0 = SUBREG_REG (op0), op1 = tem;
10763
10764 /* We now do the opposite procedure: Some machines don't have compare
10765 insns in all modes. If OP0's mode is an integer mode smaller than a
10766 word and we can't do a compare in that mode, see if there is a larger
10767 mode for which we can do the compare. There are a number of cases in
10768 which we can use the wider mode. */
10769
10770 mode = GET_MODE (op0);
10771 if (mode != VOIDmode && GET_MODE_CLASS (mode) == MODE_INT
10772 && GET_MODE_SIZE (mode) < UNITS_PER_WORD
10773 && cmp_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
10774 for (tmode = GET_MODE_WIDER_MODE (mode);
10775 (tmode != VOIDmode
10776 && GET_MODE_BITSIZE (tmode) <= HOST_BITS_PER_WIDE_INT);
10777 tmode = GET_MODE_WIDER_MODE (tmode))
10778 if (cmp_optab->handlers[(int) tmode].insn_code != CODE_FOR_nothing)
10779 {
10780 /* If the only nonzero bits in OP0 and OP1 are those in the
10781 narrower mode and this is an equality or unsigned comparison,
10782 we can use the wider mode. Similarly for sign-extended
10783 values, in which case it is true for all comparisons. */
10784 if (((code == EQ || code == NE
10785 || code == GEU || code == GTU || code == LEU || code == LTU)
10786 && (nonzero_bits (op0, tmode) & ~ GET_MODE_MASK (mode)) == 0
10787 && (nonzero_bits (op1, tmode) & ~ GET_MODE_MASK (mode)) == 0)
10788 || ((num_sign_bit_copies (op0, tmode)
10789 > GET_MODE_BITSIZE (tmode) - GET_MODE_BITSIZE (mode))
10790 && (num_sign_bit_copies (op1, tmode)
10791 > GET_MODE_BITSIZE (tmode) - GET_MODE_BITSIZE (mode))))
10792 {
10793 /* If OP0 is an AND and we don't have an AND in MODE either,
10794 make a new AND in the proper mode. */
10795 if (GET_CODE (op0) == AND
10796 && (add_optab->handlers[(int) mode].insn_code
10797 == CODE_FOR_nothing))
10798 op0 = gen_binary (AND, tmode,
10799 gen_lowpart_for_combine (tmode,
10800 XEXP (op0, 0)),
10801 gen_lowpart_for_combine (tmode,
10802 XEXP (op0, 1)));
10803
10804 op0 = gen_lowpart_for_combine (tmode, op0);
10805 op1 = gen_lowpart_for_combine (tmode, op1);
10806 break;
10807 }
10808
10809 /* If this is a test for negative, we can make an explicit
10810 test of the sign bit. */
10811
10812 if (op1 == const0_rtx && (code == LT || code == GE)
10813 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
10814 {
10815 op0 = gen_binary (AND, tmode,
10816 gen_lowpart_for_combine (tmode, op0),
10817 GEN_INT ((HOST_WIDE_INT) 1
10818 << (GET_MODE_BITSIZE (mode) - 1)));
10819 code = (code == LT) ? NE : EQ;
10820 break;
10821 }
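 /* Sketch of the effect (added commentary; register number hypothetical):
    if SImode has no compare pattern but DImode does,
    (lt (reg:SI 100) (const_int 0)) can be tested in DImode as
    (ne (and (subreg:DI (reg:SI 100) 0) (const_int 2147483648))
    (const_int 0)), an explicit test of the SImode sign bit.  */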
10822 }
10823
10824#ifdef CANONICALIZE_COMPARISON
10825 /* If this machine only supports a subset of valid comparisons, see if we
10826 can convert an unsupported one into a supported one. */
10827 CANONICALIZE_COMPARISON (code, op0, op1);
10828#endif
10829
10830 *pop0 = op0;
10831 *pop1 = op1;
10832
10833 return code;
10834}
10835\f
10836/* Return 1 if we know that X, a comparison operation, is not operating
10837 on a floating-point value or is EQ or NE, meaning that we can safely
10838 reverse it. */
10839
10840static int
10841reversible_comparison_p (x)
10842 rtx x;
10843{
10844 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
10845 || flag_fast_math
10846 || GET_CODE (x) == NE || GET_CODE (x) == EQ)
10847 return 1;
10848
10849 switch (GET_MODE_CLASS (GET_MODE (XEXP (x, 0))))
10850 {
10851 case MODE_INT:
10852 case MODE_PARTIAL_INT:
10853 case MODE_COMPLEX_INT:
10854 return 1;
10855
10856 case MODE_CC:
10857 /* If the mode of the condition codes tells us that this is safe,
10858 we need look no further. */
10859 if (REVERSIBLE_CC_MODE (GET_MODE (XEXP (x, 0))))
10860 return 1;
10861
10862 /* Otherwise try and find where the condition codes were last set and
10863 use that. */
10864 x = get_last_value (XEXP (x, 0));
10865 return (x && GET_CODE (x) == COMPARE
10866 && ! FLOAT_MODE_P (GET_MODE (XEXP (x, 0))));
10867
10868 default:
10869 return 0;
10870 }
10871}
10872\f
10873/* Utility function for following routine. Called when X is part of a value
10874 being stored into reg_last_set_value. Sets reg_last_set_table_tick
10875 for each register mentioned. Similar to mention_regs in cse.c */
10876
10877static void
10878update_table_tick (x)
10879 rtx x;
10880{
10881 register enum rtx_code code = GET_CODE (x);
10882 register const char *fmt = GET_RTX_FORMAT (code);
10883 register int i;
10884
10885 if (code == REG)
10886 {
10887 int regno = REGNO (x);
10888 int endregno = regno + (regno < FIRST_PSEUDO_REGISTER
10889 ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
10890
10891 for (i = regno; i < endregno; i++)
10892 reg_last_set_table_tick[i] = label_tick;
10893
10894 return;
10895 }
10896
10897 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
10898 /* Note that we can't have an "E" in values stored; see
10899 get_last_value_validate. */
10900 if (fmt[i] == 'e')
10901 update_table_tick (XEXP (x, i));
10902}
10903
10904/* Record that REG is set to VALUE in insn INSN. If VALUE is zero, we
10905 are saying that the register is clobbered and we no longer know its
10906 value. If INSN is zero, don't update reg_last_set; this is only permitted
10907 with VALUE also zero and is used to invalidate the register. */
10908
10909static void
10910record_value_for_reg (reg, insn, value)
10911 rtx reg;
10912 rtx insn;
10913 rtx value;
10914{
10915 int regno = REGNO (reg);
10916 int endregno = regno + (regno < FIRST_PSEUDO_REGISTER
10917 ? HARD_REGNO_NREGS (regno, GET_MODE (reg)) : 1);
10918 int i;
10919
10920 /* If VALUE contains REG and we have a previous value for REG, substitute
10921 the previous value. */
10922 if (value && insn && reg_overlap_mentioned_p (reg, value))
10923 {
10924 rtx tem;
10925
10926 /* Set things up so get_last_value is allowed to see anything set up to
10927 our insn. */
10928 subst_low_cuid = INSN_CUID (insn);
10929 tem = get_last_value (reg);
10930
10931 /* If TEM is simply a binary operation with two CLOBBERs as operands,
10932 it isn't going to be useful and will take a lot of time to process,
10933 so just use the CLOBBER. */
10934
10935 if (tem)
10936 {
10937 if ((GET_RTX_CLASS (GET_CODE (tem)) == '2'
10938 || GET_RTX_CLASS (GET_CODE (tem)) == 'c')
10939 && GET_CODE (XEXP (tem, 0)) == CLOBBER
10940 && GET_CODE (XEXP (tem, 1)) == CLOBBER)
10941 tem = XEXP (tem, 0);
10942
10943 value = replace_rtx (copy_rtx (value), reg, tem);
10944 }
10945 }
10946
10947 /* For each register modified, show we don't know its value, that
10948 we don't know about its bitwise content, that its value has been
10949 updated, and that we don't know the location of the death of the
10950 register. */
10951 for (i = regno; i < endregno; i ++)
10952 {
10953 if (insn)
10954 reg_last_set[i] = insn;
10955 reg_last_set_value[i] = 0;
10956 reg_last_set_mode[i] = 0;
10957 reg_last_set_nonzero_bits[i] = 0;
10958 reg_last_set_sign_bit_copies[i] = 0;
10959 reg_last_death[i] = 0;
10960 }
10961
10962 /* Mark registers that are being referenced in this value. */
10963 if (value)
10964 update_table_tick (value);
10965
10966 /* Now update the status of each register being set.
10967 If someone is using this register in this block, set this register
10968 to invalid since we will get confused between the two lives in this
10969 basic block. This makes using this register always invalid. In cse, we
10970 scan the table to invalidate all entries using this register, but this
10971 is too much work for us. */
10972
10973 for (i = regno; i < endregno; i++)
10974 {
10975 reg_last_set_label[i] = label_tick;
10976 if (value && reg_last_set_table_tick[i] == label_tick)
10977 reg_last_set_invalid[i] = 1;
10978 else
10979 reg_last_set_invalid[i] = 0;
10980 }
10981
10982 /* The value being assigned might refer to X (like in "x++;"). In that
10983 case, we must replace it with (clobber (const_int 0)) to prevent
10984 infinite loops. */
10985 if (value && ! get_last_value_validate (&value, insn,
10986 reg_last_set_label[regno], 0))
10987 {
10988 value = copy_rtx (value);
10989 if (! get_last_value_validate (&value, insn,
10990 reg_last_set_label[regno], 1))
10991 value = 0;
10992 }
10993
10994 /* For the main register being modified, update the value, the mode, the
10995 nonzero bits, and the number of sign bit copies. */
10996
10997 reg_last_set_value[regno] = value;
10998
10999 if (value)
11000 {
11001 subst_low_cuid = INSN_CUID (insn);
11002 reg_last_set_mode[regno] = GET_MODE (reg);
11003 reg_last_set_nonzero_bits[regno] = nonzero_bits (value, GET_MODE (reg));
11004 reg_last_set_sign_bit_copies[regno]
11005 = num_sign_bit_copies (value, GET_MODE (reg));
11006 }
11007}
11008
11009 /* Called via note_stores from record_dead_and_set_regs to handle one
11010 SET or CLOBBER in an insn. DATA is the instruction in which the
11011 set is occurring. */
11012
11013static void
11014 record_dead_and_set_regs_1 (dest, setter, data)
11015 rtx dest, setter;
11016 void *data;
11017 {
11018 rtx record_dead_insn = (rtx) data;
11019
ca89d290
RK
11020 if (GET_CODE (dest) == SUBREG)
11021 dest = SUBREG_REG (dest);
11022
230d793d
RS
11023 if (GET_CODE (dest) == REG)
11024 {
11025 /* If we are setting the whole register, we know its value. Otherwise
11026 show that we don't know the value. We can handle SUBREG in
11027 some cases. */
11028 if (GET_CODE (setter) == SET && dest == SET_DEST (setter))
11029 record_value_for_reg (dest, record_dead_insn, SET_SRC (setter));
11030 else if (GET_CODE (setter) == SET
11031 && GET_CODE (SET_DEST (setter)) == SUBREG
11032 && SUBREG_REG (SET_DEST (setter)) == dest
11033 && GET_MODE_BITSIZE (GET_MODE (dest)) <= BITS_PER_WORD
11034 && subreg_lowpart_p (SET_DEST (setter)))
11035 record_value_for_reg (dest, record_dead_insn,
11036 gen_lowpart_for_combine (GET_MODE (dest),
11037 SET_SRC (setter)));
11038 else
11039 record_value_for_reg (dest, record_dead_insn, NULL_RTX);
11040 }
11041 else if (GET_CODE (dest) == MEM
11042 /* Ignore pushes, they clobber nothing. */
11043 && ! push_operand (dest, GET_MODE (dest)))
11044 mem_last_set = INSN_CUID (record_dead_insn);
11045}
11046
11047/* Update the records of when each REG was most recently set or killed
11048 for the things done by INSN. This is the last thing done in processing
11049 INSN in the combiner loop.
11050
11051 We update reg_last_set, reg_last_set_value, reg_last_set_mode,
11052 reg_last_set_nonzero_bits, reg_last_set_sign_bit_copies, reg_last_death,
11053 and also the similar information mem_last_set (which insn most recently
11054 modified memory) and last_call_cuid (which insn was the most recent
11055 subroutine call). */
11056
11057static void
11058record_dead_and_set_regs (insn)
11059 rtx insn;
11060{
11061 register rtx link;
11062 int i;
11063
11064 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
11065 {
11066 if (REG_NOTE_KIND (link) == REG_DEAD
11067 && GET_CODE (XEXP (link, 0)) == REG)
11068 {
11069 int regno = REGNO (XEXP (link, 0));
11070 int endregno
11071 = regno + (regno < FIRST_PSEUDO_REGISTER
11072 ? HARD_REGNO_NREGS (regno, GET_MODE (XEXP (link, 0)))
11073 : 1);
11074
11075 for (i = regno; i < endregno; i++)
11076 reg_last_death[i] = insn;
11077 }
230d793d 11078 else if (REG_NOTE_KIND (link) == REG_INC)
5f4f0e22 11079 record_value_for_reg (XEXP (link, 0), insn, NULL_RTX);
230d793d
RS
11080 }
11081
11082 if (GET_CODE (insn) == CALL_INSN)
11083 {
11084 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
11085 if (call_used_regs[i])
11086 {
11087 reg_last_set_value[i] = 0;
11088 reg_last_set_mode[i] = 0;
11089 reg_last_set_nonzero_bits[i] = 0;
11090 reg_last_set_sign_bit_copies[i] = 0;
11091 reg_last_death[i] = 0;
11092 }
11093
11094 last_call_cuid = mem_last_set = INSN_CUID (insn);
11095 }
11096
11097 note_stores (PATTERN (insn), record_dead_and_set_regs_1, insn);
11098 }
11099
11100/* If a SUBREG has the promoted bit set, it is in fact a property of the
11101 register present in the SUBREG, so for each such SUBREG go back and
11102 adjust nonzero and sign bit information of the registers that are
11103 known to have some zero/sign bits set.
11104
11105 This is needed because when combine blows the SUBREGs away, the
11106 information on zero/sign bits is lost and further combines can be
11107 missed because of that. */
11108
11109static void
11110record_promoted_value (insn, subreg)
11111 rtx insn;
11112 rtx subreg;
11113{
11114 rtx links, set;
11115 int regno = REGNO (SUBREG_REG (subreg));
11116 enum machine_mode mode = GET_MODE (subreg);
11117
11118 if (GET_MODE_BITSIZE (mode) >= HOST_BITS_PER_WIDE_INT)
11119 return;
11120
11121 for (links = LOG_LINKS (insn); links; )
11122 {
11123 insn = XEXP (links, 0);
11124 set = single_set (insn);
11125
11126 if (! set || GET_CODE (SET_DEST (set)) != REG
11127 || REGNO (SET_DEST (set)) != regno
11128 || GET_MODE (SET_DEST (set)) != GET_MODE (SUBREG_REG (subreg)))
11129 {
11130 links = XEXP (links, 1);
11131 continue;
11132 }
11133
11134 if (reg_last_set [regno] == insn)
11135 {
11136 if (SUBREG_PROMOTED_UNSIGNED_P (subreg))
11137 reg_last_set_nonzero_bits [regno] &= GET_MODE_MASK (mode);
11138 }
11139
11140 if (GET_CODE (SET_SRC (set)) == REG)
11141 {
11142 regno = REGNO (SET_SRC (set));
11143 links = LOG_LINKS (insn);
11144 }
11145 else
11146 break;
11147 }
11148}
11149
11150/* Scan X for promoted SUBREGs. For each one found,
11151 note what it implies to the registers used in it. */
11152
11153static void
11154check_promoted_subreg (insn, x)
11155 rtx insn;
11156 rtx x;
11157{
11158 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
11159 && GET_CODE (SUBREG_REG (x)) == REG)
11160 record_promoted_value (insn, x);
11161 else
11162 {
11163 const char *format = GET_RTX_FORMAT (GET_CODE (x));
11164 int i, j;
11165
11166 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (x)); i++)
11167 switch (format [i])
11168 {
11169 case 'e':
11170 check_promoted_subreg (insn, XEXP (x, i));
11171 break;
11172 case 'V':
11173 case 'E':
11174 if (XVEC (x, i) != 0)
11175 for (j = 0; j < XVECLEN (x, i); j++)
11176 check_promoted_subreg (insn, XVECEXP (x, i, j));
11177 break;
11178 }
11179 }
11180}
11181\f
11182/* Utility routine for the following function. Verify that all the registers
11183 mentioned in *LOC are valid when *LOC was part of a value set when
11184 label_tick == TICK. Return 0 if some are not.
11185
11186 If REPLACE is non-zero, replace the invalid reference with
11187 (clobber (const_int 0)) and return 1. This replacement is useful because
11188 we often can get useful information about the form of a value (e.g., if
11189 it was produced by a shift that always produces -1 or 0) even though
11190 we don't know exactly what registers it was produced from. */
11191
11192static int
11193 get_last_value_validate (loc, insn, tick, replace)
11194 rtx *loc;
11195 rtx insn;
11196 int tick;
11197 int replace;
11198{
11199 rtx x = *loc;
11200 const char *fmt = GET_RTX_FORMAT (GET_CODE (x));
11201 int len = GET_RTX_LENGTH (GET_CODE (x));
11202 int i;
11203
11204 if (GET_CODE (x) == REG)
11205 {
11206 int regno = REGNO (x);
11207 int endregno = regno + (regno < FIRST_PSEUDO_REGISTER
11208 ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
11209 int j;
11210
11211 for (j = regno; j < endregno; j++)
11212 if (reg_last_set_invalid[j]
11213 /* If this is a pseudo-register that was only set once and not
11214 live at the beginning of the function, it is always valid. */
11215 || (! (regno >= FIRST_PSEUDO_REGISTER
11216 && REG_N_SETS (regno) == 1
11217 && ! REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno))
11218 && reg_last_set_label[j] > tick))
11219 {
11220 if (replace)
11221 *loc = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
11222 return replace;
11223 }
11224
11225 return 1;
11226 }
11227 /* If this is a memory reference, make sure that there were
11228 no stores after it that might have clobbered the value. We don't
11229 have alias info, so we assume any store invalidates it. */
11230 else if (GET_CODE (x) == MEM && ! RTX_UNCHANGING_P (x)
11231 && INSN_CUID (insn) <= mem_last_set)
11232 {
11233 if (replace)
11234 *loc = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
11235 return replace;
11236 }
11237
11238 for (i = 0; i < len; i++)
11239 if ((fmt[i] == 'e'
11240 && get_last_value_validate (&XEXP (x, i), insn, tick, replace) == 0)
11241 /* Don't bother with these. They shouldn't occur anyway. */
11242 || fmt[i] == 'E')
11243 return 0;
11244
11245 /* If we haven't found a reason for it to be invalid, it is valid. */
11246 return 1;
11247}
11248
11249/* Get the last value assigned to X, if known. Some registers
11250 in the value may be replaced with (clobber (const_int 0)) if their value
11251 is no longer known reliably. */
11252
11253static rtx
11254get_last_value (x)
11255 rtx x;
11256{
11257 int regno;
11258 rtx value;
11259
11260 /* If this is a non-paradoxical SUBREG, get the value of its operand and
11261 then convert it to the desired mode. If this is a paradoxical SUBREG,
11262 we cannot predict what values the "extra" bits might have. */
11263 if (GET_CODE (x) == SUBREG
11264 && subreg_lowpart_p (x)
11265 && (GET_MODE_SIZE (GET_MODE (x))
11266 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
11267 && (value = get_last_value (SUBREG_REG (x))) != 0)
11268 return gen_lowpart_for_combine (GET_MODE (x), value);
11269
11270 if (GET_CODE (x) != REG)
11271 return 0;
11272
11273 regno = REGNO (x);
11274 value = reg_last_set_value[regno];
11275
11276 /* If we don't have a value, or if it isn't for this basic block and
11277 it's either a hard register, set more than once, or it's live
11278 at the beginning of the function, return 0.
11279
11280 Because if it's not live at the beginning of the function then the reg
11281 is always set before being used (is never used without being set).
11282 And, if it's set only once, and it's always set before use, then all
11283 uses must have the same last value, even if it's not from this basic
11284 block. */
11285
11286 if (value == 0
11287 || (reg_last_set_label[regno] != label_tick
11288 && (regno < FIRST_PSEUDO_REGISTER
11289 || REG_N_SETS (regno) != 1
11290 || REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, regno))))
11291 return 0;
11292
11293 /* If the value was set in a later insn than the ones we are processing,
11294 we can't use it even if the register was only set once. */
11295 if (INSN_CUID (reg_last_set[regno]) >= subst_low_cuid)
11296 return 0;
11297
11298 /* If the value has all its registers valid, return it. */
11299 if (get_last_value_validate (&value, reg_last_set[regno],
11300 reg_last_set_label[regno], 0))
11301 return value;
11302
11303 /* Otherwise, make a copy and replace any invalid register with
11304 (clobber (const_int 0)). If that fails for some reason, return 0. */
11305
11306 value = copy_rtx (value);
11307 if (get_last_value_validate (&value, reg_last_set[regno],
11308 reg_last_set_label[regno], 1))
11309 return value;
11310
11311 return 0;
11312}
11313\f
11314/* Return nonzero if expression X refers to a REG or to memory
11315 that is set in an instruction more recent than FROM_CUID. */
11316
11317static int
11318use_crosses_set_p (x, from_cuid)
11319 register rtx x;
11320 int from_cuid;
11321{
11322 register const char *fmt;
11323 register int i;
11324 register enum rtx_code code = GET_CODE (x);
11325
11326 if (code == REG)
11327 {
11328 register int regno = REGNO (x);
11329 int endreg = regno + (regno < FIRST_PSEUDO_REGISTER
11330 ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
11331
11332#ifdef PUSH_ROUNDING
11333 /* Don't allow uses of the stack pointer to be moved,
11334 because we don't know whether the move crosses a push insn. */
11335 if (regno == STACK_POINTER_REGNUM)
11336 return 1;
11337#endif
11338 for (;regno < endreg; regno++)
11339 if (reg_last_set[regno]
11340 && INSN_CUID (reg_last_set[regno]) > from_cuid)
11341 return 1;
11342 return 0;
230d793d
RS
11343 }
11344
11345 if (code == MEM && mem_last_set > from_cuid)
11346 return 1;
11347
11348 fmt = GET_RTX_FORMAT (code);
11349
11350 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
11351 {
11352 if (fmt[i] == 'E')
11353 {
11354 register int j;
11355 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
11356 if (use_crosses_set_p (XVECEXP (x, i, j), from_cuid))
11357 return 1;
11358 }
11359 else if (fmt[i] == 'e'
11360 && use_crosses_set_p (XEXP (x, i), from_cuid))
11361 return 1;
11362 }
11363 return 0;
11364}
11365\f
11366/* Define three variables used for communication between the following
11367 routines. */
11368
11369static int reg_dead_regno, reg_dead_endregno;
11370static int reg_dead_flag;
11371
11372/* Function called via note_stores from reg_dead_at_p.
11373
11374 If DEST is within [reg_dead_regno, reg_dead_endregno), set
11375 reg_dead_flag to 1 if X is a CLOBBER and to -1 if it is a SET. */
11376
11377static void
11378 reg_dead_at_p_1 (dest, x, data)
11379 rtx dest;
11380 rtx x;
11381 void *data ATTRIBUTE_UNUSED;
11382{
11383 int regno, endregno;
11384
11385 if (GET_CODE (dest) != REG)
11386 return;
11387
11388 regno = REGNO (dest);
11389 endregno = regno + (regno < FIRST_PSEUDO_REGISTER
11390 ? HARD_REGNO_NREGS (regno, GET_MODE (dest)) : 1);
11391
11392 if (reg_dead_endregno > regno && reg_dead_regno < endregno)
11393 reg_dead_flag = (GET_CODE (x) == CLOBBER) ? 1 : -1;
11394}
11395
11396/* Return non-zero if REG is known to be dead at INSN.
11397
11398 We scan backwards from INSN. If we hit a REG_DEAD note or a CLOBBER
11399 referencing REG, it is dead. If we hit a SET referencing REG, it is
11400 live. Otherwise, see if it is live or dead at the start of the basic
11401 block we are in. Hard regs marked as being live in NEWPAT_USED_REGS
11402 must be assumed to be always live. */
11403
11404static int
11405reg_dead_at_p (reg, insn)
11406 rtx reg;
11407 rtx insn;
11408{
11409 int block, i;
11410
11411 /* Set variables for reg_dead_at_p_1. */
11412 reg_dead_regno = REGNO (reg);
11413 reg_dead_endregno = reg_dead_regno + (reg_dead_regno < FIRST_PSEUDO_REGISTER
11414 ? HARD_REGNO_NREGS (reg_dead_regno,
11415 GET_MODE (reg))
11416 : 1);
11417
11418 reg_dead_flag = 0;
11419
11420 /* Check that reg isn't mentioned in NEWPAT_USED_REGS. */
11421 if (reg_dead_regno < FIRST_PSEUDO_REGISTER)
11422 {
11423 for (i = reg_dead_regno; i < reg_dead_endregno; i++)
11424 if (TEST_HARD_REG_BIT (newpat_used_regs, i))
11425 return 0;
11426 }
11427
230d793d
RS
11428 /* Scan backwards until we find a REG_DEAD note, SET, CLOBBER, label, or
11429 beginning of function. */
11430 for (; insn && GET_CODE (insn) != CODE_LABEL && GET_CODE (insn) != BARRIER;
11431 insn = prev_nonnote_insn (insn))
11432 {
11433 note_stores (PATTERN (insn), reg_dead_at_p_1, NULL);
11434 if (reg_dead_flag)
11435 return reg_dead_flag == 1 ? 1 : 0;
11436
11437 if (find_regno_note (insn, REG_DEAD, reg_dead_regno))
11438 return 1;
11439 }
11440
11441 /* Get the basic block number that we were in. */
11442 if (insn == 0)
11443 block = 0;
11444 else
11445 {
11446 for (block = 0; block < n_basic_blocks; block++)
11447 if (insn == BLOCK_HEAD (block))
11448 break;
11449
11450 if (block == n_basic_blocks)
11451 return 0;
11452 }
11453
11454 for (i = reg_dead_regno; i < reg_dead_endregno; i++)
11455 if (REGNO_REG_SET_P (BASIC_BLOCK (block)->global_live_at_start, i))
11456 return 0;
11457
11458 return 1;
11459}
11460\f
11461/* Note hard registers in X that are used. This code is similar to
11462 that in flow.c, but much simpler since we don't care about pseudos. */
11463
11464static void
11465mark_used_regs_combine (x)
11466 rtx x;
11467{
11468 register RTX_CODE code = GET_CODE (x);
11469 register int regno;
11470 int i;
11471
11472 switch (code)
11473 {
11474 case LABEL_REF:
11475 case SYMBOL_REF:
11476 case CONST_INT:
11477 case CONST:
11478 case CONST_DOUBLE:
11479 case PC:
11480 case ADDR_VEC:
11481 case ADDR_DIFF_VEC:
11482 case ASM_INPUT:
11483#ifdef HAVE_cc0
11484 /* CC0 must die in the insn after it is set, so we don't need to take
11485 special note of it here. */
11486 case CC0:
11487#endif
11488 return;
11489
11490 case CLOBBER:
11491 /* If we are clobbering a MEM, mark any hard registers inside the
11492 address as used. */
11493 if (GET_CODE (XEXP (x, 0)) == MEM)
11494 mark_used_regs_combine (XEXP (XEXP (x, 0), 0));
11495 return;
11496
11497 case REG:
11498 regno = REGNO (x);
11499 /* A hard reg in a wide mode may really be multiple registers.
11500 If so, mark all of them just like the first. */
11501 if (regno < FIRST_PSEUDO_REGISTER)
11502 {
11503 /* None of this applies to the stack, frame or arg pointers */
11504 if (regno == STACK_POINTER_REGNUM
11505#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
11506 || regno == HARD_FRAME_POINTER_REGNUM
11507#endif
11508#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
11509 || (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
11510#endif
11511 || regno == FRAME_POINTER_REGNUM)
11512 return;
11513
11514 i = HARD_REGNO_NREGS (regno, GET_MODE (x));
11515 while (i-- > 0)
11516 SET_HARD_REG_BIT (newpat_used_regs, regno + i);
11517 }
11518 return;
11519
11520 case SET:
11521 {
11522 /* If setting a MEM, or a SUBREG of a MEM, then note any hard regs in
11523 the address. */
11524 register rtx testreg = SET_DEST (x);
11525
11526 while (GET_CODE (testreg) == SUBREG
11527 || GET_CODE (testreg) == ZERO_EXTRACT
11528 || GET_CODE (testreg) == SIGN_EXTRACT
11529 || GET_CODE (testreg) == STRICT_LOW_PART)
11530 testreg = XEXP (testreg, 0);
11531
11532 if (GET_CODE (testreg) == MEM)
11533 mark_used_regs_combine (XEXP (testreg, 0));
11534
11535 mark_used_regs_combine (SET_SRC (x));
11536 }
11537 return;
11538
11539 default:
11540 break;
11541 }
11542
11543 /* Recursively scan the operands of this expression. */
11544
11545 {
11546 register const char *fmt = GET_RTX_FORMAT (code);
11547
11548 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
11549 {
11550 if (fmt[i] == 'e')
11551 mark_used_regs_combine (XEXP (x, i));
11552 else if (fmt[i] == 'E')
11553 {
11554 register int j;
11555
11556 for (j = 0; j < XVECLEN (x, i); j++)
11557 mark_used_regs_combine (XVECEXP (x, i, j));
11558 }
11559 }
11560 }
11561}
11562
11563\f
11564/* Remove register number REGNO from the dead registers list of INSN.
11565
11566 Return the note used to record the death, if there was one. */
11567
11568rtx
11569remove_death (regno, insn)
11570 int regno;
11571 rtx insn;
11572{
11573 register rtx note = find_regno_note (insn, REG_DEAD, regno);
11574
11575 if (note)
11576 {
11577 REG_N_DEATHS (regno)--;
11578 remove_note (insn, note);
11579 }
11580
11581 return note;
11582}
11583
11584/* For each register (hardware or pseudo) used within expression X, if its
11585 death is in an instruction with cuid between FROM_CUID (inclusive) and
11586 TO_INSN (exclusive), put a REG_DEAD note for that register in the
11587 list headed by PNOTES.
11588
11589 That said, don't move registers killed by maybe_kill_insn.
11590
11591 This is done when X is being merged by combination into TO_INSN. These
11592 notes will then be distributed as needed. */
11593
11594static void
6eb12cef 11595move_deaths (x, maybe_kill_insn, from_cuid, to_insn, pnotes)
230d793d 11596 rtx x;
6eb12cef 11597 rtx maybe_kill_insn;
230d793d
RS
11598 int from_cuid;
11599 rtx to_insn;
11600 rtx *pnotes;
11601{
11602 register const char *fmt;
11603 register int len, i;
11604 register enum rtx_code code = GET_CODE (x);
11605
11606 if (code == REG)
11607 {
11608 register int regno = REGNO (x);
11609 register rtx where_dead = reg_last_death[regno];
11610 register rtx before_dead, after_dead;
11611
11612 /* Don't move the register if it gets killed in between from and to */
11613 if (maybe_kill_insn && reg_set_p (x, maybe_kill_insn)
11614 && !reg_referenced_p (x, maybe_kill_insn))
11615 return;
11616
11617 /* WHERE_DEAD could be a USE insn made by combine, so first we
11618 make sure that we have insns with valid INSN_CUID values. */
11619 before_dead = where_dead;
11620 while (before_dead && INSN_UID (before_dead) > max_uid_cuid)
11621 before_dead = PREV_INSN (before_dead);
11622 after_dead = where_dead;
11623 while (after_dead && INSN_UID (after_dead) > max_uid_cuid)
11624 after_dead = NEXT_INSN (after_dead);
11625
11626 if (before_dead && after_dead
11627 && INSN_CUID (before_dead) >= from_cuid
11628 && (INSN_CUID (after_dead) < INSN_CUID (to_insn)
11629 || (where_dead != after_dead
11630 && INSN_CUID (after_dead) == INSN_CUID (to_insn))))
11631 {
11632 rtx note = remove_death (regno, where_dead);
11633
11634 /* It is possible for the call above to return 0. This can occur
11635 when reg_last_death points to I2 or I1 that we combined with.
11636 In that case make a new note.
11637
11638 We must also check for the case where X is a hard register
11639 and NOTE is a death note for a range of hard registers
11640 including X. In that case, we must put REG_DEAD notes for
11641 the remaining registers in place of NOTE. */
11642
11643 if (note != 0 && regno < FIRST_PSEUDO_REGISTER
11644 && (GET_MODE_SIZE (GET_MODE (XEXP (note, 0)))
11645 > GET_MODE_SIZE (GET_MODE (x))))
11646 {
11647 int deadregno = REGNO (XEXP (note, 0));
11648 int deadend
11649 = (deadregno + HARD_REGNO_NREGS (deadregno,
11650 GET_MODE (XEXP (note, 0))));
11651 int ourend = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
11652 int i;
11653
11654 for (i = deadregno; i < deadend; i++)
11655 if (i < regno || i >= ourend)
11656 REG_NOTES (where_dead)
11657 = gen_rtx_EXPR_LIST (REG_DEAD,
11658 gen_rtx_REG (reg_raw_mode[i], i),
11659 REG_NOTES (where_dead));
11660 }
11661 /* If we didn't find any note, or if we found a REG_DEAD note that
11662 covers only part of the given reg, and we have a multi-reg hard
11663 register, then to be safe we must check for REG_DEAD notes
11664 for each register other than the first. They could have
11665 their own REG_DEAD notes lying around. */
11666 else if ((note == 0
11667 || (note != 0
11668 && (GET_MODE_SIZE (GET_MODE (XEXP (note, 0)))
11669 < GET_MODE_SIZE (GET_MODE (x)))))
11670 && regno < FIRST_PSEUDO_REGISTER
11671 && HARD_REGNO_NREGS (regno, GET_MODE (x)) > 1)
11672 {
11673 int ourend = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
11674 int i, offset;
11675 rtx oldnotes = 0;
11676
11677 if (note)
11678 offset = HARD_REGNO_NREGS (regno, GET_MODE (XEXP (note, 0)));
11679 else
11680 offset = 1;
11681
11682 for (i = regno + offset; i < ourend; i++)
11683 move_deaths (gen_rtx_REG (reg_raw_mode[i], i),
11684 maybe_kill_insn, from_cuid, to_insn, &oldnotes);
11685 }
11686
11687 if (note != 0 && GET_MODE (XEXP (note, 0)) == GET_MODE (x))
11688 {
11689 XEXP (note, 1) = *pnotes;
11690 *pnotes = note;
11691 }
11692 else
11693 *pnotes = gen_rtx_EXPR_LIST (REG_DEAD, x, *pnotes);
11694
11695 REG_N_DEATHS (regno)++;
11696 }
11697
11698 return;
11699 }
11700
11701 else if (GET_CODE (x) == SET)
11702 {
11703 rtx dest = SET_DEST (x);
11704
11705 move_deaths (SET_SRC (x), maybe_kill_insn, from_cuid, to_insn, pnotes);
11706
11707 /* In the case of a ZERO_EXTRACT, a STRICT_LOW_PART, or a SUBREG
11708 that accesses one word of a multi-word item, some
11709 piece of every register in the expression is used by
11710 this insn, so remove any old death. */
11711
11712 if (GET_CODE (dest) == ZERO_EXTRACT
11713 || GET_CODE (dest) == STRICT_LOW_PART
11714 || (GET_CODE (dest) == SUBREG
11715 && (((GET_MODE_SIZE (GET_MODE (dest))
11716 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
11717 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
11718 + UNITS_PER_WORD - 1) / UNITS_PER_WORD))))
11719 {
11720 move_deaths (dest, maybe_kill_insn, from_cuid, to_insn, pnotes);
11721 return;
11722 }
11723
11724 /* If this is some other SUBREG, we know it replaces the entire
11725 value, so use that as the destination. */
11726 if (GET_CODE (dest) == SUBREG)
11727 dest = SUBREG_REG (dest);
11728
11729 /* If this is a MEM, adjust deaths of anything used in the address.
11730 For a REG (the only other possibility), the entire value is
11731 being replaced so the old value is not used in this insn. */
11732
11733 if (GET_CODE (dest) == MEM)
11734 move_deaths (XEXP (dest, 0), maybe_kill_insn, from_cuid,
11735 to_insn, pnotes);
11736 return;
11737 }
11738
11739 else if (GET_CODE (x) == CLOBBER)
11740 return;
11741
11742 len = GET_RTX_LENGTH (code);
11743 fmt = GET_RTX_FORMAT (code);
11744
11745 for (i = 0; i < len; i++)
11746 {
11747 if (fmt[i] == 'E')
11748 {
11749 register int j;
11750 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
11751 move_deaths (XVECEXP (x, i, j), maybe_kill_insn, from_cuid,
11752 to_insn, pnotes);
11753 }
11754 else if (fmt[i] == 'e')
11755 move_deaths (XEXP (x, i), maybe_kill_insn, from_cuid, to_insn, pnotes);
11756 }
11757}
11758\f
11759/* Return 1 if X is the target of a bit-field assignment in BODY, the
11760 pattern of an insn. X must be a REG. */
11761
11762static int
11763reg_bitfield_target_p (x, body)
11764 rtx x;
11765 rtx body;
11766{
11767 int i;
11768
11769 if (GET_CODE (body) == SET)
11770 {
11771 rtx dest = SET_DEST (body);
11772 rtx target;
11773 int regno, tregno, endregno, endtregno;
11774
11775 if (GET_CODE (dest) == ZERO_EXTRACT)
11776 target = XEXP (dest, 0);
11777 else if (GET_CODE (dest) == STRICT_LOW_PART)
11778 target = SUBREG_REG (XEXP (dest, 0));
11779 else
11780 return 0;
11781
11782 if (GET_CODE (target) == SUBREG)
11783 target = SUBREG_REG (target);
11784
11785 if (GET_CODE (target) != REG)
11786 return 0;
11787
11788 tregno = REGNO (target), regno = REGNO (x);
11789 if (tregno >= FIRST_PSEUDO_REGISTER || regno >= FIRST_PSEUDO_REGISTER)
11790 return target == x;
11791
11792 endtregno = tregno + HARD_REGNO_NREGS (tregno, GET_MODE (target));
11793 endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
11794
11795 return endregno > tregno && regno < endtregno;
11796 }
11797
11798 else if (GET_CODE (body) == PARALLEL)
11799 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
11800 if (reg_bitfield_target_p (x, XVECEXP (body, 0, i)))
11801 return 1;
11802
11803 return 0;
11804}
11805\f
11806/* Given a chain of REG_NOTES originally from FROM_INSN, try to place them
11807 as appropriate. I3 and I2 are the insns resulting from the combination
11808 insns including FROM (I2 may be zero).
11809
11810 ELIM_I2 and ELIM_I1 are either zero or registers that we know will
11811 not need REG_DEAD notes because they are being substituted for. This
11812 saves searching in the most common cases.
11813
11814 Each note in the list is either ignored or placed on some insns, depending
11815 on the type of note. */
11816
11817static void
11818distribute_notes (notes, from_insn, i3, i2, elim_i2, elim_i1)
11819 rtx notes;
11820 rtx from_insn;
11821 rtx i3, i2;
11822 rtx elim_i2, elim_i1;
11823{
11824 rtx note, next_note;
11825 rtx tem;
11826
11827 for (note = notes; note; note = next_note)
11828 {
11829 rtx place = 0, place2 = 0;
11830
11831 /* If this NOTE references a pseudo register, ensure it references
11832 the latest copy of that register. */
11833 if (XEXP (note, 0) && GET_CODE (XEXP (note, 0)) == REG
11834 && REGNO (XEXP (note, 0)) >= FIRST_PSEUDO_REGISTER)
11835 XEXP (note, 0) = regno_reg_rtx[REGNO (XEXP (note, 0))];
11836
11837 next_note = XEXP (note, 1);
11838 switch (REG_NOTE_KIND (note))
11839 {
11840 case REG_BR_PROB:
11841 case REG_EXEC_COUNT:
11842 /* Doesn't matter much where we put this, as long as it's somewhere.
11843 It is preferable to keep these notes on branches, which is most
11844 likely to be i3. */
11845 place = i3;
11846 break;
11847
11848 case REG_EH_REGION:
11849 case REG_EH_RETHROW:
11850 /* These notes must remain with the call. It should not be
11851 possible for both I2 and I3 to be a call. */
11852 if (GET_CODE (i3) == CALL_INSN)
11853 place = i3;
11854 else if (i2 && GET_CODE (i2) == CALL_INSN)
11855 place = i2;
11856 else
11857 abort ();
11858 break;
11859
230d793d 11860 case REG_UNUSED:
07d0cbdd 11861 /* Any clobbers for i3 may still exist, and so we must process
176c9e6b
JW
11862 REG_UNUSED notes from that insn.
11863
11864 Any clobbers from i2 or i1 can only exist if they were added by
11865 recog_for_combine. In that case, recog_for_combine created the
11866 necessary REG_UNUSED notes. Trying to keep any original
11867 REG_UNUSED notes from these insns can cause incorrect output
11868 if it is for the same register as the original i3 dest.
11869 In that case, we will notice that the register is set in i3,
11870 and then add a REG_UNUSED note for the destination of i3, which
11871 is wrong. However, it is possible to have REG_UNUSED notes from
11872 i2 or i1 for register which were both used and clobbered, so
11873 we keep notes from i2 or i1 if they will turn into REG_DEAD
11874 notes. */
11875
11876 /* If this register is set or clobbered in I3, put the note there
11877 unless there is one already. */
11878 if (reg_set_p (XEXP (note, 0), PATTERN (i3)))
11879 {
11880 if (from_insn != i3)
11881 break;
11882
11883 if (! (GET_CODE (XEXP (note, 0)) == REG
11884 ? find_regno_note (i3, REG_UNUSED, REGNO (XEXP (note, 0)))
11885 : find_reg_note (i3, REG_UNUSED, XEXP (note, 0))))
11886 place = i3;
11887 }
11888 /* Otherwise, if this register is used by I3, then this register
11889 now dies here, so we must put a REG_DEAD note here unless there
11890 is one already. */
11891 else if (reg_referenced_p (XEXP (note, 0), PATTERN (i3))
11892 && ! (GET_CODE (XEXP (note, 0)) == REG
11893 ? find_regno_note (i3, REG_DEAD, REGNO (XEXP (note, 0)))
11894 : find_reg_note (i3, REG_DEAD, XEXP (note, 0))))
11895 {
11896 PUT_REG_NOTE_KIND (note, REG_DEAD);
11897 place = i3;
11898 }
11899 break;
11900
11901 case REG_EQUAL:
11902 case REG_EQUIV:
11903 case REG_NONNEG:
11904 case REG_NOALIAS:
11905 /* These notes say something about results of an insn. We can
11906 only support them if they used to be on I3 in which case they
11907 remain on I3. Otherwise they are ignored.
11908
11909 If the note refers to an expression that is not a constant, we
11910 must also ignore the note since we cannot tell whether the
11911 equivalence is still true. It might be possible to do
11912 slightly better than this (we only have a problem if I2DEST
11913 or I1DEST is present in the expression), but it doesn't
11914 seem worth the trouble. */
11915
11916 if (from_insn == i3
11917 && (XEXP (note, 0) == 0 || CONSTANT_P (XEXP (note, 0))))
230d793d
RS
11918 place = i3;
11919 break;
11920
11921 case REG_INC:
11922 case REG_NO_CONFLICT:
11923 /* These notes say something about how a register is used. They must
11924 be present on any use of the register in I2 or I3. */
11925 if (reg_mentioned_p (XEXP (note, 0), PATTERN (i3)))
11926 place = i3;
11927
11928 if (i2 && reg_mentioned_p (XEXP (note, 0), PATTERN (i2)))
11929 {
11930 if (place)
11931 place2 = i2;
11932 else
11933 place = i2;
11934 }
11935 break;
11936
11937 case REG_LABEL:
11938 /* This can show up in several ways -- either directly in the
11939 pattern, or hidden off in the constant pool with (or without?)
11940 a REG_EQUAL note. */
11941 /* ??? Ignore the without-reg_equal-note problem for now. */
11942 if (reg_mentioned_p (XEXP (note, 0), PATTERN (i3))
11943 || ((tem = find_reg_note (i3, REG_EQUAL, NULL_RTX))
11944 && GET_CODE (XEXP (tem, 0)) == LABEL_REF
11945 && XEXP (XEXP (tem, 0), 0) == XEXP (note, 0)))
11946 place = i3;
11947
11948 if (i2
11949 && (reg_mentioned_p (XEXP (note, 0), PATTERN (i2))
11950 || ((tem = find_reg_note (i2, REG_EQUAL, NULL_RTX))
11951 && GET_CODE (XEXP (tem, 0)) == LABEL_REF
11952 && XEXP (XEXP (tem, 0), 0) == XEXP (note, 0))))
11953 {
11954 if (place)
11955 place2 = i2;
11956 else
11957 place = i2;
11958 }
11959 break;
11960
11961 case REG_WAS_0:
11962 /* It is too much trouble to try to see if this note is still
11963 correct in all situations. It is better to simply delete it. */
11964 break;
11965
11966 case REG_RETVAL:
11967 /* If the insn previously containing this note still exists,
11968 put it back where it was. Otherwise move it to the previous
11969 insn. Adjust the corresponding REG_LIBCALL note. */
11970 if (GET_CODE (from_insn) != NOTE)
11971 place = from_insn;
11972 else
11973 {
11974 tem = find_reg_note (XEXP (note, 0), REG_LIBCALL, NULL_RTX);
11975 place = prev_real_insn (from_insn);
11976 if (tem && place)
11977 XEXP (tem, 0) = place;
11978 }
11979 break;
11980
11981 case REG_LIBCALL:
11982 /* This is handled similarly to REG_RETVAL. */
11983 if (GET_CODE (from_insn) != NOTE)
11984 place = from_insn;
11985 else
11986 {
11987 tem = find_reg_note (XEXP (note, 0), REG_RETVAL, NULL_RTX);
11988 place = next_real_insn (from_insn);
11989 if (tem && place)
11990 XEXP (tem, 0) = place;
11991 }
11992 break;
11993
        case REG_DEAD:
          /* If the register is used as an input in I3, it dies there.
             Similarly for I2, if it is non-zero and adjacent to I3.

             If the register is not used as an input in either I3 or I2
             and it is not one of the registers we were supposed to eliminate,
             there are two possibilities.  We might have a non-adjacent I2
             or we might have somehow eliminated an additional register
             from a computation.  For example, we might have had A & B where
             we discover that B will always be zero.  In this case we will
             eliminate the reference to A.

             In both cases, we must search to see if we can find a previous
             use of A and put the death note there.  */

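          /* Concretely (hypothetical insns): if I2 was "r100 = r101 & r102"
             and combine proved that r102 is always zero, the use of r101
             disappears, so a REG_DEAD note for r101 must be moved back
             from I2 to some earlier insn that still uses r101.  */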
          if (from_insn
              && GET_CODE (from_insn) == CALL_INSN
              && find_reg_fusage (from_insn, USE, XEXP (note, 0)))
            place = from_insn;
          else if (reg_referenced_p (XEXP (note, 0), PATTERN (i3)))
            place = i3;
          else if (i2 != 0 && next_nonnote_insn (i2) == i3
                   && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
            place = i2;

          if (XEXP (note, 0) == elim_i2 || XEXP (note, 0) == elim_i1)
            break;

          if (place == 0)
            {
              basic_block bb = BASIC_BLOCK (this_basic_block);

              for (tem = PREV_INSN (i3); place == 0; tem = PREV_INSN (tem))
                {
                  if (GET_RTX_CLASS (GET_CODE (tem)) != 'i')
                    {
                      if (tem == bb->head)
                        break;
                      continue;
                    }

                  /* If the register is being set at TEM, see if that is all
                     TEM is doing.  If so, delete TEM.  Otherwise, make this
                     into a REG_UNUSED note instead.  */
                  if (reg_set_p (XEXP (note, 0), PATTERN (tem)))
                    {
                      rtx set = single_set (tem);
                      rtx inner_dest = 0;
#ifdef HAVE_cc0
                      rtx cc0_setter = NULL_RTX;
#endif

                      if (set != 0)
                        for (inner_dest = SET_DEST (set);
                             GET_CODE (inner_dest) == STRICT_LOW_PART
                             || GET_CODE (inner_dest) == SUBREG
                             || GET_CODE (inner_dest) == ZERO_EXTRACT;
                             inner_dest = XEXP (inner_dest, 0))
                          ;

                      /* Verify that it was the set, and not a clobber that
                         modified the register.

                         CC0 targets must be careful to maintain setter/user
                         pairs.  If we cannot delete the setter due to side
                         effects, mark the user with an UNUSED note instead
                         of deleting it.  */

                      if (set != 0 && ! side_effects_p (SET_SRC (set))
                          && rtx_equal_p (XEXP (note, 0), inner_dest)
#ifdef HAVE_cc0
                          && (! reg_mentioned_p (cc0_rtx, SET_SRC (set))
                              || ((cc0_setter = prev_cc0_setter (tem)) != NULL
                                  && sets_cc0_p (PATTERN (cc0_setter)) > 0))
#endif
                          )
                        {
                          /* Move the notes and links of TEM elsewhere.
                             This might delete other dead insns recursively.
                             First set the pattern to something that won't use
                             any register.  */

                          PATTERN (tem) = pc_rtx;

                          distribute_notes (REG_NOTES (tem), tem, tem,
                                            NULL_RTX, NULL_RTX, NULL_RTX);
                          distribute_links (LOG_LINKS (tem));

                          PUT_CODE (tem, NOTE);
                          NOTE_LINE_NUMBER (tem) = NOTE_INSN_DELETED;
                          NOTE_SOURCE_FILE (tem) = 0;
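                          /* (Note: TEM is deleted by rewriting it in place
                             as a NOTE_INSN_DELETED rather than by unlinking
                             it; the assumption here is that this keeps the
                             insn chain stable for the enclosing backward
                             scan.)  */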

#ifdef HAVE_cc0
                          /* Delete the setter too.  */
                          if (cc0_setter)
                            {
                              PATTERN (cc0_setter) = pc_rtx;

                              distribute_notes (REG_NOTES (cc0_setter),
                                                cc0_setter, cc0_setter,
                                                NULL_RTX, NULL_RTX, NULL_RTX);
                              distribute_links (LOG_LINKS (cc0_setter));

                              PUT_CODE (cc0_setter, NOTE);
                              NOTE_LINE_NUMBER (cc0_setter)
                                = NOTE_INSN_DELETED;
                              NOTE_SOURCE_FILE (cc0_setter) = 0;
                            }
#endif
                        }
                      /* If the register is both set and used here, put the
                         REG_DEAD note here, but place a REG_UNUSED note
                         here too unless there already is one.  */
                      else if (reg_referenced_p (XEXP (note, 0),
                                                 PATTERN (tem)))
                        {
                          place = tem;

                          if (! find_regno_note (tem, REG_UNUSED,
                                                 REGNO (XEXP (note, 0))))
                            REG_NOTES (tem)
                              = gen_rtx_EXPR_LIST (REG_UNUSED, XEXP (note, 0),
                                                   REG_NOTES (tem));
                        }
                      else
                        {
                          PUT_REG_NOTE_KIND (note, REG_UNUSED);

                          /* If there isn't already a REG_UNUSED note, put one
                             here.  */
                          if (! find_regno_note (tem, REG_UNUSED,
                                                 REGNO (XEXP (note, 0))))
                            place = tem;
                          break;
                        }
                    }
                  else if (reg_referenced_p (XEXP (note, 0), PATTERN (tem))
                           || (GET_CODE (tem) == CALL_INSN
                               && find_reg_fusage (tem, USE, XEXP (note, 0))))
                    {
                      place = tem;

                      /* If we are doing a 3->2 combination, and we have a
                         register which formerly died in i3 and was not used
                         by i2, which now no longer dies in i3 and is used in
                         i2 but does not die in i2, and place is between i2
                         and i3, then we may need to move a link from place to
                         i2.  */
                      if (i2 && INSN_UID (place) <= max_uid_cuid
                          && INSN_CUID (place) > INSN_CUID (i2)
                          && from_insn && INSN_CUID (from_insn) > INSN_CUID (i2)
                          && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
                        {
                          rtx links = LOG_LINKS (place);
                          LOG_LINKS (place) = 0;
                          distribute_links (links);
                        }
                      break;
                    }

                  if (tem == bb->head)
                    break;
                }

              /* We haven't found an insn for the death note and it
                 is still a REG_DEAD note, but we have hit the beginning
                 of the block.  If the existing life info says the reg
                 was dead, there's nothing left to do.  Otherwise, we'll
                 need to do a global life update after combine.  */
              if (REG_NOTE_KIND (note) == REG_DEAD && place == 0)
                {
                  int regno = REGNO (XEXP (note, 0));

                  if (REGNO_REG_SET_P (bb->global_live_at_start, regno))
                    {
                      SET_BIT (refresh_blocks, this_basic_block);
                      need_refresh = 1;
                    }
                }
            }

          /* If the register is set or already dead at PLACE, we needn't do
             anything with this note if it is still a REG_DEAD note.

             We check here whether the register is set at all, not whether
             it is totally replaced, which is what `dead_or_set_p' checks,
             so also check for it being set partially.  */

          if (place && REG_NOTE_KIND (note) == REG_DEAD)
            {
              int regno = REGNO (XEXP (note, 0));

              if (dead_or_set_p (place, XEXP (note, 0))
                  || reg_bitfield_target_p (XEXP (note, 0), PATTERN (place)))
                {
                  /* Unless the register previously died in PLACE, clear
                     reg_last_death.  [I no longer understand why this is
                     being done.] */
                  if (reg_last_death[regno] != place)
                    reg_last_death[regno] = 0;
                  place = 0;
                }
              else
                reg_last_death[regno] = place;

              /* If this is a death note for a hard reg that is occupying
                 multiple registers, ensure that we are still using all
                 parts of the object.  If we find a piece of the object
                 that is unused, we must add a USE for that piece before
                 PLACE and put the appropriate REG_DEAD note on it.

                 An alternative would be to put a REG_UNUSED for the pieces
                 on the insn that set the register, but that can't be done if
                 it is not in the same block.  It is simpler, though less
                 efficient, to add the USE insns.  */

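              /* For example (hypothetical hard regs and modes): if
                 (reg:DI 0) occupies hard regs 0 and 1 and only reg 1 is
                 still referenced at PLACE, we emit "(use (reg:SI 0))"
                 just before PLACE and give that USE insn a REG_DEAD note
                 for (reg:SI 0), keeping the life info for the unused
                 half correct.  */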
              if (place && regno < FIRST_PSEUDO_REGISTER
                  && HARD_REGNO_NREGS (regno, GET_MODE (XEXP (note, 0))) > 1)
                {
                  int endregno
                    = regno + HARD_REGNO_NREGS (regno,
                                                GET_MODE (XEXP (note, 0)));
                  int all_used = 1;
                  int i;

                  for (i = regno; i < endregno; i++)
                    if (! refers_to_regno_p (i, i + 1, PATTERN (place), 0)
                        && ! find_regno_fusage (place, USE, i))
                      {
                        rtx piece = gen_rtx_REG (reg_raw_mode[i], i);
                        rtx p;

                        /* See if we already placed a USE note for this
                           register in front of PLACE.  */
                        for (p = place;
                             GET_CODE (PREV_INSN (p)) == INSN
                             && GET_CODE (PATTERN (PREV_INSN (p))) == USE;
                             p = PREV_INSN (p))
                          if (rtx_equal_p (piece,
                                           XEXP (PATTERN (PREV_INSN (p)), 0)))
                            {
                              p = 0;
                              break;
                            }

                        if (p)
                          {
                            rtx use_insn
                              = emit_insn_before (gen_rtx_USE (VOIDmode,
                                                               piece),
                                                  p);
                            REG_NOTES (use_insn)
                              = gen_rtx_EXPR_LIST (REG_DEAD, piece,
                                                   REG_NOTES (use_insn));
                          }

                        all_used = 0;
                      }

                  /* Check for the case where the register dying partially
                     overlaps the register set by this insn.  */
                  if (all_used)
                    for (i = regno; i < endregno; i++)
                      if (dead_or_set_regno_p (place, i))
                        {
                          all_used = 0;
                          break;
                        }

                  if (! all_used)
                    {
                      /* Put only REG_DEAD notes for pieces that are
                         still used and that are not already dead or set.  */

                      for (i = regno; i < endregno; i++)
                        {
                          rtx piece = gen_rtx_REG (reg_raw_mode[i], i);

                          if ((reg_referenced_p (piece, PATTERN (place))
                               || (GET_CODE (place) == CALL_INSN
                                   && find_reg_fusage (place, USE, piece)))
                              && ! dead_or_set_p (place, piece)
                              && ! reg_bitfield_target_p (piece,
                                                          PATTERN (place)))
                            REG_NOTES (place)
                              = gen_rtx_EXPR_LIST (REG_DEAD, piece,
                                                   REG_NOTES (place));
                        }

                      place = 0;
                    }
                }
            }
          break;

        default:
          /* Any other notes should not be present at this point in the
             compilation.  */
          abort ();
        }

      if (place)
        {
          XEXP (note, 1) = REG_NOTES (place);
          REG_NOTES (place) = note;
        }
      else if ((REG_NOTE_KIND (note) == REG_DEAD
                || REG_NOTE_KIND (note) == REG_UNUSED)
               && GET_CODE (XEXP (note, 0)) == REG)
        REG_N_DEATHS (REGNO (XEXP (note, 0)))--;

      if (place2)
        {
          if ((REG_NOTE_KIND (note) == REG_DEAD
               || REG_NOTE_KIND (note) == REG_UNUSED)
              && GET_CODE (XEXP (note, 0)) == REG)
            REG_N_DEATHS (REGNO (XEXP (note, 0)))++;

          REG_NOTES (place2) = gen_rtx_fmt_ee (GET_CODE (note),
                                               REG_NOTE_KIND (note),
                                               XEXP (note, 0),
                                               REG_NOTES (place2));
        }
    }
}
\f
/* Similarly to above, distribute the LOG_LINKS that used to be present on
   I3, I2, and I1 to new locations.  This is also called in one case to
   add a link pointing at I3 when I3's destination is changed.  */

static void
distribute_links (links)
     rtx links;
{
  rtx link, next_link;

  for (link = links; link; link = next_link)
    {
      rtx place = 0;
      rtx insn;
      rtx set, reg;

      next_link = XEXP (link, 1);

      /* If the insn that this link points to is a NOTE or isn't a single
         set, ignore it.  In the latter case, it isn't clear what we
         can do other than ignore the link, since we can't tell which
         register it was for.  Such links wouldn't be used by combine
         anyway.

         It is not possible for the destination of the target of the link
         to have been changed by combine.  The only way that could happen
         is if we replace I3, I2, and I1 by I3 and I2.  But in that case
         the destination of I2 also remains unchanged.  */

      if (GET_CODE (XEXP (link, 0)) == NOTE
          || (set = single_set (XEXP (link, 0))) == 0)
        continue;

      reg = SET_DEST (set);
      while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
             || GET_CODE (reg) == SIGN_EXTRACT
             || GET_CODE (reg) == STRICT_LOW_PART)
        reg = XEXP (reg, 0);

      /* A LOG_LINK is defined as being placed on the first insn that uses
         a register and points to the insn that sets the register.  Start
         searching at the next insn after the target of the link and stop
         when we reach a set of the register or the end of the basic block.

         Note that this correctly handles the link that used to point from
         I3 to I2.  Also note that not much searching is typically done here
         since most links don't point very far away.  */

      for (insn = NEXT_INSN (XEXP (link, 0));
           (insn && (this_basic_block == n_basic_blocks - 1
                     || BLOCK_HEAD (this_basic_block + 1) != insn));
           insn = NEXT_INSN (insn))
        if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
            && reg_overlap_mentioned_p (reg, PATTERN (insn)))
          {
            if (reg_referenced_p (reg, PATTERN (insn)))
              place = insn;
            break;
          }
        else if (GET_CODE (insn) == CALL_INSN
                 && find_reg_fusage (insn, USE, reg))
          {
            place = insn;
            break;
          }

      /* If we found a place to put the link, place it there unless there
         is already a link to the same insn as LINK at that point.  */

      if (place)
        {
          rtx link2;

          for (link2 = LOG_LINKS (place); link2; link2 = XEXP (link2, 1))
            if (XEXP (link2, 0) == XEXP (link, 0))
              break;

          if (link2 == 0)
            {
              XEXP (link, 1) = LOG_LINKS (place);
              LOG_LINKS (place) = link;

              /* Set added_links_insn to the earliest insn we added a
                 link to.  */
              if (added_links_insn == 0
                  || INSN_CUID (added_links_insn) > INSN_CUID (place))
                added_links_insn = place;
            }
        }
    }
}
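
/* For illustration (hypothetical insns): suppose a link on I3 pointed at
   I2 because I2 set r70, and combine merged I2's computation into I3.
   The loop above starts just after r70's setter and walks forward,
   re-attaching the link to the first remaining insn that still uses
   r70 -- unless an equivalent link is already present there.  */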
\f
/* Compute INSN_CUID for INSN, which is an insn made by combine.  */
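
/* (If INSN itself has no recorded cuid -- its uid exceeds max_uid_cuid --
   it is assumed to be one of the USE insns combine emits, and the loop
   below steps forward to a following insn that does have one.)  */
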
static int
insn_cuid (insn)
     rtx insn;
{
  while (insn != 0 && INSN_UID (insn) > max_uid_cuid
         && GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == USE)
    insn = NEXT_INSN (insn);

  if (INSN_UID (insn) > max_uid_cuid)
    abort ();

  return INSN_CUID (insn);
}
\f
void
dump_combine_stats (file)
     FILE *file;
{
  fnotice
    (file,
     ";; Combiner statistics: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n\n",
     combine_attempts, combine_merges, combine_extras, combine_successes);
}

void
dump_combine_total_stats (file)
     FILE *file;
{
  fnotice
    (file,
     "\n;; Combiner totals: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n",
     total_attempts, total_merges, total_extras, total_successes);
}