]> gcc.gnu.org Git - gcc.git/blame - gcc/combine.c
errfn.c (cp_thing): Print buf as a string not as a printf format to avoid problems...
[gcc.git] / gcc / combine.c
CommitLineData
230d793d 1/* Optimize by combining instructions for GNU compiler.
c85f7c16 2 Copyright (C) 1987, 88, 92-97, 1998 Free Software Foundation, Inc.
230d793d
RS
3
4This file is part of GNU CC.
5
6GNU CC is free software; you can redistribute it and/or modify
7it under the terms of the GNU General Public License as published by
8the Free Software Foundation; either version 2, or (at your option)
9any later version.
10
11GNU CC is distributed in the hope that it will be useful,
12but WITHOUT ANY WARRANTY; without even the implied warranty of
13MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14GNU General Public License for more details.
15
16You should have received a copy of the GNU General Public License
17along with GNU CC; see the file COPYING. If not, write to
940d9d63
RK
18the Free Software Foundation, 59 Temple Place - Suite 330,
19Boston, MA 02111-1307, USA. */
230d793d
RS
20
21
22/* This module is essentially the "combiner" phase of the U. of Arizona
23 Portable Optimizer, but redone to work on our list-structured
24 representation for RTL instead of their string representation.
25
26 The LOG_LINKS of each insn identify the most recent assignment
27 to each REG used in the insn. It is a list of previous insns,
28 each of which contains a SET for a REG that is used in this insn
29 and not used or set in between. LOG_LINKs never cross basic blocks.
30 They were set up by the preceding pass (lifetime analysis).
31
32 We try to combine each pair of insns joined by a logical link.
33 We also try to combine triples of insns A, B and C when
34 C has a link back to B and B has a link back to A.
35
36 LOG_LINKS does not have links for use of the CC0. They don't
37 need to, because the insn that sets the CC0 is always immediately
38 before the insn that tests it. So we always regard a branch
39 insn as having a logical link to the preceding insn. The same is true
40 for an insn explicitly using CC0.
41
42 We check (with use_crosses_set_p) to avoid combining in such a way
43 as to move a computation to a place where its value would be different.
44
45 Combination is done by mathematically substituting the previous
46 insn(s) values for the regs they set into the expressions in
47 the later insns that refer to these regs. If the result is a valid insn
48 for our target machine, according to the machine description,
49 we install it, delete the earlier insns, and update the data flow
50 information (LOG_LINKS and REG_NOTES) for what we did.
51
52 There are a few exceptions where the dataflow information created by
53 flow.c aren't completely updated:
54
55 - reg_live_length is not updated
56 - reg_n_refs is not adjusted in the rare case when a register is
57 no longer required in a computation
58 - there are extremely rare cases (see distribute_regnotes) when a
59 REG_DEAD note is lost
60 - a LOG_LINKS entry that refers to an insn with multiple SETs may be
61 removed because there is no way to know which register it was
62 linking
63
64 To simplify substitution, we combine only when the earlier insn(s)
65 consist of only a single assignment. To simplify updating afterward,
66 we never combine when a subroutine call appears in the middle.
67
68 Since we do not represent assignments to CC0 explicitly except when that
69 is all an insn does, there is no LOG_LINKS entry in an insn that uses
70 the condition code for the insn that set the condition code.
71 Fortunately, these two insns must be consecutive.
72 Therefore, every JUMP_INSN is taken to have an implicit logical link
73 to the preceding insn. This is not quite right, since non-jumps can
74 also use the condition code; but in practice such insns would not
75 combine anyway. */
76
230d793d 77#include "config.h"
670ee920 78#include "system.h"
789f983a 79#include "rtl.h" /* stdio.h must precede rtl.h for FFS. */
230d793d
RS
80#include "flags.h"
81#include "regs.h"
55310dad 82#include "hard-reg-set.h"
230d793d
RS
83#include "basic-block.h"
84#include "insn-config.h"
d6f4ec51
KG
85/* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
86#include "expr.h"
230d793d
RS
87#include "insn-flags.h"
88#include "insn-codes.h"
89#include "insn-attr.h"
90#include "recog.h"
91#include "real.h"
2e107e9e 92#include "toplev.h"
230d793d
RS
93
94/* It is not safe to use ordinary gen_lowpart in combine.
95 Use gen_lowpart_for_combine instead. See comments there. */
96#define gen_lowpart dont_use_gen_lowpart_you_dummy
97
98/* Number of attempts to combine instructions in this function. */
99
100static int combine_attempts;
101
102/* Number of attempts that got as far as substitution in this function. */
103
104static int combine_merges;
105
106/* Number of instructions combined with added SETs in this function. */
107
108static int combine_extras;
109
110/* Number of instructions combined in this function. */
111
112static int combine_successes;
113
114/* Totals over entire compilation. */
115
116static int total_attempts, total_merges, total_extras, total_successes;
9210df58 117
ddd5a7c1 118/* Define a default value for REVERSIBLE_CC_MODE.
9210df58
RK
119 We can never assume that a condition code mode is safe to reverse unless
120 the md tells us so. */
121#ifndef REVERSIBLE_CC_MODE
122#define REVERSIBLE_CC_MODE(MODE) 0
123#endif
230d793d
RS
124\f
125/* Vector mapping INSN_UIDs to cuids.
5089e22e 126 The cuids are like uids but increase monotonically always.
230d793d
RS
127 Combine always uses cuids so that it can compare them.
128 But actually renumbering the uids, which we used to do,
129 proves to be a bad idea because it makes it hard to compare
130 the dumps produced by earlier passes with those from later passes. */
131
132static int *uid_cuid;
4255220d 133static int max_uid_cuid;
230d793d
RS
134
135/* Get the cuid of an insn. */
136
1427d6d2
RK
137#define INSN_CUID(INSN) \
138(INSN_UID (INSN) > max_uid_cuid ? insn_cuid (INSN) : uid_cuid[INSN_UID (INSN)])
230d793d
RS
139
140/* Maximum register number, which is the size of the tables below. */
141
142static int combine_max_regno;
143
144/* Record last point of death of (hard or pseudo) register n. */
145
146static rtx *reg_last_death;
147
148/* Record last point of modification of (hard or pseudo) register n. */
149
150static rtx *reg_last_set;
151
152/* Record the cuid of the last insn that invalidated memory
153 (anything that writes memory, and subroutine calls, but not pushes). */
154
155static int mem_last_set;
156
157/* Record the cuid of the last CALL_INSN
158 so we can tell whether a potential combination crosses any calls. */
159
160static int last_call_cuid;
161
162/* When `subst' is called, this is the insn that is being modified
163 (by combining in a previous insn). The PATTERN of this insn
164 is still the old pattern partially modified and it should not be
165 looked at, but this may be used to examine the successors of the insn
166 to judge whether a simplification is valid. */
167
168static rtx subst_insn;
169
0d9641d1
JW
170/* This is an insn that belongs before subst_insn, but is not currently
171 on the insn chain. */
172
173static rtx subst_prev_insn;
174
230d793d
RS
175/* This is the lowest CUID that `subst' is currently dealing with.
176 get_last_value will not return a value if the register was set at or
177 after this CUID. If not for this mechanism, we could get confused if
178 I2 or I1 in try_combine were an insn that used the old value of a register
179 to obtain a new value. In that case, we might erroneously get the
180 new value of the register when we wanted the old one. */
181
182static int subst_low_cuid;
183
6e25d159
RK
184/* This contains any hard registers that are used in newpat; reg_dead_at_p
185 must consider all these registers to be always live. */
186
187static HARD_REG_SET newpat_used_regs;
188
abe6e52f
RK
189/* This is an insn to which a LOG_LINKS entry has been added. If this
190 insn is the earlier than I2 or I3, combine should rescan starting at
191 that location. */
192
193static rtx added_links_insn;
194
0d4d42c3
RK
195/* Basic block number of the block in which we are performing combines. */
196static int this_basic_block;
230d793d
RS
197\f
198/* The next group of arrays allows the recording of the last value assigned
199 to (hard or pseudo) register n. We use this information to see if a
5089e22e 200 operation being processed is redundant given a prior operation performed
230d793d
RS
201 on the register. For example, an `and' with a constant is redundant if
202 all the zero bits are already known to be turned off.
203
204 We use an approach similar to that used by cse, but change it in the
205 following ways:
206
207 (1) We do not want to reinitialize at each label.
208 (2) It is useful, but not critical, to know the actual value assigned
209 to a register. Often just its form is helpful.
210
211 Therefore, we maintain the following arrays:
212
213 reg_last_set_value the last value assigned
214 reg_last_set_label records the value of label_tick when the
215 register was assigned
216 reg_last_set_table_tick records the value of label_tick when a
217 value using the register is assigned
218 reg_last_set_invalid set to non-zero when it is not valid
219 to use the value of this register in some
220 register's value
221
222 To understand the usage of these tables, it is important to understand
223 the distinction between the value in reg_last_set_value being valid
224 and the register being validly contained in some other expression in the
225 table.
226
227 Entry I in reg_last_set_value is valid if it is non-zero, and either
228 reg_n_sets[i] is 1 or reg_last_set_label[i] == label_tick.
229
230 Register I may validly appear in any expression returned for the value
231 of another register if reg_n_sets[i] is 1. It may also appear in the
232 value for register J if reg_last_set_label[i] < reg_last_set_label[j] or
233 reg_last_set_invalid[j] is zero.
234
235 If an expression is found in the table containing a register which may
236 not validly appear in an expression, the register is replaced by
237 something that won't match, (clobber (const_int 0)).
238
239 reg_last_set_invalid[i] is set non-zero when register I is being assigned
240 to and reg_last_set_table_tick[i] == label_tick. */
241
0f41302f 242/* Record last value assigned to (hard or pseudo) register n. */
230d793d
RS
243
244static rtx *reg_last_set_value;
245
246/* Record the value of label_tick when the value for register n is placed in
247 reg_last_set_value[n]. */
248
568356af 249static int *reg_last_set_label;
230d793d
RS
250
251/* Record the value of label_tick when an expression involving register n
0f41302f 252 is placed in reg_last_set_value. */
230d793d 253
568356af 254static int *reg_last_set_table_tick;
230d793d
RS
255
256/* Set non-zero if references to register n in expressions should not be
257 used. */
258
259static char *reg_last_set_invalid;
260
0f41302f 261/* Incremented for each label. */
230d793d 262
568356af 263static int label_tick;
230d793d
RS
264
265/* Some registers that are set more than once and used in more than one
266 basic block are nevertheless always set in similar ways. For example,
267 a QImode register may be loaded from memory in two places on a machine
268 where byte loads zero extend.
269
951553af 270 We record in the following array what we know about the nonzero
230d793d
RS
271 bits of a register, specifically which bits are known to be zero.
272
273 If an entry is zero, it means that we don't know anything special. */
274
55310dad 275static unsigned HOST_WIDE_INT *reg_nonzero_bits;
230d793d 276
951553af 277/* Mode used to compute significance in reg_nonzero_bits. It is the largest
5f4f0e22 278 integer mode that can fit in HOST_BITS_PER_WIDE_INT. */
230d793d 279
951553af 280static enum machine_mode nonzero_bits_mode;
230d793d 281
d0ab8cd3
RK
282/* Nonzero if we know that a register has some leading bits that are always
283 equal to the sign bit. */
284
285static char *reg_sign_bit_copies;
286
951553af 287/* Nonzero when reg_nonzero_bits and reg_sign_bit_copies can be safely used.
1a26b032
RK
288 It is zero while computing them and after combine has completed. This
289 former test prevents propagating values based on previously set values,
290 which can be incorrect if a variable is modified in a loop. */
230d793d 291
951553af 292static int nonzero_sign_valid;
55310dad
RK
293
294/* These arrays are maintained in parallel with reg_last_set_value
295 and are used to store the mode in which the register was last set,
296 the bits that were known to be zero when it was last set, and the
297 number of sign bits copies it was known to have when it was last set. */
298
299static enum machine_mode *reg_last_set_mode;
300static unsigned HOST_WIDE_INT *reg_last_set_nonzero_bits;
301static char *reg_last_set_sign_bit_copies;
230d793d
RS
302\f
/* Record one modification to rtl structure
   to be undone by storing old_contents into *where.
   is_int is 1 if the contents are an int.  */

struct undo
{
  struct undo *next;			/* Next record on the undos/frees chain.  */
  int is_int;				/* Nonzero if the saved contents are an int.  */
  union {rtx r; int i;} old_contents;	/* Value to restore when undoing.  */
  union {rtx *r; int *i;} where;	/* Location that was modified.  */
};

/* Record a bunch of changes to be undone, up to MAX_UNDO of them.
   num_undo says how many are currently recorded.

   storage is nonzero if we must undo the allocation of new storage.
   The value of storage is what to pass to obfree.

   other_insn is nonzero if we have modified some other insn in the process
   of working on subst_insn.  It must be verified too.

   previous_undos is the value of undobuf.undos when we started processing
   this substitution.  This will prevent gen_rtx_combine from re-using a piece
   from the previous expression.  Doing so can produce circular rtl
   structures.  */

struct undobuf
{
  char *storage;			/* What to pass to obfree, or zero.  */
  struct undo *undos;			/* Chain of pending undo records.  */
  struct undo *frees;			/* Chain of records free for reuse.  */
  struct undo *previous_undos;		/* Value of `undos' when this subst began.  */
  rtx other_insn;			/* Other insn modified; must be re-verified.  */
};

static struct undobuf undobuf;
339
/* Substitute NEWVAL, an rtx expression, into INTO, a place in some
   insn.  The substitution can be undone by undo_all.  If INTO is already
   set to NEWVAL, do not record this change.  Because computing NEWVAL might
   also call SUBST, we have to compute it before we put anything into
   the undo table.

   An undo record is taken from undobuf.frees when one is available,
   otherwise freshly allocated; the record is pushed on undobuf.undos
   only if the store actually changed *(&INTO).  */

#define SUBST(INTO, NEWVAL)  \
 do { rtx _new = (NEWVAL);					\
      struct undo *_buf;					\
								\
      if (undobuf.frees)					\
	_buf = undobuf.frees, undobuf.frees = _buf->next;	\
      else							\
	_buf = (struct undo *) xmalloc (sizeof (struct undo));	\
								\
      _buf->is_int = 0;						\
      _buf->where.r = &INTO;					\
      _buf->old_contents.r = INTO;				\
      INTO = _new;						\
      if (_buf->old_contents.r == INTO)				\
	_buf->next = undobuf.frees, undobuf.frees = _buf;	\
      else							\
	_buf->next = undobuf.undos, undobuf.undos = _buf;	\
    } while (0)
364
241cea85
RK
/* Similar to SUBST, but NEWVAL is an int expression.  Note that substitution
   for the value of a HOST_WIDE_INT value (including CONST_INT) is
   not safe.

   As with SUBST, the undo record is recycled from undobuf.frees when
   possible and only queued on undobuf.undos if the value changed.  */

#define SUBST_INT(INTO, NEWVAL)  \
 do { struct undo *_buf;					\
								\
      if (undobuf.frees)					\
	_buf = undobuf.frees, undobuf.frees = _buf->next;	\
      else							\
	_buf = (struct undo *) xmalloc (sizeof (struct undo));	\
								\
      _buf->is_int = 1;						\
      _buf->where.i = (int *) &INTO;				\
      _buf->old_contents.i = INTO;				\
      INTO = NEWVAL;						\
      if (_buf->old_contents.i == INTO)				\
	_buf->next = undobuf.frees, undobuf.frees = _buf;	\
      else							\
	_buf->next = undobuf.undos, undobuf.undos = _buf;	\
    } while (0)
386
387/* Number of times the pseudo being substituted for
388 was found and replaced. */
389
390static int n_occurrences;
391
c5ad722c
RK
392static void init_reg_last_arrays PROTO((void));
393static void setup_incoming_promotions PROTO((void));
fe2db4fb
RK
394static void set_nonzero_bits_and_sign_copies PROTO((rtx, rtx));
395static int can_combine_p PROTO((rtx, rtx, rtx, rtx, rtx *, rtx *));
e009aaf3 396static int sets_function_arg_p PROTO((rtx));
fe2db4fb
RK
397static int combinable_i3pat PROTO((rtx, rtx *, rtx, rtx, int, rtx *));
398static rtx try_combine PROTO((rtx, rtx, rtx));
399static void undo_all PROTO((void));
400static rtx *find_split_point PROTO((rtx *, rtx));
401static rtx subst PROTO((rtx, rtx, rtx, int, int));
8079805d
RK
402static rtx simplify_rtx PROTO((rtx, enum machine_mode, int, int));
403static rtx simplify_if_then_else PROTO((rtx));
404static rtx simplify_set PROTO((rtx));
405static rtx simplify_logical PROTO((rtx, int));
fe2db4fb
RK
406static rtx expand_compound_operation PROTO((rtx));
407static rtx expand_field_assignment PROTO((rtx));
408static rtx make_extraction PROTO((enum machine_mode, rtx, int, rtx, int,
409 int, int, int));
71923da7 410static rtx extract_left_shift PROTO((rtx, int));
fe2db4fb
RK
411static rtx make_compound_operation PROTO((rtx, enum rtx_code));
412static int get_pos_from_mask PROTO((unsigned HOST_WIDE_INT, int *));
6139ff20 413static rtx force_to_mode PROTO((rtx, enum machine_mode,
e3d616e3 414 unsigned HOST_WIDE_INT, rtx, int));
abe6e52f 415static rtx if_then_else_cond PROTO((rtx, rtx *, rtx *));
fe2db4fb 416static rtx known_cond PROTO((rtx, enum rtx_code, rtx, rtx));
e11fa86f 417static int rtx_equal_for_field_assignment_p PROTO((rtx, rtx));
fe2db4fb
RK
418static rtx make_field_assignment PROTO((rtx));
419static rtx apply_distributive_law PROTO((rtx));
420static rtx simplify_and_const_int PROTO((rtx, enum machine_mode, rtx,
421 unsigned HOST_WIDE_INT));
422static unsigned HOST_WIDE_INT nonzero_bits PROTO((rtx, enum machine_mode));
423static int num_sign_bit_copies PROTO((rtx, enum machine_mode));
424static int merge_outer_ops PROTO((enum rtx_code *, HOST_WIDE_INT *,
425 enum rtx_code, HOST_WIDE_INT,
426 enum machine_mode, int *));
427static rtx simplify_shift_const PROTO((rtx, enum rtx_code, enum machine_mode,
428 rtx, int));
a29ca9db 429static int recog_for_combine PROTO((rtx *, rtx, rtx *, int *));
fe2db4fb 430static rtx gen_lowpart_for_combine PROTO((enum machine_mode, rtx));
d18225c4 431static rtx gen_rtx_combine PVPROTO((enum rtx_code code, enum machine_mode mode,
4f90e4a0 432 ...));
fe2db4fb
RK
433static rtx gen_binary PROTO((enum rtx_code, enum machine_mode,
434 rtx, rtx));
0c1c8ea6
RK
435static rtx gen_unary PROTO((enum rtx_code, enum machine_mode,
436 enum machine_mode, rtx));
fe2db4fb
RK
437static enum rtx_code simplify_comparison PROTO((enum rtx_code, rtx *, rtx *));
438static int reversible_comparison_p PROTO((rtx));
439static void update_table_tick PROTO((rtx));
440static void record_value_for_reg PROTO((rtx, rtx, rtx));
441static void record_dead_and_set_regs_1 PROTO((rtx, rtx));
442static void record_dead_and_set_regs PROTO((rtx));
9a893315 443static int get_last_value_validate PROTO((rtx *, rtx, int, int));
fe2db4fb
RK
444static rtx get_last_value PROTO((rtx));
445static int use_crosses_set_p PROTO((rtx, int));
446static void reg_dead_at_p_1 PROTO((rtx, rtx));
447static int reg_dead_at_p PROTO((rtx, rtx));
6eb12cef 448static void move_deaths PROTO((rtx, rtx, int, rtx, rtx *));
fe2db4fb
RK
449static int reg_bitfield_target_p PROTO((rtx, rtx));
450static void distribute_notes PROTO((rtx, rtx, rtx, rtx, rtx, rtx));
451static void distribute_links PROTO((rtx));
6e25d159 452static void mark_used_regs_combine PROTO((rtx));
1427d6d2 453static int insn_cuid PROTO((rtx));
230d793d
RS
454\f
/* Main entry point for combiner.  F is the first insn of the function.
   NREGS is the first unused pseudo-reg number.

   Works in two passes over the insn chain:
   1. assign cuids and gather nonzero-bit / sign-bit-copy information;
   2. walk forward trying 2- and 3-insn combinations via try_combine,
      restarting at `next' whenever a combination succeeds.  */

void
combine_instructions (f, nregs)
     rtx f;
     int nregs;
{
  register rtx insn, next;
#ifdef HAVE_cc0
  register rtx prev;
#endif
  register int i;
  register rtx links, nextlinks;

  combine_attempts = 0;
  combine_merges = 0;
  combine_extras = 0;
  combine_successes = 0;
  undobuf.undos = undobuf.previous_undos = 0;

  combine_max_regno = nregs;

  /* All per-register tables live on the stack for the duration of this
     pass; they are indexed by (hard or pseudo) register number.  */
  reg_nonzero_bits
    = (unsigned HOST_WIDE_INT *) alloca (nregs * sizeof (HOST_WIDE_INT));
  reg_sign_bit_copies = (char *) alloca (nregs * sizeof (char));

  bzero ((char *) reg_nonzero_bits, nregs * sizeof (HOST_WIDE_INT));
  bzero (reg_sign_bit_copies, nregs * sizeof (char));

  reg_last_death = (rtx *) alloca (nregs * sizeof (rtx));
  reg_last_set = (rtx *) alloca (nregs * sizeof (rtx));
  reg_last_set_value = (rtx *) alloca (nregs * sizeof (rtx));
  reg_last_set_table_tick = (int *) alloca (nregs * sizeof (int));
  reg_last_set_label = (int *) alloca (nregs * sizeof (int));
  reg_last_set_invalid = (char *) alloca (nregs * sizeof (char));
  reg_last_set_mode
    = (enum machine_mode *) alloca (nregs * sizeof (enum machine_mode));
  reg_last_set_nonzero_bits
    = (unsigned HOST_WIDE_INT *) alloca (nregs * sizeof (HOST_WIDE_INT));
  reg_last_set_sign_bit_copies
    = (char *) alloca (nregs * sizeof (char));

  init_reg_last_arrays ();

  init_recog_no_volatile ();

  /* Compute maximum uid value so uid_cuid can be allocated.  */

  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    if (INSN_UID (insn) > i)
      i = INSN_UID (insn);

  uid_cuid = (int *) alloca ((i + 1) * sizeof (int));
  max_uid_cuid = i;

  nonzero_bits_mode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);

  /* Don't use reg_nonzero_bits when computing it.  This can cause problems
     when, for example, we have j <<= 1 in a loop.  */

  nonzero_sign_valid = 0;

  /* Compute the mapping from uids to cuids.
     Cuids are numbers assigned to insns, like uids,
     except that cuids increase monotonically through the code.

     Scan all SETs and see if we can deduce anything about what
     bits are known to be zero for some registers and how many copies
     of the sign bit are known to exist for those registers.

     Also set any known values so that we can use it while searching
     for what bits are known to be set.  */

  label_tick = 1;

  /* We need to initialize it here, because record_dead_and_set_regs may call
     get_last_value.  */
  subst_prev_insn = NULL_RTX;

  setup_incoming_promotions ();

  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    {
      uid_cuid[INSN_UID (insn)] = ++i;
      subst_low_cuid = i;
      subst_insn = insn;

      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	{
	  note_stores (PATTERN (insn), set_nonzero_bits_and_sign_copies);
	  record_dead_and_set_regs (insn);

#ifdef AUTO_INC_DEC
	  /* An auto-inc/dec also sets the register; record that too.  */
	  for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
	    if (REG_NOTE_KIND (links) == REG_INC)
	      set_nonzero_bits_and_sign_copies (XEXP (links, 0), NULL_RTX);
#endif
	}

      if (GET_CODE (insn) == CODE_LABEL)
	label_tick++;
    }

  nonzero_sign_valid = 1;

  /* Now scan all the insns in forward order.  */

  this_basic_block = -1;
  label_tick = 1;
  last_call_cuid = 0;
  mem_last_set = 0;
  init_reg_last_arrays ();
  setup_incoming_promotions ();

  for (insn = f; insn; insn = next ? next : NEXT_INSN (insn))
    {
      next = 0;

      /* If INSN starts a new basic block, update our basic block number.  */
      if (this_basic_block + 1 < n_basic_blocks
	  && basic_block_head[this_basic_block + 1] == insn)
	this_basic_block++;

      if (GET_CODE (insn) == CODE_LABEL)
	label_tick++;

      else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	{
	  /* Try this insn with each insn it links back to.  */

	  for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
	    if ((next = try_combine (insn, XEXP (links, 0), NULL_RTX)) != 0)
	      goto retry;

	  /* Try each sequence of three linked insns ending with this one.  */

	  for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
	    for (nextlinks = LOG_LINKS (XEXP (links, 0)); nextlinks;
		 nextlinks = XEXP (nextlinks, 1))
	      if ((next = try_combine (insn, XEXP (links, 0),
				       XEXP (nextlinks, 0))) != 0)
		goto retry;

#ifdef HAVE_cc0
	  /* Try to combine a jump insn that uses CC0
	     with a preceding insn that sets CC0, and maybe with its
	     logical predecessor as well.
	     This is how we make decrement-and-branch insns.
	     We need this special code because data flow connections
	     via CC0 do not get entered in LOG_LINKS.  */

	  if (GET_CODE (insn) == JUMP_INSN
	      && (prev = prev_nonnote_insn (insn)) != 0
	      && GET_CODE (prev) == INSN
	      && sets_cc0_p (PATTERN (prev)))
	    {
	      if ((next = try_combine (insn, prev, NULL_RTX)) != 0)
		goto retry;

	      for (nextlinks = LOG_LINKS (prev); nextlinks;
		   nextlinks = XEXP (nextlinks, 1))
		if ((next = try_combine (insn, prev,
					 XEXP (nextlinks, 0))) != 0)
		  goto retry;
	    }

	  /* Do the same for an insn that explicitly references CC0.  */
	  if (GET_CODE (insn) == INSN
	      && (prev = prev_nonnote_insn (insn)) != 0
	      && GET_CODE (prev) == INSN
	      && sets_cc0_p (PATTERN (prev))
	      && GET_CODE (PATTERN (insn)) == SET
	      && reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (insn))))
	    {
	      if ((next = try_combine (insn, prev, NULL_RTX)) != 0)
		goto retry;

	      for (nextlinks = LOG_LINKS (prev); nextlinks;
		   nextlinks = XEXP (nextlinks, 1))
		if ((next = try_combine (insn, prev,
					 XEXP (nextlinks, 0))) != 0)
		  goto retry;
	    }

	  /* Finally, see if any of the insns that this insn links to
	     explicitly references CC0.  If so, try this insn, that insn,
	     and its predecessor if it sets CC0.  */
	  for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
	    if (GET_CODE (XEXP (links, 0)) == INSN
		&& GET_CODE (PATTERN (XEXP (links, 0))) == SET
		&& reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (XEXP (links, 0))))
		&& (prev = prev_nonnote_insn (XEXP (links, 0))) != 0
		&& GET_CODE (prev) == INSN
		&& sets_cc0_p (PATTERN (prev))
		&& (next = try_combine (insn, XEXP (links, 0), prev)) != 0)
	      goto retry;
#endif

	  /* Try combining an insn with two different insns whose results it
	     uses.  */
	  for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
	    for (nextlinks = XEXP (links, 1); nextlinks;
		 nextlinks = XEXP (nextlinks, 1))
	      if ((next = try_combine (insn, XEXP (links, 0),
				       XEXP (nextlinks, 0))) != 0)
		goto retry;

	  if (GET_CODE (insn) != NOTE)
	    record_dead_and_set_regs (insn);

	retry:
	  ;
	}
    }

  /* Accumulate per-function counters into the whole-compilation totals.  */
  total_attempts += combine_attempts;
  total_merges += combine_merges;
  total_extras += combine_extras;
  total_successes += combine_successes;

  nonzero_sign_valid = 0;
}
ef026f91
RS
678
679/* Wipe the reg_last_xxx arrays in preparation for another pass. */
680
681static void
682init_reg_last_arrays ()
683{
684 int nregs = combine_max_regno;
685
4c9a05bc
RK
686 bzero ((char *) reg_last_death, nregs * sizeof (rtx));
687 bzero ((char *) reg_last_set, nregs * sizeof (rtx));
688 bzero ((char *) reg_last_set_value, nregs * sizeof (rtx));
689 bzero ((char *) reg_last_set_table_tick, nregs * sizeof (int));
690 bzero ((char *) reg_last_set_label, nregs * sizeof (int));
ef026f91 691 bzero (reg_last_set_invalid, nregs * sizeof (char));
4c9a05bc
RK
692 bzero ((char *) reg_last_set_mode, nregs * sizeof (enum machine_mode));
693 bzero ((char *) reg_last_set_nonzero_bits, nregs * sizeof (HOST_WIDE_INT));
ef026f91
RS
694 bzero (reg_last_set_sign_bit_copies, nregs * sizeof (char));
695}
230d793d 696\f
7988fd36
RK
/* Set up any promoted values for incoming argument registers.

   For each hard register that carries a promoted incoming argument,
   record a value of the form (sign/zero_extend (clobber const0_rtx))
   at the first insn, so later code knows the upper bits' derivation
   without knowing the actual incoming value.  Compiled away entirely
   unless the target defines PROMOTE_FUNCTION_ARGS.  */

static void
setup_incoming_promotions ()
{
#ifdef PROMOTE_FUNCTION_ARGS
  int regno;
  rtx reg;
  enum machine_mode mode;
  int unsignedp;
  rtx first = get_insns ();

  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if (FUNCTION_ARG_REGNO_P (regno)
	&& (reg = promoted_input_arg (regno, &mode, &unsignedp)) != 0)
      {
	record_value_for_reg
	  (reg, first, gen_rtx_fmt_e ((unsignedp ? ZERO_EXTEND
				       : SIGN_EXTEND),
				      GET_MODE (reg),
				      gen_rtx_CLOBBER (mode, const0_rtx)));
      }
#endif
}
721\f
91102d5a
RK
/* Called via note_stores.  If X is a pseudo that is narrower than
   HOST_BITS_PER_WIDE_INT and is being set, record what bits are known zero.

   If we are setting only a portion of X and we can't figure out what
   portion, assume all bits will be used since we don't know what will
   be happening.

   Similarly, set how many bits of X are known to be copies of the sign bit
   at all locations in the function.  This is the smallest number implied
   by any set of X.  */

static void
set_nonzero_bits_and_sign_copies (x, set)
     rtx x;
     rtx set;
{
  int num;

  if (GET_CODE (x) == REG
      && REGNO (x) >= FIRST_PSEUDO_REGISTER
      /* If this register is undefined at the start of the file, we can't
	 say what its contents were.  */
      && ! REGNO_REG_SET_P (basic_block_live_at_start[0], REGNO (x))
      && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT)
    {
      /* A CLOBBER (or a null SET, from the REG_INC path) tells us
	 nothing: assume every bit may be set and only one sign-bit
	 copy exists.  */
      if (set == 0 || GET_CODE (set) == CLOBBER)
	{
	  reg_nonzero_bits[REGNO (x)] = GET_MODE_MASK (GET_MODE (x));
	  reg_sign_bit_copies[REGNO (x)] = 1;
	  return;
	}

      /* If this is a complex assignment, see if we can convert it into a
	 simple assignment.  */
      set = expand_field_assignment (set);

      /* If this is a simple assignment, or we have a paradoxical SUBREG,
	 set what we know about X.  */

      if (SET_DEST (set) == x
	  || (GET_CODE (SET_DEST (set)) == SUBREG
	      && (GET_MODE_SIZE (GET_MODE (SET_DEST (set)))
		  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (set)))))
	      && SUBREG_REG (SET_DEST (set)) == x))
	{
	  rtx src = SET_SRC (set);

#ifdef SHORT_IMMEDIATES_SIGN_EXTEND
	  /* If X is narrower than a word and SRC is a non-negative
	     constant that would appear negative in the mode of X,
	     sign-extend it for use in reg_nonzero_bits because some
	     machines (maybe most) will actually do the sign-extension
	     and this is the conservative approach.

	     ??? For 2.5, try to tighten up the MD files in this regard
	     instead of this kludge.  */

	  if (GET_MODE_BITSIZE (GET_MODE (x)) < BITS_PER_WORD
	      && GET_CODE (src) == CONST_INT
	      && INTVAL (src) > 0
	      && 0 != (INTVAL (src)
		       & ((HOST_WIDE_INT) 1
			  << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
	    src = GEN_INT (INTVAL (src)
			   | ((HOST_WIDE_INT) (-1)
			      << GET_MODE_BITSIZE (GET_MODE (x))));
#endif

	  /* Accumulate: OR in the new nonzero bits, and keep the
	     minimum sign-bit-copy count seen over all sets of X.  */
	  reg_nonzero_bits[REGNO (x)]
	    |= nonzero_bits (src, nonzero_bits_mode);
	  num = num_sign_bit_copies (SET_SRC (set), GET_MODE (x));
	  if (reg_sign_bit_copies[REGNO (x)] == 0
	      || reg_sign_bit_copies[REGNO (x)] > num)
	    reg_sign_bit_copies[REGNO (x)] = num;
	}
      else
	{
	  /* Partial or unanalyzable store: fall back to knowing nothing.  */
	  reg_nonzero_bits[REGNO (x)] = GET_MODE_MASK (GET_MODE (x));
	  reg_sign_bit_copies[REGNO (x)] = 1;
	}
    }
}
804\f
805/* See if INSN can be combined into I3. PRED and SUCC are optionally
806 insns that were previously combined into I3 or that will be combined
807 into the merger of INSN and I3.
808
809 Return 0 if the combination is not allowed for any reason.
810
811 If the combination is allowed, *PDEST will be set to the single
812 destination of INSN and *PSRC to the single source, and this function
813 will return 1. */
814
815static int
816can_combine_p (insn, i3, pred, succ, pdest, psrc)
817 rtx insn;
818 rtx i3;
819 rtx pred, succ;
820 rtx *pdest, *psrc;
821{
822 int i;
823 rtx set = 0, src, dest;
b729186a
JL
824 rtx p;
825#ifdef AUTO_INC_DEC
76d31c63 826 rtx link;
b729186a 827#endif
230d793d
RS
828 int all_adjacent = (succ ? (next_active_insn (insn) == succ
829 && next_active_insn (succ) == i3)
830 : next_active_insn (insn) == i3);
831
832 /* Can combine only if previous insn is a SET of a REG, a SUBREG or CC0.
833 or a PARALLEL consisting of such a SET and CLOBBERs.
834
835 If INSN has CLOBBER parallel parts, ignore them for our processing.
836 By definition, these happen during the execution of the insn. When it
837 is merged with another insn, all bets are off. If they are, in fact,
838 needed and aren't also supplied in I3, they may be added by
839 recog_for_combine. Otherwise, it won't match.
840
841 We can also ignore a SET whose SET_DEST is mentioned in a REG_UNUSED
842 note.
843
844 Get the source and destination of INSN. If more than one, can't
845 combine. */
846
847 if (GET_CODE (PATTERN (insn)) == SET)
848 set = PATTERN (insn);
849 else if (GET_CODE (PATTERN (insn)) == PARALLEL
850 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
851 {
852 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
853 {
854 rtx elt = XVECEXP (PATTERN (insn), 0, i);
855
856 switch (GET_CODE (elt))
857 {
e3258cef
R
858 /* This is important to combine floating point insns
859 for the SH4 port. */
860 case USE:
861 /* Combining an isolated USE doesn't make sense.
862 We depend here on combinable_i3_pat to reject them. */
863 /* The code below this loop only verifies that the inputs of
864 the SET in INSN do not change. We call reg_set_between_p
865 to verify that the REG in the USE does not change betweeen
866 I3 and INSN.
867 If the USE in INSN was for a pseudo register, the matching
868 insn pattern will likely match any register; combining this
869 with any other USE would only be safe if we knew that the
870 used registers have identical values, or if there was
871 something to tell them apart, e.g. different modes. For
872 now, we forgo such compilcated tests and simply disallow
873 combining of USES of pseudo registers with any other USE. */
874 if (GET_CODE (XEXP (elt, 0)) == REG
875 && GET_CODE (PATTERN (i3)) == PARALLEL)
876 {
877 rtx i3pat = PATTERN (i3);
878 int i = XVECLEN (i3pat, 0) - 1;
879 int regno = REGNO (XEXP (elt, 0));
880 do
881 {
882 rtx i3elt = XVECEXP (i3pat, 0, i);
883 if (GET_CODE (i3elt) == USE
884 && GET_CODE (XEXP (i3elt, 0)) == REG
885 && (REGNO (XEXP (i3elt, 0)) == regno
886 ? reg_set_between_p (XEXP (elt, 0),
887 PREV_INSN (insn), i3)
888 : regno >= FIRST_PSEUDO_REGISTER))
889 return 0;
890 }
891 while (--i >= 0);
892 }
893 break;
894
230d793d
RS
895 /* We can ignore CLOBBERs. */
896 case CLOBBER:
897 break;
898
899 case SET:
900 /* Ignore SETs whose result isn't used but not those that
901 have side-effects. */
902 if (find_reg_note (insn, REG_UNUSED, SET_DEST (elt))
903 && ! side_effects_p (elt))
904 break;
905
906 /* If we have already found a SET, this is a second one and
907 so we cannot combine with this insn. */
908 if (set)
909 return 0;
910
911 set = elt;
912 break;
913
914 default:
915 /* Anything else means we can't combine. */
916 return 0;
917 }
918 }
919
920 if (set == 0
921 /* If SET_SRC is an ASM_OPERANDS we can't throw away these CLOBBERs,
922 so don't do anything with it. */
923 || GET_CODE (SET_SRC (set)) == ASM_OPERANDS)
924 return 0;
925 }
926 else
927 return 0;
928
929 if (set == 0)
930 return 0;
931
932 set = expand_field_assignment (set);
933 src = SET_SRC (set), dest = SET_DEST (set);
934
935 /* Don't eliminate a store in the stack pointer. */
936 if (dest == stack_pointer_rtx
230d793d
RS
937 /* If we couldn't eliminate a field assignment, we can't combine. */
938 || GET_CODE (dest) == ZERO_EXTRACT || GET_CODE (dest) == STRICT_LOW_PART
939 /* Don't combine with an insn that sets a register to itself if it has
940 a REG_EQUAL note. This may be part of a REG_NO_CONFLICT sequence. */
5f4f0e22 941 || (rtx_equal_p (src, dest) && find_reg_note (insn, REG_EQUAL, NULL_RTX))
230d793d
RS
942 /* Can't merge a function call. */
943 || GET_CODE (src) == CALL
cd5e8f1f 944 /* Don't eliminate a function call argument. */
4dca5ec5
RK
945 || (GET_CODE (i3) == CALL_INSN
946 && (find_reg_fusage (i3, USE, dest)
947 || (GET_CODE (dest) == REG
948 && REGNO (dest) < FIRST_PSEUDO_REGISTER
949 && global_regs[REGNO (dest)])))
230d793d
RS
950 /* Don't substitute into an incremented register. */
951 || FIND_REG_INC_NOTE (i3, dest)
952 || (succ && FIND_REG_INC_NOTE (succ, dest))
ec35104c 953#if 0
230d793d 954 /* Don't combine the end of a libcall into anything. */
ec35104c
JL
955 /* ??? This gives worse code, and appears to be unnecessary, since no
956 pass after flow uses REG_LIBCALL/REG_RETVAL notes. Local-alloc does
957 use REG_RETVAL notes for noconflict blocks, but other code here
958 makes sure that those insns don't disappear. */
5f4f0e22 959 || find_reg_note (insn, REG_RETVAL, NULL_RTX)
ec35104c 960#endif
230d793d
RS
961 /* Make sure that DEST is not used after SUCC but before I3. */
962 || (succ && ! all_adjacent
963 && reg_used_between_p (dest, succ, i3))
964 /* Make sure that the value that is to be substituted for the register
965 does not use any registers whose values alter in between. However,
966 If the insns are adjacent, a use can't cross a set even though we
967 think it might (this can happen for a sequence of insns each setting
968 the same destination; reg_last_set of that register might point to
d81481d3
RK
969 a NOTE). If INSN has a REG_EQUIV note, the register is always
970 equivalent to the memory so the substitution is valid even if there
971 are intervening stores. Also, don't move a volatile asm or
972 UNSPEC_VOLATILE across any other insns. */
230d793d 973 || (! all_adjacent
d81481d3
RK
974 && (((GET_CODE (src) != MEM
975 || ! find_reg_note (insn, REG_EQUIV, src))
976 && use_crosses_set_p (src, INSN_CUID (insn)))
a66a10c7
RS
977 || (GET_CODE (src) == ASM_OPERANDS && MEM_VOLATILE_P (src))
978 || GET_CODE (src) == UNSPEC_VOLATILE))
230d793d
RS
979 /* If there is a REG_NO_CONFLICT note for DEST in I3 or SUCC, we get
980 better register allocation by not doing the combine. */
981 || find_reg_note (i3, REG_NO_CONFLICT, dest)
982 || (succ && find_reg_note (succ, REG_NO_CONFLICT, dest))
983 /* Don't combine across a CALL_INSN, because that would possibly
984 change whether the life span of some REGs crosses calls or not,
985 and it is a pain to update that information.
986 Exception: if source is a constant, moving it later can't hurt.
987 Accept that special case, because it helps -fforce-addr a lot. */
988 || (INSN_CUID (insn) < last_call_cuid && ! CONSTANT_P (src)))
989 return 0;
990
991 /* DEST must either be a REG or CC0. */
992 if (GET_CODE (dest) == REG)
993 {
994 /* If register alignment is being enforced for multi-word items in all
995 cases except for parameters, it is possible to have a register copy
996 insn referencing a hard register that is not allowed to contain the
997 mode being copied and which would not be valid as an operand of most
998 insns. Eliminate this problem by not combining with such an insn.
999
1000 Also, on some machines we don't want to extend the life of a hard
4d2c432d
RK
1001 register.
1002
1003 This is the same test done in can_combine except that we don't test
1004 if SRC is a CALL operation to permit a hard register with
1005 SMALL_REGISTER_CLASSES, and that we have to take all_adjacent
1006 into account. */
230d793d
RS
1007
1008 if (GET_CODE (src) == REG
1009 && ((REGNO (dest) < FIRST_PSEUDO_REGISTER
1010 && ! HARD_REGNO_MODE_OK (REGNO (dest), GET_MODE (dest)))
c448a43e
RK
1011 /* Don't extend the life of a hard register unless it is
1012 user variable (if we have few registers) or it can't
1013 fit into the desired register (meaning something special
ecd40809
RK
1014 is going on).
1015 Also avoid substituting a return register into I3, because
1016 reload can't handle a conflict with constraints of other
1017 inputs. */
230d793d 1018 || (REGNO (src) < FIRST_PSEUDO_REGISTER
c448a43e 1019 && (! HARD_REGNO_MODE_OK (REGNO (src), GET_MODE (src))
f95182a4
ILT
1020 || (SMALL_REGISTER_CLASSES
1021 && ((! all_adjacent && ! REG_USERVAR_P (src))
1022 || (FUNCTION_VALUE_REGNO_P (REGNO (src))
e9a25f70 1023 && ! REG_USERVAR_P (src))))))))
230d793d
RS
1024 return 0;
1025 }
1026 else if (GET_CODE (dest) != CC0)
1027 return 0;
1028
5f96750d
RS
1029 /* Don't substitute for a register intended as a clobberable operand.
1030 Similarly, don't substitute an expression containing a register that
1031 will be clobbered in I3. */
230d793d
RS
1032 if (GET_CODE (PATTERN (i3)) == PARALLEL)
1033 for (i = XVECLEN (PATTERN (i3), 0) - 1; i >= 0; i--)
1034 if (GET_CODE (XVECEXP (PATTERN (i3), 0, i)) == CLOBBER
5f96750d
RS
1035 && (reg_overlap_mentioned_p (XEXP (XVECEXP (PATTERN (i3), 0, i), 0),
1036 src)
1037 || rtx_equal_p (XEXP (XVECEXP (PATTERN (i3), 0, i), 0), dest)))
230d793d
RS
1038 return 0;
1039
1040 /* If INSN contains anything volatile, or is an `asm' (whether volatile
d276f2bb 1041 or not), reject, unless nothing volatile comes between it and I3 */
230d793d
RS
1042
1043 if (GET_CODE (src) == ASM_OPERANDS || volatile_refs_p (src))
d276f2bb
CM
1044 {
1045 /* Make sure succ doesn't contain a volatile reference. */
1046 if (succ != 0 && volatile_refs_p (PATTERN (succ)))
1047 return 0;
1048
1049 for (p = NEXT_INSN (insn); p != i3; p = NEXT_INSN (p))
1050 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
1051 && p != succ && volatile_refs_p (PATTERN (p)))
1052 return 0;
1053 }
230d793d 1054
b79ee7eb
RH
1055 /* If INSN is an asm, and DEST is a hard register, reject, since it has
1056 to be an explicit register variable, and was chosen for a reason. */
1057
1058 if (GET_CODE (src) == ASM_OPERANDS
1059 && GET_CODE (dest) == REG && REGNO (dest) < FIRST_PSEUDO_REGISTER)
1060 return 0;
1061
4b2cb4a2
RS
1062 /* If there are any volatile insns between INSN and I3, reject, because
1063 they might affect machine state. */
1064
1065 for (p = NEXT_INSN (insn); p != i3; p = NEXT_INSN (p))
1066 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
1067 && p != succ && volatile_insn_p (PATTERN (p)))
1068 return 0;
1069
230d793d
RS
1070 /* If INSN or I2 contains an autoincrement or autodecrement,
1071 make sure that register is not used between there and I3,
1072 and not already used in I3 either.
1073 Also insist that I3 not be a jump; if it were one
1074 and the incremented register were spilled, we would lose. */
1075
1076#ifdef AUTO_INC_DEC
1077 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1078 if (REG_NOTE_KIND (link) == REG_INC
1079 && (GET_CODE (i3) == JUMP_INSN
1080 || reg_used_between_p (XEXP (link, 0), insn, i3)
1081 || reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i3))))
1082 return 0;
1083#endif
1084
1085#ifdef HAVE_cc0
1086 /* Don't combine an insn that follows a CC0-setting insn.
1087 An insn that uses CC0 must not be separated from the one that sets it.
1088 We do, however, allow I2 to follow a CC0-setting insn if that insn
1089 is passed as I1; in that case it will be deleted also.
1090 We also allow combining in this case if all the insns are adjacent
1091 because that would leave the two CC0 insns adjacent as well.
1092 It would be more logical to test whether CC0 occurs inside I1 or I2,
1093 but that would be much slower, and this ought to be equivalent. */
1094
1095 p = prev_nonnote_insn (insn);
1096 if (p && p != pred && GET_CODE (p) == INSN && sets_cc0_p (PATTERN (p))
1097 && ! all_adjacent)
1098 return 0;
1099#endif
1100
1101 /* If we get here, we have passed all the tests and the combination is
1102 to be allowed. */
1103
1104 *pdest = dest;
1105 *psrc = src;
1106
1107 return 1;
1108}
1109\f
956d6950
JL
1110/* Check if PAT is an insn - or a part of it - used to set up an
1111 argument for a function in a hard register. */
1112
1113static int
1114sets_function_arg_p (pat)
1115 rtx pat;
1116{
1117 int i;
1118 rtx inner_dest;
1119
1120 switch (GET_CODE (pat))
1121 {
1122 case INSN:
1123 return sets_function_arg_p (PATTERN (pat));
1124
1125 case PARALLEL:
1126 for (i = XVECLEN (pat, 0); --i >= 0;)
1127 if (sets_function_arg_p (XVECEXP (pat, 0, i)))
1128 return 1;
1129
1130 break;
1131
1132 case SET:
1133 inner_dest = SET_DEST (pat);
1134 while (GET_CODE (inner_dest) == STRICT_LOW_PART
1135 || GET_CODE (inner_dest) == SUBREG
1136 || GET_CODE (inner_dest) == ZERO_EXTRACT)
1137 inner_dest = XEXP (inner_dest, 0);
1138
1139 return (GET_CODE (inner_dest) == REG
1140 && REGNO (inner_dest) < FIRST_PSEUDO_REGISTER
1141 && FUNCTION_ARG_REGNO_P (REGNO (inner_dest)));
1d300e19
KG
1142
1143 default:
1144 break;
956d6950
JL
1145 }
1146
1147 return 0;
1148}
1149
230d793d
RS
1150/* LOC is the location within I3 that contains its pattern or the component
1151 of a PARALLEL of the pattern. We validate that it is valid for combining.
1152
1153 One problem is if I3 modifies its output, as opposed to replacing it
1154 entirely, we can't allow the output to contain I2DEST or I1DEST as doing
1155 so would produce an insn that is not equivalent to the original insns.
1156
1157 Consider:
1158
1159 (set (reg:DI 101) (reg:DI 100))
1160 (set (subreg:SI (reg:DI 101) 0) <foo>)
1161
1162 This is NOT equivalent to:
1163
1164 (parallel [(set (subreg:SI (reg:DI 100) 0) <foo>)
1165 (set (reg:DI 101) (reg:DI 100))])
1166
1167 Not only does this modify 100 (in which case it might still be valid
1168 if 100 were dead in I2), it sets 101 to the ORIGINAL value of 100.
1169
1170 We can also run into a problem if I2 sets a register that I1
1171 uses and I1 gets directly substituted into I3 (not via I2). In that
1172 case, we would be getting the wrong value of I2DEST into I3, so we
1173 must reject the combination. This case occurs when I2 and I1 both
1174 feed into I3, rather than when I1 feeds into I2, which feeds into I3.
1175 If I1_NOT_IN_SRC is non-zero, it means that finding I1 in the source
1176 of a SET must prevent combination from occurring.
1177
e9a25f70 1178 On machines where SMALL_REGISTER_CLASSES is non-zero, we don't combine
c448a43e
RK
1179 if the destination of a SET is a hard register that isn't a user
1180 variable.
230d793d
RS
1181
1182 Before doing the above check, we first try to expand a field assignment
1183 into a set of logical operations.
1184
1185 If PI3_DEST_KILLED is non-zero, it is a pointer to a location in which
1186 we place a register that is both set and used within I3. If more than one
1187 such register is detected, we fail.
1188
1189 Return 1 if the combination is valid, zero otherwise. */
1190
1191static int
1192combinable_i3pat (i3, loc, i2dest, i1dest, i1_not_in_src, pi3dest_killed)
1193 rtx i3;
1194 rtx *loc;
1195 rtx i2dest;
1196 rtx i1dest;
1197 int i1_not_in_src;
1198 rtx *pi3dest_killed;
1199{
1200 rtx x = *loc;
1201
1202 if (GET_CODE (x) == SET)
1203 {
1204 rtx set = expand_field_assignment (x);
1205 rtx dest = SET_DEST (set);
1206 rtx src = SET_SRC (set);
29a82058
JL
1207 rtx inner_dest = dest;
1208
1209#if 0
1210 rtx inner_src = src;
1211#endif
230d793d
RS
1212
1213 SUBST (*loc, set);
1214
1215 while (GET_CODE (inner_dest) == STRICT_LOW_PART
1216 || GET_CODE (inner_dest) == SUBREG
1217 || GET_CODE (inner_dest) == ZERO_EXTRACT)
1218 inner_dest = XEXP (inner_dest, 0);
1219
1220 /* We probably don't need this any more now that LIMIT_RELOAD_CLASS
1221 was added. */
1222#if 0
1223 while (GET_CODE (inner_src) == STRICT_LOW_PART
1224 || GET_CODE (inner_src) == SUBREG
1225 || GET_CODE (inner_src) == ZERO_EXTRACT)
1226 inner_src = XEXP (inner_src, 0);
1227
1228 /* If it is better that two different modes keep two different pseudos,
1229 avoid combining them. This avoids producing the following pattern
1230 on a 386:
1231 (set (subreg:SI (reg/v:QI 21) 0)
1232 (lshiftrt:SI (reg/v:SI 20)
1233 (const_int 24)))
1234 If that were made, reload could not handle the pair of
1235 reg 20/21, since it would try to get any GENERAL_REGS
1236 but some of them don't handle QImode. */
1237
1238 if (rtx_equal_p (inner_src, i2dest)
1239 && GET_CODE (inner_dest) == REG
1240 && ! MODES_TIEABLE_P (GET_MODE (i2dest), GET_MODE (inner_dest)))
1241 return 0;
1242#endif
1243
1244 /* Check for the case where I3 modifies its output, as
1245 discussed above. */
1246 if ((inner_dest != dest
1247 && (reg_overlap_mentioned_p (i2dest, inner_dest)
1248 || (i1dest && reg_overlap_mentioned_p (i1dest, inner_dest))))
956d6950 1249
3f508eca
RK
1250 /* This is the same test done in can_combine_p except that we
1251 allow a hard register with SMALL_REGISTER_CLASSES if SRC is a
956d6950
JL
1252 CALL operation. Moreover, we can't test all_adjacent; we don't
1253 have to, since this instruction will stay in place, thus we are
1254 not considering increasing the lifetime of INNER_DEST.
1255
1256 Also, if this insn sets a function argument, combining it with
1257 something that might need a spill could clobber a previous
1258 function argument; the all_adjacent test in can_combine_p also
1259 checks this; here, we do a more specific test for this case. */
1260
230d793d 1261 || (GET_CODE (inner_dest) == REG
dfbe1b2f 1262 && REGNO (inner_dest) < FIRST_PSEUDO_REGISTER
c448a43e
RK
1263 && (! HARD_REGNO_MODE_OK (REGNO (inner_dest),
1264 GET_MODE (inner_dest))
e9a25f70
JL
1265 || (SMALL_REGISTER_CLASSES && GET_CODE (src) != CALL
1266 && ! REG_USERVAR_P (inner_dest)
956d6950
JL
1267 && (FUNCTION_VALUE_REGNO_P (REGNO (inner_dest))
1268 || (FUNCTION_ARG_REGNO_P (REGNO (inner_dest))
1269 && i3 != 0
1270 && sets_function_arg_p (prev_nonnote_insn (i3)))))))
230d793d
RS
1271 || (i1_not_in_src && reg_overlap_mentioned_p (i1dest, src)))
1272 return 0;
1273
1274 /* If DEST is used in I3, it is being killed in this insn,
36a9c2e9
JL
1275 so record that for later.
1276 Never add REG_DEAD notes for the FRAME_POINTER_REGNUM or the
1277 STACK_POINTER_REGNUM, since these are always considered to be
1278 live. Similarly for ARG_POINTER_REGNUM if it is fixed. */
230d793d 1279 if (pi3dest_killed && GET_CODE (dest) == REG
36a9c2e9
JL
1280 && reg_referenced_p (dest, PATTERN (i3))
1281 && REGNO (dest) != FRAME_POINTER_REGNUM
6d7096b0
DE
1282#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
1283 && REGNO (dest) != HARD_FRAME_POINTER_REGNUM
1284#endif
36a9c2e9
JL
1285#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
1286 && (REGNO (dest) != ARG_POINTER_REGNUM
1287 || ! fixed_regs [REGNO (dest)])
1288#endif
1289 && REGNO (dest) != STACK_POINTER_REGNUM)
230d793d
RS
1290 {
1291 if (*pi3dest_killed)
1292 return 0;
1293
1294 *pi3dest_killed = dest;
1295 }
1296 }
1297
1298 else if (GET_CODE (x) == PARALLEL)
1299 {
1300 int i;
1301
1302 for (i = 0; i < XVECLEN (x, 0); i++)
1303 if (! combinable_i3pat (i3, &XVECEXP (x, 0, i), i2dest, i1dest,
1304 i1_not_in_src, pi3dest_killed))
1305 return 0;
1306 }
1307
1308 return 1;
1309}
1310\f
1311/* Try to combine the insns I1 and I2 into I3.
1312 Here I1 and I2 appear earlier than I3.
1313 I1 can be zero; then we combine just I2 into I3.
1314
1315 It we are combining three insns and the resulting insn is not recognized,
1316 try splitting it into two insns. If that happens, I2 and I3 are retained
1317 and I1 is pseudo-deleted by turning it into a NOTE. Otherwise, I1 and I2
1318 are pseudo-deleted.
1319
abe6e52f
RK
1320 Return 0 if the combination does not work. Then nothing is changed.
1321 If we did the combination, return the insn at which combine should
1322 resume scanning. */
230d793d
RS
1323
1324static rtx
1325try_combine (i3, i2, i1)
1326 register rtx i3, i2, i1;
1327{
1328 /* New patterns for I3 and I3, respectively. */
1329 rtx newpat, newi2pat = 0;
1330 /* Indicates need to preserve SET in I1 or I2 in I3 if it is not dead. */
1331 int added_sets_1, added_sets_2;
1332 /* Total number of SETs to put into I3. */
1333 int total_sets;
1334 /* Nonzero is I2's body now appears in I3. */
1335 int i2_is_used;
1336 /* INSN_CODEs for new I3, new I2, and user of condition code. */
1337 int insn_code_number, i2_code_number, other_code_number;
1338 /* Contains I3 if the destination of I3 is used in its source, which means
1339 that the old life of I3 is being killed. If that usage is placed into
1340 I2 and not in I3, a REG_DEAD note must be made. */
1341 rtx i3dest_killed = 0;
1342 /* SET_DEST and SET_SRC of I2 and I1. */
1343 rtx i2dest, i2src, i1dest = 0, i1src = 0;
1344 /* PATTERN (I2), or a copy of it in certain cases. */
1345 rtx i2pat;
1346 /* Indicates if I2DEST or I1DEST is in I2SRC or I1_SRC. */
c4e861e8 1347 int i2dest_in_i2src = 0, i1dest_in_i1src = 0, i2dest_in_i1src = 0;
230d793d
RS
1348 int i1_feeds_i3 = 0;
1349 /* Notes that must be added to REG_NOTES in I3 and I2. */
1350 rtx new_i3_notes, new_i2_notes;
176c9e6b
JW
1351 /* Notes that we substituted I3 into I2 instead of the normal case. */
1352 int i3_subst_into_i2 = 0;
df7d75de
RK
1353 /* Notes that I1, I2 or I3 is a MULT operation. */
1354 int have_mult = 0;
a29ca9db
RK
1355 /* Number of clobbers of SCRATCH we had to add. */
1356 int i3_scratches = 0, i2_scratches = 0, other_scratches = 0;
230d793d
RS
1357
1358 int maxreg;
1359 rtx temp;
1360 register rtx link;
1361 int i;
1362
1363 /* If any of I1, I2, and I3 isn't really an insn, we can't do anything.
1364 This can occur when flow deletes an insn that it has merged into an
1365 auto-increment address. We also can't do anything if I3 has a
1366 REG_LIBCALL note since we don't want to disrupt the contiguity of a
1367 libcall. */
1368
1369 if (GET_RTX_CLASS (GET_CODE (i3)) != 'i'
1370 || GET_RTX_CLASS (GET_CODE (i2)) != 'i'
1371 || (i1 && GET_RTX_CLASS (GET_CODE (i1)) != 'i')
ec35104c
JL
1372#if 0
1373 /* ??? This gives worse code, and appears to be unnecessary, since no
1374 pass after flow uses REG_LIBCALL/REG_RETVAL notes. */
1375 || find_reg_note (i3, REG_LIBCALL, NULL_RTX)
1376#endif
1377)
230d793d
RS
1378 return 0;
1379
1380 combine_attempts++;
1381
241cea85 1382 undobuf.undos = undobuf.previous_undos = 0;
230d793d
RS
1383 undobuf.other_insn = 0;
1384
1385 /* Save the current high-water-mark so we can free storage if we didn't
1386 accept this combination. */
1387 undobuf.storage = (char *) oballoc (0);
1388
6e25d159
RK
1389 /* Reset the hard register usage information. */
1390 CLEAR_HARD_REG_SET (newpat_used_regs);
1391
230d793d
RS
1392 /* If I1 and I2 both feed I3, they can be in any order. To simplify the
1393 code below, set I1 to be the earlier of the two insns. */
1394 if (i1 && INSN_CUID (i1) > INSN_CUID (i2))
1395 temp = i1, i1 = i2, i2 = temp;
1396
abe6e52f 1397 added_links_insn = 0;
137e889e 1398
230d793d
RS
1399 /* First check for one important special-case that the code below will
1400 not handle. Namely, the case where I1 is zero, I2 has multiple sets,
1401 and I3 is a SET whose SET_SRC is a SET_DEST in I2. In that case,
1402 we may be able to replace that destination with the destination of I3.
1403 This occurs in the common code where we compute both a quotient and
1404 remainder into a structure, in which case we want to do the computation
1405 directly into the structure to avoid register-register copies.
1406
1407 We make very conservative checks below and only try to handle the
1408 most common cases of this. For example, we only handle the case
1409 where I2 and I3 are adjacent to avoid making difficult register
1410 usage tests. */
1411
1412 if (i1 == 0 && GET_CODE (i3) == INSN && GET_CODE (PATTERN (i3)) == SET
1413 && GET_CODE (SET_SRC (PATTERN (i3))) == REG
1414 && REGNO (SET_SRC (PATTERN (i3))) >= FIRST_PSEUDO_REGISTER
f95182a4 1415 && (! SMALL_REGISTER_CLASSES
e9a25f70
JL
1416 || (GET_CODE (SET_DEST (PATTERN (i3))) != REG
1417 || REGNO (SET_DEST (PATTERN (i3))) >= FIRST_PSEUDO_REGISTER
1418 || REG_USERVAR_P (SET_DEST (PATTERN (i3)))))
230d793d
RS
1419 && find_reg_note (i3, REG_DEAD, SET_SRC (PATTERN (i3)))
1420 && GET_CODE (PATTERN (i2)) == PARALLEL
1421 && ! side_effects_p (SET_DEST (PATTERN (i3)))
5089e22e
RS
1422 /* If the dest of I3 is a ZERO_EXTRACT or STRICT_LOW_PART, the code
1423 below would need to check what is inside (and reg_overlap_mentioned_p
1424 doesn't support those codes anyway). Don't allow those destinations;
1425 the resulting insn isn't likely to be recognized anyway. */
1426 && GET_CODE (SET_DEST (PATTERN (i3))) != ZERO_EXTRACT
1427 && GET_CODE (SET_DEST (PATTERN (i3))) != STRICT_LOW_PART
230d793d
RS
1428 && ! reg_overlap_mentioned_p (SET_SRC (PATTERN (i3)),
1429 SET_DEST (PATTERN (i3)))
1430 && next_real_insn (i2) == i3)
5089e22e
RS
1431 {
1432 rtx p2 = PATTERN (i2);
1433
1434 /* Make sure that the destination of I3,
1435 which we are going to substitute into one output of I2,
1436 is not used within another output of I2. We must avoid making this:
1437 (parallel [(set (mem (reg 69)) ...)
1438 (set (reg 69) ...)])
1439 which is not well-defined as to order of actions.
1440 (Besides, reload can't handle output reloads for this.)
1441
1442 The problem can also happen if the dest of I3 is a memory ref,
1443 if another dest in I2 is an indirect memory ref. */
1444 for (i = 0; i < XVECLEN (p2, 0); i++)
7ca919b7
RK
1445 if ((GET_CODE (XVECEXP (p2, 0, i)) == SET
1446 || GET_CODE (XVECEXP (p2, 0, i)) == CLOBBER)
5089e22e
RS
1447 && reg_overlap_mentioned_p (SET_DEST (PATTERN (i3)),
1448 SET_DEST (XVECEXP (p2, 0, i))))
1449 break;
230d793d 1450
5089e22e
RS
1451 if (i == XVECLEN (p2, 0))
1452 for (i = 0; i < XVECLEN (p2, 0); i++)
1453 if (SET_DEST (XVECEXP (p2, 0, i)) == SET_SRC (PATTERN (i3)))
1454 {
1455 combine_merges++;
230d793d 1456
5089e22e
RS
1457 subst_insn = i3;
1458 subst_low_cuid = INSN_CUID (i2);
230d793d 1459
c4e861e8 1460 added_sets_2 = added_sets_1 = 0;
5089e22e 1461 i2dest = SET_SRC (PATTERN (i3));
230d793d 1462
5089e22e
RS
1463 /* Replace the dest in I2 with our dest and make the resulting
1464 insn the new pattern for I3. Then skip to where we
1465 validate the pattern. Everything was set up above. */
1466 SUBST (SET_DEST (XVECEXP (p2, 0, i)),
1467 SET_DEST (PATTERN (i3)));
1468
1469 newpat = p2;
176c9e6b 1470 i3_subst_into_i2 = 1;
5089e22e
RS
1471 goto validate_replacement;
1472 }
1473 }
230d793d
RS
1474
1475#ifndef HAVE_cc0
1476 /* If we have no I1 and I2 looks like:
1477 (parallel [(set (reg:CC X) (compare:CC OP (const_int 0)))
1478 (set Y OP)])
1479 make up a dummy I1 that is
1480 (set Y OP)
1481 and change I2 to be
1482 (set (reg:CC X) (compare:CC Y (const_int 0)))
1483
1484 (We can ignore any trailing CLOBBERs.)
1485
1486 This undoes a previous combination and allows us to match a branch-and-
1487 decrement insn. */
1488
1489 if (i1 == 0 && GET_CODE (PATTERN (i2)) == PARALLEL
1490 && XVECLEN (PATTERN (i2), 0) >= 2
1491 && GET_CODE (XVECEXP (PATTERN (i2), 0, 0)) == SET
1492 && (GET_MODE_CLASS (GET_MODE (SET_DEST (XVECEXP (PATTERN (i2), 0, 0))))
1493 == MODE_CC)
1494 && GET_CODE (SET_SRC (XVECEXP (PATTERN (i2), 0, 0))) == COMPARE
1495 && XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 1) == const0_rtx
1496 && GET_CODE (XVECEXP (PATTERN (i2), 0, 1)) == SET
1497 && GET_CODE (SET_DEST (XVECEXP (PATTERN (i2), 0, 1))) == REG
1498 && rtx_equal_p (XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 0),
1499 SET_SRC (XVECEXP (PATTERN (i2), 0, 1))))
1500 {
1501 for (i = XVECLEN (PATTERN (i2), 0) - 1; i >= 2; i--)
1502 if (GET_CODE (XVECEXP (PATTERN (i2), 0, i)) != CLOBBER)
1503 break;
1504
1505 if (i == 1)
1506 {
1507 /* We make I1 with the same INSN_UID as I2. This gives it
1508 the same INSN_CUID for value tracking. Our fake I1 will
1509 never appear in the insn stream so giving it the same INSN_UID
1510 as I2 will not cause a problem. */
1511
0d9641d1 1512 subst_prev_insn = i1
38a448ca
RH
1513 = gen_rtx_INSN (VOIDmode, INSN_UID (i2), NULL_RTX, i2,
1514 XVECEXP (PATTERN (i2), 0, 1), -1, NULL_RTX,
1515 NULL_RTX);
230d793d
RS
1516
1517 SUBST (PATTERN (i2), XVECEXP (PATTERN (i2), 0, 0));
1518 SUBST (XEXP (SET_SRC (PATTERN (i2)), 0),
1519 SET_DEST (PATTERN (i1)));
1520 }
1521 }
1522#endif
1523
1524 /* Verify that I2 and I1 are valid for combining. */
5f4f0e22
CH
1525 if (! can_combine_p (i2, i3, i1, NULL_RTX, &i2dest, &i2src)
1526 || (i1 && ! can_combine_p (i1, i3, NULL_RTX, i2, &i1dest, &i1src)))
230d793d
RS
1527 {
1528 undo_all ();
1529 return 0;
1530 }
1531
1532 /* Record whether I2DEST is used in I2SRC and similarly for the other
1533 cases. Knowing this will help in register status updating below. */
1534 i2dest_in_i2src = reg_overlap_mentioned_p (i2dest, i2src);
1535 i1dest_in_i1src = i1 && reg_overlap_mentioned_p (i1dest, i1src);
1536 i2dest_in_i1src = i1 && reg_overlap_mentioned_p (i2dest, i1src);
1537
916f14f1 1538 /* See if I1 directly feeds into I3. It does if I1DEST is not used
230d793d
RS
1539 in I2SRC. */
1540 i1_feeds_i3 = i1 && ! reg_overlap_mentioned_p (i1dest, i2src);
1541
1542 /* Ensure that I3's pattern can be the destination of combines. */
1543 if (! combinable_i3pat (i3, &PATTERN (i3), i2dest, i1dest,
1544 i1 && i2dest_in_i1src && i1_feeds_i3,
1545 &i3dest_killed))
1546 {
1547 undo_all ();
1548 return 0;
1549 }
1550
df7d75de
RK
1551 /* See if any of the insns is a MULT operation. Unless one is, we will
1552 reject a combination that is, since it must be slower. Be conservative
1553 here. */
1554 if (GET_CODE (i2src) == MULT
1555 || (i1 != 0 && GET_CODE (i1src) == MULT)
1556 || (GET_CODE (PATTERN (i3)) == SET
1557 && GET_CODE (SET_SRC (PATTERN (i3))) == MULT))
1558 have_mult = 1;
1559
230d793d
RS
1560 /* If I3 has an inc, then give up if I1 or I2 uses the reg that is inc'd.
1561 We used to do this EXCEPT in one case: I3 has a post-inc in an
1562 output operand. However, that exception can give rise to insns like
1563 mov r3,(r3)+
1564 which is a famous insn on the PDP-11 where the value of r3 used as the
5089e22e 1565 source was model-dependent. Avoid this sort of thing. */
230d793d
RS
1566
1567#if 0
1568 if (!(GET_CODE (PATTERN (i3)) == SET
1569 && GET_CODE (SET_SRC (PATTERN (i3))) == REG
1570 && GET_CODE (SET_DEST (PATTERN (i3))) == MEM
1571 && (GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_INC
1572 || GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_DEC)))
1573 /* It's not the exception. */
1574#endif
1575#ifdef AUTO_INC_DEC
1576 for (link = REG_NOTES (i3); link; link = XEXP (link, 1))
1577 if (REG_NOTE_KIND (link) == REG_INC
1578 && (reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i2))
1579 || (i1 != 0
1580 && reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i1)))))
1581 {
1582 undo_all ();
1583 return 0;
1584 }
1585#endif
1586
1587 /* See if the SETs in I1 or I2 need to be kept around in the merged
1588 instruction: whenever the value set there is still needed past I3.
1589 For the SETs in I2, this is easy: we see if I2DEST dies or is set in I3.
1590
1591 For the SET in I1, we have two cases: If I1 and I2 independently
1592 feed into I3, the set in I1 needs to be kept around if I1DEST dies
1593 or is set in I3. Otherwise (if I1 feeds I2 which feeds I3), the set
1594 in I1 needs to be kept around unless I1DEST dies or is set in either
1595 I2 or I3. We can distinguish these cases by seeing if I2SRC mentions
1596 I1DEST. If so, we know I1 feeds into I2. */
1597
1598 added_sets_2 = ! dead_or_set_p (i3, i2dest);
1599
1600 added_sets_1
1601 = i1 && ! (i1_feeds_i3 ? dead_or_set_p (i3, i1dest)
1602 : (dead_or_set_p (i3, i1dest) || dead_or_set_p (i2, i1dest)));
1603
1604 /* If the set in I2 needs to be kept around, we must make a copy of
1605 PATTERN (I2), so that when we substitute I1SRC for I1DEST in
5089e22e 1606 PATTERN (I2), we are only substituting for the original I1DEST, not into
230d793d
RS
1607 an already-substituted copy. This also prevents making self-referential
1608 rtx. If I2 is a PARALLEL, we just need the piece that assigns I2SRC to
1609 I2DEST. */
1610
1611 i2pat = (GET_CODE (PATTERN (i2)) == PARALLEL
38a448ca 1612 ? gen_rtx_SET (VOIDmode, i2dest, i2src)
230d793d
RS
1613 : PATTERN (i2));
1614
1615 if (added_sets_2)
1616 i2pat = copy_rtx (i2pat);
1617
1618 combine_merges++;
1619
1620 /* Substitute in the latest insn for the regs set by the earlier ones. */
1621
1622 maxreg = max_reg_num ();
1623
1624 subst_insn = i3;
230d793d
RS
1625
1626 /* It is possible that the source of I2 or I1 may be performing an
1627 unneeded operation, such as a ZERO_EXTEND of something that is known
1628 to have the high part zero. Handle that case by letting subst look at
1629 the innermost one of them.
1630
1631 Another way to do this would be to have a function that tries to
1632 simplify a single insn instead of merging two or more insns. We don't
1633 do this because of the potential of infinite loops and because
1634 of the potential extra memory required. However, doing it the way
1635 we are is a bit of a kludge and doesn't catch all cases.
1636
1637 But only do this if -fexpensive-optimizations since it slows things down
1638 and doesn't usually win. */
1639
1640 if (flag_expensive_optimizations)
1641 {
1642 /* Pass pc_rtx so no substitutions are done, just simplifications.
1643 The cases that we are interested in here do not involve the few
1644 cases were is_replaced is checked. */
1645 if (i1)
d0ab8cd3
RK
1646 {
1647 subst_low_cuid = INSN_CUID (i1);
1648 i1src = subst (i1src, pc_rtx, pc_rtx, 0, 0);
1649 }
230d793d 1650 else
d0ab8cd3
RK
1651 {
1652 subst_low_cuid = INSN_CUID (i2);
1653 i2src = subst (i2src, pc_rtx, pc_rtx, 0, 0);
1654 }
230d793d 1655
241cea85 1656 undobuf.previous_undos = undobuf.undos;
230d793d
RS
1657 }
1658
1659#ifndef HAVE_cc0
1660 /* Many machines that don't use CC0 have insns that can both perform an
1661 arithmetic operation and set the condition code. These operations will
1662 be represented as a PARALLEL with the first element of the vector
1663 being a COMPARE of an arithmetic operation with the constant zero.
1664 The second element of the vector will set some pseudo to the result
1665 of the same arithmetic operation. If we simplify the COMPARE, we won't
1666 match such a pattern and so will generate an extra insn. Here we test
1667 for this case, where both the comparison and the operation result are
1668 needed, and make the PARALLEL by just replacing I2DEST in I3SRC with
1669 I2SRC. Later we will make the PARALLEL that contains I2. */
1670
1671 if (i1 == 0 && added_sets_2 && GET_CODE (PATTERN (i3)) == SET
1672 && GET_CODE (SET_SRC (PATTERN (i3))) == COMPARE
1673 && XEXP (SET_SRC (PATTERN (i3)), 1) == const0_rtx
1674 && rtx_equal_p (XEXP (SET_SRC (PATTERN (i3)), 0), i2dest))
1675 {
081f5e7e 1676#ifdef EXTRA_CC_MODES
230d793d
RS
1677 rtx *cc_use;
1678 enum machine_mode compare_mode;
081f5e7e 1679#endif
230d793d
RS
1680
1681 newpat = PATTERN (i3);
1682 SUBST (XEXP (SET_SRC (newpat), 0), i2src);
1683
1684 i2_is_used = 1;
1685
1686#ifdef EXTRA_CC_MODES
1687 /* See if a COMPARE with the operand we substituted in should be done
1688 with the mode that is currently being used. If not, do the same
1689 processing we do in `subst' for a SET; namely, if the destination
1690 is used only once, try to replace it with a register of the proper
1691 mode and also replace the COMPARE. */
1692 if (undobuf.other_insn == 0
1693 && (cc_use = find_single_use (SET_DEST (newpat), i3,
1694 &undobuf.other_insn))
77fa0940
RK
1695 && ((compare_mode = SELECT_CC_MODE (GET_CODE (*cc_use),
1696 i2src, const0_rtx))
230d793d
RS
1697 != GET_MODE (SET_DEST (newpat))))
1698 {
1699 int regno = REGNO (SET_DEST (newpat));
38a448ca 1700 rtx new_dest = gen_rtx_REG (compare_mode, regno);
230d793d
RS
1701
1702 if (regno < FIRST_PSEUDO_REGISTER
b1f21e0a 1703 || (REG_N_SETS (regno) == 1 && ! added_sets_2
230d793d
RS
1704 && ! REG_USERVAR_P (SET_DEST (newpat))))
1705 {
1706 if (regno >= FIRST_PSEUDO_REGISTER)
1707 SUBST (regno_reg_rtx[regno], new_dest);
1708
1709 SUBST (SET_DEST (newpat), new_dest);
1710 SUBST (XEXP (*cc_use, 0), new_dest);
1711 SUBST (SET_SRC (newpat),
1712 gen_rtx_combine (COMPARE, compare_mode,
1713 i2src, const0_rtx));
1714 }
1715 else
1716 undobuf.other_insn = 0;
1717 }
1718#endif
1719 }
1720 else
1721#endif
1722 {
1723 n_occurrences = 0; /* `subst' counts here */
1724
1725 /* If I1 feeds into I2 (not into I3) and I1DEST is in I1SRC, we
1726 need to make a unique copy of I2SRC each time we substitute it
1727 to avoid self-referential rtl. */
1728
d0ab8cd3 1729 subst_low_cuid = INSN_CUID (i2);
230d793d
RS
1730 newpat = subst (PATTERN (i3), i2dest, i2src, 0,
1731 ! i1_feeds_i3 && i1dest_in_i1src);
241cea85 1732 undobuf.previous_undos = undobuf.undos;
230d793d
RS
1733
1734 /* Record whether i2's body now appears within i3's body. */
1735 i2_is_used = n_occurrences;
1736 }
1737
1738 /* If we already got a failure, don't try to do more. Otherwise,
1739 try to substitute in I1 if we have it. */
1740
1741 if (i1 && GET_CODE (newpat) != CLOBBER)
1742 {
1743 /* Before we can do this substitution, we must redo the test done
1744 above (see detailed comments there) that ensures that I1DEST
0f41302f 1745 isn't mentioned in any SETs in NEWPAT that are field assignments. */
230d793d 1746
5f4f0e22
CH
1747 if (! combinable_i3pat (NULL_RTX, &newpat, i1dest, NULL_RTX,
1748 0, NULL_PTR))
230d793d
RS
1749 {
1750 undo_all ();
1751 return 0;
1752 }
1753
1754 n_occurrences = 0;
d0ab8cd3 1755 subst_low_cuid = INSN_CUID (i1);
230d793d 1756 newpat = subst (newpat, i1dest, i1src, 0, 0);
241cea85 1757 undobuf.previous_undos = undobuf.undos;
230d793d
RS
1758 }
1759
916f14f1
RK
1760 /* Fail if an autoincrement side-effect has been duplicated. Be careful
1761 to count all the ways that I2SRC and I1SRC can be used. */
5f4f0e22 1762 if ((FIND_REG_INC_NOTE (i2, NULL_RTX) != 0
916f14f1 1763 && i2_is_used + added_sets_2 > 1)
5f4f0e22 1764 || (i1 != 0 && FIND_REG_INC_NOTE (i1, NULL_RTX) != 0
916f14f1
RK
1765 && (n_occurrences + added_sets_1 + (added_sets_2 && ! i1_feeds_i3)
1766 > 1))
230d793d
RS
1767 /* Fail if we tried to make a new register (we used to abort, but there's
1768 really no reason to). */
1769 || max_reg_num () != maxreg
1770 /* Fail if we couldn't do something and have a CLOBBER. */
df7d75de
RK
1771 || GET_CODE (newpat) == CLOBBER
1772 /* Fail if this new pattern is a MULT and we didn't have one before
1773 at the outer level. */
1774 || (GET_CODE (newpat) == SET && GET_CODE (SET_SRC (newpat)) == MULT
1775 && ! have_mult))
230d793d
RS
1776 {
1777 undo_all ();
1778 return 0;
1779 }
1780
1781 /* If the actions of the earlier insns must be kept
1782 in addition to substituting them into the latest one,
1783 we must make a new PARALLEL for the latest insn
1784 to hold additional the SETs. */
1785
1786 if (added_sets_1 || added_sets_2)
1787 {
1788 combine_extras++;
1789
1790 if (GET_CODE (newpat) == PARALLEL)
1791 {
1792 rtvec old = XVEC (newpat, 0);
1793 total_sets = XVECLEN (newpat, 0) + added_sets_1 + added_sets_2;
38a448ca 1794 newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total_sets));
59888de2 1795 bcopy ((char *) &old->elem[0], (char *) XVEC (newpat, 0)->elem,
230d793d
RS
1796 sizeof (old->elem[0]) * old->num_elem);
1797 }
1798 else
1799 {
1800 rtx old = newpat;
1801 total_sets = 1 + added_sets_1 + added_sets_2;
38a448ca 1802 newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total_sets));
230d793d
RS
1803 XVECEXP (newpat, 0, 0) = old;
1804 }
1805
1806 if (added_sets_1)
1807 XVECEXP (newpat, 0, --total_sets)
1808 = (GET_CODE (PATTERN (i1)) == PARALLEL
38a448ca 1809 ? gen_rtx_SET (VOIDmode, i1dest, i1src) : PATTERN (i1));
230d793d
RS
1810
1811 if (added_sets_2)
1812 {
1813 /* If there is no I1, use I2's body as is. We used to also not do
1814 the subst call below if I2 was substituted into I3,
1815 but that could lose a simplification. */
1816 if (i1 == 0)
1817 XVECEXP (newpat, 0, --total_sets) = i2pat;
1818 else
1819 /* See comment where i2pat is assigned. */
1820 XVECEXP (newpat, 0, --total_sets)
1821 = subst (i2pat, i1dest, i1src, 0, 0);
1822 }
1823 }
1824
1825 /* We come here when we are replacing a destination in I2 with the
1826 destination of I3. */
1827 validate_replacement:
1828
6e25d159
RK
1829 /* Note which hard regs this insn has as inputs. */
1830 mark_used_regs_combine (newpat);
1831
230d793d 1832 /* Is the result of combination a valid instruction? */
a29ca9db
RK
1833 insn_code_number
1834 = recog_for_combine (&newpat, i3, &new_i3_notes, &i3_scratches);
230d793d
RS
1835
1836 /* If the result isn't valid, see if it is a PARALLEL of two SETs where
1837 the second SET's destination is a register that is unused. In that case,
1838 we just need the first SET. This can occur when simplifying a divmod
1839 insn. We *must* test for this case here because the code below that
1840 splits two independent SETs doesn't handle this case correctly when it
1841 updates the register status. Also check the case where the first
1842 SET's destination is unused. That would not cause incorrect code, but
1843 does cause an unneeded insn to remain. */
1844
1845 if (insn_code_number < 0 && GET_CODE (newpat) == PARALLEL
1846 && XVECLEN (newpat, 0) == 2
1847 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
1848 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
1849 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) == REG
1850 && find_reg_note (i3, REG_UNUSED, SET_DEST (XVECEXP (newpat, 0, 1)))
1851 && ! side_effects_p (SET_SRC (XVECEXP (newpat, 0, 1)))
1852 && asm_noperands (newpat) < 0)
1853 {
1854 newpat = XVECEXP (newpat, 0, 0);
a29ca9db
RK
1855 insn_code_number
1856 = recog_for_combine (&newpat, i3, &new_i3_notes, &i3_scratches);
230d793d
RS
1857 }
1858
1859 else if (insn_code_number < 0 && GET_CODE (newpat) == PARALLEL
1860 && XVECLEN (newpat, 0) == 2
1861 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
1862 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
1863 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) == REG
1864 && find_reg_note (i3, REG_UNUSED, SET_DEST (XVECEXP (newpat, 0, 0)))
1865 && ! side_effects_p (SET_SRC (XVECEXP (newpat, 0, 0)))
1866 && asm_noperands (newpat) < 0)
1867 {
1868 newpat = XVECEXP (newpat, 0, 1);
a29ca9db
RK
1869 insn_code_number
1870 = recog_for_combine (&newpat, i3, &new_i3_notes, &i3_scratches);
230d793d
RS
1871 }
1872
1873 /* If we were combining three insns and the result is a simple SET
1874 with no ASM_OPERANDS that wasn't recognized, try to split it into two
916f14f1
RK
1875 insns. There are two ways to do this. It can be split using a
1876 machine-specific method (like when you have an addition of a large
1877 constant) or by combine in the function find_split_point. */
1878
230d793d
RS
1879 if (i1 && insn_code_number < 0 && GET_CODE (newpat) == SET
1880 && asm_noperands (newpat) < 0)
1881 {
916f14f1 1882 rtx m_split, *split;
42495ca0 1883 rtx ni2dest = i2dest;
916f14f1
RK
1884
1885 /* See if the MD file can split NEWPAT. If it can't, see if letting it
42495ca0
RK
1886 use I2DEST as a scratch register will help. In the latter case,
1887 convert I2DEST to the mode of the source of NEWPAT if we can. */
916f14f1
RK
1888
1889 m_split = split_insns (newpat, i3);
a70c61d9
JW
1890
1891 /* We can only use I2DEST as a scratch reg if it doesn't overlap any
1892 inputs of NEWPAT. */
1893
1894 /* ??? If I2DEST is not safe, and I1DEST exists, then it would be
1895 possible to try that as a scratch reg. This would require adding
1896 more code to make it work though. */
1897
1898 if (m_split == 0 && ! reg_overlap_mentioned_p (ni2dest, newpat))
42495ca0
RK
1899 {
1900 /* If I2DEST is a hard register or the only use of a pseudo,
1901 we can change its mode. */
1902 if (GET_MODE (SET_DEST (newpat)) != GET_MODE (i2dest)
02f4ada4 1903 && GET_MODE (SET_DEST (newpat)) != VOIDmode
60654f77 1904 && GET_CODE (i2dest) == REG
42495ca0 1905 && (REGNO (i2dest) < FIRST_PSEUDO_REGISTER
b1f21e0a 1906 || (REG_N_SETS (REGNO (i2dest)) == 1 && ! added_sets_2
42495ca0 1907 && ! REG_USERVAR_P (i2dest))))
38a448ca 1908 ni2dest = gen_rtx_REG (GET_MODE (SET_DEST (newpat)),
42495ca0
RK
1909 REGNO (i2dest));
1910
38a448ca
RH
1911 m_split = split_insns
1912 (gen_rtx_PARALLEL (VOIDmode,
1913 gen_rtvec (2, newpat,
1914 gen_rtx_CLOBBER (VOIDmode,
1915 ni2dest))),
1916 i3);
42495ca0 1917 }
916f14f1
RK
1918
1919 if (m_split && GET_CODE (m_split) == SEQUENCE
3f508eca
RK
1920 && XVECLEN (m_split, 0) == 2
1921 && (next_real_insn (i2) == i3
1922 || ! use_crosses_set_p (PATTERN (XVECEXP (m_split, 0, 0)),
1923 INSN_CUID (i2))))
916f14f1 1924 {
1a26b032 1925 rtx i2set, i3set;
d0ab8cd3 1926 rtx newi3pat = PATTERN (XVECEXP (m_split, 0, 1));
916f14f1 1927 newi2pat = PATTERN (XVECEXP (m_split, 0, 0));
916f14f1 1928
e4ba89be
RK
1929 i3set = single_set (XVECEXP (m_split, 0, 1));
1930 i2set = single_set (XVECEXP (m_split, 0, 0));
1a26b032 1931
42495ca0
RK
1932 /* In case we changed the mode of I2DEST, replace it in the
1933 pseudo-register table here. We can't do it above in case this
1934 code doesn't get executed and we do a split the other way. */
1935
1936 if (REGNO (i2dest) >= FIRST_PSEUDO_REGISTER)
1937 SUBST (regno_reg_rtx[REGNO (i2dest)], ni2dest);
1938
a29ca9db
RK
1939 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes,
1940 &i2_scratches);
1a26b032
RK
1941
1942 /* If I2 or I3 has multiple SETs, we won't know how to track
9cc96794
RK
1943 register status, so don't use these insns. If I2's destination
1944 is used between I2 and I3, we also can't use these insns. */
1a26b032 1945
9cc96794
RK
1946 if (i2_code_number >= 0 && i2set && i3set
1947 && (next_real_insn (i2) == i3
1948 || ! reg_used_between_p (SET_DEST (i2set), i2, i3)))
a29ca9db
RK
1949 insn_code_number = recog_for_combine (&newi3pat, i3, &new_i3_notes,
1950 &i3_scratches);
d0ab8cd3
RK
1951 if (insn_code_number >= 0)
1952 newpat = newi3pat;
1953
c767f54b 1954 /* It is possible that both insns now set the destination of I3.
22609cbf 1955 If so, we must show an extra use of it. */
c767f54b 1956
393de53f
RK
1957 if (insn_code_number >= 0)
1958 {
1959 rtx new_i3_dest = SET_DEST (i3set);
1960 rtx new_i2_dest = SET_DEST (i2set);
1961
1962 while (GET_CODE (new_i3_dest) == ZERO_EXTRACT
1963 || GET_CODE (new_i3_dest) == STRICT_LOW_PART
1964 || GET_CODE (new_i3_dest) == SUBREG)
1965 new_i3_dest = XEXP (new_i3_dest, 0);
1966
d4096689
RK
1967 while (GET_CODE (new_i2_dest) == ZERO_EXTRACT
1968 || GET_CODE (new_i2_dest) == STRICT_LOW_PART
1969 || GET_CODE (new_i2_dest) == SUBREG)
1970 new_i2_dest = XEXP (new_i2_dest, 0);
1971
393de53f
RK
1972 if (GET_CODE (new_i3_dest) == REG
1973 && GET_CODE (new_i2_dest) == REG
1974 && REGNO (new_i3_dest) == REGNO (new_i2_dest))
b1f21e0a 1975 REG_N_SETS (REGNO (new_i2_dest))++;
393de53f 1976 }
916f14f1 1977 }
230d793d
RS
1978
1979 /* If we can split it and use I2DEST, go ahead and see if that
1980 helps things be recognized. Verify that none of the registers
1981 are set between I2 and I3. */
d0ab8cd3 1982 if (insn_code_number < 0 && (split = find_split_point (&newpat, i3)) != 0
230d793d
RS
1983#ifdef HAVE_cc0
1984 && GET_CODE (i2dest) == REG
1985#endif
1986 /* We need I2DEST in the proper mode. If it is a hard register
1987 or the only use of a pseudo, we can change its mode. */
1988 && (GET_MODE (*split) == GET_MODE (i2dest)
1989 || GET_MODE (*split) == VOIDmode
1990 || REGNO (i2dest) < FIRST_PSEUDO_REGISTER
b1f21e0a 1991 || (REG_N_SETS (REGNO (i2dest)) == 1 && ! added_sets_2
230d793d
RS
1992 && ! REG_USERVAR_P (i2dest)))
1993 && (next_real_insn (i2) == i3
1994 || ! use_crosses_set_p (*split, INSN_CUID (i2)))
1995 /* We can't overwrite I2DEST if its value is still used by
1996 NEWPAT. */
1997 && ! reg_referenced_p (i2dest, newpat))
1998 {
1999 rtx newdest = i2dest;
df7d75de
RK
2000 enum rtx_code split_code = GET_CODE (*split);
2001 enum machine_mode split_mode = GET_MODE (*split);
230d793d
RS
2002
2003 /* Get NEWDEST as a register in the proper mode. We have already
2004 validated that we can do this. */
df7d75de 2005 if (GET_MODE (i2dest) != split_mode && split_mode != VOIDmode)
230d793d 2006 {
38a448ca 2007 newdest = gen_rtx_REG (split_mode, REGNO (i2dest));
230d793d
RS
2008
2009 if (REGNO (i2dest) >= FIRST_PSEUDO_REGISTER)
2010 SUBST (regno_reg_rtx[REGNO (i2dest)], newdest);
2011 }
2012
2013 /* If *SPLIT is a (mult FOO (const_int pow2)), convert it to
2014 an ASHIFT. This can occur if it was inside a PLUS and hence
2015 appeared to be a memory address. This is a kludge. */
df7d75de 2016 if (split_code == MULT
230d793d
RS
2017 && GET_CODE (XEXP (*split, 1)) == CONST_INT
2018 && (i = exact_log2 (INTVAL (XEXP (*split, 1)))) >= 0)
1dc8a823
JW
2019 {
2020 SUBST (*split, gen_rtx_combine (ASHIFT, split_mode,
2021 XEXP (*split, 0), GEN_INT (i)));
2022 /* Update split_code because we may not have a multiply
2023 anymore. */
2024 split_code = GET_CODE (*split);
2025 }
230d793d
RS
2026
2027#ifdef INSN_SCHEDULING
2028 /* If *SPLIT is a paradoxical SUBREG, when we split it, it should
2029 be written as a ZERO_EXTEND. */
df7d75de
RK
2030 if (split_code == SUBREG && GET_CODE (SUBREG_REG (*split)) == MEM)
2031 SUBST (*split, gen_rtx_combine (ZERO_EXTEND, split_mode,
230d793d
RS
2032 XEXP (*split, 0)));
2033#endif
2034
2035 newi2pat = gen_rtx_combine (SET, VOIDmode, newdest, *split);
2036 SUBST (*split, newdest);
a29ca9db
RK
2037 i2_code_number
2038 = recog_for_combine (&newi2pat, i2, &new_i2_notes, &i2_scratches);
df7d75de
RK
2039
2040 /* If the split point was a MULT and we didn't have one before,
2041 don't use one now. */
2042 if (i2_code_number >= 0 && ! (split_code == MULT && ! have_mult))
a29ca9db
RK
2043 insn_code_number
2044 = recog_for_combine (&newpat, i3, &new_i3_notes, &i3_scratches);
230d793d
RS
2045 }
2046 }
2047
2048 /* Check for a case where we loaded from memory in a narrow mode and
2049 then sign extended it, but we need both registers. In that case,
2050 we have a PARALLEL with both loads from the same memory location.
2051 We can split this into a load from memory followed by a register-register
2052 copy. This saves at least one insn, more if register allocation can
f0343c74
RK
2053 eliminate the copy.
2054
2055 We cannot do this if the destination of the second assignment is
2056 a register that we have already assumed is zero-extended. Similarly
2057 for a SUBREG of such a register. */
230d793d
RS
2058
2059 else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0
2060 && GET_CODE (newpat) == PARALLEL
2061 && XVECLEN (newpat, 0) == 2
2062 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
2063 && GET_CODE (SET_SRC (XVECEXP (newpat, 0, 0))) == SIGN_EXTEND
2064 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
2065 && rtx_equal_p (SET_SRC (XVECEXP (newpat, 0, 1)),
2066 XEXP (SET_SRC (XVECEXP (newpat, 0, 0)), 0))
2067 && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 1)),
2068 INSN_CUID (i2))
2069 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT
2070 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART
f0343c74
RK
2071 && ! (temp = SET_DEST (XVECEXP (newpat, 0, 1)),
2072 (GET_CODE (temp) == REG
2073 && reg_nonzero_bits[REGNO (temp)] != 0
2074 && GET_MODE_BITSIZE (GET_MODE (temp)) < BITS_PER_WORD
2075 && GET_MODE_BITSIZE (GET_MODE (temp)) < HOST_BITS_PER_INT
2076 && (reg_nonzero_bits[REGNO (temp)]
2077 != GET_MODE_MASK (word_mode))))
2078 && ! (GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) == SUBREG
2079 && (temp = SUBREG_REG (SET_DEST (XVECEXP (newpat, 0, 1))),
2080 (GET_CODE (temp) == REG
2081 && reg_nonzero_bits[REGNO (temp)] != 0
2082 && GET_MODE_BITSIZE (GET_MODE (temp)) < BITS_PER_WORD
2083 && GET_MODE_BITSIZE (GET_MODE (temp)) < HOST_BITS_PER_INT
2084 && (reg_nonzero_bits[REGNO (temp)]
2085 != GET_MODE_MASK (word_mode)))))
230d793d
RS
2086 && ! reg_overlap_mentioned_p (SET_DEST (XVECEXP (newpat, 0, 1)),
2087 SET_SRC (XVECEXP (newpat, 0, 1)))
2088 && ! find_reg_note (i3, REG_UNUSED,
2089 SET_DEST (XVECEXP (newpat, 0, 0))))
2090 {
472fbdd1
RK
2091 rtx ni2dest;
2092
230d793d 2093 newi2pat = XVECEXP (newpat, 0, 0);
472fbdd1 2094 ni2dest = SET_DEST (XVECEXP (newpat, 0, 0));
230d793d
RS
2095 newpat = XVECEXP (newpat, 0, 1);
2096 SUBST (SET_SRC (newpat),
472fbdd1 2097 gen_lowpart_for_combine (GET_MODE (SET_SRC (newpat)), ni2dest));
a29ca9db
RK
2098 i2_code_number
2099 = recog_for_combine (&newi2pat, i2, &new_i2_notes, &i2_scratches);
2100
230d793d 2101 if (i2_code_number >= 0)
a29ca9db
RK
2102 insn_code_number
2103 = recog_for_combine (&newpat, i3, &new_i3_notes, &i3_scratches);
5089e22e
RS
2104
2105 if (insn_code_number >= 0)
2106 {
2107 rtx insn;
2108 rtx link;
2109
2110 /* If we will be able to accept this, we have made a change to the
2111 destination of I3. This can invalidate a LOG_LINKS pointing
2112 to I3. No other part of combine.c makes such a transformation.
2113
2114 The new I3 will have a destination that was previously the
2115 destination of I1 or I2 and which was used in i2 or I3. Call
2116 distribute_links to make a LOG_LINK from the next use of
2117 that destination. */
2118
2119 PATTERN (i3) = newpat;
38a448ca 2120 distribute_links (gen_rtx_INSN_LIST (VOIDmode, i3, NULL_RTX));
5089e22e
RS
2121
2122 /* I3 now uses what used to be its destination and which is
2123 now I2's destination. That means we need a LOG_LINK from
2124 I3 to I2. But we used to have one, so we still will.
2125
2126 However, some later insn might be using I2's dest and have
2127 a LOG_LINK pointing at I3. We must remove this link.
2128 The simplest way to remove the link is to point it at I1,
2129 which we know will be a NOTE. */
2130
2131 for (insn = NEXT_INSN (i3);
0d4d42c3
RK
2132 insn && (this_basic_block == n_basic_blocks - 1
2133 || insn != basic_block_head[this_basic_block + 1]);
5089e22e
RS
2134 insn = NEXT_INSN (insn))
2135 {
2136 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
472fbdd1 2137 && reg_referenced_p (ni2dest, PATTERN (insn)))
5089e22e
RS
2138 {
2139 for (link = LOG_LINKS (insn); link;
2140 link = XEXP (link, 1))
2141 if (XEXP (link, 0) == i3)
2142 XEXP (link, 0) = i1;
2143
2144 break;
2145 }
2146 }
2147 }
230d793d
RS
2148 }
2149
2150 /* Similarly, check for a case where we have a PARALLEL of two independent
2151 SETs but we started with three insns. In this case, we can do the sets
2152 as two separate insns. This case occurs when some SET allows two
2153 other insns to combine, but the destination of that SET is still live. */
2154
2155 else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0
2156 && GET_CODE (newpat) == PARALLEL
2157 && XVECLEN (newpat, 0) == 2
2158 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
2159 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != ZERO_EXTRACT
2160 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != STRICT_LOW_PART
2161 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
2162 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT
2163 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART
2164 && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 1)),
2165 INSN_CUID (i2))
2166 /* Don't pass sets with (USE (MEM ...)) dests to the following. */
2167 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != USE
2168 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != USE
2169 && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 1)),
2170 XVECEXP (newpat, 0, 0))
2171 && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 0)),
2172 XVECEXP (newpat, 0, 1)))
2173 {
e9a25f70
JL
2174 /* Normally, it doesn't matter which of the two is done first,
2175 but it does if one references cc0. In that case, it has to
2176 be first. */
2177#ifdef HAVE_cc0
2178 if (reg_referenced_p (cc0_rtx, XVECEXP (newpat, 0, 0)))
2179 {
2180 newi2pat = XVECEXP (newpat, 0, 0);
2181 newpat = XVECEXP (newpat, 0, 1);
2182 }
2183 else
2184#endif
2185 {
2186 newi2pat = XVECEXP (newpat, 0, 1);
2187 newpat = XVECEXP (newpat, 0, 0);
2188 }
230d793d 2189
a29ca9db
RK
2190 i2_code_number
2191 = recog_for_combine (&newi2pat, i2, &new_i2_notes, &i2_scratches);
2192
230d793d 2193 if (i2_code_number >= 0)
a29ca9db
RK
2194 insn_code_number
2195 = recog_for_combine (&newpat, i3, &new_i3_notes, &i3_scratches);
230d793d
RS
2196 }
2197
2198 /* If it still isn't recognized, fail and change things back the way they
2199 were. */
2200 if ((insn_code_number < 0
2201 /* Is the result a reasonable ASM_OPERANDS? */
2202 && (! check_asm_operands (newpat) || added_sets_1 || added_sets_2)))
2203 {
2204 undo_all ();
2205 return 0;
2206 }
2207
2208 /* If we had to change another insn, make sure it is valid also. */
2209 if (undobuf.other_insn)
2210 {
230d793d
RS
2211 rtx other_pat = PATTERN (undobuf.other_insn);
2212 rtx new_other_notes;
2213 rtx note, next;
2214
6e25d159
RK
2215 CLEAR_HARD_REG_SET (newpat_used_regs);
2216
a29ca9db
RK
2217 other_code_number
2218 = recog_for_combine (&other_pat, undobuf.other_insn,
2219 &new_other_notes, &other_scratches);
230d793d
RS
2220
2221 if (other_code_number < 0 && ! check_asm_operands (other_pat))
2222 {
2223 undo_all ();
2224 return 0;
2225 }
2226
2227 PATTERN (undobuf.other_insn) = other_pat;
2228
2229 /* If any of the notes in OTHER_INSN were REG_UNUSED, ensure that they
2230 are still valid. Then add any non-duplicate notes added by
2231 recog_for_combine. */
2232 for (note = REG_NOTES (undobuf.other_insn); note; note = next)
2233 {
2234 next = XEXP (note, 1);
2235
2236 if (REG_NOTE_KIND (note) == REG_UNUSED
2237 && ! reg_set_p (XEXP (note, 0), PATTERN (undobuf.other_insn)))
1a26b032
RK
2238 {
2239 if (GET_CODE (XEXP (note, 0)) == REG)
b1f21e0a 2240 REG_N_DEATHS (REGNO (XEXP (note, 0)))--;
1a26b032
RK
2241
2242 remove_note (undobuf.other_insn, note);
2243 }
230d793d
RS
2244 }
2245
1a26b032
RK
2246 for (note = new_other_notes; note; note = XEXP (note, 1))
2247 if (GET_CODE (XEXP (note, 0)) == REG)
b1f21e0a 2248 REG_N_DEATHS (REGNO (XEXP (note, 0)))++;
1a26b032 2249
230d793d 2250 distribute_notes (new_other_notes, undobuf.other_insn,
5f4f0e22 2251 undobuf.other_insn, NULL_RTX, NULL_RTX, NULL_RTX);
230d793d
RS
2252 }
2253
2254 /* We now know that we can do this combination. Merge the insns and
2255 update the status of registers and LOG_LINKS. */
2256
2257 {
2258 rtx i3notes, i2notes, i1notes = 0;
2259 rtx i3links, i2links, i1links = 0;
2260 rtx midnotes = 0;
230d793d 2261 register int regno;
ff3467a9
JW
2262 /* Compute which registers we expect to eliminate. newi2pat may be setting
2263 either i3dest or i2dest, so we must check it. Also, i1dest may be the
2264 same as i3dest, in which case newi2pat may be setting i1dest. */
2265 rtx elim_i2 = ((newi2pat && reg_set_p (i2dest, newi2pat))
2266 || i2dest_in_i2src || i2dest_in_i1src
230d793d 2267 ? 0 : i2dest);
ff3467a9
JW
2268 rtx elim_i1 = (i1 == 0 || i1dest_in_i1src
2269 || (newi2pat && reg_set_p (i1dest, newi2pat))
2270 ? 0 : i1dest);
230d793d
RS
2271
2272 /* Get the old REG_NOTES and LOG_LINKS from all our insns and
2273 clear them. */
2274 i3notes = REG_NOTES (i3), i3links = LOG_LINKS (i3);
2275 i2notes = REG_NOTES (i2), i2links = LOG_LINKS (i2);
2276 if (i1)
2277 i1notes = REG_NOTES (i1), i1links = LOG_LINKS (i1);
2278
2279 /* Ensure that we do not have something that should not be shared but
2280 occurs multiple times in the new insns. Check this by first
5089e22e 2281 resetting all the `used' flags and then copying anything is shared. */
230d793d
RS
2282
2283 reset_used_flags (i3notes);
2284 reset_used_flags (i2notes);
2285 reset_used_flags (i1notes);
2286 reset_used_flags (newpat);
2287 reset_used_flags (newi2pat);
2288 if (undobuf.other_insn)
2289 reset_used_flags (PATTERN (undobuf.other_insn));
2290
2291 i3notes = copy_rtx_if_shared (i3notes);
2292 i2notes = copy_rtx_if_shared (i2notes);
2293 i1notes = copy_rtx_if_shared (i1notes);
2294 newpat = copy_rtx_if_shared (newpat);
2295 newi2pat = copy_rtx_if_shared (newi2pat);
2296 if (undobuf.other_insn)
2297 reset_used_flags (PATTERN (undobuf.other_insn));
2298
2299 INSN_CODE (i3) = insn_code_number;
2300 PATTERN (i3) = newpat;
2301 if (undobuf.other_insn)
2302 INSN_CODE (undobuf.other_insn) = other_code_number;
2303
2304 /* We had one special case above where I2 had more than one set and
2305 we replaced a destination of one of those sets with the destination
2306 of I3. In that case, we have to update LOG_LINKS of insns later
176c9e6b
JW
2307 in this basic block. Note that this (expensive) case is rare.
2308
2309 Also, in this case, we must pretend that all REG_NOTEs for I2
2310 actually came from I3, so that REG_UNUSED notes from I2 will be
2311 properly handled. */
2312
2313 if (i3_subst_into_i2)
2314 {
2315 for (i = 0; i < XVECLEN (PATTERN (i2), 0); i++)
2316 if (GET_CODE (SET_DEST (XVECEXP (PATTERN (i2), 0, i))) == REG
2317 && SET_DEST (XVECEXP (PATTERN (i2), 0, i)) != i2dest
2318 && ! find_reg_note (i2, REG_UNUSED,
2319 SET_DEST (XVECEXP (PATTERN (i2), 0, i))))
2320 for (temp = NEXT_INSN (i2);
2321 temp && (this_basic_block == n_basic_blocks - 1
2322 || basic_block_head[this_basic_block] != temp);
2323 temp = NEXT_INSN (temp))
2324 if (temp != i3 && GET_RTX_CLASS (GET_CODE (temp)) == 'i')
2325 for (link = LOG_LINKS (temp); link; link = XEXP (link, 1))
2326 if (XEXP (link, 0) == i2)
2327 XEXP (link, 0) = i3;
2328
2329 if (i3notes)
2330 {
2331 rtx link = i3notes;
2332 while (XEXP (link, 1))
2333 link = XEXP (link, 1);
2334 XEXP (link, 1) = i2notes;
2335 }
2336 else
2337 i3notes = i2notes;
2338 i2notes = 0;
2339 }
230d793d
RS
2340
2341 LOG_LINKS (i3) = 0;
2342 REG_NOTES (i3) = 0;
2343 LOG_LINKS (i2) = 0;
2344 REG_NOTES (i2) = 0;
2345
2346 if (newi2pat)
2347 {
2348 INSN_CODE (i2) = i2_code_number;
2349 PATTERN (i2) = newi2pat;
2350 }
2351 else
2352 {
2353 PUT_CODE (i2, NOTE);
2354 NOTE_LINE_NUMBER (i2) = NOTE_INSN_DELETED;
2355 NOTE_SOURCE_FILE (i2) = 0;
2356 }
2357
2358 if (i1)
2359 {
2360 LOG_LINKS (i1) = 0;
2361 REG_NOTES (i1) = 0;
2362 PUT_CODE (i1, NOTE);
2363 NOTE_LINE_NUMBER (i1) = NOTE_INSN_DELETED;
2364 NOTE_SOURCE_FILE (i1) = 0;
2365 }
2366
2367 /* Get death notes for everything that is now used in either I3 or
6eb12cef
RK
2368 I2 and used to die in a previous insn. If we built two new
2369 patterns, move from I1 to I2 then I2 to I3 so that we get the
2370 proper movement on registers that I2 modifies. */
230d793d 2371
230d793d 2372 if (newi2pat)
6eb12cef
RK
2373 {
2374 move_deaths (newi2pat, NULL_RTX, INSN_CUID (i1), i2, &midnotes);
2375 move_deaths (newpat, newi2pat, INSN_CUID (i1), i3, &midnotes);
2376 }
2377 else
2378 move_deaths (newpat, NULL_RTX, i1 ? INSN_CUID (i1) : INSN_CUID (i2),
2379 i3, &midnotes);
230d793d
RS
2380
2381 /* Distribute all the LOG_LINKS and REG_NOTES from I1, I2, and I3. */
2382 if (i3notes)
5f4f0e22
CH
2383 distribute_notes (i3notes, i3, i3, newi2pat ? i2 : NULL_RTX,
2384 elim_i2, elim_i1);
230d793d 2385 if (i2notes)
5f4f0e22
CH
2386 distribute_notes (i2notes, i2, i3, newi2pat ? i2 : NULL_RTX,
2387 elim_i2, elim_i1);
230d793d 2388 if (i1notes)
5f4f0e22
CH
2389 distribute_notes (i1notes, i1, i3, newi2pat ? i2 : NULL_RTX,
2390 elim_i2, elim_i1);
230d793d 2391 if (midnotes)
5f4f0e22
CH
2392 distribute_notes (midnotes, NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
2393 elim_i2, elim_i1);
230d793d
RS
2394
2395 /* Distribute any notes added to I2 or I3 by recog_for_combine. We
2396 know these are REG_UNUSED and want them to go to the desired insn,
1a26b032
RK
2397 so we always pass it as i3. We have not counted the notes in
2398 reg_n_deaths yet, so we need to do so now. */
2399
230d793d 2400 if (newi2pat && new_i2_notes)
1a26b032
RK
2401 {
2402 for (temp = new_i2_notes; temp; temp = XEXP (temp, 1))
2403 if (GET_CODE (XEXP (temp, 0)) == REG)
b1f21e0a 2404 REG_N_DEATHS (REGNO (XEXP (temp, 0)))++;
1a26b032
RK
2405
2406 distribute_notes (new_i2_notes, i2, i2, NULL_RTX, NULL_RTX, NULL_RTX);
2407 }
2408
230d793d 2409 if (new_i3_notes)
1a26b032
RK
2410 {
2411 for (temp = new_i3_notes; temp; temp = XEXP (temp, 1))
2412 if (GET_CODE (XEXP (temp, 0)) == REG)
b1f21e0a 2413 REG_N_DEATHS (REGNO (XEXP (temp, 0)))++;
1a26b032
RK
2414
2415 distribute_notes (new_i3_notes, i3, i3, NULL_RTX, NULL_RTX, NULL_RTX);
2416 }
230d793d
RS
2417
2418 /* If I3DEST was used in I3SRC, it really died in I3. We may need to
e9a25f70
JL
2419 put a REG_DEAD note for it somewhere. If NEWI2PAT exists and sets
2420 I3DEST, the death must be somewhere before I2, not I3. If we passed I3
2421 in that case, it might delete I2. Similarly for I2 and I1.
1a26b032
RK
2422 Show an additional death due to the REG_DEAD note we make here. If
2423 we discard it in distribute_notes, we will decrement it again. */
d0ab8cd3 2424
230d793d 2425 if (i3dest_killed)
1a26b032
RK
2426 {
2427 if (GET_CODE (i3dest_killed) == REG)
b1f21e0a 2428 REG_N_DEATHS (REGNO (i3dest_killed))++;
1a26b032 2429
e9a25f70 2430 if (newi2pat && reg_set_p (i3dest_killed, newi2pat))
38a448ca
RH
2431 distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i3dest_killed,
2432 NULL_RTX),
ff3467a9 2433 NULL_RTX, i2, NULL_RTX, elim_i2, elim_i1);
e9a25f70 2434 else
38a448ca
RH
2435 distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i3dest_killed,
2436 NULL_RTX),
e9a25f70 2437 NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
ff3467a9 2438 elim_i2, elim_i1);
1a26b032 2439 }
58c8c593 2440
230d793d 2441 if (i2dest_in_i2src)
58c8c593 2442 {
1a26b032 2443 if (GET_CODE (i2dest) == REG)
b1f21e0a 2444 REG_N_DEATHS (REGNO (i2dest))++;
1a26b032 2445
58c8c593 2446 if (newi2pat && reg_set_p (i2dest, newi2pat))
38a448ca 2447 distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i2dest, NULL_RTX),
58c8c593
RK
2448 NULL_RTX, i2, NULL_RTX, NULL_RTX, NULL_RTX);
2449 else
38a448ca 2450 distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i2dest, NULL_RTX),
58c8c593
RK
2451 NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
2452 NULL_RTX, NULL_RTX);
2453 }
2454
230d793d 2455 if (i1dest_in_i1src)
58c8c593 2456 {
1a26b032 2457 if (GET_CODE (i1dest) == REG)
b1f21e0a 2458 REG_N_DEATHS (REGNO (i1dest))++;
1a26b032 2459
58c8c593 2460 if (newi2pat && reg_set_p (i1dest, newi2pat))
38a448ca 2461 distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i1dest, NULL_RTX),
58c8c593
RK
2462 NULL_RTX, i2, NULL_RTX, NULL_RTX, NULL_RTX);
2463 else
38a448ca 2464 distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i1dest, NULL_RTX),
58c8c593
RK
2465 NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
2466 NULL_RTX, NULL_RTX);
2467 }
230d793d
RS
2468
2469 distribute_links (i3links);
2470 distribute_links (i2links);
2471 distribute_links (i1links);
2472
2473 if (GET_CODE (i2dest) == REG)
2474 {
d0ab8cd3
RK
2475 rtx link;
2476 rtx i2_insn = 0, i2_val = 0, set;
2477
2478 /* The insn that used to set this register doesn't exist, and
2479 this life of the register may not exist either. See if one of
2480 I3's links points to an insn that sets I2DEST. If it does,
2481 that is now the last known value for I2DEST. If we don't update
2482 this and I2 set the register to a value that depended on its old
230d793d
RS
2483 contents, we will get confused. If this insn is used, thing
2484 will be set correctly in combine_instructions. */
d0ab8cd3
RK
2485
2486 for (link = LOG_LINKS (i3); link; link = XEXP (link, 1))
2487 if ((set = single_set (XEXP (link, 0))) != 0
2488 && rtx_equal_p (i2dest, SET_DEST (set)))
2489 i2_insn = XEXP (link, 0), i2_val = SET_SRC (set);
2490
2491 record_value_for_reg (i2dest, i2_insn, i2_val);
230d793d
RS
2492
2493 /* If the reg formerly set in I2 died only once and that was in I3,
2494 zero its use count so it won't make `reload' do any work. */
538fe8cd
ILT
2495 if (! added_sets_2
2496 && (newi2pat == 0 || ! reg_mentioned_p (i2dest, newi2pat))
2497 && ! i2dest_in_i2src)
230d793d
RS
2498 {
2499 regno = REGNO (i2dest);
b1f21e0a
MM
2500 REG_N_SETS (regno)--;
2501 if (REG_N_SETS (regno) == 0
8e08106d 2502 && ! REGNO_REG_SET_P (basic_block_live_at_start[0], regno))
b1f21e0a 2503 REG_N_REFS (regno) = 0;
230d793d
RS
2504 }
2505 }
2506
2507 if (i1 && GET_CODE (i1dest) == REG)
2508 {
d0ab8cd3
RK
2509 rtx link;
2510 rtx i1_insn = 0, i1_val = 0, set;
2511
2512 for (link = LOG_LINKS (i3); link; link = XEXP (link, 1))
2513 if ((set = single_set (XEXP (link, 0))) != 0
2514 && rtx_equal_p (i1dest, SET_DEST (set)))
2515 i1_insn = XEXP (link, 0), i1_val = SET_SRC (set);
2516
2517 record_value_for_reg (i1dest, i1_insn, i1_val);
2518
230d793d 2519 regno = REGNO (i1dest);
5af91171 2520 if (! added_sets_1 && ! i1dest_in_i1src)
230d793d 2521 {
b1f21e0a
MM
2522 REG_N_SETS (regno)--;
2523 if (REG_N_SETS (regno) == 0
8e08106d 2524 && ! REGNO_REG_SET_P (basic_block_live_at_start[0], regno))
b1f21e0a 2525 REG_N_REFS (regno) = 0;
230d793d
RS
2526 }
2527 }
2528
951553af 2529 /* Update reg_nonzero_bits et al for any changes that may have been made
22609cbf
RK
2530 to this insn. */
2531
951553af 2532 note_stores (newpat, set_nonzero_bits_and_sign_copies);
22609cbf 2533 if (newi2pat)
951553af 2534 note_stores (newi2pat, set_nonzero_bits_and_sign_copies);
22609cbf 2535
a29ca9db
RK
2536 /* If we added any (clobber (scratch)), add them to the max for a
2537 block. This is a very pessimistic calculation, since we might
2538 have had them already and this might not be the worst block, but
2539 it's not worth doing any better. */
2540 max_scratch += i3_scratches + i2_scratches + other_scratches;
2541
230d793d
RS
2542 /* If I3 is now an unconditional jump, ensure that it has a
2543 BARRIER following it since it may have initially been a
381ee8af 2544 conditional jump. It may also be the last nonnote insn. */
230d793d
RS
2545
2546 if ((GET_CODE (newpat) == RETURN || simplejump_p (i3))
381ee8af
TW
2547 && ((temp = next_nonnote_insn (i3)) == NULL_RTX
2548 || GET_CODE (temp) != BARRIER))
230d793d
RS
2549 emit_barrier_after (i3);
2550 }
2551
2552 combine_successes++;
2553
bcd49eb7
JW
2554 /* Clear this here, so that subsequent get_last_value calls are not
2555 affected. */
2556 subst_prev_insn = NULL_RTX;
2557
abe6e52f
RK
2558 if (added_links_insn
2559 && (newi2pat == 0 || INSN_CUID (added_links_insn) < INSN_CUID (i2))
2560 && INSN_CUID (added_links_insn) < INSN_CUID (i3))
2561 return added_links_insn;
2562 else
2563 return newi2pat ? i2 : i3;
230d793d
RS
2564}
2565\f
2566/* Undo all the modifications recorded in undobuf. */
2567
2568static void
2569undo_all ()
2570{
241cea85
RK
2571 struct undo *undo, *next;
2572
2573 for (undo = undobuf.undos; undo; undo = next)
7c046e4e 2574 {
241cea85
RK
2575 next = undo->next;
2576 if (undo->is_int)
2577 *undo->where.i = undo->old_contents.i;
7c046e4e 2578 else
241cea85
RK
2579 *undo->where.r = undo->old_contents.r;
2580
2581 undo->next = undobuf.frees;
2582 undobuf.frees = undo;
7c046e4e 2583 }
230d793d
RS
2584
2585 obfree (undobuf.storage);
845fc875 2586 undobuf.undos = undobuf.previous_undos = 0;
bcd49eb7
JW
2587
2588 /* Clear this here, so that subsequent get_last_value calls are not
2589 affected. */
2590 subst_prev_insn = NULL_RTX;
230d793d
RS
2591}
2592\f
2593/* Find the innermost point within the rtx at LOC, possibly LOC itself,
d0ab8cd3
RK
2594 where we have an arithmetic expression and return that point. LOC will
2595 be inside INSN.
230d793d
RS
2596
2597 try_combine will call this function to see if an insn can be split into
2598 two insns. */
2599
2600static rtx *
d0ab8cd3 2601find_split_point (loc, insn)
230d793d 2602 rtx *loc;
d0ab8cd3 2603 rtx insn;
230d793d
RS
2604{
2605 rtx x = *loc;
2606 enum rtx_code code = GET_CODE (x);
2607 rtx *split;
2608 int len = 0, pos, unsignedp;
2609 rtx inner;
2610
2611 /* First special-case some codes. */
2612 switch (code)
2613 {
2614 case SUBREG:
2615#ifdef INSN_SCHEDULING
2616 /* If we are making a paradoxical SUBREG invalid, it becomes a split
2617 point. */
2618 if (GET_CODE (SUBREG_REG (x)) == MEM)
2619 return loc;
2620#endif
d0ab8cd3 2621 return find_split_point (&SUBREG_REG (x), insn);
230d793d 2622
230d793d 2623 case MEM:
916f14f1 2624#ifdef HAVE_lo_sum
230d793d
RS
2625 /* If we have (mem (const ..)) or (mem (symbol_ref ...)), split it
2626 using LO_SUM and HIGH. */
2627 if (GET_CODE (XEXP (x, 0)) == CONST
2628 || GET_CODE (XEXP (x, 0)) == SYMBOL_REF)
2629 {
2630 SUBST (XEXP (x, 0),
2631 gen_rtx_combine (LO_SUM, Pmode,
2632 gen_rtx_combine (HIGH, Pmode, XEXP (x, 0)),
2633 XEXP (x, 0)));
2634 return &XEXP (XEXP (x, 0), 0);
2635 }
230d793d
RS
2636#endif
2637
916f14f1
RK
2638 /* If we have a PLUS whose second operand is a constant and the
2639 address is not valid, perhaps will can split it up using
2640 the machine-specific way to split large constants. We use
ddd5a7c1 2641 the first pseudo-reg (one of the virtual regs) as a placeholder;
916f14f1
RK
2642 it will not remain in the result. */
2643 if (GET_CODE (XEXP (x, 0)) == PLUS
2644 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
2645 && ! memory_address_p (GET_MODE (x), XEXP (x, 0)))
2646 {
2647 rtx reg = regno_reg_rtx[FIRST_PSEUDO_REGISTER];
38a448ca 2648 rtx seq = split_insns (gen_rtx_SET (VOIDmode, reg, XEXP (x, 0)),
916f14f1
RK
2649 subst_insn);
2650
2651 /* This should have produced two insns, each of which sets our
2652 placeholder. If the source of the second is a valid address,
2653 we can make put both sources together and make a split point
2654 in the middle. */
2655
2656 if (seq && XVECLEN (seq, 0) == 2
2657 && GET_CODE (XVECEXP (seq, 0, 0)) == INSN
2658 && GET_CODE (PATTERN (XVECEXP (seq, 0, 0))) == SET
2659 && SET_DEST (PATTERN (XVECEXP (seq, 0, 0))) == reg
2660 && ! reg_mentioned_p (reg,
2661 SET_SRC (PATTERN (XVECEXP (seq, 0, 0))))
2662 && GET_CODE (XVECEXP (seq, 0, 1)) == INSN
2663 && GET_CODE (PATTERN (XVECEXP (seq, 0, 1))) == SET
2664 && SET_DEST (PATTERN (XVECEXP (seq, 0, 1))) == reg
2665 && memory_address_p (GET_MODE (x),
2666 SET_SRC (PATTERN (XVECEXP (seq, 0, 1)))))
2667 {
2668 rtx src1 = SET_SRC (PATTERN (XVECEXP (seq, 0, 0)));
2669 rtx src2 = SET_SRC (PATTERN (XVECEXP (seq, 0, 1)));
2670
2671 /* Replace the placeholder in SRC2 with SRC1. If we can
2672 find where in SRC2 it was placed, that can become our
2673 split point and we can replace this address with SRC2.
2674 Just try two obvious places. */
2675
2676 src2 = replace_rtx (src2, reg, src1);
2677 split = 0;
2678 if (XEXP (src2, 0) == src1)
2679 split = &XEXP (src2, 0);
2680 else if (GET_RTX_FORMAT (GET_CODE (XEXP (src2, 0)))[0] == 'e'
2681 && XEXP (XEXP (src2, 0), 0) == src1)
2682 split = &XEXP (XEXP (src2, 0), 0);
2683
2684 if (split)
2685 {
2686 SUBST (XEXP (x, 0), src2);
2687 return split;
2688 }
2689 }
1a26b032
RK
2690
2691 /* If that didn't work, perhaps the first operand is complex and
2692 needs to be computed separately, so make a split point there.
2693 This will occur on machines that just support REG + CONST
2694 and have a constant moved through some previous computation. */
2695
2696 else if (GET_RTX_CLASS (GET_CODE (XEXP (XEXP (x, 0), 0))) != 'o'
2697 && ! (GET_CODE (XEXP (XEXP (x, 0), 0)) == SUBREG
2698 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (XEXP (x, 0), 0))))
2699 == 'o')))
2700 return &XEXP (XEXP (x, 0), 0);
916f14f1
RK
2701 }
2702 break;
2703
230d793d
RS
2704 case SET:
2705#ifdef HAVE_cc0
2706 /* If SET_DEST is CC0 and SET_SRC is not an operand, a COMPARE, or a
2707 ZERO_EXTRACT, the most likely reason why this doesn't match is that
2708 we need to put the operand into a register. So split at that
2709 point. */
2710
2711 if (SET_DEST (x) == cc0_rtx
2712 && GET_CODE (SET_SRC (x)) != COMPARE
2713 && GET_CODE (SET_SRC (x)) != ZERO_EXTRACT
2714 && GET_RTX_CLASS (GET_CODE (SET_SRC (x))) != 'o'
2715 && ! (GET_CODE (SET_SRC (x)) == SUBREG
2716 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (SET_SRC (x)))) == 'o'))
2717 return &SET_SRC (x);
2718#endif
2719
2720 /* See if we can split SET_SRC as it stands. */
d0ab8cd3 2721 split = find_split_point (&SET_SRC (x), insn);
230d793d
RS
2722 if (split && split != &SET_SRC (x))
2723 return split;
2724
041d7180
JL
2725 /* See if we can split SET_DEST as it stands. */
2726 split = find_split_point (&SET_DEST (x), insn);
2727 if (split && split != &SET_DEST (x))
2728 return split;
2729
230d793d
RS
2730 /* See if this is a bitfield assignment with everything constant. If
2731 so, this is an IOR of an AND, so split it into that. */
2732 if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
2733 && (GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
5f4f0e22 2734 <= HOST_BITS_PER_WIDE_INT)
230d793d
RS
2735 && GET_CODE (XEXP (SET_DEST (x), 1)) == CONST_INT
2736 && GET_CODE (XEXP (SET_DEST (x), 2)) == CONST_INT
2737 && GET_CODE (SET_SRC (x)) == CONST_INT
2738 && ((INTVAL (XEXP (SET_DEST (x), 1))
2739 + INTVAL (XEXP (SET_DEST (x), 2)))
2740 <= GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0))))
2741 && ! side_effects_p (XEXP (SET_DEST (x), 0)))
2742 {
2743 int pos = INTVAL (XEXP (SET_DEST (x), 2));
2744 int len = INTVAL (XEXP (SET_DEST (x), 1));
2745 int src = INTVAL (SET_SRC (x));
2746 rtx dest = XEXP (SET_DEST (x), 0);
2747 enum machine_mode mode = GET_MODE (dest);
5f4f0e22 2748 unsigned HOST_WIDE_INT mask = ((HOST_WIDE_INT) 1 << len) - 1;
230d793d 2749
f76b9db2
ILT
2750 if (BITS_BIG_ENDIAN)
2751 pos = GET_MODE_BITSIZE (mode) - len - pos;
230d793d
RS
2752
2753 if (src == mask)
2754 SUBST (SET_SRC (x),
5f4f0e22 2755 gen_binary (IOR, mode, dest, GEN_INT (src << pos)));
230d793d
RS
2756 else
2757 SUBST (SET_SRC (x),
2758 gen_binary (IOR, mode,
2759 gen_binary (AND, mode, dest,
5f4f0e22
CH
2760 GEN_INT (~ (mask << pos)
2761 & GET_MODE_MASK (mode))),
2762 GEN_INT (src << pos)));
230d793d
RS
2763
2764 SUBST (SET_DEST (x), dest);
2765
d0ab8cd3 2766 split = find_split_point (&SET_SRC (x), insn);
230d793d
RS
2767 if (split && split != &SET_SRC (x))
2768 return split;
2769 }
2770
2771 /* Otherwise, see if this is an operation that we can split into two.
2772 If so, try to split that. */
2773 code = GET_CODE (SET_SRC (x));
2774
2775 switch (code)
2776 {
d0ab8cd3
RK
2777 case AND:
2778 /* If we are AND'ing with a large constant that is only a single
2779 bit and the result is only being used in a context where we
2780 need to know if it is zero or non-zero, replace it with a bit
2781 extraction. This will avoid the large constant, which might
2782 have taken more than one insn to make. If the constant were
2783 not a valid argument to the AND but took only one insn to make,
2784 this is no worse, but if it took more than one insn, it will
2785 be better. */
2786
2787 if (GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
2788 && GET_CODE (XEXP (SET_SRC (x), 0)) == REG
2789 && (pos = exact_log2 (INTVAL (XEXP (SET_SRC (x), 1)))) >= 7
2790 && GET_CODE (SET_DEST (x)) == REG
2791 && (split = find_single_use (SET_DEST (x), insn, NULL_PTR)) != 0
2792 && (GET_CODE (*split) == EQ || GET_CODE (*split) == NE)
2793 && XEXP (*split, 0) == SET_DEST (x)
2794 && XEXP (*split, 1) == const0_rtx)
2795 {
76184def
DE
2796 rtx extraction = make_extraction (GET_MODE (SET_DEST (x)),
2797 XEXP (SET_SRC (x), 0),
2798 pos, NULL_RTX, 1, 1, 0, 0);
2799 if (extraction != 0)
2800 {
2801 SUBST (SET_SRC (x), extraction);
2802 return find_split_point (loc, insn);
2803 }
d0ab8cd3
RK
2804 }
2805 break;
2806
1a6ec070
RK
2807 case NE:
2808 /* if STORE_FLAG_VALUE is -1, this is (NE X 0) and only one bit of X
2809 is known to be on, this can be converted into a NEG of a shift. */
2810 if (STORE_FLAG_VALUE == -1 && XEXP (SET_SRC (x), 1) == const0_rtx
2811 && GET_MODE (SET_SRC (x)) == GET_MODE (XEXP (SET_SRC (x), 0))
4eb2cb10 2812 && 1 <= (pos = exact_log2
1a6ec070
RK
2813 (nonzero_bits (XEXP (SET_SRC (x), 0),
2814 GET_MODE (XEXP (SET_SRC (x), 0))))))
2815 {
2816 enum machine_mode mode = GET_MODE (XEXP (SET_SRC (x), 0));
2817
2818 SUBST (SET_SRC (x),
2819 gen_rtx_combine (NEG, mode,
2820 gen_rtx_combine (LSHIFTRT, mode,
2821 XEXP (SET_SRC (x), 0),
4eb2cb10 2822 GEN_INT (pos))));
1a6ec070
RK
2823
2824 split = find_split_point (&SET_SRC (x), insn);
2825 if (split && split != &SET_SRC (x))
2826 return split;
2827 }
2828 break;
2829
230d793d
RS
2830 case SIGN_EXTEND:
2831 inner = XEXP (SET_SRC (x), 0);
101c1a3d
JL
2832
2833 /* We can't optimize if either mode is a partial integer
2834 mode as we don't know how many bits are significant
2835 in those modes. */
2836 if (GET_MODE_CLASS (GET_MODE (inner)) == MODE_PARTIAL_INT
2837 || GET_MODE_CLASS (GET_MODE (SET_SRC (x))) == MODE_PARTIAL_INT)
2838 break;
2839
230d793d
RS
2840 pos = 0;
2841 len = GET_MODE_BITSIZE (GET_MODE (inner));
2842 unsignedp = 0;
2843 break;
2844
2845 case SIGN_EXTRACT:
2846 case ZERO_EXTRACT:
2847 if (GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
2848 && GET_CODE (XEXP (SET_SRC (x), 2)) == CONST_INT)
2849 {
2850 inner = XEXP (SET_SRC (x), 0);
2851 len = INTVAL (XEXP (SET_SRC (x), 1));
2852 pos = INTVAL (XEXP (SET_SRC (x), 2));
2853
f76b9db2
ILT
2854 if (BITS_BIG_ENDIAN)
2855 pos = GET_MODE_BITSIZE (GET_MODE (inner)) - len - pos;
230d793d
RS
2856 unsignedp = (code == ZERO_EXTRACT);
2857 }
2858 break;
e9a25f70
JL
2859
2860 default:
2861 break;
230d793d
RS
2862 }
2863
2864 if (len && pos >= 0 && pos + len <= GET_MODE_BITSIZE (GET_MODE (inner)))
2865 {
2866 enum machine_mode mode = GET_MODE (SET_SRC (x));
2867
d0ab8cd3
RK
2868 /* For unsigned, we have a choice of a shift followed by an
2869 AND or two shifts. Use two shifts for field sizes where the
2870 constant might be too large. We assume here that we can
2871 always at least get 8-bit constants in an AND insn, which is
2872 true for every current RISC. */
2873
2874 if (unsignedp && len <= 8)
230d793d
RS
2875 {
2876 SUBST (SET_SRC (x),
2877 gen_rtx_combine
2878 (AND, mode,
2879 gen_rtx_combine (LSHIFTRT, mode,
2880 gen_lowpart_for_combine (mode, inner),
5f4f0e22
CH
2881 GEN_INT (pos)),
2882 GEN_INT (((HOST_WIDE_INT) 1 << len) - 1)));
230d793d 2883
d0ab8cd3 2884 split = find_split_point (&SET_SRC (x), insn);
230d793d
RS
2885 if (split && split != &SET_SRC (x))
2886 return split;
2887 }
2888 else
2889 {
2890 SUBST (SET_SRC (x),
2891 gen_rtx_combine
d0ab8cd3 2892 (unsignedp ? LSHIFTRT : ASHIFTRT, mode,
230d793d
RS
2893 gen_rtx_combine (ASHIFT, mode,
2894 gen_lowpart_for_combine (mode, inner),
5f4f0e22
CH
2895 GEN_INT (GET_MODE_BITSIZE (mode)
2896 - len - pos)),
2897 GEN_INT (GET_MODE_BITSIZE (mode) - len)));
230d793d 2898
d0ab8cd3 2899 split = find_split_point (&SET_SRC (x), insn);
230d793d
RS
2900 if (split && split != &SET_SRC (x))
2901 return split;
2902 }
2903 }
2904
2905 /* See if this is a simple operation with a constant as the second
2906 operand. It might be that this constant is out of range and hence
2907 could be used as a split point. */
2908 if ((GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '2'
2909 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == 'c'
2910 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '<')
2911 && CONSTANT_P (XEXP (SET_SRC (x), 1))
2912 && (GET_RTX_CLASS (GET_CODE (XEXP (SET_SRC (x), 0))) == 'o'
2913 || (GET_CODE (XEXP (SET_SRC (x), 0)) == SUBREG
2914 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (SET_SRC (x), 0))))
2915 == 'o'))))
2916 return &XEXP (SET_SRC (x), 1);
2917
2918 /* Finally, see if this is a simple operation with its first operand
2919 not in a register. The operation might require this operand in a
2920 register, so return it as a split point. We can always do this
2921 because if the first operand were another operation, we would have
2922 already found it as a split point. */
2923 if ((GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '2'
2924 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == 'c'
2925 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '<'
2926 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '1')
2927 && ! register_operand (XEXP (SET_SRC (x), 0), VOIDmode))
2928 return &XEXP (SET_SRC (x), 0);
2929
2930 return 0;
2931
2932 case AND:
2933 case IOR:
2934 /* We write NOR as (and (not A) (not B)), but if we don't have a NOR,
2935 it is better to write this as (not (ior A B)) so we can split it.
2936 Similarly for IOR. */
2937 if (GET_CODE (XEXP (x, 0)) == NOT && GET_CODE (XEXP (x, 1)) == NOT)
2938 {
2939 SUBST (*loc,
2940 gen_rtx_combine (NOT, GET_MODE (x),
2941 gen_rtx_combine (code == IOR ? AND : IOR,
2942 GET_MODE (x),
2943 XEXP (XEXP (x, 0), 0),
2944 XEXP (XEXP (x, 1), 0))));
d0ab8cd3 2945 return find_split_point (loc, insn);
230d793d
RS
2946 }
2947
2948 /* Many RISC machines have a large set of logical insns. If the
2949 second operand is a NOT, put it first so we will try to split the
2950 other operand first. */
2951 if (GET_CODE (XEXP (x, 1)) == NOT)
2952 {
2953 rtx tem = XEXP (x, 0);
2954 SUBST (XEXP (x, 0), XEXP (x, 1));
2955 SUBST (XEXP (x, 1), tem);
2956 }
2957 break;
e9a25f70
JL
2958
2959 default:
2960 break;
230d793d
RS
2961 }
2962
2963 /* Otherwise, select our actions depending on our rtx class. */
2964 switch (GET_RTX_CLASS (code))
2965 {
2966 case 'b': /* This is ZERO_EXTRACT and SIGN_EXTRACT. */
2967 case '3':
d0ab8cd3 2968 split = find_split_point (&XEXP (x, 2), insn);
230d793d
RS
2969 if (split)
2970 return split;
0f41302f 2971 /* ... fall through ... */
230d793d
RS
2972 case '2':
2973 case 'c':
2974 case '<':
d0ab8cd3 2975 split = find_split_point (&XEXP (x, 1), insn);
230d793d
RS
2976 if (split)
2977 return split;
0f41302f 2978 /* ... fall through ... */
230d793d
RS
2979 case '1':
2980 /* Some machines have (and (shift ...) ...) insns. If X is not
2981 an AND, but XEXP (X, 0) is, use it as our split point. */
2982 if (GET_CODE (x) != AND && GET_CODE (XEXP (x, 0)) == AND)
2983 return &XEXP (x, 0);
2984
d0ab8cd3 2985 split = find_split_point (&XEXP (x, 0), insn);
230d793d
RS
2986 if (split)
2987 return split;
2988 return loc;
2989 }
2990
2991 /* Otherwise, we don't have a split point. */
2992 return 0;
2993}
2994\f
2995/* Throughout X, replace FROM with TO, and return the result.
2996 The result is TO if X is FROM;
2997 otherwise the result is X, but its contents may have been modified.
2998 If they were modified, a record was made in undobuf so that
2999 undo_all will (among other things) return X to its original state.
3000
3001 If the number of changes necessary is too much to record to undo,
3002 the excess changes are not made, so the result is invalid.
3003 The changes already made can still be undone.
3004 undobuf.num_undo is incremented for such changes, so by testing that
3005 the caller can tell whether the result is valid.
3006
3007 `n_occurrences' is incremented each time FROM is replaced.
3008
3009 IN_DEST is non-zero if we are processing the SET_DEST of a SET.
3010
5089e22e 3011 UNIQUE_COPY is non-zero if each substitution must be unique. We do this
230d793d
RS
3012 by copying if `n_occurrences' is non-zero. */
3013
3014static rtx
3015subst (x, from, to, in_dest, unique_copy)
3016 register rtx x, from, to;
3017 int in_dest;
3018 int unique_copy;
3019{
f24ad0e4 3020 register enum rtx_code code = GET_CODE (x);
230d793d 3021 enum machine_mode op0_mode = VOIDmode;
8079805d
RK
3022 register char *fmt;
3023 register int len, i;
3024 rtx new;
230d793d
RS
3025
3026/* Two expressions are equal if they are identical copies of a shared
3027 RTX or if they are both registers with the same register number
3028 and mode. */
3029
3030#define COMBINE_RTX_EQUAL_P(X,Y) \
3031 ((X) == (Y) \
3032 || (GET_CODE (X) == REG && GET_CODE (Y) == REG \
3033 && REGNO (X) == REGNO (Y) && GET_MODE (X) == GET_MODE (Y)))
3034
3035 if (! in_dest && COMBINE_RTX_EQUAL_P (x, from))
3036 {
3037 n_occurrences++;
3038 return (unique_copy && n_occurrences > 1 ? copy_rtx (to) : to);
3039 }
3040
3041 /* If X and FROM are the same register but different modes, they will
3042 not have been seen as equal above. However, flow.c will make a
3043 LOG_LINKS entry for that case. If we do nothing, we will try to
3044 rerecognize our original insn and, when it succeeds, we will
3045 delete the feeding insn, which is incorrect.
3046
3047 So force this insn not to match in this (rare) case. */
3048 if (! in_dest && code == REG && GET_CODE (from) == REG
3049 && REGNO (x) == REGNO (from))
38a448ca 3050 return gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
230d793d
RS
3051
3052 /* If this is an object, we are done unless it is a MEM or LO_SUM, both
3053 of which may contain things that can be combined. */
3054 if (code != MEM && code != LO_SUM && GET_RTX_CLASS (code) == 'o')
3055 return x;
3056
3057 /* It is possible to have a subexpression appear twice in the insn.
3058 Suppose that FROM is a register that appears within TO.
3059 Then, after that subexpression has been scanned once by `subst',
3060 the second time it is scanned, TO may be found. If we were
3061 to scan TO here, we would find FROM within it and create a
3062 self-referent rtl structure which is completely wrong. */
3063 if (COMBINE_RTX_EQUAL_P (x, to))
3064 return to;
3065
3066 len = GET_RTX_LENGTH (code);
3067 fmt = GET_RTX_FORMAT (code);
3068
3069 /* We don't need to process a SET_DEST that is a register, CC0, or PC, so
3070 set up to skip this common case. All other cases where we want to
3071 suppress replacing something inside a SET_SRC are handled via the
3072 IN_DEST operand. */
3073 if (code == SET
3074 && (GET_CODE (SET_DEST (x)) == REG
3075 || GET_CODE (SET_DEST (x)) == CC0
3076 || GET_CODE (SET_DEST (x)) == PC))
3077 fmt = "ie";
3078
0f41302f
MS
3079 /* Get the mode of operand 0 in case X is now a SIGN_EXTEND of a
3080 constant. */
230d793d
RS
3081 if (fmt[0] == 'e')
3082 op0_mode = GET_MODE (XEXP (x, 0));
3083
3084 for (i = 0; i < len; i++)
3085 {
3086 if (fmt[i] == 'E')
3087 {
3088 register int j;
3089 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3090 {
230d793d
RS
3091 if (COMBINE_RTX_EQUAL_P (XVECEXP (x, i, j), from))
3092 {
3093 new = (unique_copy && n_occurrences ? copy_rtx (to) : to);
3094 n_occurrences++;
3095 }
3096 else
3097 {
3098 new = subst (XVECEXP (x, i, j), from, to, 0, unique_copy);
3099
3100 /* If this substitution failed, this whole thing fails. */
3101 if (GET_CODE (new) == CLOBBER && XEXP (new, 0) == const0_rtx)
3102 return new;
3103 }
3104
3105 SUBST (XVECEXP (x, i, j), new);
3106 }
3107 }
3108 else if (fmt[i] == 'e')
3109 {
230d793d
RS
3110 if (COMBINE_RTX_EQUAL_P (XEXP (x, i), from))
3111 {
42301240
RK
3112 /* In general, don't install a subreg involving two modes not
3113 tieable. It can worsen register allocation, and can even
3114 make invalid reload insns, since the reg inside may need to
3115 be copied from in the outside mode, and that may be invalid
3116 if it is an fp reg copied in integer mode.
3117
3118 We allow two exceptions to this: It is valid if it is inside
3119 another SUBREG and the mode of that SUBREG and the mode of
3120 the inside of TO is tieable and it is valid if X is a SET
3121 that copies FROM to CC0. */
3122 if (GET_CODE (to) == SUBREG
3123 && ! MODES_TIEABLE_P (GET_MODE (to),
3124 GET_MODE (SUBREG_REG (to)))
3125 && ! (code == SUBREG
8079805d
RK
3126 && MODES_TIEABLE_P (GET_MODE (x),
3127 GET_MODE (SUBREG_REG (to))))
42301240
RK
3128#ifdef HAVE_cc0
3129 && ! (code == SET && i == 1 && XEXP (x, 0) == cc0_rtx)
3130#endif
3131 )
38a448ca 3132 return gen_rtx_CLOBBER (VOIDmode, const0_rtx);
42301240 3133
230d793d
RS
3134 new = (unique_copy && n_occurrences ? copy_rtx (to) : to);
3135 n_occurrences++;
3136 }
3137 else
3138 /* If we are in a SET_DEST, suppress most cases unless we
3139 have gone inside a MEM, in which case we want to
3140 simplify the address. We assume here that things that
3141 are actually part of the destination have their inner
3142 parts in the first expression. This is true for SUBREG,
3143 STRICT_LOW_PART, and ZERO_EXTRACT, which are the only
3144 things aside from REG and MEM that should appear in a
3145 SET_DEST. */
3146 new = subst (XEXP (x, i), from, to,
3147 (((in_dest
3148 && (code == SUBREG || code == STRICT_LOW_PART
3149 || code == ZERO_EXTRACT))
3150 || code == SET)
3151 && i == 0), unique_copy);
3152
3153 /* If we found that we will have to reject this combination,
3154 indicate that by returning the CLOBBER ourselves, rather than
3155 an expression containing it. This will speed things up as
3156 well as prevent accidents where two CLOBBERs are considered
3157 to be equal, thus producing an incorrect simplification. */
3158
3159 if (GET_CODE (new) == CLOBBER && XEXP (new, 0) == const0_rtx)
3160 return new;
3161
3162 SUBST (XEXP (x, i), new);
3163 }
3164 }
3165
8079805d
RK
3166 /* Try to simplify X. If the simplification changed the code, it is likely
3167 that further simplification will help, so loop, but limit the number
3168 of repetitions that will be performed. */
3169
3170 for (i = 0; i < 4; i++)
3171 {
3172 /* If X is sufficiently simple, don't bother trying to do anything
3173 with it. */
3174 if (code != CONST_INT && code != REG && code != CLOBBER)
3175 x = simplify_rtx (x, op0_mode, i == 3, in_dest);
d0ab8cd3 3176
8079805d
RK
3177 if (GET_CODE (x) == code)
3178 break;
d0ab8cd3 3179
8079805d 3180 code = GET_CODE (x);
eeb43d32 3181
8079805d
RK
3182 /* We no longer know the original mode of operand 0 since we
3183 have changed the form of X) */
3184 op0_mode = VOIDmode;
3185 }
eeb43d32 3186
8079805d
RK
3187 return x;
3188}
3189\f
3190/* Simplify X, a piece of RTL. We just operate on the expression at the
3191 outer level; call `subst' to simplify recursively. Return the new
3192 expression.
3193
3194 OP0_MODE is the original mode of XEXP (x, 0); LAST is nonzero if this
3195 will be the iteration even if an expression with a code different from
3196 X is returned; IN_DEST is nonzero if we are inside a SET_DEST. */
eeb43d32 3197
8079805d
RK
3198static rtx
3199simplify_rtx (x, op0_mode, last, in_dest)
3200 rtx x;
3201 enum machine_mode op0_mode;
3202 int last;
3203 int in_dest;
3204{
3205 enum rtx_code code = GET_CODE (x);
3206 enum machine_mode mode = GET_MODE (x);
3207 rtx temp;
3208 int i;
d0ab8cd3 3209
230d793d
RS
3210 /* If this is a commutative operation, put a constant last and a complex
3211 expression first. We don't need to do this for comparisons here. */
3212 if (GET_RTX_CLASS (code) == 'c'
3213 && ((CONSTANT_P (XEXP (x, 0)) && GET_CODE (XEXP (x, 1)) != CONST_INT)
3214 || (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == 'o'
3215 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) != 'o')
3216 || (GET_CODE (XEXP (x, 0)) == SUBREG
3217 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0)))) == 'o'
3218 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) != 'o')))
3219 {
3220 temp = XEXP (x, 0);
3221 SUBST (XEXP (x, 0), XEXP (x, 1));
3222 SUBST (XEXP (x, 1), temp);
3223 }
3224
22609cbf
RK
3225 /* If this is a PLUS, MINUS, or MULT, and the first operand is the
3226 sign extension of a PLUS with a constant, reverse the order of the sign
3227 extension and the addition. Note that this not the same as the original
3228 code, but overflow is undefined for signed values. Also note that the
3229 PLUS will have been partially moved "inside" the sign-extension, so that
3230 the first operand of X will really look like:
3231 (ashiftrt (plus (ashift A C4) C5) C4).
3232 We convert this to
3233 (plus (ashiftrt (ashift A C4) C2) C4)
3234 and replace the first operand of X with that expression. Later parts
3235 of this function may simplify the expression further.
3236
3237 For example, if we start with (mult (sign_extend (plus A C1)) C2),
3238 we swap the SIGN_EXTEND and PLUS. Later code will apply the
3239 distributive law to produce (plus (mult (sign_extend X) C1) C3).
3240
3241 We do this to simplify address expressions. */
3242
3243 if ((code == PLUS || code == MINUS || code == MULT)
3244 && GET_CODE (XEXP (x, 0)) == ASHIFTRT
3245 && GET_CODE (XEXP (XEXP (x, 0), 0)) == PLUS
3246 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == ASHIFT
3247 && GET_CODE (XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 1)) == CONST_INT
3248 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3249 && XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 1) == XEXP (XEXP (x, 0), 1)
3250 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
3251 && (temp = simplify_binary_operation (ASHIFTRT, mode,
3252 XEXP (XEXP (XEXP (x, 0), 0), 1),
3253 XEXP (XEXP (x, 0), 1))) != 0)
3254 {
3255 rtx new
3256 = simplify_shift_const (NULL_RTX, ASHIFT, mode,
3257 XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 0),
3258 INTVAL (XEXP (XEXP (x, 0), 1)));
3259
3260 new = simplify_shift_const (NULL_RTX, ASHIFTRT, mode, new,
3261 INTVAL (XEXP (XEXP (x, 0), 1)));
3262
3263 SUBST (XEXP (x, 0), gen_binary (PLUS, mode, new, temp));
3264 }
3265
d0ab8cd3
RK
3266 /* If this is a simple operation applied to an IF_THEN_ELSE, try
3267 applying it to the arms of the IF_THEN_ELSE. This often simplifies
abe6e52f
RK
3268 things. Check for cases where both arms are testing the same
3269 condition.
3270
3271 Don't do anything if all operands are very simple. */
3272
3273 if (((GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c'
3274 || GET_RTX_CLASS (code) == '<')
3275 && ((GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) != 'o'
3276 && ! (GET_CODE (XEXP (x, 0)) == SUBREG
3277 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0))))
3278 == 'o')))
3279 || (GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) != 'o'
3280 && ! (GET_CODE (XEXP (x, 1)) == SUBREG
3281 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 1))))
3282 == 'o')))))
3283 || (GET_RTX_CLASS (code) == '1'
3284 && ((GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) != 'o'
3285 && ! (GET_CODE (XEXP (x, 0)) == SUBREG
3286 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0))))
3287 == 'o'))))))
d0ab8cd3 3288 {
abe6e52f
RK
3289 rtx cond, true, false;
3290
3291 cond = if_then_else_cond (x, &true, &false);
0802d516
RK
3292 if (cond != 0
3293 /* If everything is a comparison, what we have is highly unlikely
3294 to be simpler, so don't use it. */
3295 && ! (GET_RTX_CLASS (code) == '<'
3296 && (GET_RTX_CLASS (GET_CODE (true)) == '<'
3297 || GET_RTX_CLASS (GET_CODE (false)) == '<')))
abe6e52f
RK
3298 {
3299 rtx cop1 = const0_rtx;
3300 enum rtx_code cond_code = simplify_comparison (NE, &cond, &cop1);
3301
15448afc
RK
3302 if (cond_code == NE && GET_RTX_CLASS (GET_CODE (cond)) == '<')
3303 return x;
3304
9210df58
RK
3305 /* Simplify the alternative arms; this may collapse the true and
3306 false arms to store-flag values. */
3307 true = subst (true, pc_rtx, pc_rtx, 0, 0);
3308 false = subst (false, pc_rtx, pc_rtx, 0, 0);
3309
3310 /* Restarting if we generate a store-flag expression will cause
3311 us to loop. Just drop through in this case. */
3312
abe6e52f
RK
3313 /* If the result values are STORE_FLAG_VALUE and zero, we can
3314 just make the comparison operation. */
3315 if (true == const_true_rtx && false == const0_rtx)
3316 x = gen_binary (cond_code, mode, cond, cop1);
3317 else if (true == const0_rtx && false == const_true_rtx)
3318 x = gen_binary (reverse_condition (cond_code), mode, cond, cop1);
3319
3320 /* Likewise, we can make the negate of a comparison operation
3321 if the result values are - STORE_FLAG_VALUE and zero. */
3322 else if (GET_CODE (true) == CONST_INT
3323 && INTVAL (true) == - STORE_FLAG_VALUE
3324 && false == const0_rtx)
0c1c8ea6 3325 x = gen_unary (NEG, mode, mode,
abe6e52f
RK
3326 gen_binary (cond_code, mode, cond, cop1));
3327 else if (GET_CODE (false) == CONST_INT
3328 && INTVAL (false) == - STORE_FLAG_VALUE
3329 && true == const0_rtx)
0c1c8ea6 3330 x = gen_unary (NEG, mode, mode,
abe6e52f
RK
3331 gen_binary (reverse_condition (cond_code),
3332 mode, cond, cop1));
3333 else
38a448ca
RH
3334 return gen_rtx_IF_THEN_ELSE (mode,
3335 gen_binary (cond_code, VOIDmode,
3336 cond, cop1),
3337 true, false);
5109d49f 3338
9210df58
RK
3339 code = GET_CODE (x);
3340 op0_mode = VOIDmode;
abe6e52f 3341 }
d0ab8cd3
RK
3342 }
3343
230d793d
RS
3344 /* Try to fold this expression in case we have constants that weren't
3345 present before. */
3346 temp = 0;
3347 switch (GET_RTX_CLASS (code))
3348 {
3349 case '1':
3350 temp = simplify_unary_operation (code, mode, XEXP (x, 0), op0_mode);
3351 break;
3352 case '<':
3353 temp = simplify_relational_operation (code, op0_mode,
3354 XEXP (x, 0), XEXP (x, 1));
77fa0940
RK
3355#ifdef FLOAT_STORE_FLAG_VALUE
3356 if (temp != 0 && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
3357 temp = ((temp == const0_rtx) ? CONST0_RTX (GET_MODE (x))
3358 : immed_real_const_1 (FLOAT_STORE_FLAG_VALUE, GET_MODE (x)));
3359#endif
230d793d
RS
3360 break;
3361 case 'c':
3362 case '2':
3363 temp = simplify_binary_operation (code, mode, XEXP (x, 0), XEXP (x, 1));
3364 break;
3365 case 'b':
3366 case '3':
3367 temp = simplify_ternary_operation (code, mode, op0_mode, XEXP (x, 0),
3368 XEXP (x, 1), XEXP (x, 2));
3369 break;
3370 }
3371
3372 if (temp)
d0ab8cd3 3373 x = temp, code = GET_CODE (temp);
230d793d 3374
230d793d 3375 /* First see if we can apply the inverse distributive law. */
224eeff2
RK
3376 if (code == PLUS || code == MINUS
3377 || code == AND || code == IOR || code == XOR)
230d793d
RS
3378 {
3379 x = apply_distributive_law (x);
3380 code = GET_CODE (x);
3381 }
3382
3383 /* If CODE is an associative operation not otherwise handled, see if we
3384 can associate some operands. This can win if they are constants or
3385 if they are logically related (i.e. (a & b) & a. */
3386 if ((code == PLUS || code == MINUS
3387 || code == MULT || code == AND || code == IOR || code == XOR
3388 || code == DIV || code == UDIV
3389 || code == SMAX || code == SMIN || code == UMAX || code == UMIN)
3ad2180a 3390 && INTEGRAL_MODE_P (mode))
230d793d
RS
3391 {
3392 if (GET_CODE (XEXP (x, 0)) == code)
3393 {
3394 rtx other = XEXP (XEXP (x, 0), 0);
3395 rtx inner_op0 = XEXP (XEXP (x, 0), 1);
3396 rtx inner_op1 = XEXP (x, 1);
3397 rtx inner;
3398
3399 /* Make sure we pass the constant operand if any as the second
3400 one if this is a commutative operation. */
3401 if (CONSTANT_P (inner_op0) && GET_RTX_CLASS (code) == 'c')
3402 {
3403 rtx tem = inner_op0;
3404 inner_op0 = inner_op1;
3405 inner_op1 = tem;
3406 }
3407 inner = simplify_binary_operation (code == MINUS ? PLUS
3408 : code == DIV ? MULT
3409 : code == UDIV ? MULT
3410 : code,
3411 mode, inner_op0, inner_op1);
3412
3413 /* For commutative operations, try the other pair if that one
3414 didn't simplify. */
3415 if (inner == 0 && GET_RTX_CLASS (code) == 'c')
3416 {
3417 other = XEXP (XEXP (x, 0), 1);
3418 inner = simplify_binary_operation (code, mode,
3419 XEXP (XEXP (x, 0), 0),
3420 XEXP (x, 1));
3421 }
3422
3423 if (inner)
8079805d 3424 return gen_binary (code, mode, other, inner);
230d793d
RS
3425 }
3426 }
3427
3428 /* A little bit of algebraic simplification here. */
3429 switch (code)
3430 {
3431 case MEM:
3432 /* Ensure that our address has any ASHIFTs converted to MULT in case
3433 address-recognizing predicates are called later. */
3434 temp = make_compound_operation (XEXP (x, 0), MEM);
3435 SUBST (XEXP (x, 0), temp);
3436 break;
3437
3438 case SUBREG:
3439 /* (subreg:A (mem:B X) N) becomes a modified MEM unless the SUBREG
3440 is paradoxical. If we can't do that safely, then it becomes
3441 something nonsensical so that this combination won't take place. */
3442
3443 if (GET_CODE (SUBREG_REG (x)) == MEM
3444 && (GET_MODE_SIZE (mode)
3445 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
3446 {
3447 rtx inner = SUBREG_REG (x);
3448 int endian_offset = 0;
3449 /* Don't change the mode of the MEM
3450 if that would change the meaning of the address. */
3451 if (MEM_VOLATILE_P (SUBREG_REG (x))
3452 || mode_dependent_address_p (XEXP (inner, 0)))
38a448ca 3453 return gen_rtx_CLOBBER (mode, const0_rtx);
230d793d 3454
f76b9db2
ILT
3455 if (BYTES_BIG_ENDIAN)
3456 {
3457 if (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
3458 endian_offset += UNITS_PER_WORD - GET_MODE_SIZE (mode);
3459 if (GET_MODE_SIZE (GET_MODE (inner)) < UNITS_PER_WORD)
3460 endian_offset -= (UNITS_PER_WORD
3461 - GET_MODE_SIZE (GET_MODE (inner)));
3462 }
230d793d
RS
3463 /* Note if the plus_constant doesn't make a valid address
3464 then this combination won't be accepted. */
38a448ca
RH
3465 x = gen_rtx_MEM (mode,
3466 plus_constant (XEXP (inner, 0),
3467 (SUBREG_WORD (x) * UNITS_PER_WORD
3468 + endian_offset)));
230d793d
RS
3469 MEM_VOLATILE_P (x) = MEM_VOLATILE_P (inner);
3470 RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (inner);
3471 MEM_IN_STRUCT_P (x) = MEM_IN_STRUCT_P (inner);
3472 return x;
3473 }
3474
3475 /* If we are in a SET_DEST, these other cases can't apply. */
3476 if (in_dest)
3477 return x;
3478
3479 /* Changing mode twice with SUBREG => just change it once,
3480 or not at all if changing back to starting mode. */
3481 if (GET_CODE (SUBREG_REG (x)) == SUBREG)
3482 {
3483 if (mode == GET_MODE (SUBREG_REG (SUBREG_REG (x)))
3484 && SUBREG_WORD (x) == 0 && SUBREG_WORD (SUBREG_REG (x)) == 0)
3485 return SUBREG_REG (SUBREG_REG (x));
3486
3487 SUBST_INT (SUBREG_WORD (x),
3488 SUBREG_WORD (x) + SUBREG_WORD (SUBREG_REG (x)));
3489 SUBST (SUBREG_REG (x), SUBREG_REG (SUBREG_REG (x)));
3490 }
3491
3492 /* SUBREG of a hard register => just change the register number
3493 and/or mode. If the hard register is not valid in that mode,
26ecfc76
RK
3494 suppress this combination. If the hard register is the stack,
3495 frame, or argument pointer, leave this as a SUBREG. */
230d793d
RS
3496
3497 if (GET_CODE (SUBREG_REG (x)) == REG
26ecfc76
RK
3498 && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER
3499 && REGNO (SUBREG_REG (x)) != FRAME_POINTER_REGNUM
6d7096b0
DE
3500#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3501 && REGNO (SUBREG_REG (x)) != HARD_FRAME_POINTER_REGNUM
3502#endif
26ecfc76
RK
3503#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3504 && REGNO (SUBREG_REG (x)) != ARG_POINTER_REGNUM
3505#endif
3506 && REGNO (SUBREG_REG (x)) != STACK_POINTER_REGNUM)
230d793d
RS
3507 {
3508 if (HARD_REGNO_MODE_OK (REGNO (SUBREG_REG (x)) + SUBREG_WORD (x),
3509 mode))
38a448ca
RH
3510 return gen_rtx_REG (mode,
3511 REGNO (SUBREG_REG (x)) + SUBREG_WORD (x));
230d793d 3512 else
38a448ca 3513 return gen_rtx_CLOBBER (mode, const0_rtx);
230d793d
RS
3514 }
3515
3516 /* For a constant, try to pick up the part we want. Handle a full
a4bde0b1
RK
3517 word and low-order part. Only do this if we are narrowing
3518 the constant; if it is being widened, we have no idea what
3519 the extra bits will have been set to. */
230d793d
RS
3520
3521 if (CONSTANT_P (SUBREG_REG (x)) && op0_mode != VOIDmode
3522 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
3c99d5ff 3523 && GET_MODE_SIZE (op0_mode) > UNITS_PER_WORD
230d793d
RS
3524 && GET_MODE_CLASS (mode) == MODE_INT)
3525 {
3526 temp = operand_subword (SUBREG_REG (x), SUBREG_WORD (x),
5f4f0e22 3527 0, op0_mode);
230d793d
RS
3528 if (temp)
3529 return temp;
3530 }
3531
19808e22
RS
3532 /* If we want a subreg of a constant, at offset 0,
3533 take the low bits. On a little-endian machine, that's
3534 always valid. On a big-endian machine, it's valid
3c99d5ff 3535 only if the constant's mode fits in one word. Note that we
61b1bece 3536 cannot use subreg_lowpart_p since SUBREG_REG may be VOIDmode. */
3c99d5ff
RK
3537 if (CONSTANT_P (SUBREG_REG (x))
3538 && ((GET_MODE_SIZE (op0_mode) <= UNITS_PER_WORD
3539 || ! WORDS_BIG_ENDIAN)
3540 ? SUBREG_WORD (x) == 0
3541 : (SUBREG_WORD (x)
3542 == ((GET_MODE_SIZE (op0_mode)
3543 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD))
3544 / UNITS_PER_WORD)))
f82da7d2 3545 && GET_MODE_SIZE (mode) <= GET_MODE_SIZE (op0_mode)
f76b9db2
ILT
3546 && (! WORDS_BIG_ENDIAN
3547 || GET_MODE_BITSIZE (op0_mode) <= BITS_PER_WORD))
230d793d
RS
3548 return gen_lowpart_for_combine (mode, SUBREG_REG (x));
3549
b65c1b5b
RK
3550 /* A paradoxical SUBREG of a VOIDmode constant is the same constant,
3551 since we are saying that the high bits don't matter. */
3552 if (CONSTANT_P (SUBREG_REG (x)) && GET_MODE (SUBREG_REG (x)) == VOIDmode
3553 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (op0_mode))
3554 return SUBREG_REG (x);
3555
87e3e0c1
RK
3556 /* Note that we cannot do any narrowing for non-constants since
3557 we might have been counting on using the fact that some bits were
3558 zero. We now do this in the SET. */
3559
230d793d
RS
3560 break;
3561
3562 case NOT:
3563 /* (not (plus X -1)) can become (neg X). */
3564 if (GET_CODE (XEXP (x, 0)) == PLUS
3565 && XEXP (XEXP (x, 0), 1) == constm1_rtx)
8079805d 3566 return gen_rtx_combine (NEG, mode, XEXP (XEXP (x, 0), 0));
230d793d
RS
3567
3568 /* Similarly, (not (neg X)) is (plus X -1). */
3569 if (GET_CODE (XEXP (x, 0)) == NEG)
8079805d
RK
3570 return gen_rtx_combine (PLUS, mode, XEXP (XEXP (x, 0), 0),
3571 constm1_rtx);
230d793d 3572
d0ab8cd3
RK
3573 /* (not (xor X C)) for C constant is (xor X D) with D = ~ C. */
3574 if (GET_CODE (XEXP (x, 0)) == XOR
3575 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3576 && (temp = simplify_unary_operation (NOT, mode,
3577 XEXP (XEXP (x, 0), 1),
3578 mode)) != 0)
787745f5 3579 return gen_binary (XOR, mode, XEXP (XEXP (x, 0), 0), temp);
d0ab8cd3 3580
230d793d
RS
3581 /* (not (ashift 1 X)) is (rotate ~1 X). We used to do this for operands
3582 other than 1, but that is not valid. We could do a similar
3583 simplification for (not (lshiftrt C X)) where C is just the sign bit,
3584 but this doesn't seem common enough to bother with. */
3585 if (GET_CODE (XEXP (x, 0)) == ASHIFT
3586 && XEXP (XEXP (x, 0), 0) == const1_rtx)
38a448ca
RH
3587 return gen_rtx_ROTATE (mode, gen_unary (NOT, mode, mode, const1_rtx),
3588 XEXP (XEXP (x, 0), 1));
230d793d
RS
3589
3590 if (GET_CODE (XEXP (x, 0)) == SUBREG
3591 && subreg_lowpart_p (XEXP (x, 0))
3592 && (GET_MODE_SIZE (GET_MODE (XEXP (x, 0)))
3593 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (x, 0)))))
3594 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == ASHIFT
3595 && XEXP (SUBREG_REG (XEXP (x, 0)), 0) == const1_rtx)
3596 {
3597 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (XEXP (x, 0)));
3598
38a448ca
RH
3599 x = gen_rtx_ROTATE (inner_mode,
3600 gen_unary (NOT, inner_mode, inner_mode,
3601 const1_rtx),
3602 XEXP (SUBREG_REG (XEXP (x, 0)), 1));
8079805d 3603 return gen_lowpart_for_combine (mode, x);
230d793d
RS
3604 }
3605
0802d516
RK
3606 /* If STORE_FLAG_VALUE is -1, (not (comparison foo bar)) can be done by
3607 reversing the comparison code if valid. */
3608 if (STORE_FLAG_VALUE == -1
3609 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
230d793d
RS
3610 && reversible_comparison_p (XEXP (x, 0)))
3611 return gen_rtx_combine (reverse_condition (GET_CODE (XEXP (x, 0))),
3612 mode, XEXP (XEXP (x, 0), 0),
3613 XEXP (XEXP (x, 0), 1));
500c518b
RK
3614
3615 /* (ashiftrt foo C) where C is the number of bits in FOO minus 1
0802d516
RK
3616 is (lt foo (const_int 0)) if STORE_FLAG_VALUE is -1, so we can
3617 perform the above simplification. */
500c518b 3618
0802d516
RK
3619 if (STORE_FLAG_VALUE == -1
3620 && XEXP (x, 1) == const1_rtx
500c518b
RK
3621 && GET_CODE (XEXP (x, 0)) == ASHIFTRT
3622 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3623 && INTVAL (XEXP (XEXP (x, 0), 1)) == GET_MODE_BITSIZE (mode) - 1)
3624 return gen_rtx_combine (GE, mode, XEXP (XEXP (x, 0), 0), const0_rtx);
230d793d
RS
3625
3626 /* Apply De Morgan's laws to reduce number of patterns for machines
3627 with negating logical insns (and-not, nand, etc.). If result has
3628 only one NOT, put it first, since that is how the patterns are
3629 coded. */
3630
3631 if (GET_CODE (XEXP (x, 0)) == IOR || GET_CODE (XEXP (x, 0)) == AND)
3632 {
3633 rtx in1 = XEXP (XEXP (x, 0), 0), in2 = XEXP (XEXP (x, 0), 1);
3634
3635 if (GET_CODE (in1) == NOT)
3636 in1 = XEXP (in1, 0);
3637 else
3638 in1 = gen_rtx_combine (NOT, GET_MODE (in1), in1);
3639
3640 if (GET_CODE (in2) == NOT)
3641 in2 = XEXP (in2, 0);
3642 else if (GET_CODE (in2) == CONST_INT
5f4f0e22
CH
3643 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3644 in2 = GEN_INT (GET_MODE_MASK (mode) & ~ INTVAL (in2));
230d793d
RS
3645 else
3646 in2 = gen_rtx_combine (NOT, GET_MODE (in2), in2);
3647
3648 if (GET_CODE (in2) == NOT)
3649 {
3650 rtx tem = in2;
3651 in2 = in1; in1 = tem;
3652 }
3653
8079805d
RK
3654 return gen_rtx_combine (GET_CODE (XEXP (x, 0)) == IOR ? AND : IOR,
3655 mode, in1, in2);
230d793d
RS
3656 }
3657 break;
3658
3659 case NEG:
3660 /* (neg (plus X 1)) can become (not X). */
3661 if (GET_CODE (XEXP (x, 0)) == PLUS
3662 && XEXP (XEXP (x, 0), 1) == const1_rtx)
8079805d 3663 return gen_rtx_combine (NOT, mode, XEXP (XEXP (x, 0), 0));
230d793d
RS
3664
3665 /* Similarly, (neg (not X)) is (plus X 1). */
3666 if (GET_CODE (XEXP (x, 0)) == NOT)
8079805d 3667 return plus_constant (XEXP (XEXP (x, 0), 0), 1);
230d793d 3668
230d793d
RS
3669 /* (neg (minus X Y)) can become (minus Y X). */
3670 if (GET_CODE (XEXP (x, 0)) == MINUS
3ad2180a 3671 && (! FLOAT_MODE_P (mode)
0f41302f 3672 /* x-y != -(y-x) with IEEE floating point. */
7e2a0d8e
RK
3673 || TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3674 || flag_fast_math))
8079805d
RK
3675 return gen_binary (MINUS, mode, XEXP (XEXP (x, 0), 1),
3676 XEXP (XEXP (x, 0), 0));
230d793d 3677
0f41302f 3678 /* (neg (xor A 1)) is (plus A -1) if A is known to be either 0 or 1. */
d0ab8cd3 3679 if (GET_CODE (XEXP (x, 0)) == XOR && XEXP (XEXP (x, 0), 1) == const1_rtx
951553af 3680 && nonzero_bits (XEXP (XEXP (x, 0), 0), mode) == 1)
8079805d 3681 return gen_binary (PLUS, mode, XEXP (XEXP (x, 0), 0), constm1_rtx);
d0ab8cd3 3682
230d793d
RS
3683 /* NEG commutes with ASHIFT since it is multiplication. Only do this
3684 if we can then eliminate the NEG (e.g.,
3685 if the operand is a constant). */
3686
3687 if (GET_CODE (XEXP (x, 0)) == ASHIFT)
3688 {
3689 temp = simplify_unary_operation (NEG, mode,
3690 XEXP (XEXP (x, 0), 0), mode);
3691 if (temp)
3692 {
3693 SUBST (XEXP (XEXP (x, 0), 0), temp);
3694 return XEXP (x, 0);
3695 }
3696 }
3697
3698 temp = expand_compound_operation (XEXP (x, 0));
3699
3700 /* For C equal to the width of MODE minus 1, (neg (ashiftrt X C)) can be
3701 replaced by (lshiftrt X C). This will convert
3702 (neg (sign_extract X 1 Y)) to (zero_extract X 1 Y). */
3703
3704 if (GET_CODE (temp) == ASHIFTRT
3705 && GET_CODE (XEXP (temp, 1)) == CONST_INT
3706 && INTVAL (XEXP (temp, 1)) == GET_MODE_BITSIZE (mode) - 1)
8079805d
RK
3707 return simplify_shift_const (temp, LSHIFTRT, mode, XEXP (temp, 0),
3708 INTVAL (XEXP (temp, 1)));
230d793d 3709
951553af 3710 /* If X has only a single bit that might be nonzero, say, bit I, convert
230d793d
RS
3711 (neg X) to (ashiftrt (ashift X C-I) C-I) where C is the bitsize of
3712 MODE minus 1. This will convert (neg (zero_extract X 1 Y)) to
3713 (sign_extract X 1 Y). But only do this if TEMP isn't a register
3714 or a SUBREG of one since we'd be making the expression more
3715 complex if it was just a register. */
3716
3717 if (GET_CODE (temp) != REG
3718 && ! (GET_CODE (temp) == SUBREG
3719 && GET_CODE (SUBREG_REG (temp)) == REG)
951553af 3720 && (i = exact_log2 (nonzero_bits (temp, mode))) >= 0)
230d793d
RS
3721 {
3722 rtx temp1 = simplify_shift_const
5f4f0e22
CH
3723 (NULL_RTX, ASHIFTRT, mode,
3724 simplify_shift_const (NULL_RTX, ASHIFT, mode, temp,
230d793d
RS
3725 GET_MODE_BITSIZE (mode) - 1 - i),
3726 GET_MODE_BITSIZE (mode) - 1 - i);
3727
3728 /* If all we did was surround TEMP with the two shifts, we
3729 haven't improved anything, so don't use it. Otherwise,
3730 we are better off with TEMP1. */
3731 if (GET_CODE (temp1) != ASHIFTRT
3732 || GET_CODE (XEXP (temp1, 0)) != ASHIFT
3733 || XEXP (XEXP (temp1, 0), 0) != temp)
8079805d 3734 return temp1;
230d793d
RS
3735 }
3736 break;
3737
2ca9ae17 3738 case TRUNCATE:
e30fb98f
JL
3739 /* We can't handle truncation to a partial integer mode here
3740 because we don't know the real bitsize of the partial
3741 integer mode. */
3742 if (GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
3743 break;
3744
80608e27
JL
3745 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3746 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
3747 GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))))
2ca9ae17
JW
3748 SUBST (XEXP (x, 0),
3749 force_to_mode (XEXP (x, 0), GET_MODE (XEXP (x, 0)),
3750 GET_MODE_MASK (mode), NULL_RTX, 0));
0f13a422
ILT
3751
3752 /* (truncate:SI ({sign,zero}_extend:DI foo:SI)) == foo:SI. */
3753 if ((GET_CODE (XEXP (x, 0)) == SIGN_EXTEND
3754 || GET_CODE (XEXP (x, 0)) == ZERO_EXTEND)
3755 && GET_MODE (XEXP (XEXP (x, 0), 0)) == mode)
3756 return XEXP (XEXP (x, 0), 0);
3757
3758 /* (truncate:SI (OP:DI ({sign,zero}_extend:DI foo:SI))) is
3759 (OP:SI foo:SI) if OP is NEG or ABS. */
3760 if ((GET_CODE (XEXP (x, 0)) == ABS
3761 || GET_CODE (XEXP (x, 0)) == NEG)
3762 && (GET_CODE (XEXP (XEXP (x, 0), 0)) == SIGN_EXTEND
3763 || GET_CODE (XEXP (XEXP (x, 0), 0)) == ZERO_EXTEND)
3764 && GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == mode)
3765 return gen_unary (GET_CODE (XEXP (x, 0)), mode, mode,
3766 XEXP (XEXP (XEXP (x, 0), 0), 0));
3767
3768 /* (truncate:SI (subreg:DI (truncate:SI X) 0)) is
3769 (truncate:SI x). */
3770 if (GET_CODE (XEXP (x, 0)) == SUBREG
3771 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == TRUNCATE
3772 && subreg_lowpart_p (XEXP (x, 0)))
3773 return SUBREG_REG (XEXP (x, 0));
3774
3775 /* If we know that the value is already truncated, we can
3776 replace the TRUNCATE with a SUBREG. */
9ec36da5
JL
3777 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
3778 >= GET_MODE_BITSIZE (mode) + 1)
0f13a422
ILT
3779 return gen_lowpart_for_combine (mode, XEXP (x, 0));
3780
3781 /* A truncate of a comparison can be replaced with a subreg if
3782 STORE_FLAG_VALUE permits. This is like the previous test,
3783 but it works even if the comparison is done in a mode larger
3784 than HOST_BITS_PER_WIDE_INT. */
3785 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3786 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
3787 && ((HOST_WIDE_INT) STORE_FLAG_VALUE &~ GET_MODE_MASK (mode)) == 0)
3788 return gen_lowpart_for_combine (mode, XEXP (x, 0));
3789
3790 /* Similarly, a truncate of a register whose value is a
3791 comparison can be replaced with a subreg if STORE_FLAG_VALUE
3792 permits. */
3793 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3794 && ((HOST_WIDE_INT) STORE_FLAG_VALUE &~ GET_MODE_MASK (mode)) == 0
3795 && (temp = get_last_value (XEXP (x, 0)))
3796 && GET_RTX_CLASS (GET_CODE (temp)) == '<')
3797 return gen_lowpart_for_combine (mode, XEXP (x, 0));
3798
2ca9ae17
JW
3799 break;
3800
230d793d
RS
3801 case FLOAT_TRUNCATE:
3802 /* (float_truncate:SF (float_extend:DF foo:SF)) = foo:SF. */
3803 if (GET_CODE (XEXP (x, 0)) == FLOAT_EXTEND
3804 && GET_MODE (XEXP (XEXP (x, 0), 0)) == mode)
3805 return XEXP (XEXP (x, 0), 0);
4635f748
RK
3806
3807 /* (float_truncate:SF (OP:DF (float_extend:DF foo:sf))) is
3808 (OP:SF foo:SF) if OP is NEG or ABS. */
3809 if ((GET_CODE (XEXP (x, 0)) == ABS
3810 || GET_CODE (XEXP (x, 0)) == NEG)
3811 && GET_CODE (XEXP (XEXP (x, 0), 0)) == FLOAT_EXTEND
3812 && GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == mode)
0c1c8ea6
RK
3813 return gen_unary (GET_CODE (XEXP (x, 0)), mode, mode,
3814 XEXP (XEXP (XEXP (x, 0), 0), 0));
1d12df72
RK
3815
3816 /* (float_truncate:SF (subreg:DF (float_truncate:SF X) 0))
3817 is (float_truncate:SF x). */
3818 if (GET_CODE (XEXP (x, 0)) == SUBREG
3819 && subreg_lowpart_p (XEXP (x, 0))
3820 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == FLOAT_TRUNCATE)
3821 return SUBREG_REG (XEXP (x, 0));
230d793d
RS
3822 break;
3823
3824#ifdef HAVE_cc0
3825 case COMPARE:
3826 /* Convert (compare FOO (const_int 0)) to FOO unless we aren't
3827 using cc0, in which case we want to leave it as a COMPARE
3828 so we can distinguish it from a register-register-copy. */
3829 if (XEXP (x, 1) == const0_rtx)
3830 return XEXP (x, 0);
3831
3832 /* In IEEE floating point, x-0 is not the same as x. */
3833 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
7e2a0d8e
RK
3834 || ! FLOAT_MODE_P (GET_MODE (XEXP (x, 0)))
3835 || flag_fast_math)
230d793d
RS
3836 && XEXP (x, 1) == CONST0_RTX (GET_MODE (XEXP (x, 0))))
3837 return XEXP (x, 0);
3838 break;
3839#endif
3840
3841 case CONST:
3842 /* (const (const X)) can become (const X). Do it this way rather than
3843 returning the inner CONST since CONST can be shared with a
3844 REG_EQUAL note. */
3845 if (GET_CODE (XEXP (x, 0)) == CONST)
3846 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
3847 break;
3848
3849#ifdef HAVE_lo_sum
3850 case LO_SUM:
3851 /* Convert (lo_sum (high FOO) FOO) to FOO. This is necessary so we
3852 can add in an offset. find_split_point will split this address up
3853 again if it doesn't match. */
3854 if (GET_CODE (XEXP (x, 0)) == HIGH
3855 && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1)))
3856 return XEXP (x, 1);
3857 break;
3858#endif
3859
3860 case PLUS:
3861 /* If we have (plus (plus (A const) B)), associate it so that CONST is
3862 outermost. That's because that's the way indexed addresses are
3863 supposed to appear. This code used to check many more cases, but
3864 they are now checked elsewhere. */
3865 if (GET_CODE (XEXP (x, 0)) == PLUS
3866 && CONSTANT_ADDRESS_P (XEXP (XEXP (x, 0), 1)))
3867 return gen_binary (PLUS, mode,
3868 gen_binary (PLUS, mode, XEXP (XEXP (x, 0), 0),
3869 XEXP (x, 1)),
3870 XEXP (XEXP (x, 0), 1));
3871
3872 /* (plus (xor (and <foo> (const_int pow2 - 1)) <c>) <-c>)
3873 when c is (const_int (pow2 + 1) / 2) is a sign extension of a
3874 bit-field and can be replaced by either a sign_extend or a
3875 sign_extract. The `and' may be a zero_extend. */
3876 if (GET_CODE (XEXP (x, 0)) == XOR
3877 && GET_CODE (XEXP (x, 1)) == CONST_INT
3878 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3879 && INTVAL (XEXP (x, 1)) == - INTVAL (XEXP (XEXP (x, 0), 1))
3880 && (i = exact_log2 (INTVAL (XEXP (XEXP (x, 0), 1)))) >= 0
5f4f0e22 3881 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
230d793d
RS
3882 && ((GET_CODE (XEXP (XEXP (x, 0), 0)) == AND
3883 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
3884 && (INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1))
5f4f0e22 3885 == ((HOST_WIDE_INT) 1 << (i + 1)) - 1))
230d793d
RS
3886 || (GET_CODE (XEXP (XEXP (x, 0), 0)) == ZERO_EXTEND
3887 && (GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)))
3888 == i + 1))))
8079805d
RK
3889 return simplify_shift_const
3890 (NULL_RTX, ASHIFTRT, mode,
3891 simplify_shift_const (NULL_RTX, ASHIFT, mode,
3892 XEXP (XEXP (XEXP (x, 0), 0), 0),
3893 GET_MODE_BITSIZE (mode) - (i + 1)),
3894 GET_MODE_BITSIZE (mode) - (i + 1));
230d793d 3895
bc0776c6
RK
3896 /* (plus (comparison A B) C) can become (neg (rev-comp A B)) if
3897 C is 1 and STORE_FLAG_VALUE is -1 or if C is -1 and STORE_FLAG_VALUE
3898 is 1. This produces better code than the alternative immediately
3899 below. */
3900 if (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
3901 && reversible_comparison_p (XEXP (x, 0))
3902 && ((STORE_FLAG_VALUE == -1 && XEXP (x, 1) == const1_rtx)
3903 || (STORE_FLAG_VALUE == 1 && XEXP (x, 1) == constm1_rtx)))
8079805d 3904 return
0c1c8ea6 3905 gen_unary (NEG, mode, mode,
8079805d
RK
3906 gen_binary (reverse_condition (GET_CODE (XEXP (x, 0))),
3907 mode, XEXP (XEXP (x, 0), 0),
3908 XEXP (XEXP (x, 0), 1)));
bc0776c6
RK
3909
3910 /* If only the low-order bit of X is possibly nonzero, (plus x -1)
230d793d
RS
3911 can become (ashiftrt (ashift (xor x 1) C) C) where C is
3912 the bitsize of the mode - 1. This allows simplification of
3913 "a = (b & 8) == 0;" */
3914 if (XEXP (x, 1) == constm1_rtx
3915 && GET_CODE (XEXP (x, 0)) != REG
3916 && ! (GET_CODE (XEXP (x,0)) == SUBREG
3917 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG)
951553af 3918 && nonzero_bits (XEXP (x, 0), mode) == 1)
8079805d
RK
3919 return simplify_shift_const (NULL_RTX, ASHIFTRT, mode,
3920 simplify_shift_const (NULL_RTX, ASHIFT, mode,
3921 gen_rtx_combine (XOR, mode,
3922 XEXP (x, 0), const1_rtx),
3923 GET_MODE_BITSIZE (mode) - 1),
3924 GET_MODE_BITSIZE (mode) - 1);
02f4ada4
RK
3925
3926 /* If we are adding two things that have no bits in common, convert
3927 the addition into an IOR. This will often be further simplified,
3928 for example in cases like ((a & 1) + (a & 2)), which can
3929 become a & 3. */
3930
ac49a949 3931 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
951553af
RK
3932 && (nonzero_bits (XEXP (x, 0), mode)
3933 & nonzero_bits (XEXP (x, 1), mode)) == 0)
8079805d 3934 return gen_binary (IOR, mode, XEXP (x, 0), XEXP (x, 1));
230d793d
RS
3935 break;
3936
3937 case MINUS:
0802d516
RK
3938 /* If STORE_FLAG_VALUE is 1, (minus 1 (comparison foo bar)) can be done
3939 by reversing the comparison code if valid. */
3940 if (STORE_FLAG_VALUE == 1
3941 && XEXP (x, 0) == const1_rtx
5109d49f
RK
3942 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<'
3943 && reversible_comparison_p (XEXP (x, 1)))
3944 return gen_binary (reverse_condition (GET_CODE (XEXP (x, 1))),
3945 mode, XEXP (XEXP (x, 1), 0),
3946 XEXP (XEXP (x, 1), 1));
5109d49f 3947
230d793d
RS
3948 /* (minus <foo> (and <foo> (const_int -pow2))) becomes
3949 (and <foo> (const_int pow2-1)) */
3950 if (GET_CODE (XEXP (x, 1)) == AND
3951 && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
3952 && exact_log2 (- INTVAL (XEXP (XEXP (x, 1), 1))) >= 0
3953 && rtx_equal_p (XEXP (XEXP (x, 1), 0), XEXP (x, 0)))
8079805d
RK
3954 return simplify_and_const_int (NULL_RTX, mode, XEXP (x, 0),
3955 - INTVAL (XEXP (XEXP (x, 1), 1)) - 1);
7bef8680
RK
3956
3957 /* Canonicalize (minus A (plus B C)) to (minus (minus A B) C) for
3958 integers. */
3959 if (GET_CODE (XEXP (x, 1)) == PLUS && INTEGRAL_MODE_P (mode))
8079805d
RK
3960 return gen_binary (MINUS, mode,
3961 gen_binary (MINUS, mode, XEXP (x, 0),
3962 XEXP (XEXP (x, 1), 0)),
3963 XEXP (XEXP (x, 1), 1));
230d793d
RS
3964 break;
3965
3966 case MULT:
3967 /* If we have (mult (plus A B) C), apply the distributive law and then
3968 the inverse distributive law to see if things simplify. This
3969 occurs mostly in addresses, often when unrolling loops. */
3970
3971 if (GET_CODE (XEXP (x, 0)) == PLUS)
3972 {
3973 x = apply_distributive_law
3974 (gen_binary (PLUS, mode,
3975 gen_binary (MULT, mode,
3976 XEXP (XEXP (x, 0), 0), XEXP (x, 1)),
3977 gen_binary (MULT, mode,
3978 XEXP (XEXP (x, 0), 1), XEXP (x, 1))));
3979
3980 if (GET_CODE (x) != MULT)
8079805d 3981 return x;
230d793d 3982 }
230d793d
RS
3983 break;
3984
3985 case UDIV:
3986 /* If this is a divide by a power of two, treat it as a shift if
3987 its first operand is a shift. */
3988 if (GET_CODE (XEXP (x, 1)) == CONST_INT
3989 && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0
3990 && (GET_CODE (XEXP (x, 0)) == ASHIFT
3991 || GET_CODE (XEXP (x, 0)) == LSHIFTRT
3992 || GET_CODE (XEXP (x, 0)) == ASHIFTRT
3993 || GET_CODE (XEXP (x, 0)) == ROTATE
3994 || GET_CODE (XEXP (x, 0)) == ROTATERT))
8079805d 3995 return simplify_shift_const (NULL_RTX, LSHIFTRT, mode, XEXP (x, 0), i);
230d793d
RS
3996 break;
3997
3998 case EQ: case NE:
3999 case GT: case GTU: case GE: case GEU:
4000 case LT: case LTU: case LE: case LEU:
4001 /* If the first operand is a condition code, we can't do anything
4002 with it. */
4003 if (GET_CODE (XEXP (x, 0)) == COMPARE
4004 || (GET_MODE_CLASS (GET_MODE (XEXP (x, 0))) != MODE_CC
4005#ifdef HAVE_cc0
4006 && XEXP (x, 0) != cc0_rtx
4007#endif
4008 ))
4009 {
4010 rtx op0 = XEXP (x, 0);
4011 rtx op1 = XEXP (x, 1);
4012 enum rtx_code new_code;
4013
4014 if (GET_CODE (op0) == COMPARE)
4015 op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);
4016
4017 /* Simplify our comparison, if possible. */
4018 new_code = simplify_comparison (code, &op0, &op1);
4019
230d793d 4020 /* If STORE_FLAG_VALUE is 1, we can convert (ne x 0) to simply X
951553af 4021 if only the low-order bit is possibly nonzero in X (such as when
5109d49f
RK
4022 X is a ZERO_EXTRACT of one bit). Similarly, we can convert EQ to
4023 (xor X 1) or (minus 1 X); we use the former. Finally, if X is
4024 known to be either 0 or -1, NE becomes a NEG and EQ becomes
4025 (plus X 1).
4026
4027 Remove any ZERO_EXTRACT we made when thinking this was a
4028 comparison. It may now be simpler to use, e.g., an AND. If a
4029 ZERO_EXTRACT is indeed appropriate, it will be placed back by
4030 the call to make_compound_operation in the SET case. */
4031
0802d516
RK
4032 if (STORE_FLAG_VALUE == 1
4033 && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
4034 && op1 == const0_rtx && nonzero_bits (op0, mode) == 1)
818b11b9
RK
4035 return gen_lowpart_for_combine (mode,
4036 expand_compound_operation (op0));
5109d49f 4037
0802d516
RK
4038 else if (STORE_FLAG_VALUE == 1
4039 && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
5109d49f
RK
4040 && op1 == const0_rtx
4041 && (num_sign_bit_copies (op0, mode)
4042 == GET_MODE_BITSIZE (mode)))
4043 {
4044 op0 = expand_compound_operation (op0);
0c1c8ea6 4045 return gen_unary (NEG, mode, mode,
8079805d 4046 gen_lowpart_for_combine (mode, op0));
5109d49f
RK
4047 }
4048
0802d516
RK
4049 else if (STORE_FLAG_VALUE == 1
4050 && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
230d793d 4051 && op1 == const0_rtx
5109d49f 4052 && nonzero_bits (op0, mode) == 1)
818b11b9
RK
4053 {
4054 op0 = expand_compound_operation (op0);
8079805d
RK
4055 return gen_binary (XOR, mode,
4056 gen_lowpart_for_combine (mode, op0),
4057 const1_rtx);
5109d49f 4058 }
818b11b9 4059
0802d516
RK
4060 else if (STORE_FLAG_VALUE == 1
4061 && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
5109d49f
RK
4062 && op1 == const0_rtx
4063 && (num_sign_bit_copies (op0, mode)
4064 == GET_MODE_BITSIZE (mode)))
4065 {
4066 op0 = expand_compound_operation (op0);
8079805d 4067 return plus_constant (gen_lowpart_for_combine (mode, op0), 1);
818b11b9 4068 }
230d793d 4069
5109d49f
RK
4070 /* If STORE_FLAG_VALUE is -1, we have cases similar to
4071 those above. */
0802d516
RK
4072 if (STORE_FLAG_VALUE == -1
4073 && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
230d793d 4074 && op1 == const0_rtx
5109d49f
RK
4075 && (num_sign_bit_copies (op0, mode)
4076 == GET_MODE_BITSIZE (mode)))
4077 return gen_lowpart_for_combine (mode,
4078 expand_compound_operation (op0));
4079
0802d516
RK
4080 else if (STORE_FLAG_VALUE == -1
4081 && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
5109d49f
RK
4082 && op1 == const0_rtx
4083 && nonzero_bits (op0, mode) == 1)
4084 {
4085 op0 = expand_compound_operation (op0);
0c1c8ea6 4086 return gen_unary (NEG, mode, mode,
8079805d 4087 gen_lowpart_for_combine (mode, op0));
5109d49f
RK
4088 }
4089
0802d516
RK
4090 else if (STORE_FLAG_VALUE == -1
4091 && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
5109d49f
RK
4092 && op1 == const0_rtx
4093 && (num_sign_bit_copies (op0, mode)
4094 == GET_MODE_BITSIZE (mode)))
230d793d 4095 {
818b11b9 4096 op0 = expand_compound_operation (op0);
0c1c8ea6 4097 return gen_unary (NOT, mode, mode,
8079805d 4098 gen_lowpart_for_combine (mode, op0));
5109d49f
RK
4099 }
4100
4101 /* If X is 0/1, (eq X 0) is X-1. */
0802d516
RK
4102 else if (STORE_FLAG_VALUE == -1
4103 && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
5109d49f
RK
4104 && op1 == const0_rtx
4105 && nonzero_bits (op0, mode) == 1)
4106 {
4107 op0 = expand_compound_operation (op0);
8079805d 4108 return plus_constant (gen_lowpart_for_combine (mode, op0), -1);
230d793d 4109 }
230d793d
RS
4110
4111 /* If STORE_FLAG_VALUE says to just test the sign bit and X has just
951553af
RK
4112 one bit that might be nonzero, we can convert (ne x 0) to
4113 (ashift x c) where C puts the bit in the sign bit. Remove any
4114 AND with STORE_FLAG_VALUE when we are done, since we are only
4115 going to test the sign bit. */
3f508eca 4116 if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
5f4f0e22 4117 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
0802d516 4118 && ((STORE_FLAG_VALUE & GET_MODE_MASK (mode))
5f4f0e22 4119 == (HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (mode) - 1))
230d793d
RS
4120 && op1 == const0_rtx
4121 && mode == GET_MODE (op0)
5109d49f 4122 && (i = exact_log2 (nonzero_bits (op0, mode))) >= 0)
230d793d 4123 {
818b11b9
RK
4124 x = simplify_shift_const (NULL_RTX, ASHIFT, mode,
4125 expand_compound_operation (op0),
230d793d
RS
4126 GET_MODE_BITSIZE (mode) - 1 - i);
4127 if (GET_CODE (x) == AND && XEXP (x, 1) == const_true_rtx)
4128 return XEXP (x, 0);
4129 else
4130 return x;
4131 }
4132
4133 /* If the code changed, return a whole new comparison. */
4134 if (new_code != code)
4135 return gen_rtx_combine (new_code, mode, op0, op1);
4136
4137 /* Otherwise, keep this operation, but maybe change its operands.
4138 This also converts (ne (compare FOO BAR) 0) to (ne FOO BAR). */
4139 SUBST (XEXP (x, 0), op0);
4140 SUBST (XEXP (x, 1), op1);
4141 }
4142 break;
4143
4144 case IF_THEN_ELSE:
8079805d 4145 return simplify_if_then_else (x);
9210df58 4146
8079805d
RK
4147 case ZERO_EXTRACT:
4148 case SIGN_EXTRACT:
4149 case ZERO_EXTEND:
4150 case SIGN_EXTEND:
0f41302f 4151 /* If we are processing SET_DEST, we are done. */
8079805d
RK
4152 if (in_dest)
4153 return x;
d0ab8cd3 4154
8079805d 4155 return expand_compound_operation (x);
d0ab8cd3 4156
8079805d
RK
4157 case SET:
4158 return simplify_set (x);
1a26b032 4159
8079805d
RK
4160 case AND:
4161 case IOR:
4162 case XOR:
4163 return simplify_logical (x, last);
d0ab8cd3 4164
b472527b 4165 case ABS:
8079805d
RK
4166 /* (abs (neg <foo>)) -> (abs <foo>) */
4167 if (GET_CODE (XEXP (x, 0)) == NEG)
4168 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
1a26b032 4169
b472527b
JL
4170 /* If the mode of the operand is VOIDmode (i.e. if it is ASM_OPERANDS),
4171 do nothing. */
4172 if (GET_MODE (XEXP (x, 0)) == VOIDmode)
4173 break;
f40421ce 4174
8079805d
RK
4175 /* If operand is something known to be positive, ignore the ABS. */
4176 if (GET_CODE (XEXP (x, 0)) == FFS || GET_CODE (XEXP (x, 0)) == ABS
4177 || ((GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
4178 <= HOST_BITS_PER_WIDE_INT)
4179 && ((nonzero_bits (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
4180 & ((HOST_WIDE_INT) 1
4181 << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1)))
4182 == 0)))
4183 return XEXP (x, 0);
1a26b032 4184
1a26b032 4185
8079805d
RK
4186 /* If operand is known to be only -1 or 0, convert ABS to NEG. */
4187 if (num_sign_bit_copies (XEXP (x, 0), mode) == GET_MODE_BITSIZE (mode))
4188 return gen_rtx_combine (NEG, mode, XEXP (x, 0));
1a26b032 4189
8079805d 4190 break;
1a26b032 4191
8079805d
RK
4192 case FFS:
4193 /* (ffs (*_extend <X>)) = (ffs <X>) */
4194 if (GET_CODE (XEXP (x, 0)) == SIGN_EXTEND
4195 || GET_CODE (XEXP (x, 0)) == ZERO_EXTEND)
4196 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4197 break;
1a26b032 4198
8079805d
RK
4199 case FLOAT:
4200 /* (float (sign_extend <X>)) = (float <X>). */
4201 if (GET_CODE (XEXP (x, 0)) == SIGN_EXTEND)
4202 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4203 break;
1a26b032 4204
8079805d
RK
4205 case ASHIFT:
4206 case LSHIFTRT:
4207 case ASHIFTRT:
4208 case ROTATE:
4209 case ROTATERT:
4210 /* If this is a shift by a constant amount, simplify it. */
4211 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
4212 return simplify_shift_const (x, code, mode, XEXP (x, 0),
4213 INTVAL (XEXP (x, 1)));
4214
4215#ifdef SHIFT_COUNT_TRUNCATED
4216 else if (SHIFT_COUNT_TRUNCATED && GET_CODE (XEXP (x, 1)) != REG)
4217 SUBST (XEXP (x, 1),
4218 force_to_mode (XEXP (x, 1), GET_MODE (x),
4219 ((HOST_WIDE_INT) 1
4220 << exact_log2 (GET_MODE_BITSIZE (GET_MODE (x))))
4221 - 1,
4222 NULL_RTX, 0));
4223#endif
4224
4225 break;
e9a25f70
JL
4226
4227 default:
4228 break;
8079805d
RK
4229 }
4230
4231 return x;
4232}
4233\f
/* Simplify X, an IF_THEN_ELSE expression.  Return the new expression.
   May rewrite X in place via SUBST (recorded in the undo buffer) or
   return a brand-new rtx.  The caller owns nothing extra either way.  */

static rtx
simplify_if_then_else (x)
     rtx x;
{
  enum machine_mode mode = GET_MODE (x);
  rtx cond = XEXP (x, 0);
  /* NOTE: `true' and `false' are ordinary local rtx variables here
     (pre-C99 C has no boolean keywords); they name the two arms.  */
  rtx true = XEXP (x, 1);
  rtx false = XEXP (x, 2);
  enum rtx_code true_code = GET_CODE (cond);
  int comparison_p = GET_RTX_CLASS (true_code) == '<';
  rtx temp;
  int i;

  /* Simplify storing of the truth value:
     (if_then_else (cmp A B) const_true 0) -> (cmp A B).  */
  if (comparison_p && true == const_true_rtx && false == const0_rtx)
    return gen_binary (true_code, mode, XEXP (cond, 0), XEXP (cond, 1));

  /* Also when the truth value has to be reversed.  */
  if (comparison_p && reversible_comparison_p (cond)
      && true == const0_rtx && false == const_true_rtx)
    return gen_binary (reverse_condition (true_code),
		       mode, XEXP (cond, 0), XEXP (cond, 1));

  /* Sometimes we can simplify the arm of an IF_THEN_ELSE if a register used
     in it is being compared against certain values.  Get the true and false
     comparisons and see if that says anything about the value of each arm.  */

  if (comparison_p && reversible_comparison_p (cond)
      && GET_CODE (XEXP (cond, 0)) == REG)
    {
      HOST_WIDE_INT nzb;
      rtx from = XEXP (cond, 0);
      enum rtx_code false_code = reverse_condition (true_code);
      rtx true_val = XEXP (cond, 1);
      rtx false_val = true_val;
      int swapped = 0;

      /* If FALSE_CODE is EQ, swap the codes and arms so the EQ case
	 is always the "true" side below.  */

      if (false_code == EQ)
	{
	  swapped = 1, true_code = EQ, false_code = NE;
	  temp = true, true = false, false = temp;
	}

      /* If we are comparing against zero and the expression being tested has
	 only a single bit that might be nonzero, that is its value when it is
	 not equal to zero.  Similarly if it is known to be -1 or 0.  */

      if (true_code == EQ && true_val == const0_rtx
	  && exact_log2 (nzb = nonzero_bits (from, GET_MODE (from))) >= 0)
	false_code = EQ, false_val = GEN_INT (nzb);
      else if (true_code == EQ && true_val == const0_rtx
	       && (num_sign_bit_copies (from, GET_MODE (from))
		   == GET_MODE_BITSIZE (GET_MODE (from))))
	false_code = EQ, false_val = constm1_rtx;

      /* Now simplify an arm if we know the value of the register in the
	 branch and it is used in the arm.  Be careful due to the potential
	 of locally-shared RTL: copy_rtx before substituting.  */

      if (reg_mentioned_p (from, true))
	true = subst (known_cond (copy_rtx (true), true_code, from, true_val),
		      pc_rtx, pc_rtx, 0, 0);
      if (reg_mentioned_p (from, false))
	false = subst (known_cond (copy_rtx (false), false_code,
				   from, false_val),
		       pc_rtx, pc_rtx, 0, 0);

      /* Undo any swap done above when storing the arms back.  */
      SUBST (XEXP (x, 1), swapped ? false : true);
      SUBST (XEXP (x, 2), swapped ? true : false);

      true = XEXP (x, 1), false = XEXP (x, 2), true_code = GET_CODE (cond);
    }

  /* If we have (if_then_else FOO (pc) (label_ref BAR)) and FOO can be
     reversed, do so to avoid needing two sets of patterns for
     subtract-and-branch insns.  Similarly if we have a constant in the true
     arm, the false arm is the same as the first operand of the comparison, or
     the false arm is more complicated than the true arm.  */

  if (comparison_p && reversible_comparison_p (cond)
      && (true == pc_rtx
	  || (CONSTANT_P (true)
	      && GET_CODE (false) != CONST_INT && false != pc_rtx)
	  || true == const0_rtx
	  || (GET_RTX_CLASS (GET_CODE (true)) == 'o'
	      && GET_RTX_CLASS (GET_CODE (false)) != 'o')
	  || (GET_CODE (true) == SUBREG
	      && GET_RTX_CLASS (GET_CODE (SUBREG_REG (true))) == 'o'
	      && GET_RTX_CLASS (GET_CODE (false)) != 'o')
	  || reg_mentioned_p (true, false)
	  || rtx_equal_p (false, XEXP (cond, 0))))
    {
      true_code = reverse_condition (true_code);
      SUBST (XEXP (x, 0),
	     gen_binary (true_code, GET_MODE (cond), XEXP (cond, 0),
			 XEXP (cond, 1)));

      SUBST (XEXP (x, 1), false);
      SUBST (XEXP (x, 2), true);

      temp = true, true = false, false = temp, cond = XEXP (x, 0);

      /* It is possible that the conditional has been simplified out,
	 so recompute the code and class from the new condition.  */
      true_code = GET_CODE (cond);
      comparison_p = GET_RTX_CLASS (true_code) == '<';
    }

  /* If the two arms are identical, we don't need the comparison.  */

  if (rtx_equal_p (true, false) && ! side_effects_p (cond))
    return true;

  /* Convert a == b ? b : a to "a".  */
  if (true_code == EQ && ! side_effects_p (cond)
      && rtx_equal_p (XEXP (cond, 0), false)
      && rtx_equal_p (XEXP (cond, 1), true))
    return false;
  else if (true_code == NE && ! side_effects_p (cond)
	   && rtx_equal_p (XEXP (cond, 0), true)
	   && rtx_equal_p (XEXP (cond, 1), false))
    return true;

  /* Look for cases where we have (abs x) or (neg (abs X)):
     (if_then_else (gt X 0) X (neg X)) and friends.  */

  if (GET_MODE_CLASS (mode) == MODE_INT
      && GET_CODE (false) == NEG
      && rtx_equal_p (true, XEXP (false, 0))
      && comparison_p
      && rtx_equal_p (true, XEXP (cond, 0))
      && ! side_effects_p (true))
    switch (true_code)
      {
      case GT:
      case GE:
	return gen_unary (ABS, mode, mode, true);
      case LT:
      case LE:
	return gen_unary (NEG, mode, mode, gen_unary (ABS, mode, mode, true));
      default:
	break;
      }

  /* Look for MIN or MAX.  Skipped for floats unless -ffast-math,
     since MIN/MAX do not honor IEEE NaN semantics.  */

  if ((! FLOAT_MODE_P (mode) || flag_fast_math)
      && comparison_p
      && rtx_equal_p (XEXP (cond, 0), true)
      && rtx_equal_p (XEXP (cond, 1), false)
      && ! side_effects_p (cond))
    switch (true_code)
      {
      case GE:
      case GT:
	return gen_binary (SMAX, mode, true, false);
      case LE:
      case LT:
	return gen_binary (SMIN, mode, true, false);
      case GEU:
      case GTU:
	return gen_binary (UMAX, mode, true, false);
      case LEU:
      case LTU:
	return gen_binary (UMIN, mode, true, false);
      default:
	break;
      }

  /* If we have (if_then_else COND (OP Z C1) Z) and OP is an identity when its
     second operand is zero, this can be done as (OP Z (mult COND C2)) where
     C2 = C1 * STORE_FLAG_VALUE.  Similarly if OP has an outer ZERO_EXTEND or
     SIGN_EXTEND as long as Z is already extended (so we don't destroy it).
     We can do this kind of thing in some cases when STORE_FLAG_VALUE is
     neither 1 or -1, but it isn't worth checking for.  */

  if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
      && comparison_p && mode != VOIDmode && ! side_effects_p (x))
    {
      rtx t = make_compound_operation (true, SET);
      rtx f = make_compound_operation (false, SET);
      rtx cond_op0 = XEXP (cond, 0);
      rtx cond_op1 = XEXP (cond, 1);
      /* OP is only read when Z is nonzero, and every branch that sets Z
	 also sets OP, so it is never used uninitialized.  */
      enum rtx_code op, extend_op = NIL;
      enum machine_mode m = mode;
      rtx z = 0, c1;

      if ((GET_CODE (t) == PLUS || GET_CODE (t) == MINUS
	   || GET_CODE (t) == IOR || GET_CODE (t) == XOR
	   || GET_CODE (t) == ASHIFT
	   || GET_CODE (t) == LSHIFTRT || GET_CODE (t) == ASHIFTRT)
	  && rtx_equal_p (XEXP (t, 0), f))
	c1 = XEXP (t, 1), op = GET_CODE (t), z = f;

      /* If an identity-zero op is commutative, check whether there
	 would be a match if we swapped the operands.  */
      else if ((GET_CODE (t) == PLUS || GET_CODE (t) == IOR
		|| GET_CODE (t) == XOR)
	       && rtx_equal_p (XEXP (t, 1), f))
	c1 = XEXP (t, 0), op = GET_CODE (t), z = f;
      /* Same, but the op is wrapped in a SIGN_EXTEND and Z (== F) already
	 has enough sign-bit copies to survive the narrower operation.  */
      else if (GET_CODE (t) == SIGN_EXTEND
	       && (GET_CODE (XEXP (t, 0)) == PLUS
		   || GET_CODE (XEXP (t, 0)) == MINUS
		   || GET_CODE (XEXP (t, 0)) == IOR
		   || GET_CODE (XEXP (t, 0)) == XOR
		   || GET_CODE (XEXP (t, 0)) == ASHIFT
		   || GET_CODE (XEXP (t, 0)) == LSHIFTRT
		   || GET_CODE (XEXP (t, 0)) == ASHIFTRT)
	       && GET_CODE (XEXP (XEXP (t, 0), 0)) == SUBREG
	       && subreg_lowpart_p (XEXP (XEXP (t, 0), 0))
	       && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 0)), f)
	       && (num_sign_bit_copies (f, GET_MODE (f))
		   > (GET_MODE_BITSIZE (mode)
		      - GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (t, 0), 0))))))
	{
	  c1 = XEXP (XEXP (t, 0), 1); z = f; op = GET_CODE (XEXP (t, 0));
	  extend_op = SIGN_EXTEND;
	  m = GET_MODE (XEXP (t, 0));
	}
      /* Commutative variant of the SIGN_EXTEND case.  */
      else if (GET_CODE (t) == SIGN_EXTEND
	       && (GET_CODE (XEXP (t, 0)) == PLUS
		   || GET_CODE (XEXP (t, 0)) == IOR
		   || GET_CODE (XEXP (t, 0)) == XOR)
	       && GET_CODE (XEXP (XEXP (t, 0), 1)) == SUBREG
	       && subreg_lowpart_p (XEXP (XEXP (t, 0), 1))
	       && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 1)), f)
	       && (num_sign_bit_copies (f, GET_MODE (f))
		   > (GET_MODE_BITSIZE (mode)
		      - GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (t, 0), 1))))))
	{
	  c1 = XEXP (XEXP (t, 0), 0); z = f; op = GET_CODE (XEXP (t, 0));
	  extend_op = SIGN_EXTEND;
	  m = GET_MODE (XEXP (t, 0));
	}
      /* ZERO_EXTEND case: Z must have no bits set outside the inner mode.  */
      else if (GET_CODE (t) == ZERO_EXTEND
	       && (GET_CODE (XEXP (t, 0)) == PLUS
		   || GET_CODE (XEXP (t, 0)) == MINUS
		   || GET_CODE (XEXP (t, 0)) == IOR
		   || GET_CODE (XEXP (t, 0)) == XOR
		   || GET_CODE (XEXP (t, 0)) == ASHIFT
		   || GET_CODE (XEXP (t, 0)) == LSHIFTRT
		   || GET_CODE (XEXP (t, 0)) == ASHIFTRT)
	       && GET_CODE (XEXP (XEXP (t, 0), 0)) == SUBREG
	       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
	       && subreg_lowpart_p (XEXP (XEXP (t, 0), 0))
	       && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 0)), f)
	       && ((nonzero_bits (f, GET_MODE (f))
		    & ~ GET_MODE_MASK (GET_MODE (XEXP (XEXP (t, 0), 0))))
		   == 0))
	{
	  c1 = XEXP (XEXP (t, 0), 1); z = f; op = GET_CODE (XEXP (t, 0));
	  extend_op = ZERO_EXTEND;
	  m = GET_MODE (XEXP (t, 0));
	}
      /* Commutative variant of the ZERO_EXTEND case.  */
      else if (GET_CODE (t) == ZERO_EXTEND
	       && (GET_CODE (XEXP (t, 0)) == PLUS
		   || GET_CODE (XEXP (t, 0)) == IOR
		   || GET_CODE (XEXP (t, 0)) == XOR)
	       && GET_CODE (XEXP (XEXP (t, 0), 1)) == SUBREG
	       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
	       && subreg_lowpart_p (XEXP (XEXP (t, 0), 1))
	       && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 1)), f)
	       && ((nonzero_bits (f, GET_MODE (f))
		    & ~ GET_MODE_MASK (GET_MODE (XEXP (XEXP (t, 0), 1))))
		   == 0))
	{
	  c1 = XEXP (XEXP (t, 0), 0); z = f; op = GET_CODE (XEXP (t, 0));
	  extend_op = ZERO_EXTEND;
	  m = GET_MODE (XEXP (t, 0));
	}

      if (z)
	{
	  /* Build (OP Z (mult COND C2)), simplifying the pieces as we go.  */
	  temp = subst (gen_binary (true_code, m, cond_op0, cond_op1),
			pc_rtx, pc_rtx, 0, 0);
	  temp = gen_binary (MULT, m, temp,
			     gen_binary (MULT, m, c1, const_true_rtx));
	  temp = subst (temp, pc_rtx, pc_rtx, 0, 0);
	  temp = gen_binary (op, m, gen_lowpart_for_combine (m, z), temp);

	  if (extend_op != NIL)
	    temp = gen_unary (extend_op, mode, m, temp);

	  return temp;
	}
    }

  /* If we have (if_then_else (ne A 0) C1 0) and either A is known to be 0 or
     1 and C1 is a single bit or A is known to be 0 or -1 and C1 is the
     negation of a single bit, we can convert this operation to a shift.  We
     can actually do this more generally, but it doesn't seem worth it.  */

  if (true_code == NE && XEXP (cond, 1) == const0_rtx
      && false == const0_rtx && GET_CODE (true) == CONST_INT
      && ((1 == nonzero_bits (XEXP (cond, 0), mode)
	   && (i = exact_log2 (INTVAL (true))) >= 0)
	  || ((num_sign_bit_copies (XEXP (cond, 0), mode)
	       == GET_MODE_BITSIZE (mode))
	      && (i = exact_log2 (- INTVAL (true))) >= 0)))
    return
      simplify_shift_const (NULL_RTX, ASHIFT, mode,
			    gen_lowpart_for_combine (mode, XEXP (cond, 0)), i);

  return x;
}
4541\f
/* Simplify X, a SET expression.  Return the new expression.
   Most transformations rewrite X in place via SUBST (recorded in the
   undo buffer); the CLOBBER returns at the end signal total failure
   to the caller.  May also set undobuf.other_insn when the user of a
   condition-code result must change too.  */

static rtx
simplify_set (x)
     rtx x;
{
  rtx src = SET_SRC (x);
  rtx dest = SET_DEST (x);
  /* Prefer the source mode; fall back to the destination mode when the
     source has VOIDmode (e.g. a constant).  */
  enum machine_mode mode
    = GET_MODE (src) != VOIDmode ? GET_MODE (src) : GET_MODE (dest);
  rtx other_insn;
  rtx *cc_use;

  /* (set (pc) (return)) gets written as (return).  */
  if (GET_CODE (dest) == PC && GET_CODE (src) == RETURN)
    return src;

  /* Now that we know for sure which bits of SRC we are using, see if we can
     simplify the expression for the object knowing that we only need the
     low-order bits.  */

  if (GET_MODE_CLASS (mode) == MODE_INT)
    src = force_to_mode (src, mode, GET_MODE_MASK (mode), NULL_RTX, 0);

  /* If we are setting CC0 or if the source is a COMPARE, look for the use of
     the comparison result and try to simplify it unless we already have used
     undobuf.other_insn.  */
  if ((GET_CODE (src) == COMPARE
#ifdef HAVE_cc0
       || dest == cc0_rtx
#endif
       )
      && (cc_use = find_single_use (dest, subst_insn, &other_insn)) != 0
      && (undobuf.other_insn == 0 || other_insn == undobuf.other_insn)
      && GET_RTX_CLASS (GET_CODE (*cc_use)) == '<'
      && rtx_equal_p (XEXP (*cc_use, 0), dest))
    {
      enum rtx_code old_code = GET_CODE (*cc_use);
      enum rtx_code new_code;
      rtx op0, op1;
      int other_changed = 0;
      enum machine_mode compare_mode = GET_MODE (dest);

      if (GET_CODE (src) == COMPARE)
	op0 = XEXP (src, 0), op1 = XEXP (src, 1);
      else
	op0 = src, op1 = const0_rtx;

      /* Simplify our comparison, if possible.  */
      new_code = simplify_comparison (old_code, &op0, &op1);

#ifdef EXTRA_CC_MODES
      /* If this machine has CC modes other than CCmode, check to see if we
	 need to use a different CC mode here.  */
      compare_mode = SELECT_CC_MODE (new_code, op0, op1);
#endif /* EXTRA_CC_MODES */

#if !defined (HAVE_cc0) && defined (EXTRA_CC_MODES)
      /* If the mode changed, we have to change SET_DEST, the mode in the
	 compare, and the mode in the place SET_DEST is used.  If SET_DEST is
	 a hard register, just build new versions with the proper mode.  If it
	 is a pseudo, we lose unless it is only time we set the pseudo, in
	 which case we can safely change its mode.  */
      if (compare_mode != GET_MODE (dest))
	{
	  int regno = REGNO (dest);
	  rtx new_dest = gen_rtx_REG (compare_mode, regno);

	  if (regno < FIRST_PSEUDO_REGISTER
	      || (REG_N_SETS (regno) == 1 && ! REG_USERVAR_P (dest)))
	    {
	      if (regno >= FIRST_PSEUDO_REGISTER)
		SUBST (regno_reg_rtx[regno], new_dest);

	      SUBST (SET_DEST (x), new_dest);
	      SUBST (XEXP (*cc_use, 0), new_dest);
	      other_changed = 1;

	      dest = new_dest;
	    }
	}
#endif

      /* If the code changed, we have to build a new comparison in
	 undobuf.other_insn.  */
      if (new_code != old_code)
	{
	  unsigned HOST_WIDE_INT mask;

	  SUBST (*cc_use, gen_rtx_combine (new_code, GET_MODE (*cc_use),
					   dest, const0_rtx));

	  /* If the only change we made was to change an EQ into an NE or
	     vice versa, OP0 has only one bit that might be nonzero, and OP1
	     is zero, check if changing the user of the condition code will
	     produce a valid insn.  If it won't, we can keep the original code
	     in that insn by surrounding our operation with an XOR.  */

	  if (((old_code == NE && new_code == EQ)
	       || (old_code == EQ && new_code == NE))
	      && ! other_changed && op1 == const0_rtx
	      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
	      && exact_log2 (mask = nonzero_bits (op0, GET_MODE (op0))) >= 0)
	    {
	      rtx pat = PATTERN (other_insn), note = 0;
	      int scratches;

	      if ((recog_for_combine (&pat, other_insn, &note, &scratches) < 0
		   && ! check_asm_operands (pat)))
		{
		  /* Recognition failed: restore the old code in the user
		     and fold the EQ/NE flip into OP0 with an XOR of the
		     single possibly-nonzero bit.  */
		  PUT_CODE (*cc_use, old_code);
		  other_insn = 0;

		  op0 = gen_binary (XOR, GET_MODE (op0), op0, GEN_INT (mask));
		}
	    }

	  other_changed = 1;
	}

      if (other_changed)
	undobuf.other_insn = other_insn;

#ifdef HAVE_cc0
      /* If we are now comparing against zero, change our source if
	 needed.  If we do not use cc0, we always have a COMPARE.  */
      if (op1 == const0_rtx && dest == cc0_rtx)
	{
	  SUBST (SET_SRC (x), op0);
	  src = op0;
	}
      else
#endif

      /* Otherwise, if we didn't previously have a COMPARE in the
	 correct mode, we need one.  */
      if (GET_CODE (src) != COMPARE || GET_MODE (src) != compare_mode)
	{
	  SUBST (SET_SRC (x),
		 gen_rtx_combine (COMPARE, compare_mode, op0, op1));
	  src = SET_SRC (x);
	}
      else
	{
	  /* Otherwise, update the COMPARE if needed.  */
	  SUBST (XEXP (src, 0), op0);
	  SUBST (XEXP (src, 1), op1);
	}
    }
  else
    {
      /* Get SET_SRC in a form where we have placed back any
	 compound expressions.  Then do the checks below.  */
      src = make_compound_operation (src, SET);
      SUBST (SET_SRC (x), src);
    }

  /* If we have (set x (subreg:m1 (op:m2 ...) 0)) with OP being some operation,
     and X being a REG or (subreg (reg)), we may be able to convert this to
     (set (subreg:m2 x) (op)).

     We can always do this if M1 is narrower than M2 because that means that
     we only care about the low bits of the result.

     However, on machines without WORD_REGISTER_OPERATIONS defined, we cannot
     perform a narrower operation than requested since the high-order bits will
     be undefined.  On machine where it is defined, this transformation is safe
     as long as M1 and M2 have the same number of words.  */

  if (GET_CODE (src) == SUBREG && subreg_lowpart_p (src)
      && GET_RTX_CLASS (GET_CODE (SUBREG_REG (src))) != 'o'
      /* Same number of words in both modes (ceiling division).  */
      && (((GET_MODE_SIZE (GET_MODE (src)) + (UNITS_PER_WORD - 1))
	   / UNITS_PER_WORD)
	  == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
	       + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
#ifndef WORD_REGISTER_OPERATIONS
      && (GET_MODE_SIZE (GET_MODE (src))
	  < GET_MODE_SIZE (GET_MODE (SUBREG_REG (src))))
#endif
#ifdef CLASS_CANNOT_CHANGE_SIZE
      /* Some hard-register classes cannot change mode size safely.  */
      && ! (GET_CODE (dest) == REG && REGNO (dest) < FIRST_PSEUDO_REGISTER
	    && (TEST_HARD_REG_BIT
		(reg_class_contents[(int) CLASS_CANNOT_CHANGE_SIZE],
		 REGNO (dest)))
	    && (GET_MODE_SIZE (GET_MODE (src))
		!= GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))))
#endif
      && (GET_CODE (dest) == REG
	  || (GET_CODE (dest) == SUBREG
	      && GET_CODE (SUBREG_REG (dest)) == REG)))
    {
      SUBST (SET_DEST (x),
	     gen_lowpart_for_combine (GET_MODE (SUBREG_REG (src)),
				      dest));
      SUBST (SET_SRC (x), SUBREG_REG (src));

      src = SET_SRC (x), dest = SET_DEST (x);
    }

#ifdef LOAD_EXTEND_OP
  /* If we have (set FOO (subreg:M (mem:N BAR) 0)) with M wider than N, this
     would require a paradoxical subreg.  Replace the subreg with a
     zero_extend to avoid the reload that would otherwise be required.  */

  if (GET_CODE (src) == SUBREG && subreg_lowpart_p (src)
      && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (src))) != NIL
      && SUBREG_WORD (src) == 0
      && (GET_MODE_SIZE (GET_MODE (src))
	  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (src))))
      && GET_CODE (SUBREG_REG (src)) == MEM)
    {
      SUBST (SET_SRC (x),
	     gen_rtx_combine (LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (src))),
			      GET_MODE (src), XEXP (src, 0)));

      src = SET_SRC (x);
    }
#endif

  /* If we don't have a conditional move, SET_SRC is an IF_THEN_ELSE, and we
     are comparing an item known to be 0 or -1 against 0, use a logical
     operation instead.  Check for one of the arms being an IOR of the other
     arm with some value.  We compute three terms to be IOR'ed together.  In
     practice, at most two will be nonzero.  Then we do the IOR's.  */

  if (GET_CODE (dest) != PC
      && GET_CODE (src) == IF_THEN_ELSE
      && GET_MODE_CLASS (GET_MODE (src)) == MODE_INT
      && (GET_CODE (XEXP (src, 0)) == EQ || GET_CODE (XEXP (src, 0)) == NE)
      && XEXP (XEXP (src, 0), 1) == const0_rtx
      && GET_MODE (src) == GET_MODE (XEXP (XEXP (src, 0), 0))
#ifdef HAVE_conditional_move
      && ! can_conditionally_move_p (GET_MODE (src))
#endif
      /* The tested value must be known to be all-ones or all-zeros.  */
      && (num_sign_bit_copies (XEXP (XEXP (src, 0), 0),
			       GET_MODE (XEXP (XEXP (src, 0), 0)))
	  == GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (src, 0), 0))))
      && ! side_effects_p (src))
    {
      /* Local rtx variables named `true'/`false' (pre-C99 C).  */
      rtx true = (GET_CODE (XEXP (src, 0)) == NE
		  ? XEXP (src, 1) : XEXP (src, 2));
      rtx false = (GET_CODE (XEXP (src, 0)) == NE
		   ? XEXP (src, 2) : XEXP (src, 1));
      rtx term1 = const0_rtx, term2, term3;

      if (GET_CODE (true) == IOR && rtx_equal_p (XEXP (true, 0), false))
	term1 = false, true = XEXP (true, 1), false = const0_rtx;
      else if (GET_CODE (true) == IOR
	       && rtx_equal_p (XEXP (true, 1), false))
	term1 = false, true = XEXP (true, 0), false = const0_rtx;
      else if (GET_CODE (false) == IOR
	       && rtx_equal_p (XEXP (false, 0), true))
	term1 = true, false = XEXP (false, 1), true = const0_rtx;
      else if (GET_CODE (false) == IOR
	       && rtx_equal_p (XEXP (false, 1), true))
	term1 = true, false = XEXP (false, 0), true = const0_rtx;

      /* (A & true) | (~A & false), plus any shared term pulled out above.  */
      term2 = gen_binary (AND, GET_MODE (src), XEXP (XEXP (src, 0), 0), true);
      term3 = gen_binary (AND, GET_MODE (src),
			  gen_unary (NOT, GET_MODE (src), GET_MODE (src),
				     XEXP (XEXP (src, 0), 0)),
			  false);

      SUBST (SET_SRC (x),
	     gen_binary (IOR, GET_MODE (src),
			 gen_binary (IOR, GET_MODE (src), term1, term2),
			 term3));

      src = SET_SRC (x);
    }

  /* If either SRC or DEST is a CLOBBER of (const_int 0), make this
     whole thing fail.  */
  if (GET_CODE (src) == CLOBBER && XEXP (src, 0) == const0_rtx)
    return src;
  else if (GET_CODE (dest) == CLOBBER && XEXP (dest, 0) == const0_rtx)
    return dest;
  else
    /* Convert this into a field assignment operation, if possible.  */
    return make_field_assignment (x);
}
4823\f
4824/* Simplify, X, and AND, IOR, or XOR operation, and return the simplified
4825 result. LAST is nonzero if this is the last retry. */
4826
4827static rtx
4828simplify_logical (x, last)
4829 rtx x;
4830 int last;
4831{
4832 enum machine_mode mode = GET_MODE (x);
4833 rtx op0 = XEXP (x, 0);
4834 rtx op1 = XEXP (x, 1);
4835
4836 switch (GET_CODE (x))
4837 {
230d793d 4838 case AND:
8079805d
RK
4839 /* Convert (A ^ B) & A to A & (~ B) since the latter is often a single
4840 insn (and may simplify more). */
4841 if (GET_CODE (op0) == XOR
4842 && rtx_equal_p (XEXP (op0, 0), op1)
4843 && ! side_effects_p (op1))
0c1c8ea6
RK
4844 x = gen_binary (AND, mode,
4845 gen_unary (NOT, mode, mode, XEXP (op0, 1)), op1);
8079805d
RK
4846
4847 if (GET_CODE (op0) == XOR
4848 && rtx_equal_p (XEXP (op0, 1), op1)
4849 && ! side_effects_p (op1))
0c1c8ea6
RK
4850 x = gen_binary (AND, mode,
4851 gen_unary (NOT, mode, mode, XEXP (op0, 0)), op1);
8079805d
RK
4852
4853 /* Similarly for (~ (A ^ B)) & A. */
4854 if (GET_CODE (op0) == NOT
4855 && GET_CODE (XEXP (op0, 0)) == XOR
4856 && rtx_equal_p (XEXP (XEXP (op0, 0), 0), op1)
4857 && ! side_effects_p (op1))
4858 x = gen_binary (AND, mode, XEXP (XEXP (op0, 0), 1), op1);
4859
4860 if (GET_CODE (op0) == NOT
4861 && GET_CODE (XEXP (op0, 0)) == XOR
4862 && rtx_equal_p (XEXP (XEXP (op0, 0), 1), op1)
4863 && ! side_effects_p (op1))
4864 x = gen_binary (AND, mode, XEXP (XEXP (op0, 0), 0), op1);
4865
4866 if (GET_CODE (op1) == CONST_INT)
230d793d 4867 {
8079805d 4868 x = simplify_and_const_int (x, mode, op0, INTVAL (op1));
230d793d
RS
4869
4870 /* If we have (ior (and (X C1) C2)) and the next restart would be
4871 the last, simplify this by making C1 as small as possible
0f41302f 4872 and then exit. */
8079805d
RK
4873 if (last
4874 && GET_CODE (x) == IOR && GET_CODE (op0) == AND
4875 && GET_CODE (XEXP (op0, 1)) == CONST_INT
4876 && GET_CODE (op1) == CONST_INT)
4877 return gen_binary (IOR, mode,
4878 gen_binary (AND, mode, XEXP (op0, 0),
4879 GEN_INT (INTVAL (XEXP (op0, 1))
4880 & ~ INTVAL (op1))), op1);
230d793d
RS
4881
4882 if (GET_CODE (x) != AND)
8079805d 4883 return x;
0e32506c
RK
4884
4885 if (GET_RTX_CLASS (GET_CODE (x)) == 'c'
4886 || GET_RTX_CLASS (GET_CODE (x)) == '2')
4887 op0 = XEXP (x, 0), op1 = XEXP (x, 1);
230d793d
RS
4888 }
4889
4890 /* Convert (A | B) & A to A. */
8079805d
RK
4891 if (GET_CODE (op0) == IOR
4892 && (rtx_equal_p (XEXP (op0, 0), op1)
4893 || rtx_equal_p (XEXP (op0, 1), op1))
4894 && ! side_effects_p (XEXP (op0, 0))
4895 && ! side_effects_p (XEXP (op0, 1)))
4896 return op1;
230d793d 4897
d0ab8cd3 4898 /* In the following group of tests (and those in case IOR below),
230d793d
RS
4899 we start with some combination of logical operations and apply
4900 the distributive law followed by the inverse distributive law.
4901 Most of the time, this results in no change. However, if some of
4902 the operands are the same or inverses of each other, simplifications
4903 will result.
4904
4905 For example, (and (ior A B) (not B)) can occur as the result of
4906 expanding a bit field assignment. When we apply the distributive
4907 law to this, we get (ior (and (A (not B))) (and (B (not B)))),
8079805d 4908 which then simplifies to (and (A (not B))).
230d793d 4909
8079805d 4910 If we have (and (ior A B) C), apply the distributive law and then
230d793d
RS
4911 the inverse distributive law to see if things simplify. */
4912
8079805d 4913 if (GET_CODE (op0) == IOR || GET_CODE (op0) == XOR)
230d793d
RS
4914 {
4915 x = apply_distributive_law
8079805d
RK
4916 (gen_binary (GET_CODE (op0), mode,
4917 gen_binary (AND, mode, XEXP (op0, 0), op1),
4918 gen_binary (AND, mode, XEXP (op0, 1), op1)));
230d793d 4919 if (GET_CODE (x) != AND)
8079805d 4920 return x;
230d793d
RS
4921 }
4922
8079805d
RK
4923 if (GET_CODE (op1) == IOR || GET_CODE (op1) == XOR)
4924 return apply_distributive_law
4925 (gen_binary (GET_CODE (op1), mode,
4926 gen_binary (AND, mode, XEXP (op1, 0), op0),
4927 gen_binary (AND, mode, XEXP (op1, 1), op0)));
230d793d
RS
4928
4929 /* Similarly, taking advantage of the fact that
4930 (and (not A) (xor B C)) == (xor (ior A B) (ior A C)) */
4931
8079805d
RK
4932 if (GET_CODE (op0) == NOT && GET_CODE (op1) == XOR)
4933 return apply_distributive_law
4934 (gen_binary (XOR, mode,
4935 gen_binary (IOR, mode, XEXP (op0, 0), XEXP (op1, 0)),
4936 gen_binary (IOR, mode, XEXP (op0, 0), XEXP (op1, 1))));
230d793d 4937
8079805d
RK
4938 else if (GET_CODE (op1) == NOT && GET_CODE (op0) == XOR)
4939 return apply_distributive_law
4940 (gen_binary (XOR, mode,
4941 gen_binary (IOR, mode, XEXP (op1, 0), XEXP (op0, 0)),
4942 gen_binary (IOR, mode, XEXP (op1, 0), XEXP (op0, 1))));
230d793d
RS
4943 break;
4944
4945 case IOR:
951553af 4946 /* (ior A C) is C if all bits of A that might be nonzero are on in C. */
8079805d 4947 if (GET_CODE (op1) == CONST_INT
ac49a949 4948 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
8079805d
RK
4949 && (nonzero_bits (op0, mode) & ~ INTVAL (op1)) == 0)
4950 return op1;
d0ab8cd3 4951
230d793d 4952 /* Convert (A & B) | A to A. */
8079805d
RK
4953 if (GET_CODE (op0) == AND
4954 && (rtx_equal_p (XEXP (op0, 0), op1)
4955 || rtx_equal_p (XEXP (op0, 1), op1))
4956 && ! side_effects_p (XEXP (op0, 0))
4957 && ! side_effects_p (XEXP (op0, 1)))
4958 return op1;
230d793d
RS
4959
4960 /* If we have (ior (and A B) C), apply the distributive law and then
4961 the inverse distributive law to see if things simplify. */
4962
8079805d 4963 if (GET_CODE (op0) == AND)
230d793d
RS
4964 {
4965 x = apply_distributive_law
4966 (gen_binary (AND, mode,
8079805d
RK
4967 gen_binary (IOR, mode, XEXP (op0, 0), op1),
4968 gen_binary (IOR, mode, XEXP (op0, 1), op1)));
230d793d
RS
4969
4970 if (GET_CODE (x) != IOR)
8079805d 4971 return x;
230d793d
RS
4972 }
4973
8079805d 4974 if (GET_CODE (op1) == AND)
230d793d
RS
4975 {
4976 x = apply_distributive_law
4977 (gen_binary (AND, mode,
8079805d
RK
4978 gen_binary (IOR, mode, XEXP (op1, 0), op0),
4979 gen_binary (IOR, mode, XEXP (op1, 1), op0)));
230d793d
RS
4980
4981 if (GET_CODE (x) != IOR)
8079805d 4982 return x;
230d793d
RS
4983 }
4984
4985 /* Convert (ior (ashift A CX) (lshiftrt A CY)) where CX+CY equals the
4986 mode size to (rotate A CX). */
4987
8079805d
RK
4988 if (((GET_CODE (op0) == ASHIFT && GET_CODE (op1) == LSHIFTRT)
4989 || (GET_CODE (op1) == ASHIFT && GET_CODE (op0) == LSHIFTRT))
4990 && rtx_equal_p (XEXP (op0, 0), XEXP (op1, 0))
4991 && GET_CODE (XEXP (op0, 1)) == CONST_INT
4992 && GET_CODE (XEXP (op1, 1)) == CONST_INT
4993 && (INTVAL (XEXP (op0, 1)) + INTVAL (XEXP (op1, 1))
230d793d 4994 == GET_MODE_BITSIZE (mode)))
38a448ca
RH
4995 return gen_rtx_ROTATE (mode, XEXP (op0, 0),
4996 (GET_CODE (op0) == ASHIFT
4997 ? XEXP (op0, 1) : XEXP (op1, 1)));
230d793d 4998
71923da7
RK
4999 /* If OP0 is (ashiftrt (plus ...) C), it might actually be
5000 a (sign_extend (plus ...)). If so, OP1 is a CONST_INT, and the PLUS
5001 does not affect any of the bits in OP1, it can really be done
5002 as a PLUS and we can associate. We do this by seeing if OP1
5003 can be safely shifted left C bits. */
5004 if (GET_CODE (op1) == CONST_INT && GET_CODE (op0) == ASHIFTRT
5005 && GET_CODE (XEXP (op0, 0)) == PLUS
5006 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
5007 && GET_CODE (XEXP (op0, 1)) == CONST_INT
5008 && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT)
5009 {
5010 int count = INTVAL (XEXP (op0, 1));
5011 HOST_WIDE_INT mask = INTVAL (op1) << count;
5012
5013 if (mask >> count == INTVAL (op1)
5014 && (mask & nonzero_bits (XEXP (op0, 0), mode)) == 0)
5015 {
5016 SUBST (XEXP (XEXP (op0, 0), 1),
5017 GEN_INT (INTVAL (XEXP (XEXP (op0, 0), 1)) | mask));
5018 return op0;
5019 }
5020 }
230d793d
RS
5021 break;
5022
5023 case XOR:
5024 /* Convert (XOR (NOT x) (NOT y)) to (XOR x y).
5025 Also convert (XOR (NOT x) y) to (NOT (XOR x y)), similarly for
5026 (NOT y). */
5027 {
5028 int num_negated = 0;
230d793d 5029
8079805d
RK
5030 if (GET_CODE (op0) == NOT)
5031 num_negated++, op0 = XEXP (op0, 0);
5032 if (GET_CODE (op1) == NOT)
5033 num_negated++, op1 = XEXP (op1, 0);
230d793d
RS
5034
5035 if (num_negated == 2)
5036 {
8079805d
RK
5037 SUBST (XEXP (x, 0), op0);
5038 SUBST (XEXP (x, 1), op1);
230d793d
RS
5039 }
5040 else if (num_negated == 1)
0c1c8ea6 5041 return gen_unary (NOT, mode, mode, gen_binary (XOR, mode, op0, op1));
230d793d
RS
5042 }
5043
5044 /* Convert (xor (and A B) B) to (and (not A) B). The latter may
5045 correspond to a machine insn or result in further simplifications
5046 if B is a constant. */
5047
8079805d
RK
5048 if (GET_CODE (op0) == AND
5049 && rtx_equal_p (XEXP (op0, 1), op1)
5050 && ! side_effects_p (op1))
0c1c8ea6
RK
5051 return gen_binary (AND, mode,
5052 gen_unary (NOT, mode, mode, XEXP (op0, 0)),
8079805d 5053 op1);
230d793d 5054
8079805d
RK
5055 else if (GET_CODE (op0) == AND
5056 && rtx_equal_p (XEXP (op0, 0), op1)
5057 && ! side_effects_p (op1))
0c1c8ea6
RK
5058 return gen_binary (AND, mode,
5059 gen_unary (NOT, mode, mode, XEXP (op0, 1)),
8079805d 5060 op1);
230d793d 5061
230d793d 5062 /* (xor (comparison foo bar) (const_int 1)) can become the reversed
0802d516
RK
5063 comparison if STORE_FLAG_VALUE is 1. */
5064 if (STORE_FLAG_VALUE == 1
5065 && op1 == const1_rtx
8079805d
RK
5066 && GET_RTX_CLASS (GET_CODE (op0)) == '<'
5067 && reversible_comparison_p (op0))
5068 return gen_rtx_combine (reverse_condition (GET_CODE (op0)),
5069 mode, XEXP (op0, 0), XEXP (op0, 1));
500c518b
RK
5070
5071 /* (lshiftrt foo C) where C is the number of bits in FOO minus 1
5072 is (lt foo (const_int 0)), so we can perform the above
0802d516 5073 simplification if STORE_FLAG_VALUE is 1. */
500c518b 5074
0802d516
RK
5075 if (STORE_FLAG_VALUE == 1
5076 && op1 == const1_rtx
8079805d
RK
5077 && GET_CODE (op0) == LSHIFTRT
5078 && GET_CODE (XEXP (op0, 1)) == CONST_INT
5079 && INTVAL (XEXP (op0, 1)) == GET_MODE_BITSIZE (mode) - 1)
5080 return gen_rtx_combine (GE, mode, XEXP (op0, 0), const0_rtx);
230d793d
RS
5081
5082 /* (xor (comparison foo bar) (const_int sign-bit))
5083 when STORE_FLAG_VALUE is the sign bit. */
5f4f0e22 5084 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
0802d516 5085 && ((STORE_FLAG_VALUE & GET_MODE_MASK (mode))
5f4f0e22 5086 == (HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (mode) - 1))
8079805d
RK
5087 && op1 == const_true_rtx
5088 && GET_RTX_CLASS (GET_CODE (op0)) == '<'
5089 && reversible_comparison_p (op0))
5090 return gen_rtx_combine (reverse_condition (GET_CODE (op0)),
5091 mode, XEXP (op0, 0), XEXP (op0, 1));
230d793d 5092 break;
e9a25f70
JL
5093
5094 default:
5095 abort ();
230d793d
RS
5096 }
5097
5098 return x;
5099}
5100\f
5101/* We consider ZERO_EXTRACT, SIGN_EXTRACT, and SIGN_EXTEND as "compound
5102 operations" because they can be replaced with two more basic operations.
5103 ZERO_EXTEND is also considered "compound" because it can be replaced with
5104 an AND operation, which is simpler, though only one operation.
5105
5106 The function expand_compound_operation is called with an rtx expression
5107 and will convert it to the appropriate shifts and AND operations,
5108 simplifying at each stage.
5109
5110 The function make_compound_operation is called to convert an expression
5111 consisting of shifts and ANDs into the equivalent compound expression.
5112 It is the inverse of this function, loosely speaking. */
5113
5114static rtx
5115expand_compound_operation (x)
5116 rtx x;
5117{
5118 int pos = 0, len;
5119 int unsignedp = 0;
5120 int modewidth;
5121 rtx tem;
5122
5123 switch (GET_CODE (x))
5124 {
5125 case ZERO_EXTEND:
5126 unsignedp = 1;
5127 case SIGN_EXTEND:
75473182
RS
5128 /* We can't necessarily use a const_int for a multiword mode;
5129 it depends on implicitly extending the value.
5130 Since we don't know the right way to extend it,
5131 we can't tell whether the implicit way is right.
5132
5133 Even for a mode that is no wider than a const_int,
5134 we can't win, because we need to sign extend one of its bits through
5135 the rest of it, and we don't know which bit. */
230d793d 5136 if (GET_CODE (XEXP (x, 0)) == CONST_INT)
75473182 5137 return x;
230d793d 5138
8079805d
RK
5139 /* Return if (subreg:MODE FROM 0) is not a safe replacement for
5140 (zero_extend:MODE FROM) or (sign_extend:MODE FROM). It is for any MEM
5141 because (SUBREG (MEM...)) is guaranteed to cause the MEM to be
5142 reloaded. If not for that, MEM's would very rarely be safe.
5143
5144 Reject MODEs bigger than a word, because we might not be able
5145 to reference a two-register group starting with an arbitrary register
5146 (and currently gen_lowpart might crash for a SUBREG). */
5147
5148 if (GET_MODE_SIZE (GET_MODE (XEXP (x, 0))) > UNITS_PER_WORD)
230d793d
RS
5149 return x;
5150
5151 len = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)));
5152 /* If the inner object has VOIDmode (the only way this can happen
5153 is if it is a ASM_OPERANDS), we can't do anything since we don't
5154 know how much masking to do. */
5155 if (len == 0)
5156 return x;
5157
5158 break;
5159
5160 case ZERO_EXTRACT:
5161 unsignedp = 1;
5162 case SIGN_EXTRACT:
5163 /* If the operand is a CLOBBER, just return it. */
5164 if (GET_CODE (XEXP (x, 0)) == CLOBBER)
5165 return XEXP (x, 0);
5166
5167 if (GET_CODE (XEXP (x, 1)) != CONST_INT
5168 || GET_CODE (XEXP (x, 2)) != CONST_INT
5169 || GET_MODE (XEXP (x, 0)) == VOIDmode)
5170 return x;
5171
5172 len = INTVAL (XEXP (x, 1));
5173 pos = INTVAL (XEXP (x, 2));
5174
5175 /* If this goes outside the object being extracted, replace the object
5176 with a (use (mem ...)) construct that only combine understands
5177 and is used only for this purpose. */
5178 if (len + pos > GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))))
38a448ca 5179 SUBST (XEXP (x, 0), gen_rtx_USE (GET_MODE (x), XEXP (x, 0)));
230d793d 5180
f76b9db2
ILT
5181 if (BITS_BIG_ENDIAN)
5182 pos = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - len - pos;
5183
230d793d
RS
5184 break;
5185
5186 default:
5187 return x;
5188 }
5189
0f13a422
ILT
5190 /* We can optimize some special cases of ZERO_EXTEND. */
5191 if (GET_CODE (x) == ZERO_EXTEND)
5192 {
5193 /* (zero_extend:DI (truncate:SI foo:DI)) is just foo:DI if we
5194 know that the last value didn't have any inappropriate bits
5195 set. */
5196 if (GET_CODE (XEXP (x, 0)) == TRUNCATE
5197 && GET_MODE (XEXP (XEXP (x, 0), 0)) == GET_MODE (x)
5198 && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
5199 && (nonzero_bits (XEXP (XEXP (x, 0), 0), GET_MODE (x))
5200 & ~ GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
5201 return XEXP (XEXP (x, 0), 0);
5202
5203 /* Likewise for (zero_extend:DI (subreg:SI foo:DI 0)). */
5204 if (GET_CODE (XEXP (x, 0)) == SUBREG
5205 && GET_MODE (SUBREG_REG (XEXP (x, 0))) == GET_MODE (x)
5206 && subreg_lowpart_p (XEXP (x, 0))
5207 && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
5208 && (nonzero_bits (SUBREG_REG (XEXP (x, 0)), GET_MODE (x))
fcc60894 5209 & ~ GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
0f13a422
ILT
5210 return SUBREG_REG (XEXP (x, 0));
5211
5212 /* (zero_extend:DI (truncate:SI foo:DI)) is just foo:DI when foo
5213 is a comparison and STORE_FLAG_VALUE permits. This is like
5214 the first case, but it works even when GET_MODE (x) is larger
5215 than HOST_WIDE_INT. */
5216 if (GET_CODE (XEXP (x, 0)) == TRUNCATE
5217 && GET_MODE (XEXP (XEXP (x, 0), 0)) == GET_MODE (x)
5218 && GET_RTX_CLASS (GET_CODE (XEXP (XEXP (x, 0), 0))) == '<'
5219 && (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
5220 <= HOST_BITS_PER_WIDE_INT)
5221 && ((HOST_WIDE_INT) STORE_FLAG_VALUE
5222 & ~ GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
5223 return XEXP (XEXP (x, 0), 0);
5224
5225 /* Likewise for (zero_extend:DI (subreg:SI foo:DI 0)). */
5226 if (GET_CODE (XEXP (x, 0)) == SUBREG
5227 && GET_MODE (SUBREG_REG (XEXP (x, 0))) == GET_MODE (x)
5228 && subreg_lowpart_p (XEXP (x, 0))
5229 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0)))) == '<'
5230 && (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
5231 <= HOST_BITS_PER_WIDE_INT)
5232 && ((HOST_WIDE_INT) STORE_FLAG_VALUE
5233 & ~ GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
5234 return SUBREG_REG (XEXP (x, 0));
5235
5236 /* If sign extension is cheaper than zero extension, then use it
5237 if we know that no extraneous bits are set, and that the high
5238 bit is not set. */
5239 if (flag_expensive_optimizations
5240 && ((GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
5241 && ((nonzero_bits (XEXP (x, 0), GET_MODE (x))
5242 & ~ (((unsigned HOST_WIDE_INT)
5243 GET_MODE_MASK (GET_MODE (XEXP (x, 0))))
5244 >> 1))
5245 == 0))
5246 || (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
5247 && (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
5248 <= HOST_BITS_PER_WIDE_INT)
5249 && (((HOST_WIDE_INT) STORE_FLAG_VALUE
5250 & ~ (((unsigned HOST_WIDE_INT)
5251 GET_MODE_MASK (GET_MODE (XEXP (x, 0))))
5252 >> 1))
5253 == 0))))
5254 {
38a448ca 5255 rtx temp = gen_rtx_SIGN_EXTEND (GET_MODE (x), XEXP (x, 0));
0f13a422
ILT
5256
5257 if (rtx_cost (temp, SET) < rtx_cost (x, SET))
5258 return expand_compound_operation (temp);
5259 }
5260 }
5261
230d793d
RS
5262 /* If we reach here, we want to return a pair of shifts. The inner
5263 shift is a left shift of BITSIZE - POS - LEN bits. The outer
5264 shift is a right shift of BITSIZE - LEN bits. It is arithmetic or
5265 logical depending on the value of UNSIGNEDP.
5266
5267 If this was a ZERO_EXTEND or ZERO_EXTRACT, this pair of shifts will be
5268 converted into an AND of a shift.
5269
5270 We must check for the case where the left shift would have a negative
5271 count. This can happen in a case like (x >> 31) & 255 on machines
5272 that can't shift by a constant. On those machines, we would first
5273 combine the shift with the AND to produce a variable-position
5274 extraction. Then the constant of 31 would be substituted in to produce
5275 a such a position. */
5276
5277 modewidth = GET_MODE_BITSIZE (GET_MODE (x));
5278 if (modewidth >= pos - len)
5f4f0e22 5279 tem = simplify_shift_const (NULL_RTX, unsignedp ? LSHIFTRT : ASHIFTRT,
230d793d 5280 GET_MODE (x),
5f4f0e22
CH
5281 simplify_shift_const (NULL_RTX, ASHIFT,
5282 GET_MODE (x),
230d793d
RS
5283 XEXP (x, 0),
5284 modewidth - pos - len),
5285 modewidth - len);
5286
5f4f0e22
CH
5287 else if (unsignedp && len < HOST_BITS_PER_WIDE_INT)
5288 tem = simplify_and_const_int (NULL_RTX, GET_MODE (x),
5289 simplify_shift_const (NULL_RTX, LSHIFTRT,
230d793d
RS
5290 GET_MODE (x),
5291 XEXP (x, 0), pos),
5f4f0e22 5292 ((HOST_WIDE_INT) 1 << len) - 1);
230d793d
RS
5293 else
5294 /* Any other cases we can't handle. */
5295 return x;
5296
5297
5298 /* If we couldn't do this for some reason, return the original
5299 expression. */
5300 if (GET_CODE (tem) == CLOBBER)
5301 return x;
5302
5303 return tem;
5304}
5305\f
5306/* X is a SET which contains an assignment of one object into
5307 a part of another (such as a bit-field assignment, STRICT_LOW_PART,
5308 or certain SUBREGS). If possible, convert it into a series of
5309 logical operations.
5310
5311 We half-heartedly support variable positions, but do not at all
5312 support variable lengths. */
5313
static rtx
expand_field_assignment (x)
     rtx x;
{
  rtx inner;
  rtx pos;			/* Always counts from low bit.  */
  int len;
  rtx mask;
  enum machine_mode compute_mode;

  /* Loop until we find something we can't simplify.  */
  while (1)
    {
      /* (set (strict_low_part (subreg ...)) ...): the field is the low
	 part of the SUBREG's mode within the full register.  */
      if (GET_CODE (SET_DEST (x)) == STRICT_LOW_PART
	  && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG)
	{
	  inner = SUBREG_REG (XEXP (SET_DEST (x), 0));
	  len = GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)));
	  pos = GEN_INT (BITS_PER_WORD * SUBREG_WORD (XEXP (SET_DEST (x), 0)));
	}
      /* (set (zero_extract OBJ LEN POS) ...) with a constant LEN; POS may
	 still be a variable rtx.  */
      else if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
	       && GET_CODE (XEXP (SET_DEST (x), 1)) == CONST_INT)
	{
	  inner = XEXP (SET_DEST (x), 0);
	  len = INTVAL (XEXP (SET_DEST (x), 1));
	  pos = XEXP (SET_DEST (x), 2);

	  /* If the position is constant and spans the width of INNER,
	     surround INNER with a USE to indicate this.  */
	  if (GET_CODE (pos) == CONST_INT
	      && INTVAL (pos) + len > GET_MODE_BITSIZE (GET_MODE (inner)))
	    inner = gen_rtx_USE (GET_MODE (SET_DEST (x)), inner);

	  /* Convert a BITS_BIG_ENDIAN bit number to an lsb-relative one,
	     since POS always counts from the low bit below.  */
	  if (BITS_BIG_ENDIAN)
	    {
	      if (GET_CODE (pos) == CONST_INT)
		pos = GEN_INT (GET_MODE_BITSIZE (GET_MODE (inner)) - len
			       - INTVAL (pos));
	      else if (GET_CODE (pos) == MINUS
		       && GET_CODE (XEXP (pos, 1)) == CONST_INT
		       && (INTVAL (XEXP (pos, 1))
			   == GET_MODE_BITSIZE (GET_MODE (inner)) - len))
		/* If position is ADJUST - X, new position is X.  */
		pos = XEXP (pos, 0);
	      else
		pos = gen_binary (MINUS, GET_MODE (pos),
				  GEN_INT (GET_MODE_BITSIZE (GET_MODE (inner))
					   - len),
				  pos);
	    }
	}

      /* A SUBREG between two modes that occupy the same numbers of words
	 can be done by moving the SUBREG to the source.  */
      else if (GET_CODE (SET_DEST (x)) == SUBREG
	       && (((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
		     + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
		   == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
			+ (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
	{
	  x = gen_rtx_SET (VOIDmode, SUBREG_REG (SET_DEST (x)),
			   gen_lowpart_for_combine (GET_MODE (SUBREG_REG (SET_DEST (x))),
						    SET_SRC (x)));
	  /* Re-examine the rewritten SET; its new destination may itself
	     be simplifiable.  */
	  continue;
	}
      else
	break;

      /* Strip lowpart SUBREGs so INNER is the underlying object.  */
      while (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner))
	inner = SUBREG_REG (inner);

      compute_mode = GET_MODE (inner);

      /* Compute a mask of LEN bits, if we can do this on the host machine.  */
      if (len < HOST_BITS_PER_WIDE_INT)
	mask = GEN_INT (((HOST_WIDE_INT) 1 << len) - 1);
      else
	break;

      /* Now compute the equivalent expression.  Make a copy of INNER
	 for the SET_DEST in case it is a MEM into which we will substitute;
	 we don't want shared RTL in that case.

	 The result is INNER with the field cleared, IOR'd with the new
	 value masked and shifted into position:
	 (set INNER (ior (and INNER (not (ashift MASK POS)))
			 (ashift (and SRC MASK) POS))).  */
      x = gen_rtx_SET (VOIDmode, copy_rtx (inner),
		       gen_binary (IOR, compute_mode,
				   gen_binary (AND, compute_mode,
					       gen_unary (NOT, compute_mode,
							  compute_mode,
							  gen_binary (ASHIFT,
								      compute_mode,
								      mask, pos)),
					       inner),
				   gen_binary (ASHIFT, compute_mode,
					       gen_binary (AND, compute_mode,
							   gen_lowpart_for_combine
							   (compute_mode,
							    SET_SRC (x)),
							   mask),
					       pos)));
    }

  return x;
}
5416\f
8999a12e
RK
5417/* Return an RTX for a reference to LEN bits of INNER. If POS_RTX is nonzero,
5418 it is an RTX that represents a variable starting position; otherwise,
5419 POS is the (constant) starting bit position (counted from the LSB).
230d793d
RS
5420
5421 INNER may be a USE. This will occur when we started with a bitfield
5422 that went outside the boundary of the object in memory, which is
5423 allowed on most machines. To isolate this case, we produce a USE
5424 whose mode is wide enough and surround the MEM with it. The only
5425 code that understands the USE is this routine. If it is not removed,
5426 it will cause the resulting insn not to match.
5427
5428 UNSIGNEDP is non-zero for an unsigned reference and zero for a
5429 signed reference.
5430
5431 IN_DEST is non-zero if this is a reference in the destination of a
5432 SET. This is used when a ZERO_ or SIGN_EXTRACT isn't needed. If non-zero,
5433 a STRICT_LOW_PART will be used, if zero, ZERO_EXTEND or SIGN_EXTEND will
5434 be used.
5435
5436 IN_COMPARE is non-zero if we are in a COMPARE. This means that a
5437 ZERO_EXTRACT should be built even for bits starting at bit 0.
5438
76184def
DE
5439 MODE is the desired mode of the result (if IN_DEST == 0).
5440
5441 The result is an RTX for the extraction or NULL_RTX if the target
5442 can't handle it. */
230d793d
RS
5443
5444static rtx
5445make_extraction (mode, inner, pos, pos_rtx, len,
5446 unsignedp, in_dest, in_compare)
5447 enum machine_mode mode;
5448 rtx inner;
5449 int pos;
5450 rtx pos_rtx;
5451 int len;
5452 int unsignedp;
5453 int in_dest, in_compare;
5454{
94b4b17a
RS
5455 /* This mode describes the size of the storage area
5456 to fetch the overall value from. Within that, we
5457 ignore the POS lowest bits, etc. */
230d793d
RS
5458 enum machine_mode is_mode = GET_MODE (inner);
5459 enum machine_mode inner_mode;
d7cd794f
RK
5460 enum machine_mode wanted_inner_mode = byte_mode;
5461 enum machine_mode wanted_inner_reg_mode = word_mode;
230d793d
RS
5462 enum machine_mode pos_mode = word_mode;
5463 enum machine_mode extraction_mode = word_mode;
5464 enum machine_mode tmode = mode_for_size (len, MODE_INT, 1);
5465 int spans_byte = 0;
5466 rtx new = 0;
8999a12e 5467 rtx orig_pos_rtx = pos_rtx;
6139ff20 5468 int orig_pos;
230d793d
RS
5469
5470 /* Get some information about INNER and get the innermost object. */
5471 if (GET_CODE (inner) == USE)
94b4b17a 5472 /* (use:SI (mem:QI foo)) stands for (mem:SI foo). */
230d793d
RS
5473 /* We don't need to adjust the position because we set up the USE
5474 to pretend that it was a full-word object. */
5475 spans_byte = 1, inner = XEXP (inner, 0);
5476 else if (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner))
94b4b17a
RS
5477 {
5478 /* If going from (subreg:SI (mem:QI ...)) to (mem:QI ...),
5479 consider just the QI as the memory to extract from.
5480 The subreg adds or removes high bits; its mode is
5481 irrelevant to the meaning of this extraction,
5482 since POS and LEN count from the lsb. */
5483 if (GET_CODE (SUBREG_REG (inner)) == MEM)
5484 is_mode = GET_MODE (SUBREG_REG (inner));
5485 inner = SUBREG_REG (inner);
5486 }
230d793d
RS
5487
5488 inner_mode = GET_MODE (inner);
5489
5490 if (pos_rtx && GET_CODE (pos_rtx) == CONST_INT)
8999a12e 5491 pos = INTVAL (pos_rtx), pos_rtx = 0;
230d793d
RS
5492
5493 /* See if this can be done without an extraction. We never can if the
5494 width of the field is not the same as that of some integer mode. For
5495 registers, we can only avoid the extraction if the position is at the
5496 low-order bit and this is either not in the destination or we have the
5497 appropriate STRICT_LOW_PART operation available.
5498
5499 For MEM, we can avoid an extract if the field starts on an appropriate
5500 boundary and we can change the mode of the memory reference. However,
5501 we cannot directly access the MEM if we have a USE and the underlying
5502 MEM is not TMODE. This combination means that MEM was being used in a
5503 context where bits outside its mode were being referenced; that is only
5504 valid in bit-field insns. */
5505
5506 if (tmode != BLKmode
5507 && ! (spans_byte && inner_mode != tmode)
4d9cfc7b
RK
5508 && ((pos_rtx == 0 && (pos % BITS_PER_WORD) == 0
5509 && GET_CODE (inner) != MEM
230d793d 5510 && (! in_dest
df62f951
RK
5511 || (GET_CODE (inner) == REG
5512 && (movstrict_optab->handlers[(int) tmode].insn_code
5513 != CODE_FOR_nothing))))
8999a12e 5514 || (GET_CODE (inner) == MEM && pos_rtx == 0
dfbe1b2f
RK
5515 && (pos
5516 % (STRICT_ALIGNMENT ? GET_MODE_ALIGNMENT (tmode)
5517 : BITS_PER_UNIT)) == 0
230d793d
RS
5518 /* We can't do this if we are widening INNER_MODE (it
5519 may not be aligned, for one thing). */
5520 && GET_MODE_BITSIZE (inner_mode) >= GET_MODE_BITSIZE (tmode)
5521 && (inner_mode == tmode
5522 || (! mode_dependent_address_p (XEXP (inner, 0))
5523 && ! MEM_VOLATILE_P (inner))))))
5524 {
230d793d
RS
5525 /* If INNER is a MEM, make a new MEM that encompasses just the desired
5526 field. If the original and current mode are the same, we need not
5527 adjust the offset. Otherwise, we do if bytes big endian.
5528
4d9cfc7b
RK
5529 If INNER is not a MEM, get a piece consisting of just the field
5530 of interest (in this case POS % BITS_PER_WORD must be 0). */
230d793d
RS
5531
5532 if (GET_CODE (inner) == MEM)
5533 {
94b4b17a
RS
5534 int offset;
5535 /* POS counts from lsb, but make OFFSET count in memory order. */
5536 if (BYTES_BIG_ENDIAN)
5537 offset = (GET_MODE_BITSIZE (is_mode) - len - pos) / BITS_PER_UNIT;
5538 else
5539 offset = pos / BITS_PER_UNIT;
230d793d 5540
38a448ca 5541 new = gen_rtx_MEM (tmode, plus_constant (XEXP (inner, 0), offset));
230d793d
RS
5542 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (inner);
5543 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (inner);
5544 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (inner);
5545 }
df62f951 5546 else if (GET_CODE (inner) == REG)
c0d3ac4d
RK
5547 {
5548 /* We can't call gen_lowpart_for_combine here since we always want
5549 a SUBREG and it would sometimes return a new hard register. */
5550 if (tmode != inner_mode)
38a448ca
RH
5551 new = gen_rtx_SUBREG (tmode, inner,
5552 (WORDS_BIG_ENDIAN
5553 && GET_MODE_SIZE (inner_mode) > UNITS_PER_WORD
5554 ? (((GET_MODE_SIZE (inner_mode)
5555 - GET_MODE_SIZE (tmode))
5556 / UNITS_PER_WORD)
5557 - pos / BITS_PER_WORD)
5558 : pos / BITS_PER_WORD));
c0d3ac4d
RK
5559 else
5560 new = inner;
5561 }
230d793d 5562 else
6139ff20
RK
5563 new = force_to_mode (inner, tmode,
5564 len >= HOST_BITS_PER_WIDE_INT
5565 ? GET_MODE_MASK (tmode)
5566 : ((HOST_WIDE_INT) 1 << len) - 1,
e3d616e3 5567 NULL_RTX, 0);
230d793d
RS
5568
5569 /* If this extraction is going into the destination of a SET,
5570 make a STRICT_LOW_PART unless we made a MEM. */
5571
5572 if (in_dest)
5573 return (GET_CODE (new) == MEM ? new
77fa0940 5574 : (GET_CODE (new) != SUBREG
38a448ca 5575 ? gen_rtx_CLOBBER (tmode, const0_rtx)
77fa0940 5576 : gen_rtx_combine (STRICT_LOW_PART, VOIDmode, new)));
230d793d
RS
5577
5578 /* Otherwise, sign- or zero-extend unless we already are in the
5579 proper mode. */
5580
5581 return (mode == tmode ? new
5582 : gen_rtx_combine (unsignedp ? ZERO_EXTEND : SIGN_EXTEND,
5583 mode, new));
5584 }
5585
cc471082
RS
5586 /* Unless this is a COMPARE or we have a funny memory reference,
5587 don't do anything with zero-extending field extracts starting at
5588 the low-order bit since they are simple AND operations. */
8999a12e
RK
5589 if (pos_rtx == 0 && pos == 0 && ! in_dest
5590 && ! in_compare && ! spans_byte && unsignedp)
230d793d
RS
5591 return 0;
5592
e7373556
RK
5593 /* Unless we are allowed to span bytes, reject this if we would be
5594 spanning bytes or if the position is not a constant and the length
5595 is not 1. In all other cases, we would only be going outside
5596 out object in cases when an original shift would have been
5597 undefined. */
5598 if (! spans_byte
5599 && ((pos_rtx == 0 && pos + len > GET_MODE_BITSIZE (is_mode))
5600 || (pos_rtx != 0 && len != 1)))
5601 return 0;
5602
d7cd794f 5603 /* Get the mode to use should INNER not be a MEM, the mode for the position,
230d793d
RS
5604 and the mode for the result. */
5605#ifdef HAVE_insv
5606 if (in_dest)
5607 {
0d8e55d8
JL
5608 wanted_inner_reg_mode
5609 = (insn_operand_mode[(int) CODE_FOR_insv][0] == VOIDmode
5610 ? word_mode
5611 : insn_operand_mode[(int) CODE_FOR_insv][0]);
5612 pos_mode = (insn_operand_mode[(int) CODE_FOR_insv][2] == VOIDmode
5613 ? word_mode : insn_operand_mode[(int) CODE_FOR_insv][2]);
5614 extraction_mode = (insn_operand_mode[(int) CODE_FOR_insv][3] == VOIDmode
5615 ? word_mode
5616 : insn_operand_mode[(int) CODE_FOR_insv][3]);
230d793d
RS
5617 }
5618#endif
5619
5620#ifdef HAVE_extzv
5621 if (! in_dest && unsignedp)
5622 {
0d8e55d8
JL
5623 wanted_inner_reg_mode
5624 = (insn_operand_mode[(int) CODE_FOR_extzv][1] == VOIDmode
5625 ? word_mode
5626 : insn_operand_mode[(int) CODE_FOR_extzv][1]);
5627 pos_mode = (insn_operand_mode[(int) CODE_FOR_extzv][3] == VOIDmode
5628 ? word_mode : insn_operand_mode[(int) CODE_FOR_extzv][3]);
5629 extraction_mode = (insn_operand_mode[(int) CODE_FOR_extzv][0] == VOIDmode
5630 ? word_mode
5631 : insn_operand_mode[(int) CODE_FOR_extzv][0]);
230d793d
RS
5632 }
5633#endif
5634
5635#ifdef HAVE_extv
5636 if (! in_dest && ! unsignedp)
5637 {
0d8e55d8
JL
5638 wanted_inner_reg_mode
5639 = (insn_operand_mode[(int) CODE_FOR_extv][1] == VOIDmode
5640 ? word_mode
5641 : insn_operand_mode[(int) CODE_FOR_extv][1]);
5642 pos_mode = (insn_operand_mode[(int) CODE_FOR_extv][3] == VOIDmode
5643 ? word_mode : insn_operand_mode[(int) CODE_FOR_extv][3]);
5644 extraction_mode = (insn_operand_mode[(int) CODE_FOR_extv][0] == VOIDmode
5645 ? word_mode
5646 : insn_operand_mode[(int) CODE_FOR_extv][0]);
230d793d
RS
5647 }
5648#endif
5649
5650 /* Never narrow an object, since that might not be safe. */
5651
5652 if (mode != VOIDmode
5653 && GET_MODE_SIZE (extraction_mode) < GET_MODE_SIZE (mode))
5654 extraction_mode = mode;
5655
5656 if (pos_rtx && GET_MODE (pos_rtx) != VOIDmode
5657 && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx)))
5658 pos_mode = GET_MODE (pos_rtx);
5659
d7cd794f
RK
5660 /* If this is not from memory, the desired mode is wanted_inner_reg_mode;
5661 if we have to change the mode of memory and cannot, the desired mode is
5662 EXTRACTION_MODE. */
5663 if (GET_CODE (inner) != MEM)
5664 wanted_inner_mode = wanted_inner_reg_mode;
5665 else if (inner_mode != wanted_inner_mode
5666 && (mode_dependent_address_p (XEXP (inner, 0))
5667 || MEM_VOLATILE_P (inner)))
5668 wanted_inner_mode = extraction_mode;
230d793d 5669
6139ff20
RK
5670 orig_pos = pos;
5671
f76b9db2
ILT
5672 if (BITS_BIG_ENDIAN)
5673 {
cf54c2cd
DE
5674 /* POS is passed as if BITS_BIG_ENDIAN == 0, so we need to convert it to
5675 BITS_BIG_ENDIAN style. If position is constant, compute new
5676 position. Otherwise, build subtraction.
5677 Note that POS is relative to the mode of the original argument.
5678 If it's a MEM we need to recompute POS relative to that.
5679 However, if we're extracting from (or inserting into) a register,
5680 we want to recompute POS relative to wanted_inner_mode. */
5681 int width = (GET_CODE (inner) == MEM
5682 ? GET_MODE_BITSIZE (is_mode)
5683 : GET_MODE_BITSIZE (wanted_inner_mode));
5684
f76b9db2 5685 if (pos_rtx == 0)
cf54c2cd 5686 pos = width - len - pos;
f76b9db2
ILT
5687 else
5688 pos_rtx
5689 = gen_rtx_combine (MINUS, GET_MODE (pos_rtx),
cf54c2cd
DE
5690 GEN_INT (width - len), pos_rtx);
5691 /* POS may be less than 0 now, but we check for that below.
5692 Note that it can only be less than 0 if GET_CODE (inner) != MEM. */
f76b9db2 5693 }
230d793d
RS
5694
5695 /* If INNER has a wider mode, make it smaller. If this is a constant
5696 extract, try to adjust the byte to point to the byte containing
5697 the value. */
d7cd794f
RK
5698 if (wanted_inner_mode != VOIDmode
5699 && GET_MODE_SIZE (wanted_inner_mode) < GET_MODE_SIZE (is_mode)
230d793d 5700 && ((GET_CODE (inner) == MEM
d7cd794f 5701 && (inner_mode == wanted_inner_mode
230d793d
RS
5702 || (! mode_dependent_address_p (XEXP (inner, 0))
5703 && ! MEM_VOLATILE_P (inner))))))
5704 {
5705 int offset = 0;
5706
5707 /* The computations below will be correct if the machine is big
5708 endian in both bits and bytes or little endian in bits and bytes.
5709 If it is mixed, we must adjust. */
5710
230d793d 5711 /* If bytes are big endian and we had a paradoxical SUBREG, we must
0f41302f 5712 adjust OFFSET to compensate. */
f76b9db2
ILT
5713 if (BYTES_BIG_ENDIAN
5714 && ! spans_byte
230d793d
RS
5715 && GET_MODE_SIZE (inner_mode) < GET_MODE_SIZE (is_mode))
5716 offset -= GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (inner_mode);
230d793d
RS
5717
5718 /* If this is a constant position, we can move to the desired byte. */
8999a12e 5719 if (pos_rtx == 0)
230d793d
RS
5720 {
5721 offset += pos / BITS_PER_UNIT;
d7cd794f 5722 pos %= GET_MODE_BITSIZE (wanted_inner_mode);
230d793d
RS
5723 }
5724
f76b9db2
ILT
5725 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN
5726 && ! spans_byte
d7cd794f 5727 && is_mode != wanted_inner_mode)
c6b3f1f2 5728 offset = (GET_MODE_SIZE (is_mode)
d7cd794f 5729 - GET_MODE_SIZE (wanted_inner_mode) - offset);
c6b3f1f2 5730
d7cd794f 5731 if (offset != 0 || inner_mode != wanted_inner_mode)
230d793d 5732 {
38a448ca
RH
5733 rtx newmem = gen_rtx_MEM (wanted_inner_mode,
5734 plus_constant (XEXP (inner, 0), offset));
230d793d
RS
5735 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (inner);
5736 MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (inner);
5737 MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (inner);
5738 inner = newmem;
5739 }
5740 }
5741
9e74dc41
RK
5742 /* If INNER is not memory, we can always get it into the proper mode. If we
5743 are changing its mode, POS must be a constant and smaller than the size
5744 of the new mode. */
230d793d 5745 else if (GET_CODE (inner) != MEM)
9e74dc41
RK
5746 {
5747 if (GET_MODE (inner) != wanted_inner_mode
5748 && (pos_rtx != 0
5749 || orig_pos + len > GET_MODE_BITSIZE (wanted_inner_mode)))
5750 return 0;
5751
5752 inner = force_to_mode (inner, wanted_inner_mode,
5753 pos_rtx
5754 || len + orig_pos >= HOST_BITS_PER_WIDE_INT
5755 ? GET_MODE_MASK (wanted_inner_mode)
5756 : (((HOST_WIDE_INT) 1 << len) - 1) << orig_pos,
5757 NULL_RTX, 0);
5758 }
230d793d
RS
5759
5760 /* Adjust mode of POS_RTX, if needed. If we want a wider mode, we
5761 have to zero extend. Otherwise, we can just use a SUBREG. */
8999a12e 5762 if (pos_rtx != 0
230d793d
RS
5763 && GET_MODE_SIZE (pos_mode) > GET_MODE_SIZE (GET_MODE (pos_rtx)))
5764 pos_rtx = gen_rtx_combine (ZERO_EXTEND, pos_mode, pos_rtx);
8999a12e 5765 else if (pos_rtx != 0
230d793d
RS
5766 && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx)))
5767 pos_rtx = gen_lowpart_for_combine (pos_mode, pos_rtx);
5768
8999a12e
RK
5769 /* Make POS_RTX unless we already have it and it is correct. If we don't
5770 have a POS_RTX but we do have an ORIG_POS_RTX, the latter must
0f41302f 5771 be a CONST_INT. */
8999a12e
RK
5772 if (pos_rtx == 0 && orig_pos_rtx != 0 && INTVAL (orig_pos_rtx) == pos)
5773 pos_rtx = orig_pos_rtx;
5774
5775 else if (pos_rtx == 0)
5f4f0e22 5776 pos_rtx = GEN_INT (pos);
230d793d
RS
5777
5778 /* Make the required operation. See if we can use existing rtx. */
5779 new = gen_rtx_combine (unsignedp ? ZERO_EXTRACT : SIGN_EXTRACT,
5f4f0e22 5780 extraction_mode, inner, GEN_INT (len), pos_rtx);
230d793d
RS
5781 if (! in_dest)
5782 new = gen_lowpart_for_combine (mode, new);
5783
5784 return new;
5785}
5786\f
71923da7
RK
5787/* See if X contains an ASHIFT of COUNT or more bits that can be commuted
5788 with any other operations in X. Return X without that shift if so. */
5789
5790static rtx
5791extract_left_shift (x, count)
5792 rtx x;
5793 int count;
5794{
5795 enum rtx_code code = GET_CODE (x);
5796 enum machine_mode mode = GET_MODE (x);
5797 rtx tem;
5798
5799 switch (code)
5800 {
5801 case ASHIFT:
5802 /* This is the shift itself. If it is wide enough, we will return
5803 either the value being shifted if the shift count is equal to
5804 COUNT or a shift for the difference. */
5805 if (GET_CODE (XEXP (x, 1)) == CONST_INT
5806 && INTVAL (XEXP (x, 1)) >= count)
5807 return simplify_shift_const (NULL_RTX, ASHIFT, mode, XEXP (x, 0),
5808 INTVAL (XEXP (x, 1)) - count);
5809 break;
5810
5811 case NEG: case NOT:
5812 if ((tem = extract_left_shift (XEXP (x, 0), count)) != 0)
0c1c8ea6 5813 return gen_unary (code, mode, mode, tem);
71923da7
RK
5814
5815 break;
5816
5817 case PLUS: case IOR: case XOR: case AND:
5818 /* If we can safely shift this constant and we find the inner shift,
5819 make a new operation. */
5820 if (GET_CODE (XEXP (x,1)) == CONST_INT
b729186a 5821 && (INTVAL (XEXP (x, 1)) & ((((HOST_WIDE_INT) 1 << count)) - 1)) == 0
71923da7
RK
5822 && (tem = extract_left_shift (XEXP (x, 0), count)) != 0)
5823 return gen_binary (code, mode, tem,
5824 GEN_INT (INTVAL (XEXP (x, 1)) >> count));
5825
5826 break;
e9a25f70
JL
5827
5828 default:
5829 break;
71923da7
RK
5830 }
5831
5832 return 0;
5833}
5834\f
230d793d
RS
5835/* Look at the expression rooted at X. Look for expressions
5836 equivalent to ZERO_EXTRACT, SIGN_EXTRACT, ZERO_EXTEND, SIGN_EXTEND.
5837 Form these expressions.
5838
5839 Return the new rtx, usually just X.
5840
5841 Also, for machines like the Vax that don't have logical shift insns,
5842 try to convert logical to arithmetic shift operations in cases where
5843 they are equivalent. This undoes the canonicalizations to logical
5844 shifts done elsewhere.
5845
5846 We try, as much as possible, to re-use rtl expressions to save memory.
5847
5848 IN_CODE says what kind of expression we are processing. Normally, it is
42495ca0
RK
5849 SET. In a memory address (inside a MEM, PLUS or minus, the latter two
5850 being kludges), it is MEM. When processing the arguments of a comparison
230d793d
RS
5851 or a COMPARE against zero, it is COMPARE. */
5852
5853static rtx
5854make_compound_operation (x, in_code)
5855 rtx x;
5856 enum rtx_code in_code;
5857{
5858 enum rtx_code code = GET_CODE (x);
5859 enum machine_mode mode = GET_MODE (x);
5860 int mode_width = GET_MODE_BITSIZE (mode);
71923da7 5861 rtx rhs, lhs;
230d793d 5862 enum rtx_code next_code;
f24ad0e4 5863 int i;
230d793d 5864 rtx new = 0;
280f58ba 5865 rtx tem;
230d793d
RS
5866 char *fmt;
5867
5868 /* Select the code to be used in recursive calls. Once we are inside an
5869 address, we stay there. If we have a comparison, set to COMPARE,
5870 but once inside, go back to our default of SET. */
5871
42495ca0 5872 next_code = (code == MEM || code == PLUS || code == MINUS ? MEM
230d793d
RS
5873 : ((code == COMPARE || GET_RTX_CLASS (code) == '<')
5874 && XEXP (x, 1) == const0_rtx) ? COMPARE
5875 : in_code == COMPARE ? SET : in_code);
5876
5877 /* Process depending on the code of this operation. If NEW is set
5878 non-zero, it will be returned. */
5879
5880 switch (code)
5881 {
5882 case ASHIFT:
230d793d
RS
5883 /* Convert shifts by constants into multiplications if inside
5884 an address. */
5885 if (in_code == MEM && GET_CODE (XEXP (x, 1)) == CONST_INT
5f4f0e22 5886 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
230d793d 5887 && INTVAL (XEXP (x, 1)) >= 0)
280f58ba
RK
5888 {
5889 new = make_compound_operation (XEXP (x, 0), next_code);
5890 new = gen_rtx_combine (MULT, mode, new,
5891 GEN_INT ((HOST_WIDE_INT) 1
5892 << INTVAL (XEXP (x, 1))));
5893 }
230d793d
RS
5894 break;
5895
5896 case AND:
5897 /* If the second operand is not a constant, we can't do anything
5898 with it. */
5899 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5900 break;
5901
5902 /* If the constant is a power of two minus one and the first operand
5903 is a logical right shift, make an extraction. */
5904 if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
5905 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
280f58ba
RK
5906 {
5907 new = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code);
5908 new = make_extraction (mode, new, 0, XEXP (XEXP (x, 0), 1), i, 1,
5909 0, in_code == COMPARE);
5910 }
dfbe1b2f 5911
230d793d
RS
5912 /* Same as previous, but for (subreg (lshiftrt ...)) in first op. */
5913 else if (GET_CODE (XEXP (x, 0)) == SUBREG
5914 && subreg_lowpart_p (XEXP (x, 0))
5915 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == LSHIFTRT
5916 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
280f58ba
RK
5917 {
5918 new = make_compound_operation (XEXP (SUBREG_REG (XEXP (x, 0)), 0),
5919 next_code);
2f99f437 5920 new = make_extraction (GET_MODE (SUBREG_REG (XEXP (x, 0))), new, 0,
280f58ba
RK
5921 XEXP (SUBREG_REG (XEXP (x, 0)), 1), i, 1,
5922 0, in_code == COMPARE);
5923 }
45620ed4 5924 /* Same as previous, but for (xor/ior (lshiftrt...) (lshiftrt...)). */
c2f9f64e
JW
5925 else if ((GET_CODE (XEXP (x, 0)) == XOR
5926 || GET_CODE (XEXP (x, 0)) == IOR)
5927 && GET_CODE (XEXP (XEXP (x, 0), 0)) == LSHIFTRT
5928 && GET_CODE (XEXP (XEXP (x, 0), 1)) == LSHIFTRT
5929 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
5930 {
5931 /* Apply the distributive law, and then try to make extractions. */
5932 new = gen_rtx_combine (GET_CODE (XEXP (x, 0)), mode,
38a448ca
RH
5933 gen_rtx_AND (mode, XEXP (XEXP (x, 0), 0),
5934 XEXP (x, 1)),
5935 gen_rtx_AND (mode, XEXP (XEXP (x, 0), 1),
5936 XEXP (x, 1)));
c2f9f64e
JW
5937 new = make_compound_operation (new, in_code);
5938 }
a7c99304
RK
5939
5940 /* If we are have (and (rotate X C) M) and C is larger than the number
5941 of bits in M, this is an extraction. */
5942
5943 else if (GET_CODE (XEXP (x, 0)) == ROTATE
5944 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
5945 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0
5946 && i <= INTVAL (XEXP (XEXP (x, 0), 1)))
280f58ba
RK
5947 {
5948 new = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code);
5949 new = make_extraction (mode, new,
5950 (GET_MODE_BITSIZE (mode)
5951 - INTVAL (XEXP (XEXP (x, 0), 1))),
5952 NULL_RTX, i, 1, 0, in_code == COMPARE);
5953 }
a7c99304
RK
5954
5955 /* On machines without logical shifts, if the operand of the AND is
230d793d
RS
5956 a logical shift and our mask turns off all the propagated sign
5957 bits, we can replace the logical shift with an arithmetic shift. */
d0ab8cd3
RK
5958 else if (ashr_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
5959 && (lshr_optab->handlers[(int) mode].insn_code
5960 == CODE_FOR_nothing)
230d793d
RS
5961 && GET_CODE (XEXP (x, 0)) == LSHIFTRT
5962 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
5963 && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
5f4f0e22
CH
5964 && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT
5965 && mode_width <= HOST_BITS_PER_WIDE_INT)
230d793d 5966 {
5f4f0e22 5967 unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
230d793d
RS
5968
5969 mask >>= INTVAL (XEXP (XEXP (x, 0), 1));
5970 if ((INTVAL (XEXP (x, 1)) & ~mask) == 0)
5971 SUBST (XEXP (x, 0),
280f58ba
RK
5972 gen_rtx_combine (ASHIFTRT, mode,
5973 make_compound_operation (XEXP (XEXP (x, 0), 0),
5974 next_code),
230d793d
RS
5975 XEXP (XEXP (x, 0), 1)));
5976 }
5977
5978 /* If the constant is one less than a power of two, this might be
5979 representable by an extraction even if no shift is present.
5980 If it doesn't end up being a ZERO_EXTEND, we will ignore it unless
5981 we are in a COMPARE. */
5982 else if ((i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
280f58ba
RK
5983 new = make_extraction (mode,
5984 make_compound_operation (XEXP (x, 0),
5985 next_code),
5986 0, NULL_RTX, i, 1, 0, in_code == COMPARE);
230d793d
RS
5987
5988 /* If we are in a comparison and this is an AND with a power of two,
5989 convert this into the appropriate bit extract. */
5990 else if (in_code == COMPARE
5991 && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0)
280f58ba
RK
5992 new = make_extraction (mode,
5993 make_compound_operation (XEXP (x, 0),
5994 next_code),
5995 i, NULL_RTX, 1, 1, 0, 1);
230d793d
RS
5996
5997 break;
5998
5999 case LSHIFTRT:
6000 /* If the sign bit is known to be zero, replace this with an
6001 arithmetic shift. */
d0ab8cd3
RK
6002 if (ashr_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing
6003 && lshr_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
5f4f0e22 6004 && mode_width <= HOST_BITS_PER_WIDE_INT
951553af 6005 && (nonzero_bits (XEXP (x, 0), mode) & (1 << (mode_width - 1))) == 0)
230d793d 6006 {
280f58ba
RK
6007 new = gen_rtx_combine (ASHIFTRT, mode,
6008 make_compound_operation (XEXP (x, 0),
6009 next_code),
6010 XEXP (x, 1));
230d793d
RS
6011 break;
6012 }
6013
0f41302f 6014 /* ... fall through ... */
230d793d
RS
6015
6016 case ASHIFTRT:
71923da7
RK
6017 lhs = XEXP (x, 0);
6018 rhs = XEXP (x, 1);
6019
230d793d
RS
6020 /* If we have (ashiftrt (ashift foo C1) C2) with C2 >= C1,
6021 this is a SIGN_EXTRACT. */
71923da7
RK
6022 if (GET_CODE (rhs) == CONST_INT
6023 && GET_CODE (lhs) == ASHIFT
6024 && GET_CODE (XEXP (lhs, 1)) == CONST_INT
6025 && INTVAL (rhs) >= INTVAL (XEXP (lhs, 1)))
280f58ba 6026 {
71923da7 6027 new = make_compound_operation (XEXP (lhs, 0), next_code);
280f58ba 6028 new = make_extraction (mode, new,
71923da7
RK
6029 INTVAL (rhs) - INTVAL (XEXP (lhs, 1)),
6030 NULL_RTX, mode_width - INTVAL (rhs),
d0ab8cd3
RK
6031 code == LSHIFTRT, 0, in_code == COMPARE);
6032 }
6033
71923da7
RK
6034 /* See if we have operations between an ASHIFTRT and an ASHIFT.
6035 If so, try to merge the shifts into a SIGN_EXTEND. We could
6036 also do this for some cases of SIGN_EXTRACT, but it doesn't
6037 seem worth the effort; the case checked for occurs on Alpha. */
6038
6039 if (GET_RTX_CLASS (GET_CODE (lhs)) != 'o'
6040 && ! (GET_CODE (lhs) == SUBREG
6041 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (lhs))) == 'o'))
6042 && GET_CODE (rhs) == CONST_INT
6043 && INTVAL (rhs) < HOST_BITS_PER_WIDE_INT
6044 && (new = extract_left_shift (lhs, INTVAL (rhs))) != 0)
6045 new = make_extraction (mode, make_compound_operation (new, next_code),
6046 0, NULL_RTX, mode_width - INTVAL (rhs),
6047 code == LSHIFTRT, 0, in_code == COMPARE);
6048
230d793d 6049 break;
280f58ba
RK
6050
6051 case SUBREG:
6052 /* Call ourselves recursively on the inner expression. If we are
6053 narrowing the object and it has a different RTL code from
6054 what it originally did, do this SUBREG as a force_to_mode. */
6055
0a5cbff6 6056 tem = make_compound_operation (SUBREG_REG (x), in_code);
280f58ba
RK
6057 if (GET_CODE (tem) != GET_CODE (SUBREG_REG (x))
6058 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (tem))
6059 && subreg_lowpart_p (x))
0a5cbff6
RK
6060 {
6061 rtx newer = force_to_mode (tem, mode,
e3d616e3 6062 GET_MODE_MASK (mode), NULL_RTX, 0);
0a5cbff6
RK
6063
6064 /* If we have something other than a SUBREG, we might have
6065 done an expansion, so rerun outselves. */
6066 if (GET_CODE (newer) != SUBREG)
6067 newer = make_compound_operation (newer, in_code);
6068
6069 return newer;
6070 }
6f28d3e9
RH
6071
6072 /* If this is a paradoxical subreg, and the new code is a sign or
6073 zero extension, omit the subreg and widen the extension. If it
6074 is a regular subreg, we can still get rid of the subreg by not
6075 widening so much, or in fact removing the extension entirely. */
6076 if ((GET_CODE (tem) == SIGN_EXTEND
6077 || GET_CODE (tem) == ZERO_EXTEND)
6078 && subreg_lowpart_p (x))
6079 {
6080 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (tem))
6081 || (GET_MODE_SIZE (mode) >
6082 GET_MODE_SIZE (GET_MODE (XEXP (tem, 0)))))
6083 tem = gen_rtx_combine (GET_CODE (tem), mode, XEXP (tem, 0));
6084 else
6085 tem = gen_lowpart_for_combine (mode, XEXP (tem, 0));
6086 return tem;
6087 }
e9a25f70
JL
6088 break;
6089
6090 default:
6091 break;
230d793d
RS
6092 }
6093
6094 if (new)
6095 {
df62f951 6096 x = gen_lowpart_for_combine (mode, new);
230d793d
RS
6097 code = GET_CODE (x);
6098 }
6099
6100 /* Now recursively process each operand of this operation. */
6101 fmt = GET_RTX_FORMAT (code);
6102 for (i = 0; i < GET_RTX_LENGTH (code); i++)
6103 if (fmt[i] == 'e')
6104 {
6105 new = make_compound_operation (XEXP (x, i), next_code);
6106 SUBST (XEXP (x, i), new);
6107 }
6108
6109 return x;
6110}
6111\f
6112/* Given M see if it is a value that would select a field of bits
6113 within an item, but not the entire word. Return -1 if not.
6114 Otherwise, return the starting position of the field, where 0 is the
6115 low-order bit.
6116
6117 *PLEN is set to the length of the field. */
6118
6119static int
6120get_pos_from_mask (m, plen)
5f4f0e22 6121 unsigned HOST_WIDE_INT m;
230d793d
RS
6122 int *plen;
6123{
6124 /* Get the bit number of the first 1 bit from the right, -1 if none. */
6125 int pos = exact_log2 (m & - m);
6126
6127 if (pos < 0)
6128 return -1;
6129
6130 /* Now shift off the low-order zero bits and see if we have a power of
6131 two minus 1. */
6132 *plen = exact_log2 ((m >> pos) + 1);
6133
6134 if (*plen <= 0)
6135 return -1;
6136
6137 return pos;
6138}
6139\f
6139ff20
RK
6140/* See if X can be simplified knowing that we will only refer to it in
6141 MODE and will only refer to those bits that are nonzero in MASK.
6142 If other bits are being computed or if masking operations are done
6143 that select a superset of the bits in MASK, they can sometimes be
6144 ignored.
6145
6146 Return a possibly simplified expression, but always convert X to
6147 MODE. If X is a CONST_INT, AND the CONST_INT with MASK.
dfbe1b2f
RK
6148
6149 Also, if REG is non-zero and X is a register equal in value to REG,
e3d616e3
RK
6150 replace X with REG.
6151
6152 If JUST_SELECT is nonzero, don't optimize by noticing that bits in MASK
6153 are all off in X. This is used when X will be complemented, by either
180b8e4b 6154 NOT, NEG, or XOR. */
dfbe1b2f
RK
6155
6156static rtx
e3d616e3 6157force_to_mode (x, mode, mask, reg, just_select)
dfbe1b2f
RK
6158 rtx x;
6159 enum machine_mode mode;
6139ff20 6160 unsigned HOST_WIDE_INT mask;
dfbe1b2f 6161 rtx reg;
e3d616e3 6162 int just_select;
dfbe1b2f
RK
6163{
6164 enum rtx_code code = GET_CODE (x);
180b8e4b 6165 int next_select = just_select || code == XOR || code == NOT || code == NEG;
ef026f91
RS
6166 enum machine_mode op_mode;
6167 unsigned HOST_WIDE_INT fuller_mask, nonzero;
6139ff20
RK
6168 rtx op0, op1, temp;
6169
132d2040
RK
6170 /* If this is a CALL or ASM_OPERANDS, don't do anything. Some of the
6171 code below will do the wrong thing since the mode of such an
be3d27d6
CI
6172 expression is VOIDmode.
6173
6174 Also do nothing if X is a CLOBBER; this can happen if X was
6175 the return value from a call to gen_lowpart_for_combine. */
6176 if (code == CALL || code == ASM_OPERANDS || code == CLOBBER)
246e00f2
RK
6177 return x;
6178
6139ff20
RK
6179 /* We want to perform the operation is its present mode unless we know
6180 that the operation is valid in MODE, in which case we do the operation
6181 in MODE. */
1c75dfa4
RK
6182 op_mode = ((GET_MODE_CLASS (mode) == GET_MODE_CLASS (GET_MODE (x))
6183 && code_to_optab[(int) code] != 0
ef026f91
RS
6184 && (code_to_optab[(int) code]->handlers[(int) mode].insn_code
6185 != CODE_FOR_nothing))
6186 ? mode : GET_MODE (x));
e3d616e3 6187
aa988991
RS
6188 /* It is not valid to do a right-shift in a narrower mode
6189 than the one it came in with. */
6190 if ((code == LSHIFTRT || code == ASHIFTRT)
6191 && GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (GET_MODE (x)))
6192 op_mode = GET_MODE (x);
ef026f91
RS
6193
6194 /* Truncate MASK to fit OP_MODE. */
6195 if (op_mode)
6196 mask &= GET_MODE_MASK (op_mode);
6139ff20
RK
6197
6198 /* When we have an arithmetic operation, or a shift whose count we
6199 do not know, we need to assume that all bit the up to the highest-order
6200 bit in MASK will be needed. This is how we form such a mask. */
ef026f91
RS
6201 if (op_mode)
6202 fuller_mask = (GET_MODE_BITSIZE (op_mode) >= HOST_BITS_PER_WIDE_INT
6203 ? GET_MODE_MASK (op_mode)
6204 : ((HOST_WIDE_INT) 1 << (floor_log2 (mask) + 1)) - 1);
6205 else
6206 fuller_mask = ~ (HOST_WIDE_INT) 0;
6207
6208 /* Determine what bits of X are guaranteed to be (non)zero. */
6209 nonzero = nonzero_bits (x, mode);
6139ff20
RK
6210
6211 /* If none of the bits in X are needed, return a zero. */
e3d616e3 6212 if (! just_select && (nonzero & mask) == 0)
6139ff20 6213 return const0_rtx;
dfbe1b2f 6214
6139ff20
RK
6215 /* If X is a CONST_INT, return a new one. Do this here since the
6216 test below will fail. */
6217 if (GET_CODE (x) == CONST_INT)
ceb7983c
RK
6218 {
6219 HOST_WIDE_INT cval = INTVAL (x) & mask;
6220 int width = GET_MODE_BITSIZE (mode);
6221
6222 /* If MODE is narrower that HOST_WIDE_INT and CVAL is a negative
6223 number, sign extend it. */
6224 if (width > 0 && width < HOST_BITS_PER_WIDE_INT
6225 && (cval & ((HOST_WIDE_INT) 1 << (width - 1))) != 0)
6226 cval |= (HOST_WIDE_INT) -1 << width;
6227
6228 return GEN_INT (cval);
6229 }
dfbe1b2f 6230
180b8e4b
RK
6231 /* If X is narrower than MODE and we want all the bits in X's mode, just
6232 get X in the proper mode. */
6233 if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (mode)
6234 && (GET_MODE_MASK (GET_MODE (x)) & ~ mask) == 0)
dfbe1b2f
RK
6235 return gen_lowpart_for_combine (mode, x);
6236
71923da7
RK
6237 /* If we aren't changing the mode, X is not a SUBREG, and all zero bits in
6238 MASK are already known to be zero in X, we need not do anything. */
6239 if (GET_MODE (x) == mode && code != SUBREG && (~ mask & nonzero) == 0)
6139ff20
RK
6240 return x;
6241
dfbe1b2f
RK
6242 switch (code)
6243 {
6139ff20
RK
6244 case CLOBBER:
6245 /* If X is a (clobber (const_int)), return it since we know we are
0f41302f 6246 generating something that won't match. */
6139ff20
RK
6247 return x;
6248
6139ff20
RK
6249 case USE:
6250 /* X is a (use (mem ..)) that was made from a bit-field extraction that
6251 spanned the boundary of the MEM. If we are now masking so it is
6252 within that boundary, we don't need the USE any more. */
f76b9db2
ILT
6253 if (! BITS_BIG_ENDIAN
6254 && (mask & ~ GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
e3d616e3 6255 return force_to_mode (XEXP (x, 0), mode, mask, reg, next_select);
f76b9db2 6256 break;
6139ff20 6257
dfbe1b2f
RK
6258 case SIGN_EXTEND:
6259 case ZERO_EXTEND:
6260 case ZERO_EXTRACT:
6261 case SIGN_EXTRACT:
6262 x = expand_compound_operation (x);
6263 if (GET_CODE (x) != code)
e3d616e3 6264 return force_to_mode (x, mode, mask, reg, next_select);
dfbe1b2f
RK
6265 break;
6266
6267 case REG:
6268 if (reg != 0 && (rtx_equal_p (get_last_value (reg), x)
6269 || rtx_equal_p (reg, get_last_value (x))))
6270 x = reg;
6271 break;
6272
dfbe1b2f 6273 case SUBREG:
6139ff20 6274 if (subreg_lowpart_p (x)
180b8e4b
RK
6275 /* We can ignore the effect of this SUBREG if it narrows the mode or
6276 if the constant masks to zero all the bits the mode doesn't
6277 have. */
6139ff20
RK
6278 && ((GET_MODE_SIZE (GET_MODE (x))
6279 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
6139ff20
RK
6280 || (0 == (mask
6281 & GET_MODE_MASK (GET_MODE (x))
180b8e4b 6282 & ~ GET_MODE_MASK (GET_MODE (SUBREG_REG (x)))))))
e3d616e3 6283 return force_to_mode (SUBREG_REG (x), mode, mask, reg, next_select);
dfbe1b2f
RK
6284 break;
6285
6286 case AND:
6139ff20
RK
6287 /* If this is an AND with a constant, convert it into an AND
6288 whose constant is the AND of that constant with MASK. If it
6289 remains an AND of MASK, delete it since it is redundant. */
dfbe1b2f 6290
2ca9ae17 6291 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
dfbe1b2f 6292 {
6139ff20
RK
6293 x = simplify_and_const_int (x, op_mode, XEXP (x, 0),
6294 mask & INTVAL (XEXP (x, 1)));
dfbe1b2f
RK
6295
6296 /* If X is still an AND, see if it is an AND with a mask that
71923da7
RK
6297 is just some low-order bits. If so, and it is MASK, we don't
6298 need it. */
dfbe1b2f
RK
6299
6300 if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT
6139ff20 6301 && INTVAL (XEXP (x, 1)) == mask)
dfbe1b2f 6302 x = XEXP (x, 0);
d0ab8cd3 6303
71923da7
RK
6304 /* If it remains an AND, try making another AND with the bits
6305 in the mode mask that aren't in MASK turned on. If the
6306 constant in the AND is wide enough, this might make a
6307 cheaper constant. */
6308
6309 if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT
2ca9ae17
JW
6310 && GET_MODE_MASK (GET_MODE (x)) != mask
6311 && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT)
71923da7
RK
6312 {
6313 HOST_WIDE_INT cval = (INTVAL (XEXP (x, 1))
6314 | (GET_MODE_MASK (GET_MODE (x)) & ~ mask));
6315 int width = GET_MODE_BITSIZE (GET_MODE (x));
6316 rtx y;
6317
6318 /* If MODE is narrower that HOST_WIDE_INT and CVAL is a negative
6319 number, sign extend it. */
6320 if (width > 0 && width < HOST_BITS_PER_WIDE_INT
6321 && (cval & ((HOST_WIDE_INT) 1 << (width - 1))) != 0)
6322 cval |= (HOST_WIDE_INT) -1 << width;
6323
6324 y = gen_binary (AND, GET_MODE (x), XEXP (x, 0), GEN_INT (cval));
6325 if (rtx_cost (y, SET) < rtx_cost (x, SET))
6326 x = y;
6327 }
6328
d0ab8cd3 6329 break;
dfbe1b2f
RK
6330 }
6331
6139ff20 6332 goto binop;
dfbe1b2f
RK
6333
6334 case PLUS:
6139ff20
RK
6335 /* In (and (plus FOO C1) M), if M is a mask that just turns off
6336 low-order bits (as in an alignment operation) and FOO is already
6337 aligned to that boundary, mask C1 to that boundary as well.
6338 This may eliminate that PLUS and, later, the AND. */
9fa6d012
TG
6339
6340 {
6341 int width = GET_MODE_BITSIZE (mode);
6342 unsigned HOST_WIDE_INT smask = mask;
6343
6344 /* If MODE is narrower than HOST_WIDE_INT and mask is a negative
6345 number, sign extend it. */
6346
6347 if (width < HOST_BITS_PER_WIDE_INT
6348 && (smask & ((HOST_WIDE_INT) 1 << (width - 1))) != 0)
6349 smask |= (HOST_WIDE_INT) -1 << width;
6350
6351 if (GET_CODE (XEXP (x, 1)) == CONST_INT
0e9ff885
DM
6352 && exact_log2 (- smask) >= 0)
6353 {
6354#ifdef STACK_BIAS
6355 if (STACK_BIAS
6356 && (XEXP (x, 0) == stack_pointer_rtx
6357 || XEXP (x, 0) == frame_pointer_rtx))
6358 {
6359 int sp_alignment = STACK_BOUNDARY / BITS_PER_UNIT;
6360 unsigned HOST_WIDE_INT sp_mask = GET_MODE_MASK (mode);
6361
6362 sp_mask &= ~ (sp_alignment - 1);
6363 if ((sp_mask & ~ mask) == 0
6364 && ((INTVAL (XEXP (x, 1)) - STACK_BIAS) & ~ mask) != 0)
6365 return force_to_mode (plus_constant (XEXP (x, 0),
6366 ((INTVAL (XEXP (x, 1)) -
6367 STACK_BIAS) & mask)
6368 + STACK_BIAS),
6369 mode, mask, reg, next_select);
6370 }
6371#endif
6372 if ((nonzero_bits (XEXP (x, 0), mode) & ~ mask) == 0
6373 && (INTVAL (XEXP (x, 1)) & ~ mask) != 0)
6374 return force_to_mode (plus_constant (XEXP (x, 0),
6375 INTVAL (XEXP (x, 1)) & mask),
6376 mode, mask, reg, next_select);
6377 }
9fa6d012 6378 }
6139ff20 6379
0f41302f 6380 /* ... fall through ... */
6139ff20 6381
dfbe1b2f
RK
6382 case MINUS:
6383 case MULT:
6139ff20
RK
6384 /* For PLUS, MINUS and MULT, we need any bits less significant than the
6385 most significant bit in MASK since carries from those bits will
6386 affect the bits we are interested in. */
6387 mask = fuller_mask;
6388 goto binop;
6389
dfbe1b2f
RK
6390 case IOR:
6391 case XOR:
6139ff20
RK
6392 /* If X is (ior (lshiftrt FOO C1) C2), try to commute the IOR and
6393 LSHIFTRT so we end up with an (and (lshiftrt (ior ...) ...) ...)
6394 operation which may be a bitfield extraction. Ensure that the
6395 constant we form is not wider than the mode of X. */
6396
6397 if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
6398 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
6399 && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
6400 && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT
6401 && GET_CODE (XEXP (x, 1)) == CONST_INT
6402 && ((INTVAL (XEXP (XEXP (x, 0), 1))
6403 + floor_log2 (INTVAL (XEXP (x, 1))))
6404 < GET_MODE_BITSIZE (GET_MODE (x)))
6405 && (INTVAL (XEXP (x, 1))
01c82bbb 6406 & ~ nonzero_bits (XEXP (x, 0), GET_MODE (x))) == 0)
6139ff20
RK
6407 {
6408 temp = GEN_INT ((INTVAL (XEXP (x, 1)) & mask)
6409 << INTVAL (XEXP (XEXP (x, 0), 1)));
6410 temp = gen_binary (GET_CODE (x), GET_MODE (x),
6411 XEXP (XEXP (x, 0), 0), temp);
d4d2b13f
RK
6412 x = gen_binary (LSHIFTRT, GET_MODE (x), temp,
6413 XEXP (XEXP (x, 0), 1));
e3d616e3 6414 return force_to_mode (x, mode, mask, reg, next_select);
6139ff20
RK
6415 }
6416
6417 binop:
dfbe1b2f 6418 /* For most binary operations, just propagate into the operation and
6139ff20
RK
6419 change the mode if we have an operation of that mode. */
6420
e3d616e3
RK
6421 op0 = gen_lowpart_for_combine (op_mode,
6422 force_to_mode (XEXP (x, 0), mode, mask,
6423 reg, next_select));
6424 op1 = gen_lowpart_for_combine (op_mode,
6425 force_to_mode (XEXP (x, 1), mode, mask,
6426 reg, next_select));
6139ff20 6427
2dd484ed
RK
6428 /* If OP1 is a CONST_INT and X is an IOR or XOR, clear bits outside
6429 MASK since OP1 might have been sign-extended but we never want
6430 to turn on extra bits, since combine might have previously relied
6431 on them being off. */
6432 if (GET_CODE (op1) == CONST_INT && (code == IOR || code == XOR)
6433 && (INTVAL (op1) & mask) != 0)
6434 op1 = GEN_INT (INTVAL (op1) & mask);
6435
6139ff20
RK
6436 if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
6437 x = gen_binary (code, op_mode, op0, op1);
d0ab8cd3 6438 break;
dfbe1b2f
RK
6439
6440 case ASHIFT:
dfbe1b2f 6441 /* For left shifts, do the same, but just for the first operand.
f6785026
RK
6442 However, we cannot do anything with shifts where we cannot
6443 guarantee that the counts are smaller than the size of the mode
6444 because such a count will have a different meaning in a
6139ff20 6445 wider mode. */
f6785026
RK
6446
6447 if (! (GET_CODE (XEXP (x, 1)) == CONST_INT
6139ff20 6448 && INTVAL (XEXP (x, 1)) >= 0
f6785026
RK
6449 && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (mode))
6450 && ! (GET_MODE (XEXP (x, 1)) != VOIDmode
6451 && (nonzero_bits (XEXP (x, 1), GET_MODE (XEXP (x, 1)))
adb7a1cb 6452 < (unsigned HOST_WIDE_INT) GET_MODE_BITSIZE (mode))))
f6785026
RK
6453 break;
6454
6139ff20
RK
6455 /* If the shift count is a constant and we can do arithmetic in
6456 the mode of the shift, refine which bits we need. Otherwise, use the
6457 conservative form of the mask. */
6458 if (GET_CODE (XEXP (x, 1)) == CONST_INT
6459 && INTVAL (XEXP (x, 1)) >= 0
6460 && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (op_mode)
6461 && GET_MODE_BITSIZE (op_mode) <= HOST_BITS_PER_WIDE_INT)
6462 mask >>= INTVAL (XEXP (x, 1));
6463 else
6464 mask = fuller_mask;
6465
6466 op0 = gen_lowpart_for_combine (op_mode,
6467 force_to_mode (XEXP (x, 0), op_mode,
e3d616e3 6468 mask, reg, next_select));
6139ff20
RK
6469
6470 if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0))
6471 x = gen_binary (code, op_mode, op0, XEXP (x, 1));
d0ab8cd3 6472 break;
dfbe1b2f
RK
6473
6474 case LSHIFTRT:
1347292b
JW
6475 /* Here we can only do something if the shift count is a constant,
6476 this shift constant is valid for the host, and we can do arithmetic
6477 in OP_MODE. */
dfbe1b2f
RK
6478
6479 if (GET_CODE (XEXP (x, 1)) == CONST_INT
1347292b 6480 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
6139ff20 6481 && GET_MODE_BITSIZE (op_mode) <= HOST_BITS_PER_WIDE_INT)
d0ab8cd3 6482 {
6139ff20
RK
6483 rtx inner = XEXP (x, 0);
6484
6485 /* Select the mask of the bits we need for the shift operand. */
6486 mask <<= INTVAL (XEXP (x, 1));
d0ab8cd3 6487
6139ff20
RK
6488 /* We can only change the mode of the shift if we can do arithmetic
6489 in the mode of the shift and MASK is no wider than the width of
6490 OP_MODE. */
6491 if (GET_MODE_BITSIZE (op_mode) > HOST_BITS_PER_WIDE_INT
6492 || (mask & ~ GET_MODE_MASK (op_mode)) != 0)
d0ab8cd3
RK
6493 op_mode = GET_MODE (x);
6494
e3d616e3 6495 inner = force_to_mode (inner, op_mode, mask, reg, next_select);
6139ff20
RK
6496
6497 if (GET_MODE (x) != op_mode || inner != XEXP (x, 0))
6498 x = gen_binary (LSHIFTRT, op_mode, inner, XEXP (x, 1));
d0ab8cd3 6499 }
6139ff20
RK
6500
6501 /* If we have (and (lshiftrt FOO C1) C2) where the combination of the
6502 shift and AND produces only copies of the sign bit (C2 is one less
6503 than a power of two), we can do this with just a shift. */
6504
6505 if (GET_CODE (x) == LSHIFTRT
6506 && GET_CODE (XEXP (x, 1)) == CONST_INT
6507 && ((INTVAL (XEXP (x, 1))
6508 + num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0))))
6509 >= GET_MODE_BITSIZE (GET_MODE (x)))
6510 && exact_log2 (mask + 1) >= 0
6511 && (num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
6512 >= exact_log2 (mask + 1)))
6513 x = gen_binary (LSHIFTRT, GET_MODE (x), XEXP (x, 0),
6514 GEN_INT (GET_MODE_BITSIZE (GET_MODE (x))
6515 - exact_log2 (mask + 1)));
d0ab8cd3
RK
6516 break;
6517
6518 case ASHIFTRT:
6139ff20
RK
6519 /* If we are just looking for the sign bit, we don't need this shift at
6520 all, even if it has a variable count. */
9bf22b75
RK
6521 if (GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
6522 && (mask == ((HOST_WIDE_INT) 1
6523 << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
e3d616e3 6524 return force_to_mode (XEXP (x, 0), mode, mask, reg, next_select);
6139ff20
RK
6525
6526 /* If this is a shift by a constant, get a mask that contains those bits
6527 that are not copies of the sign bit. We then have two cases: If
6528 MASK only includes those bits, this can be a logical shift, which may
6529 allow simplifications. If MASK is a single-bit field not within
6530 those bits, we are requesting a copy of the sign bit and hence can
6531 shift the sign bit to the appropriate location. */
6532
6533 if (GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) >= 0
6534 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
6535 {
6536 int i = -1;
6537
b69960ac
RK
6538 /* If the considered data is wider then HOST_WIDE_INT, we can't
6539 represent a mask for all its bits in a single scalar.
6540 But we only care about the lower bits, so calculate these. */
6541
6a11342f 6542 if (GET_MODE_BITSIZE (GET_MODE (x)) > HOST_BITS_PER_WIDE_INT)
b69960ac 6543 {
0f41302f 6544 nonzero = ~ (HOST_WIDE_INT) 0;
b69960ac
RK
6545
6546 /* GET_MODE_BITSIZE (GET_MODE (x)) - INTVAL (XEXP (x, 1))
6547 is the number of bits a full-width mask would have set.
6548 We need only shift if these are fewer than nonzero can
6549 hold. If not, we must keep all bits set in nonzero. */
6550
6551 if (GET_MODE_BITSIZE (GET_MODE (x)) - INTVAL (XEXP (x, 1))
6552 < HOST_BITS_PER_WIDE_INT)
6553 nonzero >>= INTVAL (XEXP (x, 1))
6554 + HOST_BITS_PER_WIDE_INT
6555 - GET_MODE_BITSIZE (GET_MODE (x)) ;
6556 }
6557 else
6558 {
6559 nonzero = GET_MODE_MASK (GET_MODE (x));
6560 nonzero >>= INTVAL (XEXP (x, 1));
6561 }
6139ff20
RK
6562
6563 if ((mask & ~ nonzero) == 0
6564 || (i = exact_log2 (mask)) >= 0)
6565 {
6566 x = simplify_shift_const
6567 (x, LSHIFTRT, GET_MODE (x), XEXP (x, 0),
6568 i < 0 ? INTVAL (XEXP (x, 1))
6569 : GET_MODE_BITSIZE (GET_MODE (x)) - 1 - i);
6570
6571 if (GET_CODE (x) != ASHIFTRT)
e3d616e3 6572 return force_to_mode (x, mode, mask, reg, next_select);
6139ff20
RK
6573 }
6574 }
6575
6576 /* If MASK is 1, convert this to a LSHIFTRT. This can be done
6577 even if the shift count isn't a constant. */
6578 if (mask == 1)
6579 x = gen_binary (LSHIFTRT, GET_MODE (x), XEXP (x, 0), XEXP (x, 1));
6580
d0ab8cd3 6581 /* If this is a sign-extension operation that just affects bits
4c002f29
RK
6582 we don't care about, remove it. Be sure the call above returned
6583 something that is still a shift. */
d0ab8cd3 6584
4c002f29
RK
6585 if ((GET_CODE (x) == LSHIFTRT || GET_CODE (x) == ASHIFTRT)
6586 && GET_CODE (XEXP (x, 1)) == CONST_INT
d0ab8cd3 6587 && INTVAL (XEXP (x, 1)) >= 0
6139ff20
RK
6588 && (INTVAL (XEXP (x, 1))
6589 <= GET_MODE_BITSIZE (GET_MODE (x)) - (floor_log2 (mask) + 1))
d0ab8cd3
RK
6590 && GET_CODE (XEXP (x, 0)) == ASHIFT
6591 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
6592 && INTVAL (XEXP (XEXP (x, 0), 1)) == INTVAL (XEXP (x, 1)))
e3d616e3
RK
6593 return force_to_mode (XEXP (XEXP (x, 0), 0), mode, mask,
6594 reg, next_select);
6139ff20 6595
dfbe1b2f
RK
6596 break;
6597
6139ff20
RK
6598 case ROTATE:
6599 case ROTATERT:
6600 /* If the shift count is constant and we can do computations
6601 in the mode of X, compute where the bits we care about are.
6602 Otherwise, we can't do anything. Don't change the mode of
6603 the shift or propagate MODE into the shift, though. */
6604 if (GET_CODE (XEXP (x, 1)) == CONST_INT
6605 && INTVAL (XEXP (x, 1)) >= 0)
6606 {
6607 temp = simplify_binary_operation (code == ROTATE ? ROTATERT : ROTATE,
6608 GET_MODE (x), GEN_INT (mask),
6609 XEXP (x, 1));
7d171a1e 6610 if (temp && GET_CODE(temp) == CONST_INT)
6139ff20
RK
6611 SUBST (XEXP (x, 0),
6612 force_to_mode (XEXP (x, 0), GET_MODE (x),
e3d616e3 6613 INTVAL (temp), reg, next_select));
6139ff20
RK
6614 }
6615 break;
6616
dfbe1b2f 6617 case NEG:
180b8e4b
RK
6618 /* If we just want the low-order bit, the NEG isn't needed since it
6619 won't change the low-order bit. */
6620 if (mask == 1)
6621 return force_to_mode (XEXP (x, 0), mode, mask, reg, just_select);
6622
6139ff20
RK
6623 /* We need any bits less significant than the most significant bit in
6624 MASK since carries from those bits will affect the bits we are
6625 interested in. */
6626 mask = fuller_mask;
6627 goto unop;
6628
dfbe1b2f 6629 case NOT:
6139ff20
RK
6630 /* (not FOO) is (xor FOO CONST), so if FOO is an LSHIFTRT, we can do the
6631 same as the XOR case above. Ensure that the constant we form is not
6632 wider than the mode of X. */
6633
6634 if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
6635 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
6636 && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
6637 && (INTVAL (XEXP (XEXP (x, 0), 1)) + floor_log2 (mask)
6638 < GET_MODE_BITSIZE (GET_MODE (x)))
6639 && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT)
6640 {
6641 temp = GEN_INT (mask << INTVAL (XEXP (XEXP (x, 0), 1)));
6642 temp = gen_binary (XOR, GET_MODE (x), XEXP (XEXP (x, 0), 0), temp);
6643 x = gen_binary (LSHIFTRT, GET_MODE (x), temp, XEXP (XEXP (x, 0), 1));
6644
e3d616e3 6645 return force_to_mode (x, mode, mask, reg, next_select);
6139ff20
RK
6646 }
6647
f82da7d2
JW
6648 /* (and (not FOO) CONST) is (not (or FOO (not CONST))), so we must
6649 use the full mask inside the NOT. */
6650 mask = fuller_mask;
6651
6139ff20 6652 unop:
e3d616e3
RK
6653 op0 = gen_lowpart_for_combine (op_mode,
6654 force_to_mode (XEXP (x, 0), mode, mask,
6655 reg, next_select));
6139ff20 6656 if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0))
0c1c8ea6 6657 x = gen_unary (code, op_mode, op_mode, op0);
6139ff20
RK
6658 break;
6659
6660 case NE:
6661 /* (and (ne FOO 0) CONST) can be (and FOO CONST) if CONST is included
3aceff0d 6662 in STORE_FLAG_VALUE and FOO has a single bit that might be nonzero,
1a6ec070 6663 which is equal to STORE_FLAG_VALUE. */
3aceff0d
RK
6664 if ((mask & ~ STORE_FLAG_VALUE) == 0 && XEXP (x, 1) == const0_rtx
6665 && exact_log2 (nonzero_bits (XEXP (x, 0), mode)) >= 0
1a6ec070 6666 && nonzero_bits (XEXP (x, 0), mode) == STORE_FLAG_VALUE)
e3d616e3 6667 return force_to_mode (XEXP (x, 0), mode, mask, reg, next_select);
6139ff20 6668
d0ab8cd3
RK
6669 break;
6670
6671 case IF_THEN_ELSE:
6672 /* We have no way of knowing if the IF_THEN_ELSE can itself be
6673 written in a narrower mode. We play it safe and do not do so. */
6674
6675 SUBST (XEXP (x, 1),
6676 gen_lowpart_for_combine (GET_MODE (x),
6677 force_to_mode (XEXP (x, 1), mode,
e3d616e3 6678 mask, reg, next_select)));
d0ab8cd3
RK
6679 SUBST (XEXP (x, 2),
6680 gen_lowpart_for_combine (GET_MODE (x),
6681 force_to_mode (XEXP (x, 2), mode,
e3d616e3 6682 mask, reg,next_select)));
d0ab8cd3 6683 break;
e9a25f70
JL
6684
6685 default:
6686 break;
dfbe1b2f
RK
6687 }
6688
d0ab8cd3 6689 /* Ensure we return a value of the proper mode. */
dfbe1b2f
RK
6690 return gen_lowpart_for_combine (mode, x);
6691}
6692\f
abe6e52f
RK
6693/* Return nonzero if X is an expression that has one of two values depending on
6694 whether some other value is zero or nonzero. In that case, we return the
6695 value that is being tested, *PTRUE is set to the value if the rtx being
6696 returned has a nonzero value, and *PFALSE is set to the other alternative.
6697
6698 If we return zero, we set *PTRUE and *PFALSE to X. */
6699
6700static rtx
6701if_then_else_cond (x, ptrue, pfalse)
6702 rtx x;
6703 rtx *ptrue, *pfalse;
6704{
6705 enum machine_mode mode = GET_MODE (x);
6706 enum rtx_code code = GET_CODE (x);
6707 int size = GET_MODE_BITSIZE (mode);
6708 rtx cond0, cond1, true0, true1, false0, false1;
6709 unsigned HOST_WIDE_INT nz;
6710
6711 /* If this is a unary operation whose operand has one of two values, apply
6712 our opcode to compute those values. */
6713 if (GET_RTX_CLASS (code) == '1'
6714 && (cond0 = if_then_else_cond (XEXP (x, 0), &true0, &false0)) != 0)
6715 {
0c1c8ea6
RK
6716 *ptrue = gen_unary (code, mode, GET_MODE (XEXP (x, 0)), true0);
6717 *pfalse = gen_unary (code, mode, GET_MODE (XEXP (x, 0)), false0);
abe6e52f
RK
6718 return cond0;
6719 }
6720
3a19aabc 6721 /* If this is a COMPARE, do nothing, since the IF_THEN_ELSE we would
ddd5a7c1 6722 make can't possibly match and would suppress other optimizations. */
3a19aabc
RK
6723 else if (code == COMPARE)
6724 ;
6725
abe6e52f
RK
6726 /* If this is a binary operation, see if either side has only one of two
6727 values. If either one does or if both do and they are conditional on
6728 the same value, compute the new true and false values. */
6729 else if (GET_RTX_CLASS (code) == 'c' || GET_RTX_CLASS (code) == '2'
6730 || GET_RTX_CLASS (code) == '<')
6731 {
6732 cond0 = if_then_else_cond (XEXP (x, 0), &true0, &false0);
6733 cond1 = if_then_else_cond (XEXP (x, 1), &true1, &false1);
6734
6735 if ((cond0 != 0 || cond1 != 0)
6736 && ! (cond0 != 0 && cond1 != 0 && ! rtx_equal_p (cond0, cond1)))
6737 {
987e845a
JW
6738 /* If if_then_else_cond returned zero, then true/false are the
6739 same rtl. We must copy one of them to prevent invalid rtl
6740 sharing. */
6741 if (cond0 == 0)
6742 true0 = copy_rtx (true0);
6743 else if (cond1 == 0)
6744 true1 = copy_rtx (true1);
6745
abe6e52f
RK
6746 *ptrue = gen_binary (code, mode, true0, true1);
6747 *pfalse = gen_binary (code, mode, false0, false1);
6748 return cond0 ? cond0 : cond1;
6749 }
9210df58 6750
9210df58 6751 /* See if we have PLUS, IOR, XOR, MINUS or UMAX, where one of the
0802d516
RK
6752 operands is zero when the other is non-zero, and vice-versa,
6753 and STORE_FLAG_VALUE is 1 or -1. */
9210df58 6754
0802d516
RK
6755 if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
6756 && (code == PLUS || code == IOR || code == XOR || code == MINUS
9210df58
RK
6757 || code == UMAX)
6758 && GET_CODE (XEXP (x, 0)) == MULT && GET_CODE (XEXP (x, 1)) == MULT)
6759 {
6760 rtx op0 = XEXP (XEXP (x, 0), 1);
6761 rtx op1 = XEXP (XEXP (x, 1), 1);
6762
6763 cond0 = XEXP (XEXP (x, 0), 0);
6764 cond1 = XEXP (XEXP (x, 1), 0);
6765
6766 if (GET_RTX_CLASS (GET_CODE (cond0)) == '<'
6767 && GET_RTX_CLASS (GET_CODE (cond1)) == '<'
6768 && reversible_comparison_p (cond1)
6769 && ((GET_CODE (cond0) == reverse_condition (GET_CODE (cond1))
6770 && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 0))
6771 && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 1)))
6772 || ((swap_condition (GET_CODE (cond0))
6773 == reverse_condition (GET_CODE (cond1)))
6774 && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 1))
6775 && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 0))))
6776 && ! side_effects_p (x))
6777 {
6778 *ptrue = gen_binary (MULT, mode, op0, const_true_rtx);
6779 *pfalse = gen_binary (MULT, mode,
6780 (code == MINUS
0c1c8ea6 6781 ? gen_unary (NEG, mode, mode, op1) : op1),
9210df58
RK
6782 const_true_rtx);
6783 return cond0;
6784 }
6785 }
6786
6787 /* Similarly for MULT, AND and UMIN, execpt that for these the result
6788 is always zero. */
0802d516
RK
6789 if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
6790 && (code == MULT || code == AND || code == UMIN)
9210df58
RK
6791 && GET_CODE (XEXP (x, 0)) == MULT && GET_CODE (XEXP (x, 1)) == MULT)
6792 {
6793 cond0 = XEXP (XEXP (x, 0), 0);
6794 cond1 = XEXP (XEXP (x, 1), 0);
6795
6796 if (GET_RTX_CLASS (GET_CODE (cond0)) == '<'
6797 && GET_RTX_CLASS (GET_CODE (cond1)) == '<'
6798 && reversible_comparison_p (cond1)
6799 && ((GET_CODE (cond0) == reverse_condition (GET_CODE (cond1))
6800 && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 0))
6801 && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 1)))
6802 || ((swap_condition (GET_CODE (cond0))
6803 == reverse_condition (GET_CODE (cond1)))
6804 && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 1))
6805 && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 0))))
6806 && ! side_effects_p (x))
6807 {
6808 *ptrue = *pfalse = const0_rtx;
6809 return cond0;
6810 }
6811 }
abe6e52f
RK
6812 }
6813
6814 else if (code == IF_THEN_ELSE)
6815 {
6816 /* If we have IF_THEN_ELSE already, extract the condition and
6817 canonicalize it if it is NE or EQ. */
6818 cond0 = XEXP (x, 0);
6819 *ptrue = XEXP (x, 1), *pfalse = XEXP (x, 2);
6820 if (GET_CODE (cond0) == NE && XEXP (cond0, 1) == const0_rtx)
6821 return XEXP (cond0, 0);
6822 else if (GET_CODE (cond0) == EQ && XEXP (cond0, 1) == const0_rtx)
6823 {
6824 *ptrue = XEXP (x, 2), *pfalse = XEXP (x, 1);
6825 return XEXP (cond0, 0);
6826 }
6827 else
6828 return cond0;
6829 }
6830
6831 /* If X is a normal SUBREG with both inner and outer modes integral,
6832 we can narrow both the true and false values of the inner expression,
6833 if there is a condition. */
6834 else if (code == SUBREG && GET_MODE_CLASS (mode) == MODE_INT
6835 && GET_MODE_CLASS (GET_MODE (SUBREG_REG (x))) == MODE_INT
6836 && GET_MODE_SIZE (mode) <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
6837 && 0 != (cond0 = if_then_else_cond (SUBREG_REG (x),
6838 &true0, &false0)))
6839 {
00244e6b
RK
6840 *ptrue = force_to_mode (true0, mode, GET_MODE_MASK (mode), NULL_RTX, 0);
6841 *pfalse
6842 = force_to_mode (false0, mode, GET_MODE_MASK (mode), NULL_RTX, 0);
abe6e52f 6843
abe6e52f
RK
6844 return cond0;
6845 }
6846
6847 /* If X is a constant, this isn't special and will cause confusions
6848 if we treat it as such. Likewise if it is equivalent to a constant. */
6849 else if (CONSTANT_P (x)
6850 || ((cond0 = get_last_value (x)) != 0 && CONSTANT_P (cond0)))
6851 ;
6852
6853 /* If X is known to be either 0 or -1, those are the true and
6854 false values when testing X. */
6855 else if (num_sign_bit_copies (x, mode) == size)
6856 {
6857 *ptrue = constm1_rtx, *pfalse = const0_rtx;
6858 return x;
6859 }
6860
6861 /* Likewise for 0 or a single bit. */
6862 else if (exact_log2 (nz = nonzero_bits (x, mode)) >= 0)
6863 {
6864 *ptrue = GEN_INT (nz), *pfalse = const0_rtx;
6865 return x;
6866 }
6867
6868 /* Otherwise fail; show no condition with true and false values the same. */
6869 *ptrue = *pfalse = x;
6870 return 0;
6871}
6872\f
1a26b032
RK
6873/* Return the value of expression X given the fact that condition COND
6874 is known to be true when applied to REG as its first operand and VAL
6875 as its second. X is known to not be shared and so can be modified in
6876 place.
6877
6878 We only handle the simplest cases, and specifically those cases that
6879 arise with IF_THEN_ELSE expressions. */
6880
6881static rtx
6882known_cond (x, cond, reg, val)
6883 rtx x;
6884 enum rtx_code cond;
6885 rtx reg, val;
6886{
6887 enum rtx_code code = GET_CODE (x);
f24ad0e4 6888 rtx temp;
1a26b032
RK
6889 char *fmt;
6890 int i, j;
6891
6892 if (side_effects_p (x))
6893 return x;
6894
6895 if (cond == EQ && rtx_equal_p (x, reg))
6896 return val;
6897
6898 /* If X is (abs REG) and we know something about REG's relationship
6899 with zero, we may be able to simplify this. */
6900
6901 if (code == ABS && rtx_equal_p (XEXP (x, 0), reg) && val == const0_rtx)
6902 switch (cond)
6903 {
6904 case GE: case GT: case EQ:
6905 return XEXP (x, 0);
6906 case LT: case LE:
0c1c8ea6
RK
6907 return gen_unary (NEG, GET_MODE (XEXP (x, 0)), GET_MODE (XEXP (x, 0)),
6908 XEXP (x, 0));
e9a25f70
JL
6909 default:
6910 break;
1a26b032
RK
6911 }
6912
6913 /* The only other cases we handle are MIN, MAX, and comparisons if the
6914 operands are the same as REG and VAL. */
6915
6916 else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == 'c')
6917 {
6918 if (rtx_equal_p (XEXP (x, 0), val))
6919 cond = swap_condition (cond), temp = val, val = reg, reg = temp;
6920
6921 if (rtx_equal_p (XEXP (x, 0), reg) && rtx_equal_p (XEXP (x, 1), val))
6922 {
6923 if (GET_RTX_CLASS (code) == '<')
6924 return (comparison_dominates_p (cond, code) ? const_true_rtx
6925 : (comparison_dominates_p (cond,
6926 reverse_condition (code))
6927 ? const0_rtx : x));
6928
6929 else if (code == SMAX || code == SMIN
6930 || code == UMIN || code == UMAX)
6931 {
6932 int unsignedp = (code == UMIN || code == UMAX);
6933
6934 if (code == SMAX || code == UMAX)
6935 cond = reverse_condition (cond);
6936
6937 switch (cond)
6938 {
6939 case GE: case GT:
6940 return unsignedp ? x : XEXP (x, 1);
6941 case LE: case LT:
6942 return unsignedp ? x : XEXP (x, 0);
6943 case GEU: case GTU:
6944 return unsignedp ? XEXP (x, 1) : x;
6945 case LEU: case LTU:
6946 return unsignedp ? XEXP (x, 0) : x;
e9a25f70
JL
6947 default:
6948 break;
1a26b032
RK
6949 }
6950 }
6951 }
6952 }
6953
6954 fmt = GET_RTX_FORMAT (code);
6955 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6956 {
6957 if (fmt[i] == 'e')
6958 SUBST (XEXP (x, i), known_cond (XEXP (x, i), cond, reg, val));
6959 else if (fmt[i] == 'E')
6960 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
6961 SUBST (XVECEXP (x, i, j), known_cond (XVECEXP (x, i, j),
6962 cond, reg, val));
6963 }
6964
6965 return x;
6966}
6967\f
e11fa86f
RK
6968/* See if X and Y are equal for the purposes of seeing if we can rewrite an
6969 assignment as a field assignment. */
6970
6971static int
6972rtx_equal_for_field_assignment_p (x, y)
6973 rtx x;
6974 rtx y;
6975{
e11fa86f
RK
6976 if (x == y || rtx_equal_p (x, y))
6977 return 1;
6978
6979 if (x == 0 || y == 0 || GET_MODE (x) != GET_MODE (y))
6980 return 0;
6981
6982 /* Check for a paradoxical SUBREG of a MEM compared with the MEM.
6983 Note that all SUBREGs of MEM are paradoxical; otherwise they
6984 would have been rewritten. */
6985 if (GET_CODE (x) == MEM && GET_CODE (y) == SUBREG
6986 && GET_CODE (SUBREG_REG (y)) == MEM
6987 && rtx_equal_p (SUBREG_REG (y),
6988 gen_lowpart_for_combine (GET_MODE (SUBREG_REG (y)), x)))
6989 return 1;
6990
6991 if (GET_CODE (y) == MEM && GET_CODE (x) == SUBREG
6992 && GET_CODE (SUBREG_REG (x)) == MEM
6993 && rtx_equal_p (SUBREG_REG (x),
6994 gen_lowpart_for_combine (GET_MODE (SUBREG_REG (x)), y)))
6995 return 1;
6996
9ec36da5
JL
6997 /* We used to see if get_last_value of X and Y were the same but that's
6998 not correct. In one direction, we'll cause the assignment to have
6999 the wrong destination and in the case, we'll import a register into this
7000 insn that might have already have been dead. So fail if none of the
7001 above cases are true. */
7002 return 0;
e11fa86f
RK
7003}
7004\f
230d793d
RS
7005/* See if X, a SET operation, can be rewritten as a bit-field assignment.
7006 Return that assignment if so.
7007
7008 We only handle the most common cases. */
7009
7010static rtx
7011make_field_assignment (x)
7012 rtx x;
7013{
7014 rtx dest = SET_DEST (x);
7015 rtx src = SET_SRC (x);
dfbe1b2f 7016 rtx assign;
e11fa86f 7017 rtx rhs, lhs;
5f4f0e22
CH
7018 HOST_WIDE_INT c1;
7019 int pos, len;
dfbe1b2f
RK
7020 rtx other;
7021 enum machine_mode mode;
230d793d
RS
7022
7023 /* If SRC was (and (not (ashift (const_int 1) POS)) DEST), this is
7024 a clear of a one-bit field. We will have changed it to
7025 (and (rotate (const_int -2) POS) DEST), so check for that. Also check
7026 for a SUBREG. */
7027
7028 if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == ROTATE
7029 && GET_CODE (XEXP (XEXP (src, 0), 0)) == CONST_INT
7030 && INTVAL (XEXP (XEXP (src, 0), 0)) == -2
e11fa86f 7031 && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
230d793d 7032 {
8999a12e 7033 assign = make_extraction (VOIDmode, dest, 0, XEXP (XEXP (src, 0), 1),
230d793d 7034 1, 1, 1, 0);
76184def 7035 if (assign != 0)
38a448ca 7036 return gen_rtx_SET (VOIDmode, assign, const0_rtx);
76184def 7037 return x;
230d793d
RS
7038 }
7039
7040 else if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == SUBREG
7041 && subreg_lowpart_p (XEXP (src, 0))
7042 && (GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
7043 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (src, 0)))))
7044 && GET_CODE (SUBREG_REG (XEXP (src, 0))) == ROTATE
7045 && INTVAL (XEXP (SUBREG_REG (XEXP (src, 0)), 0)) == -2
e11fa86f 7046 && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
230d793d 7047 {
8999a12e 7048 assign = make_extraction (VOIDmode, dest, 0,
230d793d
RS
7049 XEXP (SUBREG_REG (XEXP (src, 0)), 1),
7050 1, 1, 1, 0);
76184def 7051 if (assign != 0)
38a448ca 7052 return gen_rtx_SET (VOIDmode, assign, const0_rtx);
76184def 7053 return x;
230d793d
RS
7054 }
7055
9dd11dcb 7056 /* If SRC is (ior (ashift (const_int 1) POS) DEST), this is a set of a
230d793d
RS
7057 one-bit field. */
7058 else if (GET_CODE (src) == IOR && GET_CODE (XEXP (src, 0)) == ASHIFT
7059 && XEXP (XEXP (src, 0), 0) == const1_rtx
e11fa86f 7060 && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
230d793d 7061 {
8999a12e 7062 assign = make_extraction (VOIDmode, dest, 0, XEXP (XEXP (src, 0), 1),
230d793d 7063 1, 1, 1, 0);
76184def 7064 if (assign != 0)
38a448ca 7065 return gen_rtx_SET (VOIDmode, assign, const1_rtx);
76184def 7066 return x;
230d793d
RS
7067 }
7068
dfbe1b2f 7069 /* The other case we handle is assignments into a constant-position
9dd11dcb 7070 field. They look like (ior/xor (and DEST C1) OTHER). If C1 represents
dfbe1b2f
RK
7071 a mask that has all one bits except for a group of zero bits and
7072 OTHER is known to have zeros where C1 has ones, this is such an
7073 assignment. Compute the position and length from C1. Shift OTHER
7074 to the appropriate position, force it to the required mode, and
7075 make the extraction. Check for the AND in both operands. */
7076
9dd11dcb 7077 if (GET_CODE (src) != IOR && GET_CODE (src) != XOR)
e11fa86f
RK
7078 return x;
7079
7080 rhs = expand_compound_operation (XEXP (src, 0));
7081 lhs = expand_compound_operation (XEXP (src, 1));
7082
7083 if (GET_CODE (rhs) == AND
7084 && GET_CODE (XEXP (rhs, 1)) == CONST_INT
7085 && rtx_equal_for_field_assignment_p (XEXP (rhs, 0), dest))
7086 c1 = INTVAL (XEXP (rhs, 1)), other = lhs;
7087 else if (GET_CODE (lhs) == AND
7088 && GET_CODE (XEXP (lhs, 1)) == CONST_INT
7089 && rtx_equal_for_field_assignment_p (XEXP (lhs, 0), dest))
7090 c1 = INTVAL (XEXP (lhs, 1)), other = rhs;
dfbe1b2f
RK
7091 else
7092 return x;
230d793d 7093
e11fa86f 7094 pos = get_pos_from_mask ((~ c1) & GET_MODE_MASK (GET_MODE (dest)), &len);
dfbe1b2f 7095 if (pos < 0 || pos + len > GET_MODE_BITSIZE (GET_MODE (dest))
e5e809f4
JL
7096 || GET_MODE_BITSIZE (GET_MODE (dest)) > HOST_BITS_PER_WIDE_INT
7097 || (c1 & nonzero_bits (other, GET_MODE (dest))) != 0)
dfbe1b2f 7098 return x;
230d793d 7099
5f4f0e22 7100 assign = make_extraction (VOIDmode, dest, pos, NULL_RTX, len, 1, 1, 0);
76184def
DE
7101 if (assign == 0)
7102 return x;
230d793d 7103
dfbe1b2f
RK
7104 /* The mode to use for the source is the mode of the assignment, or of
7105 what is inside a possible STRICT_LOW_PART. */
7106 mode = (GET_CODE (assign) == STRICT_LOW_PART
7107 ? GET_MODE (XEXP (assign, 0)) : GET_MODE (assign));
230d793d 7108
dfbe1b2f
RK
7109 /* Shift OTHER right POS places and make it the source, restricting it
7110 to the proper length and mode. */
230d793d 7111
5f4f0e22
CH
7112 src = force_to_mode (simplify_shift_const (NULL_RTX, LSHIFTRT,
7113 GET_MODE (src), other, pos),
6139ff20
RK
7114 mode,
7115 GET_MODE_BITSIZE (mode) >= HOST_BITS_PER_WIDE_INT
7116 ? GET_MODE_MASK (mode)
7117 : ((HOST_WIDE_INT) 1 << len) - 1,
e3d616e3 7118 dest, 0);
230d793d 7119
dfbe1b2f 7120 return gen_rtx_combine (SET, VOIDmode, assign, src);
230d793d
RS
7121}
7122\f
7123/* See if X is of the form (+ (* a c) (* b c)) and convert to (* (+ a b) c)
7124 if so. */
7125
7126static rtx
7127apply_distributive_law (x)
7128 rtx x;
7129{
7130 enum rtx_code code = GET_CODE (x);
7131 rtx lhs, rhs, other;
7132 rtx tem;
7133 enum rtx_code inner_code;
7134
d8a8a4da
RS
7135 /* Distributivity is not true for floating point.
7136 It can change the value. So don't do it.
7137 -- rms and moshier@world.std.com. */
3ad2180a 7138 if (FLOAT_MODE_P (GET_MODE (x)))
d8a8a4da
RS
7139 return x;
7140
230d793d
RS
7141 /* The outer operation can only be one of the following: */
7142 if (code != IOR && code != AND && code != XOR
7143 && code != PLUS && code != MINUS)
7144 return x;
7145
7146 lhs = XEXP (x, 0), rhs = XEXP (x, 1);
7147
0f41302f
MS
7148 /* If either operand is a primitive we can't do anything, so get out
7149 fast. */
230d793d 7150 if (GET_RTX_CLASS (GET_CODE (lhs)) == 'o'
dfbe1b2f 7151 || GET_RTX_CLASS (GET_CODE (rhs)) == 'o')
230d793d
RS
7152 return x;
7153
7154 lhs = expand_compound_operation (lhs);
7155 rhs = expand_compound_operation (rhs);
7156 inner_code = GET_CODE (lhs);
7157 if (inner_code != GET_CODE (rhs))
7158 return x;
7159
7160 /* See if the inner and outer operations distribute. */
7161 switch (inner_code)
7162 {
7163 case LSHIFTRT:
7164 case ASHIFTRT:
7165 case AND:
7166 case IOR:
7167 /* These all distribute except over PLUS. */
7168 if (code == PLUS || code == MINUS)
7169 return x;
7170 break;
7171
7172 case MULT:
7173 if (code != PLUS && code != MINUS)
7174 return x;
7175 break;
7176
7177 case ASHIFT:
45620ed4 7178 /* This is also a multiply, so it distributes over everything. */
230d793d
RS
7179 break;
7180
7181 case SUBREG:
dfbe1b2f
RK
7182 /* Non-paradoxical SUBREGs distributes over all operations, provided
7183 the inner modes and word numbers are the same, this is an extraction
2b4bd1bc
JW
7184 of a low-order part, we don't convert an fp operation to int or
7185 vice versa, and we would not be converting a single-word
dfbe1b2f 7186 operation into a multi-word operation. The latter test is not
2b4bd1bc 7187 required, but it prevents generating unneeded multi-word operations.
dfbe1b2f
RK
7188 Some of the previous tests are redundant given the latter test, but
7189 are retained because they are required for correctness.
7190
7191 We produce the result slightly differently in this case. */
7192
7193 if (GET_MODE (SUBREG_REG (lhs)) != GET_MODE (SUBREG_REG (rhs))
7194 || SUBREG_WORD (lhs) != SUBREG_WORD (rhs)
7195 || ! subreg_lowpart_p (lhs)
2b4bd1bc
JW
7196 || (GET_MODE_CLASS (GET_MODE (lhs))
7197 != GET_MODE_CLASS (GET_MODE (SUBREG_REG (lhs))))
dfbe1b2f 7198 || (GET_MODE_SIZE (GET_MODE (lhs))
8af24e26 7199 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs))))
dfbe1b2f 7200 || GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs))) > UNITS_PER_WORD)
230d793d
RS
7201 return x;
7202
7203 tem = gen_binary (code, GET_MODE (SUBREG_REG (lhs)),
7204 SUBREG_REG (lhs), SUBREG_REG (rhs));
7205 return gen_lowpart_for_combine (GET_MODE (x), tem);
7206
7207 default:
7208 return x;
7209 }
7210
7211 /* Set LHS and RHS to the inner operands (A and B in the example
7212 above) and set OTHER to the common operand (C in the example).
7213 These is only one way to do this unless the inner operation is
7214 commutative. */
7215 if (GET_RTX_CLASS (inner_code) == 'c'
7216 && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 0)))
7217 other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 1);
7218 else if (GET_RTX_CLASS (inner_code) == 'c'
7219 && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 1)))
7220 other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 0);
7221 else if (GET_RTX_CLASS (inner_code) == 'c'
7222 && rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 0)))
7223 other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 1);
7224 else if (rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 1)))
7225 other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 0);
7226 else
7227 return x;
7228
7229 /* Form the new inner operation, seeing if it simplifies first. */
7230 tem = gen_binary (code, GET_MODE (x), lhs, rhs);
7231
7232 /* There is one exception to the general way of distributing:
7233 (a ^ b) | (a ^ c) -> (~a) & (b ^ c) */
7234 if (code == XOR && inner_code == IOR)
7235 {
7236 inner_code = AND;
0c1c8ea6 7237 other = gen_unary (NOT, GET_MODE (x), GET_MODE (x), other);
230d793d
RS
7238 }
7239
7240 /* We may be able to continuing distributing the result, so call
7241 ourselves recursively on the inner operation before forming the
7242 outer operation, which we return. */
7243 return gen_binary (inner_code, GET_MODE (x),
7244 apply_distributive_law (tem), other);
7245}
7246\f
7247/* We have X, a logical `and' of VAROP with the constant CONSTOP, to be done
7248 in MODE.
7249
7250 Return an equivalent form, if different from X. Otherwise, return X. If
7251 X is zero, we are to always construct the equivalent form. */
7252
7253static rtx
7254simplify_and_const_int (x, mode, varop, constop)
7255 rtx x;
7256 enum machine_mode mode;
7257 rtx varop;
5f4f0e22 7258 unsigned HOST_WIDE_INT constop;
230d793d 7259{
951553af 7260 unsigned HOST_WIDE_INT nonzero;
9fa6d012 7261 int width = GET_MODE_BITSIZE (mode);
42301240 7262 int i;
230d793d 7263
6139ff20
RK
7264 /* Simplify VAROP knowing that we will be only looking at some of the
7265 bits in it. */
e3d616e3 7266 varop = force_to_mode (varop, mode, constop, NULL_RTX, 0);
230d793d 7267
6139ff20
RK
7268 /* If VAROP is a CLOBBER, we will fail so return it; if it is a
7269 CONST_INT, we are done. */
7270 if (GET_CODE (varop) == CLOBBER || GET_CODE (varop) == CONST_INT)
7271 return varop;
230d793d 7272
fc06d7aa
RK
7273 /* See what bits may be nonzero in VAROP. Unlike the general case of
7274 a call to nonzero_bits, here we don't care about bits outside
7275 MODE. */
7276
7277 nonzero = nonzero_bits (varop, mode) & GET_MODE_MASK (mode);
230d793d 7278
9fa6d012
TG
7279 /* If this would be an entire word for the target, but is not for
7280 the host, then sign-extend on the host so that the number will look
7281 the same way on the host that it would on the target.
7282
7283 For example, when building a 64 bit alpha hosted 32 bit sparc
7284 targeted compiler, then we want the 32 bit unsigned value -1 to be
7285 represented as a 64 bit value -1, and not as 0x00000000ffffffff.
7286 The later confuses the sparc backend. */
7287
7288 if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT && BITS_PER_WORD == width
7289 && (nonzero & ((HOST_WIDE_INT) 1 << (width - 1))))
7290 nonzero |= ((HOST_WIDE_INT) (-1) << width);
7291
230d793d 7292 /* Turn off all bits in the constant that are known to already be zero.
951553af 7293 Thus, if the AND isn't needed at all, we will have CONSTOP == NONZERO_BITS
230d793d
RS
7294 which is tested below. */
7295
951553af 7296 constop &= nonzero;
230d793d
RS
7297
7298 /* If we don't have any bits left, return zero. */
7299 if (constop == 0)
7300 return const0_rtx;
7301
42301240
RK
7302 /* If VAROP is a NEG of something known to be zero or 1 and CONSTOP is
7303 a power of two, we can replace this with a ASHIFT. */
7304 if (GET_CODE (varop) == NEG && nonzero_bits (XEXP (varop, 0), mode) == 1
7305 && (i = exact_log2 (constop)) >= 0)
7306 return simplify_shift_const (NULL_RTX, ASHIFT, mode, XEXP (varop, 0), i);
7307
6139ff20
RK
7308 /* If VAROP is an IOR or XOR, apply the AND to both branches of the IOR
7309 or XOR, then try to apply the distributive law. This may eliminate
7310 operations if either branch can be simplified because of the AND.
7311 It may also make some cases more complex, but those cases probably
7312 won't match a pattern either with or without this. */
7313
7314 if (GET_CODE (varop) == IOR || GET_CODE (varop) == XOR)
7315 return
7316 gen_lowpart_for_combine
7317 (mode,
7318 apply_distributive_law
7319 (gen_binary (GET_CODE (varop), GET_MODE (varop),
7320 simplify_and_const_int (NULL_RTX, GET_MODE (varop),
7321 XEXP (varop, 0), constop),
7322 simplify_and_const_int (NULL_RTX, GET_MODE (varop),
7323 XEXP (varop, 1), constop))));
7324
230d793d
RS
7325 /* Get VAROP in MODE. Try to get a SUBREG if not. Don't make a new SUBREG
7326 if we already had one (just check for the simplest cases). */
7327 if (x && GET_CODE (XEXP (x, 0)) == SUBREG
7328 && GET_MODE (XEXP (x, 0)) == mode
7329 && SUBREG_REG (XEXP (x, 0)) == varop)
7330 varop = XEXP (x, 0);
7331 else
7332 varop = gen_lowpart_for_combine (mode, varop);
7333
0f41302f 7334 /* If we can't make the SUBREG, try to return what we were given. */
230d793d
RS
7335 if (GET_CODE (varop) == CLOBBER)
7336 return x ? x : varop;
7337
7338 /* If we are only masking insignificant bits, return VAROP. */
951553af 7339 if (constop == nonzero)
230d793d
RS
7340 x = varop;
7341
7342 /* Otherwise, return an AND. See how much, if any, of X we can use. */
7343 else if (x == 0 || GET_CODE (x) != AND || GET_MODE (x) != mode)
6139ff20 7344 x = gen_binary (AND, mode, varop, GEN_INT (constop));
230d793d
RS
7345
7346 else
7347 {
7348 if (GET_CODE (XEXP (x, 1)) != CONST_INT
7349 || INTVAL (XEXP (x, 1)) != constop)
5f4f0e22 7350 SUBST (XEXP (x, 1), GEN_INT (constop));
230d793d
RS
7351
7352 SUBST (XEXP (x, 0), varop);
7353 }
7354
7355 return x;
7356}
7357\f
b3728b0e
JW
7358/* We let num_sign_bit_copies recur into nonzero_bits as that is useful.
7359 We don't let nonzero_bits recur into num_sign_bit_copies, because that
7360 is less useful. We can't allow both, because that results in exponential
956d6950 7361 run time recursion. There is a nullstone testcase that triggered
b3728b0e
JW
7362 this. This macro avoids accidental uses of num_sign_bit_copies. */
7363#define num_sign_bit_copies()
7364
230d793d
RS
7365/* Given an expression, X, compute which bits in X can be non-zero.
7366 We don't care about bits outside of those defined in MODE.
7367
7368 For most X this is simply GET_MODE_MASK (GET_MODE (MODE)), but if X is
7369 a shift, AND, or zero_extract, we can do better. */
7370
5f4f0e22 7371static unsigned HOST_WIDE_INT
951553af 7372nonzero_bits (x, mode)
230d793d
RS
7373 rtx x;
7374 enum machine_mode mode;
7375{
951553af
RK
7376 unsigned HOST_WIDE_INT nonzero = GET_MODE_MASK (mode);
7377 unsigned HOST_WIDE_INT inner_nz;
230d793d
RS
7378 enum rtx_code code;
7379 int mode_width = GET_MODE_BITSIZE (mode);
7380 rtx tem;
7381
1c75dfa4
RK
7382 /* For floating-point values, assume all bits are needed. */
7383 if (FLOAT_MODE_P (GET_MODE (x)) || FLOAT_MODE_P (mode))
7384 return nonzero;
7385
230d793d
RS
7386 /* If X is wider than MODE, use its mode instead. */
7387 if (GET_MODE_BITSIZE (GET_MODE (x)) > mode_width)
7388 {
7389 mode = GET_MODE (x);
951553af 7390 nonzero = GET_MODE_MASK (mode);
230d793d
RS
7391 mode_width = GET_MODE_BITSIZE (mode);
7392 }
7393
5f4f0e22 7394 if (mode_width > HOST_BITS_PER_WIDE_INT)
230d793d
RS
7395 /* Our only callers in this case look for single bit values. So
7396 just return the mode mask. Those tests will then be false. */
951553af 7397 return nonzero;
230d793d 7398
8baf60bb 7399#ifndef WORD_REGISTER_OPERATIONS
c6965c0f 7400 /* If MODE is wider than X, but both are a single word for both the host
0840fd91
RK
7401 and target machines, we can compute this from which bits of the
7402 object might be nonzero in its own mode, taking into account the fact
7403 that on many CISC machines, accessing an object in a wider mode
7404 causes the high-order bits to become undefined. So they are
7405 not known to be zero. */
7406
7407 if (GET_MODE (x) != VOIDmode && GET_MODE (x) != mode
7408 && GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD
7409 && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
c6965c0f 7410 && GET_MODE_BITSIZE (mode) > GET_MODE_BITSIZE (GET_MODE (x)))
0840fd91
RK
7411 {
7412 nonzero &= nonzero_bits (x, GET_MODE (x));
7413 nonzero |= GET_MODE_MASK (mode) & ~ GET_MODE_MASK (GET_MODE (x));
7414 return nonzero;
7415 }
7416#endif
7417
230d793d
RS
7418 code = GET_CODE (x);
7419 switch (code)
7420 {
7421 case REG:
320dd7a7
RK
7422#ifdef POINTERS_EXTEND_UNSIGNED
7423 /* If pointers extend unsigned and this is a pointer in Pmode, say that
7424 all the bits above ptr_mode are known to be zero. */
7425 if (POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
7426 && REGNO_POINTER_FLAG (REGNO (x)))
7427 nonzero &= GET_MODE_MASK (ptr_mode);
7428#endif
7429
b0d71df9
RK
7430#ifdef STACK_BOUNDARY
7431 /* If this is the stack pointer, we may know something about its
7432 alignment. If PUSH_ROUNDING is defined, it is possible for the
230d793d
RS
7433 stack to be momentarily aligned only to that amount, so we pick
7434 the least alignment. */
7435
ee49a9c7
JW
7436 /* We can't check for arg_pointer_rtx here, because it is not
7437 guaranteed to have as much alignment as the stack pointer.
7438 In particular, in the Irix6 n64 ABI, the stack has 128 bit
7439 alignment but the argument pointer has only 64 bit alignment. */
7440
0e9ff885
DM
7441 if ((x == frame_pointer_rtx
7442 || x == stack_pointer_rtx
7443 || x == hard_frame_pointer_rtx
7444 || (REGNO (x) >= FIRST_VIRTUAL_REGISTER
7445 && REGNO (x) <= LAST_VIRTUAL_REGISTER))
7446#ifdef STACK_BIAS
7447 && !STACK_BIAS
7448#endif
7449 )
230d793d 7450 {
b0d71df9 7451 int sp_alignment = STACK_BOUNDARY / BITS_PER_UNIT;
230d793d
RS
7452
7453#ifdef PUSH_ROUNDING
91102d5a 7454 if (REGNO (x) == STACK_POINTER_REGNUM)
b0d71df9 7455 sp_alignment = MIN (PUSH_ROUNDING (1), sp_alignment);
230d793d
RS
7456#endif
7457
320dd7a7
RK
7458 /* We must return here, otherwise we may get a worse result from
7459 one of the choices below. There is nothing useful below as
7460 far as the stack pointer is concerned. */
b0d71df9 7461 return nonzero &= ~ (sp_alignment - 1);
230d793d 7462 }
b0d71df9 7463#endif
230d793d 7464
55310dad
RK
7465 /* If X is a register whose nonzero bits value is current, use it.
7466 Otherwise, if X is a register whose value we can find, use that
7467 value. Otherwise, use the previously-computed global nonzero bits
7468 for this register. */
7469
7470 if (reg_last_set_value[REGNO (x)] != 0
7471 && reg_last_set_mode[REGNO (x)] == mode
b1f21e0a 7472 && (REG_N_SETS (REGNO (x)) == 1
55310dad
RK
7473 || reg_last_set_label[REGNO (x)] == label_tick)
7474 && INSN_CUID (reg_last_set[REGNO (x)]) < subst_low_cuid)
7475 return reg_last_set_nonzero_bits[REGNO (x)];
230d793d
RS
7476
7477 tem = get_last_value (x);
9afa3d54 7478
230d793d 7479 if (tem)
9afa3d54
RK
7480 {
7481#ifdef SHORT_IMMEDIATES_SIGN_EXTEND
7482 /* If X is narrower than MODE and TEM is a non-negative
7483 constant that would appear negative in the mode of X,
7484 sign-extend it for use in reg_nonzero_bits because some
7485 machines (maybe most) will actually do the sign-extension
7486 and this is the conservative approach.
7487
7488 ??? For 2.5, try to tighten up the MD files in this regard
7489 instead of this kludge. */
7490
7491 if (GET_MODE_BITSIZE (GET_MODE (x)) < mode_width
7492 && GET_CODE (tem) == CONST_INT
7493 && INTVAL (tem) > 0
7494 && 0 != (INTVAL (tem)
7495 & ((HOST_WIDE_INT) 1
9e69be8c 7496 << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
9afa3d54
RK
7497 tem = GEN_INT (INTVAL (tem)
7498 | ((HOST_WIDE_INT) (-1)
7499 << GET_MODE_BITSIZE (GET_MODE (x))));
7500#endif
7501 return nonzero_bits (tem, mode);
7502 }
951553af
RK
7503 else if (nonzero_sign_valid && reg_nonzero_bits[REGNO (x)])
7504 return reg_nonzero_bits[REGNO (x)] & nonzero;
230d793d 7505 else
951553af 7506 return nonzero;
230d793d
RS
7507
7508 case CONST_INT:
9afa3d54
RK
7509#ifdef SHORT_IMMEDIATES_SIGN_EXTEND
7510 /* If X is negative in MODE, sign-extend the value. */
9e69be8c
RK
7511 if (INTVAL (x) > 0 && mode_width < BITS_PER_WORD
7512 && 0 != (INTVAL (x) & ((HOST_WIDE_INT) 1 << (mode_width - 1))))
7513 return (INTVAL (x) | ((HOST_WIDE_INT) (-1) << mode_width));
9afa3d54
RK
7514#endif
7515
230d793d
RS
7516 return INTVAL (x);
7517
230d793d 7518 case MEM:
8baf60bb 7519#ifdef LOAD_EXTEND_OP
230d793d
RS
7520 /* In many, if not most, RISC machines, reading a byte from memory
7521 zeros the rest of the register. Noticing that fact saves a lot
7522 of extra zero-extends. */
8baf60bb
RK
7523 if (LOAD_EXTEND_OP (GET_MODE (x)) == ZERO_EXTEND)
7524 nonzero &= GET_MODE_MASK (GET_MODE (x));
230d793d 7525#endif
8baf60bb 7526 break;
230d793d 7527
230d793d
RS
7528 case EQ: case NE:
7529 case GT: case GTU:
7530 case LT: case LTU:
7531 case GE: case GEU:
7532 case LE: case LEU:
3f508eca 7533
c6965c0f
RK
7534 /* If this produces an integer result, we know which bits are set.
7535 Code here used to clear bits outside the mode of X, but that is
7536 now done above. */
230d793d 7537
c6965c0f
RK
7538 if (GET_MODE_CLASS (mode) == MODE_INT
7539 && mode_width <= HOST_BITS_PER_WIDE_INT)
7540 nonzero = STORE_FLAG_VALUE;
230d793d 7541 break;
230d793d 7542
230d793d 7543 case NEG:
b3728b0e
JW
7544#if 0
7545 /* Disabled to avoid exponential mutual recursion between nonzero_bits
7546 and num_sign_bit_copies. */
d0ab8cd3
RK
7547 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
7548 == GET_MODE_BITSIZE (GET_MODE (x)))
951553af 7549 nonzero = 1;
b3728b0e 7550#endif
230d793d
RS
7551
7552 if (GET_MODE_SIZE (GET_MODE (x)) < mode_width)
951553af 7553 nonzero |= (GET_MODE_MASK (mode) & ~ GET_MODE_MASK (GET_MODE (x)));
230d793d 7554 break;
d0ab8cd3
RK
7555
7556 case ABS:
b3728b0e
JW
7557#if 0
7558 /* Disabled to avoid exponential mutual recursion between nonzero_bits
7559 and num_sign_bit_copies. */
d0ab8cd3
RK
7560 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
7561 == GET_MODE_BITSIZE (GET_MODE (x)))
951553af 7562 nonzero = 1;
b3728b0e 7563#endif
d0ab8cd3 7564 break;
230d793d
RS
7565
7566 case TRUNCATE:
951553af 7567 nonzero &= (nonzero_bits (XEXP (x, 0), mode) & GET_MODE_MASK (mode));
230d793d
RS
7568 break;
7569
7570 case ZERO_EXTEND:
951553af 7571 nonzero &= nonzero_bits (XEXP (x, 0), mode);
230d793d 7572 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
951553af 7573 nonzero &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
230d793d
RS
7574 break;
7575
7576 case SIGN_EXTEND:
7577 /* If the sign bit is known clear, this is the same as ZERO_EXTEND.
7578 Otherwise, show all the bits in the outer mode but not the inner
7579 may be non-zero. */
951553af 7580 inner_nz = nonzero_bits (XEXP (x, 0), mode);
230d793d
RS
7581 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
7582 {
951553af 7583 inner_nz &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
e3da301d
MS
7584 if (inner_nz
7585 & (((HOST_WIDE_INT) 1
7586 << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1))))
951553af 7587 inner_nz |= (GET_MODE_MASK (mode)
230d793d
RS
7588 & ~ GET_MODE_MASK (GET_MODE (XEXP (x, 0))));
7589 }
7590
951553af 7591 nonzero &= inner_nz;
230d793d
RS
7592 break;
7593
7594 case AND:
951553af
RK
7595 nonzero &= (nonzero_bits (XEXP (x, 0), mode)
7596 & nonzero_bits (XEXP (x, 1), mode));
230d793d
RS
7597 break;
7598
d0ab8cd3
RK
7599 case XOR: case IOR:
7600 case UMIN: case UMAX: case SMIN: case SMAX:
951553af
RK
7601 nonzero &= (nonzero_bits (XEXP (x, 0), mode)
7602 | nonzero_bits (XEXP (x, 1), mode));
230d793d
RS
7603 break;
7604
7605 case PLUS: case MINUS:
7606 case MULT:
7607 case DIV: case UDIV:
7608 case MOD: case UMOD:
7609 /* We can apply the rules of arithmetic to compute the number of
7610 high- and low-order zero bits of these operations. We start by
7611 computing the width (position of the highest-order non-zero bit)
7612 and the number of low-order zero bits for each value. */
7613 {
951553af
RK
7614 unsigned HOST_WIDE_INT nz0 = nonzero_bits (XEXP (x, 0), mode);
7615 unsigned HOST_WIDE_INT nz1 = nonzero_bits (XEXP (x, 1), mode);
7616 int width0 = floor_log2 (nz0) + 1;
7617 int width1 = floor_log2 (nz1) + 1;
7618 int low0 = floor_log2 (nz0 & -nz0);
7619 int low1 = floor_log2 (nz1 & -nz1);
318b149c
RK
7620 HOST_WIDE_INT op0_maybe_minusp
7621 = (nz0 & ((HOST_WIDE_INT) 1 << (mode_width - 1)));
7622 HOST_WIDE_INT op1_maybe_minusp
7623 = (nz1 & ((HOST_WIDE_INT) 1 << (mode_width - 1)));
230d793d
RS
7624 int result_width = mode_width;
7625 int result_low = 0;
7626
7627 switch (code)
7628 {
7629 case PLUS:
0e9ff885
DM
7630#ifdef STACK_BIAS
7631 if (STACK_BIAS
7632 && (XEXP (x, 0) == stack_pointer_rtx
7633 || XEXP (x, 0) == frame_pointer_rtx)
7634 && GET_CODE (XEXP (x, 1)) == CONST_INT)
7635 {
7636 int sp_alignment = STACK_BOUNDARY / BITS_PER_UNIT;
7637
7638 nz0 = (GET_MODE_MASK (mode) & ~ (sp_alignment - 1));
7639 nz1 = INTVAL (XEXP (x, 1)) - STACK_BIAS;
7640 width0 = floor_log2 (nz0) + 1;
7641 width1 = floor_log2 (nz1) + 1;
7642 low0 = floor_log2 (nz0 & -nz0);
7643 low1 = floor_log2 (nz1 & -nz1);
7644 }
7645#endif
230d793d
RS
7646 result_width = MAX (width0, width1) + 1;
7647 result_low = MIN (low0, low1);
7648 break;
7649 case MINUS:
7650 result_low = MIN (low0, low1);
7651 break;
7652 case MULT:
7653 result_width = width0 + width1;
7654 result_low = low0 + low1;
7655 break;
7656 case DIV:
7657 if (! op0_maybe_minusp && ! op1_maybe_minusp)
7658 result_width = width0;
7659 break;
7660 case UDIV:
7661 result_width = width0;
7662 break;
7663 case MOD:
7664 if (! op0_maybe_minusp && ! op1_maybe_minusp)
7665 result_width = MIN (width0, width1);
7666 result_low = MIN (low0, low1);
7667 break;
7668 case UMOD:
7669 result_width = MIN (width0, width1);
7670 result_low = MIN (low0, low1);
7671 break;
e9a25f70
JL
7672 default:
7673 abort ();
230d793d
RS
7674 }
7675
7676 if (result_width < mode_width)
951553af 7677 nonzero &= ((HOST_WIDE_INT) 1 << result_width) - 1;
230d793d
RS
7678
7679 if (result_low > 0)
951553af 7680 nonzero &= ~ (((HOST_WIDE_INT) 1 << result_low) - 1);
230d793d
RS
7681 }
7682 break;
7683
7684 case ZERO_EXTRACT:
7685 if (GET_CODE (XEXP (x, 1)) == CONST_INT
5f4f0e22 7686 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
951553af 7687 nonzero &= ((HOST_WIDE_INT) 1 << INTVAL (XEXP (x, 1))) - 1;
230d793d
RS
7688 break;
7689
7690 case SUBREG:
c3c2cb37
RK
7691 /* If this is a SUBREG formed for a promoted variable that has
7692 been zero-extended, we know that at least the high-order bits
7693 are zero, though others might be too. */
7694
7695 if (SUBREG_PROMOTED_VAR_P (x) && SUBREG_PROMOTED_UNSIGNED_P (x))
951553af
RK
7696 nonzero = (GET_MODE_MASK (GET_MODE (x))
7697 & nonzero_bits (SUBREG_REG (x), GET_MODE (x)));
c3c2cb37 7698
230d793d
RS
7699 /* If the inner mode is a single word for both the host and target
7700 machines, we can compute this from which bits of the inner
951553af 7701 object might be nonzero. */
230d793d 7702 if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) <= BITS_PER_WORD
5f4f0e22
CH
7703 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
7704 <= HOST_BITS_PER_WIDE_INT))
230d793d 7705 {
951553af 7706 nonzero &= nonzero_bits (SUBREG_REG (x), mode);
8baf60bb 7707
b52ce03d
R
7708#if defined (WORD_REGISTER_OPERATIONS) && defined (LOAD_EXTEND_OP)
7709 /* If this is a typical RISC machine, we only have to worry
7710 about the way loads are extended. */
7711 if (LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND
7712 ? (nonzero
7713 & (1L << (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) - 1)))
7714 : LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) != ZERO_EXTEND)
230d793d 7715#endif
b52ce03d
R
7716 {
7717 /* On many CISC machines, accessing an object in a wider mode
7718 causes the high-order bits to become undefined. So they are
7719 not known to be zero. */
7720 if (GET_MODE_SIZE (GET_MODE (x))
7721 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
7722 nonzero |= (GET_MODE_MASK (GET_MODE (x))
7723 & ~ GET_MODE_MASK (GET_MODE (SUBREG_REG (x))));
7724 }
230d793d
RS
7725 }
7726 break;
7727
7728 case ASHIFTRT:
7729 case LSHIFTRT:
7730 case ASHIFT:
230d793d 7731 case ROTATE:
951553af 7732 /* The nonzero bits are in two classes: any bits within MODE
230d793d 7733 that aren't in GET_MODE (x) are always significant. The rest of the
951553af 7734 nonzero bits are those that are significant in the operand of
230d793d
RS
7735 the shift when shifted the appropriate number of bits. This
7736 shows that high-order bits are cleared by the right shift and
7737 low-order bits by left shifts. */
7738 if (GET_CODE (XEXP (x, 1)) == CONST_INT
7739 && INTVAL (XEXP (x, 1)) >= 0
5f4f0e22 7740 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
230d793d
RS
7741 {
7742 enum machine_mode inner_mode = GET_MODE (x);
7743 int width = GET_MODE_BITSIZE (inner_mode);
7744 int count = INTVAL (XEXP (x, 1));
5f4f0e22 7745 unsigned HOST_WIDE_INT mode_mask = GET_MODE_MASK (inner_mode);
951553af
RK
7746 unsigned HOST_WIDE_INT op_nonzero = nonzero_bits (XEXP (x, 0), mode);
7747 unsigned HOST_WIDE_INT inner = op_nonzero & mode_mask;
5f4f0e22 7748 unsigned HOST_WIDE_INT outer = 0;
230d793d
RS
7749
7750 if (mode_width > width)
951553af 7751 outer = (op_nonzero & nonzero & ~ mode_mask);
230d793d
RS
7752
7753 if (code == LSHIFTRT)
7754 inner >>= count;
7755 else if (code == ASHIFTRT)
7756 {
7757 inner >>= count;
7758
951553af 7759 /* If the sign bit may have been nonzero before the shift, we
230d793d 7760 need to mark all the places it could have been copied to
951553af 7761 by the shift as possibly nonzero. */
5f4f0e22
CH
7762 if (inner & ((HOST_WIDE_INT) 1 << (width - 1 - count)))
7763 inner |= (((HOST_WIDE_INT) 1 << count) - 1) << (width - count);
230d793d 7764 }
45620ed4 7765 else if (code == ASHIFT)
230d793d
RS
7766 inner <<= count;
7767 else
7768 inner = ((inner << (count % width)
7769 | (inner >> (width - (count % width)))) & mode_mask);
7770
951553af 7771 nonzero &= (outer | inner);
230d793d
RS
7772 }
7773 break;
7774
7775 case FFS:
7776 /* This is at most the number of bits in the mode. */
951553af 7777 nonzero = ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width) + 1)) - 1;
230d793d 7778 break;
d0ab8cd3
RK
7779
7780 case IF_THEN_ELSE:
951553af
RK
7781 nonzero &= (nonzero_bits (XEXP (x, 1), mode)
7782 | nonzero_bits (XEXP (x, 2), mode));
d0ab8cd3 7783 break;
e9a25f70
JL
7784
7785 default:
7786 break;
230d793d
RS
7787 }
7788
951553af 7789 return nonzero;
230d793d 7790}
b3728b0e
JW
7791
7792/* See the macro definition above. */
7793#undef num_sign_bit_copies
230d793d 7794\f
d0ab8cd3 7795/* Return the number of bits at the high-order end of X that are known to
5109d49f
RK
7796 be equal to the sign bit. X will be used in mode MODE; if MODE is
7797 VOIDmode, X will be used in its own mode. The returned value will always
7798 be between 1 and the number of bits in MODE. */
d0ab8cd3
RK
7799
7800static int
7801num_sign_bit_copies (x, mode)
7802 rtx x;
7803 enum machine_mode mode;
7804{
7805 enum rtx_code code = GET_CODE (x);
7806 int bitwidth;
7807 int num0, num1, result;
951553af 7808 unsigned HOST_WIDE_INT nonzero;
d0ab8cd3
RK
7809 rtx tem;
7810
7811 /* If we weren't given a mode, use the mode of X. If the mode is still
1c75dfa4
RK
7812 VOIDmode, we don't know anything. Likewise if one of the modes is
7813 floating-point. */
d0ab8cd3
RK
7814
7815 if (mode == VOIDmode)
7816 mode = GET_MODE (x);
7817
1c75dfa4 7818 if (mode == VOIDmode || FLOAT_MODE_P (mode) || FLOAT_MODE_P (GET_MODE (x)))
6752e8d2 7819 return 1;
d0ab8cd3
RK
7820
7821 bitwidth = GET_MODE_BITSIZE (mode);
7822
0f41302f 7823 /* For a smaller object, just ignore the high bits. */
312def2e
RK
7824 if (bitwidth < GET_MODE_BITSIZE (GET_MODE (x)))
7825 return MAX (1, (num_sign_bit_copies (x, GET_MODE (x))
7826 - (GET_MODE_BITSIZE (GET_MODE (x)) - bitwidth)));
7827
e9a25f70
JL
7828 if (GET_MODE (x) != VOIDmode && bitwidth > GET_MODE_BITSIZE (GET_MODE (x)))
7829 {
0c314d1a
RK
7830#ifndef WORD_REGISTER_OPERATIONS
7831 /* If this machine does not do all register operations on the entire
7832 register and MODE is wider than the mode of X, we can say nothing
7833 at all about the high-order bits. */
e9a25f70
JL
7834 return 1;
7835#else
7836 /* Likewise on machines that do, if the mode of the object is smaller
7837 than a word and loads of that size don't sign extend, we can say
7838 nothing about the high order bits. */
7839 if (GET_MODE_BITSIZE (GET_MODE (x)) < BITS_PER_WORD
7840#ifdef LOAD_EXTEND_OP
7841 && LOAD_EXTEND_OP (GET_MODE (x)) != SIGN_EXTEND
7842#endif
7843 )
7844 return 1;
0c314d1a 7845#endif
e9a25f70 7846 }
0c314d1a 7847
d0ab8cd3
RK
7848 switch (code)
7849 {
7850 case REG:
55310dad 7851
ff0dbdd1
RK
7852#ifdef POINTERS_EXTEND_UNSIGNED
7853 /* If pointers extend signed and this is a pointer in Pmode, say that
7854 all the bits above ptr_mode are known to be sign bit copies. */
7855 if (! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode && mode == Pmode
7856 && REGNO_POINTER_FLAG (REGNO (x)))
7857 return GET_MODE_BITSIZE (Pmode) - GET_MODE_BITSIZE (ptr_mode) + 1;
7858#endif
7859
55310dad
RK
7860 if (reg_last_set_value[REGNO (x)] != 0
7861 && reg_last_set_mode[REGNO (x)] == mode
b1f21e0a 7862 && (REG_N_SETS (REGNO (x)) == 1
55310dad
RK
7863 || reg_last_set_label[REGNO (x)] == label_tick)
7864 && INSN_CUID (reg_last_set[REGNO (x)]) < subst_low_cuid)
7865 return reg_last_set_sign_bit_copies[REGNO (x)];
d0ab8cd3
RK
7866
7867 tem = get_last_value (x);
7868 if (tem != 0)
7869 return num_sign_bit_copies (tem, mode);
55310dad
RK
7870
7871 if (nonzero_sign_valid && reg_sign_bit_copies[REGNO (x)] != 0)
7872 return reg_sign_bit_copies[REGNO (x)];
d0ab8cd3
RK
7873 break;
7874
457816e2 7875 case MEM:
8baf60bb 7876#ifdef LOAD_EXTEND_OP
457816e2 7877 /* Some RISC machines sign-extend all loads of smaller than a word. */
8baf60bb
RK
7878 if (LOAD_EXTEND_OP (GET_MODE (x)) == SIGN_EXTEND)
7879 return MAX (1, bitwidth - GET_MODE_BITSIZE (GET_MODE (x)) + 1);
457816e2 7880#endif
8baf60bb 7881 break;
457816e2 7882
d0ab8cd3
RK
7883 case CONST_INT:
7884 /* If the constant is negative, take its 1's complement and remask.
7885 Then see how many zero bits we have. */
951553af 7886 nonzero = INTVAL (x) & GET_MODE_MASK (mode);
ac49a949 7887 if (bitwidth <= HOST_BITS_PER_WIDE_INT
951553af
RK
7888 && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
7889 nonzero = (~ nonzero) & GET_MODE_MASK (mode);
d0ab8cd3 7890
951553af 7891 return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
d0ab8cd3
RK
7892
7893 case SUBREG:
c3c2cb37
RK
7894 /* If this is a SUBREG for a promoted object that is sign-extended
7895 and we are looking at it in a wider mode, we know that at least the
7896 high-order bits are known to be sign bit copies. */
7897
7898 if (SUBREG_PROMOTED_VAR_P (x) && ! SUBREG_PROMOTED_UNSIGNED_P (x))
dc3e17ad
RK
7899 return MAX (bitwidth - GET_MODE_BITSIZE (GET_MODE (x)) + 1,
7900 num_sign_bit_copies (SUBREG_REG (x), mode));
c3c2cb37 7901
0f41302f 7902 /* For a smaller object, just ignore the high bits. */
d0ab8cd3
RK
7903 if (bitwidth <= GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))))
7904 {
7905 num0 = num_sign_bit_copies (SUBREG_REG (x), VOIDmode);
7906 return MAX (1, (num0
7907 - (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
7908 - bitwidth)));
7909 }
457816e2 7910
8baf60bb 7911#ifdef WORD_REGISTER_OPERATIONS
2aec5b7a 7912#ifdef LOAD_EXTEND_OP
8baf60bb
RK
7913 /* For paradoxical SUBREGs on machines where all register operations
7914 affect the entire register, just look inside. Note that we are
7915 passing MODE to the recursive call, so the number of sign bit copies
7916 will remain relative to that mode, not the inner mode. */
457816e2 7917
2aec5b7a
JW
7918 /* This works only if loads sign extend. Otherwise, if we get a
7919 reload for the inner part, it may be loaded from the stack, and
7920 then we lose all sign bit copies that existed before the store
7921 to the stack. */
7922
7923 if ((GET_MODE_SIZE (GET_MODE (x))
7924 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
7925 && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND)
457816e2 7926 return num_sign_bit_copies (SUBREG_REG (x), mode);
2aec5b7a 7927#endif
457816e2 7928#endif
d0ab8cd3
RK
7929 break;
7930
7931 case SIGN_EXTRACT:
7932 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
7933 return MAX (1, bitwidth - INTVAL (XEXP (x, 1)));
7934 break;
7935
7936 case SIGN_EXTEND:
7937 return (bitwidth - GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
7938 + num_sign_bit_copies (XEXP (x, 0), VOIDmode));
7939
7940 case TRUNCATE:
0f41302f 7941 /* For a smaller object, just ignore the high bits. */
d0ab8cd3
RK
7942 num0 = num_sign_bit_copies (XEXP (x, 0), VOIDmode);
7943 return MAX (1, (num0 - (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
7944 - bitwidth)));
7945
7946 case NOT:
7947 return num_sign_bit_copies (XEXP (x, 0), mode);
7948
7949 case ROTATE: case ROTATERT:
7950 /* If we are rotating left by a number of bits less than the number
7951 of sign bit copies, we can just subtract that amount from the
7952 number. */
7953 if (GET_CODE (XEXP (x, 1)) == CONST_INT
7954 && INTVAL (XEXP (x, 1)) >= 0 && INTVAL (XEXP (x, 1)) < bitwidth)
7955 {
7956 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
7957 return MAX (1, num0 - (code == ROTATE ? INTVAL (XEXP (x, 1))
7958 : bitwidth - INTVAL (XEXP (x, 1))));
7959 }
7960 break;
7961
7962 case NEG:
7963 /* In general, this subtracts one sign bit copy. But if the value
7964 is known to be positive, the number of sign bit copies is the
951553af
RK
7965 same as that of the input. Finally, if the input has just one bit
7966 that might be nonzero, all the bits are copies of the sign bit. */
7967 nonzero = nonzero_bits (XEXP (x, 0), mode);
7968 if (nonzero == 1)
d0ab8cd3
RK
7969 return bitwidth;
7970
7971 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
7972 if (num0 > 1
ac49a949 7973 && bitwidth <= HOST_BITS_PER_WIDE_INT
951553af 7974 && (((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero))
d0ab8cd3
RK
7975 num0--;
7976
7977 return num0;
7978
7979 case IOR: case AND: case XOR:
7980 case SMIN: case SMAX: case UMIN: case UMAX:
7981 /* Logical operations will preserve the number of sign-bit copies.
7982 MIN and MAX operations always return one of the operands. */
7983 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
7984 num1 = num_sign_bit_copies (XEXP (x, 1), mode);
7985 return MIN (num0, num1);
7986
7987 case PLUS: case MINUS:
7988 /* For addition and subtraction, we can have a 1-bit carry. However,
7989 if we are subtracting 1 from a positive number, there will not
7990 be such a carry. Furthermore, if the positive number is known to
7991 be 0 or 1, we know the result is either -1 or 0. */
7992
3e3ea975 7993 if (code == PLUS && XEXP (x, 1) == constm1_rtx
9295e6af 7994 && bitwidth <= HOST_BITS_PER_WIDE_INT)
d0ab8cd3 7995 {
951553af
RK
7996 nonzero = nonzero_bits (XEXP (x, 0), mode);
7997 if ((((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero) == 0)
7998 return (nonzero == 1 || nonzero == 0 ? bitwidth
7999 : bitwidth - floor_log2 (nonzero) - 1);
d0ab8cd3
RK
8000 }
8001
8002 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
8003 num1 = num_sign_bit_copies (XEXP (x, 1), mode);
8004 return MAX (1, MIN (num0, num1) - 1);
8005
8006 case MULT:
8007 /* The number of bits of the product is the sum of the number of
8008 bits of both terms. However, unless one of the terms if known
8009 to be positive, we must allow for an additional bit since negating
8010 a negative number can remove one sign bit copy. */
8011
8012 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
8013 num1 = num_sign_bit_copies (XEXP (x, 1), mode);
8014
8015 result = bitwidth - (bitwidth - num0) - (bitwidth - num1);
8016 if (result > 0
9295e6af 8017 && bitwidth <= HOST_BITS_PER_WIDE_INT
951553af 8018 && ((nonzero_bits (XEXP (x, 0), mode)
d0ab8cd3 8019 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
01c82bbb
RK
8020 && ((nonzero_bits (XEXP (x, 1), mode)
8021 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
d0ab8cd3
RK
8022 result--;
8023
8024 return MAX (1, result);
8025
8026 case UDIV:
8027 /* The result must be <= the first operand. */
8028 return num_sign_bit_copies (XEXP (x, 0), mode);
8029
8030 case UMOD:
8031 /* The result must be <= the scond operand. */
8032 return num_sign_bit_copies (XEXP (x, 1), mode);
8033
8034 case DIV:
8035 /* Similar to unsigned division, except that we have to worry about
8036 the case where the divisor is negative, in which case we have
8037 to add 1. */
8038 result = num_sign_bit_copies (XEXP (x, 0), mode);
8039 if (result > 1
ac49a949 8040 && bitwidth <= HOST_BITS_PER_WIDE_INT
951553af 8041 && (nonzero_bits (XEXP (x, 1), mode)
d0ab8cd3
RK
8042 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
8043 result --;
8044
8045 return result;
8046
8047 case MOD:
8048 result = num_sign_bit_copies (XEXP (x, 1), mode);
8049 if (result > 1
ac49a949 8050 && bitwidth <= HOST_BITS_PER_WIDE_INT
951553af 8051 && (nonzero_bits (XEXP (x, 1), mode)
d0ab8cd3
RK
8052 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
8053 result --;
8054
8055 return result;
8056
8057 case ASHIFTRT:
8058 /* Shifts by a constant add to the number of bits equal to the
8059 sign bit. */
8060 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
8061 if (GET_CODE (XEXP (x, 1)) == CONST_INT
8062 && INTVAL (XEXP (x, 1)) > 0)
8063 num0 = MIN (bitwidth, num0 + INTVAL (XEXP (x, 1)));
8064
8065 return num0;
8066
8067 case ASHIFT:
d0ab8cd3
RK
8068 /* Left shifts destroy copies. */
8069 if (GET_CODE (XEXP (x, 1)) != CONST_INT
8070 || INTVAL (XEXP (x, 1)) < 0
8071 || INTVAL (XEXP (x, 1)) >= bitwidth)
8072 return 1;
8073
8074 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
8075 return MAX (1, num0 - INTVAL (XEXP (x, 1)));
8076
8077 case IF_THEN_ELSE:
8078 num0 = num_sign_bit_copies (XEXP (x, 1), mode);
8079 num1 = num_sign_bit_copies (XEXP (x, 2), mode);
8080 return MIN (num0, num1);
8081
d0ab8cd3
RK
8082 case EQ: case NE: case GE: case GT: case LE: case LT:
8083 case GEU: case GTU: case LEU: case LTU:
0802d516
RK
8084 if (STORE_FLAG_VALUE == -1)
8085 return bitwidth;
e9a25f70
JL
8086 break;
8087
8088 default:
8089 break;
d0ab8cd3
RK
8090 }
8091
8092 /* If we haven't been able to figure it out by one of the above rules,
8093 see if some of the high-order bits are known to be zero. If so,
ac49a949
RS
8094 count those bits and return one less than that amount. If we can't
8095 safely compute the mask for this mode, always return BITWIDTH. */
8096
8097 if (bitwidth > HOST_BITS_PER_WIDE_INT)
6752e8d2 8098 return 1;
d0ab8cd3 8099
951553af 8100 nonzero = nonzero_bits (x, mode);
df6f4086 8101 return (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))
951553af 8102 ? 1 : bitwidth - floor_log2 (nonzero) - 1);
d0ab8cd3
RK
8103}
8104\f
1a26b032
RK
8105/* Return the number of "extended" bits there are in X, when interpreted
8106 as a quantity in MODE whose signedness is indicated by UNSIGNEDP. For
8107 unsigned quantities, this is the number of high-order zero bits.
8108 For signed quantities, this is the number of copies of the sign bit
8109 minus 1. In both case, this function returns the number of "spare"
8110 bits. For example, if two quantities for which this function returns
8111 at least 1 are added, the addition is known not to overflow.
8112
8113 This function will always return 0 unless called during combine, which
8114 implies that it must be called from a define_split. */
8115
8116int
8117extended_count (x, mode, unsignedp)
8118 rtx x;
8119 enum machine_mode mode;
8120 int unsignedp;
8121{
951553af 8122 if (nonzero_sign_valid == 0)
1a26b032
RK
8123 return 0;
8124
8125 return (unsignedp
ac49a949
RS
8126 ? (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
8127 && (GET_MODE_BITSIZE (mode) - 1
951553af 8128 - floor_log2 (nonzero_bits (x, mode))))
1a26b032
RK
8129 : num_sign_bit_copies (x, mode) - 1);
8130}
8131\f
230d793d
RS
8132/* This function is called from `simplify_shift_const' to merge two
8133 outer operations. Specifically, we have already found that we need
8134 to perform operation *POP0 with constant *PCONST0 at the outermost
8135 position. We would now like to also perform OP1 with constant CONST1
8136 (with *POP0 being done last).
8137
8138 Return 1 if we can do the operation and update *POP0 and *PCONST0 with
8139 the resulting operation. *PCOMP_P is set to 1 if we would need to
8140 complement the innermost operand, otherwise it is unchanged.
8141
8142 MODE is the mode in which the operation will be done. No bits outside
8143 the width of this mode matter. It is assumed that the width of this mode
5f4f0e22 8144 is smaller than or equal to HOST_BITS_PER_WIDE_INT.
230d793d
RS
8145
8146 If *POP0 or OP1 are NIL, it means no operation is required. Only NEG, PLUS,
8147 IOR, XOR, and AND are supported. We may set *POP0 to SET if the proper
8148 result is simply *PCONST0.
8149
8150 If the resulting operation cannot be expressed as one operation, we
8151 return 0 and do not change *POP0, *PCONST0, and *PCOMP_P. */
8152
8153static int
8154merge_outer_ops (pop0, pconst0, op1, const1, mode, pcomp_p)
8155 enum rtx_code *pop0;
5f4f0e22 8156 HOST_WIDE_INT *pconst0;
230d793d 8157 enum rtx_code op1;
5f4f0e22 8158 HOST_WIDE_INT const1;
230d793d
RS
8159 enum machine_mode mode;
8160 int *pcomp_p;
8161{
8162 enum rtx_code op0 = *pop0;
5f4f0e22 8163 HOST_WIDE_INT const0 = *pconst0;
9fa6d012 8164 int width = GET_MODE_BITSIZE (mode);
230d793d
RS
8165
8166 const0 &= GET_MODE_MASK (mode);
8167 const1 &= GET_MODE_MASK (mode);
8168
8169 /* If OP0 is an AND, clear unimportant bits in CONST1. */
8170 if (op0 == AND)
8171 const1 &= const0;
8172
8173 /* If OP0 or OP1 is NIL, this is easy. Similarly if they are the same or
8174 if OP0 is SET. */
8175
8176 if (op1 == NIL || op0 == SET)
8177 return 1;
8178
8179 else if (op0 == NIL)
8180 op0 = op1, const0 = const1;
8181
8182 else if (op0 == op1)
8183 {
8184 switch (op0)
8185 {
8186 case AND:
8187 const0 &= const1;
8188 break;
8189 case IOR:
8190 const0 |= const1;
8191 break;
8192 case XOR:
8193 const0 ^= const1;
8194 break;
8195 case PLUS:
8196 const0 += const1;
8197 break;
8198 case NEG:
8199 op0 = NIL;
8200 break;
e9a25f70
JL
8201 default:
8202 break;
230d793d
RS
8203 }
8204 }
8205
8206 /* Otherwise, if either is a PLUS or NEG, we can't do anything. */
8207 else if (op0 == PLUS || op1 == PLUS || op0 == NEG || op1 == NEG)
8208 return 0;
8209
8210 /* If the two constants aren't the same, we can't do anything. The
8211 remaining six cases can all be done. */
8212 else if (const0 != const1)
8213 return 0;
8214
8215 else
8216 switch (op0)
8217 {
8218 case IOR:
8219 if (op1 == AND)
8220 /* (a & b) | b == b */
8221 op0 = SET;
8222 else /* op1 == XOR */
8223 /* (a ^ b) | b == a | b */
b729186a 8224 {;}
230d793d
RS
8225 break;
8226
8227 case XOR:
8228 if (op1 == AND)
8229 /* (a & b) ^ b == (~a) & b */
8230 op0 = AND, *pcomp_p = 1;
8231 else /* op1 == IOR */
8232 /* (a | b) ^ b == a & ~b */
8233 op0 = AND, *pconst0 = ~ const0;
8234 break;
8235
8236 case AND:
8237 if (op1 == IOR)
8238 /* (a | b) & b == b */
8239 op0 = SET;
8240 else /* op1 == XOR */
8241 /* (a ^ b) & b) == (~a) & b */
8242 *pcomp_p = 1;
8243 break;
e9a25f70
JL
8244 default:
8245 break;
230d793d
RS
8246 }
8247
8248 /* Check for NO-OP cases. */
8249 const0 &= GET_MODE_MASK (mode);
8250 if (const0 == 0
8251 && (op0 == IOR || op0 == XOR || op0 == PLUS))
8252 op0 = NIL;
8253 else if (const0 == 0 && op0 == AND)
8254 op0 = SET;
8255 else if (const0 == GET_MODE_MASK (mode) && op0 == AND)
8256 op0 = NIL;
8257
9fa6d012
TG
8258 /* If this would be an entire word for the target, but is not for
8259 the host, then sign-extend on the host so that the number will look
8260 the same way on the host that it would on the target.
8261
8262 For example, when building a 64 bit alpha hosted 32 bit sparc
8263 targeted compiler, then we want the 32 bit unsigned value -1 to be
8264 represented as a 64 bit value -1, and not as 0x00000000ffffffff.
8265 The later confuses the sparc backend. */
8266
8267 if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT && BITS_PER_WORD == width
8268 && (const0 & ((HOST_WIDE_INT) 1 << (width - 1))))
8269 const0 |= ((HOST_WIDE_INT) (-1) << width);
8270
230d793d
RS
8271 *pop0 = op0;
8272 *pconst0 = const0;
8273
8274 return 1;
8275}
8276\f
8277/* Simplify a shift of VAROP by COUNT bits. CODE says what kind of shift.
8278 The result of the shift is RESULT_MODE. X, if non-zero, is an expression
8279 that we started with.
8280
8281 The shift is normally computed in the widest mode we find in VAROP, as
8282 long as it isn't a different number of words than RESULT_MODE. Exceptions
8283 are ASHIFTRT and ROTATE, which are always done in their original mode, */
8284
8285static rtx
8286simplify_shift_const (x, code, result_mode, varop, count)
8287 rtx x;
8288 enum rtx_code code;
8289 enum machine_mode result_mode;
8290 rtx varop;
8291 int count;
8292{
8293 enum rtx_code orig_code = code;
8294 int orig_count = count;
8295 enum machine_mode mode = result_mode;
8296 enum machine_mode shift_mode, tmode;
8297 int mode_words
8298 = (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
8299 /* We form (outer_op (code varop count) (outer_const)). */
8300 enum rtx_code outer_op = NIL;
c4e861e8 8301 HOST_WIDE_INT outer_const = 0;
230d793d
RS
8302 rtx const_rtx;
8303 int complement_p = 0;
8304 rtx new;
8305
8306 /* If we were given an invalid count, don't do anything except exactly
8307 what was requested. */
8308
8309 if (count < 0 || count > GET_MODE_BITSIZE (mode))
8310 {
8311 if (x)
8312 return x;
8313
38a448ca 8314 return gen_rtx_fmt_ee (code, mode, varop, GEN_INT (count));
230d793d
RS
8315 }
8316
8317 /* Unless one of the branches of the `if' in this loop does a `continue',
8318 we will `break' the loop after the `if'. */
8319
8320 while (count != 0)
8321 {
8322 /* If we have an operand of (clobber (const_int 0)), just return that
8323 value. */
8324 if (GET_CODE (varop) == CLOBBER)
8325 return varop;
8326
8327 /* If we discovered we had to complement VAROP, leave. Making a NOT
8328 here would cause an infinite loop. */
8329 if (complement_p)
8330 break;
8331
abc95ed3 8332 /* Convert ROTATERT to ROTATE. */
230d793d
RS
8333 if (code == ROTATERT)
8334 code = ROTATE, count = GET_MODE_BITSIZE (result_mode) - count;
8335
230d793d 8336 /* We need to determine what mode we will do the shift in. If the
f6789c77
RK
8337 shift is a right shift or a ROTATE, we must always do it in the mode
8338 it was originally done in. Otherwise, we can do it in MODE, the
0f41302f 8339 widest mode encountered. */
f6789c77
RK
8340 shift_mode
8341 = (code == ASHIFTRT || code == LSHIFTRT || code == ROTATE
8342 ? result_mode : mode);
230d793d
RS
8343
8344 /* Handle cases where the count is greater than the size of the mode
8345 minus 1. For ASHIFT, use the size minus one as the count (this can
8346 occur when simplifying (lshiftrt (ashiftrt ..))). For rotates,
8347 take the count modulo the size. For other shifts, the result is
8348 zero.
8349
8350 Since these shifts are being produced by the compiler by combining
8351 multiple operations, each of which are defined, we know what the
8352 result is supposed to be. */
8353
8354 if (count > GET_MODE_BITSIZE (shift_mode) - 1)
8355 {
8356 if (code == ASHIFTRT)
8357 count = GET_MODE_BITSIZE (shift_mode) - 1;
8358 else if (code == ROTATE || code == ROTATERT)
8359 count %= GET_MODE_BITSIZE (shift_mode);
8360 else
8361 {
8362 /* We can't simply return zero because there may be an
8363 outer op. */
8364 varop = const0_rtx;
8365 count = 0;
8366 break;
8367 }
8368 }
8369
8370 /* Negative counts are invalid and should not have been made (a
8371 programmer-specified negative count should have been handled
0f41302f 8372 above). */
230d793d
RS
8373 else if (count < 0)
8374 abort ();
8375
312def2e
RK
8376 /* An arithmetic right shift of a quantity known to be -1 or 0
8377 is a no-op. */
8378 if (code == ASHIFTRT
8379 && (num_sign_bit_copies (varop, shift_mode)
8380 == GET_MODE_BITSIZE (shift_mode)))
d0ab8cd3 8381 {
312def2e
RK
8382 count = 0;
8383 break;
8384 }
d0ab8cd3 8385
312def2e
RK
8386 /* If we are doing an arithmetic right shift and discarding all but
8387 the sign bit copies, this is equivalent to doing a shift by the
8388 bitsize minus one. Convert it into that shift because it will often
8389 allow other simplifications. */
500c518b 8390
312def2e
RK
8391 if (code == ASHIFTRT
8392 && (count + num_sign_bit_copies (varop, shift_mode)
8393 >= GET_MODE_BITSIZE (shift_mode)))
8394 count = GET_MODE_BITSIZE (shift_mode) - 1;
500c518b 8395
230d793d
RS
8396 /* We simplify the tests below and elsewhere by converting
8397 ASHIFTRT to LSHIFTRT if we know the sign bit is clear.
8398 `make_compound_operation' will convert it to a ASHIFTRT for
8399 those machines (such as Vax) that don't have a LSHIFTRT. */
5f4f0e22 8400 if (GET_MODE_BITSIZE (shift_mode) <= HOST_BITS_PER_WIDE_INT
230d793d 8401 && code == ASHIFTRT
951553af 8402 && ((nonzero_bits (varop, shift_mode)
5f4f0e22
CH
8403 & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (shift_mode) - 1)))
8404 == 0))
230d793d
RS
8405 code = LSHIFTRT;
8406
8407 switch (GET_CODE (varop))
8408 {
8409 case SIGN_EXTEND:
8410 case ZERO_EXTEND:
8411 case SIGN_EXTRACT:
8412 case ZERO_EXTRACT:
8413 new = expand_compound_operation (varop);
8414 if (new != varop)
8415 {
8416 varop = new;
8417 continue;
8418 }
8419 break;
8420
8421 case MEM:
8422 /* If we have (xshiftrt (mem ...) C) and C is MODE_WIDTH
8423 minus the width of a smaller mode, we can do this with a
8424 SIGN_EXTEND or ZERO_EXTEND from the narrower memory location. */
8425 if ((code == ASHIFTRT || code == LSHIFTRT)
8426 && ! mode_dependent_address_p (XEXP (varop, 0))
8427 && ! MEM_VOLATILE_P (varop)
8428 && (tmode = mode_for_size (GET_MODE_BITSIZE (mode) - count,
8429 MODE_INT, 1)) != BLKmode)
8430 {
f76b9db2 8431 if (BYTES_BIG_ENDIAN)
38a448ca 8432 new = gen_rtx_MEM (tmode, XEXP (varop, 0));
f76b9db2 8433 else
38a448ca
RH
8434 new = gen_rtx_MEM (tmode,
8435 plus_constant (XEXP (varop, 0),
8436 count / BITS_PER_UNIT));
e24b00c8
ILT
8437 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (varop);
8438 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (varop);
8439 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (varop);
230d793d
RS
8440 varop = gen_rtx_combine (code == ASHIFTRT ? SIGN_EXTEND
8441 : ZERO_EXTEND, mode, new);
8442 count = 0;
8443 continue;
8444 }
8445 break;
8446
8447 case USE:
8448 /* Similar to the case above, except that we can only do this if
8449 the resulting mode is the same as that of the underlying
8450 MEM and adjust the address depending on the *bits* endianness
8451 because of the way that bit-field extract insns are defined. */
8452 if ((code == ASHIFTRT || code == LSHIFTRT)
8453 && (tmode = mode_for_size (GET_MODE_BITSIZE (mode) - count,
8454 MODE_INT, 1)) != BLKmode
8455 && tmode == GET_MODE (XEXP (varop, 0)))
8456 {
f76b9db2
ILT
8457 if (BITS_BIG_ENDIAN)
8458 new = XEXP (varop, 0);
8459 else
8460 {
8461 new = copy_rtx (XEXP (varop, 0));
8462 SUBST (XEXP (new, 0),
8463 plus_constant (XEXP (new, 0),
8464 count / BITS_PER_UNIT));
8465 }
230d793d
RS
8466
8467 varop = gen_rtx_combine (code == ASHIFTRT ? SIGN_EXTEND
8468 : ZERO_EXTEND, mode, new);
8469 count = 0;
8470 continue;
8471 }
8472 break;
8473
8474 case SUBREG:
8475 /* If VAROP is a SUBREG, strip it as long as the inner operand has
8476 the same number of words as what we've seen so far. Then store
8477 the widest mode in MODE. */
f9e67232
RS
8478 if (subreg_lowpart_p (varop)
8479 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop)))
8480 > GET_MODE_SIZE (GET_MODE (varop)))
230d793d
RS
8481 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop)))
8482 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
8483 == mode_words))
8484 {
8485 varop = SUBREG_REG (varop);
8486 if (GET_MODE_SIZE (GET_MODE (varop)) > GET_MODE_SIZE (mode))
8487 mode = GET_MODE (varop);
8488 continue;
8489 }
8490 break;
8491
8492 case MULT:
8493 /* Some machines use MULT instead of ASHIFT because MULT
8494 is cheaper. But it is still better on those machines to
8495 merge two shifts into one. */
8496 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
8497 && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0)
8498 {
8499 varop = gen_binary (ASHIFT, GET_MODE (varop), XEXP (varop, 0),
5f4f0e22 8500 GEN_INT (exact_log2 (INTVAL (XEXP (varop, 1)))));;
230d793d
RS
8501 continue;
8502 }
8503 break;
8504
8505 case UDIV:
8506 /* Similar, for when divides are cheaper. */
8507 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
8508 && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0)
8509 {
8510 varop = gen_binary (LSHIFTRT, GET_MODE (varop), XEXP (varop, 0),
5f4f0e22 8511 GEN_INT (exact_log2 (INTVAL (XEXP (varop, 1)))));
230d793d
RS
8512 continue;
8513 }
8514 break;
8515
8516 case ASHIFTRT:
8517 /* If we are extracting just the sign bit of an arithmetic right
8518 shift, that shift is not needed. */
8519 if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1)
8520 {
8521 varop = XEXP (varop, 0);
8522 continue;
8523 }
8524
0f41302f 8525 /* ... fall through ... */
230d793d
RS
8526
8527 case LSHIFTRT:
8528 case ASHIFT:
230d793d
RS
8529 case ROTATE:
8530 /* Here we have two nested shifts. The result is usually the
8531 AND of a new shift with a mask. We compute the result below. */
8532 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
8533 && INTVAL (XEXP (varop, 1)) >= 0
8534 && INTVAL (XEXP (varop, 1)) < GET_MODE_BITSIZE (GET_MODE (varop))
5f4f0e22
CH
8535 && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
8536 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
230d793d
RS
8537 {
8538 enum rtx_code first_code = GET_CODE (varop);
8539 int first_count = INTVAL (XEXP (varop, 1));
5f4f0e22 8540 unsigned HOST_WIDE_INT mask;
230d793d 8541 rtx mask_rtx;
230d793d 8542
230d793d
RS
8543 /* We have one common special case. We can't do any merging if
8544 the inner code is an ASHIFTRT of a smaller mode. However, if
8545 we have (ashift:M1 (subreg:M1 (ashiftrt:M2 FOO C1) 0) C2)
8546 with C2 == GET_MODE_BITSIZE (M1) - GET_MODE_BITSIZE (M2),
8547 we can convert it to
8548 (ashiftrt:M1 (ashift:M1 (and:M1 (subreg:M1 FOO 0 C2) C3) C1).
8549 This simplifies certain SIGN_EXTEND operations. */
8550 if (code == ASHIFT && first_code == ASHIFTRT
8551 && (GET_MODE_BITSIZE (result_mode)
8552 - GET_MODE_BITSIZE (GET_MODE (varop))) == count)
8553 {
8554 /* C3 has the low-order C1 bits zero. */
8555
5f4f0e22
CH
8556 mask = (GET_MODE_MASK (mode)
8557 & ~ (((HOST_WIDE_INT) 1 << first_count) - 1));
230d793d 8558
5f4f0e22 8559 varop = simplify_and_const_int (NULL_RTX, result_mode,
230d793d 8560 XEXP (varop, 0), mask);
5f4f0e22 8561 varop = simplify_shift_const (NULL_RTX, ASHIFT, result_mode,
230d793d
RS
8562 varop, count);
8563 count = first_count;
8564 code = ASHIFTRT;
8565 continue;
8566 }
8567
d0ab8cd3
RK
8568 /* If this was (ashiftrt (ashift foo C1) C2) and FOO has more
8569 than C1 high-order bits equal to the sign bit, we can convert
8570 this to either an ASHIFT or a ASHIFTRT depending on the
8571 two counts.
230d793d
RS
8572
8573 We cannot do this if VAROP's mode is not SHIFT_MODE. */
8574
8575 if (code == ASHIFTRT && first_code == ASHIFT
8576 && GET_MODE (varop) == shift_mode
d0ab8cd3
RK
8577 && (num_sign_bit_copies (XEXP (varop, 0), shift_mode)
8578 > first_count))
230d793d 8579 {
d0ab8cd3
RK
8580 count -= first_count;
8581 if (count < 0)
8582 count = - count, code = ASHIFT;
8583 varop = XEXP (varop, 0);
8584 continue;
230d793d
RS
8585 }
8586
8587 /* There are some cases we can't do. If CODE is ASHIFTRT,
8588 we can only do this if FIRST_CODE is also ASHIFTRT.
8589
8590 We can't do the case when CODE is ROTATE and FIRST_CODE is
8591 ASHIFTRT.
8592
8593 If the mode of this shift is not the mode of the outer shift,
bdaae9a0 8594 we can't do this if either shift is a right shift or ROTATE.
230d793d
RS
8595
8596 Finally, we can't do any of these if the mode is too wide
8597 unless the codes are the same.
8598
8599 Handle the case where the shift codes are the same
8600 first. */
8601
8602 if (code == first_code)
8603 {
8604 if (GET_MODE (varop) != result_mode
bdaae9a0
RK
8605 && (code == ASHIFTRT || code == LSHIFTRT
8606 || code == ROTATE))
230d793d
RS
8607 break;
8608
8609 count += first_count;
8610 varop = XEXP (varop, 0);
8611 continue;
8612 }
8613
8614 if (code == ASHIFTRT
8615 || (code == ROTATE && first_code == ASHIFTRT)
5f4f0e22 8616 || GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT
230d793d 8617 || (GET_MODE (varop) != result_mode
bdaae9a0
RK
8618 && (first_code == ASHIFTRT || first_code == LSHIFTRT
8619 || first_code == ROTATE
230d793d
RS
8620 || code == ROTATE)))
8621 break;
8622
8623 /* To compute the mask to apply after the shift, shift the
951553af 8624 nonzero bits of the inner shift the same way the
230d793d
RS
8625 outer shift will. */
8626
951553af 8627 mask_rtx = GEN_INT (nonzero_bits (varop, GET_MODE (varop)));
230d793d
RS
8628
8629 mask_rtx
8630 = simplify_binary_operation (code, result_mode, mask_rtx,
5f4f0e22 8631 GEN_INT (count));
230d793d
RS
8632
8633 /* Give up if we can't compute an outer operation to use. */
8634 if (mask_rtx == 0
8635 || GET_CODE (mask_rtx) != CONST_INT
8636 || ! merge_outer_ops (&outer_op, &outer_const, AND,
8637 INTVAL (mask_rtx),
8638 result_mode, &complement_p))
8639 break;
8640
8641 /* If the shifts are in the same direction, we add the
8642 counts. Otherwise, we subtract them. */
8643 if ((code == ASHIFTRT || code == LSHIFTRT)
8644 == (first_code == ASHIFTRT || first_code == LSHIFTRT))
8645 count += first_count;
8646 else
8647 count -= first_count;
8648
8649 /* If COUNT is positive, the new shift is usually CODE,
8650 except for the two exceptions below, in which case it is
8651 FIRST_CODE. If the count is negative, FIRST_CODE should
8652 always be used */
8653 if (count > 0
8654 && ((first_code == ROTATE && code == ASHIFT)
8655 || (first_code == ASHIFTRT && code == LSHIFTRT)))
8656 code = first_code;
8657 else if (count < 0)
8658 code = first_code, count = - count;
8659
8660 varop = XEXP (varop, 0);
8661 continue;
8662 }
8663
8664 /* If we have (A << B << C) for any shift, we can convert this to
8665 (A << C << B). This wins if A is a constant. Only try this if
8666 B is not a constant. */
8667
8668 else if (GET_CODE (varop) == code
8669 && GET_CODE (XEXP (varop, 1)) != CONST_INT
8670 && 0 != (new
8671 = simplify_binary_operation (code, mode,
8672 XEXP (varop, 0),
5f4f0e22 8673 GEN_INT (count))))
230d793d
RS
8674 {
8675 varop = gen_rtx_combine (code, mode, new, XEXP (varop, 1));
8676 count = 0;
8677 continue;
8678 }
8679 break;
8680
8681 case NOT:
8682 /* Make this fit the case below. */
8683 varop = gen_rtx_combine (XOR, mode, XEXP (varop, 0),
5f4f0e22 8684 GEN_INT (GET_MODE_MASK (mode)));
230d793d
RS
8685 continue;
8686
8687 case IOR:
8688 case AND:
8689 case XOR:
8690 /* If we have (xshiftrt (ior (plus X (const_int -1)) X) C)
8691 with C the size of VAROP - 1 and the shift is logical if
8692 STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
8693 we have an (le X 0) operation. If we have an arithmetic shift
8694 and STORE_FLAG_VALUE is 1 or we have a logical shift with
8695 STORE_FLAG_VALUE of -1, we have a (neg (le X 0)) operation. */
8696
8697 if (GET_CODE (varop) == IOR && GET_CODE (XEXP (varop, 0)) == PLUS
8698 && XEXP (XEXP (varop, 0), 1) == constm1_rtx
8699 && (STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
8700 && (code == LSHIFTRT || code == ASHIFTRT)
8701 && count == GET_MODE_BITSIZE (GET_MODE (varop)) - 1
8702 && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
8703 {
8704 count = 0;
8705 varop = gen_rtx_combine (LE, GET_MODE (varop), XEXP (varop, 1),
8706 const0_rtx);
8707
8708 if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
8709 varop = gen_rtx_combine (NEG, GET_MODE (varop), varop);
8710
8711 continue;
8712 }
8713
8714 /* If we have (shift (logical)), move the logical to the outside
8715 to allow it to possibly combine with another logical and the
8716 shift to combine with another shift. This also canonicalizes to
8717 what a ZERO_EXTRACT looks like. Also, some machines have
8718 (and (shift)) insns. */
8719
8720 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
8721 && (new = simplify_binary_operation (code, result_mode,
8722 XEXP (varop, 1),
5f4f0e22 8723 GEN_INT (count))) != 0
7d171a1e 8724 && GET_CODE(new) == CONST_INT
230d793d
RS
8725 && merge_outer_ops (&outer_op, &outer_const, GET_CODE (varop),
8726 INTVAL (new), result_mode, &complement_p))
8727 {
8728 varop = XEXP (varop, 0);
8729 continue;
8730 }
8731
8732 /* If we can't do that, try to simplify the shift in each arm of the
8733 logical expression, make a new logical expression, and apply
8734 the inverse distributive law. */
8735 {
00d4ca1c 8736 rtx lhs = simplify_shift_const (NULL_RTX, code, shift_mode,
230d793d 8737 XEXP (varop, 0), count);
00d4ca1c 8738 rtx rhs = simplify_shift_const (NULL_RTX, code, shift_mode,
230d793d
RS
8739 XEXP (varop, 1), count);
8740
21a64bf1 8741 varop = gen_binary (GET_CODE (varop), shift_mode, lhs, rhs);
230d793d
RS
8742 varop = apply_distributive_law (varop);
8743
8744 count = 0;
8745 }
8746 break;
8747
8748 case EQ:
45620ed4 8749 /* convert (lshiftrt (eq FOO 0) C) to (xor FOO 1) if STORE_FLAG_VALUE
230d793d 8750 says that the sign bit can be tested, FOO has mode MODE, C is
45620ed4
RK
8751 GET_MODE_BITSIZE (MODE) - 1, and FOO has only its low-order bit
8752 that may be nonzero. */
8753 if (code == LSHIFTRT
230d793d
RS
8754 && XEXP (varop, 1) == const0_rtx
8755 && GET_MODE (XEXP (varop, 0)) == result_mode
8756 && count == GET_MODE_BITSIZE (result_mode) - 1
5f4f0e22 8757 && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
230d793d 8758 && ((STORE_FLAG_VALUE
5f4f0e22 8759 & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (result_mode) - 1))))
951553af 8760 && nonzero_bits (XEXP (varop, 0), result_mode) == 1
5f4f0e22
CH
8761 && merge_outer_ops (&outer_op, &outer_const, XOR,
8762 (HOST_WIDE_INT) 1, result_mode,
8763 &complement_p))
230d793d
RS
8764 {
8765 varop = XEXP (varop, 0);
8766 count = 0;
8767 continue;
8768 }
8769 break;
8770
8771 case NEG:
d0ab8cd3
RK
8772 /* (lshiftrt (neg A) C) where A is either 0 or 1 and C is one less
8773 than the number of bits in the mode is equivalent to A. */
8774 if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1
951553af 8775 && nonzero_bits (XEXP (varop, 0), result_mode) == 1)
230d793d 8776 {
d0ab8cd3 8777 varop = XEXP (varop, 0);
230d793d
RS
8778 count = 0;
8779 continue;
8780 }
8781
8782 /* NEG commutes with ASHIFT since it is multiplication. Move the
8783 NEG outside to allow shifts to combine. */
8784 if (code == ASHIFT
5f4f0e22
CH
8785 && merge_outer_ops (&outer_op, &outer_const, NEG,
8786 (HOST_WIDE_INT) 0, result_mode,
8787 &complement_p))
230d793d
RS
8788 {
8789 varop = XEXP (varop, 0);
8790 continue;
8791 }
8792 break;
8793
8794 case PLUS:
d0ab8cd3
RK
8795 /* (lshiftrt (plus A -1) C) where A is either 0 or 1 and C
8796 is one less than the number of bits in the mode is
8797 equivalent to (xor A 1). */
230d793d
RS
8798 if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1
8799 && XEXP (varop, 1) == constm1_rtx
951553af 8800 && nonzero_bits (XEXP (varop, 0), result_mode) == 1
5f4f0e22
CH
8801 && merge_outer_ops (&outer_op, &outer_const, XOR,
8802 (HOST_WIDE_INT) 1, result_mode,
8803 &complement_p))
230d793d
RS
8804 {
8805 count = 0;
8806 varop = XEXP (varop, 0);
8807 continue;
8808 }
8809
3f508eca 8810 /* If we have (xshiftrt (plus FOO BAR) C), and the only bits
951553af 8811 that might be nonzero in BAR are those being shifted out and those
3f508eca
RK
8812 bits are known zero in FOO, we can replace the PLUS with FOO.
8813 Similarly in the other operand order. This code occurs when
8814 we are computing the size of a variable-size array. */
8815
8816 if ((code == ASHIFTRT || code == LSHIFTRT)
5f4f0e22 8817 && count < HOST_BITS_PER_WIDE_INT
951553af
RK
8818 && nonzero_bits (XEXP (varop, 1), result_mode) >> count == 0
8819 && (nonzero_bits (XEXP (varop, 1), result_mode)
8820 & nonzero_bits (XEXP (varop, 0), result_mode)) == 0)
3f508eca
RK
8821 {
8822 varop = XEXP (varop, 0);
8823 continue;
8824 }
8825 else if ((code == ASHIFTRT || code == LSHIFTRT)
5f4f0e22 8826 && count < HOST_BITS_PER_WIDE_INT
ac49a949 8827 && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
951553af 8828 && 0 == (nonzero_bits (XEXP (varop, 0), result_mode)
3f508eca 8829 >> count)
951553af
RK
8830 && 0 == (nonzero_bits (XEXP (varop, 0), result_mode)
8831 & nonzero_bits (XEXP (varop, 1),
3f508eca
RK
8832 result_mode)))
8833 {
8834 varop = XEXP (varop, 1);
8835 continue;
8836 }
8837
230d793d
RS
8838 /* (ashift (plus foo C) N) is (plus (ashift foo N) C'). */
8839 if (code == ASHIFT
8840 && GET_CODE (XEXP (varop, 1)) == CONST_INT
8841 && (new = simplify_binary_operation (ASHIFT, result_mode,
8842 XEXP (varop, 1),
5f4f0e22 8843 GEN_INT (count))) != 0
7d171a1e 8844 && GET_CODE(new) == CONST_INT
230d793d
RS
8845 && merge_outer_ops (&outer_op, &outer_const, PLUS,
8846 INTVAL (new), result_mode, &complement_p))
8847 {
8848 varop = XEXP (varop, 0);
8849 continue;
8850 }
8851 break;
8852
8853 case MINUS:
8854 /* If we have (xshiftrt (minus (ashiftrt X C)) X) C)
8855 with C the size of VAROP - 1 and the shift is logical if
8856 STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
8857 we have a (gt X 0) operation. If the shift is arithmetic with
8858 STORE_FLAG_VALUE of 1 or logical with STORE_FLAG_VALUE == -1,
8859 we have a (neg (gt X 0)) operation. */
8860
0802d516
RK
8861 if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
8862 && GET_CODE (XEXP (varop, 0)) == ASHIFTRT
230d793d 8863 && count == GET_MODE_BITSIZE (GET_MODE (varop)) - 1
230d793d
RS
8864 && (code == LSHIFTRT || code == ASHIFTRT)
8865 && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT
8866 && INTVAL (XEXP (XEXP (varop, 0), 1)) == count
8867 && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
8868 {
8869 count = 0;
8870 varop = gen_rtx_combine (GT, GET_MODE (varop), XEXP (varop, 1),
8871 const0_rtx);
8872
8873 if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
8874 varop = gen_rtx_combine (NEG, GET_MODE (varop), varop);
8875
8876 continue;
8877 }
8878 break;
6e0ef100
JC
8879
8880 case TRUNCATE:
8881 /* Change (lshiftrt (truncate (lshiftrt))) to (truncate (lshiftrt))
8882 if the truncate does not affect the value. */
8883 if (code == LSHIFTRT
8884 && GET_CODE (XEXP (varop, 0)) == LSHIFTRT
8885 && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT
8886 && (INTVAL (XEXP (XEXP (varop, 0), 1))
b577a8ff
JL
8887 >= (GET_MODE_BITSIZE (GET_MODE (XEXP (varop, 0)))
8888 - GET_MODE_BITSIZE (GET_MODE (varop)))))
6e0ef100
JC
8889 {
8890 rtx varop_inner = XEXP (varop, 0);
8891
8892 varop_inner = gen_rtx_combine (LSHIFTRT,
8893 GET_MODE (varop_inner),
8894 XEXP (varop_inner, 0),
8895 GEN_INT (count + INTVAL (XEXP (varop_inner, 1))));
8896 varop = gen_rtx_combine (TRUNCATE, GET_MODE (varop),
8897 varop_inner);
8898 count = 0;
8899 continue;
8900 }
8901 break;
e9a25f70
JL
8902
8903 default:
8904 break;
230d793d
RS
8905 }
8906
8907 break;
8908 }
8909
8910 /* We need to determine what mode to do the shift in. If the shift is
f6789c77
RK
8911 a right shift or ROTATE, we must always do it in the mode it was
8912 originally done in. Otherwise, we can do it in MODE, the widest mode
8913 encountered. The code we care about is that of the shift that will
8914 actually be done, not the shift that was originally requested. */
8915 shift_mode
8916 = (code == ASHIFTRT || code == LSHIFTRT || code == ROTATE
8917 ? result_mode : mode);
230d793d
RS
8918
8919 /* We have now finished analyzing the shift. The result should be
8920 a shift of type CODE with SHIFT_MODE shifting VAROP COUNT places. If
8921 OUTER_OP is non-NIL, it is an operation that needs to be applied
8922 to the result of the shift. OUTER_CONST is the relevant constant,
8923 but we must turn off all bits turned off in the shift.
8924
8925 If we were passed a value for X, see if we can use any pieces of
8926 it. If not, make new rtx. */
8927
8928 if (x && GET_RTX_CLASS (GET_CODE (x)) == '2'
8929 && GET_CODE (XEXP (x, 1)) == CONST_INT
8930 && INTVAL (XEXP (x, 1)) == count)
8931 const_rtx = XEXP (x, 1);
8932 else
5f4f0e22 8933 const_rtx = GEN_INT (count);
230d793d
RS
8934
8935 if (x && GET_CODE (XEXP (x, 0)) == SUBREG
8936 && GET_MODE (XEXP (x, 0)) == shift_mode
8937 && SUBREG_REG (XEXP (x, 0)) == varop)
8938 varop = XEXP (x, 0);
8939 else if (GET_MODE (varop) != shift_mode)
8940 varop = gen_lowpart_for_combine (shift_mode, varop);
8941
0f41302f 8942 /* If we can't make the SUBREG, try to return what we were given. */
230d793d
RS
8943 if (GET_CODE (varop) == CLOBBER)
8944 return x ? x : varop;
8945
8946 new = simplify_binary_operation (code, shift_mode, varop, const_rtx);
8947 if (new != 0)
8948 x = new;
8949 else
8950 {
8951 if (x == 0 || GET_CODE (x) != code || GET_MODE (x) != shift_mode)
8952 x = gen_rtx_combine (code, shift_mode, varop, const_rtx);
8953
8954 SUBST (XEXP (x, 0), varop);
8955 SUBST (XEXP (x, 1), const_rtx);
8956 }
8957
224eeff2
RK
8958 /* If we have an outer operation and we just made a shift, it is
8959 possible that we could have simplified the shift were it not
8960 for the outer operation. So try to do the simplification
8961 recursively. */
8962
8963 if (outer_op != NIL && GET_CODE (x) == code
8964 && GET_CODE (XEXP (x, 1)) == CONST_INT)
8965 x = simplify_shift_const (x, code, shift_mode, XEXP (x, 0),
8966 INTVAL (XEXP (x, 1)));
8967
230d793d
RS
8968 /* If we were doing a LSHIFTRT in a wider mode than it was originally,
8969 turn off all the bits that the shift would have turned off. */
8970 if (orig_code == LSHIFTRT && result_mode != shift_mode)
5f4f0e22 8971 x = simplify_and_const_int (NULL_RTX, shift_mode, x,
230d793d
RS
8972 GET_MODE_MASK (result_mode) >> orig_count);
8973
8974 /* Do the remainder of the processing in RESULT_MODE. */
8975 x = gen_lowpart_for_combine (result_mode, x);
8976
8977 /* If COMPLEMENT_P is set, we have to complement X before doing the outer
8978 operation. */
8979 if (complement_p)
0c1c8ea6 8980 x = gen_unary (NOT, result_mode, result_mode, x);
230d793d
RS
8981
8982 if (outer_op != NIL)
8983 {
5f4f0e22 8984 if (GET_MODE_BITSIZE (result_mode) < HOST_BITS_PER_WIDE_INT)
9fa6d012
TG
8985 {
8986 int width = GET_MODE_BITSIZE (result_mode);
8987
8988 outer_const &= GET_MODE_MASK (result_mode);
8989
8990 /* If this would be an entire word for the target, but is not for
8991 the host, then sign-extend on the host so that the number will
8992 look the same way on the host that it would on the target.
8993
8994 For example, when building a 64 bit alpha hosted 32 bit sparc
8995 targeted compiler, then we want the 32 bit unsigned value -1 to be
8996 represented as a 64 bit value -1, and not as 0x00000000ffffffff.
8997 The later confuses the sparc backend. */
8998
8999 if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT && BITS_PER_WORD == width
9000 && (outer_const & ((HOST_WIDE_INT) 1 << (width - 1))))
9001 outer_const |= ((HOST_WIDE_INT) (-1) << width);
9002 }
230d793d
RS
9003
9004 if (outer_op == AND)
5f4f0e22 9005 x = simplify_and_const_int (NULL_RTX, result_mode, x, outer_const);
230d793d
RS
9006 else if (outer_op == SET)
9007 /* This means that we have determined that the result is
9008 equivalent to a constant. This should be rare. */
5f4f0e22 9009 x = GEN_INT (outer_const);
230d793d 9010 else if (GET_RTX_CLASS (outer_op) == '1')
0c1c8ea6 9011 x = gen_unary (outer_op, result_mode, result_mode, x);
230d793d 9012 else
5f4f0e22 9013 x = gen_binary (outer_op, result_mode, x, GEN_INT (outer_const));
230d793d
RS
9014 }
9015
9016 return x;
9017}
9018\f
/* Like recog, but we receive the address of a pointer to a new pattern.
   We try to match the rtx that the pointer points to.
   If that fails, we may try to modify or replace the pattern,
   storing the replacement into the same pointer object.

   Modifications include deletion or addition of CLOBBERs.

   PNOTES is a pointer to a location where any REG_UNUSED notes added for
   the CLOBBERs are placed.

   PADDED_SCRATCHES is set to the number of (clobber (scratch)) patterns
   we had to add.

   The value is the final insn code from the pattern ultimately matched,
   or -1.  */

static int
recog_for_combine (pnewpat, insn, pnotes, padded_scratches)
     rtx *pnewpat;
     rtx insn;
     rtx *pnotes;
     int *padded_scratches;
{
  register rtx pat = *pnewpat;
  int insn_code_number;
  int num_clobbers_to_add = 0;
  int i;
  rtx notes = 0;

  *padded_scratches = 0;

  /* If PAT is a PARALLEL, check to see if it contains the CLOBBER
     we use to indicate that something didn't match.  If we find such a
     thing, force rejection.  */
  if (GET_CODE (pat) == PARALLEL)
    for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
      if (GET_CODE (XVECEXP (pat, 0, i)) == CLOBBER
	  && XEXP (XVECEXP (pat, 0, i), 0) == const0_rtx)
	return -1;

  /* Is the result of combination a valid instruction?  */
  insn_code_number = recog (pat, insn, &num_clobbers_to_add);

  /* If it isn't, there is the possibility that we previously had an insn
     that clobbered some register as a side effect, but the combined
     insn doesn't need to do that.  So try once more without the clobbers
     unless this represents an ASM insn.  */

  if (insn_code_number < 0 && ! check_asm_operands (pat)
      && GET_CODE (pat) == PARALLEL)
    {
      int pos;

      /* Compact the PARALLEL in place, keeping only the non-CLOBBER
	 elements; POS counts the survivors.  SUBST records each move in
	 the undo buffer so the change can be backed out.  */
      for (pos = 0, i = 0; i < XVECLEN (pat, 0); i++)
	if (GET_CODE (XVECEXP (pat, 0, i)) != CLOBBER)
	  {
	    if (i != pos)
	      SUBST (XVECEXP (pat, 0, pos), XVECEXP (pat, 0, i));
	    pos++;
	  }

      SUBST_INT (XVECLEN (pat, 0), pos);

      /* A one-element PARALLEL collapses to its single element.  */
      if (pos == 1)
	pat = XVECEXP (pat, 0, 0);

      insn_code_number = recog (pat, insn, &num_clobbers_to_add);
    }

  /* If we had any clobbers to add, make a new pattern that contains
     them.  Then check to make sure that all of them are dead.  */
  if (num_clobbers_to_add)
    {
      rtx newpat = gen_rtx_PARALLEL (VOIDmode,
				     gen_rtvec (GET_CODE (pat) == PARALLEL
						? XVECLEN (pat, 0) + num_clobbers_to_add
						: num_clobbers_to_add + 1));

      if (GET_CODE (pat) == PARALLEL)
	for (i = 0; i < XVECLEN (pat, 0); i++)
	  XVECEXP (newpat, 0, i) = XVECEXP (pat, 0, i);
      else
	XVECEXP (newpat, 0, 0) = pat;

      add_clobbers (newpat, insn_code_number);

      /* Each register clobbered only as a side effect must be dead after
	 this insn, otherwise the combination is invalid.  Count SCRATCH
	 clobbers for the caller and accumulate REG_UNUSED notes.  */
      for (i = XVECLEN (newpat, 0) - num_clobbers_to_add;
	   i < XVECLEN (newpat, 0); i++)
	{
	  if (GET_CODE (XEXP (XVECEXP (newpat, 0, i), 0)) == REG
	      && ! reg_dead_at_p (XEXP (XVECEXP (newpat, 0, i), 0), insn))
	    return -1;
	  else if (GET_CODE (XEXP (XVECEXP (newpat, 0, i), 0)) == SCRATCH)
	    (*padded_scratches)++;
	  notes = gen_rtx_EXPR_LIST (REG_UNUSED,
				     XEXP (XVECEXP (newpat, 0, i), 0), notes);
	}
      pat = newpat;
    }

  *pnewpat = pat;
  *pnotes = notes;

  return insn_code_number;
}
9124\f
/* Like gen_lowpart but for use by combine.  In combine it is not possible
   to create any new pseudoregs.  However, it is safe to create
   invalid memory addresses, because combine will try to recognize
   them and all they will do is make the combine attempt fail.

   If for some reason this cannot do its job, an rtx
   (clobber (const_int 0)) is returned.
   An insn containing that will not be recognized.  */

#undef gen_lowpart

static rtx
gen_lowpart_for_combine (mode, x)
     enum machine_mode mode;
     register rtx x;
{
  rtx result;

  /* Already in the requested mode: nothing to do.  */
  if (GET_MODE (x) == mode)
    return x;

  /* We can only support MODE being wider than a word if X is a
     constant integer or has a mode the same size.  */

  if (GET_MODE_SIZE (mode) > UNITS_PER_WORD
      && ! ((GET_MODE (x) == VOIDmode
	     && (GET_CODE (x) == CONST_INT
		 || GET_CODE (x) == CONST_DOUBLE))
	    || GET_MODE_SIZE (GET_MODE (x)) == GET_MODE_SIZE (mode)))
    return gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);

  /* X might be a paradoxical (subreg (mem)).  In that case, gen_lowpart
     won't know what to do.  So we will strip off the SUBREG here and
     process normally.  */
  if (GET_CODE (x) == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
    {
      x = SUBREG_REG (x);
      if (GET_MODE (x) == mode)
	return x;
    }

  result = gen_lowpart_common (mode, x);
  /* If the common routine gave back a SUBREG of a pseudo in a different
     size, record that the pseudo's size effectively changed.  */
  if (result != 0
      && GET_CODE (result) == SUBREG
      && GET_CODE (SUBREG_REG (result)) == REG
      && REGNO (SUBREG_REG (result)) >= FIRST_PSEUDO_REGISTER
      && (GET_MODE_SIZE (GET_MODE (result))
	  != GET_MODE_SIZE (GET_MODE (SUBREG_REG (result)))))
    REG_CHANGES_SIZE (REGNO (SUBREG_REG (result))) = 1;

  if (result)
    return result;

  if (GET_CODE (x) == MEM)
    {
      register int offset = 0;
      rtx new;

      /* Refuse to work on a volatile memory ref or one with a mode-dependent
	 address.  */
      if (MEM_VOLATILE_P (x) || mode_dependent_address_p (XEXP (x, 0)))
	return gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);

      /* If we want to refer to something bigger than the original memref,
	 generate a perverse subreg instead.  That will force a reload
	 of the original memref X.  */
      if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (mode))
	return gen_rtx_SUBREG (mode, x, 0);

      /* Compute the byte offset of the low part for big-endian layouts.  */
      if (WORDS_BIG_ENDIAN)
	offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
		  - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
      if (BYTES_BIG_ENDIAN)
	{
	  /* Adjust the address so that the address-after-the-data is
	     unchanged.  */
	  offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
		     - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
	}
      new = gen_rtx_MEM (mode, plus_constant (XEXP (x, 0), offset));
      /* Carry the original memory reference's attribute bits over.  */
      RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
      MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x);
      MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
      return new;
    }

  /* If X is a comparison operator, rewrite it in a new mode.  This
     probably won't match, but may allow further simplifications.  */
  else if (GET_RTX_CLASS (GET_CODE (x)) == '<')
    return gen_rtx_combine (GET_CODE (x), mode, XEXP (x, 0), XEXP (x, 1));

  /* If we couldn't simplify X any other way, just enclose it in a
     SUBREG.  Normally, this SUBREG won't match, but some patterns may
     include an explicit SUBREG or we may simplify it further in combine.  */
  else
    {
      int word = 0;

      /* Pick the word holding the low part when X spans multiple words
	 on a word-big-endian target.  */
      if (WORDS_BIG_ENDIAN && GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD)
	word = ((GET_MODE_SIZE (GET_MODE (x))
		 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD))
		/ UNITS_PER_WORD);
      return gen_rtx_SUBREG (mode, x, word);
    }
}
9230\f
9231/* Make an rtx expression. This is a subset of gen_rtx and only supports
9232 expressions of 1, 2, or 3 operands, each of which are rtx expressions.
9233
9234 If the identical expression was previously in the insn (in the undobuf),
9235 it will be returned. Only if it is not found will a new expression
9236 be made. */
9237
9238/*VARARGS2*/
9239static rtx
4f90e4a0 9240gen_rtx_combine VPROTO((enum rtx_code code, enum machine_mode mode, ...))
230d793d 9241{
4f90e4a0 9242#ifndef __STDC__
230d793d
RS
9243 enum rtx_code code;
9244 enum machine_mode mode;
4f90e4a0
RK
9245#endif
9246 va_list p;
230d793d
RS
9247 int n_args;
9248 rtx args[3];
b729186a 9249 int j;
230d793d
RS
9250 char *fmt;
9251 rtx rt;
241cea85 9252 struct undo *undo;
230d793d 9253
4f90e4a0
RK
9254 VA_START (p, mode);
9255
9256#ifndef __STDC__
230d793d
RS
9257 code = va_arg (p, enum rtx_code);
9258 mode = va_arg (p, enum machine_mode);
4f90e4a0
RK
9259#endif
9260
230d793d
RS
9261 n_args = GET_RTX_LENGTH (code);
9262 fmt = GET_RTX_FORMAT (code);
9263
9264 if (n_args == 0 || n_args > 3)
9265 abort ();
9266
9267 /* Get each arg and verify that it is supposed to be an expression. */
9268 for (j = 0; j < n_args; j++)
9269 {
9270 if (*fmt++ != 'e')
9271 abort ();
9272
9273 args[j] = va_arg (p, rtx);
9274 }
9275
9276 /* See if this is in undobuf. Be sure we don't use objects that came
9277 from another insn; this could produce circular rtl structures. */
9278
241cea85
RK
9279 for (undo = undobuf.undos; undo != undobuf.previous_undos; undo = undo->next)
9280 if (!undo->is_int
9281 && GET_CODE (undo->old_contents.r) == code
9282 && GET_MODE (undo->old_contents.r) == mode)
230d793d
RS
9283 {
9284 for (j = 0; j < n_args; j++)
241cea85 9285 if (XEXP (undo->old_contents.r, j) != args[j])
230d793d
RS
9286 break;
9287
9288 if (j == n_args)
241cea85 9289 return undo->old_contents.r;
230d793d
RS
9290 }
9291
9292 /* Otherwise make a new rtx. We know we have 1, 2, or 3 args.
9293 Use rtx_alloc instead of gen_rtx because it's faster on RISC. */
9294 rt = rtx_alloc (code);
9295 PUT_MODE (rt, mode);
9296 XEXP (rt, 0) = args[0];
9297 if (n_args > 1)
9298 {
9299 XEXP (rt, 1) = args[1];
9300 if (n_args > 2)
9301 XEXP (rt, 2) = args[2];
9302 }
9303 return rt;
9304}
9305
9306/* These routines make binary and unary operations by first seeing if they
9307 fold; if not, a new expression is allocated. */
9308
9309static rtx
9310gen_binary (code, mode, op0, op1)
9311 enum rtx_code code;
9312 enum machine_mode mode;
9313 rtx op0, op1;
9314{
9315 rtx result;
1a26b032
RK
9316 rtx tem;
9317
9318 if (GET_RTX_CLASS (code) == 'c'
9319 && (GET_CODE (op0) == CONST_INT
9320 || (CONSTANT_P (op0) && GET_CODE (op1) != CONST_INT)))
9321 tem = op0, op0 = op1, op1 = tem;
230d793d
RS
9322
9323 if (GET_RTX_CLASS (code) == '<')
9324 {
9325 enum machine_mode op_mode = GET_MODE (op0);
9210df58
RK
9326
9327 /* Strip the COMPARE from (REL_OP (compare X Y) 0) to get
0f41302f 9328 just (REL_OP X Y). */
9210df58
RK
9329 if (GET_CODE (op0) == COMPARE && op1 == const0_rtx)
9330 {
9331 op1 = XEXP (op0, 1);
9332 op0 = XEXP (op0, 0);
9333 op_mode = GET_MODE (op0);
9334 }
9335
230d793d
RS
9336 if (op_mode == VOIDmode)
9337 op_mode = GET_MODE (op1);
9338 result = simplify_relational_operation (code, op_mode, op0, op1);
9339 }
9340 else
9341 result = simplify_binary_operation (code, mode, op0, op1);
9342
9343 if (result)
9344 return result;
9345
9346 /* Put complex operands first and constants second. */
9347 if (GET_RTX_CLASS (code) == 'c'
9348 && ((CONSTANT_P (op0) && GET_CODE (op1) != CONST_INT)
9349 || (GET_RTX_CLASS (GET_CODE (op0)) == 'o'
9350 && GET_RTX_CLASS (GET_CODE (op1)) != 'o')
9351 || (GET_CODE (op0) == SUBREG
9352 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (op0))) == 'o'
9353 && GET_RTX_CLASS (GET_CODE (op1)) != 'o')))
9354 return gen_rtx_combine (code, mode, op1, op0);
9355
e5e809f4
JL
9356 /* If we are turning off bits already known off in OP0, we need not do
9357 an AND. */
9358 else if (code == AND && GET_CODE (op1) == CONST_INT
9359 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
9360 && (nonzero_bits (op0, mode) & ~ INTVAL (op1)) == 0)
9361 return op0;
9362
230d793d
RS
9363 return gen_rtx_combine (code, mode, op0, op1);
9364}
9365
9366static rtx
0c1c8ea6 9367gen_unary (code, mode, op0_mode, op0)
230d793d 9368 enum rtx_code code;
0c1c8ea6 9369 enum machine_mode mode, op0_mode;
230d793d
RS
9370 rtx op0;
9371{
0c1c8ea6 9372 rtx result = simplify_unary_operation (code, mode, op0, op0_mode);
230d793d
RS
9373
9374 if (result)
9375 return result;
9376
9377 return gen_rtx_combine (code, mode, op0);
9378}
9379\f
9380/* Simplify a comparison between *POP0 and *POP1 where CODE is the
9381 comparison code that will be tested.
9382
9383 The result is a possibly different comparison code to use. *POP0 and
9384 *POP1 may be updated.
9385
9386 It is possible that we might detect that a comparison is either always
9387 true or always false. However, we do not perform general constant
5089e22e 9388 folding in combine, so this knowledge isn't useful. Such tautologies
230d793d
RS
9389 should have been detected earlier. Hence we ignore all such cases. */
9390
9391static enum rtx_code
9392simplify_comparison (code, pop0, pop1)
9393 enum rtx_code code;
9394 rtx *pop0;
9395 rtx *pop1;
9396{
9397 rtx op0 = *pop0;
9398 rtx op1 = *pop1;
9399 rtx tem, tem1;
9400 int i;
9401 enum machine_mode mode, tmode;
9402
9403 /* Try a few ways of applying the same transformation to both operands. */
9404 while (1)
9405 {
3a19aabc
RK
9406#ifndef WORD_REGISTER_OPERATIONS
9407 /* The test below this one won't handle SIGN_EXTENDs on these machines,
9408 so check specially. */
9409 if (code != GTU && code != GEU && code != LTU && code != LEU
9410 && GET_CODE (op0) == ASHIFTRT && GET_CODE (op1) == ASHIFTRT
9411 && GET_CODE (XEXP (op0, 0)) == ASHIFT
9412 && GET_CODE (XEXP (op1, 0)) == ASHIFT
9413 && GET_CODE (XEXP (XEXP (op0, 0), 0)) == SUBREG
9414 && GET_CODE (XEXP (XEXP (op1, 0), 0)) == SUBREG
9415 && (GET_MODE (SUBREG_REG (XEXP (XEXP (op0, 0), 0)))
ad25ba17 9416 == GET_MODE (SUBREG_REG (XEXP (XEXP (op1, 0), 0))))
3a19aabc
RK
9417 && GET_CODE (XEXP (op0, 1)) == CONST_INT
9418 && GET_CODE (XEXP (op1, 1)) == CONST_INT
9419 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
9420 && GET_CODE (XEXP (XEXP (op1, 0), 1)) == CONST_INT
9421 && INTVAL (XEXP (op0, 1)) == INTVAL (XEXP (op1, 1))
9422 && INTVAL (XEXP (op0, 1)) == INTVAL (XEXP (XEXP (op0, 0), 1))
9423 && INTVAL (XEXP (op0, 1)) == INTVAL (XEXP (XEXP (op1, 0), 1))
9424 && (INTVAL (XEXP (op0, 1))
9425 == (GET_MODE_BITSIZE (GET_MODE (op0))
9426 - (GET_MODE_BITSIZE
9427 (GET_MODE (SUBREG_REG (XEXP (XEXP (op0, 0), 0))))))))
9428 {
9429 op0 = SUBREG_REG (XEXP (XEXP (op0, 0), 0));
9430 op1 = SUBREG_REG (XEXP (XEXP (op1, 0), 0));
9431 }
9432#endif
9433
230d793d
RS
9434 /* If both operands are the same constant shift, see if we can ignore the
9435 shift. We can if the shift is a rotate or if the bits shifted out of
951553af 9436 this shift are known to be zero for both inputs and if the type of
230d793d 9437 comparison is compatible with the shift. */
67232b23
RK
9438 if (GET_CODE (op0) == GET_CODE (op1)
9439 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
9440 && ((GET_CODE (op0) == ROTATE && (code == NE || code == EQ))
45620ed4 9441 || ((GET_CODE (op0) == LSHIFTRT || GET_CODE (op0) == ASHIFT)
67232b23
RK
9442 && (code != GT && code != LT && code != GE && code != LE))
9443 || (GET_CODE (op0) == ASHIFTRT
9444 && (code != GTU && code != LTU
9445 && code != GEU && code != GEU)))
9446 && GET_CODE (XEXP (op0, 1)) == CONST_INT
9447 && INTVAL (XEXP (op0, 1)) >= 0
9448 && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
9449 && XEXP (op0, 1) == XEXP (op1, 1))
230d793d
RS
9450 {
9451 enum machine_mode mode = GET_MODE (op0);
5f4f0e22 9452 unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
230d793d
RS
9453 int shift_count = INTVAL (XEXP (op0, 1));
9454
9455 if (GET_CODE (op0) == LSHIFTRT || GET_CODE (op0) == ASHIFTRT)
9456 mask &= (mask >> shift_count) << shift_count;
45620ed4 9457 else if (GET_CODE (op0) == ASHIFT)
230d793d
RS
9458 mask = (mask & (mask << shift_count)) >> shift_count;
9459
951553af
RK
9460 if ((nonzero_bits (XEXP (op0, 0), mode) & ~ mask) == 0
9461 && (nonzero_bits (XEXP (op1, 0), mode) & ~ mask) == 0)
230d793d
RS
9462 op0 = XEXP (op0, 0), op1 = XEXP (op1, 0);
9463 else
9464 break;
9465 }
9466
9467 /* If both operands are AND's of a paradoxical SUBREG by constant, the
9468 SUBREGs are of the same mode, and, in both cases, the AND would
9469 be redundant if the comparison was done in the narrower mode,
9470 do the comparison in the narrower mode (e.g., we are AND'ing with 1
951553af
RK
9471 and the operand's possibly nonzero bits are 0xffffff01; in that case
9472 if we only care about QImode, we don't need the AND). This case
9473 occurs if the output mode of an scc insn is not SImode and
7e4dc511
RK
9474 STORE_FLAG_VALUE == 1 (e.g., the 386).
9475
9476 Similarly, check for a case where the AND's are ZERO_EXTEND
9477 operations from some narrower mode even though a SUBREG is not
9478 present. */
230d793d
RS
9479
9480 else if (GET_CODE (op0) == AND && GET_CODE (op1) == AND
9481 && GET_CODE (XEXP (op0, 1)) == CONST_INT
7e4dc511 9482 && GET_CODE (XEXP (op1, 1)) == CONST_INT)
230d793d 9483 {
7e4dc511
RK
9484 rtx inner_op0 = XEXP (op0, 0);
9485 rtx inner_op1 = XEXP (op1, 0);
9486 HOST_WIDE_INT c0 = INTVAL (XEXP (op0, 1));
9487 HOST_WIDE_INT c1 = INTVAL (XEXP (op1, 1));
9488 int changed = 0;
9489
9490 if (GET_CODE (inner_op0) == SUBREG && GET_CODE (inner_op1) == SUBREG
9491 && (GET_MODE_SIZE (GET_MODE (inner_op0))
9492 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (inner_op0))))
9493 && (GET_MODE (SUBREG_REG (inner_op0))
9494 == GET_MODE (SUBREG_REG (inner_op1)))
729a2bc6 9495 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (inner_op0)))
7e4dc511 9496 <= HOST_BITS_PER_WIDE_INT)
01c82bbb 9497 && (0 == ((~c0) & nonzero_bits (SUBREG_REG (inner_op0),
729a2bc6 9498 GET_MODE (SUBREG_REG (inner_op0)))))
01c82bbb
RK
9499 && (0 == ((~c1) & nonzero_bits (SUBREG_REG (inner_op1),
9500 GET_MODE (SUBREG_REG (inner_op1))))))
7e4dc511
RK
9501 {
9502 op0 = SUBREG_REG (inner_op0);
9503 op1 = SUBREG_REG (inner_op1);
9504
9505 /* The resulting comparison is always unsigned since we masked
0f41302f 9506 off the original sign bit. */
7e4dc511
RK
9507 code = unsigned_condition (code);
9508
9509 changed = 1;
9510 }
230d793d 9511
7e4dc511
RK
9512 else if (c0 == c1)
9513 for (tmode = GET_CLASS_NARROWEST_MODE
9514 (GET_MODE_CLASS (GET_MODE (op0)));
9515 tmode != GET_MODE (op0); tmode = GET_MODE_WIDER_MODE (tmode))
9516 if (c0 == GET_MODE_MASK (tmode))
9517 {
9518 op0 = gen_lowpart_for_combine (tmode, inner_op0);
9519 op1 = gen_lowpart_for_combine (tmode, inner_op1);
66415c8b 9520 code = unsigned_condition (code);
7e4dc511
RK
9521 changed = 1;
9522 break;
9523 }
9524
9525 if (! changed)
9526 break;
230d793d 9527 }
3a19aabc 9528
ad25ba17
RK
9529 /* If both operands are NOT, we can strip off the outer operation
9530 and adjust the comparison code for swapped operands; similarly for
9531 NEG, except that this must be an equality comparison. */
9532 else if ((GET_CODE (op0) == NOT && GET_CODE (op1) == NOT)
9533 || (GET_CODE (op0) == NEG && GET_CODE (op1) == NEG
9534 && (code == EQ || code == NE)))
9535 op0 = XEXP (op0, 0), op1 = XEXP (op1, 0), code = swap_condition (code);
3a19aabc 9536
230d793d
RS
9537 else
9538 break;
9539 }
9540
9541 /* If the first operand is a constant, swap the operands and adjust the
3aceff0d
RK
9542 comparison code appropriately, but don't do this if the second operand
9543 is already a constant integer. */
9544 if (CONSTANT_P (op0) && GET_CODE (op1) != CONST_INT)
230d793d
RS
9545 {
9546 tem = op0, op0 = op1, op1 = tem;
9547 code = swap_condition (code);
9548 }
9549
9550 /* We now enter a loop during which we will try to simplify the comparison.
9551 For the most part, we only are concerned with comparisons with zero,
9552 but some things may really be comparisons with zero but not start
9553 out looking that way. */
9554
9555 while (GET_CODE (op1) == CONST_INT)
9556 {
9557 enum machine_mode mode = GET_MODE (op0);
9558 int mode_width = GET_MODE_BITSIZE (mode);
5f4f0e22 9559 unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
230d793d
RS
9560 int equality_comparison_p;
9561 int sign_bit_comparison_p;
9562 int unsigned_comparison_p;
5f4f0e22 9563 HOST_WIDE_INT const_op;
230d793d
RS
9564
9565 /* We only want to handle integral modes. This catches VOIDmode,
9566 CCmode, and the floating-point modes. An exception is that we
9567 can handle VOIDmode if OP0 is a COMPARE or a comparison
9568 operation. */
9569
9570 if (GET_MODE_CLASS (mode) != MODE_INT
9571 && ! (mode == VOIDmode
9572 && (GET_CODE (op0) == COMPARE
9573 || GET_RTX_CLASS (GET_CODE (op0)) == '<')))
9574 break;
9575
9576 /* Get the constant we are comparing against and turn off all bits
9577 not on in our mode. */
9578 const_op = INTVAL (op1);
5f4f0e22 9579 if (mode_width <= HOST_BITS_PER_WIDE_INT)
4803a34a 9580 const_op &= mask;
230d793d
RS
9581
9582 /* If we are comparing against a constant power of two and the value
951553af 9583 being compared can only have that single bit nonzero (e.g., it was
230d793d
RS
9584 `and'ed with that bit), we can replace this with a comparison
9585 with zero. */
9586 if (const_op
9587 && (code == EQ || code == NE || code == GE || code == GEU
9588 || code == LT || code == LTU)
5f4f0e22 9589 && mode_width <= HOST_BITS_PER_WIDE_INT
230d793d 9590 && exact_log2 (const_op) >= 0
951553af 9591 && nonzero_bits (op0, mode) == const_op)
230d793d
RS
9592 {
9593 code = (code == EQ || code == GE || code == GEU ? NE : EQ);
9594 op1 = const0_rtx, const_op = 0;
9595 }
9596
d0ab8cd3
RK
9597 /* Similarly, if we are comparing a value known to be either -1 or
9598 0 with -1, change it to the opposite comparison against zero. */
9599
9600 if (const_op == -1
9601 && (code == EQ || code == NE || code == GT || code == LE
9602 || code == GEU || code == LTU)
9603 && num_sign_bit_copies (op0, mode) == mode_width)
9604 {
9605 code = (code == EQ || code == LE || code == GEU ? NE : EQ);
9606 op1 = const0_rtx, const_op = 0;
9607 }
9608
230d793d 9609 /* Do some canonicalizations based on the comparison code. We prefer
4803a34a
RK
9610 comparisons against zero and then prefer equality comparisons.
9611 If we can reduce the size of a constant, we will do that too. */
230d793d
RS
9612
9613 switch (code)
9614 {
9615 case LT:
4803a34a
RK
9616 /* < C is equivalent to <= (C - 1) */
9617 if (const_op > 0)
230d793d 9618 {
4803a34a 9619 const_op -= 1;
5f4f0e22 9620 op1 = GEN_INT (const_op);
230d793d
RS
9621 code = LE;
9622 /* ... fall through to LE case below. */
9623 }
9624 else
9625 break;
9626
9627 case LE:
4803a34a
RK
9628 /* <= C is equivalent to < (C + 1); we do this for C < 0 */
9629 if (const_op < 0)
9630 {
9631 const_op += 1;
5f4f0e22 9632 op1 = GEN_INT (const_op);
4803a34a
RK
9633 code = LT;
9634 }
230d793d
RS
9635
9636 /* If we are doing a <= 0 comparison on a value known to have
9637 a zero sign bit, we can replace this with == 0. */
9638 else if (const_op == 0
5f4f0e22 9639 && mode_width <= HOST_BITS_PER_WIDE_INT
951553af 9640 && (nonzero_bits (op0, mode)
5f4f0e22 9641 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
230d793d
RS
9642 code = EQ;
9643 break;
9644
9645 case GE:
0f41302f 9646 /* >= C is equivalent to > (C - 1). */
4803a34a 9647 if (const_op > 0)
230d793d 9648 {
4803a34a 9649 const_op -= 1;
5f4f0e22 9650 op1 = GEN_INT (const_op);
230d793d
RS
9651 code = GT;
9652 /* ... fall through to GT below. */
9653 }
9654 else
9655 break;
9656
9657 case GT:
4803a34a
RK
9658 /* > C is equivalent to >= (C + 1); we do this for C < 0*/
9659 if (const_op < 0)
9660 {
9661 const_op += 1;
5f4f0e22 9662 op1 = GEN_INT (const_op);
4803a34a
RK
9663 code = GE;
9664 }
230d793d
RS
9665
9666 /* If we are doing a > 0 comparison on a value known to have
9667 a zero sign bit, we can replace this with != 0. */
9668 else if (const_op == 0
5f4f0e22 9669 && mode_width <= HOST_BITS_PER_WIDE_INT
951553af 9670 && (nonzero_bits (op0, mode)
5f4f0e22 9671 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
230d793d
RS
9672 code = NE;
9673 break;
9674
230d793d 9675 case LTU:
4803a34a
RK
9676 /* < C is equivalent to <= (C - 1). */
9677 if (const_op > 0)
9678 {
9679 const_op -= 1;
5f4f0e22 9680 op1 = GEN_INT (const_op);
4803a34a 9681 code = LEU;
0f41302f 9682 /* ... fall through ... */
4803a34a 9683 }
d0ab8cd3
RK
9684
9685 /* (unsigned) < 0x80000000 is equivalent to >= 0. */
f77aada2
JW
9686 else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
9687 && (const_op == (HOST_WIDE_INT) 1 << (mode_width - 1)))
d0ab8cd3
RK
9688 {
9689 const_op = 0, op1 = const0_rtx;
9690 code = GE;
9691 break;
9692 }
4803a34a
RK
9693 else
9694 break;
230d793d
RS
9695
9696 case LEU:
9697 /* unsigned <= 0 is equivalent to == 0 */
9698 if (const_op == 0)
9699 code = EQ;
d0ab8cd3 9700
0f41302f 9701 /* (unsigned) <= 0x7fffffff is equivalent to >= 0. */
f77aada2
JW
9702 else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
9703 && (const_op == ((HOST_WIDE_INT) 1 << (mode_width - 1)) - 1))
d0ab8cd3
RK
9704 {
9705 const_op = 0, op1 = const0_rtx;
9706 code = GE;
9707 }
230d793d
RS
9708 break;
9709
4803a34a
RK
9710 case GEU:
9711 /* >= C is equivalent to < (C - 1). */
9712 if (const_op > 1)
9713 {
9714 const_op -= 1;
5f4f0e22 9715 op1 = GEN_INT (const_op);
4803a34a 9716 code = GTU;
0f41302f 9717 /* ... fall through ... */
4803a34a 9718 }
d0ab8cd3
RK
9719
9720 /* (unsigned) >= 0x80000000 is equivalent to < 0. */
f77aada2
JW
9721 else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
9722 && (const_op == (HOST_WIDE_INT) 1 << (mode_width - 1)))
d0ab8cd3
RK
9723 {
9724 const_op = 0, op1 = const0_rtx;
9725 code = LT;
8b2e69e1 9726 break;
d0ab8cd3 9727 }
4803a34a
RK
9728 else
9729 break;
9730
230d793d
RS
9731 case GTU:
9732 /* unsigned > 0 is equivalent to != 0 */
9733 if (const_op == 0)
9734 code = NE;
d0ab8cd3
RK
9735
9736 /* (unsigned) > 0x7fffffff is equivalent to < 0. */
f77aada2
JW
9737 else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
9738 && (const_op == ((HOST_WIDE_INT) 1 << (mode_width - 1)) - 1))
d0ab8cd3
RK
9739 {
9740 const_op = 0, op1 = const0_rtx;
9741 code = LT;
9742 }
230d793d 9743 break;
e9a25f70
JL
9744
9745 default:
9746 break;
230d793d
RS
9747 }
9748
9749 /* Compute some predicates to simplify code below. */
9750
9751 equality_comparison_p = (code == EQ || code == NE);
9752 sign_bit_comparison_p = ((code == LT || code == GE) && const_op == 0);
9753 unsigned_comparison_p = (code == LTU || code == LEU || code == GTU
9754 || code == LEU);
9755
6139ff20
RK
9756 /* If this is a sign bit comparison and we can do arithmetic in
9757 MODE, say that we will only be needing the sign bit of OP0. */
9758 if (sign_bit_comparison_p
9759 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
9760 op0 = force_to_mode (op0, mode,
9761 ((HOST_WIDE_INT) 1
9762 << (GET_MODE_BITSIZE (mode) - 1)),
e3d616e3 9763 NULL_RTX, 0);
6139ff20 9764
230d793d
RS
9765 /* Now try cases based on the opcode of OP0. If none of the cases
9766 does a "continue", we exit this loop immediately after the
9767 switch. */
9768
9769 switch (GET_CODE (op0))
9770 {
9771 case ZERO_EXTRACT:
9772 /* If we are extracting a single bit from a variable position in
9773 a constant that has only a single bit set and are comparing it
9774 with zero, we can convert this into an equality comparison
d7cd794f 9775 between the position and the location of the single bit. */
230d793d 9776
230d793d
RS
9777 if (GET_CODE (XEXP (op0, 0)) == CONST_INT
9778 && XEXP (op0, 1) == const1_rtx
9779 && equality_comparison_p && const_op == 0
d7cd794f 9780 && (i = exact_log2 (INTVAL (XEXP (op0, 0)))) >= 0)
230d793d 9781 {
f76b9db2 9782 if (BITS_BIG_ENDIAN)
0d8e55d8 9783 {
d7cd794f 9784#ifdef HAVE_extzv
0d8e55d8
JL
9785 mode = insn_operand_mode[(int) CODE_FOR_extzv][1];
9786 if (mode == VOIDmode)
9787 mode = word_mode;
9788 i = (GET_MODE_BITSIZE (mode) - 1 - i);
d7cd794f 9789#else
0d8e55d8 9790 i = BITS_PER_WORD - 1 - i;
230d793d 9791#endif
0d8e55d8 9792 }
230d793d
RS
9793
9794 op0 = XEXP (op0, 2);
5f4f0e22 9795 op1 = GEN_INT (i);
230d793d
RS
9796 const_op = i;
9797
9798 /* Result is nonzero iff shift count is equal to I. */
9799 code = reverse_condition (code);
9800 continue;
9801 }
230d793d 9802
0f41302f 9803 /* ... fall through ... */
230d793d
RS
9804
9805 case SIGN_EXTRACT:
9806 tem = expand_compound_operation (op0);
9807 if (tem != op0)
9808 {
9809 op0 = tem;
9810 continue;
9811 }
9812 break;
9813
9814 case NOT:
9815 /* If testing for equality, we can take the NOT of the constant. */
9816 if (equality_comparison_p
9817 && (tem = simplify_unary_operation (NOT, mode, op1, mode)) != 0)
9818 {
9819 op0 = XEXP (op0, 0);
9820 op1 = tem;
9821 continue;
9822 }
9823
9824 /* If just looking at the sign bit, reverse the sense of the
9825 comparison. */
9826 if (sign_bit_comparison_p)
9827 {
9828 op0 = XEXP (op0, 0);
9829 code = (code == GE ? LT : GE);
9830 continue;
9831 }
9832 break;
9833
9834 case NEG:
9835 /* If testing for equality, we can take the NEG of the constant. */
9836 if (equality_comparison_p
9837 && (tem = simplify_unary_operation (NEG, mode, op1, mode)) != 0)
9838 {
9839 op0 = XEXP (op0, 0);
9840 op1 = tem;
9841 continue;
9842 }
9843
9844 /* The remaining cases only apply to comparisons with zero. */
9845 if (const_op != 0)
9846 break;
9847
9848 /* When X is ABS or is known positive,
9849 (neg X) is < 0 if and only if X != 0. */
9850
9851 if (sign_bit_comparison_p
9852 && (GET_CODE (XEXP (op0, 0)) == ABS
5f4f0e22 9853 || (mode_width <= HOST_BITS_PER_WIDE_INT
951553af 9854 && (nonzero_bits (XEXP (op0, 0), mode)
5f4f0e22 9855 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)))
230d793d
RS
9856 {
9857 op0 = XEXP (op0, 0);
9858 code = (code == LT ? NE : EQ);
9859 continue;
9860 }
9861
3bed8141 9862 /* If we have NEG of something whose two high-order bits are the
0f41302f 9863 same, we know that "(-a) < 0" is equivalent to "a > 0". */
3bed8141 9864 if (num_sign_bit_copies (op0, mode) >= 2)
230d793d
RS
9865 {
9866 op0 = XEXP (op0, 0);
9867 code = swap_condition (code);
9868 continue;
9869 }
9870 break;
9871
9872 case ROTATE:
9873 /* If we are testing equality and our count is a constant, we
9874 can perform the inverse operation on our RHS. */
9875 if (equality_comparison_p && GET_CODE (XEXP (op0, 1)) == CONST_INT
9876 && (tem = simplify_binary_operation (ROTATERT, mode,
9877 op1, XEXP (op0, 1))) != 0)
9878 {
9879 op0 = XEXP (op0, 0);
9880 op1 = tem;
9881 continue;
9882 }
9883
9884 /* If we are doing a < 0 or >= 0 comparison, it means we are testing
9885 a particular bit. Convert it to an AND of a constant of that
9886 bit. This will be converted into a ZERO_EXTRACT. */
9887 if (const_op == 0 && sign_bit_comparison_p
9888 && GET_CODE (XEXP (op0, 1)) == CONST_INT
5f4f0e22 9889 && mode_width <= HOST_BITS_PER_WIDE_INT)
230d793d 9890 {
5f4f0e22
CH
9891 op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
9892 ((HOST_WIDE_INT) 1
9893 << (mode_width - 1
9894 - INTVAL (XEXP (op0, 1)))));
230d793d
RS
9895 code = (code == LT ? NE : EQ);
9896 continue;
9897 }
9898
0f41302f 9899 /* ... fall through ... */
230d793d
RS
9900
9901 case ABS:
9902 /* ABS is ignorable inside an equality comparison with zero. */
9903 if (const_op == 0 && equality_comparison_p)
9904 {
9905 op0 = XEXP (op0, 0);
9906 continue;
9907 }
9908 break;
9909
9910
9911 case SIGN_EXTEND:
9912 /* Can simplify (compare (zero/sign_extend FOO) CONST)
9913 to (compare FOO CONST) if CONST fits in FOO's mode and we
9914 are either testing inequality or have an unsigned comparison
9915 with ZERO_EXTEND or a signed comparison with SIGN_EXTEND. */
9916 if (! unsigned_comparison_p
9917 && (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
5f4f0e22
CH
9918 <= HOST_BITS_PER_WIDE_INT)
9919 && ((unsigned HOST_WIDE_INT) const_op
9920 < (((HOST_WIDE_INT) 1
9921 << (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0))) - 1)))))
230d793d
RS
9922 {
9923 op0 = XEXP (op0, 0);
9924 continue;
9925 }
9926 break;
9927
9928 case SUBREG:
a687e897 9929 /* Check for the case where we are comparing A - C1 with C2,
abc95ed3 9930 both constants are smaller than 1/2 the maximum positive
a687e897
RK
9931 value in MODE, and the comparison is equality or unsigned.
9932 In that case, if A is either zero-extended to MODE or has
9933 sufficient sign bits so that the high-order bit in MODE
9934 is a copy of the sign in the inner mode, we can prove that it is
9935 safe to do the operation in the wider mode. This simplifies
9936 many range checks. */
9937
9938 if (mode_width <= HOST_BITS_PER_WIDE_INT
9939 && subreg_lowpart_p (op0)
9940 && GET_CODE (SUBREG_REG (op0)) == PLUS
9941 && GET_CODE (XEXP (SUBREG_REG (op0), 1)) == CONST_INT
9942 && INTVAL (XEXP (SUBREG_REG (op0), 1)) < 0
9943 && (- INTVAL (XEXP (SUBREG_REG (op0), 1))
9944 < GET_MODE_MASK (mode) / 2)
adb7a1cb 9945 && (unsigned HOST_WIDE_INT) const_op < GET_MODE_MASK (mode) / 2
951553af
RK
9946 && (0 == (nonzero_bits (XEXP (SUBREG_REG (op0), 0),
9947 GET_MODE (SUBREG_REG (op0)))
a687e897
RK
9948 & ~ GET_MODE_MASK (mode))
9949 || (num_sign_bit_copies (XEXP (SUBREG_REG (op0), 0),
9950 GET_MODE (SUBREG_REG (op0)))
9951 > (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0)))
9952 - GET_MODE_BITSIZE (mode)))))
9953 {
9954 op0 = SUBREG_REG (op0);
9955 continue;
9956 }
9957
fe0cf571
RK
9958 /* If the inner mode is narrower and we are extracting the low part,
9959 we can treat the SUBREG as if it were a ZERO_EXTEND. */
9960 if (subreg_lowpart_p (op0)
89f1c7f2
RS
9961 && GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0))) < mode_width)
9962 /* Fall through */ ;
9963 else
230d793d
RS
9964 break;
9965
0f41302f 9966 /* ... fall through ... */
230d793d
RS
9967
9968 case ZERO_EXTEND:
9969 if ((unsigned_comparison_p || equality_comparison_p)
9970 && (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
5f4f0e22
CH
9971 <= HOST_BITS_PER_WIDE_INT)
9972 && ((unsigned HOST_WIDE_INT) const_op
230d793d
RS
9973 < GET_MODE_MASK (GET_MODE (XEXP (op0, 0)))))
9974 {
9975 op0 = XEXP (op0, 0);
9976 continue;
9977 }
9978 break;
9979
9980 case PLUS:
20fdd649 9981 /* (eq (plus X A) B) -> (eq X (minus B A)). We can only do
5089e22e 9982 this for equality comparisons due to pathological cases involving
230d793d 9983 overflows. */
20fdd649
RK
9984 if (equality_comparison_p
9985 && 0 != (tem = simplify_binary_operation (MINUS, mode,
9986 op1, XEXP (op0, 1))))
230d793d
RS
9987 {
9988 op0 = XEXP (op0, 0);
9989 op1 = tem;
9990 continue;
9991 }
9992
9993 /* (plus (abs X) (const_int -1)) is < 0 if and only if X == 0. */
9994 if (const_op == 0 && XEXP (op0, 1) == constm1_rtx
9995 && GET_CODE (XEXP (op0, 0)) == ABS && sign_bit_comparison_p)
9996 {
9997 op0 = XEXP (XEXP (op0, 0), 0);
9998 code = (code == LT ? EQ : NE);
9999 continue;
10000 }
10001 break;
10002
10003 case MINUS:
20fdd649
RK
10004 /* (eq (minus A B) C) -> (eq A (plus B C)) or
10005 (eq B (minus A C)), whichever simplifies. We can only do
10006 this for equality comparisons due to pathological cases involving
10007 overflows. */
10008 if (equality_comparison_p
10009 && 0 != (tem = simplify_binary_operation (PLUS, mode,
10010 XEXP (op0, 1), op1)))
10011 {
10012 op0 = XEXP (op0, 0);
10013 op1 = tem;
10014 continue;
10015 }
10016
10017 if (equality_comparison_p
10018 && 0 != (tem = simplify_binary_operation (MINUS, mode,
10019 XEXP (op0, 0), op1)))
10020 {
10021 op0 = XEXP (op0, 1);
10022 op1 = tem;
10023 continue;
10024 }
10025
230d793d
RS
10026 /* The sign bit of (minus (ashiftrt X C) X), where C is the number
10027 of bits in X minus 1, is one iff X > 0. */
10028 if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == ASHIFTRT
10029 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
10030 && INTVAL (XEXP (XEXP (op0, 0), 1)) == mode_width - 1
10031 && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
10032 {
10033 op0 = XEXP (op0, 1);
10034 code = (code == GE ? LE : GT);
10035 continue;
10036 }
10037 break;
10038
10039 case XOR:
10040 /* (eq (xor A B) C) -> (eq A (xor B C)). This is a simplification
10041 if C is zero or B is a constant. */
10042 if (equality_comparison_p
10043 && 0 != (tem = simplify_binary_operation (XOR, mode,
10044 XEXP (op0, 1), op1)))
10045 {
10046 op0 = XEXP (op0, 0);
10047 op1 = tem;
10048 continue;
10049 }
10050 break;
10051
10052 case EQ: case NE:
10053 case LT: case LTU: case LE: case LEU:
10054 case GT: case GTU: case GE: case GEU:
10055 /* We can't do anything if OP0 is a condition code value, rather
10056 than an actual data value. */
10057 if (const_op != 0
10058#ifdef HAVE_cc0
10059 || XEXP (op0, 0) == cc0_rtx
10060#endif
10061 || GET_MODE_CLASS (GET_MODE (XEXP (op0, 0))) == MODE_CC)
10062 break;
10063
10064 /* Get the two operands being compared. */
10065 if (GET_CODE (XEXP (op0, 0)) == COMPARE)
10066 tem = XEXP (XEXP (op0, 0), 0), tem1 = XEXP (XEXP (op0, 0), 1);
10067 else
10068 tem = XEXP (op0, 0), tem1 = XEXP (op0, 1);
10069
10070 /* Check for the cases where we simply want the result of the
10071 earlier test or the opposite of that result. */
10072 if (code == NE
10073 || (code == EQ && reversible_comparison_p (op0))
5f4f0e22 10074 || (GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
3f508eca 10075 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
230d793d 10076 && (STORE_FLAG_VALUE
5f4f0e22
CH
10077 & (((HOST_WIDE_INT) 1
10078 << (GET_MODE_BITSIZE (GET_MODE (op0)) - 1))))
230d793d
RS
10079 && (code == LT
10080 || (code == GE && reversible_comparison_p (op0)))))
10081 {
10082 code = (code == LT || code == NE
10083 ? GET_CODE (op0) : reverse_condition (GET_CODE (op0)));
10084 op0 = tem, op1 = tem1;
10085 continue;
10086 }
10087 break;
10088
10089 case IOR:
10090 /* The sign bit of (ior (plus X (const_int -1)) X) is non-zero
10091 iff X <= 0. */
10092 if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == PLUS
10093 && XEXP (XEXP (op0, 0), 1) == constm1_rtx
10094 && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
10095 {
10096 op0 = XEXP (op0, 1);
10097 code = (code == GE ? GT : LE);
10098 continue;
10099 }
10100 break;
10101
10102 case AND:
10103 /* Convert (and (xshift 1 X) Y) to (and (lshiftrt Y X) 1). This
10104 will be converted to a ZERO_EXTRACT later. */
10105 if (const_op == 0 && equality_comparison_p
45620ed4 10106 && GET_CODE (XEXP (op0, 0)) == ASHIFT
230d793d
RS
10107 && XEXP (XEXP (op0, 0), 0) == const1_rtx)
10108 {
10109 op0 = simplify_and_const_int
10110 (op0, mode, gen_rtx_combine (LSHIFTRT, mode,
10111 XEXP (op0, 1),
10112 XEXP (XEXP (op0, 0), 1)),
5f4f0e22 10113 (HOST_WIDE_INT) 1);
230d793d
RS
10114 continue;
10115 }
10116
10117 /* If we are comparing (and (lshiftrt X C1) C2) for equality with
10118 zero and X is a comparison and C1 and C2 describe only bits set
10119 in STORE_FLAG_VALUE, we can compare with X. */
10120 if (const_op == 0 && equality_comparison_p
5f4f0e22 10121 && mode_width <= HOST_BITS_PER_WIDE_INT
230d793d
RS
10122 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10123 && GET_CODE (XEXP (op0, 0)) == LSHIFTRT
10124 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
10125 && INTVAL (XEXP (XEXP (op0, 0), 1)) >= 0
5f4f0e22 10126 && INTVAL (XEXP (XEXP (op0, 0), 1)) < HOST_BITS_PER_WIDE_INT)
230d793d
RS
10127 {
10128 mask = ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode))
10129 << INTVAL (XEXP (XEXP (op0, 0), 1)));
10130 if ((~ STORE_FLAG_VALUE & mask) == 0
10131 && (GET_RTX_CLASS (GET_CODE (XEXP (XEXP (op0, 0), 0))) == '<'
10132 || ((tem = get_last_value (XEXP (XEXP (op0, 0), 0))) != 0
10133 && GET_RTX_CLASS (GET_CODE (tem)) == '<')))
10134 {
10135 op0 = XEXP (XEXP (op0, 0), 0);
10136 continue;
10137 }
10138 }
10139
10140 /* If we are doing an equality comparison of an AND of a bit equal
10141 to the sign bit, replace this with a LT or GE comparison of
10142 the underlying value. */
10143 if (equality_comparison_p
10144 && const_op == 0
10145 && GET_CODE (XEXP (op0, 1)) == CONST_INT
5f4f0e22 10146 && mode_width <= HOST_BITS_PER_WIDE_INT
230d793d 10147 && ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode))
5f4f0e22 10148 == (HOST_WIDE_INT) 1 << (mode_width - 1)))
230d793d
RS
10149 {
10150 op0 = XEXP (op0, 0);
10151 code = (code == EQ ? GE : LT);
10152 continue;
10153 }
10154
10155 /* If this AND operation is really a ZERO_EXTEND from a narrower
10156 mode, the constant fits within that mode, and this is either an
10157 equality or unsigned comparison, try to do this comparison in
10158 the narrower mode. */
10159 if ((equality_comparison_p || unsigned_comparison_p)
10160 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10161 && (i = exact_log2 ((INTVAL (XEXP (op0, 1))
10162 & GET_MODE_MASK (mode))
10163 + 1)) >= 0
10164 && const_op >> i == 0
10165 && (tmode = mode_for_size (i, MODE_INT, 1)) != BLKmode)
10166 {
10167 op0 = gen_lowpart_for_combine (tmode, XEXP (op0, 0));
10168 continue;
10169 }
e5e809f4
JL
10170
10171 /* If this is (and:M1 (subreg:M2 X 0) (const_int C1)) where C1 fits
10172 in both M1 and M2 and the SUBREG is either paradoxical or
10173 represents the low part, permute the SUBREG and the AND and
10174 try again. */
10175 if (GET_CODE (XEXP (op0, 0)) == SUBREG
10176 && ((mode_width
10177 >= GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (XEXP (op0, 0)))))
9ec36da5
JL
10178#ifdef WORD_REGISTER_OPERATIONS
10179 || subreg_lowpart_p (XEXP (op0, 0))
10180#endif
10181 )
adc05e6c
JL
10182#ifndef WORD_REGISTER_OPERATIONS
10183 /* It is unsafe to commute the AND into the SUBREG if the SUBREG
10184 is paradoxical and WORD_REGISTER_OPERATIONS is not defined.
10185 As originally written the upper bits have a defined value
10186 due to the AND operation. However, if we commute the AND
10187 inside the SUBREG then they no longer have defined values
10188 and the meaning of the code has been changed. */
10189 && (GET_MODE_SIZE (GET_MODE (XEXP (op0, 0)))
10190 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (op0, 0)))))
10191#endif
e5e809f4
JL
10192 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10193 && mode_width <= HOST_BITS_PER_WIDE_INT
10194 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (XEXP (op0, 0))))
10195 <= HOST_BITS_PER_WIDE_INT)
10196 && (INTVAL (XEXP (op0, 1)) & ~ mask) == 0
10197 && 0 == (~ GET_MODE_MASK (GET_MODE (SUBREG_REG (XEXP (op0, 0))))
9ec36da5
JL
10198 & INTVAL (XEXP (op0, 1)))
10199 && INTVAL (XEXP (op0, 1)) != mask
10200 && (INTVAL (XEXP (op0, 1))
10201 != GET_MODE_MASK (GET_MODE (SUBREG_REG (XEXP (op0, 0))))))
e5e809f4
JL
10202
10203 {
10204 op0
10205 = gen_lowpart_for_combine
10206 (mode,
10207 gen_binary (AND, GET_MODE (SUBREG_REG (XEXP (op0, 0))),
10208 SUBREG_REG (XEXP (op0, 0)), XEXP (op0, 1)));
10209 continue;
10210 }
10211
230d793d
RS
10212 break;
10213
10214 case ASHIFT:
45620ed4 10215 /* If we have (compare (ashift FOO N) (const_int C)) and
230d793d 10216 the high order N bits of FOO (N+1 if an inequality comparison)
951553af 10217 are known to be zero, we can do this by comparing FOO with C
230d793d
RS
10218 shifted right N bits so long as the low-order N bits of C are
10219 zero. */
10220 if (GET_CODE (XEXP (op0, 1)) == CONST_INT
10221 && INTVAL (XEXP (op0, 1)) >= 0
10222 && ((INTVAL (XEXP (op0, 1)) + ! equality_comparison_p)
5f4f0e22
CH
10223 < HOST_BITS_PER_WIDE_INT)
10224 && ((const_op
34785d05 10225 & (((HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1))) - 1)) == 0)
5f4f0e22 10226 && mode_width <= HOST_BITS_PER_WIDE_INT
951553af 10227 && (nonzero_bits (XEXP (op0, 0), mode)
230d793d
RS
10228 & ~ (mask >> (INTVAL (XEXP (op0, 1))
10229 + ! equality_comparison_p))) == 0)
10230 {
10231 const_op >>= INTVAL (XEXP (op0, 1));
5f4f0e22 10232 op1 = GEN_INT (const_op);
230d793d
RS
10233 op0 = XEXP (op0, 0);
10234 continue;
10235 }
10236
dfbe1b2f 10237 /* If we are doing a sign bit comparison, it means we are testing
230d793d 10238 a particular bit. Convert it to the appropriate AND. */
dfbe1b2f 10239 if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 1)) == CONST_INT
5f4f0e22 10240 && mode_width <= HOST_BITS_PER_WIDE_INT)
230d793d 10241 {
5f4f0e22
CH
10242 op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
10243 ((HOST_WIDE_INT) 1
10244 << (mode_width - 1
10245 - INTVAL (XEXP (op0, 1)))));
230d793d
RS
10246 code = (code == LT ? NE : EQ);
10247 continue;
10248 }
dfbe1b2f
RK
10249
10250 /* If this an equality comparison with zero and we are shifting
10251 the low bit to the sign bit, we can convert this to an AND of the
10252 low-order bit. */
10253 if (const_op == 0 && equality_comparison_p
10254 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10255 && INTVAL (XEXP (op0, 1)) == mode_width - 1)
10256 {
5f4f0e22
CH
10257 op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
10258 (HOST_WIDE_INT) 1);
dfbe1b2f
RK
10259 continue;
10260 }
230d793d
RS
10261 break;
10262
10263 case ASHIFTRT:
d0ab8cd3
RK
10264 /* If this is an equality comparison with zero, we can do this
10265 as a logical shift, which might be much simpler. */
10266 if (equality_comparison_p && const_op == 0
10267 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
10268 {
10269 op0 = simplify_shift_const (NULL_RTX, LSHIFTRT, mode,
10270 XEXP (op0, 0),
10271 INTVAL (XEXP (op0, 1)));
10272 continue;
10273 }
10274
230d793d
RS
10275 /* If OP0 is a sign extension and CODE is not an unsigned comparison,
10276 do the comparison in a narrower mode. */
10277 if (! unsigned_comparison_p
10278 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10279 && GET_CODE (XEXP (op0, 0)) == ASHIFT
10280 && XEXP (op0, 1) == XEXP (XEXP (op0, 0), 1)
10281 && (tmode = mode_for_size (mode_width - INTVAL (XEXP (op0, 1)),
22331794 10282 MODE_INT, 1)) != BLKmode
5f4f0e22
CH
10283 && ((unsigned HOST_WIDE_INT) const_op <= GET_MODE_MASK (tmode)
10284 || ((unsigned HOST_WIDE_INT) - const_op
10285 <= GET_MODE_MASK (tmode))))
230d793d
RS
10286 {
10287 op0 = gen_lowpart_for_combine (tmode, XEXP (XEXP (op0, 0), 0));
10288 continue;
10289 }
10290
0f41302f 10291 /* ... fall through ... */
230d793d
RS
10292 case LSHIFTRT:
10293 /* If we have (compare (xshiftrt FOO N) (const_int C)) and
951553af 10294 the low order N bits of FOO are known to be zero, we can do this
230d793d
RS
10295 by comparing FOO with C shifted left N bits so long as no
10296 overflow occurs. */
10297 if (GET_CODE (XEXP (op0, 1)) == CONST_INT
10298 && INTVAL (XEXP (op0, 1)) >= 0
5f4f0e22
CH
10299 && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
10300 && mode_width <= HOST_BITS_PER_WIDE_INT
951553af 10301 && (nonzero_bits (XEXP (op0, 0), mode)
5f4f0e22 10302 & (((HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1))) - 1)) == 0
230d793d
RS
10303 && (const_op == 0
10304 || (floor_log2 (const_op) + INTVAL (XEXP (op0, 1))
10305 < mode_width)))
10306 {
10307 const_op <<= INTVAL (XEXP (op0, 1));
5f4f0e22 10308 op1 = GEN_INT (const_op);
230d793d
RS
10309 op0 = XEXP (op0, 0);
10310 continue;
10311 }
10312
10313 /* If we are using this shift to extract just the sign bit, we
10314 can replace this with an LT or GE comparison. */
10315 if (const_op == 0
10316 && (equality_comparison_p || sign_bit_comparison_p)
10317 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10318 && INTVAL (XEXP (op0, 1)) == mode_width - 1)
10319 {
10320 op0 = XEXP (op0, 0);
10321 code = (code == NE || code == GT ? LT : GE);
10322 continue;
10323 }
10324 break;
e9a25f70
JL
10325
10326 default:
10327 break;
230d793d
RS
10328 }
10329
10330 break;
10331 }
10332
10333 /* Now make any compound operations involved in this comparison. Then,
76d31c63 10334 check for an outmost SUBREG on OP0 that is not doing anything or is
230d793d
RS
10335 paradoxical. The latter case can only occur when it is known that the
10336 "extra" bits will be zero. Therefore, it is safe to remove the SUBREG.
10337 We can never remove a SUBREG for a non-equality comparison because the
10338 sign bit is in a different place in the underlying object. */
10339
10340 op0 = make_compound_operation (op0, op1 == const0_rtx ? COMPARE : SET);
10341 op1 = make_compound_operation (op1, SET);
10342
10343 if (GET_CODE (op0) == SUBREG && subreg_lowpart_p (op0)
10344 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
10345 && (code == NE || code == EQ)
10346 && ((GET_MODE_SIZE (GET_MODE (op0))
10347 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0))))))
10348 {
10349 op0 = SUBREG_REG (op0);
10350 op1 = gen_lowpart_for_combine (GET_MODE (op0), op1);
10351 }
10352
10353 else if (GET_CODE (op0) == SUBREG && subreg_lowpart_p (op0)
10354 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
10355 && (code == NE || code == EQ)
ac49a949
RS
10356 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0)))
10357 <= HOST_BITS_PER_WIDE_INT)
951553af 10358 && (nonzero_bits (SUBREG_REG (op0), GET_MODE (SUBREG_REG (op0)))
230d793d
RS
10359 & ~ GET_MODE_MASK (GET_MODE (op0))) == 0
10360 && (tem = gen_lowpart_for_combine (GET_MODE (SUBREG_REG (op0)),
10361 op1),
951553af 10362 (nonzero_bits (tem, GET_MODE (SUBREG_REG (op0)))
230d793d
RS
10363 & ~ GET_MODE_MASK (GET_MODE (op0))) == 0))
10364 op0 = SUBREG_REG (op0), op1 = tem;
10365
10366 /* We now do the opposite procedure: Some machines don't have compare
10367 insns in all modes. If OP0's mode is an integer mode smaller than a
10368 word and we can't do a compare in that mode, see if there is a larger
a687e897
RK
10369 mode for which we can do the compare. There are a number of cases in
10370 which we can use the wider mode. */
230d793d
RS
10371
10372 mode = GET_MODE (op0);
10373 if (mode != VOIDmode && GET_MODE_CLASS (mode) == MODE_INT
10374 && GET_MODE_SIZE (mode) < UNITS_PER_WORD
10375 && cmp_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
10376 for (tmode = GET_MODE_WIDER_MODE (mode);
5f4f0e22
CH
10377 (tmode != VOIDmode
10378 && GET_MODE_BITSIZE (tmode) <= HOST_BITS_PER_WIDE_INT);
230d793d 10379 tmode = GET_MODE_WIDER_MODE (tmode))
a687e897 10380 if (cmp_optab->handlers[(int) tmode].insn_code != CODE_FOR_nothing)
230d793d 10381 {
951553af 10382 /* If the only nonzero bits in OP0 and OP1 are those in the
a687e897
RK
10383 narrower mode and this is an equality or unsigned comparison,
10384 we can use the wider mode. Similarly for sign-extended
7e4dc511 10385 values, in which case it is true for all comparisons. */
a687e897
RK
10386 if (((code == EQ || code == NE
10387 || code == GEU || code == GTU || code == LEU || code == LTU)
951553af
RK
10388 && (nonzero_bits (op0, tmode) & ~ GET_MODE_MASK (mode)) == 0
10389 && (nonzero_bits (op1, tmode) & ~ GET_MODE_MASK (mode)) == 0)
7e4dc511
RK
10390 || ((num_sign_bit_copies (op0, tmode)
10391 > GET_MODE_BITSIZE (tmode) - GET_MODE_BITSIZE (mode))
a687e897 10392 && (num_sign_bit_copies (op1, tmode)
58744483 10393 > GET_MODE_BITSIZE (tmode) - GET_MODE_BITSIZE (mode))))
a687e897
RK
10394 {
10395 op0 = gen_lowpart_for_combine (tmode, op0);
10396 op1 = gen_lowpart_for_combine (tmode, op1);
10397 break;
10398 }
230d793d 10399
a687e897
RK
10400 /* If this is a test for negative, we can make an explicit
10401 test of the sign bit. */
10402
10403 if (op1 == const0_rtx && (code == LT || code == GE)
10404 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
230d793d 10405 {
a687e897
RK
10406 op0 = gen_binary (AND, tmode,
10407 gen_lowpart_for_combine (tmode, op0),
5f4f0e22
CH
10408 GEN_INT ((HOST_WIDE_INT) 1
10409 << (GET_MODE_BITSIZE (mode) - 1)));
230d793d 10410 code = (code == LT) ? NE : EQ;
a687e897 10411 break;
230d793d 10412 }
230d793d
RS
10413 }
10414
b7a775b2
RK
10415#ifdef CANONICALIZE_COMPARISON
10416 /* If this machine only supports a subset of valid comparisons, see if we
10417 can convert an unsupported one into a supported one. */
10418 CANONICALIZE_COMPARISON (code, op0, op1);
10419#endif
10420
230d793d
RS
10421 *pop0 = op0;
10422 *pop1 = op1;
10423
10424 return code;
10425}
10426\f
10427/* Return 1 if we know that X, a comparison operation, is not operating
10428 on a floating-point value or is EQ or NE, meaning that we can safely
10429 reverse it. */
10430
10431static int
10432reversible_comparison_p (x)
10433 rtx x;
10434{
10435 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
7e2a0d8e 10436 || flag_fast_math
230d793d
RS
10437 || GET_CODE (x) == NE || GET_CODE (x) == EQ)
10438 return 1;
10439
10440 switch (GET_MODE_CLASS (GET_MODE (XEXP (x, 0))))
10441 {
10442 case MODE_INT:
3ad2180a
RK
10443 case MODE_PARTIAL_INT:
10444 case MODE_COMPLEX_INT:
230d793d
RS
10445 return 1;
10446
10447 case MODE_CC:
9210df58
RK
10448 /* If the mode of the condition codes tells us that this is safe,
10449 we need look no further. */
10450 if (REVERSIBLE_CC_MODE (GET_MODE (XEXP (x, 0))))
10451 return 1;
10452
10453 /* Otherwise try and find where the condition codes were last set and
10454 use that. */
230d793d
RS
10455 x = get_last_value (XEXP (x, 0));
10456 return (x && GET_CODE (x) == COMPARE
3ad2180a 10457 && ! FLOAT_MODE_P (GET_MODE (XEXP (x, 0))));
e9a25f70
JL
10458
10459 default:
10460 return 0;
230d793d 10461 }
230d793d
RS
10462}
10463\f
10464/* Utility function for following routine. Called when X is part of a value
10465 being stored into reg_last_set_value. Sets reg_last_set_table_tick
10466 for each register mentioned. Similar to mention_regs in cse.c */
10467
10468static void
10469update_table_tick (x)
10470 rtx x;
10471{
10472 register enum rtx_code code = GET_CODE (x);
10473 register char *fmt = GET_RTX_FORMAT (code);
10474 register int i;
10475
10476 if (code == REG)
10477 {
10478 int regno = REGNO (x);
10479 int endregno = regno + (regno < FIRST_PSEUDO_REGISTER
10480 ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
10481
10482 for (i = regno; i < endregno; i++)
10483 reg_last_set_table_tick[i] = label_tick;
10484
10485 return;
10486 }
10487
10488 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
10489 /* Note that we can't have an "E" in values stored; see
10490 get_last_value_validate. */
10491 if (fmt[i] == 'e')
10492 update_table_tick (XEXP (x, i));
10493}
10494
/* Record that REG is set to VALUE in insn INSN.  If VALUE is zero, we
   are saying that the register is clobbered and we no longer know its
   value.  If INSN is zero, don't update reg_last_set; this is only permitted
   with VALUE also zero and is used to invalidate the register.  */

static void
record_value_for_reg (reg, insn, value)
     rtx reg;
     rtx insn;
     rtx value;
{
  int regno = REGNO (reg);
  /* A hard register may occupy several consecutive register numbers;
     a pseudo always occupies exactly one.  */
  int endregno = regno + (regno < FIRST_PSEUDO_REGISTER
			  ? HARD_REGNO_NREGS (regno, GET_MODE (reg)) : 1);
  int i;

  /* If VALUE contains REG and we have a previous value for REG, substitute
     the previous value.  */
  if (value && insn && reg_overlap_mentioned_p (reg, value))
    {
      rtx tem;

      /* Set things up so get_last_value is allowed to see anything set up to
	 our insn.  */
      subst_low_cuid = INSN_CUID (insn);
      tem = get_last_value (reg);

      if (tem)
	value = replace_rtx (copy_rtx (value), reg, tem);
    }

  /* For each register modified, show we don't know its value, that
     we don't know about its bitwise content, that its value has been
     updated, and that we don't know the location of the death of the
     register.  */
  for (i = regno; i < endregno; i ++)
    {
      if (insn)
	reg_last_set[i] = insn;
      reg_last_set_value[i] = 0;
      reg_last_set_mode[i] = 0;
      reg_last_set_nonzero_bits[i] = 0;
      reg_last_set_sign_bit_copies[i] = 0;
      reg_last_death[i] = 0;
    }

  /* Mark registers that are being referenced in this value.  */
  if (value)
    update_table_tick (value);

  /* Now update the status of each register being set.
     If someone is using this register in this block, set this register
     to invalid since we will get confused between the two lives in this
     basic block.  This makes using this register always invalid.  In cse, we
     scan the table to invalidate all entries using this register, but this
     is too much work for us.  */

  for (i = regno; i < endregno; i++)
    {
      reg_last_set_label[i] = label_tick;
      if (value && reg_last_set_table_tick[i] == label_tick)
	reg_last_set_invalid[i] = 1;
      else
	reg_last_set_invalid[i] = 0;
    }

  /* The value being assigned might refer to X (like in "x++;").  In that
     case, we must replace it with (clobber (const_int 0)) to prevent
     infinite loops.  */
  if (value && ! get_last_value_validate (&value, insn,
					  reg_last_set_label[regno], 0))
    {
      /* First validation pass failed; copy VALUE and retry with
	 REPLACE set so invalid references become CLOBBERs.  If even
	 that fails, forget the value entirely.  */
      value = copy_rtx (value);
      if (! get_last_value_validate (&value, insn,
				     reg_last_set_label[regno], 1))
	value = 0;
    }

  /* For the main register being modified, update the value, the mode, the
     nonzero bits, and the number of sign bit copies.  */

  reg_last_set_value[regno] = value;

  if (value)
    {
      /* INSN is necessarily nonzero here: a zero INSN is only permitted
	 together with a zero VALUE (see the function comment).  */
      subst_low_cuid = INSN_CUID (insn);
      reg_last_set_mode[regno] = GET_MODE (reg);
      reg_last_set_nonzero_bits[regno] = nonzero_bits (value, GET_MODE (reg));
      reg_last_set_sign_bit_copies[regno]
	= num_sign_bit_copies (value, GET_MODE (reg));
    }
}
10587
10588/* Used for communication between the following two routines. */
10589static rtx record_dead_insn;
10590
10591/* Called via note_stores from record_dead_and_set_regs to handle one
10592 SET or CLOBBER in an insn. */
10593
10594static void
10595record_dead_and_set_regs_1 (dest, setter)
10596 rtx dest, setter;
10597{
ca89d290
RK
10598 if (GET_CODE (dest) == SUBREG)
10599 dest = SUBREG_REG (dest);
10600
230d793d
RS
10601 if (GET_CODE (dest) == REG)
10602 {
10603 /* If we are setting the whole register, we know its value. Otherwise
10604 show that we don't know the value. We can handle SUBREG in
10605 some cases. */
10606 if (GET_CODE (setter) == SET && dest == SET_DEST (setter))
10607 record_value_for_reg (dest, record_dead_insn, SET_SRC (setter));
10608 else if (GET_CODE (setter) == SET
10609 && GET_CODE (SET_DEST (setter)) == SUBREG
10610 && SUBREG_REG (SET_DEST (setter)) == dest
90bf8081 10611 && GET_MODE_BITSIZE (GET_MODE (dest)) <= BITS_PER_WORD
230d793d 10612 && subreg_lowpart_p (SET_DEST (setter)))
d0ab8cd3
RK
10613 record_value_for_reg (dest, record_dead_insn,
10614 gen_lowpart_for_combine (GET_MODE (dest),
10615 SET_SRC (setter)));
230d793d 10616 else
5f4f0e22 10617 record_value_for_reg (dest, record_dead_insn, NULL_RTX);
230d793d
RS
10618 }
10619 else if (GET_CODE (dest) == MEM
10620 /* Ignore pushes, they clobber nothing. */
10621 && ! push_operand (dest, GET_MODE (dest)))
10622 mem_last_set = INSN_CUID (record_dead_insn);
10623}
10624
10625/* Update the records of when each REG was most recently set or killed
10626 for the things done by INSN. This is the last thing done in processing
10627 INSN in the combiner loop.
10628
ef026f91
RS
10629 We update reg_last_set, reg_last_set_value, reg_last_set_mode,
10630 reg_last_set_nonzero_bits, reg_last_set_sign_bit_copies, reg_last_death,
10631 and also the similar information mem_last_set (which insn most recently
10632 modified memory) and last_call_cuid (which insn was the most recent
10633 subroutine call). */
230d793d
RS
10634
10635static void
10636record_dead_and_set_regs (insn)
10637 rtx insn;
10638{
10639 register rtx link;
55310dad
RK
10640 int i;
10641
230d793d
RS
10642 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
10643 {
dbc131f3
RK
10644 if (REG_NOTE_KIND (link) == REG_DEAD
10645 && GET_CODE (XEXP (link, 0)) == REG)
10646 {
10647 int regno = REGNO (XEXP (link, 0));
10648 int endregno
10649 = regno + (regno < FIRST_PSEUDO_REGISTER
10650 ? HARD_REGNO_NREGS (regno, GET_MODE (XEXP (link, 0)))
10651 : 1);
dbc131f3
RK
10652
10653 for (i = regno; i < endregno; i++)
10654 reg_last_death[i] = insn;
10655 }
230d793d 10656 else if (REG_NOTE_KIND (link) == REG_INC)
5f4f0e22 10657 record_value_for_reg (XEXP (link, 0), insn, NULL_RTX);
230d793d
RS
10658 }
10659
10660 if (GET_CODE (insn) == CALL_INSN)
55310dad
RK
10661 {
10662 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
10663 if (call_used_regs[i])
10664 {
10665 reg_last_set_value[i] = 0;
ef026f91
RS
10666 reg_last_set_mode[i] = 0;
10667 reg_last_set_nonzero_bits[i] = 0;
10668 reg_last_set_sign_bit_copies[i] = 0;
55310dad
RK
10669 reg_last_death[i] = 0;
10670 }
10671
10672 last_call_cuid = mem_last_set = INSN_CUID (insn);
10673 }
230d793d
RS
10674
10675 record_dead_insn = insn;
10676 note_stores (PATTERN (insn), record_dead_and_set_regs_1);
10677}
10678\f
/* Utility routine for the following function.  Verify that all the registers
   mentioned in *LOC are valid when *LOC was part of a value set when
   label_tick == TICK.  Return 0 if some are not.

   If REPLACE is non-zero, replace the invalid reference with
   (clobber (const_int 0)) and return 1.  This replacement is useful because
   we often can get useful information about the form of a value (e.g., if
   it was produced by a shift that always produces -1 or 0) even though
   we don't know exactly what registers it was produced from.  */

static int
get_last_value_validate (loc, insn, tick, replace)
     rtx *loc;
     rtx insn;
     int tick;
     int replace;
{
  rtx x = *loc;
  char *fmt = GET_RTX_FORMAT (GET_CODE (x));
  int len = GET_RTX_LENGTH (GET_CODE (x));
  int i;

  if (GET_CODE (x) == REG)
    {
      int regno = REGNO (x);
      /* A hard register may span several register numbers; a pseudo
	 spans exactly one.  */
      int endregno = regno + (regno < FIRST_PSEUDO_REGISTER
			      ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
      int j;

      for (j = regno; j < endregno; j++)
	if (reg_last_set_invalid[j]
	    /* If this is a pseudo-register that was only set once, it is
	       always valid.  */
	    || (! (regno >= FIRST_PSEUDO_REGISTER && REG_N_SETS (regno) == 1)
		&& reg_last_set_label[j] > tick))
	  {
	    /* Invalid reference found: with REPLACE, substitute a
	       CLOBBER and report success (return 1); without it,
	       report failure (return 0).  */
	    if (replace)
	      *loc = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
	    return replace;
	  }

      return 1;
    }
  /* If this is a memory reference, make sure that there were
     no stores after it that might have clobbered the value.  We don't
     have alias info, so we assume any store invalidates it.  */
  else if (GET_CODE (x) == MEM && ! RTX_UNCHANGING_P (x)
	   && INSN_CUID (insn) <= mem_last_set)
    {
      if (replace)
	*loc = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
      return replace;
    }

  /* Recursively validate every rtx subexpression.  */
  for (i = 0; i < len; i++)
    if ((fmt[i] == 'e'
	 && get_last_value_validate (&XEXP (x, i), insn, tick, replace) == 0)
	/* Don't bother with these.  They shouldn't occur anyway.  */
	|| fmt[i] == 'E')
      return 0;

  /* If we haven't found a reason for it to be invalid, it is valid.  */
  return 1;
}
10743
/* Get the last value assigned to X, if known.  Some registers
   in the value may be replaced with (clobber (const_int 0)) if their value
   is no longer known reliably.  */

static rtx
get_last_value (x)
     rtx x;
{
  int regno;
  rtx value;

  /* If this is a non-paradoxical SUBREG, get the value of its operand and
     then convert it to the desired mode.  If this is a paradoxical SUBREG,
     we cannot predict what values the "extra" bits might have.  */
  if (GET_CODE (x) == SUBREG
      && subreg_lowpart_p (x)
      && (GET_MODE_SIZE (GET_MODE (x))
	  <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
      && (value = get_last_value (SUBREG_REG (x))) != 0)
    return gen_lowpart_for_combine (GET_MODE (x), value);

  /* Only registers (and the SUBREG case above) have recorded values.  */
  if (GET_CODE (x) != REG)
    return 0;

  regno = REGNO (x);
  value = reg_last_set_value[regno];

  /* If we don't have a value or if it isn't for this basic block,
     return 0.  */

  if (value == 0
      || (REG_N_SETS (regno) != 1
	  && reg_last_set_label[regno] != label_tick))
    return 0;

  /* If the value was set in a later insn than the ones we are processing,
     we can't use it even if the register was only set once, but make a quick
     check to see if the previous insn set it to something.  This is commonly
     the case when the same pseudo is used by repeated insns.

     This does not work if there exists an instruction which is temporarily
     not on the insn chain.  */

  if (INSN_CUID (reg_last_set[regno]) >= subst_low_cuid)
    {
      rtx insn, set;

      /* We can not do anything useful in this case, because there is
	 an instruction which is not on the insn chain.  */
      if (subst_prev_insn)
	return 0;

      /* Skip over USE insns.  They are not useful here, and they may have
	 been made by combine, in which case they do not have an INSN_CUID
	 value.  We can't use prev_real_insn, because that would incorrectly
	 take us backwards across labels.  Skip over BARRIERs also, since
	 they could have been made by combine.  If we see one, we must be
	 optimizing dead code, so it doesn't matter what we do.  */
      for (insn = prev_nonnote_insn (subst_insn);
	   insn && ((GET_CODE (insn) == INSN
		     && GET_CODE (PATTERN (insn)) == USE)
		    || GET_CODE (insn) == BARRIER
		    || INSN_CUID (insn) >= subst_low_cuid);
	   insn = prev_nonnote_insn (insn))
	;

      if (insn
	  && (set = single_set (insn)) != 0
	  && rtx_equal_p (SET_DEST (set), x))
	{
	  value = SET_SRC (set);

	  /* Make sure that VALUE doesn't reference X.  Replace any
	     explicit references with a CLOBBER.  If there are any remaining
	     references (rare), don't use the value.  */

	  if (reg_mentioned_p (x, value))
	    value = replace_rtx (copy_rtx (value), x,
				 gen_rtx_CLOBBER (GET_MODE (x), const0_rtx));

	  /* reg_overlap_mentioned_p also catches partial overlaps that
	     replace_rtx could not substitute away.  */
	  if (reg_overlap_mentioned_p (x, value))
	    return 0;
	}
      else
	return 0;
    }

  /* If the value has all its registers valid, return it.  */
  if (get_last_value_validate (&value, reg_last_set[regno],
			       reg_last_set_label[regno], 0))
    return value;

  /* Otherwise, make a copy and replace any invalid register with
     (clobber (const_int 0)).  If that fails for some reason, return 0.  */

  value = copy_rtx (value);
  if (get_last_value_validate (&value, reg_last_set[regno],
			       reg_last_set_label[regno], 1))
    return value;

  return 0;
}
10846\f
10847/* Return nonzero if expression X refers to a REG or to memory
10848 that is set in an instruction more recent than FROM_CUID. */
10849
10850static int
10851use_crosses_set_p (x, from_cuid)
10852 register rtx x;
10853 int from_cuid;
10854{
10855 register char *fmt;
10856 register int i;
10857 register enum rtx_code code = GET_CODE (x);
10858
10859 if (code == REG)
10860 {
10861 register int regno = REGNO (x);
e28f5732
RK
10862 int endreg = regno + (regno < FIRST_PSEUDO_REGISTER
10863 ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
10864
230d793d
RS
10865#ifdef PUSH_ROUNDING
10866 /* Don't allow uses of the stack pointer to be moved,
10867 because we don't know whether the move crosses a push insn. */
10868 if (regno == STACK_POINTER_REGNUM)
10869 return 1;
10870#endif
e28f5732
RK
10871 for (;regno < endreg; regno++)
10872 if (reg_last_set[regno]
10873 && INSN_CUID (reg_last_set[regno]) > from_cuid)
10874 return 1;
10875 return 0;
230d793d
RS
10876 }
10877
10878 if (code == MEM && mem_last_set > from_cuid)
10879 return 1;
10880
10881 fmt = GET_RTX_FORMAT (code);
10882
10883 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
10884 {
10885 if (fmt[i] == 'E')
10886 {
10887 register int j;
10888 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
10889 if (use_crosses_set_p (XVECEXP (x, i, j), from_cuid))
10890 return 1;
10891 }
10892 else if (fmt[i] == 'e'
10893 && use_crosses_set_p (XEXP (x, i), from_cuid))
10894 return 1;
10895 }
10896 return 0;
10897}
10898\f
10899/* Define three variables used for communication between the following
10900 routines. */
10901
10902static int reg_dead_regno, reg_dead_endregno;
10903static int reg_dead_flag;
10904
10905/* Function called via note_stores from reg_dead_at_p.
10906
ddd5a7c1 10907 If DEST is within [reg_dead_regno, reg_dead_endregno), set
230d793d
RS
10908 reg_dead_flag to 1 if X is a CLOBBER and to -1 it is a SET. */
10909
10910static void
10911reg_dead_at_p_1 (dest, x)
10912 rtx dest;
10913 rtx x;
10914{
10915 int regno, endregno;
10916
10917 if (GET_CODE (dest) != REG)
10918 return;
10919
10920 regno = REGNO (dest);
10921 endregno = regno + (regno < FIRST_PSEUDO_REGISTER
10922 ? HARD_REGNO_NREGS (regno, GET_MODE (dest)) : 1);
10923
10924 if (reg_dead_endregno > regno && reg_dead_regno < endregno)
10925 reg_dead_flag = (GET_CODE (x) == CLOBBER) ? 1 : -1;
10926}
10927
/* Return non-zero if REG is known to be dead at INSN.

   We scan backwards from INSN.  If we hit a REG_DEAD note or a CLOBBER
   referencing REG, it is dead.  If we hit a SET referencing REG, it is
   live.  Otherwise, see if it is live or dead at the start of the basic
   block we are in.  Hard regs marked as being live in NEWPAT_USED_REGS
   must be assumed to be always live.  */

static int
reg_dead_at_p (reg, insn)
     rtx reg;
     rtx insn;
{
  int block, i;

  /* Set variables for reg_dead_at_p_1.  These statics are the only way
     to pass the register range through the note_stores callback.  */
  reg_dead_regno = REGNO (reg);
  reg_dead_endregno = reg_dead_regno + (reg_dead_regno < FIRST_PSEUDO_REGISTER
					? HARD_REGNO_NREGS (reg_dead_regno,
							    GET_MODE (reg))
					: 1);

  reg_dead_flag = 0;

  /* Check that reg isn't mentioned in NEWPAT_USED_REGS.  Such hard regs
     must be treated as always live, so REG cannot be dead.  */
  if (reg_dead_regno < FIRST_PSEUDO_REGISTER)
    {
      for (i = reg_dead_regno; i < reg_dead_endregno; i++)
	if (TEST_HARD_REG_BIT (newpat_used_regs, i))
	  return 0;
    }

  /* Scan backwards until we find a REG_DEAD note, SET, CLOBBER, label, or
     beginning of function.  note_stores sets reg_dead_flag via
     reg_dead_at_p_1: 1 means a CLOBBER (dead), -1 a SET (live).  */
  for (; insn && GET_CODE (insn) != CODE_LABEL && GET_CODE (insn) != BARRIER;
       insn = prev_nonnote_insn (insn))
    {
      note_stores (PATTERN (insn), reg_dead_at_p_1);
      if (reg_dead_flag)
	return reg_dead_flag == 1 ? 1 : 0;

      if (find_regno_note (insn, REG_DEAD, reg_dead_regno))
	return 1;
    }

  /* Get the basic block number that we were in.  If the scan ran off the
     start of the function, fall back to block 0.  */
  if (insn == 0)
    block = 0;
  else
    {
      for (block = 0; block < n_basic_blocks; block++)
	if (insn == basic_block_head[block])
	  break;

      /* INSN is not a block head; we cannot identify the block, so
	 conservatively report the register as live.  */
      if (block == n_basic_blocks)
	return 0;
    }

  /* Dead only if no piece of the hard register is live at block entry.  */
  for (i = reg_dead_regno; i < reg_dead_endregno; i++)
    if (REGNO_REG_SET_P (basic_block_live_at_start[block], i))
      return 0;

  return 1;
}
6e25d159
RK
10992\f
/* Note hard registers in X that are used.  This code is similar to
   that in flow.c, but much simpler since we don't care about pseudos.
   Each hard register found is recorded in the file-level set
   NEWPAT_USED_REGS.  */

static void
mark_used_regs_combine (x)
     rtx x;
{
  register RTX_CODE code = GET_CODE (x);
  register int regno;
  int i;

  switch (code)
    {
    /* Leaf codes that mention no hard registers.  */
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_INT:
    case CONST:
    case CONST_DOUBLE:
    case PC:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
    case ASM_INPUT:
#ifdef HAVE_cc0
    /* CC0 must die in the insn after it is set, so we don't need to take
       special note of it here.  */
    case CC0:
#endif
      return;

    case CLOBBER:
      /* If we are clobbering a MEM, mark any hard registers inside the
	 address as used.  */
      if (GET_CODE (XEXP (x, 0)) == MEM)
	mark_used_regs_combine (XEXP (XEXP (x, 0), 0));
      return;

    case REG:
      regno = REGNO (x);
      /* A hard reg in a wide mode may really be multiple registers.
	 If so, mark all of them just like the first.  */
      if (regno < FIRST_PSEUDO_REGISTER)
	{
	  /* None of this applies to the stack, frame or arg pointers */
	  if (regno == STACK_POINTER_REGNUM
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
	      || regno == HARD_FRAME_POINTER_REGNUM
#endif
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
	      || (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
#endif
	      || regno == FRAME_POINTER_REGNUM)
	    return;

	  i = HARD_REGNO_NREGS (regno, GET_MODE (x));
	  while (i-- > 0)
	    SET_HARD_REG_BIT (newpat_used_regs, regno + i);
	}
      return;

    case SET:
      {
	/* If setting a MEM, or a SUBREG of a MEM, then note any hard regs in
	   the address.  Strip wrappers to reach the real destination.  */
	register rtx testreg = SET_DEST (x);

	while (GET_CODE (testreg) == SUBREG
	       || GET_CODE (testreg) == ZERO_EXTRACT
	       || GET_CODE (testreg) == SIGN_EXTRACT
	       || GET_CODE (testreg) == STRICT_LOW_PART)
	  testreg = XEXP (testreg, 0);

	if (GET_CODE (testreg) == MEM)
	  mark_used_regs_combine (XEXP (testreg, 0));

	mark_used_regs_combine (SET_SRC (x));
      }
      return;

    default:
      /* Fall through to the generic operand scan below.  */
      break;
    }

  /* Recursively scan the operands of this expression.  */

  {
    register char *fmt = GET_RTX_FORMAT (code);

    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
	if (fmt[i] == 'e')
	  mark_used_regs_combine (XEXP (x, i));
	else if (fmt[i] == 'E')
	  {
	    register int j;

	    for (j = 0; j < XVECLEN (x, i); j++)
	      mark_used_regs_combine (XVECEXP (x, i, j));
	  }
      }
  }
}
11094
230d793d
RS
11095\f
11096/* Remove register number REGNO from the dead registers list of INSN.
11097
11098 Return the note used to record the death, if there was one. */
11099
11100rtx
11101remove_death (regno, insn)
11102 int regno;
11103 rtx insn;
11104{
11105 register rtx note = find_regno_note (insn, REG_DEAD, regno);
11106
11107 if (note)
1a26b032 11108 {
b1f21e0a 11109 REG_N_DEATHS (regno)--;
1a26b032
RK
11110 remove_note (insn, note);
11111 }
230d793d
RS
11112
11113 return note;
11114}
11115
/* For each register (hardware or pseudo) used within expression X, if its
   death is in an instruction with cuid between FROM_CUID (inclusive) and
   TO_INSN (exclusive), put a REG_DEAD note for that register in the
   list headed by PNOTES.

   That said, don't move registers killed by maybe_kill_insn.

   This is done when X is being merged by combination into TO_INSN.  These
   notes will then be distributed as needed.  */

static void
move_deaths (x, maybe_kill_insn, from_cuid, to_insn, pnotes)
     rtx x;
     rtx maybe_kill_insn;
     int from_cuid;
     rtx to_insn;
     rtx *pnotes;
{
  register char *fmt;
  register int len, i;
  register enum rtx_code code = GET_CODE (x);

  if (code == REG)
    {
      register int regno = REGNO (x);
      register rtx where_dead = reg_last_death[regno];
      register rtx before_dead, after_dead;

      /* Don't move the register if it gets killed in between from and to.  */
      if (maybe_kill_insn && reg_set_p (x, maybe_kill_insn)
	  && !reg_referenced_p (x, maybe_kill_insn))
	return;

      /* WHERE_DEAD could be a USE insn made by combine, so first we
	 make sure that we have insns with valid INSN_CUID values.
	 Walk backwards/forwards to the nearest insns that predate
	 max_uid_cuid and hence have a recorded cuid.  */
      before_dead = where_dead;
      while (before_dead && INSN_UID (before_dead) > max_uid_cuid)
	before_dead = PREV_INSN (before_dead);
      after_dead = where_dead;
      while (after_dead && INSN_UID (after_dead) > max_uid_cuid)
	after_dead = NEXT_INSN (after_dead);

      /* The death lies in [FROM_CUID, TO_INSN) -- move the note.  */
      if (before_dead && after_dead
	  && INSN_CUID (before_dead) >= from_cuid
	  && (INSN_CUID (after_dead) < INSN_CUID (to_insn)
	      || (where_dead != after_dead
		  && INSN_CUID (after_dead) == INSN_CUID (to_insn))))
	{
	  rtx note = remove_death (regno, where_dead);

	  /* It is possible for the call above to return 0.  This can occur
	     when reg_last_death points to I2 or I1 that we combined with.
	     In that case make a new note.

	     We must also check for the case where X is a hard register
	     and NOTE is a death note for a range of hard registers
	     including X.  In that case, we must put REG_DEAD notes for
	     the remaining registers in place of NOTE.  */

	  if (note != 0 && regno < FIRST_PSEUDO_REGISTER
	      && (GET_MODE_SIZE (GET_MODE (XEXP (note, 0)))
		  > GET_MODE_SIZE (GET_MODE (x))))
	    {
	      int deadregno = REGNO (XEXP (note, 0));
	      int deadend
		= (deadregno + HARD_REGNO_NREGS (deadregno,
						 GET_MODE (XEXP (note, 0))));
	      int ourend = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
	      int i;

	      /* Re-create REG_DEAD notes on WHERE_DEAD for every hard reg
		 the wide note covered that is outside X's own range.  */
	      for (i = deadregno; i < deadend; i++)
		if (i < regno || i >= ourend)
		  REG_NOTES (where_dead)
		    = gen_rtx_EXPR_LIST (REG_DEAD,
					 gen_rtx_REG (reg_raw_mode[i], i),
					 REG_NOTES (where_dead));
	    }
	  /* If we didn't find any note, or if we found a REG_DEAD note that
	     covers only part of the given reg, and we have a multi-reg hard
	     register, then to be safe we must check for REG_DEAD notes
	     for each register other than the first.  They could have
	     their own REG_DEAD notes lying around.  */
	  else if ((note == 0
		    || (note != 0
			&& (GET_MODE_SIZE (GET_MODE (XEXP (note, 0)))
			    < GET_MODE_SIZE (GET_MODE (x)))))
		   && regno < FIRST_PSEUDO_REGISTER
		   && HARD_REGNO_NREGS (regno, GET_MODE (x)) > 1)
	    {
	      int ourend = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
	      int i, offset;
	      rtx oldnotes = 0;

	      /* Skip the regs the found (partial) note already covers.  */
	      if (note)
		offset = HARD_REGNO_NREGS (regno, GET_MODE (XEXP (note, 0)));
	      else
		offset = 1;

	      for (i = regno + offset; i < ourend; i++)
		move_deaths (gen_rtx_REG (reg_raw_mode[i], i),
			     maybe_kill_insn, from_cuid, to_insn, &oldnotes);
	    }

	  /* Reuse the old note only if it matches X's mode exactly;
	     otherwise build a fresh REG_DEAD note for X.  */
	  if (note != 0 && GET_MODE (XEXP (note, 0)) == GET_MODE (x))
	    {
	      XEXP (note, 1) = *pnotes;
	      *pnotes = note;
	    }
	  else
	    *pnotes = gen_rtx_EXPR_LIST (REG_DEAD, x, *pnotes);

	  REG_N_DEATHS (regno)++;
	}

      return;
    }

  else if (GET_CODE (x) == SET)
    {
      rtx dest = SET_DEST (x);

      move_deaths (SET_SRC (x), maybe_kill_insn, from_cuid, to_insn, pnotes);

      /* In the case of a ZERO_EXTRACT, a STRICT_LOW_PART, or a SUBREG
	 that accesses one word of a multi-word item, some piece of every
	 register in the expression is used by this insn, so remove any
	 old death.  */

      if (GET_CODE (dest) == ZERO_EXTRACT
	  || GET_CODE (dest) == STRICT_LOW_PART
	  || (GET_CODE (dest) == SUBREG
	      && (((GET_MODE_SIZE (GET_MODE (dest))
		    + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
		  == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
		       + UNITS_PER_WORD - 1) / UNITS_PER_WORD))))
	{
	  move_deaths (dest, maybe_kill_insn, from_cuid, to_insn, pnotes);
	  return;
	}

      /* If this is some other SUBREG, we know it replaces the entire
	 value, so use that as the destination.  */
      if (GET_CODE (dest) == SUBREG)
	dest = SUBREG_REG (dest);

      /* If this is a MEM, adjust deaths of anything used in the address.
	 For a REG (the only other possibility), the entire value is
	 being replaced so the old value is not used in this insn.  */

      if (GET_CODE (dest) == MEM)
	move_deaths (XEXP (dest, 0), maybe_kill_insn, from_cuid,
		     to_insn, pnotes);
      return;
    }

  else if (GET_CODE (x) == CLOBBER)
    return;

  /* Generic case: recursively process every operand of X.  */
  len = GET_RTX_LENGTH (code);
  fmt = GET_RTX_FORMAT (code);

  for (i = 0; i < len; i++)
    {
      if (fmt[i] == 'E')
	{
	  register int j;
	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    move_deaths (XVECEXP (x, i, j), maybe_kill_insn, from_cuid,
			 to_insn, pnotes);
	}
      else if (fmt[i] == 'e')
	move_deaths (XEXP (x, i), maybe_kill_insn, from_cuid, to_insn, pnotes);
    }
}
11290\f
a7c99304
RK
11291/* Return 1 if X is the target of a bit-field assignment in BODY, the
11292 pattern of an insn. X must be a REG. */
230d793d
RS
11293
11294static int
a7c99304
RK
11295reg_bitfield_target_p (x, body)
11296 rtx x;
230d793d
RS
11297 rtx body;
11298{
11299 int i;
11300
11301 if (GET_CODE (body) == SET)
a7c99304
RK
11302 {
11303 rtx dest = SET_DEST (body);
11304 rtx target;
11305 int regno, tregno, endregno, endtregno;
11306
11307 if (GET_CODE (dest) == ZERO_EXTRACT)
11308 target = XEXP (dest, 0);
11309 else if (GET_CODE (dest) == STRICT_LOW_PART)
11310 target = SUBREG_REG (XEXP (dest, 0));
11311 else
11312 return 0;
11313
11314 if (GET_CODE (target) == SUBREG)
11315 target = SUBREG_REG (target);
11316
11317 if (GET_CODE (target) != REG)
11318 return 0;
11319
11320 tregno = REGNO (target), regno = REGNO (x);
11321 if (tregno >= FIRST_PSEUDO_REGISTER || regno >= FIRST_PSEUDO_REGISTER)
11322 return target == x;
11323
11324 endtregno = tregno + HARD_REGNO_NREGS (tregno, GET_MODE (target));
11325 endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
11326
11327 return endregno > tregno && regno < endtregno;
11328 }
230d793d
RS
11329
11330 else if (GET_CODE (body) == PARALLEL)
11331 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
a7c99304 11332 if (reg_bitfield_target_p (x, XVECEXP (body, 0, i)))
230d793d
RS
11333 return 1;
11334
11335 return 0;
11336}
11337\f
/* Given a chain of REG_NOTES originally from FROM_INSN, try to place them
   as appropriate.  I3 and I2 are the insns resulting from the combination
   insns including FROM (I2 may be zero).

   ELIM_I2 and ELIM_I1 are either zero or registers that we know will
   not need REG_DEAD notes because they are being substituted for.  This
   saves searching in the most common cases.

   Each note in the list is either ignored or placed on some insns, depending
   on the type of note.  */

static void
distribute_notes (notes, from_insn, i3, i2, elim_i2, elim_i1)
     rtx notes;
     rtx from_insn;
     rtx i3, i2;
     rtx elim_i2, elim_i1;
{
  rtx note, next_note;
  rtx tem;

  for (note = notes; note; note = next_note)
    {
      /* PLACE and PLACE2 are the (up to two) insns this note ends up on;
	 zero means the note is dropped.  */
      rtx place = 0, place2 = 0;

      /* If this NOTE references a pseudo register, ensure it references
	 the latest copy of that register.  */
      if (XEXP (note, 0) && GET_CODE (XEXP (note, 0)) == REG
	  && REGNO (XEXP (note, 0)) >= FIRST_PSEUDO_REGISTER)
	XEXP (note, 0) = regno_reg_rtx[REGNO (XEXP (note, 0))];

      next_note = XEXP (note, 1);
      switch (REG_NOTE_KIND (note))
	{
	case REG_BR_PROB:
	case REG_EXEC_COUNT:
	  /* Doesn't matter much where we put this, as long as it's somewhere.
	     It is preferable to keep these notes on branches, which is most
	     likely to be i3.  */
	  place = i3;
	  break;

	case REG_UNUSED:
	  /* Any clobbers for i3 may still exist, and so we must process
	     REG_UNUSED notes from that insn.

	     Any clobbers from i2 or i1 can only exist if they were added by
	     recog_for_combine.  In that case, recog_for_combine created the
	     necessary REG_UNUSED notes.  Trying to keep any original
	     REG_UNUSED notes from these insns can cause incorrect output
	     if it is for the same register as the original i3 dest.
	     In that case, we will notice that the register is set in i3,
	     and then add a REG_UNUSED note for the destination of i3, which
	     is wrong.  However, it is possible to have REG_UNUSED notes from
	     i2 or i1 for register which were both used and clobbered, so
	     we keep notes from i2 or i1 if they will turn into REG_DEAD
	     notes.  */

	  /* If this register is set or clobbered in I3, put the note there
	     unless there is one already.  */
	  if (reg_set_p (XEXP (note, 0), PATTERN (i3)))
	    {
	      if (from_insn != i3)
		break;

	      if (! (GET_CODE (XEXP (note, 0)) == REG
		     ? find_regno_note (i3, REG_UNUSED, REGNO (XEXP (note, 0)))
		     : find_reg_note (i3, REG_UNUSED, XEXP (note, 0))))
		place = i3;
	    }
	  /* Otherwise, if this register is used by I3, then this register
	     now dies here, so we must put a REG_DEAD note here unless there
	     is one already.  */
	  else if (reg_referenced_p (XEXP (note, 0), PATTERN (i3))
		   && ! (GET_CODE (XEXP (note, 0)) == REG
			 ? find_regno_note (i3, REG_DEAD, REGNO (XEXP (note, 0)))
			 : find_reg_note (i3, REG_DEAD, XEXP (note, 0))))
	    {
	      PUT_REG_NOTE_KIND (note, REG_DEAD);
	      place = i3;
	    }
	  break;

	case REG_EQUAL:
	case REG_EQUIV:
	case REG_NONNEG:
	case REG_NOALIAS:
	  /* These notes say something about results of an insn.  We can
	     only support them if they used to be on I3 in which case they
	     remain on I3.  Otherwise they are ignored.

	     If the note refers to an expression that is not a constant, we
	     must also ignore the note since we cannot tell whether the
	     equivalence is still true.  It might be possible to do
	     slightly better than this (we only have a problem if I2DEST
	     or I1DEST is present in the expression), but it doesn't
	     seem worth the trouble.  */

	  if (from_insn == i3
	      && (XEXP (note, 0) == 0 || CONSTANT_P (XEXP (note, 0))))
	    place = i3;
	  break;

	case REG_INC:
	case REG_NO_CONFLICT:
	case REG_LABEL:
	  /* These notes say something about how a register is used.  They must
	     be present on any use of the register in I2 or I3.  */
	  if (reg_mentioned_p (XEXP (note, 0), PATTERN (i3)))
	    place = i3;

	  if (i2 && reg_mentioned_p (XEXP (note, 0), PATTERN (i2)))
	    {
	      if (place)
		place2 = i2;
	      else
		place = i2;
	    }
	  break;

	case REG_WAS_0:
	  /* It is too much trouble to try to see if this note is still
	     correct in all situations.  It is better to simply delete it.  */
	  break;

	case REG_RETVAL:
	  /* If the insn previously containing this note still exists,
	     put it back where it was.  Otherwise move it to the previous
	     insn.  Adjust the corresponding REG_LIBCALL note.  */
	  if (GET_CODE (from_insn) != NOTE)
	    place = from_insn;
	  else
	    {
	      tem = find_reg_note (XEXP (note, 0), REG_LIBCALL, NULL_RTX);
	      place = prev_real_insn (from_insn);
	      if (tem && place)
		XEXP (tem, 0) = place;
	    }
	  break;

	case REG_LIBCALL:
	  /* This is handled similarly to REG_RETVAL.  */
	  if (GET_CODE (from_insn) != NOTE)
	    place = from_insn;
	  else
	    {
	      tem = find_reg_note (XEXP (note, 0), REG_RETVAL, NULL_RTX);
	      place = next_real_insn (from_insn);
	      if (tem && place)
		XEXP (tem, 0) = place;
	    }
	  break;

	case REG_DEAD:
	  /* If the register is used as an input in I3, it dies there.
	     Similarly for I2, if it is non-zero and adjacent to I3.

	     If the register is not used as an input in either I3 or I2
	     and it is not one of the registers we were supposed to eliminate,
	     there are two possibilities.  We might have a non-adjacent I2
	     or we might have somehow eliminated an additional register
	     from a computation.  For example, we might have had A & B where
	     we discover that B will always be zero.  In this case we will
	     eliminate the reference to A.

	     In both cases, we must search to see if we can find a previous
	     use of A and put the death note there.  */

	  if (from_insn
	      && GET_CODE (from_insn) == CALL_INSN
	      && find_reg_fusage (from_insn, USE, XEXP (note, 0)))
	    place = from_insn;
	  else if (reg_referenced_p (XEXP (note, 0), PATTERN (i3)))
	    place = i3;
	  else if (i2 != 0 && next_nonnote_insn (i2) == i3
		   && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
	    place = i2;

	  if (XEXP (note, 0) == elim_i2 || XEXP (note, 0) == elim_i1)
	    break;

	  /* If the register is used in both I2 and I3 and it dies in I3,
	     we might have added another reference to it.  If reg_n_refs
	     was 2, bump it to 3.  This has to be correct since the
	     register must have been set somewhere.  The reason this is
	     done is because local-alloc.c treats 2 references as a
	     special case.  */

	  if (place == i3 && i2 != 0 && GET_CODE (XEXP (note, 0)) == REG
	      && REG_N_REFS (REGNO (XEXP (note, 0)))== 2
	      && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
	    REG_N_REFS (REGNO (XEXP (note, 0))) = 3;

	  /* No obvious home for the death note yet: scan backwards from I3
	     looking for a previous use or set of the register.  */
	  if (place == 0)
	    {
	      for (tem = prev_nonnote_insn (i3);
		   place == 0 && tem
		   && (GET_CODE (tem) == INSN || GET_CODE (tem) == CALL_INSN);
		   tem = prev_nonnote_insn (tem))
		{
		  /* If the register is being set at TEM, see if that is all
		     TEM is doing.  If so, delete TEM.  Otherwise, make this
		     into a REG_UNUSED note instead.  */
		  if (reg_set_p (XEXP (note, 0), PATTERN (tem)))
		    {
		      rtx set = single_set (tem);
		      rtx inner_dest = 0;
		      rtx cc0_setter = NULL_RTX;

		      /* Strip wrappers to find the real destination.  */
		      if (set != 0)
			for (inner_dest = SET_DEST (set);
			     GET_CODE (inner_dest) == STRICT_LOW_PART
			     || GET_CODE (inner_dest) == SUBREG
			     || GET_CODE (inner_dest) == ZERO_EXTRACT;
			     inner_dest = XEXP (inner_dest, 0))
			  ;

		      /* Verify that it was the set, and not a clobber that
			 modified the register.

			 CC0 targets must be careful to maintain setter/user
			 pairs.  If we cannot delete the setter due to side
			 effects, mark the user with an UNUSED note instead
			 of deleting it.  */

		      if (set != 0 && ! side_effects_p (SET_SRC (set))
			  && rtx_equal_p (XEXP (note, 0), inner_dest)
#ifdef HAVE_cc0
			  && (! reg_mentioned_p (cc0_rtx, SET_SRC (set))
			      || ((cc0_setter = prev_cc0_setter (tem)) != NULL
				  && sets_cc0_p (PATTERN (cc0_setter)) > 0))
#endif
			  )
			{
			  /* Move the notes and links of TEM elsewhere.
			     This might delete other dead insns recursively.
			     First set the pattern to something that won't use
			     any register.  */

			  PATTERN (tem) = pc_rtx;

			  distribute_notes (REG_NOTES (tem), tem, tem,
					    NULL_RTX, NULL_RTX, NULL_RTX);
			  distribute_links (LOG_LINKS (tem));

			  /* Turn TEM into a deleted-insn note.  */
			  PUT_CODE (tem, NOTE);
			  NOTE_LINE_NUMBER (tem) = NOTE_INSN_DELETED;
			  NOTE_SOURCE_FILE (tem) = 0;

#ifdef HAVE_cc0
			  /* Delete the setter too.  */
			  if (cc0_setter)
			    {
			      PATTERN (cc0_setter) = pc_rtx;

			      distribute_notes (REG_NOTES (cc0_setter),
						cc0_setter, cc0_setter,
						NULL_RTX, NULL_RTX, NULL_RTX);
			      distribute_links (LOG_LINKS (cc0_setter));

			      PUT_CODE (cc0_setter, NOTE);
			      NOTE_LINE_NUMBER (cc0_setter) = NOTE_INSN_DELETED;
			      NOTE_SOURCE_FILE (cc0_setter) = 0;
			    }
#endif
			}
		      /* If the register is both set and used here, put the
			 REG_DEAD note here, but place a REG_UNUSED note
			 here too unless there already is one.  */
		      else if (reg_referenced_p (XEXP (note, 0),
						 PATTERN (tem)))
			{
			  place = tem;

			  if (! find_regno_note (tem, REG_UNUSED,
						 REGNO (XEXP (note, 0))))
			    REG_NOTES (tem)
			      = gen_rtx_EXPR_LIST (REG_UNUSED,
						   XEXP (note, 0),
						   REG_NOTES (tem));
			}
		      else
			{
			  PUT_REG_NOTE_KIND (note, REG_UNUSED);

			  /* If there isn't already a REG_UNUSED note, put one
			     here.  */
			  if (! find_regno_note (tem, REG_UNUSED,
						 REGNO (XEXP (note, 0))))
			    place = tem;
			  break;
			}
		    }
		  else if (reg_referenced_p (XEXP (note, 0), PATTERN (tem))
			   || (GET_CODE (tem) == CALL_INSN
			       && find_reg_fusage (tem, USE, XEXP (note, 0))))
		    {
		      place = tem;

		      /* If we are doing a 3->2 combination, and we have a
			 register which formerly died in i3 and was not used
			 by i2, which now no longer dies in i3 and is used in
			 i2 but does not die in i2, and place is between i2
			 and i3, then we may need to move a link from place to
			 i2.  */
		      if (i2 && INSN_UID (place) <= max_uid_cuid
			  && INSN_CUID (place) > INSN_CUID (i2)
			  && from_insn && INSN_CUID (from_insn) > INSN_CUID (i2)
			  && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
			{
			  rtx links = LOG_LINKS (place);
			  LOG_LINKS (place) = 0;
			  distribute_links (links);
			}
		      break;
		    }
		}

	      /* If we haven't found an insn for the death note and it
		 is still a REG_DEAD note, but we have hit a CODE_LABEL,
		 insert a USE insn for the register at that label and
		 put the death note there.  This prevents problems with
		 call-state tracking in caller-save.c.  */
	      if (REG_NOTE_KIND (note) == REG_DEAD && place == 0 && tem != 0)
		{
		  place
		    = emit_insn_after (gen_rtx_USE (VOIDmode, XEXP (note, 0)),
				       tem);

		  /* If this insn was emitted between blocks, then update
		     basic_block_head of the current block to include it.  */
		  if (basic_block_end[this_basic_block - 1] == tem)
		    basic_block_head[this_basic_block] = place;
		}
	    }

	  /* If the register is set or already dead at PLACE, we needn't do
	     anything with this note if it is still a REG_DEAD note.
	     Note that we get here if it is set at all, not only if it is
	     totally replaced, which is what `dead_or_set_p' checks, so
	     also check for it being set partially.  */


	  if (place && REG_NOTE_KIND (note) == REG_DEAD)
	    {
	      int regno = REGNO (XEXP (note, 0));

	      if (dead_or_set_p (place, XEXP (note, 0))
		  || reg_bitfield_target_p (XEXP (note, 0), PATTERN (place)))
		{
		  /* Unless the register previously died in PLACE, clear
		     reg_last_death.  [I no longer understand why this is
		     being done.] */
		  if (reg_last_death[regno] != place)
		    reg_last_death[regno] = 0;
		  place = 0;
		}
	      else
		reg_last_death[regno] = place;

	      /* If this is a death note for a hard reg that is occupying
		 multiple registers, ensure that we are still using all
		 parts of the object.  If we find a piece of the object
		 that is unused, we must add a USE for that piece before
		 PLACE and put the appropriate REG_DEAD note on it.

		 An alternative would be to put a REG_UNUSED for the pieces
		 on the insn that set the register, but that can't be done if
		 it is not in the same block.  It is simpler, though less
		 efficient, to add the USE insns.  */

	      if (place && regno < FIRST_PSEUDO_REGISTER
		  && HARD_REGNO_NREGS (regno, GET_MODE (XEXP (note, 0))) > 1)
		{
		  int endregno
		    = regno + HARD_REGNO_NREGS (regno,
						GET_MODE (XEXP (note, 0)));
		  int all_used = 1;
		  int i;

		  for (i = regno; i < endregno; i++)
		    if (! refers_to_regno_p (i, i + 1, PATTERN (place), 0)
			&& ! find_regno_fusage (place, USE, i))
		      {
			rtx piece = gen_rtx_REG (reg_raw_mode[i], i);
			rtx p;

			/* See if we already placed a USE note for this
			   register in front of PLACE.  */
			for (p = place;
			     GET_CODE (PREV_INSN (p)) == INSN
			     && GET_CODE (PATTERN (PREV_INSN (p))) == USE;
			     p = PREV_INSN (p))
			  if (rtx_equal_p (piece,
					   XEXP (PATTERN (PREV_INSN (p)), 0)))
			    {
			      p = 0;
			      break;
			    }

			if (p)
			  {
			    rtx use_insn
			      = emit_insn_before (gen_rtx_USE (VOIDmode,
							       piece),
						  p);
			    REG_NOTES (use_insn)
			      = gen_rtx_EXPR_LIST (REG_DEAD, piece,
						   REG_NOTES (use_insn));
			  }

			all_used = 0;
		      }

		  /* Check for the case where the register dying partially
		     overlaps the register set by this insn.  */
		  if (all_used)
		    for (i = regno; i < endregno; i++)
		      if (dead_or_set_regno_p (place, i))
			{
			  all_used = 0;
			  break;
			}

		  if (! all_used)
		    {
		      /* Put only REG_DEAD notes for pieces that are
			 still used and that are not already dead or set.  */

		      for (i = regno; i < endregno; i++)
			{
			  rtx piece = gen_rtx_REG (reg_raw_mode[i], i);

			  if ((reg_referenced_p (piece, PATTERN (place))
			       || (GET_CODE (place) == CALL_INSN
				   && find_reg_fusage (place, USE, piece)))
			      && ! dead_or_set_p (place, piece)
			      && ! reg_bitfield_target_p (piece,
							  PATTERN (place)))
			    REG_NOTES (place)
			      = gen_rtx_EXPR_LIST (REG_DEAD,
						   piece, REG_NOTES (place));
			}

		      place = 0;
		    }
		}
	    }
	  break;

	default:
	  /* Any other notes should not be present at this point in the
	     compilation.  */
	  abort ();
	}

      /* Finally attach the note to PLACE (and a copy to PLACE2), keeping
	 the per-register death counts in sync when a REG_DEAD/REG_UNUSED
	 note is dropped or duplicated.  */
      if (place)
	{
	  XEXP (note, 1) = REG_NOTES (place);
	  REG_NOTES (place) = note;
	}
      else if ((REG_NOTE_KIND (note) == REG_DEAD
		|| REG_NOTE_KIND (note) == REG_UNUSED)
	       && GET_CODE (XEXP (note, 0)) == REG)
	REG_N_DEATHS (REGNO (XEXP (note, 0)))--;

      if (place2)
	{
	  if ((REG_NOTE_KIND (note) == REG_DEAD
	       || REG_NOTE_KIND (note) == REG_UNUSED)
	      && GET_CODE (XEXP (note, 0)) == REG)
	    REG_N_DEATHS (REGNO (XEXP (note, 0)))++;

	  REG_NOTES (place2) = gen_rtx_fmt_ee (GET_CODE (note),
					       REG_NOTE_KIND (note),
					       XEXP (note, 0),
					       REG_NOTES (place2));
	}
    }
}
11818\f
11819/* Similarly to above, distribute the LOG_LINKS that used to be present on
5089e22e
RS
11820 I3, I2, and I1 to new locations. This is also called in one case to
11821 add a link pointing at I3 when I3's destination is changed. */
230d793d
RS
11822
11823static void
11824distribute_links (links)
11825 rtx links;
11826{
11827 rtx link, next_link;
11828
11829 for (link = links; link; link = next_link)
11830 {
11831 rtx place = 0;
11832 rtx insn;
11833 rtx set, reg;
11834
11835 next_link = XEXP (link, 1);
11836
11837 /* If the insn that this link points to is a NOTE or isn't a single
11838 set, ignore it. In the latter case, it isn't clear what we
11839 can do other than ignore the link, since we can't tell which
11840 register it was for. Such links wouldn't be used by combine
11841 anyway.
11842
11843 It is not possible for the destination of the target of the link to
11844 have been changed by combine. The only potential of this is if we
11845 replace I3, I2, and I1 by I3 and I2. But in that case the
11846 destination of I2 also remains unchanged. */
11847
11848 if (GET_CODE (XEXP (link, 0)) == NOTE
11849 || (set = single_set (XEXP (link, 0))) == 0)
11850 continue;
11851
11852 reg = SET_DEST (set);
11853 while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
11854 || GET_CODE (reg) == SIGN_EXTRACT
11855 || GET_CODE (reg) == STRICT_LOW_PART)
11856 reg = XEXP (reg, 0);
11857
11858 /* A LOG_LINK is defined as being placed on the first insn that uses
11859 a register and points to the insn that sets the register. Start
11860 searching at the next insn after the target of the link and stop
11861 when we reach a set of the register or the end of the basic block.
11862
11863 Note that this correctly handles the link that used to point from
5089e22e 11864 I3 to I2. Also note that not much searching is typically done here
230d793d
RS
11865 since most links don't point very far away. */
11866
11867 for (insn = NEXT_INSN (XEXP (link, 0));
0d4d42c3
RK
11868 (insn && (this_basic_block == n_basic_blocks - 1
11869 || basic_block_head[this_basic_block + 1] != insn));
230d793d
RS
11870 insn = NEXT_INSN (insn))
11871 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
11872 && reg_overlap_mentioned_p (reg, PATTERN (insn)))
11873 {
11874 if (reg_referenced_p (reg, PATTERN (insn)))
11875 place = insn;
11876 break;
11877 }
6e2d1486
RK
11878 else if (GET_CODE (insn) == CALL_INSN
11879 && find_reg_fusage (insn, USE, reg))
11880 {
11881 place = insn;
11882 break;
11883 }
230d793d
RS
11884
11885 /* If we found a place to put the link, place it there unless there
11886 is already a link to the same insn as LINK at that point. */
11887
11888 if (place)
11889 {
11890 rtx link2;
11891
11892 for (link2 = LOG_LINKS (place); link2; link2 = XEXP (link2, 1))
11893 if (XEXP (link2, 0) == XEXP (link, 0))
11894 break;
11895
11896 if (link2 == 0)
11897 {
11898 XEXP (link, 1) = LOG_LINKS (place);
11899 LOG_LINKS (place) = link;
abe6e52f
RK
11900
11901 /* Set added_links_insn to the earliest insn we added a
11902 link to. */
11903 if (added_links_insn == 0
11904 || INSN_CUID (added_links_insn) > INSN_CUID (place))
11905 added_links_insn = place;
230d793d
RS
11906 }
11907 }
11908 }
11909}
11910\f
1427d6d2
RK
11911/* Compute INSN_CUID for INSN, which is an insn made by combine. */
11912
11913static int
11914insn_cuid (insn)
11915 rtx insn;
11916{
11917 while (insn != 0 && INSN_UID (insn) > max_uid_cuid
11918 && GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == USE)
11919 insn = NEXT_INSN (insn);
11920
11921 if (INSN_UID (insn) > max_uid_cuid)
11922 abort ();
11923
11924 return INSN_CUID (insn);
11925}
11926\f
230d793d
RS
11927void
11928dump_combine_stats (file)
11929 FILE *file;
11930{
11931 fprintf
11932 (file,
11933 ";; Combiner statistics: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n\n",
11934 combine_attempts, combine_merges, combine_extras, combine_successes);
11935}
11936
11937void
11938dump_combine_total_stats (file)
11939 FILE *file;
11940{
11941 fprintf
11942 (file,
11943 "\n;; Combiner totals: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n",
11944 total_attempts, total_merges, total_extras, total_successes);
11945}
This page took 2.170786 seconds and 5 git commands to generate.