]> gcc.gnu.org Git - gcc.git/blame - gcc/combine.c
disable
[gcc.git] / gcc / combine.c
CommitLineData
230d793d 1/* Optimize by combining instructions for GNU compiler.
c85f7c16 2 Copyright (C) 1987, 88, 92-97, 1998 Free Software Foundation, Inc.
230d793d
RS
3
4This file is part of GNU CC.
5
6GNU CC is free software; you can redistribute it and/or modify
7it under the terms of the GNU General Public License as published by
8the Free Software Foundation; either version 2, or (at your option)
9any later version.
10
11GNU CC is distributed in the hope that it will be useful,
12but WITHOUT ANY WARRANTY; without even the implied warranty of
13MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14GNU General Public License for more details.
15
16You should have received a copy of the GNU General Public License
17along with GNU CC; see the file COPYING. If not, write to
940d9d63
RK
18the Free Software Foundation, 59 Temple Place - Suite 330,
19Boston, MA 02111-1307, USA. */
230d793d
RS
20
21
22/* This module is essentially the "combiner" phase of the U. of Arizona
23 Portable Optimizer, but redone to work on our list-structured
24 representation for RTL instead of their string representation.
25
26 The LOG_LINKS of each insn identify the most recent assignment
27 to each REG used in the insn. It is a list of previous insns,
28 each of which contains a SET for a REG that is used in this insn
29 and not used or set in between. LOG_LINKs never cross basic blocks.
30 They were set up by the preceding pass (lifetime analysis).
31
32 We try to combine each pair of insns joined by a logical link.
33 We also try to combine triples of insns A, B and C when
34 C has a link back to B and B has a link back to A.
35
36 LOG_LINKS does not have links for use of the CC0. They don't
37 need to, because the insn that sets the CC0 is always immediately
38 before the insn that tests it. So we always regard a branch
39 insn as having a logical link to the preceding insn. The same is true
40 for an insn explicitly using CC0.
41
42 We check (with use_crosses_set_p) to avoid combining in such a way
43 as to move a computation to a place where its value would be different.
44
45 Combination is done by mathematically substituting the previous
46 insn(s) values for the regs they set into the expressions in
47 the later insns that refer to these regs. If the result is a valid insn
48 for our target machine, according to the machine description,
49 we install it, delete the earlier insns, and update the data flow
50 information (LOG_LINKS and REG_NOTES) for what we did.
51
52 There are a few exceptions where the dataflow information created by
 53 flow.c isn't completely updated:
54
55 - reg_live_length is not updated
56 - reg_n_refs is not adjusted in the rare case when a register is
57 no longer required in a computation
58 - there are extremely rare cases (see distribute_regnotes) when a
59 REG_DEAD note is lost
60 - a LOG_LINKS entry that refers to an insn with multiple SETs may be
61 removed because there is no way to know which register it was
62 linking
63
64 To simplify substitution, we combine only when the earlier insn(s)
65 consist of only a single assignment. To simplify updating afterward,
66 we never combine when a subroutine call appears in the middle.
67
68 Since we do not represent assignments to CC0 explicitly except when that
69 is all an insn does, there is no LOG_LINKS entry in an insn that uses
70 the condition code for the insn that set the condition code.
71 Fortunately, these two insns must be consecutive.
72 Therefore, every JUMP_INSN is taken to have an implicit logical link
73 to the preceding insn. This is not quite right, since non-jumps can
74 also use the condition code; but in practice such insns would not
75 combine anyway. */
76
230d793d 77#include "config.h"
4f90e4a0 78#ifdef __STDC__
04fe4385 79#include <stdarg.h>
4f90e4a0 80#else
04fe4385 81#include <varargs.h>
4f90e4a0 82#endif
dfa3449b 83
9c3b4c8b
RS
84/* Must precede rtl.h for FFS. */
85#include <stdio.h>
86
230d793d
RS
87#include "rtl.h"
88#include "flags.h"
89#include "regs.h"
55310dad 90#include "hard-reg-set.h"
230d793d
RS
91#include "expr.h"
92#include "basic-block.h"
93#include "insn-config.h"
94#include "insn-flags.h"
95#include "insn-codes.h"
96#include "insn-attr.h"
97#include "recog.h"
98#include "real.h"
99
100/* It is not safe to use ordinary gen_lowpart in combine.
101 Use gen_lowpart_for_combine instead. See comments there. */
102#define gen_lowpart dont_use_gen_lowpart_you_dummy
103
104/* Number of attempts to combine instructions in this function. */
105
106static int combine_attempts;
107
108/* Number of attempts that got as far as substitution in this function. */
109
110static int combine_merges;
111
112/* Number of instructions combined with added SETs in this function. */
113
114static int combine_extras;
115
116/* Number of instructions combined in this function. */
117
118static int combine_successes;
119
120/* Totals over entire compilation. */
121
122static int total_attempts, total_merges, total_extras, total_successes;
9210df58 123
ddd5a7c1 124/* Define a default value for REVERSIBLE_CC_MODE.
9210df58
RK
125 We can never assume that a condition code mode is safe to reverse unless
126 the md tells us so. */
127#ifndef REVERSIBLE_CC_MODE
128#define REVERSIBLE_CC_MODE(MODE) 0
129#endif
230d793d
RS
130\f
131/* Vector mapping INSN_UIDs to cuids.
5089e22e 132 The cuids are like uids but increase monotonically always.
230d793d
RS
133 Combine always uses cuids so that it can compare them.
134 But actually renumbering the uids, which we used to do,
135 proves to be a bad idea because it makes it hard to compare
136 the dumps produced by earlier passes with those from later passes. */
137
138static int *uid_cuid;
4255220d 139static int max_uid_cuid;
230d793d
RS
140
141/* Get the cuid of an insn. */
142
1427d6d2
RK
143#define INSN_CUID(INSN) \
144(INSN_UID (INSN) > max_uid_cuid ? insn_cuid (INSN) : uid_cuid[INSN_UID (INSN)])
230d793d
RS
145
146/* Maximum register number, which is the size of the tables below. */
147
148static int combine_max_regno;
149
150/* Record last point of death of (hard or pseudo) register n. */
151
152static rtx *reg_last_death;
153
154/* Record last point of modification of (hard or pseudo) register n. */
155
156static rtx *reg_last_set;
157
158/* Record the cuid of the last insn that invalidated memory
159 (anything that writes memory, and subroutine calls, but not pushes). */
160
161static int mem_last_set;
162
163/* Record the cuid of the last CALL_INSN
164 so we can tell whether a potential combination crosses any calls. */
165
166static int last_call_cuid;
167
168/* When `subst' is called, this is the insn that is being modified
169 (by combining in a previous insn). The PATTERN of this insn
170 is still the old pattern partially modified and it should not be
171 looked at, but this may be used to examine the successors of the insn
172 to judge whether a simplification is valid. */
173
174static rtx subst_insn;
175
0d9641d1
JW
176/* This is an insn that belongs before subst_insn, but is not currently
177 on the insn chain. */
178
179static rtx subst_prev_insn;
180
230d793d
RS
181/* This is the lowest CUID that `subst' is currently dealing with.
182 get_last_value will not return a value if the register was set at or
183 after this CUID. If not for this mechanism, we could get confused if
184 I2 or I1 in try_combine were an insn that used the old value of a register
185 to obtain a new value. In that case, we might erroneously get the
186 new value of the register when we wanted the old one. */
187
188static int subst_low_cuid;
189
6e25d159
RK
190/* This contains any hard registers that are used in newpat; reg_dead_at_p
191 must consider all these registers to be always live. */
192
193static HARD_REG_SET newpat_used_regs;
194
abe6e52f
RK
195/* This is an insn to which a LOG_LINKS entry has been added. If this
 196 insn is earlier than I2 or I3, combine should rescan starting at
197 that location. */
198
199static rtx added_links_insn;
200
0d4d42c3
RK
201/* Basic block number of the block in which we are performing combines. */
202static int this_basic_block;
230d793d
RS
203\f
204/* The next group of arrays allows the recording of the last value assigned
205 to (hard or pseudo) register n. We use this information to see if a
5089e22e 206 operation being processed is redundant given a prior operation performed
230d793d
RS
207 on the register. For example, an `and' with a constant is redundant if
208 all the zero bits are already known to be turned off.
209
210 We use an approach similar to that used by cse, but change it in the
211 following ways:
212
213 (1) We do not want to reinitialize at each label.
214 (2) It is useful, but not critical, to know the actual value assigned
215 to a register. Often just its form is helpful.
216
217 Therefore, we maintain the following arrays:
218
219 reg_last_set_value the last value assigned
220 reg_last_set_label records the value of label_tick when the
221 register was assigned
222 reg_last_set_table_tick records the value of label_tick when a
223 value using the register is assigned
224 reg_last_set_invalid set to non-zero when it is not valid
225 to use the value of this register in some
226 register's value
227
228 To understand the usage of these tables, it is important to understand
229 the distinction between the value in reg_last_set_value being valid
230 and the register being validly contained in some other expression in the
231 table.
232
233 Entry I in reg_last_set_value is valid if it is non-zero, and either
234 reg_n_sets[i] is 1 or reg_last_set_label[i] == label_tick.
235
236 Register I may validly appear in any expression returned for the value
237 of another register if reg_n_sets[i] is 1. It may also appear in the
238 value for register J if reg_last_set_label[i] < reg_last_set_label[j] or
239 reg_last_set_invalid[j] is zero.
240
241 If an expression is found in the table containing a register which may
242 not validly appear in an expression, the register is replaced by
243 something that won't match, (clobber (const_int 0)).
244
245 reg_last_set_invalid[i] is set non-zero when register I is being assigned
246 to and reg_last_set_table_tick[i] == label_tick. */
247
0f41302f 248/* Record last value assigned to (hard or pseudo) register n. */
230d793d
RS
249
250static rtx *reg_last_set_value;
251
252/* Record the value of label_tick when the value for register n is placed in
253 reg_last_set_value[n]. */
254
568356af 255static int *reg_last_set_label;
230d793d
RS
256
257/* Record the value of label_tick when an expression involving register n
0f41302f 258 is placed in reg_last_set_value. */
230d793d 259
568356af 260static int *reg_last_set_table_tick;
230d793d
RS
261
262/* Set non-zero if references to register n in expressions should not be
263 used. */
264
265static char *reg_last_set_invalid;
266
0f41302f 267/* Incremented for each label. */
230d793d 268
568356af 269static int label_tick;
230d793d
RS
270
271/* Some registers that are set more than once and used in more than one
272 basic block are nevertheless always set in similar ways. For example,
273 a QImode register may be loaded from memory in two places on a machine
274 where byte loads zero extend.
275
951553af 276 We record in the following array what we know about the nonzero
230d793d
RS
277 bits of a register, specifically which bits are known to be zero.
278
279 If an entry is zero, it means that we don't know anything special. */
280
55310dad 281static unsigned HOST_WIDE_INT *reg_nonzero_bits;
230d793d 282
951553af 283/* Mode used to compute significance in reg_nonzero_bits. It is the largest
5f4f0e22 284 integer mode that can fit in HOST_BITS_PER_WIDE_INT. */
230d793d 285
951553af 286static enum machine_mode nonzero_bits_mode;
230d793d 287
d0ab8cd3
RK
288/* Nonzero if we know that a register has some leading bits that are always
289 equal to the sign bit. */
290
291static char *reg_sign_bit_copies;
292
951553af 293/* Nonzero when reg_nonzero_bits and reg_sign_bit_copies can be safely used.
1a26b032
RK
294 It is zero while computing them and after combine has completed. This
295 former test prevents propagating values based on previously set values,
296 which can be incorrect if a variable is modified in a loop. */
230d793d 297
951553af 298static int nonzero_sign_valid;
55310dad
RK
299
300/* These arrays are maintained in parallel with reg_last_set_value
301 and are used to store the mode in which the register was last set,
302 the bits that were known to be zero when it was last set, and the
303 number of sign bits copies it was known to have when it was last set. */
304
305static enum machine_mode *reg_last_set_mode;
306static unsigned HOST_WIDE_INT *reg_last_set_nonzero_bits;
307static char *reg_last_set_sign_bit_copies;
230d793d
RS
308\f
309/* Record one modification to rtl structure
310 to be undone by storing old_contents into *where.
311 is_int is 1 if the contents are an int. */
312
313struct undo
314{
241cea85 315 struct undo *next;
230d793d 316 int is_int;
f5393ab9
RS
317 union {rtx r; int i;} old_contents;
318 union {rtx *r; int *i;} where;
230d793d
RS
319};
320
321/* Record a bunch of changes to be undone, up to MAX_UNDO of them.
322 num_undo says how many are currently recorded.
323
324 storage is nonzero if we must undo the allocation of new storage.
325 The value of storage is what to pass to obfree.
326
327 other_insn is nonzero if we have modified some other insn in the process
241cea85 328 of working on subst_insn. It must be verified too.
230d793d 329
241cea85
RK
330 previous_undos is the value of undobuf.undos when we started processing
 331 this substitution. This will prevent gen_rtx_combine from re-using a piece
 332 from the previous expression. Doing so can produce circular rtl
333 structures. */
230d793d
RS
334
335struct undobuf
336{
230d793d 337 char *storage;
241cea85
RK
338 struct undo *undos;
339 struct undo *frees;
340 struct undo *previous_undos;
230d793d
RS
341 rtx other_insn;
342};
343
344static struct undobuf undobuf;
345
cc876596 346/* Substitute NEWVAL, an rtx expression, into INTO, a place in some
230d793d 347 insn. The substitution can be undone by undo_all. If INTO is already
cc876596
RK
348 set to NEWVAL, do not record this change. Because computing NEWVAL might
349 also call SUBST, we have to compute it before we put anything into
350 the undo table. */
230d793d
RS
351
352#define SUBST(INTO, NEWVAL) \
241cea85
RK
353 do { rtx _new = (NEWVAL); \
354 struct undo *_buf; \
355 \
356 if (undobuf.frees) \
357 _buf = undobuf.frees, undobuf.frees = _buf->next; \
358 else \
359 _buf = (struct undo *) xmalloc (sizeof (struct undo)); \
360 \
361 _buf->is_int = 0; \
362 _buf->where.r = &INTO; \
363 _buf->old_contents.r = INTO; \
364 INTO = _new; \
365 if (_buf->old_contents.r == INTO) \
366 _buf->next = undobuf.frees, undobuf.frees = _buf; \
367 else \
368 _buf->next = undobuf.undos, undobuf.undos = _buf; \
230d793d
RS
369 } while (0)
370
241cea85
RK
371/* Similar to SUBST, but NEWVAL is an int expression. Note that substitution
372 for the value of a HOST_WIDE_INT value (including CONST_INT) is
373 not safe. */
230d793d
RS
374
375#define SUBST_INT(INTO, NEWVAL) \
241cea85
RK
376 do { struct undo *_buf; \
377 \
378 if (undobuf.frees) \
379 _buf = undobuf.frees, undobuf.frees = _buf->next; \
380 else \
381 _buf = (struct undo *) xmalloc (sizeof (struct undo)); \
382 \
383 _buf->is_int = 1; \
384 _buf->where.i = (int *) &INTO; \
385 _buf->old_contents.i = INTO; \
386 INTO = NEWVAL; \
387 if (_buf->old_contents.i == INTO) \
388 _buf->next = undobuf.frees, undobuf.frees = _buf; \
389 else \
390 _buf->next = undobuf.undos, undobuf.undos = _buf; \
230d793d
RS
391 } while (0)
392
393/* Number of times the pseudo being substituted for
394 was found and replaced. */
395
396static int n_occurrences;
397
c5ad722c
RK
398static void init_reg_last_arrays PROTO((void));
399static void setup_incoming_promotions PROTO((void));
fe2db4fb
RK
400static void set_nonzero_bits_and_sign_copies PROTO((rtx, rtx));
401static int can_combine_p PROTO((rtx, rtx, rtx, rtx, rtx *, rtx *));
402static int combinable_i3pat PROTO((rtx, rtx *, rtx, rtx, int, rtx *));
403static rtx try_combine PROTO((rtx, rtx, rtx));
404static void undo_all PROTO((void));
405static rtx *find_split_point PROTO((rtx *, rtx));
406static rtx subst PROTO((rtx, rtx, rtx, int, int));
8079805d
RK
407static rtx simplify_rtx PROTO((rtx, enum machine_mode, int, int));
408static rtx simplify_if_then_else PROTO((rtx));
409static rtx simplify_set PROTO((rtx));
410static rtx simplify_logical PROTO((rtx, int));
fe2db4fb
RK
411static rtx expand_compound_operation PROTO((rtx));
412static rtx expand_field_assignment PROTO((rtx));
413static rtx make_extraction PROTO((enum machine_mode, rtx, int, rtx, int,
414 int, int, int));
71923da7 415static rtx extract_left_shift PROTO((rtx, int));
fe2db4fb
RK
416static rtx make_compound_operation PROTO((rtx, enum rtx_code));
417static int get_pos_from_mask PROTO((unsigned HOST_WIDE_INT, int *));
6139ff20 418static rtx force_to_mode PROTO((rtx, enum machine_mode,
e3d616e3 419 unsigned HOST_WIDE_INT, rtx, int));
abe6e52f 420static rtx if_then_else_cond PROTO((rtx, rtx *, rtx *));
fe2db4fb 421static rtx known_cond PROTO((rtx, enum rtx_code, rtx, rtx));
e11fa86f 422static int rtx_equal_for_field_assignment_p PROTO((rtx, rtx));
fe2db4fb
RK
423static rtx make_field_assignment PROTO((rtx));
424static rtx apply_distributive_law PROTO((rtx));
425static rtx simplify_and_const_int PROTO((rtx, enum machine_mode, rtx,
426 unsigned HOST_WIDE_INT));
427static unsigned HOST_WIDE_INT nonzero_bits PROTO((rtx, enum machine_mode));
428static int num_sign_bit_copies PROTO((rtx, enum machine_mode));
429static int merge_outer_ops PROTO((enum rtx_code *, HOST_WIDE_INT *,
430 enum rtx_code, HOST_WIDE_INT,
431 enum machine_mode, int *));
432static rtx simplify_shift_const PROTO((rtx, enum rtx_code, enum machine_mode,
433 rtx, int));
a29ca9db 434static int recog_for_combine PROTO((rtx *, rtx, rtx *, int *));
fe2db4fb 435static rtx gen_lowpart_for_combine PROTO((enum machine_mode, rtx));
d18225c4 436static rtx gen_rtx_combine PVPROTO((enum rtx_code code, enum machine_mode mode,
4f90e4a0 437 ...));
fe2db4fb
RK
438static rtx gen_binary PROTO((enum rtx_code, enum machine_mode,
439 rtx, rtx));
0c1c8ea6
RK
440static rtx gen_unary PROTO((enum rtx_code, enum machine_mode,
441 enum machine_mode, rtx));
fe2db4fb
RK
442static enum rtx_code simplify_comparison PROTO((enum rtx_code, rtx *, rtx *));
443static int reversible_comparison_p PROTO((rtx));
444static void update_table_tick PROTO((rtx));
445static void record_value_for_reg PROTO((rtx, rtx, rtx));
446static void record_dead_and_set_regs_1 PROTO((rtx, rtx));
447static void record_dead_and_set_regs PROTO((rtx));
9a893315 448static int get_last_value_validate PROTO((rtx *, rtx, int, int));
fe2db4fb
RK
449static rtx get_last_value PROTO((rtx));
450static int use_crosses_set_p PROTO((rtx, int));
451static void reg_dead_at_p_1 PROTO((rtx, rtx));
452static int reg_dead_at_p PROTO((rtx, rtx));
6eb12cef 453static void move_deaths PROTO((rtx, rtx, int, rtx, rtx *));
fe2db4fb
RK
454static int reg_bitfield_target_p PROTO((rtx, rtx));
455static void distribute_notes PROTO((rtx, rtx, rtx, rtx, rtx, rtx));
456static void distribute_links PROTO((rtx));
6e25d159 457static void mark_used_regs_combine PROTO((rtx));
1427d6d2 458static int insn_cuid PROTO((rtx));
230d793d
RS
459\f
460/* Main entry point for combiner. F is the first insn of the function.
461 NREGS is the first unused pseudo-reg number. */
462
void
combine_instructions (f, nregs)
     rtx f;
     int nregs;
{
  register rtx insn, next;
#ifdef HAVE_cc0
  register rtx prev;
#endif
  register int i;
  register rtx links, nextlinks;

  /* Reset the per-function statistics; they are folded into the
     compilation-wide totals just before returning.  */
  combine_attempts = 0;
  combine_merges = 0;
  combine_extras = 0;
  combine_successes = 0;
  undobuf.undos = undobuf.previous_undos = 0;

  combine_max_regno = nregs;

  /* Allocate and clear the per-register tables on the stack; they are
     valid only for the duration of this pass.  */
  reg_nonzero_bits
    = (unsigned HOST_WIDE_INT *) alloca (nregs * sizeof (HOST_WIDE_INT));
  reg_sign_bit_copies = (char *) alloca (nregs * sizeof (char));

  bzero ((char *) reg_nonzero_bits, nregs * sizeof (HOST_WIDE_INT));
  bzero (reg_sign_bit_copies, nregs * sizeof (char));

  reg_last_death = (rtx *) alloca (nregs * sizeof (rtx));
  reg_last_set = (rtx *) alloca (nregs * sizeof (rtx));
  reg_last_set_value = (rtx *) alloca (nregs * sizeof (rtx));
  reg_last_set_table_tick = (int *) alloca (nregs * sizeof (int));
  reg_last_set_label = (int *) alloca (nregs * sizeof (int));
  reg_last_set_invalid = (char *) alloca (nregs * sizeof (char));
  reg_last_set_mode
    = (enum machine_mode *) alloca (nregs * sizeof (enum machine_mode));
  reg_last_set_nonzero_bits
    = (unsigned HOST_WIDE_INT *) alloca (nregs * sizeof (HOST_WIDE_INT));
  reg_last_set_sign_bit_copies
    = (char *) alloca (nregs * sizeof (char));

  init_reg_last_arrays ();

  init_recog_no_volatile ();

  /* Compute maximum uid value so uid_cuid can be allocated.  */

  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    if (INSN_UID (insn) > i)
      i = INSN_UID (insn);

  uid_cuid = (int *) alloca ((i + 1) * sizeof (int));
  max_uid_cuid = i;

  nonzero_bits_mode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);

  /* Don't use reg_nonzero_bits when computing it.  This can cause problems
     when, for example, we have j <<= 1 in a loop.  */

  nonzero_sign_valid = 0;

  /* First pass: compute the mapping from uids to cuids.
     Cuids are numbers assigned to insns, like uids,
     except that cuids increase monotonically through the code.

     Scan all SETs and see if we can deduce anything about what
     bits are known to be zero for some registers and how many copies
     of the sign bit are known to exist for those registers.

     Also set any known values so that we can use it while searching
     for what bits are known to be set.  */

  label_tick = 1;

  /* We need to initialize it here, because record_dead_and_set_regs may call
     get_last_value.  */
  subst_prev_insn = NULL_RTX;

  setup_incoming_promotions ();

  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    {
      uid_cuid[INSN_UID (insn)] = ++i;
      subst_low_cuid = i;
      subst_insn = insn;

      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	{
	  note_stores (PATTERN (insn), set_nonzero_bits_and_sign_copies);
	  record_dead_and_set_regs (insn);

#ifdef AUTO_INC_DEC
	  /* An auto-increment side effect also sets its register, but is
	     only recorded as a REG_INC note, so record it by hand.  */
	  for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
	    if (REG_NOTE_KIND (links) == REG_INC)
	      set_nonzero_bits_and_sign_copies (XEXP (links, 0), NULL_RTX);
#endif
	}

      if (GET_CODE (insn) == CODE_LABEL)
	label_tick++;
    }

  nonzero_sign_valid = 1;

  /* Second pass: now scan all the insns in forward order and attempt
     the actual combinations.  */

  this_basic_block = -1;
  label_tick = 1;
  last_call_cuid = 0;
  mem_last_set = 0;
  init_reg_last_arrays ();
  setup_incoming_promotions ();

  for (insn = f; insn; insn = next ? next : NEXT_INSN (insn))
    {
      next = 0;

      /* If INSN starts a new basic block, update our basic block number.  */
      if (this_basic_block + 1 < n_basic_blocks
	  && basic_block_head[this_basic_block + 1] == insn)
	this_basic_block++;

      if (GET_CODE (insn) == CODE_LABEL)
	label_tick++;

      else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	{
	  /* Try this insn with each insn it links back to.  */

	  for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
	    if ((next = try_combine (insn, XEXP (links, 0), NULL_RTX)) != 0)
	      goto retry;

	  /* Try each sequence of three linked insns ending with this one.  */

	  for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
	    for (nextlinks = LOG_LINKS (XEXP (links, 0)); nextlinks;
		 nextlinks = XEXP (nextlinks, 1))
	      if ((next = try_combine (insn, XEXP (links, 0),
				       XEXP (nextlinks, 0))) != 0)
		goto retry;

#ifdef HAVE_cc0
	  /* Try to combine a jump insn that uses CC0
	     with a preceding insn that sets CC0, and maybe with its
	     logical predecessor as well.
	     This is how we make decrement-and-branch insns.
	     We need this special code because data flow connections
	     via CC0 do not get entered in LOG_LINKS.  */

	  if (GET_CODE (insn) == JUMP_INSN
	      && (prev = prev_nonnote_insn (insn)) != 0
	      && GET_CODE (prev) == INSN
	      && sets_cc0_p (PATTERN (prev)))
	    {
	      if ((next = try_combine (insn, prev, NULL_RTX)) != 0)
		goto retry;

	      for (nextlinks = LOG_LINKS (prev); nextlinks;
		   nextlinks = XEXP (nextlinks, 1))
		if ((next = try_combine (insn, prev,
					 XEXP (nextlinks, 0))) != 0)
		  goto retry;
	    }

	  /* Do the same for an insn that explicitly references CC0.  */
	  if (GET_CODE (insn) == INSN
	      && (prev = prev_nonnote_insn (insn)) != 0
	      && GET_CODE (prev) == INSN
	      && sets_cc0_p (PATTERN (prev))
	      && GET_CODE (PATTERN (insn)) == SET
	      && reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (insn))))
	    {
	      if ((next = try_combine (insn, prev, NULL_RTX)) != 0)
		goto retry;

	      for (nextlinks = LOG_LINKS (prev); nextlinks;
		   nextlinks = XEXP (nextlinks, 1))
		if ((next = try_combine (insn, prev,
					 XEXP (nextlinks, 0))) != 0)
		  goto retry;
	    }

	  /* Finally, see if any of the insns that this insn links to
	     explicitly references CC0.  If so, try this insn, that insn,
	     and its predecessor if it sets CC0.  */
	  for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
	    if (GET_CODE (XEXP (links, 0)) == INSN
		&& GET_CODE (PATTERN (XEXP (links, 0))) == SET
		&& reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (XEXP (links, 0))))
		&& (prev = prev_nonnote_insn (XEXP (links, 0))) != 0
		&& GET_CODE (prev) == INSN
		&& sets_cc0_p (PATTERN (prev))
		&& (next = try_combine (insn, XEXP (links, 0), prev)) != 0)
	      goto retry;
#endif

	  /* Try combining an insn with two different insns whose results it
	     uses.  */
	  for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
	    for (nextlinks = XEXP (links, 1); nextlinks;
		 nextlinks = XEXP (nextlinks, 1))
	      if ((next = try_combine (insn, XEXP (links, 0),
				       XEXP (nextlinks, 0))) != 0)
		goto retry;

	  if (GET_CODE (insn) != NOTE)
	    record_dead_and_set_regs (insn);

	retry:
	  ;
	}
    }

  /* Fold this function's statistics into the per-compilation totals.  */
  total_attempts += combine_attempts;
  total_merges += combine_merges;
  total_extras += combine_extras;
  total_successes += combine_successes;

  nonzero_sign_valid = 0;
}
ef026f91
RS
683
684/* Wipe the reg_last_xxx arrays in preparation for another pass. */
685
686static void
687init_reg_last_arrays ()
688{
689 int nregs = combine_max_regno;
690
4c9a05bc
RK
691 bzero ((char *) reg_last_death, nregs * sizeof (rtx));
692 bzero ((char *) reg_last_set, nregs * sizeof (rtx));
693 bzero ((char *) reg_last_set_value, nregs * sizeof (rtx));
694 bzero ((char *) reg_last_set_table_tick, nregs * sizeof (int));
695 bzero ((char *) reg_last_set_label, nregs * sizeof (int));
ef026f91 696 bzero (reg_last_set_invalid, nregs * sizeof (char));
4c9a05bc
RK
697 bzero ((char *) reg_last_set_mode, nregs * sizeof (enum machine_mode));
698 bzero ((char *) reg_last_set_nonzero_bits, nregs * sizeof (HOST_WIDE_INT));
ef026f91
RS
699 bzero (reg_last_set_sign_bit_copies, nregs * sizeof (char));
700}
230d793d 701\f
7988fd36
RK
702/* Set up any promoted values for incoming argument registers. */
703
static void
setup_incoming_promotions ()
{
#ifdef PROMOTE_FUNCTION_ARGS
  int r;
  rtx arg;
  enum machine_mode arg_mode;
  int arg_unsignedp;
  rtx first_insn = get_insns ();

  for (r = 0; r < FIRST_PSEUDO_REGISTER; r++)
    {
      rtx extension;

      /* Only argument registers that arrive in a promoted mode are of
	 interest; skip everything else.  */
      if (! FUNCTION_ARG_REGNO_P (r))
	continue;

      arg = promoted_input_arg (r, &arg_mode, &arg_unsignedp);
      if (arg == 0)
	continue;

      /* Record the promotion as an extension of an unknown value
	 (a CLOBBER) in the argument's original mode.  */
      extension
	= gen_rtx_fmt_e (arg_unsignedp ? ZERO_EXTEND : SIGN_EXTEND,
			 GET_MODE (arg),
			 gen_rtx_CLOBBER (arg_mode, const0_rtx));
      record_value_for_reg (arg, first_insn, extension);
    }
#endif
}
726\f
91102d5a
RK
727/* Called via note_stores. If X is a pseudo that is narrower than
728 HOST_BITS_PER_WIDE_INT and is being set, record what bits are known zero.
230d793d
RS
729
730 If we are setting only a portion of X and we can't figure out what
731 portion, assume all bits will be used since we don't know what will
d0ab8cd3
RK
732 be happening.
733
734 Similarly, set how many bits of X are known to be copies of the sign bit
735 at all locations in the function. This is the smallest number implied
736 by any set of X. */
230d793d
RS
737
static void
set_nonzero_bits_and_sign_copies (x, set)
     rtx x;
     rtx set;
{
  int num;

  /* Only pseudo registers narrow enough to be tracked in a
     HOST_WIDE_INT are recorded.  */
  if (GET_CODE (x) == REG
      && REGNO (x) >= FIRST_PSEUDO_REGISTER
      /* If this register is undefined at the start of the file, we can't
	 say what its contents were.  */
      && ! REGNO_REG_SET_P (basic_block_live_at_start[0], REGNO (x))
      && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT)
    {
      /* A CLOBBER (or a missing SET) tells us nothing about the value;
	 record the most pessimistic information.  */
      if (set == 0 || GET_CODE (set) == CLOBBER)
	{
	  reg_nonzero_bits[REGNO (x)] = GET_MODE_MASK (GET_MODE (x));
	  reg_sign_bit_copies[REGNO (x)] = 1;
	  return;
	}

      /* If this is a complex assignment, see if we can convert it into a
	 simple assignment.  */
      set = expand_field_assignment (set);

      /* If this is a simple assignment, or we have a paradoxical SUBREG,
	 set what we know about X.  */

      if (SET_DEST (set) == x
	  || (GET_CODE (SET_DEST (set)) == SUBREG
	      && (GET_MODE_SIZE (GET_MODE (SET_DEST (set)))
		  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (set)))))
	      && SUBREG_REG (SET_DEST (set)) == x))
	{
	  rtx src = SET_SRC (set);

#ifdef SHORT_IMMEDIATES_SIGN_EXTEND
	  /* If X is narrower than a word and SRC is a non-negative
	     constant that would appear negative in the mode of X,
	     sign-extend it for use in reg_nonzero_bits because some
	     machines (maybe most) will actually do the sign-extension
	     and this is the conservative approach.

	     ??? For 2.5, try to tighten up the MD files in this regard
	     instead of this kludge.  */

	  if (GET_MODE_BITSIZE (GET_MODE (x)) < BITS_PER_WORD
	      && GET_CODE (src) == CONST_INT
	      && INTVAL (src) > 0
	      && 0 != (INTVAL (src)
		       & ((HOST_WIDE_INT) 1
			  << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
	    src = GEN_INT (INTVAL (src)
			   | ((HOST_WIDE_INT) (-1)
			      << GET_MODE_BITSIZE (GET_MODE (x))));
#endif

	  /* OR in the nonzero bits from this set; keep the smallest
	     sign-bit-copy count seen over all sets of X.  */
	  reg_nonzero_bits[REGNO (x)]
	    |= nonzero_bits (src, nonzero_bits_mode);
	  num = num_sign_bit_copies (SET_SRC (set), GET_MODE (x));
	  if (reg_sign_bit_copies[REGNO (x)] == 0
	      || reg_sign_bit_copies[REGNO (x)] > num)
	    reg_sign_bit_copies[REGNO (x)] = num;
	}
      else
	{
	  /* Only part of X was set, and we can't tell which part;
	     assume nothing is known.  */
	  reg_nonzero_bits[REGNO (x)] = GET_MODE_MASK (GET_MODE (x));
	  reg_sign_bit_copies[REGNO (x)] = 1;
	}
    }
}
809\f
810/* See if INSN can be combined into I3. PRED and SUCC are optionally
811 insns that were previously combined into I3 or that will be combined
812 into the merger of INSN and I3.
813
814 Return 0 if the combination is not allowed for any reason.
815
816 If the combination is allowed, *PDEST will be set to the single
817 destination of INSN and *PSRC to the single source, and this function
818 will return 1. */
819
820static int
821can_combine_p (insn, i3, pred, succ, pdest, psrc)
822 rtx insn;
823 rtx i3;
824 rtx pred, succ;
825 rtx *pdest, *psrc;
826{
827 int i;
828 rtx set = 0, src, dest;
b729186a
JL
829 rtx p;
830#ifdef AUTO_INC_DEC
76d31c63 831 rtx link;
b729186a 832#endif
230d793d
RS
833 int all_adjacent = (succ ? (next_active_insn (insn) == succ
834 && next_active_insn (succ) == i3)
835 : next_active_insn (insn) == i3);
836
837 /* Can combine only if previous insn is a SET of a REG, a SUBREG or CC0.
838 or a PARALLEL consisting of such a SET and CLOBBERs.
839
840 If INSN has CLOBBER parallel parts, ignore them for our processing.
841 By definition, these happen during the execution of the insn. When it
842 is merged with another insn, all bets are off. If they are, in fact,
843 needed and aren't also supplied in I3, they may be added by
844 recog_for_combine. Otherwise, it won't match.
845
846 We can also ignore a SET whose SET_DEST is mentioned in a REG_UNUSED
847 note.
848
849 Get the source and destination of INSN. If more than one, can't
850 combine. */
851
852 if (GET_CODE (PATTERN (insn)) == SET)
853 set = PATTERN (insn);
854 else if (GET_CODE (PATTERN (insn)) == PARALLEL
855 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
856 {
857 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
858 {
859 rtx elt = XVECEXP (PATTERN (insn), 0, i);
860
861 switch (GET_CODE (elt))
862 {
e3258cef
R
863 /* This is important to combine floating point insns
864 for the SH4 port. */
865 case USE:
866 /* Combining an isolated USE doesn't make sense.
867 We depend here on combinable_i3_pat to reject them. */
868 /* The code below this loop only verifies that the inputs of
869 the SET in INSN do not change. We call reg_set_between_p
870 to verify that the REG in the USE does not change betweeen
871 I3 and INSN.
872 If the USE in INSN was for a pseudo register, the matching
873 insn pattern will likely match any register; combining this
874 with any other USE would only be safe if we knew that the
875 used registers have identical values, or if there was
876 something to tell them apart, e.g. different modes. For
877 now, we forgo such compilcated tests and simply disallow
878 combining of USES of pseudo registers with any other USE. */
879 if (GET_CODE (XEXP (elt, 0)) == REG
880 && GET_CODE (PATTERN (i3)) == PARALLEL)
881 {
882 rtx i3pat = PATTERN (i3);
883 int i = XVECLEN (i3pat, 0) - 1;
884 int regno = REGNO (XEXP (elt, 0));
885 do
886 {
887 rtx i3elt = XVECEXP (i3pat, 0, i);
888 if (GET_CODE (i3elt) == USE
889 && GET_CODE (XEXP (i3elt, 0)) == REG
890 && (REGNO (XEXP (i3elt, 0)) == regno
891 ? reg_set_between_p (XEXP (elt, 0),
892 PREV_INSN (insn), i3)
893 : regno >= FIRST_PSEUDO_REGISTER))
894 return 0;
895 }
896 while (--i >= 0);
897 }
898 break;
899
230d793d
RS
900 /* We can ignore CLOBBERs. */
901 case CLOBBER:
902 break;
903
904 case SET:
905 /* Ignore SETs whose result isn't used but not those that
906 have side-effects. */
907 if (find_reg_note (insn, REG_UNUSED, SET_DEST (elt))
908 && ! side_effects_p (elt))
909 break;
910
911 /* If we have already found a SET, this is a second one and
912 so we cannot combine with this insn. */
913 if (set)
914 return 0;
915
916 set = elt;
917 break;
918
919 default:
920 /* Anything else means we can't combine. */
921 return 0;
922 }
923 }
924
925 if (set == 0
926 /* If SET_SRC is an ASM_OPERANDS we can't throw away these CLOBBERs,
927 so don't do anything with it. */
928 || GET_CODE (SET_SRC (set)) == ASM_OPERANDS)
929 return 0;
930 }
931 else
932 return 0;
933
934 if (set == 0)
935 return 0;
936
937 set = expand_field_assignment (set);
938 src = SET_SRC (set), dest = SET_DEST (set);
939
940 /* Don't eliminate a store in the stack pointer. */
941 if (dest == stack_pointer_rtx
230d793d
RS
942 /* If we couldn't eliminate a field assignment, we can't combine. */
943 || GET_CODE (dest) == ZERO_EXTRACT || GET_CODE (dest) == STRICT_LOW_PART
944 /* Don't combine with an insn that sets a register to itself if it has
945 a REG_EQUAL note. This may be part of a REG_NO_CONFLICT sequence. */
5f4f0e22 946 || (rtx_equal_p (src, dest) && find_reg_note (insn, REG_EQUAL, NULL_RTX))
230d793d
RS
947 /* Can't merge a function call. */
948 || GET_CODE (src) == CALL
cd5e8f1f 949 /* Don't eliminate a function call argument. */
4dca5ec5
RK
950 || (GET_CODE (i3) == CALL_INSN
951 && (find_reg_fusage (i3, USE, dest)
952 || (GET_CODE (dest) == REG
953 && REGNO (dest) < FIRST_PSEUDO_REGISTER
954 && global_regs[REGNO (dest)])))
230d793d
RS
955 /* Don't substitute into an incremented register. */
956 || FIND_REG_INC_NOTE (i3, dest)
957 || (succ && FIND_REG_INC_NOTE (succ, dest))
958 /* Don't combine the end of a libcall into anything. */
5f4f0e22 959 || find_reg_note (insn, REG_RETVAL, NULL_RTX)
230d793d
RS
960 /* Make sure that DEST is not used after SUCC but before I3. */
961 || (succ && ! all_adjacent
962 && reg_used_between_p (dest, succ, i3))
963 /* Make sure that the value that is to be substituted for the register
964 does not use any registers whose values alter in between. However,
965 If the insns are adjacent, a use can't cross a set even though we
966 think it might (this can happen for a sequence of insns each setting
967 the same destination; reg_last_set of that register might point to
d81481d3
RK
968 a NOTE). If INSN has a REG_EQUIV note, the register is always
969 equivalent to the memory so the substitution is valid even if there
970 are intervening stores. Also, don't move a volatile asm or
971 UNSPEC_VOLATILE across any other insns. */
230d793d 972 || (! all_adjacent
d81481d3
RK
973 && (((GET_CODE (src) != MEM
974 || ! find_reg_note (insn, REG_EQUIV, src))
975 && use_crosses_set_p (src, INSN_CUID (insn)))
a66a10c7
RS
976 || (GET_CODE (src) == ASM_OPERANDS && MEM_VOLATILE_P (src))
977 || GET_CODE (src) == UNSPEC_VOLATILE))
230d793d
RS
978 /* If there is a REG_NO_CONFLICT note for DEST in I3 or SUCC, we get
979 better register allocation by not doing the combine. */
980 || find_reg_note (i3, REG_NO_CONFLICT, dest)
981 || (succ && find_reg_note (succ, REG_NO_CONFLICT, dest))
982 /* Don't combine across a CALL_INSN, because that would possibly
983 change whether the life span of some REGs crosses calls or not,
984 and it is a pain to update that information.
985 Exception: if source is a constant, moving it later can't hurt.
986 Accept that special case, because it helps -fforce-addr a lot. */
987 || (INSN_CUID (insn) < last_call_cuid && ! CONSTANT_P (src)))
988 return 0;
989
990 /* DEST must either be a REG or CC0. */
991 if (GET_CODE (dest) == REG)
992 {
993 /* If register alignment is being enforced for multi-word items in all
994 cases except for parameters, it is possible to have a register copy
995 insn referencing a hard register that is not allowed to contain the
996 mode being copied and which would not be valid as an operand of most
997 insns. Eliminate this problem by not combining with such an insn.
998
999 Also, on some machines we don't want to extend the life of a hard
4d2c432d
RK
1000 register.
1001
1002 This is the same test done in can_combine except that we don't test
1003 if SRC is a CALL operation to permit a hard register with
1004 SMALL_REGISTER_CLASSES, and that we have to take all_adjacent
1005 into account. */
230d793d
RS
1006
1007 if (GET_CODE (src) == REG
1008 && ((REGNO (dest) < FIRST_PSEUDO_REGISTER
1009 && ! HARD_REGNO_MODE_OK (REGNO (dest), GET_MODE (dest)))
c448a43e
RK
1010 /* Don't extend the life of a hard register unless it is
1011 user variable (if we have few registers) or it can't
1012 fit into the desired register (meaning something special
ecd40809
RK
1013 is going on).
1014 Also avoid substituting a return register into I3, because
1015 reload can't handle a conflict with constraints of other
1016 inputs. */
230d793d 1017 || (REGNO (src) < FIRST_PSEUDO_REGISTER
c448a43e 1018 && (! HARD_REGNO_MODE_OK (REGNO (src), GET_MODE (src))
f95182a4
ILT
1019 || (SMALL_REGISTER_CLASSES
1020 && ((! all_adjacent && ! REG_USERVAR_P (src))
1021 || (FUNCTION_VALUE_REGNO_P (REGNO (src))
e9a25f70 1022 && ! REG_USERVAR_P (src))))))))
230d793d
RS
1023 return 0;
1024 }
1025 else if (GET_CODE (dest) != CC0)
1026 return 0;
1027
5f96750d
RS
1028 /* Don't substitute for a register intended as a clobberable operand.
1029 Similarly, don't substitute an expression containing a register that
1030 will be clobbered in I3. */
230d793d
RS
1031 if (GET_CODE (PATTERN (i3)) == PARALLEL)
1032 for (i = XVECLEN (PATTERN (i3), 0) - 1; i >= 0; i--)
1033 if (GET_CODE (XVECEXP (PATTERN (i3), 0, i)) == CLOBBER
5f96750d
RS
1034 && (reg_overlap_mentioned_p (XEXP (XVECEXP (PATTERN (i3), 0, i), 0),
1035 src)
1036 || rtx_equal_p (XEXP (XVECEXP (PATTERN (i3), 0, i), 0), dest)))
230d793d
RS
1037 return 0;
1038
1039 /* If INSN contains anything volatile, or is an `asm' (whether volatile
1040 or not), reject, unless nothing volatile comes between it and I3,
1041 with the exception of SUCC. */
1042
1043 if (GET_CODE (src) == ASM_OPERANDS || volatile_refs_p (src))
1044 for (p = NEXT_INSN (insn); p != i3; p = NEXT_INSN (p))
1045 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
1046 && p != succ && volatile_refs_p (PATTERN (p)))
1047 return 0;
1048
b79ee7eb
RH
1049 /* If INSN is an asm, and DEST is a hard register, reject, since it has
1050 to be an explicit register variable, and was chosen for a reason. */
1051
1052 if (GET_CODE (src) == ASM_OPERANDS
1053 && GET_CODE (dest) == REG && REGNO (dest) < FIRST_PSEUDO_REGISTER)
1054 return 0;
1055
4b2cb4a2
RS
1056 /* If there are any volatile insns between INSN and I3, reject, because
1057 they might affect machine state. */
1058
1059 for (p = NEXT_INSN (insn); p != i3; p = NEXT_INSN (p))
1060 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
1061 && p != succ && volatile_insn_p (PATTERN (p)))
1062 return 0;
1063
230d793d
RS
1064 /* If INSN or I2 contains an autoincrement or autodecrement,
1065 make sure that register is not used between there and I3,
1066 and not already used in I3 either.
1067 Also insist that I3 not be a jump; if it were one
1068 and the incremented register were spilled, we would lose. */
1069
1070#ifdef AUTO_INC_DEC
1071 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1072 if (REG_NOTE_KIND (link) == REG_INC
1073 && (GET_CODE (i3) == JUMP_INSN
1074 || reg_used_between_p (XEXP (link, 0), insn, i3)
1075 || reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i3))))
1076 return 0;
1077#endif
1078
1079#ifdef HAVE_cc0
1080 /* Don't combine an insn that follows a CC0-setting insn.
1081 An insn that uses CC0 must not be separated from the one that sets it.
1082 We do, however, allow I2 to follow a CC0-setting insn if that insn
1083 is passed as I1; in that case it will be deleted also.
1084 We also allow combining in this case if all the insns are adjacent
1085 because that would leave the two CC0 insns adjacent as well.
1086 It would be more logical to test whether CC0 occurs inside I1 or I2,
1087 but that would be much slower, and this ought to be equivalent. */
1088
1089 p = prev_nonnote_insn (insn);
1090 if (p && p != pred && GET_CODE (p) == INSN && sets_cc0_p (PATTERN (p))
1091 && ! all_adjacent)
1092 return 0;
1093#endif
1094
1095 /* If we get here, we have passed all the tests and the combination is
1096 to be allowed. */
1097
1098 *pdest = dest;
1099 *psrc = src;
1100
1101 return 1;
1102}
1103\f
956d6950
JL
1104/* Check if PAT is an insn - or a part of it - used to set up an
1105 argument for a function in a hard register. */
1106
1107static int
1108sets_function_arg_p (pat)
1109 rtx pat;
1110{
1111 int i;
1112 rtx inner_dest;
1113
1114 switch (GET_CODE (pat))
1115 {
1116 case INSN:
1117 return sets_function_arg_p (PATTERN (pat));
1118
1119 case PARALLEL:
1120 for (i = XVECLEN (pat, 0); --i >= 0;)
1121 if (sets_function_arg_p (XVECEXP (pat, 0, i)))
1122 return 1;
1123
1124 break;
1125
1126 case SET:
1127 inner_dest = SET_DEST (pat);
1128 while (GET_CODE (inner_dest) == STRICT_LOW_PART
1129 || GET_CODE (inner_dest) == SUBREG
1130 || GET_CODE (inner_dest) == ZERO_EXTRACT)
1131 inner_dest = XEXP (inner_dest, 0);
1132
1133 return (GET_CODE (inner_dest) == REG
1134 && REGNO (inner_dest) < FIRST_PSEUDO_REGISTER
1135 && FUNCTION_ARG_REGNO_P (REGNO (inner_dest)));
1d300e19
KG
1136
1137 default:
1138 break;
956d6950
JL
1139 }
1140
1141 return 0;
1142}
1143
230d793d
RS
1144/* LOC is the location within I3 that contains its pattern or the component
1145 of a PARALLEL of the pattern. We validate that it is valid for combining.
1146
1147 One problem is if I3 modifies its output, as opposed to replacing it
1148 entirely, we can't allow the output to contain I2DEST or I1DEST as doing
1149 so would produce an insn that is not equivalent to the original insns.
1150
1151 Consider:
1152
1153 (set (reg:DI 101) (reg:DI 100))
1154 (set (subreg:SI (reg:DI 101) 0) <foo>)
1155
1156 This is NOT equivalent to:
1157
1158 (parallel [(set (subreg:SI (reg:DI 100) 0) <foo>)
1159 (set (reg:DI 101) (reg:DI 100))])
1160
1161 Not only does this modify 100 (in which case it might still be valid
1162 if 100 were dead in I2), it sets 101 to the ORIGINAL value of 100.
1163
1164 We can also run into a problem if I2 sets a register that I1
1165 uses and I1 gets directly substituted into I3 (not via I2). In that
1166 case, we would be getting the wrong value of I2DEST into I3, so we
1167 must reject the combination. This case occurs when I2 and I1 both
1168 feed into I3, rather than when I1 feeds into I2, which feeds into I3.
1169 If I1_NOT_IN_SRC is non-zero, it means that finding I1 in the source
1170 of a SET must prevent combination from occurring.
1171
e9a25f70 1172 On machines where SMALL_REGISTER_CLASSES is non-zero, we don't combine
c448a43e
RK
1173 if the destination of a SET is a hard register that isn't a user
1174 variable.
230d793d
RS
1175
1176 Before doing the above check, we first try to expand a field assignment
1177 into a set of logical operations.
1178
1179 If PI3_DEST_KILLED is non-zero, it is a pointer to a location in which
1180 we place a register that is both set and used within I3. If more than one
1181 such register is detected, we fail.
1182
1183 Return 1 if the combination is valid, zero otherwise. */
1184
1185static int
1186combinable_i3pat (i3, loc, i2dest, i1dest, i1_not_in_src, pi3dest_killed)
1187 rtx i3;
1188 rtx *loc;
1189 rtx i2dest;
1190 rtx i1dest;
1191 int i1_not_in_src;
1192 rtx *pi3dest_killed;
1193{
1194 rtx x = *loc;
1195
1196 if (GET_CODE (x) == SET)
1197 {
1198 rtx set = expand_field_assignment (x);
1199 rtx dest = SET_DEST (set);
1200 rtx src = SET_SRC (set);
1201 rtx inner_dest = dest, inner_src = src;
1202
1203 SUBST (*loc, set);
1204
1205 while (GET_CODE (inner_dest) == STRICT_LOW_PART
1206 || GET_CODE (inner_dest) == SUBREG
1207 || GET_CODE (inner_dest) == ZERO_EXTRACT)
1208 inner_dest = XEXP (inner_dest, 0);
1209
1210 /* We probably don't need this any more now that LIMIT_RELOAD_CLASS
1211 was added. */
1212#if 0
1213 while (GET_CODE (inner_src) == STRICT_LOW_PART
1214 || GET_CODE (inner_src) == SUBREG
1215 || GET_CODE (inner_src) == ZERO_EXTRACT)
1216 inner_src = XEXP (inner_src, 0);
1217
1218 /* If it is better that two different modes keep two different pseudos,
1219 avoid combining them. This avoids producing the following pattern
1220 on a 386:
1221 (set (subreg:SI (reg/v:QI 21) 0)
1222 (lshiftrt:SI (reg/v:SI 20)
1223 (const_int 24)))
1224 If that were made, reload could not handle the pair of
1225 reg 20/21, since it would try to get any GENERAL_REGS
1226 but some of them don't handle QImode. */
1227
1228 if (rtx_equal_p (inner_src, i2dest)
1229 && GET_CODE (inner_dest) == REG
1230 && ! MODES_TIEABLE_P (GET_MODE (i2dest), GET_MODE (inner_dest)))
1231 return 0;
1232#endif
1233
1234 /* Check for the case where I3 modifies its output, as
1235 discussed above. */
1236 if ((inner_dest != dest
1237 && (reg_overlap_mentioned_p (i2dest, inner_dest)
1238 || (i1dest && reg_overlap_mentioned_p (i1dest, inner_dest))))
956d6950 1239
3f508eca
RK
1240 /* This is the same test done in can_combine_p except that we
1241 allow a hard register with SMALL_REGISTER_CLASSES if SRC is a
956d6950
JL
1242 CALL operation. Moreover, we can't test all_adjacent; we don't
1243 have to, since this instruction will stay in place, thus we are
1244 not considering increasing the lifetime of INNER_DEST.
1245
1246 Also, if this insn sets a function argument, combining it with
1247 something that might need a spill could clobber a previous
1248 function argument; the all_adjacent test in can_combine_p also
1249 checks this; here, we do a more specific test for this case. */
1250
230d793d 1251 || (GET_CODE (inner_dest) == REG
dfbe1b2f 1252 && REGNO (inner_dest) < FIRST_PSEUDO_REGISTER
c448a43e
RK
1253 && (! HARD_REGNO_MODE_OK (REGNO (inner_dest),
1254 GET_MODE (inner_dest))
e9a25f70
JL
1255 || (SMALL_REGISTER_CLASSES && GET_CODE (src) != CALL
1256 && ! REG_USERVAR_P (inner_dest)
956d6950
JL
1257 && (FUNCTION_VALUE_REGNO_P (REGNO (inner_dest))
1258 || (FUNCTION_ARG_REGNO_P (REGNO (inner_dest))
1259 && i3 != 0
1260 && sets_function_arg_p (prev_nonnote_insn (i3)))))))
230d793d
RS
1261 || (i1_not_in_src && reg_overlap_mentioned_p (i1dest, src)))
1262 return 0;
1263
1264 /* If DEST is used in I3, it is being killed in this insn,
36a9c2e9
JL
1265 so record that for later.
1266 Never add REG_DEAD notes for the FRAME_POINTER_REGNUM or the
1267 STACK_POINTER_REGNUM, since these are always considered to be
1268 live. Similarly for ARG_POINTER_REGNUM if it is fixed. */
230d793d 1269 if (pi3dest_killed && GET_CODE (dest) == REG
36a9c2e9
JL
1270 && reg_referenced_p (dest, PATTERN (i3))
1271 && REGNO (dest) != FRAME_POINTER_REGNUM
6d7096b0
DE
1272#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
1273 && REGNO (dest) != HARD_FRAME_POINTER_REGNUM
1274#endif
36a9c2e9
JL
1275#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
1276 && (REGNO (dest) != ARG_POINTER_REGNUM
1277 || ! fixed_regs [REGNO (dest)])
1278#endif
1279 && REGNO (dest) != STACK_POINTER_REGNUM)
230d793d
RS
1280 {
1281 if (*pi3dest_killed)
1282 return 0;
1283
1284 *pi3dest_killed = dest;
1285 }
1286 }
1287
1288 else if (GET_CODE (x) == PARALLEL)
1289 {
1290 int i;
1291
1292 for (i = 0; i < XVECLEN (x, 0); i++)
1293 if (! combinable_i3pat (i3, &XVECEXP (x, 0, i), i2dest, i1dest,
1294 i1_not_in_src, pi3dest_killed))
1295 return 0;
1296 }
1297
1298 return 1;
1299}
1300\f
1301/* Try to combine the insns I1 and I2 into I3.
1302 Here I1 and I2 appear earlier than I3.
1303 I1 can be zero; then we combine just I2 into I3.
1304
 1305 If we are combining three insns and the resulting insn is not recognized,
1306 try splitting it into two insns. If that happens, I2 and I3 are retained
1307 and I1 is pseudo-deleted by turning it into a NOTE. Otherwise, I1 and I2
1308 are pseudo-deleted.
1309
abe6e52f
RK
1310 Return 0 if the combination does not work. Then nothing is changed.
1311 If we did the combination, return the insn at which combine should
1312 resume scanning. */
230d793d
RS
1313
1314static rtx
1315try_combine (i3, i2, i1)
1316 register rtx i3, i2, i1;
1317{
 1318 /* New patterns for I3 and I2, respectively. */
1319 rtx newpat, newi2pat = 0;
1320 /* Indicates need to preserve SET in I1 or I2 in I3 if it is not dead. */
1321 int added_sets_1, added_sets_2;
1322 /* Total number of SETs to put into I3. */
1323 int total_sets;
 1324 /* Nonzero if I2's body now appears in I3. */
1325 int i2_is_used;
1326 /* INSN_CODEs for new I3, new I2, and user of condition code. */
1327 int insn_code_number, i2_code_number, other_code_number;
1328 /* Contains I3 if the destination of I3 is used in its source, which means
1329 that the old life of I3 is being killed. If that usage is placed into
1330 I2 and not in I3, a REG_DEAD note must be made. */
1331 rtx i3dest_killed = 0;
1332 /* SET_DEST and SET_SRC of I2 and I1. */
1333 rtx i2dest, i2src, i1dest = 0, i1src = 0;
1334 /* PATTERN (I2), or a copy of it in certain cases. */
1335 rtx i2pat;
1336 /* Indicates if I2DEST or I1DEST is in I2SRC or I1_SRC. */
c4e861e8 1337 int i2dest_in_i2src = 0, i1dest_in_i1src = 0, i2dest_in_i1src = 0;
230d793d
RS
1338 int i1_feeds_i3 = 0;
1339 /* Notes that must be added to REG_NOTES in I3 and I2. */
1340 rtx new_i3_notes, new_i2_notes;
176c9e6b
JW
1341 /* Notes that we substituted I3 into I2 instead of the normal case. */
1342 int i3_subst_into_i2 = 0;
df7d75de
RK
1343 /* Notes that I1, I2 or I3 is a MULT operation. */
1344 int have_mult = 0;
a29ca9db
RK
1345 /* Number of clobbers of SCRATCH we had to add. */
1346 int i3_scratches = 0, i2_scratches = 0, other_scratches = 0;
230d793d
RS
1347
1348 int maxreg;
1349 rtx temp;
1350 register rtx link;
1351 int i;
1352
1353 /* If any of I1, I2, and I3 isn't really an insn, we can't do anything.
1354 This can occur when flow deletes an insn that it has merged into an
1355 auto-increment address. We also can't do anything if I3 has a
1356 REG_LIBCALL note since we don't want to disrupt the contiguity of a
1357 libcall. */
1358
1359 if (GET_RTX_CLASS (GET_CODE (i3)) != 'i'
1360 || GET_RTX_CLASS (GET_CODE (i2)) != 'i'
1361 || (i1 && GET_RTX_CLASS (GET_CODE (i1)) != 'i')
5f4f0e22 1362 || find_reg_note (i3, REG_LIBCALL, NULL_RTX))
230d793d
RS
1363 return 0;
1364
1365 combine_attempts++;
1366
241cea85 1367 undobuf.undos = undobuf.previous_undos = 0;
230d793d
RS
1368 undobuf.other_insn = 0;
1369
1370 /* Save the current high-water-mark so we can free storage if we didn't
1371 accept this combination. */
1372 undobuf.storage = (char *) oballoc (0);
1373
6e25d159
RK
1374 /* Reset the hard register usage information. */
1375 CLEAR_HARD_REG_SET (newpat_used_regs);
1376
230d793d
RS
1377 /* If I1 and I2 both feed I3, they can be in any order. To simplify the
1378 code below, set I1 to be the earlier of the two insns. */
1379 if (i1 && INSN_CUID (i1) > INSN_CUID (i2))
1380 temp = i1, i1 = i2, i2 = temp;
1381
abe6e52f 1382 added_links_insn = 0;
137e889e 1383
230d793d
RS
1384 /* First check for one important special-case that the code below will
1385 not handle. Namely, the case where I1 is zero, I2 has multiple sets,
1386 and I3 is a SET whose SET_SRC is a SET_DEST in I2. In that case,
1387 we may be able to replace that destination with the destination of I3.
1388 This occurs in the common code where we compute both a quotient and
1389 remainder into a structure, in which case we want to do the computation
1390 directly into the structure to avoid register-register copies.
1391
1392 We make very conservative checks below and only try to handle the
1393 most common cases of this. For example, we only handle the case
1394 where I2 and I3 are adjacent to avoid making difficult register
1395 usage tests. */
1396
1397 if (i1 == 0 && GET_CODE (i3) == INSN && GET_CODE (PATTERN (i3)) == SET
1398 && GET_CODE (SET_SRC (PATTERN (i3))) == REG
1399 && REGNO (SET_SRC (PATTERN (i3))) >= FIRST_PSEUDO_REGISTER
f95182a4 1400 && (! SMALL_REGISTER_CLASSES
e9a25f70
JL
1401 || (GET_CODE (SET_DEST (PATTERN (i3))) != REG
1402 || REGNO (SET_DEST (PATTERN (i3))) >= FIRST_PSEUDO_REGISTER
1403 || REG_USERVAR_P (SET_DEST (PATTERN (i3)))))
230d793d
RS
1404 && find_reg_note (i3, REG_DEAD, SET_SRC (PATTERN (i3)))
1405 && GET_CODE (PATTERN (i2)) == PARALLEL
1406 && ! side_effects_p (SET_DEST (PATTERN (i3)))
5089e22e
RS
1407 /* If the dest of I3 is a ZERO_EXTRACT or STRICT_LOW_PART, the code
1408 below would need to check what is inside (and reg_overlap_mentioned_p
1409 doesn't support those codes anyway). Don't allow those destinations;
1410 the resulting insn isn't likely to be recognized anyway. */
1411 && GET_CODE (SET_DEST (PATTERN (i3))) != ZERO_EXTRACT
1412 && GET_CODE (SET_DEST (PATTERN (i3))) != STRICT_LOW_PART
230d793d
RS
1413 && ! reg_overlap_mentioned_p (SET_SRC (PATTERN (i3)),
1414 SET_DEST (PATTERN (i3)))
1415 && next_real_insn (i2) == i3)
5089e22e
RS
1416 {
1417 rtx p2 = PATTERN (i2);
1418
1419 /* Make sure that the destination of I3,
1420 which we are going to substitute into one output of I2,
1421 is not used within another output of I2. We must avoid making this:
1422 (parallel [(set (mem (reg 69)) ...)
1423 (set (reg 69) ...)])
1424 which is not well-defined as to order of actions.
1425 (Besides, reload can't handle output reloads for this.)
1426
1427 The problem can also happen if the dest of I3 is a memory ref,
1428 if another dest in I2 is an indirect memory ref. */
1429 for (i = 0; i < XVECLEN (p2, 0); i++)
7ca919b7
RK
1430 if ((GET_CODE (XVECEXP (p2, 0, i)) == SET
1431 || GET_CODE (XVECEXP (p2, 0, i)) == CLOBBER)
5089e22e
RS
1432 && reg_overlap_mentioned_p (SET_DEST (PATTERN (i3)),
1433 SET_DEST (XVECEXP (p2, 0, i))))
1434 break;
230d793d 1435
5089e22e
RS
1436 if (i == XVECLEN (p2, 0))
1437 for (i = 0; i < XVECLEN (p2, 0); i++)
1438 if (SET_DEST (XVECEXP (p2, 0, i)) == SET_SRC (PATTERN (i3)))
1439 {
1440 combine_merges++;
230d793d 1441
5089e22e
RS
1442 subst_insn = i3;
1443 subst_low_cuid = INSN_CUID (i2);
230d793d 1444
c4e861e8 1445 added_sets_2 = added_sets_1 = 0;
5089e22e 1446 i2dest = SET_SRC (PATTERN (i3));
230d793d 1447
5089e22e
RS
1448 /* Replace the dest in I2 with our dest and make the resulting
1449 insn the new pattern for I3. Then skip to where we
1450 validate the pattern. Everything was set up above. */
1451 SUBST (SET_DEST (XVECEXP (p2, 0, i)),
1452 SET_DEST (PATTERN (i3)));
1453
1454 newpat = p2;
176c9e6b 1455 i3_subst_into_i2 = 1;
5089e22e
RS
1456 goto validate_replacement;
1457 }
1458 }
230d793d
RS
1459
1460#ifndef HAVE_cc0
1461 /* If we have no I1 and I2 looks like:
1462 (parallel [(set (reg:CC X) (compare:CC OP (const_int 0)))
1463 (set Y OP)])
1464 make up a dummy I1 that is
1465 (set Y OP)
1466 and change I2 to be
1467 (set (reg:CC X) (compare:CC Y (const_int 0)))
1468
1469 (We can ignore any trailing CLOBBERs.)
1470
1471 This undoes a previous combination and allows us to match a branch-and-
1472 decrement insn. */
1473
1474 if (i1 == 0 && GET_CODE (PATTERN (i2)) == PARALLEL
1475 && XVECLEN (PATTERN (i2), 0) >= 2
1476 && GET_CODE (XVECEXP (PATTERN (i2), 0, 0)) == SET
1477 && (GET_MODE_CLASS (GET_MODE (SET_DEST (XVECEXP (PATTERN (i2), 0, 0))))
1478 == MODE_CC)
1479 && GET_CODE (SET_SRC (XVECEXP (PATTERN (i2), 0, 0))) == COMPARE
1480 && XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 1) == const0_rtx
1481 && GET_CODE (XVECEXP (PATTERN (i2), 0, 1)) == SET
1482 && GET_CODE (SET_DEST (XVECEXP (PATTERN (i2), 0, 1))) == REG
1483 && rtx_equal_p (XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 0),
1484 SET_SRC (XVECEXP (PATTERN (i2), 0, 1))))
1485 {
1486 for (i = XVECLEN (PATTERN (i2), 0) - 1; i >= 2; i--)
1487 if (GET_CODE (XVECEXP (PATTERN (i2), 0, i)) != CLOBBER)
1488 break;
1489
1490 if (i == 1)
1491 {
1492 /* We make I1 with the same INSN_UID as I2. This gives it
1493 the same INSN_CUID for value tracking. Our fake I1 will
1494 never appear in the insn stream so giving it the same INSN_UID
1495 as I2 will not cause a problem. */
1496
0d9641d1 1497 subst_prev_insn = i1
38a448ca
RH
1498 = gen_rtx_INSN (VOIDmode, INSN_UID (i2), NULL_RTX, i2,
1499 XVECEXP (PATTERN (i2), 0, 1), -1, NULL_RTX,
1500 NULL_RTX);
230d793d
RS
1501
1502 SUBST (PATTERN (i2), XVECEXP (PATTERN (i2), 0, 0));
1503 SUBST (XEXP (SET_SRC (PATTERN (i2)), 0),
1504 SET_DEST (PATTERN (i1)));
1505 }
1506 }
1507#endif
1508
1509 /* Verify that I2 and I1 are valid for combining. */
5f4f0e22
CH
1510 if (! can_combine_p (i2, i3, i1, NULL_RTX, &i2dest, &i2src)
1511 || (i1 && ! can_combine_p (i1, i3, NULL_RTX, i2, &i1dest, &i1src)))
230d793d
RS
1512 {
1513 undo_all ();
1514 return 0;
1515 }
1516
1517 /* Record whether I2DEST is used in I2SRC and similarly for the other
1518 cases. Knowing this will help in register status updating below. */
1519 i2dest_in_i2src = reg_overlap_mentioned_p (i2dest, i2src);
1520 i1dest_in_i1src = i1 && reg_overlap_mentioned_p (i1dest, i1src);
1521 i2dest_in_i1src = i1 && reg_overlap_mentioned_p (i2dest, i1src);
1522
916f14f1 1523 /* See if I1 directly feeds into I3. It does if I1DEST is not used
230d793d
RS
1524 in I2SRC. */
1525 i1_feeds_i3 = i1 && ! reg_overlap_mentioned_p (i1dest, i2src);
1526
1527 /* Ensure that I3's pattern can be the destination of combines. */
1528 if (! combinable_i3pat (i3, &PATTERN (i3), i2dest, i1dest,
1529 i1 && i2dest_in_i1src && i1_feeds_i3,
1530 &i3dest_killed))
1531 {
1532 undo_all ();
1533 return 0;
1534 }
1535
df7d75de
RK
1536 /* See if any of the insns is a MULT operation. Unless one is, we will
1537 reject a combination that is, since it must be slower. Be conservative
1538 here. */
1539 if (GET_CODE (i2src) == MULT
1540 || (i1 != 0 && GET_CODE (i1src) == MULT)
1541 || (GET_CODE (PATTERN (i3)) == SET
1542 && GET_CODE (SET_SRC (PATTERN (i3))) == MULT))
1543 have_mult = 1;
1544
230d793d
RS
1545 /* If I3 has an inc, then give up if I1 or I2 uses the reg that is inc'd.
1546 We used to do this EXCEPT in one case: I3 has a post-inc in an
1547 output operand. However, that exception can give rise to insns like
1548 mov r3,(r3)+
1549 which is a famous insn on the PDP-11 where the value of r3 used as the
5089e22e 1550 source was model-dependent. Avoid this sort of thing. */
230d793d
RS
1551
1552#if 0
1553 if (!(GET_CODE (PATTERN (i3)) == SET
1554 && GET_CODE (SET_SRC (PATTERN (i3))) == REG
1555 && GET_CODE (SET_DEST (PATTERN (i3))) == MEM
1556 && (GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_INC
1557 || GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_DEC)))
1558 /* It's not the exception. */
1559#endif
1560#ifdef AUTO_INC_DEC
1561 for (link = REG_NOTES (i3); link; link = XEXP (link, 1))
1562 if (REG_NOTE_KIND (link) == REG_INC
1563 && (reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i2))
1564 || (i1 != 0
1565 && reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i1)))))
1566 {
1567 undo_all ();
1568 return 0;
1569 }
1570#endif
1571
1572 /* See if the SETs in I1 or I2 need to be kept around in the merged
1573 instruction: whenever the value set there is still needed past I3.
1574 For the SETs in I2, this is easy: we see if I2DEST dies or is set in I3.
1575
1576 For the SET in I1, we have two cases: If I1 and I2 independently
1577 feed into I3, the set in I1 needs to be kept around if I1DEST dies
1578 or is set in I3. Otherwise (if I1 feeds I2 which feeds I3), the set
1579 in I1 needs to be kept around unless I1DEST dies or is set in either
1580 I2 or I3. We can distinguish these cases by seeing if I2SRC mentions
1581 I1DEST. If so, we know I1 feeds into I2. */
1582
1583 added_sets_2 = ! dead_or_set_p (i3, i2dest);
1584
1585 added_sets_1
1586 = i1 && ! (i1_feeds_i3 ? dead_or_set_p (i3, i1dest)
1587 : (dead_or_set_p (i3, i1dest) || dead_or_set_p (i2, i1dest)));
1588
1589 /* If the set in I2 needs to be kept around, we must make a copy of
1590 PATTERN (I2), so that when we substitute I1SRC for I1DEST in
5089e22e 1591 PATTERN (I2), we are only substituting for the original I1DEST, not into
230d793d
RS
1592 an already-substituted copy. This also prevents making self-referential
1593 rtx. If I2 is a PARALLEL, we just need the piece that assigns I2SRC to
1594 I2DEST. */
1595
1596 i2pat = (GET_CODE (PATTERN (i2)) == PARALLEL
38a448ca 1597 ? gen_rtx_SET (VOIDmode, i2dest, i2src)
230d793d
RS
1598 : PATTERN (i2));
1599
1600 if (added_sets_2)
1601 i2pat = copy_rtx (i2pat);
1602
1603 combine_merges++;
1604
1605 /* Substitute in the latest insn for the regs set by the earlier ones. */
1606
1607 maxreg = max_reg_num ();
1608
1609 subst_insn = i3;
230d793d
RS
1610
1611 /* It is possible that the source of I2 or I1 may be performing an
1612 unneeded operation, such as a ZERO_EXTEND of something that is known
1613 to have the high part zero. Handle that case by letting subst look at
1614 the innermost one of them.
1615
1616 Another way to do this would be to have a function that tries to
1617 simplify a single insn instead of merging two or more insns. We don't
1618 do this because of the potential of infinite loops and because
1619 of the potential extra memory required. However, doing it the way
1620 we are is a bit of a kludge and doesn't catch all cases.
1621
1622 But only do this if -fexpensive-optimizations since it slows things down
1623 and doesn't usually win. */
1624
1625 if (flag_expensive_optimizations)
1626 {
1627 /* Pass pc_rtx so no substitutions are done, just simplifications.
1628 The cases that we are interested in here do not involve the few
1629 cases were is_replaced is checked. */
1630 if (i1)
d0ab8cd3
RK
1631 {
1632 subst_low_cuid = INSN_CUID (i1);
1633 i1src = subst (i1src, pc_rtx, pc_rtx, 0, 0);
1634 }
230d793d 1635 else
d0ab8cd3
RK
1636 {
1637 subst_low_cuid = INSN_CUID (i2);
1638 i2src = subst (i2src, pc_rtx, pc_rtx, 0, 0);
1639 }
230d793d 1640
241cea85 1641 undobuf.previous_undos = undobuf.undos;
230d793d
RS
1642 }
1643
1644#ifndef HAVE_cc0
1645 /* Many machines that don't use CC0 have insns that can both perform an
1646 arithmetic operation and set the condition code. These operations will
1647 be represented as a PARALLEL with the first element of the vector
1648 being a COMPARE of an arithmetic operation with the constant zero.
1649 The second element of the vector will set some pseudo to the result
1650 of the same arithmetic operation. If we simplify the COMPARE, we won't
1651 match such a pattern and so will generate an extra insn. Here we test
1652 for this case, where both the comparison and the operation result are
1653 needed, and make the PARALLEL by just replacing I2DEST in I3SRC with
1654 I2SRC. Later we will make the PARALLEL that contains I2. */
1655
1656 if (i1 == 0 && added_sets_2 && GET_CODE (PATTERN (i3)) == SET
1657 && GET_CODE (SET_SRC (PATTERN (i3))) == COMPARE
1658 && XEXP (SET_SRC (PATTERN (i3)), 1) == const0_rtx
1659 && rtx_equal_p (XEXP (SET_SRC (PATTERN (i3)), 0), i2dest))
1660 {
1661 rtx *cc_use;
1662 enum machine_mode compare_mode;
1663
1664 newpat = PATTERN (i3);
1665 SUBST (XEXP (SET_SRC (newpat), 0), i2src);
1666
1667 i2_is_used = 1;
1668
1669#ifdef EXTRA_CC_MODES
1670 /* See if a COMPARE with the operand we substituted in should be done
1671 with the mode that is currently being used. If not, do the same
1672 processing we do in `subst' for a SET; namely, if the destination
1673 is used only once, try to replace it with a register of the proper
1674 mode and also replace the COMPARE. */
1675 if (undobuf.other_insn == 0
1676 && (cc_use = find_single_use (SET_DEST (newpat), i3,
1677 &undobuf.other_insn))
77fa0940
RK
1678 && ((compare_mode = SELECT_CC_MODE (GET_CODE (*cc_use),
1679 i2src, const0_rtx))
230d793d
RS
1680 != GET_MODE (SET_DEST (newpat))))
1681 {
1682 int regno = REGNO (SET_DEST (newpat));
38a448ca 1683 rtx new_dest = gen_rtx_REG (compare_mode, regno);
230d793d
RS
1684
1685 if (regno < FIRST_PSEUDO_REGISTER
b1f21e0a 1686 || (REG_N_SETS (regno) == 1 && ! added_sets_2
230d793d
RS
1687 && ! REG_USERVAR_P (SET_DEST (newpat))))
1688 {
1689 if (regno >= FIRST_PSEUDO_REGISTER)
1690 SUBST (regno_reg_rtx[regno], new_dest);
1691
1692 SUBST (SET_DEST (newpat), new_dest);
1693 SUBST (XEXP (*cc_use, 0), new_dest);
1694 SUBST (SET_SRC (newpat),
1695 gen_rtx_combine (COMPARE, compare_mode,
1696 i2src, const0_rtx));
1697 }
1698 else
1699 undobuf.other_insn = 0;
1700 }
1701#endif
1702 }
1703 else
1704#endif
1705 {
1706 n_occurrences = 0; /* `subst' counts here */
1707
1708 /* If I1 feeds into I2 (not into I3) and I1DEST is in I1SRC, we
1709 need to make a unique copy of I2SRC each time we substitute it
1710 to avoid self-referential rtl. */
1711
d0ab8cd3 1712 subst_low_cuid = INSN_CUID (i2);
230d793d
RS
1713 newpat = subst (PATTERN (i3), i2dest, i2src, 0,
1714 ! i1_feeds_i3 && i1dest_in_i1src);
241cea85 1715 undobuf.previous_undos = undobuf.undos;
230d793d
RS
1716
1717 /* Record whether i2's body now appears within i3's body. */
1718 i2_is_used = n_occurrences;
1719 }
1720
1721 /* If we already got a failure, don't try to do more. Otherwise,
1722 try to substitute in I1 if we have it. */
1723
1724 if (i1 && GET_CODE (newpat) != CLOBBER)
1725 {
1726 /* Before we can do this substitution, we must redo the test done
1727 above (see detailed comments there) that ensures that I1DEST
0f41302f 1728 isn't mentioned in any SETs in NEWPAT that are field assignments. */
230d793d 1729
5f4f0e22
CH
1730 if (! combinable_i3pat (NULL_RTX, &newpat, i1dest, NULL_RTX,
1731 0, NULL_PTR))
230d793d
RS
1732 {
1733 undo_all ();
1734 return 0;
1735 }
1736
1737 n_occurrences = 0;
d0ab8cd3 1738 subst_low_cuid = INSN_CUID (i1);
230d793d 1739 newpat = subst (newpat, i1dest, i1src, 0, 0);
241cea85 1740 undobuf.previous_undos = undobuf.undos;
230d793d
RS
1741 }
1742
916f14f1
RK
1743 /* Fail if an autoincrement side-effect has been duplicated. Be careful
1744 to count all the ways that I2SRC and I1SRC can be used. */
5f4f0e22 1745 if ((FIND_REG_INC_NOTE (i2, NULL_RTX) != 0
916f14f1 1746 && i2_is_used + added_sets_2 > 1)
5f4f0e22 1747 || (i1 != 0 && FIND_REG_INC_NOTE (i1, NULL_RTX) != 0
916f14f1
RK
1748 && (n_occurrences + added_sets_1 + (added_sets_2 && ! i1_feeds_i3)
1749 > 1))
230d793d
RS
1750 /* Fail if we tried to make a new register (we used to abort, but there's
1751 really no reason to). */
1752 || max_reg_num () != maxreg
1753 /* Fail if we couldn't do something and have a CLOBBER. */
df7d75de
RK
1754 || GET_CODE (newpat) == CLOBBER
1755 /* Fail if this new pattern is a MULT and we didn't have one before
1756 at the outer level. */
1757 || (GET_CODE (newpat) == SET && GET_CODE (SET_SRC (newpat)) == MULT
1758 && ! have_mult))
230d793d
RS
1759 {
1760 undo_all ();
1761 return 0;
1762 }
1763
1764 /* If the actions of the earlier insns must be kept
1765 in addition to substituting them into the latest one,
1766 we must make a new PARALLEL for the latest insn
1767 to hold additional the SETs. */
1768
1769 if (added_sets_1 || added_sets_2)
1770 {
1771 combine_extras++;
1772
1773 if (GET_CODE (newpat) == PARALLEL)
1774 {
1775 rtvec old = XVEC (newpat, 0);
1776 total_sets = XVECLEN (newpat, 0) + added_sets_1 + added_sets_2;
38a448ca 1777 newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total_sets));
59888de2 1778 bcopy ((char *) &old->elem[0], (char *) XVEC (newpat, 0)->elem,
230d793d
RS
1779 sizeof (old->elem[0]) * old->num_elem);
1780 }
1781 else
1782 {
1783 rtx old = newpat;
1784 total_sets = 1 + added_sets_1 + added_sets_2;
38a448ca 1785 newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total_sets));
230d793d
RS
1786 XVECEXP (newpat, 0, 0) = old;
1787 }
1788
1789 if (added_sets_1)
1790 XVECEXP (newpat, 0, --total_sets)
1791 = (GET_CODE (PATTERN (i1)) == PARALLEL
38a448ca 1792 ? gen_rtx_SET (VOIDmode, i1dest, i1src) : PATTERN (i1));
230d793d
RS
1793
1794 if (added_sets_2)
1795 {
1796 /* If there is no I1, use I2's body as is. We used to also not do
1797 the subst call below if I2 was substituted into I3,
1798 but that could lose a simplification. */
1799 if (i1 == 0)
1800 XVECEXP (newpat, 0, --total_sets) = i2pat;
1801 else
1802 /* See comment where i2pat is assigned. */
1803 XVECEXP (newpat, 0, --total_sets)
1804 = subst (i2pat, i1dest, i1src, 0, 0);
1805 }
1806 }
1807
1808 /* We come here when we are replacing a destination in I2 with the
1809 destination of I3. */
1810 validate_replacement:
1811
6e25d159
RK
1812 /* Note which hard regs this insn has as inputs. */
1813 mark_used_regs_combine (newpat);
1814
230d793d 1815 /* Is the result of combination a valid instruction? */
a29ca9db
RK
1816 insn_code_number
1817 = recog_for_combine (&newpat, i3, &new_i3_notes, &i3_scratches);
230d793d
RS
1818
1819 /* If the result isn't valid, see if it is a PARALLEL of two SETs where
1820 the second SET's destination is a register that is unused. In that case,
1821 we just need the first SET. This can occur when simplifying a divmod
1822 insn. We *must* test for this case here because the code below that
1823 splits two independent SETs doesn't handle this case correctly when it
1824 updates the register status. Also check the case where the first
1825 SET's destination is unused. That would not cause incorrect code, but
1826 does cause an unneeded insn to remain. */
1827
1828 if (insn_code_number < 0 && GET_CODE (newpat) == PARALLEL
1829 && XVECLEN (newpat, 0) == 2
1830 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
1831 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
1832 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) == REG
1833 && find_reg_note (i3, REG_UNUSED, SET_DEST (XVECEXP (newpat, 0, 1)))
1834 && ! side_effects_p (SET_SRC (XVECEXP (newpat, 0, 1)))
1835 && asm_noperands (newpat) < 0)
1836 {
1837 newpat = XVECEXP (newpat, 0, 0);
a29ca9db
RK
1838 insn_code_number
1839 = recog_for_combine (&newpat, i3, &new_i3_notes, &i3_scratches);
230d793d
RS
1840 }
1841
1842 else if (insn_code_number < 0 && GET_CODE (newpat) == PARALLEL
1843 && XVECLEN (newpat, 0) == 2
1844 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
1845 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
1846 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) == REG
1847 && find_reg_note (i3, REG_UNUSED, SET_DEST (XVECEXP (newpat, 0, 0)))
1848 && ! side_effects_p (SET_SRC (XVECEXP (newpat, 0, 0)))
1849 && asm_noperands (newpat) < 0)
1850 {
1851 newpat = XVECEXP (newpat, 0, 1);
a29ca9db
RK
1852 insn_code_number
1853 = recog_for_combine (&newpat, i3, &new_i3_notes, &i3_scratches);
230d793d
RS
1854 }
1855
1856 /* If we were combining three insns and the result is a simple SET
1857 with no ASM_OPERANDS that wasn't recognized, try to split it into two
916f14f1
RK
1858 insns. There are two ways to do this. It can be split using a
1859 machine-specific method (like when you have an addition of a large
1860 constant) or by combine in the function find_split_point. */
1861
230d793d
RS
1862 if (i1 && insn_code_number < 0 && GET_CODE (newpat) == SET
1863 && asm_noperands (newpat) < 0)
1864 {
916f14f1 1865 rtx m_split, *split;
42495ca0 1866 rtx ni2dest = i2dest;
916f14f1
RK
1867
1868 /* See if the MD file can split NEWPAT. If it can't, see if letting it
42495ca0
RK
1869 use I2DEST as a scratch register will help. In the latter case,
1870 convert I2DEST to the mode of the source of NEWPAT if we can. */
916f14f1
RK
1871
1872 m_split = split_insns (newpat, i3);
a70c61d9
JW
1873
1874 /* We can only use I2DEST as a scratch reg if it doesn't overlap any
1875 inputs of NEWPAT. */
1876
1877 /* ??? If I2DEST is not safe, and I1DEST exists, then it would be
1878 possible to try that as a scratch reg. This would require adding
1879 more code to make it work though. */
1880
1881 if (m_split == 0 && ! reg_overlap_mentioned_p (ni2dest, newpat))
42495ca0
RK
1882 {
1883 /* If I2DEST is a hard register or the only use of a pseudo,
1884 we can change its mode. */
1885 if (GET_MODE (SET_DEST (newpat)) != GET_MODE (i2dest)
02f4ada4 1886 && GET_MODE (SET_DEST (newpat)) != VOIDmode
60654f77 1887 && GET_CODE (i2dest) == REG
42495ca0 1888 && (REGNO (i2dest) < FIRST_PSEUDO_REGISTER
b1f21e0a 1889 || (REG_N_SETS (REGNO (i2dest)) == 1 && ! added_sets_2
42495ca0 1890 && ! REG_USERVAR_P (i2dest))))
38a448ca 1891 ni2dest = gen_rtx_REG (GET_MODE (SET_DEST (newpat)),
42495ca0
RK
1892 REGNO (i2dest));
1893
38a448ca
RH
1894 m_split = split_insns
1895 (gen_rtx_PARALLEL (VOIDmode,
1896 gen_rtvec (2, newpat,
1897 gen_rtx_CLOBBER (VOIDmode,
1898 ni2dest))),
1899 i3);
42495ca0 1900 }
916f14f1
RK
1901
1902 if (m_split && GET_CODE (m_split) == SEQUENCE
3f508eca
RK
1903 && XVECLEN (m_split, 0) == 2
1904 && (next_real_insn (i2) == i3
1905 || ! use_crosses_set_p (PATTERN (XVECEXP (m_split, 0, 0)),
1906 INSN_CUID (i2))))
916f14f1 1907 {
1a26b032 1908 rtx i2set, i3set;
d0ab8cd3 1909 rtx newi3pat = PATTERN (XVECEXP (m_split, 0, 1));
916f14f1 1910 newi2pat = PATTERN (XVECEXP (m_split, 0, 0));
916f14f1 1911
e4ba89be
RK
1912 i3set = single_set (XVECEXP (m_split, 0, 1));
1913 i2set = single_set (XVECEXP (m_split, 0, 0));
1a26b032 1914
42495ca0
RK
1915 /* In case we changed the mode of I2DEST, replace it in the
1916 pseudo-register table here. We can't do it above in case this
1917 code doesn't get executed and we do a split the other way. */
1918
1919 if (REGNO (i2dest) >= FIRST_PSEUDO_REGISTER)
1920 SUBST (regno_reg_rtx[REGNO (i2dest)], ni2dest);
1921
a29ca9db
RK
1922 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes,
1923 &i2_scratches);
1a26b032
RK
1924
1925 /* If I2 or I3 has multiple SETs, we won't know how to track
9cc96794
RK
1926 register status, so don't use these insns. If I2's destination
1927 is used between I2 and I3, we also can't use these insns. */
1a26b032 1928
9cc96794
RK
1929 if (i2_code_number >= 0 && i2set && i3set
1930 && (next_real_insn (i2) == i3
1931 || ! reg_used_between_p (SET_DEST (i2set), i2, i3)))
a29ca9db
RK
1932 insn_code_number = recog_for_combine (&newi3pat, i3, &new_i3_notes,
1933 &i3_scratches);
d0ab8cd3
RK
1934 if (insn_code_number >= 0)
1935 newpat = newi3pat;
1936
c767f54b 1937 /* It is possible that both insns now set the destination of I3.
22609cbf 1938 If so, we must show an extra use of it. */
c767f54b 1939
393de53f
RK
1940 if (insn_code_number >= 0)
1941 {
1942 rtx new_i3_dest = SET_DEST (i3set);
1943 rtx new_i2_dest = SET_DEST (i2set);
1944
1945 while (GET_CODE (new_i3_dest) == ZERO_EXTRACT
1946 || GET_CODE (new_i3_dest) == STRICT_LOW_PART
1947 || GET_CODE (new_i3_dest) == SUBREG)
1948 new_i3_dest = XEXP (new_i3_dest, 0);
1949
d4096689
RK
1950 while (GET_CODE (new_i2_dest) == ZERO_EXTRACT
1951 || GET_CODE (new_i2_dest) == STRICT_LOW_PART
1952 || GET_CODE (new_i2_dest) == SUBREG)
1953 new_i2_dest = XEXP (new_i2_dest, 0);
1954
393de53f
RK
1955 if (GET_CODE (new_i3_dest) == REG
1956 && GET_CODE (new_i2_dest) == REG
1957 && REGNO (new_i3_dest) == REGNO (new_i2_dest))
b1f21e0a 1958 REG_N_SETS (REGNO (new_i2_dest))++;
393de53f 1959 }
916f14f1 1960 }
230d793d
RS
1961
1962 /* If we can split it and use I2DEST, go ahead and see if that
1963 helps things be recognized. Verify that none of the registers
1964 are set between I2 and I3. */
d0ab8cd3 1965 if (insn_code_number < 0 && (split = find_split_point (&newpat, i3)) != 0
230d793d
RS
1966#ifdef HAVE_cc0
1967 && GET_CODE (i2dest) == REG
1968#endif
1969 /* We need I2DEST in the proper mode. If it is a hard register
1970 or the only use of a pseudo, we can change its mode. */
1971 && (GET_MODE (*split) == GET_MODE (i2dest)
1972 || GET_MODE (*split) == VOIDmode
1973 || REGNO (i2dest) < FIRST_PSEUDO_REGISTER
b1f21e0a 1974 || (REG_N_SETS (REGNO (i2dest)) == 1 && ! added_sets_2
230d793d
RS
1975 && ! REG_USERVAR_P (i2dest)))
1976 && (next_real_insn (i2) == i3
1977 || ! use_crosses_set_p (*split, INSN_CUID (i2)))
1978 /* We can't overwrite I2DEST if its value is still used by
1979 NEWPAT. */
1980 && ! reg_referenced_p (i2dest, newpat))
1981 {
1982 rtx newdest = i2dest;
df7d75de
RK
1983 enum rtx_code split_code = GET_CODE (*split);
1984 enum machine_mode split_mode = GET_MODE (*split);
230d793d
RS
1985
1986 /* Get NEWDEST as a register in the proper mode. We have already
1987 validated that we can do this. */
df7d75de 1988 if (GET_MODE (i2dest) != split_mode && split_mode != VOIDmode)
230d793d 1989 {
38a448ca 1990 newdest = gen_rtx_REG (split_mode, REGNO (i2dest));
230d793d
RS
1991
1992 if (REGNO (i2dest) >= FIRST_PSEUDO_REGISTER)
1993 SUBST (regno_reg_rtx[REGNO (i2dest)], newdest);
1994 }
1995
1996 /* If *SPLIT is a (mult FOO (const_int pow2)), convert it to
1997 an ASHIFT. This can occur if it was inside a PLUS and hence
1998 appeared to be a memory address. This is a kludge. */
df7d75de 1999 if (split_code == MULT
230d793d
RS
2000 && GET_CODE (XEXP (*split, 1)) == CONST_INT
2001 && (i = exact_log2 (INTVAL (XEXP (*split, 1)))) >= 0)
1dc8a823
JW
2002 {
2003 SUBST (*split, gen_rtx_combine (ASHIFT, split_mode,
2004 XEXP (*split, 0), GEN_INT (i)));
2005 /* Update split_code because we may not have a multiply
2006 anymore. */
2007 split_code = GET_CODE (*split);
2008 }
230d793d
RS
2009
2010#ifdef INSN_SCHEDULING
2011 /* If *SPLIT is a paradoxical SUBREG, when we split it, it should
2012 be written as a ZERO_EXTEND. */
df7d75de
RK
2013 if (split_code == SUBREG && GET_CODE (SUBREG_REG (*split)) == MEM)
2014 SUBST (*split, gen_rtx_combine (ZERO_EXTEND, split_mode,
230d793d
RS
2015 XEXP (*split, 0)));
2016#endif
2017
2018 newi2pat = gen_rtx_combine (SET, VOIDmode, newdest, *split);
2019 SUBST (*split, newdest);
a29ca9db
RK
2020 i2_code_number
2021 = recog_for_combine (&newi2pat, i2, &new_i2_notes, &i2_scratches);
df7d75de
RK
2022
2023 /* If the split point was a MULT and we didn't have one before,
2024 don't use one now. */
2025 if (i2_code_number >= 0 && ! (split_code == MULT && ! have_mult))
a29ca9db
RK
2026 insn_code_number
2027 = recog_for_combine (&newpat, i3, &new_i3_notes, &i3_scratches);
230d793d
RS
2028 }
2029 }
2030
2031 /* Check for a case where we loaded from memory in a narrow mode and
2032 then sign extended it, but we need both registers. In that case,
2033 we have a PARALLEL with both loads from the same memory location.
2034 We can split this into a load from memory followed by a register-register
2035 copy. This saves at least one insn, more if register allocation can
f0343c74
RK
2036 eliminate the copy.
2037
2038 We cannot do this if the destination of the second assignment is
2039 a register that we have already assumed is zero-extended. Similarly
2040 for a SUBREG of such a register. */
230d793d
RS
2041
2042 else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0
2043 && GET_CODE (newpat) == PARALLEL
2044 && XVECLEN (newpat, 0) == 2
2045 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
2046 && GET_CODE (SET_SRC (XVECEXP (newpat, 0, 0))) == SIGN_EXTEND
2047 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
2048 && rtx_equal_p (SET_SRC (XVECEXP (newpat, 0, 1)),
2049 XEXP (SET_SRC (XVECEXP (newpat, 0, 0)), 0))
2050 && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 1)),
2051 INSN_CUID (i2))
2052 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT
2053 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART
f0343c74
RK
2054 && ! (temp = SET_DEST (XVECEXP (newpat, 0, 1)),
2055 (GET_CODE (temp) == REG
2056 && reg_nonzero_bits[REGNO (temp)] != 0
2057 && GET_MODE_BITSIZE (GET_MODE (temp)) < BITS_PER_WORD
2058 && GET_MODE_BITSIZE (GET_MODE (temp)) < HOST_BITS_PER_INT
2059 && (reg_nonzero_bits[REGNO (temp)]
2060 != GET_MODE_MASK (word_mode))))
2061 && ! (GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) == SUBREG
2062 && (temp = SUBREG_REG (SET_DEST (XVECEXP (newpat, 0, 1))),
2063 (GET_CODE (temp) == REG
2064 && reg_nonzero_bits[REGNO (temp)] != 0
2065 && GET_MODE_BITSIZE (GET_MODE (temp)) < BITS_PER_WORD
2066 && GET_MODE_BITSIZE (GET_MODE (temp)) < HOST_BITS_PER_INT
2067 && (reg_nonzero_bits[REGNO (temp)]
2068 != GET_MODE_MASK (word_mode)))))
230d793d
RS
2069 && ! reg_overlap_mentioned_p (SET_DEST (XVECEXP (newpat, 0, 1)),
2070 SET_SRC (XVECEXP (newpat, 0, 1)))
2071 && ! find_reg_note (i3, REG_UNUSED,
2072 SET_DEST (XVECEXP (newpat, 0, 0))))
2073 {
472fbdd1
RK
2074 rtx ni2dest;
2075
230d793d 2076 newi2pat = XVECEXP (newpat, 0, 0);
472fbdd1 2077 ni2dest = SET_DEST (XVECEXP (newpat, 0, 0));
230d793d
RS
2078 newpat = XVECEXP (newpat, 0, 1);
2079 SUBST (SET_SRC (newpat),
472fbdd1 2080 gen_lowpart_for_combine (GET_MODE (SET_SRC (newpat)), ni2dest));
a29ca9db
RK
2081 i2_code_number
2082 = recog_for_combine (&newi2pat, i2, &new_i2_notes, &i2_scratches);
2083
230d793d 2084 if (i2_code_number >= 0)
a29ca9db
RK
2085 insn_code_number
2086 = recog_for_combine (&newpat, i3, &new_i3_notes, &i3_scratches);
5089e22e
RS
2087
2088 if (insn_code_number >= 0)
2089 {
2090 rtx insn;
2091 rtx link;
2092
2093 /* If we will be able to accept this, we have made a change to the
2094 destination of I3. This can invalidate a LOG_LINKS pointing
2095 to I3. No other part of combine.c makes such a transformation.
2096
2097 The new I3 will have a destination that was previously the
2098 destination of I1 or I2 and which was used in i2 or I3. Call
2099 distribute_links to make a LOG_LINK from the next use of
2100 that destination. */
2101
2102 PATTERN (i3) = newpat;
38a448ca 2103 distribute_links (gen_rtx_INSN_LIST (VOIDmode, i3, NULL_RTX));
5089e22e
RS
2104
2105 /* I3 now uses what used to be its destination and which is
2106 now I2's destination. That means we need a LOG_LINK from
2107 I3 to I2. But we used to have one, so we still will.
2108
2109 However, some later insn might be using I2's dest and have
2110 a LOG_LINK pointing at I3. We must remove this link.
2111 The simplest way to remove the link is to point it at I1,
2112 which we know will be a NOTE. */
2113
2114 for (insn = NEXT_INSN (i3);
0d4d42c3
RK
2115 insn && (this_basic_block == n_basic_blocks - 1
2116 || insn != basic_block_head[this_basic_block + 1]);
5089e22e
RS
2117 insn = NEXT_INSN (insn))
2118 {
2119 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
472fbdd1 2120 && reg_referenced_p (ni2dest, PATTERN (insn)))
5089e22e
RS
2121 {
2122 for (link = LOG_LINKS (insn); link;
2123 link = XEXP (link, 1))
2124 if (XEXP (link, 0) == i3)
2125 XEXP (link, 0) = i1;
2126
2127 break;
2128 }
2129 }
2130 }
230d793d
RS
2131 }
2132
2133 /* Similarly, check for a case where we have a PARALLEL of two independent
2134 SETs but we started with three insns. In this case, we can do the sets
2135 as two separate insns. This case occurs when some SET allows two
2136 other insns to combine, but the destination of that SET is still live. */
2137
2138 else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0
2139 && GET_CODE (newpat) == PARALLEL
2140 && XVECLEN (newpat, 0) == 2
2141 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
2142 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != ZERO_EXTRACT
2143 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != STRICT_LOW_PART
2144 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
2145 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT
2146 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART
2147 && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 1)),
2148 INSN_CUID (i2))
2149 /* Don't pass sets with (USE (MEM ...)) dests to the following. */
2150 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != USE
2151 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != USE
2152 && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 1)),
2153 XVECEXP (newpat, 0, 0))
2154 && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 0)),
2155 XVECEXP (newpat, 0, 1)))
2156 {
e9a25f70
JL
2157 /* Normally, it doesn't matter which of the two is done first,
2158 but it does if one references cc0. In that case, it has to
2159 be first. */
2160#ifdef HAVE_cc0
2161 if (reg_referenced_p (cc0_rtx, XVECEXP (newpat, 0, 0)))
2162 {
2163 newi2pat = XVECEXP (newpat, 0, 0);
2164 newpat = XVECEXP (newpat, 0, 1);
2165 }
2166 else
2167#endif
2168 {
2169 newi2pat = XVECEXP (newpat, 0, 1);
2170 newpat = XVECEXP (newpat, 0, 0);
2171 }
230d793d 2172
a29ca9db
RK
2173 i2_code_number
2174 = recog_for_combine (&newi2pat, i2, &new_i2_notes, &i2_scratches);
2175
230d793d 2176 if (i2_code_number >= 0)
a29ca9db
RK
2177 insn_code_number
2178 = recog_for_combine (&newpat, i3, &new_i3_notes, &i3_scratches);
230d793d
RS
2179 }
2180
2181 /* If it still isn't recognized, fail and change things back the way they
2182 were. */
2183 if ((insn_code_number < 0
2184 /* Is the result a reasonable ASM_OPERANDS? */
2185 && (! check_asm_operands (newpat) || added_sets_1 || added_sets_2)))
2186 {
2187 undo_all ();
2188 return 0;
2189 }
2190
2191 /* If we had to change another insn, make sure it is valid also. */
2192 if (undobuf.other_insn)
2193 {
230d793d
RS
2194 rtx other_pat = PATTERN (undobuf.other_insn);
2195 rtx new_other_notes;
2196 rtx note, next;
2197
6e25d159
RK
2198 CLEAR_HARD_REG_SET (newpat_used_regs);
2199
a29ca9db
RK
2200 other_code_number
2201 = recog_for_combine (&other_pat, undobuf.other_insn,
2202 &new_other_notes, &other_scratches);
230d793d
RS
2203
2204 if (other_code_number < 0 && ! check_asm_operands (other_pat))
2205 {
2206 undo_all ();
2207 return 0;
2208 }
2209
2210 PATTERN (undobuf.other_insn) = other_pat;
2211
2212 /* If any of the notes in OTHER_INSN were REG_UNUSED, ensure that they
2213 are still valid. Then add any non-duplicate notes added by
2214 recog_for_combine. */
2215 for (note = REG_NOTES (undobuf.other_insn); note; note = next)
2216 {
2217 next = XEXP (note, 1);
2218
2219 if (REG_NOTE_KIND (note) == REG_UNUSED
2220 && ! reg_set_p (XEXP (note, 0), PATTERN (undobuf.other_insn)))
1a26b032
RK
2221 {
2222 if (GET_CODE (XEXP (note, 0)) == REG)
b1f21e0a 2223 REG_N_DEATHS (REGNO (XEXP (note, 0)))--;
1a26b032
RK
2224
2225 remove_note (undobuf.other_insn, note);
2226 }
230d793d
RS
2227 }
2228
1a26b032
RK
2229 for (note = new_other_notes; note; note = XEXP (note, 1))
2230 if (GET_CODE (XEXP (note, 0)) == REG)
b1f21e0a 2231 REG_N_DEATHS (REGNO (XEXP (note, 0)))++;
1a26b032 2232
230d793d 2233 distribute_notes (new_other_notes, undobuf.other_insn,
5f4f0e22 2234 undobuf.other_insn, NULL_RTX, NULL_RTX, NULL_RTX);
230d793d
RS
2235 }
2236
2237 /* We now know that we can do this combination. Merge the insns and
2238 update the status of registers and LOG_LINKS. */
2239
2240 {
2241 rtx i3notes, i2notes, i1notes = 0;
2242 rtx i3links, i2links, i1links = 0;
2243 rtx midnotes = 0;
230d793d 2244 register int regno;
ff3467a9
JW
2245 /* Compute which registers we expect to eliminate. newi2pat may be setting
2246 either i3dest or i2dest, so we must check it. Also, i1dest may be the
2247 same as i3dest, in which case newi2pat may be setting i1dest. */
2248 rtx elim_i2 = ((newi2pat && reg_set_p (i2dest, newi2pat))
2249 || i2dest_in_i2src || i2dest_in_i1src
230d793d 2250 ? 0 : i2dest);
ff3467a9
JW
2251 rtx elim_i1 = (i1 == 0 || i1dest_in_i1src
2252 || (newi2pat && reg_set_p (i1dest, newi2pat))
2253 ? 0 : i1dest);
230d793d
RS
2254
2255 /* Get the old REG_NOTES and LOG_LINKS from all our insns and
2256 clear them. */
2257 i3notes = REG_NOTES (i3), i3links = LOG_LINKS (i3);
2258 i2notes = REG_NOTES (i2), i2links = LOG_LINKS (i2);
2259 if (i1)
2260 i1notes = REG_NOTES (i1), i1links = LOG_LINKS (i1);
2261
2262 /* Ensure that we do not have something that should not be shared but
2263 occurs multiple times in the new insns. Check this by first
5089e22e 2264 resetting all the `used' flags and then copying anything is shared. */
230d793d
RS
2265
2266 reset_used_flags (i3notes);
2267 reset_used_flags (i2notes);
2268 reset_used_flags (i1notes);
2269 reset_used_flags (newpat);
2270 reset_used_flags (newi2pat);
2271 if (undobuf.other_insn)
2272 reset_used_flags (PATTERN (undobuf.other_insn));
2273
2274 i3notes = copy_rtx_if_shared (i3notes);
2275 i2notes = copy_rtx_if_shared (i2notes);
2276 i1notes = copy_rtx_if_shared (i1notes);
2277 newpat = copy_rtx_if_shared (newpat);
2278 newi2pat = copy_rtx_if_shared (newi2pat);
2279 if (undobuf.other_insn)
2280 reset_used_flags (PATTERN (undobuf.other_insn));
2281
2282 INSN_CODE (i3) = insn_code_number;
2283 PATTERN (i3) = newpat;
2284 if (undobuf.other_insn)
2285 INSN_CODE (undobuf.other_insn) = other_code_number;
2286
2287 /* We had one special case above where I2 had more than one set and
2288 we replaced a destination of one of those sets with the destination
2289 of I3. In that case, we have to update LOG_LINKS of insns later
176c9e6b
JW
2290 in this basic block. Note that this (expensive) case is rare.
2291
2292 Also, in this case, we must pretend that all REG_NOTEs for I2
2293 actually came from I3, so that REG_UNUSED notes from I2 will be
2294 properly handled. */
2295
2296 if (i3_subst_into_i2)
2297 {
2298 for (i = 0; i < XVECLEN (PATTERN (i2), 0); i++)
2299 if (GET_CODE (SET_DEST (XVECEXP (PATTERN (i2), 0, i))) == REG
2300 && SET_DEST (XVECEXP (PATTERN (i2), 0, i)) != i2dest
2301 && ! find_reg_note (i2, REG_UNUSED,
2302 SET_DEST (XVECEXP (PATTERN (i2), 0, i))))
2303 for (temp = NEXT_INSN (i2);
2304 temp && (this_basic_block == n_basic_blocks - 1
2305 || basic_block_head[this_basic_block] != temp);
2306 temp = NEXT_INSN (temp))
2307 if (temp != i3 && GET_RTX_CLASS (GET_CODE (temp)) == 'i')
2308 for (link = LOG_LINKS (temp); link; link = XEXP (link, 1))
2309 if (XEXP (link, 0) == i2)
2310 XEXP (link, 0) = i3;
2311
2312 if (i3notes)
2313 {
2314 rtx link = i3notes;
2315 while (XEXP (link, 1))
2316 link = XEXP (link, 1);
2317 XEXP (link, 1) = i2notes;
2318 }
2319 else
2320 i3notes = i2notes;
2321 i2notes = 0;
2322 }
230d793d
RS
2323
2324 LOG_LINKS (i3) = 0;
2325 REG_NOTES (i3) = 0;
2326 LOG_LINKS (i2) = 0;
2327 REG_NOTES (i2) = 0;
2328
2329 if (newi2pat)
2330 {
2331 INSN_CODE (i2) = i2_code_number;
2332 PATTERN (i2) = newi2pat;
2333 }
2334 else
2335 {
2336 PUT_CODE (i2, NOTE);
2337 NOTE_LINE_NUMBER (i2) = NOTE_INSN_DELETED;
2338 NOTE_SOURCE_FILE (i2) = 0;
2339 }
2340
2341 if (i1)
2342 {
2343 LOG_LINKS (i1) = 0;
2344 REG_NOTES (i1) = 0;
2345 PUT_CODE (i1, NOTE);
2346 NOTE_LINE_NUMBER (i1) = NOTE_INSN_DELETED;
2347 NOTE_SOURCE_FILE (i1) = 0;
2348 }
2349
2350 /* Get death notes for everything that is now used in either I3 or
6eb12cef
RK
2351 I2 and used to die in a previous insn. If we built two new
2352 patterns, move from I1 to I2 then I2 to I3 so that we get the
2353 proper movement on registers that I2 modifies. */
230d793d 2354
230d793d 2355 if (newi2pat)
6eb12cef
RK
2356 {
2357 move_deaths (newi2pat, NULL_RTX, INSN_CUID (i1), i2, &midnotes);
2358 move_deaths (newpat, newi2pat, INSN_CUID (i1), i3, &midnotes);
2359 }
2360 else
2361 move_deaths (newpat, NULL_RTX, i1 ? INSN_CUID (i1) : INSN_CUID (i2),
2362 i3, &midnotes);
230d793d
RS
2363
2364 /* Distribute all the LOG_LINKS and REG_NOTES from I1, I2, and I3. */
2365 if (i3notes)
5f4f0e22
CH
2366 distribute_notes (i3notes, i3, i3, newi2pat ? i2 : NULL_RTX,
2367 elim_i2, elim_i1);
230d793d 2368 if (i2notes)
5f4f0e22
CH
2369 distribute_notes (i2notes, i2, i3, newi2pat ? i2 : NULL_RTX,
2370 elim_i2, elim_i1);
230d793d 2371 if (i1notes)
5f4f0e22
CH
2372 distribute_notes (i1notes, i1, i3, newi2pat ? i2 : NULL_RTX,
2373 elim_i2, elim_i1);
230d793d 2374 if (midnotes)
5f4f0e22
CH
2375 distribute_notes (midnotes, NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
2376 elim_i2, elim_i1);
230d793d
RS
2377
2378 /* Distribute any notes added to I2 or I3 by recog_for_combine. We
2379 know these are REG_UNUSED and want them to go to the desired insn,
1a26b032
RK
2380 so we always pass it as i3. We have not counted the notes in
2381 reg_n_deaths yet, so we need to do so now. */
2382
230d793d 2383 if (newi2pat && new_i2_notes)
1a26b032
RK
2384 {
2385 for (temp = new_i2_notes; temp; temp = XEXP (temp, 1))
2386 if (GET_CODE (XEXP (temp, 0)) == REG)
b1f21e0a 2387 REG_N_DEATHS (REGNO (XEXP (temp, 0)))++;
1a26b032
RK
2388
2389 distribute_notes (new_i2_notes, i2, i2, NULL_RTX, NULL_RTX, NULL_RTX);
2390 }
2391
230d793d 2392 if (new_i3_notes)
1a26b032
RK
2393 {
2394 for (temp = new_i3_notes; temp; temp = XEXP (temp, 1))
2395 if (GET_CODE (XEXP (temp, 0)) == REG)
b1f21e0a 2396 REG_N_DEATHS (REGNO (XEXP (temp, 0)))++;
1a26b032
RK
2397
2398 distribute_notes (new_i3_notes, i3, i3, NULL_RTX, NULL_RTX, NULL_RTX);
2399 }
230d793d
RS
2400
2401 /* If I3DEST was used in I3SRC, it really died in I3. We may need to
e9a25f70
JL
2402 put a REG_DEAD note for it somewhere. If NEWI2PAT exists and sets
2403 I3DEST, the death must be somewhere before I2, not I3. If we passed I3
2404 in that case, it might delete I2. Similarly for I2 and I1.
1a26b032
RK
2405 Show an additional death due to the REG_DEAD note we make here. If
2406 we discard it in distribute_notes, we will decrement it again. */
d0ab8cd3 2407
230d793d 2408 if (i3dest_killed)
1a26b032
RK
2409 {
2410 if (GET_CODE (i3dest_killed) == REG)
b1f21e0a 2411 REG_N_DEATHS (REGNO (i3dest_killed))++;
1a26b032 2412
e9a25f70 2413 if (newi2pat && reg_set_p (i3dest_killed, newi2pat))
38a448ca
RH
2414 distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i3dest_killed,
2415 NULL_RTX),
ff3467a9 2416 NULL_RTX, i2, NULL_RTX, elim_i2, elim_i1);
e9a25f70 2417 else
38a448ca
RH
2418 distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i3dest_killed,
2419 NULL_RTX),
e9a25f70 2420 NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
ff3467a9 2421 elim_i2, elim_i1);
1a26b032 2422 }
58c8c593 2423
230d793d 2424 if (i2dest_in_i2src)
58c8c593 2425 {
1a26b032 2426 if (GET_CODE (i2dest) == REG)
b1f21e0a 2427 REG_N_DEATHS (REGNO (i2dest))++;
1a26b032 2428
58c8c593 2429 if (newi2pat && reg_set_p (i2dest, newi2pat))
38a448ca 2430 distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i2dest, NULL_RTX),
58c8c593
RK
2431 NULL_RTX, i2, NULL_RTX, NULL_RTX, NULL_RTX);
2432 else
38a448ca 2433 distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i2dest, NULL_RTX),
58c8c593
RK
2434 NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
2435 NULL_RTX, NULL_RTX);
2436 }
2437
230d793d 2438 if (i1dest_in_i1src)
58c8c593 2439 {
1a26b032 2440 if (GET_CODE (i1dest) == REG)
b1f21e0a 2441 REG_N_DEATHS (REGNO (i1dest))++;
1a26b032 2442
58c8c593 2443 if (newi2pat && reg_set_p (i1dest, newi2pat))
38a448ca 2444 distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i1dest, NULL_RTX),
58c8c593
RK
2445 NULL_RTX, i2, NULL_RTX, NULL_RTX, NULL_RTX);
2446 else
38a448ca 2447 distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i1dest, NULL_RTX),
58c8c593
RK
2448 NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
2449 NULL_RTX, NULL_RTX);
2450 }
230d793d
RS
2451
2452 distribute_links (i3links);
2453 distribute_links (i2links);
2454 distribute_links (i1links);
2455
2456 if (GET_CODE (i2dest) == REG)
2457 {
d0ab8cd3
RK
2458 rtx link;
2459 rtx i2_insn = 0, i2_val = 0, set;
2460
2461 /* The insn that used to set this register doesn't exist, and
2462 this life of the register may not exist either. See if one of
2463 I3's links points to an insn that sets I2DEST. If it does,
2464 that is now the last known value for I2DEST. If we don't update
2465 this and I2 set the register to a value that depended on its old
230d793d
RS
2466 contents, we will get confused. If this insn is used, thing
2467 will be set correctly in combine_instructions. */
d0ab8cd3
RK
2468
2469 for (link = LOG_LINKS (i3); link; link = XEXP (link, 1))
2470 if ((set = single_set (XEXP (link, 0))) != 0
2471 && rtx_equal_p (i2dest, SET_DEST (set)))
2472 i2_insn = XEXP (link, 0), i2_val = SET_SRC (set);
2473
2474 record_value_for_reg (i2dest, i2_insn, i2_val);
230d793d
RS
2475
2476 /* If the reg formerly set in I2 died only once and that was in I3,
2477 zero its use count so it won't make `reload' do any work. */
538fe8cd
ILT
2478 if (! added_sets_2
2479 && (newi2pat == 0 || ! reg_mentioned_p (i2dest, newi2pat))
2480 && ! i2dest_in_i2src)
230d793d
RS
2481 {
2482 regno = REGNO (i2dest);
b1f21e0a
MM
2483 REG_N_SETS (regno)--;
2484 if (REG_N_SETS (regno) == 0
8e08106d 2485 && ! REGNO_REG_SET_P (basic_block_live_at_start[0], regno))
b1f21e0a 2486 REG_N_REFS (regno) = 0;
230d793d
RS
2487 }
2488 }
2489
2490 if (i1 && GET_CODE (i1dest) == REG)
2491 {
d0ab8cd3
RK
2492 rtx link;
2493 rtx i1_insn = 0, i1_val = 0, set;
2494
2495 for (link = LOG_LINKS (i3); link; link = XEXP (link, 1))
2496 if ((set = single_set (XEXP (link, 0))) != 0
2497 && rtx_equal_p (i1dest, SET_DEST (set)))
2498 i1_insn = XEXP (link, 0), i1_val = SET_SRC (set);
2499
2500 record_value_for_reg (i1dest, i1_insn, i1_val);
2501
230d793d 2502 regno = REGNO (i1dest);
5af91171 2503 if (! added_sets_1 && ! i1dest_in_i1src)
230d793d 2504 {
b1f21e0a
MM
2505 REG_N_SETS (regno)--;
2506 if (REG_N_SETS (regno) == 0
8e08106d 2507 && ! REGNO_REG_SET_P (basic_block_live_at_start[0], regno))
b1f21e0a 2508 REG_N_REFS (regno) = 0;
230d793d
RS
2509 }
2510 }
2511
951553af 2512 /* Update reg_nonzero_bits et al for any changes that may have been made
22609cbf
RK
2513 to this insn. */
2514
951553af 2515 note_stores (newpat, set_nonzero_bits_and_sign_copies);
22609cbf 2516 if (newi2pat)
951553af 2517 note_stores (newi2pat, set_nonzero_bits_and_sign_copies);
22609cbf 2518
a29ca9db
RK
2519 /* If we added any (clobber (scratch)), add them to the max for a
2520 block. This is a very pessimistic calculation, since we might
2521 have had them already and this might not be the worst block, but
2522 it's not worth doing any better. */
2523 max_scratch += i3_scratches + i2_scratches + other_scratches;
2524
230d793d
RS
2525 /* If I3 is now an unconditional jump, ensure that it has a
2526 BARRIER following it since it may have initially been a
381ee8af 2527 conditional jump. It may also be the last nonnote insn. */
230d793d
RS
2528
2529 if ((GET_CODE (newpat) == RETURN || simplejump_p (i3))
381ee8af
TW
2530 && ((temp = next_nonnote_insn (i3)) == NULL_RTX
2531 || GET_CODE (temp) != BARRIER))
230d793d
RS
2532 emit_barrier_after (i3);
2533 }
2534
2535 combine_successes++;
2536
bcd49eb7
JW
2537 /* Clear this here, so that subsequent get_last_value calls are not
2538 affected. */
2539 subst_prev_insn = NULL_RTX;
2540
abe6e52f
RK
2541 if (added_links_insn
2542 && (newi2pat == 0 || INSN_CUID (added_links_insn) < INSN_CUID (i2))
2543 && INSN_CUID (added_links_insn) < INSN_CUID (i3))
2544 return added_links_insn;
2545 else
2546 return newi2pat ? i2 : i3;
230d793d
RS
2547}
2548\f
2549/* Undo all the modifications recorded in undobuf. */
2550
2551static void
2552undo_all ()
2553{
241cea85
RK
2554 struct undo *undo, *next;
2555
2556 for (undo = undobuf.undos; undo; undo = next)
7c046e4e 2557 {
241cea85
RK
2558 next = undo->next;
2559 if (undo->is_int)
2560 *undo->where.i = undo->old_contents.i;
7c046e4e 2561 else
241cea85
RK
2562 *undo->where.r = undo->old_contents.r;
2563
2564 undo->next = undobuf.frees;
2565 undobuf.frees = undo;
7c046e4e 2566 }
230d793d
RS
2567
2568 obfree (undobuf.storage);
845fc875 2569 undobuf.undos = undobuf.previous_undos = 0;
bcd49eb7
JW
2570
2571 /* Clear this here, so that subsequent get_last_value calls are not
2572 affected. */
2573 subst_prev_insn = NULL_RTX;
230d793d
RS
2574}
2575\f
/* Find the innermost point within the rtx at LOC, possibly LOC itself,
   where we have an arithmetic expression and return that point.  LOC will
   be inside INSN.

   try_combine will call this function to see if an insn can be split into
   two insns.  Returns a pointer into the rtx at the split point, or zero
   if no split point was found.  */

static rtx *
find_split_point (loc, insn)
     rtx *loc;
     rtx insn;
{
  rtx x = *loc;
  enum rtx_code code = GET_CODE (x);
  rtx *split;
  int len = 0, pos, unsignedp;
  rtx inner;

  /* First special-case some codes.  */
  switch (code)
    {
    case SUBREG:
#ifdef INSN_SCHEDULING
      /* If we are making a paradoxical SUBREG invalid, it becomes a split
	 point.  */
      if (GET_CODE (SUBREG_REG (x)) == MEM)
	return loc;
#endif
      return find_split_point (&SUBREG_REG (x), insn);

    case MEM:
#ifdef HAVE_lo_sum
      /* If we have (mem (const ..)) or (mem (symbol_ref ...)), split it
	 using LO_SUM and HIGH.  */
      if (GET_CODE (XEXP (x, 0)) == CONST
	  || GET_CODE (XEXP (x, 0)) == SYMBOL_REF)
	{
	  SUBST (XEXP (x, 0),
		 gen_rtx_combine (LO_SUM, Pmode,
				  gen_rtx_combine (HIGH, Pmode, XEXP (x, 0)),
				  XEXP (x, 0)));
	  return &XEXP (XEXP (x, 0), 0);
	}
#endif

      /* If we have a PLUS whose second operand is a constant and the
	 address is not valid, perhaps we can split it up using
	 the machine-specific way to split large constants.  We use
	 the first pseudo-reg (one of the virtual regs) as a placeholder;
	 it will not remain in the result.  */
      if (GET_CODE (XEXP (x, 0)) == PLUS
	  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
	  && ! memory_address_p (GET_MODE (x), XEXP (x, 0)))
	{
	  rtx reg = regno_reg_rtx[FIRST_PSEUDO_REGISTER];
	  rtx seq = split_insns (gen_rtx_SET (VOIDmode, reg, XEXP (x, 0)),
				 subst_insn);

	  /* This should have produced two insns, each of which sets our
	     placeholder.  If the source of the second is a valid address,
	     we can put both sources together and make a split point
	     in the middle.  */

	  if (seq && XVECLEN (seq, 0) == 2
	      && GET_CODE (XVECEXP (seq, 0, 0)) == INSN
	      && GET_CODE (PATTERN (XVECEXP (seq, 0, 0))) == SET
	      && SET_DEST (PATTERN (XVECEXP (seq, 0, 0))) == reg
	      && ! reg_mentioned_p (reg,
				    SET_SRC (PATTERN (XVECEXP (seq, 0, 0))))
	      && GET_CODE (XVECEXP (seq, 0, 1)) == INSN
	      && GET_CODE (PATTERN (XVECEXP (seq, 0, 1))) == SET
	      && SET_DEST (PATTERN (XVECEXP (seq, 0, 1))) == reg
	      && memory_address_p (GET_MODE (x),
				   SET_SRC (PATTERN (XVECEXP (seq, 0, 1)))))
	    {
	      rtx src1 = SET_SRC (PATTERN (XVECEXP (seq, 0, 0)));
	      rtx src2 = SET_SRC (PATTERN (XVECEXP (seq, 0, 1)));

	      /* Replace the placeholder in SRC2 with SRC1.  If we can
		 find where in SRC2 it was placed, that can become our
		 split point and we can replace this address with SRC2.
		 Just try two obvious places.  */

	      src2 = replace_rtx (src2, reg, src1);
	      split = 0;
	      if (XEXP (src2, 0) == src1)
		split = &XEXP (src2, 0);
	      else if (GET_RTX_FORMAT (GET_CODE (XEXP (src2, 0)))[0] == 'e'
		       && XEXP (XEXP (src2, 0), 0) == src1)
		split = &XEXP (XEXP (src2, 0), 0);

	      if (split)
		{
		  SUBST (XEXP (x, 0), src2);
		  return split;
		}
	    }

	  /* If that didn't work, perhaps the first operand is complex and
	     needs to be computed separately, so make a split point there.
	     This will occur on machines that just support REG + CONST
	     and have a constant moved through some previous computation.  */

	  else if (GET_RTX_CLASS (GET_CODE (XEXP (XEXP (x, 0), 0))) != 'o'
		   && ! (GET_CODE (XEXP (XEXP (x, 0), 0)) == SUBREG
			 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (XEXP (x, 0), 0))))
			     == 'o')))
	    return &XEXP (XEXP (x, 0), 0);
	}
      break;

    case SET:
#ifdef HAVE_cc0
      /* If SET_DEST is CC0 and SET_SRC is not an operand, a COMPARE, or a
	 ZERO_EXTRACT, the most likely reason why this doesn't match is that
	 we need to put the operand into a register.  So split at that
	 point.  */

      if (SET_DEST (x) == cc0_rtx
	  && GET_CODE (SET_SRC (x)) != COMPARE
	  && GET_CODE (SET_SRC (x)) != ZERO_EXTRACT
	  && GET_RTX_CLASS (GET_CODE (SET_SRC (x))) != 'o'
	  && ! (GET_CODE (SET_SRC (x)) == SUBREG
		&& GET_RTX_CLASS (GET_CODE (SUBREG_REG (SET_SRC (x)))) == 'o'))
	return &SET_SRC (x);
#endif

      /* See if we can split SET_SRC as it stands.  */
      split = find_split_point (&SET_SRC (x), insn);
      if (split && split != &SET_SRC (x))
	return split;

      /* See if we can split SET_DEST as it stands.  */
      split = find_split_point (&SET_DEST (x), insn);
      if (split && split != &SET_DEST (x))
	return split;

      /* See if this is a bitfield assignment with everything constant.  If
	 so, this is an IOR of an AND, so split it into that.  */
      if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
	  && (GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
	      <= HOST_BITS_PER_WIDE_INT)
	  && GET_CODE (XEXP (SET_DEST (x), 1)) == CONST_INT
	  && GET_CODE (XEXP (SET_DEST (x), 2)) == CONST_INT
	  && GET_CODE (SET_SRC (x)) == CONST_INT
	  && ((INTVAL (XEXP (SET_DEST (x), 1))
	       + INTVAL (XEXP (SET_DEST (x), 2)))
	      <= GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0))))
	  && ! side_effects_p (XEXP (SET_DEST (x), 0)))
	{
	  int pos = INTVAL (XEXP (SET_DEST (x), 2));
	  int len = INTVAL (XEXP (SET_DEST (x), 1));
	  int src = INTVAL (SET_SRC (x));
	  rtx dest = XEXP (SET_DEST (x), 0);
	  enum machine_mode mode = GET_MODE (dest);
	  unsigned HOST_WIDE_INT mask = ((HOST_WIDE_INT) 1 << len) - 1;

	  if (BITS_BIG_ENDIAN)
	    pos = GET_MODE_BITSIZE (mode) - len - pos;

	  /* If the source fills the whole field, a plain IOR suffices;
	     otherwise first mask out the field with an AND.  */
	  if (src == mask)
	    SUBST (SET_SRC (x),
		   gen_binary (IOR, mode, dest, GEN_INT (src << pos)));
	  else
	    SUBST (SET_SRC (x),
		   gen_binary (IOR, mode,
			       gen_binary (AND, mode, dest,
					   GEN_INT (~ (mask << pos)
						    & GET_MODE_MASK (mode))),
			       GEN_INT (src << pos)));

	  SUBST (SET_DEST (x), dest);

	  split = find_split_point (&SET_SRC (x), insn);
	  if (split && split != &SET_SRC (x))
	    return split;
	}

      /* Otherwise, see if this is an operation that we can split into two.
	 If so, try to split that.  */
      code = GET_CODE (SET_SRC (x));

      switch (code)
	{
	case AND:
	  /* If we are AND'ing with a large constant that is only a single
	     bit and the result is only being used in a context where we
	     need to know if it is zero or non-zero, replace it with a bit
	     extraction.  This will avoid the large constant, which might
	     have taken more than one insn to make.  If the constant were
	     not a valid argument to the AND but took only one insn to make,
	     this is no worse, but if it took more than one insn, it will
	     be better.  */

	  if (GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
	      && GET_CODE (XEXP (SET_SRC (x), 0)) == REG
	      && (pos = exact_log2 (INTVAL (XEXP (SET_SRC (x), 1)))) >= 7
	      && GET_CODE (SET_DEST (x)) == REG
	      && (split = find_single_use (SET_DEST (x), insn, NULL_PTR)) != 0
	      && (GET_CODE (*split) == EQ || GET_CODE (*split) == NE)
	      && XEXP (*split, 0) == SET_DEST (x)
	      && XEXP (*split, 1) == const0_rtx)
	    {
	      rtx extraction = make_extraction (GET_MODE (SET_DEST (x)),
						XEXP (SET_SRC (x), 0),
						pos, NULL_RTX, 1, 1, 0, 0);
	      if (extraction != 0)
		{
		  SUBST (SET_SRC (x), extraction);
		  return find_split_point (loc, insn);
		}
	    }
	  break;

	case NE:
	  /* If STORE_FLAG_VALUE is -1, this is (NE X 0) and only one bit of X
	     is known to be on, this can be converted into a NEG of a shift.  */
	  if (STORE_FLAG_VALUE == -1 && XEXP (SET_SRC (x), 1) == const0_rtx
	      && GET_MODE (SET_SRC (x)) == GET_MODE (XEXP (SET_SRC (x), 0))
	      && 1 <= (pos = exact_log2
		       (nonzero_bits (XEXP (SET_SRC (x), 0),
				      GET_MODE (XEXP (SET_SRC (x), 0))))))
	    {
	      enum machine_mode mode = GET_MODE (XEXP (SET_SRC (x), 0));

	      SUBST (SET_SRC (x),
		     gen_rtx_combine (NEG, mode,
				      gen_rtx_combine (LSHIFTRT, mode,
						       XEXP (SET_SRC (x), 0),
						       GEN_INT (pos))));

	      split = find_split_point (&SET_SRC (x), insn);
	      if (split && split != &SET_SRC (x))
		return split;
	    }
	  break;

	case SIGN_EXTEND:
	  inner = XEXP (SET_SRC (x), 0);

	  /* We can't optimize if either mode is a partial integer
	     mode as we don't know how many bits are significant
	     in those modes.  */
	  if (GET_MODE_CLASS (GET_MODE (inner)) == MODE_PARTIAL_INT
	      || GET_MODE_CLASS (GET_MODE (SET_SRC (x))) == MODE_PARTIAL_INT)
	    break;

	  /* Treat the sign-extension as an extraction of the full inner
	     width starting at bit zero; handled by the common code below.  */
	  pos = 0;
	  len = GET_MODE_BITSIZE (GET_MODE (inner));
	  unsignedp = 0;
	  break;

	case SIGN_EXTRACT:
	case ZERO_EXTRACT:
	  if (GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
	      && GET_CODE (XEXP (SET_SRC (x), 2)) == CONST_INT)
	    {
	      inner = XEXP (SET_SRC (x), 0);
	      len = INTVAL (XEXP (SET_SRC (x), 1));
	      pos = INTVAL (XEXP (SET_SRC (x), 2));

	      if (BITS_BIG_ENDIAN)
		pos = GET_MODE_BITSIZE (GET_MODE (inner)) - len - pos;
	      unsignedp = (code == ZERO_EXTRACT);
	    }
	  break;

	default:
	  break;
	}

      /* If one of the cases above set up an extraction (LEN nonzero),
	 rewrite it as explicit shifts so it can be split.  */
      if (len && pos >= 0 && pos + len <= GET_MODE_BITSIZE (GET_MODE (inner)))
	{
	  enum machine_mode mode = GET_MODE (SET_SRC (x));

	  /* For unsigned, we have a choice of a shift followed by an
	     AND or two shifts.  Use two shifts for field sizes where the
	     constant might be too large.  We assume here that we can
	     always at least get 8-bit constants in an AND insn, which is
	     true for every current RISC.  */

	  if (unsignedp && len <= 8)
	    {
	      SUBST (SET_SRC (x),
		     gen_rtx_combine
		     (AND, mode,
		      gen_rtx_combine (LSHIFTRT, mode,
				       gen_lowpart_for_combine (mode, inner),
				       GEN_INT (pos)),
		      GEN_INT (((HOST_WIDE_INT) 1 << len) - 1)));

	      split = find_split_point (&SET_SRC (x), insn);
	      if (split && split != &SET_SRC (x))
		return split;
	    }
	  else
	    {
	      SUBST (SET_SRC (x),
		     gen_rtx_combine
		     (unsignedp ? LSHIFTRT : ASHIFTRT, mode,
		      gen_rtx_combine (ASHIFT, mode,
				       gen_lowpart_for_combine (mode, inner),
				       GEN_INT (GET_MODE_BITSIZE (mode)
						- len - pos)),
		      GEN_INT (GET_MODE_BITSIZE (mode) - len)));

	      split = find_split_point (&SET_SRC (x), insn);
	      if (split && split != &SET_SRC (x))
		return split;
	    }
	}

      /* See if this is a simple operation with a constant as the second
	 operand.  It might be that this constant is out of range and hence
	 could be used as a split point.  */
      if ((GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '2'
	   || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == 'c'
	   || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '<')
	  && CONSTANT_P (XEXP (SET_SRC (x), 1))
	  && (GET_RTX_CLASS (GET_CODE (XEXP (SET_SRC (x), 0))) == 'o'
	      || (GET_CODE (XEXP (SET_SRC (x), 0)) == SUBREG
		  && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (SET_SRC (x), 0))))
		      == 'o'))))
	return &XEXP (SET_SRC (x), 1);

      /* Finally, see if this is a simple operation with its first operand
	 not in a register.  The operation might require this operand in a
	 register, so return it as a split point.  We can always do this
	 because if the first operand were another operation, we would have
	 already found it as a split point.  */
      if ((GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '2'
	   || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == 'c'
	   || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '<'
	   || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '1')
	  && ! register_operand (XEXP (SET_SRC (x), 0), VOIDmode))
	return &XEXP (SET_SRC (x), 0);

      return 0;

    case AND:
    case IOR:
      /* We write NOR as (and (not A) (not B)), but if we don't have a NOR,
	 it is better to write this as (not (ior A B)) so we can split it.
	 Similarly for IOR.  */
      if (GET_CODE (XEXP (x, 0)) == NOT && GET_CODE (XEXP (x, 1)) == NOT)
	{
	  SUBST (*loc,
		 gen_rtx_combine (NOT, GET_MODE (x),
				  gen_rtx_combine (code == IOR ? AND : IOR,
						   GET_MODE (x),
						   XEXP (XEXP (x, 0), 0),
						   XEXP (XEXP (x, 1), 0))));
	  return find_split_point (loc, insn);
	}

      /* Many RISC machines have a large set of logical insns.  If the
	 second operand is a NOT, put it first so we will try to split the
	 other operand first.  */
      if (GET_CODE (XEXP (x, 1)) == NOT)
	{
	  rtx tem = XEXP (x, 0);
	  SUBST (XEXP (x, 0), XEXP (x, 1));
	  SUBST (XEXP (x, 1), tem);
	}
      break;

    default:
      break;
    }

  /* Otherwise, select our actions depending on our rtx class.  */
  switch (GET_RTX_CLASS (code))
    {
    case 'b':			/* This is ZERO_EXTRACT and SIGN_EXTRACT.  */
    case '3':
      split = find_split_point (&XEXP (x, 2), insn);
      if (split)
	return split;
      /* ... fall through ...  */
    case '2':
    case 'c':
    case '<':
      split = find_split_point (&XEXP (x, 1), insn);
      if (split)
	return split;
      /* ... fall through ...  */
    case '1':
      /* Some machines have (and (shift ...) ...) insns.  If X is not
	 an AND, but XEXP (X, 0) is, use it as our split point.  */
      if (GET_CODE (x) != AND && GET_CODE (XEXP (x, 0)) == AND)
	return &XEXP (x, 0);

      split = find_split_point (&XEXP (x, 0), insn);
      if (split)
	return split;
      return loc;
    }

  /* Otherwise, we don't have a split point.  */
  return 0;
}
2977\f
/* Throughout X, replace FROM with TO, and return the result.
   The result is TO if X is FROM;
   otherwise the result is X, but its contents may have been modified.
   If they were modified, a record was made in undobuf so that
   undo_all will (among other things) return X to its original state.

   If the number of changes necessary is too much to record to undo,
   the excess changes are not made, so the result is invalid.
   The changes already made can still be undone.
   undobuf.num_undo is incremented for such changes, so by testing that
   the caller can tell whether the result is valid.

   `n_occurrences' is incremented each time FROM is replaced.

   IN_DEST is non-zero if we are processing the SET_DEST of a SET.

   UNIQUE_COPY is non-zero if each substitution must be unique.  We do this
   by copying if `n_occurrences' is non-zero.  */

static rtx
subst (x, from, to, in_dest, unique_copy)
     register rtx x, from, to;
     int in_dest;
     int unique_copy;
{
  register enum rtx_code code = GET_CODE (x);
  enum machine_mode op0_mode = VOIDmode;
  register char *fmt;
  register int len, i;
  rtx new;

/* Two expressions are equal if they are identical copies of a shared
   RTX or if they are both registers with the same register number
   and mode.  */

#define COMBINE_RTX_EQUAL_P(X,Y)			\
  ((X) == (Y)						\
   || (GET_CODE (X) == REG && GET_CODE (Y) == REG	\
       && REGNO (X) == REGNO (Y) && GET_MODE (X) == GET_MODE (Y)))

  if (! in_dest && COMBINE_RTX_EQUAL_P (x, from))
    {
      n_occurrences++;
      return (unique_copy && n_occurrences > 1 ? copy_rtx (to) : to);
    }

  /* If X and FROM are the same register but different modes, they will
     not have been seen as equal above.  However, flow.c will make a
     LOG_LINKS entry for that case.  If we do nothing, we will try to
     rerecognize our original insn and, when it succeeds, we will
     delete the feeding insn, which is incorrect.

     So force this insn not to match in this (rare) case.  */
  if (! in_dest && code == REG && GET_CODE (from) == REG
      && REGNO (x) == REGNO (from))
    return gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);

  /* If this is an object, we are done unless it is a MEM or LO_SUM, both
     of which may contain things that can be combined.  */
  if (code != MEM && code != LO_SUM && GET_RTX_CLASS (code) == 'o')
    return x;

  /* It is possible to have a subexpression appear twice in the insn.
     Suppose that FROM is a register that appears within TO.
     Then, after that subexpression has been scanned once by `subst',
     the second time it is scanned, TO may be found.  If we were
     to scan TO here, we would find FROM within it and create a
     self-referent rtl structure which is completely wrong.  */
  if (COMBINE_RTX_EQUAL_P (x, to))
    return to;

  len = GET_RTX_LENGTH (code);
  fmt = GET_RTX_FORMAT (code);

  /* We don't need to process a SET_DEST that is a register, CC0, or PC, so
     set up to skip this common case.  All other cases where we want to
     suppress replacing something inside a SET_SRC are handled via the
     IN_DEST operand.  */
  if (code == SET
      && (GET_CODE (SET_DEST (x)) == REG
	  || GET_CODE (SET_DEST (x)) == CC0
	  || GET_CODE (SET_DEST (x)) == PC))
    fmt = "ie";

  /* Get the mode of operand 0 in case X is now a SIGN_EXTEND of a
     constant.  */
  if (fmt[0] == 'e')
    op0_mode = GET_MODE (XEXP (x, 0));

  /* Walk every operand of X, substituting recursively.  'E' operands
     are rtx vectors; 'e' operands are single rtx's.  */
  for (i = 0; i < len; i++)
    {
      if (fmt[i] == 'E')
	{
	  register int j;
	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    {
	      if (COMBINE_RTX_EQUAL_P (XVECEXP (x, i, j), from))
		{
		  new = (unique_copy && n_occurrences ? copy_rtx (to) : to);
		  n_occurrences++;
		}
	      else
		{
		  new = subst (XVECEXP (x, i, j), from, to, 0, unique_copy);

		  /* If this substitution failed, this whole thing fails.  */
		  if (GET_CODE (new) == CLOBBER && XEXP (new, 0) == const0_rtx)
		    return new;
		}

	      SUBST (XVECEXP (x, i, j), new);
	    }
	}
      else if (fmt[i] == 'e')
	{
	  if (COMBINE_RTX_EQUAL_P (XEXP (x, i), from))
	    {
	      /* In general, don't install a subreg involving two modes not
		 tieable.  It can worsen register allocation, and can even
		 make invalid reload insns, since the reg inside may need to
		 be copied from in the outside mode, and that may be invalid
		 if it is an fp reg copied in integer mode.

		 We allow two exceptions to this: It is valid if it is inside
		 another SUBREG and the mode of that SUBREG and the mode of
		 the inside of TO is tieable and it is valid if X is a SET
		 that copies FROM to CC0.  */
	      if (GET_CODE (to) == SUBREG
		  && ! MODES_TIEABLE_P (GET_MODE (to),
					GET_MODE (SUBREG_REG (to)))
		  && ! (code == SUBREG
			&& MODES_TIEABLE_P (GET_MODE (x),
					    GET_MODE (SUBREG_REG (to))))
#ifdef HAVE_cc0
		  && ! (code == SET && i == 1 && XEXP (x, 0) == cc0_rtx)
#endif
		  )
		return gen_rtx_CLOBBER (VOIDmode, const0_rtx);

	      new = (unique_copy && n_occurrences ? copy_rtx (to) : to);
	      n_occurrences++;
	    }
	  else
	    /* If we are in a SET_DEST, suppress most cases unless we
	       have gone inside a MEM, in which case we want to
	       simplify the address.  We assume here that things that
	       are actually part of the destination have their inner
	       parts in the first expression.  This is true for SUBREG,
	       STRICT_LOW_PART, and ZERO_EXTRACT, which are the only
	       things aside from REG and MEM that should appear in a
	       SET_DEST.  */
	    new = subst (XEXP (x, i), from, to,
			 (((in_dest
			    && (code == SUBREG || code == STRICT_LOW_PART
				|| code == ZERO_EXTRACT))
			   || code == SET)
			  && i == 0), unique_copy);

	  /* If we found that we will have to reject this combination,
	     indicate that by returning the CLOBBER ourselves, rather than
	     an expression containing it.  This will speed things up as
	     well as prevent accidents where two CLOBBERs are considered
	     to be equal, thus producing an incorrect simplification.  */

	  if (GET_CODE (new) == CLOBBER && XEXP (new, 0) == const0_rtx)
	    return new;

	  SUBST (XEXP (x, i), new);
	}
    }

  /* Try to simplify X.  If the simplification changed the code, it is likely
     that further simplification will help, so loop, but limit the number
     of repetitions that will be performed.  */

  for (i = 0; i < 4; i++)
    {
      /* If X is sufficiently simple, don't bother trying to do anything
	 with it.  */
      if (code != CONST_INT && code != REG && code != CLOBBER)
	x = simplify_rtx (x, op0_mode, i == 3, in_dest);

      if (GET_CODE (x) == code)
	break;

      code = GET_CODE (x);

      /* We no longer know the original mode of operand 0 since we
	 have changed the form of X.  */
      op0_mode = VOIDmode;
    }

  return x;
}
3172\f
3173/* Simplify X, a piece of RTL. We just operate on the expression at the
3174 outer level; call `subst' to simplify recursively. Return the new
3175 expression.
3176
3177 OP0_MODE is the original mode of XEXP (x, 0); LAST is nonzero if this
3178 will be the iteration even if an expression with a code different from
3179 X is returned; IN_DEST is nonzero if we are inside a SET_DEST. */
eeb43d32 3180
8079805d
RK
3181static rtx
3182simplify_rtx (x, op0_mode, last, in_dest)
3183 rtx x;
3184 enum machine_mode op0_mode;
3185 int last;
3186 int in_dest;
3187{
3188 enum rtx_code code = GET_CODE (x);
3189 enum machine_mode mode = GET_MODE (x);
3190 rtx temp;
3191 int i;
d0ab8cd3 3192
230d793d
RS
3193 /* If this is a commutative operation, put a constant last and a complex
3194 expression first. We don't need to do this for comparisons here. */
3195 if (GET_RTX_CLASS (code) == 'c'
3196 && ((CONSTANT_P (XEXP (x, 0)) && GET_CODE (XEXP (x, 1)) != CONST_INT)
3197 || (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == 'o'
3198 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) != 'o')
3199 || (GET_CODE (XEXP (x, 0)) == SUBREG
3200 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0)))) == 'o'
3201 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) != 'o')))
3202 {
3203 temp = XEXP (x, 0);
3204 SUBST (XEXP (x, 0), XEXP (x, 1));
3205 SUBST (XEXP (x, 1), temp);
3206 }
3207
22609cbf
RK
3208 /* If this is a PLUS, MINUS, or MULT, and the first operand is the
3209 sign extension of a PLUS with a constant, reverse the order of the sign
3210 extension and the addition. Note that this not the same as the original
3211 code, but overflow is undefined for signed values. Also note that the
3212 PLUS will have been partially moved "inside" the sign-extension, so that
3213 the first operand of X will really look like:
3214 (ashiftrt (plus (ashift A C4) C5) C4).
3215 We convert this to
3216 (plus (ashiftrt (ashift A C4) C2) C4)
3217 and replace the first operand of X with that expression. Later parts
3218 of this function may simplify the expression further.
3219
3220 For example, if we start with (mult (sign_extend (plus A C1)) C2),
3221 we swap the SIGN_EXTEND and PLUS. Later code will apply the
3222 distributive law to produce (plus (mult (sign_extend X) C1) C3).
3223
3224 We do this to simplify address expressions. */
3225
3226 if ((code == PLUS || code == MINUS || code == MULT)
3227 && GET_CODE (XEXP (x, 0)) == ASHIFTRT
3228 && GET_CODE (XEXP (XEXP (x, 0), 0)) == PLUS
3229 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == ASHIFT
3230 && GET_CODE (XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 1)) == CONST_INT
3231 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3232 && XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 1) == XEXP (XEXP (x, 0), 1)
3233 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
3234 && (temp = simplify_binary_operation (ASHIFTRT, mode,
3235 XEXP (XEXP (XEXP (x, 0), 0), 1),
3236 XEXP (XEXP (x, 0), 1))) != 0)
3237 {
3238 rtx new
3239 = simplify_shift_const (NULL_RTX, ASHIFT, mode,
3240 XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 0),
3241 INTVAL (XEXP (XEXP (x, 0), 1)));
3242
3243 new = simplify_shift_const (NULL_RTX, ASHIFTRT, mode, new,
3244 INTVAL (XEXP (XEXP (x, 0), 1)));
3245
3246 SUBST (XEXP (x, 0), gen_binary (PLUS, mode, new, temp));
3247 }
3248
d0ab8cd3
RK
3249 /* If this is a simple operation applied to an IF_THEN_ELSE, try
3250 applying it to the arms of the IF_THEN_ELSE. This often simplifies
abe6e52f
RK
3251 things. Check for cases where both arms are testing the same
3252 condition.
3253
3254 Don't do anything if all operands are very simple. */
3255
3256 if (((GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c'
3257 || GET_RTX_CLASS (code) == '<')
3258 && ((GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) != 'o'
3259 && ! (GET_CODE (XEXP (x, 0)) == SUBREG
3260 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0))))
3261 == 'o')))
3262 || (GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) != 'o'
3263 && ! (GET_CODE (XEXP (x, 1)) == SUBREG
3264 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 1))))
3265 == 'o')))))
3266 || (GET_RTX_CLASS (code) == '1'
3267 && ((GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) != 'o'
3268 && ! (GET_CODE (XEXP (x, 0)) == SUBREG
3269 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0))))
3270 == 'o'))))))
d0ab8cd3 3271 {
abe6e52f
RK
3272 rtx cond, true, false;
3273
3274 cond = if_then_else_cond (x, &true, &false);
0802d516
RK
3275 if (cond != 0
3276 /* If everything is a comparison, what we have is highly unlikely
3277 to be simpler, so don't use it. */
3278 && ! (GET_RTX_CLASS (code) == '<'
3279 && (GET_RTX_CLASS (GET_CODE (true)) == '<'
3280 || GET_RTX_CLASS (GET_CODE (false)) == '<')))
abe6e52f
RK
3281 {
3282 rtx cop1 = const0_rtx;
3283 enum rtx_code cond_code = simplify_comparison (NE, &cond, &cop1);
3284
15448afc
RK
3285 if (cond_code == NE && GET_RTX_CLASS (GET_CODE (cond)) == '<')
3286 return x;
3287
9210df58
RK
3288 /* Simplify the alternative arms; this may collapse the true and
3289 false arms to store-flag values. */
3290 true = subst (true, pc_rtx, pc_rtx, 0, 0);
3291 false = subst (false, pc_rtx, pc_rtx, 0, 0);
3292
3293 /* Restarting if we generate a store-flag expression will cause
3294 us to loop. Just drop through in this case. */
3295
abe6e52f
RK
3296 /* If the result values are STORE_FLAG_VALUE and zero, we can
3297 just make the comparison operation. */
3298 if (true == const_true_rtx && false == const0_rtx)
3299 x = gen_binary (cond_code, mode, cond, cop1);
3300 else if (true == const0_rtx && false == const_true_rtx)
3301 x = gen_binary (reverse_condition (cond_code), mode, cond, cop1);
3302
3303 /* Likewise, we can make the negate of a comparison operation
3304 if the result values are - STORE_FLAG_VALUE and zero. */
3305 else if (GET_CODE (true) == CONST_INT
3306 && INTVAL (true) == - STORE_FLAG_VALUE
3307 && false == const0_rtx)
0c1c8ea6 3308 x = gen_unary (NEG, mode, mode,
abe6e52f
RK
3309 gen_binary (cond_code, mode, cond, cop1));
3310 else if (GET_CODE (false) == CONST_INT
3311 && INTVAL (false) == - STORE_FLAG_VALUE
3312 && true == const0_rtx)
0c1c8ea6 3313 x = gen_unary (NEG, mode, mode,
abe6e52f
RK
3314 gen_binary (reverse_condition (cond_code),
3315 mode, cond, cop1));
3316 else
38a448ca
RH
3317 return gen_rtx_IF_THEN_ELSE (mode,
3318 gen_binary (cond_code, VOIDmode,
3319 cond, cop1),
3320 true, false);
5109d49f 3321
9210df58
RK
3322 code = GET_CODE (x);
3323 op0_mode = VOIDmode;
abe6e52f 3324 }
d0ab8cd3
RK
3325 }
3326
230d793d
RS
3327 /* Try to fold this expression in case we have constants that weren't
3328 present before. */
3329 temp = 0;
3330 switch (GET_RTX_CLASS (code))
3331 {
3332 case '1':
3333 temp = simplify_unary_operation (code, mode, XEXP (x, 0), op0_mode);
3334 break;
3335 case '<':
3336 temp = simplify_relational_operation (code, op0_mode,
3337 XEXP (x, 0), XEXP (x, 1));
77fa0940
RK
3338#ifdef FLOAT_STORE_FLAG_VALUE
3339 if (temp != 0 && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
3340 temp = ((temp == const0_rtx) ? CONST0_RTX (GET_MODE (x))
3341 : immed_real_const_1 (FLOAT_STORE_FLAG_VALUE, GET_MODE (x)));
3342#endif
230d793d
RS
3343 break;
3344 case 'c':
3345 case '2':
3346 temp = simplify_binary_operation (code, mode, XEXP (x, 0), XEXP (x, 1));
3347 break;
3348 case 'b':
3349 case '3':
3350 temp = simplify_ternary_operation (code, mode, op0_mode, XEXP (x, 0),
3351 XEXP (x, 1), XEXP (x, 2));
3352 break;
3353 }
3354
3355 if (temp)
d0ab8cd3 3356 x = temp, code = GET_CODE (temp);
230d793d 3357
230d793d 3358 /* First see if we can apply the inverse distributive law. */
224eeff2
RK
3359 if (code == PLUS || code == MINUS
3360 || code == AND || code == IOR || code == XOR)
230d793d
RS
3361 {
3362 x = apply_distributive_law (x);
3363 code = GET_CODE (x);
3364 }
3365
3366 /* If CODE is an associative operation not otherwise handled, see if we
3367 can associate some operands. This can win if they are constants or
3368 if they are logically related (i.e. (a & b) & a. */
3369 if ((code == PLUS || code == MINUS
3370 || code == MULT || code == AND || code == IOR || code == XOR
3371 || code == DIV || code == UDIV
3372 || code == SMAX || code == SMIN || code == UMAX || code == UMIN)
3ad2180a 3373 && INTEGRAL_MODE_P (mode))
230d793d
RS
3374 {
3375 if (GET_CODE (XEXP (x, 0)) == code)
3376 {
3377 rtx other = XEXP (XEXP (x, 0), 0);
3378 rtx inner_op0 = XEXP (XEXP (x, 0), 1);
3379 rtx inner_op1 = XEXP (x, 1);
3380 rtx inner;
3381
3382 /* Make sure we pass the constant operand if any as the second
3383 one if this is a commutative operation. */
3384 if (CONSTANT_P (inner_op0) && GET_RTX_CLASS (code) == 'c')
3385 {
3386 rtx tem = inner_op0;
3387 inner_op0 = inner_op1;
3388 inner_op1 = tem;
3389 }
3390 inner = simplify_binary_operation (code == MINUS ? PLUS
3391 : code == DIV ? MULT
3392 : code == UDIV ? MULT
3393 : code,
3394 mode, inner_op0, inner_op1);
3395
3396 /* For commutative operations, try the other pair if that one
3397 didn't simplify. */
3398 if (inner == 0 && GET_RTX_CLASS (code) == 'c')
3399 {
3400 other = XEXP (XEXP (x, 0), 1);
3401 inner = simplify_binary_operation (code, mode,
3402 XEXP (XEXP (x, 0), 0),
3403 XEXP (x, 1));
3404 }
3405
3406 if (inner)
8079805d 3407 return gen_binary (code, mode, other, inner);
230d793d
RS
3408 }
3409 }
3410
3411 /* A little bit of algebraic simplification here. */
3412 switch (code)
3413 {
3414 case MEM:
3415 /* Ensure that our address has any ASHIFTs converted to MULT in case
3416 address-recognizing predicates are called later. */
3417 temp = make_compound_operation (XEXP (x, 0), MEM);
3418 SUBST (XEXP (x, 0), temp);
3419 break;
3420
3421 case SUBREG:
3422 /* (subreg:A (mem:B X) N) becomes a modified MEM unless the SUBREG
3423 is paradoxical. If we can't do that safely, then it becomes
3424 something nonsensical so that this combination won't take place. */
3425
3426 if (GET_CODE (SUBREG_REG (x)) == MEM
3427 && (GET_MODE_SIZE (mode)
3428 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
3429 {
3430 rtx inner = SUBREG_REG (x);
3431 int endian_offset = 0;
3432 /* Don't change the mode of the MEM
3433 if that would change the meaning of the address. */
3434 if (MEM_VOLATILE_P (SUBREG_REG (x))
3435 || mode_dependent_address_p (XEXP (inner, 0)))
38a448ca 3436 return gen_rtx_CLOBBER (mode, const0_rtx);
230d793d 3437
f76b9db2
ILT
3438 if (BYTES_BIG_ENDIAN)
3439 {
3440 if (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
3441 endian_offset += UNITS_PER_WORD - GET_MODE_SIZE (mode);
3442 if (GET_MODE_SIZE (GET_MODE (inner)) < UNITS_PER_WORD)
3443 endian_offset -= (UNITS_PER_WORD
3444 - GET_MODE_SIZE (GET_MODE (inner)));
3445 }
230d793d
RS
3446 /* Note if the plus_constant doesn't make a valid address
3447 then this combination won't be accepted. */
38a448ca
RH
3448 x = gen_rtx_MEM (mode,
3449 plus_constant (XEXP (inner, 0),
3450 (SUBREG_WORD (x) * UNITS_PER_WORD
3451 + endian_offset)));
230d793d
RS
3452 MEM_VOLATILE_P (x) = MEM_VOLATILE_P (inner);
3453 RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (inner);
3454 MEM_IN_STRUCT_P (x) = MEM_IN_STRUCT_P (inner);
3455 return x;
3456 }
3457
3458 /* If we are in a SET_DEST, these other cases can't apply. */
3459 if (in_dest)
3460 return x;
3461
3462 /* Changing mode twice with SUBREG => just change it once,
3463 or not at all if changing back to starting mode. */
3464 if (GET_CODE (SUBREG_REG (x)) == SUBREG)
3465 {
3466 if (mode == GET_MODE (SUBREG_REG (SUBREG_REG (x)))
3467 && SUBREG_WORD (x) == 0 && SUBREG_WORD (SUBREG_REG (x)) == 0)
3468 return SUBREG_REG (SUBREG_REG (x));
3469
3470 SUBST_INT (SUBREG_WORD (x),
3471 SUBREG_WORD (x) + SUBREG_WORD (SUBREG_REG (x)));
3472 SUBST (SUBREG_REG (x), SUBREG_REG (SUBREG_REG (x)));
3473 }
3474
3475 /* SUBREG of a hard register => just change the register number
3476 and/or mode. If the hard register is not valid in that mode,
26ecfc76
RK
3477 suppress this combination. If the hard register is the stack,
3478 frame, or argument pointer, leave this as a SUBREG. */
230d793d
RS
3479
3480 if (GET_CODE (SUBREG_REG (x)) == REG
26ecfc76
RK
3481 && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER
3482 && REGNO (SUBREG_REG (x)) != FRAME_POINTER_REGNUM
6d7096b0
DE
3483#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3484 && REGNO (SUBREG_REG (x)) != HARD_FRAME_POINTER_REGNUM
3485#endif
26ecfc76
RK
3486#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3487 && REGNO (SUBREG_REG (x)) != ARG_POINTER_REGNUM
3488#endif
3489 && REGNO (SUBREG_REG (x)) != STACK_POINTER_REGNUM)
230d793d
RS
3490 {
3491 if (HARD_REGNO_MODE_OK (REGNO (SUBREG_REG (x)) + SUBREG_WORD (x),
3492 mode))
38a448ca
RH
3493 return gen_rtx_REG (mode,
3494 REGNO (SUBREG_REG (x)) + SUBREG_WORD (x));
230d793d 3495 else
38a448ca 3496 return gen_rtx_CLOBBER (mode, const0_rtx);
230d793d
RS
3497 }
3498
3499 /* For a constant, try to pick up the part we want. Handle a full
a4bde0b1
RK
3500 word and low-order part. Only do this if we are narrowing
3501 the constant; if it is being widened, we have no idea what
3502 the extra bits will have been set to. */
230d793d
RS
3503
3504 if (CONSTANT_P (SUBREG_REG (x)) && op0_mode != VOIDmode
3505 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
3c99d5ff 3506 && GET_MODE_SIZE (op0_mode) > UNITS_PER_WORD
230d793d
RS
3507 && GET_MODE_CLASS (mode) == MODE_INT)
3508 {
3509 temp = operand_subword (SUBREG_REG (x), SUBREG_WORD (x),
5f4f0e22 3510 0, op0_mode);
230d793d
RS
3511 if (temp)
3512 return temp;
3513 }
3514
19808e22
RS
3515 /* If we want a subreg of a constant, at offset 0,
3516 take the low bits. On a little-endian machine, that's
3517 always valid. On a big-endian machine, it's valid
3c99d5ff 3518 only if the constant's mode fits in one word. Note that we
61b1bece 3519 cannot use subreg_lowpart_p since SUBREG_REG may be VOIDmode. */
3c99d5ff
RK
3520 if (CONSTANT_P (SUBREG_REG (x))
3521 && ((GET_MODE_SIZE (op0_mode) <= UNITS_PER_WORD
3522 || ! WORDS_BIG_ENDIAN)
3523 ? SUBREG_WORD (x) == 0
3524 : (SUBREG_WORD (x)
3525 == ((GET_MODE_SIZE (op0_mode)
3526 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD))
3527 / UNITS_PER_WORD)))
f82da7d2 3528 && GET_MODE_SIZE (mode) <= GET_MODE_SIZE (op0_mode)
f76b9db2
ILT
3529 && (! WORDS_BIG_ENDIAN
3530 || GET_MODE_BITSIZE (op0_mode) <= BITS_PER_WORD))
230d793d
RS
3531 return gen_lowpart_for_combine (mode, SUBREG_REG (x));
3532
b65c1b5b
RK
3533 /* A paradoxical SUBREG of a VOIDmode constant is the same constant,
3534 since we are saying that the high bits don't matter. */
3535 if (CONSTANT_P (SUBREG_REG (x)) && GET_MODE (SUBREG_REG (x)) == VOIDmode
3536 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (op0_mode))
3537 return SUBREG_REG (x);
3538
87e3e0c1
RK
3539 /* Note that we cannot do any narrowing for non-constants since
3540 we might have been counting on using the fact that some bits were
3541 zero. We now do this in the SET. */
3542
230d793d
RS
3543 break;
3544
3545 case NOT:
3546 /* (not (plus X -1)) can become (neg X). */
3547 if (GET_CODE (XEXP (x, 0)) == PLUS
3548 && XEXP (XEXP (x, 0), 1) == constm1_rtx)
8079805d 3549 return gen_rtx_combine (NEG, mode, XEXP (XEXP (x, 0), 0));
230d793d
RS
3550
3551 /* Similarly, (not (neg X)) is (plus X -1). */
3552 if (GET_CODE (XEXP (x, 0)) == NEG)
8079805d
RK
3553 return gen_rtx_combine (PLUS, mode, XEXP (XEXP (x, 0), 0),
3554 constm1_rtx);
230d793d 3555
d0ab8cd3
RK
3556 /* (not (xor X C)) for C constant is (xor X D) with D = ~ C. */
3557 if (GET_CODE (XEXP (x, 0)) == XOR
3558 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3559 && (temp = simplify_unary_operation (NOT, mode,
3560 XEXP (XEXP (x, 0), 1),
3561 mode)) != 0)
787745f5 3562 return gen_binary (XOR, mode, XEXP (XEXP (x, 0), 0), temp);
d0ab8cd3 3563
230d793d
RS
3564 /* (not (ashift 1 X)) is (rotate ~1 X). We used to do this for operands
3565 other than 1, but that is not valid. We could do a similar
3566 simplification for (not (lshiftrt C X)) where C is just the sign bit,
3567 but this doesn't seem common enough to bother with. */
3568 if (GET_CODE (XEXP (x, 0)) == ASHIFT
3569 && XEXP (XEXP (x, 0), 0) == const1_rtx)
38a448ca
RH
3570 return gen_rtx_ROTATE (mode, gen_unary (NOT, mode, mode, const1_rtx),
3571 XEXP (XEXP (x, 0), 1));
230d793d
RS
3572
3573 if (GET_CODE (XEXP (x, 0)) == SUBREG
3574 && subreg_lowpart_p (XEXP (x, 0))
3575 && (GET_MODE_SIZE (GET_MODE (XEXP (x, 0)))
3576 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (x, 0)))))
3577 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == ASHIFT
3578 && XEXP (SUBREG_REG (XEXP (x, 0)), 0) == const1_rtx)
3579 {
3580 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (XEXP (x, 0)));
3581
38a448ca
RH
3582 x = gen_rtx_ROTATE (inner_mode,
3583 gen_unary (NOT, inner_mode, inner_mode,
3584 const1_rtx),
3585 XEXP (SUBREG_REG (XEXP (x, 0)), 1));
8079805d 3586 return gen_lowpart_for_combine (mode, x);
230d793d
RS
3587 }
3588
0802d516
RK
3589 /* If STORE_FLAG_VALUE is -1, (not (comparison foo bar)) can be done by
3590 reversing the comparison code if valid. */
3591 if (STORE_FLAG_VALUE == -1
3592 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
230d793d
RS
3593 && reversible_comparison_p (XEXP (x, 0)))
3594 return gen_rtx_combine (reverse_condition (GET_CODE (XEXP (x, 0))),
3595 mode, XEXP (XEXP (x, 0), 0),
3596 XEXP (XEXP (x, 0), 1));
500c518b
RK
3597
3598 /* (ashiftrt foo C) where C is the number of bits in FOO minus 1
0802d516
RK
3599 is (lt foo (const_int 0)) if STORE_FLAG_VALUE is -1, so we can
3600 perform the above simplification. */
500c518b 3601
0802d516
RK
3602 if (STORE_FLAG_VALUE == -1
3603 && XEXP (x, 1) == const1_rtx
500c518b
RK
3604 && GET_CODE (XEXP (x, 0)) == ASHIFTRT
3605 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3606 && INTVAL (XEXP (XEXP (x, 0), 1)) == GET_MODE_BITSIZE (mode) - 1)
3607 return gen_rtx_combine (GE, mode, XEXP (XEXP (x, 0), 0), const0_rtx);
230d793d
RS
3608
3609 /* Apply De Morgan's laws to reduce number of patterns for machines
3610 with negating logical insns (and-not, nand, etc.). If result has
3611 only one NOT, put it first, since that is how the patterns are
3612 coded. */
3613
3614 if (GET_CODE (XEXP (x, 0)) == IOR || GET_CODE (XEXP (x, 0)) == AND)
3615 {
3616 rtx in1 = XEXP (XEXP (x, 0), 0), in2 = XEXP (XEXP (x, 0), 1);
3617
3618 if (GET_CODE (in1) == NOT)
3619 in1 = XEXP (in1, 0);
3620 else
3621 in1 = gen_rtx_combine (NOT, GET_MODE (in1), in1);
3622
3623 if (GET_CODE (in2) == NOT)
3624 in2 = XEXP (in2, 0);
3625 else if (GET_CODE (in2) == CONST_INT
5f4f0e22
CH
3626 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3627 in2 = GEN_INT (GET_MODE_MASK (mode) & ~ INTVAL (in2));
230d793d
RS
3628 else
3629 in2 = gen_rtx_combine (NOT, GET_MODE (in2), in2);
3630
3631 if (GET_CODE (in2) == NOT)
3632 {
3633 rtx tem = in2;
3634 in2 = in1; in1 = tem;
3635 }
3636
8079805d
RK
3637 return gen_rtx_combine (GET_CODE (XEXP (x, 0)) == IOR ? AND : IOR,
3638 mode, in1, in2);
230d793d
RS
3639 }
3640 break;
3641
3642 case NEG:
3643 /* (neg (plus X 1)) can become (not X). */
3644 if (GET_CODE (XEXP (x, 0)) == PLUS
3645 && XEXP (XEXP (x, 0), 1) == const1_rtx)
8079805d 3646 return gen_rtx_combine (NOT, mode, XEXP (XEXP (x, 0), 0));
230d793d
RS
3647
3648 /* Similarly, (neg (not X)) is (plus X 1). */
3649 if (GET_CODE (XEXP (x, 0)) == NOT)
8079805d 3650 return plus_constant (XEXP (XEXP (x, 0), 0), 1);
230d793d 3651
230d793d
RS
3652 /* (neg (minus X Y)) can become (minus Y X). */
3653 if (GET_CODE (XEXP (x, 0)) == MINUS
3ad2180a 3654 && (! FLOAT_MODE_P (mode)
0f41302f 3655 /* x-y != -(y-x) with IEEE floating point. */
7e2a0d8e
RK
3656 || TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3657 || flag_fast_math))
8079805d
RK
3658 return gen_binary (MINUS, mode, XEXP (XEXP (x, 0), 1),
3659 XEXP (XEXP (x, 0), 0));
230d793d 3660
0f41302f 3661 /* (neg (xor A 1)) is (plus A -1) if A is known to be either 0 or 1. */
d0ab8cd3 3662 if (GET_CODE (XEXP (x, 0)) == XOR && XEXP (XEXP (x, 0), 1) == const1_rtx
951553af 3663 && nonzero_bits (XEXP (XEXP (x, 0), 0), mode) == 1)
8079805d 3664 return gen_binary (PLUS, mode, XEXP (XEXP (x, 0), 0), constm1_rtx);
d0ab8cd3 3665
230d793d
RS
3666 /* NEG commutes with ASHIFT since it is multiplication. Only do this
3667 if we can then eliminate the NEG (e.g.,
3668 if the operand is a constant). */
3669
3670 if (GET_CODE (XEXP (x, 0)) == ASHIFT)
3671 {
3672 temp = simplify_unary_operation (NEG, mode,
3673 XEXP (XEXP (x, 0), 0), mode);
3674 if (temp)
3675 {
3676 SUBST (XEXP (XEXP (x, 0), 0), temp);
3677 return XEXP (x, 0);
3678 }
3679 }
3680
3681 temp = expand_compound_operation (XEXP (x, 0));
3682
3683 /* For C equal to the width of MODE minus 1, (neg (ashiftrt X C)) can be
3684 replaced by (lshiftrt X C). This will convert
3685 (neg (sign_extract X 1 Y)) to (zero_extract X 1 Y). */
3686
3687 if (GET_CODE (temp) == ASHIFTRT
3688 && GET_CODE (XEXP (temp, 1)) == CONST_INT
3689 && INTVAL (XEXP (temp, 1)) == GET_MODE_BITSIZE (mode) - 1)
8079805d
RK
3690 return simplify_shift_const (temp, LSHIFTRT, mode, XEXP (temp, 0),
3691 INTVAL (XEXP (temp, 1)));
230d793d 3692
951553af 3693 /* If X has only a single bit that might be nonzero, say, bit I, convert
230d793d
RS
3694 (neg X) to (ashiftrt (ashift X C-I) C-I) where C is the bitsize of
3695 MODE minus 1. This will convert (neg (zero_extract X 1 Y)) to
3696 (sign_extract X 1 Y). But only do this if TEMP isn't a register
3697 or a SUBREG of one since we'd be making the expression more
3698 complex if it was just a register. */
3699
3700 if (GET_CODE (temp) != REG
3701 && ! (GET_CODE (temp) == SUBREG
3702 && GET_CODE (SUBREG_REG (temp)) == REG)
951553af 3703 && (i = exact_log2 (nonzero_bits (temp, mode))) >= 0)
230d793d
RS
3704 {
3705 rtx temp1 = simplify_shift_const
5f4f0e22
CH
3706 (NULL_RTX, ASHIFTRT, mode,
3707 simplify_shift_const (NULL_RTX, ASHIFT, mode, temp,
230d793d
RS
3708 GET_MODE_BITSIZE (mode) - 1 - i),
3709 GET_MODE_BITSIZE (mode) - 1 - i);
3710
3711 /* If all we did was surround TEMP with the two shifts, we
3712 haven't improved anything, so don't use it. Otherwise,
3713 we are better off with TEMP1. */
3714 if (GET_CODE (temp1) != ASHIFTRT
3715 || GET_CODE (XEXP (temp1, 0)) != ASHIFT
3716 || XEXP (XEXP (temp1, 0), 0) != temp)
8079805d 3717 return temp1;
230d793d
RS
3718 }
3719 break;
3720
2ca9ae17 3721 case TRUNCATE:
e30fb98f
JL
3722 /* We can't handle truncation to a partial integer mode here
3723 because we don't know the real bitsize of the partial
3724 integer mode. */
3725 if (GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
3726 break;
3727
2ca9ae17
JW
3728 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3729 SUBST (XEXP (x, 0),
3730 force_to_mode (XEXP (x, 0), GET_MODE (XEXP (x, 0)),
3731 GET_MODE_MASK (mode), NULL_RTX, 0));
0f13a422
ILT
3732
3733 /* (truncate:SI ({sign,zero}_extend:DI foo:SI)) == foo:SI. */
3734 if ((GET_CODE (XEXP (x, 0)) == SIGN_EXTEND
3735 || GET_CODE (XEXP (x, 0)) == ZERO_EXTEND)
3736 && GET_MODE (XEXP (XEXP (x, 0), 0)) == mode)
3737 return XEXP (XEXP (x, 0), 0);
3738
3739 /* (truncate:SI (OP:DI ({sign,zero}_extend:DI foo:SI))) is
3740 (OP:SI foo:SI) if OP is NEG or ABS. */
3741 if ((GET_CODE (XEXP (x, 0)) == ABS
3742 || GET_CODE (XEXP (x, 0)) == NEG)
3743 && (GET_CODE (XEXP (XEXP (x, 0), 0)) == SIGN_EXTEND
3744 || GET_CODE (XEXP (XEXP (x, 0), 0)) == ZERO_EXTEND)
3745 && GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == mode)
3746 return gen_unary (GET_CODE (XEXP (x, 0)), mode, mode,
3747 XEXP (XEXP (XEXP (x, 0), 0), 0));
3748
3749 /* (truncate:SI (subreg:DI (truncate:SI X) 0)) is
3750 (truncate:SI x). */
3751 if (GET_CODE (XEXP (x, 0)) == SUBREG
3752 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == TRUNCATE
3753 && subreg_lowpart_p (XEXP (x, 0)))
3754 return SUBREG_REG (XEXP (x, 0));
3755
3756 /* If we know that the value is already truncated, we can
3757 replace the TRUNCATE with a SUBREG. */
3758 if (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) <= HOST_BITS_PER_WIDE_INT
3759 && (nonzero_bits (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
3760 &~ GET_MODE_MASK (mode)) == 0)
3761 return gen_lowpart_for_combine (mode, XEXP (x, 0));
3762
3763 /* A truncate of a comparison can be replaced with a subreg if
3764 STORE_FLAG_VALUE permits. This is like the previous test,
3765 but it works even if the comparison is done in a mode larger
3766 than HOST_BITS_PER_WIDE_INT. */
3767 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3768 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
3769 && ((HOST_WIDE_INT) STORE_FLAG_VALUE &~ GET_MODE_MASK (mode)) == 0)
3770 return gen_lowpart_for_combine (mode, XEXP (x, 0));
3771
3772 /* Similarly, a truncate of a register whose value is a
3773 comparison can be replaced with a subreg if STORE_FLAG_VALUE
3774 permits. */
3775 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3776 && ((HOST_WIDE_INT) STORE_FLAG_VALUE &~ GET_MODE_MASK (mode)) == 0
3777 && (temp = get_last_value (XEXP (x, 0)))
3778 && GET_RTX_CLASS (GET_CODE (temp)) == '<')
3779 return gen_lowpart_for_combine (mode, XEXP (x, 0));
3780
2ca9ae17
JW
3781 break;
3782
230d793d
RS
3783 case FLOAT_TRUNCATE:
3784 /* (float_truncate:SF (float_extend:DF foo:SF)) = foo:SF. */
3785 if (GET_CODE (XEXP (x, 0)) == FLOAT_EXTEND
3786 && GET_MODE (XEXP (XEXP (x, 0), 0)) == mode)
3787 return XEXP (XEXP (x, 0), 0);
4635f748
RK
3788
3789 /* (float_truncate:SF (OP:DF (float_extend:DF foo:sf))) is
3790 (OP:SF foo:SF) if OP is NEG or ABS. */
3791 if ((GET_CODE (XEXP (x, 0)) == ABS
3792 || GET_CODE (XEXP (x, 0)) == NEG)
3793 && GET_CODE (XEXP (XEXP (x, 0), 0)) == FLOAT_EXTEND
3794 && GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == mode)
0c1c8ea6
RK
3795 return gen_unary (GET_CODE (XEXP (x, 0)), mode, mode,
3796 XEXP (XEXP (XEXP (x, 0), 0), 0));
1d12df72
RK
3797
3798 /* (float_truncate:SF (subreg:DF (float_truncate:SF X) 0))
3799 is (float_truncate:SF x). */
3800 if (GET_CODE (XEXP (x, 0)) == SUBREG
3801 && subreg_lowpart_p (XEXP (x, 0))
3802 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == FLOAT_TRUNCATE)
3803 return SUBREG_REG (XEXP (x, 0));
230d793d
RS
3804 break;
3805
3806#ifdef HAVE_cc0
3807 case COMPARE:
3808 /* Convert (compare FOO (const_int 0)) to FOO unless we aren't
3809 using cc0, in which case we want to leave it as a COMPARE
3810 so we can distinguish it from a register-register-copy. */
3811 if (XEXP (x, 1) == const0_rtx)
3812 return XEXP (x, 0);
3813
3814 /* In IEEE floating point, x-0 is not the same as x. */
3815 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
7e2a0d8e
RK
3816 || ! FLOAT_MODE_P (GET_MODE (XEXP (x, 0)))
3817 || flag_fast_math)
230d793d
RS
3818 && XEXP (x, 1) == CONST0_RTX (GET_MODE (XEXP (x, 0))))
3819 return XEXP (x, 0);
3820 break;
3821#endif
3822
3823 case CONST:
3824 /* (const (const X)) can become (const X). Do it this way rather than
3825 returning the inner CONST since CONST can be shared with a
3826 REG_EQUAL note. */
3827 if (GET_CODE (XEXP (x, 0)) == CONST)
3828 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
3829 break;
3830
3831#ifdef HAVE_lo_sum
3832 case LO_SUM:
3833 /* Convert (lo_sum (high FOO) FOO) to FOO. This is necessary so we
3834 can add in an offset. find_split_point will split this address up
3835 again if it doesn't match. */
3836 if (GET_CODE (XEXP (x, 0)) == HIGH
3837 && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1)))
3838 return XEXP (x, 1);
3839 break;
3840#endif
3841
3842 case PLUS:
3843 /* If we have (plus (plus (A const) B)), associate it so that CONST is
3844 outermost. That's because that's the way indexed addresses are
3845 supposed to appear. This code used to check many more cases, but
3846 they are now checked elsewhere. */
3847 if (GET_CODE (XEXP (x, 0)) == PLUS
3848 && CONSTANT_ADDRESS_P (XEXP (XEXP (x, 0), 1)))
3849 return gen_binary (PLUS, mode,
3850 gen_binary (PLUS, mode, XEXP (XEXP (x, 0), 0),
3851 XEXP (x, 1)),
3852 XEXP (XEXP (x, 0), 1));
3853
3854 /* (plus (xor (and <foo> (const_int pow2 - 1)) <c>) <-c>)
3855 when c is (const_int (pow2 + 1) / 2) is a sign extension of a
3856 bit-field and can be replaced by either a sign_extend or a
3857 sign_extract. The `and' may be a zero_extend. */
3858 if (GET_CODE (XEXP (x, 0)) == XOR
3859 && GET_CODE (XEXP (x, 1)) == CONST_INT
3860 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3861 && INTVAL (XEXP (x, 1)) == - INTVAL (XEXP (XEXP (x, 0), 1))
3862 && (i = exact_log2 (INTVAL (XEXP (XEXP (x, 0), 1)))) >= 0
5f4f0e22 3863 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
230d793d
RS
3864 && ((GET_CODE (XEXP (XEXP (x, 0), 0)) == AND
3865 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
3866 && (INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1))
5f4f0e22 3867 == ((HOST_WIDE_INT) 1 << (i + 1)) - 1))
230d793d
RS
3868 || (GET_CODE (XEXP (XEXP (x, 0), 0)) == ZERO_EXTEND
3869 && (GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)))
3870 == i + 1))))
8079805d
RK
3871 return simplify_shift_const
3872 (NULL_RTX, ASHIFTRT, mode,
3873 simplify_shift_const (NULL_RTX, ASHIFT, mode,
3874 XEXP (XEXP (XEXP (x, 0), 0), 0),
3875 GET_MODE_BITSIZE (mode) - (i + 1)),
3876 GET_MODE_BITSIZE (mode) - (i + 1));
230d793d 3877
bc0776c6
RK
3878 /* (plus (comparison A B) C) can become (neg (rev-comp A B)) if
3879 C is 1 and STORE_FLAG_VALUE is -1 or if C is -1 and STORE_FLAG_VALUE
3880 is 1. This produces better code than the alternative immediately
3881 below. */
3882 if (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
3883 && reversible_comparison_p (XEXP (x, 0))
3884 && ((STORE_FLAG_VALUE == -1 && XEXP (x, 1) == const1_rtx)
3885 || (STORE_FLAG_VALUE == 1 && XEXP (x, 1) == constm1_rtx)))
8079805d 3886 return
0c1c8ea6 3887 gen_unary (NEG, mode, mode,
8079805d
RK
3888 gen_binary (reverse_condition (GET_CODE (XEXP (x, 0))),
3889 mode, XEXP (XEXP (x, 0), 0),
3890 XEXP (XEXP (x, 0), 1)));
bc0776c6
RK
3891
3892 /* If only the low-order bit of X is possibly nonzero, (plus x -1)
230d793d
RS
3893 can become (ashiftrt (ashift (xor x 1) C) C) where C is
3894 the bitsize of the mode - 1. This allows simplification of
3895 "a = (b & 8) == 0;" */
3896 if (XEXP (x, 1) == constm1_rtx
3897 && GET_CODE (XEXP (x, 0)) != REG
3898 && ! (GET_CODE (XEXP (x,0)) == SUBREG
3899 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG)
951553af 3900 && nonzero_bits (XEXP (x, 0), mode) == 1)
8079805d
RK
3901 return simplify_shift_const (NULL_RTX, ASHIFTRT, mode,
3902 simplify_shift_const (NULL_RTX, ASHIFT, mode,
3903 gen_rtx_combine (XOR, mode,
3904 XEXP (x, 0), const1_rtx),
3905 GET_MODE_BITSIZE (mode) - 1),
3906 GET_MODE_BITSIZE (mode) - 1);
02f4ada4
RK
3907
3908 /* If we are adding two things that have no bits in common, convert
3909 the addition into an IOR. This will often be further simplified,
3910 for example in cases like ((a & 1) + (a & 2)), which can
3911 become a & 3. */
3912
ac49a949 3913 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
951553af
RK
3914 && (nonzero_bits (XEXP (x, 0), mode)
3915 & nonzero_bits (XEXP (x, 1), mode)) == 0)
8079805d 3916 return gen_binary (IOR, mode, XEXP (x, 0), XEXP (x, 1));
230d793d
RS
3917 break;
3918
3919 case MINUS:
0802d516
RK
3920 /* If STORE_FLAG_VALUE is 1, (minus 1 (comparison foo bar)) can be done
3921 by reversing the comparison code if valid. */
3922 if (STORE_FLAG_VALUE == 1
3923 && XEXP (x, 0) == const1_rtx
5109d49f
RK
3924 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<'
3925 && reversible_comparison_p (XEXP (x, 1)))
3926 return gen_binary (reverse_condition (GET_CODE (XEXP (x, 1))),
3927 mode, XEXP (XEXP (x, 1), 0),
3928 XEXP (XEXP (x, 1), 1));
5109d49f 3929
230d793d
RS
3930 /* (minus <foo> (and <foo> (const_int -pow2))) becomes
3931 (and <foo> (const_int pow2-1)) */
3932 if (GET_CODE (XEXP (x, 1)) == AND
3933 && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
3934 && exact_log2 (- INTVAL (XEXP (XEXP (x, 1), 1))) >= 0
3935 && rtx_equal_p (XEXP (XEXP (x, 1), 0), XEXP (x, 0)))
8079805d
RK
3936 return simplify_and_const_int (NULL_RTX, mode, XEXP (x, 0),
3937 - INTVAL (XEXP (XEXP (x, 1), 1)) - 1);
7bef8680
RK
3938
3939 /* Canonicalize (minus A (plus B C)) to (minus (minus A B) C) for
3940 integers. */
3941 if (GET_CODE (XEXP (x, 1)) == PLUS && INTEGRAL_MODE_P (mode))
8079805d
RK
3942 return gen_binary (MINUS, mode,
3943 gen_binary (MINUS, mode, XEXP (x, 0),
3944 XEXP (XEXP (x, 1), 0)),
3945 XEXP (XEXP (x, 1), 1));
230d793d
RS
3946 break;
3947
3948 case MULT:
3949 /* If we have (mult (plus A B) C), apply the distributive law and then
3950 the inverse distributive law to see if things simplify. This
3951 occurs mostly in addresses, often when unrolling loops. */
3952
3953 if (GET_CODE (XEXP (x, 0)) == PLUS)
3954 {
3955 x = apply_distributive_law
3956 (gen_binary (PLUS, mode,
3957 gen_binary (MULT, mode,
3958 XEXP (XEXP (x, 0), 0), XEXP (x, 1)),
3959 gen_binary (MULT, mode,
3960 XEXP (XEXP (x, 0), 1), XEXP (x, 1))));
3961
3962 if (GET_CODE (x) != MULT)
8079805d 3963 return x;
230d793d 3964 }
230d793d
RS
3965 break;
3966
3967 case UDIV:
3968 /* If this is a divide by a power of two, treat it as a shift if
3969 its first operand is a shift. */
3970 if (GET_CODE (XEXP (x, 1)) == CONST_INT
3971 && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0
3972 && (GET_CODE (XEXP (x, 0)) == ASHIFT
3973 || GET_CODE (XEXP (x, 0)) == LSHIFTRT
3974 || GET_CODE (XEXP (x, 0)) == ASHIFTRT
3975 || GET_CODE (XEXP (x, 0)) == ROTATE
3976 || GET_CODE (XEXP (x, 0)) == ROTATERT))
8079805d 3977 return simplify_shift_const (NULL_RTX, LSHIFTRT, mode, XEXP (x, 0), i);
230d793d
RS
3978 break;
3979
3980 case EQ: case NE:
3981 case GT: case GTU: case GE: case GEU:
3982 case LT: case LTU: case LE: case LEU:
3983 /* If the first operand is a condition code, we can't do anything
3984 with it. */
3985 if (GET_CODE (XEXP (x, 0)) == COMPARE
3986 || (GET_MODE_CLASS (GET_MODE (XEXP (x, 0))) != MODE_CC
3987#ifdef HAVE_cc0
3988 && XEXP (x, 0) != cc0_rtx
3989#endif
3990 ))
3991 {
3992 rtx op0 = XEXP (x, 0);
3993 rtx op1 = XEXP (x, 1);
3994 enum rtx_code new_code;
3995
3996 if (GET_CODE (op0) == COMPARE)
3997 op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);
3998
3999 /* Simplify our comparison, if possible. */
4000 new_code = simplify_comparison (code, &op0, &op1);
4001
230d793d 4002 /* If STORE_FLAG_VALUE is 1, we can convert (ne x 0) to simply X
951553af 4003 if only the low-order bit is possibly nonzero in X (such as when
5109d49f
RK
4004 X is a ZERO_EXTRACT of one bit). Similarly, we can convert EQ to
4005 (xor X 1) or (minus 1 X); we use the former. Finally, if X is
4006 known to be either 0 or -1, NE becomes a NEG and EQ becomes
4007 (plus X 1).
4008
4009 Remove any ZERO_EXTRACT we made when thinking this was a
4010 comparison. It may now be simpler to use, e.g., an AND. If a
4011 ZERO_EXTRACT is indeed appropriate, it will be placed back by
4012 the call to make_compound_operation in the SET case. */
4013
0802d516
RK
4014 if (STORE_FLAG_VALUE == 1
4015 && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
4016 && op1 == const0_rtx && nonzero_bits (op0, mode) == 1)
818b11b9
RK
4017 return gen_lowpart_for_combine (mode,
4018 expand_compound_operation (op0));
5109d49f 4019
0802d516
RK
4020 else if (STORE_FLAG_VALUE == 1
4021 && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
5109d49f
RK
4022 && op1 == const0_rtx
4023 && (num_sign_bit_copies (op0, mode)
4024 == GET_MODE_BITSIZE (mode)))
4025 {
4026 op0 = expand_compound_operation (op0);
0c1c8ea6 4027 return gen_unary (NEG, mode, mode,
8079805d 4028 gen_lowpart_for_combine (mode, op0));
5109d49f
RK
4029 }
4030
0802d516
RK
4031 else if (STORE_FLAG_VALUE == 1
4032 && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
230d793d 4033 && op1 == const0_rtx
5109d49f 4034 && nonzero_bits (op0, mode) == 1)
818b11b9
RK
4035 {
4036 op0 = expand_compound_operation (op0);
8079805d
RK
4037 return gen_binary (XOR, mode,
4038 gen_lowpart_for_combine (mode, op0),
4039 const1_rtx);
5109d49f 4040 }
818b11b9 4041
0802d516
RK
4042 else if (STORE_FLAG_VALUE == 1
4043 && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
5109d49f
RK
4044 && op1 == const0_rtx
4045 && (num_sign_bit_copies (op0, mode)
4046 == GET_MODE_BITSIZE (mode)))
4047 {
4048 op0 = expand_compound_operation (op0);
8079805d 4049 return plus_constant (gen_lowpart_for_combine (mode, op0), 1);
818b11b9 4050 }
230d793d 4051
5109d49f
RK
4052 /* If STORE_FLAG_VALUE is -1, we have cases similar to
4053 those above. */
0802d516
RK
4054 if (STORE_FLAG_VALUE == -1
4055 && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
230d793d 4056 && op1 == const0_rtx
5109d49f
RK
4057 && (num_sign_bit_copies (op0, mode)
4058 == GET_MODE_BITSIZE (mode)))
4059 return gen_lowpart_for_combine (mode,
4060 expand_compound_operation (op0));
4061
0802d516
RK
4062 else if (STORE_FLAG_VALUE == -1
4063 && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
5109d49f
RK
4064 && op1 == const0_rtx
4065 && nonzero_bits (op0, mode) == 1)
4066 {
4067 op0 = expand_compound_operation (op0);
0c1c8ea6 4068 return gen_unary (NEG, mode, mode,
8079805d 4069 gen_lowpart_for_combine (mode, op0));
5109d49f
RK
4070 }
4071
0802d516
RK
4072 else if (STORE_FLAG_VALUE == -1
4073 && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
5109d49f
RK
4074 && op1 == const0_rtx
4075 && (num_sign_bit_copies (op0, mode)
4076 == GET_MODE_BITSIZE (mode)))
230d793d 4077 {
818b11b9 4078 op0 = expand_compound_operation (op0);
0c1c8ea6 4079 return gen_unary (NOT, mode, mode,
8079805d 4080 gen_lowpart_for_combine (mode, op0));
5109d49f
RK
4081 }
4082
4083 /* If X is 0/1, (eq X 0) is X-1. */
0802d516
RK
4084 else if (STORE_FLAG_VALUE == -1
4085 && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
5109d49f
RK
4086 && op1 == const0_rtx
4087 && nonzero_bits (op0, mode) == 1)
4088 {
4089 op0 = expand_compound_operation (op0);
8079805d 4090 return plus_constant (gen_lowpart_for_combine (mode, op0), -1);
230d793d 4091 }
230d793d
RS
4092
4093 /* If STORE_FLAG_VALUE says to just test the sign bit and X has just
951553af
RK
4094 one bit that might be nonzero, we can convert (ne x 0) to
4095 (ashift x c) where C puts the bit in the sign bit. Remove any
4096 AND with STORE_FLAG_VALUE when we are done, since we are only
4097 going to test the sign bit. */
3f508eca 4098 if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
5f4f0e22 4099 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
0802d516 4100 && ((STORE_FLAG_VALUE & GET_MODE_MASK (mode))
5f4f0e22 4101 == (HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (mode) - 1))
230d793d
RS
4102 && op1 == const0_rtx
4103 && mode == GET_MODE (op0)
5109d49f 4104 && (i = exact_log2 (nonzero_bits (op0, mode))) >= 0)
230d793d 4105 {
818b11b9
RK
4106 x = simplify_shift_const (NULL_RTX, ASHIFT, mode,
4107 expand_compound_operation (op0),
230d793d
RS
4108 GET_MODE_BITSIZE (mode) - 1 - i);
4109 if (GET_CODE (x) == AND && XEXP (x, 1) == const_true_rtx)
4110 return XEXP (x, 0);
4111 else
4112 return x;
4113 }
4114
4115 /* If the code changed, return a whole new comparison. */
4116 if (new_code != code)
4117 return gen_rtx_combine (new_code, mode, op0, op1);
4118
4119 /* Otherwise, keep this operation, but maybe change its operands.
4120 This also converts (ne (compare FOO BAR) 0) to (ne FOO BAR). */
4121 SUBST (XEXP (x, 0), op0);
4122 SUBST (XEXP (x, 1), op1);
4123 }
4124 break;
4125
4126 case IF_THEN_ELSE:
8079805d 4127 return simplify_if_then_else (x);
9210df58 4128
8079805d
RK
4129 case ZERO_EXTRACT:
4130 case SIGN_EXTRACT:
4131 case ZERO_EXTEND:
4132 case SIGN_EXTEND:
0f41302f 4133 /* If we are processing SET_DEST, we are done. */
8079805d
RK
4134 if (in_dest)
4135 return x;
d0ab8cd3 4136
8079805d 4137 return expand_compound_operation (x);
d0ab8cd3 4138
8079805d
RK
4139 case SET:
4140 return simplify_set (x);
1a26b032 4141
8079805d
RK
4142 case AND:
4143 case IOR:
4144 case XOR:
4145 return simplify_logical (x, last);
d0ab8cd3 4146
b472527b 4147 case ABS:
8079805d
RK
4148 /* (abs (neg <foo>)) -> (abs <foo>) */
4149 if (GET_CODE (XEXP (x, 0)) == NEG)
4150 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
1a26b032 4151
b472527b
JL
4152 /* If the mode of the operand is VOIDmode (i.e. if it is ASM_OPERANDS),
4153 do nothing. */
4154 if (GET_MODE (XEXP (x, 0)) == VOIDmode)
4155 break;
f40421ce 4156
8079805d
RK
4157 /* If operand is something known to be positive, ignore the ABS. */
4158 if (GET_CODE (XEXP (x, 0)) == FFS || GET_CODE (XEXP (x, 0)) == ABS
4159 || ((GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
4160 <= HOST_BITS_PER_WIDE_INT)
4161 && ((nonzero_bits (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
4162 & ((HOST_WIDE_INT) 1
4163 << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1)))
4164 == 0)))
4165 return XEXP (x, 0);
1a26b032 4166
1a26b032 4167
8079805d
RK
4168 /* If operand is known to be only -1 or 0, convert ABS to NEG. */
4169 if (num_sign_bit_copies (XEXP (x, 0), mode) == GET_MODE_BITSIZE (mode))
4170 return gen_rtx_combine (NEG, mode, XEXP (x, 0));
1a26b032 4171
8079805d 4172 break;
1a26b032 4173
8079805d
RK
4174 case FFS:
4175 /* (ffs (*_extend <X>)) = (ffs <X>) */
4176 if (GET_CODE (XEXP (x, 0)) == SIGN_EXTEND
4177 || GET_CODE (XEXP (x, 0)) == ZERO_EXTEND)
4178 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4179 break;
1a26b032 4180
8079805d
RK
4181 case FLOAT:
4182 /* (float (sign_extend <X>)) = (float <X>). */
4183 if (GET_CODE (XEXP (x, 0)) == SIGN_EXTEND)
4184 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4185 break;
1a26b032 4186
8079805d
RK
4187 case ASHIFT:
4188 case LSHIFTRT:
4189 case ASHIFTRT:
4190 case ROTATE:
4191 case ROTATERT:
4192 /* If this is a shift by a constant amount, simplify it. */
4193 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
4194 return simplify_shift_const (x, code, mode, XEXP (x, 0),
4195 INTVAL (XEXP (x, 1)));
4196
4197#ifdef SHIFT_COUNT_TRUNCATED
4198 else if (SHIFT_COUNT_TRUNCATED && GET_CODE (XEXP (x, 1)) != REG)
4199 SUBST (XEXP (x, 1),
4200 force_to_mode (XEXP (x, 1), GET_MODE (x),
4201 ((HOST_WIDE_INT) 1
4202 << exact_log2 (GET_MODE_BITSIZE (GET_MODE (x))))
4203 - 1,
4204 NULL_RTX, 0));
4205#endif
4206
4207 break;
e9a25f70
JL
4208
4209 default:
4210 break;
8079805d
RK
4211 }
4212
4213 return x;
4214}
4215\f
4216/* Simplify X, an IF_THEN_ELSE expression. Return the new expression. */
5109d49f 4217
8079805d
RK
4218static rtx
4219simplify_if_then_else (x)
4220 rtx x;
4221{
4222 enum machine_mode mode = GET_MODE (x);
4223 rtx cond = XEXP (x, 0);
4224 rtx true = XEXP (x, 1);
4225 rtx false = XEXP (x, 2);
4226 enum rtx_code true_code = GET_CODE (cond);
4227 int comparison_p = GET_RTX_CLASS (true_code) == '<';
4228 rtx temp;
4229 int i;
4230
0f41302f 4231 /* Simplify storing of the truth value. */
8079805d
RK
4232 if (comparison_p && true == const_true_rtx && false == const0_rtx)
4233 return gen_binary (true_code, mode, XEXP (cond, 0), XEXP (cond, 1));
4234
0f41302f 4235 /* Also when the truth value has to be reversed. */
8079805d
RK
4236 if (comparison_p && reversible_comparison_p (cond)
4237 && true == const0_rtx && false == const_true_rtx)
4238 return gen_binary (reverse_condition (true_code),
4239 mode, XEXP (cond, 0), XEXP (cond, 1));
4240
4241 /* Sometimes we can simplify the arm of an IF_THEN_ELSE if a register used
4242 in it is being compared against certain values. Get the true and false
4243 comparisons and see if that says anything about the value of each arm. */
4244
4245 if (comparison_p && reversible_comparison_p (cond)
4246 && GET_CODE (XEXP (cond, 0)) == REG)
4247 {
4248 HOST_WIDE_INT nzb;
4249 rtx from = XEXP (cond, 0);
4250 enum rtx_code false_code = reverse_condition (true_code);
4251 rtx true_val = XEXP (cond, 1);
4252 rtx false_val = true_val;
4253 int swapped = 0;
9210df58 4254
8079805d 4255 /* If FALSE_CODE is EQ, swap the codes and arms. */
5109d49f 4256
8079805d 4257 if (false_code == EQ)
1a26b032 4258 {
8079805d
RK
4259 swapped = 1, true_code = EQ, false_code = NE;
4260 temp = true, true = false, false = temp;
4261 }
5109d49f 4262
8079805d
RK
4263 /* If we are comparing against zero and the expression being tested has
4264 only a single bit that might be nonzero, that is its value when it is
4265 not equal to zero. Similarly if it is known to be -1 or 0. */
4266
4267 if (true_code == EQ && true_val == const0_rtx
4268 && exact_log2 (nzb = nonzero_bits (from, GET_MODE (from))) >= 0)
4269 false_code = EQ, false_val = GEN_INT (nzb);
4270 else if (true_code == EQ && true_val == const0_rtx
4271 && (num_sign_bit_copies (from, GET_MODE (from))
4272 == GET_MODE_BITSIZE (GET_MODE (from))))
4273 false_code = EQ, false_val = constm1_rtx;
4274
4275 /* Now simplify an arm if we know the value of the register in the
4276 branch and it is used in the arm. Be careful due to the potential
4277 of locally-shared RTL. */
4278
4279 if (reg_mentioned_p (from, true))
4280 true = subst (known_cond (copy_rtx (true), true_code, from, true_val),
4281 pc_rtx, pc_rtx, 0, 0);
4282 if (reg_mentioned_p (from, false))
4283 false = subst (known_cond (copy_rtx (false), false_code,
4284 from, false_val),
4285 pc_rtx, pc_rtx, 0, 0);
4286
4287 SUBST (XEXP (x, 1), swapped ? false : true);
4288 SUBST (XEXP (x, 2), swapped ? true : false);
4289
4290 true = XEXP (x, 1), false = XEXP (x, 2), true_code = GET_CODE (cond);
4291 }
5109d49f 4292
8079805d
RK
4293 /* If we have (if_then_else FOO (pc) (label_ref BAR)) and FOO can be
4294 reversed, do so to avoid needing two sets of patterns for
4295 subtract-and-branch insns. Similarly if we have a constant in the true
4296 arm, the false arm is the same as the first operand of the comparison, or
4297 the false arm is more complicated than the true arm. */
4298
4299 if (comparison_p && reversible_comparison_p (cond)
4300 && (true == pc_rtx
4301 || (CONSTANT_P (true)
4302 && GET_CODE (false) != CONST_INT && false != pc_rtx)
4303 || true == const0_rtx
4304 || (GET_RTX_CLASS (GET_CODE (true)) == 'o'
4305 && GET_RTX_CLASS (GET_CODE (false)) != 'o')
4306 || (GET_CODE (true) == SUBREG
4307 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (true))) == 'o'
4308 && GET_RTX_CLASS (GET_CODE (false)) != 'o')
4309 || reg_mentioned_p (true, false)
4310 || rtx_equal_p (false, XEXP (cond, 0))))
4311 {
4312 true_code = reverse_condition (true_code);
4313 SUBST (XEXP (x, 0),
4314 gen_binary (true_code, GET_MODE (cond), XEXP (cond, 0),
4315 XEXP (cond, 1)));
5109d49f 4316
8079805d
RK
4317 SUBST (XEXP (x, 1), false);
4318 SUBST (XEXP (x, 2), true);
1a26b032 4319
8079805d 4320 temp = true, true = false, false = temp, cond = XEXP (x, 0);
bb821298 4321
0f41302f 4322 /* It is possible that the conditional has been simplified out. */
bb821298
RK
4323 true_code = GET_CODE (cond);
4324 comparison_p = GET_RTX_CLASS (true_code) == '<';
8079805d 4325 }
abe6e52f 4326
8079805d 4327 /* If the two arms are identical, we don't need the comparison. */
1a26b032 4328
8079805d
RK
4329 if (rtx_equal_p (true, false) && ! side_effects_p (cond))
4330 return true;
1a26b032 4331
5be669c7
RK
4332 /* Convert a == b ? b : a to "a". */
4333 if (true_code == EQ && ! side_effects_p (cond)
4334 && rtx_equal_p (XEXP (cond, 0), false)
4335 && rtx_equal_p (XEXP (cond, 1), true))
4336 return false;
4337 else if (true_code == NE && ! side_effects_p (cond)
4338 && rtx_equal_p (XEXP (cond, 0), true)
4339 && rtx_equal_p (XEXP (cond, 1), false))
4340 return true;
4341
8079805d
RK
4342 /* Look for cases where we have (abs x) or (neg (abs X)). */
4343
4344 if (GET_MODE_CLASS (mode) == MODE_INT
4345 && GET_CODE (false) == NEG
4346 && rtx_equal_p (true, XEXP (false, 0))
4347 && comparison_p
4348 && rtx_equal_p (true, XEXP (cond, 0))
4349 && ! side_effects_p (true))
4350 switch (true_code)
4351 {
4352 case GT:
4353 case GE:
0c1c8ea6 4354 return gen_unary (ABS, mode, mode, true);
8079805d
RK
4355 case LT:
4356 case LE:
0c1c8ea6 4357 return gen_unary (NEG, mode, mode, gen_unary (ABS, mode, mode, true));
e9a25f70
JL
4358 default:
4359 break;
8079805d
RK
4360 }
4361
4362 /* Look for MIN or MAX. */
4363
34c8be72 4364 if ((! FLOAT_MODE_P (mode) || flag_fast_math)
8079805d
RK
4365 && comparison_p
4366 && rtx_equal_p (XEXP (cond, 0), true)
4367 && rtx_equal_p (XEXP (cond, 1), false)
4368 && ! side_effects_p (cond))
4369 switch (true_code)
4370 {
4371 case GE:
4372 case GT:
4373 return gen_binary (SMAX, mode, true, false);
4374 case LE:
4375 case LT:
4376 return gen_binary (SMIN, mode, true, false);
4377 case GEU:
4378 case GTU:
4379 return gen_binary (UMAX, mode, true, false);
4380 case LEU:
4381 case LTU:
4382 return gen_binary (UMIN, mode, true, false);
e9a25f70
JL
4383 default:
4384 break;
8079805d
RK
4385 }
4386
8079805d
RK
4387 /* If we have (if_then_else COND (OP Z C1) Z) and OP is an identity when its
4388 second operand is zero, this can be done as (OP Z (mult COND C2)) where
4389 C2 = C1 * STORE_FLAG_VALUE. Similarly if OP has an outer ZERO_EXTEND or
4390 SIGN_EXTEND as long as Z is already extended (so we don't destroy it).
4391 We can do this kind of thing in some cases when STORE_FLAG_VALUE is
0802d516 4392 neither 1 or -1, but it isn't worth checking for. */
8079805d 4393
0802d516
RK
4394 if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
4395 && comparison_p && mode != VOIDmode && ! side_effects_p (x))
8079805d
RK
4396 {
4397 rtx t = make_compound_operation (true, SET);
4398 rtx f = make_compound_operation (false, SET);
4399 rtx cond_op0 = XEXP (cond, 0);
4400 rtx cond_op1 = XEXP (cond, 1);
4401 enum rtx_code op, extend_op = NIL;
4402 enum machine_mode m = mode;
f24ad0e4 4403 rtx z = 0, c1;
8079805d 4404
8079805d
RK
4405 if ((GET_CODE (t) == PLUS || GET_CODE (t) == MINUS
4406 || GET_CODE (t) == IOR || GET_CODE (t) == XOR
4407 || GET_CODE (t) == ASHIFT
4408 || GET_CODE (t) == LSHIFTRT || GET_CODE (t) == ASHIFTRT)
4409 && rtx_equal_p (XEXP (t, 0), f))
4410 c1 = XEXP (t, 1), op = GET_CODE (t), z = f;
4411
4412 /* If an identity-zero op is commutative, check whether there
0f41302f 4413 would be a match if we swapped the operands. */
8079805d
RK
4414 else if ((GET_CODE (t) == PLUS || GET_CODE (t) == IOR
4415 || GET_CODE (t) == XOR)
4416 && rtx_equal_p (XEXP (t, 1), f))
4417 c1 = XEXP (t, 0), op = GET_CODE (t), z = f;
4418 else if (GET_CODE (t) == SIGN_EXTEND
4419 && (GET_CODE (XEXP (t, 0)) == PLUS
4420 || GET_CODE (XEXP (t, 0)) == MINUS
4421 || GET_CODE (XEXP (t, 0)) == IOR
4422 || GET_CODE (XEXP (t, 0)) == XOR
4423 || GET_CODE (XEXP (t, 0)) == ASHIFT
4424 || GET_CODE (XEXP (t, 0)) == LSHIFTRT
4425 || GET_CODE (XEXP (t, 0)) == ASHIFTRT)
4426 && GET_CODE (XEXP (XEXP (t, 0), 0)) == SUBREG
4427 && subreg_lowpart_p (XEXP (XEXP (t, 0), 0))
4428 && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 0)), f)
4429 && (num_sign_bit_copies (f, GET_MODE (f))
4430 > (GET_MODE_BITSIZE (mode)
4431 - GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (t, 0), 0))))))
4432 {
4433 c1 = XEXP (XEXP (t, 0), 1); z = f; op = GET_CODE (XEXP (t, 0));
4434 extend_op = SIGN_EXTEND;
4435 m = GET_MODE (XEXP (t, 0));
1a26b032 4436 }
8079805d
RK
4437 else if (GET_CODE (t) == SIGN_EXTEND
4438 && (GET_CODE (XEXP (t, 0)) == PLUS
4439 || GET_CODE (XEXP (t, 0)) == IOR
4440 || GET_CODE (XEXP (t, 0)) == XOR)
4441 && GET_CODE (XEXP (XEXP (t, 0), 1)) == SUBREG
4442 && subreg_lowpart_p (XEXP (XEXP (t, 0), 1))
4443 && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 1)), f)
4444 && (num_sign_bit_copies (f, GET_MODE (f))
4445 > (GET_MODE_BITSIZE (mode)
4446 - GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (t, 0), 1))))))
4447 {
4448 c1 = XEXP (XEXP (t, 0), 0); z = f; op = GET_CODE (XEXP (t, 0));
4449 extend_op = SIGN_EXTEND;
4450 m = GET_MODE (XEXP (t, 0));
4451 }
4452 else if (GET_CODE (t) == ZERO_EXTEND
4453 && (GET_CODE (XEXP (t, 0)) == PLUS
4454 || GET_CODE (XEXP (t, 0)) == MINUS
4455 || GET_CODE (XEXP (t, 0)) == IOR
4456 || GET_CODE (XEXP (t, 0)) == XOR
4457 || GET_CODE (XEXP (t, 0)) == ASHIFT
4458 || GET_CODE (XEXP (t, 0)) == LSHIFTRT
4459 || GET_CODE (XEXP (t, 0)) == ASHIFTRT)
4460 && GET_CODE (XEXP (XEXP (t, 0), 0)) == SUBREG
4461 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4462 && subreg_lowpart_p (XEXP (XEXP (t, 0), 0))
4463 && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 0)), f)
4464 && ((nonzero_bits (f, GET_MODE (f))
4465 & ~ GET_MODE_MASK (GET_MODE (XEXP (XEXP (t, 0), 0))))
4466 == 0))
4467 {
4468 c1 = XEXP (XEXP (t, 0), 1); z = f; op = GET_CODE (XEXP (t, 0));
4469 extend_op = ZERO_EXTEND;
4470 m = GET_MODE (XEXP (t, 0));
4471 }
4472 else if (GET_CODE (t) == ZERO_EXTEND
4473 && (GET_CODE (XEXP (t, 0)) == PLUS
4474 || GET_CODE (XEXP (t, 0)) == IOR
4475 || GET_CODE (XEXP (t, 0)) == XOR)
4476 && GET_CODE (XEXP (XEXP (t, 0), 1)) == SUBREG
4477 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4478 && subreg_lowpart_p (XEXP (XEXP (t, 0), 1))
4479 && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 1)), f)
4480 && ((nonzero_bits (f, GET_MODE (f))
4481 & ~ GET_MODE_MASK (GET_MODE (XEXP (XEXP (t, 0), 1))))
4482 == 0))
4483 {
4484 c1 = XEXP (XEXP (t, 0), 0); z = f; op = GET_CODE (XEXP (t, 0));
4485 extend_op = ZERO_EXTEND;
4486 m = GET_MODE (XEXP (t, 0));
4487 }
4488
4489 if (z)
4490 {
4491 temp = subst (gen_binary (true_code, m, cond_op0, cond_op1),
4492 pc_rtx, pc_rtx, 0, 0);
4493 temp = gen_binary (MULT, m, temp,
4494 gen_binary (MULT, m, c1, const_true_rtx));
4495 temp = subst (temp, pc_rtx, pc_rtx, 0, 0);
4496 temp = gen_binary (op, m, gen_lowpart_for_combine (m, z), temp);
4497
4498 if (extend_op != NIL)
0c1c8ea6 4499 temp = gen_unary (extend_op, mode, m, temp);
8079805d
RK
4500
4501 return temp;
4502 }
4503 }
224eeff2 4504
8079805d
RK
4505 /* If we have (if_then_else (ne A 0) C1 0) and either A is known to be 0 or
4506 1 and C1 is a single bit or A is known to be 0 or -1 and C1 is the
4507 negation of a single bit, we can convert this operation to a shift. We
4508 can actually do this more generally, but it doesn't seem worth it. */
4509
4510 if (true_code == NE && XEXP (cond, 1) == const0_rtx
4511 && false == const0_rtx && GET_CODE (true) == CONST_INT
4512 && ((1 == nonzero_bits (XEXP (cond, 0), mode)
4513 && (i = exact_log2 (INTVAL (true))) >= 0)
4514 || ((num_sign_bit_copies (XEXP (cond, 0), mode)
4515 == GET_MODE_BITSIZE (mode))
4516 && (i = exact_log2 (- INTVAL (true))) >= 0)))
4517 return
4518 simplify_shift_const (NULL_RTX, ASHIFT, mode,
4519 gen_lowpart_for_combine (mode, XEXP (cond, 0)), i);
230d793d 4520
8079805d
RK
4521 return x;
4522}
4523\f
4524/* Simplify X, a SET expression. Return the new expression. */
230d793d 4525
8079805d
RK
4526static rtx
4527simplify_set (x)
4528 rtx x;
4529{
4530 rtx src = SET_SRC (x);
4531 rtx dest = SET_DEST (x);
4532 enum machine_mode mode
4533 = GET_MODE (src) != VOIDmode ? GET_MODE (src) : GET_MODE (dest);
4534 rtx other_insn;
4535 rtx *cc_use;
4536
4537 /* (set (pc) (return)) gets written as (return). */
4538 if (GET_CODE (dest) == PC && GET_CODE (src) == RETURN)
4539 return src;
230d793d 4540
87e3e0c1
RK
4541 /* Now that we know for sure which bits of SRC we are using, see if we can
4542 simplify the expression for the object knowing that we only need the
4543 low-order bits. */
4544
4545 if (GET_MODE_CLASS (mode) == MODE_INT)
4546 src = force_to_mode (src, mode, GET_MODE_MASK (mode), NULL_RTX, 0);
4547
8079805d
RK
4548 /* If we are setting CC0 or if the source is a COMPARE, look for the use of
4549 the comparison result and try to simplify it unless we already have used
4550 undobuf.other_insn. */
4551 if ((GET_CODE (src) == COMPARE
230d793d 4552#ifdef HAVE_cc0
8079805d 4553 || dest == cc0_rtx
230d793d 4554#endif
8079805d
RK
4555 )
4556 && (cc_use = find_single_use (dest, subst_insn, &other_insn)) != 0
4557 && (undobuf.other_insn == 0 || other_insn == undobuf.other_insn)
4558 && GET_RTX_CLASS (GET_CODE (*cc_use)) == '<'
c0d3ac4d 4559 && rtx_equal_p (XEXP (*cc_use, 0), dest))
8079805d
RK
4560 {
4561 enum rtx_code old_code = GET_CODE (*cc_use);
4562 enum rtx_code new_code;
4563 rtx op0, op1;
4564 int other_changed = 0;
4565 enum machine_mode compare_mode = GET_MODE (dest);
4566
4567 if (GET_CODE (src) == COMPARE)
4568 op0 = XEXP (src, 0), op1 = XEXP (src, 1);
4569 else
4570 op0 = src, op1 = const0_rtx;
230d793d 4571
8079805d
RK
4572 /* Simplify our comparison, if possible. */
4573 new_code = simplify_comparison (old_code, &op0, &op1);
230d793d 4574
c141a106 4575#ifdef EXTRA_CC_MODES
8079805d
RK
4576 /* If this machine has CC modes other than CCmode, check to see if we
4577 need to use a different CC mode here. */
4578 compare_mode = SELECT_CC_MODE (new_code, op0, op1);
c141a106 4579#endif /* EXTRA_CC_MODES */
230d793d 4580
c141a106 4581#if !defined (HAVE_cc0) && defined (EXTRA_CC_MODES)
8079805d
RK
4582 /* If the mode changed, we have to change SET_DEST, the mode in the
4583 compare, and the mode in the place SET_DEST is used. If SET_DEST is
4584 a hard register, just build new versions with the proper mode. If it
4585 is a pseudo, we lose unless it is only time we set the pseudo, in
4586 which case we can safely change its mode. */
4587 if (compare_mode != GET_MODE (dest))
4588 {
4589 int regno = REGNO (dest);
38a448ca 4590 rtx new_dest = gen_rtx_REG (compare_mode, regno);
8079805d
RK
4591
4592 if (regno < FIRST_PSEUDO_REGISTER
b1f21e0a 4593 || (REG_N_SETS (regno) == 1 && ! REG_USERVAR_P (dest)))
230d793d 4594 {
8079805d
RK
4595 if (regno >= FIRST_PSEUDO_REGISTER)
4596 SUBST (regno_reg_rtx[regno], new_dest);
230d793d 4597
8079805d
RK
4598 SUBST (SET_DEST (x), new_dest);
4599 SUBST (XEXP (*cc_use, 0), new_dest);
4600 other_changed = 1;
230d793d 4601
8079805d 4602 dest = new_dest;
230d793d 4603 }
8079805d 4604 }
230d793d
RS
4605#endif
4606
8079805d
RK
4607 /* If the code changed, we have to build a new comparison in
4608 undobuf.other_insn. */
4609 if (new_code != old_code)
4610 {
4611 unsigned HOST_WIDE_INT mask;
4612
4613 SUBST (*cc_use, gen_rtx_combine (new_code, GET_MODE (*cc_use),
4614 dest, const0_rtx));
4615
4616 /* If the only change we made was to change an EQ into an NE or
4617 vice versa, OP0 has only one bit that might be nonzero, and OP1
4618 is zero, check if changing the user of the condition code will
4619 produce a valid insn. If it won't, we can keep the original code
4620 in that insn by surrounding our operation with an XOR. */
4621
4622 if (((old_code == NE && new_code == EQ)
4623 || (old_code == EQ && new_code == NE))
4624 && ! other_changed && op1 == const0_rtx
4625 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
4626 && exact_log2 (mask = nonzero_bits (op0, GET_MODE (op0))) >= 0)
230d793d 4627 {
8079805d 4628 rtx pat = PATTERN (other_insn), note = 0;
a29ca9db 4629 int scratches;
230d793d 4630
a29ca9db 4631 if ((recog_for_combine (&pat, other_insn, &note, &scratches) < 0
8079805d
RK
4632 && ! check_asm_operands (pat)))
4633 {
4634 PUT_CODE (*cc_use, old_code);
4635 other_insn = 0;
230d793d 4636
8079805d 4637 op0 = gen_binary (XOR, GET_MODE (op0), op0, GEN_INT (mask));
230d793d 4638 }
230d793d
RS
4639 }
4640
8079805d
RK
4641 other_changed = 1;
4642 }
4643
4644 if (other_changed)
4645 undobuf.other_insn = other_insn;
230d793d
RS
4646
4647#ifdef HAVE_cc0
8079805d
RK
4648 /* If we are now comparing against zero, change our source if
4649 needed. If we do not use cc0, we always have a COMPARE. */
4650 if (op1 == const0_rtx && dest == cc0_rtx)
4651 {
4652 SUBST (SET_SRC (x), op0);
4653 src = op0;
4654 }
4655 else
230d793d
RS
4656#endif
4657
8079805d
RK
4658 /* Otherwise, if we didn't previously have a COMPARE in the
4659 correct mode, we need one. */
4660 if (GET_CODE (src) != COMPARE || GET_MODE (src) != compare_mode)
4661 {
4662 SUBST (SET_SRC (x),
4663 gen_rtx_combine (COMPARE, compare_mode, op0, op1));
4664 src = SET_SRC (x);
230d793d
RS
4665 }
4666 else
4667 {
8079805d
RK
4668 /* Otherwise, update the COMPARE if needed. */
4669 SUBST (XEXP (src, 0), op0);
4670 SUBST (XEXP (src, 1), op1);
230d793d 4671 }
8079805d
RK
4672 }
4673 else
4674 {
4675 /* Get SET_SRC in a form where we have placed back any
4676 compound expressions. Then do the checks below. */
4677 src = make_compound_operation (src, SET);
4678 SUBST (SET_SRC (x), src);
4679 }
230d793d 4680
8079805d
RK
4681 /* If we have (set x (subreg:m1 (op:m2 ...) 0)) with OP being some operation,
4682 and X being a REG or (subreg (reg)), we may be able to convert this to
4683 (set (subreg:m2 x) (op)).
df62f951 4684
8079805d
RK
4685 We can always do this if M1 is narrower than M2 because that means that
4686 we only care about the low bits of the result.
df62f951 4687
8079805d
RK
4688 However, on machines without WORD_REGISTER_OPERATIONS defined, we cannot
4689 perform a narrower operation that requested since the high-order bits will
4690 be undefined. On machine where it is defined, this transformation is safe
4691 as long as M1 and M2 have the same number of words. */
df62f951 4692
8079805d
RK
4693 if (GET_CODE (src) == SUBREG && subreg_lowpart_p (src)
4694 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (src))) != 'o'
4695 && (((GET_MODE_SIZE (GET_MODE (src)) + (UNITS_PER_WORD - 1))
4696 / UNITS_PER_WORD)
4697 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
4698 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
8baf60bb 4699#ifndef WORD_REGISTER_OPERATIONS
8079805d
RK
4700 && (GET_MODE_SIZE (GET_MODE (src))
4701 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (src))))
df62f951 4702#endif
f507a070
RK
4703#ifdef CLASS_CANNOT_CHANGE_SIZE
4704 && ! (GET_CODE (dest) == REG && REGNO (dest) < FIRST_PSEUDO_REGISTER
4705 && (TEST_HARD_REG_BIT
4706 (reg_class_contents[(int) CLASS_CANNOT_CHANGE_SIZE],
4707 REGNO (dest)))
4708 && (GET_MODE_SIZE (GET_MODE (src))
4709 != GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))))
4710#endif
8079805d
RK
4711 && (GET_CODE (dest) == REG
4712 || (GET_CODE (dest) == SUBREG
4713 && GET_CODE (SUBREG_REG (dest)) == REG)))
4714 {
4715 SUBST (SET_DEST (x),
4716 gen_lowpart_for_combine (GET_MODE (SUBREG_REG (src)),
4717 dest));
4718 SUBST (SET_SRC (x), SUBREG_REG (src));
4719
4720 src = SET_SRC (x), dest = SET_DEST (x);
4721 }
df62f951 4722
8baf60bb 4723#ifdef LOAD_EXTEND_OP
8079805d
RK
4724 /* If we have (set FOO (subreg:M (mem:N BAR) 0)) with M wider than N, this
4725 would require a paradoxical subreg. Replace the subreg with a
0f41302f 4726 zero_extend to avoid the reload that would otherwise be required. */
8079805d
RK
4727
4728 if (GET_CODE (src) == SUBREG && subreg_lowpart_p (src)
4729 && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (src))) != NIL
4730 && SUBREG_WORD (src) == 0
4731 && (GET_MODE_SIZE (GET_MODE (src))
4732 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (src))))
4733 && GET_CODE (SUBREG_REG (src)) == MEM)
4734 {
4735 SUBST (SET_SRC (x),
4736 gen_rtx_combine (LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (src))),
4737 GET_MODE (src), XEXP (src, 0)));
4738
4739 src = SET_SRC (x);
4740 }
230d793d
RS
4741#endif
4742
8079805d
RK
4743 /* If we don't have a conditional move, SET_SRC is an IF_THEN_ELSE, and we
4744 are comparing an item known to be 0 or -1 against 0, use a logical
4745 operation instead. Check for one of the arms being an IOR of the other
4746 arm with some value. We compute three terms to be IOR'ed together. In
4747 practice, at most two will be nonzero. Then we do the IOR's. */
4748
4749 if (GET_CODE (dest) != PC
4750 && GET_CODE (src) == IF_THEN_ELSE
36b8d792 4751 && GET_MODE_CLASS (GET_MODE (src)) == MODE_INT
8079805d
RK
4752 && (GET_CODE (XEXP (src, 0)) == EQ || GET_CODE (XEXP (src, 0)) == NE)
4753 && XEXP (XEXP (src, 0), 1) == const0_rtx
6dd49058 4754 && GET_MODE (src) == GET_MODE (XEXP (XEXP (src, 0), 0))
ea414472
DE
4755#ifdef HAVE_conditional_move
4756 && ! can_conditionally_move_p (GET_MODE (src))
4757#endif
8079805d
RK
4758 && (num_sign_bit_copies (XEXP (XEXP (src, 0), 0),
4759 GET_MODE (XEXP (XEXP (src, 0), 0)))
4760 == GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (src, 0), 0))))
4761 && ! side_effects_p (src))
4762 {
4763 rtx true = (GET_CODE (XEXP (src, 0)) == NE
4764 ? XEXP (src, 1) : XEXP (src, 2));
4765 rtx false = (GET_CODE (XEXP (src, 0)) == NE
4766 ? XEXP (src, 2) : XEXP (src, 1));
4767 rtx term1 = const0_rtx, term2, term3;
4768
4769 if (GET_CODE (true) == IOR && rtx_equal_p (XEXP (true, 0), false))
4770 term1 = false, true = XEXP (true, 1), false = const0_rtx;
4771 else if (GET_CODE (true) == IOR
4772 && rtx_equal_p (XEXP (true, 1), false))
4773 term1 = false, true = XEXP (true, 0), false = const0_rtx;
4774 else if (GET_CODE (false) == IOR
4775 && rtx_equal_p (XEXP (false, 0), true))
4776 term1 = true, false = XEXP (false, 1), true = const0_rtx;
4777 else if (GET_CODE (false) == IOR
4778 && rtx_equal_p (XEXP (false, 1), true))
4779 term1 = true, false = XEXP (false, 0), true = const0_rtx;
4780
4781 term2 = gen_binary (AND, GET_MODE (src), XEXP (XEXP (src, 0), 0), true);
4782 term3 = gen_binary (AND, GET_MODE (src),
0c1c8ea6 4783 gen_unary (NOT, GET_MODE (src), GET_MODE (src),
8079805d
RK
4784 XEXP (XEXP (src, 0), 0)),
4785 false);
4786
4787 SUBST (SET_SRC (x),
4788 gen_binary (IOR, GET_MODE (src),
4789 gen_binary (IOR, GET_MODE (src), term1, term2),
4790 term3));
4791
4792 src = SET_SRC (x);
4793 }
230d793d 4794
246e00f2
RK
4795 /* If either SRC or DEST is a CLOBBER of (const_int 0), make this
4796 whole thing fail. */
4797 if (GET_CODE (src) == CLOBBER && XEXP (src, 0) == const0_rtx)
4798 return src;
4799 else if (GET_CODE (dest) == CLOBBER && XEXP (dest, 0) == const0_rtx)
4800 return dest;
4801 else
4802 /* Convert this into a field assignment operation, if possible. */
4803 return make_field_assignment (x);
8079805d
RK
4804}
4805\f
4806/* Simplify, X, and AND, IOR, or XOR operation, and return the simplified
4807 result. LAST is nonzero if this is the last retry. */
4808
4809static rtx
4810simplify_logical (x, last)
4811 rtx x;
4812 int last;
4813{
4814 enum machine_mode mode = GET_MODE (x);
4815 rtx op0 = XEXP (x, 0);
4816 rtx op1 = XEXP (x, 1);
4817
4818 switch (GET_CODE (x))
4819 {
230d793d 4820 case AND:
8079805d
RK
4821 /* Convert (A ^ B) & A to A & (~ B) since the latter is often a single
4822 insn (and may simplify more). */
4823 if (GET_CODE (op0) == XOR
4824 && rtx_equal_p (XEXP (op0, 0), op1)
4825 && ! side_effects_p (op1))
0c1c8ea6
RK
4826 x = gen_binary (AND, mode,
4827 gen_unary (NOT, mode, mode, XEXP (op0, 1)), op1);
8079805d
RK
4828
4829 if (GET_CODE (op0) == XOR
4830 && rtx_equal_p (XEXP (op0, 1), op1)
4831 && ! side_effects_p (op1))
0c1c8ea6
RK
4832 x = gen_binary (AND, mode,
4833 gen_unary (NOT, mode, mode, XEXP (op0, 0)), op1);
8079805d
RK
4834
4835 /* Similarly for (~ (A ^ B)) & A. */
4836 if (GET_CODE (op0) == NOT
4837 && GET_CODE (XEXP (op0, 0)) == XOR
4838 && rtx_equal_p (XEXP (XEXP (op0, 0), 0), op1)
4839 && ! side_effects_p (op1))
4840 x = gen_binary (AND, mode, XEXP (XEXP (op0, 0), 1), op1);
4841
4842 if (GET_CODE (op0) == NOT
4843 && GET_CODE (XEXP (op0, 0)) == XOR
4844 && rtx_equal_p (XEXP (XEXP (op0, 0), 1), op1)
4845 && ! side_effects_p (op1))
4846 x = gen_binary (AND, mode, XEXP (XEXP (op0, 0), 0), op1);
4847
4848 if (GET_CODE (op1) == CONST_INT)
230d793d 4849 {
8079805d 4850 x = simplify_and_const_int (x, mode, op0, INTVAL (op1));
230d793d
RS
4851
4852 /* If we have (ior (and (X C1) C2)) and the next restart would be
4853 the last, simplify this by making C1 as small as possible
0f41302f 4854 and then exit. */
8079805d
RK
4855 if (last
4856 && GET_CODE (x) == IOR && GET_CODE (op0) == AND
4857 && GET_CODE (XEXP (op0, 1)) == CONST_INT
4858 && GET_CODE (op1) == CONST_INT)
4859 return gen_binary (IOR, mode,
4860 gen_binary (AND, mode, XEXP (op0, 0),
4861 GEN_INT (INTVAL (XEXP (op0, 1))
4862 & ~ INTVAL (op1))), op1);
230d793d
RS
4863
4864 if (GET_CODE (x) != AND)
8079805d 4865 return x;
0e32506c
RK
4866
4867 if (GET_RTX_CLASS (GET_CODE (x)) == 'c'
4868 || GET_RTX_CLASS (GET_CODE (x)) == '2')
4869 op0 = XEXP (x, 0), op1 = XEXP (x, 1);
230d793d
RS
4870 }
4871
4872 /* Convert (A | B) & A to A. */
8079805d
RK
4873 if (GET_CODE (op0) == IOR
4874 && (rtx_equal_p (XEXP (op0, 0), op1)
4875 || rtx_equal_p (XEXP (op0, 1), op1))
4876 && ! side_effects_p (XEXP (op0, 0))
4877 && ! side_effects_p (XEXP (op0, 1)))
4878 return op1;
230d793d 4879
d0ab8cd3 4880 /* In the following group of tests (and those in case IOR below),
230d793d
RS
4881 we start with some combination of logical operations and apply
4882 the distributive law followed by the inverse distributive law.
4883 Most of the time, this results in no change. However, if some of
4884 the operands are the same or inverses of each other, simplifications
4885 will result.
4886
4887 For example, (and (ior A B) (not B)) can occur as the result of
4888 expanding a bit field assignment. When we apply the distributive
4889 law to this, we get (ior (and (A (not B))) (and (B (not B)))),
8079805d 4890 which then simplifies to (and (A (not B))).
230d793d 4891
8079805d 4892 If we have (and (ior A B) C), apply the distributive law and then
230d793d
RS
4893 the inverse distributive law to see if things simplify. */
4894
8079805d 4895 if (GET_CODE (op0) == IOR || GET_CODE (op0) == XOR)
230d793d
RS
4896 {
4897 x = apply_distributive_law
8079805d
RK
4898 (gen_binary (GET_CODE (op0), mode,
4899 gen_binary (AND, mode, XEXP (op0, 0), op1),
4900 gen_binary (AND, mode, XEXP (op0, 1), op1)));
230d793d 4901 if (GET_CODE (x) != AND)
8079805d 4902 return x;
230d793d
RS
4903 }
4904
8079805d
RK
4905 if (GET_CODE (op1) == IOR || GET_CODE (op1) == XOR)
4906 return apply_distributive_law
4907 (gen_binary (GET_CODE (op1), mode,
4908 gen_binary (AND, mode, XEXP (op1, 0), op0),
4909 gen_binary (AND, mode, XEXP (op1, 1), op0)));
230d793d
RS
4910
4911 /* Similarly, taking advantage of the fact that
4912 (and (not A) (xor B C)) == (xor (ior A B) (ior A C)) */
4913
8079805d
RK
4914 if (GET_CODE (op0) == NOT && GET_CODE (op1) == XOR)
4915 return apply_distributive_law
4916 (gen_binary (XOR, mode,
4917 gen_binary (IOR, mode, XEXP (op0, 0), XEXP (op1, 0)),
4918 gen_binary (IOR, mode, XEXP (op0, 0), XEXP (op1, 1))));
230d793d 4919
8079805d
RK
4920 else if (GET_CODE (op1) == NOT && GET_CODE (op0) == XOR)
4921 return apply_distributive_law
4922 (gen_binary (XOR, mode,
4923 gen_binary (IOR, mode, XEXP (op1, 0), XEXP (op0, 0)),
4924 gen_binary (IOR, mode, XEXP (op1, 0), XEXP (op0, 1))));
230d793d
RS
4925 break;
4926
4927 case IOR:
951553af 4928 /* (ior A C) is C if all bits of A that might be nonzero are on in C. */
8079805d 4929 if (GET_CODE (op1) == CONST_INT
ac49a949 4930 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
8079805d
RK
4931 && (nonzero_bits (op0, mode) & ~ INTVAL (op1)) == 0)
4932 return op1;
d0ab8cd3 4933
230d793d 4934 /* Convert (A & B) | A to A. */
8079805d
RK
4935 if (GET_CODE (op0) == AND
4936 && (rtx_equal_p (XEXP (op0, 0), op1)
4937 || rtx_equal_p (XEXP (op0, 1), op1))
4938 && ! side_effects_p (XEXP (op0, 0))
4939 && ! side_effects_p (XEXP (op0, 1)))
4940 return op1;
230d793d
RS
4941
4942 /* If we have (ior (and A B) C), apply the distributive law and then
4943 the inverse distributive law to see if things simplify. */
4944
8079805d 4945 if (GET_CODE (op0) == AND)
230d793d
RS
4946 {
4947 x = apply_distributive_law
4948 (gen_binary (AND, mode,
8079805d
RK
4949 gen_binary (IOR, mode, XEXP (op0, 0), op1),
4950 gen_binary (IOR, mode, XEXP (op0, 1), op1)));
230d793d
RS
4951
4952 if (GET_CODE (x) != IOR)
8079805d 4953 return x;
230d793d
RS
4954 }
4955
8079805d 4956 if (GET_CODE (op1) == AND)
230d793d
RS
4957 {
4958 x = apply_distributive_law
4959 (gen_binary (AND, mode,
8079805d
RK
4960 gen_binary (IOR, mode, XEXP (op1, 0), op0),
4961 gen_binary (IOR, mode, XEXP (op1, 1), op0)));
230d793d
RS
4962
4963 if (GET_CODE (x) != IOR)
8079805d 4964 return x;
230d793d
RS
4965 }
4966
4967 /* Convert (ior (ashift A CX) (lshiftrt A CY)) where CX+CY equals the
4968 mode size to (rotate A CX). */
4969
8079805d
RK
4970 if (((GET_CODE (op0) == ASHIFT && GET_CODE (op1) == LSHIFTRT)
4971 || (GET_CODE (op1) == ASHIFT && GET_CODE (op0) == LSHIFTRT))
4972 && rtx_equal_p (XEXP (op0, 0), XEXP (op1, 0))
4973 && GET_CODE (XEXP (op0, 1)) == CONST_INT
4974 && GET_CODE (XEXP (op1, 1)) == CONST_INT
4975 && (INTVAL (XEXP (op0, 1)) + INTVAL (XEXP (op1, 1))
230d793d 4976 == GET_MODE_BITSIZE (mode)))
38a448ca
RH
4977 return gen_rtx_ROTATE (mode, XEXP (op0, 0),
4978 (GET_CODE (op0) == ASHIFT
4979 ? XEXP (op0, 1) : XEXP (op1, 1)));
230d793d 4980
71923da7
RK
4981 /* If OP0 is (ashiftrt (plus ...) C), it might actually be
4982 a (sign_extend (plus ...)). If so, OP1 is a CONST_INT, and the PLUS
4983 does not affect any of the bits in OP1, it can really be done
4984 as a PLUS and we can associate. We do this by seeing if OP1
4985 can be safely shifted left C bits. */
4986 if (GET_CODE (op1) == CONST_INT && GET_CODE (op0) == ASHIFTRT
4987 && GET_CODE (XEXP (op0, 0)) == PLUS
4988 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
4989 && GET_CODE (XEXP (op0, 1)) == CONST_INT
4990 && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT)
4991 {
4992 int count = INTVAL (XEXP (op0, 1));
4993 HOST_WIDE_INT mask = INTVAL (op1) << count;
4994
4995 if (mask >> count == INTVAL (op1)
4996 && (mask & nonzero_bits (XEXP (op0, 0), mode)) == 0)
4997 {
4998 SUBST (XEXP (XEXP (op0, 0), 1),
4999 GEN_INT (INTVAL (XEXP (XEXP (op0, 0), 1)) | mask));
5000 return op0;
5001 }
5002 }
230d793d
RS
5003 break;
5004
5005 case XOR:
5006 /* Convert (XOR (NOT x) (NOT y)) to (XOR x y).
5007 Also convert (XOR (NOT x) y) to (NOT (XOR x y)), similarly for
5008 (NOT y). */
5009 {
5010 int num_negated = 0;
230d793d 5011
8079805d
RK
5012 if (GET_CODE (op0) == NOT)
5013 num_negated++, op0 = XEXP (op0, 0);
5014 if (GET_CODE (op1) == NOT)
5015 num_negated++, op1 = XEXP (op1, 0);
230d793d
RS
5016
5017 if (num_negated == 2)
5018 {
8079805d
RK
5019 SUBST (XEXP (x, 0), op0);
5020 SUBST (XEXP (x, 1), op1);
230d793d
RS
5021 }
5022 else if (num_negated == 1)
0c1c8ea6 5023 return gen_unary (NOT, mode, mode, gen_binary (XOR, mode, op0, op1));
230d793d
RS
5024 }
5025
5026 /* Convert (xor (and A B) B) to (and (not A) B). The latter may
5027 correspond to a machine insn or result in further simplifications
5028 if B is a constant. */
5029
8079805d
RK
5030 if (GET_CODE (op0) == AND
5031 && rtx_equal_p (XEXP (op0, 1), op1)
5032 && ! side_effects_p (op1))
0c1c8ea6
RK
5033 return gen_binary (AND, mode,
5034 gen_unary (NOT, mode, mode, XEXP (op0, 0)),
8079805d 5035 op1);
230d793d 5036
8079805d
RK
5037 else if (GET_CODE (op0) == AND
5038 && rtx_equal_p (XEXP (op0, 0), op1)
5039 && ! side_effects_p (op1))
0c1c8ea6
RK
5040 return gen_binary (AND, mode,
5041 gen_unary (NOT, mode, mode, XEXP (op0, 1)),
8079805d 5042 op1);
230d793d 5043
230d793d 5044 /* (xor (comparison foo bar) (const_int 1)) can become the reversed
0802d516
RK
5045 comparison if STORE_FLAG_VALUE is 1. */
5046 if (STORE_FLAG_VALUE == 1
5047 && op1 == const1_rtx
8079805d
RK
5048 && GET_RTX_CLASS (GET_CODE (op0)) == '<'
5049 && reversible_comparison_p (op0))
5050 return gen_rtx_combine (reverse_condition (GET_CODE (op0)),
5051 mode, XEXP (op0, 0), XEXP (op0, 1));
500c518b
RK
5052
5053 /* (lshiftrt foo C) where C is the number of bits in FOO minus 1
5054 is (lt foo (const_int 0)), so we can perform the above
0802d516 5055 simplification if STORE_FLAG_VALUE is 1. */
500c518b 5056
0802d516
RK
5057 if (STORE_FLAG_VALUE == 1
5058 && op1 == const1_rtx
8079805d
RK
5059 && GET_CODE (op0) == LSHIFTRT
5060 && GET_CODE (XEXP (op0, 1)) == CONST_INT
5061 && INTVAL (XEXP (op0, 1)) == GET_MODE_BITSIZE (mode) - 1)
5062 return gen_rtx_combine (GE, mode, XEXP (op0, 0), const0_rtx);
230d793d
RS
5063
5064 /* (xor (comparison foo bar) (const_int sign-bit))
5065 when STORE_FLAG_VALUE is the sign bit. */
5f4f0e22 5066 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
0802d516 5067 && ((STORE_FLAG_VALUE & GET_MODE_MASK (mode))
5f4f0e22 5068 == (HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (mode) - 1))
8079805d
RK
5069 && op1 == const_true_rtx
5070 && GET_RTX_CLASS (GET_CODE (op0)) == '<'
5071 && reversible_comparison_p (op0))
5072 return gen_rtx_combine (reverse_condition (GET_CODE (op0)),
5073 mode, XEXP (op0, 0), XEXP (op0, 1));
230d793d 5074 break;
e9a25f70
JL
5075
5076 default:
5077 abort ();
230d793d
RS
5078 }
5079
5080 return x;
5081}
5082\f
5083/* We consider ZERO_EXTRACT, SIGN_EXTRACT, and SIGN_EXTEND as "compound
5084 operations" because they can be replaced with two more basic operations.
5085 ZERO_EXTEND is also considered "compound" because it can be replaced with
5086 an AND operation, which is simpler, though only one operation.
5087
5088 The function expand_compound_operation is called with an rtx expression
5089 and will convert it to the appropriate shifts and AND operations,
5090 simplifying at each stage.
5091
5092 The function make_compound_operation is called to convert an expression
5093 consisting of shifts and ANDs into the equivalent compound expression.
5094 It is the inverse of this function, loosely speaking. */
5095
5096static rtx
5097expand_compound_operation (x)
5098 rtx x;
5099{
5100 int pos = 0, len;
5101 int unsignedp = 0;
5102 int modewidth;
5103 rtx tem;
5104
5105 switch (GET_CODE (x))
5106 {
5107 case ZERO_EXTEND:
5108 unsignedp = 1;
5109 case SIGN_EXTEND:
75473182
RS
5110 /* We can't necessarily use a const_int for a multiword mode;
5111 it depends on implicitly extending the value.
5112 Since we don't know the right way to extend it,
5113 we can't tell whether the implicit way is right.
5114
5115 Even for a mode that is no wider than a const_int,
5116 we can't win, because we need to sign extend one of its bits through
5117 the rest of it, and we don't know which bit. */
230d793d 5118 if (GET_CODE (XEXP (x, 0)) == CONST_INT)
75473182 5119 return x;
230d793d 5120
8079805d
RK
5121 /* Return if (subreg:MODE FROM 0) is not a safe replacement for
5122 (zero_extend:MODE FROM) or (sign_extend:MODE FROM). It is for any MEM
5123 because (SUBREG (MEM...)) is guaranteed to cause the MEM to be
5124 reloaded. If not for that, MEM's would very rarely be safe.
5125
5126 Reject MODEs bigger than a word, because we might not be able
5127 to reference a two-register group starting with an arbitrary register
5128 (and currently gen_lowpart might crash for a SUBREG). */
5129
5130 if (GET_MODE_SIZE (GET_MODE (XEXP (x, 0))) > UNITS_PER_WORD)
230d793d
RS
5131 return x;
5132
5133 len = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)));
5134 /* If the inner object has VOIDmode (the only way this can happen
5135 is if it is a ASM_OPERANDS), we can't do anything since we don't
5136 know how much masking to do. */
5137 if (len == 0)
5138 return x;
5139
5140 break;
5141
5142 case ZERO_EXTRACT:
5143 unsignedp = 1;
5144 case SIGN_EXTRACT:
5145 /* If the operand is a CLOBBER, just return it. */
5146 if (GET_CODE (XEXP (x, 0)) == CLOBBER)
5147 return XEXP (x, 0);
5148
5149 if (GET_CODE (XEXP (x, 1)) != CONST_INT
5150 || GET_CODE (XEXP (x, 2)) != CONST_INT
5151 || GET_MODE (XEXP (x, 0)) == VOIDmode)
5152 return x;
5153
5154 len = INTVAL (XEXP (x, 1));
5155 pos = INTVAL (XEXP (x, 2));
5156
5157 /* If this goes outside the object being extracted, replace the object
5158 with a (use (mem ...)) construct that only combine understands
5159 and is used only for this purpose. */
5160 if (len + pos > GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))))
38a448ca 5161 SUBST (XEXP (x, 0), gen_rtx_USE (GET_MODE (x), XEXP (x, 0)));
230d793d 5162
f76b9db2
ILT
5163 if (BITS_BIG_ENDIAN)
5164 pos = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - len - pos;
5165
230d793d
RS
5166 break;
5167
5168 default:
5169 return x;
5170 }
5171
0f13a422
ILT
5172 /* We can optimize some special cases of ZERO_EXTEND. */
5173 if (GET_CODE (x) == ZERO_EXTEND)
5174 {
5175 /* (zero_extend:DI (truncate:SI foo:DI)) is just foo:DI if we
5176 know that the last value didn't have any inappropriate bits
5177 set. */
5178 if (GET_CODE (XEXP (x, 0)) == TRUNCATE
5179 && GET_MODE (XEXP (XEXP (x, 0), 0)) == GET_MODE (x)
5180 && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
5181 && (nonzero_bits (XEXP (XEXP (x, 0), 0), GET_MODE (x))
5182 & ~ GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
5183 return XEXP (XEXP (x, 0), 0);
5184
5185 /* Likewise for (zero_extend:DI (subreg:SI foo:DI 0)). */
5186 if (GET_CODE (XEXP (x, 0)) == SUBREG
5187 && GET_MODE (SUBREG_REG (XEXP (x, 0))) == GET_MODE (x)
5188 && subreg_lowpart_p (XEXP (x, 0))
5189 && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
5190 && (nonzero_bits (SUBREG_REG (XEXP (x, 0)), GET_MODE (x))
5191 & ~ GET_MODE_MASK (GET_MODE (SUBREG_REG (x)))) == 0)
5192 return SUBREG_REG (XEXP (x, 0));
5193
5194 /* (zero_extend:DI (truncate:SI foo:DI)) is just foo:DI when foo
5195 is a comparison and STORE_FLAG_VALUE permits. This is like
5196 the first case, but it works even when GET_MODE (x) is larger
5197 than HOST_WIDE_INT. */
5198 if (GET_CODE (XEXP (x, 0)) == TRUNCATE
5199 && GET_MODE (XEXP (XEXP (x, 0), 0)) == GET_MODE (x)
5200 && GET_RTX_CLASS (GET_CODE (XEXP (XEXP (x, 0), 0))) == '<'
5201 && (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
5202 <= HOST_BITS_PER_WIDE_INT)
5203 && ((HOST_WIDE_INT) STORE_FLAG_VALUE
5204 & ~ GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
5205 return XEXP (XEXP (x, 0), 0);
5206
5207 /* Likewise for (zero_extend:DI (subreg:SI foo:DI 0)). */
5208 if (GET_CODE (XEXP (x, 0)) == SUBREG
5209 && GET_MODE (SUBREG_REG (XEXP (x, 0))) == GET_MODE (x)
5210 && subreg_lowpart_p (XEXP (x, 0))
5211 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0)))) == '<'
5212 && (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
5213 <= HOST_BITS_PER_WIDE_INT)
5214 && ((HOST_WIDE_INT) STORE_FLAG_VALUE
5215 & ~ GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
5216 return SUBREG_REG (XEXP (x, 0));
5217
5218 /* If sign extension is cheaper than zero extension, then use it
5219 if we know that no extraneous bits are set, and that the high
5220 bit is not set. */
5221 if (flag_expensive_optimizations
5222 && ((GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
5223 && ((nonzero_bits (XEXP (x, 0), GET_MODE (x))
5224 & ~ (((unsigned HOST_WIDE_INT)
5225 GET_MODE_MASK (GET_MODE (XEXP (x, 0))))
5226 >> 1))
5227 == 0))
5228 || (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
5229 && (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
5230 <= HOST_BITS_PER_WIDE_INT)
5231 && (((HOST_WIDE_INT) STORE_FLAG_VALUE
5232 & ~ (((unsigned HOST_WIDE_INT)
5233 GET_MODE_MASK (GET_MODE (XEXP (x, 0))))
5234 >> 1))
5235 == 0))))
5236 {
38a448ca 5237 rtx temp = gen_rtx_SIGN_EXTEND (GET_MODE (x), XEXP (x, 0));
0f13a422
ILT
5238
5239 if (rtx_cost (temp, SET) < rtx_cost (x, SET))
5240 return expand_compound_operation (temp);
5241 }
5242 }
5243
230d793d
RS
5244 /* If we reach here, we want to return a pair of shifts. The inner
5245 shift is a left shift of BITSIZE - POS - LEN bits. The outer
5246 shift is a right shift of BITSIZE - LEN bits. It is arithmetic or
5247 logical depending on the value of UNSIGNEDP.
5248
5249 If this was a ZERO_EXTEND or ZERO_EXTRACT, this pair of shifts will be
5250 converted into an AND of a shift.
5251
5252 We must check for the case where the left shift would have a negative
5253 count. This can happen in a case like (x >> 31) & 255 on machines
5254 that can't shift by a constant. On those machines, we would first
5255 combine the shift with the AND to produce a variable-position
5256 extraction. Then the constant of 31 would be substituted in to produce
5257 a such a position. */
5258
5259 modewidth = GET_MODE_BITSIZE (GET_MODE (x));
5260 if (modewidth >= pos - len)
5f4f0e22 5261 tem = simplify_shift_const (NULL_RTX, unsignedp ? LSHIFTRT : ASHIFTRT,
230d793d 5262 GET_MODE (x),
5f4f0e22
CH
5263 simplify_shift_const (NULL_RTX, ASHIFT,
5264 GET_MODE (x),
230d793d
RS
5265 XEXP (x, 0),
5266 modewidth - pos - len),
5267 modewidth - len);
5268
5f4f0e22
CH
5269 else if (unsignedp && len < HOST_BITS_PER_WIDE_INT)
5270 tem = simplify_and_const_int (NULL_RTX, GET_MODE (x),
5271 simplify_shift_const (NULL_RTX, LSHIFTRT,
230d793d
RS
5272 GET_MODE (x),
5273 XEXP (x, 0), pos),
5f4f0e22 5274 ((HOST_WIDE_INT) 1 << len) - 1);
230d793d
RS
5275 else
5276 /* Any other cases we can't handle. */
5277 return x;
5278
5279
5280 /* If we couldn't do this for some reason, return the original
5281 expression. */
5282 if (GET_CODE (tem) == CLOBBER)
5283 return x;
5284
5285 return tem;
5286}
5287\f
5288/* X is a SET which contains an assignment of one object into
5289 a part of another (such as a bit-field assignment, STRICT_LOW_PART,
5290 or certain SUBREGS). If possible, convert it into a series of
5291 logical operations.
5292
5293 We half-heartedly support variable positions, but do not at all
5294 support variable lengths. */
5295
static rtx
expand_field_assignment (x)
     rtx x;
{
  rtx inner;			/* Object whose bits are being assigned.  */
  rtx pos;			/* Always counts from low bit.  */
  int len;			/* Width in bits of the assigned field.  */
  rtx mask;			/* CONST_INT of LEN low-order one bits.  */
  enum machine_mode compute_mode;

  /* Loop until we find something we can't simplify.  Each iteration either
     rewrites X into an equivalent SET and retries, or breaks out with the
     final form.  */
  while (1)
    {
      /* (set (strict_low_part (subreg ...)) ...): the field is the whole
	 narrow mode, positioned at the subreg's word offset.  */
      if (GET_CODE (SET_DEST (x)) == STRICT_LOW_PART
	  && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG)
	{
	  inner = SUBREG_REG (XEXP (SET_DEST (x), 0));
	  len = GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)));
	  pos = GEN_INT (BITS_PER_WORD * SUBREG_WORD (XEXP (SET_DEST (x), 0)));
	}
      /* (set (zero_extract OBJ LEN POS) ...) with constant LEN.  */
      else if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
	       && GET_CODE (XEXP (SET_DEST (x), 1)) == CONST_INT)
	{
	  inner = XEXP (SET_DEST (x), 0);
	  len = INTVAL (XEXP (SET_DEST (x), 1));
	  pos = XEXP (SET_DEST (x), 2);

	  /* If the position is constant and spans the width of INNER,
	     surround INNER with a USE to indicate this.  */
	  if (GET_CODE (pos) == CONST_INT
	      && INTVAL (pos) + len > GET_MODE_BITSIZE (GET_MODE (inner)))
	    inner = gen_rtx_USE (GET_MODE (SET_DEST (x)), inner);

	  /* ZERO_EXTRACT positions follow the target's bit numbering;
	     normalize POS to count from the low-order bit.  */
	  if (BITS_BIG_ENDIAN)
	    {
	      if (GET_CODE (pos) == CONST_INT)
		pos = GEN_INT (GET_MODE_BITSIZE (GET_MODE (inner)) - len
			       - INTVAL (pos));
	      else if (GET_CODE (pos) == MINUS
		       && GET_CODE (XEXP (pos, 1)) == CONST_INT
		       && (INTVAL (XEXP (pos, 1))
			   == GET_MODE_BITSIZE (GET_MODE (inner)) - len))
		/* If position is ADJUST - X, new position is X.  */
		pos = XEXP (pos, 0);
	      else
		pos = gen_binary (MINUS, GET_MODE (pos),
				  GEN_INT (GET_MODE_BITSIZE (GET_MODE (inner))
					   - len),
				  pos);
	    }
	}

      /* A SUBREG between two modes that occupy the same numbers of words
	 can be done by moving the SUBREG to the source.  */
      else if (GET_CODE (SET_DEST (x)) == SUBREG
	       && (((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
		     + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
		   == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
			+ (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
	{
	  x = gen_rtx_SET (VOIDmode, SUBREG_REG (SET_DEST (x)),
			   gen_lowpart_for_combine (GET_MODE (SUBREG_REG (SET_DEST (x))),
						    SET_SRC (x)));
	  continue;
	}
      else
	break;

      /* Strip lowpart SUBREGs so COMPUTE_MODE below is the mode of the
	 underlying object.  */
      while (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner))
	inner = SUBREG_REG (inner);

      compute_mode = GET_MODE (inner);

      /* Compute a mask of LEN bits, if we can do this on the host machine.  */
      if (len < HOST_BITS_PER_WIDE_INT)
	mask = GEN_INT (((HOST_WIDE_INT) 1 << len) - 1);
      else
	break;

      /* Now compute the equivalent expression.  Make a copy of INNER
	 for the SET_DEST in case it is a MEM into which we will substitute;
	 we don't want shared RTL in that case.
	 The result is (set INNER (ior (and INNER (not (MASK << POS)))
				       ((SRC & MASK) << POS))).  */
      x = gen_rtx_SET (VOIDmode, copy_rtx (inner),
		       gen_binary (IOR, compute_mode,
				   gen_binary (AND, compute_mode,
					       gen_unary (NOT, compute_mode,
							  compute_mode,
							  gen_binary (ASHIFT,
								      compute_mode,
								      mask, pos)),
					       inner),
				   gen_binary (ASHIFT, compute_mode,
					       gen_binary (AND, compute_mode,
							   gen_lowpart_for_combine
							   (compute_mode,
							    SET_SRC (x)),
							   mask),
					       pos)));
    }

  return x;
}
5398\f
8999a12e
RK
5399/* Return an RTX for a reference to LEN bits of INNER. If POS_RTX is nonzero,
5400 it is an RTX that represents a variable starting position; otherwise,
5401 POS is the (constant) starting bit position (counted from the LSB).
230d793d
RS
5402
5403 INNER may be a USE. This will occur when we started with a bitfield
5404 that went outside the boundary of the object in memory, which is
5405 allowed on most machines. To isolate this case, we produce a USE
5406 whose mode is wide enough and surround the MEM with it. The only
5407 code that understands the USE is this routine. If it is not removed,
5408 it will cause the resulting insn not to match.
5409
5410 UNSIGNEDP is non-zero for an unsigned reference and zero for a
5411 signed reference.
5412
5413 IN_DEST is non-zero if this is a reference in the destination of a
5414 SET. This is used when a ZERO_ or SIGN_EXTRACT isn't needed. If non-zero,
5415 a STRICT_LOW_PART will be used, if zero, ZERO_EXTEND or SIGN_EXTEND will
5416 be used.
5417
5418 IN_COMPARE is non-zero if we are in a COMPARE. This means that a
5419 ZERO_EXTRACT should be built even for bits starting at bit 0.
5420
76184def
DE
5421 MODE is the desired mode of the result (if IN_DEST == 0).
5422
5423 The result is an RTX for the extraction or NULL_RTX if the target
5424 can't handle it. */
230d793d
RS
5425
static rtx
make_extraction (mode, inner, pos, pos_rtx, len,
		 unsignedp, in_dest, in_compare)
     enum machine_mode mode;
     rtx inner;
     int pos;
     rtx pos_rtx;
     int len;
     int unsignedp;
     int in_dest, in_compare;
{
  /* This mode describes the size of the storage area
     to fetch the overall value from.  Within that, we
     ignore the POS lowest bits, etc.  */
  enum machine_mode is_mode = GET_MODE (inner);
  enum machine_mode inner_mode;
  enum machine_mode wanted_inner_mode = byte_mode;
  enum machine_mode wanted_inner_reg_mode = word_mode;
  enum machine_mode pos_mode = word_mode;
  enum machine_mode extraction_mode = word_mode;
  /* Smallest integer mode that holds exactly LEN bits, or BLKmode.  */
  enum machine_mode tmode = mode_for_size (len, MODE_INT, 1);
  int spans_byte = 0;
  rtx new = 0;
  rtx orig_pos_rtx = pos_rtx;	/* Kept so we can reuse the caller's rtx.  */
  int orig_pos;			/* POS before any endianness conversion.  */

  /* Get some information about INNER and get the innermost object.  */
  if (GET_CODE (inner) == USE)
    /* (use:SI (mem:QI foo)) stands for (mem:SI foo).  */
    /* We don't need to adjust the position because we set up the USE
       to pretend that it was a full-word object.  */
    spans_byte = 1, inner = XEXP (inner, 0);
  else if (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner))
    {
      /* If going from (subreg:SI (mem:QI ...)) to (mem:QI ...),
	 consider just the QI as the memory to extract from.
	 The subreg adds or removes high bits; its mode is
	 irrelevant to the meaning of this extraction,
	 since POS and LEN count from the lsb.  */
      if (GET_CODE (SUBREG_REG (inner)) == MEM)
	is_mode = GET_MODE (SUBREG_REG (inner));
      inner = SUBREG_REG (inner);
    }

  inner_mode = GET_MODE (inner);

  /* Canonicalize: a constant POS_RTX becomes the integer POS.  */
  if (pos_rtx && GET_CODE (pos_rtx) == CONST_INT)
    pos = INTVAL (pos_rtx), pos_rtx = 0;

  /* See if this can be done without an extraction.  We never can if the
     width of the field is not the same as that of some integer mode. For
     registers, we can only avoid the extraction if the position is at the
     low-order bit and this is either not in the destination or we have the
     appropriate STRICT_LOW_PART operation available.

     For MEM, we can avoid an extract if the field starts on an appropriate
     boundary and we can change the mode of the memory reference.  However,
     we cannot directly access the MEM if we have a USE and the underlying
     MEM is not TMODE.  This combination means that MEM was being used in a
     context where bits outside its mode were being referenced; that is only
     valid in bit-field insns.  */

  if (tmode != BLKmode
      && ! (spans_byte && inner_mode != tmode)
      && ((pos_rtx == 0 && (pos % BITS_PER_WORD) == 0
	   && GET_CODE (inner) != MEM
	   && (! in_dest
	       || (GET_CODE (inner) == REG
		   && (movstrict_optab->handlers[(int) tmode].insn_code
		       != CODE_FOR_nothing))))
	  || (GET_CODE (inner) == MEM && pos_rtx == 0
	      && (pos
		  % (STRICT_ALIGNMENT ? GET_MODE_ALIGNMENT (tmode)
		     : BITS_PER_UNIT)) == 0
	      /* We can't do this if we are widening INNER_MODE (it
		 may not be aligned, for one thing).  */
	      && GET_MODE_BITSIZE (inner_mode) >= GET_MODE_BITSIZE (tmode)
	      && (inner_mode == tmode
		  || (! mode_dependent_address_p (XEXP (inner, 0))
		      && ! MEM_VOLATILE_P (inner))))))
    {
      /* If INNER is a MEM, make a new MEM that encompasses just the desired
	 field.  If the original and current mode are the same, we need not
	 adjust the offset.  Otherwise, we do if bytes big endian.

	 If INNER is not a MEM, get a piece consisting of just the field
	 of interest (in this case POS % BITS_PER_WORD must be 0).  */

      if (GET_CODE (inner) == MEM)
	{
	  int offset;
	  /* POS counts from lsb, but make OFFSET count in memory order.  */
	  if (BYTES_BIG_ENDIAN)
	    offset = (GET_MODE_BITSIZE (is_mode) - len - pos) / BITS_PER_UNIT;
	  else
	    offset = pos / BITS_PER_UNIT;

	  new = gen_rtx_MEM (tmode, plus_constant (XEXP (inner, 0), offset));
	  /* Carry the memory attributes over to the narrowed MEM.  */
	  RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (inner);
	  MEM_VOLATILE_P (new) = MEM_VOLATILE_P (inner);
	  MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (inner);
	}
      else if (GET_CODE (inner) == REG)
	{
	  /* We can't call gen_lowpart_for_combine here since we always want
	     a SUBREG and it would sometimes return a new hard register.  */
	  if (tmode != inner_mode)
	    new = gen_rtx_SUBREG (tmode, inner,
				  (WORDS_BIG_ENDIAN
				   && GET_MODE_SIZE (inner_mode) > UNITS_PER_WORD
				   ? (((GET_MODE_SIZE (inner_mode)
					- GET_MODE_SIZE (tmode))
				       / UNITS_PER_WORD)
				      - pos / BITS_PER_WORD)
				   : pos / BITS_PER_WORD));
	  else
	    new = inner;
	}
      else
	new = force_to_mode (inner, tmode,
			     len >= HOST_BITS_PER_WIDE_INT
			     ? GET_MODE_MASK (tmode)
			     : ((HOST_WIDE_INT) 1 << len) - 1,
			     NULL_RTX, 0);

      /* If this extraction is going into the destination of a SET,
	 make a STRICT_LOW_PART unless we made a MEM.  */

      if (in_dest)
	return (GET_CODE (new) == MEM ? new
		: (GET_CODE (new) != SUBREG
		   ? gen_rtx_CLOBBER (tmode, const0_rtx)
		   : gen_rtx_combine (STRICT_LOW_PART, VOIDmode, new)));

      /* Otherwise, sign- or zero-extend unless we already are in the
	 proper mode.  */

      return (mode == tmode ? new
	      : gen_rtx_combine (unsignedp ? ZERO_EXTEND : SIGN_EXTEND,
				 mode, new));
    }

  /* Unless this is a COMPARE or we have a funny memory reference,
     don't do anything with zero-extending field extracts starting at
     the low-order bit since they are simple AND operations.  */
  if (pos_rtx == 0 && pos == 0 && ! in_dest
      && ! in_compare && ! spans_byte && unsignedp)
    return 0;

  /* Unless we are allowed to span bytes, reject this if we would be
     spanning bytes or if the position is not a constant and the length
     is not 1.  In all other cases, we would only be going outside
     our object in cases when an original shift would have been
     undefined.  */
  if (! spans_byte
      && ((pos_rtx == 0 && pos + len > GET_MODE_BITSIZE (is_mode))
	  || (pos_rtx != 0 && len != 1)))
    return 0;

  /* Get the mode to use should INNER not be a MEM, the mode for the position,
     and the mode for the result.  These come from the target's insv/extzv/
     extv operand modes when the corresponding patterns exist.  */
#ifdef HAVE_insv
  if (in_dest)
    {
      wanted_inner_reg_mode = insn_operand_mode[(int) CODE_FOR_insv][0];
      pos_mode = insn_operand_mode[(int) CODE_FOR_insv][2];
      extraction_mode = insn_operand_mode[(int) CODE_FOR_insv][3];
    }
#endif

#ifdef HAVE_extzv
  if (! in_dest && unsignedp)
    {
      wanted_inner_reg_mode = insn_operand_mode[(int) CODE_FOR_extzv][1];
      pos_mode = insn_operand_mode[(int) CODE_FOR_extzv][3];
      extraction_mode = insn_operand_mode[(int) CODE_FOR_extzv][0];
    }
#endif

#ifdef HAVE_extv
  if (! in_dest && ! unsignedp)
    {
      wanted_inner_reg_mode = insn_operand_mode[(int) CODE_FOR_extv][1];
      pos_mode = insn_operand_mode[(int) CODE_FOR_extv][3];
      extraction_mode = insn_operand_mode[(int) CODE_FOR_extv][0];
    }
#endif

  /* Never narrow an object, since that might not be safe.  */

  if (mode != VOIDmode
      && GET_MODE_SIZE (extraction_mode) < GET_MODE_SIZE (mode))
    extraction_mode = mode;

  if (pos_rtx && GET_MODE (pos_rtx) != VOIDmode
      && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx)))
    pos_mode = GET_MODE (pos_rtx);

  /* If this is not from memory, the desired mode is wanted_inner_reg_mode;
     if we have to change the mode of memory and cannot, the desired mode is
     EXTRACTION_MODE.  */
  if (GET_CODE (inner) != MEM)
    wanted_inner_mode = wanted_inner_reg_mode;
  else if (inner_mode != wanted_inner_mode
	   && (mode_dependent_address_p (XEXP (inner, 0))
	       || MEM_VOLATILE_P (inner)))
    wanted_inner_mode = extraction_mode;

  orig_pos = pos;

  if (BITS_BIG_ENDIAN)
    {
      /* POS is passed as if BITS_BIG_ENDIAN == 0, so we need to convert it to
	 BITS_BIG_ENDIAN style.  If position is constant, compute new
	 position.  Otherwise, build subtraction.
	 Note that POS is relative to the mode of the original argument.
	 If it's a MEM we need to recompute POS relative to that.
	 However, if we're extracting from (or inserting into) a register,
	 we want to recompute POS relative to wanted_inner_mode.  */
      int width = (GET_CODE (inner) == MEM
		   ? GET_MODE_BITSIZE (is_mode)
		   : GET_MODE_BITSIZE (wanted_inner_mode));

      if (pos_rtx == 0)
	pos = width - len - pos;
      else
	pos_rtx
	  = gen_rtx_combine (MINUS, GET_MODE (pos_rtx),
			     GEN_INT (width - len), pos_rtx);
      /* POS may be less than 0 now, but we check for that below.
	 Note that it can only be less than 0 if GET_CODE (inner) != MEM.  */
    }

  /* If INNER has a wider mode, make it smaller.  If this is a constant
     extract, try to adjust the byte to point to the byte containing
     the value.  */
  if (wanted_inner_mode != VOIDmode
      && GET_MODE_SIZE (wanted_inner_mode) < GET_MODE_SIZE (is_mode)
      && ((GET_CODE (inner) == MEM
	   && (inner_mode == wanted_inner_mode
	       || (! mode_dependent_address_p (XEXP (inner, 0))
		   && ! MEM_VOLATILE_P (inner))))))
    {
      int offset = 0;

      /* The computations below will be correct if the machine is big
	 endian in both bits and bytes or little endian in bits and bytes.
	 If it is mixed, we must adjust.  */

      /* If bytes are big endian and we had a paradoxical SUBREG, we must
	 adjust OFFSET to compensate.  */
      if (BYTES_BIG_ENDIAN
	  && ! spans_byte
	  && GET_MODE_SIZE (inner_mode) < GET_MODE_SIZE (is_mode))
	offset -= GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (inner_mode);

      /* If this is a constant position, we can move to the desired byte.  */
      if (pos_rtx == 0)
	{
	  offset += pos / BITS_PER_UNIT;
	  pos %= GET_MODE_BITSIZE (wanted_inner_mode);
	}

      /* Mixed bit/byte endianness: count the offset from the other end.  */
      if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN
	  && ! spans_byte
	  && is_mode != wanted_inner_mode)
	offset = (GET_MODE_SIZE (is_mode)
		  - GET_MODE_SIZE (wanted_inner_mode) - offset);

      if (offset != 0 || inner_mode != wanted_inner_mode)
	{
	  rtx newmem = gen_rtx_MEM (wanted_inner_mode,
				    plus_constant (XEXP (inner, 0), offset));
	  /* Carry the memory attributes over to the adjusted MEM.  */
	  RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (inner);
	  MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (inner);
	  MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (inner);
	  inner = newmem;
	}
    }

  /* If INNER is not memory, we can always get it into the proper mode.  If we
     are changing its mode, POS must be a constant and smaller than the size
     of the new mode.  */
  else if (GET_CODE (inner) != MEM)
    {
      if (GET_MODE (inner) != wanted_inner_mode
	  && (pos_rtx != 0
	      || orig_pos + len > GET_MODE_BITSIZE (wanted_inner_mode)))
	return 0;

      inner = force_to_mode (inner, wanted_inner_mode,
			     pos_rtx
			     || len + orig_pos >= HOST_BITS_PER_WIDE_INT
			     ? GET_MODE_MASK (wanted_inner_mode)
			     : (((HOST_WIDE_INT) 1 << len) - 1) << orig_pos,
			     NULL_RTX, 0);
    }

  /* Adjust mode of POS_RTX, if needed.  If we want a wider mode, we
     have to zero extend.  Otherwise, we can just use a SUBREG.  */
  if (pos_rtx != 0
      && GET_MODE_SIZE (pos_mode) > GET_MODE_SIZE (GET_MODE (pos_rtx)))
    pos_rtx = gen_rtx_combine (ZERO_EXTEND, pos_mode, pos_rtx);
  else if (pos_rtx != 0
	   && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx)))
    pos_rtx = gen_lowpart_for_combine (pos_mode, pos_rtx);

  /* Make POS_RTX unless we already have it and it is correct.  If we don't
     have a POS_RTX but we do have an ORIG_POS_RTX, the latter must
     be a CONST_INT.  */
  if (pos_rtx == 0 && orig_pos_rtx != 0 && INTVAL (orig_pos_rtx) == pos)
    pos_rtx = orig_pos_rtx;

  else if (pos_rtx == 0)
    pos_rtx = GEN_INT (pos);

  /* Make the required operation.  See if we can use existing rtx.  */
  new = gen_rtx_combine (unsignedp ? ZERO_EXTRACT : SIGN_EXTRACT,
			 extraction_mode, inner, GEN_INT (len), pos_rtx);
  if (! in_dest)
    new = gen_lowpart_for_combine (mode, new);

  return new;
}
5750\f
71923da7
RK
5751/* See if X contains an ASHIFT of COUNT or more bits that can be commuted
5752 with any other operations in X. Return X without that shift if so. */
5753
5754static rtx
5755extract_left_shift (x, count)
5756 rtx x;
5757 int count;
5758{
5759 enum rtx_code code = GET_CODE (x);
5760 enum machine_mode mode = GET_MODE (x);
5761 rtx tem;
5762
5763 switch (code)
5764 {
5765 case ASHIFT:
5766 /* This is the shift itself. If it is wide enough, we will return
5767 either the value being shifted if the shift count is equal to
5768 COUNT or a shift for the difference. */
5769 if (GET_CODE (XEXP (x, 1)) == CONST_INT
5770 && INTVAL (XEXP (x, 1)) >= count)
5771 return simplify_shift_const (NULL_RTX, ASHIFT, mode, XEXP (x, 0),
5772 INTVAL (XEXP (x, 1)) - count);
5773 break;
5774
5775 case NEG: case NOT:
5776 if ((tem = extract_left_shift (XEXP (x, 0), count)) != 0)
0c1c8ea6 5777 return gen_unary (code, mode, mode, tem);
71923da7
RK
5778
5779 break;
5780
5781 case PLUS: case IOR: case XOR: case AND:
5782 /* If we can safely shift this constant and we find the inner shift,
5783 make a new operation. */
5784 if (GET_CODE (XEXP (x,1)) == CONST_INT
b729186a 5785 && (INTVAL (XEXP (x, 1)) & ((((HOST_WIDE_INT) 1 << count)) - 1)) == 0
71923da7
RK
5786 && (tem = extract_left_shift (XEXP (x, 0), count)) != 0)
5787 return gen_binary (code, mode, tem,
5788 GEN_INT (INTVAL (XEXP (x, 1)) >> count));
5789
5790 break;
e9a25f70
JL
5791
5792 default:
5793 break;
71923da7
RK
5794 }
5795
5796 return 0;
5797}
5798\f
230d793d
RS
5799/* Look at the expression rooted at X. Look for expressions
5800 equivalent to ZERO_EXTRACT, SIGN_EXTRACT, ZERO_EXTEND, SIGN_EXTEND.
5801 Form these expressions.
5802
5803 Return the new rtx, usually just X.
5804
5805 Also, for machines like the Vax that don't have logical shift insns,
5806 try to convert logical to arithmetic shift operations in cases where
5807 they are equivalent. This undoes the canonicalizations to logical
5808 shifts done elsewhere.
5809
5810 We try, as much as possible, to re-use rtl expressions to save memory.
5811
5812 IN_CODE says what kind of expression we are processing. Normally, it is
42495ca0
RK
5813 SET. In a memory address (inside a MEM, PLUS or minus, the latter two
5814 being kludges), it is MEM. When processing the arguments of a comparison
230d793d
RS
5815 or a COMPARE against zero, it is COMPARE. */
5816
5817static rtx
5818make_compound_operation (x, in_code)
5819 rtx x;
5820 enum rtx_code in_code;
5821{
5822 enum rtx_code code = GET_CODE (x);
5823 enum machine_mode mode = GET_MODE (x);
5824 int mode_width = GET_MODE_BITSIZE (mode);
71923da7 5825 rtx rhs, lhs;
230d793d 5826 enum rtx_code next_code;
f24ad0e4 5827 int i;
230d793d 5828 rtx new = 0;
280f58ba 5829 rtx tem;
230d793d
RS
5830 char *fmt;
5831
5832 /* Select the code to be used in recursive calls. Once we are inside an
5833 address, we stay there. If we have a comparison, set to COMPARE,
5834 but once inside, go back to our default of SET. */
5835
42495ca0 5836 next_code = (code == MEM || code == PLUS || code == MINUS ? MEM
230d793d
RS
5837 : ((code == COMPARE || GET_RTX_CLASS (code) == '<')
5838 && XEXP (x, 1) == const0_rtx) ? COMPARE
5839 : in_code == COMPARE ? SET : in_code);
5840
5841 /* Process depending on the code of this operation. If NEW is set
5842 non-zero, it will be returned. */
5843
5844 switch (code)
5845 {
5846 case ASHIFT:
230d793d
RS
5847 /* Convert shifts by constants into multiplications if inside
5848 an address. */
5849 if (in_code == MEM && GET_CODE (XEXP (x, 1)) == CONST_INT
5f4f0e22 5850 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
230d793d 5851 && INTVAL (XEXP (x, 1)) >= 0)
280f58ba
RK
5852 {
5853 new = make_compound_operation (XEXP (x, 0), next_code);
5854 new = gen_rtx_combine (MULT, mode, new,
5855 GEN_INT ((HOST_WIDE_INT) 1
5856 << INTVAL (XEXP (x, 1))));
5857 }
230d793d
RS
5858 break;
5859
5860 case AND:
5861 /* If the second operand is not a constant, we can't do anything
5862 with it. */
5863 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5864 break;
5865
5866 /* If the constant is a power of two minus one and the first operand
5867 is a logical right shift, make an extraction. */
5868 if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
5869 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
280f58ba
RK
5870 {
5871 new = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code);
5872 new = make_extraction (mode, new, 0, XEXP (XEXP (x, 0), 1), i, 1,
5873 0, in_code == COMPARE);
5874 }
dfbe1b2f 5875
230d793d
RS
5876 /* Same as previous, but for (subreg (lshiftrt ...)) in first op. */
5877 else if (GET_CODE (XEXP (x, 0)) == SUBREG
5878 && subreg_lowpart_p (XEXP (x, 0))
5879 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == LSHIFTRT
5880 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
280f58ba
RK
5881 {
5882 new = make_compound_operation (XEXP (SUBREG_REG (XEXP (x, 0)), 0),
5883 next_code);
2f99f437 5884 new = make_extraction (GET_MODE (SUBREG_REG (XEXP (x, 0))), new, 0,
280f58ba
RK
5885 XEXP (SUBREG_REG (XEXP (x, 0)), 1), i, 1,
5886 0, in_code == COMPARE);
5887 }
45620ed4 5888 /* Same as previous, but for (xor/ior (lshiftrt...) (lshiftrt...)). */
c2f9f64e
JW
5889 else if ((GET_CODE (XEXP (x, 0)) == XOR
5890 || GET_CODE (XEXP (x, 0)) == IOR)
5891 && GET_CODE (XEXP (XEXP (x, 0), 0)) == LSHIFTRT
5892 && GET_CODE (XEXP (XEXP (x, 0), 1)) == LSHIFTRT
5893 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
5894 {
5895 /* Apply the distributive law, and then try to make extractions. */
5896 new = gen_rtx_combine (GET_CODE (XEXP (x, 0)), mode,
38a448ca
RH
5897 gen_rtx_AND (mode, XEXP (XEXP (x, 0), 0),
5898 XEXP (x, 1)),
5899 gen_rtx_AND (mode, XEXP (XEXP (x, 0), 1),
5900 XEXP (x, 1)));
c2f9f64e
JW
5901 new = make_compound_operation (new, in_code);
5902 }
a7c99304
RK
5903
5904 /* If we are have (and (rotate X C) M) and C is larger than the number
5905 of bits in M, this is an extraction. */
5906
5907 else if (GET_CODE (XEXP (x, 0)) == ROTATE
5908 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
5909 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0
5910 && i <= INTVAL (XEXP (XEXP (x, 0), 1)))
280f58ba
RK
5911 {
5912 new = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code);
5913 new = make_extraction (mode, new,
5914 (GET_MODE_BITSIZE (mode)
5915 - INTVAL (XEXP (XEXP (x, 0), 1))),
5916 NULL_RTX, i, 1, 0, in_code == COMPARE);
5917 }
a7c99304
RK
5918
5919 /* On machines without logical shifts, if the operand of the AND is
230d793d
RS
5920 a logical shift and our mask turns off all the propagated sign
5921 bits, we can replace the logical shift with an arithmetic shift. */
d0ab8cd3
RK
5922 else if (ashr_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
5923 && (lshr_optab->handlers[(int) mode].insn_code
5924 == CODE_FOR_nothing)
230d793d
RS
5925 && GET_CODE (XEXP (x, 0)) == LSHIFTRT
5926 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
5927 && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
5f4f0e22
CH
5928 && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT
5929 && mode_width <= HOST_BITS_PER_WIDE_INT)
230d793d 5930 {
5f4f0e22 5931 unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
230d793d
RS
5932
5933 mask >>= INTVAL (XEXP (XEXP (x, 0), 1));
5934 if ((INTVAL (XEXP (x, 1)) & ~mask) == 0)
5935 SUBST (XEXP (x, 0),
280f58ba
RK
5936 gen_rtx_combine (ASHIFTRT, mode,
5937 make_compound_operation (XEXP (XEXP (x, 0), 0),
5938 next_code),
230d793d
RS
5939 XEXP (XEXP (x, 0), 1)));
5940 }
5941
5942 /* If the constant is one less than a power of two, this might be
5943 representable by an extraction even if no shift is present.
5944 If it doesn't end up being a ZERO_EXTEND, we will ignore it unless
5945 we are in a COMPARE. */
5946 else if ((i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
280f58ba
RK
5947 new = make_extraction (mode,
5948 make_compound_operation (XEXP (x, 0),
5949 next_code),
5950 0, NULL_RTX, i, 1, 0, in_code == COMPARE);
230d793d
RS
5951
5952 /* If we are in a comparison and this is an AND with a power of two,
5953 convert this into the appropriate bit extract. */
5954 else if (in_code == COMPARE
5955 && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0)
280f58ba
RK
5956 new = make_extraction (mode,
5957 make_compound_operation (XEXP (x, 0),
5958 next_code),
5959 i, NULL_RTX, 1, 1, 0, 1);
230d793d
RS
5960
5961 break;
5962
5963 case LSHIFTRT:
5964 /* If the sign bit is known to be zero, replace this with an
5965 arithmetic shift. */
d0ab8cd3
RK
5966 if (ashr_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing
5967 && lshr_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
5f4f0e22 5968 && mode_width <= HOST_BITS_PER_WIDE_INT
951553af 5969 && (nonzero_bits (XEXP (x, 0), mode) & (1 << (mode_width - 1))) == 0)
230d793d 5970 {
280f58ba
RK
5971 new = gen_rtx_combine (ASHIFTRT, mode,
5972 make_compound_operation (XEXP (x, 0),
5973 next_code),
5974 XEXP (x, 1));
230d793d
RS
5975 break;
5976 }
5977
0f41302f 5978 /* ... fall through ... */
230d793d
RS
5979
5980 case ASHIFTRT:
71923da7
RK
5981 lhs = XEXP (x, 0);
5982 rhs = XEXP (x, 1);
5983
230d793d
RS
5984 /* If we have (ashiftrt (ashift foo C1) C2) with C2 >= C1,
5985 this is a SIGN_EXTRACT. */
71923da7
RK
5986 if (GET_CODE (rhs) == CONST_INT
5987 && GET_CODE (lhs) == ASHIFT
5988 && GET_CODE (XEXP (lhs, 1)) == CONST_INT
5989 && INTVAL (rhs) >= INTVAL (XEXP (lhs, 1)))
280f58ba 5990 {
71923da7 5991 new = make_compound_operation (XEXP (lhs, 0), next_code);
280f58ba 5992 new = make_extraction (mode, new,
71923da7
RK
5993 INTVAL (rhs) - INTVAL (XEXP (lhs, 1)),
5994 NULL_RTX, mode_width - INTVAL (rhs),
d0ab8cd3
RK
5995 code == LSHIFTRT, 0, in_code == COMPARE);
5996 }
5997
71923da7
RK
5998 /* See if we have operations between an ASHIFTRT and an ASHIFT.
5999 If so, try to merge the shifts into a SIGN_EXTEND. We could
6000 also do this for some cases of SIGN_EXTRACT, but it doesn't
6001 seem worth the effort; the case checked for occurs on Alpha. */
6002
6003 if (GET_RTX_CLASS (GET_CODE (lhs)) != 'o'
6004 && ! (GET_CODE (lhs) == SUBREG
6005 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (lhs))) == 'o'))
6006 && GET_CODE (rhs) == CONST_INT
6007 && INTVAL (rhs) < HOST_BITS_PER_WIDE_INT
6008 && (new = extract_left_shift (lhs, INTVAL (rhs))) != 0)
6009 new = make_extraction (mode, make_compound_operation (new, next_code),
6010 0, NULL_RTX, mode_width - INTVAL (rhs),
6011 code == LSHIFTRT, 0, in_code == COMPARE);
6012
230d793d 6013 break;
280f58ba
RK
6014
6015 case SUBREG:
6016 /* Call ourselves recursively on the inner expression. If we are
6017 narrowing the object and it has a different RTL code from
6018 what it originally did, do this SUBREG as a force_to_mode. */
6019
0a5cbff6 6020 tem = make_compound_operation (SUBREG_REG (x), in_code);
280f58ba
RK
6021 if (GET_CODE (tem) != GET_CODE (SUBREG_REG (x))
6022 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (tem))
6023 && subreg_lowpart_p (x))
0a5cbff6
RK
6024 {
6025 rtx newer = force_to_mode (tem, mode,
e3d616e3 6026 GET_MODE_MASK (mode), NULL_RTX, 0);
0a5cbff6
RK
6027
6028 /* If we have something other than a SUBREG, we might have
6029 done an expansion, so rerun outselves. */
6030 if (GET_CODE (newer) != SUBREG)
6031 newer = make_compound_operation (newer, in_code);
6032
6033 return newer;
6034 }
e9a25f70
JL
6035 break;
6036
6037 default:
6038 break;
230d793d
RS
6039 }
6040
6041 if (new)
6042 {
df62f951 6043 x = gen_lowpart_for_combine (mode, new);
230d793d
RS
6044 code = GET_CODE (x);
6045 }
6046
6047 /* Now recursively process each operand of this operation. */
6048 fmt = GET_RTX_FORMAT (code);
6049 for (i = 0; i < GET_RTX_LENGTH (code); i++)
6050 if (fmt[i] == 'e')
6051 {
6052 new = make_compound_operation (XEXP (x, i), next_code);
6053 SUBST (XEXP (x, i), new);
6054 }
6055
6056 return x;
6057}
6058\f
6059/* Given M see if it is a value that would select a field of bits
6060 within an item, but not the entire word. Return -1 if not.
6061 Otherwise, return the starting position of the field, where 0 is the
6062 low-order bit.
6063
6064 *PLEN is set to the length of the field. */
6065
6066static int
6067get_pos_from_mask (m, plen)
5f4f0e22 6068 unsigned HOST_WIDE_INT m;
230d793d
RS
6069 int *plen;
6070{
6071 /* Get the bit number of the first 1 bit from the right, -1 if none. */
6072 int pos = exact_log2 (m & - m);
6073
6074 if (pos < 0)
6075 return -1;
6076
6077 /* Now shift off the low-order zero bits and see if we have a power of
6078 two minus 1. */
6079 *plen = exact_log2 ((m >> pos) + 1);
6080
6081 if (*plen <= 0)
6082 return -1;
6083
6084 return pos;
6085}
6086\f
6139ff20
RK
6087/* See if X can be simplified knowing that we will only refer to it in
6088 MODE and will only refer to those bits that are nonzero in MASK.
6089 If other bits are being computed or if masking operations are done
6090 that select a superset of the bits in MASK, they can sometimes be
6091 ignored.
6092
6093 Return a possibly simplified expression, but always convert X to
6094 MODE. If X is a CONST_INT, AND the CONST_INT with MASK.
dfbe1b2f
RK
6095
6096 Also, if REG is non-zero and X is a register equal in value to REG,
e3d616e3
RK
6097 replace X with REG.
6098
6099 If JUST_SELECT is nonzero, don't optimize by noticing that bits in MASK
6100 are all off in X. This is used when X will be complemented, by either
180b8e4b 6101 NOT, NEG, or XOR. */
dfbe1b2f
RK
6102
6103static rtx
e3d616e3 6104force_to_mode (x, mode, mask, reg, just_select)
dfbe1b2f
RK
6105 rtx x;
6106 enum machine_mode mode;
6139ff20 6107 unsigned HOST_WIDE_INT mask;
dfbe1b2f 6108 rtx reg;
e3d616e3 6109 int just_select;
dfbe1b2f
RK
6110{
6111 enum rtx_code code = GET_CODE (x);
180b8e4b 6112 int next_select = just_select || code == XOR || code == NOT || code == NEG;
ef026f91
RS
6113 enum machine_mode op_mode;
6114 unsigned HOST_WIDE_INT fuller_mask, nonzero;
6139ff20
RK
6115 rtx op0, op1, temp;
6116
132d2040
RK
6117 /* If this is a CALL or ASM_OPERANDS, don't do anything. Some of the
6118 code below will do the wrong thing since the mode of such an
be3d27d6
CI
6119 expression is VOIDmode.
6120
6121 Also do nothing if X is a CLOBBER; this can happen if X was
6122 the return value from a call to gen_lowpart_for_combine. */
6123 if (code == CALL || code == ASM_OPERANDS || code == CLOBBER)
246e00f2
RK
6124 return x;
6125
6139ff20
RK
6126 /* We want to perform the operation is its present mode unless we know
6127 that the operation is valid in MODE, in which case we do the operation
6128 in MODE. */
1c75dfa4
RK
6129 op_mode = ((GET_MODE_CLASS (mode) == GET_MODE_CLASS (GET_MODE (x))
6130 && code_to_optab[(int) code] != 0
ef026f91
RS
6131 && (code_to_optab[(int) code]->handlers[(int) mode].insn_code
6132 != CODE_FOR_nothing))
6133 ? mode : GET_MODE (x));
e3d616e3 6134
aa988991
RS
6135 /* It is not valid to do a right-shift in a narrower mode
6136 than the one it came in with. */
6137 if ((code == LSHIFTRT || code == ASHIFTRT)
6138 && GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (GET_MODE (x)))
6139 op_mode = GET_MODE (x);
ef026f91
RS
6140
6141 /* Truncate MASK to fit OP_MODE. */
6142 if (op_mode)
6143 mask &= GET_MODE_MASK (op_mode);
6139ff20
RK
6144
6145 /* When we have an arithmetic operation, or a shift whose count we
6146 do not know, we need to assume that all bit the up to the highest-order
6147 bit in MASK will be needed. This is how we form such a mask. */
ef026f91
RS
6148 if (op_mode)
6149 fuller_mask = (GET_MODE_BITSIZE (op_mode) >= HOST_BITS_PER_WIDE_INT
6150 ? GET_MODE_MASK (op_mode)
6151 : ((HOST_WIDE_INT) 1 << (floor_log2 (mask) + 1)) - 1);
6152 else
6153 fuller_mask = ~ (HOST_WIDE_INT) 0;
6154
6155 /* Determine what bits of X are guaranteed to be (non)zero. */
6156 nonzero = nonzero_bits (x, mode);
6139ff20
RK
6157
6158 /* If none of the bits in X are needed, return a zero. */
e3d616e3 6159 if (! just_select && (nonzero & mask) == 0)
6139ff20 6160 return const0_rtx;
dfbe1b2f 6161
6139ff20
RK
6162 /* If X is a CONST_INT, return a new one. Do this here since the
6163 test below will fail. */
6164 if (GET_CODE (x) == CONST_INT)
ceb7983c
RK
6165 {
6166 HOST_WIDE_INT cval = INTVAL (x) & mask;
6167 int width = GET_MODE_BITSIZE (mode);
6168
6169 /* If MODE is narrower that HOST_WIDE_INT and CVAL is a negative
6170 number, sign extend it. */
6171 if (width > 0 && width < HOST_BITS_PER_WIDE_INT
6172 && (cval & ((HOST_WIDE_INT) 1 << (width - 1))) != 0)
6173 cval |= (HOST_WIDE_INT) -1 << width;
6174
6175 return GEN_INT (cval);
6176 }
dfbe1b2f 6177
180b8e4b
RK
6178 /* If X is narrower than MODE and we want all the bits in X's mode, just
6179 get X in the proper mode. */
6180 if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (mode)
6181 && (GET_MODE_MASK (GET_MODE (x)) & ~ mask) == 0)
dfbe1b2f
RK
6182 return gen_lowpart_for_combine (mode, x);
6183
71923da7
RK
6184 /* If we aren't changing the mode, X is not a SUBREG, and all zero bits in
6185 MASK are already known to be zero in X, we need not do anything. */
6186 if (GET_MODE (x) == mode && code != SUBREG && (~ mask & nonzero) == 0)
6139ff20
RK
6187 return x;
6188
dfbe1b2f
RK
6189 switch (code)
6190 {
6139ff20
RK
6191 case CLOBBER:
6192 /* If X is a (clobber (const_int)), return it since we know we are
0f41302f 6193 generating something that won't match. */
6139ff20
RK
6194 return x;
6195
6139ff20
RK
6196 case USE:
6197 /* X is a (use (mem ..)) that was made from a bit-field extraction that
6198 spanned the boundary of the MEM. If we are now masking so it is
6199 within that boundary, we don't need the USE any more. */
f76b9db2
ILT
6200 if (! BITS_BIG_ENDIAN
6201 && (mask & ~ GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
e3d616e3 6202 return force_to_mode (XEXP (x, 0), mode, mask, reg, next_select);
f76b9db2 6203 break;
6139ff20 6204
dfbe1b2f
RK
6205 case SIGN_EXTEND:
6206 case ZERO_EXTEND:
6207 case ZERO_EXTRACT:
6208 case SIGN_EXTRACT:
6209 x = expand_compound_operation (x);
6210 if (GET_CODE (x) != code)
e3d616e3 6211 return force_to_mode (x, mode, mask, reg, next_select);
dfbe1b2f
RK
6212 break;
6213
6214 case REG:
6215 if (reg != 0 && (rtx_equal_p (get_last_value (reg), x)
6216 || rtx_equal_p (reg, get_last_value (x))))
6217 x = reg;
6218 break;
6219
dfbe1b2f 6220 case SUBREG:
6139ff20 6221 if (subreg_lowpart_p (x)
180b8e4b
RK
6222 /* We can ignore the effect of this SUBREG if it narrows the mode or
6223 if the constant masks to zero all the bits the mode doesn't
6224 have. */
6139ff20
RK
6225 && ((GET_MODE_SIZE (GET_MODE (x))
6226 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
6139ff20
RK
6227 || (0 == (mask
6228 & GET_MODE_MASK (GET_MODE (x))
180b8e4b 6229 & ~ GET_MODE_MASK (GET_MODE (SUBREG_REG (x)))))))
e3d616e3 6230 return force_to_mode (SUBREG_REG (x), mode, mask, reg, next_select);
dfbe1b2f
RK
6231 break;
6232
6233 case AND:
6139ff20
RK
6234 /* If this is an AND with a constant, convert it into an AND
6235 whose constant is the AND of that constant with MASK. If it
6236 remains an AND of MASK, delete it since it is redundant. */
dfbe1b2f 6237
2ca9ae17 6238 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
dfbe1b2f 6239 {
6139ff20
RK
6240 x = simplify_and_const_int (x, op_mode, XEXP (x, 0),
6241 mask & INTVAL (XEXP (x, 1)));
dfbe1b2f
RK
6242
6243 /* If X is still an AND, see if it is an AND with a mask that
71923da7
RK
6244 is just some low-order bits. If so, and it is MASK, we don't
6245 need it. */
dfbe1b2f
RK
6246
6247 if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT
6139ff20 6248 && INTVAL (XEXP (x, 1)) == mask)
dfbe1b2f 6249 x = XEXP (x, 0);
d0ab8cd3 6250
71923da7
RK
6251 /* If it remains an AND, try making another AND with the bits
6252 in the mode mask that aren't in MASK turned on. If the
6253 constant in the AND is wide enough, this might make a
6254 cheaper constant. */
6255
6256 if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT
2ca9ae17
JW
6257 && GET_MODE_MASK (GET_MODE (x)) != mask
6258 && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT)
71923da7
RK
6259 {
6260 HOST_WIDE_INT cval = (INTVAL (XEXP (x, 1))
6261 | (GET_MODE_MASK (GET_MODE (x)) & ~ mask));
6262 int width = GET_MODE_BITSIZE (GET_MODE (x));
6263 rtx y;
6264
6265 /* If MODE is narrower that HOST_WIDE_INT and CVAL is a negative
6266 number, sign extend it. */
6267 if (width > 0 && width < HOST_BITS_PER_WIDE_INT
6268 && (cval & ((HOST_WIDE_INT) 1 << (width - 1))) != 0)
6269 cval |= (HOST_WIDE_INT) -1 << width;
6270
6271 y = gen_binary (AND, GET_MODE (x), XEXP (x, 0), GEN_INT (cval));
6272 if (rtx_cost (y, SET) < rtx_cost (x, SET))
6273 x = y;
6274 }
6275
d0ab8cd3 6276 break;
dfbe1b2f
RK
6277 }
6278
6139ff20 6279 goto binop;
dfbe1b2f
RK
6280
6281 case PLUS:
6139ff20
RK
6282 /* In (and (plus FOO C1) M), if M is a mask that just turns off
6283 low-order bits (as in an alignment operation) and FOO is already
6284 aligned to that boundary, mask C1 to that boundary as well.
6285 This may eliminate that PLUS and, later, the AND. */
9fa6d012
TG
6286
6287 {
6288 int width = GET_MODE_BITSIZE (mode);
6289 unsigned HOST_WIDE_INT smask = mask;
6290
6291 /* If MODE is narrower than HOST_WIDE_INT and mask is a negative
6292 number, sign extend it. */
6293
6294 if (width < HOST_BITS_PER_WIDE_INT
6295 && (smask & ((HOST_WIDE_INT) 1 << (width - 1))) != 0)
6296 smask |= (HOST_WIDE_INT) -1 << width;
6297
6298 if (GET_CODE (XEXP (x, 1)) == CONST_INT
0e9ff885
DM
6299 && exact_log2 (- smask) >= 0)
6300 {
6301#ifdef STACK_BIAS
6302 if (STACK_BIAS
6303 && (XEXP (x, 0) == stack_pointer_rtx
6304 || XEXP (x, 0) == frame_pointer_rtx))
6305 {
6306 int sp_alignment = STACK_BOUNDARY / BITS_PER_UNIT;
6307 unsigned HOST_WIDE_INT sp_mask = GET_MODE_MASK (mode);
6308
6309 sp_mask &= ~ (sp_alignment - 1);
6310 if ((sp_mask & ~ mask) == 0
6311 && ((INTVAL (XEXP (x, 1)) - STACK_BIAS) & ~ mask) != 0)
6312 return force_to_mode (plus_constant (XEXP (x, 0),
6313 ((INTVAL (XEXP (x, 1)) -
6314 STACK_BIAS) & mask)
6315 + STACK_BIAS),
6316 mode, mask, reg, next_select);
6317 }
6318#endif
6319 if ((nonzero_bits (XEXP (x, 0), mode) & ~ mask) == 0
6320 && (INTVAL (XEXP (x, 1)) & ~ mask) != 0)
6321 return force_to_mode (plus_constant (XEXP (x, 0),
6322 INTVAL (XEXP (x, 1)) & mask),
6323 mode, mask, reg, next_select);
6324 }
9fa6d012 6325 }
6139ff20 6326
0f41302f 6327 /* ... fall through ... */
6139ff20 6328
dfbe1b2f
RK
6329 case MINUS:
6330 case MULT:
6139ff20
RK
6331 /* For PLUS, MINUS and MULT, we need any bits less significant than the
6332 most significant bit in MASK since carries from those bits will
6333 affect the bits we are interested in. */
6334 mask = fuller_mask;
6335 goto binop;
6336
dfbe1b2f
RK
6337 case IOR:
6338 case XOR:
6139ff20
RK
6339 /* If X is (ior (lshiftrt FOO C1) C2), try to commute the IOR and
6340 LSHIFTRT so we end up with an (and (lshiftrt (ior ...) ...) ...)
6341 operation which may be a bitfield extraction. Ensure that the
6342 constant we form is not wider than the mode of X. */
6343
6344 if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
6345 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
6346 && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
6347 && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT
6348 && GET_CODE (XEXP (x, 1)) == CONST_INT
6349 && ((INTVAL (XEXP (XEXP (x, 0), 1))
6350 + floor_log2 (INTVAL (XEXP (x, 1))))
6351 < GET_MODE_BITSIZE (GET_MODE (x)))
6352 && (INTVAL (XEXP (x, 1))
01c82bbb 6353 & ~ nonzero_bits (XEXP (x, 0), GET_MODE (x))) == 0)
6139ff20
RK
6354 {
6355 temp = GEN_INT ((INTVAL (XEXP (x, 1)) & mask)
6356 << INTVAL (XEXP (XEXP (x, 0), 1)));
6357 temp = gen_binary (GET_CODE (x), GET_MODE (x),
6358 XEXP (XEXP (x, 0), 0), temp);
d4d2b13f
RK
6359 x = gen_binary (LSHIFTRT, GET_MODE (x), temp,
6360 XEXP (XEXP (x, 0), 1));
e3d616e3 6361 return force_to_mode (x, mode, mask, reg, next_select);
6139ff20
RK
6362 }
6363
6364 binop:
dfbe1b2f 6365 /* For most binary operations, just propagate into the operation and
6139ff20
RK
6366 change the mode if we have an operation of that mode. */
6367
e3d616e3
RK
6368 op0 = gen_lowpart_for_combine (op_mode,
6369 force_to_mode (XEXP (x, 0), mode, mask,
6370 reg, next_select));
6371 op1 = gen_lowpart_for_combine (op_mode,
6372 force_to_mode (XEXP (x, 1), mode, mask,
6373 reg, next_select));
6139ff20 6374
2dd484ed
RK
6375 /* If OP1 is a CONST_INT and X is an IOR or XOR, clear bits outside
6376 MASK since OP1 might have been sign-extended but we never want
6377 to turn on extra bits, since combine might have previously relied
6378 on them being off. */
6379 if (GET_CODE (op1) == CONST_INT && (code == IOR || code == XOR)
6380 && (INTVAL (op1) & mask) != 0)
6381 op1 = GEN_INT (INTVAL (op1) & mask);
6382
6139ff20
RK
6383 if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
6384 x = gen_binary (code, op_mode, op0, op1);
d0ab8cd3 6385 break;
dfbe1b2f
RK
6386
6387 case ASHIFT:
dfbe1b2f 6388 /* For left shifts, do the same, but just for the first operand.
f6785026
RK
6389 However, we cannot do anything with shifts where we cannot
6390 guarantee that the counts are smaller than the size of the mode
6391 because such a count will have a different meaning in a
6139ff20 6392 wider mode. */
f6785026
RK
6393
6394 if (! (GET_CODE (XEXP (x, 1)) == CONST_INT
6139ff20 6395 && INTVAL (XEXP (x, 1)) >= 0
f6785026
RK
6396 && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (mode))
6397 && ! (GET_MODE (XEXP (x, 1)) != VOIDmode
6398 && (nonzero_bits (XEXP (x, 1), GET_MODE (XEXP (x, 1)))
adb7a1cb 6399 < (unsigned HOST_WIDE_INT) GET_MODE_BITSIZE (mode))))
f6785026
RK
6400 break;
6401
6139ff20
RK
6402 /* If the shift count is a constant and we can do arithmetic in
6403 the mode of the shift, refine which bits we need. Otherwise, use the
6404 conservative form of the mask. */
6405 if (GET_CODE (XEXP (x, 1)) == CONST_INT
6406 && INTVAL (XEXP (x, 1)) >= 0
6407 && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (op_mode)
6408 && GET_MODE_BITSIZE (op_mode) <= HOST_BITS_PER_WIDE_INT)
6409 mask >>= INTVAL (XEXP (x, 1));
6410 else
6411 mask = fuller_mask;
6412
6413 op0 = gen_lowpart_for_combine (op_mode,
6414 force_to_mode (XEXP (x, 0), op_mode,
e3d616e3 6415 mask, reg, next_select));
6139ff20
RK
6416
6417 if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0))
6418 x = gen_binary (code, op_mode, op0, XEXP (x, 1));
d0ab8cd3 6419 break;
dfbe1b2f
RK
6420
6421 case LSHIFTRT:
1347292b
JW
6422 /* Here we can only do something if the shift count is a constant,
6423 this shift constant is valid for the host, and we can do arithmetic
6424 in OP_MODE. */
dfbe1b2f
RK
6425
6426 if (GET_CODE (XEXP (x, 1)) == CONST_INT
1347292b 6427 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
6139ff20 6428 && GET_MODE_BITSIZE (op_mode) <= HOST_BITS_PER_WIDE_INT)
d0ab8cd3 6429 {
6139ff20
RK
6430 rtx inner = XEXP (x, 0);
6431
6432 /* Select the mask of the bits we need for the shift operand. */
6433 mask <<= INTVAL (XEXP (x, 1));
d0ab8cd3 6434
6139ff20
RK
6435 /* We can only change the mode of the shift if we can do arithmetic
6436 in the mode of the shift and MASK is no wider than the width of
6437 OP_MODE. */
6438 if (GET_MODE_BITSIZE (op_mode) > HOST_BITS_PER_WIDE_INT
6439 || (mask & ~ GET_MODE_MASK (op_mode)) != 0)
d0ab8cd3
RK
6440 op_mode = GET_MODE (x);
6441
e3d616e3 6442 inner = force_to_mode (inner, op_mode, mask, reg, next_select);
6139ff20
RK
6443
6444 if (GET_MODE (x) != op_mode || inner != XEXP (x, 0))
6445 x = gen_binary (LSHIFTRT, op_mode, inner, XEXP (x, 1));
d0ab8cd3 6446 }
6139ff20
RK
6447
6448 /* If we have (and (lshiftrt FOO C1) C2) where the combination of the
6449 shift and AND produces only copies of the sign bit (C2 is one less
6450 than a power of two), we can do this with just a shift. */
6451
6452 if (GET_CODE (x) == LSHIFTRT
6453 && GET_CODE (XEXP (x, 1)) == CONST_INT
6454 && ((INTVAL (XEXP (x, 1))
6455 + num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0))))
6456 >= GET_MODE_BITSIZE (GET_MODE (x)))
6457 && exact_log2 (mask + 1) >= 0
6458 && (num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
6459 >= exact_log2 (mask + 1)))
6460 x = gen_binary (LSHIFTRT, GET_MODE (x), XEXP (x, 0),
6461 GEN_INT (GET_MODE_BITSIZE (GET_MODE (x))
6462 - exact_log2 (mask + 1)));
d0ab8cd3
RK
6463 break;
6464
6465 case ASHIFTRT:
6139ff20
RK
6466 /* If we are just looking for the sign bit, we don't need this shift at
6467 all, even if it has a variable count. */
9bf22b75
RK
6468 if (GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
6469 && (mask == ((HOST_WIDE_INT) 1
6470 << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
e3d616e3 6471 return force_to_mode (XEXP (x, 0), mode, mask, reg, next_select);
6139ff20
RK
6472
6473 /* If this is a shift by a constant, get a mask that contains those bits
6474 that are not copies of the sign bit. We then have two cases: If
6475 MASK only includes those bits, this can be a logical shift, which may
6476 allow simplifications. If MASK is a single-bit field not within
6477 those bits, we are requesting a copy of the sign bit and hence can
6478 shift the sign bit to the appropriate location. */
6479
6480 if (GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) >= 0
6481 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
6482 {
6483 int i = -1;
6484
b69960ac
RK
6485 /* If the considered data is wider then HOST_WIDE_INT, we can't
6486 represent a mask for all its bits in a single scalar.
6487 But we only care about the lower bits, so calculate these. */
6488
6a11342f 6489 if (GET_MODE_BITSIZE (GET_MODE (x)) > HOST_BITS_PER_WIDE_INT)
b69960ac 6490 {
0f41302f 6491 nonzero = ~ (HOST_WIDE_INT) 0;
b69960ac
RK
6492
6493 /* GET_MODE_BITSIZE (GET_MODE (x)) - INTVAL (XEXP (x, 1))
6494 is the number of bits a full-width mask would have set.
6495 We need only shift if these are fewer than nonzero can
6496 hold. If not, we must keep all bits set in nonzero. */
6497
6498 if (GET_MODE_BITSIZE (GET_MODE (x)) - INTVAL (XEXP (x, 1))
6499 < HOST_BITS_PER_WIDE_INT)
6500 nonzero >>= INTVAL (XEXP (x, 1))
6501 + HOST_BITS_PER_WIDE_INT
6502 - GET_MODE_BITSIZE (GET_MODE (x)) ;
6503 }
6504 else
6505 {
6506 nonzero = GET_MODE_MASK (GET_MODE (x));
6507 nonzero >>= INTVAL (XEXP (x, 1));
6508 }
6139ff20
RK
6509
6510 if ((mask & ~ nonzero) == 0
6511 || (i = exact_log2 (mask)) >= 0)
6512 {
6513 x = simplify_shift_const
6514 (x, LSHIFTRT, GET_MODE (x), XEXP (x, 0),
6515 i < 0 ? INTVAL (XEXP (x, 1))
6516 : GET_MODE_BITSIZE (GET_MODE (x)) - 1 - i);
6517
6518 if (GET_CODE (x) != ASHIFTRT)
e3d616e3 6519 return force_to_mode (x, mode, mask, reg, next_select);
6139ff20
RK
6520 }
6521 }
6522
6523 /* If MASK is 1, convert this to a LSHIFTRT. This can be done
6524 even if the shift count isn't a constant. */
6525 if (mask == 1)
6526 x = gen_binary (LSHIFTRT, GET_MODE (x), XEXP (x, 0), XEXP (x, 1));
6527
d0ab8cd3 6528 /* If this is a sign-extension operation that just affects bits
4c002f29
RK
6529 we don't care about, remove it. Be sure the call above returned
6530 something that is still a shift. */
d0ab8cd3 6531
4c002f29
RK
6532 if ((GET_CODE (x) == LSHIFTRT || GET_CODE (x) == ASHIFTRT)
6533 && GET_CODE (XEXP (x, 1)) == CONST_INT
d0ab8cd3 6534 && INTVAL (XEXP (x, 1)) >= 0
6139ff20
RK
6535 && (INTVAL (XEXP (x, 1))
6536 <= GET_MODE_BITSIZE (GET_MODE (x)) - (floor_log2 (mask) + 1))
d0ab8cd3
RK
6537 && GET_CODE (XEXP (x, 0)) == ASHIFT
6538 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
6539 && INTVAL (XEXP (XEXP (x, 0), 1)) == INTVAL (XEXP (x, 1)))
e3d616e3
RK
6540 return force_to_mode (XEXP (XEXP (x, 0), 0), mode, mask,
6541 reg, next_select);
6139ff20 6542
dfbe1b2f
RK
6543 break;
6544
6139ff20
RK
6545 case ROTATE:
6546 case ROTATERT:
6547 /* If the shift count is constant and we can do computations
6548 in the mode of X, compute where the bits we care about are.
6549 Otherwise, we can't do anything. Don't change the mode of
6550 the shift or propagate MODE into the shift, though. */
6551 if (GET_CODE (XEXP (x, 1)) == CONST_INT
6552 && INTVAL (XEXP (x, 1)) >= 0)
6553 {
6554 temp = simplify_binary_operation (code == ROTATE ? ROTATERT : ROTATE,
6555 GET_MODE (x), GEN_INT (mask),
6556 XEXP (x, 1));
7d171a1e 6557 if (temp && GET_CODE(temp) == CONST_INT)
6139ff20
RK
6558 SUBST (XEXP (x, 0),
6559 force_to_mode (XEXP (x, 0), GET_MODE (x),
e3d616e3 6560 INTVAL (temp), reg, next_select));
6139ff20
RK
6561 }
6562 break;
6563
dfbe1b2f 6564 case NEG:
180b8e4b
RK
6565 /* If we just want the low-order bit, the NEG isn't needed since it
6566 won't change the low-order bit. */
6567 if (mask == 1)
6568 return force_to_mode (XEXP (x, 0), mode, mask, reg, just_select);
6569
6139ff20
RK
6570 /* We need any bits less significant than the most significant bit in
6571 MASK since carries from those bits will affect the bits we are
6572 interested in. */
6573 mask = fuller_mask;
6574 goto unop;
6575
dfbe1b2f 6576 case NOT:
6139ff20
RK
6577 /* (not FOO) is (xor FOO CONST), so if FOO is an LSHIFTRT, we can do the
6578 same as the XOR case above. Ensure that the constant we form is not
6579 wider than the mode of X. */
6580
6581 if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
6582 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
6583 && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
6584 && (INTVAL (XEXP (XEXP (x, 0), 1)) + floor_log2 (mask)
6585 < GET_MODE_BITSIZE (GET_MODE (x)))
6586 && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT)
6587 {
6588 temp = GEN_INT (mask << INTVAL (XEXP (XEXP (x, 0), 1)));
6589 temp = gen_binary (XOR, GET_MODE (x), XEXP (XEXP (x, 0), 0), temp);
6590 x = gen_binary (LSHIFTRT, GET_MODE (x), temp, XEXP (XEXP (x, 0), 1));
6591
e3d616e3 6592 return force_to_mode (x, mode, mask, reg, next_select);
6139ff20
RK
6593 }
6594
f82da7d2
JW
6595 /* (and (not FOO) CONST) is (not (or FOO (not CONST))), so we must
6596 use the full mask inside the NOT. */
6597 mask = fuller_mask;
6598
6139ff20 6599 unop:
e3d616e3
RK
6600 op0 = gen_lowpart_for_combine (op_mode,
6601 force_to_mode (XEXP (x, 0), mode, mask,
6602 reg, next_select));
6139ff20 6603 if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0))
0c1c8ea6 6604 x = gen_unary (code, op_mode, op_mode, op0);
6139ff20
RK
6605 break;
6606
6607 case NE:
6608 /* (and (ne FOO 0) CONST) can be (and FOO CONST) if CONST is included
3aceff0d 6609 in STORE_FLAG_VALUE and FOO has a single bit that might be nonzero,
1a6ec070 6610 which is equal to STORE_FLAG_VALUE. */
3aceff0d
RK
6611 if ((mask & ~ STORE_FLAG_VALUE) == 0 && XEXP (x, 1) == const0_rtx
6612 && exact_log2 (nonzero_bits (XEXP (x, 0), mode)) >= 0
1a6ec070 6613 && nonzero_bits (XEXP (x, 0), mode) == STORE_FLAG_VALUE)
e3d616e3 6614 return force_to_mode (XEXP (x, 0), mode, mask, reg, next_select);
6139ff20 6615
d0ab8cd3
RK
6616 break;
6617
6618 case IF_THEN_ELSE:
6619 /* We have no way of knowing if the IF_THEN_ELSE can itself be
6620 written in a narrower mode. We play it safe and do not do so. */
6621
6622 SUBST (XEXP (x, 1),
6623 gen_lowpart_for_combine (GET_MODE (x),
6624 force_to_mode (XEXP (x, 1), mode,
e3d616e3 6625 mask, reg, next_select)));
d0ab8cd3
RK
6626 SUBST (XEXP (x, 2),
6627 gen_lowpart_for_combine (GET_MODE (x),
6628 force_to_mode (XEXP (x, 2), mode,
e3d616e3 6629 mask, reg,next_select)));
d0ab8cd3 6630 break;
e9a25f70
JL
6631
6632 default:
6633 break;
dfbe1b2f
RK
6634 }
6635
d0ab8cd3 6636 /* Ensure we return a value of the proper mode. */
dfbe1b2f
RK
6637 return gen_lowpart_for_combine (mode, x);
6638}
6639\f
abe6e52f
RK
6640/* Return nonzero if X is an expression that has one of two values depending on
6641 whether some other value is zero or nonzero. In that case, we return the
6642 value that is being tested, *PTRUE is set to the value if the rtx being
6643 returned has a nonzero value, and *PFALSE is set to the other alternative.
6644
6645 If we return zero, we set *PTRUE and *PFALSE to X. */
6646
6647static rtx
6648if_then_else_cond (x, ptrue, pfalse)
6649 rtx x;
6650 rtx *ptrue, *pfalse;
6651{
6652 enum machine_mode mode = GET_MODE (x);
6653 enum rtx_code code = GET_CODE (x);
6654 int size = GET_MODE_BITSIZE (mode);
6655 rtx cond0, cond1, true0, true1, false0, false1;
6656 unsigned HOST_WIDE_INT nz;
6657
6658 /* If this is a unary operation whose operand has one of two values, apply
6659 our opcode to compute those values. */
6660 if (GET_RTX_CLASS (code) == '1'
6661 && (cond0 = if_then_else_cond (XEXP (x, 0), &true0, &false0)) != 0)
6662 {
0c1c8ea6
RK
6663 *ptrue = gen_unary (code, mode, GET_MODE (XEXP (x, 0)), true0);
6664 *pfalse = gen_unary (code, mode, GET_MODE (XEXP (x, 0)), false0);
abe6e52f
RK
6665 return cond0;
6666 }
6667
3a19aabc 6668 /* If this is a COMPARE, do nothing, since the IF_THEN_ELSE we would
ddd5a7c1 6669 make can't possibly match and would suppress other optimizations. */
3a19aabc
RK
6670 else if (code == COMPARE)
6671 ;
6672
abe6e52f
RK
6673 /* If this is a binary operation, see if either side has only one of two
6674 values. If either one does or if both do and they are conditional on
6675 the same value, compute the new true and false values. */
6676 else if (GET_RTX_CLASS (code) == 'c' || GET_RTX_CLASS (code) == '2'
6677 || GET_RTX_CLASS (code) == '<')
6678 {
6679 cond0 = if_then_else_cond (XEXP (x, 0), &true0, &false0);
6680 cond1 = if_then_else_cond (XEXP (x, 1), &true1, &false1);
6681
6682 if ((cond0 != 0 || cond1 != 0)
6683 && ! (cond0 != 0 && cond1 != 0 && ! rtx_equal_p (cond0, cond1)))
6684 {
987e845a
JW
6685 /* If if_then_else_cond returned zero, then true/false are the
6686 same rtl. We must copy one of them to prevent invalid rtl
6687 sharing. */
6688 if (cond0 == 0)
6689 true0 = copy_rtx (true0);
6690 else if (cond1 == 0)
6691 true1 = copy_rtx (true1);
6692
abe6e52f
RK
6693 *ptrue = gen_binary (code, mode, true0, true1);
6694 *pfalse = gen_binary (code, mode, false0, false1);
6695 return cond0 ? cond0 : cond1;
6696 }
9210df58 6697
9210df58 6698 /* See if we have PLUS, IOR, XOR, MINUS or UMAX, where one of the
0802d516
RK
6699 operands is zero when the other is non-zero, and vice-versa,
6700 and STORE_FLAG_VALUE is 1 or -1. */
9210df58 6701
0802d516
RK
6702 if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
6703 && (code == PLUS || code == IOR || code == XOR || code == MINUS
9210df58
RK
6704 || code == UMAX)
6705 && GET_CODE (XEXP (x, 0)) == MULT && GET_CODE (XEXP (x, 1)) == MULT)
6706 {
6707 rtx op0 = XEXP (XEXP (x, 0), 1);
6708 rtx op1 = XEXP (XEXP (x, 1), 1);
6709
6710 cond0 = XEXP (XEXP (x, 0), 0);
6711 cond1 = XEXP (XEXP (x, 1), 0);
6712
6713 if (GET_RTX_CLASS (GET_CODE (cond0)) == '<'
6714 && GET_RTX_CLASS (GET_CODE (cond1)) == '<'
6715 && reversible_comparison_p (cond1)
6716 && ((GET_CODE (cond0) == reverse_condition (GET_CODE (cond1))
6717 && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 0))
6718 && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 1)))
6719 || ((swap_condition (GET_CODE (cond0))
6720 == reverse_condition (GET_CODE (cond1)))
6721 && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 1))
6722 && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 0))))
6723 && ! side_effects_p (x))
6724 {
6725 *ptrue = gen_binary (MULT, mode, op0, const_true_rtx);
6726 *pfalse = gen_binary (MULT, mode,
6727 (code == MINUS
0c1c8ea6 6728 ? gen_unary (NEG, mode, mode, op1) : op1),
9210df58
RK
6729 const_true_rtx);
6730 return cond0;
6731 }
6732 }
6733
6734 /* Similarly for MULT, AND and UMIN, execpt that for these the result
6735 is always zero. */
0802d516
RK
6736 if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
6737 && (code == MULT || code == AND || code == UMIN)
9210df58
RK
6738 && GET_CODE (XEXP (x, 0)) == MULT && GET_CODE (XEXP (x, 1)) == MULT)
6739 {
6740 cond0 = XEXP (XEXP (x, 0), 0);
6741 cond1 = XEXP (XEXP (x, 1), 0);
6742
6743 if (GET_RTX_CLASS (GET_CODE (cond0)) == '<'
6744 && GET_RTX_CLASS (GET_CODE (cond1)) == '<'
6745 && reversible_comparison_p (cond1)
6746 && ((GET_CODE (cond0) == reverse_condition (GET_CODE (cond1))
6747 && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 0))
6748 && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 1)))
6749 || ((swap_condition (GET_CODE (cond0))
6750 == reverse_condition (GET_CODE (cond1)))
6751 && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 1))
6752 && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 0))))
6753 && ! side_effects_p (x))
6754 {
6755 *ptrue = *pfalse = const0_rtx;
6756 return cond0;
6757 }
6758 }
abe6e52f
RK
6759 }
6760
6761 else if (code == IF_THEN_ELSE)
6762 {
6763 /* If we have IF_THEN_ELSE already, extract the condition and
6764 canonicalize it if it is NE or EQ. */
6765 cond0 = XEXP (x, 0);
6766 *ptrue = XEXP (x, 1), *pfalse = XEXP (x, 2);
6767 if (GET_CODE (cond0) == NE && XEXP (cond0, 1) == const0_rtx)
6768 return XEXP (cond0, 0);
6769 else if (GET_CODE (cond0) == EQ && XEXP (cond0, 1) == const0_rtx)
6770 {
6771 *ptrue = XEXP (x, 2), *pfalse = XEXP (x, 1);
6772 return XEXP (cond0, 0);
6773 }
6774 else
6775 return cond0;
6776 }
6777
6778 /* If X is a normal SUBREG with both inner and outer modes integral,
6779 we can narrow both the true and false values of the inner expression,
6780 if there is a condition. */
6781 else if (code == SUBREG && GET_MODE_CLASS (mode) == MODE_INT
6782 && GET_MODE_CLASS (GET_MODE (SUBREG_REG (x))) == MODE_INT
6783 && GET_MODE_SIZE (mode) <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
6784 && 0 != (cond0 = if_then_else_cond (SUBREG_REG (x),
6785 &true0, &false0)))
6786 {
00244e6b
RK
6787 *ptrue = force_to_mode (true0, mode, GET_MODE_MASK (mode), NULL_RTX, 0);
6788 *pfalse
6789 = force_to_mode (false0, mode, GET_MODE_MASK (mode), NULL_RTX, 0);
abe6e52f 6790
abe6e52f
RK
6791 return cond0;
6792 }
6793
6794 /* If X is a constant, this isn't special and will cause confusions
6795 if we treat it as such. Likewise if it is equivalent to a constant. */
6796 else if (CONSTANT_P (x)
6797 || ((cond0 = get_last_value (x)) != 0 && CONSTANT_P (cond0)))
6798 ;
6799
6800 /* If X is known to be either 0 or -1, those are the true and
6801 false values when testing X. */
6802 else if (num_sign_bit_copies (x, mode) == size)
6803 {
6804 *ptrue = constm1_rtx, *pfalse = const0_rtx;
6805 return x;
6806 }
6807
6808 /* Likewise for 0 or a single bit. */
6809 else if (exact_log2 (nz = nonzero_bits (x, mode)) >= 0)
6810 {
6811 *ptrue = GEN_INT (nz), *pfalse = const0_rtx;
6812 return x;
6813 }
6814
6815 /* Otherwise fail; show no condition with true and false values the same. */
6816 *ptrue = *pfalse = x;
6817 return 0;
6818}
6819\f
1a26b032
RK
6820/* Return the value of expression X given the fact that condition COND
6821 is known to be true when applied to REG as its first operand and VAL
6822 as its second. X is known to not be shared and so can be modified in
6823 place.
6824
6825 We only handle the simplest cases, and specifically those cases that
6826 arise with IF_THEN_ELSE expressions. */
6827
6828static rtx
6829known_cond (x, cond, reg, val)
6830 rtx x;
6831 enum rtx_code cond;
6832 rtx reg, val;
6833{
6834 enum rtx_code code = GET_CODE (x);
f24ad0e4 6835 rtx temp;
1a26b032
RK
6836 char *fmt;
6837 int i, j;
6838
6839 if (side_effects_p (x))
6840 return x;
6841
6842 if (cond == EQ && rtx_equal_p (x, reg))
6843 return val;
6844
6845 /* If X is (abs REG) and we know something about REG's relationship
6846 with zero, we may be able to simplify this. */
6847
6848 if (code == ABS && rtx_equal_p (XEXP (x, 0), reg) && val == const0_rtx)
6849 switch (cond)
6850 {
6851 case GE: case GT: case EQ:
6852 return XEXP (x, 0);
6853 case LT: case LE:
0c1c8ea6
RK
6854 return gen_unary (NEG, GET_MODE (XEXP (x, 0)), GET_MODE (XEXP (x, 0)),
6855 XEXP (x, 0));
e9a25f70
JL
6856 default:
6857 break;
1a26b032
RK
6858 }
6859
6860 /* The only other cases we handle are MIN, MAX, and comparisons if the
6861 operands are the same as REG and VAL. */
6862
6863 else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == 'c')
6864 {
6865 if (rtx_equal_p (XEXP (x, 0), val))
6866 cond = swap_condition (cond), temp = val, val = reg, reg = temp;
6867
6868 if (rtx_equal_p (XEXP (x, 0), reg) && rtx_equal_p (XEXP (x, 1), val))
6869 {
6870 if (GET_RTX_CLASS (code) == '<')
6871 return (comparison_dominates_p (cond, code) ? const_true_rtx
6872 : (comparison_dominates_p (cond,
6873 reverse_condition (code))
6874 ? const0_rtx : x));
6875
6876 else if (code == SMAX || code == SMIN
6877 || code == UMIN || code == UMAX)
6878 {
6879 int unsignedp = (code == UMIN || code == UMAX);
6880
6881 if (code == SMAX || code == UMAX)
6882 cond = reverse_condition (cond);
6883
6884 switch (cond)
6885 {
6886 case GE: case GT:
6887 return unsignedp ? x : XEXP (x, 1);
6888 case LE: case LT:
6889 return unsignedp ? x : XEXP (x, 0);
6890 case GEU: case GTU:
6891 return unsignedp ? XEXP (x, 1) : x;
6892 case LEU: case LTU:
6893 return unsignedp ? XEXP (x, 0) : x;
e9a25f70
JL
6894 default:
6895 break;
1a26b032
RK
6896 }
6897 }
6898 }
6899 }
6900
6901 fmt = GET_RTX_FORMAT (code);
6902 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6903 {
6904 if (fmt[i] == 'e')
6905 SUBST (XEXP (x, i), known_cond (XEXP (x, i), cond, reg, val));
6906 else if (fmt[i] == 'E')
6907 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
6908 SUBST (XVECEXP (x, i, j), known_cond (XVECEXP (x, i, j),
6909 cond, reg, val));
6910 }
6911
6912 return x;
6913}
6914\f
e11fa86f
RK
6915/* See if X and Y are equal for the purposes of seeing if we can rewrite an
6916 assignment as a field assignment. */
6917
6918static int
6919rtx_equal_for_field_assignment_p (x, y)
6920 rtx x;
6921 rtx y;
6922{
6923 rtx last_x, last_y;
6924
6925 if (x == y || rtx_equal_p (x, y))
6926 return 1;
6927
6928 if (x == 0 || y == 0 || GET_MODE (x) != GET_MODE (y))
6929 return 0;
6930
6931 /* Check for a paradoxical SUBREG of a MEM compared with the MEM.
6932 Note that all SUBREGs of MEM are paradoxical; otherwise they
6933 would have been rewritten. */
6934 if (GET_CODE (x) == MEM && GET_CODE (y) == SUBREG
6935 && GET_CODE (SUBREG_REG (y)) == MEM
6936 && rtx_equal_p (SUBREG_REG (y),
6937 gen_lowpart_for_combine (GET_MODE (SUBREG_REG (y)), x)))
6938 return 1;
6939
6940 if (GET_CODE (y) == MEM && GET_CODE (x) == SUBREG
6941 && GET_CODE (SUBREG_REG (x)) == MEM
6942 && rtx_equal_p (SUBREG_REG (x),
6943 gen_lowpart_for_combine (GET_MODE (SUBREG_REG (x)), y)))
6944 return 1;
6945
6946 last_x = get_last_value (x);
6947 last_y = get_last_value (y);
6948
0f47edd3
JL
6949 return ((last_x != 0
6950 && GET_CODE (last_x) != CLOBBER
6951 && rtx_equal_for_field_assignment_p (last_x, y))
6952 || (last_y != 0
6953 && GET_CODE (last_y) != CLOBBER
6954 && rtx_equal_for_field_assignment_p (x, last_y))
e11fa86f 6955 || (last_x != 0 && last_y != 0
0f47edd3
JL
6956 && GET_CODE (last_x) != CLOBBER
6957 && GET_CODE (last_y) != CLOBBER
e11fa86f
RK
6958 && rtx_equal_for_field_assignment_p (last_x, last_y)));
6959}
6960\f
230d793d
RS
6961/* See if X, a SET operation, can be rewritten as a bit-field assignment.
6962 Return that assignment if so.
6963
6964 We only handle the most common cases. */
6965
6966static rtx
6967make_field_assignment (x)
6968 rtx x;
6969{
6970 rtx dest = SET_DEST (x);
6971 rtx src = SET_SRC (x);
dfbe1b2f 6972 rtx assign;
e11fa86f 6973 rtx rhs, lhs;
5f4f0e22
CH
6974 HOST_WIDE_INT c1;
6975 int pos, len;
dfbe1b2f
RK
6976 rtx other;
6977 enum machine_mode mode;
230d793d
RS
6978
6979 /* If SRC was (and (not (ashift (const_int 1) POS)) DEST), this is
6980 a clear of a one-bit field. We will have changed it to
6981 (and (rotate (const_int -2) POS) DEST), so check for that. Also check
6982 for a SUBREG. */
6983
6984 if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == ROTATE
6985 && GET_CODE (XEXP (XEXP (src, 0), 0)) == CONST_INT
6986 && INTVAL (XEXP (XEXP (src, 0), 0)) == -2
e11fa86f 6987 && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
230d793d 6988 {
8999a12e 6989 assign = make_extraction (VOIDmode, dest, 0, XEXP (XEXP (src, 0), 1),
230d793d 6990 1, 1, 1, 0);
76184def 6991 if (assign != 0)
38a448ca 6992 return gen_rtx_SET (VOIDmode, assign, const0_rtx);
76184def 6993 return x;
230d793d
RS
6994 }
6995
6996 else if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == SUBREG
6997 && subreg_lowpart_p (XEXP (src, 0))
6998 && (GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
6999 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (src, 0)))))
7000 && GET_CODE (SUBREG_REG (XEXP (src, 0))) == ROTATE
7001 && INTVAL (XEXP (SUBREG_REG (XEXP (src, 0)), 0)) == -2
e11fa86f 7002 && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
230d793d 7003 {
8999a12e 7004 assign = make_extraction (VOIDmode, dest, 0,
230d793d
RS
7005 XEXP (SUBREG_REG (XEXP (src, 0)), 1),
7006 1, 1, 1, 0);
76184def 7007 if (assign != 0)
38a448ca 7008 return gen_rtx_SET (VOIDmode, assign, const0_rtx);
76184def 7009 return x;
230d793d
RS
7010 }
7011
9dd11dcb 7012 /* If SRC is (ior (ashift (const_int 1) POS) DEST), this is a set of a
230d793d
RS
7013 one-bit field. */
7014 else if (GET_CODE (src) == IOR && GET_CODE (XEXP (src, 0)) == ASHIFT
7015 && XEXP (XEXP (src, 0), 0) == const1_rtx
e11fa86f 7016 && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
230d793d 7017 {
8999a12e 7018 assign = make_extraction (VOIDmode, dest, 0, XEXP (XEXP (src, 0), 1),
230d793d 7019 1, 1, 1, 0);
76184def 7020 if (assign != 0)
38a448ca 7021 return gen_rtx_SET (VOIDmode, assign, const1_rtx);
76184def 7022 return x;
230d793d
RS
7023 }
7024
dfbe1b2f 7025 /* The other case we handle is assignments into a constant-position
9dd11dcb 7026 field. They look like (ior/xor (and DEST C1) OTHER). If C1 represents
dfbe1b2f
RK
7027 a mask that has all one bits except for a group of zero bits and
7028 OTHER is known to have zeros where C1 has ones, this is such an
7029 assignment. Compute the position and length from C1. Shift OTHER
7030 to the appropriate position, force it to the required mode, and
7031 make the extraction. Check for the AND in both operands. */
7032
9dd11dcb 7033 if (GET_CODE (src) != IOR && GET_CODE (src) != XOR)
e11fa86f
RK
7034 return x;
7035
7036 rhs = expand_compound_operation (XEXP (src, 0));
7037 lhs = expand_compound_operation (XEXP (src, 1));
7038
7039 if (GET_CODE (rhs) == AND
7040 && GET_CODE (XEXP (rhs, 1)) == CONST_INT
7041 && rtx_equal_for_field_assignment_p (XEXP (rhs, 0), dest))
7042 c1 = INTVAL (XEXP (rhs, 1)), other = lhs;
7043 else if (GET_CODE (lhs) == AND
7044 && GET_CODE (XEXP (lhs, 1)) == CONST_INT
7045 && rtx_equal_for_field_assignment_p (XEXP (lhs, 0), dest))
7046 c1 = INTVAL (XEXP (lhs, 1)), other = rhs;
dfbe1b2f
RK
7047 else
7048 return x;
230d793d 7049
e11fa86f 7050 pos = get_pos_from_mask ((~ c1) & GET_MODE_MASK (GET_MODE (dest)), &len);
dfbe1b2f 7051 if (pos < 0 || pos + len > GET_MODE_BITSIZE (GET_MODE (dest))
ac49a949 7052 || (GET_MODE_BITSIZE (GET_MODE (other)) <= HOST_BITS_PER_WIDE_INT
951553af 7053 && (c1 & nonzero_bits (other, GET_MODE (other))) != 0))
dfbe1b2f 7054 return x;
230d793d 7055
5f4f0e22 7056 assign = make_extraction (VOIDmode, dest, pos, NULL_RTX, len, 1, 1, 0);
76184def
DE
7057 if (assign == 0)
7058 return x;
230d793d 7059
dfbe1b2f
RK
7060 /* The mode to use for the source is the mode of the assignment, or of
7061 what is inside a possible STRICT_LOW_PART. */
7062 mode = (GET_CODE (assign) == STRICT_LOW_PART
7063 ? GET_MODE (XEXP (assign, 0)) : GET_MODE (assign));
230d793d 7064
dfbe1b2f
RK
7065 /* Shift OTHER right POS places and make it the source, restricting it
7066 to the proper length and mode. */
230d793d 7067
5f4f0e22
CH
7068 src = force_to_mode (simplify_shift_const (NULL_RTX, LSHIFTRT,
7069 GET_MODE (src), other, pos),
6139ff20
RK
7070 mode,
7071 GET_MODE_BITSIZE (mode) >= HOST_BITS_PER_WIDE_INT
7072 ? GET_MODE_MASK (mode)
7073 : ((HOST_WIDE_INT) 1 << len) - 1,
e3d616e3 7074 dest, 0);
230d793d 7075
dfbe1b2f 7076 return gen_rtx_combine (SET, VOIDmode, assign, src);
230d793d
RS
7077}
7078\f
7079/* See if X is of the form (+ (* a c) (* b c)) and convert to (* (+ a b) c)
7080 if so. */
7081
7082static rtx
7083apply_distributive_law (x)
7084 rtx x;
7085{
7086 enum rtx_code code = GET_CODE (x);
7087 rtx lhs, rhs, other;
7088 rtx tem;
7089 enum rtx_code inner_code;
7090
d8a8a4da
RS
7091 /* Distributivity is not true for floating point.
7092 It can change the value. So don't do it.
7093 -- rms and moshier@world.std.com. */
3ad2180a 7094 if (FLOAT_MODE_P (GET_MODE (x)))
d8a8a4da
RS
7095 return x;
7096
230d793d
RS
7097 /* The outer operation can only be one of the following: */
7098 if (code != IOR && code != AND && code != XOR
7099 && code != PLUS && code != MINUS)
7100 return x;
7101
7102 lhs = XEXP (x, 0), rhs = XEXP (x, 1);
7103
0f41302f
MS
7104 /* If either operand is a primitive we can't do anything, so get out
7105 fast. */
230d793d 7106 if (GET_RTX_CLASS (GET_CODE (lhs)) == 'o'
dfbe1b2f 7107 || GET_RTX_CLASS (GET_CODE (rhs)) == 'o')
230d793d
RS
7108 return x;
7109
7110 lhs = expand_compound_operation (lhs);
7111 rhs = expand_compound_operation (rhs);
7112 inner_code = GET_CODE (lhs);
7113 if (inner_code != GET_CODE (rhs))
7114 return x;
7115
7116 /* See if the inner and outer operations distribute. */
7117 switch (inner_code)
7118 {
7119 case LSHIFTRT:
7120 case ASHIFTRT:
7121 case AND:
7122 case IOR:
7123 /* These all distribute except over PLUS. */
7124 if (code == PLUS || code == MINUS)
7125 return x;
7126 break;
7127
7128 case MULT:
7129 if (code != PLUS && code != MINUS)
7130 return x;
7131 break;
7132
7133 case ASHIFT:
45620ed4 7134 /* This is also a multiply, so it distributes over everything. */
230d793d
RS
7135 break;
7136
7137 case SUBREG:
dfbe1b2f
RK
7138 /* Non-paradoxical SUBREGs distributes over all operations, provided
7139 the inner modes and word numbers are the same, this is an extraction
2b4bd1bc
JW
7140 of a low-order part, we don't convert an fp operation to int or
7141 vice versa, and we would not be converting a single-word
dfbe1b2f 7142 operation into a multi-word operation. The latter test is not
2b4bd1bc 7143 required, but it prevents generating unneeded multi-word operations.
dfbe1b2f
RK
7144 Some of the previous tests are redundant given the latter test, but
7145 are retained because they are required for correctness.
7146
7147 We produce the result slightly differently in this case. */
7148
7149 if (GET_MODE (SUBREG_REG (lhs)) != GET_MODE (SUBREG_REG (rhs))
7150 || SUBREG_WORD (lhs) != SUBREG_WORD (rhs)
7151 || ! subreg_lowpart_p (lhs)
2b4bd1bc
JW
7152 || (GET_MODE_CLASS (GET_MODE (lhs))
7153 != GET_MODE_CLASS (GET_MODE (SUBREG_REG (lhs))))
dfbe1b2f 7154 || (GET_MODE_SIZE (GET_MODE (lhs))
8af24e26 7155 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs))))
dfbe1b2f 7156 || GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs))) > UNITS_PER_WORD)
230d793d
RS
7157 return x;
7158
7159 tem = gen_binary (code, GET_MODE (SUBREG_REG (lhs)),
7160 SUBREG_REG (lhs), SUBREG_REG (rhs));
7161 return gen_lowpart_for_combine (GET_MODE (x), tem);
7162
7163 default:
7164 return x;
7165 }
7166
7167 /* Set LHS and RHS to the inner operands (A and B in the example
7168 above) and set OTHER to the common operand (C in the example).
7169 These is only one way to do this unless the inner operation is
7170 commutative. */
7171 if (GET_RTX_CLASS (inner_code) == 'c'
7172 && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 0)))
7173 other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 1);
7174 else if (GET_RTX_CLASS (inner_code) == 'c'
7175 && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 1)))
7176 other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 0);
7177 else if (GET_RTX_CLASS (inner_code) == 'c'
7178 && rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 0)))
7179 other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 1);
7180 else if (rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 1)))
7181 other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 0);
7182 else
7183 return x;
7184
7185 /* Form the new inner operation, seeing if it simplifies first. */
7186 tem = gen_binary (code, GET_MODE (x), lhs, rhs);
7187
7188 /* There is one exception to the general way of distributing:
7189 (a ^ b) | (a ^ c) -> (~a) & (b ^ c) */
7190 if (code == XOR && inner_code == IOR)
7191 {
7192 inner_code = AND;
0c1c8ea6 7193 other = gen_unary (NOT, GET_MODE (x), GET_MODE (x), other);
230d793d
RS
7194 }
7195
7196 /* We may be able to continuing distributing the result, so call
7197 ourselves recursively on the inner operation before forming the
7198 outer operation, which we return. */
7199 return gen_binary (inner_code, GET_MODE (x),
7200 apply_distributive_law (tem), other);
7201}
7202\f
7203/* We have X, a logical `and' of VAROP with the constant CONSTOP, to be done
7204 in MODE.
7205
7206 Return an equivalent form, if different from X. Otherwise, return X. If
7207 X is zero, we are to always construct the equivalent form. */
7208
7209static rtx
7210simplify_and_const_int (x, mode, varop, constop)
7211 rtx x;
7212 enum machine_mode mode;
7213 rtx varop;
5f4f0e22 7214 unsigned HOST_WIDE_INT constop;
230d793d 7215{
951553af 7216 unsigned HOST_WIDE_INT nonzero;
9fa6d012 7217 int width = GET_MODE_BITSIZE (mode);
42301240 7218 int i;
230d793d 7219
6139ff20
RK
7220 /* Simplify VAROP knowing that we will be only looking at some of the
7221 bits in it. */
e3d616e3 7222 varop = force_to_mode (varop, mode, constop, NULL_RTX, 0);
230d793d 7223
6139ff20
RK
7224 /* If VAROP is a CLOBBER, we will fail so return it; if it is a
7225 CONST_INT, we are done. */
7226 if (GET_CODE (varop) == CLOBBER || GET_CODE (varop) == CONST_INT)
7227 return varop;
230d793d 7228
fc06d7aa
RK
7229 /* See what bits may be nonzero in VAROP. Unlike the general case of
7230 a call to nonzero_bits, here we don't care about bits outside
7231 MODE. */
7232
7233 nonzero = nonzero_bits (varop, mode) & GET_MODE_MASK (mode);
230d793d 7234
9fa6d012
TG
7235 /* If this would be an entire word for the target, but is not for
7236 the host, then sign-extend on the host so that the number will look
7237 the same way on the host that it would on the target.
7238
7239 For example, when building a 64 bit alpha hosted 32 bit sparc
7240 targeted compiler, then we want the 32 bit unsigned value -1 to be
7241 represented as a 64 bit value -1, and not as 0x00000000ffffffff.
7242 The later confuses the sparc backend. */
7243
7244 if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT && BITS_PER_WORD == width
7245 && (nonzero & ((HOST_WIDE_INT) 1 << (width - 1))))
7246 nonzero |= ((HOST_WIDE_INT) (-1) << width);
7247
230d793d 7248 /* Turn off all bits in the constant that are known to already be zero.
951553af 7249 Thus, if the AND isn't needed at all, we will have CONSTOP == NONZERO_BITS
230d793d
RS
7250 which is tested below. */
7251
951553af 7252 constop &= nonzero;
230d793d
RS
7253
7254 /* If we don't have any bits left, return zero. */
7255 if (constop == 0)
7256 return const0_rtx;
7257
42301240
RK
7258 /* If VAROP is a NEG of something known to be zero or 1 and CONSTOP is
7259 a power of two, we can replace this with a ASHIFT. */
7260 if (GET_CODE (varop) == NEG && nonzero_bits (XEXP (varop, 0), mode) == 1
7261 && (i = exact_log2 (constop)) >= 0)
7262 return simplify_shift_const (NULL_RTX, ASHIFT, mode, XEXP (varop, 0), i);
7263
6139ff20
RK
7264 /* If VAROP is an IOR or XOR, apply the AND to both branches of the IOR
7265 or XOR, then try to apply the distributive law. This may eliminate
7266 operations if either branch can be simplified because of the AND.
7267 It may also make some cases more complex, but those cases probably
7268 won't match a pattern either with or without this. */
7269
7270 if (GET_CODE (varop) == IOR || GET_CODE (varop) == XOR)
7271 return
7272 gen_lowpart_for_combine
7273 (mode,
7274 apply_distributive_law
7275 (gen_binary (GET_CODE (varop), GET_MODE (varop),
7276 simplify_and_const_int (NULL_RTX, GET_MODE (varop),
7277 XEXP (varop, 0), constop),
7278 simplify_and_const_int (NULL_RTX, GET_MODE (varop),
7279 XEXP (varop, 1), constop))));
7280
230d793d
RS
7281 /* Get VAROP in MODE. Try to get a SUBREG if not. Don't make a new SUBREG
7282 if we already had one (just check for the simplest cases). */
7283 if (x && GET_CODE (XEXP (x, 0)) == SUBREG
7284 && GET_MODE (XEXP (x, 0)) == mode
7285 && SUBREG_REG (XEXP (x, 0)) == varop)
7286 varop = XEXP (x, 0);
7287 else
7288 varop = gen_lowpart_for_combine (mode, varop);
7289
0f41302f 7290 /* If we can't make the SUBREG, try to return what we were given. */
230d793d
RS
7291 if (GET_CODE (varop) == CLOBBER)
7292 return x ? x : varop;
7293
7294 /* If we are only masking insignificant bits, return VAROP. */
951553af 7295 if (constop == nonzero)
230d793d
RS
7296 x = varop;
7297
7298 /* Otherwise, return an AND. See how much, if any, of X we can use. */
7299 else if (x == 0 || GET_CODE (x) != AND || GET_MODE (x) != mode)
6139ff20 7300 x = gen_binary (AND, mode, varop, GEN_INT (constop));
230d793d
RS
7301
7302 else
7303 {
7304 if (GET_CODE (XEXP (x, 1)) != CONST_INT
7305 || INTVAL (XEXP (x, 1)) != constop)
5f4f0e22 7306 SUBST (XEXP (x, 1), GEN_INT (constop));
230d793d
RS
7307
7308 SUBST (XEXP (x, 0), varop);
7309 }
7310
7311 return x;
7312}
7313\f
/* We let num_sign_bit_copies recur into nonzero_bits as that is useful.
   We don't let nonzero_bits recur into num_sign_bit_copies, because that
   is less useful.  We can't allow both, because that results in exponential
   run time recursion.  There is a nullstone testcase that triggered
   this.  This macro avoids accidental uses of num_sign_bit_copies.
   NOTE(review): presumably any call written after this point fails to
   compile because the zero-argument expansion no longer matches -- the
   poison is lexical, not enforced by the compiler.  */
#define num_sign_bit_copies()
7320
230d793d
RS
/* Given an expression, X, compute which bits in X can be non-zero.
   We don't care about bits outside of those defined in MODE.

   For most X this is simply GET_MODE_MASK (MODE), but if X is
   a shift, AND, or zero_extract, we can do better.

   NOTE(review): num_sign_bit_copies is #defined away for the body of
   this function (see the guard macro above) to prevent exponential
   mutual recursion between the two analyses.  */

static unsigned HOST_WIDE_INT
nonzero_bits (x, mode)
     rtx x;
     enum machine_mode mode;
{
  unsigned HOST_WIDE_INT nonzero = GET_MODE_MASK (mode);
  unsigned HOST_WIDE_INT inner_nz;
  enum rtx_code code;
  int mode_width = GET_MODE_BITSIZE (mode);
  rtx tem;

  /* For floating-point values, assume all bits are needed.  */
  if (FLOAT_MODE_P (GET_MODE (x)) || FLOAT_MODE_P (mode))
    return nonzero;

  /* If X is wider than MODE, use its mode instead.  */
  if (GET_MODE_BITSIZE (GET_MODE (x)) > mode_width)
    {
      mode = GET_MODE (x);
      nonzero = GET_MODE_MASK (mode);
      mode_width = GET_MODE_BITSIZE (mode);
    }

  if (mode_width > HOST_BITS_PER_WIDE_INT)
    /* Our only callers in this case look for single bit values.  So
       just return the mode mask.  Those tests will then be false.  */
    return nonzero;

#ifndef WORD_REGISTER_OPERATIONS
  /* If MODE is wider than X, but both are a single word for both the host
     and target machines, we can compute this from which bits of the
     object might be nonzero in its own mode, taking into account the fact
     that on many CISC machines, accessing an object in a wider mode
     causes the high-order bits to become undefined.  So they are
     not known to be zero.  */

  if (GET_MODE (x) != VOIDmode && GET_MODE (x) != mode
      && GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD
      && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
      && GET_MODE_BITSIZE (mode) > GET_MODE_BITSIZE (GET_MODE (x)))
    {
      nonzero &= nonzero_bits (x, GET_MODE (x));
      nonzero |= GET_MODE_MASK (mode) & ~ GET_MODE_MASK (GET_MODE (x));
      return nonzero;
    }
#endif

  code = GET_CODE (x);
  switch (code)
    {
    case REG:
#ifdef POINTERS_EXTEND_UNSIGNED
      /* If pointers extend unsigned and this is a pointer in Pmode, say that
	 all the bits above ptr_mode are known to be zero.  */
      if (POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
	  && REGNO_POINTER_FLAG (REGNO (x)))
	nonzero &= GET_MODE_MASK (ptr_mode);
#endif

#ifdef STACK_BOUNDARY
      /* If this is the stack pointer, we may know something about its
	 alignment.  If PUSH_ROUNDING is defined, it is possible for the
	 stack to be momentarily aligned only to that amount, so we pick
	 the least alignment.  */

      /* We can't check for arg_pointer_rtx here, because it is not
	 guaranteed to have as much alignment as the stack pointer.
	 In particular, in the Irix6 n64 ABI, the stack has 128 bit
	 alignment but the argument pointer has only 64 bit alignment.  */

      if ((x == frame_pointer_rtx
	   || x == stack_pointer_rtx
	   || x == hard_frame_pointer_rtx
	   || (REGNO (x) >= FIRST_VIRTUAL_REGISTER
	       && REGNO (x) <= LAST_VIRTUAL_REGISTER))
#ifdef STACK_BIAS
	  && !STACK_BIAS
#endif
	  )
	{
	  int sp_alignment = STACK_BOUNDARY / BITS_PER_UNIT;

#ifdef PUSH_ROUNDING
	  if (REGNO (x) == STACK_POINTER_REGNUM)
	    sp_alignment = MIN (PUSH_ROUNDING (1), sp_alignment);
#endif

	  /* We must return here, otherwise we may get a worse result from
	     one of the choices below.  There is nothing useful below as
	     far as the stack pointer is concerned.  The low-order bits
	     below the alignment are known to be zero.  */
	  return nonzero &= ~ (sp_alignment - 1);
	}
#endif

      /* If X is a register whose nonzero bits value is current, use it.
	 Otherwise, if X is a register whose value we can find, use that
	 value.  Otherwise, use the previously-computed global nonzero bits
	 for this register.  */

      if (reg_last_set_value[REGNO (x)] != 0
	  && reg_last_set_mode[REGNO (x)] == mode
	  && (REG_N_SETS (REGNO (x)) == 1
	      || reg_last_set_label[REGNO (x)] == label_tick)
	  && INSN_CUID (reg_last_set[REGNO (x)]) < subst_low_cuid)
	return reg_last_set_nonzero_bits[REGNO (x)];

      tem = get_last_value (x);

      if (tem)
	{
#ifdef SHORT_IMMEDIATES_SIGN_EXTEND
	  /* If X is narrower than MODE and TEM is a non-negative
	     constant that would appear negative in the mode of X,
	     sign-extend it for use in reg_nonzero_bits because some
	     machines (maybe most) will actually do the sign-extension
	     and this is the conservative approach.

	     ??? For 2.5, try to tighten up the MD files in this regard
	     instead of this kludge.  */

	  if (GET_MODE_BITSIZE (GET_MODE (x)) < mode_width
	      && GET_CODE (tem) == CONST_INT
	      && INTVAL (tem) > 0
	      && 0 != (INTVAL (tem)
		       & ((HOST_WIDE_INT) 1
			  << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
	    tem = GEN_INT (INTVAL (tem)
			   | ((HOST_WIDE_INT) (-1)
			      << GET_MODE_BITSIZE (GET_MODE (x))));
#endif
	  return nonzero_bits (tem, mode);
	}
      else if (nonzero_sign_valid && reg_nonzero_bits[REGNO (x)])
	return reg_nonzero_bits[REGNO (x)] & nonzero;
      else
	return nonzero;

    case CONST_INT:
#ifdef SHORT_IMMEDIATES_SIGN_EXTEND
      /* If X is negative in MODE, sign-extend the value.  */
      if (INTVAL (x) > 0 && mode_width < BITS_PER_WORD
	  && 0 != (INTVAL (x) & ((HOST_WIDE_INT) 1 << (mode_width - 1))))
	return (INTVAL (x) | ((HOST_WIDE_INT) (-1) << mode_width));
#endif

      return INTVAL (x);

    case MEM:
#ifdef LOAD_EXTEND_OP
      /* In many, if not most, RISC machines, reading a byte from memory
	 zeros the rest of the register.  Noticing that fact saves a lot
	 of extra zero-extends.  */
      if (LOAD_EXTEND_OP (GET_MODE (x)) == ZERO_EXTEND)
	nonzero &= GET_MODE_MASK (GET_MODE (x));
#endif
      break;

    case EQ:  case NE:
    case GT:  case GTU:
    case LT:  case LTU:
    case GE:  case GEU:
    case LE:  case LEU:

      /* If this produces an integer result, we know which bits are set.
	 Code here used to clear bits outside the mode of X, but that is
	 now done above.  */

      if (GET_MODE_CLASS (mode) == MODE_INT
	  && mode_width <= HOST_BITS_PER_WIDE_INT)
	nonzero = STORE_FLAG_VALUE;
      break;

    case NEG:
#if 0
      /* Disabled to avoid exponential mutual recursion between nonzero_bits
	 and num_sign_bit_copies.  */
      if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
	  == GET_MODE_BITSIZE (GET_MODE (x)))
	nonzero = 1;
#endif

      /* Negating in a wider mode can set the extra high bits.  */
      if (GET_MODE_SIZE (GET_MODE (x)) < mode_width)
	nonzero |= (GET_MODE_MASK (mode) & ~ GET_MODE_MASK (GET_MODE (x)));
      break;

    case ABS:
#if 0
      /* Disabled to avoid exponential mutual recursion between nonzero_bits
	 and num_sign_bit_copies.  */
      if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
	  == GET_MODE_BITSIZE (GET_MODE (x)))
	nonzero = 1;
#endif
      break;

    case TRUNCATE:
      nonzero &= (nonzero_bits (XEXP (x, 0), mode) & GET_MODE_MASK (mode));
      break;

    case ZERO_EXTEND:
      nonzero &= nonzero_bits (XEXP (x, 0), mode);
      if (GET_MODE (XEXP (x, 0)) != VOIDmode)
	nonzero &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
      break;

    case SIGN_EXTEND:
      /* If the sign bit is known clear, this is the same as ZERO_EXTEND.
	 Otherwise, show all the bits in the outer mode but not the inner
	 may be non-zero.  */
      inner_nz = nonzero_bits (XEXP (x, 0), mode);
      if (GET_MODE (XEXP (x, 0)) != VOIDmode)
	{
	  inner_nz &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
	  if (inner_nz
	      & (((HOST_WIDE_INT) 1
		  << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1))))
	    inner_nz |= (GET_MODE_MASK (mode)
			 & ~ GET_MODE_MASK (GET_MODE (XEXP (x, 0))));
	}

      nonzero &= inner_nz;
      break;

    case AND:
      /* A bit of the result can be set only if it is set in both inputs.  */
      nonzero &= (nonzero_bits (XEXP (x, 0), mode)
		  & nonzero_bits (XEXP (x, 1), mode));
      break;

    case XOR:   case IOR:
    case UMIN:  case UMAX:  case SMIN:  case SMAX:
      /* A bit of the result can be set if it is set in either input.  */
      nonzero &= (nonzero_bits (XEXP (x, 0), mode)
		  | nonzero_bits (XEXP (x, 1), mode));
      break;

    case PLUS:  case MINUS:
    case MULT:
    case DIV:   case UDIV:
    case MOD:   case UMOD:
      /* We can apply the rules of arithmetic to compute the number of
	 high- and low-order zero bits of these operations.  We start by
	 computing the width (position of the highest-order non-zero bit)
	 and the number of low-order zero bits for each value.  */
      {
	unsigned HOST_WIDE_INT nz0 = nonzero_bits (XEXP (x, 0), mode);
	unsigned HOST_WIDE_INT nz1 = nonzero_bits (XEXP (x, 1), mode);
	int width0 = floor_log2 (nz0) + 1;
	int width1 = floor_log2 (nz1) + 1;
	int low0 = floor_log2 (nz0 & -nz0);
	int low1 = floor_log2 (nz1 & -nz1);
	HOST_WIDE_INT op0_maybe_minusp
	  = (nz0 & ((HOST_WIDE_INT) 1 << (mode_width - 1)));
	HOST_WIDE_INT op1_maybe_minusp
	  = (nz1 & ((HOST_WIDE_INT) 1 << (mode_width - 1)));
	int result_width = mode_width;
	int result_low = 0;

	switch (code)
	  {
	  case PLUS:
#ifdef STACK_BIAS
	    /* A biased stack frame pointer plus a constant offset still
	       has the frame's known alignment.  */
	    if (STACK_BIAS
		&& (XEXP (x, 0) == stack_pointer_rtx
		    || XEXP (x, 0) == frame_pointer_rtx)
		&& GET_CODE (XEXP (x, 1)) == CONST_INT)
	      {
		int sp_alignment = STACK_BOUNDARY / BITS_PER_UNIT;

		nz0 = (GET_MODE_MASK (mode) & ~ (sp_alignment - 1));
		nz1 = INTVAL (XEXP (x, 1)) - STACK_BIAS;
		width0 = floor_log2 (nz0) + 1;
		width1 = floor_log2 (nz1) + 1;
		low0 = floor_log2 (nz0 & -nz0);
		low1 = floor_log2 (nz1 & -nz1);
	      }
#endif
	    /* A sum may carry one bit past the wider operand.  */
	    result_width = MAX (width0, width1) + 1;
	    result_low = MIN (low0, low1);
	    break;
	  case MINUS:
	    result_low = MIN (low0, low1);
	    break;
	  case MULT:
	    result_width = width0 + width1;
	    result_low = low0 + low1;
	    break;
	  case DIV:
	    if (! op0_maybe_minusp && ! op1_maybe_minusp)
	      result_width = width0;
	    break;
	  case UDIV:
	    result_width = width0;
	    break;
	  case MOD:
	    if (! op0_maybe_minusp && ! op1_maybe_minusp)
	      result_width = MIN (width0, width1);
	    result_low = MIN (low0, low1);
	    break;
	  case UMOD:
	    result_width = MIN (width0, width1);
	    result_low = MIN (low0, low1);
	    break;
	  default:
	    abort ();
	  }

	if (result_width < mode_width)
	  nonzero &= ((HOST_WIDE_INT) 1 << result_width) - 1;

	if (result_low > 0)
	  nonzero &= ~ (((HOST_WIDE_INT) 1 << result_low) - 1);
      }
      break;

    case ZERO_EXTRACT:
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
	  && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
	nonzero &= ((HOST_WIDE_INT) 1 << INTVAL (XEXP (x, 1))) - 1;
      break;

    case SUBREG:
      /* If this is a SUBREG formed for a promoted variable that has
	 been zero-extended, we know that at least the high-order bits
	 are zero, though others might be too.  */

      if (SUBREG_PROMOTED_VAR_P (x) && SUBREG_PROMOTED_UNSIGNED_P (x))
	nonzero = (GET_MODE_MASK (GET_MODE (x))
		   & nonzero_bits (SUBREG_REG (x), GET_MODE (x)));

      /* If the inner mode is a single word for both the host and target
	 machines, we can compute this from which bits of the inner
	 object might be nonzero.  */
      if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) <= BITS_PER_WORD
	  && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
	      <= HOST_BITS_PER_WIDE_INT))
	{
	  nonzero &= nonzero_bits (SUBREG_REG (x), mode);

#ifndef WORD_REGISTER_OPERATIONS
	  /* On many CISC machines, accessing an object in a wider mode
	     causes the high-order bits to become undefined.  So they are
	     not known to be zero.  */
	  if (GET_MODE_SIZE (GET_MODE (x))
	      > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
	    nonzero |= (GET_MODE_MASK (GET_MODE (x))
			& ~ GET_MODE_MASK (GET_MODE (SUBREG_REG (x))));
#endif
	}
      break;

    case ASHIFTRT:
    case LSHIFTRT:
    case ASHIFT:
    case ROTATE:
      /* The nonzero bits are in two classes: any bits within MODE
	 that aren't in GET_MODE (x) are always significant.  The rest of the
	 nonzero bits are those that are significant in the operand of
	 the shift when shifted the appropriate number of bits.  This
	 shows that high-order bits are cleared by the right shift and
	 low-order bits by left shifts.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
	  && INTVAL (XEXP (x, 1)) >= 0
	  && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
	{
	  enum machine_mode inner_mode = GET_MODE (x);
	  int width = GET_MODE_BITSIZE (inner_mode);
	  int count = INTVAL (XEXP (x, 1));
	  unsigned HOST_WIDE_INT mode_mask = GET_MODE_MASK (inner_mode);
	  unsigned HOST_WIDE_INT op_nonzero = nonzero_bits (XEXP (x, 0), mode);
	  unsigned HOST_WIDE_INT inner = op_nonzero & mode_mask;
	  unsigned HOST_WIDE_INT outer = 0;

	  if (mode_width > width)
	    outer = (op_nonzero & nonzero & ~ mode_mask);

	  if (code == LSHIFTRT)
	    inner >>= count;
	  else if (code == ASHIFTRT)
	    {
	      inner >>= count;

	      /* If the sign bit may have been nonzero before the shift, we
		 need to mark all the places it could have been copied to
		 by the shift as possibly nonzero.  */
	      if (inner & ((HOST_WIDE_INT) 1 << (width - 1 - count)))
		inner |= (((HOST_WIDE_INT) 1 << count) - 1) << (width - count);
	    }
	  else if (code == ASHIFT)
	    inner <<= count;
	  else
	    /* ROTATE: wrap the bits shifted out at the top back in at
	       the bottom (modulo the inner width).  */
	    inner = ((inner << (count % width)
		      | (inner >> (width - (count % width)))) & mode_mask);

	  nonzero &= (outer | inner);
	}
      break;

    case FFS:
      /* This is at most the number of bits in the mode.  */
      nonzero = ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width) + 1)) - 1;
      break;

    case IF_THEN_ELSE:
      /* Either arm may be the result, so union their nonzero bits.  */
      nonzero &= (nonzero_bits (XEXP (x, 1), mode)
		  | nonzero_bits (XEXP (x, 2), mode));
      break;

    default:
      break;
    }

  return nonzero;
}
b3728b0e
JW
7739
/* See the macro definition above.  nonzero_bits is finished, so restore
   num_sign_bit_copies for the rest of the file (its real definition
   follows immediately).  */
#undef num_sign_bit_copies
230d793d 7742\f
d0ab8cd3 7743/* Return the number of bits at the high-order end of X that are known to
5109d49f
RK
7744 be equal to the sign bit. X will be used in mode MODE; if MODE is
7745 VOIDmode, X will be used in its own mode. The returned value will always
7746 be between 1 and the number of bits in MODE. */
d0ab8cd3
RK
7747
7748static int
7749num_sign_bit_copies (x, mode)
7750 rtx x;
7751 enum machine_mode mode;
7752{
7753 enum rtx_code code = GET_CODE (x);
7754 int bitwidth;
7755 int num0, num1, result;
951553af 7756 unsigned HOST_WIDE_INT nonzero;
d0ab8cd3
RK
7757 rtx tem;
7758
7759 /* If we weren't given a mode, use the mode of X. If the mode is still
1c75dfa4
RK
7760 VOIDmode, we don't know anything. Likewise if one of the modes is
7761 floating-point. */
d0ab8cd3
RK
7762
7763 if (mode == VOIDmode)
7764 mode = GET_MODE (x);
7765
1c75dfa4 7766 if (mode == VOIDmode || FLOAT_MODE_P (mode) || FLOAT_MODE_P (GET_MODE (x)))
6752e8d2 7767 return 1;
d0ab8cd3
RK
7768
7769 bitwidth = GET_MODE_BITSIZE (mode);
7770
0f41302f 7771 /* For a smaller object, just ignore the high bits. */
312def2e
RK
7772 if (bitwidth < GET_MODE_BITSIZE (GET_MODE (x)))
7773 return MAX (1, (num_sign_bit_copies (x, GET_MODE (x))
7774 - (GET_MODE_BITSIZE (GET_MODE (x)) - bitwidth)));
7775
e9a25f70
JL
7776 if (GET_MODE (x) != VOIDmode && bitwidth > GET_MODE_BITSIZE (GET_MODE (x)))
7777 {
0c314d1a
RK
7778#ifndef WORD_REGISTER_OPERATIONS
7779 /* If this machine does not do all register operations on the entire
7780 register and MODE is wider than the mode of X, we can say nothing
7781 at all about the high-order bits. */
e9a25f70
JL
7782 return 1;
7783#else
7784 /* Likewise on machines that do, if the mode of the object is smaller
7785 than a word and loads of that size don't sign extend, we can say
7786 nothing about the high order bits. */
7787 if (GET_MODE_BITSIZE (GET_MODE (x)) < BITS_PER_WORD
7788#ifdef LOAD_EXTEND_OP
7789 && LOAD_EXTEND_OP (GET_MODE (x)) != SIGN_EXTEND
7790#endif
7791 )
7792 return 1;
0c314d1a 7793#endif
e9a25f70 7794 }
0c314d1a 7795
d0ab8cd3
RK
7796 switch (code)
7797 {
7798 case REG:
55310dad 7799
ff0dbdd1
RK
7800#ifdef POINTERS_EXTEND_UNSIGNED
7801 /* If pointers extend signed and this is a pointer in Pmode, say that
7802 all the bits above ptr_mode are known to be sign bit copies. */
7803 if (! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode && mode == Pmode
7804 && REGNO_POINTER_FLAG (REGNO (x)))
7805 return GET_MODE_BITSIZE (Pmode) - GET_MODE_BITSIZE (ptr_mode) + 1;
7806#endif
7807
55310dad
RK
7808 if (reg_last_set_value[REGNO (x)] != 0
7809 && reg_last_set_mode[REGNO (x)] == mode
b1f21e0a 7810 && (REG_N_SETS (REGNO (x)) == 1
55310dad
RK
7811 || reg_last_set_label[REGNO (x)] == label_tick)
7812 && INSN_CUID (reg_last_set[REGNO (x)]) < subst_low_cuid)
7813 return reg_last_set_sign_bit_copies[REGNO (x)];
d0ab8cd3
RK
7814
7815 tem = get_last_value (x);
7816 if (tem != 0)
7817 return num_sign_bit_copies (tem, mode);
55310dad
RK
7818
7819 if (nonzero_sign_valid && reg_sign_bit_copies[REGNO (x)] != 0)
7820 return reg_sign_bit_copies[REGNO (x)];
d0ab8cd3
RK
7821 break;
7822
457816e2 7823 case MEM:
8baf60bb 7824#ifdef LOAD_EXTEND_OP
457816e2 7825 /* Some RISC machines sign-extend all loads of smaller than a word. */
8baf60bb
RK
7826 if (LOAD_EXTEND_OP (GET_MODE (x)) == SIGN_EXTEND)
7827 return MAX (1, bitwidth - GET_MODE_BITSIZE (GET_MODE (x)) + 1);
457816e2 7828#endif
8baf60bb 7829 break;
457816e2 7830
d0ab8cd3
RK
7831 case CONST_INT:
7832 /* If the constant is negative, take its 1's complement and remask.
7833 Then see how many zero bits we have. */
951553af 7834 nonzero = INTVAL (x) & GET_MODE_MASK (mode);
ac49a949 7835 if (bitwidth <= HOST_BITS_PER_WIDE_INT
951553af
RK
7836 && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
7837 nonzero = (~ nonzero) & GET_MODE_MASK (mode);
d0ab8cd3 7838
951553af 7839 return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
d0ab8cd3
RK
7840
7841 case SUBREG:
c3c2cb37
RK
7842 /* If this is a SUBREG for a promoted object that is sign-extended
7843 and we are looking at it in a wider mode, we know that at least the
7844 high-order bits are known to be sign bit copies. */
7845
7846 if (SUBREG_PROMOTED_VAR_P (x) && ! SUBREG_PROMOTED_UNSIGNED_P (x))
dc3e17ad
RK
7847 return MAX (bitwidth - GET_MODE_BITSIZE (GET_MODE (x)) + 1,
7848 num_sign_bit_copies (SUBREG_REG (x), mode));
c3c2cb37 7849
0f41302f 7850 /* For a smaller object, just ignore the high bits. */
d0ab8cd3
RK
7851 if (bitwidth <= GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))))
7852 {
7853 num0 = num_sign_bit_copies (SUBREG_REG (x), VOIDmode);
7854 return MAX (1, (num0
7855 - (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
7856 - bitwidth)));
7857 }
457816e2 7858
8baf60bb 7859#ifdef WORD_REGISTER_OPERATIONS
2aec5b7a 7860#ifdef LOAD_EXTEND_OP
8baf60bb
RK
7861 /* For paradoxical SUBREGs on machines where all register operations
7862 affect the entire register, just look inside. Note that we are
7863 passing MODE to the recursive call, so the number of sign bit copies
7864 will remain relative to that mode, not the inner mode. */
457816e2 7865
2aec5b7a
JW
7866 /* This works only if loads sign extend. Otherwise, if we get a
7867 reload for the inner part, it may be loaded from the stack, and
7868 then we lose all sign bit copies that existed before the store
7869 to the stack. */
7870
7871 if ((GET_MODE_SIZE (GET_MODE (x))
7872 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
7873 && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND)
457816e2 7874 return num_sign_bit_copies (SUBREG_REG (x), mode);
2aec5b7a 7875#endif
457816e2 7876#endif
d0ab8cd3
RK
7877 break;
7878
7879 case SIGN_EXTRACT:
7880 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
7881 return MAX (1, bitwidth - INTVAL (XEXP (x, 1)));
7882 break;
7883
7884 case SIGN_EXTEND:
7885 return (bitwidth - GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
7886 + num_sign_bit_copies (XEXP (x, 0), VOIDmode));
7887
7888 case TRUNCATE:
0f41302f 7889 /* For a smaller object, just ignore the high bits. */
d0ab8cd3
RK
7890 num0 = num_sign_bit_copies (XEXP (x, 0), VOIDmode);
7891 return MAX (1, (num0 - (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
7892 - bitwidth)));
7893
7894 case NOT:
7895 return num_sign_bit_copies (XEXP (x, 0), mode);
7896
7897 case ROTATE: case ROTATERT:
7898 /* If we are rotating left by a number of bits less than the number
7899 of sign bit copies, we can just subtract that amount from the
7900 number. */
7901 if (GET_CODE (XEXP (x, 1)) == CONST_INT
7902 && INTVAL (XEXP (x, 1)) >= 0 && INTVAL (XEXP (x, 1)) < bitwidth)
7903 {
7904 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
7905 return MAX (1, num0 - (code == ROTATE ? INTVAL (XEXP (x, 1))
7906 : bitwidth - INTVAL (XEXP (x, 1))));
7907 }
7908 break;
7909
7910 case NEG:
7911 /* In general, this subtracts one sign bit copy. But if the value
7912 is known to be positive, the number of sign bit copies is the
951553af
RK
7913 same as that of the input. Finally, if the input has just one bit
7914 that might be nonzero, all the bits are copies of the sign bit. */
7915 nonzero = nonzero_bits (XEXP (x, 0), mode);
7916 if (nonzero == 1)
d0ab8cd3
RK
7917 return bitwidth;
7918
7919 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
7920 if (num0 > 1
ac49a949 7921 && bitwidth <= HOST_BITS_PER_WIDE_INT
951553af 7922 && (((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero))
d0ab8cd3
RK
7923 num0--;
7924
7925 return num0;
7926
7927 case IOR: case AND: case XOR:
7928 case SMIN: case SMAX: case UMIN: case UMAX:
7929 /* Logical operations will preserve the number of sign-bit copies.
7930 MIN and MAX operations always return one of the operands. */
7931 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
7932 num1 = num_sign_bit_copies (XEXP (x, 1), mode);
7933 return MIN (num0, num1);
7934
7935 case PLUS: case MINUS:
7936 /* For addition and subtraction, we can have a 1-bit carry. However,
7937 if we are subtracting 1 from a positive number, there will not
7938 be such a carry. Furthermore, if the positive number is known to
7939 be 0 or 1, we know the result is either -1 or 0. */
7940
3e3ea975 7941 if (code == PLUS && XEXP (x, 1) == constm1_rtx
9295e6af 7942 && bitwidth <= HOST_BITS_PER_WIDE_INT)
d0ab8cd3 7943 {
951553af
RK
7944 nonzero = nonzero_bits (XEXP (x, 0), mode);
7945 if ((((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero) == 0)
7946 return (nonzero == 1 || nonzero == 0 ? bitwidth
7947 : bitwidth - floor_log2 (nonzero) - 1);
d0ab8cd3
RK
7948 }
7949
7950 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
7951 num1 = num_sign_bit_copies (XEXP (x, 1), mode);
7952 return MAX (1, MIN (num0, num1) - 1);
7953
7954 case MULT:
7955 /* The number of bits of the product is the sum of the number of
7956 bits of both terms. However, unless one of the terms if known
7957 to be positive, we must allow for an additional bit since negating
7958 a negative number can remove one sign bit copy. */
7959
7960 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
7961 num1 = num_sign_bit_copies (XEXP (x, 1), mode);
7962
7963 result = bitwidth - (bitwidth - num0) - (bitwidth - num1);
7964 if (result > 0
9295e6af 7965 && bitwidth <= HOST_BITS_PER_WIDE_INT
951553af 7966 && ((nonzero_bits (XEXP (x, 0), mode)
d0ab8cd3 7967 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
01c82bbb
RK
7968 && ((nonzero_bits (XEXP (x, 1), mode)
7969 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
d0ab8cd3
RK
7970 result--;
7971
7972 return MAX (1, result);
7973
7974 case UDIV:
7975 /* The result must be <= the first operand. */
7976 return num_sign_bit_copies (XEXP (x, 0), mode);
7977
7978 case UMOD:
7979 /* The result must be <= the scond operand. */
7980 return num_sign_bit_copies (XEXP (x, 1), mode);
7981
7982 case DIV:
7983 /* Similar to unsigned division, except that we have to worry about
7984 the case where the divisor is negative, in which case we have
7985 to add 1. */
7986 result = num_sign_bit_copies (XEXP (x, 0), mode);
7987 if (result > 1
ac49a949 7988 && bitwidth <= HOST_BITS_PER_WIDE_INT
951553af 7989 && (nonzero_bits (XEXP (x, 1), mode)
d0ab8cd3
RK
7990 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
7991 result --;
7992
7993 return result;
7994
7995 case MOD:
7996 result = num_sign_bit_copies (XEXP (x, 1), mode);
7997 if (result > 1
ac49a949 7998 && bitwidth <= HOST_BITS_PER_WIDE_INT
951553af 7999 && (nonzero_bits (XEXP (x, 1), mode)
d0ab8cd3
RK
8000 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
8001 result --;
8002
8003 return result;
8004
8005 case ASHIFTRT:
8006 /* Shifts by a constant add to the number of bits equal to the
8007 sign bit. */
8008 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
8009 if (GET_CODE (XEXP (x, 1)) == CONST_INT
8010 && INTVAL (XEXP (x, 1)) > 0)
8011 num0 = MIN (bitwidth, num0 + INTVAL (XEXP (x, 1)));
8012
8013 return num0;
8014
8015 case ASHIFT:
d0ab8cd3
RK
8016 /* Left shifts destroy copies. */
8017 if (GET_CODE (XEXP (x, 1)) != CONST_INT
8018 || INTVAL (XEXP (x, 1)) < 0
8019 || INTVAL (XEXP (x, 1)) >= bitwidth)
8020 return 1;
8021
8022 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
8023 return MAX (1, num0 - INTVAL (XEXP (x, 1)));
8024
8025 case IF_THEN_ELSE:
8026 num0 = num_sign_bit_copies (XEXP (x, 1), mode);
8027 num1 = num_sign_bit_copies (XEXP (x, 2), mode);
8028 return MIN (num0, num1);
8029
d0ab8cd3
RK
8030 case EQ: case NE: case GE: case GT: case LE: case LT:
8031 case GEU: case GTU: case LEU: case LTU:
0802d516
RK
8032 if (STORE_FLAG_VALUE == -1)
8033 return bitwidth;
e9a25f70
JL
8034 break;
8035
8036 default:
8037 break;
d0ab8cd3
RK
8038 }
8039
8040 /* If we haven't been able to figure it out by one of the above rules,
8041 see if some of the high-order bits are known to be zero. If so,
ac49a949
RS
8042 count those bits and return one less than that amount. If we can't
8043 safely compute the mask for this mode, always return BITWIDTH. */
8044
8045 if (bitwidth > HOST_BITS_PER_WIDE_INT)
6752e8d2 8046 return 1;
d0ab8cd3 8047
951553af 8048 nonzero = nonzero_bits (x, mode);
df6f4086 8049 return (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))
951553af 8050 ? 1 : bitwidth - floor_log2 (nonzero) - 1);
d0ab8cd3
RK
8051}
8052\f
1a26b032
RK
8053/* Return the number of "extended" bits there are in X, when interpreted
8054 as a quantity in MODE whose signedness is indicated by UNSIGNEDP. For
8055 unsigned quantities, this is the number of high-order zero bits.
8056 For signed quantities, this is the number of copies of the sign bit
8057 minus 1. In both case, this function returns the number of "spare"
8058 bits. For example, if two quantities for which this function returns
8059 at least 1 are added, the addition is known not to overflow.
8060
8061 This function will always return 0 unless called during combine, which
8062 implies that it must be called from a define_split. */
8063
8064int
8065extended_count (x, mode, unsignedp)
8066 rtx x;
8067 enum machine_mode mode;
8068 int unsignedp;
8069{
951553af 8070 if (nonzero_sign_valid == 0)
1a26b032
RK
8071 return 0;
8072
8073 return (unsignedp
ac49a949
RS
8074 ? (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
8075 && (GET_MODE_BITSIZE (mode) - 1
951553af 8076 - floor_log2 (nonzero_bits (x, mode))))
1a26b032
RK
8077 : num_sign_bit_copies (x, mode) - 1);
8078}
8079\f
230d793d
RS
/* This function is called from `simplify_shift_const' to merge two
   outer operations.  Specifically, we have already found that we need
   to perform operation *POP0 with constant *PCONST0 at the outermost
   position.  We would now like to also perform OP1 with constant CONST1
   (with *POP0 being done last).

   Return 1 if we can do the operation and update *POP0 and *PCONST0 with
   the resulting operation.  *PCOMP_P is set to 1 if we would need to
   complement the innermost operand, otherwise it is unchanged.

   MODE is the mode in which the operation will be done.  No bits outside
   the width of this mode matter.  It is assumed that the width of this mode
   is smaller than or equal to HOST_BITS_PER_WIDE_INT.

   If *POP0 or OP1 are NIL, it means no operation is required.  Only NEG, PLUS,
   IOR, XOR, and AND are supported.  We may set *POP0 to SET if the proper
   result is simply *PCONST0.

   If the resulting operation cannot be expressed as one operation, we
   return 0 and do not change *POP0, *PCONST0, and *PCOMP_P.  */

static int
merge_outer_ops (pop0, pconst0, op1, const1, mode, pcomp_p)
     enum rtx_code *pop0;
     HOST_WIDE_INT *pconst0;
     enum rtx_code op1;
     HOST_WIDE_INT const1;
     enum machine_mode mode;
     int *pcomp_p;
{
  enum rtx_code op0 = *pop0;
  HOST_WIDE_INT const0 = *pconst0;
  int width = GET_MODE_BITSIZE (mode);

  const0 &= GET_MODE_MASK (mode);
  const1 &= GET_MODE_MASK (mode);

  /* If OP0 is an AND, clear unimportant bits in CONST1.  */
  if (op0 == AND)
    const1 &= const0;

  /* If OP0 or OP1 is NIL, this is easy.  Similarly if they are the same or
     if OP0 is SET.  */

  if (op1 == NIL || op0 == SET)
    return 1;

  else if (op0 == NIL)
    op0 = op1, const0 = const1;

  else if (op0 == op1)
    {
      /* Two applications of the same operation fold into one by
	 combining the constants.  */
      switch (op0)
	{
	case AND:
	  const0 &= const1;
	  break;
	case IOR:
	  const0 |= const1;
	  break;
	case XOR:
	  const0 ^= const1;
	  break;
	case PLUS:
	  const0 += const1;
	  break;
	case NEG:
	  /* Two negations cancel.  */
	  op0 = NIL;
	  break;
	default:
	  break;
	}
    }

  /* Otherwise, if either is a PLUS or NEG, we can't do anything.  */
  else if (op0 == PLUS || op1 == PLUS || op0 == NEG || op1 == NEG)
    return 0;

  /* If the two constants aren't the same, we can't do anything.  The
     remaining six cases can all be done.  */
  else if (const0 != const1)
    return 0;

  else
    /* Here OP0 != OP1, both are among AND/IOR/XOR, and CONST0 == CONST1.
       Each pairing simplifies per the boolean identities noted below.  */
    switch (op0)
      {
      case IOR:
	if (op1 == AND)
	  /* (a & b) | b == b */
	  op0 = SET;
	else /* op1 == XOR */
	  /* (a ^ b) | b == a | b */
	  {;}
	break;

      case XOR:
	if (op1 == AND)
	  /* (a & b) ^ b == (~a) & b */
	  op0 = AND, *pcomp_p = 1;
	else /* op1 == IOR */
	  /* (a | b) ^ b == a & ~b */
	  op0 = AND, *pconst0 = ~ const0;
	break;

      case AND:
	if (op1 == IOR)
	  /* (a | b) & b == b */
	  op0 = SET;
	else /* op1 == XOR */
	  /* (a ^ b) & b) == (~a) & b */
	  *pcomp_p = 1;
	break;
      default:
	break;
      }

  /* Check for NO-OP cases.  */
  const0 &= GET_MODE_MASK (mode);
  if (const0 == 0
      && (op0 == IOR || op0 == XOR || op0 == PLUS))
    op0 = NIL;
  else if (const0 == 0 && op0 == AND)
    op0 = SET;
  else if (const0 == GET_MODE_MASK (mode) && op0 == AND)
    op0 = NIL;

  /* If this would be an entire word for the target, but is not for
     the host, then sign-extend on the host so that the number will look
     the same way on the host that it would on the target.

     For example, when building a 64 bit alpha hosted 32 bit sparc
     targeted compiler, then we want the 32 bit unsigned value -1 to be
     represented as a 64 bit value -1, and not as 0x00000000ffffffff.
     The later confuses the sparc backend.  */

  if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT && BITS_PER_WORD == width
      && (const0 & ((HOST_WIDE_INT) 1 << (width - 1))))
    const0 |= ((HOST_WIDE_INT) (-1) << width);

  *pop0 = op0;
  *pconst0 = const0;

  return 1;
}
8224\f
/* Simplify a shift of VAROP by COUNT bits.  CODE says what kind of shift.
   The result of the shift is RESULT_MODE.  X, if non-zero, is an expression
   that we started with.

   The shift is normally computed in the widest mode we find in VAROP, as
   long as it isn't a different number of words than RESULT_MODE.  Exceptions
   are ASHIFTRT and ROTATE, which are always done in their original mode.  */

static rtx
simplify_shift_const (x, code, result_mode, varop, count)
     rtx x;
     enum rtx_code code;
     enum machine_mode result_mode;
     rtx varop;
     int count;
{
  enum rtx_code orig_code = code;
  int orig_count = count;
  enum machine_mode mode = result_mode;
  enum machine_mode shift_mode, tmode;
  int mode_words
    = (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
  /* We form (outer_op (code varop count) (outer_const)).  OUTER_OP of
     NIL means no outer operation has been accumulated yet.  */
  enum rtx_code outer_op = NIL;
  HOST_WIDE_INT outer_const = 0;
  rtx const_rtx;
  /* Nonzero if the final result must be complemented (NOT) before the
     outer operation is applied; set by merge_outer_ops.  */
  int complement_p = 0;
  /* Scratch for newly-built rtx.  */
  rtx new;

  /* If we were given an invalid count, don't do anything except exactly
     what was requested.  */

  if (count < 0 || count > GET_MODE_BITSIZE (mode))
    {
      if (x)
	return x;

      return gen_rtx_fmt_ee (code, mode, varop, GEN_INT (count));
    }

  /* Unless one of the branches of the `if' in this loop does a `continue',
     we will `break' the loop after the `if'.  Each iteration either
     transforms VAROP (and possibly CODE/COUNT) and continues, or gives up
     and falls out to build the final shift below.  */

  while (count != 0)
    {
      /* If we have an operand of (clobber (const_int 0)), just return that
	 value.  */
      if (GET_CODE (varop) == CLOBBER)
	return varop;

      /* If we discovered we had to complement VAROP, leave.  Making a NOT
	 here would cause an infinite loop.  */
      if (complement_p)
	break;

      /* Convert ROTATERT to ROTATE.  */
      if (code == ROTATERT)
	code = ROTATE, count = GET_MODE_BITSIZE (result_mode) - count;

      /* We need to determine what mode we will do the shift in.  If the
	 shift is a right shift or a ROTATE, we must always do it in the mode
	 it was originally done in.  Otherwise, we can do it in MODE, the
	 widest mode encountered.  */
      shift_mode
	= (code == ASHIFTRT || code == LSHIFTRT || code == ROTATE
	   ? result_mode : mode);

      /* Handle cases where the count is greater than the size of the mode
	 minus 1.  For ASHIFT, use the size minus one as the count (this can
	 occur when simplifying (lshiftrt (ashiftrt ..))).  For rotates,
	 take the count modulo the size.  For other shifts, the result is
	 zero.

	 Since these shifts are being produced by the compiler by combining
	 multiple operations, each of which are defined, we know what the
	 result is supposed to be.  */

      if (count > GET_MODE_BITSIZE (shift_mode) - 1)
	{
	  if (code == ASHIFTRT)
	    count = GET_MODE_BITSIZE (shift_mode) - 1;
	  else if (code == ROTATE || code == ROTATERT)
	    count %= GET_MODE_BITSIZE (shift_mode);
	  else
	    {
	      /* We can't simply return zero because there may be an
		 outer op.  */
	      varop = const0_rtx;
	      count = 0;
	      break;
	    }
	}

      /* Negative counts are invalid and should not have been made (a
	 programmer-specified negative count should have been handled
	 above).  */
      else if (count < 0)
	abort ();

      /* An arithmetic right shift of a quantity known to be -1 or 0
	 is a no-op.  */
      if (code == ASHIFTRT
	  && (num_sign_bit_copies (varop, shift_mode)
	      == GET_MODE_BITSIZE (shift_mode)))
	{
	  count = 0;
	  break;
	}

      /* If we are doing an arithmetic right shift and discarding all but
	 the sign bit copies, this is equivalent to doing a shift by the
	 bitsize minus one.  Convert it into that shift because it will often
	 allow other simplifications.  */

      if (code == ASHIFTRT
	  && (count + num_sign_bit_copies (varop, shift_mode)
	      >= GET_MODE_BITSIZE (shift_mode)))
	count = GET_MODE_BITSIZE (shift_mode) - 1;

      /* We simplify the tests below and elsewhere by converting
	 ASHIFTRT to LSHIFTRT if we know the sign bit is clear.
	 `make_compound_operation' will convert it to a ASHIFTRT for
	 those machines (such as Vax) that don't have a LSHIFTRT.  */
      if (GET_MODE_BITSIZE (shift_mode) <= HOST_BITS_PER_WIDE_INT
	  && code == ASHIFTRT
	  && ((nonzero_bits (varop, shift_mode)
	       & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (shift_mode) - 1)))
	      == 0))
	code = LSHIFTRT;

      /* Dispatch on the form of VAROP; most cases strip or rewrite VAROP
	 and `continue' so the simplification is retried on the result.  */
      switch (GET_CODE (varop))
	{
	case SIGN_EXTEND:
	case ZERO_EXTEND:
	case SIGN_EXTRACT:
	case ZERO_EXTRACT:
	  new = expand_compound_operation (varop);
	  if (new != varop)
	    {
	      varop = new;
	      continue;
	    }
	  break;

	case MEM:
	  /* If we have (xshiftrt (mem ...) C) and C is MODE_WIDTH
	     minus the width of a smaller mode, we can do this with a
	     SIGN_EXTEND or ZERO_EXTEND from the narrower memory location.  */
	  if ((code == ASHIFTRT || code == LSHIFTRT)
	      && ! mode_dependent_address_p (XEXP (varop, 0))
	      && ! MEM_VOLATILE_P (varop)
	      && (tmode = mode_for_size (GET_MODE_BITSIZE (mode) - count,
					 MODE_INT, 1)) != BLKmode)
	    {
	      if (BYTES_BIG_ENDIAN)
		new = gen_rtx_MEM (tmode, XEXP (varop, 0));
	      else
		new = gen_rtx_MEM (tmode,
				   plus_constant (XEXP (varop, 0),
						  count / BITS_PER_UNIT));
	      /* Preserve the memory attributes of the original ref.  */
	      RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (varop);
	      MEM_VOLATILE_P (new) = MEM_VOLATILE_P (varop);
	      MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (varop);
	      varop = gen_rtx_combine (code == ASHIFTRT ? SIGN_EXTEND
				       : ZERO_EXTEND, mode, new);
	      count = 0;
	      continue;
	    }
	  break;

	case USE:
	  /* Similar to the case above, except that we can only do this if
	     the resulting mode is the same as that of the underlying
	     MEM and adjust the address depending on the *bits* endianness
	     because of the way that bit-field extract insns are defined.  */
	  if ((code == ASHIFTRT || code == LSHIFTRT)
	      && (tmode = mode_for_size (GET_MODE_BITSIZE (mode) - count,
					 MODE_INT, 1)) != BLKmode
	      && tmode == GET_MODE (XEXP (varop, 0)))
	    {
	      if (BITS_BIG_ENDIAN)
		new = XEXP (varop, 0);
	      else
		{
		  new = copy_rtx (XEXP (varop, 0));
		  SUBST (XEXP (new, 0),
			 plus_constant (XEXP (new, 0),
					count / BITS_PER_UNIT));
		}

	      varop = gen_rtx_combine (code == ASHIFTRT ? SIGN_EXTEND
				       : ZERO_EXTEND, mode, new);
	      count = 0;
	      continue;
	    }
	  break;

	case SUBREG:
	  /* If VAROP is a SUBREG, strip it as long as the inner operand has
	     the same number of words as what we've seen so far.  Then store
	     the widest mode in MODE.  */
	  if (subreg_lowpart_p (varop)
	      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop)))
		  > GET_MODE_SIZE (GET_MODE (varop)))
	      && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop)))
		    + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
		  == mode_words))
	    {
	      varop = SUBREG_REG (varop);
	      if (GET_MODE_SIZE (GET_MODE (varop)) > GET_MODE_SIZE (mode))
		mode = GET_MODE (varop);
	      continue;
	    }
	  break;

	case MULT:
	  /* Some machines use MULT instead of ASHIFT because MULT
	     is cheaper.  But it is still better on those machines to
	     merge two shifts into one.  */
	  if (GET_CODE (XEXP (varop, 1)) == CONST_INT
	      && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0)
	    {
	      varop = gen_binary (ASHIFT, GET_MODE (varop), XEXP (varop, 0),
				  GEN_INT (exact_log2 (INTVAL (XEXP (varop, 1)))));;
	      continue;
	    }
	  break;

	case UDIV:
	  /* Similar, for when divides are cheaper.  */
	  if (GET_CODE (XEXP (varop, 1)) == CONST_INT
	      && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0)
	    {
	      varop = gen_binary (LSHIFTRT, GET_MODE (varop), XEXP (varop, 0),
				  GEN_INT (exact_log2 (INTVAL (XEXP (varop, 1)))));
	      continue;
	    }
	  break;

	case ASHIFTRT:
	  /* If we are extracting just the sign bit of an arithmetic right
	     shift, that shift is not needed.  */
	  if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1)
	    {
	      varop = XEXP (varop, 0);
	      continue;
	    }

	  /* ... fall through ...  */

	case LSHIFTRT:
	case ASHIFT:
	case ROTATE:
	  /* Here we have two nested shifts.  The result is usually the
	     AND of a new shift with a mask.  We compute the result below.  */
	  if (GET_CODE (XEXP (varop, 1)) == CONST_INT
	      && INTVAL (XEXP (varop, 1)) >= 0
	      && INTVAL (XEXP (varop, 1)) < GET_MODE_BITSIZE (GET_MODE (varop))
	      && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
	      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
	    {
	      enum rtx_code first_code = GET_CODE (varop);
	      int first_count = INTVAL (XEXP (varop, 1));
	      unsigned HOST_WIDE_INT mask;
	      rtx mask_rtx;

	      /* We have one common special case.  We can't do any merging if
		 the inner code is an ASHIFTRT of a smaller mode.  However, if
		 we have (ashift:M1 (subreg:M1 (ashiftrt:M2 FOO C1) 0) C2)
		 with C2 == GET_MODE_BITSIZE (M1) - GET_MODE_BITSIZE (M2),
		 we can convert it to
		 (ashiftrt:M1 (ashift:M1 (and:M1 (subreg:M1 FOO 0 C2) C3) C1).
		 This simplifies certain SIGN_EXTEND operations.  */
	      if (code == ASHIFT && first_code == ASHIFTRT
		  && (GET_MODE_BITSIZE (result_mode)
		      - GET_MODE_BITSIZE (GET_MODE (varop))) == count)
		{
		  /* C3 has the low-order C1 bits zero.  */

		  mask = (GET_MODE_MASK (mode)
			  & ~ (((HOST_WIDE_INT) 1 << first_count) - 1));

		  varop = simplify_and_const_int (NULL_RTX, result_mode,
						  XEXP (varop, 0), mask);
		  varop = simplify_shift_const (NULL_RTX, ASHIFT, result_mode,
						varop, count);
		  count = first_count;
		  code = ASHIFTRT;
		  continue;
		}

	      /* If this was (ashiftrt (ashift foo C1) C2) and FOO has more
		 than C1 high-order bits equal to the sign bit, we can convert
		 this to either an ASHIFT or a ASHIFTRT depending on the
		 two counts.

		 We cannot do this if VAROP's mode is not SHIFT_MODE.  */

	      if (code == ASHIFTRT && first_code == ASHIFT
		  && GET_MODE (varop) == shift_mode
		  && (num_sign_bit_copies (XEXP (varop, 0), shift_mode)
		      > first_count))
		{
		  count -= first_count;
		  if (count < 0)
		    count = - count, code = ASHIFT;
		  varop = XEXP (varop, 0);
		  continue;
		}

	      /* There are some cases we can't do.  If CODE is ASHIFTRT,
		 we can only do this if FIRST_CODE is also ASHIFTRT.

		 We can't do the case when CODE is ROTATE and FIRST_CODE is
		 ASHIFTRT.

		 If the mode of this shift is not the mode of the outer shift,
		 we can't do this if either shift is a right shift or ROTATE.

		 Finally, we can't do any of these if the mode is too wide
		 unless the codes are the same.

		 Handle the case where the shift codes are the same
		 first.  */

	      if (code == first_code)
		{
		  if (GET_MODE (varop) != result_mode
		      && (code == ASHIFTRT || code == LSHIFTRT
			  || code == ROTATE))
		    break;

		  count += first_count;
		  varop = XEXP (varop, 0);
		  continue;
		}

	      if (code == ASHIFTRT
		  || (code == ROTATE && first_code == ASHIFTRT)
		  || GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT
		  || (GET_MODE (varop) != result_mode
		      && (first_code == ASHIFTRT || first_code == LSHIFTRT
			  || first_code == ROTATE
			  || code == ROTATE)))
		break;

	      /* To compute the mask to apply after the shift, shift the
		 nonzero bits of the inner shift the same way the
		 outer shift will.  */

	      mask_rtx = GEN_INT (nonzero_bits (varop, GET_MODE (varop)));

	      mask_rtx
		= simplify_binary_operation (code, result_mode, mask_rtx,
					     GEN_INT (count));

	      /* Give up if we can't compute an outer operation to use.  */
	      if (mask_rtx == 0
		  || GET_CODE (mask_rtx) != CONST_INT
		  || ! merge_outer_ops (&outer_op, &outer_const, AND,
					INTVAL (mask_rtx),
					result_mode, &complement_p))
		break;

	      /* If the shifts are in the same direction, we add the
		 counts.  Otherwise, we subtract them.  */
	      if ((code == ASHIFTRT || code == LSHIFTRT)
		  == (first_code == ASHIFTRT || first_code == LSHIFTRT))
		count += first_count;
	      else
		count -= first_count;

	      /* If COUNT is positive, the new shift is usually CODE,
		 except for the two exceptions below, in which case it is
		 FIRST_CODE.  If the count is negative, FIRST_CODE should
		 always be used  */
	      if (count > 0
		  && ((first_code == ROTATE && code == ASHIFT)
		      || (first_code == ASHIFTRT && code == LSHIFTRT)))
		code = first_code;
	      else if (count < 0)
		code = first_code, count = - count;

	      varop = XEXP (varop, 0);
	      continue;
	    }

	  /* If we have (A << B << C) for any shift, we can convert this to
	     (A << C << B).  This wins if A is a constant.  Only try this if
	     B is not a constant.  */

	  else if (GET_CODE (varop) == code
		   && GET_CODE (XEXP (varop, 1)) != CONST_INT
		   && 0 != (new
			    = simplify_binary_operation (code, mode,
							 XEXP (varop, 0),
							 GEN_INT (count))))
	    {
	      varop = gen_rtx_combine (code, mode, new, XEXP (varop, 1));
	      count = 0;
	      continue;
	    }
	  break;

	case NOT:
	  /* Rewrite (not X) as (xor X -1) so the IOR/AND/XOR case below
	     handles it.  */
	  varop = gen_rtx_combine (XOR, mode, XEXP (varop, 0),
				   GEN_INT (GET_MODE_MASK (mode)));
	  continue;

	case IOR:
	case AND:
	case XOR:
	  /* If we have (xshiftrt (ior (plus X (const_int -1)) X) C)
	     with C the size of VAROP - 1 and the shift is logical if
	     STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
	     we have an (le X 0) operation.   If we have an arithmetic shift
	     and STORE_FLAG_VALUE is 1 or we have a logical shift with
	     STORE_FLAG_VALUE of -1, we have a (neg (le X 0)) operation.  */

	  if (GET_CODE (varop) == IOR && GET_CODE (XEXP (varop, 0)) == PLUS
	      && XEXP (XEXP (varop, 0), 1) == constm1_rtx
	      && (STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
	      && (code == LSHIFTRT || code == ASHIFTRT)
	      && count == GET_MODE_BITSIZE (GET_MODE (varop)) - 1
	      && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
	    {
	      count = 0;
	      varop = gen_rtx_combine (LE, GET_MODE (varop), XEXP (varop, 1),
				       const0_rtx);

	      if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
		varop = gen_rtx_combine (NEG, GET_MODE (varop), varop);

	      continue;
	    }

	  /* If we have (shift (logical)), move the logical to the outside
	     to allow it to possibly combine with another logical and the
	     shift to combine with another shift.  This also canonicalizes to
	     what a ZERO_EXTRACT looks like.  Also, some machines have
	     (and (shift)) insns.  */

	  if (GET_CODE (XEXP (varop, 1)) == CONST_INT
	      && (new = simplify_binary_operation (code, result_mode,
						   XEXP (varop, 1),
						   GEN_INT (count))) != 0
	      && GET_CODE(new) == CONST_INT
	      && merge_outer_ops (&outer_op, &outer_const, GET_CODE (varop),
				  INTVAL (new), result_mode, &complement_p))
	    {
	      varop = XEXP (varop, 0);
	      continue;
	    }

	  /* If we can't do that, try to simplify the shift in each arm of the
	     logical expression, make a new logical expression, and apply
	     the inverse distributive law.  */
	  {
	    rtx lhs = simplify_shift_const (NULL_RTX, code, shift_mode,
					    XEXP (varop, 0), count);
	    rtx rhs = simplify_shift_const (NULL_RTX, code, shift_mode,
					    XEXP (varop, 1), count);

	    varop = gen_binary (GET_CODE (varop), shift_mode, lhs, rhs);
	    varop = apply_distributive_law (varop);

	    count = 0;
	  }
	  break;

	case EQ:
	  /* convert (lshiftrt (eq FOO 0) C) to (xor FOO 1) if STORE_FLAG_VALUE
	     says that the sign bit can be tested, FOO has mode MODE, C is
	     GET_MODE_BITSIZE (MODE) - 1, and FOO has only its low-order bit
	     that may be nonzero.  */
	  if (code == LSHIFTRT
	      && XEXP (varop, 1) == const0_rtx
	      && GET_MODE (XEXP (varop, 0)) == result_mode
	      && count == GET_MODE_BITSIZE (result_mode) - 1
	      && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
	      && ((STORE_FLAG_VALUE
		   & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (result_mode) - 1))))
	      && nonzero_bits (XEXP (varop, 0), result_mode) == 1
	      && merge_outer_ops (&outer_op, &outer_const, XOR,
				  (HOST_WIDE_INT) 1, result_mode,
				  &complement_p))
	    {
	      varop = XEXP (varop, 0);
	      count = 0;
	      continue;
	    }
	  break;

	case NEG:
	  /* (lshiftrt (neg A) C) where A is either 0 or 1 and C is one less
	     than the number of bits in the mode is equivalent to A.  */
	  if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1
	      && nonzero_bits (XEXP (varop, 0), result_mode) == 1)
	    {
	      varop = XEXP (varop, 0);
	      count = 0;
	      continue;
	    }

	  /* NEG commutes with ASHIFT since it is multiplication.  Move the
	     NEG outside to allow shifts to combine.  */
	  if (code == ASHIFT
	      && merge_outer_ops (&outer_op, &outer_const, NEG,
				  (HOST_WIDE_INT) 0, result_mode,
				  &complement_p))
	    {
	      varop = XEXP (varop, 0);
	      continue;
	    }
	  break;

	case PLUS:
	  /* (lshiftrt (plus A -1) C) where A is either 0 or 1 and C
	     is one less than the number of bits in the mode is
	     equivalent to (xor A 1).  */
	  if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1
	      && XEXP (varop, 1) == constm1_rtx
	      && nonzero_bits (XEXP (varop, 0), result_mode) == 1
	      && merge_outer_ops (&outer_op, &outer_const, XOR,
				  (HOST_WIDE_INT) 1, result_mode,
				  &complement_p))
	    {
	      count = 0;
	      varop = XEXP (varop, 0);
	      continue;
	    }

	  /* If we have (xshiftrt (plus FOO BAR) C), and the only bits
	     that might be nonzero in BAR are those being shifted out and those
	     bits are known zero in FOO, we can replace the PLUS with FOO.
	     Similarly in the other operand order.  This code occurs when
	     we are computing the size of a variable-size array.  */

	  if ((code == ASHIFTRT || code == LSHIFTRT)
	      && count < HOST_BITS_PER_WIDE_INT
	      && nonzero_bits (XEXP (varop, 1), result_mode) >> count == 0
	      && (nonzero_bits (XEXP (varop, 1), result_mode)
		  & nonzero_bits (XEXP (varop, 0), result_mode)) == 0)
	    {
	      varop = XEXP (varop, 0);
	      continue;
	    }
	  else if ((code == ASHIFTRT || code == LSHIFTRT)
		   && count < HOST_BITS_PER_WIDE_INT
		   && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
		   && 0 == (nonzero_bits (XEXP (varop, 0), result_mode)
			    >> count)
		   && 0 == (nonzero_bits (XEXP (varop, 0), result_mode)
			    & nonzero_bits (XEXP (varop, 1),
					    result_mode)))
	    {
	      varop = XEXP (varop, 1);
	      continue;
	    }

	  /* (ashift (plus foo C) N) is (plus (ashift foo N) C').  */
	  if (code == ASHIFT
	      && GET_CODE (XEXP (varop, 1)) == CONST_INT
	      && (new = simplify_binary_operation (ASHIFT, result_mode,
						   XEXP (varop, 1),
						   GEN_INT (count))) != 0
	      && GET_CODE(new) == CONST_INT
	      && merge_outer_ops (&outer_op, &outer_const, PLUS,
				  INTVAL (new), result_mode, &complement_p))
	    {
	      varop = XEXP (varop, 0);
	      continue;
	    }
	  break;

	case MINUS:
	  /* If we have (xshiftrt (minus (ashiftrt X C) X) C)
	     with C the size of VAROP - 1 and the shift is logical if
	     STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
	     we have a (gt X 0) operation.  If the shift is arithmetic with
	     STORE_FLAG_VALUE of 1 or logical with STORE_FLAG_VALUE == -1,
	     we have a (neg (gt X 0)) operation.  */

	  if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
	      && GET_CODE (XEXP (varop, 0)) == ASHIFTRT
	      && count == GET_MODE_BITSIZE (GET_MODE (varop)) - 1
	      && (code == LSHIFTRT || code == ASHIFTRT)
	      && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT
	      && INTVAL (XEXP (XEXP (varop, 0), 1)) == count
	      && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
	    {
	      count = 0;
	      varop = gen_rtx_combine (GT, GET_MODE (varop), XEXP (varop, 1),
				       const0_rtx);

	      if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
		varop = gen_rtx_combine (NEG, GET_MODE (varop), varop);

	      continue;
	    }
	  break;

	case TRUNCATE:
	  /* Change (lshiftrt (truncate (lshiftrt))) to (truncate (lshiftrt))
	     if the truncate does not affect the value.  */
	  if (code == LSHIFTRT
	      && GET_CODE (XEXP (varop, 0)) == LSHIFTRT
	      && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT
	      && (INTVAL (XEXP (XEXP (varop, 0), 1))
		  >= (GET_MODE_BITSIZE (GET_MODE (XEXP (varop, 0)))
		      - GET_MODE_BITSIZE (GET_MODE (varop)))))
	    {
	      rtx varop_inner = XEXP (varop, 0);

	      varop_inner = gen_rtx_combine (LSHIFTRT,
					     GET_MODE (varop_inner),
					     XEXP (varop_inner, 0),
					     GEN_INT (count + INTVAL (XEXP (varop_inner, 1))));
	      varop = gen_rtx_combine (TRUNCATE, GET_MODE (varop),
				       varop_inner);
	      count = 0;
	      continue;
	    }
	  break;

	default:
	  break;
	}

      break;
    }

  /* We need to determine what mode to do the shift in.  If the shift is
     a right shift or ROTATE, we must always do it in the mode it was
     originally done in.  Otherwise, we can do it in MODE, the widest mode
     encountered.  The code we care about is that of the shift that will
     actually be done, not the shift that was originally requested.  */
  shift_mode
    = (code == ASHIFTRT || code == LSHIFTRT || code == ROTATE
       ? result_mode : mode);

  /* We have now finished analyzing the shift.  The result should be
     a shift of type CODE with SHIFT_MODE shifting VAROP COUNT places.  If
     OUTER_OP is non-NIL, it is an operation that needs to be applied
     to the result of the shift.  OUTER_CONST is the relevant constant,
     but we must turn off all bits turned off in the shift.

     If we were passed a value for X, see if we can use any pieces of
     it.  If not, make new rtx.  */

  if (x && GET_RTX_CLASS (GET_CODE (x)) == '2'
      && GET_CODE (XEXP (x, 1)) == CONST_INT
      && INTVAL (XEXP (x, 1)) == count)
    const_rtx = XEXP (x, 1);
  else
    const_rtx = GEN_INT (count);

  if (x && GET_CODE (XEXP (x, 0)) == SUBREG
      && GET_MODE (XEXP (x, 0)) == shift_mode
      && SUBREG_REG (XEXP (x, 0)) == varop)
    varop = XEXP (x, 0);
  else if (GET_MODE (varop) != shift_mode)
    varop = gen_lowpart_for_combine (shift_mode, varop);

  /* If we can't make the SUBREG, try to return what we were given.  */
  if (GET_CODE (varop) == CLOBBER)
    return x ? x : varop;

  new = simplify_binary_operation (code, shift_mode, varop, const_rtx);
  if (new != 0)
    x = new;
  else
    {
      if (x == 0 || GET_CODE (x) != code || GET_MODE (x) != shift_mode)
	x = gen_rtx_combine (code, shift_mode, varop, const_rtx);

      SUBST (XEXP (x, 0), varop);
      SUBST (XEXP (x, 1), const_rtx);
    }

  /* If we have an outer operation and we just made a shift, it is
     possible that we could have simplified the shift were it not
     for the outer operation.  So try to do the simplification
     recursively.  */

  if (outer_op != NIL && GET_CODE (x) == code
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    x = simplify_shift_const (x, code, shift_mode, XEXP (x, 0),
			      INTVAL (XEXP (x, 1)));

  /* If we were doing a LSHIFTRT in a wider mode than it was originally,
     turn off all the bits that the shift would have turned off.  */
  if (orig_code == LSHIFTRT && result_mode != shift_mode)
    x = simplify_and_const_int (NULL_RTX, shift_mode, x,
				GET_MODE_MASK (result_mode) >> orig_count);

  /* Do the remainder of the processing in RESULT_MODE.  */
  x = gen_lowpart_for_combine (result_mode, x);

  /* If COMPLEMENT_P is set, we have to complement X before doing the outer
     operation.  */
  if (complement_p)
    x = gen_unary (NOT, result_mode, result_mode, x);

  if (outer_op != NIL)
    {
      if (GET_MODE_BITSIZE (result_mode) < HOST_BITS_PER_WIDE_INT)
	{
	  int width = GET_MODE_BITSIZE (result_mode);

	  outer_const &= GET_MODE_MASK (result_mode);

	  /* If this would be an entire word for the target, but is not for
	     the host, then sign-extend on the host so that the number will
	     look the same way on the host that it would on the target.

	     For example, when building a 64 bit alpha hosted 32 bit sparc
	     targeted compiler, then we want the 32 bit unsigned value -1 to be
	     represented as a 64 bit value -1, and not as 0x00000000ffffffff.
	     The latter confuses the sparc backend.  */

	  if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT && BITS_PER_WORD == width
	      && (outer_const & ((HOST_WIDE_INT) 1 << (width - 1))))
	    outer_const |= ((HOST_WIDE_INT) (-1) << width);
	}

      if (outer_op == AND)
	x = simplify_and_const_int (NULL_RTX, result_mode, x, outer_const);
      else if (outer_op == SET)
	/* This means that we have determined that the result is
	   equivalent to a constant.  This should be rare.  */
	x = GEN_INT (outer_const);
      else if (GET_RTX_CLASS (outer_op) == '1')
	x = gen_unary (outer_op, result_mode, result_mode, x);
      else
	x = gen_binary (outer_op, result_mode, x, GEN_INT (outer_const));
    }

  return x;
}
8966\f
/* Like recog, but we receive the address of a pointer to a new pattern.
   We try to match the rtx that the pointer points to.
   If that fails, we may try to modify or replace the pattern,
   storing the replacement into the same pointer object.

   Modifications include deletion or addition of CLOBBERs.

   PNOTES is a pointer to a location where any REG_UNUSED notes added for
   the CLOBBERs are placed.

   PADDED_SCRATCHES is set to the number of (clobber (scratch)) patterns
   we had to add.

   The value is the final insn code from the pattern ultimately matched,
   or -1.  */

static int
recog_for_combine (pnewpat, insn, pnotes, padded_scratches)
     rtx *pnewpat;
     rtx insn;
     rtx *pnotes;
     int *padded_scratches;
{
  register rtx pat = *pnewpat;
  int insn_code_number;
  /* recog stores here how many clobbers it would need added to PAT
     in order for the pattern to match.  */
  int num_clobbers_to_add = 0;
  int i;
  rtx notes = 0;

  *padded_scratches = 0;

  /* If PAT is a PARALLEL, check to see if it contains the CLOBBER
     we use to indicate that something didn't match.  If we find such a
     thing, force rejection.  */
  if (GET_CODE (pat) == PARALLEL)
    for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
      if (GET_CODE (XVECEXP (pat, 0, i)) == CLOBBER
	  && XEXP (XVECEXP (pat, 0, i), 0) == const0_rtx)
	return -1;

  /* Is the result of combination a valid instruction?  */
  insn_code_number = recog (pat, insn, &num_clobbers_to_add);

  /* If it isn't, there is the possibility that we previously had an insn
     that clobbered some register as a side effect, but the combined
     insn doesn't need to do that.  So try once more without the clobbers
     unless this represents an ASM insn.  */

  if (insn_code_number < 0 && ! check_asm_operands (pat)
      && GET_CODE (pat) == PARALLEL)
    {
      int pos;

      /* Compact the PARALLEL in place, dropping every CLOBBER element.
	 SUBST records each change so it can be undone if the combination
	 is later abandoned.  */
      for (pos = 0, i = 0; i < XVECLEN (pat, 0); i++)
	if (GET_CODE (XVECEXP (pat, 0, i)) != CLOBBER)
	  {
	    if (i != pos)
	      SUBST (XVECEXP (pat, 0, pos), XVECEXP (pat, 0, i));
	    pos++;
	  }

      SUBST_INT (XVECLEN (pat, 0), pos);

      /* A one-element PARALLEL collapses to its single element.  */
      if (pos == 1)
	pat = XVECEXP (pat, 0, 0);

      insn_code_number = recog (pat, insn, &num_clobbers_to_add);
    }

  /* If we had any clobbers to add, make a new pattern that contains
     them.  Then check to make sure that all of them are dead.  */
  if (num_clobbers_to_add)
    {
      rtx newpat = gen_rtx_PARALLEL (VOIDmode,
				     gen_rtvec (GET_CODE (pat) == PARALLEL
						? XVECLEN (pat, 0) + num_clobbers_to_add
						: num_clobbers_to_add + 1));

      if (GET_CODE (pat) == PARALLEL)
	for (i = 0; i < XVECLEN (pat, 0); i++)
	  XVECEXP (newpat, 0, i) = XVECEXP (pat, 0, i);
      else
	XVECEXP (newpat, 0, 0) = pat;

      /* Let the target's pattern fill in the required clobbers.  */
      add_clobbers (newpat, insn_code_number);

      /* The added clobbers occupy the trailing slots of NEWPAT.  Reject
	 the match if any clobbered register is still live; count scratch
	 clobbers and attach a REG_UNUSED note for each addition.  */
      for (i = XVECLEN (newpat, 0) - num_clobbers_to_add;
	   i < XVECLEN (newpat, 0); i++)
	{
	  if (GET_CODE (XEXP (XVECEXP (newpat, 0, i), 0)) == REG
	      && ! reg_dead_at_p (XEXP (XVECEXP (newpat, 0, i), 0), insn))
	    return -1;
	  else if (GET_CODE (XEXP (XVECEXP (newpat, 0, i), 0)) == SCRATCH)
	    (*padded_scratches)++;
	  notes = gen_rtx_EXPR_LIST (REG_UNUSED,
				     XEXP (XVECEXP (newpat, 0, i), 0), notes);
	}
      pat = newpat;
    }

  *pnewpat = pat;
  *pnotes = notes;

  return insn_code_number;
}
9072\f
9073/* Like gen_lowpart but for use by combine. In combine it is not possible
9074 to create any new pseudoregs. However, it is safe to create
9075 invalid memory addresses, because combine will try to recognize
9076 them and all they will do is make the combine attempt fail.
9077
9078 If for some reason this cannot do its job, an rtx
9079 (clobber (const_int 0)) is returned.
9080 An insn containing that will not be recognized. */
9081
9082#undef gen_lowpart
9083
9084static rtx
9085gen_lowpart_for_combine (mode, x)
9086 enum machine_mode mode;
9087 register rtx x;
9088{
9089 rtx result;
9090
9091 if (GET_MODE (x) == mode)
9092 return x;
9093
eae957a8
RK
9094 /* We can only support MODE being wider than a word if X is a
9095 constant integer or has a mode the same size. */
9096
9097 if (GET_MODE_SIZE (mode) > UNITS_PER_WORD
9098 && ! ((GET_MODE (x) == VOIDmode
9099 && (GET_CODE (x) == CONST_INT
9100 || GET_CODE (x) == CONST_DOUBLE))
9101 || GET_MODE_SIZE (GET_MODE (x)) == GET_MODE_SIZE (mode)))
38a448ca 9102 return gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
230d793d
RS
9103
9104 /* X might be a paradoxical (subreg (mem)). In that case, gen_lowpart
9105 won't know what to do. So we will strip off the SUBREG here and
9106 process normally. */
9107 if (GET_CODE (x) == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
9108 {
9109 x = SUBREG_REG (x);
9110 if (GET_MODE (x) == mode)
9111 return x;
9112 }
9113
9114 result = gen_lowpart_common (mode, x);
64bf47a2
RK
9115 if (result != 0
9116 && GET_CODE (result) == SUBREG
9117 && GET_CODE (SUBREG_REG (result)) == REG
9118 && REGNO (SUBREG_REG (result)) >= FIRST_PSEUDO_REGISTER
9119 && (GET_MODE_SIZE (GET_MODE (result))
9120 != GET_MODE_SIZE (GET_MODE (SUBREG_REG (result)))))
b1f21e0a 9121 REG_CHANGES_SIZE (REGNO (SUBREG_REG (result))) = 1;
64bf47a2 9122
230d793d
RS
9123 if (result)
9124 return result;
9125
9126 if (GET_CODE (x) == MEM)
9127 {
9128 register int offset = 0;
9129 rtx new;
9130
9131 /* Refuse to work on a volatile memory ref or one with a mode-dependent
9132 address. */
9133 if (MEM_VOLATILE_P (x) || mode_dependent_address_p (XEXP (x, 0)))
38a448ca 9134 return gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
230d793d
RS
9135
9136 /* If we want to refer to something bigger than the original memref,
9137 generate a perverse subreg instead. That will force a reload
9138 of the original memref X. */
9139 if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (mode))
38a448ca 9140 return gen_rtx_SUBREG (mode, x, 0);
230d793d 9141
f76b9db2
ILT
9142 if (WORDS_BIG_ENDIAN)
9143 offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
9144 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
9145 if (BYTES_BIG_ENDIAN)
9146 {
9147 /* Adjust the address so that the address-after-the-data is
9148 unchanged. */
9149 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
9150 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
9151 }
38a448ca 9152 new = gen_rtx_MEM (mode, plus_constant (XEXP (x, 0), offset));
230d793d
RS
9153 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
9154 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x);
9155 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
9156 return new;
9157 }
9158
9159 /* If X is a comparison operator, rewrite it in a new mode. This
9160 probably won't match, but may allow further simplifications. */
9161 else if (GET_RTX_CLASS (GET_CODE (x)) == '<')
9162 return gen_rtx_combine (GET_CODE (x), mode, XEXP (x, 0), XEXP (x, 1));
9163
9164 /* If we couldn't simplify X any other way, just enclose it in a
9165 SUBREG. Normally, this SUBREG won't match, but some patterns may
a7c99304 9166 include an explicit SUBREG or we may simplify it further in combine. */
230d793d 9167 else
dfbe1b2f
RK
9168 {
9169 int word = 0;
9170
9171 if (WORDS_BIG_ENDIAN && GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD)
9172 word = ((GET_MODE_SIZE (GET_MODE (x))
9173 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD))
9174 / UNITS_PER_WORD);
38a448ca 9175 return gen_rtx_SUBREG (mode, x, word);
dfbe1b2f 9176 }
230d793d
RS
9177}
9178\f
9179/* Make an rtx expression. This is a subset of gen_rtx and only supports
9180 expressions of 1, 2, or 3 operands, each of which are rtx expressions.
9181
9182 If the identical expression was previously in the insn (in the undobuf),
9183 it will be returned. Only if it is not found will a new expression
9184 be made. */
9185
9186/*VARARGS2*/
9187static rtx
4f90e4a0 9188gen_rtx_combine VPROTO((enum rtx_code code, enum machine_mode mode, ...))
230d793d 9189{
4f90e4a0 9190#ifndef __STDC__
230d793d
RS
9191 enum rtx_code code;
9192 enum machine_mode mode;
4f90e4a0
RK
9193#endif
9194 va_list p;
230d793d
RS
9195 int n_args;
9196 rtx args[3];
b729186a 9197 int j;
230d793d
RS
9198 char *fmt;
9199 rtx rt;
241cea85 9200 struct undo *undo;
230d793d 9201
4f90e4a0
RK
9202 VA_START (p, mode);
9203
9204#ifndef __STDC__
230d793d
RS
9205 code = va_arg (p, enum rtx_code);
9206 mode = va_arg (p, enum machine_mode);
4f90e4a0
RK
9207#endif
9208
230d793d
RS
9209 n_args = GET_RTX_LENGTH (code);
9210 fmt = GET_RTX_FORMAT (code);
9211
9212 if (n_args == 0 || n_args > 3)
9213 abort ();
9214
9215 /* Get each arg and verify that it is supposed to be an expression. */
9216 for (j = 0; j < n_args; j++)
9217 {
9218 if (*fmt++ != 'e')
9219 abort ();
9220
9221 args[j] = va_arg (p, rtx);
9222 }
9223
9224 /* See if this is in undobuf. Be sure we don't use objects that came
9225 from another insn; this could produce circular rtl structures. */
9226
241cea85
RK
9227 for (undo = undobuf.undos; undo != undobuf.previous_undos; undo = undo->next)
9228 if (!undo->is_int
9229 && GET_CODE (undo->old_contents.r) == code
9230 && GET_MODE (undo->old_contents.r) == mode)
230d793d
RS
9231 {
9232 for (j = 0; j < n_args; j++)
241cea85 9233 if (XEXP (undo->old_contents.r, j) != args[j])
230d793d
RS
9234 break;
9235
9236 if (j == n_args)
241cea85 9237 return undo->old_contents.r;
230d793d
RS
9238 }
9239
9240 /* Otherwise make a new rtx. We know we have 1, 2, or 3 args.
9241 Use rtx_alloc instead of gen_rtx because it's faster on RISC. */
9242 rt = rtx_alloc (code);
9243 PUT_MODE (rt, mode);
9244 XEXP (rt, 0) = args[0];
9245 if (n_args > 1)
9246 {
9247 XEXP (rt, 1) = args[1];
9248 if (n_args > 2)
9249 XEXP (rt, 2) = args[2];
9250 }
9251 return rt;
9252}
9253
9254/* These routines make binary and unary operations by first seeing if they
9255 fold; if not, a new expression is allocated. */
9256
9257static rtx
9258gen_binary (code, mode, op0, op1)
9259 enum rtx_code code;
9260 enum machine_mode mode;
9261 rtx op0, op1;
9262{
9263 rtx result;
1a26b032
RK
9264 rtx tem;
9265
9266 if (GET_RTX_CLASS (code) == 'c'
9267 && (GET_CODE (op0) == CONST_INT
9268 || (CONSTANT_P (op0) && GET_CODE (op1) != CONST_INT)))
9269 tem = op0, op0 = op1, op1 = tem;
230d793d
RS
9270
9271 if (GET_RTX_CLASS (code) == '<')
9272 {
9273 enum machine_mode op_mode = GET_MODE (op0);
9210df58
RK
9274
9275 /* Strip the COMPARE from (REL_OP (compare X Y) 0) to get
0f41302f 9276 just (REL_OP X Y). */
9210df58
RK
9277 if (GET_CODE (op0) == COMPARE && op1 == const0_rtx)
9278 {
9279 op1 = XEXP (op0, 1);
9280 op0 = XEXP (op0, 0);
9281 op_mode = GET_MODE (op0);
9282 }
9283
230d793d
RS
9284 if (op_mode == VOIDmode)
9285 op_mode = GET_MODE (op1);
9286 result = simplify_relational_operation (code, op_mode, op0, op1);
9287 }
9288 else
9289 result = simplify_binary_operation (code, mode, op0, op1);
9290
9291 if (result)
9292 return result;
9293
9294 /* Put complex operands first and constants second. */
9295 if (GET_RTX_CLASS (code) == 'c'
9296 && ((CONSTANT_P (op0) && GET_CODE (op1) != CONST_INT)
9297 || (GET_RTX_CLASS (GET_CODE (op0)) == 'o'
9298 && GET_RTX_CLASS (GET_CODE (op1)) != 'o')
9299 || (GET_CODE (op0) == SUBREG
9300 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (op0))) == 'o'
9301 && GET_RTX_CLASS (GET_CODE (op1)) != 'o')))
9302 return gen_rtx_combine (code, mode, op1, op0);
9303
9304 return gen_rtx_combine (code, mode, op0, op1);
9305}
9306
9307static rtx
0c1c8ea6 9308gen_unary (code, mode, op0_mode, op0)
230d793d 9309 enum rtx_code code;
0c1c8ea6 9310 enum machine_mode mode, op0_mode;
230d793d
RS
9311 rtx op0;
9312{
0c1c8ea6 9313 rtx result = simplify_unary_operation (code, mode, op0, op0_mode);
230d793d
RS
9314
9315 if (result)
9316 return result;
9317
9318 return gen_rtx_combine (code, mode, op0);
9319}
9320\f
9321/* Simplify a comparison between *POP0 and *POP1 where CODE is the
9322 comparison code that will be tested.
9323
9324 The result is a possibly different comparison code to use. *POP0 and
9325 *POP1 may be updated.
9326
9327 It is possible that we might detect that a comparison is either always
9328 true or always false. However, we do not perform general constant
5089e22e 9329 folding in combine, so this knowledge isn't useful. Such tautologies
230d793d
RS
9330 should have been detected earlier. Hence we ignore all such cases. */
9331
9332static enum rtx_code
9333simplify_comparison (code, pop0, pop1)
9334 enum rtx_code code;
9335 rtx *pop0;
9336 rtx *pop1;
9337{
9338 rtx op0 = *pop0;
9339 rtx op1 = *pop1;
9340 rtx tem, tem1;
9341 int i;
9342 enum machine_mode mode, tmode;
9343
9344 /* Try a few ways of applying the same transformation to both operands. */
9345 while (1)
9346 {
3a19aabc
RK
9347#ifndef WORD_REGISTER_OPERATIONS
9348 /* The test below this one won't handle SIGN_EXTENDs on these machines,
9349 so check specially. */
9350 if (code != GTU && code != GEU && code != LTU && code != LEU
9351 && GET_CODE (op0) == ASHIFTRT && GET_CODE (op1) == ASHIFTRT
9352 && GET_CODE (XEXP (op0, 0)) == ASHIFT
9353 && GET_CODE (XEXP (op1, 0)) == ASHIFT
9354 && GET_CODE (XEXP (XEXP (op0, 0), 0)) == SUBREG
9355 && GET_CODE (XEXP (XEXP (op1, 0), 0)) == SUBREG
9356 && (GET_MODE (SUBREG_REG (XEXP (XEXP (op0, 0), 0)))
ad25ba17 9357 == GET_MODE (SUBREG_REG (XEXP (XEXP (op1, 0), 0))))
3a19aabc
RK
9358 && GET_CODE (XEXP (op0, 1)) == CONST_INT
9359 && GET_CODE (XEXP (op1, 1)) == CONST_INT
9360 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
9361 && GET_CODE (XEXP (XEXP (op1, 0), 1)) == CONST_INT
9362 && INTVAL (XEXP (op0, 1)) == INTVAL (XEXP (op1, 1))
9363 && INTVAL (XEXP (op0, 1)) == INTVAL (XEXP (XEXP (op0, 0), 1))
9364 && INTVAL (XEXP (op0, 1)) == INTVAL (XEXP (XEXP (op1, 0), 1))
9365 && (INTVAL (XEXP (op0, 1))
9366 == (GET_MODE_BITSIZE (GET_MODE (op0))
9367 - (GET_MODE_BITSIZE
9368 (GET_MODE (SUBREG_REG (XEXP (XEXP (op0, 0), 0))))))))
9369 {
9370 op0 = SUBREG_REG (XEXP (XEXP (op0, 0), 0));
9371 op1 = SUBREG_REG (XEXP (XEXP (op1, 0), 0));
9372 }
9373#endif
9374
230d793d
RS
9375 /* If both operands are the same constant shift, see if we can ignore the
9376 shift. We can if the shift is a rotate or if the bits shifted out of
951553af 9377 this shift are known to be zero for both inputs and if the type of
230d793d 9378 comparison is compatible with the shift. */
67232b23
RK
9379 if (GET_CODE (op0) == GET_CODE (op1)
9380 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
9381 && ((GET_CODE (op0) == ROTATE && (code == NE || code == EQ))
45620ed4 9382 || ((GET_CODE (op0) == LSHIFTRT || GET_CODE (op0) == ASHIFT)
67232b23
RK
9383 && (code != GT && code != LT && code != GE && code != LE))
9384 || (GET_CODE (op0) == ASHIFTRT
9385 && (code != GTU && code != LTU
9386 && code != GEU && code != GEU)))
9387 && GET_CODE (XEXP (op0, 1)) == CONST_INT
9388 && INTVAL (XEXP (op0, 1)) >= 0
9389 && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
9390 && XEXP (op0, 1) == XEXP (op1, 1))
230d793d
RS
9391 {
9392 enum machine_mode mode = GET_MODE (op0);
5f4f0e22 9393 unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
230d793d
RS
9394 int shift_count = INTVAL (XEXP (op0, 1));
9395
9396 if (GET_CODE (op0) == LSHIFTRT || GET_CODE (op0) == ASHIFTRT)
9397 mask &= (mask >> shift_count) << shift_count;
45620ed4 9398 else if (GET_CODE (op0) == ASHIFT)
230d793d
RS
9399 mask = (mask & (mask << shift_count)) >> shift_count;
9400
951553af
RK
9401 if ((nonzero_bits (XEXP (op0, 0), mode) & ~ mask) == 0
9402 && (nonzero_bits (XEXP (op1, 0), mode) & ~ mask) == 0)
230d793d
RS
9403 op0 = XEXP (op0, 0), op1 = XEXP (op1, 0);
9404 else
9405 break;
9406 }
9407
9408 /* If both operands are AND's of a paradoxical SUBREG by constant, the
9409 SUBREGs are of the same mode, and, in both cases, the AND would
9410 be redundant if the comparison was done in the narrower mode,
9411 do the comparison in the narrower mode (e.g., we are AND'ing with 1
951553af
RK
9412 and the operand's possibly nonzero bits are 0xffffff01; in that case
9413 if we only care about QImode, we don't need the AND). This case
9414 occurs if the output mode of an scc insn is not SImode and
7e4dc511
RK
9415 STORE_FLAG_VALUE == 1 (e.g., the 386).
9416
9417 Similarly, check for a case where the AND's are ZERO_EXTEND
9418 operations from some narrower mode even though a SUBREG is not
9419 present. */
230d793d
RS
9420
9421 else if (GET_CODE (op0) == AND && GET_CODE (op1) == AND
9422 && GET_CODE (XEXP (op0, 1)) == CONST_INT
7e4dc511 9423 && GET_CODE (XEXP (op1, 1)) == CONST_INT)
230d793d 9424 {
7e4dc511
RK
9425 rtx inner_op0 = XEXP (op0, 0);
9426 rtx inner_op1 = XEXP (op1, 0);
9427 HOST_WIDE_INT c0 = INTVAL (XEXP (op0, 1));
9428 HOST_WIDE_INT c1 = INTVAL (XEXP (op1, 1));
9429 int changed = 0;
9430
9431 if (GET_CODE (inner_op0) == SUBREG && GET_CODE (inner_op1) == SUBREG
9432 && (GET_MODE_SIZE (GET_MODE (inner_op0))
9433 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (inner_op0))))
9434 && (GET_MODE (SUBREG_REG (inner_op0))
9435 == GET_MODE (SUBREG_REG (inner_op1)))
9436 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0)))
9437 <= HOST_BITS_PER_WIDE_INT)
01c82bbb
RK
9438 && (0 == ((~c0) & nonzero_bits (SUBREG_REG (inner_op0),
9439 GET_MODE (SUBREG_REG (op0)))))
9440 && (0 == ((~c1) & nonzero_bits (SUBREG_REG (inner_op1),
9441 GET_MODE (SUBREG_REG (inner_op1))))))
7e4dc511
RK
9442 {
9443 op0 = SUBREG_REG (inner_op0);
9444 op1 = SUBREG_REG (inner_op1);
9445
9446 /* The resulting comparison is always unsigned since we masked
0f41302f 9447 off the original sign bit. */
7e4dc511
RK
9448 code = unsigned_condition (code);
9449
9450 changed = 1;
9451 }
230d793d 9452
7e4dc511
RK
9453 else if (c0 == c1)
9454 for (tmode = GET_CLASS_NARROWEST_MODE
9455 (GET_MODE_CLASS (GET_MODE (op0)));
9456 tmode != GET_MODE (op0); tmode = GET_MODE_WIDER_MODE (tmode))
9457 if (c0 == GET_MODE_MASK (tmode))
9458 {
9459 op0 = gen_lowpart_for_combine (tmode, inner_op0);
9460 op1 = gen_lowpart_for_combine (tmode, inner_op1);
66415c8b 9461 code = unsigned_condition (code);
7e4dc511
RK
9462 changed = 1;
9463 break;
9464 }
9465
9466 if (! changed)
9467 break;
230d793d 9468 }
3a19aabc 9469
ad25ba17
RK
9470 /* If both operands are NOT, we can strip off the outer operation
9471 and adjust the comparison code for swapped operands; similarly for
9472 NEG, except that this must be an equality comparison. */
9473 else if ((GET_CODE (op0) == NOT && GET_CODE (op1) == NOT)
9474 || (GET_CODE (op0) == NEG && GET_CODE (op1) == NEG
9475 && (code == EQ || code == NE)))
9476 op0 = XEXP (op0, 0), op1 = XEXP (op1, 0), code = swap_condition (code);
3a19aabc 9477
230d793d
RS
9478 else
9479 break;
9480 }
9481
9482 /* If the first operand is a constant, swap the operands and adjust the
3aceff0d
RK
9483 comparison code appropriately, but don't do this if the second operand
9484 is already a constant integer. */
9485 if (CONSTANT_P (op0) && GET_CODE (op1) != CONST_INT)
230d793d
RS
9486 {
9487 tem = op0, op0 = op1, op1 = tem;
9488 code = swap_condition (code);
9489 }
9490
9491 /* We now enter a loop during which we will try to simplify the comparison.
9492 For the most part, we only are concerned with comparisons with zero,
9493 but some things may really be comparisons with zero but not start
9494 out looking that way. */
9495
9496 while (GET_CODE (op1) == CONST_INT)
9497 {
9498 enum machine_mode mode = GET_MODE (op0);
9499 int mode_width = GET_MODE_BITSIZE (mode);
5f4f0e22 9500 unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
230d793d
RS
9501 int equality_comparison_p;
9502 int sign_bit_comparison_p;
9503 int unsigned_comparison_p;
5f4f0e22 9504 HOST_WIDE_INT const_op;
230d793d
RS
9505
9506 /* We only want to handle integral modes. This catches VOIDmode,
9507 CCmode, and the floating-point modes. An exception is that we
9508 can handle VOIDmode if OP0 is a COMPARE or a comparison
9509 operation. */
9510
9511 if (GET_MODE_CLASS (mode) != MODE_INT
9512 && ! (mode == VOIDmode
9513 && (GET_CODE (op0) == COMPARE
9514 || GET_RTX_CLASS (GET_CODE (op0)) == '<')))
9515 break;
9516
9517 /* Get the constant we are comparing against and turn off all bits
9518 not on in our mode. */
9519 const_op = INTVAL (op1);
5f4f0e22 9520 if (mode_width <= HOST_BITS_PER_WIDE_INT)
4803a34a 9521 const_op &= mask;
230d793d
RS
9522
9523 /* If we are comparing against a constant power of two and the value
951553af 9524 being compared can only have that single bit nonzero (e.g., it was
230d793d
RS
9525 `and'ed with that bit), we can replace this with a comparison
9526 with zero. */
9527 if (const_op
9528 && (code == EQ || code == NE || code == GE || code == GEU
9529 || code == LT || code == LTU)
5f4f0e22 9530 && mode_width <= HOST_BITS_PER_WIDE_INT
230d793d 9531 && exact_log2 (const_op) >= 0
951553af 9532 && nonzero_bits (op0, mode) == const_op)
230d793d
RS
9533 {
9534 code = (code == EQ || code == GE || code == GEU ? NE : EQ);
9535 op1 = const0_rtx, const_op = 0;
9536 }
9537
d0ab8cd3
RK
9538 /* Similarly, if we are comparing a value known to be either -1 or
9539 0 with -1, change it to the opposite comparison against zero. */
9540
9541 if (const_op == -1
9542 && (code == EQ || code == NE || code == GT || code == LE
9543 || code == GEU || code == LTU)
9544 && num_sign_bit_copies (op0, mode) == mode_width)
9545 {
9546 code = (code == EQ || code == LE || code == GEU ? NE : EQ);
9547 op1 = const0_rtx, const_op = 0;
9548 }
9549
230d793d 9550 /* Do some canonicalizations based on the comparison code. We prefer
4803a34a
RK
9551 comparisons against zero and then prefer equality comparisons.
9552 If we can reduce the size of a constant, we will do that too. */
230d793d
RS
9553
9554 switch (code)
9555 {
9556 case LT:
4803a34a
RK
9557 /* < C is equivalent to <= (C - 1) */
9558 if (const_op > 0)
230d793d 9559 {
4803a34a 9560 const_op -= 1;
5f4f0e22 9561 op1 = GEN_INT (const_op);
230d793d
RS
9562 code = LE;
9563 /* ... fall through to LE case below. */
9564 }
9565 else
9566 break;
9567
9568 case LE:
4803a34a
RK
9569 /* <= C is equivalent to < (C + 1); we do this for C < 0 */
9570 if (const_op < 0)
9571 {
9572 const_op += 1;
5f4f0e22 9573 op1 = GEN_INT (const_op);
4803a34a
RK
9574 code = LT;
9575 }
230d793d
RS
9576
9577 /* If we are doing a <= 0 comparison on a value known to have
9578 a zero sign bit, we can replace this with == 0. */
9579 else if (const_op == 0
5f4f0e22 9580 && mode_width <= HOST_BITS_PER_WIDE_INT
951553af 9581 && (nonzero_bits (op0, mode)
5f4f0e22 9582 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
230d793d
RS
9583 code = EQ;
9584 break;
9585
9586 case GE:
0f41302f 9587 /* >= C is equivalent to > (C - 1). */
4803a34a 9588 if (const_op > 0)
230d793d 9589 {
4803a34a 9590 const_op -= 1;
5f4f0e22 9591 op1 = GEN_INT (const_op);
230d793d
RS
9592 code = GT;
9593 /* ... fall through to GT below. */
9594 }
9595 else
9596 break;
9597
9598 case GT:
4803a34a
RK
9599 /* > C is equivalent to >= (C + 1); we do this for C < 0*/
9600 if (const_op < 0)
9601 {
9602 const_op += 1;
5f4f0e22 9603 op1 = GEN_INT (const_op);
4803a34a
RK
9604 code = GE;
9605 }
230d793d
RS
9606
9607 /* If we are doing a > 0 comparison on a value known to have
9608 a zero sign bit, we can replace this with != 0. */
9609 else if (const_op == 0
5f4f0e22 9610 && mode_width <= HOST_BITS_PER_WIDE_INT
951553af 9611 && (nonzero_bits (op0, mode)
5f4f0e22 9612 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
230d793d
RS
9613 code = NE;
9614 break;
9615
230d793d 9616 case LTU:
4803a34a
RK
9617 /* < C is equivalent to <= (C - 1). */
9618 if (const_op > 0)
9619 {
9620 const_op -= 1;
5f4f0e22 9621 op1 = GEN_INT (const_op);
4803a34a 9622 code = LEU;
0f41302f 9623 /* ... fall through ... */
4803a34a 9624 }
d0ab8cd3
RK
9625
9626 /* (unsigned) < 0x80000000 is equivalent to >= 0. */
f77aada2
JW
9627 else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
9628 && (const_op == (HOST_WIDE_INT) 1 << (mode_width - 1)))
d0ab8cd3
RK
9629 {
9630 const_op = 0, op1 = const0_rtx;
9631 code = GE;
9632 break;
9633 }
4803a34a
RK
9634 else
9635 break;
230d793d
RS
9636
9637 case LEU:
9638 /* unsigned <= 0 is equivalent to == 0 */
9639 if (const_op == 0)
9640 code = EQ;
d0ab8cd3 9641
0f41302f 9642 /* (unsigned) <= 0x7fffffff is equivalent to >= 0. */
f77aada2
JW
9643 else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
9644 && (const_op == ((HOST_WIDE_INT) 1 << (mode_width - 1)) - 1))
d0ab8cd3
RK
9645 {
9646 const_op = 0, op1 = const0_rtx;
9647 code = GE;
9648 }
230d793d
RS
9649 break;
9650
4803a34a
RK
9651 case GEU:
9652 /* >= C is equivalent to < (C - 1). */
9653 if (const_op > 1)
9654 {
9655 const_op -= 1;
5f4f0e22 9656 op1 = GEN_INT (const_op);
4803a34a 9657 code = GTU;
0f41302f 9658 /* ... fall through ... */
4803a34a 9659 }
d0ab8cd3
RK
9660
9661 /* (unsigned) >= 0x80000000 is equivalent to < 0. */
f77aada2
JW
9662 else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
9663 && (const_op == (HOST_WIDE_INT) 1 << (mode_width - 1)))
d0ab8cd3
RK
9664 {
9665 const_op = 0, op1 = const0_rtx;
9666 code = LT;
8b2e69e1 9667 break;
d0ab8cd3 9668 }
4803a34a
RK
9669 else
9670 break;
9671
230d793d
RS
9672 case GTU:
9673 /* unsigned > 0 is equivalent to != 0 */
9674 if (const_op == 0)
9675 code = NE;
d0ab8cd3
RK
9676
9677 /* (unsigned) > 0x7fffffff is equivalent to < 0. */
f77aada2
JW
9678 else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
9679 && (const_op == ((HOST_WIDE_INT) 1 << (mode_width - 1)) - 1))
d0ab8cd3
RK
9680 {
9681 const_op = 0, op1 = const0_rtx;
9682 code = LT;
9683 }
230d793d 9684 break;
e9a25f70
JL
9685
9686 default:
9687 break;
230d793d
RS
9688 }
9689
9690 /* Compute some predicates to simplify code below. */
9691
9692 equality_comparison_p = (code == EQ || code == NE);
9693 sign_bit_comparison_p = ((code == LT || code == GE) && const_op == 0);
9694 unsigned_comparison_p = (code == LTU || code == LEU || code == GTU
9695 || code == LEU);
9696
6139ff20
RK
9697 /* If this is a sign bit comparison and we can do arithmetic in
9698 MODE, say that we will only be needing the sign bit of OP0. */
9699 if (sign_bit_comparison_p
9700 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
9701 op0 = force_to_mode (op0, mode,
9702 ((HOST_WIDE_INT) 1
9703 << (GET_MODE_BITSIZE (mode) - 1)),
e3d616e3 9704 NULL_RTX, 0);
6139ff20 9705
230d793d
RS
9706 /* Now try cases based on the opcode of OP0. If none of the cases
9707 does a "continue", we exit this loop immediately after the
9708 switch. */
9709
9710 switch (GET_CODE (op0))
9711 {
9712 case ZERO_EXTRACT:
9713 /* If we are extracting a single bit from a variable position in
9714 a constant that has only a single bit set and are comparing it
9715 with zero, we can convert this into an equality comparison
d7cd794f 9716 between the position and the location of the single bit. */
230d793d 9717
230d793d
RS
9718 if (GET_CODE (XEXP (op0, 0)) == CONST_INT
9719 && XEXP (op0, 1) == const1_rtx
9720 && equality_comparison_p && const_op == 0
d7cd794f 9721 && (i = exact_log2 (INTVAL (XEXP (op0, 0)))) >= 0)
230d793d 9722 {
f76b9db2 9723 if (BITS_BIG_ENDIAN)
d7cd794f 9724#ifdef HAVE_extzv
f76b9db2
ILT
9725 i = (GET_MODE_BITSIZE
9726 (insn_operand_mode[(int) CODE_FOR_extzv][1]) - 1 - i);
d7cd794f
RK
9727#else
9728 i = BITS_PER_WORD - 1 - i;
230d793d
RS
9729#endif
9730
9731 op0 = XEXP (op0, 2);
5f4f0e22 9732 op1 = GEN_INT (i);
230d793d
RS
9733 const_op = i;
9734
9735 /* Result is nonzero iff shift count is equal to I. */
9736 code = reverse_condition (code);
9737 continue;
9738 }
230d793d 9739
0f41302f 9740 /* ... fall through ... */
230d793d
RS
9741
9742 case SIGN_EXTRACT:
9743 tem = expand_compound_operation (op0);
9744 if (tem != op0)
9745 {
9746 op0 = tem;
9747 continue;
9748 }
9749 break;
9750
9751 case NOT:
9752 /* If testing for equality, we can take the NOT of the constant. */
9753 if (equality_comparison_p
9754 && (tem = simplify_unary_operation (NOT, mode, op1, mode)) != 0)
9755 {
9756 op0 = XEXP (op0, 0);
9757 op1 = tem;
9758 continue;
9759 }
9760
9761 /* If just looking at the sign bit, reverse the sense of the
9762 comparison. */
9763 if (sign_bit_comparison_p)
9764 {
9765 op0 = XEXP (op0, 0);
9766 code = (code == GE ? LT : GE);
9767 continue;
9768 }
9769 break;
9770
9771 case NEG:
9772 /* If testing for equality, we can take the NEG of the constant. */
9773 if (equality_comparison_p
9774 && (tem = simplify_unary_operation (NEG, mode, op1, mode)) != 0)
9775 {
9776 op0 = XEXP (op0, 0);
9777 op1 = tem;
9778 continue;
9779 }
9780
9781 /* The remaining cases only apply to comparisons with zero. */
9782 if (const_op != 0)
9783 break;
9784
9785 /* When X is ABS or is known positive,
9786 (neg X) is < 0 if and only if X != 0. */
9787
9788 if (sign_bit_comparison_p
9789 && (GET_CODE (XEXP (op0, 0)) == ABS
5f4f0e22 9790 || (mode_width <= HOST_BITS_PER_WIDE_INT
951553af 9791 && (nonzero_bits (XEXP (op0, 0), mode)
5f4f0e22 9792 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)))
230d793d
RS
9793 {
9794 op0 = XEXP (op0, 0);
9795 code = (code == LT ? NE : EQ);
9796 continue;
9797 }
9798
3bed8141 9799 /* If we have NEG of something whose two high-order bits are the
0f41302f 9800 same, we know that "(-a) < 0" is equivalent to "a > 0". */
3bed8141 9801 if (num_sign_bit_copies (op0, mode) >= 2)
230d793d
RS
9802 {
9803 op0 = XEXP (op0, 0);
9804 code = swap_condition (code);
9805 continue;
9806 }
9807 break;
9808
9809 case ROTATE:
9810 /* If we are testing equality and our count is a constant, we
9811 can perform the inverse operation on our RHS. */
9812 if (equality_comparison_p && GET_CODE (XEXP (op0, 1)) == CONST_INT
9813 && (tem = simplify_binary_operation (ROTATERT, mode,
9814 op1, XEXP (op0, 1))) != 0)
9815 {
9816 op0 = XEXP (op0, 0);
9817 op1 = tem;
9818 continue;
9819 }
9820
9821 /* If we are doing a < 0 or >= 0 comparison, it means we are testing
9822 a particular bit. Convert it to an AND of a constant of that
9823 bit. This will be converted into a ZERO_EXTRACT. */
9824 if (const_op == 0 && sign_bit_comparison_p
9825 && GET_CODE (XEXP (op0, 1)) == CONST_INT
5f4f0e22 9826 && mode_width <= HOST_BITS_PER_WIDE_INT)
230d793d 9827 {
5f4f0e22
CH
9828 op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
9829 ((HOST_WIDE_INT) 1
9830 << (mode_width - 1
9831 - INTVAL (XEXP (op0, 1)))));
230d793d
RS
9832 code = (code == LT ? NE : EQ);
9833 continue;
9834 }
9835
0f41302f 9836 /* ... fall through ... */
230d793d
RS
9837
9838 case ABS:
9839 /* ABS is ignorable inside an equality comparison with zero. */
9840 if (const_op == 0 && equality_comparison_p)
9841 {
9842 op0 = XEXP (op0, 0);
9843 continue;
9844 }
9845 break;
9846
9847
9848 case SIGN_EXTEND:
9849 /* Can simplify (compare (zero/sign_extend FOO) CONST)
9850 to (compare FOO CONST) if CONST fits in FOO's mode and we
9851 are either testing inequality or have an unsigned comparison
9852 with ZERO_EXTEND or a signed comparison with SIGN_EXTEND. */
9853 if (! unsigned_comparison_p
9854 && (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
5f4f0e22
CH
9855 <= HOST_BITS_PER_WIDE_INT)
9856 && ((unsigned HOST_WIDE_INT) const_op
9857 < (((HOST_WIDE_INT) 1
9858 << (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0))) - 1)))))
230d793d
RS
9859 {
9860 op0 = XEXP (op0, 0);
9861 continue;
9862 }
9863 break;
9864
9865 case SUBREG:
a687e897 9866 /* Check for the case where we are comparing A - C1 with C2,
abc95ed3 9867 both constants are smaller than 1/2 the maximum positive
a687e897
RK
9868 value in MODE, and the comparison is equality or unsigned.
9869 In that case, if A is either zero-extended to MODE or has
9870 sufficient sign bits so that the high-order bit in MODE
9871 is a copy of the sign in the inner mode, we can prove that it is
9872 safe to do the operation in the wider mode. This simplifies
9873 many range checks. */
9874
9875 if (mode_width <= HOST_BITS_PER_WIDE_INT
9876 && subreg_lowpart_p (op0)
9877 && GET_CODE (SUBREG_REG (op0)) == PLUS
9878 && GET_CODE (XEXP (SUBREG_REG (op0), 1)) == CONST_INT
9879 && INTVAL (XEXP (SUBREG_REG (op0), 1)) < 0
9880 && (- INTVAL (XEXP (SUBREG_REG (op0), 1))
9881 < GET_MODE_MASK (mode) / 2)
adb7a1cb 9882 && (unsigned HOST_WIDE_INT) const_op < GET_MODE_MASK (mode) / 2
951553af
RK
9883 && (0 == (nonzero_bits (XEXP (SUBREG_REG (op0), 0),
9884 GET_MODE (SUBREG_REG (op0)))
a687e897
RK
9885 & ~ GET_MODE_MASK (mode))
9886 || (num_sign_bit_copies (XEXP (SUBREG_REG (op0), 0),
9887 GET_MODE (SUBREG_REG (op0)))
9888 > (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0)))
9889 - GET_MODE_BITSIZE (mode)))))
9890 {
9891 op0 = SUBREG_REG (op0);
9892 continue;
9893 }
9894
fe0cf571
RK
9895 /* If the inner mode is narrower and we are extracting the low part,
9896 we can treat the SUBREG as if it were a ZERO_EXTEND. */
9897 if (subreg_lowpart_p (op0)
89f1c7f2
RS
9898 && GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0))) < mode_width)
9899 /* Fall through */ ;
9900 else
230d793d
RS
9901 break;
9902
0f41302f 9903 /* ... fall through ... */
230d793d
RS
9904
9905 case ZERO_EXTEND:
9906 if ((unsigned_comparison_p || equality_comparison_p)
9907 && (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
5f4f0e22
CH
9908 <= HOST_BITS_PER_WIDE_INT)
9909 && ((unsigned HOST_WIDE_INT) const_op
230d793d
RS
9910 < GET_MODE_MASK (GET_MODE (XEXP (op0, 0)))))
9911 {
9912 op0 = XEXP (op0, 0);
9913 continue;
9914 }
9915 break;
9916
9917 case PLUS:
20fdd649 9918 /* (eq (plus X A) B) -> (eq X (minus B A)). We can only do
5089e22e 9919 this for equality comparisons due to pathological cases involving
230d793d 9920 overflows. */
20fdd649
RK
9921 if (equality_comparison_p
9922 && 0 != (tem = simplify_binary_operation (MINUS, mode,
9923 op1, XEXP (op0, 1))))
230d793d
RS
9924 {
9925 op0 = XEXP (op0, 0);
9926 op1 = tem;
9927 continue;
9928 }
9929
9930 /* (plus (abs X) (const_int -1)) is < 0 if and only if X == 0. */
9931 if (const_op == 0 && XEXP (op0, 1) == constm1_rtx
9932 && GET_CODE (XEXP (op0, 0)) == ABS && sign_bit_comparison_p)
9933 {
9934 op0 = XEXP (XEXP (op0, 0), 0);
9935 code = (code == LT ? EQ : NE);
9936 continue;
9937 }
9938 break;
9939
9940 case MINUS:
20fdd649
RK
9941 /* (eq (minus A B) C) -> (eq A (plus B C)) or
9942 (eq B (minus A C)), whichever simplifies. We can only do
9943 this for equality comparisons due to pathological cases involving
9944 overflows. */
9945 if (equality_comparison_p
9946 && 0 != (tem = simplify_binary_operation (PLUS, mode,
9947 XEXP (op0, 1), op1)))
9948 {
9949 op0 = XEXP (op0, 0);
9950 op1 = tem;
9951 continue;
9952 }
9953
9954 if (equality_comparison_p
9955 && 0 != (tem = simplify_binary_operation (MINUS, mode,
9956 XEXP (op0, 0), op1)))
9957 {
9958 op0 = XEXP (op0, 1);
9959 op1 = tem;
9960 continue;
9961 }
9962
230d793d
RS
9963 /* The sign bit of (minus (ashiftrt X C) X), where C is the number
9964 of bits in X minus 1, is one iff X > 0. */
9965 if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == ASHIFTRT
9966 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
9967 && INTVAL (XEXP (XEXP (op0, 0), 1)) == mode_width - 1
9968 && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
9969 {
9970 op0 = XEXP (op0, 1);
9971 code = (code == GE ? LE : GT);
9972 continue;
9973 }
9974 break;
9975
9976 case XOR:
9977 /* (eq (xor A B) C) -> (eq A (xor B C)). This is a simplification
9978 if C is zero or B is a constant. */
9979 if (equality_comparison_p
9980 && 0 != (tem = simplify_binary_operation (XOR, mode,
9981 XEXP (op0, 1), op1)))
9982 {
9983 op0 = XEXP (op0, 0);
9984 op1 = tem;
9985 continue;
9986 }
9987 break;
9988
9989 case EQ: case NE:
9990 case LT: case LTU: case LE: case LEU:
9991 case GT: case GTU: case GE: case GEU:
9992 /* We can't do anything if OP0 is a condition code value, rather
9993 than an actual data value. */
9994 if (const_op != 0
9995#ifdef HAVE_cc0
9996 || XEXP (op0, 0) == cc0_rtx
9997#endif
9998 || GET_MODE_CLASS (GET_MODE (XEXP (op0, 0))) == MODE_CC)
9999 break;
10000
10001 /* Get the two operands being compared. */
10002 if (GET_CODE (XEXP (op0, 0)) == COMPARE)
10003 tem = XEXP (XEXP (op0, 0), 0), tem1 = XEXP (XEXP (op0, 0), 1);
10004 else
10005 tem = XEXP (op0, 0), tem1 = XEXP (op0, 1);
10006
10007 /* Check for the cases where we simply want the result of the
10008 earlier test or the opposite of that result. */
10009 if (code == NE
10010 || (code == EQ && reversible_comparison_p (op0))
5f4f0e22 10011 || (GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
3f508eca 10012 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
230d793d 10013 && (STORE_FLAG_VALUE
5f4f0e22
CH
10014 & (((HOST_WIDE_INT) 1
10015 << (GET_MODE_BITSIZE (GET_MODE (op0)) - 1))))
230d793d
RS
10016 && (code == LT
10017 || (code == GE && reversible_comparison_p (op0)))))
10018 {
10019 code = (code == LT || code == NE
10020 ? GET_CODE (op0) : reverse_condition (GET_CODE (op0)));
10021 op0 = tem, op1 = tem1;
10022 continue;
10023 }
10024 break;
10025
10026 case IOR:
10027 /* The sign bit of (ior (plus X (const_int -1)) X) is non-zero
10028 iff X <= 0. */
10029 if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == PLUS
10030 && XEXP (XEXP (op0, 0), 1) == constm1_rtx
10031 && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
10032 {
10033 op0 = XEXP (op0, 1);
10034 code = (code == GE ? GT : LE);
10035 continue;
10036 }
10037 break;
10038
10039 case AND:
10040 /* Convert (and (xshift 1 X) Y) to (and (lshiftrt Y X) 1). This
10041 will be converted to a ZERO_EXTRACT later. */
10042 if (const_op == 0 && equality_comparison_p
45620ed4 10043 && GET_CODE (XEXP (op0, 0)) == ASHIFT
230d793d
RS
10044 && XEXP (XEXP (op0, 0), 0) == const1_rtx)
10045 {
10046 op0 = simplify_and_const_int
10047 (op0, mode, gen_rtx_combine (LSHIFTRT, mode,
10048 XEXP (op0, 1),
10049 XEXP (XEXP (op0, 0), 1)),
5f4f0e22 10050 (HOST_WIDE_INT) 1);
230d793d
RS
10051 continue;
10052 }
10053
10054 /* If we are comparing (and (lshiftrt X C1) C2) for equality with
10055 zero and X is a comparison and C1 and C2 describe only bits set
10056 in STORE_FLAG_VALUE, we can compare with X. */
10057 if (const_op == 0 && equality_comparison_p
5f4f0e22 10058 && mode_width <= HOST_BITS_PER_WIDE_INT
230d793d
RS
10059 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10060 && GET_CODE (XEXP (op0, 0)) == LSHIFTRT
10061 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
10062 && INTVAL (XEXP (XEXP (op0, 0), 1)) >= 0
5f4f0e22 10063 && INTVAL (XEXP (XEXP (op0, 0), 1)) < HOST_BITS_PER_WIDE_INT)
230d793d
RS
10064 {
10065 mask = ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode))
10066 << INTVAL (XEXP (XEXP (op0, 0), 1)));
10067 if ((~ STORE_FLAG_VALUE & mask) == 0
10068 && (GET_RTX_CLASS (GET_CODE (XEXP (XEXP (op0, 0), 0))) == '<'
10069 || ((tem = get_last_value (XEXP (XEXP (op0, 0), 0))) != 0
10070 && GET_RTX_CLASS (GET_CODE (tem)) == '<')))
10071 {
10072 op0 = XEXP (XEXP (op0, 0), 0);
10073 continue;
10074 }
10075 }
10076
10077 /* If we are doing an equality comparison of an AND of a bit equal
10078 to the sign bit, replace this with a LT or GE comparison of
10079 the underlying value. */
10080 if (equality_comparison_p
10081 && const_op == 0
10082 && GET_CODE (XEXP (op0, 1)) == CONST_INT
5f4f0e22 10083 && mode_width <= HOST_BITS_PER_WIDE_INT
230d793d 10084 && ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode))
5f4f0e22 10085 == (HOST_WIDE_INT) 1 << (mode_width - 1)))
230d793d
RS
10086 {
10087 op0 = XEXP (op0, 0);
10088 code = (code == EQ ? GE : LT);
10089 continue;
10090 }
10091
10092 /* If this AND operation is really a ZERO_EXTEND from a narrower
10093 mode, the constant fits within that mode, and this is either an
10094 equality or unsigned comparison, try to do this comparison in
10095 the narrower mode. */
10096 if ((equality_comparison_p || unsigned_comparison_p)
10097 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10098 && (i = exact_log2 ((INTVAL (XEXP (op0, 1))
10099 & GET_MODE_MASK (mode))
10100 + 1)) >= 0
10101 && const_op >> i == 0
10102 && (tmode = mode_for_size (i, MODE_INT, 1)) != BLKmode)
10103 {
10104 op0 = gen_lowpart_for_combine (tmode, XEXP (op0, 0));
10105 continue;
10106 }
10107 break;
10108
10109 case ASHIFT:
45620ed4 10110 /* If we have (compare (ashift FOO N) (const_int C)) and
230d793d 10111 the high order N bits of FOO (N+1 if an inequality comparison)
951553af 10112 are known to be zero, we can do this by comparing FOO with C
230d793d
RS
10113 shifted right N bits so long as the low-order N bits of C are
10114 zero. */
10115 if (GET_CODE (XEXP (op0, 1)) == CONST_INT
10116 && INTVAL (XEXP (op0, 1)) >= 0
10117 && ((INTVAL (XEXP (op0, 1)) + ! equality_comparison_p)
5f4f0e22
CH
10118 < HOST_BITS_PER_WIDE_INT)
10119 && ((const_op
34785d05 10120 & (((HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1))) - 1)) == 0)
5f4f0e22 10121 && mode_width <= HOST_BITS_PER_WIDE_INT
951553af 10122 && (nonzero_bits (XEXP (op0, 0), mode)
230d793d
RS
10123 & ~ (mask >> (INTVAL (XEXP (op0, 1))
10124 + ! equality_comparison_p))) == 0)
10125 {
10126 const_op >>= INTVAL (XEXP (op0, 1));
5f4f0e22 10127 op1 = GEN_INT (const_op);
230d793d
RS
10128 op0 = XEXP (op0, 0);
10129 continue;
10130 }
10131
dfbe1b2f 10132 /* If we are doing a sign bit comparison, it means we are testing
230d793d 10133 a particular bit. Convert it to the appropriate AND. */
dfbe1b2f 10134 if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 1)) == CONST_INT
5f4f0e22 10135 && mode_width <= HOST_BITS_PER_WIDE_INT)
230d793d 10136 {
5f4f0e22
CH
10137 op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
10138 ((HOST_WIDE_INT) 1
10139 << (mode_width - 1
10140 - INTVAL (XEXP (op0, 1)))));
230d793d
RS
10141 code = (code == LT ? NE : EQ);
10142 continue;
10143 }
dfbe1b2f
RK
10144
10145 /* If this an equality comparison with zero and we are shifting
10146 the low bit to the sign bit, we can convert this to an AND of the
10147 low-order bit. */
10148 if (const_op == 0 && equality_comparison_p
10149 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10150 && INTVAL (XEXP (op0, 1)) == mode_width - 1)
10151 {
5f4f0e22
CH
10152 op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
10153 (HOST_WIDE_INT) 1);
dfbe1b2f
RK
10154 continue;
10155 }
230d793d
RS
10156 break;
10157
10158 case ASHIFTRT:
d0ab8cd3
RK
10159 /* If this is an equality comparison with zero, we can do this
10160 as a logical shift, which might be much simpler. */
10161 if (equality_comparison_p && const_op == 0
10162 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
10163 {
10164 op0 = simplify_shift_const (NULL_RTX, LSHIFTRT, mode,
10165 XEXP (op0, 0),
10166 INTVAL (XEXP (op0, 1)));
10167 continue;
10168 }
10169
230d793d
RS
10170 /* If OP0 is a sign extension and CODE is not an unsigned comparison,
10171 do the comparison in a narrower mode. */
10172 if (! unsigned_comparison_p
10173 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10174 && GET_CODE (XEXP (op0, 0)) == ASHIFT
10175 && XEXP (op0, 1) == XEXP (XEXP (op0, 0), 1)
10176 && (tmode = mode_for_size (mode_width - INTVAL (XEXP (op0, 1)),
22331794 10177 MODE_INT, 1)) != BLKmode
5f4f0e22
CH
10178 && ((unsigned HOST_WIDE_INT) const_op <= GET_MODE_MASK (tmode)
10179 || ((unsigned HOST_WIDE_INT) - const_op
10180 <= GET_MODE_MASK (tmode))))
230d793d
RS
10181 {
10182 op0 = gen_lowpart_for_combine (tmode, XEXP (XEXP (op0, 0), 0));
10183 continue;
10184 }
10185
0f41302f 10186 /* ... fall through ... */
230d793d
RS
10187 case LSHIFTRT:
10188 /* If we have (compare (xshiftrt FOO N) (const_int C)) and
951553af 10189 the low order N bits of FOO are known to be zero, we can do this
230d793d
RS
10190 by comparing FOO with C shifted left N bits so long as no
10191 overflow occurs. */
10192 if (GET_CODE (XEXP (op0, 1)) == CONST_INT
10193 && INTVAL (XEXP (op0, 1)) >= 0
5f4f0e22
CH
10194 && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
10195 && mode_width <= HOST_BITS_PER_WIDE_INT
951553af 10196 && (nonzero_bits (XEXP (op0, 0), mode)
5f4f0e22 10197 & (((HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1))) - 1)) == 0
230d793d
RS
10198 && (const_op == 0
10199 || (floor_log2 (const_op) + INTVAL (XEXP (op0, 1))
10200 < mode_width)))
10201 {
10202 const_op <<= INTVAL (XEXP (op0, 1));
5f4f0e22 10203 op1 = GEN_INT (const_op);
230d793d
RS
10204 op0 = XEXP (op0, 0);
10205 continue;
10206 }
10207
10208 /* If we are using this shift to extract just the sign bit, we
10209 can replace this with an LT or GE comparison. */
10210 if (const_op == 0
10211 && (equality_comparison_p || sign_bit_comparison_p)
10212 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10213 && INTVAL (XEXP (op0, 1)) == mode_width - 1)
10214 {
10215 op0 = XEXP (op0, 0);
10216 code = (code == NE || code == GT ? LT : GE);
10217 continue;
10218 }
10219 break;
e9a25f70
JL
10220
10221 default:
10222 break;
230d793d
RS
10223 }
10224
10225 break;
10226 }
10227
10228 /* Now make any compound operations involved in this comparison. Then,
76d31c63 10229 check for an outmost SUBREG on OP0 that is not doing anything or is
230d793d
RS
10230 paradoxical. The latter case can only occur when it is known that the
10231 "extra" bits will be zero. Therefore, it is safe to remove the SUBREG.
10232 We can never remove a SUBREG for a non-equality comparison because the
10233 sign bit is in a different place in the underlying object. */
10234
10235 op0 = make_compound_operation (op0, op1 == const0_rtx ? COMPARE : SET);
10236 op1 = make_compound_operation (op1, SET);
10237
10238 if (GET_CODE (op0) == SUBREG && subreg_lowpart_p (op0)
10239 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
10240 && (code == NE || code == EQ)
10241 && ((GET_MODE_SIZE (GET_MODE (op0))
10242 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0))))))
10243 {
10244 op0 = SUBREG_REG (op0);
10245 op1 = gen_lowpart_for_combine (GET_MODE (op0), op1);
10246 }
10247
10248 else if (GET_CODE (op0) == SUBREG && subreg_lowpart_p (op0)
10249 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
10250 && (code == NE || code == EQ)
ac49a949
RS
10251 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0)))
10252 <= HOST_BITS_PER_WIDE_INT)
951553af 10253 && (nonzero_bits (SUBREG_REG (op0), GET_MODE (SUBREG_REG (op0)))
230d793d
RS
10254 & ~ GET_MODE_MASK (GET_MODE (op0))) == 0
10255 && (tem = gen_lowpart_for_combine (GET_MODE (SUBREG_REG (op0)),
10256 op1),
951553af 10257 (nonzero_bits (tem, GET_MODE (SUBREG_REG (op0)))
230d793d
RS
10258 & ~ GET_MODE_MASK (GET_MODE (op0))) == 0))
10259 op0 = SUBREG_REG (op0), op1 = tem;
10260
10261 /* We now do the opposite procedure: Some machines don't have compare
10262 insns in all modes. If OP0's mode is an integer mode smaller than a
10263 word and we can't do a compare in that mode, see if there is a larger
a687e897
RK
10264 mode for which we can do the compare. There are a number of cases in
10265 which we can use the wider mode. */
230d793d
RS
10266
10267 mode = GET_MODE (op0);
10268 if (mode != VOIDmode && GET_MODE_CLASS (mode) == MODE_INT
10269 && GET_MODE_SIZE (mode) < UNITS_PER_WORD
10270 && cmp_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
10271 for (tmode = GET_MODE_WIDER_MODE (mode);
5f4f0e22
CH
10272 (tmode != VOIDmode
10273 && GET_MODE_BITSIZE (tmode) <= HOST_BITS_PER_WIDE_INT);
230d793d 10274 tmode = GET_MODE_WIDER_MODE (tmode))
a687e897 10275 if (cmp_optab->handlers[(int) tmode].insn_code != CODE_FOR_nothing)
230d793d 10276 {
951553af 10277 /* If the only nonzero bits in OP0 and OP1 are those in the
a687e897
RK
10278 narrower mode and this is an equality or unsigned comparison,
10279 we can use the wider mode. Similarly for sign-extended
7e4dc511 10280 values, in which case it is true for all comparisons. */
a687e897
RK
10281 if (((code == EQ || code == NE
10282 || code == GEU || code == GTU || code == LEU || code == LTU)
951553af
RK
10283 && (nonzero_bits (op0, tmode) & ~ GET_MODE_MASK (mode)) == 0
10284 && (nonzero_bits (op1, tmode) & ~ GET_MODE_MASK (mode)) == 0)
7e4dc511
RK
10285 || ((num_sign_bit_copies (op0, tmode)
10286 > GET_MODE_BITSIZE (tmode) - GET_MODE_BITSIZE (mode))
a687e897 10287 && (num_sign_bit_copies (op1, tmode)
58744483 10288 > GET_MODE_BITSIZE (tmode) - GET_MODE_BITSIZE (mode))))
a687e897
RK
10289 {
10290 op0 = gen_lowpart_for_combine (tmode, op0);
10291 op1 = gen_lowpart_for_combine (tmode, op1);
10292 break;
10293 }
230d793d 10294
a687e897
RK
10295 /* If this is a test for negative, we can make an explicit
10296 test of the sign bit. */
10297
10298 if (op1 == const0_rtx && (code == LT || code == GE)
10299 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
230d793d 10300 {
a687e897
RK
10301 op0 = gen_binary (AND, tmode,
10302 gen_lowpart_for_combine (tmode, op0),
5f4f0e22
CH
10303 GEN_INT ((HOST_WIDE_INT) 1
10304 << (GET_MODE_BITSIZE (mode) - 1)));
230d793d 10305 code = (code == LT) ? NE : EQ;
a687e897 10306 break;
230d793d 10307 }
230d793d
RS
10308 }
10309
b7a775b2
RK
10310#ifdef CANONICALIZE_COMPARISON
10311 /* If this machine only supports a subset of valid comparisons, see if we
10312 can convert an unsupported one into a supported one. */
10313 CANONICALIZE_COMPARISON (code, op0, op1);
10314#endif
10315
230d793d
RS
10316 *pop0 = op0;
10317 *pop1 = op1;
10318
10319 return code;
10320}
10321\f
10322/* Return 1 if we know that X, a comparison operation, is not operating
10323 on a floating-point value or is EQ or NE, meaning that we can safely
10324 reverse it. */
10325
10326static int
10327reversible_comparison_p (x)
10328 rtx x;
10329{
10330 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
7e2a0d8e 10331 || flag_fast_math
230d793d
RS
10332 || GET_CODE (x) == NE || GET_CODE (x) == EQ)
10333 return 1;
10334
10335 switch (GET_MODE_CLASS (GET_MODE (XEXP (x, 0))))
10336 {
10337 case MODE_INT:
3ad2180a
RK
10338 case MODE_PARTIAL_INT:
10339 case MODE_COMPLEX_INT:
230d793d
RS
10340 return 1;
10341
10342 case MODE_CC:
9210df58
RK
10343 /* If the mode of the condition codes tells us that this is safe,
10344 we need look no further. */
10345 if (REVERSIBLE_CC_MODE (GET_MODE (XEXP (x, 0))))
10346 return 1;
10347
10348 /* Otherwise try and find where the condition codes were last set and
10349 use that. */
230d793d
RS
10350 x = get_last_value (XEXP (x, 0));
10351 return (x && GET_CODE (x) == COMPARE
3ad2180a 10352 && ! FLOAT_MODE_P (GET_MODE (XEXP (x, 0))));
e9a25f70
JL
10353
10354 default:
10355 return 0;
230d793d 10356 }
230d793d
RS
10357}
10358\f
10359/* Utility function for following routine. Called when X is part of a value
10360 being stored into reg_last_set_value. Sets reg_last_set_table_tick
10361 for each register mentioned. Similar to mention_regs in cse.c */
10362
10363static void
10364update_table_tick (x)
10365 rtx x;
10366{
10367 register enum rtx_code code = GET_CODE (x);
10368 register char *fmt = GET_RTX_FORMAT (code);
10369 register int i;
10370
10371 if (code == REG)
10372 {
10373 int regno = REGNO (x);
10374 int endregno = regno + (regno < FIRST_PSEUDO_REGISTER
10375 ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
10376
10377 for (i = regno; i < endregno; i++)
10378 reg_last_set_table_tick[i] = label_tick;
10379
10380 return;
10381 }
10382
10383 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
10384 /* Note that we can't have an "E" in values stored; see
10385 get_last_value_validate. */
10386 if (fmt[i] == 'e')
10387 update_table_tick (XEXP (x, i));
10388}
10389
10390/* Record that REG is set to VALUE in insn INSN. If VALUE is zero, we
10391 are saying that the register is clobbered and we no longer know its
7988fd36
RK
10392 value. If INSN is zero, don't update reg_last_set; this is only permitted
10393 with VALUE also zero and is used to invalidate the register. */
230d793d
RS
10394
10395static void
10396record_value_for_reg (reg, insn, value)
10397 rtx reg;
10398 rtx insn;
10399 rtx value;
10400{
10401 int regno = REGNO (reg);
10402 int endregno = regno + (regno < FIRST_PSEUDO_REGISTER
10403 ? HARD_REGNO_NREGS (regno, GET_MODE (reg)) : 1);
10404 int i;
10405
10406 /* If VALUE contains REG and we have a previous value for REG, substitute
10407 the previous value. */
10408 if (value && insn && reg_overlap_mentioned_p (reg, value))
10409 {
10410 rtx tem;
10411
10412 /* Set things up so get_last_value is allowed to see anything set up to
10413 our insn. */
10414 subst_low_cuid = INSN_CUID (insn);
10415 tem = get_last_value (reg);
10416
10417 if (tem)
10418 value = replace_rtx (copy_rtx (value), reg, tem);
10419 }
10420
10421 /* For each register modified, show we don't know its value, that
ef026f91
RS
10422 we don't know about its bitwise content, that its value has been
10423 updated, and that we don't know the location of the death of the
10424 register. */
230d793d
RS
10425 for (i = regno; i < endregno; i ++)
10426 {
10427 if (insn)
10428 reg_last_set[i] = insn;
10429 reg_last_set_value[i] = 0;
ef026f91
RS
10430 reg_last_set_mode[i] = 0;
10431 reg_last_set_nonzero_bits[i] = 0;
10432 reg_last_set_sign_bit_copies[i] = 0;
230d793d
RS
10433 reg_last_death[i] = 0;
10434 }
10435
10436 /* Mark registers that are being referenced in this value. */
10437 if (value)
10438 update_table_tick (value);
10439
10440 /* Now update the status of each register being set.
10441 If someone is using this register in this block, set this register
10442 to invalid since we will get confused between the two lives in this
10443 basic block. This makes using this register always invalid. In cse, we
10444 scan the table to invalidate all entries using this register, but this
10445 is too much work for us. */
10446
10447 for (i = regno; i < endregno; i++)
10448 {
10449 reg_last_set_label[i] = label_tick;
10450 if (value && reg_last_set_table_tick[i] == label_tick)
10451 reg_last_set_invalid[i] = 1;
10452 else
10453 reg_last_set_invalid[i] = 0;
10454 }
10455
10456 /* The value being assigned might refer to X (like in "x++;"). In that
10457 case, we must replace it with (clobber (const_int 0)) to prevent
10458 infinite loops. */
9a893315 10459 if (value && ! get_last_value_validate (&value, insn,
230d793d
RS
10460 reg_last_set_label[regno], 0))
10461 {
10462 value = copy_rtx (value);
9a893315
JW
10463 if (! get_last_value_validate (&value, insn,
10464 reg_last_set_label[regno], 1))
230d793d
RS
10465 value = 0;
10466 }
10467
55310dad
RK
10468 /* For the main register being modified, update the value, the mode, the
10469 nonzero bits, and the number of sign bit copies. */
10470
230d793d
RS
10471 reg_last_set_value[regno] = value;
10472
55310dad
RK
10473 if (value)
10474 {
2afabb48 10475 subst_low_cuid = INSN_CUID (insn);
55310dad
RK
10476 reg_last_set_mode[regno] = GET_MODE (reg);
10477 reg_last_set_nonzero_bits[regno] = nonzero_bits (value, GET_MODE (reg));
10478 reg_last_set_sign_bit_copies[regno]
10479 = num_sign_bit_copies (value, GET_MODE (reg));
10480 }
230d793d
RS
10481}
10482
10483/* Used for communication between the following two routines. */
10484static rtx record_dead_insn;
10485
10486/* Called via note_stores from record_dead_and_set_regs to handle one
10487 SET or CLOBBER in an insn. */
10488
10489static void
10490record_dead_and_set_regs_1 (dest, setter)
10491 rtx dest, setter;
10492{
ca89d290
RK
10493 if (GET_CODE (dest) == SUBREG)
10494 dest = SUBREG_REG (dest);
10495
230d793d
RS
10496 if (GET_CODE (dest) == REG)
10497 {
10498 /* If we are setting the whole register, we know its value. Otherwise
10499 show that we don't know the value. We can handle SUBREG in
10500 some cases. */
10501 if (GET_CODE (setter) == SET && dest == SET_DEST (setter))
10502 record_value_for_reg (dest, record_dead_insn, SET_SRC (setter));
10503 else if (GET_CODE (setter) == SET
10504 && GET_CODE (SET_DEST (setter)) == SUBREG
10505 && SUBREG_REG (SET_DEST (setter)) == dest
90bf8081 10506 && GET_MODE_BITSIZE (GET_MODE (dest)) <= BITS_PER_WORD
230d793d 10507 && subreg_lowpart_p (SET_DEST (setter)))
d0ab8cd3
RK
10508 record_value_for_reg (dest, record_dead_insn,
10509 gen_lowpart_for_combine (GET_MODE (dest),
10510 SET_SRC (setter)));
230d793d 10511 else
5f4f0e22 10512 record_value_for_reg (dest, record_dead_insn, NULL_RTX);
230d793d
RS
10513 }
10514 else if (GET_CODE (dest) == MEM
10515 /* Ignore pushes, they clobber nothing. */
10516 && ! push_operand (dest, GET_MODE (dest)))
10517 mem_last_set = INSN_CUID (record_dead_insn);
10518}
10519
10520/* Update the records of when each REG was most recently set or killed
10521 for the things done by INSN. This is the last thing done in processing
10522 INSN in the combiner loop.
10523
ef026f91
RS
10524 We update reg_last_set, reg_last_set_value, reg_last_set_mode,
10525 reg_last_set_nonzero_bits, reg_last_set_sign_bit_copies, reg_last_death,
10526 and also the similar information mem_last_set (which insn most recently
10527 modified memory) and last_call_cuid (which insn was the most recent
10528 subroutine call). */
230d793d
RS
10529
10530static void
10531record_dead_and_set_regs (insn)
10532 rtx insn;
10533{
10534 register rtx link;
55310dad
RK
10535 int i;
10536
230d793d
RS
10537 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
10538 {
dbc131f3
RK
10539 if (REG_NOTE_KIND (link) == REG_DEAD
10540 && GET_CODE (XEXP (link, 0)) == REG)
10541 {
10542 int regno = REGNO (XEXP (link, 0));
10543 int endregno
10544 = regno + (regno < FIRST_PSEUDO_REGISTER
10545 ? HARD_REGNO_NREGS (regno, GET_MODE (XEXP (link, 0)))
10546 : 1);
dbc131f3
RK
10547
10548 for (i = regno; i < endregno; i++)
10549 reg_last_death[i] = insn;
10550 }
230d793d 10551 else if (REG_NOTE_KIND (link) == REG_INC)
5f4f0e22 10552 record_value_for_reg (XEXP (link, 0), insn, NULL_RTX);
230d793d
RS
10553 }
10554
10555 if (GET_CODE (insn) == CALL_INSN)
55310dad
RK
10556 {
10557 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
10558 if (call_used_regs[i])
10559 {
10560 reg_last_set_value[i] = 0;
ef026f91
RS
10561 reg_last_set_mode[i] = 0;
10562 reg_last_set_nonzero_bits[i] = 0;
10563 reg_last_set_sign_bit_copies[i] = 0;
55310dad
RK
10564 reg_last_death[i] = 0;
10565 }
10566
10567 last_call_cuid = mem_last_set = INSN_CUID (insn);
10568 }
230d793d
RS
10569
10570 record_dead_insn = insn;
10571 note_stores (PATTERN (insn), record_dead_and_set_regs_1);
10572}
10573\f
10574/* Utility routine for the following function. Verify that all the registers
10575 mentioned in *LOC are valid when *LOC was part of a value set when
10576 label_tick == TICK. Return 0 if some are not.
10577
10578 If REPLACE is non-zero, replace the invalid reference with
10579 (clobber (const_int 0)) and return 1. This replacement is useful because
10580 we often can get useful information about the form of a value (e.g., if
10581 it was produced by a shift that always produces -1 or 0) even though
10582 we don't know exactly what registers it was produced from. */
10583
10584static int
9a893315 10585get_last_value_validate (loc, insn, tick, replace)
230d793d 10586 rtx *loc;
9a893315 10587 rtx insn;
230d793d
RS
10588 int tick;
10589 int replace;
10590{
10591 rtx x = *loc;
10592 char *fmt = GET_RTX_FORMAT (GET_CODE (x));
10593 int len = GET_RTX_LENGTH (GET_CODE (x));
10594 int i;
10595
10596 if (GET_CODE (x) == REG)
10597 {
10598 int regno = REGNO (x);
10599 int endregno = regno + (regno < FIRST_PSEUDO_REGISTER
10600 ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
10601 int j;
10602
10603 for (j = regno; j < endregno; j++)
10604 if (reg_last_set_invalid[j]
10605 /* If this is a pseudo-register that was only set once, it is
10606 always valid. */
b1f21e0a 10607 || (! (regno >= FIRST_PSEUDO_REGISTER && REG_N_SETS (regno) == 1)
230d793d
RS
10608 && reg_last_set_label[j] > tick))
10609 {
10610 if (replace)
38a448ca 10611 *loc = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
230d793d
RS
10612 return replace;
10613 }
10614
10615 return 1;
10616 }
9a893315
JW
10617 /* If this is a memory reference, make sure that there were
10618 no stores after it that might have clobbered the value. We don't
10619 have alias info, so we assume any store invalidates it. */
10620 else if (GET_CODE (x) == MEM && ! RTX_UNCHANGING_P (x)
10621 && INSN_CUID (insn) <= mem_last_set)
10622 {
10623 if (replace)
38a448ca 10624 *loc = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
9a893315
JW
10625 return replace;
10626 }
230d793d
RS
10627
10628 for (i = 0; i < len; i++)
10629 if ((fmt[i] == 'e'
9a893315 10630 && get_last_value_validate (&XEXP (x, i), insn, tick, replace) == 0)
230d793d
RS
10631 /* Don't bother with these. They shouldn't occur anyway. */
10632 || fmt[i] == 'E')
10633 return 0;
10634
10635 /* If we haven't found a reason for it to be invalid, it is valid. */
10636 return 1;
10637}
10638
10639/* Get the last value assigned to X, if known. Some registers
10640 in the value may be replaced with (clobber (const_int 0)) if their value
10641 is known longer known reliably. */
10642
10643static rtx
10644get_last_value (x)
10645 rtx x;
10646{
10647 int regno;
10648 rtx value;
10649
10650 /* If this is a non-paradoxical SUBREG, get the value of its operand and
10651 then convert it to the desired mode. If this is a paradoxical SUBREG,
0f41302f 10652 we cannot predict what values the "extra" bits might have. */
230d793d
RS
10653 if (GET_CODE (x) == SUBREG
10654 && subreg_lowpart_p (x)
10655 && (GET_MODE_SIZE (GET_MODE (x))
10656 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
10657 && (value = get_last_value (SUBREG_REG (x))) != 0)
10658 return gen_lowpart_for_combine (GET_MODE (x), value);
10659
10660 if (GET_CODE (x) != REG)
10661 return 0;
10662
10663 regno = REGNO (x);
10664 value = reg_last_set_value[regno];
10665
0f41302f
MS
10666 /* If we don't have a value or if it isn't for this basic block,
10667 return 0. */
230d793d
RS
10668
10669 if (value == 0
b1f21e0a 10670 || (REG_N_SETS (regno) != 1
55310dad 10671 && reg_last_set_label[regno] != label_tick))
230d793d
RS
10672 return 0;
10673
4255220d 10674 /* If the value was set in a later insn than the ones we are processing,
4090a6b3
RK
10675 we can't use it even if the register was only set once, but make a quick
10676 check to see if the previous insn set it to something. This is commonly
0d9641d1
JW
10677 the case when the same pseudo is used by repeated insns.
10678
10679 This does not work if there exists an instruction which is temporarily
10680 not on the insn chain. */
d0ab8cd3 10681
bcd49eb7 10682 if (INSN_CUID (reg_last_set[regno]) >= subst_low_cuid)
d0ab8cd3
RK
10683 {
10684 rtx insn, set;
10685
bcd49eb7
JW
10686 /* We can not do anything useful in this case, because there is
10687 an instruction which is not on the insn chain. */
10688 if (subst_prev_insn)
10689 return 0;
10690
4255220d
JW
10691 /* Skip over USE insns. They are not useful here, and they may have
10692 been made by combine, in which case they do not have a INSN_CUID
d6c80562 10693 value. We can't use prev_real_insn, because that would incorrectly
e340018d
JW
10694 take us backwards across labels. Skip over BARRIERs also, since
10695 they could have been made by combine. If we see one, we must be
10696 optimizing dead code, so it doesn't matter what we do. */
d6c80562
JW
10697 for (insn = prev_nonnote_insn (subst_insn);
10698 insn && ((GET_CODE (insn) == INSN
10699 && GET_CODE (PATTERN (insn)) == USE)
e340018d 10700 || GET_CODE (insn) == BARRIER
4255220d 10701 || INSN_CUID (insn) >= subst_low_cuid);
d6c80562 10702 insn = prev_nonnote_insn (insn))
3adde2a5 10703 ;
d0ab8cd3
RK
10704
10705 if (insn
10706 && (set = single_set (insn)) != 0
10707 && rtx_equal_p (SET_DEST (set), x))
10708 {
10709 value = SET_SRC (set);
10710
10711 /* Make sure that VALUE doesn't reference X. Replace any
ddd5a7c1 10712 explicit references with a CLOBBER. If there are any remaining
d0ab8cd3
RK
10713 references (rare), don't use the value. */
10714
10715 if (reg_mentioned_p (x, value))
10716 value = replace_rtx (copy_rtx (value), x,
38a448ca 10717 gen_rtx_CLOBBER (GET_MODE (x), const0_rtx));
d0ab8cd3
RK
10718
10719 if (reg_overlap_mentioned_p (x, value))
10720 return 0;
10721 }
10722 else
10723 return 0;
10724 }
10725
10726 /* If the value has all its registers valid, return it. */
9a893315
JW
10727 if (get_last_value_validate (&value, reg_last_set[regno],
10728 reg_last_set_label[regno], 0))
230d793d
RS
10729 return value;
10730
10731 /* Otherwise, make a copy and replace any invalid register with
10732 (clobber (const_int 0)). If that fails for some reason, return 0. */
10733
10734 value = copy_rtx (value);
9a893315
JW
10735 if (get_last_value_validate (&value, reg_last_set[regno],
10736 reg_last_set_label[regno], 1))
230d793d
RS
10737 return value;
10738
10739 return 0;
10740}
10741\f
10742/* Return nonzero if expression X refers to a REG or to memory
10743 that is set in an instruction more recent than FROM_CUID. */
10744
10745static int
10746use_crosses_set_p (x, from_cuid)
10747 register rtx x;
10748 int from_cuid;
10749{
10750 register char *fmt;
10751 register int i;
10752 register enum rtx_code code = GET_CODE (x);
10753
10754 if (code == REG)
10755 {
10756 register int regno = REGNO (x);
e28f5732
RK
10757 int endreg = regno + (regno < FIRST_PSEUDO_REGISTER
10758 ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
10759
230d793d
RS
10760#ifdef PUSH_ROUNDING
10761 /* Don't allow uses of the stack pointer to be moved,
10762 because we don't know whether the move crosses a push insn. */
10763 if (regno == STACK_POINTER_REGNUM)
10764 return 1;
10765#endif
e28f5732
RK
10766 for (;regno < endreg; regno++)
10767 if (reg_last_set[regno]
10768 && INSN_CUID (reg_last_set[regno]) > from_cuid)
10769 return 1;
10770 return 0;
230d793d
RS
10771 }
10772
10773 if (code == MEM && mem_last_set > from_cuid)
10774 return 1;
10775
10776 fmt = GET_RTX_FORMAT (code);
10777
10778 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
10779 {
10780 if (fmt[i] == 'E')
10781 {
10782 register int j;
10783 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
10784 if (use_crosses_set_p (XVECEXP (x, i, j), from_cuid))
10785 return 1;
10786 }
10787 else if (fmt[i] == 'e'
10788 && use_crosses_set_p (XEXP (x, i), from_cuid))
10789 return 1;
10790 }
10791 return 0;
10792}
10793\f
10794/* Define three variables used for communication between the following
10795 routines. */
10796
10797static int reg_dead_regno, reg_dead_endregno;
10798static int reg_dead_flag;
10799
10800/* Function called via note_stores from reg_dead_at_p.
10801
ddd5a7c1 10802 If DEST is within [reg_dead_regno, reg_dead_endregno), set
230d793d
RS
10803 reg_dead_flag to 1 if X is a CLOBBER and to -1 it is a SET. */
10804
10805static void
10806reg_dead_at_p_1 (dest, x)
10807 rtx dest;
10808 rtx x;
10809{
10810 int regno, endregno;
10811
10812 if (GET_CODE (dest) != REG)
10813 return;
10814
10815 regno = REGNO (dest);
10816 endregno = regno + (regno < FIRST_PSEUDO_REGISTER
10817 ? HARD_REGNO_NREGS (regno, GET_MODE (dest)) : 1);
10818
10819 if (reg_dead_endregno > regno && reg_dead_regno < endregno)
10820 reg_dead_flag = (GET_CODE (x) == CLOBBER) ? 1 : -1;
10821}
10822
/* Return non-zero if REG is known to be dead at INSN.

   We scan backwards from INSN.  If we hit a REG_DEAD note or a CLOBBER
   referencing REG, it is dead.  If we hit a SET referencing REG, it is
   live.  Otherwise, see if it is live or dead at the start of the basic
   block we are in.  Hard regs marked as being live in NEWPAT_USED_REGS
   must be assumed to be always live.  */

static int
reg_dead_at_p (reg, insn)
     rtx reg;
     rtx insn;
{
  int block, i;

  /* Set variables for reg_dead_at_p_1.  These file-scope variables are
     the implicit arguments of the note_stores callback below.  */
  reg_dead_regno = REGNO (reg);
  reg_dead_endregno = reg_dead_regno + (reg_dead_regno < FIRST_PSEUDO_REGISTER
					? HARD_REGNO_NREGS (reg_dead_regno,
							    GET_MODE (reg))
					: 1);

  reg_dead_flag = 0;

  /* Check that reg isn't mentioned in NEWPAT_USED_REGS; such hard regs
     must be treated as always live.  */
  if (reg_dead_regno < FIRST_PSEUDO_REGISTER)
    {
      for (i = reg_dead_regno; i < reg_dead_endregno; i++)
	if (TEST_HARD_REG_BIT (newpat_used_regs, i))
	  return 0;
    }

  /* Scan backwards until we find a REG_DEAD note, SET, CLOBBER, label, or
     beginning of function.  note_stores sets reg_dead_flag to 1 (dead)
     or -1 (live) via reg_dead_at_p_1.  */
  for (; insn && GET_CODE (insn) != CODE_LABEL && GET_CODE (insn) != BARRIER;
       insn = prev_nonnote_insn (insn))
    {
      note_stores (PATTERN (insn), reg_dead_at_p_1);
      if (reg_dead_flag)
	return reg_dead_flag == 1 ? 1 : 0;

      if (find_regno_note (insn, REG_DEAD, reg_dead_regno))
	return 1;
    }

  /* Get the basic block number that we were in.  */
  if (insn == 0)
    block = 0;
  else
    {
      for (block = 0; block < n_basic_blocks; block++)
	if (insn == basic_block_head[block])
	  break;

      /* If we stopped at an insn that heads no block, we cannot tell;
	 conservatively assume the register is live.  */
      if (block == n_basic_blocks)
	return 0;
    }

  /* The register is dead only if no part of it is live at the start of
     the block.  */
  for (i = reg_dead_regno; i < reg_dead_endregno; i++)
    if (REGNO_REG_SET_P (basic_block_live_at_start[block], i))
      return 0;

  return 1;
}
6e25d159
RK
10887\f
10888/* Note hard registers in X that are used. This code is similar to
10889 that in flow.c, but much simpler since we don't care about pseudos. */
10890
10891static void
10892mark_used_regs_combine (x)
10893 rtx x;
10894{
10895 register RTX_CODE code = GET_CODE (x);
10896 register int regno;
10897 int i;
10898
10899 switch (code)
10900 {
10901 case LABEL_REF:
10902 case SYMBOL_REF:
10903 case CONST_INT:
10904 case CONST:
10905 case CONST_DOUBLE:
10906 case PC:
10907 case ADDR_VEC:
10908 case ADDR_DIFF_VEC:
10909 case ASM_INPUT:
10910#ifdef HAVE_cc0
10911 /* CC0 must die in the insn after it is set, so we don't need to take
10912 special note of it here. */
10913 case CC0:
10914#endif
10915 return;
10916
10917 case CLOBBER:
10918 /* If we are clobbering a MEM, mark any hard registers inside the
10919 address as used. */
10920 if (GET_CODE (XEXP (x, 0)) == MEM)
10921 mark_used_regs_combine (XEXP (XEXP (x, 0), 0));
10922 return;
10923
10924 case REG:
10925 regno = REGNO (x);
10926 /* A hard reg in a wide mode may really be multiple registers.
10927 If so, mark all of them just like the first. */
10928 if (regno < FIRST_PSEUDO_REGISTER)
10929 {
10930 /* None of this applies to the stack, frame or arg pointers */
10931 if (regno == STACK_POINTER_REGNUM
10932#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
10933 || regno == HARD_FRAME_POINTER_REGNUM
10934#endif
10935#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
10936 || (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
10937#endif
10938 || regno == FRAME_POINTER_REGNUM)
10939 return;
10940
10941 i = HARD_REGNO_NREGS (regno, GET_MODE (x));
10942 while (i-- > 0)
10943 SET_HARD_REG_BIT (newpat_used_regs, regno + i);
10944 }
10945 return;
10946
10947 case SET:
10948 {
10949 /* If setting a MEM, or a SUBREG of a MEM, then note any hard regs in
10950 the address. */
10951 register rtx testreg = SET_DEST (x);
10952
e048778f
RK
10953 while (GET_CODE (testreg) == SUBREG
10954 || GET_CODE (testreg) == ZERO_EXTRACT
10955 || GET_CODE (testreg) == SIGN_EXTRACT
10956 || GET_CODE (testreg) == STRICT_LOW_PART)
6e25d159
RK
10957 testreg = XEXP (testreg, 0);
10958
10959 if (GET_CODE (testreg) == MEM)
10960 mark_used_regs_combine (XEXP (testreg, 0));
10961
10962 mark_used_regs_combine (SET_SRC (x));
6e25d159 10963 }
e9a25f70
JL
10964 return;
10965
10966 default:
10967 break;
6e25d159
RK
10968 }
10969
10970 /* Recursively scan the operands of this expression. */
10971
10972 {
10973 register char *fmt = GET_RTX_FORMAT (code);
10974
10975 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
10976 {
10977 if (fmt[i] == 'e')
10978 mark_used_regs_combine (XEXP (x, i));
10979 else if (fmt[i] == 'E')
10980 {
10981 register int j;
10982
10983 for (j = 0; j < XVECLEN (x, i); j++)
10984 mark_used_regs_combine (XVECEXP (x, i, j));
10985 }
10986 }
10987 }
10988}
10989
230d793d
RS
10990\f
10991/* Remove register number REGNO from the dead registers list of INSN.
10992
10993 Return the note used to record the death, if there was one. */
10994
10995rtx
10996remove_death (regno, insn)
10997 int regno;
10998 rtx insn;
10999{
11000 register rtx note = find_regno_note (insn, REG_DEAD, regno);
11001
11002 if (note)
1a26b032 11003 {
b1f21e0a 11004 REG_N_DEATHS (regno)--;
1a26b032
RK
11005 remove_note (insn, note);
11006 }
230d793d
RS
11007
11008 return note;
11009}
11010
/* For each register (hardware or pseudo) used within expression X, if its
   death is in an instruction with cuid between FROM_CUID (inclusive) and
   TO_INSN (exclusive), put a REG_DEAD note for that register in the
   list headed by PNOTES.

   That said, don't move registers killed by maybe_kill_insn.

   This is done when X is being merged by combination into TO_INSN.  These
   notes will then be distributed as needed.  */

static void
move_deaths (x, maybe_kill_insn, from_cuid, to_insn, pnotes)
     rtx x;
     rtx maybe_kill_insn;
     int from_cuid;
     rtx to_insn;
     rtx *pnotes;
{
  register char *fmt;
  register int len, i;
  register enum rtx_code code = GET_CODE (x);

  if (code == REG)
    {
      register int regno = REGNO (x);
      register rtx where_dead = reg_last_death[regno];
      register rtx before_dead, after_dead;

      /* Don't move the register if it gets killed in between from and to */
      if (maybe_kill_insn && reg_set_p (x, maybe_kill_insn)
	  && !reg_referenced_p (x, maybe_kill_insn))
	return;

      /* WHERE_DEAD could be a USE insn made by combine, so first we
	 make sure that we have insns with valid INSN_CUID values.  */
      before_dead = where_dead;
      while (before_dead && INSN_UID (before_dead) > max_uid_cuid)
	before_dead = PREV_INSN (before_dead);
      after_dead = where_dead;
      while (after_dead && INSN_UID (after_dead) > max_uid_cuid)
	after_dead = NEXT_INSN (after_dead);

      /* The death must lie in the [FROM_CUID, TO_INSN) window for the
	 note to be moved.  */
      if (before_dead && after_dead
	  && INSN_CUID (before_dead) >= from_cuid
	  && (INSN_CUID (after_dead) < INSN_CUID (to_insn)
	      || (where_dead != after_dead
		  && INSN_CUID (after_dead) == INSN_CUID (to_insn))))
	{
	  rtx note = remove_death (regno, where_dead);

	  /* It is possible for the call above to return 0.  This can occur
	     when reg_last_death points to I2 or I1 that we combined with.
	     In that case make a new note.

	     We must also check for the case where X is a hard register
	     and NOTE is a death note for a range of hard registers
	     including X.  In that case, we must put REG_DEAD notes for
	     the remaining registers in place of NOTE.  */

	  if (note != 0 && regno < FIRST_PSEUDO_REGISTER
	      && (GET_MODE_SIZE (GET_MODE (XEXP (note, 0)))
		  > GET_MODE_SIZE (GET_MODE (x))))
	    {
	      int deadregno = REGNO (XEXP (note, 0));
	      int deadend
		= (deadregno + HARD_REGNO_NREGS (deadregno,
						 GET_MODE (XEXP (note, 0))));
	      int ourend = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
	      int i;

	      /* Re-cover the pieces of the range that X does not span.  */
	      for (i = deadregno; i < deadend; i++)
		if (i < regno || i >= ourend)
		  REG_NOTES (where_dead)
		    = gen_rtx_EXPR_LIST (REG_DEAD,
					 gen_rtx_REG (reg_raw_mode[i], i),
					 REG_NOTES (where_dead));
	    }
	  /* If we didn't find any note, or if we found a REG_DEAD note that
	     covers only part of the given reg, and we have a multi-reg hard
	     register, then to be safe we must check for REG_DEAD notes
	     for each register other than the first.  They could have
	     their own REG_DEAD notes lying around.  */
	  else if ((note == 0
		    || (note != 0
			&& (GET_MODE_SIZE (GET_MODE (XEXP (note, 0)))
			    < GET_MODE_SIZE (GET_MODE (x)))))
		   && regno < FIRST_PSEUDO_REGISTER
		   && HARD_REGNO_NREGS (regno, GET_MODE (x)) > 1)
	    {
	      int ourend = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
	      int i, offset;
	      rtx oldnotes = 0;

	      /* Skip the part already covered by NOTE, if any.  */
	      if (note)
		offset = HARD_REGNO_NREGS (regno, GET_MODE (XEXP (note, 0)));
	      else
		offset = 1;

	      for (i = regno + offset; i < ourend; i++)
		move_deaths (gen_rtx_REG (reg_raw_mode[i], i),
			     maybe_kill_insn, from_cuid, to_insn, &oldnotes);
	    }

	  /* Reuse NOTE if it matches X exactly; otherwise make a fresh
	     REG_DEAD note for X.  Either way it goes on *PNOTES.  */
	  if (note != 0 && GET_MODE (XEXP (note, 0)) == GET_MODE (x))
	    {
	      XEXP (note, 1) = *pnotes;
	      *pnotes = note;
	    }
	  else
	    *pnotes = gen_rtx_EXPR_LIST (REG_DEAD, x, *pnotes);

	  REG_N_DEATHS (regno)++;
	}

      return;
    }

  else if (GET_CODE (x) == SET)
    {
      rtx dest = SET_DEST (x);

      move_deaths (SET_SRC (x), maybe_kill_insn, from_cuid, to_insn, pnotes);

      /* In the case of a ZERO_EXTRACT, a STRICT_LOW_PART, or a SUBREG
	 that accesses one word of a multi-word item, some
	 piece of everything register in the expression is used by
	 this insn, so remove any old death.  */

      if (GET_CODE (dest) == ZERO_EXTRACT
	  || GET_CODE (dest) == STRICT_LOW_PART
	  || (GET_CODE (dest) == SUBREG
	      && (((GET_MODE_SIZE (GET_MODE (dest))
		    + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
		  == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
		       + UNITS_PER_WORD - 1) / UNITS_PER_WORD))))
	{
	  move_deaths (dest, maybe_kill_insn, from_cuid, to_insn, pnotes);
	  return;
	}

      /* If this is some other SUBREG, we know it replaces the entire
	 value, so use that as the destination.  */
      if (GET_CODE (dest) == SUBREG)
	dest = SUBREG_REG (dest);

      /* If this is a MEM, adjust deaths of anything used in the address.
	 For a REG (the only other possibility), the entire value is
	 being replaced so the old value is not used in this insn.  */

      if (GET_CODE (dest) == MEM)
	move_deaths (XEXP (dest, 0), maybe_kill_insn, from_cuid,
		     to_insn, pnotes);
      return;
    }

  else if (GET_CODE (x) == CLOBBER)
    return;

  /* Recursively scan the operands of any other expression.  */
  len = GET_RTX_LENGTH (code);
  fmt = GET_RTX_FORMAT (code);

  for (i = 0; i < len; i++)
    {
      if (fmt[i] == 'E')
	{
	  register int j;
	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    move_deaths (XVECEXP (x, i, j), maybe_kill_insn, from_cuid,
			 to_insn, pnotes);
	}
      else if (fmt[i] == 'e')
	move_deaths (XEXP (x, i), maybe_kill_insn, from_cuid, to_insn, pnotes);
    }
}
11185\f
a7c99304
RK
11186/* Return 1 if X is the target of a bit-field assignment in BODY, the
11187 pattern of an insn. X must be a REG. */
230d793d
RS
11188
11189static int
a7c99304
RK
11190reg_bitfield_target_p (x, body)
11191 rtx x;
230d793d
RS
11192 rtx body;
11193{
11194 int i;
11195
11196 if (GET_CODE (body) == SET)
a7c99304
RK
11197 {
11198 rtx dest = SET_DEST (body);
11199 rtx target;
11200 int regno, tregno, endregno, endtregno;
11201
11202 if (GET_CODE (dest) == ZERO_EXTRACT)
11203 target = XEXP (dest, 0);
11204 else if (GET_CODE (dest) == STRICT_LOW_PART)
11205 target = SUBREG_REG (XEXP (dest, 0));
11206 else
11207 return 0;
11208
11209 if (GET_CODE (target) == SUBREG)
11210 target = SUBREG_REG (target);
11211
11212 if (GET_CODE (target) != REG)
11213 return 0;
11214
11215 tregno = REGNO (target), regno = REGNO (x);
11216 if (tregno >= FIRST_PSEUDO_REGISTER || regno >= FIRST_PSEUDO_REGISTER)
11217 return target == x;
11218
11219 endtregno = tregno + HARD_REGNO_NREGS (tregno, GET_MODE (target));
11220 endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
11221
11222 return endregno > tregno && regno < endtregno;
11223 }
230d793d
RS
11224
11225 else if (GET_CODE (body) == PARALLEL)
11226 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
a7c99304 11227 if (reg_bitfield_target_p (x, XVECEXP (body, 0, i)))
230d793d
RS
11228 return 1;
11229
11230 return 0;
11231}
11232\f
/* Given a chain of REG_NOTES originally from FROM_INSN, try to place them
   as appropriate.  I3 and I2 are the insns resulting from the combination
   insns including FROM (I2 may be zero).

   ELIM_I2 and ELIM_I1 are either zero or registers that we know will
   not need REG_DEAD notes because they are being substituted for.  This
   saves searching in the most common cases.

   Each note in the list is either ignored or placed on some insns, depending
   on the type of note.  */

static void
distribute_notes (notes, from_insn, i3, i2, elim_i2, elim_i1)
     rtx notes;
     rtx from_insn;
     rtx i3, i2;
     rtx elim_i2, elim_i1;
{
  rtx note, next_note;
  rtx tem;

  for (note = notes; note; note = next_note)
    {
      rtx place = 0, place2 = 0;

      /* If this NOTE references a pseudo register, ensure it references
	 the latest copy of that register.  */
      if (XEXP (note, 0) && GET_CODE (XEXP (note, 0)) == REG
	  && REGNO (XEXP (note, 0)) >= FIRST_PSEUDO_REGISTER)
	XEXP (note, 0) = regno_reg_rtx[REGNO (XEXP (note, 0))];

      next_note = XEXP (note, 1);
      switch (REG_NOTE_KIND (note))
	{
	case REG_BR_PROB:
	case REG_EXEC_COUNT:
	  /* Doesn't matter much where we put this, as long as it's somewhere.
	     It is preferable to keep these notes on branches, which is most
	     likely to be i3.  */
	  place = i3;
	  break;

	case REG_UNUSED:
	  /* Any clobbers for i3 may still exist, and so we must process
	     REG_UNUSED notes from that insn.

	     Any clobbers from i2 or i1 can only exist if they were added by
	     recog_for_combine.  In that case, recog_for_combine created the
	     necessary REG_UNUSED notes.  Trying to keep any original
	     REG_UNUSED notes from these insns can cause incorrect output
	     if it is for the same register as the original i3 dest.
	     In that case, we will notice that the register is set in i3,
	     and then add a REG_UNUSED note for the destination of i3, which
	     is wrong.  However, it is possible to have REG_UNUSED notes from
	     i2 or i1 for register which were both used and clobbered, so
	     we keep notes from i2 or i1 if they will turn into REG_DEAD
	     notes.  */

	  /* If this register is set or clobbered in I3, put the note there
	     unless there is one already.  */
	  if (reg_set_p (XEXP (note, 0), PATTERN (i3)))
	    {
	      if (from_insn != i3)
		break;

	      if (! (GET_CODE (XEXP (note, 0)) == REG
		     ? find_regno_note (i3, REG_UNUSED, REGNO (XEXP (note, 0)))
		     : find_reg_note (i3, REG_UNUSED, XEXP (note, 0))))
		place = i3;
	    }
	  /* Otherwise, if this register is used by I3, then this register
	     now dies here, so we must put a REG_DEAD note here unless there
	     is one already.  */
	  else if (reg_referenced_p (XEXP (note, 0), PATTERN (i3))
		   && ! (GET_CODE (XEXP (note, 0)) == REG
			 ? find_regno_note (i3, REG_DEAD, REGNO (XEXP (note, 0)))
			 : find_reg_note (i3, REG_DEAD, XEXP (note, 0))))
	    {
	      PUT_REG_NOTE_KIND (note, REG_DEAD);
	      place = i3;
	    }
	  break;

	case REG_EQUAL:
	case REG_EQUIV:
	case REG_NONNEG:
	case REG_NOALIAS:
	  /* These notes say something about results of an insn.  We can
	     only support them if they used to be on I3 in which case they
	     remain on I3.  Otherwise they are ignored.

	     If the note refers to an expression that is not a constant, we
	     must also ignore the note since we cannot tell whether the
	     equivalence is still true.  It might be possible to do
	     slightly better than this (we only have a problem if I2DEST
	     or I1DEST is present in the expression), but it doesn't
	     seem worth the trouble.  */

	  if (from_insn == i3
	      && (XEXP (note, 0) == 0 || CONSTANT_P (XEXP (note, 0))))
	    place = i3;
	  break;

	case REG_INC:
	case REG_NO_CONFLICT:
	case REG_LABEL:
	  /* These notes say something about how a register is used.  They must
	     be present on any use of the register in I2 or I3.  */
	  if (reg_mentioned_p (XEXP (note, 0), PATTERN (i3)))
	    place = i3;

	  if (i2 && reg_mentioned_p (XEXP (note, 0), PATTERN (i2)))
	    {
	      if (place)
		place2 = i2;
	      else
		place = i2;
	    }
	  break;

	case REG_WAS_0:
	  /* It is too much trouble to try to see if this note is still
	     correct in all situations.  It is better to simply delete it.  */
	  break;

	case REG_RETVAL:
	  /* If the insn previously containing this note still exists,
	     put it back where it was.  Otherwise move it to the previous
	     insn.  Adjust the corresponding REG_LIBCALL note.  */
	  if (GET_CODE (from_insn) != NOTE)
	    place = from_insn;
	  else
	    {
	      tem = find_reg_note (XEXP (note, 0), REG_LIBCALL, NULL_RTX);
	      place = prev_real_insn (from_insn);
	      if (tem && place)
		XEXP (tem, 0) = place;
	    }
	  break;

	case REG_LIBCALL:
	  /* This is handled similarly to REG_RETVAL.  */
	  if (GET_CODE (from_insn) != NOTE)
	    place = from_insn;
	  else
	    {
	      tem = find_reg_note (XEXP (note, 0), REG_RETVAL, NULL_RTX);
	      place = next_real_insn (from_insn);
	      if (tem && place)
		XEXP (tem, 0) = place;
	    }
	  break;

	case REG_DEAD:
	  /* If the register is used as an input in I3, it dies there.
	     Similarly for I2, if it is non-zero and adjacent to I3.

	     If the register is not used as an input in either I3 or I2
	     and it is not one of the registers we were supposed to eliminate,
	     there are two possibilities.  We might have a non-adjacent I2
	     or we might have somehow eliminated an additional register
	     from a computation.  For example, we might have had A & B where
	     we discover that B will always be zero.  In this case we will
	     eliminate the reference to A.

	     In both cases, we must search to see if we can find a previous
	     use of A and put the death note there.  */

	  if (from_insn
	      && GET_CODE (from_insn) == CALL_INSN
	      && find_reg_fusage (from_insn, USE, XEXP (note, 0)))
	    place = from_insn;
	  else if (reg_referenced_p (XEXP (note, 0), PATTERN (i3)))
	    place = i3;
	  else if (i2 != 0 && next_nonnote_insn (i2) == i3
		   && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
	    place = i2;

	  if (XEXP (note, 0) == elim_i2 || XEXP (note, 0) == elim_i1)
	    break;

	  /* If the register is used in both I2 and I3 and it dies in I3,
	     we might have added another reference to it.  If reg_n_refs
	     was 2, bump it to 3.  This has to be correct since the
	     register must have been set somewhere.  The reason this is
	     done is because local-alloc.c treats 2 references as a
	     special case.  */

	  if (place == i3 && i2 != 0 && GET_CODE (XEXP (note, 0)) == REG
	      && REG_N_REFS (REGNO (XEXP (note, 0)))== 2
	      && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
	    REG_N_REFS (REGNO (XEXP (note, 0))) = 3;

	  if (place == 0)
	    {
	      /* Search backwards from I3 for an insn that uses or sets
		 this register, stopping at the start of the block.  */
	      for (tem = prev_nonnote_insn (i3);
		   place == 0 && tem
		   && (GET_CODE (tem) == INSN || GET_CODE (tem) == CALL_INSN);
		   tem = prev_nonnote_insn (tem))
		{
		  /* If the register is being set at TEM, see if that is all
		     TEM is doing.  If so, delete TEM.  Otherwise, make this
		     into a REG_UNUSED note instead.  */
		  if (reg_set_p (XEXP (note, 0), PATTERN (tem)))
		    {
		      rtx set = single_set (tem);

		      /* Verify that it was the set, and not a clobber that
			 modified the register.  */

		      if (set != 0 && ! side_effects_p (SET_SRC (set))
			  && (rtx_equal_p (XEXP (note, 0), SET_DEST (set))
			      || (GET_CODE (SET_DEST (set)) == SUBREG
				  && rtx_equal_p (XEXP (note, 0),
						  XEXP (SET_DEST (set), 0)))))
			{
			  /* Move the notes and links of TEM elsewhere.
			     This might delete other dead insns recursively.
			     First set the pattern to something that won't use
			     any register.  */

			  PATTERN (tem) = pc_rtx;

			  distribute_notes (REG_NOTES (tem), tem, tem,
					    NULL_RTX, NULL_RTX, NULL_RTX);
			  distribute_links (LOG_LINKS (tem));

			  PUT_CODE (tem, NOTE);
			  NOTE_LINE_NUMBER (tem) = NOTE_INSN_DELETED;
			  NOTE_SOURCE_FILE (tem) = 0;
			}
		      else
			{
			  PUT_REG_NOTE_KIND (note, REG_UNUSED);

			  /* If there isn't already a REG_UNUSED note, put one
			     here.  */
			  if (! find_regno_note (tem, REG_UNUSED,
						 REGNO (XEXP (note, 0))))
			    place = tem;
			  break;
			}
		    }
		  else if (reg_referenced_p (XEXP (note, 0), PATTERN (tem))
			   || (GET_CODE (tem) == CALL_INSN
			       && find_reg_fusage (tem, USE, XEXP (note, 0))))
		    {
		      place = tem;

		      /* If we are doing a 3->2 combination, and we have a
			 register which formerly died in i3 and was not used
			 by i2, which now no longer dies in i3 and is used in
			 i2 but does not die in i2, and place is between i2
			 and i3, then we may need to move a link from place to
			 i2.  */
		      if (i2 && INSN_UID (place) <= max_uid_cuid
			  && INSN_CUID (place) > INSN_CUID (i2)
			  && from_insn && INSN_CUID (from_insn) > INSN_CUID (i2)
			  && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
			{
			  rtx links = LOG_LINKS (place);
			  LOG_LINKS (place) = 0;
			  distribute_links (links);
			}
		      break;
		    }
		}

	      /* If we haven't found an insn for the death note and it
		 is still a REG_DEAD note, but we have hit a CODE_LABEL,
		 insert a USE insn for the register at that label and
		 put the death node there.  This prevents problems with
		 call-state tracking in caller-save.c.  */
	      if (REG_NOTE_KIND (note) == REG_DEAD && place == 0 && tem != 0)
		{
		  place
		    = emit_insn_after (gen_rtx_USE (VOIDmode, XEXP (note, 0)),
				       tem);

		  /* If this insn was emitted between blocks, then update
		     basic_block_head of the current block to include it.  */
		  if (basic_block_end[this_basic_block - 1] == tem)
		    basic_block_head[this_basic_block] = place;
		}
	    }

	  /* If the register is set or already dead at PLACE, we needn't do
	     anything with this note if it is still a REG_DEAD note.

	     Note that we cannot use just `dead_or_set_p' here since we can
	     convert an assignment to a register into a bit-field assignment.
	     Therefore, we must also omit the note if the register is the
	     target of a bitfield assignment.  */

	  if (place && REG_NOTE_KIND (note) == REG_DEAD)
	    {
	      int regno = REGNO (XEXP (note, 0));

	      if (dead_or_set_p (place, XEXP (note, 0))
		  || reg_bitfield_target_p (XEXP (note, 0), PATTERN (place)))
		{
		  /* Unless the register previously died in PLACE, clear
		     reg_last_death.  [I no longer understand why this is
		     being done.] */
		  if (reg_last_death[regno] != place)
		    reg_last_death[regno] = 0;
		  place = 0;
		}
	      else
		reg_last_death[regno] = place;

	      /* If this is a death note for a hard reg that is occupying
		 multiple registers, ensure that we are still using all
		 parts of the object.  If we find a piece of the object
		 that is unused, we must add a USE for that piece before
		 PLACE and put the appropriate REG_DEAD note on it.

		 An alternative would be to put a REG_UNUSED for the pieces
		 on the insn that set the register, but that can't be done if
		 it is not in the same block.  It is simpler, though less
		 efficient, to add the USE insns.  */

	      if (place && regno < FIRST_PSEUDO_REGISTER
		  && HARD_REGNO_NREGS (regno, GET_MODE (XEXP (note, 0))) > 1)
		{
		  int endregno
		    = regno + HARD_REGNO_NREGS (regno,
						GET_MODE (XEXP (note, 0)));
		  int all_used = 1;
		  int i;

		  for (i = regno; i < endregno; i++)
		    if (! refers_to_regno_p (i, i + 1, PATTERN (place), 0)
			&& ! find_regno_fusage (place, USE, i))
		      {
			rtx piece = gen_rtx_REG (reg_raw_mode[i], i);
			rtx p;

			/* See if we already placed a USE note for this
			   register in front of PLACE.  */
			for (p = place;
			     GET_CODE (PREV_INSN (p)) == INSN
			     && GET_CODE (PATTERN (PREV_INSN (p))) == USE;
			     p = PREV_INSN (p))
			  if (rtx_equal_p (piece,
					   XEXP (PATTERN (PREV_INSN (p)), 0)))
			    {
			      p = 0;
			      break;
			    }

			if (p)
			  {
			    rtx use_insn
			      = emit_insn_before (gen_rtx_USE (VOIDmode,
							       piece),
						  p);
			    REG_NOTES (use_insn)
			      = gen_rtx_EXPR_LIST (REG_DEAD, piece,
						   REG_NOTES (use_insn));
			  }

			all_used = 0;
		      }

		  /* Check for the case where the register dying partially
		     overlaps the register set by this insn.  */
		  if (all_used)
		    for (i = regno; i < endregno; i++)
		      if (dead_or_set_regno_p (place, i))
			{
			  all_used = 0;
			  break;
			}

		  if (! all_used)
		    {
		      /* Put only REG_DEAD notes for pieces that are
			 still used and that are not already dead or set.  */

		      for (i = regno; i < endregno; i++)
			{
			  rtx piece = gen_rtx_REG (reg_raw_mode[i], i);

			  if ((reg_referenced_p (piece, PATTERN (place))
			       || (GET_CODE (place) == CALL_INSN
				   && find_reg_fusage (place, USE, piece)))
			      && ! dead_or_set_p (place, piece)
			      && ! reg_bitfield_target_p (piece,
							  PATTERN (place)))
			    REG_NOTES (place)
			      = gen_rtx_EXPR_LIST (REG_DEAD,
						   piece, REG_NOTES (place));
			}

		      place = 0;
		    }
		}
	    }
	  break;

	default:
	  /* Any other notes should not be present at this point in the
	     compilation.  */
	  abort ();
	}

      /* Attach the note to PLACE, or drop it (adjusting the death count)
	 if no place was found.  */
      if (place)
	{
	  XEXP (note, 1) = REG_NOTES (place);
	  REG_NOTES (place) = note;
	}
      else if ((REG_NOTE_KIND (note) == REG_DEAD
		|| REG_NOTE_KIND (note) == REG_UNUSED)
	       && GET_CODE (XEXP (note, 0)) == REG)
	REG_N_DEATHS (REGNO (XEXP (note, 0)))--;

      /* A second placement gets a fresh copy of the note.  */
      if (place2)
	{
	  if ((REG_NOTE_KIND (note) == REG_DEAD
	       || REG_NOTE_KIND (note) == REG_UNUSED)
	      && GET_CODE (XEXP (note, 0)) == REG)
	    REG_N_DEATHS (REGNO (XEXP (note, 0)))++;

	  REG_NOTES (place2) = gen_rtx_fmt_ee (GET_CODE (note),
					       REG_NOTE_KIND (note),
					       XEXP (note, 0),
					       REG_NOTES (place2));
	}
    }
}
11664\f
/* Similarly to above, distribute the LOG_LINKS that used to be present on
   I3, I2, and I1 to new locations.  This is also called in one case to
   add a link pointing at I3 when I3's destination is changed.  */

static void
distribute_links (links)
     rtx links;
{
  rtx link, next_link;

  for (link = links; link; link = next_link)
    {
      rtx place = 0;
      rtx insn;
      rtx set, reg;

      next_link = XEXP (link, 1);

      /* If the insn that this link points to is a NOTE or isn't a single
	 set, ignore it.  In the latter case, it isn't clear what we
	 can do other than ignore the link, since we can't tell which
	 register it was for.  Such links wouldn't be used by combine
	 anyway.

	 It is not possible for the destination of the target of the link to
	 have been changed by combine.  The only potential of this is if we
	 replace I3, I2, and I1 by I3 and I2.  But in that case the
	 destination of I2 also remains unchanged.  */

      if (GET_CODE (XEXP (link, 0)) == NOTE
	  || (set = single_set (XEXP (link, 0))) == 0)
	continue;

      /* Strip wrappers to find the register that the target insn sets.  */
      reg = SET_DEST (set);
      while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
	     || GET_CODE (reg) == SIGN_EXTRACT
	     || GET_CODE (reg) == STRICT_LOW_PART)
	reg = XEXP (reg, 0);

      /* A LOG_LINK is defined as being placed on the first insn that uses
	 a register and points to the insn that sets the register.  Start
	 searching at the next insn after the target of the link and stop
	 when we reach a set of the register or the end of the basic block.

	 Note that this correctly handles the link that used to point from
	 I3 to I2.  Also note that not much searching is typically done here
	 since most links don't point very far away.  */

      for (insn = NEXT_INSN (XEXP (link, 0));
	   (insn && (this_basic_block == n_basic_blocks - 1
		     || basic_block_head[this_basic_block + 1] != insn));
	   insn = NEXT_INSN (insn))
	if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
	    && reg_overlap_mentioned_p (reg, PATTERN (insn)))
	  {
	    if (reg_referenced_p (reg, PATTERN (insn)))
	      place = insn;
	    break;
	  }
	else if (GET_CODE (insn) == CALL_INSN
		 && find_reg_fusage (insn, USE, reg))
	  {
	    place = insn;
	    break;
	  }

      /* If we found a place to put the link, place it there unless there
	 is already a link to the same insn as LINK at that point.  */

      if (place)
	{
	  rtx link2;

	  for (link2 = LOG_LINKS (place); link2; link2 = XEXP (link2, 1))
	    if (XEXP (link2, 0) == XEXP (link, 0))
	      break;

	  if (link2 == 0)
	    {
	      XEXP (link, 1) = LOG_LINKS (place);
	      LOG_LINKS (place) = link;

	      /* Set added_links_insn to the earliest insn we added a
		 link to.  */
	      if (added_links_insn == 0
		  || INSN_CUID (added_links_insn) > INSN_CUID (place))
		added_links_insn = place;
	    }
	}
    }
}
11756\f
1427d6d2
RK
11757/* Compute INSN_CUID for INSN, which is an insn made by combine. */
11758
11759static int
11760insn_cuid (insn)
11761 rtx insn;
11762{
11763 while (insn != 0 && INSN_UID (insn) > max_uid_cuid
11764 && GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == USE)
11765 insn = NEXT_INSN (insn);
11766
11767 if (INSN_UID (insn) > max_uid_cuid)
11768 abort ();
11769
11770 return INSN_CUID (insn);
11771}
11772\f
230d793d
RS
11773void
11774dump_combine_stats (file)
11775 FILE *file;
11776{
11777 fprintf
11778 (file,
11779 ";; Combiner statistics: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n\n",
11780 combine_attempts, combine_merges, combine_extras, combine_successes);
11781}
11782
11783void
11784dump_combine_total_stats (file)
11785 FILE *file;
11786{
11787 fprintf
11788 (file,
11789 "\n;; Combiner totals: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n",
11790 total_attempts, total_merges, total_extras, total_successes);
11791}
This page took 2.027241 seconds and 5 git commands to generate.