]> gcc.gnu.org Git - gcc.git/blame - gcc/combine.c
Update comments.
[gcc.git] / gcc / combine.c
CommitLineData
230d793d 1/* Optimize by combining instructions for GNU compiler.
1a6ec070 2 Copyright (C) 1987, 88, 92-96, 1997 Free Software Foundation, Inc.
230d793d
RS
3
4This file is part of GNU CC.
5
6GNU CC is free software; you can redistribute it and/or modify
7it under the terms of the GNU General Public License as published by
8the Free Software Foundation; either version 2, or (at your option)
9any later version.
10
11GNU CC is distributed in the hope that it will be useful,
12but WITHOUT ANY WARRANTY; without even the implied warranty of
13MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14GNU General Public License for more details.
15
16You should have received a copy of the GNU General Public License
17along with GNU CC; see the file COPYING. If not, write to
940d9d63
RK
18the Free Software Foundation, 59 Temple Place - Suite 330,
19Boston, MA 02111-1307, USA. */
230d793d
RS
20
21
22/* This module is essentially the "combiner" phase of the U. of Arizona
23 Portable Optimizer, but redone to work on our list-structured
24 representation for RTL instead of their string representation.
25
26 The LOG_LINKS of each insn identify the most recent assignment
27 to each REG used in the insn. It is a list of previous insns,
28 each of which contains a SET for a REG that is used in this insn
29 and not used or set in between. LOG_LINKs never cross basic blocks.
30 They were set up by the preceding pass (lifetime analysis).
31
32 We try to combine each pair of insns joined by a logical link.
33 We also try to combine triples of insns A, B and C when
34 C has a link back to B and B has a link back to A.
35
36 LOG_LINKS does not have links for use of the CC0. They don't
37 need to, because the insn that sets the CC0 is always immediately
38 before the insn that tests it. So we always regard a branch
39 insn as having a logical link to the preceding insn. The same is true
40 for an insn explicitly using CC0.
41
42 We check (with use_crosses_set_p) to avoid combining in such a way
43 as to move a computation to a place where its value would be different.
44
45 Combination is done by mathematically substituting the previous
46 insn(s) values for the regs they set into the expressions in
47 the later insns that refer to these regs. If the result is a valid insn
48 for our target machine, according to the machine description,
49 we install it, delete the earlier insns, and update the data flow
50 information (LOG_LINKS and REG_NOTES) for what we did.
51
52 There are a few exceptions where the dataflow information created by
 53 flow.c isn't completely updated:
54
55 - reg_live_length is not updated
56 - reg_n_refs is not adjusted in the rare case when a register is
57 no longer required in a computation
58 - there are extremely rare cases (see distribute_regnotes) when a
59 REG_DEAD note is lost
60 - a LOG_LINKS entry that refers to an insn with multiple SETs may be
61 removed because there is no way to know which register it was
62 linking
63
64 To simplify substitution, we combine only when the earlier insn(s)
65 consist of only a single assignment. To simplify updating afterward,
66 we never combine when a subroutine call appears in the middle.
67
68 Since we do not represent assignments to CC0 explicitly except when that
69 is all an insn does, there is no LOG_LINKS entry in an insn that uses
70 the condition code for the insn that set the condition code.
71 Fortunately, these two insns must be consecutive.
72 Therefore, every JUMP_INSN is taken to have an implicit logical link
73 to the preceding insn. This is not quite right, since non-jumps can
74 also use the condition code; but in practice such insns would not
75 combine anyway. */
76
230d793d 77#include "config.h"
4f90e4a0 78#ifdef __STDC__
04fe4385 79#include <stdarg.h>
4f90e4a0 80#else
04fe4385 81#include <varargs.h>
4f90e4a0 82#endif
dfa3449b 83
9c3b4c8b
RS
84/* Must precede rtl.h for FFS. */
85#include <stdio.h>
86
230d793d
RS
87#include "rtl.h"
88#include "flags.h"
89#include "regs.h"
55310dad 90#include "hard-reg-set.h"
230d793d
RS
91#include "expr.h"
92#include "basic-block.h"
93#include "insn-config.h"
94#include "insn-flags.h"
95#include "insn-codes.h"
96#include "insn-attr.h"
97#include "recog.h"
98#include "real.h"
99
100/* It is not safe to use ordinary gen_lowpart in combine.
101 Use gen_lowpart_for_combine instead. See comments there. */
102#define gen_lowpart dont_use_gen_lowpart_you_dummy
103
104/* Number of attempts to combine instructions in this function. */
105
106static int combine_attempts;
107
108/* Number of attempts that got as far as substitution in this function. */
109
110static int combine_merges;
111
112/* Number of instructions combined with added SETs in this function. */
113
114static int combine_extras;
115
116/* Number of instructions combined in this function. */
117
118static int combine_successes;
119
120/* Totals over entire compilation. */
121
122static int total_attempts, total_merges, total_extras, total_successes;
9210df58 123
ddd5a7c1 124/* Define a default value for REVERSIBLE_CC_MODE.
9210df58
RK
125 We can never assume that a condition code mode is safe to reverse unless
126 the md tells us so. */
127#ifndef REVERSIBLE_CC_MODE
128#define REVERSIBLE_CC_MODE(MODE) 0
129#endif
230d793d
RS
130\f
131/* Vector mapping INSN_UIDs to cuids.
5089e22e 132 The cuids are like uids but increase monotonically always.
230d793d
RS
133 Combine always uses cuids so that it can compare them.
134 But actually renumbering the uids, which we used to do,
135 proves to be a bad idea because it makes it hard to compare
136 the dumps produced by earlier passes with those from later passes. */
137
138static int *uid_cuid;
4255220d 139static int max_uid_cuid;
230d793d
RS
140
141/* Get the cuid of an insn. */
142
1427d6d2
RK
143#define INSN_CUID(INSN) \
144(INSN_UID (INSN) > max_uid_cuid ? insn_cuid (INSN) : uid_cuid[INSN_UID (INSN)])
230d793d
RS
145
146/* Maximum register number, which is the size of the tables below. */
147
148static int combine_max_regno;
149
150/* Record last point of death of (hard or pseudo) register n. */
151
152static rtx *reg_last_death;
153
154/* Record last point of modification of (hard or pseudo) register n. */
155
156static rtx *reg_last_set;
157
158/* Record the cuid of the last insn that invalidated memory
159 (anything that writes memory, and subroutine calls, but not pushes). */
160
161static int mem_last_set;
162
163/* Record the cuid of the last CALL_INSN
164 so we can tell whether a potential combination crosses any calls. */
165
166static int last_call_cuid;
167
168/* When `subst' is called, this is the insn that is being modified
169 (by combining in a previous insn). The PATTERN of this insn
170 is still the old pattern partially modified and it should not be
171 looked at, but this may be used to examine the successors of the insn
172 to judge whether a simplification is valid. */
173
174static rtx subst_insn;
175
0d9641d1
JW
176/* This is an insn that belongs before subst_insn, but is not currently
177 on the insn chain. */
178
179static rtx subst_prev_insn;
180
230d793d
RS
181/* This is the lowest CUID that `subst' is currently dealing with.
182 get_last_value will not return a value if the register was set at or
183 after this CUID. If not for this mechanism, we could get confused if
184 I2 or I1 in try_combine were an insn that used the old value of a register
185 to obtain a new value. In that case, we might erroneously get the
186 new value of the register when we wanted the old one. */
187
188static int subst_low_cuid;
189
6e25d159
RK
190/* This contains any hard registers that are used in newpat; reg_dead_at_p
191 must consider all these registers to be always live. */
192
193static HARD_REG_SET newpat_used_regs;
194
abe6e52f
RK
195/* This is an insn to which a LOG_LINKS entry has been added. If this
196 insn is the earlier than I2 or I3, combine should rescan starting at
197 that location. */
198
199static rtx added_links_insn;
200
0d4d42c3
RK
201/* Basic block number of the block in which we are performing combines. */
202static int this_basic_block;
230d793d
RS
203\f
204/* The next group of arrays allows the recording of the last value assigned
 205 to (hard or pseudo) register n. We use this information to see if an
5089e22e 206 operation being processed is redundant given a prior operation performed
230d793d
RS
207 on the register. For example, an `and' with a constant is redundant if
208 all the zero bits are already known to be turned off.
209
210 We use an approach similar to that used by cse, but change it in the
211 following ways:
212
213 (1) We do not want to reinitialize at each label.
214 (2) It is useful, but not critical, to know the actual value assigned
215 to a register. Often just its form is helpful.
216
217 Therefore, we maintain the following arrays:
218
219 reg_last_set_value the last value assigned
220 reg_last_set_label records the value of label_tick when the
221 register was assigned
222 reg_last_set_table_tick records the value of label_tick when a
223 value using the register is assigned
224 reg_last_set_invalid set to non-zero when it is not valid
225 to use the value of this register in some
226 register's value
227
228 To understand the usage of these tables, it is important to understand
229 the distinction between the value in reg_last_set_value being valid
230 and the register being validly contained in some other expression in the
231 table.
232
233 Entry I in reg_last_set_value is valid if it is non-zero, and either
234 reg_n_sets[i] is 1 or reg_last_set_label[i] == label_tick.
235
236 Register I may validly appear in any expression returned for the value
237 of another register if reg_n_sets[i] is 1. It may also appear in the
238 value for register J if reg_last_set_label[i] < reg_last_set_label[j] or
239 reg_last_set_invalid[j] is zero.
240
241 If an expression is found in the table containing a register which may
242 not validly appear in an expression, the register is replaced by
243 something that won't match, (clobber (const_int 0)).
244
245 reg_last_set_invalid[i] is set non-zero when register I is being assigned
246 to and reg_last_set_table_tick[i] == label_tick. */
247
0f41302f 248/* Record last value assigned to (hard or pseudo) register n. */
230d793d
RS
249
250static rtx *reg_last_set_value;
251
252/* Record the value of label_tick when the value for register n is placed in
253 reg_last_set_value[n]. */
254
568356af 255static int *reg_last_set_label;
230d793d
RS
256
257/* Record the value of label_tick when an expression involving register n
0f41302f 258 is placed in reg_last_set_value. */
230d793d 259
568356af 260static int *reg_last_set_table_tick;
230d793d
RS
261
262/* Set non-zero if references to register n in expressions should not be
263 used. */
264
265static char *reg_last_set_invalid;
266
0f41302f 267/* Incremented for each label. */
230d793d 268
568356af 269static int label_tick;
230d793d
RS
270
271/* Some registers that are set more than once and used in more than one
272 basic block are nevertheless always set in similar ways. For example,
273 a QImode register may be loaded from memory in two places on a machine
274 where byte loads zero extend.
275
951553af 276 We record in the following array what we know about the nonzero
230d793d
RS
277 bits of a register, specifically which bits are known to be zero.
278
279 If an entry is zero, it means that we don't know anything special. */
280
55310dad 281static unsigned HOST_WIDE_INT *reg_nonzero_bits;
230d793d 282
951553af 283/* Mode used to compute significance in reg_nonzero_bits. It is the largest
5f4f0e22 284 integer mode that can fit in HOST_BITS_PER_WIDE_INT. */
230d793d 285
951553af 286static enum machine_mode nonzero_bits_mode;
230d793d 287
d0ab8cd3
RK
288/* Nonzero if we know that a register has some leading bits that are always
289 equal to the sign bit. */
290
291static char *reg_sign_bit_copies;
292
951553af 293/* Nonzero when reg_nonzero_bits and reg_sign_bit_copies can be safely used.
1a26b032
RK
294 It is zero while computing them and after combine has completed. This
295 former test prevents propagating values based on previously set values,
296 which can be incorrect if a variable is modified in a loop. */
230d793d 297
951553af 298static int nonzero_sign_valid;
55310dad
RK
299
300/* These arrays are maintained in parallel with reg_last_set_value
301 and are used to store the mode in which the register was last set,
302 the bits that were known to be zero when it was last set, and the
303 number of sign bits copies it was known to have when it was last set. */
304
305static enum machine_mode *reg_last_set_mode;
306static unsigned HOST_WIDE_INT *reg_last_set_nonzero_bits;
307static char *reg_last_set_sign_bit_copies;
230d793d
RS
308\f
309/* Record one modification to rtl structure
310 to be undone by storing old_contents into *where.
311 is_int is 1 if the contents are an int. */
312
313struct undo
314{
241cea85 315 struct undo *next;
230d793d 316 int is_int;
f5393ab9
RS
317 union {rtx r; int i;} old_contents;
318 union {rtx *r; int *i;} where;
230d793d
RS
319};
320
321/* Record a bunch of changes to be undone, up to MAX_UNDO of them.
322 num_undo says how many are currently recorded.
323
324 storage is nonzero if we must undo the allocation of new storage.
325 The value of storage is what to pass to obfree.
326
327 other_insn is nonzero if we have modified some other insn in the process
241cea85 328 of working on subst_insn. It must be verified too.
230d793d 329
241cea85
RK
330 previous_undos is the value of undobuf.undos when we started processing
 331 this substitution. This will prevent gen_rtx_combine from re-using a piece
332 from the previous expression. Doing so can produce circular rtl
333 structures. */
230d793d
RS
334
335struct undobuf
336{
230d793d 337 char *storage;
241cea85
RK
338 struct undo *undos;
339 struct undo *frees;
340 struct undo *previous_undos;
230d793d
RS
341 rtx other_insn;
342};
343
344static struct undobuf undobuf;
345
cc876596 346/* Substitute NEWVAL, an rtx expression, into INTO, a place in some
230d793d 347 insn. The substitution can be undone by undo_all. If INTO is already
cc876596
RK
348 set to NEWVAL, do not record this change. Because computing NEWVAL might
349 also call SUBST, we have to compute it before we put anything into
350 the undo table. */
230d793d
RS
351
352#define SUBST(INTO, NEWVAL) \
241cea85
RK
353 do { rtx _new = (NEWVAL); \
354 struct undo *_buf; \
355 \
356 if (undobuf.frees) \
357 _buf = undobuf.frees, undobuf.frees = _buf->next; \
358 else \
359 _buf = (struct undo *) xmalloc (sizeof (struct undo)); \
360 \
361 _buf->is_int = 0; \
362 _buf->where.r = &INTO; \
363 _buf->old_contents.r = INTO; \
364 INTO = _new; \
365 if (_buf->old_contents.r == INTO) \
366 _buf->next = undobuf.frees, undobuf.frees = _buf; \
367 else \
368 _buf->next = undobuf.undos, undobuf.undos = _buf; \
230d793d
RS
369 } while (0)
370
241cea85
RK
371/* Similar to SUBST, but NEWVAL is an int expression. Note that substitution
372 for the value of a HOST_WIDE_INT value (including CONST_INT) is
373 not safe. */
230d793d
RS
374
375#define SUBST_INT(INTO, NEWVAL) \
241cea85
RK
376 do { struct undo *_buf; \
377 \
378 if (undobuf.frees) \
379 _buf = undobuf.frees, undobuf.frees = _buf->next; \
380 else \
381 _buf = (struct undo *) xmalloc (sizeof (struct undo)); \
382 \
383 _buf->is_int = 1; \
384 _buf->where.i = (int *) &INTO; \
385 _buf->old_contents.i = INTO; \
386 INTO = NEWVAL; \
387 if (_buf->old_contents.i == INTO) \
388 _buf->next = undobuf.frees, undobuf.frees = _buf; \
389 else \
390 _buf->next = undobuf.undos, undobuf.undos = _buf; \
230d793d
RS
391 } while (0)
392
393/* Number of times the pseudo being substituted for
394 was found and replaced. */
395
396static int n_occurrences;
397
c5ad722c
RK
398static void init_reg_last_arrays PROTO((void));
399static void setup_incoming_promotions PROTO((void));
fe2db4fb
RK
400static void set_nonzero_bits_and_sign_copies PROTO((rtx, rtx));
401static int can_combine_p PROTO((rtx, rtx, rtx, rtx, rtx *, rtx *));
402static int combinable_i3pat PROTO((rtx, rtx *, rtx, rtx, int, rtx *));
403static rtx try_combine PROTO((rtx, rtx, rtx));
404static void undo_all PROTO((void));
405static rtx *find_split_point PROTO((rtx *, rtx));
406static rtx subst PROTO((rtx, rtx, rtx, int, int));
8079805d
RK
407static rtx simplify_rtx PROTO((rtx, enum machine_mode, int, int));
408static rtx simplify_if_then_else PROTO((rtx));
409static rtx simplify_set PROTO((rtx));
410static rtx simplify_logical PROTO((rtx, int));
fe2db4fb
RK
411static rtx expand_compound_operation PROTO((rtx));
412static rtx expand_field_assignment PROTO((rtx));
413static rtx make_extraction PROTO((enum machine_mode, rtx, int, rtx, int,
414 int, int, int));
71923da7 415static rtx extract_left_shift PROTO((rtx, int));
fe2db4fb
RK
416static rtx make_compound_operation PROTO((rtx, enum rtx_code));
417static int get_pos_from_mask PROTO((unsigned HOST_WIDE_INT, int *));
6139ff20 418static rtx force_to_mode PROTO((rtx, enum machine_mode,
e3d616e3 419 unsigned HOST_WIDE_INT, rtx, int));
abe6e52f 420static rtx if_then_else_cond PROTO((rtx, rtx *, rtx *));
fe2db4fb 421static rtx known_cond PROTO((rtx, enum rtx_code, rtx, rtx));
e11fa86f 422static int rtx_equal_for_field_assignment_p PROTO((rtx, rtx));
fe2db4fb
RK
423static rtx make_field_assignment PROTO((rtx));
424static rtx apply_distributive_law PROTO((rtx));
425static rtx simplify_and_const_int PROTO((rtx, enum machine_mode, rtx,
426 unsigned HOST_WIDE_INT));
427static unsigned HOST_WIDE_INT nonzero_bits PROTO((rtx, enum machine_mode));
428static int num_sign_bit_copies PROTO((rtx, enum machine_mode));
429static int merge_outer_ops PROTO((enum rtx_code *, HOST_WIDE_INT *,
430 enum rtx_code, HOST_WIDE_INT,
431 enum machine_mode, int *));
432static rtx simplify_shift_const PROTO((rtx, enum rtx_code, enum machine_mode,
433 rtx, int));
a29ca9db 434static int recog_for_combine PROTO((rtx *, rtx, rtx *, int *));
fe2db4fb 435static rtx gen_lowpart_for_combine PROTO((enum machine_mode, rtx));
d18225c4 436static rtx gen_rtx_combine PVPROTO((enum rtx_code code, enum machine_mode mode,
4f90e4a0 437 ...));
fe2db4fb
RK
438static rtx gen_binary PROTO((enum rtx_code, enum machine_mode,
439 rtx, rtx));
0c1c8ea6
RK
440static rtx gen_unary PROTO((enum rtx_code, enum machine_mode,
441 enum machine_mode, rtx));
fe2db4fb
RK
442static enum rtx_code simplify_comparison PROTO((enum rtx_code, rtx *, rtx *));
443static int reversible_comparison_p PROTO((rtx));
444static void update_table_tick PROTO((rtx));
445static void record_value_for_reg PROTO((rtx, rtx, rtx));
446static void record_dead_and_set_regs_1 PROTO((rtx, rtx));
447static void record_dead_and_set_regs PROTO((rtx));
448static int get_last_value_validate PROTO((rtx *, int, int));
449static rtx get_last_value PROTO((rtx));
450static int use_crosses_set_p PROTO((rtx, int));
451static void reg_dead_at_p_1 PROTO((rtx, rtx));
452static int reg_dead_at_p PROTO((rtx, rtx));
6eb12cef 453static void move_deaths PROTO((rtx, rtx, int, rtx, rtx *));
fe2db4fb
RK
454static int reg_bitfield_target_p PROTO((rtx, rtx));
455static void distribute_notes PROTO((rtx, rtx, rtx, rtx, rtx, rtx));
456static void distribute_links PROTO((rtx));
6e25d159 457static void mark_used_regs_combine PROTO((rtx));
1427d6d2 458static int insn_cuid PROTO((rtx));
230d793d
RS
459\f
460/* Main entry point for combiner. F is the first insn of the function.
461 NREGS is the first unused pseudo-reg number. */
462
463void
464combine_instructions (f, nregs)
465 rtx f;
466 int nregs;
467{
468 register rtx insn, next, prev;
469 register int i;
470 register rtx links, nextlinks;
471
472 combine_attempts = 0;
473 combine_merges = 0;
474 combine_extras = 0;
475 combine_successes = 0;
241cea85 476 undobuf.undos = undobuf.previous_undos = 0;
230d793d
RS
477
478 combine_max_regno = nregs;
479
ef026f91
RS
480 reg_nonzero_bits
481 = (unsigned HOST_WIDE_INT *) alloca (nregs * sizeof (HOST_WIDE_INT));
482 reg_sign_bit_copies = (char *) alloca (nregs * sizeof (char));
483
4c9a05bc 484 bzero ((char *) reg_nonzero_bits, nregs * sizeof (HOST_WIDE_INT));
ef026f91
RS
485 bzero (reg_sign_bit_copies, nregs * sizeof (char));
486
230d793d
RS
487 reg_last_death = (rtx *) alloca (nregs * sizeof (rtx));
488 reg_last_set = (rtx *) alloca (nregs * sizeof (rtx));
489 reg_last_set_value = (rtx *) alloca (nregs * sizeof (rtx));
568356af
RK
490 reg_last_set_table_tick = (int *) alloca (nregs * sizeof (int));
491 reg_last_set_label = (int *) alloca (nregs * sizeof (int));
5f4f0e22 492 reg_last_set_invalid = (char *) alloca (nregs * sizeof (char));
55310dad
RK
493 reg_last_set_mode
494 = (enum machine_mode *) alloca (nregs * sizeof (enum machine_mode));
495 reg_last_set_nonzero_bits
496 = (unsigned HOST_WIDE_INT *) alloca (nregs * sizeof (HOST_WIDE_INT));
497 reg_last_set_sign_bit_copies
498 = (char *) alloca (nregs * sizeof (char));
499
ef026f91 500 init_reg_last_arrays ();
230d793d
RS
501
502 init_recog_no_volatile ();
503
504 /* Compute maximum uid value so uid_cuid can be allocated. */
505
506 for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
507 if (INSN_UID (insn) > i)
508 i = INSN_UID (insn);
509
510 uid_cuid = (int *) alloca ((i + 1) * sizeof (int));
4255220d 511 max_uid_cuid = i;
230d793d 512
951553af 513 nonzero_bits_mode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);
230d793d 514
951553af 515 /* Don't use reg_nonzero_bits when computing it. This can cause problems
230d793d
RS
516 when, for example, we have j <<= 1 in a loop. */
517
951553af 518 nonzero_sign_valid = 0;
230d793d
RS
519
520 /* Compute the mapping from uids to cuids.
521 Cuids are numbers assigned to insns, like uids,
522 except that cuids increase monotonically through the code.
523
524 Scan all SETs and see if we can deduce anything about what
951553af 525 bits are known to be zero for some registers and how many copies
d79f08e0
RK
526 of the sign bit are known to exist for those registers.
527
528 Also set any known values so that we can use it while searching
529 for what bits are known to be set. */
530
531 label_tick = 1;
230d793d 532
bcd49eb7
JW
533 /* We need to initialize it here, because record_dead_and_set_regs may call
534 get_last_value. */
535 subst_prev_insn = NULL_RTX;
536
7988fd36
RK
537 setup_incoming_promotions ();
538
230d793d
RS
539 for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
540 {
4255220d 541 uid_cuid[INSN_UID (insn)] = ++i;
d79f08e0
RK
542 subst_low_cuid = i;
543 subst_insn = insn;
544
230d793d 545 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
d79f08e0
RK
546 {
547 note_stores (PATTERN (insn), set_nonzero_bits_and_sign_copies);
548 record_dead_and_set_regs (insn);
2dab894a
RK
549
550#ifdef AUTO_INC_DEC
551 for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
552 if (REG_NOTE_KIND (links) == REG_INC)
553 set_nonzero_bits_and_sign_copies (XEXP (links, 0), NULL_RTX);
554#endif
d79f08e0
RK
555 }
556
557 if (GET_CODE (insn) == CODE_LABEL)
558 label_tick++;
230d793d
RS
559 }
560
951553af 561 nonzero_sign_valid = 1;
230d793d
RS
562
563 /* Now scan all the insns in forward order. */
564
0d4d42c3 565 this_basic_block = -1;
230d793d
RS
566 label_tick = 1;
567 last_call_cuid = 0;
568 mem_last_set = 0;
ef026f91 569 init_reg_last_arrays ();
7988fd36
RK
570 setup_incoming_promotions ();
571
230d793d
RS
572 for (insn = f; insn; insn = next ? next : NEXT_INSN (insn))
573 {
574 next = 0;
575
0d4d42c3 576 /* If INSN starts a new basic block, update our basic block number. */
f085c9cd 577 if (this_basic_block + 1 < n_basic_blocks
0d4d42c3
RK
578 && basic_block_head[this_basic_block + 1] == insn)
579 this_basic_block++;
580
230d793d
RS
581 if (GET_CODE (insn) == CODE_LABEL)
582 label_tick++;
583
0d4d42c3 584 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
230d793d
RS
585 {
586 /* Try this insn with each insn it links back to. */
587
588 for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
5f4f0e22 589 if ((next = try_combine (insn, XEXP (links, 0), NULL_RTX)) != 0)
230d793d
RS
590 goto retry;
591
592 /* Try each sequence of three linked insns ending with this one. */
593
594 for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
595 for (nextlinks = LOG_LINKS (XEXP (links, 0)); nextlinks;
596 nextlinks = XEXP (nextlinks, 1))
597 if ((next = try_combine (insn, XEXP (links, 0),
598 XEXP (nextlinks, 0))) != 0)
599 goto retry;
600
601#ifdef HAVE_cc0
602 /* Try to combine a jump insn that uses CC0
603 with a preceding insn that sets CC0, and maybe with its
604 logical predecessor as well.
605 This is how we make decrement-and-branch insns.
606 We need this special code because data flow connections
607 via CC0 do not get entered in LOG_LINKS. */
608
609 if (GET_CODE (insn) == JUMP_INSN
610 && (prev = prev_nonnote_insn (insn)) != 0
611 && GET_CODE (prev) == INSN
612 && sets_cc0_p (PATTERN (prev)))
613 {
5f4f0e22 614 if ((next = try_combine (insn, prev, NULL_RTX)) != 0)
230d793d
RS
615 goto retry;
616
617 for (nextlinks = LOG_LINKS (prev); nextlinks;
618 nextlinks = XEXP (nextlinks, 1))
619 if ((next = try_combine (insn, prev,
620 XEXP (nextlinks, 0))) != 0)
621 goto retry;
622 }
623
624 /* Do the same for an insn that explicitly references CC0. */
625 if (GET_CODE (insn) == INSN
626 && (prev = prev_nonnote_insn (insn)) != 0
627 && GET_CODE (prev) == INSN
628 && sets_cc0_p (PATTERN (prev))
629 && GET_CODE (PATTERN (insn)) == SET
630 && reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (insn))))
631 {
5f4f0e22 632 if ((next = try_combine (insn, prev, NULL_RTX)) != 0)
230d793d
RS
633 goto retry;
634
635 for (nextlinks = LOG_LINKS (prev); nextlinks;
636 nextlinks = XEXP (nextlinks, 1))
637 if ((next = try_combine (insn, prev,
638 XEXP (nextlinks, 0))) != 0)
639 goto retry;
640 }
641
642 /* Finally, see if any of the insns that this insn links to
643 explicitly references CC0. If so, try this insn, that insn,
5089e22e 644 and its predecessor if it sets CC0. */
230d793d
RS
645 for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
646 if (GET_CODE (XEXP (links, 0)) == INSN
647 && GET_CODE (PATTERN (XEXP (links, 0))) == SET
648 && reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (XEXP (links, 0))))
649 && (prev = prev_nonnote_insn (XEXP (links, 0))) != 0
650 && GET_CODE (prev) == INSN
651 && sets_cc0_p (PATTERN (prev))
652 && (next = try_combine (insn, XEXP (links, 0), prev)) != 0)
653 goto retry;
654#endif
655
656 /* Try combining an insn with two different insns whose results it
657 uses. */
658 for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
659 for (nextlinks = XEXP (links, 1); nextlinks;
660 nextlinks = XEXP (nextlinks, 1))
661 if ((next = try_combine (insn, XEXP (links, 0),
662 XEXP (nextlinks, 0))) != 0)
663 goto retry;
664
665 if (GET_CODE (insn) != NOTE)
666 record_dead_and_set_regs (insn);
667
668 retry:
669 ;
670 }
671 }
672
673 total_attempts += combine_attempts;
674 total_merges += combine_merges;
675 total_extras += combine_extras;
676 total_successes += combine_successes;
1a26b032 677
951553af 678 nonzero_sign_valid = 0;
230d793d 679}
ef026f91
RS
680
681/* Wipe the reg_last_xxx arrays in preparation for another pass. */
682
683static void
684init_reg_last_arrays ()
685{
686 int nregs = combine_max_regno;
687
4c9a05bc
RK
688 bzero ((char *) reg_last_death, nregs * sizeof (rtx));
689 bzero ((char *) reg_last_set, nregs * sizeof (rtx));
690 bzero ((char *) reg_last_set_value, nregs * sizeof (rtx));
691 bzero ((char *) reg_last_set_table_tick, nregs * sizeof (int));
692 bzero ((char *) reg_last_set_label, nregs * sizeof (int));
ef026f91 693 bzero (reg_last_set_invalid, nregs * sizeof (char));
4c9a05bc
RK
694 bzero ((char *) reg_last_set_mode, nregs * sizeof (enum machine_mode));
695 bzero ((char *) reg_last_set_nonzero_bits, nregs * sizeof (HOST_WIDE_INT));
ef026f91
RS
696 bzero (reg_last_set_sign_bit_copies, nregs * sizeof (char));
697}
230d793d 698\f
7988fd36
RK
/* Set up any promoted values for incoming argument registers.

   For each hard register that carries a promoted incoming argument,
   record at the function's first insn that the register holds a
   ZERO_EXTEND or SIGN_EXTEND (per its signedness) of some unknown value
   of the original, narrower mode; the unknown value is represented by a
   CLOBBER.  A no-op unless the target defines PROMOTE_FUNCTION_ARGS.  */

static void
setup_incoming_promotions ()
{
#ifdef PROMOTE_FUNCTION_ARGS
  int regno;
  rtx reg;
  enum machine_mode mode;
  int unsignedp;
  rtx first = get_insns ();

  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if (FUNCTION_ARG_REGNO_P (regno)
	&& (reg = promoted_input_arg (regno, &mode, &unsignedp)) != 0)
      record_value_for_reg (reg, first,
			    gen_rtx (unsignedp ? ZERO_EXTEND : SIGN_EXTEND,
				     GET_MODE (reg),
				     gen_rtx (CLOBBER, mode, const0_rtx)));
#endif
}
720\f
91102d5a
RK
721/* Called via note_stores. If X is a pseudo that is narrower than
722 HOST_BITS_PER_WIDE_INT and is being set, record what bits are known zero.
230d793d
RS
723
724 If we are setting only a portion of X and we can't figure out what
725 portion, assume all bits will be used since we don't know what will
d0ab8cd3
RK
726 be happening.
727
728 Similarly, set how many bits of X are known to be copies of the sign bit
729 at all locations in the function. This is the smallest number implied
730 by any set of X. */
230d793d
RS
731
732static void
951553af 733set_nonzero_bits_and_sign_copies (x, set)
230d793d
RS
734 rtx x;
735 rtx set;
736{
d0ab8cd3
RK
737 int num;
738
230d793d
RS
739 if (GET_CODE (x) == REG
740 && REGNO (x) >= FIRST_PSEUDO_REGISTER
e8095e80
RK
741 /* If this register is undefined at the start of the file, we can't
742 say what its contents were. */
743 && ! (basic_block_live_at_start[0][REGNO (x) / REGSET_ELT_BITS]
744 & ((REGSET_ELT_TYPE) 1 << (REGNO (x) % REGSET_ELT_BITS)))
5f4f0e22 745 && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT)
230d793d 746 {
2dab894a 747 if (set == 0 || GET_CODE (set) == CLOBBER)
e8095e80
RK
748 {
749 reg_nonzero_bits[REGNO (x)] = GET_MODE_MASK (GET_MODE (x));
88306d12 750 reg_sign_bit_copies[REGNO (x)] = 1;
e8095e80
RK
751 return;
752 }
230d793d
RS
753
754 /* If this is a complex assignment, see if we can convert it into a
5089e22e 755 simple assignment. */
230d793d 756 set = expand_field_assignment (set);
d79f08e0
RK
757
758 /* If this is a simple assignment, or we have a paradoxical SUBREG,
759 set what we know about X. */
760
761 if (SET_DEST (set) == x
762 || (GET_CODE (SET_DEST (set)) == SUBREG
705c7b3b
JW
763 && (GET_MODE_SIZE (GET_MODE (SET_DEST (set)))
764 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (set)))))
d79f08e0 765 && SUBREG_REG (SET_DEST (set)) == x))
d0ab8cd3 766 {
9afa3d54
RK
767 rtx src = SET_SRC (set);
768
769#ifdef SHORT_IMMEDIATES_SIGN_EXTEND
770 /* If X is narrower than a word and SRC is a non-negative
771 constant that would appear negative in the mode of X,
772 sign-extend it for use in reg_nonzero_bits because some
773 machines (maybe most) will actually do the sign-extension
774 and this is the conservative approach.
775
776 ??? For 2.5, try to tighten up the MD files in this regard
777 instead of this kludge. */
778
779 if (GET_MODE_BITSIZE (GET_MODE (x)) < BITS_PER_WORD
780 && GET_CODE (src) == CONST_INT
781 && INTVAL (src) > 0
782 && 0 != (INTVAL (src)
783 & ((HOST_WIDE_INT) 1
9e69be8c 784 << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
9afa3d54
RK
785 src = GEN_INT (INTVAL (src)
786 | ((HOST_WIDE_INT) (-1)
787 << GET_MODE_BITSIZE (GET_MODE (x))));
788#endif
789
951553af 790 reg_nonzero_bits[REGNO (x)]
9afa3d54 791 |= nonzero_bits (src, nonzero_bits_mode);
d0ab8cd3
RK
792 num = num_sign_bit_copies (SET_SRC (set), GET_MODE (x));
793 if (reg_sign_bit_copies[REGNO (x)] == 0
794 || reg_sign_bit_copies[REGNO (x)] > num)
795 reg_sign_bit_copies[REGNO (x)] = num;
796 }
230d793d 797 else
d0ab8cd3 798 {
951553af 799 reg_nonzero_bits[REGNO (x)] = GET_MODE_MASK (GET_MODE (x));
88306d12 800 reg_sign_bit_copies[REGNO (x)] = 1;
d0ab8cd3 801 }
230d793d
RS
802 }
803}
804\f
805/* See if INSN can be combined into I3. PRED and SUCC are optionally
806 insns that were previously combined into I3 or that will be combined
807 into the merger of INSN and I3.
808
809 Return 0 if the combination is not allowed for any reason.
810
811 If the combination is allowed, *PDEST will be set to the single
812 destination of INSN and *PSRC to the single source, and this function
813 will return 1. */
814
815static int
816can_combine_p (insn, i3, pred, succ, pdest, psrc)
817 rtx insn;
818 rtx i3;
819 rtx pred, succ;
820 rtx *pdest, *psrc;
821{
822 int i;
823 rtx set = 0, src, dest;
824 rtx p, link;
825 int all_adjacent = (succ ? (next_active_insn (insn) == succ
826 && next_active_insn (succ) == i3)
827 : next_active_insn (insn) == i3);
828
829 /* Can combine only if previous insn is a SET of a REG, a SUBREG or CC0.
830 or a PARALLEL consisting of such a SET and CLOBBERs.
831
832 If INSN has CLOBBER parallel parts, ignore them for our processing.
833 By definition, these happen during the execution of the insn. When it
834 is merged with another insn, all bets are off. If they are, in fact,
835 needed and aren't also supplied in I3, they may be added by
836 recog_for_combine. Otherwise, it won't match.
837
838 We can also ignore a SET whose SET_DEST is mentioned in a REG_UNUSED
839 note.
840
841 Get the source and destination of INSN. If more than one, can't
842 combine. */
843
844 if (GET_CODE (PATTERN (insn)) == SET)
845 set = PATTERN (insn);
846 else if (GET_CODE (PATTERN (insn)) == PARALLEL
847 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
848 {
849 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
850 {
851 rtx elt = XVECEXP (PATTERN (insn), 0, i);
852
853 switch (GET_CODE (elt))
854 {
855 /* We can ignore CLOBBERs. */
856 case CLOBBER:
857 break;
858
859 case SET:
860 /* Ignore SETs whose result isn't used but not those that
861 have side-effects. */
862 if (find_reg_note (insn, REG_UNUSED, SET_DEST (elt))
863 && ! side_effects_p (elt))
864 break;
865
866 /* If we have already found a SET, this is a second one and
867 so we cannot combine with this insn. */
868 if (set)
869 return 0;
870
871 set = elt;
872 break;
873
874 default:
875 /* Anything else means we can't combine. */
876 return 0;
877 }
878 }
879
880 if (set == 0
881 /* If SET_SRC is an ASM_OPERANDS we can't throw away these CLOBBERs,
882 so don't do anything with it. */
883 || GET_CODE (SET_SRC (set)) == ASM_OPERANDS)
884 return 0;
885 }
886 else
887 return 0;
888
889 if (set == 0)
890 return 0;
891
892 set = expand_field_assignment (set);
893 src = SET_SRC (set), dest = SET_DEST (set);
894
895 /* Don't eliminate a store in the stack pointer. */
896 if (dest == stack_pointer_rtx
230d793d
RS
897 /* If we couldn't eliminate a field assignment, we can't combine. */
898 || GET_CODE (dest) == ZERO_EXTRACT || GET_CODE (dest) == STRICT_LOW_PART
899 /* Don't combine with an insn that sets a register to itself if it has
900 a REG_EQUAL note. This may be part of a REG_NO_CONFLICT sequence. */
5f4f0e22 901 || (rtx_equal_p (src, dest) && find_reg_note (insn, REG_EQUAL, NULL_RTX))
230d793d
RS
902 /* Can't merge a function call. */
903 || GET_CODE (src) == CALL
cd5e8f1f 904 /* Don't eliminate a function call argument. */
4dca5ec5
RK
905 || (GET_CODE (i3) == CALL_INSN
906 && (find_reg_fusage (i3, USE, dest)
907 || (GET_CODE (dest) == REG
908 && REGNO (dest) < FIRST_PSEUDO_REGISTER
909 && global_regs[REGNO (dest)])))
230d793d
RS
910 /* Don't substitute into an incremented register. */
911 || FIND_REG_INC_NOTE (i3, dest)
912 || (succ && FIND_REG_INC_NOTE (succ, dest))
913 /* Don't combine the end of a libcall into anything. */
5f4f0e22 914 || find_reg_note (insn, REG_RETVAL, NULL_RTX)
230d793d
RS
915 /* Make sure that DEST is not used after SUCC but before I3. */
916 || (succ && ! all_adjacent
917 && reg_used_between_p (dest, succ, i3))
918 /* Make sure that the value that is to be substituted for the register
919 does not use any registers whose values alter in between. However,
920 If the insns are adjacent, a use can't cross a set even though we
921 think it might (this can happen for a sequence of insns each setting
922 the same destination; reg_last_set of that register might point to
d81481d3
RK
923 a NOTE). If INSN has a REG_EQUIV note, the register is always
924 equivalent to the memory so the substitution is valid even if there
925 are intervening stores. Also, don't move a volatile asm or
926 UNSPEC_VOLATILE across any other insns. */
230d793d 927 || (! all_adjacent
d81481d3
RK
928 && (((GET_CODE (src) != MEM
929 || ! find_reg_note (insn, REG_EQUIV, src))
930 && use_crosses_set_p (src, INSN_CUID (insn)))
a66a10c7
RS
931 || (GET_CODE (src) == ASM_OPERANDS && MEM_VOLATILE_P (src))
932 || GET_CODE (src) == UNSPEC_VOLATILE))
230d793d
RS
933 /* If there is a REG_NO_CONFLICT note for DEST in I3 or SUCC, we get
934 better register allocation by not doing the combine. */
935 || find_reg_note (i3, REG_NO_CONFLICT, dest)
936 || (succ && find_reg_note (succ, REG_NO_CONFLICT, dest))
937 /* Don't combine across a CALL_INSN, because that would possibly
938 change whether the life span of some REGs crosses calls or not,
939 and it is a pain to update that information.
940 Exception: if source is a constant, moving it later can't hurt.
941 Accept that special case, because it helps -fforce-addr a lot. */
942 || (INSN_CUID (insn) < last_call_cuid && ! CONSTANT_P (src)))
943 return 0;
944
945 /* DEST must either be a REG or CC0. */
946 if (GET_CODE (dest) == REG)
947 {
948 /* If register alignment is being enforced for multi-word items in all
949 cases except for parameters, it is possible to have a register copy
950 insn referencing a hard register that is not allowed to contain the
951 mode being copied and which would not be valid as an operand of most
952 insns. Eliminate this problem by not combining with such an insn.
953
954 Also, on some machines we don't want to extend the life of a hard
4d2c432d
RK
955 register.
956
957 This is the same test done in can_combine except that we don't test
958 if SRC is a CALL operation to permit a hard register with
959 SMALL_REGISTER_CLASSES, and that we have to take all_adjacent
960 into account. */
230d793d
RS
961
962 if (GET_CODE (src) == REG
963 && ((REGNO (dest) < FIRST_PSEUDO_REGISTER
964 && ! HARD_REGNO_MODE_OK (REGNO (dest), GET_MODE (dest)))
c448a43e
RK
965 /* Don't extend the life of a hard register unless it is
966 user variable (if we have few registers) or it can't
967 fit into the desired register (meaning something special
ecd40809
RK
968 is going on).
969 Also avoid substituting a return register into I3, because
970 reload can't handle a conflict with constraints of other
971 inputs. */
230d793d 972 || (REGNO (src) < FIRST_PSEUDO_REGISTER
c448a43e
RK
973 && (! HARD_REGNO_MODE_OK (REGNO (src), GET_MODE (src))
974#ifdef SMALL_REGISTER_CLASSES
f95182a4
ILT
975 || (SMALL_REGISTER_CLASSES
976 && ((! all_adjacent && ! REG_USERVAR_P (src))
977 || (FUNCTION_VALUE_REGNO_P (REGNO (src))
978 && ! REG_USERVAR_P (src))))
230d793d 979#endif
c448a43e 980 ))))
230d793d
RS
981 return 0;
982 }
983 else if (GET_CODE (dest) != CC0)
984 return 0;
985
5f96750d
RS
986 /* Don't substitute for a register intended as a clobberable operand.
987 Similarly, don't substitute an expression containing a register that
988 will be clobbered in I3. */
230d793d
RS
989 if (GET_CODE (PATTERN (i3)) == PARALLEL)
990 for (i = XVECLEN (PATTERN (i3), 0) - 1; i >= 0; i--)
991 if (GET_CODE (XVECEXP (PATTERN (i3), 0, i)) == CLOBBER
5f96750d
RS
992 && (reg_overlap_mentioned_p (XEXP (XVECEXP (PATTERN (i3), 0, i), 0),
993 src)
994 || rtx_equal_p (XEXP (XVECEXP (PATTERN (i3), 0, i), 0), dest)))
230d793d
RS
995 return 0;
996
997 /* If INSN contains anything volatile, or is an `asm' (whether volatile
998 or not), reject, unless nothing volatile comes between it and I3,
999 with the exception of SUCC. */
1000
1001 if (GET_CODE (src) == ASM_OPERANDS || volatile_refs_p (src))
1002 for (p = NEXT_INSN (insn); p != i3; p = NEXT_INSN (p))
1003 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
1004 && p != succ && volatile_refs_p (PATTERN (p)))
1005 return 0;
1006
4b2cb4a2
RS
1007 /* If there are any volatile insns between INSN and I3, reject, because
1008 they might affect machine state. */
1009
1010 for (p = NEXT_INSN (insn); p != i3; p = NEXT_INSN (p))
1011 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
1012 && p != succ && volatile_insn_p (PATTERN (p)))
1013 return 0;
1014
230d793d
RS
1015 /* If INSN or I2 contains an autoincrement or autodecrement,
1016 make sure that register is not used between there and I3,
1017 and not already used in I3 either.
1018 Also insist that I3 not be a jump; if it were one
1019 and the incremented register were spilled, we would lose. */
1020
1021#ifdef AUTO_INC_DEC
1022 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1023 if (REG_NOTE_KIND (link) == REG_INC
1024 && (GET_CODE (i3) == JUMP_INSN
1025 || reg_used_between_p (XEXP (link, 0), insn, i3)
1026 || reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i3))))
1027 return 0;
1028#endif
1029
1030#ifdef HAVE_cc0
1031 /* Don't combine an insn that follows a CC0-setting insn.
1032 An insn that uses CC0 must not be separated from the one that sets it.
1033 We do, however, allow I2 to follow a CC0-setting insn if that insn
1034 is passed as I1; in that case it will be deleted also.
1035 We also allow combining in this case if all the insns are adjacent
1036 because that would leave the two CC0 insns adjacent as well.
1037 It would be more logical to test whether CC0 occurs inside I1 or I2,
1038 but that would be much slower, and this ought to be equivalent. */
1039
1040 p = prev_nonnote_insn (insn);
1041 if (p && p != pred && GET_CODE (p) == INSN && sets_cc0_p (PATTERN (p))
1042 && ! all_adjacent)
1043 return 0;
1044#endif
1045
1046 /* If we get here, we have passed all the tests and the combination is
1047 to be allowed. */
1048
1049 *pdest = dest;
1050 *psrc = src;
1051
1052 return 1;
1053}
1054\f
1055/* LOC is the location within I3 that contains its pattern or the component
1056 of a PARALLEL of the pattern. We validate that it is valid for combining.
1057
1058 One problem is if I3 modifies its output, as opposed to replacing it
1059 entirely, we can't allow the output to contain I2DEST or I1DEST as doing
1060 so would produce an insn that is not equivalent to the original insns.
1061
1062 Consider:
1063
1064 (set (reg:DI 101) (reg:DI 100))
1065 (set (subreg:SI (reg:DI 101) 0) <foo>)
1066
1067 This is NOT equivalent to:
1068
1069 (parallel [(set (subreg:SI (reg:DI 100) 0) <foo>)
1070 (set (reg:DI 101) (reg:DI 100))])
1071
1072 Not only does this modify 100 (in which case it might still be valid
1073 if 100 were dead in I2), it sets 101 to the ORIGINAL value of 100.
1074
1075 We can also run into a problem if I2 sets a register that I1
1076 uses and I1 gets directly substituted into I3 (not via I2). In that
1077 case, we would be getting the wrong value of I2DEST into I3, so we
1078 must reject the combination. This case occurs when I2 and I1 both
1079 feed into I3, rather than when I1 feeds into I2, which feeds into I3.
1080 If I1_NOT_IN_SRC is non-zero, it means that finding I1 in the source
1081 of a SET must prevent combination from occurring.
1082
1083 On machines where SMALL_REGISTER_CLASSES is defined, we don't combine
c448a43e
RK
1084 if the destination of a SET is a hard register that isn't a user
1085 variable.
230d793d
RS
1086
1087 Before doing the above check, we first try to expand a field assignment
1088 into a set of logical operations.
1089
1090 If PI3_DEST_KILLED is non-zero, it is a pointer to a location in which
1091 we place a register that is both set and used within I3. If more than one
1092 such register is detected, we fail.
1093
1094 Return 1 if the combination is valid, zero otherwise. */
1095
1096static int
1097combinable_i3pat (i3, loc, i2dest, i1dest, i1_not_in_src, pi3dest_killed)
1098 rtx i3;
1099 rtx *loc;
1100 rtx i2dest;
1101 rtx i1dest;
1102 int i1_not_in_src;
1103 rtx *pi3dest_killed;
1104{
1105 rtx x = *loc;
1106
1107 if (GET_CODE (x) == SET)
1108 {
1109 rtx set = expand_field_assignment (x);
1110 rtx dest = SET_DEST (set);
1111 rtx src = SET_SRC (set);
1112 rtx inner_dest = dest, inner_src = src;
1113
1114 SUBST (*loc, set);
1115
1116 while (GET_CODE (inner_dest) == STRICT_LOW_PART
1117 || GET_CODE (inner_dest) == SUBREG
1118 || GET_CODE (inner_dest) == ZERO_EXTRACT)
1119 inner_dest = XEXP (inner_dest, 0);
1120
1121 /* We probably don't need this any more now that LIMIT_RELOAD_CLASS
1122 was added. */
1123#if 0
1124 while (GET_CODE (inner_src) == STRICT_LOW_PART
1125 || GET_CODE (inner_src) == SUBREG
1126 || GET_CODE (inner_src) == ZERO_EXTRACT)
1127 inner_src = XEXP (inner_src, 0);
1128
1129 /* If it is better that two different modes keep two different pseudos,
1130 avoid combining them. This avoids producing the following pattern
1131 on a 386:
1132 (set (subreg:SI (reg/v:QI 21) 0)
1133 (lshiftrt:SI (reg/v:SI 20)
1134 (const_int 24)))
1135 If that were made, reload could not handle the pair of
1136 reg 20/21, since it would try to get any GENERAL_REGS
1137 but some of them don't handle QImode. */
1138
1139 if (rtx_equal_p (inner_src, i2dest)
1140 && GET_CODE (inner_dest) == REG
1141 && ! MODES_TIEABLE_P (GET_MODE (i2dest), GET_MODE (inner_dest)))
1142 return 0;
1143#endif
1144
1145 /* Check for the case where I3 modifies its output, as
1146 discussed above. */
1147 if ((inner_dest != dest
1148 && (reg_overlap_mentioned_p (i2dest, inner_dest)
1149 || (i1dest && reg_overlap_mentioned_p (i1dest, inner_dest))))
3f508eca
RK
1150 /* This is the same test done in can_combine_p except that we
1151 allow a hard register with SMALL_REGISTER_CLASSES if SRC is a
6d101824
RK
1152 CALL operation.
1153 Moreover, we can't test all_adjacent; we don't have to, since
1154 this instruction will stay in place, thus we are not considering
1155 to increase the lifetime of INNER_DEST. */
230d793d 1156 || (GET_CODE (inner_dest) == REG
dfbe1b2f 1157 && REGNO (inner_dest) < FIRST_PSEUDO_REGISTER
c448a43e
RK
1158 && (! HARD_REGNO_MODE_OK (REGNO (inner_dest),
1159 GET_MODE (inner_dest))
3f508eca 1160#ifdef SMALL_REGISTER_CLASSES
f95182a4
ILT
1161 || (SMALL_REGISTER_CLASSES
1162 && GET_CODE (src) != CALL && ! REG_USERVAR_P (inner_dest)
6d101824 1163 && FUNCTION_VALUE_REGNO_P (REGNO (inner_dest)))
230d793d 1164#endif
c448a43e 1165 ))
230d793d
RS
1166 || (i1_not_in_src && reg_overlap_mentioned_p (i1dest, src)))
1167 return 0;
1168
1169 /* If DEST is used in I3, it is being killed in this insn,
36a9c2e9
JL
1170 so record that for later.
1171 Never add REG_DEAD notes for the FRAME_POINTER_REGNUM or the
1172 STACK_POINTER_REGNUM, since these are always considered to be
1173 live. Similarly for ARG_POINTER_REGNUM if it is fixed. */
230d793d 1174 if (pi3dest_killed && GET_CODE (dest) == REG
36a9c2e9
JL
1175 && reg_referenced_p (dest, PATTERN (i3))
1176 && REGNO (dest) != FRAME_POINTER_REGNUM
6d7096b0
DE
1177#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
1178 && REGNO (dest) != HARD_FRAME_POINTER_REGNUM
1179#endif
36a9c2e9
JL
1180#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
1181 && (REGNO (dest) != ARG_POINTER_REGNUM
1182 || ! fixed_regs [REGNO (dest)])
1183#endif
1184 && REGNO (dest) != STACK_POINTER_REGNUM)
230d793d
RS
1185 {
1186 if (*pi3dest_killed)
1187 return 0;
1188
1189 *pi3dest_killed = dest;
1190 }
1191 }
1192
1193 else if (GET_CODE (x) == PARALLEL)
1194 {
1195 int i;
1196
1197 for (i = 0; i < XVECLEN (x, 0); i++)
1198 if (! combinable_i3pat (i3, &XVECEXP (x, 0, i), i2dest, i1dest,
1199 i1_not_in_src, pi3dest_killed))
1200 return 0;
1201 }
1202
1203 return 1;
1204}
1205\f
1206/* Try to combine the insns I1 and I2 into I3.
1207 Here I1 and I2 appear earlier than I3.
1208 I1 can be zero; then we combine just I2 into I3.
1209
1210 It we are combining three insns and the resulting insn is not recognized,
1211 try splitting it into two insns. If that happens, I2 and I3 are retained
1212 and I1 is pseudo-deleted by turning it into a NOTE. Otherwise, I1 and I2
1213 are pseudo-deleted.
1214
abe6e52f
RK
1215 Return 0 if the combination does not work. Then nothing is changed.
1216 If we did the combination, return the insn at which combine should
1217 resume scanning. */
230d793d
RS
1218
1219static rtx
1220try_combine (i3, i2, i1)
1221 register rtx i3, i2, i1;
1222{
1223 /* New patterns for I3 and I3, respectively. */
1224 rtx newpat, newi2pat = 0;
1225 /* Indicates need to preserve SET in I1 or I2 in I3 if it is not dead. */
1226 int added_sets_1, added_sets_2;
1227 /* Total number of SETs to put into I3. */
1228 int total_sets;
1229 /* Nonzero is I2's body now appears in I3. */
1230 int i2_is_used;
1231 /* INSN_CODEs for new I3, new I2, and user of condition code. */
1232 int insn_code_number, i2_code_number, other_code_number;
1233 /* Contains I3 if the destination of I3 is used in its source, which means
1234 that the old life of I3 is being killed. If that usage is placed into
1235 I2 and not in I3, a REG_DEAD note must be made. */
1236 rtx i3dest_killed = 0;
1237 /* SET_DEST and SET_SRC of I2 and I1. */
1238 rtx i2dest, i2src, i1dest = 0, i1src = 0;
1239 /* PATTERN (I2), or a copy of it in certain cases. */
1240 rtx i2pat;
1241 /* Indicates if I2DEST or I1DEST is in I2SRC or I1_SRC. */
c4e861e8 1242 int i2dest_in_i2src = 0, i1dest_in_i1src = 0, i2dest_in_i1src = 0;
230d793d
RS
1243 int i1_feeds_i3 = 0;
1244 /* Notes that must be added to REG_NOTES in I3 and I2. */
1245 rtx new_i3_notes, new_i2_notes;
176c9e6b
JW
1246 /* Notes that we substituted I3 into I2 instead of the normal case. */
1247 int i3_subst_into_i2 = 0;
df7d75de
RK
1248 /* Notes that I1, I2 or I3 is a MULT operation. */
1249 int have_mult = 0;
a29ca9db
RK
1250 /* Number of clobbers of SCRATCH we had to add. */
1251 int i3_scratches = 0, i2_scratches = 0, other_scratches = 0;
230d793d
RS
1252
1253 int maxreg;
1254 rtx temp;
1255 register rtx link;
1256 int i;
1257
1258 /* If any of I1, I2, and I3 isn't really an insn, we can't do anything.
1259 This can occur when flow deletes an insn that it has merged into an
1260 auto-increment address. We also can't do anything if I3 has a
1261 REG_LIBCALL note since we don't want to disrupt the contiguity of a
1262 libcall. */
1263
1264 if (GET_RTX_CLASS (GET_CODE (i3)) != 'i'
1265 || GET_RTX_CLASS (GET_CODE (i2)) != 'i'
1266 || (i1 && GET_RTX_CLASS (GET_CODE (i1)) != 'i')
5f4f0e22 1267 || find_reg_note (i3, REG_LIBCALL, NULL_RTX))
230d793d
RS
1268 return 0;
1269
1270 combine_attempts++;
1271
241cea85 1272 undobuf.undos = undobuf.previous_undos = 0;
230d793d
RS
1273 undobuf.other_insn = 0;
1274
1275 /* Save the current high-water-mark so we can free storage if we didn't
1276 accept this combination. */
1277 undobuf.storage = (char *) oballoc (0);
1278
6e25d159
RK
1279 /* Reset the hard register usage information. */
1280 CLEAR_HARD_REG_SET (newpat_used_regs);
1281
230d793d
RS
1282 /* If I1 and I2 both feed I3, they can be in any order. To simplify the
1283 code below, set I1 to be the earlier of the two insns. */
1284 if (i1 && INSN_CUID (i1) > INSN_CUID (i2))
1285 temp = i1, i1 = i2, i2 = temp;
1286
abe6e52f 1287 added_links_insn = 0;
137e889e 1288
230d793d
RS
1289 /* First check for one important special-case that the code below will
1290 not handle. Namely, the case where I1 is zero, I2 has multiple sets,
1291 and I3 is a SET whose SET_SRC is a SET_DEST in I2. In that case,
1292 we may be able to replace that destination with the destination of I3.
1293 This occurs in the common code where we compute both a quotient and
1294 remainder into a structure, in which case we want to do the computation
1295 directly into the structure to avoid register-register copies.
1296
1297 We make very conservative checks below and only try to handle the
1298 most common cases of this. For example, we only handle the case
1299 where I2 and I3 are adjacent to avoid making difficult register
1300 usage tests. */
1301
1302 if (i1 == 0 && GET_CODE (i3) == INSN && GET_CODE (PATTERN (i3)) == SET
1303 && GET_CODE (SET_SRC (PATTERN (i3))) == REG
1304 && REGNO (SET_SRC (PATTERN (i3))) >= FIRST_PSEUDO_REGISTER
1305#ifdef SMALL_REGISTER_CLASSES
f95182a4
ILT
1306 && (! SMALL_REGISTER_CLASSES
1307 || GET_CODE (SET_DEST (PATTERN (i3))) != REG
c448a43e
RK
1308 || REGNO (SET_DEST (PATTERN (i3))) >= FIRST_PSEUDO_REGISTER
1309 || REG_USERVAR_P (SET_DEST (PATTERN (i3))))
230d793d
RS
1310#endif
1311 && find_reg_note (i3, REG_DEAD, SET_SRC (PATTERN (i3)))
1312 && GET_CODE (PATTERN (i2)) == PARALLEL
1313 && ! side_effects_p (SET_DEST (PATTERN (i3)))
5089e22e
RS
1314 /* If the dest of I3 is a ZERO_EXTRACT or STRICT_LOW_PART, the code
1315 below would need to check what is inside (and reg_overlap_mentioned_p
1316 doesn't support those codes anyway). Don't allow those destinations;
1317 the resulting insn isn't likely to be recognized anyway. */
1318 && GET_CODE (SET_DEST (PATTERN (i3))) != ZERO_EXTRACT
1319 && GET_CODE (SET_DEST (PATTERN (i3))) != STRICT_LOW_PART
230d793d
RS
1320 && ! reg_overlap_mentioned_p (SET_SRC (PATTERN (i3)),
1321 SET_DEST (PATTERN (i3)))
1322 && next_real_insn (i2) == i3)
5089e22e
RS
1323 {
1324 rtx p2 = PATTERN (i2);
1325
1326 /* Make sure that the destination of I3,
1327 which we are going to substitute into one output of I2,
1328 is not used within another output of I2. We must avoid making this:
1329 (parallel [(set (mem (reg 69)) ...)
1330 (set (reg 69) ...)])
1331 which is not well-defined as to order of actions.
1332 (Besides, reload can't handle output reloads for this.)
1333
1334 The problem can also happen if the dest of I3 is a memory ref,
1335 if another dest in I2 is an indirect memory ref. */
1336 for (i = 0; i < XVECLEN (p2, 0); i++)
7ca919b7
RK
1337 if ((GET_CODE (XVECEXP (p2, 0, i)) == SET
1338 || GET_CODE (XVECEXP (p2, 0, i)) == CLOBBER)
5089e22e
RS
1339 && reg_overlap_mentioned_p (SET_DEST (PATTERN (i3)),
1340 SET_DEST (XVECEXP (p2, 0, i))))
1341 break;
230d793d 1342
5089e22e
RS
1343 if (i == XVECLEN (p2, 0))
1344 for (i = 0; i < XVECLEN (p2, 0); i++)
1345 if (SET_DEST (XVECEXP (p2, 0, i)) == SET_SRC (PATTERN (i3)))
1346 {
1347 combine_merges++;
230d793d 1348
5089e22e
RS
1349 subst_insn = i3;
1350 subst_low_cuid = INSN_CUID (i2);
230d793d 1351
c4e861e8 1352 added_sets_2 = added_sets_1 = 0;
5089e22e 1353 i2dest = SET_SRC (PATTERN (i3));
230d793d 1354
5089e22e
RS
1355 /* Replace the dest in I2 with our dest and make the resulting
1356 insn the new pattern for I3. Then skip to where we
1357 validate the pattern. Everything was set up above. */
1358 SUBST (SET_DEST (XVECEXP (p2, 0, i)),
1359 SET_DEST (PATTERN (i3)));
1360
1361 newpat = p2;
176c9e6b 1362 i3_subst_into_i2 = 1;
5089e22e
RS
1363 goto validate_replacement;
1364 }
1365 }
230d793d
RS
1366
1367#ifndef HAVE_cc0
1368 /* If we have no I1 and I2 looks like:
1369 (parallel [(set (reg:CC X) (compare:CC OP (const_int 0)))
1370 (set Y OP)])
1371 make up a dummy I1 that is
1372 (set Y OP)
1373 and change I2 to be
1374 (set (reg:CC X) (compare:CC Y (const_int 0)))
1375
1376 (We can ignore any trailing CLOBBERs.)
1377
1378 This undoes a previous combination and allows us to match a branch-and-
1379 decrement insn. */
1380
1381 if (i1 == 0 && GET_CODE (PATTERN (i2)) == PARALLEL
1382 && XVECLEN (PATTERN (i2), 0) >= 2
1383 && GET_CODE (XVECEXP (PATTERN (i2), 0, 0)) == SET
1384 && (GET_MODE_CLASS (GET_MODE (SET_DEST (XVECEXP (PATTERN (i2), 0, 0))))
1385 == MODE_CC)
1386 && GET_CODE (SET_SRC (XVECEXP (PATTERN (i2), 0, 0))) == COMPARE
1387 && XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 1) == const0_rtx
1388 && GET_CODE (XVECEXP (PATTERN (i2), 0, 1)) == SET
1389 && GET_CODE (SET_DEST (XVECEXP (PATTERN (i2), 0, 1))) == REG
1390 && rtx_equal_p (XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 0),
1391 SET_SRC (XVECEXP (PATTERN (i2), 0, 1))))
1392 {
1393 for (i = XVECLEN (PATTERN (i2), 0) - 1; i >= 2; i--)
1394 if (GET_CODE (XVECEXP (PATTERN (i2), 0, i)) != CLOBBER)
1395 break;
1396
1397 if (i == 1)
1398 {
1399 /* We make I1 with the same INSN_UID as I2. This gives it
1400 the same INSN_CUID for value tracking. Our fake I1 will
1401 never appear in the insn stream so giving it the same INSN_UID
1402 as I2 will not cause a problem. */
1403
0d9641d1
JW
1404 subst_prev_insn = i1
1405 = gen_rtx (INSN, VOIDmode, INSN_UID (i2), 0, i2,
1406 XVECEXP (PATTERN (i2), 0, 1), -1, 0, 0);
230d793d
RS
1407
1408 SUBST (PATTERN (i2), XVECEXP (PATTERN (i2), 0, 0));
1409 SUBST (XEXP (SET_SRC (PATTERN (i2)), 0),
1410 SET_DEST (PATTERN (i1)));
1411 }
1412 }
1413#endif
1414
1415 /* Verify that I2 and I1 are valid for combining. */
5f4f0e22
CH
1416 if (! can_combine_p (i2, i3, i1, NULL_RTX, &i2dest, &i2src)
1417 || (i1 && ! can_combine_p (i1, i3, NULL_RTX, i2, &i1dest, &i1src)))
230d793d
RS
1418 {
1419 undo_all ();
1420 return 0;
1421 }
1422
1423 /* Record whether I2DEST is used in I2SRC and similarly for the other
1424 cases. Knowing this will help in register status updating below. */
1425 i2dest_in_i2src = reg_overlap_mentioned_p (i2dest, i2src);
1426 i1dest_in_i1src = i1 && reg_overlap_mentioned_p (i1dest, i1src);
1427 i2dest_in_i1src = i1 && reg_overlap_mentioned_p (i2dest, i1src);
1428
916f14f1 1429 /* See if I1 directly feeds into I3. It does if I1DEST is not used
230d793d
RS
1430 in I2SRC. */
1431 i1_feeds_i3 = i1 && ! reg_overlap_mentioned_p (i1dest, i2src);
1432
1433 /* Ensure that I3's pattern can be the destination of combines. */
1434 if (! combinable_i3pat (i3, &PATTERN (i3), i2dest, i1dest,
1435 i1 && i2dest_in_i1src && i1_feeds_i3,
1436 &i3dest_killed))
1437 {
1438 undo_all ();
1439 return 0;
1440 }
1441
df7d75de
RK
1442 /* See if any of the insns is a MULT operation. Unless one is, we will
1443 reject a combination that is, since it must be slower. Be conservative
1444 here. */
1445 if (GET_CODE (i2src) == MULT
1446 || (i1 != 0 && GET_CODE (i1src) == MULT)
1447 || (GET_CODE (PATTERN (i3)) == SET
1448 && GET_CODE (SET_SRC (PATTERN (i3))) == MULT))
1449 have_mult = 1;
1450
230d793d
RS
1451 /* If I3 has an inc, then give up if I1 or I2 uses the reg that is inc'd.
1452 We used to do this EXCEPT in one case: I3 has a post-inc in an
1453 output operand. However, that exception can give rise to insns like
1454 mov r3,(r3)+
1455 which is a famous insn on the PDP-11 where the value of r3 used as the
5089e22e 1456 source was model-dependent. Avoid this sort of thing. */
230d793d
RS
1457
1458#if 0
1459 if (!(GET_CODE (PATTERN (i3)) == SET
1460 && GET_CODE (SET_SRC (PATTERN (i3))) == REG
1461 && GET_CODE (SET_DEST (PATTERN (i3))) == MEM
1462 && (GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_INC
1463 || GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_DEC)))
1464 /* It's not the exception. */
1465#endif
1466#ifdef AUTO_INC_DEC
1467 for (link = REG_NOTES (i3); link; link = XEXP (link, 1))
1468 if (REG_NOTE_KIND (link) == REG_INC
1469 && (reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i2))
1470 || (i1 != 0
1471 && reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i1)))))
1472 {
1473 undo_all ();
1474 return 0;
1475 }
1476#endif
1477
1478 /* See if the SETs in I1 or I2 need to be kept around in the merged
1479 instruction: whenever the value set there is still needed past I3.
1480 For the SETs in I2, this is easy: we see if I2DEST dies or is set in I3.
1481
1482 For the SET in I1, we have two cases: If I1 and I2 independently
1483 feed into I3, the set in I1 needs to be kept around if I1DEST dies
1484 or is set in I3. Otherwise (if I1 feeds I2 which feeds I3), the set
1485 in I1 needs to be kept around unless I1DEST dies or is set in either
1486 I2 or I3. We can distinguish these cases by seeing if I2SRC mentions
1487 I1DEST. If so, we know I1 feeds into I2. */
1488
1489 added_sets_2 = ! dead_or_set_p (i3, i2dest);
1490
1491 added_sets_1
1492 = i1 && ! (i1_feeds_i3 ? dead_or_set_p (i3, i1dest)
1493 : (dead_or_set_p (i3, i1dest) || dead_or_set_p (i2, i1dest)));
1494
1495 /* If the set in I2 needs to be kept around, we must make a copy of
1496 PATTERN (I2), so that when we substitute I1SRC for I1DEST in
5089e22e 1497 PATTERN (I2), we are only substituting for the original I1DEST, not into
230d793d
RS
1498 an already-substituted copy. This also prevents making self-referential
1499 rtx. If I2 is a PARALLEL, we just need the piece that assigns I2SRC to
1500 I2DEST. */
1501
1502 i2pat = (GET_CODE (PATTERN (i2)) == PARALLEL
1503 ? gen_rtx (SET, VOIDmode, i2dest, i2src)
1504 : PATTERN (i2));
1505
1506 if (added_sets_2)
1507 i2pat = copy_rtx (i2pat);
1508
1509 combine_merges++;
1510
1511 /* Substitute in the latest insn for the regs set by the earlier ones. */
1512
1513 maxreg = max_reg_num ();
1514
1515 subst_insn = i3;
230d793d
RS
1516
1517 /* It is possible that the source of I2 or I1 may be performing an
1518 unneeded operation, such as a ZERO_EXTEND of something that is known
1519 to have the high part zero. Handle that case by letting subst look at
1520 the innermost one of them.
1521
1522 Another way to do this would be to have a function that tries to
1523 simplify a single insn instead of merging two or more insns. We don't
1524 do this because of the potential of infinite loops and because
1525 of the potential extra memory required. However, doing it the way
1526 we are is a bit of a kludge and doesn't catch all cases.
1527
1528 But only do this if -fexpensive-optimizations since it slows things down
1529 and doesn't usually win. */
1530
1531 if (flag_expensive_optimizations)
1532 {
1533 /* Pass pc_rtx so no substitutions are done, just simplifications.
1534 The cases that we are interested in here do not involve the few
1535 cases were is_replaced is checked. */
1536 if (i1)
d0ab8cd3
RK
1537 {
1538 subst_low_cuid = INSN_CUID (i1);
1539 i1src = subst (i1src, pc_rtx, pc_rtx, 0, 0);
1540 }
230d793d 1541 else
d0ab8cd3
RK
1542 {
1543 subst_low_cuid = INSN_CUID (i2);
1544 i2src = subst (i2src, pc_rtx, pc_rtx, 0, 0);
1545 }
230d793d 1546
241cea85 1547 undobuf.previous_undos = undobuf.undos;
230d793d
RS
1548 }
1549
1550#ifndef HAVE_cc0
1551 /* Many machines that don't use CC0 have insns that can both perform an
1552 arithmetic operation and set the condition code. These operations will
1553 be represented as a PARALLEL with the first element of the vector
1554 being a COMPARE of an arithmetic operation with the constant zero.
1555 The second element of the vector will set some pseudo to the result
1556 of the same arithmetic operation. If we simplify the COMPARE, we won't
1557 match such a pattern and so will generate an extra insn. Here we test
1558 for this case, where both the comparison and the operation result are
1559 needed, and make the PARALLEL by just replacing I2DEST in I3SRC with
1560 I2SRC. Later we will make the PARALLEL that contains I2. */
1561
1562 if (i1 == 0 && added_sets_2 && GET_CODE (PATTERN (i3)) == SET
1563 && GET_CODE (SET_SRC (PATTERN (i3))) == COMPARE
1564 && XEXP (SET_SRC (PATTERN (i3)), 1) == const0_rtx
1565 && rtx_equal_p (XEXP (SET_SRC (PATTERN (i3)), 0), i2dest))
1566 {
1567 rtx *cc_use;
1568 enum machine_mode compare_mode;
1569
1570 newpat = PATTERN (i3);
1571 SUBST (XEXP (SET_SRC (newpat), 0), i2src);
1572
1573 i2_is_used = 1;
1574
1575#ifdef EXTRA_CC_MODES
1576 /* See if a COMPARE with the operand we substituted in should be done
1577 with the mode that is currently being used. If not, do the same
1578 processing we do in `subst' for a SET; namely, if the destination
1579 is used only once, try to replace it with a register of the proper
1580 mode and also replace the COMPARE. */
1581 if (undobuf.other_insn == 0
1582 && (cc_use = find_single_use (SET_DEST (newpat), i3,
1583 &undobuf.other_insn))
77fa0940
RK
1584 && ((compare_mode = SELECT_CC_MODE (GET_CODE (*cc_use),
1585 i2src, const0_rtx))
230d793d
RS
1586 != GET_MODE (SET_DEST (newpat))))
1587 {
1588 int regno = REGNO (SET_DEST (newpat));
1589 rtx new_dest = gen_rtx (REG, compare_mode, regno);
1590
1591 if (regno < FIRST_PSEUDO_REGISTER
1592 || (reg_n_sets[regno] == 1 && ! added_sets_2
1593 && ! REG_USERVAR_P (SET_DEST (newpat))))
1594 {
1595 if (regno >= FIRST_PSEUDO_REGISTER)
1596 SUBST (regno_reg_rtx[regno], new_dest);
1597
1598 SUBST (SET_DEST (newpat), new_dest);
1599 SUBST (XEXP (*cc_use, 0), new_dest);
1600 SUBST (SET_SRC (newpat),
1601 gen_rtx_combine (COMPARE, compare_mode,
1602 i2src, const0_rtx));
1603 }
1604 else
1605 undobuf.other_insn = 0;
1606 }
1607#endif
1608 }
1609 else
1610#endif
1611 {
1612 n_occurrences = 0; /* `subst' counts here */
1613
1614 /* If I1 feeds into I2 (not into I3) and I1DEST is in I1SRC, we
1615 need to make a unique copy of I2SRC each time we substitute it
1616 to avoid self-referential rtl. */
1617
d0ab8cd3 1618 subst_low_cuid = INSN_CUID (i2);
230d793d
RS
1619 newpat = subst (PATTERN (i3), i2dest, i2src, 0,
1620 ! i1_feeds_i3 && i1dest_in_i1src);
241cea85 1621 undobuf.previous_undos = undobuf.undos;
230d793d
RS
1622
1623 /* Record whether i2's body now appears within i3's body. */
1624 i2_is_used = n_occurrences;
1625 }
1626
1627 /* If we already got a failure, don't try to do more. Otherwise,
1628 try to substitute in I1 if we have it. */
1629
1630 if (i1 && GET_CODE (newpat) != CLOBBER)
1631 {
1632 /* Before we can do this substitution, we must redo the test done
1633 above (see detailed comments there) that ensures that I1DEST
0f41302f 1634 isn't mentioned in any SETs in NEWPAT that are field assignments. */
230d793d 1635
5f4f0e22
CH
1636 if (! combinable_i3pat (NULL_RTX, &newpat, i1dest, NULL_RTX,
1637 0, NULL_PTR))
230d793d
RS
1638 {
1639 undo_all ();
1640 return 0;
1641 }
1642
1643 n_occurrences = 0;
d0ab8cd3 1644 subst_low_cuid = INSN_CUID (i1);
230d793d 1645 newpat = subst (newpat, i1dest, i1src, 0, 0);
241cea85 1646 undobuf.previous_undos = undobuf.undos;
230d793d
RS
1647 }
1648
916f14f1
RK
1649 /* Fail if an autoincrement side-effect has been duplicated. Be careful
1650 to count all the ways that I2SRC and I1SRC can be used. */
5f4f0e22 1651 if ((FIND_REG_INC_NOTE (i2, NULL_RTX) != 0
916f14f1 1652 && i2_is_used + added_sets_2 > 1)
5f4f0e22 1653 || (i1 != 0 && FIND_REG_INC_NOTE (i1, NULL_RTX) != 0
916f14f1
RK
1654 && (n_occurrences + added_sets_1 + (added_sets_2 && ! i1_feeds_i3)
1655 > 1))
230d793d
RS
1656 /* Fail if we tried to make a new register (we used to abort, but there's
1657 really no reason to). */
1658 || max_reg_num () != maxreg
1659 /* Fail if we couldn't do something and have a CLOBBER. */
df7d75de
RK
1660 || GET_CODE (newpat) == CLOBBER
1661 /* Fail if this new pattern is a MULT and we didn't have one before
1662 at the outer level. */
1663 || (GET_CODE (newpat) == SET && GET_CODE (SET_SRC (newpat)) == MULT
1664 && ! have_mult))
230d793d
RS
1665 {
1666 undo_all ();
1667 return 0;
1668 }
1669
1670 /* If the actions of the earlier insns must be kept
1671 in addition to substituting them into the latest one,
1672 we must make a new PARALLEL for the latest insn
1673 to hold additional the SETs. */
1674
1675 if (added_sets_1 || added_sets_2)
1676 {
1677 combine_extras++;
1678
1679 if (GET_CODE (newpat) == PARALLEL)
1680 {
1681 rtvec old = XVEC (newpat, 0);
1682 total_sets = XVECLEN (newpat, 0) + added_sets_1 + added_sets_2;
1683 newpat = gen_rtx (PARALLEL, VOIDmode, rtvec_alloc (total_sets));
59888de2 1684 bcopy ((char *) &old->elem[0], (char *) XVEC (newpat, 0)->elem,
230d793d
RS
1685 sizeof (old->elem[0]) * old->num_elem);
1686 }
1687 else
1688 {
1689 rtx old = newpat;
1690 total_sets = 1 + added_sets_1 + added_sets_2;
1691 newpat = gen_rtx (PARALLEL, VOIDmode, rtvec_alloc (total_sets));
1692 XVECEXP (newpat, 0, 0) = old;
1693 }
1694
1695 if (added_sets_1)
1696 XVECEXP (newpat, 0, --total_sets)
1697 = (GET_CODE (PATTERN (i1)) == PARALLEL
1698 ? gen_rtx (SET, VOIDmode, i1dest, i1src) : PATTERN (i1));
1699
1700 if (added_sets_2)
1701 {
1702 /* If there is no I1, use I2's body as is. We used to also not do
1703 the subst call below if I2 was substituted into I3,
1704 but that could lose a simplification. */
1705 if (i1 == 0)
1706 XVECEXP (newpat, 0, --total_sets) = i2pat;
1707 else
1708 /* See comment where i2pat is assigned. */
1709 XVECEXP (newpat, 0, --total_sets)
1710 = subst (i2pat, i1dest, i1src, 0, 0);
1711 }
1712 }
1713
1714 /* We come here when we are replacing a destination in I2 with the
1715 destination of I3. */
1716 validate_replacement:
1717
6e25d159
RK
1718 /* Note which hard regs this insn has as inputs. */
1719 mark_used_regs_combine (newpat);
1720
230d793d 1721 /* Is the result of combination a valid instruction? */
a29ca9db
RK
1722 insn_code_number
1723 = recog_for_combine (&newpat, i3, &new_i3_notes, &i3_scratches);
230d793d
RS
1724
1725 /* If the result isn't valid, see if it is a PARALLEL of two SETs where
1726 the second SET's destination is a register that is unused. In that case,
1727 we just need the first SET. This can occur when simplifying a divmod
1728 insn. We *must* test for this case here because the code below that
1729 splits two independent SETs doesn't handle this case correctly when it
1730 updates the register status. Also check the case where the first
1731 SET's destination is unused. That would not cause incorrect code, but
1732 does cause an unneeded insn to remain. */
1733
1734 if (insn_code_number < 0 && GET_CODE (newpat) == PARALLEL
1735 && XVECLEN (newpat, 0) == 2
1736 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
1737 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
1738 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) == REG
1739 && find_reg_note (i3, REG_UNUSED, SET_DEST (XVECEXP (newpat, 0, 1)))
1740 && ! side_effects_p (SET_SRC (XVECEXP (newpat, 0, 1)))
1741 && asm_noperands (newpat) < 0)
1742 {
1743 newpat = XVECEXP (newpat, 0, 0);
a29ca9db
RK
1744 insn_code_number
1745 = recog_for_combine (&newpat, i3, &new_i3_notes, &i3_scratches);
230d793d
RS
1746 }
1747
1748 else if (insn_code_number < 0 && GET_CODE (newpat) == PARALLEL
1749 && XVECLEN (newpat, 0) == 2
1750 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
1751 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
1752 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) == REG
1753 && find_reg_note (i3, REG_UNUSED, SET_DEST (XVECEXP (newpat, 0, 0)))
1754 && ! side_effects_p (SET_SRC (XVECEXP (newpat, 0, 0)))
1755 && asm_noperands (newpat) < 0)
1756 {
1757 newpat = XVECEXP (newpat, 0, 1);
a29ca9db
RK
1758 insn_code_number
1759 = recog_for_combine (&newpat, i3, &new_i3_notes, &i3_scratches);
230d793d
RS
1760 }
1761
1762 /* If we were combining three insns and the result is a simple SET
1763 with no ASM_OPERANDS that wasn't recognized, try to split it into two
916f14f1
RK
1764 insns. There are two ways to do this. It can be split using a
1765 machine-specific method (like when you have an addition of a large
1766 constant) or by combine in the function find_split_point. */
1767
230d793d
RS
1768 if (i1 && insn_code_number < 0 && GET_CODE (newpat) == SET
1769 && asm_noperands (newpat) < 0)
1770 {
916f14f1 1771 rtx m_split, *split;
42495ca0 1772 rtx ni2dest = i2dest;
916f14f1
RK
1773
1774 /* See if the MD file can split NEWPAT. If it can't, see if letting it
42495ca0
RK
1775 use I2DEST as a scratch register will help. In the latter case,
1776 convert I2DEST to the mode of the source of NEWPAT if we can. */
916f14f1
RK
1777
1778 m_split = split_insns (newpat, i3);
a70c61d9
JW
1779
1780 /* We can only use I2DEST as a scratch reg if it doesn't overlap any
1781 inputs of NEWPAT. */
1782
1783 /* ??? If I2DEST is not safe, and I1DEST exists, then it would be
1784 possible to try that as a scratch reg. This would require adding
1785 more code to make it work though. */
1786
1787 if (m_split == 0 && ! reg_overlap_mentioned_p (ni2dest, newpat))
42495ca0
RK
1788 {
1789 /* If I2DEST is a hard register or the only use of a pseudo,
1790 we can change its mode. */
1791 if (GET_MODE (SET_DEST (newpat)) != GET_MODE (i2dest)
02f4ada4 1792 && GET_MODE (SET_DEST (newpat)) != VOIDmode
60654f77 1793 && GET_CODE (i2dest) == REG
42495ca0
RK
1794 && (REGNO (i2dest) < FIRST_PSEUDO_REGISTER
1795 || (reg_n_sets[REGNO (i2dest)] == 1 && ! added_sets_2
1796 && ! REG_USERVAR_P (i2dest))))
1797 ni2dest = gen_rtx (REG, GET_MODE (SET_DEST (newpat)),
1798 REGNO (i2dest));
1799
1800 m_split = split_insns (gen_rtx (PARALLEL, VOIDmode,
1801 gen_rtvec (2, newpat,
1802 gen_rtx (CLOBBER,
1803 VOIDmode,
1804 ni2dest))),
1805 i3);
1806 }
916f14f1
RK
1807
1808 if (m_split && GET_CODE (m_split) == SEQUENCE
3f508eca
RK
1809 && XVECLEN (m_split, 0) == 2
1810 && (next_real_insn (i2) == i3
1811 || ! use_crosses_set_p (PATTERN (XVECEXP (m_split, 0, 0)),
1812 INSN_CUID (i2))))
916f14f1 1813 {
1a26b032 1814 rtx i2set, i3set;
d0ab8cd3 1815 rtx newi3pat = PATTERN (XVECEXP (m_split, 0, 1));
916f14f1 1816 newi2pat = PATTERN (XVECEXP (m_split, 0, 0));
916f14f1 1817
e4ba89be
RK
1818 i3set = single_set (XVECEXP (m_split, 0, 1));
1819 i2set = single_set (XVECEXP (m_split, 0, 0));
1a26b032 1820
42495ca0
RK
1821 /* In case we changed the mode of I2DEST, replace it in the
1822 pseudo-register table here. We can't do it above in case this
1823 code doesn't get executed and we do a split the other way. */
1824
1825 if (REGNO (i2dest) >= FIRST_PSEUDO_REGISTER)
1826 SUBST (regno_reg_rtx[REGNO (i2dest)], ni2dest);
1827
a29ca9db
RK
1828 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes,
1829 &i2_scratches);
1a26b032
RK
1830
1831 /* If I2 or I3 has multiple SETs, we won't know how to track
9cc96794
RK
1832 register status, so don't use these insns. If I2's destination
1833 is used between I2 and I3, we also can't use these insns. */
1a26b032 1834
9cc96794
RK
1835 if (i2_code_number >= 0 && i2set && i3set
1836 && (next_real_insn (i2) == i3
1837 || ! reg_used_between_p (SET_DEST (i2set), i2, i3)))
a29ca9db
RK
1838 insn_code_number = recog_for_combine (&newi3pat, i3, &new_i3_notes,
1839 &i3_scratches);
d0ab8cd3
RK
1840 if (insn_code_number >= 0)
1841 newpat = newi3pat;
1842
c767f54b 1843 /* It is possible that both insns now set the destination of I3.
22609cbf 1844 If so, we must show an extra use of it. */
c767f54b 1845
393de53f
RK
1846 if (insn_code_number >= 0)
1847 {
1848 rtx new_i3_dest = SET_DEST (i3set);
1849 rtx new_i2_dest = SET_DEST (i2set);
1850
1851 while (GET_CODE (new_i3_dest) == ZERO_EXTRACT
1852 || GET_CODE (new_i3_dest) == STRICT_LOW_PART
1853 || GET_CODE (new_i3_dest) == SUBREG)
1854 new_i3_dest = XEXP (new_i3_dest, 0);
1855
d4096689
RK
1856 while (GET_CODE (new_i2_dest) == ZERO_EXTRACT
1857 || GET_CODE (new_i2_dest) == STRICT_LOW_PART
1858 || GET_CODE (new_i2_dest) == SUBREG)
1859 new_i2_dest = XEXP (new_i2_dest, 0);
1860
393de53f
RK
1861 if (GET_CODE (new_i3_dest) == REG
1862 && GET_CODE (new_i2_dest) == REG
1863 && REGNO (new_i3_dest) == REGNO (new_i2_dest))
d4096689 1864 reg_n_sets[REGNO (new_i2_dest)]++;
393de53f 1865 }
916f14f1 1866 }
230d793d
RS
1867
1868 /* If we can split it and use I2DEST, go ahead and see if that
1869 helps things be recognized. Verify that none of the registers
1870 are set between I2 and I3. */
d0ab8cd3 1871 if (insn_code_number < 0 && (split = find_split_point (&newpat, i3)) != 0
230d793d
RS
1872#ifdef HAVE_cc0
1873 && GET_CODE (i2dest) == REG
1874#endif
1875 /* We need I2DEST in the proper mode. If it is a hard register
1876 or the only use of a pseudo, we can change its mode. */
1877 && (GET_MODE (*split) == GET_MODE (i2dest)
1878 || GET_MODE (*split) == VOIDmode
1879 || REGNO (i2dest) < FIRST_PSEUDO_REGISTER
1880 || (reg_n_sets[REGNO (i2dest)] == 1 && ! added_sets_2
1881 && ! REG_USERVAR_P (i2dest)))
1882 && (next_real_insn (i2) == i3
1883 || ! use_crosses_set_p (*split, INSN_CUID (i2)))
1884 /* We can't overwrite I2DEST if its value is still used by
1885 NEWPAT. */
1886 && ! reg_referenced_p (i2dest, newpat))
1887 {
1888 rtx newdest = i2dest;
df7d75de
RK
1889 enum rtx_code split_code = GET_CODE (*split);
1890 enum machine_mode split_mode = GET_MODE (*split);
230d793d
RS
1891
1892 /* Get NEWDEST as a register in the proper mode. We have already
1893 validated that we can do this. */
df7d75de 1894 if (GET_MODE (i2dest) != split_mode && split_mode != VOIDmode)
230d793d 1895 {
df7d75de 1896 newdest = gen_rtx (REG, split_mode, REGNO (i2dest));
230d793d
RS
1897
1898 if (REGNO (i2dest) >= FIRST_PSEUDO_REGISTER)
1899 SUBST (regno_reg_rtx[REGNO (i2dest)], newdest);
1900 }
1901
1902 /* If *SPLIT is a (mult FOO (const_int pow2)), convert it to
1903 an ASHIFT. This can occur if it was inside a PLUS and hence
1904 appeared to be a memory address. This is a kludge. */
df7d75de 1905 if (split_code == MULT
230d793d
RS
1906 && GET_CODE (XEXP (*split, 1)) == CONST_INT
1907 && (i = exact_log2 (INTVAL (XEXP (*split, 1)))) >= 0)
1dc8a823
JW
1908 {
1909 SUBST (*split, gen_rtx_combine (ASHIFT, split_mode,
1910 XEXP (*split, 0), GEN_INT (i)));
1911 /* Update split_code because we may not have a multiply
1912 anymore. */
1913 split_code = GET_CODE (*split);
1914 }
230d793d
RS
1915
1916#ifdef INSN_SCHEDULING
1917 /* If *SPLIT is a paradoxical SUBREG, when we split it, it should
1918 be written as a ZERO_EXTEND. */
df7d75de
RK
1919 if (split_code == SUBREG && GET_CODE (SUBREG_REG (*split)) == MEM)
1920 SUBST (*split, gen_rtx_combine (ZERO_EXTEND, split_mode,
230d793d
RS
1921 XEXP (*split, 0)));
1922#endif
1923
1924 newi2pat = gen_rtx_combine (SET, VOIDmode, newdest, *split);
1925 SUBST (*split, newdest);
a29ca9db
RK
1926 i2_code_number
1927 = recog_for_combine (&newi2pat, i2, &new_i2_notes, &i2_scratches);
df7d75de
RK
1928
1929 /* If the split point was a MULT and we didn't have one before,
1930 don't use one now. */
1931 if (i2_code_number >= 0 && ! (split_code == MULT && ! have_mult))
a29ca9db
RK
1932 insn_code_number
1933 = recog_for_combine (&newpat, i3, &new_i3_notes, &i3_scratches);
230d793d
RS
1934 }
1935 }
1936
1937 /* Check for a case where we loaded from memory in a narrow mode and
1938 then sign extended it, but we need both registers. In that case,
1939 we have a PARALLEL with both loads from the same memory location.
1940 We can split this into a load from memory followed by a register-register
1941 copy. This saves at least one insn, more if register allocation can
f0343c74
RK
1942 eliminate the copy.
1943
1944 We cannot do this if the destination of the second assignment is
1945 a register that we have already assumed is zero-extended. Similarly
1946 for a SUBREG of such a register. */
230d793d
RS
1947
1948 else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0
1949 && GET_CODE (newpat) == PARALLEL
1950 && XVECLEN (newpat, 0) == 2
1951 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
1952 && GET_CODE (SET_SRC (XVECEXP (newpat, 0, 0))) == SIGN_EXTEND
1953 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
1954 && rtx_equal_p (SET_SRC (XVECEXP (newpat, 0, 1)),
1955 XEXP (SET_SRC (XVECEXP (newpat, 0, 0)), 0))
1956 && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 1)),
1957 INSN_CUID (i2))
1958 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT
1959 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART
f0343c74
RK
1960 && ! (temp = SET_DEST (XVECEXP (newpat, 0, 1)),
1961 (GET_CODE (temp) == REG
1962 && reg_nonzero_bits[REGNO (temp)] != 0
1963 && GET_MODE_BITSIZE (GET_MODE (temp)) < BITS_PER_WORD
1964 && GET_MODE_BITSIZE (GET_MODE (temp)) < HOST_BITS_PER_INT
1965 && (reg_nonzero_bits[REGNO (temp)]
1966 != GET_MODE_MASK (word_mode))))
1967 && ! (GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) == SUBREG
1968 && (temp = SUBREG_REG (SET_DEST (XVECEXP (newpat, 0, 1))),
1969 (GET_CODE (temp) == REG
1970 && reg_nonzero_bits[REGNO (temp)] != 0
1971 && GET_MODE_BITSIZE (GET_MODE (temp)) < BITS_PER_WORD
1972 && GET_MODE_BITSIZE (GET_MODE (temp)) < HOST_BITS_PER_INT
1973 && (reg_nonzero_bits[REGNO (temp)]
1974 != GET_MODE_MASK (word_mode)))))
230d793d
RS
1975 && ! reg_overlap_mentioned_p (SET_DEST (XVECEXP (newpat, 0, 1)),
1976 SET_SRC (XVECEXP (newpat, 0, 1)))
1977 && ! find_reg_note (i3, REG_UNUSED,
1978 SET_DEST (XVECEXP (newpat, 0, 0))))
1979 {
472fbdd1
RK
1980 rtx ni2dest;
1981
230d793d 1982 newi2pat = XVECEXP (newpat, 0, 0);
472fbdd1 1983 ni2dest = SET_DEST (XVECEXP (newpat, 0, 0));
230d793d
RS
1984 newpat = XVECEXP (newpat, 0, 1);
1985 SUBST (SET_SRC (newpat),
472fbdd1 1986 gen_lowpart_for_combine (GET_MODE (SET_SRC (newpat)), ni2dest));
a29ca9db
RK
1987 i2_code_number
1988 = recog_for_combine (&newi2pat, i2, &new_i2_notes, &i2_scratches);
1989
230d793d 1990 if (i2_code_number >= 0)
a29ca9db
RK
1991 insn_code_number
1992 = recog_for_combine (&newpat, i3, &new_i3_notes, &i3_scratches);
5089e22e
RS
1993
1994 if (insn_code_number >= 0)
1995 {
1996 rtx insn;
1997 rtx link;
1998
1999 /* If we will be able to accept this, we have made a change to the
2000 destination of I3. This can invalidate a LOG_LINKS pointing
2001 to I3. No other part of combine.c makes such a transformation.
2002
2003 The new I3 will have a destination that was previously the
2004 destination of I1 or I2 and which was used in i2 or I3. Call
2005 distribute_links to make a LOG_LINK from the next use of
2006 that destination. */
2007
2008 PATTERN (i3) = newpat;
5f4f0e22 2009 distribute_links (gen_rtx (INSN_LIST, VOIDmode, i3, NULL_RTX));
5089e22e
RS
2010
2011 /* I3 now uses what used to be its destination and which is
2012 now I2's destination. That means we need a LOG_LINK from
2013 I3 to I2. But we used to have one, so we still will.
2014
2015 However, some later insn might be using I2's dest and have
2016 a LOG_LINK pointing at I3. We must remove this link.
2017 The simplest way to remove the link is to point it at I1,
2018 which we know will be a NOTE. */
2019
2020 for (insn = NEXT_INSN (i3);
0d4d42c3
RK
2021 insn && (this_basic_block == n_basic_blocks - 1
2022 || insn != basic_block_head[this_basic_block + 1]);
5089e22e
RS
2023 insn = NEXT_INSN (insn))
2024 {
2025 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
472fbdd1 2026 && reg_referenced_p (ni2dest, PATTERN (insn)))
5089e22e
RS
2027 {
2028 for (link = LOG_LINKS (insn); link;
2029 link = XEXP (link, 1))
2030 if (XEXP (link, 0) == i3)
2031 XEXP (link, 0) = i1;
2032
2033 break;
2034 }
2035 }
2036 }
230d793d
RS
2037 }
2038
2039 /* Similarly, check for a case where we have a PARALLEL of two independent
2040 SETs but we started with three insns. In this case, we can do the sets
2041 as two separate insns. This case occurs when some SET allows two
2042 other insns to combine, but the destination of that SET is still live. */
2043
2044 else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0
2045 && GET_CODE (newpat) == PARALLEL
2046 && XVECLEN (newpat, 0) == 2
2047 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
2048 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != ZERO_EXTRACT
2049 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != STRICT_LOW_PART
2050 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
2051 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT
2052 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART
2053 && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 1)),
2054 INSN_CUID (i2))
2055 /* Don't pass sets with (USE (MEM ...)) dests to the following. */
2056 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != USE
2057 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != USE
2058 && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 1)),
2059 XVECEXP (newpat, 0, 0))
2060 && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 0)),
2061 XVECEXP (newpat, 0, 1)))
2062 {
2063 newi2pat = XVECEXP (newpat, 0, 1);
2064 newpat = XVECEXP (newpat, 0, 0);
2065
a29ca9db
RK
2066 i2_code_number
2067 = recog_for_combine (&newi2pat, i2, &new_i2_notes, &i2_scratches);
2068
230d793d 2069 if (i2_code_number >= 0)
a29ca9db
RK
2070 insn_code_number
2071 = recog_for_combine (&newpat, i3, &new_i3_notes, &i3_scratches);
230d793d
RS
2072 }
2073
2074 /* If it still isn't recognized, fail and change things back the way they
2075 were. */
2076 if ((insn_code_number < 0
2077 /* Is the result a reasonable ASM_OPERANDS? */
2078 && (! check_asm_operands (newpat) || added_sets_1 || added_sets_2)))
2079 {
2080 undo_all ();
2081 return 0;
2082 }
2083
2084 /* If we had to change another insn, make sure it is valid also. */
2085 if (undobuf.other_insn)
2086 {
230d793d
RS
2087 rtx other_pat = PATTERN (undobuf.other_insn);
2088 rtx new_other_notes;
2089 rtx note, next;
2090
6e25d159
RK
2091 CLEAR_HARD_REG_SET (newpat_used_regs);
2092
a29ca9db
RK
2093 other_code_number
2094 = recog_for_combine (&other_pat, undobuf.other_insn,
2095 &new_other_notes, &other_scratches);
230d793d
RS
2096
2097 if (other_code_number < 0 && ! check_asm_operands (other_pat))
2098 {
2099 undo_all ();
2100 return 0;
2101 }
2102
2103 PATTERN (undobuf.other_insn) = other_pat;
2104
2105 /* If any of the notes in OTHER_INSN were REG_UNUSED, ensure that they
2106 are still valid. Then add any non-duplicate notes added by
2107 recog_for_combine. */
2108 for (note = REG_NOTES (undobuf.other_insn); note; note = next)
2109 {
2110 next = XEXP (note, 1);
2111
2112 if (REG_NOTE_KIND (note) == REG_UNUSED
2113 && ! reg_set_p (XEXP (note, 0), PATTERN (undobuf.other_insn)))
1a26b032
RK
2114 {
2115 if (GET_CODE (XEXP (note, 0)) == REG)
2116 reg_n_deaths[REGNO (XEXP (note, 0))]--;
2117
2118 remove_note (undobuf.other_insn, note);
2119 }
230d793d
RS
2120 }
2121
1a26b032
RK
2122 for (note = new_other_notes; note; note = XEXP (note, 1))
2123 if (GET_CODE (XEXP (note, 0)) == REG)
2124 reg_n_deaths[REGNO (XEXP (note, 0))]++;
2125
230d793d 2126 distribute_notes (new_other_notes, undobuf.other_insn,
5f4f0e22 2127 undobuf.other_insn, NULL_RTX, NULL_RTX, NULL_RTX);
230d793d
RS
2128 }
2129
2130 /* We now know that we can do this combination. Merge the insns and
2131 update the status of registers and LOG_LINKS. */
2132
2133 {
2134 rtx i3notes, i2notes, i1notes = 0;
2135 rtx i3links, i2links, i1links = 0;
2136 rtx midnotes = 0;
230d793d
RS
2137 register int regno;
2138 /* Compute which registers we expect to eliminate. */
2139 rtx elim_i2 = (newi2pat || i2dest_in_i2src || i2dest_in_i1src
2140 ? 0 : i2dest);
2141 rtx elim_i1 = i1 == 0 || i1dest_in_i1src ? 0 : i1dest;
2142
2143 /* Get the old REG_NOTES and LOG_LINKS from all our insns and
2144 clear them. */
2145 i3notes = REG_NOTES (i3), i3links = LOG_LINKS (i3);
2146 i2notes = REG_NOTES (i2), i2links = LOG_LINKS (i2);
2147 if (i1)
2148 i1notes = REG_NOTES (i1), i1links = LOG_LINKS (i1);
2149
2150 /* Ensure that we do not have something that should not be shared but
2151 occurs multiple times in the new insns. Check this by first
5089e22e 2152 resetting all the `used' flags and then copying anything is shared. */
230d793d
RS
2153
2154 reset_used_flags (i3notes);
2155 reset_used_flags (i2notes);
2156 reset_used_flags (i1notes);
2157 reset_used_flags (newpat);
2158 reset_used_flags (newi2pat);
2159 if (undobuf.other_insn)
2160 reset_used_flags (PATTERN (undobuf.other_insn));
2161
2162 i3notes = copy_rtx_if_shared (i3notes);
2163 i2notes = copy_rtx_if_shared (i2notes);
2164 i1notes = copy_rtx_if_shared (i1notes);
2165 newpat = copy_rtx_if_shared (newpat);
2166 newi2pat = copy_rtx_if_shared (newi2pat);
2167 if (undobuf.other_insn)
2168 reset_used_flags (PATTERN (undobuf.other_insn));
2169
2170 INSN_CODE (i3) = insn_code_number;
2171 PATTERN (i3) = newpat;
2172 if (undobuf.other_insn)
2173 INSN_CODE (undobuf.other_insn) = other_code_number;
2174
2175 /* We had one special case above where I2 had more than one set and
2176 we replaced a destination of one of those sets with the destination
2177 of I3. In that case, we have to update LOG_LINKS of insns later
176c9e6b
JW
2178 in this basic block. Note that this (expensive) case is rare.
2179
2180 Also, in this case, we must pretend that all REG_NOTEs for I2
2181 actually came from I3, so that REG_UNUSED notes from I2 will be
2182 properly handled. */
2183
2184 if (i3_subst_into_i2)
2185 {
2186 for (i = 0; i < XVECLEN (PATTERN (i2), 0); i++)
2187 if (GET_CODE (SET_DEST (XVECEXP (PATTERN (i2), 0, i))) == REG
2188 && SET_DEST (XVECEXP (PATTERN (i2), 0, i)) != i2dest
2189 && ! find_reg_note (i2, REG_UNUSED,
2190 SET_DEST (XVECEXP (PATTERN (i2), 0, i))))
2191 for (temp = NEXT_INSN (i2);
2192 temp && (this_basic_block == n_basic_blocks - 1
2193 || basic_block_head[this_basic_block] != temp);
2194 temp = NEXT_INSN (temp))
2195 if (temp != i3 && GET_RTX_CLASS (GET_CODE (temp)) == 'i')
2196 for (link = LOG_LINKS (temp); link; link = XEXP (link, 1))
2197 if (XEXP (link, 0) == i2)
2198 XEXP (link, 0) = i3;
2199
2200 if (i3notes)
2201 {
2202 rtx link = i3notes;
2203 while (XEXP (link, 1))
2204 link = XEXP (link, 1);
2205 XEXP (link, 1) = i2notes;
2206 }
2207 else
2208 i3notes = i2notes;
2209 i2notes = 0;
2210 }
230d793d
RS
2211
2212 LOG_LINKS (i3) = 0;
2213 REG_NOTES (i3) = 0;
2214 LOG_LINKS (i2) = 0;
2215 REG_NOTES (i2) = 0;
2216
2217 if (newi2pat)
2218 {
2219 INSN_CODE (i2) = i2_code_number;
2220 PATTERN (i2) = newi2pat;
2221 }
2222 else
2223 {
2224 PUT_CODE (i2, NOTE);
2225 NOTE_LINE_NUMBER (i2) = NOTE_INSN_DELETED;
2226 NOTE_SOURCE_FILE (i2) = 0;
2227 }
2228
2229 if (i1)
2230 {
2231 LOG_LINKS (i1) = 0;
2232 REG_NOTES (i1) = 0;
2233 PUT_CODE (i1, NOTE);
2234 NOTE_LINE_NUMBER (i1) = NOTE_INSN_DELETED;
2235 NOTE_SOURCE_FILE (i1) = 0;
2236 }
2237
2238 /* Get death notes for everything that is now used in either I3 or
6eb12cef
RK
2239 I2 and used to die in a previous insn. If we built two new
2240 patterns, move from I1 to I2 then I2 to I3 so that we get the
2241 proper movement on registers that I2 modifies. */
230d793d 2242
230d793d 2243 if (newi2pat)
6eb12cef
RK
2244 {
2245 move_deaths (newi2pat, NULL_RTX, INSN_CUID (i1), i2, &midnotes);
2246 move_deaths (newpat, newi2pat, INSN_CUID (i1), i3, &midnotes);
2247 }
2248 else
2249 move_deaths (newpat, NULL_RTX, i1 ? INSN_CUID (i1) : INSN_CUID (i2),
2250 i3, &midnotes);
230d793d
RS
2251
2252 /* Distribute all the LOG_LINKS and REG_NOTES from I1, I2, and I3. */
2253 if (i3notes)
5f4f0e22
CH
2254 distribute_notes (i3notes, i3, i3, newi2pat ? i2 : NULL_RTX,
2255 elim_i2, elim_i1);
230d793d 2256 if (i2notes)
5f4f0e22
CH
2257 distribute_notes (i2notes, i2, i3, newi2pat ? i2 : NULL_RTX,
2258 elim_i2, elim_i1);
230d793d 2259 if (i1notes)
5f4f0e22
CH
2260 distribute_notes (i1notes, i1, i3, newi2pat ? i2 : NULL_RTX,
2261 elim_i2, elim_i1);
230d793d 2262 if (midnotes)
5f4f0e22
CH
2263 distribute_notes (midnotes, NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
2264 elim_i2, elim_i1);
230d793d
RS
2265
2266 /* Distribute any notes added to I2 or I3 by recog_for_combine. We
2267 know these are REG_UNUSED and want them to go to the desired insn,
1a26b032
RK
2268 so we always pass it as i3. We have not counted the notes in
2269 reg_n_deaths yet, so we need to do so now. */
2270
230d793d 2271 if (newi2pat && new_i2_notes)
1a26b032
RK
2272 {
2273 for (temp = new_i2_notes; temp; temp = XEXP (temp, 1))
2274 if (GET_CODE (XEXP (temp, 0)) == REG)
2275 reg_n_deaths[REGNO (XEXP (temp, 0))]++;
2276
2277 distribute_notes (new_i2_notes, i2, i2, NULL_RTX, NULL_RTX, NULL_RTX);
2278 }
2279
230d793d 2280 if (new_i3_notes)
1a26b032
RK
2281 {
2282 for (temp = new_i3_notes; temp; temp = XEXP (temp, 1))
2283 if (GET_CODE (XEXP (temp, 0)) == REG)
2284 reg_n_deaths[REGNO (XEXP (temp, 0))]++;
2285
2286 distribute_notes (new_i3_notes, i3, i3, NULL_RTX, NULL_RTX, NULL_RTX);
2287 }
230d793d
RS
2288
2289 /* If I3DEST was used in I3SRC, it really died in I3. We may need to
1a26b032
RK
2290 put a REG_DEAD note for it somewhere. Similarly for I2 and I1.
2291 Show an additional death due to the REG_DEAD note we make here. If
2292 we discard it in distribute_notes, we will decrement it again. */
d0ab8cd3 2293
230d793d 2294 if (i3dest_killed)
1a26b032
RK
2295 {
2296 if (GET_CODE (i3dest_killed) == REG)
2297 reg_n_deaths[REGNO (i3dest_killed)]++;
2298
2299 distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i3dest_killed,
2300 NULL_RTX),
2301 NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
2302 NULL_RTX, NULL_RTX);
2303 }
58c8c593
RK
2304
2305 /* For I2 and I1, we have to be careful. If NEWI2PAT exists and sets
2306 I2DEST or I1DEST, the death must be somewhere before I2, not I3. If
2307 we passed I3 in that case, it might delete I2. */
2308
230d793d 2309 if (i2dest_in_i2src)
58c8c593 2310 {
1a26b032
RK
2311 if (GET_CODE (i2dest) == REG)
2312 reg_n_deaths[REGNO (i2dest)]++;
2313
58c8c593
RK
2314 if (newi2pat && reg_set_p (i2dest, newi2pat))
2315 distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i2dest, NULL_RTX),
2316 NULL_RTX, i2, NULL_RTX, NULL_RTX, NULL_RTX);
2317 else
2318 distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i2dest, NULL_RTX),
2319 NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
2320 NULL_RTX, NULL_RTX);
2321 }
2322
230d793d 2323 if (i1dest_in_i1src)
58c8c593 2324 {
1a26b032
RK
2325 if (GET_CODE (i1dest) == REG)
2326 reg_n_deaths[REGNO (i1dest)]++;
2327
58c8c593
RK
2328 if (newi2pat && reg_set_p (i1dest, newi2pat))
2329 distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i1dest, NULL_RTX),
2330 NULL_RTX, i2, NULL_RTX, NULL_RTX, NULL_RTX);
2331 else
2332 distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i1dest, NULL_RTX),
2333 NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
2334 NULL_RTX, NULL_RTX);
2335 }
230d793d
RS
2336
2337 distribute_links (i3links);
2338 distribute_links (i2links);
2339 distribute_links (i1links);
2340
2341 if (GET_CODE (i2dest) == REG)
2342 {
d0ab8cd3
RK
2343 rtx link;
2344 rtx i2_insn = 0, i2_val = 0, set;
2345
2346 /* The insn that used to set this register doesn't exist, and
2347 this life of the register may not exist either. See if one of
2348 I3's links points to an insn that sets I2DEST. If it does,
2349 that is now the last known value for I2DEST. If we don't update
2350 this and I2 set the register to a value that depended on its old
230d793d
RS
2351 contents, we will get confused. If this insn is used, thing
2352 will be set correctly in combine_instructions. */
d0ab8cd3
RK
2353
2354 for (link = LOG_LINKS (i3); link; link = XEXP (link, 1))
2355 if ((set = single_set (XEXP (link, 0))) != 0
2356 && rtx_equal_p (i2dest, SET_DEST (set)))
2357 i2_insn = XEXP (link, 0), i2_val = SET_SRC (set);
2358
2359 record_value_for_reg (i2dest, i2_insn, i2_val);
230d793d
RS
2360
2361 /* If the reg formerly set in I2 died only once and that was in I3,
2362 zero its use count so it won't make `reload' do any work. */
538fe8cd
ILT
2363 if (! added_sets_2
2364 && (newi2pat == 0 || ! reg_mentioned_p (i2dest, newi2pat))
2365 && ! i2dest_in_i2src)
230d793d
RS
2366 {
2367 regno = REGNO (i2dest);
2368 reg_n_sets[regno]--;
2369 if (reg_n_sets[regno] == 0
5f4f0e22
CH
2370 && ! (basic_block_live_at_start[0][regno / REGSET_ELT_BITS]
2371 & ((REGSET_ELT_TYPE) 1 << (regno % REGSET_ELT_BITS))))
230d793d
RS
2372 reg_n_refs[regno] = 0;
2373 }
2374 }
2375
2376 if (i1 && GET_CODE (i1dest) == REG)
2377 {
d0ab8cd3
RK
2378 rtx link;
2379 rtx i1_insn = 0, i1_val = 0, set;
2380
2381 for (link = LOG_LINKS (i3); link; link = XEXP (link, 1))
2382 if ((set = single_set (XEXP (link, 0))) != 0
2383 && rtx_equal_p (i1dest, SET_DEST (set)))
2384 i1_insn = XEXP (link, 0), i1_val = SET_SRC (set);
2385
2386 record_value_for_reg (i1dest, i1_insn, i1_val);
2387
230d793d 2388 regno = REGNO (i1dest);
5af91171 2389 if (! added_sets_1 && ! i1dest_in_i1src)
230d793d
RS
2390 {
2391 reg_n_sets[regno]--;
2392 if (reg_n_sets[regno] == 0
5f4f0e22
CH
2393 && ! (basic_block_live_at_start[0][regno / REGSET_ELT_BITS]
2394 & ((REGSET_ELT_TYPE) 1 << (regno % REGSET_ELT_BITS))))
230d793d
RS
2395 reg_n_refs[regno] = 0;
2396 }
2397 }
2398
951553af 2399 /* Update reg_nonzero_bits et al for any changes that may have been made
22609cbf
RK
2400 to this insn. */
2401
951553af 2402 note_stores (newpat, set_nonzero_bits_and_sign_copies);
22609cbf 2403 if (newi2pat)
951553af 2404 note_stores (newi2pat, set_nonzero_bits_and_sign_copies);
22609cbf 2405
a29ca9db
RK
2406 /* If we added any (clobber (scratch)), add them to the max for a
2407 block. This is a very pessimistic calculation, since we might
2408 have had them already and this might not be the worst block, but
2409 it's not worth doing any better. */
2410 max_scratch += i3_scratches + i2_scratches + other_scratches;
2411
230d793d
RS
2412 /* If I3 is now an unconditional jump, ensure that it has a
2413 BARRIER following it since it may have initially been a
381ee8af 2414 conditional jump. It may also be the last nonnote insn. */
230d793d
RS
2415
2416 if ((GET_CODE (newpat) == RETURN || simplejump_p (i3))
381ee8af
TW
2417 && ((temp = next_nonnote_insn (i3)) == NULL_RTX
2418 || GET_CODE (temp) != BARRIER))
230d793d
RS
2419 emit_barrier_after (i3);
2420 }
2421
2422 combine_successes++;
2423
bcd49eb7
JW
2424 /* Clear this here, so that subsequent get_last_value calls are not
2425 affected. */
2426 subst_prev_insn = NULL_RTX;
2427
abe6e52f
RK
2428 if (added_links_insn
2429 && (newi2pat == 0 || INSN_CUID (added_links_insn) < INSN_CUID (i2))
2430 && INSN_CUID (added_links_insn) < INSN_CUID (i3))
2431 return added_links_insn;
2432 else
2433 return newi2pat ? i2 : i3;
230d793d
RS
2434}
2435\f
2436/* Undo all the modifications recorded in undobuf. */
2437
2438static void
2439undo_all ()
2440{
241cea85
RK
2441 struct undo *undo, *next;
2442
2443 for (undo = undobuf.undos; undo; undo = next)
7c046e4e 2444 {
241cea85
RK
2445 next = undo->next;
2446 if (undo->is_int)
2447 *undo->where.i = undo->old_contents.i;
7c046e4e 2448 else
241cea85
RK
2449 *undo->where.r = undo->old_contents.r;
2450
2451 undo->next = undobuf.frees;
2452 undobuf.frees = undo;
7c046e4e 2453 }
230d793d
RS
2454
2455 obfree (undobuf.storage);
845fc875 2456 undobuf.undos = undobuf.previous_undos = 0;
bcd49eb7
JW
2457
2458 /* Clear this here, so that subsequent get_last_value calls are not
2459 affected. */
2460 subst_prev_insn = NULL_RTX;
230d793d
RS
2461}
2462\f
/* Find the innermost point within the rtx at LOC, possibly LOC itself,
   where we have an arithmetic expression and return that point.  LOC will
   be inside INSN.

   Return value conventions: 0 means no split point was found; LOC itself
   may be returned when X as a whole is the best split point; otherwise
   the result points at a subexpression of *LOC.  Note that this function
   may modify *LOC in place (via SUBST, so undo_all can revert it) to
   expose a better split point.

   try_combine will call this function to see if an insn can be split into
   two insns.  */

static rtx *
find_split_point (loc, insn)
     rtx *loc;
     rtx insn;
{
  rtx x = *loc;
  enum rtx_code code = GET_CODE (x);
  rtx *split;
  int len = 0, pos, unsignedp;
  rtx inner;

  /* First special-case some codes.  */
  switch (code)
    {
    case SUBREG:
#ifdef INSN_SCHEDULING
      /* If we are making a paradoxical SUBREG invalid, it becomes a split
	 point.  */
      if (GET_CODE (SUBREG_REG (x)) == MEM)
	return loc;
#endif
      return find_split_point (&SUBREG_REG (x), insn);

    case MEM:
#ifdef HAVE_lo_sum
      /* If we have (mem (const ..)) or (mem (symbol_ref ...)), split it
	 using LO_SUM and HIGH.  */
      if (GET_CODE (XEXP (x, 0)) == CONST
	  || GET_CODE (XEXP (x, 0)) == SYMBOL_REF)
	{
	  SUBST (XEXP (x, 0),
		 gen_rtx_combine (LO_SUM, Pmode,
				  gen_rtx_combine (HIGH, Pmode, XEXP (x, 0)),
				  XEXP (x, 0)));
	  return &XEXP (XEXP (x, 0), 0);
	}
#endif

      /* If we have a PLUS whose second operand is a constant and the
	 address is not valid, perhaps we can split it up using
	 the machine-specific way to split large constants.  We use
	 the first pseudo-reg (one of the virtual regs) as a placeholder;
	 it will not remain in the result.  */
      if (GET_CODE (XEXP (x, 0)) == PLUS
	  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
	  && ! memory_address_p (GET_MODE (x), XEXP (x, 0)))
	{
	  rtx reg = regno_reg_rtx[FIRST_PSEUDO_REGISTER];
	  rtx seq = split_insns (gen_rtx (SET, VOIDmode, reg, XEXP (x, 0)),
				 subst_insn);

	  /* This should have produced two insns, each of which sets our
	     placeholder.  If the source of the second is a valid address,
	     we can put both sources together and make a split point
	     in the middle.  */

	  if (seq && XVECLEN (seq, 0) == 2
	      && GET_CODE (XVECEXP (seq, 0, 0)) == INSN
	      && GET_CODE (PATTERN (XVECEXP (seq, 0, 0))) == SET
	      && SET_DEST (PATTERN (XVECEXP (seq, 0, 0))) == reg
	      && ! reg_mentioned_p (reg,
				    SET_SRC (PATTERN (XVECEXP (seq, 0, 0))))
	      && GET_CODE (XVECEXP (seq, 0, 1)) == INSN
	      && GET_CODE (PATTERN (XVECEXP (seq, 0, 1))) == SET
	      && SET_DEST (PATTERN (XVECEXP (seq, 0, 1))) == reg
	      && memory_address_p (GET_MODE (x),
				   SET_SRC (PATTERN (XVECEXP (seq, 0, 1)))))
	    {
	      rtx src1 = SET_SRC (PATTERN (XVECEXP (seq, 0, 0)));
	      rtx src2 = SET_SRC (PATTERN (XVECEXP (seq, 0, 1)));

	      /* Replace the placeholder in SRC2 with SRC1.  If we can
		 find where in SRC2 it was placed, that can become our
		 split point and we can replace this address with SRC2.
		 Just try two obvious places.  */

	      src2 = replace_rtx (src2, reg, src1);
	      split = 0;
	      if (XEXP (src2, 0) == src1)
		split = &XEXP (src2, 0);
	      else if (GET_RTX_FORMAT (GET_CODE (XEXP (src2, 0)))[0] == 'e'
		       && XEXP (XEXP (src2, 0), 0) == src1)
		split = &XEXP (XEXP (src2, 0), 0);

	      if (split)
		{
		  SUBST (XEXP (x, 0), src2);
		  return split;
		}
	    }

	  /* If that didn't work, perhaps the first operand is complex and
	     needs to be computed separately, so make a split point there.
	     This will occur on machines that just support REG + CONST
	     and have a constant moved through some previous computation.  */

	  else if (GET_RTX_CLASS (GET_CODE (XEXP (XEXP (x, 0), 0))) != 'o'
		   && ! (GET_CODE (XEXP (XEXP (x, 0), 0)) == SUBREG
			 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (XEXP (x, 0), 0))))
			     == 'o')))
	    return &XEXP (XEXP (x, 0), 0);
	}
      break;

    case SET:
#ifdef HAVE_cc0
      /* If SET_DEST is CC0 and SET_SRC is not an operand, a COMPARE, or a
	 ZERO_EXTRACT, the most likely reason why this doesn't match is that
	 we need to put the operand into a register.  So split at that
	 point.  */

      if (SET_DEST (x) == cc0_rtx
	  && GET_CODE (SET_SRC (x)) != COMPARE
	  && GET_CODE (SET_SRC (x)) != ZERO_EXTRACT
	  && GET_RTX_CLASS (GET_CODE (SET_SRC (x))) != 'o'
	  && ! (GET_CODE (SET_SRC (x)) == SUBREG
		&& GET_RTX_CLASS (GET_CODE (SUBREG_REG (SET_SRC (x)))) == 'o'))
	return &SET_SRC (x);
#endif

      /* See if we can split SET_SRC as it stands.  */
      split = find_split_point (&SET_SRC (x), insn);
      if (split && split != &SET_SRC (x))
	return split;

      /* See if we can split SET_DEST as it stands.  */
      split = find_split_point (&SET_DEST (x), insn);
      if (split && split != &SET_DEST (x))
	return split;

      /* See if this is a bitfield assignment with everything constant.  If
	 so, this is an IOR of an AND, so split it into that.  */
      if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
	  && (GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
	      <= HOST_BITS_PER_WIDE_INT)
	  && GET_CODE (XEXP (SET_DEST (x), 1)) == CONST_INT
	  && GET_CODE (XEXP (SET_DEST (x), 2)) == CONST_INT
	  && GET_CODE (SET_SRC (x)) == CONST_INT
	  && ((INTVAL (XEXP (SET_DEST (x), 1))
	       + INTVAL (XEXP (SET_DEST (x), 2)))
	      <= GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0))))
	  && ! side_effects_p (XEXP (SET_DEST (x), 0)))
	{
	  /* Operands of the ZERO_EXTRACT destination: bit position,
	     field length, the constant being stored, and the underlying
	     register or memory location.  */
	  int pos = INTVAL (XEXP (SET_DEST (x), 2));
	  int len = INTVAL (XEXP (SET_DEST (x), 1));
	  int src = INTVAL (SET_SRC (x));
	  rtx dest = XEXP (SET_DEST (x), 0);
	  enum machine_mode mode = GET_MODE (dest);
	  unsigned HOST_WIDE_INT mask = ((HOST_WIDE_INT) 1 << len) - 1;

	  if (BITS_BIG_ENDIAN)
	    pos = GET_MODE_BITSIZE (mode) - len - pos;

	  /* If the field is being set to all ones, an AND is unnecessary;
	     a plain IOR of the shifted constant suffices.  */
	  if (src == mask)
	    SUBST (SET_SRC (x),
		   gen_binary (IOR, mode, dest, GEN_INT (src << pos)));
	  else
	    SUBST (SET_SRC (x),
		   gen_binary (IOR, mode,
			       gen_binary (AND, mode, dest,
					   GEN_INT (~ (mask << pos)
						    & GET_MODE_MASK (mode))),
			       GEN_INT (src << pos)));

	  SUBST (SET_DEST (x), dest);

	  split = find_split_point (&SET_SRC (x), insn);
	  if (split && split != &SET_SRC (x))
	    return split;
	}

      /* Otherwise, see if this is an operation that we can split into two.
	 If so, try to split that.  */
      code = GET_CODE (SET_SRC (x));

      switch (code)
	{
	case AND:
	  /* If we are AND'ing with a large constant that is only a single
	     bit and the result is only being used in a context where we
	     need to know if it is zero or non-zero, replace it with a bit
	     extraction.  This will avoid the large constant, which might
	     have taken more than one insn to make.  If the constant were
	     not a valid argument to the AND but took only one insn to make,
	     this is no worse, but if it took more than one insn, it will
	     be better.  */

	  if (GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
	      && GET_CODE (XEXP (SET_SRC (x), 0)) == REG
	      && (pos = exact_log2 (INTVAL (XEXP (SET_SRC (x), 1)))) >= 7
	      && GET_CODE (SET_DEST (x)) == REG
	      && (split = find_single_use (SET_DEST (x), insn, NULL_PTR)) != 0
	      && (GET_CODE (*split) == EQ || GET_CODE (*split) == NE)
	      && XEXP (*split, 0) == SET_DEST (x)
	      && XEXP (*split, 1) == const0_rtx)
	    {
	      rtx extraction = make_extraction (GET_MODE (SET_DEST (x)),
						XEXP (SET_SRC (x), 0),
						pos, NULL_RTX, 1, 1, 0, 0);
	      if (extraction != 0)
		{
		  SUBST (SET_SRC (x), extraction);
		  return find_split_point (loc, insn);
		}
	    }
	  break;

	case NE:
	  /* If STORE_FLAG_VALUE is -1, this is (NE X 0) and only one bit of X
	     is known to be on, this can be converted into a NEG of a shift.  */
	  if (STORE_FLAG_VALUE == -1 && XEXP (SET_SRC (x), 1) == const0_rtx
	      && GET_MODE (SET_SRC (x)) == GET_MODE (XEXP (SET_SRC (x), 0))
	      && 1 <= (pos = exact_log2
		       (nonzero_bits (XEXP (SET_SRC (x), 0),
				      GET_MODE (XEXP (SET_SRC (x), 0))))))
	    {
	      enum machine_mode mode = GET_MODE (XEXP (SET_SRC (x), 0));

	      SUBST (SET_SRC (x),
		     gen_rtx_combine (NEG, mode,
				      gen_rtx_combine (LSHIFTRT, mode,
						       XEXP (SET_SRC (x), 0),
						       GEN_INT (pos))));

	      split = find_split_point (&SET_SRC (x), insn);
	      if (split && split != &SET_SRC (x))
		return split;
	    }
	  break;

	case SIGN_EXTEND:
	  inner = XEXP (SET_SRC (x), 0);
	  pos = 0;
	  len = GET_MODE_BITSIZE (GET_MODE (inner));
	  unsignedp = 0;
	  break;

	case SIGN_EXTRACT:
	case ZERO_EXTRACT:
	  if (GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
	      && GET_CODE (XEXP (SET_SRC (x), 2)) == CONST_INT)
	    {
	      inner = XEXP (SET_SRC (x), 0);
	      len = INTVAL (XEXP (SET_SRC (x), 1));
	      pos = INTVAL (XEXP (SET_SRC (x), 2));

	      if (BITS_BIG_ENDIAN)
		pos = GET_MODE_BITSIZE (GET_MODE (inner)) - len - pos;
	      unsignedp = (code == ZERO_EXTRACT);
	    }
	  break;
	}

      /* LEN is non-zero only if one of the extension/extraction cases
	 above matched; rewrite the extraction as explicit shifts.  */
      if (len && pos >= 0 && pos + len <= GET_MODE_BITSIZE (GET_MODE (inner)))
	{
	  enum machine_mode mode = GET_MODE (SET_SRC (x));

	  /* For unsigned, we have a choice of a shift followed by an
	     AND or two shifts.  Use two shifts for field sizes where the
	     constant might be too large.  We assume here that we can
	     always at least get 8-bit constants in an AND insn, which is
	     true for every current RISC.  */

	  if (unsignedp && len <= 8)
	    {
	      SUBST (SET_SRC (x),
		     gen_rtx_combine
		     (AND, mode,
		      gen_rtx_combine (LSHIFTRT, mode,
				       gen_lowpart_for_combine (mode, inner),
				       GEN_INT (pos)),
		      GEN_INT (((HOST_WIDE_INT) 1 << len) - 1)));

	      split = find_split_point (&SET_SRC (x), insn);
	      if (split && split != &SET_SRC (x))
		return split;
	    }
	  else
	    {
	      SUBST (SET_SRC (x),
		     gen_rtx_combine
		     (unsignedp ? LSHIFTRT : ASHIFTRT, mode,
		      gen_rtx_combine (ASHIFT, mode,
				       gen_lowpart_for_combine (mode, inner),
				       GEN_INT (GET_MODE_BITSIZE (mode)
						- len - pos)),
		      GEN_INT (GET_MODE_BITSIZE (mode) - len)));

	      split = find_split_point (&SET_SRC (x), insn);
	      if (split && split != &SET_SRC (x))
		return split;
	    }
	}

      /* See if this is a simple operation with a constant as the second
	 operand.  It might be that this constant is out of range and hence
	 could be used as a split point.  */
      if ((GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '2'
	   || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == 'c'
	   || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '<')
	  && CONSTANT_P (XEXP (SET_SRC (x), 1))
	  && (GET_RTX_CLASS (GET_CODE (XEXP (SET_SRC (x), 0))) == 'o'
	      || (GET_CODE (XEXP (SET_SRC (x), 0)) == SUBREG
		  && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (SET_SRC (x), 0))))
		      == 'o'))))
	return &XEXP (SET_SRC (x), 1);

      /* Finally, see if this is a simple operation with its first operand
	 not in a register.  The operation might require this operand in a
	 register, so return it as a split point.  We can always do this
	 because if the first operand were another operation, we would have
	 already found it as a split point.  */
      if ((GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '2'
	   || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == 'c'
	   || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '<'
	   || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '1')
	  && ! register_operand (XEXP (SET_SRC (x), 0), VOIDmode))
	return &XEXP (SET_SRC (x), 0);

      return 0;

    case AND:
    case IOR:
      /* We write NOR as (and (not A) (not B)), but if we don't have a NOR,
	 it is better to write this as (not (ior A B)) so we can split it.
	 Similarly for IOR.  */
      if (GET_CODE (XEXP (x, 0)) == NOT && GET_CODE (XEXP (x, 1)) == NOT)
	{
	  SUBST (*loc,
		 gen_rtx_combine (NOT, GET_MODE (x),
				  gen_rtx_combine (code == IOR ? AND : IOR,
						   GET_MODE (x),
						   XEXP (XEXP (x, 0), 0),
						   XEXP (XEXP (x, 1), 0))));
	  return find_split_point (loc, insn);
	}

      /* Many RISC machines have a large set of logical insns.  If the
	 second operand is a NOT, put it first so we will try to split the
	 other operand first.  */
      if (GET_CODE (XEXP (x, 1)) == NOT)
	{
	  rtx tem = XEXP (x, 0);
	  SUBST (XEXP (x, 0), XEXP (x, 1));
	  SUBST (XEXP (x, 1), tem);
	}
      break;
    }

  /* Otherwise, select our actions depending on our rtx class.
     Operands are tried last-to-first, falling through so that lower
     operand indices are tried as well.  */
  switch (GET_RTX_CLASS (code))
    {
    case 'b':			/* This is ZERO_EXTRACT and SIGN_EXTRACT.  */
    case '3':
      split = find_split_point (&XEXP (x, 2), insn);
      if (split)
	return split;
      /* ... fall through ... */
    case '2':
    case 'c':
    case '<':
      split = find_split_point (&XEXP (x, 1), insn);
      if (split)
	return split;
      /* ... fall through ... */
    case '1':
      /* Some machines have (and (shift ...) ...) insns.  If X is not
	 an AND, but XEXP (X, 0) is, use it as our split point.  */
      if (GET_CODE (x) != AND && GET_CODE (XEXP (x, 0)) == AND)
	return &XEXP (x, 0);

      split = find_split_point (&XEXP (x, 0), insn);
      if (split)
	return split;
      return loc;
    }

  /* Otherwise, we don't have a split point.  */
  return 0;
}
2850\f
/* Throughout X, replace FROM with TO, and return the result.
   The result is TO if X is FROM;
   otherwise the result is X, but its contents may have been modified.
   If they were modified, a record was made in undobuf so that
   undo_all will (among other things) return X to its original state.

   If the number of changes necessary is too much to record to undo,
   the excess changes are not made, so the result is invalid.
   The changes already made can still be undone.
   undobuf.num_undo is incremented for such changes, so by testing that
   the caller can tell whether the result is valid.

   A (clobber (const_int 0)) result means the combination must be
   rejected; it is propagated up from recursive calls unchanged.

   `n_occurrences' is incremented each time FROM is replaced.

   IN_DEST is non-zero if we are processing the SET_DEST of a SET.

   UNIQUE_COPY is non-zero if each substitution must be unique.  We do this
   by copying if `n_occurrences' is non-zero.  */

static rtx
subst (x, from, to, in_dest, unique_copy)
     register rtx x, from, to;
     int in_dest;
     int unique_copy;
{
  register enum rtx_code code = GET_CODE (x);
  enum machine_mode op0_mode = VOIDmode;
  register char *fmt;
  register int len, i;
  rtx new;

/* Two expressions are equal if they are identical copies of a shared
   RTX or if they are both registers with the same register number
   and mode.  */

#define COMBINE_RTX_EQUAL_P(X,Y)			\
  ((X) == (Y)						\
   || (GET_CODE (X) == REG && GET_CODE (Y) == REG	\
       && REGNO (X) == REGNO (Y) && GET_MODE (X) == GET_MODE (Y)))

  if (! in_dest && COMBINE_RTX_EQUAL_P (x, from))
    {
      n_occurrences++;
      return (unique_copy && n_occurrences > 1 ? copy_rtx (to) : to);
    }

  /* If X and FROM are the same register but different modes, they will
     not have been seen as equal above.  However, flow.c will make a
     LOG_LINKS entry for that case.  If we do nothing, we will try to
     rerecognize our original insn and, when it succeeds, we will
     delete the feeding insn, which is incorrect.

     So force this insn not to match in this (rare) case.  */
  if (! in_dest && code == REG && GET_CODE (from) == REG
      && REGNO (x) == REGNO (from))
    return gen_rtx (CLOBBER, GET_MODE (x), const0_rtx);

  /* If this is an object, we are done unless it is a MEM or LO_SUM, both
     of which may contain things that can be combined.  */
  if (code != MEM && code != LO_SUM && GET_RTX_CLASS (code) == 'o')
    return x;

  /* It is possible to have a subexpression appear twice in the insn.
     Suppose that FROM is a register that appears within TO.
     Then, after that subexpression has been scanned once by `subst',
     the second time it is scanned, TO may be found.  If we were
     to scan TO here, we would find FROM within it and create a
     self-referent rtl structure which is completely wrong.  */
  if (COMBINE_RTX_EQUAL_P (x, to))
    return to;

  len = GET_RTX_LENGTH (code);
  fmt = GET_RTX_FORMAT (code);

  /* We don't need to process a SET_DEST that is a register, CC0, or PC, so
     set up to skip this common case.  All other cases where we want to
     suppress replacing something inside a SET_SRC are handled via the
     IN_DEST operand.  */
  if (code == SET
      && (GET_CODE (SET_DEST (x)) == REG
	  || GET_CODE (SET_DEST (x)) == CC0
	  || GET_CODE (SET_DEST (x)) == PC))
    fmt = "ie";

  /* Get the mode of operand 0 in case X is now a SIGN_EXTEND of a
     constant.  */
  if (fmt[0] == 'e')
    op0_mode = GET_MODE (XEXP (x, 0));

  /* Walk every operand of X, substituting recursively.  'E' operands
     are rtx vectors; 'e' operands are single subexpressions.  */
  for (i = 0; i < len; i++)
    {
      if (fmt[i] == 'E')
	{
	  register int j;
	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    {
	      if (COMBINE_RTX_EQUAL_P (XVECEXP (x, i, j), from))
		{
		  new = (unique_copy && n_occurrences ? copy_rtx (to) : to);
		  n_occurrences++;
		}
	      else
		{
		  new = subst (XVECEXP (x, i, j), from, to, 0, unique_copy);

		  /* If this substitution failed, this whole thing fails.  */
		  if (GET_CODE (new) == CLOBBER && XEXP (new, 0) == const0_rtx)
		    return new;
		}

	      SUBST (XVECEXP (x, i, j), new);
	    }
	}
      else if (fmt[i] == 'e')
	{
	  if (COMBINE_RTX_EQUAL_P (XEXP (x, i), from))
	    {
	      /* In general, don't install a subreg involving two modes not
		 tieable.  It can worsen register allocation, and can even
		 make invalid reload insns, since the reg inside may need to
		 be copied from in the outside mode, and that may be invalid
		 if it is an fp reg copied in integer mode.

		 We allow two exceptions to this:  It is valid if it is inside
		 another SUBREG and the mode of that SUBREG and the mode of
		 the inside of TO is tieable and it is valid if X is a SET
		 that copies FROM to CC0.  */
	      if (GET_CODE (to) == SUBREG
		  && ! MODES_TIEABLE_P (GET_MODE (to),
					GET_MODE (SUBREG_REG (to)))
		  && ! (code == SUBREG
			&& MODES_TIEABLE_P (GET_MODE (x),
					    GET_MODE (SUBREG_REG (to))))
#ifdef HAVE_cc0
		  && ! (code == SET && i == 1 && XEXP (x, 0) == cc0_rtx)
#endif
		  )
		return gen_rtx (CLOBBER, VOIDmode, const0_rtx);

	      new = (unique_copy && n_occurrences ? copy_rtx (to) : to);
	      n_occurrences++;
	    }
	  else
	    /* If we are in a SET_DEST, suppress most cases unless we
	       have gone inside a MEM, in which case we want to
	       simplify the address.  We assume here that things that
	       are actually part of the destination have their inner
	       parts in the first expression.  This is true for SUBREG,
	       STRICT_LOW_PART, and ZERO_EXTRACT, which are the only
	       things aside from REG and MEM that should appear in a
	       SET_DEST.  */
	    new = subst (XEXP (x, i), from, to,
			 (((in_dest
			    && (code == SUBREG || code == STRICT_LOW_PART
				|| code == ZERO_EXTRACT))
			   || code == SET)
			  && i == 0), unique_copy);

	  /* If we found that we will have to reject this combination,
	     indicate that by returning the CLOBBER ourselves, rather than
	     an expression containing it.  This will speed things up as
	     well as prevent accidents where two CLOBBERs are considered
	     to be equal, thus producing an incorrect simplification.  */

	  if (GET_CODE (new) == CLOBBER && XEXP (new, 0) == const0_rtx)
	    return new;

	  SUBST (XEXP (x, i), new);
	}
    }

  /* Try to simplify X.  If the simplification changed the code, it is likely
     that further simplification will help, so loop, but limit the number
     of repetitions that will be performed.  */

  for (i = 0; i < 4; i++)
    {
      /* If X is sufficiently simple, don't bother trying to do anything
	 with it.  */
      if (code != CONST_INT && code != REG && code != CLOBBER)
	x = simplify_rtx (x, op0_mode, i == 3, in_dest);

      if (GET_CODE (x) == code)
	break;

      code = GET_CODE (x);

      /* We no longer know the original mode of operand 0 since we
	 have changed the form of X.  */
      op0_mode = VOIDmode;
    }

  return x;
}
3045\f
3046/* Simplify X, a piece of RTL. We just operate on the expression at the
3047 outer level; call `subst' to simplify recursively. Return the new
3048 expression.
3049
3050 OP0_MODE is the original mode of XEXP (x, 0); LAST is nonzero if this
3051 will be the iteration even if an expression with a code different from
3052 X is returned; IN_DEST is nonzero if we are inside a SET_DEST. */
eeb43d32 3053
8079805d
RK
3054static rtx
3055simplify_rtx (x, op0_mode, last, in_dest)
3056 rtx x;
3057 enum machine_mode op0_mode;
3058 int last;
3059 int in_dest;
3060{
3061 enum rtx_code code = GET_CODE (x);
3062 enum machine_mode mode = GET_MODE (x);
3063 rtx temp;
3064 int i;
d0ab8cd3 3065
230d793d
RS
3066 /* If this is a commutative operation, put a constant last and a complex
3067 expression first. We don't need to do this for comparisons here. */
3068 if (GET_RTX_CLASS (code) == 'c'
3069 && ((CONSTANT_P (XEXP (x, 0)) && GET_CODE (XEXP (x, 1)) != CONST_INT)
3070 || (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == 'o'
3071 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) != 'o')
3072 || (GET_CODE (XEXP (x, 0)) == SUBREG
3073 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0)))) == 'o'
3074 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) != 'o')))
3075 {
3076 temp = XEXP (x, 0);
3077 SUBST (XEXP (x, 0), XEXP (x, 1));
3078 SUBST (XEXP (x, 1), temp);
3079 }
3080
22609cbf
RK
3081 /* If this is a PLUS, MINUS, or MULT, and the first operand is the
3082 sign extension of a PLUS with a constant, reverse the order of the sign
3083 extension and the addition. Note that this not the same as the original
3084 code, but overflow is undefined for signed values. Also note that the
3085 PLUS will have been partially moved "inside" the sign-extension, so that
3086 the first operand of X will really look like:
3087 (ashiftrt (plus (ashift A C4) C5) C4).
3088 We convert this to
3089 (plus (ashiftrt (ashift A C4) C2) C4)
3090 and replace the first operand of X with that expression. Later parts
3091 of this function may simplify the expression further.
3092
3093 For example, if we start with (mult (sign_extend (plus A C1)) C2),
3094 we swap the SIGN_EXTEND and PLUS. Later code will apply the
3095 distributive law to produce (plus (mult (sign_extend X) C1) C3).
3096
3097 We do this to simplify address expressions. */
3098
3099 if ((code == PLUS || code == MINUS || code == MULT)
3100 && GET_CODE (XEXP (x, 0)) == ASHIFTRT
3101 && GET_CODE (XEXP (XEXP (x, 0), 0)) == PLUS
3102 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == ASHIFT
3103 && GET_CODE (XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 1)) == CONST_INT
3104 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3105 && XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 1) == XEXP (XEXP (x, 0), 1)
3106 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
3107 && (temp = simplify_binary_operation (ASHIFTRT, mode,
3108 XEXP (XEXP (XEXP (x, 0), 0), 1),
3109 XEXP (XEXP (x, 0), 1))) != 0)
3110 {
3111 rtx new
3112 = simplify_shift_const (NULL_RTX, ASHIFT, mode,
3113 XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 0),
3114 INTVAL (XEXP (XEXP (x, 0), 1)));
3115
3116 new = simplify_shift_const (NULL_RTX, ASHIFTRT, mode, new,
3117 INTVAL (XEXP (XEXP (x, 0), 1)));
3118
3119 SUBST (XEXP (x, 0), gen_binary (PLUS, mode, new, temp));
3120 }
3121
d0ab8cd3
RK
3122 /* If this is a simple operation applied to an IF_THEN_ELSE, try
3123 applying it to the arms of the IF_THEN_ELSE. This often simplifies
abe6e52f
RK
3124 things. Check for cases where both arms are testing the same
3125 condition.
3126
3127 Don't do anything if all operands are very simple. */
3128
3129 if (((GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c'
3130 || GET_RTX_CLASS (code) == '<')
3131 && ((GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) != 'o'
3132 && ! (GET_CODE (XEXP (x, 0)) == SUBREG
3133 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0))))
3134 == 'o')))
3135 || (GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) != 'o'
3136 && ! (GET_CODE (XEXP (x, 1)) == SUBREG
3137 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 1))))
3138 == 'o')))))
3139 || (GET_RTX_CLASS (code) == '1'
3140 && ((GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) != 'o'
3141 && ! (GET_CODE (XEXP (x, 0)) == SUBREG
3142 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0))))
3143 == 'o'))))))
d0ab8cd3 3144 {
abe6e52f
RK
3145 rtx cond, true, false;
3146
3147 cond = if_then_else_cond (x, &true, &false);
0802d516
RK
3148 if (cond != 0
3149 /* If everything is a comparison, what we have is highly unlikely
3150 to be simpler, so don't use it. */
3151 && ! (GET_RTX_CLASS (code) == '<'
3152 && (GET_RTX_CLASS (GET_CODE (true)) == '<'
3153 || GET_RTX_CLASS (GET_CODE (false)) == '<')))
abe6e52f
RK
3154 {
3155 rtx cop1 = const0_rtx;
3156 enum rtx_code cond_code = simplify_comparison (NE, &cond, &cop1);
3157
15448afc
RK
3158 if (cond_code == NE && GET_RTX_CLASS (GET_CODE (cond)) == '<')
3159 return x;
3160
9210df58
RK
3161 /* Simplify the alternative arms; this may collapse the true and
3162 false arms to store-flag values. */
3163 true = subst (true, pc_rtx, pc_rtx, 0, 0);
3164 false = subst (false, pc_rtx, pc_rtx, 0, 0);
3165
3166 /* Restarting if we generate a store-flag expression will cause
3167 us to loop. Just drop through in this case. */
3168
abe6e52f
RK
3169 /* If the result values are STORE_FLAG_VALUE and zero, we can
3170 just make the comparison operation. */
3171 if (true == const_true_rtx && false == const0_rtx)
3172 x = gen_binary (cond_code, mode, cond, cop1);
3173 else if (true == const0_rtx && false == const_true_rtx)
3174 x = gen_binary (reverse_condition (cond_code), mode, cond, cop1);
3175
3176 /* Likewise, we can make the negate of a comparison operation
3177 if the result values are - STORE_FLAG_VALUE and zero. */
3178 else if (GET_CODE (true) == CONST_INT
3179 && INTVAL (true) == - STORE_FLAG_VALUE
3180 && false == const0_rtx)
0c1c8ea6 3181 x = gen_unary (NEG, mode, mode,
abe6e52f
RK
3182 gen_binary (cond_code, mode, cond, cop1));
3183 else if (GET_CODE (false) == CONST_INT
3184 && INTVAL (false) == - STORE_FLAG_VALUE
3185 && true == const0_rtx)
0c1c8ea6 3186 x = gen_unary (NEG, mode, mode,
abe6e52f
RK
3187 gen_binary (reverse_condition (cond_code),
3188 mode, cond, cop1));
3189 else
8079805d
RK
3190 return gen_rtx (IF_THEN_ELSE, mode,
3191 gen_binary (cond_code, VOIDmode, cond, cop1),
3192 true, false);
5109d49f 3193
9210df58
RK
3194 code = GET_CODE (x);
3195 op0_mode = VOIDmode;
abe6e52f 3196 }
d0ab8cd3
RK
3197 }
3198
230d793d
RS
3199 /* Try to fold this expression in case we have constants that weren't
3200 present before. */
3201 temp = 0;
3202 switch (GET_RTX_CLASS (code))
3203 {
3204 case '1':
3205 temp = simplify_unary_operation (code, mode, XEXP (x, 0), op0_mode);
3206 break;
3207 case '<':
3208 temp = simplify_relational_operation (code, op0_mode,
3209 XEXP (x, 0), XEXP (x, 1));
77fa0940
RK
3210#ifdef FLOAT_STORE_FLAG_VALUE
3211 if (temp != 0 && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
3212 temp = ((temp == const0_rtx) ? CONST0_RTX (GET_MODE (x))
3213 : immed_real_const_1 (FLOAT_STORE_FLAG_VALUE, GET_MODE (x)));
3214#endif
230d793d
RS
3215 break;
3216 case 'c':
3217 case '2':
3218 temp = simplify_binary_operation (code, mode, XEXP (x, 0), XEXP (x, 1));
3219 break;
3220 case 'b':
3221 case '3':
3222 temp = simplify_ternary_operation (code, mode, op0_mode, XEXP (x, 0),
3223 XEXP (x, 1), XEXP (x, 2));
3224 break;
3225 }
3226
3227 if (temp)
d0ab8cd3 3228 x = temp, code = GET_CODE (temp);
230d793d 3229
230d793d 3230 /* First see if we can apply the inverse distributive law. */
224eeff2
RK
3231 if (code == PLUS || code == MINUS
3232 || code == AND || code == IOR || code == XOR)
230d793d
RS
3233 {
3234 x = apply_distributive_law (x);
3235 code = GET_CODE (x);
3236 }
3237
3238 /* If CODE is an associative operation not otherwise handled, see if we
3239 can associate some operands. This can win if they are constants or
3240 if they are logically related (i.e. (a & b) & a. */
3241 if ((code == PLUS || code == MINUS
3242 || code == MULT || code == AND || code == IOR || code == XOR
3243 || code == DIV || code == UDIV
3244 || code == SMAX || code == SMIN || code == UMAX || code == UMIN)
3ad2180a 3245 && INTEGRAL_MODE_P (mode))
230d793d
RS
3246 {
3247 if (GET_CODE (XEXP (x, 0)) == code)
3248 {
3249 rtx other = XEXP (XEXP (x, 0), 0);
3250 rtx inner_op0 = XEXP (XEXP (x, 0), 1);
3251 rtx inner_op1 = XEXP (x, 1);
3252 rtx inner;
3253
3254 /* Make sure we pass the constant operand if any as the second
3255 one if this is a commutative operation. */
3256 if (CONSTANT_P (inner_op0) && GET_RTX_CLASS (code) == 'c')
3257 {
3258 rtx tem = inner_op0;
3259 inner_op0 = inner_op1;
3260 inner_op1 = tem;
3261 }
3262 inner = simplify_binary_operation (code == MINUS ? PLUS
3263 : code == DIV ? MULT
3264 : code == UDIV ? MULT
3265 : code,
3266 mode, inner_op0, inner_op1);
3267
3268 /* For commutative operations, try the other pair if that one
3269 didn't simplify. */
3270 if (inner == 0 && GET_RTX_CLASS (code) == 'c')
3271 {
3272 other = XEXP (XEXP (x, 0), 1);
3273 inner = simplify_binary_operation (code, mode,
3274 XEXP (XEXP (x, 0), 0),
3275 XEXP (x, 1));
3276 }
3277
3278 if (inner)
8079805d 3279 return gen_binary (code, mode, other, inner);
230d793d
RS
3280 }
3281 }
3282
3283 /* A little bit of algebraic simplification here. */
3284 switch (code)
3285 {
3286 case MEM:
3287 /* Ensure that our address has any ASHIFTs converted to MULT in case
3288 address-recognizing predicates are called later. */
3289 temp = make_compound_operation (XEXP (x, 0), MEM);
3290 SUBST (XEXP (x, 0), temp);
3291 break;
3292
3293 case SUBREG:
3294 /* (subreg:A (mem:B X) N) becomes a modified MEM unless the SUBREG
3295 is paradoxical. If we can't do that safely, then it becomes
3296 something nonsensical so that this combination won't take place. */
3297
3298 if (GET_CODE (SUBREG_REG (x)) == MEM
3299 && (GET_MODE_SIZE (mode)
3300 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
3301 {
3302 rtx inner = SUBREG_REG (x);
3303 int endian_offset = 0;
3304 /* Don't change the mode of the MEM
3305 if that would change the meaning of the address. */
3306 if (MEM_VOLATILE_P (SUBREG_REG (x))
3307 || mode_dependent_address_p (XEXP (inner, 0)))
3308 return gen_rtx (CLOBBER, mode, const0_rtx);
3309
f76b9db2
ILT
3310 if (BYTES_BIG_ENDIAN)
3311 {
3312 if (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
3313 endian_offset += UNITS_PER_WORD - GET_MODE_SIZE (mode);
3314 if (GET_MODE_SIZE (GET_MODE (inner)) < UNITS_PER_WORD)
3315 endian_offset -= (UNITS_PER_WORD
3316 - GET_MODE_SIZE (GET_MODE (inner)));
3317 }
230d793d
RS
3318 /* Note if the plus_constant doesn't make a valid address
3319 then this combination won't be accepted. */
3320 x = gen_rtx (MEM, mode,
3321 plus_constant (XEXP (inner, 0),
3322 (SUBREG_WORD (x) * UNITS_PER_WORD
3323 + endian_offset)));
3324 MEM_VOLATILE_P (x) = MEM_VOLATILE_P (inner);
3325 RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (inner);
3326 MEM_IN_STRUCT_P (x) = MEM_IN_STRUCT_P (inner);
3327 return x;
3328 }
3329
3330 /* If we are in a SET_DEST, these other cases can't apply. */
3331 if (in_dest)
3332 return x;
3333
3334 /* Changing mode twice with SUBREG => just change it once,
3335 or not at all if changing back to starting mode. */
3336 if (GET_CODE (SUBREG_REG (x)) == SUBREG)
3337 {
3338 if (mode == GET_MODE (SUBREG_REG (SUBREG_REG (x)))
3339 && SUBREG_WORD (x) == 0 && SUBREG_WORD (SUBREG_REG (x)) == 0)
3340 return SUBREG_REG (SUBREG_REG (x));
3341
3342 SUBST_INT (SUBREG_WORD (x),
3343 SUBREG_WORD (x) + SUBREG_WORD (SUBREG_REG (x)));
3344 SUBST (SUBREG_REG (x), SUBREG_REG (SUBREG_REG (x)));
3345 }
3346
3347 /* SUBREG of a hard register => just change the register number
3348 and/or mode. If the hard register is not valid in that mode,
26ecfc76
RK
3349 suppress this combination. If the hard register is the stack,
3350 frame, or argument pointer, leave this as a SUBREG. */
230d793d
RS
3351
3352 if (GET_CODE (SUBREG_REG (x)) == REG
26ecfc76
RK
3353 && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER
3354 && REGNO (SUBREG_REG (x)) != FRAME_POINTER_REGNUM
6d7096b0
DE
3355#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3356 && REGNO (SUBREG_REG (x)) != HARD_FRAME_POINTER_REGNUM
3357#endif
26ecfc76
RK
3358#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3359 && REGNO (SUBREG_REG (x)) != ARG_POINTER_REGNUM
3360#endif
3361 && REGNO (SUBREG_REG (x)) != STACK_POINTER_REGNUM)
230d793d
RS
3362 {
3363 if (HARD_REGNO_MODE_OK (REGNO (SUBREG_REG (x)) + SUBREG_WORD (x),
3364 mode))
3365 return gen_rtx (REG, mode,
3366 REGNO (SUBREG_REG (x)) + SUBREG_WORD (x));
3367 else
3368 return gen_rtx (CLOBBER, mode, const0_rtx);
3369 }
3370
3371 /* For a constant, try to pick up the part we want. Handle a full
a4bde0b1
RK
3372 word and low-order part. Only do this if we are narrowing
3373 the constant; if it is being widened, we have no idea what
3374 the extra bits will have been set to. */
230d793d
RS
3375
3376 if (CONSTANT_P (SUBREG_REG (x)) && op0_mode != VOIDmode
3377 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
a4bde0b1 3378 && GET_MODE_SIZE (op0_mode) < UNITS_PER_WORD
230d793d
RS
3379 && GET_MODE_CLASS (mode) == MODE_INT)
3380 {
3381 temp = operand_subword (SUBREG_REG (x), SUBREG_WORD (x),
5f4f0e22 3382 0, op0_mode);
230d793d
RS
3383 if (temp)
3384 return temp;
3385 }
3386
19808e22
RS
3387 /* If we want a subreg of a constant, at offset 0,
3388 take the low bits. On a little-endian machine, that's
3389 always valid. On a big-endian machine, it's valid
3390 only if the constant's mode fits in one word. */
a4bde0b1 3391 if (CONSTANT_P (SUBREG_REG (x)) && subreg_lowpart_p (x)
f82da7d2 3392 && GET_MODE_SIZE (mode) <= GET_MODE_SIZE (op0_mode)
f76b9db2
ILT
3393 && (! WORDS_BIG_ENDIAN
3394 || GET_MODE_BITSIZE (op0_mode) <= BITS_PER_WORD))
230d793d
RS
3395 return gen_lowpart_for_combine (mode, SUBREG_REG (x));
3396
b65c1b5b
RK
3397 /* A paradoxical SUBREG of a VOIDmode constant is the same constant,
3398 since we are saying that the high bits don't matter. */
3399 if (CONSTANT_P (SUBREG_REG (x)) && GET_MODE (SUBREG_REG (x)) == VOIDmode
3400 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (op0_mode))
3401 return SUBREG_REG (x);
3402
87e3e0c1
RK
3403 /* Note that we cannot do any narrowing for non-constants since
3404 we might have been counting on using the fact that some bits were
3405 zero. We now do this in the SET. */
3406
230d793d
RS
3407 break;
3408
3409 case NOT:
3410 /* (not (plus X -1)) can become (neg X). */
3411 if (GET_CODE (XEXP (x, 0)) == PLUS
3412 && XEXP (XEXP (x, 0), 1) == constm1_rtx)
8079805d 3413 return gen_rtx_combine (NEG, mode, XEXP (XEXP (x, 0), 0));
230d793d
RS
3414
3415 /* Similarly, (not (neg X)) is (plus X -1). */
3416 if (GET_CODE (XEXP (x, 0)) == NEG)
8079805d
RK
3417 return gen_rtx_combine (PLUS, mode, XEXP (XEXP (x, 0), 0),
3418 constm1_rtx);
230d793d 3419
d0ab8cd3
RK
3420 /* (not (xor X C)) for C constant is (xor X D) with D = ~ C. */
3421 if (GET_CODE (XEXP (x, 0)) == XOR
3422 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3423 && (temp = simplify_unary_operation (NOT, mode,
3424 XEXP (XEXP (x, 0), 1),
3425 mode)) != 0)
787745f5 3426 return gen_binary (XOR, mode, XEXP (XEXP (x, 0), 0), temp);
d0ab8cd3 3427
230d793d
RS
3428 /* (not (ashift 1 X)) is (rotate ~1 X). We used to do this for operands
3429 other than 1, but that is not valid. We could do a similar
3430 simplification for (not (lshiftrt C X)) where C is just the sign bit,
3431 but this doesn't seem common enough to bother with. */
3432 if (GET_CODE (XEXP (x, 0)) == ASHIFT
3433 && XEXP (XEXP (x, 0), 0) == const1_rtx)
0c1c8ea6 3434 return gen_rtx (ROTATE, mode, gen_unary (NOT, mode, mode, const1_rtx),
8079805d 3435 XEXP (XEXP (x, 0), 1));
230d793d
RS
3436
3437 if (GET_CODE (XEXP (x, 0)) == SUBREG
3438 && subreg_lowpart_p (XEXP (x, 0))
3439 && (GET_MODE_SIZE (GET_MODE (XEXP (x, 0)))
3440 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (x, 0)))))
3441 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == ASHIFT
3442 && XEXP (SUBREG_REG (XEXP (x, 0)), 0) == const1_rtx)
3443 {
3444 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (XEXP (x, 0)));
3445
3446 x = gen_rtx (ROTATE, inner_mode,
0c1c8ea6 3447 gen_unary (NOT, inner_mode, inner_mode, const1_rtx),
230d793d 3448 XEXP (SUBREG_REG (XEXP (x, 0)), 1));
8079805d 3449 return gen_lowpart_for_combine (mode, x);
230d793d
RS
3450 }
3451
0802d516
RK
3452 /* If STORE_FLAG_VALUE is -1, (not (comparison foo bar)) can be done by
3453 reversing the comparison code if valid. */
3454 if (STORE_FLAG_VALUE == -1
3455 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
230d793d
RS
3456 && reversible_comparison_p (XEXP (x, 0)))
3457 return gen_rtx_combine (reverse_condition (GET_CODE (XEXP (x, 0))),
3458 mode, XEXP (XEXP (x, 0), 0),
3459 XEXP (XEXP (x, 0), 1));
500c518b
RK
3460
3461 /* (ashiftrt foo C) where C is the number of bits in FOO minus 1
0802d516
RK
3462 is (lt foo (const_int 0)) if STORE_FLAG_VALUE is -1, so we can
3463 perform the above simplification. */
500c518b 3464
0802d516
RK
3465 if (STORE_FLAG_VALUE == -1
3466 && XEXP (x, 1) == const1_rtx
500c518b
RK
3467 && GET_CODE (XEXP (x, 0)) == ASHIFTRT
3468 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3469 && INTVAL (XEXP (XEXP (x, 0), 1)) == GET_MODE_BITSIZE (mode) - 1)
3470 return gen_rtx_combine (GE, mode, XEXP (XEXP (x, 0), 0), const0_rtx);
230d793d
RS
3471
3472 /* Apply De Morgan's laws to reduce number of patterns for machines
3473 with negating logical insns (and-not, nand, etc.). If result has
3474 only one NOT, put it first, since that is how the patterns are
3475 coded. */
3476
3477 if (GET_CODE (XEXP (x, 0)) == IOR || GET_CODE (XEXP (x, 0)) == AND)
3478 {
3479 rtx in1 = XEXP (XEXP (x, 0), 0), in2 = XEXP (XEXP (x, 0), 1);
3480
3481 if (GET_CODE (in1) == NOT)
3482 in1 = XEXP (in1, 0);
3483 else
3484 in1 = gen_rtx_combine (NOT, GET_MODE (in1), in1);
3485
3486 if (GET_CODE (in2) == NOT)
3487 in2 = XEXP (in2, 0);
3488 else if (GET_CODE (in2) == CONST_INT
5f4f0e22
CH
3489 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3490 in2 = GEN_INT (GET_MODE_MASK (mode) & ~ INTVAL (in2));
230d793d
RS
3491 else
3492 in2 = gen_rtx_combine (NOT, GET_MODE (in2), in2);
3493
3494 if (GET_CODE (in2) == NOT)
3495 {
3496 rtx tem = in2;
3497 in2 = in1; in1 = tem;
3498 }
3499
8079805d
RK
3500 return gen_rtx_combine (GET_CODE (XEXP (x, 0)) == IOR ? AND : IOR,
3501 mode, in1, in2);
230d793d
RS
3502 }
3503 break;
3504
3505 case NEG:
3506 /* (neg (plus X 1)) can become (not X). */
3507 if (GET_CODE (XEXP (x, 0)) == PLUS
3508 && XEXP (XEXP (x, 0), 1) == const1_rtx)
8079805d 3509 return gen_rtx_combine (NOT, mode, XEXP (XEXP (x, 0), 0));
230d793d
RS
3510
3511 /* Similarly, (neg (not X)) is (plus X 1). */
3512 if (GET_CODE (XEXP (x, 0)) == NOT)
8079805d 3513 return plus_constant (XEXP (XEXP (x, 0), 0), 1);
230d793d 3514
230d793d
RS
3515 /* (neg (minus X Y)) can become (minus Y X). */
3516 if (GET_CODE (XEXP (x, 0)) == MINUS
3ad2180a 3517 && (! FLOAT_MODE_P (mode)
0f41302f 3518 /* x-y != -(y-x) with IEEE floating point. */
7e2a0d8e
RK
3519 || TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3520 || flag_fast_math))
8079805d
RK
3521 return gen_binary (MINUS, mode, XEXP (XEXP (x, 0), 1),
3522 XEXP (XEXP (x, 0), 0));
230d793d 3523
0f41302f 3524 /* (neg (xor A 1)) is (plus A -1) if A is known to be either 0 or 1. */
d0ab8cd3 3525 if (GET_CODE (XEXP (x, 0)) == XOR && XEXP (XEXP (x, 0), 1) == const1_rtx
951553af 3526 && nonzero_bits (XEXP (XEXP (x, 0), 0), mode) == 1)
8079805d 3527 return gen_binary (PLUS, mode, XEXP (XEXP (x, 0), 0), constm1_rtx);
d0ab8cd3 3528
230d793d
RS
3529 /* NEG commutes with ASHIFT since it is multiplication. Only do this
3530 if we can then eliminate the NEG (e.g.,
3531 if the operand is a constant). */
3532
3533 if (GET_CODE (XEXP (x, 0)) == ASHIFT)
3534 {
3535 temp = simplify_unary_operation (NEG, mode,
3536 XEXP (XEXP (x, 0), 0), mode);
3537 if (temp)
3538 {
3539 SUBST (XEXP (XEXP (x, 0), 0), temp);
3540 return XEXP (x, 0);
3541 }
3542 }
3543
3544 temp = expand_compound_operation (XEXP (x, 0));
3545
3546 /* For C equal to the width of MODE minus 1, (neg (ashiftrt X C)) can be
3547 replaced by (lshiftrt X C). This will convert
3548 (neg (sign_extract X 1 Y)) to (zero_extract X 1 Y). */
3549
3550 if (GET_CODE (temp) == ASHIFTRT
3551 && GET_CODE (XEXP (temp, 1)) == CONST_INT
3552 && INTVAL (XEXP (temp, 1)) == GET_MODE_BITSIZE (mode) - 1)
8079805d
RK
3553 return simplify_shift_const (temp, LSHIFTRT, mode, XEXP (temp, 0),
3554 INTVAL (XEXP (temp, 1)));
230d793d 3555
951553af 3556 /* If X has only a single bit that might be nonzero, say, bit I, convert
230d793d
RS
3557 (neg X) to (ashiftrt (ashift X C-I) C-I) where C is the bitsize of
3558 MODE minus 1. This will convert (neg (zero_extract X 1 Y)) to
3559 (sign_extract X 1 Y). But only do this if TEMP isn't a register
3560 or a SUBREG of one since we'd be making the expression more
3561 complex if it was just a register. */
3562
3563 if (GET_CODE (temp) != REG
3564 && ! (GET_CODE (temp) == SUBREG
3565 && GET_CODE (SUBREG_REG (temp)) == REG)
951553af 3566 && (i = exact_log2 (nonzero_bits (temp, mode))) >= 0)
230d793d
RS
3567 {
3568 rtx temp1 = simplify_shift_const
5f4f0e22
CH
3569 (NULL_RTX, ASHIFTRT, mode,
3570 simplify_shift_const (NULL_RTX, ASHIFT, mode, temp,
230d793d
RS
3571 GET_MODE_BITSIZE (mode) - 1 - i),
3572 GET_MODE_BITSIZE (mode) - 1 - i);
3573
3574 /* If all we did was surround TEMP with the two shifts, we
3575 haven't improved anything, so don't use it. Otherwise,
3576 we are better off with TEMP1. */
3577 if (GET_CODE (temp1) != ASHIFTRT
3578 || GET_CODE (XEXP (temp1, 0)) != ASHIFT
3579 || XEXP (XEXP (temp1, 0), 0) != temp)
8079805d 3580 return temp1;
230d793d
RS
3581 }
3582 break;
3583
2ca9ae17
JW
3584 case TRUNCATE:
3585 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3586 SUBST (XEXP (x, 0),
3587 force_to_mode (XEXP (x, 0), GET_MODE (XEXP (x, 0)),
3588 GET_MODE_MASK (mode), NULL_RTX, 0));
0f13a422
ILT
3589
3590 /* (truncate:SI ({sign,zero}_extend:DI foo:SI)) == foo:SI. */
3591 if ((GET_CODE (XEXP (x, 0)) == SIGN_EXTEND
3592 || GET_CODE (XEXP (x, 0)) == ZERO_EXTEND)
3593 && GET_MODE (XEXP (XEXP (x, 0), 0)) == mode)
3594 return XEXP (XEXP (x, 0), 0);
3595
3596 /* (truncate:SI (OP:DI ({sign,zero}_extend:DI foo:SI))) is
3597 (OP:SI foo:SI) if OP is NEG or ABS. */
3598 if ((GET_CODE (XEXP (x, 0)) == ABS
3599 || GET_CODE (XEXP (x, 0)) == NEG)
3600 && (GET_CODE (XEXP (XEXP (x, 0), 0)) == SIGN_EXTEND
3601 || GET_CODE (XEXP (XEXP (x, 0), 0)) == ZERO_EXTEND)
3602 && GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == mode)
3603 return gen_unary (GET_CODE (XEXP (x, 0)), mode, mode,
3604 XEXP (XEXP (XEXP (x, 0), 0), 0));
3605
3606 /* (truncate:SI (subreg:DI (truncate:SI X) 0)) is
3607 (truncate:SI x). */
3608 if (GET_CODE (XEXP (x, 0)) == SUBREG
3609 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == TRUNCATE
3610 && subreg_lowpart_p (XEXP (x, 0)))
3611 return SUBREG_REG (XEXP (x, 0));
3612
3613 /* If we know that the value is already truncated, we can
3614 replace the TRUNCATE with a SUBREG. */
3615 if (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) <= HOST_BITS_PER_WIDE_INT
3616 && (nonzero_bits (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
3617 &~ GET_MODE_MASK (mode)) == 0)
3618 return gen_lowpart_for_combine (mode, XEXP (x, 0));
3619
3620 /* A truncate of a comparison can be replaced with a subreg if
3621 STORE_FLAG_VALUE permits. This is like the previous test,
3622 but it works even if the comparison is done in a mode larger
3623 than HOST_BITS_PER_WIDE_INT. */
3624 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3625 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
3626 && ((HOST_WIDE_INT) STORE_FLAG_VALUE &~ GET_MODE_MASK (mode)) == 0)
3627 return gen_lowpart_for_combine (mode, XEXP (x, 0));
3628
3629 /* Similarly, a truncate of a register whose value is a
3630 comparison can be replaced with a subreg if STORE_FLAG_VALUE
3631 permits. */
3632 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3633 && ((HOST_WIDE_INT) STORE_FLAG_VALUE &~ GET_MODE_MASK (mode)) == 0
3634 && (temp = get_last_value (XEXP (x, 0)))
3635 && GET_RTX_CLASS (GET_CODE (temp)) == '<')
3636 return gen_lowpart_for_combine (mode, XEXP (x, 0));
3637
2ca9ae17
JW
3638 break;
3639
230d793d
RS
3640 case FLOAT_TRUNCATE:
3641 /* (float_truncate:SF (float_extend:DF foo:SF)) = foo:SF. */
3642 if (GET_CODE (XEXP (x, 0)) == FLOAT_EXTEND
3643 && GET_MODE (XEXP (XEXP (x, 0), 0)) == mode)
3644 return XEXP (XEXP (x, 0), 0);
4635f748
RK
3645
3646 /* (float_truncate:SF (OP:DF (float_extend:DF foo:sf))) is
3647 (OP:SF foo:SF) if OP is NEG or ABS. */
3648 if ((GET_CODE (XEXP (x, 0)) == ABS
3649 || GET_CODE (XEXP (x, 0)) == NEG)
3650 && GET_CODE (XEXP (XEXP (x, 0), 0)) == FLOAT_EXTEND
3651 && GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == mode)
0c1c8ea6
RK
3652 return gen_unary (GET_CODE (XEXP (x, 0)), mode, mode,
3653 XEXP (XEXP (XEXP (x, 0), 0), 0));
1d12df72
RK
3654
3655 /* (float_truncate:SF (subreg:DF (float_truncate:SF X) 0))
3656 is (float_truncate:SF x). */
3657 if (GET_CODE (XEXP (x, 0)) == SUBREG
3658 && subreg_lowpart_p (XEXP (x, 0))
3659 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == FLOAT_TRUNCATE)
3660 return SUBREG_REG (XEXP (x, 0));
230d793d
RS
3661 break;
3662
3663#ifdef HAVE_cc0
3664 case COMPARE:
3665 /* Convert (compare FOO (const_int 0)) to FOO unless we aren't
3666 using cc0, in which case we want to leave it as a COMPARE
3667 so we can distinguish it from a register-register-copy. */
3668 if (XEXP (x, 1) == const0_rtx)
3669 return XEXP (x, 0);
3670
3671 /* In IEEE floating point, x-0 is not the same as x. */
3672 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
7e2a0d8e
RK
3673 || ! FLOAT_MODE_P (GET_MODE (XEXP (x, 0)))
3674 || flag_fast_math)
230d793d
RS
3675 && XEXP (x, 1) == CONST0_RTX (GET_MODE (XEXP (x, 0))))
3676 return XEXP (x, 0);
3677 break;
3678#endif
3679
3680 case CONST:
3681 /* (const (const X)) can become (const X). Do it this way rather than
3682 returning the inner CONST since CONST can be shared with a
3683 REG_EQUAL note. */
3684 if (GET_CODE (XEXP (x, 0)) == CONST)
3685 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
3686 break;
3687
3688#ifdef HAVE_lo_sum
3689 case LO_SUM:
3690 /* Convert (lo_sum (high FOO) FOO) to FOO. This is necessary so we
3691 can add in an offset. find_split_point will split this address up
3692 again if it doesn't match. */
3693 if (GET_CODE (XEXP (x, 0)) == HIGH
3694 && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1)))
3695 return XEXP (x, 1);
3696 break;
3697#endif
3698
3699 case PLUS:
3700 /* If we have (plus (plus (A const) B)), associate it so that CONST is
3701 outermost. That's because that's the way indexed addresses are
3702 supposed to appear. This code used to check many more cases, but
3703 they are now checked elsewhere. */
3704 if (GET_CODE (XEXP (x, 0)) == PLUS
3705 && CONSTANT_ADDRESS_P (XEXP (XEXP (x, 0), 1)))
3706 return gen_binary (PLUS, mode,
3707 gen_binary (PLUS, mode, XEXP (XEXP (x, 0), 0),
3708 XEXP (x, 1)),
3709 XEXP (XEXP (x, 0), 1));
3710
3711 /* (plus (xor (and <foo> (const_int pow2 - 1)) <c>) <-c>)
3712 when c is (const_int (pow2 + 1) / 2) is a sign extension of a
3713 bit-field and can be replaced by either a sign_extend or a
3714 sign_extract. The `and' may be a zero_extend. */
3715 if (GET_CODE (XEXP (x, 0)) == XOR
3716 && GET_CODE (XEXP (x, 1)) == CONST_INT
3717 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3718 && INTVAL (XEXP (x, 1)) == - INTVAL (XEXP (XEXP (x, 0), 1))
3719 && (i = exact_log2 (INTVAL (XEXP (XEXP (x, 0), 1)))) >= 0
5f4f0e22 3720 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
230d793d
RS
3721 && ((GET_CODE (XEXP (XEXP (x, 0), 0)) == AND
3722 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
3723 && (INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1))
5f4f0e22 3724 == ((HOST_WIDE_INT) 1 << (i + 1)) - 1))
230d793d
RS
3725 || (GET_CODE (XEXP (XEXP (x, 0), 0)) == ZERO_EXTEND
3726 && (GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)))
3727 == i + 1))))
8079805d
RK
3728 return simplify_shift_const
3729 (NULL_RTX, ASHIFTRT, mode,
3730 simplify_shift_const (NULL_RTX, ASHIFT, mode,
3731 XEXP (XEXP (XEXP (x, 0), 0), 0),
3732 GET_MODE_BITSIZE (mode) - (i + 1)),
3733 GET_MODE_BITSIZE (mode) - (i + 1));
230d793d 3734
bc0776c6
RK
3735 /* (plus (comparison A B) C) can become (neg (rev-comp A B)) if
3736 C is 1 and STORE_FLAG_VALUE is -1 or if C is -1 and STORE_FLAG_VALUE
3737 is 1. This produces better code than the alternative immediately
3738 below. */
3739 if (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
3740 && reversible_comparison_p (XEXP (x, 0))
3741 && ((STORE_FLAG_VALUE == -1 && XEXP (x, 1) == const1_rtx)
3742 || (STORE_FLAG_VALUE == 1 && XEXP (x, 1) == constm1_rtx)))
8079805d 3743 return
0c1c8ea6 3744 gen_unary (NEG, mode, mode,
8079805d
RK
3745 gen_binary (reverse_condition (GET_CODE (XEXP (x, 0))),
3746 mode, XEXP (XEXP (x, 0), 0),
3747 XEXP (XEXP (x, 0), 1)));
bc0776c6
RK
3748
3749 /* If only the low-order bit of X is possibly nonzero, (plus x -1)
230d793d
RS
3750 can become (ashiftrt (ashift (xor x 1) C) C) where C is
3751 the bitsize of the mode - 1. This allows simplification of
3752 "a = (b & 8) == 0;" */
3753 if (XEXP (x, 1) == constm1_rtx
3754 && GET_CODE (XEXP (x, 0)) != REG
3755 && ! (GET_CODE (XEXP (x,0)) == SUBREG
3756 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG)
951553af 3757 && nonzero_bits (XEXP (x, 0), mode) == 1)
8079805d
RK
3758 return simplify_shift_const (NULL_RTX, ASHIFTRT, mode,
3759 simplify_shift_const (NULL_RTX, ASHIFT, mode,
3760 gen_rtx_combine (XOR, mode,
3761 XEXP (x, 0), const1_rtx),
3762 GET_MODE_BITSIZE (mode) - 1),
3763 GET_MODE_BITSIZE (mode) - 1);
02f4ada4
RK
3764
3765 /* If we are adding two things that have no bits in common, convert
3766 the addition into an IOR. This will often be further simplified,
3767 for example in cases like ((a & 1) + (a & 2)), which can
3768 become a & 3. */
3769
ac49a949 3770 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
951553af
RK
3771 && (nonzero_bits (XEXP (x, 0), mode)
3772 & nonzero_bits (XEXP (x, 1), mode)) == 0)
8079805d 3773 return gen_binary (IOR, mode, XEXP (x, 0), XEXP (x, 1));
230d793d
RS
3774 break;
3775
3776 case MINUS:
0802d516
RK
3777 /* If STORE_FLAG_VALUE is 1, (minus 1 (comparison foo bar)) can be done
3778 by reversing the comparison code if valid. */
3779 if (STORE_FLAG_VALUE == 1
3780 && XEXP (x, 0) == const1_rtx
5109d49f
RK
3781 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<'
3782 && reversible_comparison_p (XEXP (x, 1)))
3783 return gen_binary (reverse_condition (GET_CODE (XEXP (x, 1))),
3784 mode, XEXP (XEXP (x, 1), 0),
3785 XEXP (XEXP (x, 1), 1));
5109d49f 3786
230d793d
RS
3787 /* (minus <foo> (and <foo> (const_int -pow2))) becomes
3788 (and <foo> (const_int pow2-1)) */
3789 if (GET_CODE (XEXP (x, 1)) == AND
3790 && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
3791 && exact_log2 (- INTVAL (XEXP (XEXP (x, 1), 1))) >= 0
3792 && rtx_equal_p (XEXP (XEXP (x, 1), 0), XEXP (x, 0)))
8079805d
RK
3793 return simplify_and_const_int (NULL_RTX, mode, XEXP (x, 0),
3794 - INTVAL (XEXP (XEXP (x, 1), 1)) - 1);
7bef8680
RK
3795
3796 /* Canonicalize (minus A (plus B C)) to (minus (minus A B) C) for
3797 integers. */
3798 if (GET_CODE (XEXP (x, 1)) == PLUS && INTEGRAL_MODE_P (mode))
8079805d
RK
3799 return gen_binary (MINUS, mode,
3800 gen_binary (MINUS, mode, XEXP (x, 0),
3801 XEXP (XEXP (x, 1), 0)),
3802 XEXP (XEXP (x, 1), 1));
230d793d
RS
3803 break;
3804
3805 case MULT:
3806 /* If we have (mult (plus A B) C), apply the distributive law and then
3807 the inverse distributive law to see if things simplify. This
3808 occurs mostly in addresses, often when unrolling loops. */
3809
3810 if (GET_CODE (XEXP (x, 0)) == PLUS)
3811 {
3812 x = apply_distributive_law
3813 (gen_binary (PLUS, mode,
3814 gen_binary (MULT, mode,
3815 XEXP (XEXP (x, 0), 0), XEXP (x, 1)),
3816 gen_binary (MULT, mode,
3817 XEXP (XEXP (x, 0), 1), XEXP (x, 1))));
3818
3819 if (GET_CODE (x) != MULT)
8079805d 3820 return x;
230d793d 3821 }
230d793d
RS
3822 break;
3823
3824 case UDIV:
3825 /* If this is a divide by a power of two, treat it as a shift if
3826 its first operand is a shift. */
3827 if (GET_CODE (XEXP (x, 1)) == CONST_INT
3828 && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0
3829 && (GET_CODE (XEXP (x, 0)) == ASHIFT
3830 || GET_CODE (XEXP (x, 0)) == LSHIFTRT
3831 || GET_CODE (XEXP (x, 0)) == ASHIFTRT
3832 || GET_CODE (XEXP (x, 0)) == ROTATE
3833 || GET_CODE (XEXP (x, 0)) == ROTATERT))
8079805d 3834 return simplify_shift_const (NULL_RTX, LSHIFTRT, mode, XEXP (x, 0), i);
230d793d
RS
3835 break;
3836
3837 case EQ: case NE:
3838 case GT: case GTU: case GE: case GEU:
3839 case LT: case LTU: case LE: case LEU:
3840 /* If the first operand is a condition code, we can't do anything
3841 with it. */
3842 if (GET_CODE (XEXP (x, 0)) == COMPARE
3843 || (GET_MODE_CLASS (GET_MODE (XEXP (x, 0))) != MODE_CC
3844#ifdef HAVE_cc0
3845 && XEXP (x, 0) != cc0_rtx
3846#endif
3847 ))
3848 {
3849 rtx op0 = XEXP (x, 0);
3850 rtx op1 = XEXP (x, 1);
3851 enum rtx_code new_code;
3852
3853 if (GET_CODE (op0) == COMPARE)
3854 op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);
3855
3856 /* Simplify our comparison, if possible. */
3857 new_code = simplify_comparison (code, &op0, &op1);
3858
230d793d 3859 /* If STORE_FLAG_VALUE is 1, we can convert (ne x 0) to simply X
951553af 3860 if only the low-order bit is possibly nonzero in X (such as when
5109d49f
RK
3861 X is a ZERO_EXTRACT of one bit). Similarly, we can convert EQ to
3862 (xor X 1) or (minus 1 X); we use the former. Finally, if X is
3863 known to be either 0 or -1, NE becomes a NEG and EQ becomes
3864 (plus X 1).
3865
3866 Remove any ZERO_EXTRACT we made when thinking this was a
3867 comparison. It may now be simpler to use, e.g., an AND. If a
3868 ZERO_EXTRACT is indeed appropriate, it will be placed back by
3869 the call to make_compound_operation in the SET case. */
3870
0802d516
RK
3871 if (STORE_FLAG_VALUE == 1
3872 && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
3873 && op1 == const0_rtx && nonzero_bits (op0, mode) == 1)
818b11b9
RK
3874 return gen_lowpart_for_combine (mode,
3875 expand_compound_operation (op0));
5109d49f 3876
0802d516
RK
3877 else if (STORE_FLAG_VALUE == 1
3878 && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
5109d49f
RK
3879 && op1 == const0_rtx
3880 && (num_sign_bit_copies (op0, mode)
3881 == GET_MODE_BITSIZE (mode)))
3882 {
3883 op0 = expand_compound_operation (op0);
0c1c8ea6 3884 return gen_unary (NEG, mode, mode,
8079805d 3885 gen_lowpart_for_combine (mode, op0));
5109d49f
RK
3886 }
3887
0802d516
RK
3888 else if (STORE_FLAG_VALUE == 1
3889 && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
230d793d 3890 && op1 == const0_rtx
5109d49f 3891 && nonzero_bits (op0, mode) == 1)
818b11b9
RK
3892 {
3893 op0 = expand_compound_operation (op0);
8079805d
RK
3894 return gen_binary (XOR, mode,
3895 gen_lowpart_for_combine (mode, op0),
3896 const1_rtx);
5109d49f 3897 }
818b11b9 3898
0802d516
RK
3899 else if (STORE_FLAG_VALUE == 1
3900 && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
5109d49f
RK
3901 && op1 == const0_rtx
3902 && (num_sign_bit_copies (op0, mode)
3903 == GET_MODE_BITSIZE (mode)))
3904 {
3905 op0 = expand_compound_operation (op0);
8079805d 3906 return plus_constant (gen_lowpart_for_combine (mode, op0), 1);
818b11b9 3907 }
230d793d 3908
5109d49f
RK
3909 /* If STORE_FLAG_VALUE is -1, we have cases similar to
3910 those above. */
0802d516
RK
3911 if (STORE_FLAG_VALUE == -1
3912 && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
230d793d 3913 && op1 == const0_rtx
5109d49f
RK
3914 && (num_sign_bit_copies (op0, mode)
3915 == GET_MODE_BITSIZE (mode)))
3916 return gen_lowpart_for_combine (mode,
3917 expand_compound_operation (op0));
3918
0802d516
RK
3919 else if (STORE_FLAG_VALUE == -1
3920 && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
5109d49f
RK
3921 && op1 == const0_rtx
3922 && nonzero_bits (op0, mode) == 1)
3923 {
3924 op0 = expand_compound_operation (op0);
0c1c8ea6 3925 return gen_unary (NEG, mode, mode,
8079805d 3926 gen_lowpart_for_combine (mode, op0));
5109d49f
RK
3927 }
3928
0802d516
RK
3929 else if (STORE_FLAG_VALUE == -1
3930 && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
5109d49f
RK
3931 && op1 == const0_rtx
3932 && (num_sign_bit_copies (op0, mode)
3933 == GET_MODE_BITSIZE (mode)))
230d793d 3934 {
818b11b9 3935 op0 = expand_compound_operation (op0);
0c1c8ea6 3936 return gen_unary (NOT, mode, mode,
8079805d 3937 gen_lowpart_for_combine (mode, op0));
5109d49f
RK
3938 }
3939
3940 /* If X is 0/1, (eq X 0) is X-1. */
0802d516
RK
3941 else if (STORE_FLAG_VALUE == -1
3942 && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
5109d49f
RK
3943 && op1 == const0_rtx
3944 && nonzero_bits (op0, mode) == 1)
3945 {
3946 op0 = expand_compound_operation (op0);
8079805d 3947 return plus_constant (gen_lowpart_for_combine (mode, op0), -1);
230d793d 3948 }
230d793d
RS
3949
3950 /* If STORE_FLAG_VALUE says to just test the sign bit and X has just
951553af
RK
3951 one bit that might be nonzero, we can convert (ne x 0) to
3952 (ashift x c) where C puts the bit in the sign bit. Remove any
3953 AND with STORE_FLAG_VALUE when we are done, since we are only
3954 going to test the sign bit. */
3f508eca 3955 if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
5f4f0e22 3956 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
0802d516 3957 && ((STORE_FLAG_VALUE & GET_MODE_MASK (mode))
5f4f0e22 3958 == (HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (mode) - 1))
230d793d
RS
3959 && op1 == const0_rtx
3960 && mode == GET_MODE (op0)
5109d49f 3961 && (i = exact_log2 (nonzero_bits (op0, mode))) >= 0)
230d793d 3962 {
818b11b9
RK
3963 x = simplify_shift_const (NULL_RTX, ASHIFT, mode,
3964 expand_compound_operation (op0),
230d793d
RS
3965 GET_MODE_BITSIZE (mode) - 1 - i);
3966 if (GET_CODE (x) == AND && XEXP (x, 1) == const_true_rtx)
3967 return XEXP (x, 0);
3968 else
3969 return x;
3970 }
3971
3972 /* If the code changed, return a whole new comparison. */
3973 if (new_code != code)
3974 return gen_rtx_combine (new_code, mode, op0, op1);
3975
3976 /* Otherwise, keep this operation, but maybe change its operands.
3977 This also converts (ne (compare FOO BAR) 0) to (ne FOO BAR). */
3978 SUBST (XEXP (x, 0), op0);
3979 SUBST (XEXP (x, 1), op1);
3980 }
3981 break;
3982
3983 case IF_THEN_ELSE:
8079805d 3984 return simplify_if_then_else (x);
9210df58 3985
8079805d
RK
3986 case ZERO_EXTRACT:
3987 case SIGN_EXTRACT:
3988 case ZERO_EXTEND:
3989 case SIGN_EXTEND:
0f41302f 3990 /* If we are processing SET_DEST, we are done. */
8079805d
RK
3991 if (in_dest)
3992 return x;
d0ab8cd3 3993
8079805d 3994 return expand_compound_operation (x);
d0ab8cd3 3995
8079805d
RK
3996 case SET:
3997 return simplify_set (x);
1a26b032 3998
8079805d
RK
3999 case AND:
4000 case IOR:
4001 case XOR:
4002 return simplify_logical (x, last);
d0ab8cd3 4003
8079805d
RK
4004 case ABS:
4005 /* (abs (neg <foo>)) -> (abs <foo>) */
4006 if (GET_CODE (XEXP (x, 0)) == NEG)
4007 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
1a26b032 4008
8079805d
RK
4009 /* If operand is something known to be positive, ignore the ABS. */
4010 if (GET_CODE (XEXP (x, 0)) == FFS || GET_CODE (XEXP (x, 0)) == ABS
4011 || ((GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
4012 <= HOST_BITS_PER_WIDE_INT)
4013 && ((nonzero_bits (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
4014 & ((HOST_WIDE_INT) 1
4015 << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1)))
4016 == 0)))
4017 return XEXP (x, 0);
1a26b032 4018
1a26b032 4019
8079805d
RK
4020 /* If operand is known to be only -1 or 0, convert ABS to NEG. */
4021 if (num_sign_bit_copies (XEXP (x, 0), mode) == GET_MODE_BITSIZE (mode))
4022 return gen_rtx_combine (NEG, mode, XEXP (x, 0));
1a26b032 4023
8079805d 4024 break;
1a26b032 4025
8079805d
RK
4026 case FFS:
4027 /* (ffs (*_extend <X>)) = (ffs <X>) */
4028 if (GET_CODE (XEXP (x, 0)) == SIGN_EXTEND
4029 || GET_CODE (XEXP (x, 0)) == ZERO_EXTEND)
4030 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4031 break;
1a26b032 4032
8079805d
RK
4033 case FLOAT:
4034 /* (float (sign_extend <X>)) = (float <X>). */
4035 if (GET_CODE (XEXP (x, 0)) == SIGN_EXTEND)
4036 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4037 break;
1a26b032 4038
8079805d
RK
4039 case ASHIFT:
4040 case LSHIFTRT:
4041 case ASHIFTRT:
4042 case ROTATE:
4043 case ROTATERT:
4044 /* If this is a shift by a constant amount, simplify it. */
4045 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
4046 return simplify_shift_const (x, code, mode, XEXP (x, 0),
4047 INTVAL (XEXP (x, 1)));
4048
4049#ifdef SHIFT_COUNT_TRUNCATED
4050 else if (SHIFT_COUNT_TRUNCATED && GET_CODE (XEXP (x, 1)) != REG)
4051 SUBST (XEXP (x, 1),
4052 force_to_mode (XEXP (x, 1), GET_MODE (x),
4053 ((HOST_WIDE_INT) 1
4054 << exact_log2 (GET_MODE_BITSIZE (GET_MODE (x))))
4055 - 1,
4056 NULL_RTX, 0));
4057#endif
4058
4059 break;
4060 }
4061
4062 return x;
4063}
4064\f
4065/* Simplify X, an IF_THEN_ELSE expression. Return the new expression. */
5109d49f 4066
8079805d
RK
4067static rtx
4068simplify_if_then_else (x)
4069 rtx x;
4070{
4071 enum machine_mode mode = GET_MODE (x);
4072 rtx cond = XEXP (x, 0);
4073 rtx true = XEXP (x, 1);
4074 rtx false = XEXP (x, 2);
4075 enum rtx_code true_code = GET_CODE (cond);
4076 int comparison_p = GET_RTX_CLASS (true_code) == '<';
4077 rtx temp;
4078 int i;
4079
0f41302f 4080 /* Simplify storing of the truth value. */
8079805d
RK
4081 if (comparison_p && true == const_true_rtx && false == const0_rtx)
4082 return gen_binary (true_code, mode, XEXP (cond, 0), XEXP (cond, 1));
4083
0f41302f 4084 /* Also when the truth value has to be reversed. */
8079805d
RK
4085 if (comparison_p && reversible_comparison_p (cond)
4086 && true == const0_rtx && false == const_true_rtx)
4087 return gen_binary (reverse_condition (true_code),
4088 mode, XEXP (cond, 0), XEXP (cond, 1));
4089
4090 /* Sometimes we can simplify the arm of an IF_THEN_ELSE if a register used
4091 in it is being compared against certain values. Get the true and false
4092 comparisons and see if that says anything about the value of each arm. */
4093
4094 if (comparison_p && reversible_comparison_p (cond)
4095 && GET_CODE (XEXP (cond, 0)) == REG)
4096 {
4097 HOST_WIDE_INT nzb;
4098 rtx from = XEXP (cond, 0);
4099 enum rtx_code false_code = reverse_condition (true_code);
4100 rtx true_val = XEXP (cond, 1);
4101 rtx false_val = true_val;
4102 int swapped = 0;
9210df58 4103
8079805d 4104 /* If FALSE_CODE is EQ, swap the codes and arms. */
5109d49f 4105
8079805d 4106 if (false_code == EQ)
1a26b032 4107 {
8079805d
RK
4108 swapped = 1, true_code = EQ, false_code = NE;
4109 temp = true, true = false, false = temp;
4110 }
5109d49f 4111
8079805d
RK
4112 /* If we are comparing against zero and the expression being tested has
4113 only a single bit that might be nonzero, that is its value when it is
4114 not equal to zero. Similarly if it is known to be -1 or 0. */
4115
4116 if (true_code == EQ && true_val == const0_rtx
4117 && exact_log2 (nzb = nonzero_bits (from, GET_MODE (from))) >= 0)
4118 false_code = EQ, false_val = GEN_INT (nzb);
4119 else if (true_code == EQ && true_val == const0_rtx
4120 && (num_sign_bit_copies (from, GET_MODE (from))
4121 == GET_MODE_BITSIZE (GET_MODE (from))))
4122 false_code = EQ, false_val = constm1_rtx;
4123
4124 /* Now simplify an arm if we know the value of the register in the
4125 branch and it is used in the arm. Be careful due to the potential
4126 of locally-shared RTL. */
4127
4128 if (reg_mentioned_p (from, true))
4129 true = subst (known_cond (copy_rtx (true), true_code, from, true_val),
4130 pc_rtx, pc_rtx, 0, 0);
4131 if (reg_mentioned_p (from, false))
4132 false = subst (known_cond (copy_rtx (false), false_code,
4133 from, false_val),
4134 pc_rtx, pc_rtx, 0, 0);
4135
4136 SUBST (XEXP (x, 1), swapped ? false : true);
4137 SUBST (XEXP (x, 2), swapped ? true : false);
4138
4139 true = XEXP (x, 1), false = XEXP (x, 2), true_code = GET_CODE (cond);
4140 }
5109d49f 4141
8079805d
RK
4142 /* If we have (if_then_else FOO (pc) (label_ref BAR)) and FOO can be
4143 reversed, do so to avoid needing two sets of patterns for
4144 subtract-and-branch insns. Similarly if we have a constant in the true
4145 arm, the false arm is the same as the first operand of the comparison, or
4146 the false arm is more complicated than the true arm. */
4147
4148 if (comparison_p && reversible_comparison_p (cond)
4149 && (true == pc_rtx
4150 || (CONSTANT_P (true)
4151 && GET_CODE (false) != CONST_INT && false != pc_rtx)
4152 || true == const0_rtx
4153 || (GET_RTX_CLASS (GET_CODE (true)) == 'o'
4154 && GET_RTX_CLASS (GET_CODE (false)) != 'o')
4155 || (GET_CODE (true) == SUBREG
4156 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (true))) == 'o'
4157 && GET_RTX_CLASS (GET_CODE (false)) != 'o')
4158 || reg_mentioned_p (true, false)
4159 || rtx_equal_p (false, XEXP (cond, 0))))
4160 {
4161 true_code = reverse_condition (true_code);
4162 SUBST (XEXP (x, 0),
4163 gen_binary (true_code, GET_MODE (cond), XEXP (cond, 0),
4164 XEXP (cond, 1)));
5109d49f 4165
8079805d
RK
4166 SUBST (XEXP (x, 1), false);
4167 SUBST (XEXP (x, 2), true);
1a26b032 4168
8079805d 4169 temp = true, true = false, false = temp, cond = XEXP (x, 0);
bb821298 4170
0f41302f 4171 /* It is possible that the conditional has been simplified out. */
bb821298
RK
4172 true_code = GET_CODE (cond);
4173 comparison_p = GET_RTX_CLASS (true_code) == '<';
8079805d 4174 }
abe6e52f 4175
8079805d 4176 /* If the two arms are identical, we don't need the comparison. */
1a26b032 4177
8079805d
RK
4178 if (rtx_equal_p (true, false) && ! side_effects_p (cond))
4179 return true;
1a26b032 4180
5be669c7
RK
4181 /* Convert a == b ? b : a to "a". */
4182 if (true_code == EQ && ! side_effects_p (cond)
4183 && rtx_equal_p (XEXP (cond, 0), false)
4184 && rtx_equal_p (XEXP (cond, 1), true))
4185 return false;
4186 else if (true_code == NE && ! side_effects_p (cond)
4187 && rtx_equal_p (XEXP (cond, 0), true)
4188 && rtx_equal_p (XEXP (cond, 1), false))
4189 return true;
4190
8079805d
RK
4191 /* Look for cases where we have (abs x) or (neg (abs X)). */
4192
4193 if (GET_MODE_CLASS (mode) == MODE_INT
4194 && GET_CODE (false) == NEG
4195 && rtx_equal_p (true, XEXP (false, 0))
4196 && comparison_p
4197 && rtx_equal_p (true, XEXP (cond, 0))
4198 && ! side_effects_p (true))
4199 switch (true_code)
4200 {
4201 case GT:
4202 case GE:
0c1c8ea6 4203 return gen_unary (ABS, mode, mode, true);
8079805d
RK
4204 case LT:
4205 case LE:
0c1c8ea6 4206 return gen_unary (NEG, mode, mode, gen_unary (ABS, mode, mode, true));
8079805d
RK
4207 }
4208
4209 /* Look for MIN or MAX. */
4210
34c8be72 4211 if ((! FLOAT_MODE_P (mode) || flag_fast_math)
8079805d
RK
4212 && comparison_p
4213 && rtx_equal_p (XEXP (cond, 0), true)
4214 && rtx_equal_p (XEXP (cond, 1), false)
4215 && ! side_effects_p (cond))
4216 switch (true_code)
4217 {
4218 case GE:
4219 case GT:
4220 return gen_binary (SMAX, mode, true, false);
4221 case LE:
4222 case LT:
4223 return gen_binary (SMIN, mode, true, false);
4224 case GEU:
4225 case GTU:
4226 return gen_binary (UMAX, mode, true, false);
4227 case LEU:
4228 case LTU:
4229 return gen_binary (UMIN, mode, true, false);
4230 }
4231
8079805d
RK
4232 /* If we have (if_then_else COND (OP Z C1) Z) and OP is an identity when its
4233 second operand is zero, this can be done as (OP Z (mult COND C2)) where
4234 C2 = C1 * STORE_FLAG_VALUE. Similarly if OP has an outer ZERO_EXTEND or
4235 SIGN_EXTEND as long as Z is already extended (so we don't destroy it).
4236 We can do this kind of thing in some cases when STORE_FLAG_VALUE is
0802d516 4237 neither 1 or -1, but it isn't worth checking for. */
8079805d 4238
0802d516
RK
4239 if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
4240 && comparison_p && mode != VOIDmode && ! side_effects_p (x))
8079805d
RK
4241 {
4242 rtx t = make_compound_operation (true, SET);
4243 rtx f = make_compound_operation (false, SET);
4244 rtx cond_op0 = XEXP (cond, 0);
4245 rtx cond_op1 = XEXP (cond, 1);
4246 enum rtx_code op, extend_op = NIL;
4247 enum machine_mode m = mode;
f24ad0e4 4248 rtx z = 0, c1;
8079805d 4249
8079805d
RK
4250 if ((GET_CODE (t) == PLUS || GET_CODE (t) == MINUS
4251 || GET_CODE (t) == IOR || GET_CODE (t) == XOR
4252 || GET_CODE (t) == ASHIFT
4253 || GET_CODE (t) == LSHIFTRT || GET_CODE (t) == ASHIFTRT)
4254 && rtx_equal_p (XEXP (t, 0), f))
4255 c1 = XEXP (t, 1), op = GET_CODE (t), z = f;
4256
4257 /* If an identity-zero op is commutative, check whether there
0f41302f 4258 would be a match if we swapped the operands. */
8079805d
RK
4259 else if ((GET_CODE (t) == PLUS || GET_CODE (t) == IOR
4260 || GET_CODE (t) == XOR)
4261 && rtx_equal_p (XEXP (t, 1), f))
4262 c1 = XEXP (t, 0), op = GET_CODE (t), z = f;
4263 else if (GET_CODE (t) == SIGN_EXTEND
4264 && (GET_CODE (XEXP (t, 0)) == PLUS
4265 || GET_CODE (XEXP (t, 0)) == MINUS
4266 || GET_CODE (XEXP (t, 0)) == IOR
4267 || GET_CODE (XEXP (t, 0)) == XOR
4268 || GET_CODE (XEXP (t, 0)) == ASHIFT
4269 || GET_CODE (XEXP (t, 0)) == LSHIFTRT
4270 || GET_CODE (XEXP (t, 0)) == ASHIFTRT)
4271 && GET_CODE (XEXP (XEXP (t, 0), 0)) == SUBREG
4272 && subreg_lowpart_p (XEXP (XEXP (t, 0), 0))
4273 && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 0)), f)
4274 && (num_sign_bit_copies (f, GET_MODE (f))
4275 > (GET_MODE_BITSIZE (mode)
4276 - GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (t, 0), 0))))))
4277 {
4278 c1 = XEXP (XEXP (t, 0), 1); z = f; op = GET_CODE (XEXP (t, 0));
4279 extend_op = SIGN_EXTEND;
4280 m = GET_MODE (XEXP (t, 0));
1a26b032 4281 }
8079805d
RK
4282 else if (GET_CODE (t) == SIGN_EXTEND
4283 && (GET_CODE (XEXP (t, 0)) == PLUS
4284 || GET_CODE (XEXP (t, 0)) == IOR
4285 || GET_CODE (XEXP (t, 0)) == XOR)
4286 && GET_CODE (XEXP (XEXP (t, 0), 1)) == SUBREG
4287 && subreg_lowpart_p (XEXP (XEXP (t, 0), 1))
4288 && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 1)), f)
4289 && (num_sign_bit_copies (f, GET_MODE (f))
4290 > (GET_MODE_BITSIZE (mode)
4291 - GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (t, 0), 1))))))
4292 {
4293 c1 = XEXP (XEXP (t, 0), 0); z = f; op = GET_CODE (XEXP (t, 0));
4294 extend_op = SIGN_EXTEND;
4295 m = GET_MODE (XEXP (t, 0));
4296 }
4297 else if (GET_CODE (t) == ZERO_EXTEND
4298 && (GET_CODE (XEXP (t, 0)) == PLUS
4299 || GET_CODE (XEXP (t, 0)) == MINUS
4300 || GET_CODE (XEXP (t, 0)) == IOR
4301 || GET_CODE (XEXP (t, 0)) == XOR
4302 || GET_CODE (XEXP (t, 0)) == ASHIFT
4303 || GET_CODE (XEXP (t, 0)) == LSHIFTRT
4304 || GET_CODE (XEXP (t, 0)) == ASHIFTRT)
4305 && GET_CODE (XEXP (XEXP (t, 0), 0)) == SUBREG
4306 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4307 && subreg_lowpart_p (XEXP (XEXP (t, 0), 0))
4308 && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 0)), f)
4309 && ((nonzero_bits (f, GET_MODE (f))
4310 & ~ GET_MODE_MASK (GET_MODE (XEXP (XEXP (t, 0), 0))))
4311 == 0))
4312 {
4313 c1 = XEXP (XEXP (t, 0), 1); z = f; op = GET_CODE (XEXP (t, 0));
4314 extend_op = ZERO_EXTEND;
4315 m = GET_MODE (XEXP (t, 0));
4316 }
4317 else if (GET_CODE (t) == ZERO_EXTEND
4318 && (GET_CODE (XEXP (t, 0)) == PLUS
4319 || GET_CODE (XEXP (t, 0)) == IOR
4320 || GET_CODE (XEXP (t, 0)) == XOR)
4321 && GET_CODE (XEXP (XEXP (t, 0), 1)) == SUBREG
4322 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4323 && subreg_lowpart_p (XEXP (XEXP (t, 0), 1))
4324 && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 1)), f)
4325 && ((nonzero_bits (f, GET_MODE (f))
4326 & ~ GET_MODE_MASK (GET_MODE (XEXP (XEXP (t, 0), 1))))
4327 == 0))
4328 {
4329 c1 = XEXP (XEXP (t, 0), 0); z = f; op = GET_CODE (XEXP (t, 0));
4330 extend_op = ZERO_EXTEND;
4331 m = GET_MODE (XEXP (t, 0));
4332 }
4333
4334 if (z)
4335 {
4336 temp = subst (gen_binary (true_code, m, cond_op0, cond_op1),
4337 pc_rtx, pc_rtx, 0, 0);
4338 temp = gen_binary (MULT, m, temp,
4339 gen_binary (MULT, m, c1, const_true_rtx));
4340 temp = subst (temp, pc_rtx, pc_rtx, 0, 0);
4341 temp = gen_binary (op, m, gen_lowpart_for_combine (m, z), temp);
4342
4343 if (extend_op != NIL)
0c1c8ea6 4344 temp = gen_unary (extend_op, mode, m, temp);
8079805d
RK
4345
4346 return temp;
4347 }
4348 }
224eeff2 4349
8079805d
RK
4350 /* If we have (if_then_else (ne A 0) C1 0) and either A is known to be 0 or
4351 1 and C1 is a single bit or A is known to be 0 or -1 and C1 is the
4352 negation of a single bit, we can convert this operation to a shift. We
4353 can actually do this more generally, but it doesn't seem worth it. */
4354
4355 if (true_code == NE && XEXP (cond, 1) == const0_rtx
4356 && false == const0_rtx && GET_CODE (true) == CONST_INT
4357 && ((1 == nonzero_bits (XEXP (cond, 0), mode)
4358 && (i = exact_log2 (INTVAL (true))) >= 0)
4359 || ((num_sign_bit_copies (XEXP (cond, 0), mode)
4360 == GET_MODE_BITSIZE (mode))
4361 && (i = exact_log2 (- INTVAL (true))) >= 0)))
4362 return
4363 simplify_shift_const (NULL_RTX, ASHIFT, mode,
4364 gen_lowpart_for_combine (mode, XEXP (cond, 0)), i);
230d793d 4365
8079805d
RK
4366 return x;
4367}
4368\f
4369/* Simplify X, a SET expression. Return the new expression. */
230d793d 4370
8079805d
RK
4371static rtx
4372simplify_set (x)
4373 rtx x;
4374{
4375 rtx src = SET_SRC (x);
4376 rtx dest = SET_DEST (x);
4377 enum machine_mode mode
4378 = GET_MODE (src) != VOIDmode ? GET_MODE (src) : GET_MODE (dest);
4379 rtx other_insn;
4380 rtx *cc_use;
4381
4382 /* (set (pc) (return)) gets written as (return). */
4383 if (GET_CODE (dest) == PC && GET_CODE (src) == RETURN)
4384 return src;
230d793d 4385
87e3e0c1
RK
4386 /* Now that we know for sure which bits of SRC we are using, see if we can
4387 simplify the expression for the object knowing that we only need the
4388 low-order bits. */
4389
4390 if (GET_MODE_CLASS (mode) == MODE_INT)
4391 src = force_to_mode (src, mode, GET_MODE_MASK (mode), NULL_RTX, 0);
4392
8079805d
RK
4393 /* If we are setting CC0 or if the source is a COMPARE, look for the use of
4394 the comparison result and try to simplify it unless we already have used
4395 undobuf.other_insn. */
4396 if ((GET_CODE (src) == COMPARE
230d793d 4397#ifdef HAVE_cc0
8079805d 4398 || dest == cc0_rtx
230d793d 4399#endif
8079805d
RK
4400 )
4401 && (cc_use = find_single_use (dest, subst_insn, &other_insn)) != 0
4402 && (undobuf.other_insn == 0 || other_insn == undobuf.other_insn)
4403 && GET_RTX_CLASS (GET_CODE (*cc_use)) == '<'
c0d3ac4d 4404 && rtx_equal_p (XEXP (*cc_use, 0), dest))
8079805d
RK
4405 {
4406 enum rtx_code old_code = GET_CODE (*cc_use);
4407 enum rtx_code new_code;
4408 rtx op0, op1;
4409 int other_changed = 0;
4410 enum machine_mode compare_mode = GET_MODE (dest);
4411
4412 if (GET_CODE (src) == COMPARE)
4413 op0 = XEXP (src, 0), op1 = XEXP (src, 1);
4414 else
4415 op0 = src, op1 = const0_rtx;
230d793d 4416
8079805d
RK
4417 /* Simplify our comparison, if possible. */
4418 new_code = simplify_comparison (old_code, &op0, &op1);
230d793d 4419
c141a106 4420#ifdef EXTRA_CC_MODES
8079805d
RK
4421 /* If this machine has CC modes other than CCmode, check to see if we
4422 need to use a different CC mode here. */
4423 compare_mode = SELECT_CC_MODE (new_code, op0, op1);
c141a106 4424#endif /* EXTRA_CC_MODES */
230d793d 4425
c141a106 4426#if !defined (HAVE_cc0) && defined (EXTRA_CC_MODES)
8079805d
RK
4427 /* If the mode changed, we have to change SET_DEST, the mode in the
4428 compare, and the mode in the place SET_DEST is used. If SET_DEST is
4429 a hard register, just build new versions with the proper mode. If it
4430 is a pseudo, we lose unless it is only time we set the pseudo, in
4431 which case we can safely change its mode. */
4432 if (compare_mode != GET_MODE (dest))
4433 {
4434 int regno = REGNO (dest);
4435 rtx new_dest = gen_rtx (REG, compare_mode, regno);
4436
4437 if (regno < FIRST_PSEUDO_REGISTER
4438 || (reg_n_sets[regno] == 1 && ! REG_USERVAR_P (dest)))
230d793d 4439 {
8079805d
RK
4440 if (regno >= FIRST_PSEUDO_REGISTER)
4441 SUBST (regno_reg_rtx[regno], new_dest);
230d793d 4442
8079805d
RK
4443 SUBST (SET_DEST (x), new_dest);
4444 SUBST (XEXP (*cc_use, 0), new_dest);
4445 other_changed = 1;
230d793d 4446
8079805d 4447 dest = new_dest;
230d793d 4448 }
8079805d 4449 }
230d793d
RS
4450#endif
4451
8079805d
RK
4452 /* If the code changed, we have to build a new comparison in
4453 undobuf.other_insn. */
4454 if (new_code != old_code)
4455 {
4456 unsigned HOST_WIDE_INT mask;
4457
4458 SUBST (*cc_use, gen_rtx_combine (new_code, GET_MODE (*cc_use),
4459 dest, const0_rtx));
4460
4461 /* If the only change we made was to change an EQ into an NE or
4462 vice versa, OP0 has only one bit that might be nonzero, and OP1
4463 is zero, check if changing the user of the condition code will
4464 produce a valid insn. If it won't, we can keep the original code
4465 in that insn by surrounding our operation with an XOR. */
4466
4467 if (((old_code == NE && new_code == EQ)
4468 || (old_code == EQ && new_code == NE))
4469 && ! other_changed && op1 == const0_rtx
4470 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
4471 && exact_log2 (mask = nonzero_bits (op0, GET_MODE (op0))) >= 0)
230d793d 4472 {
8079805d 4473 rtx pat = PATTERN (other_insn), note = 0;
a29ca9db 4474 int scratches;
230d793d 4475
a29ca9db 4476 if ((recog_for_combine (&pat, other_insn, &note, &scratches) < 0
8079805d
RK
4477 && ! check_asm_operands (pat)))
4478 {
4479 PUT_CODE (*cc_use, old_code);
4480 other_insn = 0;
230d793d 4481
8079805d 4482 op0 = gen_binary (XOR, GET_MODE (op0), op0, GEN_INT (mask));
230d793d 4483 }
230d793d
RS
4484 }
4485
8079805d
RK
4486 other_changed = 1;
4487 }
4488
4489 if (other_changed)
4490 undobuf.other_insn = other_insn;
230d793d
RS
4491
4492#ifdef HAVE_cc0
8079805d
RK
4493 /* If we are now comparing against zero, change our source if
4494 needed. If we do not use cc0, we always have a COMPARE. */
4495 if (op1 == const0_rtx && dest == cc0_rtx)
4496 {
4497 SUBST (SET_SRC (x), op0);
4498 src = op0;
4499 }
4500 else
230d793d
RS
4501#endif
4502
8079805d
RK
4503 /* Otherwise, if we didn't previously have a COMPARE in the
4504 correct mode, we need one. */
4505 if (GET_CODE (src) != COMPARE || GET_MODE (src) != compare_mode)
4506 {
4507 SUBST (SET_SRC (x),
4508 gen_rtx_combine (COMPARE, compare_mode, op0, op1));
4509 src = SET_SRC (x);
230d793d
RS
4510 }
4511 else
4512 {
8079805d
RK
4513 /* Otherwise, update the COMPARE if needed. */
4514 SUBST (XEXP (src, 0), op0);
4515 SUBST (XEXP (src, 1), op1);
230d793d 4516 }
8079805d
RK
4517 }
4518 else
4519 {
4520 /* Get SET_SRC in a form where we have placed back any
4521 compound expressions. Then do the checks below. */
4522 src = make_compound_operation (src, SET);
4523 SUBST (SET_SRC (x), src);
4524 }
230d793d 4525
8079805d
RK
4526 /* If we have (set x (subreg:m1 (op:m2 ...) 0)) with OP being some operation,
4527 and X being a REG or (subreg (reg)), we may be able to convert this to
4528 (set (subreg:m2 x) (op)).
df62f951 4529
8079805d
RK
4530 We can always do this if M1 is narrower than M2 because that means that
4531 we only care about the low bits of the result.
df62f951 4532
8079805d
RK
4533 However, on machines without WORD_REGISTER_OPERATIONS defined, we cannot
4534 perform a narrower operation that requested since the high-order bits will
4535 be undefined. On machine where it is defined, this transformation is safe
4536 as long as M1 and M2 have the same number of words. */
df62f951 4537
8079805d
RK
4538 if (GET_CODE (src) == SUBREG && subreg_lowpart_p (src)
4539 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (src))) != 'o'
4540 && (((GET_MODE_SIZE (GET_MODE (src)) + (UNITS_PER_WORD - 1))
4541 / UNITS_PER_WORD)
4542 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
4543 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
8baf60bb 4544#ifndef WORD_REGISTER_OPERATIONS
8079805d
RK
4545 && (GET_MODE_SIZE (GET_MODE (src))
4546 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (src))))
df62f951 4547#endif
f507a070
RK
4548#ifdef CLASS_CANNOT_CHANGE_SIZE
4549 && ! (GET_CODE (dest) == REG && REGNO (dest) < FIRST_PSEUDO_REGISTER
4550 && (TEST_HARD_REG_BIT
4551 (reg_class_contents[(int) CLASS_CANNOT_CHANGE_SIZE],
4552 REGNO (dest)))
4553 && (GET_MODE_SIZE (GET_MODE (src))
4554 != GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))))
4555#endif
8079805d
RK
4556 && (GET_CODE (dest) == REG
4557 || (GET_CODE (dest) == SUBREG
4558 && GET_CODE (SUBREG_REG (dest)) == REG)))
4559 {
4560 SUBST (SET_DEST (x),
4561 gen_lowpart_for_combine (GET_MODE (SUBREG_REG (src)),
4562 dest));
4563 SUBST (SET_SRC (x), SUBREG_REG (src));
4564
4565 src = SET_SRC (x), dest = SET_DEST (x);
4566 }
df62f951 4567
8baf60bb 4568#ifdef LOAD_EXTEND_OP
8079805d
RK
4569 /* If we have (set FOO (subreg:M (mem:N BAR) 0)) with M wider than N, this
4570 would require a paradoxical subreg. Replace the subreg with a
0f41302f 4571 zero_extend to avoid the reload that would otherwise be required. */
8079805d
RK
4572
4573 if (GET_CODE (src) == SUBREG && subreg_lowpart_p (src)
4574 && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (src))) != NIL
4575 && SUBREG_WORD (src) == 0
4576 && (GET_MODE_SIZE (GET_MODE (src))
4577 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (src))))
4578 && GET_CODE (SUBREG_REG (src)) == MEM)
4579 {
4580 SUBST (SET_SRC (x),
4581 gen_rtx_combine (LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (src))),
4582 GET_MODE (src), XEXP (src, 0)));
4583
4584 src = SET_SRC (x);
4585 }
230d793d
RS
4586#endif
4587
8079805d
RK
4588 /* If we don't have a conditional move, SET_SRC is an IF_THEN_ELSE, and we
4589 are comparing an item known to be 0 or -1 against 0, use a logical
4590 operation instead. Check for one of the arms being an IOR of the other
4591 arm with some value. We compute three terms to be IOR'ed together. In
4592 practice, at most two will be nonzero. Then we do the IOR's. */
4593
4594 if (GET_CODE (dest) != PC
4595 && GET_CODE (src) == IF_THEN_ELSE
36b8d792 4596 && GET_MODE_CLASS (GET_MODE (src)) == MODE_INT
8079805d
RK
4597 && (GET_CODE (XEXP (src, 0)) == EQ || GET_CODE (XEXP (src, 0)) == NE)
4598 && XEXP (XEXP (src, 0), 1) == const0_rtx
6dd49058 4599 && GET_MODE (src) == GET_MODE (XEXP (XEXP (src, 0), 0))
ea414472
DE
4600#ifdef HAVE_conditional_move
4601 && ! can_conditionally_move_p (GET_MODE (src))
4602#endif
8079805d
RK
4603 && (num_sign_bit_copies (XEXP (XEXP (src, 0), 0),
4604 GET_MODE (XEXP (XEXP (src, 0), 0)))
4605 == GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (src, 0), 0))))
4606 && ! side_effects_p (src))
4607 {
4608 rtx true = (GET_CODE (XEXP (src, 0)) == NE
4609 ? XEXP (src, 1) : XEXP (src, 2));
4610 rtx false = (GET_CODE (XEXP (src, 0)) == NE
4611 ? XEXP (src, 2) : XEXP (src, 1));
4612 rtx term1 = const0_rtx, term2, term3;
4613
4614 if (GET_CODE (true) == IOR && rtx_equal_p (XEXP (true, 0), false))
4615 term1 = false, true = XEXP (true, 1), false = const0_rtx;
4616 else if (GET_CODE (true) == IOR
4617 && rtx_equal_p (XEXP (true, 1), false))
4618 term1 = false, true = XEXP (true, 0), false = const0_rtx;
4619 else if (GET_CODE (false) == IOR
4620 && rtx_equal_p (XEXP (false, 0), true))
4621 term1 = true, false = XEXP (false, 1), true = const0_rtx;
4622 else if (GET_CODE (false) == IOR
4623 && rtx_equal_p (XEXP (false, 1), true))
4624 term1 = true, false = XEXP (false, 0), true = const0_rtx;
4625
4626 term2 = gen_binary (AND, GET_MODE (src), XEXP (XEXP (src, 0), 0), true);
4627 term3 = gen_binary (AND, GET_MODE (src),
0c1c8ea6 4628 gen_unary (NOT, GET_MODE (src), GET_MODE (src),
8079805d
RK
4629 XEXP (XEXP (src, 0), 0)),
4630 false);
4631
4632 SUBST (SET_SRC (x),
4633 gen_binary (IOR, GET_MODE (src),
4634 gen_binary (IOR, GET_MODE (src), term1, term2),
4635 term3));
4636
4637 src = SET_SRC (x);
4638 }
230d793d 4639
246e00f2
RK
4640 /* If either SRC or DEST is a CLOBBER of (const_int 0), make this
4641 whole thing fail. */
4642 if (GET_CODE (src) == CLOBBER && XEXP (src, 0) == const0_rtx)
4643 return src;
4644 else if (GET_CODE (dest) == CLOBBER && XEXP (dest, 0) == const0_rtx)
4645 return dest;
4646 else
4647 /* Convert this into a field assignment operation, if possible. */
4648 return make_field_assignment (x);
8079805d
RK
4649}
4650\f
4651/* Simplify, X, and AND, IOR, or XOR operation, and return the simplified
4652 result. LAST is nonzero if this is the last retry. */
4653
4654static rtx
4655simplify_logical (x, last)
4656 rtx x;
4657 int last;
4658{
4659 enum machine_mode mode = GET_MODE (x);
4660 rtx op0 = XEXP (x, 0);
4661 rtx op1 = XEXP (x, 1);
4662
4663 switch (GET_CODE (x))
4664 {
230d793d 4665 case AND:
8079805d
RK
4666 /* Convert (A ^ B) & A to A & (~ B) since the latter is often a single
4667 insn (and may simplify more). */
4668 if (GET_CODE (op0) == XOR
4669 && rtx_equal_p (XEXP (op0, 0), op1)
4670 && ! side_effects_p (op1))
0c1c8ea6
RK
4671 x = gen_binary (AND, mode,
4672 gen_unary (NOT, mode, mode, XEXP (op0, 1)), op1);
8079805d
RK
4673
4674 if (GET_CODE (op0) == XOR
4675 && rtx_equal_p (XEXP (op0, 1), op1)
4676 && ! side_effects_p (op1))
0c1c8ea6
RK
4677 x = gen_binary (AND, mode,
4678 gen_unary (NOT, mode, mode, XEXP (op0, 0)), op1);
8079805d
RK
4679
4680 /* Similarly for (~ (A ^ B)) & A. */
4681 if (GET_CODE (op0) == NOT
4682 && GET_CODE (XEXP (op0, 0)) == XOR
4683 && rtx_equal_p (XEXP (XEXP (op0, 0), 0), op1)
4684 && ! side_effects_p (op1))
4685 x = gen_binary (AND, mode, XEXP (XEXP (op0, 0), 1), op1);
4686
4687 if (GET_CODE (op0) == NOT
4688 && GET_CODE (XEXP (op0, 0)) == XOR
4689 && rtx_equal_p (XEXP (XEXP (op0, 0), 1), op1)
4690 && ! side_effects_p (op1))
4691 x = gen_binary (AND, mode, XEXP (XEXP (op0, 0), 0), op1);
4692
4693 if (GET_CODE (op1) == CONST_INT)
230d793d 4694 {
8079805d 4695 x = simplify_and_const_int (x, mode, op0, INTVAL (op1));
230d793d
RS
4696
4697 /* If we have (ior (and (X C1) C2)) and the next restart would be
4698 the last, simplify this by making C1 as small as possible
0f41302f 4699 and then exit. */
8079805d
RK
4700 if (last
4701 && GET_CODE (x) == IOR && GET_CODE (op0) == AND
4702 && GET_CODE (XEXP (op0, 1)) == CONST_INT
4703 && GET_CODE (op1) == CONST_INT)
4704 return gen_binary (IOR, mode,
4705 gen_binary (AND, mode, XEXP (op0, 0),
4706 GEN_INT (INTVAL (XEXP (op0, 1))
4707 & ~ INTVAL (op1))), op1);
230d793d
RS
4708
4709 if (GET_CODE (x) != AND)
8079805d 4710 return x;
0e32506c
RK
4711
4712 if (GET_RTX_CLASS (GET_CODE (x)) == 'c'
4713 || GET_RTX_CLASS (GET_CODE (x)) == '2')
4714 op0 = XEXP (x, 0), op1 = XEXP (x, 1);
230d793d
RS
4715 }
4716
4717 /* Convert (A | B) & A to A. */
8079805d
RK
4718 if (GET_CODE (op0) == IOR
4719 && (rtx_equal_p (XEXP (op0, 0), op1)
4720 || rtx_equal_p (XEXP (op0, 1), op1))
4721 && ! side_effects_p (XEXP (op0, 0))
4722 && ! side_effects_p (XEXP (op0, 1)))
4723 return op1;
230d793d 4724
d0ab8cd3 4725 /* In the following group of tests (and those in case IOR below),
230d793d
RS
4726 we start with some combination of logical operations and apply
4727 the distributive law followed by the inverse distributive law.
4728 Most of the time, this results in no change. However, if some of
4729 the operands are the same or inverses of each other, simplifications
4730 will result.
4731
4732 For example, (and (ior A B) (not B)) can occur as the result of
4733 expanding a bit field assignment. When we apply the distributive
4734 law to this, we get (ior (and (A (not B))) (and (B (not B)))),
8079805d 4735 which then simplifies to (and (A (not B))).
230d793d 4736
8079805d 4737 If we have (and (ior A B) C), apply the distributive law and then
230d793d
RS
4738 the inverse distributive law to see if things simplify. */
4739
8079805d 4740 if (GET_CODE (op0) == IOR || GET_CODE (op0) == XOR)
230d793d
RS
4741 {
4742 x = apply_distributive_law
8079805d
RK
4743 (gen_binary (GET_CODE (op0), mode,
4744 gen_binary (AND, mode, XEXP (op0, 0), op1),
4745 gen_binary (AND, mode, XEXP (op0, 1), op1)));
230d793d 4746 if (GET_CODE (x) != AND)
8079805d 4747 return x;
230d793d
RS
4748 }
4749
8079805d
RK
4750 if (GET_CODE (op1) == IOR || GET_CODE (op1) == XOR)
4751 return apply_distributive_law
4752 (gen_binary (GET_CODE (op1), mode,
4753 gen_binary (AND, mode, XEXP (op1, 0), op0),
4754 gen_binary (AND, mode, XEXP (op1, 1), op0)));
230d793d
RS
4755
4756 /* Similarly, taking advantage of the fact that
4757 (and (not A) (xor B C)) == (xor (ior A B) (ior A C)) */
4758
8079805d
RK
4759 if (GET_CODE (op0) == NOT && GET_CODE (op1) == XOR)
4760 return apply_distributive_law
4761 (gen_binary (XOR, mode,
4762 gen_binary (IOR, mode, XEXP (op0, 0), XEXP (op1, 0)),
4763 gen_binary (IOR, mode, XEXP (op0, 0), XEXP (op1, 1))));
230d793d 4764
8079805d
RK
4765 else if (GET_CODE (op1) == NOT && GET_CODE (op0) == XOR)
4766 return apply_distributive_law
4767 (gen_binary (XOR, mode,
4768 gen_binary (IOR, mode, XEXP (op1, 0), XEXP (op0, 0)),
4769 gen_binary (IOR, mode, XEXP (op1, 0), XEXP (op0, 1))));
230d793d
RS
4770 break;
4771
4772 case IOR:
951553af 4773 /* (ior A C) is C if all bits of A that might be nonzero are on in C. */
8079805d 4774 if (GET_CODE (op1) == CONST_INT
ac49a949 4775 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
8079805d
RK
4776 && (nonzero_bits (op0, mode) & ~ INTVAL (op1)) == 0)
4777 return op1;
d0ab8cd3 4778
230d793d 4779 /* Convert (A & B) | A to A. */
8079805d
RK
4780 if (GET_CODE (op0) == AND
4781 && (rtx_equal_p (XEXP (op0, 0), op1)
4782 || rtx_equal_p (XEXP (op0, 1), op1))
4783 && ! side_effects_p (XEXP (op0, 0))
4784 && ! side_effects_p (XEXP (op0, 1)))
4785 return op1;
230d793d
RS
4786
4787 /* If we have (ior (and A B) C), apply the distributive law and then
4788 the inverse distributive law to see if things simplify. */
4789
8079805d 4790 if (GET_CODE (op0) == AND)
230d793d
RS
4791 {
4792 x = apply_distributive_law
4793 (gen_binary (AND, mode,
8079805d
RK
4794 gen_binary (IOR, mode, XEXP (op0, 0), op1),
4795 gen_binary (IOR, mode, XEXP (op0, 1), op1)));
230d793d
RS
4796
4797 if (GET_CODE (x) != IOR)
8079805d 4798 return x;
230d793d
RS
4799 }
4800
8079805d 4801 if (GET_CODE (op1) == AND)
230d793d
RS
4802 {
4803 x = apply_distributive_law
4804 (gen_binary (AND, mode,
8079805d
RK
4805 gen_binary (IOR, mode, XEXP (op1, 0), op0),
4806 gen_binary (IOR, mode, XEXP (op1, 1), op0)));
230d793d
RS
4807
4808 if (GET_CODE (x) != IOR)
8079805d 4809 return x;
230d793d
RS
4810 }
4811
4812 /* Convert (ior (ashift A CX) (lshiftrt A CY)) where CX+CY equals the
4813 mode size to (rotate A CX). */
4814
8079805d
RK
4815 if (((GET_CODE (op0) == ASHIFT && GET_CODE (op1) == LSHIFTRT)
4816 || (GET_CODE (op1) == ASHIFT && GET_CODE (op0) == LSHIFTRT))
4817 && rtx_equal_p (XEXP (op0, 0), XEXP (op1, 0))
4818 && GET_CODE (XEXP (op0, 1)) == CONST_INT
4819 && GET_CODE (XEXP (op1, 1)) == CONST_INT
4820 && (INTVAL (XEXP (op0, 1)) + INTVAL (XEXP (op1, 1))
230d793d 4821 == GET_MODE_BITSIZE (mode)))
8079805d
RK
4822 return gen_rtx (ROTATE, mode, XEXP (op0, 0),
4823 (GET_CODE (op0) == ASHIFT
4824 ? XEXP (op0, 1) : XEXP (op1, 1)));
230d793d 4825
71923da7
RK
4826 /* If OP0 is (ashiftrt (plus ...) C), it might actually be
4827 a (sign_extend (plus ...)). If so, OP1 is a CONST_INT, and the PLUS
4828 does not affect any of the bits in OP1, it can really be done
4829 as a PLUS and we can associate. We do this by seeing if OP1
4830 can be safely shifted left C bits. */
4831 if (GET_CODE (op1) == CONST_INT && GET_CODE (op0) == ASHIFTRT
4832 && GET_CODE (XEXP (op0, 0)) == PLUS
4833 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
4834 && GET_CODE (XEXP (op0, 1)) == CONST_INT
4835 && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT)
4836 {
4837 int count = INTVAL (XEXP (op0, 1));
4838 HOST_WIDE_INT mask = INTVAL (op1) << count;
4839
4840 if (mask >> count == INTVAL (op1)
4841 && (mask & nonzero_bits (XEXP (op0, 0), mode)) == 0)
4842 {
4843 SUBST (XEXP (XEXP (op0, 0), 1),
4844 GEN_INT (INTVAL (XEXP (XEXP (op0, 0), 1)) | mask));
4845 return op0;
4846 }
4847 }
230d793d
RS
4848 break;
4849
4850 case XOR:
4851 /* Convert (XOR (NOT x) (NOT y)) to (XOR x y).
4852 Also convert (XOR (NOT x) y) to (NOT (XOR x y)), similarly for
4853 (NOT y). */
4854 {
4855 int num_negated = 0;
230d793d 4856
8079805d
RK
4857 if (GET_CODE (op0) == NOT)
4858 num_negated++, op0 = XEXP (op0, 0);
4859 if (GET_CODE (op1) == NOT)
4860 num_negated++, op1 = XEXP (op1, 0);
230d793d
RS
4861
4862 if (num_negated == 2)
4863 {
8079805d
RK
4864 SUBST (XEXP (x, 0), op0);
4865 SUBST (XEXP (x, 1), op1);
230d793d
RS
4866 }
4867 else if (num_negated == 1)
0c1c8ea6 4868 return gen_unary (NOT, mode, mode, gen_binary (XOR, mode, op0, op1));
230d793d
RS
4869 }
4870
4871 /* Convert (xor (and A B) B) to (and (not A) B). The latter may
4872 correspond to a machine insn or result in further simplifications
4873 if B is a constant. */
4874
8079805d
RK
4875 if (GET_CODE (op0) == AND
4876 && rtx_equal_p (XEXP (op0, 1), op1)
4877 && ! side_effects_p (op1))
0c1c8ea6
RK
4878 return gen_binary (AND, mode,
4879 gen_unary (NOT, mode, mode, XEXP (op0, 0)),
8079805d 4880 op1);
230d793d 4881
8079805d
RK
4882 else if (GET_CODE (op0) == AND
4883 && rtx_equal_p (XEXP (op0, 0), op1)
4884 && ! side_effects_p (op1))
0c1c8ea6
RK
4885 return gen_binary (AND, mode,
4886 gen_unary (NOT, mode, mode, XEXP (op0, 1)),
8079805d 4887 op1);
230d793d 4888
230d793d 4889 /* (xor (comparison foo bar) (const_int 1)) can become the reversed
0802d516
RK
4890 comparison if STORE_FLAG_VALUE is 1. */
4891 if (STORE_FLAG_VALUE == 1
4892 && op1 == const1_rtx
8079805d
RK
4893 && GET_RTX_CLASS (GET_CODE (op0)) == '<'
4894 && reversible_comparison_p (op0))
4895 return gen_rtx_combine (reverse_condition (GET_CODE (op0)),
4896 mode, XEXP (op0, 0), XEXP (op0, 1));
500c518b
RK
4897
4898 /* (lshiftrt foo C) where C is the number of bits in FOO minus 1
4899 is (lt foo (const_int 0)), so we can perform the above
0802d516 4900 simplification if STORE_FLAG_VALUE is 1. */
500c518b 4901
0802d516
RK
4902 if (STORE_FLAG_VALUE == 1
4903 && op1 == const1_rtx
8079805d
RK
4904 && GET_CODE (op0) == LSHIFTRT
4905 && GET_CODE (XEXP (op0, 1)) == CONST_INT
4906 && INTVAL (XEXP (op0, 1)) == GET_MODE_BITSIZE (mode) - 1)
4907 return gen_rtx_combine (GE, mode, XEXP (op0, 0), const0_rtx);
230d793d
RS
4908
4909 /* (xor (comparison foo bar) (const_int sign-bit))
4910 when STORE_FLAG_VALUE is the sign bit. */
5f4f0e22 4911 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
0802d516 4912 && ((STORE_FLAG_VALUE & GET_MODE_MASK (mode))
5f4f0e22 4913 == (HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (mode) - 1))
8079805d
RK
4914 && op1 == const_true_rtx
4915 && GET_RTX_CLASS (GET_CODE (op0)) == '<'
4916 && reversible_comparison_p (op0))
4917 return gen_rtx_combine (reverse_condition (GET_CODE (op0)),
4918 mode, XEXP (op0, 0), XEXP (op0, 1));
230d793d
RS
4919 break;
4920 }
4921
4922 return x;
4923}
4924\f
4925/* We consider ZERO_EXTRACT, SIGN_EXTRACT, and SIGN_EXTEND as "compound
4926 operations" because they can be replaced with two more basic operations.
4927 ZERO_EXTEND is also considered "compound" because it can be replaced with
4928 an AND operation, which is simpler, though only one operation.
4929
4930 The function expand_compound_operation is called with an rtx expression
4931 and will convert it to the appropriate shifts and AND operations,
4932 simplifying at each stage.
4933
4934 The function make_compound_operation is called to convert an expression
4935 consisting of shifts and ANDs into the equivalent compound expression.
4936 It is the inverse of this function, loosely speaking. */
4937
4938static rtx
4939expand_compound_operation (x)
4940 rtx x;
4941{
4942 int pos = 0, len;
4943 int unsignedp = 0;
4944 int modewidth;
4945 rtx tem;
4946
4947 switch (GET_CODE (x))
4948 {
4949 case ZERO_EXTEND:
4950 unsignedp = 1;
4951 case SIGN_EXTEND:
75473182
RS
4952 /* We can't necessarily use a const_int for a multiword mode;
4953 it depends on implicitly extending the value.
4954 Since we don't know the right way to extend it,
4955 we can't tell whether the implicit way is right.
4956
4957 Even for a mode that is no wider than a const_int,
4958 we can't win, because we need to sign extend one of its bits through
4959 the rest of it, and we don't know which bit. */
230d793d 4960 if (GET_CODE (XEXP (x, 0)) == CONST_INT)
75473182 4961 return x;
230d793d 4962
8079805d
RK
4963 /* Return if (subreg:MODE FROM 0) is not a safe replacement for
4964 (zero_extend:MODE FROM) or (sign_extend:MODE FROM). It is for any MEM
4965 because (SUBREG (MEM...)) is guaranteed to cause the MEM to be
4966 reloaded. If not for that, MEM's would very rarely be safe.
4967
4968 Reject MODEs bigger than a word, because we might not be able
4969 to reference a two-register group starting with an arbitrary register
4970 (and currently gen_lowpart might crash for a SUBREG). */
4971
4972 if (GET_MODE_SIZE (GET_MODE (XEXP (x, 0))) > UNITS_PER_WORD)
230d793d
RS
4973 return x;
4974
4975 len = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)));
4976 /* If the inner object has VOIDmode (the only way this can happen
4977 is if it is a ASM_OPERANDS), we can't do anything since we don't
4978 know how much masking to do. */
4979 if (len == 0)
4980 return x;
4981
4982 break;
4983
4984 case ZERO_EXTRACT:
4985 unsignedp = 1;
4986 case SIGN_EXTRACT:
4987 /* If the operand is a CLOBBER, just return it. */
4988 if (GET_CODE (XEXP (x, 0)) == CLOBBER)
4989 return XEXP (x, 0);
4990
4991 if (GET_CODE (XEXP (x, 1)) != CONST_INT
4992 || GET_CODE (XEXP (x, 2)) != CONST_INT
4993 || GET_MODE (XEXP (x, 0)) == VOIDmode)
4994 return x;
4995
4996 len = INTVAL (XEXP (x, 1));
4997 pos = INTVAL (XEXP (x, 2));
4998
4999 /* If this goes outside the object being extracted, replace the object
5000 with a (use (mem ...)) construct that only combine understands
5001 and is used only for this purpose. */
5002 if (len + pos > GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))))
5003 SUBST (XEXP (x, 0), gen_rtx (USE, GET_MODE (x), XEXP (x, 0)));
5004
f76b9db2
ILT
5005 if (BITS_BIG_ENDIAN)
5006 pos = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - len - pos;
5007
230d793d
RS
5008 break;
5009
5010 default:
5011 return x;
5012 }
5013
0f13a422
ILT
5014 /* We can optimize some special cases of ZERO_EXTEND. */
5015 if (GET_CODE (x) == ZERO_EXTEND)
5016 {
5017 /* (zero_extend:DI (truncate:SI foo:DI)) is just foo:DI if we
5018 know that the last value didn't have any inappropriate bits
5019 set. */
5020 if (GET_CODE (XEXP (x, 0)) == TRUNCATE
5021 && GET_MODE (XEXP (XEXP (x, 0), 0)) == GET_MODE (x)
5022 && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
5023 && (nonzero_bits (XEXP (XEXP (x, 0), 0), GET_MODE (x))
5024 & ~ GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
5025 return XEXP (XEXP (x, 0), 0);
5026
5027 /* Likewise for (zero_extend:DI (subreg:SI foo:DI 0)). */
5028 if (GET_CODE (XEXP (x, 0)) == SUBREG
5029 && GET_MODE (SUBREG_REG (XEXP (x, 0))) == GET_MODE (x)
5030 && subreg_lowpart_p (XEXP (x, 0))
5031 && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
5032 && (nonzero_bits (SUBREG_REG (XEXP (x, 0)), GET_MODE (x))
5033 & ~ GET_MODE_MASK (GET_MODE (SUBREG_REG (x)))) == 0)
5034 return SUBREG_REG (XEXP (x, 0));
5035
5036 /* (zero_extend:DI (truncate:SI foo:DI)) is just foo:DI when foo
5037 is a comparison and STORE_FLAG_VALUE permits. This is like
5038 the first case, but it works even when GET_MODE (x) is larger
5039 than HOST_WIDE_INT. */
5040 if (GET_CODE (XEXP (x, 0)) == TRUNCATE
5041 && GET_MODE (XEXP (XEXP (x, 0), 0)) == GET_MODE (x)
5042 && GET_RTX_CLASS (GET_CODE (XEXP (XEXP (x, 0), 0))) == '<'
5043 && (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
5044 <= HOST_BITS_PER_WIDE_INT)
5045 && ((HOST_WIDE_INT) STORE_FLAG_VALUE
5046 & ~ GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
5047 return XEXP (XEXP (x, 0), 0);
5048
5049 /* Likewise for (zero_extend:DI (subreg:SI foo:DI 0)). */
5050 if (GET_CODE (XEXP (x, 0)) == SUBREG
5051 && GET_MODE (SUBREG_REG (XEXP (x, 0))) == GET_MODE (x)
5052 && subreg_lowpart_p (XEXP (x, 0))
5053 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0)))) == '<'
5054 && (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
5055 <= HOST_BITS_PER_WIDE_INT)
5056 && ((HOST_WIDE_INT) STORE_FLAG_VALUE
5057 & ~ GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
5058 return SUBREG_REG (XEXP (x, 0));
5059
5060 /* If sign extension is cheaper than zero extension, then use it
5061 if we know that no extraneous bits are set, and that the high
5062 bit is not set. */
5063 if (flag_expensive_optimizations
5064 && ((GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
5065 && ((nonzero_bits (XEXP (x, 0), GET_MODE (x))
5066 & ~ (((unsigned HOST_WIDE_INT)
5067 GET_MODE_MASK (GET_MODE (XEXP (x, 0))))
5068 >> 1))
5069 == 0))
5070 || (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
5071 && (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
5072 <= HOST_BITS_PER_WIDE_INT)
5073 && (((HOST_WIDE_INT) STORE_FLAG_VALUE
5074 & ~ (((unsigned HOST_WIDE_INT)
5075 GET_MODE_MASK (GET_MODE (XEXP (x, 0))))
5076 >> 1))
5077 == 0))))
5078 {
5079 rtx temp = gen_rtx (SIGN_EXTEND, GET_MODE (x), XEXP (x, 0));
5080
5081 if (rtx_cost (temp, SET) < rtx_cost (x, SET))
5082 return expand_compound_operation (temp);
5083 }
5084 }
5085
230d793d
RS
5086 /* If we reach here, we want to return a pair of shifts. The inner
5087 shift is a left shift of BITSIZE - POS - LEN bits. The outer
5088 shift is a right shift of BITSIZE - LEN bits. It is arithmetic or
5089 logical depending on the value of UNSIGNEDP.
5090
5091 If this was a ZERO_EXTEND or ZERO_EXTRACT, this pair of shifts will be
5092 converted into an AND of a shift.
5093
5094 We must check for the case where the left shift would have a negative
5095 count. This can happen in a case like (x >> 31) & 255 on machines
5096 that can't shift by a constant. On those machines, we would first
5097 combine the shift with the AND to produce a variable-position
5098 extraction. Then the constant of 31 would be substituted in to produce
5099 a such a position. */
5100
5101 modewidth = GET_MODE_BITSIZE (GET_MODE (x));
5102 if (modewidth >= pos - len)
5f4f0e22 5103 tem = simplify_shift_const (NULL_RTX, unsignedp ? LSHIFTRT : ASHIFTRT,
230d793d 5104 GET_MODE (x),
5f4f0e22
CH
5105 simplify_shift_const (NULL_RTX, ASHIFT,
5106 GET_MODE (x),
230d793d
RS
5107 XEXP (x, 0),
5108 modewidth - pos - len),
5109 modewidth - len);
5110
5f4f0e22
CH
5111 else if (unsignedp && len < HOST_BITS_PER_WIDE_INT)
5112 tem = simplify_and_const_int (NULL_RTX, GET_MODE (x),
5113 simplify_shift_const (NULL_RTX, LSHIFTRT,
230d793d
RS
5114 GET_MODE (x),
5115 XEXP (x, 0), pos),
5f4f0e22 5116 ((HOST_WIDE_INT) 1 << len) - 1);
230d793d
RS
5117 else
5118 /* Any other cases we can't handle. */
5119 return x;
5120
5121
5122 /* If we couldn't do this for some reason, return the original
5123 expression. */
5124 if (GET_CODE (tem) == CLOBBER)
5125 return x;
5126
5127 return tem;
5128}
5129\f
5130/* X is a SET which contains an assignment of one object into
5131 a part of another (such as a bit-field assignment, STRICT_LOW_PART,
5132 or certain SUBREGS). If possible, convert it into a series of
5133 logical operations.
5134
5135 We half-heartedly support variable positions, but do not at all
5136 support variable lengths. */
5137
5138static rtx
5139expand_field_assignment (x)
5140 rtx x;
5141{
5142 rtx inner;
0f41302f 5143 rtx pos; /* Always counts from low bit. */
230d793d
RS
5144 int len;
5145 rtx mask;
5146 enum machine_mode compute_mode;
5147
5148 /* Loop until we find something we can't simplify. */
5149 while (1)
5150 {
5151 if (GET_CODE (SET_DEST (x)) == STRICT_LOW_PART
5152 && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG)
5153 {
5154 inner = SUBREG_REG (XEXP (SET_DEST (x), 0));
5155 len = GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)));
4d9cfc7b 5156 pos = GEN_INT (BITS_PER_WORD * SUBREG_WORD (XEXP (SET_DEST (x), 0)));
230d793d
RS
5157 }
5158 else if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
5159 && GET_CODE (XEXP (SET_DEST (x), 1)) == CONST_INT)
5160 {
5161 inner = XEXP (SET_DEST (x), 0);
5162 len = INTVAL (XEXP (SET_DEST (x), 1));
5163 pos = XEXP (SET_DEST (x), 2);
5164
5165 /* If the position is constant and spans the width of INNER,
5166 surround INNER with a USE to indicate this. */
5167 if (GET_CODE (pos) == CONST_INT
5168 && INTVAL (pos) + len > GET_MODE_BITSIZE (GET_MODE (inner)))
5169 inner = gen_rtx (USE, GET_MODE (SET_DEST (x)), inner);
5170
f76b9db2
ILT
5171 if (BITS_BIG_ENDIAN)
5172 {
5173 if (GET_CODE (pos) == CONST_INT)
5174 pos = GEN_INT (GET_MODE_BITSIZE (GET_MODE (inner)) - len
5175 - INTVAL (pos));
5176 else if (GET_CODE (pos) == MINUS
5177 && GET_CODE (XEXP (pos, 1)) == CONST_INT
5178 && (INTVAL (XEXP (pos, 1))
5179 == GET_MODE_BITSIZE (GET_MODE (inner)) - len))
5180 /* If position is ADJUST - X, new position is X. */
5181 pos = XEXP (pos, 0);
5182 else
5183 pos = gen_binary (MINUS, GET_MODE (pos),
5184 GEN_INT (GET_MODE_BITSIZE (GET_MODE (inner))
5185 - len),
5186 pos);
5187 }
230d793d
RS
5188 }
5189
5190 /* A SUBREG between two modes that occupy the same numbers of words
5191 can be done by moving the SUBREG to the source. */
5192 else if (GET_CODE (SET_DEST (x)) == SUBREG
5193 && (((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
5194 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
5195 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
5196 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
5197 {
5198 x = gen_rtx (SET, VOIDmode, SUBREG_REG (SET_DEST (x)),
5199 gen_lowpart_for_combine (GET_MODE (SUBREG_REG (SET_DEST (x))),
5200 SET_SRC (x)));
5201 continue;
5202 }
5203 else
5204 break;
5205
5206 while (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner))
5207 inner = SUBREG_REG (inner);
5208
5209 compute_mode = GET_MODE (inner);
5210
5211 /* Compute a mask of LEN bits, if we can do this on the host machine. */
5f4f0e22
CH
5212 if (len < HOST_BITS_PER_WIDE_INT)
5213 mask = GEN_INT (((HOST_WIDE_INT) 1 << len) - 1);
230d793d
RS
5214 else
5215 break;
5216
5217 /* Now compute the equivalent expression. Make a copy of INNER
5218 for the SET_DEST in case it is a MEM into which we will substitute;
5219 we don't want shared RTL in that case. */
5220 x = gen_rtx (SET, VOIDmode, copy_rtx (inner),
5221 gen_binary (IOR, compute_mode,
5222 gen_binary (AND, compute_mode,
5223 gen_unary (NOT, compute_mode,
0c1c8ea6 5224 compute_mode,
230d793d
RS
5225 gen_binary (ASHIFT,
5226 compute_mode,
5227 mask, pos)),
5228 inner),
5229 gen_binary (ASHIFT, compute_mode,
5230 gen_binary (AND, compute_mode,
5231 gen_lowpart_for_combine
5232 (compute_mode,
5233 SET_SRC (x)),
5234 mask),
5235 pos)));
5236 }
5237
5238 return x;
5239}
5240\f
8999a12e
RK
5241/* Return an RTX for a reference to LEN bits of INNER. If POS_RTX is nonzero,
5242 it is an RTX that represents a variable starting position; otherwise,
5243 POS is the (constant) starting bit position (counted from the LSB).
230d793d
RS
5244
5245 INNER may be a USE. This will occur when we started with a bitfield
5246 that went outside the boundary of the object in memory, which is
5247 allowed on most machines. To isolate this case, we produce a USE
5248 whose mode is wide enough and surround the MEM with it. The only
5249 code that understands the USE is this routine. If it is not removed,
5250 it will cause the resulting insn not to match.
5251
5252 UNSIGNEDP is non-zero for an unsigned reference and zero for a
5253 signed reference.
5254
5255 IN_DEST is non-zero if this is a reference in the destination of a
5256 SET. This is used when a ZERO_ or SIGN_EXTRACT isn't needed. If non-zero,
5257 a STRICT_LOW_PART will be used, if zero, ZERO_EXTEND or SIGN_EXTEND will
5258 be used.
5259
5260 IN_COMPARE is non-zero if we are in a COMPARE. This means that a
5261 ZERO_EXTRACT should be built even for bits starting at bit 0.
5262
76184def
DE
5263 MODE is the desired mode of the result (if IN_DEST == 0).
5264
5265 The result is an RTX for the extraction or NULL_RTX if the target
5266 can't handle it. */
230d793d
RS
5267
5268static rtx
5269make_extraction (mode, inner, pos, pos_rtx, len,
5270 unsignedp, in_dest, in_compare)
5271 enum machine_mode mode;
5272 rtx inner;
5273 int pos;
5274 rtx pos_rtx;
5275 int len;
5276 int unsignedp;
5277 int in_dest, in_compare;
5278{
94b4b17a
RS
5279 /* This mode describes the size of the storage area
5280 to fetch the overall value from. Within that, we
5281 ignore the POS lowest bits, etc. */
230d793d
RS
5282 enum machine_mode is_mode = GET_MODE (inner);
5283 enum machine_mode inner_mode;
d7cd794f
RK
5284 enum machine_mode wanted_inner_mode = byte_mode;
5285 enum machine_mode wanted_inner_reg_mode = word_mode;
230d793d
RS
5286 enum machine_mode pos_mode = word_mode;
5287 enum machine_mode extraction_mode = word_mode;
5288 enum machine_mode tmode = mode_for_size (len, MODE_INT, 1);
5289 int spans_byte = 0;
5290 rtx new = 0;
8999a12e 5291 rtx orig_pos_rtx = pos_rtx;
6139ff20 5292 int orig_pos;
230d793d
RS
5293
5294 /* Get some information about INNER and get the innermost object. */
5295 if (GET_CODE (inner) == USE)
94b4b17a 5296 /* (use:SI (mem:QI foo)) stands for (mem:SI foo). */
230d793d
RS
5297 /* We don't need to adjust the position because we set up the USE
5298 to pretend that it was a full-word object. */
5299 spans_byte = 1, inner = XEXP (inner, 0);
5300 else if (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner))
94b4b17a
RS
5301 {
5302 /* If going from (subreg:SI (mem:QI ...)) to (mem:QI ...),
5303 consider just the QI as the memory to extract from.
5304 The subreg adds or removes high bits; its mode is
5305 irrelevant to the meaning of this extraction,
5306 since POS and LEN count from the lsb. */
5307 if (GET_CODE (SUBREG_REG (inner)) == MEM)
5308 is_mode = GET_MODE (SUBREG_REG (inner));
5309 inner = SUBREG_REG (inner);
5310 }
230d793d
RS
5311
5312 inner_mode = GET_MODE (inner);
5313
5314 if (pos_rtx && GET_CODE (pos_rtx) == CONST_INT)
8999a12e 5315 pos = INTVAL (pos_rtx), pos_rtx = 0;
230d793d
RS
5316
5317 /* See if this can be done without an extraction. We never can if the
5318 width of the field is not the same as that of some integer mode. For
5319 registers, we can only avoid the extraction if the position is at the
5320 low-order bit and this is either not in the destination or we have the
5321 appropriate STRICT_LOW_PART operation available.
5322
5323 For MEM, we can avoid an extract if the field starts on an appropriate
5324 boundary and we can change the mode of the memory reference. However,
5325 we cannot directly access the MEM if we have a USE and the underlying
5326 MEM is not TMODE. This combination means that MEM was being used in a
5327 context where bits outside its mode were being referenced; that is only
5328 valid in bit-field insns. */
5329
5330 if (tmode != BLKmode
5331 && ! (spans_byte && inner_mode != tmode)
4d9cfc7b
RK
5332 && ((pos_rtx == 0 && (pos % BITS_PER_WORD) == 0
5333 && GET_CODE (inner) != MEM
230d793d 5334 && (! in_dest
df62f951
RK
5335 || (GET_CODE (inner) == REG
5336 && (movstrict_optab->handlers[(int) tmode].insn_code
5337 != CODE_FOR_nothing))))
8999a12e 5338 || (GET_CODE (inner) == MEM && pos_rtx == 0
dfbe1b2f
RK
5339 && (pos
5340 % (STRICT_ALIGNMENT ? GET_MODE_ALIGNMENT (tmode)
5341 : BITS_PER_UNIT)) == 0
230d793d
RS
5342 /* We can't do this if we are widening INNER_MODE (it
5343 may not be aligned, for one thing). */
5344 && GET_MODE_BITSIZE (inner_mode) >= GET_MODE_BITSIZE (tmode)
5345 && (inner_mode == tmode
5346 || (! mode_dependent_address_p (XEXP (inner, 0))
5347 && ! MEM_VOLATILE_P (inner))))))
5348 {
230d793d
RS
5349 /* If INNER is a MEM, make a new MEM that encompasses just the desired
5350 field. If the original and current mode are the same, we need not
5351 adjust the offset. Otherwise, we do if bytes big endian.
5352
4d9cfc7b
RK
5353 If INNER is not a MEM, get a piece consisting of just the field
5354 of interest (in this case POS % BITS_PER_WORD must be 0). */
230d793d
RS
5355
5356 if (GET_CODE (inner) == MEM)
5357 {
94b4b17a
RS
5358 int offset;
5359 /* POS counts from lsb, but make OFFSET count in memory order. */
5360 if (BYTES_BIG_ENDIAN)
5361 offset = (GET_MODE_BITSIZE (is_mode) - len - pos) / BITS_PER_UNIT;
5362 else
5363 offset = pos / BITS_PER_UNIT;
230d793d
RS
5364
5365 new = gen_rtx (MEM, tmode, plus_constant (XEXP (inner, 0), offset));
5366 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (inner);
5367 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (inner);
5368 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (inner);
5369 }
df62f951 5370 else if (GET_CODE (inner) == REG)
c0d3ac4d
RK
5371 {
5372 /* We can't call gen_lowpart_for_combine here since we always want
5373 a SUBREG and it would sometimes return a new hard register. */
5374 if (tmode != inner_mode)
5375 new = gen_rtx (SUBREG, tmode, inner,
5376 (WORDS_BIG_ENDIAN
5377 && GET_MODE_SIZE (inner_mode) > UNITS_PER_WORD
4d9cfc7b
RK
5378 ? (((GET_MODE_SIZE (inner_mode)
5379 - GET_MODE_SIZE (tmode))
5380 / UNITS_PER_WORD)
5381 - pos / BITS_PER_WORD)
5382 : pos / BITS_PER_WORD));
c0d3ac4d
RK
5383 else
5384 new = inner;
5385 }
230d793d 5386 else
6139ff20
RK
5387 new = force_to_mode (inner, tmode,
5388 len >= HOST_BITS_PER_WIDE_INT
5389 ? GET_MODE_MASK (tmode)
5390 : ((HOST_WIDE_INT) 1 << len) - 1,
e3d616e3 5391 NULL_RTX, 0);
230d793d
RS
5392
5393 /* If this extraction is going into the destination of a SET,
5394 make a STRICT_LOW_PART unless we made a MEM. */
5395
5396 if (in_dest)
5397 return (GET_CODE (new) == MEM ? new
77fa0940
RK
5398 : (GET_CODE (new) != SUBREG
5399 ? gen_rtx (CLOBBER, tmode, const0_rtx)
5400 : gen_rtx_combine (STRICT_LOW_PART, VOIDmode, new)));
230d793d
RS
5401
5402 /* Otherwise, sign- or zero-extend unless we already are in the
5403 proper mode. */
5404
5405 return (mode == tmode ? new
5406 : gen_rtx_combine (unsignedp ? ZERO_EXTEND : SIGN_EXTEND,
5407 mode, new));
5408 }
5409
cc471082
RS
5410 /* Unless this is a COMPARE or we have a funny memory reference,
5411 don't do anything with zero-extending field extracts starting at
5412 the low-order bit since they are simple AND operations. */
8999a12e
RK
5413 if (pos_rtx == 0 && pos == 0 && ! in_dest
5414 && ! in_compare && ! spans_byte && unsignedp)
230d793d
RS
5415 return 0;
5416
e7373556
RK
5417 /* Unless we are allowed to span bytes, reject this if we would be
5418 spanning bytes or if the position is not a constant and the length
5419 is not 1. In all other cases, we would only be going outside
5420 out object in cases when an original shift would have been
5421 undefined. */
5422 if (! spans_byte
5423 && ((pos_rtx == 0 && pos + len > GET_MODE_BITSIZE (is_mode))
5424 || (pos_rtx != 0 && len != 1)))
5425 return 0;
5426
d7cd794f 5427 /* Get the mode to use should INNER not be a MEM, the mode for the position,
230d793d
RS
5428 and the mode for the result. */
5429#ifdef HAVE_insv
5430 if (in_dest)
5431 {
d7cd794f 5432 wanted_inner_reg_mode = insn_operand_mode[(int) CODE_FOR_insv][0];
230d793d
RS
5433 pos_mode = insn_operand_mode[(int) CODE_FOR_insv][2];
5434 extraction_mode = insn_operand_mode[(int) CODE_FOR_insv][3];
5435 }
5436#endif
5437
5438#ifdef HAVE_extzv
5439 if (! in_dest && unsignedp)
5440 {
d7cd794f 5441 wanted_inner_reg_mode = insn_operand_mode[(int) CODE_FOR_extzv][1];
230d793d
RS
5442 pos_mode = insn_operand_mode[(int) CODE_FOR_extzv][3];
5443 extraction_mode = insn_operand_mode[(int) CODE_FOR_extzv][0];
5444 }
5445#endif
5446
5447#ifdef HAVE_extv
5448 if (! in_dest && ! unsignedp)
5449 {
d7cd794f 5450 wanted_inner_reg_mode = insn_operand_mode[(int) CODE_FOR_extv][1];
230d793d
RS
5451 pos_mode = insn_operand_mode[(int) CODE_FOR_extv][3];
5452 extraction_mode = insn_operand_mode[(int) CODE_FOR_extv][0];
5453 }
5454#endif
5455
5456 /* Never narrow an object, since that might not be safe. */
5457
5458 if (mode != VOIDmode
5459 && GET_MODE_SIZE (extraction_mode) < GET_MODE_SIZE (mode))
5460 extraction_mode = mode;
5461
5462 if (pos_rtx && GET_MODE (pos_rtx) != VOIDmode
5463 && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx)))
5464 pos_mode = GET_MODE (pos_rtx);
5465
d7cd794f
RK
5466 /* If this is not from memory, the desired mode is wanted_inner_reg_mode;
5467 if we have to change the mode of memory and cannot, the desired mode is
5468 EXTRACTION_MODE. */
5469 if (GET_CODE (inner) != MEM)
5470 wanted_inner_mode = wanted_inner_reg_mode;
5471 else if (inner_mode != wanted_inner_mode
5472 && (mode_dependent_address_p (XEXP (inner, 0))
5473 || MEM_VOLATILE_P (inner)))
5474 wanted_inner_mode = extraction_mode;
230d793d 5475
6139ff20
RK
5476 orig_pos = pos;
5477
f76b9db2
ILT
5478 if (BITS_BIG_ENDIAN)
5479 {
cf54c2cd
DE
5480 /* POS is passed as if BITS_BIG_ENDIAN == 0, so we need to convert it to
5481 BITS_BIG_ENDIAN style. If position is constant, compute new
5482 position. Otherwise, build subtraction.
5483 Note that POS is relative to the mode of the original argument.
5484 If it's a MEM we need to recompute POS relative to that.
5485 However, if we're extracting from (or inserting into) a register,
5486 we want to recompute POS relative to wanted_inner_mode. */
5487 int width = (GET_CODE (inner) == MEM
5488 ? GET_MODE_BITSIZE (is_mode)
5489 : GET_MODE_BITSIZE (wanted_inner_mode));
5490
f76b9db2 5491 if (pos_rtx == 0)
cf54c2cd 5492 pos = width - len - pos;
f76b9db2
ILT
5493 else
5494 pos_rtx
5495 = gen_rtx_combine (MINUS, GET_MODE (pos_rtx),
cf54c2cd
DE
5496 GEN_INT (width - len), pos_rtx);
5497 /* POS may be less than 0 now, but we check for that below.
5498 Note that it can only be less than 0 if GET_CODE (inner) != MEM. */
f76b9db2 5499 }
230d793d
RS
5500
5501 /* If INNER has a wider mode, make it smaller. If this is a constant
5502 extract, try to adjust the byte to point to the byte containing
5503 the value. */
d7cd794f
RK
5504 if (wanted_inner_mode != VOIDmode
5505 && GET_MODE_SIZE (wanted_inner_mode) < GET_MODE_SIZE (is_mode)
230d793d 5506 && ((GET_CODE (inner) == MEM
d7cd794f 5507 && (inner_mode == wanted_inner_mode
230d793d
RS
5508 || (! mode_dependent_address_p (XEXP (inner, 0))
5509 && ! MEM_VOLATILE_P (inner))))))
5510 {
5511 int offset = 0;
5512
5513 /* The computations below will be correct if the machine is big
5514 endian in both bits and bytes or little endian in bits and bytes.
5515 If it is mixed, we must adjust. */
5516
230d793d 5517 /* If bytes are big endian and we had a paradoxical SUBREG, we must
0f41302f 5518 adjust OFFSET to compensate. */
f76b9db2
ILT
5519 if (BYTES_BIG_ENDIAN
5520 && ! spans_byte
230d793d
RS
5521 && GET_MODE_SIZE (inner_mode) < GET_MODE_SIZE (is_mode))
5522 offset -= GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (inner_mode);
230d793d
RS
5523
5524 /* If this is a constant position, we can move to the desired byte. */
8999a12e 5525 if (pos_rtx == 0)
230d793d
RS
5526 {
5527 offset += pos / BITS_PER_UNIT;
d7cd794f 5528 pos %= GET_MODE_BITSIZE (wanted_inner_mode);
230d793d
RS
5529 }
5530
f76b9db2
ILT
5531 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN
5532 && ! spans_byte
d7cd794f 5533 && is_mode != wanted_inner_mode)
c6b3f1f2 5534 offset = (GET_MODE_SIZE (is_mode)
d7cd794f 5535 - GET_MODE_SIZE (wanted_inner_mode) - offset);
c6b3f1f2 5536
d7cd794f 5537 if (offset != 0 || inner_mode != wanted_inner_mode)
230d793d 5538 {
d7cd794f 5539 rtx newmem = gen_rtx (MEM, wanted_inner_mode,
230d793d
RS
5540 plus_constant (XEXP (inner, 0), offset));
5541 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (inner);
5542 MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (inner);
5543 MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (inner);
5544 inner = newmem;
5545 }
5546 }
5547
9e74dc41
RK
5548 /* If INNER is not memory, we can always get it into the proper mode. If we
5549 are changing its mode, POS must be a constant and smaller than the size
5550 of the new mode. */
230d793d 5551 else if (GET_CODE (inner) != MEM)
9e74dc41
RK
5552 {
5553 if (GET_MODE (inner) != wanted_inner_mode
5554 && (pos_rtx != 0
5555 || orig_pos + len > GET_MODE_BITSIZE (wanted_inner_mode)))
5556 return 0;
5557
5558 inner = force_to_mode (inner, wanted_inner_mode,
5559 pos_rtx
5560 || len + orig_pos >= HOST_BITS_PER_WIDE_INT
5561 ? GET_MODE_MASK (wanted_inner_mode)
5562 : (((HOST_WIDE_INT) 1 << len) - 1) << orig_pos,
5563 NULL_RTX, 0);
5564 }
230d793d
RS
5565
5566 /* Adjust mode of POS_RTX, if needed. If we want a wider mode, we
5567 have to zero extend. Otherwise, we can just use a SUBREG. */
8999a12e 5568 if (pos_rtx != 0
230d793d
RS
5569 && GET_MODE_SIZE (pos_mode) > GET_MODE_SIZE (GET_MODE (pos_rtx)))
5570 pos_rtx = gen_rtx_combine (ZERO_EXTEND, pos_mode, pos_rtx);
8999a12e 5571 else if (pos_rtx != 0
230d793d
RS
5572 && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx)))
5573 pos_rtx = gen_lowpart_for_combine (pos_mode, pos_rtx);
5574
8999a12e
RK
5575 /* Make POS_RTX unless we already have it and it is correct. If we don't
5576 have a POS_RTX but we do have an ORIG_POS_RTX, the latter must
0f41302f 5577 be a CONST_INT. */
8999a12e
RK
5578 if (pos_rtx == 0 && orig_pos_rtx != 0 && INTVAL (orig_pos_rtx) == pos)
5579 pos_rtx = orig_pos_rtx;
5580
5581 else if (pos_rtx == 0)
5f4f0e22 5582 pos_rtx = GEN_INT (pos);
230d793d
RS
5583
5584 /* Make the required operation. See if we can use existing rtx. */
5585 new = gen_rtx_combine (unsignedp ? ZERO_EXTRACT : SIGN_EXTRACT,
5f4f0e22 5586 extraction_mode, inner, GEN_INT (len), pos_rtx);
230d793d
RS
5587 if (! in_dest)
5588 new = gen_lowpart_for_combine (mode, new);
5589
5590 return new;
5591}
5592\f
71923da7
RK
5593/* See if X contains an ASHIFT of COUNT or more bits that can be commuted
5594 with any other operations in X. Return X without that shift if so. */
5595
5596static rtx
5597extract_left_shift (x, count)
5598 rtx x;
5599 int count;
5600{
5601 enum rtx_code code = GET_CODE (x);
5602 enum machine_mode mode = GET_MODE (x);
5603 rtx tem;
5604
5605 switch (code)
5606 {
5607 case ASHIFT:
5608 /* This is the shift itself. If it is wide enough, we will return
5609 either the value being shifted if the shift count is equal to
5610 COUNT or a shift for the difference. */
5611 if (GET_CODE (XEXP (x, 1)) == CONST_INT
5612 && INTVAL (XEXP (x, 1)) >= count)
5613 return simplify_shift_const (NULL_RTX, ASHIFT, mode, XEXP (x, 0),
5614 INTVAL (XEXP (x, 1)) - count);
5615 break;
5616
5617 case NEG: case NOT:
5618 if ((tem = extract_left_shift (XEXP (x, 0), count)) != 0)
0c1c8ea6 5619 return gen_unary (code, mode, mode, tem);
71923da7
RK
5620
5621 break;
5622
5623 case PLUS: case IOR: case XOR: case AND:
5624 /* If we can safely shift this constant and we find the inner shift,
5625 make a new operation. */
5626 if (GET_CODE (XEXP (x,1)) == CONST_INT
5627 && (INTVAL (XEXP (x, 1)) & (((HOST_WIDE_INT) 1 << count)) - 1) == 0
5628 && (tem = extract_left_shift (XEXP (x, 0), count)) != 0)
5629 return gen_binary (code, mode, tem,
5630 GEN_INT (INTVAL (XEXP (x, 1)) >> count));
5631
5632 break;
5633 }
5634
5635 return 0;
5636}
5637\f
230d793d
RS
5638/* Look at the expression rooted at X. Look for expressions
5639 equivalent to ZERO_EXTRACT, SIGN_EXTRACT, ZERO_EXTEND, SIGN_EXTEND.
5640 Form these expressions.
5641
5642 Return the new rtx, usually just X.
5643
5644 Also, for machines like the Vax that don't have logical shift insns,
5645 try to convert logical to arithmetic shift operations in cases where
5646 they are equivalent. This undoes the canonicalizations to logical
5647 shifts done elsewhere.
5648
5649 We try, as much as possible, to re-use rtl expressions to save memory.
5650
5651 IN_CODE says what kind of expression we are processing. Normally, it is
42495ca0
RK
5652 SET. In a memory address (inside a MEM, PLUS or minus, the latter two
5653 being kludges), it is MEM. When processing the arguments of a comparison
230d793d
RS
5654 or a COMPARE against zero, it is COMPARE. */
5655
static rtx
make_compound_operation (x, in_code)
     rtx x;
     enum rtx_code in_code;
{
  enum rtx_code code = GET_CODE (x);
  enum machine_mode mode = GET_MODE (x);
  int mode_width = GET_MODE_BITSIZE (mode);
  rtx rhs, lhs;
  enum rtx_code next_code;
  int i;
  rtx new = 0;
  rtx tem;
  char *fmt;

  /* Select the code to be used in recursive calls.  Once we are inside an
     address, we stay there.  If we have a comparison, set to COMPARE,
     but once inside, go back to our default of SET.  */

  next_code = (code == MEM || code == PLUS || code == MINUS ? MEM
	       : ((code == COMPARE || GET_RTX_CLASS (code) == '<')
		  && XEXP (x, 1) == const0_rtx) ? COMPARE
	       : in_code == COMPARE ? SET : in_code);

  /* Process depending on the code of this operation.  If NEW is set
     non-zero, it will be returned.  */

  switch (code)
    {
    case ASHIFT:
      /* Convert shifts by constants into multiplications if inside
	 an address.  */
      if (in_code == MEM && GET_CODE (XEXP (x, 1)) == CONST_INT
	  && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
	  && INTVAL (XEXP (x, 1)) >= 0)
	{
	  new = make_compound_operation (XEXP (x, 0), next_code);
	  new = gen_rtx_combine (MULT, mode, new,
				 GEN_INT ((HOST_WIDE_INT) 1
					  << INTVAL (XEXP (x, 1))));
	}
      break;

    case AND:
      /* If the second operand is not a constant, we can't do anything
	 with it.  */
      if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	break;

      /* If the constant is a power of two minus one and the first operand
	 is a logical right shift, make an extraction.  */
      if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
	  && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
	{
	  new = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code);
	  new = make_extraction (mode, new, 0, XEXP (XEXP (x, 0), 1), i, 1,
				 0, in_code == COMPARE);
	}

      /* Same as previous, but for (subreg (lshiftrt ...)) in first op.  */
      else if (GET_CODE (XEXP (x, 0)) == SUBREG
	       && subreg_lowpart_p (XEXP (x, 0))
	       && GET_CODE (SUBREG_REG (XEXP (x, 0))) == LSHIFTRT
	       && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
	{
	  new = make_compound_operation (XEXP (SUBREG_REG (XEXP (x, 0)), 0),
					 next_code);
	  /* The extraction is made in the mode of the inner shift, not the
	     (possibly narrower) SUBREG mode.  */
	  new = make_extraction (GET_MODE (SUBREG_REG (XEXP (x, 0))), new, 0,
				 XEXP (SUBREG_REG (XEXP (x, 0)), 1), i, 1,
				 0, in_code == COMPARE);
	}
      /* Same as previous, but for (xor/ior (lshiftrt...) (lshiftrt...)).  */
      else if ((GET_CODE (XEXP (x, 0)) == XOR
		|| GET_CODE (XEXP (x, 0)) == IOR)
	       && GET_CODE (XEXP (XEXP (x, 0), 0)) == LSHIFTRT
	       && GET_CODE (XEXP (XEXP (x, 0), 1)) == LSHIFTRT
	       && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
	{
	  /* Apply the distributive law, and then try to make extractions.  */
	  new = gen_rtx_combine (GET_CODE (XEXP (x, 0)), mode,
				 gen_rtx (AND, mode, XEXP (XEXP (x, 0), 0),
					  XEXP (x, 1)),
				 gen_rtx (AND, mode, XEXP (XEXP (x, 0), 1),
					  XEXP (x, 1)));
	  new = make_compound_operation (new, in_code);
	}

      /* If we have (and (rotate X C) M) and C is larger than the number
	 of bits in M, this is an extraction.  */

      else if (GET_CODE (XEXP (x, 0)) == ROTATE
	       && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
	       && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0
	       && i <= INTVAL (XEXP (XEXP (x, 0), 1)))
	{
	  new = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code);
	  new = make_extraction (mode, new,
				 (GET_MODE_BITSIZE (mode)
				  - INTVAL (XEXP (XEXP (x, 0), 1))),
				 NULL_RTX, i, 1, 0, in_code == COMPARE);
	}

      /* On machines without logical shifts, if the operand of the AND is
	 a logical shift and our mask turns off all the propagated sign
	 bits, we can replace the logical shift with an arithmetic shift.  */
      else if (ashr_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
	       && (lshr_optab->handlers[(int) mode].insn_code
		   == CODE_FOR_nothing)
	       && GET_CODE (XEXP (x, 0)) == LSHIFTRT
	       && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
	       && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
	       && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT
	       && mode_width <= HOST_BITS_PER_WIDE_INT)
	{
	  unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);

	  mask >>= INTVAL (XEXP (XEXP (x, 0), 1));
	  if ((INTVAL (XEXP (x, 1)) & ~mask) == 0)
	    SUBST (XEXP (x, 0),
		   gen_rtx_combine (ASHIFTRT, mode,
				    make_compound_operation (XEXP (XEXP (x, 0),
								   0),
							     next_code),
				    XEXP (XEXP (x, 0), 1)));
	}

      /* If the constant is one less than a power of two, this might be
	 representable by an extraction even if no shift is present.
	 If it doesn't end up being a ZERO_EXTEND, we will ignore it unless
	 we are in a COMPARE.  */
      else if ((i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
	new = make_extraction (mode,
			       make_compound_operation (XEXP (x, 0),
							next_code),
			       0, NULL_RTX, i, 1, 0, in_code == COMPARE);

      /* If we are in a comparison and this is an AND with a power of two,
	 convert this into the appropriate bit extract.  */
      else if (in_code == COMPARE
	       && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0)
	new = make_extraction (mode,
			       make_compound_operation (XEXP (x, 0),
							next_code),
			       i, NULL_RTX, 1, 1, 0, 1);

      break;

    case LSHIFTRT:
      /* If the sign bit is known to be zero, replace this with an
	 arithmetic shift.  */
      if (ashr_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing
	  && lshr_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
	  && mode_width <= HOST_BITS_PER_WIDE_INT
	  && (nonzero_bits (XEXP (x, 0), mode) & (1 << (mode_width - 1))) == 0)
	{
	  new = gen_rtx_combine (ASHIFTRT, mode,
				 make_compound_operation (XEXP (x, 0),
							  next_code),
				 XEXP (x, 1));
	  break;
	}

      /* ... fall through ...  */

    case ASHIFTRT:
      lhs = XEXP (x, 0);
      rhs = XEXP (x, 1);

      /* If we have (ashiftrt (ashift foo C1) C2) with C2 >= C1,
	 this is a SIGN_EXTRACT.  */
      if (GET_CODE (rhs) == CONST_INT
	  && GET_CODE (lhs) == ASHIFT
	  && GET_CODE (XEXP (lhs, 1)) == CONST_INT
	  && INTVAL (rhs) >= INTVAL (XEXP (lhs, 1)))
	{
	  new = make_compound_operation (XEXP (lhs, 0), next_code);
	  new = make_extraction (mode, new,
				 INTVAL (rhs) - INTVAL (XEXP (lhs, 1)),
				 NULL_RTX, mode_width - INTVAL (rhs),
				 code == LSHIFTRT, 0, in_code == COMPARE);
	}

      /* See if we have operations between an ASHIFTRT and an ASHIFT.
	 If so, try to merge the shifts into a SIGN_EXTEND.  We could
	 also do this for some cases of SIGN_EXTRACT, but it doesn't
	 seem worth the effort; the case checked for occurs on Alpha.  */

      if (GET_RTX_CLASS (GET_CODE (lhs)) != 'o'
	  && ! (GET_CODE (lhs) == SUBREG
		&& (GET_RTX_CLASS (GET_CODE (SUBREG_REG (lhs))) == 'o'))
	  && GET_CODE (rhs) == CONST_INT
	  && INTVAL (rhs) < HOST_BITS_PER_WIDE_INT
	  && (new = extract_left_shift (lhs, INTVAL (rhs))) != 0)
	new = make_extraction (mode, make_compound_operation (new, next_code),
			       0, NULL_RTX, mode_width - INTVAL (rhs),
			       code == LSHIFTRT, 0, in_code == COMPARE);

      break;

    case SUBREG:
      /* Call ourselves recursively on the inner expression.  If we are
	 narrowing the object and it has a different RTL code from
	 what it originally did, do this SUBREG as a force_to_mode.  */

      tem = make_compound_operation (SUBREG_REG (x), in_code);
      if (GET_CODE (tem) != GET_CODE (SUBREG_REG (x))
	  && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (tem))
	  && subreg_lowpart_p (x))
	{
	  rtx newer = force_to_mode (tem, mode,
				     GET_MODE_MASK (mode), NULL_RTX, 0);

	  /* If we have something other than a SUBREG, we might have
	     done an expansion, so rerun ourselves.  */
	  if (GET_CODE (newer) != SUBREG)
	    newer = make_compound_operation (newer, in_code);

	  return newer;
	}
      /* Otherwise fall out of the switch and process the operands
	 generically below.  */
    }

  if (new)
    {
      x = gen_lowpart_for_combine (mode, new);
      code = GET_CODE (x);
    }

  /* Now recursively process each operand of this operation.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    if (fmt[i] == 'e')
      {
	new = make_compound_operation (XEXP (x, i), next_code);
	SUBST (XEXP (x, i), new);
      }

  return x;
}
5893\f
5894/* Given M see if it is a value that would select a field of bits
5895 within an item, but not the entire word. Return -1 if not.
5896 Otherwise, return the starting position of the field, where 0 is the
5897 low-order bit.
5898
5899 *PLEN is set to the length of the field. */
5900
5901static int
5902get_pos_from_mask (m, plen)
5f4f0e22 5903 unsigned HOST_WIDE_INT m;
230d793d
RS
5904 int *plen;
5905{
5906 /* Get the bit number of the first 1 bit from the right, -1 if none. */
5907 int pos = exact_log2 (m & - m);
5908
5909 if (pos < 0)
5910 return -1;
5911
5912 /* Now shift off the low-order zero bits and see if we have a power of
5913 two minus 1. */
5914 *plen = exact_log2 ((m >> pos) + 1);
5915
5916 if (*plen <= 0)
5917 return -1;
5918
5919 return pos;
5920}
5921\f
6139ff20
RK
/* See if X can be simplified knowing that we will only refer to it in
   MODE and will only refer to those bits that are nonzero in MASK.
   If other bits are being computed or if masking operations are done
   that select a superset of the bits in MASK, they can sometimes be
   ignored.

   Return a possibly simplified expression, but always convert X to
   MODE.  If X is a CONST_INT, AND the CONST_INT with MASK.

   Also, if REG is non-zero and X is a register equal in value to REG,
   replace X with REG.

   If JUST_SELECT is nonzero, don't optimize by noticing that bits in MASK
   are all off in X.  This is used when X will be complemented, by either
   NOT, NEG, or XOR.  */

static rtx
force_to_mode (x, mode, mask, reg, just_select)
     rtx x;
     enum machine_mode mode;
     unsigned HOST_WIDE_INT mask;
     rtx reg;
     int just_select;
{
  enum rtx_code code = GET_CODE (x);
  /* Complementing operations must see the full mask in recursive calls.  */
  int next_select = just_select || code == XOR || code == NOT || code == NEG;
  enum machine_mode op_mode;
  unsigned HOST_WIDE_INT fuller_mask, nonzero;
  rtx op0, op1, temp;

  /* If this is a CALL, don't do anything.  Some of the code below
     will do the wrong thing since the mode of a CALL is VOIDmode.  */
  if (code == CALL)
    return x;

  /* We want to perform the operation in its present mode unless we know
     that the operation is valid in MODE, in which case we do the operation
     in MODE.  */
  op_mode = ((GET_MODE_CLASS (mode) == GET_MODE_CLASS (GET_MODE (x))
	      && code_to_optab[(int) code] != 0
	      && (code_to_optab[(int) code]->handlers[(int) mode].insn_code
		  != CODE_FOR_nothing))
	     ? mode : GET_MODE (x));

  /* It is not valid to do a right-shift in a narrower mode
     than the one it came in with.  */
  if ((code == LSHIFTRT || code == ASHIFTRT)
      && GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (GET_MODE (x)))
    op_mode = GET_MODE (x);

  /* Truncate MASK to fit OP_MODE.  */
  if (op_mode)
    mask &= GET_MODE_MASK (op_mode);

  /* When we have an arithmetic operation, or a shift whose count we
     do not know, we need to assume that all bits up to the highest-order
     bit in MASK will be needed.  This is how we form such a mask.  */
  if (op_mode)
    fuller_mask = (GET_MODE_BITSIZE (op_mode) >= HOST_BITS_PER_WIDE_INT
		   ? GET_MODE_MASK (op_mode)
		   : ((HOST_WIDE_INT) 1 << (floor_log2 (mask) + 1)) - 1);
  else
    fuller_mask = ~ (HOST_WIDE_INT) 0;

  /* Determine what bits of X are guaranteed to be (non)zero.  */
  nonzero = nonzero_bits (x, mode);

  /* If none of the bits in X are needed, return a zero.  */
  if (! just_select && (nonzero & mask) == 0)
    return const0_rtx;

  /* If X is a CONST_INT, return a new one.  Do this here since the
     test below will fail.  */
  if (GET_CODE (x) == CONST_INT)
    {
      HOST_WIDE_INT cval = INTVAL (x) & mask;
      int width = GET_MODE_BITSIZE (mode);

      /* If MODE is narrower than HOST_WIDE_INT and CVAL is a negative
	 number, sign extend it.  */
      if (width > 0 && width < HOST_BITS_PER_WIDE_INT
	  && (cval & ((HOST_WIDE_INT) 1 << (width - 1))) != 0)
	cval |= (HOST_WIDE_INT) -1 << width;

      return GEN_INT (cval);
    }

  /* If X is narrower than MODE and we want all the bits in X's mode, just
     get X in the proper mode.  */
  if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (mode)
      && (GET_MODE_MASK (GET_MODE (x)) & ~ mask) == 0)
    return gen_lowpart_for_combine (mode, x);

  /* If we aren't changing the mode, X is not a SUBREG, and all zero bits in
     MASK are already known to be zero in X, we need not do anything.  */
  if (GET_MODE (x) == mode && code != SUBREG && (~ mask & nonzero) == 0)
    return x;

  switch (code)
    {
    case CLOBBER:
      /* If X is a (clobber (const_int)), return it since we know we are
	 generating something that won't match.  */
      return x;

    case USE:
      /* X is a (use (mem ..)) that was made from a bit-field extraction that
	 spanned the boundary of the MEM.  If we are now masking so it is
	 within that boundary, we don't need the USE any more.  */
      if (! BITS_BIG_ENDIAN
	  && (mask & ~ GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
	return force_to_mode (XEXP (x, 0), mode, mask, reg, next_select);
      break;

    case SIGN_EXTEND:
    case ZERO_EXTEND:
    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      /* Rewrite the extension/extraction as shifts, then retry if the
	 expansion actually changed the code.  */
      x = expand_compound_operation (x);
      if (GET_CODE (x) != code)
	return force_to_mode (x, mode, mask, reg, next_select);
      break;

    case REG:
      /* If REG holds the same value as X (in either direction of the
	 value tracking), prefer REG.  */
      if (reg != 0 && (rtx_equal_p (get_last_value (reg), x)
		       || rtx_equal_p (reg, get_last_value (x))))
	x = reg;
      break;

    case SUBREG:
      if (subreg_lowpart_p (x)
	  /* We can ignore the effect of this SUBREG if it narrows the mode or
	     if the constant masks to zero all the bits the mode doesn't
	     have.  */
	  && ((GET_MODE_SIZE (GET_MODE (x))
	       < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
	      || (0 == (mask
			& GET_MODE_MASK (GET_MODE (x))
			& ~ GET_MODE_MASK (GET_MODE (SUBREG_REG (x)))))))
	return force_to_mode (SUBREG_REG (x), mode, mask, reg, next_select);
      break;

    case AND:
      /* If this is an AND with a constant, convert it into an AND
	 whose constant is the AND of that constant with MASK.  If it
	 remains an AND of MASK, delete it since it is redundant.  */

      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
	{
	  x = simplify_and_const_int (x, op_mode, XEXP (x, 0),
				      mask & INTVAL (XEXP (x, 1)));

	  /* If X is still an AND, see if it is an AND with a mask that
	     is just some low-order bits.  If so, and it is MASK, we don't
	     need it.  */

	  if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT
	      && INTVAL (XEXP (x, 1)) == mask)
	    x = XEXP (x, 0);

	  /* If it remains an AND, try making another AND with the bits
	     in the mode mask that aren't in MASK turned on.  If the
	     constant in the AND is wide enough, this might make a
	     cheaper constant.  */

	  if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT
	      && GET_MODE_MASK (GET_MODE (x)) != mask
	      && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT)
	    {
	      HOST_WIDE_INT cval = (INTVAL (XEXP (x, 1))
				    | (GET_MODE_MASK (GET_MODE (x)) & ~ mask));
	      int width = GET_MODE_BITSIZE (GET_MODE (x));
	      rtx y;

	      /* If MODE is narrower than HOST_WIDE_INT and CVAL is a negative
		 number, sign extend it.  */
	      if (width > 0 && width < HOST_BITS_PER_WIDE_INT
		  && (cval & ((HOST_WIDE_INT) 1 << (width - 1))) != 0)
		cval |= (HOST_WIDE_INT) -1 << width;

	      y = gen_binary (AND, GET_MODE (x), XEXP (x, 0), GEN_INT (cval));
	      if (rtx_cost (y, SET) < rtx_cost (x, SET))
		x = y;
	    }

	  break;
	}

      goto binop;

    case PLUS:
      /* In (and (plus FOO C1) M), if M is a mask that just turns off
	 low-order bits (as in an alignment operation) and FOO is already
	 aligned to that boundary, mask C1 to that boundary as well.
	 This may eliminate that PLUS and, later, the AND.  */

      {
	int width = GET_MODE_BITSIZE (mode);
	unsigned HOST_WIDE_INT smask = mask;

	/* If MODE is narrower than HOST_WIDE_INT and mask is a negative
	   number, sign extend it.  */

	if (width < HOST_BITS_PER_WIDE_INT
	    && (smask & ((HOST_WIDE_INT) 1 << (width - 1))) != 0)
	  smask |= (HOST_WIDE_INT) -1 << width;

	if (GET_CODE (XEXP (x, 1)) == CONST_INT
	    && exact_log2 (- smask) >= 0
	    && (nonzero_bits (XEXP (x, 0), mode) & ~ mask) == 0
	    && (INTVAL (XEXP (x, 1)) & ~ mask) != 0)
	  return force_to_mode (plus_constant (XEXP (x, 0),
					       INTVAL (XEXP (x, 1)) & mask),
				mode, mask, reg, next_select);
      }

      /* ... fall through ...  */

    case MINUS:
    case MULT:
      /* For PLUS, MINUS and MULT, we need any bits less significant than the
	 most significant bit in MASK since carries from those bits will
	 affect the bits we are interested in.  */
      mask = fuller_mask;
      goto binop;

    case IOR:
    case XOR:
      /* If X is (ior (lshiftrt FOO C1) C2), try to commute the IOR and
	 LSHIFTRT so we end up with an (and (lshiftrt (ior ...) ...) ...)
	 operation which may be a bitfield extraction.  Ensure that the
	 constant we form is not wider than the mode of X.  */

      if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
	  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
	  && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
	  && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT
	  && GET_CODE (XEXP (x, 1)) == CONST_INT
	  && ((INTVAL (XEXP (XEXP (x, 0), 1))
	       + floor_log2 (INTVAL (XEXP (x, 1))))
	      < GET_MODE_BITSIZE (GET_MODE (x)))
	  && (INTVAL (XEXP (x, 1))
	      & ~ nonzero_bits (XEXP (x, 0), GET_MODE (x))) == 0)
	{
	  temp = GEN_INT ((INTVAL (XEXP (x, 1)) & mask)
			  << INTVAL (XEXP (XEXP (x, 0), 1)));
	  temp = gen_binary (GET_CODE (x), GET_MODE (x),
			     XEXP (XEXP (x, 0), 0), temp);
	  x = gen_binary (LSHIFTRT, GET_MODE (x), temp,
			  XEXP (XEXP (x, 0), 1));
	  return force_to_mode (x, mode, mask, reg, next_select);
	}

    binop:
      /* For most binary operations, just propagate into the operation and
	 change the mode if we have an operation of that mode.  */

      op0 = gen_lowpart_for_combine (op_mode,
				     force_to_mode (XEXP (x, 0), mode, mask,
						    reg, next_select));
      op1 = gen_lowpart_for_combine (op_mode,
				     force_to_mode (XEXP (x, 1), mode, mask,
						    reg, next_select));

      /* If OP1 is a CONST_INT and X is an IOR or XOR, clear bits outside
	 MASK since OP1 might have been sign-extended but we never want
	 to turn on extra bits, since combine might have previously relied
	 on them being off.  */
      if (GET_CODE (op1) == CONST_INT && (code == IOR || code == XOR)
	  && (INTVAL (op1) & mask) != 0)
	op1 = GEN_INT (INTVAL (op1) & mask);

      if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
	x = gen_binary (code, op_mode, op0, op1);
      break;

    case ASHIFT:
      /* For left shifts, do the same, but just for the first operand.
	 However, we cannot do anything with shifts where we cannot
	 guarantee that the counts are smaller than the size of the mode
	 because such a count will have a different meaning in a
	 wider mode.  */

      if (! (GET_CODE (XEXP (x, 1)) == CONST_INT
	     && INTVAL (XEXP (x, 1)) >= 0
	     && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (mode))
	  && ! (GET_MODE (XEXP (x, 1)) != VOIDmode
		&& (nonzero_bits (XEXP (x, 1), GET_MODE (XEXP (x, 1)))
		    < (unsigned HOST_WIDE_INT) GET_MODE_BITSIZE (mode))))
	break;

      /* If the shift count is a constant and we can do arithmetic in
	 the mode of the shift, refine which bits we need.  Otherwise, use the
	 conservative form of the mask.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
	  && INTVAL (XEXP (x, 1)) >= 0
	  && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (op_mode)
	  && GET_MODE_BITSIZE (op_mode) <= HOST_BITS_PER_WIDE_INT)
	mask >>= INTVAL (XEXP (x, 1));
      else
	mask = fuller_mask;

      op0 = gen_lowpart_for_combine (op_mode,
				     force_to_mode (XEXP (x, 0), op_mode,
						    mask, reg, next_select));

      if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0))
	x = gen_binary (code, op_mode, op0, XEXP (x, 1));
      break;

    case LSHIFTRT:
      /* Here we can only do something if the shift count is a constant,
	 this shift constant is valid for the host, and we can do arithmetic
	 in OP_MODE.  */

      if (GET_CODE (XEXP (x, 1)) == CONST_INT
	  && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
	  && GET_MODE_BITSIZE (op_mode) <= HOST_BITS_PER_WIDE_INT)
	{
	  rtx inner = XEXP (x, 0);

	  /* Select the mask of the bits we need for the shift operand.  */
	  mask <<= INTVAL (XEXP (x, 1));

	  /* We can only change the mode of the shift if we can do arithmetic
	     in the mode of the shift and MASK is no wider than the width of
	     OP_MODE.  */
	  if (GET_MODE_BITSIZE (op_mode) > HOST_BITS_PER_WIDE_INT
	      || (mask & ~ GET_MODE_MASK (op_mode)) != 0)
	    op_mode = GET_MODE (x);

	  inner = force_to_mode (inner, op_mode, mask, reg, next_select);

	  if (GET_MODE (x) != op_mode || inner != XEXP (x, 0))
	    x = gen_binary (LSHIFTRT, op_mode, inner, XEXP (x, 1));
	}

      /* If we have (and (lshiftrt FOO C1) C2) where the combination of the
	 shift and AND produces only copies of the sign bit (C2 is one less
	 than a power of two), we can do this with just a shift.  */

      if (GET_CODE (x) == LSHIFTRT
	  && GET_CODE (XEXP (x, 1)) == CONST_INT
	  && ((INTVAL (XEXP (x, 1))
	       + num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0))))
	      >= GET_MODE_BITSIZE (GET_MODE (x)))
	  && exact_log2 (mask + 1) >= 0
	  && (num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
	      >= exact_log2 (mask + 1)))
	x = gen_binary (LSHIFTRT, GET_MODE (x), XEXP (x, 0),
			GEN_INT (GET_MODE_BITSIZE (GET_MODE (x))
				 - exact_log2 (mask + 1)));
      break;

    case ASHIFTRT:
      /* If we are just looking for the sign bit, we don't need this shift at
	 all, even if it has a variable count.  */
      if (GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
	  && (mask == ((HOST_WIDE_INT) 1
		       << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
	return force_to_mode (XEXP (x, 0), mode, mask, reg, next_select);

      /* If this is a shift by a constant, get a mask that contains those bits
	 that are not copies of the sign bit.  We then have two cases:  If
	 MASK only includes those bits, this can be a logical shift, which may
	 allow simplifications.  If MASK is a single-bit field not within
	 those bits, we are requesting a copy of the sign bit and hence can
	 shift the sign bit to the appropriate location.  */

      if (GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) >= 0
	  && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
	{
	  int i = -1;

	  /* If the considered data is wider than HOST_WIDE_INT, we can't
	     represent a mask for all its bits in a single scalar.
	     But we only care about the lower bits, so calculate these.  */

	  if (GET_MODE_BITSIZE (GET_MODE (x)) > HOST_BITS_PER_WIDE_INT)
	    {
	      nonzero = ~ (HOST_WIDE_INT) 0;

	      /* GET_MODE_BITSIZE (GET_MODE (x)) - INTVAL (XEXP (x, 1))
		 is the number of bits a full-width mask would have set.
		 We need only shift if these are fewer than nonzero can
		 hold.  If not, we must keep all bits set in nonzero.  */

	      if (GET_MODE_BITSIZE (GET_MODE (x)) - INTVAL (XEXP (x, 1))
		  < HOST_BITS_PER_WIDE_INT)
		nonzero >>= INTVAL (XEXP (x, 1))
			    + HOST_BITS_PER_WIDE_INT
			    - GET_MODE_BITSIZE (GET_MODE (x)) ;
	    }
	  else
	    {
	      nonzero = GET_MODE_MASK (GET_MODE (x));
	      nonzero >>= INTVAL (XEXP (x, 1));
	    }

	  if ((mask & ~ nonzero) == 0
	      || (i = exact_log2 (mask)) >= 0)
	    {
	      x = simplify_shift_const
		(x, LSHIFTRT, GET_MODE (x), XEXP (x, 0),
		 i < 0 ? INTVAL (XEXP (x, 1))
		 : GET_MODE_BITSIZE (GET_MODE (x)) - 1 - i);

	      if (GET_CODE (x) != ASHIFTRT)
		return force_to_mode (x, mode, mask, reg, next_select);
	    }
	}

      /* If MASK is 1, convert this to a LSHIFTRT.  This can be done
	 even if the shift count isn't a constant.  */
      if (mask == 1)
	x = gen_binary (LSHIFTRT, GET_MODE (x), XEXP (x, 0), XEXP (x, 1));

      /* If this is a sign-extension operation that just affects bits
	 we don't care about, remove it.  Be sure the call above returned
	 something that is still a shift.  */

      if ((GET_CODE (x) == LSHIFTRT || GET_CODE (x) == ASHIFTRT)
	  && GET_CODE (XEXP (x, 1)) == CONST_INT
	  && INTVAL (XEXP (x, 1)) >= 0
	  && (INTVAL (XEXP (x, 1))
	      <= GET_MODE_BITSIZE (GET_MODE (x)) - (floor_log2 (mask) + 1))
	  && GET_CODE (XEXP (x, 0)) == ASHIFT
	  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
	  && INTVAL (XEXP (XEXP (x, 0), 1)) == INTVAL (XEXP (x, 1)))
	return force_to_mode (XEXP (XEXP (x, 0), 0), mode, mask,
			      reg, next_select);

      break;

    case ROTATE:
    case ROTATERT:
      /* If the shift count is constant and we can do computations
	 in the mode of X, compute where the bits we care about are.
	 Otherwise, we can't do anything.  Don't change the mode of
	 the shift or propagate MODE into the shift, though.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
	  && INTVAL (XEXP (x, 1)) >= 0)
	{
	  temp = simplify_binary_operation (code == ROTATE ? ROTATERT : ROTATE,
					    GET_MODE (x), GEN_INT (mask),
					    XEXP (x, 1));
	  if (temp && GET_CODE (temp) == CONST_INT)
	    SUBST (XEXP (x, 0),
		   force_to_mode (XEXP (x, 0), GET_MODE (x),
				  INTVAL (temp), reg, next_select));
	}
      break;

    case NEG:
      /* If we just want the low-order bit, the NEG isn't needed since it
	 won't change the low-order bit.  */
      if (mask == 1)
	return force_to_mode (XEXP (x, 0), mode, mask, reg, just_select);

      /* We need any bits less significant than the most significant bit in
	 MASK since carries from those bits will affect the bits we are
	 interested in.  */
      mask = fuller_mask;
      goto unop;

    case NOT:
      /* (not FOO) is (xor FOO CONST), so if FOO is an LSHIFTRT, we can do the
	 same as the XOR case above.  Ensure that the constant we form is not
	 wider than the mode of X.  */

      if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
	  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
	  && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
	  && (INTVAL (XEXP (XEXP (x, 0), 1)) + floor_log2 (mask)
	      < GET_MODE_BITSIZE (GET_MODE (x)))
	  && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT)
	{
	  temp = GEN_INT (mask << INTVAL (XEXP (XEXP (x, 0), 1)));
	  temp = gen_binary (XOR, GET_MODE (x), XEXP (XEXP (x, 0), 0), temp);
	  x = gen_binary (LSHIFTRT, GET_MODE (x), temp, XEXP (XEXP (x, 0), 1));

	  return force_to_mode (x, mode, mask, reg, next_select);
	}

      /* (and (not FOO) CONST) is (not (or FOO (not CONST))), so we must
	 use the full mask inside the NOT.  */
      mask = fuller_mask;

    unop:
      op0 = gen_lowpart_for_combine (op_mode,
				     force_to_mode (XEXP (x, 0), mode, mask,
						    reg, next_select));
      if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0))
	x = gen_unary (code, op_mode, op_mode, op0);
      break;

    case NE:
      /* (and (ne FOO 0) CONST) can be (and FOO CONST) if CONST is included
	 in STORE_FLAG_VALUE and FOO has a single bit that might be nonzero,
	 which is equal to STORE_FLAG_VALUE.  */
      if ((mask & ~ STORE_FLAG_VALUE) == 0 && XEXP (x, 1) == const0_rtx
	  && exact_log2 (nonzero_bits (XEXP (x, 0), mode)) >= 0
	  && nonzero_bits (XEXP (x, 0), mode) == STORE_FLAG_VALUE)
	return force_to_mode (XEXP (x, 0), mode, mask, reg, next_select);

      break;

    case IF_THEN_ELSE:
      /* We have no way of knowing if the IF_THEN_ELSE can itself be
	 written in a narrower mode.  We play it safe and do not do so.  */

      SUBST (XEXP (x, 1),
	     gen_lowpart_for_combine (GET_MODE (x),
				      force_to_mode (XEXP (x, 1), mode,
						     mask, reg, next_select)));
      SUBST (XEXP (x, 2),
	     gen_lowpart_for_combine (GET_MODE (x),
				      force_to_mode (XEXP (x, 2), mode,
						     mask, reg, next_select)));
      break;
    }

  /* Ensure we return a value of the proper mode.  */
  return gen_lowpart_for_combine (mode, x);
}
6447\f
abe6e52f
RK
6448/* Return nonzero if X is an expression that has one of two values depending on
6449 whether some other value is zero or nonzero. In that case, we return the
6450 value that is being tested, *PTRUE is set to the value if the rtx being
6451 returned has a nonzero value, and *PFALSE is set to the other alternative.
6452
6453 If we return zero, we set *PTRUE and *PFALSE to X. */
6454
6455static rtx
6456if_then_else_cond (x, ptrue, pfalse)
6457 rtx x;
6458 rtx *ptrue, *pfalse;
6459{
6460 enum machine_mode mode = GET_MODE (x);
6461 enum rtx_code code = GET_CODE (x);
6462 int size = GET_MODE_BITSIZE (mode);
6463 rtx cond0, cond1, true0, true1, false0, false1;
6464 unsigned HOST_WIDE_INT nz;
6465
6466 /* If this is a unary operation whose operand has one of two values, apply
6467 our opcode to compute those values. */
6468 if (GET_RTX_CLASS (code) == '1'
6469 && (cond0 = if_then_else_cond (XEXP (x, 0), &true0, &false0)) != 0)
6470 {
0c1c8ea6
RK
6471 *ptrue = gen_unary (code, mode, GET_MODE (XEXP (x, 0)), true0);
6472 *pfalse = gen_unary (code, mode, GET_MODE (XEXP (x, 0)), false0);
abe6e52f
RK
6473 return cond0;
6474 }
6475
3a19aabc 6476 /* If this is a COMPARE, do nothing, since the IF_THEN_ELSE we would
ddd5a7c1 6477 make can't possibly match and would suppress other optimizations. */
3a19aabc
RK
6478 else if (code == COMPARE)
6479 ;
6480
abe6e52f
RK
6481 /* If this is a binary operation, see if either side has only one of two
6482 values. If either one does or if both do and they are conditional on
6483 the same value, compute the new true and false values. */
6484 else if (GET_RTX_CLASS (code) == 'c' || GET_RTX_CLASS (code) == '2'
6485 || GET_RTX_CLASS (code) == '<')
6486 {
6487 cond0 = if_then_else_cond (XEXP (x, 0), &true0, &false0);
6488 cond1 = if_then_else_cond (XEXP (x, 1), &true1, &false1);
6489
6490 if ((cond0 != 0 || cond1 != 0)
6491 && ! (cond0 != 0 && cond1 != 0 && ! rtx_equal_p (cond0, cond1)))
6492 {
6493 *ptrue = gen_binary (code, mode, true0, true1);
6494 *pfalse = gen_binary (code, mode, false0, false1);
6495 return cond0 ? cond0 : cond1;
6496 }
9210df58 6497
9210df58 6498 /* See if we have PLUS, IOR, XOR, MINUS or UMAX, where one of the
0802d516
RK
6499 operands is zero when the other is non-zero, and vice-versa,
6500 and STORE_FLAG_VALUE is 1 or -1. */
9210df58 6501
0802d516
RK
6502 if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
6503 && (code == PLUS || code == IOR || code == XOR || code == MINUS
9210df58
RK
6504 || code == UMAX)
6505 && GET_CODE (XEXP (x, 0)) == MULT && GET_CODE (XEXP (x, 1)) == MULT)
6506 {
6507 rtx op0 = XEXP (XEXP (x, 0), 1);
6508 rtx op1 = XEXP (XEXP (x, 1), 1);
6509
6510 cond0 = XEXP (XEXP (x, 0), 0);
6511 cond1 = XEXP (XEXP (x, 1), 0);
6512
6513 if (GET_RTX_CLASS (GET_CODE (cond0)) == '<'
6514 && GET_RTX_CLASS (GET_CODE (cond1)) == '<'
6515 && reversible_comparison_p (cond1)
6516 && ((GET_CODE (cond0) == reverse_condition (GET_CODE (cond1))
6517 && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 0))
6518 && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 1)))
6519 || ((swap_condition (GET_CODE (cond0))
6520 == reverse_condition (GET_CODE (cond1)))
6521 && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 1))
6522 && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 0))))
6523 && ! side_effects_p (x))
6524 {
6525 *ptrue = gen_binary (MULT, mode, op0, const_true_rtx);
6526 *pfalse = gen_binary (MULT, mode,
6527 (code == MINUS
0c1c8ea6 6528 ? gen_unary (NEG, mode, mode, op1) : op1),
9210df58
RK
6529 const_true_rtx);
6530 return cond0;
6531 }
6532 }
6533
6534 /* Similarly for MULT, AND and UMIN, execpt that for these the result
6535 is always zero. */
0802d516
RK
6536 if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
6537 && (code == MULT || code == AND || code == UMIN)
9210df58
RK
6538 && GET_CODE (XEXP (x, 0)) == MULT && GET_CODE (XEXP (x, 1)) == MULT)
6539 {
6540 cond0 = XEXP (XEXP (x, 0), 0);
6541 cond1 = XEXP (XEXP (x, 1), 0);
6542
6543 if (GET_RTX_CLASS (GET_CODE (cond0)) == '<'
6544 && GET_RTX_CLASS (GET_CODE (cond1)) == '<'
6545 && reversible_comparison_p (cond1)
6546 && ((GET_CODE (cond0) == reverse_condition (GET_CODE (cond1))
6547 && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 0))
6548 && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 1)))
6549 || ((swap_condition (GET_CODE (cond0))
6550 == reverse_condition (GET_CODE (cond1)))
6551 && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 1))
6552 && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 0))))
6553 && ! side_effects_p (x))
6554 {
6555 *ptrue = *pfalse = const0_rtx;
6556 return cond0;
6557 }
6558 }
abe6e52f
RK
6559 }
6560
6561 else if (code == IF_THEN_ELSE)
6562 {
6563 /* If we have IF_THEN_ELSE already, extract the condition and
6564 canonicalize it if it is NE or EQ. */
6565 cond0 = XEXP (x, 0);
6566 *ptrue = XEXP (x, 1), *pfalse = XEXP (x, 2);
6567 if (GET_CODE (cond0) == NE && XEXP (cond0, 1) == const0_rtx)
6568 return XEXP (cond0, 0);
6569 else if (GET_CODE (cond0) == EQ && XEXP (cond0, 1) == const0_rtx)
6570 {
6571 *ptrue = XEXP (x, 2), *pfalse = XEXP (x, 1);
6572 return XEXP (cond0, 0);
6573 }
6574 else
6575 return cond0;
6576 }
6577
6578 /* If X is a normal SUBREG with both inner and outer modes integral,
6579 we can narrow both the true and false values of the inner expression,
6580 if there is a condition. */
6581 else if (code == SUBREG && GET_MODE_CLASS (mode) == MODE_INT
6582 && GET_MODE_CLASS (GET_MODE (SUBREG_REG (x))) == MODE_INT
6583 && GET_MODE_SIZE (mode) <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
6584 && 0 != (cond0 = if_then_else_cond (SUBREG_REG (x),
6585 &true0, &false0)))
6586 {
00244e6b
RK
6587 *ptrue = force_to_mode (true0, mode, GET_MODE_MASK (mode), NULL_RTX, 0);
6588 *pfalse
6589 = force_to_mode (false0, mode, GET_MODE_MASK (mode), NULL_RTX, 0);
abe6e52f 6590
abe6e52f
RK
6591 return cond0;
6592 }
6593
6594 /* If X is a constant, this isn't special and will cause confusions
6595 if we treat it as such. Likewise if it is equivalent to a constant. */
6596 else if (CONSTANT_P (x)
6597 || ((cond0 = get_last_value (x)) != 0 && CONSTANT_P (cond0)))
6598 ;
6599
6600 /* If X is known to be either 0 or -1, those are the true and
6601 false values when testing X. */
6602 else if (num_sign_bit_copies (x, mode) == size)
6603 {
6604 *ptrue = constm1_rtx, *pfalse = const0_rtx;
6605 return x;
6606 }
6607
6608 /* Likewise for 0 or a single bit. */
6609 else if (exact_log2 (nz = nonzero_bits (x, mode)) >= 0)
6610 {
6611 *ptrue = GEN_INT (nz), *pfalse = const0_rtx;
6612 return x;
6613 }
6614
6615 /* Otherwise fail; show no condition with true and false values the same. */
6616 *ptrue = *pfalse = x;
6617 return 0;
6618}
6619\f
1a26b032
RK
6620/* Return the value of expression X given the fact that condition COND
6621 is known to be true when applied to REG as its first operand and VAL
6622 as its second. X is known to not be shared and so can be modified in
6623 place.
6624
6625 We only handle the simplest cases, and specifically those cases that
6626 arise with IF_THEN_ELSE expressions. */
6627
6628static rtx
6629known_cond (x, cond, reg, val)
6630 rtx x;
6631 enum rtx_code cond;
6632 rtx reg, val;
6633{
6634 enum rtx_code code = GET_CODE (x);
f24ad0e4 6635 rtx temp;
1a26b032
RK
6636 char *fmt;
6637 int i, j;
6638
6639 if (side_effects_p (x))
6640 return x;
6641
6642 if (cond == EQ && rtx_equal_p (x, reg))
6643 return val;
6644
6645 /* If X is (abs REG) and we know something about REG's relationship
6646 with zero, we may be able to simplify this. */
6647
6648 if (code == ABS && rtx_equal_p (XEXP (x, 0), reg) && val == const0_rtx)
6649 switch (cond)
6650 {
6651 case GE: case GT: case EQ:
6652 return XEXP (x, 0);
6653 case LT: case LE:
0c1c8ea6
RK
6654 return gen_unary (NEG, GET_MODE (XEXP (x, 0)), GET_MODE (XEXP (x, 0)),
6655 XEXP (x, 0));
1a26b032
RK
6656 }
6657
6658 /* The only other cases we handle are MIN, MAX, and comparisons if the
6659 operands are the same as REG and VAL. */
6660
6661 else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == 'c')
6662 {
6663 if (rtx_equal_p (XEXP (x, 0), val))
6664 cond = swap_condition (cond), temp = val, val = reg, reg = temp;
6665
6666 if (rtx_equal_p (XEXP (x, 0), reg) && rtx_equal_p (XEXP (x, 1), val))
6667 {
6668 if (GET_RTX_CLASS (code) == '<')
6669 return (comparison_dominates_p (cond, code) ? const_true_rtx
6670 : (comparison_dominates_p (cond,
6671 reverse_condition (code))
6672 ? const0_rtx : x));
6673
6674 else if (code == SMAX || code == SMIN
6675 || code == UMIN || code == UMAX)
6676 {
6677 int unsignedp = (code == UMIN || code == UMAX);
6678
6679 if (code == SMAX || code == UMAX)
6680 cond = reverse_condition (cond);
6681
6682 switch (cond)
6683 {
6684 case GE: case GT:
6685 return unsignedp ? x : XEXP (x, 1);
6686 case LE: case LT:
6687 return unsignedp ? x : XEXP (x, 0);
6688 case GEU: case GTU:
6689 return unsignedp ? XEXP (x, 1) : x;
6690 case LEU: case LTU:
6691 return unsignedp ? XEXP (x, 0) : x;
6692 }
6693 }
6694 }
6695 }
6696
6697 fmt = GET_RTX_FORMAT (code);
6698 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6699 {
6700 if (fmt[i] == 'e')
6701 SUBST (XEXP (x, i), known_cond (XEXP (x, i), cond, reg, val));
6702 else if (fmt[i] == 'E')
6703 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
6704 SUBST (XVECEXP (x, i, j), known_cond (XVECEXP (x, i, j),
6705 cond, reg, val));
6706 }
6707
6708 return x;
6709}
6710\f
e11fa86f
RK
6711/* See if X and Y are equal for the purposes of seeing if we can rewrite an
6712 assignment as a field assignment. */
6713
6714static int
6715rtx_equal_for_field_assignment_p (x, y)
6716 rtx x;
6717 rtx y;
6718{
6719 rtx last_x, last_y;
6720
6721 if (x == y || rtx_equal_p (x, y))
6722 return 1;
6723
6724 if (x == 0 || y == 0 || GET_MODE (x) != GET_MODE (y))
6725 return 0;
6726
6727 /* Check for a paradoxical SUBREG of a MEM compared with the MEM.
6728 Note that all SUBREGs of MEM are paradoxical; otherwise they
6729 would have been rewritten. */
6730 if (GET_CODE (x) == MEM && GET_CODE (y) == SUBREG
6731 && GET_CODE (SUBREG_REG (y)) == MEM
6732 && rtx_equal_p (SUBREG_REG (y),
6733 gen_lowpart_for_combine (GET_MODE (SUBREG_REG (y)), x)))
6734 return 1;
6735
6736 if (GET_CODE (y) == MEM && GET_CODE (x) == SUBREG
6737 && GET_CODE (SUBREG_REG (x)) == MEM
6738 && rtx_equal_p (SUBREG_REG (x),
6739 gen_lowpart_for_combine (GET_MODE (SUBREG_REG (x)), y)))
6740 return 1;
6741
6742 last_x = get_last_value (x);
6743 last_y = get_last_value (y);
6744
0f47edd3
JL
6745 return ((last_x != 0
6746 && GET_CODE (last_x) != CLOBBER
6747 && rtx_equal_for_field_assignment_p (last_x, y))
6748 || (last_y != 0
6749 && GET_CODE (last_y) != CLOBBER
6750 && rtx_equal_for_field_assignment_p (x, last_y))
e11fa86f 6751 || (last_x != 0 && last_y != 0
0f47edd3
JL
6752 && GET_CODE (last_x) != CLOBBER
6753 && GET_CODE (last_y) != CLOBBER
e11fa86f
RK
6754 && rtx_equal_for_field_assignment_p (last_x, last_y)));
6755}
6756\f
230d793d
RS
6757/* See if X, a SET operation, can be rewritten as a bit-field assignment.
6758 Return that assignment if so.
6759
6760 We only handle the most common cases. */
6761
6762static rtx
6763make_field_assignment (x)
6764 rtx x;
6765{
6766 rtx dest = SET_DEST (x);
6767 rtx src = SET_SRC (x);
dfbe1b2f 6768 rtx assign;
e11fa86f 6769 rtx rhs, lhs;
5f4f0e22
CH
6770 HOST_WIDE_INT c1;
6771 int pos, len;
dfbe1b2f
RK
6772 rtx other;
6773 enum machine_mode mode;
230d793d
RS
6774
6775 /* If SRC was (and (not (ashift (const_int 1) POS)) DEST), this is
6776 a clear of a one-bit field. We will have changed it to
6777 (and (rotate (const_int -2) POS) DEST), so check for that. Also check
6778 for a SUBREG. */
6779
6780 if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == ROTATE
6781 && GET_CODE (XEXP (XEXP (src, 0), 0)) == CONST_INT
6782 && INTVAL (XEXP (XEXP (src, 0), 0)) == -2
e11fa86f 6783 && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
230d793d 6784 {
8999a12e 6785 assign = make_extraction (VOIDmode, dest, 0, XEXP (XEXP (src, 0), 1),
230d793d 6786 1, 1, 1, 0);
76184def
DE
6787 if (assign != 0)
6788 return gen_rtx (SET, VOIDmode, assign, const0_rtx);
6789 return x;
230d793d
RS
6790 }
6791
6792 else if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == SUBREG
6793 && subreg_lowpart_p (XEXP (src, 0))
6794 && (GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
6795 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (src, 0)))))
6796 && GET_CODE (SUBREG_REG (XEXP (src, 0))) == ROTATE
6797 && INTVAL (XEXP (SUBREG_REG (XEXP (src, 0)), 0)) == -2
e11fa86f 6798 && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
230d793d 6799 {
8999a12e 6800 assign = make_extraction (VOIDmode, dest, 0,
230d793d
RS
6801 XEXP (SUBREG_REG (XEXP (src, 0)), 1),
6802 1, 1, 1, 0);
76184def
DE
6803 if (assign != 0)
6804 return gen_rtx (SET, VOIDmode, assign, const0_rtx);
6805 return x;
230d793d
RS
6806 }
6807
9dd11dcb 6808 /* If SRC is (ior (ashift (const_int 1) POS) DEST), this is a set of a
230d793d
RS
6809 one-bit field. */
6810 else if (GET_CODE (src) == IOR && GET_CODE (XEXP (src, 0)) == ASHIFT
6811 && XEXP (XEXP (src, 0), 0) == const1_rtx
e11fa86f 6812 && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
230d793d 6813 {
8999a12e 6814 assign = make_extraction (VOIDmode, dest, 0, XEXP (XEXP (src, 0), 1),
230d793d 6815 1, 1, 1, 0);
76184def
DE
6816 if (assign != 0)
6817 return gen_rtx (SET, VOIDmode, assign, const1_rtx);
6818 return x;
230d793d
RS
6819 }
6820
dfbe1b2f 6821 /* The other case we handle is assignments into a constant-position
9dd11dcb 6822 field. They look like (ior/xor (and DEST C1) OTHER). If C1 represents
dfbe1b2f
RK
6823 a mask that has all one bits except for a group of zero bits and
6824 OTHER is known to have zeros where C1 has ones, this is such an
6825 assignment. Compute the position and length from C1. Shift OTHER
6826 to the appropriate position, force it to the required mode, and
6827 make the extraction. Check for the AND in both operands. */
6828
9dd11dcb 6829 if (GET_CODE (src) != IOR && GET_CODE (src) != XOR)
e11fa86f
RK
6830 return x;
6831
6832 rhs = expand_compound_operation (XEXP (src, 0));
6833 lhs = expand_compound_operation (XEXP (src, 1));
6834
6835 if (GET_CODE (rhs) == AND
6836 && GET_CODE (XEXP (rhs, 1)) == CONST_INT
6837 && rtx_equal_for_field_assignment_p (XEXP (rhs, 0), dest))
6838 c1 = INTVAL (XEXP (rhs, 1)), other = lhs;
6839 else if (GET_CODE (lhs) == AND
6840 && GET_CODE (XEXP (lhs, 1)) == CONST_INT
6841 && rtx_equal_for_field_assignment_p (XEXP (lhs, 0), dest))
6842 c1 = INTVAL (XEXP (lhs, 1)), other = rhs;
dfbe1b2f
RK
6843 else
6844 return x;
230d793d 6845
e11fa86f 6846 pos = get_pos_from_mask ((~ c1) & GET_MODE_MASK (GET_MODE (dest)), &len);
dfbe1b2f 6847 if (pos < 0 || pos + len > GET_MODE_BITSIZE (GET_MODE (dest))
ac49a949 6848 || (GET_MODE_BITSIZE (GET_MODE (other)) <= HOST_BITS_PER_WIDE_INT
951553af 6849 && (c1 & nonzero_bits (other, GET_MODE (other))) != 0))
dfbe1b2f 6850 return x;
230d793d 6851
5f4f0e22 6852 assign = make_extraction (VOIDmode, dest, pos, NULL_RTX, len, 1, 1, 0);
76184def
DE
6853 if (assign == 0)
6854 return x;
230d793d 6855
dfbe1b2f
RK
6856 /* The mode to use for the source is the mode of the assignment, or of
6857 what is inside a possible STRICT_LOW_PART. */
6858 mode = (GET_CODE (assign) == STRICT_LOW_PART
6859 ? GET_MODE (XEXP (assign, 0)) : GET_MODE (assign));
230d793d 6860
dfbe1b2f
RK
6861 /* Shift OTHER right POS places and make it the source, restricting it
6862 to the proper length and mode. */
230d793d 6863
5f4f0e22
CH
6864 src = force_to_mode (simplify_shift_const (NULL_RTX, LSHIFTRT,
6865 GET_MODE (src), other, pos),
6139ff20
RK
6866 mode,
6867 GET_MODE_BITSIZE (mode) >= HOST_BITS_PER_WIDE_INT
6868 ? GET_MODE_MASK (mode)
6869 : ((HOST_WIDE_INT) 1 << len) - 1,
e3d616e3 6870 dest, 0);
230d793d 6871
dfbe1b2f 6872 return gen_rtx_combine (SET, VOIDmode, assign, src);
230d793d
RS
6873}
6874\f
6875/* See if X is of the form (+ (* a c) (* b c)) and convert to (* (+ a b) c)
6876 if so. */
6877
6878static rtx
6879apply_distributive_law (x)
6880 rtx x;
6881{
6882 enum rtx_code code = GET_CODE (x);
6883 rtx lhs, rhs, other;
6884 rtx tem;
6885 enum rtx_code inner_code;
6886
d8a8a4da
RS
6887 /* Distributivity is not true for floating point.
6888 It can change the value. So don't do it.
6889 -- rms and moshier@world.std.com. */
3ad2180a 6890 if (FLOAT_MODE_P (GET_MODE (x)))
d8a8a4da
RS
6891 return x;
6892
230d793d
RS
6893 /* The outer operation can only be one of the following: */
6894 if (code != IOR && code != AND && code != XOR
6895 && code != PLUS && code != MINUS)
6896 return x;
6897
6898 lhs = XEXP (x, 0), rhs = XEXP (x, 1);
6899
0f41302f
MS
6900 /* If either operand is a primitive we can't do anything, so get out
6901 fast. */
230d793d 6902 if (GET_RTX_CLASS (GET_CODE (lhs)) == 'o'
dfbe1b2f 6903 || GET_RTX_CLASS (GET_CODE (rhs)) == 'o')
230d793d
RS
6904 return x;
6905
6906 lhs = expand_compound_operation (lhs);
6907 rhs = expand_compound_operation (rhs);
6908 inner_code = GET_CODE (lhs);
6909 if (inner_code != GET_CODE (rhs))
6910 return x;
6911
6912 /* See if the inner and outer operations distribute. */
6913 switch (inner_code)
6914 {
6915 case LSHIFTRT:
6916 case ASHIFTRT:
6917 case AND:
6918 case IOR:
6919 /* These all distribute except over PLUS. */
6920 if (code == PLUS || code == MINUS)
6921 return x;
6922 break;
6923
6924 case MULT:
6925 if (code != PLUS && code != MINUS)
6926 return x;
6927 break;
6928
6929 case ASHIFT:
45620ed4 6930 /* This is also a multiply, so it distributes over everything. */
230d793d
RS
6931 break;
6932
6933 case SUBREG:
dfbe1b2f
RK
6934 /* Non-paradoxical SUBREGs distributes over all operations, provided
6935 the inner modes and word numbers are the same, this is an extraction
2b4bd1bc
JW
6936 of a low-order part, we don't convert an fp operation to int or
6937 vice versa, and we would not be converting a single-word
dfbe1b2f 6938 operation into a multi-word operation. The latter test is not
2b4bd1bc 6939 required, but it prevents generating unneeded multi-word operations.
dfbe1b2f
RK
6940 Some of the previous tests are redundant given the latter test, but
6941 are retained because they are required for correctness.
6942
6943 We produce the result slightly differently in this case. */
6944
6945 if (GET_MODE (SUBREG_REG (lhs)) != GET_MODE (SUBREG_REG (rhs))
6946 || SUBREG_WORD (lhs) != SUBREG_WORD (rhs)
6947 || ! subreg_lowpart_p (lhs)
2b4bd1bc
JW
6948 || (GET_MODE_CLASS (GET_MODE (lhs))
6949 != GET_MODE_CLASS (GET_MODE (SUBREG_REG (lhs))))
dfbe1b2f 6950 || (GET_MODE_SIZE (GET_MODE (lhs))
8af24e26 6951 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs))))
dfbe1b2f 6952 || GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs))) > UNITS_PER_WORD)
230d793d
RS
6953 return x;
6954
6955 tem = gen_binary (code, GET_MODE (SUBREG_REG (lhs)),
6956 SUBREG_REG (lhs), SUBREG_REG (rhs));
6957 return gen_lowpart_for_combine (GET_MODE (x), tem);
6958
6959 default:
6960 return x;
6961 }
6962
6963 /* Set LHS and RHS to the inner operands (A and B in the example
6964 above) and set OTHER to the common operand (C in the example).
6965 These is only one way to do this unless the inner operation is
6966 commutative. */
6967 if (GET_RTX_CLASS (inner_code) == 'c'
6968 && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 0)))
6969 other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 1);
6970 else if (GET_RTX_CLASS (inner_code) == 'c'
6971 && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 1)))
6972 other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 0);
6973 else if (GET_RTX_CLASS (inner_code) == 'c'
6974 && rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 0)))
6975 other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 1);
6976 else if (rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 1)))
6977 other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 0);
6978 else
6979 return x;
6980
6981 /* Form the new inner operation, seeing if it simplifies first. */
6982 tem = gen_binary (code, GET_MODE (x), lhs, rhs);
6983
6984 /* There is one exception to the general way of distributing:
6985 (a ^ b) | (a ^ c) -> (~a) & (b ^ c) */
6986 if (code == XOR && inner_code == IOR)
6987 {
6988 inner_code = AND;
0c1c8ea6 6989 other = gen_unary (NOT, GET_MODE (x), GET_MODE (x), other);
230d793d
RS
6990 }
6991
6992 /* We may be able to continuing distributing the result, so call
6993 ourselves recursively on the inner operation before forming the
6994 outer operation, which we return. */
6995 return gen_binary (inner_code, GET_MODE (x),
6996 apply_distributive_law (tem), other);
6997}
6998\f
6999/* We have X, a logical `and' of VAROP with the constant CONSTOP, to be done
7000 in MODE.
7001
7002 Return an equivalent form, if different from X. Otherwise, return X. If
7003 X is zero, we are to always construct the equivalent form. */
7004
7005static rtx
7006simplify_and_const_int (x, mode, varop, constop)
7007 rtx x;
7008 enum machine_mode mode;
7009 rtx varop;
5f4f0e22 7010 unsigned HOST_WIDE_INT constop;
230d793d 7011{
951553af 7012 unsigned HOST_WIDE_INT nonzero;
9fa6d012 7013 int width = GET_MODE_BITSIZE (mode);
42301240 7014 int i;
230d793d 7015
6139ff20
RK
7016 /* Simplify VAROP knowing that we will be only looking at some of the
7017 bits in it. */
e3d616e3 7018 varop = force_to_mode (varop, mode, constop, NULL_RTX, 0);
230d793d 7019
6139ff20
RK
7020 /* If VAROP is a CLOBBER, we will fail so return it; if it is a
7021 CONST_INT, we are done. */
7022 if (GET_CODE (varop) == CLOBBER || GET_CODE (varop) == CONST_INT)
7023 return varop;
230d793d 7024
fc06d7aa
RK
7025 /* See what bits may be nonzero in VAROP. Unlike the general case of
7026 a call to nonzero_bits, here we don't care about bits outside
7027 MODE. */
7028
7029 nonzero = nonzero_bits (varop, mode) & GET_MODE_MASK (mode);
230d793d 7030
9fa6d012
TG
7031 /* If this would be an entire word for the target, but is not for
7032 the host, then sign-extend on the host so that the number will look
7033 the same way on the host that it would on the target.
7034
7035 For example, when building a 64 bit alpha hosted 32 bit sparc
7036 targeted compiler, then we want the 32 bit unsigned value -1 to be
7037 represented as a 64 bit value -1, and not as 0x00000000ffffffff.
7038 The later confuses the sparc backend. */
7039
7040 if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT && BITS_PER_WORD == width
7041 && (nonzero & ((HOST_WIDE_INT) 1 << (width - 1))))
7042 nonzero |= ((HOST_WIDE_INT) (-1) << width);
7043
230d793d 7044 /* Turn off all bits in the constant that are known to already be zero.
951553af 7045 Thus, if the AND isn't needed at all, we will have CONSTOP == NONZERO_BITS
230d793d
RS
7046 which is tested below. */
7047
951553af 7048 constop &= nonzero;
230d793d
RS
7049
7050 /* If we don't have any bits left, return zero. */
7051 if (constop == 0)
7052 return const0_rtx;
7053
42301240
RK
7054 /* If VAROP is a NEG of something known to be zero or 1 and CONSTOP is
7055 a power of two, we can replace this with a ASHIFT. */
7056 if (GET_CODE (varop) == NEG && nonzero_bits (XEXP (varop, 0), mode) == 1
7057 && (i = exact_log2 (constop)) >= 0)
7058 return simplify_shift_const (NULL_RTX, ASHIFT, mode, XEXP (varop, 0), i);
7059
6139ff20
RK
7060 /* If VAROP is an IOR or XOR, apply the AND to both branches of the IOR
7061 or XOR, then try to apply the distributive law. This may eliminate
7062 operations if either branch can be simplified because of the AND.
7063 It may also make some cases more complex, but those cases probably
7064 won't match a pattern either with or without this. */
7065
7066 if (GET_CODE (varop) == IOR || GET_CODE (varop) == XOR)
7067 return
7068 gen_lowpart_for_combine
7069 (mode,
7070 apply_distributive_law
7071 (gen_binary (GET_CODE (varop), GET_MODE (varop),
7072 simplify_and_const_int (NULL_RTX, GET_MODE (varop),
7073 XEXP (varop, 0), constop),
7074 simplify_and_const_int (NULL_RTX, GET_MODE (varop),
7075 XEXP (varop, 1), constop))));
7076
230d793d
RS
7077 /* Get VAROP in MODE. Try to get a SUBREG if not. Don't make a new SUBREG
7078 if we already had one (just check for the simplest cases). */
7079 if (x && GET_CODE (XEXP (x, 0)) == SUBREG
7080 && GET_MODE (XEXP (x, 0)) == mode
7081 && SUBREG_REG (XEXP (x, 0)) == varop)
7082 varop = XEXP (x, 0);
7083 else
7084 varop = gen_lowpart_for_combine (mode, varop);
7085
0f41302f 7086 /* If we can't make the SUBREG, try to return what we were given. */
230d793d
RS
7087 if (GET_CODE (varop) == CLOBBER)
7088 return x ? x : varop;
7089
7090 /* If we are only masking insignificant bits, return VAROP. */
951553af 7091 if (constop == nonzero)
230d793d
RS
7092 x = varop;
7093
7094 /* Otherwise, return an AND. See how much, if any, of X we can use. */
7095 else if (x == 0 || GET_CODE (x) != AND || GET_MODE (x) != mode)
6139ff20 7096 x = gen_binary (AND, mode, varop, GEN_INT (constop));
230d793d
RS
7097
7098 else
7099 {
7100 if (GET_CODE (XEXP (x, 1)) != CONST_INT
7101 || INTVAL (XEXP (x, 1)) != constop)
5f4f0e22 7102 SUBST (XEXP (x, 1), GEN_INT (constop));
230d793d
RS
7103
7104 SUBST (XEXP (x, 0), varop);
7105 }
7106
7107 return x;
7108}
7109\f
b3728b0e
JW
7110/* We let num_sign_bit_copies recur into nonzero_bits as that is useful.
7111 We don't let nonzero_bits recur into num_sign_bit_copies, because that
7112 is less useful. We can't allow both, because that results in exponential
7114 run time recursion. There is a nullstone testcase that triggered
7114 this. This macro avoids accidental uses of num_sign_bit_copies. */
7115#define num_sign_bit_copies()
7116
230d793d
RS
7117/* Given an expression, X, compute which bits in X can be non-zero.
7118 We don't care about bits outside of those defined in MODE.
7119
7120 For most X this is simply GET_MODE_MASK (GET_MODE (MODE)), but if X is
7121 a shift, AND, or zero_extract, we can do better. */
7122
5f4f0e22 7123static unsigned HOST_WIDE_INT
951553af 7124nonzero_bits (x, mode)
230d793d
RS
7125 rtx x;
7126 enum machine_mode mode;
7127{
951553af
RK
7128 unsigned HOST_WIDE_INT nonzero = GET_MODE_MASK (mode);
7129 unsigned HOST_WIDE_INT inner_nz;
230d793d
RS
7130 enum rtx_code code;
7131 int mode_width = GET_MODE_BITSIZE (mode);
7132 rtx tem;
7133
1c75dfa4
RK
7134 /* For floating-point values, assume all bits are needed. */
7135 if (FLOAT_MODE_P (GET_MODE (x)) || FLOAT_MODE_P (mode))
7136 return nonzero;
7137
230d793d
RS
7138 /* If X is wider than MODE, use its mode instead. */
7139 if (GET_MODE_BITSIZE (GET_MODE (x)) > mode_width)
7140 {
7141 mode = GET_MODE (x);
951553af 7142 nonzero = GET_MODE_MASK (mode);
230d793d
RS
7143 mode_width = GET_MODE_BITSIZE (mode);
7144 }
7145
5f4f0e22 7146 if (mode_width > HOST_BITS_PER_WIDE_INT)
230d793d
RS
7147 /* Our only callers in this case look for single bit values. So
7148 just return the mode mask. Those tests will then be false. */
951553af 7149 return nonzero;
230d793d 7150
8baf60bb 7151#ifndef WORD_REGISTER_OPERATIONS
c6965c0f 7152 /* If MODE is wider than X, but both are a single word for both the host
0840fd91
RK
7153 and target machines, we can compute this from which bits of the
7154 object might be nonzero in its own mode, taking into account the fact
7155 that on many CISC machines, accessing an object in a wider mode
7156 causes the high-order bits to become undefined. So they are
7157 not known to be zero. */
7158
7159 if (GET_MODE (x) != VOIDmode && GET_MODE (x) != mode
7160 && GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD
7161 && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
c6965c0f 7162 && GET_MODE_BITSIZE (mode) > GET_MODE_BITSIZE (GET_MODE (x)))
0840fd91
RK
7163 {
7164 nonzero &= nonzero_bits (x, GET_MODE (x));
7165 nonzero |= GET_MODE_MASK (mode) & ~ GET_MODE_MASK (GET_MODE (x));
7166 return nonzero;
7167 }
7168#endif
7169
230d793d
RS
7170 code = GET_CODE (x);
7171 switch (code)
7172 {
7173 case REG:
320dd7a7
RK
7174#ifdef POINTERS_EXTEND_UNSIGNED
7175 /* If pointers extend unsigned and this is a pointer in Pmode, say that
7176 all the bits above ptr_mode are known to be zero. */
7177 if (POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
7178 && REGNO_POINTER_FLAG (REGNO (x)))
7179 nonzero &= GET_MODE_MASK (ptr_mode);
7180#endif
7181
b0d71df9
RK
7182#ifdef STACK_BOUNDARY
7183 /* If this is the stack pointer, we may know something about its
7184 alignment. If PUSH_ROUNDING is defined, it is possible for the
230d793d
RS
7185 stack to be momentarily aligned only to that amount, so we pick
7186 the least alignment. */
7187
ee49a9c7
JW
7188 /* We can't check for arg_pointer_rtx here, because it is not
7189 guaranteed to have as much alignment as the stack pointer.
7190 In particular, in the Irix6 n64 ABI, the stack has 128 bit
7191 alignment but the argument pointer has only 64 bit alignment. */
7192
b0d71df9 7193 if (x == stack_pointer_rtx || x == frame_pointer_rtx
ee49a9c7 7194 || x == hard_frame_pointer_rtx
b0d71df9
RK
7195 || (REGNO (x) >= FIRST_VIRTUAL_REGISTER
7196 && REGNO (x) <= LAST_VIRTUAL_REGISTER))
230d793d 7197 {
b0d71df9 7198 int sp_alignment = STACK_BOUNDARY / BITS_PER_UNIT;
230d793d
RS
7199
7200#ifdef PUSH_ROUNDING
91102d5a 7201 if (REGNO (x) == STACK_POINTER_REGNUM)
b0d71df9 7202 sp_alignment = MIN (PUSH_ROUNDING (1), sp_alignment);
230d793d
RS
7203#endif
7204
320dd7a7
RK
7205 /* We must return here, otherwise we may get a worse result from
7206 one of the choices below. There is nothing useful below as
7207 far as the stack pointer is concerned. */
b0d71df9 7208 return nonzero &= ~ (sp_alignment - 1);
230d793d 7209 }
b0d71df9 7210#endif
230d793d 7211
55310dad
RK
7212 /* If X is a register whose nonzero bits value is current, use it.
7213 Otherwise, if X is a register whose value we can find, use that
7214 value. Otherwise, use the previously-computed global nonzero bits
7215 for this register. */
7216
7217 if (reg_last_set_value[REGNO (x)] != 0
7218 && reg_last_set_mode[REGNO (x)] == mode
7219 && (reg_n_sets[REGNO (x)] == 1
7220 || reg_last_set_label[REGNO (x)] == label_tick)
7221 && INSN_CUID (reg_last_set[REGNO (x)]) < subst_low_cuid)
7222 return reg_last_set_nonzero_bits[REGNO (x)];
230d793d
RS
7223
7224 tem = get_last_value (x);
9afa3d54 7225
230d793d 7226 if (tem)
9afa3d54
RK
7227 {
7228#ifdef SHORT_IMMEDIATES_SIGN_EXTEND
7229 /* If X is narrower than MODE and TEM is a non-negative
7230 constant that would appear negative in the mode of X,
7231 sign-extend it for use in reg_nonzero_bits because some
7232 machines (maybe most) will actually do the sign-extension
7233 and this is the conservative approach.
7234
7235 ??? For 2.5, try to tighten up the MD files in this regard
7236 instead of this kludge. */
7237
7238 if (GET_MODE_BITSIZE (GET_MODE (x)) < mode_width
7239 && GET_CODE (tem) == CONST_INT
7240 && INTVAL (tem) > 0
7241 && 0 != (INTVAL (tem)
7242 & ((HOST_WIDE_INT) 1
9e69be8c 7243 << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
9afa3d54
RK
7244 tem = GEN_INT (INTVAL (tem)
7245 | ((HOST_WIDE_INT) (-1)
7246 << GET_MODE_BITSIZE (GET_MODE (x))));
7247#endif
7248 return nonzero_bits (tem, mode);
7249 }
951553af
RK
7250 else if (nonzero_sign_valid && reg_nonzero_bits[REGNO (x)])
7251 return reg_nonzero_bits[REGNO (x)] & nonzero;
230d793d 7252 else
951553af 7253 return nonzero;
230d793d
RS
7254
7255 case CONST_INT:
9afa3d54
RK
7256#ifdef SHORT_IMMEDIATES_SIGN_EXTEND
7257 /* If X is negative in MODE, sign-extend the value. */
9e69be8c
RK
7258 if (INTVAL (x) > 0 && mode_width < BITS_PER_WORD
7259 && 0 != (INTVAL (x) & ((HOST_WIDE_INT) 1 << (mode_width - 1))))
7260 return (INTVAL (x) | ((HOST_WIDE_INT) (-1) << mode_width));
9afa3d54
RK
7261#endif
7262
230d793d
RS
7263 return INTVAL (x);
7264
230d793d 7265 case MEM:
8baf60bb 7266#ifdef LOAD_EXTEND_OP
230d793d
RS
7267 /* In many, if not most, RISC machines, reading a byte from memory
7268 zeros the rest of the register. Noticing that fact saves a lot
7269 of extra zero-extends. */
8baf60bb
RK
7270 if (LOAD_EXTEND_OP (GET_MODE (x)) == ZERO_EXTEND)
7271 nonzero &= GET_MODE_MASK (GET_MODE (x));
230d793d 7272#endif
8baf60bb 7273 break;
230d793d 7274
230d793d
RS
7275 case EQ: case NE:
7276 case GT: case GTU:
7277 case LT: case LTU:
7278 case GE: case GEU:
7279 case LE: case LEU:
3f508eca 7280
c6965c0f
RK
7281 /* If this produces an integer result, we know which bits are set.
7282 Code here used to clear bits outside the mode of X, but that is
7283 now done above. */
230d793d 7284
c6965c0f
RK
7285 if (GET_MODE_CLASS (mode) == MODE_INT
7286 && mode_width <= HOST_BITS_PER_WIDE_INT)
7287 nonzero = STORE_FLAG_VALUE;
230d793d 7288 break;
230d793d 7289
230d793d 7290 case NEG:
b3728b0e
JW
7291#if 0
7292 /* Disabled to avoid exponential mutual recursion between nonzero_bits
7293 and num_sign_bit_copies. */
d0ab8cd3
RK
7294 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
7295 == GET_MODE_BITSIZE (GET_MODE (x)))
951553af 7296 nonzero = 1;
b3728b0e 7297#endif
230d793d
RS
7298
7299 if (GET_MODE_SIZE (GET_MODE (x)) < mode_width)
951553af 7300 nonzero |= (GET_MODE_MASK (mode) & ~ GET_MODE_MASK (GET_MODE (x)));
230d793d 7301 break;
d0ab8cd3
RK
7302
7303 case ABS:
b3728b0e
JW
7304#if 0
7305 /* Disabled to avoid exponential mutual recursion between nonzero_bits
7306 and num_sign_bit_copies. */
d0ab8cd3
RK
7307 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
7308 == GET_MODE_BITSIZE (GET_MODE (x)))
951553af 7309 nonzero = 1;
b3728b0e 7310#endif
d0ab8cd3 7311 break;
230d793d
RS
7312
7313 case TRUNCATE:
951553af 7314 nonzero &= (nonzero_bits (XEXP (x, 0), mode) & GET_MODE_MASK (mode));
230d793d
RS
7315 break;
7316
7317 case ZERO_EXTEND:
951553af 7318 nonzero &= nonzero_bits (XEXP (x, 0), mode);
230d793d 7319 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
951553af 7320 nonzero &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
230d793d
RS
7321 break;
7322
7323 case SIGN_EXTEND:
7324 /* If the sign bit is known clear, this is the same as ZERO_EXTEND.
7325 Otherwise, show all the bits in the outer mode but not the inner
7326 may be non-zero. */
951553af 7327 inner_nz = nonzero_bits (XEXP (x, 0), mode);
230d793d
RS
7328 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
7329 {
951553af
RK
7330 inner_nz &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
7331 if (inner_nz &
5f4f0e22
CH
7332 (((HOST_WIDE_INT) 1
7333 << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1))))
951553af 7334 inner_nz |= (GET_MODE_MASK (mode)
230d793d
RS
7335 & ~ GET_MODE_MASK (GET_MODE (XEXP (x, 0))));
7336 }
7337
951553af 7338 nonzero &= inner_nz;
230d793d
RS
7339 break;
7340
7341 case AND:
951553af
RK
7342 nonzero &= (nonzero_bits (XEXP (x, 0), mode)
7343 & nonzero_bits (XEXP (x, 1), mode));
230d793d
RS
7344 break;
7345
d0ab8cd3
RK
7346 case XOR: case IOR:
7347 case UMIN: case UMAX: case SMIN: case SMAX:
951553af
RK
7348 nonzero &= (nonzero_bits (XEXP (x, 0), mode)
7349 | nonzero_bits (XEXP (x, 1), mode));
230d793d
RS
7350 break;
7351
7352 case PLUS: case MINUS:
7353 case MULT:
7354 case DIV: case UDIV:
7355 case MOD: case UMOD:
7356 /* We can apply the rules of arithmetic to compute the number of
7357 high- and low-order zero bits of these operations. We start by
7358 computing the width (position of the highest-order non-zero bit)
7359 and the number of low-order zero bits for each value. */
7360 {
951553af
RK
7361 unsigned HOST_WIDE_INT nz0 = nonzero_bits (XEXP (x, 0), mode);
7362 unsigned HOST_WIDE_INT nz1 = nonzero_bits (XEXP (x, 1), mode);
7363 int width0 = floor_log2 (nz0) + 1;
7364 int width1 = floor_log2 (nz1) + 1;
7365 int low0 = floor_log2 (nz0 & -nz0);
7366 int low1 = floor_log2 (nz1 & -nz1);
318b149c
RK
7367 HOST_WIDE_INT op0_maybe_minusp
7368 = (nz0 & ((HOST_WIDE_INT) 1 << (mode_width - 1)));
7369 HOST_WIDE_INT op1_maybe_minusp
7370 = (nz1 & ((HOST_WIDE_INT) 1 << (mode_width - 1)));
230d793d
RS
7371 int result_width = mode_width;
7372 int result_low = 0;
7373
7374 switch (code)
7375 {
7376 case PLUS:
7377 result_width = MAX (width0, width1) + 1;
7378 result_low = MIN (low0, low1);
7379 break;
7380 case MINUS:
7381 result_low = MIN (low0, low1);
7382 break;
7383 case MULT:
7384 result_width = width0 + width1;
7385 result_low = low0 + low1;
7386 break;
7387 case DIV:
7388 if (! op0_maybe_minusp && ! op1_maybe_minusp)
7389 result_width = width0;
7390 break;
7391 case UDIV:
7392 result_width = width0;
7393 break;
7394 case MOD:
7395 if (! op0_maybe_minusp && ! op1_maybe_minusp)
7396 result_width = MIN (width0, width1);
7397 result_low = MIN (low0, low1);
7398 break;
7399 case UMOD:
7400 result_width = MIN (width0, width1);
7401 result_low = MIN (low0, low1);
7402 break;
7403 }
7404
7405 if (result_width < mode_width)
951553af 7406 nonzero &= ((HOST_WIDE_INT) 1 << result_width) - 1;
230d793d
RS
7407
7408 if (result_low > 0)
951553af 7409 nonzero &= ~ (((HOST_WIDE_INT) 1 << result_low) - 1);
230d793d
RS
7410 }
7411 break;
7412
7413 case ZERO_EXTRACT:
7414 if (GET_CODE (XEXP (x, 1)) == CONST_INT
5f4f0e22 7415 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
951553af 7416 nonzero &= ((HOST_WIDE_INT) 1 << INTVAL (XEXP (x, 1))) - 1;
230d793d
RS
7417 break;
7418
7419 case SUBREG:
c3c2cb37
RK
7420 /* If this is a SUBREG formed for a promoted variable that has
7421 been zero-extended, we know that at least the high-order bits
7422 are zero, though others might be too. */
7423
7424 if (SUBREG_PROMOTED_VAR_P (x) && SUBREG_PROMOTED_UNSIGNED_P (x))
951553af
RK
7425 nonzero = (GET_MODE_MASK (GET_MODE (x))
7426 & nonzero_bits (SUBREG_REG (x), GET_MODE (x)));
c3c2cb37 7427
230d793d
RS
7428 /* If the inner mode is a single word for both the host and target
7429 machines, we can compute this from which bits of the inner
951553af 7430 object might be nonzero. */
230d793d 7431 if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) <= BITS_PER_WORD
5f4f0e22
CH
7432 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
7433 <= HOST_BITS_PER_WIDE_INT))
230d793d 7434 {
951553af 7435 nonzero &= nonzero_bits (SUBREG_REG (x), mode);
8baf60bb
RK
7436
7437#ifndef WORD_REGISTER_OPERATIONS
230d793d
RS
7438 /* On many CISC machines, accessing an object in a wider mode
7439 causes the high-order bits to become undefined. So they are
7440 not known to be zero. */
7441 if (GET_MODE_SIZE (GET_MODE (x))
7442 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
951553af
RK
7443 nonzero |= (GET_MODE_MASK (GET_MODE (x))
7444 & ~ GET_MODE_MASK (GET_MODE (SUBREG_REG (x))));
230d793d
RS
7445#endif
7446 }
7447 break;
7448
7449 case ASHIFTRT:
7450 case LSHIFTRT:
7451 case ASHIFT:
230d793d 7452 case ROTATE:
951553af 7453 /* The nonzero bits are in two classes: any bits within MODE
230d793d 7454 that aren't in GET_MODE (x) are always significant. The rest of the
951553af 7455 nonzero bits are those that are significant in the operand of
230d793d
RS
7456 the shift when shifted the appropriate number of bits. This
7457 shows that high-order bits are cleared by the right shift and
7458 low-order bits by left shifts. */
7459 if (GET_CODE (XEXP (x, 1)) == CONST_INT
7460 && INTVAL (XEXP (x, 1)) >= 0
5f4f0e22 7461 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
230d793d
RS
7462 {
7463 enum machine_mode inner_mode = GET_MODE (x);
7464 int width = GET_MODE_BITSIZE (inner_mode);
7465 int count = INTVAL (XEXP (x, 1));
5f4f0e22 7466 unsigned HOST_WIDE_INT mode_mask = GET_MODE_MASK (inner_mode);
951553af
RK
7467 unsigned HOST_WIDE_INT op_nonzero = nonzero_bits (XEXP (x, 0), mode);
7468 unsigned HOST_WIDE_INT inner = op_nonzero & mode_mask;
5f4f0e22 7469 unsigned HOST_WIDE_INT outer = 0;
230d793d
RS
7470
7471 if (mode_width > width)
951553af 7472 outer = (op_nonzero & nonzero & ~ mode_mask);
230d793d
RS
7473
7474 if (code == LSHIFTRT)
7475 inner >>= count;
7476 else if (code == ASHIFTRT)
7477 {
7478 inner >>= count;
7479
951553af 7480 /* If the sign bit may have been nonzero before the shift, we
230d793d 7481 need to mark all the places it could have been copied to
951553af 7482 by the shift as possibly nonzero. */
5f4f0e22
CH
7483 if (inner & ((HOST_WIDE_INT) 1 << (width - 1 - count)))
7484 inner |= (((HOST_WIDE_INT) 1 << count) - 1) << (width - count);
230d793d 7485 }
45620ed4 7486 else if (code == ASHIFT)
230d793d
RS
7487 inner <<= count;
7488 else
7489 inner = ((inner << (count % width)
7490 | (inner >> (width - (count % width)))) & mode_mask);
7491
951553af 7492 nonzero &= (outer | inner);
230d793d
RS
7493 }
7494 break;
7495
7496 case FFS:
7497 /* This is at most the number of bits in the mode. */
951553af 7498 nonzero = ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width) + 1)) - 1;
230d793d 7499 break;
d0ab8cd3
RK
7500
7501 case IF_THEN_ELSE:
951553af
RK
7502 nonzero &= (nonzero_bits (XEXP (x, 1), mode)
7503 | nonzero_bits (XEXP (x, 2), mode));
d0ab8cd3 7504 break;
230d793d
RS
7505 }
7506
951553af 7507 return nonzero;
230d793d 7508}
/* See the macro definition above.  NOTE(review): num_sign_bit_copies is
   apparently wrapped by a macro defined earlier in this file (not visible
   here); it must be removed so the real function below can be defined.  */
#undef num_sign_bit_copies
230d793d 7512\f
/* Return the number of bits at the high-order end of X that are known to
   be equal to the sign bit.  X will be used in mode MODE; if MODE is
   VOIDmode, X will be used in its own mode.  The returned value will always
   be between 1 and the number of bits in MODE.  */

static int
num_sign_bit_copies (x, mode)
     rtx x;
     enum machine_mode mode;
{
  enum rtx_code code = GET_CODE (x);
  int bitwidth;
  int num0, num1, result;
  unsigned HOST_WIDE_INT nonzero;
  rtx tem;

  /* If we weren't given a mode, use the mode of X.  If the mode is still
     VOIDmode, we don't know anything.  Likewise if one of the modes is
     floating-point.  */

  if (mode == VOIDmode)
    mode = GET_MODE (x);

  if (mode == VOIDmode || FLOAT_MODE_P (mode) || FLOAT_MODE_P (GET_MODE (x)))
    return 1;

  bitwidth = GET_MODE_BITSIZE (mode);

  /* For a smaller object, just ignore the high bits.  */
  if (bitwidth < GET_MODE_BITSIZE (GET_MODE (x)))
    return MAX (1, (num_sign_bit_copies (x, GET_MODE (x))
		    - (GET_MODE_BITSIZE (GET_MODE (x)) - bitwidth)));

#ifndef WORD_REGISTER_OPERATIONS
  /* If this machine does not do all register operations on the entire
     register and MODE is wider than the mode of X, we can say nothing
     at all about the high-order bits.  */
  if (GET_MODE (x) != VOIDmode && bitwidth > GET_MODE_BITSIZE (GET_MODE (x)))
    return 1;
#endif

  switch (code)
    {
    case REG:

#ifdef POINTERS_EXTEND_UNSIGNED
      /* If pointers extend signed and this is a pointer in Pmode, say that
	 all the bits above ptr_mode are known to be sign bit copies.  */
      if (! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode && mode == Pmode
	  && REGNO_POINTER_FLAG (REGNO (x)))
	return GET_MODE_BITSIZE (Pmode) - GET_MODE_BITSIZE (ptr_mode) + 1;
#endif

      /* If combine recorded the value most recently stored in this register
	 and that record is still valid at this point, use its cached sign
	 bit copy count.  */
      if (reg_last_set_value[REGNO (x)] != 0
	  && reg_last_set_mode[REGNO (x)] == mode
	  && (reg_n_sets[REGNO (x)] == 1
	      || reg_last_set_label[REGNO (x)] == label_tick)
	  && INSN_CUID (reg_last_set[REGNO (x)]) < subst_low_cuid)
	return reg_last_set_sign_bit_copies[REGNO (x)];

      tem = get_last_value (x);
      if (tem != 0)
	return num_sign_bit_copies (tem, mode);

      if (nonzero_sign_valid && reg_sign_bit_copies[REGNO (x)] != 0)
	return reg_sign_bit_copies[REGNO (x)];
      break;

    case MEM:
#ifdef LOAD_EXTEND_OP
      /* Some RISC machines sign-extend all loads of smaller than a word.  */
      if (LOAD_EXTEND_OP (GET_MODE (x)) == SIGN_EXTEND)
	return MAX (1, bitwidth - GET_MODE_BITSIZE (GET_MODE (x)) + 1);
#endif
      break;

    case CONST_INT:
      /* If the constant is negative, take its 1's complement and remask.
	 Then see how many zero bits we have.  */
      nonzero = INTVAL (x) & GET_MODE_MASK (mode);
      if (bitwidth <= HOST_BITS_PER_WIDE_INT
	  && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
	nonzero = (~ nonzero) & GET_MODE_MASK (mode);

      return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);

    case SUBREG:
      /* If this is a SUBREG for a promoted object that is sign-extended
	 and we are looking at it in a wider mode, we know that at least the
	 high-order bits are known to be sign bit copies.  */

      if (SUBREG_PROMOTED_VAR_P (x) && ! SUBREG_PROMOTED_UNSIGNED_P (x))
	return MAX (bitwidth - GET_MODE_BITSIZE (GET_MODE (x)) + 1,
		    num_sign_bit_copies (SUBREG_REG (x), mode));

      /* For a smaller object, just ignore the high bits.  */
      if (bitwidth <= GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))))
	{
	  num0 = num_sign_bit_copies (SUBREG_REG (x), VOIDmode);
	  return MAX (1, (num0
			  - (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
			     - bitwidth)));
	}

#ifdef WORD_REGISTER_OPERATIONS
#ifdef LOAD_EXTEND_OP
      /* For paradoxical SUBREGs on machines where all register operations
	 affect the entire register, just look inside.  Note that we are
	 passing MODE to the recursive call, so the number of sign bit copies
	 will remain relative to that mode, not the inner mode.  */

      /* This works only if loads sign extend.  Otherwise, if we get a
	 reload for the inner part, it may be loaded from the stack, and
	 then we lose all sign bit copies that existed before the store
	 to the stack.  */

      if ((GET_MODE_SIZE (GET_MODE (x))
	   > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
	  && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND)
	return num_sign_bit_copies (SUBREG_REG (x), mode);
#endif
#endif
      break;

    case SIGN_EXTRACT:
      /* Extracting N bits and sign-extending leaves bitwidth - N + 1
	 copies of the sign bit.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
	return MAX (1, bitwidth - INTVAL (XEXP (x, 1)));
      break;

    case SIGN_EXTEND:
      return (bitwidth - GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
	      + num_sign_bit_copies (XEXP (x, 0), VOIDmode));

    case TRUNCATE:
      /* For a smaller object, just ignore the high bits.  */
      num0 = num_sign_bit_copies (XEXP (x, 0), VOIDmode);
      return MAX (1, (num0 - (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
			      - bitwidth)));

    case NOT:
      /* Complementing preserves the number of sign bit copies.  */
      return num_sign_bit_copies (XEXP (x, 0), mode);

    case ROTATE:	case ROTATERT:
      /* If we are rotating left by a number of bits less than the number
	 of sign bit copies, we can just subtract that amount from the
	 number.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
	  && INTVAL (XEXP (x, 1)) >= 0 && INTVAL (XEXP (x, 1)) < bitwidth)
	{
	  num0 = num_sign_bit_copies (XEXP (x, 0), mode);
	  return MAX (1, num0 - (code == ROTATE ? INTVAL (XEXP (x, 1))
				 : bitwidth - INTVAL (XEXP (x, 1))));
	}
      break;

    case NEG:
      /* In general, this subtracts one sign bit copy.  But if the value
	 is known to be positive, the number of sign bit copies is the
	 same as that of the input.  Finally, if the input has just one bit
	 that might be nonzero, all the bits are copies of the sign bit.  */
      nonzero = nonzero_bits (XEXP (x, 0), mode);
      if (nonzero == 1)
	return bitwidth;

      num0 = num_sign_bit_copies (XEXP (x, 0), mode);
      if (num0 > 1
	  && bitwidth <= HOST_BITS_PER_WIDE_INT
	  && (((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero))
	num0--;

      return num0;

    case IOR:   case AND:   case XOR:
    case SMIN:  case SMAX:  case UMIN:  case UMAX:
      /* Logical operations will preserve the number of sign-bit copies.
	 MIN and MAX operations always return one of the operands.  */
      num0 = num_sign_bit_copies (XEXP (x, 0), mode);
      num1 = num_sign_bit_copies (XEXP (x, 1), mode);
      return MIN (num0, num1);

    case PLUS:  case MINUS:
      /* For addition and subtraction, we can have a 1-bit carry.  However,
	 if we are subtracting 1 from a positive number, there will not
	 be such a carry.  Furthermore, if the positive number is known to
	 be 0 or 1, we know the result is either -1 or 0.  */

      if (code == PLUS && XEXP (x, 1) == constm1_rtx
	  && bitwidth <= HOST_BITS_PER_WIDE_INT)
	{
	  nonzero = nonzero_bits (XEXP (x, 0), mode);
	  if ((((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero) == 0)
	    return (nonzero == 1 || nonzero == 0 ? bitwidth
		    : bitwidth - floor_log2 (nonzero) - 1);
	}

      num0 = num_sign_bit_copies (XEXP (x, 0), mode);
      num1 = num_sign_bit_copies (XEXP (x, 1), mode);
      return MAX (1, MIN (num0, num1) - 1);

    case MULT:
      /* The number of bits of the product is the sum of the number of
	 bits of both terms.  However, unless one of the terms is known
	 to be positive, we must allow for an additional bit since negating
	 a negative number can remove one sign bit copy.  */

      num0 = num_sign_bit_copies (XEXP (x, 0), mode);
      num1 = num_sign_bit_copies (XEXP (x, 1), mode);

      result = bitwidth - (bitwidth - num0) - (bitwidth - num1);
      if (result > 0
	  && bitwidth <= HOST_BITS_PER_WIDE_INT
	  && ((nonzero_bits (XEXP (x, 0), mode)
	       & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
	  && ((nonzero_bits (XEXP (x, 1), mode)
	       & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
	result--;

      return MAX (1, result);

    case UDIV:
      /* The result must be <= the first operand.  */
      return num_sign_bit_copies (XEXP (x, 0), mode);

    case UMOD:
      /* The result must be <= the second operand.  */
      return num_sign_bit_copies (XEXP (x, 1), mode);

    case DIV:
      /* Similar to unsigned division, except that we have to worry about
	 the case where the divisor is negative, in which case we have
	 to add 1.  */
      result = num_sign_bit_copies (XEXP (x, 0), mode);
      if (result > 1
	  && bitwidth <= HOST_BITS_PER_WIDE_INT
	  && (nonzero_bits (XEXP (x, 1), mode)
	      & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
	result --;

      return result;

    case MOD:
      /* As for DIV, a negative divisor can cost one sign bit copy.  */
      result = num_sign_bit_copies (XEXP (x, 1), mode);
      if (result > 1
	  && bitwidth <= HOST_BITS_PER_WIDE_INT
	  && (nonzero_bits (XEXP (x, 1), mode)
	      & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
	result --;

      return result;

    case ASHIFTRT:
      /* Shifts by a constant add to the number of bits equal to the
	 sign bit.  */
      num0 = num_sign_bit_copies (XEXP (x, 0), mode);
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
	  && INTVAL (XEXP (x, 1)) > 0)
	num0 = MIN (bitwidth, num0 + INTVAL (XEXP (x, 1)));

      return num0;

    case ASHIFT:
      /* Left shifts destroy copies.  */
      if (GET_CODE (XEXP (x, 1)) != CONST_INT
	  || INTVAL (XEXP (x, 1)) < 0
	  || INTVAL (XEXP (x, 1)) >= bitwidth)
	return 1;

      num0 = num_sign_bit_copies (XEXP (x, 0), mode);
      return MAX (1, num0 - INTVAL (XEXP (x, 1)));

    case IF_THEN_ELSE:
      /* Only the two value arms matter; take the weaker guarantee.  */
      num0 = num_sign_bit_copies (XEXP (x, 1), mode);
      num1 = num_sign_bit_copies (XEXP (x, 2), mode);
      return MIN (num0, num1);

    case EQ:  case NE:  case GE:  case GT:  case LE:  case LT:
    case GEU: case GTU: case LEU: case LTU:
      /* A comparison yields either zero or STORE_FLAG_VALUE; when the
	 latter is -1, every bit is a sign bit copy.  Otherwise fall
	 through to the generic nonzero-bits computation below.  */
      if (STORE_FLAG_VALUE == -1)
	return bitwidth;
    }

  /* If we haven't been able to figure it out by one of the above rules,
     see if some of the high-order bits are known to be zero.  If so,
     count those bits and return one less than that amount.  If we can't
     safely compute the mask for this mode, always return 1.  */

  if (bitwidth > HOST_BITS_PER_WIDE_INT)
    return 1;

  nonzero = nonzero_bits (x, mode);
  return (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))
	  ? 1 : bitwidth - floor_log2 (nonzero) - 1);
}
7806\f
1a26b032
RK
7807/* Return the number of "extended" bits there are in X, when interpreted
7808 as a quantity in MODE whose signedness is indicated by UNSIGNEDP. For
7809 unsigned quantities, this is the number of high-order zero bits.
7810 For signed quantities, this is the number of copies of the sign bit
7811 minus 1. In both case, this function returns the number of "spare"
7812 bits. For example, if two quantities for which this function returns
7813 at least 1 are added, the addition is known not to overflow.
7814
7815 This function will always return 0 unless called during combine, which
7816 implies that it must be called from a define_split. */
7817
7818int
7819extended_count (x, mode, unsignedp)
7820 rtx x;
7821 enum machine_mode mode;
7822 int unsignedp;
7823{
951553af 7824 if (nonzero_sign_valid == 0)
1a26b032
RK
7825 return 0;
7826
7827 return (unsignedp
ac49a949
RS
7828 ? (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7829 && (GET_MODE_BITSIZE (mode) - 1
951553af 7830 - floor_log2 (nonzero_bits (x, mode))))
1a26b032
RK
7831 : num_sign_bit_copies (x, mode) - 1);
7832}
7833\f
/* This function is called from `simplify_shift_const' to merge two
   outer operations.  Specifically, we have already found that we need
   to perform operation *POP0 with constant *PCONST0 at the outermost
   position.  We would now like to also perform OP1 with constant CONST1
   (with *POP0 being done last).

   Return 1 if we can do the operation and update *POP0 and *PCONST0 with
   the resulting operation.  *PCOMP_P is set to 1 if we would need to
   complement the innermost operand, otherwise it is unchanged.

   MODE is the mode in which the operation will be done.  No bits outside
   the width of this mode matter.  It is assumed that the width of this mode
   is smaller than or equal to HOST_BITS_PER_WIDE_INT.

   If *POP0 or OP1 are NIL, it means no operation is required.  Only NEG, PLUS,
   IOR, XOR, and AND are supported.  We may set *POP0 to SET if the proper
   result is simply *PCONST0.

   If the resulting operation cannot be expressed as one operation, we
   return 0 and do not change *POP0, *PCONST0, and *PCOMP_P.  */

static int
merge_outer_ops (pop0, pconst0, op1, const1, mode, pcomp_p)
     enum rtx_code *pop0;
     HOST_WIDE_INT *pconst0;
     enum rtx_code op1;
     HOST_WIDE_INT const1;
     enum machine_mode mode;
     int *pcomp_p;
{
  enum rtx_code op0 = *pop0;
  HOST_WIDE_INT const0 = *pconst0;
  int width = GET_MODE_BITSIZE (mode);

  /* Only the bits inside MODE matter; drop the rest up front.  */
  const0 &= GET_MODE_MASK (mode);
  const1 &= GET_MODE_MASK (mode);

  /* If OP0 is an AND, clear unimportant bits in CONST1.  */
  if (op0 == AND)
    const1 &= const0;

  /* If OP0 or OP1 is NIL, this is easy.  Similarly if they are the same or
     if OP0 is SET.  */

  if (op1 == NIL || op0 == SET)
    return 1;

  else if (op0 == NIL)
    op0 = op1, const0 = const1;

  else if (op0 == op1)
    {
      /* Two identical operations fold into one; NEG (NEG x) cancels.  */
      switch (op0)
	{
	case AND:
	  const0 &= const1;
	  break;
	case IOR:
	  const0 |= const1;
	  break;
	case XOR:
	  const0 ^= const1;
	  break;
	case PLUS:
	  const0 += const1;
	  break;
	case NEG:
	  op0 = NIL;
	  break;
	}
    }

  /* Otherwise, if either is a PLUS or NEG, we can't do anything.  */
  else if (op0 == PLUS || op1 == PLUS || op0 == NEG || op1 == NEG)
    return 0;

  /* If the two constants aren't the same, we can't do anything.  The
     remaining six cases can all be done.  */
  else if (const0 != const1)
    return 0;

  else
    /* At this point OP0 and OP1 are distinct members of {AND, IOR, XOR}
       with equal constants; each pairing has a closed-form merge.  */
    switch (op0)
      {
      case IOR:
	if (op1 == AND)
	  /* (a & b) | b == b */
	  op0 = SET;
	else /* op1 == XOR */
	  /* (a ^ b) | b == a | b */
	  ;
	break;

      case XOR:
	if (op1 == AND)
	  /* (a & b) ^ b == (~a) & b */
	  op0 = AND, *pcomp_p = 1;
	else /* op1 == IOR */
	  /* (a | b) ^ b == a & ~b */
	  op0 = AND, *pconst0 = ~ const0;
	break;

      case AND:
	if (op1 == IOR)
	  /* (a | b) & b == b */
	  op0 = SET;
	else /* op1 == XOR */
	  /* (a ^ b) & b == (~a) & b */
	  *pcomp_p = 1;
	break;
      }

  /* Check for NO-OP cases.  */
  const0 &= GET_MODE_MASK (mode);
  if (const0 == 0
      && (op0 == IOR || op0 == XOR || op0 == PLUS))
    op0 = NIL;
  else if (const0 == 0 && op0 == AND)
    op0 = SET;
  else if (const0 == GET_MODE_MASK (mode) && op0 == AND)
    op0 = NIL;

  /* If this would be an entire word for the target, but is not for
     the host, then sign-extend on the host so that the number will look
     the same way on the host that it would on the target.

     For example, when building a 64 bit alpha hosted 32 bit sparc
     targeted compiler, then we want the 32 bit unsigned value -1 to be
     represented as a 64 bit value -1, and not as 0x00000000ffffffff.
     The latter confuses the sparc backend.  */

  if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT && BITS_PER_WORD == width
      && (const0 & ((HOST_WIDE_INT) 1 << (width - 1))))
    const0 |= ((HOST_WIDE_INT) (-1) << width);

  *pop0 = op0;
  *pconst0 = const0;

  return 1;
}
7974\f
7975/* Simplify a shift of VAROP by COUNT bits. CODE says what kind of shift.
7976 The result of the shift is RESULT_MODE. X, if non-zero, is an expression
7977 that we started with.
7978
7979 The shift is normally computed in the widest mode we find in VAROP, as
7980 long as it isn't a different number of words than RESULT_MODE. Exceptions
7981 are ASHIFTRT and ROTATE, which are always done in their original mode, */
7982
7983static rtx
7984simplify_shift_const (x, code, result_mode, varop, count)
7985 rtx x;
7986 enum rtx_code code;
7987 enum machine_mode result_mode;
7988 rtx varop;
7989 int count;
7990{
7991 enum rtx_code orig_code = code;
7992 int orig_count = count;
7993 enum machine_mode mode = result_mode;
7994 enum machine_mode shift_mode, tmode;
7995 int mode_words
7996 = (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
7997 /* We form (outer_op (code varop count) (outer_const)). */
7998 enum rtx_code outer_op = NIL;
c4e861e8 7999 HOST_WIDE_INT outer_const = 0;
230d793d
RS
8000 rtx const_rtx;
8001 int complement_p = 0;
8002 rtx new;
8003
8004 /* If we were given an invalid count, don't do anything except exactly
8005 what was requested. */
8006
8007 if (count < 0 || count > GET_MODE_BITSIZE (mode))
8008 {
8009 if (x)
8010 return x;
8011
5f4f0e22 8012 return gen_rtx (code, mode, varop, GEN_INT (count));
230d793d
RS
8013 }
8014
8015 /* Unless one of the branches of the `if' in this loop does a `continue',
8016 we will `break' the loop after the `if'. */
8017
8018 while (count != 0)
8019 {
8020 /* If we have an operand of (clobber (const_int 0)), just return that
8021 value. */
8022 if (GET_CODE (varop) == CLOBBER)
8023 return varop;
8024
8025 /* If we discovered we had to complement VAROP, leave. Making a NOT
8026 here would cause an infinite loop. */
8027 if (complement_p)
8028 break;
8029
abc95ed3 8030 /* Convert ROTATERT to ROTATE. */
230d793d
RS
8031 if (code == ROTATERT)
8032 code = ROTATE, count = GET_MODE_BITSIZE (result_mode) - count;
8033
230d793d 8034 /* We need to determine what mode we will do the shift in. If the
f6789c77
RK
8035 shift is a right shift or a ROTATE, we must always do it in the mode
8036 it was originally done in. Otherwise, we can do it in MODE, the
0f41302f 8037 widest mode encountered. */
f6789c77
RK
8038 shift_mode
8039 = (code == ASHIFTRT || code == LSHIFTRT || code == ROTATE
8040 ? result_mode : mode);
230d793d
RS
8041
8042 /* Handle cases where the count is greater than the size of the mode
8043 minus 1. For ASHIFT, use the size minus one as the count (this can
8044 occur when simplifying (lshiftrt (ashiftrt ..))). For rotates,
8045 take the count modulo the size. For other shifts, the result is
8046 zero.
8047
8048 Since these shifts are being produced by the compiler by combining
8049 multiple operations, each of which are defined, we know what the
8050 result is supposed to be. */
8051
8052 if (count > GET_MODE_BITSIZE (shift_mode) - 1)
8053 {
8054 if (code == ASHIFTRT)
8055 count = GET_MODE_BITSIZE (shift_mode) - 1;
8056 else if (code == ROTATE || code == ROTATERT)
8057 count %= GET_MODE_BITSIZE (shift_mode);
8058 else
8059 {
8060 /* We can't simply return zero because there may be an
8061 outer op. */
8062 varop = const0_rtx;
8063 count = 0;
8064 break;
8065 }
8066 }
8067
8068 /* Negative counts are invalid and should not have been made (a
8069 programmer-specified negative count should have been handled
0f41302f 8070 above). */
230d793d
RS
8071 else if (count < 0)
8072 abort ();
8073
312def2e
RK
8074 /* An arithmetic right shift of a quantity known to be -1 or 0
8075 is a no-op. */
8076 if (code == ASHIFTRT
8077 && (num_sign_bit_copies (varop, shift_mode)
8078 == GET_MODE_BITSIZE (shift_mode)))
d0ab8cd3 8079 {
312def2e
RK
8080 count = 0;
8081 break;
8082 }
d0ab8cd3 8083
312def2e
RK
8084 /* If we are doing an arithmetic right shift and discarding all but
8085 the sign bit copies, this is equivalent to doing a shift by the
8086 bitsize minus one. Convert it into that shift because it will often
8087 allow other simplifications. */
500c518b 8088
312def2e
RK
8089 if (code == ASHIFTRT
8090 && (count + num_sign_bit_copies (varop, shift_mode)
8091 >= GET_MODE_BITSIZE (shift_mode)))
8092 count = GET_MODE_BITSIZE (shift_mode) - 1;
500c518b 8093
230d793d
RS
8094 /* We simplify the tests below and elsewhere by converting
8095 ASHIFTRT to LSHIFTRT if we know the sign bit is clear.
8096 `make_compound_operation' will convert it to a ASHIFTRT for
8097 those machines (such as Vax) that don't have a LSHIFTRT. */
5f4f0e22 8098 if (GET_MODE_BITSIZE (shift_mode) <= HOST_BITS_PER_WIDE_INT
230d793d 8099 && code == ASHIFTRT
951553af 8100 && ((nonzero_bits (varop, shift_mode)
5f4f0e22
CH
8101 & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (shift_mode) - 1)))
8102 == 0))
230d793d
RS
8103 code = LSHIFTRT;
8104
8105 switch (GET_CODE (varop))
8106 {
8107 case SIGN_EXTEND:
8108 case ZERO_EXTEND:
8109 case SIGN_EXTRACT:
8110 case ZERO_EXTRACT:
8111 new = expand_compound_operation (varop);
8112 if (new != varop)
8113 {
8114 varop = new;
8115 continue;
8116 }
8117 break;
8118
8119 case MEM:
8120 /* If we have (xshiftrt (mem ...) C) and C is MODE_WIDTH
8121 minus the width of a smaller mode, we can do this with a
8122 SIGN_EXTEND or ZERO_EXTEND from the narrower memory location. */
8123 if ((code == ASHIFTRT || code == LSHIFTRT)
8124 && ! mode_dependent_address_p (XEXP (varop, 0))
8125 && ! MEM_VOLATILE_P (varop)
8126 && (tmode = mode_for_size (GET_MODE_BITSIZE (mode) - count,
8127 MODE_INT, 1)) != BLKmode)
8128 {
f76b9db2
ILT
8129 if (BYTES_BIG_ENDIAN)
8130 new = gen_rtx (MEM, tmode, XEXP (varop, 0));
8131 else
e24b00c8
ILT
8132 new = gen_rtx (MEM, tmode,
8133 plus_constant (XEXP (varop, 0),
8134 count / BITS_PER_UNIT));
8135 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (varop);
8136 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (varop);
8137 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (varop);
230d793d
RS
8138 varop = gen_rtx_combine (code == ASHIFTRT ? SIGN_EXTEND
8139 : ZERO_EXTEND, mode, new);
8140 count = 0;
8141 continue;
8142 }
8143 break;
8144
8145 case USE:
8146 /* Similar to the case above, except that we can only do this if
8147 the resulting mode is the same as that of the underlying
8148 MEM and adjust the address depending on the *bits* endianness
8149 because of the way that bit-field extract insns are defined. */
8150 if ((code == ASHIFTRT || code == LSHIFTRT)
8151 && (tmode = mode_for_size (GET_MODE_BITSIZE (mode) - count,
8152 MODE_INT, 1)) != BLKmode
8153 && tmode == GET_MODE (XEXP (varop, 0)))
8154 {
f76b9db2
ILT
8155 if (BITS_BIG_ENDIAN)
8156 new = XEXP (varop, 0);
8157 else
8158 {
8159 new = copy_rtx (XEXP (varop, 0));
8160 SUBST (XEXP (new, 0),
8161 plus_constant (XEXP (new, 0),
8162 count / BITS_PER_UNIT));
8163 }
230d793d
RS
8164
8165 varop = gen_rtx_combine (code == ASHIFTRT ? SIGN_EXTEND
8166 : ZERO_EXTEND, mode, new);
8167 count = 0;
8168 continue;
8169 }
8170 break;
8171
8172 case SUBREG:
8173 /* If VAROP is a SUBREG, strip it as long as the inner operand has
8174 the same number of words as what we've seen so far. Then store
8175 the widest mode in MODE. */
f9e67232
RS
8176 if (subreg_lowpart_p (varop)
8177 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop)))
8178 > GET_MODE_SIZE (GET_MODE (varop)))
230d793d
RS
8179 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop)))
8180 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
8181 == mode_words))
8182 {
8183 varop = SUBREG_REG (varop);
8184 if (GET_MODE_SIZE (GET_MODE (varop)) > GET_MODE_SIZE (mode))
8185 mode = GET_MODE (varop);
8186 continue;
8187 }
8188 break;
8189
8190 case MULT:
8191 /* Some machines use MULT instead of ASHIFT because MULT
8192 is cheaper. But it is still better on those machines to
8193 merge two shifts into one. */
8194 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
8195 && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0)
8196 {
8197 varop = gen_binary (ASHIFT, GET_MODE (varop), XEXP (varop, 0),
5f4f0e22 8198 GEN_INT (exact_log2 (INTVAL (XEXP (varop, 1)))));;
230d793d
RS
8199 continue;
8200 }
8201 break;
8202
8203 case UDIV:
8204 /* Similar, for when divides are cheaper. */
8205 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
8206 && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0)
8207 {
8208 varop = gen_binary (LSHIFTRT, GET_MODE (varop), XEXP (varop, 0),
5f4f0e22 8209 GEN_INT (exact_log2 (INTVAL (XEXP (varop, 1)))));
230d793d
RS
8210 continue;
8211 }
8212 break;
8213
8214 case ASHIFTRT:
8215 /* If we are extracting just the sign bit of an arithmetic right
8216 shift, that shift is not needed. */
8217 if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1)
8218 {
8219 varop = XEXP (varop, 0);
8220 continue;
8221 }
8222
0f41302f 8223 /* ... fall through ... */
230d793d
RS
8224
8225 case LSHIFTRT:
8226 case ASHIFT:
230d793d
RS
8227 case ROTATE:
8228 /* Here we have two nested shifts. The result is usually the
8229 AND of a new shift with a mask. We compute the result below. */
8230 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
8231 && INTVAL (XEXP (varop, 1)) >= 0
8232 && INTVAL (XEXP (varop, 1)) < GET_MODE_BITSIZE (GET_MODE (varop))
5f4f0e22
CH
8233 && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
8234 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
230d793d
RS
8235 {
8236 enum rtx_code first_code = GET_CODE (varop);
8237 int first_count = INTVAL (XEXP (varop, 1));
5f4f0e22 8238 unsigned HOST_WIDE_INT mask;
230d793d 8239 rtx mask_rtx;
230d793d 8240
230d793d
RS
8241 /* We have one common special case. We can't do any merging if
8242 the inner code is an ASHIFTRT of a smaller mode. However, if
8243 we have (ashift:M1 (subreg:M1 (ashiftrt:M2 FOO C1) 0) C2)
8244 with C2 == GET_MODE_BITSIZE (M1) - GET_MODE_BITSIZE (M2),
8245 we can convert it to
8246 (ashiftrt:M1 (ashift:M1 (and:M1 (subreg:M1 FOO 0 C2) C3) C1).
8247 This simplifies certain SIGN_EXTEND operations. */
8248 if (code == ASHIFT && first_code == ASHIFTRT
8249 && (GET_MODE_BITSIZE (result_mode)
8250 - GET_MODE_BITSIZE (GET_MODE (varop))) == count)
8251 {
8252 /* C3 has the low-order C1 bits zero. */
8253
5f4f0e22
CH
8254 mask = (GET_MODE_MASK (mode)
8255 & ~ (((HOST_WIDE_INT) 1 << first_count) - 1));
230d793d 8256
5f4f0e22 8257 varop = simplify_and_const_int (NULL_RTX, result_mode,
230d793d 8258 XEXP (varop, 0), mask);
5f4f0e22 8259 varop = simplify_shift_const (NULL_RTX, ASHIFT, result_mode,
230d793d
RS
8260 varop, count);
8261 count = first_count;
8262 code = ASHIFTRT;
8263 continue;
8264 }
8265
d0ab8cd3
RK
8266 /* If this was (ashiftrt (ashift foo C1) C2) and FOO has more
8267 than C1 high-order bits equal to the sign bit, we can convert
8268 this to either an ASHIFT or a ASHIFTRT depending on the
8269 two counts.
230d793d
RS
8270
8271 We cannot do this if VAROP's mode is not SHIFT_MODE. */
8272
8273 if (code == ASHIFTRT && first_code == ASHIFT
8274 && GET_MODE (varop) == shift_mode
d0ab8cd3
RK
8275 && (num_sign_bit_copies (XEXP (varop, 0), shift_mode)
8276 > first_count))
230d793d 8277 {
d0ab8cd3
RK
8278 count -= first_count;
8279 if (count < 0)
8280 count = - count, code = ASHIFT;
8281 varop = XEXP (varop, 0);
8282 continue;
230d793d
RS
8283 }
8284
8285 /* There are some cases we can't do. If CODE is ASHIFTRT,
8286 we can only do this if FIRST_CODE is also ASHIFTRT.
8287
8288 We can't do the case when CODE is ROTATE and FIRST_CODE is
8289 ASHIFTRT.
8290
8291 If the mode of this shift is not the mode of the outer shift,
bdaae9a0 8292 we can't do this if either shift is a right shift or ROTATE.
230d793d
RS
8293
8294 Finally, we can't do any of these if the mode is too wide
8295 unless the codes are the same.
8296
8297 Handle the case where the shift codes are the same
8298 first. */
8299
8300 if (code == first_code)
8301 {
8302 if (GET_MODE (varop) != result_mode
bdaae9a0
RK
8303 && (code == ASHIFTRT || code == LSHIFTRT
8304 || code == ROTATE))
230d793d
RS
8305 break;
8306
8307 count += first_count;
8308 varop = XEXP (varop, 0);
8309 continue;
8310 }
8311
8312 if (code == ASHIFTRT
8313 || (code == ROTATE && first_code == ASHIFTRT)
5f4f0e22 8314 || GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT
230d793d 8315 || (GET_MODE (varop) != result_mode
bdaae9a0
RK
8316 && (first_code == ASHIFTRT || first_code == LSHIFTRT
8317 || first_code == ROTATE
230d793d
RS
8318 || code == ROTATE)))
8319 break;
8320
8321 /* To compute the mask to apply after the shift, shift the
951553af 8322 nonzero bits of the inner shift the same way the
230d793d
RS
8323 outer shift will. */
8324
951553af 8325 mask_rtx = GEN_INT (nonzero_bits (varop, GET_MODE (varop)));
230d793d
RS
8326
8327 mask_rtx
8328 = simplify_binary_operation (code, result_mode, mask_rtx,
5f4f0e22 8329 GEN_INT (count));
230d793d
RS
8330
8331 /* Give up if we can't compute an outer operation to use. */
8332 if (mask_rtx == 0
8333 || GET_CODE (mask_rtx) != CONST_INT
8334 || ! merge_outer_ops (&outer_op, &outer_const, AND,
8335 INTVAL (mask_rtx),
8336 result_mode, &complement_p))
8337 break;
8338
8339 /* If the shifts are in the same direction, we add the
8340 counts. Otherwise, we subtract them. */
8341 if ((code == ASHIFTRT || code == LSHIFTRT)
8342 == (first_code == ASHIFTRT || first_code == LSHIFTRT))
8343 count += first_count;
8344 else
8345 count -= first_count;
8346
8347 /* If COUNT is positive, the new shift is usually CODE,
8348 except for the two exceptions below, in which case it is
8349 FIRST_CODE. If the count is negative, FIRST_CODE should
8350 always be used */
8351 if (count > 0
8352 && ((first_code == ROTATE && code == ASHIFT)
8353 || (first_code == ASHIFTRT && code == LSHIFTRT)))
8354 code = first_code;
8355 else if (count < 0)
8356 code = first_code, count = - count;
8357
8358 varop = XEXP (varop, 0);
8359 continue;
8360 }
8361
8362 /* If we have (A << B << C) for any shift, we can convert this to
8363 (A << C << B). This wins if A is a constant. Only try this if
8364 B is not a constant. */
8365
8366 else if (GET_CODE (varop) == code
8367 && GET_CODE (XEXP (varop, 1)) != CONST_INT
8368 && 0 != (new
8369 = simplify_binary_operation (code, mode,
8370 XEXP (varop, 0),
5f4f0e22 8371 GEN_INT (count))))
230d793d
RS
8372 {
8373 varop = gen_rtx_combine (code, mode, new, XEXP (varop, 1));
8374 count = 0;
8375 continue;
8376 }
8377 break;
8378
8379 case NOT:
8380 /* Make this fit the case below. */
8381 varop = gen_rtx_combine (XOR, mode, XEXP (varop, 0),
5f4f0e22 8382 GEN_INT (GET_MODE_MASK (mode)));
230d793d
RS
8383 continue;
8384
8385 case IOR:
8386 case AND:
8387 case XOR:
8388 /* If we have (xshiftrt (ior (plus X (const_int -1)) X) C)
8389 with C the size of VAROP - 1 and the shift is logical if
8390 STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
8391 we have an (le X 0) operation. If we have an arithmetic shift
8392 and STORE_FLAG_VALUE is 1 or we have a logical shift with
8393 STORE_FLAG_VALUE of -1, we have a (neg (le X 0)) operation. */
8394
8395 if (GET_CODE (varop) == IOR && GET_CODE (XEXP (varop, 0)) == PLUS
8396 && XEXP (XEXP (varop, 0), 1) == constm1_rtx
8397 && (STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
8398 && (code == LSHIFTRT || code == ASHIFTRT)
8399 && count == GET_MODE_BITSIZE (GET_MODE (varop)) - 1
8400 && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
8401 {
8402 count = 0;
8403 varop = gen_rtx_combine (LE, GET_MODE (varop), XEXP (varop, 1),
8404 const0_rtx);
8405
8406 if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
8407 varop = gen_rtx_combine (NEG, GET_MODE (varop), varop);
8408
8409 continue;
8410 }
8411
8412 /* If we have (shift (logical)), move the logical to the outside
8413 to allow it to possibly combine with another logical and the
8414 shift to combine with another shift. This also canonicalizes to
8415 what a ZERO_EXTRACT looks like. Also, some machines have
8416 (and (shift)) insns. */
8417
8418 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
8419 && (new = simplify_binary_operation (code, result_mode,
8420 XEXP (varop, 1),
5f4f0e22 8421 GEN_INT (count))) != 0
7d171a1e 8422 && GET_CODE(new) == CONST_INT
230d793d
RS
8423 && merge_outer_ops (&outer_op, &outer_const, GET_CODE (varop),
8424 INTVAL (new), result_mode, &complement_p))
8425 {
8426 varop = XEXP (varop, 0);
8427 continue;
8428 }
8429
8430 /* If we can't do that, try to simplify the shift in each arm of the
8431 logical expression, make a new logical expression, and apply
8432 the inverse distributive law. */
8433 {
00d4ca1c 8434 rtx lhs = simplify_shift_const (NULL_RTX, code, shift_mode,
230d793d 8435 XEXP (varop, 0), count);
00d4ca1c 8436 rtx rhs = simplify_shift_const (NULL_RTX, code, shift_mode,
230d793d
RS
8437 XEXP (varop, 1), count);
8438
21a64bf1 8439 varop = gen_binary (GET_CODE (varop), shift_mode, lhs, rhs);
230d793d
RS
8440 varop = apply_distributive_law (varop);
8441
8442 count = 0;
8443 }
8444 break;
8445
8446 case EQ:
45620ed4 8447 /* convert (lshiftrt (eq FOO 0) C) to (xor FOO 1) if STORE_FLAG_VALUE
230d793d 8448 says that the sign bit can be tested, FOO has mode MODE, C is
45620ed4
RK
8449 GET_MODE_BITSIZE (MODE) - 1, and FOO has only its low-order bit
8450 that may be nonzero. */
8451 if (code == LSHIFTRT
230d793d
RS
8452 && XEXP (varop, 1) == const0_rtx
8453 && GET_MODE (XEXP (varop, 0)) == result_mode
8454 && count == GET_MODE_BITSIZE (result_mode) - 1
5f4f0e22 8455 && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
230d793d 8456 && ((STORE_FLAG_VALUE
5f4f0e22 8457 & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (result_mode) - 1))))
951553af 8458 && nonzero_bits (XEXP (varop, 0), result_mode) == 1
5f4f0e22
CH
8459 && merge_outer_ops (&outer_op, &outer_const, XOR,
8460 (HOST_WIDE_INT) 1, result_mode,
8461 &complement_p))
230d793d
RS
8462 {
8463 varop = XEXP (varop, 0);
8464 count = 0;
8465 continue;
8466 }
8467 break;
8468
8469 case NEG:
d0ab8cd3
RK
8470 /* (lshiftrt (neg A) C) where A is either 0 or 1 and C is one less
8471 than the number of bits in the mode is equivalent to A. */
8472 if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1
951553af 8473 && nonzero_bits (XEXP (varop, 0), result_mode) == 1)
230d793d 8474 {
d0ab8cd3 8475 varop = XEXP (varop, 0);
230d793d
RS
8476 count = 0;
8477 continue;
8478 }
8479
8480 /* NEG commutes with ASHIFT since it is multiplication. Move the
8481 NEG outside to allow shifts to combine. */
8482 if (code == ASHIFT
5f4f0e22
CH
8483 && merge_outer_ops (&outer_op, &outer_const, NEG,
8484 (HOST_WIDE_INT) 0, result_mode,
8485 &complement_p))
230d793d
RS
8486 {
8487 varop = XEXP (varop, 0);
8488 continue;
8489 }
8490 break;
8491
8492 case PLUS:
d0ab8cd3
RK
8493 /* (lshiftrt (plus A -1) C) where A is either 0 or 1 and C
8494 is one less than the number of bits in the mode is
8495 equivalent to (xor A 1). */
230d793d
RS
8496 if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1
8497 && XEXP (varop, 1) == constm1_rtx
951553af 8498 && nonzero_bits (XEXP (varop, 0), result_mode) == 1
5f4f0e22
CH
8499 && merge_outer_ops (&outer_op, &outer_const, XOR,
8500 (HOST_WIDE_INT) 1, result_mode,
8501 &complement_p))
230d793d
RS
8502 {
8503 count = 0;
8504 varop = XEXP (varop, 0);
8505 continue;
8506 }
8507
3f508eca 8508 /* If we have (xshiftrt (plus FOO BAR) C), and the only bits
951553af 8509 that might be nonzero in BAR are those being shifted out and those
3f508eca
RK
8510 bits are known zero in FOO, we can replace the PLUS with FOO.
8511 Similarly in the other operand order. This code occurs when
8512 we are computing the size of a variable-size array. */
8513
8514 if ((code == ASHIFTRT || code == LSHIFTRT)
5f4f0e22 8515 && count < HOST_BITS_PER_WIDE_INT
951553af
RK
8516 && nonzero_bits (XEXP (varop, 1), result_mode) >> count == 0
8517 && (nonzero_bits (XEXP (varop, 1), result_mode)
8518 & nonzero_bits (XEXP (varop, 0), result_mode)) == 0)
3f508eca
RK
8519 {
8520 varop = XEXP (varop, 0);
8521 continue;
8522 }
8523 else if ((code == ASHIFTRT || code == LSHIFTRT)
5f4f0e22 8524 && count < HOST_BITS_PER_WIDE_INT
ac49a949 8525 && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
951553af 8526 && 0 == (nonzero_bits (XEXP (varop, 0), result_mode)
3f508eca 8527 >> count)
951553af
RK
8528 && 0 == (nonzero_bits (XEXP (varop, 0), result_mode)
8529 & nonzero_bits (XEXP (varop, 1),
3f508eca
RK
8530 result_mode)))
8531 {
8532 varop = XEXP (varop, 1);
8533 continue;
8534 }
8535
230d793d
RS
8536 /* (ashift (plus foo C) N) is (plus (ashift foo N) C'). */
8537 if (code == ASHIFT
8538 && GET_CODE (XEXP (varop, 1)) == CONST_INT
8539 && (new = simplify_binary_operation (ASHIFT, result_mode,
8540 XEXP (varop, 1),
5f4f0e22 8541 GEN_INT (count))) != 0
7d171a1e 8542 && GET_CODE(new) == CONST_INT
230d793d
RS
8543 && merge_outer_ops (&outer_op, &outer_const, PLUS,
8544 INTVAL (new), result_mode, &complement_p))
8545 {
8546 varop = XEXP (varop, 0);
8547 continue;
8548 }
8549 break;
8550
8551 case MINUS:
8552 /* If we have (xshiftrt (minus (ashiftrt X C)) X) C)
8553 with C the size of VAROP - 1 and the shift is logical if
8554 STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
8555 we have a (gt X 0) operation. If the shift is arithmetic with
8556 STORE_FLAG_VALUE of 1 or logical with STORE_FLAG_VALUE == -1,
8557 we have a (neg (gt X 0)) operation. */
8558
0802d516
RK
8559 if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
8560 && GET_CODE (XEXP (varop, 0)) == ASHIFTRT
230d793d 8561 && count == GET_MODE_BITSIZE (GET_MODE (varop)) - 1
230d793d
RS
8562 && (code == LSHIFTRT || code == ASHIFTRT)
8563 && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT
8564 && INTVAL (XEXP (XEXP (varop, 0), 1)) == count
8565 && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
8566 {
8567 count = 0;
8568 varop = gen_rtx_combine (GT, GET_MODE (varop), XEXP (varop, 1),
8569 const0_rtx);
8570
8571 if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
8572 varop = gen_rtx_combine (NEG, GET_MODE (varop), varop);
8573
8574 continue;
8575 }
8576 break;
8577 }
8578
8579 break;
8580 }
8581
8582 /* We need to determine what mode to do the shift in. If the shift is
f6789c77
RK
8583 a right shift or ROTATE, we must always do it in the mode it was
8584 originally done in. Otherwise, we can do it in MODE, the widest mode
8585 encountered. The code we care about is that of the shift that will
8586 actually be done, not the shift that was originally requested. */
8587 shift_mode
8588 = (code == ASHIFTRT || code == LSHIFTRT || code == ROTATE
8589 ? result_mode : mode);
230d793d
RS
8590
8591 /* We have now finished analyzing the shift. The result should be
8592 a shift of type CODE with SHIFT_MODE shifting VAROP COUNT places. If
8593 OUTER_OP is non-NIL, it is an operation that needs to be applied
8594 to the result of the shift. OUTER_CONST is the relevant constant,
8595 but we must turn off all bits turned off in the shift.
8596
8597 If we were passed a value for X, see if we can use any pieces of
8598 it. If not, make new rtx. */
8599
8600 if (x && GET_RTX_CLASS (GET_CODE (x)) == '2'
8601 && GET_CODE (XEXP (x, 1)) == CONST_INT
8602 && INTVAL (XEXP (x, 1)) == count)
8603 const_rtx = XEXP (x, 1);
8604 else
5f4f0e22 8605 const_rtx = GEN_INT (count);
230d793d
RS
8606
8607 if (x && GET_CODE (XEXP (x, 0)) == SUBREG
8608 && GET_MODE (XEXP (x, 0)) == shift_mode
8609 && SUBREG_REG (XEXP (x, 0)) == varop)
8610 varop = XEXP (x, 0);
8611 else if (GET_MODE (varop) != shift_mode)
8612 varop = gen_lowpart_for_combine (shift_mode, varop);
8613
0f41302f 8614 /* If we can't make the SUBREG, try to return what we were given. */
230d793d
RS
8615 if (GET_CODE (varop) == CLOBBER)
8616 return x ? x : varop;
8617
8618 new = simplify_binary_operation (code, shift_mode, varop, const_rtx);
8619 if (new != 0)
8620 x = new;
8621 else
8622 {
8623 if (x == 0 || GET_CODE (x) != code || GET_MODE (x) != shift_mode)
8624 x = gen_rtx_combine (code, shift_mode, varop, const_rtx);
8625
8626 SUBST (XEXP (x, 0), varop);
8627 SUBST (XEXP (x, 1), const_rtx);
8628 }
8629
224eeff2
RK
8630 /* If we have an outer operation and we just made a shift, it is
8631 possible that we could have simplified the shift were it not
8632 for the outer operation. So try to do the simplification
8633 recursively. */
8634
8635 if (outer_op != NIL && GET_CODE (x) == code
8636 && GET_CODE (XEXP (x, 1)) == CONST_INT)
8637 x = simplify_shift_const (x, code, shift_mode, XEXP (x, 0),
8638 INTVAL (XEXP (x, 1)));
8639
230d793d
RS
8640 /* If we were doing a LSHIFTRT in a wider mode than it was originally,
8641 turn off all the bits that the shift would have turned off. */
8642 if (orig_code == LSHIFTRT && result_mode != shift_mode)
5f4f0e22 8643 x = simplify_and_const_int (NULL_RTX, shift_mode, x,
230d793d
RS
8644 GET_MODE_MASK (result_mode) >> orig_count);
8645
8646 /* Do the remainder of the processing in RESULT_MODE. */
8647 x = gen_lowpart_for_combine (result_mode, x);
8648
8649 /* If COMPLEMENT_P is set, we have to complement X before doing the outer
8650 operation. */
8651 if (complement_p)
0c1c8ea6 8652 x = gen_unary (NOT, result_mode, result_mode, x);
230d793d
RS
8653
8654 if (outer_op != NIL)
8655 {
5f4f0e22 8656 if (GET_MODE_BITSIZE (result_mode) < HOST_BITS_PER_WIDE_INT)
9fa6d012
TG
8657 {
8658 int width = GET_MODE_BITSIZE (result_mode);
8659
8660 outer_const &= GET_MODE_MASK (result_mode);
8661
8662 /* If this would be an entire word for the target, but is not for
8663 the host, then sign-extend on the host so that the number will
8664 look the same way on the host that it would on the target.
8665
8666 For example, when building a 64 bit alpha hosted 32 bit sparc
8667 targeted compiler, then we want the 32 bit unsigned value -1 to be
8668 represented as a 64 bit value -1, and not as 0x00000000ffffffff.
8669 The later confuses the sparc backend. */
8670
8671 if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT && BITS_PER_WORD == width
8672 && (outer_const & ((HOST_WIDE_INT) 1 << (width - 1))))
8673 outer_const |= ((HOST_WIDE_INT) (-1) << width);
8674 }
230d793d
RS
8675
8676 if (outer_op == AND)
5f4f0e22 8677 x = simplify_and_const_int (NULL_RTX, result_mode, x, outer_const);
230d793d
RS
8678 else if (outer_op == SET)
8679 /* This means that we have determined that the result is
8680 equivalent to a constant. This should be rare. */
5f4f0e22 8681 x = GEN_INT (outer_const);
230d793d 8682 else if (GET_RTX_CLASS (outer_op) == '1')
0c1c8ea6 8683 x = gen_unary (outer_op, result_mode, result_mode, x);
230d793d 8684 else
5f4f0e22 8685 x = gen_binary (outer_op, result_mode, x, GEN_INT (outer_const));
230d793d
RS
8686 }
8687
8688 return x;
8689}
8690\f
8691/* Like recog, but we receive the address of a pointer to a new pattern.
8692 We try to match the rtx that the pointer points to.
8693 If that fails, we may try to modify or replace the pattern,
8694 storing the replacement into the same pointer object.
8695
8696 Modifications include deletion or addition of CLOBBERs.
8697
8698 PNOTES is a pointer to a location where any REG_UNUSED notes added for
8699 the CLOBBERs are placed.
8700
a29ca9db
RK
8701 PADDED_SCRATCHES is set to the number of (clobber (scratch)) patterns
8702 we had to add.
8703
230d793d
RS
8704 The value is the final insn code from the pattern ultimately matched,
8705 or -1. */
8706
8707static int
a29ca9db 8708recog_for_combine (pnewpat, insn, pnotes, padded_scratches)
230d793d
RS
8709 rtx *pnewpat;
8710 rtx insn;
8711 rtx *pnotes;
a29ca9db 8712 int *padded_scratches;
230d793d
RS
8713{
8714 register rtx pat = *pnewpat;
8715 int insn_code_number;
8716 int num_clobbers_to_add = 0;
8717 int i;
8718 rtx notes = 0;
8719
a29ca9db
RK
8720 *padded_scratches = 0;
8721
974f4146
RK
8722 /* If PAT is a PARALLEL, check to see if it contains the CLOBBER
8723 we use to indicate that something didn't match. If we find such a
8724 thing, force rejection. */
d96023cf 8725 if (GET_CODE (pat) == PARALLEL)
974f4146 8726 for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
d96023cf
RK
8727 if (GET_CODE (XVECEXP (pat, 0, i)) == CLOBBER
8728 && XEXP (XVECEXP (pat, 0, i), 0) == const0_rtx)
974f4146
RK
8729 return -1;
8730
230d793d
RS
8731 /* Is the result of combination a valid instruction? */
8732 insn_code_number = recog (pat, insn, &num_clobbers_to_add);
8733
8734 /* If it isn't, there is the possibility that we previously had an insn
8735 that clobbered some register as a side effect, but the combined
8736 insn doesn't need to do that. So try once more without the clobbers
8737 unless this represents an ASM insn. */
8738
8739 if (insn_code_number < 0 && ! check_asm_operands (pat)
8740 && GET_CODE (pat) == PARALLEL)
8741 {
8742 int pos;
8743
8744 for (pos = 0, i = 0; i < XVECLEN (pat, 0); i++)
8745 if (GET_CODE (XVECEXP (pat, 0, i)) != CLOBBER)
8746 {
8747 if (i != pos)
8748 SUBST (XVECEXP (pat, 0, pos), XVECEXP (pat, 0, i));
8749 pos++;
8750 }
8751
8752 SUBST_INT (XVECLEN (pat, 0), pos);
8753
8754 if (pos == 1)
8755 pat = XVECEXP (pat, 0, 0);
8756
8757 insn_code_number = recog (pat, insn, &num_clobbers_to_add);
8758 }
8759
8760 /* If we had any clobbers to add, make a new pattern than contains
8761 them. Then check to make sure that all of them are dead. */
8762 if (num_clobbers_to_add)
8763 {
8764 rtx newpat = gen_rtx (PARALLEL, VOIDmode,
8765 gen_rtvec (GET_CODE (pat) == PARALLEL
8766 ? XVECLEN (pat, 0) + num_clobbers_to_add
8767 : num_clobbers_to_add + 1));
8768
8769 if (GET_CODE (pat) == PARALLEL)
8770 for (i = 0; i < XVECLEN (pat, 0); i++)
8771 XVECEXP (newpat, 0, i) = XVECEXP (pat, 0, i);
8772 else
8773 XVECEXP (newpat, 0, 0) = pat;
8774
8775 add_clobbers (newpat, insn_code_number);
8776
8777 for (i = XVECLEN (newpat, 0) - num_clobbers_to_add;
8778 i < XVECLEN (newpat, 0); i++)
8779 {
8780 if (GET_CODE (XEXP (XVECEXP (newpat, 0, i), 0)) == REG
8781 && ! reg_dead_at_p (XEXP (XVECEXP (newpat, 0, i), 0), insn))
8782 return -1;
a29ca9db
RK
8783 else if (GET_CODE (XEXP (XVECEXP (newpat, 0, i), 0)) == SCRATCH)
8784 (*padded_scratches)++;
230d793d
RS
8785 notes = gen_rtx (EXPR_LIST, REG_UNUSED,
8786 XEXP (XVECEXP (newpat, 0, i), 0), notes);
8787 }
8788 pat = newpat;
8789 }
8790
8791 *pnewpat = pat;
8792 *pnotes = notes;
8793
8794 return insn_code_number;
8795}
8796\f
8797/* Like gen_lowpart but for use by combine. In combine it is not possible
8798 to create any new pseudoregs. However, it is safe to create
8799 invalid memory addresses, because combine will try to recognize
8800 them and all they will do is make the combine attempt fail.
8801
8802 If for some reason this cannot do its job, an rtx
8803 (clobber (const_int 0)) is returned.
8804 An insn containing that will not be recognized. */
8805
8806#undef gen_lowpart
8807
8808static rtx
8809gen_lowpart_for_combine (mode, x)
8810 enum machine_mode mode;
8811 register rtx x;
8812{
8813 rtx result;
8814
8815 if (GET_MODE (x) == mode)
8816 return x;
8817
eae957a8
RK
8818 /* We can only support MODE being wider than a word if X is a
8819 constant integer or has a mode the same size. */
8820
8821 if (GET_MODE_SIZE (mode) > UNITS_PER_WORD
8822 && ! ((GET_MODE (x) == VOIDmode
8823 && (GET_CODE (x) == CONST_INT
8824 || GET_CODE (x) == CONST_DOUBLE))
8825 || GET_MODE_SIZE (GET_MODE (x)) == GET_MODE_SIZE (mode)))
230d793d
RS
8826 return gen_rtx (CLOBBER, GET_MODE (x), const0_rtx);
8827
8828 /* X might be a paradoxical (subreg (mem)). In that case, gen_lowpart
8829 won't know what to do. So we will strip off the SUBREG here and
8830 process normally. */
8831 if (GET_CODE (x) == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
8832 {
8833 x = SUBREG_REG (x);
8834 if (GET_MODE (x) == mode)
8835 return x;
8836 }
8837
8838 result = gen_lowpart_common (mode, x);
64bf47a2
RK
8839 if (result != 0
8840 && GET_CODE (result) == SUBREG
8841 && GET_CODE (SUBREG_REG (result)) == REG
8842 && REGNO (SUBREG_REG (result)) >= FIRST_PSEUDO_REGISTER
8843 && (GET_MODE_SIZE (GET_MODE (result))
8844 != GET_MODE_SIZE (GET_MODE (SUBREG_REG (result)))))
8845 reg_changes_size[REGNO (SUBREG_REG (result))] = 1;
8846
230d793d
RS
8847 if (result)
8848 return result;
8849
8850 if (GET_CODE (x) == MEM)
8851 {
8852 register int offset = 0;
8853 rtx new;
8854
8855 /* Refuse to work on a volatile memory ref or one with a mode-dependent
8856 address. */
8857 if (MEM_VOLATILE_P (x) || mode_dependent_address_p (XEXP (x, 0)))
8858 return gen_rtx (CLOBBER, GET_MODE (x), const0_rtx);
8859
8860 /* If we want to refer to something bigger than the original memref,
8861 generate a perverse subreg instead. That will force a reload
8862 of the original memref X. */
8863 if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (mode))
8864 return gen_rtx (SUBREG, mode, x, 0);
8865
f76b9db2
ILT
8866 if (WORDS_BIG_ENDIAN)
8867 offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
8868 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
8869 if (BYTES_BIG_ENDIAN)
8870 {
8871 /* Adjust the address so that the address-after-the-data is
8872 unchanged. */
8873 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
8874 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
8875 }
230d793d
RS
8876 new = gen_rtx (MEM, mode, plus_constant (XEXP (x, 0), offset));
8877 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
8878 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x);
8879 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
8880 return new;
8881 }
8882
8883 /* If X is a comparison operator, rewrite it in a new mode. This
8884 probably won't match, but may allow further simplifications. */
8885 else if (GET_RTX_CLASS (GET_CODE (x)) == '<')
8886 return gen_rtx_combine (GET_CODE (x), mode, XEXP (x, 0), XEXP (x, 1));
8887
8888 /* If we couldn't simplify X any other way, just enclose it in a
8889 SUBREG. Normally, this SUBREG won't match, but some patterns may
a7c99304 8890 include an explicit SUBREG or we may simplify it further in combine. */
230d793d 8891 else
dfbe1b2f
RK
8892 {
8893 int word = 0;
8894
8895 if (WORDS_BIG_ENDIAN && GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD)
8896 word = ((GET_MODE_SIZE (GET_MODE (x))
8897 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD))
8898 / UNITS_PER_WORD);
8899 return gen_rtx (SUBREG, mode, x, word);
8900 }
230d793d
RS
8901}
8902\f
8903/* Make an rtx expression. This is a subset of gen_rtx and only supports
8904 expressions of 1, 2, or 3 operands, each of which are rtx expressions.
8905
8906 If the identical expression was previously in the insn (in the undobuf),
8907 it will be returned. Only if it is not found will a new expression
8908 be made. */
8909
8910/*VARARGS2*/
8911static rtx
4f90e4a0 8912gen_rtx_combine VPROTO((enum rtx_code code, enum machine_mode mode, ...))
230d793d 8913{
4f90e4a0 8914#ifndef __STDC__
230d793d
RS
8915 enum rtx_code code;
8916 enum machine_mode mode;
4f90e4a0
RK
8917#endif
8918 va_list p;
230d793d
RS
8919 int n_args;
8920 rtx args[3];
8921 int i, j;
8922 char *fmt;
8923 rtx rt;
241cea85 8924 struct undo *undo;
230d793d 8925
4f90e4a0
RK
8926 VA_START (p, mode);
8927
8928#ifndef __STDC__
230d793d
RS
8929 code = va_arg (p, enum rtx_code);
8930 mode = va_arg (p, enum machine_mode);
4f90e4a0
RK
8931#endif
8932
230d793d
RS
8933 n_args = GET_RTX_LENGTH (code);
8934 fmt = GET_RTX_FORMAT (code);
8935
8936 if (n_args == 0 || n_args > 3)
8937 abort ();
8938
8939 /* Get each arg and verify that it is supposed to be an expression. */
8940 for (j = 0; j < n_args; j++)
8941 {
8942 if (*fmt++ != 'e')
8943 abort ();
8944
8945 args[j] = va_arg (p, rtx);
8946 }
8947
8948 /* See if this is in undobuf. Be sure we don't use objects that came
8949 from another insn; this could produce circular rtl structures. */
8950
241cea85
RK
8951 for (undo = undobuf.undos; undo != undobuf.previous_undos; undo = undo->next)
8952 if (!undo->is_int
8953 && GET_CODE (undo->old_contents.r) == code
8954 && GET_MODE (undo->old_contents.r) == mode)
230d793d
RS
8955 {
8956 for (j = 0; j < n_args; j++)
241cea85 8957 if (XEXP (undo->old_contents.r, j) != args[j])
230d793d
RS
8958 break;
8959
8960 if (j == n_args)
241cea85 8961 return undo->old_contents.r;
230d793d
RS
8962 }
8963
8964 /* Otherwise make a new rtx. We know we have 1, 2, or 3 args.
8965 Use rtx_alloc instead of gen_rtx because it's faster on RISC. */
8966 rt = rtx_alloc (code);
8967 PUT_MODE (rt, mode);
8968 XEXP (rt, 0) = args[0];
8969 if (n_args > 1)
8970 {
8971 XEXP (rt, 1) = args[1];
8972 if (n_args > 2)
8973 XEXP (rt, 2) = args[2];
8974 }
8975 return rt;
8976}
8977
8978/* These routines make binary and unary operations by first seeing if they
8979 fold; if not, a new expression is allocated. */
8980
8981static rtx
8982gen_binary (code, mode, op0, op1)
8983 enum rtx_code code;
8984 enum machine_mode mode;
8985 rtx op0, op1;
8986{
8987 rtx result;
1a26b032
RK
8988 rtx tem;
8989
8990 if (GET_RTX_CLASS (code) == 'c'
8991 && (GET_CODE (op0) == CONST_INT
8992 || (CONSTANT_P (op0) && GET_CODE (op1) != CONST_INT)))
8993 tem = op0, op0 = op1, op1 = tem;
230d793d
RS
8994
8995 if (GET_RTX_CLASS (code) == '<')
8996 {
8997 enum machine_mode op_mode = GET_MODE (op0);
9210df58
RK
8998
8999 /* Strip the COMPARE from (REL_OP (compare X Y) 0) to get
0f41302f 9000 just (REL_OP X Y). */
9210df58
RK
9001 if (GET_CODE (op0) == COMPARE && op1 == const0_rtx)
9002 {
9003 op1 = XEXP (op0, 1);
9004 op0 = XEXP (op0, 0);
9005 op_mode = GET_MODE (op0);
9006 }
9007
230d793d
RS
9008 if (op_mode == VOIDmode)
9009 op_mode = GET_MODE (op1);
9010 result = simplify_relational_operation (code, op_mode, op0, op1);
9011 }
9012 else
9013 result = simplify_binary_operation (code, mode, op0, op1);
9014
9015 if (result)
9016 return result;
9017
9018 /* Put complex operands first and constants second. */
9019 if (GET_RTX_CLASS (code) == 'c'
9020 && ((CONSTANT_P (op0) && GET_CODE (op1) != CONST_INT)
9021 || (GET_RTX_CLASS (GET_CODE (op0)) == 'o'
9022 && GET_RTX_CLASS (GET_CODE (op1)) != 'o')
9023 || (GET_CODE (op0) == SUBREG
9024 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (op0))) == 'o'
9025 && GET_RTX_CLASS (GET_CODE (op1)) != 'o')))
9026 return gen_rtx_combine (code, mode, op1, op0);
9027
9028 return gen_rtx_combine (code, mode, op0, op1);
9029}
9030
9031static rtx
0c1c8ea6 9032gen_unary (code, mode, op0_mode, op0)
230d793d 9033 enum rtx_code code;
0c1c8ea6 9034 enum machine_mode mode, op0_mode;
230d793d
RS
9035 rtx op0;
9036{
0c1c8ea6 9037 rtx result = simplify_unary_operation (code, mode, op0, op0_mode);
230d793d
RS
9038
9039 if (result)
9040 return result;
9041
9042 return gen_rtx_combine (code, mode, op0);
9043}
9044\f
9045/* Simplify a comparison between *POP0 and *POP1 where CODE is the
9046 comparison code that will be tested.
9047
9048 The result is a possibly different comparison code to use. *POP0 and
9049 *POP1 may be updated.
9050
9051 It is possible that we might detect that a comparison is either always
9052 true or always false. However, we do not perform general constant
5089e22e 9053 folding in combine, so this knowledge isn't useful. Such tautologies
230d793d
RS
9054 should have been detected earlier. Hence we ignore all such cases. */
9055
9056static enum rtx_code
9057simplify_comparison (code, pop0, pop1)
9058 enum rtx_code code;
9059 rtx *pop0;
9060 rtx *pop1;
9061{
9062 rtx op0 = *pop0;
9063 rtx op1 = *pop1;
9064 rtx tem, tem1;
9065 int i;
9066 enum machine_mode mode, tmode;
9067
9068 /* Try a few ways of applying the same transformation to both operands. */
9069 while (1)
9070 {
3a19aabc
RK
9071#ifndef WORD_REGISTER_OPERATIONS
9072 /* The test below this one won't handle SIGN_EXTENDs on these machines,
9073 so check specially. */
9074 if (code != GTU && code != GEU && code != LTU && code != LEU
9075 && GET_CODE (op0) == ASHIFTRT && GET_CODE (op1) == ASHIFTRT
9076 && GET_CODE (XEXP (op0, 0)) == ASHIFT
9077 && GET_CODE (XEXP (op1, 0)) == ASHIFT
9078 && GET_CODE (XEXP (XEXP (op0, 0), 0)) == SUBREG
9079 && GET_CODE (XEXP (XEXP (op1, 0), 0)) == SUBREG
9080 && (GET_MODE (SUBREG_REG (XEXP (XEXP (op0, 0), 0)))
ad25ba17 9081 == GET_MODE (SUBREG_REG (XEXP (XEXP (op1, 0), 0))))
3a19aabc
RK
9082 && GET_CODE (XEXP (op0, 1)) == CONST_INT
9083 && GET_CODE (XEXP (op1, 1)) == CONST_INT
9084 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
9085 && GET_CODE (XEXP (XEXP (op1, 0), 1)) == CONST_INT
9086 && INTVAL (XEXP (op0, 1)) == INTVAL (XEXP (op1, 1))
9087 && INTVAL (XEXP (op0, 1)) == INTVAL (XEXP (XEXP (op0, 0), 1))
9088 && INTVAL (XEXP (op0, 1)) == INTVAL (XEXP (XEXP (op1, 0), 1))
9089 && (INTVAL (XEXP (op0, 1))
9090 == (GET_MODE_BITSIZE (GET_MODE (op0))
9091 - (GET_MODE_BITSIZE
9092 (GET_MODE (SUBREG_REG (XEXP (XEXP (op0, 0), 0))))))))
9093 {
9094 op0 = SUBREG_REG (XEXP (XEXP (op0, 0), 0));
9095 op1 = SUBREG_REG (XEXP (XEXP (op1, 0), 0));
9096 }
9097#endif
9098
230d793d
RS
9099 /* If both operands are the same constant shift, see if we can ignore the
9100 shift. We can if the shift is a rotate or if the bits shifted out of
951553af 9101 this shift are known to be zero for both inputs and if the type of
230d793d 9102 comparison is compatible with the shift. */
67232b23
RK
9103 if (GET_CODE (op0) == GET_CODE (op1)
9104 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
9105 && ((GET_CODE (op0) == ROTATE && (code == NE || code == EQ))
45620ed4 9106 || ((GET_CODE (op0) == LSHIFTRT || GET_CODE (op0) == ASHIFT)
67232b23
RK
9107 && (code != GT && code != LT && code != GE && code != LE))
9108 || (GET_CODE (op0) == ASHIFTRT
9109 && (code != GTU && code != LTU
9110 && code != GEU && code != GEU)))
9111 && GET_CODE (XEXP (op0, 1)) == CONST_INT
9112 && INTVAL (XEXP (op0, 1)) >= 0
9113 && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
9114 && XEXP (op0, 1) == XEXP (op1, 1))
230d793d
RS
9115 {
9116 enum machine_mode mode = GET_MODE (op0);
5f4f0e22 9117 unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
230d793d
RS
9118 int shift_count = INTVAL (XEXP (op0, 1));
9119
9120 if (GET_CODE (op0) == LSHIFTRT || GET_CODE (op0) == ASHIFTRT)
9121 mask &= (mask >> shift_count) << shift_count;
45620ed4 9122 else if (GET_CODE (op0) == ASHIFT)
230d793d
RS
9123 mask = (mask & (mask << shift_count)) >> shift_count;
9124
951553af
RK
9125 if ((nonzero_bits (XEXP (op0, 0), mode) & ~ mask) == 0
9126 && (nonzero_bits (XEXP (op1, 0), mode) & ~ mask) == 0)
230d793d
RS
9127 op0 = XEXP (op0, 0), op1 = XEXP (op1, 0);
9128 else
9129 break;
9130 }
9131
9132 /* If both operands are AND's of a paradoxical SUBREG by constant, the
9133 SUBREGs are of the same mode, and, in both cases, the AND would
9134 be redundant if the comparison was done in the narrower mode,
9135 do the comparison in the narrower mode (e.g., we are AND'ing with 1
951553af
RK
9136 and the operand's possibly nonzero bits are 0xffffff01; in that case
9137 if we only care about QImode, we don't need the AND). This case
9138 occurs if the output mode of an scc insn is not SImode and
7e4dc511
RK
9139 STORE_FLAG_VALUE == 1 (e.g., the 386).
9140
9141 Similarly, check for a case where the AND's are ZERO_EXTEND
9142 operations from some narrower mode even though a SUBREG is not
9143 present. */
230d793d
RS
9144
9145 else if (GET_CODE (op0) == AND && GET_CODE (op1) == AND
9146 && GET_CODE (XEXP (op0, 1)) == CONST_INT
7e4dc511 9147 && GET_CODE (XEXP (op1, 1)) == CONST_INT)
230d793d 9148 {
7e4dc511
RK
9149 rtx inner_op0 = XEXP (op0, 0);
9150 rtx inner_op1 = XEXP (op1, 0);
9151 HOST_WIDE_INT c0 = INTVAL (XEXP (op0, 1));
9152 HOST_WIDE_INT c1 = INTVAL (XEXP (op1, 1));
9153 int changed = 0;
9154
9155 if (GET_CODE (inner_op0) == SUBREG && GET_CODE (inner_op1) == SUBREG
9156 && (GET_MODE_SIZE (GET_MODE (inner_op0))
9157 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (inner_op0))))
9158 && (GET_MODE (SUBREG_REG (inner_op0))
9159 == GET_MODE (SUBREG_REG (inner_op1)))
9160 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0)))
9161 <= HOST_BITS_PER_WIDE_INT)
01c82bbb
RK
9162 && (0 == ((~c0) & nonzero_bits (SUBREG_REG (inner_op0),
9163 GET_MODE (SUBREG_REG (op0)))))
9164 && (0 == ((~c1) & nonzero_bits (SUBREG_REG (inner_op1),
9165 GET_MODE (SUBREG_REG (inner_op1))))))
7e4dc511
RK
9166 {
9167 op0 = SUBREG_REG (inner_op0);
9168 op1 = SUBREG_REG (inner_op1);
9169
9170 /* The resulting comparison is always unsigned since we masked
0f41302f 9171 off the original sign bit. */
7e4dc511
RK
9172 code = unsigned_condition (code);
9173
9174 changed = 1;
9175 }
230d793d 9176
7e4dc511
RK
9177 else if (c0 == c1)
9178 for (tmode = GET_CLASS_NARROWEST_MODE
9179 (GET_MODE_CLASS (GET_MODE (op0)));
9180 tmode != GET_MODE (op0); tmode = GET_MODE_WIDER_MODE (tmode))
9181 if (c0 == GET_MODE_MASK (tmode))
9182 {
9183 op0 = gen_lowpart_for_combine (tmode, inner_op0);
9184 op1 = gen_lowpart_for_combine (tmode, inner_op1);
66415c8b 9185 code = unsigned_condition (code);
7e4dc511
RK
9186 changed = 1;
9187 break;
9188 }
9189
9190 if (! changed)
9191 break;
230d793d 9192 }
3a19aabc 9193
ad25ba17
RK
9194 /* If both operands are NOT, we can strip off the outer operation
9195 and adjust the comparison code for swapped operands; similarly for
9196 NEG, except that this must be an equality comparison. */
9197 else if ((GET_CODE (op0) == NOT && GET_CODE (op1) == NOT)
9198 || (GET_CODE (op0) == NEG && GET_CODE (op1) == NEG
9199 && (code == EQ || code == NE)))
9200 op0 = XEXP (op0, 0), op1 = XEXP (op1, 0), code = swap_condition (code);
3a19aabc 9201
230d793d
RS
9202 else
9203 break;
9204 }
9205
9206 /* If the first operand is a constant, swap the operands and adjust the
3aceff0d
RK
9207 comparison code appropriately, but don't do this if the second operand
9208 is already a constant integer. */
9209 if (CONSTANT_P (op0) && GET_CODE (op1) != CONST_INT)
230d793d
RS
9210 {
9211 tem = op0, op0 = op1, op1 = tem;
9212 code = swap_condition (code);
9213 }
9214
9215 /* We now enter a loop during which we will try to simplify the comparison.
9216 For the most part, we only are concerned with comparisons with zero,
9217 but some things may really be comparisons with zero but not start
9218 out looking that way. */
9219
9220 while (GET_CODE (op1) == CONST_INT)
9221 {
9222 enum machine_mode mode = GET_MODE (op0);
9223 int mode_width = GET_MODE_BITSIZE (mode);
5f4f0e22 9224 unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
230d793d
RS
9225 int equality_comparison_p;
9226 int sign_bit_comparison_p;
9227 int unsigned_comparison_p;
5f4f0e22 9228 HOST_WIDE_INT const_op;
230d793d
RS
9229
9230 /* We only want to handle integral modes. This catches VOIDmode,
9231 CCmode, and the floating-point modes. An exception is that we
9232 can handle VOIDmode if OP0 is a COMPARE or a comparison
9233 operation. */
9234
9235 if (GET_MODE_CLASS (mode) != MODE_INT
9236 && ! (mode == VOIDmode
9237 && (GET_CODE (op0) == COMPARE
9238 || GET_RTX_CLASS (GET_CODE (op0)) == '<')))
9239 break;
9240
9241 /* Get the constant we are comparing against and turn off all bits
9242 not on in our mode. */
9243 const_op = INTVAL (op1);
5f4f0e22 9244 if (mode_width <= HOST_BITS_PER_WIDE_INT)
4803a34a 9245 const_op &= mask;
230d793d
RS
9246
9247 /* If we are comparing against a constant power of two and the value
951553af 9248 being compared can only have that single bit nonzero (e.g., it was
230d793d
RS
9249 `and'ed with that bit), we can replace this with a comparison
9250 with zero. */
9251 if (const_op
9252 && (code == EQ || code == NE || code == GE || code == GEU
9253 || code == LT || code == LTU)
5f4f0e22 9254 && mode_width <= HOST_BITS_PER_WIDE_INT
230d793d 9255 && exact_log2 (const_op) >= 0
951553af 9256 && nonzero_bits (op0, mode) == const_op)
230d793d
RS
9257 {
9258 code = (code == EQ || code == GE || code == GEU ? NE : EQ);
9259 op1 = const0_rtx, const_op = 0;
9260 }
9261
d0ab8cd3
RK
9262 /* Similarly, if we are comparing a value known to be either -1 or
9263 0 with -1, change it to the opposite comparison against zero. */
9264
9265 if (const_op == -1
9266 && (code == EQ || code == NE || code == GT || code == LE
9267 || code == GEU || code == LTU)
9268 && num_sign_bit_copies (op0, mode) == mode_width)
9269 {
9270 code = (code == EQ || code == LE || code == GEU ? NE : EQ);
9271 op1 = const0_rtx, const_op = 0;
9272 }
9273
230d793d 9274 /* Do some canonicalizations based on the comparison code. We prefer
4803a34a
RK
9275 comparisons against zero and then prefer equality comparisons.
9276 If we can reduce the size of a constant, we will do that too. */
230d793d
RS
9277
9278 switch (code)
9279 {
9280 case LT:
4803a34a
RK
9281 /* < C is equivalent to <= (C - 1) */
9282 if (const_op > 0)
230d793d 9283 {
4803a34a 9284 const_op -= 1;
5f4f0e22 9285 op1 = GEN_INT (const_op);
230d793d
RS
9286 code = LE;
9287 /* ... fall through to LE case below. */
9288 }
9289 else
9290 break;
9291
9292 case LE:
4803a34a
RK
9293 /* <= C is equivalent to < (C + 1); we do this for C < 0 */
9294 if (const_op < 0)
9295 {
9296 const_op += 1;
5f4f0e22 9297 op1 = GEN_INT (const_op);
4803a34a
RK
9298 code = LT;
9299 }
230d793d
RS
9300
9301 /* If we are doing a <= 0 comparison on a value known to have
9302 a zero sign bit, we can replace this with == 0. */
9303 else if (const_op == 0
5f4f0e22 9304 && mode_width <= HOST_BITS_PER_WIDE_INT
951553af 9305 && (nonzero_bits (op0, mode)
5f4f0e22 9306 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
230d793d
RS
9307 code = EQ;
9308 break;
9309
9310 case GE:
0f41302f 9311 /* >= C is equivalent to > (C - 1). */
4803a34a 9312 if (const_op > 0)
230d793d 9313 {
4803a34a 9314 const_op -= 1;
5f4f0e22 9315 op1 = GEN_INT (const_op);
230d793d
RS
9316 code = GT;
9317 /* ... fall through to GT below. */
9318 }
9319 else
9320 break;
9321
9322 case GT:
4803a34a
RK
9323 /* > C is equivalent to >= (C + 1); we do this for C < 0. */
9324 if (const_op < 0)
9325 {
9326 const_op += 1;
5f4f0e22 9327 op1 = GEN_INT (const_op);
4803a34a
RK
9328 code = GE;
9329 }
230d793d
RS
9330
9331 /* If we are doing a > 0 comparison on a value known to have
9332 a zero sign bit, we can replace this with != 0. */
9333 else if (const_op == 0
5f4f0e22 9334 && mode_width <= HOST_BITS_PER_WIDE_INT
951553af 9335 && (nonzero_bits (op0, mode)
5f4f0e22 9336 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
230d793d
RS
9337 code = NE;
9338 break;
9339
230d793d 9340 case LTU:
4803a34a
RK
9341 /* < C is equivalent to <= (C - 1). */
9342 if (const_op > 0)
9343 {
9344 const_op -= 1;
5f4f0e22 9345 op1 = GEN_INT (const_op);
4803a34a 9346 code = LEU;
0f41302f 9347 /* ... fall through ... */
4803a34a 9348 }
d0ab8cd3
RK
9349
9350 /* (unsigned) < 0x80000000 is equivalent to >= 0. */
f77aada2
JW
9351 else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
9352 && (const_op == (HOST_WIDE_INT) 1 << (mode_width - 1)))
d0ab8cd3
RK
9353 {
9354 const_op = 0, op1 = const0_rtx;
9355 code = GE;
9356 break;
9357 }
4803a34a
RK
9358 else
9359 break;
230d793d
RS
9360
9361 case LEU:
9362 /* unsigned <= 0 is equivalent to == 0 */
9363 if (const_op == 0)
9364 code = EQ;
d0ab8cd3 9365
0f41302f 9366 /* (unsigned) <= 0x7fffffff is equivalent to >= 0. */
f77aada2
JW
9367 else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
9368 && (const_op == ((HOST_WIDE_INT) 1 << (mode_width - 1)) - 1))
d0ab8cd3
RK
9369 {
9370 const_op = 0, op1 = const0_rtx;
9371 code = GE;
9372 }
230d793d
RS
9373 break;
9374
4803a34a
RK
9375 case GEU:
9376 /* (unsigned) >= C is equivalent to > (C - 1). */
9377 if (const_op > 1)
9378 {
9379 const_op -= 1;
5f4f0e22 9380 op1 = GEN_INT (const_op);
4803a34a 9381 code = GTU;
0f41302f 9382 /* ... fall through ... */
4803a34a 9383 }
d0ab8cd3
RK
9384
9385 /* (unsigned) >= 0x80000000 is equivalent to < 0. */
f77aada2
JW
9386 else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
9387 && (const_op == (HOST_WIDE_INT) 1 << (mode_width - 1)))
d0ab8cd3
RK
9388 {
9389 const_op = 0, op1 = const0_rtx;
9390 code = LT;
8b2e69e1 9391 break;
d0ab8cd3 9392 }
4803a34a
RK
9393 else
9394 break;
9395
230d793d
RS
9396 case GTU:
9397 /* unsigned > 0 is equivalent to != 0 */
9398 if (const_op == 0)
9399 code = NE;
d0ab8cd3
RK
9400
9401 /* (unsigned) > 0x7fffffff is equivalent to < 0. */
f77aada2
JW
9402 else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
9403 && (const_op == ((HOST_WIDE_INT) 1 << (mode_width - 1)) - 1))
d0ab8cd3
RK
9404 {
9405 const_op = 0, op1 = const0_rtx;
9406 code = LT;
9407 }
230d793d
RS
9408 break;
9409 }
9410
9411 /* Compute some predicates to simplify code below. */
9412
9413 equality_comparison_p = (code == EQ || code == NE);
9414 sign_bit_comparison_p = ((code == LT || code == GE) && const_op == 0);
9415 unsigned_comparison_p = (code == LTU || code == LEU || code == GTU
9416 || code == LEU);
9417
6139ff20
RK
9418 /* If this is a sign bit comparison and we can do arithmetic in
9419 MODE, say that we will only be needing the sign bit of OP0. */
9420 if (sign_bit_comparison_p
9421 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
9422 op0 = force_to_mode (op0, mode,
9423 ((HOST_WIDE_INT) 1
9424 << (GET_MODE_BITSIZE (mode) - 1)),
e3d616e3 9425 NULL_RTX, 0);
6139ff20 9426
230d793d
RS
9427 /* Now try cases based on the opcode of OP0. If none of the cases
9428 does a "continue", we exit this loop immediately after the
9429 switch. */
9430
9431 switch (GET_CODE (op0))
9432 {
9433 case ZERO_EXTRACT:
9434 /* If we are extracting a single bit from a variable position in
9435 a constant that has only a single bit set and are comparing it
9436 with zero, we can convert this into an equality comparison
d7cd794f 9437 between the position and the location of the single bit. */
230d793d 9438
230d793d
RS
9439 if (GET_CODE (XEXP (op0, 0)) == CONST_INT
9440 && XEXP (op0, 1) == const1_rtx
9441 && equality_comparison_p && const_op == 0
d7cd794f 9442 && (i = exact_log2 (INTVAL (XEXP (op0, 0)))) >= 0)
230d793d 9443 {
f76b9db2 9444 if (BITS_BIG_ENDIAN)
d7cd794f 9445#ifdef HAVE_extzv
f76b9db2
ILT
9446 i = (GET_MODE_BITSIZE
9447 (insn_operand_mode[(int) CODE_FOR_extzv][1]) - 1 - i);
d7cd794f
RK
9448#else
9449 i = BITS_PER_WORD - 1 - i;
230d793d
RS
9450#endif
9451
9452 op0 = XEXP (op0, 2);
5f4f0e22 9453 op1 = GEN_INT (i);
230d793d
RS
9454 const_op = i;
9455
9456 /* Result is nonzero iff shift count is equal to I. */
9457 code = reverse_condition (code);
9458 continue;
9459 }
230d793d 9460
0f41302f 9461 /* ... fall through ... */
230d793d
RS
9462
9463 case SIGN_EXTRACT:
9464 tem = expand_compound_operation (op0);
9465 if (tem != op0)
9466 {
9467 op0 = tem;
9468 continue;
9469 }
9470 break;
9471
9472 case NOT:
9473 /* If testing for equality, we can take the NOT of the constant. */
9474 if (equality_comparison_p
9475 && (tem = simplify_unary_operation (NOT, mode, op1, mode)) != 0)
9476 {
9477 op0 = XEXP (op0, 0);
9478 op1 = tem;
9479 continue;
9480 }
9481
9482 /* If just looking at the sign bit, reverse the sense of the
9483 comparison. */
9484 if (sign_bit_comparison_p)
9485 {
9486 op0 = XEXP (op0, 0);
9487 code = (code == GE ? LT : GE);
9488 continue;
9489 }
9490 break;
9491
9492 case NEG:
9493 /* If testing for equality, we can take the NEG of the constant. */
9494 if (equality_comparison_p
9495 && (tem = simplify_unary_operation (NEG, mode, op1, mode)) != 0)
9496 {
9497 op0 = XEXP (op0, 0);
9498 op1 = tem;
9499 continue;
9500 }
9501
9502 /* The remaining cases only apply to comparisons with zero. */
9503 if (const_op != 0)
9504 break;
9505
9506 /* When X is ABS or is known positive,
9507 (neg X) is < 0 if and only if X != 0. */
9508
9509 if (sign_bit_comparison_p
9510 && (GET_CODE (XEXP (op0, 0)) == ABS
5f4f0e22 9511 || (mode_width <= HOST_BITS_PER_WIDE_INT
951553af 9512 && (nonzero_bits (XEXP (op0, 0), mode)
5f4f0e22 9513 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)))
230d793d
RS
9514 {
9515 op0 = XEXP (op0, 0);
9516 code = (code == LT ? NE : EQ);
9517 continue;
9518 }
9519
3bed8141 9520 /* If we have NEG of something whose two high-order bits are the
0f41302f 9521 same, we know that "(-a) < 0" is equivalent to "a > 0". */
3bed8141 9522 if (num_sign_bit_copies (op0, mode) >= 2)
230d793d
RS
9523 {
9524 op0 = XEXP (op0, 0);
9525 code = swap_condition (code);
9526 continue;
9527 }
9528 break;
9529
9530 case ROTATE:
9531 /* If we are testing equality and our count is a constant, we
9532 can perform the inverse operation on our RHS. */
9533 if (equality_comparison_p && GET_CODE (XEXP (op0, 1)) == CONST_INT
9534 && (tem = simplify_binary_operation (ROTATERT, mode,
9535 op1, XEXP (op0, 1))) != 0)
9536 {
9537 op0 = XEXP (op0, 0);
9538 op1 = tem;
9539 continue;
9540 }
9541
9542 /* If we are doing a < 0 or >= 0 comparison, it means we are testing
9543 a particular bit. Convert it to an AND of a constant of that
9544 bit. This will be converted into a ZERO_EXTRACT. */
9545 if (const_op == 0 && sign_bit_comparison_p
9546 && GET_CODE (XEXP (op0, 1)) == CONST_INT
5f4f0e22 9547 && mode_width <= HOST_BITS_PER_WIDE_INT)
230d793d 9548 {
5f4f0e22
CH
9549 op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
9550 ((HOST_WIDE_INT) 1
9551 << (mode_width - 1
9552 - INTVAL (XEXP (op0, 1)))));
230d793d
RS
9553 code = (code == LT ? NE : EQ);
9554 continue;
9555 }
9556
0f41302f 9557 /* ... fall through ... */
230d793d
RS
9558
9559 case ABS:
9560 /* ABS is ignorable inside an equality comparison with zero. */
9561 if (const_op == 0 && equality_comparison_p)
9562 {
9563 op0 = XEXP (op0, 0);
9564 continue;
9565 }
9566 break;
9567
9568
9569 case SIGN_EXTEND:
9570 /* Can simplify (compare (zero/sign_extend FOO) CONST)
9571 to (compare FOO CONST) if CONST fits in FOO's mode and we
9572 are either testing inequality or have an unsigned comparison
9573 with ZERO_EXTEND or a signed comparison with SIGN_EXTEND. */
9574 if (! unsigned_comparison_p
9575 && (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
5f4f0e22
CH
9576 <= HOST_BITS_PER_WIDE_INT)
9577 && ((unsigned HOST_WIDE_INT) const_op
9578 < (((HOST_WIDE_INT) 1
9579 << (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0))) - 1)))))
230d793d
RS
9580 {
9581 op0 = XEXP (op0, 0);
9582 continue;
9583 }
9584 break;
9585
9586 case SUBREG:
a687e897 9587 /* Check for the case where we are comparing A - C1 with C2,
abc95ed3 9588 both constants are smaller than 1/2 the maximum positive
a687e897
RK
9589 value in MODE, and the comparison is equality or unsigned.
9590 In that case, if A is either zero-extended to MODE or has
9591 sufficient sign bits so that the high-order bit in MODE
9592 is a copy of the sign in the inner mode, we can prove that it is
9593 safe to do the operation in the wider mode. This simplifies
9594 many range checks. */
9595
9596 if (mode_width <= HOST_BITS_PER_WIDE_INT
9597 && subreg_lowpart_p (op0)
9598 && GET_CODE (SUBREG_REG (op0)) == PLUS
9599 && GET_CODE (XEXP (SUBREG_REG (op0), 1)) == CONST_INT
9600 && INTVAL (XEXP (SUBREG_REG (op0), 1)) < 0
9601 && (- INTVAL (XEXP (SUBREG_REG (op0), 1))
9602 < GET_MODE_MASK (mode) / 2)
adb7a1cb 9603 && (unsigned HOST_WIDE_INT) const_op < GET_MODE_MASK (mode) / 2
951553af
RK
9604 && (0 == (nonzero_bits (XEXP (SUBREG_REG (op0), 0),
9605 GET_MODE (SUBREG_REG (op0)))
a687e897
RK
9606 & ~ GET_MODE_MASK (mode))
9607 || (num_sign_bit_copies (XEXP (SUBREG_REG (op0), 0),
9608 GET_MODE (SUBREG_REG (op0)))
9609 > (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0)))
9610 - GET_MODE_BITSIZE (mode)))))
9611 {
9612 op0 = SUBREG_REG (op0);
9613 continue;
9614 }
9615
fe0cf571
RK
9616 /* If the inner mode is narrower and we are extracting the low part,
9617 we can treat the SUBREG as if it were a ZERO_EXTEND. */
9618 if (subreg_lowpart_p (op0)
89f1c7f2
RS
9619 && GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0))) < mode_width)
9620 /* Fall through */ ;
9621 else
230d793d
RS
9622 break;
9623
0f41302f 9624 /* ... fall through ... */
230d793d
RS
9625
9626 case ZERO_EXTEND:
9627 if ((unsigned_comparison_p || equality_comparison_p)
9628 && (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
5f4f0e22
CH
9629 <= HOST_BITS_PER_WIDE_INT)
9630 && ((unsigned HOST_WIDE_INT) const_op
230d793d
RS
9631 < GET_MODE_MASK (GET_MODE (XEXP (op0, 0)))))
9632 {
9633 op0 = XEXP (op0, 0);
9634 continue;
9635 }
9636 break;
9637
9638 case PLUS:
20fdd649 9639 /* (eq (plus X A) B) -> (eq X (minus B A)). We can only do
5089e22e 9640 this for equality comparisons due to pathological cases involving
230d793d 9641 overflows. */
20fdd649
RK
9642 if (equality_comparison_p
9643 && 0 != (tem = simplify_binary_operation (MINUS, mode,
9644 op1, XEXP (op0, 1))))
230d793d
RS
9645 {
9646 op0 = XEXP (op0, 0);
9647 op1 = tem;
9648 continue;
9649 }
9650
9651 /* (plus (abs X) (const_int -1)) is < 0 if and only if X == 0. */
9652 if (const_op == 0 && XEXP (op0, 1) == constm1_rtx
9653 && GET_CODE (XEXP (op0, 0)) == ABS && sign_bit_comparison_p)
9654 {
9655 op0 = XEXP (XEXP (op0, 0), 0);
9656 code = (code == LT ? EQ : NE);
9657 continue;
9658 }
9659 break;
9660
9661 case MINUS:
20fdd649
RK
9662 /* (eq (minus A B) C) -> (eq A (plus B C)) or
9663 (eq B (minus A C)), whichever simplifies. We can only do
9664 this for equality comparisons due to pathological cases involving
9665 overflows. */
9666 if (equality_comparison_p
9667 && 0 != (tem = simplify_binary_operation (PLUS, mode,
9668 XEXP (op0, 1), op1)))
9669 {
9670 op0 = XEXP (op0, 0);
9671 op1 = tem;
9672 continue;
9673 }
9674
9675 if (equality_comparison_p
9676 && 0 != (tem = simplify_binary_operation (MINUS, mode,
9677 XEXP (op0, 0), op1)))
9678 {
9679 op0 = XEXP (op0, 1);
9680 op1 = tem;
9681 continue;
9682 }
9683
230d793d
RS
9684 /* The sign bit of (minus (ashiftrt X C) X), where C is the number
9685 of bits in X minus 1, is one iff X > 0. */
9686 if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == ASHIFTRT
9687 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
9688 && INTVAL (XEXP (XEXP (op0, 0), 1)) == mode_width - 1
9689 && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
9690 {
9691 op0 = XEXP (op0, 1);
9692 code = (code == GE ? LE : GT);
9693 continue;
9694 }
9695 break;
9696
9697 case XOR:
9698 /* (eq (xor A B) C) -> (eq A (xor B C)). This is a simplification
9699 if C is zero or B is a constant. */
9700 if (equality_comparison_p
9701 && 0 != (tem = simplify_binary_operation (XOR, mode,
9702 XEXP (op0, 1), op1)))
9703 {
9704 op0 = XEXP (op0, 0);
9705 op1 = tem;
9706 continue;
9707 }
9708 break;
9709
9710 case EQ: case NE:
9711 case LT: case LTU: case LE: case LEU:
9712 case GT: case GTU: case GE: case GEU:
9713 /* We can't do anything if OP0 is a condition code value, rather
9714 than an actual data value. */
9715 if (const_op != 0
9716#ifdef HAVE_cc0
9717 || XEXP (op0, 0) == cc0_rtx
9718#endif
9719 || GET_MODE_CLASS (GET_MODE (XEXP (op0, 0))) == MODE_CC)
9720 break;
9721
9722 /* Get the two operands being compared. */
9723 if (GET_CODE (XEXP (op0, 0)) == COMPARE)
9724 tem = XEXP (XEXP (op0, 0), 0), tem1 = XEXP (XEXP (op0, 0), 1);
9725 else
9726 tem = XEXP (op0, 0), tem1 = XEXP (op0, 1);
9727
9728 /* Check for the cases where we simply want the result of the
9729 earlier test or the opposite of that result. */
9730 if (code == NE
9731 || (code == EQ && reversible_comparison_p (op0))
5f4f0e22 9732 || (GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
3f508eca 9733 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
230d793d 9734 && (STORE_FLAG_VALUE
5f4f0e22
CH
9735 & (((HOST_WIDE_INT) 1
9736 << (GET_MODE_BITSIZE (GET_MODE (op0)) - 1))))
230d793d
RS
9737 && (code == LT
9738 || (code == GE && reversible_comparison_p (op0)))))
9739 {
9740 code = (code == LT || code == NE
9741 ? GET_CODE (op0) : reverse_condition (GET_CODE (op0)));
9742 op0 = tem, op1 = tem1;
9743 continue;
9744 }
9745 break;
9746
9747 case IOR:
9748 /* The sign bit of (ior (plus X (const_int -1)) X) is non-zero
9749 iff X <= 0. */
9750 if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == PLUS
9751 && XEXP (XEXP (op0, 0), 1) == constm1_rtx
9752 && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
9753 {
9754 op0 = XEXP (op0, 1);
9755 code = (code == GE ? GT : LE);
9756 continue;
9757 }
9758 break;
9759
9760 case AND:
9761 /* Convert (and (xshift 1 X) Y) to (and (lshiftrt Y X) 1). This
9762 will be converted to a ZERO_EXTRACT later. */
9763 if (const_op == 0 && equality_comparison_p
45620ed4 9764 && GET_CODE (XEXP (op0, 0)) == ASHIFT
230d793d
RS
9765 && XEXP (XEXP (op0, 0), 0) == const1_rtx)
9766 {
9767 op0 = simplify_and_const_int
9768 (op0, mode, gen_rtx_combine (LSHIFTRT, mode,
9769 XEXP (op0, 1),
9770 XEXP (XEXP (op0, 0), 1)),
5f4f0e22 9771 (HOST_WIDE_INT) 1);
230d793d
RS
9772 continue;
9773 }
9774
9775 /* If we are comparing (and (lshiftrt X C1) C2) for equality with
9776 zero and X is a comparison and C1 and C2 describe only bits set
9777 in STORE_FLAG_VALUE, we can compare with X. */
9778 if (const_op == 0 && equality_comparison_p
5f4f0e22 9779 && mode_width <= HOST_BITS_PER_WIDE_INT
230d793d
RS
9780 && GET_CODE (XEXP (op0, 1)) == CONST_INT
9781 && GET_CODE (XEXP (op0, 0)) == LSHIFTRT
9782 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
9783 && INTVAL (XEXP (XEXP (op0, 0), 1)) >= 0
5f4f0e22 9784 && INTVAL (XEXP (XEXP (op0, 0), 1)) < HOST_BITS_PER_WIDE_INT)
230d793d
RS
9785 {
9786 mask = ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode))
9787 << INTVAL (XEXP (XEXP (op0, 0), 1)));
9788 if ((~ STORE_FLAG_VALUE & mask) == 0
9789 && (GET_RTX_CLASS (GET_CODE (XEXP (XEXP (op0, 0), 0))) == '<'
9790 || ((tem = get_last_value (XEXP (XEXP (op0, 0), 0))) != 0
9791 && GET_RTX_CLASS (GET_CODE (tem)) == '<')))
9792 {
9793 op0 = XEXP (XEXP (op0, 0), 0);
9794 continue;
9795 }
9796 }
9797
9798 /* If we are doing an equality comparison of an AND of a bit equal
9799 to the sign bit, replace this with a LT or GE comparison of
9800 the underlying value. */
9801 if (equality_comparison_p
9802 && const_op == 0
9803 && GET_CODE (XEXP (op0, 1)) == CONST_INT
5f4f0e22 9804 && mode_width <= HOST_BITS_PER_WIDE_INT
230d793d 9805 && ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode))
5f4f0e22 9806 == (HOST_WIDE_INT) 1 << (mode_width - 1)))
230d793d
RS
9807 {
9808 op0 = XEXP (op0, 0);
9809 code = (code == EQ ? GE : LT);
9810 continue;
9811 }
9812
9813 /* If this AND operation is really a ZERO_EXTEND from a narrower
9814 mode, the constant fits within that mode, and this is either an
9815 equality or unsigned comparison, try to do this comparison in
9816 the narrower mode. */
9817 if ((equality_comparison_p || unsigned_comparison_p)
9818 && GET_CODE (XEXP (op0, 1)) == CONST_INT
9819 && (i = exact_log2 ((INTVAL (XEXP (op0, 1))
9820 & GET_MODE_MASK (mode))
9821 + 1)) >= 0
9822 && const_op >> i == 0
9823 && (tmode = mode_for_size (i, MODE_INT, 1)) != BLKmode)
9824 {
9825 op0 = gen_lowpart_for_combine (tmode, XEXP (op0, 0));
9826 continue;
9827 }
9828 break;
9829
9830 case ASHIFT:
45620ed4 9831 /* If we have (compare (ashift FOO N) (const_int C)) and
230d793d 9832 the high order N bits of FOO (N+1 if an inequality comparison)
951553af 9833 are known to be zero, we can do this by comparing FOO with C
230d793d
RS
9834 shifted right N bits so long as the low-order N bits of C are
9835 zero. */
9836 if (GET_CODE (XEXP (op0, 1)) == CONST_INT
9837 && INTVAL (XEXP (op0, 1)) >= 0
9838 && ((INTVAL (XEXP (op0, 1)) + ! equality_comparison_p)
5f4f0e22
CH
9839 < HOST_BITS_PER_WIDE_INT)
9840 && ((const_op
34785d05 9841 & (((HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1))) - 1)) == 0)
5f4f0e22 9842 && mode_width <= HOST_BITS_PER_WIDE_INT
951553af 9843 && (nonzero_bits (XEXP (op0, 0), mode)
230d793d
RS
9844 & ~ (mask >> (INTVAL (XEXP (op0, 1))
9845 + ! equality_comparison_p))) == 0)
9846 {
9847 const_op >>= INTVAL (XEXP (op0, 1));
5f4f0e22 9848 op1 = GEN_INT (const_op);
230d793d
RS
9849 op0 = XEXP (op0, 0);
9850 continue;
9851 }
9852
dfbe1b2f 9853 /* If we are doing a sign bit comparison, it means we are testing
230d793d 9854 a particular bit. Convert it to the appropriate AND. */
dfbe1b2f 9855 if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 1)) == CONST_INT
5f4f0e22 9856 && mode_width <= HOST_BITS_PER_WIDE_INT)
230d793d 9857 {
5f4f0e22
CH
9858 op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
9859 ((HOST_WIDE_INT) 1
9860 << (mode_width - 1
9861 - INTVAL (XEXP (op0, 1)))));
230d793d
RS
9862 code = (code == LT ? NE : EQ);
9863 continue;
9864 }
dfbe1b2f
RK
9865
9866 /* If this an equality comparison with zero and we are shifting
9867 the low bit to the sign bit, we can convert this to an AND of the
9868 low-order bit. */
9869 if (const_op == 0 && equality_comparison_p
9870 && GET_CODE (XEXP (op0, 1)) == CONST_INT
9871 && INTVAL (XEXP (op0, 1)) == mode_width - 1)
9872 {
5f4f0e22
CH
9873 op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
9874 (HOST_WIDE_INT) 1);
dfbe1b2f
RK
9875 continue;
9876 }
230d793d
RS
9877 break;
9878
9879 case ASHIFTRT:
d0ab8cd3
RK
9880 /* If this is an equality comparison with zero, we can do this
9881 as a logical shift, which might be much simpler. */
9882 if (equality_comparison_p && const_op == 0
9883 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
9884 {
9885 op0 = simplify_shift_const (NULL_RTX, LSHIFTRT, mode,
9886 XEXP (op0, 0),
9887 INTVAL (XEXP (op0, 1)));
9888 continue;
9889 }
9890
230d793d
RS
9891 /* If OP0 is a sign extension and CODE is not an unsigned comparison,
9892 do the comparison in a narrower mode. */
9893 if (! unsigned_comparison_p
9894 && GET_CODE (XEXP (op0, 1)) == CONST_INT
9895 && GET_CODE (XEXP (op0, 0)) == ASHIFT
9896 && XEXP (op0, 1) == XEXP (XEXP (op0, 0), 1)
9897 && (tmode = mode_for_size (mode_width - INTVAL (XEXP (op0, 1)),
22331794 9898 MODE_INT, 1)) != BLKmode
5f4f0e22
CH
9899 && ((unsigned HOST_WIDE_INT) const_op <= GET_MODE_MASK (tmode)
9900 || ((unsigned HOST_WIDE_INT) - const_op
9901 <= GET_MODE_MASK (tmode))))
230d793d
RS
9902 {
9903 op0 = gen_lowpart_for_combine (tmode, XEXP (XEXP (op0, 0), 0));
9904 continue;
9905 }
9906
0f41302f 9907 /* ... fall through ... */
230d793d
RS
9908 case LSHIFTRT:
9909 /* If we have (compare (xshiftrt FOO N) (const_int C)) and
951553af 9910 the low order N bits of FOO are known to be zero, we can do this
230d793d
RS
9911 by comparing FOO with C shifted left N bits so long as no
9912 overflow occurs. */
9913 if (GET_CODE (XEXP (op0, 1)) == CONST_INT
9914 && INTVAL (XEXP (op0, 1)) >= 0
5f4f0e22
CH
9915 && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
9916 && mode_width <= HOST_BITS_PER_WIDE_INT
951553af 9917 && (nonzero_bits (XEXP (op0, 0), mode)
5f4f0e22 9918 & (((HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1))) - 1)) == 0
230d793d
RS
9919 && (const_op == 0
9920 || (floor_log2 (const_op) + INTVAL (XEXP (op0, 1))
9921 < mode_width)))
9922 {
9923 const_op <<= INTVAL (XEXP (op0, 1));
5f4f0e22 9924 op1 = GEN_INT (const_op);
230d793d
RS
9925 op0 = XEXP (op0, 0);
9926 continue;
9927 }
9928
9929 /* If we are using this shift to extract just the sign bit, we
9930 can replace this with an LT or GE comparison. */
9931 if (const_op == 0
9932 && (equality_comparison_p || sign_bit_comparison_p)
9933 && GET_CODE (XEXP (op0, 1)) == CONST_INT
9934 && INTVAL (XEXP (op0, 1)) == mode_width - 1)
9935 {
9936 op0 = XEXP (op0, 0);
9937 code = (code == NE || code == GT ? LT : GE);
9938 continue;
9939 }
9940 break;
9941 }
9942
9943 break;
9944 }
9945
9946 /* Now make any compound operations involved in this comparison. Then,
9947 check for an outmost SUBREG on OP0 that isn't doing anything or is
9948 paradoxical. The latter case can only occur when it is known that the
9949 "extra" bits will be zero. Therefore, it is safe to remove the SUBREG.
9950 We can never remove a SUBREG for a non-equality comparison because the
9951 sign bit is in a different place in the underlying object. */
9952
9953 op0 = make_compound_operation (op0, op1 == const0_rtx ? COMPARE : SET);
9954 op1 = make_compound_operation (op1, SET);
9955
9956 if (GET_CODE (op0) == SUBREG && subreg_lowpart_p (op0)
9957 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
9958 && (code == NE || code == EQ)
9959 && ((GET_MODE_SIZE (GET_MODE (op0))
9960 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0))))))
9961 {
9962 op0 = SUBREG_REG (op0);
9963 op1 = gen_lowpart_for_combine (GET_MODE (op0), op1);
9964 }
9965
9966 else if (GET_CODE (op0) == SUBREG && subreg_lowpart_p (op0)
9967 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
9968 && (code == NE || code == EQ)
ac49a949
RS
9969 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0)))
9970 <= HOST_BITS_PER_WIDE_INT)
951553af 9971 && (nonzero_bits (SUBREG_REG (op0), GET_MODE (SUBREG_REG (op0)))
230d793d
RS
9972 & ~ GET_MODE_MASK (GET_MODE (op0))) == 0
9973 && (tem = gen_lowpart_for_combine (GET_MODE (SUBREG_REG (op0)),
9974 op1),
951553af 9975 (nonzero_bits (tem, GET_MODE (SUBREG_REG (op0)))
230d793d
RS
9976 & ~ GET_MODE_MASK (GET_MODE (op0))) == 0))
9977 op0 = SUBREG_REG (op0), op1 = tem;
9978
9979 /* We now do the opposite procedure: Some machines don't have compare
9980 insns in all modes. If OP0's mode is an integer mode smaller than a
9981 word and we can't do a compare in that mode, see if there is a larger
a687e897
RK
9982 mode for which we can do the compare. There are a number of cases in
9983 which we can use the wider mode. */
230d793d
RS
9984
9985 mode = GET_MODE (op0);
9986 if (mode != VOIDmode && GET_MODE_CLASS (mode) == MODE_INT
9987 && GET_MODE_SIZE (mode) < UNITS_PER_WORD
9988 && cmp_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
9989 for (tmode = GET_MODE_WIDER_MODE (mode);
5f4f0e22
CH
9990 (tmode != VOIDmode
9991 && GET_MODE_BITSIZE (tmode) <= HOST_BITS_PER_WIDE_INT);
230d793d 9992 tmode = GET_MODE_WIDER_MODE (tmode))
a687e897 9993 if (cmp_optab->handlers[(int) tmode].insn_code != CODE_FOR_nothing)
230d793d 9994 {
951553af 9995 /* If the only nonzero bits in OP0 and OP1 are those in the
a687e897
RK
9996 narrower mode and this is an equality or unsigned comparison,
9997 we can use the wider mode. Similarly for sign-extended
7e4dc511 9998 values, in which case it is true for all comparisons. */
a687e897
RK
9999 if (((code == EQ || code == NE
10000 || code == GEU || code == GTU || code == LEU || code == LTU)
951553af
RK
10001 && (nonzero_bits (op0, tmode) & ~ GET_MODE_MASK (mode)) == 0
10002 && (nonzero_bits (op1, tmode) & ~ GET_MODE_MASK (mode)) == 0)
7e4dc511
RK
10003 || ((num_sign_bit_copies (op0, tmode)
10004 > GET_MODE_BITSIZE (tmode) - GET_MODE_BITSIZE (mode))
a687e897 10005 && (num_sign_bit_copies (op1, tmode)
58744483 10006 > GET_MODE_BITSIZE (tmode) - GET_MODE_BITSIZE (mode))))
a687e897
RK
10007 {
10008 op0 = gen_lowpart_for_combine (tmode, op0);
10009 op1 = gen_lowpart_for_combine (tmode, op1);
10010 break;
10011 }
230d793d 10012
a687e897
RK
10013 /* If this is a test for negative, we can make an explicit
10014 test of the sign bit. */
10015
10016 if (op1 == const0_rtx && (code == LT || code == GE)
10017 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
230d793d 10018 {
a687e897
RK
10019 op0 = gen_binary (AND, tmode,
10020 gen_lowpart_for_combine (tmode, op0),
5f4f0e22
CH
10021 GEN_INT ((HOST_WIDE_INT) 1
10022 << (GET_MODE_BITSIZE (mode) - 1)));
230d793d 10023 code = (code == LT) ? NE : EQ;
a687e897 10024 break;
230d793d 10025 }
230d793d
RS
10026 }
10027
b7a775b2
RK
10028#ifdef CANONICALIZE_COMPARISON
10029 /* If this machine only supports a subset of valid comparisons, see if we
10030 can convert an unsupported one into a supported one. */
10031 CANONICALIZE_COMPARISON (code, op0, op1);
10032#endif
10033
230d793d
RS
10034 *pop0 = op0;
10035 *pop1 = op1;
10036
10037 return code;
10038}
10039\f
/* Return 1 if we know that X, a comparison operation, is not operating
   on a floating-point value or is EQ or NE, meaning that we can safely
   reverse it.

   Reversing an IEEE floating-point comparison is unsafe because of NaNs
   (e.g. ! (a < b) is not the same as (a >= b)), unless -ffast-math says
   we may ignore them.  */

static int
reversible_comparison_p (x)
     rtx x;
{
  /* Non-IEEE targets, -ffast-math, and (in)equality tests are always
     safe to reverse regardless of the operand mode.  */
  if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
      || flag_fast_math
      || GET_CODE (x) == NE || GET_CODE (x) == EQ)
    return 1;

  switch (GET_MODE_CLASS (GET_MODE (XEXP (x, 0))))
    {
    case MODE_INT:
    case MODE_PARTIAL_INT:
    case MODE_COMPLEX_INT:
      /* Integer comparisons are always reversible.  */
      return 1;

    case MODE_CC:
      /* If the mode of the condition codes tells us that this is safe,
	 we need look no further.  */
      if (REVERSIBLE_CC_MODE (GET_MODE (XEXP (x, 0))))
	return 1;

      /* Otherwise try and find where the condition codes were last set and
	 use that.  Reversible only if that was an integral COMPARE.  */
      x = get_last_value (XEXP (x, 0));
      return (x && GET_CODE (x) == COMPARE
	      && ! FLOAT_MODE_P (GET_MODE (XEXP (x, 0))));
    }

  /* Anything else (e.g. a plain floating-point comparison) may trap on
     reversal; say no.  */
  return 0;
}
10075\f
10076/* Utility function for following routine. Called when X is part of a value
10077 being stored into reg_last_set_value. Sets reg_last_set_table_tick
10078 for each register mentioned. Similar to mention_regs in cse.c */
10079
10080static void
10081update_table_tick (x)
10082 rtx x;
10083{
10084 register enum rtx_code code = GET_CODE (x);
10085 register char *fmt = GET_RTX_FORMAT (code);
10086 register int i;
10087
10088 if (code == REG)
10089 {
10090 int regno = REGNO (x);
10091 int endregno = regno + (regno < FIRST_PSEUDO_REGISTER
10092 ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
10093
10094 for (i = regno; i < endregno; i++)
10095 reg_last_set_table_tick[i] = label_tick;
10096
10097 return;
10098 }
10099
10100 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
10101 /* Note that we can't have an "E" in values stored; see
10102 get_last_value_validate. */
10103 if (fmt[i] == 'e')
10104 update_table_tick (XEXP (x, i));
10105}
10106
/* Record that REG is set to VALUE in insn INSN.  If VALUE is zero, we
   are saying that the register is clobbered and we no longer know its
   value.  If INSN is zero, don't update reg_last_set; this is only permitted
   with VALUE also zero and is used to invalidate the register.  */

static void
record_value_for_reg (reg, insn, value)
     rtx reg;
     rtx insn;
     rtx value;
{
  int regno = REGNO (reg);
  /* Hard registers in wide modes may span several register numbers.  */
  int endregno = regno + (regno < FIRST_PSEUDO_REGISTER
			  ? HARD_REGNO_NREGS (regno, GET_MODE (reg)) : 1);
  int i;

  /* If VALUE contains REG and we have a previous value for REG, substitute
     the previous value.  */
  if (value && insn && reg_overlap_mentioned_p (reg, value))
    {
      rtx tem;

      /* Set things up so get_last_value is allowed to see anything set up to
	 our insn.  */
      subst_low_cuid = INSN_CUID (insn);
      tem = get_last_value (reg);

      if (tem)
	value = replace_rtx (copy_rtx (value), reg, tem);
    }

  /* For each register modified, show we don't know its value, that
     we don't know about its bitwise content, that its value has been
     updated, and that we don't know the location of the death of the
     register.  */
  for (i = regno; i < endregno; i ++)
    {
      if (insn)
	reg_last_set[i] = insn;
      reg_last_set_value[i] = 0;
      reg_last_set_mode[i] = 0;
      reg_last_set_nonzero_bits[i] = 0;
      reg_last_set_sign_bit_copies[i] = 0;
      reg_last_death[i] = 0;
    }

  /* Mark registers that are being referenced in this value.  */
  if (value)
    update_table_tick (value);

  /* Now update the status of each register being set.
     If someone is using this register in this block, set this register
     to invalid since we will get confused between the two lives in this
     basic block.  This makes using this register always invalid.  In cse, we
     scan the table to invalidate all entries using this register, but this
     is too much work for us.  */

  for (i = regno; i < endregno; i++)
    {
      reg_last_set_label[i] = label_tick;
      if (value && reg_last_set_table_tick[i] == label_tick)
	reg_last_set_invalid[i] = 1;
      else
	reg_last_set_invalid[i] = 0;
    }

  /* The value being assigned might refer to X (like in "x++;").  In that
     case, we must replace it with (clobber (const_int 0)) to prevent
     infinite loops.  */
  if (value && ! get_last_value_validate (&value,
					  reg_last_set_label[regno], 0))
    {
      /* First try replacing just the offending references; if any remain
	 after that, give up on the value entirely.  */
      value = copy_rtx (value);
      if (! get_last_value_validate (&value, reg_last_set_label[regno], 1))
	value = 0;
    }

  /* For the main register being modified, update the value, the mode, the
     nonzero bits, and the number of sign bit copies.  */

  reg_last_set_value[regno] = value;

  if (value)
    {
      subst_low_cuid = INSN_CUID (insn);
      reg_last_set_mode[regno] = GET_MODE (reg);
      reg_last_set_nonzero_bits[regno] = nonzero_bits (value, GET_MODE (reg));
      reg_last_set_sign_bit_copies[regno]
	= num_sign_bit_copies (value, GET_MODE (reg));
    }
}
10198
/* Used for communication between the following two routines;
   note_stores takes no extra argument, so the insn being processed is
   passed through this file-scope variable.  */
static rtx record_dead_insn;

/* Called via note_stores from record_dead_and_set_regs to handle one
   SET or CLOBBER in an insn.  DEST is the place stored into; SETTER is
   the SET or CLOBBER rtx doing the storing.  */

static void
record_dead_and_set_regs_1 (dest, setter)
     rtx dest, setter;
{
  if (GET_CODE (dest) == SUBREG)
    dest = SUBREG_REG (dest);

  if (GET_CODE (dest) == REG)
    {
      /* If we are setting the whole register, we know its value.  Otherwise
	 show that we don't know the value.  We can handle SUBREG in
	 some cases.  */
      if (GET_CODE (setter) == SET && dest == SET_DEST (setter))
	record_value_for_reg (dest, record_dead_insn, SET_SRC (setter));
      else if (GET_CODE (setter) == SET
	       && GET_CODE (SET_DEST (setter)) == SUBREG
	       && SUBREG_REG (SET_DEST (setter)) == dest
	       && GET_MODE_BITSIZE (GET_MODE (dest)) <= BITS_PER_WORD
	       && subreg_lowpart_p (SET_DEST (setter)))
	/* A lowpart SUBREG store of a single-word register: record the
	   source converted to the register's mode.  */
	record_value_for_reg (dest, record_dead_insn,
			      gen_lowpart_for_combine (GET_MODE (dest),
						       SET_SRC (setter)));
      else
	record_value_for_reg (dest, record_dead_insn, NULL_RTX);
    }
  else if (GET_CODE (dest) == MEM
	   /* Ignore pushes, they clobber nothing.  */
	   && ! push_operand (dest, GET_MODE (dest)))
    mem_last_set = INSN_CUID (record_dead_insn);
}
10235
/* Update the records of when each REG was most recently set or killed
   for the things done by INSN.  This is the last thing done in processing
   INSN in the combiner loop.

   We update reg_last_set, reg_last_set_value, reg_last_set_mode,
   reg_last_set_nonzero_bits, reg_last_set_sign_bit_copies, reg_last_death,
   and also the similar information mem_last_set (which insn most recently
   modified memory) and last_call_cuid (which insn was the most recent
   subroutine call).  */

static void
record_dead_and_set_regs (insn)
     rtx insn;
{
  register rtx link;
  int i;

  /* First process the REG notes: deaths and auto-increments.  */
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    {
      if (REG_NOTE_KIND (link) == REG_DEAD
	  && GET_CODE (XEXP (link, 0)) == REG)
	{
	  int regno = REGNO (XEXP (link, 0));
	  int endregno
	    = regno + (regno < FIRST_PSEUDO_REGISTER
		       ? HARD_REGNO_NREGS (regno, GET_MODE (XEXP (link, 0)))
		       : 1);

	  for (i = regno; i < endregno; i++)
	    reg_last_death[i] = insn;
	}
      else if (REG_NOTE_KIND (link) == REG_INC)
	/* An auto-increment modifies the register; we no longer know
	   its value.  */
	record_value_for_reg (XEXP (link, 0), insn, NULL_RTX);
    }

  /* A call clobbers all call-used hard registers and memory.  */
  if (GET_CODE (insn) == CALL_INSN)
    {
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	if (call_used_regs[i])
	  {
	    reg_last_set_value[i] = 0;
	    reg_last_set_mode[i] = 0;
	    reg_last_set_nonzero_bits[i] = 0;
	    reg_last_set_sign_bit_copies[i] = 0;
	    reg_last_death[i] = 0;
	  }

      last_call_cuid = mem_last_set = INSN_CUID (insn);
    }

  /* Finally process the SETs and CLOBBERs in the pattern itself;
     record_dead_insn passes INSN to the note_stores callback.  */
  record_dead_insn = insn;
  note_stores (PATTERN (insn), record_dead_and_set_regs_1);
}
10289\f
/* Utility routine for the following function.  Verify that all the registers
   mentioned in *LOC are valid when *LOC was part of a value set when
   label_tick == TICK.  Return 0 if some are not.

   If REPLACE is non-zero, replace the invalid reference with
   (clobber (const_int 0)) and return 1.  This replacement is useful because
   we often can get useful information about the form of a value (e.g., if
   it was produced by a shift that always produces -1 or 0) even though
   we don't know exactly what registers it was produced from.  */

static int
get_last_value_validate (loc, tick, replace)
     rtx *loc;
     int tick;
     int replace;
{
  rtx x = *loc;
  char *fmt = GET_RTX_FORMAT (GET_CODE (x));
  int len = GET_RTX_LENGTH (GET_CODE (x));
  int i;

  if (GET_CODE (x) == REG)
    {
      int regno = REGNO (x);
      int endregno = regno + (regno < FIRST_PSEUDO_REGISTER
			      ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
      int j;

      /* Each register covered by X must still be valid for TICK.  */
      for (j = regno; j < endregno; j++)
	if (reg_last_set_invalid[j]
	    /* If this is a pseudo-register that was only set once, it is
	       always valid.  */
	    || (! (regno >= FIRST_PSEUDO_REGISTER && reg_n_sets[regno] == 1)
		&& reg_last_set_label[j] > tick))
	  {
	    if (replace)
	      *loc = gen_rtx (CLOBBER, GET_MODE (x), const0_rtx);
	    /* Returns 0 when not replacing, 1 when we substituted.  */
	    return replace;
	  }

      return 1;
    }

  /* Recurse into the operands.  */
  for (i = 0; i < len; i++)
    if ((fmt[i] == 'e'
	 && get_last_value_validate (&XEXP (x, i), tick, replace) == 0)
	/* Don't bother with these.  They shouldn't occur anyway.  */
	|| fmt[i] == 'E')
      return 0;

  /* If we haven't found a reason for it to be invalid, it is valid.  */
  return 1;
}
10343
/* Get the last value assigned to X, if known.  Some registers
   in the value may be replaced with (clobber (const_int 0)) if their value
   is no longer known reliably.  */

static rtx
get_last_value (x)
     rtx x;
{
  int regno;
  rtx value;

  /* If this is a non-paradoxical SUBREG, get the value of its operand and
     then convert it to the desired mode.  If this is a paradoxical SUBREG,
     we cannot predict what values the "extra" bits might have.  */
  if (GET_CODE (x) == SUBREG
      && subreg_lowpart_p (x)
      && (GET_MODE_SIZE (GET_MODE (x))
	  <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
      && (value = get_last_value (SUBREG_REG (x))) != 0)
    return gen_lowpart_for_combine (GET_MODE (x), value);

  if (GET_CODE (x) != REG)
    return 0;

  regno = REGNO (x);
  value = reg_last_set_value[regno];

  /* If we don't have a value or if it isn't for this basic block,
     return 0.  */

  if (value == 0
      || (reg_n_sets[regno] != 1
	  && reg_last_set_label[regno] != label_tick))
    return 0;

  /* If the value was set in a later insn than the ones we are processing,
     we can't use it even if the register was only set once, but make a quick
     check to see if the previous insn set it to something.  This is commonly
     the case when the same pseudo is used by repeated insns.

     This does not work if there exists an instruction which is temporarily
     not on the insn chain.  */

  if (INSN_CUID (reg_last_set[regno]) >= subst_low_cuid)
    {
      rtx insn, set;

      /* We can not do anything useful in this case, because there is
	 an instruction which is not on the insn chain.  */
      if (subst_prev_insn)
	return 0;

      /* Skip over USE insns.  They are not useful here, and they may have
	 been made by combine, in which case they do not have a INSN_CUID
	 value.  We can't use prev_real_insn, because that would incorrectly
	 take us backwards across labels.  Skip over BARRIERs also, since
	 they could have been made by combine.  If we see one, we must be
	 optimizing dead code, so it doesn't matter what we do.  */
      for (insn = prev_nonnote_insn (subst_insn);
	   insn && ((GET_CODE (insn) == INSN
		     && GET_CODE (PATTERN (insn)) == USE)
		    || GET_CODE (insn) == BARRIER
		    || INSN_CUID (insn) >= subst_low_cuid);
	   insn = prev_nonnote_insn (insn))
	;

      /* Only usable if that previous insn sets exactly X.  */
      if (insn
	  && (set = single_set (insn)) != 0
	  && rtx_equal_p (SET_DEST (set), x))
	{
	  value = SET_SRC (set);

	  /* Make sure that VALUE doesn't reference X.  Replace any
	     explicit references with a CLOBBER.  If there are any remaining
	     references (rare), don't use the value.  */

	  if (reg_mentioned_p (x, value))
	    value = replace_rtx (copy_rtx (value), x,
				 gen_rtx (CLOBBER, GET_MODE (x), const0_rtx));

	  if (reg_overlap_mentioned_p (x, value))
	    return 0;
	}
      else
	return 0;
    }

  /* If the value has all its registers valid, return it.  */
  if (get_last_value_validate (&value, reg_last_set_label[regno], 0))
    return value;

  /* Otherwise, make a copy and replace any invalid register with
     (clobber (const_int 0)).  If that fails for some reason, return 0.  */

  value = copy_rtx (value);
  if (get_last_value_validate (&value, reg_last_set_label[regno], 1))
    return value;

  return 0;
}
10444\f
10445/* Return nonzero if expression X refers to a REG or to memory
10446 that is set in an instruction more recent than FROM_CUID. */
10447
10448static int
10449use_crosses_set_p (x, from_cuid)
10450 register rtx x;
10451 int from_cuid;
10452{
10453 register char *fmt;
10454 register int i;
10455 register enum rtx_code code = GET_CODE (x);
10456
10457 if (code == REG)
10458 {
10459 register int regno = REGNO (x);
e28f5732
RK
10460 int endreg = regno + (regno < FIRST_PSEUDO_REGISTER
10461 ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
10462
230d793d
RS
10463#ifdef PUSH_ROUNDING
10464 /* Don't allow uses of the stack pointer to be moved,
10465 because we don't know whether the move crosses a push insn. */
10466 if (regno == STACK_POINTER_REGNUM)
10467 return 1;
10468#endif
e28f5732
RK
10469 for (;regno < endreg; regno++)
10470 if (reg_last_set[regno]
10471 && INSN_CUID (reg_last_set[regno]) > from_cuid)
10472 return 1;
10473 return 0;
230d793d
RS
10474 }
10475
10476 if (code == MEM && mem_last_set > from_cuid)
10477 return 1;
10478
10479 fmt = GET_RTX_FORMAT (code);
10480
10481 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
10482 {
10483 if (fmt[i] == 'E')
10484 {
10485 register int j;
10486 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
10487 if (use_crosses_set_p (XVECEXP (x, i, j), from_cuid))
10488 return 1;
10489 }
10490 else if (fmt[i] == 'e'
10491 && use_crosses_set_p (XEXP (x, i), from_cuid))
10492 return 1;
10493 }
10494 return 0;
10495}
10496\f
10497/* Define three variables used for communication between the following
10498 routines. */
10499
10500static int reg_dead_regno, reg_dead_endregno;
10501static int reg_dead_flag;
10502
10503/* Function called via note_stores from reg_dead_at_p.
10504
ddd5a7c1 10505 If DEST is within [reg_dead_regno, reg_dead_endregno), set
230d793d
RS
10506 reg_dead_flag to 1 if X is a CLOBBER and to -1 it is a SET. */
10507
10508static void
10509reg_dead_at_p_1 (dest, x)
10510 rtx dest;
10511 rtx x;
10512{
10513 int regno, endregno;
10514
10515 if (GET_CODE (dest) != REG)
10516 return;
10517
10518 regno = REGNO (dest);
10519 endregno = regno + (regno < FIRST_PSEUDO_REGISTER
10520 ? HARD_REGNO_NREGS (regno, GET_MODE (dest)) : 1);
10521
10522 if (reg_dead_endregno > regno && reg_dead_regno < endregno)
10523 reg_dead_flag = (GET_CODE (x) == CLOBBER) ? 1 : -1;
10524}
10525
/* Return non-zero if REG is known to be dead at INSN.

   We scan backwards from INSN.  If we hit a REG_DEAD note or a CLOBBER
   referencing REG, it is dead.  If we hit a SET referencing REG, it is
   live.  Otherwise, see if it is live or dead at the start of the basic
   block we are in.  Hard regs marked as being live in NEWPAT_USED_REGS
   must be assumed to be always live.  */

static int
reg_dead_at_p (reg, insn)
     rtx reg;
     rtx insn;
{
  int block, i;

  /* Set variables for reg_dead_at_p_1.  */
  reg_dead_regno = REGNO (reg);
  reg_dead_endregno = reg_dead_regno + (reg_dead_regno < FIRST_PSEUDO_REGISTER
					? HARD_REGNO_NREGS (reg_dead_regno,
							    GET_MODE (reg))
					: 1);

  reg_dead_flag = 0;

  /* Check that reg isn't mentioned in NEWPAT_USED_REGS.  */
  if (reg_dead_regno < FIRST_PSEUDO_REGISTER)
    {
      for (i = reg_dead_regno; i < reg_dead_endregno; i++)
	if (TEST_HARD_REG_BIT (newpat_used_regs, i))
	  return 0;
    }

  /* Scan backwards until we find a REG_DEAD note, SET, CLOBBER, label, or
     beginning of function.  reg_dead_at_p_1 sets reg_dead_flag via
     note_stores when it sees a SET or CLOBBER of the register.  */
  for (; insn && GET_CODE (insn) != CODE_LABEL && GET_CODE (insn) != BARRIER;
       insn = prev_nonnote_insn (insn))
    {
      note_stores (PATTERN (insn), reg_dead_at_p_1);
      if (reg_dead_flag)
	return reg_dead_flag == 1 ? 1 : 0;

      if (find_regno_note (insn, REG_DEAD, reg_dead_regno))
	return 1;
    }

  /* Get the basic block number that we were in.  */
  if (insn == 0)
    block = 0;
  else
    {
      for (block = 0; block < n_basic_blocks; block++)
	if (insn == basic_block_head[block])
	  break;

      /* If INSN heads no block, we can't tell; assume live.  */
      if (block == n_basic_blocks)
	return 0;
    }

  /* Dead only if no covered register is live at the block start.  */
  for (i = reg_dead_regno; i < reg_dead_endregno; i++)
    if (basic_block_live_at_start[block][i / REGSET_ELT_BITS]
	& ((REGSET_ELT_TYPE) 1 << (i % REGSET_ELT_BITS)))
      return 0;

  return 1;
}
6e25d159
RK
10591\f
/* Note hard registers in X that are used.  This code is similar to
   that in flow.c, but much simpler since we don't care about pseudos.
   Used registers are recorded by setting bits in newpat_used_regs.  */

static void
mark_used_regs_combine (x)
     rtx x;
{
  register RTX_CODE code = GET_CODE (x);
  register int regno;
  int i;

  switch (code)
    {
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_INT:
    case CONST:
    case CONST_DOUBLE:
    case PC:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
    case ASM_INPUT:
#ifdef HAVE_cc0
    /* CC0 must die in the insn after it is set, so we don't need to take
       special note of it here.  */
    case CC0:
#endif
      /* Leaves that can't contain a hard register use.  */
      return;

    case CLOBBER:
      /* If we are clobbering a MEM, mark any hard registers inside the
	 address as used.  */
      if (GET_CODE (XEXP (x, 0)) == MEM)
	mark_used_regs_combine (XEXP (XEXP (x, 0), 0));
      return;

    case REG:
      regno = REGNO (x);
      /* A hard reg in a wide mode may really be multiple registers.
	 If so, mark all of them just like the first.  */
      if (regno < FIRST_PSEUDO_REGISTER)
	{
	  /* None of this applies to the stack, frame or arg pointers */
	  if (regno == STACK_POINTER_REGNUM
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
	      || regno == HARD_FRAME_POINTER_REGNUM
#endif
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
	      || (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
#endif
	      || regno == FRAME_POINTER_REGNUM)
	    return;

	  i = HARD_REGNO_NREGS (regno, GET_MODE (x));
	  while (i-- > 0)
	    SET_HARD_REG_BIT (newpat_used_regs, regno + i);
	}
      return;

    case SET:
      {
	/* If setting a MEM, or a SUBREG of a MEM, then note any hard regs in
	   the address.  */
	register rtx testreg = SET_DEST (x);

	/* Strip wrappers to find what is really being stored into.  */
	while (GET_CODE (testreg) == SUBREG
	       || GET_CODE (testreg) == ZERO_EXTRACT
	       || GET_CODE (testreg) == SIGN_EXTRACT
	       || GET_CODE (testreg) == STRICT_LOW_PART)
	  testreg = XEXP (testreg, 0);

	if (GET_CODE (testreg) == MEM)
	  mark_used_regs_combine (XEXP (testreg, 0));

	mark_used_regs_combine (SET_SRC (x));
	return;
      }
    }

  /* Any other code falls through to here: recursively scan the
     operands of this expression.  */

  {
    register char *fmt = GET_RTX_FORMAT (code);

    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
	if (fmt[i] == 'e')
	  mark_used_regs_combine (XEXP (x, i));
	else if (fmt[i] == 'E')
	  {
	    register int j;

	    for (j = 0; j < XVECLEN (x, i); j++)
	      mark_used_regs_combine (XVECEXP (x, i, j));
	  }
      }
  }
}
10690
230d793d
RS
10691\f
10692/* Remove register number REGNO from the dead registers list of INSN.
10693
10694 Return the note used to record the death, if there was one. */
10695
10696rtx
10697remove_death (regno, insn)
10698 int regno;
10699 rtx insn;
10700{
10701 register rtx note = find_regno_note (insn, REG_DEAD, regno);
10702
10703 if (note)
1a26b032
RK
10704 {
10705 reg_n_deaths[regno]--;
10706 remove_note (insn, note);
10707 }
230d793d
RS
10708
10709 return note;
10710}
10711
/* For each register (hardware or pseudo) used within expression X, if its
   death is in an instruction with cuid between FROM_CUID (inclusive) and
   TO_INSN (exclusive), put a REG_DEAD note for that register in the
   list headed by PNOTES.

   That said, don't move registers killed by maybe_kill_insn.

   This is done when X is being merged by combination into TO_INSN.  These
   notes will then be distributed as needed.  */

static void
move_deaths (x, maybe_kill_insn, from_cuid, to_insn, pnotes)
     rtx x;
     rtx maybe_kill_insn;
     int from_cuid;
     rtx to_insn;
     rtx *pnotes;
{
  register char *fmt;
  register int len, i;
  register enum rtx_code code = GET_CODE (x);

  if (code == REG)
    {
      register int regno = REGNO (x);
      register rtx where_dead = reg_last_death[regno];
      register rtx before_dead, after_dead;

      /* Don't move the register if it gets killed in between from and to */
      if (maybe_kill_insn && reg_set_p (x, maybe_kill_insn)
	  && !reg_referenced_p (x, maybe_kill_insn))
	return;

      /* WHERE_DEAD could be a USE insn made by combine, so first we
	 make sure that we have insns with valid INSN_CUID values.  */
      before_dead = where_dead;
      while (before_dead && INSN_UID (before_dead) > max_uid_cuid)
	before_dead = PREV_INSN (before_dead);
      after_dead = where_dead;
      while (after_dead && INSN_UID (after_dead) > max_uid_cuid)
	after_dead = NEXT_INSN (after_dead);

      /* Move the death note only if the death lies within
	 [FROM_CUID, TO_INSN).  */
      if (before_dead && after_dead
	  && INSN_CUID (before_dead) >= from_cuid
	  && (INSN_CUID (after_dead) < INSN_CUID (to_insn)
	      || (where_dead != after_dead
		  && INSN_CUID (after_dead) == INSN_CUID (to_insn))))
	{
	  rtx note = remove_death (regno, where_dead);

	  /* It is possible for the call above to return 0.  This can occur
	     when reg_last_death points to I2 or I1 that we combined with.
	     In that case make a new note.

	     We must also check for the case where X is a hard register
	     and NOTE is a death note for a range of hard registers
	     including X.  In that case, we must put REG_DEAD notes for
	     the remaining registers in place of NOTE.  */

	  if (note != 0 && regno < FIRST_PSEUDO_REGISTER
	      && (GET_MODE_SIZE (GET_MODE (XEXP (note, 0)))
		  != GET_MODE_SIZE (GET_MODE (x))))
	    {
	      int deadregno = REGNO (XEXP (note, 0));
	      int deadend
		= (deadregno + HARD_REGNO_NREGS (deadregno,
						 GET_MODE (XEXP (note, 0))));
	      int ourend = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
	      int i;

	      /* Re-add death notes for the registers covered by NOTE
		 but not by X.  */
	      for (i = deadregno; i < deadend; i++)
		if (i < regno || i >= ourend)
		  REG_NOTES (where_dead)
		    = gen_rtx (EXPR_LIST, REG_DEAD,
			       gen_rtx (REG, reg_raw_mode[i], i),
			       REG_NOTES (where_dead));
	    }
	  /* If we didn't find any note, and we have a multi-reg hard
	     register, then to be safe we must check for REG_DEAD notes
	     for each register other than the first.  They could have
	     their own REG_DEAD notes lying around.  */
	  else if (note == 0 && regno < FIRST_PSEUDO_REGISTER
		   && HARD_REGNO_NREGS (regno, GET_MODE (x)) > 1)
	    {
	      int ourend = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
	      int i;
	      rtx oldnotes = 0;

	      for (i = regno + 1; i < ourend; i++)
		move_deaths (gen_rtx (REG, reg_raw_mode[i], i),
			     maybe_kill_insn, from_cuid, to_insn, &oldnotes);
	    }

	  if (note != 0 && GET_MODE (XEXP (note, 0)) == GET_MODE (x))
	    {
	      XEXP (note, 1) = *pnotes;
	      *pnotes = note;
	    }
	  else
	    *pnotes = gen_rtx (EXPR_LIST, REG_DEAD, x, *pnotes);

	  reg_n_deaths[regno]++;
	}

      return;
    }

  else if (GET_CODE (x) == SET)
    {
      rtx dest = SET_DEST (x);

      move_deaths (SET_SRC (x), maybe_kill_insn, from_cuid, to_insn, pnotes);

      /* In the case of a ZERO_EXTRACT, a STRICT_LOW_PART, or a SUBREG
	 that accesses one word of a multi-word item, some
	 piece of every register in the expression is used by
	 this insn, so remove any old death.  */

      if (GET_CODE (dest) == ZERO_EXTRACT
	  || GET_CODE (dest) == STRICT_LOW_PART
	  || (GET_CODE (dest) == SUBREG
	      && (((GET_MODE_SIZE (GET_MODE (dest))
		    + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
		  == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
		       + UNITS_PER_WORD - 1) / UNITS_PER_WORD))))
	{
	  move_deaths (dest, maybe_kill_insn, from_cuid, to_insn, pnotes);
	  return;
	}

      /* If this is some other SUBREG, we know it replaces the entire
	 value, so use that as the destination.  */
      if (GET_CODE (dest) == SUBREG)
	dest = SUBREG_REG (dest);

      /* If this is a MEM, adjust deaths of anything used in the address.
	 For a REG (the only other possibility), the entire value is
	 being replaced so the old value is not used in this insn.  */

      if (GET_CODE (dest) == MEM)
	move_deaths (XEXP (dest, 0), maybe_kill_insn, from_cuid,
		     to_insn, pnotes);
      return;
    }

  else if (GET_CODE (x) == CLOBBER)
    return;

  /* Recursively process the operands of any other expression.  */
  len = GET_RTX_LENGTH (code);
  fmt = GET_RTX_FORMAT (code);

  for (i = 0; i < len; i++)
    {
      if (fmt[i] == 'E')
	{
	  register int j;
	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    move_deaths (XVECEXP (x, i, j), maybe_kill_insn, from_cuid,
			 to_insn, pnotes);
	}
      else if (fmt[i] == 'e')
	move_deaths (XEXP (x, i), maybe_kill_insn, from_cuid, to_insn, pnotes);
    }
}
10876\f
a7c99304
RK
10877/* Return 1 if X is the target of a bit-field assignment in BODY, the
10878 pattern of an insn. X must be a REG. */
230d793d
RS
10879
10880static int
a7c99304
RK
10881reg_bitfield_target_p (x, body)
10882 rtx x;
230d793d
RS
10883 rtx body;
10884{
10885 int i;
10886
10887 if (GET_CODE (body) == SET)
a7c99304
RK
10888 {
10889 rtx dest = SET_DEST (body);
10890 rtx target;
10891 int regno, tregno, endregno, endtregno;
10892
10893 if (GET_CODE (dest) == ZERO_EXTRACT)
10894 target = XEXP (dest, 0);
10895 else if (GET_CODE (dest) == STRICT_LOW_PART)
10896 target = SUBREG_REG (XEXP (dest, 0));
10897 else
10898 return 0;
10899
10900 if (GET_CODE (target) == SUBREG)
10901 target = SUBREG_REG (target);
10902
10903 if (GET_CODE (target) != REG)
10904 return 0;
10905
10906 tregno = REGNO (target), regno = REGNO (x);
10907 if (tregno >= FIRST_PSEUDO_REGISTER || regno >= FIRST_PSEUDO_REGISTER)
10908 return target == x;
10909
10910 endtregno = tregno + HARD_REGNO_NREGS (tregno, GET_MODE (target));
10911 endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
10912
10913 return endregno > tregno && regno < endtregno;
10914 }
230d793d
RS
10915
10916 else if (GET_CODE (body) == PARALLEL)
10917 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
a7c99304 10918 if (reg_bitfield_target_p (x, XVECEXP (body, 0, i)))
230d793d
RS
10919 return 1;
10920
10921 return 0;
10922}
10923\f
/* Given a chain of REG_NOTES originally from FROM_INSN, try to place them
   as appropriate.  I3 and I2 are the insns resulting from the combination
   insns including FROM (I2 may be zero).

   ELIM_I2 and ELIM_I1 are either zero or registers that we know will
   not need REG_DEAD notes because they are being substituted for.  This
   saves searching in the most common cases.

   Each note in the list is either ignored or placed on some insns, depending
   on the type of note.  */

static void
distribute_notes (notes, from_insn, i3, i2, elim_i2, elim_i1)
     rtx notes;
     rtx from_insn;
     rtx i3, i2;
     rtx elim_i2, elim_i1;
{
  rtx note, next_note;
  rtx tem;

  for (note = notes; note; note = next_note)
    {
      /* PLACE is the insn the note goes on; PLACE2 is a second insn that
	 receives a copy of the note (only used for REG_INC-like notes).  */
      rtx place = 0, place2 = 0;

      /* If this NOTE references a pseudo register, ensure it references
	 the latest copy of that register.  */
      if (XEXP (note, 0) && GET_CODE (XEXP (note, 0)) == REG
	  && REGNO (XEXP (note, 0)) >= FIRST_PSEUDO_REGISTER)
	XEXP (note, 0) = regno_reg_rtx[REGNO (XEXP (note, 0))];

      next_note = XEXP (note, 1);
      switch (REG_NOTE_KIND (note))
	{
	case REG_UNUSED:
	  /* Any clobbers for i3 may still exist, and so we must process
	     REG_UNUSED notes from that insn.

	     Any clobbers from i2 or i1 can only exist if they were added by
	     recog_for_combine.  In that case, recog_for_combine created the
	     necessary REG_UNUSED notes.  Trying to keep any original
	     REG_UNUSED notes from these insns can cause incorrect output
	     if it is for the same register as the original i3 dest.
	     In that case, we will notice that the register is set in i3,
	     and then add a REG_UNUSED note for the destination of i3, which
	     is wrong.  However, it is possible to have REG_UNUSED notes from
	     i2 or i1 for register which were both used and clobbered, so
	     we keep notes from i2 or i1 if they will turn into REG_DEAD
	     notes.  */

	  /* If this register is set or clobbered in I3, put the note there
	     unless there is one already.  */
	  if (reg_set_p (XEXP (note, 0), PATTERN (i3)))
	    {
	      if (from_insn != i3)
		break;

	      if (! (GET_CODE (XEXP (note, 0)) == REG
		     ? find_regno_note (i3, REG_UNUSED, REGNO (XEXP (note, 0)))
		     : find_reg_note (i3, REG_UNUSED, XEXP (note, 0))))
		place = i3;
	    }
	  /* Otherwise, if this register is used by I3, then this register
	     now dies here, so we must put a REG_DEAD note here unless there
	     is one already.  */
	  else if (reg_referenced_p (XEXP (note, 0), PATTERN (i3))
		   && ! (GET_CODE (XEXP (note, 0)) == REG
			 ? find_regno_note (i3, REG_DEAD, REGNO (XEXP (note, 0)))
			 : find_reg_note (i3, REG_DEAD, XEXP (note, 0))))
	    {
	      PUT_REG_NOTE_KIND (note, REG_DEAD);
	      place = i3;
	    }
	  break;

	case REG_EQUAL:
	case REG_EQUIV:
	case REG_NONNEG:
	  /* These notes say something about results of an insn.  We can
	     only support them if they used to be on I3 in which case they
	     remain on I3.  Otherwise they are ignored.

	     If the note refers to an expression that is not a constant, we
	     must also ignore the note since we cannot tell whether the
	     equivalence is still true.  It might be possible to do
	     slightly better than this (we only have a problem if I2DEST
	     or I1DEST is present in the expression), but it doesn't
	     seem worth the trouble.  */

	  if (from_insn == i3
	      && (XEXP (note, 0) == 0 || CONSTANT_P (XEXP (note, 0))))
	    place = i3;
	  break;

	case REG_INC:
	case REG_NO_CONFLICT:
	case REG_LABEL:
	  /* These notes say something about how a register is used.  They must
	     be present on any use of the register in I2 or I3.  */
	  if (reg_mentioned_p (XEXP (note, 0), PATTERN (i3)))
	    place = i3;

	  /* If the register is used in both I2 and I3, a copy of the note
	     must go on each; PLACE2 holds the second location.  */
	  if (i2 && reg_mentioned_p (XEXP (note, 0), PATTERN (i2)))
	    {
	      if (place)
		place2 = i2;
	      else
		place = i2;
	    }
	  break;

	case REG_WAS_0:
	  /* It is too much trouble to try to see if this note is still
	     correct in all situations.  It is better to simply delete it.  */
	  break;

	case REG_RETVAL:
	  /* If the insn previously containing this note still exists,
	     put it back where it was.  Otherwise move it to the previous
	     insn.  Adjust the corresponding REG_LIBCALL note.  */
	  if (GET_CODE (from_insn) != NOTE)
	    place = from_insn;
	  else
	    {
	      tem = find_reg_note (XEXP (note, 0), REG_LIBCALL, NULL_RTX);
	      place = prev_real_insn (from_insn);
	      if (tem && place)
		XEXP (tem, 0) = place;
	    }
	  break;

	case REG_LIBCALL:
	  /* This is handled similarly to REG_RETVAL.  */
	  if (GET_CODE (from_insn) != NOTE)
	    place = from_insn;
	  else
	    {
	      tem = find_reg_note (XEXP (note, 0), REG_RETVAL, NULL_RTX);
	      place = next_real_insn (from_insn);
	      if (tem && place)
		XEXP (tem, 0) = place;
	    }
	  break;

	case REG_DEAD:
	  /* If the register is used as an input in I3, it dies there.
	     Similarly for I2, if it is non-zero and adjacent to I3.

	     If the register is not used as an input in either I3 or I2
	     and it is not one of the registers we were supposed to eliminate,
	     there are two possibilities.  We might have a non-adjacent I2
	     or we might have somehow eliminated an additional register
	     from a computation.  For example, we might have had A & B where
	     we discover that B will always be zero.  In this case we will
	     eliminate the reference to A.

	     In both cases, we must search to see if we can find a previous
	     use of A and put the death note there.  */

	  if (from_insn
	      && GET_CODE (from_insn) == CALL_INSN
	      && find_reg_fusage (from_insn, USE, XEXP (note, 0)))
	    place = from_insn;
	  else if (reg_referenced_p (XEXP (note, 0), PATTERN (i3)))
	    place = i3;
	  else if (i2 != 0 && next_nonnote_insn (i2) == i3
		   && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
	    place = i2;

	  /* Registers being substituted for never need a new REG_DEAD
	     note; drop the note entirely.  */
	  if (XEXP (note, 0) == elim_i2 || XEXP (note, 0) == elim_i1)
	    break;

	  /* If the register is used in both I2 and I3 and it dies in I3,
	     we might have added another reference to it.  If reg_n_refs
	     was 2, bump it to 3.  This has to be correct since the
	     register must have been set somewhere.  The reason this is
	     done is because local-alloc.c treats 2 references as a
	     special case.  */

	  if (place == i3 && i2 != 0 && GET_CODE (XEXP (note, 0)) == REG
	      && reg_n_refs[REGNO (XEXP (note, 0))]== 2
	      && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
	    reg_n_refs[REGNO (XEXP (note, 0))] = 3;

	  /* No home found yet: search backwards from I3 for a prior use
	     or set of the register, staying within ordinary insns.  */
	  if (place == 0)
	    {
	      for (tem = prev_nonnote_insn (i3);
		   place == 0 && tem
		   && (GET_CODE (tem) == INSN || GET_CODE (tem) == CALL_INSN);
		   tem = prev_nonnote_insn (tem))
		{
		  /* If the register is being set at TEM, see if that is all
		     TEM is doing.  If so, delete TEM.  Otherwise, make this
		     into a REG_UNUSED note instead.  */
		  if (reg_set_p (XEXP (note, 0), PATTERN (tem)))
		    {
		      rtx set = single_set (tem);

		      /* Verify that it was the set, and not a clobber that
			 modified the register.  */

		      if (set != 0 && ! side_effects_p (SET_SRC (set))
			  && (rtx_equal_p (XEXP (note, 0), SET_DEST (set))
			      || (GET_CODE (SET_DEST (set)) == SUBREG
				  && rtx_equal_p (XEXP (note, 0),
						  XEXP (SET_DEST (set), 0)))))
			{
			  /* Move the notes and links of TEM elsewhere.
			     This might delete other dead insns recursively.
			     First set the pattern to something that won't use
			     any register.  */

			  PATTERN (tem) = pc_rtx;

			  distribute_notes (REG_NOTES (tem), tem, tem,
					    NULL_RTX, NULL_RTX, NULL_RTX);
			  distribute_links (LOG_LINKS (tem));

			  PUT_CODE (tem, NOTE);
			  NOTE_LINE_NUMBER (tem) = NOTE_INSN_DELETED;
			  NOTE_SOURCE_FILE (tem) = 0;
			}
		      else
			{
			  PUT_REG_NOTE_KIND (note, REG_UNUSED);

			  /* If there isn't already a REG_UNUSED note, put one
			     here.  */
			  if (! find_regno_note (tem, REG_UNUSED,
						 REGNO (XEXP (note, 0))))
			    place = tem;
			  break;
			}
		    }
		  else if (reg_referenced_p (XEXP (note, 0), PATTERN (tem))
			   || (GET_CODE (tem) == CALL_INSN
			       && find_reg_fusage (tem, USE, XEXP (note, 0))))
		    {
		      place = tem;

		      /* If we are doing a 3->2 combination, and we have a
			 register which formerly died in i3 and was not used
			 by i2, which now no longer dies in i3 and is used in
			 i2 but does not die in i2, and place is between i2
			 and i3, then we may need to move a link from place to
			 i2.  */
		      if (i2 && INSN_UID (place) <= max_uid_cuid
			  && INSN_CUID (place) > INSN_CUID (i2)
			  && from_insn && INSN_CUID (from_insn) > INSN_CUID (i2)
			  && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
			{
			  rtx links = LOG_LINKS (place);
			  LOG_LINKS (place) = 0;
			  distribute_links (links);
			}
		      break;
		    }
		}

	      /* If we haven't found an insn for the death note and it
		 is still a REG_DEAD note, but we have hit a CODE_LABEL,
		 insert a USE insn for the register at that label and
		 put the death node there.  This prevents problems with
		 call-state tracking in caller-save.c.  */
	      if (REG_NOTE_KIND (note) == REG_DEAD && place == 0 && tem != 0)
		{
		  place
		    = emit_insn_after (gen_rtx (USE, VOIDmode, XEXP (note, 0)),
				       tem);

		  /* If this insn was emitted between blocks, then update
		     basic_block_head of the current block to include it.  */
		  if (basic_block_end[this_basic_block - 1] == tem)
		    basic_block_head[this_basic_block] = place;
		}
	    }

	  /* If the register is set or already dead at PLACE, we needn't do
	     anything with this note if it is still a REG_DEAD note.

	     Note that we cannot use just `dead_or_set_p' here since we can
	     convert an assignment to a register into a bit-field assignment.
	     Therefore, we must also omit the note if the register is the
	     target of a bitfield assignment.  */

	  if (place && REG_NOTE_KIND (note) == REG_DEAD)
	    {
	      int regno = REGNO (XEXP (note, 0));

	      if (dead_or_set_p (place, XEXP (note, 0))
		  || reg_bitfield_target_p (XEXP (note, 0), PATTERN (place)))
		{
		  /* Unless the register previously died in PLACE, clear
		     reg_last_death.  [I no longer understand why this is
		     being done.] */
		  if (reg_last_death[regno] != place)
		    reg_last_death[regno] = 0;
		  place = 0;
		}
	      else
		reg_last_death[regno] = place;

	      /* If this is a death note for a hard reg that is occupying
		 multiple registers, ensure that we are still using all
		 parts of the object.  If we find a piece of the object
		 that is unused, we must add a USE for that piece before
		 PLACE and put the appropriate REG_DEAD note on it.

		 An alternative would be to put a REG_UNUSED for the pieces
		 on the insn that set the register, but that can't be done if
		 it is not in the same block.  It is simpler, though less
		 efficient, to add the USE insns.  */

	      if (place && regno < FIRST_PSEUDO_REGISTER
		  && HARD_REGNO_NREGS (regno, GET_MODE (XEXP (note, 0))) > 1)
		{
		  int endregno
		    = regno + HARD_REGNO_NREGS (regno,
						GET_MODE (XEXP (note, 0)));
		  int all_used = 1;
		  int i;

		  for (i = regno; i < endregno; i++)
		    if (! refers_to_regno_p (i, i + 1, PATTERN (place), 0)
			&& ! find_regno_fusage (place, USE, i))
		      {
			rtx piece = gen_rtx (REG, reg_raw_mode[i], i);
			rtx p;

			/* See if we already placed a USE note for this
			   register in front of PLACE.  */
			for (p = place;
			     GET_CODE (PREV_INSN (p)) == INSN
			     && GET_CODE (PATTERN (PREV_INSN (p))) == USE;
			     p = PREV_INSN (p))
			  if (rtx_equal_p (piece,
					   XEXP (PATTERN (PREV_INSN (p)), 0)))
			    {
			      p = 0;
			      break;
			    }

			if (p)
			  {
			    rtx use_insn
			      = emit_insn_before (gen_rtx (USE, VOIDmode,
							   piece),
						  p);
			    REG_NOTES (use_insn)
			      = gen_rtx (EXPR_LIST, REG_DEAD, piece,
					 REG_NOTES (use_insn));
			  }

			all_used = 0;
		      }

		  /* Check for the case where the register dying partially
		     overlaps the register set by this insn.  */
		  if (all_used)
		    for (i = regno; i < endregno; i++)
		      if (dead_or_set_regno_p (place, i))
			{
			  all_used = 0;
			  break;
			}

		  if (! all_used)
		    {
		      /* Put only REG_DEAD notes for pieces that are
			 still used and that are not already dead or set.  */

		      for (i = regno; i < endregno; i++)
			{
			  rtx piece = gen_rtx (REG, reg_raw_mode[i], i);

			  if ((reg_referenced_p (piece, PATTERN (place))
			       || (GET_CODE (place) == CALL_INSN
				   && find_reg_fusage (place, USE, piece)))
			      && ! dead_or_set_p (place, piece)
			      && ! reg_bitfield_target_p (piece,
							  PATTERN (place)))
			    REG_NOTES (place) = gen_rtx (EXPR_LIST, REG_DEAD,
							 piece,
							 REG_NOTES (place));
			}

		      place = 0;
		    }
		}
	    }
	  break;

	default:
	  /* Any other notes should not be present at this point in the
	     compilation.  */
	  abort ();
	}

      /* Attach the note at PLACE, or, if it was dropped, keep the
	 reg_n_deaths count accurate for death-related notes.  */
      if (place)
	{
	  XEXP (note, 1) = REG_NOTES (place);
	  REG_NOTES (place) = note;
	}
      else if ((REG_NOTE_KIND (note) == REG_DEAD
		|| REG_NOTE_KIND (note) == REG_UNUSED)
	       && GET_CODE (XEXP (note, 0)) == REG)
	reg_n_deaths[REGNO (XEXP (note, 0))]--;

      /* A second placement gets a fresh copy of the note (the original
	 EXPR_LIST cell is already linked into PLACE's note chain).  */
      if (place2)
	{
	  if ((REG_NOTE_KIND (note) == REG_DEAD
	       || REG_NOTE_KIND (note) == REG_UNUSED)
	      && GET_CODE (XEXP (note, 0)) == REG)
	    reg_n_deaths[REGNO (XEXP (note, 0))]++;

	  REG_NOTES (place2) = gen_rtx (GET_CODE (note), REG_NOTE_KIND (note),
					XEXP (note, 0), REG_NOTES (place2));
	}
    }
}
11344\f
11345/* Similarly to above, distribute the LOG_LINKS that used to be present on
5089e22e
RS
11346 I3, I2, and I1 to new locations. This is also called in one case to
11347 add a link pointing at I3 when I3's destination is changed. */
230d793d
RS
11348
11349static void
11350distribute_links (links)
11351 rtx links;
11352{
11353 rtx link, next_link;
11354
11355 for (link = links; link; link = next_link)
11356 {
11357 rtx place = 0;
11358 rtx insn;
11359 rtx set, reg;
11360
11361 next_link = XEXP (link, 1);
11362
11363 /* If the insn that this link points to is a NOTE or isn't a single
11364 set, ignore it. In the latter case, it isn't clear what we
11365 can do other than ignore the link, since we can't tell which
11366 register it was for. Such links wouldn't be used by combine
11367 anyway.
11368
11369 It is not possible for the destination of the target of the link to
11370 have been changed by combine. The only potential of this is if we
11371 replace I3, I2, and I1 by I3 and I2. But in that case the
11372 destination of I2 also remains unchanged. */
11373
11374 if (GET_CODE (XEXP (link, 0)) == NOTE
11375 || (set = single_set (XEXP (link, 0))) == 0)
11376 continue;
11377
11378 reg = SET_DEST (set);
11379 while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
11380 || GET_CODE (reg) == SIGN_EXTRACT
11381 || GET_CODE (reg) == STRICT_LOW_PART)
11382 reg = XEXP (reg, 0);
11383
11384 /* A LOG_LINK is defined as being placed on the first insn that uses
11385 a register and points to the insn that sets the register. Start
11386 searching at the next insn after the target of the link and stop
11387 when we reach a set of the register or the end of the basic block.
11388
11389 Note that this correctly handles the link that used to point from
5089e22e 11390 I3 to I2. Also note that not much searching is typically done here
230d793d
RS
11391 since most links don't point very far away. */
11392
11393 for (insn = NEXT_INSN (XEXP (link, 0));
0d4d42c3
RK
11394 (insn && (this_basic_block == n_basic_blocks - 1
11395 || basic_block_head[this_basic_block + 1] != insn));
230d793d
RS
11396 insn = NEXT_INSN (insn))
11397 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
11398 && reg_overlap_mentioned_p (reg, PATTERN (insn)))
11399 {
11400 if (reg_referenced_p (reg, PATTERN (insn)))
11401 place = insn;
11402 break;
11403 }
6e2d1486
RK
11404 else if (GET_CODE (insn) == CALL_INSN
11405 && find_reg_fusage (insn, USE, reg))
11406 {
11407 place = insn;
11408 break;
11409 }
230d793d
RS
11410
11411 /* If we found a place to put the link, place it there unless there
11412 is already a link to the same insn as LINK at that point. */
11413
11414 if (place)
11415 {
11416 rtx link2;
11417
11418 for (link2 = LOG_LINKS (place); link2; link2 = XEXP (link2, 1))
11419 if (XEXP (link2, 0) == XEXP (link, 0))
11420 break;
11421
11422 if (link2 == 0)
11423 {
11424 XEXP (link, 1) = LOG_LINKS (place);
11425 LOG_LINKS (place) = link;
abe6e52f
RK
11426
11427 /* Set added_links_insn to the earliest insn we added a
11428 link to. */
11429 if (added_links_insn == 0
11430 || INSN_CUID (added_links_insn) > INSN_CUID (place))
11431 added_links_insn = place;
230d793d
RS
11432 }
11433 }
11434 }
11435}
11436\f
1427d6d2
RK
11437/* Compute INSN_CUID for INSN, which is an insn made by combine. */
11438
11439static int
11440insn_cuid (insn)
11441 rtx insn;
11442{
11443 while (insn != 0 && INSN_UID (insn) > max_uid_cuid
11444 && GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == USE)
11445 insn = NEXT_INSN (insn);
11446
11447 if (INSN_UID (insn) > max_uid_cuid)
11448 abort ();
11449
11450 return INSN_CUID (insn);
11451}
11452\f
230d793d
RS
11453void
11454dump_combine_stats (file)
11455 FILE *file;
11456{
11457 fprintf
11458 (file,
11459 ";; Combiner statistics: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n\n",
11460 combine_attempts, combine_merges, combine_extras, combine_successes);
11461}
11462
11463void
11464dump_combine_total_stats (file)
11465 FILE *file;
11466{
11467 fprintf
11468 (file,
11469 "\n;; Combiner totals: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n",
11470 total_attempts, total_merges, total_extras, total_successes);
11471}
This page took 1.961635 seconds and 5 git commands to generate.