/* Optimize by combining instructions for GNU compiler.
   Copyright (C) 1987, 88, 92-97, 1998 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */

/* This module is essentially the "combiner" phase of the U. of Arizona
   Portable Optimizer, but redone to work on our list-structured
   representation for RTL instead of their string representation.

   The LOG_LINKS of each insn identify the most recent assignment
   to each REG used in the insn.  It is a list of previous insns,
   each of which contains a SET for a REG that is used in this insn
   and not used or set in between.  LOG_LINKs never cross basic blocks.
   They were set up by the preceding pass (lifetime analysis).

   We try to combine each pair of insns joined by a logical link.
   We also try to combine triples of insns A, B and C when
   C has a link back to B and B has a link back to A.

   LOG_LINKS entries are not made for uses of CC0.  They are not
   needed, because the insn that sets CC0 is always immediately
   before the insn that tests it.  So we always regard a branch
   insn as having a logical link to the preceding insn.  The same is true
   for an insn explicitly using CC0.

   We check (with use_crosses_set_p) to avoid combining in such a way
   as to move a computation to a place where its value would be different.

   Combination is done by mathematically substituting the previous
   insn(s) values for the regs they set into the expressions in
   the later insns that refer to these regs.  If the result is a valid insn
   for our target machine, according to the machine description,
   we install it, delete the earlier insns, and update the data flow
   information (LOG_LINKS and REG_NOTES) for what we did.

   There are a few exceptions where the dataflow information created by
   flow.c isn't completely updated:

   - reg_live_length is not updated
   - reg_n_refs is not adjusted in the rare case when a register is
     no longer required in a computation
   - there are extremely rare cases (see distribute_regnotes) when a
     REG_DEAD note is lost
   - a LOG_LINKS entry that refers to an insn with multiple SETs may be
     removed because there is no way to know which register it was
     linking

   To simplify substitution, we combine only when the earlier insn(s)
   consist of only a single assignment.  To simplify updating afterward,
   we never combine when a subroutine call appears in the middle.

   Since we do not represent assignments to CC0 explicitly except when that
   is all an insn does, there is no LOG_LINKS entry in an insn that uses
   the condition code for the insn that set the condition code.
   Fortunately, these two insns must be consecutive.
   Therefore, every JUMP_INSN is taken to have an implicit logical link
   to the preceding insn.  This is not quite right, since non-jumps can
   also use the condition code; but in practice such insns would not
   combine anyway.  */
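
/* As a purely illustrative sketch (insns and register numbers invented,
   not taken from any particular machine description): given the linked
   pair

	(set (reg:SI 60) (mult:SI (reg:SI 58) (const_int 4)))
	(set (reg:SI 61) (plus:SI (reg:SI 60) (reg:SI 59)))

   substituting the first SET's source into the second yields

	(set (reg:SI 61) (plus:SI (mult:SI (reg:SI 58) (const_int 4))
				  (reg:SI 59)))

   which replaces both insns only if the md recognizes the combined
   pattern, say as a shift-and-add instruction.  */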

#include "config.h"
#ifdef __STDC__
#include <stdarg.h>
#else
#include <varargs.h>
#endif

/* stdio.h must precede rtl.h for FFS.  */
#include "system.h"

#include "rtl.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "expr.h"
#include "basic-block.h"
#include "insn-config.h"
#include "insn-flags.h"
#include "insn-codes.h"
#include "insn-attr.h"
#include "recog.h"
#include "real.h"

/* It is not safe to use ordinary gen_lowpart in combine.
   Use gen_lowpart_for_combine instead.  See comments there.  */
#define gen_lowpart dont_use_gen_lowpart_you_dummy

/* Number of attempts to combine instructions in this function.  */

static int combine_attempts;

/* Number of attempts that got as far as substitution in this function.  */

static int combine_merges;

/* Number of instructions combined with added SETs in this function.  */

static int combine_extras;

/* Number of instructions combined in this function.  */

static int combine_successes;

/* Totals over entire compilation.  */

static int total_attempts, total_merges, total_extras, total_successes;

/* Define a default value for REVERSIBLE_CC_MODE.
   We can never assume that a condition code mode is safe to reverse unless
   the md tells us so.  */
#ifndef REVERSIBLE_CC_MODE
#define REVERSIBLE_CC_MODE(MODE) 0
#endif
\f
/* Vector mapping INSN_UIDs to cuids.
   The cuids are like uids but increase monotonically always.
   Combine always uses cuids so that it can compare them.
   But actually renumbering the uids, which we used to do,
   proves to be a bad idea because it makes it hard to compare
   the dumps produced by earlier passes with those from later passes.  */

static int *uid_cuid;
static int max_uid_cuid;

/* Get the cuid of an insn.  */

#define INSN_CUID(INSN) \
(INSN_UID (INSN) > max_uid_cuid ? insn_cuid (INSN) : uid_cuid[INSN_UID (INSN)])

/* Maximum register number, which is the size of the tables below.  */

static int combine_max_regno;

/* Record last point of death of (hard or pseudo) register n.  */

static rtx *reg_last_death;

/* Record last point of modification of (hard or pseudo) register n.  */

static rtx *reg_last_set;

/* Record the cuid of the last insn that invalidated memory
   (anything that writes memory, and subroutine calls, but not pushes).  */

static int mem_last_set;

/* Record the cuid of the last CALL_INSN
   so we can tell whether a potential combination crosses any calls.  */

static int last_call_cuid;

/* When `subst' is called, this is the insn that is being modified
   (by combining in a previous insn).  The PATTERN of this insn
   is still the old pattern partially modified and it should not be
   looked at, but this may be used to examine the successors of the insn
   to judge whether a simplification is valid.  */

static rtx subst_insn;

/* This is an insn that belongs before subst_insn, but is not currently
   on the insn chain.  */

static rtx subst_prev_insn;

/* This is the lowest CUID that `subst' is currently dealing with.
   get_last_value will not return a value if the register was set at or
   after this CUID.  If not for this mechanism, we could get confused if
   I2 or I1 in try_combine were an insn that used the old value of a register
   to obtain a new value.  In that case, we might erroneously get the
   new value of the register when we wanted the old one.  */

static int subst_low_cuid;

/* This contains any hard registers that are used in newpat; reg_dead_at_p
   must consider all these registers to be always live.  */

static HARD_REG_SET newpat_used_regs;

/* This is an insn to which a LOG_LINKS entry has been added.  If this
   insn is earlier than I2 or I3, combine should rescan starting at
   that location.  */

static rtx added_links_insn;

/* Basic block number of the block in which we are performing combines.  */
static int this_basic_block;
\f
/* The next group of arrays allows the recording of the last value assigned
   to (hard or pseudo) register n.  We use this information to see if an
   operation being processed is redundant given a prior operation performed
   on the register.  For example, an `and' with a constant is redundant if
   all the zero bits are already known to be turned off.

   We use an approach similar to that used by cse, but change it in the
   following ways:

   (1) We do not want to reinitialize at each label.
   (2) It is useful, but not critical, to know the actual value assigned
       to a register.  Often just its form is helpful.

   Therefore, we maintain the following arrays:

   reg_last_set_value		the last value assigned
   reg_last_set_label		records the value of label_tick when the
				register was assigned
   reg_last_set_table_tick	records the value of label_tick when a
				value using the register is assigned
   reg_last_set_invalid		set to non-zero when it is not valid
				to use the value of this register in some
				register's value

   To understand the usage of these tables, it is important to understand
   the distinction between the value in reg_last_set_value being valid
   and the register being validly contained in some other expression in the
   table.

   Entry I in reg_last_set_value is valid if it is non-zero, and either
   reg_n_sets[i] is 1 or reg_last_set_label[i] == label_tick.

   Register I may validly appear in any expression returned for the value
   of another register if reg_n_sets[i] is 1.  It may also appear in the
   value for register J if reg_last_set_label[i] < reg_last_set_label[j] or
   reg_last_set_invalid[j] is zero.

   If an expression is found in the table containing a register which may
   not validly appear in an expression, the register is replaced by
   something that won't match, (clobber (const_int 0)).

   reg_last_set_invalid[i] is set non-zero when register I is being assigned
   to and reg_last_set_table_tick[i] == label_tick.  */
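
/* For instance (an illustrative sketch with invented register numbers):
   once

	(set (reg:SI 65) (zero_extend:SI (mem:QI (reg:SI 64))))

   has been recorded as the value of register 65, a later

	(and:SI (reg:SI 65) (const_int 255))

   can be simplified to just (reg:SI 65), since the recorded form shows
   that every bit the AND would clear is already zero.  */
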
247
0f41302f 248/* Record last value assigned to (hard or pseudo) register n. */
230d793d
RS
249
250static rtx *reg_last_set_value;
251
252/* Record the value of label_tick when the value for register n is placed in
253 reg_last_set_value[n]. */
254
568356af 255static int *reg_last_set_label;
230d793d
RS
256
257/* Record the value of label_tick when an expression involving register n
0f41302f 258 is placed in reg_last_set_value. */
230d793d 259
568356af 260static int *reg_last_set_table_tick;
230d793d
RS
261
262/* Set non-zero if references to register n in expressions should not be
263 used. */
264
265static char *reg_last_set_invalid;
266
0f41302f 267/* Incremented for each label. */
230d793d 268
568356af 269static int label_tick;
230d793d
RS
270
271/* Some registers that are set more than once and used in more than one
272 basic block are nevertheless always set in similar ways. For example,
273 a QImode register may be loaded from memory in two places on a machine
274 where byte loads zero extend.
275
951553af 276 We record in the following array what we know about the nonzero
230d793d
RS
277 bits of a register, specifically which bits are known to be zero.
278
279 If an entry is zero, it means that we don't know anything special. */
280
55310dad 281static unsigned HOST_WIDE_INT *reg_nonzero_bits;
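
/* Sketching the example above (register number and modes invented): if
   every SET of (reg:SI 70) in the function has the form

	(set (reg:SI 70) (zero_extend:SI (mem:QI ...)))

   then the entry for register 70 would be 0xff: only the low 8 bits can
   ever be nonzero, whichever SET reached a given use.  */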

/* Mode used to compute significance in reg_nonzero_bits.  It is the largest
   integer mode that can fit in HOST_BITS_PER_WIDE_INT.  */

static enum machine_mode nonzero_bits_mode;

/* Nonzero if we know that a register has some leading bits that are always
   equal to the sign bit.  */

static char *reg_sign_bit_copies;

/* Nonzero when reg_nonzero_bits and reg_sign_bit_copies can be safely used.
   It is zero while computing them and after combine has completed.  This
   former test prevents propagating values based on previously set values,
   which can be incorrect if a variable is modified in a loop.  */

static int nonzero_sign_valid;

/* These arrays are maintained in parallel with reg_last_set_value
   and are used to store the mode in which the register was last set,
   the bits that were known to be zero when it was last set, and the
   number of sign bit copies it was known to have when it was last set.  */

static enum machine_mode *reg_last_set_mode;
static unsigned HOST_WIDE_INT *reg_last_set_nonzero_bits;
static char *reg_last_set_sign_bit_copies;
\f
/* Record one modification to rtl structure
   to be undone by storing old_contents into *where.
   is_int is 1 if the contents are an int.  */

struct undo
{
  struct undo *next;
  int is_int;
  union {rtx r; int i;} old_contents;
  union {rtx *r; int *i;} where;
};

/* Record a bunch of changes to be undone, chained through the undos
   field below.

   storage is nonzero if we must undo the allocation of new storage.
   The value of storage is what to pass to obfree.

   other_insn is nonzero if we have modified some other insn in the process
   of working on subst_insn.  It must be verified too.

   previous_undos is the value of undobuf.undos when we started processing
   this substitution.  This will prevent gen_rtx_combine from re-using a
   piece from the previous expression.  Doing so can produce circular rtl
   structures.  */

struct undobuf
{
  char *storage;
  struct undo *undos;
  struct undo *frees;
  struct undo *previous_undos;
  rtx other_insn;
};

static struct undobuf undobuf;

/* Substitute NEWVAL, an rtx expression, into INTO, a place in some
   insn.  The substitution can be undone by undo_all.  If INTO is already
   set to NEWVAL, do not record this change.  Because computing NEWVAL might
   also call SUBST, we have to compute it before we put anything into
   the undo table.  */

#define SUBST(INTO, NEWVAL)  \
 do { rtx _new = (NEWVAL);					\
      struct undo *_buf;					\
								\
      if (undobuf.frees)					\
	_buf = undobuf.frees, undobuf.frees = _buf->next;	\
      else							\
	_buf = (struct undo *) xmalloc (sizeof (struct undo));	\
								\
      _buf->is_int = 0;						\
      _buf->where.r = &INTO;					\
      _buf->old_contents.r = INTO;				\
      INTO = _new;						\
      if (_buf->old_contents.r == INTO)				\
	_buf->next = undobuf.frees, undobuf.frees = _buf;	\
      else							\
	_buf->next = undobuf.undos, undobuf.undos = _buf;	\
     } while (0)
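
/* A typical use, as seen later in this file, edits one operand of an insn
   pattern in place while recording the old contents so undo_all can
   restore them:

	SUBST (SET_DEST (XVECEXP (p2, 0, i)), SET_DEST (PATTERN (i3)));

   (Sketch only; see try_combine for the surrounding context.)  */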

/* Similar to SUBST, but NEWVAL is an int expression.  Note that substitution
   for the value of a HOST_WIDE_INT value (including CONST_INT) is not
   safe: the undo record stores the old contents in a plain int, which
   would truncate it on hosts where HOST_WIDE_INT is wider than int.  */

#define SUBST_INT(INTO, NEWVAL)  \
 do { struct undo *_buf;					\
								\
      if (undobuf.frees)					\
	_buf = undobuf.frees, undobuf.frees = _buf->next;	\
      else							\
	_buf = (struct undo *) xmalloc (sizeof (struct undo));	\
								\
      _buf->is_int = 1;						\
      _buf->where.i = (int *) &INTO;				\
      _buf->old_contents.i = INTO;				\
      INTO = NEWVAL;						\
      if (_buf->old_contents.i == INTO)				\
	_buf->next = undobuf.frees, undobuf.frees = _buf;	\
      else							\
	_buf->next = undobuf.undos, undobuf.undos = _buf;	\
     } while (0)

/* Number of times the pseudo being substituted for
   was found and replaced.  */

static int n_occurrences;

static void init_reg_last_arrays	PROTO((void));
static void setup_incoming_promotions	PROTO((void));
static void set_nonzero_bits_and_sign_copies	PROTO((rtx, rtx));
static int can_combine_p	PROTO((rtx, rtx, rtx, rtx, rtx *, rtx *));
static int combinable_i3pat	PROTO((rtx, rtx *, rtx, rtx, int, rtx *));
static rtx try_combine		PROTO((rtx, rtx, rtx));
static void undo_all		PROTO((void));
static rtx *find_split_point	PROTO((rtx *, rtx));
static rtx subst		PROTO((rtx, rtx, rtx, int, int));
static rtx simplify_rtx		PROTO((rtx, enum machine_mode, int, int));
static rtx simplify_if_then_else	PROTO((rtx));
static rtx simplify_set		PROTO((rtx));
static rtx simplify_logical	PROTO((rtx, int));
static rtx expand_compound_operation	PROTO((rtx));
static rtx expand_field_assignment	PROTO((rtx));
static rtx make_extraction	PROTO((enum machine_mode, rtx, int, rtx, int,
				       int, int, int));
static rtx extract_left_shift	PROTO((rtx, int));
static rtx make_compound_operation	PROTO((rtx, enum rtx_code));
static int get_pos_from_mask	PROTO((unsigned HOST_WIDE_INT, int *));
static rtx force_to_mode	PROTO((rtx, enum machine_mode,
				       unsigned HOST_WIDE_INT, rtx, int));
static rtx if_then_else_cond	PROTO((rtx, rtx *, rtx *));
static rtx known_cond		PROTO((rtx, enum rtx_code, rtx, rtx));
static int rtx_equal_for_field_assignment_p PROTO((rtx, rtx));
static rtx make_field_assignment	PROTO((rtx));
static rtx apply_distributive_law	PROTO((rtx));
static rtx simplify_and_const_int	PROTO((rtx, enum machine_mode, rtx,
					       unsigned HOST_WIDE_INT));
static unsigned HOST_WIDE_INT nonzero_bits	PROTO((rtx, enum machine_mode));
static int num_sign_bit_copies	PROTO((rtx, enum machine_mode));
static int merge_outer_ops	PROTO((enum rtx_code *, HOST_WIDE_INT *,
				       enum rtx_code, HOST_WIDE_INT,
				       enum machine_mode, int *));
static rtx simplify_shift_const	PROTO((rtx, enum rtx_code, enum machine_mode,
				       rtx, int));
static int recog_for_combine	PROTO((rtx *, rtx, rtx *, int *));
static rtx gen_lowpart_for_combine	PROTO((enum machine_mode, rtx));
static rtx gen_rtx_combine PVPROTO((enum rtx_code code, enum machine_mode mode,
				    ...));
static rtx gen_binary		PROTO((enum rtx_code, enum machine_mode,
				       rtx, rtx));
static rtx gen_unary		PROTO((enum rtx_code, enum machine_mode,
				       enum machine_mode, rtx));
static enum rtx_code simplify_comparison	PROTO((enum rtx_code, rtx *, rtx *));
static int reversible_comparison_p	PROTO((rtx));
static void update_table_tick	PROTO((rtx));
static void record_value_for_reg	PROTO((rtx, rtx, rtx));
static void record_dead_and_set_regs_1	PROTO((rtx, rtx));
static void record_dead_and_set_regs	PROTO((rtx));
static int get_last_value_validate	PROTO((rtx *, rtx, int, int));
static rtx get_last_value	PROTO((rtx));
static int use_crosses_set_p	PROTO((rtx, int));
static void reg_dead_at_p_1	PROTO((rtx, rtx));
static int reg_dead_at_p	PROTO((rtx, rtx));
static void move_deaths		PROTO((rtx, rtx, int, rtx, rtx *));
static int reg_bitfield_target_p	PROTO((rtx, rtx));
static void distribute_notes	PROTO((rtx, rtx, rtx, rtx, rtx, rtx));
static void distribute_links	PROTO((rtx));
static void mark_used_regs_combine	PROTO((rtx));
static int insn_cuid		PROTO((rtx));
\f
/* Main entry point for combiner.  F is the first insn of the function.
   NREGS is the first unused pseudo-reg number.  */

void
combine_instructions (f, nregs)
     rtx f;
     int nregs;
{
  register rtx insn, next;
#ifdef HAVE_cc0
  register rtx prev;
#endif
  register int i;
  register rtx links, nextlinks;

  combine_attempts = 0;
  combine_merges = 0;
  combine_extras = 0;
  combine_successes = 0;
  undobuf.undos = undobuf.previous_undos = 0;

  combine_max_regno = nregs;

  reg_nonzero_bits
    = (unsigned HOST_WIDE_INT *) alloca (nregs * sizeof (HOST_WIDE_INT));
  reg_sign_bit_copies = (char *) alloca (nregs * sizeof (char));

  bzero ((char *) reg_nonzero_bits, nregs * sizeof (HOST_WIDE_INT));
  bzero (reg_sign_bit_copies, nregs * sizeof (char));

  reg_last_death = (rtx *) alloca (nregs * sizeof (rtx));
  reg_last_set = (rtx *) alloca (nregs * sizeof (rtx));
  reg_last_set_value = (rtx *) alloca (nregs * sizeof (rtx));
  reg_last_set_table_tick = (int *) alloca (nregs * sizeof (int));
  reg_last_set_label = (int *) alloca (nregs * sizeof (int));
  reg_last_set_invalid = (char *) alloca (nregs * sizeof (char));
  reg_last_set_mode
    = (enum machine_mode *) alloca (nregs * sizeof (enum machine_mode));
  reg_last_set_nonzero_bits
    = (unsigned HOST_WIDE_INT *) alloca (nregs * sizeof (HOST_WIDE_INT));
  reg_last_set_sign_bit_copies
    = (char *) alloca (nregs * sizeof (char));

  init_reg_last_arrays ();

  init_recog_no_volatile ();

  /* Compute maximum uid value so uid_cuid can be allocated.  */

  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    if (INSN_UID (insn) > i)
      i = INSN_UID (insn);

  uid_cuid = (int *) alloca ((i + 1) * sizeof (int));
  max_uid_cuid = i;

  nonzero_bits_mode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);

  /* Don't use reg_nonzero_bits when computing it.  This can cause problems
     when, for example, we have j <<= 1 in a loop.  */

  nonzero_sign_valid = 0;

  /* Compute the mapping from uids to cuids.
     Cuids are numbers assigned to insns, like uids,
     except that cuids increase monotonically through the code.

     Scan all SETs and see if we can deduce anything about what
     bits are known to be zero for some registers and how many copies
     of the sign bit are known to exist for those registers.

     Also set any known values so that we can use them while searching
     for what bits are known to be set.  */

  label_tick = 1;

  /* We need to initialize it here, because record_dead_and_set_regs may call
     get_last_value.  */
  subst_prev_insn = NULL_RTX;

  setup_incoming_promotions ();

  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    {
      uid_cuid[INSN_UID (insn)] = ++i;
      subst_low_cuid = i;
      subst_insn = insn;

      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	{
	  note_stores (PATTERN (insn), set_nonzero_bits_and_sign_copies);
	  record_dead_and_set_regs (insn);

#ifdef AUTO_INC_DEC
	  for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
	    if (REG_NOTE_KIND (links) == REG_INC)
	      set_nonzero_bits_and_sign_copies (XEXP (links, 0), NULL_RTX);
#endif
	}

      if (GET_CODE (insn) == CODE_LABEL)
	label_tick++;
    }

  nonzero_sign_valid = 1;

  /* Now scan all the insns in forward order.  */

  this_basic_block = -1;
  label_tick = 1;
  last_call_cuid = 0;
  mem_last_set = 0;
  init_reg_last_arrays ();
  setup_incoming_promotions ();

  for (insn = f; insn; insn = next ? next : NEXT_INSN (insn))
    {
      next = 0;

      /* If INSN starts a new basic block, update our basic block number.  */
      if (this_basic_block + 1 < n_basic_blocks
	  && basic_block_head[this_basic_block + 1] == insn)
	this_basic_block++;

      if (GET_CODE (insn) == CODE_LABEL)
	label_tick++;

      else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	{
	  /* Try this insn with each insn it links back to.  */

	  for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
	    if ((next = try_combine (insn, XEXP (links, 0), NULL_RTX)) != 0)
	      goto retry;

	  /* Try each sequence of three linked insns ending with this one.  */

	  for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
	    for (nextlinks = LOG_LINKS (XEXP (links, 0)); nextlinks;
		 nextlinks = XEXP (nextlinks, 1))
	      if ((next = try_combine (insn, XEXP (links, 0),
				       XEXP (nextlinks, 0))) != 0)
		goto retry;

#ifdef HAVE_cc0
	  /* Try to combine a jump insn that uses CC0
	     with a preceding insn that sets CC0, and maybe with its
	     logical predecessor as well.
	     This is how we make decrement-and-branch insns.
	     We need this special code because data flow connections
	     via CC0 do not get entered in LOG_LINKS.  */

	  if (GET_CODE (insn) == JUMP_INSN
	      && (prev = prev_nonnote_insn (insn)) != 0
	      && GET_CODE (prev) == INSN
	      && sets_cc0_p (PATTERN (prev)))
	    {
	      if ((next = try_combine (insn, prev, NULL_RTX)) != 0)
		goto retry;

	      for (nextlinks = LOG_LINKS (prev); nextlinks;
		   nextlinks = XEXP (nextlinks, 1))
		if ((next = try_combine (insn, prev,
					 XEXP (nextlinks, 0))) != 0)
		  goto retry;
	    }

	  /* Do the same for an insn that explicitly references CC0.  */
	  if (GET_CODE (insn) == INSN
	      && (prev = prev_nonnote_insn (insn)) != 0
	      && GET_CODE (prev) == INSN
	      && sets_cc0_p (PATTERN (prev))
	      && GET_CODE (PATTERN (insn)) == SET
	      && reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (insn))))
	    {
	      if ((next = try_combine (insn, prev, NULL_RTX)) != 0)
		goto retry;

	      for (nextlinks = LOG_LINKS (prev); nextlinks;
		   nextlinks = XEXP (nextlinks, 1))
		if ((next = try_combine (insn, prev,
					 XEXP (nextlinks, 0))) != 0)
		  goto retry;
	    }

	  /* Finally, see if any of the insns that this insn links to
	     explicitly references CC0.  If so, try this insn, that insn,
	     and its predecessor if it sets CC0.  */
	  for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
	    if (GET_CODE (XEXP (links, 0)) == INSN
		&& GET_CODE (PATTERN (XEXP (links, 0))) == SET
		&& reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (XEXP (links, 0))))
		&& (prev = prev_nonnote_insn (XEXP (links, 0))) != 0
		&& GET_CODE (prev) == INSN
		&& sets_cc0_p (PATTERN (prev))
		&& (next = try_combine (insn, XEXP (links, 0), prev)) != 0)
	      goto retry;
#endif

	  /* Try combining an insn with two different insns whose results it
	     uses.  */
	  for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
	    for (nextlinks = XEXP (links, 1); nextlinks;
		 nextlinks = XEXP (nextlinks, 1))
	      if ((next = try_combine (insn, XEXP (links, 0),
				       XEXP (nextlinks, 0))) != 0)
		goto retry;

	  if (GET_CODE (insn) != NOTE)
	    record_dead_and_set_regs (insn);

	retry:
	  ;
	}
    }

  total_attempts += combine_attempts;
  total_merges += combine_merges;
  total_extras += combine_extras;
  total_successes += combine_successes;

  nonzero_sign_valid = 0;
}

/* Wipe the reg_last_xxx arrays in preparation for another pass.  */

static void
init_reg_last_arrays ()
{
  int nregs = combine_max_regno;

  bzero ((char *) reg_last_death, nregs * sizeof (rtx));
  bzero ((char *) reg_last_set, nregs * sizeof (rtx));
  bzero ((char *) reg_last_set_value, nregs * sizeof (rtx));
  bzero ((char *) reg_last_set_table_tick, nregs * sizeof (int));
  bzero ((char *) reg_last_set_label, nregs * sizeof (int));
  bzero (reg_last_set_invalid, nregs * sizeof (char));
  bzero ((char *) reg_last_set_mode, nregs * sizeof (enum machine_mode));
  bzero ((char *) reg_last_set_nonzero_bits, nregs * sizeof (HOST_WIDE_INT));
  bzero (reg_last_set_sign_bit_copies, nregs * sizeof (char));
}
\f
/* Set up any promoted values for incoming argument registers.  */

static void
setup_incoming_promotions ()
{
#ifdef PROMOTE_FUNCTION_ARGS
  int regno;
  rtx reg;
  enum machine_mode mode;
  int unsignedp;
  rtx first = get_insns ();

  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if (FUNCTION_ARG_REGNO_P (regno)
	&& (reg = promoted_input_arg (regno, &mode, &unsignedp)) != 0)
      {
	record_value_for_reg
	  (reg, first, gen_rtx_fmt_e ((unsignedp ? ZERO_EXTEND
				       : SIGN_EXTEND),
				      GET_MODE (reg),
				      gen_rtx_CLOBBER (mode, const0_rtx)));
      }
#endif
}
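
/* For illustration (modes invented): on a target that promotes QImode
   arguments to SImode, the loop above records for an incoming argument
   register a value of the form

	(zero_extend:SI (clobber:QI (const_int 0)))

   i.e. "some unknown QImode value, zero-extended", which later lets
   nonzero_bits and num_sign_bit_copies take advantage of the promotion.  */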
\f
/* Called via note_stores.  If X is a pseudo that is narrower than
   HOST_BITS_PER_WIDE_INT and is being set, record what bits are known zero.

   If we are setting only a portion of X and we can't figure out what
   portion, assume all bits will be used since we don't know what will
   be happening.

   Similarly, set how many bits of X are known to be copies of the sign bit
   at all locations in the function.  This is the smallest number implied
   by any set of X.  */

static void
set_nonzero_bits_and_sign_copies (x, set)
     rtx x;
     rtx set;
{
  int num;

  if (GET_CODE (x) == REG
      && REGNO (x) >= FIRST_PSEUDO_REGISTER
      /* If this register is undefined at the start of the file, we can't
	 say what its contents were.  */
      && ! REGNO_REG_SET_P (basic_block_live_at_start[0], REGNO (x))
      && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (set == 0 || GET_CODE (set) == CLOBBER)
	{
	  reg_nonzero_bits[REGNO (x)] = GET_MODE_MASK (GET_MODE (x));
	  reg_sign_bit_copies[REGNO (x)] = 1;
	  return;
	}

      /* If this is a complex assignment, see if we can convert it into a
	 simple assignment.  */
      set = expand_field_assignment (set);

      /* If this is a simple assignment, or we have a paradoxical SUBREG,
	 set what we know about X.  */

      if (SET_DEST (set) == x
	  || (GET_CODE (SET_DEST (set)) == SUBREG
	      && (GET_MODE_SIZE (GET_MODE (SET_DEST (set)))
		  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (set)))))
	      && SUBREG_REG (SET_DEST (set)) == x))
	{
	  rtx src = SET_SRC (set);

#ifdef SHORT_IMMEDIATES_SIGN_EXTEND
	  /* If X is narrower than a word and SRC is a non-negative
	     constant that would appear negative in the mode of X,
	     sign-extend it for use in reg_nonzero_bits because some
	     machines (maybe most) will actually do the sign-extension
	     and this is the conservative approach.

	     ??? For 2.5, try to tighten up the MD files in this regard
	     instead of this kludge.  */

	  if (GET_MODE_BITSIZE (GET_MODE (x)) < BITS_PER_WORD
	      && GET_CODE (src) == CONST_INT
	      && INTVAL (src) > 0
	      && 0 != (INTVAL (src)
		       & ((HOST_WIDE_INT) 1
			  << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
	    src = GEN_INT (INTVAL (src)
			   | ((HOST_WIDE_INT) (-1)
			      << GET_MODE_BITSIZE (GET_MODE (x))));
#endif

	  reg_nonzero_bits[REGNO (x)]
	    |= nonzero_bits (src, nonzero_bits_mode);
	  num = num_sign_bit_copies (SET_SRC (set), GET_MODE (x));
	  if (reg_sign_bit_copies[REGNO (x)] == 0
	      || reg_sign_bit_copies[REGNO (x)] > num)
	    reg_sign_bit_copies[REGNO (x)] = num;
	}
      else
	{
	  reg_nonzero_bits[REGNO (x)] = GET_MODE_MASK (GET_MODE (x));
	  reg_sign_bit_copies[REGNO (x)] = 1;
	}
    }
}
\f
/* See if INSN can be combined into I3.  PRED and SUCC are optionally
   insns that were previously combined into I3 or that will be combined
   into the merger of INSN and I3.

   Return 0 if the combination is not allowed for any reason.

   If the combination is allowed, *PDEST will be set to the single
   destination of INSN and *PSRC to the single source, and this function
   will return 1.  */

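/* For example, try_combine (later in this file) validates a candidate
   three-insn combination with the pair of calls

	can_combine_p (i2, i3, i1, NULL_RTX, &i2dest, &i2src)
	can_combine_p (i1, i3, NULL_RTX, i2, &i1dest, &i1src)

   asking first whether I2 can be merged into I3 (with I1 as PRED),
   then whether I1 can (with I2 as SUCC).  */
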
static int
can_combine_p (insn, i3, pred, succ, pdest, psrc)
     rtx insn;
     rtx i3;
     rtx pred, succ;
     rtx *pdest, *psrc;
{
  int i;
  rtx set = 0, src, dest;
  rtx p;
#ifdef AUTO_INC_DEC
  rtx link;
#endif
  int all_adjacent = (succ ? (next_active_insn (insn) == succ
			      && next_active_insn (succ) == i3)
		      : next_active_insn (insn) == i3);

  /* Can combine only if previous insn is a SET of a REG, a SUBREG or CC0,
     or a PARALLEL consisting of such a SET and CLOBBERs.

     If INSN has CLOBBER parallel parts, ignore them for our processing.
     By definition, these happen during the execution of the insn.  When it
     is merged with another insn, all bets are off.  If they are, in fact,
     needed and aren't also supplied in I3, they may be added by
     recog_for_combine.  Otherwise, it won't match.

     We can also ignore a SET whose SET_DEST is mentioned in a REG_UNUSED
     note.

     Get the source and destination of INSN.  If more than one, can't
     combine.  */

  if (GET_CODE (PATTERN (insn)) == SET)
    set = PATTERN (insn);
  else if (GET_CODE (PATTERN (insn)) == PARALLEL
	   && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
    {
      for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
	{
	  rtx elt = XVECEXP (PATTERN (insn), 0, i);

	  switch (GET_CODE (elt))
	    {
	    /* This is important to combine floating point insns
	       for the SH4 port.  */
	    case USE:
	      /* Combining an isolated USE doesn't make sense.
		 We depend here on combinable_i3pat to reject them.  */
	      /* The code below this loop only verifies that the inputs of
		 the SET in INSN do not change.  We call reg_set_between_p
		 to verify that the REG in the USE does not change between
		 I3 and INSN.
		 If the USE in INSN was for a pseudo register, the matching
		 insn pattern will likely match any register; combining this
		 with any other USE would only be safe if we knew that the
		 used registers have identical values, or if there was
		 something to tell them apart, e.g. different modes.  For
		 now, we forgo such complicated tests and simply disallow
		 combining of USEs of pseudo registers with any other USE.  */
	      if (GET_CODE (XEXP (elt, 0)) == REG
		  && GET_CODE (PATTERN (i3)) == PARALLEL)
		{
		  rtx i3pat = PATTERN (i3);
		  int i = XVECLEN (i3pat, 0) - 1;
		  int regno = REGNO (XEXP (elt, 0));
		  do
		    {
		      rtx i3elt = XVECEXP (i3pat, 0, i);
		      if (GET_CODE (i3elt) == USE
			  && GET_CODE (XEXP (i3elt, 0)) == REG
			  && (REGNO (XEXP (i3elt, 0)) == regno
			      ? reg_set_between_p (XEXP (elt, 0),
						   PREV_INSN (insn), i3)
			      : regno >= FIRST_PSEUDO_REGISTER))
			return 0;
		    }
		  while (--i >= 0);
		}
	      break;

	    /* We can ignore CLOBBERs.  */
	    case CLOBBER:
	      break;

	    case SET:
	      /* Ignore SETs whose result isn't used but not those that
		 have side-effects.  */
	      if (find_reg_note (insn, REG_UNUSED, SET_DEST (elt))
		  && ! side_effects_p (elt))
		break;

	      /* If we have already found a SET, this is a second one and
		 so we cannot combine with this insn.  */
	      if (set)
		return 0;

	      set = elt;
	      break;

	    default:
	      /* Anything else means we can't combine.  */
	      return 0;
	    }
	}

      if (set == 0
	  /* If SET_SRC is an ASM_OPERANDS we can't throw away these CLOBBERs,
	     so don't do anything with it.  */
	  || GET_CODE (SET_SRC (set)) == ASM_OPERANDS)
	return 0;
    }
  else
    return 0;

  if (set == 0)
    return 0;

  set = expand_field_assignment (set);
  src = SET_SRC (set), dest = SET_DEST (set);

  /* Don't eliminate a store in the stack pointer.  */
  if (dest == stack_pointer_rtx
      /* If we couldn't eliminate a field assignment, we can't combine.  */
      || GET_CODE (dest) == ZERO_EXTRACT || GET_CODE (dest) == STRICT_LOW_PART
      /* Don't combine with an insn that sets a register to itself if it has
	 a REG_EQUAL note.  This may be part of a REG_NO_CONFLICT sequence.  */
      || (rtx_equal_p (src, dest) && find_reg_note (insn, REG_EQUAL, NULL_RTX))
      /* Can't merge a function call.  */
      || GET_CODE (src) == CALL
      /* Don't eliminate a function call argument.  */
      || (GET_CODE (i3) == CALL_INSN
	  && (find_reg_fusage (i3, USE, dest)
	      || (GET_CODE (dest) == REG
		  && REGNO (dest) < FIRST_PSEUDO_REGISTER
		  && global_regs[REGNO (dest)])))
      /* Don't substitute into an incremented register.  */
      || FIND_REG_INC_NOTE (i3, dest)
      || (succ && FIND_REG_INC_NOTE (succ, dest))
      /* Don't combine the end of a libcall into anything.  */
      || find_reg_note (insn, REG_RETVAL, NULL_RTX)
      /* Make sure that DEST is not used after SUCC but before I3.  */
      || (succ && ! all_adjacent
	  && reg_used_between_p (dest, succ, i3))
      /* Make sure that the value that is to be substituted for the register
	 does not use any registers whose values alter in between.  However,
	 if the insns are adjacent, a use can't cross a set even though we
	 think it might (this can happen for a sequence of insns each setting
	 the same destination; reg_last_set of that register might point to
	 a NOTE).  If INSN has a REG_EQUIV note, the register is always
	 equivalent to the memory so the substitution is valid even if there
	 are intervening stores.  Also, don't move a volatile asm or
	 UNSPEC_VOLATILE across any other insns.  */
      || (! all_adjacent
	  && (((GET_CODE (src) != MEM
		|| ! find_reg_note (insn, REG_EQUIV, src))
	       && use_crosses_set_p (src, INSN_CUID (insn)))
	      || (GET_CODE (src) == ASM_OPERANDS && MEM_VOLATILE_P (src))
	      || GET_CODE (src) == UNSPEC_VOLATILE))
      /* If there is a REG_NO_CONFLICT note for DEST in I3 or SUCC, we get
	 better register allocation by not doing the combine.  */
      || find_reg_note (i3, REG_NO_CONFLICT, dest)
      || (succ && find_reg_note (succ, REG_NO_CONFLICT, dest))
      /* Don't combine across a CALL_INSN, because that would possibly
	 change whether the life span of some REGs crosses calls or not,
	 and it is a pain to update that information.
	 Exception: if source is a constant, moving it later can't hurt.
	 Accept that special case, because it helps -fforce-addr a lot.  */
      || (INSN_CUID (insn) < last_call_cuid && ! CONSTANT_P (src)))
    return 0;

  /* DEST must either be a REG or CC0.  */
  if (GET_CODE (dest) == REG)
    {
      /* If register alignment is being enforced for multi-word items in all
	 cases except for parameters, it is possible to have a register copy
	 insn referencing a hard register that is not allowed to contain the
	 mode being copied and which would not be valid as an operand of most
	 insns.  Eliminate this problem by not combining with such an insn.

	 Also, on some machines we don't want to extend the life of a hard
	 register.

	 This is the same test done in can_combine except that we don't test
	 if SRC is a CALL operation to permit a hard register with
	 SMALL_REGISTER_CLASSES, and that we have to take all_adjacent
	 into account.  */

      if (GET_CODE (src) == REG
	  && ((REGNO (dest) < FIRST_PSEUDO_REGISTER
	       && ! HARD_REGNO_MODE_OK (REGNO (dest), GET_MODE (dest)))
	      /* Don't extend the life of a hard register unless it is
		 a user variable (if we have few registers) or it can't
		 fit into the desired register (meaning something special
		 is going on).
		 Also avoid substituting a return register into I3, because
		 reload can't handle a conflict with constraints of other
		 inputs.  */
	      || (REGNO (src) < FIRST_PSEUDO_REGISTER
		  && (! HARD_REGNO_MODE_OK (REGNO (src), GET_MODE (src))
		      || (SMALL_REGISTER_CLASSES
			  && ((! all_adjacent && ! REG_USERVAR_P (src))
			      || (FUNCTION_VALUE_REGNO_P (REGNO (src))
				  && ! REG_USERVAR_P (src))))))))
	return 0;
    }
  else if (GET_CODE (dest) != CC0)
    return 0;

  /* Don't substitute for a register intended as a clobberable operand.
     Similarly, don't substitute an expression containing a register that
     will be clobbered in I3.  */
  if (GET_CODE (PATTERN (i3)) == PARALLEL)
    for (i = XVECLEN (PATTERN (i3), 0) - 1; i >= 0; i--)
      if (GET_CODE (XVECEXP (PATTERN (i3), 0, i)) == CLOBBER
	  && (reg_overlap_mentioned_p (XEXP (XVECEXP (PATTERN (i3), 0, i), 0),
				       src)
	      || rtx_equal_p (XEXP (XVECEXP (PATTERN (i3), 0, i), 0), dest)))
	return 0;

  /* If INSN contains anything volatile, or is an `asm' (whether volatile
     or not), reject, unless nothing volatile comes between it and I3,
     with the exception of SUCC.  */

  if (GET_CODE (src) == ASM_OPERANDS || volatile_refs_p (src))
    for (p = NEXT_INSN (insn); p != i3; p = NEXT_INSN (p))
      if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
	  && p != succ && volatile_refs_p (PATTERN (p)))
	return 0;

  /* If INSN is an asm, and DEST is a hard register, reject, since it has
     to be an explicit register variable, and was chosen for a reason.  */

  if (GET_CODE (src) == ASM_OPERANDS
      && GET_CODE (dest) == REG && REGNO (dest) < FIRST_PSEUDO_REGISTER)
    return 0;

  /* If there are any volatile insns between INSN and I3, reject, because
     they might affect machine state.  */

  for (p = NEXT_INSN (insn); p != i3; p = NEXT_INSN (p))
    if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
	&& p != succ && volatile_insn_p (PATTERN (p)))
      return 0;

  /* If INSN or I2 contains an autoincrement or autodecrement,
     make sure that register is not used between there and I3,
     and not already used in I3 either.
     Also insist that I3 not be a jump; if it were one
     and the incremented register were spilled, we would lose.  */

#ifdef AUTO_INC_DEC
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == REG_INC
	&& (GET_CODE (i3) == JUMP_INSN
	    || reg_used_between_p (XEXP (link, 0), insn, i3)
	    || reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i3))))
      return 0;
#endif

#ifdef HAVE_cc0
  /* Don't combine an insn that follows a CC0-setting insn.
     An insn that uses CC0 must not be separated from the one that sets it.
     We do, however, allow I2 to follow a CC0-setting insn if that insn
     is passed as I1; in that case it will be deleted also.
     We also allow combining in this case if all the insns are adjacent
     because that would leave the two CC0 insns adjacent as well.
     It would be more logical to test whether CC0 occurs inside I1 or I2,
     but that would be much slower, and this ought to be equivalent.  */

  p = prev_nonnote_insn (insn);
  if (p && p != pred && GET_CODE (p) == INSN && sets_cc0_p (PATTERN (p))
      && ! all_adjacent)
    return 0;
#endif

  /* If we get here, we have passed all the tests and the combination is
     to be allowed.  */

  *pdest = dest;
  *psrc = src;

  return 1;
}
\f
/* Check if PAT is an insn - or a part of it - used to set up an
   argument for a function in a hard register.  */

static int
sets_function_arg_p (pat)
     rtx pat;
{
  int i;
  rtx inner_dest;

  switch (GET_CODE (pat))
    {
    case INSN:
      return sets_function_arg_p (PATTERN (pat));

    case PARALLEL:
      for (i = XVECLEN (pat, 0); --i >= 0;)
	if (sets_function_arg_p (XVECEXP (pat, 0, i)))
	  return 1;

      break;

    case SET:
      inner_dest = SET_DEST (pat);
      while (GET_CODE (inner_dest) == STRICT_LOW_PART
	     || GET_CODE (inner_dest) == SUBREG
	     || GET_CODE (inner_dest) == ZERO_EXTRACT)
	inner_dest = XEXP (inner_dest, 0);

      return (GET_CODE (inner_dest) == REG
	      && REGNO (inner_dest) < FIRST_PSEUDO_REGISTER
	      && FUNCTION_ARG_REGNO_P (REGNO (inner_dest)));

    default:
      break;
    }

  return 0;
}

/* LOC is the location within I3 that contains its pattern or the component
   of a PARALLEL of the pattern.  We validate that it is valid for combining.

   One problem is that if I3 modifies its output, as opposed to replacing it
   entirely, we can't allow the output to contain I2DEST or I1DEST, as doing
   so would produce an insn that is not equivalent to the original insns.

   Consider:

	 (set (reg:DI 101) (reg:DI 100))
	 (set (subreg:SI (reg:DI 101) 0) <foo>)

   This is NOT equivalent to:

	 (parallel [(set (subreg:SI (reg:DI 100) 0) <foo>)
		    (set (reg:DI 101) (reg:DI 100))])

   Not only does this modify 100 (in which case it might still be valid
   if 100 were dead in I2), it sets 101 to the ORIGINAL value of 100.

   We can also run into a problem if I2 sets a register that I1
   uses and I1 gets directly substituted into I3 (not via I2).  In that
   case, we would be getting the wrong value of I2DEST into I3, so we
   must reject the combination.  This case occurs when I2 and I1 both
   feed into I3, rather than when I1 feeds into I2, which feeds into I3.
   If I1_NOT_IN_SRC is non-zero, it means that finding I1 in the source
   of a SET must prevent combination from occurring.

   On machines where SMALL_REGISTER_CLASSES is non-zero, we don't combine
   if the destination of a SET is a hard register that isn't a user
   variable.

   Before doing the above check, we first try to expand a field assignment
   into a set of logical operations.

   If PI3_DEST_KILLED is non-zero, it is a pointer to a location in which
   we place a register that is both set and used within I3.  If more than one
   such register is detected, we fail.

   Return 1 if the combination is valid, zero otherwise.  */

static int
combinable_i3pat (i3, loc, i2dest, i1dest, i1_not_in_src, pi3dest_killed)
     rtx i3;
     rtx *loc;
     rtx i2dest;
     rtx i1dest;
     int i1_not_in_src;
     rtx *pi3dest_killed;
{
  rtx x = *loc;

  if (GET_CODE (x) == SET)
    {
      rtx set = expand_field_assignment (x);
      rtx dest = SET_DEST (set);
      rtx src = SET_SRC (set);
      rtx inner_dest = dest;

#if 0
      rtx inner_src = src;
#endif

      SUBST (*loc, set);

      while (GET_CODE (inner_dest) == STRICT_LOW_PART
	     || GET_CODE (inner_dest) == SUBREG
	     || GET_CODE (inner_dest) == ZERO_EXTRACT)
	inner_dest = XEXP (inner_dest, 0);

      /* We probably don't need this any more now that LIMIT_RELOAD_CLASS
	 was added.  */
#if 0
      while (GET_CODE (inner_src) == STRICT_LOW_PART
	     || GET_CODE (inner_src) == SUBREG
	     || GET_CODE (inner_src) == ZERO_EXTRACT)
	inner_src = XEXP (inner_src, 0);

      /* If it is better that two different modes keep two different pseudos,
	 avoid combining them.  This avoids producing the following pattern
	 on a 386:
	  (set (subreg:SI (reg/v:QI 21) 0)
	       (lshiftrt:SI (reg/v:SI 20)
		   (const_int 24)))
	 If that were made, reload could not handle the pair of
	 reg 20/21, since it would try to get any GENERAL_REGS
	 but some of them don't handle QImode.  */

      if (rtx_equal_p (inner_src, i2dest)
	  && GET_CODE (inner_dest) == REG
	  && ! MODES_TIEABLE_P (GET_MODE (i2dest), GET_MODE (inner_dest)))
	return 0;
#endif

      /* Check for the case where I3 modifies its output, as
	 discussed above.  */
      if ((inner_dest != dest
	   && (reg_overlap_mentioned_p (i2dest, inner_dest)
	       || (i1dest && reg_overlap_mentioned_p (i1dest, inner_dest))))

	  /* This is the same test done in can_combine_p except that we
	     allow a hard register with SMALL_REGISTER_CLASSES if SRC is a
	     CALL operation.  Moreover, we can't test all_adjacent; we don't
	     have to, since this instruction will stay in place, thus we are
	     not considering increasing the lifetime of INNER_DEST.

	     Also, if this insn sets a function argument, combining it with
	     something that might need a spill could clobber a previous
	     function argument; the all_adjacent test in can_combine_p also
	     checks this; here, we do a more specific test for this case.  */

	  || (GET_CODE (inner_dest) == REG
	      && REGNO (inner_dest) < FIRST_PSEUDO_REGISTER
	      && (! HARD_REGNO_MODE_OK (REGNO (inner_dest),
					GET_MODE (inner_dest))
		  || (SMALL_REGISTER_CLASSES && GET_CODE (src) != CALL
		      && ! REG_USERVAR_P (inner_dest)
		      && (FUNCTION_VALUE_REGNO_P (REGNO (inner_dest))
			  || (FUNCTION_ARG_REGNO_P (REGNO (inner_dest))
			      && i3 != 0
			      && sets_function_arg_p (prev_nonnote_insn (i3)))))))
	  || (i1_not_in_src && reg_overlap_mentioned_p (i1dest, src)))
	return 0;

      /* If DEST is used in I3, it is being killed in this insn,
	 so record that for later.
	 Never add REG_DEAD notes for the FRAME_POINTER_REGNUM or the
	 STACK_POINTER_REGNUM, since these are always considered to be
	 live.  Similarly for ARG_POINTER_REGNUM if it is fixed.  */
      if (pi3dest_killed && GET_CODE (dest) == REG
	  && reg_referenced_p (dest, PATTERN (i3))
	  && REGNO (dest) != FRAME_POINTER_REGNUM
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
	  && REGNO (dest) != HARD_FRAME_POINTER_REGNUM
#endif
#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
	  && (REGNO (dest) != ARG_POINTER_REGNUM
	      || ! fixed_regs [REGNO (dest)])
#endif
	  && REGNO (dest) != STACK_POINTER_REGNUM)
	{
	  if (*pi3dest_killed)
	    return 0;

	  *pi3dest_killed = dest;
	}
    }

  else if (GET_CODE (x) == PARALLEL)
    {
      int i;

      for (i = 0; i < XVECLEN (x, 0); i++)
	if (! combinable_i3pat (i3, &XVECEXP (x, 0, i), i2dest, i1dest,
				i1_not_in_src, pi3dest_killed))
	  return 0;
    }

  return 1;
}
\f
/* Try to combine the insns I1 and I2 into I3.
   Here I1 and I2 appear earlier than I3.
   I1 can be zero; then we combine just I2 into I3.

   If we are combining three insns and the resulting insn is not recognized,
   try splitting it into two insns.  If that happens, I2 and I3 are retained
   and I1 is pseudo-deleted by turning it into a NOTE.  Otherwise, I1 and I2
   are pseudo-deleted.

   Return 0 if the combination does not work.  Then nothing is changed.
   If we did the combination, return the insn at which combine should
   resume scanning.  */

static rtx
try_combine (i3, i2, i1)
     register rtx i3, i2, i1;
{
  /* New patterns for I3 and I2, respectively.  */
  rtx newpat, newi2pat = 0;
  /* Indicates need to preserve SET in I1 or I2 in I3 if it is not dead.  */
  int added_sets_1, added_sets_2;
  /* Total number of SETs to put into I3.  */
  int total_sets;
  /* Nonzero if I2's body now appears in I3.  */
  int i2_is_used;
  /* INSN_CODEs for new I3, new I2, and user of condition code.  */
  int insn_code_number, i2_code_number, other_code_number;
  /* Contains I3 if the destination of I3 is used in its source, which means
     that the old life of I3 is being killed.  If that usage is placed into
     I2 and not in I3, a REG_DEAD note must be made.  */
  rtx i3dest_killed = 0;
  /* SET_DEST and SET_SRC of I2 and I1.  */
  rtx i2dest, i2src, i1dest = 0, i1src = 0;
  /* PATTERN (I2), or a copy of it in certain cases.  */
  rtx i2pat;
  /* Indicates if I2DEST or I1DEST is in I2SRC or I1SRC.  */
  int i2dest_in_i2src = 0, i1dest_in_i1src = 0, i2dest_in_i1src = 0;
  int i1_feeds_i3 = 0;
  /* Notes that must be added to REG_NOTES in I3 and I2.  */
  rtx new_i3_notes, new_i2_notes;
  /* Nonzero if we substituted I3 into I2 instead of the normal case.  */
  int i3_subst_into_i2 = 0;
  /* Nonzero if I1, I2 or I3 is a MULT operation.  */
  int have_mult = 0;
  /* Number of clobbers of SCRATCH we had to add.  */
  int i3_scratches = 0, i2_scratches = 0, other_scratches = 0;

  int maxreg;
  rtx temp;
  register rtx link;
  int i;

  /* If any of I1, I2, and I3 isn't really an insn, we can't do anything.
     This can occur when flow deletes an insn that it has merged into an
     auto-increment address.  We also can't do anything if I3 has a
     REG_LIBCALL note since we don't want to disrupt the contiguity of a
     libcall.  */

  if (GET_RTX_CLASS (GET_CODE (i3)) != 'i'
      || GET_RTX_CLASS (GET_CODE (i2)) != 'i'
      || (i1 && GET_RTX_CLASS (GET_CODE (i1)) != 'i')
      || find_reg_note (i3, REG_LIBCALL, NULL_RTX))
    return 0;

  combine_attempts++;

  undobuf.undos = undobuf.previous_undos = 0;
  undobuf.other_insn = 0;

  /* Save the current high-water-mark so we can free storage if we didn't
     accept this combination.  */
  undobuf.storage = (char *) oballoc (0);

  /* Reset the hard register usage information.  */
  CLEAR_HARD_REG_SET (newpat_used_regs);

  /* If I1 and I2 both feed I3, they can be in any order.  To simplify the
     code below, set I1 to be the earlier of the two insns.  */
  if (i1 && INSN_CUID (i1) > INSN_CUID (i2))
    temp = i1, i1 = i2, i2 = temp;

  added_links_insn = 0;

  /* First check for one important special-case that the code below will
     not handle.  Namely, the case where I1 is zero, I2 has multiple sets,
     and I3 is a SET whose SET_SRC is a SET_DEST in I2.  In that case,
     we may be able to replace that destination with the destination of I3.
     This occurs in the common code where we compute both a quotient and
     remainder into a structure, in which case we want to do the computation
     directly into the structure to avoid register-register copies.

     We make very conservative checks below and only try to handle the
     most common cases of this.  For example, we only handle the case
     where I2 and I3 are adjacent to avoid making difficult register
     usage tests.  */
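
  /* As an illustrative sketch (register numbers invented), the case
     handled here looks like

	I2: (parallel [(set (reg:SI 70) (div:SI (reg:SI 68) (reg:SI 69)))
		       (set (reg:SI 71) (mod:SI (reg:SI 68) (reg:SI 69)))])
	I3: (set (mem:SI (reg:SI 65)) (reg:SI 70))

     and is rewritten so that I2 computes the quotient directly into the
     memory destination of I3.  */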
1400
1401 if (i1 == 0 && GET_CODE (i3) == INSN && GET_CODE (PATTERN (i3)) == SET
1402 && GET_CODE (SET_SRC (PATTERN (i3))) == REG
1403 && REGNO (SET_SRC (PATTERN (i3))) >= FIRST_PSEUDO_REGISTER
f95182a4 1404 && (! SMALL_REGISTER_CLASSES
e9a25f70
JL
1405 || (GET_CODE (SET_DEST (PATTERN (i3))) != REG
1406 || REGNO (SET_DEST (PATTERN (i3))) >= FIRST_PSEUDO_REGISTER
1407 || REG_USERVAR_P (SET_DEST (PATTERN (i3)))))
230d793d
RS
1408 && find_reg_note (i3, REG_DEAD, SET_SRC (PATTERN (i3)))
1409 && GET_CODE (PATTERN (i2)) == PARALLEL
1410 && ! side_effects_p (SET_DEST (PATTERN (i3)))
5089e22e
RS
1411 /* If the dest of I3 is a ZERO_EXTRACT or STRICT_LOW_PART, the code
1412 below would need to check what is inside (and reg_overlap_mentioned_p
1413 doesn't support those codes anyway). Don't allow those destinations;
1414 the resulting insn isn't likely to be recognized anyway. */
1415 && GET_CODE (SET_DEST (PATTERN (i3))) != ZERO_EXTRACT
1416 && GET_CODE (SET_DEST (PATTERN (i3))) != STRICT_LOW_PART
230d793d
RS
1417 && ! reg_overlap_mentioned_p (SET_SRC (PATTERN (i3)),
1418 SET_DEST (PATTERN (i3)))
1419 && next_real_insn (i2) == i3)
5089e22e
RS
1420 {
1421 rtx p2 = PATTERN (i2);
1422
1423 /* Make sure that the destination of I3,
1424 which we are going to substitute into one output of I2,
1425 is not used within another output of I2. We must avoid making this:
1426 (parallel [(set (mem (reg 69)) ...)
1427 (set (reg 69) ...)])
1428 which is not well-defined as to order of actions.
1429 (Besides, reload can't handle output reloads for this.)
1430
1431 The problem can also happen if the dest of I3 is a memory ref,
1432 if another dest in I2 is an indirect memory ref. */
1433 for (i = 0; i < XVECLEN (p2, 0); i++)
7ca919b7
RK
1434 if ((GET_CODE (XVECEXP (p2, 0, i)) == SET
1435 || GET_CODE (XVECEXP (p2, 0, i)) == CLOBBER)
5089e22e
RS
1436 && reg_overlap_mentioned_p (SET_DEST (PATTERN (i3)),
1437 SET_DEST (XVECEXP (p2, 0, i))))
1438 break;
230d793d 1439
5089e22e
RS
1440 if (i == XVECLEN (p2, 0))
1441 for (i = 0; i < XVECLEN (p2, 0); i++)
1442 if (SET_DEST (XVECEXP (p2, 0, i)) == SET_SRC (PATTERN (i3)))
1443 {
1444 combine_merges++;
230d793d 1445
5089e22e
RS
1446 subst_insn = i3;
1447 subst_low_cuid = INSN_CUID (i2);
230d793d 1448
c4e861e8 1449 added_sets_2 = added_sets_1 = 0;
5089e22e 1450 i2dest = SET_SRC (PATTERN (i3));
230d793d 1451
5089e22e
RS
1452 /* Replace the dest in I2 with our dest and make the resulting
1453 insn the new pattern for I3. Then skip to where we
1454 validate the pattern. Everything was set up above. */
1455 SUBST (SET_DEST (XVECEXP (p2, 0, i)),
1456 SET_DEST (PATTERN (i3)));
1457
1458 newpat = p2;
176c9e6b 1459 i3_subst_into_i2 = 1;
5089e22e
RS
1460 goto validate_replacement;
1461 }
1462 }
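
  /* Illustrative example (not part of the original sources; the register
     numbers are made up): the special case above rewrites a divmod-style
     pair such as

         I2: (parallel [(set (reg 70) (div:SI (reg 68) (reg 69)))
                        (set (reg 71) (mod:SI (reg 68) (reg 69)))])
         I3: (set (mem:SI (reg 66)) (reg 71))

     into a single insn

         (parallel [(set (reg 70) (div:SI (reg 68) (reg 69)))
                    (set (mem:SI (reg 66)) (mod:SI (reg 68) (reg 69)))])

     which becomes the new pattern for I3, so the remainder is computed
     directly into the structure slot with no register-register copy.  */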

#ifndef HAVE_cc0
  /* If we have no I1 and I2 looks like:
        (parallel [(set (reg:CC X) (compare:CC OP (const_int 0)))
                   (set Y OP)])
     make up a dummy I1 that is
        (set Y OP)
     and change I2 to be
        (set (reg:CC X) (compare:CC Y (const_int 0)))

     (We can ignore any trailing CLOBBERs.)

     This undoes a previous combination and allows us to match a branch-and-
     decrement insn.  */

  if (i1 == 0 && GET_CODE (PATTERN (i2)) == PARALLEL
      && XVECLEN (PATTERN (i2), 0) >= 2
      && GET_CODE (XVECEXP (PATTERN (i2), 0, 0)) == SET
      && (GET_MODE_CLASS (GET_MODE (SET_DEST (XVECEXP (PATTERN (i2), 0, 0))))
          == MODE_CC)
      && GET_CODE (SET_SRC (XVECEXP (PATTERN (i2), 0, 0))) == COMPARE
      && XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 1) == const0_rtx
      && GET_CODE (XVECEXP (PATTERN (i2), 0, 1)) == SET
      && GET_CODE (SET_DEST (XVECEXP (PATTERN (i2), 0, 1))) == REG
      && rtx_equal_p (XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 0),
                      SET_SRC (XVECEXP (PATTERN (i2), 0, 1))))
    {
      for (i = XVECLEN (PATTERN (i2), 0) - 1; i >= 2; i--)
        if (GET_CODE (XVECEXP (PATTERN (i2), 0, i)) != CLOBBER)
          break;

      if (i == 1)
        {
          /* We make I1 with the same INSN_UID as I2.  This gives it
             the same INSN_CUID for value tracking.  Our fake I1 will
             never appear in the insn stream so giving it the same INSN_UID
             as I2 will not cause a problem.  */

          subst_prev_insn = i1
            = gen_rtx_INSN (VOIDmode, INSN_UID (i2), NULL_RTX, i2,
                            XVECEXP (PATTERN (i2), 0, 1), -1, NULL_RTX,
                            NULL_RTX);

          SUBST (PATTERN (i2), XVECEXP (PATTERN (i2), 0, 0));
          SUBST (XEXP (SET_SRC (PATTERN (i2)), 0),
                 SET_DEST (PATTERN (i1)));
        }
    }
#endif
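
  /* Worked example (illustrative, not from the original sources; register
     numbers and the CC mode are made up): a decrement-and-set-flags insn

         I2: (parallel [(set (reg:CC 24)
                             (compare:CC (plus:SI (reg:SI 70) (const_int -1))
                                         (const_int 0)))
                        (set (reg:SI 70)
                             (plus:SI (reg:SI 70) (const_int -1)))])

     is taken apart by the code above into

         I1: (set (reg:SI 70) (plus:SI (reg:SI 70) (const_int -1)))
         I2: (set (reg:CC 24) (compare:CC (reg:SI 70) (const_int 0)))

     so a later combination can fold I2 into a branch-and-decrement.  */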

  /* Verify that I2 and I1 are valid for combining.  */
  if (! can_combine_p (i2, i3, i1, NULL_RTX, &i2dest, &i2src)
      || (i1 && ! can_combine_p (i1, i3, NULL_RTX, i2, &i1dest, &i1src)))
    {
      undo_all ();
      return 0;
    }

  /* Record whether I2DEST is used in I2SRC and similarly for the other
     cases.  Knowing this will help in register status updating below.  */
  i2dest_in_i2src = reg_overlap_mentioned_p (i2dest, i2src);
  i1dest_in_i1src = i1 && reg_overlap_mentioned_p (i1dest, i1src);
  i2dest_in_i1src = i1 && reg_overlap_mentioned_p (i2dest, i1src);

  /* See if I1 directly feeds into I3.  It does if I1DEST is not used
     in I2SRC.  */
  i1_feeds_i3 = i1 && ! reg_overlap_mentioned_p (i1dest, i2src);

  /* Ensure that I3's pattern can be the destination of combines.  */
  if (! combinable_i3pat (i3, &PATTERN (i3), i2dest, i1dest,
                          i1 && i2dest_in_i1src && i1_feeds_i3,
                          &i3dest_killed))
    {
      undo_all ();
      return 0;
    }

  /* See if any of the insns is a MULT operation.  Unless one is, we will
     reject a combination that is, since it must be slower.  Be conservative
     here.  */
  if (GET_CODE (i2src) == MULT
      || (i1 != 0 && GET_CODE (i1src) == MULT)
      || (GET_CODE (PATTERN (i3)) == SET
          && GET_CODE (SET_SRC (PATTERN (i3))) == MULT))
    have_mult = 1;

  /* If I3 has an inc, then give up if I1 or I2 uses the reg that is inc'd.
     We used to do this EXCEPT in one case: I3 has a post-inc in an
     output operand.  However, that exception can give rise to insns like
        mov r3,(r3)+
     which is a famous insn on the PDP-11 where the value of r3 used as the
     source was model-dependent.  Avoid this sort of thing.  */

#if 0
  if (!(GET_CODE (PATTERN (i3)) == SET
        && GET_CODE (SET_SRC (PATTERN (i3))) == REG
        && GET_CODE (SET_DEST (PATTERN (i3))) == MEM
        && (GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_INC
            || GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_DEC)))
    /* It's not the exception.  */
#endif
#ifdef AUTO_INC_DEC
  for (link = REG_NOTES (i3); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == REG_INC
        && (reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i2))
            || (i1 != 0
                && reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i1)))))
      {
        undo_all ();
        return 0;
      }
#endif

  /* See if the SETs in I1 or I2 need to be kept around in the merged
     instruction: whenever the value set there is still needed past I3.
     For the SETs in I2, this is easy: we see if I2DEST dies or is set in I3.

     For the SET in I1, we have two cases:  If I1 and I2 independently
     feed into I3, the set in I1 needs to be kept around if I1DEST dies
     or is set in I3.  Otherwise (if I1 feeds I2 which feeds I3), the set
     in I1 needs to be kept around unless I1DEST dies or is set in either
     I2 or I3.  We can distinguish these cases by seeing if I2SRC mentions
     I1DEST.  If so, we know I1 feeds into I2.  */

  added_sets_2 = ! dead_or_set_p (i3, i2dest);

  added_sets_1
    = i1 && ! (i1_feeds_i3 ? dead_or_set_p (i3, i1dest)
               : (dead_or_set_p (i3, i1dest) || dead_or_set_p (i2, i1dest)));

  /* If the set in I2 needs to be kept around, we must make a copy of
     PATTERN (I2), so that when we substitute I1SRC for I1DEST in
     PATTERN (I2), we are only substituting for the original I1DEST, not into
     an already-substituted copy.  This also prevents making self-referential
     rtx.  If I2 is a PARALLEL, we just need the piece that assigns I2SRC to
     I2DEST.  */

  i2pat = (GET_CODE (PATTERN (i2)) == PARALLEL
           ? gen_rtx_SET (VOIDmode, i2dest, i2src)
           : PATTERN (i2));

  if (added_sets_2)
    i2pat = copy_rtx (i2pat);
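
  /* Minimal illustration (not from the original sources) of what I2PAT
     amounts to.  If I2 is

         (parallel [(set (reg 70) (plus:SI (reg 68) (reg 69)))
                    (clobber (reg:CC 24))])

     then I2PAT is just (set (reg 70) (plus:SI (reg 68) (reg 69))), and it
     is copied when ADDED_SETS_2 so that substituting I1SRC into it later
     cannot rewrite the original I2 pattern in place.  */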

  combine_merges++;

  /* Substitute in the latest insn for the regs set by the earlier ones.  */

  maxreg = max_reg_num ();

  subst_insn = i3;

  /* It is possible that the source of I2 or I1 may be performing an
     unneeded operation, such as a ZERO_EXTEND of something that is known
     to have the high part zero.  Handle that case by letting subst look at
     the innermost one of them.

     Another way to do this would be to have a function that tries to
     simplify a single insn instead of merging two or more insns.  We don't
     do this because of the potential of infinite loops and because
     of the potential extra memory required.  However, doing it the way
     we are is a bit of a kludge and doesn't catch all cases.

     But only do this if -fexpensive-optimizations since it slows things down
     and doesn't usually win.  */

  if (flag_expensive_optimizations)
    {
      /* Pass pc_rtx so no substitutions are done, just simplifications.
         The cases that we are interested in here do not involve the few
         cases where is_replaced is checked.  */
      if (i1)
        {
          subst_low_cuid = INSN_CUID (i1);
          i1src = subst (i1src, pc_rtx, pc_rtx, 0, 0);
        }
      else
        {
          subst_low_cuid = INSN_CUID (i2);
          i2src = subst (i2src, pc_rtx, pc_rtx, 0, 0);
        }

      undobuf.previous_undos = undobuf.undos;
    }

#ifndef HAVE_cc0
  /* Many machines that don't use CC0 have insns that can both perform an
     arithmetic operation and set the condition code.  These operations will
     be represented as a PARALLEL with the first element of the vector
     being a COMPARE of an arithmetic operation with the constant zero.
     The second element of the vector will set some pseudo to the result
     of the same arithmetic operation.  If we simplify the COMPARE, we won't
     match such a pattern and so will generate an extra insn.  Here we test
     for this case, where both the comparison and the operation result are
     needed, and make the PARALLEL by just replacing I2DEST in I3SRC with
     I2SRC.  Later we will make the PARALLEL that contains I2.  */

  if (i1 == 0 && added_sets_2 && GET_CODE (PATTERN (i3)) == SET
      && GET_CODE (SET_SRC (PATTERN (i3))) == COMPARE
      && XEXP (SET_SRC (PATTERN (i3)), 1) == const0_rtx
      && rtx_equal_p (XEXP (SET_SRC (PATTERN (i3)), 0), i2dest))
    {
#ifdef EXTRA_CC_MODES
      rtx *cc_use;
      enum machine_mode compare_mode;
#endif

      newpat = PATTERN (i3);
      SUBST (XEXP (SET_SRC (newpat), 0), i2src);

      i2_is_used = 1;

#ifdef EXTRA_CC_MODES
      /* See if a COMPARE with the operand we substituted in should be done
         with the mode that is currently being used.  If not, do the same
         processing we do in `subst' for a SET; namely, if the destination
         is used only once, try to replace it with a register of the proper
         mode and also replace the COMPARE.  */
      if (undobuf.other_insn == 0
          && (cc_use = find_single_use (SET_DEST (newpat), i3,
                                        &undobuf.other_insn))
          && ((compare_mode = SELECT_CC_MODE (GET_CODE (*cc_use),
                                              i2src, const0_rtx))
              != GET_MODE (SET_DEST (newpat))))
        {
          int regno = REGNO (SET_DEST (newpat));
          rtx new_dest = gen_rtx_REG (compare_mode, regno);

          if (regno < FIRST_PSEUDO_REGISTER
              || (REG_N_SETS (regno) == 1 && ! added_sets_2
                  && ! REG_USERVAR_P (SET_DEST (newpat))))
            {
              if (regno >= FIRST_PSEUDO_REGISTER)
                SUBST (regno_reg_rtx[regno], new_dest);

              SUBST (SET_DEST (newpat), new_dest);
              SUBST (XEXP (*cc_use, 0), new_dest);
              SUBST (SET_SRC (newpat),
                     gen_rtx_combine (COMPARE, compare_mode,
                                      i2src, const0_rtx));
            }
          else
            undobuf.other_insn = 0;
        }
#endif
    }
  else
#endif
    {
      n_occurrences = 0;		/* `subst' counts here */

      /* If I1 feeds into I2 (not into I3) and I1DEST is in I1SRC, we
         need to make a unique copy of I2SRC each time we substitute it
         to avoid self-referential rtl.  */

      subst_low_cuid = INSN_CUID (i2);
      newpat = subst (PATTERN (i3), i2dest, i2src, 0,
                      ! i1_feeds_i3 && i1dest_in_i1src);
      undobuf.previous_undos = undobuf.undos;

      /* Record whether i2's body now appears within i3's body.  */
      i2_is_used = n_occurrences;
    }

  /* If we already got a failure, don't try to do more.  Otherwise,
     try to substitute in I1 if we have it.  */

  if (i1 && GET_CODE (newpat) != CLOBBER)
    {
      /* Before we can do this substitution, we must redo the test done
         above (see detailed comments there) that ensures that I1DEST
         isn't mentioned in any SETs in NEWPAT that are field assignments.  */

      if (! combinable_i3pat (NULL_RTX, &newpat, i1dest, NULL_RTX,
                              0, NULL_PTR))
        {
          undo_all ();
          return 0;
        }

      n_occurrences = 0;
      subst_low_cuid = INSN_CUID (i1);
      newpat = subst (newpat, i1dest, i1src, 0, 0);
      undobuf.previous_undos = undobuf.undos;
    }

  /* Fail if an autoincrement side-effect has been duplicated.  Be careful
     to count all the ways that I2SRC and I1SRC can be used.  */
  if ((FIND_REG_INC_NOTE (i2, NULL_RTX) != 0
       && i2_is_used + added_sets_2 > 1)
      || (i1 != 0 && FIND_REG_INC_NOTE (i1, NULL_RTX) != 0
          && (n_occurrences + added_sets_1 + (added_sets_2 && ! i1_feeds_i3)
              > 1))
      /* Fail if we tried to make a new register (we used to abort, but there's
         really no reason to).  */
      || max_reg_num () != maxreg
      /* Fail if we couldn't do something and have a CLOBBER.  */
      || GET_CODE (newpat) == CLOBBER
      /* Fail if this new pattern is a MULT and we didn't have one before
         at the outer level.  */
      || (GET_CODE (newpat) == SET && GET_CODE (SET_SRC (newpat)) == MULT
          && ! have_mult))
    {
      undo_all ();
      return 0;
    }

  /* If the actions of the earlier insns must be kept
     in addition to substituting them into the latest one,
     we must make a new PARALLEL for the latest insn
     to hold the additional SETs.  */

  if (added_sets_1 || added_sets_2)
    {
      combine_extras++;

      if (GET_CODE (newpat) == PARALLEL)
        {
          rtvec old = XVEC (newpat, 0);
          total_sets = XVECLEN (newpat, 0) + added_sets_1 + added_sets_2;
          newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total_sets));
          bcopy ((char *) &old->elem[0], (char *) XVEC (newpat, 0)->elem,
                 sizeof (old->elem[0]) * old->num_elem);
        }
      else
        {
          rtx old = newpat;
          total_sets = 1 + added_sets_1 + added_sets_2;
          newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total_sets));
          XVECEXP (newpat, 0, 0) = old;
        }

      if (added_sets_1)
        XVECEXP (newpat, 0, --total_sets)
          = (GET_CODE (PATTERN (i1)) == PARALLEL
             ? gen_rtx_SET (VOIDmode, i1dest, i1src) : PATTERN (i1));

      if (added_sets_2)
        {
          /* If there is no I1, use I2's body as is.  We used to also not do
             the subst call below if I2 was substituted into I3,
             but that could lose a simplification.  */
          if (i1 == 0)
            XVECEXP (newpat, 0, --total_sets) = i2pat;
          else
            /* See comment where i2pat is assigned.  */
            XVECEXP (newpat, 0, --total_sets)
              = subst (i2pat, i1dest, i1src, 0, 0);
        }
    }

  /* We come here when we are replacing a destination in I2 with the
     destination of I3.  */
 validate_replacement:

  /* Note which hard regs this insn has as inputs.  */
  mark_used_regs_combine (newpat);

  /* Is the result of combination a valid instruction?  */
  insn_code_number
    = recog_for_combine (&newpat, i3, &new_i3_notes, &i3_scratches);

  /* If the result isn't valid, see if it is a PARALLEL of two SETs where
     the second SET's destination is a register that is unused.  In that case,
     we just need the first SET.  This can occur when simplifying a divmod
     insn.  We *must* test for this case here because the code below that
     splits two independent SETs doesn't handle this case correctly when it
     updates the register status.  Also check the case where the first
     SET's destination is unused.  That would not cause incorrect code, but
     does cause an unneeded insn to remain.  */

  if (insn_code_number < 0 && GET_CODE (newpat) == PARALLEL
      && XVECLEN (newpat, 0) == 2
      && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
      && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
      && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) == REG
      && find_reg_note (i3, REG_UNUSED, SET_DEST (XVECEXP (newpat, 0, 1)))
      && ! side_effects_p (SET_SRC (XVECEXP (newpat, 0, 1)))
      && asm_noperands (newpat) < 0)
    {
      newpat = XVECEXP (newpat, 0, 0);
      insn_code_number
        = recog_for_combine (&newpat, i3, &new_i3_notes, &i3_scratches);
    }

  else if (insn_code_number < 0 && GET_CODE (newpat) == PARALLEL
           && XVECLEN (newpat, 0) == 2
           && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
           && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
           && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) == REG
           && find_reg_note (i3, REG_UNUSED, SET_DEST (XVECEXP (newpat, 0, 0)))
           && ! side_effects_p (SET_SRC (XVECEXP (newpat, 0, 0)))
           && asm_noperands (newpat) < 0)
    {
      newpat = XVECEXP (newpat, 0, 1);
      insn_code_number
        = recog_for_combine (&newpat, i3, &new_i3_notes, &i3_scratches);
    }
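
  /* A minimal sketch (not part of the original sources) of the test just
     applied, written as a single boolean for clarity.  It assumes NEWPAT
     is a two-element PARALLEL, and uses the same locals and helpers as
     the code above.  */
#if 0
  {
    rtx set1 = XVECEXP (newpat, 0, 1);

    /* Nonzero if the second SET can be dropped: it stores into a plain
       register that I3 marks REG_UNUSED, and computing its source has no
       side effects.  */
    int second_set_removable
      = (GET_CODE (set1) == SET
         && GET_CODE (SET_DEST (set1)) == REG
         && find_reg_note (i3, REG_UNUSED, SET_DEST (set1)) != 0
         && ! side_effects_p (SET_SRC (set1)));
  }
#endif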

  /* If we were combining three insns and the result is a simple SET
     with no ASM_OPERANDS that wasn't recognized, try to split it into two
     insns.  There are two ways to do this.  It can be split using a
     machine-specific method (like when you have an addition of a large
     constant) or by combine in the function find_split_point.  */

  if (i1 && insn_code_number < 0 && GET_CODE (newpat) == SET
      && asm_noperands (newpat) < 0)
    {
      rtx m_split, *split;
      rtx ni2dest = i2dest;

      /* See if the MD file can split NEWPAT.  If it can't, see if letting it
         use I2DEST as a scratch register will help.  In the latter case,
         convert I2DEST to the mode of the source of NEWPAT if we can.  */

      m_split = split_insns (newpat, i3);

      /* We can only use I2DEST as a scratch reg if it doesn't overlap any
         inputs of NEWPAT.  */

      /* ??? If I2DEST is not safe, and I1DEST exists, then it would be
         possible to try that as a scratch reg.  This would require adding
         more code to make it work though.  */

      if (m_split == 0 && ! reg_overlap_mentioned_p (ni2dest, newpat))
        {
          /* If I2DEST is a hard register or the only use of a pseudo,
             we can change its mode.  */
          if (GET_MODE (SET_DEST (newpat)) != GET_MODE (i2dest)
              && GET_MODE (SET_DEST (newpat)) != VOIDmode
              && GET_CODE (i2dest) == REG
              && (REGNO (i2dest) < FIRST_PSEUDO_REGISTER
                  || (REG_N_SETS (REGNO (i2dest)) == 1 && ! added_sets_2
                      && ! REG_USERVAR_P (i2dest))))
            ni2dest = gen_rtx_REG (GET_MODE (SET_DEST (newpat)),
                                   REGNO (i2dest));

          m_split = split_insns
            (gen_rtx_PARALLEL (VOIDmode,
                               gen_rtvec (2, newpat,
                                          gen_rtx_CLOBBER (VOIDmode,
                                                           ni2dest))),
             i3);
        }

      if (m_split && GET_CODE (m_split) == SEQUENCE
          && XVECLEN (m_split, 0) == 2
          && (next_real_insn (i2) == i3
              || ! use_crosses_set_p (PATTERN (XVECEXP (m_split, 0, 0)),
                                      INSN_CUID (i2))))
        {
          rtx i2set, i3set;
          rtx newi3pat = PATTERN (XVECEXP (m_split, 0, 1));
          newi2pat = PATTERN (XVECEXP (m_split, 0, 0));

          i3set = single_set (XVECEXP (m_split, 0, 1));
          i2set = single_set (XVECEXP (m_split, 0, 0));

          /* In case we changed the mode of I2DEST, replace it in the
             pseudo-register table here.  We can't do it above in case this
             code doesn't get executed and we do a split the other way.  */

          if (REGNO (i2dest) >= FIRST_PSEUDO_REGISTER)
            SUBST (regno_reg_rtx[REGNO (i2dest)], ni2dest);

          i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes,
                                              &i2_scratches);

          /* If I2 or I3 has multiple SETs, we won't know how to track
             register status, so don't use these insns.  If I2's destination
             is used between I2 and I3, we also can't use these insns.  */

          if (i2_code_number >= 0 && i2set && i3set
              && (next_real_insn (i2) == i3
                  || ! reg_used_between_p (SET_DEST (i2set), i2, i3)))
            insn_code_number = recog_for_combine (&newi3pat, i3, &new_i3_notes,
                                                  &i3_scratches);
          if (insn_code_number >= 0)
            newpat = newi3pat;

          /* It is possible that both insns now set the destination of I3.
             If so, we must show an extra use of it.  */

          if (insn_code_number >= 0)
            {
              rtx new_i3_dest = SET_DEST (i3set);
              rtx new_i2_dest = SET_DEST (i2set);

              while (GET_CODE (new_i3_dest) == ZERO_EXTRACT
                     || GET_CODE (new_i3_dest) == STRICT_LOW_PART
                     || GET_CODE (new_i3_dest) == SUBREG)
                new_i3_dest = XEXP (new_i3_dest, 0);

              while (GET_CODE (new_i2_dest) == ZERO_EXTRACT
                     || GET_CODE (new_i2_dest) == STRICT_LOW_PART
                     || GET_CODE (new_i2_dest) == SUBREG)
                new_i2_dest = XEXP (new_i2_dest, 0);

              if (GET_CODE (new_i3_dest) == REG
                  && GET_CODE (new_i2_dest) == REG
                  && REGNO (new_i3_dest) == REGNO (new_i2_dest))
                REG_N_SETS (REGNO (new_i2_dest))++;
            }
        }

      /* If we can split it and use I2DEST, go ahead and see if that
         helps things be recognized.  Verify that none of the registers
         are set between I2 and I3.  */
      if (insn_code_number < 0 && (split = find_split_point (&newpat, i3)) != 0
#ifdef HAVE_cc0
          && GET_CODE (i2dest) == REG
#endif
          /* We need I2DEST in the proper mode.  If it is a hard register
             or the only use of a pseudo, we can change its mode.  */
          && (GET_MODE (*split) == GET_MODE (i2dest)
              || GET_MODE (*split) == VOIDmode
              || REGNO (i2dest) < FIRST_PSEUDO_REGISTER
              || (REG_N_SETS (REGNO (i2dest)) == 1 && ! added_sets_2
                  && ! REG_USERVAR_P (i2dest)))
          && (next_real_insn (i2) == i3
              || ! use_crosses_set_p (*split, INSN_CUID (i2)))
          /* We can't overwrite I2DEST if its value is still used by
             NEWPAT.  */
          && ! reg_referenced_p (i2dest, newpat))
        {
          rtx newdest = i2dest;
          enum rtx_code split_code = GET_CODE (*split);
          enum machine_mode split_mode = GET_MODE (*split);

          /* Get NEWDEST as a register in the proper mode.  We have already
             validated that we can do this.  */
          if (GET_MODE (i2dest) != split_mode && split_mode != VOIDmode)
            {
              newdest = gen_rtx_REG (split_mode, REGNO (i2dest));

              if (REGNO (i2dest) >= FIRST_PSEUDO_REGISTER)
                SUBST (regno_reg_rtx[REGNO (i2dest)], newdest);
            }

          /* If *SPLIT is a (mult FOO (const_int pow2)), convert it to
             an ASHIFT.  This can occur if it was inside a PLUS and hence
             appeared to be a memory address.  This is a kludge.  */
          if (split_code == MULT
              && GET_CODE (XEXP (*split, 1)) == CONST_INT
              && (i = exact_log2 (INTVAL (XEXP (*split, 1)))) >= 0)
            {
              SUBST (*split, gen_rtx_combine (ASHIFT, split_mode,
                                              XEXP (*split, 0), GEN_INT (i)));
              /* Update split_code because we may not have a multiply
                 anymore.  */
              split_code = GET_CODE (*split);
            }
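
          /* Worked example (illustrative, not from the original sources):
             a split point of (mult (reg 70) (const_int 8)) inside an
             address satisfies exact_log2 (8) == 3, so it is rewritten as
             (ashift (reg 70) (const_int 3)) before being moved into
             NEWI2PAT below.  */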

#ifdef INSN_SCHEDULING
          /* If *SPLIT is a paradoxical SUBREG, when we split it, it should
             be written as a ZERO_EXTEND.  */
          if (split_code == SUBREG && GET_CODE (SUBREG_REG (*split)) == MEM)
            SUBST (*split, gen_rtx_combine (ZERO_EXTEND, split_mode,
                                            XEXP (*split, 0)));
#endif

          newi2pat = gen_rtx_combine (SET, VOIDmode, newdest, *split);
          SUBST (*split, newdest);
          i2_code_number
            = recog_for_combine (&newi2pat, i2, &new_i2_notes, &i2_scratches);

          /* If the split point was a MULT and we didn't have one before,
             don't use one now.  */
          if (i2_code_number >= 0 && ! (split_code == MULT && ! have_mult))
            insn_code_number
              = recog_for_combine (&newpat, i3, &new_i3_notes, &i3_scratches);
        }
    }

  /* Check for a case where we loaded from memory in a narrow mode and
     then sign extended it, but we need both registers.  In that case,
     we have a PARALLEL with both loads from the same memory location.
     We can split this into a load from memory followed by a register-register
     copy.  This saves at least one insn, more if register allocation can
     eliminate the copy.

     We cannot do this if the destination of the second assignment is
     a register that we have already assumed is zero-extended.  Similarly
     for a SUBREG of such a register.  */

  else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0
           && GET_CODE (newpat) == PARALLEL
           && XVECLEN (newpat, 0) == 2
           && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
           && GET_CODE (SET_SRC (XVECEXP (newpat, 0, 0))) == SIGN_EXTEND
           && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
           && rtx_equal_p (SET_SRC (XVECEXP (newpat, 0, 1)),
                           XEXP (SET_SRC (XVECEXP (newpat, 0, 0)), 0))
           && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 1)),
                                   INSN_CUID (i2))
           && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT
           && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART
           && ! (temp = SET_DEST (XVECEXP (newpat, 0, 1)),
                 (GET_CODE (temp) == REG
                  && reg_nonzero_bits[REGNO (temp)] != 0
                  && GET_MODE_BITSIZE (GET_MODE (temp)) < BITS_PER_WORD
                  && GET_MODE_BITSIZE (GET_MODE (temp)) < HOST_BITS_PER_INT
                  && (reg_nonzero_bits[REGNO (temp)]
                      != GET_MODE_MASK (word_mode))))
           && ! (GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) == SUBREG
                 && (temp = SUBREG_REG (SET_DEST (XVECEXP (newpat, 0, 1))),
                     (GET_CODE (temp) == REG
                      && reg_nonzero_bits[REGNO (temp)] != 0
                      && GET_MODE_BITSIZE (GET_MODE (temp)) < BITS_PER_WORD
                      && GET_MODE_BITSIZE (GET_MODE (temp)) < HOST_BITS_PER_INT
                      && (reg_nonzero_bits[REGNO (temp)]
                          != GET_MODE_MASK (word_mode)))))
           && ! reg_overlap_mentioned_p (SET_DEST (XVECEXP (newpat, 0, 1)),
                                         SET_SRC (XVECEXP (newpat, 0, 1)))
           && ! find_reg_note (i3, REG_UNUSED,
                               SET_DEST (XVECEXP (newpat, 0, 0))))
    {
      rtx ni2dest;

      newi2pat = XVECEXP (newpat, 0, 0);
      ni2dest = SET_DEST (XVECEXP (newpat, 0, 0));
      newpat = XVECEXP (newpat, 0, 1);
      SUBST (SET_SRC (newpat),
             gen_lowpart_for_combine (GET_MODE (SET_SRC (newpat)), ni2dest));
      i2_code_number
        = recog_for_combine (&newi2pat, i2, &new_i2_notes, &i2_scratches);

      if (i2_code_number >= 0)
        insn_code_number
          = recog_for_combine (&newpat, i3, &new_i3_notes, &i3_scratches);

      if (insn_code_number >= 0)
        {
          rtx insn;
          rtx link;

          /* If we will be able to accept this, we have made a change to the
             destination of I3.  This can invalidate a LOG_LINKS pointing
             to I3.  No other part of combine.c makes such a transformation.

             The new I3 will have a destination that was previously the
             destination of I1 or I2 and which was used in i2 or I3.  Call
             distribute_links to make a LOG_LINK from the next use of
             that destination.  */

          PATTERN (i3) = newpat;
          distribute_links (gen_rtx_INSN_LIST (VOIDmode, i3, NULL_RTX));

          /* I3 now uses what used to be its destination and which is
             now I2's destination.  That means we need a LOG_LINK from
             I3 to I2.  But we used to have one, so we still will.

             However, some later insn might be using I2's dest and have
             a LOG_LINK pointing at I3.  We must remove this link.
             The simplest way to remove the link is to point it at I1,
             which we know will be a NOTE.  */

          for (insn = NEXT_INSN (i3);
               insn && (this_basic_block == n_basic_blocks - 1
                        || insn != basic_block_head[this_basic_block + 1]);
               insn = NEXT_INSN (insn))
            {
              if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
                  && reg_referenced_p (ni2dest, PATTERN (insn)))
                {
                  for (link = LOG_LINKS (insn); link;
                       link = XEXP (link, 1))
                    if (XEXP (link, 0) == i3)
                      XEXP (link, 0) = i1;

                  break;
                }
            }
        }
    }
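
  /* Illustrative example (not from the original sources; register numbers
     made up, and the exact lowpart form depends on the target): a combined
     pattern of the shape

         (parallel [(set (reg 70) (sign_extend:SI (mem:QI (reg 66))))
                    (set (reg 71) (mem:QI (reg 66)))])

     is split by the code above into

         I2: (set (reg 70) (sign_extend:SI (mem:QI (reg 66))))
         I3: (set (reg 71) (subreg:QI (reg 70) 0))

     i.e. one load from memory followed by a register-register copy in the
     narrow mode, courtesy of gen_lowpart_for_combine.  */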

  /* Similarly, check for a case where we have a PARALLEL of two independent
     SETs but we started with three insns.  In this case, we can do the sets
     as two separate insns.  This case occurs when some SET allows two
     other insns to combine, but the destination of that SET is still live.  */

  else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0
           && GET_CODE (newpat) == PARALLEL
           && XVECLEN (newpat, 0) == 2
           && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
           && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != ZERO_EXTRACT
           && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != STRICT_LOW_PART
           && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
           && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT
           && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART
           && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 1)),
                                   INSN_CUID (i2))
           /* Don't pass sets with (USE (MEM ...)) dests to the following.  */
           && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != USE
           && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != USE
           && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 1)),
                                  XVECEXP (newpat, 0, 0))
           && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 0)),
                                  XVECEXP (newpat, 0, 1)))
    {
      /* Normally, it doesn't matter which of the two is done first,
         but it does if one references cc0.  In that case, it has to
         be first.  */
#ifdef HAVE_cc0
      if (reg_referenced_p (cc0_rtx, XVECEXP (newpat, 0, 0)))
        {
          newi2pat = XVECEXP (newpat, 0, 0);
          newpat = XVECEXP (newpat, 0, 1);
        }
      else
#endif
        {
          newi2pat = XVECEXP (newpat, 0, 1);
          newpat = XVECEXP (newpat, 0, 0);
        }

      i2_code_number
        = recog_for_combine (&newi2pat, i2, &new_i2_notes, &i2_scratches);

      if (i2_code_number >= 0)
        insn_code_number
          = recog_for_combine (&newpat, i3, &new_i3_notes, &i3_scratches);
    }

  /* If it still isn't recognized, fail and change things back the way they
     were.  */
  if ((insn_code_number < 0
       /* Is the result a reasonable ASM_OPERANDS?  */
       && (! check_asm_operands (newpat) || added_sets_1 || added_sets_2)))
    {
      undo_all ();
      return 0;
    }

  /* If we had to change another insn, make sure it is valid also.  */
  if (undobuf.other_insn)
    {
      rtx other_pat = PATTERN (undobuf.other_insn);
      rtx new_other_notes;
      rtx note, next;

      CLEAR_HARD_REG_SET (newpat_used_regs);

      other_code_number
        = recog_for_combine (&other_pat, undobuf.other_insn,
                             &new_other_notes, &other_scratches);

      if (other_code_number < 0 && ! check_asm_operands (other_pat))
        {
          undo_all ();
          return 0;
        }

      PATTERN (undobuf.other_insn) = other_pat;

      /* If any of the notes in OTHER_INSN were REG_UNUSED, ensure that they
         are still valid.  Then add any non-duplicate notes added by
         recog_for_combine.  */
      for (note = REG_NOTES (undobuf.other_insn); note; note = next)
        {
          next = XEXP (note, 1);

          if (REG_NOTE_KIND (note) == REG_UNUSED
              && ! reg_set_p (XEXP (note, 0), PATTERN (undobuf.other_insn)))
            {
              if (GET_CODE (XEXP (note, 0)) == REG)
                REG_N_DEATHS (REGNO (XEXP (note, 0)))--;

              remove_note (undobuf.other_insn, note);
            }
        }

      for (note = new_other_notes; note; note = XEXP (note, 1))
        if (GET_CODE (XEXP (note, 0)) == REG)
          REG_N_DEATHS (REGNO (XEXP (note, 0)))++;

      distribute_notes (new_other_notes, undobuf.other_insn,
                        undobuf.other_insn, NULL_RTX, NULL_RTX, NULL_RTX);
    }

  /* We now know that we can do this combination.  Merge the insns and
     update the status of registers and LOG_LINKS.  */

  {
    rtx i3notes, i2notes, i1notes = 0;
    rtx i3links, i2links, i1links = 0;
    rtx midnotes = 0;
    register int regno;
    /* Compute which registers we expect to eliminate.  newi2pat may be setting
       either i3dest or i2dest, so we must check it.  Also, i1dest may be the
       same as i3dest, in which case newi2pat may be setting i1dest.  */
    rtx elim_i2 = ((newi2pat && reg_set_p (i2dest, newi2pat))
                   || i2dest_in_i2src || i2dest_in_i1src
                   ? 0 : i2dest);
    rtx elim_i1 = (i1 == 0 || i1dest_in_i1src
                   || (newi2pat && reg_set_p (i1dest, newi2pat))
                   ? 0 : i1dest);

    /* Get the old REG_NOTES and LOG_LINKS from all our insns and
       clear them.  */
    i3notes = REG_NOTES (i3), i3links = LOG_LINKS (i3);
    i2notes = REG_NOTES (i2), i2links = LOG_LINKS (i2);
    if (i1)
      i1notes = REG_NOTES (i1), i1links = LOG_LINKS (i1);

    /* Ensure that we do not have something that should not be shared but
       occurs multiple times in the new insns.  Check this by first
       resetting all the `used' flags and then copying anything that is
       shared.  */

    reset_used_flags (i3notes);
    reset_used_flags (i2notes);
    reset_used_flags (i1notes);
    reset_used_flags (newpat);
    reset_used_flags (newi2pat);
    if (undobuf.other_insn)
      reset_used_flags (PATTERN (undobuf.other_insn));

    i3notes = copy_rtx_if_shared (i3notes);
    i2notes = copy_rtx_if_shared (i2notes);
    i1notes = copy_rtx_if_shared (i1notes);
    newpat = copy_rtx_if_shared (newpat);
    newi2pat = copy_rtx_if_shared (newi2pat);
    if (undobuf.other_insn)
      reset_used_flags (PATTERN (undobuf.other_insn));

    INSN_CODE (i3) = insn_code_number;
    PATTERN (i3) = newpat;
    if (undobuf.other_insn)
      INSN_CODE (undobuf.other_insn) = other_code_number;

    /* We had one special case above where I2 had more than one set and
       we replaced a destination of one of those sets with the destination
       of I3.  In that case, we have to update LOG_LINKS of insns later
       in this basic block.  Note that this (expensive) case is rare.

       Also, in this case, we must pretend that all REG_NOTEs for I2
       actually came from I3, so that REG_UNUSED notes from I2 will be
       properly handled.  */

    if (i3_subst_into_i2)
      {
        for (i = 0; i < XVECLEN (PATTERN (i2), 0); i++)
          if (GET_CODE (SET_DEST (XVECEXP (PATTERN (i2), 0, i))) == REG
              && SET_DEST (XVECEXP (PATTERN (i2), 0, i)) != i2dest
              && ! find_reg_note (i2, REG_UNUSED,
                                  SET_DEST (XVECEXP (PATTERN (i2), 0, i))))
            for (temp = NEXT_INSN (i2);
                 temp && (this_basic_block == n_basic_blocks - 1
                          || basic_block_head[this_basic_block] != temp);
                 temp = NEXT_INSN (temp))
              if (temp != i3 && GET_RTX_CLASS (GET_CODE (temp)) == 'i')
                for (link = LOG_LINKS (temp); link; link = XEXP (link, 1))
                  if (XEXP (link, 0) == i2)
                    XEXP (link, 0) = i3;

        if (i3notes)
          {
            rtx link = i3notes;
            while (XEXP (link, 1))
              link = XEXP (link, 1);
            XEXP (link, 1) = i2notes;
          }
        else
          i3notes = i2notes;
        i2notes = 0;
      }

    LOG_LINKS (i3) = 0;
    REG_NOTES (i3) = 0;
    LOG_LINKS (i2) = 0;
    REG_NOTES (i2) = 0;

    if (newi2pat)
      {
        INSN_CODE (i2) = i2_code_number;
        PATTERN (i2) = newi2pat;
      }
    else
      {
        PUT_CODE (i2, NOTE);
        NOTE_LINE_NUMBER (i2) = NOTE_INSN_DELETED;
        NOTE_SOURCE_FILE (i2) = 0;
      }

    if (i1)
      {
        LOG_LINKS (i1) = 0;
        REG_NOTES (i1) = 0;
        PUT_CODE (i1, NOTE);
        NOTE_LINE_NUMBER (i1) = NOTE_INSN_DELETED;
        NOTE_SOURCE_FILE (i1) = 0;
      }

    /* Get death notes for everything that is now used in either I3 or
       I2 and used to die in a previous insn.  If we built two new
       patterns, move from I1 to I2 then I2 to I3 so that we get the
       proper movement on registers that I2 modifies.  */

    if (newi2pat)
      {
        move_deaths (newi2pat, NULL_RTX, INSN_CUID (i1), i2, &midnotes);
        move_deaths (newpat, newi2pat, INSN_CUID (i1), i3, &midnotes);
      }
    else
      move_deaths (newpat, NULL_RTX, i1 ? INSN_CUID (i1) : INSN_CUID (i2),
                   i3, &midnotes);

    /* Distribute all the LOG_LINKS and REG_NOTES from I1, I2, and I3.  */
    if (i3notes)
      distribute_notes (i3notes, i3, i3, newi2pat ? i2 : NULL_RTX,
                        elim_i2, elim_i1);
    if (i2notes)
      distribute_notes (i2notes, i2, i3, newi2pat ? i2 : NULL_RTX,
                        elim_i2, elim_i1);
    if (i1notes)
      distribute_notes (i1notes, i1, i3, newi2pat ? i2 : NULL_RTX,
                        elim_i2, elim_i1);
    if (midnotes)
      distribute_notes (midnotes, NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
                        elim_i2, elim_i1);

    /* Distribute any notes added to I2 or I3 by recog_for_combine.  We
       know these are REG_UNUSED and want them to go to the desired insn,
       so we always pass it as i3.  We have not counted the notes in
       reg_n_deaths yet, so we need to do so now.  */

    if (newi2pat && new_i2_notes)
      {
        for (temp = new_i2_notes; temp; temp = XEXP (temp, 1))
          if (GET_CODE (XEXP (temp, 0)) == REG)
            REG_N_DEATHS (REGNO (XEXP (temp, 0)))++;

        distribute_notes (new_i2_notes, i2, i2, NULL_RTX, NULL_RTX, NULL_RTX);
      }

    if (new_i3_notes)
      {
        for (temp = new_i3_notes; temp; temp = XEXP (temp, 1))
          if (GET_CODE (XEXP (temp, 0)) == REG)
            REG_N_DEATHS (REGNO (XEXP (temp, 0)))++;

        distribute_notes (new_i3_notes, i3, i3, NULL_RTX, NULL_RTX, NULL_RTX);
      }

    /* If I3DEST was used in I3SRC, it really died in I3.  We may need to
       put a REG_DEAD note for it somewhere.  If NEWI2PAT exists and sets
       I3DEST, the death must be somewhere before I2, not I3.  If we passed I3
       in that case, it might delete I2.  Similarly for I2 and I1.
       Show an additional death due to the REG_DEAD note we make here.  If
       we discard it in distribute_notes, we will decrement it again.  */

    if (i3dest_killed)
      {
        if (GET_CODE (i3dest_killed) == REG)
          REG_N_DEATHS (REGNO (i3dest_killed))++;

        if (newi2pat && reg_set_p (i3dest_killed, newi2pat))
          distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i3dest_killed,
                                               NULL_RTX),
                            NULL_RTX, i2, NULL_RTX, elim_i2, elim_i1);
        else
          distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i3dest_killed,
                                               NULL_RTX),
                            NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
                            elim_i2, elim_i1);
      }

    if (i2dest_in_i2src)
      {
        if (GET_CODE (i2dest) == REG)
          REG_N_DEATHS (REGNO (i2dest))++;

        if (newi2pat && reg_set_p (i2dest, newi2pat))
          distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i2dest, NULL_RTX),
                            NULL_RTX, i2, NULL_RTX, NULL_RTX, NULL_RTX);
        else
          distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i2dest, NULL_RTX),
                            NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
                            NULL_RTX, NULL_RTX);
      }

    if (i1dest_in_i1src)
      {
        if (GET_CODE (i1dest) == REG)
          REG_N_DEATHS (REGNO (i1dest))++;

        if (newi2pat && reg_set_p (i1dest, newi2pat))
          distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i1dest, NULL_RTX),
                            NULL_RTX, i2, NULL_RTX, NULL_RTX, NULL_RTX);
        else
          distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i1dest, NULL_RTX),
                            NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
                            NULL_RTX, NULL_RTX);
      }

    distribute_links (i3links);
    distribute_links (i2links);
    distribute_links (i1links);

    if (GET_CODE (i2dest) == REG)
      {
        rtx link;
        rtx i2_insn = 0, i2_val = 0, set;

        /* The insn that used to set this register doesn't exist, and
           this life of the register may not exist either.  See if one of
           I3's links points to an insn that sets I2DEST.  If it does,
           that is now the last known value for I2DEST.  If we don't update
           this and I2 set the register to a value that depended on its old
           contents, we will get confused.  If this insn is used, things
           will be set correctly in combine_instructions.  */

        for (link = LOG_LINKS (i3); link; link = XEXP (link, 1))
          if ((set = single_set (XEXP (link, 0))) != 0
              && rtx_equal_p (i2dest, SET_DEST (set)))
            i2_insn = XEXP (link, 0), i2_val = SET_SRC (set);

        record_value_for_reg (i2dest, i2_insn, i2_val);

        /* If the reg formerly set in I2 died only once and that was in I3,
           zero its use count so it won't make `reload' do any work.  */
        if (! added_sets_2
            && (newi2pat == 0 || ! reg_mentioned_p (i2dest, newi2pat))
            && ! i2dest_in_i2src)
          {
            regno = REGNO (i2dest);
            REG_N_SETS (regno)--;
            if (REG_N_SETS (regno) == 0
                && ! REGNO_REG_SET_P (basic_block_live_at_start[0], regno))
              REG_N_REFS (regno) = 0;
          }
      }

    if (i1 && GET_CODE (i1dest) == REG)
      {
        rtx link;
        rtx i1_insn = 0, i1_val = 0, set;

        for (link = LOG_LINKS (i3); link; link = XEXP (link, 1))
          if ((set = single_set (XEXP (link, 0))) != 0
              && rtx_equal_p (i1dest, SET_DEST (set)))
            i1_insn = XEXP (link, 0), i1_val = SET_SRC (set);

        record_value_for_reg (i1dest, i1_insn, i1_val);

        regno = REGNO (i1dest);
        if (! added_sets_1 && ! i1dest_in_i1src)
          {
            REG_N_SETS (regno)--;
            if (REG_N_SETS (regno) == 0
                && ! REGNO_REG_SET_P (basic_block_live_at_start[0], regno))
              REG_N_REFS (regno) = 0;
          }
      }

    /* Update reg_nonzero_bits et al for any changes that may have been made
       to this insn.  */

    note_stores (newpat, set_nonzero_bits_and_sign_copies);
    if (newi2pat)
      note_stores (newi2pat, set_nonzero_bits_and_sign_copies);

    /* If we added any (clobber (scratch)), add them to the max for a
       block.  This is a very pessimistic calculation, since we might
       have had them already and this might not be the worst block, but
       it's not worth doing any better.  */
    max_scratch += i3_scratches + i2_scratches + other_scratches;

    /* If I3 is now an unconditional jump, ensure that it has a
       BARRIER following it since it may have initially been a
       conditional jump.  It may also be the last nonnote insn.  */

    if ((GET_CODE (newpat) == RETURN || simplejump_p (i3))
        && ((temp = next_nonnote_insn (i3)) == NULL_RTX
            || GET_CODE (temp) != BARRIER))
      emit_barrier_after (i3);
  }

  combine_successes++;

  /* Clear this here, so that subsequent get_last_value calls are not
     affected.  */
  subst_prev_insn = NULL_RTX;

  if (added_links_insn
      && (newi2pat == 0 || INSN_CUID (added_links_insn) < INSN_CUID (i2))
      && INSN_CUID (added_links_insn) < INSN_CUID (i3))
    return added_links_insn;
  else
    return newi2pat ? i2 : i3;
}
\f
/* Undo all the modifications recorded in undobuf.  */

static void
undo_all ()
{
  struct undo *undo, *next;

  for (undo = undobuf.undos; undo; undo = next)
    {
      next = undo->next;
      if (undo->is_int)
        *undo->where.i = undo->old_contents.i;
      else
        *undo->where.r = undo->old_contents.r;

      undo->next = undobuf.frees;
      undobuf.frees = undo;
    }

  obfree (undobuf.storage);
  undobuf.undos = undobuf.previous_undos = 0;

  /* Clear this here, so that subsequent get_last_value calls are not
     affected.  */
  subst_prev_insn = NULL_RTX;
}
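
/* A minimal sketch (not part of the original sources) of how a single
   undo record behaves, using only the fields that undo_all above already
   references.  Recording a change saves the old contents and the address
   where the change was made; undoing writes the old contents back and
   moves the record to the free list for reuse.  */
#if 0
static void
undo_one (undo)
     struct undo *undo;
{
  if (undo->is_int)
    *undo->where.i = undo->old_contents.i;	/* e.g. a saved INSN_CODE  */
  else
    *undo->where.r = undo->old_contents.r;	/* a saved rtx pointer  */

  /* Recycle the record.  */
  undo->next = undobuf.frees;
  undobuf.frees = undo;
}
#endif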
\f
/* Find the innermost point within the rtx at LOC, possibly LOC itself,
   where we have an arithmetic expression and return that point.  LOC will
   be inside INSN.

   try_combine will call this function to see if an insn can be split into
   two insns.  */

static rtx *
find_split_point (loc, insn)
     rtx *loc;
     rtx insn;
{
  rtx x = *loc;
  enum rtx_code code = GET_CODE (x);
  rtx *split;
  int len = 0, pos, unsignedp;
  rtx inner;

  /* First special-case some codes.  */
  switch (code)
    {
    case SUBREG:
#ifdef INSN_SCHEDULING
      /* If we are making a paradoxical SUBREG invalid, it becomes a split
         point.  */
      if (GET_CODE (SUBREG_REG (x)) == MEM)
        return loc;
#endif
      return find_split_point (&SUBREG_REG (x), insn);

    case MEM:
#ifdef HAVE_lo_sum
      /* If we have (mem (const ..)) or (mem (symbol_ref ...)), split it
         using LO_SUM and HIGH.  */
      if (GET_CODE (XEXP (x, 0)) == CONST
          || GET_CODE (XEXP (x, 0)) == SYMBOL_REF)
        {
          SUBST (XEXP (x, 0),
                 gen_rtx_combine (LO_SUM, Pmode,
                                  gen_rtx_combine (HIGH, Pmode, XEXP (x, 0)),
                                  XEXP (x, 0)));
          return &XEXP (XEXP (x, 0), 0);
        }
#endif
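
      /* Illustrative example (not from the original sources): on a
         HAVE_lo_sum target, (mem (symbol_ref "x")) becomes

             (mem (lo_sum (high (symbol_ref "x")) (symbol_ref "x")))

         and the (high ...) operand is returned as the split point, so the
         high part of the address can be computed by a separate insn.  */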

      /* If we have a PLUS whose second operand is a constant and the
         address is not valid, perhaps we can split it up using
         the machine-specific way to split large constants.  We use
         the first pseudo-reg (one of the virtual regs) as a placeholder;
         it will not remain in the result.  */
      if (GET_CODE (XEXP (x, 0)) == PLUS
          && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
          && ! memory_address_p (GET_MODE (x), XEXP (x, 0)))
        {
          rtx reg = regno_reg_rtx[FIRST_PSEUDO_REGISTER];
          rtx seq = split_insns (gen_rtx_SET (VOIDmode, reg, XEXP (x, 0)),
                                 subst_insn);

          /* This should have produced two insns, each of which sets our
             placeholder.  If the source of the second is a valid address,
             we can put both sources together and make a split point
             in the middle.  */

          if (seq && XVECLEN (seq, 0) == 2
              && GET_CODE (XVECEXP (seq, 0, 0)) == INSN
              && GET_CODE (PATTERN (XVECEXP (seq, 0, 0))) == SET
              && SET_DEST (PATTERN (XVECEXP (seq, 0, 0))) == reg
              && ! reg_mentioned_p (reg,
                                    SET_SRC (PATTERN (XVECEXP (seq, 0, 0))))
              && GET_CODE (XVECEXP (seq, 0, 1)) == INSN
              && GET_CODE (PATTERN (XVECEXP (seq, 0, 1))) == SET
              && SET_DEST (PATTERN (XVECEXP (seq, 0, 1))) == reg
              && memory_address_p (GET_MODE (x),
                                   SET_SRC (PATTERN (XVECEXP (seq, 0, 1)))))
            {
              rtx src1 = SET_SRC (PATTERN (XVECEXP (seq, 0, 0)));
              rtx src2 = SET_SRC (PATTERN (XVECEXP (seq, 0, 1)));

              /* Replace the placeholder in SRC2 with SRC1.  If we can
                 find where in SRC2 it was placed, that can become our
                 split point and we can replace this address with SRC2.
                 Just try two obvious places.  */

              src2 = replace_rtx (src2, reg, src1);
              split = 0;
              if (XEXP (src2, 0) == src1)
                split = &XEXP (src2, 0);
              else if (GET_RTX_FORMAT (GET_CODE (XEXP (src2, 0)))[0] == 'e'
                       && XEXP (XEXP (src2, 0), 0) == src1)
                split = &XEXP (XEXP (src2, 0), 0);

              if (split)
                {
                  SUBST (XEXP (x, 0), src2);
                  return split;
                }
            }

          /* If that didn't work, perhaps the first operand is complex and
             needs to be computed separately, so make a split point there.
             This will occur on machines that just support REG + CONST
             and have a constant moved through some previous computation.  */

          else if (GET_RTX_CLASS (GET_CODE (XEXP (XEXP (x, 0), 0))) != 'o'
                   && ! (GET_CODE (XEXP (XEXP (x, 0), 0)) == SUBREG
                         && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (XEXP (x, 0), 0))))
                             == 'o')))
            return &XEXP (XEXP (x, 0), 0);
        }
      break;

    case SET:
#ifdef HAVE_cc0
      /* If SET_DEST is CC0 and SET_SRC is not an operand, a COMPARE, or a
         ZERO_EXTRACT, the most likely reason why this doesn't match is that
         we need to put the operand into a register.  So split at that
         point.  */

      if (SET_DEST (x) == cc0_rtx
          && GET_CODE (SET_SRC (x)) != COMPARE
          && GET_CODE (SET_SRC (x)) != ZERO_EXTRACT
          && GET_RTX_CLASS (GET_CODE (SET_SRC (x))) != 'o'
          && ! (GET_CODE (SET_SRC (x)) == SUBREG
                && GET_RTX_CLASS (GET_CODE (SUBREG_REG (SET_SRC (x)))) == 'o'))
        return &SET_SRC (x);
#endif

      /* See if we can split SET_SRC as it stands.  */
      split = find_split_point (&SET_SRC (x), insn);
      if (split && split != &SET_SRC (x))
        return split;

      /* See if we can split SET_DEST as it stands.  */
      split = find_split_point (&SET_DEST (x), insn);
      if (split && split != &SET_DEST (x))
        return split;

      /* See if this is a bitfield assignment with everything constant.  If
         so, this is an IOR of an AND, so split it into that.  */
      if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
          && (GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
              <= HOST_BITS_PER_WIDE_INT)
          && GET_CODE (XEXP (SET_DEST (x), 1)) == CONST_INT
          && GET_CODE (XEXP (SET_DEST (x), 2)) == CONST_INT
          && GET_CODE (SET_SRC (x)) == CONST_INT
          && ((INTVAL (XEXP (SET_DEST (x), 1))
               + INTVAL (XEXP (SET_DEST (x), 2)))
              <= GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0))))
          && ! side_effects_p (XEXP (SET_DEST (x), 0)))
        {
          int pos = INTVAL (XEXP (SET_DEST (x), 2));
          int len = INTVAL (XEXP (SET_DEST (x), 1));
          int src = INTVAL (SET_SRC (x));
          rtx dest = XEXP (SET_DEST (x), 0);
          enum machine_mode mode = GET_MODE (dest);
          unsigned HOST_WIDE_INT mask = ((HOST_WIDE_INT) 1 << len) - 1;

          if (BITS_BIG_ENDIAN)
            pos = GET_MODE_BITSIZE (mode) - len - pos;

          if (src == mask)
            SUBST (SET_SRC (x),
                   gen_binary (IOR, mode, dest, GEN_INT (src << pos)));
          else
            SUBST (SET_SRC (x),
                   gen_binary (IOR, mode,
                               gen_binary (AND, mode, dest,
                                           GEN_INT (~ (mask << pos)
                                                    & GET_MODE_MASK (mode))),
                               GEN_INT (src << pos)));

          SUBST (SET_DEST (x), dest);

          split = find_split_point (&SET_SRC (x), insn);
          if (split && split != &SET_SRC (x))
            return split;
        }
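
      /* Worked example (illustrative, not from the original sources):
         storing the constant 3 into a 4-bit field at bit 8 of (reg 70),

             (set (zero_extract:SI (reg 70) (const_int 4) (const_int 8))
                  (const_int 3))

         has MASK = 15, so it becomes the explicit AND/IOR form

             (set (reg 70) (ior:SI (and:SI (reg 70) (const_int -3841))
                                   (const_int 768)))

         since ~(15 << 8) is -3841 (sign-extended) and 3 << 8 is 768.
         When SRC == MASK, the AND is skipped entirely.  */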

      /* Otherwise, see if this is an operation that we can split into two.
         If so, try to split that.  */
      code = GET_CODE (SET_SRC (x));

      switch (code)
        {
        case AND:
          /* If we are AND'ing with a large constant that is only a single
             bit and the result is only being used in a context where we
             need to know if it is zero or non-zero, replace it with a bit
             extraction.  This will avoid the large constant, which might
             have taken more than one insn to make.  If the constant were
             not a valid argument to the AND but took only one insn to make,
             this is no worse, but if it took more than one insn, it will
             be better.  */

          if (GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
              && GET_CODE (XEXP (SET_SRC (x), 0)) == REG
              && (pos = exact_log2 (INTVAL (XEXP (SET_SRC (x), 1)))) >= 7
              && GET_CODE (SET_DEST (x)) == REG
              && (split = find_single_use (SET_DEST (x), insn, NULL_PTR)) != 0
              && (GET_CODE (*split) == EQ || GET_CODE (*split) == NE)
              && XEXP (*split, 0) == SET_DEST (x)
              && XEXP (*split, 1) == const0_rtx)
            {
              rtx extraction = make_extraction (GET_MODE (SET_DEST (x)),
                                                XEXP (SET_SRC (x), 0),
                                                pos, NULL_RTX, 1, 1, 0, 0);
              if (extraction != 0)
                {
                  SUBST (SET_SRC (x), extraction);
                  return find_split_point (loc, insn);
                }
            }
          break;
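
          /* Worked example (illustrative, not from the original sources):
             for (set (reg 70) (and:SI (reg 71) (const_int 0x8000))) whose
             only use is (ne (reg 70) (const_int 0)), exact_log2 (0x8000)
             is 15, which is >= 7, so the AND is replaced by a one-bit
             unsigned extraction of bit 15, avoiding the large constant.  */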

        case NE:
          /* If STORE_FLAG_VALUE is -1 and this is (NE X 0) where only one
             bit of X is known to be on, it can be converted into a NEG of
             a shift.  */
          if (STORE_FLAG_VALUE == -1 && XEXP (SET_SRC (x), 1) == const0_rtx
              && GET_MODE (SET_SRC (x)) == GET_MODE (XEXP (SET_SRC (x), 0))
              && 1 <= (pos = exact_log2
                       (nonzero_bits (XEXP (SET_SRC (x), 0),
                                      GET_MODE (XEXP (SET_SRC (x), 0))))))
            {
              enum machine_mode mode = GET_MODE (XEXP (SET_SRC (x), 0));

              SUBST (SET_SRC (x),
                     gen_rtx_combine (NEG, mode,
                                      gen_rtx_combine (LSHIFTRT, mode,
                                                       XEXP (SET_SRC (x), 0),
                                                       GEN_INT (pos))));

              split = find_split_point (&SET_SRC (x), insn);
              if (split && split != &SET_SRC (x))
                return split;
            }
          break;

        case SIGN_EXTEND:
          inner = XEXP (SET_SRC (x), 0);

          /* We can't optimize if either mode is a partial integer
             mode as we don't know how many bits are significant
             in those modes.  */
          if (GET_MODE_CLASS (GET_MODE (inner)) == MODE_PARTIAL_INT
              || GET_MODE_CLASS (GET_MODE (SET_SRC (x))) == MODE_PARTIAL_INT)
            break;

          pos = 0;
          len = GET_MODE_BITSIZE (GET_MODE (inner));
          unsignedp = 0;
          break;

        case SIGN_EXTRACT:
        case ZERO_EXTRACT:
          if (GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
              && GET_CODE (XEXP (SET_SRC (x), 2)) == CONST_INT)
            {
              inner = XEXP (SET_SRC (x), 0);
              len = INTVAL (XEXP (SET_SRC (x), 1));
              pos = INTVAL (XEXP (SET_SRC (x), 2));

              if (BITS_BIG_ENDIAN)
                pos = GET_MODE_BITSIZE (GET_MODE (inner)) - len - pos;
              unsignedp = (code == ZERO_EXTRACT);
            }
          break;

        default:
          break;
        }

      if (len && pos >= 0 && pos + len <= GET_MODE_BITSIZE (GET_MODE (inner)))
        {
          enum machine_mode mode = GET_MODE (SET_SRC (x));

          /* For unsigned, we have a choice of a shift followed by an
             AND or two shifts.  Use two shifts for field sizes where the
             constant might be too large.  We assume here that we can
             always at least get 8-bit constants in an AND insn, which is
             true for every current RISC.  */

          if (unsignedp && len <= 8)
            {
              SUBST (SET_SRC (x),
                     gen_rtx_combine
                     (AND, mode,
                      gen_rtx_combine (LSHIFTRT, mode,
                                       gen_lowpart_for_combine (mode, inner),
                                       GEN_INT (pos)),
                      GEN_INT (((HOST_WIDE_INT) 1 << len) - 1)));

              split = find_split_point (&SET_SRC (x), insn);
              if (split && split != &SET_SRC (x))
                return split;
            }
          else
            {
              SUBST (SET_SRC (x),
                     gen_rtx_combine
                     (unsignedp ? LSHIFTRT : ASHIFTRT, mode,
                      gen_rtx_combine (ASHIFT, mode,
                                       gen_lowpart_for_combine (mode, inner),
                                       GEN_INT (GET_MODE_BITSIZE (mode)
                                                - len - pos)),
                      GEN_INT (GET_MODE_BITSIZE (mode) - len)));

              split = find_split_point (&SET_SRC (x), insn);
              if (split && split != &SET_SRC (x))
                return split;
            }
        }
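
      /* Worked example (illustrative, not from the original sources): an
         unsigned 8-bit field at bit 4 of (reg 70) in SImode splits as

             (and:SI (lshiftrt:SI (reg 70) (const_int 4)) (const_int 255))

         while a signed one uses the two-shift form

             (ashiftrt:SI (ashift:SI (reg 70) (const_int 20)) (const_int 24))

         since GET_MODE_BITSIZE (SImode) - len - pos is 32 - 8 - 4 == 20
         and GET_MODE_BITSIZE (SImode) - len is 32 - 8 == 24.  */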
2893
2894 /* See if this is a simple operation with a constant as the second
2895 operand. It might be that this constant is out of range and hence
2896 could be used as a split point. */
2897 if ((GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '2'
2898 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == 'c'
2899 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '<')
2900 && CONSTANT_P (XEXP (SET_SRC (x), 1))
2901 && (GET_RTX_CLASS (GET_CODE (XEXP (SET_SRC (x), 0))) == 'o'
2902 || (GET_CODE (XEXP (SET_SRC (x), 0)) == SUBREG
2903 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (SET_SRC (x), 0))))
2904 == 'o'))))
2905 return &XEXP (SET_SRC (x), 1);
2906
2907 /* Finally, see if this is a simple operation with its first operand
2908 not in a register. The operation might require this operand in a
2909 register, so return it as a split point. We can always do this
2910 because if the first operand were another operation, we would have
2911 already found it as a split point. */
2912 if ((GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '2'
2913 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == 'c'
2914 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '<'
2915 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '1')
2916 && ! register_operand (XEXP (SET_SRC (x), 0), VOIDmode))
2917 return &XEXP (SET_SRC (x), 0);
2918
2919 return 0;
2920
2921 case AND:
2922 case IOR:
2923 /* We write NOR as (and (not A) (not B)), but if we don't have a NOR,
2924 it is better to write this as (not (ior A B)) so we can split it.
2925 Similarly for IOR. */
2926 if (GET_CODE (XEXP (x, 0)) == NOT && GET_CODE (XEXP (x, 1)) == NOT)
2927 {
2928 SUBST (*loc,
2929 gen_rtx_combine (NOT, GET_MODE (x),
2930 gen_rtx_combine (code == IOR ? AND : IOR,
2931 GET_MODE (x),
2932 XEXP (XEXP (x, 0), 0),
2933 XEXP (XEXP (x, 1), 0))));
d0ab8cd3 2934 return find_split_point (loc, insn);
2935 }
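 /* For example, (and (not A) (not B)) becomes (not (ior A B)); the inner
    (ior A B) can then be split into an insn of its own when no single
    NOR instruction exists.  */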
2936
2937 /* Many RISC machines have a large set of logical insns. If the
2938 second operand is a NOT, put it first so we will try to split the
2939 other operand first. */
2940 if (GET_CODE (XEXP (x, 1)) == NOT)
2941 {
2942 rtx tem = XEXP (x, 0);
2943 SUBST (XEXP (x, 0), XEXP (x, 1));
2944 SUBST (XEXP (x, 1), tem);
2945 }
2946 break;
2947
2948 default:
2949 break;
2950 }
2951
2952 /* Otherwise, select our actions depending on our rtx class. */
2953 switch (GET_RTX_CLASS (code))
2954 {
2955 case 'b': /* This is ZERO_EXTRACT and SIGN_EXTRACT. */
2956 case '3':
d0ab8cd3 2957 split = find_split_point (&XEXP (x, 2), insn);
2958 if (split)
2959 return split;
0f41302f 2960 /* ... fall through ... */
2961 case '2':
2962 case 'c':
2963 case '<':
d0ab8cd3 2964 split = find_split_point (&XEXP (x, 1), insn);
2965 if (split)
2966 return split;
0f41302f 2967 /* ... fall through ... */
2968 case '1':
2969 /* Some machines have (and (shift ...) ...) insns. If X is not
2970 an AND, but XEXP (X, 0) is, use it as our split point. */
2971 if (GET_CODE (x) != AND && GET_CODE (XEXP (x, 0)) == AND)
2972 return &XEXP (x, 0);
2973
d0ab8cd3 2974 split = find_split_point (&XEXP (x, 0), insn);
2975 if (split)
2976 return split;
2977 return loc;
2978 }
2979
2980 /* Otherwise, we don't have a split point. */
2981 return 0;
2982}
2983\f
2984/* Throughout X, replace FROM with TO, and return the result.
2985 The result is TO if X is FROM;
2986 otherwise the result is X, but its contents may have been modified.
2987 If they were modified, a record was made in undobuf so that
2988 undo_all will (among other things) return X to its original state.
2989
2990 If the number of changes necessary is too much to record to undo,
2991 the excess changes are not made, so the result is invalid.
2992 The changes already made can still be undone.
2993 undobuf.num_undo is incremented for such changes, so by testing it
2994 the caller can tell whether the result is valid.
2995
2996 `n_occurrences' is incremented each time FROM is replaced.
2997
2998 IN_DEST is non-zero if we are processing the SET_DEST of a SET.
2999
5089e22e 3000 UNIQUE_COPY is non-zero if each substitution must be unique. We do this
3001 by copying if `n_occurrences' is non-zero. */
3002
3003static rtx
3004subst (x, from, to, in_dest, unique_copy)
3005 register rtx x, from, to;
3006 int in_dest;
3007 int unique_copy;
3008{
f24ad0e4 3009 register enum rtx_code code = GET_CODE (x);
230d793d 3010 enum machine_mode op0_mode = VOIDmode;
3011 register char *fmt;
3012 register int len, i;
3013 rtx new;
3014
3015/* Two expressions are equal if they are identical copies of a shared
3016 RTX or if they are both registers with the same register number
3017 and mode. */
3018
3019#define COMBINE_RTX_EQUAL_P(X,Y) \
3020 ((X) == (Y) \
3021 || (GET_CODE (X) == REG && GET_CODE (Y) == REG \
3022 && REGNO (X) == REGNO (Y) && GET_MODE (X) == GET_MODE (Y)))
3023
3024 if (! in_dest && COMBINE_RTX_EQUAL_P (x, from))
3025 {
3026 n_occurrences++;
3027 return (unique_copy && n_occurrences > 1 ? copy_rtx (to) : to);
3028 }
3029
3030 /* If X and FROM are the same register but different modes, they will
3031 not have been seen as equal above. However, flow.c will make a
3032 LOG_LINKS entry for that case. If we do nothing, we will try to
3033 rerecognize our original insn and, when it succeeds, we will
3034 delete the feeding insn, which is incorrect.
3035
3036 So force this insn not to match in this (rare) case. */
3037 if (! in_dest && code == REG && GET_CODE (from) == REG
3038 && REGNO (x) == REGNO (from))
38a448ca 3039 return gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
3040
3041 /* If this is an object, we are done unless it is a MEM or LO_SUM, both
3042 of which may contain things that can be combined. */
3043 if (code != MEM && code != LO_SUM && GET_RTX_CLASS (code) == 'o')
3044 return x;
3045
3046 /* It is possible to have a subexpression appear twice in the insn.
3047 Suppose that FROM is a register that appears within TO.
3048 Then, after that subexpression has been scanned once by `subst',
3049 the second time it is scanned, TO may be found. If we were
3050 to scan TO here, we would find FROM within it and create a
3051 self-referential rtl structure which is completely wrong. */
3052 if (COMBINE_RTX_EQUAL_P (x, to))
3053 return to;
3054
3055 len = GET_RTX_LENGTH (code);
3056 fmt = GET_RTX_FORMAT (code);
3057
3058 /* We don't need to process a SET_DEST that is a register, CC0, or PC, so
3059 set up to skip this common case. All other cases where we want to
3060 suppress replacing something inside a SET_SRC are handled via the
3061 IN_DEST operand. */
3062 if (code == SET
3063 && (GET_CODE (SET_DEST (x)) == REG
3064 || GET_CODE (SET_DEST (x)) == CC0
3065 || GET_CODE (SET_DEST (x)) == PC))
3066 fmt = "ie";
3067
3068 /* Get the mode of operand 0 in case X is now a SIGN_EXTEND of a
3069 constant. */
3070 if (fmt[0] == 'e')
3071 op0_mode = GET_MODE (XEXP (x, 0));
3072
3073 for (i = 0; i < len; i++)
3074 {
3075 if (fmt[i] == 'E')
3076 {
3077 register int j;
3078 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3079 {
3080 if (COMBINE_RTX_EQUAL_P (XVECEXP (x, i, j), from))
3081 {
3082 new = (unique_copy && n_occurrences ? copy_rtx (to) : to);
3083 n_occurrences++;
3084 }
3085 else
3086 {
3087 new = subst (XVECEXP (x, i, j), from, to, 0, unique_copy);
3088
3089 /* If this substitution failed, this whole thing fails. */
3090 if (GET_CODE (new) == CLOBBER && XEXP (new, 0) == const0_rtx)
3091 return new;
3092 }
3093
3094 SUBST (XVECEXP (x, i, j), new);
3095 }
3096 }
3097 else if (fmt[i] == 'e')
3098 {
3099 if (COMBINE_RTX_EQUAL_P (XEXP (x, i), from))
3100 {
3101 /* In general, don't install a subreg involving two modes not
3102 tieable. It can worsen register allocation, and can even
3103 make invalid reload insns, since the reg inside may need to
3104 be copied from in the outside mode, and that may be invalid
3105 if it is an fp reg copied in integer mode.
3106
3107 We allow two exceptions to this: It is valid if it is inside
3108 another SUBREG and the mode of that SUBREG and the mode of
3109 the inside of TO is tieable and it is valid if X is a SET
3110 that copies FROM to CC0. */
3111 if (GET_CODE (to) == SUBREG
3112 && ! MODES_TIEABLE_P (GET_MODE (to),
3113 GET_MODE (SUBREG_REG (to)))
3114 && ! (code == SUBREG
3115 && MODES_TIEABLE_P (GET_MODE (x),
3116 GET_MODE (SUBREG_REG (to))))
3117#ifdef HAVE_cc0
3118 && ! (code == SET && i == 1 && XEXP (x, 0) == cc0_rtx)
3119#endif
3120 )
38a448ca 3121 return gen_rtx_CLOBBER (VOIDmode, const0_rtx);
42301240 3122
3123 new = (unique_copy && n_occurrences ? copy_rtx (to) : to);
3124 n_occurrences++;
3125 }
3126 else
3127 /* If we are in a SET_DEST, suppress most cases unless we
3128 have gone inside a MEM, in which case we want to
3129 simplify the address. We assume here that things that
3130 are actually part of the destination have their inner
3131 parts in the first expression. This is true for SUBREG,
3132 STRICT_LOW_PART, and ZERO_EXTRACT, which are the only
3133 things aside from REG and MEM that should appear in a
3134 SET_DEST. */
3135 new = subst (XEXP (x, i), from, to,
3136 (((in_dest
3137 && (code == SUBREG || code == STRICT_LOW_PART
3138 || code == ZERO_EXTRACT))
3139 || code == SET)
3140 && i == 0), unique_copy);
3141
3142 /* If we found that we will have to reject this combination,
3143 indicate that by returning the CLOBBER ourselves, rather than
3144 an expression containing it. This will speed things up as
3145 well as prevent accidents where two CLOBBERs are considered
3146 to be equal, thus producing an incorrect simplification. */
3147
3148 if (GET_CODE (new) == CLOBBER && XEXP (new, 0) == const0_rtx)
3149 return new;
3150
3151 SUBST (XEXP (x, i), new);
3152 }
3153 }
3154
3155 /* Try to simplify X. If the simplification changed the code, it is likely
3156 that further simplification will help, so loop, but limit the number
3157 of repetitions that will be performed. */
3158
3159 for (i = 0; i < 4; i++)
3160 {
3161 /* If X is sufficiently simple, don't bother trying to do anything
3162 with it. */
3163 if (code != CONST_INT && code != REG && code != CLOBBER)
3164 x = simplify_rtx (x, op0_mode, i == 3, in_dest);
d0ab8cd3 3165
3166 if (GET_CODE (x) == code)
3167 break;
d0ab8cd3 3168
8079805d 3169 code = GET_CODE (x);
eeb43d32 3170
3171 /* We no longer know the original mode of operand 0 since we
3172 have changed the form of X. */
3173 op0_mode = VOIDmode;
3174 }
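  /* For example, when simplification rewrites (not (neg Y)) as
     (plus Y -1), CODE changes from NOT to PLUS and another pass lets the
     PLUS rules run; the cap of four iterations keeps this from looping
     indefinitely.  */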
eeb43d32 3175
3176 return x;
3177}
3178\f
3179/* Simplify X, a piece of RTL. We just operate on the expression at the
3180 outer level; call `subst' to simplify recursively. Return the new
3181 expression.
3182
3183 OP0_MODE is the original mode of XEXP (x, 0); LAST is nonzero if this
3184 will be the last iteration even if an expression with a code different from
3185 X is returned; IN_DEST is nonzero if we are inside a SET_DEST. */
eeb43d32 3186
3187static rtx
3188simplify_rtx (x, op0_mode, last, in_dest)
3189 rtx x;
3190 enum machine_mode op0_mode;
3191 int last;
3192 int in_dest;
3193{
3194 enum rtx_code code = GET_CODE (x);
3195 enum machine_mode mode = GET_MODE (x);
3196 rtx temp;
3197 int i;
d0ab8cd3 3198
3199 /* If this is a commutative operation, put a constant last and a complex
3200 expression first. We don't need to do this for comparisons here. */
3201 if (GET_RTX_CLASS (code) == 'c'
3202 && ((CONSTANT_P (XEXP (x, 0)) && GET_CODE (XEXP (x, 1)) != CONST_INT)
3203 || (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == 'o'
3204 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) != 'o')
3205 || (GET_CODE (XEXP (x, 0)) == SUBREG
3206 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0)))) == 'o'
3207 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) != 'o')))
3208 {
3209 temp = XEXP (x, 0);
3210 SUBST (XEXP (x, 0), XEXP (x, 1));
3211 SUBST (XEXP (x, 1), temp);
3212 }
3213
3214 /* If this is a PLUS, MINUS, or MULT, and the first operand is the
3215 sign extension of a PLUS with a constant, reverse the order of the sign
3216 extension and the addition. Note that this is not the same as the original
3217 code, but overflow is undefined for signed values. Also note that the
3218 PLUS will have been partially moved "inside" the sign-extension, so that
3219 the first operand of X will really look like:
3220 (ashiftrt (plus (ashift A C4) C5) C4).
3221 We convert this to
3222 (plus (ashiftrt (ashift A C4) C4) C2), where C2 is (ashiftrt C5 C4),
3223 and replace the first operand of X with that expression. Later parts
3224 of this function may simplify the expression further.
3225
3226 For example, if we start with (mult (sign_extend (plus A C1)) C2),
3227 we swap the SIGN_EXTEND and PLUS. Later code will apply the
3228 distributive law to produce (plus (mult (sign_extend A) C2) C3).
3229
3230 We do this to simplify address expressions. */
3231
3232 if ((code == PLUS || code == MINUS || code == MULT)
3233 && GET_CODE (XEXP (x, 0)) == ASHIFTRT
3234 && GET_CODE (XEXP (XEXP (x, 0), 0)) == PLUS
3235 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == ASHIFT
3236 && GET_CODE (XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 1)) == CONST_INT
3237 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3238 && XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 1) == XEXP (XEXP (x, 0), 1)
3239 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
3240 && (temp = simplify_binary_operation (ASHIFTRT, mode,
3241 XEXP (XEXP (XEXP (x, 0), 0), 1),
3242 XEXP (XEXP (x, 0), 1))) != 0)
3243 {
3244 rtx new
3245 = simplify_shift_const (NULL_RTX, ASHIFT, mode,
3246 XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 0),
3247 INTVAL (XEXP (XEXP (x, 0), 1)));
3248
3249 new = simplify_shift_const (NULL_RTX, ASHIFTRT, mode, new,
3250 INTVAL (XEXP (XEXP (x, 0), 1)));
3251
3252 SUBST (XEXP (x, 0), gen_binary (PLUS, mode, new, temp));
3253 }
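  /* A worked instance, assuming HImode inside 32-bit SImode so that
     C4 == 16: (ashiftrt (plus (ashift A 16) 0x20000) 16) becomes
     (plus (ashiftrt (ashift A 16) 16) 2), since 0x20000 >> 16 == 2.  */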
3254
3255 /* If this is a simple operation applied to an IF_THEN_ELSE, try
3256 applying it to the arms of the IF_THEN_ELSE. This often simplifies
3257 things. Check for cases where both arms are testing the same
3258 condition.
3259
3260 Don't do anything if all operands are very simple. */
3261
3262 if (((GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c'
3263 || GET_RTX_CLASS (code) == '<')
3264 && ((GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) != 'o'
3265 && ! (GET_CODE (XEXP (x, 0)) == SUBREG
3266 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0))))
3267 == 'o')))
3268 || (GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) != 'o'
3269 && ! (GET_CODE (XEXP (x, 1)) == SUBREG
3270 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 1))))
3271 == 'o')))))
3272 || (GET_RTX_CLASS (code) == '1'
3273 && ((GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) != 'o'
3274 && ! (GET_CODE (XEXP (x, 0)) == SUBREG
3275 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0))))
3276 == 'o'))))))
d0ab8cd3 3277 {
3278 rtx cond, true, false;
3279
3280 cond = if_then_else_cond (x, &true, &false);
3281 if (cond != 0
3282 /* If everything is a comparison, what we have is highly unlikely
3283 to be simpler, so don't use it. */
3284 && ! (GET_RTX_CLASS (code) == '<'
3285 && (GET_RTX_CLASS (GET_CODE (true)) == '<'
3286 || GET_RTX_CLASS (GET_CODE (false)) == '<')))
3287 {
3288 rtx cop1 = const0_rtx;
3289 enum rtx_code cond_code = simplify_comparison (NE, &cond, &cop1);
3290
3291 if (cond_code == NE && GET_RTX_CLASS (GET_CODE (cond)) == '<')
3292 return x;
3293
3294 /* Simplify the alternative arms; this may collapse the true and
3295 false arms to store-flag values. */
3296 true = subst (true, pc_rtx, pc_rtx, 0, 0);
3297 false = subst (false, pc_rtx, pc_rtx, 0, 0);
3298
3299 /* Restarting if we generate a store-flag expression will cause
3300 us to loop. Just drop through in this case. */
3301
3302 /* If the result values are STORE_FLAG_VALUE and zero, we can
3303 just make the comparison operation. */
3304 if (true == const_true_rtx && false == const0_rtx)
3305 x = gen_binary (cond_code, mode, cond, cop1);
3306 else if (true == const0_rtx && false == const_true_rtx)
3307 x = gen_binary (reverse_condition (cond_code), mode, cond, cop1);
3308
3309 /* Likewise, we can make the negate of a comparison operation
3310 if the result values are - STORE_FLAG_VALUE and zero. */
3311 else if (GET_CODE (true) == CONST_INT
3312 && INTVAL (true) == - STORE_FLAG_VALUE
3313 && false == const0_rtx)
0c1c8ea6 3314 x = gen_unary (NEG, mode, mode,
3315 gen_binary (cond_code, mode, cond, cop1));
3316 else if (GET_CODE (false) == CONST_INT
3317 && INTVAL (false) == - STORE_FLAG_VALUE
3318 && true == const0_rtx)
0c1c8ea6 3319 x = gen_unary (NEG, mode, mode,
3320 gen_binary (reverse_condition (cond_code),
3321 mode, cond, cop1));
3322 else
3323 return gen_rtx_IF_THEN_ELSE (mode,
3324 gen_binary (cond_code, VOIDmode,
3325 cond, cop1),
3326 true, false);
5109d49f 3327
3328 code = GET_CODE (x);
3329 op0_mode = VOIDmode;
abe6e52f 3330 }
3331 }
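  /* For example, with STORE_FLAG_VALUE == 1, if both arms collapse to the
     store-flag values 1 and 0, (if_then_else (ne R 0) 1 0) is replaced by
     the comparison (ne R 0) itself.  */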
3332
3333 /* Try to fold this expression in case we have constants that weren't
3334 present before. */
3335 temp = 0;
3336 switch (GET_RTX_CLASS (code))
3337 {
3338 case '1':
3339 temp = simplify_unary_operation (code, mode, XEXP (x, 0), op0_mode);
3340 break;
3341 case '<':
3342 temp = simplify_relational_operation (code, op0_mode,
3343 XEXP (x, 0), XEXP (x, 1));
3344#ifdef FLOAT_STORE_FLAG_VALUE
3345 if (temp != 0 && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
3346 temp = ((temp == const0_rtx) ? CONST0_RTX (GET_MODE (x))
3347 : immed_real_const_1 (FLOAT_STORE_FLAG_VALUE, GET_MODE (x)));
3348#endif
3349 break;
3350 case 'c':
3351 case '2':
3352 temp = simplify_binary_operation (code, mode, XEXP (x, 0), XEXP (x, 1));
3353 break;
3354 case 'b':
3355 case '3':
3356 temp = simplify_ternary_operation (code, mode, op0_mode, XEXP (x, 0),
3357 XEXP (x, 1), XEXP (x, 2));
3358 break;
3359 }
3360
3361 if (temp)
d0ab8cd3 3362 x = temp, code = GET_CODE (temp);
230d793d 3363
230d793d 3364 /* First see if we can apply the inverse distributive law. */
3365 if (code == PLUS || code == MINUS
3366 || code == AND || code == IOR || code == XOR)
3367 {
3368 x = apply_distributive_law (x);
3369 code = GET_CODE (x);
3370 }
3371
3372 /* If CODE is an associative operation not otherwise handled, see if we
3373 can associate some operands. This can win if they are constants or
3374 if they are logically related (i.e. (a & b) & a). */
3375 if ((code == PLUS || code == MINUS
3376 || code == MULT || code == AND || code == IOR || code == XOR
3377 || code == DIV || code == UDIV
3378 || code == SMAX || code == SMIN || code == UMAX || code == UMIN)
3ad2180a 3379 && INTEGRAL_MODE_P (mode))
3380 {
3381 if (GET_CODE (XEXP (x, 0)) == code)
3382 {
3383 rtx other = XEXP (XEXP (x, 0), 0);
3384 rtx inner_op0 = XEXP (XEXP (x, 0), 1);
3385 rtx inner_op1 = XEXP (x, 1);
3386 rtx inner;
3387
3388 /* Make sure we pass the constant operand if any as the second
3389 one if this is a commutative operation. */
3390 if (CONSTANT_P (inner_op0) && GET_RTX_CLASS (code) == 'c')
3391 {
3392 rtx tem = inner_op0;
3393 inner_op0 = inner_op1;
3394 inner_op1 = tem;
3395 }
3396 inner = simplify_binary_operation (code == MINUS ? PLUS
3397 : code == DIV ? MULT
3398 : code == UDIV ? MULT
3399 : code,
3400 mode, inner_op0, inner_op1);
3401
3402 /* For commutative operations, try the other pair if that one
3403 didn't simplify. */
3404 if (inner == 0 && GET_RTX_CLASS (code) == 'c')
3405 {
3406 other = XEXP (XEXP (x, 0), 1);
3407 inner = simplify_binary_operation (code, mode,
3408 XEXP (XEXP (x, 0), 0),
3409 XEXP (x, 1));
3410 }
3411
3412 if (inner)
8079805d 3413 return gen_binary (code, mode, other, inner);
3414 }
3415 }
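  /* For example, in (and (and R 6) 10) the two constants associate and
     fold: 6 & 10 == 2, so the whole expression becomes (and R 2).  */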
3416
3417 /* A little bit of algebraic simplification here. */
3418 switch (code)
3419 {
3420 case MEM:
3421 /* Ensure that our address has any ASHIFTs converted to MULT in case
3422 address-recognizing predicates are called later. */
3423 temp = make_compound_operation (XEXP (x, 0), MEM);
3424 SUBST (XEXP (x, 0), temp);
3425 break;
3426
3427 case SUBREG:
3428 /* (subreg:A (mem:B X) N) becomes a modified MEM unless the SUBREG
3429 is paradoxical. If we can't do that safely, then it becomes
3430 something nonsensical so that this combination won't take place. */
3431
3432 if (GET_CODE (SUBREG_REG (x)) == MEM
3433 && (GET_MODE_SIZE (mode)
3434 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
3435 {
3436 rtx inner = SUBREG_REG (x);
3437 int endian_offset = 0;
3438 /* Don't change the mode of the MEM
3439 if that would change the meaning of the address. */
3440 if (MEM_VOLATILE_P (SUBREG_REG (x))
3441 || mode_dependent_address_p (XEXP (inner, 0)))
38a448ca 3442 return gen_rtx_CLOBBER (mode, const0_rtx);
230d793d 3443
3444 if (BYTES_BIG_ENDIAN)
3445 {
3446 if (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
3447 endian_offset += UNITS_PER_WORD - GET_MODE_SIZE (mode);
3448 if (GET_MODE_SIZE (GET_MODE (inner)) < UNITS_PER_WORD)
3449 endian_offset -= (UNITS_PER_WORD
3450 - GET_MODE_SIZE (GET_MODE (inner)));
3451 }
3452 /* Note if the plus_constant doesn't make a valid address
3453 then this combination won't be accepted. */
3454 x = gen_rtx_MEM (mode,
3455 plus_constant (XEXP (inner, 0),
3456 (SUBREG_WORD (x) * UNITS_PER_WORD
3457 + endian_offset)));
3458 MEM_VOLATILE_P (x) = MEM_VOLATILE_P (inner);
3459 RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (inner);
3460 MEM_IN_STRUCT_P (x) = MEM_IN_STRUCT_P (inner);
3461 return x;
3462 }
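  /* Illustration, assuming 4-byte words: (subreg:QI (mem:SI addr) 0)
     becomes (mem:QI addr) on a little-endian target; on a big-endian
     target the low byte is at the far end, so endian_offset rewrites the
     address to (mem:QI (plus addr 3)).  */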
3463
3464 /* If we are in a SET_DEST, these other cases can't apply. */
3465 if (in_dest)
3466 return x;
3467
3468 /* Changing mode twice with SUBREG => just change it once,
3469 or not at all if changing back to starting mode. */
3470 if (GET_CODE (SUBREG_REG (x)) == SUBREG)
3471 {
3472 if (mode == GET_MODE (SUBREG_REG (SUBREG_REG (x)))
3473 && SUBREG_WORD (x) == 0 && SUBREG_WORD (SUBREG_REG (x)) == 0)
3474 return SUBREG_REG (SUBREG_REG (x));
3475
3476 SUBST_INT (SUBREG_WORD (x),
3477 SUBREG_WORD (x) + SUBREG_WORD (SUBREG_REG (x)));
3478 SUBST (SUBREG_REG (x), SUBREG_REG (SUBREG_REG (x)));
3479 }
3480
3481 /* SUBREG of a hard register => just change the register number
3482 and/or mode. If the hard register is not valid in that mode,
3483 suppress this combination. If the hard register is the stack,
3484 frame, or argument pointer, leave this as a SUBREG. */
3485
3486 if (GET_CODE (SUBREG_REG (x)) == REG
3487 && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER
3488 && REGNO (SUBREG_REG (x)) != FRAME_POINTER_REGNUM
3489#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3490 && REGNO (SUBREG_REG (x)) != HARD_FRAME_POINTER_REGNUM
3491#endif
3492#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3493 && REGNO (SUBREG_REG (x)) != ARG_POINTER_REGNUM
3494#endif
3495 && REGNO (SUBREG_REG (x)) != STACK_POINTER_REGNUM)
3496 {
3497 if (HARD_REGNO_MODE_OK (REGNO (SUBREG_REG (x)) + SUBREG_WORD (x),
3498 mode))
3499 return gen_rtx_REG (mode,
3500 REGNO (SUBREG_REG (x)) + SUBREG_WORD (x));
230d793d 3501 else
38a448ca 3502 return gen_rtx_CLOBBER (mode, const0_rtx);
3503 }
3504
3505 /* For a constant, try to pick up the part we want. Handle a full
3506 word and low-order part. Only do this if we are narrowing
3507 the constant; if it is being widened, we have no idea what
3508 the extra bits will have been set to. */
3509
3510 if (CONSTANT_P (SUBREG_REG (x)) && op0_mode != VOIDmode
3511 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
3c99d5ff 3512 && GET_MODE_SIZE (op0_mode) > UNITS_PER_WORD
3513 && GET_MODE_CLASS (mode) == MODE_INT)
3514 {
3515 temp = operand_subword (SUBREG_REG (x), SUBREG_WORD (x),
5f4f0e22 3516 0, op0_mode);
3517 if (temp)
3518 return temp;
3519 }
3520
3521 /* If we want a subreg of a constant, at offset 0,
3522 take the low bits. On a little-endian machine, that's
3523 always valid. On a big-endian machine, it's valid
3c99d5ff 3524 only if the constant's mode fits in one word. Note that we
61b1bece 3525 cannot use subreg_lowpart_p since SUBREG_REG may be VOIDmode. */
3526 if (CONSTANT_P (SUBREG_REG (x))
3527 && ((GET_MODE_SIZE (op0_mode) <= UNITS_PER_WORD
3528 || ! WORDS_BIG_ENDIAN)
3529 ? SUBREG_WORD (x) == 0
3530 : (SUBREG_WORD (x)
3531 == ((GET_MODE_SIZE (op0_mode)
3532 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD))
3533 / UNITS_PER_WORD)))
f82da7d2 3534 && GET_MODE_SIZE (mode) <= GET_MODE_SIZE (op0_mode)
3535 && (! WORDS_BIG_ENDIAN
3536 || GET_MODE_BITSIZE (op0_mode) <= BITS_PER_WORD))
3537 return gen_lowpart_for_combine (mode, SUBREG_REG (x));
3538
3539 /* A paradoxical SUBREG of a VOIDmode constant is the same constant,
3540 since we are saying that the high bits don't matter. */
3541 if (CONSTANT_P (SUBREG_REG (x)) && GET_MODE (SUBREG_REG (x)) == VOIDmode
3542 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (op0_mode))
3543 return SUBREG_REG (x);
3544
3545 /* Note that we cannot do any narrowing for non-constants since
3546 we might have been counting on using the fact that some bits were
3547 zero. We now do this in the SET. */
3548
3549 break;
3550
3551 case NOT:
3552 /* (not (plus X -1)) can become (neg X). */
3553 if (GET_CODE (XEXP (x, 0)) == PLUS
3554 && XEXP (XEXP (x, 0), 1) == constm1_rtx)
8079805d 3555 return gen_rtx_combine (NEG, mode, XEXP (XEXP (x, 0), 0));
3556
3557 /* Similarly, (not (neg X)) is (plus X -1). */
3558 if (GET_CODE (XEXP (x, 0)) == NEG)
3559 return gen_rtx_combine (PLUS, mode, XEXP (XEXP (x, 0), 0),
3560 constm1_rtx);
230d793d 3561
3562 /* (not (xor X C)) for C constant is (xor X D) with D = ~ C. */
3563 if (GET_CODE (XEXP (x, 0)) == XOR
3564 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3565 && (temp = simplify_unary_operation (NOT, mode,
3566 XEXP (XEXP (x, 0), 1),
3567 mode)) != 0)
787745f5 3568 return gen_binary (XOR, mode, XEXP (XEXP (x, 0), 0), temp);
d0ab8cd3 3569
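      /* For example, (not (xor X 5)) becomes (xor X -6),
         since ~5 == -6 in two's complement.  */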
3570 /* (not (ashift 1 X)) is (rotate ~1 X). We used to do this for operands
3571 other than 1, but that is not valid. We could do a similar
3572 simplification for (not (lshiftrt C X)) where C is just the sign bit,
3573 but this doesn't seem common enough to bother with. */
3574 if (GET_CODE (XEXP (x, 0)) == ASHIFT
3575 && XEXP (XEXP (x, 0), 0) == const1_rtx)
3576 return gen_rtx_ROTATE (mode, gen_unary (NOT, mode, mode, const1_rtx),
3577 XEXP (XEXP (x, 0), 1));
3578
3579 if (GET_CODE (XEXP (x, 0)) == SUBREG
3580 && subreg_lowpart_p (XEXP (x, 0))
3581 && (GET_MODE_SIZE (GET_MODE (XEXP (x, 0)))
3582 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (x, 0)))))
3583 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == ASHIFT
3584 && XEXP (SUBREG_REG (XEXP (x, 0)), 0) == const1_rtx)
3585 {
3586 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (XEXP (x, 0)));
3587
3588 x = gen_rtx_ROTATE (inner_mode,
3589 gen_unary (NOT, inner_mode, inner_mode,
3590 const1_rtx),
3591 XEXP (SUBREG_REG (XEXP (x, 0)), 1));
8079805d 3592 return gen_lowpart_for_combine (mode, x);
3593 }
3594
3595 /* If STORE_FLAG_VALUE is -1, (not (comparison foo bar)) can be done by
3596 reversing the comparison code if valid. */
3597 if (STORE_FLAG_VALUE == -1
3598 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
3599 && reversible_comparison_p (XEXP (x, 0)))
3600 return gen_rtx_combine (reverse_condition (GET_CODE (XEXP (x, 0))),
3601 mode, XEXP (XEXP (x, 0), 0),
3602 XEXP (XEXP (x, 0), 1));
3603
3604 /* (ashiftrt foo C) where C is the number of bits in FOO minus 1
3605 is (lt foo (const_int 0)) if STORE_FLAG_VALUE is -1, so we can
3606 perform the above simplification. */
500c518b 3607
3608 if (STORE_FLAG_VALUE == -1
3609 && XEXP (x, 1) == const1_rtx
3610 && GET_CODE (XEXP (x, 0)) == ASHIFTRT
3611 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3612 && INTVAL (XEXP (XEXP (x, 0), 1)) == GET_MODE_BITSIZE (mode) - 1)
3613 return gen_rtx_combine (GE, mode, XEXP (XEXP (x, 0), 0), const0_rtx);
3614
3615 /* Apply De Morgan's laws to reduce number of patterns for machines
3616 with negating logical insns (and-not, nand, etc.). If result has
3617 only one NOT, put it first, since that is how the patterns are
3618 coded. */
3619
3620 if (GET_CODE (XEXP (x, 0)) == IOR || GET_CODE (XEXP (x, 0)) == AND)
3621 {
3622 rtx in1 = XEXP (XEXP (x, 0), 0), in2 = XEXP (XEXP (x, 0), 1);
3623
3624 if (GET_CODE (in1) == NOT)
3625 in1 = XEXP (in1, 0);
3626 else
3627 in1 = gen_rtx_combine (NOT, GET_MODE (in1), in1);
3628
3629 if (GET_CODE (in2) == NOT)
3630 in2 = XEXP (in2, 0);
3631 else if (GET_CODE (in2) == CONST_INT
3632 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3633 in2 = GEN_INT (GET_MODE_MASK (mode) & ~ INTVAL (in2));
3634 else
3635 in2 = gen_rtx_combine (NOT, GET_MODE (in2), in2);
3636
3637 if (GET_CODE (in2) == NOT)
3638 {
3639 rtx tem = in2;
3640 in2 = in1; in1 = tem;
3641 }
3642
3643 return gen_rtx_combine (GET_CODE (XEXP (x, 0)) == IOR ? AND : IOR,
3644 mode, in1, in2);
3645 }
3646 break;
3647
3648 case NEG:
3649 /* (neg (plus X 1)) can become (not X). */
3650 if (GET_CODE (XEXP (x, 0)) == PLUS
3651 && XEXP (XEXP (x, 0), 1) == const1_rtx)
8079805d 3652 return gen_rtx_combine (NOT, mode, XEXP (XEXP (x, 0), 0));
3653
3654 /* Similarly, (neg (not X)) is (plus X 1). */
3655 if (GET_CODE (XEXP (x, 0)) == NOT)
8079805d 3656 return plus_constant (XEXP (XEXP (x, 0), 0), 1);
230d793d 3657
3658 /* (neg (minus X Y)) can become (minus Y X). */
3659 if (GET_CODE (XEXP (x, 0)) == MINUS
3ad2180a 3660 && (! FLOAT_MODE_P (mode)
0f41302f 3661 /* x-y != -(y-x) with IEEE floating point. */
3662 || TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3663 || flag_fast_math))
3664 return gen_binary (MINUS, mode, XEXP (XEXP (x, 0), 1),
3665 XEXP (XEXP (x, 0), 0));
230d793d 3666
0f41302f 3667 /* (neg (xor A 1)) is (plus A -1) if A is known to be either 0 or 1. */
d0ab8cd3 3668 if (GET_CODE (XEXP (x, 0)) == XOR && XEXP (XEXP (x, 0), 1) == const1_rtx
951553af 3669 && nonzero_bits (XEXP (XEXP (x, 0), 0), mode) == 1)
8079805d 3670 return gen_binary (PLUS, mode, XEXP (XEXP (x, 0), 0), constm1_rtx);
d0ab8cd3 3671
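      /* Checking both values: A == 0 gives (neg (xor 0 1)) == -1 == 0 - 1,
         and A == 1 gives (neg (xor 1 1)) == 0 == 1 - 1, so the PLUS form
         is exact when A is known to be 0 or 1.  */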
3672 /* NEG commutes with ASHIFT since it is multiplication. Only do this
3673 if we can then eliminate the NEG (e.g.,
3674 if the operand is a constant). */
3675
3676 if (GET_CODE (XEXP (x, 0)) == ASHIFT)
3677 {
3678 temp = simplify_unary_operation (NEG, mode,
3679 XEXP (XEXP (x, 0), 0), mode);
3680 if (temp)
3681 {
3682 SUBST (XEXP (XEXP (x, 0), 0), temp);
3683 return XEXP (x, 0);
3684 }
3685 }
3686
3687 temp = expand_compound_operation (XEXP (x, 0));
3688
3689 /* For C equal to the width of MODE minus 1, (neg (ashiftrt X C)) can be
3690 replaced by (lshiftrt X C). This will convert
3691 (neg (sign_extract X 1 Y)) to (zero_extract X 1 Y). */
3692
3693 if (GET_CODE (temp) == ASHIFTRT
3694 && GET_CODE (XEXP (temp, 1)) == CONST_INT
3695 && INTVAL (XEXP (temp, 1)) == GET_MODE_BITSIZE (mode) - 1)
3696 return simplify_shift_const (temp, LSHIFTRT, mode, XEXP (temp, 0),
3697 INTVAL (XEXP (temp, 1)));
230d793d 3698
951553af 3699 /* If X has only a single bit that might be nonzero, say, bit I, convert
3700 (neg X) to (ashiftrt (ashift X C-I) C-I) where C is the bitsize of
3701 MODE minus 1. This will convert (neg (zero_extract X 1 Y)) to
3702 (sign_extract X 1 Y). But only do this if TEMP isn't a register
3703 or a SUBREG of one since we'd be making the expression more
3704 complex if it was just a register. */
3705
3706 if (GET_CODE (temp) != REG
3707 && ! (GET_CODE (temp) == SUBREG
3708 && GET_CODE (SUBREG_REG (temp)) == REG)
951553af 3709 && (i = exact_log2 (nonzero_bits (temp, mode))) >= 0)
3710 {
3711 rtx temp1 = simplify_shift_const
3712 (NULL_RTX, ASHIFTRT, mode,
3713 simplify_shift_const (NULL_RTX, ASHIFT, mode, temp,
3714 GET_MODE_BITSIZE (mode) - 1 - i),
3715 GET_MODE_BITSIZE (mode) - 1 - i);
3716
3717 /* If all we did was surround TEMP with the two shifts, we
3718 haven't improved anything, so don't use it. Otherwise,
3719 we are better off with TEMP1. */
3720 if (GET_CODE (temp1) != ASHIFTRT
3721 || GET_CODE (XEXP (temp1, 0)) != ASHIFT
3722 || XEXP (XEXP (temp1, 0), 0) != temp)
8079805d 3723 return temp1;
3724 }
3725 break;
3726
2ca9ae17 3727 case TRUNCATE:
3728 /* We can't handle truncation to a partial integer mode here
3729 because we don't know the real bitsize of the partial
3730 integer mode. */
3731 if (GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
3732 break;
3733
3734 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3735 SUBST (XEXP (x, 0),
3736 force_to_mode (XEXP (x, 0), GET_MODE (XEXP (x, 0)),
3737 GET_MODE_MASK (mode), NULL_RTX, 0));
3738
3739 /* (truncate:SI ({sign,zero}_extend:DI foo:SI)) == foo:SI. */
3740 if ((GET_CODE (XEXP (x, 0)) == SIGN_EXTEND
3741 || GET_CODE (XEXP (x, 0)) == ZERO_EXTEND)
3742 && GET_MODE (XEXP (XEXP (x, 0), 0)) == mode)
3743 return XEXP (XEXP (x, 0), 0);
3744
3745 /* (truncate:SI (OP:DI ({sign,zero}_extend:DI foo:SI))) is
3746 (OP:SI foo:SI) if OP is NEG or ABS. */
3747 if ((GET_CODE (XEXP (x, 0)) == ABS
3748 || GET_CODE (XEXP (x, 0)) == NEG)
3749 && (GET_CODE (XEXP (XEXP (x, 0), 0)) == SIGN_EXTEND
3750 || GET_CODE (XEXP (XEXP (x, 0), 0)) == ZERO_EXTEND)
3751 && GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == mode)
3752 return gen_unary (GET_CODE (XEXP (x, 0)), mode, mode,
3753 XEXP (XEXP (XEXP (x, 0), 0), 0));
3754
3755 /* (truncate:SI (subreg:DI (truncate:SI X) 0)) is
3756 (truncate:SI x). */
3757 if (GET_CODE (XEXP (x, 0)) == SUBREG
3758 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == TRUNCATE
3759 && subreg_lowpart_p (XEXP (x, 0)))
3760 return SUBREG_REG (XEXP (x, 0));
3761
3762 /* If we know that the value is already truncated, we can
3763 replace the TRUNCATE with a SUBREG. */
3764 if (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) <= HOST_BITS_PER_WIDE_INT
3765 && (nonzero_bits (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
3766 &~ GET_MODE_MASK (mode)) == 0)
3767 return gen_lowpart_for_combine (mode, XEXP (x, 0));
3768
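      /* For example, (truncate:QI (and:SI R 255)) has no nonzero bits
         outside QImode, so the TRUNCATE can become the low-part SUBREG
         that gen_lowpart_for_combine builds.  */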
3769 /* A truncate of a comparison can be replaced with a subreg if
3770 STORE_FLAG_VALUE permits. This is like the previous test,
3771 but it works even if the comparison is done in a mode larger
3772 than HOST_BITS_PER_WIDE_INT. */
3773 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3774 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
3775 && ((HOST_WIDE_INT) STORE_FLAG_VALUE &~ GET_MODE_MASK (mode)) == 0)
3776 return gen_lowpart_for_combine (mode, XEXP (x, 0));
3777
3778 /* Similarly, a truncate of a register whose value is a
3779 comparison can be replaced with a subreg if STORE_FLAG_VALUE
3780 permits. */
3781 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3782 && ((HOST_WIDE_INT) STORE_FLAG_VALUE &~ GET_MODE_MASK (mode)) == 0
3783 && (temp = get_last_value (XEXP (x, 0)))
3784 && GET_RTX_CLASS (GET_CODE (temp)) == '<')
3785 return gen_lowpart_for_combine (mode, XEXP (x, 0));
3786
3787 break;
3788
3789 case FLOAT_TRUNCATE:
3790 /* (float_truncate:SF (float_extend:DF foo:SF)) = foo:SF. */
3791 if (GET_CODE (XEXP (x, 0)) == FLOAT_EXTEND
3792 && GET_MODE (XEXP (XEXP (x, 0), 0)) == mode)
3793 return XEXP (XEXP (x, 0), 0);
3794
3795 /* (float_truncate:SF (OP:DF (float_extend:DF foo:sf))) is
3796 (OP:SF foo:SF) if OP is NEG or ABS. */
3797 if ((GET_CODE (XEXP (x, 0)) == ABS
3798 || GET_CODE (XEXP (x, 0)) == NEG)
3799 && GET_CODE (XEXP (XEXP (x, 0), 0)) == FLOAT_EXTEND
3800 && GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == mode)
3801 return gen_unary (GET_CODE (XEXP (x, 0)), mode, mode,
3802 XEXP (XEXP (XEXP (x, 0), 0), 0));
3803
3804 /* (float_truncate:SF (subreg:DF (float_truncate:SF X) 0))
3805 is (float_truncate:SF x). */
3806 if (GET_CODE (XEXP (x, 0)) == SUBREG
3807 && subreg_lowpart_p (XEXP (x, 0))
3808 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == FLOAT_TRUNCATE)
3809 return SUBREG_REG (XEXP (x, 0));
3810 break;
3811
3812#ifdef HAVE_cc0
3813 case COMPARE:
3814 /* Convert (compare FOO (const_int 0)) to FOO unless we aren't
3815 using cc0, in which case we want to leave it as a COMPARE
3816 so we can distinguish it from a register-register-copy. */
3817 if (XEXP (x, 1) == const0_rtx)
3818 return XEXP (x, 0);
3819
3820 /* In IEEE floating point, x-0 is not the same as x. */
3821 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3822 || ! FLOAT_MODE_P (GET_MODE (XEXP (x, 0)))
3823 || flag_fast_math)
3824 && XEXP (x, 1) == CONST0_RTX (GET_MODE (XEXP (x, 0))))
3825 return XEXP (x, 0);
3826 break;
3827#endif
3828
3829 case CONST:
3830 /* (const (const X)) can become (const X). Do it this way rather than
3831 returning the inner CONST since CONST can be shared with a
3832 REG_EQUAL note. */
3833 if (GET_CODE (XEXP (x, 0)) == CONST)
3834 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
3835 break;
3836
3837#ifdef HAVE_lo_sum
3838 case LO_SUM:
3839 /* Convert (lo_sum (high FOO) FOO) to FOO. This is necessary so we
3840 can add in an offset. find_split_point will split this address up
3841 again if it doesn't match. */
3842 if (GET_CODE (XEXP (x, 0)) == HIGH
3843 && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1)))
3844 return XEXP (x, 1);
3845 break;
3846#endif
3847
3848 case PLUS:
3849 /* If we have (plus (plus (A const) B)), associate it so that CONST is
3850 outermost. That's because that's the way indexed addresses are
3851 supposed to appear. This code used to check many more cases, but
3852 they are now checked elsewhere. */
3853 if (GET_CODE (XEXP (x, 0)) == PLUS
3854 && CONSTANT_ADDRESS_P (XEXP (XEXP (x, 0), 1)))
3855 return gen_binary (PLUS, mode,
3856 gen_binary (PLUS, mode, XEXP (XEXP (x, 0), 0),
3857 XEXP (x, 1)),
3858 XEXP (XEXP (x, 0), 1));
3859
3860 /* (plus (xor (and <foo> (const_int pow2 - 1)) <c>) <-c>)
3861 when c is (const_int (pow2 / 2)) is a sign extension of a
3862 bit-field and can be replaced by either a sign_extend or a
3863 sign_extract. The `and' may be a zero_extend. */
3864 if (GET_CODE (XEXP (x, 0)) == XOR
3865 && GET_CODE (XEXP (x, 1)) == CONST_INT
3866 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3867 && INTVAL (XEXP (x, 1)) == - INTVAL (XEXP (XEXP (x, 0), 1))
3868 && (i = exact_log2 (INTVAL (XEXP (XEXP (x, 0), 1)))) >= 0
5f4f0e22 3869 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3870 && ((GET_CODE (XEXP (XEXP (x, 0), 0)) == AND
3871 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
3872 && (INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1))
5f4f0e22 3873 == ((HOST_WIDE_INT) 1 << (i + 1)) - 1))
3874 || (GET_CODE (XEXP (XEXP (x, 0), 0)) == ZERO_EXTEND
3875 && (GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)))
3876 == i + 1))))
3877 return simplify_shift_const
3878 (NULL_RTX, ASHIFTRT, mode,
3879 simplify_shift_const (NULL_RTX, ASHIFT, mode,
3880 XEXP (XEXP (XEXP (x, 0), 0), 0),
3881 GET_MODE_BITSIZE (mode) - (i + 1)),
3882 GET_MODE_BITSIZE (mode) - (i + 1));
230d793d 3883
3884 /* (plus (comparison A B) C) can become (neg (rev-comp A B)) if
3885 C is 1 and STORE_FLAG_VALUE is -1 or if C is -1 and STORE_FLAG_VALUE
3886 is 1. This produces better code than the alternative immediately
3887 below. */
3888 if (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
3889 && reversible_comparison_p (XEXP (x, 0))
3890 && ((STORE_FLAG_VALUE == -1 && XEXP (x, 1) == const1_rtx)
3891 || (STORE_FLAG_VALUE == 1 && XEXP (x, 1) == constm1_rtx)))
8079805d 3892 return
0c1c8ea6 3893 gen_unary (NEG, mode, mode,
3894 gen_binary (reverse_condition (GET_CODE (XEXP (x, 0))),
3895 mode, XEXP (XEXP (x, 0), 0),
3896 XEXP (XEXP (x, 0), 1)));
3897
3898 /* If only the low-order bit of X is possibly nonzero, (plus x -1)
3899 can become (ashiftrt (ashift (xor x 1) C) C) where C is
3900 the bitsize of the mode - 1. This allows simplification of
3901 "a = (b & 8) == 0;" */
3902 if (XEXP (x, 1) == constm1_rtx
3903 && GET_CODE (XEXP (x, 0)) != REG
3904 && ! (GET_CODE (XEXP (x,0)) == SUBREG
3905 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG)
951553af 3906 && nonzero_bits (XEXP (x, 0), mode) == 1)
3907 return simplify_shift_const (NULL_RTX, ASHIFTRT, mode,
3908 simplify_shift_const (NULL_RTX, ASHIFT, mode,
3909 gen_rtx_combine (XOR, mode,
3910 XEXP (x, 0), const1_rtx),
3911 GET_MODE_BITSIZE (mode) - 1),
3912 GET_MODE_BITSIZE (mode) - 1);
3913
3914 /* If we are adding two things that have no bits in common, convert
3915 the addition into an IOR. This will often be further simplified,
3916 for example in cases like ((a & 1) + (a & 2)), which can
3917 become a & 3. */
3918
ac49a949 3919 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3920 && (nonzero_bits (XEXP (x, 0), mode)
3921 & nonzero_bits (XEXP (x, 1), mode)) == 0)
8079805d 3922 return gen_binary (IOR, mode, XEXP (x, 0), XEXP (x, 1));
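      /* For example, (plus (and A 1) (and A 2)) has disjoint nonzero bits,
         so it becomes (ior (and A 1) (and A 2)), which the distributive
         law later folds to (and A 3).  */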
3923 break;
3924
3925 case MINUS:
3926 /* If STORE_FLAG_VALUE is 1, (minus 1 (comparison foo bar)) can be done
3927 by reversing the comparison code if valid. */
3928 if (STORE_FLAG_VALUE == 1
3929 && XEXP (x, 0) == const1_rtx
3930 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<'
3931 && reversible_comparison_p (XEXP (x, 1)))
3932 return gen_binary (reverse_condition (GET_CODE (XEXP (x, 1))),
3933 mode, XEXP (XEXP (x, 1), 0),
3934 XEXP (XEXP (x, 1), 1));
5109d49f 3935
3936 /* (minus <foo> (and <foo> (const_int -pow2))) becomes
3937 (and <foo> (const_int pow2-1)) */
3938 if (GET_CODE (XEXP (x, 1)) == AND
3939 && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
3940 && exact_log2 (- INTVAL (XEXP (XEXP (x, 1), 1))) >= 0
3941 && rtx_equal_p (XEXP (XEXP (x, 1), 0), XEXP (x, 0)))
3942 return simplify_and_const_int (NULL_RTX, mode, XEXP (x, 0),
3943 - INTVAL (XEXP (XEXP (x, 1), 1)) - 1);
3944
3945 /* Canonicalize (minus A (plus B C)) to (minus (minus A B) C) for
3946 integers. */
3947 if (GET_CODE (XEXP (x, 1)) == PLUS && INTEGRAL_MODE_P (mode))
3948 return gen_binary (MINUS, mode,
3949 gen_binary (MINUS, mode, XEXP (x, 0),
3950 XEXP (XEXP (x, 1), 0)),
3951 XEXP (XEXP (x, 1), 1));
3952 break;
3953
3954 case MULT:
3955 /* If we have (mult (plus A B) C), apply the distributive law and then
3956 the inverse distributive law to see if things simplify. This
3957 occurs mostly in addresses, often when unrolling loops. */
3958
3959 if (GET_CODE (XEXP (x, 0)) == PLUS)
3960 {
3961 x = apply_distributive_law
3962 (gen_binary (PLUS, mode,
3963 gen_binary (MULT, mode,
3964 XEXP (XEXP (x, 0), 0), XEXP (x, 1)),
3965 gen_binary (MULT, mode,
3966 XEXP (XEXP (x, 0), 1), XEXP (x, 1))));
3967
3968 if (GET_CODE (x) != MULT)
8079805d 3969 return x;
230d793d 3970 }
3971 break;
3972
3973 case UDIV:
3974 /* If this is a divide by a power of two, treat it as a shift if
3975 its first operand is a shift. */
3976 if (GET_CODE (XEXP (x, 1)) == CONST_INT
3977 && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0
3978 && (GET_CODE (XEXP (x, 0)) == ASHIFT
3979 || GET_CODE (XEXP (x, 0)) == LSHIFTRT
3980 || GET_CODE (XEXP (x, 0)) == ASHIFTRT
3981 || GET_CODE (XEXP (x, 0)) == ROTATE
3982 || GET_CODE (XEXP (x, 0)) == ROTATERT))
8079805d 3983 return simplify_shift_const (NULL_RTX, LSHIFTRT, mode, XEXP (x, 0), i);
3984 break;
3985
3986 case EQ: case NE:
3987 case GT: case GTU: case GE: case GEU:
3988 case LT: case LTU: case LE: case LEU:
3989 /* If the first operand is a condition code, we can't do anything
3990 with it. */
3991 if (GET_CODE (XEXP (x, 0)) == COMPARE
3992 || (GET_MODE_CLASS (GET_MODE (XEXP (x, 0))) != MODE_CC
3993#ifdef HAVE_cc0
3994 && XEXP (x, 0) != cc0_rtx
3995#endif
3996 ))
3997 {
3998 rtx op0 = XEXP (x, 0);
3999 rtx op1 = XEXP (x, 1);
4000 enum rtx_code new_code;
4001
4002 if (GET_CODE (op0) == COMPARE)
4003 op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);
4004
4005 /* Simplify our comparison, if possible. */
4006 new_code = simplify_comparison (code, &op0, &op1);
4007
230d793d 4008 /* If STORE_FLAG_VALUE is 1, we can convert (ne x 0) to simply X
951553af 4009 if only the low-order bit is possibly nonzero in X (such as when
4010 X is a ZERO_EXTRACT of one bit). Similarly, we can convert EQ to
4011 (xor X 1) or (minus 1 X); we use the former. Finally, if X is
4012 known to be either 0 or -1, NE becomes a NEG and EQ becomes
4013 (plus X 1).
4014
4015 Remove any ZERO_EXTRACT we made when thinking this was a
4016 comparison. It may now be simpler to use, e.g., an AND. If a
4017 ZERO_EXTRACT is indeed appropriate, it will be placed back by
4018 the call to make_compound_operation in the SET case. */
4019
4020 if (STORE_FLAG_VALUE == 1
4021 && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
4022 && op1 == const0_rtx && nonzero_bits (op0, mode) == 1)
4023 return gen_lowpart_for_combine (mode,
4024 expand_compound_operation (op0));
5109d49f 4025
4026 else if (STORE_FLAG_VALUE == 1
4027 && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
4028 && op1 == const0_rtx
4029 && (num_sign_bit_copies (op0, mode)
4030 == GET_MODE_BITSIZE (mode)))
4031 {
4032 op0 = expand_compound_operation (op0);
0c1c8ea6 4033 return gen_unary (NEG, mode, mode,
8079805d 4034 gen_lowpart_for_combine (mode, op0));
4035 }
4036
4037 else if (STORE_FLAG_VALUE == 1
4038 && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
230d793d 4039 && op1 == const0_rtx
5109d49f 4040 && nonzero_bits (op0, mode) == 1)
4041 {
4042 op0 = expand_compound_operation (op0);
4043 return gen_binary (XOR, mode,
4044 gen_lowpart_for_combine (mode, op0),
4045 const1_rtx);
5109d49f 4046 }
818b11b9 4047
4048 else if (STORE_FLAG_VALUE == 1
4049 && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
4050 && op1 == const0_rtx
4051 && (num_sign_bit_copies (op0, mode)
4052 == GET_MODE_BITSIZE (mode)))
4053 {
4054 op0 = expand_compound_operation (op0);
8079805d 4055 return plus_constant (gen_lowpart_for_combine (mode, op0), 1);
818b11b9 4056 }
230d793d 4057
4058 /* If STORE_FLAG_VALUE is -1, we have cases similar to
4059 those above. */
4060 if (STORE_FLAG_VALUE == -1
4061 && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
230d793d 4062 && op1 == const0_rtx
4063 && (num_sign_bit_copies (op0, mode)
4064 == GET_MODE_BITSIZE (mode)))
4065 return gen_lowpart_for_combine (mode,
4066 expand_compound_operation (op0));
4067
4068 else if (STORE_FLAG_VALUE == -1
4069 && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
4070 && op1 == const0_rtx
4071 && nonzero_bits (op0, mode) == 1)
4072 {
4073 op0 = expand_compound_operation (op0);
0c1c8ea6 4074 return gen_unary (NEG, mode, mode,
8079805d 4075 gen_lowpart_for_combine (mode, op0));
4076 }
4077
4078 else if (STORE_FLAG_VALUE == -1
4079 && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
4080 && op1 == const0_rtx
4081 && (num_sign_bit_copies (op0, mode)
4082 == GET_MODE_BITSIZE (mode)))
230d793d 4083 {
818b11b9 4084 op0 = expand_compound_operation (op0);
0c1c8ea6 4085 return gen_unary (NOT, mode, mode,
8079805d 4086 gen_lowpart_for_combine (mode, op0));
4087 }
4088
4089 /* If X is 0/1, (eq X 0) is X-1. */
4090 else if (STORE_FLAG_VALUE == -1
4091 && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
4092 && op1 == const0_rtx
4093 && nonzero_bits (op0, mode) == 1)
4094 {
4095 op0 = expand_compound_operation (op0);
8079805d 4096 return plus_constant (gen_lowpart_for_combine (mode, op0), -1);
230d793d 4097 }
4098
4099 /* If STORE_FLAG_VALUE says to just test the sign bit and X has just
4100 one bit that might be nonzero, we can convert (ne x 0) to
4101 (ashift x c) where C puts the bit in the sign bit. Remove any
4102 AND with STORE_FLAG_VALUE when we are done, since we are only
4103 going to test the sign bit. */
3f508eca 4104 if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
5f4f0e22 4105 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
0802d516 4106 && ((STORE_FLAG_VALUE & GET_MODE_MASK (mode))
5f4f0e22 4107 == (HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (mode) - 1))
4108 && op1 == const0_rtx
4109 && mode == GET_MODE (op0)
5109d49f 4110 && (i = exact_log2 (nonzero_bits (op0, mode))) >= 0)
230d793d 4111 {
4112 x = simplify_shift_const (NULL_RTX, ASHIFT, mode,
4113 expand_compound_operation (op0),
4114 GET_MODE_BITSIZE (mode) - 1 - i);
4115 if (GET_CODE (x) == AND && XEXP (x, 1) == const_true_rtx)
4116 return XEXP (x, 0);
4117 else
4118 return x;
4119 }
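	  /* Illustration, assuming 32-bit SImode and STORE_FLAG_VALUE equal
	     to the sign bit 0x80000000: if only bit 2 of X can be nonzero,
	     (ne X 0) becomes (ashift X 29), which moves bit 2 into the
	     sign bit.  */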
4120
4121 /* If the code changed, return a whole new comparison. */
4122 if (new_code != code)
4123 return gen_rtx_combine (new_code, mode, op0, op1);
4124
4125 /* Otherwise, keep this operation, but maybe change its operands.
4126 This also converts (ne (compare FOO BAR) 0) to (ne FOO BAR). */
4127 SUBST (XEXP (x, 0), op0);
4128 SUBST (XEXP (x, 1), op1);
4129 }
4130 break;
4131
4132 case IF_THEN_ELSE:
8079805d 4133 return simplify_if_then_else (x);
9210df58 4134
4135 case ZERO_EXTRACT:
4136 case SIGN_EXTRACT:
4137 case ZERO_EXTEND:
4138 case SIGN_EXTEND:
0f41302f 4139 /* If we are processing SET_DEST, we are done. */
4140 if (in_dest)
4141 return x;
d0ab8cd3 4142
8079805d 4143 return expand_compound_operation (x);
d0ab8cd3 4144
4145 case SET:
4146 return simplify_set (x);
1a26b032 4147
4148 case AND:
4149 case IOR:
4150 case XOR:
4151 return simplify_logical (x, last);
d0ab8cd3 4152
b472527b 4153 case ABS:
4154 /* (abs (neg <foo>)) -> (abs <foo>) */
4155 if (GET_CODE (XEXP (x, 0)) == NEG)
4156 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
1a26b032 4157
4158 /* If the mode of the operand is VOIDmode (i.e. if it is ASM_OPERANDS),
4159 do nothing. */
4160 if (GET_MODE (XEXP (x, 0)) == VOIDmode)
4161 break;
f40421ce 4162
4163 /* If operand is something known to be positive, ignore the ABS. */
4164 if (GET_CODE (XEXP (x, 0)) == FFS || GET_CODE (XEXP (x, 0)) == ABS
4165 || ((GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
4166 <= HOST_BITS_PER_WIDE_INT)
4167 && ((nonzero_bits (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
4168 & ((HOST_WIDE_INT) 1
4169 << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1)))
4170 == 0)))
4171 return XEXP (x, 0);
1a26b032 4172
1a26b032 4173
4174 /* If operand is known to be only -1 or 0, convert ABS to NEG. */
4175 if (num_sign_bit_copies (XEXP (x, 0), mode) == GET_MODE_BITSIZE (mode))
4176 return gen_rtx_combine (NEG, mode, XEXP (x, 0));
1a26b032 4177
8079805d 4178 break;
1a26b032 4179
4180 case FFS:
4181 /* (ffs (*_extend <X>)) = (ffs <X>) */
4182 if (GET_CODE (XEXP (x, 0)) == SIGN_EXTEND
4183 || GET_CODE (XEXP (x, 0)) == ZERO_EXTEND)
4184 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4185 break;
1a26b032 4186
4187 case FLOAT:
4188 /* (float (sign_extend <X>)) = (float <X>). */
4189 if (GET_CODE (XEXP (x, 0)) == SIGN_EXTEND)
4190 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4191 break;
1a26b032 4192
4193 case ASHIFT:
4194 case LSHIFTRT:
4195 case ASHIFTRT:
4196 case ROTATE:
4197 case ROTATERT:
4198 /* If this is a shift by a constant amount, simplify it. */
4199 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
4200 return simplify_shift_const (x, code, mode, XEXP (x, 0),
4201 INTVAL (XEXP (x, 1)));
4202
4203#ifdef SHIFT_COUNT_TRUNCATED
4204 else if (SHIFT_COUNT_TRUNCATED && GET_CODE (XEXP (x, 1)) != REG)
4205 SUBST (XEXP (x, 1),
4206 force_to_mode (XEXP (x, 1), GET_MODE (x),
4207 ((HOST_WIDE_INT) 1
4208 << exact_log2 (GET_MODE_BITSIZE (GET_MODE (x))))
4209 - 1,
4210 NULL_RTX, 0));
4211#endif
4212
4213 break;
4214
4215 default:
4216 break;
4217 }
4218
4219 return x;
4220}
4221\f
4222/* Simplify X, an IF_THEN_ELSE expression. Return the new expression. */
5109d49f 4223
4224static rtx
4225simplify_if_then_else (x)
4226 rtx x;
4227{
4228 enum machine_mode mode = GET_MODE (x);
4229 rtx cond = XEXP (x, 0);
4230 rtx true = XEXP (x, 1);
4231 rtx false = XEXP (x, 2);
4232 enum rtx_code true_code = GET_CODE (cond);
4233 int comparison_p = GET_RTX_CLASS (true_code) == '<';
4234 rtx temp;
4235 int i;
4236
0f41302f 4237 /* Simplify storing of the truth value. */
4238 if (comparison_p && true == const_true_rtx && false == const0_rtx)
4239 return gen_binary (true_code, mode, XEXP (cond, 0), XEXP (cond, 1));
4240
0f41302f 4241 /* Also when the truth value has to be reversed. */
4242 if (comparison_p && reversible_comparison_p (cond)
4243 && true == const0_rtx && false == const_true_rtx)
4244 return gen_binary (reverse_condition (true_code),
4245 mode, XEXP (cond, 0), XEXP (cond, 1));
4246
4247 /* Sometimes we can simplify the arm of an IF_THEN_ELSE if a register used
4248 in it is being compared against certain values. Get the true and false
4249 comparisons and see if that says anything about the value of each arm. */
4250
4251 if (comparison_p && reversible_comparison_p (cond)
4252 && GET_CODE (XEXP (cond, 0)) == REG)
4253 {
4254 HOST_WIDE_INT nzb;
4255 rtx from = XEXP (cond, 0);
4256 enum rtx_code false_code = reverse_condition (true_code);
4257 rtx true_val = XEXP (cond, 1);
4258 rtx false_val = true_val;
4259 int swapped = 0;
9210df58 4260
8079805d 4261 /* If FALSE_CODE is EQ, swap the codes and arms. */
5109d49f 4262
8079805d 4263 if (false_code == EQ)
1a26b032 4264 {
4265 swapped = 1, true_code = EQ, false_code = NE;
4266 temp = true, true = false, false = temp;
4267 }
5109d49f 4268
4269 /* If we are comparing against zero and the expression being tested has
4270 only a single bit that might be nonzero, that is its value when it is
4271 not equal to zero. Similarly if it is known to be -1 or 0. */
4272
4273 if (true_code == EQ && true_val == const0_rtx
4274 && exact_log2 (nzb = nonzero_bits (from, GET_MODE (from))) >= 0)
4275 false_code = EQ, false_val = GEN_INT (nzb);
4276 else if (true_code == EQ && true_val == const0_rtx
4277 && (num_sign_bit_copies (from, GET_MODE (from))
4278 == GET_MODE_BITSIZE (GET_MODE (from))))
4279 false_code = EQ, false_val = constm1_rtx;
4280
4281 /* Now simplify an arm if we know the value of the register in the
4282 branch and it is used in the arm. Be careful due to the potential
4283 of locally-shared RTL. */
4284
4285 if (reg_mentioned_p (from, true))
4286 true = subst (known_cond (copy_rtx (true), true_code, from, true_val),
4287 pc_rtx, pc_rtx, 0, 0);
4288 if (reg_mentioned_p (from, false))
4289 false = subst (known_cond (copy_rtx (false), false_code,
4290 from, false_val),
4291 pc_rtx, pc_rtx, 0, 0);
4292
4293 SUBST (XEXP (x, 1), swapped ? false : true);
4294 SUBST (XEXP (x, 2), swapped ? true : false);
4295
4296 true = XEXP (x, 1), false = XEXP (x, 2), true_code = GET_CODE (cond);
4297 }
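      /* For example, in (if_then_else (eq R 0) (plus R 4) R) the true arm
         is reached only when R == 0, so known_cond lets it fold to the
         constant 4.  */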
5109d49f 4298
4299 /* If we have (if_then_else FOO (pc) (label_ref BAR)) and FOO can be
4300 reversed, do so to avoid needing two sets of patterns for
4301 subtract-and-branch insns. Similarly if we have a constant in the true
4302 arm, the false arm is the same as the first operand of the comparison, or
4303 the false arm is more complicated than the true arm. */
4304
4305 if (comparison_p && reversible_comparison_p (cond)
4306 && (true == pc_rtx
4307 || (CONSTANT_P (true)
4308 && GET_CODE (false) != CONST_INT && false != pc_rtx)
4309 || true == const0_rtx
4310 || (GET_RTX_CLASS (GET_CODE (true)) == 'o'
4311 && GET_RTX_CLASS (GET_CODE (false)) != 'o')
4312 || (GET_CODE (true) == SUBREG
4313 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (true))) == 'o'
4314 && GET_RTX_CLASS (GET_CODE (false)) != 'o')
4315 || reg_mentioned_p (true, false)
4316 || rtx_equal_p (false, XEXP (cond, 0))))
4317 {
4318 true_code = reverse_condition (true_code);
4319 SUBST (XEXP (x, 0),
4320 gen_binary (true_code, GET_MODE (cond), XEXP (cond, 0),
4321 XEXP (cond, 1)));
5109d49f 4322
4323 SUBST (XEXP (x, 1), false);
4324 SUBST (XEXP (x, 2), true);
1a26b032 4325
8079805d 4326 temp = true, true = false, false = temp, cond = XEXP (x, 0);
bb821298 4327
0f41302f 4328 /* It is possible that the conditional has been simplified out. */
4329 true_code = GET_CODE (cond);
4330 comparison_p = GET_RTX_CLASS (true_code) == '<';
8079805d 4331 }
abe6e52f 4332
8079805d 4333 /* If the two arms are identical, we don't need the comparison. */
1a26b032 4334
4335 if (rtx_equal_p (true, false) && ! side_effects_p (cond))
4336 return true;
1a26b032 4337
4338 /* Convert a == b ? b : a to "a". */
4339 if (true_code == EQ && ! side_effects_p (cond)
4340 && rtx_equal_p (XEXP (cond, 0), false)
4341 && rtx_equal_p (XEXP (cond, 1), true))
4342 return false;
4343 else if (true_code == NE && ! side_effects_p (cond)
4344 && rtx_equal_p (XEXP (cond, 0), true)
4345 && rtx_equal_p (XEXP (cond, 1), false))
4346 return true;
4347
8079805d
RK
4348 /* Look for cases where we have (abs x) or (neg (abs X)). */
4349
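  /* For example, (if_then_else (ge X (const_int 0)) X (neg X)) becomes
     (abs X), and the LT/LE variants become (neg (abs X)).  */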
  if (GET_MODE_CLASS (mode) == MODE_INT
      && GET_CODE (false) == NEG
      && rtx_equal_p (true, XEXP (false, 0))
      && comparison_p
      && rtx_equal_p (true, XEXP (cond, 0))
      && ! side_effects_p (true))
    switch (true_code)
      {
      case GT:
      case GE:
	return gen_unary (ABS, mode, mode, true);
      case LT:
      case LE:
	return gen_unary (NEG, mode, mode, gen_unary (ABS, mode, mode, true));
      default:
	break;
      }

  /* Look for MIN or MAX.  */

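  /* For example, (if_then_else (gt A B) A B) becomes (smax A B), and
     (if_then_else (ltu A B) A B) becomes (umin A B).  */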
  if ((! FLOAT_MODE_P (mode) || flag_fast_math)
      && comparison_p
      && rtx_equal_p (XEXP (cond, 0), true)
      && rtx_equal_p (XEXP (cond, 1), false)
      && ! side_effects_p (cond))
    switch (true_code)
      {
      case GE:
      case GT:
	return gen_binary (SMAX, mode, true, false);
      case LE:
      case LT:
	return gen_binary (SMIN, mode, true, false);
      case GEU:
      case GTU:
	return gen_binary (UMAX, mode, true, false);
      case LEU:
      case LTU:
	return gen_binary (UMIN, mode, true, false);
      default:
	break;
      }

  /* If we have (if_then_else COND (OP Z C1) Z) and OP is an identity when its
     second operand is zero, this can be done as (OP Z (mult COND C2)) where
     C2 = C1 * STORE_FLAG_VALUE.  Similarly if OP has an outer ZERO_EXTEND or
     SIGN_EXTEND as long as Z is already extended (so we don't destroy it).
     We can do this kind of thing in some cases when STORE_FLAG_VALUE is
     neither 1 nor -1, but it isn't worth checking for.  */

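  /* For example, when STORE_FLAG_VALUE is 1,
     (if_then_else (ne A B) (plus Z (const_int 4)) Z) can be rewritten as
     (plus Z (mult (ne A B) (const_int 4))), since PLUS is an identity
     when its second operand is zero.  */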
  if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
      && comparison_p && mode != VOIDmode && ! side_effects_p (x))
    {
      rtx t = make_compound_operation (true, SET);
      rtx f = make_compound_operation (false, SET);
      rtx cond_op0 = XEXP (cond, 0);
      rtx cond_op1 = XEXP (cond, 1);
      enum rtx_code op, extend_op = NIL;
      enum machine_mode m = mode;
      rtx z = 0, c1;

      if ((GET_CODE (t) == PLUS || GET_CODE (t) == MINUS
	   || GET_CODE (t) == IOR || GET_CODE (t) == XOR
	   || GET_CODE (t) == ASHIFT
	   || GET_CODE (t) == LSHIFTRT || GET_CODE (t) == ASHIFTRT)
	  && rtx_equal_p (XEXP (t, 0), f))
	c1 = XEXP (t, 1), op = GET_CODE (t), z = f;

      /* If an identity-zero op is commutative, check whether there
	 would be a match if we swapped the operands.  */
      else if ((GET_CODE (t) == PLUS || GET_CODE (t) == IOR
		|| GET_CODE (t) == XOR)
	       && rtx_equal_p (XEXP (t, 1), f))
	c1 = XEXP (t, 0), op = GET_CODE (t), z = f;
      else if (GET_CODE (t) == SIGN_EXTEND
	       && (GET_CODE (XEXP (t, 0)) == PLUS
		   || GET_CODE (XEXP (t, 0)) == MINUS
		   || GET_CODE (XEXP (t, 0)) == IOR
		   || GET_CODE (XEXP (t, 0)) == XOR
		   || GET_CODE (XEXP (t, 0)) == ASHIFT
		   || GET_CODE (XEXP (t, 0)) == LSHIFTRT
		   || GET_CODE (XEXP (t, 0)) == ASHIFTRT)
	       && GET_CODE (XEXP (XEXP (t, 0), 0)) == SUBREG
	       && subreg_lowpart_p (XEXP (XEXP (t, 0), 0))
	       && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 0)), f)
	       && (num_sign_bit_copies (f, GET_MODE (f))
		   > (GET_MODE_BITSIZE (mode)
		      - GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (t, 0), 0))))))
	{
	  c1 = XEXP (XEXP (t, 0), 1); z = f; op = GET_CODE (XEXP (t, 0));
	  extend_op = SIGN_EXTEND;
	  m = GET_MODE (XEXP (t, 0));
	}
      else if (GET_CODE (t) == SIGN_EXTEND
	       && (GET_CODE (XEXP (t, 0)) == PLUS
		   || GET_CODE (XEXP (t, 0)) == IOR
		   || GET_CODE (XEXP (t, 0)) == XOR)
	       && GET_CODE (XEXP (XEXP (t, 0), 1)) == SUBREG
	       && subreg_lowpart_p (XEXP (XEXP (t, 0), 1))
	       && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 1)), f)
	       && (num_sign_bit_copies (f, GET_MODE (f))
		   > (GET_MODE_BITSIZE (mode)
		      - GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (t, 0), 1))))))
	{
	  c1 = XEXP (XEXP (t, 0), 0); z = f; op = GET_CODE (XEXP (t, 0));
	  extend_op = SIGN_EXTEND;
	  m = GET_MODE (XEXP (t, 0));
	}
      else if (GET_CODE (t) == ZERO_EXTEND
	       && (GET_CODE (XEXP (t, 0)) == PLUS
		   || GET_CODE (XEXP (t, 0)) == MINUS
		   || GET_CODE (XEXP (t, 0)) == IOR
		   || GET_CODE (XEXP (t, 0)) == XOR
		   || GET_CODE (XEXP (t, 0)) == ASHIFT
		   || GET_CODE (XEXP (t, 0)) == LSHIFTRT
		   || GET_CODE (XEXP (t, 0)) == ASHIFTRT)
	       && GET_CODE (XEXP (XEXP (t, 0), 0)) == SUBREG
	       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
	       && subreg_lowpart_p (XEXP (XEXP (t, 0), 0))
	       && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 0)), f)
	       && ((nonzero_bits (f, GET_MODE (f))
		    & ~ GET_MODE_MASK (GET_MODE (XEXP (XEXP (t, 0), 0))))
		   == 0))
	{
	  c1 = XEXP (XEXP (t, 0), 1); z = f; op = GET_CODE (XEXP (t, 0));
	  extend_op = ZERO_EXTEND;
	  m = GET_MODE (XEXP (t, 0));
	}
      else if (GET_CODE (t) == ZERO_EXTEND
	       && (GET_CODE (XEXP (t, 0)) == PLUS
		   || GET_CODE (XEXP (t, 0)) == IOR
		   || GET_CODE (XEXP (t, 0)) == XOR)
	       && GET_CODE (XEXP (XEXP (t, 0), 1)) == SUBREG
	       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
	       && subreg_lowpart_p (XEXP (XEXP (t, 0), 1))
	       && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 1)), f)
	       && ((nonzero_bits (f, GET_MODE (f))
		    & ~ GET_MODE_MASK (GET_MODE (XEXP (XEXP (t, 0), 1))))
		   == 0))
	{
	  c1 = XEXP (XEXP (t, 0), 0); z = f; op = GET_CODE (XEXP (t, 0));
	  extend_op = ZERO_EXTEND;
	  m = GET_MODE (XEXP (t, 0));
	}

      if (z)
	{
	  temp = subst (gen_binary (true_code, m, cond_op0, cond_op1),
			pc_rtx, pc_rtx, 0, 0);
	  temp = gen_binary (MULT, m, temp,
			     gen_binary (MULT, m, c1, const_true_rtx));
	  temp = subst (temp, pc_rtx, pc_rtx, 0, 0);
	  temp = gen_binary (op, m, gen_lowpart_for_combine (m, z), temp);

	  if (extend_op != NIL)
	    temp = gen_unary (extend_op, mode, m, temp);

	  return temp;
	}
    }

  /* If we have (if_then_else (ne A 0) C1 0) and either A is known to be 0 or
     1 and C1 is a single bit or A is known to be 0 or -1 and C1 is the
     negation of a single bit, we can convert this operation to a shift.  We
     can actually do this more generally, but it doesn't seem worth it.  */

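  /* For example, if A is known to be 0 or 1,
     (if_then_else (ne A (const_int 0)) (const_int 4) (const_int 0))
     becomes (ashift A (const_int 2)).  */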
  if (true_code == NE && XEXP (cond, 1) == const0_rtx
      && false == const0_rtx && GET_CODE (true) == CONST_INT
      && ((1 == nonzero_bits (XEXP (cond, 0), mode)
	   && (i = exact_log2 (INTVAL (true))) >= 0)
	  || ((num_sign_bit_copies (XEXP (cond, 0), mode)
	       == GET_MODE_BITSIZE (mode))
	      && (i = exact_log2 (- INTVAL (true))) >= 0)))
    return
      simplify_shift_const (NULL_RTX, ASHIFT, mode,
			    gen_lowpart_for_combine (mode, XEXP (cond, 0)), i);

  return x;
}
\f
/* Simplify X, a SET expression.  Return the new expression.  */

static rtx
simplify_set (x)
     rtx x;
{
  rtx src = SET_SRC (x);
  rtx dest = SET_DEST (x);
  enum machine_mode mode
    = GET_MODE (src) != VOIDmode ? GET_MODE (src) : GET_MODE (dest);
  rtx other_insn;
  rtx *cc_use;

  /* (set (pc) (return)) gets written as (return).  */
  if (GET_CODE (dest) == PC && GET_CODE (src) == RETURN)
    return src;

  /* Now that we know for sure which bits of SRC we are using, see if we can
     simplify the expression for the object knowing that we only need the
     low-order bits.  */

  if (GET_MODE_CLASS (mode) == MODE_INT)
    src = force_to_mode (src, mode, GET_MODE_MASK (mode), NULL_RTX, 0);

  /* If we are setting CC0 or if the source is a COMPARE, look for the use of
     the comparison result and try to simplify it unless we already have used
     undobuf.other_insn.  */
  if ((GET_CODE (src) == COMPARE
#ifdef HAVE_cc0
       || dest == cc0_rtx
#endif
       )
      && (cc_use = find_single_use (dest, subst_insn, &other_insn)) != 0
      && (undobuf.other_insn == 0 || other_insn == undobuf.other_insn)
      && GET_RTX_CLASS (GET_CODE (*cc_use)) == '<'
      && rtx_equal_p (XEXP (*cc_use, 0), dest))
    {
      enum rtx_code old_code = GET_CODE (*cc_use);
      enum rtx_code new_code;
      rtx op0, op1;
      int other_changed = 0;
      enum machine_mode compare_mode = GET_MODE (dest);

      if (GET_CODE (src) == COMPARE)
	op0 = XEXP (src, 0), op1 = XEXP (src, 1);
      else
	op0 = src, op1 = const0_rtx;

      /* Simplify our comparison, if possible.  */
      new_code = simplify_comparison (old_code, &op0, &op1);

#ifdef EXTRA_CC_MODES
      /* If this machine has CC modes other than CCmode, check to see if we
	 need to use a different CC mode here.  */
      compare_mode = SELECT_CC_MODE (new_code, op0, op1);
#endif /* EXTRA_CC_MODES */

#if !defined (HAVE_cc0) && defined (EXTRA_CC_MODES)
      /* If the mode changed, we have to change SET_DEST, the mode in the
	 compare, and the mode in the place SET_DEST is used.  If SET_DEST is
	 a hard register, just build new versions with the proper mode.  If it
	 is a pseudo, we lose unless it is the only time we set the pseudo, in
	 which case we can safely change its mode.  */
      if (compare_mode != GET_MODE (dest))
	{
	  int regno = REGNO (dest);
	  rtx new_dest = gen_rtx_REG (compare_mode, regno);

	  if (regno < FIRST_PSEUDO_REGISTER
	      || (REG_N_SETS (regno) == 1 && ! REG_USERVAR_P (dest)))
	    {
	      if (regno >= FIRST_PSEUDO_REGISTER)
		SUBST (regno_reg_rtx[regno], new_dest);

	      SUBST (SET_DEST (x), new_dest);
	      SUBST (XEXP (*cc_use, 0), new_dest);
	      other_changed = 1;

	      dest = new_dest;
	    }
	}
#endif

      /* If the code changed, we have to build a new comparison in
	 undobuf.other_insn.  */
      if (new_code != old_code)
	{
	  unsigned HOST_WIDE_INT mask;

	  SUBST (*cc_use, gen_rtx_combine (new_code, GET_MODE (*cc_use),
					   dest, const0_rtx));

	  /* If the only change we made was to change an EQ into an NE or
	     vice versa, OP0 has only one bit that might be nonzero, and OP1
	     is zero, check if changing the user of the condition code will
	     produce a valid insn.  If it won't, we can keep the original code
	     in that insn by surrounding our operation with an XOR.  */

	  if (((old_code == NE && new_code == EQ)
	       || (old_code == EQ && new_code == NE))
	      && ! other_changed && op1 == const0_rtx
	      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
	      && exact_log2 (mask = nonzero_bits (op0, GET_MODE (op0))) >= 0)
	    {
	      rtx pat = PATTERN (other_insn), note = 0;
	      int scratches;

	      if ((recog_for_combine (&pat, other_insn, &note, &scratches) < 0
		   && ! check_asm_operands (pat)))
		{
		  PUT_CODE (*cc_use, old_code);
		  other_insn = 0;

		  op0 = gen_binary (XOR, GET_MODE (op0), op0, GEN_INT (mask));
		}
	    }

	  other_changed = 1;
	}

      if (other_changed)
	undobuf.other_insn = other_insn;

#ifdef HAVE_cc0
      /* If we are now comparing against zero, change our source if
	 needed.  If we do not use cc0, we always have a COMPARE.  */
      if (op1 == const0_rtx && dest == cc0_rtx)
	{
	  SUBST (SET_SRC (x), op0);
	  src = op0;
	}
      else
#endif

      /* Otherwise, if we didn't previously have a COMPARE in the
	 correct mode, we need one.  */
      if (GET_CODE (src) != COMPARE || GET_MODE (src) != compare_mode)
	{
	  SUBST (SET_SRC (x),
		 gen_rtx_combine (COMPARE, compare_mode, op0, op1));
	  src = SET_SRC (x);
	}
      else
	{
	  /* Otherwise, update the COMPARE if needed.  */
	  SUBST (XEXP (src, 0), op0);
	  SUBST (XEXP (src, 1), op1);
	}
    }
  else
    {
      /* Get SET_SRC in a form where we have placed back any
	 compound expressions.  Then do the checks below.  */
      src = make_compound_operation (src, SET);
      SUBST (SET_SRC (x), src);
    }

  /* If we have (set x (subreg:m1 (op:m2 ...) 0)) with OP being some operation,
     and X being a REG or (subreg (reg)), we may be able to convert this to
     (set (subreg:m2 x) (op)).

     We can always do this if M1 is narrower than M2 because that means that
     we only care about the low bits of the result.

     However, on machines without WORD_REGISTER_OPERATIONS defined, we cannot
     perform a narrower operation than requested since the high-order bits will
     be undefined.  On machines where it is defined, this transformation is
     safe as long as M1 and M2 have the same number of words.  */

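  /* For example, (set (reg:QI R) (subreg:QI (plus:SI A B) 0)) can become
     (set (subreg:SI R 0) (plus:SI A B)), performing the addition in the
     wider mode while only the low byte of R stays meaningful.  */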
  if (GET_CODE (src) == SUBREG && subreg_lowpart_p (src)
      && GET_RTX_CLASS (GET_CODE (SUBREG_REG (src))) != 'o'
      && (((GET_MODE_SIZE (GET_MODE (src)) + (UNITS_PER_WORD - 1))
	   / UNITS_PER_WORD)
	  == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
	       + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
#ifndef WORD_REGISTER_OPERATIONS
      && (GET_MODE_SIZE (GET_MODE (src))
	  < GET_MODE_SIZE (GET_MODE (SUBREG_REG (src))))
#endif
#ifdef CLASS_CANNOT_CHANGE_SIZE
      && ! (GET_CODE (dest) == REG && REGNO (dest) < FIRST_PSEUDO_REGISTER
	    && (TEST_HARD_REG_BIT
		(reg_class_contents[(int) CLASS_CANNOT_CHANGE_SIZE],
		 REGNO (dest)))
	    && (GET_MODE_SIZE (GET_MODE (src))
		!= GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))))
#endif
      && (GET_CODE (dest) == REG
	  || (GET_CODE (dest) == SUBREG
	      && GET_CODE (SUBREG_REG (dest)) == REG)))
    {
      SUBST (SET_DEST (x),
	     gen_lowpart_for_combine (GET_MODE (SUBREG_REG (src)),
				      dest));
      SUBST (SET_SRC (x), SUBREG_REG (src));

      src = SET_SRC (x), dest = SET_DEST (x);
    }

#ifdef LOAD_EXTEND_OP
  /* If we have (set FOO (subreg:M (mem:N BAR) 0)) with M wider than N, this
     would require a paradoxical subreg.  Replace the subreg with a
     zero_extend to avoid the reload that would otherwise be required.  */

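  /* For example, on a target whose loads zero-extend, so that
     LOAD_EXTEND_OP (QImode) == ZERO_EXTEND, (subreg:SI (mem:QI BAR) 0)
     becomes (zero_extend:SI (mem:QI BAR)).  */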
  if (GET_CODE (src) == SUBREG && subreg_lowpart_p (src)
      && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (src))) != NIL
      && SUBREG_WORD (src) == 0
      && (GET_MODE_SIZE (GET_MODE (src))
	  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (src))))
      && GET_CODE (SUBREG_REG (src)) == MEM)
    {
      SUBST (SET_SRC (x),
	     gen_rtx_combine (LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (src))),
			      GET_MODE (src), XEXP (src, 0)));

      src = SET_SRC (x);
    }
#endif

  /* If we don't have a conditional move, SET_SRC is an IF_THEN_ELSE, and we
     are comparing an item known to be 0 or -1 against 0, use a logical
     operation instead.  Check for one of the arms being an IOR of the other
     arm with some value.  We compute three terms to be IOR'ed together.  In
     practice, at most two will be nonzero.  Then we do the IOR's.  */

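  /* For example, if A is known to be 0 or -1,
     (if_then_else (ne A (const_int 0)) B C) becomes
     (ior (and A B) (and (not A) C)), with TERM1 picking up the shared
     part when one arm is an IOR of the other.  */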
  if (GET_CODE (dest) != PC
      && GET_CODE (src) == IF_THEN_ELSE
      && GET_MODE_CLASS (GET_MODE (src)) == MODE_INT
      && (GET_CODE (XEXP (src, 0)) == EQ || GET_CODE (XEXP (src, 0)) == NE)
      && XEXP (XEXP (src, 0), 1) == const0_rtx
      && GET_MODE (src) == GET_MODE (XEXP (XEXP (src, 0), 0))
#ifdef HAVE_conditional_move
      && ! can_conditionally_move_p (GET_MODE (src))
#endif
      && (num_sign_bit_copies (XEXP (XEXP (src, 0), 0),
			       GET_MODE (XEXP (XEXP (src, 0), 0)))
	  == GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (src, 0), 0))))
      && ! side_effects_p (src))
    {
      rtx true = (GET_CODE (XEXP (src, 0)) == NE
		  ? XEXP (src, 1) : XEXP (src, 2));
      rtx false = (GET_CODE (XEXP (src, 0)) == NE
		   ? XEXP (src, 2) : XEXP (src, 1));
      rtx term1 = const0_rtx, term2, term3;

      if (GET_CODE (true) == IOR && rtx_equal_p (XEXP (true, 0), false))
	term1 = false, true = XEXP (true, 1), false = const0_rtx;
      else if (GET_CODE (true) == IOR
	       && rtx_equal_p (XEXP (true, 1), false))
	term1 = false, true = XEXP (true, 0), false = const0_rtx;
      else if (GET_CODE (false) == IOR
	       && rtx_equal_p (XEXP (false, 0), true))
	term1 = true, false = XEXP (false, 1), true = const0_rtx;
      else if (GET_CODE (false) == IOR
	       && rtx_equal_p (XEXP (false, 1), true))
	term1 = true, false = XEXP (false, 0), true = const0_rtx;

      term2 = gen_binary (AND, GET_MODE (src), XEXP (XEXP (src, 0), 0), true);
      term3 = gen_binary (AND, GET_MODE (src),
			  gen_unary (NOT, GET_MODE (src), GET_MODE (src),
				     XEXP (XEXP (src, 0), 0)),
			  false);

      SUBST (SET_SRC (x),
	     gen_binary (IOR, GET_MODE (src),
			 gen_binary (IOR, GET_MODE (src), term1, term2),
			 term3));

      src = SET_SRC (x);
    }

  /* If either SRC or DEST is a CLOBBER of (const_int 0), make this
     whole thing fail.  */
  if (GET_CODE (src) == CLOBBER && XEXP (src, 0) == const0_rtx)
    return src;
  else if (GET_CODE (dest) == CLOBBER && XEXP (dest, 0) == const0_rtx)
    return dest;
  else
    /* Convert this into a field assignment operation, if possible.  */
    return make_field_assignment (x);
}
\f
/* Simplify X, an AND, IOR, or XOR operation, and return the simplified
   result.  LAST is nonzero if this is the last retry.  */

static rtx
simplify_logical (x, last)
     rtx x;
     int last;
{
  enum machine_mode mode = GET_MODE (x);
  rtx op0 = XEXP (x, 0);
  rtx op1 = XEXP (x, 1);

  switch (GET_CODE (x))
    {
    case AND:
      /* Convert (A ^ B) & A to A & (~ B) since the latter is often a single
	 insn (and may simplify more).  */
      if (GET_CODE (op0) == XOR
	  && rtx_equal_p (XEXP (op0, 0), op1)
	  && ! side_effects_p (op1))
	x = gen_binary (AND, mode,
			gen_unary (NOT, mode, mode, XEXP (op0, 1)), op1);

      if (GET_CODE (op0) == XOR
	  && rtx_equal_p (XEXP (op0, 1), op1)
	  && ! side_effects_p (op1))
	x = gen_binary (AND, mode,
			gen_unary (NOT, mode, mode, XEXP (op0, 0)), op1);

      /* Similarly for (~ (A ^ B)) & A.  */
      if (GET_CODE (op0) == NOT
	  && GET_CODE (XEXP (op0, 0)) == XOR
	  && rtx_equal_p (XEXP (XEXP (op0, 0), 0), op1)
	  && ! side_effects_p (op1))
	x = gen_binary (AND, mode, XEXP (XEXP (op0, 0), 1), op1);

      if (GET_CODE (op0) == NOT
	  && GET_CODE (XEXP (op0, 0)) == XOR
	  && rtx_equal_p (XEXP (XEXP (op0, 0), 1), op1)
	  && ! side_effects_p (op1))
	x = gen_binary (AND, mode, XEXP (XEXP (op0, 0), 0), op1);

      if (GET_CODE (op1) == CONST_INT)
	{
	  x = simplify_and_const_int (x, mode, op0, INTVAL (op1));

	  /* If we have (ior (and (X C1) C2)) and the next restart would be
	     the last, simplify this by making C1 as small as possible
	     and then exit.  */
	  if (last
	      && GET_CODE (x) == IOR && GET_CODE (op0) == AND
	      && GET_CODE (XEXP (op0, 1)) == CONST_INT
	      && GET_CODE (op1) == CONST_INT)
	    return gen_binary (IOR, mode,
			       gen_binary (AND, mode, XEXP (op0, 0),
					   GEN_INT (INTVAL (XEXP (op0, 1))
						    & ~ INTVAL (op1))), op1);

	  if (GET_CODE (x) != AND)
	    return x;

	  if (GET_RTX_CLASS (GET_CODE (x)) == 'c'
	      || GET_RTX_CLASS (GET_CODE (x)) == '2')
	    op0 = XEXP (x, 0), op1 = XEXP (x, 1);
	}

      /* Convert (A | B) & A to A.  */
      if (GET_CODE (op0) == IOR
	  && (rtx_equal_p (XEXP (op0, 0), op1)
	      || rtx_equal_p (XEXP (op0, 1), op1))
	  && ! side_effects_p (XEXP (op0, 0))
	  && ! side_effects_p (XEXP (op0, 1)))
	return op1;

      /* In the following group of tests (and those in case IOR below),
	 we start with some combination of logical operations and apply
	 the distributive law followed by the inverse distributive law.
	 Most of the time, this results in no change.  However, if some of
	 the operands are the same or inverses of each other, simplifications
	 will result.

	 For example, (and (ior A B) (not B)) can occur as the result of
	 expanding a bit field assignment.  When we apply the distributive
	 law to this, we get (ior (and (A (not B))) (and (B (not B)))),
	 which then simplifies to (and (A (not B))).

	 If we have (and (ior A B) C), apply the distributive law and then
	 the inverse distributive law to see if things simplify.  */

      if (GET_CODE (op0) == IOR || GET_CODE (op0) == XOR)
	{
	  x = apply_distributive_law
	    (gen_binary (GET_CODE (op0), mode,
			 gen_binary (AND, mode, XEXP (op0, 0), op1),
			 gen_binary (AND, mode, XEXP (op0, 1), op1)));
	  if (GET_CODE (x) != AND)
	    return x;
	}

      if (GET_CODE (op1) == IOR || GET_CODE (op1) == XOR)
	return apply_distributive_law
	  (gen_binary (GET_CODE (op1), mode,
		       gen_binary (AND, mode, XEXP (op1, 0), op0),
		       gen_binary (AND, mode, XEXP (op1, 1), op0)));

      /* Similarly, taking advantage of the fact that
	 (and (not A) (xor B C)) == (xor (ior A B) (ior A C))  */

      if (GET_CODE (op0) == NOT && GET_CODE (op1) == XOR)
	return apply_distributive_law
	  (gen_binary (XOR, mode,
		       gen_binary (IOR, mode, XEXP (op0, 0), XEXP (op1, 0)),
		       gen_binary (IOR, mode, XEXP (op0, 0), XEXP (op1, 1))));

      else if (GET_CODE (op1) == NOT && GET_CODE (op0) == XOR)
	return apply_distributive_law
	  (gen_binary (XOR, mode,
		       gen_binary (IOR, mode, XEXP (op1, 0), XEXP (op0, 0)),
		       gen_binary (IOR, mode, XEXP (op1, 0), XEXP (op0, 1))));
      break;

    case IOR:
      /* (ior A C) is C if all bits of A that might be nonzero are on in C.  */
      if (GET_CODE (op1) == CONST_INT
	  && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
	  && (nonzero_bits (op0, mode) & ~ INTVAL (op1)) == 0)
	return op1;

      /* Convert (A & B) | A to A.  */
      if (GET_CODE (op0) == AND
	  && (rtx_equal_p (XEXP (op0, 0), op1)
	      || rtx_equal_p (XEXP (op0, 1), op1))
	  && ! side_effects_p (XEXP (op0, 0))
	  && ! side_effects_p (XEXP (op0, 1)))
	return op1;

      /* If we have (ior (and A B) C), apply the distributive law and then
	 the inverse distributive law to see if things simplify.  */

      if (GET_CODE (op0) == AND)
	{
	  x = apply_distributive_law
	    (gen_binary (AND, mode,
			 gen_binary (IOR, mode, XEXP (op0, 0), op1),
			 gen_binary (IOR, mode, XEXP (op0, 1), op1)));

	  if (GET_CODE (x) != IOR)
	    return x;
	}

      if (GET_CODE (op1) == AND)
	{
	  x = apply_distributive_law
	    (gen_binary (AND, mode,
			 gen_binary (IOR, mode, XEXP (op1, 0), op0),
			 gen_binary (IOR, mode, XEXP (op1, 1), op0)));

	  if (GET_CODE (x) != IOR)
	    return x;
	}

      /* Convert (ior (ashift A CX) (lshiftrt A CY)) where CX+CY equals the
	 mode size to (rotate A CX).  */

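      /* For example, in SImode, (ior (ashift A (const_int 8))
	 (lshiftrt A (const_int 24))) becomes (rotate A (const_int 8)),
	 since 8 + 24 is the mode size.  */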
      if (((GET_CODE (op0) == ASHIFT && GET_CODE (op1) == LSHIFTRT)
	   || (GET_CODE (op1) == ASHIFT && GET_CODE (op0) == LSHIFTRT))
	  && rtx_equal_p (XEXP (op0, 0), XEXP (op1, 0))
	  && GET_CODE (XEXP (op0, 1)) == CONST_INT
	  && GET_CODE (XEXP (op1, 1)) == CONST_INT
	  && (INTVAL (XEXP (op0, 1)) + INTVAL (XEXP (op1, 1))
	      == GET_MODE_BITSIZE (mode)))
	return gen_rtx_ROTATE (mode, XEXP (op0, 0),
			       (GET_CODE (op0) == ASHIFT
				? XEXP (op0, 1) : XEXP (op1, 1)));

      /* If OP0 is (ashiftrt (plus ...) C), it might actually be
	 a (sign_extend (plus ...)).  If so, OP1 is a CONST_INT; if the
	 PLUS does not affect any of the bits in OP1, the IOR can really
	 be done as a PLUS and we can associate.  We do this by seeing if
	 OP1 can be safely shifted left C bits.  */
      if (GET_CODE (op1) == CONST_INT && GET_CODE (op0) == ASHIFTRT
	  && GET_CODE (XEXP (op0, 0)) == PLUS
	  && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
	  && GET_CODE (XEXP (op0, 1)) == CONST_INT
	  && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT)
	{
	  int count = INTVAL (XEXP (op0, 1));
	  HOST_WIDE_INT mask = INTVAL (op1) << count;

	  if (mask >> count == INTVAL (op1)
	      && (mask & nonzero_bits (XEXP (op0, 0), mode)) == 0)
	    {
	      SUBST (XEXP (XEXP (op0, 0), 1),
		     GEN_INT (INTVAL (XEXP (XEXP (op0, 0), 1)) | mask));
	      return op0;
	    }
	}
      break;

    case XOR:
      /* Convert (XOR (NOT x) (NOT y)) to (XOR x y).
	 Also convert (XOR (NOT x) y) to (NOT (XOR x y)), similarly for
	 (NOT y).  */
      {
	int num_negated = 0;

	if (GET_CODE (op0) == NOT)
	  num_negated++, op0 = XEXP (op0, 0);
	if (GET_CODE (op1) == NOT)
	  num_negated++, op1 = XEXP (op1, 0);

	if (num_negated == 2)
	  {
	    SUBST (XEXP (x, 0), op0);
	    SUBST (XEXP (x, 1), op1);
	  }
	else if (num_negated == 1)
	  return gen_unary (NOT, mode, mode, gen_binary (XOR, mode, op0, op1));
      }

      /* Convert (xor (and A B) B) to (and (not A) B).  The latter may
	 correspond to a machine insn or result in further simplifications
	 if B is a constant.  */

      if (GET_CODE (op0) == AND
	  && rtx_equal_p (XEXP (op0, 1), op1)
	  && ! side_effects_p (op1))
	return gen_binary (AND, mode,
			   gen_unary (NOT, mode, mode, XEXP (op0, 0)),
			   op1);

      else if (GET_CODE (op0) == AND
	       && rtx_equal_p (XEXP (op0, 0), op1)
	       && ! side_effects_p (op1))
	return gen_binary (AND, mode,
			   gen_unary (NOT, mode, mode, XEXP (op0, 1)),
			   op1);

      /* (xor (comparison foo bar) (const_int 1)) can become the reversed
	 comparison if STORE_FLAG_VALUE is 1.  */
      if (STORE_FLAG_VALUE == 1
	  && op1 == const1_rtx
	  && GET_RTX_CLASS (GET_CODE (op0)) == '<'
	  && reversible_comparison_p (op0))
	return gen_rtx_combine (reverse_condition (GET_CODE (op0)),
				mode, XEXP (op0, 0), XEXP (op0, 1));

      /* (lshiftrt foo C) where C is the number of bits in FOO minus 1
	 is (lt foo (const_int 0)), so we can perform the above
	 simplification if STORE_FLAG_VALUE is 1.  */

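      /* For example, in SImode, (xor (lshiftrt X (const_int 31))
	 (const_int 1)) becomes (ge X (const_int 0)).  */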
      if (STORE_FLAG_VALUE == 1
	  && op1 == const1_rtx
	  && GET_CODE (op0) == LSHIFTRT
	  && GET_CODE (XEXP (op0, 1)) == CONST_INT
	  && INTVAL (XEXP (op0, 1)) == GET_MODE_BITSIZE (mode) - 1)
	return gen_rtx_combine (GE, mode, XEXP (op0, 0), const0_rtx);

      /* (xor (comparison foo bar) (const_int sign-bit))
	 when STORE_FLAG_VALUE is the sign bit.  */
      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
	  && ((STORE_FLAG_VALUE & GET_MODE_MASK (mode))
	      == (HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (mode) - 1))
	  && op1 == const_true_rtx
	  && GET_RTX_CLASS (GET_CODE (op0)) == '<'
	  && reversible_comparison_p (op0))
	return gen_rtx_combine (reverse_condition (GET_CODE (op0)),
				mode, XEXP (op0, 0), XEXP (op0, 1));
      break;

    default:
      abort ();
    }

  return x;
}
\f
/* We consider ZERO_EXTRACT, SIGN_EXTRACT, and SIGN_EXTEND as "compound
   operations" because they can be replaced with two more basic operations.
   ZERO_EXTEND is also considered "compound" because it can be replaced with
   an AND operation, which is simpler, though only one operation.

   The function expand_compound_operation is called with an rtx expression
   and will convert it to the appropriate shifts and AND operations,
   simplifying at each stage.

   The function make_compound_operation is called to convert an expression
   consisting of shifts and ANDs into the equivalent compound expression.
   It is the inverse of this function, loosely speaking.  */

static rtx
expand_compound_operation (x)
     rtx x;
{
  int pos = 0, len;
  int unsignedp = 0;
  int modewidth;
  rtx tem;

  switch (GET_CODE (x))
    {
    case ZERO_EXTEND:
      unsignedp = 1;
    case SIGN_EXTEND:
      /* We can't necessarily use a const_int for a multiword mode;
	 it depends on implicitly extending the value.
	 Since we don't know the right way to extend it,
	 we can't tell whether the implicit way is right.

	 Even for a mode that is no wider than a const_int,
	 we can't win, because we need to sign extend one of its bits through
	 the rest of it, and we don't know which bit.  */
      if (GET_CODE (XEXP (x, 0)) == CONST_INT)
	return x;

      /* Return if (subreg:MODE FROM 0) is not a safe replacement for
	 (zero_extend:MODE FROM) or (sign_extend:MODE FROM).  It is for any MEM
	 because (SUBREG (MEM...)) is guaranteed to cause the MEM to be
	 reloaded.  If not for that, MEM's would very rarely be safe.

	 Reject MODEs bigger than a word, because we might not be able
	 to reference a two-register group starting with an arbitrary register
	 (and currently gen_lowpart might crash for a SUBREG).  */

      if (GET_MODE_SIZE (GET_MODE (XEXP (x, 0))) > UNITS_PER_WORD)
	return x;

      len = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)));
      /* If the inner object has VOIDmode (the only way this can happen
	 is if it is an ASM_OPERANDS), we can't do anything since we don't
	 know how much masking to do.  */
      if (len == 0)
	return x;

      break;

    case ZERO_EXTRACT:
      unsignedp = 1;
    case SIGN_EXTRACT:
      /* If the operand is a CLOBBER, just return it.  */
      if (GET_CODE (XEXP (x, 0)) == CLOBBER)
	return XEXP (x, 0);

      if (GET_CODE (XEXP (x, 1)) != CONST_INT
	  || GET_CODE (XEXP (x, 2)) != CONST_INT
	  || GET_MODE (XEXP (x, 0)) == VOIDmode)
	return x;

      len = INTVAL (XEXP (x, 1));
      pos = INTVAL (XEXP (x, 2));

      /* If this goes outside the object being extracted, replace the object
	 with a (use (mem ...)) construct that only combine understands
	 and is used only for this purpose.  */
      if (len + pos > GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))))
	SUBST (XEXP (x, 0), gen_rtx_USE (GET_MODE (x), XEXP (x, 0)));

      if (BITS_BIG_ENDIAN)
	pos = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - len - pos;

      break;

    default:
      return x;
    }

  /* We can optimize some special cases of ZERO_EXTEND.  */
  if (GET_CODE (x) == ZERO_EXTEND)
    {
      /* (zero_extend:DI (truncate:SI foo:DI)) is just foo:DI if we
	 know that the last value didn't have any inappropriate bits
	 set.  */
      if (GET_CODE (XEXP (x, 0)) == TRUNCATE
	  && GET_MODE (XEXP (XEXP (x, 0), 0)) == GET_MODE (x)
	  && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
	  && (nonzero_bits (XEXP (XEXP (x, 0), 0), GET_MODE (x))
	      & ~ GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
	return XEXP (XEXP (x, 0), 0);

      /* Likewise for (zero_extend:DI (subreg:SI foo:DI 0)).  */
      if (GET_CODE (XEXP (x, 0)) == SUBREG
	  && GET_MODE (SUBREG_REG (XEXP (x, 0))) == GET_MODE (x)
	  && subreg_lowpart_p (XEXP (x, 0))
	  && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
	  && (nonzero_bits (SUBREG_REG (XEXP (x, 0)), GET_MODE (x))
	      & ~ GET_MODE_MASK (GET_MODE (SUBREG_REG (x)))) == 0)
	return SUBREG_REG (XEXP (x, 0));

      /* (zero_extend:DI (truncate:SI foo:DI)) is just foo:DI when foo
	 is a comparison and STORE_FLAG_VALUE permits.  This is like
	 the first case, but it works even when GET_MODE (x) is larger
	 than HOST_WIDE_INT.  */
      if (GET_CODE (XEXP (x, 0)) == TRUNCATE
	  && GET_MODE (XEXP (XEXP (x, 0), 0)) == GET_MODE (x)
	  && GET_RTX_CLASS (GET_CODE (XEXP (XEXP (x, 0), 0))) == '<'
	  && (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
	      <= HOST_BITS_PER_WIDE_INT)
	  && ((HOST_WIDE_INT) STORE_FLAG_VALUE
	      & ~ GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
	return XEXP (XEXP (x, 0), 0);

      /* Likewise for (zero_extend:DI (subreg:SI foo:DI 0)).  */
      if (GET_CODE (XEXP (x, 0)) == SUBREG
	  && GET_MODE (SUBREG_REG (XEXP (x, 0))) == GET_MODE (x)
	  && subreg_lowpart_p (XEXP (x, 0))
	  && GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0)))) == '<'
	  && (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
	      <= HOST_BITS_PER_WIDE_INT)
	  && ((HOST_WIDE_INT) STORE_FLAG_VALUE
	      & ~ GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
	return SUBREG_REG (XEXP (x, 0));

      /* If sign extension is cheaper than zero extension, then use it
	 if we know that no extraneous bits are set, and that the high
	 bit is not set.  */
      if (flag_expensive_optimizations
	  && ((GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
	       && ((nonzero_bits (XEXP (x, 0), GET_MODE (x))
		    & ~ (((unsigned HOST_WIDE_INT)
			  GET_MODE_MASK (GET_MODE (XEXP (x, 0))))
			 >> 1))
		   == 0))
	      || (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
		  && (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
		      <= HOST_BITS_PER_WIDE_INT)
		  && (((HOST_WIDE_INT) STORE_FLAG_VALUE
		       & ~ (((unsigned HOST_WIDE_INT)
			     GET_MODE_MASK (GET_MODE (XEXP (x, 0))))
			    >> 1))
		      == 0))))
	{
	  rtx temp = gen_rtx_SIGN_EXTEND (GET_MODE (x), XEXP (x, 0));

	  if (rtx_cost (temp, SET) < rtx_cost (x, SET))
	    return expand_compound_operation (temp);
	}
    }

  /* If we reach here, we want to return a pair of shifts.  The inner
     shift is a left shift of BITSIZE - POS - LEN bits.  The outer
     shift is a right shift of BITSIZE - LEN bits.  It is arithmetic or
     logical depending on the value of UNSIGNEDP.

     If this was a ZERO_EXTEND or ZERO_EXTRACT, this pair of shifts will be
     converted into an AND of a shift.

     We must check for the case where the left shift would have a negative
     count.  This can happen in a case like (x >> 31) & 255 on machines
     that can't shift by a constant.  On those machines, we would first
     combine the shift with the AND to produce a variable-position
     extraction.  Then the constant of 31 would be substituted in to produce
     such a position.  */

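  /* For example, (sign_extract:SI X (const_int 8) (const_int 0)) becomes
     (ashiftrt:SI (ashift:SI X (const_int 24)) (const_int 24)) here, and
     the zero-extending variant becomes an AND of a logical right shift.  */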
  modewidth = GET_MODE_BITSIZE (GET_MODE (x));
  if (modewidth >= pos - len)
    tem = simplify_shift_const (NULL_RTX, unsignedp ? LSHIFTRT : ASHIFTRT,
				GET_MODE (x),
				simplify_shift_const (NULL_RTX, ASHIFT,
						      GET_MODE (x),
						      XEXP (x, 0),
						      modewidth - pos - len),
				modewidth - len);

  else if (unsignedp && len < HOST_BITS_PER_WIDE_INT)
    tem = simplify_and_const_int (NULL_RTX, GET_MODE (x),
				  simplify_shift_const (NULL_RTX, LSHIFTRT,
							GET_MODE (x),
							XEXP (x, 0), pos),
				  ((HOST_WIDE_INT) 1 << len) - 1);
  else
    /* Any other cases we can't handle.  */
    return x;


  /* If we couldn't do this for some reason, return the original
     expression.  */
  if (GET_CODE (tem) == CLOBBER)
    return x;

  return tem;
}
\f
/* X is a SET which contains an assignment of one object into
   a part of another (such as a bit-field assignment, STRICT_LOW_PART,
   or certain SUBREGS).  If possible, convert it into a series of
   logical operations.

   We half-heartedly support variable positions, but do not at all
   support variable lengths.  */

static rtx
expand_field_assignment (x)
     rtx x;
{
  rtx inner;
  rtx pos;			/* Always counts from low bit.  */
  int len;
  rtx mask;
  enum machine_mode compute_mode;

  /* Loop until we find something we can't simplify.  */
  while (1)
    {
      if (GET_CODE (SET_DEST (x)) == STRICT_LOW_PART
	  && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG)
	{
	  inner = SUBREG_REG (XEXP (SET_DEST (x), 0));
	  len = GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)));
	  pos = GEN_INT (BITS_PER_WORD * SUBREG_WORD (XEXP (SET_DEST (x), 0)));
	}
      else if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
	       && GET_CODE (XEXP (SET_DEST (x), 1)) == CONST_INT)
	{
	  inner = XEXP (SET_DEST (x), 0);
	  len = INTVAL (XEXP (SET_DEST (x), 1));
	  pos = XEXP (SET_DEST (x), 2);

	  /* If the position is constant and spans the width of INNER,
	     surround INNER with a USE to indicate this.  */
	  if (GET_CODE (pos) == CONST_INT
	      && INTVAL (pos) + len > GET_MODE_BITSIZE (GET_MODE (inner)))
	    inner = gen_rtx_USE (GET_MODE (SET_DEST (x)), inner);

	  if (BITS_BIG_ENDIAN)
	    {
	      if (GET_CODE (pos) == CONST_INT)
		pos = GEN_INT (GET_MODE_BITSIZE (GET_MODE (inner)) - len
			       - INTVAL (pos));
	      else if (GET_CODE (pos) == MINUS
		       && GET_CODE (XEXP (pos, 1)) == CONST_INT
		       && (INTVAL (XEXP (pos, 1))
			   == GET_MODE_BITSIZE (GET_MODE (inner)) - len))
		/* If position is ADJUST - X, new position is X.  */
		pos = XEXP (pos, 0);
	      else
		pos = gen_binary (MINUS, GET_MODE (pos),
				  GEN_INT (GET_MODE_BITSIZE (GET_MODE (inner))
					   - len),
				  pos);
	    }
	}

      /* A SUBREG between two modes that occupy the same numbers of words
	 can be done by moving the SUBREG to the source.  */
      else if (GET_CODE (SET_DEST (x)) == SUBREG
	       && (((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
		     + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
		   == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
			+ (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
	{
	  x = gen_rtx_SET (VOIDmode, SUBREG_REG (SET_DEST (x)),
			   gen_lowpart_for_combine
			   (GET_MODE (SUBREG_REG (SET_DEST (x))),
			    SET_SRC (x)));
	  continue;
	}
      else
	break;

      while (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner))
	inner = SUBREG_REG (inner);

      compute_mode = GET_MODE (inner);

      /* Compute a mask of LEN bits, if we can do this on the host machine.  */
      if (len < HOST_BITS_PER_WIDE_INT)
	mask = GEN_INT (((HOST_WIDE_INT) 1 << len) - 1);
      else
	break;

      /* Now compute the equivalent expression.  Make a copy of INNER
	 for the SET_DEST in case it is a MEM into which we will substitute;
	 we don't want shared RTL in that case.  */
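      /* The replacement computes
	 INNER = (INNER & ~(MASK << POS)) | ((SRC & MASK) << POS),
	 i.e. the field is cleared and then the masked source is OR'ed
	 into place.  */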
      x = gen_rtx_SET (VOIDmode, copy_rtx (inner),
		       gen_binary (IOR, compute_mode,
				   gen_binary (AND, compute_mode,
					       gen_unary (NOT, compute_mode,
							  compute_mode,
							  gen_binary (ASHIFT,
								      compute_mode,
								      mask, pos)),
					       inner),
				   gen_binary (ASHIFT, compute_mode,
					       gen_binary (AND, compute_mode,
							   gen_lowpart_for_combine
							   (compute_mode,
							    SET_SRC (x)),
							   mask),
					       pos)));
    }

  return x;
}
\f
/* Return an RTX for a reference to LEN bits of INNER.  If POS_RTX is nonzero,
   it is an RTX that represents a variable starting position; otherwise,
   POS is the (constant) starting bit position (counted from the LSB).

   INNER may be a USE.  This will occur when we started with a bitfield
   that went outside the boundary of the object in memory, which is
   allowed on most machines.  To isolate this case, we produce a USE
   whose mode is wide enough and surround the MEM with it.  The only
   code that understands the USE is this routine.  If it is not removed,
   it will cause the resulting insn not to match.

   UNSIGNEDP is non-zero for an unsigned reference and zero for a
   signed reference.

   IN_DEST is non-zero if this is a reference in the destination of a
   SET.  This is used when a ZERO_ or SIGN_EXTRACT isn't needed.  If non-zero,
   a STRICT_LOW_PART will be used, if zero, ZERO_EXTEND or SIGN_EXTEND will
   be used.

   IN_COMPARE is non-zero if we are in a COMPARE.  This means that a
   ZERO_EXTRACT should be built even for bits starting at bit 0.

   MODE is the desired mode of the result (if IN_DEST == 0).

   The result is an RTX for the extraction or NULL_RTX if the target
   can't handle it.  */

static rtx
make_extraction (mode, inner, pos, pos_rtx, len,
		 unsignedp, in_dest, in_compare)
     enum machine_mode mode;
     rtx inner;
     int pos;
     rtx pos_rtx;
     int len;
     int unsignedp;
     int in_dest, in_compare;
{
  /* This mode describes the size of the storage area
     to fetch the overall value from.  Within that, we
     ignore the POS lowest bits, etc.  */
  enum machine_mode is_mode = GET_MODE (inner);
  enum machine_mode inner_mode;
  enum machine_mode wanted_inner_mode = byte_mode;
  enum machine_mode wanted_inner_reg_mode = word_mode;
  enum machine_mode pos_mode = word_mode;
  enum machine_mode extraction_mode = word_mode;
  enum machine_mode tmode = mode_for_size (len, MODE_INT, 1);
  int spans_byte = 0;
  rtx new = 0;
  rtx orig_pos_rtx = pos_rtx;
  int orig_pos;

  /* Get some information about INNER and get the innermost object.  */
  if (GET_CODE (inner) == USE)
    /* (use:SI (mem:QI foo)) stands for (mem:SI foo).  */
    /* We don't need to adjust the position because we set up the USE
       to pretend that it was a full-word object.  */
    spans_byte = 1, inner = XEXP (inner, 0);
  else if (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner))
    {
      /* If going from (subreg:SI (mem:QI ...)) to (mem:QI ...),
	 consider just the QI as the memory to extract from.
	 The subreg adds or removes high bits; its mode is
	 irrelevant to the meaning of this extraction,
	 since POS and LEN count from the lsb.  */
      if (GET_CODE (SUBREG_REG (inner)) == MEM)
	is_mode = GET_MODE (SUBREG_REG (inner));
      inner = SUBREG_REG (inner);
    }

  inner_mode = GET_MODE (inner);

  if (pos_rtx && GET_CODE (pos_rtx) == CONST_INT)
    pos = INTVAL (pos_rtx), pos_rtx = 0;

  /* See if this can be done without an extraction.  We never can if the
     width of the field is not the same as that of some integer mode.  For
     registers, we can only avoid the extraction if the position is at the
     low-order bit and this is either not in the destination or we have the
     appropriate STRICT_LOW_PART operation available.

     For MEM, we can avoid an extract if the field starts on an appropriate
     boundary and we can change the mode of the memory reference.  However,
     we cannot directly access the MEM if we have a USE and the underlying
     MEM is not TMODE.  This combination means that MEM was being used in a
     context where bits outside its mode were being referenced; that is only
     valid in bit-field insns.  */

  if (tmode != BLKmode
      && ! (spans_byte && inner_mode != tmode)
      && ((pos_rtx == 0 && (pos % BITS_PER_WORD) == 0
	   && GET_CODE (inner) != MEM
	   && (! in_dest
	       || (GET_CODE (inner) == REG
		   && (movstrict_optab->handlers[(int) tmode].insn_code
		       != CODE_FOR_nothing))))
	  || (GET_CODE (inner) == MEM && pos_rtx == 0
	      && (pos
		  % (STRICT_ALIGNMENT ? GET_MODE_ALIGNMENT (tmode)
		     : BITS_PER_UNIT)) == 0
	      /* We can't do this if we are widening INNER_MODE (it
		 may not be aligned, for one thing).  */
	      && GET_MODE_BITSIZE (inner_mode) >= GET_MODE_BITSIZE (tmode)
	      && (inner_mode == tmode
		  || (! mode_dependent_address_p (XEXP (inner, 0))
		      && ! MEM_VOLATILE_P (inner))))))
    {
      /* If INNER is a MEM, make a new MEM that encompasses just the desired
	 field.  If the original and current mode are the same, we need not
	 adjust the offset.  Otherwise, we do if bytes big endian.

	 If INNER is not a MEM, get a piece consisting of just the field
	 of interest (in this case POS % BITS_PER_WORD must be 0).  */

      if (GET_CODE (inner) == MEM)
	{
	  int offset;
	  /* POS counts from lsb, but make OFFSET count in memory order.  */
	  if (BYTES_BIG_ENDIAN)
	    offset = (GET_MODE_BITSIZE (is_mode) - len - pos) / BITS_PER_UNIT;
	  else
	    offset = pos / BITS_PER_UNIT;

	  new = gen_rtx_MEM (tmode, plus_constant (XEXP (inner, 0), offset));
	  RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (inner);
	  MEM_VOLATILE_P (new) = MEM_VOLATILE_P (inner);
	  MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (inner);
	}
      else if (GET_CODE (inner) == REG)
	{
	  /* We can't call gen_lowpart_for_combine here since we always want
	     a SUBREG and it would sometimes return a new hard register.  */
	  if (tmode != inner_mode)
	    new = gen_rtx_SUBREG (tmode, inner,
				  (WORDS_BIG_ENDIAN
				   && GET_MODE_SIZE (inner_mode) > UNITS_PER_WORD
				   ? (((GET_MODE_SIZE (inner_mode)
					- GET_MODE_SIZE (tmode))
				       / UNITS_PER_WORD)
				      - pos / BITS_PER_WORD)
				   : pos / BITS_PER_WORD));
	  else
	    new = inner;
	}
      else
	new = force_to_mode (inner, tmode,
			     len >= HOST_BITS_PER_WIDE_INT
			     ? GET_MODE_MASK (tmode)
			     : ((HOST_WIDE_INT) 1 << len) - 1,
			     NULL_RTX, 0);

      /* If this extraction is going into the destination of a SET,
	 make a STRICT_LOW_PART unless we made a MEM.  */

      if (in_dest)
	return (GET_CODE (new) == MEM ? new
		: (GET_CODE (new) != SUBREG
		   ? gen_rtx_CLOBBER (tmode, const0_rtx)
		   : gen_rtx_combine (STRICT_LOW_PART, VOIDmode, new)));

      /* Otherwise, sign- or zero-extend unless we already are in the
	 proper mode.  */

      return (mode == tmode ? new
	      : gen_rtx_combine (unsignedp ? ZERO_EXTEND : SIGN_EXTEND,
				 mode, new));
    }

  /* Unless this is a COMPARE or we have a funny memory reference,
     don't do anything with zero-extending field extracts starting at
     the low-order bit since they are simple AND operations.  */
  if (pos_rtx == 0 && pos == 0 && ! in_dest
      && ! in_compare && ! spans_byte && unsignedp)
    return 0;

5579 return 0;
5580
e7373556
RK
5581 /* Unless we are allowed to span bytes, reject this if we would be
5582 spanning bytes or if the position is not a constant and the length
5583 is not 1. In all other cases, we would only be going outside
5584 out object in cases when an original shift would have been
5585 undefined. */
5586 if (! spans_byte
5587 && ((pos_rtx == 0 && pos + len > GET_MODE_BITSIZE (is_mode))
5588 || (pos_rtx != 0 && len != 1)))
5589 return 0;
5590
  /* Get the mode to use should INNER not be a MEM, the mode for the position,
     and the mode for the result.  */
#ifdef HAVE_insv
  if (in_dest)
    {
      wanted_inner_reg_mode = insn_operand_mode[(int) CODE_FOR_insv][0];
      pos_mode = insn_operand_mode[(int) CODE_FOR_insv][2];
      extraction_mode = insn_operand_mode[(int) CODE_FOR_insv][3];
    }
#endif

#ifdef HAVE_extzv
  if (! in_dest && unsignedp)
    {
      wanted_inner_reg_mode = insn_operand_mode[(int) CODE_FOR_extzv][1];
      pos_mode = insn_operand_mode[(int) CODE_FOR_extzv][3];
      extraction_mode = insn_operand_mode[(int) CODE_FOR_extzv][0];
    }
#endif

#ifdef HAVE_extv
  if (! in_dest && ! unsignedp)
    {
      wanted_inner_reg_mode = insn_operand_mode[(int) CODE_FOR_extv][1];
      pos_mode = insn_operand_mode[(int) CODE_FOR_extv][3];
      extraction_mode = insn_operand_mode[(int) CODE_FOR_extv][0];
    }
#endif

  /* Never narrow an object, since that might not be safe.  */

  if (mode != VOIDmode
      && GET_MODE_SIZE (extraction_mode) < GET_MODE_SIZE (mode))
    extraction_mode = mode;

  if (pos_rtx && GET_MODE (pos_rtx) != VOIDmode
      && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx)))
    pos_mode = GET_MODE (pos_rtx);

  /* If this is not from memory, the desired mode is wanted_inner_reg_mode;
     if we have to change the mode of memory and cannot, the desired mode is
     EXTRACTION_MODE.  */
  if (GET_CODE (inner) != MEM)
    wanted_inner_mode = wanted_inner_reg_mode;
  else if (inner_mode != wanted_inner_mode
	   && (mode_dependent_address_p (XEXP (inner, 0))
	       || MEM_VOLATILE_P (inner)))
    wanted_inner_mode = extraction_mode;

  orig_pos = pos;

  if (BITS_BIG_ENDIAN)
    {
      /* POS is passed as if BITS_BIG_ENDIAN == 0, so we need to convert it to
	 BITS_BIG_ENDIAN style.  If position is constant, compute new
	 position.  Otherwise, build subtraction.
	 Note that POS is relative to the mode of the original argument.
	 If it's a MEM we need to recompute POS relative to that.
	 However, if we're extracting from (or inserting into) a register,
	 we want to recompute POS relative to wanted_inner_mode.  */
      int width = (GET_CODE (inner) == MEM
		   ? GET_MODE_BITSIZE (is_mode)
		   : GET_MODE_BITSIZE (wanted_inner_mode));

      if (pos_rtx == 0)
	pos = width - len - pos;
      else
	pos_rtx
	  = gen_rtx_combine (MINUS, GET_MODE (pos_rtx),
			     GEN_INT (width - len), pos_rtx);
      /* POS may be less than 0 now, but we check for that below.
	 Note that it can only be less than 0 if GET_CODE (inner) != MEM.  */
    }

  /* If INNER has a wider mode, make it smaller.  If this is a constant
     extract, try to adjust the byte to point to the byte containing
     the value.  */
  if (wanted_inner_mode != VOIDmode
      && GET_MODE_SIZE (wanted_inner_mode) < GET_MODE_SIZE (is_mode)
      && ((GET_CODE (inner) == MEM
	   && (inner_mode == wanted_inner_mode
	       || (! mode_dependent_address_p (XEXP (inner, 0))
		   && ! MEM_VOLATILE_P (inner))))))
    {
      int offset = 0;

      /* The computations below will be correct if the machine is big
	 endian in both bits and bytes or little endian in bits and bytes.
	 If it is mixed, we must adjust.  */

      /* If bytes are big endian and we had a paradoxical SUBREG, we must
	 adjust OFFSET to compensate.  */
      if (BYTES_BIG_ENDIAN
	  && ! spans_byte
	  && GET_MODE_SIZE (inner_mode) < GET_MODE_SIZE (is_mode))
	offset -= GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (inner_mode);

      /* If this is a constant position, we can move to the desired byte.  */
      if (pos_rtx == 0)
	{
	  offset += pos / BITS_PER_UNIT;
	  pos %= GET_MODE_BITSIZE (wanted_inner_mode);
	}

      if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN
	  && ! spans_byte
	  && is_mode != wanted_inner_mode)
	offset = (GET_MODE_SIZE (is_mode)
		  - GET_MODE_SIZE (wanted_inner_mode) - offset);

      if (offset != 0 || inner_mode != wanted_inner_mode)
	{
	  rtx newmem = gen_rtx_MEM (wanted_inner_mode,
				    plus_constant (XEXP (inner, 0), offset));
	  RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (inner);
	  MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (inner);
	  MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (inner);
	  inner = newmem;
	}
    }

  /* If INNER is not memory, we can always get it into the proper mode.  If we
     are changing its mode, POS must be a constant and smaller than the size
     of the new mode.  */
  else if (GET_CODE (inner) != MEM)
    {
      if (GET_MODE (inner) != wanted_inner_mode
	  && (pos_rtx != 0
	      || orig_pos + len > GET_MODE_BITSIZE (wanted_inner_mode)))
	return 0;

      inner = force_to_mode (inner, wanted_inner_mode,
			     pos_rtx
			     || len + orig_pos >= HOST_BITS_PER_WIDE_INT
			     ? GET_MODE_MASK (wanted_inner_mode)
			     : (((HOST_WIDE_INT) 1 << len) - 1) << orig_pos,
			     NULL_RTX, 0);
    }

  /* Adjust mode of POS_RTX, if needed.  If we want a wider mode, we
     have to zero extend.  Otherwise, we can just use a SUBREG.  */
  if (pos_rtx != 0
      && GET_MODE_SIZE (pos_mode) > GET_MODE_SIZE (GET_MODE (pos_rtx)))
    pos_rtx = gen_rtx_combine (ZERO_EXTEND, pos_mode, pos_rtx);
  else if (pos_rtx != 0
	   && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx)))
    pos_rtx = gen_lowpart_for_combine (pos_mode, pos_rtx);

  /* Make POS_RTX unless we already have it and it is correct.  If we don't
     have a POS_RTX but we do have an ORIG_POS_RTX, the latter must
     be a CONST_INT.  */
  if (pos_rtx == 0 && orig_pos_rtx != 0 && INTVAL (orig_pos_rtx) == pos)
    pos_rtx = orig_pos_rtx;

  else if (pos_rtx == 0)
    pos_rtx = GEN_INT (pos);

  /* Make the required operation.  See if we can use existing rtx.  */
  new = gen_rtx_combine (unsignedp ? ZERO_EXTRACT : SIGN_EXTRACT,
			 extraction_mode, inner, GEN_INT (len), pos_rtx);
  if (! in_dest)
    new = gen_lowpart_for_combine (mode, new);

  return new;
}
\f
/* See if X contains an ASHIFT of COUNT or more bits that can be commuted
   with any other operations in X.  Return X without that shift if so.  */
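/* For example, with COUNT == 2, (plus (ashift X (const_int 5))
   (const_int 12)) becomes (plus (ashift X (const_int 3)) (const_int 3)),
   the whole expression in effect having been divided by 4.  */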

static rtx
extract_left_shift (x, count)
     rtx x;
     int count;
{
  enum rtx_code code = GET_CODE (x);
  enum machine_mode mode = GET_MODE (x);
  rtx tem;

  switch (code)
    {
    case ASHIFT:
      /* This is the shift itself.  If it is wide enough, we will return
	 either the value being shifted if the shift count is equal to
	 COUNT or a shift for the difference.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
	  && INTVAL (XEXP (x, 1)) >= count)
	return simplify_shift_const (NULL_RTX, ASHIFT, mode, XEXP (x, 0),
				     INTVAL (XEXP (x, 1)) - count);
      break;

    case NEG:  case NOT:
      if ((tem = extract_left_shift (XEXP (x, 0), count)) != 0)
	return gen_unary (code, mode, mode, tem);

      break;

    case PLUS:  case IOR:  case XOR:  case AND:
      /* If we can safely shift this constant and we find the inner shift,
	 make a new operation.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
	  && (INTVAL (XEXP (x, 1)) & ((((HOST_WIDE_INT) 1 << count)) - 1)) == 0
	  && (tem = extract_left_shift (XEXP (x, 0), count)) != 0)
	return gen_binary (code, mode, tem,
			   GEN_INT (INTVAL (XEXP (x, 1)) >> count));

      break;

    default:
      break;
    }

  return 0;
}
5804\f
/* Look at the expression rooted at X.  Look for expressions
   equivalent to ZERO_EXTRACT, SIGN_EXTRACT, ZERO_EXTEND, SIGN_EXTEND.
   Form these expressions.

   Return the new rtx, usually just X.

   Also, for machines like the Vax that don't have logical shift insns,
   try to convert logical to arithmetic shift operations in cases where
   they are equivalent.  This undoes the canonicalizations to logical
   shifts done elsewhere.

   We try, as much as possible, to re-use rtl expressions to save memory.

   IN_CODE says what kind of expression we are processing.  Normally, it is
   SET.  In a memory address (inside a MEM, PLUS or MINUS, the latter two
   being kludges), it is MEM.  When processing the arguments of a comparison
   or a COMPARE against zero, it is COMPARE.  */
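
/* Two typical transformations, shown for illustration on a hypothetical
   operand X:

     (and (lshiftrt X (const_int 8)) (const_int 255))
	==> (zero_extract X (const_int 8) (const_int 8))

   and, inside an address (IN_CODE == MEM),

     (ashift X (const_int 2)) ==> (mult X (const_int 4)).  */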

static rtx
make_compound_operation (x, in_code)
     rtx x;
     enum rtx_code in_code;
{
  enum rtx_code code = GET_CODE (x);
  enum machine_mode mode = GET_MODE (x);
  int mode_width = GET_MODE_BITSIZE (mode);
  rtx rhs, lhs;
  enum rtx_code next_code;
  int i;
  rtx new = 0;
  rtx tem;
  char *fmt;

  /* Select the code to be used in recursive calls.  Once we are inside an
     address, we stay there.  If we have a comparison, set to COMPARE,
     but once inside, go back to our default of SET.  */

  next_code = (code == MEM || code == PLUS || code == MINUS ? MEM
	       : ((code == COMPARE || GET_RTX_CLASS (code) == '<')
		  && XEXP (x, 1) == const0_rtx) ? COMPARE
	       : in_code == COMPARE ? SET : in_code);

  /* Process depending on the code of this operation.  If NEW is set
     non-zero, it will be returned.  */

  switch (code)
    {
    case ASHIFT:
      /* Convert shifts by constants into multiplications if inside
	 an address.  */
      if (in_code == MEM && GET_CODE (XEXP (x, 1)) == CONST_INT
	  && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
	  && INTVAL (XEXP (x, 1)) >= 0)
	{
	  new = make_compound_operation (XEXP (x, 0), next_code);
	  new = gen_rtx_combine (MULT, mode, new,
				 GEN_INT ((HOST_WIDE_INT) 1
					  << INTVAL (XEXP (x, 1))));
	}
      break;

    case AND:
      /* If the second operand is not a constant, we can't do anything
	 with it.  */
      if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	break;

      /* If the constant is a power of two minus one and the first operand
	 is a logical right shift, make an extraction.  */
      if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
	  && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
	{
	  new = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code);
	  new = make_extraction (mode, new, 0, XEXP (XEXP (x, 0), 1), i, 1,
				 0, in_code == COMPARE);
	}

      /* Same as previous, but for (subreg (lshiftrt ...)) in first op.  */
      else if (GET_CODE (XEXP (x, 0)) == SUBREG
	       && subreg_lowpart_p (XEXP (x, 0))
	       && GET_CODE (SUBREG_REG (XEXP (x, 0))) == LSHIFTRT
	       && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
	{
	  new = make_compound_operation (XEXP (SUBREG_REG (XEXP (x, 0)), 0),
					 next_code);
	  new = make_extraction (GET_MODE (SUBREG_REG (XEXP (x, 0))), new, 0,
				 XEXP (SUBREG_REG (XEXP (x, 0)), 1), i, 1,
				 0, in_code == COMPARE);
	}
      /* Same as previous, but for (xor/ior (lshiftrt...) (lshiftrt...)).  */
      else if ((GET_CODE (XEXP (x, 0)) == XOR
		|| GET_CODE (XEXP (x, 0)) == IOR)
	       && GET_CODE (XEXP (XEXP (x, 0), 0)) == LSHIFTRT
	       && GET_CODE (XEXP (XEXP (x, 0), 1)) == LSHIFTRT
	       && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
	{
	  /* Apply the distributive law, and then try to make extractions.  */
	  new = gen_rtx_combine (GET_CODE (XEXP (x, 0)), mode,
				 gen_rtx_AND (mode, XEXP (XEXP (x, 0), 0),
					      XEXP (x, 1)),
				 gen_rtx_AND (mode, XEXP (XEXP (x, 0), 1),
					      XEXP (x, 1)));
	  new = make_compound_operation (new, in_code);
	}

      /* If we have (and (rotate X C) M) and C is larger than the number
	 of bits in M, this is an extraction.  */

      else if (GET_CODE (XEXP (x, 0)) == ROTATE
	       && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
	       && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0
	       && i <= INTVAL (XEXP (XEXP (x, 0), 1)))
	{
	  new = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code);
	  new = make_extraction (mode, new,
				 (GET_MODE_BITSIZE (mode)
				  - INTVAL (XEXP (XEXP (x, 0), 1))),
				 NULL_RTX, i, 1, 0, in_code == COMPARE);
	}

      /* On machines without logical shifts, if the operand of the AND is
	 a logical shift and our mask turns off all the propagated sign
	 bits, we can replace the logical shift with an arithmetic shift.  */
      else if (ashr_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
	       && (lshr_optab->handlers[(int) mode].insn_code
		   == CODE_FOR_nothing)
	       && GET_CODE (XEXP (x, 0)) == LSHIFTRT
	       && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
	       && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
	       && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT
	       && mode_width <= HOST_BITS_PER_WIDE_INT)
	{
	  unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);

	  mask >>= INTVAL (XEXP (XEXP (x, 0), 1));
	  if ((INTVAL (XEXP (x, 1)) & ~mask) == 0)
	    SUBST (XEXP (x, 0),
		   gen_rtx_combine (ASHIFTRT, mode,
				    make_compound_operation (XEXP (XEXP (x, 0), 0),
							     next_code),
				    XEXP (XEXP (x, 0), 1)));
	}

      /* If the constant is one less than a power of two, this might be
	 representable by an extraction even if no shift is present.
	 If it doesn't end up being a ZERO_EXTEND, we will ignore it unless
	 we are in a COMPARE.  */
      else if ((i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
	new = make_extraction (mode,
			       make_compound_operation (XEXP (x, 0),
							next_code),
			       0, NULL_RTX, i, 1, 0, in_code == COMPARE);

      /* If we are in a comparison and this is an AND with a power of two,
	 convert this into the appropriate bit extract.  */
      else if (in_code == COMPARE
	       && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0)
	new = make_extraction (mode,
			       make_compound_operation (XEXP (x, 0),
							next_code),
			       i, NULL_RTX, 1, 1, 0, 1);

      break;

    case LSHIFTRT:
      /* If the sign bit is known to be zero, replace this with an
	 arithmetic shift.  */
      if (ashr_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing
	  && lshr_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
	  && mode_width <= HOST_BITS_PER_WIDE_INT
	  && (nonzero_bits (XEXP (x, 0), mode) & (1 << (mode_width - 1))) == 0)
	{
	  new = gen_rtx_combine (ASHIFTRT, mode,
				 make_compound_operation (XEXP (x, 0),
							  next_code),
				 XEXP (x, 1));
	  break;
	}

      /* ... fall through ... */

    case ASHIFTRT:
      lhs = XEXP (x, 0);
      rhs = XEXP (x, 1);

      /* If we have (ashiftrt (ashift foo C1) C2) with C2 >= C1,
	 this is a SIGN_EXTRACT.  */
      if (GET_CODE (rhs) == CONST_INT
	  && GET_CODE (lhs) == ASHIFT
	  && GET_CODE (XEXP (lhs, 1)) == CONST_INT
	  && INTVAL (rhs) >= INTVAL (XEXP (lhs, 1)))
	{
	  new = make_compound_operation (XEXP (lhs, 0), next_code);
	  new = make_extraction (mode, new,
				 INTVAL (rhs) - INTVAL (XEXP (lhs, 1)),
				 NULL_RTX, mode_width - INTVAL (rhs),
				 code == LSHIFTRT, 0, in_code == COMPARE);
	}

      /* See if we have operations between an ASHIFTRT and an ASHIFT.
	 If so, try to merge the shifts into a SIGN_EXTEND.  We could
	 also do this for some cases of SIGN_EXTRACT, but it doesn't
	 seem worth the effort; the case checked for occurs on Alpha.  */

      if (GET_RTX_CLASS (GET_CODE (lhs)) != 'o'
	  && ! (GET_CODE (lhs) == SUBREG
		&& (GET_RTX_CLASS (GET_CODE (SUBREG_REG (lhs))) == 'o'))
	  && GET_CODE (rhs) == CONST_INT
	  && INTVAL (rhs) < HOST_BITS_PER_WIDE_INT
	  && (new = extract_left_shift (lhs, INTVAL (rhs))) != 0)
	new = make_extraction (mode, make_compound_operation (new, next_code),
			       0, NULL_RTX, mode_width - INTVAL (rhs),
			       code == LSHIFTRT, 0, in_code == COMPARE);

      break;

    case SUBREG:
      /* Call ourselves recursively on the inner expression.  If we are
	 narrowing the object and it has a different RTL code from
	 what it originally did, do this SUBREG as a force_to_mode.  */

      tem = make_compound_operation (SUBREG_REG (x), in_code);
      if (GET_CODE (tem) != GET_CODE (SUBREG_REG (x))
	  && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (tem))
	  && subreg_lowpart_p (x))
	{
	  rtx newer = force_to_mode (tem, mode,
				     GET_MODE_MASK (mode), NULL_RTX, 0);

	  /* If we have something other than a SUBREG, we might have
	     done an expansion, so rerun ourselves.  */
	  if (GET_CODE (newer) != SUBREG)
	    newer = make_compound_operation (newer, in_code);

	  return newer;
	}
      break;

    default:
      break;
    }

  if (new)
    {
      x = gen_lowpart_for_combine (mode, new);
      code = GET_CODE (x);
    }

  /* Now recursively process each operand of this operation.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    if (fmt[i] == 'e')
      {
	new = make_compound_operation (XEXP (x, i), next_code);
	SUBST (XEXP (x, i), new);
      }

  return x;
}
\f
/* Given M, see if it is a value that would select a field of bits
   within an item, but not the entire word.  Return -1 if not.
   Otherwise, return the starting position of the field, where 0 is the
   low-order bit.

   *PLEN is set to the length of the field.  */
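
/* For example: M == 0x78 (binary 1111000) selects a 4-bit field starting
   at bit 3, so we return 3 and set *PLEN to 4.  M == 0x5 does not select
   a contiguous field, so we return -1.  */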

static int
get_pos_from_mask (m, plen)
     unsigned HOST_WIDE_INT m;
     int *plen;
{
  /* Get the bit number of the first 1 bit from the right, -1 if none.  */
  int pos = exact_log2 (m & - m);

  if (pos < 0)
    return -1;

  /* Now shift off the low-order zero bits and see if we have a power of
     two minus 1.  */
  *plen = exact_log2 ((m >> pos) + 1);

  if (*plen <= 0)
    return -1;

  return pos;
}
\f
/* See if X can be simplified knowing that we will only refer to it in
   MODE and will only refer to those bits that are nonzero in MASK.
   If other bits are being computed or if masking operations are done
   that select a superset of the bits in MASK, they can sometimes be
   ignored.

   Return a possibly simplified expression, but always convert X to
   MODE.  If X is a CONST_INT, AND the CONST_INT with MASK.

   Also, if REG is non-zero and X is a register equal in value to REG,
   replace X with REG.

   If JUST_SELECT is nonzero, don't optimize by noticing that bits in MASK
   are all off in X.  This is used when X will be complemented, by either
   NOT, NEG, or XOR.  */
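
/* A small illustration: in QImode with MASK == 0xff, forcing the
   CONST_INT 0x1ff to that mode yields (const_int -1), since
   0x1ff & 0xff == 0xff and the result is then sign extended into the
   host-wide representation of QImode -1.  */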

static rtx
force_to_mode (x, mode, mask, reg, just_select)
     rtx x;
     enum machine_mode mode;
     unsigned HOST_WIDE_INT mask;
     rtx reg;
     int just_select;
{
  enum rtx_code code = GET_CODE (x);
  int next_select = just_select || code == XOR || code == NOT || code == NEG;
  enum machine_mode op_mode;
  unsigned HOST_WIDE_INT fuller_mask, nonzero;
  rtx op0, op1, temp;

  /* If this is a CALL or ASM_OPERANDS, don't do anything.  Some of the
     code below will do the wrong thing since the mode of such an
     expression is VOIDmode.

     Also do nothing if X is a CLOBBER; this can happen if X was
     the return value from a call to gen_lowpart_for_combine.  */
  if (code == CALL || code == ASM_OPERANDS || code == CLOBBER)
    return x;

  /* We want to perform the operation in its present mode unless we know
     that the operation is valid in MODE, in which case we do the operation
     in MODE.  */
  op_mode = ((GET_MODE_CLASS (mode) == GET_MODE_CLASS (GET_MODE (x))
	      && code_to_optab[(int) code] != 0
	      && (code_to_optab[(int) code]->handlers[(int) mode].insn_code
		  != CODE_FOR_nothing))
	     ? mode : GET_MODE (x));

  /* It is not valid to do a right-shift in a narrower mode
     than the one it came in with.  */
  if ((code == LSHIFTRT || code == ASHIFTRT)
      && GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (GET_MODE (x)))
    op_mode = GET_MODE (x);

  /* Truncate MASK to fit OP_MODE.  */
  if (op_mode)
    mask &= GET_MODE_MASK (op_mode);

  /* When we have an arithmetic operation, or a shift whose count we
     do not know, we need to assume that all bits up to the highest-order
     bit in MASK will be needed.  This is how we form such a mask.  */
  if (op_mode)
    fuller_mask = (GET_MODE_BITSIZE (op_mode) >= HOST_BITS_PER_WIDE_INT
		   ? GET_MODE_MASK (op_mode)
		   : ((HOST_WIDE_INT) 1 << (floor_log2 (mask) + 1)) - 1);
  else
    fuller_mask = ~ (HOST_WIDE_INT) 0;
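
  /* For example, if MASK is 0x0c, FULLER_MASK becomes 0x0f: a carry out
     of bit 0 or bit 1 can propagate into the bits selected by MASK, so
     those lower bits must be assumed needed as well.  */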

  /* Determine what bits of X are guaranteed to be (non)zero.  */
  nonzero = nonzero_bits (x, mode);

  /* If none of the bits in X are needed, return a zero.  */
  if (! just_select && (nonzero & mask) == 0)
    return const0_rtx;

  /* If X is a CONST_INT, return a new one.  Do this here since the
     test below will fail.  */
  if (GET_CODE (x) == CONST_INT)
    {
      HOST_WIDE_INT cval = INTVAL (x) & mask;
      int width = GET_MODE_BITSIZE (mode);

      /* If MODE is narrower than HOST_WIDE_INT and CVAL is a negative
	 number, sign extend it.  */
      if (width > 0 && width < HOST_BITS_PER_WIDE_INT
	  && (cval & ((HOST_WIDE_INT) 1 << (width - 1))) != 0)
	cval |= (HOST_WIDE_INT) -1 << width;

      return GEN_INT (cval);
    }

  /* If X is narrower than MODE and we want all the bits in X's mode, just
     get X in the proper mode.  */
  if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (mode)
      && (GET_MODE_MASK (GET_MODE (x)) & ~ mask) == 0)
    return gen_lowpart_for_combine (mode, x);

  /* If we aren't changing the mode, X is not a SUBREG, and all zero bits in
     MASK are already known to be zero in X, we need not do anything.  */
  if (GET_MODE (x) == mode && code != SUBREG && (~ mask & nonzero) == 0)
    return x;

  switch (code)
    {
    case CLOBBER:
      /* If X is a (clobber (const_int)), return it since we know we are
	 generating something that won't match.  */
      return x;

    case USE:
      /* X is a (use (mem ..)) that was made from a bit-field extraction that
	 spanned the boundary of the MEM.  If we are now masking so it is
	 within that boundary, we don't need the USE any more.  */
      if (! BITS_BIG_ENDIAN
	  && (mask & ~ GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
	return force_to_mode (XEXP (x, 0), mode, mask, reg, next_select);
      break;

    case SIGN_EXTEND:
    case ZERO_EXTEND:
    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      x = expand_compound_operation (x);
      if (GET_CODE (x) != code)
	return force_to_mode (x, mode, mask, reg, next_select);
      break;

    case REG:
      if (reg != 0 && (rtx_equal_p (get_last_value (reg), x)
		       || rtx_equal_p (reg, get_last_value (x))))
	x = reg;
      break;

    case SUBREG:
      if (subreg_lowpart_p (x)
	  /* We can ignore the effect of this SUBREG if it narrows the mode or
	     if the constant masks to zero all the bits the mode doesn't
	     have.  */
	  && ((GET_MODE_SIZE (GET_MODE (x))
	       < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
	      || (0 == (mask
			& GET_MODE_MASK (GET_MODE (x))
			& ~ GET_MODE_MASK (GET_MODE (SUBREG_REG (x)))))))
	return force_to_mode (SUBREG_REG (x), mode, mask, reg, next_select);
      break;

    case AND:
      /* If this is an AND with a constant, convert it into an AND
	 whose constant is the AND of that constant with MASK.  If it
	 remains an AND of MASK, delete it since it is redundant.  */

      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
	{
	  x = simplify_and_const_int (x, op_mode, XEXP (x, 0),
				      mask & INTVAL (XEXP (x, 1)));

	  /* If X is still an AND, see if it is an AND with a mask that
	     is just some low-order bits.  If so, and it is MASK, we don't
	     need it.  */

	  if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT
	      && INTVAL (XEXP (x, 1)) == mask)
	    x = XEXP (x, 0);

	  /* If it remains an AND, try making another AND with the bits
	     in the mode mask that aren't in MASK turned on.  If the
	     constant in the AND is wide enough, this might make a
	     cheaper constant.  */

	  if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT
	      && GET_MODE_MASK (GET_MODE (x)) != mask
	      && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT)
	    {
	      HOST_WIDE_INT cval = (INTVAL (XEXP (x, 1))
				    | (GET_MODE_MASK (GET_MODE (x)) & ~ mask));
	      int width = GET_MODE_BITSIZE (GET_MODE (x));
	      rtx y;

	      /* If MODE is narrower than HOST_WIDE_INT and CVAL is a negative
		 number, sign extend it.  */
	      if (width > 0 && width < HOST_BITS_PER_WIDE_INT
		  && (cval & ((HOST_WIDE_INT) 1 << (width - 1))) != 0)
		cval |= (HOST_WIDE_INT) -1 << width;

	      y = gen_binary (AND, GET_MODE (x), XEXP (x, 0), GEN_INT (cval));
	      if (rtx_cost (y, SET) < rtx_cost (x, SET))
		x = y;
	    }

	  break;
	}

      goto binop;

    case PLUS:
      /* In (and (plus FOO C1) M), if M is a mask that just turns off
	 low-order bits (as in an alignment operation) and FOO is already
	 aligned to that boundary, mask C1 to that boundary as well.
	 This may eliminate that PLUS and, later, the AND.  */
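
      /* For instance, if FOO is known to be 8-byte aligned, then
	 (and (plus FOO (const_int 9)) (const_int -8)) can mask the
	 constant too, giving (plus FOO (const_int 8)).  */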

      {
	int width = GET_MODE_BITSIZE (mode);
	unsigned HOST_WIDE_INT smask = mask;

	/* If MODE is narrower than HOST_WIDE_INT and mask is a negative
	   number, sign extend it.  */

	if (width < HOST_BITS_PER_WIDE_INT
	    && (smask & ((HOST_WIDE_INT) 1 << (width - 1))) != 0)
	  smask |= (HOST_WIDE_INT) -1 << width;

	if (GET_CODE (XEXP (x, 1)) == CONST_INT
	    && exact_log2 (- smask) >= 0)
	  {
#ifdef STACK_BIAS
	    if (STACK_BIAS
		&& (XEXP (x, 0) == stack_pointer_rtx
		    || XEXP (x, 0) == frame_pointer_rtx))
	      {
		int sp_alignment = STACK_BOUNDARY / BITS_PER_UNIT;
		unsigned HOST_WIDE_INT sp_mask = GET_MODE_MASK (mode);

		sp_mask &= ~ (sp_alignment - 1);
		if ((sp_mask & ~ mask) == 0
		    && ((INTVAL (XEXP (x, 1)) - STACK_BIAS) & ~ mask) != 0)
		  return force_to_mode (plus_constant (XEXP (x, 0),
						       ((INTVAL (XEXP (x, 1)) -
							 STACK_BIAS) & mask)
						       + STACK_BIAS),
					mode, mask, reg, next_select);
	      }
#endif
	    if ((nonzero_bits (XEXP (x, 0), mode) & ~ mask) == 0
		&& (INTVAL (XEXP (x, 1)) & ~ mask) != 0)
	      return force_to_mode (plus_constant (XEXP (x, 0),
						   INTVAL (XEXP (x, 1)) & mask),
				    mode, mask, reg, next_select);
	  }
      }

      /* ... fall through ... */

    case MINUS:
    case MULT:
      /* For PLUS, MINUS and MULT, we need any bits less significant than the
	 most significant bit in MASK since carries from those bits will
	 affect the bits we are interested in.  */
      mask = fuller_mask;
      goto binop;

    case IOR:
    case XOR:
      /* If X is (ior (lshiftrt FOO C1) C2), try to commute the IOR and
	 LSHIFTRT so we end up with an (and (lshiftrt (ior ...) ...) ...)
	 operation which may be a bitfield extraction.  Ensure that the
	 constant we form is not wider than the mode of X.  */

      if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
	  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
	  && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
	  && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT
	  && GET_CODE (XEXP (x, 1)) == CONST_INT
	  && ((INTVAL (XEXP (XEXP (x, 0), 1))
	       + floor_log2 (INTVAL (XEXP (x, 1))))
	      < GET_MODE_BITSIZE (GET_MODE (x)))
	  && (INTVAL (XEXP (x, 1))
	      & ~ nonzero_bits (XEXP (x, 0), GET_MODE (x))) == 0)
	{
	  temp = GEN_INT ((INTVAL (XEXP (x, 1)) & mask)
			  << INTVAL (XEXP (XEXP (x, 0), 1)));
	  temp = gen_binary (GET_CODE (x), GET_MODE (x),
			     XEXP (XEXP (x, 0), 0), temp);
	  x = gen_binary (LSHIFTRT, GET_MODE (x), temp,
			  XEXP (XEXP (x, 0), 1));
	  return force_to_mode (x, mode, mask, reg, next_select);
	}

    binop:
      /* For most binary operations, just propagate into the operation and
	 change the mode if we have an operation of that mode.  */

      op0 = gen_lowpart_for_combine (op_mode,
				     force_to_mode (XEXP (x, 0), mode, mask,
						    reg, next_select));
      op1 = gen_lowpart_for_combine (op_mode,
				     force_to_mode (XEXP (x, 1), mode, mask,
						    reg, next_select));

      /* If OP1 is a CONST_INT and X is an IOR or XOR, clear bits outside
	 MASK since OP1 might have been sign-extended but we never want
	 to turn on extra bits, since combine might have previously relied
	 on them being off.  */
      if (GET_CODE (op1) == CONST_INT && (code == IOR || code == XOR)
	  && (INTVAL (op1) & mask) != 0)
	op1 = GEN_INT (INTVAL (op1) & mask);

      if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
	x = gen_binary (code, op_mode, op0, op1);
      break;

    case ASHIFT:
      /* For left shifts, do the same, but just for the first operand.
	 However, we cannot do anything with shifts where we cannot
	 guarantee that the counts are smaller than the size of the mode
	 because such a count will have a different meaning in a
	 wider mode.  */

      if (! (GET_CODE (XEXP (x, 1)) == CONST_INT
	     && INTVAL (XEXP (x, 1)) >= 0
	     && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (mode))
	  && ! (GET_MODE (XEXP (x, 1)) != VOIDmode
		&& (nonzero_bits (XEXP (x, 1), GET_MODE (XEXP (x, 1)))
		    < (unsigned HOST_WIDE_INT) GET_MODE_BITSIZE (mode))))
	break;

      /* If the shift count is a constant and we can do arithmetic in
	 the mode of the shift, refine which bits we need.  Otherwise, use the
	 conservative form of the mask.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
	  && INTVAL (XEXP (x, 1)) >= 0
	  && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (op_mode)
	  && GET_MODE_BITSIZE (op_mode) <= HOST_BITS_PER_WIDE_INT)
	mask >>= INTVAL (XEXP (x, 1));
      else
	mask = fuller_mask;

      op0 = gen_lowpart_for_combine (op_mode,
				     force_to_mode (XEXP (x, 0), op_mode,
						    mask, reg, next_select));

      if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0))
	x = gen_binary (code, op_mode, op0, XEXP (x, 1));
      break;

    case LSHIFTRT:
      /* Here we can only do something if the shift count is a constant,
	 this shift constant is valid for the host, and we can do arithmetic
	 in OP_MODE.  */

      if (GET_CODE (XEXP (x, 1)) == CONST_INT
	  && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
	  && GET_MODE_BITSIZE (op_mode) <= HOST_BITS_PER_WIDE_INT)
	{
	  rtx inner = XEXP (x, 0);

	  /* Select the mask of the bits we need for the shift operand.  */
	  mask <<= INTVAL (XEXP (x, 1));

	  /* We can only change the mode of the shift if we can do arithmetic
	     in the mode of the shift and MASK is no wider than the width of
	     OP_MODE.  */
	  if (GET_MODE_BITSIZE (op_mode) > HOST_BITS_PER_WIDE_INT
	      || (mask & ~ GET_MODE_MASK (op_mode)) != 0)
	    op_mode = GET_MODE (x);

	  inner = force_to_mode (inner, op_mode, mask, reg, next_select);

	  if (GET_MODE (x) != op_mode || inner != XEXP (x, 0))
	    x = gen_binary (LSHIFTRT, op_mode, inner, XEXP (x, 1));
	}

      /* If we have (and (lshiftrt FOO C1) C2) where the combination of the
	 shift and AND produces only copies of the sign bit (C2 is one less
	 than a power of two), we can do this with just a shift.  */

      if (GET_CODE (x) == LSHIFTRT
	  && GET_CODE (XEXP (x, 1)) == CONST_INT
	  && ((INTVAL (XEXP (x, 1))
	       + num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0))))
	      >= GET_MODE_BITSIZE (GET_MODE (x)))
	  && exact_log2 (mask + 1) >= 0
	  && (num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
	      >= exact_log2 (mask + 1)))
	x = gen_binary (LSHIFTRT, GET_MODE (x), XEXP (x, 0),
			GEN_INT (GET_MODE_BITSIZE (GET_MODE (x))
				 - exact_log2 (mask + 1)));
      break;

    case ASHIFTRT:
      /* If we are just looking for the sign bit, we don't need this shift at
	 all, even if it has a variable count.  */
      if (GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
	  && (mask == ((HOST_WIDE_INT) 1
		       << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
	return force_to_mode (XEXP (x, 0), mode, mask, reg, next_select);

      /* If this is a shift by a constant, get a mask that contains those bits
	 that are not copies of the sign bit.  We then have two cases:  If
	 MASK only includes those bits, this can be a logical shift, which may
	 allow simplifications.  If MASK is a single-bit field not within
	 those bits, we are requesting a copy of the sign bit and hence can
	 shift the sign bit to the appropriate location.  */

      if (GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) >= 0
	  && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
	{
	  int i = -1;

	  /* If the considered data is wider than HOST_WIDE_INT, we can't
	     represent a mask for all its bits in a single scalar.
	     But we only care about the lower bits, so calculate these.  */

	  if (GET_MODE_BITSIZE (GET_MODE (x)) > HOST_BITS_PER_WIDE_INT)
	    {
	      nonzero = ~ (HOST_WIDE_INT) 0;

	      /* GET_MODE_BITSIZE (GET_MODE (x)) - INTVAL (XEXP (x, 1))
		 is the number of bits a full-width mask would have set.
		 We need only shift if these are fewer than nonzero can
		 hold.  If not, we must keep all bits set in nonzero.  */

	      if (GET_MODE_BITSIZE (GET_MODE (x)) - INTVAL (XEXP (x, 1))
		  < HOST_BITS_PER_WIDE_INT)
		nonzero >>= INTVAL (XEXP (x, 1))
			    + HOST_BITS_PER_WIDE_INT
			    - GET_MODE_BITSIZE (GET_MODE (x));
	    }
	  else
	    {
	      nonzero = GET_MODE_MASK (GET_MODE (x));
	      nonzero >>= INTVAL (XEXP (x, 1));
	    }

	  if ((mask & ~ nonzero) == 0
	      || (i = exact_log2 (mask)) >= 0)
	    {
	      x = simplify_shift_const
		(x, LSHIFTRT, GET_MODE (x), XEXP (x, 0),
		 i < 0 ? INTVAL (XEXP (x, 1))
		 : GET_MODE_BITSIZE (GET_MODE (x)) - 1 - i);

	      if (GET_CODE (x) != ASHIFTRT)
		return force_to_mode (x, mode, mask, reg, next_select);
	    }
	}

      /* If MASK is 1, convert this to a LSHIFTRT.  This can be done
	 even if the shift count isn't a constant.  */
      if (mask == 1)
	x = gen_binary (LSHIFTRT, GET_MODE (x), XEXP (x, 0), XEXP (x, 1));

      /* If this is a sign-extension operation that just affects bits
	 we don't care about, remove it.  Be sure the call above returned
	 something that is still a shift.  */

      if ((GET_CODE (x) == LSHIFTRT || GET_CODE (x) == ASHIFTRT)
	  && GET_CODE (XEXP (x, 1)) == CONST_INT
	  && INTVAL (XEXP (x, 1)) >= 0
	  && (INTVAL (XEXP (x, 1))
	      <= GET_MODE_BITSIZE (GET_MODE (x)) - (floor_log2 (mask) + 1))
	  && GET_CODE (XEXP (x, 0)) == ASHIFT
	  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
	  && INTVAL (XEXP (XEXP (x, 0), 1)) == INTVAL (XEXP (x, 1)))
	return force_to_mode (XEXP (XEXP (x, 0), 0), mode, mask,
			      reg, next_select);

      break;

    case ROTATE:
    case ROTATERT:
      /* If the shift count is constant and we can do computations
	 in the mode of X, compute where the bits we care about are.
	 Otherwise, we can't do anything.  Don't change the mode of
	 the shift or propagate MODE into the shift, though.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
	  && INTVAL (XEXP (x, 1)) >= 0)
	{
	  temp = simplify_binary_operation (code == ROTATE ? ROTATERT : ROTATE,
					    GET_MODE (x), GEN_INT (mask),
					    XEXP (x, 1));
	  if (temp && GET_CODE (temp) == CONST_INT)
	    SUBST (XEXP (x, 0),
		   force_to_mode (XEXP (x, 0), GET_MODE (x),
				  INTVAL (temp), reg, next_select));
	}
      break;

    case NEG:
      /* If we just want the low-order bit, the NEG isn't needed since it
	 won't change the low-order bit.  */
      if (mask == 1)
	return force_to_mode (XEXP (x, 0), mode, mask, reg, just_select);

      /* We need any bits less significant than the most significant bit in
	 MASK since carries from those bits will affect the bits we are
	 interested in.  */
      mask = fuller_mask;
      goto unop;

    case NOT:
      /* (not FOO) is (xor FOO CONST), so if FOO is an LSHIFTRT, we can do the
	 same as the XOR case above.  Ensure that the constant we form is not
	 wider than the mode of X.  */

      if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
	  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
	  && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
	  && (INTVAL (XEXP (XEXP (x, 0), 1)) + floor_log2 (mask)
	      < GET_MODE_BITSIZE (GET_MODE (x)))
	  && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT)
	{
	  temp = GEN_INT (mask << INTVAL (XEXP (XEXP (x, 0), 1)));
	  temp = gen_binary (XOR, GET_MODE (x), XEXP (XEXP (x, 0), 0), temp);
	  x = gen_binary (LSHIFTRT, GET_MODE (x), temp, XEXP (XEXP (x, 0), 1));

	  return force_to_mode (x, mode, mask, reg, next_select);
	}

      /* (and (not FOO) CONST) is (not (or FOO (not CONST))), so we must
	 use the full mask inside the NOT.  */
      mask = fuller_mask;

    unop:
      op0 = gen_lowpart_for_combine (op_mode,
				     force_to_mode (XEXP (x, 0), mode, mask,
						    reg, next_select));
      if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0))
	x = gen_unary (code, op_mode, op_mode, op0);
      break;

    case NE:
      /* (and (ne FOO 0) CONST) can be (and FOO CONST) if CONST is included
	 in STORE_FLAG_VALUE and FOO has a single bit that might be nonzero,
	 which is equal to STORE_FLAG_VALUE.  */
      if ((mask & ~ STORE_FLAG_VALUE) == 0 && XEXP (x, 1) == const0_rtx
	  && exact_log2 (nonzero_bits (XEXP (x, 0), mode)) >= 0
	  && nonzero_bits (XEXP (x, 0), mode) == STORE_FLAG_VALUE)
	return force_to_mode (XEXP (x, 0), mode, mask, reg, next_select);

      break;

    case IF_THEN_ELSE:
      /* We have no way of knowing if the IF_THEN_ELSE can itself be
	 written in a narrower mode.  We play it safe and do not do so.  */

      SUBST (XEXP (x, 1),
	     gen_lowpart_for_combine (GET_MODE (x),
				      force_to_mode (XEXP (x, 1), mode,
						     mask, reg, next_select)));
      SUBST (XEXP (x, 2),
	     gen_lowpart_for_combine (GET_MODE (x),
				      force_to_mode (XEXP (x, 2), mode,
						     mask, reg, next_select)));
      break;

    default:
      break;
    }

  /* Ensure we return a value of the proper mode.  */
  return gen_lowpart_for_combine (mode, x);
}
\f
/* Return nonzero if X is an expression that has one of two values depending
   on whether some other value is zero or nonzero.  In that case, we return
   the value that is being tested, *PTRUE is set to the value X has when the
   returned rtx is nonzero, and *PFALSE is set to the other alternative.

   If we return zero, we set *PTRUE and *PFALSE to X.  */
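
/* For example (with hypothetical operands): given
   x = (if_then_else (eq C (const_int 0)) A B), we return C with
   *PTRUE == B and *PFALSE == A, since the EQ comparison inverts the
   sense of the condition.  */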

static rtx
if_then_else_cond (x, ptrue, pfalse)
     rtx x;
     rtx *ptrue, *pfalse;
{
  enum machine_mode mode = GET_MODE (x);
  enum rtx_code code = GET_CODE (x);
  int size = GET_MODE_BITSIZE (mode);
  rtx cond0, cond1, true0, true1, false0, false1;
  unsigned HOST_WIDE_INT nz;

  /* If this is a unary operation whose operand has one of two values, apply
     our opcode to compute those values.  */
  if (GET_RTX_CLASS (code) == '1'
      && (cond0 = if_then_else_cond (XEXP (x, 0), &true0, &false0)) != 0)
    {
      *ptrue = gen_unary (code, mode, GET_MODE (XEXP (x, 0)), true0);
      *pfalse = gen_unary (code, mode, GET_MODE (XEXP (x, 0)), false0);
      return cond0;
    }

  /* If this is a COMPARE, do nothing, since the IF_THEN_ELSE we would
     make can't possibly match and would suppress other optimizations.  */
  else if (code == COMPARE)
    ;

  /* If this is a binary operation, see if either side has only one of two
     values.  If either one does or if both do and they are conditional on
     the same value, compute the new true and false values.  */
  else if (GET_RTX_CLASS (code) == 'c' || GET_RTX_CLASS (code) == '2'
	   || GET_RTX_CLASS (code) == '<')
    {
      cond0 = if_then_else_cond (XEXP (x, 0), &true0, &false0);
      cond1 = if_then_else_cond (XEXP (x, 1), &true1, &false1);

      if ((cond0 != 0 || cond1 != 0)
	  && ! (cond0 != 0 && cond1 != 0 && ! rtx_equal_p (cond0, cond1)))
	{
	  /* If if_then_else_cond returned zero, then true/false are the
	     same rtl.  We must copy one of them to prevent invalid rtl
	     sharing.  */
	  if (cond0 == 0)
	    true0 = copy_rtx (true0);
	  else if (cond1 == 0)
	    true1 = copy_rtx (true1);

	  *ptrue = gen_binary (code, mode, true0, true1);
	  *pfalse = gen_binary (code, mode, false0, false1);
	  return cond0 ? cond0 : cond1;
	}

      /* See if we have PLUS, IOR, XOR, MINUS or UMAX, where one of the
	 operands is zero when the other is non-zero, and vice-versa,
	 and STORE_FLAG_VALUE is 1 or -1.  */

      if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
	  && (code == PLUS || code == IOR || code == XOR || code == MINUS
	      || code == UMAX)
	  && GET_CODE (XEXP (x, 0)) == MULT && GET_CODE (XEXP (x, 1)) == MULT)
	{
	  rtx op0 = XEXP (XEXP (x, 0), 1);
	  rtx op1 = XEXP (XEXP (x, 1), 1);

	  cond0 = XEXP (XEXP (x, 0), 0);
	  cond1 = XEXP (XEXP (x, 1), 0);

	  if (GET_RTX_CLASS (GET_CODE (cond0)) == '<'
	      && GET_RTX_CLASS (GET_CODE (cond1)) == '<'
	      && reversible_comparison_p (cond1)
	      && ((GET_CODE (cond0) == reverse_condition (GET_CODE (cond1))
		   && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 0))
		   && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 1)))
		  || ((swap_condition (GET_CODE (cond0))
		       == reverse_condition (GET_CODE (cond1)))
		      && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 1))
		      && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 0))))
	      && ! side_effects_p (x))
	    {
	      *ptrue = gen_binary (MULT, mode, op0, const_true_rtx);
	      *pfalse = gen_binary (MULT, mode,
				    (code == MINUS
				     ? gen_unary (NEG, mode, mode, op1) : op1),
				    const_true_rtx);
	      return cond0;
	    }
	}

      /* Similarly for MULT, AND and UMIN, except that for these the result
	 is always zero.  */
      if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
	  && (code == MULT || code == AND || code == UMIN)
	  && GET_CODE (XEXP (x, 0)) == MULT && GET_CODE (XEXP (x, 1)) == MULT)
	{
	  cond0 = XEXP (XEXP (x, 0), 0);
	  cond1 = XEXP (XEXP (x, 1), 0);

	  if (GET_RTX_CLASS (GET_CODE (cond0)) == '<'
	      && GET_RTX_CLASS (GET_CODE (cond1)) == '<'
	      && reversible_comparison_p (cond1)
	      && ((GET_CODE (cond0) == reverse_condition (GET_CODE (cond1))
		   && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 0))
		   && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 1)))
		  || ((swap_condition (GET_CODE (cond0))
		       == reverse_condition (GET_CODE (cond1)))
		      && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 1))
		      && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 0))))
	      && ! side_effects_p (x))
	    {
	      *ptrue = *pfalse = const0_rtx;
	      return cond0;
	    }
	}
    }

  else if (code == IF_THEN_ELSE)
    {
      /* If we have IF_THEN_ELSE already, extract the condition and
	 canonicalize it if it is NE or EQ.  */
      cond0 = XEXP (x, 0);
      *ptrue = XEXP (x, 1), *pfalse = XEXP (x, 2);
      if (GET_CODE (cond0) == NE && XEXP (cond0, 1) == const0_rtx)
	return XEXP (cond0, 0);
      else if (GET_CODE (cond0) == EQ && XEXP (cond0, 1) == const0_rtx)
	{
	  *ptrue = XEXP (x, 2), *pfalse = XEXP (x, 1);
	  return XEXP (cond0, 0);
	}
      else
	return cond0;
    }

  /* If X is a normal SUBREG with both inner and outer modes integral,
     we can narrow both the true and false values of the inner expression,
     if there is a condition.  */
  else if (code == SUBREG && GET_MODE_CLASS (mode) == MODE_INT
	   && GET_MODE_CLASS (GET_MODE (SUBREG_REG (x))) == MODE_INT
	   && GET_MODE_SIZE (mode) <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
	   && 0 != (cond0 = if_then_else_cond (SUBREG_REG (x),
					       &true0, &false0)))
    {
      *ptrue = force_to_mode (true0, mode, GET_MODE_MASK (mode), NULL_RTX, 0);
      *pfalse
	= force_to_mode (false0, mode, GET_MODE_MASK (mode), NULL_RTX, 0);

      return cond0;
    }

  /* If X is a constant, this isn't special and will cause confusions
     if we treat it as such.  Likewise if it is equivalent to a constant.  */
  else if (CONSTANT_P (x)
	   || ((cond0 = get_last_value (x)) != 0 && CONSTANT_P (cond0)))
    ;

  /* If X is known to be either 0 or -1, those are the true and
     false values when testing X.  */
  else if (num_sign_bit_copies (x, mode) == size)
    {
      *ptrue = constm1_rtx, *pfalse = const0_rtx;
      return x;
    }

  /* Likewise for 0 or a single bit.  */
  else if (exact_log2 (nz = nonzero_bits (x, mode)) >= 0)
    {
      *ptrue = GEN_INT (nz), *pfalse = const0_rtx;
      return x;
    }

  /* Otherwise fail; show no condition with true and false values the same.  */
  *ptrue = *pfalse = x;
  return 0;
}
\f
/* Return the value of expression X given the fact that condition COND
   is known to be true when applied to REG as its first operand and VAL
   as its second.  X is known to not be shared and so can be modified in
   place.

   We only handle the simplest cases, and specifically those cases that
   arise with IF_THEN_ELSE expressions.  */

static rtx
known_cond (x, cond, reg, val)
     rtx x;
     enum rtx_code cond;
     rtx reg, val;
{
  enum rtx_code code = GET_CODE (x);
  rtx temp;
  char *fmt;
  int i, j;

  if (side_effects_p (x))
    return x;

  if (cond == EQ && rtx_equal_p (x, reg))
    return val;

  /* If X is (abs REG) and we know something about REG's relationship
     with zero, we may be able to simplify this.  */

  if (code == ABS && rtx_equal_p (XEXP (x, 0), reg) && val == const0_rtx)
    switch (cond)
      {
      case GE:  case GT:  case EQ:
	return XEXP (x, 0);
      case LT:  case LE:
	return gen_unary (NEG, GET_MODE (XEXP (x, 0)), GET_MODE (XEXP (x, 0)),
			  XEXP (x, 0));
      default:
	break;
      }

  /* The only other cases we handle are MIN, MAX, and comparisons if the
     operands are the same as REG and VAL.  */

  else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == 'c')
    {
      if (rtx_equal_p (XEXP (x, 0), val))
	cond = swap_condition (cond), temp = val, val = reg, reg = temp;

      if (rtx_equal_p (XEXP (x, 0), reg) && rtx_equal_p (XEXP (x, 1), val))
	{
	  if (GET_RTX_CLASS (code) == '<')
	    return (comparison_dominates_p (cond, code) ? const_true_rtx
		    : (comparison_dominates_p (cond,
					       reverse_condition (code))
		       ? const0_rtx : x));

	  else if (code == SMAX || code == SMIN
		   || code == UMIN || code == UMAX)
	    {
	      int unsignedp = (code == UMIN || code == UMAX);

	      if (code == SMAX || code == UMAX)
		cond = reverse_condition (cond);

	      switch (cond)
		{
		case GE:   case GT:
		  return unsignedp ? x : XEXP (x, 1);
		case LE:   case LT:
		  return unsignedp ? x : XEXP (x, 0);
		case GEU:  case GTU:
		  return unsignedp ? XEXP (x, 1) : x;
		case LEU:  case LTU:
		  return unsignedp ? XEXP (x, 0) : x;
		default:
		  break;
		}
	    }
	}
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	SUBST (XEXP (x, i), known_cond (XEXP (x, i), cond, reg, val));
      else if (fmt[i] == 'E')
	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	  SUBST (XVECEXP (x, i, j), known_cond (XVECEXP (x, i, j),
						cond, reg, val));
    }

  return x;
}
\f
/* See if X and Y are equal for the purposes of seeing if we can rewrite an
   assignment as a field assignment.  */

static int
rtx_equal_for_field_assignment_p (x, y)
     rtx x;
     rtx y;
{
  rtx last_x, last_y;

  if (x == y || rtx_equal_p (x, y))
    return 1;

  if (x == 0 || y == 0 || GET_MODE (x) != GET_MODE (y))
    return 0;

  /* Check for a paradoxical SUBREG of a MEM compared with the MEM.
     Note that all SUBREGs of MEM are paradoxical; otherwise they
     would have been rewritten.  */
  if (GET_CODE (x) == MEM && GET_CODE (y) == SUBREG
      && GET_CODE (SUBREG_REG (y)) == MEM
      && rtx_equal_p (SUBREG_REG (y),
		      gen_lowpart_for_combine (GET_MODE (SUBREG_REG (y)), x)))
    return 1;

  if (GET_CODE (y) == MEM && GET_CODE (x) == SUBREG
      && GET_CODE (SUBREG_REG (x)) == MEM
      && rtx_equal_p (SUBREG_REG (x),
		      gen_lowpart_for_combine (GET_MODE (SUBREG_REG (x)), y)))
    return 1;

  last_x = get_last_value (x);
  last_y = get_last_value (y);

  return ((last_x != 0
	   && GET_CODE (last_x) != CLOBBER
	   && rtx_equal_for_field_assignment_p (last_x, y))
	  || (last_y != 0
	      && GET_CODE (last_y) != CLOBBER
	      && rtx_equal_for_field_assignment_p (x, last_y))
	  || (last_x != 0 && last_y != 0
	      && GET_CODE (last_x) != CLOBBER
	      && GET_CODE (last_y) != CLOBBER
	      && rtx_equal_for_field_assignment_p (last_x, last_y)));
}
\f
/* See if X, a SET operation, can be rewritten as a bit-field assignment.
   Return that assignment if so.

   We only handle the most common cases.  */
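
/* For example (with hypothetical operands): a SET of the form
   (set D (and (rotate (const_int -2) P) D)), which clears the single
   bit at position P in D, is rewritten as
   (set (zero_extract D (const_int 1) P) (const_int 0)).  */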

static rtx
make_field_assignment (x)
     rtx x;
{
  rtx dest = SET_DEST (x);
  rtx src = SET_SRC (x);
  rtx assign;
  rtx rhs, lhs;
  HOST_WIDE_INT c1;
  int pos, len;
  rtx other;
  enum machine_mode mode;

  /* If SRC was (and (not (ashift (const_int 1) POS)) DEST), this is
     a clear of a one-bit field.  We will have changed it to
     (and (rotate (const_int -2) POS) DEST), so check for that.  Also check
     for a SUBREG.  */

  if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == ROTATE
      && GET_CODE (XEXP (XEXP (src, 0), 0)) == CONST_INT
      && INTVAL (XEXP (XEXP (src, 0), 0)) == -2
      && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
    {
      assign = make_extraction (VOIDmode, dest, 0, XEXP (XEXP (src, 0), 1),
				1, 1, 1, 0);
      if (assign != 0)
	return gen_rtx_SET (VOIDmode, assign, const0_rtx);
      return x;
    }

  else if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == SUBREG
	   && subreg_lowpart_p (XEXP (src, 0))
	   && (GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
	       < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (src, 0)))))
	   && GET_CODE (SUBREG_REG (XEXP (src, 0))) == ROTATE
	   && INTVAL (XEXP (SUBREG_REG (XEXP (src, 0)), 0)) == -2
	   && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
    {
      assign = make_extraction (VOIDmode, dest, 0,
				XEXP (SUBREG_REG (XEXP (src, 0)), 1),
				1, 1, 1, 0);
      if (assign != 0)
	return gen_rtx_SET (VOIDmode, assign, const0_rtx);
      return x;
    }

  /* If SRC is (ior (ashift (const_int 1) POS) DEST), this is a set of a
     one-bit field.  */
  else if (GET_CODE (src) == IOR && GET_CODE (XEXP (src, 0)) == ASHIFT
	   && XEXP (XEXP (src, 0), 0) == const1_rtx
	   && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
    {
      assign = make_extraction (VOIDmode, dest, 0, XEXP (XEXP (src, 0), 1),
				1, 1, 1, 0);
      if (assign != 0)
	return gen_rtx_SET (VOIDmode, assign, const1_rtx);
      return x;
    }

  /* The other case we handle is assignments into a constant-position
     field.  They look like (ior/xor (and DEST C1) OTHER).  If C1 represents
     a mask that has all one bits except for a group of zero bits and
     OTHER is known to have zeros where C1 has ones, this is such an
     assignment.  Compute the position and length from C1.  Shift OTHER
     to the appropriate position, force it to the required mode, and
     make the extraction.  Check for the AND in both operands.  */

  if (GET_CODE (src) != IOR && GET_CODE (src) != XOR)
    return x;

  rhs = expand_compound_operation (XEXP (src, 0));
  lhs = expand_compound_operation (XEXP (src, 1));

  if (GET_CODE (rhs) == AND
      && GET_CODE (XEXP (rhs, 1)) == CONST_INT
      && rtx_equal_for_field_assignment_p (XEXP (rhs, 0), dest))
    c1 = INTVAL (XEXP (rhs, 1)), other = lhs;
  else if (GET_CODE (lhs) == AND
	   && GET_CODE (XEXP (lhs, 1)) == CONST_INT
	   && rtx_equal_for_field_assignment_p (XEXP (lhs, 0), dest))
    c1 = INTVAL (XEXP (lhs, 1)), other = rhs;
  else
    return x;

  pos = get_pos_from_mask ((~ c1) & GET_MODE_MASK (GET_MODE (dest)), &len);
  if (pos < 0 || pos + len > GET_MODE_BITSIZE (GET_MODE (dest))
      || (GET_MODE_BITSIZE (GET_MODE (other)) <= HOST_BITS_PER_WIDE_INT
	  && (c1 & nonzero_bits (other, GET_MODE (other))) != 0))
    return x;

  assign = make_extraction (VOIDmode, dest, pos, NULL_RTX, len, 1, 1, 0);
  if (assign == 0)
    return x;

  /* The mode to use for the source is the mode of the assignment, or of
     what is inside a possible STRICT_LOW_PART.  */
  mode = (GET_CODE (assign) == STRICT_LOW_PART
	  ? GET_MODE (XEXP (assign, 0)) : GET_MODE (assign));

  /* Shift OTHER right POS places and make it the source, restricting it
     to the proper length and mode.  */

  src = force_to_mode (simplify_shift_const (NULL_RTX, LSHIFTRT,
					     GET_MODE (src), other, pos),
		       mode,
		       GET_MODE_BITSIZE (mode) >= HOST_BITS_PER_WIDE_INT
		       ? GET_MODE_MASK (mode)
		       : ((HOST_WIDE_INT) 1 << len) - 1,
		       dest, 0);

  return gen_rtx_combine (SET, VOIDmode, assign, src);
}
\f
/* See if X is of the form (+ (* a c) (* b c)) and convert to (* (+ a b) c)
   if so.  */
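
/* A typical instance (with hypothetical operands):
   (ior (and A C) (and B C)) becomes (and (ior A B) C), since AND
   distributes over IOR.  */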

static rtx
apply_distributive_law (x)
     rtx x;
{
  enum rtx_code code = GET_CODE (x);
  rtx lhs, rhs, other;
  rtx tem;
  enum rtx_code inner_code;

  /* Distributivity is not true for floating point.
     It can change the value.  So don't do it.
     -- rms and moshier@world.std.com.  */
  if (FLOAT_MODE_P (GET_MODE (x)))
    return x;

  /* The outer operation can only be one of the following:  */
  if (code != IOR && code != AND && code != XOR
      && code != PLUS && code != MINUS)
    return x;

  lhs = XEXP (x, 0), rhs = XEXP (x, 1);

  /* If either operand is a primitive we can't do anything, so get out
     fast.  */
  if (GET_RTX_CLASS (GET_CODE (lhs)) == 'o'
      || GET_RTX_CLASS (GET_CODE (rhs)) == 'o')
    return x;

  lhs = expand_compound_operation (lhs);
  rhs = expand_compound_operation (rhs);
  inner_code = GET_CODE (lhs);
  if (inner_code != GET_CODE (rhs))
    return x;

  /* See if the inner and outer operations distribute.  */
  switch (inner_code)
    {
    case LSHIFTRT:
    case ASHIFTRT:
    case AND:
    case IOR:
      /* These all distribute except over PLUS.  */
      if (code == PLUS || code == MINUS)
	return x;
      break;

    case MULT:
      if (code != PLUS && code != MINUS)
	return x;
      break;

    case ASHIFT:
      /* This is also a multiply, so it distributes over everything.  */
      break;

    case SUBREG:
      /* Non-paradoxical SUBREGs distribute over all operations, provided
	 the inner modes and word numbers are the same, this is an extraction
	 of a low-order part, we don't convert an fp operation to int or
	 vice versa, and we would not be converting a single-word
	 operation into a multi-word operation.  The latter test is not
	 required, but it prevents generating unneeded multi-word operations.
	 Some of the previous tests are redundant given the latter test, but
	 are retained because they are required for correctness.

	 We produce the result slightly differently in this case.  */

      if (GET_MODE (SUBREG_REG (lhs)) != GET_MODE (SUBREG_REG (rhs))
	  || SUBREG_WORD (lhs) != SUBREG_WORD (rhs)
	  || ! subreg_lowpart_p (lhs)
	  || (GET_MODE_CLASS (GET_MODE (lhs))
	      != GET_MODE_CLASS (GET_MODE (SUBREG_REG (lhs))))
	  || (GET_MODE_SIZE (GET_MODE (lhs))
	      > GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs))))
	  || GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs))) > UNITS_PER_WORD)
	return x;

      tem = gen_binary (code, GET_MODE (SUBREG_REG (lhs)),
			SUBREG_REG (lhs), SUBREG_REG (rhs));
      return gen_lowpart_for_combine (GET_MODE (x), tem);

    default:
      return x;
    }

  /* Set LHS and RHS to the inner operands (A and B in the example
     above) and set OTHER to the common operand (C in the example).
     There is only one way to do this unless the inner operation is
     commutative.  */
  if (GET_RTX_CLASS (inner_code) == 'c'
      && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 0)))
    other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 1);
  else if (GET_RTX_CLASS (inner_code) == 'c'
	   && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 1)))
    other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 0);
  else if (GET_RTX_CLASS (inner_code) == 'c'
	   && rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 0)))
    other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 1);
  else if (rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 1)))
    other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 0);
  else
    return x;

  /* Form the new inner operation, seeing if it simplifies first.  */
  tem = gen_binary (code, GET_MODE (x), lhs, rhs);

  /* There is one exception to the general way of distributing:
     (a | b) ^ (a | c) == (~a) & (b ^ c)  */
  if (code == XOR && inner_code == IOR)
    {
      inner_code = AND;
      other = gen_unary (NOT, GET_MODE (x), GET_MODE (x), other);
    }

  /* We may be able to continue distributing the result, so call
     ourselves recursively on the inner operation before forming the
     outer operation, which we return.  */
  return gen_binary (inner_code, GET_MODE (x),
		     apply_distributive_law (tem), other);
}
\f
/* We have X, a logical `and' of VAROP with the constant CONSTOP, to be done
   in MODE.

   Return an equivalent form, if different from X.  Otherwise, return X.  If
   X is zero, we are to always construct the equivalent form.  */
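
/* One of the simplifications made below, for illustration: if VAROP is
   (neg Y) where Y is known to be 0 or 1 and CONSTOP is the power of two
   8, the AND is replaced by (ashift Y (const_int 3)), because -Y & 8
   equals Y << 3 for Y in {0, 1}.  */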
7214
7215static rtx
7216simplify_and_const_int (x, mode, varop, constop)
7217 rtx x;
7218 enum machine_mode mode;
7219 rtx varop;
5f4f0e22 7220 unsigned HOST_WIDE_INT constop;
230d793d 7221{
951553af 7222 unsigned HOST_WIDE_INT nonzero;
9fa6d012 7223 int width = GET_MODE_BITSIZE (mode);
42301240 7224 int i;
230d793d 7225
6139ff20
RK
7226 /* Simplify VAROP knowing that we will be only looking at some of the
7227 bits in it. */
e3d616e3 7228 varop = force_to_mode (varop, mode, constop, NULL_RTX, 0);
230d793d 7229
6139ff20
RK
7230 /* If VAROP is a CLOBBER, we will fail so return it; if it is a
7231 CONST_INT, we are done. */
7232 if (GET_CODE (varop) == CLOBBER || GET_CODE (varop) == CONST_INT)
7233 return varop;
230d793d 7234
fc06d7aa
RK
7235 /* See what bits may be nonzero in VAROP. Unlike the general case of
7236 a call to nonzero_bits, here we don't care about bits outside
7237 MODE. */
7238
7239 nonzero = nonzero_bits (varop, mode) & GET_MODE_MASK (mode);
230d793d 7240
9fa6d012
TG
7241 /* If this would be an entire word for the target, but is not for
7242 the host, then sign-extend on the host so that the number will look
7243 the same way on the host that it would on the target.
7244
7245 For example, when building a 64 bit alpha hosted 32 bit sparc
7246 targeted compiler, then we want the 32 bit unsigned value -1 to be
7247 represented as a 64 bit value -1, and not as 0x00000000ffffffff.
7248 The later confuses the sparc backend. */
7249
7250 if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT && BITS_PER_WORD == width
7251 && (nonzero & ((HOST_WIDE_INT) 1 << (width - 1))))
7252 nonzero |= ((HOST_WIDE_INT) (-1) << width);
7253
230d793d 7254 /* Turn off all bits in the constant that are known to already be zero.
951553af 7255 Thus, if the AND isn't needed at all, we will have CONSTOP == NONZERO_BITS
230d793d
RS
7256 which is tested below. */
7257
951553af 7258 constop &= nonzero;
230d793d
RS
7259
7260 /* If we don't have any bits left, return zero. */
7261 if (constop == 0)
7262 return const0_rtx;
7263
42301240
RK
7264 /* If VAROP is a NEG of something known to be zero or 1 and CONSTOP is
7265 a power of two, we can replace this with a ASHIFT. */
7266 if (GET_CODE (varop) == NEG && nonzero_bits (XEXP (varop, 0), mode) == 1
7267 && (i = exact_log2 (constop)) >= 0)
7268 return simplify_shift_const (NULL_RTX, ASHIFT, mode, XEXP (varop, 0), i);
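/* Hypothetical instance of the NEG rewrite above: if X is known to be
   0 or 1, then (neg X) is 0 or all ones, so
       (and (neg X) (const_int 8))  ->  (ashift X (const_int 3)),
   because masking an all-ones value with the single bit 1<<3 is the
   same as moving X's low bit into that position.  */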
7269
7270 /* If VAROP is an IOR or XOR, apply the AND to both branches of the IOR
7271 or XOR, then try to apply the distributive law. This may eliminate
7272 operations if either branch can be simplified because of the AND.
7273 It may also make some cases more complex, but those cases probably
7274 won't match a pattern either with or without this. */
7275
7276 if (GET_CODE (varop) == IOR || GET_CODE (varop) == XOR)
7277 return
7278 gen_lowpart_for_combine
7279 (mode,
7280 apply_distributive_law
7281 (gen_binary (GET_CODE (varop), GET_MODE (varop),
7282 simplify_and_const_int (NULL_RTX, GET_MODE (varop),
7283 XEXP (varop, 0), constop),
7284 simplify_and_const_int (NULL_RTX, GET_MODE (varop),
7285 XEXP (varop, 1), constop))));
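/* For illustration (not in the original): with CONSTOP == 0xff,
       (and (ior (ashift X (const_int 8)) Y) (const_int 0xff))
   distributes to
       (ior (and (ashift X (const_int 8)) 0xff) (and Y 0xff));
   the first arm has no nonzero bits under the low-byte mask and drops
   out, leaving (and Y (const_int 0xff)).  */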
7286
7287 /* Get VAROP in MODE. Try to get a SUBREG if not. Don't make a new SUBREG
7288 if we already had one (just check for the simplest cases). */
7289 if (x && GET_CODE (XEXP (x, 0)) == SUBREG
7290 && GET_MODE (XEXP (x, 0)) == mode
7291 && SUBREG_REG (XEXP (x, 0)) == varop)
7292 varop = XEXP (x, 0);
7293 else
7294 varop = gen_lowpart_for_combine (mode, varop);
7295
0f41302f 7296 /* If we can't make the SUBREG, try to return what we were given. */
7297 if (GET_CODE (varop) == CLOBBER)
7298 return x ? x : varop;
7299
7300 /* If we are only masking insignificant bits, return VAROP. */
951553af 7301 if (constop == nonzero)
7302 x = varop;
7303
7304 /* Otherwise, return an AND. See how much, if any, of X we can use. */
7305 else if (x == 0 || GET_CODE (x) != AND || GET_MODE (x) != mode)
6139ff20 7306 x = gen_binary (AND, mode, varop, GEN_INT (constop));
7307
7308 else
7309 {
7310 if (GET_CODE (XEXP (x, 1)) != CONST_INT
7311 || INTVAL (XEXP (x, 1)) != constop)
5f4f0e22 7312 SUBST (XEXP (x, 1), GEN_INT (constop));
7313
7314 SUBST (XEXP (x, 0), varop);
7315 }
7316
7317 return x;
7318}
7319\f
7320/* We let num_sign_bit_copies recur into nonzero_bits as that is useful.
7321 We don't let nonzero_bits recur into num_sign_bit_copies, because that
7322 is less useful. We can't allow both, because that results in exponential
956d6950 7323 run time. There is a nullstone testcase that triggered
7324 this. This macro avoids accidental uses of num_sign_bit_copies. */
7325#define num_sign_bit_copies()
7326
7327/* Given an expression, X, compute which bits in X can be non-zero.
7328 We don't care about bits outside of those defined in MODE.
7329
7330 For most X this is simply GET_MODE_MASK (MODE), but if X is
7331 a shift, AND, or zero_extract, we can do better. */
7332
5f4f0e22 7333static unsigned HOST_WIDE_INT
951553af 7334nonzero_bits (x, mode)
7335 rtx x;
7336 enum machine_mode mode;
7337{
7338 unsigned HOST_WIDE_INT nonzero = GET_MODE_MASK (mode);
7339 unsigned HOST_WIDE_INT inner_nz;
7340 enum rtx_code code;
7341 int mode_width = GET_MODE_BITSIZE (mode);
7342 rtx tem;
7343
7344 /* For floating-point values, assume all bits are needed. */
7345 if (FLOAT_MODE_P (GET_MODE (x)) || FLOAT_MODE_P (mode))
7346 return nonzero;
7347
7348 /* If X is wider than MODE, use its mode instead. */
7349 if (GET_MODE_BITSIZE (GET_MODE (x)) > mode_width)
7350 {
7351 mode = GET_MODE (x);
951553af 7352 nonzero = GET_MODE_MASK (mode);
7353 mode_width = GET_MODE_BITSIZE (mode);
7354 }
7355
5f4f0e22 7356 if (mode_width > HOST_BITS_PER_WIDE_INT)
7357 /* Our only callers in this case look for single bit values. So
7358 just return the mode mask. Those tests will then be false. */
951553af 7359 return nonzero;
230d793d 7360
8baf60bb 7361#ifndef WORD_REGISTER_OPERATIONS
c6965c0f 7362 /* If MODE is wider than X, but both are a single word for both the host
7363 and target machines, we can compute this from which bits of the
7364 object might be nonzero in its own mode, taking into account the fact
7365 that on many CISC machines, accessing an object in a wider mode
7366 causes the high-order bits to become undefined. So they are
7367 not known to be zero. */
7368
7369 if (GET_MODE (x) != VOIDmode && GET_MODE (x) != mode
7370 && GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD
7371 && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
c6965c0f 7372 && GET_MODE_BITSIZE (mode) > GET_MODE_BITSIZE (GET_MODE (x)))
7373 {
7374 nonzero &= nonzero_bits (x, GET_MODE (x));
7375 nonzero |= GET_MODE_MASK (mode) & ~ GET_MODE_MASK (GET_MODE (x));
7376 return nonzero;
7377 }
7378#endif
7379
7380 code = GET_CODE (x);
7381 switch (code)
7382 {
7383 case REG:
7384#ifdef POINTERS_EXTEND_UNSIGNED
7385 /* If pointers extend unsigned and this is a pointer in Pmode, say that
7386 all the bits above ptr_mode are known to be zero. */
7387 if (POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
7388 && REGNO_POINTER_FLAG (REGNO (x)))
7389 nonzero &= GET_MODE_MASK (ptr_mode);
7390#endif
7391
7392#ifdef STACK_BOUNDARY
7393 /* If this is the stack pointer, we may know something about its
7394 alignment. If PUSH_ROUNDING is defined, it is possible for the
7395 stack to be momentarily aligned only to that amount, so we pick
7396 the least alignment. */
7397
7398 /* We can't check for arg_pointer_rtx here, because it is not
7399 guaranteed to have as much alignment as the stack pointer.
7400 In particular, in the Irix6 n64 ABI, the stack has 128 bit
7401 alignment but the argument pointer has only 64 bit alignment. */
7402
7403 if ((x == frame_pointer_rtx
7404 || x == stack_pointer_rtx
7405 || x == hard_frame_pointer_rtx
7406 || (REGNO (x) >= FIRST_VIRTUAL_REGISTER
7407 && REGNO (x) <= LAST_VIRTUAL_REGISTER))
7408#ifdef STACK_BIAS
7409 && !STACK_BIAS
7410#endif
7411 )
230d793d 7412 {
b0d71df9 7413 int sp_alignment = STACK_BOUNDARY / BITS_PER_UNIT;
7414
7415#ifdef PUSH_ROUNDING
91102d5a 7416 if (REGNO (x) == STACK_POINTER_REGNUM)
b0d71df9 7417 sp_alignment = MIN (PUSH_ROUNDING (1), sp_alignment);
7418#endif
7419
7420 /* We must return here, otherwise we may get a worse result from
7421 one of the choices below. There is nothing useful below as
7422 far as the stack pointer is concerned. */
b0d71df9 7423 return nonzero & ~ (sp_alignment - 1);
230d793d 7424 }
b0d71df9 7425#endif
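/* Illustrative arithmetic for the alignment mask above: if
   STACK_BOUNDARY is 64, sp_alignment == 8 and the result is
       nonzero & ~(8 - 1)  ==  nonzero & ~7,
   i.e. the low three bits of the stack pointer are known zero.  */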
230d793d 7426
7427 /* If X is a register whose nonzero bits value is current, use it.
7428 Otherwise, if X is a register whose value we can find, use that
7429 value. Otherwise, use the previously-computed global nonzero bits
7430 for this register. */
7431
7432 if (reg_last_set_value[REGNO (x)] != 0
7433 && reg_last_set_mode[REGNO (x)] == mode
b1f21e0a 7434 && (REG_N_SETS (REGNO (x)) == 1
7435 || reg_last_set_label[REGNO (x)] == label_tick)
7436 && INSN_CUID (reg_last_set[REGNO (x)]) < subst_low_cuid)
7437 return reg_last_set_nonzero_bits[REGNO (x)];
7438
7439 tem = get_last_value (x);
9afa3d54 7440
230d793d 7441 if (tem)
7442 {
7443#ifdef SHORT_IMMEDIATES_SIGN_EXTEND
7444 /* If X is narrower than MODE and TEM is a non-negative
7445 constant that would appear negative in the mode of X,
7446 sign-extend it for use in reg_nonzero_bits because some
7447 machines (maybe most) will actually do the sign-extension
7448 and this is the conservative approach.
7449
7450 ??? For 2.5, try to tighten up the MD files in this regard
7451 instead of this kludge. */
7452
7453 if (GET_MODE_BITSIZE (GET_MODE (x)) < mode_width
7454 && GET_CODE (tem) == CONST_INT
7455 && INTVAL (tem) > 0
7456 && 0 != (INTVAL (tem)
7457 & ((HOST_WIDE_INT) 1
9e69be8c 7458 << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
7459 tem = GEN_INT (INTVAL (tem)
7460 | ((HOST_WIDE_INT) (-1)
7461 << GET_MODE_BITSIZE (GET_MODE (x))));
7462#endif
7463 return nonzero_bits (tem, mode);
7464 }
7465 else if (nonzero_sign_valid && reg_nonzero_bits[REGNO (x)])
7466 return reg_nonzero_bits[REGNO (x)] & nonzero;
230d793d 7467 else
951553af 7468 return nonzero;
7469
7470 case CONST_INT:
7471#ifdef SHORT_IMMEDIATES_SIGN_EXTEND
7472 /* If X is negative in MODE, sign-extend the value. */
7473 if (INTVAL (x) > 0 && mode_width < BITS_PER_WORD
7474 && 0 != (INTVAL (x) & ((HOST_WIDE_INT) 1 << (mode_width - 1))))
7475 return (INTVAL (x) | ((HOST_WIDE_INT) (-1) << mode_width));
7476#endif
7477
7478 return INTVAL (x);
7479
230d793d 7480 case MEM:
8baf60bb 7481#ifdef LOAD_EXTEND_OP
7482 /* In many, if not most, RISC machines, reading a byte from memory
7483 zeros the rest of the register. Noticing that fact saves a lot
7484 of extra zero-extends. */
7485 if (LOAD_EXTEND_OP (GET_MODE (x)) == ZERO_EXTEND)
7486 nonzero &= GET_MODE_MASK (GET_MODE (x));
230d793d 7487#endif
8baf60bb 7488 break;
230d793d 7489
7490 case EQ: case NE:
7491 case GT: case GTU:
7492 case LT: case LTU:
7493 case GE: case GEU:
7494 case LE: case LEU:
3f508eca 7495
7496 /* If this produces an integer result, we know which bits are set.
7497 Code here used to clear bits outside the mode of X, but that is
7498 now done above. */
230d793d 7499
7500 if (GET_MODE_CLASS (mode) == MODE_INT
7501 && mode_width <= HOST_BITS_PER_WIDE_INT)
7502 nonzero = STORE_FLAG_VALUE;
230d793d 7503 break;
230d793d 7504
230d793d 7505 case NEG:
7506#if 0
7507 /* Disabled to avoid exponential mutual recursion between nonzero_bits
7508 and num_sign_bit_copies. */
7509 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
7510 == GET_MODE_BITSIZE (GET_MODE (x)))
951553af 7511 nonzero = 1;
b3728b0e 7512#endif
7513
7514 if (GET_MODE_SIZE (GET_MODE (x)) < mode_width)
951553af 7515 nonzero |= (GET_MODE_MASK (mode) & ~ GET_MODE_MASK (GET_MODE (x)));
230d793d 7516 break;
7517
7518 case ABS:
7519#if 0
7520 /* Disabled to avoid exponential mutual recursion between nonzero_bits
7521 and num_sign_bit_copies. */
7522 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
7523 == GET_MODE_BITSIZE (GET_MODE (x)))
951553af 7524 nonzero = 1;
b3728b0e 7525#endif
d0ab8cd3 7526 break;
7527
7528 case TRUNCATE:
951553af 7529 nonzero &= (nonzero_bits (XEXP (x, 0), mode) & GET_MODE_MASK (mode));
7530 break;
7531
7532 case ZERO_EXTEND:
951553af 7533 nonzero &= nonzero_bits (XEXP (x, 0), mode);
230d793d 7534 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
951553af 7535 nonzero &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
7536 break;
7537
7538 case SIGN_EXTEND:
7539 /* If the sign bit is known clear, this is the same as ZERO_EXTEND.
7540 Otherwise, show all the bits in the outer mode but not the inner
7541 may be non-zero. */
951553af 7542 inner_nz = nonzero_bits (XEXP (x, 0), mode);
7543 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
7544 {
951553af 7545 inner_nz &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
7546 if (inner_nz
7547 & (((HOST_WIDE_INT) 1
7548 << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1))))
951553af 7549 inner_nz |= (GET_MODE_MASK (mode)
7550 & ~ GET_MODE_MASK (GET_MODE (XEXP (x, 0))));
7551 }
7552
951553af 7553 nonzero &= inner_nz;
7554 break;
7555
7556 case AND:
7557 nonzero &= (nonzero_bits (XEXP (x, 0), mode)
7558 & nonzero_bits (XEXP (x, 1), mode));
7559 break;
7560
7561 case XOR: case IOR:
7562 case UMIN: case UMAX: case SMIN: case SMAX:
7563 nonzero &= (nonzero_bits (XEXP (x, 0), mode)
7564 | nonzero_bits (XEXP (x, 1), mode));
7565 break;
7566
7567 case PLUS: case MINUS:
7568 case MULT:
7569 case DIV: case UDIV:
7570 case MOD: case UMOD:
7571 /* We can apply the rules of arithmetic to compute the number of
7572 high- and low-order zero bits of these operations. We start by
7573 computing the width (position of the highest-order non-zero bit)
7574 and the number of low-order zero bits for each value. */
7575 {
7576 unsigned HOST_WIDE_INT nz0 = nonzero_bits (XEXP (x, 0), mode);
7577 unsigned HOST_WIDE_INT nz1 = nonzero_bits (XEXP (x, 1), mode);
7578 int width0 = floor_log2 (nz0) + 1;
7579 int width1 = floor_log2 (nz1) + 1;
7580 int low0 = floor_log2 (nz0 & -nz0);
7581 int low1 = floor_log2 (nz1 & -nz1);
7582 HOST_WIDE_INT op0_maybe_minusp
7583 = (nz0 & ((HOST_WIDE_INT) 1 << (mode_width - 1)));
7584 HOST_WIDE_INT op1_maybe_minusp
7585 = (nz1 & ((HOST_WIDE_INT) 1 << (mode_width - 1)));
7586 int result_width = mode_width;
7587 int result_low = 0;
7588
7589 switch (code)
7590 {
7591 case PLUS:
7592#ifdef STACK_BIAS
7593 if (STACK_BIAS
7594 && (XEXP (x, 0) == stack_pointer_rtx
7595 || XEXP (x, 0) == frame_pointer_rtx)
7596 && GET_CODE (XEXP (x, 1)) == CONST_INT)
7597 {
7598 int sp_alignment = STACK_BOUNDARY / BITS_PER_UNIT;
7599
7600 nz0 = (GET_MODE_MASK (mode) & ~ (sp_alignment - 1));
7601 nz1 = INTVAL (XEXP (x, 1)) - STACK_BIAS;
7602 width0 = floor_log2 (nz0) + 1;
7603 width1 = floor_log2 (nz1) + 1;
7604 low0 = floor_log2 (nz0 & -nz0);
7605 low1 = floor_log2 (nz1 & -nz1);
7606 }
7607#endif
7608 result_width = MAX (width0, width1) + 1;
7609 result_low = MIN (low0, low1);
7610 break;
7611 case MINUS:
7612 result_low = MIN (low0, low1);
7613 break;
7614 case MULT:
7615 result_width = width0 + width1;
7616 result_low = low0 + low1;
7617 break;
7618 case DIV:
7619 if (! op0_maybe_minusp && ! op1_maybe_minusp)
7620 result_width = width0;
7621 break;
7622 case UDIV:
7623 result_width = width0;
7624 break;
7625 case MOD:
7626 if (! op0_maybe_minusp && ! op1_maybe_minusp)
7627 result_width = MIN (width0, width1);
7628 result_low = MIN (low0, low1);
7629 break;
7630 case UMOD:
7631 result_width = MIN (width0, width1);
7632 result_low = MIN (low0, low1);
7633 break;
7634 default:
7635 abort ();
7636 }
7637
7638 if (result_width < mode_width)
951553af 7639 nonzero &= ((HOST_WIDE_INT) 1 << result_width) - 1;
7640
7641 if (result_low > 0)
951553af 7642 nonzero &= ~ (((HOST_WIDE_INT) 1 << result_low) - 1);
7643 }
7644 break;
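/* A worked instance of the width bookkeeping above (illustration
   only): if nz0 == 0x0f (width0 = 4, low0 = 0) and nz1 == 0x30
   (width1 = 6, low1 = 4), then for PLUS result_width is
   MAX (4, 6) + 1 == 7 and result_low is 0, masking nonzero to 0x7f;
   for MULT result_width is 4 + 6 == 10 and result_low is 0 + 4 == 4,
   so bits outside 0x3f0 are cleared.  */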
7645
7646 case ZERO_EXTRACT:
7647 if (GET_CODE (XEXP (x, 1)) == CONST_INT
5f4f0e22 7648 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
951553af 7649 nonzero &= ((HOST_WIDE_INT) 1 << INTVAL (XEXP (x, 1))) - 1;
7650 break;
7651
7652 case SUBREG:
7653 /* If this is a SUBREG formed for a promoted variable that has
7654 been zero-extended, we know that at least the high-order bits
7655 are zero, though others might be too. */
7656
7657 if (SUBREG_PROMOTED_VAR_P (x) && SUBREG_PROMOTED_UNSIGNED_P (x))
7658 nonzero = (GET_MODE_MASK (GET_MODE (x))
7659 & nonzero_bits (SUBREG_REG (x), GET_MODE (x)));
c3c2cb37 7660
7661 /* If the inner mode is a single word for both the host and target
7662 machines, we can compute this from which bits of the inner
951553af 7663 object might be nonzero. */
230d793d 7664 if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) <= BITS_PER_WORD
7665 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
7666 <= HOST_BITS_PER_WIDE_INT))
230d793d 7667 {
951553af 7668 nonzero &= nonzero_bits (SUBREG_REG (x), mode);
7669
7670#ifndef WORD_REGISTER_OPERATIONS
7671 /* On many CISC machines, accessing an object in a wider mode
7672 causes the high-order bits to become undefined. So they are
7673 not known to be zero. */
7674 if (GET_MODE_SIZE (GET_MODE (x))
7675 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
7676 nonzero |= (GET_MODE_MASK (GET_MODE (x))
7677 & ~ GET_MODE_MASK (GET_MODE (SUBREG_REG (x))));
7678#endif
7679 }
7680 break;
7681
7682 case ASHIFTRT:
7683 case LSHIFTRT:
7684 case ASHIFT:
230d793d 7685 case ROTATE:
951553af 7686 /* The nonzero bits are in two classes: any bits within MODE
230d793d 7687 that aren't in GET_MODE (x) are always significant. The rest of the
951553af 7688 nonzero bits are those that are significant in the operand of
7689 the shift when shifted the appropriate number of bits. This
7690 shows that high-order bits are cleared by the right shift and
7691 low-order bits by left shifts. */
7692 if (GET_CODE (XEXP (x, 1)) == CONST_INT
7693 && INTVAL (XEXP (x, 1)) >= 0
5f4f0e22 7694 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
7695 {
7696 enum machine_mode inner_mode = GET_MODE (x);
7697 int width = GET_MODE_BITSIZE (inner_mode);
7698 int count = INTVAL (XEXP (x, 1));
5f4f0e22 7699 unsigned HOST_WIDE_INT mode_mask = GET_MODE_MASK (inner_mode);
7700 unsigned HOST_WIDE_INT op_nonzero = nonzero_bits (XEXP (x, 0), mode);
7701 unsigned HOST_WIDE_INT inner = op_nonzero & mode_mask;
5f4f0e22 7702 unsigned HOST_WIDE_INT outer = 0;
7703
7704 if (mode_width > width)
951553af 7705 outer = (op_nonzero & nonzero & ~ mode_mask);
7706
7707 if (code == LSHIFTRT)
7708 inner >>= count;
7709 else if (code == ASHIFTRT)
7710 {
7711 inner >>= count;
7712
951553af 7713 /* If the sign bit may have been nonzero before the shift, we
230d793d 7714 need to mark all the places it could have been copied to
951553af 7715 by the shift as possibly nonzero. */
7716 if (inner & ((HOST_WIDE_INT) 1 << (width - 1 - count)))
7717 inner |= (((HOST_WIDE_INT) 1 << count) - 1) << (width - count);
230d793d 7718 }
45620ed4 7719 else if (code == ASHIFT)
7720 inner <<= count;
7721 else
7722 inner = ((inner << (count % width)
7723 | (inner >> (width - (count % width)))) & mode_mask);
7724
951553af 7725 nonzero &= (outer | inner);
7726 }
7727 break;
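/* Example of the ASHIFTRT handling above (not from the source): in an
   8 bit mode with count == 2 and op_nonzero == 0x80, the logical part
   of the shift gives inner == 0x20; since the sign bit may have been
   set, the two vacated high-order positions are marked as well,
       inner |= ((1 << 2) - 1) << (8 - 2),  i.e. inner |= 0xc0,
   so inner ends up 0xe0.  */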
7728
7729 case FFS:
7730 /* This is at most the number of bits in the mode. */
951553af 7731 nonzero = ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width) + 1)) - 1;
230d793d 7732 break;
7733
7734 case IF_THEN_ELSE:
7735 nonzero &= (nonzero_bits (XEXP (x, 1), mode)
7736 | nonzero_bits (XEXP (x, 2), mode));
d0ab8cd3 7737 break;
7738
7739 default:
7740 break;
7741 }
7742
951553af 7743 return nonzero;
230d793d 7744}
7745
7746/* See the macro definition above. */
7747#undef num_sign_bit_copies
230d793d 7748\f
d0ab8cd3 7749/* Return the number of bits at the high-order end of X that are known to
7750 be equal to the sign bit. X will be used in mode MODE; if MODE is
7751 VOIDmode, X will be used in its own mode. The returned value will always
7752 be between 1 and the number of bits in MODE. */
7753
7754static int
7755num_sign_bit_copies (x, mode)
7756 rtx x;
7757 enum machine_mode mode;
7758{
7759 enum rtx_code code = GET_CODE (x);
7760 int bitwidth;
7761 int num0, num1, result;
951553af 7762 unsigned HOST_WIDE_INT nonzero;
7763 rtx tem;
7764
7765 /* If we weren't given a mode, use the mode of X. If the mode is still
7766 VOIDmode, we don't know anything. Likewise if one of the modes is
7767 floating-point. */
7768
7769 if (mode == VOIDmode)
7770 mode = GET_MODE (x);
7771
1c75dfa4 7772 if (mode == VOIDmode || FLOAT_MODE_P (mode) || FLOAT_MODE_P (GET_MODE (x)))
6752e8d2 7773 return 1;
7774
7775 bitwidth = GET_MODE_BITSIZE (mode);
7776
0f41302f 7777 /* For a smaller object, just ignore the high bits. */
7778 if (bitwidth < GET_MODE_BITSIZE (GET_MODE (x)))
7779 return MAX (1, (num_sign_bit_copies (x, GET_MODE (x))
7780 - (GET_MODE_BITSIZE (GET_MODE (x)) - bitwidth)));
7781
7782 if (GET_MODE (x) != VOIDmode && bitwidth > GET_MODE_BITSIZE (GET_MODE (x)))
7783 {
7784#ifndef WORD_REGISTER_OPERATIONS
7785 /* If this machine does not do all register operations on the entire
7786 register and MODE is wider than the mode of X, we can say nothing
7787 at all about the high-order bits. */
7788 return 1;
7789#else
7790 /* Likewise on machines that do, if the mode of the object is smaller
7791 than a word and loads of that size don't sign extend, we can say
7792 nothing about the high order bits. */
7793 if (GET_MODE_BITSIZE (GET_MODE (x)) < BITS_PER_WORD
7794#ifdef LOAD_EXTEND_OP
7795 && LOAD_EXTEND_OP (GET_MODE (x)) != SIGN_EXTEND
7796#endif
7797 )
7798 return 1;
0c314d1a 7799#endif
e9a25f70 7800 }
0c314d1a 7801
7802 switch (code)
7803 {
7804 case REG:
55310dad 7805
7806#ifdef POINTERS_EXTEND_UNSIGNED
7807 /* If pointers extend signed and this is a pointer in Pmode, say that
7808 all the bits above ptr_mode are known to be sign bit copies. */
7809 if (! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode && mode == Pmode
7810 && REGNO_POINTER_FLAG (REGNO (x)))
7811 return GET_MODE_BITSIZE (Pmode) - GET_MODE_BITSIZE (ptr_mode) + 1;
7812#endif
7813
7814 if (reg_last_set_value[REGNO (x)] != 0
7815 && reg_last_set_mode[REGNO (x)] == mode
b1f21e0a 7816 && (REG_N_SETS (REGNO (x)) == 1
7817 || reg_last_set_label[REGNO (x)] == label_tick)
7818 && INSN_CUID (reg_last_set[REGNO (x)]) < subst_low_cuid)
7819 return reg_last_set_sign_bit_copies[REGNO (x)];
7820
7821 tem = get_last_value (x);
7822 if (tem != 0)
7823 return num_sign_bit_copies (tem, mode);
7824
7825 if (nonzero_sign_valid && reg_sign_bit_copies[REGNO (x)] != 0)
7826 return reg_sign_bit_copies[REGNO (x)];
7827 break;
7828
457816e2 7829 case MEM:
8baf60bb 7830#ifdef LOAD_EXTEND_OP
457816e2 7831 /* Some RISC machines sign-extend all loads of smaller than a word. */
7832 if (LOAD_EXTEND_OP (GET_MODE (x)) == SIGN_EXTEND)
7833 return MAX (1, bitwidth - GET_MODE_BITSIZE (GET_MODE (x)) + 1);
457816e2 7834#endif
8baf60bb 7835 break;
457816e2 7836
7837 case CONST_INT:
7838 /* If the constant is negative, take its 1's complement and remask.
7839 Then see how many zero bits we have. */
951553af 7840 nonzero = INTVAL (x) & GET_MODE_MASK (mode);
ac49a949 7841 if (bitwidth <= HOST_BITS_PER_WIDE_INT
7842 && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
7843 nonzero = (~ nonzero) & GET_MODE_MASK (mode);
d0ab8cd3 7844
951553af 7845 return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
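/* Worked example (illustrative): in an 8 bit mode the constant -4 is
   11111100; its complement under the mode mask is 0x03, so
   floor_log2 (0x03) == 1 and the result is 8 - 1 - 1 == 6, the six
   leading copies of the sign bit.  */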
7846
7847 case SUBREG:
7848 /* If this is a SUBREG for a promoted object that is sign-extended
7849 and we are looking at it in a wider mode, we know that at least the
7850 high-order bits are copies of the sign bit. */
7851
7852 if (SUBREG_PROMOTED_VAR_P (x) && ! SUBREG_PROMOTED_UNSIGNED_P (x))
7853 return MAX (bitwidth - GET_MODE_BITSIZE (GET_MODE (x)) + 1,
7854 num_sign_bit_copies (SUBREG_REG (x), mode));
c3c2cb37 7855
0f41302f 7856 /* For a smaller object, just ignore the high bits. */
7857 if (bitwidth <= GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))))
7858 {
7859 num0 = num_sign_bit_copies (SUBREG_REG (x), VOIDmode);
7860 return MAX (1, (num0
7861 - (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
7862 - bitwidth)));
7863 }
457816e2 7864
8baf60bb 7865#ifdef WORD_REGISTER_OPERATIONS
2aec5b7a 7866#ifdef LOAD_EXTEND_OP
7867 /* For paradoxical SUBREGs on machines where all register operations
7868 affect the entire register, just look inside. Note that we are
7869 passing MODE to the recursive call, so the number of sign bit copies
7870 will remain relative to that mode, not the inner mode. */
457816e2 7871
7872 /* This works only if loads sign extend. Otherwise, if we get a
7873 reload for the inner part, it may be loaded from the stack, and
7874 then we lose all sign bit copies that existed before the store
7875 to the stack. */
7876
7877 if ((GET_MODE_SIZE (GET_MODE (x))
7878 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
7879 && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND)
457816e2 7880 return num_sign_bit_copies (SUBREG_REG (x), mode);
2aec5b7a 7881#endif
457816e2 7882#endif
7883 break;
7884
7885 case SIGN_EXTRACT:
7886 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
7887 return MAX (1, bitwidth - INTVAL (XEXP (x, 1)));
7888 break;
7889
7890 case SIGN_EXTEND:
7891 return (bitwidth - GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
7892 + num_sign_bit_copies (XEXP (x, 0), VOIDmode));
7893
7894 case TRUNCATE:
0f41302f 7895 /* For a smaller object, just ignore the high bits. */
7896 num0 = num_sign_bit_copies (XEXP (x, 0), VOIDmode);
7897 return MAX (1, (num0 - (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
7898 - bitwidth)));
7899
7900 case NOT:
7901 return num_sign_bit_copies (XEXP (x, 0), mode);
7902
7903 case ROTATE: case ROTATERT:
7904 /* If we are rotating left by a number of bits less than the number
7905 of sign bit copies, we can just subtract that amount from the
7906 number. */
7907 if (GET_CODE (XEXP (x, 1)) == CONST_INT
7908 && INTVAL (XEXP (x, 1)) >= 0 && INTVAL (XEXP (x, 1)) < bitwidth)
7909 {
7910 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
7911 return MAX (1, num0 - (code == ROTATE ? INTVAL (XEXP (x, 1))
7912 : bitwidth - INTVAL (XEXP (x, 1))));
7913 }
7914 break;
7915
7916 case NEG:
7917 /* In general, this subtracts one sign bit copy. But if the value
7918 is known to be positive, the number of sign bit copies is the
7919 same as that of the input. Finally, if the input has just one bit
7920 that might be nonzero, all the bits are copies of the sign bit. */
7921 nonzero = nonzero_bits (XEXP (x, 0), mode);
7922 if (nonzero == 1)
7923 return bitwidth;
7924
7925 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
7926 if (num0 > 1
ac49a949 7927 && bitwidth <= HOST_BITS_PER_WIDE_INT
951553af 7928 && (((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero))
7929 num0--;
7930
7931 return num0;
7932
7933 case IOR: case AND: case XOR:
7934 case SMIN: case SMAX: case UMIN: case UMAX:
7935 /* Logical operations will preserve the number of sign-bit copies.
7936 MIN and MAX operations always return one of the operands. */
7937 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
7938 num1 = num_sign_bit_copies (XEXP (x, 1), mode);
7939 return MIN (num0, num1);
7940
7941 case PLUS: case MINUS:
7942 /* For addition and subtraction, we can have a 1-bit carry. However,
7943 if we are subtracting 1 from a positive number, there will not
7944 be such a carry. Furthermore, if the positive number is known to
7945 be 0 or 1, we know the result is either -1 or 0. */
7946
3e3ea975 7947 if (code == PLUS && XEXP (x, 1) == constm1_rtx
9295e6af 7948 && bitwidth <= HOST_BITS_PER_WIDE_INT)
d0ab8cd3 7949 {
7950 nonzero = nonzero_bits (XEXP (x, 0), mode);
7951 if ((((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero) == 0)
7952 return (nonzero == 1 || nonzero == 0 ? bitwidth
7953 : bitwidth - floor_log2 (nonzero) - 1);
7954 }
7955
7956 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
7957 num1 = num_sign_bit_copies (XEXP (x, 1), mode);
7958 return MAX (1, MIN (num0, num1) - 1);
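/* Illustration of the carry rule above: two 8 bit values each with
   six sign bit copies lie in [-4, 3]; their sum lies in [-8, 6],
   which needs four significant bits, so MIN (6, 6) - 1 == 5 copies
   survive the addition.  */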
7959
7960 case MULT:
7961 /* The number of bits of the product is the sum of the number of
7962 bits of both terms. However, unless one of the terms is known
7963 to be positive, we must allow for an additional bit since negating
7964 a negative number can remove one sign bit copy. */
7965
7966 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
7967 num1 = num_sign_bit_copies (XEXP (x, 1), mode);
7968
7969 result = bitwidth - (bitwidth - num0) - (bitwidth - num1);
7970 if (result > 0
9295e6af 7971 && bitwidth <= HOST_BITS_PER_WIDE_INT
951553af 7972 && ((nonzero_bits (XEXP (x, 0), mode)
d0ab8cd3 7973 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
7974 && ((nonzero_bits (XEXP (x, 1), mode)
7975 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
7976 result--;
7977
7978 return MAX (1, result);
7979
7980 case UDIV:
7981 /* The result must be <= the first operand. */
7982 return num_sign_bit_copies (XEXP (x, 0), mode);
7983
7984 case UMOD:
7985 /* The result must be <= the second operand. */
7986 return num_sign_bit_copies (XEXP (x, 1), mode);
7987
7988 case DIV:
7989 /* Similar to unsigned division, except that we have to worry about
7990 the case where the divisor is negative, in which case we have
7991 to add 1. */
7992 result = num_sign_bit_copies (XEXP (x, 0), mode);
7993 if (result > 1
ac49a949 7994 && bitwidth <= HOST_BITS_PER_WIDE_INT
951553af 7995 && (nonzero_bits (XEXP (x, 1), mode)
7996 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
7997 result--;
7998
7999 return result;
8000
8001 case MOD:
8002 result = num_sign_bit_copies (XEXP (x, 1), mode);
8003 if (result > 1
ac49a949 8004 && bitwidth <= HOST_BITS_PER_WIDE_INT
951553af 8005 && (nonzero_bits (XEXP (x, 1), mode)
8006 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
8007 result--;
8008
8009 return result;
8010
8011 case ASHIFTRT:
8012 /* Shifts by a constant add to the number of bits equal to the
8013 sign bit. */
8014 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
8015 if (GET_CODE (XEXP (x, 1)) == CONST_INT
8016 && INTVAL (XEXP (x, 1)) > 0)
8017 num0 = MIN (bitwidth, num0 + INTVAL (XEXP (x, 1)));
8018
8019 return num0;
8020
8021 case ASHIFT:
8022 /* Left shifts destroy copies. */
8023 if (GET_CODE (XEXP (x, 1)) != CONST_INT
8024 || INTVAL (XEXP (x, 1)) < 0
8025 || INTVAL (XEXP (x, 1)) >= bitwidth)
8026 return 1;
8027
8028 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
8029 return MAX (1, num0 - INTVAL (XEXP (x, 1)));
8030
8031 case IF_THEN_ELSE:
8032 num0 = num_sign_bit_copies (XEXP (x, 1), mode);
8033 num1 = num_sign_bit_copies (XEXP (x, 2), mode);
8034 return MIN (num0, num1);
8035
8036 case EQ: case NE: case GE: case GT: case LE: case LT:
8037 case GEU: case GTU: case LEU: case LTU:
8038 if (STORE_FLAG_VALUE == -1)
8039 return bitwidth;
8040 break;
8041
8042 default:
8043 break;
8044 }
8045
8046 /* If we haven't been able to figure it out by one of the above rules,
8047 see if some of the high-order bits are known to be zero. If so,
8048 count those bits and return one less than that amount. If we can't
8049 safely compute the mask for this mode, always return BITWIDTH. */
8050
8051 if (bitwidth > HOST_BITS_PER_WIDE_INT)
6752e8d2 8052 return 1;
d0ab8cd3 8053
951553af 8054 nonzero = nonzero_bits (x, mode);
df6f4086 8055 return (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))
951553af 8056 ? 1 : bitwidth - floor_log2 (nonzero) - 1);
8057}
8058\f
8059/* Return the number of "extended" bits there are in X, when interpreted
8060 as a quantity in MODE whose signedness is indicated by UNSIGNEDP. For
8061 unsigned quantities, this is the number of high-order zero bits.
8062 For signed quantities, this is the number of copies of the sign bit
8063 minus 1. In both cases, this function returns the number of "spare"
8064 bits. For example, if two quantities for which this function returns
8065 at least 1 are added, the addition is known not to overflow.
8066
8067 This function will always return 0 unless called during combine, which
8068 implies that it must be called from a define_split. */
8069
8070int
8071extended_count (x, mode, unsignedp)
8072 rtx x;
8073 enum machine_mode mode;
8074 int unsignedp;
8075{
951553af 8076 if (nonzero_sign_valid == 0)
8077 return 0;
8078
8079 return (unsignedp
8080 ? (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
8081 && (GET_MODE_BITSIZE (mode) - 1
951553af 8082 - floor_log2 (nonzero_bits (x, mode))))
8083 : num_sign_bit_copies (x, mode) - 1);
8084}
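/* Hypothetical use, as from a define_split in a machine description:
   if X is known to fit in the low 8 bits of SImode, then
       extended_count (x, SImode, 1) >= 24
   (24 high-order zero bits), so adding two such quantities is known
   not to overflow.  */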
8085\f
8086/* This function is called from `simplify_shift_const' to merge two
8087 outer operations. Specifically, we have already found that we need
8088 to perform operation *POP0 with constant *PCONST0 at the outermost
8089 position. We would now like to also perform OP1 with constant CONST1
8090 (with *POP0 being done last).
8091
8092 Return 1 if we can do the operation and update *POP0 and *PCONST0 with
8093 the resulting operation. *PCOMP_P is set to 1 if we would need to
8094 complement the innermost operand, otherwise it is unchanged.
8095
8096 MODE is the mode in which the operation will be done. No bits outside
8097 the width of this mode matter. It is assumed that the width of this mode
5f4f0e22 8098 is smaller than or equal to HOST_BITS_PER_WIDE_INT.
8099
8100 If *POP0 or OP1 is NIL, it means no operation is required. Only NEG, PLUS,
8101 IOR, XOR, and AND are supported. We may set *POP0 to SET if the proper
8102 result is simply *PCONST0.
8103
8104 If the resulting operation cannot be expressed as one operation, we
8105 return 0 and do not change *POP0, *PCONST0, and *PCOMP_P. */
8106
8107static int
8108merge_outer_ops (pop0, pconst0, op1, const1, mode, pcomp_p)
8109 enum rtx_code *pop0;
5f4f0e22 8110 HOST_WIDE_INT *pconst0;
230d793d 8111 enum rtx_code op1;
5f4f0e22 8112 HOST_WIDE_INT const1;
8113 enum machine_mode mode;
8114 int *pcomp_p;
8115{
8116 enum rtx_code op0 = *pop0;
5f4f0e22 8117 HOST_WIDE_INT const0 = *pconst0;
9fa6d012 8118 int width = GET_MODE_BITSIZE (mode);
8119
8120 const0 &= GET_MODE_MASK (mode);
8121 const1 &= GET_MODE_MASK (mode);
8122
8123 /* If OP0 is an AND, clear unimportant bits in CONST1. */
8124 if (op0 == AND)
8125 const1 &= const0;
8126
8127 /* If OP0 or OP1 is NIL, this is easy. Similarly if they are the same or
8128 if OP0 is SET. */
8129
8130 if (op1 == NIL || op0 == SET)
8131 return 1;
8132
8133 else if (op0 == NIL)
8134 op0 = op1, const0 = const1;
8135
8136 else if (op0 == op1)
8137 {
8138 switch (op0)
8139 {
8140 case AND:
8141 const0 &= const1;
8142 break;
8143 case IOR:
8144 const0 |= const1;
8145 break;
8146 case XOR:
8147 const0 ^= const1;
8148 break;
8149 case PLUS:
8150 const0 += const1;
8151 break;
8152 case NEG:
8153 op0 = NIL;
8154 break;
8155 default:
8156 break;
8157 }
8158 }
8159
8160 /* Otherwise, if either is a PLUS or NEG, we can't do anything. */
8161 else if (op0 == PLUS || op1 == PLUS || op0 == NEG || op1 == NEG)
8162 return 0;
8163
8164 /* If the two constants aren't the same, we can't do anything. The
8165 remaining six cases can all be done. */
8166 else if (const0 != const1)
8167 return 0;
8168
8169 else
8170 switch (op0)
8171 {
8172 case IOR:
8173 if (op1 == AND)
8174 /* (a & b) | b == b */
8175 op0 = SET;
8176 else /* op1 == XOR */
8177 /* (a ^ b) | b == a | b */
b729186a 8178 {;}
8179 break;
8180
8181 case XOR:
8182 if (op1 == AND)
8183 /* (a & b) ^ b == (~a) & b */
8184 op0 = AND, *pcomp_p = 1;
8185 else /* op1 == IOR */
8186 /* (a | b) ^ b == a & ~b */
8187 op0 = AND, *pconst0 = ~ const0;
8188 break;
8189
8190 case AND:
8191 if (op1 == IOR)
8192 /* (a | b) & b == b */
8193 op0 = SET;
8194 else /* op1 == XOR */
8195 /* (a ^ b) & b) == (~a) & b */
8196 *pcomp_p = 1;
8197 break;
8198 default:
8199 break;
8200 }
8201
8202 /* Check for NO-OP cases. */
8203 const0 &= GET_MODE_MASK (mode);
8204 if (const0 == 0
8205 && (op0 == IOR || op0 == XOR || op0 == PLUS))
8206 op0 = NIL;
8207 else if (const0 == 0 && op0 == AND)
8208 op0 = SET;
8209 else if (const0 == GET_MODE_MASK (mode) && op0 == AND)
8210 op0 = NIL;
8211
8212 /* If this would be an entire word for the target, but is not for
8213 the host, then sign-extend on the host so that the number will look
8214 the same way on the host that it would on the target.
8215
8216 For example, when building a 64 bit alpha hosted 32 bit sparc
8217 targeted compiler, then we want the 32 bit unsigned value -1 to be
8218 represented as a 64 bit value -1, and not as 0x00000000ffffffff.
8219 The latter confuses the sparc backend. */
8220
8221 if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT && BITS_PER_WORD == width
8222 && (const0 & ((HOST_WIDE_INT) 1 << (width - 1))))
8223 const0 |= ((HOST_WIDE_INT) (-1) << width);
8224
8225 *pop0 = op0;
8226 *pconst0 = const0;
8227
8228 return 1;
8229}
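/* Illustrative merge (not from the source): with *POP0 == AND,
   *PCONST0 == 0x0f and OP1 == IOR, CONST1 == 0x0f, the identity
   (a | b) & b == b applies, so *POP0 becomes SET and the combined
   result is simply the constant 0x0f.  */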
8230\f
8231/* Simplify a shift of VAROP by COUNT bits. CODE says what kind of shift.
8232 The result of the shift is RESULT_MODE. X, if non-zero, is an expression
8233 that we started with.
8234
8235 The shift is normally computed in the widest mode we find in VAROP, as
8236 long as it isn't a different number of words than RESULT_MODE. Exceptions
8237 are ASHIFTRT and ROTATE, which are always done in their original mode. */
8238
8239static rtx
8240simplify_shift_const (x, code, result_mode, varop, count)
8241 rtx x;
8242 enum rtx_code code;
8243 enum machine_mode result_mode;
8244 rtx varop;
8245 int count;
8246{
8247 enum rtx_code orig_code = code;
8248 int orig_count = count;
8249 enum machine_mode mode = result_mode;
8250 enum machine_mode shift_mode, tmode;
8251 int mode_words
8252 = (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
8253 /* We form (outer_op (code varop count) (outer_const)). */
8254 enum rtx_code outer_op = NIL;
c4e861e8 8255 HOST_WIDE_INT outer_const = 0;
8256 rtx const_rtx;
8257 int complement_p = 0;
8258 rtx new;
8259
8260 /* If we were given an invalid count, don't do anything except exactly
8261 what was requested. */
8262
8263 if (count < 0 || count > GET_MODE_BITSIZE (mode))
8264 {
8265 if (x)
8266 return x;
8267
38a448ca 8268 return gen_rtx_fmt_ee (code, mode, varop, GEN_INT (count));
8269 }
8270
8271 /* Unless one of the branches of the `if' in this loop does a `continue',
8272 we will `break' the loop after the `if'. */
8273
8274 while (count != 0)
8275 {
8276 /* If we have an operand of (clobber (const_int 0)), just return that
8277 value. */
8278 if (GET_CODE (varop) == CLOBBER)
8279 return varop;
8280
8281 /* If we discovered we had to complement VAROP, leave. Making a NOT
8282 here would cause an infinite loop. */
8283 if (complement_p)
8284 break;
8285
abc95ed3 8286 /* Convert ROTATERT to ROTATE. */
8287 if (code == ROTATERT)
8288 code = ROTATE, count = GET_MODE_BITSIZE (result_mode) - count;
8289
230d793d 8290 /* We need to determine what mode we will do the shift in. If the
8291 shift is a right shift or a ROTATE, we must always do it in the mode
8292 it was originally done in. Otherwise, we can do it in MODE, the
0f41302f 8293 widest mode encountered. */
8294 shift_mode
8295 = (code == ASHIFTRT || code == LSHIFTRT || code == ROTATE
8296 ? result_mode : mode);
8297
8298 /* Handle cases where the count is greater than the size of the mode
8299 minus 1. For ASHIFT, use the size minus one as the count (this can
8300 occur when simplifying (lshiftrt (ashiftrt ..))). For rotates,
8301 take the count modulo the size. For other shifts, the result is
8302 zero.
8303
8304 Since these shifts are being produced by the compiler by combining
8305 multiple operations, each of which are defined, we know what the
8306 result is supposed to be. */
8307
8308 if (count > GET_MODE_BITSIZE (shift_mode) - 1)
8309 {
8310 if (code == ASHIFTRT)
8311 count = GET_MODE_BITSIZE (shift_mode) - 1;
8312 else if (code == ROTATE || code == ROTATERT)
8313 count %= GET_MODE_BITSIZE (shift_mode);
8314 else
8315 {
8316 /* We can't simply return zero because there may be an
8317 outer op. */
8318 varop = const0_rtx;
8319 count = 0;
8320 break;
8321 }
8322 }
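/* Examples of the count adjustments above (illustration only): in an
   8 bit mode, (ashiftrt X 11) is treated as (ashiftrt X 7), which
   smears the sign bit across the word; (rotate X 11) becomes
   (rotate X 3); and (lshiftrt X 11) is the constant zero, still
   subject to any outer operation recorded in OUTER_OP.  */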
8323
8324 /* Negative counts are invalid and should not have been made (a
8325 programmer-specified negative count should have been handled
0f41302f 8326 above). */
8327 else if (count < 0)
8328 abort ();
8329
8330 /* An arithmetic right shift of a quantity known to be -1 or 0
8331 is a no-op. */
8332 if (code == ASHIFTRT
8333 && (num_sign_bit_copies (varop, shift_mode)
8334 == GET_MODE_BITSIZE (shift_mode)))
d0ab8cd3 8335 {
8336 count = 0;
8337 break;
8338 }
d0ab8cd3 8339
8340 /* If we are doing an arithmetic right shift and discarding all but
8341 the sign bit copies, this is equivalent to doing a shift by the
8342 bitsize minus one. Convert it into that shift because it will often
8343 allow other simplifications. */
500c518b 8344
8345 if (code == ASHIFTRT
8346 && (count + num_sign_bit_copies (varop, shift_mode)
8347 >= GET_MODE_BITSIZE (shift_mode)))
8348 count = GET_MODE_BITSIZE (shift_mode) - 1;
500c518b 8349
8350 /* We simplify the tests below and elsewhere by converting
8351 ASHIFTRT to LSHIFTRT if we know the sign bit is clear.
8352 `make_compound_operation' will convert it to an ASHIFTRT for
8353 those machines (such as Vax) that don't have a LSHIFTRT. */
5f4f0e22 8354 if (GET_MODE_BITSIZE (shift_mode) <= HOST_BITS_PER_WIDE_INT
230d793d 8355 && code == ASHIFTRT
951553af 8356 && ((nonzero_bits (varop, shift_mode)
8357 & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (shift_mode) - 1)))
8358 == 0))
8359 code = LSHIFTRT;
8360
8361 switch (GET_CODE (varop))
8362 {
8363 case SIGN_EXTEND:
8364 case ZERO_EXTEND:
8365 case SIGN_EXTRACT:
8366 case ZERO_EXTRACT:
8367 new = expand_compound_operation (varop);
8368 if (new != varop)
8369 {
8370 varop = new;
8371 continue;
8372 }
8373 break;
8374
8375 case MEM:
8376 /* If we have (xshiftrt (mem ...) C) and C is MODE_WIDTH
8377 minus the width of a smaller mode, we can do this with a
8378 SIGN_EXTEND or ZERO_EXTEND from the narrower memory location. */
8379 if ((code == ASHIFTRT || code == LSHIFTRT)
8380 && ! mode_dependent_address_p (XEXP (varop, 0))
8381 && ! MEM_VOLATILE_P (varop)
8382 && (tmode = mode_for_size (GET_MODE_BITSIZE (mode) - count,
8383 MODE_INT, 1)) != BLKmode)
8384 {
f76b9db2 8385 if (BYTES_BIG_ENDIAN)
38a448ca 8386 new = gen_rtx_MEM (tmode, XEXP (varop, 0));
f76b9db2 8387 else
8388 new = gen_rtx_MEM (tmode,
8389 plus_constant (XEXP (varop, 0),
8390 count / BITS_PER_UNIT));
8391 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (varop);
8392 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (varop);
8393 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (varop);
8394 varop = gen_rtx_combine (code == ASHIFTRT ? SIGN_EXTEND
8395 : ZERO_EXTEND, mode, new);
8396 count = 0;
8397 continue;
8398 }
8399 break;
8400
8401 case USE:
8402 /* Similar to the case above, except that we can only do this if
8403 the resulting mode is the same as that of the underlying
8404 MEM and adjust the address depending on the *bits* endianness
8405 because of the way that bit-field extract insns are defined. */
8406 if ((code == ASHIFTRT || code == LSHIFTRT)
8407 && (tmode = mode_for_size (GET_MODE_BITSIZE (mode) - count,
8408 MODE_INT, 1)) != BLKmode
8409 && tmode == GET_MODE (XEXP (varop, 0)))
8410 {
8411 if (BITS_BIG_ENDIAN)
8412 new = XEXP (varop, 0);
8413 else
8414 {
8415 new = copy_rtx (XEXP (varop, 0));
8416 SUBST (XEXP (new, 0),
8417 plus_constant (XEXP (new, 0),
8418 count / BITS_PER_UNIT));
8419 }
8420
8421 varop = gen_rtx_combine (code == ASHIFTRT ? SIGN_EXTEND
8422 : ZERO_EXTEND, mode, new);
8423 count = 0;
8424 continue;
8425 }
8426 break;
8427
8428 case SUBREG:
8429 /* If VAROP is a SUBREG, strip it as long as the inner operand has
8430 the same number of words as what we've seen so far. Then store
8431 the widest mode in MODE. */
8432 if (subreg_lowpart_p (varop)
8433 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop)))
8434 > GET_MODE_SIZE (GET_MODE (varop)))
8435 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop)))
8436 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
8437 == mode_words))
8438 {
8439 varop = SUBREG_REG (varop);
8440 if (GET_MODE_SIZE (GET_MODE (varop)) > GET_MODE_SIZE (mode))
8441 mode = GET_MODE (varop);
8442 continue;
8443 }
8444 break;
8445
8446 case MULT:
8447 /* Some machines use MULT instead of ASHIFT because MULT
8448 is cheaper. But it is still better on those machines to
8449 merge two shifts into one. */
8450 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
8451 && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0)
8452 {
8453 varop = gen_binary (ASHIFT, GET_MODE (varop), XEXP (varop, 0),
5f4f0e22 8454 GEN_INT (exact_log2 (INTVAL (XEXP (varop, 1)))));
8455 continue;
8456 }
8457 break;
8458
8459 case UDIV:
8460 /* Similar, for when divides are cheaper. */
8461 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
8462 && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0)
8463 {
8464 varop = gen_binary (LSHIFTRT, GET_MODE (varop), XEXP (varop, 0),
5f4f0e22 8465 GEN_INT (exact_log2 (INTVAL (XEXP (varop, 1)))));
8466 continue;
8467 }
8468 break;
8469
8470 case ASHIFTRT:
8471 /* If we are extracting just the sign bit of an arithmetic right
8472 shift, that shift is not needed. */
8473 if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1)
8474 {
8475 varop = XEXP (varop, 0);
8476 continue;
8477 }
8478
0f41302f 8479 /* ... fall through ... */
8480
8481 case LSHIFTRT:
8482 case ASHIFT:
8483 case ROTATE:
8484 /* Here we have two nested shifts. The result is usually the
8485 AND of a new shift with a mask. We compute the result below. */
8486 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
8487 && INTVAL (XEXP (varop, 1)) >= 0
8488 && INTVAL (XEXP (varop, 1)) < GET_MODE_BITSIZE (GET_MODE (varop))
8489 && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
8490 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
8491 {
8492 enum rtx_code first_code = GET_CODE (varop);
8493 int first_count = INTVAL (XEXP (varop, 1));
5f4f0e22 8494 unsigned HOST_WIDE_INT mask;
230d793d 8495 rtx mask_rtx;
RS
8497 /* We have one common special case. We can't do any merging if
8498 the inner code is an ASHIFTRT of a smaller mode. However, if
8499 we have (ashift:M1 (subreg:M1 (ashiftrt:M2 FOO C1) 0) C2)
8500 with C2 == GET_MODE_BITSIZE (M1) - GET_MODE_BITSIZE (M2),
8501 we can convert it to
8502 (ashiftrt:M1 (ashift:M1 (and:M1 (subreg:M1 FOO 0) C3) C2) C1).
8503 This simplifies certain SIGN_EXTEND operations. */
8504 if (code == ASHIFT && first_code == ASHIFTRT
8505 && (GET_MODE_BITSIZE (result_mode)
8506 - GET_MODE_BITSIZE (GET_MODE (varop))) == count)
8507 {
8508 /* C3 has the low-order C1 bits zero. */
8509
8510 mask = (GET_MODE_MASK (mode)
8511 & ~ (((HOST_WIDE_INT) 1 << first_count) - 1));
230d793d 8512
5f4f0e22 8513 varop = simplify_and_const_int (NULL_RTX, result_mode,
230d793d 8514 XEXP (varop, 0), mask);
5f4f0e22 8515 varop = simplify_shift_const (NULL_RTX, ASHIFT, result_mode,
8516 varop, count);
8517 count = first_count;
8518 code = ASHIFTRT;
8519 continue;
8520 }
8521
8522 /* If this was (ashiftrt (ashift foo C1) C2) and FOO has more
8523 than C1 high-order bits equal to the sign bit, we can convert
8524 this to either an ASHIFT or an ASHIFTRT depending on the
8525 two counts.
8526
8527 We cannot do this if VAROP's mode is not SHIFT_MODE. */
8528
8529 if (code == ASHIFTRT && first_code == ASHIFT
8530 && GET_MODE (varop) == shift_mode
8531 && (num_sign_bit_copies (XEXP (varop, 0), shift_mode)
8532 > first_count))
230d793d 8533 {
8534 count -= first_count;
8535 if (count < 0)
8536 count = - count, code = ASHIFT;
8537 varop = XEXP (varop, 0);
8538 continue;
8539 }
8540
8541 /* There are some cases we can't do. If CODE is ASHIFTRT,
8542 we can only do this if FIRST_CODE is also ASHIFTRT.
8543
8544 We can't do the case when CODE is ROTATE and FIRST_CODE is
8545 ASHIFTRT.
8546
8547 If the mode of this shift is not the mode of the outer shift,
bdaae9a0 8548 we can't do this if either shift is a right shift or ROTATE.
8549
8550 Finally, we can't do any of these if the mode is too wide
8551 unless the codes are the same.
8552
8553 Handle the case where the shift codes are the same
8554 first. */
8555
8556 if (code == first_code)
8557 {
8558 if (GET_MODE (varop) != result_mode
8559 && (code == ASHIFTRT || code == LSHIFTRT
8560 || code == ROTATE))
8561 break;
8562
8563 count += first_count;
8564 varop = XEXP (varop, 0);
8565 continue;
8566 }
8567
8568 if (code == ASHIFTRT
8569 || (code == ROTATE && first_code == ASHIFTRT)
5f4f0e22 8570 || GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT
230d793d 8571 || (GET_MODE (varop) != result_mode
8572 && (first_code == ASHIFTRT || first_code == LSHIFTRT
8573 || first_code == ROTATE
8574 || code == ROTATE)))
8575 break;
8576
8577 /* To compute the mask to apply after the shift, shift the
951553af 8578 nonzero bits of the inner shift the same way the
8579 outer shift will. */
8580
951553af 8581 mask_rtx = GEN_INT (nonzero_bits (varop, GET_MODE (varop)));
8582
8583 mask_rtx
8584 = simplify_binary_operation (code, result_mode, mask_rtx,
5f4f0e22 8585 GEN_INT (count));
8586
8587 /* Give up if we can't compute an outer operation to use. */
8588 if (mask_rtx == 0
8589 || GET_CODE (mask_rtx) != CONST_INT
8590 || ! merge_outer_ops (&outer_op, &outer_const, AND,
8591 INTVAL (mask_rtx),
8592 result_mode, &complement_p))
8593 break;
8594
8595 /* If the shifts are in the same direction, we add the
8596 counts. Otherwise, we subtract them. */
8597 if ((code == ASHIFTRT || code == LSHIFTRT)
8598 == (first_code == ASHIFTRT || first_code == LSHIFTRT))
8599 count += first_count;
8600 else
8601 count -= first_count;
8602
8603 /* If COUNT is positive, the new shift is usually CODE,
8604 except for the two exceptions below, in which case it is
8605 FIRST_CODE. If the count is negative, FIRST_CODE should
8606 always be used. */
8607 if (count > 0
8608 && ((first_code == ROTATE && code == ASHIFT)
8609 || (first_code == ASHIFTRT && code == LSHIFTRT)))
8610 code = first_code;
8611 else if (count < 0)
8612 code = first_code, count = - count;
8613
8614 varop = XEXP (varop, 0);
8615 continue;
8616 }
8617
8618 /* If we have (A << B << C) for any shift, we can convert this to
8619 (A << C << B). This wins if A is a constant. Only try this if
8620 B is not a constant. */
8621
8622 else if (GET_CODE (varop) == code
8623 && GET_CODE (XEXP (varop, 1)) != CONST_INT
8624 && 0 != (new
8625 = simplify_binary_operation (code, mode,
8626 XEXP (varop, 0),
5f4f0e22 8627 GEN_INT (count))))
8628 {
8629 varop = gen_rtx_combine (code, mode, new, XEXP (varop, 1));
8630 count = 0;
8631 continue;
8632 }
8633 break;
8634
8635 case NOT:
8636 /* Make this fit the case below. */
8637 varop = gen_rtx_combine (XOR, mode, XEXP (varop, 0),
5f4f0e22 8638 GEN_INT (GET_MODE_MASK (mode)));
8639 continue;
8640
8641 case IOR:
8642 case AND:
8643 case XOR:
8644 /* If we have (xshiftrt (ior (plus X (const_int -1)) X) C)
8645 with C the size of VAROP - 1 and the shift is logical if
8646 STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
8647 we have an (le X 0) operation. If we have an arithmetic shift
8648 and STORE_FLAG_VALUE is 1 or we have a logical shift with
8649 STORE_FLAG_VALUE of -1, we have a (neg (le X 0)) operation. */
8650
8651 if (GET_CODE (varop) == IOR && GET_CODE (XEXP (varop, 0)) == PLUS
8652 && XEXP (XEXP (varop, 0), 1) == constm1_rtx
8653 && (STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
8654 && (code == LSHIFTRT || code == ASHIFTRT)
8655 && count == GET_MODE_BITSIZE (GET_MODE (varop)) - 1
8656 && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
8657 {
8658 count = 0;
8659 varop = gen_rtx_combine (LE, GET_MODE (varop), XEXP (varop, 1),
8660 const0_rtx);
8661
8662 if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
8663 varop = gen_rtx_combine (NEG, GET_MODE (varop), varop);
8664
8665 continue;
8666 }
8667
8668 /* If we have (shift (logical)), move the logical to the outside
8669 to allow it to possibly combine with another logical and the
8670 shift to combine with another shift. This also canonicalizes to
8671 what a ZERO_EXTRACT looks like. Also, some machines have
8672 (and (shift)) insns. */
8673
8674 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
8675 && (new = simplify_binary_operation (code, result_mode,
8676 XEXP (varop, 1),
5f4f0e22 8677 GEN_INT (count))) != 0
7d171a1e 8678 && GET_CODE (new) == CONST_INT
8679 && merge_outer_ops (&outer_op, &outer_const, GET_CODE (varop),
8680 INTVAL (new), result_mode, &complement_p))
8681 {
8682 varop = XEXP (varop, 0);
8683 continue;
8684 }
8685
8686 /* If we can't do that, try to simplify the shift in each arm of the
8687 logical expression, make a new logical expression, and apply
8688 the inverse distributive law. */
8689 {
00d4ca1c 8690 rtx lhs = simplify_shift_const (NULL_RTX, code, shift_mode,
230d793d 8691 XEXP (varop, 0), count);
00d4ca1c 8692 rtx rhs = simplify_shift_const (NULL_RTX, code, shift_mode,
8693 XEXP (varop, 1), count);
8694
21a64bf1 8695 varop = gen_binary (GET_CODE (varop), shift_mode, lhs, rhs);
8696 varop = apply_distributive_law (varop);
8697
8698 count = 0;
8699 }
8700 break;
8701
8702 case EQ:
45620ed4 8703 /* Convert (lshiftrt (eq FOO 0) C) to (xor FOO 1) if STORE_FLAG_VALUE
230d793d 8704 says that the sign bit can be tested, FOO has mode MODE, C is
8705 GET_MODE_BITSIZE (MODE) - 1, and FOO has only its low-order bit
8706 that may be nonzero. */
8707 if (code == LSHIFTRT
8708 && XEXP (varop, 1) == const0_rtx
8709 && GET_MODE (XEXP (varop, 0)) == result_mode
8710 && count == GET_MODE_BITSIZE (result_mode) - 1
5f4f0e22 8711 && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
230d793d 8712 && ((STORE_FLAG_VALUE
5f4f0e22 8713 & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (result_mode) - 1))))
951553af 8714 && nonzero_bits (XEXP (varop, 0), result_mode) == 1
8715 && merge_outer_ops (&outer_op, &outer_const, XOR,
8716 (HOST_WIDE_INT) 1, result_mode,
8717 &complement_p))
8718 {
8719 varop = XEXP (varop, 0);
8720 count = 0;
8721 continue;
8722 }
8723 break;
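/* Illustration of the EQ rewrite above: with STORE_FLAG_VALUE == -1
   and FOO known to be 0 or 1, (eq FOO 0) is all ones exactly when
   FOO == 0, so
       (lshiftrt (eq FOO 0) (width - 1))  ->  (xor FOO 1).  */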
8724
8725 case NEG:
8726 /* (lshiftrt (neg A) C) where A is either 0 or 1 and C is one less
8727 than the number of bits in the mode is equivalent to A. */
8728 if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1
951553af 8729 && nonzero_bits (XEXP (varop, 0), result_mode) == 1)
230d793d 8730 {
d0ab8cd3 8731 varop = XEXP (varop, 0);
8732 count = 0;
8733 continue;
8734 }
8735
8736 /* NEG commutes with ASHIFT since it is multiplication. Move the
8737 NEG outside to allow shifts to combine. */
8738 if (code == ASHIFT
8739 && merge_outer_ops (&outer_op, &outer_const, NEG,
8740 (HOST_WIDE_INT) 0, result_mode,
8741 &complement_p))
8742 {
8743 varop = XEXP (varop, 0);
8744 continue;
8745 }
8746 break;
8747
8748 case PLUS:
8749 /* (lshiftrt (plus A -1) C) where A is either 0 or 1 and C
8750 is one less than the number of bits in the mode is
8751 equivalent to (xor A 1). */
230d793d
RS
8752 if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1
8753 && XEXP (varop, 1) == constm1_rtx
951553af 8754 && nonzero_bits (XEXP (varop, 0), result_mode) == 1
8755 && merge_outer_ops (&outer_op, &outer_const, XOR,
8756 (HOST_WIDE_INT) 1, result_mode,
8757 &complement_p))
8758 {
8759 count = 0;
8760 varop = XEXP (varop, 0);
8761 continue;
8762 }
8763
3f508eca 8764 /* If we have (xshiftrt (plus FOO BAR) C), and the only bits
951553af 8765 that might be nonzero in BAR are those being shifted out and those
8766 bits are known zero in FOO, we can replace the PLUS with FOO.
8767 Similarly in the other operand order. This code occurs when
8768 we are computing the size of a variable-size array. */
8769
8770 if ((code == ASHIFTRT || code == LSHIFTRT)
5f4f0e22 8771 && count < HOST_BITS_PER_WIDE_INT
8772 && nonzero_bits (XEXP (varop, 1), result_mode) >> count == 0
8773 && (nonzero_bits (XEXP (varop, 1), result_mode)
8774 & nonzero_bits (XEXP (varop, 0), result_mode)) == 0)
8775 {
8776 varop = XEXP (varop, 0);
8777 continue;
8778 }
8779 else if ((code == ASHIFTRT || code == LSHIFTRT)
5f4f0e22 8780 && count < HOST_BITS_PER_WIDE_INT
ac49a949 8781 && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
951553af 8782 && 0 == (nonzero_bits (XEXP (varop, 0), result_mode)
3f508eca 8783 >> count)
8784 && 0 == (nonzero_bits (XEXP (varop, 0), result_mode)
8785 & nonzero_bits (XEXP (varop, 1),
8786 result_mode)))
8787 {
8788 varop = XEXP (varop, 1);
8789 continue;
8790 }
8791
8792 /* (ashift (plus foo C) N) is (plus (ashift foo N) C'). */
8793 if (code == ASHIFT
8794 && GET_CODE (XEXP (varop, 1)) == CONST_INT
8795 && (new = simplify_binary_operation (ASHIFT, result_mode,
8796 XEXP (varop, 1),
5f4f0e22 8797 GEN_INT (count))) != 0
7d171a1e 8798 && GET_CODE (new) == CONST_INT
8799 && merge_outer_ops (&outer_op, &outer_const, PLUS,
8800 INTVAL (new), result_mode, &complement_p))
8801 {
8802 varop = XEXP (varop, 0);
8803 continue;
8804 }
8805 break;
8806
8807 case MINUS:
8808 /* If we have (xshiftrt (minus (ashiftrt X C) X) C)
8809 with C the size of VAROP - 1 and the shift is logical if
8810 STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
8811 we have a (gt X 0) operation. If the shift is arithmetic with
8812 STORE_FLAG_VALUE of 1 or logical with STORE_FLAG_VALUE == -1,
8813 we have a (neg (gt X 0)) operation. */
8814
0802d516
RK
8815 if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
8816 && GET_CODE (XEXP (varop, 0)) == ASHIFTRT
230d793d 8817 && count == GET_MODE_BITSIZE (GET_MODE (varop)) - 1
230d793d
RS
8818 && (code == LSHIFTRT || code == ASHIFTRT)
8819 && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT
8820 && INTVAL (XEXP (XEXP (varop, 0), 1)) == count
8821 && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
8822 {
8823 count = 0;
8824 varop = gen_rtx_combine (GT, GET_MODE (varop), XEXP (varop, 1),
8825 const0_rtx);
8826
8827 if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
8828 varop = gen_rtx_combine (NEG, GET_MODE (varop), varop);
8829
8830 continue;
8831 }
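	  /* For illustration, in SImode with count == 31 and
	     STORE_FLAG_VALUE == 1: (ashiftrt X 31) is -1 for X < 0 and 0
	     otherwise, so (lshiftrt (minus (ashiftrt X 31) X) 31) takes the
	     sign bit of -X when X >= 0 (1 exactly when X > 0) and the sign
	     bit of -1 - X when X < 0 (always 0); i.e. it computes
	     (gt X 0).  */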
	  break;

	case TRUNCATE:
	  /* Change (lshiftrt (truncate (lshiftrt))) to (truncate (lshiftrt))
	     if the truncate does not affect the value.  */
	  if (code == LSHIFTRT
	      && GET_CODE (XEXP (varop, 0)) == LSHIFTRT
	      && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT
	      && (INTVAL (XEXP (XEXP (varop, 0), 1))
		  >= (GET_MODE_BITSIZE (GET_MODE (XEXP (varop, 0)))
		      - GET_MODE_BITSIZE (GET_MODE (varop)))))
	    {
	      rtx varop_inner = XEXP (varop, 0);

	      varop_inner = gen_rtx_combine (LSHIFTRT,
					     GET_MODE (varop_inner),
					     XEXP (varop_inner, 0),
					     GEN_INT (count
						      + INTVAL (XEXP (varop_inner, 1))));
	      varop = gen_rtx_combine (TRUNCATE, GET_MODE (varop),
				       varop_inner);
	      count = 0;
	      continue;
	    }
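	  /* For illustration: (lshiftrt:QI (truncate:QI (lshiftrt:SI X 24))
	     2) keeps only bits that survive the inner shift, so it can be
	     rewritten as (truncate:QI (lshiftrt:SI X 26)); the inner count
	     (24) is at least the 32 - 8 = 24 bits dropped by the
	     truncation.  */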
	  break;

	default:
	  break;
	}

      break;
    }

  /* We need to determine what mode to do the shift in.  If the shift is
     a right shift or ROTATE, we must always do it in the mode it was
     originally done in.  Otherwise, we can do it in MODE, the widest mode
     encountered.  The code we care about is that of the shift that will
     actually be done, not the shift that was originally requested.  */
  shift_mode
    = (code == ASHIFTRT || code == LSHIFTRT || code == ROTATE
       ? result_mode : mode);

  /* We have now finished analyzing the shift.  The result should be
     a shift of type CODE with SHIFT_MODE shifting VAROP COUNT places.  If
     OUTER_OP is non-NIL, it is an operation that needs to be applied
     to the result of the shift.  OUTER_CONST is the relevant constant,
     but we must turn off all bits turned off in the shift.

     If we were passed a value for X, see if we can use any pieces of
     it.  If not, make new rtx.  */

  if (x && GET_RTX_CLASS (GET_CODE (x)) == '2'
      && GET_CODE (XEXP (x, 1)) == CONST_INT
      && INTVAL (XEXP (x, 1)) == count)
    const_rtx = XEXP (x, 1);
  else
    const_rtx = GEN_INT (count);

  if (x && GET_CODE (XEXP (x, 0)) == SUBREG
      && GET_MODE (XEXP (x, 0)) == shift_mode
      && SUBREG_REG (XEXP (x, 0)) == varop)
    varop = XEXP (x, 0);
  else if (GET_MODE (varop) != shift_mode)
    varop = gen_lowpart_for_combine (shift_mode, varop);

  /* If we can't make the SUBREG, try to return what we were given.  */
  if (GET_CODE (varop) == CLOBBER)
    return x ? x : varop;

  new = simplify_binary_operation (code, shift_mode, varop, const_rtx);
  if (new != 0)
    x = new;
  else
    {
      if (x == 0 || GET_CODE (x) != code || GET_MODE (x) != shift_mode)
	x = gen_rtx_combine (code, shift_mode, varop, const_rtx);

      SUBST (XEXP (x, 0), varop);
      SUBST (XEXP (x, 1), const_rtx);
    }

  /* If we have an outer operation and we just made a shift, it is
     possible that we could have simplified the shift were it not
     for the outer operation.  So try to do the simplification
     recursively.  */

  if (outer_op != NIL && GET_CODE (x) == code
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    x = simplify_shift_const (x, code, shift_mode, XEXP (x, 0),
			      INTVAL (XEXP (x, 1)));

  /* If we were doing a LSHIFTRT in a wider mode than it was originally,
     turn off all the bits that the shift would have turned off.  */
  if (orig_code == LSHIFTRT && result_mode != shift_mode)
    x = simplify_and_const_int (NULL_RTX, shift_mode, x,
				GET_MODE_MASK (result_mode) >> orig_count);

  /* Do the remainder of the processing in RESULT_MODE.  */
  x = gen_lowpart_for_combine (result_mode, x);

  /* If COMPLEMENT_P is set, we have to complement X before doing the outer
     operation.  */
  if (complement_p)
    x = gen_unary (NOT, result_mode, result_mode, x);

  if (outer_op != NIL)
    {
      if (GET_MODE_BITSIZE (result_mode) < HOST_BITS_PER_WIDE_INT)
	{
	  int width = GET_MODE_BITSIZE (result_mode);

	  outer_const &= GET_MODE_MASK (result_mode);

	  /* If this would be an entire word for the target, but is not for
	     the host, then sign-extend on the host so that the number will
	     look the same way on the host that it would on the target.

	     For example, when building a 64 bit alpha hosted 32 bit sparc
	     targeted compiler, then we want the 32 bit unsigned value -1 to
	     be represented as a 64 bit value -1, and not as
	     0x00000000ffffffff.  The latter confuses the sparc backend.  */

	  if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT && BITS_PER_WORD == width
	      && (outer_const & ((HOST_WIDE_INT) 1 << (width - 1))))
	    outer_const |= ((HOST_WIDE_INT) (-1) << width);
	}
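      /* For illustration of the adjustment above, with a 64-bit
	 HOST_WIDE_INT and a 32-bit target word: an OUTER_CONST of
	 0x00000000ffffffff has its target sign bit (bit 31) set, so it is
	 widened to 0xffffffffffffffff, the host representation of -1.  */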

      if (outer_op == AND)
	x = simplify_and_const_int (NULL_RTX, result_mode, x, outer_const);
      else if (outer_op == SET)
	/* This means that we have determined that the result is
	   equivalent to a constant.  This should be rare.  */
	x = GEN_INT (outer_const);
      else if (GET_RTX_CLASS (outer_op) == '1')
	x = gen_unary (outer_op, result_mode, result_mode, x);
      else
	x = gen_binary (outer_op, result_mode, x, GEN_INT (outer_const));
    }

  return x;
}
\f
/* Like recog, but we receive the address of a pointer to a new pattern.
   We try to match the rtx that the pointer points to.
   If that fails, we may try to modify or replace the pattern,
   storing the replacement into the same pointer object.

   Modifications include deletion or addition of CLOBBERs.

   PNOTES is a pointer to a location where any REG_UNUSED notes added for
   the CLOBBERs are placed.

   PADDED_SCRATCHES is set to the number of (clobber (scratch)) patterns
   we had to add.

   The value is the final insn code from the pattern ultimately matched,
   or -1.  */

static int
recog_for_combine (pnewpat, insn, pnotes, padded_scratches)
     rtx *pnewpat;
     rtx insn;
     rtx *pnotes;
     int *padded_scratches;
{
  register rtx pat = *pnewpat;
  int insn_code_number;
  int num_clobbers_to_add = 0;
  int i;
  rtx notes = 0;

  *padded_scratches = 0;

  /* If PAT is a PARALLEL, check to see if it contains the CLOBBER
     we use to indicate that something didn't match.  If we find such a
     thing, force rejection.  */
  if (GET_CODE (pat) == PARALLEL)
    for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
      if (GET_CODE (XVECEXP (pat, 0, i)) == CLOBBER
	  && XEXP (XVECEXP (pat, 0, i), 0) == const0_rtx)
	return -1;

  /* Is the result of combination a valid instruction?  */
  insn_code_number = recog (pat, insn, &num_clobbers_to_add);

  /* If it isn't, there is the possibility that we previously had an insn
     that clobbered some register as a side effect, but the combined
     insn doesn't need to do that.  So try once more without the clobbers
     unless this represents an ASM insn.  */

  if (insn_code_number < 0 && ! check_asm_operands (pat)
      && GET_CODE (pat) == PARALLEL)
    {
      int pos;

      for (pos = 0, i = 0; i < XVECLEN (pat, 0); i++)
	if (GET_CODE (XVECEXP (pat, 0, i)) != CLOBBER)
	  {
	    if (i != pos)
	      SUBST (XVECEXP (pat, 0, pos), XVECEXP (pat, 0, i));
	    pos++;
	  }

      SUBST_INT (XVECLEN (pat, 0), pos);

      if (pos == 1)
	pat = XVECEXP (pat, 0, 0);

      insn_code_number = recog (pat, insn, &num_clobbers_to_add);
    }

  /* If we had any clobbers to add, make a new pattern that contains
     them.  Then check to make sure that all of them are dead.  */
  if (num_clobbers_to_add)
    {
      rtx newpat = gen_rtx_PARALLEL (VOIDmode,
				     gen_rtvec (GET_CODE (pat) == PARALLEL
						? XVECLEN (pat, 0) + num_clobbers_to_add
						: num_clobbers_to_add + 1));

      if (GET_CODE (pat) == PARALLEL)
	for (i = 0; i < XVECLEN (pat, 0); i++)
	  XVECEXP (newpat, 0, i) = XVECEXP (pat, 0, i);
      else
	XVECEXP (newpat, 0, 0) = pat;

      add_clobbers (newpat, insn_code_number);

      for (i = XVECLEN (newpat, 0) - num_clobbers_to_add;
	   i < XVECLEN (newpat, 0); i++)
	{
	  if (GET_CODE (XEXP (XVECEXP (newpat, 0, i), 0)) == REG
	      && ! reg_dead_at_p (XEXP (XVECEXP (newpat, 0, i), 0), insn))
	    return -1;
	  else if (GET_CODE (XEXP (XVECEXP (newpat, 0, i), 0)) == SCRATCH)
	    (*padded_scratches)++;
	  notes = gen_rtx_EXPR_LIST (REG_UNUSED,
				     XEXP (XVECEXP (newpat, 0, i), 0), notes);
	}
      pat = newpat;
    }
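  /* For illustration (hypothetical target): if the combined pattern is
     (set (reg:SI 100) (plus:SI (reg:SI 101) (reg:SI 102))) but the
     matching insn on this machine also clobbers a condition-code
     register, recog reports one clobber to add and the pattern becomes
     (parallel [(set (reg:SI 100) ...) (clobber (reg:CC cc))]), which is
     acceptable only if the CC register is dead at this insn.  */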

  *pnewpat = pat;
  *pnotes = notes;

  return insn_code_number;
}
\f
/* Like gen_lowpart but for use by combine.  In combine it is not possible
   to create any new pseudoregs.  However, it is safe to create
   invalid memory addresses, because combine will try to recognize
   them and all they will do is make the combine attempt fail.

   If for some reason this cannot do its job, an rtx
   (clobber (const_int 0)) is returned.
   An insn containing that will not be recognized.  */

#undef gen_lowpart

static rtx
gen_lowpart_for_combine (mode, x)
     enum machine_mode mode;
     register rtx x;
{
  rtx result;

  if (GET_MODE (x) == mode)
    return x;

  /* We can only support MODE being wider than a word if X is a
     constant integer or has a mode the same size.  */

  if (GET_MODE_SIZE (mode) > UNITS_PER_WORD
      && ! ((GET_MODE (x) == VOIDmode
	     && (GET_CODE (x) == CONST_INT
		 || GET_CODE (x) == CONST_DOUBLE))
	    || GET_MODE_SIZE (GET_MODE (x)) == GET_MODE_SIZE (mode)))
    return gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);

  /* X might be a paradoxical (subreg (mem)).  In that case, gen_lowpart
     won't know what to do.  So we will strip off the SUBREG here and
     process normally.  */
  if (GET_CODE (x) == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
    {
      x = SUBREG_REG (x);
      if (GET_MODE (x) == mode)
	return x;
    }

  result = gen_lowpart_common (mode, x);
  if (result != 0
      && GET_CODE (result) == SUBREG
      && GET_CODE (SUBREG_REG (result)) == REG
      && REGNO (SUBREG_REG (result)) >= FIRST_PSEUDO_REGISTER
      && (GET_MODE_SIZE (GET_MODE (result))
	  != GET_MODE_SIZE (GET_MODE (SUBREG_REG (result)))))
    REG_CHANGES_SIZE (REGNO (SUBREG_REG (result))) = 1;

  if (result)
    return result;

  if (GET_CODE (x) == MEM)
    {
      register int offset = 0;
      rtx new;

      /* Refuse to work on a volatile memory ref or one with a
	 mode-dependent address.  */
      if (MEM_VOLATILE_P (x) || mode_dependent_address_p (XEXP (x, 0)))
	return gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);

      /* If we want to refer to something bigger than the original memref,
	 generate a perverse subreg instead.  That will force a reload
	 of the original memref X.  */
      if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (mode))
	return gen_rtx_SUBREG (mode, x, 0);

      if (WORDS_BIG_ENDIAN)
	offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
		  - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
      if (BYTES_BIG_ENDIAN)
	{
	  /* Adjust the address so that the address-after-the-data is
	     unchanged.  */
	  offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
		     - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
	}
      new = gen_rtx_MEM (mode, plus_constant (XEXP (x, 0), offset));
      RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
      MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x);
      MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
      return new;
    }
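  /* For illustration: asking for the QImode lowpart of (mem:SI ADDR) on a
     big-endian target yields (mem:QI (plus ADDR 3)), since the low-order
     byte of a 4-byte big-endian word lives at the highest address.  */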

  /* If X is a comparison operator, rewrite it in a new mode.  This
     probably won't match, but may allow further simplifications.  */
  else if (GET_RTX_CLASS (GET_CODE (x)) == '<')
    return gen_rtx_combine (GET_CODE (x), mode, XEXP (x, 0), XEXP (x, 1));

  /* If we couldn't simplify X any other way, just enclose it in a
     SUBREG.  Normally, this SUBREG won't match, but some patterns may
     include an explicit SUBREG or we may simplify it further in combine.  */
  else
    {
      int word = 0;

      if (WORDS_BIG_ENDIAN && GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD)
	word = ((GET_MODE_SIZE (GET_MODE (x))
		 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD))
		/ UNITS_PER_WORD);
      return gen_rtx_SUBREG (mode, x, word);
    }
}
\f
/* Make an rtx expression.  This is a subset of gen_rtx and only supports
   expressions of 1, 2, or 3 operands, each of which are rtx expressions.

   If the identical expression was previously in the insn (in the undobuf),
   it will be returned.  Only if it is not found will a new expression
   be made.  */

/*VARARGS2*/
static rtx
gen_rtx_combine VPROTO((enum rtx_code code, enum machine_mode mode, ...))
{
#ifndef __STDC__
  enum rtx_code code;
  enum machine_mode mode;
#endif
  va_list p;
  int n_args;
  rtx args[3];
  int j;
  char *fmt;
  rtx rt;
  struct undo *undo;

  VA_START (p, mode);

#ifndef __STDC__
  code = va_arg (p, enum rtx_code);
  mode = va_arg (p, enum machine_mode);
#endif

  n_args = GET_RTX_LENGTH (code);
  fmt = GET_RTX_FORMAT (code);

  if (n_args == 0 || n_args > 3)
    abort ();

  /* Get each arg and verify that it is supposed to be an expression.  */
  for (j = 0; j < n_args; j++)
    {
      if (*fmt++ != 'e')
	abort ();

      args[j] = va_arg (p, rtx);
    }

  /* See if this is in undobuf.  Be sure we don't use objects that came
     from another insn; this could produce circular rtl structures.  */

  for (undo = undobuf.undos; undo != undobuf.previous_undos; undo = undo->next)
    if (!undo->is_int
	&& GET_CODE (undo->old_contents.r) == code
	&& GET_MODE (undo->old_contents.r) == mode)
      {
	for (j = 0; j < n_args; j++)
	  if (XEXP (undo->old_contents.r, j) != args[j])
	    break;

	if (j == n_args)
	  return undo->old_contents.r;
      }

  /* Otherwise make a new rtx.  We know we have 1, 2, or 3 args.
     Use rtx_alloc instead of gen_rtx because it's faster on RISC.  */
  rt = rtx_alloc (code);
  PUT_MODE (rt, mode);
  XEXP (rt, 0) = args[0];
  if (n_args > 1)
    {
      XEXP (rt, 1) = args[1];
      if (n_args > 2)
	XEXP (rt, 2) = args[2];
    }
  return rt;
}

/* These routines make binary and unary operations by first seeing if they
   fold; if not, a new expression is allocated.  */

static rtx
gen_binary (code, mode, op0, op1)
     enum rtx_code code;
     enum machine_mode mode;
     rtx op0, op1;
{
  rtx result;
  rtx tem;

  if (GET_RTX_CLASS (code) == 'c'
      && (GET_CODE (op0) == CONST_INT
	  || (CONSTANT_P (op0) && GET_CODE (op1) != CONST_INT)))
    tem = op0, op0 = op1, op1 = tem;

  if (GET_RTX_CLASS (code) == '<')
    {
      enum machine_mode op_mode = GET_MODE (op0);

      /* Strip the COMPARE from (REL_OP (compare X Y) 0) to get
	 just (REL_OP X Y).  */
      if (GET_CODE (op0) == COMPARE && op1 == const0_rtx)
	{
	  op1 = XEXP (op0, 1);
	  op0 = XEXP (op0, 0);
	  op_mode = GET_MODE (op0);
	}

      if (op_mode == VOIDmode)
	op_mode = GET_MODE (op1);
      result = simplify_relational_operation (code, op_mode, op0, op1);
    }
  else
    result = simplify_binary_operation (code, mode, op0, op1);

  if (result)
    return result;

  /* Put complex operands first and constants second.  */
  if (GET_RTX_CLASS (code) == 'c'
      && ((CONSTANT_P (op0) && GET_CODE (op1) != CONST_INT)
	  || (GET_RTX_CLASS (GET_CODE (op0)) == 'o'
	      && GET_RTX_CLASS (GET_CODE (op1)) != 'o')
	  || (GET_CODE (op0) == SUBREG
	      && GET_RTX_CLASS (GET_CODE (SUBREG_REG (op0))) == 'o'
	      && GET_RTX_CLASS (GET_CODE (op1)) != 'o')))
    return gen_rtx_combine (code, mode, op1, op0);

  return gen_rtx_combine (code, mode, op0, op1);
}
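
/* For illustration: gen_binary (PLUS, SImode, const1_rtx, REG) first
   swaps the operands, since PLUS is commutative and constants belong
   last; if folding fails, the result is the canonical
   (plus:SI REG (const_int 1)).  Likewise gen_binary with two constant
   operands, e.g. 2 and 3, folds directly to (const_int 5) via
   simplify_binary_operation.  */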

static rtx
gen_unary (code, mode, op0_mode, op0)
     enum rtx_code code;
     enum machine_mode mode, op0_mode;
     rtx op0;
{
  rtx result = simplify_unary_operation (code, mode, op0, op0_mode);

  if (result)
    return result;

  return gen_rtx_combine (code, mode, op0);
}
\f
/* Simplify a comparison between *POP0 and *POP1 where CODE is the
   comparison code that will be tested.

   The result is a possibly different comparison code to use.  *POP0 and
   *POP1 may be updated.

   It is possible that we might detect that a comparison is either always
   true or always false.  However, we do not perform general constant
   folding in combine, so this knowledge isn't useful.  Such tautologies
   should have been detected earlier.  Hence we ignore all such cases.  */

static enum rtx_code
simplify_comparison (code, pop0, pop1)
     enum rtx_code code;
     rtx *pop0;
     rtx *pop1;
{
  rtx op0 = *pop0;
  rtx op1 = *pop1;
  rtx tem, tem1;
  int i;
  enum machine_mode mode, tmode;

  /* Try a few ways of applying the same transformation to both operands.  */
  while (1)
    {
#ifndef WORD_REGISTER_OPERATIONS
      /* The test below this one won't handle SIGN_EXTENDs on these machines,
	 so check specially.  */
      if (code != GTU && code != GEU && code != LTU && code != LEU
	  && GET_CODE (op0) == ASHIFTRT && GET_CODE (op1) == ASHIFTRT
	  && GET_CODE (XEXP (op0, 0)) == ASHIFT
	  && GET_CODE (XEXP (op1, 0)) == ASHIFT
	  && GET_CODE (XEXP (XEXP (op0, 0), 0)) == SUBREG
	  && GET_CODE (XEXP (XEXP (op1, 0), 0)) == SUBREG
	  && (GET_MODE (SUBREG_REG (XEXP (XEXP (op0, 0), 0)))
	      == GET_MODE (SUBREG_REG (XEXP (XEXP (op1, 0), 0))))
	  && GET_CODE (XEXP (op0, 1)) == CONST_INT
	  && GET_CODE (XEXP (op1, 1)) == CONST_INT
	  && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
	  && GET_CODE (XEXP (XEXP (op1, 0), 1)) == CONST_INT
	  && INTVAL (XEXP (op0, 1)) == INTVAL (XEXP (op1, 1))
	  && INTVAL (XEXP (op0, 1)) == INTVAL (XEXP (XEXP (op0, 0), 1))
	  && INTVAL (XEXP (op0, 1)) == INTVAL (XEXP (XEXP (op1, 0), 1))
	  && (INTVAL (XEXP (op0, 1))
	      == (GET_MODE_BITSIZE (GET_MODE (op0))
		  - (GET_MODE_BITSIZE
		     (GET_MODE (SUBREG_REG (XEXP (XEXP (op0, 0), 0))))))))
	{
	  op0 = SUBREG_REG (XEXP (XEXP (op0, 0), 0));
	  op1 = SUBREG_REG (XEXP (XEXP (op1, 0), 0));
	}
#endif

      /* If both operands are the same constant shift, see if we can ignore
	 the shift.  We can if the shift is a rotate or if the bits shifted
	 out of this shift are known to be zero for both inputs and if the
	 type of comparison is compatible with the shift.  */
      if (GET_CODE (op0) == GET_CODE (op1)
	  && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
	  && ((GET_CODE (op0) == ROTATE && (code == NE || code == EQ))
	      || ((GET_CODE (op0) == LSHIFTRT || GET_CODE (op0) == ASHIFT)
		  && (code != GT && code != LT && code != GE && code != LE))
	      || (GET_CODE (op0) == ASHIFTRT
		  && (code != GTU && code != LTU
		      && code != GEU && code != LEU)))
	  && GET_CODE (XEXP (op0, 1)) == CONST_INT
	  && INTVAL (XEXP (op0, 1)) >= 0
	  && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
	  && XEXP (op0, 1) == XEXP (op1, 1))
	{
	  enum machine_mode mode = GET_MODE (op0);
	  unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
	  int shift_count = INTVAL (XEXP (op0, 1));

	  if (GET_CODE (op0) == LSHIFTRT || GET_CODE (op0) == ASHIFTRT)
	    mask &= (mask >> shift_count) << shift_count;
	  else if (GET_CODE (op0) == ASHIFT)
	    mask = (mask & (mask << shift_count)) >> shift_count;

	  if ((nonzero_bits (XEXP (op0, 0), mode) & ~ mask) == 0
	      && (nonzero_bits (XEXP (op1, 0), mode) & ~ mask) == 0)
	    op0 = XEXP (op0, 0), op1 = XEXP (op1, 0);
	  else
	    break;
	}

      /* If both operands are AND's of a paradoxical SUBREG by constant, the
	 SUBREGs are of the same mode, and, in both cases, the AND would
	 be redundant if the comparison was done in the narrower mode,
	 do the comparison in the narrower mode (e.g., we are AND'ing with 1
	 and the operand's possibly nonzero bits are 0xffffff01; in that case
	 if we only care about QImode, we don't need the AND).  This case
	 occurs if the output mode of an scc insn is not SImode and
	 STORE_FLAG_VALUE == 1 (e.g., the 386).

	 Similarly, check for a case where the AND's are ZERO_EXTEND
	 operations from some narrower mode even though a SUBREG is not
	 present.  */

      else if (GET_CODE (op0) == AND && GET_CODE (op1) == AND
	       && GET_CODE (XEXP (op0, 1)) == CONST_INT
	       && GET_CODE (XEXP (op1, 1)) == CONST_INT)
	{
	  rtx inner_op0 = XEXP (op0, 0);
	  rtx inner_op1 = XEXP (op1, 0);
	  HOST_WIDE_INT c0 = INTVAL (XEXP (op0, 1));
	  HOST_WIDE_INT c1 = INTVAL (XEXP (op1, 1));
	  int changed = 0;

	  if (GET_CODE (inner_op0) == SUBREG && GET_CODE (inner_op1) == SUBREG
	      && (GET_MODE_SIZE (GET_MODE (inner_op0))
		  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (inner_op0))))
	      && (GET_MODE (SUBREG_REG (inner_op0))
		  == GET_MODE (SUBREG_REG (inner_op1)))
	      && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0)))
		  <= HOST_BITS_PER_WIDE_INT)
	      && (0 == ((~c0) & nonzero_bits (SUBREG_REG (inner_op0),
					      GET_MODE (SUBREG_REG (op0)))))
	      && (0 == ((~c1) & nonzero_bits (SUBREG_REG (inner_op1),
					      GET_MODE (SUBREG_REG (inner_op1))))))
	    {
	      op0 = SUBREG_REG (inner_op0);
	      op1 = SUBREG_REG (inner_op1);

	      /* The resulting comparison is always unsigned since we masked
		 off the original sign bit.  */
	      code = unsigned_condition (code);

	      changed = 1;
	    }

	  else if (c0 == c1)
	    for (tmode = GET_CLASS_NARROWEST_MODE
		 (GET_MODE_CLASS (GET_MODE (op0)));
		 tmode != GET_MODE (op0); tmode = GET_MODE_WIDER_MODE (tmode))
	      if (c0 == GET_MODE_MASK (tmode))
		{
		  op0 = gen_lowpart_for_combine (tmode, inner_op0);
		  op1 = gen_lowpart_for_combine (tmode, inner_op1);
		  code = unsigned_condition (code);
		  changed = 1;
		  break;
		}

	  if (! changed)
	    break;
	}

      /* If both operands are NOT, we can strip off the outer operation
	 and adjust the comparison code for swapped operands; similarly for
	 NEG, except that this must be an equality comparison.  */
      else if ((GET_CODE (op0) == NOT && GET_CODE (op1) == NOT)
	       || (GET_CODE (op0) == NEG && GET_CODE (op1) == NEG
		   && (code == EQ || code == NE)))
	op0 = XEXP (op0, 0), op1 = XEXP (op1, 0), code = swap_condition (code);

      else
	break;
    }

  /* If the first operand is a constant, swap the operands and adjust the
     comparison code appropriately, but don't do this if the second operand
     is already a constant integer.  */
  if (CONSTANT_P (op0) && GET_CODE (op1) != CONST_INT)
    {
      tem = op0, op0 = op1, op1 = tem;
      code = swap_condition (code);
    }

  /* We now enter a loop during which we will try to simplify the comparison.
     For the most part, we only are concerned with comparisons with zero,
     but some things may really be comparisons with zero but not start
     out looking that way.  */

  while (GET_CODE (op1) == CONST_INT)
    {
      enum machine_mode mode = GET_MODE (op0);
      int mode_width = GET_MODE_BITSIZE (mode);
      unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
      int equality_comparison_p;
      int sign_bit_comparison_p;
      int unsigned_comparison_p;
      HOST_WIDE_INT const_op;

      /* We only want to handle integral modes.  This catches VOIDmode,
	 CCmode, and the floating-point modes.  An exception is that we
	 can handle VOIDmode if OP0 is a COMPARE or a comparison
	 operation.  */

      if (GET_MODE_CLASS (mode) != MODE_INT
	  && ! (mode == VOIDmode
		&& (GET_CODE (op0) == COMPARE
		    || GET_RTX_CLASS (GET_CODE (op0)) == '<')))
	break;

      /* Get the constant we are comparing against and turn off all bits
	 not on in our mode.  */
      const_op = INTVAL (op1);
      if (mode_width <= HOST_BITS_PER_WIDE_INT)
	const_op &= mask;

      /* If we are comparing against a constant power of two and the value
	 being compared can only have that single bit nonzero (e.g., it was
	 `and'ed with that bit), we can replace this with a comparison
	 with zero.  */
      if (const_op
	  && (code == EQ || code == NE || code == GE || code == GEU
	      || code == LT || code == LTU)
	  && mode_width <= HOST_BITS_PER_WIDE_INT
	  && exact_log2 (const_op) >= 0
	  && nonzero_bits (op0, mode) == const_op)
	{
	  code = (code == EQ || code == GE || code == GEU ? NE : EQ);
	  op1 = const0_rtx, const_op = 0;
	}
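      /* For illustration: if OP0 is (and X 8), its only possibly nonzero
	 bit is bit 3, so (eq (and X 8) 8) is rewritten here as
	 (ne (and X 8) 0) and the bit test no longer repeats the
	 constant.  */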

      /* Similarly, if we are comparing a value known to be either -1 or
	 0 with -1, change it to the opposite comparison against zero.  */

      if (const_op == -1
	  && (code == EQ || code == NE || code == GT || code == LE
	      || code == GEU || code == LTU)
	  && num_sign_bit_copies (op0, mode) == mode_width)
	{
	  code = (code == EQ || code == LE || code == GEU ? NE : EQ);
	  op1 = const0_rtx, const_op = 0;
	}

      /* Do some canonicalizations based on the comparison code.  We prefer
	 comparisons against zero and then prefer equality comparisons.
	 If we can reduce the size of a constant, we will do that too.  */

      switch (code)
	{
	case LT:
	  /* < C is equivalent to <= (C - 1).  */
	  if (const_op > 0)
	    {
	      const_op -= 1;
	      op1 = GEN_INT (const_op);
	      code = LE;
	      /* ... fall through to LE case below.  */
	    }
	  else
	    break;

	case LE:
	  /* <= C is equivalent to < (C + 1); we do this for C < 0.  */
	  if (const_op < 0)
	    {
	      const_op += 1;
	      op1 = GEN_INT (const_op);
	      code = LT;
	    }

	  /* If we are doing a <= 0 comparison on a value known to have
	     a zero sign bit, we can replace this with == 0.  */
	  else if (const_op == 0
		   && mode_width <= HOST_BITS_PER_WIDE_INT
		   && (nonzero_bits (op0, mode)
		       & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
	    code = EQ;
	  break;

	case GE:
	  /* >= C is equivalent to > (C - 1).  */
	  if (const_op > 0)
	    {
	      const_op -= 1;
	      op1 = GEN_INT (const_op);
	      code = GT;
	      /* ... fall through to GT below.  */
	    }
	  else
	    break;

	case GT:
	  /* > C is equivalent to >= (C + 1); we do this for C < 0.  */
	  if (const_op < 0)
	    {
	      const_op += 1;
	      op1 = GEN_INT (const_op);
	      code = GE;
	    }

	  /* If we are doing a > 0 comparison on a value known to have
	     a zero sign bit, we can replace this with != 0.  */
	  else if (const_op == 0
		   && mode_width <= HOST_BITS_PER_WIDE_INT
		   && (nonzero_bits (op0, mode)
		       & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
	    code = NE;
	  break;

	case LTU:
	  /* unsigned < C is equivalent to <= (C - 1).  */
	  if (const_op > 0)
	    {
	      const_op -= 1;
	      op1 = GEN_INT (const_op);
	      code = LEU;
	      /* ... fall through ... */
	    }

	  /* (unsigned) < 0x80000000 is equivalent to >= 0.  */
	  else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
		   && (const_op == (HOST_WIDE_INT) 1 << (mode_width - 1)))
	    {
	      const_op = 0, op1 = const0_rtx;
	      code = GE;
	      break;
	    }
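	  /* For illustration, in SImode: (ltu X 0x80000000) holds exactly
	     when the sign bit of X is clear, which is the signed test
	     (ge X 0); no shift or mask is needed.  */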
	  else
	    break;

	case LEU:
	  /* unsigned <= 0 is equivalent to == 0 */
	  if (const_op == 0)
	    code = EQ;

	  /* (unsigned) <= 0x7fffffff is equivalent to >= 0.  */
	  else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
		   && (const_op == ((HOST_WIDE_INT) 1 << (mode_width - 1)) - 1))
	    {
	      const_op = 0, op1 = const0_rtx;
	      code = GE;
	    }
	  break;

	case GEU:
	  /* unsigned >= C is equivalent to > (C - 1).  */
	  if (const_op > 1)
	    {
	      const_op -= 1;
	      op1 = GEN_INT (const_op);
	      code = GTU;
	      /* ... fall through ... */
	    }

	  /* (unsigned) >= 0x80000000 is equivalent to < 0.  */
	  else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
		   && (const_op == (HOST_WIDE_INT) 1 << (mode_width - 1)))
	    {
	      const_op = 0, op1 = const0_rtx;
	      code = LT;
	      break;
	    }
	  else
	    break;

	case GTU:
	  /* unsigned > 0 is equivalent to != 0 */
	  if (const_op == 0)
	    code = NE;

	  /* (unsigned) > 0x7fffffff is equivalent to < 0.  */
	  else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
		   && (const_op == ((HOST_WIDE_INT) 1 << (mode_width - 1)) - 1))
	    {
	      const_op = 0, op1 = const0_rtx;
	      code = LT;
	    }
	  break;

	default:
	  break;
	}

      /* Compute some predicates to simplify code below.  */

      equality_comparison_p = (code == EQ || code == NE);
      sign_bit_comparison_p = ((code == LT || code == GE) && const_op == 0);
      unsigned_comparison_p = (code == LTU || code == LEU || code == GTU
			       || code == GEU);

      /* If this is a sign bit comparison and we can do arithmetic in
	 MODE, say that we will only be needing the sign bit of OP0.  */
      if (sign_bit_comparison_p
	  && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
	op0 = force_to_mode (op0, mode,
			     ((HOST_WIDE_INT) 1
			      << (GET_MODE_BITSIZE (mode) - 1)),
			     NULL_RTX, 0);

      /* Now try cases based on the opcode of OP0.  If none of the cases
	 does a "continue", we exit this loop immediately after the
	 switch.  */

      switch (GET_CODE (op0))
	{
	case ZERO_EXTRACT:
	  /* If we are extracting a single bit from a variable position in
	     a constant that has only a single bit set and are comparing it
	     with zero, we can convert this into an equality comparison
	     between the position and the location of the single bit.  */

	  if (GET_CODE (XEXP (op0, 0)) == CONST_INT
	      && XEXP (op0, 1) == const1_rtx
	      && equality_comparison_p && const_op == 0
	      && (i = exact_log2 (INTVAL (XEXP (op0, 0)))) >= 0)
	    {
	      if (BITS_BIG_ENDIAN)
#ifdef HAVE_extzv
		i = (GET_MODE_BITSIZE
		     (insn_operand_mode[(int) CODE_FOR_extzv][1]) - 1 - i);
#else
		i = BITS_PER_WORD - 1 - i;
#endif

	      op0 = XEXP (op0, 2);
	      op1 = GEN_INT (i);
	      const_op = i;

	      /* Result is nonzero iff shift count is equal to I.  */
	      code = reverse_condition (code);
	      continue;
	    }
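	  /* For illustration (little-endian bit numbering): comparing
	     (zero_extract (const_int 4) (const_int 1) POS) with zero tests
	     whether bit POS of the value 4 is clear; only bit 2 of 4 is
	     set, so (eq ... 0) becomes (ne POS 2) after the code reversal
	     above.  */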
230d793d 9745
0f41302f 9746 /* ... fall through ... */
230d793d
RS
9747
9748 case SIGN_EXTRACT:
9749 tem = expand_compound_operation (op0);
9750 if (tem != op0)
9751 {
9752 op0 = tem;
9753 continue;
9754 }
9755 break;
9756
9757 case NOT:
9758 /* If testing for equality, we can take the NOT of the constant. */
9759 if (equality_comparison_p
9760 && (tem = simplify_unary_operation (NOT, mode, op1, mode)) != 0)
9761 {
9762 op0 = XEXP (op0, 0);
9763 op1 = tem;
9764 continue;
9765 }
9766
9767 /* If just looking at the sign bit, reverse the sense of the
9768 comparison. */
9769 if (sign_bit_comparison_p)
9770 {
9771 op0 = XEXP (op0, 0);
9772 code = (code == GE ? LT : GE);
9773 continue;
9774 }
9775 break;
9776
9777 case NEG:
9778 /* If testing for equality, we can take the NEG of the constant. */
9779 if (equality_comparison_p
9780 && (tem = simplify_unary_operation (NEG, mode, op1, mode)) != 0)
9781 {
9782 op0 = XEXP (op0, 0);
9783 op1 = tem;
9784 continue;
9785 }
9786
9787 /* The remaining cases only apply to comparisons with zero. */
9788 if (const_op != 0)
9789 break;
9790
9791 /* When X is ABS or is known positive,
9792 (neg X) is < 0 if and only if X != 0. */
9793
9794 if (sign_bit_comparison_p
9795 && (GET_CODE (XEXP (op0, 0)) == ABS
5f4f0e22 9796 || (mode_width <= HOST_BITS_PER_WIDE_INT
951553af 9797 && (nonzero_bits (XEXP (op0, 0), mode)
5f4f0e22 9798 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)))
230d793d
RS
9799 {
9800 op0 = XEXP (op0, 0);
9801 code = (code == LT ? NE : EQ);
9802 continue;
9803 }
9804
3bed8141 9805 /* If we have NEG of something whose two high-order bits are the
0f41302f 9806 same, we know that "(-a) < 0" is equivalent to "a > 0". */
3bed8141 9807 if (num_sign_bit_copies (op0, mode) >= 2)
230d793d
RS
9808 {
9809 op0 = XEXP (op0, 0);
9810 code = swap_condition (code);
9811 continue;
9812 }
9813 break;
9814
9815 case ROTATE:
9816 /* If we are testing equality and our count is a constant, we
9817 can perform the inverse operation on our RHS. */
9818 if (equality_comparison_p && GET_CODE (XEXP (op0, 1)) == CONST_INT
9819 && (tem = simplify_binary_operation (ROTATERT, mode,
9820 op1, XEXP (op0, 1))) != 0)
9821 {
9822 op0 = XEXP (op0, 0);
9823 op1 = tem;
9824 continue;
9825 }
9826
9827 /* If we are doing a < 0 or >= 0 comparison, it means we are testing
9828 a particular bit. Convert it to an AND of a constant of that
9829 bit. This will be converted into a ZERO_EXTRACT. */
9830 if (const_op == 0 && sign_bit_comparison_p
9831 && GET_CODE (XEXP (op0, 1)) == CONST_INT
5f4f0e22 9832 && mode_width <= HOST_BITS_PER_WIDE_INT)
230d793d 9833 {
5f4f0e22
CH
9834 op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
9835 ((HOST_WIDE_INT) 1
9836 << (mode_width - 1
9837 - INTVAL (XEXP (op0, 1)))));
230d793d
RS
9838 code = (code == LT ? NE : EQ);
9839 continue;
9840 }
9841
0f41302f 9842 /* ... fall through ... */
230d793d
RS
9843
9844 case ABS:
9845 /* ABS is ignorable inside an equality comparison with zero. */
9846 if (const_op == 0 && equality_comparison_p)
9847 {
9848 op0 = XEXP (op0, 0);
9849 continue;
9850 }
9851 break;
9852
9853
9854 case SIGN_EXTEND:
9855 /* Can simplify (compare (zero/sign_extend FOO) CONST)
9856 to (compare FOO CONST) if CONST fits in FOO's mode and we
9857 are either testing inequality or have an unsigned comparison
9858 with ZERO_EXTEND or a signed comparison with SIGN_EXTEND. */
9859 if (! unsigned_comparison_p
9860 && (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
5f4f0e22
CH
9861 <= HOST_BITS_PER_WIDE_INT)
9862 && ((unsigned HOST_WIDE_INT) const_op
9863 < (((HOST_WIDE_INT) 1
9864 << (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0))) - 1)))))
230d793d
RS
9865 {
9866 op0 = XEXP (op0, 0);
9867 continue;
9868 }
9869 break;
9870
9871 case SUBREG:
a687e897 9872 /* Check for the case where we are comparing A - C1 with C2,
abc95ed3 9873 both constants are smaller than 1/2 the maximum positive
a687e897
RK
9874 value in MODE, and the comparison is equality or unsigned.
9875 In that case, if A is either zero-extended to MODE or has
9876 sufficient sign bits so that the high-order bit in MODE
9877 is a copy of the sign in the inner mode, we can prove that it is
9878 safe to do the operation in the wider mode. This simplifies
9879 many range checks. */
9880
9881 if (mode_width <= HOST_BITS_PER_WIDE_INT
9882 && subreg_lowpart_p (op0)
9883 && GET_CODE (SUBREG_REG (op0)) == PLUS
9884 && GET_CODE (XEXP (SUBREG_REG (op0), 1)) == CONST_INT
9885 && INTVAL (XEXP (SUBREG_REG (op0), 1)) < 0
9886 && (- INTVAL (XEXP (SUBREG_REG (op0), 1))
9887 < GET_MODE_MASK (mode) / 2)
adb7a1cb 9888 && (unsigned HOST_WIDE_INT) const_op < GET_MODE_MASK (mode) / 2
951553af
RK
9889 && (0 == (nonzero_bits (XEXP (SUBREG_REG (op0), 0),
9890 GET_MODE (SUBREG_REG (op0)))
a687e897
RK
9891 & ~ GET_MODE_MASK (mode))
9892 || (num_sign_bit_copies (XEXP (SUBREG_REG (op0), 0),
9893 GET_MODE (SUBREG_REG (op0)))
9894 > (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0)))
9895 - GET_MODE_BITSIZE (mode)))))
9896 {
9897 op0 = SUBREG_REG (op0);
9898 continue;
9899 }
9900
fe0cf571
RK
9901 /* If the inner mode is narrower and we are extracting the low part,
9902 we can treat the SUBREG as if it were a ZERO_EXTEND. */
9903 if (subreg_lowpart_p (op0)
89f1c7f2
RS
9904 && GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0))) < mode_width)
9905 /* Fall through */ ;
9906 else
230d793d
RS
9907 break;
9908
0f41302f 9909 /* ... fall through ... */
230d793d
RS
9910
9911 case ZERO_EXTEND:
9912 if ((unsigned_comparison_p || equality_comparison_p)
9913 && (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
5f4f0e22
CH
9914 <= HOST_BITS_PER_WIDE_INT)
9915 && ((unsigned HOST_WIDE_INT) const_op
230d793d
RS
9916 < GET_MODE_MASK (GET_MODE (XEXP (op0, 0)))))
9917 {
9918 op0 = XEXP (op0, 0);
9919 continue;
9920 }
9921 break;
9922
9923 case PLUS:
20fdd649 9924 /* (eq (plus X A) B) -> (eq X (minus B A)). We can only do
5089e22e 9925 this for equality comparisons due to pathological cases involving
230d793d 9926 overflows. */
20fdd649
RK
9927 if (equality_comparison_p
9928 && 0 != (tem = simplify_binary_operation (MINUS, mode,
9929 op1, XEXP (op0, 1))))
230d793d
RS
9930 {
9931 op0 = XEXP (op0, 0);
9932 op1 = tem;
9933 continue;
9934 }
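	  /* For illustration: (eq (plus X 3) 10) becomes (eq X 7); MINUS
	     folds the two constants, and equality is immune to the
	     overflow concerns that block this rewrite for ordered
	     comparisons.  */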

	  /* (plus (abs X) (const_int -1)) is < 0 if and only if X == 0.  */
	  if (const_op == 0 && XEXP (op0, 1) == constm1_rtx
	      && GET_CODE (XEXP (op0, 0)) == ABS && sign_bit_comparison_p)
	    {
	      op0 = XEXP (XEXP (op0, 0), 0);
	      code = (code == LT ? EQ : NE);
	      continue;
	    }
	  break;

	case MINUS:
	  /* (eq (minus A B) C) -> (eq A (plus B C)) or
	     (eq B (minus A C)), whichever simplifies.  We can only do
	     this for equality comparisons due to pathological cases involving
	     overflows.  */
	  if (equality_comparison_p
	      && 0 != (tem = simplify_binary_operation (PLUS, mode,
							XEXP (op0, 1), op1)))
	    {
	      op0 = XEXP (op0, 0);
	      op1 = tem;
	      continue;
	    }

	  if (equality_comparison_p
	      && 0 != (tem = simplify_binary_operation (MINUS, mode,
							XEXP (op0, 0), op1)))
	    {
	      op0 = XEXP (op0, 1);
	      op1 = tem;
	      continue;
	    }

	  /* The sign bit of (minus (ashiftrt X C) X), where C is the number
	     of bits in X minus 1, is one iff X > 0.  */
	  if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == ASHIFTRT
	      && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
	      && INTVAL (XEXP (XEXP (op0, 0), 1)) == mode_width - 1
	      && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
	    {
	      op0 = XEXP (op0, 1);
	      code = (code == GE ? LE : GT);
	      continue;
	    }
	  break;

	case XOR:
	  /* (eq (xor A B) C) -> (eq A (xor B C)).  This is a simplification
	     if C is zero or B is a constant.  */
	  if (equality_comparison_p
	      && 0 != (tem = simplify_binary_operation (XOR, mode,
							XEXP (op0, 1), op1)))
	    {
	      op0 = XEXP (op0, 0);
	      op1 = tem;
	      continue;
	    }
	  break;

	case EQ:  case NE:
	case LT:  case LTU:  case LE:  case LEU:
	case GT:  case GTU:  case GE:  case GEU:
	  /* We can't do anything if OP0 is a condition code value, rather
	     than an actual data value.  */
	  if (const_op != 0
#ifdef HAVE_cc0
	      || XEXP (op0, 0) == cc0_rtx
#endif
	      || GET_MODE_CLASS (GET_MODE (XEXP (op0, 0))) == MODE_CC)
	    break;

	  /* Get the two operands being compared.  */
	  if (GET_CODE (XEXP (op0, 0)) == COMPARE)
	    tem = XEXP (XEXP (op0, 0), 0), tem1 = XEXP (XEXP (op0, 0), 1);
	  else
	    tem = XEXP (op0, 0), tem1 = XEXP (op0, 1);

	  /* Check for the cases where we simply want the result of the
	     earlier test or the opposite of that result.  */
	  if (code == NE
	      || (code == EQ && reversible_comparison_p (op0))
	      || (GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
		  && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
		  && (STORE_FLAG_VALUE
		      & (((HOST_WIDE_INT) 1
			  << (GET_MODE_BITSIZE (GET_MODE (op0)) - 1))))
		  && (code == LT
		      || (code == GE && reversible_comparison_p (op0)))))
	    {
	      code = (code == LT || code == NE
		      ? GET_CODE (op0) : reverse_condition (GET_CODE (op0)));
	      op0 = tem, op1 = tem1;
	      continue;
	    }
	  break;

	case IOR:
	  /* The sign bit of (ior (plus X (const_int -1)) X) is nonzero
	     iff X <= 0.  */
	  if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == PLUS
	      && XEXP (XEXP (op0, 0), 1) == constm1_rtx
	      && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
	    {
	      op0 = XEXP (op0, 1);
	      code = (code == GE ? GT : LE);
	      continue;
	    }
	  break;

	case AND:
	  /* Convert (and (xshift 1 X) Y) to (and (lshiftrt Y X) 1).  This
	     will be converted to a ZERO_EXTRACT later.  */
	  if (const_op == 0 && equality_comparison_p
	      && GET_CODE (XEXP (op0, 0)) == ASHIFT
	      && XEXP (XEXP (op0, 0), 0) == const1_rtx)
	    {
	      op0 = simplify_and_const_int
		(op0, mode, gen_rtx_combine (LSHIFTRT, mode,
					     XEXP (op0, 1),
					     XEXP (XEXP (op0, 0), 1)),
		 (HOST_WIDE_INT) 1);
	      continue;
	    }

	  /* If we are comparing (and (lshiftrt X C1) C2) for equality with
	     zero and X is a comparison and C1 and C2 describe only bits set
	     in STORE_FLAG_VALUE, we can compare with X.  */
	  if (const_op == 0 && equality_comparison_p
	      && mode_width <= HOST_BITS_PER_WIDE_INT
	      && GET_CODE (XEXP (op0, 1)) == CONST_INT
	      && GET_CODE (XEXP (op0, 0)) == LSHIFTRT
	      && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
	      && INTVAL (XEXP (XEXP (op0, 0), 1)) >= 0
	      && INTVAL (XEXP (XEXP (op0, 0), 1)) < HOST_BITS_PER_WIDE_INT)
	    {
	      mask = ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode))
		      << INTVAL (XEXP (XEXP (op0, 0), 1)));
	      if ((~ STORE_FLAG_VALUE & mask) == 0
		  && (GET_RTX_CLASS (GET_CODE (XEXP (XEXP (op0, 0), 0))) == '<'
		      || ((tem = get_last_value (XEXP (XEXP (op0, 0), 0))) != 0
			  && GET_RTX_CLASS (GET_CODE (tem)) == '<')))
		{
		  op0 = XEXP (XEXP (op0, 0), 0);
		  continue;
		}
	    }

	  /* If we are doing an equality comparison of an AND of a bit equal
	     to the sign bit, replace this with a LT or GE comparison of
	     the underlying value.  */
	  if (equality_comparison_p
	      && const_op == 0
	      && GET_CODE (XEXP (op0, 1)) == CONST_INT
	      && mode_width <= HOST_BITS_PER_WIDE_INT
	      && ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode))
		  == (HOST_WIDE_INT) 1 << (mode_width - 1)))
	    {
	      op0 = XEXP (op0, 0);
	      code = (code == EQ ? GE : LT);
	      continue;
	    }

	  /* If this AND operation is really a ZERO_EXTEND from a narrower
	     mode, the constant fits within that mode, and this is either an
	     equality or unsigned comparison, try to do this comparison in
	     the narrower mode.  */
	  if ((equality_comparison_p || unsigned_comparison_p)
	      && GET_CODE (XEXP (op0, 1)) == CONST_INT
	      && (i = exact_log2 ((INTVAL (XEXP (op0, 1))
				   & GET_MODE_MASK (mode))
				  + 1)) >= 0
	      && const_op >> i == 0
	      && (tmode = mode_for_size (i, MODE_INT, 1)) != BLKmode)
	    {
	      op0 = gen_lowpart_for_combine (tmode, XEXP (op0, 0));
	      continue;
	    }
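	  /* For illustration: (ltu (and:SI X 0xff) 10) is really a QImode
	     comparison; exact_log2 (0xff + 1) == 8 selects QImode, and the
	     test typically becomes (ltu (subreg:QI X 0) 10).  */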
	  break;

	case ASHIFT:
	  /* If we have (compare (ashift FOO N) (const_int C)) and
	     the high order N bits of FOO (N+1 if an inequality comparison)
	     are known to be zero, we can do this by comparing FOO with C
	     shifted right N bits so long as the low-order N bits of C are
	     zero.  */
	  if (GET_CODE (XEXP (op0, 1)) == CONST_INT
	      && INTVAL (XEXP (op0, 1)) >= 0
	      && ((INTVAL (XEXP (op0, 1)) + ! equality_comparison_p)
		  < HOST_BITS_PER_WIDE_INT)
	      && ((const_op
		   & (((HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1))) - 1)) == 0)
	      && mode_width <= HOST_BITS_PER_WIDE_INT
	      && (nonzero_bits (XEXP (op0, 0), mode)
		  & ~ (mask >> (INTVAL (XEXP (op0, 1))
				+ ! equality_comparison_p))) == 0)
	    {
	      const_op >>= INTVAL (XEXP (op0, 1));
	      op1 = GEN_INT (const_op);
	      op0 = XEXP (op0, 0);
	      continue;
	    }
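	  /* For illustration: (compare (ashift FOO 2) 20) becomes
	     (compare FOO 5) when the top bits of FOO are known zero; the
	     low two bits of 20 are zero, so no information is lost.  */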
10137
dfbe1b2f 10138 /* If we are doing a sign bit comparison, it means we are testing
230d793d 10139 a particular bit. Convert it to the appropriate AND. */
dfbe1b2f 10140 if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 1)) == CONST_INT
5f4f0e22 10141 && mode_width <= HOST_BITS_PER_WIDE_INT)
230d793d 10142 {
5f4f0e22
CH
10143 op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
10144 ((HOST_WIDE_INT) 1
10145 << (mode_width - 1
10146 - INTVAL (XEXP (op0, 1)))));
230d793d
RS
10147 code = (code == LT ? NE : EQ);
10148 continue;
10149 }
dfbe1b2f
RK
10150
10151 /* If this an equality comparison with zero and we are shifting
10152 the low bit to the sign bit, we can convert this to an AND of the
10153 low-order bit. */
10154 if (const_op == 0 && equality_comparison_p
10155 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10156 && INTVAL (XEXP (op0, 1)) == mode_width - 1)
10157 {
5f4f0e22
CH
10158 op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
10159 (HOST_WIDE_INT) 1);
dfbe1b2f
RK
10160 continue;
10161 }
230d793d
RS
10162 break;
10163
10164 case ASHIFTRT:
d0ab8cd3
RK
10165 /* If this is an equality comparison with zero, we can do this
10166 as a logical shift, which might be much simpler. */
10167 if (equality_comparison_p && const_op == 0
10168 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
10169 {
10170 op0 = simplify_shift_const (NULL_RTX, LSHIFTRT, mode,
10171 XEXP (op0, 0),
10172 INTVAL (XEXP (op0, 1)));
10173 continue;
10174 }
10175
230d793d
RS
10176 /* If OP0 is a sign extension and CODE is not an unsigned comparison,
10177 do the comparison in a narrower mode. */
10178 if (! unsigned_comparison_p
10179 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10180 && GET_CODE (XEXP (op0, 0)) == ASHIFT
10181 && XEXP (op0, 1) == XEXP (XEXP (op0, 0), 1)
10182 && (tmode = mode_for_size (mode_width - INTVAL (XEXP (op0, 1)),
22331794 10183 MODE_INT, 1)) != BLKmode
5f4f0e22
CH
10184 && ((unsigned HOST_WIDE_INT) const_op <= GET_MODE_MASK (tmode)
10185 || ((unsigned HOST_WIDE_INT) - const_op
10186 <= GET_MODE_MASK (tmode))))
230d793d
RS
10187 {
10188 op0 = gen_lowpart_for_combine (tmode, XEXP (XEXP (op0, 0), 0));
10189 continue;
10190 }
10191
0f41302f 10192 /* ... fall through ... */
230d793d
RS
10193 case LSHIFTRT:
10194 /* If we have (compare (xshiftrt FOO N) (const_int C)) and
951553af 10195 the low order N bits of FOO are known to be zero, we can do this
230d793d
RS
10196 by comparing FOO with C shifted left N bits so long as no
10197 overflow occurs. */
10198 if (GET_CODE (XEXP (op0, 1)) == CONST_INT
10199 && INTVAL (XEXP (op0, 1)) >= 0
5f4f0e22
CH
10200 && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
10201 && mode_width <= HOST_BITS_PER_WIDE_INT
951553af 10202 && (nonzero_bits (XEXP (op0, 0), mode)
5f4f0e22 10203 & (((HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1))) - 1)) == 0
230d793d
RS
10204 && (const_op == 0
10205 || (floor_log2 (const_op) + INTVAL (XEXP (op0, 1))
10206 < mode_width)))
10207 {
10208 const_op <<= INTVAL (XEXP (op0, 1));
5f4f0e22 10209 op1 = GEN_INT (const_op);
230d793d
RS
10210 op0 = XEXP (op0, 0);
10211 continue;
10212 }
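	  /* For illustration: (compare (lshiftrt FOO 2) 5) becomes
	     (compare FOO 20) when the low two bits of FOO are known zero
	     and 5 << 2 still fits in the mode.  */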
10213
10214 /* If we are using this shift to extract just the sign bit, we
10215 can replace this with an LT or GE comparison. */
10216 if (const_op == 0
10217 && (equality_comparison_p || sign_bit_comparison_p)
10218 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10219 && INTVAL (XEXP (op0, 1)) == mode_width - 1)
10220 {
10221 op0 = XEXP (op0, 0);
10222 code = (code == NE || code == GT ? LT : GE);
10223 continue;
10224 }
10225 break;
e9a25f70
JL
10226
10227 default:
10228 break;
230d793d
RS
10229 }
10230
10231 break;
10232 }
10233
10234 /* Now make any compound operations involved in this comparison. Then,
76d31c63 10235 check for an outmost SUBREG on OP0 that is not doing anything or is
230d793d
RS
10236 paradoxical. The latter case can only occur when it is known that the
10237 "extra" bits will be zero. Therefore, it is safe to remove the SUBREG.
10238 We can never remove a SUBREG for a non-equality comparison because the
10239 sign bit is in a different place in the underlying object. */
10240
10241 op0 = make_compound_operation (op0, op1 == const0_rtx ? COMPARE : SET);
10242 op1 = make_compound_operation (op1, SET);
10243
10244 if (GET_CODE (op0) == SUBREG && subreg_lowpart_p (op0)
10245 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
10246 && (code == NE || code == EQ)
10247 && ((GET_MODE_SIZE (GET_MODE (op0))
10248 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0))))))
10249 {
10250 op0 = SUBREG_REG (op0);
10251 op1 = gen_lowpart_for_combine (GET_MODE (op0), op1);
10252 }
10253
10254 else if (GET_CODE (op0) == SUBREG && subreg_lowpart_p (op0)
10255 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
10256 && (code == NE || code == EQ)
ac49a949
RS
10257 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0)))
10258 <= HOST_BITS_PER_WIDE_INT)
951553af 10259 && (nonzero_bits (SUBREG_REG (op0), GET_MODE (SUBREG_REG (op0)))
230d793d
RS
10260 & ~ GET_MODE_MASK (GET_MODE (op0))) == 0
10261 && (tem = gen_lowpart_for_combine (GET_MODE (SUBREG_REG (op0)),
10262 op1),
951553af 10263 (nonzero_bits (tem, GET_MODE (SUBREG_REG (op0)))
230d793d
RS
10264 & ~ GET_MODE_MASK (GET_MODE (op0))) == 0))
10265 op0 = SUBREG_REG (op0), op1 = tem;
10266
10267 /* We now do the opposite procedure: Some machines don't have compare
10268 insns in all modes. If OP0's mode is an integer mode smaller than a
10269 word and we can't do a compare in that mode, see if there is a larger
a687e897
RK
10270 mode for which we can do the compare. There are a number of cases in
10271 which we can use the wider mode. */
230d793d
RS
10272
10273 mode = GET_MODE (op0);
10274 if (mode != VOIDmode && GET_MODE_CLASS (mode) == MODE_INT
10275 && GET_MODE_SIZE (mode) < UNITS_PER_WORD
10276 && cmp_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
10277 for (tmode = GET_MODE_WIDER_MODE (mode);
5f4f0e22
CH
10278 (tmode != VOIDmode
10279 && GET_MODE_BITSIZE (tmode) <= HOST_BITS_PER_WIDE_INT);
230d793d 10280 tmode = GET_MODE_WIDER_MODE (tmode))
a687e897 10281 if (cmp_optab->handlers[(int) tmode].insn_code != CODE_FOR_nothing)
230d793d 10282 {
951553af 10283 /* If the only nonzero bits in OP0 and OP1 are those in the
a687e897
RK
10284 narrower mode and this is an equality or unsigned comparison,
10285 we can use the wider mode. Similarly for sign-extended
7e4dc511 10286 values, in which case it is true for all comparisons. */
a687e897
RK
10287 if (((code == EQ || code == NE
10288 || code == GEU || code == GTU || code == LEU || code == LTU)
951553af
RK
10289 && (nonzero_bits (op0, tmode) & ~ GET_MODE_MASK (mode)) == 0
10290 && (nonzero_bits (op1, tmode) & ~ GET_MODE_MASK (mode)) == 0)
7e4dc511
RK
10291 || ((num_sign_bit_copies (op0, tmode)
10292 > GET_MODE_BITSIZE (tmode) - GET_MODE_BITSIZE (mode))
a687e897 10293 && (num_sign_bit_copies (op1, tmode)
58744483 10294 > GET_MODE_BITSIZE (tmode) - GET_MODE_BITSIZE (mode))))
a687e897
RK
10295 {
10296 op0 = gen_lowpart_for_combine (tmode, op0);
10297 op1 = gen_lowpart_for_combine (tmode, op1);
10298 break;
10299 }
230d793d 10300
a687e897
RK
10301 /* If this is a test for negative, we can make an explicit
10302 test of the sign bit. */
10303
10304 if (op1 == const0_rtx && (code == LT || code == GE)
10305 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
230d793d 10306 {
a687e897
RK
10307 op0 = gen_binary (AND, tmode,
10308 gen_lowpart_for_combine (tmode, op0),
5f4f0e22
CH
10309 GEN_INT ((HOST_WIDE_INT) 1
10310 << (GET_MODE_BITSIZE (mode) - 1)));
230d793d 10311 code = (code == LT) ? NE : EQ;
a687e897 10312 break;
230d793d 10313 }
230d793d
RS
10314 }
10315
b7a775b2
RK
10316#ifdef CANONICALIZE_COMPARISON
10317 /* If this machine only supports a subset of valid comparisons, see if we
10318 can convert an unsupported one into a supported one. */
10319 CANONICALIZE_COMPARISON (code, op0, op1);
10320#endif
10321
230d793d
RS
10322 *pop0 = op0;
10323 *pop1 = op1;
10324
10325 return code;
10326}
10327\f
10328/* Return 1 if we know that X, a comparison operation, is not operating
10329 on a floating-point value or is EQ or NE, meaning that we can safely
10330 reverse it. */
10331
10332static int
10333reversible_comparison_p (x)
10334 rtx x;
10335{
10336 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
7e2a0d8e 10337 || flag_fast_math
230d793d
RS
10338 || GET_CODE (x) == NE || GET_CODE (x) == EQ)
10339 return 1;
10340
10341 switch (GET_MODE_CLASS (GET_MODE (XEXP (x, 0))))
10342 {
10343 case MODE_INT:
3ad2180a
RK
10344 case MODE_PARTIAL_INT:
10345 case MODE_COMPLEX_INT:
230d793d
RS
10346 return 1;
10347
10348 case MODE_CC:
9210df58
RK
10349 /* If the mode of the condition codes tells us that this is safe,
10350 we need look no further. */
10351 if (REVERSIBLE_CC_MODE (GET_MODE (XEXP (x, 0))))
10352 return 1;
10353
10354 /* Otherwise try and find where the condition codes were last set and
10355 use that. */
230d793d
RS
10356 x = get_last_value (XEXP (x, 0));
10357 return (x && GET_CODE (x) == COMPARE
3ad2180a 10358 && ! FLOAT_MODE_P (GET_MODE (XEXP (x, 0))));
e9a25f70
JL
10359
10360 default:
10361 return 0;
230d793d 10362 }
230d793d
RS
10363}
\f
/* Utility function for the following routine.  Called when X is part of a
   value being stored into reg_last_set_value.  Sets reg_last_set_table_tick
   for each register mentioned.  Similar to mention_regs in cse.c.  */

static void
update_table_tick (x)
     rtx x;
{
  register enum rtx_code code = GET_CODE (x);
  register char *fmt = GET_RTX_FORMAT (code);
  register int i;

  if (code == REG)
    {
      int regno = REGNO (x);
      int endregno = regno + (regno < FIRST_PSEUDO_REGISTER
			      ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);

      for (i = regno; i < endregno; i++)
	reg_last_set_table_tick[i] = label_tick;

      return;
    }

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    /* Note that we can't have an "E" in values stored; see
       get_last_value_validate.  */
    if (fmt[i] == 'e')
      update_table_tick (XEXP (x, i));
}

/* Record that REG is set to VALUE in insn INSN.  If VALUE is zero, we
   are saying that the register is clobbered and we no longer know its
   value.  If INSN is zero, don't update reg_last_set; this is only permitted
   with VALUE also zero and is used to invalidate the register.  */

static void
record_value_for_reg (reg, insn, value)
     rtx reg;
     rtx insn;
     rtx value;
{
  int regno = REGNO (reg);
  int endregno = regno + (regno < FIRST_PSEUDO_REGISTER
			  ? HARD_REGNO_NREGS (regno, GET_MODE (reg)) : 1);
  int i;

  /* If VALUE contains REG and we have a previous value for REG, substitute
     the previous value.  */
  if (value && insn && reg_overlap_mentioned_p (reg, value))
    {
      rtx tem;

      /* Set things up so get_last_value is allowed to see anything set up to
	 our insn.  */
      subst_low_cuid = INSN_CUID (insn);
      tem = get_last_value (reg);

      if (tem)
	value = replace_rtx (copy_rtx (value), reg, tem);
    }

  /* For each register modified, show we don't know its value, that
     we don't know about its bitwise content, that its value has been
     updated, and that we don't know the location of the death of the
     register.  */
  for (i = regno; i < endregno; i++)
    {
      if (insn)
	reg_last_set[i] = insn;
      reg_last_set_value[i] = 0;
      reg_last_set_mode[i] = 0;
      reg_last_set_nonzero_bits[i] = 0;
      reg_last_set_sign_bit_copies[i] = 0;
      reg_last_death[i] = 0;
    }

  /* Mark registers that are being referenced in this value.  */
  if (value)
    update_table_tick (value);

  /* Now update the status of each register being set.
     If someone is using this register in this block, set this register
     to invalid since we will get confused between the two lives in this
     basic block.  This makes using this register always invalid.  In cse, we
     scan the table to invalidate all entries using this register, but this
     is too much work for us.  */

  for (i = regno; i < endregno; i++)
    {
      reg_last_set_label[i] = label_tick;
      if (value && reg_last_set_table_tick[i] == label_tick)
	reg_last_set_invalid[i] = 1;
      else
	reg_last_set_invalid[i] = 0;
    }

  /* The value being assigned might refer to X (like in "x++;").  In that
     case, we must replace it with (clobber (const_int 0)) to prevent
     infinite loops.  */
  if (value && ! get_last_value_validate (&value, insn,
					  reg_last_set_label[regno], 0))
    {
      value = copy_rtx (value);
      if (! get_last_value_validate (&value, insn,
				     reg_last_set_label[regno], 1))
	value = 0;
    }

  /* For the main register being modified, update the value, the mode, the
     nonzero bits, and the number of sign bit copies.  */

  reg_last_set_value[regno] = value;

  if (value)
    {
      subst_low_cuid = INSN_CUID (insn);
      reg_last_set_mode[regno] = GET_MODE (reg);
      reg_last_set_nonzero_bits[regno] = nonzero_bits (value, GET_MODE (reg));
      reg_last_set_sign_bit_copies[regno]
	= num_sign_bit_copies (value, GET_MODE (reg));
    }
}
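
/* Example (added for exposition; register numbers are hypothetical):
   recording (set (reg:SI 65) (plus:SI (reg:SI 65) (const_int 1))) when
   (reg:SI 65) was last known to hold (reg:SI 60) stores the substituted
   value (plus:SI (reg:SI 60) (const_int 1)).  If no prior value is
   known, get_last_value_validate later replaces the self-reference with
   (clobber (const_int 0)), preventing get_last_value from chasing the
   circular definition forever.  */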

/* Used for communication between the following two routines.  */
static rtx record_dead_insn;

/* Called via note_stores from record_dead_and_set_regs to handle one
   SET or CLOBBER in an insn.  */

static void
record_dead_and_set_regs_1 (dest, setter)
     rtx dest, setter;
{
  if (GET_CODE (dest) == SUBREG)
    dest = SUBREG_REG (dest);

  if (GET_CODE (dest) == REG)
    {
      /* If we are setting the whole register, we know its value.  Otherwise
	 show that we don't know the value.  We can handle SUBREG in
	 some cases.  */
      if (GET_CODE (setter) == SET && dest == SET_DEST (setter))
	record_value_for_reg (dest, record_dead_insn, SET_SRC (setter));
      else if (GET_CODE (setter) == SET
	       && GET_CODE (SET_DEST (setter)) == SUBREG
	       && SUBREG_REG (SET_DEST (setter)) == dest
	       && GET_MODE_BITSIZE (GET_MODE (dest)) <= BITS_PER_WORD
	       && subreg_lowpart_p (SET_DEST (setter)))
	record_value_for_reg (dest, record_dead_insn,
			      gen_lowpart_for_combine (GET_MODE (dest),
						       SET_SRC (setter)));
      else
	record_value_for_reg (dest, record_dead_insn, NULL_RTX);
    }
  else if (GET_CODE (dest) == MEM
	   /* Ignore pushes, they clobber nothing.  */
	   && ! push_operand (dest, GET_MODE (dest)))
    mem_last_set = INSN_CUID (record_dead_insn);
}

/* Update the records of when each REG was most recently set or killed
   for the things done by INSN.  This is the last thing done in processing
   INSN in the combiner loop.

   We update reg_last_set, reg_last_set_value, reg_last_set_mode,
   reg_last_set_nonzero_bits, reg_last_set_sign_bit_copies, reg_last_death,
   and also the similar information mem_last_set (which insn most recently
   modified memory) and last_call_cuid (which insn was the most recent
   subroutine call).  */

static void
record_dead_and_set_regs (insn)
     rtx insn;
{
  register rtx link;
  int i;

  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    {
      if (REG_NOTE_KIND (link) == REG_DEAD
	  && GET_CODE (XEXP (link, 0)) == REG)
	{
	  int regno = REGNO (XEXP (link, 0));
	  int endregno
	    = regno + (regno < FIRST_PSEUDO_REGISTER
		       ? HARD_REGNO_NREGS (regno, GET_MODE (XEXP (link, 0)))
		       : 1);

	  for (i = regno; i < endregno; i++)
	    reg_last_death[i] = insn;
	}
      else if (REG_NOTE_KIND (link) == REG_INC)
	record_value_for_reg (XEXP (link, 0), insn, NULL_RTX);
    }

  if (GET_CODE (insn) == CALL_INSN)
    {
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	if (call_used_regs[i])
	  {
	    reg_last_set_value[i] = 0;
	    reg_last_set_mode[i] = 0;
	    reg_last_set_nonzero_bits[i] = 0;
	    reg_last_set_sign_bit_copies[i] = 0;
	    reg_last_death[i] = 0;
	  }

      last_call_cuid = mem_last_set = INSN_CUID (insn);
    }

  record_dead_insn = insn;
  note_stores (PATTERN (insn), record_dead_and_set_regs_1);
}
\f
/* Utility routine for the following function.  Verify that all the registers
   mentioned in *LOC are valid when *LOC was part of a value set when
   label_tick == TICK.  Return 0 if some are not.

   If REPLACE is non-zero, replace the invalid reference with
   (clobber (const_int 0)) and return 1.  This replacement is useful because
   we often can get useful information about the form of a value (e.g., if
   it was produced by a shift that always produces -1 or 0) even though
   we don't know exactly what registers it was produced from.  */

static int
get_last_value_validate (loc, insn, tick, replace)
     rtx *loc;
     rtx insn;
     int tick;
     int replace;
{
  rtx x = *loc;
  char *fmt = GET_RTX_FORMAT (GET_CODE (x));
  int len = GET_RTX_LENGTH (GET_CODE (x));
  int i;

  if (GET_CODE (x) == REG)
    {
      int regno = REGNO (x);
      int endregno = regno + (regno < FIRST_PSEUDO_REGISTER
			      ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
      int j;

      for (j = regno; j < endregno; j++)
	if (reg_last_set_invalid[j]
	    /* If this is a pseudo-register that was only set once, it is
	       always valid.  */
	    || (! (regno >= FIRST_PSEUDO_REGISTER && REG_N_SETS (regno) == 1)
		&& reg_last_set_label[j] > tick))
	  {
	    if (replace)
	      *loc = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
	    return replace;
	  }

      return 1;
    }
  /* If this is a memory reference, make sure that there were
     no stores after it that might have clobbered the value.  We don't
     have alias info, so we assume any store invalidates it.  */
  else if (GET_CODE (x) == MEM && ! RTX_UNCHANGING_P (x)
	   && INSN_CUID (insn) <= mem_last_set)
    {
      if (replace)
	*loc = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
      return replace;
    }

  for (i = 0; i < len; i++)
    if ((fmt[i] == 'e'
	 && get_last_value_validate (&XEXP (x, i), insn, tick, replace) == 0)
	/* Don't bother with these.  They shouldn't occur anyway.  */
	|| fmt[i] == 'E')
      return 0;

  /* If we haven't found a reason for it to be invalid, it is valid.  */
  return 1;
}
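
/* Example (added for exposition): suppose (reg 70) was recorded as
   (ashiftrt:SI (reg 71) (const_int 31)) and (reg 71) has since been
   set again.  With REPLACE non-zero the stale reference becomes

     (ashiftrt:SI (clobber (const_int 0)) (const_int 31))

   which no longer names reg 71 but still lets num_sign_bit_copies
   deduce that the value is either 0 or -1.  */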

/* Get the last value assigned to X, if known.  Some registers
   in the value may be replaced with (clobber (const_int 0)) if their value
   is no longer known reliably.  */

static rtx
get_last_value (x)
     rtx x;
{
  int regno;
  rtx value;

  /* If this is a non-paradoxical SUBREG, get the value of its operand and
     then convert it to the desired mode.  If this is a paradoxical SUBREG,
     we cannot predict what values the "extra" bits might have.  */
  if (GET_CODE (x) == SUBREG
      && subreg_lowpart_p (x)
      && (GET_MODE_SIZE (GET_MODE (x))
	  <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
      && (value = get_last_value (SUBREG_REG (x))) != 0)
    return gen_lowpart_for_combine (GET_MODE (x), value);

  if (GET_CODE (x) != REG)
    return 0;

  regno = REGNO (x);
  value = reg_last_set_value[regno];

  /* If we don't have a value or if it isn't for this basic block,
     return 0.  */

  if (value == 0
      || (REG_N_SETS (regno) != 1
	  && reg_last_set_label[regno] != label_tick))
    return 0;

  /* If the value was set in a later insn than the ones we are processing,
     we can't use it even if the register was only set once, but make a quick
     check to see if the previous insn set it to something.  This is commonly
     the case when the same pseudo is used by repeated insns.

     This does not work if there exists an instruction which is temporarily
     not on the insn chain.  */

  if (INSN_CUID (reg_last_set[regno]) >= subst_low_cuid)
    {
      rtx insn, set;

      /* We can not do anything useful in this case, because there is
	 an instruction which is not on the insn chain.  */
      if (subst_prev_insn)
	return 0;

      /* Skip over USE insns.  They are not useful here, and they may have
	 been made by combine, in which case they do not have an INSN_CUID
	 value.  We can't use prev_real_insn, because that would incorrectly
	 take us backwards across labels.  Skip over BARRIERs also, since
	 they could have been made by combine.  If we see one, we must be
	 optimizing dead code, so it doesn't matter what we do.  */
      for (insn = prev_nonnote_insn (subst_insn);
	   insn && ((GET_CODE (insn) == INSN
		     && GET_CODE (PATTERN (insn)) == USE)
		    || GET_CODE (insn) == BARRIER
		    || INSN_CUID (insn) >= subst_low_cuid);
	   insn = prev_nonnote_insn (insn))
	;

      if (insn
	  && (set = single_set (insn)) != 0
	  && rtx_equal_p (SET_DEST (set), x))
	{
	  value = SET_SRC (set);

	  /* Make sure that VALUE doesn't reference X.  Replace any
	     explicit references with a CLOBBER.  If there are any remaining
	     references (rare), don't use the value.  */

	  if (reg_mentioned_p (x, value))
	    value = replace_rtx (copy_rtx (value), x,
				 gen_rtx_CLOBBER (GET_MODE (x), const0_rtx));

	  if (reg_overlap_mentioned_p (x, value))
	    return 0;
	}
      else
	return 0;
    }

  /* If the value has all its registers valid, return it.  */
  if (get_last_value_validate (&value, reg_last_set[regno],
			       reg_last_set_label[regno], 0))
    return value;

  /* Otherwise, make a copy and replace any invalid register with
     (clobber (const_int 0)).  If that fails for some reason, return 0.  */

  value = copy_rtx (value);
  if (get_last_value_validate (&value, reg_last_set[regno],
			       reg_last_set_label[regno], 1))
    return value;

  return 0;
}
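
#if 0
/* Minimal usage sketch (added for exposition, kept compiled out): one
   plausible way a transformation could consult get_last_value to ask
   whether REG is currently known to hold a compile-time constant.
   This helper is hypothetical and not part of combine proper.  */
static int
reg_known_constant_p (reg)
     rtx reg;
{
  rtx val = get_last_value (reg);

  return val != 0 && GET_CODE (val) == CONST_INT;
}
#endif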
\f
/* Return nonzero if expression X refers to a REG or to memory
   that is set in an instruction more recent than FROM_CUID.  */

static int
use_crosses_set_p (x, from_cuid)
     register rtx x;
     int from_cuid;
{
  register char *fmt;
  register int i;
  register enum rtx_code code = GET_CODE (x);

  if (code == REG)
    {
      register int regno = REGNO (x);
      int endreg = regno + (regno < FIRST_PSEUDO_REGISTER
			    ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);

#ifdef PUSH_ROUNDING
      /* Don't allow uses of the stack pointer to be moved,
	 because we don't know whether the move crosses a push insn.  */
      if (regno == STACK_POINTER_REGNUM)
	return 1;
#endif
      for (; regno < endreg; regno++)
	if (reg_last_set[regno]
	    && INSN_CUID (reg_last_set[regno]) > from_cuid)
	  return 1;
      return 0;
    }

  if (code == MEM && mem_last_set > from_cuid)
    return 1;

  fmt = GET_RTX_FORMAT (code);

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'E')
	{
	  register int j;
	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    if (use_crosses_set_p (XVECEXP (x, i, j), from_cuid))
	      return 1;
	}
      else if (fmt[i] == 'e'
	       && use_crosses_set_p (XEXP (x, i), from_cuid))
	return 1;
    }
  return 0;
}
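
/* Example (added for exposition; cuids are hypothetical): given

     cuid 10:  (set (reg 70) ...)
     cuid 11:  (set (reg 71) (plus:SI (reg 70) (reg 72)))

   use_crosses_set_p on the PLUS with FROM_CUID == 9 returns 1, because
   reg 70 is set at cuid 10; substituting the PLUS into an insn before
   cuid 10 would read the old value of reg 70.  With FROM_CUID == 10 it
   returns 0 and the move is safe.  */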
\f
/* Define three variables used for communication between the following
   routines.  */

static int reg_dead_regno, reg_dead_endregno;
static int reg_dead_flag;

/* Function called via note_stores from reg_dead_at_p.

   If DEST is within [reg_dead_regno, reg_dead_endregno), set
   reg_dead_flag to 1 if X is a CLOBBER and to -1 if it is a SET.  */

static void
reg_dead_at_p_1 (dest, x)
     rtx dest;
     rtx x;
{
  int regno, endregno;

  if (GET_CODE (dest) != REG)
    return;

  regno = REGNO (dest);
  endregno = regno + (regno < FIRST_PSEUDO_REGISTER
		      ? HARD_REGNO_NREGS (regno, GET_MODE (dest)) : 1);

  if (reg_dead_endregno > regno && reg_dead_regno < endregno)
    reg_dead_flag = (GET_CODE (x) == CLOBBER) ? 1 : -1;
}

/* Return non-zero if REG is known to be dead at INSN.

   We scan backwards from INSN.  If we hit a REG_DEAD note or a CLOBBER
   referencing REG, it is dead.  If we hit a SET referencing REG, it is
   live.  Otherwise, see if it is live or dead at the start of the basic
   block we are in.  Hard regs marked as being live in NEWPAT_USED_REGS
   must be assumed to be always live.  */

static int
reg_dead_at_p (reg, insn)
     rtx reg;
     rtx insn;
{
  int block, i;

  /* Set variables for reg_dead_at_p_1.  */
  reg_dead_regno = REGNO (reg);
  reg_dead_endregno = reg_dead_regno + (reg_dead_regno < FIRST_PSEUDO_REGISTER
					? HARD_REGNO_NREGS (reg_dead_regno,
							    GET_MODE (reg))
					: 1);

  reg_dead_flag = 0;

  /* Check that reg isn't mentioned in NEWPAT_USED_REGS.  */
  if (reg_dead_regno < FIRST_PSEUDO_REGISTER)
    {
      for (i = reg_dead_regno; i < reg_dead_endregno; i++)
	if (TEST_HARD_REG_BIT (newpat_used_regs, i))
	  return 0;
    }

  /* Scan backwards until we find a REG_DEAD note, SET, CLOBBER, label, or
     beginning of function.  */
  for (; insn && GET_CODE (insn) != CODE_LABEL && GET_CODE (insn) != BARRIER;
       insn = prev_nonnote_insn (insn))
    {
      note_stores (PATTERN (insn), reg_dead_at_p_1);
      if (reg_dead_flag)
	return reg_dead_flag == 1 ? 1 : 0;

      if (find_regno_note (insn, REG_DEAD, reg_dead_regno))
	return 1;
    }

  /* Get the basic block number that we were in.  */
  if (insn == 0)
    block = 0;
  else
    {
      for (block = 0; block < n_basic_blocks; block++)
	if (insn == basic_block_head[block])
	  break;

      if (block == n_basic_blocks)
	return 0;
    }

  for (i = reg_dead_regno; i < reg_dead_endregno; i++)
    if (REGNO_REG_SET_P (basic_block_live_at_start[block], i))
      return 0;

  return 1;
}
\f
/* Note hard registers in X that are used.  This code is similar to
   that in flow.c, but much simpler since we don't care about pseudos.  */

static void
mark_used_regs_combine (x)
     rtx x;
{
  register RTX_CODE code = GET_CODE (x);
  register int regno;
  int i;

  switch (code)
    {
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_INT:
    case CONST:
    case CONST_DOUBLE:
    case PC:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
    case ASM_INPUT:
#ifdef HAVE_cc0
    /* CC0 must die in the insn after it is set, so we don't need to take
       special note of it here.  */
    case CC0:
#endif
      return;

    case CLOBBER:
      /* If we are clobbering a MEM, mark any hard registers inside the
	 address as used.  */
      if (GET_CODE (XEXP (x, 0)) == MEM)
	mark_used_regs_combine (XEXP (XEXP (x, 0), 0));
      return;

    case REG:
      regno = REGNO (x);
      /* A hard reg in a wide mode may really be multiple registers.
	 If so, mark all of them just like the first.  */
      if (regno < FIRST_PSEUDO_REGISTER)
	{
	  /* None of this applies to the stack, frame or arg pointers.  */
	  if (regno == STACK_POINTER_REGNUM
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
	      || regno == HARD_FRAME_POINTER_REGNUM
#endif
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
	      || (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
#endif
	      || regno == FRAME_POINTER_REGNUM)
	    return;

	  i = HARD_REGNO_NREGS (regno, GET_MODE (x));
	  while (i-- > 0)
	    SET_HARD_REG_BIT (newpat_used_regs, regno + i);
	}
      return;

    case SET:
      {
	/* If setting a MEM, or a SUBREG of a MEM, then note any hard regs in
	   the address.  */
	register rtx testreg = SET_DEST (x);

	while (GET_CODE (testreg) == SUBREG
	       || GET_CODE (testreg) == ZERO_EXTRACT
	       || GET_CODE (testreg) == SIGN_EXTRACT
	       || GET_CODE (testreg) == STRICT_LOW_PART)
	  testreg = XEXP (testreg, 0);

	if (GET_CODE (testreg) == MEM)
	  mark_used_regs_combine (XEXP (testreg, 0));

	mark_used_regs_combine (SET_SRC (x));
      }
      return;

    default:
      break;
    }

  /* Recursively scan the operands of this expression.  */

  {
    register char *fmt = GET_RTX_FORMAT (code);

    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
	if (fmt[i] == 'e')
	  mark_used_regs_combine (XEXP (x, i));
	else if (fmt[i] == 'E')
	  {
	    register int j;

	    for (j = 0; j < XVECLEN (x, i); j++)
	      mark_used_regs_combine (XVECEXP (x, i, j));
	  }
      }
  }
}

\f
/* Remove register number REGNO from the dead registers list of INSN.

   Return the note used to record the death, if there was one.  */

rtx
remove_death (regno, insn)
     int regno;
     rtx insn;
{
  register rtx note = find_regno_note (insn, REG_DEAD, regno);

  if (note)
    {
      REG_N_DEATHS (regno)--;
      remove_note (insn, note);
    }

  return note;
}

/* For each register (hardware or pseudo) used within expression X, if its
   death is in an instruction with cuid between FROM_CUID (inclusive) and
   TO_INSN (exclusive), put a REG_DEAD note for that register in the
   list headed by PNOTES.

   That said, don't move registers killed by maybe_kill_insn.

   This is done when X is being merged by combination into TO_INSN.  These
   notes will then be distributed as needed.  */

static void
move_deaths (x, maybe_kill_insn, from_cuid, to_insn, pnotes)
     rtx x;
     rtx maybe_kill_insn;
     int from_cuid;
     rtx to_insn;
     rtx *pnotes;
{
  register char *fmt;
  register int len, i;
  register enum rtx_code code = GET_CODE (x);

  if (code == REG)
    {
      register int regno = REGNO (x);
      register rtx where_dead = reg_last_death[regno];
      register rtx before_dead, after_dead;

      /* Don't move the register if it gets killed in between from and to.  */
      if (maybe_kill_insn && reg_set_p (x, maybe_kill_insn)
	  && !reg_referenced_p (x, maybe_kill_insn))
	return;

      /* WHERE_DEAD could be a USE insn made by combine, so first we
	 make sure that we have insns with valid INSN_CUID values.  */
      before_dead = where_dead;
      while (before_dead && INSN_UID (before_dead) > max_uid_cuid)
	before_dead = PREV_INSN (before_dead);
      after_dead = where_dead;
      while (after_dead && INSN_UID (after_dead) > max_uid_cuid)
	after_dead = NEXT_INSN (after_dead);

      if (before_dead && after_dead
	  && INSN_CUID (before_dead) >= from_cuid
	  && (INSN_CUID (after_dead) < INSN_CUID (to_insn)
	      || (where_dead != after_dead
		  && INSN_CUID (after_dead) == INSN_CUID (to_insn))))
	{
	  rtx note = remove_death (regno, where_dead);

	  /* It is possible for the call above to return 0.  This can occur
	     when reg_last_death points to I2 or I1 that we combined with.
	     In that case make a new note.

	     We must also check for the case where X is a hard register
	     and NOTE is a death note for a range of hard registers
	     including X.  In that case, we must put REG_DEAD notes for
	     the remaining registers in place of NOTE.  */

	  if (note != 0 && regno < FIRST_PSEUDO_REGISTER
	      && (GET_MODE_SIZE (GET_MODE (XEXP (note, 0)))
		  > GET_MODE_SIZE (GET_MODE (x))))
	    {
	      int deadregno = REGNO (XEXP (note, 0));
	      int deadend
		= (deadregno + HARD_REGNO_NREGS (deadregno,
						 GET_MODE (XEXP (note, 0))));
	      int ourend = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
	      int i;

	      for (i = deadregno; i < deadend; i++)
		if (i < regno || i >= ourend)
		  REG_NOTES (where_dead)
		    = gen_rtx_EXPR_LIST (REG_DEAD,
					 gen_rtx_REG (reg_raw_mode[i], i),
					 REG_NOTES (where_dead));
	    }
	  /* If we didn't find any note, or if we found a REG_DEAD note that
	     covers only part of the given reg, and we have a multi-reg hard
	     register, then to be safe we must check for REG_DEAD notes
	     for each register other than the first.  They could have
	     their own REG_DEAD notes lying around.  */
	  else if ((note == 0
		    || (note != 0
			&& (GET_MODE_SIZE (GET_MODE (XEXP (note, 0)))
			    < GET_MODE_SIZE (GET_MODE (x)))))
		   && regno < FIRST_PSEUDO_REGISTER
		   && HARD_REGNO_NREGS (regno, GET_MODE (x)) > 1)
	    {
	      int ourend = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
	      int i, offset;
	      rtx oldnotes = 0;

	      if (note)
		offset = HARD_REGNO_NREGS (regno, GET_MODE (XEXP (note, 0)));
	      else
		offset = 1;

	      for (i = regno + offset; i < ourend; i++)
		move_deaths (gen_rtx_REG (reg_raw_mode[i], i),
			     maybe_kill_insn, from_cuid, to_insn, &oldnotes);
	    }

	  if (note != 0 && GET_MODE (XEXP (note, 0)) == GET_MODE (x))
	    {
	      XEXP (note, 1) = *pnotes;
	      *pnotes = note;
	    }
	  else
	    *pnotes = gen_rtx_EXPR_LIST (REG_DEAD, x, *pnotes);

	  REG_N_DEATHS (regno)++;
	}

      return;
    }

  else if (GET_CODE (x) == SET)
    {
      rtx dest = SET_DEST (x);

      move_deaths (SET_SRC (x), maybe_kill_insn, from_cuid, to_insn, pnotes);

      /* In the case of a ZERO_EXTRACT, a STRICT_LOW_PART, or a SUBREG
	 that accesses one word of a multi-word item, some
	 piece of every register in the expression is used by
	 this insn, so remove any old death.  */

      if (GET_CODE (dest) == ZERO_EXTRACT
	  || GET_CODE (dest) == STRICT_LOW_PART
	  || (GET_CODE (dest) == SUBREG
	      && (((GET_MODE_SIZE (GET_MODE (dest))
		    + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
		  == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
		       + UNITS_PER_WORD - 1) / UNITS_PER_WORD))))
	{
	  move_deaths (dest, maybe_kill_insn, from_cuid, to_insn, pnotes);
	  return;
	}

      /* If this is some other SUBREG, we know it replaces the entire
	 value, so use that as the destination.  */
      if (GET_CODE (dest) == SUBREG)
	dest = SUBREG_REG (dest);

      /* If this is a MEM, adjust deaths of anything used in the address.
	 For a REG (the only other possibility), the entire value is
	 being replaced so the old value is not used in this insn.  */

      if (GET_CODE (dest) == MEM)
	move_deaths (XEXP (dest, 0), maybe_kill_insn, from_cuid,
		     to_insn, pnotes);
      return;
    }

  else if (GET_CODE (x) == CLOBBER)
    return;

  len = GET_RTX_LENGTH (code);
  fmt = GET_RTX_FORMAT (code);

  for (i = 0; i < len; i++)
    {
      if (fmt[i] == 'E')
	{
	  register int j;
	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    move_deaths (XVECEXP (x, i, j), maybe_kill_insn, from_cuid,
			 to_insn, pnotes);
	}
      else if (fmt[i] == 'e')
	move_deaths (XEXP (x, i), maybe_kill_insn, from_cuid, to_insn, pnotes);
    }
}
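
/* Example (added for exposition; register numbers are hypothetical):
   if a DImode value occupying hard regs 4-5 died at some insn, and the
   combined pattern now uses only (reg:SI 4), the code above removes the
   wide REG_DEAD note, re-creates a REG_DEAD note for (reg:SI 5) at the
   old death site, and queues a fresh note for (reg:SI 4) on PNOTES, so
   neither half's death information is lost.  */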
\f
/* Return 1 if X is the target of a bit-field assignment in BODY, the
   pattern of an insn.  X must be a REG.  */

static int
reg_bitfield_target_p (x, body)
     rtx x;
     rtx body;
{
  int i;

  if (GET_CODE (body) == SET)
    {
      rtx dest = SET_DEST (body);
      rtx target;
      int regno, tregno, endregno, endtregno;

      if (GET_CODE (dest) == ZERO_EXTRACT)
	target = XEXP (dest, 0);
      else if (GET_CODE (dest) == STRICT_LOW_PART)
	target = SUBREG_REG (XEXP (dest, 0));
      else
	return 0;

      if (GET_CODE (target) == SUBREG)
	target = SUBREG_REG (target);

      if (GET_CODE (target) != REG)
	return 0;

      tregno = REGNO (target), regno = REGNO (x);
      if (tregno >= FIRST_PSEUDO_REGISTER || regno >= FIRST_PSEUDO_REGISTER)
	return target == x;

      endtregno = tregno + HARD_REGNO_NREGS (tregno, GET_MODE (target));
      endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));

      return endregno > tregno && regno < endtregno;
    }

  else if (GET_CODE (body) == PARALLEL)
    for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
      if (reg_bitfield_target_p (x, XVECEXP (body, 0, i)))
	return 1;

  return 0;
}
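
/* Example (added for exposition): for the insn pattern

     (set (zero_extract:SI (reg 65) (const_int 3) (const_int 8))
	  (const_int 5))

   reg_bitfield_target_p returns 1 for (reg 65): only bits 8-10 are
   written, so the rest of the register stays live and a REG_DEAD note
   for it must not be placed on this insn.  */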
\f
/* Given a chain of REG_NOTES originally from FROM_INSN, try to place them
   as appropriate.  I3 and I2 are the insns resulting from the combination
   insns including FROM (I2 may be zero).

   ELIM_I2 and ELIM_I1 are either zero or registers that we know will
   not need REG_DEAD notes because they are being substituted for.  This
   saves searching in the most common cases.

   Each note in the list is either ignored or placed on some insns, depending
   on the type of note.  */

static void
distribute_notes (notes, from_insn, i3, i2, elim_i2, elim_i1)
     rtx notes;
     rtx from_insn;
     rtx i3, i2;
     rtx elim_i2, elim_i1;
{
  rtx note, next_note;
  rtx tem;

  for (note = notes; note; note = next_note)
    {
      rtx place = 0, place2 = 0;

      /* If this NOTE references a pseudo register, ensure it references
	 the latest copy of that register.  */
      if (XEXP (note, 0) && GET_CODE (XEXP (note, 0)) == REG
	  && REGNO (XEXP (note, 0)) >= FIRST_PSEUDO_REGISTER)
	XEXP (note, 0) = regno_reg_rtx[REGNO (XEXP (note, 0))];

      next_note = XEXP (note, 1);
      switch (REG_NOTE_KIND (note))
	{
	case REG_BR_PROB:
	case REG_EXEC_COUNT:
	  /* Doesn't matter much where we put this, as long as it's somewhere.
	     It is preferable to keep these notes on branches, which is most
	     likely to be i3.  */
	  place = i3;
	  break;

	case REG_UNUSED:
	  /* Any clobbers for i3 may still exist, and so we must process
	     REG_UNUSED notes from that insn.

	     Any clobbers from i2 or i1 can only exist if they were added by
	     recog_for_combine.  In that case, recog_for_combine created the
	     necessary REG_UNUSED notes.  Trying to keep any original
	     REG_UNUSED notes from these insns can cause incorrect output
	     if it is for the same register as the original i3 dest.
	     In that case, we will notice that the register is set in i3,
	     and then add a REG_UNUSED note for the destination of i3, which
	     is wrong.  However, it is possible to have REG_UNUSED notes from
	     i2 or i1 for registers which were both used and clobbered, so
	     we keep notes from i2 or i1 if they will turn into REG_DEAD
	     notes.  */

	  /* If this register is set or clobbered in I3, put the note there
	     unless there is one already.  */
	  if (reg_set_p (XEXP (note, 0), PATTERN (i3)))
	    {
	      if (from_insn != i3)
		break;

	      if (! (GET_CODE (XEXP (note, 0)) == REG
		     ? find_regno_note (i3, REG_UNUSED, REGNO (XEXP (note, 0)))
		     : find_reg_note (i3, REG_UNUSED, XEXP (note, 0))))
		place = i3;
	    }
	  /* Otherwise, if this register is used by I3, then this register
	     now dies here, so we must put a REG_DEAD note here unless there
	     is one already.  */
	  else if (reg_referenced_p (XEXP (note, 0), PATTERN (i3))
		   && ! (GET_CODE (XEXP (note, 0)) == REG
			 ? find_regno_note (i3, REG_DEAD,
					    REGNO (XEXP (note, 0)))
			 : find_reg_note (i3, REG_DEAD, XEXP (note, 0))))
	    {
	      PUT_REG_NOTE_KIND (note, REG_DEAD);
	      place = i3;
	    }
	  break;

	case REG_EQUAL:
	case REG_EQUIV:
	case REG_NONNEG:
	case REG_NOALIAS:
	  /* These notes say something about results of an insn.  We can
	     only support them if they used to be on I3 in which case they
	     remain on I3.  Otherwise they are ignored.

	     If the note refers to an expression that is not a constant, we
	     must also ignore the note since we cannot tell whether the
	     equivalence is still true.  It might be possible to do
	     slightly better than this (we only have a problem if I2DEST
	     or I1DEST is present in the expression), but it doesn't
	     seem worth the trouble.  */

	  if (from_insn == i3
	      && (XEXP (note, 0) == 0 || CONSTANT_P (XEXP (note, 0))))
	    place = i3;
	  break;

	case REG_INC:
	case REG_NO_CONFLICT:
	case REG_LABEL:
	  /* These notes say something about how a register is used.  They must
	     be present on any use of the register in I2 or I3.  */
	  if (reg_mentioned_p (XEXP (note, 0), PATTERN (i3)))
	    place = i3;

	  if (i2 && reg_mentioned_p (XEXP (note, 0), PATTERN (i2)))
	    {
	      if (place)
		place2 = i2;
	      else
		place = i2;
	    }
	  break;

	case REG_WAS_0:
	  /* It is too much trouble to try to see if this note is still
	     correct in all situations.  It is better to simply delete it.  */
	  break;

	case REG_RETVAL:
	  /* If the insn previously containing this note still exists,
	     put it back where it was.  Otherwise move it to the previous
	     insn.  Adjust the corresponding REG_LIBCALL note.  */
	  if (GET_CODE (from_insn) != NOTE)
	    place = from_insn;
	  else
	    {
	      tem = find_reg_note (XEXP (note, 0), REG_LIBCALL, NULL_RTX);
	      place = prev_real_insn (from_insn);
	      if (tem && place)
		XEXP (tem, 0) = place;
	    }
	  break;

	case REG_LIBCALL:
	  /* This is handled similarly to REG_RETVAL.  */
	  if (GET_CODE (from_insn) != NOTE)
	    place = from_insn;
	  else
	    {
	      tem = find_reg_note (XEXP (note, 0), REG_RETVAL, NULL_RTX);
	      place = next_real_insn (from_insn);
	      if (tem && place)
		XEXP (tem, 0) = place;
	    }
	  break;

	case REG_DEAD:
	  /* If the register is used as an input in I3, it dies there.
	     Similarly for I2, if it is non-zero and adjacent to I3.

	     If the register is not used as an input in either I3 or I2
	     and it is not one of the registers we were supposed to eliminate,
	     there are two possibilities.  We might have a non-adjacent I2
	     or we might have somehow eliminated an additional register
	     from a computation.  For example, we might have had A & B where
	     we discover that B will always be zero.  In this case we will
	     eliminate the reference to A.

	     In both cases, we must search to see if we can find a previous
	     use of A and put the death note there.  */

	  if (from_insn
	      && GET_CODE (from_insn) == CALL_INSN
	      && find_reg_fusage (from_insn, USE, XEXP (note, 0)))
	    place = from_insn;
	  else if (reg_referenced_p (XEXP (note, 0), PATTERN (i3)))
	    place = i3;
	  else if (i2 != 0 && next_nonnote_insn (i2) == i3
		   && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
	    place = i2;

	  if (XEXP (note, 0) == elim_i2 || XEXP (note, 0) == elim_i1)
	    break;

	  /* If the register is used in both I2 and I3 and it dies in I3,
	     we might have added another reference to it.  If reg_n_refs
	     was 2, bump it to 3.  This has to be correct since the
	     register must have been set somewhere.  The reason this is
	     done is because local-alloc.c treats 2 references as a
	     special case.  */

	  if (place == i3 && i2 != 0 && GET_CODE (XEXP (note, 0)) == REG
	      && REG_N_REFS (REGNO (XEXP (note, 0))) == 2
	      && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
	    REG_N_REFS (REGNO (XEXP (note, 0))) = 3;

	  if (place == 0)
	    {
	      for (tem = prev_nonnote_insn (i3);
		   place == 0 && tem
		   && (GET_CODE (tem) == INSN || GET_CODE (tem) == CALL_INSN);
		   tem = prev_nonnote_insn (tem))
		{
		  /* If the register is being set at TEM, see if that is all
		     TEM is doing.  If so, delete TEM.  Otherwise, make this
		     into a REG_UNUSED note instead.  */
		  if (reg_set_p (XEXP (note, 0), PATTERN (tem)))
		    {
		      rtx set = single_set (tem);

		      /* Verify that it was the set, and not a clobber that
			 modified the register.  */

		      if (set != 0 && ! side_effects_p (SET_SRC (set))
			  && (rtx_equal_p (XEXP (note, 0), SET_DEST (set))
			      || (GET_CODE (SET_DEST (set)) == SUBREG
				  && rtx_equal_p (XEXP (note, 0),
						  XEXP (SET_DEST (set), 0)))))
			{
			  /* Move the notes and links of TEM elsewhere.
			     This might delete other dead insns recursively.
			     First set the pattern to something that won't use
			     any register.  */

			  PATTERN (tem) = pc_rtx;

			  distribute_notes (REG_NOTES (tem), tem, tem,
					    NULL_RTX, NULL_RTX, NULL_RTX);
			  distribute_links (LOG_LINKS (tem));

			  PUT_CODE (tem, NOTE);
			  NOTE_LINE_NUMBER (tem) = NOTE_INSN_DELETED;
			  NOTE_SOURCE_FILE (tem) = 0;
			}
		      else
			{
			  PUT_REG_NOTE_KIND (note, REG_UNUSED);

			  /* If there isn't already a REG_UNUSED note, put one
			     here.  */
			  if (! find_regno_note (tem, REG_UNUSED,
						 REGNO (XEXP (note, 0))))
			    place = tem;
			  break;
			}
		    }
		  else if (reg_referenced_p (XEXP (note, 0), PATTERN (tem))
			   || (GET_CODE (tem) == CALL_INSN
			       && find_reg_fusage (tem, USE, XEXP (note, 0))))
		    {
		      place = tem;

		      /* If we are doing a 3->2 combination, and we have a
			 register which formerly died in i3 and was not used
			 by i2, which now no longer dies in i3 and is used in
			 i2 but does not die in i2, and place is between i2
			 and i3, then we may need to move a link from place to
			 i2.  */
		      if (i2 && INSN_UID (place) <= max_uid_cuid
			  && INSN_CUID (place) > INSN_CUID (i2)
			  && from_insn && INSN_CUID (from_insn) > INSN_CUID (i2)
			  && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
			{
			  rtx links = LOG_LINKS (place);
			  LOG_LINKS (place) = 0;
			  distribute_links (links);
			}
		      break;
		    }
		}

	      /* If we haven't found an insn for the death note and it
		 is still a REG_DEAD note, but we have hit a CODE_LABEL,
		 insert a USE insn for the register at that label and
		 put the death note there.  This prevents problems with
		 call-state tracking in caller-save.c.  */
	      if (REG_NOTE_KIND (note) == REG_DEAD && place == 0 && tem != 0)
		{
		  place
		    = emit_insn_after (gen_rtx_USE (VOIDmode, XEXP (note, 0)),
				       tem);

		  /* If this insn was emitted between blocks, then update
		     basic_block_head of the current block to include it.  */
		  if (basic_block_end[this_basic_block - 1] == tem)
		    basic_block_head[this_basic_block] = place;
		}
	    }

	  /* If the register is set or already dead at PLACE, we needn't do
	     anything with this note if it is still a REG_DEAD note.

	     Note that we cannot use just `dead_or_set_p' here since we can
	     convert an assignment to a register into a bit-field assignment.
	     Therefore, we must also omit the note if the register is the
	     target of a bitfield assignment.  */

	  if (place && REG_NOTE_KIND (note) == REG_DEAD)
	    {
	      int regno = REGNO (XEXP (note, 0));

	      if (dead_or_set_p (place, XEXP (note, 0))
		  || reg_bitfield_target_p (XEXP (note, 0), PATTERN (place)))
		{
		  /* Unless the register previously died in PLACE, clear
		     reg_last_death.  [I no longer understand why this is
		     being done.]  */
		  if (reg_last_death[regno] != place)
		    reg_last_death[regno] = 0;
		  place = 0;
		}
	      else
		reg_last_death[regno] = place;

	      /* If this is a death note for a hard reg that is occupying
		 multiple registers, ensure that we are still using all
		 parts of the object.  If we find a piece of the object
		 that is unused, we must add a USE for that piece before
		 PLACE and put the appropriate REG_DEAD note on it.

		 An alternative would be to put a REG_UNUSED for the pieces
		 on the insn that set the register, but that can't be done if
		 it is not in the same block.  It is simpler, though less
		 efficient, to add the USE insns.  */

	      if (place && regno < FIRST_PSEUDO_REGISTER
		  && HARD_REGNO_NREGS (regno, GET_MODE (XEXP (note, 0))) > 1)
		{
		  int endregno
		    = regno + HARD_REGNO_NREGS (regno,
						GET_MODE (XEXP (note, 0)));
		  int all_used = 1;
		  int i;

		  for (i = regno; i < endregno; i++)
		    if (! refers_to_regno_p (i, i + 1, PATTERN (place), 0)
			&& ! find_regno_fusage (place, USE, i))
		      {
			rtx piece = gen_rtx_REG (reg_raw_mode[i], i);
			rtx p;

			/* See if we already placed a USE note for this
			   register in front of PLACE.  */
			for (p = place;
			     GET_CODE (PREV_INSN (p)) == INSN
			     && GET_CODE (PATTERN (PREV_INSN (p))) == USE;
			     p = PREV_INSN (p))
			  if (rtx_equal_p (piece,
					   XEXP (PATTERN (PREV_INSN (p)), 0)))
			    {
			      p = 0;
			      break;
			    }

			if (p)
			  {
			    rtx use_insn
			      = emit_insn_before (gen_rtx_USE (VOIDmode,
							       piece),
						  p);
			    REG_NOTES (use_insn)
			      = gen_rtx_EXPR_LIST (REG_DEAD, piece,
						   REG_NOTES (use_insn));
			  }

			all_used = 0;
		      }

		  /* Check for the case where the register dying partially
		     overlaps the register set by this insn.  */
		  if (all_used)
		    for (i = regno; i < endregno; i++)
		      if (dead_or_set_regno_p (place, i))
			{
			  all_used = 0;
			  break;
			}

		  if (! all_used)
		    {
		      /* Put only REG_DEAD notes for pieces that are
			 still used and that are not already dead or set.  */

		      for (i = regno; i < endregno; i++)
			{
			  rtx piece = gen_rtx_REG (reg_raw_mode[i], i);

			  if ((reg_referenced_p (piece, PATTERN (place))
			       || (GET_CODE (place) == CALL_INSN
				   && find_reg_fusage (place, USE, piece)))
			      && ! dead_or_set_p (place, piece)
			      && ! reg_bitfield_target_p (piece,
							  PATTERN (place)))
			    REG_NOTES (place)
			      = gen_rtx_EXPR_LIST (REG_DEAD,
						   piece, REG_NOTES (place));
			}

		      place = 0;
		    }
		}
	    }
	  break;

	default:
	  /* Any other notes should not be present at this point in the
	     compilation.  */
	  abort ();
	}

      if (place)
	{
	  XEXP (note, 1) = REG_NOTES (place);
	  REG_NOTES (place) = note;
	}
      else if ((REG_NOTE_KIND (note) == REG_DEAD
		|| REG_NOTE_KIND (note) == REG_UNUSED)
	       && GET_CODE (XEXP (note, 0)) == REG)
	REG_N_DEATHS (REGNO (XEXP (note, 0)))--;

      if (place2)
	{
	  if ((REG_NOTE_KIND (note) == REG_DEAD
	       || REG_NOTE_KIND (note) == REG_UNUSED)
	      && GET_CODE (XEXP (note, 0)) == REG)
	    REG_N_DEATHS (REGNO (XEXP (note, 0)))++;

	  REG_NOTES (place2) = gen_rtx_fmt_ee (GET_CODE (note),
					       REG_NOTE_KIND (note),
					       XEXP (note, 0),
					       REG_NOTES (place2));
	}
    }
}
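
/* Example (added for exposition): when I1 and I2 are folded into I3 and
   (reg 66), which formerly died in I2, is no longer referenced anywhere,
   its REG_DEAD note cannot stay on the deleted I2.  The REG_DEAD case
   above then walks backward from I3: if the insn that set (reg 66) does
   nothing else, that setter is itself deleted; otherwise the note is
   re-placed on the last real use, or becomes REG_UNUSED on the setter.  */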
\f
/* Similarly to above, distribute the LOG_LINKS that used to be present on
   I3, I2, and I1 to new locations.  This is also called in one case to
   add a link pointing at I3 when I3's destination is changed.  */

static void
distribute_links (links)
     rtx links;
{
  rtx link, next_link;

  for (link = links; link; link = next_link)
    {
      rtx place = 0;
      rtx insn;
      rtx set, reg;

      next_link = XEXP (link, 1);

      /* If the insn that this link points to is a NOTE or isn't a single
	 set, ignore it.  In the latter case, it isn't clear what we
	 can do other than ignore the link, since we can't tell which
	 register it was for.  Such links wouldn't be used by combine
	 anyway.

	 It is not possible for the destination of the target of the link to
	 have been changed by combine.  The only potential of this is if we
	 replace I3, I2, and I1 by I3 and I2.  But in that case the
	 destination of I2 also remains unchanged.  */

      if (GET_CODE (XEXP (link, 0)) == NOTE
	  || (set = single_set (XEXP (link, 0))) == 0)
	continue;

      reg = SET_DEST (set);
      while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
	     || GET_CODE (reg) == SIGN_EXTRACT
	     || GET_CODE (reg) == STRICT_LOW_PART)
	reg = XEXP (reg, 0);

      /* A LOG_LINK is defined as being placed on the first insn that uses
	 a register and points to the insn that sets the register.  Start
	 searching at the next insn after the target of the link and stop
	 when we reach a set of the register or the end of the basic block.

	 Note that this correctly handles the link that used to point from
	 I3 to I2.  Also note that not much searching is typically done here
	 since most links don't point very far away.  */

      for (insn = NEXT_INSN (XEXP (link, 0));
	   (insn && (this_basic_block == n_basic_blocks - 1
		     || basic_block_head[this_basic_block + 1] != insn));
	   insn = NEXT_INSN (insn))
	if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
	    && reg_overlap_mentioned_p (reg, PATTERN (insn)))
	  {
	    if (reg_referenced_p (reg, PATTERN (insn)))
	      place = insn;
	    break;
	  }
	else if (GET_CODE (insn) == CALL_INSN
		 && find_reg_fusage (insn, USE, reg))
	  {
	    place = insn;
	    break;
	  }

      /* If we found a place to put the link, place it there unless there
	 is already a link to the same insn as LINK at that point.  */

      if (place)
	{
	  rtx link2;

	  for (link2 = LOG_LINKS (place); link2; link2 = XEXP (link2, 1))
	    if (XEXP (link2, 0) == XEXP (link, 0))
	      break;

	  if (link2 == 0)
	    {
	      XEXP (link, 1) = LOG_LINKS (place);
	      LOG_LINKS (place) = link;

	      /* Set added_links_insn to the earliest insn we added a
		 link to.  */
	      if (added_links_insn == 0
		  || INSN_CUID (added_links_insn) > INSN_CUID (place))
		added_links_insn = place;
	    }
	}
    }
}
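
/* Example (added for exposition): if I2 was (set (reg 70) ...) and the
   LOG_LINK from I3 back to I2 is being redistributed, the loop above
   starts at the insn after the old setter and stops at the first insn
   that still references (reg 70), attaching the link there.  This keeps
   the invariant that a LOG_LINK lives on the first use of a register
   after its set.  */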
\f
/* Compute INSN_CUID for INSN, which is an insn made by combine.  */

static int
insn_cuid (insn)
     rtx insn;
{
  while (insn != 0 && INSN_UID (insn) > max_uid_cuid
	 && GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == USE)
    insn = NEXT_INSN (insn);

  if (INSN_UID (insn) > max_uid_cuid)
    abort ();

  return INSN_CUID (insn);
}
\f
void
dump_combine_stats (file)
     FILE *file;
{
  fprintf
    (file,
     ";; Combiner statistics: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n\n",
     combine_attempts, combine_merges, combine_extras, combine_successes);
}

void
dump_combine_total_stats (file)
     FILE *file;
{
  fprintf
    (file,
     "\n;; Combiner totals: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n",
     total_attempts, total_merges, total_extras, total_successes);
}