/* Optimize by combining instructions for GNU compiler.
   Copyright (C) 1987, 1988, 1992, 1993, 1994 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.  */

/* This module is essentially the "combiner" phase of the U. of Arizona
   Portable Optimizer, but redone to work on our list-structured
   representation for RTL instead of their string representation.

   The LOG_LINKS of each insn identify the most recent assignment
   to each REG used in the insn.  It is a list of previous insns,
   each of which contains a SET for a REG that is used in this insn
   and not used or set in between.  LOG_LINKs never cross basic blocks.
   They were set up by the preceding pass (lifetime analysis).

   We try to combine each pair of insns joined by a logical link.
   We also try to combine triples of insns A, B and C when
   C has a link back to B and B has a link back to A.

   LOG_LINKS does not have links for use of the CC0.  They don't
   need to, because the insn that sets the CC0 is always immediately
   before the insn that tests it.  So we always regard a branch
   insn as having a logical link to the preceding insn.  The same is true
   for an insn explicitly using CC0.

   We check (with use_crosses_set_p) to avoid combining in such a way
   as to move a computation to a place where its value would be different.

   Combination is done by mathematically substituting the previous
   insn(s) values for the regs they set into the expressions in
   the later insns that refer to these regs.  If the result is a valid insn
   for our target machine, according to the machine description,
   we install it, delete the earlier insns, and update the data flow
   information (LOG_LINKS and REG_NOTES) for what we did.

   There are a few exceptions where the dataflow information created by
   flow.c isn't completely updated:

   - reg_live_length is not updated
   - reg_n_refs is not adjusted in the rare case when a register is
     no longer required in a computation
   - there are extremely rare cases (see distribute_notes) when a
     REG_DEAD note is lost
   - a LOG_LINKS entry that refers to an insn with multiple SETs may be
     removed because there is no way to know which register it was
     linking

   To simplify substitution, we combine only when the earlier insn(s)
   consist of only a single assignment.  To simplify updating afterward,
   we never combine when a subroutine call appears in the middle.

   Since we do not represent assignments to CC0 explicitly except when that
   is all an insn does, there is no LOG_LINKS entry in an insn that uses
   the condition code for the insn that set the condition code.
   Fortunately, these two insns must be consecutive.
   Therefore, every JUMP_INSN is taken to have an implicit logical link
   to the preceding insn.  This is not quite right, since non-jumps can
   also use the condition code; but in practice such insns would not
   combine anyway.  */
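
/* For illustration only -- a hypothetical sketch, not an excerpt from any
   machine description.  Given a linked pair of insns

	(set (reg:SI 100) (plus:SI (reg:SI 99) (const_int 4)))
	(set (reg:SI 101) (mult:SI (reg:SI 100) (reg:SI 98)))

   where (reg:SI 100) dies in the second insn, combine substitutes the
   first SET's source into the second, producing

	(set (reg:SI 101) (mult:SI (plus:SI (reg:SI 99) (const_int 4))
				   (reg:SI 98)))

   The result is installed only if recog_for_combine accepts it; otherwise
   every change is rolled back through the undo buffer defined below.  */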

#include "config.h"
#ifdef __STDC__
#include <stdarg.h>
#else
#include <varargs.h>
#endif

/* Must precede rtl.h for FFS.  */
#include <stdio.h>

#include "rtl.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "expr.h"
#include "basic-block.h"
#include "insn-config.h"
#include "insn-flags.h"
#include "insn-codes.h"
#include "insn-attr.h"
#include "recog.h"
#include "real.h"

/* It is not safe to use ordinary gen_lowpart in combine.
   Use gen_lowpart_for_combine instead.  See comments there.  */
#define gen_lowpart dont_use_gen_lowpart_you_dummy

/* Number of attempts to combine instructions in this function.  */

static int combine_attempts;

/* Number of attempts that got as far as substitution in this function.  */

static int combine_merges;

/* Number of instructions combined with added SETs in this function.  */

static int combine_extras;

/* Number of instructions combined in this function.  */

static int combine_successes;

/* Totals over entire compilation.  */

static int total_attempts, total_merges, total_extras, total_successes;

/* Define a default value for REVERSIBLE_CC_MODE.
   We can never assume that a condition code mode is safe to reverse unless
   the md tells us so.  */
#ifndef REVERSIBLE_CC_MODE
#define REVERSIBLE_CC_MODE(MODE) 0
#endif
\f
/* Vector mapping INSN_UIDs to cuids.
   The cuids are like uids but always increase monotonically.
   Combine always uses cuids so that it can compare them.
   But actually renumbering the uids, which we used to do,
   proves to be a bad idea because it makes it hard to compare
   the dumps produced by earlier passes with those from later passes.  */

static int *uid_cuid;

/* Get the cuid of an insn.  */

#define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])

/* Maximum register number, which is the size of the tables below.  */

static int combine_max_regno;

/* Record last point of death of (hard or pseudo) register n.  */

static rtx *reg_last_death;

/* Record last point of modification of (hard or pseudo) register n.  */

static rtx *reg_last_set;

/* Record the cuid of the last insn that invalidated memory
   (anything that writes memory, and subroutine calls, but not pushes).  */

static int mem_last_set;

/* Record the cuid of the last CALL_INSN
   so we can tell whether a potential combination crosses any calls.  */

static int last_call_cuid;

/* When `subst' is called, this is the insn that is being modified
   (by combining in a previous insn).  The PATTERN of this insn
   is still the old pattern partially modified and it should not be
   looked at, but this may be used to examine the successors of the insn
   to judge whether a simplification is valid.  */

static rtx subst_insn;

/* This is the lowest CUID that `subst' is currently dealing with.
   get_last_value will not return a value if the register was set at or
   after this CUID.  If not for this mechanism, we could get confused if
   I2 or I1 in try_combine were an insn that used the old value of a register
   to obtain a new value.  In that case, we might erroneously get the
   new value of the register when we wanted the old one.  */

static int subst_low_cuid;

/* This contains any hard registers that are used in newpat; reg_dead_at_p
   must consider all these registers to be always live.  */

static HARD_REG_SET newpat_used_regs;

/* This is an insn to which a LOG_LINKS entry has been added.  If this
   insn is earlier than I2 or I3, combine should rescan starting at
   that location.  */

static rtx added_links_insn;

/* This is the value of undobuf.num_undo when we started processing this
   substitution.  This will prevent gen_rtx_combine from re-using a piece
   from the previous expression.  Doing so can produce circular rtl
   structures.  */

static int previous_num_undos;

/* Basic block number of the block in which we are performing combines.  */
static int this_basic_block;
\f
/* The next group of arrays allows the recording of the last value assigned
   to (hard or pseudo) register n.  We use this information to see if an
   operation being processed is redundant given a prior operation performed
   on the register.  For example, an `and' with a constant is redundant if
   all the zero bits are already known to be turned off.

   We use an approach similar to that used by cse, but change it in the
   following ways:

   (1) We do not want to reinitialize at each label.
   (2) It is useful, but not critical, to know the actual value assigned
       to a register.  Often just its form is helpful.

   Therefore, we maintain the following arrays:

   reg_last_set_value		the last value assigned
   reg_last_set_label		records the value of label_tick when the
				register was assigned
   reg_last_set_table_tick	records the value of label_tick when a
				value using the register is assigned
   reg_last_set_invalid		set to non-zero when it is not valid
				to use the value of this register in some
				register's value

   To understand the usage of these tables, it is important to understand
   the distinction between the value in reg_last_set_value being valid
   and the register being validly contained in some other expression in the
   table.

   Entry I in reg_last_set_value is valid if it is non-zero, and either
   reg_n_sets[i] is 1 or reg_last_set_label[i] == label_tick.

   Register I may validly appear in any expression returned for the value
   of another register if reg_n_sets[i] is 1.  It may also appear in the
   value for register J if reg_last_set_label[i] < reg_last_set_label[j] or
   reg_last_set_invalid[j] is zero.

   If an expression is found in the table containing a register which may
   not validly appear in an expression, the register is replaced by
   something that won't match, (clobber (const_int 0)).

   reg_last_set_invalid[i] is set non-zero when register I is being assigned
   to and reg_last_set_table_tick[i] == label_tick.  */
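
/* An illustrative walk-through of the rules above, with hypothetical
   register numbers not taken from any real dump: suppose pseudo 100 is
   set exactly once in the function (reg_n_sets[100] == 1) while pseudo
   101 is set in two different basic blocks.  The value recorded for 101
   is usable only while reg_last_set_label[101] == label_tick, whereas
   100 may appear inside any recorded value.  If 101 is assigned again
   while an expression mentioning it is still in the table, the occurrence
   of 101 inside that expression is replaced by (clobber (const_int 0)),
   so the stale expression can never match anything.  */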

/* Record last value assigned to (hard or pseudo) register n.  */

static rtx *reg_last_set_value;

/* Record the value of label_tick when the value for register n is placed in
   reg_last_set_value[n].  */

static int *reg_last_set_label;

/* Record the value of label_tick when an expression involving register n
   is placed in reg_last_set_value.  */

static int *reg_last_set_table_tick;

/* Set non-zero if references to register n in expressions should not be
   used.  */

static char *reg_last_set_invalid;

/* Incremented for each label.  */

static int label_tick;

/* Some registers that are set more than once and used in more than one
   basic block are nevertheless always set in similar ways.  For example,
   a QImode register may be loaded from memory in two places on a machine
   where byte loads zero extend.

   We record in the following array what we know about the nonzero
   bits of a register, specifically which bits are known to be zero.

   If an entry is zero, it means that we don't know anything special.  */

static unsigned HOST_WIDE_INT *reg_nonzero_bits;
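
/* For instance (an illustrative sketch, not from any real dump): on a
   machine whose byte loads zero extend, a QImode pseudo that is only
   ever loaded from memory has every set leaving bits 8 and up zero, so
   its reg_nonzero_bits entry would be 0xff.  */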

/* Mode used to compute significance in reg_nonzero_bits.  It is the largest
   integer mode that can fit in HOST_BITS_PER_WIDE_INT.  */

static enum machine_mode nonzero_bits_mode;

/* Nonzero if we know that a register has some leading bits that are always
   equal to the sign bit.  */

static char *reg_sign_bit_copies;

/* Nonzero when reg_nonzero_bits and reg_sign_bit_copies can be safely used.
   It is zero while computing them and after combine has completed.  This
   former test prevents propagating values based on previously set values,
   which can be incorrect if a variable is modified in a loop.  */

static int nonzero_sign_valid;

/* These arrays are maintained in parallel with reg_last_set_value
   and are used to store the mode in which the register was last set,
   the bits that were known to be zero when it was last set, and the
   number of sign bit copies it was known to have when it was last set.  */

static enum machine_mode *reg_last_set_mode;
static unsigned HOST_WIDE_INT *reg_last_set_nonzero_bits;
static char *reg_last_set_sign_bit_copies;
\f
/* Record one modification to rtl structure
   to be undone by storing old_contents into *where.
   is_int is 1 if the contents are an int.  */

struct undo
{
  int is_int;
  union {rtx r; int i;} old_contents;
  union {rtx *r; int *i;} where;
};

/* Record a bunch of changes to be undone, up to MAX_UNDO of them.
   num_undo says how many are currently recorded.

   storage is nonzero if we must undo the allocation of new storage.
   The value of storage is what to pass to obfree.

   other_insn is nonzero if we have modified some other insn in the process
   of working on subst_insn.  It must be verified too.  */

#define MAX_UNDO 50

struct undobuf
{
  int num_undo;
  char *storage;
  struct undo undo[MAX_UNDO];
  rtx other_insn;
};

static struct undobuf undobuf;

/* Substitute NEWVAL, an rtx expression, into INTO, a place in some
   insn.  The substitution can be undone by undo_all.  If INTO is already
   set to NEWVAL, do not record this change.  Because computing NEWVAL might
   also call SUBST, we have to compute it before we put anything into
   the undo table.  */

#define SUBST(INTO, NEWVAL)  \
 do { rtx _new = (NEWVAL);                                              \
      if (undobuf.num_undo < MAX_UNDO)                                  \
        {                                                               \
          undobuf.undo[undobuf.num_undo].is_int = 0;                    \
          undobuf.undo[undobuf.num_undo].where.r = &INTO;               \
          undobuf.undo[undobuf.num_undo].old_contents.r = INTO;         \
          INTO = _new;                                                  \
          if (undobuf.undo[undobuf.num_undo].old_contents.r != INTO)    \
            undobuf.num_undo++;                                         \
        }                                                               \
    } while (0)

/* Similar to SUBST, but NEWVAL is an int.  INTO will normally be an XINT
   expression.
   Note that substitution for the value of a CONST_INT is not safe.  */

#define SUBST_INT(INTO, NEWVAL)  \
 do { if (undobuf.num_undo < MAX_UNDO)                                  \
        {                                                               \
          undobuf.undo[undobuf.num_undo].is_int = 1;                    \
          undobuf.undo[undobuf.num_undo].where.i = (int *) &INTO;       \
          undobuf.undo[undobuf.num_undo].old_contents.i = INTO;         \
          INTO = NEWVAL;                                                \
          if (undobuf.undo[undobuf.num_undo].old_contents.i != INTO)    \
            undobuf.num_undo++;                                         \
        }                                                               \
    } while (0)
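
/* Typical use, for illustration only (the call below is a hypothetical
   simplification, not a quote from this file):

	SUBST (SET_SRC (x), gen_binary (PLUS, mode, a, b));

   If the rewritten pattern later fails recognition, undo_all walks the
   undo array and stores each old_contents back through its where pointer,
   restoring the rtl exactly as it was.  */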

/* Number of times the pseudo being substituted for
   was found and replaced.  */

static int n_occurrences;

static void init_reg_last_arrays        PROTO(());
static void setup_incoming_promotions   PROTO(());
static void set_nonzero_bits_and_sign_copies  PROTO((rtx, rtx));
static int can_combine_p        PROTO((rtx, rtx, rtx, rtx, rtx *, rtx *));
static int combinable_i3pat     PROTO((rtx, rtx *, rtx, rtx, int, rtx *));
static rtx try_combine          PROTO((rtx, rtx, rtx));
static void undo_all            PROTO((void));
static rtx *find_split_point    PROTO((rtx *, rtx));
static rtx subst                PROTO((rtx, rtx, rtx, int, int));
static rtx simplify_rtx         PROTO((rtx, enum machine_mode, int, int));
static rtx simplify_if_then_else  PROTO((rtx));
static rtx simplify_set         PROTO((rtx));
static rtx simplify_logical     PROTO((rtx, int));
static rtx expand_compound_operation  PROTO((rtx));
static rtx expand_field_assignment  PROTO((rtx));
static rtx make_extraction      PROTO((enum machine_mode, rtx, int, rtx, int,
                                       int, int, int));
static rtx extract_left_shift   PROTO((rtx, int));
static rtx make_compound_operation  PROTO((rtx, enum rtx_code));
static int get_pos_from_mask    PROTO((unsigned HOST_WIDE_INT, int *));
static rtx force_to_mode        PROTO((rtx, enum machine_mode,
                                       unsigned HOST_WIDE_INT, rtx, int));
static rtx if_then_else_cond    PROTO((rtx, rtx *, rtx *));
static rtx known_cond           PROTO((rtx, enum rtx_code, rtx, rtx));
static rtx make_field_assignment  PROTO((rtx));
static rtx apply_distributive_law  PROTO((rtx));
static rtx simplify_and_const_int  PROTO((rtx, enum machine_mode, rtx,
                                          unsigned HOST_WIDE_INT));
static unsigned HOST_WIDE_INT nonzero_bits  PROTO((rtx, enum machine_mode));
static int num_sign_bit_copies  PROTO((rtx, enum machine_mode));
static int merge_outer_ops      PROTO((enum rtx_code *, HOST_WIDE_INT *,
                                       enum rtx_code, HOST_WIDE_INT,
                                       enum machine_mode, int *));
static rtx simplify_shift_const PROTO((rtx, enum rtx_code, enum machine_mode,
                                       rtx, int));
static int recog_for_combine    PROTO((rtx *, rtx, rtx *));
static rtx gen_lowpart_for_combine  PROTO((enum machine_mode, rtx));
static rtx gen_rtx_combine PVPROTO((enum rtx_code code, enum machine_mode mode,
                                    ...));
static rtx gen_binary           PROTO((enum rtx_code, enum machine_mode,
                                       rtx, rtx));
static rtx gen_unary            PROTO((enum rtx_code, enum machine_mode,
                                       enum machine_mode, rtx));
static enum rtx_code simplify_comparison  PROTO((enum rtx_code, rtx *, rtx *));
static int reversible_comparison_p  PROTO((rtx));
static void update_table_tick   PROTO((rtx));
static void record_value_for_reg  PROTO((rtx, rtx, rtx));
static void record_dead_and_set_regs_1  PROTO((rtx, rtx));
static void record_dead_and_set_regs  PROTO((rtx));
static int get_last_value_validate  PROTO((rtx *, int, int));
static rtx get_last_value       PROTO((rtx));
static int use_crosses_set_p    PROTO((rtx, int));
static void reg_dead_at_p_1     PROTO((rtx, rtx));
static int reg_dead_at_p        PROTO((rtx, rtx));
static void move_deaths         PROTO((rtx, int, rtx, rtx *));
static int reg_bitfield_target_p  PROTO((rtx, rtx));
static void distribute_notes    PROTO((rtx, rtx, rtx, rtx, rtx, rtx));
static void distribute_links    PROTO((rtx));
static void mark_used_regs_combine PROTO((rtx));
\f
/* Main entry point for combiner.  F is the first insn of the function.
   NREGS is the first unused pseudo-reg number.  */

void
combine_instructions (f, nregs)
     rtx f;
     int nregs;
{
  register rtx insn, next, prev;
  register int i;
  register rtx links, nextlinks;

  combine_attempts = 0;
  combine_merges = 0;
  combine_extras = 0;
  combine_successes = 0;
  undobuf.num_undo = previous_num_undos = 0;

  combine_max_regno = nregs;

  reg_nonzero_bits
    = (unsigned HOST_WIDE_INT *) alloca (nregs * sizeof (HOST_WIDE_INT));
  reg_sign_bit_copies = (char *) alloca (nregs * sizeof (char));

  bzero ((char *) reg_nonzero_bits, nregs * sizeof (HOST_WIDE_INT));
  bzero (reg_sign_bit_copies, nregs * sizeof (char));

  reg_last_death = (rtx *) alloca (nregs * sizeof (rtx));
  reg_last_set = (rtx *) alloca (nregs * sizeof (rtx));
  reg_last_set_value = (rtx *) alloca (nregs * sizeof (rtx));
  reg_last_set_table_tick = (int *) alloca (nregs * sizeof (int));
  reg_last_set_label = (int *) alloca (nregs * sizeof (int));
  reg_last_set_invalid = (char *) alloca (nregs * sizeof (char));
  reg_last_set_mode
    = (enum machine_mode *) alloca (nregs * sizeof (enum machine_mode));
  reg_last_set_nonzero_bits
    = (unsigned HOST_WIDE_INT *) alloca (nregs * sizeof (HOST_WIDE_INT));
  reg_last_set_sign_bit_copies
    = (char *) alloca (nregs * sizeof (char));

  init_reg_last_arrays ();

  init_recog_no_volatile ();

  /* Compute maximum uid value so uid_cuid can be allocated.  */

  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    if (INSN_UID (insn) > i)
      i = INSN_UID (insn);

  uid_cuid = (int *) alloca ((i + 1) * sizeof (int));

  nonzero_bits_mode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);

  /* Don't use reg_nonzero_bits when computing it.  This can cause problems
     when, for example, we have j <<= 1 in a loop.  */

  nonzero_sign_valid = 0;

  /* Compute the mapping from uids to cuids.
     Cuids are numbers assigned to insns, like uids,
     except that cuids increase monotonically through the code.

     Scan all SETs and see if we can deduce anything about what
     bits are known to be zero for some registers and how many copies
     of the sign bit are known to exist for those registers.

     Also set any known values so that we can use them while searching
     for what bits are known to be set.  */

  label_tick = 1;

  setup_incoming_promotions ();

  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    {
      INSN_CUID (insn) = ++i;
      subst_low_cuid = i;
      subst_insn = insn;

      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
        {
          note_stores (PATTERN (insn), set_nonzero_bits_and_sign_copies);
          record_dead_and_set_regs (insn);
        }

      if (GET_CODE (insn) == CODE_LABEL)
        label_tick++;
    }

  nonzero_sign_valid = 1;

  /* Now scan all the insns in forward order.  */

  this_basic_block = -1;
  label_tick = 1;
  last_call_cuid = 0;
  mem_last_set = 0;
  init_reg_last_arrays ();
  setup_incoming_promotions ();

  for (insn = f; insn; insn = next ? next : NEXT_INSN (insn))
    {
      next = 0;

      /* If INSN starts a new basic block, update our basic block number.  */
      if (this_basic_block + 1 < n_basic_blocks
          && basic_block_head[this_basic_block + 1] == insn)
        this_basic_block++;

      if (GET_CODE (insn) == CODE_LABEL)
        label_tick++;

      else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
        {
          /* Try this insn with each insn it links back to.  */

          for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
            if ((next = try_combine (insn, XEXP (links, 0), NULL_RTX)) != 0)
              goto retry;

          /* Try each sequence of three linked insns ending with this one.  */

          for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
            for (nextlinks = LOG_LINKS (XEXP (links, 0)); nextlinks;
                 nextlinks = XEXP (nextlinks, 1))
              if ((next = try_combine (insn, XEXP (links, 0),
                                       XEXP (nextlinks, 0))) != 0)
                goto retry;

#ifdef HAVE_cc0
          /* Try to combine a jump insn that uses CC0
             with a preceding insn that sets CC0, and maybe with its
             logical predecessor as well.
             This is how we make decrement-and-branch insns.
             We need this special code because data flow connections
             via CC0 do not get entered in LOG_LINKS.  */

          if (GET_CODE (insn) == JUMP_INSN
              && (prev = prev_nonnote_insn (insn)) != 0
              && GET_CODE (prev) == INSN
              && sets_cc0_p (PATTERN (prev)))
            {
              if ((next = try_combine (insn, prev, NULL_RTX)) != 0)
                goto retry;

              for (nextlinks = LOG_LINKS (prev); nextlinks;
                   nextlinks = XEXP (nextlinks, 1))
                if ((next = try_combine (insn, prev,
                                         XEXP (nextlinks, 0))) != 0)
                  goto retry;
            }

          /* Do the same for an insn that explicitly references CC0.  */
          if (GET_CODE (insn) == INSN
              && (prev = prev_nonnote_insn (insn)) != 0
              && GET_CODE (prev) == INSN
              && sets_cc0_p (PATTERN (prev))
              && GET_CODE (PATTERN (insn)) == SET
              && reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (insn))))
            {
              if ((next = try_combine (insn, prev, NULL_RTX)) != 0)
                goto retry;

              for (nextlinks = LOG_LINKS (prev); nextlinks;
                   nextlinks = XEXP (nextlinks, 1))
                if ((next = try_combine (insn, prev,
                                         XEXP (nextlinks, 0))) != 0)
                  goto retry;
            }

          /* Finally, see if any of the insns that this insn links to
             explicitly references CC0.  If so, try this insn, that insn,
             and its predecessor if it sets CC0.  */
          for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
            if (GET_CODE (XEXP (links, 0)) == INSN
                && GET_CODE (PATTERN (XEXP (links, 0))) == SET
                && reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (XEXP (links, 0))))
                && (prev = prev_nonnote_insn (XEXP (links, 0))) != 0
                && GET_CODE (prev) == INSN
                && sets_cc0_p (PATTERN (prev))
                && (next = try_combine (insn, XEXP (links, 0), prev)) != 0)
              goto retry;
#endif

          /* Try combining an insn with two different insns whose results it
             uses.  */
          for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
            for (nextlinks = XEXP (links, 1); nextlinks;
                 nextlinks = XEXP (nextlinks, 1))
              if ((next = try_combine (insn, XEXP (links, 0),
                                       XEXP (nextlinks, 0))) != 0)
                goto retry;

          if (GET_CODE (insn) != NOTE)
            record_dead_and_set_regs (insn);

        retry:
          ;
        }
    }

  total_attempts += combine_attempts;
  total_merges += combine_merges;
  total_extras += combine_extras;
  total_successes += combine_successes;

  nonzero_sign_valid = 0;
}

/* Wipe the reg_last_xxx arrays in preparation for another pass.  */

static void
init_reg_last_arrays ()
{
  int nregs = combine_max_regno;

  bzero ((char *) reg_last_death, nregs * sizeof (rtx));
  bzero ((char *) reg_last_set, nregs * sizeof (rtx));
  bzero ((char *) reg_last_set_value, nregs * sizeof (rtx));
  bzero ((char *) reg_last_set_table_tick, nregs * sizeof (int));
  bzero ((char *) reg_last_set_label, nregs * sizeof (int));
  bzero (reg_last_set_invalid, nregs * sizeof (char));
  bzero ((char *) reg_last_set_mode, nregs * sizeof (enum machine_mode));
  bzero ((char *) reg_last_set_nonzero_bits, nregs * sizeof (HOST_WIDE_INT));
  bzero (reg_last_set_sign_bit_copies, nregs * sizeof (char));
}
\f
/* Set up any promoted values for incoming argument registers.  */

static void
setup_incoming_promotions ()
{
#ifdef PROMOTE_FUNCTION_ARGS
  int regno;
  rtx reg;
  enum machine_mode mode;
  int unsignedp;
  rtx first = get_insns ();

  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if (FUNCTION_ARG_REGNO_P (regno)
        && (reg = promoted_input_arg (regno, &mode, &unsignedp)) != 0)
      record_value_for_reg (reg, first,
                            gen_rtx (unsignedp ? ZERO_EXTEND : SIGN_EXTEND,
                                     GET_MODE (reg),
                                     gen_rtx (CLOBBER, mode, const0_rtx)));
#endif
}
\f
/* Called via note_stores.  If X is a pseudo that is used in more than
   one basic block, is narrower than HOST_BITS_PER_WIDE_INT, and is being
   set, record what bits are known zero.  If we are clobbering X,
   ignore this "set" because the clobbered value won't be used.

   If we are setting only a portion of X and we can't figure out what
   portion, assume all bits will be used since we don't know what will
   be happening.

   Similarly, set how many bits of X are known to be copies of the sign bit
   at all locations in the function.  This is the smallest number implied
   by any set of X.  */

static void
set_nonzero_bits_and_sign_copies (x, set)
     rtx x;
     rtx set;
{
  int num;

  if (GET_CODE (x) == REG
      && REGNO (x) >= FIRST_PSEUDO_REGISTER
      && reg_n_sets[REGNO (x)] > 1
      && reg_basic_block[REGNO (x)] < 0
      /* If this register is undefined at the start of the file, we can't
         say what its contents were.  */
      && ! (basic_block_live_at_start[0][REGNO (x) / REGSET_ELT_BITS]
            & ((REGSET_ELT_TYPE) 1 << (REGNO (x) % REGSET_ELT_BITS)))
      && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (set) == CLOBBER)
        {
          reg_nonzero_bits[REGNO (x)] = GET_MODE_MASK (GET_MODE (x));
          reg_sign_bit_copies[REGNO (x)] = 0;
          return;
        }

      /* If this is a complex assignment, see if we can convert it into a
         simple assignment.  */
      set = expand_field_assignment (set);

      /* If this is a simple assignment, or we have a paradoxical SUBREG,
         set what we know about X.  */

      if (SET_DEST (set) == x
          || (GET_CODE (SET_DEST (set)) == SUBREG
              && (GET_MODE_SIZE (GET_MODE (SET_DEST (set)))
                  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (set)))))
              && SUBREG_REG (SET_DEST (set)) == x))
        {
          rtx src = SET_SRC (set);

#ifdef SHORT_IMMEDIATES_SIGN_EXTEND
          /* If X is narrower than a word and SRC is a non-negative
             constant that would appear negative in the mode of X,
             sign-extend it for use in reg_nonzero_bits because some
             machines (maybe most) will actually do the sign-extension
             and this is the conservative approach.

             ??? For 2.5, try to tighten up the MD files in this regard
             instead of this kludge.  */

          if (GET_MODE_BITSIZE (GET_MODE (x)) < BITS_PER_WORD
              && GET_CODE (src) == CONST_INT
              && INTVAL (src) > 0
              && 0 != (INTVAL (src)
                       & ((HOST_WIDE_INT) 1
                          << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
            src = GEN_INT (INTVAL (src)
                           | ((HOST_WIDE_INT) (-1)
                              << GET_MODE_BITSIZE (GET_MODE (x))));
#endif

          reg_nonzero_bits[REGNO (x)]
            |= nonzero_bits (src, nonzero_bits_mode);
          num = num_sign_bit_copies (SET_SRC (set), GET_MODE (x));
          if (reg_sign_bit_copies[REGNO (x)] == 0
              || reg_sign_bit_copies[REGNO (x)] > num)
            reg_sign_bit_copies[REGNO (x)] = num;
        }
      else
        {
          reg_nonzero_bits[REGNO (x)] = GET_MODE_MASK (GET_MODE (x));
          reg_sign_bit_copies[REGNO (x)] = 0;
        }
    }
}
\f
/* See if INSN can be combined into I3.  PRED and SUCC are optionally
   insns that were previously combined into I3 or that will be combined
   into the merger of INSN and I3.

   Return 0 if the combination is not allowed for any reason.

   If the combination is allowed, *PDEST will be set to the single
   destination of INSN and *PSRC to the single source, and this function
   will return 1.  */

static int
can_combine_p (insn, i3, pred, succ, pdest, psrc)
     rtx insn;
     rtx i3;
     rtx pred, succ;
     rtx *pdest, *psrc;
{
  int i;
  rtx set = 0, src, dest;
  rtx p, link;
  int all_adjacent = (succ ? (next_active_insn (insn) == succ
                              && next_active_insn (succ) == i3)
                      : next_active_insn (insn) == i3);

  /* Can combine only if previous insn is a SET of a REG, a SUBREG or CC0,
     or a PARALLEL consisting of such a SET and CLOBBERs.

     If INSN has CLOBBER parallel parts, ignore them for our processing.
     By definition, these happen during the execution of the insn.  When it
     is merged with another insn, all bets are off.  If they are, in fact,
     needed and aren't also supplied in I3, they may be added by
     recog_for_combine.  Otherwise, it won't match.

     We can also ignore a SET whose SET_DEST is mentioned in a REG_UNUSED
     note.

     Get the source and destination of INSN.  If more than one, can't
     combine.  */

  if (GET_CODE (PATTERN (insn)) == SET)
    set = PATTERN (insn);
  else if (GET_CODE (PATTERN (insn)) == PARALLEL
           && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
    {
      for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
        {
          rtx elt = XVECEXP (PATTERN (insn), 0, i);

          switch (GET_CODE (elt))
            {
            /* We can ignore CLOBBERs.  */
            case CLOBBER:
              break;

            case SET:
              /* Ignore SETs whose result isn't used but not those that
                 have side-effects.  */
              if (find_reg_note (insn, REG_UNUSED, SET_DEST (elt))
                  && ! side_effects_p (elt))
                break;

              /* If we have already found a SET, this is a second one and
                 so we cannot combine with this insn.  */
              if (set)
                return 0;

              set = elt;
              break;

            default:
              /* Anything else means we can't combine.  */
              return 0;
            }
        }

      if (set == 0
          /* If SET_SRC is an ASM_OPERANDS we can't throw away these CLOBBERs,
             so don't do anything with it.  */
          || GET_CODE (SET_SRC (set)) == ASM_OPERANDS)
        return 0;
    }
  else
    return 0;

  if (set == 0)
    return 0;

  set = expand_field_assignment (set);
  src = SET_SRC (set), dest = SET_DEST (set);

  /* Don't eliminate a store in the stack pointer.  */
  if (dest == stack_pointer_rtx
      /* If we couldn't eliminate a field assignment, we can't combine.  */
      || GET_CODE (dest) == ZERO_EXTRACT || GET_CODE (dest) == STRICT_LOW_PART
      /* Don't combine with an insn that sets a register to itself if it has
         a REG_EQUAL note.  This may be part of a REG_NO_CONFLICT sequence.  */
      || (rtx_equal_p (src, dest) && find_reg_note (insn, REG_EQUAL, NULL_RTX))
      /* Can't merge a function call.  */
      || GET_CODE (src) == CALL
      /* Don't eliminate a function call argument.  */
      || (GET_CODE (i3) == CALL_INSN
          && (find_reg_fusage (i3, USE, dest)
              || (GET_CODE (dest) == REG
                  && REGNO (dest) < FIRST_PSEUDO_REGISTER
                  && global_regs[REGNO (dest)])))
      /* Don't substitute into an incremented register.  */
      || FIND_REG_INC_NOTE (i3, dest)
      || (succ && FIND_REG_INC_NOTE (succ, dest))
      /* Don't combine the end of a libcall into anything.  */
      || find_reg_note (insn, REG_RETVAL, NULL_RTX)
      /* Make sure that DEST is not used after SUCC but before I3.  */
      || (succ && ! all_adjacent
          && reg_used_between_p (dest, succ, i3))
      /* Make sure that the value that is to be substituted for the register
         does not use any registers whose values alter in between.  However,
         if the insns are adjacent, a use can't cross a set even though we
         think it might (this can happen for a sequence of insns each setting
         the same destination; reg_last_set of that register might point to
         a NOTE).  If INSN has a REG_EQUIV note, the register is always
         equivalent to the memory so the substitution is valid even if there
         are intervening stores.  Also, don't move a volatile asm or
         UNSPEC_VOLATILE across any other insns.  */
      || (! all_adjacent
          && (((GET_CODE (src) != MEM
                || ! find_reg_note (insn, REG_EQUIV, src))
               && use_crosses_set_p (src, INSN_CUID (insn)))
              || (GET_CODE (src) == ASM_OPERANDS && MEM_VOLATILE_P (src))
              || GET_CODE (src) == UNSPEC_VOLATILE))
      /* If there is a REG_NO_CONFLICT note for DEST in I3 or SUCC, we get
         better register allocation by not doing the combine.  */
      || find_reg_note (i3, REG_NO_CONFLICT, dest)
      || (succ && find_reg_note (succ, REG_NO_CONFLICT, dest))
      /* Don't combine across a CALL_INSN, because that would possibly
         change whether the life span of some REGs crosses calls or not,
         and it is a pain to update that information.
         Exception: if source is a constant, moving it later can't hurt.
         Accept that special case, because it helps -fforce-addr a lot.  */
      || (INSN_CUID (insn) < last_call_cuid && ! CONSTANT_P (src)))
    return 0;

  /* DEST must either be a REG or CC0.  */
  if (GET_CODE (dest) == REG)
    {
      /* If register alignment is being enforced for multi-word items in all
         cases except for parameters, it is possible to have a register copy
         insn referencing a hard register that is not allowed to contain the
         mode being copied and which would not be valid as an operand of most
         insns.  Eliminate this problem by not combining with such an insn.

         Also, on some machines we don't want to extend the life of a hard
         register.  */

      if (GET_CODE (src) == REG
          && ((REGNO (dest) < FIRST_PSEUDO_REGISTER
               && ! HARD_REGNO_MODE_OK (REGNO (dest), GET_MODE (dest)))
              /* Don't extend the life of a hard register unless it is
                 a user variable (if we have few registers) or it can't
                 fit into the desired register (meaning something special
                 is going on).  */
              || (REGNO (src) < FIRST_PSEUDO_REGISTER
                  && (! HARD_REGNO_MODE_OK (REGNO (src), GET_MODE (src))
#ifdef SMALL_REGISTER_CLASSES
                      || ! REG_USERVAR_P (src)
#endif
                      ))))
        return 0;
    }
  else if (GET_CODE (dest) != CC0)
    return 0;

  /* Don't substitute for a register intended as a clobberable operand.
     Similarly, don't substitute an expression containing a register that
     will be clobbered in I3.  */
  if (GET_CODE (PATTERN (i3)) == PARALLEL)
    for (i = XVECLEN (PATTERN (i3), 0) - 1; i >= 0; i--)
      if (GET_CODE (XVECEXP (PATTERN (i3), 0, i)) == CLOBBER
          && (reg_overlap_mentioned_p (XEXP (XVECEXP (PATTERN (i3), 0, i), 0),
                                       src)
              || rtx_equal_p (XEXP (XVECEXP (PATTERN (i3), 0, i), 0), dest)))
        return 0;

  /* If INSN contains anything volatile, or is an `asm' (whether volatile
     or not), reject, unless nothing volatile comes between it and I3,
     with the exception of SUCC.  */

  if (GET_CODE (src) == ASM_OPERANDS || volatile_refs_p (src))
    for (p = NEXT_INSN (insn); p != i3; p = NEXT_INSN (p))
      if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
          && p != succ && volatile_refs_p (PATTERN (p)))
        return 0;

  /* If there are any volatile insns between INSN and I3, reject, because
     they might affect machine state.  */

  for (p = NEXT_INSN (insn); p != i3; p = NEXT_INSN (p))
    if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
        && p != succ && volatile_insn_p (PATTERN (p)))
      return 0;

  /* If INSN or I2 contains an autoincrement or autodecrement,
     make sure that register is not used between there and I3,
     and not already used in I3 either.
     Also insist that I3 not be a jump; if it were one
     and the incremented register were spilled, we would lose.  */

#ifdef AUTO_INC_DEC
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == REG_INC
        && (GET_CODE (i3) == JUMP_INSN
            || reg_used_between_p (XEXP (link, 0), insn, i3)
            || reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i3))))
      return 0;
#endif

#ifdef HAVE_cc0
  /* Don't combine an insn that follows a CC0-setting insn.
     An insn that uses CC0 must not be separated from the one that sets it.
     We do, however, allow I2 to follow a CC0-setting insn if that insn
     is passed as I1; in that case it will be deleted also.
     We also allow combining in this case if all the insns are adjacent
     because that would leave the two CC0 insns adjacent as well.
     It would be more logical to test whether CC0 occurs inside I1 or I2,
     but that would be much slower, and this ought to be equivalent.  */

  p = prev_nonnote_insn (insn);
  if (p && p != pred && GET_CODE (p) == INSN && sets_cc0_p (PATTERN (p))
      && ! all_adjacent)
    return 0;
#endif

  /* If we get here, we have passed all the tests and the combination is
     to be allowed.  */

  *pdest = dest;
  *psrc = src;

  return 1;
}
\f
/* LOC is the location within I3 that contains its pattern or the component
   of a PARALLEL of the pattern.  We validate that it is valid for combining.

   One problem is that if I3 modifies its output, as opposed to replacing it
   entirely, we can't allow the output to contain I2DEST or I1DEST as doing
   so would produce an insn that is not equivalent to the original insns.

   Consider:

	 (set (reg:DI 101) (reg:DI 100))
	 (set (subreg:SI (reg:DI 101) 0) <foo>)

   This is NOT equivalent to:

	 (parallel [(set (subreg:SI (reg:DI 100) 0) <foo>)
		    (set (reg:DI 101) (reg:DI 100))])

   Not only does this modify 100 (in which case it might still be valid
   if 100 were dead in I2), it sets 101 to the ORIGINAL value of 100.

   We can also run into a problem if I2 sets a register that I1
   uses and I1 gets directly substituted into I3 (not via I2).  In that
   case, we would be getting the wrong value of I2DEST into I3, so we
   must reject the combination.  This case occurs when I2 and I1 both
   feed into I3, rather than when I1 feeds into I2, which feeds into I3.
   If I1_NOT_IN_SRC is non-zero, it means that finding I1 in the source
   of a SET must prevent combination from occurring.

   On machines where SMALL_REGISTER_CLASSES is defined, we don't combine
   if the destination of a SET is a hard register that isn't a user
   variable.

   Before doing the above check, we first try to expand a field assignment
   into a set of logical operations.

   If PI3_DEST_KILLED is non-zero, it is a pointer to a location in which
   we place a register that is both set and used within I3.  If more than one
   such register is detected, we fail.

   Return 1 if the combination is valid, zero otherwise.  */

static int
combinable_i3pat (i3, loc, i2dest, i1dest, i1_not_in_src, pi3dest_killed)
     rtx i3;
     rtx *loc;
     rtx i2dest;
     rtx i1dest;
     int i1_not_in_src;
     rtx *pi3dest_killed;
{
  rtx x = *loc;

  if (GET_CODE (x) == SET)
    {
      rtx set = expand_field_assignment (x);
      rtx dest = SET_DEST (set);
      rtx src = SET_SRC (set);
      rtx inner_dest = dest, inner_src = src;

      SUBST (*loc, set);

      while (GET_CODE (inner_dest) == STRICT_LOW_PART
             || GET_CODE (inner_dest) == SUBREG
             || GET_CODE (inner_dest) == ZERO_EXTRACT)
        inner_dest = XEXP (inner_dest, 0);

      /* We probably don't need this any more now that LIMIT_RELOAD_CLASS
         was added.  */
#if 0
      while (GET_CODE (inner_src) == STRICT_LOW_PART
             || GET_CODE (inner_src) == SUBREG
             || GET_CODE (inner_src) == ZERO_EXTRACT)
        inner_src = XEXP (inner_src, 0);

      /* If it is better that two different modes keep two different pseudos,
         avoid combining them.  This avoids producing the following pattern
         on a 386:
          (set (subreg:SI (reg/v:QI 21) 0)
               (lshiftrt:SI (reg/v:SI 20)
                   (const_int 24)))
         If that were made, reload could not handle the pair of
         reg 20/21, since it would try to get any GENERAL_REGS
         but some of them don't handle QImode.  */

      if (rtx_equal_p (inner_src, i2dest)
          && GET_CODE (inner_dest) == REG
          && ! MODES_TIEABLE_P (GET_MODE (i2dest), GET_MODE (inner_dest)))
        return 0;
#endif

      /* Check for the case where I3 modifies its output, as
         discussed above.  */
      if ((inner_dest != dest
           && (reg_overlap_mentioned_p (i2dest, inner_dest)
               || (i1dest && reg_overlap_mentioned_p (i1dest, inner_dest))))
          /* This is the same test done in can_combine_p except that we
             allow a hard register with SMALL_REGISTER_CLASSES if SRC is a
             CALL operation.  */
          || (GET_CODE (inner_dest) == REG
              && REGNO (inner_dest) < FIRST_PSEUDO_REGISTER
              && (! HARD_REGNO_MODE_OK (REGNO (inner_dest),
                                        GET_MODE (inner_dest))
#ifdef SMALL_REGISTER_CLASSES
                  || (GET_CODE (src) != CALL && ! REG_USERVAR_P (inner_dest))
#endif
                  ))
          || (i1_not_in_src && reg_overlap_mentioned_p (i1dest, src)))
        return 0;

      /* If DEST is used in I3, it is being killed in this insn,
         so record that for later.
         Never add REG_DEAD notes for the FRAME_POINTER_REGNUM or the
         STACK_POINTER_REGNUM, since these are always considered to be
         live.  Similarly for ARG_POINTER_REGNUM if it is fixed.  */
      if (pi3dest_killed && GET_CODE (dest) == REG
          && reg_referenced_p (dest, PATTERN (i3))
          && REGNO (dest) != FRAME_POINTER_REGNUM
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
          && REGNO (dest) != HARD_FRAME_POINTER_REGNUM
#endif
#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
          && (REGNO (dest) != ARG_POINTER_REGNUM
              || ! fixed_regs [REGNO (dest)])
#endif
          && REGNO (dest) != STACK_POINTER_REGNUM)
        {
          if (*pi3dest_killed)
            return 0;

          *pi3dest_killed = dest;
        }
    }

  else if (GET_CODE (x) == PARALLEL)
    {
      int i;

      for (i = 0; i < XVECLEN (x, 0); i++)
        if (! combinable_i3pat (i3, &XVECEXP (x, 0, i), i2dest, i1dest,
                                i1_not_in_src, pi3dest_killed))
          return 0;
    }

  return 1;
}
\f
/* Try to combine the insns I1 and I2 into I3.
   Here I1 and I2 appear earlier than I3.
   I1 can be zero; then we combine just I2 into I3.

   If we are combining three insns and the resulting insn is not recognized,
   try splitting it into two insns.  If that happens, I2 and I3 are retained
   and I1 is pseudo-deleted by turning it into a NOTE.  Otherwise, I1 and I2
   are pseudo-deleted.

   Return 0 if the combination does not work.  Then nothing is changed.
   If we did the combination, return the insn at which combine should
   resume scanning.  */

static rtx
try_combine (i3, i2, i1)
     register rtx i3, i2, i1;
{
  /* New patterns for I3 and I2, respectively.  */
  rtx newpat, newi2pat = 0;
  /* Indicates need to preserve SET in I1 or I2 in I3 if it is not dead.  */
  int added_sets_1, added_sets_2;
  /* Total number of SETs to put into I3.  */
  int total_sets;
  /* Nonzero if I2's body now appears in I3.  */
  int i2_is_used;
  /* INSN_CODEs for new I3, new I2, and user of condition code.  */
  int insn_code_number, i2_code_number, other_code_number;
  /* Contains I3 if the destination of I3 is used in its source, which means
     that the old life of I3 is being killed.  If that usage is placed into
     I2 and not in I3, a REG_DEAD note must be made.  */
  rtx i3dest_killed = 0;
  /* SET_DEST and SET_SRC of I2 and I1.  */
  rtx i2dest, i2src, i1dest = 0, i1src = 0;
  /* PATTERN (I2), or a copy of it in certain cases.  */
  rtx i2pat;
  /* Indicates if I2DEST or I1DEST is in I2SRC or I1_SRC.  */
  int i2dest_in_i2src = 0, i1dest_in_i1src = 0, i2dest_in_i1src = 0;
  int i1_feeds_i3 = 0;
  /* Notes that must be added to REG_NOTES in I3 and I2.  */
  rtx new_i3_notes, new_i2_notes;
  /* Notes that we substituted I3 into I2 instead of the normal case.  */
  int i3_subst_into_i2 = 0;
  /* Notes that I1, I2 or I3 is a MULT operation.  */
  int have_mult = 0;

  int maxreg;
  rtx temp;
  register rtx link;
  int i;

  /* If any of I1, I2, and I3 isn't really an insn, we can't do anything.
     This can occur when flow deletes an insn that it has merged into an
     auto-increment address.  We also can't do anything if I3 has a
     REG_LIBCALL note since we don't want to disrupt the contiguity of a
     libcall.  */

  if (GET_RTX_CLASS (GET_CODE (i3)) != 'i'
      || GET_RTX_CLASS (GET_CODE (i2)) != 'i'
      || (i1 && GET_RTX_CLASS (GET_CODE (i1)) != 'i')
      || find_reg_note (i3, REG_LIBCALL, NULL_RTX))
    return 0;

  combine_attempts++;

  undobuf.num_undo = previous_num_undos = 0;
  undobuf.other_insn = 0;

  /* Save the current high-water-mark so we can free storage if we didn't
     accept this combination.  */
  undobuf.storage = (char *) oballoc (0);

  /* Reset the hard register usage information.  */
  CLEAR_HARD_REG_SET (newpat_used_regs);

  /* If I1 and I2 both feed I3, they can be in any order.  To simplify the
     code below, set I1 to be the earlier of the two insns.  */
  if (i1 && INSN_CUID (i1) > INSN_CUID (i2))
    temp = i1, i1 = i2, i2 = temp;

  added_links_insn = 0;

  /* First check for one important special-case that the code below will
     not handle.  Namely, the case where I1 is zero, I2 has multiple sets,
     and I3 is a SET whose SET_SRC is a SET_DEST in I2.  In that case,
     we may be able to replace that destination with the destination of I3.
     This occurs in the common code where we compute both a quotient and
     remainder into a structure, in which case we want to do the computation
     directly into the structure to avoid register-register copies.

     We make very conservative checks below and only try to handle the
     most common cases of this.  For example, we only handle the case
     where I2 and I3 are adjacent to avoid making difficult register
     usage tests.  */
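
  /* An illustrative sketch of that special case, with hypothetical
     registers not taken from any real dump: I2 might be

	(parallel [(set (reg:SI 102) (div:SI (reg:SI 100) (reg:SI 101)))
		   (set (reg:SI 103) (mod:SI (reg:SI 100) (reg:SI 101)))])

     with I3 (set (mem:SI (reg:SI 104)) (reg:SI 103)).  When the checks
     below succeed, (reg:SI 103) in I2's second SET is replaced by I3's
     destination and the rewritten PARALLEL becomes the new pattern
     for I3.  */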

  if (i1 == 0 && GET_CODE (i3) == INSN && GET_CODE (PATTERN (i3)) == SET
      && GET_CODE (SET_SRC (PATTERN (i3))) == REG
      && REGNO (SET_SRC (PATTERN (i3))) >= FIRST_PSEUDO_REGISTER
#ifdef SMALL_REGISTER_CLASSES
      && (GET_CODE (SET_DEST (PATTERN (i3))) != REG
          || REGNO (SET_DEST (PATTERN (i3))) >= FIRST_PSEUDO_REGISTER
          || REG_USERVAR_P (SET_DEST (PATTERN (i3))))
#endif
      && find_reg_note (i3, REG_DEAD, SET_SRC (PATTERN (i3)))
      && GET_CODE (PATTERN (i2)) == PARALLEL
      && ! side_effects_p (SET_DEST (PATTERN (i3)))
      /* If the dest of I3 is a ZERO_EXTRACT or STRICT_LOW_PART, the code
         below would need to check what is inside (and reg_overlap_mentioned_p
         doesn't support those codes anyway).  Don't allow those destinations;
         the resulting insn isn't likely to be recognized anyway.  */
      && GET_CODE (SET_DEST (PATTERN (i3))) != ZERO_EXTRACT
      && GET_CODE (SET_DEST (PATTERN (i3))) != STRICT_LOW_PART
      && ! reg_overlap_mentioned_p (SET_SRC (PATTERN (i3)),
                                    SET_DEST (PATTERN (i3)))
      && next_real_insn (i2) == i3)
    {
      rtx p2 = PATTERN (i2);

      /* Make sure that the destination of I3,
         which we are going to substitute into one output of I2,
         is not used within another output of I2.  We must avoid making this:
         (parallel [(set (mem (reg 69)) ...)
                    (set (reg 69) ...)])
         which is not well-defined as to order of actions.
         (Besides, reload can't handle output reloads for this.)

         The problem can also happen if the dest of I3 is a memory ref,
         if another dest in I2 is an indirect memory ref.  */
      for (i = 0; i < XVECLEN (p2, 0); i++)
        if (GET_CODE (XVECEXP (p2, 0, i)) == SET
            && reg_overlap_mentioned_p (SET_DEST (PATTERN (i3)),
                                        SET_DEST (XVECEXP (p2, 0, i))))
          break;

      if (i == XVECLEN (p2, 0))
        for (i = 0; i < XVECLEN (p2, 0); i++)
          if (SET_DEST (XVECEXP (p2, 0, i)) == SET_SRC (PATTERN (i3)))
            {
              combine_merges++;

              subst_insn = i3;
              subst_low_cuid = INSN_CUID (i2);

              added_sets_2 = added_sets_1 = 0;
              i2dest = SET_SRC (PATTERN (i3));

              /* Replace the dest in I2 with our dest and make the resulting
                 insn the new pattern for I3.  Then skip to where we
                 validate the pattern.  Everything was set up above.  */
              SUBST (SET_DEST (XVECEXP (p2, 0, i)),
                     SET_DEST (PATTERN (i3)));

              newpat = p2;
              i3_subst_into_i2 = 1;
              goto validate_replacement;
            }
    }
1320#ifndef HAVE_cc0
1321 /* If we have no I1 and I2 looks like:
1322 (parallel [(set (reg:CC X) (compare:CC OP (const_int 0)))
1323 (set Y OP)])
1324 make up a dummy I1 that is
1325 (set Y OP)
1326 and change I2 to be
1327 (set (reg:CC X) (compare:CC Y (const_int 0)))
1328
1329 (We can ignore any trailing CLOBBERs.)
1330
1331 This undoes a previous combination and allows us to match a branch-and-
1332 decrement insn. */
1333
1334 if (i1 == 0 && GET_CODE (PATTERN (i2)) == PARALLEL
1335 && XVECLEN (PATTERN (i2), 0) >= 2
1336 && GET_CODE (XVECEXP (PATTERN (i2), 0, 0)) == SET
1337 && (GET_MODE_CLASS (GET_MODE (SET_DEST (XVECEXP (PATTERN (i2), 0, 0))))
1338 == MODE_CC)
1339 && GET_CODE (SET_SRC (XVECEXP (PATTERN (i2), 0, 0))) == COMPARE
1340 && XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 1) == const0_rtx
1341 && GET_CODE (XVECEXP (PATTERN (i2), 0, 1)) == SET
1342 && GET_CODE (SET_DEST (XVECEXP (PATTERN (i2), 0, 1))) == REG
1343 && rtx_equal_p (XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 0),
1344 SET_SRC (XVECEXP (PATTERN (i2), 0, 1))))
1345 {
1346 for (i = XVECLEN (PATTERN (i2), 0) - 1; i >= 2; i--)
1347 if (GET_CODE (XVECEXP (PATTERN (i2), 0, i)) != CLOBBER)
1348 break;
1349
1350 if (i == 1)
1351 {
1352 /* We make I1 with the same INSN_UID as I2. This gives it
1353 the same INSN_CUID for value tracking. Our fake I1 will
1354 never appear in the insn stream so giving it the same INSN_UID
1355 as I2 will not cause a problem. */
1356
3adde2a5
RK
1357 i1 = gen_rtx (INSN, VOIDmode, INSN_UID (i2), 0, i2,
1358 XVECEXP (PATTERN (i2), 0, 1), -1, 0, 0);
230d793d
RS
1359
1360 SUBST (PATTERN (i2), XVECEXP (PATTERN (i2), 0, 0));
1361 SUBST (XEXP (SET_SRC (PATTERN (i2)), 0),
1362 SET_DEST (PATTERN (i1)));
1363 }
1364 }
1365#endif
1366
1367 /* Verify that I2 and I1 are valid for combining. */
5f4f0e22
CH
1368 if (! can_combine_p (i2, i3, i1, NULL_RTX, &i2dest, &i2src)
1369 || (i1 && ! can_combine_p (i1, i3, NULL_RTX, i2, &i1dest, &i1src)))
230d793d
RS
1370 {
1371 undo_all ();
1372 return 0;
1373 }
1374
1375 /* Record whether I2DEST is used in I2SRC and similarly for the other
1376 cases. Knowing this will help in register status updating below. */
1377 i2dest_in_i2src = reg_overlap_mentioned_p (i2dest, i2src);
1378 i1dest_in_i1src = i1 && reg_overlap_mentioned_p (i1dest, i1src);
1379 i2dest_in_i1src = i1 && reg_overlap_mentioned_p (i2dest, i1src);
1380
916f14f1 1381 /* See if I1 directly feeds into I3. It does if I1DEST is not used
230d793d
RS
1382 in I2SRC. */
1383 i1_feeds_i3 = i1 && ! reg_overlap_mentioned_p (i1dest, i2src);
1384
1385 /* Ensure that I3's pattern can be the destination of combines. */
1386 if (! combinable_i3pat (i3, &PATTERN (i3), i2dest, i1dest,
1387 i1 && i2dest_in_i1src && i1_feeds_i3,
1388 &i3dest_killed))
1389 {
1390 undo_all ();
1391 return 0;
1392 }
1393
df7d75de
RK
1394 /* See if any of the insns is a MULT operation. Unless one is, we will
1395 reject a combination that is, since it must be slower. Be conservative
1396 here. */
1397 if (GET_CODE (i2src) == MULT
1398 || (i1 != 0 && GET_CODE (i1src) == MULT)
1399 || (GET_CODE (PATTERN (i3)) == SET
1400 && GET_CODE (SET_SRC (PATTERN (i3))) == MULT))
1401 have_mult = 1;
1402
230d793d
RS
1403 /* If I3 has an inc, then give up if I1 or I2 uses the reg that is inc'd.
1404 We used to do this EXCEPT in one case: I3 has a post-inc in an
1405 output operand. However, that exception can give rise to insns like
1406 mov r3,(r3)+
1407 which is a famous insn on the PDP-11 where the value of r3 used as the
5089e22e 1408 source was model-dependent. Avoid this sort of thing. */
230d793d
RS
1409
1410#if 0
1411 if (!(GET_CODE (PATTERN (i3)) == SET
1412 && GET_CODE (SET_SRC (PATTERN (i3))) == REG
1413 && GET_CODE (SET_DEST (PATTERN (i3))) == MEM
1414 && (GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_INC
1415 || GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_DEC)))
1416 /* It's not the exception. */
1417#endif
1418#ifdef AUTO_INC_DEC
1419 for (link = REG_NOTES (i3); link; link = XEXP (link, 1))
1420 if (REG_NOTE_KIND (link) == REG_INC
1421 && (reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i2))
1422 || (i1 != 0
1423 && reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i1)))))
1424 {
1425 undo_all ();
1426 return 0;
1427 }
1428#endif
1429
1430 /* See if the SETs in I1 or I2 need to be kept around in the merged
1431 instruction: whenever the value set there is still needed past I3.
1432 For the SETs in I2, this is easy: we see if I2DEST dies or is set in I3.
1433
1434 For the SET in I1, we have two cases: If I1 and I2 independently
1435 feed into I3, the set in I1 needs to be kept around if I1DEST dies
1436 or is set in I3. Otherwise (if I1 feeds I2 which feeds I3), the set
1437 in I1 needs to be kept around unless I1DEST dies or is set in either
1438 I2 or I3. We can distinguish these cases by seeing if I2SRC mentions
1439 I1DEST. If so, we know I1 feeds into I2. */
1440
1441 added_sets_2 = ! dead_or_set_p (i3, i2dest);
1442
1443 added_sets_1
1444 = i1 && ! (i1_feeds_i3 ? dead_or_set_p (i3, i1dest)
1445 : (dead_or_set_p (i3, i1dest) || dead_or_set_p (i2, i1dest)));
1446
1447 /* If the set in I2 needs to be kept around, we must make a copy of
1448 PATTERN (I2), so that when we substitute I1SRC for I1DEST in
1449 PATTERN (I2), we are only substituting for the original I1DEST, not into
1450 an already-substituted copy. This also prevents making self-referential
1451 rtx. If I2 is a PARALLEL, we just need the piece that assigns I2SRC to
1452 I2DEST. */
1453
1454 i2pat = (GET_CODE (PATTERN (i2)) == PARALLEL
1455 ? gen_rtx (SET, VOIDmode, i2dest, i2src)
1456 : PATTERN (i2));
1457
1458 if (added_sets_2)
1459 i2pat = copy_rtx (i2pat);
1460
1461 combine_merges++;
1462
1463 /* Substitute in the latest insn for the regs set by the earlier ones. */
1464
1465 maxreg = max_reg_num ();
1466
1467 subst_insn = i3;
1468
1469 /* It is possible that the source of I2 or I1 may be performing an
1470 unneeded operation, such as a ZERO_EXTEND of something that is known
1471 to have the high part zero. Handle that case by letting subst look at
1472 the innermost one of them.
1473
1474 Another way to do this would be to have a function that tries to
1475 simplify a single insn instead of merging two or more insns. We don't
1476 do this because of the potential of infinite loops and because
1477 of the potential extra memory required. However, doing it the way
1478 we are is a bit of a kludge and doesn't catch all cases.
1479
1480 But only do this if -fexpensive-optimizations since it slows things down
1481 and doesn't usually win. */
1482
1483 if (flag_expensive_optimizations)
1484 {
1485 /* Pass pc_rtx so no substitutions are done, just simplifications.
1486 The cases that we are interested in here do not involve the few
1487 cases where is_replaced is checked. */
1488 if (i1)
1489 {
1490 subst_low_cuid = INSN_CUID (i1);
1491 i1src = subst (i1src, pc_rtx, pc_rtx, 0, 0);
1492 }
1493 else
1494 {
1495 subst_low_cuid = INSN_CUID (i2);
1496 i2src = subst (i2src, pc_rtx, pc_rtx, 0, 0);
1497 }
1498
1499 previous_num_undos = undobuf.num_undo;
1500 }
1501
1502#ifndef HAVE_cc0
1503 /* Many machines that don't use CC0 have insns that can both perform an
1504 arithmetic operation and set the condition code. These operations will
1505 be represented as a PARALLEL with the first element of the vector
1506 being a COMPARE of an arithmetic operation with the constant zero.
1507 The second element of the vector will set some pseudo to the result
1508 of the same arithmetic operation. If we simplify the COMPARE, we won't
1509 match such a pattern and so will generate an extra insn. Here we test
1510 for this case, where both the comparison and the operation result are
1511 needed, and make the PARALLEL by just replacing I2DEST in I3SRC with
1512 I2SRC. Later we will make the PARALLEL that contains I2. */
1513
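 /* (Illustrative sketch, not from the original source: on such a machine
    the merged insn being aimed at might look like
      (parallel [(set (reg:CC 70) (compare:CC (plus:SI (reg:SI 1) (reg:SI 2))
                                              (const_int 0)))
                 (set (reg:SI 3) (plus:SI (reg:SI 1) (reg:SI 2)))])
    where the first SET comes from I3 with I2SRC substituted into the
    COMPARE and the second SET comes from I2; all register numbers here
    are hypothetical.)  */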
1514 if (i1 == 0 && added_sets_2 && GET_CODE (PATTERN (i3)) == SET
1515 && GET_CODE (SET_SRC (PATTERN (i3))) == COMPARE
1516 && XEXP (SET_SRC (PATTERN (i3)), 1) == const0_rtx
1517 && rtx_equal_p (XEXP (SET_SRC (PATTERN (i3)), 0), i2dest))
1518 {
1519 rtx *cc_use;
1520 enum machine_mode compare_mode;
1521
1522 newpat = PATTERN (i3);
1523 SUBST (XEXP (SET_SRC (newpat), 0), i2src);
1524
1525 i2_is_used = 1;
1526
1527#ifdef EXTRA_CC_MODES
1528 /* See if a COMPARE with the operand we substituted in should be done
1529 with the mode that is currently being used. If not, do the same
1530 processing we do in `subst' for a SET; namely, if the destination
1531 is used only once, try to replace it with a register of the proper
1532 mode and also replace the COMPARE. */
1533 if (undobuf.other_insn == 0
1534 && (cc_use = find_single_use (SET_DEST (newpat), i3,
1535 &undobuf.other_insn))
1536 && ((compare_mode = SELECT_CC_MODE (GET_CODE (*cc_use),
1537 i2src, const0_rtx))
1538 != GET_MODE (SET_DEST (newpat))))
1539 {
1540 int regno = REGNO (SET_DEST (newpat));
1541 rtx new_dest = gen_rtx (REG, compare_mode, regno);
1542
1543 if (regno < FIRST_PSEUDO_REGISTER
1544 || (reg_n_sets[regno] == 1 && ! added_sets_2
1545 && ! REG_USERVAR_P (SET_DEST (newpat))))
1546 {
1547 if (regno >= FIRST_PSEUDO_REGISTER)
1548 SUBST (regno_reg_rtx[regno], new_dest);
1549
1550 SUBST (SET_DEST (newpat), new_dest);
1551 SUBST (XEXP (*cc_use, 0), new_dest);
1552 SUBST (SET_SRC (newpat),
1553 gen_rtx_combine (COMPARE, compare_mode,
1554 i2src, const0_rtx));
1555 }
1556 else
1557 undobuf.other_insn = 0;
1558 }
1559#endif
1560 }
1561 else
1562#endif
1563 {
1564 n_occurrences = 0; /* `subst' counts here */
1565
1566 /* If I1 feeds into I2 (not into I3) and I1DEST is in I1SRC, we
1567 need to make a unique copy of I2SRC each time we substitute it
1568 to avoid self-referential rtl. */
1569
1570 subst_low_cuid = INSN_CUID (i2);
1571 newpat = subst (PATTERN (i3), i2dest, i2src, 0,
1572 ! i1_feeds_i3 && i1dest_in_i1src);
1573 previous_num_undos = undobuf.num_undo;
1574
1575 /* Record whether i2's body now appears within i3's body. */
1576 i2_is_used = n_occurrences;
1577 }
1578
1579 /* If we already got a failure, don't try to do more. Otherwise,
1580 try to substitute in I1 if we have it. */
1581
1582 if (i1 && GET_CODE (newpat) != CLOBBER)
1583 {
1584 /* Before we can do this substitution, we must redo the test done
1585 above (see detailed comments there) that ensures that I1DEST
1586 isn't mentioned in any SETs in NEWPAT that are field assignments. */
1587
1588 if (! combinable_i3pat (NULL_RTX, &newpat, i1dest, NULL_RTX,
1589 0, NULL_PTR))
1590 {
1591 undo_all ();
1592 return 0;
1593 }
1594
1595 n_occurrences = 0;
1596 subst_low_cuid = INSN_CUID (i1);
1597 newpat = subst (newpat, i1dest, i1src, 0, 0);
1598 previous_num_undos = undobuf.num_undo;
1599 }
1600
1601 /* Fail if an autoincrement side-effect has been duplicated. Be careful
1602 to count all the ways that I2SRC and I1SRC can be used. */
1603 if ((FIND_REG_INC_NOTE (i2, NULL_RTX) != 0
1604 && i2_is_used + added_sets_2 > 1)
1605 || (i1 != 0 && FIND_REG_INC_NOTE (i1, NULL_RTX) != 0
1606 && (n_occurrences + added_sets_1 + (added_sets_2 && ! i1_feeds_i3)
1607 > 1))
1608 /* Fail if we tried to make a new register (we used to abort, but there's
1609 really no reason to). */
1610 || max_reg_num () != maxreg
1611 /* Fail if we couldn't do something and have a CLOBBER. */
1612 || GET_CODE (newpat) == CLOBBER
1613 /* Fail if this new pattern is a MULT and we didn't have one before
1614 at the outer level. */
1615 || (GET_CODE (newpat) == SET && GET_CODE (SET_SRC (newpat)) == MULT
1616 && ! have_mult))
1617 {
1618 undo_all ();
1619 return 0;
1620 }
1621
1622 /* If the actions of the earlier insns must be kept
1623 in addition to substituting them into the latest one,
1624 we must make a new PARALLEL for the latest insn
1625 to hold the additional SETs.
1626
1627 if (added_sets_1 || added_sets_2)
1628 {
1629 combine_extras++;
1630
1631 if (GET_CODE (newpat) == PARALLEL)
1632 {
1633 rtvec old = XVEC (newpat, 0);
1634 total_sets = XVECLEN (newpat, 0) + added_sets_1 + added_sets_2;
1635 newpat = gen_rtx (PARALLEL, VOIDmode, rtvec_alloc (total_sets));
1636 bcopy ((char *) &old->elem[0], (char *) &XVECEXP (newpat, 0, 0),
1637 sizeof (old->elem[0]) * old->num_elem);
1638 }
1639 else
1640 {
1641 rtx old = newpat;
1642 total_sets = 1 + added_sets_1 + added_sets_2;
1643 newpat = gen_rtx (PARALLEL, VOIDmode, rtvec_alloc (total_sets));
1644 XVECEXP (newpat, 0, 0) = old;
1645 }
1646
1647 if (added_sets_1)
1648 XVECEXP (newpat, 0, --total_sets)
1649 = (GET_CODE (PATTERN (i1)) == PARALLEL
1650 ? gen_rtx (SET, VOIDmode, i1dest, i1src) : PATTERN (i1));
1651
1652 if (added_sets_2)
1653 {
1654 /* If there is no I1, use I2's body as is. We used to also not do
1655 the subst call below if I2 was substituted into I3,
1656 but that could lose a simplification. */
1657 if (i1 == 0)
1658 XVECEXP (newpat, 0, --total_sets) = i2pat;
1659 else
1660 /* See comment where i2pat is assigned. */
1661 XVECEXP (newpat, 0, --total_sets)
1662 = subst (i2pat, i1dest, i1src, 0, 0);
1663 }
1664 }
1665
1666 /* We come here when we are replacing a destination in I2 with the
1667 destination of I3. */
1668 validate_replacement:
1669
1670 /* Note which hard regs this insn has as inputs. */
1671 mark_used_regs_combine (newpat);
1672
1673 /* Is the result of combination a valid instruction? */
1674 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
1675
1676 /* If the result isn't valid, see if it is a PARALLEL of two SETs where
1677 the second SET's destination is a register that is unused. In that case,
1678 we just need the first SET. This can occur when simplifying a divmod
1679 insn. We *must* test for this case here because the code below that
1680 splits two independent SETs doesn't handle this case correctly when it
1681 updates the register status. Also check the case where the first
1682 SET's destination is unused. That would not cause incorrect code, but
1683 does cause an unneeded insn to remain. */
1684
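 /* (Illustrative sketch, not from the original source: a divmod result
      (parallel [(set (reg:SI 3) (div:SI (reg:SI 1) (reg:SI 2)))
                 (set (reg:SI 4) (mod:SI (reg:SI 1) (reg:SI 2)))])
    in which (reg:SI 4) carries a REG_UNUSED note in I3 reduces to the
    first SET alone; the register numbers are hypothetical.)  */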
1685 if (insn_code_number < 0 && GET_CODE (newpat) == PARALLEL
1686 && XVECLEN (newpat, 0) == 2
1687 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
1688 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
1689 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) == REG
1690 && find_reg_note (i3, REG_UNUSED, SET_DEST (XVECEXP (newpat, 0, 1)))
1691 && ! side_effects_p (SET_SRC (XVECEXP (newpat, 0, 1)))
1692 && asm_noperands (newpat) < 0)
1693 {
1694 newpat = XVECEXP (newpat, 0, 0);
1695 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
1696 }
1697
1698 else if (insn_code_number < 0 && GET_CODE (newpat) == PARALLEL
1699 && XVECLEN (newpat, 0) == 2
1700 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
1701 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
1702 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) == REG
1703 && find_reg_note (i3, REG_UNUSED, SET_DEST (XVECEXP (newpat, 0, 0)))
1704 && ! side_effects_p (SET_SRC (XVECEXP (newpat, 0, 0)))
1705 && asm_noperands (newpat) < 0)
1706 {
1707 newpat = XVECEXP (newpat, 0, 1);
1708 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
1709 }
1710
1711 /* If we were combining three insns and the result is a simple SET
1712 with no ASM_OPERANDS that wasn't recognized, try to split it into two
1713 insns. There are two ways to do this. It can be split using a
1714 machine-specific method (like when you have an addition of a large
1715 constant) or by combine in the function find_split_point. */
1716
1717 if (i1 && insn_code_number < 0 && GET_CODE (newpat) == SET
1718 && asm_noperands (newpat) < 0)
1719 {
1720 rtx m_split, *split;
1721 rtx ni2dest = i2dest;
1722
1723 /* See if the MD file can split NEWPAT. If it can't, see if letting it
1724 use I2DEST as a scratch register will help. In the latter case,
1725 convert I2DEST to the mode of the source of NEWPAT if we can. */
1726
1727 m_split = split_insns (newpat, i3);
1728
1729 /* We can only use I2DEST as a scratch reg if it doesn't overlap any
1730 inputs of NEWPAT. */
1731
1732 /* ??? If I2DEST is not safe, and I1DEST exists, then it would be
1733 possible to try that as a scratch reg. This would require adding
1734 more code to make it work though. */
1735
1736 if (m_split == 0 && ! reg_overlap_mentioned_p (ni2dest, newpat))
1737 {
1738 /* If I2DEST is a hard register or the only use of a pseudo,
1739 we can change its mode. */
1740 if (GET_MODE (SET_DEST (newpat)) != GET_MODE (i2dest)
1741 && GET_MODE (SET_DEST (newpat)) != VOIDmode
1742 && GET_CODE (i2dest) == REG
1743 && (REGNO (i2dest) < FIRST_PSEUDO_REGISTER
1744 || (reg_n_sets[REGNO (i2dest)] == 1 && ! added_sets_2
1745 && ! REG_USERVAR_P (i2dest))))
1746 ni2dest = gen_rtx (REG, GET_MODE (SET_DEST (newpat)),
1747 REGNO (i2dest));
1748
1749 m_split = split_insns (gen_rtx (PARALLEL, VOIDmode,
1750 gen_rtvec (2, newpat,
1751 gen_rtx (CLOBBER,
1752 VOIDmode,
1753 ni2dest))),
1754 i3);
1755 }
1756
1757 if (m_split && GET_CODE (m_split) == SEQUENCE
1758 && XVECLEN (m_split, 0) == 2
1759 && (next_real_insn (i2) == i3
1760 || ! use_crosses_set_p (PATTERN (XVECEXP (m_split, 0, 0)),
1761 INSN_CUID (i2))))
1762 {
1763 rtx i2set, i3set;
1764 rtx newi3pat = PATTERN (XVECEXP (m_split, 0, 1));
1765 newi2pat = PATTERN (XVECEXP (m_split, 0, 0));
1766
1767 i3set = single_set (XVECEXP (m_split, 0, 1));
1768 i2set = single_set (XVECEXP (m_split, 0, 0));
1769
1770 /* In case we changed the mode of I2DEST, replace it in the
1771 pseudo-register table here. We can't do it above in case this
1772 code doesn't get executed and we do a split the other way. */
1773
1774 if (REGNO (i2dest) >= FIRST_PSEUDO_REGISTER)
1775 SUBST (regno_reg_rtx[REGNO (i2dest)], ni2dest);
1776
1777 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
1778
1779 /* If I2 or I3 has multiple SETs, we won't know how to track
1780 register status, so don't use these insns. */
1781
1782 if (i2_code_number >= 0 && i2set && i3set)
1783 insn_code_number = recog_for_combine (&newi3pat, i3,
1784 &new_i3_notes);
1785
1786 if (insn_code_number >= 0)
1787 newpat = newi3pat;
1788
1789 /* It is possible that both insns now set the destination of I3.
1790 If so, we must show an extra use of it. */
1791
1792 if (insn_code_number >= 0 && GET_CODE (SET_DEST (i3set)) == REG
1793 && GET_CODE (SET_DEST (i2set)) == REG
1794 && REGNO (SET_DEST (i3set)) == REGNO (SET_DEST (i2set)))
1795 reg_n_sets[REGNO (SET_DEST (i2set))]++;
1796 }
1797
1798 /* If we can split it and use I2DEST, go ahead and see if that
1799 helps things be recognized. Verify that none of the registers
1800 are set between I2 and I3. */
1801 if (insn_code_number < 0 && (split = find_split_point (&newpat, i3)) != 0
1802#ifdef HAVE_cc0
1803 && GET_CODE (i2dest) == REG
1804#endif
1805 /* We need I2DEST in the proper mode. If it is a hard register
1806 or the only use of a pseudo, we can change its mode. */
1807 && (GET_MODE (*split) == GET_MODE (i2dest)
1808 || GET_MODE (*split) == VOIDmode
1809 || REGNO (i2dest) < FIRST_PSEUDO_REGISTER
1810 || (reg_n_sets[REGNO (i2dest)] == 1 && ! added_sets_2
1811 && ! REG_USERVAR_P (i2dest)))
1812 && (next_real_insn (i2) == i3
1813 || ! use_crosses_set_p (*split, INSN_CUID (i2)))
1814 /* We can't overwrite I2DEST if its value is still used by
1815 NEWPAT. */
1816 && ! reg_referenced_p (i2dest, newpat))
1817 {
1818 rtx newdest = i2dest;
1819 enum rtx_code split_code = GET_CODE (*split);
1820 enum machine_mode split_mode = GET_MODE (*split);
1821
1822 /* Get NEWDEST as a register in the proper mode. We have already
1823 validated that we can do this. */
1824 if (GET_MODE (i2dest) != split_mode && split_mode != VOIDmode)
1825 {
1826 newdest = gen_rtx (REG, split_mode, REGNO (i2dest));
1827
1828 if (REGNO (i2dest) >= FIRST_PSEUDO_REGISTER)
1829 SUBST (regno_reg_rtx[REGNO (i2dest)], newdest);
1830 }
1831
1832 /* If *SPLIT is a (mult FOO (const_int pow2)), convert it to
1833 an ASHIFT. This can occur if it was inside a PLUS and hence
1834 appeared to be a memory address. This is a kludge. */
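 /* (For instance, illustration only: exact_log2 of 8 is 3, so a split
    point of (mult FOO (const_int 8)) is rewritten below as
    (ashift FOO (const_int 3)).)  */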
1835 if (split_code == MULT
1836 && GET_CODE (XEXP (*split, 1)) == CONST_INT
1837 && (i = exact_log2 (INTVAL (XEXP (*split, 1)))) >= 0)
1838 SUBST (*split, gen_rtx_combine (ASHIFT, split_mode,
1839 XEXP (*split, 0), GEN_INT (i)));
1840
1841#ifdef INSN_SCHEDULING
1842 /* If *SPLIT is a paradoxical SUBREG, when we split it, it should
1843 be written as a ZERO_EXTEND. */
1844 if (split_code == SUBREG && GET_CODE (SUBREG_REG (*split)) == MEM)
1845 SUBST (*split, gen_rtx_combine (ZERO_EXTEND, split_mode,
1846 XEXP (*split, 0)));
1847#endif
1848
1849 newi2pat = gen_rtx_combine (SET, VOIDmode, newdest, *split);
1850 SUBST (*split, newdest);
1851 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
1852
1853 /* If the split point was a MULT and we didn't have one before,
1854 don't use one now. */
1855 if (i2_code_number >= 0 && ! (split_code == MULT && ! have_mult))
1856 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
1857 }
1858 }
1859
1860 /* Check for a case where we loaded from memory in a narrow mode and
1861 then sign extended it, but we need both registers. In that case,
1862 we have a PARALLEL with both loads from the same memory location.
1863 We can split this into a load from memory followed by a register-register
1864 copy. This saves at least one insn, more if register allocation can
1865 eliminate the copy.
1866
1867 We cannot do this if the destination of the second assignment is
1868 a register that we have already assumed is zero-extended. Similarly
1869 for a SUBREG of such a register. */
1870
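 /* (Illustrative sketch, not from the original source: a pattern such as
      (parallel [(set (reg:SI 3) (sign_extend:SI (mem:HI ADDR)))
                 (set (reg:HI 4) (mem:HI ADDR))])
    is split below into the load in I2 and a register-register copy,
    roughly (set (reg:HI 4) (subreg:HI (reg:SI 3) 0)), in I3 via
    gen_lowpart_for_combine; ADDR and the register numbers are
    hypothetical.)  */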
1871 else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0
1872 && GET_CODE (newpat) == PARALLEL
1873 && XVECLEN (newpat, 0) == 2
1874 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
1875 && GET_CODE (SET_SRC (XVECEXP (newpat, 0, 0))) == SIGN_EXTEND
1876 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
1877 && rtx_equal_p (SET_SRC (XVECEXP (newpat, 0, 1)),
1878 XEXP (SET_SRC (XVECEXP (newpat, 0, 0)), 0))
1879 && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 1)),
1880 INSN_CUID (i2))
1881 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT
1882 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART
1883 && ! (temp = SET_DEST (XVECEXP (newpat, 0, 1)),
1884 (GET_CODE (temp) == REG
1885 && reg_nonzero_bits[REGNO (temp)] != 0
1886 && GET_MODE_BITSIZE (GET_MODE (temp)) < BITS_PER_WORD
1887 && GET_MODE_BITSIZE (GET_MODE (temp)) < HOST_BITS_PER_INT
1888 && (reg_nonzero_bits[REGNO (temp)]
1889 != GET_MODE_MASK (word_mode))))
1890 && ! (GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) == SUBREG
1891 && (temp = SUBREG_REG (SET_DEST (XVECEXP (newpat, 0, 1))),
1892 (GET_CODE (temp) == REG
1893 && reg_nonzero_bits[REGNO (temp)] != 0
1894 && GET_MODE_BITSIZE (GET_MODE (temp)) < BITS_PER_WORD
1895 && GET_MODE_BITSIZE (GET_MODE (temp)) < HOST_BITS_PER_INT
1896 && (reg_nonzero_bits[REGNO (temp)]
1897 != GET_MODE_MASK (word_mode)))))
1898 && ! reg_overlap_mentioned_p (SET_DEST (XVECEXP (newpat, 0, 1)),
1899 SET_SRC (XVECEXP (newpat, 0, 1)))
1900 && ! find_reg_note (i3, REG_UNUSED,
1901 SET_DEST (XVECEXP (newpat, 0, 0))))
1902 {
1903 rtx ni2dest;
1904
1905 newi2pat = XVECEXP (newpat, 0, 0);
1906 ni2dest = SET_DEST (XVECEXP (newpat, 0, 0));
1907 newpat = XVECEXP (newpat, 0, 1);
1908 SUBST (SET_SRC (newpat),
1909 gen_lowpart_for_combine (GET_MODE (SET_SRC (newpat)), ni2dest));
1910 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
1911 if (i2_code_number >= 0)
1912 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
1913
1914 if (insn_code_number >= 0)
1915 {
1916 rtx insn;
1917 rtx link;
1918
1919 /* If we will be able to accept this, we have made a change to the
1920 destination of I3. This can invalidate a LOG_LINKS entry pointing
1921 to I3. No other part of combine.c makes such a transformation.
1922
1923 The new I3 will have a destination that was previously the
1924 destination of I1 or I2 and which was used in I2 or I3. Call
1925 distribute_links to make a LOG_LINK from the next use of
1926 that destination. */
1927
1928 PATTERN (i3) = newpat;
1929 distribute_links (gen_rtx (INSN_LIST, VOIDmode, i3, NULL_RTX));
1930
1931 /* I3 now uses what used to be its destination and which is
1932 now I2's destination. That means we need a LOG_LINK from
1933 I3 to I2. But we used to have one, so we still will.
1934
1935 However, some later insn might be using I2's dest and have
1936 a LOG_LINK pointing at I3. We must remove this link.
1937 The simplest way to remove the link is to point it at I1,
1938 which we know will be a NOTE. */
1939
1940 for (insn = NEXT_INSN (i3);
1941 insn && (this_basic_block == n_basic_blocks - 1
1942 || insn != basic_block_head[this_basic_block + 1]);
1943 insn = NEXT_INSN (insn))
1944 {
1945 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
1946 && reg_referenced_p (ni2dest, PATTERN (insn)))
1947 {
1948 for (link = LOG_LINKS (insn); link;
1949 link = XEXP (link, 1))
1950 if (XEXP (link, 0) == i3)
1951 XEXP (link, 0) = i1;
1952
1953 break;
1954 }
1955 }
1956 }
1957 }
1958
1959 /* Similarly, check for a case where we have a PARALLEL of two independent
1960 SETs but we started with three insns. In this case, we can do the sets
1961 as two separate insns. This case occurs when some SET allows two
1962 other insns to combine, but the destination of that SET is still live. */
1963
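 /* (Illustrative sketch, not from the original source:
      (parallel [(set (reg:SI 3) (plus:SI (reg:SI 1) (reg:SI 2)))
                 (set (reg:SI 4) (neg:SI (reg:SI 1)))])
    qualifies because neither SET references the other's destination, so
    the second SET becomes the new I2 and the first the new I3; register
    numbers are hypothetical.)  */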
1964 else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0
1965 && GET_CODE (newpat) == PARALLEL
1966 && XVECLEN (newpat, 0) == 2
1967 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
1968 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != ZERO_EXTRACT
1969 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != STRICT_LOW_PART
1970 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
1971 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT
1972 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART
1973 && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 1)),
1974 INSN_CUID (i2))
1975 /* Don't pass sets with (USE (MEM ...)) dests to the following. */
1976 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != USE
1977 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != USE
1978 && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 1)),
1979 XVECEXP (newpat, 0, 0))
1980 && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 0)),
1981 XVECEXP (newpat, 0, 1)))
1982 {
1983 newi2pat = XVECEXP (newpat, 0, 1);
1984 newpat = XVECEXP (newpat, 0, 0);
1985
1986 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
1987 if (i2_code_number >= 0)
1988 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
1989 }
1990
1991 /* If it still isn't recognized, fail and change things back the way they
1992 were. */
1993 if ((insn_code_number < 0
1994 /* Is the result a reasonable ASM_OPERANDS? */
1995 && (! check_asm_operands (newpat) || added_sets_1 || added_sets_2)))
1996 {
1997 undo_all ();
1998 return 0;
1999 }
2000
2001 /* If we had to change another insn, make sure it is valid also. */
2002 if (undobuf.other_insn)
2003 {
2004 rtx other_pat = PATTERN (undobuf.other_insn);
2005 rtx new_other_notes;
2006 rtx note, next;
2007
2008 CLEAR_HARD_REG_SET (newpat_used_regs);
2009
230d793d
RS
2010 other_code_number = recog_for_combine (&other_pat, undobuf.other_insn,
2011 &new_other_notes);
2012
2013 if (other_code_number < 0 && ! check_asm_operands (other_pat))
2014 {
2015 undo_all ();
2016 return 0;
2017 }
2018
2019 PATTERN (undobuf.other_insn) = other_pat;
2020
2021 /* If any of the notes in OTHER_INSN were REG_UNUSED, ensure that they
2022 are still valid. Then add any non-duplicate notes added by
2023 recog_for_combine. */
2024 for (note = REG_NOTES (undobuf.other_insn); note; note = next)
2025 {
2026 next = XEXP (note, 1);
2027
2028 if (REG_NOTE_KIND (note) == REG_UNUSED
2029 && ! reg_set_p (XEXP (note, 0), PATTERN (undobuf.other_insn)))
2030 {
2031 if (GET_CODE (XEXP (note, 0)) == REG)
2032 reg_n_deaths[REGNO (XEXP (note, 0))]--;
2033
2034 remove_note (undobuf.other_insn, note);
2035 }
2036 }
2037
2038 for (note = new_other_notes; note; note = XEXP (note, 1))
2039 if (GET_CODE (XEXP (note, 0)) == REG)
2040 reg_n_deaths[REGNO (XEXP (note, 0))]++;
2041
2042 distribute_notes (new_other_notes, undobuf.other_insn,
2043 undobuf.other_insn, NULL_RTX, NULL_RTX, NULL_RTX);
2044 }
2045
2046 /* We now know that we can do this combination. Merge the insns and
2047 update the status of registers and LOG_LINKS. */
2048
2049 {
2050 rtx i3notes, i2notes, i1notes = 0;
2051 rtx i3links, i2links, i1links = 0;
2052 rtx midnotes = 0;
2053 register int regno;
2054 /* Compute which registers we expect to eliminate. */
2055 rtx elim_i2 = (newi2pat || i2dest_in_i2src || i2dest_in_i1src
2056 ? 0 : i2dest);
2057 rtx elim_i1 = i1 == 0 || i1dest_in_i1src ? 0 : i1dest;
2058
2059 /* Get the old REG_NOTES and LOG_LINKS from all our insns and
2060 clear them. */
2061 i3notes = REG_NOTES (i3), i3links = LOG_LINKS (i3);
2062 i2notes = REG_NOTES (i2), i2links = LOG_LINKS (i2);
2063 if (i1)
2064 i1notes = REG_NOTES (i1), i1links = LOG_LINKS (i1);
2065
2066 /* Ensure that we do not have something that should not be shared but
2067 occurs multiple times in the new insns. Check this by first
2068 resetting all the `used' flags and then copying anything that is shared. */
2069
2070 reset_used_flags (i3notes);
2071 reset_used_flags (i2notes);
2072 reset_used_flags (i1notes);
2073 reset_used_flags (newpat);
2074 reset_used_flags (newi2pat);
2075 if (undobuf.other_insn)
2076 reset_used_flags (PATTERN (undobuf.other_insn));
2077
2078 i3notes = copy_rtx_if_shared (i3notes);
2079 i2notes = copy_rtx_if_shared (i2notes);
2080 i1notes = copy_rtx_if_shared (i1notes);
2081 newpat = copy_rtx_if_shared (newpat);
2082 newi2pat = copy_rtx_if_shared (newi2pat);
2083 if (undobuf.other_insn)
2084 reset_used_flags (PATTERN (undobuf.other_insn));
2085
2086 INSN_CODE (i3) = insn_code_number;
2087 PATTERN (i3) = newpat;
2088 if (undobuf.other_insn)
2089 INSN_CODE (undobuf.other_insn) = other_code_number;
2090
2091 /* We had one special case above where I2 had more than one set and
2092 we replaced a destination of one of those sets with the destination
2093 of I3. In that case, we have to update LOG_LINKS of insns later
2094 in this basic block. Note that this (expensive) case is rare.
2095
2096 Also, in this case, we must pretend that all REG_NOTEs for I2
2097 actually came from I3, so that REG_UNUSED notes from I2 will be
2098 properly handled. */
2099
2100 if (i3_subst_into_i2)
2101 {
2102 for (i = 0; i < XVECLEN (PATTERN (i2), 0); i++)
2103 if (GET_CODE (SET_DEST (XVECEXP (PATTERN (i2), 0, i))) == REG
2104 && SET_DEST (XVECEXP (PATTERN (i2), 0, i)) != i2dest
2105 && ! find_reg_note (i2, REG_UNUSED,
2106 SET_DEST (XVECEXP (PATTERN (i2), 0, i))))
2107 for (temp = NEXT_INSN (i2);
2108 temp && (this_basic_block == n_basic_blocks - 1
2109 || basic_block_head[this_basic_block + 1] != temp);
2110 temp = NEXT_INSN (temp))
2111 if (temp != i3 && GET_RTX_CLASS (GET_CODE (temp)) == 'i')
2112 for (link = LOG_LINKS (temp); link; link = XEXP (link, 1))
2113 if (XEXP (link, 0) == i2)
2114 XEXP (link, 0) = i3;
2115
2116 if (i3notes)
2117 {
2118 rtx link = i3notes;
2119 while (XEXP (link, 1))
2120 link = XEXP (link, 1);
2121 XEXP (link, 1) = i2notes;
2122 }
2123 else
2124 i3notes = i2notes;
2125 i2notes = 0;
2126 }
2127
2128 LOG_LINKS (i3) = 0;
2129 REG_NOTES (i3) = 0;
2130 LOG_LINKS (i2) = 0;
2131 REG_NOTES (i2) = 0;
2132
2133 if (newi2pat)
2134 {
2135 INSN_CODE (i2) = i2_code_number;
2136 PATTERN (i2) = newi2pat;
2137 }
2138 else
2139 {
2140 PUT_CODE (i2, NOTE);
2141 NOTE_LINE_NUMBER (i2) = NOTE_INSN_DELETED;
2142 NOTE_SOURCE_FILE (i2) = 0;
2143 }
2144
2145 if (i1)
2146 {
2147 LOG_LINKS (i1) = 0;
2148 REG_NOTES (i1) = 0;
2149 PUT_CODE (i1, NOTE);
2150 NOTE_LINE_NUMBER (i1) = NOTE_INSN_DELETED;
2151 NOTE_SOURCE_FILE (i1) = 0;
2152 }
2153
2154 /* Get death notes for everything that is now used in either I3 or
2155 I2 and used to die in a previous insn. */
2156
2157 move_deaths (newpat, i1 ? INSN_CUID (i1) : INSN_CUID (i2), i3, &midnotes);
2158 if (newi2pat)
2159 move_deaths (newi2pat, INSN_CUID (i1), i2, &midnotes);
2160
2161 /* Distribute all the LOG_LINKS and REG_NOTES from I1, I2, and I3. */
2162 if (i3notes)
2163 distribute_notes (i3notes, i3, i3, newi2pat ? i2 : NULL_RTX,
2164 elim_i2, elim_i1);
2165 if (i2notes)
2166 distribute_notes (i2notes, i2, i3, newi2pat ? i2 : NULL_RTX,
2167 elim_i2, elim_i1);
2168 if (i1notes)
2169 distribute_notes (i1notes, i1, i3, newi2pat ? i2 : NULL_RTX,
2170 elim_i2, elim_i1);
2171 if (midnotes)
2172 distribute_notes (midnotes, NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
2173 elim_i2, elim_i1);
2174
2175 /* Distribute any notes added to I2 or I3 by recog_for_combine. We
2176 know these are REG_UNUSED and want them to go to the desired insn,
2177 so we always pass it as i3. We have not counted the notes in
2178 reg_n_deaths yet, so we need to do so now. */
2179
2180 if (newi2pat && new_i2_notes)
2181 {
2182 for (temp = new_i2_notes; temp; temp = XEXP (temp, 1))
2183 if (GET_CODE (XEXP (temp, 0)) == REG)
2184 reg_n_deaths[REGNO (XEXP (temp, 0))]++;
2185
2186 distribute_notes (new_i2_notes, i2, i2, NULL_RTX, NULL_RTX, NULL_RTX);
2187 }
2188
2189 if (new_i3_notes)
2190 {
2191 for (temp = new_i3_notes; temp; temp = XEXP (temp, 1))
2192 if (GET_CODE (XEXP (temp, 0)) == REG)
2193 reg_n_deaths[REGNO (XEXP (temp, 0))]++;
2194
2195 distribute_notes (new_i3_notes, i3, i3, NULL_RTX, NULL_RTX, NULL_RTX);
2196 }
2197
2198 /* If I3DEST was used in I3SRC, it really died in I3. We may need to
2199 put a REG_DEAD note for it somewhere. Similarly for I2 and I1.
2200 Show an additional death due to the REG_DEAD note we make here. If
2201 we discard it in distribute_notes, we will decrement it again. */
2202
2203 if (i3dest_killed)
2204 {
2205 if (GET_CODE (i3dest_killed) == REG)
2206 reg_n_deaths[REGNO (i3dest_killed)]++;
2207
2208 distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i3dest_killed,
2209 NULL_RTX),
2210 NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
2211 NULL_RTX, NULL_RTX);
2212 }
2213
2214 /* For I2 and I1, we have to be careful. If NEWI2PAT exists and sets
2215 I2DEST or I1DEST, the death must be somewhere before I2, not I3. If
2216 we passed I3 in that case, it might delete I2. */
2217
2218 if (i2dest_in_i2src)
2219 {
2220 if (GET_CODE (i2dest) == REG)
2221 reg_n_deaths[REGNO (i2dest)]++;
2222
2223 if (newi2pat && reg_set_p (i2dest, newi2pat))
2224 distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i2dest, NULL_RTX),
2225 NULL_RTX, i2, NULL_RTX, NULL_RTX, NULL_RTX);
2226 else
2227 distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i2dest, NULL_RTX),
2228 NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
2229 NULL_RTX, NULL_RTX);
2230 }
2231
2232 if (i1dest_in_i1src)
2233 {
2234 if (GET_CODE (i1dest) == REG)
2235 reg_n_deaths[REGNO (i1dest)]++;
2236
2237 if (newi2pat && reg_set_p (i1dest, newi2pat))
2238 distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i1dest, NULL_RTX),
2239 NULL_RTX, i2, NULL_RTX, NULL_RTX, NULL_RTX);
2240 else
2241 distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i1dest, NULL_RTX),
2242 NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
2243 NULL_RTX, NULL_RTX);
2244 }
2245
2246 distribute_links (i3links);
2247 distribute_links (i2links);
2248 distribute_links (i1links);
2249
2250 if (GET_CODE (i2dest) == REG)
2251 {
2252 rtx link;
2253 rtx i2_insn = 0, i2_val = 0, set;
2254
2255 /* The insn that used to set this register doesn't exist, and
2256 this life of the register may not exist either. See if one of
2257 I3's links points to an insn that sets I2DEST. If it does,
2258 that is now the last known value for I2DEST. If we don't update
2259 this and I2 set the register to a value that depended on its old
2260 contents, we will get confused. If this insn is used, things
2261 will be set correctly in combine_instructions. */
2262
2263 for (link = LOG_LINKS (i3); link; link = XEXP (link, 1))
2264 if ((set = single_set (XEXP (link, 0))) != 0
2265 && rtx_equal_p (i2dest, SET_DEST (set)))
2266 i2_insn = XEXP (link, 0), i2_val = SET_SRC (set);
2267
2268 record_value_for_reg (i2dest, i2_insn, i2_val);
2269
2270 /* If the reg formerly set in I2 died only once and that was in I3,
2271 zero its use count so it won't make `reload' do any work. */
2272 if (! added_sets_2 && newi2pat == 0 && ! i2dest_in_i2src)
2273 {
2274 regno = REGNO (i2dest);
2275 reg_n_sets[regno]--;
2276 if (reg_n_sets[regno] == 0
2277 && ! (basic_block_live_at_start[0][regno / REGSET_ELT_BITS]
2278 & ((REGSET_ELT_TYPE) 1 << (regno % REGSET_ELT_BITS))))
2279 reg_n_refs[regno] = 0;
2280 }
2281 }
2282
2283 if (i1 && GET_CODE (i1dest) == REG)
2284 {
2285 rtx link;
2286 rtx i1_insn = 0, i1_val = 0, set;
2287
2288 for (link = LOG_LINKS (i3); link; link = XEXP (link, 1))
2289 if ((set = single_set (XEXP (link, 0))) != 0
2290 && rtx_equal_p (i1dest, SET_DEST (set)))
2291 i1_insn = XEXP (link, 0), i1_val = SET_SRC (set);
2292
2293 record_value_for_reg (i1dest, i1_insn, i1_val);
2294
2295 regno = REGNO (i1dest);
2296 if (! added_sets_1 && ! i1dest_in_i1src)
2297 {
2298 reg_n_sets[regno]--;
2299 if (reg_n_sets[regno] == 0
2300 && ! (basic_block_live_at_start[0][regno / REGSET_ELT_BITS]
2301 & ((REGSET_ELT_TYPE) 1 << (regno % REGSET_ELT_BITS))))
2302 reg_n_refs[regno] = 0;
2303 }
2304 }
2305
2306 /* Update reg_nonzero_bits et al for any changes that may have been made
2307 to this insn. */
2308
2309 note_stores (newpat, set_nonzero_bits_and_sign_copies);
2310 if (newi2pat)
2311 note_stores (newi2pat, set_nonzero_bits_and_sign_copies);
2312
2313 /* If I3 is now an unconditional jump, ensure that it has a
2314 BARRIER following it since it may have initially been a
2315 conditional jump. It may also be the last nonnote insn. */
2316
2317 if ((GET_CODE (newpat) == RETURN || simplejump_p (i3))
2318 && ((temp = next_nonnote_insn (i3)) == NULL_RTX
2319 || GET_CODE (temp) != BARRIER))
2320 emit_barrier_after (i3);
2321 }
2322
2323 combine_successes++;
2324
2325 if (added_links_insn
2326 && (newi2pat == 0 || INSN_CUID (added_links_insn) < INSN_CUID (i2))
2327 && INSN_CUID (added_links_insn) < INSN_CUID (i3))
2328 return added_links_insn;
2329 else
2330 return newi2pat ? i2 : i3;
2331}
2332\f
2333/* Undo all the modifications recorded in undobuf. */
2334
2335static void
2336undo_all ()
2337{
2338 register int i;
2339 if (undobuf.num_undo > MAX_UNDO)
2340 undobuf.num_undo = MAX_UNDO;
2341 for (i = undobuf.num_undo - 1; i >= 0; i--)
2342 {
2343 if (undobuf.undo[i].is_int)
2344 *undobuf.undo[i].where.i = undobuf.undo[i].old_contents.i;
2345 else
2346 *undobuf.undo[i].where.r = undobuf.undo[i].old_contents.r;
2347
2348 }
2349
2350 obfree (undobuf.storage);
2351 undobuf.num_undo = 0;
2352}
2353\f
2354/* Find the innermost point within the rtx at LOC, possibly LOC itself,
2355 where we have an arithmetic expression and return that point. LOC will
2356 be inside INSN.
2357
2358 try_combine will call this function to see if an insn can be split into
2359 two insns. */
2360
2361static rtx *
2362 find_split_point (loc, insn)
2363 rtx *loc;
2364 rtx insn;
2365{
2366 rtx x = *loc;
2367 enum rtx_code code = GET_CODE (x);
2368 rtx *split;
2369 int len = 0, pos, unsignedp;
2370 rtx inner;
2371
2372 /* First special-case some codes. */
2373 switch (code)
2374 {
2375 case SUBREG:
2376#ifdef INSN_SCHEDULING
2377 /* If we are making a paradoxical SUBREG invalid, it becomes a split
2378 point. */
2379 if (GET_CODE (SUBREG_REG (x)) == MEM)
2380 return loc;
2381#endif
2382 return find_split_point (&SUBREG_REG (x), insn);
2383
2384 case MEM:
2385#ifdef HAVE_lo_sum
2386 /* If we have (mem (const ..)) or (mem (symbol_ref ...)), split it
2387 using LO_SUM and HIGH. */
2388 if (GET_CODE (XEXP (x, 0)) == CONST
2389 || GET_CODE (XEXP (x, 0)) == SYMBOL_REF)
2390 {
2391 SUBST (XEXP (x, 0),
2392 gen_rtx_combine (LO_SUM, Pmode,
2393 gen_rtx_combine (HIGH, Pmode, XEXP (x, 0)),
2394 XEXP (x, 0)));
2395 return &XEXP (XEXP (x, 0), 0);
2396 }
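 /* (Illustrative sketch, not from the original source: the address in
    (mem (symbol_ref "foo")) becomes
    (lo_sum (high (symbol_ref "foo")) (symbol_ref "foo")),
    and the (high ...) part, returned just above, is the split point.)  */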
2397#endif
2398
2399 /* If we have a PLUS whose second operand is a constant and the
2400 address is not valid, perhaps we can split it up using
2401 the machine-specific way to split large constants. We use
2402 the first pseudo-reg (one of the virtual regs) as a placeholder;
2403 it will not remain in the result. */
2404 if (GET_CODE (XEXP (x, 0)) == PLUS
2405 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
2406 && ! memory_address_p (GET_MODE (x), XEXP (x, 0)))
2407 {
2408 rtx reg = regno_reg_rtx[FIRST_PSEUDO_REGISTER];
2409 rtx seq = split_insns (gen_rtx (SET, VOIDmode, reg, XEXP (x, 0)),
2410 subst_insn);
2411
2412 /* This should have produced two insns, each of which sets our
2413 placeholder. If the source of the second is a valid address,
2414 we can put both sources together and make a split point
2415 in the middle. */
2416
2417 if (seq && XVECLEN (seq, 0) == 2
2418 && GET_CODE (XVECEXP (seq, 0, 0)) == INSN
2419 && GET_CODE (PATTERN (XVECEXP (seq, 0, 0))) == SET
2420 && SET_DEST (PATTERN (XVECEXP (seq, 0, 0))) == reg
2421 && ! reg_mentioned_p (reg,
2422 SET_SRC (PATTERN (XVECEXP (seq, 0, 0))))
2423 && GET_CODE (XVECEXP (seq, 0, 1)) == INSN
2424 && GET_CODE (PATTERN (XVECEXP (seq, 0, 1))) == SET
2425 && SET_DEST (PATTERN (XVECEXP (seq, 0, 1))) == reg
2426 && memory_address_p (GET_MODE (x),
2427 SET_SRC (PATTERN (XVECEXP (seq, 0, 1)))))
2428 {
2429 rtx src1 = SET_SRC (PATTERN (XVECEXP (seq, 0, 0)));
2430 rtx src2 = SET_SRC (PATTERN (XVECEXP (seq, 0, 1)));
2431
2432 /* Replace the placeholder in SRC2 with SRC1. If we can
2433 find where in SRC2 it was placed, that can become our
2434 split point and we can replace this address with SRC2.
2435 Just try two obvious places. */
2436
2437 src2 = replace_rtx (src2, reg, src1);
2438 split = 0;
2439 if (XEXP (src2, 0) == src1)
2440 split = &XEXP (src2, 0);
2441 else if (GET_RTX_FORMAT (GET_CODE (XEXP (src2, 0)))[0] == 'e'
2442 && XEXP (XEXP (src2, 0), 0) == src1)
2443 split = &XEXP (XEXP (src2, 0), 0);
2444
2445 if (split)
2446 {
2447 SUBST (XEXP (x, 0), src2);
2448 return split;
2449 }
2450 }
2451
2452 /* If that didn't work, perhaps the first operand is complex and
2453 needs to be computed separately, so make a split point there.
2454 This will occur on machines that just support REG + CONST
2455 and have a constant moved through some previous computation. */
2456
2457 else if (GET_RTX_CLASS (GET_CODE (XEXP (XEXP (x, 0), 0))) != 'o'
2458 && ! (GET_CODE (XEXP (XEXP (x, 0), 0)) == SUBREG
2459 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (XEXP (x, 0), 0))))
2460 == 'o')))
2461 return &XEXP (XEXP (x, 0), 0);
2462 }
2463 break;
2464
2465 case SET:
2466#ifdef HAVE_cc0
2467 /* If SET_DEST is CC0 and SET_SRC is not an operand, a COMPARE, or a
2468 ZERO_EXTRACT, the most likely reason why this doesn't match is that
2469 we need to put the operand into a register. So split at that
2470 point. */
2471
2472 if (SET_DEST (x) == cc0_rtx
2473 && GET_CODE (SET_SRC (x)) != COMPARE
2474 && GET_CODE (SET_SRC (x)) != ZERO_EXTRACT
2475 && GET_RTX_CLASS (GET_CODE (SET_SRC (x))) != 'o'
2476 && ! (GET_CODE (SET_SRC (x)) == SUBREG
2477 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (SET_SRC (x)))) == 'o'))
2478 return &SET_SRC (x);
2479#endif
2480
2481 /* See if we can split SET_SRC as it stands. */
2482 split = find_split_point (&SET_SRC (x), insn);
2483 if (split && split != &SET_SRC (x))
2484 return split;
2485
2486 /* See if this is a bitfield assignment with everything constant. If
2487 so, this is an IOR of an AND, so split it into that. */
2488 if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
2489 && (GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
2490 <= HOST_BITS_PER_WIDE_INT)
2491 && GET_CODE (XEXP (SET_DEST (x), 1)) == CONST_INT
2492 && GET_CODE (XEXP (SET_DEST (x), 2)) == CONST_INT
2493 && GET_CODE (SET_SRC (x)) == CONST_INT
2494 && ((INTVAL (XEXP (SET_DEST (x), 1))
2495 + INTVAL (XEXP (SET_DEST (x), 2)))
2496 <= GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0))))
2497 && ! side_effects_p (XEXP (SET_DEST (x), 0)))
2498 {
2499 int pos = INTVAL (XEXP (SET_DEST (x), 2));
2500 int len = INTVAL (XEXP (SET_DEST (x), 1));
2501 int src = INTVAL (SET_SRC (x));
2502 rtx dest = XEXP (SET_DEST (x), 0);
2503 enum machine_mode mode = GET_MODE (dest);
2504 unsigned HOST_WIDE_INT mask = ((HOST_WIDE_INT) 1 << len) - 1;
2505
2506#if BITS_BIG_ENDIAN
2507 pos = GET_MODE_BITSIZE (mode) - len - pos;
2508#endif
2509
2510 if (src == mask)
2511 SUBST (SET_SRC (x),
2512 gen_binary (IOR, mode, dest, GEN_INT (src << pos)));
2513 else
2514 SUBST (SET_SRC (x),
2515 gen_binary (IOR, mode,
2516 gen_binary (AND, mode, dest,
2517 GEN_INT (~ (mask << pos)
2518 & GET_MODE_MASK (mode))),
2519 GEN_INT (src << pos)));
2520
2521 SUBST (SET_DEST (x), dest);
2522
2523 split = find_split_point (&SET_SRC (x), insn);
2524 if (split && split != &SET_SRC (x))
2525 return split;
2526 }
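 /* (Worked illustration, not from the original source, assuming SImode and
    BITS_BIG_ENDIAN == 0: storing (const_int 5) into a 4-bit field at bit 8
    of D, i.e. (set (zero_extract D 4 8) (const_int 5)), becomes
      (set D (ior (and D (const_int 0xfffff0ff)) (const_int 0x500)))
    via the code above, since mask == 15, pos == 8, and ~(15 << 8) masked
    to the mode is 0xfffff0ff.)  */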
2527
2528 /* Otherwise, see if this is an operation that we can split into two.
2529 If so, try to split that. */
2530 code = GET_CODE (SET_SRC (x));
2531
2532 switch (code)
2533 {
2534 case AND:
2535 /* If we are AND'ing with a large constant that is only a single
2536 bit and the result is only being used in a context where we
2537 need to know if it is zero or non-zero, replace it with a bit
2538 extraction. This will avoid the large constant, which might
2539 have taken more than one insn to make. If the constant were
2540 not a valid argument to the AND but took only one insn to make,
2541 this is no worse, but if it took more than one insn, it will
2542 be better. */
2543
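 /* (For instance, illustration only: if D is set to
    (and X (const_int 0x8000)) and its only use is a comparison
    (ne D (const_int 0)), the code below rewrites the source as a one-bit
    extraction at position 15, since exact_log2 of 0x8000 is 15, avoiding
    the large constant.)  */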
2544 if (GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
2545 && GET_CODE (XEXP (SET_SRC (x), 0)) == REG
2546 && (pos = exact_log2 (INTVAL (XEXP (SET_SRC (x), 1)))) >= 7
2547 && GET_CODE (SET_DEST (x)) == REG
2548 && (split = find_single_use (SET_DEST (x), insn, NULL_PTR)) != 0
2549 && (GET_CODE (*split) == EQ || GET_CODE (*split) == NE)
2550 && XEXP (*split, 0) == SET_DEST (x)
2551 && XEXP (*split, 1) == const0_rtx)
2552 {
2553 SUBST (SET_SRC (x),
2554 make_extraction (GET_MODE (SET_DEST (x)),
2555 XEXP (SET_SRC (x), 0),
2556 pos, NULL_RTX, 1, 1, 0, 0));
2557 return find_split_point (loc, insn);
2558 }
2559 break;
2560
230d793d
RS
2561 case SIGN_EXTEND:
2562 inner = XEXP (SET_SRC (x), 0);
2563 pos = 0;
2564 len = GET_MODE_BITSIZE (GET_MODE (inner));
2565 unsignedp = 0;
2566 break;
2567
2568 case SIGN_EXTRACT:
2569 case ZERO_EXTRACT:
2570 if (GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
2571 && GET_CODE (XEXP (SET_SRC (x), 2)) == CONST_INT)
2572 {
2573 inner = XEXP (SET_SRC (x), 0);
2574 len = INTVAL (XEXP (SET_SRC (x), 1));
2575 pos = INTVAL (XEXP (SET_SRC (x), 2));
2576
2577#if BITS_BIG_ENDIAN
2578 pos = GET_MODE_BITSIZE (GET_MODE (inner)) - len - pos;
2579#endif
2580 unsignedp = (code == ZERO_EXTRACT);
2581 }
2582 break;
2583 }
2584
2585 if (len && pos >= 0 && pos + len <= GET_MODE_BITSIZE (GET_MODE (inner)))
2586 {
2587 enum machine_mode mode = GET_MODE (SET_SRC (x));
2588
2589 /* For unsigned, we have a choice of a shift followed by an
2590 AND or two shifts. Use two shifts for field sizes where the
2591 constant might be too large. We assume here that we can
2592 always at least get 8-bit constants in an AND insn, which is
2593 true for every current RISC. */
2594
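 /* (Worked illustration, not from the original source: extracting an
    8-bit field at bit 4 of a 32-bit INNER gives, unsigned,
      (and (lshiftrt INNER (const_int 4)) (const_int 255))
    and, signed,
      (ashiftrt (ashift INNER (const_int 20)) (const_int 24)),
    since 32 - 8 - 4 == 20 and 32 - 8 == 24.)  */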
2595 if (unsignedp && len <= 8)
2596 {
2597 SUBST (SET_SRC (x),
2598 gen_rtx_combine
2599 (AND, mode,
2600 gen_rtx_combine (LSHIFTRT, mode,
2601 gen_lowpart_for_combine (mode, inner),
2602 GEN_INT (pos)),
2603 GEN_INT (((HOST_WIDE_INT) 1 << len) - 1)));
2604
2605 split = find_split_point (&SET_SRC (x), insn);
2606 if (split && split != &SET_SRC (x))
2607 return split;
2608 }
2609 else
2610 {
2611 SUBST (SET_SRC (x),
2612 gen_rtx_combine
2613 (unsignedp ? LSHIFTRT : ASHIFTRT, mode,
2614 gen_rtx_combine (ASHIFT, mode,
2615 gen_lowpart_for_combine (mode, inner),
2616 GEN_INT (GET_MODE_BITSIZE (mode)
2617 - len - pos)),
2618 GEN_INT (GET_MODE_BITSIZE (mode) - len)));
2619
2620 split = find_split_point (&SET_SRC (x), insn);
2621 if (split && split != &SET_SRC (x))
2622 return split;
2623 }
2624 }
2625
2626 /* See if this is a simple operation with a constant as the second
2627 operand. It might be that this constant is out of range and hence
2628 could be used as a split point. */
2629 if ((GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '2'
2630 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == 'c'
2631 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '<')
2632 && CONSTANT_P (XEXP (SET_SRC (x), 1))
2633 && (GET_RTX_CLASS (GET_CODE (XEXP (SET_SRC (x), 0))) == 'o'
2634 || (GET_CODE (XEXP (SET_SRC (x), 0)) == SUBREG
2635 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (SET_SRC (x), 0))))
2636 == 'o'))))
2637 return &XEXP (SET_SRC (x), 1);
2638
2639 /* Finally, see if this is a simple operation with its first operand
2640 not in a register. The operation might require this operand in a
2641 register, so return it as a split point. We can always do this
2642 because if the first operand were another operation, we would have
2643 already found it as a split point. */
2644 if ((GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '2'
2645 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == 'c'
2646 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '<'
2647 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '1')
2648 && ! register_operand (XEXP (SET_SRC (x), 0), VOIDmode))
2649 return &XEXP (SET_SRC (x), 0);
2650
2651 return 0;
2652
2653 case AND:
2654 case IOR:
2655 /* We write NOR as (and (not A) (not B)), but if we don't have a NOR,
2656 it is better to write this as (not (ior A B)) so we can split it.
2657 Similarly for IOR. */
2658 if (GET_CODE (XEXP (x, 0)) == NOT && GET_CODE (XEXP (x, 1)) == NOT)
2659 {
2660 SUBST (*loc,
2661 gen_rtx_combine (NOT, GET_MODE (x),
2662 gen_rtx_combine (code == IOR ? AND : IOR,
2663 GET_MODE (x),
2664 XEXP (XEXP (x, 0), 0),
2665 XEXP (XEXP (x, 1), 0))));
2666 return find_split_point (loc, insn);
2667 }
2668
2669 /* Many RISC machines have a large set of logical insns. If the
2670 second operand is a NOT, put it first so we will try to split the
2671 other operand first. */
2672 if (GET_CODE (XEXP (x, 1)) == NOT)
2673 {
2674 rtx tem = XEXP (x, 0);
2675 SUBST (XEXP (x, 0), XEXP (x, 1));
2676 SUBST (XEXP (x, 1), tem);
2677 }
2678 break;
2679 }
2680
2681 /* Otherwise, select our actions depending on our rtx class. */
2682 switch (GET_RTX_CLASS (code))
2683 {
2684 case 'b': /* This is ZERO_EXTRACT and SIGN_EXTRACT. */
2685 case '3':
2686 split = find_split_point (&XEXP (x, 2), insn);
2687 if (split)
2688 return split;
2689 /* ... fall through ... */
2690 case '2':
2691 case 'c':
2692 case '<':
2693 split = find_split_point (&XEXP (x, 1), insn);
2694 if (split)
2695 return split;
2696 /* ... fall through ... */
2697 case '1':
2698 /* Some machines have (and (shift ...) ...) insns. If X is not
2699 an AND, but XEXP (X, 0) is, use it as our split point. */
2700 if (GET_CODE (x) != AND && GET_CODE (XEXP (x, 0)) == AND)
2701 return &XEXP (x, 0);
2702
2703 split = find_split_point (&XEXP (x, 0), insn);
2704 if (split)
2705 return split;
2706 return loc;
2707 }
2708
2709 /* Otherwise, we don't have a split point. */
2710 return 0;
2711}
2712\f
2713/* Throughout X, replace FROM with TO, and return the result.
2714 The result is TO if X is FROM;
2715 otherwise the result is X, but its contents may have been modified.
2716 If they were modified, a record was made in undobuf so that
2717 undo_all will (among other things) return X to its original state.
2718
2719 If the number of changes necessary is too much to record to undo,
2720 the excess changes are not made, so the result is invalid.
2721 The changes already made can still be undone.
2722 undobuf.num_undo is incremented for such changes, so by testing that,
2723 the caller can tell whether the result is valid.
2724
2725 `n_occurrences' is incremented each time FROM is replaced.
2726
2727 IN_DEST is non-zero if we are processing the SET_DEST of a SET.
2728
2729 UNIQUE_COPY is non-zero if each substitution must be unique. We do this
2730 by copying if `n_occurrences' is non-zero. */
2731
2732static rtx
2733subst (x, from, to, in_dest, unique_copy)
2734 register rtx x, from, to;
2735 int in_dest;
2736 int unique_copy;
2737{
2738 register enum rtx_code code = GET_CODE (x);
2739 enum machine_mode op0_mode = VOIDmode;
2740 register char *fmt;
2741 register int len, i;
2742 rtx new;
2743
2744/* Two expressions are equal if they are identical copies of a shared
2745 RTX or if they are both registers with the same register number
2746 and mode. */
2747
2748#define COMBINE_RTX_EQUAL_P(X,Y) \
2749 ((X) == (Y) \
2750 || (GET_CODE (X) == REG && GET_CODE (Y) == REG \
2751 && REGNO (X) == REGNO (Y) && GET_MODE (X) == GET_MODE (Y)))
2752
2753 if (! in_dest && COMBINE_RTX_EQUAL_P (x, from))
2754 {
2755 n_occurrences++;
2756 return (unique_copy && n_occurrences > 1 ? copy_rtx (to) : to);
2757 }
2758
2759 /* If X and FROM are the same register but different modes, they will
2760 not have been seen as equal above. However, flow.c will make a
2761 LOG_LINKS entry for that case. If we do nothing, we will try to
2762 rerecognize our original insn and, when it succeeds, we will
2763 delete the feeding insn, which is incorrect.
2764
2765 So force this insn not to match in this (rare) case. */
2766 if (! in_dest && code == REG && GET_CODE (from) == REG
2767 && REGNO (x) == REGNO (from))
2768 return gen_rtx (CLOBBER, GET_MODE (x), const0_rtx);
2769
2770 /* If this is an object, we are done unless it is a MEM or LO_SUM, both
2771 of which may contain things that can be combined. */
2772 if (code != MEM && code != LO_SUM && GET_RTX_CLASS (code) == 'o')
2773 return x;
2774
2775 /* It is possible to have a subexpression appear twice in the insn.
2776 Suppose that FROM is a register that appears within TO.
2777 Then, after that subexpression has been scanned once by `subst',
2778 the second time it is scanned, TO may be found. If we were
2779 to scan TO here, we would find FROM within it and create a
2780 self-referent rtl structure which is completely wrong. */
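/* (For instance, illustration only: with FROM == (reg 60) and
   TO == (plus (reg 60) (const_int 1)), substituting inside the shared TO
   structure would turn TO into (plus TO (const_int 1)), a cycle.)  */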
2781 if (COMBINE_RTX_EQUAL_P (x, to))
2782 return to;
2783
2784 len = GET_RTX_LENGTH (code);
2785 fmt = GET_RTX_FORMAT (code);
2786
2787 /* We don't need to process a SET_DEST that is a register, CC0, or PC, so
2788 set up to skip this common case. All other cases where we want to
2789 suppress replacing something inside a SET_SRC are handled via the
2790 IN_DEST operand. */
2791 if (code == SET
2792 && (GET_CODE (SET_DEST (x)) == REG
2793 || GET_CODE (SET_DEST (x)) == CC0
2794 || GET_CODE (SET_DEST (x)) == PC))
2795 fmt = "ie";
2796
2797 /* Get the mode of operand 0 in case X is now a SIGN_EXTEND of a constant. */
2798 if (fmt[0] == 'e')
2799 op0_mode = GET_MODE (XEXP (x, 0));
2800
2801 for (i = 0; i < len; i++)
2802 {
2803 if (fmt[i] == 'E')
2804 {
2805 register int j;
2806 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2807 {
2808 if (COMBINE_RTX_EQUAL_P (XVECEXP (x, i, j), from))
2809 {
2810 new = (unique_copy && n_occurrences ? copy_rtx (to) : to);
2811 n_occurrences++;
2812 }
2813 else
2814 {
2815 new = subst (XVECEXP (x, i, j), from, to, 0, unique_copy);
2816
2817 /* If this substitution failed, this whole thing fails. */
2818 if (GET_CODE (new) == CLOBBER && XEXP (new, 0) == const0_rtx)
2819 return new;
2820 }
2821
2822 SUBST (XVECEXP (x, i, j), new);
2823 }
2824 }
2825 else if (fmt[i] == 'e')
2826 {
2827 if (COMBINE_RTX_EQUAL_P (XEXP (x, i), from))
2828 {
2829 /* In general, don't install a subreg involving two modes not
2830 tieable. It can worsen register allocation, and can even
2831 make invalid reload insns, since the reg inside may need to
2832 be copied from in the outside mode, and that may be invalid
2833 if it is an fp reg copied in integer mode.
2834
2835 We allow two exceptions to this: It is valid if it is inside
2836 another SUBREG and the mode of that SUBREG and the mode of
2837 the inside of TO is tieable and it is valid if X is a SET
2838 that copies FROM to CC0. */
2839 if (GET_CODE (to) == SUBREG
2840 && ! MODES_TIEABLE_P (GET_MODE (to),
2841 GET_MODE (SUBREG_REG (to)))
2842 && ! (code == SUBREG
2843 && MODES_TIEABLE_P (GET_MODE (x),
2844 GET_MODE (SUBREG_REG (to))))
2845#ifdef HAVE_cc0
2846 && ! (code == SET && i == 1 && XEXP (x, 0) == cc0_rtx)
2847#endif
2848 )
2849 return gen_rtx (CLOBBER, VOIDmode, const0_rtx);
2850
2851 new = (unique_copy && n_occurrences ? copy_rtx (to) : to);
2852 n_occurrences++;
2853 }
2854 else
2855 /* If we are in a SET_DEST, suppress most cases unless we
2856 have gone inside a MEM, in which case we want to
2857 simplify the address. We assume here that things that
2858 are actually part of the destination have their inner
2859 parts in the first expression. This is true for SUBREG,
2860 STRICT_LOW_PART, and ZERO_EXTRACT, which are the only
2861 things aside from REG and MEM that should appear in a
2862 SET_DEST. */
2863 new = subst (XEXP (x, i), from, to,
2864 (((in_dest
2865 && (code == SUBREG || code == STRICT_LOW_PART
2866 || code == ZERO_EXTRACT))
2867 || code == SET)
2868 && i == 0), unique_copy);
2869
2870 /* If we found that we will have to reject this combination,
2871 indicate that by returning the CLOBBER ourselves, rather than
2872 an expression containing it. This will speed things up as
2873 well as prevent accidents where two CLOBBERs are considered
2874 to be equal, thus producing an incorrect simplification. */
2875
2876 if (GET_CODE (new) == CLOBBER && XEXP (new, 0) == const0_rtx)
2877 return new;
2878
2879 SUBST (XEXP (x, i), new);
2880 }
2881 }
2882
2883 /* Try to simplify X. If the simplification changed the code, it is likely
2884 that further simplification will help, so loop, but limit the number
2885 of repetitions that will be performed. */
2886
2887 for (i = 0; i < 4; i++)
2888 {
2889 /* If X is sufficiently simple, don't bother trying to do anything
2890 with it. */
2891 if (code != CONST_INT && code != REG && code != CLOBBER)
2892 x = simplify_rtx (x, op0_mode, i == 3, in_dest);
2893
2894 if (GET_CODE (x) == code)
2895 break;
2896
2897 code = GET_CODE (x);
2898
2899 /* We no longer know the original mode of operand 0 since we
2900 have changed the form of X. */
2901 op0_mode = VOIDmode;
2902 }
2903
2904 return x;
2905}
2906\f
2907/* Simplify X, a piece of RTL. We just operate on the expression at the
2908 outer level; call `subst' to simplify recursively. Return the new
2909 expression.
2910
2911 OP0_MODE is the original mode of XEXP (x, 0); LAST is nonzero if this
2912 will be the final iteration even if an expression with a code different from
2913 X is returned; IN_DEST is nonzero if we are inside a SET_DEST. */
eeb43d32 2914
2915static rtx
2916simplify_rtx (x, op0_mode, last, in_dest)
2917 rtx x;
2918 enum machine_mode op0_mode;
2919 int last;
2920 int in_dest;
2921{
2922 enum rtx_code code = GET_CODE (x);
2923 enum machine_mode mode = GET_MODE (x);
2924 rtx temp;
2925 int i;
2926
2927 /* If this is a commutative operation, put a constant last and a complex
2928 expression first. We don't need to do this for comparisons here. */
2929 if (GET_RTX_CLASS (code) == 'c'
2930 && ((CONSTANT_P (XEXP (x, 0)) && GET_CODE (XEXP (x, 1)) != CONST_INT)
2931 || (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == 'o'
2932 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) != 'o')
2933 || (GET_CODE (XEXP (x, 0)) == SUBREG
2934 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0)))) == 'o'
2935 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) != 'o')))
2936 {
2937 temp = XEXP (x, 0);
2938 SUBST (XEXP (x, 0), XEXP (x, 1));
2939 SUBST (XEXP (x, 1), temp);
2940 }
2941
2942 /* If this is a PLUS, MINUS, or MULT, and the first operand is the
2943 sign extension of a PLUS with a constant, reverse the order of the sign
2944 extension and the addition. Note that this is not the same as the original
2945 code, but overflow is undefined for signed values. Also note that the
2946 PLUS will have been partially moved "inside" the sign-extension, so that
2947 the first operand of X will really look like:
2948 (ashiftrt (plus (ashift A C4) C5) C4).
2949 We convert this to
2950 (plus (ashiftrt (ashift A C4) C4) C6), where C6 is C5 shifted right by C4,
2951 and replace the first operand of X with that expression. Later parts
2952 of this function may simplify the expression further.
2953
2954 For example, if we start with (mult (sign_extend (plus A C1)) C2),
2955 we swap the SIGN_EXTEND and PLUS. Later code will apply the
2956 distributive law to produce (plus (mult (sign_extend A) C2) C3).
2957
2958 We do this to simplify address expressions. */
2959
2960 if ((code == PLUS || code == MINUS || code == MULT)
2961 && GET_CODE (XEXP (x, 0)) == ASHIFTRT
2962 && GET_CODE (XEXP (XEXP (x, 0), 0)) == PLUS
2963 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == ASHIFT
2964 && GET_CODE (XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 1)) == CONST_INT
2965 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
2966 && XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 1) == XEXP (XEXP (x, 0), 1)
2967 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
2968 && (temp = simplify_binary_operation (ASHIFTRT, mode,
2969 XEXP (XEXP (XEXP (x, 0), 0), 1),
2970 XEXP (XEXP (x, 0), 1))) != 0)
2971 {
2972 rtx new
2973 = simplify_shift_const (NULL_RTX, ASHIFT, mode,
2974 XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 0),
2975 INTVAL (XEXP (XEXP (x, 0), 1)));
2976
2977 new = simplify_shift_const (NULL_RTX, ASHIFTRT, mode, new,
2978 INTVAL (XEXP (XEXP (x, 0), 1)));
2979
2980 SUBST (XEXP (x, 0), gen_binary (PLUS, mode, new, temp));
2981 }
2982
2983 /* If this is a simple operation applied to an IF_THEN_ELSE, try
2984 applying it to the arms of the IF_THEN_ELSE. This often simplifies
2985 things. Check for cases where both arms are testing the same
2986 condition.
2987
2988 Don't do anything if all operands are very simple. */
2989
2990 if (((GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c'
2991 || GET_RTX_CLASS (code) == '<')
2992 && ((GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) != 'o'
2993 && ! (GET_CODE (XEXP (x, 0)) == SUBREG
2994 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0))))
2995 == 'o')))
2996 || (GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) != 'o'
2997 && ! (GET_CODE (XEXP (x, 1)) == SUBREG
2998 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 1))))
2999 == 'o')))))
3000 || (GET_RTX_CLASS (code) == '1'
3001 && ((GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) != 'o'
3002 && ! (GET_CODE (XEXP (x, 0)) == SUBREG
3003 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0))))
3004 == 'o'))))))
3005 {
3006 rtx cond, true, false;
3007
3008 cond = if_then_else_cond (x, &true, &false);
3009 if (cond != 0)
3010 {
3011 rtx cop1 = const0_rtx;
3012 enum rtx_code cond_code = simplify_comparison (NE, &cond, &cop1);
3013
3014 /* Simplify the alternative arms; this may collapse the true and
3015 false arms to store-flag values. */
3016 true = subst (true, pc_rtx, pc_rtx, 0, 0);
3017 false = subst (false, pc_rtx, pc_rtx, 0, 0);
3018
3019 /* Restarting if we generate a store-flag expression will cause
3020 us to loop. Just drop through in this case. */
3021
3022 /* If the result values are STORE_FLAG_VALUE and zero, we can
3023 just make the comparison operation. */
3024 if (true == const_true_rtx && false == const0_rtx)
3025 x = gen_binary (cond_code, mode, cond, cop1);
3026 else if (true == const0_rtx && false == const_true_rtx)
3027 x = gen_binary (reverse_condition (cond_code), mode, cond, cop1);
3028
3029 /* Likewise, we can make the negate of a comparison operation
3030 if the result values are - STORE_FLAG_VALUE and zero. */
3031 else if (GET_CODE (true) == CONST_INT
3032 && INTVAL (true) == - STORE_FLAG_VALUE
3033 && false == const0_rtx)
3034 x = gen_unary (NEG, mode, mode,
3035 gen_binary (cond_code, mode, cond, cop1));
3036 else if (GET_CODE (false) == CONST_INT
3037 && INTVAL (false) == - STORE_FLAG_VALUE
3038 && true == const0_rtx)
3039 x = gen_unary (NEG, mode, mode,
3040 gen_binary (reverse_condition (cond_code),
3041 mode, cond, cop1));
3042 else
3043 return gen_rtx (IF_THEN_ELSE, mode,
3044 gen_binary (cond_code, VOIDmode, cond, cop1),
3045 true, false);
3046
3047 code = GET_CODE (x);
3048 op0_mode = VOIDmode;
3049 }
3050 }
3051
3052 /* Try to fold this expression in case we have constants that weren't
3053 present before. */
3054 temp = 0;
3055 switch (GET_RTX_CLASS (code))
3056 {
3057 case '1':
3058 temp = simplify_unary_operation (code, mode, XEXP (x, 0), op0_mode);
3059 break;
3060 case '<':
3061 temp = simplify_relational_operation (code, op0_mode,
3062 XEXP (x, 0), XEXP (x, 1));
3063#ifdef FLOAT_STORE_FLAG_VALUE
3064 if (temp != 0 && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
3065 temp = ((temp == const0_rtx) ? CONST0_RTX (GET_MODE (x))
3066 : immed_real_const_1 (FLOAT_STORE_FLAG_VALUE, GET_MODE (x)));
3067#endif
3068 break;
3069 case 'c':
3070 case '2':
3071 temp = simplify_binary_operation (code, mode, XEXP (x, 0), XEXP (x, 1));
3072 break;
3073 case 'b':
3074 case '3':
3075 temp = simplify_ternary_operation (code, mode, op0_mode, XEXP (x, 0),
3076 XEXP (x, 1), XEXP (x, 2));
3077 break;
3078 }
3079
3080 if (temp)
3081 x = temp, code = GET_CODE (temp);
3082
3083 /* First see if we can apply the inverse distributive law. */
3084 if (code == PLUS || code == MINUS
3085 || code == AND || code == IOR || code == XOR)
3086 {
3087 x = apply_distributive_law (x);
3088 code = GET_CODE (x);
3089 }
3090
3091 /* If CODE is an associative operation not otherwise handled, see if we
3092 can associate some operands. This can win if they are constants or
3093 if they are logically related (i.e. (a & b) & a). */
3094 if ((code == PLUS || code == MINUS
3095 || code == MULT || code == AND || code == IOR || code == XOR
3096 || code == DIV || code == UDIV
3097 || code == SMAX || code == SMIN || code == UMAX || code == UMIN)
3098 && INTEGRAL_MODE_P (mode))
3099 {
3100 if (GET_CODE (XEXP (x, 0)) == code)
3101 {
3102 rtx other = XEXP (XEXP (x, 0), 0);
3103 rtx inner_op0 = XEXP (XEXP (x, 0), 1);
3104 rtx inner_op1 = XEXP (x, 1);
3105 rtx inner;
3106
3107 /* Make sure we pass the constant operand if any as the second
3108 one if this is a commutative operation. */
3109 if (CONSTANT_P (inner_op0) && GET_RTX_CLASS (code) == 'c')
3110 {
3111 rtx tem = inner_op0;
3112 inner_op0 = inner_op1;
3113 inner_op1 = tem;
3114 }
3115 inner = simplify_binary_operation (code == MINUS ? PLUS
3116 : code == DIV ? MULT
3117 : code == UDIV ? MULT
3118 : code,
3119 mode, inner_op0, inner_op1);
3120
3121 /* For commutative operations, try the other pair if that one
3122 didn't simplify. */
3123 if (inner == 0 && GET_RTX_CLASS (code) == 'c')
3124 {
3125 other = XEXP (XEXP (x, 0), 1);
3126 inner = simplify_binary_operation (code, mode,
3127 XEXP (XEXP (x, 0), 0),
3128 XEXP (x, 1));
3129 }
3130
3131 if (inner)
3132 return gen_binary (code, mode, other, inner);
3133 }
3134 }
3135
3136 /* A little bit of algebraic simplification here. */
3137 switch (code)
3138 {
3139 case MEM:
3140 /* Ensure that our address has any ASHIFTs converted to MULT in case
3141 address-recognizing predicates are called later. */
3142 temp = make_compound_operation (XEXP (x, 0), MEM);
3143 SUBST (XEXP (x, 0), temp);
3144 break;
3145
3146 case SUBREG:
3147 /* (subreg:A (mem:B X) N) becomes a modified MEM unless the SUBREG
3148 is paradoxical. If we can't do that safely, then it becomes
3149 something nonsensical so that this combination won't take place. */
3150
3151 if (GET_CODE (SUBREG_REG (x)) == MEM
3152 && (GET_MODE_SIZE (mode)
3153 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
3154 {
3155 rtx inner = SUBREG_REG (x);
3156 int endian_offset = 0;
3157 /* Don't change the mode of the MEM
3158 if that would change the meaning of the address. */
3159 if (MEM_VOLATILE_P (SUBREG_REG (x))
3160 || mode_dependent_address_p (XEXP (inner, 0)))
3161 return gen_rtx (CLOBBER, mode, const0_rtx);
3162
3163#if BYTES_BIG_ENDIAN
3164 if (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
3165 endian_offset += UNITS_PER_WORD - GET_MODE_SIZE (mode);
3166 if (GET_MODE_SIZE (GET_MODE (inner)) < UNITS_PER_WORD)
3167 endian_offset -= UNITS_PER_WORD - GET_MODE_SIZE (GET_MODE (inner));
3168#endif
3169 /* Note if the plus_constant doesn't make a valid address
3170 then this combination won't be accepted. */
3171 x = gen_rtx (MEM, mode,
3172 plus_constant (XEXP (inner, 0),
3173 (SUBREG_WORD (x) * UNITS_PER_WORD
3174 + endian_offset)));
3175 MEM_VOLATILE_P (x) = MEM_VOLATILE_P (inner);
3176 RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (inner);
3177 MEM_IN_STRUCT_P (x) = MEM_IN_STRUCT_P (inner);
3178 return x;
3179 }
3180
3181 /* If we are in a SET_DEST, these other cases can't apply. */
3182 if (in_dest)
3183 return x;
3184
3185 /* Changing mode twice with SUBREG => just change it once,
3186 or not at all if changing back to starting mode. */
3187 if (GET_CODE (SUBREG_REG (x)) == SUBREG)
3188 {
3189 if (mode == GET_MODE (SUBREG_REG (SUBREG_REG (x)))
3190 && SUBREG_WORD (x) == 0 && SUBREG_WORD (SUBREG_REG (x)) == 0)
3191 return SUBREG_REG (SUBREG_REG (x));
3192
3193 SUBST_INT (SUBREG_WORD (x),
3194 SUBREG_WORD (x) + SUBREG_WORD (SUBREG_REG (x)));
3195 SUBST (SUBREG_REG (x), SUBREG_REG (SUBREG_REG (x)));
3196 }
3197
3198 /* SUBREG of a hard register => just change the register number
3199 and/or mode. If the hard register is not valid in that mode,
3200 suppress this combination. If the hard register is the stack,
3201 frame, or argument pointer, leave this as a SUBREG. */
3202
3203 if (GET_CODE (SUBREG_REG (x)) == REG
3204 && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER
3205 && REGNO (SUBREG_REG (x)) != FRAME_POINTER_REGNUM
3206#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3207 && REGNO (SUBREG_REG (x)) != HARD_FRAME_POINTER_REGNUM
3208#endif
3209#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3210 && REGNO (SUBREG_REG (x)) != ARG_POINTER_REGNUM
3211#endif
3212 && REGNO (SUBREG_REG (x)) != STACK_POINTER_REGNUM)
3213 {
3214 if (HARD_REGNO_MODE_OK (REGNO (SUBREG_REG (x)) + SUBREG_WORD (x),
3215 mode))
3216 return gen_rtx (REG, mode,
3217 REGNO (SUBREG_REG (x)) + SUBREG_WORD (x));
3218 else
3219 return gen_rtx (CLOBBER, mode, const0_rtx);
3220 }
3221
3222 /* For a constant, try to pick up the part we want. Handle a full
3223 word and low-order part. Only do this if we are narrowing
3224 the constant; if it is being widened, we have no idea what
3225 the extra bits will have been set to. */
3226
3227 if (CONSTANT_P (SUBREG_REG (x)) && op0_mode != VOIDmode
3228 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
3229 && GET_MODE_SIZE (op0_mode) < UNITS_PER_WORD
3230 && GET_MODE_CLASS (mode) == MODE_INT)
3231 {
3232 temp = operand_subword (SUBREG_REG (x), SUBREG_WORD (x),
3233 0, op0_mode);
3234 if (temp)
3235 return temp;
3236 }
3237
3238 /* If we want a subreg of a constant, at offset 0,
3239 take the low bits. On a little-endian machine, that's
3240 always valid. On a big-endian machine, it's valid
3241 only if the constant's mode fits in one word. */
3242 if (CONSTANT_P (SUBREG_REG (x)) && subreg_lowpart_p (x)
3243 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (op0_mode)
3244#if WORDS_BIG_ENDIAN
3245 && GET_MODE_BITSIZE (op0_mode) <= BITS_PER_WORD
3246#endif
3247 )
3248 return gen_lowpart_for_combine (mode, SUBREG_REG (x));
3249
3250 /* A paradoxical SUBREG of a VOIDmode constant is the same constant,
3251 since we are saying that the high bits don't matter. */
3252 if (CONSTANT_P (SUBREG_REG (x)) && GET_MODE (SUBREG_REG (x)) == VOIDmode
3253 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (op0_mode))
3254 return SUBREG_REG (x);
3255
3256 /* Note that we cannot do any narrowing for non-constants since
3257 we might have been counting on using the fact that some bits were
3258 zero. We now do this in the SET. */
3259
3260 break;
3261
3262 case NOT:
3263 /* (not (plus X -1)) can become (neg X). */
3264 if (GET_CODE (XEXP (x, 0)) == PLUS
3265 && XEXP (XEXP (x, 0), 1) == constm1_rtx)
3266 return gen_rtx_combine (NEG, mode, XEXP (XEXP (x, 0), 0));
3267
3268 /* Similarly, (not (neg X)) is (plus X -1). */
3269 if (GET_CODE (XEXP (x, 0)) == NEG)
3270 return gen_rtx_combine (PLUS, mode, XEXP (XEXP (x, 0), 0),
3271 constm1_rtx);
3272
3273 /* (not (xor X C)) for C constant is (xor X D) with D = ~ C. */
3274 if (GET_CODE (XEXP (x, 0)) == XOR
3275 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3276 && (temp = simplify_unary_operation (NOT, mode,
3277 XEXP (XEXP (x, 0), 1),
3278 mode)) != 0)
3279 return gen_binary (XOR, mode, XEXP (XEXP (x, 0), 0), temp);
3280
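/* A standalone sketch (not part of this file) checking the two's-complement
   identities behind the three NOT rewrites above; it ignores overflow at
   the extremes.  */
#include <assert.h>

static void
check_not_rewrites (int x, int c)
{
  assert (~(x + -1) == -x);		/* (not (plus X -1)) == (neg X) */
  assert (~(-x) == x + -1);		/* (not (neg X)) == (plus X -1) */
  assert (~(x ^ c) == (x ^ ~c));	/* (not (xor X C)) == (xor X D), D = ~C */
}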
3281 /* (not (ashift 1 X)) is (rotate ~1 X). We used to do this for operands
3282 other than 1, but that is not valid. We could do a similar
3283 simplification for (not (lshiftrt C X)) where C is just the sign bit,
3284 but this doesn't seem common enough to bother with. */
3285 if (GET_CODE (XEXP (x, 0)) == ASHIFT
3286 && XEXP (XEXP (x, 0), 0) == const1_rtx)
3287 return gen_rtx (ROTATE, mode, gen_unary (NOT, mode, mode, const1_rtx),
3288 XEXP (XEXP (x, 0), 1));
3289
3290 if (GET_CODE (XEXP (x, 0)) == SUBREG
3291 && subreg_lowpart_p (XEXP (x, 0))
3292 && (GET_MODE_SIZE (GET_MODE (XEXP (x, 0)))
3293 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (x, 0)))))
3294 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == ASHIFT
3295 && XEXP (SUBREG_REG (XEXP (x, 0)), 0) == const1_rtx)
3296 {
3297 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (XEXP (x, 0)));
3298
3299 x = gen_rtx (ROTATE, inner_mode,
3300 gen_unary (NOT, inner_mode, inner_mode, const1_rtx),
3301 XEXP (SUBREG_REG (XEXP (x, 0)), 1));
3302 return gen_lowpart_for_combine (mode, x);
3303 }
3304
3305#if STORE_FLAG_VALUE == -1
3306 /* (not (comparison foo bar)) can be done by reversing the comparison
3307 code if valid. */
3308 if (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
3309 && reversible_comparison_p (XEXP (x, 0)))
3310 return gen_rtx_combine (reverse_condition (GET_CODE (XEXP (x, 0))),
3311 mode, XEXP (XEXP (x, 0), 0),
3312 XEXP (XEXP (x, 0), 1));
3313
3314 /* (ashiftrt foo C) where C is the number of bits in FOO minus 1
3315 is (lt foo (const_int 0)), so we can perform the above
3316 simplification. */
3317
3318 if (XEXP (x, 1) == const1_rtx
3319 && GET_CODE (XEXP (x, 0)) == ASHIFTRT
3320 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3321 && INTVAL (XEXP (XEXP (x, 0), 1)) == GET_MODE_BITSIZE (mode) - 1)
3322 return gen_rtx_combine (GE, mode, XEXP (XEXP (x, 0), 0), const0_rtx);
3323#endif
3324
3325 /* Apply De Morgan's laws to reduce number of patterns for machines
3326 with negating logical insns (and-not, nand, etc.). If result has
3327 only one NOT, put it first, since that is how the patterns are
3328 coded. */
3329
3330 if (GET_CODE (XEXP (x, 0)) == IOR || GET_CODE (XEXP (x, 0)) == AND)
3331 {
3332 rtx in1 = XEXP (XEXP (x, 0), 0), in2 = XEXP (XEXP (x, 0), 1);
3333
3334 if (GET_CODE (in1) == NOT)
3335 in1 = XEXP (in1, 0);
3336 else
3337 in1 = gen_rtx_combine (NOT, GET_MODE (in1), in1);
3338
3339 if (GET_CODE (in2) == NOT)
3340 in2 = XEXP (in2, 0);
3341 else if (GET_CODE (in2) == CONST_INT
3342 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3343 in2 = GEN_INT (GET_MODE_MASK (mode) & ~ INTVAL (in2));
3344 else
3345 in2 = gen_rtx_combine (NOT, GET_MODE (in2), in2);
3346
3347 if (GET_CODE (in2) == NOT)
3348 {
3349 rtx tem = in2;
3350 in2 = in1; in1 = tem;
3351 }
3352
3353 return gen_rtx_combine (GET_CODE (XEXP (x, 0)) == IOR ? AND : IOR,
3354 mode, in1, in2);
3355 }
3356 break;
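/* A standalone sketch (not part of this file) of the De Morgan rewrite
   above: a NOT of an IOR (or AND) becomes an AND (or IOR) of NOTs, which
   machines with and-not or nand insns can often match directly.  */
#include <assert.h>

static void
check_de_morgan (unsigned a, unsigned b)
{
  assert (~(a | b) == (~a & ~b));	/* (not (ior A B)) == (and (not A) (not B)) */
  assert (~(a & b) == (~a | ~b));	/* (not (and A B)) == (ior (not A) (not B)) */
}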
3357
3358 case NEG:
3359 /* (neg (plus X 1)) can become (not X). */
3360 if (GET_CODE (XEXP (x, 0)) == PLUS
3361 && XEXP (XEXP (x, 0), 1) == const1_rtx)
3362 return gen_rtx_combine (NOT, mode, XEXP (XEXP (x, 0), 0));
3363
3364 /* Similarly, (neg (not X)) is (plus X 1). */
3365 if (GET_CODE (XEXP (x, 0)) == NOT)
3366 return plus_constant (XEXP (XEXP (x, 0), 0), 1);
3367
3368 /* (neg (minus X Y)) can become (minus Y X). */
3369 if (GET_CODE (XEXP (x, 0)) == MINUS
3370 && (! FLOAT_MODE_P (mode)
3371 /* x-y != -(y-x) with IEEE floating point. */
3372 || TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3373 || flag_fast_math))
3374 return gen_binary (MINUS, mode, XEXP (XEXP (x, 0), 1),
3375 XEXP (XEXP (x, 0), 0));
3376
3377 /* (neg (xor A 1)) is (plus A -1) if A is known to be either 0 or 1. */
3378 if (GET_CODE (XEXP (x, 0)) == XOR && XEXP (XEXP (x, 0), 1) == const1_rtx
3379 && nonzero_bits (XEXP (XEXP (x, 0), 0), mode) == 1)
3380 return gen_binary (PLUS, mode, XEXP (XEXP (x, 0), 0), constm1_rtx);
3381
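/* A standalone sketch (not part of this file) checking the NEG rewrites
   above; the last line assumes A is known to be 0 or 1, as the nonzero_bits
   test guarantees.  Overflow at the extremes is ignored.  */
#include <assert.h>

static void
check_neg_rewrites (int x, int a)
{
  assert (-(x + 1) == ~x);		/* (neg (plus X 1)) == (not X) */
  assert (-(~x) == x + 1);		/* (neg (not X)) == (plus X 1) */
  if (a == 0 || a == 1)
    assert (-(a ^ 1) == a + -1);	/* (neg (xor A 1)) == (plus A -1) */
}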
3382 /* NEG commutes with ASHIFT since it is multiplication. Only do this
3383 if we can then eliminate the NEG (e.g.,
3384 if the operand is a constant). */
3385
3386 if (GET_CODE (XEXP (x, 0)) == ASHIFT)
3387 {
3388 temp = simplify_unary_operation (NEG, mode,
3389 XEXP (XEXP (x, 0), 0), mode);
3390 if (temp)
3391 {
3392 SUBST (XEXP (XEXP (x, 0), 0), temp);
3393 return XEXP (x, 0);
3394 }
3395 }
3396
3397 temp = expand_compound_operation (XEXP (x, 0));
3398
3399 /* For C equal to the width of MODE minus 1, (neg (ashiftrt X C)) can be
3400 replaced by (lshiftrt X C). This will convert
3401 (neg (sign_extract X 1 Y)) to (zero_extract X 1 Y). */
3402
3403 if (GET_CODE (temp) == ASHIFTRT
3404 && GET_CODE (XEXP (temp, 1)) == CONST_INT
3405 && INTVAL (XEXP (temp, 1)) == GET_MODE_BITSIZE (mode) - 1)
3406 return simplify_shift_const (temp, LSHIFTRT, mode, XEXP (temp, 0),
3407 INTVAL (XEXP (temp, 1)));
3408
3409 /* If X has only a single bit that might be nonzero, say, bit I, convert
3410 (neg X) to (ashiftrt (ashift X C-I) C-I) where C is the bitsize of
3411 MODE minus 1. This will convert (neg (zero_extract X 1 Y)) to
3412 (sign_extract X 1 Y). But only do this if TEMP isn't a register
3413 or a SUBREG of one since we'd be making the expression more
3414 complex if it was just a register. */
3415
3416 if (GET_CODE (temp) != REG
3417 && ! (GET_CODE (temp) == SUBREG
3418 && GET_CODE (SUBREG_REG (temp)) == REG)
3419 && (i = exact_log2 (nonzero_bits (temp, mode))) >= 0)
3420 {
3421 rtx temp1 = simplify_shift_const
3422 (NULL_RTX, ASHIFTRT, mode,
3423 simplify_shift_const (NULL_RTX, ASHIFT, mode, temp,
3424 GET_MODE_BITSIZE (mode) - 1 - i),
3425 GET_MODE_BITSIZE (mode) - 1 - i);
3426
3427 /* If all we did was surround TEMP with the two shifts, we
3428 haven't improved anything, so don't use it. Otherwise,
3429 we are better off with TEMP1. */
3430 if (GET_CODE (temp1) != ASHIFTRT
3431 || GET_CODE (XEXP (temp1, 0)) != ASHIFT
3432 || XEXP (XEXP (temp1, 0), 0) != temp)
3433 return temp1;
3434 }
3435 break;
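/* A standalone sketch (not part of this file) of the single-bit NEG rewrite
   above, on 32-bit values: if only bit I of X can be nonzero, (neg X) is X
   shifted left and then arithmetically right by 31 - I.  It assumes the
   usual two's-complement, sign-filling right shift, which C leaves
   implementation-defined.  */
#include <assert.h>
#include <stdint.h>

static void
check_single_bit_neg (uint32_t x, int i)
{
  if (i >= 0 && i < 31 && (x & ~((uint32_t) 1 << i)) == 0)
    {
      int32_t t = (int32_t) (x << (31 - i));	/* move bit I into the sign bit */
      assert ((t >> (31 - i)) == -(int32_t) x);	/* sign-fill back down to bit I */
    }
}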
3436
3437 case FLOAT_TRUNCATE:
3438 /* (float_truncate:SF (float_extend:DF foo:SF)) = foo:SF. */
3439 if (GET_CODE (XEXP (x, 0)) == FLOAT_EXTEND
3440 && GET_MODE (XEXP (XEXP (x, 0), 0)) == mode)
3441 return XEXP (XEXP (x, 0), 0);
3442
3443 /* (float_truncate:SF (OP:DF (float_extend:DF foo:sf))) is
3444 (OP:SF foo:SF) if OP is NEG or ABS. */
3445 if ((GET_CODE (XEXP (x, 0)) == ABS
3446 || GET_CODE (XEXP (x, 0)) == NEG)
3447 && GET_CODE (XEXP (XEXP (x, 0), 0)) == FLOAT_EXTEND
3448 && GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == mode)
3449 return gen_unary (GET_CODE (XEXP (x, 0)), mode, mode,
3450 XEXP (XEXP (XEXP (x, 0), 0), 0));
3451
3452 /* (float_truncate:SF (subreg:DF (float_truncate:SF X) 0))
3453 is (float_truncate:SF x). */
3454 if (GET_CODE (XEXP (x, 0)) == SUBREG
3455 && subreg_lowpart_p (XEXP (x, 0))
3456 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == FLOAT_TRUNCATE)
3457 return SUBREG_REG (XEXP (x, 0));
3458 break;
3459
3460#ifdef HAVE_cc0
3461 case COMPARE:
3462 /* Convert (compare FOO (const_int 0)) to FOO unless we aren't
3463 using cc0, in which case we want to leave it as a COMPARE
3464 so we can distinguish it from a register-register-copy. */
3465 if (XEXP (x, 1) == const0_rtx)
3466 return XEXP (x, 0);
3467
3468 /* In IEEE floating point, x-0 is not the same as x. */
3469 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3470 || ! FLOAT_MODE_P (GET_MODE (XEXP (x, 0)))
3471 || flag_fast_math)
3472 && XEXP (x, 1) == CONST0_RTX (GET_MODE (XEXP (x, 0))))
3473 return XEXP (x, 0);
3474 break;
3475#endif
3476
3477 case CONST:
3478 /* (const (const X)) can become (const X). Do it this way rather than
3479 returning the inner CONST since CONST can be shared with a
3480 REG_EQUAL note. */
3481 if (GET_CODE (XEXP (x, 0)) == CONST)
3482 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
3483 break;
3484
3485#ifdef HAVE_lo_sum
3486 case LO_SUM:
3487 /* Convert (lo_sum (high FOO) FOO) to FOO. This is necessary so we
3488 can add in an offset. find_split_point will split this address up
3489 again if it doesn't match. */
3490 if (GET_CODE (XEXP (x, 0)) == HIGH
3491 && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1)))
3492 return XEXP (x, 1);
3493 break;
3494#endif
3495
3496 case PLUS:
3497 /* If we have (plus (plus (A const) B)), associate it so that CONST is
3498 outermost. That's because that's the way indexed addresses are
3499 supposed to appear. This code used to check many more cases, but
3500 they are now checked elsewhere. */
3501 if (GET_CODE (XEXP (x, 0)) == PLUS
3502 && CONSTANT_ADDRESS_P (XEXP (XEXP (x, 0), 1)))
3503 return gen_binary (PLUS, mode,
3504 gen_binary (PLUS, mode, XEXP (XEXP (x, 0), 0),
3505 XEXP (x, 1)),
3506 XEXP (XEXP (x, 0), 1));
3507
3508 /* (plus (xor (and <foo> (const_int pow2 - 1)) <c>) <-c>)
3509 when c is (const_int (pow2 + 1) / 2) is a sign extension of a
3510 bit-field and can be replaced by either a sign_extend or a
3511 sign_extract. The `and' may be a zero_extend. */
3512 if (GET_CODE (XEXP (x, 0)) == XOR
3513 && GET_CODE (XEXP (x, 1)) == CONST_INT
3514 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3515 && INTVAL (XEXP (x, 1)) == - INTVAL (XEXP (XEXP (x, 0), 1))
3516 && (i = exact_log2 (INTVAL (XEXP (XEXP (x, 0), 1)))) >= 0
3517 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3518 && ((GET_CODE (XEXP (XEXP (x, 0), 0)) == AND
3519 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
3520 && (INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1))
3521 == ((HOST_WIDE_INT) 1 << (i + 1)) - 1))
3522 || (GET_CODE (XEXP (XEXP (x, 0), 0)) == ZERO_EXTEND
3523 && (GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)))
3524 == i + 1))))
3525 return simplify_shift_const
3526 (NULL_RTX, ASHIFTRT, mode,
3527 simplify_shift_const (NULL_RTX, ASHIFT, mode,
3528 XEXP (XEXP (XEXP (x, 0), 0), 0),
3529 GET_MODE_BITSIZE (mode) - (i + 1)),
3530 GET_MODE_BITSIZE (mode) - (i + 1));
3531
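/* A standalone sketch (not part of this file) of the sign-extension rewrite
   above: for an (I+1)-bit field F, (F ^ 2^I) - 2^I is F sign-extended,
   which is exactly what the paired ashift/ashiftrt compute.  */
#include <assert.h>

static void
check_xor_sign_extend (void)
{
  int field = 0xA;			/* the 4-bit field 1010, i.e. -6 signed */
  int c = 1 << 3;			/* 2^I with I == 3 */
  assert (((field ^ c) - c) == -6);	/* (plus (xor (and X 15) 8) -8) */
}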
3532 /* (plus (comparison A B) C) can become (neg (rev-comp A B)) if
3533 C is 1 and STORE_FLAG_VALUE is -1 or if C is -1 and STORE_FLAG_VALUE
3534 is 1. This produces better code than the alternative immediately
3535 below. */
3536 if (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
3537 && reversible_comparison_p (XEXP (x, 0))
3538 && ((STORE_FLAG_VALUE == -1 && XEXP (x, 1) == const1_rtx)
3539 || (STORE_FLAG_VALUE == 1 && XEXP (x, 1) == constm1_rtx)))
3540 return
3541 gen_unary (NEG, mode, mode,
3542 gen_binary (reverse_condition (GET_CODE (XEXP (x, 0))),
3543 mode, XEXP (XEXP (x, 0), 0),
3544 XEXP (XEXP (x, 0), 1)));
3545
3546 /* If only the low-order bit of X is possibly nonzero, (plus x -1)
3547 can become (ashiftrt (ashift (xor x 1) C) C) where C is
3548 the bitsize of the mode - 1. This allows simplification of
3549 "a = (b & 8) == 0;" */
3550 if (XEXP (x, 1) == constm1_rtx
3551 && GET_CODE (XEXP (x, 0)) != REG
3552 && ! (GET_CODE (XEXP (x,0)) == SUBREG
3553 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG)
3554 && nonzero_bits (XEXP (x, 0), mode) == 1)
3555 return simplify_shift_const (NULL_RTX, ASHIFTRT, mode,
3556 simplify_shift_const (NULL_RTX, ASHIFT, mode,
3557 gen_rtx_combine (XOR, mode,
3558 XEXP (x, 0), const1_rtx),
3559 GET_MODE_BITSIZE (mode) - 1),
3560 GET_MODE_BITSIZE (mode) - 1);
3561
3562 /* If we are adding two things that have no bits in common, convert
3563 the addition into an IOR. This will often be further simplified,
3564 for example in cases like ((a & 1) + (a & 2)), which can
3565 become a & 3. */
3566
3567 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3568 && (nonzero_bits (XEXP (x, 0), mode)
3569 & nonzero_bits (XEXP (x, 1), mode)) == 0)
3570 return gen_binary (IOR, mode, XEXP (x, 0), XEXP (x, 1));
3571 break;
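/* A standalone sketch (not part of this file) of the rewrite above: when
   two addends share no nonzero bits there can be no carries, so addition
   and inclusive or agree, and (a & 1) + (a & 2) folds to a & 3.  */
#include <assert.h>

static void
check_disjoint_plus (unsigned a, unsigned b)
{
  if ((a & b) == 0)
    assert (a + b == (a | b));		/* no common bits: PLUS == IOR */
  assert ((a & 1) + (a & 2) == (a & 3));
}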
3572
3573 case MINUS:
3574#if STORE_FLAG_VALUE == 1
3575 /* (minus 1 (comparison foo bar)) can be done by reversing the comparison
3576 code if valid. */
3577 if (XEXP (x, 0) == const1_rtx
3578 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<'
3579 && reversible_comparison_p (XEXP (x, 1)))
3580 return gen_binary (reverse_condition (GET_CODE (XEXP (x, 1))),
3581 mode, XEXP (XEXP (x, 1), 0),
3582 XEXP (XEXP (x, 1), 1));
3583#endif
3584
3585 /* (minus <foo> (and <foo> (const_int -pow2))) becomes
3586 (and <foo> (const_int pow2-1)) */
3587 if (GET_CODE (XEXP (x, 1)) == AND
3588 && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
3589 && exact_log2 (- INTVAL (XEXP (XEXP (x, 1), 1))) >= 0
3590 && rtx_equal_p (XEXP (XEXP (x, 1), 0), XEXP (x, 0)))
3591 return simplify_and_const_int (NULL_RTX, mode, XEXP (x, 0),
3592 - INTVAL (XEXP (XEXP (x, 1), 1)) - 1);
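/* A standalone sketch (not part of this file) of the rewrite above with
   pow2 == 8: subtracting the bits of X at or above the power of two leaves
   just the bits below it.  */
#include <assert.h>

static void
check_minus_and (int x)
{
  assert (x - (x & -8) == (x & 7));	/* (minus X (and X -8)) == (and X 7) */
}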
3593
3594 /* Canonicalize (minus A (plus B C)) to (minus (minus A B) C) for
3595 integers. */
3596 if (GET_CODE (XEXP (x, 1)) == PLUS && INTEGRAL_MODE_P (mode))
3597 return gen_binary (MINUS, mode,
3598 gen_binary (MINUS, mode, XEXP (x, 0),
3599 XEXP (XEXP (x, 1), 0)),
3600 XEXP (XEXP (x, 1), 1));
3601 break;
3602
3603 case MULT:
3604 /* If we have (mult (plus A B) C), apply the distributive law and then
3605 the inverse distributive law to see if things simplify. This
3606 occurs mostly in addresses, often when unrolling loops. */
3607
3608 if (GET_CODE (XEXP (x, 0)) == PLUS)
3609 {
3610 x = apply_distributive_law
3611 (gen_binary (PLUS, mode,
3612 gen_binary (MULT, mode,
3613 XEXP (XEXP (x, 0), 0), XEXP (x, 1)),
3614 gen_binary (MULT, mode,
3615 XEXP (XEXP (x, 0), 1), XEXP (x, 1))));
3616
3617 if (GET_CODE (x) != MULT)
3618 return x;
3619 }
3620 break;
3621
3622 case UDIV:
3623 /* If this is a divide by a power of two, treat it as a shift if
3624 its first operand is a shift. */
3625 if (GET_CODE (XEXP (x, 1)) == CONST_INT
3626 && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0
3627 && (GET_CODE (XEXP (x, 0)) == ASHIFT
3628 || GET_CODE (XEXP (x, 0)) == LSHIFTRT
3629 || GET_CODE (XEXP (x, 0)) == ASHIFTRT
3630 || GET_CODE (XEXP (x, 0)) == ROTATE
3631 || GET_CODE (XEXP (x, 0)) == ROTATERT))
3632 return simplify_shift_const (NULL_RTX, LSHIFTRT, mode, XEXP (x, 0), i);
3633 break;
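/* A standalone sketch (not part of this file): unsigned division by a power
   of two is a logical right shift, which is why the UDIV case above hands a
   shift count to simplify_shift_const.  */
#include <assert.h>

static void
check_udiv_shift (unsigned x)
{
  assert (x / 8 == x >> 3);		/* (udiv X 8) == (lshiftrt X 3) */
}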
3634
3635 case EQ: case NE:
3636 case GT: case GTU: case GE: case GEU:
3637 case LT: case LTU: case LE: case LEU:
3638 /* If the first operand is a condition code, we can't do anything
3639 with it. */
3640 if (GET_CODE (XEXP (x, 0)) == COMPARE
3641 || (GET_MODE_CLASS (GET_MODE (XEXP (x, 0))) != MODE_CC
3642#ifdef HAVE_cc0
3643 && XEXP (x, 0) != cc0_rtx
3644#endif
3645 ))
3646 {
3647 rtx op0 = XEXP (x, 0);
3648 rtx op1 = XEXP (x, 1);
3649 enum rtx_code new_code;
3650
3651 if (GET_CODE (op0) == COMPARE)
3652 op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);
3653
3654 /* Simplify our comparison, if possible. */
3655 new_code = simplify_comparison (code, &op0, &op1);
3656
3657#if STORE_FLAG_VALUE == 1
3658 /* If STORE_FLAG_VALUE is 1, we can convert (ne x 0) to simply X
3659 if only the low-order bit is possibly nonzero in X (such as when
3660 X is a ZERO_EXTRACT of one bit). Similarly, we can convert EQ to
3661 (xor X 1) or (minus 1 X); we use the former. Finally, if X is
3662 known to be either 0 or -1, NE becomes a NEG and EQ becomes
3663 (plus X 1).
3664
3665 Remove any ZERO_EXTRACT we made when thinking this was a
3666 comparison. It may now be simpler to use, e.g., an AND. If a
3667 ZERO_EXTRACT is indeed appropriate, it will be placed back by
3668 the call to make_compound_operation in the SET case. */
3669
3670 if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
3671 && op1 == const0_rtx
3672 && nonzero_bits (op0, mode) == 1)
3673 return gen_lowpart_for_combine (mode,
3674 expand_compound_operation (op0));
3675
3676 else if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
3677 && op1 == const0_rtx
3678 && (num_sign_bit_copies (op0, mode)
3679 == GET_MODE_BITSIZE (mode)))
3680 {
3681 op0 = expand_compound_operation (op0);
3682 return gen_unary (NEG, mode, mode,
3683 gen_lowpart_for_combine (mode, op0));
3684 }
3685
3686 else if (new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
3687 && op1 == const0_rtx
3688 && nonzero_bits (op0, mode) == 1)
3689 {
3690 op0 = expand_compound_operation (op0);
3691 return gen_binary (XOR, mode,
3692 gen_lowpart_for_combine (mode, op0),
3693 const1_rtx);
3694 }
3695
3696 else if (new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
3697 && op1 == const0_rtx
3698 && (num_sign_bit_copies (op0, mode)
3699 == GET_MODE_BITSIZE (mode)))
3700 {
3701 op0 = expand_compound_operation (op0);
3702 return plus_constant (gen_lowpart_for_combine (mode, op0), 1);
3703 }
3704#endif
3705
3706#if STORE_FLAG_VALUE == -1
5109d49f
RK
3707 /* If STORE_FLAG_VALUE is -1, we have cases similar to
3708 those above. */
3709 if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
3710 && op1 == const0_rtx
3711 && (num_sign_bit_copies (op0, mode)
3712 == GET_MODE_BITSIZE (mode)))
3713 return gen_lowpart_for_combine (mode,
3714 expand_compound_operation (op0));
3715
3716 else if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
3717 && op1 == const0_rtx
3718 && nonzero_bits (op0, mode) == 1)
3719 {
3720 op0 = expand_compound_operation (op0);
3721 return gen_unary (NEG, mode, mode,
3722 gen_lowpart_for_combine (mode, op0));
3723 }
3724
3725 else if (new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
3726 && op1 == const0_rtx
3727 && (num_sign_bit_copies (op0, mode)
3728 == GET_MODE_BITSIZE (mode)))
3729 {
3730 op0 = expand_compound_operation (op0);
3731 return gen_unary (NOT, mode, mode,
3732 gen_lowpart_for_combine (mode, op0));
3733 }
3734
3735 /* If X is 0/1, (eq X 0) is X-1. */
3736 else if (new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
3737 && op1 == const0_rtx
3738 && nonzero_bits (op0, mode) == 1)
3739 {
3740 op0 = expand_compound_operation (op0);
3741 return plus_constant (gen_lowpart_for_combine (mode, op0), -1);
3742 }
3743#endif
3744
3745 /* If STORE_FLAG_VALUE says to just test the sign bit and X has just
3746 one bit that might be nonzero, we can convert (ne x 0) to
3747 (ashift x c) where C puts the bit in the sign bit. Remove any
3748 AND with STORE_FLAG_VALUE when we are done, since we are only
3749 going to test the sign bit. */
3750 if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
3751 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3752 && (STORE_FLAG_VALUE
3753 == (HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (mode) - 1))
3754 && op1 == const0_rtx
3755 && mode == GET_MODE (op0)
3756 && (i = exact_log2 (nonzero_bits (op0, mode))) >= 0)
3757 {
3758 x = simplify_shift_const (NULL_RTX, ASHIFT, mode,
3759 expand_compound_operation (op0),
3760 GET_MODE_BITSIZE (mode) - 1 - i);
3761 if (GET_CODE (x) == AND && XEXP (x, 1) == const_true_rtx)
3762 return XEXP (x, 0);
3763 else
3764 return x;
3765 }
3766
3767 /* If the code changed, return a whole new comparison. */
3768 if (new_code != code)
3769 return gen_rtx_combine (new_code, mode, op0, op1);
3770
3771 /* Otherwise, keep this operation, but maybe change its operands.
3772 This also converts (ne (compare FOO BAR) 0) to (ne FOO BAR). */
3773 SUBST (XEXP (x, 0), op0);
3774 SUBST (XEXP (x, 1), op1);
3775 }
3776 break;
3777
3778 case IF_THEN_ELSE:
3779 return simplify_if_then_else (x);
3780
3781 case ZERO_EXTRACT:
3782 case SIGN_EXTRACT:
3783 case ZERO_EXTEND:
3784 case SIGN_EXTEND:
3785 /* If we are processing SET_DEST, we are done. */
3786 if (in_dest)
3787 return x;
3788
3789 return expand_compound_operation (x);
3790
3791 case SET:
3792 return simplify_set (x);
3793
3794 case AND:
3795 case IOR:
3796 case XOR:
3797 return simplify_logical (x, last);
3798
3799 case ABS:
3800 /* (abs (neg <foo>)) -> (abs <foo>) */
3801 if (GET_CODE (XEXP (x, 0)) == NEG)
3802 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
3803
3804 /* If operand is something known to be positive, ignore the ABS. */
3805 if (GET_CODE (XEXP (x, 0)) == FFS || GET_CODE (XEXP (x, 0)) == ABS
3806 || ((GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
3807 <= HOST_BITS_PER_WIDE_INT)
3808 && ((nonzero_bits (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
3809 & ((HOST_WIDE_INT) 1
3810 << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1)))
3811 == 0)))
3812 return XEXP (x, 0);
3813
3814
3815 /* If operand is known to be only -1 or 0, convert ABS to NEG. */
3816 if (num_sign_bit_copies (XEXP (x, 0), mode) == GET_MODE_BITSIZE (mode))
3817 return gen_rtx_combine (NEG, mode, XEXP (x, 0));
3818
3819 break;
3820
3821 case FFS:
3822 /* (ffs (*_extend <X>)) = (ffs <X>) */
3823 if (GET_CODE (XEXP (x, 0)) == SIGN_EXTEND
3824 || GET_CODE (XEXP (x, 0)) == ZERO_EXTEND)
3825 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
3826 break;
3827
3828 case FLOAT:
3829 /* (float (sign_extend <X>)) = (float <X>). */
3830 if (GET_CODE (XEXP (x, 0)) == SIGN_EXTEND)
3831 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
3832 break;
3833
3834 case ASHIFT:
3835 case LSHIFTRT:
3836 case ASHIFTRT:
3837 case ROTATE:
3838 case ROTATERT:
3839 /* If this is a shift by a constant amount, simplify it. */
3840 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
3841 return simplify_shift_const (x, code, mode, XEXP (x, 0),
3842 INTVAL (XEXP (x, 1)));
3843
3844#ifdef SHIFT_COUNT_TRUNCATED
3845 else if (SHIFT_COUNT_TRUNCATED && GET_CODE (XEXP (x, 1)) != REG)
3846 SUBST (XEXP (x, 1),
3847 force_to_mode (XEXP (x, 1), GET_MODE (x),
3848 ((HOST_WIDE_INT) 1
3849 << exact_log2 (GET_MODE_BITSIZE (GET_MODE (x))))
3850 - 1,
3851 NULL_RTX, 0));
3852#endif
3853
3854 break;
3855 }
3856
3857 return x;
3858}
3859\f
3860/* Simplify X, an IF_THEN_ELSE expression. Return the new expression. */
3861
3862static rtx
3863simplify_if_then_else (x)
3864 rtx x;
3865{
3866 enum machine_mode mode = GET_MODE (x);
3867 rtx cond = XEXP (x, 0);
3868 rtx true = XEXP (x, 1);
3869 rtx false = XEXP (x, 2);
3870 enum rtx_code true_code = GET_CODE (cond);
3871 int comparison_p = GET_RTX_CLASS (true_code) == '<';
3872 rtx temp;
3873 int i;
3874
3875 /* Simplify storing of the truth value. */
3876 if (comparison_p && true == const_true_rtx && false == const0_rtx)
3877 return gen_binary (true_code, mode, XEXP (cond, 0), XEXP (cond, 1));
3878
3879 /* Also when the truth value has to be reversed. */
3880 if (comparison_p && reversible_comparison_p (cond)
3881 && true == const0_rtx && false == const_true_rtx)
3882 return gen_binary (reverse_condition (true_code),
3883 mode, XEXP (cond, 0), XEXP (cond, 1));
3884
3885 /* Sometimes we can simplify the arm of an IF_THEN_ELSE if a register used
3886 in it is being compared against certain values. Get the true and false
3887 comparisons and see if that says anything about the value of each arm. */
3888
3889 if (comparison_p && reversible_comparison_p (cond)
3890 && GET_CODE (XEXP (cond, 0)) == REG)
3891 {
3892 HOST_WIDE_INT nzb;
3893 rtx from = XEXP (cond, 0);
3894 enum rtx_code false_code = reverse_condition (true_code);
3895 rtx true_val = XEXP (cond, 1);
3896 rtx false_val = true_val;
3897 int swapped = 0;
3898
3899 /* If FALSE_CODE is EQ, swap the codes and arms. */
3900
3901 if (false_code == EQ)
3902 {
3903 swapped = 1, true_code = EQ, false_code = NE;
3904 temp = true, true = false, false = temp;
3905 }
3906
3907 /* If we are comparing against zero and the expression being tested has
3908 only a single bit that might be nonzero, that is its value when it is
3909 not equal to zero. Similarly if it is known to be -1 or 0. */
3910
3911 if (true_code == EQ && true_val == const0_rtx
3912 && exact_log2 (nzb = nonzero_bits (from, GET_MODE (from))) >= 0)
3913 false_code = EQ, false_val = GEN_INT (nzb);
3914 else if (true_code == EQ && true_val == const0_rtx
3915 && (num_sign_bit_copies (from, GET_MODE (from))
3916 == GET_MODE_BITSIZE (GET_MODE (from))))
3917 false_code = EQ, false_val = constm1_rtx;
3918
3919 /* Now simplify an arm if we know the value of the register in the
3920 branch and it is used in the arm. Be careful due to the potential
3921 of locally-shared RTL. */
3922
3923 if (reg_mentioned_p (from, true))
3924 true = subst (known_cond (copy_rtx (true), true_code, from, true_val),
3925 pc_rtx, pc_rtx, 0, 0);
3926 if (reg_mentioned_p (from, false))
3927 false = subst (known_cond (copy_rtx (false), false_code,
3928 from, false_val),
3929 pc_rtx, pc_rtx, 0, 0);
3930
3931 SUBST (XEXP (x, 1), swapped ? false : true);
3932 SUBST (XEXP (x, 2), swapped ? true : false);
3933
3934 true = XEXP (x, 1), false = XEXP (x, 2), true_code = GET_CODE (cond);
3935 }
3936
3937 /* If we have (if_then_else FOO (pc) (label_ref BAR)) and FOO can be
3938 reversed, do so to avoid needing two sets of patterns for
3939 subtract-and-branch insns. Similarly if we have a constant in the true
3940 arm, the false arm is the same as the first operand of the comparison, or
3941 the false arm is more complicated than the true arm. */
3942
3943 if (comparison_p && reversible_comparison_p (cond)
3944 && (true == pc_rtx
3945 || (CONSTANT_P (true)
3946 && GET_CODE (false) != CONST_INT && false != pc_rtx)
3947 || true == const0_rtx
3948 || (GET_RTX_CLASS (GET_CODE (true)) == 'o'
3949 && GET_RTX_CLASS (GET_CODE (false)) != 'o')
3950 || (GET_CODE (true) == SUBREG
3951 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (true))) == 'o'
3952 && GET_RTX_CLASS (GET_CODE (false)) != 'o')
3953 || reg_mentioned_p (true, false)
3954 || rtx_equal_p (false, XEXP (cond, 0))))
3955 {
3956 true_code = reverse_condition (true_code);
3957 SUBST (XEXP (x, 0),
3958 gen_binary (true_code, GET_MODE (cond), XEXP (cond, 0),
3959 XEXP (cond, 1)));
3960
3961 SUBST (XEXP (x, 1), false);
3962 SUBST (XEXP (x, 2), true);
3963
3964 temp = true, true = false, false = temp, cond = XEXP (x, 0);
3965 }
3966
8079805d 3967 /* If the two arms are identical, we don't need the comparison. */
3968
3969 if (rtx_equal_p (true, false) && ! side_effects_p (cond))
3970 return true;
3971
3972 /* Look for cases where we have (abs x) or (neg (abs X)). */
3973
3974 if (GET_MODE_CLASS (mode) == MODE_INT
3975 && GET_CODE (false) == NEG
3976 && rtx_equal_p (true, XEXP (false, 0))
3977 && comparison_p
3978 && rtx_equal_p (true, XEXP (cond, 0))
3979 && ! side_effects_p (true))
3980 switch (true_code)
3981 {
3982 case GT:
3983 case GE:
3984 return gen_unary (ABS, mode, mode, true);
3985 case LT:
3986 case LE:
3987 return gen_unary (NEG, mode, mode, gen_unary (ABS, mode, mode, true));
3988 }
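/* A standalone sketch (not part of this file) of the two rewrites above: a
   conditional that negates its own comparison operand is an absolute value,
   or a negated one.  X must not be the most negative integer.  */
#include <assert.h>

static void
check_abs_forms (int x)
{
  int a = x >= 0 ? x : -x;		/* (if_then_else (ge X 0) X (neg X)) == (abs X) */
  int n = x <= 0 ? x : -x;		/* the (neg (abs X)) form */
  assert (n == -a);
}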
3989
3990 /* Look for MIN or MAX. */
3991
3992 if ((! FLOAT_MODE_P (mode) | flag_fast_math)
3993 && comparison_p
3994 && rtx_equal_p (XEXP (cond, 0), true)
3995 && rtx_equal_p (XEXP (cond, 1), false)
3996 && ! side_effects_p (cond))
3997 switch (true_code)
3998 {
3999 case GE:
4000 case GT:
4001 return gen_binary (SMAX, mode, true, false);
4002 case LE:
4003 case LT:
4004 return gen_binary (SMIN, mode, true, false);
4005 case GEU:
4006 case GTU:
4007 return gen_binary (UMAX, mode, true, false);
4008 case LEU:
4009 case LTU:
4010 return gen_binary (UMIN, mode, true, false);
4011 }
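/* A standalone sketch (not part of this file) of the MIN/MAX detection
   above: a conditional whose arms are exactly the operands of its own
   comparison is a minimum or maximum.  */
#include <assert.h>

static void
check_min_max (int a, int b)
{
  int max = a > b ? a : b;		/* (if_then_else (gt A B) A B) == (smax A B) */
  int min = a < b ? a : b;		/* (if_then_else (lt A B) A B) == (smin A B) */
  assert ((min == a && max == b) || (min == b && max == a));
}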
4012
4013#if STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1
4014
4015 /* If we have (if_then_else COND (OP Z C1) Z) and OP is an identity when its
4016 second operand is zero, this can be done as (OP Z (mult COND C2)) where
4017 C2 = C1 * STORE_FLAG_VALUE. Similarly if OP has an outer ZERO_EXTEND or
4018 SIGN_EXTEND as long as Z is already extended (so we don't destroy it).
4019 We can do this kind of thing in some cases when STORE_FLAG_VALUE is
4020 neither of the above, but it isn't worth checking for. */
4021
4022 if (comparison_p && mode != VOIDmode && ! side_effects_p (x))
4023 {
4024 rtx t = make_compound_operation (true, SET);
4025 rtx f = make_compound_operation (false, SET);
4026 rtx cond_op0 = XEXP (cond, 0);
4027 rtx cond_op1 = XEXP (cond, 1);
4028 enum rtx_code op, extend_op = NIL;
4029 enum machine_mode m = mode;
4030 rtx z = 0, c1;
4031
4032 if ((GET_CODE (t) == PLUS || GET_CODE (t) == MINUS
4033 || GET_CODE (t) == IOR || GET_CODE (t) == XOR
4034 || GET_CODE (t) == ASHIFT
4035 || GET_CODE (t) == LSHIFTRT || GET_CODE (t) == ASHIFTRT)
4036 && rtx_equal_p (XEXP (t, 0), f))
4037 c1 = XEXP (t, 1), op = GET_CODE (t), z = f;
4038
4039 /* If an identity-zero op is commutative, check whether there
4040 would be a match if we swapped the operands. */
4041 else if ((GET_CODE (t) == PLUS || GET_CODE (t) == IOR
4042 || GET_CODE (t) == XOR)
4043 && rtx_equal_p (XEXP (t, 1), f))
4044 c1 = XEXP (t, 0), op = GET_CODE (t), z = f;
4045 else if (GET_CODE (t) == SIGN_EXTEND
4046 && (GET_CODE (XEXP (t, 0)) == PLUS
4047 || GET_CODE (XEXP (t, 0)) == MINUS
4048 || GET_CODE (XEXP (t, 0)) == IOR
4049 || GET_CODE (XEXP (t, 0)) == XOR
4050 || GET_CODE (XEXP (t, 0)) == ASHIFT
4051 || GET_CODE (XEXP (t, 0)) == LSHIFTRT
4052 || GET_CODE (XEXP (t, 0)) == ASHIFTRT)
4053 && GET_CODE (XEXP (XEXP (t, 0), 0)) == SUBREG
4054 && subreg_lowpart_p (XEXP (XEXP (t, 0), 0))
4055 && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 0)), f)
4056 && (num_sign_bit_copies (f, GET_MODE (f))
4057 > (GET_MODE_BITSIZE (mode)
4058 - GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (t, 0), 0))))))
4059 {
4060 c1 = XEXP (XEXP (t, 0), 1); z = f; op = GET_CODE (XEXP (t, 0));
4061 extend_op = SIGN_EXTEND;
4062 m = GET_MODE (XEXP (t, 0));
4063 }
4064 else if (GET_CODE (t) == SIGN_EXTEND
4065 && (GET_CODE (XEXP (t, 0)) == PLUS
4066 || GET_CODE (XEXP (t, 0)) == IOR
4067 || GET_CODE (XEXP (t, 0)) == XOR)
4068 && GET_CODE (XEXP (XEXP (t, 0), 1)) == SUBREG
4069 && subreg_lowpart_p (XEXP (XEXP (t, 0), 1))
4070 && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 1)), f)
4071 && (num_sign_bit_copies (f, GET_MODE (f))
4072 > (GET_MODE_BITSIZE (mode)
4073 - GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (t, 0), 1))))))
4074 {
4075 c1 = XEXP (XEXP (t, 0), 0); z = f; op = GET_CODE (XEXP (t, 0));
4076 extend_op = SIGN_EXTEND;
4077 m = GET_MODE (XEXP (t, 0));
4078 }
4079 else if (GET_CODE (t) == ZERO_EXTEND
4080 && (GET_CODE (XEXP (t, 0)) == PLUS
4081 || GET_CODE (XEXP (t, 0)) == MINUS
4082 || GET_CODE (XEXP (t, 0)) == IOR
4083 || GET_CODE (XEXP (t, 0)) == XOR
4084 || GET_CODE (XEXP (t, 0)) == ASHIFT
4085 || GET_CODE (XEXP (t, 0)) == LSHIFTRT
4086 || GET_CODE (XEXP (t, 0)) == ASHIFTRT)
4087 && GET_CODE (XEXP (XEXP (t, 0), 0)) == SUBREG
4088 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4089 && subreg_lowpart_p (XEXP (XEXP (t, 0), 0))
4090 && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 0)), f)
4091 && ((nonzero_bits (f, GET_MODE (f))
4092 & ~ GET_MODE_MASK (GET_MODE (XEXP (XEXP (t, 0), 0))))
4093 == 0))
4094 {
4095 c1 = XEXP (XEXP (t, 0), 1); z = f; op = GET_CODE (XEXP (t, 0));
4096 extend_op = ZERO_EXTEND;
4097 m = GET_MODE (XEXP (t, 0));
4098 }
4099 else if (GET_CODE (t) == ZERO_EXTEND
4100 && (GET_CODE (XEXP (t, 0)) == PLUS
4101 || GET_CODE (XEXP (t, 0)) == IOR
4102 || GET_CODE (XEXP (t, 0)) == XOR)
4103 && GET_CODE (XEXP (XEXP (t, 0), 1)) == SUBREG
4104 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4105 && subreg_lowpart_p (XEXP (XEXP (t, 0), 1))
4106 && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 1)), f)
4107 && ((nonzero_bits (f, GET_MODE (f))
4108 & ~ GET_MODE_MASK (GET_MODE (XEXP (XEXP (t, 0), 1))))
4109 == 0))
4110 {
4111 c1 = XEXP (XEXP (t, 0), 0); z = f; op = GET_CODE (XEXP (t, 0));
4112 extend_op = ZERO_EXTEND;
4113 m = GET_MODE (XEXP (t, 0));
4114 }
4115
4116 if (z)
4117 {
4118 temp = subst (gen_binary (true_code, m, cond_op0, cond_op1),
4119 pc_rtx, pc_rtx, 0, 0);
4120 temp = gen_binary (MULT, m, temp,
4121 gen_binary (MULT, m, c1, const_true_rtx));
4122 temp = subst (temp, pc_rtx, pc_rtx, 0, 0);
4123 temp = gen_binary (op, m, gen_lowpart_for_combine (m, z), temp);
4124
4125 if (extend_op != NIL)
4126 temp = gen_unary (extend_op, mode, m, temp);
4127
4128 return temp;
4129 }
4130 }
4131#endif
4132
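/* A standalone sketch (not part of this file) of the transformation above
   for STORE_FLAG_VALUE == 1: with an identity-zero OP such as PLUS,
   (if_then_else COND (plus Z C1) Z) equals (plus Z (mult COND C1)) when
   COND is the 0/1 store-flag value.  Overflow is ignored.  */
#include <assert.h>

static void
check_store_flag_mult (int z, int c1, int a, int b)
{
  int cond = a == b;			/* store-flag value: 1 if true, 0 if false */
  assert ((cond ? z + c1 : z) == z + cond * c1);
}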
4133 /* If we have (if_then_else (ne A 0) C1 0) and either A is known to be 0 or
4134 1 and C1 is a single bit or A is known to be 0 or -1 and C1 is the
4135 negation of a single bit, we can convert this operation to a shift. We
4136 can actually do this more generally, but it doesn't seem worth it. */
4137
4138 if (true_code == NE && XEXP (cond, 1) == const0_rtx
4139 && false == const0_rtx && GET_CODE (true) == CONST_INT
4140 && ((1 == nonzero_bits (XEXP (cond, 0), mode)
4141 && (i = exact_log2 (INTVAL (true))) >= 0)
4142 || ((num_sign_bit_copies (XEXP (cond, 0), mode)
4143 == GET_MODE_BITSIZE (mode))
4144 && (i = exact_log2 (- INTVAL (true))) >= 0)))
4145 return
4146 simplify_shift_const (NULL_RTX, ASHIFT, mode,
4147 gen_lowpart_for_combine (mode, XEXP (cond, 0)), i);
4148
4149 return x;
4150}
4151\f
4152/* Simplify X, a SET expression. Return the new expression. */
4153
4154static rtx
4155simplify_set (x)
4156 rtx x;
4157{
4158 rtx src = SET_SRC (x);
4159 rtx dest = SET_DEST (x);
4160 enum machine_mode mode
4161 = GET_MODE (src) != VOIDmode ? GET_MODE (src) : GET_MODE (dest);
4162 rtx other_insn;
4163 rtx *cc_use;
4164
4165 /* (set (pc) (return)) gets written as (return). */
4166 if (GET_CODE (dest) == PC && GET_CODE (src) == RETURN)
4167 return src;
4168
4169 /* Now that we know for sure which bits of SRC we are using, see if we can
4170 simplify the expression for the object knowing that we only need the
4171 low-order bits. */
4172
4173 if (GET_MODE_CLASS (mode) == MODE_INT)
4174 src = force_to_mode (src, mode, GET_MODE_MASK (mode), NULL_RTX, 0);
4175
4176 /* If we are setting CC0 or if the source is a COMPARE, look for the use of
4177 the comparison result and try to simplify it unless we already have used
4178 undobuf.other_insn. */
4179 if ((GET_CODE (src) == COMPARE
4180#ifdef HAVE_cc0
4181 || dest == cc0_rtx
4182#endif
4183 )
4184 && (cc_use = find_single_use (dest, subst_insn, &other_insn)) != 0
4185 && (undobuf.other_insn == 0 || other_insn == undobuf.other_insn)
4186 && GET_RTX_CLASS (GET_CODE (*cc_use)) == '<'
4187 && rtx_equal_p (XEXP (*cc_use, 0), dest))
4188 {
4189 enum rtx_code old_code = GET_CODE (*cc_use);
4190 enum rtx_code new_code;
4191 rtx op0, op1;
4192 int other_changed = 0;
4193 enum machine_mode compare_mode = GET_MODE (dest);
4194
4195 if (GET_CODE (src) == COMPARE)
4196 op0 = XEXP (src, 0), op1 = XEXP (src, 1);
4197 else
4198 op0 = src, op1 = const0_rtx;
4199
4200 /* Simplify our comparison, if possible. */
4201 new_code = simplify_comparison (old_code, &op0, &op1);
4202
4203#ifdef EXTRA_CC_MODES
4204 /* If this machine has CC modes other than CCmode, check to see if we
4205 need to use a different CC mode here. */
4206 compare_mode = SELECT_CC_MODE (new_code, op0, op1);
4207#endif /* EXTRA_CC_MODES */
4208
4209#if !defined (HAVE_cc0) && defined (EXTRA_CC_MODES)
4210 /* If the mode changed, we have to change SET_DEST, the mode in the
4211 compare, and the mode in the place SET_DEST is used. If SET_DEST is
4212 a hard register, just build new versions with the proper mode. If it
4213 is a pseudo, we lose unless it is the only time we set the pseudo, in
4214 which case we can safely change its mode. */
4215 if (compare_mode != GET_MODE (dest))
4216 {
4217 int regno = REGNO (dest);
4218 rtx new_dest = gen_rtx (REG, compare_mode, regno);
4219
4220 if (regno < FIRST_PSEUDO_REGISTER
4221 || (reg_n_sets[regno] == 1 && ! REG_USERVAR_P (dest)))
4222 {
4223 if (regno >= FIRST_PSEUDO_REGISTER)
4224 SUBST (regno_reg_rtx[regno], new_dest);
4225
4226 SUBST (SET_DEST (x), new_dest);
4227 SUBST (XEXP (*cc_use, 0), new_dest);
4228 other_changed = 1;
4229
4230 dest = new_dest;
4231 }
4232 }
4233#endif
4234
4235 /* If the code changed, we have to build a new comparison in
4236 undobuf.other_insn. */
4237 if (new_code != old_code)
4238 {
4239 unsigned HOST_WIDE_INT mask;
4240
4241 SUBST (*cc_use, gen_rtx_combine (new_code, GET_MODE (*cc_use),
4242 dest, const0_rtx));
4243
4244 /* If the only change we made was to change an EQ into an NE or
4245 vice versa, OP0 has only one bit that might be nonzero, and OP1
4246 is zero, check if changing the user of the condition code will
4247 produce a valid insn. If it won't, we can keep the original code
4248 in that insn by surrounding our operation with an XOR. */
4249
4250 if (((old_code == NE && new_code == EQ)
4251 || (old_code == EQ && new_code == NE))
4252 && ! other_changed && op1 == const0_rtx
4253 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
4254 && exact_log2 (mask = nonzero_bits (op0, GET_MODE (op0))) >= 0)
4255
4256 rtx pat = PATTERN (other_insn), note = 0;
4257
4258 if ((recog_for_combine (&pat, other_insn, &note) < 0
4259 && ! check_asm_operands (pat)))
4260 {
4261 PUT_CODE (*cc_use, old_code);
4262 other_insn = 0;
4263
4264 op0 = gen_binary (XOR, GET_MODE (op0), op0, GEN_INT (mask));
4265 }
4266 }
4267
4268 other_changed = 1;
4269 }
4270
4271 if (other_changed)
4272 undobuf.other_insn = other_insn;
4273
4274#ifdef HAVE_cc0
4275 /* If we are now comparing against zero, change our source if
4276 needed. If we do not use cc0, we always have a COMPARE. */
4277 if (op1 == const0_rtx && dest == cc0_rtx)
4278 {
4279 SUBST (SET_SRC (x), op0);
4280 src = op0;
4281 }
4282 else
4283#endif
4284
4285 /* Otherwise, if we didn't previously have a COMPARE in the
4286 correct mode, we need one. */
4287 if (GET_CODE (src) != COMPARE || GET_MODE (src) != compare_mode)
4288 {
4289 SUBST (SET_SRC (x),
4290 gen_rtx_combine (COMPARE, compare_mode, op0, op1));
4291 src = SET_SRC (x);
4292 }
4293 else
4294 {
4295 /* Otherwise, update the COMPARE if needed. */
4296 SUBST (XEXP (src, 0), op0);
4297 SUBST (XEXP (src, 1), op1);
4298 }
4299 }
4300 else
4301 {
4302 /* Get SET_SRC in a form where we have placed back any
4303 compound expressions. Then do the checks below. */
4304 src = make_compound_operation (src, SET);
4305 SUBST (SET_SRC (x), src);
4306 }
4307
4308 /* If we have (set x (subreg:m1 (op:m2 ...) 0)) with OP being some operation,
4309 and X being a REG or (subreg (reg)), we may be able to convert this to
4310 (set (subreg:m2 x) (op)).
4311
4312 We can always do this if M1 is narrower than M2 because that means that
4313 we only care about the low bits of the result.
4314
4315 However, on machines without WORD_REGISTER_OPERATIONS defined, we cannot
4316 perform a narrower operation than requested since the high-order bits will
4317 be undefined. On machines where it is defined, this transformation is safe
4318 as long as M1 and M2 have the same number of words. */
4319
4320 if (GET_CODE (src) == SUBREG && subreg_lowpart_p (src)
4321 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (src))) != 'o'
4322 && (((GET_MODE_SIZE (GET_MODE (src)) + (UNITS_PER_WORD - 1))
4323 / UNITS_PER_WORD)
4324 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
4325 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
4326#ifndef WORD_REGISTER_OPERATIONS
4327 && (GET_MODE_SIZE (GET_MODE (src))
4328 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (src))))
4329#endif
4330 && (GET_CODE (dest) == REG
4331 || (GET_CODE (dest) == SUBREG
4332 && GET_CODE (SUBREG_REG (dest)) == REG)))
4333 {
4334 SUBST (SET_DEST (x),
4335 gen_lowpart_for_combine (GET_MODE (SUBREG_REG (src)),
4336 dest));
4337 SUBST (SET_SRC (x), SUBREG_REG (src));
4338
4339 src = SET_SRC (x), dest = SET_DEST (x);
4340 }
4341
4342#ifdef LOAD_EXTEND_OP
4343 /* If we have (set FOO (subreg:M (mem:N BAR) 0)) with M wider than N, this
4344 would require a paradoxical subreg. Replace the subreg with a
4345 zero_extend to avoid the reload that would otherwise be required. */
4346
4347 if (GET_CODE (src) == SUBREG && subreg_lowpart_p (src)
4348 && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (src))) != NIL
4349 && SUBREG_WORD (src) == 0
4350 && (GET_MODE_SIZE (GET_MODE (src))
4351 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (src))))
4352 && GET_CODE (SUBREG_REG (src)) == MEM)
4353 {
4354 SUBST (SET_SRC (x),
4355 gen_rtx_combine (LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (src))),
4356 GET_MODE (src), XEXP (src, 0)));
4357
4358 src = SET_SRC (x);
4359 }
4360#endif
4361
4362 /* If we don't have a conditional move, SET_SRC is an IF_THEN_ELSE, and we
4363 are comparing an item known to be 0 or -1 against 0, use a logical
4364 operation instead. Check for one of the arms being an IOR of the other
4365 arm with some value. We compute three terms to be IOR'ed together. In
4366 practice, at most two will be nonzero. Then we do the IOR's. */
4367
4368 if (GET_CODE (dest) != PC
4369 && GET_CODE (src) == IF_THEN_ELSE
4370#ifdef HAVE_conditional_move
4371 && ! HAVE_conditional_move
4372#endif
4373 && GET_MODE_CLASS (GET_MODE (src)) == MODE_INT
4374 && (GET_CODE (XEXP (src, 0)) == EQ || GET_CODE (XEXP (src, 0)) == NE)
4375 && XEXP (XEXP (src, 0), 1) == const0_rtx
4376 && GET_MODE (src) == GET_MODE (XEXP (XEXP (src, 0), 0))
4377 && (num_sign_bit_copies (XEXP (XEXP (src, 0), 0),
4378 GET_MODE (XEXP (XEXP (src, 0), 0)))
4379 == GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (src, 0), 0))))
4380 && ! side_effects_p (src))
4381 {
4382 rtx true = (GET_CODE (XEXP (src, 0)) == NE
4383 ? XEXP (src, 1) : XEXP (src, 2));
4384 rtx false = (GET_CODE (XEXP (src, 0)) == NE
4385 ? XEXP (src, 2) : XEXP (src, 1));
4386 rtx term1 = const0_rtx, term2, term3;
4387
4388 if (GET_CODE (true) == IOR && rtx_equal_p (XEXP (true, 0), false))
4389 term1 = false, true = XEXP (true, 1), false = const0_rtx;
4390 else if (GET_CODE (true) == IOR
4391 && rtx_equal_p (XEXP (true, 1), false))
4392 term1 = false, true = XEXP (true, 0), false = const0_rtx;
4393 else if (GET_CODE (false) == IOR
4394 && rtx_equal_p (XEXP (false, 0), true))
4395 term1 = true, false = XEXP (false, 1), true = const0_rtx;
4396 else if (GET_CODE (false) == IOR
4397 && rtx_equal_p (XEXP (false, 1), true))
4398 term1 = true, false = XEXP (false, 0), true = const0_rtx;
4399
4400 term2 = gen_binary (AND, GET_MODE (src), XEXP (XEXP (src, 0), 0), true);
4401 term3 = gen_binary (AND, GET_MODE (src),
0c1c8ea6 4402 gen_unary (NOT, GET_MODE (src), GET_MODE (src),
8079805d
RK
4403 XEXP (XEXP (src, 0), 0)),
4404 false);
4405
4406 SUBST (SET_SRC (x),
4407 gen_binary (IOR, GET_MODE (src),
4408 gen_binary (IOR, GET_MODE (src), term1, term2),
4409 term3));
4410
4411 src = SET_SRC (x);
4412 }

  /* If either SRC or DEST is a CLOBBER of (const_int 0), make this
     whole thing fail.  */
  if (GET_CODE (src) == CLOBBER && XEXP (src, 0) == const0_rtx)
    return src;
  else if (GET_CODE (dest) == CLOBBER && XEXP (dest, 0) == const0_rtx)
    return dest;
  else
    /* Convert this into a field assignment operation, if possible.  */
    return make_field_assignment (x);
}

/* Simplify X, an AND, IOR, or XOR operation, and return the simplified
   result.  LAST is nonzero if this is the last retry.  */

static rtx
simplify_logical (x, last)
     rtx x;
     int last;
{
  enum machine_mode mode = GET_MODE (x);
  rtx op0 = XEXP (x, 0);
  rtx op1 = XEXP (x, 1);

  switch (GET_CODE (x))
    {
    case AND:
      /* Convert (A ^ B) & A to A & (~ B) since the latter is often a single
	 insn (and may simplify more).  */
      if (GET_CODE (op0) == XOR
	  && rtx_equal_p (XEXP (op0, 0), op1)
	  && ! side_effects_p (op1))
	x = gen_binary (AND, mode,
			gen_unary (NOT, mode, mode, XEXP (op0, 1)), op1);

      if (GET_CODE (op0) == XOR
	  && rtx_equal_p (XEXP (op0, 1), op1)
	  && ! side_effects_p (op1))
	x = gen_binary (AND, mode,
			gen_unary (NOT, mode, mode, XEXP (op0, 0)), op1);

      /* Similarly for (~ (A ^ B)) & A.  */
      if (GET_CODE (op0) == NOT
	  && GET_CODE (XEXP (op0, 0)) == XOR
	  && rtx_equal_p (XEXP (XEXP (op0, 0), 0), op1)
	  && ! side_effects_p (op1))
	x = gen_binary (AND, mode, XEXP (XEXP (op0, 0), 1), op1);

      if (GET_CODE (op0) == NOT
	  && GET_CODE (XEXP (op0, 0)) == XOR
	  && rtx_equal_p (XEXP (XEXP (op0, 0), 1), op1)
	  && ! side_effects_p (op1))
	x = gen_binary (AND, mode, XEXP (XEXP (op0, 0), 0), op1);

      if (GET_CODE (op1) == CONST_INT)
	{
	  x = simplify_and_const_int (x, mode, op0, INTVAL (op1));

	  /* If we have (ior (and (X C1) C2)) and the next restart would be
	     the last, simplify this by making C1 as small as possible
	     and then exit.  */
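	  /* E.g. (illustrative), (ior (and X (const_int 0xff)) (const_int 0x0f))
	     becomes (ior (and X (const_int 0xf0)) (const_int 0x0f)): the bits
	     of C1 that C2 already supplies are cleared.  */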
	  if (last
	      && GET_CODE (x) == IOR && GET_CODE (op0) == AND
	      && GET_CODE (XEXP (op0, 1)) == CONST_INT
	      && GET_CODE (op1) == CONST_INT)
	    return gen_binary (IOR, mode,
			       gen_binary (AND, mode, XEXP (op0, 0),
					   GEN_INT (INTVAL (XEXP (op0, 1))
						    & ~ INTVAL (op1))), op1);

	  if (GET_CODE (x) != AND)
	    return x;
	}

      /* Convert (A | B) & A to A.  */
      if (GET_CODE (op0) == IOR
	  && (rtx_equal_p (XEXP (op0, 0), op1)
	      || rtx_equal_p (XEXP (op0, 1), op1))
	  && ! side_effects_p (XEXP (op0, 0))
	  && ! side_effects_p (XEXP (op0, 1)))
	return op1;

      /* In the following group of tests (and those in case IOR below),
	 we start with some combination of logical operations and apply
	 the distributive law followed by the inverse distributive law.
	 Most of the time, this results in no change.  However, if some of
	 the operands are the same or inverses of each other, simplifications
	 will result.

	 For example, (and (ior A B) (not B)) can occur as the result of
	 expanding a bit field assignment.  When we apply the distributive
	 law to this, we get (ior (and (A (not B))) (and (B (not B)))),
	 which then simplifies to (and (A (not B))).

	 If we have (and (ior A B) C), apply the distributive law and then
	 the inverse distributive law to see if things simplify.  */

      if (GET_CODE (op0) == IOR || GET_CODE (op0) == XOR)
	{
	  x = apply_distributive_law
	    (gen_binary (GET_CODE (op0), mode,
			 gen_binary (AND, mode, XEXP (op0, 0), op1),
			 gen_binary (AND, mode, XEXP (op0, 1), op1)));
	  if (GET_CODE (x) != AND)
	    return x;
	}

      if (GET_CODE (op1) == IOR || GET_CODE (op1) == XOR)
	return apply_distributive_law
	  (gen_binary (GET_CODE (op1), mode,
		       gen_binary (AND, mode, XEXP (op1, 0), op0),
		       gen_binary (AND, mode, XEXP (op1, 1), op0)));

      /* Similarly, taking advantage of the fact that
	 (and (not A) (xor B C)) == (xor (ior A B) (ior A C))  */
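      /* (Bitwise check of the identity: when A is 1 both IORs yield 1 and
	 the XOR is 0; when A is 0 the right side reduces to (xor B C),
	 which equals (and (not A) (xor B C)) in both cases.)  */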

      if (GET_CODE (op0) == NOT && GET_CODE (op1) == XOR)
	return apply_distributive_law
	  (gen_binary (XOR, mode,
		       gen_binary (IOR, mode, XEXP (op0, 0), XEXP (op1, 0)),
		       gen_binary (IOR, mode, XEXP (op0, 0), XEXP (op1, 1))));

      else if (GET_CODE (op1) == NOT && GET_CODE (op0) == XOR)
	return apply_distributive_law
	  (gen_binary (XOR, mode,
		       gen_binary (IOR, mode, XEXP (op1, 0), XEXP (op0, 0)),
		       gen_binary (IOR, mode, XEXP (op1, 0), XEXP (op0, 1))));
      break;

    case IOR:
      /* (ior A C) is C if all bits of A that might be nonzero are on in C.  */
      if (GET_CODE (op1) == CONST_INT
	  && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
	  && (nonzero_bits (op0, mode) & ~ INTVAL (op1)) == 0)
	return op1;

      /* Convert (A & B) | A to A.  */
      if (GET_CODE (op0) == AND
	  && (rtx_equal_p (XEXP (op0, 0), op1)
	      || rtx_equal_p (XEXP (op0, 1), op1))
	  && ! side_effects_p (XEXP (op0, 0))
	  && ! side_effects_p (XEXP (op0, 1)))
	return op1;

      /* If we have (ior (and A B) C), apply the distributive law and then
	 the inverse distributive law to see if things simplify.  */

      if (GET_CODE (op0) == AND)
	{
	  x = apply_distributive_law
	    (gen_binary (AND, mode,
			 gen_binary (IOR, mode, XEXP (op0, 0), op1),
			 gen_binary (IOR, mode, XEXP (op0, 1), op1)));

	  if (GET_CODE (x) != IOR)
	    return x;
	}

      if (GET_CODE (op1) == AND)
	{
	  x = apply_distributive_law
	    (gen_binary (AND, mode,
			 gen_binary (IOR, mode, XEXP (op1, 0), op0),
			 gen_binary (IOR, mode, XEXP (op1, 1), op0)));

	  if (GET_CODE (x) != IOR)
	    return x;
	}

      /* Convert (ior (ashift A CX) (lshiftrt A CY)) where CX+CY equals the
	 mode size to (rotate A CX).  */
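      /* E.g. (illustrative), in SImode:
	 (ior (ashift A (const_int 8)) (lshiftrt A (const_int 24)))
	 becomes (rotate A (const_int 8)).  */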

      if (((GET_CODE (op0) == ASHIFT && GET_CODE (op1) == LSHIFTRT)
	   || (GET_CODE (op1) == ASHIFT && GET_CODE (op0) == LSHIFTRT))
	  && rtx_equal_p (XEXP (op0, 0), XEXP (op1, 0))
	  && GET_CODE (XEXP (op0, 1)) == CONST_INT
	  && GET_CODE (XEXP (op1, 1)) == CONST_INT
	  && (INTVAL (XEXP (op0, 1)) + INTVAL (XEXP (op1, 1))
	      == GET_MODE_BITSIZE (mode)))
	return gen_rtx (ROTATE, mode, XEXP (op0, 0),
			(GET_CODE (op0) == ASHIFT
			 ? XEXP (op0, 1) : XEXP (op1, 1)));

      /* If OP0 is (ashiftrt (plus ...) C), it might actually be
	 a (sign_extend (plus ...)).  If so, and if OP1 is a CONST_INT whose
	 bits are not affected by the PLUS, this can really be done
	 as a PLUS and we can associate.  We do this by seeing if OP1
	 can be safely shifted left C bits.  */
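      /* Schematically (illustrative): when C2 << C lands only on bits known
	 to be zero in the PLUS, (ior (ashiftrt (plus X C1) C) C2) becomes
	 (ashiftrt (plus X (C1 | (C2 << C))) C).  */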
      if (GET_CODE (op1) == CONST_INT && GET_CODE (op0) == ASHIFTRT
	  && GET_CODE (XEXP (op0, 0)) == PLUS
	  && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
	  && GET_CODE (XEXP (op0, 1)) == CONST_INT
	  && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT)
	{
	  int count = INTVAL (XEXP (op0, 1));
	  HOST_WIDE_INT mask = INTVAL (op1) << count;

	  if (mask >> count == INTVAL (op1)
	      && (mask & nonzero_bits (XEXP (op0, 0), mode)) == 0)
	    {
	      SUBST (XEXP (XEXP (op0, 0), 1),
		     GEN_INT (INTVAL (XEXP (XEXP (op0, 0), 1)) | mask));
	      return op0;
	    }
	}
      break;

    case XOR:
      /* Convert (XOR (NOT x) (NOT y)) to (XOR x y).
	 Also convert (XOR (NOT x) y) to (NOT (XOR x y)), similarly for
	 (NOT y).  */
      {
	int num_negated = 0;

	if (GET_CODE (op0) == NOT)
	  num_negated++, op0 = XEXP (op0, 0);
	if (GET_CODE (op1) == NOT)
	  num_negated++, op1 = XEXP (op1, 0);

	if (num_negated == 2)
	  {
	    SUBST (XEXP (x, 0), op0);
	    SUBST (XEXP (x, 1), op1);
	  }
	else if (num_negated == 1)
	  return gen_unary (NOT, mode, mode, gen_binary (XOR, mode, op0, op1));
      }

      /* Convert (xor (and A B) B) to (and (not A) B).  The latter may
	 correspond to a machine insn or result in further simplifications
	 if B is a constant.  */

      if (GET_CODE (op0) == AND
	  && rtx_equal_p (XEXP (op0, 1), op1)
	  && ! side_effects_p (op1))
	return gen_binary (AND, mode,
			   gen_unary (NOT, mode, mode, XEXP (op0, 0)),
			   op1);

      else if (GET_CODE (op0) == AND
	       && rtx_equal_p (XEXP (op0, 0), op1)
	       && ! side_effects_p (op1))
	return gen_binary (AND, mode,
			   gen_unary (NOT, mode, mode, XEXP (op0, 1)),
			   op1);

#if STORE_FLAG_VALUE == 1
      /* (xor (comparison foo bar) (const_int 1)) can become the reversed
	 comparison.  */
      if (op1 == const1_rtx
	  && GET_RTX_CLASS (GET_CODE (op0)) == '<'
	  && reversible_comparison_p (op0))
	return gen_rtx_combine (reverse_condition (GET_CODE (op0)),
				mode, XEXP (op0, 0), XEXP (op0, 1));

      /* (lshiftrt foo C) where C is the number of bits in FOO minus 1
	 is (lt foo (const_int 0)), so we can perform the above
	 simplification.  */
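      /* E.g. (illustrative), in SImode:
	 (xor (lshiftrt X (const_int 31)) (const_int 1))
	 becomes (ge X (const_int 0)).  */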

      if (op1 == const1_rtx
	  && GET_CODE (op0) == LSHIFTRT
	  && GET_CODE (XEXP (op0, 1)) == CONST_INT
	  && INTVAL (XEXP (op0, 1)) == GET_MODE_BITSIZE (mode) - 1)
	return gen_rtx_combine (GE, mode, XEXP (op0, 0), const0_rtx);
#endif

      /* (xor (comparison foo bar) (const_int sign-bit))
	 when STORE_FLAG_VALUE is the sign bit.  */
      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
	  && (STORE_FLAG_VALUE
	      == (HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (mode) - 1))
	  && op1 == const_true_rtx
	  && GET_RTX_CLASS (GET_CODE (op0)) == '<'
	  && reversible_comparison_p (op0))
	return gen_rtx_combine (reverse_condition (GET_CODE (op0)),
				mode, XEXP (op0, 0), XEXP (op0, 1));
      break;
    }

  return x;
}

/* We consider ZERO_EXTRACT, SIGN_EXTRACT, and SIGN_EXTEND as "compound
   operations" because they can be replaced with two more basic operations.
   ZERO_EXTEND is also considered "compound" because it can be replaced with
   an AND operation, which is simpler, though only one operation.

   The function expand_compound_operation is called with an rtx expression
   and will convert it to the appropriate shifts and AND operations,
   simplifying at each stage.

   The function make_compound_operation is called to convert an expression
   consisting of shifts and ANDs into the equivalent compound expression.
   It is the inverse of this function, loosely speaking.  */
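
/* As a rough illustration (not tied to any one target):
   (sign_extend:SI (reg:QI X)) expands, roughly, to
   (ashiftrt:SI (ashift:SI X (const_int 24)) (const_int 24)),
   and make_compound_operation performs the reverse rewrite.  */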

static rtx
expand_compound_operation (x)
     rtx x;
{
  int pos = 0, len;
  int unsignedp = 0;
  int modewidth;
  rtx tem;

  switch (GET_CODE (x))
    {
    case ZERO_EXTEND:
      unsignedp = 1;
    case SIGN_EXTEND:
      /* We can't necessarily use a const_int for a multiword mode;
	 it depends on implicitly extending the value.
	 Since we don't know the right way to extend it,
	 we can't tell whether the implicit way is right.

	 Even for a mode that is no wider than a const_int,
	 we can't win, because we need to sign extend one of its bits through
	 the rest of it, and we don't know which bit.  */
      if (GET_CODE (XEXP (x, 0)) == CONST_INT)
	return x;

      /* Return if (subreg:MODE FROM 0) is not a safe replacement for
	 (zero_extend:MODE FROM) or (sign_extend:MODE FROM).  It is for any MEM
	 because (SUBREG (MEM...)) is guaranteed to cause the MEM to be
	 reloaded.  If not for that, MEM's would very rarely be safe.

	 Reject MODEs bigger than a word, because we might not be able
	 to reference a two-register group starting with an arbitrary register
	 (and currently gen_lowpart might crash for a SUBREG).  */

      if (GET_MODE_SIZE (GET_MODE (XEXP (x, 0))) > UNITS_PER_WORD)
	return x;

      len = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)));
      /* If the inner object has VOIDmode (the only way this can happen
	 is if it is an ASM_OPERANDS), we can't do anything since we don't
	 know how much masking to do.  */
      if (len == 0)
	return x;

      break;

    case ZERO_EXTRACT:
      unsignedp = 1;
    case SIGN_EXTRACT:
      /* If the operand is a CLOBBER, just return it.  */
      if (GET_CODE (XEXP (x, 0)) == CLOBBER)
	return XEXP (x, 0);

      if (GET_CODE (XEXP (x, 1)) != CONST_INT
	  || GET_CODE (XEXP (x, 2)) != CONST_INT
	  || GET_MODE (XEXP (x, 0)) == VOIDmode)
	return x;

      len = INTVAL (XEXP (x, 1));
      pos = INTVAL (XEXP (x, 2));

      /* If this goes outside the object being extracted, replace the object
	 with a (use (mem ...)) construct that only combine understands
	 and is used only for this purpose.  */
      if (len + pos > GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))))
	SUBST (XEXP (x, 0), gen_rtx (USE, GET_MODE (x), XEXP (x, 0)));

#if BITS_BIG_ENDIAN
      pos = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - len - pos;
#endif
      break;

    default:
      return x;
    }

  /* If we reach here, we want to return a pair of shifts.  The inner
     shift is a left shift of BITSIZE - POS - LEN bits.  The outer
     shift is a right shift of BITSIZE - LEN bits.  It is arithmetic or
     logical depending on the value of UNSIGNEDP.

     If this was a ZERO_EXTEND or ZERO_EXTRACT, this pair of shifts will be
     converted into an AND of a shift.

     We must check for the case where the left shift would have a negative
     count.  This can happen in a case like (x >> 31) & 255 on machines
     that can't shift by a constant.  On those machines, we would first
     combine the shift with the AND to produce a variable-position
     extraction.  Then the constant of 31 would be substituted in to produce
     such a position.  */
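
  /* E.g. (illustrative): for (sign_extract:SI X (const_int 8) (const_int 4)),
     MODEWIDTH is 32 and the result is
     (ashiftrt (ashift X (const_int 20)) (const_int 24)).  */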

  modewidth = GET_MODE_BITSIZE (GET_MODE (x));
  if (modewidth >= pos + len)
    tem = simplify_shift_const (NULL_RTX, unsignedp ? LSHIFTRT : ASHIFTRT,
				GET_MODE (x),
				simplify_shift_const (NULL_RTX, ASHIFT,
						      GET_MODE (x),
						      XEXP (x, 0),
						      modewidth - pos - len),
				modewidth - len);

  else if (unsignedp && len < HOST_BITS_PER_WIDE_INT)
    tem = simplify_and_const_int (NULL_RTX, GET_MODE (x),
				  simplify_shift_const (NULL_RTX, LSHIFTRT,
							GET_MODE (x),
							XEXP (x, 0), pos),
				  ((HOST_WIDE_INT) 1 << len) - 1);
  else
    /* Any other cases we can't handle.  */
    return x;

  /* If we couldn't do this for some reason, return the original
     expression.  */
  if (GET_CODE (tem) == CLOBBER)
    return x;

  return tem;
}

/* X is a SET which contains an assignment of one object into
   a part of another (such as a bit-field assignment, STRICT_LOW_PART,
   or certain SUBREGS).  If possible, convert it into a series of
   logical operations.

   We half-heartedly support variable positions, but do not at all
   support variable lengths.  */
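
/* For instance (a purely illustrative case), (set (zero_extract X 8 4) Y)
   is rewritten by the loop below as
   (set X (ior (and (not (ashift (const_int 255) (const_int 4))) X)
	       (ashift (and Y (const_int 255)) (const_int 4)))).  */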

static rtx
expand_field_assignment (x)
     rtx x;
{
  rtx inner;
  rtx pos;			/* Always counts from low bit.  */
  int len;
  rtx mask;
  enum machine_mode compute_mode;

  /* Loop until we find something we can't simplify.  */
  while (1)
    {
      if (GET_CODE (SET_DEST (x)) == STRICT_LOW_PART
	  && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG)
	{
	  inner = SUBREG_REG (XEXP (SET_DEST (x), 0));
	  len = GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)));
	  pos = const0_rtx;
	}
      else if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
	       && GET_CODE (XEXP (SET_DEST (x), 1)) == CONST_INT)
	{
	  inner = XEXP (SET_DEST (x), 0);
	  len = INTVAL (XEXP (SET_DEST (x), 1));
	  pos = XEXP (SET_DEST (x), 2);

	  /* If the position is constant and spans the width of INNER,
	     surround INNER with a USE to indicate this.  */
	  if (GET_CODE (pos) == CONST_INT
	      && INTVAL (pos) + len > GET_MODE_BITSIZE (GET_MODE (inner)))
	    inner = gen_rtx (USE, GET_MODE (SET_DEST (x)), inner);

#if BITS_BIG_ENDIAN
	  if (GET_CODE (pos) == CONST_INT)
	    pos = GEN_INT (GET_MODE_BITSIZE (GET_MODE (inner)) - len
			   - INTVAL (pos));
	  else if (GET_CODE (pos) == MINUS
		   && GET_CODE (XEXP (pos, 1)) == CONST_INT
		   && (INTVAL (XEXP (pos, 1))
		       == GET_MODE_BITSIZE (GET_MODE (inner)) - len))
	    /* If position is ADJUST - X, new position is X.  */
	    pos = XEXP (pos, 0);
	  else
	    pos = gen_binary (MINUS, GET_MODE (pos),
			      GEN_INT (GET_MODE_BITSIZE (GET_MODE (inner))
				       - len),
			      pos);
#endif
	}

      /* A SUBREG between two modes that occupy the same number of words
	 can be done by moving the SUBREG to the source.  */
      else if (GET_CODE (SET_DEST (x)) == SUBREG
	       && (((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
		     + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
		   == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
			+ (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
	{
	  x = gen_rtx (SET, VOIDmode, SUBREG_REG (SET_DEST (x)),
		       gen_lowpart_for_combine (GET_MODE (SUBREG_REG (SET_DEST (x))),
						SET_SRC (x)));
	  continue;
	}
      else
	break;

      while (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner))
	inner = SUBREG_REG (inner);

      compute_mode = GET_MODE (inner);

      /* Compute a mask of LEN bits, if we can do this on the host machine.  */
      if (len < HOST_BITS_PER_WIDE_INT)
	mask = GEN_INT (((HOST_WIDE_INT) 1 << len) - 1);
      else
	break;

      /* Now compute the equivalent expression.  Make a copy of INNER
	 for the SET_DEST in case it is a MEM into which we will substitute;
	 we don't want shared RTL in that case.  */
      x = gen_rtx (SET, VOIDmode, copy_rtx (inner),
		   gen_binary (IOR, compute_mode,
			       gen_binary (AND, compute_mode,
					   gen_unary (NOT, compute_mode,
						      compute_mode,
						      gen_binary (ASHIFT,
								  compute_mode,
								  mask, pos)),
					   inner),
			       gen_binary (ASHIFT, compute_mode,
					   gen_binary (AND, compute_mode,
						       gen_lowpart_for_combine
						       (compute_mode,
							SET_SRC (x)),
						       mask),
					   pos)));
    }

  return x;
}

/* Return an RTX for a reference to LEN bits of INNER.  If POS_RTX is nonzero,
   it is an RTX that represents a variable starting position; otherwise,
   POS is the (constant) starting bit position (counted from the LSB).

   INNER may be a USE.  This will occur when we started with a bitfield
   that went outside the boundary of the object in memory, which is
   allowed on most machines.  To isolate this case, we produce a USE
   whose mode is wide enough and surround the MEM with it.  The only
   code that understands the USE is this routine.  If it is not removed,
   it will cause the resulting insn not to match.

   UNSIGNEDP is non-zero for an unsigned reference and zero for a
   signed reference.

   IN_DEST is non-zero if this is a reference in the destination of a
   SET.  This is used when a ZERO_ or SIGN_EXTRACT isn't needed.  If non-zero,
   a STRICT_LOW_PART will be used, if zero, ZERO_EXTEND or SIGN_EXTEND will
   be used.

   IN_COMPARE is non-zero if we are in a COMPARE.  This means that a
   ZERO_EXTRACT should be built even for bits starting at bit 0.

   MODE is the desired mode of the result (if IN_DEST == 0).  */

static rtx
make_extraction (mode, inner, pos, pos_rtx, len,
		 unsignedp, in_dest, in_compare)
     enum machine_mode mode;
     rtx inner;
     int pos;
     rtx pos_rtx;
     int len;
     int unsignedp;
     int in_dest, in_compare;
{
  /* This mode describes the size of the storage area
     to fetch the overall value from.  Within that, we
     ignore the POS lowest bits, etc.  */
  enum machine_mode is_mode = GET_MODE (inner);
  enum machine_mode inner_mode;
  enum machine_mode wanted_mem_mode = byte_mode;
  enum machine_mode pos_mode = word_mode;
  enum machine_mode extraction_mode = word_mode;
  enum machine_mode tmode = mode_for_size (len, MODE_INT, 1);
  int spans_byte = 0;
  rtx new = 0;
  rtx orig_pos_rtx = pos_rtx;
  int orig_pos;

  /* Get some information about INNER and get the innermost object.  */
  if (GET_CODE (inner) == USE)
    /* (use:SI (mem:QI foo)) stands for (mem:SI foo).  */
    /* We don't need to adjust the position because we set up the USE
       to pretend that it was a full-word object.  */
    spans_byte = 1, inner = XEXP (inner, 0);
  else if (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner))
    {
      /* If going from (subreg:SI (mem:QI ...)) to (mem:QI ...),
	 consider just the QI as the memory to extract from.
	 The subreg adds or removes high bits; its mode is
	 irrelevant to the meaning of this extraction,
	 since POS and LEN count from the lsb.  */
      if (GET_CODE (SUBREG_REG (inner)) == MEM)
	is_mode = GET_MODE (SUBREG_REG (inner));
      inner = SUBREG_REG (inner);
    }

  inner_mode = GET_MODE (inner);

  if (pos_rtx && GET_CODE (pos_rtx) == CONST_INT)
    pos = INTVAL (pos_rtx), pos_rtx = 0;

  /* See if this can be done without an extraction.  We never can if the
     width of the field is not the same as that of some integer mode.  For
     registers, we can only avoid the extraction if the position is at the
     low-order bit and this is either not in the destination or we have the
     appropriate STRICT_LOW_PART operation available.

     For MEM, we can avoid an extract if the field starts on an appropriate
     boundary and we can change the mode of the memory reference.  However,
     we cannot directly access the MEM if we have a USE and the underlying
     MEM is not TMODE.  This combination means that MEM was being used in a
     context where bits outside its mode were being referenced; that is only
     valid in bit-field insns.  */
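
  /* (Illustrative: an unsigned 8-bit extract at position 0 from (reg:SI R),
     asked for in SImode, comes out of this block as
     (zero_extend:SI (subreg:QI (reg:SI R) 0)) -- no ZERO_EXTRACT needed.)  */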

  if (tmode != BLKmode
      && ! (spans_byte && inner_mode != tmode)
      && ((pos_rtx == 0 && pos == 0 && GET_CODE (inner) != MEM
	   && (! in_dest
	       || (GET_CODE (inner) == REG
		   && (movstrict_optab->handlers[(int) tmode].insn_code
		       != CODE_FOR_nothing))))
	  || (GET_CODE (inner) == MEM && pos_rtx == 0
	      && (pos
		  % (STRICT_ALIGNMENT ? GET_MODE_ALIGNMENT (tmode)
		     : BITS_PER_UNIT)) == 0
	      /* We can't do this if we are widening INNER_MODE (it
		 may not be aligned, for one thing).  */
	      && GET_MODE_BITSIZE (inner_mode) >= GET_MODE_BITSIZE (tmode)
	      && (inner_mode == tmode
		  || (! mode_dependent_address_p (XEXP (inner, 0))
		      && ! MEM_VOLATILE_P (inner))))))
    {
      /* If INNER is a MEM, make a new MEM that encompasses just the desired
	 field.  If the original and current mode are the same, we need not
	 adjust the offset.  Otherwise, we do so if bytes are big-endian.

	 If INNER is not a MEM, get a piece consisting of just the field
	 of interest (in this case POS must be 0).  */

      if (GET_CODE (inner) == MEM)
	{
	  int offset;
	  /* POS counts from lsb, but make OFFSET count in memory order.  */
	  if (BYTES_BIG_ENDIAN)
	    offset = (GET_MODE_BITSIZE (is_mode) - len - pos) / BITS_PER_UNIT;
	  else
	    offset = pos / BITS_PER_UNIT;

	  new = gen_rtx (MEM, tmode, plus_constant (XEXP (inner, 0), offset));
	  RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (inner);
	  MEM_VOLATILE_P (new) = MEM_VOLATILE_P (inner);
	  MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (inner);
	}
      else if (GET_CODE (inner) == REG)
	{
	  /* We can't call gen_lowpart_for_combine here since we always want
	     a SUBREG and it would sometimes return a new hard register.  */
	  if (tmode != inner_mode)
	    new = gen_rtx (SUBREG, tmode, inner,
			   (WORDS_BIG_ENDIAN
			    && GET_MODE_SIZE (inner_mode) > UNITS_PER_WORD
			    ? ((GET_MODE_SIZE (inner_mode)
				- GET_MODE_SIZE (tmode))
			       / UNITS_PER_WORD)
			    : 0));
	  else
	    new = inner;
	}
      else
	new = force_to_mode (inner, tmode,
			     len >= HOST_BITS_PER_WIDE_INT
			     ? GET_MODE_MASK (tmode)
			     : ((HOST_WIDE_INT) 1 << len) - 1,
			     NULL_RTX, 0);

      /* If this extraction is going into the destination of a SET,
	 make a STRICT_LOW_PART unless we made a MEM.  */

      if (in_dest)
	return (GET_CODE (new) == MEM ? new
		: (GET_CODE (new) != SUBREG
		   ? gen_rtx (CLOBBER, tmode, const0_rtx)
		   : gen_rtx_combine (STRICT_LOW_PART, VOIDmode, new)));

      /* Otherwise, sign- or zero-extend unless we already are in the
	 proper mode.  */

      return (mode == tmode ? new
	      : gen_rtx_combine (unsignedp ? ZERO_EXTEND : SIGN_EXTEND,
				 mode, new));
    }

  /* Unless this is a COMPARE or we have a funny memory reference,
     don't do anything with zero-extending field extracts starting at
     the low-order bit since they are simple AND operations.  */
  if (pos_rtx == 0 && pos == 0 && ! in_dest
      && ! in_compare && ! spans_byte && unsignedp)
    return 0;

  /* Unless we are allowed to span bytes, reject this if we would be
     spanning bytes or if the position is not a constant and the length
     is not 1.  In all other cases, we would only be going outside
     our object in cases when an original shift would have been
     undefined.  */
  if (! spans_byte
      && ((pos_rtx == 0 && pos + len > GET_MODE_BITSIZE (is_mode))
	  || (pos_rtx != 0 && len != 1)))
    return 0;

  /* Get the mode to use should INNER be a MEM, the mode for the position,
     and the mode for the result.  */
#ifdef HAVE_insv
  if (in_dest)
    {
      wanted_mem_mode = insn_operand_mode[(int) CODE_FOR_insv][0];
      pos_mode = insn_operand_mode[(int) CODE_FOR_insv][2];
      extraction_mode = insn_operand_mode[(int) CODE_FOR_insv][3];
    }
#endif

#ifdef HAVE_extzv
  if (! in_dest && unsignedp)
    {
      wanted_mem_mode = insn_operand_mode[(int) CODE_FOR_extzv][1];
      pos_mode = insn_operand_mode[(int) CODE_FOR_extzv][3];
      extraction_mode = insn_operand_mode[(int) CODE_FOR_extzv][0];
    }
#endif

#ifdef HAVE_extv
  if (! in_dest && ! unsignedp)
    {
      wanted_mem_mode = insn_operand_mode[(int) CODE_FOR_extv][1];
      pos_mode = insn_operand_mode[(int) CODE_FOR_extv][3];
      extraction_mode = insn_operand_mode[(int) CODE_FOR_extv][0];
    }
#endif

  /* Never narrow an object, since that might not be safe.  */

  if (mode != VOIDmode
      && GET_MODE_SIZE (extraction_mode) < GET_MODE_SIZE (mode))
    extraction_mode = mode;

  if (pos_rtx && GET_MODE (pos_rtx) != VOIDmode
      && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx)))
    pos_mode = GET_MODE (pos_rtx);

  /* If this is not from memory or we have to change the mode of memory and
     cannot, the desired mode is EXTRACTION_MODE.  */
  if (GET_CODE (inner) != MEM
      || (inner_mode != wanted_mem_mode
	  && (mode_dependent_address_p (XEXP (inner, 0))
	      || MEM_VOLATILE_P (inner))))
    wanted_mem_mode = extraction_mode;

  orig_pos = pos;

#if BITS_BIG_ENDIAN
  /* If position is constant, compute new position.  Otherwise, build
     subtraction.  */
  if (pos_rtx == 0)
    pos = (MAX (GET_MODE_BITSIZE (is_mode), GET_MODE_BITSIZE (wanted_mem_mode))
	   - len - pos);
  else
    pos_rtx
      = gen_rtx_combine (MINUS, GET_MODE (pos_rtx),
			 GEN_INT (MAX (GET_MODE_BITSIZE (is_mode),
				       GET_MODE_BITSIZE (wanted_mem_mode))
				  - len),
			 pos_rtx);
#endif

  /* If INNER has a wider mode, make it smaller.  If this is a constant
     extract, try to adjust the byte to point to the byte containing
     the value.  */
  if (wanted_mem_mode != VOIDmode
      && GET_MODE_SIZE (wanted_mem_mode) < GET_MODE_SIZE (is_mode)
      && ((GET_CODE (inner) == MEM
	   && (inner_mode == wanted_mem_mode
	       || (! mode_dependent_address_p (XEXP (inner, 0))
		   && ! MEM_VOLATILE_P (inner))))))
    {
      int offset = 0;

      /* The computations below will be correct if the machine is big
	 endian in both bits and bytes or little endian in bits and bytes.
	 If it is mixed, we must adjust.  */

      /* If bytes are big endian and we had a paradoxical SUBREG, we must
	 adjust OFFSET to compensate.  */
#if BYTES_BIG_ENDIAN
      if (! spans_byte
	  && GET_MODE_SIZE (inner_mode) < GET_MODE_SIZE (is_mode))
	offset -= GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (inner_mode);
#endif

      /* If this is a constant position, we can move to the desired byte.  */
      if (pos_rtx == 0)
	{
	  offset += pos / BITS_PER_UNIT;
	  pos %= GET_MODE_BITSIZE (wanted_mem_mode);
	}

#if BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN
      if (! spans_byte && is_mode != wanted_mem_mode)
	offset = (GET_MODE_SIZE (is_mode)
		  - GET_MODE_SIZE (wanted_mem_mode) - offset);
#endif

      if (offset != 0 || inner_mode != wanted_mem_mode)
	{
	  rtx newmem = gen_rtx (MEM, wanted_mem_mode,
				plus_constant (XEXP (inner, 0), offset));
	  RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (inner);
	  MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (inner);
	  MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (inner);
	  inner = newmem;
	}
    }

  /* If INNER is not memory, we can always get it into the proper mode.  */
  else if (GET_CODE (inner) != MEM)
    inner = force_to_mode (inner, extraction_mode,
			   pos_rtx || len + orig_pos >= HOST_BITS_PER_WIDE_INT
			   ? GET_MODE_MASK (extraction_mode)
			   : (((HOST_WIDE_INT) 1 << len) - 1) << orig_pos,
			   NULL_RTX, 0);

  /* Adjust mode of POS_RTX, if needed.  If we want a wider mode, we
     have to zero extend.  Otherwise, we can just use a SUBREG.  */
  if (pos_rtx != 0
      && GET_MODE_SIZE (pos_mode) > GET_MODE_SIZE (GET_MODE (pos_rtx)))
    pos_rtx = gen_rtx_combine (ZERO_EXTEND, pos_mode, pos_rtx);
  else if (pos_rtx != 0
	   && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx)))
    pos_rtx = gen_lowpart_for_combine (pos_mode, pos_rtx);

  /* Make POS_RTX unless we already have it and it is correct.  If we don't
     have a POS_RTX but we do have an ORIG_POS_RTX, the latter must
     be a CONST_INT.  */
  if (pos_rtx == 0 && orig_pos_rtx != 0 && INTVAL (orig_pos_rtx) == pos)
    pos_rtx = orig_pos_rtx;

  else if (pos_rtx == 0)
    pos_rtx = GEN_INT (pos);

  /* Make the required operation.  See if we can use existing rtx.  */
  new = gen_rtx_combine (unsignedp ? ZERO_EXTRACT : SIGN_EXTRACT,
			 extraction_mode, inner, GEN_INT (len), pos_rtx);
  if (! in_dest)
    new = gen_lowpart_for_combine (mode, new);

  return new;
}
/* See if X contains an ASHIFT of COUNT or more bits that can be commuted
   with any other operations in X.  Return X without that shift if so.  */
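
/* Worked example (illustrative): with COUNT == 3,
   (plus (ashift X (const_int 3)) (const_int 8)) yields
   (plus X (const_int 1)), since both terms can absorb the shift.  */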

static rtx
extract_left_shift (x, count)
     rtx x;
     int count;
{
  enum rtx_code code = GET_CODE (x);
  enum machine_mode mode = GET_MODE (x);
  rtx tem;

  switch (code)
    {
    case ASHIFT:
      /* This is the shift itself.  If it is wide enough, we will return
	 either the value being shifted if the shift count is equal to
	 COUNT or a shift for the difference.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
	  && INTVAL (XEXP (x, 1)) >= count)
	return simplify_shift_const (NULL_RTX, ASHIFT, mode, XEXP (x, 0),
				     INTVAL (XEXP (x, 1)) - count);
      break;

    case NEG:  case NOT:
      if ((tem = extract_left_shift (XEXP (x, 0), count)) != 0)
	return gen_unary (code, mode, mode, tem);

      break;

    case PLUS:  case IOR:  case XOR:  case AND:
      /* If we can safely shift this constant and we find the inner shift,
	 make a new operation.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
	  && (INTVAL (XEXP (x, 1)) & (((HOST_WIDE_INT) 1 << count) - 1)) == 0
	  && (tem = extract_left_shift (XEXP (x, 0), count)) != 0)
	return gen_binary (code, mode, tem,
			   GEN_INT (INTVAL (XEXP (x, 1)) >> count));

      break;
    }

  return 0;
}

/* Look at the expression rooted at X.  Look for expressions
   equivalent to ZERO_EXTRACT, SIGN_EXTRACT, ZERO_EXTEND, SIGN_EXTEND.
   Form these expressions.

   Return the new rtx, usually just X.

   Also, for machines like the Vax that don't have logical shift insns,
   try to convert logical to arithmetic shift operations in cases where
   they are equivalent.  This undoes the canonicalizations to logical
   shifts done elsewhere.

   We try, as much as possible, to re-use rtl expressions to save memory.

   IN_CODE says what kind of expression we are processing.  Normally, it is
   SET.  In a memory address (inside a MEM, PLUS or MINUS, the latter two
   being kludges), it is MEM.  When processing the arguments of a comparison
   or a COMPARE against zero, it is COMPARE.  */

static rtx
make_compound_operation (x, in_code)
     rtx x;
     enum rtx_code in_code;
{
  enum rtx_code code = GET_CODE (x);
  enum machine_mode mode = GET_MODE (x);
  int mode_width = GET_MODE_BITSIZE (mode);
  rtx rhs, lhs;
  enum rtx_code next_code;
  int i;
  rtx new = 0;
  rtx tem;
  char *fmt;

  /* Select the code to be used in recursive calls.  Once we are inside an
     address, we stay there.  If we have a comparison, set to COMPARE,
     but once inside, go back to our default of SET.  */

  next_code = (code == MEM || code == PLUS || code == MINUS ? MEM
	       : ((code == COMPARE || GET_RTX_CLASS (code) == '<')
		  && XEXP (x, 1) == const0_rtx) ? COMPARE
	       : in_code == COMPARE ? SET : in_code);

  /* Process depending on the code of this operation.  If NEW is set
     non-zero, it will be returned.  */

  switch (code)
    {
    case ASHIFT:
      /* Convert shifts by constants into multiplications if inside
	 an address.  */
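      /* E.g., inside an address, (ashift X (const_int 2)) becomes
	 (mult X (const_int 4)), the canonical form in addresses.  */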
      if (in_code == MEM && GET_CODE (XEXP (x, 1)) == CONST_INT
	  && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
	  && INTVAL (XEXP (x, 1)) >= 0)
	{
	  new = make_compound_operation (XEXP (x, 0), next_code);
	  new = gen_rtx_combine (MULT, mode, new,
				 GEN_INT ((HOST_WIDE_INT) 1
					  << INTVAL (XEXP (x, 1))));
	}
      break;

    case AND:
      /* If the second operand is not a constant, we can't do anything
	 with it.  */
      if (GET_CODE (XEXP (x, 1)) != CONST_INT)
	break;

      /* If the constant is a power of two minus one and the first operand
	 is a logical right shift, make an extraction.  */
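      /* E.g. (illustrative), (and (lshiftrt X (const_int 5)) (const_int 255))
	 names an 8-bit field at bit 5 and becomes
	 (zero_extract X (const_int 8) (const_int 5)).  */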
      if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
	  && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
	{
	  new = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code);
	  new = make_extraction (mode, new, 0, XEXP (XEXP (x, 0), 1), i, 1,
				 0, in_code == COMPARE);
	}

      /* Same as previous, but for (subreg (lshiftrt ...)) in first op.  */
      else if (GET_CODE (XEXP (x, 0)) == SUBREG
	       && subreg_lowpart_p (XEXP (x, 0))
	       && GET_CODE (SUBREG_REG (XEXP (x, 0))) == LSHIFTRT
	       && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
	{
	  new = make_compound_operation (XEXP (SUBREG_REG (XEXP (x, 0)), 0),
					 next_code);
	  new = make_extraction (mode, new, 0,
				 XEXP (SUBREG_REG (XEXP (x, 0)), 1), i, 1,
				 0, in_code == COMPARE);
	}
      /* Same as previous, but for (xor/ior (lshiftrt...) (lshiftrt...)).  */
      else if ((GET_CODE (XEXP (x, 0)) == XOR
		|| GET_CODE (XEXP (x, 0)) == IOR)
	       && GET_CODE (XEXP (XEXP (x, 0), 0)) == LSHIFTRT
	       && GET_CODE (XEXP (XEXP (x, 0), 1)) == LSHIFTRT
	       && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
	{
	  /* Apply the distributive law, and then try to make extractions.  */
	  new = gen_rtx_combine (GET_CODE (XEXP (x, 0)), mode,
				 gen_rtx (AND, mode, XEXP (XEXP (x, 0), 0),
					  XEXP (x, 1)),
				 gen_rtx (AND, mode, XEXP (XEXP (x, 0), 1),
					  XEXP (x, 1)));
	  new = make_compound_operation (new, in_code);
	}

      /* If we have (and (rotate X C) M) and C is larger than the number
	 of bits in M, this is an extraction.  */

      else if (GET_CODE (XEXP (x, 0)) == ROTATE
	       && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
	       && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0
	       && i <= INTVAL (XEXP (XEXP (x, 0), 1)))
	{
	  new = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code);
	  new = make_extraction (mode, new,
				 (GET_MODE_BITSIZE (mode)
				  - INTVAL (XEXP (XEXP (x, 0), 1))),
				 NULL_RTX, i, 1, 0, in_code == COMPARE);
	}

      /* On machines without logical shifts, if the operand of the AND is
	 a logical shift and our mask turns off all the propagated sign
	 bits, we can replace the logical shift with an arithmetic shift.  */
      else if (ashr_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
	       && (lshr_optab->handlers[(int) mode].insn_code
		   == CODE_FOR_nothing)
	       && GET_CODE (XEXP (x, 0)) == LSHIFTRT
	       && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
	       && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
	       && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT
	       && mode_width <= HOST_BITS_PER_WIDE_INT)
	{
	  unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);

	  mask >>= INTVAL (XEXP (XEXP (x, 0), 1));
	  if ((INTVAL (XEXP (x, 1)) & ~mask) == 0)
	    SUBST (XEXP (x, 0),
		   gen_rtx_combine (ASHIFTRT, mode,
				    make_compound_operation (XEXP (XEXP (x, 0), 0),
							     next_code),
				    XEXP (XEXP (x, 0), 1)));
	}

      /* If the constant is one less than a power of two, this might be
	 representable by an extraction even if no shift is present.
	 If it doesn't end up being a ZERO_EXTEND, we will ignore it unless
	 we are in a COMPARE.  */
      else if ((i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
	new = make_extraction (mode,
			       make_compound_operation (XEXP (x, 0),
							next_code),
			       0, NULL_RTX, i, 1, 0, in_code == COMPARE);

      /* If we are in a comparison and this is an AND with a power of two,
	 convert this into the appropriate bit extract.  */
      else if (in_code == COMPARE
	       && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0)
	new = make_extraction (mode,
			       make_compound_operation (XEXP (x, 0),
							next_code),
			       i, NULL_RTX, 1, 1, 0, 1);

      break;

    case LSHIFTRT:
      /* If the sign bit is known to be zero, replace this with an
	 arithmetic shift.  */
      if (ashr_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing
	  && lshr_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
	  && mode_width <= HOST_BITS_PER_WIDE_INT
	  && (nonzero_bits (XEXP (x, 0), mode) & (1 << (mode_width - 1))) == 0)
	{
	  new = gen_rtx_combine (ASHIFTRT, mode,
				 make_compound_operation (XEXP (x, 0),
							  next_code),
				 XEXP (x, 1));
	  break;
	}

      /* ... fall through ... */

    case ASHIFTRT:
      lhs = XEXP (x, 0);
      rhs = XEXP (x, 1);

      /* If we have (ashiftrt (ashift foo C1) C2) with C2 >= C1,
	 this is a SIGN_EXTRACT.  */
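      /* For instance (illustrative), in SImode:
	 (ashiftrt (ashift X (const_int 24)) (const_int 26)) is
	 (sign_extract X (const_int 6) (const_int 2)).  */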
      if (GET_CODE (rhs) == CONST_INT
	  && GET_CODE (lhs) == ASHIFT
	  && GET_CODE (XEXP (lhs, 1)) == CONST_INT
	  && INTVAL (rhs) >= INTVAL (XEXP (lhs, 1)))
	{
	  new = make_compound_operation (XEXP (lhs, 0), next_code);
	  new = make_extraction (mode, new,
				 INTVAL (rhs) - INTVAL (XEXP (lhs, 1)),
				 NULL_RTX, mode_width - INTVAL (rhs),
				 code == LSHIFTRT, 0, in_code == COMPARE);
	}

      /* See if we have operations between an ASHIFTRT and an ASHIFT.
	 If so, try to merge the shifts into a SIGN_EXTEND.  We could
	 also do this for some cases of SIGN_EXTRACT, but it doesn't
	 seem worth the effort; the case checked for occurs on Alpha.  */

      if (GET_RTX_CLASS (GET_CODE (lhs)) != 'o'
	  && ! (GET_CODE (lhs) == SUBREG
		&& (GET_RTX_CLASS (GET_CODE (SUBREG_REG (lhs))) == 'o'))
	  && GET_CODE (rhs) == CONST_INT
	  && INTVAL (rhs) < HOST_BITS_PER_WIDE_INT
	  && (new = extract_left_shift (lhs, INTVAL (rhs))) != 0)
	new = make_extraction (mode, make_compound_operation (new, next_code),
			       0, NULL_RTX, mode_width - INTVAL (rhs),
			       code == LSHIFTRT, 0, in_code == COMPARE);

      break;

    case SUBREG:
      /* Call ourselves recursively on the inner expression.  If we are
	 narrowing the object and it has a different RTL code from
	 what it originally did, do this SUBREG as a force_to_mode.  */

      tem = make_compound_operation (SUBREG_REG (x), in_code);
      if (GET_CODE (tem) != GET_CODE (SUBREG_REG (x))
	  && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (tem))
	  && subreg_lowpart_p (x))
	{
	  rtx newer = force_to_mode (tem, mode,
				     GET_MODE_MASK (mode), NULL_RTX, 0);

	  /* If we have something other than a SUBREG, we might have
	     done an expansion, so rerun ourselves.  */
	  if (GET_CODE (newer) != SUBREG)
	    newer = make_compound_operation (newer, in_code);

	  return newer;
	}
    }

  if (new)
    {
      x = gen_lowpart_for_combine (mode, new);
      code = GET_CODE (x);
    }

  /* Now recursively process each operand of this operation.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    if (fmt[i] == 'e')
      {
	new = make_compound_operation (XEXP (x, i), next_code);
	SUBST (XEXP (x, i), new);
      }

  return x;
}

/* Given M see if it is a value that would select a field of bits
   within an item, but not the entire word.  Return -1 if not.
   Otherwise, return the starting position of the field, where 0 is the
   low-order bit.

   *PLEN is set to the length of the field.  */
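
/* Worked example: for M == 0x78 (binary 0111 1000), M & -M is 8, so the
   field starts at bit 3; (M >> 3) + 1 is 0x10, so *PLEN is set to 4.  */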

static int
get_pos_from_mask (m, plen)
     unsigned HOST_WIDE_INT m;
     int *plen;
{
  /* Get the bit number of the first 1 bit from the right, -1 if none.  */
  int pos = exact_log2 (m & - m);

  if (pos < 0)
    return -1;

  /* Now shift off the low-order zero bits and see if we have a power of
     two minus 1.  */
  *plen = exact_log2 ((m >> pos) + 1);

  if (*plen <= 0)
    return -1;

  return pos;
}

/* See if X can be simplified knowing that we will only refer to it in
   MODE and will only refer to those bits that are nonzero in MASK.
   If other bits are being computed or if masking operations are done
   that select a superset of the bits in MASK, they can sometimes be
   ignored.

   Return a possibly simplified expression, but always convert X to
   MODE.  If X is a CONST_INT, AND the CONST_INT with MASK.

   Also, if REG is non-zero and X is a register equal in value to REG,
   replace X with REG.

   If JUST_SELECT is nonzero, don't optimize by noticing that bits in MASK
   are all off in X.  This is used when X will be complemented, by either
   NOT, NEG, or XOR.  */
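
/* For example (illustrative), with MASK == 0x0f the AND case below first
   rewrites (and X (const_int 0xff)) as (and X (const_int 0x0f)) and then
   deletes the AND entirely, because its constant now equals MASK.  */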

static rtx
force_to_mode (x, mode, mask, reg, just_select)
     rtx x;
     enum machine_mode mode;
     unsigned HOST_WIDE_INT mask;
     rtx reg;
     int just_select;
{
  enum rtx_code code = GET_CODE (x);
  int next_select = just_select || code == XOR || code == NOT || code == NEG;
  enum machine_mode op_mode;
  unsigned HOST_WIDE_INT fuller_mask, nonzero;
  rtx op0, op1, temp;

  /* If this is a CALL, don't do anything.  Some of the code below
     will do the wrong thing since the mode of a CALL is VOIDmode.  */
  if (code == CALL)
    return x;

  /* We want to perform the operation in its present mode unless we know
     that the operation is valid in MODE, in which case we do the operation
     in MODE.  */
  op_mode = ((GET_MODE_CLASS (mode) == GET_MODE_CLASS (GET_MODE (x))
	      && code_to_optab[(int) code] != 0
	      && (code_to_optab[(int) code]->handlers[(int) mode].insn_code
		  != CODE_FOR_nothing))
	     ? mode : GET_MODE (x));

  /* It is not valid to do a right-shift in a narrower mode
     than the one it came in with.  */
  if ((code == LSHIFTRT || code == ASHIFTRT)
      && GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (GET_MODE (x)))
    op_mode = GET_MODE (x);

  /* Truncate MASK to fit OP_MODE.  */
  if (op_mode)
    mask &= GET_MODE_MASK (op_mode);

  /* When we have an arithmetic operation, or a shift whose count we
     do not know, we need to assume that all bits up to the highest-order
     bit in MASK will be needed.  This is how we form such a mask.  */
  if (op_mode)
    fuller_mask = (GET_MODE_BITSIZE (op_mode) >= HOST_BITS_PER_WIDE_INT
		   ? GET_MODE_MASK (op_mode)
		   : ((HOST_WIDE_INT) 1 << (floor_log2 (mask) + 1)) - 1);
  else
    fuller_mask = ~ (HOST_WIDE_INT) 0;

  /* Determine what bits of X are guaranteed to be (non)zero.  */
  nonzero = nonzero_bits (x, mode);

  /* If none of the bits in X are needed, return a zero.  */
  if (! just_select && (nonzero & mask) == 0)
    return const0_rtx;

  /* If X is a CONST_INT, return a new one.  Do this here since the
     test below will fail.  */
  if (GET_CODE (x) == CONST_INT)
    {
      HOST_WIDE_INT cval = INTVAL (x) & mask;
      int width = GET_MODE_BITSIZE (mode);

      /* If MODE is narrower than HOST_WIDE_INT and CVAL is a negative
	 number, sign extend it.  */
      if (width > 0 && width < HOST_BITS_PER_WIDE_INT
	  && (cval & ((HOST_WIDE_INT) 1 << (width - 1))) != 0)
	cval |= (HOST_WIDE_INT) -1 << width;

      return GEN_INT (cval);
    }

  /* If X is narrower than MODE and we want all the bits in X's mode, just
     get X in the proper mode.  */
  if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (mode)
      && (GET_MODE_MASK (GET_MODE (x)) & ~ mask) == 0)
    return gen_lowpart_for_combine (mode, x);

  /* If we aren't changing the mode, X is not a SUBREG, and all zero bits in
     MASK are already known to be zero in X, we need not do anything.  */
  if (GET_MODE (x) == mode && code != SUBREG && (~ mask & nonzero) == 0)
    return x;

  switch (code)
    {
    case CLOBBER:
      /* If X is a (clobber (const_int)), return it since we know we are
	 generating something that won't match.  */
      return x;

#if ! BITS_BIG_ENDIAN
    case USE:
      /* X is a (use (mem ..)) that was made from a bit-field extraction that
	 spanned the boundary of the MEM.  If we are now masking so it is
	 within that boundary, we don't need the USE any more.  */
      if ((mask & ~ GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
	return force_to_mode (XEXP (x, 0), mode, mask, reg, next_select);
#endif

    case SIGN_EXTEND:
    case ZERO_EXTEND:
    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      x = expand_compound_operation (x);
      if (GET_CODE (x) != code)
	return force_to_mode (x, mode, mask, reg, next_select);
      break;

    case REG:
      if (reg != 0 && (rtx_equal_p (get_last_value (reg), x)
		       || rtx_equal_p (reg, get_last_value (x))))
	x = reg;
      break;

    case SUBREG:
      if (subreg_lowpart_p (x)
	  /* We can ignore the effect of this SUBREG if it narrows the mode or
	     if the constant masks to zero all the bits the mode doesn't
	     have.  */
	  && ((GET_MODE_SIZE (GET_MODE (x))
	       < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
	      || (0 == (mask
			& GET_MODE_MASK (GET_MODE (x))
			& ~ GET_MODE_MASK (GET_MODE (SUBREG_REG (x)))))))
	return force_to_mode (SUBREG_REG (x), mode, mask, reg, next_select);
      break;

    case AND:
      /* If this is an AND with a constant, convert it into an AND
	 whose constant is the AND of that constant with MASK.  If it
	 remains an AND of MASK, delete it since it is redundant.  */

      if (GET_CODE (XEXP (x, 1)) == CONST_INT
	  && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT)
	{
	  x = simplify_and_const_int (x, op_mode, XEXP (x, 0),
				      mask & INTVAL (XEXP (x, 1)));

	  /* If X is still an AND, see if it is an AND with a mask that
	     is just some low-order bits.  If so, and it is MASK, we don't
	     need it.  */

	  if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT
	      && INTVAL (XEXP (x, 1)) == mask)
	    x = XEXP (x, 0);

	  /* If it remains an AND, try making another AND with the bits
	     in the mode mask that aren't in MASK turned on.  If the
	     constant in the AND is wide enough, this might make a
	     cheaper constant.  */

	  if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT
	      && GET_MODE_MASK (GET_MODE (x)) != mask)
	    {
	      HOST_WIDE_INT cval = (INTVAL (XEXP (x, 1))
				    | (GET_MODE_MASK (GET_MODE (x)) & ~ mask));
	      int width = GET_MODE_BITSIZE (GET_MODE (x));
	      rtx y;

	      /* If MODE is narrower than HOST_WIDE_INT and CVAL is a negative
		 number, sign extend it.  */
	      if (width > 0 && width < HOST_BITS_PER_WIDE_INT
		  && (cval & ((HOST_WIDE_INT) 1 << (width - 1))) != 0)
		cval |= (HOST_WIDE_INT) -1 << width;

	      y = gen_binary (AND, GET_MODE (x), XEXP (x, 0), GEN_INT (cval));
	      if (rtx_cost (y, SET) < rtx_cost (x, SET))
		x = y;
	    }

	  break;
	}

      goto binop;

    case PLUS:
      /* In (and (plus FOO C1) M), if M is a mask that just turns off
	 low-order bits (as in an alignment operation) and FOO is already
	 aligned to that boundary, mask C1 to that boundary as well.
	 This may eliminate that PLUS and, later, the AND.  */
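      /* E.g. (illustrative): for (and (plus X (const_int 11)) (const_int -8))
	 where the low three bits of X are known zero, C1 is masked to 8 and
	 we recurse on (plus X (const_int 8)).  */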
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
	  && exact_log2 (- mask) >= 0
	  && (nonzero_bits (XEXP (x, 0), mode) & ~ mask) == 0
	  && (INTVAL (XEXP (x, 1)) & ~ mask) != 0)
	return force_to_mode (plus_constant (XEXP (x, 0),
					     INTVAL (XEXP (x, 1)) & mask),
			      mode, mask, reg, next_select);

      /* ... fall through ... */

    case MINUS:
    case MULT:
      /* For PLUS, MINUS and MULT, we need any bits less significant than the
	 most significant bit in MASK since carries from those bits will
	 affect the bits we are interested in.  */
      mask = fuller_mask;
      goto binop;

    case IOR:
    case XOR:
      /* If X is (ior (lshiftrt FOO C1) C2), try to commute the IOR and
	 LSHIFTRT so we end up with an (and (lshiftrt (ior ...) ...) ...)
	 operation which may be a bitfield extraction.  Ensure that the
	 constant we form is not wider than the mode of X.  */
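      /* Schematically (illustrative): (ior (lshiftrt FOO C1) C2) is
	 rewritten as (lshiftrt (ior FOO (C2 << C1)) C1), which the
	 enclosing AND may then recognize as an extraction.  */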
5813
5814 if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
5815 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
5816 && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
5817 && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT
5818 && GET_CODE (XEXP (x, 1)) == CONST_INT
5819 && ((INTVAL (XEXP (XEXP (x, 0), 1))
5820 + floor_log2 (INTVAL (XEXP (x, 1))))
5821 < GET_MODE_BITSIZE (GET_MODE (x)))
5822 && (INTVAL (XEXP (x, 1))
5823 & ~ nonzero_bits (XEXP (x, 0), GET_MODE (x)) == 0))
5824 {
5825 temp = GEN_INT ((INTVAL (XEXP (x, 1)) & mask)
5826 << INTVAL (XEXP (XEXP (x, 0), 1)));
5827 temp = gen_binary (GET_CODE (x), GET_MODE (x),
5828 XEXP (XEXP (x, 0), 0), temp);
5829 x = gen_binary (LSHIFTRT, GET_MODE (x), temp, XEXP (x, 1));
e3d616e3 5830 return force_to_mode (x, mode, mask, reg, next_select);
6139ff20
RK
5831 }
5832
5833 binop:
dfbe1b2f 5834 /* For most binary operations, just propagate into the operation and
6139ff20
RK
5835 change the mode if we have an operation of that mode. */
5836
e3d616e3
RK
5837 op0 = gen_lowpart_for_combine (op_mode,
5838 force_to_mode (XEXP (x, 0), mode, mask,
5839 reg, next_select));
5840 op1 = gen_lowpart_for_combine (op_mode,
5841 force_to_mode (XEXP (x, 1), mode, mask,
5842 reg, next_select));
6139ff20 5843
2dd484ed
RK
5844 /* If OP1 is a CONST_INT and X is an IOR or XOR, clear bits outside
5845 MASK since OP1 might have been sign-extended but we never want
5846 to turn on extra bits, since combine might have previously relied
5847 on them being off. */
5848 if (GET_CODE (op1) == CONST_INT && (code == IOR || code == XOR)
5849 && (INTVAL (op1) & mask) != 0)
5850 op1 = GEN_INT (INTVAL (op1) & mask);
5851
6139ff20
RK
5852 if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
5853 x = gen_binary (code, op_mode, op0, op1);
d0ab8cd3 5854 break;
dfbe1b2f
RK
5855
5856 case ASHIFT:
dfbe1b2f 5857 /* For left shifts, do the same, but just for the first operand.
f6785026
RK
5858 However, we cannot do anything with shifts where we cannot
5859 guarantee that the counts are smaller than the size of the mode
5860 because such a count will have a different meaning in a
6139ff20 5861 wider mode. */
f6785026
RK
5862
5863 if (! (GET_CODE (XEXP (x, 1)) == CONST_INT
6139ff20 5864 && INTVAL (XEXP (x, 1)) >= 0
f6785026
RK
5865 && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (mode))
5866 && ! (GET_MODE (XEXP (x, 1)) != VOIDmode
5867 && (nonzero_bits (XEXP (x, 1), GET_MODE (XEXP (x, 1)))
adb7a1cb 5868 < (unsigned HOST_WIDE_INT) GET_MODE_BITSIZE (mode))))
f6785026
RK
5869 break;
5870
6139ff20
RK
5871 /* If the shift count is a constant and we can do arithmetic in
5872 the mode of the shift, refine which bits we need. Otherwise, use the
5873 conservative form of the mask. */
5874 if (GET_CODE (XEXP (x, 1)) == CONST_INT
5875 && INTVAL (XEXP (x, 1)) >= 0
5876 && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (op_mode)
5877 && GET_MODE_BITSIZE (op_mode) <= HOST_BITS_PER_WIDE_INT)
5878 mask >>= INTVAL (XEXP (x, 1));
5879 else
5880 mask = fuller_mask;
5881
5882 op0 = gen_lowpart_for_combine (op_mode,
5883 force_to_mode (XEXP (x, 0), op_mode,
e3d616e3 5884 mask, reg, next_select));
6139ff20
RK
5885
5886 if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0))
5887 x = gen_binary (code, op_mode, op0, XEXP (x, 1));
d0ab8cd3 5888 break;
dfbe1b2f
RK
5889
5890 case LSHIFTRT:
1347292b
JW
5891 /* Here we can only do something if the shift count is a constant,
5892 this shift constant is valid for the host, and we can do arithmetic
5893 in OP_MODE. */
dfbe1b2f
RK
5894
5895 if (GET_CODE (XEXP (x, 1)) == CONST_INT
1347292b 5896 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
6139ff20 5897 && GET_MODE_BITSIZE (op_mode) <= HOST_BITS_PER_WIDE_INT)
d0ab8cd3 5898 {
6139ff20
RK
5899 rtx inner = XEXP (x, 0);
5900
5901 /* Select the mask of the bits we need for the shift operand. */
5902 mask <<= INTVAL (XEXP (x, 1));
d0ab8cd3 5903
6139ff20
RK
5904 /* We can only change the mode of the shift if we can do arithmetic
5905 in the mode of the shift and MASK is no wider than the width of
5906 OP_MODE. */
5907 if (GET_MODE_BITSIZE (op_mode) > HOST_BITS_PER_WIDE_INT
5908 || (mask & ~ GET_MODE_MASK (op_mode)) != 0)
d0ab8cd3
RK
5909 op_mode = GET_MODE (x);
5910
e3d616e3 5911 inner = force_to_mode (inner, op_mode, mask, reg, next_select);
6139ff20
RK
5912
5913 if (GET_MODE (x) != op_mode || inner != XEXP (x, 0))
5914 x = gen_binary (LSHIFTRT, op_mode, inner, XEXP (x, 1));
d0ab8cd3 5915 }
6139ff20
RK
5916
5917 /* If we have (and (lshiftrt FOO C1) C2) where the combination of the
5918 shift and AND produces only copies of the sign bit (C2 is one less
5919 than a power of two), we can do this with just a shift. */
5920
5921 if (GET_CODE (x) == LSHIFTRT
5922 && GET_CODE (XEXP (x, 1)) == CONST_INT
5923 && ((INTVAL (XEXP (x, 1))
5924 + num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0))))
5925 >= GET_MODE_BITSIZE (GET_MODE (x)))
5926 && exact_log2 (mask + 1) >= 0
5927 && (num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
5928 >= exact_log2 (mask + 1)))
5929 x = gen_binary (LSHIFTRT, GET_MODE (x), XEXP (x, 0),
5930 GEN_INT (GET_MODE_BITSIZE (GET_MODE (x))
5931 - exact_log2 (mask + 1)));
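/* Editor's note: an illustrative sketch (not part of combine.c, fenced
   off with #if 0), assuming 32-bit ints.  A sign-extended 8-bit value
   has 25 copies of its sign bit, so masking a right shift by 28 with 7
   (one less than a power of two) keeps only sign-bit copies, which a
   single logical shift by 32 - 3 = 29 also produces.  */
#if 0
static int
lshiftrt_sign_copies_demo (signed char byte)
{
  unsigned int foo = (unsigned int) (int) byte;	/* sign-extended */

  return ((foo >> 28) & 7u) == (foo >> 29);
}
#endif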
d0ab8cd3
RK
5932 break;
5933
5934 case ASHIFTRT:
6139ff20
RK
5935 /* If we are just looking for the sign bit, we don't need this shift at
5936 all, even if it has a variable count. */
5937 if (mask == ((HOST_WIDE_INT) 1
5938 << (GET_MODE_BITSIZE (GET_MODE (x)) - 1)))
e3d616e3 5939 return force_to_mode (XEXP (x, 0), mode, mask, reg, next_select);
6139ff20
RK
5940
5941 /* If this is a shift by a constant, get a mask that contains those bits
5942 that are not copies of the sign bit. We then have two cases: If
5943 MASK only includes those bits, this can be a logical shift, which may
5944 allow simplifications. If MASK is a single-bit field not within
5945 those bits, we are requesting a copy of the sign bit and hence can
5946 shift the sign bit to the appropriate location. */
5947
5948 if (GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) >= 0
5949 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
5950 {
5951 int i = -1;
5952
5953 nonzero = GET_MODE_MASK (GET_MODE (x));
5954 nonzero >>= INTVAL (XEXP (x, 1));
5955
5956 if ((mask & ~ nonzero) == 0
5957 || (i = exact_log2 (mask)) >= 0)
5958 {
5959 x = simplify_shift_const
5960 (x, LSHIFTRT, GET_MODE (x), XEXP (x, 0),
5961 i < 0 ? INTVAL (XEXP (x, 1))
5962 : GET_MODE_BITSIZE (GET_MODE (x)) - 1 - i);
5963
5964 if (GET_CODE (x) != ASHIFTRT)
e3d616e3 5965 return force_to_mode (x, mode, mask, reg, next_select);
6139ff20
RK
5966 }
5967 }
5968
5969 /* If MASK is 1, convert this to a LSHIFTRT. This can be done
5970 even if the shift count isn't a constant. */
5971 if (mask == 1)
5972 x = gen_binary (LSHIFTRT, GET_MODE (x), XEXP (x, 0), XEXP (x, 1));
5973
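/* Editor's note: an illustrative sketch (not part of combine.c, fenced
   off with #if 0), assuming 32-bit ints and the usual arithmetic
   behavior of >> on negative signed values.  When only the low-order
   bit is wanted, arithmetic and logical right shifts agree: bit 0 of
   either result is simply bit C of the operand.  */
#if 0
static int
ashiftrt_mask1_demo (int x, unsigned int c)
{
  /* Assumes 0 <= c < 32.  */
  return ((x >> c) & 1) == (int) (((unsigned int) x >> c) & 1u);
}
#endif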
d0ab8cd3 5974 /* If this is a sign-extension operation that just affects bits
4c002f29
RK
5975 we don't care about, remove it. Be sure the call above returned
5976 something that is still a shift. */
d0ab8cd3 5977
4c002f29
RK
5978 if ((GET_CODE (x) == LSHIFTRT || GET_CODE (x) == ASHIFTRT)
5979 && GET_CODE (XEXP (x, 1)) == CONST_INT
d0ab8cd3 5980 && INTVAL (XEXP (x, 1)) >= 0
6139ff20
RK
5981 && (INTVAL (XEXP (x, 1))
5982 <= GET_MODE_BITSIZE (GET_MODE (x)) - (floor_log2 (mask) + 1))
d0ab8cd3
RK
5983 && GET_CODE (XEXP (x, 0)) == ASHIFT
5984 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
5985 && INTVAL (XEXP (XEXP (x, 0), 1)) == INTVAL (XEXP (x, 1)))
e3d616e3
RK
5986 return force_to_mode (XEXP (XEXP (x, 0), 0), mode, mask,
5987 reg, next_select);
6139ff20 5988
dfbe1b2f
RK
5989 break;
5990
6139ff20
RK
5991 case ROTATE:
5992 case ROTATERT:
5993 /* If the shift count is constant and we can do computations
5994 in the mode of X, compute where the bits we care about are.
5995 Otherwise, we can't do anything. Don't change the mode of
5996 the shift or propagate MODE into the shift, though. */
5997 if (GET_CODE (XEXP (x, 1)) == CONST_INT
5998 && INTVAL (XEXP (x, 1)) >= 0)
5999 {
6000 temp = simplify_binary_operation (code == ROTATE ? ROTATERT : ROTATE,
6001 GET_MODE (x), GEN_INT (mask),
6002 XEXP (x, 1));
7d171a1e 6003 if (temp && GET_CODE (temp) == CONST_INT)
6139ff20
RK
6004 SUBST (XEXP (x, 0),
6005 force_to_mode (XEXP (x, 0), GET_MODE (x),
e3d616e3 6006 INTVAL (temp), reg, next_select));
6139ff20
RK
6007 }
6008 break;
6009
dfbe1b2f 6010 case NEG:
180b8e4b
RK
6011 /* If we just want the low-order bit, the NEG isn't needed since it
6012 won't change the low-order bit. */
6013 if (mask == 1)
6014 return force_to_mode (XEXP (x, 0), mode, mask, reg, just_select);
6015
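/* Editor's note: an illustrative sketch (not part of combine.c, fenced
   off with #if 0).  In two's complement, -X is ~X + 1, and the final
   + 1 flips the inverted low-order bit back, so negation never changes
   bit 0.  */
#if 0
static int
neg_low_bit_demo (unsigned int x)
{
  return ((0u - x) & 1u) == (x & 1u);
}
#endif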
6139ff20
RK
6016 /* We need all bits less significant than the most significant bit in
6017 MASK since carries from those bits will affect the bits we are
6018 interested in. */
6019 mask = fuller_mask;
6020 goto unop;
6021
dfbe1b2f 6022 case NOT:
6139ff20
RK
6023 /* (not FOO) is (xor FOO CONST), so if FOO is an LSHIFTRT, we can do the
6024 same as the XOR case above. Ensure that the constant we form is not
6025 wider than the mode of X. */
6026
6027 if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
6028 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
6029 && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
6030 && (INTVAL (XEXP (XEXP (x, 0), 1)) + floor_log2 (mask)
6031 < GET_MODE_BITSIZE (GET_MODE (x)))
6032 && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT)
6033 {
6034 temp = GEN_INT (mask << INTVAL (XEXP (XEXP (x, 0), 1)));
6035 temp = gen_binary (XOR, GET_MODE (x), XEXP (XEXP (x, 0), 0), temp);
6036 x = gen_binary (LSHIFTRT, GET_MODE (x), temp, XEXP (XEXP (x, 0), 1));
6037
e3d616e3 6038 return force_to_mode (x, mode, mask, reg, next_select);
6139ff20
RK
6039 }
6040
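/* Editor's note: an illustrative sketch (not part of combine.c, fenced
   off with #if 0), assuming 32-bit unsigned ints.  On every bit that
   MASK selects, (not (lshiftrt X C)) agrees with
   (lshiftrt (xor X (MASK << C)) C), shown here with MASK = 0xff and
   C = 8.  */
#if 0
static int
not_lshiftrt_demo (unsigned int x)
{
  unsigned int mask = 0xffu, c = 8;

  return ((~(x >> c)) & mask) == (((x ^ (mask << c)) >> c) & mask);
}
#endif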
6041 unop:
e3d616e3
RK
6042 op0 = gen_lowpart_for_combine (op_mode,
6043 force_to_mode (XEXP (x, 0), mode, mask,
6044 reg, next_select));
6139ff20 6045 if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0))
0c1c8ea6 6046 x = gen_unary (code, op_mode, op_mode, op0);
6139ff20
RK
6047 break;
6048
6049 case NE:
6050 /* (and (ne FOO 0) CONST) can be (and FOO CONST) if CONST is included
6051 in STORE_FLAG_VALUE and FOO has no bits that might be nonzero not
6052 in CONST. */
6053 if ((mask & ~ STORE_FLAG_VALUE) == 0 && XEXP (x, 1) == const0_rtx
6054 && (nonzero_bits (XEXP (x, 0), mode) & ~ mask) == 0)
e3d616e3 6055 return force_to_mode (XEXP (x, 0), mode, mask, reg, next_select);
6139ff20 6056
d0ab8cd3
RK
6057 break;
6058
6059 case IF_THEN_ELSE:
6060 /* We have no way of knowing if the IF_THEN_ELSE can itself be
6061 written in a narrower mode. We play it safe and do not do so. */
6062
6063 SUBST (XEXP (x, 1),
6064 gen_lowpart_for_combine (GET_MODE (x),
6065 force_to_mode (XEXP (x, 1), mode,
e3d616e3 6066 mask, reg, next_select)));
d0ab8cd3
RK
6067 SUBST (XEXP (x, 2),
6068 gen_lowpart_for_combine (GET_MODE (x),
6069 force_to_mode (XEXP (x, 2), mode,
e3d616e3 6070 mask, reg, next_select)));
d0ab8cd3 6071 break;
dfbe1b2f
RK
6072 }
6073
d0ab8cd3 6074 /* Ensure we return a value of the proper mode. */
dfbe1b2f
RK
6075 return gen_lowpart_for_combine (mode, x);
6076}
6077\f
abe6e52f
RK
6078/* Return nonzero if X is an expression that has one of two values depending on
6079 whether some other value is zero or nonzero. In that case, we return the
6080 value that is being tested, *PTRUE is set to the value X has when the
6081 rtx being returned is nonzero, and *PFALSE is set to the other alternative.
6082
6083 If we return zero, we set *PTRUE and *PFALSE to X. */
6084
6085static rtx
6086if_then_else_cond (x, ptrue, pfalse)
6087 rtx x;
6088 rtx *ptrue, *pfalse;
6089{
6090 enum machine_mode mode = GET_MODE (x);
6091 enum rtx_code code = GET_CODE (x);
6092 int size = GET_MODE_BITSIZE (mode);
6093 rtx cond0, cond1, true0, true1, false0, false1;
6094 unsigned HOST_WIDE_INT nz;
6095
6096 /* If this is a unary operation whose operand has one of two values, apply
6097 our opcode to compute those values. */
6098 if (GET_RTX_CLASS (code) == '1'
6099 && (cond0 = if_then_else_cond (XEXP (x, 0), &true0, &false0)) != 0)
6100 {
0c1c8ea6
RK
6101 *ptrue = gen_unary (code, mode, GET_MODE (XEXP (x, 0)), true0);
6102 *pfalse = gen_unary (code, mode, GET_MODE (XEXP (x, 0)), false0);
abe6e52f
RK
6103 return cond0;
6104 }
6105
3a19aabc
RK
6106 /* If this is a COMPARE, do nothing, since the IF_THEN_ELSE we would
6107 make can't possibly match and would suppress other optimizations. */
6108 else if (code == COMPARE)
6109 ;
6110
abe6e52f
RK
6111 /* If this is a binary operation, see if either side has only one of two
6112 values. If either one does or if both do and they are conditional on
6113 the same value, compute the new true and false values. */
6114 else if (GET_RTX_CLASS (code) == 'c' || GET_RTX_CLASS (code) == '2'
6115 || GET_RTX_CLASS (code) == '<')
6116 {
6117 cond0 = if_then_else_cond (XEXP (x, 0), &true0, &false0);
6118 cond1 = if_then_else_cond (XEXP (x, 1), &true1, &false1);
6119
6120 if ((cond0 != 0 || cond1 != 0)
6121 && ! (cond0 != 0 && cond1 != 0 && ! rtx_equal_p (cond0, cond1)))
6122 {
6123 *ptrue = gen_binary (code, mode, true0, true1);
6124 *pfalse = gen_binary (code, mode, false0, false1);
6125 return cond0 ? cond0 : cond1;
6126 }
9210df58
RK
6127
6128#if STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1
6129
6130 /* See if we have PLUS, IOR, XOR, MINUS or UMAX, where one of the
6131 operands is zero when the other is non-zero, and vice-versa. */
6132
6133 if ((code == PLUS || code == IOR || code == XOR || code == MINUS
6134 || code == UMAX)
6135 && GET_CODE (XEXP (x, 0)) == MULT && GET_CODE (XEXP (x, 1)) == MULT)
6136 {
6137 rtx op0 = XEXP (XEXP (x, 0), 1);
6138 rtx op1 = XEXP (XEXP (x, 1), 1);
6139
6140 cond0 = XEXP (XEXP (x, 0), 0);
6141 cond1 = XEXP (XEXP (x, 1), 0);
6142
6143 if (GET_RTX_CLASS (GET_CODE (cond0)) == '<'
6144 && GET_RTX_CLASS (GET_CODE (cond1)) == '<'
6145 && reversible_comparison_p (cond1)
6146 && ((GET_CODE (cond0) == reverse_condition (GET_CODE (cond1))
6147 && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 0))
6148 && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 1)))
6149 || ((swap_condition (GET_CODE (cond0))
6150 == reverse_condition (GET_CODE (cond1)))
6151 && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 1))
6152 && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 0))))
6153 && ! side_effects_p (x))
6154 {
6155 *ptrue = gen_binary (MULT, mode, op0, const_true_rtx);
6156 *pfalse = gen_binary (MULT, mode,
6157 (code == MINUS
0c1c8ea6 6158 ? gen_unary (NEG, mode, mode, op1) : op1),
9210df58
RK
6159 const_true_rtx);
6160 return cond0;
6161 }
6162 }
6163
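/* Editor's note: an illustrative sketch (not part of combine.c, fenced
   off with #if 0), for a machine where STORE_FLAG_VALUE is 1.  A sum
   of two MULTs by opposite conditions selects exactly one operand, so
   the whole expression has just two possible values, as described
   above.  */
#if 0
static int
cond_mult_sum_demo (int c, int a, int b)
{
  return (c != 0) * a + (c == 0) * b == (c ? a : b);
}
#endif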
6164 /* Similarly for MULT, AND and UMIN, except that for these the result
6165 is always zero. */
6166 if ((code == MULT || code == AND || code == UMIN)
6167 && GET_CODE (XEXP (x, 0)) == MULT && GET_CODE (XEXP (x, 1)) == MULT)
6168 {
6169 cond0 = XEXP (XEXP (x, 0), 0);
6170 cond1 = XEXP (XEXP (x, 1), 0);
6171
6172 if (GET_RTX_CLASS (GET_CODE (cond0)) == '<'
6173 && GET_RTX_CLASS (GET_CODE (cond1)) == '<'
6174 && reversible_comparison_p (cond1)
6175 && ((GET_CODE (cond0) == reverse_condition (GET_CODE (cond1))
6176 && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 0))
6177 && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 1)))
6178 || ((swap_condition (GET_CODE (cond0))
6179 == reverse_condition (GET_CODE (cond1)))
6180 && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 1))
6181 && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 0))))
6182 && ! side_effects_p (x))
6183 {
6184 *ptrue = *pfalse = const0_rtx;
6185 return cond0;
6186 }
6187 }
6188#endif
abe6e52f
RK
6189 }
6190
6191 else if (code == IF_THEN_ELSE)
6192 {
6193 /* If we have IF_THEN_ELSE already, extract the condition and
6194 canonicalize it if it is NE or EQ. */
6195 cond0 = XEXP (x, 0);
6196 *ptrue = XEXP (x, 1), *pfalse = XEXP (x, 2);
6197 if (GET_CODE (cond0) == NE && XEXP (cond0, 1) == const0_rtx)
6198 return XEXP (cond0, 0);
6199 else if (GET_CODE (cond0) == EQ && XEXP (cond0, 1) == const0_rtx)
6200 {
6201 *ptrue = XEXP (x, 2), *pfalse = XEXP (x, 1);
6202 return XEXP (cond0, 0);
6203 }
6204 else
6205 return cond0;
6206 }
6207
6208 /* If X is a normal SUBREG with both inner and outer modes integral,
6209 we can narrow both the true and false values of the inner expression,
6210 if there is a condition. */
6211 else if (code == SUBREG && GET_MODE_CLASS (mode) == MODE_INT
6212 && GET_MODE_CLASS (GET_MODE (SUBREG_REG (x))) == MODE_INT
6213 && GET_MODE_SIZE (mode) <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
6214 && 0 != (cond0 = if_then_else_cond (SUBREG_REG (x),
6215 &true0, &false0)))
6216 {
00244e6b
RK
6217 *ptrue = force_to_mode (true0, mode, GET_MODE_MASK (mode), NULL_RTX, 0);
6218 *pfalse
6219 = force_to_mode (false0, mode, GET_MODE_MASK (mode), NULL_RTX, 0);
abe6e52f 6220
abe6e52f
RK
6221 return cond0;
6222 }
6223
6224 /* If X is a constant, this isn't special and will cause confusion
6225 if we treat it as such. Likewise if it is equivalent to a constant. */
6226 else if (CONSTANT_P (x)
6227 || ((cond0 = get_last_value (x)) != 0 && CONSTANT_P (cond0)))
6228 ;
6229
6230 /* If X is known to be either 0 or -1, those are the true and
6231 false values when testing X. */
6232 else if (num_sign_bit_copies (x, mode) == size)
6233 {
6234 *ptrue = constm1_rtx, *pfalse = const0_rtx;
6235 return x;
6236 }
6237
6238 /* Likewise for 0 or a single bit. */
6239 else if (exact_log2 (nz = nonzero_bits (x, mode)) >= 0)
6240 {
6241 *ptrue = GEN_INT (nz), *pfalse = const0_rtx;
6242 return x;
6243 }
6244
6245 /* Otherwise fail; show no condition with true and false values the same. */
6246 *ptrue = *pfalse = x;
6247 return 0;
6248}
6249\f
1a26b032
RK
6250/* Return the value of expression X given the fact that condition COND
6251 is known to be true when applied to REG as its first operand and VAL
6252 as its second. X is known not to be shared and so can be modified in
6253 place.
6254
6255 We only handle the simplest cases, and specifically those cases that
6256 arise with IF_THEN_ELSE expressions. */
6257
6258static rtx
6259known_cond (x, cond, reg, val)
6260 rtx x;
6261 enum rtx_code cond;
6262 rtx reg, val;
6263{
6264 enum rtx_code code = GET_CODE (x);
f24ad0e4 6265 rtx temp;
1a26b032
RK
6266 char *fmt;
6267 int i, j;
6268
6269 if (side_effects_p (x))
6270 return x;
6271
6272 if (cond == EQ && rtx_equal_p (x, reg))
6273 return val;
6274
6275 /* If X is (abs REG) and we know something about REG's relationship
6276 with zero, we may be able to simplify this. */
6277
6278 if (code == ABS && rtx_equal_p (XEXP (x, 0), reg) && val == const0_rtx)
6279 switch (cond)
6280 {
6281 case GE: case GT: case EQ:
6282 return XEXP (x, 0);
6283 case LT: case LE:
0c1c8ea6
RK
6284 return gen_unary (NEG, GET_MODE (XEXP (x, 0)), GET_MODE (XEXP (x, 0)),
6285 XEXP (x, 0));
1a26b032
RK
6286 }
6287
6288 /* The only other cases we handle are MIN, MAX, and comparisons if the
6289 operands are the same as REG and VAL. */
6290
6291 else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == 'c')
6292 {
6293 if (rtx_equal_p (XEXP (x, 0), val))
6294 cond = swap_condition (cond), temp = val, val = reg, reg = temp;
6295
6296 if (rtx_equal_p (XEXP (x, 0), reg) && rtx_equal_p (XEXP (x, 1), val))
6297 {
6298 if (GET_RTX_CLASS (code) == '<')
6299 return (comparison_dominates_p (cond, code) ? const_true_rtx
6300 : (comparison_dominates_p (cond,
6301 reverse_condition (code))
6302 ? const0_rtx : x));
6303
6304 else if (code == SMAX || code == SMIN
6305 || code == UMIN || code == UMAX)
6306 {
6307 int unsignedp = (code == UMIN || code == UMAX);
6308
6309 if (code == SMAX || code == UMAX)
6310 cond = reverse_condition (cond);
6311
6312 switch (cond)
6313 {
6314 case GE: case GT:
6315 return unsignedp ? x : XEXP (x, 1);
6316 case LE: case LT:
6317 return unsignedp ? x : XEXP (x, 0);
6318 case GEU: case GTU:
6319 return unsignedp ? XEXP (x, 1) : x;
6320 case LEU: case LTU:
6321 return unsignedp ? XEXP (x, 0) : x;
6322 }
6323 }
6324 }
6325 }
6326
6327 fmt = GET_RTX_FORMAT (code);
6328 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6329 {
6330 if (fmt[i] == 'e')
6331 SUBST (XEXP (x, i), known_cond (XEXP (x, i), cond, reg, val));
6332 else if (fmt[i] == 'E')
6333 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
6334 SUBST (XVECEXP (x, i, j), known_cond (XVECEXP (x, i, j),
6335 cond, reg, val));
6336 }
6337
6338 return x;
6339}
6340\f
230d793d
RS
6341/* See if X, a SET operation, can be rewritten as a bit-field assignment.
6342 Return that assignment if so.
6343
6344 We only handle the most common cases. */
6345
6346static rtx
6347make_field_assignment (x)
6348 rtx x;
6349{
6350 rtx dest = SET_DEST (x);
6351 rtx src = SET_SRC (x);
dfbe1b2f 6352 rtx assign;
5f4f0e22
CH
6353 HOST_WIDE_INT c1;
6354 int pos, len;
dfbe1b2f
RK
6355 rtx other;
6356 enum machine_mode mode;
230d793d
RS
6357
6358 /* If SRC was (and (not (ashift (const_int 1) POS)) DEST), this is
6359 a clear of a one-bit field. We will have changed it to
6360 (and (rotate (const_int -2) POS) DEST), so check for that. Also check
6361 for a SUBREG. */
6362
6363 if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == ROTATE
6364 && GET_CODE (XEXP (XEXP (src, 0), 0)) == CONST_INT
6365 && INTVAL (XEXP (XEXP (src, 0), 0)) == -2
dfbe1b2f
RK
6366 && (rtx_equal_p (dest, XEXP (src, 1))
6367 || rtx_equal_p (dest, get_last_value (XEXP (src, 1)))
6368 || rtx_equal_p (get_last_value (dest), XEXP (src, 1))))
230d793d 6369 {
8999a12e 6370 assign = make_extraction (VOIDmode, dest, 0, XEXP (XEXP (src, 0), 1),
230d793d 6371 1, 1, 1, 0);
dfbe1b2f 6372 return gen_rtx (SET, VOIDmode, assign, const0_rtx);
230d793d
RS
6373 }
6374
6375 else if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == SUBREG
6376 && subreg_lowpart_p (XEXP (src, 0))
6377 && (GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
6378 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (src, 0)))))
6379 && GET_CODE (SUBREG_REG (XEXP (src, 0))) == ROTATE
6380 && INTVAL (XEXP (SUBREG_REG (XEXP (src, 0)), 0)) == -2
dfbe1b2f
RK
6381 && (rtx_equal_p (dest, XEXP (src, 1))
6382 || rtx_equal_p (dest, get_last_value (XEXP (src, 1)))
6383 || rtx_equal_p (get_last_value (dest), XEXP (src, 1))))
230d793d 6384 {
8999a12e 6385 assign = make_extraction (VOIDmode, dest, 0,
230d793d
RS
6386 XEXP (SUBREG_REG (XEXP (src, 0)), 1),
6387 1, 1, 1, 0);
dfbe1b2f 6388 return gen_rtx (SET, VOIDmode, assign, const0_rtx);
230d793d
RS
6389 }
6390
6391 /* If SRC is (ior (ashift (const_int 1) POS) DEST), this is a set of a
6392 one-bit field. */
6393 else if (GET_CODE (src) == IOR && GET_CODE (XEXP (src, 0)) == ASHIFT
6394 && XEXP (XEXP (src, 0), 0) == const1_rtx
dfbe1b2f
RK
6395 && (rtx_equal_p (dest, XEXP (src, 1))
6396 || rtx_equal_p (dest, get_last_value (XEXP (src, 1)))
6397 || rtx_equal_p (get_last_value (dest), XEXP (src, 1))))
230d793d 6398 {
8999a12e 6399 assign = make_extraction (VOIDmode, dest, 0, XEXP (XEXP (src, 0), 1),
230d793d 6400 1, 1, 1, 0);
dfbe1b2f 6401 return gen_rtx (SET, VOIDmode, assign, const1_rtx);
230d793d
RS
6402 }
6403
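/* Editor's note: an illustrative sketch (not part of combine.c, fenced
   off with #if 0), assuming 32-bit unsigned ints.  Rotating -2 left by
   POS yields a mask with only bit POS clear, which is why the
   (and (rotate (const_int -2) POS) DEST) form recognized above clears
   exactly one bit of DEST.  */
#if 0
static int
one_bit_clear_demo (unsigned int pos)
{
  /* Assumes 0 < pos < 32 so both shift counts are valid.  */
  unsigned int rot = (0xfffffffeu << pos) | (0xfffffffeu >> (32 - pos));

  return rot == ~(1u << pos);
}
#endif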
dfbe1b2f
RK
6404 /* The other case we handle is assignments into a constant-position
6405 field. They look like (ior (and DEST C1) OTHER). If C1 represents
6406 a mask that has all one bits except for a group of zero bits and
6407 OTHER is known to have zeros where C1 has ones, this is such an
6408 assignment. Compute the position and length from C1. Shift OTHER
6409 to the appropriate position, force it to the required mode, and
6410 make the extraction. Check for the AND in both operands. */
6411
6412 if (GET_CODE (src) == IOR && GET_CODE (XEXP (src, 0)) == AND
6413 && GET_CODE (XEXP (XEXP (src, 0), 1)) == CONST_INT
6414 && (rtx_equal_p (XEXP (XEXP (src, 0), 0), dest)
6415 || rtx_equal_p (XEXP (XEXP (src, 0), 0), get_last_value (dest))
6416 || rtx_equal_p (get_last_value (XEXP (XEXP (src, 0), 1)), dest)))
6417 c1 = INTVAL (XEXP (XEXP (src, 0), 1)), other = XEXP (src, 1);
6418 else if (GET_CODE (src) == IOR && GET_CODE (XEXP (src, 1)) == AND
6419 && GET_CODE (XEXP (XEXP (src, 1), 1)) == CONST_INT
6420 && (rtx_equal_p (XEXP (XEXP (src, 1), 0), dest)
6421 || rtx_equal_p (XEXP (XEXP (src, 1), 0), get_last_value (dest))
6422 || rtx_equal_p (get_last_value (XEXP (XEXP (src, 1), 0)),
6423 dest)))
6424 c1 = INTVAL (XEXP (XEXP (src, 1), 1)), other = XEXP (src, 0);
6425 else
6426 return x;
230d793d 6427
c2f9f64e 6428 pos = get_pos_from_mask (c1 ^ GET_MODE_MASK (GET_MODE (dest)), &len);
dfbe1b2f 6429 if (pos < 0 || pos + len > GET_MODE_BITSIZE (GET_MODE (dest))
ac49a949 6430 || (GET_MODE_BITSIZE (GET_MODE (other)) <= HOST_BITS_PER_WIDE_INT
951553af 6431 && (c1 & nonzero_bits (other, GET_MODE (other))) != 0))
dfbe1b2f 6432 return x;
230d793d 6433
5f4f0e22 6434 assign = make_extraction (VOIDmode, dest, pos, NULL_RTX, len, 1, 1, 0);
230d793d 6435
dfbe1b2f
RK
6436 /* The mode to use for the source is the mode of the assignment, or of
6437 what is inside a possible STRICT_LOW_PART. */
6438 mode = (GET_CODE (assign) == STRICT_LOW_PART
6439 ? GET_MODE (XEXP (assign, 0)) : GET_MODE (assign));
230d793d 6440
dfbe1b2f
RK
6441 /* Shift OTHER right POS places and make it the source, restricting it
6442 to the proper length and mode. */
230d793d 6443
5f4f0e22
CH
6444 src = force_to_mode (simplify_shift_const (NULL_RTX, LSHIFTRT,
6445 GET_MODE (src), other, pos),
6139ff20
RK
6446 mode,
6447 GET_MODE_BITSIZE (mode) >= HOST_BITS_PER_WIDE_INT
6448 ? GET_MODE_MASK (mode)
6449 : ((HOST_WIDE_INT) 1 << len) - 1,
e3d616e3 6450 dest, 0);
230d793d 6451
dfbe1b2f 6452 return gen_rtx_combine (SET, VOIDmode, assign, src);
230d793d
RS
6453}
6454\f
6455/* See if X is of the form (+ (* a c) (* b c)) and convert to (* (+ a b) c)
6456 if so. */
6457
6458static rtx
6459apply_distributive_law (x)
6460 rtx x;
6461{
6462 enum rtx_code code = GET_CODE (x);
6463 rtx lhs, rhs, other;
6464 rtx tem;
6465 enum rtx_code inner_code;
6466
d8a8a4da
RS
6467 /* Distributivity is not true for floating point.
6468 It can change the value. So don't do it.
6469 -- rms and moshier@world.std.com. */
3ad2180a 6470 if (FLOAT_MODE_P (GET_MODE (x)))
d8a8a4da
RS
6471 return x;
6472
230d793d
RS
6473 /* The outer operation can only be one of the following: */
6474 if (code != IOR && code != AND && code != XOR
6475 && code != PLUS && code != MINUS)
6476 return x;
6477
6478 lhs = XEXP (x, 0), rhs = XEXP (x, 1);
6479
dfbe1b2f 6480 /* If either operand is a primitive we can't do anything, so get out fast. */
230d793d 6481 if (GET_RTX_CLASS (GET_CODE (lhs)) == 'o'
dfbe1b2f 6482 || GET_RTX_CLASS (GET_CODE (rhs)) == 'o')
230d793d
RS
6483 return x;
6484
6485 lhs = expand_compound_operation (lhs);
6486 rhs = expand_compound_operation (rhs);
6487 inner_code = GET_CODE (lhs);
6488 if (inner_code != GET_CODE (rhs))
6489 return x;
6490
6491 /* See if the inner and outer operations distribute. */
6492 switch (inner_code)
6493 {
6494 case LSHIFTRT:
6495 case ASHIFTRT:
6496 case AND:
6497 case IOR:
6498 /* These all distribute except over PLUS. */
6499 if (code == PLUS || code == MINUS)
6500 return x;
6501 break;
6502
6503 case MULT:
6504 if (code != PLUS && code != MINUS)
6505 return x;
6506 break;
6507
6508 case ASHIFT:
45620ed4 6509 /* This is also a multiply, so it distributes over everything. */
230d793d
RS
6510 break;
6511
6512 case SUBREG:
dfbe1b2f
RK
6513 /* Non-paradoxical SUBREGs distribute over all operations, provided
6514 the inner modes and word numbers are the same, this is an extraction
2b4bd1bc
JW
6515 of a low-order part, we don't convert an fp operation to int or
6516 vice versa, and we would not be converting a single-word
dfbe1b2f 6517 operation into a multi-word operation. The latter test is not
2b4bd1bc 6518 required, but it prevents generating unneeded multi-word operations.
dfbe1b2f
RK
6519 Some of the previous tests are redundant given the latter test, but
6520 are retained because they are required for correctness.
6521
6522 We produce the result slightly differently in this case. */
6523
6524 if (GET_MODE (SUBREG_REG (lhs)) != GET_MODE (SUBREG_REG (rhs))
6525 || SUBREG_WORD (lhs) != SUBREG_WORD (rhs)
6526 || ! subreg_lowpart_p (lhs)
2b4bd1bc
JW
6527 || (GET_MODE_CLASS (GET_MODE (lhs))
6528 != GET_MODE_CLASS (GET_MODE (SUBREG_REG (lhs))))
dfbe1b2f
RK
6529 || (GET_MODE_SIZE (GET_MODE (lhs))
6530 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs))))
6531 || GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs))) > UNITS_PER_WORD)
230d793d
RS
6532 return x;
6533
6534 tem = gen_binary (code, GET_MODE (SUBREG_REG (lhs)),
6535 SUBREG_REG (lhs), SUBREG_REG (rhs));
6536 return gen_lowpart_for_combine (GET_MODE (x), tem);
6537
6538 default:
6539 return x;
6540 }
6541
6542 /* Set LHS and RHS to the inner operands (A and B in the example
6543 above) and set OTHER to the common operand (C in the example).
6544 There is only one way to do this unless the inner operation is
6545 commutative. */
6546 if (GET_RTX_CLASS (inner_code) == 'c'
6547 && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 0)))
6548 other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 1);
6549 else if (GET_RTX_CLASS (inner_code) == 'c'
6550 && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 1)))
6551 other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 0);
6552 else if (GET_RTX_CLASS (inner_code) == 'c'
6553 && rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 0)))
6554 other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 1);
6555 else if (rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 1)))
6556 other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 0);
6557 else
6558 return x;
6559
6560 /* Form the new inner operation, seeing if it simplifies first. */
6561 tem = gen_binary (code, GET_MODE (x), lhs, rhs);
6562
6563 /* There is one exception to the general way of distributing:
6564 (a | c) ^ (b | c) -> (a ^ b) & ~c */
6565 if (code == XOR && inner_code == IOR)
6566 {
6567 inner_code = AND;
0c1c8ea6 6568 other = gen_unary (NOT, GET_MODE (x), GET_MODE (x), other);
230d793d
RS
6569 }
6570
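/* Editor's note: an illustrative sketch (not part of combine.c, fenced
   off with #if 0), assuming 32-bit unsigned ints.  The first check is
   the ordinary distribution of MULT over PLUS; the second is the XOR
   over IOR exception just above, where the common operand C comes back
   complemented and the inner operation becomes AND.  */
#if 0
static int
distributive_demo (unsigned int a, unsigned int b, unsigned int c)
{
  return (a * c + b * c) == ((a + b) * c)
	 && (((a | c) ^ (b | c)) == ((a ^ b) & ~c));
}
#endif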
6571 /* We may be able to continue distributing the result, so call
6572 ourselves recursively on the inner operation before forming the
6573 outer operation, which we return. */
6574 return gen_binary (inner_code, GET_MODE (x),
6575 apply_distributive_law (tem), other);
6576}
6577\f
6578/* We have X, a logical `and' of VAROP with the constant CONSTOP, to be done
6579 in MODE.
6580
6581 Return an equivalent form, if different from X. Otherwise, return X. If
6582 X is zero, we are to always construct the equivalent form. */
6583
6584static rtx
6585simplify_and_const_int (x, mode, varop, constop)
6586 rtx x;
6587 enum machine_mode mode;
6588 rtx varop;
5f4f0e22 6589 unsigned HOST_WIDE_INT constop;
230d793d 6590{
951553af 6591 unsigned HOST_WIDE_INT nonzero;
42301240 6592 int i;
230d793d 6593
6139ff20
RK
6594 /* Simplify VAROP knowing that we will be only looking at some of the
6595 bits in it. */
e3d616e3 6596 varop = force_to_mode (varop, mode, constop, NULL_RTX, 0);
230d793d 6597
6139ff20
RK
6598 /* If VAROP is a CLOBBER, we will fail so return it; if it is a
6599 CONST_INT, we are done. */
6600 if (GET_CODE (varop) == CLOBBER || GET_CODE (varop) == CONST_INT)
6601 return varop;
230d793d 6602
fc06d7aa
RK
6603 /* See what bits may be nonzero in VAROP. Unlike the general case of
6604 a call to nonzero_bits, here we don't care about bits outside
6605 MODE. */
6606
6607 nonzero = nonzero_bits (varop, mode) & GET_MODE_MASK (mode);
230d793d
RS
6608
6609 /* Turn off all bits in the constant that are known to already be zero.
951553af 6610 Thus, if the AND isn't needed at all, we will have CONSTOP == NONZERO_BITS
230d793d
RS
6611 which is tested below. */
6612
951553af 6613 constop &= nonzero;
230d793d
RS
6614
6615 /* If we don't have any bits left, return zero. */
6616 if (constop == 0)
6617 return const0_rtx;
6618
42301240
RK
6619 /* If VAROP is a NEG of something known to be zero or 1 and CONSTOP is
6620 a power of two, we can replace this with an ASHIFT. */
6621 if (GET_CODE (varop) == NEG && nonzero_bits (XEXP (varop, 0), mode) == 1
6622 && (i = exact_log2 (constop)) >= 0)
6623 return simplify_shift_const (NULL_RTX, ASHIFT, mode, XEXP (varop, 0), i);
6624
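/* Editor's note: an illustrative sketch (not part of combine.c, fenced
   off with #if 0).  If X is known to be 0 or 1, (neg X) is 0 or
   all-ones, so ANDing it with the power of two 1 << I picks out the
   same value that shifting X left by I produces.  */
#if 0
static int
neg_and_power_demo (unsigned int x, unsigned int i)
{
  /* Assumes x is 0 or 1 and 0 <= i < 32.  */
  return ((0u - x) & (1u << i)) == (x << i);
}
#endif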
6139ff20
RK
6625 /* If VAROP is an IOR or XOR, apply the AND to both branches of the IOR
6626 or XOR, then try to apply the distributive law. This may eliminate
6627 operations if either branch can be simplified because of the AND.
6628 It may also make some cases more complex, but those cases probably
6629 won't match a pattern either with or without this. */
6630
6631 if (GET_CODE (varop) == IOR || GET_CODE (varop) == XOR)
6632 return
6633 gen_lowpart_for_combine
6634 (mode,
6635 apply_distributive_law
6636 (gen_binary (GET_CODE (varop), GET_MODE (varop),
6637 simplify_and_const_int (NULL_RTX, GET_MODE (varop),
6638 XEXP (varop, 0), constop),
6639 simplify_and_const_int (NULL_RTX, GET_MODE (varop),
6640 XEXP (varop, 1), constop))));
6641
230d793d
RS
6642 /* Get VAROP in MODE. Try to get a SUBREG if not. Don't make a new SUBREG
6643 if we already had one (just check for the simplest cases). */
6644 if (x && GET_CODE (XEXP (x, 0)) == SUBREG
6645 && GET_MODE (XEXP (x, 0)) == mode
6646 && SUBREG_REG (XEXP (x, 0)) == varop)
6647 varop = XEXP (x, 0);
6648 else
6649 varop = gen_lowpart_for_combine (mode, varop);
6650
6651 /* If we can't make the SUBREG, try to return what we were given. */
6652 if (GET_CODE (varop) == CLOBBER)
6653 return x ? x : varop;
6654
6655 /* If we are only masking insignificant bits, return VAROP. */
951553af 6656 if (constop == nonzero)
230d793d
RS
6657 x = varop;
6658
6659 /* Otherwise, return an AND. See how much, if any, of X we can use. */
6660 else if (x == 0 || GET_CODE (x) != AND || GET_MODE (x) != mode)
6139ff20 6661 x = gen_binary (AND, mode, varop, GEN_INT (constop));
230d793d
RS
6662
6663 else
6664 {
6665 if (GET_CODE (XEXP (x, 1)) != CONST_INT
6666 || INTVAL (XEXP (x, 1)) != constop)
5f4f0e22 6667 SUBST (XEXP (x, 1), GEN_INT (constop));
230d793d
RS
6668
6669 SUBST (XEXP (x, 0), varop);
6670 }
6671
6672 return x;
6673}
6674\f
6675/* Given an expression, X, compute which bits in X can be non-zero.
6676 We don't care about bits outside of those defined in MODE.
6677
6678 For most X this is simply GET_MODE_MASK (GET_MODE (X)), but if X is
6679 a shift, AND, or zero_extract, we can do better. */
6680
5f4f0e22 6681static unsigned HOST_WIDE_INT
951553af 6682nonzero_bits (x, mode)
230d793d
RS
6683 rtx x;
6684 enum machine_mode mode;
6685{
951553af
RK
6686 unsigned HOST_WIDE_INT nonzero = GET_MODE_MASK (mode);
6687 unsigned HOST_WIDE_INT inner_nz;
230d793d
RS
6688 enum rtx_code code;
6689 int mode_width = GET_MODE_BITSIZE (mode);
6690 rtx tem;
6691
1c75dfa4
RK
6692 /* For floating-point values, assume all bits are needed. */
6693 if (FLOAT_MODE_P (GET_MODE (x)) || FLOAT_MODE_P (mode))
6694 return nonzero;
6695
230d793d
RS
6696 /* If X is wider than MODE, use its mode instead. */
6697 if (GET_MODE_BITSIZE (GET_MODE (x)) > mode_width)
6698 {
6699 mode = GET_MODE (x);
951553af 6700 nonzero = GET_MODE_MASK (mode);
230d793d
RS
6701 mode_width = GET_MODE_BITSIZE (mode);
6702 }
6703
5f4f0e22 6704 if (mode_width > HOST_BITS_PER_WIDE_INT)
230d793d
RS
6705 /* Our only callers in this case look for single bit values. So
6706 just return the mode mask. Those tests will then be false. */
951553af 6707 return nonzero;
230d793d 6708
8baf60bb 6709#ifndef WORD_REGISTER_OPERATIONS
c6965c0f 6710 /* If MODE is wider than X, but both are a single word for both the host
0840fd91
RK
6711 and target machines, we can compute this from which bits of the
6712 object might be nonzero in its own mode, taking into account the fact
6713 that on many CISC machines, accessing an object in a wider mode
6714 causes the high-order bits to become undefined. So they are
6715 not known to be zero. */
6716
6717 if (GET_MODE (x) != VOIDmode && GET_MODE (x) != mode
6718 && GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD
6719 && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
c6965c0f 6720 && GET_MODE_BITSIZE (mode) > GET_MODE_BITSIZE (GET_MODE (x)))
0840fd91
RK
6721 {
6722 nonzero &= nonzero_bits (x, GET_MODE (x));
6723 nonzero |= GET_MODE_MASK (mode) & ~ GET_MODE_MASK (GET_MODE (x));
6724 return nonzero;
6725 }
6726#endif
6727
230d793d
RS
6728 code = GET_CODE (x);
6729 switch (code)
6730 {
6731 case REG:
6732#ifdef STACK_BOUNDARY
6733 /* If this is the stack pointer, we may know something about its
6734 alignment. If PUSH_ROUNDING is defined, it is possible for the
6735 stack to be momentarily aligned only to that amount, so we pick
6736 the least alignment. */
6737
6738 if (x == stack_pointer_rtx)
6739 {
6740 int sp_alignment = STACK_BOUNDARY / BITS_PER_UNIT;
6741
6742#ifdef PUSH_ROUNDING
6743 sp_alignment = MIN (PUSH_ROUNDING (1), sp_alignment);
6744#endif
6745
951553af 6746 return nonzero & ~ (sp_alignment - 1);
230d793d
RS
6747 }
6748#endif
6749
55310dad
RK
6750 /* If X is a register whose nonzero bits value is current, use it.
6751 Otherwise, if X is a register whose value we can find, use that
6752 value. Otherwise, use the previously-computed global nonzero bits
6753 for this register. */
6754
6755 if (reg_last_set_value[REGNO (x)] != 0
6756 && reg_last_set_mode[REGNO (x)] == mode
6757 && (reg_n_sets[REGNO (x)] == 1
6758 || reg_last_set_label[REGNO (x)] == label_tick)
6759 && INSN_CUID (reg_last_set[REGNO (x)]) < subst_low_cuid)
6760 return reg_last_set_nonzero_bits[REGNO (x)];
230d793d
RS
6761
6762 tem = get_last_value (x);
9afa3d54 6763
230d793d 6764 if (tem)
9afa3d54
RK
6765 {
6766#ifdef SHORT_IMMEDIATES_SIGN_EXTEND
6767 /* If X is narrower than MODE and TEM is a non-negative
6768 constant that would appear negative in the mode of X,
6769 sign-extend it for use in reg_nonzero_bits because some
6770 machines (maybe most) will actually do the sign-extension
6771 and this is the conservative approach.
6772
6773 ??? For 2.5, try to tighten up the MD files in this regard
6774 instead of this kludge. */
6775
6776 if (GET_MODE_BITSIZE (GET_MODE (x)) < mode_width
6777 && GET_CODE (tem) == CONST_INT
6778 && INTVAL (tem) > 0
6779 && 0 != (INTVAL (tem)
6780 & ((HOST_WIDE_INT) 1
9e69be8c 6781 << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
9afa3d54
RK
6782 tem = GEN_INT (INTVAL (tem)
6783 | ((HOST_WIDE_INT) (-1)
6784 << GET_MODE_BITSIZE (GET_MODE (x))));
6785#endif
6786 return nonzero_bits (tem, mode);
6787 }
951553af
RK
6788 else if (nonzero_sign_valid && reg_nonzero_bits[REGNO (x)])
6789 return reg_nonzero_bits[REGNO (x)] & nonzero;
230d793d 6790 else
951553af 6791 return nonzero;
230d793d
RS
6792
6793 case CONST_INT:
9afa3d54
RK
6794#ifdef SHORT_IMMEDIATES_SIGN_EXTEND
6795 /* If X is negative in MODE, sign-extend the value. */
9e69be8c
RK
6796 if (INTVAL (x) > 0 && mode_width < BITS_PER_WORD
6797 && 0 != (INTVAL (x) & ((HOST_WIDE_INT) 1 << (mode_width - 1))))
6798 return (INTVAL (x) | ((HOST_WIDE_INT) (-1) << mode_width));
9afa3d54
RK
6799#endif
6800
230d793d
RS
6801 return INTVAL (x);
6802
230d793d 6803 case MEM:
8baf60bb 6804#ifdef LOAD_EXTEND_OP
230d793d
RS
6805 /* In many, if not most, RISC machines, reading a byte from memory
6806 zeros the rest of the register. Noticing that fact saves a lot
6807 of extra zero-extends. */
8baf60bb
RK
6808 if (LOAD_EXTEND_OP (GET_MODE (x)) == ZERO_EXTEND)
6809 nonzero &= GET_MODE_MASK (GET_MODE (x));
230d793d 6810#endif
8baf60bb 6811 break;
230d793d 6812
230d793d
RS
6813 case EQ: case NE:
6814 case GT: case GTU:
6815 case LT: case LTU:
6816 case GE: case GEU:
6817 case LE: case LEU:
3f508eca 6818
c6965c0f
RK
6819 /* If this produces an integer result, we know which bits are set.
6820 Code here used to clear bits outside the mode of X, but that is
6821 now done above. */
230d793d 6822
c6965c0f
RK
6823 if (GET_MODE_CLASS (mode) == MODE_INT
6824 && mode_width <= HOST_BITS_PER_WIDE_INT)
6825 nonzero = STORE_FLAG_VALUE;
230d793d 6826 break;
230d793d 6827
230d793d 6828 case NEG:
d0ab8cd3
RK
6829 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
6830 == GET_MODE_BITSIZE (GET_MODE (x)))
951553af 6831 nonzero = 1;
230d793d
RS
6832
6833 if (GET_MODE_SIZE (GET_MODE (x)) < mode_width)
951553af 6834 nonzero |= (GET_MODE_MASK (mode) & ~ GET_MODE_MASK (GET_MODE (x)));
230d793d 6835 break;
d0ab8cd3
RK
6836
6837 case ABS:
6838 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
6839 == GET_MODE_BITSIZE (GET_MODE (x)))
951553af 6840 nonzero = 1;
d0ab8cd3 6841 break;
230d793d
RS
6842
6843 case TRUNCATE:
951553af 6844 nonzero &= (nonzero_bits (XEXP (x, 0), mode) & GET_MODE_MASK (mode));
230d793d
RS
6845 break;
6846
6847 case ZERO_EXTEND:
951553af 6848 nonzero &= nonzero_bits (XEXP (x, 0), mode);
230d793d 6849 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
951553af 6850 nonzero &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
230d793d
RS
6851 break;
6852
6853 case SIGN_EXTEND:
6854 /* If the sign bit is known clear, this is the same as ZERO_EXTEND.
6855 Otherwise, show all the bits in the outer mode but not the inner
6856 may be non-zero. */
951553af 6857 inner_nz = nonzero_bits (XEXP (x, 0), mode);
230d793d
RS
6858 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
6859 {
951553af
RK
6860 inner_nz &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
6861 if (inner_nz &
5f4f0e22
CH
6862 (((HOST_WIDE_INT) 1
6863 << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1))))
951553af 6864 inner_nz |= (GET_MODE_MASK (mode)
230d793d
RS
6865 & ~ GET_MODE_MASK (GET_MODE (XEXP (x, 0))));
6866 }
6867
951553af 6868 nonzero &= inner_nz;
230d793d
RS
6869 break;
6870
6871 case AND:
951553af
RK
6872 nonzero &= (nonzero_bits (XEXP (x, 0), mode)
6873 & nonzero_bits (XEXP (x, 1), mode));
230d793d
RS
6874 break;
6875
d0ab8cd3
RK
6876 case XOR: case IOR:
6877 case UMIN: case UMAX: case SMIN: case SMAX:
951553af
RK
6878 nonzero &= (nonzero_bits (XEXP (x, 0), mode)
6879 | nonzero_bits (XEXP (x, 1), mode));
230d793d
RS
6880 break;
6881
6882 case PLUS: case MINUS:
6883 case MULT:
6884 case DIV: case UDIV:
6885 case MOD: case UMOD:
6886 /* We can apply the rules of arithmetic to compute the number of
6887 high- and low-order zero bits of these operations. We start by
6888 computing the width (one more than the position of the highest-order non-zero bit)
6889 and the number of low-order zero bits for each value. */
6890 {
951553af
RK
6891 unsigned HOST_WIDE_INT nz0 = nonzero_bits (XEXP (x, 0), mode);
6892 unsigned HOST_WIDE_INT nz1 = nonzero_bits (XEXP (x, 1), mode);
6893 int width0 = floor_log2 (nz0) + 1;
6894 int width1 = floor_log2 (nz1) + 1;
6895 int low0 = floor_log2 (nz0 & -nz0);
6896 int low1 = floor_log2 (nz1 & -nz1);
318b149c
RK
6897 HOST_WIDE_INT op0_maybe_minusp
6898 = (nz0 & ((HOST_WIDE_INT) 1 << (mode_width - 1)));
6899 HOST_WIDE_INT op1_maybe_minusp
6900 = (nz1 & ((HOST_WIDE_INT) 1 << (mode_width - 1)));
230d793d
RS
6901 int result_width = mode_width;
6902 int result_low = 0;
6903
6904 switch (code)
6905 {
6906 case PLUS:
6907 result_width = MAX (width0, width1) + 1;
6908 result_low = MIN (low0, low1);
6909 break;
6910 case MINUS:
6911 result_low = MIN (low0, low1);
6912 break;
6913 case MULT:
6914 result_width = width0 + width1;
6915 result_low = low0 + low1;
6916 break;
6917 case DIV:
6918 if (! op0_maybe_minusp && ! op1_maybe_minusp)
6919 result_width = width0;
6920 break;
6921 case UDIV:
6922 result_width = width0;
6923 break;
6924 case MOD:
6925 if (! op0_maybe_minusp && ! op1_maybe_minusp)
6926 result_width = MIN (width0, width1);
6927 result_low = MIN (low0, low1);
6928 break;
6929 case UMOD:
6930 result_width = MIN (width0, width1);
6931 result_low = MIN (low0, low1);
6932 break;
6933 }
6934
6935 if (result_width < mode_width)
951553af 6936 nonzero &= ((HOST_WIDE_INT) 1 << result_width) - 1;
230d793d
RS
6937
6938 if (result_low > 0)
951553af 6939 nonzero &= ~ (((HOST_WIDE_INT) 1 << result_low) - 1);
230d793d
RS
6940 }
6941 break;
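/* Editor's note: an illustrative sketch (not part of combine.c, fenced
   off with #if 0), checking the PLUS rule above in 32-bit unsigned
   arithmetic: if both addends fit in 8 bits and have their low 2 bits
   clear, the sum fits in 8 + 1 bits and keeps the low 2 bits clear.  */
#if 0
static int
plus_width_demo (unsigned int a, unsigned int b)
{
  if ((a >> 8) != 0 || (b >> 8) != 0 || (a & 3u) != 0 || (b & 3u) != 0)
    return 1;			/* precondition not met */

  return ((a + b) >> 9) == 0 && ((a + b) & 3u) == 0;
}
#endif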
6942
6943 case ZERO_EXTRACT:
6944 if (GET_CODE (XEXP (x, 1)) == CONST_INT
5f4f0e22 6945 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
951553af 6946 nonzero &= ((HOST_WIDE_INT) 1 << INTVAL (XEXP (x, 1))) - 1;
230d793d
RS
6947 break;
6948
6949 case SUBREG:
c3c2cb37
RK
6950 /* If this is a SUBREG formed for a promoted variable that has
6951 been zero-extended, we know that at least the high-order bits
6952 are zero, though others might be too. */
6953
6954 if (SUBREG_PROMOTED_VAR_P (x) && SUBREG_PROMOTED_UNSIGNED_P (x))
951553af
RK
6955 nonzero = (GET_MODE_MASK (GET_MODE (x))
6956 & nonzero_bits (SUBREG_REG (x), GET_MODE (x)));
c3c2cb37 6957
230d793d
RS
6958 /* If the inner mode is a single word for both the host and target
6959 machines, we can compute this from which bits of the inner
951553af 6960 object might be nonzero. */
230d793d 6961 if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) <= BITS_PER_WORD
5f4f0e22
CH
6962 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
6963 <= HOST_BITS_PER_WIDE_INT))
230d793d 6964 {
951553af 6965 nonzero &= nonzero_bits (SUBREG_REG (x), mode);
8baf60bb
RK
6966
6967#ifndef WORD_REGISTER_OPERATIONS
230d793d
RS
6968 /* On many CISC machines, accessing an object in a wider mode
6969 causes the high-order bits to become undefined. So they are
6970 not known to be zero. */
6971 if (GET_MODE_SIZE (GET_MODE (x))
6972 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
951553af
RK
6973 nonzero |= (GET_MODE_MASK (GET_MODE (x))
6974 & ~ GET_MODE_MASK (GET_MODE (SUBREG_REG (x))));
230d793d
RS
6975#endif
6976 }
6977 break;
6978
6979 case ASHIFTRT:
6980 case LSHIFTRT:
6981 case ASHIFT:
230d793d 6982 case ROTATE:
951553af 6983 /* The nonzero bits are in two classes: any bits within MODE
230d793d 6984 that aren't in GET_MODE (x) are always significant. The rest of the
951553af 6985 nonzero bits are those that are significant in the operand of
230d793d
RS
6986 the shift when shifted the appropriate number of bits. This
6987 shows that high-order bits are cleared by the right shift and
6988 low-order bits by left shifts. */
6989 if (GET_CODE (XEXP (x, 1)) == CONST_INT
6990 && INTVAL (XEXP (x, 1)) >= 0
5f4f0e22 6991 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
230d793d
RS
6992 {
6993 enum machine_mode inner_mode = GET_MODE (x);
6994 int width = GET_MODE_BITSIZE (inner_mode);
6995 int count = INTVAL (XEXP (x, 1));
5f4f0e22 6996 unsigned HOST_WIDE_INT mode_mask = GET_MODE_MASK (inner_mode);
951553af
RK
6997 unsigned HOST_WIDE_INT op_nonzero = nonzero_bits (XEXP (x, 0), mode);
6998 unsigned HOST_WIDE_INT inner = op_nonzero & mode_mask;
5f4f0e22 6999 unsigned HOST_WIDE_INT outer = 0;
230d793d
RS
7000
7001 if (mode_width > width)
951553af 7002 outer = (op_nonzero & nonzero & ~ mode_mask);
230d793d
RS
7003
7004 if (code == LSHIFTRT)
7005 inner >>= count;
7006 else if (code == ASHIFTRT)
7007 {
7008 inner >>= count;
7009
951553af 7010 /* If the sign bit may have been nonzero before the shift, we
230d793d 7011 need to mark all the places it could have been copied to
951553af 7012 by the shift as possibly nonzero. */
5f4f0e22
CH
7013 if (inner & ((HOST_WIDE_INT) 1 << (width - 1 - count)))
7014 inner |= (((HOST_WIDE_INT) 1 << count) - 1) << (width - count);
230d793d 7015 }
45620ed4 7016 else if (code == ASHIFT)
230d793d
RS
7017 inner <<= count;
7018 else
7019 inner = ((inner << (count % width)
7020 | (inner >> (width - (count % width)))) & mode_mask);
7021
951553af 7022 nonzero &= (outer | inner);
230d793d
RS
7023 }
7024 break;
7025
7026 case FFS:
7027 /* This is at most the number of bits in the mode. */
951553af 7028 nonzero = ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width) + 1)) - 1;
230d793d 7029 break;
d0ab8cd3
RK
7030
7031 case IF_THEN_ELSE:
951553af
RK
7032 nonzero &= (nonzero_bits (XEXP (x, 1), mode)
7033 | nonzero_bits (XEXP (x, 2), mode));
d0ab8cd3 7034 break;
230d793d
RS
7035 }
7036
951553af 7037 return nonzero;
230d793d
RS
7038}
7039\f
d0ab8cd3 7040/* Return the number of bits at the high-order end of X that are known to
5109d49f
RK
7041 be equal to the sign bit. X will be used in mode MODE; if MODE is
7042 VOIDmode, X will be used in its own mode. The returned value will always
7043 be between 1 and the number of bits in MODE. */
d0ab8cd3
RK
7044
7045static int
7046num_sign_bit_copies (x, mode)
7047 rtx x;
7048 enum machine_mode mode;
7049{
7050 enum rtx_code code = GET_CODE (x);
7051 int bitwidth;
7052 int num0, num1, result;
951553af 7053 unsigned HOST_WIDE_INT nonzero;
d0ab8cd3
RK
7054 rtx tem;
7055
7056 /* If we weren't given a mode, use the mode of X. If the mode is still
1c75dfa4
RK
7057 VOIDmode, we don't know anything. Likewise if one of the modes is
7058 floating-point. */
d0ab8cd3
RK
7059
7060 if (mode == VOIDmode)
7061 mode = GET_MODE (x);
7062
1c75dfa4 7063 if (mode == VOIDmode || FLOAT_MODE_P (mode) || FLOAT_MODE_P (GET_MODE (x)))
6752e8d2 7064 return 1;
d0ab8cd3
RK
7065
7066 bitwidth = GET_MODE_BITSIZE (mode);
7067
312def2e
RK
7068 /* For a smaller object, just ignore the high bits. */
7069 if (bitwidth < GET_MODE_BITSIZE (GET_MODE (x)))
7070 return MAX (1, (num_sign_bit_copies (x, GET_MODE (x))
7071 - (GET_MODE_BITSIZE (GET_MODE (x)) - bitwidth)));
7072
0c314d1a
RK
7073#ifndef WORD_REGISTER_OPERATIONS
7074 /* If this machine does not do all register operations on the entire
7075 register and MODE is wider than the mode of X, we can say nothing
7076 at all about the high-order bits. */
7077 if (GET_MODE (x) != VOIDmode && bitwidth > GET_MODE_BITSIZE (GET_MODE (x)))
7078 return 1;
7079#endif
7080
d0ab8cd3
RK
7081 switch (code)
7082 {
7083 case REG:
55310dad
RK
7084
7085 if (reg_last_set_value[REGNO (x)] != 0
7086 && reg_last_set_mode[REGNO (x)] == mode
7087 && (reg_n_sets[REGNO (x)] == 1
7088 || reg_last_set_label[REGNO (x)] == label_tick)
7089 && INSN_CUID (reg_last_set[REGNO (x)]) < subst_low_cuid)
7090 return reg_last_set_sign_bit_copies[REGNO (x)];
d0ab8cd3
RK
7091
7092 tem = get_last_value (x);
7093 if (tem != 0)
7094 return num_sign_bit_copies (tem, mode);
55310dad
RK
7095
7096 if (nonzero_sign_valid && reg_sign_bit_copies[REGNO (x)] != 0)
7097 return reg_sign_bit_copies[REGNO (x)];
d0ab8cd3
RK
7098 break;
7099
457816e2 7100 case MEM:
8baf60bb 7101#ifdef LOAD_EXTEND_OP
457816e2 7102 /* Some RISC machines sign-extend all loads of smaller than a word. */
8baf60bb
RK
7103 if (LOAD_EXTEND_OP (GET_MODE (x)) == SIGN_EXTEND)
7104 return MAX (1, bitwidth - GET_MODE_BITSIZE (GET_MODE (x)) + 1);
457816e2 7105#endif
8baf60bb 7106 break;
457816e2 7107
d0ab8cd3
RK
7108 case CONST_INT:
7109 /* If the constant is negative, take its 1's complement and remask.
7110 Then see how many zero bits we have. */
951553af 7111 nonzero = INTVAL (x) & GET_MODE_MASK (mode);
ac49a949 7112 if (bitwidth <= HOST_BITS_PER_WIDE_INT
951553af
RK
7113 && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
7114 nonzero = (~ nonzero) & GET_MODE_MASK (mode);
d0ab8cd3 7115
951553af 7116 return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
d0ab8cd3
RK
7117
7118 case SUBREG:
c3c2cb37
RK
7119 /* If this is a SUBREG for a promoted object that is sign-extended
7120 and we are looking at it in a wider mode, we know that at least the
7121 high-order bits are known to be sign bit copies. */
7122
7123 if (SUBREG_PROMOTED_VAR_P (x) && ! SUBREG_PROMOTED_UNSIGNED_P (x))
dc3e17ad
RK
7124 return MAX (bitwidth - GET_MODE_BITSIZE (GET_MODE (x)) + 1,
7125 num_sign_bit_copies (SUBREG_REG (x), mode));
c3c2cb37 7126
d0ab8cd3
RK
7127 /* For a smaller object, just ignore the high bits. */
7128 if (bitwidth <= GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))))
7129 {
7130 num0 = num_sign_bit_copies (SUBREG_REG (x), VOIDmode);
7131 return MAX (1, (num0
7132 - (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
7133 - bitwidth)));
7134 }
457816e2 7135
8baf60bb
RK
7136#ifdef WORD_REGISTER_OPERATIONS
7137 /* For paradoxical SUBREGs on machines where all register operations
7138 affect the entire register, just look inside. Note that we are
7139 passing MODE to the recursive call, so the number of sign bit copies
7140 will remain relative to that mode, not the inner mode. */
457816e2
RK
7141
7142 if (GET_MODE_SIZE (GET_MODE (x))
7143 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
7144 return num_sign_bit_copies (SUBREG_REG (x), mode);
7145#endif
d0ab8cd3
RK
7146 break;
7147
7148 case SIGN_EXTRACT:
7149 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
7150 return MAX (1, bitwidth - INTVAL (XEXP (x, 1)));
7151 break;
7152
7153 case SIGN_EXTEND:
7154 return (bitwidth - GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
7155 + num_sign_bit_copies (XEXP (x, 0), VOIDmode));
7156
7157 case TRUNCATE:
7158 /* For a smaller object, just ignore the high bits. */
7159 num0 = num_sign_bit_copies (XEXP (x, 0), VOIDmode);
7160 return MAX (1, (num0 - (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
7161 - bitwidth)));
7162
7163 case NOT:
7164 return num_sign_bit_copies (XEXP (x, 0), mode);
7165
7166 case ROTATE: case ROTATERT:
7167 /* If we are rotating left by a number of bits less than the number
7168 of sign bit copies, we can just subtract that amount from the
7169 number. */
7170 if (GET_CODE (XEXP (x, 1)) == CONST_INT
7171 && INTVAL (XEXP (x, 1)) >= 0 && INTVAL (XEXP (x, 1)) < bitwidth)
7172 {
7173 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
7174 return MAX (1, num0 - (code == ROTATE ? INTVAL (XEXP (x, 1))
7175 : bitwidth - INTVAL (XEXP (x, 1))));
7176 }
7177 break;
7178
7179 case NEG:
7180 /* In general, this subtracts one sign bit copy. But if the value
7181 is known to be positive, the number of sign bit copies is the
951553af
RK
7182 same as that of the input. Finally, if the input has just one bit
7183 that might be nonzero, all the bits are copies of the sign bit. */
7184 nonzero = nonzero_bits (XEXP (x, 0), mode);
7185 if (nonzero == 1)
d0ab8cd3
RK
7186 return bitwidth;
7187
7188 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
7189 if (num0 > 1
ac49a949 7190 && bitwidth <= HOST_BITS_PER_WIDE_INT
951553af 7191 && (((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero))
d0ab8cd3
RK
7192 num0--;
7193
7194 return num0;
7195
7196 case IOR: case AND: case XOR:
7197 case SMIN: case SMAX: case UMIN: case UMAX:
7198 /* Logical operations will preserve the number of sign-bit copies.
7199 MIN and MAX operations always return one of the operands. */
7200 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
7201 num1 = num_sign_bit_copies (XEXP (x, 1), mode);
7202 return MIN (num0, num1);
7203
7204 case PLUS: case MINUS:
7205 /* For addition and subtraction, we can have a 1-bit carry. However,
7206 if we are subtracting 1 from a positive number, there will not
7207 be such a carry. Furthermore, if the positive number is known to
7208 be 0 or 1, we know the result is either -1 or 0. */
7209
3e3ea975 7210 if (code == PLUS && XEXP (x, 1) == constm1_rtx
9295e6af 7211 && bitwidth <= HOST_BITS_PER_WIDE_INT)
d0ab8cd3 7212 {
951553af
RK
7213 nonzero = nonzero_bits (XEXP (x, 0), mode);
7214 if ((((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero) == 0)
7215 return (nonzero == 1 || nonzero == 0 ? bitwidth
7216 : bitwidth - floor_log2 (nonzero) - 1);
d0ab8cd3
RK
7217 }
7218
7219 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
7220 num1 = num_sign_bit_copies (XEXP (x, 1), mode);
7221 return MAX (1, MIN (num0, num1) - 1);
7222
7223 case MULT:
7224 /* The number of bits of the product is the sum of the number of
7225 bits of both terms. However, unless one of the terms if known
7226 to be positive, we must allow for an additional bit since negating
7227 a negative number can remove one sign bit copy. */
7228
7229 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
7230 num1 = num_sign_bit_copies (XEXP (x, 1), mode);
7231
7232 result = bitwidth - (bitwidth - num0) - (bitwidth - num1);
7233 if (result > 0
9295e6af 7234 && bitwidth <= HOST_BITS_PER_WIDE_INT
951553af 7235 && ((nonzero_bits (XEXP (x, 0), mode)
d0ab8cd3 7236 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
951553af 7237 && ((nonzero_bits (XEXP (x, 1), mode)
d0ab8cd3
RK
7238 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
7239 result--;
7240
7241 return MAX (1, result);
7242
7243 case UDIV:
7244 /* The result must be <= the first operand. */
7245 return num_sign_bit_copies (XEXP (x, 0), mode);
7246
7247 case UMOD:
7248 /* The result must be <= the second operand. */
7249 return num_sign_bit_copies (XEXP (x, 1), mode);
7250
7251 case DIV:
7252 /* Similar to unsigned division, except that we have to worry about
7253 the case where the divisor is negative, in which case we have
7254 to add 1. */
7255 result = num_sign_bit_copies (XEXP (x, 0), mode);
7256 if (result > 1
ac49a949 7257 && bitwidth <= HOST_BITS_PER_WIDE_INT
951553af 7258 && (nonzero_bits (XEXP (x, 1), mode)
d0ab8cd3
RK
7259 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
7260 result--;
7261
7262 return result;
7263
7264 case MOD:
7265 result = num_sign_bit_copies (XEXP (x, 1), mode);
7266 if (result > 1
ac49a949 7267 && bitwidth <= HOST_BITS_PER_WIDE_INT
951553af 7268 && (nonzero_bits (XEXP (x, 1), mode)
d0ab8cd3
RK
7269 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
7270 result--;
7271
7272 return result;
7273
7274 case ASHIFTRT:
7275 /* Shifts by a constant add to the number of bits equal to the
7276 sign bit. */
7277 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
7278 if (GET_CODE (XEXP (x, 1)) == CONST_INT
7279 && INTVAL (XEXP (x, 1)) > 0)
7280 num0 = MIN (bitwidth, num0 + INTVAL (XEXP (x, 1)));
7281
7282 return num0;
7283
7284 case ASHIFT:
d0ab8cd3
RK
7285 /* Left shifts destroy copies. */
7286 if (GET_CODE (XEXP (x, 1)) != CONST_INT
7287 || INTVAL (XEXP (x, 1)) < 0
7288 || INTVAL (XEXP (x, 1)) >= bitwidth)
7289 return 1;
7290
7291 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
7292 return MAX (1, num0 - INTVAL (XEXP (x, 1)));
7293
7294 case IF_THEN_ELSE:
7295 num0 = num_sign_bit_copies (XEXP (x, 1), mode);
7296 num1 = num_sign_bit_copies (XEXP (x, 2), mode);
7297 return MIN (num0, num1);
7298
7299#if STORE_FLAG_VALUE == -1
7300 case EQ: case NE: case GE: case GT: case LE: case LT:
7301 case GEU: case GTU: case LEU: case LTU:
7302 return bitwidth;
7303#endif
7304 }
7305
7306 /* If we haven't been able to figure it out by one of the above rules,
7307 see if some of the high-order bits are known to be zero. If so,
ac49a949
RS
7308 count those bits and return one less than that amount. If we can't
7309 safely compute the mask for this mode, always return BITWIDTH. */
7310
7311 if (bitwidth > HOST_BITS_PER_WIDE_INT)
6752e8d2 7312 return 1;
d0ab8cd3 7313
951553af 7314 nonzero = nonzero_bits (x, mode);
df6f4086 7315 return (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))
951553af 7316 ? 1 : bitwidth - floor_log2 (nonzero) - 1);
d0ab8cd3
RK
7317}
7318\f
1a26b032
RK
7319/* Return the number of "extended" bits there are in X, when interpreted
7320 as a quantity in MODE whose signedness is indicated by UNSIGNEDP. For
7321 unsigned quantities, this is the number of high-order zero bits.
7322 For signed quantities, this is the number of copies of the sign bit
7323 minus 1. In both cases, this function returns the number of "spare"
7324 bits. For example, if two quantities for which this function returns
7325 at least 1 are added, the addition is known not to overflow.
7326
7327 This function will always return 0 unless called during combine, which
7328 implies that it must be called from a define_split. */
7329
7330int
7331extended_count (x, mode, unsignedp)
7332 rtx x;
7333 enum machine_mode mode;
7334 int unsignedp;
7335{
951553af 7336 if (nonzero_sign_valid == 0)
7337 return 0;
7338
7339 return (unsignedp
7340 ? (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7341 && (GET_MODE_BITSIZE (mode) - 1
951553af 7342 - floor_log2 (nonzero_bits (x, mode))))
7343 : num_sign_bit_copies (x, mode) - 1);
7344}
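/* Illustrative example: a 32-bit unsigned quantity whose nonzero_bits
   are 0xff has 32 - 1 - floor_log2 (0xff) == 24 spare bits.  Two such
   quantities are each at most 0xff, so their sum is at most 0x1fe and
   cannot overflow, as promised in the comment above.  */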
7345\f
7346/* This function is called from `simplify_shift_const' to merge two
7347 outer operations. Specifically, we have already found that we need
7348 to perform operation *POP0 with constant *PCONST0 at the outermost
7349 position. We would now like to also perform OP1 with constant CONST1
7350 (with *POP0 being done last).
7351
7352 Return 1 if we can do the operation and update *POP0 and *PCONST0 with
7353 the resulting operation. *PCOMP_P is set to 1 if we would need to
7354 complement the innermost operand, otherwise it is unchanged.
7355
7356 MODE is the mode in which the operation will be done. No bits outside
7357 the width of this mode matter. It is assumed that the width of this mode
5f4f0e22 7358 is smaller than or equal to HOST_BITS_PER_WIDE_INT.
7359
7360 If *POP0 or OP1 is NIL, it means no operation is required. Only NEG, PLUS,
7361 IOR, XOR, and AND are supported. We may set *POP0 to SET if the proper
7362 result is simply *PCONST0.
7363
7364 If the resulting operation cannot be expressed as one operation, we
7365 return 0 and do not change *POP0, *PCONST0, and *PCOMP_P. */
7366
7367static int
7368merge_outer_ops (pop0, pconst0, op1, const1, mode, pcomp_p)
7369 enum rtx_code *pop0;
5f4f0e22 7370 HOST_WIDE_INT *pconst0;
230d793d 7371 enum rtx_code op1;
5f4f0e22 7372 HOST_WIDE_INT const1;
7373 enum machine_mode mode;
7374 int *pcomp_p;
7375{
7376 enum rtx_code op0 = *pop0;
5f4f0e22 7377 HOST_WIDE_INT const0 = *pconst0;
7378
7379 const0 &= GET_MODE_MASK (mode);
7380 const1 &= GET_MODE_MASK (mode);
7381
7382 /* If OP0 is an AND, clear unimportant bits in CONST1. */
7383 if (op0 == AND)
7384 const1 &= const0;
7385
7386 /* If OP0 or OP1 is NIL, this is easy. Similarly if they are the same or
7387 if OP0 is SET. */
7388
7389 if (op1 == NIL || op0 == SET)
7390 return 1;
7391
7392 else if (op0 == NIL)
7393 op0 = op1, const0 = const1;
7394
7395 else if (op0 == op1)
7396 {
7397 switch (op0)
7398 {
7399 case AND:
7400 const0 &= const1;
7401 break;
7402 case IOR:
7403 const0 |= const1;
7404 break;
7405 case XOR:
7406 const0 ^= const1;
7407 break;
7408 case PLUS:
7409 const0 += const1;
7410 break;
7411 case NEG:
7412 op0 = NIL;
7413 break;
7414 }
7415 }
7416
7417 /* Otherwise, if either is a PLUS or NEG, we can't do anything. */
7418 else if (op0 == PLUS || op1 == PLUS || op0 == NEG || op1 == NEG)
7419 return 0;
7420
7421 /* If the two constants aren't the same, we can't do anything. The
7422 remaining six cases can all be done. */
7423 else if (const0 != const1)
7424 return 0;
7425
7426 else
7427 switch (op0)
7428 {
7429 case IOR:
7430 if (op1 == AND)
7431 /* (a & b) | b == b */
7432 op0 = SET;
7433 else /* op1 == XOR */
7434 /* (a ^ b) | b == a | b */
7435 ;
7436 break;
7437
7438 case XOR:
7439 if (op1 == AND)
7440 /* (a & b) ^ b == (~a) & b */
7441 op0 = AND, *pcomp_p = 1;
7442 else /* op1 == IOR */
7443 /* (a | b) ^ b == a & ~b */
7444 op0 = AND, *pconst0 = ~ const0;
7445 break;
7446
7447 case AND:
7448 if (op1 == IOR)
7449 /* (a | b) & b == b */
7450 op0 = SET;
7451 else /* op1 == XOR */
7452 /* (a ^ b) & b == (~a) & b */
7453 *pcomp_p = 1;
7454 break;
7455 }
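/* Illustrative example of the XOR/AND case above: merging
   (xor (and a 0xff) 0xff) uses (a & b) ^ b == (~a) & b, so *POP0
   becomes AND with *PCOMP_P set: complement A, then mask with 0xff.  */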
7456
7457 /* Check for NO-OP cases. */
7458 const0 &= GET_MODE_MASK (mode);
7459 if (const0 == 0
7460 && (op0 == IOR || op0 == XOR || op0 == PLUS))
7461 op0 = NIL;
7462 else if (const0 == 0 && op0 == AND)
7463 op0 = SET;
7464 else if (const0 == GET_MODE_MASK (mode) && op0 == AND)
7465 op0 = NIL;
7466
7467 *pop0 = op0;
7468 *pconst0 = const0;
7469
7470 return 1;
7471}
7472\f
7473/* Simplify a shift of VAROP by COUNT bits. CODE says what kind of shift.
7474 The result of the shift is RESULT_MODE. X, if non-zero, is an expression
7475 that we started with.
7476
7477 The shift is normally computed in the widest mode we find in VAROP, as
7478 long as it isn't a different number of words than RESULT_MODE. Exceptions
7479 are ASHIFTRT and ROTATE, which are always done in their original mode. */
7480
7481static rtx
7482simplify_shift_const (x, code, result_mode, varop, count)
7483 rtx x;
7484 enum rtx_code code;
7485 enum machine_mode result_mode;
7486 rtx varop;
7487 int count;
7488{
7489 enum rtx_code orig_code = code;
7490 int orig_count = count;
7491 enum machine_mode mode = result_mode;
7492 enum machine_mode shift_mode, tmode;
7493 int mode_words
7494 = (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
7495 /* We form (outer_op (code varop count) (outer_const)). */
7496 enum rtx_code outer_op = NIL;
c4e861e8 7497 HOST_WIDE_INT outer_const = 0;
7498 rtx const_rtx;
7499 int complement_p = 0;
7500 rtx new;
7501
7502 /* If we were given an invalid count, don't do anything except exactly
7503 what was requested. */
7504
7505 if (count < 0 || count > GET_MODE_BITSIZE (mode))
7506 {
7507 if (x)
7508 return x;
7509
5f4f0e22 7510 return gen_rtx (code, mode, varop, GEN_INT (count));
7511 }
7512
7513 /* Unless one of the branches of the `if' in this loop does a `continue',
7514 we will `break' the loop after the `if'. */
7515
7516 while (count != 0)
7517 {
7518 /* If we have an operand of (clobber (const_int 0)), just return that
7519 value. */
7520 if (GET_CODE (varop) == CLOBBER)
7521 return varop;
7522
7523 /* If we discovered we had to complement VAROP, leave. Making a NOT
7524 here would cause an infinite loop. */
7525 if (complement_p)
7526 break;
7527
7528 /* Convert ROTATERT to ROTATE. */
7529 if (code == ROTATERT)
7530 code = ROTATE, count = GET_MODE_BITSIZE (result_mode) - count;
7531
7532 /* We need to determine what mode we will do the shift in. If the
7533 shift is an ASHIFTRT or ROTATE, we must always do it in the mode it
7534 was originally done in. Otherwise, we can do it in MODE, the widest
7535 mode encountered. */
7536 shift_mode = (code == ASHIFTRT || code == ROTATE ? result_mode : mode);
7537
7538 /* Handle cases where the count is greater than the size of the mode
7539 minus 1. For ASHIFT, use the size minus one as the count (this can
7540 occur when simplifying (lshiftrt (ashiftrt ..))). For rotates,
7541 take the count modulo the size. For other shifts, the result is
7542 zero.
7543
7544 Since these shifts are being produced by the compiler by combining
7545 multiple operations, each of which is defined, we know what the
7546 result is supposed to be. */
7547
7548 if (count > GET_MODE_BITSIZE (shift_mode) - 1)
7549 {
7550 if (code == ASHIFTRT)
7551 count = GET_MODE_BITSIZE (shift_mode) - 1;
7552 else if (code == ROTATE || code == ROTATERT)
7553 count %= GET_MODE_BITSIZE (shift_mode);
7554 else
7555 {
7556 /* We can't simply return zero because there may be an
7557 outer op. */
7558 varop = const0_rtx;
7559 count = 0;
7560 break;
7561 }
7562 }
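/* Illustrative SImode examples of the rules above: a rotate count of 37
   is reduced to 37 % 32 == 5; (ashiftrt X 40) is treated as
   (ashiftrt X 31), which has the same value; (lshiftrt X 40) becomes
   the constant zero.  */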
7563
7564 /* Negative counts are invalid and should not have been made (a
7565 programmer-specified negative count should have been handled
7566 above). */
7567 else if (count < 0)
7568 abort ();
7569
7570 /* An arithmetic right shift of a quantity known to be -1 or 0
7571 is a no-op. */
7572 if (code == ASHIFTRT
7573 && (num_sign_bit_copies (varop, shift_mode)
7574 == GET_MODE_BITSIZE (shift_mode)))
d0ab8cd3 7575 {
7576 count = 0;
7577 break;
7578 }
d0ab8cd3 7579
7580 /* If we are doing an arithmetic right shift and discarding all but
7581 the sign bit copies, this is equivalent to doing a shift by the
7582 bitsize minus one. Convert it into that shift because it will often
7583 allow other simplifications. */
500c518b 7584
7585 if (code == ASHIFTRT
7586 && (count + num_sign_bit_copies (varop, shift_mode)
7587 >= GET_MODE_BITSIZE (shift_mode)))
7588 count = GET_MODE_BITSIZE (shift_mode) - 1;
500c518b 7589
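/* Illustrative example of the conversion just above: if VAROP has 20
   sign bit copies in SImode and COUNT is 15, then 15 + 20 >= 32, so
   every bit that survives the shift is a copy of the sign bit and
   (ashiftrt VAROP 15) is the same as (ashiftrt VAROP 31).  */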
7590 /* We simplify the tests below and elsewhere by converting
7591 ASHIFTRT to LSHIFTRT if we know the sign bit is clear.
7592 `make_compound_operation' will convert it to an ASHIFTRT for
7593 those machines (such as Vax) that don't have an LSHIFTRT. */
5f4f0e22 7594 if (GET_MODE_BITSIZE (shift_mode) <= HOST_BITS_PER_WIDE_INT
230d793d 7595 && code == ASHIFTRT
951553af 7596 && ((nonzero_bits (varop, shift_mode)
7597 & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (shift_mode) - 1)))
7598 == 0))
7599 code = LSHIFTRT;
7600
7601 switch (GET_CODE (varop))
7602 {
7603 case SIGN_EXTEND:
7604 case ZERO_EXTEND:
7605 case SIGN_EXTRACT:
7606 case ZERO_EXTRACT:
7607 new = expand_compound_operation (varop);
7608 if (new != varop)
7609 {
7610 varop = new;
7611 continue;
7612 }
7613 break;
7614
7615 case MEM:
7616 /* If we have (xshiftrt (mem ...) C) and C is MODE_WIDTH
7617 minus the width of a smaller mode, we can do this with a
7618 SIGN_EXTEND or ZERO_EXTEND from the narrower memory location. */
7619 if ((code == ASHIFTRT || code == LSHIFTRT)
7620 && ! mode_dependent_address_p (XEXP (varop, 0))
7621 && ! MEM_VOLATILE_P (varop)
7622 && (tmode = mode_for_size (GET_MODE_BITSIZE (mode) - count,
7623 MODE_INT, 1)) != BLKmode)
7624 {
7625#if BYTES_BIG_ENDIAN
7626 new = gen_rtx (MEM, tmode, XEXP (varop, 0));
7627#else
7628 new = gen_rtx (MEM, tmode,
7629 plus_constant (XEXP (varop, 0),
7630 count / BITS_PER_UNIT));
7631 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (varop);
7632 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (varop);
7633 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (varop);
7634#endif
7635 varop = gen_rtx_combine (code == ASHIFTRT ? SIGN_EXTEND
7636 : ZERO_EXTEND, mode, new);
7637 count = 0;
7638 continue;
7639 }
7640 break;
7641
7642 case USE:
7643 /* Similar to the case above, except that we can only do this if
7644 the resulting mode is the same as that of the underlying
7645 MEM and adjust the address depending on the *bits* endianness
7646 because of the way that bit-field extract insns are defined. */
7647 if ((code == ASHIFTRT || code == LSHIFTRT)
7648 && (tmode = mode_for_size (GET_MODE_BITSIZE (mode) - count,
7649 MODE_INT, 1)) != BLKmode
7650 && tmode == GET_MODE (XEXP (varop, 0)))
7651 {
7652#if BITS_BIG_ENDIAN
7653 new = XEXP (varop, 0);
7654#else
7655 new = copy_rtx (XEXP (varop, 0));
7656 SUBST (XEXP (new, 0),
7657 plus_constant (XEXP (new, 0),
7658 count / BITS_PER_UNIT));
7659#endif
7660
7661 varop = gen_rtx_combine (code == ASHIFTRT ? SIGN_EXTEND
7662 : ZERO_EXTEND, mode, new);
7663 count = 0;
7664 continue;
7665 }
7666 break;
7667
7668 case SUBREG:
7669 /* If VAROP is a SUBREG, strip it as long as the inner operand has
7670 the same number of words as what we've seen so far. Then store
7671 the widest mode in MODE. */
7672 if (subreg_lowpart_p (varop)
7673 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop)))
7674 > GET_MODE_SIZE (GET_MODE (varop)))
7675 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop)))
7676 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
7677 == mode_words))
7678 {
7679 varop = SUBREG_REG (varop);
7680 if (GET_MODE_SIZE (GET_MODE (varop)) > GET_MODE_SIZE (mode))
7681 mode = GET_MODE (varop);
7682 continue;
7683 }
7684 break;
7685
7686 case MULT:
7687 /* Some machines use MULT instead of ASHIFT because MULT
7688 is cheaper. But it is still better on those machines to
7689 merge two shifts into one. */
7690 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
7691 && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0)
7692 {
7693 varop = gen_binary (ASHIFT, GET_MODE (varop), XEXP (varop, 0),
5f4f0e22 7694 GEN_INT (exact_log2 (INTVAL (XEXP (varop, 1)))));
7695 continue;
7696 }
7697 break;
7698
7699 case UDIV:
7700 /* Similar, for when divides are cheaper. */
7701 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
7702 && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0)
7703 {
7704 varop = gen_binary (LSHIFTRT, GET_MODE (varop), XEXP (varop, 0),
5f4f0e22 7705 GEN_INT (exact_log2 (INTVAL (XEXP (varop, 1)))));
7706 continue;
7707 }
7708 break;
7709
7710 case ASHIFTRT:
7711 /* If we are extracting just the sign bit of an arithmetic right
7712 shift, that shift is not needed. */
7713 if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1)
7714 {
7715 varop = XEXP (varop, 0);
7716 continue;
7717 }
7718
7719 /* ... fall through ... */
7720
7721 case LSHIFTRT:
7722 case ASHIFT:
7723 case ROTATE:
7724 /* Here we have two nested shifts. The result is usually the
7725 AND of a new shift with a mask. We compute the result below. */
7726 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
7727 && INTVAL (XEXP (varop, 1)) >= 0
7728 && INTVAL (XEXP (varop, 1)) < GET_MODE_BITSIZE (GET_MODE (varop))
7729 && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
7730 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
7731 {
7732 enum rtx_code first_code = GET_CODE (varop);
7733 int first_count = INTVAL (XEXP (varop, 1));
5f4f0e22 7734 unsigned HOST_WIDE_INT mask;
230d793d 7735 rtx mask_rtx;
230d793d 7736
7737 /* We have one common special case. We can't do any merging if
7738 the inner code is an ASHIFTRT of a smaller mode. However, if
7739 we have (ashift:M1 (subreg:M1 (ashiftrt:M2 FOO C1) 0) C2)
7740 with C2 == GET_MODE_BITSIZE (M1) - GET_MODE_BITSIZE (M2),
7741 we can convert it to
7742 (ashiftrt:M1 (ashift:M1 (and:M1 (subreg:M1 FOO 0) C3) C2) C1).
7743 This simplifies certain SIGN_EXTEND operations. */
7744 if (code == ASHIFT && first_code == ASHIFTRT
7745 && (GET_MODE_BITSIZE (result_mode)
7746 - GET_MODE_BITSIZE (GET_MODE (varop))) == count)
7747 {
7748 /* C3 has the low-order C1 bits zero. */
7749
7750 mask = (GET_MODE_MASK (mode)
7751 & ~ (((HOST_WIDE_INT) 1 << first_count) - 1));
230d793d 7752
5f4f0e22 7753 varop = simplify_and_const_int (NULL_RTX, result_mode,
230d793d 7754 XEXP (varop, 0), mask);
5f4f0e22 7755 varop = simplify_shift_const (NULL_RTX, ASHIFT, result_mode,
7756 varop, count);
7757 count = first_count;
7758 code = ASHIFTRT;
7759 continue;
7760 }
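/* Illustrative instance of the transformation above: with M2 == QImode,
   M1 == SImode, C1 == 2 and C2 == 32 - 8 == 24, the mask C3 is
   0xfffffffc, so (ashift:SI (subreg:SI (ashiftrt:QI FOO 2) 0) 24)
   becomes
   (ashiftrt:SI (ashift:SI (and:SI (subreg:SI FOO 0) 0xfffffffc) 24) 2).  */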
7761
7762 /* If this was (ashiftrt (ashift foo C1) C2) and FOO has more
7763 than C1 high-order bits equal to the sign bit, we can convert
7764 this to either an ASHIFT or an ASHIFTRT depending on the
7765 two counts.
7766
7767 We cannot do this if VAROP's mode is not SHIFT_MODE. */
7768
7769 if (code == ASHIFTRT && first_code == ASHIFT
7770 && GET_MODE (varop) == shift_mode
7771 && (num_sign_bit_copies (XEXP (varop, 0), shift_mode)
7772 > first_count))
230d793d 7773 {
7774 count -= first_count;
7775 if (count < 0)
7776 count = - count, code = ASHIFT;
7777 varop = XEXP (varop, 0);
7778 continue;
7779 }
7780
7781 /* There are some cases we can't do. If CODE is ASHIFTRT,
7782 we can only do this if FIRST_CODE is also ASHIFTRT.
7783
7784 We can't do the case when CODE is ROTATE and FIRST_CODE is
7785 ASHIFTRT.
7786
7787 If the mode of this shift is not the mode of the outer shift,
7788 we can't do this if either shift is ASHIFTRT or ROTATE.
7789
7790 Finally, we can't do any of these if the mode is too wide
7791 unless the codes are the same.
7792
7793 Handle the case where the shift codes are the same
7794 first. */
7795
7796 if (code == first_code)
7797 {
7798 if (GET_MODE (varop) != result_mode
7799 && (code == ASHIFTRT || code == ROTATE))
7800 break;
7801
7802 count += first_count;
7803 varop = XEXP (varop, 0);
7804 continue;
7805 }
7806
7807 if (code == ASHIFTRT
7808 || (code == ROTATE && first_code == ASHIFTRT)
5f4f0e22 7809 || GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT
7810 || (GET_MODE (varop) != result_mode
7811 && (first_code == ASHIFTRT || first_code == ROTATE
7812 || code == ROTATE)))
7813 break;
7814
7815 /* To compute the mask to apply after the shift, shift the
951553af 7816 nonzero bits of the inner shift the same way the
7817 outer shift will. */
7818
951553af 7819 mask_rtx = GEN_INT (nonzero_bits (varop, GET_MODE (varop)));
7820
7821 mask_rtx
7822 = simplify_binary_operation (code, result_mode, mask_rtx,
5f4f0e22 7823 GEN_INT (count));
7824
7825 /* Give up if we can't compute an outer operation to use. */
7826 if (mask_rtx == 0
7827 || GET_CODE (mask_rtx) != CONST_INT
7828 || ! merge_outer_ops (&outer_op, &outer_const, AND,
7829 INTVAL (mask_rtx),
7830 result_mode, &complement_p))
7831 break;
7832
7833 /* If the shifts are in the same direction, we add the
7834 counts. Otherwise, we subtract them. */
7835 if ((code == ASHIFTRT || code == LSHIFTRT)
7836 == (first_code == ASHIFTRT || first_code == LSHIFTRT))
7837 count += first_count;
7838 else
7839 count -= first_count;
7840
7841 /* If COUNT is positive, the new shift is usually CODE,
7842 except for the two exceptions below, in which case it is
7843 FIRST_CODE. If the count is negative, FIRST_CODE should
7844 always be used. */
7845 if (count > 0
7846 && ((first_code == ROTATE && code == ASHIFT)
7847 || (first_code == ASHIFTRT && code == LSHIFTRT)))
7848 code = first_code;
7849 else if (count < 0)
7850 code = first_code, count = - count;
7851
7852 varop = XEXP (varop, 0);
7853 continue;
7854 }
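/* Illustrative QImode example of the mask computation above: for
   (lshiftrt (ashift X 2) 2), the nonzero bits of (ashift X 2) are
   0xfc and shifting that mask right by 2 gives 0x3f.  The shifts run
   in opposite directions, so the counts cancel (2 - 2 == 0) and only
   the outer operation remains: (and X 0x3f).  */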
7855
7856 /* If we have (A << B << C) for any shift, we can convert this to
7857 (A << C << B). This wins if A is a constant. Only try this if
7858 B is not a constant. */
7859
7860 else if (GET_CODE (varop) == code
7861 && GET_CODE (XEXP (varop, 1)) != CONST_INT
7862 && 0 != (new
7863 = simplify_binary_operation (code, mode,
7864 XEXP (varop, 0),
5f4f0e22 7865 GEN_INT (count))))
7866 {
7867 varop = gen_rtx_combine (code, mode, new, XEXP (varop, 1));
7868 count = 0;
7869 continue;
7870 }
7871 break;
7872
7873 case NOT:
7874 /* Make this fit the case below. */
7875 varop = gen_rtx_combine (XOR, mode, XEXP (varop, 0),
5f4f0e22 7876 GEN_INT (GET_MODE_MASK (mode)));
7877 continue;
7878
7879 case IOR:
7880 case AND:
7881 case XOR:
7882 /* If we have (xshiftrt (ior (plus X (const_int -1)) X) C)
7883 with C the size of VAROP - 1 and the shift is logical if
7884 STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
7885 we have an (le X 0) operation. If we have an arithmetic shift
7886 and STORE_FLAG_VALUE is 1 or we have a logical shift with
7887 STORE_FLAG_VALUE of -1, we have a (neg (le X 0)) operation. */
7888
7889 if (GET_CODE (varop) == IOR && GET_CODE (XEXP (varop, 0)) == PLUS
7890 && XEXP (XEXP (varop, 0), 1) == constm1_rtx
7891 && (STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
7892 && (code == LSHIFTRT || code == ASHIFTRT)
7893 && count == GET_MODE_BITSIZE (GET_MODE (varop)) - 1
7894 && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
7895 {
7896 count = 0;
7897 varop = gen_rtx_combine (LE, GET_MODE (varop), XEXP (varop, 1),
7898 const0_rtx);
7899
7900 if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
7901 varop = gen_rtx_combine (NEG, GET_MODE (varop), varop);
7902
7903 continue;
7904 }
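/* Illustrative check of the (le X 0) rule in SImode with COUNT == 31
   and STORE_FLAG_VALUE == 1: if X == 0, (ior (plus X -1) X) is -1 and
   the logical shift yields 1; if X > 0, both X - 1 and X have a clear
   sign bit and the shift yields 0; if X < 0, X itself supplies a set
   sign bit and the shift yields 1.  That is exactly X <= 0.  */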
7905
7906 /* If we have (shift (logical)), move the logical to the outside
7907 to allow it to possibly combine with another logical and the
7908 shift to combine with another shift. This also canonicalizes to
7909 what a ZERO_EXTRACT looks like. Also, some machines have
7910 (and (shift)) insns. */
7911
7912 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
7913 && (new = simplify_binary_operation (code, result_mode,
7914 XEXP (varop, 1),
5f4f0e22 7915 GEN_INT (count))) != 0
7d171a1e 7916 && GET_CODE(new) == CONST_INT
7917 && merge_outer_ops (&outer_op, &outer_const, GET_CODE (varop),
7918 INTVAL (new), result_mode, &complement_p))
7919 {
7920 varop = XEXP (varop, 0);
7921 continue;
7922 }
7923
7924 /* If we can't do that, try to simplify the shift in each arm of the
7925 logical expression, make a new logical expression, and apply
7926 the inverse distributive law. */
7927 {
00d4ca1c 7928 rtx lhs = simplify_shift_const (NULL_RTX, code, shift_mode,
230d793d 7929 XEXP (varop, 0), count);
00d4ca1c 7930 rtx rhs = simplify_shift_const (NULL_RTX, code, shift_mode,
7931 XEXP (varop, 1), count);
7932
21a64bf1 7933 varop = gen_binary (GET_CODE (varop), shift_mode, lhs, rhs);
7934 varop = apply_distributive_law (varop);
7935
7936 count = 0;
7937 }
7938 break;
7939
7940 case EQ:
45620ed4 7941 /* Convert (lshiftrt (eq FOO 0) C) to (xor FOO 1) if STORE_FLAG_VALUE
230d793d 7942 says that the sign bit can be tested, FOO has mode MODE, C is
7943 GET_MODE_BITSIZE (MODE) - 1, and FOO has only its low-order bit
7944 that may be nonzero. */
7945 if (code == LSHIFTRT
7946 && XEXP (varop, 1) == const0_rtx
7947 && GET_MODE (XEXP (varop, 0)) == result_mode
7948 && count == GET_MODE_BITSIZE (result_mode) - 1
5f4f0e22 7949 && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
230d793d 7950 && ((STORE_FLAG_VALUE
5f4f0e22 7951 & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (result_mode) - 1))))
951553af 7952 && nonzero_bits (XEXP (varop, 0), result_mode) == 1
7953 && merge_outer_ops (&outer_op, &outer_const, XOR,
7954 (HOST_WIDE_INT) 1, result_mode,
7955 &complement_p))
7956 {
7957 varop = XEXP (varop, 0);
7958 count = 0;
7959 continue;
7960 }
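/* Illustrative example in SImode with STORE_FLAG_VALUE == -1: FOO is
   known to be 0 or 1, so (eq FOO 0) is -1 when FOO == 0 and 0 when
   FOO == 1.  A logical shift by 31 leaves 1 or 0 respectively, which
   is (xor FOO 1); the XOR is recorded as the outer operation.  */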
7961 break;
7962
7963 case NEG:
7964 /* (lshiftrt (neg A) C) where A is either 0 or 1 and C is one less
7965 than the number of bits in the mode is equivalent to A. */
7966 if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1
951553af 7967 && nonzero_bits (XEXP (varop, 0), result_mode) == 1)
230d793d 7968 {
d0ab8cd3 7969 varop = XEXP (varop, 0);
7970 count = 0;
7971 continue;
7972 }
7973
7974 /* NEG commutes with ASHIFT since it is multiplication. Move the
7975 NEG outside to allow shifts to combine. */
7976 if (code == ASHIFT
7977 && merge_outer_ops (&outer_op, &outer_const, NEG,
7978 (HOST_WIDE_INT) 0, result_mode,
7979 &complement_p))
7980 {
7981 varop = XEXP (varop, 0);
7982 continue;
7983 }
7984 break;
7985
7986 case PLUS:
7987 /* (lshiftrt (plus A -1) C) where A is either 0 or 1 and C
7988 is one less than the number of bits in the mode is
7989 equivalent to (xor A 1). */
7990 if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1
7991 && XEXP (varop, 1) == constm1_rtx
951553af 7992 && nonzero_bits (XEXP (varop, 0), result_mode) == 1
7993 && merge_outer_ops (&outer_op, &outer_const, XOR,
7994 (HOST_WIDE_INT) 1, result_mode,
7995 &complement_p))
7996 {
7997 count = 0;
7998 varop = XEXP (varop, 0);
7999 continue;
8000 }
8001
3f508eca 8002 /* If we have (xshiftrt (plus FOO BAR) C), and the only bits
951553af 8003 that might be nonzero in BAR are those being shifted out and those
8004 bits are known zero in FOO, we can replace the PLUS with FOO.
8005 Similarly in the other operand order. This code occurs when
8006 we are computing the size of a variable-size array. */
8007
8008 if ((code == ASHIFTRT || code == LSHIFTRT)
5f4f0e22 8009 && count < HOST_BITS_PER_WIDE_INT
8010 && nonzero_bits (XEXP (varop, 1), result_mode) >> count == 0
8011 && (nonzero_bits (XEXP (varop, 1), result_mode)
8012 & nonzero_bits (XEXP (varop, 0), result_mode)) == 0)
8013 {
8014 varop = XEXP (varop, 0);
8015 continue;
8016 }
8017 else if ((code == ASHIFTRT || code == LSHIFTRT)
5f4f0e22 8018 && count < HOST_BITS_PER_WIDE_INT
ac49a949 8019 && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
951553af 8020 && 0 == (nonzero_bits (XEXP (varop, 0), result_mode)
3f508eca 8021 >> count)
8022 && 0 == (nonzero_bits (XEXP (varop, 0), result_mode)
8023 & nonzero_bits (XEXP (varop, 1),
8024 result_mode)))
8025 {
8026 varop = XEXP (varop, 1);
8027 continue;
8028 }
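/* Illustrative example of the rule above: in
   (lshiftrt (plus FOO BAR) 3), if nonzero_bits (BAR) == 7 and none of
   those bits can be set in FOO, the PLUS can never carry (it acts as
   an IOR of disjoint bits), so every bit surviving the shift comes
   from FOO and BAR is dropped.  */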
8029
8030 /* (ashift (plus foo C) N) is (plus (ashift foo N) C'). */
8031 if (code == ASHIFT
8032 && GET_CODE (XEXP (varop, 1)) == CONST_INT
8033 && (new = simplify_binary_operation (ASHIFT, result_mode,
8034 XEXP (varop, 1),
5f4f0e22 8035 GEN_INT (count))) != 0
7d171a1e 8036 && GET_CODE(new) == CONST_INT
8037 && merge_outer_ops (&outer_op, &outer_const, PLUS,
8038 INTVAL (new), result_mode, &complement_p))
8039 {
8040 varop = XEXP (varop, 0);
8041 continue;
8042 }
8043 break;
8044
8045 case MINUS:
8046 /* If we have (xshiftrt (minus (ashiftrt X C) X) C)
8047 with C the size of VAROP - 1 and the shift is logical if
8048 STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
8049 we have a (gt X 0) operation. If the shift is arithmetic with
8050 STORE_FLAG_VALUE of 1 or logical with STORE_FLAG_VALUE == -1,
8051 we have a (neg (gt X 0)) operation. */
8052
8053 if (GET_CODE (XEXP (varop, 0)) == ASHIFTRT
8054 && count == GET_MODE_BITSIZE (GET_MODE (varop)) - 1
8055 && (STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
8056 && (code == LSHIFTRT || code == ASHIFTRT)
8057 && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT
8058 && INTVAL (XEXP (XEXP (varop, 0), 1)) == count
8059 && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
8060 {
8061 count = 0;
8062 varop = gen_rtx_combine (GT, GET_MODE (varop), XEXP (varop, 1),
8063 const0_rtx);
8064
8065 if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
8066 varop = gen_rtx_combine (NEG, GET_MODE (varop), varop);
8067
8068 continue;
8069 }
8070 break;
8071 }
8072
8073 break;
8074 }
8075
8076 /* We need to determine what mode to do the shift in. If the shift is
8077 an ASHIFTRT or ROTATE, we must always do it in the mode it was originally
8078 done in. Otherwise, we can do it in MODE, the widest mode encountered.
8079 The code we care about is that of the shift that will actually be done,
8080 not the shift that was originally requested. */
8081 shift_mode = (code == ASHIFTRT || code == ROTATE ? result_mode : mode);
8082
8083 /* We have now finished analyzing the shift. The result should be
8084 a shift of type CODE with SHIFT_MODE shifting VAROP COUNT places. If
8085 OUTER_OP is non-NIL, it is an operation that needs to be applied
8086 to the result of the shift. OUTER_CONST is the relevant constant,
8087 but we must turn off all bits turned off in the shift.
8088
8089 If we were passed a value for X, see if we can use any pieces of
8090 it. If not, make new rtx. */
8091
8092 if (x && GET_RTX_CLASS (GET_CODE (x)) == '2'
8093 && GET_CODE (XEXP (x, 1)) == CONST_INT
8094 && INTVAL (XEXP (x, 1)) == count)
8095 const_rtx = XEXP (x, 1);
8096 else
5f4f0e22 8097 const_rtx = GEN_INT (count);
8098
8099 if (x && GET_CODE (XEXP (x, 0)) == SUBREG
8100 && GET_MODE (XEXP (x, 0)) == shift_mode
8101 && SUBREG_REG (XEXP (x, 0)) == varop)
8102 varop = XEXP (x, 0);
8103 else if (GET_MODE (varop) != shift_mode)
8104 varop = gen_lowpart_for_combine (shift_mode, varop);
8105
8106 /* If we can't make the SUBREG, try to return what we were given. */
8107 if (GET_CODE (varop) == CLOBBER)
8108 return x ? x : varop;
8109
8110 new = simplify_binary_operation (code, shift_mode, varop, const_rtx);
8111 if (new != 0)
8112 x = new;
8113 else
8114 {
8115 if (x == 0 || GET_CODE (x) != code || GET_MODE (x) != shift_mode)
8116 x = gen_rtx_combine (code, shift_mode, varop, const_rtx);
8117
8118 SUBST (XEXP (x, 0), varop);
8119 SUBST (XEXP (x, 1), const_rtx);
8120 }
8121
8122 /* If we have an outer operation and we just made a shift, it is
8123 possible that we could have simplified the shift were it not
8124 for the outer operation. So try to do the simplification
8125 recursively. */
8126
8127 if (outer_op != NIL && GET_CODE (x) == code
8128 && GET_CODE (XEXP (x, 1)) == CONST_INT)
8129 x = simplify_shift_const (x, code, shift_mode, XEXP (x, 0),
8130 INTVAL (XEXP (x, 1)));
8131
8132 /* If we were doing a LSHIFTRT in a wider mode than it was originally,
8133 turn off all the bits that the shift would have turned off. */
8134 if (orig_code == LSHIFTRT && result_mode != shift_mode)
5f4f0e22 8135 x = simplify_and_const_int (NULL_RTX, shift_mode, x,
8136 GET_MODE_MASK (result_mode) >> orig_count);
8137
8138 /* Do the remainder of the processing in RESULT_MODE. */
8139 x = gen_lowpart_for_combine (result_mode, x);
8140
8141 /* If COMPLEMENT_P is set, we have to complement X before doing the outer
8142 operation. */
8143 if (complement_p)
0c1c8ea6 8144 x = gen_unary (NOT, result_mode, result_mode, x);
8145
8146 if (outer_op != NIL)
8147 {
5f4f0e22 8148 if (GET_MODE_BITSIZE (result_mode) < HOST_BITS_PER_WIDE_INT)
8149 outer_const &= GET_MODE_MASK (result_mode);
8150
8151 if (outer_op == AND)
5f4f0e22 8152 x = simplify_and_const_int (NULL_RTX, result_mode, x, outer_const);
8153 else if (outer_op == SET)
8154 /* This means that we have determined that the result is
8155 equivalent to a constant. This should be rare. */
5f4f0e22 8156 x = GEN_INT (outer_const);
230d793d 8157 else if (GET_RTX_CLASS (outer_op) == '1')
0c1c8ea6 8158 x = gen_unary (outer_op, result_mode, result_mode, x);
230d793d 8159 else
5f4f0e22 8160 x = gen_binary (outer_op, result_mode, x, GEN_INT (outer_const));
8161 }
8162
8163 return x;
8164}
8165\f
8166/* Like recog, but we receive the address of a pointer to a new pattern.
8167 We try to match the rtx that the pointer points to.
8168 If that fails, we may try to modify or replace the pattern,
8169 storing the replacement into the same pointer object.
8170
8171 Modifications include deletion or addition of CLOBBERs.
8172
8173 PNOTES is a pointer to a location where any REG_UNUSED notes added for
8174 the CLOBBERs are placed.
8175
8176 The value is the final insn code from the pattern ultimately matched,
8177 or -1. */
8178
8179static int
8180recog_for_combine (pnewpat, insn, pnotes)
8181 rtx *pnewpat;
8182 rtx insn;
8183 rtx *pnotes;
8184{
8185 register rtx pat = *pnewpat;
8186 int insn_code_number;
8187 int num_clobbers_to_add = 0;
8188 int i;
8189 rtx notes = 0;
8190
8191 /* If PAT is a PARALLEL, check to see if it contains the CLOBBER
8192 we use to indicate that something didn't match. If we find such a
8193 thing, force rejection. */
d96023cf 8194 if (GET_CODE (pat) == PARALLEL)
974f4146 8195 for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
8196 if (GET_CODE (XVECEXP (pat, 0, i)) == CLOBBER
8197 && XEXP (XVECEXP (pat, 0, i), 0) == const0_rtx)
8198 return -1;
8199
8200 /* Is the result of combination a valid instruction? */
8201 insn_code_number = recog (pat, insn, &num_clobbers_to_add);
8202
8203 /* If it isn't, there is the possibility that we previously had an insn
8204 that clobbered some register as a side effect, but the combined
8205 insn doesn't need to do that. So try once more without the clobbers
8206 unless this represents an ASM insn. */
8207
8208 if (insn_code_number < 0 && ! check_asm_operands (pat)
8209 && GET_CODE (pat) == PARALLEL)
8210 {
8211 int pos;
8212
8213 for (pos = 0, i = 0; i < XVECLEN (pat, 0); i++)
8214 if (GET_CODE (XVECEXP (pat, 0, i)) != CLOBBER)
8215 {
8216 if (i != pos)
8217 SUBST (XVECEXP (pat, 0, pos), XVECEXP (pat, 0, i));
8218 pos++;
8219 }
8220
8221 SUBST_INT (XVECLEN (pat, 0), pos);
8222
8223 if (pos == 1)
8224 pat = XVECEXP (pat, 0, 0);
8225
8226 insn_code_number = recog (pat, insn, &num_clobbers_to_add);
8227 }
8228
8229 /* If we had any clobbers to add, make a new pattern that contains
8230 them. Then check to make sure that all of them are dead. */
8231 if (num_clobbers_to_add)
8232 {
8233 rtx newpat = gen_rtx (PARALLEL, VOIDmode,
8234 gen_rtvec (GET_CODE (pat) == PARALLEL
8235 ? XVECLEN (pat, 0) + num_clobbers_to_add
8236 : num_clobbers_to_add + 1));
8237
8238 if (GET_CODE (pat) == PARALLEL)
8239 for (i = 0; i < XVECLEN (pat, 0); i++)
8240 XVECEXP (newpat, 0, i) = XVECEXP (pat, 0, i);
8241 else
8242 XVECEXP (newpat, 0, 0) = pat;
8243
8244 add_clobbers (newpat, insn_code_number);
8245
8246 for (i = XVECLEN (newpat, 0) - num_clobbers_to_add;
8247 i < XVECLEN (newpat, 0); i++)
8248 {
8249 if (GET_CODE (XEXP (XVECEXP (newpat, 0, i), 0)) == REG
8250 && ! reg_dead_at_p (XEXP (XVECEXP (newpat, 0, i), 0), insn))
8251 return -1;
8252 notes = gen_rtx (EXPR_LIST, REG_UNUSED,
8253 XEXP (XVECEXP (newpat, 0, i), 0), notes);
8254 }
8255 pat = newpat;
8256 }
8257
8258 *pnewpat = pat;
8259 *pnotes = notes;
8260
8261 return insn_code_number;
8262}
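/* Illustrative scenario (assumed target, not from this file): on a
   machine whose add pattern also clobbers a condition-code register,
   recog can return the add's insn code while asking for one clobber
   to be added.  The code above then rebuilds the pattern as
   (parallel [(set ...) (clobber (reg:CC ...))]) and accepts it only
   if reg_dead_at_p shows the flags register to be dead at INSN.  */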
8263\f
8264/* Like gen_lowpart but for use by combine. In combine it is not possible
8265 to create any new pseudoregs. However, it is safe to create
8266 invalid memory addresses, because combine will try to recognize
8267 them and all they will do is make the combine attempt fail.
8268
8269 If for some reason this cannot do its job, an rtx
8270 (clobber (const_int 0)) is returned.
8271 An insn containing that will not be recognized. */
8272
8273#undef gen_lowpart
8274
8275static rtx
8276gen_lowpart_for_combine (mode, x)
8277 enum machine_mode mode;
8278 register rtx x;
8279{
8280 rtx result;
8281
8282 if (GET_MODE (x) == mode)
8283 return x;
8284
8285 /* We can only support MODE being wider than a word if X is a
8286 constant integer or has a mode the same size. */
8287
8288 if (GET_MODE_SIZE (mode) > UNITS_PER_WORD
8289 && ! ((GET_MODE (x) == VOIDmode
8290 && (GET_CODE (x) == CONST_INT
8291 || GET_CODE (x) == CONST_DOUBLE))
8292 || GET_MODE_SIZE (GET_MODE (x)) == GET_MODE_SIZE (mode)))
8293 return gen_rtx (CLOBBER, GET_MODE (x), const0_rtx);
8294
8295 /* X might be a paradoxical (subreg (mem)). In that case, gen_lowpart
8296 won't know what to do. So we will strip off the SUBREG here and
8297 process normally. */
8298 if (GET_CODE (x) == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
8299 {
8300 x = SUBREG_REG (x);
8301 if (GET_MODE (x) == mode)
8302 return x;
8303 }
8304
8305 result = gen_lowpart_common (mode, x);
8306 if (result)
8307 return result;
8308
8309 if (GET_CODE (x) == MEM)
8310 {
8311 register int offset = 0;
8312 rtx new;
8313
8314 /* Refuse to work on a volatile memory ref or one with a mode-dependent
8315 address. */
8316 if (MEM_VOLATILE_P (x) || mode_dependent_address_p (XEXP (x, 0)))
8317 return gen_rtx (CLOBBER, GET_MODE (x), const0_rtx);
8318
8319 /* If we want to refer to something bigger than the original memref,
8320 generate a perverse subreg instead. That will force a reload
8321 of the original memref X. */
8322 if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (mode))
8323 return gen_rtx (SUBREG, mode, x, 0);
8324
8325#if WORDS_BIG_ENDIAN
8326 offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
8327 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
8328#endif
8329#if BYTES_BIG_ENDIAN
8330 /* Adjust the address so that the address-after-the-data
8331 is unchanged. */
8332 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
8333 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
8334#endif
8335 new = gen_rtx (MEM, mode, plus_constant (XEXP (x, 0), offset));
8336 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
8337 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x);
8338 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
8339 return new;
8340 }
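/* Illustrative example (big-endian target, 4-byte words assumed):
   taking the QImode lowpart of a SImode MEM leaves the word offset at
   0, then the byte adjustment subtracts MIN (4, 1) - MIN (4, 4) == -3,
   so OFFSET becomes 3 and the new MEM addresses the low-order byte.  */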
8341
8342 /* If X is a comparison operator, rewrite it in a new mode. This
8343 probably won't match, but may allow further simplifications. */
8344 else if (GET_RTX_CLASS (GET_CODE (x)) == '<')
8345 return gen_rtx_combine (GET_CODE (x), mode, XEXP (x, 0), XEXP (x, 1));
8346
8347 /* If we couldn't simplify X any other way, just enclose it in a
8348 SUBREG. Normally, this SUBREG won't match, but some patterns may
a7c99304 8349 include an explicit SUBREG or we may simplify it further in combine. */
230d793d 8350 else
8351 {
8352 int word = 0;
8353
8354 if (WORDS_BIG_ENDIAN && GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD)
8355 word = ((GET_MODE_SIZE (GET_MODE (x))
8356 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD))
8357 / UNITS_PER_WORD);
8358 return gen_rtx (SUBREG, mode, x, word);
8359 }
8360}
8361\f
8362/* Make an rtx expression. This is a subset of gen_rtx and only supports
8363 expressions of 1, 2, or 3 operands, each of which are rtx expressions.
8364
8365 If the identical expression was previously in the insn (in the undobuf),
8366 it will be returned. Only if it is not found will a new expression
8367 be made. */
8368
8369/*VARARGS2*/
8370static rtx
4f90e4a0 8371gen_rtx_combine VPROTO((enum rtx_code code, enum machine_mode mode, ...))
230d793d 8372{
4f90e4a0 8373#ifndef __STDC__
8374 enum rtx_code code;
8375 enum machine_mode mode;
8376#endif
8377 va_list p;
8378 int n_args;
8379 rtx args[3];
8380 int i, j;
8381 char *fmt;
8382 rtx rt;
8383
8384 VA_START (p, mode);
8385
8386#ifndef __STDC__
8387 code = va_arg (p, enum rtx_code);
8388 mode = va_arg (p, enum machine_mode);
8389#endif
8390
8391 n_args = GET_RTX_LENGTH (code);
8392 fmt = GET_RTX_FORMAT (code);
8393
8394 if (n_args == 0 || n_args > 3)
8395 abort ();
8396
8397 /* Get each arg and verify that it is supposed to be an expression. */
8398 for (j = 0; j < n_args; j++)
8399 {
8400 if (*fmt++ != 'e')
8401 abort ();
8402
8403 args[j] = va_arg (p, rtx);
8404 }
8405
8406 /* See if this is in undobuf. Be sure we don't use objects that came
8407 from another insn; this could produce circular rtl structures. */
8408
8409 for (i = previous_num_undos; i < undobuf.num_undo; i++)
8410 if (!undobuf.undo[i].is_int
8411 && GET_CODE (undobuf.undo[i].old_contents.r) == code
8412 && GET_MODE (undobuf.undo[i].old_contents.r) == mode)
8413 {
8414 for (j = 0; j < n_args; j++)
f5393ab9 8415 if (XEXP (undobuf.undo[i].old_contents.r, j) != args[j])
8416 break;
8417
8418 if (j == n_args)
f5393ab9 8419 return undobuf.undo[i].old_contents.r;
8420 }
8421
8422 /* Otherwise make a new rtx. We know we have 1, 2, or 3 args.
8423 Use rtx_alloc instead of gen_rtx because it's faster on RISC. */
8424 rt = rtx_alloc (code);
8425 PUT_MODE (rt, mode);
8426 XEXP (rt, 0) = args[0];
8427 if (n_args > 1)
8428 {
8429 XEXP (rt, 1) = args[1];
8430 if (n_args > 2)
8431 XEXP (rt, 2) = args[2];
8432 }
8433 return rt;
8434}
8435
8436/* These routines make binary and unary operations by first seeing if they
8437 fold; if not, a new expression is allocated. */
8438
8439static rtx
8440gen_binary (code, mode, op0, op1)
8441 enum rtx_code code;
8442 enum machine_mode mode;
8443 rtx op0, op1;
8444{
8445 rtx result;
8446 rtx tem;
8447
8448 if (GET_RTX_CLASS (code) == 'c'
8449 && (GET_CODE (op0) == CONST_INT
8450 || (CONSTANT_P (op0) && GET_CODE (op1) != CONST_INT)))
8451 tem = op0, op0 = op1, op1 = tem;
8452
8453 if (GET_RTX_CLASS (code) == '<')
8454 {
8455 enum machine_mode op_mode = GET_MODE (op0);
8456
8457 /* Strip the COMPARE from (REL_OP (compare X Y) 0) to get
8458 just (REL_OP X Y). */
8459 if (GET_CODE (op0) == COMPARE && op1 == const0_rtx)
8460 {
8461 op1 = XEXP (op0, 1);
8462 op0 = XEXP (op0, 0);
8463 op_mode = GET_MODE (op0);
8464 }
8465
8466 if (op_mode == VOIDmode)
8467 op_mode = GET_MODE (op1);
8468 result = simplify_relational_operation (code, op_mode, op0, op1);
8469 }
8470 else
8471 result = simplify_binary_operation (code, mode, op0, op1);
8472
8473 if (result)
8474 return result;
8475
8476 /* Put complex operands first and constants second. */
8477 if (GET_RTX_CLASS (code) == 'c'
8478 && ((CONSTANT_P (op0) && GET_CODE (op1) != CONST_INT)
8479 || (GET_RTX_CLASS (GET_CODE (op0)) == 'o'
8480 && GET_RTX_CLASS (GET_CODE (op1)) != 'o')
8481 || (GET_CODE (op0) == SUBREG
8482 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (op0))) == 'o'
8483 && GET_RTX_CLASS (GET_CODE (op1)) != 'o')))
8484 return gen_rtx_combine (code, mode, op1, op0);
8485
8486 return gen_rtx_combine (code, mode, op0, op1);
8487}
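/* Illustrative example: calling gen_binary with a commutative code and
   a CONST_INT as OP0, e.g. PLUS of (const_int 5) and a register, first
   swaps the operands so the canonical (plus reg (const_int 5)) is
   built rather than (plus (const_int 5) reg).  */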
8488
8489static rtx
0c1c8ea6 8490gen_unary (code, mode, op0_mode, op0)
230d793d 8491 enum rtx_code code;
0c1c8ea6 8492 enum machine_mode mode, op0_mode;
8493 rtx op0;
8494{
0c1c8ea6 8495 rtx result = simplify_unary_operation (code, mode, op0, op0_mode);
8496
8497 if (result)
8498 return result;
8499
8500 return gen_rtx_combine (code, mode, op0);
8501}
8502\f
8503/* Simplify a comparison between *POP0 and *POP1 where CODE is the
8504 comparison code that will be tested.
8505
8506 The result is a possibly different comparison code to use. *POP0 and
8507 *POP1 may be updated.
8508
8509 It is possible that we might detect that a comparison is either always
8510 true or always false. However, we do not perform general constant
5089e22e 8511 folding in combine, so this knowledge isn't useful. Such tautologies
8512 should have been detected earlier. Hence we ignore all such cases. */
8513
8514static enum rtx_code
8515simplify_comparison (code, pop0, pop1)
8516 enum rtx_code code;
8517 rtx *pop0;
8518 rtx *pop1;
8519{
8520 rtx op0 = *pop0;
8521 rtx op1 = *pop1;
8522 rtx tem, tem1;
8523 int i;
8524 enum machine_mode mode, tmode;
8525
8526 /* Try a few ways of applying the same transformation to both operands. */
8527 while (1)
8528 {
8529#ifndef WORD_REGISTER_OPERATIONS
8530 /* The test below this one won't handle SIGN_EXTENDs on these machines,
8531 so check specially. */
8532 if (code != GTU && code != GEU && code != LTU && code != LEU
8533 && GET_CODE (op0) == ASHIFTRT && GET_CODE (op1) == ASHIFTRT
8534 && GET_CODE (XEXP (op0, 0)) == ASHIFT
8535 && GET_CODE (XEXP (op1, 0)) == ASHIFT
8536 && GET_CODE (XEXP (XEXP (op0, 0), 0)) == SUBREG
8537 && GET_CODE (XEXP (XEXP (op1, 0), 0)) == SUBREG
8538 && (GET_MODE (SUBREG_REG (XEXP (XEXP (op0, 0), 0)))
ad25ba17 8539 == GET_MODE (SUBREG_REG (XEXP (XEXP (op1, 0), 0))))
8540 && GET_CODE (XEXP (op0, 1)) == CONST_INT
8541 && GET_CODE (XEXP (op1, 1)) == CONST_INT
8542 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
8543 && GET_CODE (XEXP (XEXP (op1, 0), 1)) == CONST_INT
8544 && INTVAL (XEXP (op0, 1)) == INTVAL (XEXP (op1, 1))
8545 && INTVAL (XEXP (op0, 1)) == INTVAL (XEXP (XEXP (op0, 0), 1))
8546 && INTVAL (XEXP (op0, 1)) == INTVAL (XEXP (XEXP (op1, 0), 1))
8547 && (INTVAL (XEXP (op0, 1))
8548 == (GET_MODE_BITSIZE (GET_MODE (op0))
8549 - (GET_MODE_BITSIZE
8550 (GET_MODE (SUBREG_REG (XEXP (XEXP (op0, 0), 0))))))))
8551 {
8552 op0 = SUBREG_REG (XEXP (XEXP (op0, 0), 0));
8553 op1 = SUBREG_REG (XEXP (XEXP (op1, 0), 0));
8554 }
8555#endif
8556
8557 /* If both operands are the same constant shift, see if we can ignore the
8558 shift. We can if the shift is a rotate or if the bits shifted out of
951553af 8559 this shift are known to be zero for both inputs and if the type of
230d793d 8560 comparison is compatible with the shift. */
8561 if (GET_CODE (op0) == GET_CODE (op1)
8562 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
8563 && ((GET_CODE (op0) == ROTATE && (code == NE || code == EQ))
45620ed4 8564 || ((GET_CODE (op0) == LSHIFTRT || GET_CODE (op0) == ASHIFT)
8565 && (code != GT && code != LT && code != GE && code != LE))
8566 || (GET_CODE (op0) == ASHIFTRT
8567 && (code != GTU && code != LTU
8568 && code != GEU && code != LEU)))
8569 && GET_CODE (XEXP (op0, 1)) == CONST_INT
8570 && INTVAL (XEXP (op0, 1)) >= 0
8571 && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
8572 && XEXP (op0, 1) == XEXP (op1, 1))
8573 {
8574 enum machine_mode mode = GET_MODE (op0);
5f4f0e22 8575 unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
8576 int shift_count = INTVAL (XEXP (op0, 1));
8577
8578 if (GET_CODE (op0) == LSHIFTRT || GET_CODE (op0) == ASHIFTRT)
8579 mask &= (mask >> shift_count) << shift_count;
45620ed4 8580 else if (GET_CODE (op0) == ASHIFT)
8581 mask = (mask & (mask << shift_count)) >> shift_count;
8582
8583 if ((nonzero_bits (XEXP (op0, 0), mode) & ~ mask) == 0
8584 && (nonzero_bits (XEXP (op1, 0), mode) & ~ mask) == 0)
8585 op0 = XEXP (op0, 0), op1 = XEXP (op1, 0);
8586 else
8587 break;
8588 }
8589
8590 /* If both operands are AND's of a paradoxical SUBREG by constant, the
8591 SUBREGs are of the same mode, and, in both cases, the AND would
8592 be redundant if the comparison was done in the narrower mode,
8593 do the comparison in the narrower mode (e.g., we are AND'ing with 1
8594 and the operand's possibly nonzero bits are 0xffffff01; in that case
8595 if we only care about QImode, we don't need the AND). This case
8596 occurs if the output mode of an scc insn is not SImode and
8597 STORE_FLAG_VALUE == 1 (e.g., the 386).
8598
8599 Similarly, check for a case where the AND's are ZERO_EXTEND
8600 operations from some narrower mode even though a SUBREG is not
8601 present. */
8602
8603 else if (GET_CODE (op0) == AND && GET_CODE (op1) == AND
8604 && GET_CODE (XEXP (op0, 1)) == CONST_INT
7e4dc511 8605 && GET_CODE (XEXP (op1, 1)) == CONST_INT)
230d793d 8606 {
8607 rtx inner_op0 = XEXP (op0, 0);
8608 rtx inner_op1 = XEXP (op1, 0);
8609 HOST_WIDE_INT c0 = INTVAL (XEXP (op0, 1));
8610 HOST_WIDE_INT c1 = INTVAL (XEXP (op1, 1));
8611 int changed = 0;
8612
8613 if (GET_CODE (inner_op0) == SUBREG && GET_CODE (inner_op1) == SUBREG
8614 && (GET_MODE_SIZE (GET_MODE (inner_op0))
8615 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (inner_op0))))
8616 && (GET_MODE (SUBREG_REG (inner_op0))
8617 == GET_MODE (SUBREG_REG (inner_op1)))
8618 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0)))
8619 <= HOST_BITS_PER_WIDE_INT)
8620 && (0 == ((~c0) & nonzero_bits (SUBREG_REG (inner_op0),
8621 GET_MODE (SUBREG_REG (op0)))))
8622 && (0 == ((~c1) & nonzero_bits (SUBREG_REG (inner_op1),
8623 GET_MODE (SUBREG_REG (inner_op1))))))
8624 {
8625 op0 = SUBREG_REG (inner_op0);
8626 op1 = SUBREG_REG (inner_op1);
8627
8628 /* The resulting comparison is always unsigned since we masked
8629 off the original sign bit. */
8630 code = unsigned_condition (code);
8631
8632 changed = 1;
8633 }
230d793d 8634
8635 else if (c0 == c1)
8636 for (tmode = GET_CLASS_NARROWEST_MODE
8637 (GET_MODE_CLASS (GET_MODE (op0)));
8638 tmode != GET_MODE (op0); tmode = GET_MODE_WIDER_MODE (tmode))
8639 if (c0 == GET_MODE_MASK (tmode))
8640 {
8641 op0 = gen_lowpart_for_combine (tmode, inner_op0);
8642 op1 = gen_lowpart_for_combine (tmode, inner_op1);
66415c8b 8643 code = unsigned_condition (code);
8644 changed = 1;
8645 break;
8646 }
8647
8648 if (! changed)
8649 break;
230d793d 8650 }
3a19aabc 8651
8652 /* If both operands are NOT, we can strip off the outer operation
8653 and adjust the comparison code for swapped operands; similarly for
8654 NEG, except that this must be an equality comparison. */
8655 else if ((GET_CODE (op0) == NOT && GET_CODE (op1) == NOT)
8656 || (GET_CODE (op0) == NEG && GET_CODE (op1) == NEG
8657 && (code == EQ || code == NE)))
8658 op0 = XEXP (op0, 0), op1 = XEXP (op1, 0), code = swap_condition (code);
3a19aabc 8659
8660 else
8661 break;
8662 }
8663
8664 /* If the first operand is a constant, swap the operands and adjust the
8665 comparison code appropriately. */
8666 if (CONSTANT_P (op0))
8667 {
8668 tem = op0, op0 = op1, op1 = tem;
8669 code = swap_condition (code);
8670 }
8671
8672 /* We now enter a loop during which we will try to simplify the comparison.
8673 For the most part, we only are concerned with comparisons with zero,
8674 but some things may really be comparisons with zero but not start
8675 out looking that way. */
8676
8677 while (GET_CODE (op1) == CONST_INT)
8678 {
8679 enum machine_mode mode = GET_MODE (op0);
8680 int mode_width = GET_MODE_BITSIZE (mode);
5f4f0e22 8681 unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
8682 int equality_comparison_p;
8683 int sign_bit_comparison_p;
8684 int unsigned_comparison_p;
5f4f0e22 8685 HOST_WIDE_INT const_op;
8686
8687 /* We only want to handle integral modes. This catches VOIDmode,
8688 CCmode, and the floating-point modes. An exception is that we
8689 can handle VOIDmode if OP0 is a COMPARE or a comparison
8690 operation. */
8691
8692 if (GET_MODE_CLASS (mode) != MODE_INT
8693 && ! (mode == VOIDmode
8694 && (GET_CODE (op0) == COMPARE
8695 || GET_RTX_CLASS (GET_CODE (op0)) == '<')))
8696 break;
8697
8698 /* Get the constant we are comparing against and turn off all bits
8699 not on in our mode. */
8700 const_op = INTVAL (op1);
5f4f0e22 8701 if (mode_width <= HOST_BITS_PER_WIDE_INT)
4803a34a 8702 const_op &= mask;
8703
8704 /* If we are comparing against a constant power of two and the value
951553af 8705 being compared can only have that single bit nonzero (e.g., it was
8706 `and'ed with that bit), we can replace this with a comparison
8707 with zero. */
8708 if (const_op
8709 && (code == EQ || code == NE || code == GE || code == GEU
8710 || code == LT || code == LTU)
5f4f0e22 8711 && mode_width <= HOST_BITS_PER_WIDE_INT
230d793d 8712 && exact_log2 (const_op) >= 0
951553af 8713 && nonzero_bits (op0, mode) == const_op)
8714 {
8715 code = (code == EQ || code == GE || code == GEU ? NE : EQ);
8716 op1 = const0_rtx, const_op = 0;
8717 }
8718
8719 /* Similarly, if we are comparing a value known to be either -1 or
8720 0 with -1, change it to the opposite comparison against zero. */
8721
8722 if (const_op == -1
8723 && (code == EQ || code == NE || code == GT || code == LE
8724 || code == GEU || code == LTU)
8725 && num_sign_bit_copies (op0, mode) == mode_width)
8726 {
8727 code = (code == EQ || code == LE || code == GEU ? NE : EQ);
8728 op1 = const0_rtx, const_op = 0;
8729 }
8730
230d793d 8731 /* Do some canonicalizations based on the comparison code. We prefer
8732 comparisons against zero and then prefer equality comparisons.
8733 If we can reduce the size of a constant, we will do that too. */
8734
8735 switch (code)
8736 {
8737 case LT:
8738 /* < C is equivalent to <= (C - 1). */
8739 if (const_op > 0)
230d793d 8740 {
4803a34a 8741 const_op -= 1;
5f4f0e22 8742 op1 = GEN_INT (const_op);
8743 code = LE;
8744 /* ... fall through to LE case below. */
8745 }
8746 else
8747 break;
8748
8749 case LE:
8750 /* <= C is equivalent to < (C + 1); we do this for C < 0. */
8751 if (const_op < 0)
8752 {
8753 const_op += 1;
5f4f0e22 8754 op1 = GEN_INT (const_op);
8755 code = LT;
8756 }
8757
8758 /* If we are doing a <= 0 comparison on a value known to have
8759 a zero sign bit, we can replace this with == 0. */
8760 else if (const_op == 0
5f4f0e22 8761 && mode_width <= HOST_BITS_PER_WIDE_INT
951553af 8762 && (nonzero_bits (op0, mode)
5f4f0e22 8763 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
8764 code = EQ;
8765 break;
8766
8767 case GE:
8768 /* >= C is equivalent to > (C - 1). */
8769 if (const_op > 0)
230d793d 8770 {
4803a34a 8771 const_op -= 1;
5f4f0e22 8772 op1 = GEN_INT (const_op);
8773 code = GT;
8774 /* ... fall through to GT below. */
8775 }
8776 else
8777 break;
8778
8779 case GT:
8780 /* > C is equivalent to >= (C + 1); we do this for C < 0. */
8781 if (const_op < 0)
8782 {
8783 const_op += 1;
5f4f0e22 8784 op1 = GEN_INT (const_op);
8785 code = GE;
8786 }
8787
8788 /* If we are doing a > 0 comparison on a value known to have
8789 a zero sign bit, we can replace this with != 0. */
8790 else if (const_op == 0
5f4f0e22 8791 && mode_width <= HOST_BITS_PER_WIDE_INT
951553af 8792 && (nonzero_bits (op0, mode)
5f4f0e22 8793 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
8794 code = NE;
8795 break;
8796
230d793d 8797 case LTU:
8798 /* < C is equivalent to <= (C - 1). */
8799 if (const_op > 0)
8800 {
8801 const_op -= 1;
5f4f0e22 8802 op1 = GEN_INT (const_op);
8803 code = LEU;
8804 /* ... fall through ... */
8805 }
8806
8807 /* (unsigned) < 0x80000000 is equivalent to >= 0. */
8808 else if (const_op == (HOST_WIDE_INT) 1 << (mode_width - 1))
8809 {
8810 const_op = 0, op1 = const0_rtx;
8811 code = GE;
8812 break;
8813 }
8814 else
8815 break;
8816
8817 case LEU:
8818 /* unsigned <= 0 is equivalent to == 0 */
8819 if (const_op == 0)
8820 code = EQ;
8821
8822 /* (unsigned) <= 0x7fffffff is equivalent to >= 0. */
8823 else if (const_op == ((HOST_WIDE_INT) 1 << (mode_width - 1)) - 1)
8824 {
8825 const_op = 0, op1 = const0_rtx;
8826 code = GE;
8827 }
8828 break;
8829
8830 case GEU:
8831 /* >= C is equivalent to > (C - 1). */
8832 if (const_op > 1)
8833 {
8834 const_op -= 1;
5f4f0e22 8835 op1 = GEN_INT (const_op);
8836 code = GTU;
8837 /* ... fall through ... */
8838 }
8839
8840 /* (unsigned) >= 0x80000000 is equivalent to < 0. */
8841 else if (const_op == (HOST_WIDE_INT) 1 << (mode_width - 1))
8842 {
8843 const_op = 0, op1 = const0_rtx;
8844 code = LT;
8b2e69e1 8845 break;
d0ab8cd3 8846 }
8847 else
8848 break;
8849
8850 case GTU:
8851 /* unsigned > 0 is equivalent to != 0 */
8852 if (const_op == 0)
8853 code = NE;
8854
8855 /* (unsigned) > 0x7fffffff is equivalent to < 0. */
8856 else if (const_op == ((HOST_WIDE_INT) 1 << (mode_width - 1)) - 1)
8857 {
8858 const_op = 0, op1 = const0_rtx;
8859 code = LT;
8860 }
8861 break;
8862 }
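/* Illustrative SImode examples of the unsigned canonicalizations:
   (ltu X 0x80000000) tests that the sign bit of X is clear and becomes
   (ge X 0); (geu X 0x80000000) tests that it is set and becomes
   (lt X 0).  */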
8863
8864 /* Compute some predicates to simplify code below. */
8865
8866 equality_comparison_p = (code == EQ || code == NE);
8867 sign_bit_comparison_p = ((code == LT || code == GE) && const_op == 0);
8868 unsigned_comparison_p = (code == LTU || code == LEU || code == GTU
8869 || code == LEU);
8870
8871 /* If this is a sign bit comparison and we can do arithmetic in
8872 MODE, say that we will only be needing the sign bit of OP0. */
8873 if (sign_bit_comparison_p
8874 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
8875 op0 = force_to_mode (op0, mode,
8876 ((HOST_WIDE_INT) 1
8877 << (GET_MODE_BITSIZE (mode) - 1)),
e3d616e3 8878 NULL_RTX, 0);
6139ff20 8879
8880 /* Now try cases based on the opcode of OP0. If none of the cases
8881 does a "continue", we exit this loop immediately after the
8882 switch. */
8883
8884 switch (GET_CODE (op0))
8885 {
8886 case ZERO_EXTRACT:
8887 /* If we are extracting a single bit from a variable position in
8888 a constant that has only a single bit set and are comparing it
8889 with zero, we can convert this into an equality comparison
8890 between the position and the location of the single bit. We can't
8891 do this if bits are big-endian and we don't have an extzv, since we then
8892 can't know what mode to use for the endianness adjustment. */
8893
8894#if ! BITS_BIG_ENDIAN || defined (HAVE_extzv)
8895 if (GET_CODE (XEXP (op0, 0)) == CONST_INT
8896 && XEXP (op0, 1) == const1_rtx
8897 && equality_comparison_p && const_op == 0
8898 && (i = exact_log2 (INTVAL (XEXP (op0, 0)))) >= 0)
8899 {
8900#if BITS_BIG_ENDIAN
8901 i = (GET_MODE_BITSIZE
8902 (insn_operand_mode[(int) CODE_FOR_extzv][1]) - 1 - i);
8903#endif
8904
8905 op0 = XEXP (op0, 2);
5f4f0e22 8906 op1 = GEN_INT (i);
8907 const_op = i;
8908
8909 /* Result is nonzero iff shift count is equal to I. */
8910 code = reverse_condition (code);
8911 continue;
8912 }
8913#endif
8914
8915 /* ... fall through ... */
8916
8917 case SIGN_EXTRACT:
8918 tem = expand_compound_operation (op0);
8919 if (tem != op0)
8920 {
8921 op0 = tem;
8922 continue;
8923 }
8924 break;
8925
8926 case NOT:
8927 /* If testing for equality, we can take the NOT of the constant. */
8928 if (equality_comparison_p
8929 && (tem = simplify_unary_operation (NOT, mode, op1, mode)) != 0)
8930 {
8931 op0 = XEXP (op0, 0);
8932 op1 = tem;
8933 continue;
8934 }
8935
8936 /* If just looking at the sign bit, reverse the sense of the
8937 comparison. */
8938 if (sign_bit_comparison_p)
8939 {
8940 op0 = XEXP (op0, 0);
8941 code = (code == GE ? LT : GE);
8942 continue;
8943 }
8944 break;
8945
8946 case NEG:
8947 /* If testing for equality, we can take the NEG of the constant. */
8948 if (equality_comparison_p
8949 && (tem = simplify_unary_operation (NEG, mode, op1, mode)) != 0)
8950 {
8951 op0 = XEXP (op0, 0);
8952 op1 = tem;
8953 continue;
8954 }
8955
8956 /* The remaining cases only apply to comparisons with zero. */
8957 if (const_op != 0)
8958 break;
8959
8960 /* When X is ABS or is known positive,
8961 (neg X) is < 0 if and only if X != 0. */
8962
8963 if (sign_bit_comparison_p
8964 && (GET_CODE (XEXP (op0, 0)) == ABS
5f4f0e22 8965 || (mode_width <= HOST_BITS_PER_WIDE_INT
951553af 8966 && (nonzero_bits (XEXP (op0, 0), mode)
5f4f0e22 8967 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)))
8968 {
8969 op0 = XEXP (op0, 0);
8970 code = (code == LT ? NE : EQ);
8971 continue;
8972 }
8973
8974 /* If we have NEG of something whose two high-order bits are the
8975 same, we know that "(-a) < 0" is equivalent to "a > 0". */
8976 if (num_sign_bit_copies (op0, mode) >= 2)
230d793d
RS
8977 {
8978 op0 = XEXP (op0, 0);
8979 code = swap_condition (code);
8980 continue;
8981 }
8982 break;
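 /* Illustrative instances of the NEG cases above:
 (eq (neg X) (const_int 7)) becomes (eq X (const_int -7));
 and when the two high-order bits of X are known equal,
 (lt (neg X) (const_int 0)) becomes (gt X (const_int 0)),
 the C analogue being that -x < 0 iff x > 0 when -x cannot
 overflow. */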
8983
8984 case ROTATE:
8985 /* If we are testing equality and our count is a constant, we
8986 can perform the inverse operation on our RHS. */
8987 if (equality_comparison_p && GET_CODE (XEXP (op0, 1)) == CONST_INT
8988 && (tem = simplify_binary_operation (ROTATERT, mode,
8989 op1, XEXP (op0, 1))) != 0)
8990 {
8991 op0 = XEXP (op0, 0);
8992 op1 = tem;
8993 continue;
8994 }
8995
8996 /* If we are doing a < 0 or >= 0 comparison, it means we are testing
8997 a particular bit. Convert it to an AND with a constant that has
8998 just that bit set. This will be converted into a ZERO_EXTRACT. */
8999 if (const_op == 0 && sign_bit_comparison_p
9000 && GET_CODE (XEXP (op0, 1)) == CONST_INT
5f4f0e22 9001 && mode_width <= HOST_BITS_PER_WIDE_INT)
230d793d 9002 {
5f4f0e22
CH
9003 op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
9004 ((HOST_WIDE_INT) 1
9005 << (mode_width - 1
9006 - INTVAL (XEXP (op0, 1)))));
230d793d
RS
9007 code = (code == LT ? NE : EQ);
9008 continue;
9009 }
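 /* Worked instances (a sketch, assuming an 8-bit mode): since
 rotating 0x28 right by 3 gives 5,
 (eq (rotate X (const_int 3)) (const_int 0x28)) becomes
 (eq X (const_int 5)); and (lt (rotate X (const_int 3))
 (const_int 0)) tests the bit rotated into the sign position,
 bit 8-1-3 == 4, so it becomes
 (ne (and X (const_int 16)) (const_int 0)). */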
9010
9011 /* ... fall through ... */
9012
9013 case ABS:
9014 /* ABS is ignorable inside an equality comparison with zero. */
9015 if (const_op == 0 && equality_comparison_p)
9016 {
9017 op0 = XEXP (op0, 0);
9018 continue;
9019 }
9020 break;
9021
9022
9023 case SIGN_EXTEND:
9024 /* Can simplify (compare (zero/sign_extend FOO) CONST)
9025 to (compare FOO CONST) if CONST fits in FOO's mode and we
9026 are either testing inequality or have an unsigned comparison
9027 with ZERO_EXTEND or a signed comparison with SIGN_EXTEND. */
9028 if (! unsigned_comparison_p
9029 && (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
5f4f0e22
CH
9030 <= HOST_BITS_PER_WIDE_INT)
9031 && ((unsigned HOST_WIDE_INT) const_op
9032 < (((HOST_WIDE_INT) 1
9033 << (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0))) - 1)))))
230d793d
RS
9034 {
9035 op0 = XEXP (op0, 0);
9036 continue;
9037 }
9038 break;
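 /* For instance, if the inner mode is QImode (8 bits), a signed
 comparison such as (lt (sign_extend:SI X:QI) (const_int 100))
 can be done as (lt X (const_int 100)) in QImode, because 100
 is below 1 << 7 and sign extension preserves signed order. */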
9039
9040 case SUBREG:
a687e897
RK
9041 /* Check for the case where we are comparing A - C1 with C2,
9042 both constants are smaller than 1/2 the maximum positive
9043 value in MODE, and the comparison is equality or unsigned.
9044 In that case, if A is either zero-extended to MODE or has
9045 sufficient sign bits so that the high-order bit in MODE
9046 is a copy of the sign in the inner mode, we can prove that it is
9047 safe to do the operation in the wider mode. This simplifies
9048 many range checks. */
9049
9050 if (mode_width <= HOST_BITS_PER_WIDE_INT
9051 && subreg_lowpart_p (op0)
9052 && GET_CODE (SUBREG_REG (op0)) == PLUS
9053 && GET_CODE (XEXP (SUBREG_REG (op0), 1)) == CONST_INT
9054 && INTVAL (XEXP (SUBREG_REG (op0), 1)) < 0
9055 && (- INTVAL (XEXP (SUBREG_REG (op0), 1))
9056 < GET_MODE_MASK (mode) / 2)
adb7a1cb 9057 && (unsigned HOST_WIDE_INT) const_op < GET_MODE_MASK (mode) / 2
951553af
RK
9058 && (0 == (nonzero_bits (XEXP (SUBREG_REG (op0), 0),
9059 GET_MODE (SUBREG_REG (op0)))
a687e897
RK
9060 & ~ GET_MODE_MASK (mode))
9061 || (num_sign_bit_copies (XEXP (SUBREG_REG (op0), 0),
9062 GET_MODE (SUBREG_REG (op0)))
9063 > (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0)))
9064 - GET_MODE_BITSIZE (mode)))))
9065 {
9066 op0 = SUBREG_REG (op0);
9067 continue;
9068 }
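 /* A sketch of a typical range check this handles: if A is known
 to be zero-extended into SImode, then
 (ltu (subreg:QI (plus:SI A (const_int -10)) 0) (const_int 20))
 can safely be done as
 (ltu (plus:SI A (const_int -10)) (const_int 20)) in SImode,
 since 10 and 20 are both less than
 GET_MODE_MASK (QImode) / 2 == 127. */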
9069
fe0cf571
RK
9070 /* If the inner mode is narrower and we are extracting the low part,
9071 we can treat the SUBREG as if it were a ZERO_EXTEND. */
9072 if (subreg_lowpart_p (op0)
89f1c7f2
RS
9073 && GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0))) < mode_width)
9074 /* Fall through */ ;
9075 else
230d793d
RS
9076 break;
9077
9078 /* ... fall through ... */
9079
9080 case ZERO_EXTEND:
9081 if ((unsigned_comparison_p || equality_comparison_p)
9082 && (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
5f4f0e22
CH
9083 <= HOST_BITS_PER_WIDE_INT)
9084 && ((unsigned HOST_WIDE_INT) const_op
230d793d
RS
9085 < GET_MODE_MASK (GET_MODE (XEXP (op0, 0)))))
9086 {
9087 op0 = XEXP (op0, 0);
9088 continue;
9089 }
9090 break;
9091
9092 case PLUS:
20fdd649 9093 /* (eq (plus X A) B) -> (eq X (minus B A)). We can only do
5089e22e 9094 this for equality comparisons due to pathological cases involving
230d793d 9095 overflows. */
20fdd649
RK
9096 if (equality_comparison_p
9097 && 0 != (tem = simplify_binary_operation (MINUS, mode,
9098 op1, XEXP (op0, 1))))
230d793d
RS
9099 {
9100 op0 = XEXP (op0, 0);
9101 op1 = tem;
9102 continue;
9103 }
9104
9105 /* (plus (abs X) (const_int -1)) is < 0 if and only if X == 0. */
9106 if (const_op == 0 && XEXP (op0, 1) == constm1_rtx
9107 && GET_CODE (XEXP (op0, 0)) == ABS && sign_bit_comparison_p)
9108 {
9109 op0 = XEXP (XEXP (op0, 0), 0);
9110 code = (code == LT ? EQ : NE);
9111 continue;
9112 }
9113 break;
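 /* Concrete instances: (eq (plus X (const_int 5)) (const_int 12))
 becomes (eq X (const_int 7)); and the ABS case corresponds to
 the C observation that abs (x) - 1 < 0 exactly when x == 0
 (leaving aside the overflowing ABS of the most negative value). */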
9114
9115 case MINUS:
20fdd649
RK
9116 /* (eq (minus A B) C) -> (eq A (plus B C)) or
9117 (eq B (minus A C)), whichever simplifies. We can only do
9118 this for equality comparisons due to pathological cases involving
9119 overflows. */
9120 if (equality_comparison_p
9121 && 0 != (tem = simplify_binary_operation (PLUS, mode,
9122 XEXP (op0, 1), op1)))
9123 {
9124 op0 = XEXP (op0, 0);
9125 op1 = tem;
9126 continue;
9127 }
9128
9129 if (equality_comparison_p
9130 && 0 != (tem = simplify_binary_operation (MINUS, mode,
9131 XEXP (op0, 0), op1)))
9132 {
9133 op0 = XEXP (op0, 1);
9134 op1 = tem;
9135 continue;
9136 }
9137
230d793d
RS
9138 /* The sign bit of (minus (ashiftrt X C) X), where C is the number
9139 of bits in X minus 1, is one iff X > 0. */
9140 if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == ASHIFTRT
9141 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
9142 && INTVAL (XEXP (XEXP (op0, 0), 1)) == mode_width - 1
9143 && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
9144 {
9145 op0 = XEXP (op0, 1);
9146 code = (code == GE ? LE : GT);
9147 continue;
9148 }
9149 break;
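 /* For example, (eq (minus A (const_int 3)) (const_int 4)) becomes
 (eq A (const_int 7)). The final case is the C idiom
 ((x >> 31) - x) < 0, which holds exactly when x > 0 on a 32-bit
 machine; hence with C == mode_width - 1,
 (lt (minus (ashiftrt X (const_int 31)) X) (const_int 0))
 becomes (gt X (const_int 0)). */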
9150
9151 case XOR:
9152 /* (eq (xor A B) C) -> (eq A (xor B C)). This is a simplification
9153 if C is zero or B is a constant. */
9154 if (equality_comparison_p
9155 && 0 != (tem = simplify_binary_operation (XOR, mode,
9156 XEXP (op0, 1), op1)))
9157 {
9158 op0 = XEXP (op0, 0);
9159 op1 = tem;
9160 continue;
9161 }
9162 break;
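 /* E.g., (eq (xor X (const_int 5)) (const_int 3)) becomes
 (eq X (const_int 6)), since 5 ^ 3 == 6. */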
9163
9164 case EQ: case NE:
9165 case LT: case LTU: case LE: case LEU:
9166 case GT: case GTU: case GE: case GEU:
9167 /* We can't do anything if OP0 is a condition code value, rather
9168 than an actual data value. */
9169 if (const_op != 0
9170#ifdef HAVE_cc0
9171 || XEXP (op0, 0) == cc0_rtx
9172#endif
9173 || GET_MODE_CLASS (GET_MODE (XEXP (op0, 0))) == MODE_CC)
9174 break;
9175
9176 /* Get the two operands being compared. */
9177 if (GET_CODE (XEXP (op0, 0)) == COMPARE)
9178 tem = XEXP (XEXP (op0, 0), 0), tem1 = XEXP (XEXP (op0, 0), 1);
9179 else
9180 tem = XEXP (op0, 0), tem1 = XEXP (op0, 1);
9181
9182 /* Check for the cases where we simply want the result of the
9183 earlier test or the opposite of that result. */
9184 if (code == NE
9185 || (code == EQ && reversible_comparison_p (op0))
5f4f0e22 9186 || (GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
3f508eca 9187 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
230d793d 9188 && (STORE_FLAG_VALUE
5f4f0e22
CH
9189 & (((HOST_WIDE_INT) 1
9190 << (GET_MODE_BITSIZE (GET_MODE (op0)) - 1))))
230d793d
RS
9191 && (code == LT
9192 || (code == GE && reversible_comparison_p (op0)))))
9193 {
9194 code = (code == LT || code == NE
9195 ? GET_CODE (op0) : reverse_condition (GET_CODE (op0)));
9196 op0 = tem, op1 = tem1;
9197 continue;
9198 }
9199 break;
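 /* Two simple instances: (ne (ltu A B) (const_int 0)) is just
 (ltu A B); and (eq (ltu A B) (const_int 0)), being the opposite
 of the inner test, becomes (geu A B) when LTU is reversible. */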
9200
9201 case IOR:
9202 /* The sign bit of (ior (plus X (const_int -1)) X) is non-zero
9203 iff X <= 0. */
9204 if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == PLUS
9205 && XEXP (XEXP (op0, 0), 1) == constm1_rtx
9206 && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
9207 {
9208 op0 = XEXP (op0, 1);
9209 code = (code == GE ? GT : LE);
9210 continue;
9211 }
9212 break;
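 /* This is the C idiom (x | (x - 1)) < 0, true exactly when
 x <= 0: if x > 0 neither operand has the sign bit set, while
 x == 0 makes x - 1 == -1. Hence
 (lt (ior (plus X (const_int -1)) X) (const_int 0)) becomes
 (le X (const_int 0)). */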
9213
9214 case AND:
9215 /* Convert (and (xshift 1 X) Y) to (and (lshiftrt Y X) 1). This
9216 will be converted to a ZERO_EXTRACT later. */
9217 if (const_op == 0 && equality_comparison_p
45620ed4 9218 && GET_CODE (XEXP (op0, 0)) == ASHIFT
230d793d
RS
9219 && XEXP (XEXP (op0, 0), 0) == const1_rtx)
9220 {
9221 op0 = simplify_and_const_int
9222 (op0, mode, gen_rtx_combine (LSHIFTRT, mode,
9223 XEXP (op0, 1),
9224 XEXP (XEXP (op0, 0), 1)),
5f4f0e22 9225 (HOST_WIDE_INT) 1);
230d793d
RS
9226 continue;
9227 }
9228
9229 /* If we are comparing (and (lshiftrt X C1) C2) for equality with
9230 zero and X is a comparison and C1 and C2 describe only bits set
9231 in STORE_FLAG_VALUE, we can compare with X. */
9232 if (const_op == 0 && equality_comparison_p
5f4f0e22 9233 && mode_width <= HOST_BITS_PER_WIDE_INT
230d793d
RS
9234 && GET_CODE (XEXP (op0, 1)) == CONST_INT
9235 && GET_CODE (XEXP (op0, 0)) == LSHIFTRT
9236 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
9237 && INTVAL (XEXP (XEXP (op0, 0), 1)) >= 0
5f4f0e22 9238 && INTVAL (XEXP (XEXP (op0, 0), 1)) < HOST_BITS_PER_WIDE_INT)
230d793d
RS
9239 {
9240 mask = ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode))
9241 << INTVAL (XEXP (XEXP (op0, 0), 1)));
9242 if ((~ STORE_FLAG_VALUE & mask) == 0
9243 && (GET_RTX_CLASS (GET_CODE (XEXP (XEXP (op0, 0), 0))) == '<'
9244 || ((tem = get_last_value (XEXP (XEXP (op0, 0), 0))) != 0
9245 && GET_RTX_CLASS (GET_CODE (tem)) == '<')))
9246 {
9247 op0 = XEXP (XEXP (op0, 0), 0);
9248 continue;
9249 }
9250 }
9251
9252 /* If we are doing an equality comparison of an AND of a bit equal
9253 to the sign bit, replace this with a LT or GE comparison of
9254 the underlying value. */
9255 if (equality_comparison_p
9256 && const_op == 0
9257 && GET_CODE (XEXP (op0, 1)) == CONST_INT
5f4f0e22 9258 && mode_width <= HOST_BITS_PER_WIDE_INT
230d793d 9259 && ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode))
5f4f0e22 9260 == (HOST_WIDE_INT) 1 << (mode_width - 1)))
230d793d
RS
9261 {
9262 op0 = XEXP (op0, 0);
9263 code = (code == EQ ? GE : LT);
9264 continue;
9265 }
9266
9267 /* If this AND operation is really a ZERO_EXTEND from a narrower
9268 mode, the constant fits within that mode, and this is either an
9269 equality or unsigned comparison, try to do this comparison in
9270 the narrower mode. */
9271 if ((equality_comparison_p || unsigned_comparison_p)
9272 && GET_CODE (XEXP (op0, 1)) == CONST_INT
9273 && (i = exact_log2 ((INTVAL (XEXP (op0, 1))
9274 & GET_MODE_MASK (mode))
9275 + 1)) >= 0
9276 && const_op >> i == 0
9277 && (tmode = mode_for_size (i, MODE_INT, 1)) != BLKmode)
9278 {
9279 op0 = gen_lowpart_for_combine (tmode, XEXP (op0, 0));
9280 continue;
9281 }
9282 break;
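 /* Worked instances of the AND cases above (a sketch, assuming an
 8-bit mode): (eq (and X (const_int 128)) (const_int 0)) tests
 the sign bit, so it becomes (ge X (const_int 0)); and since
 (and:SI X (const_int 255)) acts as a ZERO_EXTEND from QImode,
 (ltu (and:SI X (const_int 255)) (const_int 17)) can be done as
 (ltu X (const_int 17)) in QImode. */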
9283
9284 case ASHIFT:
45620ed4 9285 /* If we have (compare (ashift FOO N) (const_int C)) and
230d793d 9286 the high order N bits of FOO (N+1 if an inequality comparison)
951553af 9287 are known to be zero, we can do this by comparing FOO with C
230d793d
RS
9288 shifted right N bits so long as the low-order N bits of C are
9289 zero. */
9290 if (GET_CODE (XEXP (op0, 1)) == CONST_INT
9291 && INTVAL (XEXP (op0, 1)) >= 0
9292 && ((INTVAL (XEXP (op0, 1)) + ! equality_comparison_p)
5f4f0e22
CH
9293 < HOST_BITS_PER_WIDE_INT)
9294 && ((const_op
34785d05 9295 & (((HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1))) - 1)) == 0)
5f4f0e22 9296 && mode_width <= HOST_BITS_PER_WIDE_INT
951553af 9297 && (nonzero_bits (XEXP (op0, 0), mode)
230d793d
RS
9298 & ~ (mask >> (INTVAL (XEXP (op0, 1))
9299 + ! equality_comparison_p))) == 0)
9300 {
9301 const_op >>= INTVAL (XEXP (op0, 1));
5f4f0e22 9302 op1 = GEN_INT (const_op);
230d793d
RS
9303 op0 = XEXP (op0, 0);
9304 continue;
9305 }
9306
dfbe1b2f 9307 /* If we are doing a sign bit comparison, it means we are testing
230d793d 9308 a particular bit. Convert it to the appropriate AND. */
dfbe1b2f 9309 if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 1)) == CONST_INT
5f4f0e22 9310 && mode_width <= HOST_BITS_PER_WIDE_INT)
230d793d 9311 {
5f4f0e22
CH
9312 op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
9313 ((HOST_WIDE_INT) 1
9314 << (mode_width - 1
9315 - INTVAL (XEXP (op0, 1)))));
230d793d
RS
9316 code = (code == LT ? NE : EQ);
9317 continue;
9318 }
dfbe1b2f
RK
9319
9320 /* If this is an equality comparison with zero and we are shifting
9321 the low bit to the sign bit, we can convert this to an AND of the
9322 low-order bit. */
9323 if (const_op == 0 && equality_comparison_p
9324 && GET_CODE (XEXP (op0, 1)) == CONST_INT
9325 && INTVAL (XEXP (op0, 1)) == mode_width - 1)
9326 {
5f4f0e22
CH
9327 op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
9328 (HOST_WIDE_INT) 1);
dfbe1b2f
RK
9329 continue;
9330 }
230d793d
RS
9331 break;
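 /* For example: if the high bits of X are known zero,
 (eq (ashift X (const_int 2)) (const_int 20)) becomes
 (eq X (const_int 5)), since the low two bits of 20 are zero;
 and in an 8-bit mode, (lt (ashift X (const_int 3)) (const_int 0))
 tests bit 8-1-3 == 4, becoming
 (ne (and X (const_int 16)) (const_int 0)). */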
9332
9333 case ASHIFTRT:
d0ab8cd3
RK
9334 /* If this is an equality comparison with zero, we can do this
9335 as a logical shift, which might be much simpler. */
9336 if (equality_comparison_p && const_op == 0
9337 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
9338 {
9339 op0 = simplify_shift_const (NULL_RTX, LSHIFTRT, mode,
9340 XEXP (op0, 0),
9341 INTVAL (XEXP (op0, 1)));
9342 continue;
9343 }
9344
230d793d
RS
9345 /* If OP0 is a sign extension and CODE is not an unsigned comparison,
9346 do the comparison in a narrower mode. */
9347 if (! unsigned_comparison_p
9348 && GET_CODE (XEXP (op0, 1)) == CONST_INT
9349 && GET_CODE (XEXP (op0, 0)) == ASHIFT
9350 && XEXP (op0, 1) == XEXP (XEXP (op0, 0), 1)
9351 && (tmode = mode_for_size (mode_width - INTVAL (XEXP (op0, 1)),
22331794 9352 MODE_INT, 1)) != BLKmode
5f4f0e22
CH
9353 && ((unsigned HOST_WIDE_INT) const_op <= GET_MODE_MASK (tmode)
9354 || ((unsigned HOST_WIDE_INT) - const_op
9355 <= GET_MODE_MASK (tmode))))
230d793d
RS
9356 {
9357 op0 = gen_lowpart_for_combine (tmode, XEXP (XEXP (op0, 0), 0));
9358 continue;
9359 }
9360
9361 /* ... fall through ... */
9362 case LSHIFTRT:
9363 /* If we have (compare (xshiftrt FOO N) (const_int C)) and
951553af 9364 the low order N bits of FOO are known to be zero, we can do this
230d793d
RS
9365 by comparing FOO with C shifted left N bits so long as no
9366 overflow occurs. */
9367 if (GET_CODE (XEXP (op0, 1)) == CONST_INT
9368 && INTVAL (XEXP (op0, 1)) >= 0
5f4f0e22
CH
9369 && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
9370 && mode_width <= HOST_BITS_PER_WIDE_INT
951553af 9371 && (nonzero_bits (XEXP (op0, 0), mode)
5f4f0e22 9372 & (((HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1))) - 1)) == 0
230d793d
RS
9373 && (const_op == 0
9374 || (floor_log2 (const_op) + INTVAL (XEXP (op0, 1))
9375 < mode_width)))
9376 {
9377 const_op <<= INTVAL (XEXP (op0, 1));
5f4f0e22 9378 op1 = GEN_INT (const_op);
230d793d
RS
9379 op0 = XEXP (op0, 0);
9380 continue;
9381 }
9382
9383 /* If we are using this shift to extract just the sign bit, we
9384 can replace this with an LT or GE comparison. */
9385 if (const_op == 0
9386 && (equality_comparison_p || sign_bit_comparison_p)
9387 && GET_CODE (XEXP (op0, 1)) == CONST_INT
9388 && INTVAL (XEXP (op0, 1)) == mode_width - 1)
9389 {
9390 op0 = XEXP (op0, 0);
9391 code = (code == NE || code == GT ? LT : GE);
9392 continue;
9393 }
9394 break;
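 /* Illustrations: if the low two bits of X are known zero, then
 (eq (lshiftrt X (const_int 2)) (const_int 5)) becomes
 (eq X (const_int 20)); and, extracting just the sign bit,
 (ne (lshiftrt X (const_int 31)) (const_int 0)) on a 32-bit
 machine is simply (lt X (const_int 0)). */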
9395 }
9396
9397 break;
9398 }
9399
9400 /* Now make any compound operations involved in this comparison. Then,
9401 check for an outermost SUBREG on OP0 that isn't doing anything or is
9402 paradoxical. The latter case can only occur when it is known that the
9403 "extra" bits will be zero. Therefore, it is safe to remove the SUBREG.
9404 We can never remove a SUBREG for a non-equality comparison because the
9405 sign bit is in a different place in the underlying object. */
9406
9407 op0 = make_compound_operation (op0, op1 == const0_rtx ? COMPARE : SET);
9408 op1 = make_compound_operation (op1, SET);
9409
9410 if (GET_CODE (op0) == SUBREG && subreg_lowpart_p (op0)
9411 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
9412 && (code == NE || code == EQ)
9413 && ((GET_MODE_SIZE (GET_MODE (op0))
9414 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0))))))
9415 {
9416 op0 = SUBREG_REG (op0);
9417 op1 = gen_lowpart_for_combine (GET_MODE (op0), op1);
9418 }
9419
9420 else if (GET_CODE (op0) == SUBREG && subreg_lowpart_p (op0)
9421 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
9422 && (code == NE || code == EQ)
ac49a949
RS
9423 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0)))
9424 <= HOST_BITS_PER_WIDE_INT)
951553af 9425 && (nonzero_bits (SUBREG_REG (op0), GET_MODE (SUBREG_REG (op0)))
230d793d
RS
9426 & ~ GET_MODE_MASK (GET_MODE (op0))) == 0
9427 && (tem = gen_lowpart_for_combine (GET_MODE (SUBREG_REG (op0)),
9428 op1),
951553af 9429 (nonzero_bits (tem, GET_MODE (SUBREG_REG (op0)))
230d793d
RS
9430 & ~ GET_MODE_MASK (GET_MODE (op0))) == 0))
9431 op0 = SUBREG_REG (op0), op1 = tem;
9432
9433 /* We now do the opposite procedure: Some machines don't have compare
9434 insns in all modes. If OP0's mode is an integer mode smaller than a
9435 word and we can't do a compare in that mode, see if there is a larger
a687e897
RK
9436 mode for which we can do the compare. There are a number of cases in
9437 which we can use the wider mode. */
230d793d
RS
9438
9439 mode = GET_MODE (op0);
9440 if (mode != VOIDmode && GET_MODE_CLASS (mode) == MODE_INT
9441 && GET_MODE_SIZE (mode) < UNITS_PER_WORD
9442 && cmp_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
9443 for (tmode = GET_MODE_WIDER_MODE (mode);
5f4f0e22
CH
9444 (tmode != VOIDmode
9445 && GET_MODE_BITSIZE (tmode) <= HOST_BITS_PER_WIDE_INT);
230d793d 9446 tmode = GET_MODE_WIDER_MODE (tmode))
a687e897 9447 if (cmp_optab->handlers[(int) tmode].insn_code != CODE_FOR_nothing)
230d793d 9448 {
951553af 9449 /* If the only nonzero bits in OP0 and OP1 are those in the
a687e897
RK
9450 narrower mode and this is an equality or unsigned comparison,
9451 we can use the wider mode. Similarly for sign-extended
7e4dc511 9452 values, in which case it is true for all comparisons. */
a687e897
RK
9453 if (((code == EQ || code == NE
9454 || code == GEU || code == GTU || code == LEU || code == LTU)
951553af
RK
9455 && (nonzero_bits (op0, tmode) & ~ GET_MODE_MASK (mode)) == 0
9456 && (nonzero_bits (op1, tmode) & ~ GET_MODE_MASK (mode)) == 0)
7e4dc511
RK
9457 || ((num_sign_bit_copies (op0, tmode)
9458 > GET_MODE_BITSIZE (tmode) - GET_MODE_BITSIZE (mode))
a687e897 9459 && (num_sign_bit_copies (op1, tmode)
58744483 9460 > GET_MODE_BITSIZE (tmode) - GET_MODE_BITSIZE (mode))))
a687e897
RK
9461 {
9462 op0 = gen_lowpart_for_combine (tmode, op0);
9463 op1 = gen_lowpart_for_combine (tmode, op1);
9464 break;
9465 }
230d793d 9466
a687e897
RK
9467 /* If this is a test for negative, we can make an explicit
9468 test of the sign bit. */
9469
9470 if (op1 == const0_rtx && (code == LT || code == GE)
9471 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
230d793d 9472 {
a687e897
RK
9473 op0 = gen_binary (AND, tmode,
9474 gen_lowpart_for_combine (tmode, op0),
5f4f0e22
CH
9475 GEN_INT ((HOST_WIDE_INT) 1
9476 << (GET_MODE_BITSIZE (mode) - 1)));
230d793d 9477 code = (code == LT) ? NE : EQ;
a687e897 9478 break;
230d793d 9479 }
230d793d
RS
9480 }
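 /* As a sketch of the widening above: if the target lacks an HImode
 compare but has one for SImode, an equality test of two HImode
 values whose nonzero bits fit in 16 bits can be done in SImode;
 and a test for negative, (lt X:HI (const_int 0)), can become
 (ne (and:SI X (const_int 32768)) (const_int 0)), an explicit
 test of the HImode sign bit. */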
9481
b7a775b2
RK
9482#ifdef CANONICALIZE_COMPARISON
9483 /* If this machine only supports a subset of valid comparisons, see if we
9484 can convert an unsupported one into a supported one. */
9485 CANONICALIZE_COMPARISON (code, op0, op1);
9486#endif
9487
230d793d
RS
9488 *pop0 = op0;
9489 *pop1 = op1;
9490
9491 return code;
9492}
9493\f
9494/* Return 1 if we know that X, a comparison operation, is not operating
9495 on a floating-point value or is EQ or NE, meaning that we can safely
9496 reverse it. */
9497
9498static int
9499reversible_comparison_p (x)
9500 rtx x;
9501{
9502 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
7e2a0d8e 9503 || flag_fast_math
230d793d
RS
9504 || GET_CODE (x) == NE || GET_CODE (x) == EQ)
9505 return 1;
9506
9507 switch (GET_MODE_CLASS (GET_MODE (XEXP (x, 0))))
9508 {
9509 case MODE_INT:
3ad2180a
RK
9510 case MODE_PARTIAL_INT:
9511 case MODE_COMPLEX_INT:
230d793d
RS
9512 return 1;
9513
9514 case MODE_CC:
9210df58
RK
9515 /* If the mode of the condition codes tells us that this is safe,
9516 we need look no further. */
9517 if (REVERSIBLE_CC_MODE (GET_MODE (XEXP (x, 0))))
9518 return 1;
9519
9520 /* Otherwise try and find where the condition codes were last set and
9521 use that. */
230d793d
RS
9522 x = get_last_value (XEXP (x, 0));
9523 return (x && GET_CODE (x) == COMPARE
3ad2180a 9524 && ! FLOAT_MODE_P (GET_MODE (XEXP (x, 0))));
230d793d
RS
9525 }
9526
9527 return 0;
9528}
9529\f
9530/* Utility function for the following routine. Called when X is part of a value
9531 being stored into reg_last_set_value. Sets reg_last_set_table_tick
9532 for each register mentioned. Similar to mention_regs in cse.c */
9533
9534static void
9535update_table_tick (x)
9536 rtx x;
9537{
9538 register enum rtx_code code = GET_CODE (x);
9539 register char *fmt = GET_RTX_FORMAT (code);
9540 register int i;
9541
9542 if (code == REG)
9543 {
9544 int regno = REGNO (x);
9545 int endregno = regno + (regno < FIRST_PSEUDO_REGISTER
9546 ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
9547
9548 for (i = regno; i < endregno; i++)
9549 reg_last_set_table_tick[i] = label_tick;
9550
9551 return;
9552 }
9553
9554 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
9555 /* Note that we can't have an "E" in values stored; see
9556 get_last_value_validate. */
9557 if (fmt[i] == 'e')
9558 update_table_tick (XEXP (x, i));
9559}
9560
9561/* Record that REG is set to VALUE in insn INSN. If VALUE is zero, we
9562 are saying that the register is clobbered and we no longer know its
7988fd36
RK
9563 value. If INSN is zero, don't update reg_last_set; this is only permitted
9564 with VALUE also zero and is used to invalidate the register. */
230d793d
RS
9565
9566static void
9567record_value_for_reg (reg, insn, value)
9568 rtx reg;
9569 rtx insn;
9570 rtx value;
9571{
9572 int regno = REGNO (reg);
9573 int endregno = regno + (regno < FIRST_PSEUDO_REGISTER
9574 ? HARD_REGNO_NREGS (regno, GET_MODE (reg)) : 1);
9575 int i;
9576
9577 /* If VALUE contains REG and we have a previous value for REG, substitute
9578 the previous value. */
9579 if (value && insn && reg_overlap_mentioned_p (reg, value))
9580 {
9581 rtx tem;
9582
9583 /* Set things up so get_last_value is allowed to see anything set up to
9584 our insn. */
9585 subst_low_cuid = INSN_CUID (insn);
9586 tem = get_last_value (reg);
9587
9588 if (tem)
9589 value = replace_rtx (copy_rtx (value), reg, tem);
9590 }
9591
9592 /* For each register modified, show we don't know its value, that
ef026f91
RS
9593 we don't know about its bitwise content, that its value has been
9594 updated, and that we don't know the location of the death of the
9595 register. */
230d793d
RS
9596 for (i = regno; i < endregno; i ++)
9597 {
9598 if (insn)
9599 reg_last_set[i] = insn;
9600 reg_last_set_value[i] = 0;
ef026f91
RS
9601 reg_last_set_mode[i] = 0;
9602 reg_last_set_nonzero_bits[i] = 0;
9603 reg_last_set_sign_bit_copies[i] = 0;
230d793d
RS
9604 reg_last_death[i] = 0;
9605 }
9606
9607 /* Mark registers that are being referenced in this value. */
9608 if (value)
9609 update_table_tick (value);
9610
9611 /* Now update the status of each register being set.
9612 If someone is using this register in this block, set this register
9613 to invalid since we will get confused between the two lives in this
9614 basic block. This makes using this register always invalid. In cse, we
9615 scan the table to invalidate all entries using this register, but this
9616 is too much work for us. */
9617
9618 for (i = regno; i < endregno; i++)
9619 {
9620 reg_last_set_label[i] = label_tick;
9621 if (value && reg_last_set_table_tick[i] == label_tick)
9622 reg_last_set_invalid[i] = 1;
9623 else
9624 reg_last_set_invalid[i] = 0;
9625 }
9626
9627 /* The value being assigned might refer to X (like in "x++;"). In that
9628 case, we must replace it with (clobber (const_int 0)) to prevent
9629 infinite loops. */
9630 if (value && ! get_last_value_validate (&value,
9631 reg_last_set_label[regno], 0))
9632 {
9633 value = copy_rtx (value);
9634 if (! get_last_value_validate (&value, reg_last_set_label[regno], 1))
9635 value = 0;
9636 }
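 /* Sketch: for an insn arising from "x++", i.e.
 (set (reg X) (plus (reg X) (const_int 1))), any reference to X
 left in VALUE after substitution is replaced by
 (clobber (const_int 0)), so a later get_last_value cannot chase
 the value in a circle. */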
9637
55310dad
RK
9638 /* For the main register being modified, update the value, the mode, the
9639 nonzero bits, and the number of sign bit copies. */
9640
230d793d
RS
9641 reg_last_set_value[regno] = value;
9642
55310dad
RK
9643 if (value)
9644 {
2afabb48 9645 subst_low_cuid = INSN_CUID (insn);
55310dad
RK
9646 reg_last_set_mode[regno] = GET_MODE (reg);
9647 reg_last_set_nonzero_bits[regno] = nonzero_bits (value, GET_MODE (reg));
9648 reg_last_set_sign_bit_copies[regno]
9649 = num_sign_bit_copies (value, GET_MODE (reg));
9650 }
230d793d
RS
9651}
9652
9653/* Used for communication between the following two routines. */
9654static rtx record_dead_insn;
9655
9656/* Called via note_stores from record_dead_and_set_regs to handle one
9657 SET or CLOBBER in an insn. */
9658
9659static void
9660record_dead_and_set_regs_1 (dest, setter)
9661 rtx dest, setter;
9662{
9663 if (GET_CODE (dest) == REG)
9664 {
9665 /* If we are setting the whole register, we know its value. Otherwise
9666 show that we don't know the value. We can handle SUBREG in
9667 some cases. */
9668 if (GET_CODE (setter) == SET && dest == SET_DEST (setter))
9669 record_value_for_reg (dest, record_dead_insn, SET_SRC (setter));
9670 else if (GET_CODE (setter) == SET
9671 && GET_CODE (SET_DEST (setter)) == SUBREG
9672 && SUBREG_REG (SET_DEST (setter)) == dest
90bf8081 9673 && GET_MODE_BITSIZE (GET_MODE (dest)) <= BITS_PER_WORD
230d793d 9674 && subreg_lowpart_p (SET_DEST (setter)))
d0ab8cd3
RK
9675 record_value_for_reg (dest, record_dead_insn,
9676 gen_lowpart_for_combine (GET_MODE (dest),
9677 SET_SRC (setter)));
230d793d 9678 else
5f4f0e22 9679 record_value_for_reg (dest, record_dead_insn, NULL_RTX);
230d793d
RS
9680 }
9681 else if (GET_CODE (dest) == MEM
9682 /* Ignore pushes, they clobber nothing. */
9683 && ! push_operand (dest, GET_MODE (dest)))
9684 mem_last_set = INSN_CUID (record_dead_insn);
9685}
9686
9687/* Update the records of when each REG was most recently set or killed
9688 for the things done by INSN. This is the last thing done in processing
9689 INSN in the combiner loop.
9690
ef026f91
RS
9691 We update reg_last_set, reg_last_set_value, reg_last_set_mode,
9692 reg_last_set_nonzero_bits, reg_last_set_sign_bit_copies, reg_last_death,
9693 and also the similar information mem_last_set (which insn most recently
9694 modified memory) and last_call_cuid (which insn was the most recent
9695 subroutine call). */
230d793d
RS
9696
9697static void
9698record_dead_and_set_regs (insn)
9699 rtx insn;
9700{
9701 register rtx link;
55310dad
RK
9702 int i;
9703
230d793d
RS
9704 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
9705 {
dbc131f3
RK
9706 if (REG_NOTE_KIND (link) == REG_DEAD
9707 && GET_CODE (XEXP (link, 0)) == REG)
9708 {
9709 int regno = REGNO (XEXP (link, 0));
9710 int endregno
9711 = regno + (regno < FIRST_PSEUDO_REGISTER
9712 ? HARD_REGNO_NREGS (regno, GET_MODE (XEXP (link, 0)))
9713 : 1);
dbc131f3
RK
9714
9715 for (i = regno; i < endregno; i++)
9716 reg_last_death[i] = insn;
9717 }
230d793d 9718 else if (REG_NOTE_KIND (link) == REG_INC)
5f4f0e22 9719 record_value_for_reg (XEXP (link, 0), insn, NULL_RTX);
230d793d
RS
9720 }
9721
9722 if (GET_CODE (insn) == CALL_INSN)
55310dad
RK
9723 {
9724 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
9725 if (call_used_regs[i])
9726 {
9727 reg_last_set_value[i] = 0;
ef026f91
RS
9728 reg_last_set_mode[i] = 0;
9729 reg_last_set_nonzero_bits[i] = 0;
9730 reg_last_set_sign_bit_copies[i] = 0;
55310dad
RK
9731 reg_last_death[i] = 0;
9732 }
9733
9734 last_call_cuid = mem_last_set = INSN_CUID (insn);
9735 }
230d793d
RS
9736
9737 record_dead_insn = insn;
9738 note_stores (PATTERN (insn), record_dead_and_set_regs_1);
9739}
9740\f
9741/* Utility routine for the following function. Verify that all the registers
9742 mentioned in *LOC are valid when *LOC was part of a value set when
9743 label_tick == TICK. Return 0 if some are not.
9744
9745 If REPLACE is non-zero, replace the invalid reference with
9746 (clobber (const_int 0)) and return 1. This replacement is useful because
9747 we often can get useful information about the form of a value (e.g., if
9748 it was produced by a shift that always produces -1 or 0) even though
9749 we don't know exactly what registers it was produced from. */
9750
9751static int
9752get_last_value_validate (loc, tick, replace)
9753 rtx *loc;
9754 int tick;
9755 int replace;
9756{
9757 rtx x = *loc;
9758 char *fmt = GET_RTX_FORMAT (GET_CODE (x));
9759 int len = GET_RTX_LENGTH (GET_CODE (x));
9760 int i;
9761
9762 if (GET_CODE (x) == REG)
9763 {
9764 int regno = REGNO (x);
9765 int endregno = regno + (regno < FIRST_PSEUDO_REGISTER
9766 ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
9767 int j;
9768
9769 for (j = regno; j < endregno; j++)
9770 if (reg_last_set_invalid[j]
9771 /* If this is a pseudo-register that was only set once, it is
9772 always valid. */
9773 || (! (regno >= FIRST_PSEUDO_REGISTER && reg_n_sets[regno] == 1)
9774 && reg_last_set_label[j] > tick))
9775 {
9776 if (replace)
9777 *loc = gen_rtx (CLOBBER, GET_MODE (x), const0_rtx);
9778 return replace;
9779 }
9780
9781 return 1;
9782 }
9783
9784 for (i = 0; i < len; i++)
9785 if ((fmt[i] == 'e'
9786 && get_last_value_validate (&XEXP (x, i), tick, replace) == 0)
9787 /* Don't bother with these. They shouldn't occur anyway. */
9788 || fmt[i] == 'E')
9789 return 0;
9790
9791 /* If we haven't found a reason for it to be invalid, it is valid. */
9792 return 1;
9793}
9794
9795/* Get the last value assigned to X, if known. Some registers
9796 in the value may be replaced with (clobber (const_int 0)) if their value
9797 is no longer known reliably. */
9798
9799static rtx
9800get_last_value (x)
9801 rtx x;
9802{
9803 int regno;
9804 rtx value;
9805
9806 /* If this is a non-paradoxical SUBREG, get the value of its operand and
9807 then convert it to the desired mode. If this is a paradoxical SUBREG,
9808 we cannot predict what values the "extra" bits might have. */
9809 if (GET_CODE (x) == SUBREG
9810 && subreg_lowpart_p (x)
9811 && (GET_MODE_SIZE (GET_MODE (x))
9812 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
9813 && (value = get_last_value (SUBREG_REG (x))) != 0)
9814 return gen_lowpart_for_combine (GET_MODE (x), value);
9815
9816 if (GET_CODE (x) != REG)
9817 return 0;
9818
9819 regno = REGNO (x);
9820 value = reg_last_set_value[regno];
9821
d0ab8cd3 9822 /* If we don't have a value or if it isn't for this basic block, return 0. */
230d793d
RS
9823
9824 if (value == 0
9825 || (reg_n_sets[regno] != 1
55310dad 9826 && reg_last_set_label[regno] != label_tick))
230d793d
RS
9827 return 0;
9828
d0ab8cd3 9829 /* If the value was set in a later insn than the ones we are processing,
4090a6b3
RK
9830 we can't use it even if the register was only set once, but make a quick
9831 check to see if the previous insn set it to something. This is commonly
9832 the case when the same pseudo is used by repeated insns. */
d0ab8cd3 9833
4090a6b3 9834 if (INSN_CUID (reg_last_set[regno]) >= subst_low_cuid)
d0ab8cd3
RK
9835 {
9836 rtx insn, set;
9837
3adde2a5
RK
9838 for (insn = prev_nonnote_insn (subst_insn);
9839 insn && INSN_CUID (insn) >= subst_low_cuid;
9840 insn = prev_nonnote_insn (insn))
9841 ;
d0ab8cd3
RK
9842
9843 if (insn
9844 && (set = single_set (insn)) != 0
9845 && rtx_equal_p (SET_DEST (set), x))
9846 {
9847 value = SET_SRC (set);
9848
9849 /* Make sure that VALUE doesn't reference X. Replace any
9850 explicit references with a CLOBBER. If there are any remaining
9851 references (rare), don't use the value. */
9852
9853 if (reg_mentioned_p (x, value))
9854 value = replace_rtx (copy_rtx (value), x,
9855 gen_rtx (CLOBBER, GET_MODE (x), const0_rtx));
9856
9857 if (reg_overlap_mentioned_p (x, value))
9858 return 0;
9859 }
9860 else
9861 return 0;
9862 }
9863
9864 /* If the value has all its registers valid, return it. */
230d793d
RS
9865 if (get_last_value_validate (&value, reg_last_set_label[regno], 0))
9866 return value;
9867
9868 /* Otherwise, make a copy and replace any invalid register with
9869 (clobber (const_int 0)). If that fails for some reason, return 0. */
9870
9871 value = copy_rtx (value);
9872 if (get_last_value_validate (&value, reg_last_set_label[regno], 1))
9873 return value;
9874
9875 return 0;
9876}
9877\f
9878/* Return nonzero if expression X refers to a REG or to memory
9879 that is set in an instruction more recent than FROM_CUID. */
9880
9881static int
9882use_crosses_set_p (x, from_cuid)
9883 register rtx x;
9884 int from_cuid;
9885{
9886 register char *fmt;
9887 register int i;
9888 register enum rtx_code code = GET_CODE (x);
9889
9890 if (code == REG)
9891 {
9892 register int regno = REGNO (x);
e28f5732
RK
9893 int endreg = regno + (regno < FIRST_PSEUDO_REGISTER
9894 ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
9895
230d793d
RS
9896#ifdef PUSH_ROUNDING
9897 /* Don't allow uses of the stack pointer to be moved,
9898 because we don't know whether the move crosses a push insn. */
9899 if (regno == STACK_POINTER_REGNUM)
9900 return 1;
9901#endif
e28f5732
RK
9902 for (; regno < endreg; regno++)
9903 if (reg_last_set[regno]
9904 && INSN_CUID (reg_last_set[regno]) > from_cuid)
9905 return 1;
9906 return 0;
230d793d
RS
9907 }
9908
9909 if (code == MEM && mem_last_set > from_cuid)
9910 return 1;
9911
9912 fmt = GET_RTX_FORMAT (code);
9913
9914 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
9915 {
9916 if (fmt[i] == 'E')
9917 {
9918 register int j;
9919 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
9920 if (use_crosses_set_p (XVECEXP (x, i, j), from_cuid))
9921 return 1;
9922 }
9923 else if (fmt[i] == 'e'
9924 && use_crosses_set_p (XEXP (x, i), from_cuid))
9925 return 1;
9926 }
9927 return 0;
9928}
9929\f
9930/* Define three variables used for communication between the following
9931 routines. */
9932
9933static int reg_dead_regno, reg_dead_endregno;
9934static int reg_dead_flag;
9935
9936/* Function called via note_stores from reg_dead_at_p.
9937
9938 If DEST is within [reg_dead_regno, reg_dead_endregno), set
9939 reg_dead_flag to 1 if X is a CLOBBER and to -1 if it is a SET. */
9940
9941static void
9942reg_dead_at_p_1 (dest, x)
9943 rtx dest;
9944 rtx x;
9945{
9946 int regno, endregno;
9947
9948 if (GET_CODE (dest) != REG)
9949 return;
9950
9951 regno = REGNO (dest);
9952 endregno = regno + (regno < FIRST_PSEUDO_REGISTER
9953 ? HARD_REGNO_NREGS (regno, GET_MODE (dest)) : 1);
9954
9955 if (reg_dead_endregno > regno && reg_dead_regno < endregno)
9956 reg_dead_flag = (GET_CODE (x) == CLOBBER) ? 1 : -1;
9957}
9958
9959/* Return non-zero if REG is known to be dead at INSN.
9960
9961 We scan backwards from INSN. If we hit a REG_DEAD note or a CLOBBER
9962 referencing REG, it is dead. If we hit a SET referencing REG, it is
9963 live. Otherwise, see if it is live or dead at the start of the basic
6e25d159
RK
9964 block we are in. Hard regs marked as being live in NEWPAT_USED_REGS
9965 must be assumed to be always live. */
230d793d
RS
9966
9967static int
9968reg_dead_at_p (reg, insn)
9969 rtx reg;
9970 rtx insn;
9971{
9972 int block, i;
9973
9974 /* Set variables for reg_dead_at_p_1. */
9975 reg_dead_regno = REGNO (reg);
9976 reg_dead_endregno = reg_dead_regno + (reg_dead_regno < FIRST_PSEUDO_REGISTER
9977 ? HARD_REGNO_NREGS (reg_dead_regno,
9978 GET_MODE (reg))
9979 : 1);
9980
9981 reg_dead_flag = 0;
9982
6e25d159
RK
9983 /* Check that reg isn't mentioned in NEWPAT_USED_REGS. */
9984 if (reg_dead_regno < FIRST_PSEUDO_REGISTER)
9985 {
9986 for (i = reg_dead_regno; i < reg_dead_endregno; i++)
9987 if (TEST_HARD_REG_BIT (newpat_used_regs, i))
9988 return 0;
9989 }
9990
230d793d
RS
9991 /* Scan backwards until we find a REG_DEAD note, SET, CLOBBER, label, or
9992 beginning of function. */
9993 for (; insn && GET_CODE (insn) != CODE_LABEL;
9994 insn = prev_nonnote_insn (insn))
9995 {
9996 note_stores (PATTERN (insn), reg_dead_at_p_1);
9997 if (reg_dead_flag)
9998 return reg_dead_flag == 1 ? 1 : 0;
9999
10000 if (find_regno_note (insn, REG_DEAD, reg_dead_regno))
10001 return 1;
10002 }
10003
10004 /* Get the basic block number that we were in. */
10005 if (insn == 0)
10006 block = 0;
10007 else
10008 {
10009 for (block = 0; block < n_basic_blocks; block++)
10010 if (insn == basic_block_head[block])
10011 break;
10012
10013 if (block == n_basic_blocks)
10014 return 0;
10015 }
10016
10017 for (i = reg_dead_regno; i < reg_dead_endregno; i++)
5f4f0e22
CH
10018 if (basic_block_live_at_start[block][i / REGSET_ELT_BITS]
10019 & ((REGSET_ELT_TYPE) 1 << (i % REGSET_ELT_BITS)))
230d793d
RS
10020 return 0;
10021
10022 return 1;
10023}
6e25d159
RK
10024\f
10025/* Note hard registers in X that are used. This code is similar to
10026 that in flow.c, but much simpler since we don't care about pseudos. */
10027
10028static void
10029mark_used_regs_combine (x)
10030 rtx x;
10031{
10032 register RTX_CODE code = GET_CODE (x);
10033 register int regno;
10034 int i;
10035
10036 switch (code)
10037 {
10038 case LABEL_REF:
10039 case SYMBOL_REF:
10040 case CONST_INT:
10041 case CONST:
10042 case CONST_DOUBLE:
10043 case PC:
10044 case ADDR_VEC:
10045 case ADDR_DIFF_VEC:
10046 case ASM_INPUT:
10047#ifdef HAVE_cc0
10048 /* CC0 must die in the insn after it is set, so we don't need to take
10049 special note of it here. */
10050 case CC0:
10051#endif
10052 return;
10053
10054 case CLOBBER:
10055 /* If we are clobbering a MEM, mark any hard registers inside the
10056 address as used. */
10057 if (GET_CODE (XEXP (x, 0)) == MEM)
10058 mark_used_regs_combine (XEXP (XEXP (x, 0), 0));
10059 return;
10060
10061 case REG:
10062 regno = REGNO (x);
10063 /* A hard reg in a wide mode may really be multiple registers.
10064 If so, mark all of them just like the first. */
10065 if (regno < FIRST_PSEUDO_REGISTER)
10066 {
10067 /* None of this applies to the stack, frame or arg pointers */
10068 if (regno == STACK_POINTER_REGNUM
10069#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
10070 || regno == HARD_FRAME_POINTER_REGNUM
10071#endif
10072#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
10073 || (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
10074#endif
10075 || regno == FRAME_POINTER_REGNUM)
10076 return;
10077
10078 i = HARD_REGNO_NREGS (regno, GET_MODE (x));
10079 while (i-- > 0)
10080 SET_HARD_REG_BIT (newpat_used_regs, regno + i);
10081 }
10082 return;
10083
10084 case SET:
10085 {
10086 /* If setting a MEM, or a SUBREG of a MEM, then note any hard regs in
10087 the address. */
10088 register rtx testreg = SET_DEST (x);
10089
e048778f
RK
10090 while (GET_CODE (testreg) == SUBREG
10091 || GET_CODE (testreg) == ZERO_EXTRACT
10092 || GET_CODE (testreg) == SIGN_EXTRACT
10093 || GET_CODE (testreg) == STRICT_LOW_PART)
6e25d159
RK
10094 testreg = XEXP (testreg, 0);
10095
10096 if (GET_CODE (testreg) == MEM)
10097 mark_used_regs_combine (XEXP (testreg, 0));
10098
10099 mark_used_regs_combine (SET_SRC (x));
10100 return;
10101 }
10102 }
10103
10104 /* Recursively scan the operands of this expression. */
10105
10106 {
10107 register char *fmt = GET_RTX_FORMAT (code);
10108
10109 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
10110 {
10111 if (fmt[i] == 'e')
10112 mark_used_regs_combine (XEXP (x, i));
10113 else if (fmt[i] == 'E')
10114 {
10115 register int j;
10116
10117 for (j = 0; j < XVECLEN (x, i); j++)
10118 mark_used_regs_combine (XVECEXP (x, i, j));
10119 }
10120 }
10121 }
10122}
10123
230d793d
RS
10124\f
10125/* Remove register number REGNO from the dead registers list of INSN.
10126
10127 Return the note used to record the death, if there was one. */
10128
10129rtx
10130remove_death (regno, insn)
10131 int regno;
10132 rtx insn;
10133{
10134 register rtx note = find_regno_note (insn, REG_DEAD, regno);
10135
10136 if (note)
1a26b032
RK
10137 {
10138 reg_n_deaths[regno]--;
10139 remove_note (insn, note);
10140 }
230d793d
RS
10141
10142 return note;
10143}
10144
10145/* For each register (hardware or pseudo) used within expression X, if its
10146 death is in an instruction with cuid between FROM_CUID (inclusive) and
10147 TO_INSN (exclusive), put a REG_DEAD note for that register in the
10148 list headed by PNOTES.
10149
10150 This is done when X is being merged by combination into TO_INSN. These
10151 notes will then be distributed as needed. */
10152
10153static void
10154move_deaths (x, from_cuid, to_insn, pnotes)
10155 rtx x;
10156 int from_cuid;
10157 rtx to_insn;
10158 rtx *pnotes;
10159{
10160 register char *fmt;
10161 register int len, i;
10162 register enum rtx_code code = GET_CODE (x);
10163
10164 if (code == REG)
10165 {
10166 register int regno = REGNO (x);
10167 register rtx where_dead = reg_last_death[regno];
10168
10169 if (where_dead && INSN_CUID (where_dead) >= from_cuid
10170 && INSN_CUID (where_dead) < INSN_CUID (to_insn))
10171 {
dbc131f3 10172 rtx note = remove_death (regno, where_dead);
230d793d
RS
10173
10174 /* It is possible for the call above to return 0. This can occur
10175 when reg_last_death points to I2 or I1 that we combined with.
dbc131f3
RK
10176 In that case make a new note.
10177
10178 We must also check for the case where X is a hard register
10179 and NOTE is a death note for a range of hard registers
10180 including X. In that case, we must put REG_DEAD notes for
10181 the remaining registers in place of NOTE. */
10182
10183 if (note != 0 && regno < FIRST_PSEUDO_REGISTER
10184 && (GET_MODE_SIZE (GET_MODE (XEXP (note, 0)))
10185 != GET_MODE_SIZE (GET_MODE (x))))
10186 {
10187 int deadregno = REGNO (XEXP (note, 0));
10188 int deadend
10189 = (deadregno + HARD_REGNO_NREGS (deadregno,
10190 GET_MODE (XEXP (note, 0))));
10191 int ourend = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
10192 int i;
10193
10194 for (i = deadregno; i < deadend; i++)
10195 if (i < regno || i >= ourend)
10196 REG_NOTES (where_dead)
10197 = gen_rtx (EXPR_LIST, REG_DEAD,
36b878d1 10198 gen_rtx (REG, reg_raw_mode[i], i),
dbc131f3
RK
10199 REG_NOTES (where_dead));
10200 }
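 /* For instance (a sketch, assuming a 32-bit machine where DImode
 occupies hard regs 0 and 1): if X is (reg:SI 1) and the old note
 covered (reg:DI 0), a fresh REG_DEAD note is added for the piece
 not being moved, (reg:SI 0). */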
230d793d 10201
dbc131f3 10202 if (note != 0 && GET_MODE (XEXP (note, 0)) == GET_MODE (x))
230d793d
RS
10203 {
10204 XEXP (note, 1) = *pnotes;
10205 *pnotes = note;
10206 }
10207 else
10208 *pnotes = gen_rtx (EXPR_LIST, REG_DEAD, x, *pnotes);
1a26b032
RK
10209
10210 reg_n_deaths[regno]++;
230d793d
RS
10211 }
10212
10213 return;
10214 }
10215
10216 else if (GET_CODE (x) == SET)
10217 {
10218 rtx dest = SET_DEST (x);
10219
10220 move_deaths (SET_SRC (x), from_cuid, to_insn, pnotes);
10221
a7c99304
RK
10222 /* In the case of a ZERO_EXTRACT, a STRICT_LOW_PART, or a SUBREG
10223 that accesses one word of a multi-word item, some
10224 piece of every register in the expression is used by
10225 this insn, so remove any old death. */
10226
10227 if (GET_CODE (dest) == ZERO_EXTRACT
10228 || GET_CODE (dest) == STRICT_LOW_PART
10229 || (GET_CODE (dest) == SUBREG
10230 && (((GET_MODE_SIZE (GET_MODE (dest))
10231 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
10232 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
10233 + UNITS_PER_WORD - 1) / UNITS_PER_WORD))))
230d793d 10234 {
a7c99304
RK
10235 move_deaths (dest, from_cuid, to_insn, pnotes);
10236 return;
230d793d
RS
10237 }
10238
a7c99304
RK
10239 /* If this is some other SUBREG, we know it replaces the entire
10240 value, so use that as the destination. */
10241 if (GET_CODE (dest) == SUBREG)
10242 dest = SUBREG_REG (dest);
10243
10244 /* If this is a MEM, adjust deaths of anything used in the address.
10245 For a REG (the only other possibility), the entire value is
10246 being replaced so the old value is not used in this insn. */
230d793d
RS
10247
10248 if (GET_CODE (dest) == MEM)
10249 move_deaths (XEXP (dest, 0), from_cuid, to_insn, pnotes);
10250 return;
10251 }
10252
10253 else if (GET_CODE (x) == CLOBBER)
10254 return;
10255
10256 len = GET_RTX_LENGTH (code);
10257 fmt = GET_RTX_FORMAT (code);
10258
10259 for (i = 0; i < len; i++)
10260 {
10261 if (fmt[i] == 'E')
10262 {
10263 register int j;
10264 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
10265 move_deaths (XVECEXP (x, i, j), from_cuid, to_insn, pnotes);
10266 }
10267 else if (fmt[i] == 'e')
10268 move_deaths (XEXP (x, i), from_cuid, to_insn, pnotes);
10269 }
10270}
10271\f
a7c99304
RK
10272/* Return 1 if X is the target of a bit-field assignment in BODY, the
10273 pattern of an insn. X must be a REG. */
230d793d
RS
10274
10275static int
a7c99304
RK
10276reg_bitfield_target_p (x, body)
10277 rtx x;
230d793d
RS
10278 rtx body;
10279{
10280 int i;
10281
10282 if (GET_CODE (body) == SET)
a7c99304
RK
10283 {
10284 rtx dest = SET_DEST (body);
10285 rtx target;
10286 int regno, tregno, endregno, endtregno;
10287
10288 if (GET_CODE (dest) == ZERO_EXTRACT)
10289 target = XEXP (dest, 0);
10290 else if (GET_CODE (dest) == STRICT_LOW_PART)
10291 target = SUBREG_REG (XEXP (dest, 0));
10292 else
10293 return 0;
10294
10295 if (GET_CODE (target) == SUBREG)
10296 target = SUBREG_REG (target);
10297
10298 if (GET_CODE (target) != REG)
10299 return 0;
10300
10301 tregno = REGNO (target), regno = REGNO (x);
10302 if (tregno >= FIRST_PSEUDO_REGISTER || regno >= FIRST_PSEUDO_REGISTER)
10303 return target == x;
10304
10305 endtregno = tregno + HARD_REGNO_NREGS (tregno, GET_MODE (target));
10306 endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
10307
10308 return endregno > tregno && regno < endtregno;
10309 }
230d793d
RS
10310
10311 else if (GET_CODE (body) == PARALLEL)
10312 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
a7c99304 10313 if (reg_bitfield_target_p (x, XVECEXP (body, 0, i)))
230d793d
RS
10314 return 1;
10315
10316 return 0;
10317}
10318\f
10319/* Given a chain of REG_NOTES originally from FROM_INSN, try to place them
10320 as appropriate. I3 and I2 are the insns resulting from combining
10321 the insns including FROM (I2 may be zero).
10322
10323 ELIM_I2 and ELIM_I1 are either zero or registers that we know will
10324 not need REG_DEAD notes because they are being substituted for. This
10325 saves searching in the most common cases.
10326
10327 Each note in the list is either ignored or placed on some insns, depending
10328 on the type of note. */
10329
10330static void
10331distribute_notes (notes, from_insn, i3, i2, elim_i2, elim_i1)
10332 rtx notes;
10333 rtx from_insn;
10334 rtx i3, i2;
10335 rtx elim_i2, elim_i1;
10336{
10337 rtx note, next_note;
10338 rtx tem;
10339
10340 for (note = notes; note; note = next_note)
10341 {
10342 rtx place = 0, place2 = 0;
10343
10344 /* If this NOTE references a pseudo register, ensure it references
10345 the latest copy of that register. */
10346 if (XEXP (note, 0) && GET_CODE (XEXP (note, 0)) == REG
10347 && REGNO (XEXP (note, 0)) >= FIRST_PSEUDO_REGISTER)
10348 XEXP (note, 0) = regno_reg_rtx[REGNO (XEXP (note, 0))];
10349
10350 next_note = XEXP (note, 1);
10351 switch (REG_NOTE_KIND (note))
10352 {
10353 case REG_UNUSED:
07d0cbdd 10354 /* Any clobbers for i3 may still exist, and so we must process
176c9e6b
JW
10355 REG_UNUSED notes from that insn.
10356
10357 Any clobbers from i2 or i1 can only exist if they were added by
10358 recog_for_combine. In that case, recog_for_combine created the
10359 necessary REG_UNUSED notes. Trying to keep any original
10360 REG_UNUSED notes from these insns can cause incorrect output
10361 if it is for the same register as the original i3 dest.
10362 In that case, we will notice that the register is set in i3,
10363 and then add a REG_UNUSED note for the destination of i3, which
07d0cbdd
JW
10364 is wrong. However, it is possible to have REG_UNUSED notes from
10365 i2 or i1 for register which were both used and clobbered, so
10366 we keep notes from i2 or i1 if they will turn into REG_DEAD
10367 notes. */
176c9e6b 10368
230d793d
RS
10369 /* If this register is set or clobbered in I3, put the note there
10370 unless there is one already. */
07d0cbdd 10371 if (reg_set_p (XEXP (note, 0), PATTERN (i3)))
230d793d 10372 {
07d0cbdd
JW
10373 if (from_insn != i3)
10374 break;
10375
230d793d
RS
10376 if (! (GET_CODE (XEXP (note, 0)) == REG
10377 ? find_regno_note (i3, REG_UNUSED, REGNO (XEXP (note, 0)))
10378 : find_reg_note (i3, REG_UNUSED, XEXP (note, 0))))
10379 place = i3;
10380 }
10381 /* Otherwise, if this register is used by I3, then this register
10382 now dies here, so we must put a REG_DEAD note here unless there
10383 is one already. */
10384 else if (reg_referenced_p (XEXP (note, 0), PATTERN (i3))
10385 && ! (GET_CODE (XEXP (note, 0)) == REG
10386 ? find_regno_note (i3, REG_DEAD, REGNO (XEXP (note, 0)))
10387 : find_reg_note (i3, REG_DEAD, XEXP (note, 0))))
10388 {
10389 PUT_REG_NOTE_KIND (note, REG_DEAD);
10390 place = i3;
10391 }
10392 break;
10393
10394 case REG_EQUAL:
10395 case REG_EQUIV:
10396 case REG_NONNEG:
10397 /* These notes say something about results of an insn. We can
10398 only support them if they used to be on I3 in which case they
a687e897
RK
10399 remain on I3. Otherwise they are ignored.
10400
10401 If the note refers to an expression that is not a constant, we
10402 must also ignore the note since we cannot tell whether the
10403 equivalence is still true. It might be possible to do
10404 slightly better than this (we only have a problem if I2DEST
10405 or I1DEST is present in the expression), but it doesn't
10406 seem worth the trouble. */
10407
10408 if (from_insn == i3
10409 && (XEXP (note, 0) == 0 || CONSTANT_P (XEXP (note, 0))))
230d793d
RS
10410 place = i3;
10411 break;
10412
10413 case REG_INC:
10414 case REG_NO_CONFLICT:
10415 case REG_LABEL:
10416 /* These notes say something about how a register is used. They must
10417 be present on any use of the register in I2 or I3. */
10418 if (reg_mentioned_p (XEXP (note, 0), PATTERN (i3)))
10419 place = i3;
10420
10421 if (i2 && reg_mentioned_p (XEXP (note, 0), PATTERN (i2)))
10422 {
10423 if (place)
10424 place2 = i2;
10425 else
10426 place = i2;
10427 }
10428 break;
10429
10430 case REG_WAS_0:
10431 /* It is too much trouble to try to see if this note is still
10432 correct in all situations. It is better to simply delete it. */
10433 break;
10434
10435 case REG_RETVAL:
10436 /* If the insn previously containing this note still exists,
10437 put it back where it was. Otherwise move it to the previous
10438 insn. Adjust the corresponding REG_LIBCALL note. */
10439 if (GET_CODE (from_insn) != NOTE)
10440 place = from_insn;
10441 else
10442 {
5f4f0e22 10443 tem = find_reg_note (XEXP (note, 0), REG_LIBCALL, NULL_RTX);
230d793d
RS
10444 place = prev_real_insn (from_insn);
10445 if (tem && place)
10446 XEXP (tem, 0) = place;
10447 }
10448 break;
10449
10450 case REG_LIBCALL:
10451 /* This is handled similarly to REG_RETVAL. */
10452 if (GET_CODE (from_insn) != NOTE)
10453 place = from_insn;
10454 else
10455 {
5f4f0e22 10456 tem = find_reg_note (XEXP (note, 0), REG_RETVAL, NULL_RTX);
230d793d
RS
10457 place = next_real_insn (from_insn);
10458 if (tem && place)
10459 XEXP (tem, 0) = place;
10460 }
10461 break;
10462
10463 case REG_DEAD:
10464 /* If the register is used as an input in I3, it dies there.
10465 Similarly for I2, if it is non-zero and adjacent to I3.
10466
10467 If the register is not used as an input in either I3 or I2
10468 and it is not one of the registers we were supposed to eliminate,
10469 there are two possibilities. We might have a non-adjacent I2
10470 or we might have somehow eliminated an additional register
10471 from a computation. For example, we might have had A & B where
10472 we discover that B will always be zero. In this case we will
10473 eliminate the reference to A.
10474
10475 In both cases, we must search to see if we can find a previous
10476 use of A and put the death note there. */
10477
6e2d1486
RK
10478 if (from_insn
10479 && GET_CODE (from_insn) == CALL_INSN
10480 && find_reg_fusage (from_insn, USE, XEXP (note, 0)))
10481 place = from_insn;
10482 else if (reg_referenced_p (XEXP (note, 0), PATTERN (i3)))
230d793d
RS
10483 place = i3;
10484 else if (i2 != 0 && next_nonnote_insn (i2) == i3
10485 && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
10486 place = i2;
10487
10488 if (XEXP (note, 0) == elim_i2 || XEXP (note, 0) == elim_i1)
10489 break;
10490
510dd77e
RK
10491 /* If the register is used in both I2 and I3 and it dies in I3,
10492 we might have added another reference to it. If reg_n_refs
10493 was 2, bump it to 3. This has to be correct since the
10494 register must have been set somewhere. The reason this is
10495 done is because local-alloc.c treats 2 references as a
10496 special case. */
10497
10498 if (place == i3 && i2 != 0 && GET_CODE (XEXP (note, 0)) == REG
10499 && reg_n_refs[REGNO (XEXP (note, 0))]== 2
10500 && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
10501 reg_n_refs[REGNO (XEXP (note, 0))] = 3;
10502
230d793d
RS
10503 if (place == 0)
10504 for (tem = prev_nonnote_insn (i3);
10505 tem && (GET_CODE (tem) == INSN
10506 || GET_CODE (tem) == CALL_INSN);
10507 tem = prev_nonnote_insn (tem))
10508 {
10509 /* If the register is being set at TEM, see if that is all
10510 TEM is doing. If so, delete TEM. Otherwise, make this
10511 into a REG_UNUSED note instead. */
10512 if (reg_set_p (XEXP (note, 0), PATTERN (tem)))
10513 {
10514 rtx set = single_set (tem);
10515
5089e22e
RS
10516 /* Verify that it was the set, and not a clobber that
10517 modified the register. */
10518
10519 if (set != 0 && ! side_effects_p (SET_SRC (set))
10520 && rtx_equal_p (XEXP (note, 0), SET_DEST (set)))
230d793d
RS
10521 {
10522 /* Move the notes and links of TEM elsewhere.
10523 This might delete other dead insns recursively.
10524 First set the pattern to something that won't use
10525 any register. */
10526
10527 PATTERN (tem) = pc_rtx;
10528
5f4f0e22
CH
10529 distribute_notes (REG_NOTES (tem), tem, tem,
10530 NULL_RTX, NULL_RTX, NULL_RTX);
230d793d
RS
10531 distribute_links (LOG_LINKS (tem));
10532
10533 PUT_CODE (tem, NOTE);
10534 NOTE_LINE_NUMBER (tem) = NOTE_INSN_DELETED;
10535 NOTE_SOURCE_FILE (tem) = 0;
10536 }
10537 else
10538 {
10539 PUT_REG_NOTE_KIND (note, REG_UNUSED);
10540
10541 /* If there isn't already a REG_UNUSED note, put one
10542 here. */
10543 if (! find_regno_note (tem, REG_UNUSED,
10544 REGNO (XEXP (note, 0))))
10545 place = tem;
10546 break;
10547 }
10548 }
13018fad
RE
10549 else if (reg_referenced_p (XEXP (note, 0), PATTERN (tem))
10550 || (GET_CODE (tem) == CALL_INSN
10551 && find_reg_fusage (tem, USE, XEXP (note, 0))))
230d793d
RS
10552 {
10553 place = tem;
10554 break;
10555 }
10556 }
10557
10558 /* If the register is set or already dead at PLACE, we needn't do
10559 anything with this note if it is still a REG_DEAD note.
10560
10561 Note that we cannot use just `dead_or_set_p' here since we can
10562 convert an assignment to a register into a bit-field assignment.
10563 Therefore, we must also omit the note if the register is the
10564 target of a bitfield assignment. */
10565
10566 if (place && REG_NOTE_KIND (note) == REG_DEAD)
10567 {
10568 int regno = REGNO (XEXP (note, 0));
10569
10570 if (dead_or_set_p (place, XEXP (note, 0))
10571 || reg_bitfield_target_p (XEXP (note, 0), PATTERN (place)))
10572 {
10573 /* Unless the register previously died in PLACE, clear
10574 reg_last_death. [I no longer understand why this is
10575 being done.] */
10576 if (reg_last_death[regno] != place)
10577 reg_last_death[regno] = 0;
10578 place = 0;
10579 }
10580 else
10581 reg_last_death[regno] = place;
10582
10583 /* If this is a death note for a hard reg that is occupying
10584 multiple registers, ensure that we are still using all
10585 parts of the object. If we find a piece of the object
10586 that is unused, we must add a USE for that piece before
10587 PLACE and put the appropriate REG_DEAD note on it.
10588
10589 An alternative would be to put a REG_UNUSED for the pieces
10590 on the insn that set the register, but that can't be done if
10591 it is not in the same block. It is simpler, though less
10592 efficient, to add the USE insns. */
10593
              if (place && regno < FIRST_PSEUDO_REGISTER
                  && HARD_REGNO_NREGS (regno, GET_MODE (XEXP (note, 0))) > 1)
                {
                  int endregno
                    = regno + HARD_REGNO_NREGS (regno,
                                                GET_MODE (XEXP (note, 0)));
                  int all_used = 1;
                  int i;

                  for (i = regno; i < endregno; i++)
                    if (! refers_to_regno_p (i, i + 1, PATTERN (place), 0)
                        && ! find_regno_fusage (place, USE, i))
                      {
                        rtx piece = gen_rtx (REG, reg_raw_mode[i], i);
                        rtx p;

                        /* See if we already placed a USE note for this
                           register in front of PLACE.  */
                        for (p = place;
                             GET_CODE (PREV_INSN (p)) == INSN
                             && GET_CODE (PATTERN (PREV_INSN (p))) == USE;
                             p = PREV_INSN (p))
                          if (rtx_equal_p (piece,
                                           XEXP (PATTERN (PREV_INSN (p)), 0)))
                            {
                              p = 0;
                              break;
                            }

                        if (p)
                          {
                            rtx use_insn
                              = emit_insn_before (gen_rtx (USE, VOIDmode,
                                                           piece),
                                                  p);
                            REG_NOTES (use_insn)
                              = gen_rtx (EXPR_LIST, REG_DEAD, piece,
                                         REG_NOTES (use_insn));
                          }

                        all_used = 0;
                      }

                  /* Check for the case where the register dying partially
                     overlaps the register set by this insn.  */
                  if (all_used)
                    for (i = regno; i < endregno; i++)
                      if (dead_or_set_regno_p (place, i))
                        {
                          all_used = 0;
                          break;
                        }

                  if (! all_used)
                    {
                      /* Put only REG_DEAD notes for pieces that are
                         still used and that are not already dead or set.  */

                      for (i = regno; i < endregno; i++)
                        {
                          rtx piece = gen_rtx (REG, reg_raw_mode[i], i);

                          if ((reg_referenced_p (piece, PATTERN (place))
                               || (GET_CODE (place) == CALL_INSN
                                   && find_reg_fusage (place, USE, piece)))
                              && ! dead_or_set_p (place, piece)
                              && ! reg_bitfield_target_p (piece,
                                                          PATTERN (place)))
                            REG_NOTES (place) = gen_rtx (EXPR_LIST, REG_DEAD,
                                                         piece,
                                                         REG_NOTES (place));
                        }

                      place = 0;
                    }
                }
            }
          break;

        default:
          /* Any other notes should not be present at this point in the
             compilation.  */
          abort ();
        }

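      /* Finally, chain the note into its new home and keep reg_n_deaths
         consistent: if no place was found for a death or unused note we
         drop it and decrement the count; if the note is also needed at
         PLACE2 we make a copy and increment the count for it.  */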
      if (place)
        {
          XEXP (note, 1) = REG_NOTES (place);
          REG_NOTES (place) = note;
        }
      else if ((REG_NOTE_KIND (note) == REG_DEAD
                || REG_NOTE_KIND (note) == REG_UNUSED)
               && GET_CODE (XEXP (note, 0)) == REG)
        reg_n_deaths[REGNO (XEXP (note, 0))]--;

      if (place2)
        {
          if ((REG_NOTE_KIND (note) == REG_DEAD
               || REG_NOTE_KIND (note) == REG_UNUSED)
              && GET_CODE (XEXP (note, 0)) == REG)
            reg_n_deaths[REGNO (XEXP (note, 0))]++;

          REG_NOTES (place2) = gen_rtx (GET_CODE (note), REG_NOTE_KIND (note),
                                        XEXP (note, 0), REG_NOTES (place2));
        }
    }
}
\f
/* Similarly to above, distribute the LOG_LINKS that used to be present on
   I3, I2, and I1 to new locations.  This is also called in one case to
   add a link pointing at I3 when I3's destination is changed.  */

static void
distribute_links (links)
     rtx links;
{
  rtx link, next_link;

  for (link = links; link; link = next_link)
    {
      rtx place = 0;
      rtx insn;
      rtx set, reg;

      next_link = XEXP (link, 1);

      /* If the insn that this link points to is a NOTE or isn't a single
         set, ignore it.  In the latter case, it isn't clear what we
         can do other than ignore the link, since we can't tell which
         register it was for.  Such links wouldn't be used by combine
         anyway.

         It is not possible for the destination of the target of the link
         to have been changed by combine.  The only way that could happen
         is if we were to replace I3, I2, and I1 by I3 and I2; but in that
         case the destination of I2 also remains unchanged.  */

      if (GET_CODE (XEXP (link, 0)) == NOTE
          || (set = single_set (XEXP (link, 0))) == 0)
        continue;

      reg = SET_DEST (set);
      while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
             || GET_CODE (reg) == SIGN_EXTRACT
             || GET_CODE (reg) == STRICT_LOW_PART)
        reg = XEXP (reg, 0);
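      /* REG is now the underlying destination, with any SUBREG,
         ZERO_EXTRACT, SIGN_EXTRACT, or STRICT_LOW_PART wrappers
         stripped off.  */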

      /* A LOG_LINK is defined as being placed on the first insn that uses
         a register and points to the insn that sets the register.  Start
         searching at the next insn after the target of the link and stop
         when we reach a set of the register or the end of the basic block.

         Note that this correctly handles the link that used to point from
         I3 to I2.  Also note that not much searching is typically done here
         since most links don't point very far away.  */

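      /* Hypothetical sketch of the search below (insns invented for
         illustration, not from the original sources):

             A: (set (reg 70) ...)    ;; target of LINK
             B: (set (reg 71) ...)    ;; does not mention reg 70
             C: (use (reg 70))        ;; first insn after A to use reg 70

         Starting at NEXT_INSN (A), we skip B and stop at C, so LINK ends
         up on LOG_LINKS (C).  */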
      for (insn = NEXT_INSN (XEXP (link, 0));
           (insn && (this_basic_block == n_basic_blocks - 1
                     || basic_block_head[this_basic_block + 1] != insn));
           insn = NEXT_INSN (insn))
        if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
            && reg_overlap_mentioned_p (reg, PATTERN (insn)))
          {
            if (reg_referenced_p (reg, PATTERN (insn)))
              place = insn;
            break;
          }
        else if (GET_CODE (insn) == CALL_INSN
                 && find_reg_fusage (insn, USE, reg))
          {
            place = insn;
            break;
          }

      /* If we found a place to put the link, place it there unless there
         is already a link to the same insn as LINK at that point.  */

      if (place)
        {
          rtx link2;

          for (link2 = LOG_LINKS (place); link2; link2 = XEXP (link2, 1))
            if (XEXP (link2, 0) == XEXP (link, 0))
              break;

          if (link2 == 0)
            {
              XEXP (link, 1) = LOG_LINKS (place);
              LOG_LINKS (place) = link;

              /* Set added_links_insn to the earliest insn we added a
                 link to.  */
              if (added_links_insn == 0
                  || INSN_CUID (added_links_insn) > INSN_CUID (place))
                added_links_insn = place;
            }
        }
    }
}
\f
void
dump_combine_stats (file)
     FILE *file;
{
  fprintf
    (file,
     ";; Combiner statistics: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n\n",
     combine_attempts, combine_merges, combine_extras, combine_successes);
}

void
dump_combine_total_stats (file)
     FILE *file;
{
  fprintf
    (file,
     "\n;; Combiner totals: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n",
     total_attempts, total_merges, total_extras, total_successes);
}
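/* For reference, the output of the two functions above appears in the
   RTL dump files in this form (counts invented for illustration):

       ;; Combiner statistics: 120 attempts, 85 substitutions (4 requiring new space),
       ;; 72 successes.  */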