/* Optimize by combining instructions for GNU compiler.
   Copyright (C) 1987, 1988, 1992, 1993, 1994 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.  */


/* This module is essentially the "combiner" phase of the U. of Arizona
   Portable Optimizer, but redone to work on our list-structured
   representation for RTL instead of their string representation.

   The LOG_LINKS of each insn identify the most recent assignment
   to each REG used in the insn.  It is a list of previous insns,
   each of which contains a SET for a REG that is used in this insn
   and not used or set in between.  LOG_LINKs never cross basic blocks.
   They were set up by the preceding pass (lifetime analysis).

   We try to combine each pair of insns joined by a logical link.
   We also try to combine triples of insns A, B and C when
   C has a link back to B and B has a link back to A.

   LOG_LINKS does not have links for use of the CC0.  They don't
   need to, because the insn that sets the CC0 is always immediately
   before the insn that tests it.  So we always regard a branch
   insn as having a logical link to the preceding insn.  The same is true
   for an insn explicitly using CC0.

   We check (with use_crosses_set_p) to avoid combining in such a way
   as to move a computation to a place where its value would be different.

   Combination is done by mathematically substituting the previous
   insn(s) values for the regs they set into the expressions in
   the later insns that refer to these regs.  If the result is a valid insn
   for our target machine, according to the machine description,
   we install it, delete the earlier insns, and update the data flow
   information (LOG_LINKS and REG_NOTES) for what we did.

   There are a few exceptions where the dataflow information created by
   flow.c isn't completely updated:

   - reg_live_length is not updated
   - reg_n_refs is not adjusted in the rare case when a register is
     no longer required in a computation
   - there are extremely rare cases (see distribute_regnotes) when a
     REG_DEAD note is lost
   - a LOG_LINKS entry that refers to an insn with multiple SETs may be
     removed because there is no way to know which register it was
     linking

   To simplify substitution, we combine only when the earlier insn(s)
   consist of only a single assignment.  To simplify updating afterward,
   we never combine when a subroutine call appears in the middle.

   Since we do not represent assignments to CC0 explicitly except when that
   is all an insn does, there is no LOG_LINKS entry in an insn that uses
   the condition code for the insn that set the condition code.
   Fortunately, these two insns must be consecutive.
   Therefore, every JUMP_INSN is taken to have an implicit logical link
   to the preceding insn.  This is not quite right, since non-jumps can
   also use the condition code; but in practice such insns would not
   combine anyway.  */
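
/* Illustrative sketch (not part of the original source; insn and register
   numbers are made up).  Given a linked pair

       (set (reg:SI 100) (plus:SI (reg:SI 99) (const_int 4)))   ;; insn A
       (set (reg:SI 101) (mult:SI (reg:SI 100) (reg:SI 98)))    ;; insn B

   where B's LOG_LINKS point at A and reg 100 dies in B, combine
   substitutes A's SET_SRC into B, giving

       (set (reg:SI 101) (mult:SI (plus:SI (reg:SI 99) (const_int 4))
                                  (reg:SI 98)))

   The new pattern is kept only if the machine description recognizes it;
   then A is deleted and the LOG_LINKS and REG_NOTES are updated.  */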

#include "config.h"
#ifdef __STDC__
#include <stdarg.h>
#else
#include <varargs.h>
#endif

/* Must precede rtl.h for FFS.  */
#include <stdio.h>

#include "rtl.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "expr.h"
#include "basic-block.h"
#include "insn-config.h"
#include "insn-flags.h"
#include "insn-codes.h"
#include "insn-attr.h"
#include "recog.h"
#include "real.h"

/* It is not safe to use ordinary gen_lowpart in combine.
   Use gen_lowpart_for_combine instead.  See comments there.  */
#define gen_lowpart dont_use_gen_lowpart_you_dummy

/* Number of attempts to combine instructions in this function.  */

static int combine_attempts;

/* Number of attempts that got as far as substitution in this function.  */

static int combine_merges;

/* Number of instructions combined with added SETs in this function.  */

static int combine_extras;

/* Number of instructions combined in this function.  */

static int combine_successes;

/* Totals over entire compilation.  */

static int total_attempts, total_merges, total_extras, total_successes;

/* Define a default value for REVERSIBLE_CC_MODE.
   We can never assume that a condition code mode is safe to reverse unless
   the md tells us so.  */
#ifndef REVERSIBLE_CC_MODE
#define REVERSIBLE_CC_MODE(MODE) 0
#endif
\f
/* Vector mapping INSN_UIDs to cuids.
   The cuids are like uids but increase monotonically always.
   Combine always uses cuids so that it can compare them.
   But actually renumbering the uids, which we used to do,
   proves to be a bad idea because it makes it hard to compare
   the dumps produced by earlier passes with those from later passes.  */

static int *uid_cuid;

/* Get the cuid of an insn.  */

#define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])

/* Maximum register number, which is the size of the tables below.  */

static int combine_max_regno;

/* Record last point of death of (hard or pseudo) register n.  */

static rtx *reg_last_death;

/* Record last point of modification of (hard or pseudo) register n.  */

static rtx *reg_last_set;

/* Record the cuid of the last insn that invalidated memory
   (anything that writes memory, and subroutine calls, but not pushes).  */

static int mem_last_set;

/* Record the cuid of the last CALL_INSN
   so we can tell whether a potential combination crosses any calls.  */

static int last_call_cuid;

/* When `subst' is called, this is the insn that is being modified
   (by combining in a previous insn).  The PATTERN of this insn
   is still the old pattern partially modified and it should not be
   looked at, but this may be used to examine the successors of the insn
   to judge whether a simplification is valid.  */

static rtx subst_insn;

/* This is the lowest CUID that `subst' is currently dealing with.
   get_last_value will not return a value if the register was set at or
   after this CUID.  If not for this mechanism, we could get confused if
   I2 or I1 in try_combine were an insn that used the old value of a register
   to obtain a new value.  In that case, we might erroneously get the
   new value of the register when we wanted the old one.  */

static int subst_low_cuid;

/* This contains any hard registers that are used in newpat; reg_dead_at_p
   must consider all these registers to be always live.  */

static HARD_REG_SET newpat_used_regs;

/* This is an insn to which a LOG_LINKS entry has been added.  If this
   insn is earlier than I2 or I3, combine should rescan starting at
   that location.  */

static rtx added_links_insn;

/* This is the value of undobuf.num_undo when we started processing this
   substitution.  This will prevent gen_rtx_combine from re-using a piece
   from the previous expression.  Doing so can produce circular rtl
   structures.  */

static int previous_num_undos;

/* Basic block number of the block in which we are performing combines.  */
static int this_basic_block;
\f
/* The next group of arrays allows the recording of the last value assigned
   to (hard or pseudo) register n.  We use this information to see if an
   operation being processed is redundant given a prior operation performed
   on the register.  For example, an `and' with a constant is redundant if
   all the zero bits are already known to be turned off.

   We use an approach similar to that used by cse, but change it in the
   following ways:

   (1) We do not want to reinitialize at each label.
   (2) It is useful, but not critical, to know the actual value assigned
       to a register.  Often just its form is helpful.

   Therefore, we maintain the following arrays:

   reg_last_set_value		the last value assigned
   reg_last_set_label		records the value of label_tick when the
				register was assigned
   reg_last_set_table_tick	records the value of label_tick when a
				value using the register is assigned
   reg_last_set_invalid		set to non-zero when it is not valid
				to use the value of this register in some
				register's value

   To understand the usage of these tables, it is important to understand
   the distinction between the value in reg_last_set_value being valid
   and the register being validly contained in some other expression in the
   table.

   Entry I in reg_last_set_value is valid if it is non-zero, and either
   reg_n_sets[i] is 1 or reg_last_set_label[i] == label_tick.

   Register I may validly appear in any expression returned for the value
   of another register if reg_n_sets[i] is 1.  It may also appear in the
   value for register J if reg_last_set_label[i] < reg_last_set_label[j] or
   reg_last_set_invalid[j] is zero.

   If an expression is found in the table containing a register which may
   not validly appear in an expression, the register is replaced by
   something that won't match, (clobber (const_int 0)).

   reg_last_set_invalid[i] is set non-zero when register I is being assigned
   to and reg_last_set_table_tick[i] == label_tick.  */
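
/* Illustrative sketch (not part of the original source; register numbers
   are made up): if pseudo 60 is set exactly once, to
   (zero_extend:SI (reg:QI 58)), then reg_n_sets[60] == 1 keeps
   reg_last_set_value[60] valid throughout the function, so a later

       (and:SI (reg:SI 60) (const_int 255))

   can be seen to be redundant: every bit the `and' would clear is already
   known to be zero.  */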

/* Record last value assigned to (hard or pseudo) register n.  */

static rtx *reg_last_set_value;

/* Record the value of label_tick when the value for register n is placed in
   reg_last_set_value[n].  */

static int *reg_last_set_label;

/* Record the value of label_tick when an expression involving register n
   is placed in reg_last_set_value.  */

static int *reg_last_set_table_tick;

/* Set non-zero if references to register n in expressions should not be
   used.  */

static char *reg_last_set_invalid;

/* Incremented for each label.  */

static int label_tick;

/* Some registers that are set more than once and used in more than one
   basic block are nevertheless always set in similar ways.  For example,
   a QImode register may be loaded from memory in two places on a machine
   where byte loads zero extend.

   We record in the following array what we know about the nonzero
   bits of a register, specifically which bits are known to be zero.

   If an entry is zero, it means that we don't know anything special.  */

static unsigned HOST_WIDE_INT *reg_nonzero_bits;

/* Mode used to compute significance in reg_nonzero_bits.  It is the largest
   integer mode that can fit in HOST_BITS_PER_WIDE_INT.  */

static enum machine_mode nonzero_bits_mode;

/* Nonzero if we know that a register has some leading bits that are always
   equal to the sign bit.  */

static char *reg_sign_bit_copies;

/* Nonzero when reg_nonzero_bits and reg_sign_bit_copies can be safely used.
   It is zero while computing them and after combine has completed.  This
   former test prevents propagating values based on previously set values,
   which can be incorrect if a variable is modified in a loop.  */

static int nonzero_sign_valid;

/* These arrays are maintained in parallel with reg_last_set_value
   and are used to store the mode in which the register was last set,
   the bits that were known to be zero when it was last set, and the
   number of sign bit copies it was known to have when it was last set.  */

static enum machine_mode *reg_last_set_mode;
static unsigned HOST_WIDE_INT *reg_last_set_nonzero_bits;
static char *reg_last_set_sign_bit_copies;
\f
/* Record one modification to rtl structure
   to be undone by storing old_contents into *where.
   is_int is 1 if the contents are an int.  */

struct undo
{
  int is_int;
  union {rtx r; int i;} old_contents;
  union {rtx *r; int *i;} where;
};

/* Record a bunch of changes to be undone, up to MAX_UNDO of them.
   num_undo says how many are currently recorded.

   storage is nonzero if we must undo the allocation of new storage.
   The value of storage is what to pass to obfree.

   other_insn is nonzero if we have modified some other insn in the process
   of working on subst_insn.  It must be verified too.  */

#define MAX_UNDO 50

struct undobuf
{
  int num_undo;
  char *storage;
  struct undo undo[MAX_UNDO];
  rtx other_insn;
};

static struct undobuf undobuf;

/* Substitute NEWVAL, an rtx expression, into INTO, a place in some
   insn.  The substitution can be undone by undo_all.  If INTO is already
   set to NEWVAL, do not record this change.  Because computing NEWVAL might
   also call SUBST, we have to compute it before we put anything into
   the undo table.  */

#define SUBST(INTO, NEWVAL)						\
 do { rtx _new = (NEWVAL);						\
      if (undobuf.num_undo < MAX_UNDO)					\
	{								\
	  undobuf.undo[undobuf.num_undo].is_int = 0;			\
	  undobuf.undo[undobuf.num_undo].where.r = &INTO;		\
	  undobuf.undo[undobuf.num_undo].old_contents.r = INTO;	\
	  INTO = _new;							\
	  if (undobuf.undo[undobuf.num_undo].old_contents.r != INTO)	\
	    undobuf.num_undo++;						\
	}								\
    } while (0)

/* Similar to SUBST, but NEWVAL is an int.  INTO will normally be an XINT
   expression.
   Note that substitution for the value of a CONST_INT is not safe.  */

#define SUBST_INT(INTO, NEWVAL)						\
 do { if (undobuf.num_undo < MAX_UNDO)					\
	{								\
	  undobuf.undo[undobuf.num_undo].is_int = 1;			\
	  undobuf.undo[undobuf.num_undo].where.i = (int *) &INTO;	\
	  undobuf.undo[undobuf.num_undo].old_contents.i = INTO;	\
	  INTO = NEWVAL;						\
	  if (undobuf.undo[undobuf.num_undo].old_contents.i != INTO)	\
	    undobuf.num_undo++;						\
	}								\
    } while (0)
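
/* Illustrative usage sketch (not part of the original source; the variable
   names are made up).  A caller tentatively rewrites part of an insn and
   backs out if the result is not recognized:

       SUBST (SET_SRC (PATTERN (i3)), new_src);
       insn_code_number = recog_for_combine (&PATTERN (i3), i3, &new_notes);
       if (insn_code_number < 0)
	 undo_all ();		/* every recorded SUBST is reverted */

   The real callers appear in try_combine and subst below.  */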

/* Number of times the pseudo being substituted for
   was found and replaced.  */

static int n_occurrences;

static void init_reg_last_arrays PROTO(());
static void setup_incoming_promotions PROTO(());
static void set_nonzero_bits_and_sign_copies PROTO((rtx, rtx));
static int can_combine_p PROTO((rtx, rtx, rtx, rtx, rtx *, rtx *));
static int combinable_i3pat PROTO((rtx, rtx *, rtx, rtx, int, rtx *));
static rtx try_combine PROTO((rtx, rtx, rtx));
static void undo_all PROTO((void));
static rtx *find_split_point PROTO((rtx *, rtx));
static rtx subst PROTO((rtx, rtx, rtx, int, int));
static rtx simplify_rtx PROTO((rtx, enum machine_mode, int, int));
static rtx simplify_if_then_else PROTO((rtx));
static rtx simplify_set PROTO((rtx));
static rtx simplify_logical PROTO((rtx, int));
static rtx expand_compound_operation PROTO((rtx));
static rtx expand_field_assignment PROTO((rtx));
static rtx make_extraction PROTO((enum machine_mode, rtx, int, rtx, int,
				  int, int, int));
static rtx extract_left_shift PROTO((rtx, int));
static rtx make_compound_operation PROTO((rtx, enum rtx_code));
static int get_pos_from_mask PROTO((unsigned HOST_WIDE_INT, int *));
static rtx force_to_mode PROTO((rtx, enum machine_mode,
				unsigned HOST_WIDE_INT, rtx, int));
static rtx if_then_else_cond PROTO((rtx, rtx *, rtx *));
static rtx known_cond PROTO((rtx, enum rtx_code, rtx, rtx));
static rtx make_field_assignment PROTO((rtx));
static rtx apply_distributive_law PROTO((rtx));
static rtx simplify_and_const_int PROTO((rtx, enum machine_mode, rtx,
					 unsigned HOST_WIDE_INT));
static unsigned HOST_WIDE_INT nonzero_bits PROTO((rtx, enum machine_mode));
static int num_sign_bit_copies PROTO((rtx, enum machine_mode));
static int merge_outer_ops PROTO((enum rtx_code *, HOST_WIDE_INT *,
				  enum rtx_code, HOST_WIDE_INT,
				  enum machine_mode, int *));
static rtx simplify_shift_const PROTO((rtx, enum rtx_code, enum machine_mode,
				       rtx, int));
static int recog_for_combine PROTO((rtx *, rtx, rtx *));
static rtx gen_lowpart_for_combine PROTO((enum machine_mode, rtx));
static rtx gen_rtx_combine PVPROTO((enum rtx_code code, enum machine_mode mode,
				    ...));
static rtx gen_binary PROTO((enum rtx_code, enum machine_mode,
			     rtx, rtx));
static rtx gen_unary PROTO((enum rtx_code, enum machine_mode,
			    enum machine_mode, rtx));
static enum rtx_code simplify_comparison PROTO((enum rtx_code, rtx *, rtx *));
static int reversible_comparison_p PROTO((rtx));
static void update_table_tick PROTO((rtx));
static void record_value_for_reg PROTO((rtx, rtx, rtx));
static void record_dead_and_set_regs_1 PROTO((rtx, rtx));
static void record_dead_and_set_regs PROTO((rtx));
static int get_last_value_validate PROTO((rtx *, int, int));
static rtx get_last_value PROTO((rtx));
static int use_crosses_set_p PROTO((rtx, int));
static void reg_dead_at_p_1 PROTO((rtx, rtx));
static int reg_dead_at_p PROTO((rtx, rtx));
static void move_deaths PROTO((rtx, int, rtx, rtx *));
static int reg_bitfield_target_p PROTO((rtx, rtx));
static void distribute_notes PROTO((rtx, rtx, rtx, rtx, rtx, rtx));
static void distribute_links PROTO((rtx));
static void mark_used_regs_combine PROTO((rtx));
\f
/* Main entry point for combiner.  F is the first insn of the function.
   NREGS is the first unused pseudo-reg number.  */

void
combine_instructions (f, nregs)
     rtx f;
     int nregs;
{
  register rtx insn, next, prev;
  register int i;
  register rtx links, nextlinks;

  combine_attempts = 0;
  combine_merges = 0;
  combine_extras = 0;
  combine_successes = 0;
  undobuf.num_undo = previous_num_undos = 0;

  combine_max_regno = nregs;

  reg_nonzero_bits
    = (unsigned HOST_WIDE_INT *) alloca (nregs * sizeof (HOST_WIDE_INT));
  reg_sign_bit_copies = (char *) alloca (nregs * sizeof (char));

  bzero ((char *) reg_nonzero_bits, nregs * sizeof (HOST_WIDE_INT));
  bzero (reg_sign_bit_copies, nregs * sizeof (char));

  reg_last_death = (rtx *) alloca (nregs * sizeof (rtx));
  reg_last_set = (rtx *) alloca (nregs * sizeof (rtx));
  reg_last_set_value = (rtx *) alloca (nregs * sizeof (rtx));
  reg_last_set_table_tick = (int *) alloca (nregs * sizeof (int));
  reg_last_set_label = (int *) alloca (nregs * sizeof (int));
  reg_last_set_invalid = (char *) alloca (nregs * sizeof (char));
  reg_last_set_mode
    = (enum machine_mode *) alloca (nregs * sizeof (enum machine_mode));
  reg_last_set_nonzero_bits
    = (unsigned HOST_WIDE_INT *) alloca (nregs * sizeof (HOST_WIDE_INT));
  reg_last_set_sign_bit_copies
    = (char *) alloca (nregs * sizeof (char));

  init_reg_last_arrays ();

  init_recog_no_volatile ();

  /* Compute maximum uid value so uid_cuid can be allocated.  */

  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    if (INSN_UID (insn) > i)
      i = INSN_UID (insn);

  uid_cuid = (int *) alloca ((i + 1) * sizeof (int));

  nonzero_bits_mode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);

  /* Don't use reg_nonzero_bits when computing it.  This can cause problems
     when, for example, we have j <<= 1 in a loop.  */

  nonzero_sign_valid = 0;

  /* Compute the mapping from uids to cuids.
     Cuids are numbers assigned to insns, like uids,
     except that cuids increase monotonically through the code.

     Scan all SETs and see if we can deduce anything about what
     bits are known to be zero for some registers and how many copies
     of the sign bit are known to exist for those registers.

     Also set any known values so that we can use them while searching
     for what bits are known to be set.  */

  label_tick = 1;

  setup_incoming_promotions ();

  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    {
      INSN_CUID (insn) = ++i;
      subst_low_cuid = i;
      subst_insn = insn;

      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	{
	  note_stores (PATTERN (insn), set_nonzero_bits_and_sign_copies);
	  record_dead_and_set_regs (insn);
	}

      if (GET_CODE (insn) == CODE_LABEL)
	label_tick++;
    }

  nonzero_sign_valid = 1;

  /* Now scan all the insns in forward order.  */

  this_basic_block = -1;
  label_tick = 1;
  last_call_cuid = 0;
  mem_last_set = 0;
  init_reg_last_arrays ();
  setup_incoming_promotions ();

  for (insn = f; insn; insn = next ? next : NEXT_INSN (insn))
    {
      next = 0;

      /* If INSN starts a new basic block, update our basic block number.  */
      if (this_basic_block + 1 < n_basic_blocks
	  && basic_block_head[this_basic_block + 1] == insn)
	this_basic_block++;

      if (GET_CODE (insn) == CODE_LABEL)
	label_tick++;

      else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	{
	  /* Try this insn with each insn it links back to.  */

	  for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
	    if ((next = try_combine (insn, XEXP (links, 0), NULL_RTX)) != 0)
	      goto retry;

	  /* Try each sequence of three linked insns ending with this one.  */

	  for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
	    for (nextlinks = LOG_LINKS (XEXP (links, 0)); nextlinks;
		 nextlinks = XEXP (nextlinks, 1))
	      if ((next = try_combine (insn, XEXP (links, 0),
				       XEXP (nextlinks, 0))) != 0)
		goto retry;

#ifdef HAVE_cc0
	  /* Try to combine a jump insn that uses CC0
	     with a preceding insn that sets CC0, and maybe with its
	     logical predecessor as well.
	     This is how we make decrement-and-branch insns.
	     We need this special code because data flow connections
	     via CC0 do not get entered in LOG_LINKS.  */

	  if (GET_CODE (insn) == JUMP_INSN
	      && (prev = prev_nonnote_insn (insn)) != 0
	      && GET_CODE (prev) == INSN
	      && sets_cc0_p (PATTERN (prev)))
	    {
	      if ((next = try_combine (insn, prev, NULL_RTX)) != 0)
		goto retry;

	      for (nextlinks = LOG_LINKS (prev); nextlinks;
		   nextlinks = XEXP (nextlinks, 1))
		if ((next = try_combine (insn, prev,
					 XEXP (nextlinks, 0))) != 0)
		  goto retry;
	    }

	  /* Do the same for an insn that explicitly references CC0.  */
	  if (GET_CODE (insn) == INSN
	      && (prev = prev_nonnote_insn (insn)) != 0
	      && GET_CODE (prev) == INSN
	      && sets_cc0_p (PATTERN (prev))
	      && GET_CODE (PATTERN (insn)) == SET
	      && reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (insn))))
	    {
	      if ((next = try_combine (insn, prev, NULL_RTX)) != 0)
		goto retry;

	      for (nextlinks = LOG_LINKS (prev); nextlinks;
		   nextlinks = XEXP (nextlinks, 1))
		if ((next = try_combine (insn, prev,
					 XEXP (nextlinks, 0))) != 0)
		  goto retry;
	    }

	  /* Finally, see if any of the insns that this insn links to
	     explicitly references CC0.  If so, try this insn, that insn,
	     and its predecessor if it sets CC0.  */
	  for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
	    if (GET_CODE (XEXP (links, 0)) == INSN
		&& GET_CODE (PATTERN (XEXP (links, 0))) == SET
		&& reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (XEXP (links, 0))))
		&& (prev = prev_nonnote_insn (XEXP (links, 0))) != 0
		&& GET_CODE (prev) == INSN
		&& sets_cc0_p (PATTERN (prev))
		&& (next = try_combine (insn, XEXP (links, 0), prev)) != 0)
	      goto retry;
#endif

	  /* Try combining an insn with two different insns whose results it
	     uses.  */
	  for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
	    for (nextlinks = XEXP (links, 1); nextlinks;
		 nextlinks = XEXP (nextlinks, 1))
	      if ((next = try_combine (insn, XEXP (links, 0),
				       XEXP (nextlinks, 0))) != 0)
		goto retry;

	  if (GET_CODE (insn) != NOTE)
	    record_dead_and_set_regs (insn);

	retry:
	  ;
	}
    }

  total_attempts += combine_attempts;
  total_merges += combine_merges;
  total_extras += combine_extras;
  total_successes += combine_successes;

  nonzero_sign_valid = 0;
}

/* Wipe the reg_last_xxx arrays in preparation for another pass.  */

static void
init_reg_last_arrays ()
{
  int nregs = combine_max_regno;

  bzero ((char *) reg_last_death, nregs * sizeof (rtx));
  bzero ((char *) reg_last_set, nregs * sizeof (rtx));
  bzero ((char *) reg_last_set_value, nregs * sizeof (rtx));
  bzero ((char *) reg_last_set_table_tick, nregs * sizeof (int));
  bzero ((char *) reg_last_set_label, nregs * sizeof (int));
  bzero (reg_last_set_invalid, nregs * sizeof (char));
  bzero ((char *) reg_last_set_mode, nregs * sizeof (enum machine_mode));
  bzero ((char *) reg_last_set_nonzero_bits, nregs * sizeof (HOST_WIDE_INT));
  bzero (reg_last_set_sign_bit_copies, nregs * sizeof (char));
}
\f
/* Set up any promoted values for incoming argument registers.  */

static void
setup_incoming_promotions ()
{
#ifdef PROMOTE_FUNCTION_ARGS
  int regno;
  rtx reg;
  enum machine_mode mode;
  int unsignedp;
  rtx first = get_insns ();

  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if (FUNCTION_ARG_REGNO_P (regno)
	&& (reg = promoted_input_arg (regno, &mode, &unsignedp)) != 0)
      record_value_for_reg (reg, first,
			    gen_rtx (unsignedp ? ZERO_EXTEND : SIGN_EXTEND,
				     GET_MODE (reg),
				     gen_rtx (CLOBBER, mode, const0_rtx)));
#endif
}
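
/* Illustrative note (not part of the original source): on a target that
   promotes QImode arguments to SImode, the call above records for the
   argument register a value of the form

       (sign_extend:SI (clobber:QI (const_int 0)))

   (or ZERO_EXTEND if unsigned), i.e. "some QImode value, extended", which
   gives nonzero_bits and num_sign_bit_copies something to work with even
   though the actual argument is unknown.  */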
\f
/* Called via note_stores.  If X is a pseudo that is used in more than
   one basic block, is narrower than HOST_BITS_PER_WIDE_INT, and is being
   set, record what bits are known zero.  If we are clobbering X,
   ignore this "set" because the clobbered value won't be used.

   If we are setting only a portion of X and we can't figure out what
   portion, assume all bits will be used since we don't know what will
   be happening.

   Similarly, set how many bits of X are known to be copies of the sign bit
   at all locations in the function.  This is the smallest number implied
   by any set of X.  */

static void
set_nonzero_bits_and_sign_copies (x, set)
     rtx x;
     rtx set;
{
  int num;

  if (GET_CODE (x) == REG
      && REGNO (x) >= FIRST_PSEUDO_REGISTER
      && reg_n_sets[REGNO (x)] > 1
      && reg_basic_block[REGNO (x)] < 0
      /* If this register is undefined at the start of the file, we can't
	 say what its contents were.  */
      && ! (basic_block_live_at_start[0][REGNO (x) / REGSET_ELT_BITS]
	    & ((REGSET_ELT_TYPE) 1 << (REGNO (x) % REGSET_ELT_BITS)))
      && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (set) == CLOBBER)
	{
	  reg_nonzero_bits[REGNO (x)] = GET_MODE_MASK (GET_MODE (x));
	  reg_sign_bit_copies[REGNO (x)] = 0;
	  return;
	}

      /* If this is a complex assignment, see if we can convert it into a
	 simple assignment.  */
      set = expand_field_assignment (set);

      /* If this is a simple assignment, or we have a paradoxical SUBREG,
	 set what we know about X.  */

      if (SET_DEST (set) == x
	  || (GET_CODE (SET_DEST (set)) == SUBREG
	      && (GET_MODE_SIZE (GET_MODE (SET_DEST (set)))
		  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (set)))))
	      && SUBREG_REG (SET_DEST (set)) == x))
	{
	  rtx src = SET_SRC (set);

#ifdef SHORT_IMMEDIATES_SIGN_EXTEND
	  /* If X is narrower than a word and SRC is a non-negative
	     constant that would appear negative in the mode of X,
	     sign-extend it for use in reg_nonzero_bits because some
	     machines (maybe most) will actually do the sign-extension
	     and this is the conservative approach.

	     ??? For 2.5, try to tighten up the MD files in this regard
	     instead of this kludge.  */

	  if (GET_MODE_BITSIZE (GET_MODE (x)) < BITS_PER_WORD
	      && GET_CODE (src) == CONST_INT
	      && INTVAL (src) > 0
	      && 0 != (INTVAL (src)
		       & ((HOST_WIDE_INT) 1
			  << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
	    src = GEN_INT (INTVAL (src)
			   | ((HOST_WIDE_INT) (-1)
			      << GET_MODE_BITSIZE (GET_MODE (x))));
#endif

	  reg_nonzero_bits[REGNO (x)]
	    |= nonzero_bits (src, nonzero_bits_mode);
	  num = num_sign_bit_copies (SET_SRC (set), GET_MODE (x));
	  if (reg_sign_bit_copies[REGNO (x)] == 0
	      || reg_sign_bit_copies[REGNO (x)] > num)
	    reg_sign_bit_copies[REGNO (x)] = num;
	}
      else
	{
	  reg_nonzero_bits[REGNO (x)] = GET_MODE_MASK (GET_MODE (x));
	  reg_sign_bit_copies[REGNO (x)] = 0;
	}
    }
}
\f
/* See if INSN can be combined into I3.  PRED and SUCC are optionally
   insns that were previously combined into I3 or that will be combined
   into the merger of INSN and I3.

   Return 0 if the combination is not allowed for any reason.

   If the combination is allowed, *PDEST will be set to the single
   destination of INSN and *PSRC to the single source, and this function
   will return 1.  */

static int
can_combine_p (insn, i3, pred, succ, pdest, psrc)
     rtx insn;
     rtx i3;
     rtx pred, succ;
     rtx *pdest, *psrc;
{
  int i;
  rtx set = 0, src, dest;
  rtx p, link;
  int all_adjacent = (succ ? (next_active_insn (insn) == succ
			      && next_active_insn (succ) == i3)
		      : next_active_insn (insn) == i3);

  /* Can combine only if previous insn is a SET of a REG, a SUBREG or CC0,
     or a PARALLEL consisting of such a SET and CLOBBERs.

     If INSN has CLOBBER parallel parts, ignore them for our processing.
     By definition, these happen during the execution of the insn.  When it
     is merged with another insn, all bets are off.  If they are, in fact,
     needed and aren't also supplied in I3, they may be added by
     recog_for_combine.  Otherwise, it won't match.

     We can also ignore a SET whose SET_DEST is mentioned in a REG_UNUSED
     note.

     Get the source and destination of INSN.  If more than one, can't
     combine.  */

  if (GET_CODE (PATTERN (insn)) == SET)
    set = PATTERN (insn);
  else if (GET_CODE (PATTERN (insn)) == PARALLEL
	   && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
    {
      for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
	{
	  rtx elt = XVECEXP (PATTERN (insn), 0, i);

	  switch (GET_CODE (elt))
	    {
	      /* We can ignore CLOBBERs.  */
	    case CLOBBER:
	      break;

	    case SET:
	      /* Ignore SETs whose result isn't used but not those that
		 have side-effects.  */
	      if (find_reg_note (insn, REG_UNUSED, SET_DEST (elt))
		  && ! side_effects_p (elt))
		break;

	      /* If we have already found a SET, this is a second one and
		 so we cannot combine with this insn.  */
	      if (set)
		return 0;

	      set = elt;
	      break;

	    default:
	      /* Anything else means we can't combine.  */
	      return 0;
	    }
	}

      if (set == 0
	  /* If SET_SRC is an ASM_OPERANDS we can't throw away these CLOBBERs,
	     so don't do anything with it.  */
	  || GET_CODE (SET_SRC (set)) == ASM_OPERANDS)
	return 0;
    }
  else
    return 0;

  if (set == 0)
    return 0;

  set = expand_field_assignment (set);
  src = SET_SRC (set), dest = SET_DEST (set);

  /* Don't eliminate a store in the stack pointer.  */
  if (dest == stack_pointer_rtx
      /* If we couldn't eliminate a field assignment, we can't combine.  */
      || GET_CODE (dest) == ZERO_EXTRACT || GET_CODE (dest) == STRICT_LOW_PART
      /* Don't combine with an insn that sets a register to itself if it has
	 a REG_EQUAL note.  This may be part of a REG_NO_CONFLICT sequence.  */
      || (rtx_equal_p (src, dest) && find_reg_note (insn, REG_EQUAL, NULL_RTX))
      /* Can't merge a function call.  */
      || GET_CODE (src) == CALL
      /* Don't eliminate a function call argument.  */
      || (GET_CODE (i3) == CALL_INSN && find_reg_fusage (i3, USE, dest))
      /* Don't substitute into an incremented register.  */
      || FIND_REG_INC_NOTE (i3, dest)
      || (succ && FIND_REG_INC_NOTE (succ, dest))
      /* Don't combine the end of a libcall into anything.  */
      || find_reg_note (insn, REG_RETVAL, NULL_RTX)
      /* Make sure that DEST is not used after SUCC but before I3.  */
      || (succ && ! all_adjacent
	  && reg_used_between_p (dest, succ, i3))
      /* Make sure that the value that is to be substituted for the register
	 does not use any registers whose values alter in between.  However,
	 if the insns are adjacent, a use can't cross a set even though we
	 think it might (this can happen for a sequence of insns each setting
	 the same destination; reg_last_set of that register might point to
	 a NOTE).  If INSN has a REG_EQUIV note, the register is always
	 equivalent to the memory so the substitution is valid even if there
	 are intervening stores.  Also, don't move a volatile asm or
	 UNSPEC_VOLATILE across any other insns.  */
      || (! all_adjacent
	  && (((GET_CODE (src) != MEM
		|| ! find_reg_note (insn, REG_EQUIV, src))
	       && use_crosses_set_p (src, INSN_CUID (insn)))
	      || (GET_CODE (src) == ASM_OPERANDS && MEM_VOLATILE_P (src))
	      || GET_CODE (src) == UNSPEC_VOLATILE))
      /* If there is a REG_NO_CONFLICT note for DEST in I3 or SUCC, we get
	 better register allocation by not doing the combine.  */
      || find_reg_note (i3, REG_NO_CONFLICT, dest)
      || (succ && find_reg_note (succ, REG_NO_CONFLICT, dest))
      /* Don't combine across a CALL_INSN, because that would possibly
	 change whether the life span of some REGs crosses calls or not,
	 and it is a pain to update that information.
	 Exception: if source is a constant, moving it later can't hurt.
	 Accept that special case, because it helps -fforce-addr a lot.  */
      || (INSN_CUID (insn) < last_call_cuid && ! CONSTANT_P (src)))
    return 0;

  /* DEST must either be a REG or CC0.  */
  if (GET_CODE (dest) == REG)
    {
      /* If register alignment is being enforced for multi-word items in all
	 cases except for parameters, it is possible to have a register copy
	 insn referencing a hard register that is not allowed to contain the
	 mode being copied and which would not be valid as an operand of most
	 insns.  Eliminate this problem by not combining with such an insn.

	 Also, on some machines we don't want to extend the life of a hard
	 register.  */

      if (GET_CODE (src) == REG
	  && ((REGNO (dest) < FIRST_PSEUDO_REGISTER
	       && ! HARD_REGNO_MODE_OK (REGNO (dest), GET_MODE (dest)))
	      /* Don't extend the life of a hard register unless it is
		 a user variable (if we have few registers) or it can't
		 fit into the desired register (meaning something special
		 is going on).  */
	      || (REGNO (src) < FIRST_PSEUDO_REGISTER
		  && (! HARD_REGNO_MODE_OK (REGNO (src), GET_MODE (src))
#ifdef SMALL_REGISTER_CLASSES
		      || ! REG_USERVAR_P (src)
#endif
		      ))))
	return 0;
    }
  else if (GET_CODE (dest) != CC0)
    return 0;

  /* Don't substitute for a register intended as a clobberable operand.
     Similarly, don't substitute an expression containing a register that
     will be clobbered in I3.  */
  if (GET_CODE (PATTERN (i3)) == PARALLEL)
    for (i = XVECLEN (PATTERN (i3), 0) - 1; i >= 0; i--)
      if (GET_CODE (XVECEXP (PATTERN (i3), 0, i)) == CLOBBER
	  && (reg_overlap_mentioned_p (XEXP (XVECEXP (PATTERN (i3), 0, i), 0),
				       src)
	      || rtx_equal_p (XEXP (XVECEXP (PATTERN (i3), 0, i), 0), dest)))
	return 0;

  /* If INSN contains anything volatile, or is an `asm' (whether volatile
     or not), reject, unless nothing volatile comes between it and I3,
     with the exception of SUCC.  */

  if (GET_CODE (src) == ASM_OPERANDS || volatile_refs_p (src))
    for (p = NEXT_INSN (insn); p != i3; p = NEXT_INSN (p))
      if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
	  && p != succ && volatile_refs_p (PATTERN (p)))
	return 0;

  /* If there are any volatile insns between INSN and I3, reject, because
     they might affect machine state.  */

  for (p = NEXT_INSN (insn); p != i3; p = NEXT_INSN (p))
    if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
	&& p != succ && volatile_insn_p (PATTERN (p)))
      return 0;

  /* If INSN or I2 contains an autoincrement or autodecrement,
     make sure that register is not used between there and I3,
     and not already used in I3 either.
     Also insist that I3 not be a jump; if it were one
     and the incremented register were spilled, we would lose.  */

#ifdef AUTO_INC_DEC
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == REG_INC
	&& (GET_CODE (i3) == JUMP_INSN
	    || reg_used_between_p (XEXP (link, 0), insn, i3)
	    || reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i3))))
      return 0;
#endif

#ifdef HAVE_cc0
  /* Don't combine an insn that follows a CC0-setting insn.
     An insn that uses CC0 must not be separated from the one that sets it.
     We do, however, allow I2 to follow a CC0-setting insn if that insn
     is passed as I1; in that case it will be deleted also.
     We also allow combining in this case if all the insns are adjacent
     because that would leave the two CC0 insns adjacent as well.
     It would be more logical to test whether CC0 occurs inside I1 or I2,
     but that would be much slower, and this ought to be equivalent.  */

  p = prev_nonnote_insn (insn);
  if (p && p != pred && GET_CODE (p) == INSN && sets_cc0_p (PATTERN (p))
      && ! all_adjacent)
    return 0;
#endif

  /* If we get here, we have passed all the tests and the combination is
     to be allowed.  */

  *pdest = dest;
  *psrc = src;

  return 1;
}
\f
/* LOC is the location within I3 that contains its pattern or the component
   of a PARALLEL of the pattern.  We validate that it is valid for combining.

   One problem is if I3 modifies its output, as opposed to replacing it
   entirely, we can't allow the output to contain I2DEST or I1DEST as doing
   so would produce an insn that is not equivalent to the original insns.

   Consider:

	 (set (reg:DI 101) (reg:DI 100))
	 (set (subreg:SI (reg:DI 101) 0) <foo>)

   This is NOT equivalent to:

	 (parallel [(set (subreg:SI (reg:DI 100) 0) <foo>)
		    (set (reg:DI 101) (reg:DI 100))])

   Not only does this modify 100 (in which case it might still be valid
   if 100 were dead in I2), it sets 101 to the ORIGINAL value of 100.

   We can also run into a problem if I2 sets a register that I1
   uses and I1 gets directly substituted into I3 (not via I2).  In that
   case, we would be getting the wrong value of I2DEST into I3, so we
   must reject the combination.  This case occurs when I2 and I1 both
   feed into I3, rather than when I1 feeds into I2, which feeds into I3.
   If I1_NOT_IN_SRC is non-zero, it means that finding I1 in the source
   of a SET must prevent combination from occurring.

   On machines where SMALL_REGISTER_CLASSES is defined, we don't combine
   if the destination of a SET is a hard register that isn't a user
   variable.

   Before doing the above check, we first try to expand a field assignment
   into a set of logical operations.

   If PI3_DEST_KILLED is non-zero, it is a pointer to a location in which
   we place a register that is both set and used within I3.  If more than one
   such register is detected, we fail.

   Return 1 if the combination is valid, zero otherwise.  */

static int
combinable_i3pat (i3, loc, i2dest, i1dest, i1_not_in_src, pi3dest_killed)
     rtx i3;
     rtx *loc;
     rtx i2dest;
     rtx i1dest;
     int i1_not_in_src;
     rtx *pi3dest_killed;
{
  rtx x = *loc;

  if (GET_CODE (x) == SET)
    {
      rtx set = expand_field_assignment (x);
      rtx dest = SET_DEST (set);
      rtx src = SET_SRC (set);
      rtx inner_dest = dest, inner_src = src;

      SUBST (*loc, set);

      while (GET_CODE (inner_dest) == STRICT_LOW_PART
	     || GET_CODE (inner_dest) == SUBREG
	     || GET_CODE (inner_dest) == ZERO_EXTRACT)
	inner_dest = XEXP (inner_dest, 0);

      /* We probably don't need this any more now that LIMIT_RELOAD_CLASS
	 was added.  */
#if 0
      while (GET_CODE (inner_src) == STRICT_LOW_PART
	     || GET_CODE (inner_src) == SUBREG
	     || GET_CODE (inner_src) == ZERO_EXTRACT)
	inner_src = XEXP (inner_src, 0);

      /* If it is better that two different modes keep two different pseudos,
	 avoid combining them.  This avoids producing the following pattern
	 on a 386:
	  (set (subreg:SI (reg/v:QI 21) 0)
	       (lshiftrt:SI (reg/v:SI 20)
			    (const_int 24)))
	 If that were made, reload could not handle the pair of
	 reg 20/21, since it would try to get any GENERAL_REGS
	 but some of them don't handle QImode.  */

      if (rtx_equal_p (inner_src, i2dest)
	  && GET_CODE (inner_dest) == REG
	  && ! MODES_TIEABLE_P (GET_MODE (i2dest), GET_MODE (inner_dest)))
	return 0;
#endif

      /* Check for the case where I3 modifies its output, as
	 discussed above.  */
      if ((inner_dest != dest
	   && (reg_overlap_mentioned_p (i2dest, inner_dest)
	       || (i1dest && reg_overlap_mentioned_p (i1dest, inner_dest))))
	  /* This is the same test done in can_combine_p except that we
	     allow a hard register with SMALL_REGISTER_CLASSES if SRC is a
	     CALL operation.  */
	  || (GET_CODE (inner_dest) == REG
	      && REGNO (inner_dest) < FIRST_PSEUDO_REGISTER
	      && (! HARD_REGNO_MODE_OK (REGNO (inner_dest),
					GET_MODE (inner_dest))
#ifdef SMALL_REGISTER_CLASSES
		  || (GET_CODE (src) != CALL && ! REG_USERVAR_P (inner_dest))
#endif
		  ))
	  || (i1_not_in_src && reg_overlap_mentioned_p (i1dest, src)))
	return 0;

      /* If DEST is used in I3, it is being killed in this insn,
	 so record that for later.
	 Never add REG_DEAD notes for the FRAME_POINTER_REGNUM or the
	 STACK_POINTER_REGNUM, since these are always considered to be
	 live.  Similarly for ARG_POINTER_REGNUM if it is fixed.  */
      if (pi3dest_killed && GET_CODE (dest) == REG
	  && reg_referenced_p (dest, PATTERN (i3))
	  && REGNO (dest) != FRAME_POINTER_REGNUM
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
	  && REGNO (dest) != HARD_FRAME_POINTER_REGNUM
#endif
#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
	  && (REGNO (dest) != ARG_POINTER_REGNUM
	      || ! fixed_regs [REGNO (dest)])
#endif
	  && REGNO (dest) != STACK_POINTER_REGNUM)
	{
	  if (*pi3dest_killed)
	    return 0;

	  *pi3dest_killed = dest;
	}
    }

  else if (GET_CODE (x) == PARALLEL)
    {
      int i;

      for (i = 0; i < XVECLEN (x, 0); i++)
	if (! combinable_i3pat (i3, &XVECEXP (x, 0, i), i2dest, i1dest,
				i1_not_in_src, pi3dest_killed))
	  return 0;
    }

  return 1;
}
\f
/* Try to combine the insns I1 and I2 into I3.
   Here I1 and I2 appear earlier than I3.
   I1 can be zero; then we combine just I2 into I3.

   If we are combining three insns and the resulting insn is not recognized,
   try splitting it into two insns.  If that happens, I2 and I3 are retained
   and I1 is pseudo-deleted by turning it into a NOTE.  Otherwise, I1 and I2
   are pseudo-deleted.

   Return 0 if the combination does not work.  Then nothing is changed.
   If we did the combination, return the insn at which combine should
   resume scanning.  */

static rtx
try_combine (i3, i2, i1)
     register rtx i3, i2, i1;
{
  /* New patterns for I3 and I2, respectively.  */
  rtx newpat, newi2pat = 0;
  /* Indicates need to preserve SET in I1 or I2 in I3 if it is not dead.  */
  int added_sets_1, added_sets_2;
  /* Total number of SETs to put into I3.  */
  int total_sets;
  /* Nonzero if I2's body now appears in I3.  */
  int i2_is_used;
  /* INSN_CODEs for new I3, new I2, and user of condition code.  */
  int insn_code_number, i2_code_number, other_code_number;
  /* Contains I3 if the destination of I3 is used in its source, which means
     that the old life of I3 is being killed.  If that usage is placed into
     I2 and not in I3, a REG_DEAD note must be made.  */
  rtx i3dest_killed = 0;
  /* SET_DEST and SET_SRC of I2 and I1.  */
  rtx i2dest, i2src, i1dest = 0, i1src = 0;
  /* PATTERN (I2), or a copy of it in certain cases.  */
  rtx i2pat;
  /* Indicates if I2DEST or I1DEST is in I2SRC or I1SRC.  */
  int i2dest_in_i2src = 0, i1dest_in_i1src = 0, i2dest_in_i1src = 0;
  int i1_feeds_i3 = 0;
  /* Notes that must be added to REG_NOTES in I3 and I2.  */
  rtx new_i3_notes, new_i2_notes;
  /* Notes that we substituted I3 into I2 instead of the normal case.  */
  int i3_subst_into_i2 = 0;
  /* Notes that I1, I2 or I3 is a MULT operation.  */
  int have_mult = 0;

  int maxreg;
  rtx temp;
  register rtx link;
  int i;

  /* If any of I1, I2, and I3 isn't really an insn, we can't do anything.
     This can occur when flow deletes an insn that it has merged into an
     auto-increment address.  We also can't do anything if I3 has a
     REG_LIBCALL note since we don't want to disrupt the contiguity of a
     libcall.  */

  if (GET_RTX_CLASS (GET_CODE (i3)) != 'i'
      || GET_RTX_CLASS (GET_CODE (i2)) != 'i'
      || (i1 && GET_RTX_CLASS (GET_CODE (i1)) != 'i')
      || find_reg_note (i3, REG_LIBCALL, NULL_RTX))
    return 0;

  combine_attempts++;

  undobuf.num_undo = previous_num_undos = 0;
  undobuf.other_insn = 0;

  /* Save the current high-water-mark so we can free storage if we didn't
     accept this combination.  */
  undobuf.storage = (char *) oballoc (0);

  /* Reset the hard register usage information.  */
  CLEAR_HARD_REG_SET (newpat_used_regs);

  /* If I1 and I2 both feed I3, they can be in any order.  To simplify the
     code below, set I1 to be the earlier of the two insns.  */
  if (i1 && INSN_CUID (i1) > INSN_CUID (i2))
    temp = i1, i1 = i2, i2 = temp;

  added_links_insn = 0;

  /* First check for one important special-case that the code below will
     not handle.  Namely, the case where I1 is zero, I2 has multiple sets,
     and I3 is a SET whose SET_SRC is a SET_DEST in I2.  In that case,
     we may be able to replace that destination with the destination of I3.
     This occurs in the common code where we compute both a quotient and
     remainder into a structure, in which case we want to do the computation
     directly into the structure to avoid register-register copies.

     We make very conservative checks below and only try to handle the
     most common cases of this.  For example, we only handle the case
     where I2 and I3 are adjacent to avoid making difficult register
     usage tests.  */
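
  /* Illustrative sketch of that special case (not part of the original
     source; register numbers are made up).  I2 might be

	 (parallel [(set (reg:SI 70) (div:SI (reg:SI 60) (reg:SI 61)))
		    (set (reg:SI 71) (mod:SI (reg:SI 60) (reg:SI 61)))])

     with I3

	 (set (mem:SI (reg:SI 100)) (reg:SI 71))

     and reg 71 dead in I3; the code below would rewrite I2's second SET
     to store the remainder directly through (reg:SI 100).  */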

  if (i1 == 0 && GET_CODE (i3) == INSN && GET_CODE (PATTERN (i3)) == SET
      && GET_CODE (SET_SRC (PATTERN (i3))) == REG
      && REGNO (SET_SRC (PATTERN (i3))) >= FIRST_PSEUDO_REGISTER
#ifdef SMALL_REGISTER_CLASSES
      && (GET_CODE (SET_DEST (PATTERN (i3))) != REG
	  || REGNO (SET_DEST (PATTERN (i3))) >= FIRST_PSEUDO_REGISTER
	  || REG_USERVAR_P (SET_DEST (PATTERN (i3))))
#endif
      && find_reg_note (i3, REG_DEAD, SET_SRC (PATTERN (i3)))
      && GET_CODE (PATTERN (i2)) == PARALLEL
      && ! side_effects_p (SET_DEST (PATTERN (i3)))
      /* If the dest of I3 is a ZERO_EXTRACT or STRICT_LOW_PART, the code
	 below would need to check what is inside (and reg_overlap_mentioned_p
	 doesn't support those codes anyway).  Don't allow those destinations;
	 the resulting insn isn't likely to be recognized anyway.  */
      && GET_CODE (SET_DEST (PATTERN (i3))) != ZERO_EXTRACT
      && GET_CODE (SET_DEST (PATTERN (i3))) != STRICT_LOW_PART
      && ! reg_overlap_mentioned_p (SET_SRC (PATTERN (i3)),
				    SET_DEST (PATTERN (i3)))
      && next_real_insn (i2) == i3)
    {
      rtx p2 = PATTERN (i2);

      /* Make sure that the destination of I3,
	 which we are going to substitute into one output of I2,
	 is not used within another output of I2.  We must avoid making this:
	 (parallel [(set (mem (reg 69)) ...)
		    (set (reg 69) ...)])
	 which is not well-defined as to order of actions.
	 (Besides, reload can't handle output reloads for this.)

	 The problem can also happen if the dest of I3 is a memory ref,
	 if another dest in I2 is an indirect memory ref.  */
      for (i = 0; i < XVECLEN (p2, 0); i++)
	if (GET_CODE (XVECEXP (p2, 0, i)) == SET
	    && reg_overlap_mentioned_p (SET_DEST (PATTERN (i3)),
					SET_DEST (XVECEXP (p2, 0, i))))
	  break;

      if (i == XVECLEN (p2, 0))
	for (i = 0; i < XVECLEN (p2, 0); i++)
	  if (SET_DEST (XVECEXP (p2, 0, i)) == SET_SRC (PATTERN (i3)))
	    {
	      combine_merges++;

	      subst_insn = i3;
	      subst_low_cuid = INSN_CUID (i2);

	      added_sets_2 = added_sets_1 = 0;
	      i2dest = SET_SRC (PATTERN (i3));

	      /* Replace the dest in I2 with our dest and make the resulting
		 insn the new pattern for I3.  Then skip to where we
		 validate the pattern.  Everything was set up above.  */
	      SUBST (SET_DEST (XVECEXP (p2, 0, i)),
		     SET_DEST (PATTERN (i3)));

	      newpat = p2;
	      i3_subst_into_i2 = 1;
	      goto validate_replacement;
	    }
    }

#ifndef HAVE_cc0
  /* If we have no I1 and I2 looks like:
	(parallel [(set (reg:CC X) (compare:CC OP (const_int 0)))
		   (set Y OP)])
     make up a dummy I1 that is
	(set Y OP)
     and change I2 to be
	(set (reg:CC X) (compare:CC Y (const_int 0)))

     (We can ignore any trailing CLOBBERs.)

     This undoes a previous combination and allows us to match a branch-and-
     decrement insn.  */

  if (i1 == 0 && GET_CODE (PATTERN (i2)) == PARALLEL
      && XVECLEN (PATTERN (i2), 0) >= 2
      && GET_CODE (XVECEXP (PATTERN (i2), 0, 0)) == SET
      && (GET_MODE_CLASS (GET_MODE (SET_DEST (XVECEXP (PATTERN (i2), 0, 0))))
	  == MODE_CC)
      && GET_CODE (SET_SRC (XVECEXP (PATTERN (i2), 0, 0))) == COMPARE
      && XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 1) == const0_rtx
      && GET_CODE (XVECEXP (PATTERN (i2), 0, 1)) == SET
      && GET_CODE (SET_DEST (XVECEXP (PATTERN (i2), 0, 1))) == REG
      && rtx_equal_p (XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 0),
		      SET_SRC (XVECEXP (PATTERN (i2), 0, 1))))
    {
      for (i = XVECLEN (PATTERN (i2), 0) - 1; i >= 2; i--)
	if (GET_CODE (XVECEXP (PATTERN (i2), 0, i)) != CLOBBER)
	  break;

      if (i == 1)
	{
	  /* We make I1 with the same INSN_UID as I2.  This gives it
	     the same INSN_CUID for value tracking.  Our fake I1 will
	     never appear in the insn stream so giving it the same INSN_UID
	     as I2 will not cause a problem.  */

	  i1 = gen_rtx (INSN, VOIDmode, INSN_UID (i2), 0, i2,
			XVECEXP (PATTERN (i2), 0, 1), -1, 0, 0);

	  SUBST (PATTERN (i2), XVECEXP (PATTERN (i2), 0, 0));
	  SUBST (XEXP (SET_SRC (PATTERN (i2)), 0),
		 SET_DEST (PATTERN (i1)));
	}
    }
#endif

  /* Verify that I2 and I1 are valid for combining.  */
  if (! can_combine_p (i2, i3, i1, NULL_RTX, &i2dest, &i2src)
      || (i1 && ! can_combine_p (i1, i3, NULL_RTX, i2, &i1dest, &i1src)))
    {
      undo_all ();
      return 0;
    }

  /* Record whether I2DEST is used in I2SRC and similarly for the other
     cases.  Knowing this will help in register status updating below.  */
  i2dest_in_i2src = reg_overlap_mentioned_p (i2dest, i2src);
  i1dest_in_i1src = i1 && reg_overlap_mentioned_p (i1dest, i1src);
  i2dest_in_i1src = i1 && reg_overlap_mentioned_p (i2dest, i1src);

  /* See if I1 directly feeds into I3.  It does if I1DEST is not used
     in I2SRC.  */
  i1_feeds_i3 = i1 && ! reg_overlap_mentioned_p (i1dest, i2src);

  /* Ensure that I3's pattern can be the destination of combines.  */
  if (! combinable_i3pat (i3, &PATTERN (i3), i2dest, i1dest,
			  i1 && i2dest_in_i1src && i1_feeds_i3,
			  &i3dest_killed))
    {
      undo_all ();
      return 0;
    }

  /* See if any of the insns is a MULT operation.  Unless one is, we will
     reject a combination that is, since it must be slower.  Be conservative
     here.  */
  if (GET_CODE (i2src) == MULT
      || (i1 != 0 && GET_CODE (i1src) == MULT)
      || (GET_CODE (PATTERN (i3)) == SET
	  && GET_CODE (SET_SRC (PATTERN (i3))) == MULT))
    have_mult = 1;

  /* If I3 has an inc, then give up if I1 or I2 uses the reg that is inc'd.
     We used to do this EXCEPT in one case: I3 has a post-inc in an
     output operand.  However, that exception can give rise to insns like
	mov r3,(r3)+
     which is a famous insn on the PDP-11 where the value of r3 used as the
     source was model-dependent.  Avoid this sort of thing.  */

#if 0
  if (!(GET_CODE (PATTERN (i3)) == SET
	&& GET_CODE (SET_SRC (PATTERN (i3))) == REG
	&& GET_CODE (SET_DEST (PATTERN (i3))) == MEM
	&& (GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_INC
	    || GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_DEC)))
    /* It's not the exception.  */
#endif
#ifdef AUTO_INC_DEC
    for (link = REG_NOTES (i3); link; link = XEXP (link, 1))
      if (REG_NOTE_KIND (link) == REG_INC
	  && (reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i2))
	      || (i1 != 0
		  && reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i1)))))
	{
	  undo_all ();
	  return 0;
	}
#endif

  /* See if the SETs in I1 or I2 need to be kept around in the merged
     instruction: whenever the value set there is still needed past I3.
     For the SETs in I2, this is easy: we see if I2DEST dies or is set in I3.

     For the SET in I1, we have two cases:  If I1 and I2 independently
     feed into I3, the set in I1 needs to be kept around if I1DEST dies
     or is set in I3.  Otherwise (if I1 feeds I2 which feeds I3), the set
     in I1 needs to be kept around unless I1DEST dies or is set in either
     I2 or I3.  We can distinguish these cases by seeing if I2SRC mentions
     I1DEST.  If so, we know I1 feeds into I2.  */

  added_sets_2 = ! dead_or_set_p (i3, i2dest);

  added_sets_1
    = i1 && ! (i1_feeds_i3 ? dead_or_set_p (i3, i1dest)
	       : (dead_or_set_p (i3, i1dest) || dead_or_set_p (i2, i1dest)));

  /* If the set in I2 needs to be kept around, we must make a copy of
     PATTERN (I2), so that when we substitute I1SRC for I1DEST in
     PATTERN (I2), we are only substituting for the original I1DEST, not into
     an already-substituted copy.  This also prevents making self-referential
     rtx.  If I2 is a PARALLEL, we just need the piece that assigns I2SRC to
     I2DEST.  */

  i2pat = (GET_CODE (PATTERN (i2)) == PARALLEL
	   ? gen_rtx (SET, VOIDmode, i2dest, i2src)
	   : PATTERN (i2));

  if (added_sets_2)
    i2pat = copy_rtx (i2pat);

  combine_merges++;

  /* Substitute in the latest insn for the regs set by the earlier ones.  */

  maxreg = max_reg_num ();

  subst_insn = i3;

  /* It is possible that the source of I2 or I1 may be performing an
     unneeded operation, such as a ZERO_EXTEND of something that is known
     to have the high part zero.  Handle that case by letting subst look at
     the innermost one of them.

     Another way to do this would be to have a function that tries to
     simplify a single insn instead of merging two or more insns.  We don't
     do this because of the potential of infinite loops and because
     of the potential extra memory required.  However, doing it the way
     we are is a bit of a kludge and doesn't catch all cases.

     But only do this if -fexpensive-optimizations since it slows things down
     and doesn't usually win.  */
1478
1479 if (flag_expensive_optimizations)
1480 {
1481 /* Pass pc_rtx so no substitutions are done, just simplifications.
1482 The cases that we are interested in here do not involve the few
1483 cases were is_replaced is checked. */
1484 if (i1)
d0ab8cd3
RK
1485 {
1486 subst_low_cuid = INSN_CUID (i1);
1487 i1src = subst (i1src, pc_rtx, pc_rtx, 0, 0);
1488 }
230d793d 1489 else
d0ab8cd3
RK
1490 {
1491 subst_low_cuid = INSN_CUID (i2);
1492 i2src = subst (i2src, pc_rtx, pc_rtx, 0, 0);
1493 }
230d793d
RS
1494
1495 previous_num_undos = undobuf.num_undo;
1496 }
1497
1498#ifndef HAVE_cc0
1499 /* Many machines that don't use CC0 have insns that can both perform an
1500 arithmetic operation and set the condition code. These operations will
1501 be represented as a PARALLEL with the first element of the vector
1502 being a COMPARE of an arithmetic operation with the constant zero.
1503 The second element of the vector will set some pseudo to the result
1504 of the same arithmetic operation. If we simplify the COMPARE, we won't
1505 match such a pattern and so will generate an extra insn. Here we test
1506 for this case, where both the comparison and the operation result are
1507 needed, and make the PARALLEL by just replacing I2DEST in I3SRC with
1508 I2SRC. Later we will make the PARALLEL that contains I2. */
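
  /* As an illustrative sketch (the modes and register numbers here are
     invented, not taken from any real machine description), I2 and I3 of
     the form

	 I2: (set (reg:SI 70) (plus:SI (reg:SI 68) (reg:SI 69)))
	 I3: (set (reg:CC 24) (compare:CC (reg:SI 70) (const_int 0)))

     are handled below by substituting I2SRC into the COMPARE, giving

	 (set (reg:CC 24)
	      (compare:CC (plus:SI (reg:SI 68) (reg:SI 69)) (const_int 0)))

     while the PARALLEL built later keeps the set of (reg:SI 70).  */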

  if (i1 == 0 && added_sets_2 && GET_CODE (PATTERN (i3)) == SET
      && GET_CODE (SET_SRC (PATTERN (i3))) == COMPARE
      && XEXP (SET_SRC (PATTERN (i3)), 1) == const0_rtx
      && rtx_equal_p (XEXP (SET_SRC (PATTERN (i3)), 0), i2dest))
    {
      rtx *cc_use;
      enum machine_mode compare_mode;

      newpat = PATTERN (i3);
      SUBST (XEXP (SET_SRC (newpat), 0), i2src);

      i2_is_used = 1;

#ifdef EXTRA_CC_MODES
      /* See if a COMPARE with the operand we substituted in should be done
	 with the mode that is currently being used.  If not, do the same
	 processing we do in `subst' for a SET; namely, if the destination
	 is used only once, try to replace it with a register of the proper
	 mode and also replace the COMPARE.  */
      if (undobuf.other_insn == 0
	  && (cc_use = find_single_use (SET_DEST (newpat), i3,
					&undobuf.other_insn))
	  && ((compare_mode = SELECT_CC_MODE (GET_CODE (*cc_use),
					      i2src, const0_rtx))
	      != GET_MODE (SET_DEST (newpat))))
	{
	  int regno = REGNO (SET_DEST (newpat));
	  rtx new_dest = gen_rtx (REG, compare_mode, regno);

	  if (regno < FIRST_PSEUDO_REGISTER
	      || (reg_n_sets[regno] == 1 && ! added_sets_2
		  && ! REG_USERVAR_P (SET_DEST (newpat))))
	    {
	      if (regno >= FIRST_PSEUDO_REGISTER)
		SUBST (regno_reg_rtx[regno], new_dest);

	      SUBST (SET_DEST (newpat), new_dest);
	      SUBST (XEXP (*cc_use, 0), new_dest);
	      SUBST (SET_SRC (newpat),
		     gen_rtx_combine (COMPARE, compare_mode,
				      i2src, const0_rtx));
	    }
	  else
	    undobuf.other_insn = 0;
	}
#endif
    }
  else
#endif
    {
      n_occurrences = 0;	/* `subst' counts here */

      /* If I1 feeds into I2 (not into I3) and I1DEST is in I1SRC, we
	 need to make a unique copy of I2SRC each time we substitute it
	 to avoid self-referential rtl.  */

      subst_low_cuid = INSN_CUID (i2);
      newpat = subst (PATTERN (i3), i2dest, i2src, 0,
		      ! i1_feeds_i3 && i1dest_in_i1src);
      previous_num_undos = undobuf.num_undo;

      /* Record whether i2's body now appears within i3's body.  */
      i2_is_used = n_occurrences;
    }

  /* If we already got a failure, don't try to do more.  Otherwise,
     try to substitute in I1 if we have it.  */

  if (i1 && GET_CODE (newpat) != CLOBBER)
    {
      /* Before we can do this substitution, we must redo the test done
	 above (see detailed comments there) that ensures that I1DEST
	 isn't mentioned in any SETs in NEWPAT that are field assignments.  */

      if (! combinable_i3pat (NULL_RTX, &newpat, i1dest, NULL_RTX,
			      0, NULL_PTR))
	{
	  undo_all ();
	  return 0;
	}

      n_occurrences = 0;
      subst_low_cuid = INSN_CUID (i1);
      newpat = subst (newpat, i1dest, i1src, 0, 0);
      previous_num_undos = undobuf.num_undo;
    }

  /* Fail if an autoincrement side-effect has been duplicated.  Be careful
     to count all the ways that I2SRC and I1SRC can be used.  */
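  /* For instance (an illustrative sketch), if I2SRC contained
     (post_inc:SI (reg:SI 71)) and it both remains in the kept copy of I2
     and was substituted into I3, the register would be incremented twice;
     the test below rejects such combinations.  */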
  if ((FIND_REG_INC_NOTE (i2, NULL_RTX) != 0
       && i2_is_used + added_sets_2 > 1)
      || (i1 != 0 && FIND_REG_INC_NOTE (i1, NULL_RTX) != 0
	  && (n_occurrences + added_sets_1 + (added_sets_2 && ! i1_feeds_i3)
	      > 1))
      /* Fail if we tried to make a new register (we used to abort, but
	 there's really no reason to).  */
      || max_reg_num () != maxreg
      /* Fail if we couldn't do something and have a CLOBBER.  */
      || GET_CODE (newpat) == CLOBBER
      /* Fail if this new pattern is a MULT and we didn't have one before
	 at the outer level.  */
      || (GET_CODE (newpat) == SET && GET_CODE (SET_SRC (newpat)) == MULT
	  && ! have_mult))
    {
      undo_all ();
      return 0;
    }

  /* If the actions of the earlier insns must be kept
     in addition to substituting them into the latest one,
     we must make a new PARALLEL for the latest insn
     to hold the additional SETs.  */

  if (added_sets_1 || added_sets_2)
    {
      combine_extras++;

      if (GET_CODE (newpat) == PARALLEL)
	{
	  rtvec old = XVEC (newpat, 0);
	  total_sets = XVECLEN (newpat, 0) + added_sets_1 + added_sets_2;
	  newpat = gen_rtx (PARALLEL, VOIDmode, rtvec_alloc (total_sets));
	  bcopy ((char *) &old->elem[0], (char *) &XVECEXP (newpat, 0, 0),
		 sizeof (old->elem[0]) * old->num_elem);
	}
      else
	{
	  rtx old = newpat;
	  total_sets = 1 + added_sets_1 + added_sets_2;
	  newpat = gen_rtx (PARALLEL, VOIDmode, rtvec_alloc (total_sets));
	  XVECEXP (newpat, 0, 0) = old;
	}

      if (added_sets_1)
	XVECEXP (newpat, 0, --total_sets)
	  = (GET_CODE (PATTERN (i1)) == PARALLEL
	     ? gen_rtx (SET, VOIDmode, i1dest, i1src) : PATTERN (i1));

      if (added_sets_2)
	{
	  /* If there is no I1, use I2's body as is.  We used to also not do
	     the subst call below if I2 was substituted into I3,
	     but that could lose a simplification.  */
	  if (i1 == 0)
	    XVECEXP (newpat, 0, --total_sets) = i2pat;
	  else
	    /* See comment where i2pat is assigned.  */
	    XVECEXP (newpat, 0, --total_sets)
	      = subst (i2pat, i1dest, i1src, 0, 0);
	}
    }

  /* We come here when we are replacing a destination in I2 with the
     destination of I3.  */
 validate_replacement:

  /* Note which hard regs this insn has as inputs.  */
  mark_used_regs_combine (newpat);

  /* Is the result of combination a valid instruction?  */
  insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);

  /* If the result isn't valid, see if it is a PARALLEL of two SETs where
     the second SET's destination is a register that is unused.  In that
     case, we just need the first SET.  This can occur when simplifying a
     divmod insn.  We *must* test for this case here because the code below
     that splits two independent SETs doesn't handle this case correctly
     when it updates the register status.  Also check the case where the
     first SET's destination is unused.  That would not cause incorrect
     code, but does cause an unneeded insn to remain.  */
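  /* For example (an illustrative sketch; register numbers are invented),
     a divmod PARALLEL such as

	 (parallel [(set (reg:SI 70) (div:SI (reg:SI 68) (reg:SI 69)))
		    (set (reg:SI 71) (mod:SI (reg:SI 68) (reg:SI 69)))])

     where (reg:SI 71) is noted as unused in I3 reduces to its first SET
     alone.  */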

  if (insn_code_number < 0 && GET_CODE (newpat) == PARALLEL
      && XVECLEN (newpat, 0) == 2
      && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
      && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
      && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) == REG
      && find_reg_note (i3, REG_UNUSED, SET_DEST (XVECEXP (newpat, 0, 1)))
      && ! side_effects_p (SET_SRC (XVECEXP (newpat, 0, 1)))
      && asm_noperands (newpat) < 0)
    {
      newpat = XVECEXP (newpat, 0, 0);
      insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
    }

  else if (insn_code_number < 0 && GET_CODE (newpat) == PARALLEL
	   && XVECLEN (newpat, 0) == 2
	   && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
	   && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) == REG
	   && find_reg_note (i3, REG_UNUSED, SET_DEST (XVECEXP (newpat, 0, 0)))
	   && ! side_effects_p (SET_SRC (XVECEXP (newpat, 0, 0)))
	   && asm_noperands (newpat) < 0)
    {
      newpat = XVECEXP (newpat, 0, 1);
      insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
    }

  /* If we were combining three insns and the result is a simple SET
     with no ASM_OPERANDS that wasn't recognized, try to split it into two
     insns.  There are two ways to do this.  It can be split using a
     machine-specific method (like when you have an addition of a large
     constant) or by combine in the function find_split_point.  */

  if (i1 && insn_code_number < 0 && GET_CODE (newpat) == SET
      && asm_noperands (newpat) < 0)
    {
      rtx m_split, *split;
      rtx ni2dest = i2dest;

      /* See if the MD file can split NEWPAT.  If it can't, see if letting it
	 use I2DEST as a scratch register will help.  In the latter case,
	 convert I2DEST to the mode of the source of NEWPAT if we can.  */

      m_split = split_insns (newpat, i3);

      /* We can only use I2DEST as a scratch reg if it doesn't overlap any
	 inputs of NEWPAT.  */

      /* ??? If I2DEST is not safe, and I1DEST exists, then it would be
	 possible to try that as a scratch reg.  This would require adding
	 more code to make it work though.  */

      if (m_split == 0 && ! reg_overlap_mentioned_p (ni2dest, newpat))
	{
	  /* If I2DEST is a hard register or the only use of a pseudo,
	     we can change its mode.  */
	  if (GET_MODE (SET_DEST (newpat)) != GET_MODE (i2dest)
	      && GET_MODE (SET_DEST (newpat)) != VOIDmode
	      && GET_CODE (i2dest) == REG
	      && (REGNO (i2dest) < FIRST_PSEUDO_REGISTER
		  || (reg_n_sets[REGNO (i2dest)] == 1 && ! added_sets_2
		      && ! REG_USERVAR_P (i2dest))))
	    ni2dest = gen_rtx (REG, GET_MODE (SET_DEST (newpat)),
			       REGNO (i2dest));

	  m_split = split_insns (gen_rtx (PARALLEL, VOIDmode,
					  gen_rtvec (2, newpat,
						     gen_rtx (CLOBBER,
							      VOIDmode,
							      ni2dest))),
				 i3);
	}

      if (m_split && GET_CODE (m_split) == SEQUENCE
	  && XVECLEN (m_split, 0) == 2
	  && (next_real_insn (i2) == i3
	      || ! use_crosses_set_p (PATTERN (XVECEXP (m_split, 0, 0)),
				      INSN_CUID (i2))))
	{
	  rtx i2set, i3set;
	  rtx newi3pat = PATTERN (XVECEXP (m_split, 0, 1));
	  newi2pat = PATTERN (XVECEXP (m_split, 0, 0));

	  i3set = single_set (XVECEXP (m_split, 0, 1));
	  i2set = single_set (XVECEXP (m_split, 0, 0));

	  /* In case we changed the mode of I2DEST, replace it in the
	     pseudo-register table here.  We can't do it above in case this
	     code doesn't get executed and we do a split the other way.  */

	  if (REGNO (i2dest) >= FIRST_PSEUDO_REGISTER)
	    SUBST (regno_reg_rtx[REGNO (i2dest)], ni2dest);

	  i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);

	  /* If I2 or I3 has multiple SETs, we won't know how to track
	     register status, so don't use these insns.  */

	  if (i2_code_number >= 0 && i2set && i3set)
	    insn_code_number = recog_for_combine (&newi3pat, i3,
						  &new_i3_notes);

	  if (insn_code_number >= 0)
	    newpat = newi3pat;

	  /* It is possible that both insns now set the destination of I3.
	     If so, we must show an extra use of it.  */

	  if (insn_code_number >= 0 && GET_CODE (SET_DEST (i3set)) == REG
	      && GET_CODE (SET_DEST (i2set)) == REG
	      && REGNO (SET_DEST (i3set)) == REGNO (SET_DEST (i2set)))
	    reg_n_sets[REGNO (SET_DEST (i2set))]++;
	}

      /* If we can split it and use I2DEST, go ahead and see if that
	 helps things be recognized.  Verify that none of the registers
	 are set between I2 and I3.  */
      if (insn_code_number < 0 && (split = find_split_point (&newpat, i3)) != 0
#ifdef HAVE_cc0
	  && GET_CODE (i2dest) == REG
#endif
	  /* We need I2DEST in the proper mode.  If it is a hard register
	     or the only use of a pseudo, we can change its mode.  */
	  && (GET_MODE (*split) == GET_MODE (i2dest)
	      || GET_MODE (*split) == VOIDmode
	      || REGNO (i2dest) < FIRST_PSEUDO_REGISTER
	      || (reg_n_sets[REGNO (i2dest)] == 1 && ! added_sets_2
		  && ! REG_USERVAR_P (i2dest)))
	  && (next_real_insn (i2) == i3
	      || ! use_crosses_set_p (*split, INSN_CUID (i2)))
	  /* We can't overwrite I2DEST if its value is still used by
	     NEWPAT.  */
	  && ! reg_referenced_p (i2dest, newpat))
	{
	  rtx newdest = i2dest;
	  enum rtx_code split_code = GET_CODE (*split);
	  enum machine_mode split_mode = GET_MODE (*split);

	  /* Get NEWDEST as a register in the proper mode.  We have already
	     validated that we can do this.  */
	  if (GET_MODE (i2dest) != split_mode && split_mode != VOIDmode)
	    {
	      newdest = gen_rtx (REG, split_mode, REGNO (i2dest));

	      if (REGNO (i2dest) >= FIRST_PSEUDO_REGISTER)
		SUBST (regno_reg_rtx[REGNO (i2dest)], newdest);
	    }

	  /* If *SPLIT is a (mult FOO (const_int pow2)), convert it to
	     an ASHIFT.  This can occur if it was inside a PLUS and hence
	     appeared to be a memory address.  This is a kludge.  */
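	  /* For example, a split point of (mult FOO (const_int 8)) is
	     rewritten here as (ashift FOO (const_int 3)), since
	     exact_log2 (8) == 3.  */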
	  if (split_code == MULT
	      && GET_CODE (XEXP (*split, 1)) == CONST_INT
	      && (i = exact_log2 (INTVAL (XEXP (*split, 1)))) >= 0)
	    SUBST (*split, gen_rtx_combine (ASHIFT, split_mode,
					    XEXP (*split, 0), GEN_INT (i)));

#ifdef INSN_SCHEDULING
	  /* If *SPLIT is a paradoxical SUBREG, when we split it, it should
	     be written as a ZERO_EXTEND.  */
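	  /* E.g. (subreg:SI (mem:QI ADDR) 0), with SImode wider than QImode,
	     becomes (zero_extend:SI (mem:QI ADDR)); ADDR is just a
	     placeholder here.  */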
	  if (split_code == SUBREG && GET_CODE (SUBREG_REG (*split)) == MEM)
	    SUBST (*split, gen_rtx_combine (ZERO_EXTEND, split_mode,
					    XEXP (*split, 0)));
#endif

	  newi2pat = gen_rtx_combine (SET, VOIDmode, newdest, *split);
	  SUBST (*split, newdest);
	  i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);

	  /* If the split point was a MULT and we didn't have one before,
	     don't use one now.  */
	  if (i2_code_number >= 0 && ! (split_code == MULT && ! have_mult))
	    insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
	}
    }

  /* Check for a case where we loaded from memory in a narrow mode and
     then sign extended it, but we need both registers.  In that case,
     we have a PARALLEL with both loads from the same memory location.
     We can split this into a load from memory followed by a
     register-register copy.  This saves at least one insn, more if
     register allocation can eliminate the copy.

     We cannot do this if the destination of the second assignment is
     a register that we have already assumed is zero-extended.  Similarly
     for a SUBREG of such a register.  */

  else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0
	   && GET_CODE (newpat) == PARALLEL
	   && XVECLEN (newpat, 0) == 2
	   && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
	   && GET_CODE (SET_SRC (XVECEXP (newpat, 0, 0))) == SIGN_EXTEND
	   && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
	   && rtx_equal_p (SET_SRC (XVECEXP (newpat, 0, 1)),
			   XEXP (SET_SRC (XVECEXP (newpat, 0, 0)), 0))
	   && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 1)),
				   INSN_CUID (i2))
	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT
	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART
	   && ! (temp = SET_DEST (XVECEXP (newpat, 0, 1)),
		 (GET_CODE (temp) == REG
		  && reg_nonzero_bits[REGNO (temp)] != 0
		  && GET_MODE_BITSIZE (GET_MODE (temp)) < BITS_PER_WORD
		  && GET_MODE_BITSIZE (GET_MODE (temp)) < HOST_BITS_PER_INT
		  && (reg_nonzero_bits[REGNO (temp)]
		      != GET_MODE_MASK (word_mode))))
	   && ! (GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) == SUBREG
		 && (temp = SUBREG_REG (SET_DEST (XVECEXP (newpat, 0, 1))),
		     (GET_CODE (temp) == REG
		      && reg_nonzero_bits[REGNO (temp)] != 0
		      && GET_MODE_BITSIZE (GET_MODE (temp)) < BITS_PER_WORD
		      && GET_MODE_BITSIZE (GET_MODE (temp)) < HOST_BITS_PER_INT
		      && (reg_nonzero_bits[REGNO (temp)]
			  != GET_MODE_MASK (word_mode)))))
	   && ! reg_overlap_mentioned_p (SET_DEST (XVECEXP (newpat, 0, 1)),
					 SET_SRC (XVECEXP (newpat, 0, 1)))
	   && ! find_reg_note (i3, REG_UNUSED,
			       SET_DEST (XVECEXP (newpat, 0, 0))))
    {
      rtx ni2dest;

      newi2pat = XVECEXP (newpat, 0, 0);
      ni2dest = SET_DEST (XVECEXP (newpat, 0, 0));
      newpat = XVECEXP (newpat, 0, 1);
      SUBST (SET_SRC (newpat),
	     gen_lowpart_for_combine (GET_MODE (SET_SRC (newpat)), ni2dest));
      i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
      if (i2_code_number >= 0)
	insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);

      if (insn_code_number >= 0)
	{
	  rtx insn;
	  rtx link;

	  /* If we will be able to accept this, we have made a change to the
	     destination of I3.  This can invalidate a LOG_LINKS pointing
	     to I3.  No other part of combine.c makes such a transformation.

	     The new I3 will have a destination that was previously the
	     destination of I1 or I2 and which was used in I2 or I3.  Call
	     distribute_links to make a LOG_LINK from the next use of
	     that destination.  */

	  PATTERN (i3) = newpat;
	  distribute_links (gen_rtx (INSN_LIST, VOIDmode, i3, NULL_RTX));

	  /* I3 now uses what used to be its destination and which is
	     now I2's destination.  That means we need a LOG_LINK from
	     I3 to I2.  But we used to have one, so we still will.

	     However, some later insn might be using I2's dest and have
	     a LOG_LINK pointing at I3.  We must remove this link.
	     The simplest way to remove the link is to point it at I1,
	     which we know will be a NOTE.  */

	  for (insn = NEXT_INSN (i3);
	       insn && (this_basic_block == n_basic_blocks - 1
			|| insn != basic_block_head[this_basic_block + 1]);
	       insn = NEXT_INSN (insn))
	    {
	      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
		  && reg_referenced_p (ni2dest, PATTERN (insn)))
		{
		  for (link = LOG_LINKS (insn); link;
		       link = XEXP (link, 1))
		    if (XEXP (link, 0) == i3)
		      XEXP (link, 0) = i1;

		  break;
		}
	    }
	}
    }

  /* Similarly, check for a case where we have a PARALLEL of two independent
     SETs but we started with three insns.  In this case, we can do the sets
     as two separate insns.  This case occurs when some SET allows two
     other insns to combine, but the destination of that SET is still live.  */

  else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0
	   && GET_CODE (newpat) == PARALLEL
	   && XVECLEN (newpat, 0) == 2
	   && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != ZERO_EXTRACT
	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != STRICT_LOW_PART
	   && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT
	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART
	   && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 1)),
				   INSN_CUID (i2))
	   /* Don't pass sets with (USE (MEM ...)) dests to the following.  */
	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != USE
	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != USE
	   && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 1)),
				  XVECEXP (newpat, 0, 0))
	   && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 0)),
				  XVECEXP (newpat, 0, 1)))
    {
      newi2pat = XVECEXP (newpat, 0, 1);
      newpat = XVECEXP (newpat, 0, 0);

      i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
      if (i2_code_number >= 0)
	insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
    }

  /* If it still isn't recognized, fail and change things back the way they
     were.  */
  if ((insn_code_number < 0
       /* Is the result a reasonable ASM_OPERANDS?  */
       && (! check_asm_operands (newpat) || added_sets_1 || added_sets_2)))
    {
      undo_all ();
      return 0;
    }

  /* If we had to change another insn, make sure it is valid also.  */
  if (undobuf.other_insn)
    {
      rtx other_pat = PATTERN (undobuf.other_insn);
      rtx new_other_notes;
      rtx note, next;

      CLEAR_HARD_REG_SET (newpat_used_regs);

      other_code_number = recog_for_combine (&other_pat, undobuf.other_insn,
					     &new_other_notes);

      if (other_code_number < 0 && ! check_asm_operands (other_pat))
	{
	  undo_all ();
	  return 0;
	}

      PATTERN (undobuf.other_insn) = other_pat;

      /* If any of the notes in OTHER_INSN were REG_UNUSED, ensure that they
	 are still valid.  Then add any non-duplicate notes added by
	 recog_for_combine.  */
      for (note = REG_NOTES (undobuf.other_insn); note; note = next)
	{
	  next = XEXP (note, 1);

	  if (REG_NOTE_KIND (note) == REG_UNUSED
	      && ! reg_set_p (XEXP (note, 0), PATTERN (undobuf.other_insn)))
	    {
	      if (GET_CODE (XEXP (note, 0)) == REG)
		reg_n_deaths[REGNO (XEXP (note, 0))]--;

	      remove_note (undobuf.other_insn, note);
	    }
	}

      for (note = new_other_notes; note; note = XEXP (note, 1))
	if (GET_CODE (XEXP (note, 0)) == REG)
	  reg_n_deaths[REGNO (XEXP (note, 0))]++;

      distribute_notes (new_other_notes, undobuf.other_insn,
			undobuf.other_insn, NULL_RTX, NULL_RTX, NULL_RTX);
    }

  /* We now know that we can do this combination.  Merge the insns and
     update the status of registers and LOG_LINKS.  */

  {
    rtx i3notes, i2notes, i1notes = 0;
    rtx i3links, i2links, i1links = 0;
    rtx midnotes = 0;
    register int regno;
    /* Compute which registers we expect to eliminate.  */
    rtx elim_i2 = (newi2pat || i2dest_in_i2src || i2dest_in_i1src
		   ? 0 : i2dest);
    rtx elim_i1 = i1 == 0 || i1dest_in_i1src ? 0 : i1dest;

    /* Get the old REG_NOTES and LOG_LINKS from all our insns and
       clear them.  */
    i3notes = REG_NOTES (i3), i3links = LOG_LINKS (i3);
    i2notes = REG_NOTES (i2), i2links = LOG_LINKS (i2);
    if (i1)
      i1notes = REG_NOTES (i1), i1links = LOG_LINKS (i1);

    /* Ensure that we do not have something that should not be shared but
       occurs multiple times in the new insns.  Check this by first
       resetting all the `used' flags and then copying anything that is
       shared.  */

    reset_used_flags (i3notes);
    reset_used_flags (i2notes);
    reset_used_flags (i1notes);
    reset_used_flags (newpat);
    reset_used_flags (newi2pat);
    if (undobuf.other_insn)
      reset_used_flags (PATTERN (undobuf.other_insn));

    i3notes = copy_rtx_if_shared (i3notes);
    i2notes = copy_rtx_if_shared (i2notes);
    i1notes = copy_rtx_if_shared (i1notes);
    newpat = copy_rtx_if_shared (newpat);
    newi2pat = copy_rtx_if_shared (newi2pat);
    if (undobuf.other_insn)
      reset_used_flags (PATTERN (undobuf.other_insn));

    INSN_CODE (i3) = insn_code_number;
    PATTERN (i3) = newpat;
    if (undobuf.other_insn)
      INSN_CODE (undobuf.other_insn) = other_code_number;

    /* We had one special case above where I2 had more than one set and
       we replaced a destination of one of those sets with the destination
       of I3.  In that case, we have to update LOG_LINKS of insns later
       in this basic block.  Note that this (expensive) case is rare.

       Also, in this case, we must pretend that all REG_NOTEs for I2
       actually came from I3, so that REG_UNUSED notes from I2 will be
       properly handled.  */
    if (i3_subst_into_i2)
      {
	for (i = 0; i < XVECLEN (PATTERN (i2), 0); i++)
	  if (GET_CODE (SET_DEST (XVECEXP (PATTERN (i2), 0, i))) == REG
	      && SET_DEST (XVECEXP (PATTERN (i2), 0, i)) != i2dest
	      && ! find_reg_note (i2, REG_UNUSED,
				  SET_DEST (XVECEXP (PATTERN (i2), 0, i))))
	    for (temp = NEXT_INSN (i2);
		 temp && (this_basic_block == n_basic_blocks - 1
			  || basic_block_head[this_basic_block + 1] != temp);
		 temp = NEXT_INSN (temp))
	      if (temp != i3 && GET_RTX_CLASS (GET_CODE (temp)) == 'i')
		for (link = LOG_LINKS (temp); link; link = XEXP (link, 1))
		  if (XEXP (link, 0) == i2)
		    XEXP (link, 0) = i3;

	if (i3notes)
	  {
	    rtx link = i3notes;
	    while (XEXP (link, 1))
	      link = XEXP (link, 1);
	    XEXP (link, 1) = i2notes;
	  }
	else
	  i3notes = i2notes;
	i2notes = 0;
      }

    LOG_LINKS (i3) = 0;
    REG_NOTES (i3) = 0;
    LOG_LINKS (i2) = 0;
    REG_NOTES (i2) = 0;

    if (newi2pat)
      {
	INSN_CODE (i2) = i2_code_number;
	PATTERN (i2) = newi2pat;
      }
    else
      {
	PUT_CODE (i2, NOTE);
	NOTE_LINE_NUMBER (i2) = NOTE_INSN_DELETED;
	NOTE_SOURCE_FILE (i2) = 0;
      }

    if (i1)
      {
	LOG_LINKS (i1) = 0;
	REG_NOTES (i1) = 0;
	PUT_CODE (i1, NOTE);
	NOTE_LINE_NUMBER (i1) = NOTE_INSN_DELETED;
	NOTE_SOURCE_FILE (i1) = 0;
      }

    /* Get death notes for everything that is now used in either I3 or
       I2 and used to die in a previous insn.  */

    move_deaths (newpat, i1 ? INSN_CUID (i1) : INSN_CUID (i2), i3, &midnotes);
    if (newi2pat)
      move_deaths (newi2pat, INSN_CUID (i1), i2, &midnotes);

    /* Distribute all the LOG_LINKS and REG_NOTES from I1, I2, and I3.  */
    if (i3notes)
      distribute_notes (i3notes, i3, i3, newi2pat ? i2 : NULL_RTX,
			elim_i2, elim_i1);
    if (i2notes)
      distribute_notes (i2notes, i2, i3, newi2pat ? i2 : NULL_RTX,
			elim_i2, elim_i1);
    if (i1notes)
      distribute_notes (i1notes, i1, i3, newi2pat ? i2 : NULL_RTX,
			elim_i2, elim_i1);
    if (midnotes)
      distribute_notes (midnotes, NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
			elim_i2, elim_i1);

    /* Distribute any notes added to I2 or I3 by recog_for_combine.  We
       know these are REG_UNUSED and want them to go to the desired insn,
       so we always pass it as i3.  We have not counted the notes in
       reg_n_deaths yet, so we need to do so now.  */

    if (newi2pat && new_i2_notes)
      {
	for (temp = new_i2_notes; temp; temp = XEXP (temp, 1))
	  if (GET_CODE (XEXP (temp, 0)) == REG)
	    reg_n_deaths[REGNO (XEXP (temp, 0))]++;

	distribute_notes (new_i2_notes, i2, i2, NULL_RTX, NULL_RTX, NULL_RTX);
      }

    if (new_i3_notes)
      {
	for (temp = new_i3_notes; temp; temp = XEXP (temp, 1))
	  if (GET_CODE (XEXP (temp, 0)) == REG)
	    reg_n_deaths[REGNO (XEXP (temp, 0))]++;

	distribute_notes (new_i3_notes, i3, i3, NULL_RTX, NULL_RTX, NULL_RTX);
      }

    /* If I3DEST was used in I3SRC, it really died in I3.  We may need to
       put a REG_DEAD note for it somewhere.  Similarly for I2 and I1.
       Show an additional death due to the REG_DEAD note we make here.  If
       we discard it in distribute_notes, we will decrement it again.  */

    if (i3dest_killed)
      {
	if (GET_CODE (i3dest_killed) == REG)
	  reg_n_deaths[REGNO (i3dest_killed)]++;

	distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i3dest_killed,
				   NULL_RTX),
			  NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
			  NULL_RTX, NULL_RTX);
      }

    /* For I2 and I1, we have to be careful.  If NEWI2PAT exists and sets
       I2DEST or I1DEST, the death must be somewhere before I2, not I3.  If
       we passed I3 in that case, it might delete I2.  */

    if (i2dest_in_i2src)
      {
	if (GET_CODE (i2dest) == REG)
	  reg_n_deaths[REGNO (i2dest)]++;

	if (newi2pat && reg_set_p (i2dest, newi2pat))
	  distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i2dest, NULL_RTX),
			    NULL_RTX, i2, NULL_RTX, NULL_RTX, NULL_RTX);
	else
	  distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i2dest, NULL_RTX),
			    NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
			    NULL_RTX, NULL_RTX);
      }

    if (i1dest_in_i1src)
      {
	if (GET_CODE (i1dest) == REG)
	  reg_n_deaths[REGNO (i1dest)]++;

	if (newi2pat && reg_set_p (i1dest, newi2pat))
	  distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i1dest, NULL_RTX),
			    NULL_RTX, i2, NULL_RTX, NULL_RTX, NULL_RTX);
	else
	  distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i1dest, NULL_RTX),
			    NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
			    NULL_RTX, NULL_RTX);
      }

    distribute_links (i3links);
    distribute_links (i2links);
    distribute_links (i1links);

    if (GET_CODE (i2dest) == REG)
      {
	rtx link;
	rtx i2_insn = 0, i2_val = 0, set;

	/* The insn that used to set this register doesn't exist, and
	   this life of the register may not exist either.  See if one of
	   I3's links points to an insn that sets I2DEST.  If it does,
	   that is now the last known value for I2DEST.  If we don't update
	   this and I2 set the register to a value that depended on its old
	   contents, we will get confused.  If this insn is used, things
	   will be set correctly in combine_instructions.  */

	for (link = LOG_LINKS (i3); link; link = XEXP (link, 1))
	  if ((set = single_set (XEXP (link, 0))) != 0
	      && rtx_equal_p (i2dest, SET_DEST (set)))
	    i2_insn = XEXP (link, 0), i2_val = SET_SRC (set);

	record_value_for_reg (i2dest, i2_insn, i2_val);

	/* If the reg formerly set in I2 died only once and that was in I3,
	   zero its use count so it won't make `reload' do any work.  */
	if (! added_sets_2 && newi2pat == 0 && ! i2dest_in_i2src)
	  {
	    regno = REGNO (i2dest);
	    reg_n_sets[regno]--;
	    if (reg_n_sets[regno] == 0
		&& ! (basic_block_live_at_start[0][regno / REGSET_ELT_BITS]
		      & ((REGSET_ELT_TYPE) 1 << (regno % REGSET_ELT_BITS))))
	      reg_n_refs[regno] = 0;
	  }
      }

    if (i1 && GET_CODE (i1dest) == REG)
      {
	rtx link;
	rtx i1_insn = 0, i1_val = 0, set;

	for (link = LOG_LINKS (i3); link; link = XEXP (link, 1))
	  if ((set = single_set (XEXP (link, 0))) != 0
	      && rtx_equal_p (i1dest, SET_DEST (set)))
	    i1_insn = XEXP (link, 0), i1_val = SET_SRC (set);

	record_value_for_reg (i1dest, i1_insn, i1_val);

	regno = REGNO (i1dest);
	if (! added_sets_1 && ! i1dest_in_i1src)
	  {
	    reg_n_sets[regno]--;
	    if (reg_n_sets[regno] == 0
		&& ! (basic_block_live_at_start[0][regno / REGSET_ELT_BITS]
		      & ((REGSET_ELT_TYPE) 1 << (regno % REGSET_ELT_BITS))))
	      reg_n_refs[regno] = 0;
	  }
      }

    /* Update reg_nonzero_bits et al for any changes that may have been made
       to this insn.  */

    note_stores (newpat, set_nonzero_bits_and_sign_copies);
    if (newi2pat)
      note_stores (newi2pat, set_nonzero_bits_and_sign_copies);

    /* If I3 is now an unconditional jump, ensure that it has a
       BARRIER following it since it may have initially been a
       conditional jump.  It may also be the last nonnote insn.  */

    if ((GET_CODE (newpat) == RETURN || simplejump_p (i3))
	&& ((temp = next_nonnote_insn (i3)) == NULL_RTX
	    || GET_CODE (temp) != BARRIER))
      emit_barrier_after (i3);
  }

  combine_successes++;

  if (added_links_insn
      && (newi2pat == 0 || INSN_CUID (added_links_insn) < INSN_CUID (i2))
      && INSN_CUID (added_links_insn) < INSN_CUID (i3))
    return added_links_insn;
  else
    return newi2pat ? i2 : i3;
}
\f
/* Undo all the modifications recorded in undobuf.  */

static void
undo_all ()
{
  register int i;
  if (undobuf.num_undo > MAX_UNDO)
    undobuf.num_undo = MAX_UNDO;
  for (i = undobuf.num_undo - 1; i >= 0; i--)
    {
      if (undobuf.undo[i].is_int)
	*undobuf.undo[i].where.i = undobuf.undo[i].old_contents.i;
      else
	*undobuf.undo[i].where.r = undobuf.undo[i].old_contents.r;
    }

  obfree (undobuf.storage);
  undobuf.num_undo = 0;
}
\f
/* Find the innermost point within the rtx at LOC, possibly LOC itself,
   where we have an arithmetic expression and return that point.  LOC will
   be inside INSN.

   try_combine will call this function to see if an insn can be split into
   two insns.  */

static rtx *
find_split_point (loc, insn)
     rtx *loc;
     rtx insn;
{
  rtx x = *loc;
  enum rtx_code code = GET_CODE (x);
  rtx *split;
  int len = 0, pos, unsignedp;
  rtx inner;

  /* First special-case some codes.  */
  switch (code)
    {
    case SUBREG:
#ifdef INSN_SCHEDULING
      /* If we are making a paradoxical SUBREG invalid, it becomes a split
	 point.  */
      if (GET_CODE (SUBREG_REG (x)) == MEM)
	return loc;
#endif
      return find_split_point (&SUBREG_REG (x), insn);

    case MEM:
#ifdef HAVE_lo_sum
      /* If we have (mem (const ..)) or (mem (symbol_ref ...)), split it
	 using LO_SUM and HIGH.  */
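      /* E.g. (mem (symbol_ref X)) becomes
	     (mem (lo_sum (high (symbol_ref X)) (symbol_ref X)))
	 and the HIGH term is what is returned as the split point.
	 (X is only a placeholder.)  */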
      if (GET_CODE (XEXP (x, 0)) == CONST
	  || GET_CODE (XEXP (x, 0)) == SYMBOL_REF)
	{
	  SUBST (XEXP (x, 0),
		 gen_rtx_combine (LO_SUM, Pmode,
				  gen_rtx_combine (HIGH, Pmode, XEXP (x, 0)),
				  XEXP (x, 0)));
	  return &XEXP (XEXP (x, 0), 0);
	}
#endif

      /* If we have a PLUS whose second operand is a constant and the
	 address is not valid, perhaps we can split it up using
	 the machine-specific way to split large constants.  We use
	 the first pseudo-reg (one of the virtual regs) as a placeholder;
	 it will not remain in the result.  */
      if (GET_CODE (XEXP (x, 0)) == PLUS
	  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
	  && ! memory_address_p (GET_MODE (x), XEXP (x, 0)))
	{
	  rtx reg = regno_reg_rtx[FIRST_PSEUDO_REGISTER];
	  rtx seq = split_insns (gen_rtx (SET, VOIDmode, reg, XEXP (x, 0)),
				 subst_insn);

	  /* This should have produced two insns, each of which sets our
	     placeholder.  If the source of the second is a valid address,
	     we can put both sources together and make a split point
	     in the middle.  */

	  if (seq && XVECLEN (seq, 0) == 2
	      && GET_CODE (XVECEXP (seq, 0, 0)) == INSN
	      && GET_CODE (PATTERN (XVECEXP (seq, 0, 0))) == SET
	      && SET_DEST (PATTERN (XVECEXP (seq, 0, 0))) == reg
	      && ! reg_mentioned_p (reg,
				    SET_SRC (PATTERN (XVECEXP (seq, 0, 0))))
	      && GET_CODE (XVECEXP (seq, 0, 1)) == INSN
	      && GET_CODE (PATTERN (XVECEXP (seq, 0, 1))) == SET
	      && SET_DEST (PATTERN (XVECEXP (seq, 0, 1))) == reg
	      && memory_address_p (GET_MODE (x),
				   SET_SRC (PATTERN (XVECEXP (seq, 0, 1)))))
	    {
	      rtx src1 = SET_SRC (PATTERN (XVECEXP (seq, 0, 0)));
	      rtx src2 = SET_SRC (PATTERN (XVECEXP (seq, 0, 1)));

	      /* Replace the placeholder in SRC2 with SRC1.  If we can
		 find where in SRC2 it was placed, that can become our
		 split point and we can replace this address with SRC2.
		 Just try two obvious places.  */

	      src2 = replace_rtx (src2, reg, src1);
	      split = 0;
	      if (XEXP (src2, 0) == src1)
		split = &XEXP (src2, 0);
	      else if (GET_RTX_FORMAT (GET_CODE (XEXP (src2, 0)))[0] == 'e'
		       && XEXP (XEXP (src2, 0), 0) == src1)
		split = &XEXP (XEXP (src2, 0), 0);

	      if (split)
		{
		  SUBST (XEXP (x, 0), src2);
		  return split;
		}
	    }

	  /* If that didn't work, perhaps the first operand is complex and
	     needs to be computed separately, so make a split point there.
	     This will occur on machines that just support REG + CONST
	     and have a constant moved through some previous computation.  */

	  else if (GET_RTX_CLASS (GET_CODE (XEXP (XEXP (x, 0), 0))) != 'o'
		   && ! (GET_CODE (XEXP (XEXP (x, 0), 0)) == SUBREG
			 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (XEXP (x, 0), 0))))
			     == 'o')))
	    return &XEXP (XEXP (x, 0), 0);
	}
      break;

    case SET:
#ifdef HAVE_cc0
      /* If SET_DEST is CC0 and SET_SRC is not an operand, a COMPARE, or a
	 ZERO_EXTRACT, the most likely reason why this doesn't match is that
	 we need to put the operand into a register.  So split at that
	 point.  */

      if (SET_DEST (x) == cc0_rtx
	  && GET_CODE (SET_SRC (x)) != COMPARE
	  && GET_CODE (SET_SRC (x)) != ZERO_EXTRACT
	  && GET_RTX_CLASS (GET_CODE (SET_SRC (x))) != 'o'
	  && ! (GET_CODE (SET_SRC (x)) == SUBREG
		&& GET_RTX_CLASS (GET_CODE (SUBREG_REG (SET_SRC (x)))) == 'o'))
	return &SET_SRC (x);
#endif

      /* See if we can split SET_SRC as it stands.  */
      split = find_split_point (&SET_SRC (x), insn);
      if (split && split != &SET_SRC (x))
	return split;

      /* See if this is a bitfield assignment with everything constant.  If
	 so, this is an IOR of an AND, so split it into that.  */
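      /* For instance (an illustrative sketch), storing the constant 2 in a
	 2-bit field at bit position 4 of DEST becomes
	     (ior (and DEST (const_int -49)) (const_int 32))
	 since ~(3 << 4) == -49 and 2 << 4 == 32.  */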
      if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
	  && (GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
	      <= HOST_BITS_PER_WIDE_INT)
	  && GET_CODE (XEXP (SET_DEST (x), 1)) == CONST_INT
	  && GET_CODE (XEXP (SET_DEST (x), 2)) == CONST_INT
	  && GET_CODE (SET_SRC (x)) == CONST_INT
	  && ((INTVAL (XEXP (SET_DEST (x), 1))
	       + INTVAL (XEXP (SET_DEST (x), 2)))
	      <= GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0))))
	  && ! side_effects_p (XEXP (SET_DEST (x), 0)))
	{
	  int pos = INTVAL (XEXP (SET_DEST (x), 2));
	  int len = INTVAL (XEXP (SET_DEST (x), 1));
	  int src = INTVAL (SET_SRC (x));
	  rtx dest = XEXP (SET_DEST (x), 0);
	  enum machine_mode mode = GET_MODE (dest);
	  unsigned HOST_WIDE_INT mask = ((HOST_WIDE_INT) 1 << len) - 1;

#if BITS_BIG_ENDIAN
	  pos = GET_MODE_BITSIZE (mode) - len - pos;
#endif

	  if (src == mask)
	    SUBST (SET_SRC (x),
		   gen_binary (IOR, mode, dest, GEN_INT (src << pos)));
	  else
	    SUBST (SET_SRC (x),
		   gen_binary (IOR, mode,
			       gen_binary (AND, mode, dest,
					   GEN_INT (~ (mask << pos)
						    & GET_MODE_MASK (mode))),
			       GEN_INT (src << pos)));

	  SUBST (SET_DEST (x), dest);

	  split = find_split_point (&SET_SRC (x), insn);
	  if (split && split != &SET_SRC (x))
	    return split;
	}

      /* Otherwise, see if this is an operation that we can split into two.
	 If so, try to split that.  */
      code = GET_CODE (SET_SRC (x));

      switch (code)
	{
	case AND:
	  /* If we are AND'ing with a large constant that is only a single
	     bit and the result is only being used in a context where we
	     need to know if it is zero or non-zero, replace it with a bit
	     extraction.  This will avoid the large constant, which might
	     have taken more than one insn to make.  If the constant were
	     not a valid argument to the AND but took only one insn to make,
	     this is no worse, but if it took more than one insn, it will
	     be better.  */
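	  /* E.g. (and (reg:SI 68) (const_int 4096)), used only to compare
	     against zero, can be replaced by a one-bit field extraction at
	     bit 12, since exact_log2 (4096) == 12.  (Illustrative sketch
	     only.)  */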

	  if (GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
	      && GET_CODE (XEXP (SET_SRC (x), 0)) == REG
	      && (pos = exact_log2 (INTVAL (XEXP (SET_SRC (x), 1)))) >= 7
	      && GET_CODE (SET_DEST (x)) == REG
	      && (split = find_single_use (SET_DEST (x), insn, NULL_PTR)) != 0
	      && (GET_CODE (*split) == EQ || GET_CODE (*split) == NE)
	      && XEXP (*split, 0) == SET_DEST (x)
	      && XEXP (*split, 1) == const0_rtx)
	    {
	      SUBST (SET_SRC (x),
		     make_extraction (GET_MODE (SET_DEST (x)),
				      XEXP (SET_SRC (x), 0),
				      pos, NULL_RTX, 1, 1, 0, 0));
	      return find_split_point (loc, insn);
	    }
	  break;

	case SIGN_EXTEND:
	  inner = XEXP (SET_SRC (x), 0);
	  pos = 0;
	  len = GET_MODE_BITSIZE (GET_MODE (inner));
	  unsignedp = 0;
	  break;

	case SIGN_EXTRACT:
	case ZERO_EXTRACT:
	  if (GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
	      && GET_CODE (XEXP (SET_SRC (x), 2)) == CONST_INT)
	    {
	      inner = XEXP (SET_SRC (x), 0);
	      len = INTVAL (XEXP (SET_SRC (x), 1));
	      pos = INTVAL (XEXP (SET_SRC (x), 2));

#if BITS_BIG_ENDIAN
	      pos = GET_MODE_BITSIZE (GET_MODE (inner)) - len - pos;
#endif
	      unsignedp = (code == ZERO_EXTRACT);
	    }
	  break;
	}

      if (len && pos >= 0 && pos + len <= GET_MODE_BITSIZE (GET_MODE (inner)))
	{
	  enum machine_mode mode = GET_MODE (SET_SRC (x));

	  /* For unsigned, we have a choice of a shift followed by an
	     AND or two shifts.  Use two shifts for field sizes where the
	     constant might be too large.  We assume here that we can
	     always at least get 8-bit constants in an AND insn, which is
	     true for every current RISC.  */
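	  /* E.g. extracting an unsigned 3-bit field at bit 2 of an SImode
	     value becomes (and (lshiftrt X 2) 7), while a 20-bit field at
	     bit 0 becomes (lshiftrt (ashift X 12) 12) to avoid the large
	     AND mask.  (Illustrative values only.)  */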

	  if (unsignedp && len <= 8)
	    {
	      SUBST (SET_SRC (x),
		     gen_rtx_combine
		     (AND, mode,
		      gen_rtx_combine (LSHIFTRT, mode,
				       gen_lowpart_for_combine (mode, inner),
				       GEN_INT (pos)),
		      GEN_INT (((HOST_WIDE_INT) 1 << len) - 1)));

	      split = find_split_point (&SET_SRC (x), insn);
	      if (split && split != &SET_SRC (x))
		return split;
	    }
	  else
	    {
	      SUBST (SET_SRC (x),
		     gen_rtx_combine
		     (unsignedp ? LSHIFTRT : ASHIFTRT, mode,
		      gen_rtx_combine (ASHIFT, mode,
				       gen_lowpart_for_combine (mode, inner),
				       GEN_INT (GET_MODE_BITSIZE (mode)
						- len - pos)),
		      GEN_INT (GET_MODE_BITSIZE (mode) - len)));

	      split = find_split_point (&SET_SRC (x), insn);
	      if (split && split != &SET_SRC (x))
		return split;
	    }
	}

      /* See if this is a simple operation with a constant as the second
	 operand.  It might be that this constant is out of range and hence
	 could be used as a split point.  */
      if ((GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '2'
	   || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == 'c'
	   || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '<')
	  && CONSTANT_P (XEXP (SET_SRC (x), 1))
	  && (GET_RTX_CLASS (GET_CODE (XEXP (SET_SRC (x), 0))) == 'o'
	      || (GET_CODE (XEXP (SET_SRC (x), 0)) == SUBREG
		  && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (SET_SRC (x), 0))))
		      == 'o'))))
	return &XEXP (SET_SRC (x), 1);

      /* Finally, see if this is a simple operation with its first operand
	 not in a register.  The operation might require this operand in a
	 register, so return it as a split point.  We can always do this
	 because if the first operand were another operation, we would have
	 already found it as a split point.  */
      if ((GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '2'
	   || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == 'c'
	   || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '<'
	   || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '1')
	  && ! register_operand (XEXP (SET_SRC (x), 0), VOIDmode))
	return &XEXP (SET_SRC (x), 0);

      return 0;

    case AND:
    case IOR:
      /* We write NOR as (and (not A) (not B)), but if we don't have a NOR,
	 it is better to write this as (not (ior A B)) so we can split it.
	 Similarly for IOR.  */
      if (GET_CODE (XEXP (x, 0)) == NOT && GET_CODE (XEXP (x, 1)) == NOT)
	{
	  SUBST (*loc,
		 gen_rtx_combine (NOT, GET_MODE (x),
				  gen_rtx_combine (code == IOR ? AND : IOR,
						   GET_MODE (x),
						   XEXP (XEXP (x, 0), 0),
						   XEXP (XEXP (x, 1), 0))));
	  return find_split_point (loc, insn);
	}

      /* Many RISC machines have a large set of logical insns.  If the
	 second operand is a NOT, put it first so we will try to split the
	 other operand first.  */
      if (GET_CODE (XEXP (x, 1)) == NOT)
	{
	  rtx tem = XEXP (x, 0);
	  SUBST (XEXP (x, 0), XEXP (x, 1));
	  SUBST (XEXP (x, 1), tem);
	}
      break;
    }

  /* Otherwise, select our actions depending on our rtx class.  */
  switch (GET_RTX_CLASS (code))
    {
    case 'b':			/* This is ZERO_EXTRACT and SIGN_EXTRACT.  */
    case '3':
      split = find_split_point (&XEXP (x, 2), insn);
      if (split)
	return split;
      /* ... fall through ...  */
    case '2':
    case 'c':
    case '<':
      split = find_split_point (&XEXP (x, 1), insn);
      if (split)
	return split;
      /* ... fall through ...  */
    case '1':
      /* Some machines have (and (shift ...) ...) insns.  If X is not
	 an AND, but XEXP (X, 0) is, use it as our split point.  */
      if (GET_CODE (x) != AND && GET_CODE (XEXP (x, 0)) == AND)
	return &XEXP (x, 0);

      split = find_split_point (&XEXP (x, 0), insn);
      if (split)
	return split;
      return loc;
    }

  /* Otherwise, we don't have a split point.  */
  return 0;
}
\f
/* Throughout X, replace FROM with TO, and return the result.
   The result is TO if X is FROM;
   otherwise the result is X, but its contents may have been modified.
   If they were modified, a record was made in undobuf so that
   undo_all will (among other things) return X to its original state.

   If the number of changes necessary is too much to record to undo,
   the excess changes are not made, so the result is invalid.
   The changes already made can still be undone.
   undobuf.num_undo is incremented for such changes, so by testing that
   the caller can tell whether the result is valid.

   `n_occurrences' is incremented each time FROM is replaced.

   IN_DEST is non-zero if we are processing the SET_DEST of a SET.

   UNIQUE_COPY is non-zero if each substitution must be unique.  We do this
   by copying if `n_occurrences' is non-zero.  */

static rtx
subst (x, from, to, in_dest, unique_copy)
     register rtx x, from, to;
     int in_dest;
     int unique_copy;
{
  register enum rtx_code code = GET_CODE (x);
  enum machine_mode op0_mode = VOIDmode;
  register char *fmt;
  register int len, i;
  rtx new;

/* Two expressions are equal if they are identical copies of a shared
   RTX or if they are both registers with the same register number
   and mode.  */

#define COMBINE_RTX_EQUAL_P(X,Y)			\
  ((X) == (Y)						\
   || (GET_CODE (X) == REG && GET_CODE (Y) == REG	\
       && REGNO (X) == REGNO (Y) && GET_MODE (X) == GET_MODE (Y)))

  if (! in_dest && COMBINE_RTX_EQUAL_P (x, from))
    {
      n_occurrences++;
      return (unique_copy && n_occurrences > 1 ? copy_rtx (to) : to);
    }

  /* If X and FROM are the same register but different modes, they will
     not have been seen as equal above.  However, flow.c will make a
     LOG_LINKS entry for that case.  If we do nothing, we will try to
     rerecognize our original insn and, when it succeeds, we will
     delete the feeding insn, which is incorrect.

     So force this insn not to match in this (rare) case.  */
  if (! in_dest && code == REG && GET_CODE (from) == REG
      && REGNO (x) == REGNO (from))
    return gen_rtx (CLOBBER, GET_MODE (x), const0_rtx);

  /* If this is an object, we are done unless it is a MEM or LO_SUM, both
     of which may contain things that can be combined.  */
  if (code != MEM && code != LO_SUM && GET_RTX_CLASS (code) == 'o')
    return x;

  /* It is possible to have a subexpression appear twice in the insn.
     Suppose that FROM is a register that appears within TO.
     Then, after that subexpression has been scanned once by `subst',
     the second time it is scanned, TO may be found.  If we were
     to scan TO here, we would find FROM within it and create a
     self-referent rtl structure which is completely wrong.  */
  if (COMBINE_RTX_EQUAL_P (x, to))
    return to;

  len = GET_RTX_LENGTH (code);
  fmt = GET_RTX_FORMAT (code);

  /* We don't need to process a SET_DEST that is a register, CC0, or PC, so
     set up to skip this common case.  All other cases where we want to
     suppress replacing something inside a SET_SRC are handled via the
     IN_DEST operand.  */
  if (code == SET
      && (GET_CODE (SET_DEST (x)) == REG
	  || GET_CODE (SET_DEST (x)) == CC0
	  || GET_CODE (SET_DEST (x)) == PC))
    fmt = "ie";
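  /* The "i" format spec makes the loop below skip operand 0 (the
     destination), while "e" still processes operand 1 (the source).  */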

  /* Get the mode of operand 0 in case X is now a SIGN_EXTEND of a
     constant.  */
  if (fmt[0] == 'e')
    op0_mode = GET_MODE (XEXP (x, 0));

  for (i = 0; i < len; i++)
    {
      if (fmt[i] == 'E')
	{
	  register int j;
	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    {
	      if (COMBINE_RTX_EQUAL_P (XVECEXP (x, i, j), from))
		{
		  new = (unique_copy && n_occurrences ? copy_rtx (to) : to);
		  n_occurrences++;
		}
	      else
		{
		  new = subst (XVECEXP (x, i, j), from, to, 0, unique_copy);

		  /* If this substitution failed, this whole thing fails.  */
		  if (GET_CODE (new) == CLOBBER && XEXP (new, 0) == const0_rtx)
		    return new;
		}

	      SUBST (XVECEXP (x, i, j), new);
	    }
	}
      else if (fmt[i] == 'e')
	{
	  if (COMBINE_RTX_EQUAL_P (XEXP (x, i), from))
	    {
	      /* In general, don't install a subreg involving two modes not
		 tieable.  It can worsen register allocation, and can even
		 make invalid reload insns, since the reg inside may need to
		 be copied from in the outside mode, and that may be invalid
		 if it is an fp reg copied in integer mode.

		 We allow two exceptions to this:  It is valid if it is
		 inside another SUBREG and the mode of that SUBREG and the
		 mode of the inside of TO is tieable and it is valid if X
		 is a SET that copies FROM to CC0.  */
	      if (GET_CODE (to) == SUBREG
		  && ! MODES_TIEABLE_P (GET_MODE (to),
					GET_MODE (SUBREG_REG (to)))
		  && ! (code == SUBREG
			&& MODES_TIEABLE_P (GET_MODE (x),
					    GET_MODE (SUBREG_REG (to))))
#ifdef HAVE_cc0
		  && ! (code == SET && i == 1 && XEXP (x, 0) == cc0_rtx)
#endif
		  )
		return gen_rtx (CLOBBER, VOIDmode, const0_rtx);

	      new = (unique_copy && n_occurrences ? copy_rtx (to) : to);
	      n_occurrences++;
	    }
	  else
	    /* If we are in a SET_DEST, suppress most cases unless we
	       have gone inside a MEM, in which case we want to
	       simplify the address.  We assume here that things that
	       are actually part of the destination have their inner
	       parts in the first expression.  This is true for SUBREG,
	       STRICT_LOW_PART, and ZERO_EXTRACT, which are the only
	       things aside from REG and MEM that should appear in a
	       SET_DEST.  */
	    new = subst (XEXP (x, i), from, to,
			 (((in_dest
			    && (code == SUBREG || code == STRICT_LOW_PART
				|| code == ZERO_EXTRACT))
			   || code == SET)
			  && i == 0), unique_copy);

	  /* If we found that we will have to reject this combination,
	     indicate that by returning the CLOBBER ourselves, rather than
	     an expression containing it.  This will speed things up as
	     well as prevent accidents where two CLOBBERs are considered
	     to be equal, thus producing an incorrect simplification.  */

	  if (GET_CODE (new) == CLOBBER && XEXP (new, 0) == const0_rtx)
	    return new;

	  SUBST (XEXP (x, i), new);
	}
    }

  /* Try to simplify X.  If the simplification changed the code, it is
     likely that further simplification will help, so loop, but limit the
     number of repetitions that will be performed.  */

  for (i = 0; i < 4; i++)
    {
      /* If X is sufficiently simple, don't bother trying to do anything
	 with it.  */
      if (code != CONST_INT && code != REG && code != CLOBBER)
	x = simplify_rtx (x, op0_mode, i == 3, in_dest);

      if (GET_CODE (x) == code)
	break;

      code = GET_CODE (x);

      /* We no longer know the original mode of operand 0 since we
	 have changed the form of X.  */
      op0_mode = VOIDmode;
    }

  return x;
}
2902\f
2903/* Simplify X, a piece of RTL. We just operate on the expression at the
2904 outer level; call `subst' to simplify recursively. Return the new
2905 expression.
2906
2907 OP0_MODE is the original mode of XEXP (x, 0); LAST is nonzero if this
2908 will be the iteration even if an expression with a code different from
2909 X is returned; IN_DEST is nonzero if we are inside a SET_DEST. */
eeb43d32 2910
8079805d
RK
2911static rtx
2912simplify_rtx (x, op0_mode, last, in_dest)
2913 rtx x;
2914 enum machine_mode op0_mode;
2915 int last;
2916 int in_dest;
2917{
2918 enum rtx_code code = GET_CODE (x);
2919 enum machine_mode mode = GET_MODE (x);
2920 rtx temp;
2921 int i;
2922
2923 /* If this is a commutative operation, put a constant last and a complex
2924 expression first. We don't need to do this for comparisons here. */
2925 if (GET_RTX_CLASS (code) == 'c'
2926 && ((CONSTANT_P (XEXP (x, 0)) && GET_CODE (XEXP (x, 1)) != CONST_INT)
2927 || (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == 'o'
2928 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) != 'o')
2929 || (GET_CODE (XEXP (x, 0)) == SUBREG
2930 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0)))) == 'o'
2931 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) != 'o')))
2932 {
2933 temp = XEXP (x, 0);
2934 SUBST (XEXP (x, 0), XEXP (x, 1));
2935 SUBST (XEXP (x, 1), temp);
2936 }
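 /* [Editorial note, not in the original source] A concrete instance of the
    canonicalization above: (plus (const_int 4) (reg A)) is rewritten as
    (plus (reg A) (const_int 4)), so later pattern matching only has to
    look for a constant in the second operand. */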
2937
2938 /* If this is a PLUS, MINUS, or MULT, and the first operand is the
2939 sign extension of a PLUS with a constant, reverse the order of the sign
2940 extension and the addition. Note that this is not the same as the original
2941 code, but overflow is undefined for signed values. Also note that the
2942 PLUS will have been partially moved "inside" the sign-extension, so that
2943 the first operand of X will really look like:
2944 (ashiftrt (plus (ashift A C4) C5) C4).
2945 We convert this to
2946 (plus (ashiftrt (ashift A C4) C4) (ashiftrt C5 C4))
2947 and replace the first operand of X with that expression. Later parts
2948 of this function may simplify the expression further.
2949
2950 For example, if we start with (mult (sign_extend (plus A C1)) C2),
2951 we swap the SIGN_EXTEND and PLUS. Later code will apply the
2952 distributive law to produce (plus (mult (sign_extend A) C2) C3).
2953
2954 We do this to simplify address expressions. */
2955
2956 if ((code == PLUS || code == MINUS || code == MULT)
2957 && GET_CODE (XEXP (x, 0)) == ASHIFTRT
2958 && GET_CODE (XEXP (XEXP (x, 0), 0)) == PLUS
2959 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == ASHIFT
2960 && GET_CODE (XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 1)) == CONST_INT
2961 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
2962 && XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 1) == XEXP (XEXP (x, 0), 1)
2963 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
2964 && (temp = simplify_binary_operation (ASHIFTRT, mode,
2965 XEXP (XEXP (XEXP (x, 0), 0), 1),
2966 XEXP (XEXP (x, 0), 1))) != 0)
2967 {
2968 rtx new
2969 = simplify_shift_const (NULL_RTX, ASHIFT, mode,
2970 XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 0),
2971 INTVAL (XEXP (XEXP (x, 0), 1)));
2972
2973 new = simplify_shift_const (NULL_RTX, ASHIFTRT, mode, new,
2974 INTVAL (XEXP (XEXP (x, 0), 1)));
2975
2976 SUBST (XEXP (x, 0), gen_binary (PLUS, mode, new, temp));
2977 }
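 /* [Editorial sketch, hypothetical values, not in the original source]
    In SImode with C4 == 16 and C5 == (const_int 0x50000), the operand
        (ashiftrt (plus (ashift A 16) (const_int 0x50000)) 16)
    gives TEMP == (const_int 5) (0x50000 >> 16), and NEW becomes
    (ashiftrt (ashift A 16) 16), the sign extension of the low 16 bits
    of A; the operand is then replaced by
        (plus (ashiftrt (ashift A 16) 16) (const_int 5)). */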
2978
2979 /* If this is a simple operation applied to an IF_THEN_ELSE, try
2980 applying it to the arms of the IF_THEN_ELSE. This often simplifies
2981 things. Check for cases where both arms are testing the same
2982 condition.
2983
2984 Don't do anything if all operands are very simple. */
2985
2986 if (((GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c'
2987 || GET_RTX_CLASS (code) == '<')
2988 && ((GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) != 'o'
2989 && ! (GET_CODE (XEXP (x, 0)) == SUBREG
2990 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0))))
2991 == 'o')))
2992 || (GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) != 'o'
2993 && ! (GET_CODE (XEXP (x, 1)) == SUBREG
2994 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 1))))
2995 == 'o')))))
2996 || (GET_RTX_CLASS (code) == '1'
2997 && ((GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) != 'o'
2998 && ! (GET_CODE (XEXP (x, 0)) == SUBREG
2999 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0))))
3000 == 'o'))))))
3001 {
3002 rtx cond, true, false;
3003
3004 cond = if_then_else_cond (x, &true, &false);
3005 if (cond != 0)
3006 {
3007 rtx cop1 = const0_rtx;
3008 enum rtx_code cond_code = simplify_comparison (NE, &cond, &cop1);
3009
3010 /* Simplify the alternative arms; this may collapse the true and
3011 false arms to store-flag values. */
3012 true = subst (true, pc_rtx, pc_rtx, 0, 0);
3013 false = subst (false, pc_rtx, pc_rtx, 0, 0);
3014
3015 /* Restarting if we generate a store-flag expression will cause
3016 us to loop. Just drop through in this case. */
3017
3018 /* If the result values are STORE_FLAG_VALUE and zero, we can
3019 just make the comparison operation. */
3020 if (true == const_true_rtx && false == const0_rtx)
3021 x = gen_binary (cond_code, mode, cond, cop1);
3022 else if (true == const0_rtx && false == const_true_rtx)
3023 x = gen_binary (reverse_condition (cond_code), mode, cond, cop1);
3024
3025 /* Likewise, we can make the negate of a comparison operation
3026 if the result values are - STORE_FLAG_VALUE and zero. */
3027 else if (GET_CODE (true) == CONST_INT
3028 && INTVAL (true) == - STORE_FLAG_VALUE
3029 && false == const0_rtx)
3030 x = gen_unary (NEG, mode, mode,
3031 gen_binary (cond_code, mode, cond, cop1));
3032 else if (GET_CODE (false) == CONST_INT
3033 && INTVAL (false) == - STORE_FLAG_VALUE
3034 && true == const0_rtx)
3035 x = gen_unary (NEG, mode, mode,
3036 gen_binary (reverse_condition (cond_code),
3037 mode, cond, cop1));
3038 else
3039 return gen_rtx (IF_THEN_ELSE, mode,
3040 gen_binary (cond_code, VOIDmode, cond, cop1),
3041 true, false);
3042
3043 code = GET_CODE (x);
3044 op0_mode = VOIDmode;
3045 }
3046 }
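 /* [Editorial sketch, hypothetical RTL, not in the original source]
    For x == (mult (if_then_else (ne A 0) (const_int 1) (const_int 0)) B),
    if_then_else_cond yields cond == (ne A 0) with arms (mult 1 B) and
    (mult 0 B); after the substitutions above the arms collapse to B and
    (const_int 0), so the result is (if_then_else (ne A 0) B (const_int 0)). */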
3047
3048 /* Try to fold this expression in case we have constants that weren't
3049 present before. */
3050 temp = 0;
3051 switch (GET_RTX_CLASS (code))
3052 {
3053 case '1':
3054 temp = simplify_unary_operation (code, mode, XEXP (x, 0), op0_mode);
3055 break;
3056 case '<':
3057 temp = simplify_relational_operation (code, op0_mode,
3058 XEXP (x, 0), XEXP (x, 1));
3059#ifdef FLOAT_STORE_FLAG_VALUE
3060 if (temp != 0 && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
3061 temp = ((temp == const0_rtx) ? CONST0_RTX (GET_MODE (x))
3062 : immed_real_const_1 (FLOAT_STORE_FLAG_VALUE, GET_MODE (x)));
3063#endif
3064 break;
3065 case 'c':
3066 case '2':
3067 temp = simplify_binary_operation (code, mode, XEXP (x, 0), XEXP (x, 1));
3068 break;
3069 case 'b':
3070 case '3':
3071 temp = simplify_ternary_operation (code, mode, op0_mode, XEXP (x, 0),
3072 XEXP (x, 1), XEXP (x, 2));
3073 break;
3074 }
3075
3076 if (temp)
3077 x = temp, code = GET_CODE (temp);
3078
3079 /* First see if we can apply the inverse distributive law. */
3080 if (code == PLUS || code == MINUS
3081 || code == AND || code == IOR || code == XOR)
3082 {
3083 x = apply_distributive_law (x);
3084 code = GET_CODE (x);
3085 }
3086
3087 /* If CODE is an associative operation not otherwise handled, see if we
3088 can associate some operands. This can win if they are constants or
3089 if they are logically related (i.e. (a & b) & a). */
3090 if ((code == PLUS || code == MINUS
3091 || code == MULT || code == AND || code == IOR || code == XOR
3092 || code == DIV || code == UDIV
3093 || code == SMAX || code == SMIN || code == UMAX || code == UMIN)
3094 && INTEGRAL_MODE_P (mode))
3095 {
3096 if (GET_CODE (XEXP (x, 0)) == code)
3097 {
3098 rtx other = XEXP (XEXP (x, 0), 0);
3099 rtx inner_op0 = XEXP (XEXP (x, 0), 1);
3100 rtx inner_op1 = XEXP (x, 1);
3101 rtx inner;
3102
3103 /* Make sure we pass the constant operand if any as the second
3104 one if this is a commutative operation. */
3105 if (CONSTANT_P (inner_op0) && GET_RTX_CLASS (code) == 'c')
3106 {
3107 rtx tem = inner_op0;
3108 inner_op0 = inner_op1;
3109 inner_op1 = tem;
3110 }
3111 inner = simplify_binary_operation (code == MINUS ? PLUS
3112 : code == DIV ? MULT
3113 : code == UDIV ? MULT
3114 : code,
3115 mode, inner_op0, inner_op1);
3116
3117 /* For commutative operations, try the other pair if that one
3118 didn't simplify. */
3119 if (inner == 0 && GET_RTX_CLASS (code) == 'c')
3120 {
3121 other = XEXP (XEXP (x, 0), 1);
3122 inner = simplify_binary_operation (code, mode,
3123 XEXP (XEXP (x, 0), 0),
3124 XEXP (x, 1));
3125 }
3126
3127 if (inner)
3128 return gen_binary (code, mode, other, inner);
3129 }
3130 }
3131
3132 /* A little bit of algebraic simplification here. */
3133 switch (code)
3134 {
3135 case MEM:
3136 /* Ensure that our address has any ASHIFTs converted to MULT in case
3137 address-recognizing predicates are called later. */
3138 temp = make_compound_operation (XEXP (x, 0), MEM);
3139 SUBST (XEXP (x, 0), temp);
3140 break;
3141
3142 case SUBREG:
3143 /* (subreg:A (mem:B X) N) becomes a modified MEM unless the SUBREG
3144 is paradoxical. If we can't do that safely, then it becomes
3145 something nonsensical so that this combination won't take place. */
3146
3147 if (GET_CODE (SUBREG_REG (x)) == MEM
3148 && (GET_MODE_SIZE (mode)
3149 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
3150 {
3151 rtx inner = SUBREG_REG (x);
3152 int endian_offset = 0;
3153 /* Don't change the mode of the MEM
3154 if that would change the meaning of the address. */
3155 if (MEM_VOLATILE_P (SUBREG_REG (x))
3156 || mode_dependent_address_p (XEXP (inner, 0)))
3157 return gen_rtx (CLOBBER, mode, const0_rtx);
3158
3159#if BYTES_BIG_ENDIAN
3160 if (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
3161 endian_offset += UNITS_PER_WORD - GET_MODE_SIZE (mode);
3162 if (GET_MODE_SIZE (GET_MODE (inner)) < UNITS_PER_WORD)
3163 endian_offset -= UNITS_PER_WORD - GET_MODE_SIZE (GET_MODE (inner));
3164#endif
3165 /* Note if the plus_constant doesn't make a valid address
3166 then this combination won't be accepted. */
3167 x = gen_rtx (MEM, mode,
3168 plus_constant (XEXP (inner, 0),
3169 (SUBREG_WORD (x) * UNITS_PER_WORD
3170 + endian_offset)));
3171 MEM_VOLATILE_P (x) = MEM_VOLATILE_P (inner);
3172 RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (inner);
3173 MEM_IN_STRUCT_P (x) = MEM_IN_STRUCT_P (inner);
3174 return x;
3175 }
3176
3177 /* If we are in a SET_DEST, these other cases can't apply. */
3178 if (in_dest)
3179 return x;
3180
3181 /* Changing mode twice with SUBREG => just change it once,
3182 or not at all if changing back to starting mode. */
3183 if (GET_CODE (SUBREG_REG (x)) == SUBREG)
3184 {
3185 if (mode == GET_MODE (SUBREG_REG (SUBREG_REG (x)))
3186 && SUBREG_WORD (x) == 0 && SUBREG_WORD (SUBREG_REG (x)) == 0)
3187 return SUBREG_REG (SUBREG_REG (x));
3188
3189 SUBST_INT (SUBREG_WORD (x),
3190 SUBREG_WORD (x) + SUBREG_WORD (SUBREG_REG (x)));
3191 SUBST (SUBREG_REG (x), SUBREG_REG (SUBREG_REG (x)));
3192 }
3193
3194 /* SUBREG of a hard register => just change the register number
3195 and/or mode. If the hard register is not valid in that mode,
3196 suppress this combination. If the hard register is the stack,
3197 frame, or argument pointer, leave this as a SUBREG. */
3198
3199 if (GET_CODE (SUBREG_REG (x)) == REG
3200 && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER
3201 && REGNO (SUBREG_REG (x)) != FRAME_POINTER_REGNUM
3202#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3203 && REGNO (SUBREG_REG (x)) != HARD_FRAME_POINTER_REGNUM
3204#endif
3205#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3206 && REGNO (SUBREG_REG (x)) != ARG_POINTER_REGNUM
3207#endif
3208 && REGNO (SUBREG_REG (x)) != STACK_POINTER_REGNUM)
3209 {
3210 if (HARD_REGNO_MODE_OK (REGNO (SUBREG_REG (x)) + SUBREG_WORD (x),
3211 mode))
3212 return gen_rtx (REG, mode,
3213 REGNO (SUBREG_REG (x)) + SUBREG_WORD (x));
3214 else
3215 return gen_rtx (CLOBBER, mode, const0_rtx);
3216 }
3217
3218 /* For a constant, try to pick up the part we want. Handle a full
3219 word and low-order part. Only do this if we are narrowing
3220 the constant; if it is being widened, we have no idea what
3221 the extra bits will have been set to. */
3222
3223 if (CONSTANT_P (SUBREG_REG (x)) && op0_mode != VOIDmode
3224 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
3225 && GET_MODE_SIZE (op0_mode) < UNITS_PER_WORD
3226 && GET_MODE_CLASS (mode) == MODE_INT)
3227 {
3228 temp = operand_subword (SUBREG_REG (x), SUBREG_WORD (x),
3229 0, op0_mode);
3230 if (temp)
3231 return temp;
3232 }
3233
3234 /* If we want a subreg of a constant, at offset 0,
3235 take the low bits. On a little-endian machine, that's
3236 always valid. On a big-endian machine, it's valid
3237 only if the constant's mode fits in one word. */
3238 if (CONSTANT_P (SUBREG_REG (x)) && subreg_lowpart_p (x)
3239 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (op0_mode)
3240#if WORDS_BIG_ENDIAN
3241 && GET_MODE_BITSIZE (op0_mode) <= BITS_PER_WORD
3242#endif
3243 )
3244 return gen_lowpart_for_combine (mode, SUBREG_REG (x));
3245
3246 /* A paradoxical SUBREG of a VOIDmode constant is the same constant,
3247 since we are saying that the high bits don't matter. */
3248 if (CONSTANT_P (SUBREG_REG (x)) && GET_MODE (SUBREG_REG (x)) == VOIDmode
3249 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (op0_mode))
3250 return SUBREG_REG (x);
3251
3252 /* Note that we cannot do any narrowing for non-constants since
3253 we might have been counting on using the fact that some bits were
3254 zero. We now do this in the SET. */
3255
3256 break;
3257
3258 case NOT:
3259 /* (not (plus X -1)) can become (neg X). */
3260 if (GET_CODE (XEXP (x, 0)) == PLUS
3261 && XEXP (XEXP (x, 0), 1) == constm1_rtx)
3262 return gen_rtx_combine (NEG, mode, XEXP (XEXP (x, 0), 0));
3263
3264 /* Similarly, (not (neg X)) is (plus X -1). */
3265 if (GET_CODE (XEXP (x, 0)) == NEG)
3266 return gen_rtx_combine (PLUS, mode, XEXP (XEXP (x, 0), 0),
3267 constm1_rtx);
3268
3269 /* (not (xor X C)) for C constant is (xor X D) with D = ~ C. */
3270 if (GET_CODE (XEXP (x, 0)) == XOR
3271 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3272 && (temp = simplify_unary_operation (NOT, mode,
3273 XEXP (XEXP (x, 0), 1),
3274 mode)) != 0)
3275 return gen_binary (XOR, mode, XEXP (XEXP (x, 0), 0), temp);
3276
3277 /* (not (ashift 1 X)) is (rotate ~1 X). We used to do this for operands
3278 other than 1, but that is not valid. We could do a similar
3279 simplification for (not (lshiftrt C X)) where C is just the sign bit,
3280 but this doesn't seem common enough to bother with. */
3281 if (GET_CODE (XEXP (x, 0)) == ASHIFT
3282 && XEXP (XEXP (x, 0), 0) == const1_rtx)
3283 return gen_rtx (ROTATE, mode, gen_unary (NOT, mode, mode, const1_rtx),
3284 XEXP (XEXP (x, 0), 1));
3285
3286 if (GET_CODE (XEXP (x, 0)) == SUBREG
3287 && subreg_lowpart_p (XEXP (x, 0))
3288 && (GET_MODE_SIZE (GET_MODE (XEXP (x, 0)))
3289 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (x, 0)))))
3290 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == ASHIFT
3291 && XEXP (SUBREG_REG (XEXP (x, 0)), 0) == const1_rtx)
3292 {
3293 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (XEXP (x, 0)));
3294
3295 x = gen_rtx (ROTATE, inner_mode,
3296 gen_unary (NOT, inner_mode, inner_mode, const1_rtx),
3297 XEXP (SUBREG_REG (XEXP (x, 0)), 1));
3298 return gen_lowpart_for_combine (mode, x);
3299 }
3300
3301#if STORE_FLAG_VALUE == -1
3302 /* (not (comparison foo bar)) can be done by reversing the comparison
3303 code if valid. */
3304 if (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
3305 && reversible_comparison_p (XEXP (x, 0)))
3306 return gen_rtx_combine (reverse_condition (GET_CODE (XEXP (x, 0))),
3307 mode, XEXP (XEXP (x, 0), 0),
3308 XEXP (XEXP (x, 0), 1));
3309
3310 /* (ashiftrt foo C) where C is the number of bits in FOO minus 1
3311 is (lt foo (const_int 0)), so we can perform the above
3312 simplification. */
3313
3314 if (XEXP (x, 1) == const1_rtx
3315 && GET_CODE (XEXP (x, 0)) == ASHIFTRT
3316 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3317 && INTVAL (XEXP (XEXP (x, 0), 1)) == GET_MODE_BITSIZE (mode) - 1)
3318 return gen_rtx_combine (GE, mode, XEXP (XEXP (x, 0), 0), const0_rtx);
3319#endif
3320
3321 /* Apply De Morgan's laws to reduce number of patterns for machines
3322 with negating logical insns (and-not, nand, etc.). If result has
3323 only one NOT, put it first, since that is how the patterns are
3324 coded. */
3325
3326 if (GET_CODE (XEXP (x, 0)) == IOR || GET_CODE (XEXP (x, 0)) == AND)
3327 {
3328 rtx in1 = XEXP (XEXP (x, 0), 0), in2 = XEXP (XEXP (x, 0), 1);
3329
3330 if (GET_CODE (in1) == NOT)
3331 in1 = XEXP (in1, 0);
3332 else
3333 in1 = gen_rtx_combine (NOT, GET_MODE (in1), in1);
3334
3335 if (GET_CODE (in2) == NOT)
3336 in2 = XEXP (in2, 0);
3337 else if (GET_CODE (in2) == CONST_INT
3338 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3339 in2 = GEN_INT (GET_MODE_MASK (mode) & ~ INTVAL (in2));
3340 else
3341 in2 = gen_rtx_combine (NOT, GET_MODE (in2), in2);
3342
3343 if (GET_CODE (in2) == NOT)
3344 {
3345 rtx tem = in2;
3346 in2 = in1; in1 = tem;
3347 }
3348
3349 return gen_rtx_combine (GET_CODE (XEXP (x, 0)) == IOR ? AND : IOR,
3350 mode, in1, in2);
3351 }
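 /* [Editorial sketch, hypothetical values, not in the original source]
    In QImode, (not (ior A (const_int 5))) becomes
    (and (not A) (const_int 0xfa)): IN1 == (not A), and the constant 5
    is complemented under the mode mask (0xff & ~5 == 0xfa), so only one
    NOT remains and it comes first. */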
3352 break;
3353
3354 case NEG:
3355 /* (neg (plus X 1)) can become (not X). */
3356 if (GET_CODE (XEXP (x, 0)) == PLUS
3357 && XEXP (XEXP (x, 0), 1) == const1_rtx)
3358 return gen_rtx_combine (NOT, mode, XEXP (XEXP (x, 0), 0));
3359
3360 /* Similarly, (neg (not X)) is (plus X 1). */
3361 if (GET_CODE (XEXP (x, 0)) == NOT)
3362 return plus_constant (XEXP (XEXP (x, 0), 0), 1);
3363
3364 /* (neg (minus X Y)) can become (minus Y X). */
3365 if (GET_CODE (XEXP (x, 0)) == MINUS
3366 && (! FLOAT_MODE_P (mode)
3367 /* x-y != -(y-x) with IEEE floating point. */
3368 || TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3369 || flag_fast_math))
3370 return gen_binary (MINUS, mode, XEXP (XEXP (x, 0), 1),
3371 XEXP (XEXP (x, 0), 0));
3372
3373 /* (neg (xor A 1)) is (plus A -1) if A is known to be either 0 or 1. */
3374 if (GET_CODE (XEXP (x, 0)) == XOR && XEXP (XEXP (x, 0), 1) == const1_rtx
3375 && nonzero_bits (XEXP (XEXP (x, 0), 0), mode) == 1)
3376 return gen_binary (PLUS, mode, XEXP (XEXP (x, 0), 0), constm1_rtx);
3377
3378 /* NEG commutes with ASHIFT since it is multiplication. Only do this
3379 if we can then eliminate the NEG (e.g.,
3380 if the operand is a constant). */
3381
3382 if (GET_CODE (XEXP (x, 0)) == ASHIFT)
3383 {
3384 temp = simplify_unary_operation (NEG, mode,
3385 XEXP (XEXP (x, 0), 0), mode);
3386 if (temp)
3387 {
3388 SUBST (XEXP (XEXP (x, 0), 0), temp);
3389 return XEXP (x, 0);
3390 }
3391 }
3392
3393 temp = expand_compound_operation (XEXP (x, 0));
3394
3395 /* For C equal to the width of MODE minus 1, (neg (ashiftrt X C)) can be
3396 replaced by (lshiftrt X C). This will convert
3397 (neg (sign_extract X 1 Y)) to (zero_extract X 1 Y). */
3398
3399 if (GET_CODE (temp) == ASHIFTRT
3400 && GET_CODE (XEXP (temp, 1)) == CONST_INT
3401 && INTVAL (XEXP (temp, 1)) == GET_MODE_BITSIZE (mode) - 1)
3402 return simplify_shift_const (temp, LSHIFTRT, mode, XEXP (temp, 0),
3403 INTVAL (XEXP (temp, 1)));
3404
3405 /* If X has only a single bit that might be nonzero, say, bit I, convert
3406 (neg X) to (ashiftrt (ashift X C-I) C-I) where C is the bitsize of
3407 MODE minus 1. This will convert (neg (zero_extract X 1 Y)) to
3408 (sign_extract X 1 Y). But only do this if TEMP isn't a register
3409 or a SUBREG of one since we'd be making the expression more
3410 complex if it was just a register. */
3411
3412 if (GET_CODE (temp) != REG
3413 && ! (GET_CODE (temp) == SUBREG
3414 && GET_CODE (SUBREG_REG (temp)) == REG)
3415 && (i = exact_log2 (nonzero_bits (temp, mode))) >= 0)
3416 {
3417 rtx temp1 = simplify_shift_const
3418 (NULL_RTX, ASHIFTRT, mode,
3419 simplify_shift_const (NULL_RTX, ASHIFT, mode, temp,
3420 GET_MODE_BITSIZE (mode) - 1 - i),
3421 GET_MODE_BITSIZE (mode) - 1 - i);
3422
3423 /* If all we did was surround TEMP with the two shifts, we
3424 haven't improved anything, so don't use it. Otherwise,
3425 we are better off with TEMP1. */
3426 if (GET_CODE (temp1) != ASHIFTRT
3427 || GET_CODE (XEXP (temp1, 0)) != ASHIFT
3428 || XEXP (XEXP (temp1, 0), 0) != temp)
3429 return temp1;
3430 }
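 /* [Editorial sketch, hypothetical values, not in the original source]
    If TEMP is known to have only bit 3 possibly nonzero in SImode
    (nonzero_bits == 8, so I == 3), (neg TEMP) is rewritten as
    (ashiftrt (ashift TEMP 28) 28): for TEMP == 8 this gives
    (8 << 28) >> 28 == -8, as required. */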
3431 break;
3432
3433 case FLOAT_TRUNCATE:
3434 /* (float_truncate:SF (float_extend:DF foo:SF)) = foo:SF. */
3435 if (GET_CODE (XEXP (x, 0)) == FLOAT_EXTEND
3436 && GET_MODE (XEXP (XEXP (x, 0), 0)) == mode)
3437 return XEXP (XEXP (x, 0), 0);
3438
3439 /* (float_truncate:SF (OP:DF (float_extend:DF foo:SF))) is
3440 (OP:SF foo:SF) if OP is NEG or ABS. */
3441 if ((GET_CODE (XEXP (x, 0)) == ABS
3442 || GET_CODE (XEXP (x, 0)) == NEG)
3443 && GET_CODE (XEXP (XEXP (x, 0), 0)) == FLOAT_EXTEND
3444 && GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == mode)
3445 return gen_unary (GET_CODE (XEXP (x, 0)), mode, mode,
3446 XEXP (XEXP (XEXP (x, 0), 0), 0));
3447
3448 /* (float_truncate:SF (subreg:DF (float_truncate:SF X) 0))
3449 is (float_truncate:SF x). */
3450 if (GET_CODE (XEXP (x, 0)) == SUBREG
3451 && subreg_lowpart_p (XEXP (x, 0))
3452 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == FLOAT_TRUNCATE)
3453 return SUBREG_REG (XEXP (x, 0));
3454 break;
3455
3456#ifdef HAVE_cc0
3457 case COMPARE:
3458 /* Convert (compare FOO (const_int 0)) to FOO unless we aren't
3459 using cc0, in which case we want to leave it as a COMPARE
3460 so we can distinguish it from a register-register-copy. */
3461 if (XEXP (x, 1) == const0_rtx)
3462 return XEXP (x, 0);
3463
3464 /* In IEEE floating point, x-0 is not the same as x. */
3465 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3466 || ! FLOAT_MODE_P (GET_MODE (XEXP (x, 0)))
3467 || flag_fast_math)
3468 && XEXP (x, 1) == CONST0_RTX (GET_MODE (XEXP (x, 0))))
3469 return XEXP (x, 0);
3470 break;
3471#endif
3472
3473 case CONST:
3474 /* (const (const X)) can become (const X). Do it this way rather than
3475 returning the inner CONST since CONST can be shared with a
3476 REG_EQUAL note. */
3477 if (GET_CODE (XEXP (x, 0)) == CONST)
3478 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
3479 break;
3480
3481#ifdef HAVE_lo_sum
3482 case LO_SUM:
3483 /* Convert (lo_sum (high FOO) FOO) to FOO. This is necessary so we
3484 can add in an offset. find_split_point will split this address up
3485 again if it doesn't match. */
3486 if (GET_CODE (XEXP (x, 0)) == HIGH
3487 && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1)))
3488 return XEXP (x, 1);
3489 break;
3490#endif
3491
3492 case PLUS:
3493 /* If we have (plus (plus (A const) B)), associate it so that CONST is
3494 outermost. That's because that's the way indexed addresses are
3495 supposed to appear. This code used to check many more cases, but
3496 they are now checked elsewhere. */
3497 if (GET_CODE (XEXP (x, 0)) == PLUS
3498 && CONSTANT_ADDRESS_P (XEXP (XEXP (x, 0), 1)))
3499 return gen_binary (PLUS, mode,
3500 gen_binary (PLUS, mode, XEXP (XEXP (x, 0), 0),
3501 XEXP (x, 1)),
3502 XEXP (XEXP (x, 0), 1));
3503
3504 /* (plus (xor (and <foo> (const_int pow2 - 1)) <c>) <-c>)
3505 when c is (const_int (pow2 + 1) / 2) is a sign extension of a
3506 bit-field and can be replaced by either a sign_extend or a
3507 sign_extract. The `and' may be a zero_extend. */
3508 if (GET_CODE (XEXP (x, 0)) == XOR
3509 && GET_CODE (XEXP (x, 1)) == CONST_INT
3510 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3511 && INTVAL (XEXP (x, 1)) == - INTVAL (XEXP (XEXP (x, 0), 1))
3512 && (i = exact_log2 (INTVAL (XEXP (XEXP (x, 0), 1)))) >= 0
3513 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3514 && ((GET_CODE (XEXP (XEXP (x, 0), 0)) == AND
3515 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
3516 && (INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1))
3517 == ((HOST_WIDE_INT) 1 << (i + 1)) - 1))
3518 || (GET_CODE (XEXP (XEXP (x, 0), 0)) == ZERO_EXTEND
3519 && (GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)))
3520 == i + 1))))
3521 return simplify_shift_const
3522 (NULL_RTX, ASHIFTRT, mode,
3523 simplify_shift_const (NULL_RTX, ASHIFT, mode,
3524 XEXP (XEXP (XEXP (x, 0), 0), 0),
3525 GET_MODE_BITSIZE (mode) - (i + 1)),
3526 GET_MODE_BITSIZE (mode) - (i + 1));
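 /* [Editorial sketch, hypothetical values, not in the original source]
    In QImode with I == 3,
        (plus (xor (and Y (const_int 15)) (const_int 8)) (const_int -8))
    sign-extends the low four bits of Y and is rewritten as
    (ashiftrt (ashift Y 4) 4): e.g. for Y & 15 == 8, (8 ^ 8) - 8 == -8,
    matching the 4-bit value 8 sign-extended to -8. */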
3527
3528 /* (plus (comparison A B) C) can become (neg (rev-comp A B)) if
3529 C is 1 and STORE_FLAG_VALUE is -1 or if C is -1 and STORE_FLAG_VALUE
3530 is 1. This produces better code than the alternative immediately
3531 below. */
3532 if (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
3533 && reversible_comparison_p (XEXP (x, 0))
3534 && ((STORE_FLAG_VALUE == -1 && XEXP (x, 1) == const1_rtx)
3535 || (STORE_FLAG_VALUE == 1 && XEXP (x, 1) == constm1_rtx)))
3536 return
3537 gen_unary (NEG, mode, mode,
3538 gen_binary (reverse_condition (GET_CODE (XEXP (x, 0))),
3539 mode, XEXP (XEXP (x, 0), 0),
3540 XEXP (XEXP (x, 0), 1)));
3541
3542 /* If only the low-order bit of X is possibly nonzero, (plus x -1)
3543 can become (ashiftrt (ashift (xor x 1) C) C) where C is
3544 the bitsize of the mode - 1. This allows simplification of
3545 "a = (b & 8) == 0;" */
3546 if (XEXP (x, 1) == constm1_rtx
3547 && GET_CODE (XEXP (x, 0)) != REG
3548 && ! (GET_CODE (XEXP (x,0)) == SUBREG
3549 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG)
3550 && nonzero_bits (XEXP (x, 0), mode) == 1)
3551 return simplify_shift_const (NULL_RTX, ASHIFTRT, mode,
3552 simplify_shift_const (NULL_RTX, ASHIFT, mode,
3553 gen_rtx_combine (XOR, mode,
3554 XEXP (x, 0), const1_rtx),
3555 GET_MODE_BITSIZE (mode) - 1),
3556 GET_MODE_BITSIZE (mode) - 1);
3557
3558 /* If we are adding two things that have no bits in common, convert
3559 the addition into an IOR. This will often be further simplified,
3560 for example in cases like ((a & 1) + (a & 2)), which can
3561 become a & 3. */
3562
3563 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3564 && (nonzero_bits (XEXP (x, 0), mode)
3565 & nonzero_bits (XEXP (x, 1), mode)) == 0)
3566 return gen_binary (IOR, mode, XEXP (x, 0), XEXP (x, 1));
3567 break;
3568
3569 case MINUS:
3570#if STORE_FLAG_VALUE == 1
3571 /* (minus 1 (comparison foo bar)) can be done by reversing the comparison
3572 code if valid. */
3573 if (XEXP (x, 0) == const1_rtx
3574 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<'
3575 && reversible_comparison_p (XEXP (x, 1)))
3576 return gen_binary (reverse_condition (GET_CODE (XEXP (x, 1))),
3577 mode, XEXP (XEXP (x, 1), 0),
3578 XEXP (XEXP (x, 1), 1));
3579#endif
3580
3581 /* (minus <foo> (and <foo> (const_int -pow2))) becomes
3582 (and <foo> (const_int pow2-1)) */
3583 if (GET_CODE (XEXP (x, 1)) == AND
3584 && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
3585 && exact_log2 (- INTVAL (XEXP (XEXP (x, 1), 1))) >= 0
3586 && rtx_equal_p (XEXP (XEXP (x, 1), 0), XEXP (x, 0)))
3587 return simplify_and_const_int (NULL_RTX, mode, XEXP (x, 0),
3588 - INTVAL (XEXP (XEXP (x, 1), 1)) - 1);
3589
3590 /* Canonicalize (minus A (plus B C)) to (minus (minus A B) C) for
3591 integers. */
3592 if (GET_CODE (XEXP (x, 1)) == PLUS && INTEGRAL_MODE_P (mode))
3593 return gen_binary (MINUS, mode,
3594 gen_binary (MINUS, mode, XEXP (x, 0),
3595 XEXP (XEXP (x, 1), 0)),
3596 XEXP (XEXP (x, 1), 1));
3597 break;
3598
3599 case MULT:
3600 /* If we have (mult (plus A B) C), apply the distributive law and then
3601 the inverse distributive law to see if things simplify. This
3602 occurs mostly in addresses, often when unrolling loops. */
3603
3604 if (GET_CODE (XEXP (x, 0)) == PLUS)
3605 {
3606 x = apply_distributive_law
3607 (gen_binary (PLUS, mode,
3608 gen_binary (MULT, mode,
3609 XEXP (XEXP (x, 0), 0), XEXP (x, 1)),
3610 gen_binary (MULT, mode,
3611 XEXP (XEXP (x, 0), 1), XEXP (x, 1))));
3612
3613 if (GET_CODE (x) != MULT)
3614 return x;
3615 }
3616 break;
3617
3618 case UDIV:
3619 /* If this is a divide by a power of two, treat it as a shift if
3620 its first operand is a shift. */
3621 if (GET_CODE (XEXP (x, 1)) == CONST_INT
3622 && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0
3623 && (GET_CODE (XEXP (x, 0)) == ASHIFT
3624 || GET_CODE (XEXP (x, 0)) == LSHIFTRT
3625 || GET_CODE (XEXP (x, 0)) == ASHIFTRT
3626 || GET_CODE (XEXP (x, 0)) == ROTATE
3627 || GET_CODE (XEXP (x, 0)) == ROTATERT))
3628 return simplify_shift_const (NULL_RTX, LSHIFTRT, mode, XEXP (x, 0), i);
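 /* [Editorial sketch, hypothetical values, not in the original source]
    With I == 2, (udiv (ashift X (const_int 3)) (const_int 4)) becomes
    (lshiftrt (ashift X 3) 2), which simplify_shift_const may then fold
    further into a single shift-and-mask form. */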
3629 break;
3630
3631 case EQ: case NE:
3632 case GT: case GTU: case GE: case GEU:
3633 case LT: case LTU: case LE: case LEU:
3634 /* If the first operand is a condition code, we can't do anything
3635 with it. */
3636 if (GET_CODE (XEXP (x, 0)) == COMPARE
3637 || (GET_MODE_CLASS (GET_MODE (XEXP (x, 0))) != MODE_CC
3638#ifdef HAVE_cc0
3639 && XEXP (x, 0) != cc0_rtx
3640#endif
3641 ))
3642 {
3643 rtx op0 = XEXP (x, 0);
3644 rtx op1 = XEXP (x, 1);
3645 enum rtx_code new_code;
3646
3647 if (GET_CODE (op0) == COMPARE)
3648 op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);
3649
3650 /* Simplify our comparison, if possible. */
3651 new_code = simplify_comparison (code, &op0, &op1);
3652
3653#if STORE_FLAG_VALUE == 1
3654 /* If STORE_FLAG_VALUE is 1, we can convert (ne x 0) to simply X
3655 if only the low-order bit is possibly nonzero in X (such as when
3656 X is a ZERO_EXTRACT of one bit). Similarly, we can convert EQ to
3657 (xor X 1) or (minus 1 X); we use the former. Finally, if X is
3658 known to be either 0 or -1, NE becomes a NEG and EQ becomes
3659 (plus X 1).
3660
3661 Remove any ZERO_EXTRACT we made when thinking this was a
3662 comparison. It may now be simpler to use, e.g., an AND. If a
3663 ZERO_EXTRACT is indeed appropriate, it will be placed back by
3664 the call to make_compound_operation in the SET case. */
3665
3666 if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
3667 && op1 == const0_rtx
3668 && nonzero_bits (op0, mode) == 1)
3669 return gen_lowpart_for_combine (mode,
3670 expand_compound_operation (op0));
3671
3672 else if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
3673 && op1 == const0_rtx
3674 && (num_sign_bit_copies (op0, mode)
3675 == GET_MODE_BITSIZE (mode)))
3676 {
3677 op0 = expand_compound_operation (op0);
3678 return gen_unary (NEG, mode, mode,
3679 gen_lowpart_for_combine (mode, op0));
3680 }
3681
3682 else if (new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
3683 && op1 == const0_rtx
3684 && nonzero_bits (op0, mode) == 1)
3685 {
3686 op0 = expand_compound_operation (op0);
3687 return gen_binary (XOR, mode,
3688 gen_lowpart_for_combine (mode, op0),
3689 const1_rtx);
3690 }
3691
3692 else if (new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
3693 && op1 == const0_rtx
3694 && (num_sign_bit_copies (op0, mode)
3695 == GET_MODE_BITSIZE (mode)))
3696 {
3697 op0 = expand_compound_operation (op0);
3698 return plus_constant (gen_lowpart_for_combine (mode, op0), 1);
3699 }
3700#endif
3701
3702#if STORE_FLAG_VALUE == -1
3703 /* If STORE_FLAG_VALUE is -1, we have cases similar to
3704 those above. */
3705 if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
3706 && op1 == const0_rtx
3707 && (num_sign_bit_copies (op0, mode)
3708 == GET_MODE_BITSIZE (mode)))
3709 return gen_lowpart_for_combine (mode,
3710 expand_compound_operation (op0));
3711
3712 else if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
3713 && op1 == const0_rtx
3714 && nonzero_bits (op0, mode) == 1)
3715 {
3716 op0 = expand_compound_operation (op0);
3717 return gen_unary (NEG, mode, mode,
3718 gen_lowpart_for_combine (mode, op0));
3719 }
3720
3721 else if (new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
3722 && op1 == const0_rtx
3723 && (num_sign_bit_copies (op0, mode)
3724 == GET_MODE_BITSIZE (mode)))
3725 {
3726 op0 = expand_compound_operation (op0);
3727 return gen_unary (NOT, mode, mode,
3728 gen_lowpart_for_combine (mode, op0));
3729 }
3730
3731 /* If X is 0/1, (eq X 0) is X-1. */
3732 else if (new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
3733 && op1 == const0_rtx
3734 && nonzero_bits (op0, mode) == 1)
3735 {
3736 op0 = expand_compound_operation (op0);
8079805d 3737 return plus_constant (gen_lowpart_for_combine (mode, op0), -1);
230d793d
RS
3738 }
3739#endif
3740
3741 /* If STORE_FLAG_VALUE says to just test the sign bit and X has just
3742 one bit that might be nonzero, we can convert (ne x 0) to
3743 (ashift x c) where C puts the bit in the sign bit. Remove any
3744 AND with STORE_FLAG_VALUE when we are done, since we are only
3745 going to test the sign bit. */
3746 if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
3747 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3748 && (STORE_FLAG_VALUE
3749 == (HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (mode) - 1))
3750 && op1 == const0_rtx
3751 && mode == GET_MODE (op0)
3752 && (i = exact_log2 (nonzero_bits (op0, mode))) >= 0)
3753 {
3754 x = simplify_shift_const (NULL_RTX, ASHIFT, mode,
3755 expand_compound_operation (op0),
3756 GET_MODE_BITSIZE (mode) - 1 - i);
3757 if (GET_CODE (x) == AND && XEXP (x, 1) == const_true_rtx)
3758 return XEXP (x, 0);
3759 else
3760 return x;
3761 }
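 /* [Editorial sketch, hypothetical target, not in the original source]
    On a machine where STORE_FLAG_VALUE is (HOST_WIDE_INT) 1 << 31 (the
    SImode sign bit), if nonzero_bits (op0, mode) == 2 (only bit 1 can
    be set, so I == 1), (ne op0 (const_int 0)) becomes
    (ashift op0 (const_int 30)), moving bit 1 into the sign bit. */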
3762
3763 /* If the code changed, return a whole new comparison. */
3764 if (new_code != code)
3765 return gen_rtx_combine (new_code, mode, op0, op1);
3766
3767 /* Otherwise, keep this operation, but maybe change its operands.
3768 This also converts (ne (compare FOO BAR) 0) to (ne FOO BAR). */
3769 SUBST (XEXP (x, 0), op0);
3770 SUBST (XEXP (x, 1), op1);
3771 }
3772 break;
3773
3774 case IF_THEN_ELSE:
3775 return simplify_if_then_else (x);
3776
3777 case ZERO_EXTRACT:
3778 case SIGN_EXTRACT:
3779 case ZERO_EXTEND:
3780 case SIGN_EXTEND:
3781 /* If we are processing SET_DEST, we are done. */
3782 if (in_dest)
3783 return x;
3784
3785 return expand_compound_operation (x);
3786
3787 case SET:
3788 return simplify_set (x);
3789
3790 case AND:
3791 case IOR:
3792 case XOR:
3793 return simplify_logical (x, last);
3794
3795 case ABS:
3796 /* (abs (neg <foo>)) -> (abs <foo>) */
3797 if (GET_CODE (XEXP (x, 0)) == NEG)
3798 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
3799
3800 /* If operand is something known to be positive, ignore the ABS. */
3801 if (GET_CODE (XEXP (x, 0)) == FFS || GET_CODE (XEXP (x, 0)) == ABS
3802 || ((GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
3803 <= HOST_BITS_PER_WIDE_INT)
3804 && ((nonzero_bits (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
3805 & ((HOST_WIDE_INT) 1
3806 << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1)))
3807 == 0)))
3808 return XEXP (x, 0);
3809
3810
3811 /* If operand is known to be only -1 or 0, convert ABS to NEG. */
3812 if (num_sign_bit_copies (XEXP (x, 0), mode) == GET_MODE_BITSIZE (mode))
3813 return gen_rtx_combine (NEG, mode, XEXP (x, 0));
3814
3815 break;
3816
3817 case FFS:
3818 /* (ffs (*_extend <X>)) = (ffs <X>) */
3819 if (GET_CODE (XEXP (x, 0)) == SIGN_EXTEND
3820 || GET_CODE (XEXP (x, 0)) == ZERO_EXTEND)
3821 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
3822 break;
3823
3824 case FLOAT:
3825 /* (float (sign_extend <X>)) = (float <X>). */
3826 if (GET_CODE (XEXP (x, 0)) == SIGN_EXTEND)
3827 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
3828 break;
3829
3830 case ASHIFT:
3831 case LSHIFTRT:
3832 case ASHIFTRT:
3833 case ROTATE:
3834 case ROTATERT:
3835 /* If this is a shift by a constant amount, simplify it. */
3836 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
3837 return simplify_shift_const (x, code, mode, XEXP (x, 0),
3838 INTVAL (XEXP (x, 1)));
3839
3840#ifdef SHIFT_COUNT_TRUNCATED
3841 else if (SHIFT_COUNT_TRUNCATED && GET_CODE (XEXP (x, 1)) != REG)
3842 SUBST (XEXP (x, 1),
3843 force_to_mode (XEXP (x, 1), GET_MODE (x),
3844 ((HOST_WIDE_INT) 1
3845 << exact_log2 (GET_MODE_BITSIZE (GET_MODE (x))))
3846 - 1,
3847 NULL_RTX, 0));
3848#endif
3849
3850 break;
3851 }
3852
3853 return x;
3854}
3855\f
3856/* Simplify X, an IF_THEN_ELSE expression. Return the new expression. */
3857
3858static rtx
3859simplify_if_then_else (x)
3860 rtx x;
3861{
3862 enum machine_mode mode = GET_MODE (x);
3863 rtx cond = XEXP (x, 0);
3864 rtx true = XEXP (x, 1);
3865 rtx false = XEXP (x, 2);
3866 enum rtx_code true_code = GET_CODE (cond);
3867 int comparison_p = GET_RTX_CLASS (true_code) == '<';
3868 rtx temp;
3869 int i;
3870
3871 /* Simplify storing of the truth value. */
3872 if (comparison_p && true == const_true_rtx && false == const0_rtx)
3873 return gen_binary (true_code, mode, XEXP (cond, 0), XEXP (cond, 1));
3874
3875 /* Also when the truth value has to be reversed. */
3876 if (comparison_p && reversible_comparison_p (cond)
3877 && true == const0_rtx && false == const_true_rtx)
3878 return gen_binary (reverse_condition (true_code),
3879 mode, XEXP (cond, 0), XEXP (cond, 1));
3880
3881 /* Sometimes we can simplify the arm of an IF_THEN_ELSE if a register used
3882 in it is being compared against certain values. Get the true and false
3883 comparisons and see if that says anything about the value of each arm. */
3884
3885 if (comparison_p && reversible_comparison_p (cond)
3886 && GET_CODE (XEXP (cond, 0)) == REG)
3887 {
3888 HOST_WIDE_INT nzb;
3889 rtx from = XEXP (cond, 0);
3890 enum rtx_code false_code = reverse_condition (true_code);
3891 rtx true_val = XEXP (cond, 1);
3892 rtx false_val = true_val;
3893 int swapped = 0;
3894
3895 /* If FALSE_CODE is EQ, swap the codes and arms. */
3896
3897 if (false_code == EQ)
3898 {
3899 swapped = 1, true_code = EQ, false_code = NE;
3900 temp = true, true = false, false = temp;
3901 }
3902
3903 /* If we are comparing against zero and the expression being tested has
3904 only a single bit that might be nonzero, that is its value when it is
3905 not equal to zero. Similarly if it is known to be -1 or 0. */
3906
3907 if (true_code == EQ && true_val == const0_rtx
3908 && exact_log2 (nzb = nonzero_bits (from, GET_MODE (from))) >= 0)
3909 false_code = EQ, false_val = GEN_INT (nzb);
3910 else if (true_code == EQ && true_val == const0_rtx
3911 && (num_sign_bit_copies (from, GET_MODE (from))
3912 == GET_MODE_BITSIZE (GET_MODE (from))))
3913 false_code = EQ, false_val = constm1_rtx;
3914
3915 /* Now simplify an arm if we know the value of the register in the
3916 branch and it is used in the arm. Be careful due to the potential
3917 of locally-shared RTL. */
3918
3919 if (reg_mentioned_p (from, true))
3920 true = subst (known_cond (copy_rtx (true), true_code, from, true_val),
3921 pc_rtx, pc_rtx, 0, 0);
3922 if (reg_mentioned_p (from, false))
3923 false = subst (known_cond (copy_rtx (false), false_code,
3924 from, false_val),
3925 pc_rtx, pc_rtx, 0, 0);
3926
3927 SUBST (XEXP (x, 1), swapped ? false : true);
3928 SUBST (XEXP (x, 2), swapped ? true : false);
3929
3930 true = XEXP (x, 1), false = XEXP (x, 2), true_code = GET_CODE (cond);
3931 }
3932
3933 /* If we have (if_then_else FOO (pc) (label_ref BAR)) and FOO can be
3934 reversed, do so to avoid needing two sets of patterns for
3935 subtract-and-branch insns. Similarly if we have a constant in the true
3936 arm, the false arm is the same as the first operand of the comparison, or
3937 the false arm is more complicated than the true arm. */
3938
3939 if (comparison_p && reversible_comparison_p (cond)
3940 && (true == pc_rtx
3941 || (CONSTANT_P (true)
3942 && GET_CODE (false) != CONST_INT && false != pc_rtx)
3943 || true == const0_rtx
3944 || (GET_RTX_CLASS (GET_CODE (true)) == 'o'
3945 && GET_RTX_CLASS (GET_CODE (false)) != 'o')
3946 || (GET_CODE (true) == SUBREG
3947 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (true))) == 'o'
3948 && GET_RTX_CLASS (GET_CODE (false)) != 'o')
3949 || reg_mentioned_p (true, false)
3950 || rtx_equal_p (false, XEXP (cond, 0))))
3951 {
3952 true_code = reverse_condition (true_code);
3953 SUBST (XEXP (x, 0),
3954 gen_binary (true_code, GET_MODE (cond), XEXP (cond, 0),
3955 XEXP (cond, 1)));
3956
3957 SUBST (XEXP (x, 1), false);
3958 SUBST (XEXP (x, 2), true);
3959
3960 temp = true, true = false, false = temp, cond = XEXP (x, 0);
3961 }
3962
3963 /* If the two arms are identical, we don't need the comparison. */
3964
3965 if (rtx_equal_p (true, false) && ! side_effects_p (cond))
3966 return true;
3967
3968 /* Look for cases where we have (abs x) or (neg (abs X)). */
3969
3970 if (GET_MODE_CLASS (mode) == MODE_INT
3971 && GET_CODE (false) == NEG
3972 && rtx_equal_p (true, XEXP (false, 0))
3973 && comparison_p
3974 && rtx_equal_p (true, XEXP (cond, 0))
3975 && ! side_effects_p (true))
3976 switch (true_code)
3977 {
3978 case GT:
3979 case GE:
3980 return gen_unary (ABS, mode, mode, true);
3981 case LT:
3982 case LE:
3983 return gen_unary (NEG, mode, mode, gen_unary (ABS, mode, mode, true));
3984 }
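 /* [Editorial sketch, not in the original source] Typical instances of
    the ABS detection above:
    (if_then_else (ge A (const_int 0)) A (neg A)) becomes (abs A), and
    (if_then_else (lt A (const_int 0)) A (neg A)) becomes (neg (abs A)). */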
3985
3986 /* Look for MIN or MAX. */
3987
3988 if ((! FLOAT_MODE_P (mode) | flag_fast_math)
3989 && comparison_p
3990 && rtx_equal_p (XEXP (cond, 0), true)
3991 && rtx_equal_p (XEXP (cond, 1), false)
3992 && ! side_effects_p (cond))
3993 switch (true_code)
3994 {
3995 case GE:
3996 case GT:
3997 return gen_binary (SMAX, mode, true, false);
3998 case LE:
3999 case LT:
4000 return gen_binary (SMIN, mode, true, false);
4001 case GEU:
4002 case GTU:
4003 return gen_binary (UMAX, mode, true, false);
4004 case LEU:
4005 case LTU:
4006 return gen_binary (UMIN, mode, true, false);
4007 }
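 /* [Editorial sketch, not in the original source] Instances of the
    MIN/MAX detection above: (if_then_else (gt A B) A B) becomes
    (smax A B), and (if_then_else (ltu A B) A B) becomes (umin A B). */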
4008
4009#if STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1
4010
4011 /* If we have (if_then_else COND (OP Z C1) Z) and OP is an identity when its
4012 second operand is zero, this can be done as (OP Z (mult COND C2)) where
4013 C2 = C1 * STORE_FLAG_VALUE. Similarly if OP has an outer ZERO_EXTEND or
4014 SIGN_EXTEND as long as Z is already extended (so we don't destroy it).
4015 We can do this kind of thing in some cases when STORE_FLAG_VALUE is
4016 neither of the above, but it isn't worth checking for. */
4017
4018 if (comparison_p && mode != VOIDmode && ! side_effects_p (x))
4019 {
4020 rtx t = make_compound_operation (true, SET);
4021 rtx f = make_compound_operation (false, SET);
4022 rtx cond_op0 = XEXP (cond, 0);
4023 rtx cond_op1 = XEXP (cond, 1);
4024 enum rtx_code op, extend_op = NIL;
4025 enum machine_mode m = mode;
4026 rtx z = 0, c1;
4027
4028 if ((GET_CODE (t) == PLUS || GET_CODE (t) == MINUS
4029 || GET_CODE (t) == IOR || GET_CODE (t) == XOR
4030 || GET_CODE (t) == ASHIFT
4031 || GET_CODE (t) == LSHIFTRT || GET_CODE (t) == ASHIFTRT)
4032 && rtx_equal_p (XEXP (t, 0), f))
4033 c1 = XEXP (t, 1), op = GET_CODE (t), z = f;
4034
4035 /* If an identity-zero op is commutative, check whether there
4036 would be a match if we swapped the operands. */
4037 else if ((GET_CODE (t) == PLUS || GET_CODE (t) == IOR
4038 || GET_CODE (t) == XOR)
4039 && rtx_equal_p (XEXP (t, 1), f))
4040 c1 = XEXP (t, 0), op = GET_CODE (t), z = f;
4041 else if (GET_CODE (t) == SIGN_EXTEND
4042 && (GET_CODE (XEXP (t, 0)) == PLUS
4043 || GET_CODE (XEXP (t, 0)) == MINUS
4044 || GET_CODE (XEXP (t, 0)) == IOR
4045 || GET_CODE (XEXP (t, 0)) == XOR
4046 || GET_CODE (XEXP (t, 0)) == ASHIFT
4047 || GET_CODE (XEXP (t, 0)) == LSHIFTRT
4048 || GET_CODE (XEXP (t, 0)) == ASHIFTRT)
4049 && GET_CODE (XEXP (XEXP (t, 0), 0)) == SUBREG
4050 && subreg_lowpart_p (XEXP (XEXP (t, 0), 0))
4051 && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 0)), f)
4052 && (num_sign_bit_copies (f, GET_MODE (f))
4053 > (GET_MODE_BITSIZE (mode)
4054 - GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (t, 0), 0))))))
4055 {
4056 c1 = XEXP (XEXP (t, 0), 1); z = f; op = GET_CODE (XEXP (t, 0));
4057 extend_op = SIGN_EXTEND;
4058 m = GET_MODE (XEXP (t, 0));
4059 }
4060 else if (GET_CODE (t) == SIGN_EXTEND
4061 && (GET_CODE (XEXP (t, 0)) == PLUS
4062 || GET_CODE (XEXP (t, 0)) == IOR
4063 || GET_CODE (XEXP (t, 0)) == XOR)
4064 && GET_CODE (XEXP (XEXP (t, 0), 1)) == SUBREG
4065 && subreg_lowpart_p (XEXP (XEXP (t, 0), 1))
4066 && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 1)), f)
4067 && (num_sign_bit_copies (f, GET_MODE (f))
4068 > (GET_MODE_BITSIZE (mode)
4069 - GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (t, 0), 1))))))
4070 {
4071 c1 = XEXP (XEXP (t, 0), 0); z = f; op = GET_CODE (XEXP (t, 0));
4072 extend_op = SIGN_EXTEND;
4073 m = GET_MODE (XEXP (t, 0));
4074 }
4075 else if (GET_CODE (t) == ZERO_EXTEND
4076 && (GET_CODE (XEXP (t, 0)) == PLUS
4077 || GET_CODE (XEXP (t, 0)) == MINUS
4078 || GET_CODE (XEXP (t, 0)) == IOR
4079 || GET_CODE (XEXP (t, 0)) == XOR
4080 || GET_CODE (XEXP (t, 0)) == ASHIFT
4081 || GET_CODE (XEXP (t, 0)) == LSHIFTRT
4082 || GET_CODE (XEXP (t, 0)) == ASHIFTRT)
4083 && GET_CODE (XEXP (XEXP (t, 0), 0)) == SUBREG
4084 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4085 && subreg_lowpart_p (XEXP (XEXP (t, 0), 0))
4086 && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 0)), f)
4087 && ((nonzero_bits (f, GET_MODE (f))
4088 & ~ GET_MODE_MASK (GET_MODE (XEXP (XEXP (t, 0), 0))))
4089 == 0))
4090 {
4091 c1 = XEXP (XEXP (t, 0), 1); z = f; op = GET_CODE (XEXP (t, 0));
4092 extend_op = ZERO_EXTEND;
4093 m = GET_MODE (XEXP (t, 0));
4094 }
4095 else if (GET_CODE (t) == ZERO_EXTEND
4096 && (GET_CODE (XEXP (t, 0)) == PLUS
4097 || GET_CODE (XEXP (t, 0)) == IOR
4098 || GET_CODE (XEXP (t, 0)) == XOR)
4099 && GET_CODE (XEXP (XEXP (t, 0), 1)) == SUBREG
4100 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4101 && subreg_lowpart_p (XEXP (XEXP (t, 0), 1))
4102 && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 1)), f)
4103 && ((nonzero_bits (f, GET_MODE (f))
4104 & ~ GET_MODE_MASK (GET_MODE (XEXP (XEXP (t, 0), 1))))
4105 == 0))
4106 {
4107 c1 = XEXP (XEXP (t, 0), 0); z = f; op = GET_CODE (XEXP (t, 0));
4108 extend_op = ZERO_EXTEND;
4109 m = GET_MODE (XEXP (t, 0));
4110 }
4111
4112 if (z)
4113 {
4114 temp = subst (gen_binary (true_code, m, cond_op0, cond_op1),
4115 pc_rtx, pc_rtx, 0, 0);
4116 temp = gen_binary (MULT, m, temp,
4117 gen_binary (MULT, m, c1, const_true_rtx));
4118 temp = subst (temp, pc_rtx, pc_rtx, 0, 0);
4119 temp = gen_binary (op, m, gen_lowpart_for_combine (m, z), temp);
4120
4121 if (extend_op != NIL)
4122 temp = gen_unary (extend_op, mode, m, temp);
4123
4124 return temp;
4125 }
4126 }
4127#endif
4128
4129 /* If we have (if_then_else (ne A 0) C1 0) and either A is known to be 0 or
4130 1 and C1 is a single bit or A is known to be 0 or -1 and C1 is the
4131 negation of a single bit, we can convert this operation to a shift. We
4132 can actually do this more generally, but it doesn't seem worth it. */
4133
4134 if (true_code == NE && XEXP (cond, 1) == const0_rtx
4135 && false == const0_rtx && GET_CODE (true) == CONST_INT
4136 && ((1 == nonzero_bits (XEXP (cond, 0), mode)
4137 && (i = exact_log2 (INTVAL (true))) >= 0)
4138 || ((num_sign_bit_copies (XEXP (cond, 0), mode)
4139 == GET_MODE_BITSIZE (mode))
4140 && (i = exact_log2 (- INTVAL (true))) >= 0)))
4141 return
4142 simplify_shift_const (NULL_RTX, ASHIFT, mode,
4143 gen_lowpart_for_combine (mode, XEXP (cond, 0)), i);
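 /* [Editorial sketch, hypothetical values, not in the original source]
    If A is known to be 0 or 1,
    (if_then_else (ne A (const_int 0)) (const_int 8) (const_int 0))
    becomes (ashift A (const_int 3)), since I == exact_log2 (8) == 3. */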
4144
4145 return x;
4146}
4147\f
4148/* Simplify X, a SET expression. Return the new expression. */
4149
4150static rtx
4151simplify_set (x)
4152 rtx x;
4153{
4154 rtx src = SET_SRC (x);
4155 rtx dest = SET_DEST (x);
4156 enum machine_mode mode
4157 = GET_MODE (src) != VOIDmode ? GET_MODE (src) : GET_MODE (dest);
4158 rtx other_insn;
4159 rtx *cc_use;
4160
4161 /* (set (pc) (return)) gets written as (return). */
4162 if (GET_CODE (dest) == PC && GET_CODE (src) == RETURN)
4163 return src;
4164
4165 /* Now that we know for sure which bits of SRC we are using, see if we can
4166 simplify the expression for the object knowing that we only need the
4167 low-order bits. */
4168
4169 if (GET_MODE_CLASS (mode) == MODE_INT)
4170 src = force_to_mode (src, mode, GET_MODE_MASK (mode), NULL_RTX, 0);
4171
4172 /* If we are setting CC0 or if the source is a COMPARE, look for the use of
4173 the comparison result and try to simplify it unless we already have used
4174 undobuf.other_insn. */
4175 if ((GET_CODE (src) == COMPARE
4176#ifdef HAVE_cc0
4177 || dest == cc0_rtx
4178#endif
4179 )
4180 && (cc_use = find_single_use (dest, subst_insn, &other_insn)) != 0
4181 && (undobuf.other_insn == 0 || other_insn == undobuf.other_insn)
4182 && GET_RTX_CLASS (GET_CODE (*cc_use)) == '<'
4183 && rtx_equal_p (XEXP (*cc_use, 0), dest))
4184 {
4185 enum rtx_code old_code = GET_CODE (*cc_use);
4186 enum rtx_code new_code;
4187 rtx op0, op1;
4188 int other_changed = 0;
4189 enum machine_mode compare_mode = GET_MODE (dest);
4190
4191 if (GET_CODE (src) == COMPARE)
4192 op0 = XEXP (src, 0), op1 = XEXP (src, 1);
4193 else
4194 op0 = src, op1 = const0_rtx;
4195
4196 /* Simplify our comparison, if possible. */
4197 new_code = simplify_comparison (old_code, &op0, &op1);
4198
4199#ifdef EXTRA_CC_MODES
4200 /* If this machine has CC modes other than CCmode, check to see if we
4201 need to use a different CC mode here. */
4202 compare_mode = SELECT_CC_MODE (new_code, op0, op1);
4203#endif /* EXTRA_CC_MODES */
4204
4205#if !defined (HAVE_cc0) && defined (EXTRA_CC_MODES)
4206 /* If the mode changed, we have to change SET_DEST, the mode in the
4207 compare, and the mode in the place SET_DEST is used. If SET_DEST is
4208 a hard register, just build new versions with the proper mode. If it
4209 is a pseudo, we lose unless it is the only time we set the pseudo, in
4210 which case we can safely change its mode. */
4211 if (compare_mode != GET_MODE (dest))
4212 {
4213 int regno = REGNO (dest);
4214 rtx new_dest = gen_rtx (REG, compare_mode, regno);
4215
4216 if (regno < FIRST_PSEUDO_REGISTER
4217 || (reg_n_sets[regno] == 1 && ! REG_USERVAR_P (dest)))
4218 {
4219 if (regno >= FIRST_PSEUDO_REGISTER)
4220 SUBST (regno_reg_rtx[regno], new_dest);
4221
4222 SUBST (SET_DEST (x), new_dest);
4223 SUBST (XEXP (*cc_use, 0), new_dest);
4224 other_changed = 1;
4225
4226 dest = new_dest;
4227 }
4228 }
4229#endif
4230
4231 /* If the code changed, we have to build a new comparison in
4232 undobuf.other_insn. */
4233 if (new_code != old_code)
4234 {
4235 unsigned HOST_WIDE_INT mask;
4236
4237 SUBST (*cc_use, gen_rtx_combine (new_code, GET_MODE (*cc_use),
4238 dest, const0_rtx));
4239
4240 /* If the only change we made was to change an EQ into an NE or
4241 vice versa, OP0 has only one bit that might be nonzero, and OP1
4242 is zero, check if changing the user of the condition code will
4243 produce a valid insn. If it won't, we can keep the original code
4244 in that insn by surrounding our operation with an XOR. */
4245
4246 if (((old_code == NE && new_code == EQ)
4247 || (old_code == EQ && new_code == NE))
4248 && ! other_changed && op1 == const0_rtx
4249 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
4250 && exact_log2 (mask = nonzero_bits (op0, GET_MODE (op0))) >= 0)
4251 {
4252 rtx pat = PATTERN (other_insn), note = 0;
4253
4254 if ((recog_for_combine (&pat, other_insn, &note) < 0
4255 && ! check_asm_operands (pat)))
4256 {
4257 PUT_CODE (*cc_use, old_code);
4258 other_insn = 0;
4259
4260 op0 = gen_binary (XOR, GET_MODE (op0), op0, GEN_INT (mask));
4261 }
4262 }
4263
4264 other_changed = 1;
4265 }
4266
4267 if (other_changed)
4268 undobuf.other_insn = other_insn;
4269
4270#ifdef HAVE_cc0
4271 /* If we are now comparing against zero, change our source if
4272 needed. If we do not use cc0, we always have a COMPARE. */
4273 if (op1 == const0_rtx && dest == cc0_rtx)
4274 {
4275 SUBST (SET_SRC (x), op0);
4276 src = op0;
4277 }
4278 else
4279#endif
4280
4281 /* Otherwise, if we didn't previously have a COMPARE in the
4282 correct mode, we need one. */
4283 if (GET_CODE (src) != COMPARE || GET_MODE (src) != compare_mode)
4284 {
4285 SUBST (SET_SRC (x),
4286 gen_rtx_combine (COMPARE, compare_mode, op0, op1));
4287 src = SET_SRC (x);
4288 }
4289 else
4290 {
4291 /* Otherwise, update the COMPARE if needed. */
4292 SUBST (XEXP (src, 0), op0);
4293 SUBST (XEXP (src, 1), op1);
4294 }
4295 }
4296 else
4297 {
4298 /* Get SET_SRC in a form where we have placed back any
4299 compound expressions. Then do the checks below. */
4300 src = make_compound_operation (src, SET);
4301 SUBST (SET_SRC (x), src);
4302 }
4303
4304 /* If we have (set x (subreg:m1 (op:m2 ...) 0)) with OP being some operation,
4305 and X being a REG or (subreg (reg)), we may be able to convert this to
4306 (set (subreg:m2 x) (op)).
4307
4308 We can always do this if M1 is narrower than M2 because that means that
4309 we only care about the low bits of the result.
4310
4311 However, on machines without WORD_REGISTER_OPERATIONS defined, we cannot
4312 perform a narrower operation than requested since the high-order bits will
4313 be undefined. On machines where it is defined, this transformation is safe
4314 as long as M1 and M2 have the same number of words. */
4315
4316 if (GET_CODE (src) == SUBREG && subreg_lowpart_p (src)
4317 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (src))) != 'o'
4318 && (((GET_MODE_SIZE (GET_MODE (src)) + (UNITS_PER_WORD - 1))
4319 / UNITS_PER_WORD)
4320 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
4321 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
4322#ifndef WORD_REGISTER_OPERATIONS
4323 && (GET_MODE_SIZE (GET_MODE (src))
4324 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (src))))
4325#endif
4326 && (GET_CODE (dest) == REG
4327 || (GET_CODE (dest) == SUBREG
4328 && GET_CODE (SUBREG_REG (dest)) == REG)))
4329 {
4330 SUBST (SET_DEST (x),
4331 gen_lowpart_for_combine (GET_MODE (SUBREG_REG (src)),
4332 dest));
4333 SUBST (SET_SRC (x), SUBREG_REG (src));
4334
4335 src = SET_SRC (x), dest = SET_DEST (x);
4336 }
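 /* [Editorial sketch, hypothetical RTL, not in the original source]
    For example, (set (reg:QI D) (subreg:QI (plus:SI A B) 0)) is
    rewritten as (set (subreg:SI (reg:QI D) 0) (plus:SI A B)); only the
    low byte of the SImode sum is live, so the wider operation is
    equivalent. */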
4337
4338#ifdef LOAD_EXTEND_OP
4339 /* If we have (set FOO (subreg:M (mem:N BAR) 0)) with M wider than N, this
4340 would require a paradoxical subreg. Replace the subreg with a
4341 zero_extend to avoid the reload that would otherwise be required. */
4342
4343 if (GET_CODE (src) == SUBREG && subreg_lowpart_p (src)
4344 && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (src))) != NIL
4345 && SUBREG_WORD (src) == 0
4346 && (GET_MODE_SIZE (GET_MODE (src))
4347 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (src))))
4348 && GET_CODE (SUBREG_REG (src)) == MEM)
4349 {
4350 SUBST (SET_SRC (x),
4351 gen_rtx_combine (LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (src))),
4352 GET_MODE (src), XEXP (src, 0)));
4353
4354 src = SET_SRC (x);
4355 }
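 /* [Editorial sketch, hypothetical target, not in the original source]
    On a machine whose loads zero-extend (LOAD_EXTEND_OP (HImode)
    == ZERO_EXTEND), (set (reg:SI D) (subreg:SI (mem:HI ADDR) 0)) is
    rewritten as (set (reg:SI D) (zero_extend:SI (mem:HI ADDR))). */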
4356#endif
4357
4358 /* If we don't have a conditional move, SET_SRC is an IF_THEN_ELSE, and we
4359 are comparing an item known to be 0 or -1 against 0, use a logical
4360 operation instead. Check for one of the arms being an IOR of the other
4361 arm with some value. We compute three terms to be IOR'ed together. In
4362 practice, at most two will be nonzero. Then we do the IOR's. */
4363
4364 if (GET_CODE (dest) != PC
4365 && GET_CODE (src) == IF_THEN_ELSE
4366#ifdef HAVE_conditional_move
4367 && ! HAVE_conditional_move
4368#endif
4369 && GET_MODE_CLASS (GET_MODE (src)) == MODE_INT
4370 && (GET_CODE (XEXP (src, 0)) == EQ || GET_CODE (XEXP (src, 0)) == NE)
4371 && XEXP (XEXP (src, 0), 1) == const0_rtx
4372 && (num_sign_bit_copies (XEXP (XEXP (src, 0), 0),
4373 GET_MODE (XEXP (XEXP (src, 0), 0)))
4374 == GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (src, 0), 0))))
4375 && ! side_effects_p (src))
4376 {
4377 rtx true = (GET_CODE (XEXP (src, 0)) == NE
4378 ? XEXP (src, 1) : XEXP (src, 2));
4379 rtx false = (GET_CODE (XEXP (src, 0)) == NE
4380 ? XEXP (src, 2) : XEXP (src, 1));
4381 rtx term1 = const0_rtx, term2, term3;
4382
4383 if (GET_CODE (true) == IOR && rtx_equal_p (XEXP (true, 0), false))
4384 term1 = false, true = XEXP (true, 1), false = const0_rtx;
4385 else if (GET_CODE (true) == IOR
4386 && rtx_equal_p (XEXP (true, 1), false))
4387 term1 = false, true = XEXP (true, 0), false = const0_rtx;
4388 else if (GET_CODE (false) == IOR
4389 && rtx_equal_p (XEXP (false, 0), true))
4390 term1 = true, false = XEXP (false, 1), true = const0_rtx;
4391 else if (GET_CODE (false) == IOR
4392 && rtx_equal_p (XEXP (false, 1), true))
4393 term1 = true, false = XEXP (false, 0), true = const0_rtx;
4394
4395 term2 = gen_binary (AND, GET_MODE (src), XEXP (XEXP (src, 0), 0), true);
4396 term3 = gen_binary (AND, GET_MODE (src),
0c1c8ea6 4397 gen_unary (NOT, GET_MODE (src), GET_MODE (src),
8079805d
RK
4398 XEXP (XEXP (src, 0), 0)),
4399 false);
4400
4401 SUBST (SET_SRC (x),
4402 gen_binary (IOR, GET_MODE (src),
4403 gen_binary (IOR, GET_MODE (src), term1, term2),
4404 term3));
4405
4406 src = SET_SRC (x);
4407 }
230d793d 4408
246e00f2
RK
4409 /* If either SRC or DEST is a CLOBBER of (const_int 0), make this
4410 whole thing fail. */
4411 if (GET_CODE (src) == CLOBBER && XEXP (src, 0) == const0_rtx)
4412 return src;
4413 else if (GET_CODE (dest) == CLOBBER && XEXP (dest, 0) == const0_rtx)
4414 return dest;
4415 else
4416 /* Convert this into a field assignment operation, if possible. */
4417 return make_field_assignment (x);
8079805d
RK
4418}
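/* Illustrative stand-alone sketch, not part of combine.c: the identity
 behind the IF_THEN_ELSE rewrite above. When the condition value X is
 known to be 0 or -1 (all sign-bit copies), a conditional move is the
 same as (ior (and X T) (and (not X) F)). The helper name is
 hypothetical. */
#include <assert.h>

static int
select_by_mask (int x, int t, int f)
{
 return (x & t) | (~x & f);
}

int
main ()
{
 assert (select_by_mask (-1, 7, 9) == 7); /* the X != 0 arm */
 assert (select_by_mask (0, 7, 9) == 9); /* the X == 0 arm */
 return 0;
}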
4419\f
4420/* Simplify X, an AND, IOR, or XOR operation, and return the simplified
4421 result. LAST is nonzero if this is the last retry. */
4422
4423static rtx
4424simplify_logical (x, last)
4425 rtx x;
4426 int last;
4427{
4428 enum machine_mode mode = GET_MODE (x);
4429 rtx op0 = XEXP (x, 0);
4430 rtx op1 = XEXP (x, 1);
4431
4432 switch (GET_CODE (x))
4433 {
230d793d 4434 case AND:
8079805d
RK
4435 /* Convert (A ^ B) & A to A & (~ B) since the latter is often a single
4436 insn (and may simplify more). */
4437 if (GET_CODE (op0) == XOR
4438 && rtx_equal_p (XEXP (op0, 0), op1)
4439 && ! side_effects_p (op1))
0c1c8ea6
RK
4440 x = gen_binary (AND, mode,
4441 gen_unary (NOT, mode, mode, XEXP (op0, 1)), op1);
8079805d
RK
4442
4443 if (GET_CODE (op0) == XOR
4444 && rtx_equal_p (XEXP (op0, 1), op1)
4445 && ! side_effects_p (op1))
0c1c8ea6
RK
4446 x = gen_binary (AND, mode,
4447 gen_unary (NOT, mode, mode, XEXP (op0, 0)), op1);
8079805d
RK
4448
4449 /* Similarly for (~ (A ^ B)) & A. */
4450 if (GET_CODE (op0) == NOT
4451 && GET_CODE (XEXP (op0, 0)) == XOR
4452 && rtx_equal_p (XEXP (XEXP (op0, 0), 0), op1)
4453 && ! side_effects_p (op1))
4454 x = gen_binary (AND, mode, XEXP (XEXP (op0, 0), 1), op1);
4455
4456 if (GET_CODE (op0) == NOT
4457 && GET_CODE (XEXP (op0, 0)) == XOR
4458 && rtx_equal_p (XEXP (XEXP (op0, 0), 1), op1)
4459 && ! side_effects_p (op1))
4460 x = gen_binary (AND, mode, XEXP (XEXP (op0, 0), 0), op1);
4461
4462 if (GET_CODE (op1) == CONST_INT)
230d793d 4463 {
8079805d 4464 x = simplify_and_const_int (x, mode, op0, INTVAL (op1));
230d793d
RS
4465
4466 /* If we have (ior (and X C1) C2) and the next restart would be
4467 the last, simplify this by making C1 as small as possible
4468 and then exit. */
8079805d
RK
4469 if (last
4470 && GET_CODE (x) == IOR && GET_CODE (op0) == AND
4471 && GET_CODE (XEXP (op0, 1)) == CONST_INT
4472 && GET_CODE (op1) == CONST_INT)
4473 return gen_binary (IOR, mode,
4474 gen_binary (AND, mode, XEXP (op0, 0),
4475 GEN_INT (INTVAL (XEXP (op0, 1))
4476 & ~ INTVAL (op1))), op1);
230d793d
RS
4477
4478 if (GET_CODE (x) != AND)
8079805d 4479 return x;
230d793d
RS
4480 }
4481
4482 /* Convert (A | B) & A to A. */
8079805d
RK
4483 if (GET_CODE (op0) == IOR
4484 && (rtx_equal_p (XEXP (op0, 0), op1)
4485 || rtx_equal_p (XEXP (op0, 1), op1))
4486 && ! side_effects_p (XEXP (op0, 0))
4487 && ! side_effects_p (XEXP (op0, 1)))
4488 return op1;
230d793d 4489
d0ab8cd3 4490 /* In the following group of tests (and those in case IOR below),
230d793d
RS
4491 we start with some combination of logical operations and apply
4492 the distributive law followed by the inverse distributive law.
4493 Most of the time, this results in no change. However, if some of
4494 the operands are the same or inverses of each other, simplifications
4495 will result.
4496
4497 For example, (and (ior A B) (not B)) can occur as the result of
4498 expanding a bit field assignment. When we apply the distributive
4499 law to this, we get (ior (and A (not B)) (and B (not B))),
8079805d 4500 which then simplifies to (and A (not B)).
230d793d 4501
8079805d 4502 If we have (and (ior A B) C), apply the distributive law and then
230d793d
RS
4503 the inverse distributive law to see if things simplify. */
4504
8079805d 4505 if (GET_CODE (op0) == IOR || GET_CODE (op0) == XOR)
230d793d
RS
4506 {
4507 x = apply_distributive_law
8079805d
RK
4508 (gen_binary (GET_CODE (op0), mode,
4509 gen_binary (AND, mode, XEXP (op0, 0), op1),
4510 gen_binary (AND, mode, XEXP (op0, 1), op1)));
230d793d 4511 if (GET_CODE (x) != AND)
8079805d 4512 return x;
230d793d
RS
4513 }
4514
8079805d
RK
4515 if (GET_CODE (op1) == IOR || GET_CODE (op1) == XOR)
4516 return apply_distributive_law
4517 (gen_binary (GET_CODE (op1), mode,
4518 gen_binary (AND, mode, XEXP (op1, 0), op0),
4519 gen_binary (AND, mode, XEXP (op1, 1), op0)));
230d793d
RS
4520
4521 /* Similarly, taking advantage of the fact that
4522 (and (not A) (xor B C)) == (xor (ior A B) (ior A C)) */
4523
8079805d
RK
4524 if (GET_CODE (op0) == NOT && GET_CODE (op1) == XOR)
4525 return apply_distributive_law
4526 (gen_binary (XOR, mode,
4527 gen_binary (IOR, mode, XEXP (op0, 0), XEXP (op1, 0)),
4528 gen_binary (IOR, mode, XEXP (op0, 0), XEXP (op1, 1))));
230d793d 4529
8079805d
RK
4530 else if (GET_CODE (op1) == NOT && GET_CODE (op0) == XOR)
4531 return apply_distributive_law
4532 (gen_binary (XOR, mode,
4533 gen_binary (IOR, mode, XEXP (op1, 0), XEXP (op0, 0)),
4534 gen_binary (IOR, mode, XEXP (op1, 0), XEXP (op0, 1))));
230d793d
RS
4535 break;
4536
4537 case IOR:
951553af 4538 /* (ior A C) is C if all bits of A that might be nonzero are on in C. */
8079805d 4539 if (GET_CODE (op1) == CONST_INT
ac49a949 4540 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
8079805d
RK
4541 && (nonzero_bits (op0, mode) & ~ INTVAL (op1)) == 0)
4542 return op1;
d0ab8cd3 4543
230d793d 4544 /* Convert (A & B) | A to A. */
8079805d
RK
4545 if (GET_CODE (op0) == AND
4546 && (rtx_equal_p (XEXP (op0, 0), op1)
4547 || rtx_equal_p (XEXP (op0, 1), op1))
4548 && ! side_effects_p (XEXP (op0, 0))
4549 && ! side_effects_p (XEXP (op0, 1)))
4550 return op1;
230d793d
RS
4551
4552 /* If we have (ior (and A B) C), apply the distributive law and then
4553 the inverse distributive law to see if things simplify. */
4554
8079805d 4555 if (GET_CODE (op0) == AND)
230d793d
RS
4556 {
4557 x = apply_distributive_law
4558 (gen_binary (AND, mode,
8079805d
RK
4559 gen_binary (IOR, mode, XEXP (op0, 0), op1),
4560 gen_binary (IOR, mode, XEXP (op0, 1), op1)));
230d793d
RS
4561
4562 if (GET_CODE (x) != IOR)
8079805d 4563 return x;
230d793d
RS
4564 }
4565
8079805d 4566 if (GET_CODE (op1) == AND)
230d793d
RS
4567 {
4568 x = apply_distributive_law
4569 (gen_binary (AND, mode,
8079805d
RK
4570 gen_binary (IOR, mode, XEXP (op1, 0), op0),
4571 gen_binary (IOR, mode, XEXP (op1, 1), op0)));
230d793d
RS
4572
4573 if (GET_CODE (x) != IOR)
8079805d 4574 return x;
230d793d
RS
4575 }
4576
4577 /* Convert (ior (ashift A CX) (lshiftrt A CY)) where CX+CY equals the
4578 mode size to (rotate A CX). */
4579
8079805d
RK
4580 if (((GET_CODE (op0) == ASHIFT && GET_CODE (op1) == LSHIFTRT)
4581 || (GET_CODE (op1) == ASHIFT && GET_CODE (op0) == LSHIFTRT))
4582 && rtx_equal_p (XEXP (op0, 0), XEXP (op1, 0))
4583 && GET_CODE (XEXP (op0, 1)) == CONST_INT
4584 && GET_CODE (XEXP (op1, 1)) == CONST_INT
4585 && (INTVAL (XEXP (op0, 1)) + INTVAL (XEXP (op1, 1))
230d793d 4586 == GET_MODE_BITSIZE (mode)))
8079805d
RK
4587 return gen_rtx (ROTATE, mode, XEXP (op0, 0),
4588 (GET_CODE (op0) == ASHIFT
4589 ? XEXP (op0, 1) : XEXP (op1, 1)));
230d793d 4590
71923da7
RK
4591 /* If OP0 is (ashiftrt (plus ...) C), it might actually be
4592 a (sign_extend (plus ...)). If so, OP1 is a CONST_INT; if the PLUS
4593 does not affect any of the bits in OP1, the IOR can really be done
4594 as a PLUS and we can associate. We do this by seeing if OP1
4595 can be safely shifted left C bits. */
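/* For example (illustrative): (ior (ashiftrt (plus X 0x100) 4) 7)
 can become (ashiftrt (plus X 0x170) 4) when bits 4..6 of the PLUS
 are known zero, since 7 << 4 == 0x70 touches only those bits. */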
4596 if (GET_CODE (op1) == CONST_INT && GET_CODE (op0) == ASHIFTRT
4597 && GET_CODE (XEXP (op0, 0)) == PLUS
4598 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
4599 && GET_CODE (XEXP (op0, 1)) == CONST_INT
4600 && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT)
4601 {
4602 int count = INTVAL (XEXP (op0, 1));
4603 HOST_WIDE_INT mask = INTVAL (op1) << count;
4604
4605 if (mask >> count == INTVAL (op1)
4606 && (mask & nonzero_bits (XEXP (op0, 0), mode)) == 0)
4607 {
4608 SUBST (XEXP (XEXP (op0, 0), 1),
4609 GEN_INT (INTVAL (XEXP (XEXP (op0, 0), 1)) | mask));
4610 return op0;
4611 }
4612 }
230d793d
RS
4613 break;
4614
4615 case XOR:
4616 /* Convert (XOR (NOT x) (NOT y)) to (XOR x y).
4617 Also convert (XOR (NOT x) y) to (NOT (XOR x y)), similarly for
4618 (NOT y). */
4619 {
4620 int num_negated = 0;
230d793d 4621
8079805d
RK
4622 if (GET_CODE (op0) == NOT)
4623 num_negated++, op0 = XEXP (op0, 0);
4624 if (GET_CODE (op1) == NOT)
4625 num_negated++, op1 = XEXP (op1, 0);
230d793d
RS
4626
4627 if (num_negated == 2)
4628 {
8079805d
RK
4629 SUBST (XEXP (x, 0), op0);
4630 SUBST (XEXP (x, 1), op1);
230d793d
RS
4631 }
4632 else if (num_negated == 1)
0c1c8ea6 4633 return gen_unary (NOT, mode, mode, gen_binary (XOR, mode, op0, op1));
230d793d
RS
4634 }
4635
4636 /* Convert (xor (and A B) B) to (and (not A) B). The latter may
4637 correspond to a machine insn or result in further simplifications
4638 if B is a constant. */
4639
8079805d
RK
4640 if (GET_CODE (op0) == AND
4641 && rtx_equal_p (XEXP (op0, 1), op1)
4642 && ! side_effects_p (op1))
0c1c8ea6
RK
4643 return gen_binary (AND, mode,
4644 gen_unary (NOT, mode, mode, XEXP (op0, 0)),
8079805d 4645 op1);
230d793d 4646
8079805d
RK
4647 else if (GET_CODE (op0) == AND
4648 && rtx_equal_p (XEXP (op0, 0), op1)
4649 && ! side_effects_p (op1))
0c1c8ea6
RK
4650 return gen_binary (AND, mode,
4651 gen_unary (NOT, mode, mode, XEXP (op0, 1)),
8079805d 4652 op1);
230d793d
RS
4653
4654#if STORE_FLAG_VALUE == 1
4655 /* (xor (comparison foo bar) (const_int 1)) can become the reversed
4656 comparison. */
8079805d
RK
4657 if (op1 == const1_rtx
4658 && GET_RTX_CLASS (GET_CODE (op0)) == '<'
4659 && reversible_comparison_p (op0))
4660 return gen_rtx_combine (reverse_condition (GET_CODE (op0)),
4661 mode, XEXP (op0, 0), XEXP (op0, 1));
500c518b
RK
4662
4663 /* (lshiftrt foo C) where C is the number of bits in FOO minus 1
4664 is (lt foo (const_int 0)), so we can perform the above
4665 simplification. */
4666
8079805d
RK
4667 if (op1 == const1_rtx
4668 && GET_CODE (op0) == LSHIFTRT
4669 && GET_CODE (XEXP (op0, 1)) == CONST_INT
4670 && INTVAL (XEXP (op0, 1)) == GET_MODE_BITSIZE (mode) - 1)
4671 return gen_rtx_combine (GE, mode, XEXP (op0, 0), const0_rtx);
230d793d
RS
4672#endif
4673
4674 /* (xor (comparison foo bar) (const_int sign-bit))
4675 when STORE_FLAG_VALUE is the sign bit. */
5f4f0e22
CH
4676 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4677 && (STORE_FLAG_VALUE
4678 == (HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (mode) - 1))
8079805d
RK
4679 && op1 == const_true_rtx
4680 && GET_RTX_CLASS (GET_CODE (op0)) == '<'
4681 && reversible_comparison_p (op0))
4682 return gen_rtx_combine (reverse_condition (GET_CODE (op0)),
4683 mode, XEXP (op0, 0), XEXP (op0, 1));
230d793d
RS
4684 break;
4685 }
4686
4687 return x;
4688}
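/* Illustrative stand-alone sketch, not part of combine.c: the main
 identities used by simplify_logical, checked exhaustively on 8-bit
 values. */
#include <assert.h>

int
main ()
{
 unsigned a, b;

 for (a = 0; a < 256; a++)
 for (b = 0; b < 256; b++)
 {
 unsigned char x = a, y = b;

 /* (A ^ B) & A == A & ~B */
 assert (((x ^ y) & x) == (x & (unsigned char) ~y));
 /* (A | B) & A == A and (A & B) | A == A */
 assert (((x | y) & x) == x && ((x & y) | x) == x);
 /* (not A) & (B ^ C) == (A | B) ^ (A | C), here with C == 0x5a */
 assert ((unsigned char) (~x & (y ^ 0x5a))
 == (unsigned char) ((x | y) ^ (x | 0x5a)));
 }

 /* (ior (ashift A 3) (lshiftrt A 5)) in QImode is (rotate A 3). */
 assert ((unsigned char) ((0xb4 << 3) | (0xb4 >> 5)) == 0xa5);
 return 0;
}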
4689\f
4690/* We consider ZERO_EXTRACT, SIGN_EXTRACT, and SIGN_EXTEND as "compound
4691 operations" because they can be replaced with two more basic operations.
4692 ZERO_EXTEND is also considered "compound" because it can be replaced with
4693 an AND operation, which is simpler, though only one operation.
4694
4695 The function expand_compound_operation is called with an rtx expression
4696 and will convert it to the appropriate shifts and AND operations,
4697 simplifying at each stage.
4698
4699 The function make_compound_operation is called to convert an expression
4700 consisting of shifts and ANDs into the equivalent compound expression.
4701 It is the inverse of this function, loosely speaking. */
4702
4703static rtx
4704expand_compound_operation (x)
4705 rtx x;
4706{
4707 int pos = 0, len;
4708 int unsignedp = 0;
4709 int modewidth;
4710 rtx tem;
4711
4712 switch (GET_CODE (x))
4713 {
4714 case ZERO_EXTEND:
4715 unsignedp = 1;
4716 case SIGN_EXTEND:
75473182
RS
4717 /* We can't necessarily use a const_int for a multiword mode;
4718 it depends on implicitly extending the value.
4719 Since we don't know the right way to extend it,
4720 we can't tell whether the implicit way is right.
4721
4722 Even for a mode that is no wider than a const_int,
4723 we can't win, because we need to sign extend one of its bits through
4724 the rest of it, and we don't know which bit. */
230d793d 4725 if (GET_CODE (XEXP (x, 0)) == CONST_INT)
75473182 4726 return x;
230d793d 4727
8079805d
RK
4728 /* Return if (subreg:MODE FROM 0) is not a safe replacement for
4729 (zero_extend:MODE FROM) or (sign_extend:MODE FROM). It is for any MEM
4730 because (SUBREG (MEM...)) is guaranteed to cause the MEM to be
4731 reloaded. If not for that, MEM's would very rarely be safe.
4732
4733 Reject MODEs bigger than a word, because we might not be able
4734 to reference a two-register group starting with an arbitrary register
4735 (and currently gen_lowpart might crash for a SUBREG). */
4736
4737 if (GET_MODE_SIZE (GET_MODE (XEXP (x, 0))) > UNITS_PER_WORD)
230d793d
RS
4738 return x;
4739
4740 len = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)));
4741 /* If the inner object has VOIDmode (the only way this can happen
4742 is if it is an ASM_OPERANDS), we can't do anything since we don't
4743 know how much masking to do. */
4744 if (len == 0)
4745 return x;
4746
4747 break;
4748
4749 case ZERO_EXTRACT:
4750 unsignedp = 1;
4751 case SIGN_EXTRACT:
4752 /* If the operand is a CLOBBER, just return it. */
4753 if (GET_CODE (XEXP (x, 0)) == CLOBBER)
4754 return XEXP (x, 0);
4755
4756 if (GET_CODE (XEXP (x, 1)) != CONST_INT
4757 || GET_CODE (XEXP (x, 2)) != CONST_INT
4758 || GET_MODE (XEXP (x, 0)) == VOIDmode)
4759 return x;
4760
4761 len = INTVAL (XEXP (x, 1));
4762 pos = INTVAL (XEXP (x, 2));
4763
4764 /* If this goes outside the object being extracted, replace the object
4765 with a (use (mem ...)) construct that only combine understands
4766 and is used only for this purpose. */
4767 if (len + pos > GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))))
4768 SUBST (XEXP (x, 0), gen_rtx (USE, GET_MODE (x), XEXP (x, 0)));
4769
4770#if BITS_BIG_ENDIAN
4771 pos = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - len - pos;
4772#endif
4773 break;
4774
4775 default:
4776 return x;
4777 }
4778
4779 /* If we reach here, we want to return a pair of shifts. The inner
4780 shift is a left shift of BITSIZE - POS - LEN bits. The outer
4781 shift is a right shift of BITSIZE - LEN bits. It is arithmetic or
4782 logical depending on the value of UNSIGNEDP.
4783
4784 If this was a ZERO_EXTEND or ZERO_EXTRACT, this pair of shifts will be
4785 converted into an AND of a shift.
4786
4787 We must check for the case where the left shift would have a negative
4788 count. This can happen in a case like (x >> 31) & 255 on machines
4789 that can't shift by a constant. On those machines, we would first
4790 combine the shift with the AND to produce a variable-position
4791 extraction. Then the constant of 31 would be substituted in to produce
4792 such a position. */
4793
4794 modewidth = GET_MODE_BITSIZE (GET_MODE (x));
4795 if (modewidth >= pos + len)
5f4f0e22 4796 tem = simplify_shift_const (NULL_RTX, unsignedp ? LSHIFTRT : ASHIFTRT,
230d793d 4797 GET_MODE (x),
5f4f0e22
CH
4798 simplify_shift_const (NULL_RTX, ASHIFT,
4799 GET_MODE (x),
230d793d
RS
4800 XEXP (x, 0),
4801 modewidth - pos - len),
4802 modewidth - len);
4803
5f4f0e22
CH
4804 else if (unsignedp && len < HOST_BITS_PER_WIDE_INT)
4805 tem = simplify_and_const_int (NULL_RTX, GET_MODE (x),
4806 simplify_shift_const (NULL_RTX, LSHIFTRT,
230d793d
RS
4807 GET_MODE (x),
4808 XEXP (x, 0), pos),
5f4f0e22 4809 ((HOST_WIDE_INT) 1 << len) - 1);
230d793d
RS
4810 else
4811 /* Any other cases we can't handle. */
4812 return x;
4813
4814
4815 /* If we couldn't do this for some reason, return the original
4816 expression. */
4817 if (GET_CODE (tem) == CLOBBER)
4818 return x;
4819
4820 return tem;
4821}
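/* Illustrative stand-alone sketch, not part of combine.c: the
 pair-of-shifts expansion for a 32-bit value, with hypothetical helper
 names. The signed case assumes >> on a signed int is an arithmetic
 shift, as GCC assumes for ASHIFTRT. */
#include <assert.h>

static int
sign_extract_32 (unsigned x, int pos, int len)
{
 /* Inner left shift of 32 - pos - len bits (done unsigned to avoid
 overflow), then an arithmetic right shift of 32 - len bits. */
 return (int) (x << (32 - pos - len)) >> (32 - len);
}

static unsigned
zero_extract_32 (unsigned x, int pos, int len)
{
 /* The unsigned form becomes one shift plus an AND mask. */
 return (x >> pos) & ((1u << len) - 1);
}

int
main ()
{
 /* Bits 2..5 of 0x2c (binary 101100) are 1011. */
 assert (zero_extract_32 (0x2c, 2, 4) == 0xb);
 assert (sign_extract_32 (0x2c, 2, 4) == -5); /* 1011 sign-extended */
 return 0;
}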
4822\f
4823/* X is a SET which contains an assignment of one object into
4824 a part of another (such as a bit-field assignment, STRICT_LOW_PART,
4825 or certain SUBREGS). If possible, convert it into a series of
4826 logical operations.
4827
4828 We half-heartedly support variable positions, but do not at all
4829 support variable lengths. */
4830
4831static rtx
4832expand_field_assignment (x)
4833 rtx x;
4834{
4835 rtx inner;
4836 rtx pos; /* Always counts from low bit. */
4837 int len;
4838 rtx mask;
4839 enum machine_mode compute_mode;
4840
4841 /* Loop until we find something we can't simplify. */
4842 while (1)
4843 {
4844 if (GET_CODE (SET_DEST (x)) == STRICT_LOW_PART
4845 && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG)
4846 {
4847 inner = SUBREG_REG (XEXP (SET_DEST (x), 0));
4848 len = GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)));
4849 pos = const0_rtx;
4850 }
4851 else if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
4852 && GET_CODE (XEXP (SET_DEST (x), 1)) == CONST_INT)
4853 {
4854 inner = XEXP (SET_DEST (x), 0);
4855 len = INTVAL (XEXP (SET_DEST (x), 1));
4856 pos = XEXP (SET_DEST (x), 2);
4857
4858 /* If the position is constant and spans the width of INNER,
4859 surround INNER with a USE to indicate this. */
4860 if (GET_CODE (pos) == CONST_INT
4861 && INTVAL (pos) + len > GET_MODE_BITSIZE (GET_MODE (inner)))
4862 inner = gen_rtx (USE, GET_MODE (SET_DEST (x)), inner);
4863
4864#if BITS_BIG_ENDIAN
4865 if (GET_CODE (pos) == CONST_INT)
5f4f0e22
CH
4866 pos = GEN_INT (GET_MODE_BITSIZE (GET_MODE (inner)) - len
4867 - INTVAL (pos));
230d793d
RS
4868 else if (GET_CODE (pos) == MINUS
4869 && GET_CODE (XEXP (pos, 1)) == CONST_INT
4870 && (INTVAL (XEXP (pos, 1))
4871 == GET_MODE_BITSIZE (GET_MODE (inner)) - len))
4872 /* If position is ADJUST - X, new position is X. */
4873 pos = XEXP (pos, 0);
4874 else
4875 pos = gen_binary (MINUS, GET_MODE (pos),
5f4f0e22
CH
4876 GEN_INT (GET_MODE_BITSIZE (GET_MODE (inner))
4877 - len),
4878 pos);
230d793d
RS
4879#endif
4880 }
4881
4882 /* A SUBREG between two modes that occupy the same numbers of words
4883 can be done by moving the SUBREG to the source. */
4884 else if (GET_CODE (SET_DEST (x)) == SUBREG
4885 && (((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
4886 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
4887 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
4888 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
4889 {
4890 x = gen_rtx (SET, VOIDmode, SUBREG_REG (SET_DEST (x)),
4891 gen_lowpart_for_combine (GET_MODE (SUBREG_REG (SET_DEST (x))),
4892 SET_SRC (x)));
4893 continue;
4894 }
4895 else
4896 break;
4897
4898 while (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner))
4899 inner = SUBREG_REG (inner);
4900
4901 compute_mode = GET_MODE (inner);
4902
4903 /* Compute a mask of LEN bits, if we can do this on the host machine. */
5f4f0e22
CH
4904 if (len < HOST_BITS_PER_WIDE_INT)
4905 mask = GEN_INT (((HOST_WIDE_INT) 1 << len) - 1);
230d793d
RS
4906 else
4907 break;
4908
4909 /* Now compute the equivalent expression. Make a copy of INNER
4910 for the SET_DEST in case it is a MEM into which we will substitute;
4911 we don't want shared RTL in that case. */
4912 x = gen_rtx (SET, VOIDmode, copy_rtx (inner),
4913 gen_binary (IOR, compute_mode,
4914 gen_binary (AND, compute_mode,
4915 gen_unary (NOT, compute_mode,
0c1c8ea6 4916 compute_mode,
230d793d
RS
4917 gen_binary (ASHIFT,
4918 compute_mode,
4919 mask, pos)),
4920 inner),
4921 gen_binary (ASHIFT, compute_mode,
4922 gen_binary (AND, compute_mode,
4923 gen_lowpart_for_combine
4924 (compute_mode,
4925 SET_SRC (x)),
4926 mask),
4927 pos)));
4928 }
4929
4930 return x;
4931}
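/* Illustrative stand-alone sketch, not part of combine.c: the
 IOR/AND/NOT form built above for storing SRC into LEN bits of INNER
 starting at bit POS. */
#include <assert.h>

static unsigned
store_field (unsigned inner, unsigned src, int pos, int len)
{
 unsigned mask = (1u << len) - 1;

 /* (ior (and inner (not (ashift mask pos)))
 (ashift (and src mask) pos)) */
 return (inner & ~(mask << pos)) | ((src & mask) << pos);
}

int
main ()
{
 /* Store the 4-bit value 5 at bit 4 of an all-ones word. */
 assert (store_field (0xffffffffu, 5, 4, 4) == 0xffffff5fu);
 return 0;
}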
4932\f
8999a12e
RK
4933/* Return an RTX for a reference to LEN bits of INNER. If POS_RTX is nonzero,
4934 it is an RTX that represents a variable starting position; otherwise,
4935 POS is the (constant) starting bit position (counted from the LSB).
230d793d
RS
4936
4937 INNER may be a USE. This will occur when we started with a bitfield
4938 that went outside the boundary of the object in memory, which is
4939 allowed on most machines. To isolate this case, we produce a USE
4940 whose mode is wide enough and surround the MEM with it. The only
4941 code that understands the USE is this routine. If it is not removed,
4942 it will cause the resulting insn not to match.
4943
4944 UNSIGNEDP is non-zero for an unsigned reference and zero for a
4945 signed reference.
4946
4947 IN_DEST is non-zero if this is a reference in the destination of a
4948 SET. This is used when a ZERO_ or SIGN_EXTRACT isn't needed. If non-zero,
4949 a STRICT_LOW_PART will be used, if zero, ZERO_EXTEND or SIGN_EXTEND will
4950 be used.
4951
4952 IN_COMPARE is non-zero if we are in a COMPARE. This means that a
4953 ZERO_EXTRACT should be built even for bits starting at bit 0.
4954
4955 MODE is the desired mode of the result (if IN_DEST == 0). */
4956
4957static rtx
4958make_extraction (mode, inner, pos, pos_rtx, len,
4959 unsignedp, in_dest, in_compare)
4960 enum machine_mode mode;
4961 rtx inner;
4962 int pos;
4963 rtx pos_rtx;
4964 int len;
4965 int unsignedp;
4966 int in_dest, in_compare;
4967{
94b4b17a
RS
4968 /* This mode describes the size of the storage area
4969 to fetch the overall value from. Within that, we
4970 ignore the POS lowest bits, etc. */
230d793d
RS
4971 enum machine_mode is_mode = GET_MODE (inner);
4972 enum machine_mode inner_mode;
4973 enum machine_mode wanted_mem_mode = byte_mode;
4974 enum machine_mode pos_mode = word_mode;
4975 enum machine_mode extraction_mode = word_mode;
4976 enum machine_mode tmode = mode_for_size (len, MODE_INT, 1);
4977 int spans_byte = 0;
4978 rtx new = 0;
8999a12e 4979 rtx orig_pos_rtx = pos_rtx;
6139ff20 4980 int orig_pos;
230d793d
RS
4981
4982 /* Get some information about INNER and get the innermost object. */
4983 if (GET_CODE (inner) == USE)
94b4b17a 4984 /* (use:SI (mem:QI foo)) stands for (mem:SI foo). */
230d793d
RS
4985 /* We don't need to adjust the position because we set up the USE
4986 to pretend that it was a full-word object. */
4987 spans_byte = 1, inner = XEXP (inner, 0);
4988 else if (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner))
94b4b17a
RS
4989 {
4990 /* If going from (subreg:SI (mem:QI ...)) to (mem:QI ...),
4991 consider just the QI as the memory to extract from.
4992 The subreg adds or removes high bits; its mode is
4993 irrelevant to the meaning of this extraction,
4994 since POS and LEN count from the lsb. */
4995 if (GET_CODE (SUBREG_REG (inner)) == MEM)
4996 is_mode = GET_MODE (SUBREG_REG (inner));
4997 inner = SUBREG_REG (inner);
4998 }
230d793d
RS
4999
5000 inner_mode = GET_MODE (inner);
5001
5002 if (pos_rtx && GET_CODE (pos_rtx) == CONST_INT)
8999a12e 5003 pos = INTVAL (pos_rtx), pos_rtx = 0;
230d793d
RS
5004
5005 /* See if this can be done without an extraction. We never can if the
5006 width of the field is not the same as that of some integer mode. For
5007 registers, we can only avoid the extraction if the position is at the
5008 low-order bit and this is either not in the destination or we have the
5009 appropriate STRICT_LOW_PART operation available.
5010
5011 For MEM, we can avoid an extract if the field starts on an appropriate
5012 boundary and we can change the mode of the memory reference. However,
5013 we cannot directly access the MEM if we have a USE and the underlying
5014 MEM is not TMODE. This combination means that MEM was being used in a
5015 context where bits outside its mode were being referenced; that is only
5016 valid in bit-field insns. */
5017
5018 if (tmode != BLKmode
5019 && ! (spans_byte && inner_mode != tmode)
8999a12e 5020 && ((pos_rtx == 0 && pos == 0 && GET_CODE (inner) != MEM
230d793d 5021 && (! in_dest
df62f951
RK
5022 || (GET_CODE (inner) == REG
5023 && (movstrict_optab->handlers[(int) tmode].insn_code
5024 != CODE_FOR_nothing))))
8999a12e 5025 || (GET_CODE (inner) == MEM && pos_rtx == 0
dfbe1b2f
RK
5026 && (pos
5027 % (STRICT_ALIGNMENT ? GET_MODE_ALIGNMENT (tmode)
5028 : BITS_PER_UNIT)) == 0
230d793d
RS
5029 /* We can't do this if we are widening INNER_MODE (it
5030 may not be aligned, for one thing). */
5031 && GET_MODE_BITSIZE (inner_mode) >= GET_MODE_BITSIZE (tmode)
5032 && (inner_mode == tmode
5033 || (! mode_dependent_address_p (XEXP (inner, 0))
5034 && ! MEM_VOLATILE_P (inner))))))
5035 {
230d793d
RS
5036 /* If INNER is a MEM, make a new MEM that encompasses just the desired
5037 field. If the original and current mode are the same, we need not
5038 adjust the offset. Otherwise, we do if bytes are big-endian.
5039
5040 If INNER is not a MEM, get a piece consisting of just the field
df62f951 5041 of interest (in this case POS must be 0). */
230d793d
RS
5042
5043 if (GET_CODE (inner) == MEM)
5044 {
94b4b17a
RS
5045 int offset;
5046 /* POS counts from lsb, but make OFFSET count in memory order. */
5047 if (BYTES_BIG_ENDIAN)
5048 offset = (GET_MODE_BITSIZE (is_mode) - len - pos) / BITS_PER_UNIT;
5049 else
5050 offset = pos / BITS_PER_UNIT;
230d793d
RS
5051
5052 new = gen_rtx (MEM, tmode, plus_constant (XEXP (inner, 0), offset));
5053 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (inner);
5054 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (inner);
5055 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (inner);
5056 }
df62f951 5057 else if (GET_CODE (inner) == REG)
c0d3ac4d
RK
5058 {
5059 /* We can't call gen_lowpart_for_combine here since we always want
5060 a SUBREG and it would sometimes return a new hard register. */
5061 if (tmode != inner_mode)
5062 new = gen_rtx (SUBREG, tmode, inner,
5063 (WORDS_BIG_ENDIAN
5064 && GET_MODE_SIZE (inner_mode) > UNITS_PER_WORD
5065 ? ((GET_MODE_SIZE (inner_mode)
5066 - GET_MODE_SIZE (tmode))
5067 / UNITS_PER_WORD)
5068 : 0));
5069 else
5070 new = inner;
5071 }
230d793d 5072 else
6139ff20
RK
5073 new = force_to_mode (inner, tmode,
5074 len >= HOST_BITS_PER_WIDE_INT
5075 ? GET_MODE_MASK (tmode)
5076 : ((HOST_WIDE_INT) 1 << len) - 1,
e3d616e3 5077 NULL_RTX, 0);
230d793d
RS
5078
5079 /* If this extraction is going into the destination of a SET,
5080 make a STRICT_LOW_PART unless we made a MEM. */
5081
5082 if (in_dest)
5083 return (GET_CODE (new) == MEM ? new
77fa0940
RK
5084 : (GET_CODE (new) != SUBREG
5085 ? gen_rtx (CLOBBER, tmode, const0_rtx)
5086 : gen_rtx_combine (STRICT_LOW_PART, VOIDmode, new)));
230d793d
RS
5087
5088 /* Otherwise, sign- or zero-extend unless we already are in the
5089 proper mode. */
5090
5091 return (mode == tmode ? new
5092 : gen_rtx_combine (unsignedp ? ZERO_EXTEND : SIGN_EXTEND,
5093 mode, new));
5094 }
5095
cc471082
RS
5096 /* Unless this is a COMPARE or we have a funny memory reference,
5097 don't do anything with zero-extending field extracts starting at
5098 the low-order bit since they are simple AND operations. */
8999a12e
RK
5099 if (pos_rtx == 0 && pos == 0 && ! in_dest
5100 && ! in_compare && ! spans_byte && unsignedp)
230d793d
RS
5101 return 0;
5102
e7373556
RK
5103 /* Unless we are allowed to span bytes, reject this if we would be
5104 spanning bytes or if the position is not a constant and the length
5105 is not 1. In all other cases, we would only be going outside
5106 our object in cases when an original shift would have been
5107 undefined. */
5108 if (! spans_byte
5109 && ((pos_rtx == 0 && pos + len > GET_MODE_BITSIZE (is_mode))
5110 || (pos_rtx != 0 && len != 1)))
5111 return 0;
5112
230d793d
RS
5113 /* Get the mode to use should INNER be a MEM, the mode for the position,
5114 and the mode for the result. */
5115#ifdef HAVE_insv
5116 if (in_dest)
5117 {
5118 wanted_mem_mode = insn_operand_mode[(int) CODE_FOR_insv][0];
5119 pos_mode = insn_operand_mode[(int) CODE_FOR_insv][2];
5120 extraction_mode = insn_operand_mode[(int) CODE_FOR_insv][3];
5121 }
5122#endif
5123
5124#ifdef HAVE_extzv
5125 if (! in_dest && unsignedp)
5126 {
5127 wanted_mem_mode = insn_operand_mode[(int) CODE_FOR_extzv][1];
5128 pos_mode = insn_operand_mode[(int) CODE_FOR_extzv][3];
5129 extraction_mode = insn_operand_mode[(int) CODE_FOR_extzv][0];
5130 }
5131#endif
5132
5133#ifdef HAVE_extv
5134 if (! in_dest && ! unsignedp)
5135 {
5136 wanted_mem_mode = insn_operand_mode[(int) CODE_FOR_extv][1];
5137 pos_mode = insn_operand_mode[(int) CODE_FOR_extv][3];
5138 extraction_mode = insn_operand_mode[(int) CODE_FOR_extv][0];
5139 }
5140#endif
5141
5142 /* Never narrow an object, since that might not be safe. */
5143
5144 if (mode != VOIDmode
5145 && GET_MODE_SIZE (extraction_mode) < GET_MODE_SIZE (mode))
5146 extraction_mode = mode;
5147
5148 if (pos_rtx && GET_MODE (pos_rtx) != VOIDmode
5149 && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx)))
5150 pos_mode = GET_MODE (pos_rtx);
5151
5152 /* If this is not from memory or we have to change the mode of memory and
5153 cannot, the desired mode is EXTRACTION_MODE. */
5154 if (GET_CODE (inner) != MEM
5155 || (inner_mode != wanted_mem_mode
5156 && (mode_dependent_address_p (XEXP (inner, 0))
5157 || MEM_VOLATILE_P (inner))))
5158 wanted_mem_mode = extraction_mode;
5159
6139ff20
RK
5160 orig_pos = pos;
5161
230d793d
RS
5162#if BITS_BIG_ENDIAN
5163 /* If position is constant, compute new position. Otherwise, build
5164 subtraction. */
8999a12e 5165 if (pos_rtx == 0)
230d793d
RS
5166 pos = (MAX (GET_MODE_BITSIZE (is_mode), GET_MODE_BITSIZE (wanted_mem_mode))
5167 - len - pos);
5168 else
5169 pos_rtx
5170 = gen_rtx_combine (MINUS, GET_MODE (pos_rtx),
5f4f0e22
CH
5171 GEN_INT (MAX (GET_MODE_BITSIZE (is_mode),
5172 GET_MODE_BITSIZE (wanted_mem_mode))
5173 - len),
5174 pos_rtx);
230d793d
RS
5175#endif
5176
5177 /* If INNER has a wider mode, make it smaller. If this is a constant
5178 extract, try to adjust the byte to point to the byte containing
5179 the value. */
5180 if (wanted_mem_mode != VOIDmode
5181 && GET_MODE_SIZE (wanted_mem_mode) < GET_MODE_SIZE (is_mode)
5182 && ((GET_CODE (inner) == MEM
5183 && (inner_mode == wanted_mem_mode
5184 || (! mode_dependent_address_p (XEXP (inner, 0))
5185 && ! MEM_VOLATILE_P (inner))))))
5186 {
5187 int offset = 0;
5188
5189 /* The computations below will be correct if the machine is big
5190 endian in both bits and bytes or little endian in bits and bytes.
5191 If it is mixed, we must adjust. */
5192
230d793d
RS
5193 /* If bytes are big endian and we had a paradoxical SUBREG, we must
5194 adjust OFFSET to compensate. */
5195#if BYTES_BIG_ENDIAN
5196 if (! spans_byte
5197 && GET_MODE_SIZE (inner_mode) < GET_MODE_SIZE (is_mode))
5198 offset -= GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (inner_mode);
5199#endif
5200
5201 /* If this is a constant position, we can move to the desired byte. */
8999a12e 5202 if (pos_rtx == 0)
230d793d
RS
5203 {
5204 offset += pos / BITS_PER_UNIT;
5205 pos %= GET_MODE_BITSIZE (wanted_mem_mode);
5206 }
5207
c6b3f1f2
JW
5208#if BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN
5209 if (! spans_byte && is_mode != wanted_mem_mode)
5210 offset = (GET_MODE_SIZE (is_mode)
5211 - GET_MODE_SIZE (wanted_mem_mode) - offset);
5212#endif
5213
230d793d
RS
5214 if (offset != 0 || inner_mode != wanted_mem_mode)
5215 {
5216 rtx newmem = gen_rtx (MEM, wanted_mem_mode,
5217 plus_constant (XEXP (inner, 0), offset));
5218 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (inner);
5219 MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (inner);
5220 MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (inner);
5221 inner = newmem;
5222 }
5223 }
5224
5225 /* If INNER is not memory, we can always get it into the proper mode. */
5226 else if (GET_CODE (inner) != MEM)
d0ab8cd3 5227 inner = force_to_mode (inner, extraction_mode,
6139ff20
RK
5228 pos_rtx || len + orig_pos >= HOST_BITS_PER_WIDE_INT
5229 ? GET_MODE_MASK (extraction_mode)
5230 : (((HOST_WIDE_INT) 1 << len) - 1) << orig_pos,
e3d616e3 5231 NULL_RTX, 0);
230d793d
RS
5232
5233 /* Adjust mode of POS_RTX, if needed. If we want a wider mode, we
5234 have to zero extend. Otherwise, we can just use a SUBREG. */
8999a12e 5235 if (pos_rtx != 0
230d793d
RS
5236 && GET_MODE_SIZE (pos_mode) > GET_MODE_SIZE (GET_MODE (pos_rtx)))
5237 pos_rtx = gen_rtx_combine (ZERO_EXTEND, pos_mode, pos_rtx);
8999a12e 5238 else if (pos_rtx != 0
230d793d
RS
5239 && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx)))
5240 pos_rtx = gen_lowpart_for_combine (pos_mode, pos_rtx);
5241
8999a12e
RK
5242 /* Make POS_RTX unless we already have it and it is correct. If we don't
5243 have a POS_RTX but we do have an ORIG_POS_RTX, the latter must
5244 be a CONST_INT. */
5245 if (pos_rtx == 0 && orig_pos_rtx != 0 && INTVAL (orig_pos_rtx) == pos)
5246 pos_rtx = orig_pos_rtx;
5247
5248 else if (pos_rtx == 0)
5f4f0e22 5249 pos_rtx = GEN_INT (pos);
230d793d
RS
5250
5251 /* Make the required operation. See if we can use existing rtx. */
5252 new = gen_rtx_combine (unsignedp ? ZERO_EXTRACT : SIGN_EXTRACT,
5f4f0e22 5253 extraction_mode, inner, GEN_INT (len), pos_rtx);
230d793d
RS
5254 if (! in_dest)
5255 new = gen_lowpart_for_combine (mode, new);
5256
5257 return new;
5258}
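/* Illustrative stand-alone sketch, not part of combine.c: the
 BITS_BIG_ENDIAN position flip used above. A field of LEN bits at
 LSB-relative position POS is at MSB-relative position
 WIDTH - LEN - POS, and the two numberings select the same bits. */
#include <assert.h>

static unsigned
extract_lsb (unsigned x, int pos, int len)
{
 return (x >> pos) & ((1u << len) - 1);
}

static unsigned
extract_msb (unsigned x, int pos_be, int len)
{
 return (x >> (32 - pos_be - len)) & ((1u << len) - 1);
}

int
main ()
{
 unsigned x = 0xdeadbeef;

 assert (extract_lsb (x, 2, 4) == extract_msb (x, 32 - 4 - 2, 4));
 return 0;
}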
5259\f
71923da7
RK
5260/* See if X contains an ASHIFT of COUNT or more bits that can be commuted
5261 with any other operations in X. Return X without that shift if so. */
5262
5263static rtx
5264extract_left_shift (x, count)
5265 rtx x;
5266 int count;
5267{
5268 enum rtx_code code = GET_CODE (x);
5269 enum machine_mode mode = GET_MODE (x);
5270 rtx tem;
5271
5272 switch (code)
5273 {
5274 case ASHIFT:
5275 /* This is the shift itself. If it is wide enough, we will return
5276 either the value being shifted if the shift count is equal to
5277 COUNT or a shift for the difference. */
5278 if (GET_CODE (XEXP (x, 1)) == CONST_INT
5279 && INTVAL (XEXP (x, 1)) >= count)
5280 return simplify_shift_const (NULL_RTX, ASHIFT, mode, XEXP (x, 0),
5281 INTVAL (XEXP (x, 1)) - count);
5282 break;
5283
5284 case NEG: case NOT:
5285 if ((tem = extract_left_shift (XEXP (x, 0), count)) != 0)
0c1c8ea6 5286 return gen_unary (code, mode, mode, tem);
71923da7
RK
5287
5288 break;
5289
5290 case PLUS: case IOR: case XOR: case AND:
5291 /* If we can safely shift this constant and we find the inner shift,
5292 make a new operation. */
5293 if (GET_CODE (XEXP (x, 1)) == CONST_INT
5294 && (INTVAL (XEXP (x, 1)) & (((HOST_WIDE_INT) 1 << count) - 1)) == 0
5295 && (tem = extract_left_shift (XEXP (x, 0), count)) != 0)
5296 return gen_binary (code, mode, tem,
5297 GEN_INT (INTVAL (XEXP (x, 1)) >> count));
5298
5299 break;
5300 }
5301
5302 return 0;
5303}
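/* Illustrative stand-alone sketch, not part of combine.c: why a left
 shift commutes past PLUS/IOR/XOR/AND with a constant whose low COUNT
 bits are zero -- no bits cross the shift boundary. */
#include <assert.h>

int
main ()
{
 unsigned a = 0x1234;
 unsigned k = 0xa8; /* low 3 bits are zero */

 assert (((a << 3) | k) == ((a | (k >> 3)) << 3));
 assert (((a << 3) + k) == ((a + (k >> 3)) << 3));
 return 0;
}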
5304\f
230d793d
RS
5305/* Look at the expression rooted at X. Look for expressions
5306 equivalent to ZERO_EXTRACT, SIGN_EXTRACT, ZERO_EXTEND, SIGN_EXTEND.
5307 Form these expressions.
5308
5309 Return the new rtx, usually just X.
5310
5311 Also, for machines like the Vax that don't have logical shift insns,
5312 try to convert logical to arithmetic shift operations in cases where
5313 they are equivalent. This undoes the canonicalizations to logical
5314 shifts done elsewhere.
5315
5316 We try, as much as possible, to re-use rtl expressions to save memory.
5317
5318 IN_CODE says what kind of expression we are processing. Normally, it is
42495ca0
RK
5319 SET. In a memory address (inside a MEM, PLUS or MINUS, the latter two
5320 being kludges), it is MEM. When processing the arguments of a comparison
230d793d
RS
5321 or a COMPARE against zero, it is COMPARE. */
5322
5323static rtx
5324make_compound_operation (x, in_code)
5325 rtx x;
5326 enum rtx_code in_code;
5327{
5328 enum rtx_code code = GET_CODE (x);
5329 enum machine_mode mode = GET_MODE (x);
5330 int mode_width = GET_MODE_BITSIZE (mode);
71923da7 5331 rtx rhs, lhs;
230d793d 5332 enum rtx_code next_code;
f24ad0e4 5333 int i;
230d793d 5334 rtx new = 0;
280f58ba 5335 rtx tem;
230d793d
RS
5336 char *fmt;
5337
5338 /* Select the code to be used in recursive calls. Once we are inside an
5339 address, we stay there. If we have a comparison, set to COMPARE,
5340 but once inside, go back to our default of SET. */
5341
42495ca0 5342 next_code = (code == MEM || code == PLUS || code == MINUS ? MEM
230d793d
RS
5343 : ((code == COMPARE || GET_RTX_CLASS (code) == '<')
5344 && XEXP (x, 1) == const0_rtx) ? COMPARE
5345 : in_code == COMPARE ? SET : in_code);
5346
5347 /* Process depending on the code of this operation. If NEW is set
5348 non-zero, it will be returned. */
5349
5350 switch (code)
5351 {
5352 case ASHIFT:
230d793d
RS
5353 /* Convert shifts by constants into multiplications if inside
5354 an address. */
5355 if (in_code == MEM && GET_CODE (XEXP (x, 1)) == CONST_INT
5f4f0e22 5356 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
230d793d 5357 && INTVAL (XEXP (x, 1)) >= 0)
280f58ba
RK
5358 {
5359 new = make_compound_operation (XEXP (x, 0), next_code);
5360 new = gen_rtx_combine (MULT, mode, new,
5361 GEN_INT ((HOST_WIDE_INT) 1
5362 << INTVAL (XEXP (x, 1))));
5363 }
230d793d
RS
5364 break;
5365
5366 case AND:
5367 /* If the second operand is not a constant, we can't do anything
5368 with it. */
5369 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5370 break;
5371
5372 /* If the constant is a power of two minus one and the first operand
5373 is a logical right shift, make an extraction. */
5374 if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
5375 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
280f58ba
RK
5376 {
5377 new = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code);
5378 new = make_extraction (mode, new, 0, XEXP (XEXP (x, 0), 1), i, 1,
5379 0, in_code == COMPARE);
5380 }
dfbe1b2f 5381
230d793d
RS
5382 /* Same as previous, but for (subreg (lshiftrt ...)) in first op. */
5383 else if (GET_CODE (XEXP (x, 0)) == SUBREG
5384 && subreg_lowpart_p (XEXP (x, 0))
5385 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == LSHIFTRT
5386 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
280f58ba
RK
5387 {
5388 new = make_compound_operation (XEXP (SUBREG_REG (XEXP (x, 0)), 0),
5389 next_code);
aadfb062 5390 new = make_extraction (mode, new, 0,
280f58ba
RK
5391 XEXP (SUBREG_REG (XEXP (x, 0)), 1), i, 1,
5392 0, in_code == COMPARE);
5393 }
45620ed4 5394 /* Same as previous, but for (xor/ior (lshiftrt...) (lshiftrt...)). */
c2f9f64e
JW
5395 else if ((GET_CODE (XEXP (x, 0)) == XOR
5396 || GET_CODE (XEXP (x, 0)) == IOR)
5397 && GET_CODE (XEXP (XEXP (x, 0), 0)) == LSHIFTRT
5398 && GET_CODE (XEXP (XEXP (x, 0), 1)) == LSHIFTRT
5399 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
5400 {
5401 /* Apply the distributive law, and then try to make extractions. */
5402 new = gen_rtx_combine (GET_CODE (XEXP (x, 0)), mode,
5403 gen_rtx (AND, mode, XEXP (XEXP (x, 0), 0),
5404 XEXP (x, 1)),
5405 gen_rtx (AND, mode, XEXP (XEXP (x, 0), 1),
5406 XEXP (x, 1)));
5407 new = make_compound_operation (new, in_code);
5408 }
a7c99304
RK
5409
5410 /* If we are have (and (rotate X C) M) and C is larger than the number
5411 of bits in M, this is an extraction. */
5412
5413 else if (GET_CODE (XEXP (x, 0)) == ROTATE
5414 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
5415 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0
5416 && i <= INTVAL (XEXP (XEXP (x, 0), 1)))
280f58ba
RK
5417 {
5418 new = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code);
5419 new = make_extraction (mode, new,
5420 (GET_MODE_BITSIZE (mode)
5421 - INTVAL (XEXP (XEXP (x, 0), 1))),
5422 NULL_RTX, i, 1, 0, in_code == COMPARE);
5423 }
a7c99304
RK
5424
5425 /* On machines without logical shifts, if the operand of the AND is
230d793d
RS
5426 a logical shift and our mask turns off all the propagated sign
5427 bits, we can replace the logical shift with an arithmetic shift. */
d0ab8cd3
RK
5428 else if (ashr_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
5429 && (lshr_optab->handlers[(int) mode].insn_code
5430 == CODE_FOR_nothing)
230d793d
RS
5431 && GET_CODE (XEXP (x, 0)) == LSHIFTRT
5432 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
5433 && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
5f4f0e22
CH
5434 && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT
5435 && mode_width <= HOST_BITS_PER_WIDE_INT)
230d793d 5436 {
5f4f0e22 5437 unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
230d793d
RS
5438
5439 mask >>= INTVAL (XEXP (XEXP (x, 0), 1));
5440 if ((INTVAL (XEXP (x, 1)) & ~mask) == 0)
5441 SUBST (XEXP (x, 0),
280f58ba
RK
5442 gen_rtx_combine (ASHIFTRT, mode,
5443 make_compound_operation (XEXP (XEXP (x, 0), 0),
5444 next_code),
230d793d
RS
5445 XEXP (XEXP (x, 0), 1)));
5446 }
5447
5448 /* If the constant is one less than a power of two, this might be
5449 representable by an extraction even if no shift is present.
5450 If it doesn't end up being a ZERO_EXTEND, we will ignore it unless
5451 we are in a COMPARE. */
5452 else if ((i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
280f58ba
RK
5453 new = make_extraction (mode,
5454 make_compound_operation (XEXP (x, 0),
5455 next_code),
5456 0, NULL_RTX, i, 1, 0, in_code == COMPARE);
230d793d
RS
5457
5458 /* If we are in a comparison and this is an AND with a power of two,
5459 convert this into the appropriate bit extract. */
5460 else if (in_code == COMPARE
5461 && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0)
280f58ba
RK
5462 new = make_extraction (mode,
5463 make_compound_operation (XEXP (x, 0),
5464 next_code),
5465 i, NULL_RTX, 1, 1, 0, 1);
230d793d
RS
5466
5467 break;
5468
5469 case LSHIFTRT:
5470 /* If the sign bit is known to be zero, replace this with an
5471 arithmetic shift. */
d0ab8cd3
RK
5472 if (ashr_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing
5473 && lshr_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
5f4f0e22 5474 && mode_width <= HOST_BITS_PER_WIDE_INT
951553af 5475 && (nonzero_bits (XEXP (x, 0), mode) & (1 << (mode_width - 1))) == 0)
230d793d 5476 {
280f58ba
RK
5477 new = gen_rtx_combine (ASHIFTRT, mode,
5478 make_compound_operation (XEXP (x, 0),
5479 next_code),
5480 XEXP (x, 1));
230d793d
RS
5481 break;
5482 }
5483
5484 /* ... fall through ... */
5485
5486 case ASHIFTRT:
71923da7
RK
5487 lhs = XEXP (x, 0);
5488 rhs = XEXP (x, 1);
5489
230d793d
RS
5490 /* If we have (ashiftrt (ashift foo C1) C2) with C2 >= C1,
5491 this is a SIGN_EXTRACT. */
71923da7
RK
5492 if (GET_CODE (rhs) == CONST_INT
5493 && GET_CODE (lhs) == ASHIFT
5494 && GET_CODE (XEXP (lhs, 1)) == CONST_INT
5495 && INTVAL (rhs) >= INTVAL (XEXP (lhs, 1)))
280f58ba 5496 {
71923da7 5497 new = make_compound_operation (XEXP (lhs, 0), next_code);
280f58ba 5498 new = make_extraction (mode, new,
71923da7
RK
5499 INTVAL (rhs) - INTVAL (XEXP (lhs, 1)),
5500 NULL_RTX, mode_width - INTVAL (rhs),
d0ab8cd3
RK
5501 code == LSHIFTRT, 0, in_code == COMPARE);
5502 }
5503
71923da7
RK
5504 /* See if we have operations between an ASHIFTRT and an ASHIFT.
5505 If so, try to merge the shifts into a SIGN_EXTEND. We could
5506 also do this for some cases of SIGN_EXTRACT, but it doesn't
5507 seem worth the effort; the case checked for occurs on Alpha. */
5508
5509 if (GET_RTX_CLASS (GET_CODE (lhs)) != 'o'
5510 && ! (GET_CODE (lhs) == SUBREG
5511 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (lhs))) == 'o'))
5512 && GET_CODE (rhs) == CONST_INT
5513 && INTVAL (rhs) < HOST_BITS_PER_WIDE_INT
5514 && (new = extract_left_shift (lhs, INTVAL (rhs))) != 0)
5515 new = make_extraction (mode, make_compound_operation (new, next_code),
5516 0, NULL_RTX, mode_width - INTVAL (rhs),
5517 code == LSHIFTRT, 0, in_code == COMPARE);
5518
230d793d 5519 break;
280f58ba
RK
5520
5521 case SUBREG:
5522 /* Call ourselves recursively on the inner expression. If we are
5523 narrowing the object and it has a different RTL code from
5524 what it originally did, do this SUBREG as a force_to_mode. */
5525
0a5cbff6 5526 tem = make_compound_operation (SUBREG_REG (x), in_code);
280f58ba
RK
5527 if (GET_CODE (tem) != GET_CODE (SUBREG_REG (x))
5528 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (tem))
5529 && subreg_lowpart_p (x))
0a5cbff6
RK
5530 {
5531 rtx newer = force_to_mode (tem, mode,
e3d616e3 5532 GET_MODE_MASK (mode), NULL_RTX, 0);
0a5cbff6
RK
5533
5534 /* If we have something other than a SUBREG, we might have
5535 done an expansion, so rerun ourselves. */
5536 if (GET_CODE (newer) != SUBREG)
5537 newer = make_compound_operation (newer, in_code);
5538
5539 return newer;
5540 }
230d793d
RS
5541 }
5542
5543 if (new)
5544 {
df62f951 5545 x = gen_lowpart_for_combine (mode, new);
230d793d
RS
5546 code = GET_CODE (x);
5547 }
5548
5549 /* Now recursively process each operand of this operation. */
5550 fmt = GET_RTX_FORMAT (code);
5551 for (i = 0; i < GET_RTX_LENGTH (code); i++)
5552 if (fmt[i] == 'e')
5553 {
5554 new = make_compound_operation (XEXP (x, i), next_code);
5555 SUBST (XEXP (x, i), new);
5556 }
5557
5558 return x;
5559}
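/* Illustrative stand-alone sketch, not part of combine.c: two of the
 shapes make_compound_operation recognizes, in plain C. */
#include <assert.h>

int
main ()
{
 unsigned x = 0xdeadbeef;

 /* (and (lshiftrt X 8) 255): the AND constant is one less than a
 power of two, so this is a ZERO_EXTRACT of 8 bits at bit 8. */
 assert (((x >> 8) & 0xff) == 0xbe);

 /* Inside an address, (ashift X 2) is rewritten as (mult X 4). */
 assert ((x << 2) == x * 4);
 return 0;
}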
5560\f
5561/* Given M see if it is a value that would select a field of bits
5562 within an item, but not the entire word. Return -1 if not.
5563 Otherwise, return the starting position of the field, where 0 is the
5564 low-order bit.
5565
5566 *PLEN is set to the length of the field. */
5567
5568static int
5569get_pos_from_mask (m, plen)
5f4f0e22 5570 unsigned HOST_WIDE_INT m;
230d793d
RS
5571 int *plen;
5572{
5573 /* Get the bit number of the first 1 bit from the right, -1 if none. */
5574 int pos = exact_log2 (m & - m);
5575
5576 if (pos < 0)
5577 return -1;
5578
5579 /* Now shift off the low-order zero bits and see if we have a power of
5580 two minus 1. */
5581 *plen = exact_log2 ((m >> pos) + 1);
5582
5583 if (*plen <= 0)
5584 return -1;
5585
5586 return pos;
5587}
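/* Illustrative stand-alone sketch, not part of combine.c: the
 arithmetic behind get_pos_from_mask. A mask selecting a contiguous
 field has the form ((1 << LEN) - 1) << POS. __builtin_ctz is a GCC
 extension standing in for exact_log2 here. */
#include <assert.h>

static int
pos_from_mask (unsigned m, int *plen)
{
 int pos;
 unsigned field;

 if (m == 0)
 return -1;

 pos = __builtin_ctz (m); /* exact_log2 (m & -m) */
 field = (m >> pos) + 1; /* a power of 2 iff contiguous */

 if (field == 0 || (field & (field - 1)) != 0)
 return -1; /* the entire word, or not contiguous */

 *plen = __builtin_ctz (field);
 return pos;
}

int
main ()
{
 int len;

 assert (pos_from_mask (0x0f0, &len) == 4 && len == 4);
 assert (pos_from_mask (0x050, &len) == -1); /* not contiguous */
 return 0;
}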
5588\f
6139ff20
RK
5589/* See if X can be simplified knowing that we will only refer to it in
5590 MODE and will only refer to those bits that are nonzero in MASK.
5591 If other bits are being computed or if masking operations are done
5592 that select a superset of the bits in MASK, they can sometimes be
5593 ignored.
5594
5595 Return a possibly simplified expression, but always convert X to
5596 MODE. If X is a CONST_INT, AND the CONST_INT with MASK.
dfbe1b2f
RK
5597
5598 Also, if REG is non-zero and X is a register equal in value to REG,
e3d616e3
RK
5599 replace X with REG.
5600
5601 If JUST_SELECT is nonzero, don't optimize by noticing that bits in MASK
5602 are all off in X. This is used when X will be complemented, by either
180b8e4b 5603 NOT, NEG, or XOR. */
dfbe1b2f
RK
5604
5605static rtx
e3d616e3 5606force_to_mode (x, mode, mask, reg, just_select)
dfbe1b2f
RK
5607 rtx x;
5608 enum machine_mode mode;
6139ff20 5609 unsigned HOST_WIDE_INT mask;
dfbe1b2f 5610 rtx reg;
e3d616e3 5611 int just_select;
dfbe1b2f
RK
5612{
5613 enum rtx_code code = GET_CODE (x);
180b8e4b 5614 int next_select = just_select || code == XOR || code == NOT || code == NEG;
ef026f91
RS
5615 enum machine_mode op_mode;
5616 unsigned HOST_WIDE_INT fuller_mask, nonzero;
6139ff20
RK
5617 rtx op0, op1, temp;
5618
246e00f2
RK
5619 /* If this is a CALL, don't do anything. Some of the code below
5620 will do the wrong thing since the mode of a CALL is VOIDmode. */
5621 if (code == CALL)
5622 return x;
5623
6139ff20
RK
5624 /* We want to perform the operation in its present mode unless we know
5625 that the operation is valid in MODE, in which case we do the operation
5626 in MODE. */
1c75dfa4
RK
5627 op_mode = ((GET_MODE_CLASS (mode) == GET_MODE_CLASS (GET_MODE (x))
5628 && code_to_optab[(int) code] != 0
ef026f91
RS
5629 && (code_to_optab[(int) code]->handlers[(int) mode].insn_code
5630 != CODE_FOR_nothing))
5631 ? mode : GET_MODE (x));
e3d616e3 5632
aa988991
RS
5633 /* It is not valid to do a right-shift in a narrower mode
5634 than the one it came in with. */
5635 if ((code == LSHIFTRT || code == ASHIFTRT)
5636 && GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (GET_MODE (x)))
5637 op_mode = GET_MODE (x);
ef026f91
RS
5638
5639 /* Truncate MASK to fit OP_MODE. */
5640 if (op_mode)
5641 mask &= GET_MODE_MASK (op_mode);
6139ff20
RK
5642
5643 /* When we have an arithmetic operation, or a shift whose count we
5644 do not know, we need to assume that all bits up to the highest-order
5645 bit in MASK will be needed. This is how we form such a mask. */
ef026f91
RS
5646 if (op_mode)
5647 fuller_mask = (GET_MODE_BITSIZE (op_mode) >= HOST_BITS_PER_WIDE_INT
5648 ? GET_MODE_MASK (op_mode)
5649 : ((HOST_WIDE_INT) 1 << (floor_log2 (mask) + 1)) - 1);
5650 else
5651 fuller_mask = ~ (HOST_WIDE_INT) 0;
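/* For example (illustrative): for a PLUS with MASK == 0x10, a carry
 out of bits 0..3 can reach bit 4, so FULLER_MASK here is 0x1f. */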
5652
5653 /* Determine what bits of X are guaranteed to be (non)zero. */
5654 nonzero = nonzero_bits (x, mode);
6139ff20
RK
5655
5656 /* If none of the bits in X are needed, return a zero. */
e3d616e3 5657 if (! just_select && (nonzero & mask) == 0)
6139ff20 5658 return const0_rtx;
dfbe1b2f 5659
6139ff20
RK
5660 /* If X is a CONST_INT, return a new one. Do this here since the
5661 test below will fail. */
5662 if (GET_CODE (x) == CONST_INT)
ceb7983c
RK
5663 {
5664 HOST_WIDE_INT cval = INTVAL (x) & mask;
5665 int width = GET_MODE_BITSIZE (mode);
5666
5667 /* If MODE is narrower than HOST_WIDE_INT and CVAL is a negative
5668 number, sign extend it. */
5669 if (width > 0 && width < HOST_BITS_PER_WIDE_INT
5670 && (cval & ((HOST_WIDE_INT) 1 << (width - 1))) != 0)
5671 cval |= (HOST_WIDE_INT) -1 << width;
5672
5673 return GEN_INT (cval);
5674 }
dfbe1b2f 5675
180b8e4b
RK
5676 /* If X is narrower than MODE and we want all the bits in X's mode, just
5677 get X in the proper mode. */
5678 if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (mode)
5679 && (GET_MODE_MASK (GET_MODE (x)) & ~ mask) == 0)
dfbe1b2f
RK
5680 return gen_lowpart_for_combine (mode, x);
5681
71923da7
RK
5682 /* If we aren't changing the mode, X is not a SUBREG, and all zero bits in
5683 MASK are already known to be zero in X, we need not do anything. */
5684 if (GET_MODE (x) == mode && code != SUBREG && (~ mask & nonzero) == 0)
6139ff20
RK
5685 return x;
5686
dfbe1b2f
RK
5687 switch (code)
5688 {
6139ff20
RK
5689 case CLOBBER:
5690 /* If X is a (clobber (const_int)), return it since we know we are
5691 generating something that won't match. */
5692 return x;
5693
5694#if ! BITS_BIG_ENDIAN
5695 case USE:
5696 /* X is a (use (mem ..)) that was made from a bit-field extraction that
5697 spanned the boundary of the MEM. If we are now masking so it is
5698 within that boundary, we don't need the USE any more. */
5699 if ((mask & ~ GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
e3d616e3 5700 return force_to_mode (XEXP (x, 0), mode, mask, reg, next_select);
6139ff20
RK
5701#endif
5702
dfbe1b2f
RK
5703 case SIGN_EXTEND:
5704 case ZERO_EXTEND:
5705 case ZERO_EXTRACT:
5706 case SIGN_EXTRACT:
5707 x = expand_compound_operation (x);
5708 if (GET_CODE (x) != code)
e3d616e3 5709 return force_to_mode (x, mode, mask, reg, next_select);
dfbe1b2f
RK
5710 break;
5711
5712 case REG:
5713 if (reg != 0 && (rtx_equal_p (get_last_value (reg), x)
5714 || rtx_equal_p (reg, get_last_value (x))))
5715 x = reg;
5716 break;
5717
dfbe1b2f 5718 case SUBREG:
6139ff20 5719 if (subreg_lowpart_p (x)
180b8e4b
RK
5720 /* We can ignore the effect of this SUBREG if it narrows the mode or
5721 if the constant masks to zero all the bits the mode doesn't
5722 have. */
6139ff20
RK
5723 && ((GET_MODE_SIZE (GET_MODE (x))
5724 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
6139ff20
RK
5725 || (0 == (mask
5726 & GET_MODE_MASK (GET_MODE (x))
180b8e4b 5727 & ~ GET_MODE_MASK (GET_MODE (SUBREG_REG (x)))))))
e3d616e3 5728 return force_to_mode (SUBREG_REG (x), mode, mask, reg, next_select);
dfbe1b2f
RK
5729 break;
5730
5731 case AND:
6139ff20
RK
5732 /* If this is an AND with a constant, convert it into an AND
5733 whose constant is the AND of that constant with MASK. If it
5734 remains an AND of MASK, delete it since it is redundant. */
dfbe1b2f 5735
6139ff20
RK
5736 if (GET_CODE (XEXP (x, 1)) == CONST_INT
5737 && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT)
dfbe1b2f 5738 {
6139ff20
RK
5739 x = simplify_and_const_int (x, op_mode, XEXP (x, 0),
5740 mask & INTVAL (XEXP (x, 1)));
dfbe1b2f
RK
5741
5742 /* If X is still an AND, see if it is an AND with a mask that
71923da7
RK
5743 is just some low-order bits. If so, and it is MASK, we don't
5744 need it. */
dfbe1b2f
RK
5745
5746 if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT
6139ff20 5747 && INTVAL (XEXP (x, 1)) == mask)
dfbe1b2f 5748 x = XEXP (x, 0);
d0ab8cd3 5749
71923da7
RK
5750 /* If it remains an AND, try making another AND with the bits
5751 in the mode mask that aren't in MASK turned on. If the
5752 constant in the AND is wide enough, this might make a
5753 cheaper constant. */
5754
5755 if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT
5756 && GET_MODE_MASK (GET_MODE (x)) != mask)
5757 {
5758 HOST_WIDE_INT cval = (INTVAL (XEXP (x, 1))
5759 | (GET_MODE_MASK (GET_MODE (x)) & ~ mask));
5760 int width = GET_MODE_BITSIZE (GET_MODE (x));
5761 rtx y;
5762
5763 /* If MODE is narrower than HOST_WIDE_INT and CVAL is a negative
5764 number, sign extend it. */
5765 if (width > 0 && width < HOST_BITS_PER_WIDE_INT
5766 && (cval & ((HOST_WIDE_INT) 1 << (width - 1))) != 0)
5767 cval |= (HOST_WIDE_INT) -1 << width;
5768
5769 y = gen_binary (AND, GET_MODE (x), XEXP (x, 0), GEN_INT (cval));
5770 if (rtx_cost (y, SET) < rtx_cost (x, SET))
5771 x = y;
5772 }
5773
d0ab8cd3 5774 break;
dfbe1b2f
RK
5775 }
5776
6139ff20 5777 goto binop;
dfbe1b2f
RK
5778
5779 case PLUS:
6139ff20
RK
5780 /* In (and (plus FOO C1) M), if M is a mask that just turns off
5781 low-order bits (as in an alignment operation) and FOO is already
5782 aligned to that boundary, mask C1 to that boundary as well.
5783 This may eliminate that PLUS and, later, the AND. */
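/* For example (illustrative): with FOO known 8-byte aligned and
 M == ~7, (FOO + 0x13) & ~7 equals FOO + (0x13 & ~7) == FOO + 0x10,
 since the dropped low bits of C1 cannot carry into bit 3. */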
5784 if (GET_CODE (XEXP (x, 1)) == CONST_INT
5785 && exact_log2 (- mask) >= 0
5786 && (nonzero_bits (XEXP (x, 0), mode) & ~ mask) == 0
5787 && (INTVAL (XEXP (x, 1)) & ~ mask) != 0)
5788 return force_to_mode (plus_constant (XEXP (x, 0),
5789 INTVAL (XEXP (x, 1)) & mask),
e3d616e3 5790 mode, mask, reg, next_select);
6139ff20
RK
5791
5792 /* ... fall through ... */
5793
dfbe1b2f
RK
5794 case MINUS:
5795 case MULT:
6139ff20
RK
5796 /* For PLUS, MINUS and MULT, we need all bits less significant than the
5797 most significant bit in MASK since carries from those bits will
5798 affect the bits we are interested in. */
5799 mask = fuller_mask;
5800 goto binop;
5801
dfbe1b2f
RK
5802 case IOR:
5803 case XOR:
6139ff20
RK
5804 /* If X is (ior (lshiftrt FOO C1) C2), try to commute the IOR and
5805 LSHIFTRT so we end up with an (and (lshiftrt (ior ...) ...) ...)
5806 operation which may be a bitfield extraction. Ensure that the
5807 constant we form is not wider than the mode of X. */
5808
5809 if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
5810 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
5811 && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
5812 && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT
5813 && GET_CODE (XEXP (x, 1)) == CONST_INT
5814 && ((INTVAL (XEXP (XEXP (x, 0), 1))
5815 + floor_log2 (INTVAL (XEXP (x, 1))))
5816 < GET_MODE_BITSIZE (GET_MODE (x)))
5817 && ((INTVAL (XEXP (x, 1))
5818 & ~ nonzero_bits (XEXP (x, 0), GET_MODE (x))) == 0))
5819 {
5820 temp = GEN_INT ((INTVAL (XEXP (x, 1)) & mask)
5821 << INTVAL (XEXP (XEXP (x, 0), 1)));
5822 temp = gen_binary (GET_CODE (x), GET_MODE (x),
5823 XEXP (XEXP (x, 0), 0), temp);
5824 x = gen_binary (LSHIFTRT, GET_MODE (x), temp, XEXP (x, 1));
e3d616e3 5825 return force_to_mode (x, mode, mask, reg, next_select);
6139ff20
RK
5826 }
5827
5828 binop:
dfbe1b2f 5829 /* For most binary operations, just propagate into the operation and
6139ff20
RK
5830 change the mode if we have an operation of that mode. */
5831
e3d616e3
RK
5832 op0 = gen_lowpart_for_combine (op_mode,
5833 force_to_mode (XEXP (x, 0), mode, mask,
5834 reg, next_select));
5835 op1 = gen_lowpart_for_combine (op_mode,
5836 force_to_mode (XEXP (x, 1), mode, mask,
5837 reg, next_select));
6139ff20 5838
2dd484ed
RK
5839 /* If OP1 is a CONST_INT and X is an IOR or XOR, clear bits outside
5840 MASK since OP1 might have been sign-extended but we never want
5841 to turn on extra bits, since combine might have previously relied
5842 on them being off. */
5843 if (GET_CODE (op1) == CONST_INT && (code == IOR || code == XOR)
5844 && (INTVAL (op1) & mask) != 0)
5845 op1 = GEN_INT (INTVAL (op1) & mask);
5846
6139ff20
RK
5847 if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
5848 x = gen_binary (code, op_mode, op0, op1);
d0ab8cd3 5849 break;
dfbe1b2f
RK
5850
5851 case ASHIFT:
dfbe1b2f 5852 /* For left shifts, do the same, but just for the first operand.
f6785026
RK
5853 However, we cannot do anything with shifts where we cannot
5854 guarantee that the counts are smaller than the size of the mode
5855 because such a count will have a different meaning in a
6139ff20 5856 wider mode. */
5857
5858 if (! (GET_CODE (XEXP (x, 1)) == CONST_INT
6139ff20 5859 && INTVAL (XEXP (x, 1)) >= 0
5860 && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (mode))
5861 && ! (GET_MODE (XEXP (x, 1)) != VOIDmode
5862 && (nonzero_bits (XEXP (x, 1), GET_MODE (XEXP (x, 1)))
adb7a1cb 5863 < (unsigned HOST_WIDE_INT) GET_MODE_BITSIZE (mode))))
5864 break;
5865
5866 /* If the shift count is a constant and we can do arithmetic in
5867 the mode of the shift, refine which bits we need. Otherwise, use the
5868 conservative form of the mask. */
5869 if (GET_CODE (XEXP (x, 1)) == CONST_INT
5870 && INTVAL (XEXP (x, 1)) >= 0
5871 && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (op_mode)
5872 && GET_MODE_BITSIZE (op_mode) <= HOST_BITS_PER_WIDE_INT)
5873 mask >>= INTVAL (XEXP (x, 1));
5874 else
5875 mask = fuller_mask;
5876
5877 op0 = gen_lowpart_for_combine (op_mode,
5878 force_to_mode (XEXP (x, 0), op_mode,
e3d616e3 5879 mask, reg, next_select));
5880
5881 if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0))
5882 x = gen_binary (code, op_mode, op0, XEXP (x, 1));
d0ab8cd3 5883 break;
5884
5885 case LSHIFTRT:
5886 /* Here we can only do something if the shift count is a constant,
5887 this shift constant is valid for the host, and we can do arithmetic
5888 in OP_MODE. */
5889
5890 if (GET_CODE (XEXP (x, 1)) == CONST_INT
1347292b 5891 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
6139ff20 5892 && GET_MODE_BITSIZE (op_mode) <= HOST_BITS_PER_WIDE_INT)
d0ab8cd3 5893 {
5894 rtx inner = XEXP (x, 0);
5895
5896 /* Select the mask of the bits we need for the shift operand. */
5897 mask <<= INTVAL (XEXP (x, 1));
d0ab8cd3 5898
5899 /* We can only change the mode of the shift if we can do arithmetic
5900 in the mode of the shift and MASK is no wider than the width of
5901 OP_MODE. */
5902 if (GET_MODE_BITSIZE (op_mode) > HOST_BITS_PER_WIDE_INT
5903 || (mask & ~ GET_MODE_MASK (op_mode)) != 0)
5904 op_mode = GET_MODE (x);
5905
e3d616e3 5906 inner = force_to_mode (inner, op_mode, mask, reg, next_select);
5907
5908 if (GET_MODE (x) != op_mode || inner != XEXP (x, 0))
5909 x = gen_binary (LSHIFTRT, op_mode, inner, XEXP (x, 1));
d0ab8cd3 5910 }
5911
5912 /* If we have (and (lshiftrt FOO C1) C2) where the combination of the
5913 shift and AND produces only copies of the sign bit (C2 is one less
5914 than a power of two), we can do this with just a shift. */
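	 For example, in SImode, if the high 29 bits of FOO are copies of
	 its sign bit, (and (lshiftrt FOO 28) 7) has the same value as
	 (lshiftrt FOO 29): every result bit is a copy of the sign bit.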
5915
5916 if (GET_CODE (x) == LSHIFTRT
5917 && GET_CODE (XEXP (x, 1)) == CONST_INT
5918 && ((INTVAL (XEXP (x, 1))
5919 + num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0))))
5920 >= GET_MODE_BITSIZE (GET_MODE (x)))
5921 && exact_log2 (mask + 1) >= 0
5922 && (num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
5923 >= exact_log2 (mask + 1)))
5924 x = gen_binary (LSHIFTRT, GET_MODE (x), XEXP (x, 0),
5925 GEN_INT (GET_MODE_BITSIZE (GET_MODE (x))
5926 - exact_log2 (mask + 1)));
5927 break;
5928
5929 case ASHIFTRT:
5930 /* If we are just looking for the sign bit, we don't need this shift at
5931 all, even if it has a variable count. */
5932 if (mask == ((HOST_WIDE_INT) 1
5933 << (GET_MODE_BITSIZE (GET_MODE (x)) - 1)))
e3d616e3 5934 return force_to_mode (XEXP (x, 0), mode, mask, reg, next_select);
5935
5936 /* If this is a shift by a constant, get a mask that contains those bits
5937 that are not copies of the sign bit. We then have two cases: If
5938 MASK only includes those bits, this can be a logical shift, which may
5939 allow simplifications. If MASK is a single-bit field not within
5940 those bits, we are requesting a copy of the sign bit and hence can
5941 shift the sign bit to the appropriate location. */
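	 For example, in SImode, if only bit 2 of (ashiftrt FOO 31) is
	 wanted (MASK is 4), then (lshiftrt FOO 29) suffices: its bit 2
	 is FOO's sign bit, and the other bits are masked off anyway.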
5942
5943 if (GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) >= 0
5944 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
5945 {
5946 int i = -1;
5947
5948 nonzero = GET_MODE_MASK (GET_MODE (x));
5949 nonzero >>= INTVAL (XEXP (x, 1));
5950
5951 if ((mask & ~ nonzero) == 0
5952 || (i = exact_log2 (mask)) >= 0)
5953 {
5954 x = simplify_shift_const
5955 (x, LSHIFTRT, GET_MODE (x), XEXP (x, 0),
5956 i < 0 ? INTVAL (XEXP (x, 1))
5957 : GET_MODE_BITSIZE (GET_MODE (x)) - 1 - i);
5958
5959 if (GET_CODE (x) != ASHIFTRT)
e3d616e3 5960 return force_to_mode (x, mode, mask, reg, next_select);
5961 }
5962 }
5963
5964      /* If MASK is 1, convert this to an LSHIFTRT.  This can be done
5965 even if the shift count isn't a constant. */
5966 if (mask == 1)
5967 x = gen_binary (LSHIFTRT, GET_MODE (x), XEXP (x, 0), XEXP (x, 1));
5968
d0ab8cd3 5969 /* If this is a sign-extension operation that just affects bits
5970 we don't care about, remove it. Be sure the call above returned
5971 something that is still a shift. */
d0ab8cd3 5972
5973 if ((GET_CODE (x) == LSHIFTRT || GET_CODE (x) == ASHIFTRT)
5974 && GET_CODE (XEXP (x, 1)) == CONST_INT
d0ab8cd3 5975 && INTVAL (XEXP (x, 1)) >= 0
5976 && (INTVAL (XEXP (x, 1))
5977 <= GET_MODE_BITSIZE (GET_MODE (x)) - (floor_log2 (mask) + 1))
5978 && GET_CODE (XEXP (x, 0)) == ASHIFT
5979 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
5980 && INTVAL (XEXP (XEXP (x, 0), 1)) == INTVAL (XEXP (x, 1)))
5981 return force_to_mode (XEXP (XEXP (x, 0), 0), mode, mask,
5982 reg, next_select);
6139ff20 5983
5984 break;
5985
5986 case ROTATE:
5987 case ROTATERT:
5988 /* If the shift count is constant and we can do computations
5989 in the mode of X, compute where the bits we care about are.
5990 Otherwise, we can't do anything. Don't change the mode of
5991 the shift or propagate MODE into the shift, though. */
5992 if (GET_CODE (XEXP (x, 1)) == CONST_INT
5993 && INTVAL (XEXP (x, 1)) >= 0)
5994 {
5995 temp = simplify_binary_operation (code == ROTATE ? ROTATERT : ROTATE,
5996 GET_MODE (x), GEN_INT (mask),
5997 XEXP (x, 1));
5998	  if (temp && GET_CODE (temp) == CONST_INT)
5999 SUBST (XEXP (x, 0),
6000 force_to_mode (XEXP (x, 0), GET_MODE (x),
e3d616e3 6001 INTVAL (temp), reg, next_select));
6002 }
6003 break;
6004
dfbe1b2f 6005 case NEG:
6006 /* If we just want the low-order bit, the NEG isn't needed since it
6007 won't change the low-order bit. */
6008 if (mask == 1)
6009 return force_to_mode (XEXP (x, 0), mode, mask, reg, just_select);
6010
6011 /* We need any bits less significant than the most significant bit in
6012 MASK since carries from those bits will affect the bits we are
6013 interested in. */
6014 mask = fuller_mask;
6015 goto unop;
6016
dfbe1b2f 6017 case NOT:
6018 /* (not FOO) is (xor FOO CONST), so if FOO is an LSHIFTRT, we can do the
6019 same as the XOR case above. Ensure that the constant we form is not
6020 wider than the mode of X. */
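	 That is, (and (not (lshiftrt FOO C)) MASK) is computed as
	 (lshiftrt (xor FOO C2) C) with C2 = MASK << C; complementing
	 before the shift with a shifted constant is equivalent to
	 complementing afterwards within MASK.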
6021
6022 if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
6023 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
6024 && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
6025 && (INTVAL (XEXP (XEXP (x, 0), 1)) + floor_log2 (mask)
6026 < GET_MODE_BITSIZE (GET_MODE (x)))
6027 && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT)
6028 {
6029 temp = GEN_INT (mask << INTVAL (XEXP (XEXP (x, 0), 1)));
6030 temp = gen_binary (XOR, GET_MODE (x), XEXP (XEXP (x, 0), 0), temp);
6031 x = gen_binary (LSHIFTRT, GET_MODE (x), temp, XEXP (XEXP (x, 0), 1));
6032
e3d616e3 6033 return force_to_mode (x, mode, mask, reg, next_select);
6034 }
6035
6036 unop:
6037 op0 = gen_lowpart_for_combine (op_mode,
6038 force_to_mode (XEXP (x, 0), mode, mask,
6039 reg, next_select));
6139ff20 6040 if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0))
0c1c8ea6 6041 x = gen_unary (code, op_mode, op_mode, op0);
6042 break;
6043
6044 case NE:
6045 /* (and (ne FOO 0) CONST) can be (and FOO CONST) if CONST is included
6046 in STORE_FLAG_VALUE and FOO has no bits that might be nonzero not
6047 in CONST. */
6048      if ((mask & ~ STORE_FLAG_VALUE) == 0 && XEXP (x, 1) == const0_rtx
6049 && (nonzero_bits (XEXP (x, 0), mode) & ~ mask) == 0)
e3d616e3 6050 return force_to_mode (XEXP (x, 0), mode, mask, reg, next_select);
6139ff20 6051
6052 break;
6053
6054 case IF_THEN_ELSE:
6055 /* We have no way of knowing if the IF_THEN_ELSE can itself be
6056 written in a narrower mode. We play it safe and do not do so. */
6057
6058 SUBST (XEXP (x, 1),
6059 gen_lowpart_for_combine (GET_MODE (x),
6060 force_to_mode (XEXP (x, 1), mode,
e3d616e3 6061 mask, reg, next_select)));
6062 SUBST (XEXP (x, 2),
6063 gen_lowpart_for_combine (GET_MODE (x),
6064 force_to_mode (XEXP (x, 2), mode,
6065					      mask, reg, next_select)));
d0ab8cd3 6066 break;
6067 }
6068
d0ab8cd3 6069 /* Ensure we return a value of the proper mode. */
6070 return gen_lowpart_for_combine (mode, x);
6071}
6072\f
6073/* Return nonzero if X is an expression that has one of two values depending on
6074 whether some other value is zero or nonzero. In that case, we return the
6075   value that is being tested, *PTRUE is set to the value of X when the rtx
6076   being returned is nonzero, and *PFALSE is set to the other alternative.
6077
6078 If we return zero, we set *PTRUE and *PFALSE to X. */
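   For example, given (mult (ne A 0) (const_int 7)) on a machine where
   STORE_FLAG_VALUE is 1, we would return (ne A 0) and set *PTRUE to
   (const_int 7) and *PFALSE to (const_int 0).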
6079
6080static rtx
6081if_then_else_cond (x, ptrue, pfalse)
6082 rtx x;
6083 rtx *ptrue, *pfalse;
6084{
6085 enum machine_mode mode = GET_MODE (x);
6086 enum rtx_code code = GET_CODE (x);
6087 int size = GET_MODE_BITSIZE (mode);
6088 rtx cond0, cond1, true0, true1, false0, false1;
6089 unsigned HOST_WIDE_INT nz;
6090
6091 /* If this is a unary operation whose operand has one of two values, apply
6092 our opcode to compute those values. */
6093 if (GET_RTX_CLASS (code) == '1'
6094 && (cond0 = if_then_else_cond (XEXP (x, 0), &true0, &false0)) != 0)
6095 {
6096 *ptrue = gen_unary (code, mode, GET_MODE (XEXP (x, 0)), true0);
6097 *pfalse = gen_unary (code, mode, GET_MODE (XEXP (x, 0)), false0);
6098 return cond0;
6099 }
6100
6101 /* If this is a COMPARE, do nothing, since the IF_THEN_ELSE we would
6102     make can't possibly match and would suppress other optimizations.  */
6103 else if (code == COMPARE)
6104 ;
6105
6106 /* If this is a binary operation, see if either side has only one of two
6107 values. If either one does or if both do and they are conditional on
6108 the same value, compute the new true and false values. */
6109 else if (GET_RTX_CLASS (code) == 'c' || GET_RTX_CLASS (code) == '2'
6110 || GET_RTX_CLASS (code) == '<')
6111 {
6112 cond0 = if_then_else_cond (XEXP (x, 0), &true0, &false0);
6113 cond1 = if_then_else_cond (XEXP (x, 1), &true1, &false1);
6114
6115 if ((cond0 != 0 || cond1 != 0)
6116 && ! (cond0 != 0 && cond1 != 0 && ! rtx_equal_p (cond0, cond1)))
6117 {
6118 *ptrue = gen_binary (code, mode, true0, true1);
6119 *pfalse = gen_binary (code, mode, false0, false1);
6120 return cond0 ? cond0 : cond1;
6121 }
6122
6123#if STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1
6124
6125 /* See if we have PLUS, IOR, XOR, MINUS or UMAX, where one of the
6126 operands is zero when the other is non-zero, and vice-versa. */
6127
6128 if ((code == PLUS || code == IOR || code == XOR || code == MINUS
6129 || code == UMAX)
6130 && GET_CODE (XEXP (x, 0)) == MULT && GET_CODE (XEXP (x, 1)) == MULT)
6131 {
6132 rtx op0 = XEXP (XEXP (x, 0), 1);
6133 rtx op1 = XEXP (XEXP (x, 1), 1);
6134
6135 cond0 = XEXP (XEXP (x, 0), 0);
6136 cond1 = XEXP (XEXP (x, 1), 0);
6137
6138 if (GET_RTX_CLASS (GET_CODE (cond0)) == '<'
6139 && GET_RTX_CLASS (GET_CODE (cond1)) == '<'
6140 && reversible_comparison_p (cond1)
6141 && ((GET_CODE (cond0) == reverse_condition (GET_CODE (cond1))
6142 && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 0))
6143 && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 1)))
6144 || ((swap_condition (GET_CODE (cond0))
6145 == reverse_condition (GET_CODE (cond1)))
6146 && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 1))
6147 && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 0))))
6148 && ! side_effects_p (x))
6149 {
6150 *ptrue = gen_binary (MULT, mode, op0, const_true_rtx);
6151 *pfalse = gen_binary (MULT, mode,
6152 (code == MINUS
0c1c8ea6 6153 ? gen_unary (NEG, mode, mode, op1) : op1),
6154 const_true_rtx);
6155 return cond0;
6156 }
6157 }
6158
6159      /* Similarly for MULT, AND and UMIN, except that for these the result
6160 is always zero. */
6161 if ((code == MULT || code == AND || code == UMIN)
6162 && GET_CODE (XEXP (x, 0)) == MULT && GET_CODE (XEXP (x, 1)) == MULT)
6163 {
6164 cond0 = XEXP (XEXP (x, 0), 0);
6165 cond1 = XEXP (XEXP (x, 1), 0);
6166
6167 if (GET_RTX_CLASS (GET_CODE (cond0)) == '<'
6168 && GET_RTX_CLASS (GET_CODE (cond1)) == '<'
6169 && reversible_comparison_p (cond1)
6170 && ((GET_CODE (cond0) == reverse_condition (GET_CODE (cond1))
6171 && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 0))
6172 && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 1)))
6173 || ((swap_condition (GET_CODE (cond0))
6174 == reverse_condition (GET_CODE (cond1)))
6175 && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 1))
6176 && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 0))))
6177 && ! side_effects_p (x))
6178 {
6179 *ptrue = *pfalse = const0_rtx;
6180 return cond0;
6181 }
6182 }
6183#endif
6184 }
6185
6186 else if (code == IF_THEN_ELSE)
6187 {
6188 /* If we have IF_THEN_ELSE already, extract the condition and
6189 canonicalize it if it is NE or EQ. */
6190 cond0 = XEXP (x, 0);
6191 *ptrue = XEXP (x, 1), *pfalse = XEXP (x, 2);
6192 if (GET_CODE (cond0) == NE && XEXP (cond0, 1) == const0_rtx)
6193 return XEXP (cond0, 0);
6194 else if (GET_CODE (cond0) == EQ && XEXP (cond0, 1) == const0_rtx)
6195 {
6196 *ptrue = XEXP (x, 2), *pfalse = XEXP (x, 1);
6197 return XEXP (cond0, 0);
6198 }
6199 else
6200 return cond0;
6201 }
6202
6203 /* If X is a normal SUBREG with both inner and outer modes integral,
6204 we can narrow both the true and false values of the inner expression,
6205 if there is a condition. */
6206 else if (code == SUBREG && GET_MODE_CLASS (mode) == MODE_INT
6207 && GET_MODE_CLASS (GET_MODE (SUBREG_REG (x))) == MODE_INT
6208 && GET_MODE_SIZE (mode) <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
6209 && 0 != (cond0 = if_then_else_cond (SUBREG_REG (x),
6210 &true0, &false0)))
6211 {
6212 *ptrue = force_to_mode (true0, mode, GET_MODE_MASK (mode), NULL_RTX, 0);
6213 *pfalse
6214 = force_to_mode (false0, mode, GET_MODE_MASK (mode), NULL_RTX, 0);
abe6e52f 6215
6216 return cond0;
6217 }
6218
6219  /* If X is a constant, this isn't special and will cause confusion
6220 if we treat it as such. Likewise if it is equivalent to a constant. */
6221 else if (CONSTANT_P (x)
6222 || ((cond0 = get_last_value (x)) != 0 && CONSTANT_P (cond0)))
6223 ;
6224
6225 /* If X is known to be either 0 or -1, those are the true and
6226 false values when testing X. */
6227 else if (num_sign_bit_copies (x, mode) == size)
6228 {
6229 *ptrue = constm1_rtx, *pfalse = const0_rtx;
6230 return x;
6231 }
6232
6233 /* Likewise for 0 or a single bit. */
6234 else if (exact_log2 (nz = nonzero_bits (x, mode)) >= 0)
6235 {
6236 *ptrue = GEN_INT (nz), *pfalse = const0_rtx;
6237 return x;
6238 }
6239
6240 /* Otherwise fail; show no condition with true and false values the same. */
6241 *ptrue = *pfalse = x;
6242 return 0;
6243}
6244\f
6245/* Return the value of expression X given the fact that condition COND
6246 is known to be true when applied to REG as its first operand and VAL
6247 as its second. X is known to not be shared and so can be modified in
6248 place.
6249
6250 We only handle the simplest cases, and specifically those cases that
6251 arise with IF_THEN_ELSE expressions. */
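   For example, if COND is GT, REG is A, and VAL is (const_int 0),
   then (smax A (const_int 0)) is known to be simply A.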
6252
6253static rtx
6254known_cond (x, cond, reg, val)
6255 rtx x;
6256 enum rtx_code cond;
6257 rtx reg, val;
6258{
6259 enum rtx_code code = GET_CODE (x);
f24ad0e4 6260 rtx temp;
6261 char *fmt;
6262 int i, j;
6263
6264 if (side_effects_p (x))
6265 return x;
6266
6267 if (cond == EQ && rtx_equal_p (x, reg))
6268 return val;
6269
6270 /* If X is (abs REG) and we know something about REG's relationship
6271 with zero, we may be able to simplify this. */
6272
6273 if (code == ABS && rtx_equal_p (XEXP (x, 0), reg) && val == const0_rtx)
6274 switch (cond)
6275 {
6276 case GE: case GT: case EQ:
6277 return XEXP (x, 0);
6278 case LT: case LE:
6279 return gen_unary (NEG, GET_MODE (XEXP (x, 0)), GET_MODE (XEXP (x, 0)),
6280 XEXP (x, 0));
6281 }
6282
6283 /* The only other cases we handle are MIN, MAX, and comparisons if the
6284 operands are the same as REG and VAL. */
6285
6286 else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == 'c')
6287 {
6288 if (rtx_equal_p (XEXP (x, 0), val))
6289 cond = swap_condition (cond), temp = val, val = reg, reg = temp;
6290
6291 if (rtx_equal_p (XEXP (x, 0), reg) && rtx_equal_p (XEXP (x, 1), val))
6292 {
6293 if (GET_RTX_CLASS (code) == '<')
6294 return (comparison_dominates_p (cond, code) ? const_true_rtx
6295 : (comparison_dominates_p (cond,
6296 reverse_condition (code))
6297 ? const0_rtx : x));
6298
6299 else if (code == SMAX || code == SMIN
6300 || code == UMIN || code == UMAX)
6301 {
6302 int unsignedp = (code == UMIN || code == UMAX);
6303
6304 if (code == SMAX || code == UMAX)
6305 cond = reverse_condition (cond);
6306
6307 switch (cond)
6308 {
6309 case GE: case GT:
6310 return unsignedp ? x : XEXP (x, 1);
6311 case LE: case LT:
6312 return unsignedp ? x : XEXP (x, 0);
6313 case GEU: case GTU:
6314 return unsignedp ? XEXP (x, 1) : x;
6315 case LEU: case LTU:
6316 return unsignedp ? XEXP (x, 0) : x;
6317 }
6318 }
6319 }
6320 }
6321
6322 fmt = GET_RTX_FORMAT (code);
6323 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6324 {
6325 if (fmt[i] == 'e')
6326 SUBST (XEXP (x, i), known_cond (XEXP (x, i), cond, reg, val));
6327 else if (fmt[i] == 'E')
6328 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
6329 SUBST (XVECEXP (x, i, j), known_cond (XVECEXP (x, i, j),
6330 cond, reg, val));
6331 }
6332
6333 return x;
6334}
6335\f
6336/* See if X, a SET operation, can be rewritten as a bit-field assignment.
6337 Return that assignment if so.
6338
6339 We only handle the most common cases. */
6340
6341static rtx
6342make_field_assignment (x)
6343 rtx x;
6344{
6345 rtx dest = SET_DEST (x);
6346 rtx src = SET_SRC (x);
dfbe1b2f 6347 rtx assign;
6348 HOST_WIDE_INT c1;
6349 int pos, len;
6350 rtx other;
6351 enum machine_mode mode;
6352
6353 /* If SRC was (and (not (ashift (const_int 1) POS)) DEST), this is
6354 a clear of a one-bit field. We will have changed it to
6355 (and (rotate (const_int -2) POS) DEST), so check for that. Also check
6356 for a SUBREG. */
6357
6358 if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == ROTATE
6359 && GET_CODE (XEXP (XEXP (src, 0), 0)) == CONST_INT
6360 && INTVAL (XEXP (XEXP (src, 0), 0)) == -2
6361 && (rtx_equal_p (dest, XEXP (src, 1))
6362 || rtx_equal_p (dest, get_last_value (XEXP (src, 1)))
6363 || rtx_equal_p (get_last_value (dest), XEXP (src, 1))))
230d793d 6364 {
8999a12e 6365 assign = make_extraction (VOIDmode, dest, 0, XEXP (XEXP (src, 0), 1),
230d793d 6366 1, 1, 1, 0);
dfbe1b2f 6367 return gen_rtx (SET, VOIDmode, assign, const0_rtx);
6368 }
6369
6370 else if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == SUBREG
6371 && subreg_lowpart_p (XEXP (src, 0))
6372 && (GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
6373 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (src, 0)))))
6374 && GET_CODE (SUBREG_REG (XEXP (src, 0))) == ROTATE
6375 && INTVAL (XEXP (SUBREG_REG (XEXP (src, 0)), 0)) == -2
6376 && (rtx_equal_p (dest, XEXP (src, 1))
6377 || rtx_equal_p (dest, get_last_value (XEXP (src, 1)))
6378 || rtx_equal_p (get_last_value (dest), XEXP (src, 1))))
230d793d 6379 {
8999a12e 6380 assign = make_extraction (VOIDmode, dest, 0,
6381 XEXP (SUBREG_REG (XEXP (src, 0)), 1),
6382 1, 1, 1, 0);
dfbe1b2f 6383 return gen_rtx (SET, VOIDmode, assign, const0_rtx);
6384 }
6385
6386  /* If SRC is (ior (ashift (const_int 1) POS) DEST), this is a set of a
6387 one-bit field. */
6388 else if (GET_CODE (src) == IOR && GET_CODE (XEXP (src, 0)) == ASHIFT
6389 && XEXP (XEXP (src, 0), 0) == const1_rtx
6390 && (rtx_equal_p (dest, XEXP (src, 1))
6391 || rtx_equal_p (dest, get_last_value (XEXP (src, 1)))
6392 || rtx_equal_p (get_last_value (dest), XEXP (src, 1))))
230d793d 6393 {
8999a12e 6394 assign = make_extraction (VOIDmode, dest, 0, XEXP (XEXP (src, 0), 1),
230d793d 6395 1, 1, 1, 0);
dfbe1b2f 6396 return gen_rtx (SET, VOIDmode, assign, const1_rtx);
6397 }
6398
6399 /* The other case we handle is assignments into a constant-position
6400 field. They look like (ior (and DEST C1) OTHER). If C1 represents
6401 a mask that has all one bits except for a group of zero bits and
6402 OTHER is known to have zeros where C1 has ones, this is such an
6403 assignment. Compute the position and length from C1. Shift OTHER
6404 to the appropriate position, force it to the required mode, and
6405 make the extraction. Check for the AND in both operands. */
6406
6407 if (GET_CODE (src) == IOR && GET_CODE (XEXP (src, 0)) == AND
6408 && GET_CODE (XEXP (XEXP (src, 0), 1)) == CONST_INT
6409 && (rtx_equal_p (XEXP (XEXP (src, 0), 0), dest)
6410 || rtx_equal_p (XEXP (XEXP (src, 0), 0), get_last_value (dest))
6411	      || rtx_equal_p (get_last_value (XEXP (XEXP (src, 0), 0)), dest)))
6412 c1 = INTVAL (XEXP (XEXP (src, 0), 1)), other = XEXP (src, 1);
6413 else if (GET_CODE (src) == IOR && GET_CODE (XEXP (src, 1)) == AND
6414 && GET_CODE (XEXP (XEXP (src, 1), 1)) == CONST_INT
6415 && (rtx_equal_p (XEXP (XEXP (src, 1), 0), dest)
6416 || rtx_equal_p (XEXP (XEXP (src, 1), 0), get_last_value (dest))
6417 || rtx_equal_p (get_last_value (XEXP (XEXP (src, 1), 0)),
6418 dest)))
6419 c1 = INTVAL (XEXP (XEXP (src, 1), 1)), other = XEXP (src, 0);
6420 else
6421 return x;
230d793d 6422
c2f9f64e 6423 pos = get_pos_from_mask (c1 ^ GET_MODE_MASK (GET_MODE (dest)), &len);
dfbe1b2f 6424 if (pos < 0 || pos + len > GET_MODE_BITSIZE (GET_MODE (dest))
ac49a949 6425 || (GET_MODE_BITSIZE (GET_MODE (other)) <= HOST_BITS_PER_WIDE_INT
951553af 6426 && (c1 & nonzero_bits (other, GET_MODE (other))) != 0))
dfbe1b2f 6427 return x;
230d793d 6428
5f4f0e22 6429 assign = make_extraction (VOIDmode, dest, pos, NULL_RTX, len, 1, 1, 0);
230d793d 6430
6431 /* The mode to use for the source is the mode of the assignment, or of
6432 what is inside a possible STRICT_LOW_PART. */
6433 mode = (GET_CODE (assign) == STRICT_LOW_PART
6434 ? GET_MODE (XEXP (assign, 0)) : GET_MODE (assign));
230d793d 6435
6436 /* Shift OTHER right POS places and make it the source, restricting it
6437 to the proper length and mode. */
230d793d 6438
6439 src = force_to_mode (simplify_shift_const (NULL_RTX, LSHIFTRT,
6440 GET_MODE (src), other, pos),
6441 mode,
6442 GET_MODE_BITSIZE (mode) >= HOST_BITS_PER_WIDE_INT
6443 ? GET_MODE_MASK (mode)
6444 : ((HOST_WIDE_INT) 1 << len) - 1,
e3d616e3 6445 dest, 0);
230d793d 6446
dfbe1b2f 6447 return gen_rtx_combine (SET, VOIDmode, assign, src);
6448}
6449\f
6450/* See if X is of the form (+ (* a c) (* b c)) and convert to (* (+ a b) c)
6451 if so. */
6452
6453static rtx
6454apply_distributive_law (x)
6455 rtx x;
6456{
6457 enum rtx_code code = GET_CODE (x);
6458 rtx lhs, rhs, other;
6459 rtx tem;
6460 enum rtx_code inner_code;
6461
6462 /* Distributivity is not true for floating point.
6463 It can change the value. So don't do it.
6464 -- rms and moshier@world.std.com. */
3ad2180a 6465 if (FLOAT_MODE_P (GET_MODE (x)))
6466 return x;
6467
6468 /* The outer operation can only be one of the following: */
6469 if (code != IOR && code != AND && code != XOR
6470 && code != PLUS && code != MINUS)
6471 return x;
6472
6473 lhs = XEXP (x, 0), rhs = XEXP (x, 1);
6474
dfbe1b2f 6475 /* If either operand is a primitive we can't do anything, so get out fast. */
230d793d 6476 if (GET_RTX_CLASS (GET_CODE (lhs)) == 'o'
dfbe1b2f 6477 || GET_RTX_CLASS (GET_CODE (rhs)) == 'o')
6478 return x;
6479
6480 lhs = expand_compound_operation (lhs);
6481 rhs = expand_compound_operation (rhs);
6482 inner_code = GET_CODE (lhs);
6483 if (inner_code != GET_CODE (rhs))
6484 return x;
6485
6486 /* See if the inner and outer operations distribute. */
6487 switch (inner_code)
6488 {
6489 case LSHIFTRT:
6490 case ASHIFTRT:
6491 case AND:
6492 case IOR:
6493 /* These all distribute except over PLUS. */
6494 if (code == PLUS || code == MINUS)
6495 return x;
6496 break;
6497
6498 case MULT:
6499 if (code != PLUS && code != MINUS)
6500 return x;
6501 break;
6502
6503 case ASHIFT:
45620ed4 6504 /* This is also a multiply, so it distributes over everything. */
230d793d
RS
6505 break;
6506
6507 case SUBREG:
6508      /* Non-paradoxical SUBREGs distribute over all operations, provided
6509 the inner modes and word numbers are the same, this is an extraction
6510 of a low-order part, we don't convert an fp operation to int or
6511 vice versa, and we would not be converting a single-word
dfbe1b2f 6512 operation into a multi-word operation. The latter test is not
2b4bd1bc 6513 required, but it prevents generating unneeded multi-word operations.
6514 Some of the previous tests are redundant given the latter test, but
6515 are retained because they are required for correctness.
6516
6517 We produce the result slightly differently in this case. */
6518
6519 if (GET_MODE (SUBREG_REG (lhs)) != GET_MODE (SUBREG_REG (rhs))
6520 || SUBREG_WORD (lhs) != SUBREG_WORD (rhs)
6521 || ! subreg_lowpart_p (lhs)
6522 || (GET_MODE_CLASS (GET_MODE (lhs))
6523 != GET_MODE_CLASS (GET_MODE (SUBREG_REG (lhs))))
6524 || (GET_MODE_SIZE (GET_MODE (lhs))
6525 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs))))
6526 || GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs))) > UNITS_PER_WORD)
6527 return x;
6528
6529 tem = gen_binary (code, GET_MODE (SUBREG_REG (lhs)),
6530 SUBREG_REG (lhs), SUBREG_REG (rhs));
6531 return gen_lowpart_for_combine (GET_MODE (x), tem);
6532
6533 default:
6534 return x;
6535 }
6536
6537 /* Set LHS and RHS to the inner operands (A and B in the example
6538 above) and set OTHER to the common operand (C in the example).
6539     There is only one way to do this unless the inner operation is
6540 commutative. */
6541 if (GET_RTX_CLASS (inner_code) == 'c'
6542 && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 0)))
6543 other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 1);
6544 else if (GET_RTX_CLASS (inner_code) == 'c'
6545 && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 1)))
6546 other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 0);
6547 else if (GET_RTX_CLASS (inner_code) == 'c'
6548 && rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 0)))
6549 other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 1);
6550 else if (rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 1)))
6551 other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 0);
6552 else
6553 return x;
6554
6555 /* Form the new inner operation, seeing if it simplifies first. */
6556 tem = gen_binary (code, GET_MODE (x), lhs, rhs);
6557
6558 /* There is one exception to the general way of distributing:
6559     (a | b) ^ (a | c) -> (~a) & (b ^ c)  */
6560 if (code == XOR && inner_code == IOR)
6561 {
6562 inner_code = AND;
0c1c8ea6 6563 other = gen_unary (NOT, GET_MODE (x), GET_MODE (x), other);
6564 }
6565
6566  /* We may be able to continue distributing the result, so call
6567 ourselves recursively on the inner operation before forming the
6568 outer operation, which we return. */
6569 return gen_binary (inner_code, GET_MODE (x),
6570 apply_distributive_law (tem), other);
6571}
6572\f
6573/* We have X, a logical `and' of VAROP with the constant CONSTOP, to be done
6574 in MODE.
6575
6576 Return an equivalent form, if different from X. Otherwise, return X. If
6577 X is zero, we are to always construct the equivalent form. */
6578
6579static rtx
6580simplify_and_const_int (x, mode, varop, constop)
6581 rtx x;
6582 enum machine_mode mode;
6583 rtx varop;
5f4f0e22 6584 unsigned HOST_WIDE_INT constop;
230d793d 6585{
951553af 6586 unsigned HOST_WIDE_INT nonzero;
42301240 6587 int i;
230d793d 6588
6589 /* Simplify VAROP knowing that we will be only looking at some of the
6590 bits in it. */
e3d616e3 6591 varop = force_to_mode (varop, mode, constop, NULL_RTX, 0);
230d793d 6592
6593 /* If VAROP is a CLOBBER, we will fail so return it; if it is a
6594 CONST_INT, we are done. */
6595 if (GET_CODE (varop) == CLOBBER || GET_CODE (varop) == CONST_INT)
6596 return varop;
230d793d 6597
6598 /* See what bits may be nonzero in VAROP. Unlike the general case of
6599 a call to nonzero_bits, here we don't care about bits outside
6600 MODE. */
6601
6602 nonzero = nonzero_bits (varop, mode) & GET_MODE_MASK (mode);
6603
6604 /* Turn off all bits in the constant that are known to already be zero.
951553af 6605 Thus, if the AND isn't needed at all, we will have CONSTOP == NONZERO_BITS
6606 which is tested below. */
6607
951553af 6608 constop &= nonzero;
6609
6610 /* If we don't have any bits left, return zero. */
6611 if (constop == 0)
6612 return const0_rtx;
6613
6614 /* If VAROP is a NEG of something known to be zero or 1 and CONSTOP is
6615     a power of two, we can replace this with an ASHIFT.  */
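  /* For example, (and (neg X) (const_int 4)), where X is known to be
     0 or 1, becomes (ashift X (const_int 2)): the NEG yields 0 or -1,
     and ANDing -1 with 4 picks out exactly X moved to bit 2.  */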
6616 if (GET_CODE (varop) == NEG && nonzero_bits (XEXP (varop, 0), mode) == 1
6617 && (i = exact_log2 (constop)) >= 0)
6618 return simplify_shift_const (NULL_RTX, ASHIFT, mode, XEXP (varop, 0), i);
6619
6620 /* If VAROP is an IOR or XOR, apply the AND to both branches of the IOR
6621 or XOR, then try to apply the distributive law. This may eliminate
6622 operations if either branch can be simplified because of the AND.
6623 It may also make some cases more complex, but those cases probably
6624 won't match a pattern either with or without this. */
6625
6626 if (GET_CODE (varop) == IOR || GET_CODE (varop) == XOR)
6627 return
6628 gen_lowpart_for_combine
6629 (mode,
6630 apply_distributive_law
6631 (gen_binary (GET_CODE (varop), GET_MODE (varop),
6632 simplify_and_const_int (NULL_RTX, GET_MODE (varop),
6633 XEXP (varop, 0), constop),
6634 simplify_and_const_int (NULL_RTX, GET_MODE (varop),
6635 XEXP (varop, 1), constop))));
6636
6637 /* Get VAROP in MODE. Try to get a SUBREG if not. Don't make a new SUBREG
6638 if we already had one (just check for the simplest cases). */
6639 if (x && GET_CODE (XEXP (x, 0)) == SUBREG
6640 && GET_MODE (XEXP (x, 0)) == mode
6641 && SUBREG_REG (XEXP (x, 0)) == varop)
6642 varop = XEXP (x, 0);
6643 else
6644 varop = gen_lowpart_for_combine (mode, varop);
6645
6646 /* If we can't make the SUBREG, try to return what we were given. */
6647 if (GET_CODE (varop) == CLOBBER)
6648 return x ? x : varop;
6649
6650 /* If we are only masking insignificant bits, return VAROP. */
951553af 6651 if (constop == nonzero)
6652 x = varop;
6653
6654 /* Otherwise, return an AND. See how much, if any, of X we can use. */
6655 else if (x == 0 || GET_CODE (x) != AND || GET_MODE (x) != mode)
6139ff20 6656 x = gen_binary (AND, mode, varop, GEN_INT (constop));
6657
6658 else
6659 {
6660 if (GET_CODE (XEXP (x, 1)) != CONST_INT
6661 || INTVAL (XEXP (x, 1)) != constop)
5f4f0e22 6662 SUBST (XEXP (x, 1), GEN_INT (constop));
6663
6664 SUBST (XEXP (x, 0), varop);
6665 }
6666
6667 return x;
6668}
6669\f
6670/* Given an expression, X, compute which bits in X can be non-zero.
6671 We don't care about bits outside of those defined in MODE.
6672
6673   For most X this is simply GET_MODE_MASK (MODE), but if X is
6674 a shift, AND, or zero_extract, we can do better. */
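   For example, no bit outside the mask 12 can be nonzero in
   (and X (const_int 12)), and (lshiftrt:SI X (const_int 28)) can
   have nonzero bits only in its low four bits.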
6675
5f4f0e22 6676static unsigned HOST_WIDE_INT
951553af 6677nonzero_bits (x, mode)
6678 rtx x;
6679 enum machine_mode mode;
6680{
6681 unsigned HOST_WIDE_INT nonzero = GET_MODE_MASK (mode);
6682 unsigned HOST_WIDE_INT inner_nz;
6683 enum rtx_code code;
6684 int mode_width = GET_MODE_BITSIZE (mode);
6685 rtx tem;
6686
6687 /* For floating-point values, assume all bits are needed. */
6688 if (FLOAT_MODE_P (GET_MODE (x)) || FLOAT_MODE_P (mode))
6689 return nonzero;
6690
6691 /* If X is wider than MODE, use its mode instead. */
6692 if (GET_MODE_BITSIZE (GET_MODE (x)) > mode_width)
6693 {
6694 mode = GET_MODE (x);
951553af 6695 nonzero = GET_MODE_MASK (mode);
6696 mode_width = GET_MODE_BITSIZE (mode);
6697 }
6698
5f4f0e22 6699 if (mode_width > HOST_BITS_PER_WIDE_INT)
6700 /* Our only callers in this case look for single bit values. So
6701 just return the mode mask. Those tests will then be false. */
951553af 6702 return nonzero;
230d793d 6703
8baf60bb 6704#ifndef WORD_REGISTER_OPERATIONS
c6965c0f 6705 /* If MODE is wider than X, but both are a single word for both the host
6706 and target machines, we can compute this from which bits of the
6707 object might be nonzero in its own mode, taking into account the fact
6708 that on many CISC machines, accessing an object in a wider mode
6709 causes the high-order bits to become undefined. So they are
6710 not known to be zero. */
6711
6712 if (GET_MODE (x) != VOIDmode && GET_MODE (x) != mode
6713 && GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD
6714 && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
c6965c0f 6715 && GET_MODE_BITSIZE (mode) > GET_MODE_BITSIZE (GET_MODE (x)))
6716 {
6717 nonzero &= nonzero_bits (x, GET_MODE (x));
6718 nonzero |= GET_MODE_MASK (mode) & ~ GET_MODE_MASK (GET_MODE (x));
6719 return nonzero;
6720 }
6721#endif
6722
6723 code = GET_CODE (x);
6724 switch (code)
6725 {
6726 case REG:
6727#ifdef STACK_BOUNDARY
6728 /* If this is the stack pointer, we may know something about its
6729 alignment. If PUSH_ROUNDING is defined, it is possible for the
6730 stack to be momentarily aligned only to that amount, so we pick
6731 the least alignment. */
6732
6733 if (x == stack_pointer_rtx)
6734 {
6735 int sp_alignment = STACK_BOUNDARY / BITS_PER_UNIT;
6736
6737#ifdef PUSH_ROUNDING
6738 sp_alignment = MIN (PUSH_ROUNDING (1), sp_alignment);
6739#endif
6740
951553af 6741 return nonzero & ~ (sp_alignment - 1);
6742 }
6743#endif
6744
6745 /* If X is a register whose nonzero bits value is current, use it.
6746 Otherwise, if X is a register whose value we can find, use that
6747 value. Otherwise, use the previously-computed global nonzero bits
6748 for this register. */
6749
6750 if (reg_last_set_value[REGNO (x)] != 0
6751 && reg_last_set_mode[REGNO (x)] == mode
6752 && (reg_n_sets[REGNO (x)] == 1
6753 || reg_last_set_label[REGNO (x)] == label_tick)
6754 && INSN_CUID (reg_last_set[REGNO (x)]) < subst_low_cuid)
6755 return reg_last_set_nonzero_bits[REGNO (x)];
6756
6757 tem = get_last_value (x);
9afa3d54 6758
230d793d 6759 if (tem)
6760 {
6761#ifdef SHORT_IMMEDIATES_SIGN_EXTEND
6762 /* If X is narrower than MODE and TEM is a non-negative
6763 constant that would appear negative in the mode of X,
6764 sign-extend it for use in reg_nonzero_bits because some
6765 machines (maybe most) will actually do the sign-extension
6766 and this is the conservative approach.
6767
6768 ??? For 2.5, try to tighten up the MD files in this regard
6769 instead of this kludge. */
6770
6771 if (GET_MODE_BITSIZE (GET_MODE (x)) < mode_width
6772 && GET_CODE (tem) == CONST_INT
6773 && INTVAL (tem) > 0
6774 && 0 != (INTVAL (tem)
6775 & ((HOST_WIDE_INT) 1
9e69be8c 6776 << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
6777 tem = GEN_INT (INTVAL (tem)
6778 | ((HOST_WIDE_INT) (-1)
6779 << GET_MODE_BITSIZE (GET_MODE (x))));
6780#endif
6781 return nonzero_bits (tem, mode);
6782 }
6783 else if (nonzero_sign_valid && reg_nonzero_bits[REGNO (x)])
6784 return reg_nonzero_bits[REGNO (x)] & nonzero;
230d793d 6785 else
951553af 6786 return nonzero;
6787
6788 case CONST_INT:
6789#ifdef SHORT_IMMEDIATES_SIGN_EXTEND
6790 /* If X is negative in MODE, sign-extend the value. */
6791 if (INTVAL (x) > 0 && mode_width < BITS_PER_WORD
6792 && 0 != (INTVAL (x) & ((HOST_WIDE_INT) 1 << (mode_width - 1))))
6793 return (INTVAL (x) | ((HOST_WIDE_INT) (-1) << mode_width));
6794#endif
6795
6796 return INTVAL (x);
6797
230d793d 6798 case MEM:
8baf60bb 6799#ifdef LOAD_EXTEND_OP
6800 /* In many, if not most, RISC machines, reading a byte from memory
6801 zeros the rest of the register. Noticing that fact saves a lot
6802 of extra zero-extends. */
6803 if (LOAD_EXTEND_OP (GET_MODE (x)) == ZERO_EXTEND)
6804 nonzero &= GET_MODE_MASK (GET_MODE (x));
230d793d 6805#endif
8baf60bb 6806 break;
230d793d 6807
6808 case EQ: case NE:
6809 case GT: case GTU:
6810 case LT: case LTU:
6811 case GE: case GEU:
6812 case LE: case LEU:
3f508eca 6813
6814 /* If this produces an integer result, we know which bits are set.
6815 Code here used to clear bits outside the mode of X, but that is
6816 now done above. */
230d793d 6817
6818 if (GET_MODE_CLASS (mode) == MODE_INT
6819 && mode_width <= HOST_BITS_PER_WIDE_INT)
6820 nonzero = STORE_FLAG_VALUE;
230d793d 6821 break;
230d793d 6822
230d793d 6823 case NEG:
6824 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
6825 == GET_MODE_BITSIZE (GET_MODE (x)))
951553af 6826 nonzero = 1;
6827
6828 if (GET_MODE_SIZE (GET_MODE (x)) < mode_width)
951553af 6829 nonzero |= (GET_MODE_MASK (mode) & ~ GET_MODE_MASK (GET_MODE (x)));
230d793d 6830 break;
6831
6832 case ABS:
6833 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
6834 == GET_MODE_BITSIZE (GET_MODE (x)))
951553af 6835 nonzero = 1;
d0ab8cd3 6836 break;
6837
6838 case TRUNCATE:
951553af 6839 nonzero &= (nonzero_bits (XEXP (x, 0), mode) & GET_MODE_MASK (mode));
6840 break;
6841
6842 case ZERO_EXTEND:
951553af 6843 nonzero &= nonzero_bits (XEXP (x, 0), mode);
230d793d 6844 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
951553af 6845 nonzero &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
6846 break;
6847
6848 case SIGN_EXTEND:
6849 /* If the sign bit is known clear, this is the same as ZERO_EXTEND.
6850 Otherwise, show all the bits in the outer mode but not the inner
6851 may be non-zero. */
951553af 6852 inner_nz = nonzero_bits (XEXP (x, 0), mode);
6853 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
6854 {
6855 inner_nz &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
6856 if (inner_nz &
6857 (((HOST_WIDE_INT) 1
6858 << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1))))
951553af 6859 inner_nz |= (GET_MODE_MASK (mode)
6860 & ~ GET_MODE_MASK (GET_MODE (XEXP (x, 0))));
6861 }
6862
951553af 6863 nonzero &= inner_nz;
6864 break;
6865
6866 case AND:
6867 nonzero &= (nonzero_bits (XEXP (x, 0), mode)
6868 & nonzero_bits (XEXP (x, 1), mode));
6869 break;
6870
6871 case XOR: case IOR:
6872 case UMIN: case UMAX: case SMIN: case SMAX:
6873 nonzero &= (nonzero_bits (XEXP (x, 0), mode)
6874 | nonzero_bits (XEXP (x, 1), mode));
6875 break;
6876
6877 case PLUS: case MINUS:
6878 case MULT:
6879 case DIV: case UDIV:
6880 case MOD: case UMOD:
6881 /* We can apply the rules of arithmetic to compute the number of
6882 high- and low-order zero bits of these operations. We start by
6883 computing the width (position of the highest-order non-zero bit)
6884 and the number of low-order zero bits for each value. */
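	 For example, a sum of two values of widths WIDTH0 and WIDTH1 has
	 width at most MAX (WIDTH0, WIDTH1) + 1, and a product of values
	 with LOW0 and LOW1 low-order zero bits has at least LOW0 + LOW1
	 low-order zero bits.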
6885 {
6886 unsigned HOST_WIDE_INT nz0 = nonzero_bits (XEXP (x, 0), mode);
6887 unsigned HOST_WIDE_INT nz1 = nonzero_bits (XEXP (x, 1), mode);
6888 int width0 = floor_log2 (nz0) + 1;
6889 int width1 = floor_log2 (nz1) + 1;
6890 int low0 = floor_log2 (nz0 & -nz0);
6891 int low1 = floor_log2 (nz1 & -nz1);
6892 int op0_maybe_minusp = (nz0 & ((HOST_WIDE_INT) 1 << (mode_width - 1)));
6893 int op1_maybe_minusp = (nz1 & ((HOST_WIDE_INT) 1 << (mode_width - 1)));
6894 int result_width = mode_width;
6895 int result_low = 0;
6896
6897 switch (code)
6898 {
6899 case PLUS:
6900 result_width = MAX (width0, width1) + 1;
6901 result_low = MIN (low0, low1);
6902 break;
6903 case MINUS:
6904 result_low = MIN (low0, low1);
6905 break;
6906 case MULT:
6907 result_width = width0 + width1;
6908 result_low = low0 + low1;
6909 break;
6910 case DIV:
6911 if (! op0_maybe_minusp && ! op1_maybe_minusp)
6912 result_width = width0;
6913 break;
6914 case UDIV:
6915 result_width = width0;
6916 break;
6917 case MOD:
6918 if (! op0_maybe_minusp && ! op1_maybe_minusp)
6919 result_width = MIN (width0, width1);
6920 result_low = MIN (low0, low1);
6921 break;
6922 case UMOD:
6923 result_width = MIN (width0, width1);
6924 result_low = MIN (low0, low1);
6925 break;
6926 }
6927
6928 if (result_width < mode_width)
951553af 6929 nonzero &= ((HOST_WIDE_INT) 1 << result_width) - 1;
6930
6931 if (result_low > 0)
951553af 6932 nonzero &= ~ (((HOST_WIDE_INT) 1 << result_low) - 1);
6933 }
6934 break;
6935
6936 case ZERO_EXTRACT:
6937 if (GET_CODE (XEXP (x, 1)) == CONST_INT
5f4f0e22 6938 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
951553af 6939 nonzero &= ((HOST_WIDE_INT) 1 << INTVAL (XEXP (x, 1))) - 1;
6940 break;
6941
6942 case SUBREG:
6943 /* If this is a SUBREG formed for a promoted variable that has
6944 been zero-extended, we know that at least the high-order bits
6945 are zero, though others might be too. */
6946
6947 if (SUBREG_PROMOTED_VAR_P (x) && SUBREG_PROMOTED_UNSIGNED_P (x))
6948 nonzero = (GET_MODE_MASK (GET_MODE (x))
6949 & nonzero_bits (SUBREG_REG (x), GET_MODE (x)));
c3c2cb37 6950
6951 /* If the inner mode is a single word for both the host and target
6952 machines, we can compute this from which bits of the inner
951553af 6953 object might be nonzero. */
230d793d 6954 if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) <= BITS_PER_WORD
6955 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
6956 <= HOST_BITS_PER_WIDE_INT))
230d793d 6957 {
951553af 6958 nonzero &= nonzero_bits (SUBREG_REG (x), mode);
6959
6960#ifndef WORD_REGISTER_OPERATIONS
6961 /* On many CISC machines, accessing an object in a wider mode
6962 causes the high-order bits to become undefined. So they are
6963 not known to be zero. */
6964 if (GET_MODE_SIZE (GET_MODE (x))
6965 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
6966 nonzero |= (GET_MODE_MASK (GET_MODE (x))
6967 & ~ GET_MODE_MASK (GET_MODE (SUBREG_REG (x))));
6968#endif
6969 }
6970 break;
6971
6972 case ASHIFTRT:
6973 case LSHIFTRT:
6974 case ASHIFT:
230d793d 6975 case ROTATE:
951553af 6976 /* The nonzero bits are in two classes: any bits within MODE
230d793d 6977 that aren't in GET_MODE (x) are always significant. The rest of the
951553af 6978 nonzero bits are those that are significant in the operand of
6979 the shift when shifted the appropriate number of bits. This
6980 shows that high-order bits are cleared by the right shift and
6981 low-order bits by left shifts. */
6982 if (GET_CODE (XEXP (x, 1)) == CONST_INT
6983 && INTVAL (XEXP (x, 1)) >= 0
5f4f0e22 6984 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
230d793d
RS
6985 {
6986 enum machine_mode inner_mode = GET_MODE (x);
6987 int width = GET_MODE_BITSIZE (inner_mode);
6988 int count = INTVAL (XEXP (x, 1));
5f4f0e22 6989 unsigned HOST_WIDE_INT mode_mask = GET_MODE_MASK (inner_mode);
6990 unsigned HOST_WIDE_INT op_nonzero = nonzero_bits (XEXP (x, 0), mode);
6991 unsigned HOST_WIDE_INT inner = op_nonzero & mode_mask;
5f4f0e22 6992 unsigned HOST_WIDE_INT outer = 0;
6993
6994 if (mode_width > width)
951553af 6995 outer = (op_nonzero & nonzero & ~ mode_mask);
230d793d
RS
6996
6997 if (code == LSHIFTRT)
6998 inner >>= count;
6999 else if (code == ASHIFTRT)
7000 {
7001 inner >>= count;
7002
951553af 7003 /* If the sign bit may have been nonzero before the shift, we
230d793d 7004 need to mark all the places it could have been copied to
951553af 7005 by the shift as possibly nonzero. */
7006 if (inner & ((HOST_WIDE_INT) 1 << (width - 1 - count)))
7007 inner |= (((HOST_WIDE_INT) 1 << count) - 1) << (width - count);
230d793d 7008 }
45620ed4 7009 else if (code == ASHIFT)
7010 inner <<= count;
7011 else
7012 inner = ((inner << (count % width)
7013 | (inner >> (width - (count % width)))) & mode_mask);
7014
951553af 7015 nonzero &= (outer | inner);
7016 }
7017 break;
7018
7019 case FFS:
7020 /* This is at most the number of bits in the mode. */
951553af 7021 nonzero = ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width) + 1)) - 1;
230d793d 7022 break;
7023
7024 case IF_THEN_ELSE:
7025 nonzero &= (nonzero_bits (XEXP (x, 1), mode)
7026 | nonzero_bits (XEXP (x, 2), mode));
d0ab8cd3 7027 break;
7028 }
7029
951553af 7030 return nonzero;
7031}
7032\f
d0ab8cd3 7033/* Return the number of bits at the high-order end of X that are known to
7034 be equal to the sign bit. X will be used in mode MODE; if MODE is
7035 VOIDmode, X will be used in its own mode. The returned value will always
7036 be between 1 and the number of bits in MODE. */
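   For example, in SImode, (const_int -1) has 32 sign bit copies,
   while (sign_extend:SI (reg:QI X)) has at least 25: the 24 bits
   created by the extension plus the QImode sign bit itself.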
7037
7038static int
7039num_sign_bit_copies (x, mode)
7040 rtx x;
7041 enum machine_mode mode;
7042{
7043 enum rtx_code code = GET_CODE (x);
7044 int bitwidth;
7045 int num0, num1, result;
951553af 7046 unsigned HOST_WIDE_INT nonzero;
d0ab8cd3
RK
7047 rtx tem;
7048
7049 /* If we weren't given a mode, use the mode of X. If the mode is still
7050 VOIDmode, we don't know anything. Likewise if one of the modes is
7051 floating-point. */
7052
7053 if (mode == VOIDmode)
7054 mode = GET_MODE (x);
7055
1c75dfa4 7056 if (mode == VOIDmode || FLOAT_MODE_P (mode) || FLOAT_MODE_P (GET_MODE (x)))
6752e8d2 7057 return 1;
7058
7059 bitwidth = GET_MODE_BITSIZE (mode);
7060
7061 /* For a smaller object, just ignore the high bits. */
7062 if (bitwidth < GET_MODE_BITSIZE (GET_MODE (x)))
7063 return MAX (1, (num_sign_bit_copies (x, GET_MODE (x))
7064 - (GET_MODE_BITSIZE (GET_MODE (x)) - bitwidth)));
7065
7066#ifndef WORD_REGISTER_OPERATIONS
7067 /* If this machine does not do all register operations on the entire
7068 register and MODE is wider than the mode of X, we can say nothing
7069 at all about the high-order bits. */
7070 if (GET_MODE (x) != VOIDmode && bitwidth > GET_MODE_BITSIZE (GET_MODE (x)))
7071 return 1;
7072#endif
7073
7074 switch (code)
7075 {
7076 case REG:
7077
7078 if (reg_last_set_value[REGNO (x)] != 0
7079 && reg_last_set_mode[REGNO (x)] == mode
7080 && (reg_n_sets[REGNO (x)] == 1
7081 || reg_last_set_label[REGNO (x)] == label_tick)
7082 && INSN_CUID (reg_last_set[REGNO (x)]) < subst_low_cuid)
7083 return reg_last_set_sign_bit_copies[REGNO (x)];
7084
7085 tem = get_last_value (x);
7086 if (tem != 0)
7087 return num_sign_bit_copies (tem, mode);
7088
7089 if (nonzero_sign_valid && reg_sign_bit_copies[REGNO (x)] != 0)
7090 return reg_sign_bit_copies[REGNO (x)];
7091 break;
7092
457816e2 7093 case MEM:
8baf60bb 7094#ifdef LOAD_EXTEND_OP
457816e2 7095 /* Some RISC machines sign-extend all loads of smaller than a word. */
7096 if (LOAD_EXTEND_OP (GET_MODE (x)) == SIGN_EXTEND)
7097 return MAX (1, bitwidth - GET_MODE_BITSIZE (GET_MODE (x)) + 1);
457816e2 7098#endif
8baf60bb 7099 break;
457816e2 7100
7101 case CONST_INT:
7102 /* If the constant is negative, take its 1's complement and remask.
7103 Then see how many zero bits we have. */
951553af 7104 nonzero = INTVAL (x) & GET_MODE_MASK (mode);
ac49a949 7105 if (bitwidth <= HOST_BITS_PER_WIDE_INT
7106 && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
7107 nonzero = (~ nonzero) & GET_MODE_MASK (mode);
d0ab8cd3 7108
951553af 7109 return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
7110
7111 case SUBREG:
7112 /* If this is a SUBREG for a promoted object that is sign-extended
7113 and we are looking at it in a wider mode, we know that at least the
7114 high-order bits are known to be sign bit copies. */
7115
7116 if (SUBREG_PROMOTED_VAR_P (x) && ! SUBREG_PROMOTED_UNSIGNED_P (x))
7117 return MAX (bitwidth - GET_MODE_BITSIZE (GET_MODE (x)) + 1,
7118 num_sign_bit_copies (SUBREG_REG (x), mode));
c3c2cb37 7119
7120 /* For a smaller object, just ignore the high bits. */
7121 if (bitwidth <= GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))))
7122 {
7123 num0 = num_sign_bit_copies (SUBREG_REG (x), VOIDmode);
7124 return MAX (1, (num0
7125 - (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
7126 - bitwidth)));
7127 }
457816e2 7128
7129#ifdef WORD_REGISTER_OPERATIONS
7130 /* For paradoxical SUBREGs on machines where all register operations
7131 affect the entire register, just look inside. Note that we are
7132 passing MODE to the recursive call, so the number of sign bit copies
7133 will remain relative to that mode, not the inner mode. */
7134
7135 if (GET_MODE_SIZE (GET_MODE (x))
7136 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
7137 return num_sign_bit_copies (SUBREG_REG (x), mode);
7138#endif
7139 break;
7140
7141 case SIGN_EXTRACT:
7142 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
7143 return MAX (1, bitwidth - INTVAL (XEXP (x, 1)));
7144 break;
7145
7146 case SIGN_EXTEND:
7147 return (bitwidth - GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
7148 + num_sign_bit_copies (XEXP (x, 0), VOIDmode));
7149
7150 case TRUNCATE:
7151 /* For a smaller object, just ignore the high bits. */
7152 num0 = num_sign_bit_copies (XEXP (x, 0), VOIDmode);
7153 return MAX (1, (num0 - (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
7154 - bitwidth)));
7155
7156 case NOT:
7157 return num_sign_bit_copies (XEXP (x, 0), mode);
7158
7159 case ROTATE: case ROTATERT:
7160 /* If we are rotating left by a number of bits less than the number
7161 of sign bit copies, we can just subtract that amount from the
7162 number. */
7163 if (GET_CODE (XEXP (x, 1)) == CONST_INT
7164 && INTVAL (XEXP (x, 1)) >= 0 && INTVAL (XEXP (x, 1)) < bitwidth)
7165 {
7166 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
7167 return MAX (1, num0 - (code == ROTATE ? INTVAL (XEXP (x, 1))
7168 : bitwidth - INTVAL (XEXP (x, 1))));
7169 }
7170 break;
7171
7172 case NEG:
7173 /* In general, this subtracts one sign bit copy. But if the value
7174 is known to be positive, the number of sign bit copies is the
7175 same as that of the input. Finally, if the input has just one bit
7176 that might be nonzero, all the bits are copies of the sign bit. */
7177 nonzero = nonzero_bits (XEXP (x, 0), mode);
7178 if (nonzero == 1)
7179 return bitwidth;
7180
7181 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
7182 if (num0 > 1
ac49a949 7183 && bitwidth <= HOST_BITS_PER_WIDE_INT
951553af 7184 && (((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero))
7185 num0--;
7186
7187 return num0;
7188
7189 case IOR: case AND: case XOR:
7190 case SMIN: case SMAX: case UMIN: case UMAX:
7191 /* Logical operations will preserve the number of sign-bit copies.
7192 MIN and MAX operations always return one of the operands. */
7193 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
7194 num1 = num_sign_bit_copies (XEXP (x, 1), mode);
7195 return MIN (num0, num1);
7196
7197 case PLUS: case MINUS:
7198 /* For addition and subtraction, we can have a 1-bit carry. However,
7199 if we are subtracting 1 from a positive number, there will not
7200 be such a carry. Furthermore, if the positive number is known to
7201 be 0 or 1, we know the result is either -1 or 0. */
7202
3e3ea975 7203 if (code == PLUS && XEXP (x, 1) == constm1_rtx
9295e6af 7204 && bitwidth <= HOST_BITS_PER_WIDE_INT)
d0ab8cd3 7205 {
7206 nonzero = nonzero_bits (XEXP (x, 0), mode);
7207 if ((((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero) == 0)
7208 return (nonzero == 1 || nonzero == 0 ? bitwidth
7209 : bitwidth - floor_log2 (nonzero) - 1);
7210 }
7211
7212 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
7213 num1 = num_sign_bit_copies (XEXP (x, 1), mode);
7214 return MAX (1, MIN (num0, num1) - 1);
7215
7216 case MULT:
7217 /* The number of bits of the product is the sum of the number of
7218	 bits of both terms.  However, unless one of the terms is known
7219 to be positive, we must allow for an additional bit since negating
7220 a negative number can remove one sign bit copy. */
7221
7222 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
7223 num1 = num_sign_bit_copies (XEXP (x, 1), mode);
7224
7225 result = bitwidth - (bitwidth - num0) - (bitwidth - num1);
7226 if (result > 0
9295e6af 7227 && bitwidth <= HOST_BITS_PER_WIDE_INT
951553af 7228 && ((nonzero_bits (XEXP (x, 0), mode)
d0ab8cd3 7229 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
7230	  && ((nonzero_bits (XEXP (x, 1), mode)
7231	       & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
7232 result--;
7233
7234 return MAX (1, result);
7235
7236 case UDIV:
7237 /* The result must be <= the first operand. */
7238 return num_sign_bit_copies (XEXP (x, 0), mode);
7239
7240 case UMOD:
7241      /* The result must be <= the second operand.  */
7242 return num_sign_bit_copies (XEXP (x, 1), mode);
7243
7244 case DIV:
7245 /* Similar to unsigned division, except that we have to worry about
7246 the case where the divisor is negative, in which case we have
7247 to add 1. */
7248 result = num_sign_bit_copies (XEXP (x, 0), mode);
7249 if (result > 1
ac49a949 7250 && bitwidth <= HOST_BITS_PER_WIDE_INT
951553af 7251 && (nonzero_bits (XEXP (x, 1), mode)
7252 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
7253 result --;
7254
7255 return result;
7256
7257 case MOD:
7258 result = num_sign_bit_copies (XEXP (x, 1), mode);
7259 if (result > 1
ac49a949 7260 && bitwidth <= HOST_BITS_PER_WIDE_INT
951553af 7261 && (nonzero_bits (XEXP (x, 1), mode)
7262 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
7263 result --;
7264
7265 return result;
7266
7267 case ASHIFTRT:
7268 /* Shifts by a constant add to the number of bits equal to the
7269 sign bit. */
7270 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
7271 if (GET_CODE (XEXP (x, 1)) == CONST_INT
7272 && INTVAL (XEXP (x, 1)) > 0)
7273 num0 = MIN (bitwidth, num0 + INTVAL (XEXP (x, 1)));
7274
7275 return num0;
7276
7277 case ASHIFT:
7278 /* Left shifts destroy copies. */
7279 if (GET_CODE (XEXP (x, 1)) != CONST_INT
7280 || INTVAL (XEXP (x, 1)) < 0
7281 || INTVAL (XEXP (x, 1)) >= bitwidth)
7282 return 1;
7283
7284 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
7285 return MAX (1, num0 - INTVAL (XEXP (x, 1)));
7286
7287 case IF_THEN_ELSE:
7288 num0 = num_sign_bit_copies (XEXP (x, 1), mode);
7289 num1 = num_sign_bit_copies (XEXP (x, 2), mode);
7290 return MIN (num0, num1);
7291
7292#if STORE_FLAG_VALUE == -1
7293 case EQ: case NE: case GE: case GT: case LE: case LT:
7294 case GEU: case GTU: case LEU: case LTU:
7295 return bitwidth;
7296#endif
7297 }
7298
7299 /* If we haven't been able to figure it out by one of the above rules,
7300 see if some of the high-order bits are known to be zero. If so,
7301 count those bits and return one less than that amount. If we can't
7302 safely compute the mask for this mode, always return BITWIDTH. */
7303
7304 if (bitwidth > HOST_BITS_PER_WIDE_INT)
6752e8d2 7305 return 1;
d0ab8cd3 7306
951553af 7307 nonzero = nonzero_bits (x, mode);
df6f4086 7308 return (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))
951553af 7309 ? 1 : bitwidth - floor_log2 (nonzero) - 1);
7310}
7311\f
7312/* Return the number of "extended" bits there are in X, when interpreted
7313 as a quantity in MODE whose signedness is indicated by UNSIGNEDP. For
7314 unsigned quantities, this is the number of high-order zero bits.
7315 For signed quantities, this is the number of copies of the sign bit
7316   minus 1.  In both cases, this function returns the number of "spare"
7317 bits. For example, if two quantities for which this function returns
7318 at least 1 are added, the addition is known not to overflow.
7319
7320 This function will always return 0 unless called during combine, which
7321 implies that it must be called from a define_split. */
7322
7323int
7324extended_count (x, mode, unsignedp)
7325 rtx x;
7326 enum machine_mode mode;
7327 int unsignedp;
7328{
951553af 7329 if (nonzero_sign_valid == 0)
1a26b032
RK
7330 return 0;
7331
7332 return (unsignedp
ac49a949
RS
7333 ? (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7334 && (GET_MODE_BITSIZE (mode) - 1
951553af 7335 - floor_log2 (nonzero_bits (x, mode))))
1a26b032
RK
7336 : num_sign_bit_copies (x, mode) - 1);
7337}
7338\f
230d793d
RS
7339/* This function is called from `simplify_shift_const' to merge two
7340 outer operations. Specifically, we have already found that we need
7341 to perform operation *POP0 with constant *PCONST0 at the outermost
7342 position. We would now like to also perform OP1 with constant CONST1
7343 (with *POP0 being done last).
7344
7345 Return 1 if we can do the operation and update *POP0 and *PCONST0 with
7346 the resulting operation. *PCOMP_P is set to 1 if we would need to
7347 complement the innermost operand, otherwise it is unchanged.
7348
7349 MODE is the mode in which the operation will be done. No bits outside
7350 the width of this mode matter. It is assumed that the width of this mode
5f4f0e22 7351 is smaller than or equal to HOST_BITS_PER_WIDE_INT.
230d793d
RS
7352
7353 If *POP0 or OP1 is NIL, it means no operation is required. Only NEG, PLUS,
7354 IOR, XOR, and AND are supported. We may set *POP0 to SET if the proper
7355 result is simply *PCONST0.
7356
7357 If the resulting operation cannot be expressed as one operation, we
7358 return 0 and do not change *POP0, *PCONST0, and *PCOMP_P. */
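/* (Editorial example with assumed values, not from the original
   source: if *POP0 is AND with *PCONST0 == 0xff00 and OP1 is AND with
   CONST1 == 0x0ff0, the code below first clears the unimportant bits
   of CONST1 and then intersects the masks, returning 1 with a single
   AND and *PCONST0 == 0x0f00.)  */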
7359
7360static int
7361merge_outer_ops (pop0, pconst0, op1, const1, mode, pcomp_p)
7362 enum rtx_code *pop0;
5f4f0e22 7363 HOST_WIDE_INT *pconst0;
230d793d 7364 enum rtx_code op1;
5f4f0e22 7365 HOST_WIDE_INT const1;
230d793d
RS
7366 enum machine_mode mode;
7367 int *pcomp_p;
7368{
7369 enum rtx_code op0 = *pop0;
5f4f0e22 7370 HOST_WIDE_INT const0 = *pconst0;
230d793d
RS
7371
7372 const0 &= GET_MODE_MASK (mode);
7373 const1 &= GET_MODE_MASK (mode);
7374
7375 /* If OP0 is an AND, clear unimportant bits in CONST1. */
7376 if (op0 == AND)
7377 const1 &= const0;
7378
7379 /* If OP0 or OP1 is NIL, this is easy. Similarly if they are the same or
7380 if OP0 is SET. */
7381
7382 if (op1 == NIL || op0 == SET)
7383 return 1;
7384
7385 else if (op0 == NIL)
7386 op0 = op1, const0 = const1;
7387
7388 else if (op0 == op1)
7389 {
7390 switch (op0)
7391 {
7392 case AND:
7393 const0 &= const1;
7394 break;
7395 case IOR:
7396 const0 |= const1;
7397 break;
7398 case XOR:
7399 const0 ^= const1;
7400 break;
7401 case PLUS:
7402 const0 += const1;
7403 break;
7404 case NEG:
7405 op0 = NIL;
7406 break;
7407 }
7408 }
7409
7410 /* Otherwise, if either is a PLUS or NEG, we can't do anything. */
7411 else if (op0 == PLUS || op1 == PLUS || op0 == NEG || op1 == NEG)
7412 return 0;
7413
7414 /* If the two constants aren't the same, we can't do anything. The
7415 remaining six cases can all be done. */
7416 else if (const0 != const1)
7417 return 0;
7418
7419 else
7420 switch (op0)
7421 {
7422 case IOR:
7423 if (op1 == AND)
7424 /* (a & b) | b == b */
7425 op0 = SET;
7426 else /* op1 == XOR */
7427 /* (a ^ b) | b == a | b */
7428 ;
7429 break;
7430
7431 case XOR:
7432 if (op1 == AND)
7433 /* (a & b) ^ b == (~a) & b */
7434 op0 = AND, *pcomp_p = 1;
7435 else /* op1 == IOR */
7436 /* (a | b) ^ b == a & ~b */
7437 op0 = AND, *pconst0 = ~ const0;
7438 break;
7439
7440 case AND:
7441 if (op1 == IOR)
7442 /* (a | b) & b == b */
7443 op0 = SET;
7444 else /* op1 == XOR */
7445 /* (a ^ b) & b == (~a) & b */
7446 *pcomp_p = 1;
7447 break;
7448 }
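/* (Editorial check of one identity above: where a bit of b is 1, the
   same bit of (a & b) ^ b is the complement of that bit of a, and
   where a bit of b is 0 it is 0; hence (a & b) ^ b == (~a) & b, which
   is why the XOR/AND case records the complement in *PCOMP_P.)  */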
7449
7450 /* Check for NO-OP cases. */
7451 const0 &= GET_MODE_MASK (mode);
7452 if (const0 == 0
7453 && (op0 == IOR || op0 == XOR || op0 == PLUS))
7454 op0 = NIL;
7455 else if (const0 == 0 && op0 == AND)
7456 op0 = SET;
7457 else if (const0 == GET_MODE_MASK (mode) && op0 == AND)
7458 op0 = NIL;
7459
7460 *pop0 = op0;
7461 *pconst0 = const0;
7462
7463 return 1;
7464}
7465\f
7466/* Simplify a shift of VAROP by COUNT bits. CODE says what kind of shift.
7467 The result of the shift is RESULT_MODE. X, if non-zero, is an expression
7468 that we started with.
7469
7470 The shift is normally computed in the widest mode we find in VAROP, as
7471 long as it isn't a different number of words than RESULT_MODE. Exceptions
7472 are ASHIFTRT and ROTATE, which are always done in their original mode. */
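/* (Editorial example of the kind of rewrite this routine performs:
   two logical right shifts merge by adding their counts, so
   (lshiftrt:SI (lshiftrt:SI X 2) 3) becomes (lshiftrt:SI X 5); when a
   mask is needed to describe the discarded bits, it is folded into
   OUTER_OP as an AND.)  */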
7473
7474static rtx
7475simplify_shift_const (x, code, result_mode, varop, count)
7476 rtx x;
7477 enum rtx_code code;
7478 enum machine_mode result_mode;
7479 rtx varop;
7480 int count;
7481{
7482 enum rtx_code orig_code = code;
7483 int orig_count = count;
7484 enum machine_mode mode = result_mode;
7485 enum machine_mode shift_mode, tmode;
7486 int mode_words
7487 = (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
7488 /* We form (outer_op (code varop count) (outer_const)). */
7489 enum rtx_code outer_op = NIL;
c4e861e8 7490 HOST_WIDE_INT outer_const = 0;
230d793d
RS
7491 rtx const_rtx;
7492 int complement_p = 0;
7493 rtx new;
7494
7495 /* If we were given an invalid count, don't do anything except exactly
7496 what was requested. */
7497
7498 if (count < 0 || count > GET_MODE_BITSIZE (mode))
7499 {
7500 if (x)
7501 return x;
7502
5f4f0e22 7503 return gen_rtx (code, mode, varop, GEN_INT (count));
230d793d
RS
7504 }
7505
7506 /* Unless one of the branches of the `if' in this loop does a `continue',
7507 we will `break' the loop after the `if'. */
7508
7509 while (count != 0)
7510 {
7511 /* If we have an operand of (clobber (const_int 0)), just return that
7512 value. */
7513 if (GET_CODE (varop) == CLOBBER)
7514 return varop;
7515
7516 /* If we discovered we had to complement VAROP, leave. Making a NOT
7517 here would cause an infinite loop. */
7518 if (complement_p)
7519 break;
7520
7521 /* Convert ROTATERT to ROTATE. */
7522 if (code == ROTATERT)
7523 code = ROTATE, count = GET_MODE_BITSIZE (result_mode) - count;
7524
230d793d
RS
7525 /* We need to determine what mode we will do the shift in. If the
7526 shift is an ASHIFTRT or ROTATE, we must always do it in the mode it
7527 was originally done in. Otherwise, we can do it in MODE, the widest
7528 mode encountered. */
7529 shift_mode = (code == ASHIFTRT || code == ROTATE ? result_mode : mode);
7530
7531 /* Handle cases where the count is greater than the size of the mode
7532 minus 1. For ASHIFTRT, use the size minus one as the count (this can
7533 occur when simplifying (lshiftrt (ashiftrt ..))). For rotates,
7534 take the count modulo the size. For other shifts, the result is
7535 zero.
7536
7537 Since these shifts are being produced by the compiler by combining
7538 multiple operations, each of which are defined, we know what the
7539 result is supposed to be. */
7540
7541 if (count > GET_MODE_BITSIZE (shift_mode) - 1)
7542 {
7543 if (code == ASHIFTRT)
7544 count = GET_MODE_BITSIZE (shift_mode) - 1;
7545 else if (code == ROTATE || code == ROTATERT)
7546 count %= GET_MODE_BITSIZE (shift_mode);
7547 else
7548 {
7549 /* We can't simply return zero because there may be an
7550 outer op. */
7551 varop = const0_rtx;
7552 count = 0;
7553 break;
7554 }
7555 }
7556
7557 /* Negative counts are invalid and should not have been made (a
7558 programmer-specified negative count should have been handled
7559 above). */
7560 else if (count < 0)
7561 abort ();
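/* (Editorial illustration of the out-of-range rules above: in SImode,
   (ashiftrt X 40) is handled as (ashiftrt X 31), which still smears
   the sign bit across the whole word, while (lshiftrt X 40) reduces
   VAROP to the constant zero so that only a pending outer operation
   can contribute to the result.)  */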
7562
312def2e
RK
7563 /* An arithmetic right shift of a quantity known to be -1 or 0
7564 is a no-op. */
7565 if (code == ASHIFTRT
7566 && (num_sign_bit_copies (varop, shift_mode)
7567 == GET_MODE_BITSIZE (shift_mode)))
d0ab8cd3 7568 {
312def2e
RK
7569 count = 0;
7570 break;
7571 }
d0ab8cd3 7572
312def2e
RK
7573 /* If we are doing an arithmetic right shift and discarding all but
7574 the sign bit copies, this is equivalent to doing a shift by the
7575 bitsize minus one. Convert it into that shift because it will often
7576 allow other simplifications. */
500c518b 7577
312def2e
RK
7578 if (code == ASHIFTRT
7579 && (count + num_sign_bit_copies (varop, shift_mode)
7580 >= GET_MODE_BITSIZE (shift_mode)))
7581 count = GET_MODE_BITSIZE (shift_mode) - 1;
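/* (Editorial example with an assumed operand: if VAROP is a SImode
   sign-extension from QImode, num_sign_bit_copies is at least 25, so
   an arithmetic right shift by 7 or more keeps nothing but sign-bit
   copies and is rewritten just above as a shift by 31.)  */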
500c518b 7582
230d793d
RS
7583 /* We simplify the tests below and elsewhere by converting
7584 ASHIFTRT to LSHIFTRT if we know the sign bit is clear.
7585 `make_compound_operation' will convert it to an ASHIFTRT for
7586 those machines (such as Vax) that don't have a LSHIFTRT. */
5f4f0e22 7587 if (GET_MODE_BITSIZE (shift_mode) <= HOST_BITS_PER_WIDE_INT
230d793d 7588 && code == ASHIFTRT
951553af 7589 && ((nonzero_bits (varop, shift_mode)
5f4f0e22
CH
7590 & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (shift_mode) - 1)))
7591 == 0))
230d793d
RS
7592 code = LSHIFTRT;
7593
7594 switch (GET_CODE (varop))
7595 {
7596 case SIGN_EXTEND:
7597 case ZERO_EXTEND:
7598 case SIGN_EXTRACT:
7599 case ZERO_EXTRACT:
7600 new = expand_compound_operation (varop);
7601 if (new != varop)
7602 {
7603 varop = new;
7604 continue;
7605 }
7606 break;
7607
7608 case MEM:
7609 /* If we have (xshiftrt (mem ...) C) and C is MODE_WIDTH
7610 minus the width of a smaller mode, we can do this with a
7611 SIGN_EXTEND or ZERO_EXTEND from the narrower memory location. */
7612 if ((code == ASHIFTRT || code == LSHIFTRT)
7613 && ! mode_dependent_address_p (XEXP (varop, 0))
7614 && ! MEM_VOLATILE_P (varop)
7615 && (tmode = mode_for_size (GET_MODE_BITSIZE (mode) - count,
7616 MODE_INT, 1)) != BLKmode)
7617 {
7618#if BYTES_BIG_ENDIAN
7619 new = gen_rtx (MEM, tmode, XEXP (varop, 0));
7620#else
7621 new = gen_rtx (MEM, tmode,
7622 plus_constant (XEXP (varop, 0),
7623 count / BITS_PER_UNIT));
7624 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (varop);
7625 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (varop);
7626 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (varop);
7627#endif
7628 varop = gen_rtx_combine (code == ASHIFTRT ? SIGN_EXTEND
7629 : ZERO_EXTEND, mode, new);
7630 count = 0;
7631 continue;
7632 }
7633 break;
7634
7635 case USE:
7636 /* Similar to the case above, except that we can only do this if
7637 the resulting mode is the same as that of the underlying
7638 MEM and adjust the address depending on the *bits* endianness
7639 because of the way that bit-field extract insns are defined. */
7640 if ((code == ASHIFTRT || code == LSHIFTRT)
7641 && (tmode = mode_for_size (GET_MODE_BITSIZE (mode) - count,
7642 MODE_INT, 1)) != BLKmode
7643 && tmode == GET_MODE (XEXP (varop, 0)))
7644 {
7645#if BITS_BIG_ENDIAN
7646 new = XEXP (varop, 0);
7647#else
7648 new = copy_rtx (XEXP (varop, 0));
7649 SUBST (XEXP (new, 0),
7650 plus_constant (XEXP (new, 0),
7651 count / BITS_PER_UNIT));
7652#endif
7653
7654 varop = gen_rtx_combine (code == ASHIFTRT ? SIGN_EXTEND
7655 : ZERO_EXTEND, mode, new);
7656 count = 0;
7657 continue;
7658 }
7659 break;
7660
7661 case SUBREG:
7662 /* If VAROP is a SUBREG, strip it as long as the inner operand has
7663 the same number of words as what we've seen so far. Then store
7664 the widest mode in MODE. */
f9e67232
RS
7665 if (subreg_lowpart_p (varop)
7666 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop)))
7667 > GET_MODE_SIZE (GET_MODE (varop)))
230d793d
RS
7668 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop)))
7669 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
7670 == mode_words))
7671 {
7672 varop = SUBREG_REG (varop);
7673 if (GET_MODE_SIZE (GET_MODE (varop)) > GET_MODE_SIZE (mode))
7674 mode = GET_MODE (varop);
7675 continue;
7676 }
7677 break;
7678
7679 case MULT:
7680 /* Some machines use MULT instead of ASHIFT because MULT
7681 is cheaper. But it is still better on those machines to
7682 merge two shifts into one. */
7683 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
7684 && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0)
7685 {
7686 varop = gen_binary (ASHIFT, GET_MODE (varop), XEXP (varop, 0),
5f4f0e22 7687 GEN_INT (exact_log2 (INTVAL (XEXP (varop, 1)))));
230d793d
RS
7688 continue;
7689 }
7690 break;
7691
7692 case UDIV:
7693 /* Similar, for when divides are cheaper. */
7694 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
7695 && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0)
7696 {
7697 varop = gen_binary (LSHIFTRT, GET_MODE (varop), XEXP (varop, 0),
5f4f0e22 7698 GEN_INT (exact_log2 (INTVAL (XEXP (varop, 1)))));
230d793d
RS
7699 continue;
7700 }
7701 break;
7702
7703 case ASHIFTRT:
7704 /* If we are extracting just the sign bit of an arithmetic right
7705 shift, that shift is not needed. */
7706 if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1)
7707 {
7708 varop = XEXP (varop, 0);
7709 continue;
7710 }
7711
7712 /* ... fall through ... */
7713
7714 case LSHIFTRT:
7715 case ASHIFT:
230d793d
RS
7716 case ROTATE:
7717 /* Here we have two nested shifts. The result is usually the
7718 AND of a new shift with a mask. We compute the result below. */
7719 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
7720 && INTVAL (XEXP (varop, 1)) >= 0
7721 && INTVAL (XEXP (varop, 1)) < GET_MODE_BITSIZE (GET_MODE (varop))
5f4f0e22
CH
7722 && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
7723 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
230d793d
RS
7724 {
7725 enum rtx_code first_code = GET_CODE (varop);
7726 int first_count = INTVAL (XEXP (varop, 1));
5f4f0e22 7727 unsigned HOST_WIDE_INT mask;
230d793d 7728 rtx mask_rtx;
230d793d 7729
230d793d
RS
7730 /* We have one common special case. We can't do any merging if
7731 the inner code is an ASHIFTRT of a smaller mode. However, if
7732 we have (ashift:M1 (subreg:M1 (ashiftrt:M2 FOO C1) 0) C2)
7733 with C2 == GET_MODE_BITSIZE (M1) - GET_MODE_BITSIZE (M2),
7734 we can convert it to
7735 (ashiftrt:M1 (ashift:M1 (and:M1 (subreg:M1 FOO 0) C3) C2) C1).
7736 This simplifies certain SIGN_EXTEND operations. */
7737 if (code == ASHIFT && first_code == ASHIFTRT
7738 && (GET_MODE_BITSIZE (result_mode)
7739 - GET_MODE_BITSIZE (GET_MODE (varop))) == count)
7740 {
7741 /* C3 has the low-order C1 bits zero. */
7742
5f4f0e22
CH
7743 mask = (GET_MODE_MASK (mode)
7744 & ~ (((HOST_WIDE_INT) 1 << first_count) - 1));
230d793d 7745
5f4f0e22 7746 varop = simplify_and_const_int (NULL_RTX, result_mode,
230d793d 7747 XEXP (varop, 0), mask);
5f4f0e22 7748 varop = simplify_shift_const (NULL_RTX, ASHIFT, result_mode,
230d793d
RS
7749 varop, count);
7750 count = first_count;
7751 code = ASHIFTRT;
7752 continue;
7753 }
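/* (Editorial instance of the rewrite above, with assumed modes
   M1 == SImode and M2 == QImode, so C2 == 24:
   (ashift:SI (subreg:SI (ashiftrt:QI FOO C1) 0) 24) becomes
   (ashiftrt:SI (ashift:SI (and:SI (subreg:SI FOO 0) C3) 24) C1),
   where the mask C3 computed above has its low C1 bits clear.)  */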
7754
d0ab8cd3
RK
7755 /* If this was (ashiftrt (ashift foo C1) C2) and FOO has more
7756 than C1 high-order bits equal to the sign bit, we can convert
7757 this to either an ASHIFT or an ASHIFTRT depending on the
7758 two counts.
230d793d
RS
7759
7760 We cannot do this if VAROP's mode is not SHIFT_MODE. */
7761
7762 if (code == ASHIFTRT && first_code == ASHIFT
7763 && GET_MODE (varop) == shift_mode
d0ab8cd3
RK
7764 && (num_sign_bit_copies (XEXP (varop, 0), shift_mode)
7765 > first_count))
230d793d 7766 {
d0ab8cd3
RK
7767 count -= first_count;
7768 if (count < 0)
7769 count = - count, code = ASHIFT;
7770 varop = XEXP (varop, 0);
7771 continue;
230d793d
RS
7772 }
7773
7774 /* There are some cases we can't do. If CODE is ASHIFTRT,
7775 we can only do this if FIRST_CODE is also ASHIFTRT.
7776
7777 We can't do the case when CODE is ROTATE and FIRST_CODE is
7778 ASHIFTRT.
7779
7780 If the mode of this shift is not the mode of the outer shift,
7781 we can't do this if either shift is ASHIFTRT or ROTATE.
7782
7783 Finally, we can't do any of these if the mode is too wide
7784 unless the codes are the same.
7785
7786 Handle the case where the shift codes are the same
7787 first. */
7788
7789 if (code == first_code)
7790 {
7791 if (GET_MODE (varop) != result_mode
7792 && (code == ASHIFTRT || code == ROTATE))
7793 break;
7794
7795 count += first_count;
7796 varop = XEXP (varop, 0);
7797 continue;
7798 }
7799
7800 if (code == ASHIFTRT
7801 || (code == ROTATE && first_code == ASHIFTRT)
5f4f0e22 7802 || GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT
230d793d
RS
7803 || (GET_MODE (varop) != result_mode
7804 && (first_code == ASHIFTRT || first_code == ROTATE
7805 || code == ROTATE)))
7806 break;
7807
7808 /* To compute the mask to apply after the shift, shift the
951553af 7809 nonzero bits of the inner shift the same way the
230d793d
RS
7810 outer shift will. */
7811
951553af 7812 mask_rtx = GEN_INT (nonzero_bits (varop, GET_MODE (varop)));
230d793d
RS
7813
7814 mask_rtx
7815 = simplify_binary_operation (code, result_mode, mask_rtx,
5f4f0e22 7816 GEN_INT (count));
230d793d
RS
7817
7818 /* Give up if we can't compute an outer operation to use. */
7819 if (mask_rtx == 0
7820 || GET_CODE (mask_rtx) != CONST_INT
7821 || ! merge_outer_ops (&outer_op, &outer_const, AND,
7822 INTVAL (mask_rtx),
7823 result_mode, &complement_p))
7824 break;
7825
7826 /* If the shifts are in the same direction, we add the
7827 counts. Otherwise, we subtract them. */
7828 if ((code == ASHIFTRT || code == LSHIFTRT)
7829 == (first_code == ASHIFTRT || first_code == LSHIFTRT))
7830 count += first_count;
7831 else
7832 count -= first_count;
7833
7834 /* If COUNT is positive, the new shift is usually CODE,
7835 except for the two exceptions below, in which case it is
7836 FIRST_CODE. If the count is negative, FIRST_CODE should
7837 always be used. */
7838 if (count > 0
7839 && ((first_code == ROTATE && code == ASHIFT)
7840 || (first_code == ASHIFTRT && code == LSHIFTRT)))
7841 code = first_code;
7842 else if (count < 0)
7843 code = first_code, count = - count;
7844
7845 varop = XEXP (varop, 0);
7846 continue;
7847 }
7848
7849 /* If we have (A << B << C) for any shift, we can convert this to
7850 (A << C << B). This wins if A is a constant. Only try this if
7851 B is not a constant. */
7852
7853 else if (GET_CODE (varop) == code
7854 && GET_CODE (XEXP (varop, 1)) != CONST_INT
7855 && 0 != (new
7856 = simplify_binary_operation (code, mode,
7857 XEXP (varop, 0),
5f4f0e22 7858 GEN_INT (count))))
230d793d
RS
7859 {
7860 varop = gen_rtx_combine (code, mode, new, XEXP (varop, 1));
7861 count = 0;
7862 continue;
7863 }
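/* (Editorial example with an assumed constant: if VAROP is
   (ashift:SI (const_int 2) B) and COUNT is 3, the constant is shifted
   first, giving (ashift:SI (const_int 16) B); the transformation only
   applies when A simplifies against COUNT as it does here.)  */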
7864 break;
7865
7866 case NOT:
7867 /* Make this fit the case below. */
7868 varop = gen_rtx_combine (XOR, mode, XEXP (varop, 0),
5f4f0e22 7869 GEN_INT (GET_MODE_MASK (mode)));
230d793d
RS
7870 continue;
7871
7872 case IOR:
7873 case AND:
7874 case XOR:
7875 /* If we have (xshiftrt (ior (plus X (const_int -1)) X) C)
7876 with C the size of VAROP - 1 and the shift is logical if
7877 STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
7878 we have an (le X 0) operation. If we have an arithmetic shift
7879 and STORE_FLAG_VALUE is 1 or we have a logical shift with
7880 STORE_FLAG_VALUE of -1, we have a (neg (le X 0)) operation. */
7881
7882 if (GET_CODE (varop) == IOR && GET_CODE (XEXP (varop, 0)) == PLUS
7883 && XEXP (XEXP (varop, 0), 1) == constm1_rtx
7884 && (STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
7885 && (code == LSHIFTRT || code == ASHIFTRT)
7886 && count == GET_MODE_BITSIZE (GET_MODE (varop)) - 1
7887 && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
7888 {
7889 count = 0;
7890 varop = gen_rtx_combine (LE, GET_MODE (varop), XEXP (varop, 1),
7891 const0_rtx);
7892
7893 if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
7894 varop = gen_rtx_combine (NEG, GET_MODE (varop), varop);
7895
7896 continue;
7897 }
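/* (Editorial spot check of the identity above: for X == 0 the inner
   (ior (plus X -1) X) is -1, whose sign bit is set, while for X > 0
   both IOR operands have a clear sign bit; extracting the sign bit
   with the shift therefore computes exactly (le X 0), negated or not
   according to STORE_FLAG_VALUE and the kind of shift.)  */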
7898
7899 /* If we have (shift (logical)), move the logical to the outside
7900 to allow it to possibly combine with another logical and the
7901 shift to combine with another shift. This also canonicalizes to
7902 what a ZERO_EXTRACT looks like. Also, some machines have
7903 (and (shift)) insns. */
7904
7905 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
7906 && (new = simplify_binary_operation (code, result_mode,
7907 XEXP (varop, 1),
5f4f0e22 7908 GEN_INT (count))) != 0
7d171a1e 7909 && GET_CODE(new) == CONST_INT
230d793d
RS
7910 && merge_outer_ops (&outer_op, &outer_const, GET_CODE (varop),
7911 INTVAL (new), result_mode, &complement_p))
7912 {
7913 varop = XEXP (varop, 0);
7914 continue;
7915 }
7916
7917 /* If we can't do that, try to simplify the shift in each arm of the
7918 logical expression, make a new logical expression, and apply
7919 the inverse distributive law. */
7920 {
00d4ca1c 7921 rtx lhs = simplify_shift_const (NULL_RTX, code, shift_mode,
230d793d 7922 XEXP (varop, 0), count);
00d4ca1c 7923 rtx rhs = simplify_shift_const (NULL_RTX, code, shift_mode,
230d793d
RS
7924 XEXP (varop, 1), count);
7925
21a64bf1 7926 varop = gen_binary (GET_CODE (varop), shift_mode, lhs, rhs);
230d793d
RS
7927 varop = apply_distributive_law (varop);
7928
7929 count = 0;
7930 }
7931 break;
7932
7933 case EQ:
45620ed4 7934 /* Convert (lshiftrt (eq FOO 0) C) to (xor FOO 1) if STORE_FLAG_VALUE
230d793d 7935 says that the sign bit can be tested, FOO has mode MODE, C is
45620ed4
RK
7936 GET_MODE_BITSIZE (MODE) - 1, and only the low-order bit of FOO
7937 may be nonzero. */
7938 if (code == LSHIFTRT
230d793d
RS
7939 && XEXP (varop, 1) == const0_rtx
7940 && GET_MODE (XEXP (varop, 0)) == result_mode
7941 && count == GET_MODE_BITSIZE (result_mode) - 1
5f4f0e22 7942 && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
230d793d 7943 && ((STORE_FLAG_VALUE
5f4f0e22 7944 & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (result_mode) - 1))))
951553af 7945 && nonzero_bits (XEXP (varop, 0), result_mode) == 1
5f4f0e22
CH
7946 && merge_outer_ops (&outer_op, &outer_const, XOR,
7947 (HOST_WIDE_INT) 1, result_mode,
7948 &complement_p))
230d793d
RS
7949 {
7950 varop = XEXP (varop, 0);
7951 count = 0;
7952 continue;
7953 }
7954 break;
7955
7956 case NEG:
d0ab8cd3
RK
7957 /* (lshiftrt (neg A) C) where A is either 0 or 1 and C is one less
7958 than the number of bits in the mode is equivalent to A. */
7959 if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1
951553af 7960 && nonzero_bits (XEXP (varop, 0), result_mode) == 1)
230d793d 7961 {
d0ab8cd3 7962 varop = XEXP (varop, 0);
230d793d
RS
7963 count = 0;
7964 continue;
7965 }
7966
7967 /* NEG commutes with ASHIFT since it is multiplication. Move the
7968 NEG outside to allow shifts to combine. */
7969 if (code == ASHIFT
5f4f0e22
CH
7970 && merge_outer_ops (&outer_op, &outer_const, NEG,
7971 (HOST_WIDE_INT) 0, result_mode,
7972 &complement_p))
230d793d
RS
7973 {
7974 varop = XEXP (varop, 0);
7975 continue;
7976 }
7977 break;
7978
7979 case PLUS:
d0ab8cd3
RK
7980 /* (lshiftrt (plus A -1) C) where A is either 0 or 1 and C
7981 is one less than the number of bits in the mode is
7982 equivalent to (xor A 1). */
230d793d
RS
7983 if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1
7984 && XEXP (varop, 1) == constm1_rtx
951553af 7985 && nonzero_bits (XEXP (varop, 0), result_mode) == 1
5f4f0e22
CH
7986 && merge_outer_ops (&outer_op, &outer_const, XOR,
7987 (HOST_WIDE_INT) 1, result_mode,
7988 &complement_p))
230d793d
RS
7989 {
7990 count = 0;
7991 varop = XEXP (varop, 0);
7992 continue;
7993 }
7994
3f508eca 7995 /* If we have (xshiftrt (plus FOO BAR) C), and the only bits
951553af 7996 that might be nonzero in BAR are those being shifted out and those
3f508eca
RK
7997 bits are known zero in FOO, we can replace the PLUS with FOO.
7998 Similarly in the other operand order. This code occurs when
7999 we are computing the size of a variable-size array. */
8000
8001 if ((code == ASHIFTRT || code == LSHIFTRT)
5f4f0e22 8002 && count < HOST_BITS_PER_WIDE_INT
951553af
RK
8003 && nonzero_bits (XEXP (varop, 1), result_mode) >> count == 0
8004 && (nonzero_bits (XEXP (varop, 1), result_mode)
8005 & nonzero_bits (XEXP (varop, 0), result_mode)) == 0)
3f508eca
RK
8006 {
8007 varop = XEXP (varop, 0);
8008 continue;
8009 }
8010 else if ((code == ASHIFTRT || code == LSHIFTRT)
5f4f0e22 8011 && count < HOST_BITS_PER_WIDE_INT
ac49a949 8012 && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
951553af 8013 && 0 == (nonzero_bits (XEXP (varop, 0), result_mode)
3f508eca 8014 >> count)
951553af
RK
8015 && 0 == (nonzero_bits (XEXP (varop, 0), result_mode)
8016 & nonzero_bits (XEXP (varop, 1),
3f508eca
RK
8017 result_mode)))
8018 {
8019 varop = XEXP (varop, 1);
8020 continue;
8021 }
8022
230d793d
RS
8023 /* (ashift (plus foo C) N) is (plus (ashift foo N) C'). */
8024 if (code == ASHIFT
8025 && GET_CODE (XEXP (varop, 1)) == CONST_INT
8026 && (new = simplify_binary_operation (ASHIFT, result_mode,
8027 XEXP (varop, 1),
5f4f0e22 8028 GEN_INT (count))) != 0
7d171a1e 8029 && GET_CODE(new) == CONST_INT
230d793d
RS
8030 && merge_outer_ops (&outer_op, &outer_const, PLUS,
8031 INTVAL (new), result_mode, &complement_p))
8032 {
8033 varop = XEXP (varop, 0);
8034 continue;
8035 }
8036 break;
8037
8038 case MINUS:
8039 /* If we have (xshiftrt (minus (ashiftrt X C) X) C)
8040 with C the size of VAROP - 1 and the shift is logical if
8041 STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
8042 we have a (gt X 0) operation. If the shift is arithmetic with
8043 STORE_FLAG_VALUE of 1 or logical with STORE_FLAG_VALUE == -1,
8044 we have a (neg (gt X 0)) operation. */
8045
8046 if (GET_CODE (XEXP (varop, 0)) == ASHIFTRT
8047 && count == GET_MODE_BITSIZE (GET_MODE (varop)) - 1
8048 && (STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
8049 && (code == LSHIFTRT || code == ASHIFTRT)
8050 && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT
8051 && INTVAL (XEXP (XEXP (varop, 0), 1)) == count
8052 && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
8053 {
8054 count = 0;
8055 varop = gen_rtx_combine (GT, GET_MODE (varop), XEXP (varop, 1),
8056 const0_rtx);
8057
8058 if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
8059 varop = gen_rtx_combine (NEG, GET_MODE (varop), varop);
8060
8061 continue;
8062 }
8063 break;
8064 }
8065
8066 break;
8067 }
8068
8069 /* We need to determine what mode to do the shift in. If the shift is
8070 an ASHIFTRT or ROTATE, we must always do it in the mode it was originally
8071 done in. Otherwise, we can do it in MODE, the widest mode encountered.
8072 The code we care about is that of the shift that will actually be done,
8073 not the shift that was originally requested. */
8074 shift_mode = (code == ASHIFTRT || code == ROTATE ? result_mode : mode);
8075
8076 /* We have now finished analyzing the shift. The result should be
8077 a shift of type CODE with SHIFT_MODE shifting VAROP COUNT places. If
8078 OUTER_OP is non-NIL, it is an operation that needs to be applied
8079 to the result of the shift. OUTER_CONST is the relevant constant,
8080 but we must turn off all bits turned off in the shift.
8081
8082 If we were passed a value for X, see if we can use any pieces of
8083 it. If not, make new rtx. */
8084
8085 if (x && GET_RTX_CLASS (GET_CODE (x)) == '2'
8086 && GET_CODE (XEXP (x, 1)) == CONST_INT
8087 && INTVAL (XEXP (x, 1)) == count)
8088 const_rtx = XEXP (x, 1);
8089 else
5f4f0e22 8090 const_rtx = GEN_INT (count);
230d793d
RS
8091
8092 if (x && GET_CODE (XEXP (x, 0)) == SUBREG
8093 && GET_MODE (XEXP (x, 0)) == shift_mode
8094 && SUBREG_REG (XEXP (x, 0)) == varop)
8095 varop = XEXP (x, 0);
8096 else if (GET_MODE (varop) != shift_mode)
8097 varop = gen_lowpart_for_combine (shift_mode, varop);
8098
8099 /* If we can't make the SUBREG, try to return what we were given. */
8100 if (GET_CODE (varop) == CLOBBER)
8101 return x ? x : varop;
8102
8103 new = simplify_binary_operation (code, shift_mode, varop, const_rtx);
8104 if (new != 0)
8105 x = new;
8106 else
8107 {
8108 if (x == 0 || GET_CODE (x) != code || GET_MODE (x) != shift_mode)
8109 x = gen_rtx_combine (code, shift_mode, varop, const_rtx);
8110
8111 SUBST (XEXP (x, 0), varop);
8112 SUBST (XEXP (x, 1), const_rtx);
8113 }
8114
224eeff2
RK
8115 /* If we have an outer operation and we just made a shift, it is
8116 possible that we could have simplified the shift were it not
8117 for the outer operation. So try to do the simplification
8118 recursively. */
8119
8120 if (outer_op != NIL && GET_CODE (x) == code
8121 && GET_CODE (XEXP (x, 1)) == CONST_INT)
8122 x = simplify_shift_const (x, code, shift_mode, XEXP (x, 0),
8123 INTVAL (XEXP (x, 1)));
8124
230d793d
RS
8125 /* If we were doing a LSHIFTRT in a wider mode than it was originally,
8126 turn off all the bits that the shift would have turned off. */
8127 if (orig_code == LSHIFTRT && result_mode != shift_mode)
5f4f0e22 8128 x = simplify_and_const_int (NULL_RTX, shift_mode, x,
230d793d
RS
8129 GET_MODE_MASK (result_mode) >> orig_count);
8130
8131 /* Do the remainder of the processing in RESULT_MODE. */
8132 x = gen_lowpart_for_combine (result_mode, x);
8133
8134 /* If COMPLEMENT_P is set, we have to complement X before doing the outer
8135 operation. */
8136 if (complement_p)
0c1c8ea6 8137 x = gen_unary (NOT, result_mode, result_mode, x);
230d793d
RS
8138
8139 if (outer_op != NIL)
8140 {
5f4f0e22 8141 if (GET_MODE_BITSIZE (result_mode) < HOST_BITS_PER_WIDE_INT)
230d793d
RS
8142 outer_const &= GET_MODE_MASK (result_mode);
8143
8144 if (outer_op == AND)
5f4f0e22 8145 x = simplify_and_const_int (NULL_RTX, result_mode, x, outer_const);
230d793d
RS
8146 else if (outer_op == SET)
8147 /* This means that we have determined that the result is
8148 equivalent to a constant. This should be rare. */
5f4f0e22 8149 x = GEN_INT (outer_const);
230d793d 8150 else if (GET_RTX_CLASS (outer_op) == '1')
0c1c8ea6 8151 x = gen_unary (outer_op, result_mode, result_mode, x);
230d793d 8152 else
5f4f0e22 8153 x = gen_binary (outer_op, result_mode, x, GEN_INT (outer_const));
230d793d
RS
8154 }
8155
8156 return x;
8157}
8158\f
8159/* Like recog, but we receive the address of a pointer to a new pattern.
8160 We try to match the rtx that the pointer points to.
8161 If that fails, we may try to modify or replace the pattern,
8162 storing the replacement into the same pointer object.
8163
8164 Modifications include deletion or addition of CLOBBERs.
8165
8166 PNOTES is a pointer to a location where any REG_UNUSED notes added for
8167 the CLOBBERs are placed.
8168
8169 The value is the final insn code from the pattern ultimately matched,
8170 or -1. */
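/* (Editorial sketch of a typical call, with a hypothetical pattern:
   if the combined pattern is a lone SET and the only matching insn
   also clobbers a flags register, recog reports one clobber to add;
   the code below then wraps the SET and the new CLOBBER in a
   PARALLEL, insists that the clobbered register is dead at INSN, and
   hands back a REG_UNUSED note through PNOTES.)  */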
8171
8172static int
8173recog_for_combine (pnewpat, insn, pnotes)
8174 rtx *pnewpat;
8175 rtx insn;
8176 rtx *pnotes;
8177{
8178 register rtx pat = *pnewpat;
8179 int insn_code_number;
8180 int num_clobbers_to_add = 0;
8181 int i;
8182 rtx notes = 0;
8183
974f4146
RK
8184 /* If PAT is a PARALLEL, check to see if it contains the CLOBBER
8185 we use to indicate that something didn't match. If we find such a
8186 thing, force rejection. */
d96023cf 8187 if (GET_CODE (pat) == PARALLEL)
974f4146 8188 for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
d96023cf
RK
8189 if (GET_CODE (XVECEXP (pat, 0, i)) == CLOBBER
8190 && XEXP (XVECEXP (pat, 0, i), 0) == const0_rtx)
974f4146
RK
8191 return -1;
8192
230d793d
RS
8193 /* Is the result of combination a valid instruction? */
8194 insn_code_number = recog (pat, insn, &num_clobbers_to_add);
8195
8196 /* If it isn't, there is the possibility that we previously had an insn
8197 that clobbered some register as a side effect, but the combined
8198 insn doesn't need to do that. So try once more without the clobbers
8199 unless this represents an ASM insn. */
8200
8201 if (insn_code_number < 0 && ! check_asm_operands (pat)
8202 && GET_CODE (pat) == PARALLEL)
8203 {
8204 int pos;
8205
8206 for (pos = 0, i = 0; i < XVECLEN (pat, 0); i++)
8207 if (GET_CODE (XVECEXP (pat, 0, i)) != CLOBBER)
8208 {
8209 if (i != pos)
8210 SUBST (XVECEXP (pat, 0, pos), XVECEXP (pat, 0, i));
8211 pos++;
8212 }
8213
8214 SUBST_INT (XVECLEN (pat, 0), pos);
8215
8216 if (pos == 1)
8217 pat = XVECEXP (pat, 0, 0);
8218
8219 insn_code_number = recog (pat, insn, &num_clobbers_to_add);
8220 }
8221
8222 /* If we had any clobbers to add, make a new pattern that contains
8223 them. Then check to make sure that all of them are dead. */
8224 if (num_clobbers_to_add)
8225 {
8226 rtx newpat = gen_rtx (PARALLEL, VOIDmode,
8227 gen_rtvec (GET_CODE (pat) == PARALLEL
8228 ? XVECLEN (pat, 0) + num_clobbers_to_add
8229 : num_clobbers_to_add + 1));
8230
8231 if (GET_CODE (pat) == PARALLEL)
8232 for (i = 0; i < XVECLEN (pat, 0); i++)
8233 XVECEXP (newpat, 0, i) = XVECEXP (pat, 0, i);
8234 else
8235 XVECEXP (newpat, 0, 0) = pat;
8236
8237 add_clobbers (newpat, insn_code_number);
8238
8239 for (i = XVECLEN (newpat, 0) - num_clobbers_to_add;
8240 i < XVECLEN (newpat, 0); i++)
8241 {
8242 if (GET_CODE (XEXP (XVECEXP (newpat, 0, i), 0)) == REG
8243 && ! reg_dead_at_p (XEXP (XVECEXP (newpat, 0, i), 0), insn))
8244 return -1;
8245 notes = gen_rtx (EXPR_LIST, REG_UNUSED,
8246 XEXP (XVECEXP (newpat, 0, i), 0), notes);
8247 }
8248 pat = newpat;
8249 }
8250
8251 *pnewpat = pat;
8252 *pnotes = notes;
8253
8254 return insn_code_number;
8255}
8256\f
8257/* Like gen_lowpart but for use by combine. In combine it is not possible
8258 to create any new pseudoregs. However, it is safe to create
8259 invalid memory addresses, because combine will try to recognize
8260 them and all they will do is make the combine attempt fail.
8261
8262 If for some reason this cannot do its job, an rtx
8263 (clobber (const_int 0)) is returned.
8264 An insn containing that will not be recognized. */
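/* (Editorial examples of the cases handled below: the QImode lowpart
   of a SImode MEM is a narrower MEM whose address is adjusted for
   byte and word endianness; a lowpart wider than the MEM is a
   paradoxical SUBREG that forces a reload; a volatile MEM produces
   the (clobber (const_int 0)) failure value.)  */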
8265
8266#undef gen_lowpart
8267
8268static rtx
8269gen_lowpart_for_combine (mode, x)
8270 enum machine_mode mode;
8271 register rtx x;
8272{
8273 rtx result;
8274
8275 if (GET_MODE (x) == mode)
8276 return x;
8277
eae957a8
RK
8278 /* We can only support MODE being wider than a word if X is a
8279 constant integer or has a mode the same size. */
8280
8281 if (GET_MODE_SIZE (mode) > UNITS_PER_WORD
8282 && ! ((GET_MODE (x) == VOIDmode
8283 && (GET_CODE (x) == CONST_INT
8284 || GET_CODE (x) == CONST_DOUBLE))
8285 || GET_MODE_SIZE (GET_MODE (x)) == GET_MODE_SIZE (mode)))
230d793d
RS
8286 return gen_rtx (CLOBBER, GET_MODE (x), const0_rtx);
8287
8288 /* X might be a paradoxical (subreg (mem)). In that case, gen_lowpart
8289 won't know what to do. So we will strip off the SUBREG here and
8290 process normally. */
8291 if (GET_CODE (x) == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
8292 {
8293 x = SUBREG_REG (x);
8294 if (GET_MODE (x) == mode)
8295 return x;
8296 }
8297
8298 result = gen_lowpart_common (mode, x);
8299 if (result)
8300 return result;
8301
8302 if (GET_CODE (x) == MEM)
8303 {
8304 register int offset = 0;
8305 rtx new;
8306
8307 /* Refuse to work on a volatile memory ref or one with a mode-dependent
8308 address. */
8309 if (MEM_VOLATILE_P (x) || mode_dependent_address_p (XEXP (x, 0)))
8310 return gen_rtx (CLOBBER, GET_MODE (x), const0_rtx);
8311
8312 /* If we want to refer to something bigger than the original memref,
8313 generate a perverse subreg instead. That will force a reload
8314 of the original memref X. */
8315 if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (mode))
8316 return gen_rtx (SUBREG, mode, x, 0);
8317
8318#if WORDS_BIG_ENDIAN
8319 offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
8320 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
8321#endif
8322#if BYTES_BIG_ENDIAN
8323 /* Adjust the address so that the address-after-the-data
8324 is unchanged. */
8325 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
8326 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
8327#endif
8328 new = gen_rtx (MEM, mode, plus_constant (XEXP (x, 0), offset));
8329 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
8330 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x);
8331 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
8332 return new;
8333 }
8334
8335 /* If X is a comparison operator, rewrite it in a new mode. This
8336 probably won't match, but may allow further simplifications. */
8337 else if (GET_RTX_CLASS (GET_CODE (x)) == '<')
8338 return gen_rtx_combine (GET_CODE (x), mode, XEXP (x, 0), XEXP (x, 1));
8339
8340 /* If we couldn't simplify X any other way, just enclose it in a
8341 SUBREG. Normally, this SUBREG won't match, but some patterns may
a7c99304 8342 include an explicit SUBREG or we may simplify it further in combine. */
230d793d 8343 else
dfbe1b2f
RK
8344 {
8345 int word = 0;
8346
8347 if (WORDS_BIG_ENDIAN && GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD)
8348 word = ((GET_MODE_SIZE (GET_MODE (x))
8349 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD))
8350 / UNITS_PER_WORD);
8351 return gen_rtx (SUBREG, mode, x, word);
8352 }
230d793d
RS
8353}
8354\f
8355/* Make an rtx expression. This is a subset of gen_rtx and only supports
8356 expressions of 1, 2, or 3 operands, each of which are rtx expressions.
8357
8358 If the identical expression was previously in the insn (in the undobuf),
8359 it will be returned. Only if it is not found will a new expression
8360 be made. */
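/* (Editorial usage note: calls have the shape
   gen_rtx_combine (NOT, mode, x) or gen_rtx_combine (PLUS, mode, a, b);
   reusing an expression recorded in the undo buffer saves allocation,
   and the search below deliberately looks only at this insn's undos
   to avoid building circular rtl.)  */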
8361
8362/*VARARGS2*/
8363static rtx
4f90e4a0 8364gen_rtx_combine VPROTO((enum rtx_code code, enum machine_mode mode, ...))
230d793d 8365{
4f90e4a0 8366#ifndef __STDC__
230d793d
RS
8367 enum rtx_code code;
8368 enum machine_mode mode;
4f90e4a0
RK
8369#endif
8370 va_list p;
230d793d
RS
8371 int n_args;
8372 rtx args[3];
8373 int i, j;
8374 char *fmt;
8375 rtx rt;
8376
4f90e4a0
RK
8377 VA_START (p, mode);
8378
8379#ifndef __STDC__
230d793d
RS
8380 code = va_arg (p, enum rtx_code);
8381 mode = va_arg (p, enum machine_mode);
4f90e4a0
RK
8382#endif
8383
230d793d
RS
8384 n_args = GET_RTX_LENGTH (code);
8385 fmt = GET_RTX_FORMAT (code);
8386
8387 if (n_args == 0 || n_args > 3)
8388 abort ();
8389
8390 /* Get each arg and verify that it is supposed to be an expression. */
8391 for (j = 0; j < n_args; j++)
8392 {
8393 if (*fmt++ != 'e')
8394 abort ();
8395
8396 args[j] = va_arg (p, rtx);
8397 }
8398
8399 /* See if this is in undobuf. Be sure we don't use objects that came
8400 from another insn; this could produce circular rtl structures. */
8401
8402 for (i = previous_num_undos; i < undobuf.num_undo; i++)
8403 if (!undobuf.undo[i].is_int
f5393ab9
RS
8404 && GET_CODE (undobuf.undo[i].old_contents.r) == code
8405 && GET_MODE (undobuf.undo[i].old_contents.r) == mode)
230d793d
RS
8406 {
8407 for (j = 0; j < n_args; j++)
f5393ab9 8408 if (XEXP (undobuf.undo[i].old_contents.r, j) != args[j])
230d793d
RS
8409 break;
8410
8411 if (j == n_args)
f5393ab9 8412 return undobuf.undo[i].old_contents.r;
230d793d
RS
8413 }
8414
8415 /* Otherwise make a new rtx. We know we have 1, 2, or 3 args.
8416 Use rtx_alloc instead of gen_rtx because it's faster on RISC. */
8417 rt = rtx_alloc (code);
8418 PUT_MODE (rt, mode);
8419 XEXP (rt, 0) = args[0];
8420 if (n_args > 1)
8421 {
8422 XEXP (rt, 1) = args[1];
8423 if (n_args > 2)
8424 XEXP (rt, 2) = args[2];
8425 }
8426 return rt;
8427}
8428
8429/* These routines make binary and unary operations by first seeing if they
8430 fold; if not, a new expression is allocated. */
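/* (Editorial example of the folding path: gen_binary (PLUS, SImode,
   const1_rtx, const1_rtx) comes straight back from
   simplify_binary_operation as (const_int 2); only when no fold
   applies does gen_rtx_combine build a new expression.)  */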
8431
8432static rtx
8433gen_binary (code, mode, op0, op1)
8434 enum rtx_code code;
8435 enum machine_mode mode;
8436 rtx op0, op1;
8437{
8438 rtx result;
1a26b032
RK
8439 rtx tem;
8440
8441 if (GET_RTX_CLASS (code) == 'c'
8442 && (GET_CODE (op0) == CONST_INT
8443 || (CONSTANT_P (op0) && GET_CODE (op1) != CONST_INT)))
8444 tem = op0, op0 = op1, op1 = tem;
230d793d
RS
8445
8446 if (GET_RTX_CLASS (code) == '<')
8447 {
8448 enum machine_mode op_mode = GET_MODE (op0);
9210df58
RK
8449
8450 /* Strip the COMPARE from (REL_OP (compare X Y) 0) to get
8451 just (REL_OP X Y). */
8452 if (GET_CODE (op0) == COMPARE && op1 == const0_rtx)
8453 {
8454 op1 = XEXP (op0, 1);
8455 op0 = XEXP (op0, 0);
8456 op_mode = GET_MODE (op0);
8457 }
8458
230d793d
RS
8459 if (op_mode == VOIDmode)
8460 op_mode = GET_MODE (op1);
8461 result = simplify_relational_operation (code, op_mode, op0, op1);
8462 }
8463 else
8464 result = simplify_binary_operation (code, mode, op0, op1);
8465
8466 if (result)
8467 return result;
8468
8469 /* Put complex operands first and constants second. */
8470 if (GET_RTX_CLASS (code) == 'c'
8471 && ((CONSTANT_P (op0) && GET_CODE (op1) != CONST_INT)
8472 || (GET_RTX_CLASS (GET_CODE (op0)) == 'o'
8473 && GET_RTX_CLASS (GET_CODE (op1)) != 'o')
8474 || (GET_CODE (op0) == SUBREG
8475 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (op0))) == 'o'
8476 && GET_RTX_CLASS (GET_CODE (op1)) != 'o')))
8477 return gen_rtx_combine (code, mode, op1, op0);
8478
8479 return gen_rtx_combine (code, mode, op0, op1);
8480}
8481
8482static rtx
0c1c8ea6 8483gen_unary (code, mode, op0_mode, op0)
230d793d 8484 enum rtx_code code;
0c1c8ea6 8485 enum machine_mode mode, op0_mode;
230d793d
RS
8486 rtx op0;
8487{
0c1c8ea6 8488 rtx result = simplify_unary_operation (code, mode, op0, op0_mode);
230d793d
RS
8489
8490 if (result)
8491 return result;
8492
8493 return gen_rtx_combine (code, mode, op0);
8494}
8495\f
8496/* Simplify a comparison between *POP0 and *POP1 where CODE is the
8497 comparison code that will be tested.
8498
8499 The result is a possibly different comparison code to use. *POP0 and
8500 *POP1 may be updated.
8501
8502 It is possible that we might detect that a comparison is either always
8503 true or always false. However, we do not perform general constant
5089e22e 8504 folding in combine, so this knowledge isn't useful. Such tautologies
230d793d
RS
8505 should have been detected earlier. Hence we ignore all such cases. */
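/* (Editorial example of a typical simplification performed below:
   (LT X 1) is first canonicalized to (LE X 0), and if the sign bit of
   X is known to be zero that in turn becomes (EQ X 0), which most
   machines test more cheaply.)  */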
8506
8507static enum rtx_code
8508simplify_comparison (code, pop0, pop1)
8509 enum rtx_code code;
8510 rtx *pop0;
8511 rtx *pop1;
8512{
8513 rtx op0 = *pop0;
8514 rtx op1 = *pop1;
8515 rtx tem, tem1;
8516 int i;
8517 enum machine_mode mode, tmode;
8518
8519 /* Try a few ways of applying the same transformation to both operands. */
8520 while (1)
8521 {
3a19aabc
RK
8522#ifndef WORD_REGISTER_OPERATIONS
8523 /* The test below this one won't handle SIGN_EXTENDs on these machines,
8524 so check specially. */
8525 if (code != GTU && code != GEU && code != LTU && code != LEU
8526 && GET_CODE (op0) == ASHIFTRT && GET_CODE (op1) == ASHIFTRT
8527 && GET_CODE (XEXP (op0, 0)) == ASHIFT
8528 && GET_CODE (XEXP (op1, 0)) == ASHIFT
8529 && GET_CODE (XEXP (XEXP (op0, 0), 0)) == SUBREG
8530 && GET_CODE (XEXP (XEXP (op1, 0), 0)) == SUBREG
8531 && (GET_MODE (SUBREG_REG (XEXP (XEXP (op0, 0), 0)))
ad25ba17 8532 == GET_MODE (SUBREG_REG (XEXP (XEXP (op1, 0), 0))))
3a19aabc
RK
8533 && GET_CODE (XEXP (op0, 1)) == CONST_INT
8534 && GET_CODE (XEXP (op1, 1)) == CONST_INT
8535 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
8536 && GET_CODE (XEXP (XEXP (op1, 0), 1)) == CONST_INT
8537 && INTVAL (XEXP (op0, 1)) == INTVAL (XEXP (op1, 1))
8538 && INTVAL (XEXP (op0, 1)) == INTVAL (XEXP (XEXP (op0, 0), 1))
8539 && INTVAL (XEXP (op0, 1)) == INTVAL (XEXP (XEXP (op1, 0), 1))
8540 && (INTVAL (XEXP (op0, 1))
8541 == (GET_MODE_BITSIZE (GET_MODE (op0))
8542 - (GET_MODE_BITSIZE
8543 (GET_MODE (SUBREG_REG (XEXP (XEXP (op0, 0), 0))))))))
8544 {
8545 op0 = SUBREG_REG (XEXP (XEXP (op0, 0), 0));
8546 op1 = SUBREG_REG (XEXP (XEXP (op1, 0), 0));
8547 }
8548#endif
8549
230d793d
RS
8550 /* If both operands are the same constant shift, see if we can ignore the
8551 shift. We can if the shift is a rotate or if the bits shifted out of
951553af 8552 this shift are known to be zero for both inputs and if the type of
230d793d 8553 comparison is compatible with the shift. */
67232b23
RK
8554 if (GET_CODE (op0) == GET_CODE (op1)
8555 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
8556 && ((GET_CODE (op0) == ROTATE && (code == NE || code == EQ))
45620ed4 8557 || ((GET_CODE (op0) == LSHIFTRT || GET_CODE (op0) == ASHIFT)
67232b23
RK
8558 && (code != GT && code != LT && code != GE && code != LE))
8559 || (GET_CODE (op0) == ASHIFTRT
8560 && (code != GTU && code != LTU
8561 && code != GEU && code != LEU)))
8562 && GET_CODE (XEXP (op0, 1)) == CONST_INT
8563 && INTVAL (XEXP (op0, 1)) >= 0
8564 && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
8565 && XEXP (op0, 1) == XEXP (op1, 1))
230d793d
RS
8566 {
8567 enum machine_mode mode = GET_MODE (op0);
5f4f0e22 8568 unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
230d793d
RS
8569 int shift_count = INTVAL (XEXP (op0, 1));
8570
8571 if (GET_CODE (op0) == LSHIFTRT || GET_CODE (op0) == ASHIFTRT)
8572 mask &= (mask >> shift_count) << shift_count;
45620ed4 8573 else if (GET_CODE (op0) == ASHIFT)
230d793d
RS
8574 mask = (mask & (mask << shift_count)) >> shift_count;
8575
951553af
RK
8576 if ((nonzero_bits (XEXP (op0, 0), mode) & ~ mask) == 0
8577 && (nonzero_bits (XEXP (op1, 0), mode) & ~ mask) == 0)
230d793d
RS
8578 op0 = XEXP (op0, 0), op1 = XEXP (op1, 0);
8579 else
8580 break;
8581 }
8582
8583 /* If both operands are AND's of a paradoxical SUBREG by constant, the
8584 SUBREGs are of the same mode, and, in both cases, the AND would
8585 be redundant if the comparison was done in the narrower mode,
8586 do the comparison in the narrower mode (e.g., we are AND'ing with 1
951553af
RK
8587 and the operand's possibly nonzero bits are 0xffffff01; in that case
8588 if we only care about QImode, we don't need the AND). This case
8589 occurs if the output mode of an scc insn is not SImode and
7e4dc511
RK
8590 STORE_FLAG_VALUE == 1 (e.g., the 386).
8591
8592 Similarly, check for a case where the AND's are ZERO_EXTEND
8593 operations from some narrower mode even though a SUBREG is not
8594 present. */
230d793d
RS
8595
8596 else if (GET_CODE (op0) == AND && GET_CODE (op1) == AND
8597 && GET_CODE (XEXP (op0, 1)) == CONST_INT
7e4dc511 8598 && GET_CODE (XEXP (op1, 1)) == CONST_INT)
230d793d 8599 {
7e4dc511
RK
8600 rtx inner_op0 = XEXP (op0, 0);
8601 rtx inner_op1 = XEXP (op1, 0);
8602 HOST_WIDE_INT c0 = INTVAL (XEXP (op0, 1));
8603 HOST_WIDE_INT c1 = INTVAL (XEXP (op1, 1));
8604 int changed = 0;
8605
8606 if (GET_CODE (inner_op0) == SUBREG && GET_CODE (inner_op1) == SUBREG
8607 && (GET_MODE_SIZE (GET_MODE (inner_op0))
8608 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (inner_op0))))
8609 && (GET_MODE (SUBREG_REG (inner_op0))
8610 == GET_MODE (SUBREG_REG (inner_op1)))
8611 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0)))
8612 <= HOST_BITS_PER_WIDE_INT)
8613 && (0 == ((~c0) & nonzero_bits (SUBREG_REG (inner_op0),
8614 GET_MODE (SUBREG_REG (op0)))))
8615 && (0 == ((~c1) & nonzero_bits (SUBREG_REG (inner_op1),
8616 GET_MODE (SUBREG_REG (inner_op1))))))
8617 {
8618 op0 = SUBREG_REG (inner_op0);
8619 op1 = SUBREG_REG (inner_op1);
8620
8621 /* The resulting comparison is always unsigned since we masked
8622 off the original sign bit. */
8623 code = unsigned_condition (code);
8624
8625 changed = 1;
8626 }
230d793d 8627
7e4dc511
RK
8628 else if (c0 == c1)
8629 for (tmode = GET_CLASS_NARROWEST_MODE
8630 (GET_MODE_CLASS (GET_MODE (op0)));
8631 tmode != GET_MODE (op0); tmode = GET_MODE_WIDER_MODE (tmode))
8632 if (c0 == GET_MODE_MASK (tmode))
8633 {
8634 op0 = gen_lowpart_for_combine (tmode, inner_op0);
8635 op1 = gen_lowpart_for_combine (tmode, inner_op1);
8636 changed = 1;
8637 break;
8638 }
8639
8640 if (! changed)
8641 break;
230d793d 8642 }
3a19aabc 8643
ad25ba17
RK
8644 /* If both operands are NOT, we can strip off the outer operation
8645 and adjust the comparison code for swapped operands; similarly for
8646 NEG, except that this must be an equality comparison. */
8647 else if ((GET_CODE (op0) == NOT && GET_CODE (op1) == NOT)
8648 || (GET_CODE (op0) == NEG && GET_CODE (op1) == NEG
8649 && (code == EQ || code == NE)))
8650 op0 = XEXP (op0, 0), op1 = XEXP (op1, 0), code = swap_condition (code);
3a19aabc 8651
230d793d
RS
8652 else
8653 break;
8654 }
8655
8656 /* If the first operand is a constant, swap the operands and adjust the
8657 comparison code appropriately. */
8658 if (CONSTANT_P (op0))
8659 {
8660 tem = op0, op0 = op1, op1 = tem;
8661 code = swap_condition (code);
8662 }
8663
8664 /* We now enter a loop during which we will try to simplify the comparison.
8665 For the most part, we are only concerned with comparisons with zero,
8666 but some things may really be comparisons with zero but not start
8667 out looking that way. */
8668
8669 while (GET_CODE (op1) == CONST_INT)
8670 {
8671 enum machine_mode mode = GET_MODE (op0);
8672 int mode_width = GET_MODE_BITSIZE (mode);
5f4f0e22 8673 unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
230d793d
RS
8674 int equality_comparison_p;
8675 int sign_bit_comparison_p;
8676 int unsigned_comparison_p;
5f4f0e22 8677 HOST_WIDE_INT const_op;
230d793d
RS
8678
8679 /* We only want to handle integral modes. This catches VOIDmode,
8680 CCmode, and the floating-point modes. An exception is that we
8681 can handle VOIDmode if OP0 is a COMPARE or a comparison
8682 operation. */
8683
8684 if (GET_MODE_CLASS (mode) != MODE_INT
8685 && ! (mode == VOIDmode
8686 && (GET_CODE (op0) == COMPARE
8687 || GET_RTX_CLASS (GET_CODE (op0)) == '<')))
8688 break;
8689
8690 /* Get the constant we are comparing against and turn off all bits
8691 not on in our mode. */
8692 const_op = INTVAL (op1);
5f4f0e22 8693 if (mode_width <= HOST_BITS_PER_WIDE_INT)
4803a34a 8694 const_op &= mask;
230d793d
RS
8695
8696 /* If we are comparing against a constant power of two and the value
951553af 8697 being compared can only have that single bit nonzero (e.g., it was
230d793d
RS
8698 `and'ed with that bit), we can replace this with a comparison
8699 with zero. */
8700 if (const_op
8701 && (code == EQ || code == NE || code == GE || code == GEU
8702 || code == LT || code == LTU)
5f4f0e22 8703 && mode_width <= HOST_BITS_PER_WIDE_INT
230d793d 8704 && exact_log2 (const_op) >= 0
951553af 8705 && nonzero_bits (op0, mode) == const_op)
230d793d
RS
8706 {
8707 code = (code == EQ || code == GE || code == GEU ? NE : EQ);
8708 op1 = const0_rtx, const_op = 0;
8709 }
8710
d0ab8cd3
RK
8711 /* Similarly, if we are comparing a value known to be either -1 or
8712 0 with -1, change it to the opposite comparison against zero. */
8713
8714 if (const_op == -1
8715 && (code == EQ || code == NE || code == GT || code == LE
8716 || code == GEU || code == LTU)
8717 && num_sign_bit_copies (op0, mode) == mode_width)
8718 {
8719 code = (code == EQ || code == LE || code == GEU ? NE : EQ);
8720 op1 = const0_rtx, const_op = 0;
8721 }
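/* (Editorial instance with an assumed operand: if OP0 is
   (ashiftrt:SI Y 31), every bit is a copy of the sign bit, so
   num_sign_bit_copies is 32 and an equality test against -1 becomes
   the cheaper inequality test against zero, and vice versa.)  */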
8722
230d793d 8723 /* Do some canonicalizations based on the comparison code. We prefer
4803a34a
RK
8724 comparisons against zero and then prefer equality comparisons.
8725 If we can reduce the size of a constant, we will do that too. */
230d793d
RS
8726
8727 switch (code)
8728 {
8729 case LT:
4803a34a
RK
8730 /* < C is equivalent to <= (C - 1). */
8731 if (const_op > 0)
230d793d 8732 {
4803a34a 8733 const_op -= 1;
5f4f0e22 8734 op1 = GEN_INT (const_op);
230d793d
RS
8735 code = LE;
8736 /* ... fall through to LE case below. */
8737 }
8738 else
8739 break;
8740
8741 case LE:
4803a34a
RK
8742 /* <= C is equivalent to < (C + 1); we do this for C < 0 */
8743 if (const_op < 0)
8744 {
8745 const_op += 1;
5f4f0e22 8746 op1 = GEN_INT (const_op);
4803a34a
RK
8747 code = LT;
8748 }
230d793d
RS
8749
8750 /* If we are doing a <= 0 comparison on a value known to have
8751 a zero sign bit, we can replace this with == 0. */
8752 else if (const_op == 0
5f4f0e22 8753 && mode_width <= HOST_BITS_PER_WIDE_INT
951553af 8754 && (nonzero_bits (op0, mode)
5f4f0e22 8755 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
230d793d
RS
8756 code = EQ;
8757 break;
8758
8759 case GE:
4803a34a
RK
8760 /* >= C is equivalent to > (C - 1). */
8761 if (const_op > 0)
230d793d 8762 {
4803a34a 8763 const_op -= 1;
5f4f0e22 8764 op1 = GEN_INT (const_op);
230d793d
RS
8765 code = GT;
8766 /* ... fall through to GT below. */
8767 }
8768 else
8769 break;
8770
8771 case GT:
4803a34a
RK
8772 /* > C is equivalent to >= (C + 1); we do this for C < 0. */
8773 if (const_op < 0)
8774 {
8775 const_op += 1;
5f4f0e22 8776 op1 = GEN_INT (const_op);
4803a34a
RK
8777 code = GE;
8778 }
230d793d
RS
8779
8780 /* If we are doing a > 0 comparison on a value known to have
8781 a zero sign bit, we can replace this with != 0. */
8782 else if (const_op == 0
5f4f0e22 8783 && mode_width <= HOST_BITS_PER_WIDE_INT
951553af 8784 && (nonzero_bits (op0, mode)
5f4f0e22 8785 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
230d793d
RS
8786 code = NE;
8787 break;
8788
230d793d 8789 case LTU:
4803a34a
RK
8790 /* < C is equivalent to <= (C - 1). */
8791 if (const_op > 0)
8792 {
8793 const_op -= 1;
5f4f0e22 8794 op1 = GEN_INT (const_op);
4803a34a
RK
8795 code = LEU;
8796 /* ... fall through ... */
8797 }
d0ab8cd3
RK
8798
8799 /* (unsigned) < 0x80000000 is equivalent to >= 0. */
8800 else if (const_op == (HOST_WIDE_INT) 1 << (mode_width - 1))
8801 {
8802 const_op = 0, op1 = const0_rtx;
8803 code = GE;
8804 break;
8805 }
4803a34a
RK
8806 else
8807 break;
230d793d
RS
8808
8809 case LEU:
8810 /* unsigned <= 0 is equivalent to == 0 */
8811 if (const_op == 0)
8812 code = EQ;
d0ab8cd3
RK
8813
8814 /* (unsigned) <= 0x7fffffff is equivalent to >= 0. */
8815 else if (const_op == ((HOST_WIDE_INT) 1 << (mode_width - 1)) - 1)
8816 {
8817 const_op = 0, op1 = const0_rtx;
8818 code = GE;
8819 }
230d793d
RS
8820 break;
8821
4803a34a
RK
8822 case GEU:
8823 /* >= C is equivalent to > (C - 1). */
8824 if (const_op > 1)
8825 {
8826 const_op -= 1;
5f4f0e22 8827 op1 = GEN_INT (const_op);
4803a34a
RK
8828 code = GTU;
8829 /* ... fall through ... */
8830 }
d0ab8cd3
RK
8831
8832 /* (unsigned) >= 0x80000000 is equivalent to < 0. */
8833 else if (const_op == (HOST_WIDE_INT) 1 << (mode_width - 1))
8834 {
8835 const_op = 0, op1 = const0_rtx;
8836 code = LT;
8837 }
4803a34a
RK
8838 else
8839 break;
8840
230d793d
RS
8841 case GTU:
8842 /* unsigned > 0 is equivalent to != 0 */
8843 if (const_op == 0)
8844 code = NE;
d0ab8cd3
RK
8845
8846 /* (unsigned) > 0x7fffffff is equivalent to < 0. */
8847 else if (const_op == ((HOST_WIDE_INT) 1 << (mode_width - 1)) - 1)
8848 {
8849 const_op = 0, op1 = const0_rtx;
8850 code = LT;
8851 }
230d793d
RS
8852 break;
8853 }
8854
8855 /* Compute some predicates to simplify code below. */
8856
8857 equality_comparison_p = (code == EQ || code == NE);
8858 sign_bit_comparison_p = ((code == LT || code == GE) && const_op == 0);
8859 unsigned_comparison_p = (code == LTU || code == LEU || code == GTU
8860 || code == LEU);
8861
6139ff20
RK
8862 /* If this is a sign bit comparison and we can do arithmetic in
8863 MODE, say that we will only be needing the sign bit of OP0. */
8864 if (sign_bit_comparison_p
8865 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
8866 op0 = force_to_mode (op0, mode,
8867 ((HOST_WIDE_INT) 1
8868 << (GET_MODE_BITSIZE (mode) - 1)),
e3d616e3 8869 NULL_RTX, 0);
6139ff20 8870
230d793d
RS
8871 /* Now try cases based on the opcode of OP0. If none of the cases
8872 does a "continue", we exit this loop immediately after the
8873 switch. */
8874
8875 switch (GET_CODE (op0))
8876 {
8877 case ZERO_EXTRACT:
8878 /* If we are extracting a single bit from a variable position in
8879 a constant that has only a single bit set and are comparing it
8880 with zero, we can convert this into an equality comparison
8881 between the position and the location of the single bit. We can't
8882 do this if bits are big-endian and we don't have an extzv, since we then
8883 can't know what mode to use for the endianness adjustment. */
8884
8885#if ! BITS_BIG_ENDIAN || defined (HAVE_extzv)
8886 if (GET_CODE (XEXP (op0, 0)) == CONST_INT
8887 && XEXP (op0, 1) == const1_rtx
8888 && equality_comparison_p && const_op == 0
8889 && (i = exact_log2 (INTVAL (XEXP (op0, 0)))) >= 0)
8890 {
8891#if BITS_BIG_ENDIAN
8892 i = (GET_MODE_BITSIZE
8893 (insn_operand_mode[(int) CODE_FOR_extzv][1]) - 1 - i);
8894#endif
8895
8896 op0 = XEXP (op0, 2);
5f4f0e22 8897 op1 = GEN_INT (i);
230d793d
RS
8898 const_op = i;
8899
8900 /* Result is nonzero iff shift count is equal to I. */
8901 code = reverse_condition (code);
8902 continue;
8903 }
8904#endif
8905
8906 /* ... fall through ... */
8907
8908 case SIGN_EXTRACT:
8909 tem = expand_compound_operation (op0);
8910 if (tem != op0)
8911 {
8912 op0 = tem;
8913 continue;
8914 }
8915 break;
8916
8917 case NOT:
8918 /* If testing for equality, we can take the NOT of the constant. */
8919 if (equality_comparison_p
8920 && (tem = simplify_unary_operation (NOT, mode, op1, mode)) != 0)
8921 {
8922 op0 = XEXP (op0, 0);
8923 op1 = tem;
8924 continue;
8925 }
8926
8927 /* If just looking at the sign bit, reverse the sense of the
8928 comparison. */
8929 if (sign_bit_comparison_p)
8930 {
8931 op0 = XEXP (op0, 0);
8932 code = (code == GE ? LT : GE);
8933 continue;
8934 }
8935 break;
8936
8937 case NEG:
8938 /* If testing for equality, we can take the NEG of the constant. */
8939 if (equality_comparison_p
8940 && (tem = simplify_unary_operation (NEG, mode, op1, mode)) != 0)
8941 {
8942 op0 = XEXP (op0, 0);
8943 op1 = tem;
8944 continue;
8945 }
8946
8947 /* The remaining cases only apply to comparisons with zero. */
8948 if (const_op != 0)
8949 break;
8950
8951 /* When X is ABS or is known positive,
8952 (neg X) is < 0 if and only if X != 0. */
8953
8954 if (sign_bit_comparison_p
8955 && (GET_CODE (XEXP (op0, 0)) == ABS
5f4f0e22 8956 || (mode_width <= HOST_BITS_PER_WIDE_INT
951553af 8957 && (nonzero_bits (XEXP (op0, 0), mode)
5f4f0e22 8958 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)))
230d793d
RS
8959 {
8960 op0 = XEXP (op0, 0);
8961 code = (code == LT ? NE : EQ);
8962 continue;
8963 }
8964
3bed8141
RK
8965 /* If we have NEG of something whose two high-order bits are the
8966 same, we know that "(-a) < 0" is equivalent to "a > 0". */
8967 if (num_sign_bit_copies (op0, mode) >= 2)
230d793d
RS
8968 {
8969 op0 = XEXP (op0, 0);
8970 code = swap_condition (code);
8971 continue;
8972 }
8973 break;
8974
8975 case ROTATE:
8976 /* If we are testing equality and our count is a constant, we
8977 can perform the inverse operation on our RHS. */
8978 if (equality_comparison_p && GET_CODE (XEXP (op0, 1)) == CONST_INT
8979 && (tem = simplify_binary_operation (ROTATERT, mode,
8980 op1, XEXP (op0, 1))) != 0)
8981 {
8982 op0 = XEXP (op0, 0);
8983 op1 = tem;
8984 continue;
8985 }
8986
8987 /* If we are doing a < 0 or >= 0 comparison, it means we are testing
8988 a particular bit. Convert it to an AND of a constant of that
8989 bit. This will be converted into a ZERO_EXTRACT. */
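 /* For example, in SImode (lt (rotate X (const_int 1)) (const_int 0))
 tests bit 30 of X, giving
 (ne (and X (const_int 1073741824)) (const_int 0)). */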
8990 if (const_op == 0 && sign_bit_comparison_p
8991 && GET_CODE (XEXP (op0, 1)) == CONST_INT
5f4f0e22 8992 && mode_width <= HOST_BITS_PER_WIDE_INT)
230d793d 8993 {
8994 op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
8995 ((HOST_WIDE_INT) 1
8996 << (mode_width - 1
8997 - INTVAL (XEXP (op0, 1)))));
8998 code = (code == LT ? NE : EQ);
8999 continue;
9000 }
9001
9002 /* ... fall through ... */
9003
9004 case ABS:
9005 /* ABS is ignorable inside an equality comparison with zero. */
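 /* (abs X) is zero exactly when X is zero, so, for example,
 (eq (abs X) (const_int 0)) becomes (eq X (const_int 0)). */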
9006 if (const_op == 0 && equality_comparison_p)
9007 {
9008 op0 = XEXP (op0, 0);
9009 continue;
9010 }
9011 break;
9012
9013
9014 case SIGN_EXTEND:
9015 /* Can simplify (compare (zero/sign_extend FOO) CONST)
9016 to (compare FOO CONST) if CONST fits in FOO's mode and we
9017 are either testing inequality or have an unsigned comparison
9018 with ZERO_EXTEND or a signed comparison with SIGN_EXTEND. */
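 /* For example, (lt (sign_extend:SI X:QI) (const_int 100)) can
 become (lt X (const_int 100)), since 100 fits in QImode. */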
9019 if (! unsigned_comparison_p
9020 && (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
9021 <= HOST_BITS_PER_WIDE_INT)
9022 && ((unsigned HOST_WIDE_INT) const_op
9023 < (((HOST_WIDE_INT) 1
9024 << (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0))) - 1)))))
9025 {
9026 op0 = XEXP (op0, 0);
9027 continue;
9028 }
9029 break;
9030
9031 case SUBREG:
9032 /* Check for the case where we are comparing A - C1 with C2,
9033 both constants are smaller than 1/2 the maximum positive
9034 value in MODE, and the comparison is equality or unsigned.
9035 In that case, if A is either zero-extended to MODE or has
9036 sufficient sign bits so that the high-order bit in MODE
9037 is a copy of the sign in the inner mode, we can prove that it is
9038 safe to do the operation in the wider mode. This simplifies
9039 many range checks. */
9040
9041 if (mode_width <= HOST_BITS_PER_WIDE_INT
9042 && subreg_lowpart_p (op0)
9043 && GET_CODE (SUBREG_REG (op0)) == PLUS
9044 && GET_CODE (XEXP (SUBREG_REG (op0), 1)) == CONST_INT
9045 && INTVAL (XEXP (SUBREG_REG (op0), 1)) < 0
9046 && (- INTVAL (XEXP (SUBREG_REG (op0), 1))
9047 < GET_MODE_MASK (mode) / 2)
adb7a1cb 9048 && (unsigned HOST_WIDE_INT) const_op < GET_MODE_MASK (mode) / 2
9049 && (0 == (nonzero_bits (XEXP (SUBREG_REG (op0), 0),
9050 GET_MODE (SUBREG_REG (op0)))
9051 & ~ GET_MODE_MASK (mode))
9052 || (num_sign_bit_copies (XEXP (SUBREG_REG (op0), 0),
9053 GET_MODE (SUBREG_REG (op0)))
9054 > (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0)))
9055 - GET_MODE_BITSIZE (mode)))))
9056 {
9057 op0 = SUBREG_REG (op0);
9058 continue;
9059 }
9060
9061 /* If the inner mode is narrower and we are extracting the low part,
9062 we can treat the SUBREG as if it were a ZERO_EXTEND. */
9063 if (subreg_lowpart_p (op0)
9064 && GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0))) < mode_width)
9065 /* Fall through */ ;
9066 else
9067 break;
9068
9069 /* ... fall through ... */
9070
9071 case ZERO_EXTEND:
9072 if ((unsigned_comparison_p || equality_comparison_p)
9073 && (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
9074 <= HOST_BITS_PER_WIDE_INT)
9075 && ((unsigned HOST_WIDE_INT) const_op
9076 < GET_MODE_MASK (GET_MODE (XEXP (op0, 0)))))
9077 {
9078 op0 = XEXP (op0, 0);
9079 continue;
9080 }
9081 break;
9082
9083 case PLUS:
20fdd649 9084 /* (eq (plus X A) B) -> (eq X (minus B A)). We can only do
5089e22e 9085 this for equality comparisons due to pathological cases involving
230d793d 9086 overflows. */
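 /* For example, (eq (plus X (const_int 3)) (const_int 7)) becomes
 (eq X (const_int 4)). */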
9087 if (equality_comparison_p
9088 && 0 != (tem = simplify_binary_operation (MINUS, mode,
9089 op1, XEXP (op0, 1))))
9090 {
9091 op0 = XEXP (op0, 0);
9092 op1 = tem;
9093 continue;
9094 }
9095
9096 /* (plus (abs X) (const_int -1)) is < 0 if and only if X == 0. */
9097 if (const_op == 0 && XEXP (op0, 1) == constm1_rtx
9098 && GET_CODE (XEXP (op0, 0)) == ABS && sign_bit_comparison_p)
9099 {
9100 op0 = XEXP (XEXP (op0, 0), 0);
9101 code = (code == LT ? EQ : NE);
9102 continue;
9103 }
9104 break;
9105
9106 case MINUS:
9107 /* (eq (minus A B) C) -> (eq A (plus B C)) or
9108 (eq B (minus A C)), whichever simplifies. We can only do
9109 this for equality comparisons due to pathological cases involving
9110 overflows. */
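 /* For example, (eq (minus X (const_int 5)) (const_int 2)) becomes
 (eq X (const_int 7)). */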
9111 if (equality_comparison_p
9112 && 0 != (tem = simplify_binary_operation (PLUS, mode,
9113 XEXP (op0, 1), op1)))
9114 {
9115 op0 = XEXP (op0, 0);
9116 op1 = tem;
9117 continue;
9118 }
9119
9120 if (equality_comparison_p
9121 && 0 != (tem = simplify_binary_operation (MINUS, mode,
9122 XEXP (op0, 0), op1)))
9123 {
9124 op0 = XEXP (op0, 1);
9125 op1 = tem;
9126 continue;
9127 }
9128
9129 /* The sign bit of (minus (ashiftrt X C) X), where C is the number
9130 of bits in X minus 1, is one iff X > 0. */
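 /* With C == mode_width - 1, the ASHIFTRT yields 0 when X >= 0 and
 -1 when X < 0, so the MINUS computes -X or -1 - X respectively;
 only the X > 0 case makes that result negative. */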
9131 if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == ASHIFTRT
9132 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
9133 && INTVAL (XEXP (XEXP (op0, 0), 1)) == mode_width - 1
9134 && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
9135 {
9136 op0 = XEXP (op0, 1);
9137 code = (code == GE ? LE : GT);
9138 continue;
9139 }
9140 break;
9141
9142 case XOR:
9143 /* (eq (xor A B) C) -> (eq A (xor B C)). This is a simplification
9144 if C is zero or B is a constant. */
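 /* For example, (eq (xor X (const_int 5)) (const_int 3)) becomes
 (eq X (const_int 6)). */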
9145 if (equality_comparison_p
9146 && 0 != (tem = simplify_binary_operation (XOR, mode,
9147 XEXP (op0, 1), op1)))
9148 {
9149 op0 = XEXP (op0, 0);
9150 op1 = tem;
9151 continue;
9152 }
9153 break;
9154
9155 case EQ: case NE:
9156 case LT: case LTU: case LE: case LEU:
9157 case GT: case GTU: case GE: case GEU:
9158 /* We can't do anything if OP0 is a condition code value, rather
9159 than an actual data value. */
9160 if (const_op != 0
9161#ifdef HAVE_cc0
9162 || XEXP (op0, 0) == cc0_rtx
9163#endif
9164 || GET_MODE_CLASS (GET_MODE (XEXP (op0, 0))) == MODE_CC)
9165 break;
9166
9167 /* Get the two operands being compared. */
9168 if (GET_CODE (XEXP (op0, 0)) == COMPARE)
9169 tem = XEXP (XEXP (op0, 0), 0), tem1 = XEXP (XEXP (op0, 0), 1);
9170 else
9171 tem = XEXP (op0, 0), tem1 = XEXP (op0, 1);
9172
9173 /* Check for the cases where we simply want the result of the
9174 earlier test or the opposite of that result. */
9175 if (code == NE
9176 || (code == EQ && reversible_comparison_p (op0))
5f4f0e22 9177 || (GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
3f508eca 9178 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
230d793d 9179 && (STORE_FLAG_VALUE
9180 & (((HOST_WIDE_INT) 1
9181 << (GET_MODE_BITSIZE (GET_MODE (op0)) - 1))))
9182 && (code == LT
9183 || (code == GE && reversible_comparison_p (op0)))))
9184 {
9185 code = (code == LT || code == NE
9186 ? GET_CODE (op0) : reverse_condition (GET_CODE (op0)));
9187 op0 = tem, op1 = tem1;
9188 continue;
9189 }
9190 break;
9191
9192 case IOR:
9193 /* The sign bit of (ior (plus X (const_int -1)) X) is non-zero
9194 iff X <= 0. */
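 /* If X > 0, both X - 1 and X have the sign bit clear; if X == 0,
 X - 1 is -1; if X < 0, X itself is negative. */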
9195 if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == PLUS
9196 && XEXP (XEXP (op0, 0), 1) == constm1_rtx
9197 && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
9198 {
9199 op0 = XEXP (op0, 1);
9200 code = (code == GE ? GT : LE);
9201 continue;
9202 }
9203 break;
9204
9205 case AND:
9206 /* Convert (and (xshift 1 X) Y) to (and (lshiftrt Y X) 1). This
9207 will be converted to a ZERO_EXTRACT later. */
9208 if (const_op == 0 && equality_comparison_p
45620ed4 9209 && GET_CODE (XEXP (op0, 0)) == ASHIFT
9210 && XEXP (XEXP (op0, 0), 0) == const1_rtx)
9211 {
9212 op0 = simplify_and_const_int
9213 (op0, mode, gen_rtx_combine (LSHIFTRT, mode,
9214 XEXP (op0, 1),
9215 XEXP (XEXP (op0, 0), 1)),
5f4f0e22 9216 (HOST_WIDE_INT) 1);
9217 continue;
9218 }
9219
9220 /* If we are comparing (and (lshiftrt X C1) C2) for equality with
9221 zero and X is a comparison and C1 and C2 describe only bits set
9222 in STORE_FLAG_VALUE, we can compare with X. */
9223 if (const_op == 0 && equality_comparison_p
5f4f0e22 9224 && mode_width <= HOST_BITS_PER_WIDE_INT
9225 && GET_CODE (XEXP (op0, 1)) == CONST_INT
9226 && GET_CODE (XEXP (op0, 0)) == LSHIFTRT
9227 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
9228 && INTVAL (XEXP (XEXP (op0, 0), 1)) >= 0
5f4f0e22 9229 && INTVAL (XEXP (XEXP (op0, 0), 1)) < HOST_BITS_PER_WIDE_INT)
9230 {
9231 mask = ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode))
9232 << INTVAL (XEXP (XEXP (op0, 0), 1)));
9233 if ((~ STORE_FLAG_VALUE & mask) == 0
9234 && (GET_RTX_CLASS (GET_CODE (XEXP (XEXP (op0, 0), 0))) == '<'
9235 || ((tem = get_last_value (XEXP (XEXP (op0, 0), 0))) != 0
9236 && GET_RTX_CLASS (GET_CODE (tem)) == '<')))
9237 {
9238 op0 = XEXP (XEXP (op0, 0), 0);
9239 continue;
9240 }
9241 }
9242
9243 /* If we are doing an equality comparison of an AND of a bit equal
9244 to the sign bit, replace this with a LT or GE comparison of
9245 the underlying value. */
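 /* For example, in SImode, (eq (and X (const_int -2147483648))
 (const_int 0)) becomes (ge X (const_int 0)), and the NE form
 becomes (lt X (const_int 0)). */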
9246 if (equality_comparison_p
9247 && const_op == 0
9248 && GET_CODE (XEXP (op0, 1)) == CONST_INT
5f4f0e22 9249 && mode_width <= HOST_BITS_PER_WIDE_INT
230d793d 9250 && ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode))
5f4f0e22 9251 == (HOST_WIDE_INT) 1 << (mode_width - 1)))
9252 {
9253 op0 = XEXP (op0, 0);
9254 code = (code == EQ ? GE : LT);
9255 continue;
9256 }
9257
9258 /* If this AND operation is really a ZERO_EXTEND from a narrower
9259 mode, the constant fits within that mode, and this is either an
9260 equality or unsigned comparison, try to do this comparison in
9261 the narrower mode. */
9262 if ((equality_comparison_p || unsigned_comparison_p)
9263 && GET_CODE (XEXP (op0, 1)) == CONST_INT
9264 && (i = exact_log2 ((INTVAL (XEXP (op0, 1))
9265 & GET_MODE_MASK (mode))
9266 + 1)) >= 0
9267 && const_op >> i == 0
9268 && (tmode = mode_for_size (i, MODE_INT, 1)) != BLKmode)
9269 {
9270 op0 = gen_lowpart_for_combine (tmode, XEXP (op0, 0));
9271 continue;
9272 }
9273 break;
9274
9275 case ASHIFT:
45620ed4 9276 /* If we have (compare (ashift FOO N) (const_int C)) and
230d793d 9277 the high order N bits of FOO (N+1 if an inequality comparison)
951553af 9278 are known to be zero, we can do this by comparing FOO with C
9279 shifted right N bits so long as the low-order N bits of C are
9280 zero. */
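 /* For example, if the two high-order bits of FOO are known to be
 zero, (eq (ashift FOO (const_int 2)) (const_int 20)) becomes
 (eq FOO (const_int 5)). */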
9281 if (GET_CODE (XEXP (op0, 1)) == CONST_INT
9282 && INTVAL (XEXP (op0, 1)) >= 0
9283 && ((INTVAL (XEXP (op0, 1)) + ! equality_comparison_p)
9284 < HOST_BITS_PER_WIDE_INT)
9285 && ((const_op
34785d05 9286 & (((HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1))) - 1)) == 0)
5f4f0e22 9287 && mode_width <= HOST_BITS_PER_WIDE_INT
951553af 9288 && (nonzero_bits (XEXP (op0, 0), mode)
9289 & ~ (mask >> (INTVAL (XEXP (op0, 1))
9290 + ! equality_comparison_p))) == 0)
9291 {
9292 const_op >>= INTVAL (XEXP (op0, 1));
5f4f0e22 9293 op1 = GEN_INT (const_op);
9294 op0 = XEXP (op0, 0);
9295 continue;
9296 }
9297
dfbe1b2f 9298 /* If we are doing a sign bit comparison, it means we are testing
230d793d 9299 a particular bit. Convert it to the appropriate AND. */
dfbe1b2f 9300 if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 1)) == CONST_INT
5f4f0e22 9301 && mode_width <= HOST_BITS_PER_WIDE_INT)
230d793d 9302 {
9303 op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
9304 ((HOST_WIDE_INT) 1
9305 << (mode_width - 1
9306 - INTVAL (XEXP (op0, 1)))));
9307 code = (code == LT ? NE : EQ);
9308 continue;
9309 }
9310
9311 /* If this is an equality comparison with zero and we are shifting
9312 the low bit to the sign bit, we can convert this to an AND of the
9313 low-order bit. */
9314 if (const_op == 0 && equality_comparison_p
9315 && GET_CODE (XEXP (op0, 1)) == CONST_INT
9316 && INTVAL (XEXP (op0, 1)) == mode_width - 1)
9317 {
9318 op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
9319 (HOST_WIDE_INT) 1);
9320 continue;
9321 }
9322 break;
9323
9324 case ASHIFTRT:
9325 /* If this is an equality comparison with zero, we can do this
9326 as a logical shift, which might be much simpler. */
9327 if (equality_comparison_p && const_op == 0
9328 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
9329 {
9330 op0 = simplify_shift_const (NULL_RTX, LSHIFTRT, mode,
9331 XEXP (op0, 0),
9332 INTVAL (XEXP (op0, 1)));
9333 continue;
9334 }
9335
9336 /* If OP0 is a sign extension and CODE is not an unsigned comparison,
9337 do the comparison in a narrower mode. */
9338 if (! unsigned_comparison_p
9339 && GET_CODE (XEXP (op0, 1)) == CONST_INT
9340 && GET_CODE (XEXP (op0, 0)) == ASHIFT
9341 && XEXP (op0, 1) == XEXP (XEXP (op0, 0), 1)
9342 && (tmode = mode_for_size (mode_width - INTVAL (XEXP (op0, 1)),
22331794 9343 MODE_INT, 1)) != BLKmode
9344 && ((unsigned HOST_WIDE_INT) const_op <= GET_MODE_MASK (tmode)
9345 || ((unsigned HOST_WIDE_INT) - const_op
9346 <= GET_MODE_MASK (tmode))))
9347 {
9348 op0 = gen_lowpart_for_combine (tmode, XEXP (XEXP (op0, 0), 0));
9349 continue;
9350 }
9351
9352 /* ... fall through ... */
9353 case LSHIFTRT:
9354 /* If we have (compare (xshiftrt FOO N) (const_int C)) and
951553af 9355 the low order N bits of FOO are known to be zero, we can do this
9356 by comparing FOO with C shifted left N bits so long as no
9357 overflow occurs. */
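 /* For example, if the low four bits of FOO are known to be zero,
 (eq (lshiftrt FOO (const_int 4)) (const_int 3)) becomes
 (eq FOO (const_int 48)). */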
9358 if (GET_CODE (XEXP (op0, 1)) == CONST_INT
9359 && INTVAL (XEXP (op0, 1)) >= 0
9360 && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
9361 && mode_width <= HOST_BITS_PER_WIDE_INT
951553af 9362 && (nonzero_bits (XEXP (op0, 0), mode)
5f4f0e22 9363 & (((HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1))) - 1)) == 0
9364 && (const_op == 0
9365 || (floor_log2 (const_op) + INTVAL (XEXP (op0, 1))
9366 < mode_width)))
9367 {
9368 const_op <<= INTVAL (XEXP (op0, 1));
5f4f0e22 9369 op1 = GEN_INT (const_op);
9370 op0 = XEXP (op0, 0);
9371 continue;
9372 }
9373
9374 /* If we are using this shift to extract just the sign bit, we
9375 can replace this with an LT or GE comparison. */
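 /* For example, in SImode, (ne (lshiftrt X (const_int 31))
 (const_int 0)) becomes (lt X (const_int 0)). */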
9376 if (const_op == 0
9377 && (equality_comparison_p || sign_bit_comparison_p)
9378 && GET_CODE (XEXP (op0, 1)) == CONST_INT
9379 && INTVAL (XEXP (op0, 1)) == mode_width - 1)
9380 {
9381 op0 = XEXP (op0, 0);
9382 code = (code == NE || code == GT ? LT : GE);
9383 continue;
9384 }
9385 break;
9386 }
9387
9388 break;
9389 }
9390
9391 /* Now make any compound operations involved in this comparison. Then,
9392 check for an outermost SUBREG on OP0 that isn't doing anything or is
9393 paradoxical. The latter case can only occur when it is known that the
9394 "extra" bits will be zero. Therefore, it is safe to remove the SUBREG.
9395 We can never remove a SUBREG for a non-equality comparison because the
9396 sign bit is in a different place in the underlying object. */
9397
9398 op0 = make_compound_operation (op0, op1 == const0_rtx ? COMPARE : SET);
9399 op1 = make_compound_operation (op1, SET);
9400
9401 if (GET_CODE (op0) == SUBREG && subreg_lowpart_p (op0)
9402 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
9403 && (code == NE || code == EQ)
9404 && ((GET_MODE_SIZE (GET_MODE (op0))
9405 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0))))))
9406 {
9407 op0 = SUBREG_REG (op0);
9408 op1 = gen_lowpart_for_combine (GET_MODE (op0), op1);
9409 }
9410
9411 else if (GET_CODE (op0) == SUBREG && subreg_lowpart_p (op0)
9412 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
9413 && (code == NE || code == EQ)
9414 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0)))
9415 <= HOST_BITS_PER_WIDE_INT)
951553af 9416 && (nonzero_bits (SUBREG_REG (op0), GET_MODE (SUBREG_REG (op0)))
9417 & ~ GET_MODE_MASK (GET_MODE (op0))) == 0
9418 && (tem = gen_lowpart_for_combine (GET_MODE (SUBREG_REG (op0)),
9419 op1),
951553af 9420 (nonzero_bits (tem, GET_MODE (SUBREG_REG (op0)))
9421 & ~ GET_MODE_MASK (GET_MODE (op0))) == 0))
9422 op0 = SUBREG_REG (op0), op1 = tem;
9423
9424 /* We now do the opposite procedure: Some machines don't have compare
9425 insns in all modes. If OP0's mode is an integer mode smaller than a
9426 word and we can't do a compare in that mode, see if there is a larger
9427 mode for which we can do the compare. There are a number of cases in
9428 which we can use the wider mode. */
9429
9430 mode = GET_MODE (op0);
9431 if (mode != VOIDmode && GET_MODE_CLASS (mode) == MODE_INT
9432 && GET_MODE_SIZE (mode) < UNITS_PER_WORD
9433 && cmp_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
9434 for (tmode = GET_MODE_WIDER_MODE (mode);
9435 (tmode != VOIDmode
9436 && GET_MODE_BITSIZE (tmode) <= HOST_BITS_PER_WIDE_INT);
230d793d 9437 tmode = GET_MODE_WIDER_MODE (tmode))
a687e897 9438 if (cmp_optab->handlers[(int) tmode].insn_code != CODE_FOR_nothing)
230d793d 9439 {
951553af 9440 /* If the only nonzero bits in OP0 and OP1 are those in the
9441 narrower mode and this is an equality or unsigned comparison,
9442 we can use the wider mode. Similarly for sign-extended
7e4dc511 9443 values, in which case it is true for all comparisons. */
9444 if (((code == EQ || code == NE
9445 || code == GEU || code == GTU || code == LEU || code == LTU)
9446 && (nonzero_bits (op0, tmode) & ~ GET_MODE_MASK (mode)) == 0
9447 && (nonzero_bits (op1, tmode) & ~ GET_MODE_MASK (mode)) == 0)
9448 || ((num_sign_bit_copies (op0, tmode)
9449 > GET_MODE_BITSIZE (tmode) - GET_MODE_BITSIZE (mode))
a687e897 9450 && (num_sign_bit_copies (op1, tmode)
58744483 9451 > GET_MODE_BITSIZE (tmode) - GET_MODE_BITSIZE (mode))))
9452 {
9453 op0 = gen_lowpart_for_combine (tmode, op0);
9454 op1 = gen_lowpart_for_combine (tmode, op1);
9455 break;
9456 }
230d793d 9457
9458 /* If this is a test for negative, we can make an explicit
9459 test of the sign bit. */
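 /* For example, a QImode (lt X (const_int 0)) can be tested in
 SImode as (ne (and Y (const_int 128)) (const_int 0)), where Y
 is X widened to SImode. */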
9460
9461 if (op1 == const0_rtx && (code == LT || code == GE)
9462 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
230d793d 9463 {
9464 op0 = gen_binary (AND, tmode,
9465 gen_lowpart_for_combine (tmode, op0),
9466 GEN_INT ((HOST_WIDE_INT) 1
9467 << (GET_MODE_BITSIZE (mode) - 1)));
230d793d 9468 code = (code == LT) ? NE : EQ;
a687e897 9469 break;
230d793d 9470 }
9471 }
9472
9473#ifdef CANONICALIZE_COMPARISON
9474 /* If this machine only supports a subset of valid comparisons, see if we
9475 can convert an unsupported one into a supported one. */
9476 CANONICALIZE_COMPARISON (code, op0, op1);
9477#endif
9478
9479 *pop0 = op0;
9480 *pop1 = op1;
9481
9482 return code;
9483}
9484\f
9485/* Return 1 if we know that X, a comparison operation, is not operating
9486 on a floating-point value or is EQ or NE, meaning that we can safely
9487 reverse it. */
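/* (With IEEE floating point this is unsafe in general: when either operand
 is a NaN, (lt X Y) and (ge X Y) are both false, so neither is the reverse
 of the other. EQ and NE always reverse each other.) */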
9488
9489static int
9490reversible_comparison_p (x)
9491 rtx x;
9492{
9493 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
7e2a0d8e 9494 || flag_fast_math
9495 || GET_CODE (x) == NE || GET_CODE (x) == EQ)
9496 return 1;
9497
9498 switch (GET_MODE_CLASS (GET_MODE (XEXP (x, 0))))
9499 {
9500 case MODE_INT:
9501 case MODE_PARTIAL_INT:
9502 case MODE_COMPLEX_INT:
9503 return 1;
9504
9505 case MODE_CC:
9506 /* If the mode of the condition codes tells us that this is safe,
9507 we need look no further. */
9508 if (REVERSIBLE_CC_MODE (GET_MODE (XEXP (x, 0))))
9509 return 1;
9510
9511 /* Otherwise try and find where the condition codes were last set and
9512 use that. */
9513 x = get_last_value (XEXP (x, 0));
9514 return (x && GET_CODE (x) == COMPARE
3ad2180a 9515 && ! FLOAT_MODE_P (GET_MODE (XEXP (x, 0))));
9516 }
9517
9518 return 0;
9519}
9520\f
9521 /* Utility function for the following routine. Called when X is part of a value
9522 being stored into reg_last_set_value. Sets reg_last_set_table_tick
9523 for each register mentioned. Similar to mention_regs in cse.c */
9524
9525static void
9526update_table_tick (x)
9527 rtx x;
9528{
9529 register enum rtx_code code = GET_CODE (x);
9530 register char *fmt = GET_RTX_FORMAT (code);
9531 register int i;
9532
9533 if (code == REG)
9534 {
9535 int regno = REGNO (x);
9536 int endregno = regno + (regno < FIRST_PSEUDO_REGISTER
9537 ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
9538
9539 for (i = regno; i < endregno; i++)
9540 reg_last_set_table_tick[i] = label_tick;
9541
9542 return;
9543 }
9544
9545 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
9546 /* Note that we can't have an "E" in values stored; see
9547 get_last_value_validate. */
9548 if (fmt[i] == 'e')
9549 update_table_tick (XEXP (x, i));
9550}
9551
9552/* Record that REG is set to VALUE in insn INSN. If VALUE is zero, we
9553 are saying that the register is clobbered and we no longer know its
9554 value. If INSN is zero, don't update reg_last_set; this is only permitted
9555 with VALUE also zero and is used to invalidate the register. */
9556
9557static void
9558record_value_for_reg (reg, insn, value)
9559 rtx reg;
9560 rtx insn;
9561 rtx value;
9562{
9563 int regno = REGNO (reg);
9564 int endregno = regno + (regno < FIRST_PSEUDO_REGISTER
9565 ? HARD_REGNO_NREGS (regno, GET_MODE (reg)) : 1);
9566 int i;
9567
9568 /* If VALUE contains REG and we have a previous value for REG, substitute
9569 the previous value. */
9570 if (value && insn && reg_overlap_mentioned_p (reg, value))
9571 {
9572 rtx tem;
9573
9574 /* Set things up so get_last_value is allowed to see anything set up to
9575 our insn. */
9576 subst_low_cuid = INSN_CUID (insn);
9577 tem = get_last_value (reg);
9578
9579 if (tem)
9580 value = replace_rtx (copy_rtx (value), reg, tem);
9581 }
9582
9583 /* For each register modified, show we don't know its value, that
9584 we don't know about its bitwise content, that its value has been
9585 updated, and that we don't know the location of the death of the
9586 register. */
9587 for (i = regno; i < endregno; i ++)
9588 {
9589 if (insn)
9590 reg_last_set[i] = insn;
9591 reg_last_set_value[i] = 0;
9592 reg_last_set_mode[i] = 0;
9593 reg_last_set_nonzero_bits[i] = 0;
9594 reg_last_set_sign_bit_copies[i] = 0;
9595 reg_last_death[i] = 0;
9596 }
9597
9598 /* Mark registers that are being referenced in this value. */
9599 if (value)
9600 update_table_tick (value);
9601
9602 /* Now update the status of each register being set.
9603 If someone is using this register in this block, set this register
9604 to invalid since we will get confused between the two lives in this
9605 basic block. This makes using this register always invalid. In cse, we
9606 scan the table to invalidate all entries using this register, but this
9607 is too much work for us. */
9608
9609 for (i = regno; i < endregno; i++)
9610 {
9611 reg_last_set_label[i] = label_tick;
9612 if (value && reg_last_set_table_tick[i] == label_tick)
9613 reg_last_set_invalid[i] = 1;
9614 else
9615 reg_last_set_invalid[i] = 0;
9616 }
9617
9618 /* The value being assigned might refer to X (like in "x++;"). In that
9619 case, we must replace it with (clobber (const_int 0)) to prevent
9620 infinite loops. */
9621 if (value && ! get_last_value_validate (&value,
9622 reg_last_set_label[regno], 0))
9623 {
9624 value = copy_rtx (value);
9625 if (! get_last_value_validate (&value, reg_last_set_label[regno], 1))
9626 value = 0;
9627 }
9628
9629 /* For the main register being modified, update the value, the mode, the
9630 nonzero bits, and the number of sign bit copies. */
9631
9632 reg_last_set_value[regno] = value;
9633
9634 if (value)
9635 {
2afabb48 9636 subst_low_cuid = INSN_CUID (insn);
9637 reg_last_set_mode[regno] = GET_MODE (reg);
9638 reg_last_set_nonzero_bits[regno] = nonzero_bits (value, GET_MODE (reg));
9639 reg_last_set_sign_bit_copies[regno]
9640 = num_sign_bit_copies (value, GET_MODE (reg));
9641 }
9642}
9643
9644/* Used for communication between the following two routines. */
9645static rtx record_dead_insn;
9646
9647/* Called via note_stores from record_dead_and_set_regs to handle one
9648 SET or CLOBBER in an insn. */
9649
9650static void
9651record_dead_and_set_regs_1 (dest, setter)
9652 rtx dest, setter;
9653{
9654 if (GET_CODE (dest) == REG)
9655 {
9656 /* If we are setting the whole register, we know its value. Otherwise
9657 show that we don't know the value. We can handle SUBREG in
9658 some cases. */
9659 if (GET_CODE (setter) == SET && dest == SET_DEST (setter))
9660 record_value_for_reg (dest, record_dead_insn, SET_SRC (setter));
9661 else if (GET_CODE (setter) == SET
9662 && GET_CODE (SET_DEST (setter)) == SUBREG
9663 && SUBREG_REG (SET_DEST (setter)) == dest
90bf8081 9664 && GET_MODE_BITSIZE (GET_MODE (dest)) <= BITS_PER_WORD
230d793d 9665 && subreg_lowpart_p (SET_DEST (setter)))
9666 record_value_for_reg (dest, record_dead_insn,
9667 gen_lowpart_for_combine (GET_MODE (dest),
9668 SET_SRC (setter)));
230d793d 9669 else
5f4f0e22 9670 record_value_for_reg (dest, record_dead_insn, NULL_RTX);
9671 }
9672 else if (GET_CODE (dest) == MEM
9673 /* Ignore pushes, they clobber nothing. */
9674 && ! push_operand (dest, GET_MODE (dest)))
9675 mem_last_set = INSN_CUID (record_dead_insn);
9676}
9677
9678/* Update the records of when each REG was most recently set or killed
9679 for the things done by INSN. This is the last thing done in processing
9680 INSN in the combiner loop.
9681
9682 We update reg_last_set, reg_last_set_value, reg_last_set_mode,
9683 reg_last_set_nonzero_bits, reg_last_set_sign_bit_copies, reg_last_death,
9684 and also the similar information mem_last_set (which insn most recently
9685 modified memory) and last_call_cuid (which insn was the most recent
9686 subroutine call). */
9687
9688static void
9689record_dead_and_set_regs (insn)
9690 rtx insn;
9691{
9692 register rtx link;
9693 int i;
9694
9695 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
9696 {
9697 if (REG_NOTE_KIND (link) == REG_DEAD
9698 && GET_CODE (XEXP (link, 0)) == REG)
9699 {
9700 int regno = REGNO (XEXP (link, 0));
9701 int endregno
9702 = regno + (regno < FIRST_PSEUDO_REGISTER
9703 ? HARD_REGNO_NREGS (regno, GET_MODE (XEXP (link, 0)))
9704 : 1);
9705
9706 for (i = regno; i < endregno; i++)
9707 reg_last_death[i] = insn;
9708 }
230d793d 9709 else if (REG_NOTE_KIND (link) == REG_INC)
5f4f0e22 9710 record_value_for_reg (XEXP (link, 0), insn, NULL_RTX);
9711 }
9712
9713 if (GET_CODE (insn) == CALL_INSN)
9714 {
9715 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
9716 if (call_used_regs[i])
9717 {
9718 reg_last_set_value[i] = 0;
9719 reg_last_set_mode[i] = 0;
9720 reg_last_set_nonzero_bits[i] = 0;
9721 reg_last_set_sign_bit_copies[i] = 0;
9722 reg_last_death[i] = 0;
9723 }
9724
9725 last_call_cuid = mem_last_set = INSN_CUID (insn);
9726 }
9727
9728 record_dead_insn = insn;
9729 note_stores (PATTERN (insn), record_dead_and_set_regs_1);
9730}
9731\f
9732/* Utility routine for the following function. Verify that all the registers
9733 mentioned in *LOC are valid when *LOC was part of a value set when
9734 label_tick == TICK. Return 0 if some are not.
9735
9736 If REPLACE is non-zero, replace the invalid reference with
9737 (clobber (const_int 0)) and return 1. This replacement is useful because
9738 we often can get useful information about the form of a value (e.g., if
9739 it was produced by a shift that always produces -1 or 0) even though
9740 we don't know exactly what registers it was produced from. */
9741
9742static int
9743get_last_value_validate (loc, tick, replace)
9744 rtx *loc;
9745 int tick;
9746 int replace;
9747{
9748 rtx x = *loc;
9749 char *fmt = GET_RTX_FORMAT (GET_CODE (x));
9750 int len = GET_RTX_LENGTH (GET_CODE (x));
9751 int i;
9752
9753 if (GET_CODE (x) == REG)
9754 {
9755 int regno = REGNO (x);
9756 int endregno = regno + (regno < FIRST_PSEUDO_REGISTER
9757 ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
9758 int j;
9759
9760 for (j = regno; j < endregno; j++)
9761 if (reg_last_set_invalid[j]
9762 /* If this is a pseudo-register that was only set once, it is
9763 always valid. */
9764 || (! (regno >= FIRST_PSEUDO_REGISTER && reg_n_sets[regno] == 1)
9765 && reg_last_set_label[j] > tick))
9766 {
9767 if (replace)
9768 *loc = gen_rtx (CLOBBER, GET_MODE (x), const0_rtx);
9769 return replace;
9770 }
9771
9772 return 1;
9773 }
9774
9775 for (i = 0; i < len; i++)
9776 if ((fmt[i] == 'e'
9777 && get_last_value_validate (&XEXP (x, i), tick, replace) == 0)
9778 /* Don't bother with these. They shouldn't occur anyway. */
9779 || fmt[i] == 'E')
9780 return 0;
9781
9782 /* If we haven't found a reason for it to be invalid, it is valid. */
9783 return 1;
9784}
9785
9786/* Get the last value assigned to X, if known. Some registers
9787 in the value may be replaced with (clobber (const_int 0)) if their value
9788 is no longer known reliably. */
9789
9790static rtx
9791get_last_value (x)
9792 rtx x;
9793{
9794 int regno;
9795 rtx value;
9796
9797 /* If this is a non-paradoxical SUBREG, get the value of its operand and
9798 then convert it to the desired mode. If this is a paradoxical SUBREG,
9799 we cannot predict what values the "extra" bits might have. */
9800 if (GET_CODE (x) == SUBREG
9801 && subreg_lowpart_p (x)
9802 && (GET_MODE_SIZE (GET_MODE (x))
9803 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
9804 && (value = get_last_value (SUBREG_REG (x))) != 0)
9805 return gen_lowpart_for_combine (GET_MODE (x), value);
9806
9807 if (GET_CODE (x) != REG)
9808 return 0;
9809
9810 regno = REGNO (x);
9811 value = reg_last_set_value[regno];
9812
d0ab8cd3 9813 /* If we don't have a value or if it isn't for this basic block, return 0. */
9814
9815 if (value == 0
9816 || (reg_n_sets[regno] != 1
55310dad 9817 && reg_last_set_label[regno] != label_tick))
9818 return 0;
9819
d0ab8cd3 9820 /* If the value was set in a later insn than the ones we are processing,
9821 we can't use it even if the register was only set once, but make a quick
9822 check to see if the previous insn set it to something. This is commonly
9823 the case when the same pseudo is used by repeated insns. */
d0ab8cd3 9824
4090a6b3 9825 if (INSN_CUID (reg_last_set[regno]) >= subst_low_cuid)
9826 {
9827 rtx insn, set;
9828
9829 for (insn = prev_nonnote_insn (subst_insn);
9830 insn && INSN_CUID (insn) >= subst_low_cuid;
9831 insn = prev_nonnote_insn (insn))
9832 ;
9833
9834 if (insn
9835 && (set = single_set (insn)) != 0
9836 && rtx_equal_p (SET_DEST (set), x))
9837 {
9838 value = SET_SRC (set);
9839
9840 /* Make sure that VALUE doesn't reference X. Replace any
9841 explicit references with a CLOBBER. If there are any remaining
9842 references (rare), don't use the value. */
9843
9844 if (reg_mentioned_p (x, value))
9845 value = replace_rtx (copy_rtx (value), x,
9846 gen_rtx (CLOBBER, GET_MODE (x), const0_rtx));
9847
9848 if (reg_overlap_mentioned_p (x, value))
9849 return 0;
9850 }
9851 else
9852 return 0;
9853 }
9854
9855 /* If the value has all its registers valid, return it. */
9856 if (get_last_value_validate (&value, reg_last_set_label[regno], 0))
9857 return value;
9858
9859 /* Otherwise, make a copy and replace any invalid register with
9860 (clobber (const_int 0)). If that fails for some reason, return 0. */
9861
9862 value = copy_rtx (value);
9863 if (get_last_value_validate (&value, reg_last_set_label[regno], 1))
9864 return value;
9865
9866 return 0;
9867}
9868\f
9869/* Return nonzero if expression X refers to a REG or to memory
9870 that is set in an instruction more recent than FROM_CUID. */
9871
9872static int
9873use_crosses_set_p (x, from_cuid)
9874 register rtx x;
9875 int from_cuid;
9876{
9877 register char *fmt;
9878 register int i;
9879 register enum rtx_code code = GET_CODE (x);
9880
9881 if (code == REG)
9882 {
9883 register int regno = REGNO (x);
9884 int endreg = regno + (regno < FIRST_PSEUDO_REGISTER
9885 ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
9886
9887#ifdef PUSH_ROUNDING
9888 /* Don't allow uses of the stack pointer to be moved,
9889 because we don't know whether the move crosses a push insn. */
9890 if (regno == STACK_POINTER_REGNUM)
9891 return 1;
9892#endif
9893 for (; regno < endreg; regno++)
9894 if (reg_last_set[regno]
9895 && INSN_CUID (reg_last_set[regno]) > from_cuid)
9896 return 1;
9897 return 0;
9898 }
9899
9900 if (code == MEM && mem_last_set > from_cuid)
9901 return 1;
9902
9903 fmt = GET_RTX_FORMAT (code);
9904
9905 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
9906 {
9907 if (fmt[i] == 'E')
9908 {
9909 register int j;
9910 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
9911 if (use_crosses_set_p (XVECEXP (x, i, j), from_cuid))
9912 return 1;
9913 }
9914 else if (fmt[i] == 'e'
9915 && use_crosses_set_p (XEXP (x, i), from_cuid))
9916 return 1;
9917 }
9918 return 0;
9919}
9920\f
9921/* Define three variables used for communication between the following
9922 routines. */
9923
9924static int reg_dead_regno, reg_dead_endregno;
9925static int reg_dead_flag;
9926
9927/* Function called via note_stores from reg_dead_at_p.
9928
9929 If DEST is within [reg_dead_regno, reg_dead_endregno), set
9930 reg_dead_flag to 1 if X is a CLOBBER and to -1 if it is a SET. */
9931
9932static void
9933reg_dead_at_p_1 (dest, x)
9934 rtx dest;
9935 rtx x;
9936{
9937 int regno, endregno;
9938
9939 if (GET_CODE (dest) != REG)
9940 return;
9941
9942 regno = REGNO (dest);
9943 endregno = regno + (regno < FIRST_PSEUDO_REGISTER
9944 ? HARD_REGNO_NREGS (regno, GET_MODE (dest)) : 1);
9945
9946 if (reg_dead_endregno > regno && reg_dead_regno < endregno)
9947 reg_dead_flag = (GET_CODE (x) == CLOBBER) ? 1 : -1;
9948}
9949
9950/* Return non-zero if REG is known to be dead at INSN.
9951
9952 We scan backwards from INSN. If we hit a REG_DEAD note or a CLOBBER
9953 referencing REG, it is dead. If we hit a SET referencing REG, it is
9954 live. Otherwise, see if it is live or dead at the start of the basic
9955 block we are in. Hard regs marked as being live in NEWPAT_USED_REGS
9956 must be assumed to be always live. */
9957
9958static int
9959reg_dead_at_p (reg, insn)
9960 rtx reg;
9961 rtx insn;
9962{
9963 int block, i;
9964
9965 /* Set variables for reg_dead_at_p_1. */
9966 reg_dead_regno = REGNO (reg);
9967 reg_dead_endregno = reg_dead_regno + (reg_dead_regno < FIRST_PSEUDO_REGISTER
9968 ? HARD_REGNO_NREGS (reg_dead_regno,
9969 GET_MODE (reg))
9970 : 1);
9971
9972 reg_dead_flag = 0;
9973
9974 /* Check that reg isn't mentioned in NEWPAT_USED_REGS. */
9975 if (reg_dead_regno < FIRST_PSEUDO_REGISTER)
9976 {
9977 for (i = reg_dead_regno; i < reg_dead_endregno; i++)
9978 if (TEST_HARD_REG_BIT (newpat_used_regs, i))
9979 return 0;
9980 }
9981
9982 /* Scan backwards until we find a REG_DEAD note, SET, CLOBBER, label, or
9983 beginning of function. */
9984 for (; insn && GET_CODE (insn) != CODE_LABEL;
9985 insn = prev_nonnote_insn (insn))
9986 {
9987 note_stores (PATTERN (insn), reg_dead_at_p_1);
9988 if (reg_dead_flag)
9989 return reg_dead_flag == 1 ? 1 : 0;
9990
9991 if (find_regno_note (insn, REG_DEAD, reg_dead_regno))
9992 return 1;
9993 }
9994
9995 /* Get the basic block number that we were in. */
9996 if (insn == 0)
9997 block = 0;
9998 else
9999 {
10000 for (block = 0; block < n_basic_blocks; block++)
10001 if (insn == basic_block_head[block])
10002 break;
10003
10004 if (block == n_basic_blocks)
10005 return 0;
10006 }
10007
10008 for (i = reg_dead_regno; i < reg_dead_endregno; i++)
10009 if (basic_block_live_at_start[block][i / REGSET_ELT_BITS]
10010 & ((REGSET_ELT_TYPE) 1 << (i % REGSET_ELT_BITS)))
10011 return 0;
10012
10013 return 1;
10014}
10015\f
10016/* Note hard registers in X that are used. This code is similar to
10017 that in flow.c, but much simpler since we don't care about pseudos. */
10018
10019static void
10020mark_used_regs_combine (x)
10021 rtx x;
10022{
10023 register RTX_CODE code = GET_CODE (x);
10024 register int regno;
10025 int i;
10026
10027 switch (code)
10028 {
10029 case LABEL_REF:
10030 case SYMBOL_REF:
10031 case CONST_INT:
10032 case CONST:
10033 case CONST_DOUBLE:
10034 case PC:
10035 case ADDR_VEC:
10036 case ADDR_DIFF_VEC:
10037 case ASM_INPUT:
10038#ifdef HAVE_cc0
10039 /* CC0 must die in the insn after it is set, so we don't need to take
10040 special note of it here. */
10041 case CC0:
10042#endif
10043 return;
10044
10045 case CLOBBER:
10046 /* If we are clobbering a MEM, mark any hard registers inside the
10047 address as used. */
10048 if (GET_CODE (XEXP (x, 0)) == MEM)
10049 mark_used_regs_combine (XEXP (XEXP (x, 0), 0));
10050 return;
10051
10052 case REG:
10053 regno = REGNO (x);
10054 /* A hard reg in a wide mode may really be multiple registers.
10055 If so, mark all of them just like the first. */
10056 if (regno < FIRST_PSEUDO_REGISTER)
10057 {
10058 /* None of this applies to the stack, frame or arg pointers */
10059 if (regno == STACK_POINTER_REGNUM
10060#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
10061 || regno == HARD_FRAME_POINTER_REGNUM
10062#endif
10063#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
10064 || (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
10065#endif
10066 || regno == FRAME_POINTER_REGNUM)
10067 return;
10068
10069 i = HARD_REGNO_NREGS (regno, GET_MODE (x));
10070 while (i-- > 0)
10071 SET_HARD_REG_BIT (newpat_used_regs, regno + i);
10072 }
10073 return;
10074
10075 case SET:
10076 {
10077 /* If setting a MEM, or a SUBREG of a MEM, then note any hard regs in
10078 the address. */
10079 register rtx testreg = SET_DEST (x);
10080
10081 while (GET_CODE (testreg) == SUBREG
10082 || GET_CODE (testreg) == ZERO_EXTRACT
10083 || GET_CODE (testreg) == SIGN_EXTRACT
10084 || GET_CODE (testreg) == STRICT_LOW_PART)
10085 testreg = XEXP (testreg, 0);
10086
10087 if (GET_CODE (testreg) == MEM)
10088 mark_used_regs_combine (XEXP (testreg, 0));
10089
10090 mark_used_regs_combine (SET_SRC (x));
10091 return;
10092 }
10093 }
10094
10095 /* Recursively scan the operands of this expression. */
10096
10097 {
10098 register char *fmt = GET_RTX_FORMAT (code);
10099
10100 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
10101 {
10102 if (fmt[i] == 'e')
10103 mark_used_regs_combine (XEXP (x, i));
10104 else if (fmt[i] == 'E')
10105 {
10106 register int j;
10107
10108 for (j = 0; j < XVECLEN (x, i); j++)
10109 mark_used_regs_combine (XVECEXP (x, i, j));
10110 }
10111 }
10112 }
10113}
10114
10115\f
10116/* Remove register number REGNO from the dead registers list of INSN.
10117
10118 Return the note used to record the death, if there was one. */
10119
10120rtx
10121remove_death (regno, insn)
10122 int regno;
10123 rtx insn;
10124{
10125 register rtx note = find_regno_note (insn, REG_DEAD, regno);
10126
10127 if (note)
10128 {
10129 reg_n_deaths[regno]--;
10130 remove_note (insn, note);
10131 }
10132
10133 return note;
10134}
10135
10136/* For each register (hardware or pseudo) used within expression X, if its
10137 death is in an instruction with cuid between FROM_CUID (inclusive) and
10138 TO_INSN (exclusive), put a REG_DEAD note for that register in the
10139 list headed by PNOTES.
10140
10141 This is done when X is being merged by combination into TO_INSN. These
10142 notes will then be distributed as needed. */
10143
10144static void
10145move_deaths (x, from_cuid, to_insn, pnotes)
10146 rtx x;
10147 int from_cuid;
10148 rtx to_insn;
10149 rtx *pnotes;
10150{
10151 register char *fmt;
10152 register int len, i;
10153 register enum rtx_code code = GET_CODE (x);
10154
10155 if (code == REG)
10156 {
10157 register int regno = REGNO (x);
10158 register rtx where_dead = reg_last_death[regno];
10159
10160 if (where_dead && INSN_CUID (where_dead) >= from_cuid
10161 && INSN_CUID (where_dead) < INSN_CUID (to_insn))
10162 {
dbc131f3 10163 rtx note = remove_death (regno, where_dead);
10164
10165 /* It is possible for the call above to return 0. This can occur
10166 when reg_last_death points to I2 or I1 that we combined with.
10167 In that case make a new note.
10168
10169 We must also check for the case where X is a hard register
10170 and NOTE is a death note for a range of hard registers
10171 including X. In that case, we must put REG_DEAD notes for
10172 the remaining registers in place of NOTE. */
10173
10174 if (note != 0 && regno < FIRST_PSEUDO_REGISTER
10175 && (GET_MODE_SIZE (GET_MODE (XEXP (note, 0)))
10176 != GET_MODE_SIZE (GET_MODE (x))))
10177 {
10178 int deadregno = REGNO (XEXP (note, 0));
10179 int deadend
10180 = (deadregno + HARD_REGNO_NREGS (deadregno,
10181 GET_MODE (XEXP (note, 0))));
10182 int ourend = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
10183 int i;
10184
10185 for (i = deadregno; i < deadend; i++)
10186 if (i < regno || i >= ourend)
10187 REG_NOTES (where_dead)
10188 = gen_rtx (EXPR_LIST, REG_DEAD,
36b878d1 10189 gen_rtx (REG, reg_raw_mode[i], i),
10190 REG_NOTES (where_dead));
10191 }
230d793d 10192
dbc131f3 10193 if (note != 0 && GET_MODE (XEXP (note, 0)) == GET_MODE (x))
10194 {
10195 XEXP (note, 1) = *pnotes;
10196 *pnotes = note;
10197 }
10198 else
10199 *pnotes = gen_rtx (EXPR_LIST, REG_DEAD, x, *pnotes);
10200
10201 reg_n_deaths[regno]++;
10202 }
10203
10204 return;
10205 }
10206
10207 else if (GET_CODE (x) == SET)
10208 {
10209 rtx dest = SET_DEST (x);
10210
10211 move_deaths (SET_SRC (x), from_cuid, to_insn, pnotes);
10212
10213 /* In the case of a ZERO_EXTRACT, a STRICT_LOW_PART, or a SUBREG
10214 that accesses one word of a multi-word item, some
10215 piece of every register in the expression is used by
10216 this insn, so remove any old death. */
10217
10218 if (GET_CODE (dest) == ZERO_EXTRACT
10219 || GET_CODE (dest) == STRICT_LOW_PART
10220 || (GET_CODE (dest) == SUBREG
10221 && (((GET_MODE_SIZE (GET_MODE (dest))
10222 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
10223 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
10224 + UNITS_PER_WORD - 1) / UNITS_PER_WORD))))
230d793d 10225 {
10226 move_deaths (dest, from_cuid, to_insn, pnotes);
10227 return;
10228 }
10229
10230 /* If this is some other SUBREG, we know it replaces the entire
10231 value, so use that as the destination. */
10232 if (GET_CODE (dest) == SUBREG)
10233 dest = SUBREG_REG (dest);
10234
10235 /* If this is a MEM, adjust deaths of anything used in the address.
10236 For a REG (the only other possibility), the entire value is
10237 being replaced so the old value is not used in this insn. */
10238
10239 if (GET_CODE (dest) == MEM)
10240 move_deaths (XEXP (dest, 0), from_cuid, to_insn, pnotes);
10241 return;
10242 }
10243
10244 else if (GET_CODE (x) == CLOBBER)
10245 return;
10246
10247 len = GET_RTX_LENGTH (code);
10248 fmt = GET_RTX_FORMAT (code);
10249
10250 for (i = 0; i < len; i++)
10251 {
10252 if (fmt[i] == 'E')
10253 {
10254 register int j;
10255 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
10256 move_deaths (XVECEXP (x, i, j), from_cuid, to_insn, pnotes);
10257 }
10258 else if (fmt[i] == 'e')
10259 move_deaths (XEXP (x, i), from_cuid, to_insn, pnotes);
10260 }
10261}
10262\f
10263/* Return 1 if X is the target of a bit-field assignment in BODY, the
10264 pattern of an insn. X must be a REG. */
10265
10266static int
10267reg_bitfield_target_p (x, body)
10268 rtx x;
10269 rtx body;
10270{
10271 int i;
10272
10273 if (GET_CODE (body) == SET)
10274 {
10275 rtx dest = SET_DEST (body);
10276 rtx target;
10277 int regno, tregno, endregno, endtregno;
10278
10279 if (GET_CODE (dest) == ZERO_EXTRACT)
10280 target = XEXP (dest, 0);
10281 else if (GET_CODE (dest) == STRICT_LOW_PART)
10282 target = SUBREG_REG (XEXP (dest, 0));
10283 else
10284 return 0;
10285
10286 if (GET_CODE (target) == SUBREG)
10287 target = SUBREG_REG (target);
10288
10289 if (GET_CODE (target) != REG)
10290 return 0;
10291
10292 tregno = REGNO (target), regno = REGNO (x);
10293 if (tregno >= FIRST_PSEUDO_REGISTER || regno >= FIRST_PSEUDO_REGISTER)
10294 return target == x;
10295
10296 endtregno = tregno + HARD_REGNO_NREGS (tregno, GET_MODE (target));
10297 endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
10298
10299 return endregno > tregno && regno < endtregno;
10300 }
10301
10302 else if (GET_CODE (body) == PARALLEL)
10303 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
a7c99304 10304 if (reg_bitfield_target_p (x, XVECEXP (body, 0, i)))
230d793d
RS
10305 return 1;
10306
10307 return 0;
10308}
10309\f
10310/* Given a chain of REG_NOTES originally from FROM_INSN, try to place them
10311 as appropriate. I3 and I2 are the insns resulting from the combination
10312 insns including FROM (I2 may be zero).
10313
10314 ELIM_I2 and ELIM_I1 are either zero or registers that we know will
10315 not need REG_DEAD notes because they are being substituted for. This
10316 saves searching in the most common cases.
10317
10318 Each note in the list is either ignored or placed on some insns, depending
10319 on the type of note. */
10320
10321static void
10322distribute_notes (notes, from_insn, i3, i2, elim_i2, elim_i1)
10323 rtx notes;
10324 rtx from_insn;
10325 rtx i3, i2;
10326 rtx elim_i2, elim_i1;
10327{
10328 rtx note, next_note;
10329 rtx tem;
10330
10331 for (note = notes; note; note = next_note)
10332 {
10333 rtx place = 0, place2 = 0;
10334
10335 /* If this NOTE references a pseudo register, ensure it references
10336 the latest copy of that register. */
10337 if (XEXP (note, 0) && GET_CODE (XEXP (note, 0)) == REG
10338 && REGNO (XEXP (note, 0)) >= FIRST_PSEUDO_REGISTER)
10339 XEXP (note, 0) = regno_reg_rtx[REGNO (XEXP (note, 0))];
10340
10341 next_note = XEXP (note, 1);
10342 switch (REG_NOTE_KIND (note))
10343 {
10344 case REG_UNUSED:
10345 /* If this note is from any insn other than i3, then we have no
10346 use for it, and must ignore it.
10347
10348 Any clobbers for i3 may still exist, and so we must process
10349 REG_UNUSED notes from that insn.
10350
10351 Any clobbers from i2 or i1 can only exist if they were added by
10352 recog_for_combine. In that case, recog_for_combine created the
10353 necessary REG_UNUSED notes. Trying to keep any original
10354 REG_UNUSED notes from these insns can cause incorrect output
10355 if it is for the same register as the original i3 dest.
10356 In that case, we will notice that the register is set in i3,
10357 and then add a REG_UNUSED note for the destination of i3, which
10358 is wrong. */
10359 if (from_insn != i3)
10360 break;
10361
10362 /* If this register is set or clobbered in I3, put the note there
10363 unless there is one already. */
176c9e6b 10364 else if (reg_set_p (XEXP (note, 0), PATTERN (i3)))
230d793d
RS
10365 {
10366 if (! (GET_CODE (XEXP (note, 0)) == REG
10367 ? find_regno_note (i3, REG_UNUSED, REGNO (XEXP (note, 0)))
10368 : find_reg_note (i3, REG_UNUSED, XEXP (note, 0))))
10369 place = i3;
10370 }
10371 /* Otherwise, if this register is used by I3, then this register
10372 now dies here, so we must put a REG_DEAD note here unless there
10373 is one already. */
10374 else if (reg_referenced_p (XEXP (note, 0), PATTERN (i3))
10375 && ! (GET_CODE (XEXP (note, 0)) == REG
10376 ? find_regno_note (i3, REG_DEAD, REGNO (XEXP (note, 0)))
10377 : find_reg_note (i3, REG_DEAD, XEXP (note, 0))))
10378 {
10379 PUT_REG_NOTE_KIND (note, REG_DEAD);
10380 place = i3;
10381 }
10382 break;
10383
10384 case REG_EQUAL:
10385 case REG_EQUIV:
10386 case REG_NONNEG:
10387 /* These notes say something about results of an insn. We can
10388 only support them if they used to be on I3 in which case they
10389 remain on I3. Otherwise they are ignored.
10390
10391 If the note refers to an expression that is not a constant, we
10392 must also ignore the note since we cannot tell whether the
10393 equivalence is still true. It might be possible to do
10394 slightly better than this (we only have a problem if I2DEST
10395 or I1DEST is present in the expression), but it doesn't
10396 seem worth the trouble. */
10397
10398 if (from_insn == i3
10399 && (XEXP (note, 0) == 0 || CONSTANT_P (XEXP (note, 0))))
10400 place = i3;
10401 break;
10402
10403 case REG_INC:
10404 case REG_NO_CONFLICT:
10405 case REG_LABEL:
10406 /* These notes say something about how a register is used. They must
10407 be present on any use of the register in I2 or I3. */
10408 if (reg_mentioned_p (XEXP (note, 0), PATTERN (i3)))
10409 place = i3;
10410
10411 if (i2 && reg_mentioned_p (XEXP (note, 0), PATTERN (i2)))
10412 {
10413 if (place)
10414 place2 = i2;
10415 else
10416 place = i2;
10417 }
10418 break;
10419
10420 case REG_WAS_0:
10421 /* It is too much trouble to try to see if this note is still
10422 correct in all situations. It is better to simply delete it. */
10423 break;
10424
10425 case REG_RETVAL:
10426 /* If the insn previously containing this note still exists,
10427 put it back where it was. Otherwise move it to the previous
10428 insn. Adjust the corresponding REG_LIBCALL note. */
10429 if (GET_CODE (from_insn) != NOTE)
10430 place = from_insn;
10431 else
10432 {
5f4f0e22 10433 tem = find_reg_note (XEXP (note, 0), REG_LIBCALL, NULL_RTX);
10434 place = prev_real_insn (from_insn);
10435 if (tem && place)
10436 XEXP (tem, 0) = place;
10437 }
10438 break;
10439
10440 case REG_LIBCALL:
10441 /* This is handled similarly to REG_RETVAL. */
10442 if (GET_CODE (from_insn) != NOTE)
10443 place = from_insn;
10444 else
10445 {
5f4f0e22 10446 tem = find_reg_note (XEXP (note, 0), REG_RETVAL, NULL_RTX);
10447 place = next_real_insn (from_insn);
10448 if (tem && place)
10449 XEXP (tem, 0) = place;
10450 }
10451 break;
10452
10453 case REG_DEAD:
10454 /* If the register is used as an input in I3, it dies there.
10455 Similarly for I2, if it is non-zero and adjacent to I3.
10456
10457 If the register is not used as an input in either I3 or I2
10458 and it is not one of the registers we were supposed to eliminate,
10459 there are two possibilities. We might have a non-adjacent I2
10460 or we might have somehow eliminated an additional register
10461 from a computation. For example, we might have had A & B where
10462 we discover that B will always be zero. In this case we will
10463 eliminate the reference to A.
10464
10465 In both cases, we must search to see if we can find a previous
10466 use of A and put the death note there. */
10467
10468 if (from_insn
10469 && GET_CODE (from_insn) == CALL_INSN
10470 && find_reg_fusage (from_insn, USE, XEXP (note, 0)))
10471 place = from_insn;
10472 else if (reg_referenced_p (XEXP (note, 0), PATTERN (i3)))
10473 place = i3;
10474 else if (i2 != 0 && next_nonnote_insn (i2) == i3
10475 && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
10476 place = i2;
10477
10478 if (XEXP (note, 0) == elim_i2 || XEXP (note, 0) == elim_i1)
10479 break;
10480
10481 /* If the register is used in both I2 and I3 and it dies in I3,
10482 we might have added another reference to it. If reg_n_refs
10483 was 2, bump it to 3. This has to be correct since the
10484 register must have been set somewhere. The reason this is
10485 done is because local-alloc.c treats 2 references as a
10486 special case. */
10487
10488 if (place == i3 && i2 != 0 && GET_CODE (XEXP (note, 0)) == REG
10489 && reg_n_refs[REGNO (XEXP (note, 0))] == 2
10490 && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
10491 reg_n_refs[REGNO (XEXP (note, 0))] = 3;
10492
10493 if (place == 0)
10494 for (tem = prev_nonnote_insn (i3);
10495 tem && (GET_CODE (tem) == INSN
10496 || GET_CODE (tem) == CALL_INSN);
10497 tem = prev_nonnote_insn (tem))
10498 {
10499 /* If the register is being set at TEM, see if that is all
10500 TEM is doing. If so, delete TEM. Otherwise, make this
10501 into a REG_UNUSED note instead. */
10502 if (reg_set_p (XEXP (note, 0), PATTERN (tem)))
10503 {
10504 rtx set = single_set (tem);
10505
10506 /* Verify that it was the set, and not a clobber that
10507 modified the register. */
10508
10509 if (set != 0 && ! side_effects_p (SET_SRC (set))
10510 && rtx_equal_p (XEXP (note, 0), SET_DEST (set)))
10511 {
10512 /* Move the notes and links of TEM elsewhere.
10513 This might delete other dead insns recursively.
10514 First set the pattern to something that won't use
10515 any register. */
10516
10517 PATTERN (tem) = pc_rtx;
10518
10519 distribute_notes (REG_NOTES (tem), tem, tem,
10520 NULL_RTX, NULL_RTX, NULL_RTX);
10521 distribute_links (LOG_LINKS (tem));
10522
10523 PUT_CODE (tem, NOTE);
10524 NOTE_LINE_NUMBER (tem) = NOTE_INSN_DELETED;
10525 NOTE_SOURCE_FILE (tem) = 0;
10526 }
10527 else
10528 {
10529 PUT_REG_NOTE_KIND (note, REG_UNUSED);
10530
10531 /* If there isn't already a REG_UNUSED note, put one
10532 here. */
10533 if (! find_regno_note (tem, REG_UNUSED,
10534 REGNO (XEXP (note, 0))))
10535 place = tem;
10536 break;
10537 }
10538 }
10539 else if (reg_referenced_p (XEXP (note, 0), PATTERN (tem))
10540 || (GET_CODE (tem) == CALL_INSN
10541 && find_reg_fusage (tem, USE, XEXP (note, 0))))
10542 {
10543 place = tem;
10544 break;
10545 }
10546 }
10547
10548 /* If the register is set or already dead at PLACE, we needn't do
10549 anything with this note if it is still a REG_DEAD note.
10550
10551 Note that we cannot use just `dead_or_set_p' here since we can
10552 convert an assignment to a register into a bit-field assignment.
10553 Therefore, we must also omit the note if the register is the
10554 target of a bitfield assignment. */
10555
10556 if (place && REG_NOTE_KIND (note) == REG_DEAD)
10557 {
10558 int regno = REGNO (XEXP (note, 0));
10559
10560 if (dead_or_set_p (place, XEXP (note, 0))
10561 || reg_bitfield_target_p (XEXP (note, 0), PATTERN (place)))
10562 {
10563 /* Unless the register previously died in PLACE, clear
10564 reg_last_death. [I no longer understand why this is
10565 being done.] */
10566 if (reg_last_death[regno] != place)
10567 reg_last_death[regno] = 0;
10568 place = 0;
10569 }
10570 else
10571 reg_last_death[regno] = place;
10572
10573 /* If this is a death note for a hard reg that is occupying
10574 multiple registers, ensure that we are still using all
10575 parts of the object. If we find a piece of the object
10576 that is unused, we must add a USE for that piece before
10577 PLACE and put the appropriate REG_DEAD note on it.
10578
10579 An alternative would be to put a REG_UNUSED for the pieces
10580 on the insn that set the register, but that can't be done if
10581 it is not in the same block. It is simpler, though less
10582 efficient, to add the USE insns. */
10583
10584 if (place && regno < FIRST_PSEUDO_REGISTER
10585 && HARD_REGNO_NREGS (regno, GET_MODE (XEXP (note, 0))) > 1)
10586 {
10587 int endregno
10588 = regno + HARD_REGNO_NREGS (regno,
10589 GET_MODE (XEXP (note, 0)));
10590 int all_used = 1;
10591 int i;
10592
10593 for (i = regno; i < endregno; i++)
10594 if (! refers_to_regno_p (i, i + 1, PATTERN (place), 0)
10595 && ! find_regno_fusage (place, USE, i))
230d793d 10596 {
485eeec4 10597 rtx piece = gen_rtx (REG, reg_raw_mode[i], i);
10598 rtx p;
10599
10600 /* See if we already placed a USE note for this
10601 register in front of PLACE. */
10602 for (p = place;
10603 GET_CODE (PREV_INSN (p)) == INSN
10604 && GET_CODE (PATTERN (PREV_INSN (p))) == USE;
10605 p = PREV_INSN (p))
10606 if (rtx_equal_p (piece,
10607 XEXP (PATTERN (PREV_INSN (p)), 0)))
10608 {
10609 p = 0;
10610 break;
10611 }
10612
10613 if (p)
10614 {
10615 rtx use_insn
10616 = emit_insn_before (gen_rtx (USE, VOIDmode,
10617 piece),
10618 p);
10619 REG_NOTES (use_insn)
10620 = gen_rtx (EXPR_LIST, REG_DEAD, piece,
10621 REG_NOTES (use_insn));
10622 }
230d793d 10623
5089e22e 10624 all_used = 0;
10625 }
10626
10627 /* Check for the case where the register dying partially
10628 overlaps the register set by this insn. */
10629 if (all_used)
10630 for (i = regno; i < endregno; i++)
10631 if (dead_or_set_regno_p (place, i))
10632 {
10633 all_used = 0;
10634 break;
10635 }
10636
                  if (! all_used)
                    {
                      /* Put only REG_DEAD notes for pieces that are
                         still used and that are not already dead or set.  */

                      for (i = regno; i < endregno; i++)
                        {
                          rtx piece = gen_rtx (REG, reg_raw_mode[i], i);

                          if (reg_referenced_p (piece, PATTERN (place))
                              && ! dead_or_set_p (place, piece)
                              && ! reg_bitfield_target_p (piece,
                                                          PATTERN (place)))
                            REG_NOTES (place) = gen_rtx (EXPR_LIST, REG_DEAD,
                                                         piece,
                                                         REG_NOTES (place));
                        }

                      place = 0;
                    }
                }
            }
          break;

        default:
          /* Any other notes should not be present at this point in the
             compilation.  */
          abort ();
        }

      if (place)
        {
          XEXP (note, 1) = REG_NOTES (place);
          REG_NOTES (place) = note;
        }
      /* The note is being discarded entirely; keep the death count for
         its register consistent.  */
      else if ((REG_NOTE_KIND (note) == REG_DEAD
                || REG_NOTE_KIND (note) == REG_UNUSED)
               && GET_CODE (XEXP (note, 0)) == REG)
        reg_n_deaths[REGNO (XEXP (note, 0))]--;

      if (place2)
        {
          /* A second copy of the note is being attached to PLACE2, so
             count the additional death.  */
          if ((REG_NOTE_KIND (note) == REG_DEAD
               || REG_NOTE_KIND (note) == REG_UNUSED)
              && GET_CODE (XEXP (note, 0)) == REG)
            reg_n_deaths[REGNO (XEXP (note, 0))]++;

          REG_NOTES (place2) = gen_rtx (GET_CODE (note), REG_NOTE_KIND (note),
                                        XEXP (note, 0), REG_NOTES (place2));
        }
    }
}
\f
/* Similarly to the above, distribute the LOG_LINKS that used to be present
   on I3, I2, and I1 to their new locations.  This is also called in one
   case to add a link pointing at I3 when I3's destination is changed.  */
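
/* For example (an invented scenario): if an insn I2 that did
   (set (reg 65) ...) was merged into I3, the link that I3 carried back to
   I2 becomes stale; it must be moved to the first insn after I2 that still
   uses reg 65 in this basic block, if there is one.  */
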
static void
distribute_links (links)
     rtx links;
{
  rtx link, next_link;

  for (link = links; link; link = next_link)
    {
      rtx place = 0;
      rtx insn;
      rtx set, reg;

      next_link = XEXP (link, 1);

      /* If the insn that this link points to is a NOTE or is not a single
         set, ignore it.  In the latter case, it isn't clear what we can do
         other than ignore the link, since we can't tell which register it
         was for.  Such links wouldn't be used by combine anyway.

         It is not possible for the destination of the target of the link to
         have been changed by combine.  The only way that could happen is if
         we were to replace I3, I2, and I1 by I3 and I2.  But in that case
         the destination of I2 would remain unchanged as well.  */

      if (GET_CODE (XEXP (link, 0)) == NOTE
          || (set = single_set (XEXP (link, 0))) == 0)
        continue;

      reg = SET_DEST (set);
      while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
             || GET_CODE (reg) == SIGN_EXTRACT
             || GET_CODE (reg) == STRICT_LOW_PART)
        reg = XEXP (reg, 0);

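      /* For instance (hypothetical RTL): if the link's target is
             (set (subreg:SI (reg:DI 70) 0) ...)
         the register actually being set is reg 70, so that is the register
         whose later uses we look for below.  */
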
      /* A LOG_LINK is defined as being placed on the first insn that uses
         a register and points to the insn that sets the register.  Start
         searching at the next insn after the target of the link and stop
         when we reach a set of the register or the end of the basic block.

         Note that this correctly handles the link that used to point from
         I3 to I2.  Also note that not much searching is typically done here
         since most links don't point very far away.  */

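      /* A sketch of one outcome (hypothetical insns): if the first later
         insn in this block that mentions REG sets it without also using it,
         reg_overlap_mentioned_p stops the walk but reg_referenced_p rejects
         the insn, so PLACE stays zero and the link is simply dropped.  */
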
      for (insn = NEXT_INSN (XEXP (link, 0));
           (insn && (this_basic_block == n_basic_blocks - 1
                     || basic_block_head[this_basic_block + 1] != insn));
           insn = NEXT_INSN (insn))
        if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
            && reg_overlap_mentioned_p (reg, PATTERN (insn)))
          {
            if (reg_referenced_p (reg, PATTERN (insn)))
              place = insn;
            break;
          }
        else if (GET_CODE (insn) == CALL_INSN
                 && find_reg_fusage (insn, USE, reg))
          {
            place = insn;
            break;
          }

      /* If we found a place to put the link, place it there unless there
         is already a link to the same insn as LINK at that point.  */

      if (place)
        {
          rtx link2;

          for (link2 = LOG_LINKS (place); link2; link2 = XEXP (link2, 1))
            if (XEXP (link2, 0) == XEXP (link, 0))
              break;

          if (link2 == 0)
            {
              XEXP (link, 1) = LOG_LINKS (place);
              LOG_LINKS (place) = link;

              /* Set added_links_insn to the earliest insn we added a
                 link to.  */
              if (added_links_insn == 0
                  || INSN_CUID (added_links_insn) > INSN_CUID (place))
                added_links_insn = place;
            }
        }
    }
}
\f
void
dump_combine_stats (file)
     FILE *file;
{
  fprintf
    (file,
     ";; Combiner statistics: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n\n",
     combine_attempts, combine_merges, combine_extras, combine_successes);
}
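
/* For illustration, the line this writes into the dump file looks like
   (figures invented):

   ;; Combiner statistics: 120 attempts, 55 substitutions (4 requiring new space),
   ;; 48 successes.  */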

void
dump_combine_total_stats (file)
     FILE *file;
{
  fprintf
    (file,
     "\n;; Combiner totals: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n",
     total_attempts, total_merges, total_extras, total_successes);
}