/* Optimize by combining instructions for GNU compiler.
   Copyright (C) 1987, 88, 92, 93, 94, 95, 1996 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */

/* This module is essentially the "combiner" phase of the U. of Arizona
   Portable Optimizer, but redone to work on our list-structured
   representation for RTL instead of their string representation.

   The LOG_LINKS of each insn identify the most recent assignment
   to each REG used in the insn.  It is a list of previous insns,
   each of which contains a SET for a REG that is used in this insn
   and not used or set in between.  LOG_LINKs never cross basic blocks.
   They were set up by the preceding pass (lifetime analysis).

   We try to combine each pair of insns joined by a logical link.
   We also try to combine triples of insns A, B and C when
   C has a link back to B and B has a link back to A.

   There are no LOG_LINKS entries for uses of CC0; none are needed,
   because the insn that sets CC0 is always immediately
   before the insn that tests it.  So we always regard a branch
   insn as having a logical link to the preceding insn.  The same is true
   for an insn explicitly using CC0.

   We check (with use_crosses_set_p) to avoid combining in such a way
   as to move a computation to a place where its value would be different.

   Combination is done by mathematically substituting the values computed
   by the previous insn(s) for the regs they set into the expressions in
   the later insns that refer to these regs.  If the result is a valid insn
   for our target machine, according to the machine description,
   we install it, delete the earlier insns, and update the data flow
   information (LOG_LINKS and REG_NOTES) for what we did.

   There are a few exceptions where the dataflow information created by
   flow.c isn't completely updated:

   - reg_live_length is not updated
   - reg_n_refs is not adjusted in the rare case when a register is
     no longer required in a computation
   - there are extremely rare cases (see distribute_regnotes) when a
     REG_DEAD note is lost
   - a LOG_LINKS entry that refers to an insn with multiple SETs may be
     removed because there is no way to know which register it was
     linking

   To simplify substitution, we combine only when the earlier insn(s)
   consist of only a single assignment.  To simplify updating afterward,
   we never combine when a subroutine call appears in the middle.

   Since we do not represent assignments to CC0 explicitly except when that
   is all an insn does, there is no LOG_LINKS entry in an insn that uses
   the condition code for the insn that set the condition code.
   Fortunately, these two insns must be consecutive.
   Therefore, every JUMP_INSN is taken to have an implicit logical link
   to the preceding insn.  This is not quite right, since non-jumps can
   also use the condition code; but in practice such insns would not
   combine anyway.  */

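/* As an illustrative sketch (the insns and register numbers here are
   hypothetical, not taken from any particular port), combining

	(set (reg 100) (plus (reg 99) (const_int 4)))
	(set (mem (reg 100)) (const_int 0))

   substitutes the source of the first SET into the second, producing

	(set (mem (plus (reg 99) (const_int 4))) (const_int 0))

   which is installed only if it matches some insn in the target's machine
   description; otherwise the substitution is undone and both insns are
   left alone.  */
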
#include "config.h"
#ifdef __STDC__
#include <stdarg.h>
#else
#include <varargs.h>
#endif

/* Must precede rtl.h for FFS.  */
#include <stdio.h>

#include "rtl.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "expr.h"
#include "basic-block.h"
#include "insn-config.h"
#include "insn-flags.h"
#include "insn-codes.h"
#include "insn-attr.h"
#include "recog.h"
#include "real.h"

/* It is not safe to use ordinary gen_lowpart in combine.
   Use gen_lowpart_for_combine instead.  See comments there.  */
#define gen_lowpart dont_use_gen_lowpart_you_dummy

/* Number of attempts to combine instructions in this function.  */

static int combine_attempts;

/* Number of attempts that got as far as substitution in this function.  */

static int combine_merges;

/* Number of instructions combined with added SETs in this function.  */

static int combine_extras;

/* Number of instructions combined in this function.  */

static int combine_successes;

/* Totals over entire compilation.  */

static int total_attempts, total_merges, total_extras, total_successes;

/* Define a default value for REVERSIBLE_CC_MODE.
   We can never assume that a condition code mode is safe to reverse unless
   the md tells us so.  */
#ifndef REVERSIBLE_CC_MODE
#define REVERSIBLE_CC_MODE(MODE) 0
#endif
\f
/* Vector mapping INSN_UIDs to cuids.
   The cuids are like uids but always increase monotonically.
   Combine always uses cuids so that it can compare them.
   But actually renumbering the uids, which we used to do,
   proves to be a bad idea because it makes it hard to compare
   the dumps produced by earlier passes with those from later passes.  */

static int *uid_cuid;
static int max_uid_cuid;

/* Get the cuid of an insn.  */

#define INSN_CUID(INSN) (INSN_UID (INSN) > max_uid_cuid \
			 ? (abort (), 0) \
			 : uid_cuid[INSN_UID (INSN)])

/* Maximum register number, which is the size of the tables below.  */

static int combine_max_regno;

/* Record last point of death of (hard or pseudo) register n.  */

static rtx *reg_last_death;

/* Record last point of modification of (hard or pseudo) register n.  */

static rtx *reg_last_set;

/* Record the cuid of the last insn that invalidated memory
   (anything that writes memory, and subroutine calls, but not pushes).  */

static int mem_last_set;

/* Record the cuid of the last CALL_INSN
   so we can tell whether a potential combination crosses any calls.  */

static int last_call_cuid;

/* When `subst' is called, this is the insn that is being modified
   (by combining in a previous insn).  The PATTERN of this insn
   is still the old pattern partially modified and it should not be
   looked at, but this may be used to examine the successors of the insn
   to judge whether a simplification is valid.  */

static rtx subst_insn;

/* This is an insn that belongs before subst_insn, but is not currently
   on the insn chain.  */

static rtx subst_prev_insn;

/* This is the lowest CUID that `subst' is currently dealing with.
   get_last_value will not return a value if the register was set at or
   after this CUID.  If not for this mechanism, we could get confused if
   I2 or I1 in try_combine were an insn that used the old value of a register
   to obtain a new value.  In that case, we might erroneously get the
   new value of the register when we wanted the old one.  */

static int subst_low_cuid;

/* This contains any hard registers that are used in newpat; reg_dead_at_p
   must consider all these registers to be always live.  */

static HARD_REG_SET newpat_used_regs;

/* This is an insn to which a LOG_LINKS entry has been added.  If this
   insn is earlier than I2 or I3, combine should rescan starting at
   that location.  */

static rtx added_links_insn;

/* This is the value of undobuf.num_undo when we started processing this
   substitution.  This will prevent gen_rtx_combine from re-using a piece
   from the previous expression.  Doing so can produce circular rtl
   structures.  */

static int previous_num_undos;

/* Basic block number of the block in which we are performing combines.  */
static int this_basic_block;
\f
/* The next group of arrays allows the recording of the last value assigned
   to (hard or pseudo) register n.  We use this information to see if an
   operation being processed is redundant given a prior operation performed
   on the register.  For example, an `and' with a constant is redundant if
   all the zero bits are already known to be turned off.

   We use an approach similar to that used by cse, but change it in the
   following ways:

   (1) We do not want to reinitialize at each label.
   (2) It is useful, but not critical, to know the actual value assigned
       to a register.  Often just its form is helpful.

   Therefore, we maintain the following arrays:

   reg_last_set_value		the last value assigned
   reg_last_set_label		records the value of label_tick when the
				register was assigned
   reg_last_set_table_tick	records the value of label_tick when a
				value using the register is assigned
   reg_last_set_invalid		set to non-zero when it is not valid
				to use the value of this register in some
				register's value

   To understand the usage of these tables, it is important to understand
   the distinction between the value in reg_last_set_value being valid
   and the register being validly contained in some other expression in the
   table.

   Entry I in reg_last_set_value is valid if it is non-zero, and either
   reg_n_sets[i] is 1 or reg_last_set_label[i] == label_tick.

   Register I may validly appear in any expression returned for the value
   of another register if reg_n_sets[i] is 1.  It may also appear in the
   value for register J if reg_last_set_label[i] < reg_last_set_label[j] or
   reg_last_set_invalid[j] is zero.

   If an expression is found in the table containing a register which may
   not validly appear in an expression, the register is replaced by
   something that won't match, (clobber (const_int 0)).

   reg_last_set_invalid[i] is set non-zero when register I is being assigned
   to and reg_last_set_table_tick[i] == label_tick.  */

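/* A hypothetical illustration of these tables: after an insn

	(set (reg 70) (and (reg 68) (const_int 255)))

   reg_last_set_value[70] holds the AND expression and reg_last_set_label[70]
   holds the current label_tick, while reg_last_set_table_tick[68] is updated
   because reg 68 now appears in a recorded value.  A later `and' of reg 70
   with 255 in the same region can then be recognized as redundant.  */
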
/* Record last value assigned to (hard or pseudo) register n.  */

static rtx *reg_last_set_value;

/* Record the value of label_tick when the value for register n is placed in
   reg_last_set_value[n].  */

static int *reg_last_set_label;

/* Record the value of label_tick when an expression involving register n
   is placed in reg_last_set_value.  */

static int *reg_last_set_table_tick;

/* Set non-zero if references to register n in expressions should not be
   used.  */

static char *reg_last_set_invalid;

/* Incremented for each label.  */

static int label_tick;

/* Some registers that are set more than once and used in more than one
   basic block are nevertheless always set in similar ways.  For example,
   a QImode register may be loaded from memory in two places on a machine
   where byte loads zero extend.

   We record in the following array what we know about the nonzero
   bits of a register, specifically which bits are known to be zero.

   If an entry is zero, it means that we don't know anything special.  */

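/* Hypothetical example: if pseudo 70 is only ever set by byte loads on a
   machine where such loads zero extend, every set of (reg 70) leaves bits
   8 and up zero, so reg_nonzero_bits[70] would be 0xff; a later
   (and (reg 70) (const_int 255)) could then be seen to be redundant.  */
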
static unsigned HOST_WIDE_INT *reg_nonzero_bits;

/* Mode used to compute significance in reg_nonzero_bits.  It is the largest
   integer mode that can fit in HOST_BITS_PER_WIDE_INT.  */

static enum machine_mode nonzero_bits_mode;

/* Nonzero if we know that a register has some leading bits that are always
   equal to the sign bit.  */

static char *reg_sign_bit_copies;

/* Nonzero when reg_nonzero_bits and reg_sign_bit_copies can be safely used.
   It is zero while computing them and after combine has completed.  The
   former test prevents propagating values based on previously set values,
   which can be incorrect if a variable is modified in a loop.  */

static int nonzero_sign_valid;

/* These arrays are maintained in parallel with reg_last_set_value
   and are used to store the mode in which the register was last set,
   the bits that were known to be zero when it was last set, and the
   number of sign bit copies it was known to have when it was last set.  */

static enum machine_mode *reg_last_set_mode;
static unsigned HOST_WIDE_INT *reg_last_set_nonzero_bits;
static char *reg_last_set_sign_bit_copies;
\f
/* Record one modification to rtl structure
   to be undone by storing old_contents into *where.
   is_int is 1 if the contents are an int.  */

struct undo
{
  int is_int;
  union {rtx r; int i;} old_contents;
  union {rtx *r; int *i;} where;
};

/* Record a bunch of changes to be undone, up to MAX_UNDO of them.
   num_undo says how many are currently recorded.

   storage is nonzero if we must undo the allocation of new storage.
   The value of storage is what to pass to obfree.

   other_insn is nonzero if we have modified some other insn in the process
   of working on subst_insn.  It must be verified too.  */

#define MAX_UNDO 50

struct undobuf
{
  int num_undo;
  char *storage;
  struct undo undo[MAX_UNDO];
  rtx other_insn;
};

static struct undobuf undobuf;

/* Substitute NEWVAL, an rtx expression, into INTO, a place in some
   insn.  The substitution can be undone by undo_all.  If INTO is already
   set to NEWVAL, do not record this change.  Because computing NEWVAL might
   also call SUBST, we have to compute it before we put anything into
   the undo table.  */

#define SUBST(INTO, NEWVAL) \
 do { rtx _new = (NEWVAL); \
      if (undobuf.num_undo < MAX_UNDO) \
	{ \
	  undobuf.undo[undobuf.num_undo].is_int = 0; \
	  undobuf.undo[undobuf.num_undo].where.r = &INTO; \
	  undobuf.undo[undobuf.num_undo].old_contents.r = INTO; \
	  INTO = _new; \
	  if (undobuf.undo[undobuf.num_undo].old_contents.r != INTO) \
	    undobuf.num_undo++; \
	} \
 } while (0)

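/* Illustrative use of SUBST (the caller and replacement value here are
   hypothetical): a simplification routine might write

	SUBST (XEXP (x, 0), new_rtx);

   to replace an operand while recording its old contents, so that undo_all
   can restore XEXP (x, 0) if the combination is later rejected.  */
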
/* Similar to SUBST, but NEWVAL is an int.  INTO will normally be an XINT
   expression.
   Note that substitution for the value of a CONST_INT is not safe.  */

#define SUBST_INT(INTO, NEWVAL) \
 do { if (undobuf.num_undo < MAX_UNDO) \
	{ \
	  undobuf.undo[undobuf.num_undo].is_int = 1; \
	  undobuf.undo[undobuf.num_undo].where.i = (int *) &INTO; \
	  undobuf.undo[undobuf.num_undo].old_contents.i = INTO; \
	  INTO = NEWVAL; \
	  if (undobuf.undo[undobuf.num_undo].old_contents.i != INTO) \
	    undobuf.num_undo++; \
	} \
 } while (0)

/* Number of times the pseudo being substituted for
   was found and replaced.  */

static int n_occurrences;

static void init_reg_last_arrays PROTO(());
static void setup_incoming_promotions PROTO(());
static void set_nonzero_bits_and_sign_copies PROTO((rtx, rtx));
static int can_combine_p PROTO((rtx, rtx, rtx, rtx, rtx *, rtx *));
static int combinable_i3pat PROTO((rtx, rtx *, rtx, rtx, int, rtx *));
static rtx try_combine PROTO((rtx, rtx, rtx));
static void undo_all PROTO((void));
static rtx *find_split_point PROTO((rtx *, rtx));
static rtx subst PROTO((rtx, rtx, rtx, int, int));
static rtx simplify_rtx PROTO((rtx, enum machine_mode, int, int));
static rtx simplify_if_then_else PROTO((rtx));
static rtx simplify_set PROTO((rtx));
static rtx simplify_logical PROTO((rtx, int));
static rtx expand_compound_operation PROTO((rtx));
static rtx expand_field_assignment PROTO((rtx));
static rtx make_extraction PROTO((enum machine_mode, rtx, int, rtx, int,
				  int, int, int));
static rtx extract_left_shift PROTO((rtx, int));
static rtx make_compound_operation PROTO((rtx, enum rtx_code));
static int get_pos_from_mask PROTO((unsigned HOST_WIDE_INT, int *));
static rtx force_to_mode PROTO((rtx, enum machine_mode,
				unsigned HOST_WIDE_INT, rtx, int));
static rtx if_then_else_cond PROTO((rtx, rtx *, rtx *));
static rtx known_cond PROTO((rtx, enum rtx_code, rtx, rtx));
static int rtx_equal_for_field_assignment_p PROTO((rtx, rtx));
static rtx make_field_assignment PROTO((rtx));
static rtx apply_distributive_law PROTO((rtx));
static rtx simplify_and_const_int PROTO((rtx, enum machine_mode, rtx,
					 unsigned HOST_WIDE_INT));
static unsigned HOST_WIDE_INT nonzero_bits PROTO((rtx, enum machine_mode));
static int num_sign_bit_copies PROTO((rtx, enum machine_mode));
static int merge_outer_ops PROTO((enum rtx_code *, HOST_WIDE_INT *,
				  enum rtx_code, HOST_WIDE_INT,
				  enum machine_mode, int *));
static rtx simplify_shift_const PROTO((rtx, enum rtx_code, enum machine_mode,
				       rtx, int));
static int recog_for_combine PROTO((rtx *, rtx, rtx *, int *));
static rtx gen_lowpart_for_combine PROTO((enum machine_mode, rtx));
static rtx gen_rtx_combine PVPROTO((enum rtx_code code, enum machine_mode mode,
				    ...));
static rtx gen_binary PROTO((enum rtx_code, enum machine_mode,
			     rtx, rtx));
static rtx gen_unary PROTO((enum rtx_code, enum machine_mode,
			    enum machine_mode, rtx));
static enum rtx_code simplify_comparison PROTO((enum rtx_code, rtx *, rtx *));
static int reversible_comparison_p PROTO((rtx));
static void update_table_tick PROTO((rtx));
static void record_value_for_reg PROTO((rtx, rtx, rtx));
static void record_dead_and_set_regs_1 PROTO((rtx, rtx));
static void record_dead_and_set_regs PROTO((rtx));
static int get_last_value_validate PROTO((rtx *, int, int));
static rtx get_last_value PROTO((rtx));
static int use_crosses_set_p PROTO((rtx, int));
static void reg_dead_at_p_1 PROTO((rtx, rtx));
static int reg_dead_at_p PROTO((rtx, rtx));
static void move_deaths PROTO((rtx, int, rtx, rtx *));
static int reg_bitfield_target_p PROTO((rtx, rtx));
static void distribute_notes PROTO((rtx, rtx, rtx, rtx, rtx, rtx));
static void distribute_links PROTO((rtx));
static void mark_used_regs_combine PROTO((rtx));
\f
/* Main entry point for combiner.  F is the first insn of the function.
   NREGS is the first unused pseudo-reg number.  */

void
combine_instructions (f, nregs)
     rtx f;
     int nregs;
{
  register rtx insn, next, prev;
  register int i;
  register rtx links, nextlinks;

  combine_attempts = 0;
  combine_merges = 0;
  combine_extras = 0;
  combine_successes = 0;
  undobuf.num_undo = previous_num_undos = 0;

  combine_max_regno = nregs;

  reg_nonzero_bits
    = (unsigned HOST_WIDE_INT *) alloca (nregs * sizeof (HOST_WIDE_INT));
  reg_sign_bit_copies = (char *) alloca (nregs * sizeof (char));

  bzero ((char *) reg_nonzero_bits, nregs * sizeof (HOST_WIDE_INT));
  bzero (reg_sign_bit_copies, nregs * sizeof (char));

  reg_last_death = (rtx *) alloca (nregs * sizeof (rtx));
  reg_last_set = (rtx *) alloca (nregs * sizeof (rtx));
  reg_last_set_value = (rtx *) alloca (nregs * sizeof (rtx));
  reg_last_set_table_tick = (int *) alloca (nregs * sizeof (int));
  reg_last_set_label = (int *) alloca (nregs * sizeof (int));
  reg_last_set_invalid = (char *) alloca (nregs * sizeof (char));
  reg_last_set_mode
    = (enum machine_mode *) alloca (nregs * sizeof (enum machine_mode));
  reg_last_set_nonzero_bits
    = (unsigned HOST_WIDE_INT *) alloca (nregs * sizeof (HOST_WIDE_INT));
  reg_last_set_sign_bit_copies
    = (char *) alloca (nregs * sizeof (char));

  init_reg_last_arrays ();

  init_recog_no_volatile ();

  /* Compute maximum uid value so uid_cuid can be allocated.  */

  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    if (INSN_UID (insn) > i)
      i = INSN_UID (insn);

  uid_cuid = (int *) alloca ((i + 1) * sizeof (int));
  max_uid_cuid = i;

  nonzero_bits_mode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);

  /* Don't use reg_nonzero_bits when computing it.  This can cause problems
     when, for example, we have j <<= 1 in a loop.  */

  nonzero_sign_valid = 0;

  /* Compute the mapping from uids to cuids.
     Cuids are numbers assigned to insns, like uids,
     except that cuids increase monotonically through the code.

     Scan all SETs and see if we can deduce anything about what
     bits are known to be zero for some registers and how many copies
     of the sign bit are known to exist for those registers.

     Also set any known values so that we can use them while searching
     for what bits are known to be set.  */

  label_tick = 1;

  /* We need to initialize it here, because record_dead_and_set_regs may call
     get_last_value.  */
  subst_prev_insn = NULL_RTX;

  setup_incoming_promotions ();

  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    {
      uid_cuid[INSN_UID (insn)] = ++i;
      subst_low_cuid = i;
      subst_insn = insn;

      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	{
	  note_stores (PATTERN (insn), set_nonzero_bits_and_sign_copies);
	  record_dead_and_set_regs (insn);
	}

      if (GET_CODE (insn) == CODE_LABEL)
	label_tick++;
    }

  nonzero_sign_valid = 1;

  /* Now scan all the insns in forward order.  */

  this_basic_block = -1;
  label_tick = 1;
  last_call_cuid = 0;
  mem_last_set = 0;
  init_reg_last_arrays ();
  setup_incoming_promotions ();

  for (insn = f; insn; insn = next ? next : NEXT_INSN (insn))
    {
      next = 0;

      /* If INSN starts a new basic block, update our basic block number.  */
      if (this_basic_block + 1 < n_basic_blocks
	  && basic_block_head[this_basic_block + 1] == insn)
	this_basic_block++;

      if (GET_CODE (insn) == CODE_LABEL)
	label_tick++;

      else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	{
	  /* Try this insn with each insn it links back to.  */

	  for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
	    if ((next = try_combine (insn, XEXP (links, 0), NULL_RTX)) != 0)
	      goto retry;

	  /* Try each sequence of three linked insns ending with this one.  */

	  for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
	    for (nextlinks = LOG_LINKS (XEXP (links, 0)); nextlinks;
		 nextlinks = XEXP (nextlinks, 1))
	      if ((next = try_combine (insn, XEXP (links, 0),
				       XEXP (nextlinks, 0))) != 0)
		goto retry;

#ifdef HAVE_cc0
	  /* Try to combine a jump insn that uses CC0
	     with a preceding insn that sets CC0, and maybe with its
	     logical predecessor as well.
	     This is how we make decrement-and-branch insns.
	     We need this special code because data flow connections
	     via CC0 do not get entered in LOG_LINKS.  */

	  if (GET_CODE (insn) == JUMP_INSN
	      && (prev = prev_nonnote_insn (insn)) != 0
	      && GET_CODE (prev) == INSN
	      && sets_cc0_p (PATTERN (prev)))
	    {
	      if ((next = try_combine (insn, prev, NULL_RTX)) != 0)
		goto retry;

	      for (nextlinks = LOG_LINKS (prev); nextlinks;
		   nextlinks = XEXP (nextlinks, 1))
		if ((next = try_combine (insn, prev,
					 XEXP (nextlinks, 0))) != 0)
		  goto retry;
	    }

	  /* Do the same for an insn that explicitly references CC0.  */
	  if (GET_CODE (insn) == INSN
	      && (prev = prev_nonnote_insn (insn)) != 0
	      && GET_CODE (prev) == INSN
	      && sets_cc0_p (PATTERN (prev))
	      && GET_CODE (PATTERN (insn)) == SET
	      && reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (insn))))
	    {
	      if ((next = try_combine (insn, prev, NULL_RTX)) != 0)
		goto retry;

	      for (nextlinks = LOG_LINKS (prev); nextlinks;
		   nextlinks = XEXP (nextlinks, 1))
		if ((next = try_combine (insn, prev,
					 XEXP (nextlinks, 0))) != 0)
		  goto retry;
	    }

	  /* Finally, see if any of the insns that this insn links to
	     explicitly references CC0.  If so, try this insn, that insn,
	     and its predecessor if it sets CC0.  */
	  for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
	    if (GET_CODE (XEXP (links, 0)) == INSN
		&& GET_CODE (PATTERN (XEXP (links, 0))) == SET
		&& reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (XEXP (links, 0))))
		&& (prev = prev_nonnote_insn (XEXP (links, 0))) != 0
		&& GET_CODE (prev) == INSN
		&& sets_cc0_p (PATTERN (prev))
		&& (next = try_combine (insn, XEXP (links, 0), prev)) != 0)
	      goto retry;
#endif

	  /* Try combining an insn with two different insns whose results it
	     uses.  */
	  for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
	    for (nextlinks = XEXP (links, 1); nextlinks;
		 nextlinks = XEXP (nextlinks, 1))
	      if ((next = try_combine (insn, XEXP (links, 0),
				       XEXP (nextlinks, 0))) != 0)
		goto retry;

	  if (GET_CODE (insn) != NOTE)
	    record_dead_and_set_regs (insn);

	retry:
	  ;
	}
    }

  total_attempts += combine_attempts;
  total_merges += combine_merges;
  total_extras += combine_extras;
  total_successes += combine_successes;

  nonzero_sign_valid = 0;
}

/* Wipe the reg_last_xxx arrays in preparation for another pass.  */

static void
init_reg_last_arrays ()
{
  int nregs = combine_max_regno;

  bzero ((char *) reg_last_death, nregs * sizeof (rtx));
  bzero ((char *) reg_last_set, nregs * sizeof (rtx));
  bzero ((char *) reg_last_set_value, nregs * sizeof (rtx));
  bzero ((char *) reg_last_set_table_tick, nregs * sizeof (int));
  bzero ((char *) reg_last_set_label, nregs * sizeof (int));
  bzero (reg_last_set_invalid, nregs * sizeof (char));
  bzero ((char *) reg_last_set_mode, nregs * sizeof (enum machine_mode));
  bzero ((char *) reg_last_set_nonzero_bits, nregs * sizeof (HOST_WIDE_INT));
  bzero (reg_last_set_sign_bit_copies, nregs * sizeof (char));
}
\f
/* Set up any promoted values for incoming argument registers.  */

static void
setup_incoming_promotions ()
{
#ifdef PROMOTE_FUNCTION_ARGS
  int regno;
  rtx reg;
  enum machine_mode mode;
  int unsignedp;
  rtx first = get_insns ();

  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if (FUNCTION_ARG_REGNO_P (regno)
	&& (reg = promoted_input_arg (regno, &mode, &unsignedp)) != 0)
      record_value_for_reg (reg, first,
			    gen_rtx (unsignedp ? ZERO_EXTEND : SIGN_EXTEND,
				     GET_MODE (reg),
				     gen_rtx (CLOBBER, mode, const0_rtx)));
#endif
}
\f
/* Called via note_stores.  If X is a pseudo that is narrower than
   HOST_BITS_PER_WIDE_INT and is being set, record what bits are known zero.

   If we are setting only a portion of X and we can't figure out what
   portion, assume all bits will be used since we don't know what will
   be happening.

   Similarly, set how many bits of X are known to be copies of the sign bit
   at all locations in the function.  This is the smallest number implied
   by any set of X.  */

static void
set_nonzero_bits_and_sign_copies (x, set)
     rtx x;
     rtx set;
{
  int num;

  if (GET_CODE (x) == REG
      && REGNO (x) >= FIRST_PSEUDO_REGISTER
      /* If this register is undefined at the start of the file, we can't
	 say what its contents were.  */
      && ! (basic_block_live_at_start[0][REGNO (x) / REGSET_ELT_BITS]
	    & ((REGSET_ELT_TYPE) 1 << (REGNO (x) % REGSET_ELT_BITS)))
      && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (set) == CLOBBER)
	{
	  reg_nonzero_bits[REGNO (x)] = GET_MODE_MASK (GET_MODE (x));
	  reg_sign_bit_copies[REGNO (x)] = 0;
	  return;
	}

      /* If this is a complex assignment, see if we can convert it into a
	 simple assignment.  */
      set = expand_field_assignment (set);

      /* If this is a simple assignment, or we have a paradoxical SUBREG,
	 set what we know about X.  */

      if (SET_DEST (set) == x
	  || (GET_CODE (SET_DEST (set)) == SUBREG
	      && (GET_MODE_SIZE (GET_MODE (SET_DEST (set)))
		  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (set)))))
	      && SUBREG_REG (SET_DEST (set)) == x))
	{
	  rtx src = SET_SRC (set);

#ifdef SHORT_IMMEDIATES_SIGN_EXTEND
	  /* If X is narrower than a word and SRC is a non-negative
	     constant that would appear negative in the mode of X,
	     sign-extend it for use in reg_nonzero_bits because some
	     machines (maybe most) will actually do the sign-extension
	     and this is the conservative approach.

	     ??? For 2.5, try to tighten up the MD files in this regard
	     instead of this kludge.  */

	  if (GET_MODE_BITSIZE (GET_MODE (x)) < BITS_PER_WORD
	      && GET_CODE (src) == CONST_INT
	      && INTVAL (src) > 0
	      && 0 != (INTVAL (src)
		       & ((HOST_WIDE_INT) 1
			  << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
	    src = GEN_INT (INTVAL (src)
			   | ((HOST_WIDE_INT) (-1)
			      << GET_MODE_BITSIZE (GET_MODE (x))));
#endif

	  reg_nonzero_bits[REGNO (x)]
	    |= nonzero_bits (src, nonzero_bits_mode);
	  num = num_sign_bit_copies (SET_SRC (set), GET_MODE (x));
	  if (reg_sign_bit_copies[REGNO (x)] == 0
	      || reg_sign_bit_copies[REGNO (x)] > num)
	    reg_sign_bit_copies[REGNO (x)] = num;
	}
      else
	{
	  reg_nonzero_bits[REGNO (x)] = GET_MODE_MASK (GET_MODE (x));
	  reg_sign_bit_copies[REGNO (x)] = 0;
	}
    }
}
\f
/* See if INSN can be combined into I3.  PRED and SUCC are optionally
   insns that were previously combined into I3 or that will be combined
   into the merger of INSN and I3.

   Return 0 if the combination is not allowed for any reason.

   If the combination is allowed, *PDEST will be set to the single
   destination of INSN and *PSRC to the single source, and this function
   will return 1.  */

static int
can_combine_p (insn, i3, pred, succ, pdest, psrc)
     rtx insn;
     rtx i3;
     rtx pred, succ;
     rtx *pdest, *psrc;
{
  int i;
  rtx set = 0, src, dest;
  rtx p, link;
  int all_adjacent = (succ ? (next_active_insn (insn) == succ
			      && next_active_insn (succ) == i3)
		      : next_active_insn (insn) == i3);

  /* Can combine only if previous insn is a SET of a REG, a SUBREG or CC0,
     or a PARALLEL consisting of such a SET and CLOBBERs.

     If INSN has CLOBBER parallel parts, ignore them for our processing.
     By definition, these happen during the execution of the insn.  When it
     is merged with another insn, all bets are off.  If they are, in fact,
     needed and aren't also supplied in I3, they may be added by
     recog_for_combine.  Otherwise, it won't match.

     We can also ignore a SET whose SET_DEST is mentioned in a REG_UNUSED
     note.

     Get the source and destination of INSN.  If more than one, can't
     combine.  */

  if (GET_CODE (PATTERN (insn)) == SET)
    set = PATTERN (insn);
  else if (GET_CODE (PATTERN (insn)) == PARALLEL
	   && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
    {
      for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
	{
	  rtx elt = XVECEXP (PATTERN (insn), 0, i);

	  switch (GET_CODE (elt))
	    {
	    /* We can ignore CLOBBERs.  */
	    case CLOBBER:
	      break;

	    case SET:
	      /* Ignore SETs whose result isn't used but not those that
		 have side-effects.  */
	      if (find_reg_note (insn, REG_UNUSED, SET_DEST (elt))
		  && ! side_effects_p (elt))
		break;

	      /* If we have already found a SET, this is a second one and
		 so we cannot combine with this insn.  */
	      if (set)
		return 0;

	      set = elt;
	      break;

	    default:
	      /* Anything else means we can't combine.  */
	      return 0;
	    }
	}

      if (set == 0
	  /* If SET_SRC is an ASM_OPERANDS we can't throw away these CLOBBERs,
	     so don't do anything with it.  */
	  || GET_CODE (SET_SRC (set)) == ASM_OPERANDS)
	return 0;
    }
  else
    return 0;

  if (set == 0)
    return 0;

  set = expand_field_assignment (set);
  src = SET_SRC (set), dest = SET_DEST (set);

  /* Don't eliminate a store in the stack pointer.  */
  if (dest == stack_pointer_rtx
      /* If we couldn't eliminate a field assignment, we can't combine.  */
      || GET_CODE (dest) == ZERO_EXTRACT || GET_CODE (dest) == STRICT_LOW_PART
      /* Don't combine with an insn that sets a register to itself if it has
	 a REG_EQUAL note.  This may be part of a REG_NO_CONFLICT sequence.  */
      || (rtx_equal_p (src, dest) && find_reg_note (insn, REG_EQUAL, NULL_RTX))
      /* Can't merge a function call.  */
      || GET_CODE (src) == CALL
      /* Don't eliminate a function call argument.  */
      || (GET_CODE (i3) == CALL_INSN
	  && (find_reg_fusage (i3, USE, dest)
	      || (GET_CODE (dest) == REG
		  && REGNO (dest) < FIRST_PSEUDO_REGISTER
		  && global_regs[REGNO (dest)])))
      /* Don't substitute into an incremented register.  */
      || FIND_REG_INC_NOTE (i3, dest)
      || (succ && FIND_REG_INC_NOTE (succ, dest))
      /* Don't combine the end of a libcall into anything.  */
      || find_reg_note (insn, REG_RETVAL, NULL_RTX)
      /* Make sure that DEST is not used after SUCC but before I3.  */
      || (succ && ! all_adjacent
	  && reg_used_between_p (dest, succ, i3))
      /* Make sure that the value that is to be substituted for the register
	 does not use any registers whose values alter in between.  However,
	 if the insns are adjacent, a use can't cross a set even though we
	 think it might (this can happen for a sequence of insns each setting
	 the same destination; reg_last_set of that register might point to
	 a NOTE).  If INSN has a REG_EQUIV note, the register is always
	 equivalent to the memory so the substitution is valid even if there
	 are intervening stores.  Also, don't move a volatile asm or
	 UNSPEC_VOLATILE across any other insns.  */
      || (! all_adjacent
	  && (((GET_CODE (src) != MEM
		|| ! find_reg_note (insn, REG_EQUIV, src))
	       && use_crosses_set_p (src, INSN_CUID (insn)))
	      || (GET_CODE (src) == ASM_OPERANDS && MEM_VOLATILE_P (src))
	      || GET_CODE (src) == UNSPEC_VOLATILE))
      /* If there is a REG_NO_CONFLICT note for DEST in I3 or SUCC, we get
	 better register allocation by not doing the combine.  */
      || find_reg_note (i3, REG_NO_CONFLICT, dest)
      || (succ && find_reg_note (succ, REG_NO_CONFLICT, dest))
      /* Don't combine across a CALL_INSN, because that would possibly
	 change whether the life span of some REGs crosses calls or not,
	 and it is a pain to update that information.
	 Exception: if source is a constant, moving it later can't hurt.
	 Accept that special case, because it helps -fforce-addr a lot.  */
      || (INSN_CUID (insn) < last_call_cuid && ! CONSTANT_P (src)))
    return 0;

  /* DEST must either be a REG or CC0.  */
  if (GET_CODE (dest) == REG)
    {
      /* If register alignment is being enforced for multi-word items in all
	 cases except for parameters, it is possible to have a register copy
	 insn referencing a hard register that is not allowed to contain the
	 mode being copied and which would not be valid as an operand of most
	 insns.  Eliminate this problem by not combining with such an insn.

	 Also, on some machines we don't want to extend the life of a hard
	 register.  */

      if (GET_CODE (src) == REG
	  && ((REGNO (dest) < FIRST_PSEUDO_REGISTER
	       && ! HARD_REGNO_MODE_OK (REGNO (dest), GET_MODE (dest)))
	      /* Don't extend the life of a hard register unless it is
		 a user variable (if we have few registers) or it can't
		 fit into the desired register (meaning something special
		 is going on).  */
	      || (REGNO (src) < FIRST_PSEUDO_REGISTER
		  && (! HARD_REGNO_MODE_OK (REGNO (src), GET_MODE (src))
#ifdef SMALL_REGISTER_CLASSES
		      || ! REG_USERVAR_P (src)
#endif
		      ))))
	return 0;
    }
  else if (GET_CODE (dest) != CC0)
    return 0;

  /* Don't substitute for a register intended as a clobberable operand.
     Similarly, don't substitute an expression containing a register that
     will be clobbered in I3.  */
  if (GET_CODE (PATTERN (i3)) == PARALLEL)
    for (i = XVECLEN (PATTERN (i3), 0) - 1; i >= 0; i--)
      if (GET_CODE (XVECEXP (PATTERN (i3), 0, i)) == CLOBBER
	  && (reg_overlap_mentioned_p (XEXP (XVECEXP (PATTERN (i3), 0, i), 0),
				       src)
	      || rtx_equal_p (XEXP (XVECEXP (PATTERN (i3), 0, i), 0), dest)))
	return 0;

  /* If INSN contains anything volatile, or is an `asm' (whether volatile
     or not), reject, unless nothing volatile comes between it and I3,
     with the exception of SUCC.  */

  if (GET_CODE (src) == ASM_OPERANDS || volatile_refs_p (src))
    for (p = NEXT_INSN (insn); p != i3; p = NEXT_INSN (p))
      if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
	  && p != succ && volatile_refs_p (PATTERN (p)))
	return 0;

  /* If there are any volatile insns between INSN and I3, reject, because
     they might affect machine state.  */

  for (p = NEXT_INSN (insn); p != i3; p = NEXT_INSN (p))
    if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
	&& p != succ && volatile_insn_p (PATTERN (p)))
      return 0;

  /* If INSN or I2 contains an autoincrement or autodecrement,
     make sure that register is not used between there and I3,
     and not already used in I3 either.
     Also insist that I3 not be a jump; if it were one
     and the incremented register were spilled, we would lose.  */

#ifdef AUTO_INC_DEC
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == REG_INC
	&& (GET_CODE (i3) == JUMP_INSN
	    || reg_used_between_p (XEXP (link, 0), insn, i3)
	    || reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i3))))
      return 0;
#endif

#ifdef HAVE_cc0
  /* Don't combine an insn that follows a CC0-setting insn.
     An insn that uses CC0 must not be separated from the one that sets it.
     We do, however, allow I2 to follow a CC0-setting insn if that insn
     is passed as I1; in that case it will be deleted also.
     We also allow combining in this case if all the insns are adjacent
     because that would leave the two CC0 insns adjacent as well.
     It would be more logical to test whether CC0 occurs inside I1 or I2,
     but that would be much slower, and this ought to be equivalent.  */

  p = prev_nonnote_insn (insn);
  if (p && p != pred && GET_CODE (p) == INSN && sets_cc0_p (PATTERN (p))
      && ! all_adjacent)
    return 0;
#endif

  /* If we get here, we have passed all the tests and the combination is
     to be allowed.  */

  *pdest = dest;
  *psrc = src;

  return 1;
}
\f
/* LOC is the location within I3 that contains its pattern or the component
   of a PARALLEL of the pattern.  We validate that it is valid for combining.

   One problem is that if I3 modifies its output, as opposed to replacing it
   entirely, we can't allow the output to contain I2DEST or I1DEST, as doing
   so would produce an insn that is not equivalent to the original insns.

   Consider:

	 (set (reg:DI 101) (reg:DI 100))
	 (set (subreg:SI (reg:DI 101) 0) <foo>)

   This is NOT equivalent to:

	 (parallel [(set (subreg:SI (reg:DI 100) 0) <foo>)
		    (set (reg:DI 101) (reg:DI 100))])

   Not only does this modify 100 (in which case it might still be valid
   if 100 were dead in I2), it sets 101 to the ORIGINAL value of 100.

   We can also run into a problem if I2 sets a register that I1
   uses and I1 gets directly substituted into I3 (not via I2).  In that
   case, we would be getting the wrong value of I2DEST into I3, so we
   must reject the combination.  This case occurs when I2 and I1 both
   feed into I3, rather than when I1 feeds into I2, which feeds into I3.
   If I1_NOT_IN_SRC is non-zero, it means that finding I1 in the source
   of a SET must prevent combination from occurring.

   On machines where SMALL_REGISTER_CLASSES is defined, we don't combine
   if the destination of a SET is a hard register that isn't a user
   variable.

   Before doing the above check, we first try to expand a field assignment
   into a set of logical operations.

   If PI3_DEST_KILLED is non-zero, it is a pointer to a location in which
   we place a register that is both set and used within I3.  If more than one
   such register is detected, we fail.

   Return 1 if the combination is valid, zero otherwise.  */

static int
combinable_i3pat (i3, loc, i2dest, i1dest, i1_not_in_src, pi3dest_killed)
     rtx i3;
     rtx *loc;
     rtx i2dest;
     rtx i1dest;
     int i1_not_in_src;
     rtx *pi3dest_killed;
{
  rtx x = *loc;

  if (GET_CODE (x) == SET)
    {
      rtx set = expand_field_assignment (x);
      rtx dest = SET_DEST (set);
      rtx src = SET_SRC (set);
      rtx inner_dest = dest, inner_src = src;

      SUBST (*loc, set);

      while (GET_CODE (inner_dest) == STRICT_LOW_PART
	     || GET_CODE (inner_dest) == SUBREG
	     || GET_CODE (inner_dest) == ZERO_EXTRACT)
	inner_dest = XEXP (inner_dest, 0);

      /* We probably don't need this any more now that LIMIT_RELOAD_CLASS
	 was added.  */
#if 0
      while (GET_CODE (inner_src) == STRICT_LOW_PART
	     || GET_CODE (inner_src) == SUBREG
	     || GET_CODE (inner_src) == ZERO_EXTRACT)
	inner_src = XEXP (inner_src, 0);

      /* If it is better that two different modes keep two different pseudos,
	 avoid combining them.  This avoids producing the following pattern
	 on a 386:
	  (set (subreg:SI (reg/v:QI 21) 0)
	       (lshiftrt:SI (reg/v:SI 20)
		   (const_int 24)))
	 If that were made, reload could not handle the pair of
	 reg 20/21, since it would try to get any GENERAL_REGS
	 but some of them don't handle QImode.  */

      if (rtx_equal_p (inner_src, i2dest)
	  && GET_CODE (inner_dest) == REG
	  && ! MODES_TIEABLE_P (GET_MODE (i2dest), GET_MODE (inner_dest)))
	return 0;
#endif

      /* Check for the case where I3 modifies its output, as
	 discussed above.  */
      if ((inner_dest != dest
	   && (reg_overlap_mentioned_p (i2dest, inner_dest)
	       || (i1dest && reg_overlap_mentioned_p (i1dest, inner_dest))))
	  /* This is the same test done in can_combine_p except that we
	     allow a hard register with SMALL_REGISTER_CLASSES if SRC is a
	     CALL operation.  */
	  || (GET_CODE (inner_dest) == REG
	      && REGNO (inner_dest) < FIRST_PSEUDO_REGISTER
	      && (! HARD_REGNO_MODE_OK (REGNO (inner_dest),
					GET_MODE (inner_dest))
#ifdef SMALL_REGISTER_CLASSES
		  || (GET_CODE (src) != CALL && ! REG_USERVAR_P (inner_dest))
#endif
		  ))
	  || (i1_not_in_src && reg_overlap_mentioned_p (i1dest, src)))
	return 0;

      /* If DEST is used in I3, it is being killed in this insn,
	 so record that for later.
	 Never add REG_DEAD notes for the FRAME_POINTER_REGNUM or the
	 STACK_POINTER_REGNUM, since these are always considered to be
	 live.  Similarly for ARG_POINTER_REGNUM if it is fixed.  */
      if (pi3dest_killed && GET_CODE (dest) == REG
	  && reg_referenced_p (dest, PATTERN (i3))
	  && REGNO (dest) != FRAME_POINTER_REGNUM
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
	  && REGNO (dest) != HARD_FRAME_POINTER_REGNUM
#endif
#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
	  && (REGNO (dest) != ARG_POINTER_REGNUM
	      || ! fixed_regs [REGNO (dest)])
#endif
	  && REGNO (dest) != STACK_POINTER_REGNUM)
	{
	  if (*pi3dest_killed)
	    return 0;

	  *pi3dest_killed = dest;
	}
    }

  else if (GET_CODE (x) == PARALLEL)
    {
      int i;

      for (i = 0; i < XVECLEN (x, 0); i++)
	if (! combinable_i3pat (i3, &XVECEXP (x, 0, i), i2dest, i1dest,
				i1_not_in_src, pi3dest_killed))
	  return 0;
    }

  return 1;
}
\f
/* Try to combine the insns I1 and I2 into I3.
   Here I1 and I2 appear earlier than I3.
   I1 can be zero; then we combine just I2 into I3.

   If we are combining three insns and the resulting insn is not recognized,
   try splitting it into two insns.  If that happens, I2 and I3 are retained
   and I1 is pseudo-deleted by turning it into a NOTE.  Otherwise, I1 and I2
   are pseudo-deleted.

   Return 0 if the combination does not work.  Then nothing is changed.
   If we did the combination, return the insn at which combine should
   resume scanning.  */

static rtx
try_combine (i3, i2, i1)
     register rtx i3, i2, i1;
{
  /* New patterns for I3 and I2, respectively.  */
  rtx newpat, newi2pat = 0;
  /* Indicates need to preserve SET in I1 or I2 in I3 if it is not dead.  */
  int added_sets_1, added_sets_2;
  /* Total number of SETs to put into I3.  */
  int total_sets;
  /* Nonzero if I2's body now appears in I3.  */
  int i2_is_used;
  /* INSN_CODEs for new I3, new I2, and user of condition code.  */
  int insn_code_number, i2_code_number, other_code_number;
  /* Contains I3 if the destination of I3 is used in its source, which means
     that the old life of I3 is being killed.  If that usage is placed into
     I2 and not in I3, a REG_DEAD note must be made.  */
  rtx i3dest_killed = 0;
  /* SET_DEST and SET_SRC of I2 and I1.  */
  rtx i2dest, i2src, i1dest = 0, i1src = 0;
  /* PATTERN (I2), or a copy of it in certain cases.  */
  rtx i2pat;
  /* Indicates if I2DEST or I1DEST is in I2SRC or I1SRC.  */
  int i2dest_in_i2src = 0, i1dest_in_i1src = 0, i2dest_in_i1src = 0;
  int i1_feeds_i3 = 0;
  /* Notes that must be added to REG_NOTES in I3 and I2.  */
  rtx new_i3_notes, new_i2_notes;
  /* Nonzero if we substituted I3 into I2 instead of the normal case.  */
  int i3_subst_into_i2 = 0;
  /* Nonzero if I1, I2 or I3 contains a MULT operation.  */
  int have_mult = 0;
  /* Number of clobbers of SCRATCH we had to add.  */
  int i3_scratches = 0, i2_scratches = 0, other_scratches = 0;

  int maxreg;
  rtx temp;
  register rtx link;
  int i;

  /* If any of I1, I2, and I3 isn't really an insn, we can't do anything.
     This can occur when flow deletes an insn that it has merged into an
     auto-increment address.  We also can't do anything if I3 has a
     REG_LIBCALL note since we don't want to disrupt the contiguity of a
     libcall.  */

  if (GET_RTX_CLASS (GET_CODE (i3)) != 'i'
      || GET_RTX_CLASS (GET_CODE (i2)) != 'i'
      || (i1 && GET_RTX_CLASS (GET_CODE (i1)) != 'i')
      || find_reg_note (i3, REG_LIBCALL, NULL_RTX))
    return 0;

  combine_attempts++;

  undobuf.num_undo = previous_num_undos = 0;
  undobuf.other_insn = 0;

  /* Save the current high-water-mark so we can free storage if we didn't
     accept this combination.  */
  undobuf.storage = (char *) oballoc (0);

  /* Reset the hard register usage information.  */
  CLEAR_HARD_REG_SET (newpat_used_regs);

  /* If I1 and I2 both feed I3, they can be in any order.  To simplify the
     code below, set I1 to be the earlier of the two insns.  */
  if (i1 && INSN_CUID (i1) > INSN_CUID (i2))
    temp = i1, i1 = i2, i2 = temp;

  added_links_insn = 0;

  /* First check for one important special-case that the code below will
     not handle.  Namely, the case where I1 is zero, I2 has multiple sets,
     and I3 is a SET whose SET_SRC is a SET_DEST in I2.  In that case,
     we may be able to replace that destination with the destination of I3.
     This occurs in the common code where we compute both a quotient and
     remainder into a structure, in which case we want to do the computation
     directly into the structure to avoid register-register copies.

     We make very conservative checks below and only try to handle the
     most common cases of this.  For example, we only handle the case
     where I2 and I3 are adjacent to avoid making difficult register
     usage tests.  */

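  /* As a hypothetical sketch of that special case (register numbers and
     modes invented for illustration), I2 and I3 might look like:

	I2: (parallel [(set (reg 100) (div:SI (reg 98) (reg 99)))
		       (set (reg 101) (mod:SI (reg 98) (reg 99)))])
	I3: (set (mem:SI (reg 102)) (reg 101))

     with (reg 101) dying in I3; we then replace (reg 101) in I2's PARALLEL
     with (mem:SI (reg 102)) and use that PARALLEL as the new pattern.  */
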
  if (i1 == 0 && GET_CODE (i3) == INSN && GET_CODE (PATTERN (i3)) == SET
      && GET_CODE (SET_SRC (PATTERN (i3))) == REG
      && REGNO (SET_SRC (PATTERN (i3))) >= FIRST_PSEUDO_REGISTER
#ifdef SMALL_REGISTER_CLASSES
      && (GET_CODE (SET_DEST (PATTERN (i3))) != REG
	  || REGNO (SET_DEST (PATTERN (i3))) >= FIRST_PSEUDO_REGISTER
	  || REG_USERVAR_P (SET_DEST (PATTERN (i3))))
#endif
      && find_reg_note (i3, REG_DEAD, SET_SRC (PATTERN (i3)))
      && GET_CODE (PATTERN (i2)) == PARALLEL
      && ! side_effects_p (SET_DEST (PATTERN (i3)))
      /* If the dest of I3 is a ZERO_EXTRACT or STRICT_LOW_PART, the code
	 below would need to check what is inside (and reg_overlap_mentioned_p
	 doesn't support those codes anyway).  Don't allow those destinations;
	 the resulting insn isn't likely to be recognized anyway.  */
      && GET_CODE (SET_DEST (PATTERN (i3))) != ZERO_EXTRACT
      && GET_CODE (SET_DEST (PATTERN (i3))) != STRICT_LOW_PART
      && ! reg_overlap_mentioned_p (SET_SRC (PATTERN (i3)),
				    SET_DEST (PATTERN (i3)))
      && next_real_insn (i2) == i3)
    {
      rtx p2 = PATTERN (i2);

      /* Make sure that the destination of I3,
	 which we are going to substitute into one output of I2,
	 is not used within another output of I2.  We must avoid making this:
	 (parallel [(set (mem (reg 69)) ...)
		    (set (reg 69) ...)])
	 which is not well-defined as to order of actions.
	 (Besides, reload can't handle output reloads for this.)

	 The problem can also happen if the dest of I3 is a memory ref,
	 if another dest in I2 is an indirect memory ref.  */
      for (i = 0; i < XVECLEN (p2, 0); i++)
	if (GET_CODE (XVECEXP (p2, 0, i)) == SET
	    && reg_overlap_mentioned_p (SET_DEST (PATTERN (i3)),
					SET_DEST (XVECEXP (p2, 0, i))))
	  break;

      if (i == XVECLEN (p2, 0))
	for (i = 0; i < XVECLEN (p2, 0); i++)
	  if (SET_DEST (XVECEXP (p2, 0, i)) == SET_SRC (PATTERN (i3)))
	    {
	      combine_merges++;

	      subst_insn = i3;
	      subst_low_cuid = INSN_CUID (i2);

	      added_sets_2 = added_sets_1 = 0;
	      i2dest = SET_SRC (PATTERN (i3));

	      /* Replace the dest in I2 with our dest and make the resulting
		 insn the new pattern for I3.  Then skip to where we
		 validate the pattern.  Everything was set up above.  */
	      SUBST (SET_DEST (XVECEXP (p2, 0, i)),
		     SET_DEST (PATTERN (i3)));

	      newpat = p2;
	      i3_subst_into_i2 = 1;
	      goto validate_replacement;
	    }
    }

#ifndef HAVE_cc0
  /* If we have no I1 and I2 looks like:
	(parallel [(set (reg:CC X) (compare:CC OP (const_int 0)))
		   (set Y OP)])
     make up a dummy I1 that is
	(set Y OP)
     and change I2 to be
	(set (reg:CC X) (compare:CC Y (const_int 0)))

     (We can ignore any trailing CLOBBERs.)

     This undoes a previous combination and allows us to match a branch-and-
     decrement insn.  */

  if (i1 == 0 && GET_CODE (PATTERN (i2)) == PARALLEL
      && XVECLEN (PATTERN (i2), 0) >= 2
      && GET_CODE (XVECEXP (PATTERN (i2), 0, 0)) == SET
      && (GET_MODE_CLASS (GET_MODE (SET_DEST (XVECEXP (PATTERN (i2), 0, 0))))
	  == MODE_CC)
      && GET_CODE (SET_SRC (XVECEXP (PATTERN (i2), 0, 0))) == COMPARE
      && XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 1) == const0_rtx
      && GET_CODE (XVECEXP (PATTERN (i2), 0, 1)) == SET
      && GET_CODE (SET_DEST (XVECEXP (PATTERN (i2), 0, 1))) == REG
      && rtx_equal_p (XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 0),
		      SET_SRC (XVECEXP (PATTERN (i2), 0, 1))))
    {
      for (i = XVECLEN (PATTERN (i2), 0) - 1; i >= 2; i--)
	if (GET_CODE (XVECEXP (PATTERN (i2), 0, i)) != CLOBBER)
	  break;

      if (i == 1)
	{
	  /* We make I1 with the same INSN_UID as I2.  This gives it
	     the same INSN_CUID for value tracking.  Our fake I1 will
	     never appear in the insn stream so giving it the same INSN_UID
	     as I2 will not cause a problem.  */

	  subst_prev_insn = i1
	    = gen_rtx (INSN, VOIDmode, INSN_UID (i2), 0, i2,
		       XVECEXP (PATTERN (i2), 0, 1), -1, 0, 0);

	  SUBST (PATTERN (i2), XVECEXP (PATTERN (i2), 0, 0));
	  SUBST (XEXP (SET_SRC (PATTERN (i2)), 0),
		 SET_DEST (PATTERN (i1)));
	}
    }
#endif
1380
1381 /* Verify that I2 and I1 are valid for combining. */
5f4f0e22
CH
1382 if (! can_combine_p (i2, i3, i1, NULL_RTX, &i2dest, &i2src)
1383 || (i1 && ! can_combine_p (i1, i3, NULL_RTX, i2, &i1dest, &i1src)))
230d793d
RS
1384 {
1385 undo_all ();
1386 return 0;
1387 }
1388
1389 /* Record whether I2DEST is used in I2SRC and similarly for the other
1390 cases. Knowing this will help in register status updating below. */
1391 i2dest_in_i2src = reg_overlap_mentioned_p (i2dest, i2src);
1392 i1dest_in_i1src = i1 && reg_overlap_mentioned_p (i1dest, i1src);
1393 i2dest_in_i1src = i1 && reg_overlap_mentioned_p (i2dest, i1src);
1394
916f14f1 1395 /* See if I1 directly feeds into I3. It does if I1DEST is not used
230d793d
RS
1396 in I2SRC. */
1397 i1_feeds_i3 = i1 && ! reg_overlap_mentioned_p (i1dest, i2src);
1398
1399 /* Ensure that I3's pattern can be the destination of combines. */
1400 if (! combinable_i3pat (i3, &PATTERN (i3), i2dest, i1dest,
1401 i1 && i2dest_in_i1src && i1_feeds_i3,
1402 &i3dest_killed))
1403 {
1404 undo_all ();
1405 return 0;
1406 }
1407
df7d75de
RK
1408 /* See if any of the insns is a MULT operation. Unless one is, we will
1409 reject a combination that is, since it must be slower. Be conservative
1410 here. */
1411 if (GET_CODE (i2src) == MULT
1412 || (i1 != 0 && GET_CODE (i1src) == MULT)
1413 || (GET_CODE (PATTERN (i3)) == SET
1414 && GET_CODE (SET_SRC (PATTERN (i3))) == MULT))
1415 have_mult = 1;
1416
230d793d
RS
1417 /* If I3 has an inc, then give up if I1 or I2 uses the reg that is inc'd.
1418 We used to do this EXCEPT in one case: I3 has a post-inc in an
1419 output operand. However, that exception can give rise to insns like
1420 mov r3,(r3)+
1421 which is a famous insn on the PDP-11 where the value of r3 used as the
5089e22e 1422 source was model-dependent. Avoid this sort of thing. */
230d793d
RS
1423
1424#if 0
1425 if (!(GET_CODE (PATTERN (i3)) == SET
1426 && GET_CODE (SET_SRC (PATTERN (i3))) == REG
1427 && GET_CODE (SET_DEST (PATTERN (i3))) == MEM
1428 && (GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_INC
1429 || GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_DEC)))
1430 /* It's not the exception. */
1431#endif
1432#ifdef AUTO_INC_DEC
1433 for (link = REG_NOTES (i3); link; link = XEXP (link, 1))
1434 if (REG_NOTE_KIND (link) == REG_INC
1435 && (reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i2))
1436 || (i1 != 0
1437 && reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i1)))))
1438 {
1439 undo_all ();
1440 return 0;
1441 }
1442#endif
1443
1444 /* See if the SETs in I1 or I2 need to be kept around in the merged
1445 instruction: whenever the value set there is still needed past I3.
1446 For the SETs in I2, this is easy: we see if I2DEST dies or is set in I3.
1447
1448 For the SET in I1, we have two cases: If I1 and I2 independently
1449 feed into I3, the set in I1 needs to be kept around if I1DEST dies
1450 or is set in I3. Otherwise (if I1 feeds I2 which feeds I3), the set
1451 in I1 needs to be kept around unless I1DEST dies or is set in either
1452 I2 or I3. We can distinguish these cases by seeing if I2SRC mentions
1453 I1DEST. If so, we know I1 feeds into I2. */
1454
1455 added_sets_2 = ! dead_or_set_p (i3, i2dest);
1456
1457 added_sets_1
1458 = i1 && ! (i1_feeds_i3 ? dead_or_set_p (i3, i1dest)
1459 : (dead_or_set_p (i3, i1dest) || dead_or_set_p (i2, i1dest)));
1460
1461 /* If the set in I2 needs to be kept around, we must make a copy of
1462 PATTERN (I2), so that when we substitute I1SRC for I1DEST in
1463 PATTERN (I2), we are only substituting for the original I1DEST, not into
1464 an already-substituted copy. This also prevents making self-referential
1465 rtx. If I2 is a PARALLEL, we just need the piece that assigns I2SRC to
1466 I2DEST. */
1467
1468 i2pat = (GET_CODE (PATTERN (i2)) == PARALLEL
1469 ? gen_rtx (SET, VOIDmode, i2dest, i2src)
1470 : PATTERN (i2));
1471
1472 if (added_sets_2)
1473 i2pat = copy_rtx (i2pat);
1474
1475 combine_merges++;
1476
1477 /* Substitute in the latest insn for the regs set by the earlier ones. */
1478
1479 maxreg = max_reg_num ();
1480
1481 subst_insn = i3;
1482
1483 /* It is possible that the source of I2 or I1 may be performing an
1484 unneeded operation, such as a ZERO_EXTEND of something that is known
1485 to have the high part zero. Handle that case by letting subst look at
1486 the innermost one of them.
1487
1488 Another way to do this would be to have a function that tries to
1489 simplify a single insn instead of merging two or more insns. We don't
1490 do this because of the potential of infinite loops and because
1491 of the potential extra memory required. However, doing it the way
1492 we are is a bit of a kludge and doesn't catch all cases.
1493
1494 But only do this if -fexpensive-optimizations since it slows things down
1495 and doesn't usually win. */
1496
1497 if (flag_expensive_optimizations)
1498 {
1499 /* Pass pc_rtx so no substitutions are done, just simplifications.
1500 The cases that we are interested in here do not involve the few
1501 cases where is_replaced is checked. */
1502 if (i1)
1503 {
1504 subst_low_cuid = INSN_CUID (i1);
1505 i1src = subst (i1src, pc_rtx, pc_rtx, 0, 0);
1506 }
1507 else
1508 {
1509 subst_low_cuid = INSN_CUID (i2);
1510 i2src = subst (i2src, pc_rtx, pc_rtx, 0, 0);
1511 }
1512
1513 previous_num_undos = undobuf.num_undo;
1514 }
1515
1516#ifndef HAVE_cc0
1517 /* Many machines that don't use CC0 have insns that can both perform an
1518 arithmetic operation and set the condition code. These operations will
1519 be represented as a PARALLEL with the first element of the vector
1520 being a COMPARE of an arithmetic operation with the constant zero.
1521 The second element of the vector will set some pseudo to the result
1522 of the same arithmetic operation. If we simplify the COMPARE, we won't
1523 match such a pattern and so will generate an extra insn. Here we test
1524 for this case, where both the comparison and the operation result are
1525 needed, and make the PARALLEL by just replacing I2DEST in I3SRC with
1526 I2SRC. Later we will make the PARALLEL that contains I2. */
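/* Illustrative sketch, not in the original source (register numbers
   are made up): on such a machine an add that also sets the condition
   code looks like

	(parallel [(set (reg:CC 70)
			(compare:CC (plus (reg 71) (reg 72))
				    (const_int 0)))
		   (set (reg 73) (plus (reg 71) (reg 72)))])

   so if I3 is (set (reg:CC 70) (compare:CC (reg 73) (const_int 0)))
   and the sum in (reg 73) is still needed, we substitute the PLUS into
   the COMPARE instead of letting it be simplified away.  */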
1527
1528 if (i1 == 0 && added_sets_2 && GET_CODE (PATTERN (i3)) == SET
1529 && GET_CODE (SET_SRC (PATTERN (i3))) == COMPARE
1530 && XEXP (SET_SRC (PATTERN (i3)), 1) == const0_rtx
1531 && rtx_equal_p (XEXP (SET_SRC (PATTERN (i3)), 0), i2dest))
1532 {
1533 rtx *cc_use;
1534 enum machine_mode compare_mode;
1535
1536 newpat = PATTERN (i3);
1537 SUBST (XEXP (SET_SRC (newpat), 0), i2src);
1538
1539 i2_is_used = 1;
1540
1541#ifdef EXTRA_CC_MODES
1542 /* See if a COMPARE with the operand we substituted in should be done
1543 with the mode that is currently being used. If not, do the same
1544 processing we do in `subst' for a SET; namely, if the destination
1545 is used only once, try to replace it with a register of the proper
1546 mode and also replace the COMPARE. */
1547 if (undobuf.other_insn == 0
1548 && (cc_use = find_single_use (SET_DEST (newpat), i3,
1549 &undobuf.other_insn))
1550 && ((compare_mode = SELECT_CC_MODE (GET_CODE (*cc_use),
1551 i2src, const0_rtx))
1552 != GET_MODE (SET_DEST (newpat))))
1553 {
1554 int regno = REGNO (SET_DEST (newpat));
1555 rtx new_dest = gen_rtx (REG, compare_mode, regno);
1556
1557 if (regno < FIRST_PSEUDO_REGISTER
1558 || (reg_n_sets[regno] == 1 && ! added_sets_2
1559 && ! REG_USERVAR_P (SET_DEST (newpat))))
1560 {
1561 if (regno >= FIRST_PSEUDO_REGISTER)
1562 SUBST (regno_reg_rtx[regno], new_dest);
1563
1564 SUBST (SET_DEST (newpat), new_dest);
1565 SUBST (XEXP (*cc_use, 0), new_dest);
1566 SUBST (SET_SRC (newpat),
1567 gen_rtx_combine (COMPARE, compare_mode,
1568 i2src, const0_rtx));
1569 }
1570 else
1571 undobuf.other_insn = 0;
1572 }
1573#endif
1574 }
1575 else
1576#endif
1577 {
1578 n_occurrences = 0; /* `subst' counts here */
1579
1580 /* If I1 feeds into I2 (not into I3) and I1DEST is in I1SRC, we
1581 need to make a unique copy of I2SRC each time we substitute it
1582 to avoid self-referential rtl. */
1583
1584 subst_low_cuid = INSN_CUID (i2);
1585 newpat = subst (PATTERN (i3), i2dest, i2src, 0,
1586 ! i1_feeds_i3 && i1dest_in_i1src);
1587 previous_num_undos = undobuf.num_undo;
1588
1589 /* Record whether i2's body now appears within i3's body. */
1590 i2_is_used = n_occurrences;
1591 }
1592
1593 /* If we already got a failure, don't try to do more. Otherwise,
1594 try to substitute in I1 if we have it. */
1595
1596 if (i1 && GET_CODE (newpat) != CLOBBER)
1597 {
1598 /* Before we can do this substitution, we must redo the test done
1599 above (see detailed comments there) that ensures that I1DEST
1600 isn't mentioned in any SETs in NEWPAT that are field assignments. */
1601
1602 if (! combinable_i3pat (NULL_RTX, &newpat, i1dest, NULL_RTX,
1603 0, NULL_PTR))
1604 {
1605 undo_all ();
1606 return 0;
1607 }
1608
1609 n_occurrences = 0;
1610 subst_low_cuid = INSN_CUID (i1);
1611 newpat = subst (newpat, i1dest, i1src, 0, 0);
1612 previous_num_undos = undobuf.num_undo;
1613 }
1614
1615 /* Fail if an autoincrement side-effect has been duplicated. Be careful
1616 to count all the ways that I2SRC and I1SRC can be used. */
1617 if ((FIND_REG_INC_NOTE (i2, NULL_RTX) != 0
1618 && i2_is_used + added_sets_2 > 1)
1619 || (i1 != 0 && FIND_REG_INC_NOTE (i1, NULL_RTX) != 0
1620 && (n_occurrences + added_sets_1 + (added_sets_2 && ! i1_feeds_i3)
1621 > 1))
1622 /* Fail if we tried to make a new register (we used to abort, but there's
1623 really no reason to). */
1624 || max_reg_num () != maxreg
1625 /* Fail if we couldn't do something and have a CLOBBER. */
1626 || GET_CODE (newpat) == CLOBBER
1627 /* Fail if this new pattern is a MULT and we didn't have one before
1628 at the outer level. */
1629 || (GET_CODE (newpat) == SET && GET_CODE (SET_SRC (newpat)) == MULT
1630 && ! have_mult))
1631 {
1632 undo_all ();
1633 return 0;
1634 }
1635
1636 /* If the actions of the earlier insns must be kept
1637 in addition to substituting them into the latest one,
1638 we must make a new PARALLEL for the latest insn
1639 to hold the additional SETs. */
1640
1641 if (added_sets_1 || added_sets_2)
1642 {
1643 combine_extras++;
1644
1645 if (GET_CODE (newpat) == PARALLEL)
1646 {
1647 rtvec old = XVEC (newpat, 0);
1648 total_sets = XVECLEN (newpat, 0) + added_sets_1 + added_sets_2;
1649 newpat = gen_rtx (PARALLEL, VOIDmode, rtvec_alloc (total_sets));
1650 bcopy ((char *) &old->elem[0], (char *) &XVECEXP (newpat, 0, 0),
1651 sizeof (old->elem[0]) * old->num_elem);
1652 }
1653 else
1654 {
1655 rtx old = newpat;
1656 total_sets = 1 + added_sets_1 + added_sets_2;
1657 newpat = gen_rtx (PARALLEL, VOIDmode, rtvec_alloc (total_sets));
1658 XVECEXP (newpat, 0, 0) = old;
1659 }
1660
1661 if (added_sets_1)
1662 XVECEXP (newpat, 0, --total_sets)
1663 = (GET_CODE (PATTERN (i1)) == PARALLEL
1664 ? gen_rtx (SET, VOIDmode, i1dest, i1src) : PATTERN (i1));
1665
1666 if (added_sets_2)
1667 {
1668 /* If there is no I1, use I2's body as is. We used to also not do
1669 the subst call below if I2 was substituted into I3,
1670 but that could lose a simplification. */
1671 if (i1 == 0)
1672 XVECEXP (newpat, 0, --total_sets) = i2pat;
1673 else
1674 /* See comment where i2pat is assigned. */
1675 XVECEXP (newpat, 0, --total_sets)
1676 = subst (i2pat, i1dest, i1src, 0, 0);
1677 }
1678 }
1679
1680 /* We come here when we are replacing a destination in I2 with the
1681 destination of I3. */
1682 validate_replacement:
1683
1684 /* Note which hard regs this insn has as inputs. */
1685 mark_used_regs_combine (newpat);
1686
1687 /* Is the result of combination a valid instruction? */
1688 insn_code_number
1689 = recog_for_combine (&newpat, i3, &new_i3_notes, &i3_scratches);
1690
1691 /* If the result isn't valid, see if it is a PARALLEL of two SETs where
1692 the second SET's destination is a register that is unused. In that case,
1693 we just need the first SET. This can occur when simplifying a divmod
1694 insn. We *must* test for this case here because the code below that
1695 splits two independent SETs doesn't handle this case correctly when it
1696 updates the register status. Also check the case where the first
1697 SET's destination is unused. That would not cause incorrect code, but
1698 does cause an unneeded insn to remain. */
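/* Illustrative sketch, not in the original source (register numbers
   are made up): combining into a divmod insn can leave

	(parallel [(set (reg 70) (div:SI (reg 71) (reg 72)))
		   (set (reg 73) (mod:SI (reg 71) (reg 72)))])

   and if I3 has a REG_UNUSED note for (reg 73), only the first SET
   is needed, so we try to recognize it alone.  */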
1699
1700 if (insn_code_number < 0 && GET_CODE (newpat) == PARALLEL
1701 && XVECLEN (newpat, 0) == 2
1702 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
1703 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
1704 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) == REG
1705 && find_reg_note (i3, REG_UNUSED, SET_DEST (XVECEXP (newpat, 0, 1)))
1706 && ! side_effects_p (SET_SRC (XVECEXP (newpat, 0, 1)))
1707 && asm_noperands (newpat) < 0)
1708 {
1709 newpat = XVECEXP (newpat, 0, 0);
1710 insn_code_number
1711 = recog_for_combine (&newpat, i3, &new_i3_notes, &i3_scratches);
1712 }
1713
1714 else if (insn_code_number < 0 && GET_CODE (newpat) == PARALLEL
1715 && XVECLEN (newpat, 0) == 2
1716 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
1717 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
1718 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) == REG
1719 && find_reg_note (i3, REG_UNUSED, SET_DEST (XVECEXP (newpat, 0, 0)))
1720 && ! side_effects_p (SET_SRC (XVECEXP (newpat, 0, 0)))
1721 && asm_noperands (newpat) < 0)
1722 {
1723 newpat = XVECEXP (newpat, 0, 1);
1724 insn_code_number
1725 = recog_for_combine (&newpat, i3, &new_i3_notes, &i3_scratches);
1726 }
1727
1728 /* If we were combining three insns and the result is a simple SET
1729 with no ASM_OPERANDS that wasn't recognized, try to split it into two
1730 insns. There are two ways to do this. It can be split using a
1731 machine-specific method (like when you have an addition of a large
1732 constant) or by combine in the function find_split_point. */
1733
1734 if (i1 && insn_code_number < 0 && GET_CODE (newpat) == SET
1735 && asm_noperands (newpat) < 0)
1736 {
1737 rtx m_split, *split;
1738 rtx ni2dest = i2dest;
1739
1740 /* See if the MD file can split NEWPAT. If it can't, see if letting it
1741 use I2DEST as a scratch register will help. In the latter case,
1742 convert I2DEST to the mode of the source of NEWPAT if we can. */
1743
1744 m_split = split_insns (newpat, i3);
1745
1746 /* We can only use I2DEST as a scratch reg if it doesn't overlap any
1747 inputs of NEWPAT. */
1748
1749 /* ??? If I2DEST is not safe, and I1DEST exists, then it would be
1750 possible to try that as a scratch reg. This would require adding
1751 more code to make it work though. */
1752
1753 if (m_split == 0 && ! reg_overlap_mentioned_p (ni2dest, newpat))
1754 {
1755 /* If I2DEST is a hard register or the only use of a pseudo,
1756 we can change its mode. */
1757 if (GET_MODE (SET_DEST (newpat)) != GET_MODE (i2dest)
1758 && GET_MODE (SET_DEST (newpat)) != VOIDmode
1759 && GET_CODE (i2dest) == REG
1760 && (REGNO (i2dest) < FIRST_PSEUDO_REGISTER
1761 || (reg_n_sets[REGNO (i2dest)] == 1 && ! added_sets_2
1762 && ! REG_USERVAR_P (i2dest))))
1763 ni2dest = gen_rtx (REG, GET_MODE (SET_DEST (newpat)),
1764 REGNO (i2dest));
1765
1766 m_split = split_insns (gen_rtx (PARALLEL, VOIDmode,
1767 gen_rtvec (2, newpat,
1768 gen_rtx (CLOBBER,
1769 VOIDmode,
1770 ni2dest))),
1771 i3);
1772 }
1773
1774 if (m_split && GET_CODE (m_split) == SEQUENCE
1775 && XVECLEN (m_split, 0) == 2
1776 && (next_real_insn (i2) == i3
1777 || ! use_crosses_set_p (PATTERN (XVECEXP (m_split, 0, 0)),
1778 INSN_CUID (i2))))
1779 {
1780 rtx i2set, i3set;
1781 rtx newi3pat = PATTERN (XVECEXP (m_split, 0, 1));
1782 newi2pat = PATTERN (XVECEXP (m_split, 0, 0));
1783
1784 i3set = single_set (XVECEXP (m_split, 0, 1));
1785 i2set = single_set (XVECEXP (m_split, 0, 0));
1786
1787 /* In case we changed the mode of I2DEST, replace it in the
1788 pseudo-register table here. We can't do it above in case this
1789 code doesn't get executed and we do a split the other way. */
1790
1791 if (REGNO (i2dest) >= FIRST_PSEUDO_REGISTER)
1792 SUBST (regno_reg_rtx[REGNO (i2dest)], ni2dest);
1793
1794 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes,
1795 &i2_scratches);
1796
1797 /* If I2 or I3 has multiple SETs, we won't know how to track
1798 register status, so don't use these insns. */
1799
1800 if (i2_code_number >= 0 && i2set && i3set)
1801 insn_code_number = recog_for_combine (&newi3pat, i3, &new_i3_notes,
1802 &i3_scratches);
1803 if (insn_code_number >= 0)
1804 newpat = newi3pat;
1805
1806 /* It is possible that both insns now set the destination of I3.
1807 If so, we must show an extra use of it. */
1808
1809 if (insn_code_number >= 0)
1810 {
1811 rtx new_i3_dest = SET_DEST (i3set);
1812 rtx new_i2_dest = SET_DEST (i2set);
1813
1814 while (GET_CODE (new_i3_dest) == ZERO_EXTRACT
1815 || GET_CODE (new_i3_dest) == STRICT_LOW_PART
1816 || GET_CODE (new_i3_dest) == SUBREG)
1817 new_i3_dest = XEXP (new_i3_dest, 0);
1818
1819 if (GET_CODE (new_i3_dest) == REG
1820 && GET_CODE (new_i2_dest) == REG
1821 && REGNO (new_i3_dest) == REGNO (new_i2_dest))
1822 reg_n_sets[REGNO (SET_DEST (i2set))]++;
1823 }
1824 }
1825
1826 /* If we can split it and use I2DEST, go ahead and see if that
1827 helps things be recognized. Verify that none of the registers
1828 are set between I2 and I3. */
1829 if (insn_code_number < 0 && (split = find_split_point (&newpat, i3)) != 0
1830#ifdef HAVE_cc0
1831 && GET_CODE (i2dest) == REG
1832#endif
1833 /* We need I2DEST in the proper mode. If it is a hard register
1834 or the only use of a pseudo, we can change its mode. */
1835 && (GET_MODE (*split) == GET_MODE (i2dest)
1836 || GET_MODE (*split) == VOIDmode
1837 || REGNO (i2dest) < FIRST_PSEUDO_REGISTER
1838 || (reg_n_sets[REGNO (i2dest)] == 1 && ! added_sets_2
1839 && ! REG_USERVAR_P (i2dest)))
1840 && (next_real_insn (i2) == i3
1841 || ! use_crosses_set_p (*split, INSN_CUID (i2)))
1842 /* We can't overwrite I2DEST if its value is still used by
1843 NEWPAT. */
1844 && ! reg_referenced_p (i2dest, newpat))
1845 {
1846 rtx newdest = i2dest;
1847 enum rtx_code split_code = GET_CODE (*split);
1848 enum machine_mode split_mode = GET_MODE (*split);
1849
1850 /* Get NEWDEST as a register in the proper mode. We have already
1851 validated that we can do this. */
1852 if (GET_MODE (i2dest) != split_mode && split_mode != VOIDmode)
1853 {
1854 newdest = gen_rtx (REG, split_mode, REGNO (i2dest));
1855
1856 if (REGNO (i2dest) >= FIRST_PSEUDO_REGISTER)
1857 SUBST (regno_reg_rtx[REGNO (i2dest)], newdest);
1858 }
1859
1860 /* If *SPLIT is a (mult FOO (const_int pow2)), convert it to
1861 an ASHIFT. This can occur if it was inside a PLUS and hence
1862 appeared to be a memory address. This is a kludge. */
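/* Illustrative sketch, not in the original source (register number is
   made up): within an address, (mult (reg 70) (const_int 4)) scales an
   index and looks cheap, but as a standalone insn it is rewritten here
   as (ashift (reg 70) (const_int 2)), since 4 == 1 << 2.  */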
1863 if (split_code == MULT
1864 && GET_CODE (XEXP (*split, 1)) == CONST_INT
1865 && (i = exact_log2 (INTVAL (XEXP (*split, 1)))) >= 0)
1866 {
1867 SUBST (*split, gen_rtx_combine (ASHIFT, split_mode,
1868 XEXP (*split, 0), GEN_INT (i)));
1869 /* Update split_code because we may not have a multiply
1870 anymore. */
1871 split_code = GET_CODE (*split);
1872 }
1873
1874#ifdef INSN_SCHEDULING
1875 /* If *SPLIT is a paradoxical SUBREG, when we split it, it should
1876 be written as a ZERO_EXTEND. */
1877 if (split_code == SUBREG && GET_CODE (SUBREG_REG (*split)) == MEM)
1878 SUBST (*split, gen_rtx_combine (ZERO_EXTEND, split_mode,
1879 XEXP (*split, 0)));
1880#endif
1881
1882 newi2pat = gen_rtx_combine (SET, VOIDmode, newdest, *split);
1883 SUBST (*split, newdest);
1884 i2_code_number
1885 = recog_for_combine (&newi2pat, i2, &new_i2_notes, &i2_scratches);
1886
1887 /* If the split point was a MULT and we didn't have one before,
1888 don't use one now. */
1889 if (i2_code_number >= 0 && ! (split_code == MULT && ! have_mult))
1890 insn_code_number
1891 = recog_for_combine (&newpat, i3, &new_i3_notes, &i3_scratches);
1892 }
1893 }
1894
1895 /* Check for a case where we loaded from memory in a narrow mode and
1896 then sign extended it, but we need both registers. In that case,
1897 we have a PARALLEL with both loads from the same memory location.
1898 We can split this into a load from memory followed by a register-register
1899 copy. This saves at least one insn, more if register allocation can
1900 eliminate the copy.
1901
1902 We cannot do this if the destination of the second assignment is
1903 a register that we have already assumed is zero-extended. Similarly
1904 for a SUBREG of such a register. */
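/* Illustrative sketch, not in the original source (register numbers
   are made up): the PARALLEL in question looks like

	(parallel [(set (reg 70) (sign_extend:SI (mem:HI (reg 71))))
		   (set (reg 72) (mem:HI (reg 71)))])

   and is split below into the load

	(set (reg 70) (sign_extend:SI (mem:HI (reg 71))))

   followed by a copy of the low part of (reg 70) into (reg 72).  */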
1905
1906 else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0
1907 && GET_CODE (newpat) == PARALLEL
1908 && XVECLEN (newpat, 0) == 2
1909 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
1910 && GET_CODE (SET_SRC (XVECEXP (newpat, 0, 0))) == SIGN_EXTEND
1911 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
1912 && rtx_equal_p (SET_SRC (XVECEXP (newpat, 0, 1)),
1913 XEXP (SET_SRC (XVECEXP (newpat, 0, 0)), 0))
1914 && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 1)),
1915 INSN_CUID (i2))
1916 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT
1917 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART
1918 && ! (temp = SET_DEST (XVECEXP (newpat, 0, 1)),
1919 (GET_CODE (temp) == REG
1920 && reg_nonzero_bits[REGNO (temp)] != 0
1921 && GET_MODE_BITSIZE (GET_MODE (temp)) < BITS_PER_WORD
1922 && GET_MODE_BITSIZE (GET_MODE (temp)) < HOST_BITS_PER_INT
1923 && (reg_nonzero_bits[REGNO (temp)]
1924 != GET_MODE_MASK (word_mode))))
1925 && ! (GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) == SUBREG
1926 && (temp = SUBREG_REG (SET_DEST (XVECEXP (newpat, 0, 1))),
1927 (GET_CODE (temp) == REG
1928 && reg_nonzero_bits[REGNO (temp)] != 0
1929 && GET_MODE_BITSIZE (GET_MODE (temp)) < BITS_PER_WORD
1930 && GET_MODE_BITSIZE (GET_MODE (temp)) < HOST_BITS_PER_INT
1931 && (reg_nonzero_bits[REGNO (temp)]
1932 != GET_MODE_MASK (word_mode)))))
1933 && ! reg_overlap_mentioned_p (SET_DEST (XVECEXP (newpat, 0, 1)),
1934 SET_SRC (XVECEXP (newpat, 0, 1)))
1935 && ! find_reg_note (i3, REG_UNUSED,
1936 SET_DEST (XVECEXP (newpat, 0, 0))))
1937 {
1938 rtx ni2dest;
1939
1940 newi2pat = XVECEXP (newpat, 0, 0);
1941 ni2dest = SET_DEST (XVECEXP (newpat, 0, 0));
1942 newpat = XVECEXP (newpat, 0, 1);
1943 SUBST (SET_SRC (newpat),
1944 gen_lowpart_for_combine (GET_MODE (SET_SRC (newpat)), ni2dest));
1945 i2_code_number
1946 = recog_for_combine (&newi2pat, i2, &new_i2_notes, &i2_scratches);
1947
1948 if (i2_code_number >= 0)
1949 insn_code_number
1950 = recog_for_combine (&newpat, i3, &new_i3_notes, &i3_scratches);
1951
1952 if (insn_code_number >= 0)
1953 {
1954 rtx insn;
1955 rtx link;
1956
1957 /* If we will be able to accept this, we have made a change to the
1958 destination of I3. This can invalidate a LOG_LINKS pointing
1959 to I3. No other part of combine.c makes such a transformation.
1960
1961 The new I3 will have a destination that was previously the
1962 destination of I1 or I2 and which was used in I2 or I3. Call
1963 distribute_links to make a LOG_LINK from the next use of
1964 that destination. */
1965
1966 PATTERN (i3) = newpat;
1967 distribute_links (gen_rtx (INSN_LIST, VOIDmode, i3, NULL_RTX));
1968
1969 /* I3 now uses what used to be its destination and which is
1970 now I2's destination. That means we need a LOG_LINK from
1971 I3 to I2. But we used to have one, so we still will.
1972
1973 However, some later insn might be using I2's dest and have
1974 a LOG_LINK pointing at I3. We must remove this link.
1975 The simplest way to remove the link is to point it at I1,
1976 which we know will be a NOTE. */
1977
1978 for (insn = NEXT_INSN (i3);
1979 insn && (this_basic_block == n_basic_blocks - 1
1980 || insn != basic_block_head[this_basic_block + 1]);
1981 insn = NEXT_INSN (insn))
1982 {
1983 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
1984 && reg_referenced_p (ni2dest, PATTERN (insn)))
1985 {
1986 for (link = LOG_LINKS (insn); link;
1987 link = XEXP (link, 1))
1988 if (XEXP (link, 0) == i3)
1989 XEXP (link, 0) = i1;
1990
1991 break;
1992 }
1993 }
1994 }
1995 }
1996
1997 /* Similarly, check for a case where we have a PARALLEL of two independent
1998 SETs but we started with three insns. In this case, we can do the sets
1999 as two separate insns. This case occurs when some SET allows two
2000 other insns to combine, but the destination of that SET is still live. */
2001
2002 else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0
2003 && GET_CODE (newpat) == PARALLEL
2004 && XVECLEN (newpat, 0) == 2
2005 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
2006 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != ZERO_EXTRACT
2007 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != STRICT_LOW_PART
2008 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
2009 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT
2010 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART
2011 && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 1)),
2012 INSN_CUID (i2))
2013 /* Don't pass sets with (USE (MEM ...)) dests to the following. */
2014 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != USE
2015 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != USE
2016 && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 1)),
2017 XVECEXP (newpat, 0, 0))
2018 && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 0)),
2019 XVECEXP (newpat, 0, 1)))
2020 {
2021 newi2pat = XVECEXP (newpat, 0, 1);
2022 newpat = XVECEXP (newpat, 0, 0);
2023
2024 i2_code_number
2025 = recog_for_combine (&newi2pat, i2, &new_i2_notes, &i2_scratches);
2026
2027 if (i2_code_number >= 0)
2028 insn_code_number
2029 = recog_for_combine (&newpat, i3, &new_i3_notes, &i3_scratches);
2030 }
2031
2032 /* If it still isn't recognized, fail and change things back the way they
2033 were. */
2034 if ((insn_code_number < 0
2035 /* Is the result a reasonable ASM_OPERANDS? */
2036 && (! check_asm_operands (newpat) || added_sets_1 || added_sets_2)))
2037 {
2038 undo_all ();
2039 return 0;
2040 }
2041
2042 /* If we had to change another insn, make sure it is valid also. */
2043 if (undobuf.other_insn)
2044 {
2045 rtx other_pat = PATTERN (undobuf.other_insn);
2046 rtx new_other_notes;
2047 rtx note, next;
2048
2049 CLEAR_HARD_REG_SET (newpat_used_regs);
2050
2051 other_code_number
2052 = recog_for_combine (&other_pat, undobuf.other_insn,
2053 &new_other_notes, &other_scratches);
2054
2055 if (other_code_number < 0 && ! check_asm_operands (other_pat))
2056 {
2057 undo_all ();
2058 return 0;
2059 }
2060
2061 PATTERN (undobuf.other_insn) = other_pat;
2062
2063 /* If any of the notes in OTHER_INSN were REG_UNUSED, ensure that they
2064 are still valid. Then add any non-duplicate notes added by
2065 recog_for_combine. */
2066 for (note = REG_NOTES (undobuf.other_insn); note; note = next)
2067 {
2068 next = XEXP (note, 1);
2069
2070 if (REG_NOTE_KIND (note) == REG_UNUSED
2071 && ! reg_set_p (XEXP (note, 0), PATTERN (undobuf.other_insn)))
2072 {
2073 if (GET_CODE (XEXP (note, 0)) == REG)
2074 reg_n_deaths[REGNO (XEXP (note, 0))]--;
2075
2076 remove_note (undobuf.other_insn, note);
2077 }
2078 }
2079
2080 for (note = new_other_notes; note; note = XEXP (note, 1))
2081 if (GET_CODE (XEXP (note, 0)) == REG)
2082 reg_n_deaths[REGNO (XEXP (note, 0))]++;
2083
2084 distribute_notes (new_other_notes, undobuf.other_insn,
2085 undobuf.other_insn, NULL_RTX, NULL_RTX, NULL_RTX);
2086 }
2087
2088 /* We now know that we can do this combination. Merge the insns and
2089 update the status of registers and LOG_LINKS. */
2090
2091 {
2092 rtx i3notes, i2notes, i1notes = 0;
2093 rtx i3links, i2links, i1links = 0;
2094 rtx midnotes = 0;
2095 register int regno;
2096 /* Compute which registers we expect to eliminate. */
2097 rtx elim_i2 = (newi2pat || i2dest_in_i2src || i2dest_in_i1src
2098 ? 0 : i2dest);
2099 rtx elim_i1 = i1 == 0 || i1dest_in_i1src ? 0 : i1dest;
2100
2101 /* Get the old REG_NOTES and LOG_LINKS from all our insns and
2102 clear them. */
2103 i3notes = REG_NOTES (i3), i3links = LOG_LINKS (i3);
2104 i2notes = REG_NOTES (i2), i2links = LOG_LINKS (i2);
2105 if (i1)
2106 i1notes = REG_NOTES (i1), i1links = LOG_LINKS (i1);
2107
2108 /* Ensure that we do not have something that should not be shared but
2109 occurs multiple times in the new insns. Check this by first
2110 resetting all the `used' flags and then copying anything that is shared. */
2111
2112 reset_used_flags (i3notes);
2113 reset_used_flags (i2notes);
2114 reset_used_flags (i1notes);
2115 reset_used_flags (newpat);
2116 reset_used_flags (newi2pat);
2117 if (undobuf.other_insn)
2118 reset_used_flags (PATTERN (undobuf.other_insn));
2119
2120 i3notes = copy_rtx_if_shared (i3notes);
2121 i2notes = copy_rtx_if_shared (i2notes);
2122 i1notes = copy_rtx_if_shared (i1notes);
2123 newpat = copy_rtx_if_shared (newpat);
2124 newi2pat = copy_rtx_if_shared (newi2pat);
2125 if (undobuf.other_insn)
2126 reset_used_flags (PATTERN (undobuf.other_insn));
2127
2128 INSN_CODE (i3) = insn_code_number;
2129 PATTERN (i3) = newpat;
2130 if (undobuf.other_insn)
2131 INSN_CODE (undobuf.other_insn) = other_code_number;
2132
2133 /* We had one special case above where I2 had more than one set and
2134 we replaced a destination of one of those sets with the destination
2135 of I3. In that case, we have to update LOG_LINKS of insns later
2136 in this basic block. Note that this (expensive) case is rare.
2137
2138 Also, in this case, we must pretend that all REG_NOTEs for I2
2139 actually came from I3, so that REG_UNUSED notes from I2 will be
2140 properly handled. */
2141
2142 if (i3_subst_into_i2)
2143 {
2144 for (i = 0; i < XVECLEN (PATTERN (i2), 0); i++)
2145 if (GET_CODE (SET_DEST (XVECEXP (PATTERN (i2), 0, i))) == REG
2146 && SET_DEST (XVECEXP (PATTERN (i2), 0, i)) != i2dest
2147 && ! find_reg_note (i2, REG_UNUSED,
2148 SET_DEST (XVECEXP (PATTERN (i2), 0, i))))
2149 for (temp = NEXT_INSN (i2);
2150 temp && (this_basic_block == n_basic_blocks - 1
2151 || basic_block_head[this_basic_block] != temp);
2152 temp = NEXT_INSN (temp))
2153 if (temp != i3 && GET_RTX_CLASS (GET_CODE (temp)) == 'i')
2154 for (link = LOG_LINKS (temp); link; link = XEXP (link, 1))
2155 if (XEXP (link, 0) == i2)
2156 XEXP (link, 0) = i3;
2157
2158 if (i3notes)
2159 {
2160 rtx link = i3notes;
2161 while (XEXP (link, 1))
2162 link = XEXP (link, 1);
2163 XEXP (link, 1) = i2notes;
2164 }
2165 else
2166 i3notes = i2notes;
2167 i2notes = 0;
2168 }
2169
2170 LOG_LINKS (i3) = 0;
2171 REG_NOTES (i3) = 0;
2172 LOG_LINKS (i2) = 0;
2173 REG_NOTES (i2) = 0;
2174
2175 if (newi2pat)
2176 {
2177 INSN_CODE (i2) = i2_code_number;
2178 PATTERN (i2) = newi2pat;
2179 }
2180 else
2181 {
2182 PUT_CODE (i2, NOTE);
2183 NOTE_LINE_NUMBER (i2) = NOTE_INSN_DELETED;
2184 NOTE_SOURCE_FILE (i2) = 0;
2185 }
2186
2187 if (i1)
2188 {
2189 LOG_LINKS (i1) = 0;
2190 REG_NOTES (i1) = 0;
2191 PUT_CODE (i1, NOTE);
2192 NOTE_LINE_NUMBER (i1) = NOTE_INSN_DELETED;
2193 NOTE_SOURCE_FILE (i1) = 0;
2194 }
2195
2196 /* Get death notes for everything that is now used in either I3 or
2197 I2 and used to die in a previous insn. */
2198
2199 move_deaths (newpat, i1 ? INSN_CUID (i1) : INSN_CUID (i2), i3, &midnotes);
2200 if (newi2pat)
2201 move_deaths (newi2pat, INSN_CUID (i1), i2, &midnotes);
2202
2203 /* Distribute all the LOG_LINKS and REG_NOTES from I1, I2, and I3. */
2204 if (i3notes)
2205 distribute_notes (i3notes, i3, i3, newi2pat ? i2 : NULL_RTX,
2206 elim_i2, elim_i1);
2207 if (i2notes)
2208 distribute_notes (i2notes, i2, i3, newi2pat ? i2 : NULL_RTX,
2209 elim_i2, elim_i1);
2210 if (i1notes)
2211 distribute_notes (i1notes, i1, i3, newi2pat ? i2 : NULL_RTX,
2212 elim_i2, elim_i1);
2213 if (midnotes)
2214 distribute_notes (midnotes, NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
2215 elim_i2, elim_i1);
2216
2217 /* Distribute any notes added to I2 or I3 by recog_for_combine. We
2218 know these are REG_UNUSED and want them to go to the desired insn,
2219 so we always pass it as i3. We have not counted the notes in
2220 reg_n_deaths yet, so we need to do so now. */
2221
2222 if (newi2pat && new_i2_notes)
2223 {
2224 for (temp = new_i2_notes; temp; temp = XEXP (temp, 1))
2225 if (GET_CODE (XEXP (temp, 0)) == REG)
2226 reg_n_deaths[REGNO (XEXP (temp, 0))]++;
2227
2228 distribute_notes (new_i2_notes, i2, i2, NULL_RTX, NULL_RTX, NULL_RTX);
2229 }
2230
2231 if (new_i3_notes)
2232 {
2233 for (temp = new_i3_notes; temp; temp = XEXP (temp, 1))
2234 if (GET_CODE (XEXP (temp, 0)) == REG)
2235 reg_n_deaths[REGNO (XEXP (temp, 0))]++;
2236
2237 distribute_notes (new_i3_notes, i3, i3, NULL_RTX, NULL_RTX, NULL_RTX);
2238 }
2239
2240 /* If I3DEST was used in I3SRC, it really died in I3. We may need to
2241 put a REG_DEAD note for it somewhere. Similarly for I2 and I1.
2242 Show an additional death due to the REG_DEAD note we make here. If
2243 we discard it in distribute_notes, we will decrement it again. */
2244
2245 if (i3dest_killed)
2246 {
2247 if (GET_CODE (i3dest_killed) == REG)
2248 reg_n_deaths[REGNO (i3dest_killed)]++;
2249
2250 distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i3dest_killed,
2251 NULL_RTX),
2252 NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
2253 NULL_RTX, NULL_RTX);
2254 }
2255
2256 /* For I2 and I1, we have to be careful. If NEWI2PAT exists and sets
2257 I2DEST or I1DEST, the death must be somewhere before I2, not I3. If
2258 we passed I3 in that case, it might delete I2. */
2259
2260 if (i2dest_in_i2src)
2261 {
2262 if (GET_CODE (i2dest) == REG)
2263 reg_n_deaths[REGNO (i2dest)]++;
2264
2265 if (newi2pat && reg_set_p (i2dest, newi2pat))
2266 distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i2dest, NULL_RTX),
2267 NULL_RTX, i2, NULL_RTX, NULL_RTX, NULL_RTX);
2268 else
2269 distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i2dest, NULL_RTX),
2270 NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
2271 NULL_RTX, NULL_RTX);
2272 }
2273
2274 if (i1dest_in_i1src)
2275 {
2276 if (GET_CODE (i1dest) == REG)
2277 reg_n_deaths[REGNO (i1dest)]++;
2278
2279 if (newi2pat && reg_set_p (i1dest, newi2pat))
2280 distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i1dest, NULL_RTX),
2281 NULL_RTX, i2, NULL_RTX, NULL_RTX, NULL_RTX);
2282 else
2283 distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i1dest, NULL_RTX),
2284 NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
2285 NULL_RTX, NULL_RTX);
2286 }
2287
2288 distribute_links (i3links);
2289 distribute_links (i2links);
2290 distribute_links (i1links);
2291
2292 if (GET_CODE (i2dest) == REG)
2293 {
2294 rtx link;
2295 rtx i2_insn = 0, i2_val = 0, set;
2296
2297 /* The insn that used to set this register doesn't exist, and
2298 this life of the register may not exist either. See if one of
2299 I3's links points to an insn that sets I2DEST. If it does,
2300 that is now the last known value for I2DEST. If we don't update
2301 this and I2 set the register to a value that depended on its old
2302 contents, we will get confused. If this insn is used, things
2303 will be set correctly in combine_instructions. */
2304
2305 for (link = LOG_LINKS (i3); link; link = XEXP (link, 1))
2306 if ((set = single_set (XEXP (link, 0))) != 0
2307 && rtx_equal_p (i2dest, SET_DEST (set)))
2308 i2_insn = XEXP (link, 0), i2_val = SET_SRC (set);
2309
2310 record_value_for_reg (i2dest, i2_insn, i2_val);
2311
2312 /* If the reg formerly set in I2 died only once and that was in I3,
2313 zero its use count so it won't make `reload' do any work. */
2314 if (! added_sets_2 && newi2pat == 0 && ! i2dest_in_i2src)
2315 {
2316 regno = REGNO (i2dest);
2317 reg_n_sets[regno]--;
2318 if (reg_n_sets[regno] == 0
2319 && ! (basic_block_live_at_start[0][regno / REGSET_ELT_BITS]
2320 & ((REGSET_ELT_TYPE) 1 << (regno % REGSET_ELT_BITS))))
2321 reg_n_refs[regno] = 0;
2322 }
2323 }
2324
2325 if (i1 && GET_CODE (i1dest) == REG)
2326 {
2327 rtx link;
2328 rtx i1_insn = 0, i1_val = 0, set;
2329
2330 for (link = LOG_LINKS (i3); link; link = XEXP (link, 1))
2331 if ((set = single_set (XEXP (link, 0))) != 0
2332 && rtx_equal_p (i1dest, SET_DEST (set)))
2333 i1_insn = XEXP (link, 0), i1_val = SET_SRC (set);
2334
2335 record_value_for_reg (i1dest, i1_insn, i1_val);
2336
2337 regno = REGNO (i1dest);
2338 if (! added_sets_1 && ! i1dest_in_i1src)
2339 {
2340 reg_n_sets[regno]--;
2341 if (reg_n_sets[regno] == 0
2342 && ! (basic_block_live_at_start[0][regno / REGSET_ELT_BITS]
2343 & ((REGSET_ELT_TYPE) 1 << (regno % REGSET_ELT_BITS))))
2344 reg_n_refs[regno] = 0;
2345 }
2346 }
2347
2348 /* Update reg_nonzero_bits et al for any changes that may have been made
2349 to this insn. */
2350
2351 note_stores (newpat, set_nonzero_bits_and_sign_copies);
2352 if (newi2pat)
2353 note_stores (newi2pat, set_nonzero_bits_and_sign_copies);
2354
2355 /* If we added any (clobber (scratch)), add them to the max for a
2356 block. This is a very pessimistic calculation, since we might
2357 have had them already and this might not be the worst block, but
2358 it's not worth doing any better. */
2359 max_scratch += i3_scratches + i2_scratches + other_scratches;
2360
2361 /* If I3 is now an unconditional jump, ensure that it has a
2362 BARRIER following it since it may have initially been a
2363 conditional jump. It may also be the last nonnote insn. */
2364
2365 if ((GET_CODE (newpat) == RETURN || simplejump_p (i3))
2366 && ((temp = next_nonnote_insn (i3)) == NULL_RTX
2367 || GET_CODE (temp) != BARRIER))
2368 emit_barrier_after (i3);
2369 }
2370
2371 combine_successes++;
2372
2373 /* Clear this here, so that subsequent get_last_value calls are not
2374 affected. */
2375 subst_prev_insn = NULL_RTX;
2376
2377 if (added_links_insn
2378 && (newi2pat == 0 || INSN_CUID (added_links_insn) < INSN_CUID (i2))
2379 && INSN_CUID (added_links_insn) < INSN_CUID (i3))
2380 return added_links_insn;
2381 else
2382 return newi2pat ? i2 : i3;
2383}
2384\f
2385/* Undo all the modifications recorded in undobuf. */
2386
2387static void
2388undo_all ()
2389{
2390 register int i;
2391 if (undobuf.num_undo > MAX_UNDO)
2392 undobuf.num_undo = MAX_UNDO;
2393 for (i = undobuf.num_undo - 1; i >= 0; i--)
2394 {
2395 if (undobuf.undo[i].is_int)
2396 *undobuf.undo[i].where.i = undobuf.undo[i].old_contents.i;
2397 else
2398 *undobuf.undo[i].where.r = undobuf.undo[i].old_contents.r;
2399
2400 }
2401
2402 obfree (undobuf.storage);
2403 undobuf.num_undo = 0;
2404
2405 /* Clear this here, so that subsequent get_last_value calls are not
2406 affected. */
2407 subst_prev_insn = NULL_RTX;
2408}
2409\f
2410/* Find the innermost point within the rtx at LOC, possibly LOC itself,
2411 where we have an arithmetic expression and return that point. LOC will
2412 be inside INSN.
2413
2414 try_combine will call this function to see if an insn can be split into
2415 two insns. */
2416
2417static rtx *
2418 find_split_point (loc, insn)
2419 rtx *loc;
2420 rtx insn;
2421{
2422 rtx x = *loc;
2423 enum rtx_code code = GET_CODE (x);
2424 rtx *split;
2425 int len = 0, pos, unsignedp;
2426 rtx inner;
2427
2428 /* First special-case some codes. */
2429 switch (code)
2430 {
2431 case SUBREG:
2432#ifdef INSN_SCHEDULING
2433 /* If we are making a paradoxical SUBREG invalid, it becomes a split
2434 point. */
2435 if (GET_CODE (SUBREG_REG (x)) == MEM)
2436 return loc;
2437#endif
2438 return find_split_point (&SUBREG_REG (x), insn);
2439
2440 case MEM:
2441#ifdef HAVE_lo_sum
2442 /* If we have (mem (const ..)) or (mem (symbol_ref ...)), split it
2443 using LO_SUM and HIGH. */
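/* Illustrative sketch, not in the original source: here
   (mem (symbol_ref "x")) would become

	(mem (lo_sum (high (symbol_ref "x")) (symbol_ref "x")))

   and the HIGH part is returned as the split point, so it can be
   computed by a separate insn.  */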
2444 if (GET_CODE (XEXP (x, 0)) == CONST
2445 || GET_CODE (XEXP (x, 0)) == SYMBOL_REF)
2446 {
2447 SUBST (XEXP (x, 0),
2448 gen_rtx_combine (LO_SUM, Pmode,
2449 gen_rtx_combine (HIGH, Pmode, XEXP (x, 0)),
2450 XEXP (x, 0)));
2451 return &XEXP (XEXP (x, 0), 0);
2452 }
2453#endif
2454
2455 /* If we have a PLUS whose second operand is a constant and the
2456 address is not valid, perhaps we can split it up using
2457 the machine-specific way to split large constants. We use
2458 the first pseudo-reg (one of the virtual regs) as a placeholder;
2459 it will not remain in the result. */
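/* Illustrative sketch, not in the original source (constants and
   register numbers are made up): for an invalid address such as
   (plus (reg 70) (const_int 0x12345678)), an MD splitter might yield

	(set (reg 80) (plus (reg 70) (const_int 0x12345000)))
	(set (reg 80) (plus (reg 80) (const_int 0x678)))

   with (reg 80) the placeholder.  Substituting the first source into
   the second gives the whole address back, with the inner PLUS as the
   natural split point.  */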
2460 if (GET_CODE (XEXP (x, 0)) == PLUS
2461 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
2462 && ! memory_address_p (GET_MODE (x), XEXP (x, 0)))
2463 {
2464 rtx reg = regno_reg_rtx[FIRST_PSEUDO_REGISTER];
2465 rtx seq = split_insns (gen_rtx (SET, VOIDmode, reg, XEXP (x, 0)),
2466 subst_insn);
2467
2468 /* This should have produced two insns, each of which sets our
2469 placeholder. If the source of the second is a valid address,
2470 we can put both sources together and make a split point
2471 in the middle. */
2472
2473 if (seq && XVECLEN (seq, 0) == 2
2474 && GET_CODE (XVECEXP (seq, 0, 0)) == INSN
2475 && GET_CODE (PATTERN (XVECEXP (seq, 0, 0))) == SET
2476 && SET_DEST (PATTERN (XVECEXP (seq, 0, 0))) == reg
2477 && ! reg_mentioned_p (reg,
2478 SET_SRC (PATTERN (XVECEXP (seq, 0, 0))))
2479 && GET_CODE (XVECEXP (seq, 0, 1)) == INSN
2480 && GET_CODE (PATTERN (XVECEXP (seq, 0, 1))) == SET
2481 && SET_DEST (PATTERN (XVECEXP (seq, 0, 1))) == reg
2482 && memory_address_p (GET_MODE (x),
2483 SET_SRC (PATTERN (XVECEXP (seq, 0, 1)))))
2484 {
2485 rtx src1 = SET_SRC (PATTERN (XVECEXP (seq, 0, 0)));
2486 rtx src2 = SET_SRC (PATTERN (XVECEXP (seq, 0, 1)));
2487
2488 /* Replace the placeholder in SRC2 with SRC1. If we can
2489 find where in SRC2 it was placed, that can become our
2490 split point and we can replace this address with SRC2.
2491 Just try two obvious places. */
2492
2493 src2 = replace_rtx (src2, reg, src1);
2494 split = 0;
2495 if (XEXP (src2, 0) == src1)
2496 split = &XEXP (src2, 0);
2497 else if (GET_RTX_FORMAT (GET_CODE (XEXP (src2, 0)))[0] == 'e'
2498 && XEXP (XEXP (src2, 0), 0) == src1)
2499 split = &XEXP (XEXP (src2, 0), 0);
2500
2501 if (split)
2502 {
2503 SUBST (XEXP (x, 0), src2);
2504 return split;
2505 }
2506 }
2507
2508 /* If that didn't work, perhaps the first operand is complex and
2509 needs to be computed separately, so make a split point there.
2510 This will occur on machines that just support REG + CONST
2511 and have a constant moved through some previous computation. */
2512
2513 else if (GET_RTX_CLASS (GET_CODE (XEXP (XEXP (x, 0), 0))) != 'o'
2514 && ! (GET_CODE (XEXP (XEXP (x, 0), 0)) == SUBREG
2515 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (XEXP (x, 0), 0))))
2516 == 'o')))
2517 return &XEXP (XEXP (x, 0), 0);
2518 }
2519 break;
2520
2521 case SET:
2522#ifdef HAVE_cc0
2523 /* If SET_DEST is CC0 and SET_SRC is not an operand, a COMPARE, or a
2524 ZERO_EXTRACT, the most likely reason why this doesn't match is that
2525 we need to put the operand into a register. So split at that
2526 point. */
2527
2528 if (SET_DEST (x) == cc0_rtx
2529 && GET_CODE (SET_SRC (x)) != COMPARE
2530 && GET_CODE (SET_SRC (x)) != ZERO_EXTRACT
2531 && GET_RTX_CLASS (GET_CODE (SET_SRC (x))) != 'o'
2532 && ! (GET_CODE (SET_SRC (x)) == SUBREG
2533 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (SET_SRC (x)))) == 'o'))
2534 return &SET_SRC (x);
2535#endif
2536
2537 /* See if we can split SET_SRC as it stands. */
2538 split = find_split_point (&SET_SRC (x), insn);
2539 if (split && split != &SET_SRC (x))
2540 return split;
2541
2542 /* See if we can split SET_DEST as it stands. */
2543 split = find_split_point (&SET_DEST (x), insn);
2544 if (split && split != &SET_DEST (x))
2545 return split;
2546
2547 /* See if this is a bitfield assignment with everything constant. If
2548 so, this is an IOR of an AND, so split it into that. */
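/* Illustrative sketch, not in the original source (register number is
   made up, and the BITS_BIG_ENDIAN correction is ignored): storing the
   constant 5 into a 3-bit field at position 4 of (reg:SI 70),

	(set (zero_extract (reg:SI 70) (const_int 3) (const_int 4))
	     (const_int 5))

   becomes

	(set (reg:SI 70) (ior (and (reg:SI 70) (const_int -113))
			      (const_int 80)))

   since ~(7 << 4) == -113 and 5 << 4 == 80.  */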
2549 if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
2550 && (GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
2551 <= HOST_BITS_PER_WIDE_INT)
2552 && GET_CODE (XEXP (SET_DEST (x), 1)) == CONST_INT
2553 && GET_CODE (XEXP (SET_DEST (x), 2)) == CONST_INT
2554 && GET_CODE (SET_SRC (x)) == CONST_INT
2555 && ((INTVAL (XEXP (SET_DEST (x), 1))
2556 + INTVAL (XEXP (SET_DEST (x), 2)))
2557 <= GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0))))
2558 && ! side_effects_p (XEXP (SET_DEST (x), 0)))
2559 {
2560 int pos = INTVAL (XEXP (SET_DEST (x), 2));
2561 int len = INTVAL (XEXP (SET_DEST (x), 1));
2562 int src = INTVAL (SET_SRC (x));
2563 rtx dest = XEXP (SET_DEST (x), 0);
2564 enum machine_mode mode = GET_MODE (dest);
2565 unsigned HOST_WIDE_INT mask = ((HOST_WIDE_INT) 1 << len) - 1;
2566
2567 if (BITS_BIG_ENDIAN)
2568 pos = GET_MODE_BITSIZE (mode) - len - pos;
2569
2570 if (src == mask)
2571 SUBST (SET_SRC (x),
2572 gen_binary (IOR, mode, dest, GEN_INT (src << pos)));
2573 else
2574 SUBST (SET_SRC (x),
2575 gen_binary (IOR, mode,
2576 gen_binary (AND, mode, dest,
2577 GEN_INT (~ (mask << pos)
2578 & GET_MODE_MASK (mode))),
2579 GEN_INT (src << pos)));
2580
2581 SUBST (SET_DEST (x), dest);
2582
2583 split = find_split_point (&SET_SRC (x), insn);
2584 if (split && split != &SET_SRC (x))
2585 return split;
2586 }
2587
2588 /* Otherwise, see if this is an operation that we can split into two.
2589 If so, try to split that. */
2590 code = GET_CODE (SET_SRC (x));
2591
2592 switch (code)
2593 {
2594 case AND:
2595 /* If we are AND'ing with a large constant that is only a single
2596 bit and the result is only being used in a context where we
2597 need to know if it is zero or non-zero, replace it with a bit
2598 extraction. This will avoid the large constant, which might
2599 have taken more than one insn to make. If the constant were
2600 not a valid argument to the AND but took only one insn to make,
2601 this is no worse, but if it took more than one insn, it will
2602 be better. */
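/* Illustrative sketch, not in the original source (register numbers
   are made up): if (set (reg 70) (and (reg 71) (const_int 0x8000)))
   is only ever tested against zero, it is rewritten as

	(set (reg 70) (zero_extract (reg 71) (const_int 1)
				    (const_int 15)))

   which avoids materializing the large constant 0x8000.  */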
2603
2604 if (GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
2605 && GET_CODE (XEXP (SET_SRC (x), 0)) == REG
2606 && (pos = exact_log2 (INTVAL (XEXP (SET_SRC (x), 1)))) >= 7
2607 && GET_CODE (SET_DEST (x)) == REG
2608 && (split = find_single_use (SET_DEST (x), insn, NULL_PTR)) != 0
2609 && (GET_CODE (*split) == EQ || GET_CODE (*split) == NE)
2610 && XEXP (*split, 0) == SET_DEST (x)
2611 && XEXP (*split, 1) == const0_rtx)
2612 {
2613 rtx extraction = make_extraction (GET_MODE (SET_DEST (x)),
2614 XEXP (SET_SRC (x), 0),
2615 pos, NULL_RTX, 1, 1, 0, 0);
2616 if (extraction != 0)
2617 {
2618 SUBST (SET_SRC (x), extraction);
2619 return find_split_point (loc, insn);
2620 }
2621 }
2622 break;
2623
2624 case SIGN_EXTEND:
2625 inner = XEXP (SET_SRC (x), 0);
2626 pos = 0;
2627 len = GET_MODE_BITSIZE (GET_MODE (inner));
2628 unsignedp = 0;
2629 break;
2630
2631 case SIGN_EXTRACT:
2632 case ZERO_EXTRACT:
2633 if (GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
2634 && GET_CODE (XEXP (SET_SRC (x), 2)) == CONST_INT)
2635 {
2636 inner = XEXP (SET_SRC (x), 0);
2637 len = INTVAL (XEXP (SET_SRC (x), 1));
2638 pos = INTVAL (XEXP (SET_SRC (x), 2));
2639
2640 if (BITS_BIG_ENDIAN)
2641 pos = GET_MODE_BITSIZE (GET_MODE (inner)) - len - pos;
2642 unsignedp = (code == ZERO_EXTRACT);
2643 }
2644 break;
2645 }
2646
2647 if (len && pos >= 0 && pos + len <= GET_MODE_BITSIZE (GET_MODE (inner)))
2648 {
2649 enum machine_mode mode = GET_MODE (SET_SRC (x));
2650
2651 /* For unsigned, we have a choice of a shift followed by an
2652 AND or two shifts. Use two shifts for field sizes where the
2653 constant might be too large. We assume here that we can
2654 always at least get 8-bit constants in an AND insn, which is
2655 true for every current RISC. */
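/* Illustrative sketch, not in the original source (register number is
   made up): extracting an unsigned 4-bit field at bit 8 of (reg:SI 70)
   becomes

	(and (lshiftrt (reg:SI 70) (const_int 8)) (const_int 15))

   while the signed version below uses two shifts instead:

	(ashiftrt (ashift (reg:SI 70) (const_int 20)) (const_int 28)).  */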
2656
2657 if (unsignedp && len <= 8)
2658 {
2659 SUBST (SET_SRC (x),
2660 gen_rtx_combine
2661 (AND, mode,
2662 gen_rtx_combine (LSHIFTRT, mode,
2663 gen_lowpart_for_combine (mode, inner),
2664 GEN_INT (pos)),
2665 GEN_INT (((HOST_WIDE_INT) 1 << len) - 1)));
2666
2667 split = find_split_point (&SET_SRC (x), insn);
2668 if (split && split != &SET_SRC (x))
2669 return split;
2670 }
2671 else
2672 {
2673 SUBST (SET_SRC (x),
2674 gen_rtx_combine
2675 (unsignedp ? LSHIFTRT : ASHIFTRT, mode,
2676 gen_rtx_combine (ASHIFT, mode,
2677 gen_lowpart_for_combine (mode, inner),
2678 GEN_INT (GET_MODE_BITSIZE (mode)
2679 - len - pos)),
2680 GEN_INT (GET_MODE_BITSIZE (mode) - len)));
2681
2682 split = find_split_point (&SET_SRC (x), insn);
2683 if (split && split != &SET_SRC (x))
2684 return split;
2685 }
2686 }
2687
2688 /* See if this is a simple operation with a constant as the second
2689 operand. It might be that this constant is out of range and hence
2690 could be used as a split point. */
2691 if ((GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '2'
2692 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == 'c'
2693 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '<')
2694 && CONSTANT_P (XEXP (SET_SRC (x), 1))
2695 && (GET_RTX_CLASS (GET_CODE (XEXP (SET_SRC (x), 0))) == 'o'
2696 || (GET_CODE (XEXP (SET_SRC (x), 0)) == SUBREG
2697 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (SET_SRC (x), 0))))
2698 == 'o'))))
2699 return &XEXP (SET_SRC (x), 1);
2700
2701 /* Finally, see if this is a simple operation with its first operand
2702 not in a register. The operation might require this operand in a
2703 register, so return it as a split point. We can always do this
2704 because if the first operand were another operation, we would have
2705 already found it as a split point. */
2706 if ((GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '2'
2707 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == 'c'
2708 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '<'
2709 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '1')
2710 && ! register_operand (XEXP (SET_SRC (x), 0), VOIDmode))
2711 return &XEXP (SET_SRC (x), 0);
2712
2713 return 0;
2714
2715 case AND:
2716 case IOR:
2717 /* We write NOR as (and (not A) (not B)), but if we don't have a NOR,
2718 it is better to write this as (not (ior A B)) so we can split it.
2719 Similarly for IOR. */
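/* Illustrative sketch, not in the original source: by De Morgan's
   laws (and (not A) (not B)) == (not (ior A B)), so the rewrite below
   preserves the value while exposing (ior A B) as a subexpression
   that can be computed by a separate insn.  */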
2720 if (GET_CODE (XEXP (x, 0)) == NOT && GET_CODE (XEXP (x, 1)) == NOT)
2721 {
2722 SUBST (*loc,
2723 gen_rtx_combine (NOT, GET_MODE (x),
2724 gen_rtx_combine (code == IOR ? AND : IOR,
2725 GET_MODE (x),
2726 XEXP (XEXP (x, 0), 0),
2727 XEXP (XEXP (x, 1), 0))));
2728 return find_split_point (loc, insn);
2729 }
2730
2731 /* Many RISC machines have a large set of logical insns. If the
2732 second operand is a NOT, put it first so we will try to split the
2733 other operand first. */
2734 if (GET_CODE (XEXP (x, 1)) == NOT)
2735 {
2736 rtx tem = XEXP (x, 0);
2737 SUBST (XEXP (x, 0), XEXP (x, 1));
2738 SUBST (XEXP (x, 1), tem);
2739 }
2740 break;
2741 }
2742
2743 /* Otherwise, select our actions depending on our rtx class. */
2744 switch (GET_RTX_CLASS (code))
2745 {
2746 case 'b': /* This is ZERO_EXTRACT and SIGN_EXTRACT. */
2747 case '3':
2748 split = find_split_point (&XEXP (x, 2), insn);
2749 if (split)
2750 return split;
2751 /* ... fall through ... */
2752 case '2':
2753 case 'c':
2754 case '<':
2755 split = find_split_point (&XEXP (x, 1), insn);
2756 if (split)
2757 return split;
2758 /* ... fall through ... */
2759 case '1':
2760 /* Some machines have (and (shift ...) ...) insns. If X is not
2761 an AND, but XEXP (X, 0) is, use it as our split point. */
2762 if (GET_CODE (x) != AND && GET_CODE (XEXP (x, 0)) == AND)
2763 return &XEXP (x, 0);
2764
2765 split = find_split_point (&XEXP (x, 0), insn);
2766 if (split)
2767 return split;
2768 return loc;
2769 }
2770
2771 /* Otherwise, we don't have a split point. */
2772 return 0;
2773}
2774\f
2775/* Throughout X, replace FROM with TO, and return the result.
2776 The result is TO if X is FROM;
2777 otherwise the result is X, but its contents may have been modified.
2778 If they were modified, a record was made in undobuf so that
2779 undo_all will (among other things) return X to its original state.
2780
2781 If the number of changes necessary is too much to record to undo,
2782 the excess changes are not made, so the result is invalid.
2783 The changes already made can still be undone.
2784 undobuf.num_undo is incremented for such changes, so by testing that
2785 the caller can tell whether the result is valid.
2786
2787 `n_occurrences' is incremented each time FROM is replaced.
2788
2789 IN_DEST is non-zero if we are processing the SET_DEST of a SET.
2790
2791 UNIQUE_COPY is non-zero if each substitution must be unique. We do this
2792 by copying if `n_occurrences' is non-zero. */
2793
2794static rtx
2795subst (x, from, to, in_dest, unique_copy)
2796 register rtx x, from, to;
2797 int in_dest;
2798 int unique_copy;
2799{
2800 register enum rtx_code code = GET_CODE (x);
2801 enum machine_mode op0_mode = VOIDmode;
2802 register char *fmt;
2803 register int len, i;
2804 rtx new;
2805
2806/* Two expressions are equal if they are identical copies of a shared
2807 RTX or if they are both registers with the same register number
2808 and mode. */
2809
2810#define COMBINE_RTX_EQUAL_P(X,Y) \
2811 ((X) == (Y) \
2812 || (GET_CODE (X) == REG && GET_CODE (Y) == REG \
2813 && REGNO (X) == REGNO (Y) && GET_MODE (X) == GET_MODE (Y)))
2814
2815 if (! in_dest && COMBINE_RTX_EQUAL_P (x, from))
2816 {
2817 n_occurrences++;
2818 return (unique_copy && n_occurrences > 1 ? copy_rtx (to) : to);
2819 }
2820
2821 /* If X and FROM are the same register but different modes, they will
2822 not have been seen as equal above. However, flow.c will make a
2823 LOG_LINKS entry for that case. If we do nothing, we will try to
2824 rerecognize our original insn and, when it succeeds, we will
2825 delete the feeding insn, which is incorrect.
2826
2827 So force this insn not to match in this (rare) case. */
2828 if (! in_dest && code == REG && GET_CODE (from) == REG
2829 && REGNO (x) == REGNO (from))
2830 return gen_rtx (CLOBBER, GET_MODE (x), const0_rtx);
2831
2832 /* If this is an object, we are done unless it is a MEM or LO_SUM, both
2833 of which may contain things that can be combined. */
2834 if (code != MEM && code != LO_SUM && GET_RTX_CLASS (code) == 'o')
2835 return x;
2836
2837 /* It is possible to have a subexpression appear twice in the insn.
2838 Suppose that FROM is a register that appears within TO.
2839 Then, after that subexpression has been scanned once by `subst',
2840 the second time it is scanned, TO may be found. If we were
2841 to scan TO here, we would find FROM within it and create a
2842 self-referential rtl structure which is completely wrong. */
2843 if (COMBINE_RTX_EQUAL_P (x, to))
2844 return to;
2845
2846 len = GET_RTX_LENGTH (code);
2847 fmt = GET_RTX_FORMAT (code);
2848
2849 /* We don't need to process a SET_DEST that is a register, CC0, or PC, so
2850 set up to skip this common case. All other cases where we want to
2851 suppress replacing something inside a SET_SRC are handled via the
2852 IN_DEST operand. */
2853 if (code == SET
2854 && (GET_CODE (SET_DEST (x)) == REG
2855 || GET_CODE (SET_DEST (x)) == CC0
2856 || GET_CODE (SET_DEST (x)) == PC))
2857 fmt = "ie";
2858
2859 /* Get the mode of operand 0 in case X is now a SIGN_EXTEND of a constant. */
2860 if (fmt[0] == 'e')
2861 op0_mode = GET_MODE (XEXP (x, 0));
2862
2863 for (i = 0; i < len; i++)
2864 {
2865 if (fmt[i] == 'E')
2866 {
2867 register int j;
2868 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2869 {
2870 if (COMBINE_RTX_EQUAL_P (XVECEXP (x, i, j), from))
2871 {
2872 new = (unique_copy && n_occurrences ? copy_rtx (to) : to);
2873 n_occurrences++;
2874 }
2875 else
2876 {
2877 new = subst (XVECEXP (x, i, j), from, to, 0, unique_copy);
2878
2879 /* If this substitution failed, this whole thing fails. */
2880 if (GET_CODE (new) == CLOBBER && XEXP (new, 0) == const0_rtx)
2881 return new;
2882 }
2883
2884 SUBST (XVECEXP (x, i, j), new);
2885 }
2886 }
2887 else if (fmt[i] == 'e')
2888 {
2889 if (COMBINE_RTX_EQUAL_P (XEXP (x, i), from))
2890 {
2891 /* In general, don't install a subreg involving two modes not
2892 tieable. It can worsen register allocation, and can even
2893 make invalid reload insns, since the reg inside may need to
2894 be copied from in the outside mode, and that may be invalid
2895 if it is an fp reg copied in integer mode.
2896
2897 We allow two exceptions to this: It is valid if it is inside
2898 another SUBREG and the mode of that SUBREG and the mode of
2899 the inside of TO is tieable and it is valid if X is a SET
2900 that copies FROM to CC0. */
2901 if (GET_CODE (to) == SUBREG
2902 && ! MODES_TIEABLE_P (GET_MODE (to),
2903 GET_MODE (SUBREG_REG (to)))
2904 && ! (code == SUBREG
2905 && MODES_TIEABLE_P (GET_MODE (x),
2906 GET_MODE (SUBREG_REG (to))))
2907#ifdef HAVE_cc0
2908 && ! (code == SET && i == 1 && XEXP (x, 0) == cc0_rtx)
2909#endif
2910 )
2911 return gen_rtx (CLOBBER, VOIDmode, const0_rtx);
2912
2913 new = (unique_copy && n_occurrences ? copy_rtx (to) : to);
2914 n_occurrences++;
2915 }
2916 else
2917 /* If we are in a SET_DEST, suppress most cases unless we
2918 have gone inside a MEM, in which case we want to
2919 simplify the address. We assume here that things that
2920 are actually part of the destination have their inner
2921 parts in the first expression. This is true for SUBREG,
2922 STRICT_LOW_PART, and ZERO_EXTRACT, which are the only
2923 things aside from REG and MEM that should appear in a
2924 SET_DEST. */
2925 new = subst (XEXP (x, i), from, to,
2926 (((in_dest
2927 && (code == SUBREG || code == STRICT_LOW_PART
2928 || code == ZERO_EXTRACT))
2929 || code == SET)
2930 && i == 0), unique_copy);
2931
2932 /* If we found that we will have to reject this combination,
2933 indicate that by returning the CLOBBER ourselves, rather than
2934 an expression containing it. This will speed things up as
2935 well as prevent accidents where two CLOBBERs are considered
2936 to be equal, thus producing an incorrect simplification. */
2937
2938 if (GET_CODE (new) == CLOBBER && XEXP (new, 0) == const0_rtx)
2939 return new;
2940
2941 SUBST (XEXP (x, i), new);
2942 }
2943 }
2944
2945 /* Try to simplify X. If the simplification changed the code, it is likely
2946 that further simplification will help, so loop, but limit the number
2947 of repetitions that will be performed. */
2948
2949 for (i = 0; i < 4; i++)
2950 {
2951 /* If X is sufficiently simple, don't bother trying to do anything
2952 with it. */
2953 if (code != CONST_INT && code != REG && code != CLOBBER)
2954 x = simplify_rtx (x, op0_mode, i == 3, in_dest);
2955
2956 if (GET_CODE (x) == code)
2957 break;
2958
2959 code = GET_CODE (x);
2960
2961 /* We no longer know the original mode of operand 0 since we
2962 have changed the form of X. */
2963 op0_mode = VOIDmode;
2964 }
2965
2966 return x;
2967}
2968\f
2969/* Simplify X, a piece of RTL. We just operate on the expression at the
2970 outer level; call `subst' to simplify recursively. Return the new
2971 expression.
2972
2973 OP0_MODE is the original mode of XEXP (x, 0); LAST is nonzero if this
2974 will be the last iteration even if an expression with a code different from
2975 X is returned; IN_DEST is nonzero if we are inside a SET_DEST. */
2976
2977static rtx
2978simplify_rtx (x, op0_mode, last, in_dest)
2979 rtx x;
2980 enum machine_mode op0_mode;
2981 int last;
2982 int in_dest;
2983{
2984 enum rtx_code code = GET_CODE (x);
2985 enum machine_mode mode = GET_MODE (x);
2986 rtx temp;
2987 int i;
2988
2989 /* If this is a commutative operation, put a constant last and a complex
2990 expression first. We don't need to do this for comparisons here. */
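/* E.g. (illustrative example, added): (plus (const_int 4) (reg 65)) is
   canonicalized to (plus (reg 65) (const_int 4)). */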
2991 if (GET_RTX_CLASS (code) == 'c'
2992 && ((CONSTANT_P (XEXP (x, 0)) && GET_CODE (XEXP (x, 1)) != CONST_INT)
2993 || (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == 'o'
2994 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) != 'o')
2995 || (GET_CODE (XEXP (x, 0)) == SUBREG
2996 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0)))) == 'o'
2997 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) != 'o')))
2998 {
2999 temp = XEXP (x, 0);
3000 SUBST (XEXP (x, 0), XEXP (x, 1));
3001 SUBST (XEXP (x, 1), temp);
3002 }
3003
3004 /* If this is a PLUS, MINUS, or MULT, and the first operand is the
3005 sign extension of a PLUS with a constant, reverse the order of the sign
3006 extension and the addition. Note that this is not the same as the original
3007 code, but overflow is undefined for signed values. Also note that the
3008 PLUS will have been partially moved "inside" the sign-extension, so that
3009 the first operand of X will really look like:
3010 (ashiftrt (plus (ashift A C4) C5) C4).
3011 We convert this to
3012 (plus (ashiftrt (ashift A C4) C4) C6),
3013 where C6 is the constant (ashiftrt C5 C4), and replace the first operand
3014 of X with that expression. Later parts of this function may simplify it further.
3015
3016 For example, if we start with (mult (sign_extend (plus A C1)) C2),
3017 we swap the SIGN_EXTEND and PLUS. Later code will apply the
3018 distributive law to produce (plus (mult (sign_extend A) C2) C3).
3019
3020 We do this to simplify address expressions. */
3021
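/* A concrete instance (illustrative, assuming A is a QImode value being
   sign-extended to SImode, so C4 is 24): (mult (sign_extend:SI (plus A 1)) 4)
   is eventually rewritten as (plus (mult (sign_extend:SI A) 4) 4). */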
3022 if ((code == PLUS || code == MINUS || code == MULT)
3023 && GET_CODE (XEXP (x, 0)) == ASHIFTRT
3024 && GET_CODE (XEXP (XEXP (x, 0), 0)) == PLUS
3025 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == ASHIFT
3026 && GET_CODE (XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 1)) == CONST_INT
3027 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3028 && XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 1) == XEXP (XEXP (x, 0), 1)
3029 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
3030 && (temp = simplify_binary_operation (ASHIFTRT, mode,
3031 XEXP (XEXP (XEXP (x, 0), 0), 1),
3032 XEXP (XEXP (x, 0), 1))) != 0)
3033 {
3034 rtx new
3035 = simplify_shift_const (NULL_RTX, ASHIFT, mode,
3036 XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 0),
3037 INTVAL (XEXP (XEXP (x, 0), 1)));
3038
3039 new = simplify_shift_const (NULL_RTX, ASHIFTRT, mode, new,
3040 INTVAL (XEXP (XEXP (x, 0), 1)));
3041
3042 SUBST (XEXP (x, 0), gen_binary (PLUS, mode, new, temp));
3043 }
3044
3045 /* If this is a simple operation applied to an IF_THEN_ELSE, try
3046 applying it to the arms of the IF_THEN_ELSE. This often simplifies
3047 things. Check for cases where both arms are testing the same
3048 condition.
3049
3050 Don't do anything if all operands are very simple. */
3051
3052 if (((GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c'
3053 || GET_RTX_CLASS (code) == '<')
3054 && ((GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) != 'o'
3055 && ! (GET_CODE (XEXP (x, 0)) == SUBREG
3056 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0))))
3057 == 'o')))
3058 || (GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) != 'o'
3059 && ! (GET_CODE (XEXP (x, 1)) == SUBREG
3060 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 1))))
3061 == 'o')))))
3062 || (GET_RTX_CLASS (code) == '1'
3063 && ((GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) != 'o'
3064 && ! (GET_CODE (XEXP (x, 0)) == SUBREG
3065 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0))))
3066 == 'o'))))))
3067 {
3068 rtx cond, true, false;
3069
3070 cond = if_then_else_cond (x, &true, &false);
3071 if (cond != 0)
3072 {
3073 rtx cop1 = const0_rtx;
3074 enum rtx_code cond_code = simplify_comparison (NE, &cond, &cop1);
3075
3076 if (cond_code == NE && GET_RTX_CLASS (GET_CODE (cond)) == '<')
3077 return x;
3078
3079 /* Simplify the alternative arms; this may collapse the true and
3080 false arms to store-flag values. */
3081 true = subst (true, pc_rtx, pc_rtx, 0, 0);
3082 false = subst (false, pc_rtx, pc_rtx, 0, 0);
3083
3084 /* Restarting if we generate a store-flag expression will cause
3085 us to loop. Just drop through in this case. */
3086
3087 /* If the result values are STORE_FLAG_VALUE and zero, we can
3088 just make the comparison operation. */
3089 if (true == const_true_rtx && false == const0_rtx)
3090 x = gen_binary (cond_code, mode, cond, cop1);
3091 else if (true == const0_rtx && false == const_true_rtx)
3092 x = gen_binary (reverse_condition (cond_code), mode, cond, cop1);
3093
3094 /* Likewise, we can make the negate of a comparison operation
3095 if the result values are - STORE_FLAG_VALUE and zero. */
3096 else if (GET_CODE (true) == CONST_INT
3097 && INTVAL (true) == - STORE_FLAG_VALUE
3098 && false == const0_rtx)
3099 x = gen_unary (NEG, mode, mode,
3100 gen_binary (cond_code, mode, cond, cop1));
3101 else if (GET_CODE (false) == CONST_INT
3102 && INTVAL (false) == - STORE_FLAG_VALUE
3103 && true == const0_rtx)
3104 x = gen_unary (NEG, mode, mode,
3105 gen_binary (reverse_condition (cond_code),
3106 mode, cond, cop1));
3107 else
3108 return gen_rtx (IF_THEN_ELSE, mode,
3109 gen_binary (cond_code, VOIDmode, cond, cop1),
3110 true, false);
3111
3112 code = GET_CODE (x);
3113 op0_mode = VOIDmode;
3114 }
3115 }
3116
3117 /* Try to fold this expression in case we have constants that weren't
3118 present before. */
3119 temp = 0;
3120 switch (GET_RTX_CLASS (code))
3121 {
3122 case '1':
3123 temp = simplify_unary_operation (code, mode, XEXP (x, 0), op0_mode);
3124 break;
3125 case '<':
3126 temp = simplify_relational_operation (code, op0_mode,
3127 XEXP (x, 0), XEXP (x, 1));
3128#ifdef FLOAT_STORE_FLAG_VALUE
3129 if (temp != 0 && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
3130 temp = ((temp == const0_rtx) ? CONST0_RTX (GET_MODE (x))
3131 : immed_real_const_1 (FLOAT_STORE_FLAG_VALUE, GET_MODE (x)));
3132#endif
3133 break;
3134 case 'c':
3135 case '2':
3136 temp = simplify_binary_operation (code, mode, XEXP (x, 0), XEXP (x, 1));
3137 break;
3138 case 'b':
3139 case '3':
3140 temp = simplify_ternary_operation (code, mode, op0_mode, XEXP (x, 0),
3141 XEXP (x, 1), XEXP (x, 2));
3142 break;
3143 }
3144
3145 if (temp)
3146 x = temp, code = GET_CODE (temp);
3147
3148 /* First see if we can apply the inverse distributive law. */
3149 if (code == PLUS || code == MINUS
3150 || code == AND || code == IOR || code == XOR)
3151 {
3152 x = apply_distributive_law (x);
3153 code = GET_CODE (x);
3154 }
3155
3156 /* If CODE is an associative operation not otherwise handled, see if we
3157 can associate some operands. This can win if they are constants or
3158 if they are logically related (i.e. (a & b) & a). */
3159 if ((code == PLUS || code == MINUS
3160 || code == MULT || code == AND || code == IOR || code == XOR
3161 || code == DIV || code == UDIV
3162 || code == SMAX || code == SMIN || code == UMAX || code == UMIN)
3163 && INTEGRAL_MODE_P (mode))
3164 {
3165 if (GET_CODE (XEXP (x, 0)) == code)
3166 {
3167 rtx other = XEXP (XEXP (x, 0), 0);
3168 rtx inner_op0 = XEXP (XEXP (x, 0), 1);
3169 rtx inner_op1 = XEXP (x, 1);
3170 rtx inner;
3171
3172 /* Make sure we pass the constant operand if any as the second
3173 one if this is a commutative operation. */
3174 if (CONSTANT_P (inner_op0) && GET_RTX_CLASS (code) == 'c')
3175 {
3176 rtx tem = inner_op0;
3177 inner_op0 = inner_op1;
3178 inner_op1 = tem;
3179 }
3180 inner = simplify_binary_operation (code == MINUS ? PLUS
3181 : code == DIV ? MULT
3182 : code == UDIV ? MULT
3183 : code,
3184 mode, inner_op0, inner_op1);
3185
3186 /* For commutative operations, try the other pair if that one
3187 didn't simplify. */
3188 if (inner == 0 && GET_RTX_CLASS (code) == 'c')
3189 {
3190 other = XEXP (XEXP (x, 0), 1);
3191 inner = simplify_binary_operation (code, mode,
3192 XEXP (XEXP (x, 0), 0),
3193 XEXP (x, 1));
3194 }
3195
3196 if (inner)
3197 return gen_binary (code, mode, other, inner);
3198 }
3199 }
3200
3201 /* A little bit of algebraic simplification here. */
3202 switch (code)
3203 {
3204 case MEM:
3205 /* Ensure that our address has any ASHIFTs converted to MULT in case
3206 address-recognizing predicates are called later. */
3207 temp = make_compound_operation (XEXP (x, 0), MEM);
3208 SUBST (XEXP (x, 0), temp);
3209 break;
3210
3211 case SUBREG:
3212 /* (subreg:A (mem:B X) N) becomes a modified MEM unless the SUBREG
3213 is paradoxical. If we can't do that safely, then it becomes
3214 something nonsensical so that this combination won't take place. */
3215
3216 if (GET_CODE (SUBREG_REG (x)) == MEM
3217 && (GET_MODE_SIZE (mode)
3218 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
3219 {
3220 rtx inner = SUBREG_REG (x);
3221 int endian_offset = 0;
3222 /* Don't change the mode of the MEM
3223 if that would change the meaning of the address. */
3224 if (MEM_VOLATILE_P (SUBREG_REG (x))
3225 || mode_dependent_address_p (XEXP (inner, 0)))
3226 return gen_rtx (CLOBBER, mode, const0_rtx);
3227
3228 if (BYTES_BIG_ENDIAN)
3229 {
3230 if (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
3231 endian_offset += UNITS_PER_WORD - GET_MODE_SIZE (mode);
3232 if (GET_MODE_SIZE (GET_MODE (inner)) < UNITS_PER_WORD)
3233 endian_offset -= (UNITS_PER_WORD
3234 - GET_MODE_SIZE (GET_MODE (inner)));
3235 }
3236 /* Note if the plus_constant doesn't make a valid address
3237 then this combination won't be accepted. */
3238 x = gen_rtx (MEM, mode,
3239 plus_constant (XEXP (inner, 0),
3240 (SUBREG_WORD (x) * UNITS_PER_WORD
3241 + endian_offset)));
3242 MEM_VOLATILE_P (x) = MEM_VOLATILE_P (inner);
3243 RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (inner);
3244 MEM_IN_STRUCT_P (x) = MEM_IN_STRUCT_P (inner);
3245 return x;
3246 }
3247
3248 /* If we are in a SET_DEST, these other cases can't apply. */
3249 if (in_dest)
3250 return x;
3251
3252 /* Changing mode twice with SUBREG => just change it once,
3253 or not at all if changing back to starting mode. */
3254 if (GET_CODE (SUBREG_REG (x)) == SUBREG)
3255 {
3256 if (mode == GET_MODE (SUBREG_REG (SUBREG_REG (x)))
3257 && SUBREG_WORD (x) == 0 && SUBREG_WORD (SUBREG_REG (x)) == 0)
3258 return SUBREG_REG (SUBREG_REG (x));
3259
3260 SUBST_INT (SUBREG_WORD (x),
3261 SUBREG_WORD (x) + SUBREG_WORD (SUBREG_REG (x)));
3262 SUBST (SUBREG_REG (x), SUBREG_REG (SUBREG_REG (x)));
3263 }
3264
3265 /* SUBREG of a hard register => just change the register number
3266 and/or mode. If the hard register is not valid in that mode,
3267 suppress this combination. If the hard register is the stack,
3268 frame, or argument pointer, leave this as a SUBREG. */
230d793d
RS
3269
3270 if (GET_CODE (SUBREG_REG (x)) == REG
3271 && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER
3272 && REGNO (SUBREG_REG (x)) != FRAME_POINTER_REGNUM
3273#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3274 && REGNO (SUBREG_REG (x)) != HARD_FRAME_POINTER_REGNUM
3275#endif
3276#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3277 && REGNO (SUBREG_REG (x)) != ARG_POINTER_REGNUM
3278#endif
3279 && REGNO (SUBREG_REG (x)) != STACK_POINTER_REGNUM)
3280 {
3281 if (HARD_REGNO_MODE_OK (REGNO (SUBREG_REG (x)) + SUBREG_WORD (x),
3282 mode))
3283 return gen_rtx (REG, mode,
3284 REGNO (SUBREG_REG (x)) + SUBREG_WORD (x));
3285 else
3286 return gen_rtx (CLOBBER, mode, const0_rtx);
3287 }
3288
3289 /* For a constant, try to pick up the part we want. Handle a full
3290 word and low-order part. Only do this if we are narrowing
3291 the constant; if it is being widened, we have no idea what
3292 the extra bits will have been set to. */
3293
3294 if (CONSTANT_P (SUBREG_REG (x)) && op0_mode != VOIDmode
3295 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
3296 && GET_MODE_SIZE (op0_mode) < UNITS_PER_WORD
3297 && GET_MODE_CLASS (mode) == MODE_INT)
3298 {
3299 temp = operand_subword (SUBREG_REG (x), SUBREG_WORD (x),
3300 0, op0_mode);
3301 if (temp)
3302 return temp;
3303 }
3304
3305 /* If we want a subreg of a constant, at offset 0,
3306 take the low bits. On a little-endian machine, that's
3307 always valid. On a big-endian machine, it's valid
3308 only if the constant's mode fits in one word. */
3309 if (CONSTANT_P (SUBREG_REG (x)) && subreg_lowpart_p (x)
3310 && GET_MODE_SIZE (mode) <= GET_MODE_SIZE (op0_mode)
3311 && (! WORDS_BIG_ENDIAN
3312 || GET_MODE_BITSIZE (op0_mode) <= BITS_PER_WORD))
3313 return gen_lowpart_for_combine (mode, SUBREG_REG (x));
3314
3315 /* A paradoxical SUBREG of a VOIDmode constant is the same constant,
3316 since we are saying that the high bits don't matter. */
3317 if (CONSTANT_P (SUBREG_REG (x)) && GET_MODE (SUBREG_REG (x)) == VOIDmode
3318 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (op0_mode))
3319 return SUBREG_REG (x);
3320
3321 /* Note that we cannot do any narrowing for non-constants since
3322 we might have been counting on using the fact that some bits were
3323 zero. We now do this in the SET. */
3324
3325 break;
3326
3327 case NOT:
3328 /* (not (plus X -1)) can become (neg X). */
3329 if (GET_CODE (XEXP (x, 0)) == PLUS
3330 && XEXP (XEXP (x, 0), 1) == constm1_rtx)
3331 return gen_rtx_combine (NEG, mode, XEXP (XEXP (x, 0), 0));
3332
3333 /* Similarly, (not (neg X)) is (plus X -1). */
3334 if (GET_CODE (XEXP (x, 0)) == NEG)
3335 return gen_rtx_combine (PLUS, mode, XEXP (XEXP (x, 0), 0),
3336 constm1_rtx);
3337
3338 /* (not (xor X C)) for C constant is (xor X D) with D = ~ C. */
3339 if (GET_CODE (XEXP (x, 0)) == XOR
3340 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3341 && (temp = simplify_unary_operation (NOT, mode,
3342 XEXP (XEXP (x, 0), 1),
3343 mode)) != 0)
3344 return gen_binary (XOR, mode, XEXP (XEXP (x, 0), 0), temp);
3345
3346 /* (not (ashift 1 X)) is (rotate ~1 X). We used to do this for operands
3347 other than 1, but that is not valid. We could do a similar
3348 simplification for (not (lshiftrt C X)) where C is just the sign bit,
3349 but this doesn't seem common enough to bother with. */
3350 if (GET_CODE (XEXP (x, 0)) == ASHIFT
3351 && XEXP (XEXP (x, 0), 0) == const1_rtx)
3352 return gen_rtx (ROTATE, mode, gen_unary (NOT, mode, mode, const1_rtx),
3353 XEXP (XEXP (x, 0), 1));
3354
3355 if (GET_CODE (XEXP (x, 0)) == SUBREG
3356 && subreg_lowpart_p (XEXP (x, 0))
3357 && (GET_MODE_SIZE (GET_MODE (XEXP (x, 0)))
3358 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (x, 0)))))
3359 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == ASHIFT
3360 && XEXP (SUBREG_REG (XEXP (x, 0)), 0) == const1_rtx)
3361 {
3362 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (XEXP (x, 0)));
3363
3364 x = gen_rtx (ROTATE, inner_mode,
3365 gen_unary (NOT, inner_mode, inner_mode, const1_rtx),
3366 XEXP (SUBREG_REG (XEXP (x, 0)), 1));
3367 return gen_lowpart_for_combine (mode, x);
3368 }
3369
3370#if STORE_FLAG_VALUE == -1
3371 /* (not (comparison foo bar)) can be done by reversing the comparison
3372 code if valid. */
3373 if (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
3374 && reversible_comparison_p (XEXP (x, 0)))
3375 return gen_rtx_combine (reverse_condition (GET_CODE (XEXP (x, 0))),
3376 mode, XEXP (XEXP (x, 0), 0),
3377 XEXP (XEXP (x, 0), 1));
3378
3379 /* (ashiftrt foo C) where C is the number of bits in FOO minus 1
3380 is (lt foo (const_int 0)), so we can perform the above
3381 simplification. */
3382
3383 if (XEXP (x, 1) == const1_rtx
3384 && GET_CODE (XEXP (x, 0)) == ASHIFTRT
3385 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3386 && INTVAL (XEXP (XEXP (x, 0), 1)) == GET_MODE_BITSIZE (mode) - 1)
3387 return gen_rtx_combine (GE, mode, XEXP (XEXP (x, 0), 0), const0_rtx);
3388#endif
3389
3390 /* Apply De Morgan's laws to reduce number of patterns for machines
3391 with negating logical insns (and-not, nand, etc.). If result has
3392 only one NOT, put it first, since that is how the patterns are
3393 coded. */
3394
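/* For instance (illustrative, added): (not (ior A B)) becomes
   (and (not A) (not B)), and (not (and A B)) becomes (ior (not A) (not B));
   when one operand is a CONST_INT it is complemented directly instead of
   being wrapped in a NOT. */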
3395 if (GET_CODE (XEXP (x, 0)) == IOR || GET_CODE (XEXP (x, 0)) == AND)
3396 {
3397 rtx in1 = XEXP (XEXP (x, 0), 0), in2 = XEXP (XEXP (x, 0), 1);
3398
3399 if (GET_CODE (in1) == NOT)
3400 in1 = XEXP (in1, 0);
3401 else
3402 in1 = gen_rtx_combine (NOT, GET_MODE (in1), in1);
3403
3404 if (GET_CODE (in2) == NOT)
3405 in2 = XEXP (in2, 0);
3406 else if (GET_CODE (in2) == CONST_INT
3407 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3408 in2 = GEN_INT (GET_MODE_MASK (mode) & ~ INTVAL (in2));
3409 else
3410 in2 = gen_rtx_combine (NOT, GET_MODE (in2), in2);
3411
3412 if (GET_CODE (in2) == NOT)
3413 {
3414 rtx tem = in2;
3415 in2 = in1; in1 = tem;
3416 }
3417
3418 return gen_rtx_combine (GET_CODE (XEXP (x, 0)) == IOR ? AND : IOR,
3419 mode, in1, in2);
3420 }
3421 break;
3422
3423 case NEG:
3424 /* (neg (plus X 1)) can become (not X). */
3425 if (GET_CODE (XEXP (x, 0)) == PLUS
3426 && XEXP (XEXP (x, 0), 1) == const1_rtx)
3427 return gen_rtx_combine (NOT, mode, XEXP (XEXP (x, 0), 0));
3428
3429 /* Similarly, (neg (not X)) is (plus X 1). */
3430 if (GET_CODE (XEXP (x, 0)) == NOT)
3431 return plus_constant (XEXP (XEXP (x, 0), 0), 1);
3432
3433 /* (neg (minus X Y)) can become (minus Y X). */
3434 if (GET_CODE (XEXP (x, 0)) == MINUS
3435 && (! FLOAT_MODE_P (mode)
3436 /* x-y != -(y-x) with IEEE floating point. */
3437 || TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3438 || flag_fast_math))
3439 return gen_binary (MINUS, mode, XEXP (XEXP (x, 0), 1),
3440 XEXP (XEXP (x, 0), 0));
3441
3442 /* (neg (xor A 1)) is (plus A -1) if A is known to be either 0 or 1. */
3443 if (GET_CODE (XEXP (x, 0)) == XOR && XEXP (XEXP (x, 0), 1) == const1_rtx
3444 && nonzero_bits (XEXP (XEXP (x, 0), 0), mode) == 1)
3445 return gen_binary (PLUS, mode, XEXP (XEXP (x, 0), 0), constm1_rtx);
3446
3447 /* NEG commutes with ASHIFT since it is multiplication. Only do this
3448 if we can then eliminate the NEG (e.g.,
3449 if the operand is a constant). */
3450
3451 if (GET_CODE (XEXP (x, 0)) == ASHIFT)
3452 {
3453 temp = simplify_unary_operation (NEG, mode,
3454 XEXP (XEXP (x, 0), 0), mode);
3455 if (temp)
3456 {
3457 SUBST (XEXP (XEXP (x, 0), 0), temp);
3458 return XEXP (x, 0);
3459 }
3460 }
3461
3462 temp = expand_compound_operation (XEXP (x, 0));
3463
3464 /* For C equal to the width of MODE minus 1, (neg (ashiftrt X C)) can be
3465 replaced by (lshiftrt X C). This will convert
3466 (neg (sign_extract X 1 Y)) to (zero_extract X 1 Y). */
3467
3468 if (GET_CODE (temp) == ASHIFTRT
3469 && GET_CODE (XEXP (temp, 1)) == CONST_INT
3470 && INTVAL (XEXP (temp, 1)) == GET_MODE_BITSIZE (mode) - 1)
3471 return simplify_shift_const (temp, LSHIFTRT, mode, XEXP (temp, 0),
3472 INTVAL (XEXP (temp, 1)));
3473
3474 /* If X has only a single bit that might be nonzero, say, bit I, convert
3475 (neg X) to (ashiftrt (ashift X C-I) C-I) where C is the bitsize of
3476 MODE minus 1. This will convert (neg (zero_extract X 1 Y)) to
3477 (sign_extract X 1 Y). But only do this if TEMP isn't a register
3478 or a SUBREG of one since we'd be making the expression more
3479 complex if it was just a register. */
3480
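/* Worked instance (illustrative, assuming SImode, 32 bits): if only bit 3
   of TEMP can be nonzero, so TEMP is either 0 or 8, then
   (neg TEMP) == (ashiftrt (ashift TEMP 28) 28): shifting bit 3 into the
   sign bit and arithmetically shifting back yields 0 or -8. */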
3481 if (GET_CODE (temp) != REG
3482 && ! (GET_CODE (temp) == SUBREG
3483 && GET_CODE (SUBREG_REG (temp)) == REG)
3484 && (i = exact_log2 (nonzero_bits (temp, mode))) >= 0)
3485 {
3486 rtx temp1 = simplify_shift_const
3487 (NULL_RTX, ASHIFTRT, mode,
3488 simplify_shift_const (NULL_RTX, ASHIFT, mode, temp,
3489 GET_MODE_BITSIZE (mode) - 1 - i),
3490 GET_MODE_BITSIZE (mode) - 1 - i);
3491
3492 /* If all we did was surround TEMP with the two shifts, we
3493 haven't improved anything, so don't use it. Otherwise,
3494 we are better off with TEMP1. */
3495 if (GET_CODE (temp1) != ASHIFTRT
3496 || GET_CODE (XEXP (temp1, 0)) != ASHIFT
3497 || XEXP (XEXP (temp1, 0), 0) != temp)
3498 return temp1;
3499 }
3500 break;
3501
3502 case TRUNCATE:
3503 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3504 SUBST (XEXP (x, 0),
3505 force_to_mode (XEXP (x, 0), GET_MODE (XEXP (x, 0)),
3506 GET_MODE_MASK (mode), NULL_RTX, 0));
3507 break;
3508
3509 case FLOAT_TRUNCATE:
3510 /* (float_truncate:SF (float_extend:DF foo:SF)) = foo:SF. */
3511 if (GET_CODE (XEXP (x, 0)) == FLOAT_EXTEND
3512 && GET_MODE (XEXP (XEXP (x, 0), 0)) == mode)
3513 return XEXP (XEXP (x, 0), 0);
3514
3515 /* (float_truncate:SF (OP:DF (float_extend:DF foo:sf))) is
3516 (OP:SF foo:SF) if OP is NEG or ABS. */
3517 if ((GET_CODE (XEXP (x, 0)) == ABS
3518 || GET_CODE (XEXP (x, 0)) == NEG)
3519 && GET_CODE (XEXP (XEXP (x, 0), 0)) == FLOAT_EXTEND
3520 && GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == mode)
3521 return gen_unary (GET_CODE (XEXP (x, 0)), mode, mode,
3522 XEXP (XEXP (XEXP (x, 0), 0), 0));
3523
3524 /* (float_truncate:SF (subreg:DF (float_truncate:SF X) 0))
3525 is (float_truncate:SF x). */
3526 if (GET_CODE (XEXP (x, 0)) == SUBREG
3527 && subreg_lowpart_p (XEXP (x, 0))
3528 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == FLOAT_TRUNCATE)
3529 return SUBREG_REG (XEXP (x, 0));
3530 break;
3531
3532#ifdef HAVE_cc0
3533 case COMPARE:
3534 /* Convert (compare FOO (const_int 0)) to FOO unless we aren't
3535 using cc0, in which case we want to leave it as a COMPARE
3536 so we can distinguish it from a register-register-copy. */
3537 if (XEXP (x, 1) == const0_rtx)
3538 return XEXP (x, 0);
3539
3540 /* In IEEE floating point, x-0 is not the same as x. */
3541 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3542 || ! FLOAT_MODE_P (GET_MODE (XEXP (x, 0)))
3543 || flag_fast_math)
3544 && XEXP (x, 1) == CONST0_RTX (GET_MODE (XEXP (x, 0))))
3545 return XEXP (x, 0);
3546 break;
3547#endif
3548
3549 case CONST:
3550 /* (const (const X)) can become (const X). Do it this way rather than
3551 returning the inner CONST since CONST can be shared with a
3552 REG_EQUAL note. */
3553 if (GET_CODE (XEXP (x, 0)) == CONST)
3554 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
3555 break;
3556
3557#ifdef HAVE_lo_sum
3558 case LO_SUM:
3559 /* Convert (lo_sum (high FOO) FOO) to FOO. This is necessary so we
3560 can add in an offset. find_split_point will split this address up
3561 again if it doesn't match. */
3562 if (GET_CODE (XEXP (x, 0)) == HIGH
3563 && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1)))
3564 return XEXP (x, 1);
3565 break;
3566#endif
3567
3568 case PLUS:
3569 /* If we have (plus (plus (A const) B)), associate it so that CONST is
3570 outermost. That's because that's the way indexed addresses are
3571 supposed to appear. This code used to check many more cases, but
3572 they are now checked elsewhere. */
3573 if (GET_CODE (XEXP (x, 0)) == PLUS
3574 && CONSTANT_ADDRESS_P (XEXP (XEXP (x, 0), 1)))
3575 return gen_binary (PLUS, mode,
3576 gen_binary (PLUS, mode, XEXP (XEXP (x, 0), 0),
3577 XEXP (x, 1)),
3578 XEXP (XEXP (x, 0), 1));
3579
3580 /* (plus (xor (and <foo> (const_int pow2 - 1)) <c>) <-c>)
3581 when c is (const_int (pow2 + 1) / 2) is a sign extension of a
3582 bit-field and can be replaced by either a sign_extend or a
3583 sign_extract. The `and' may be a zero_extend. */
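/* A concrete case (illustrative, added): with pow2 == 256 and c == 128,
   (plus (xor (and X (const_int 255)) (const_int 128)) (const_int -128))
   sign-extends the low byte of X, since ((x & 255) ^ 128) - 128 is the
   classical two-operation byte sign extension. */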
3584 if (GET_CODE (XEXP (x, 0)) == XOR
3585 && GET_CODE (XEXP (x, 1)) == CONST_INT
3586 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3587 && INTVAL (XEXP (x, 1)) == - INTVAL (XEXP (XEXP (x, 0), 1))
3588 && (i = exact_log2 (INTVAL (XEXP (XEXP (x, 0), 1)))) >= 0
3589 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3590 && ((GET_CODE (XEXP (XEXP (x, 0), 0)) == AND
3591 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
3592 && (INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1))
3593 == ((HOST_WIDE_INT) 1 << (i + 1)) - 1))
3594 || (GET_CODE (XEXP (XEXP (x, 0), 0)) == ZERO_EXTEND
3595 && (GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)))
3596 == i + 1))))
3597 return simplify_shift_const
3598 (NULL_RTX, ASHIFTRT, mode,
3599 simplify_shift_const (NULL_RTX, ASHIFT, mode,
3600 XEXP (XEXP (XEXP (x, 0), 0), 0),
3601 GET_MODE_BITSIZE (mode) - (i + 1)),
3602 GET_MODE_BITSIZE (mode) - (i + 1));
3603
3604 /* (plus (comparison A B) C) can become (neg (rev-comp A B)) if
3605 C is 1 and STORE_FLAG_VALUE is -1 or if C is -1 and STORE_FLAG_VALUE
3606 is 1. This produces better code than the alternative immediately
3607 below. */
3608 if (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
3609 && reversible_comparison_p (XEXP (x, 0))
3610 && ((STORE_FLAG_VALUE == -1 && XEXP (x, 1) == const1_rtx)
3611 || (STORE_FLAG_VALUE == 1 && XEXP (x, 1) == constm1_rtx)))
3612 return
3613 gen_unary (NEG, mode, mode,
3614 gen_binary (reverse_condition (GET_CODE (XEXP (x, 0))),
3615 mode, XEXP (XEXP (x, 0), 0),
3616 XEXP (XEXP (x, 0), 1)));
3617
3618 /* If only the low-order bit of X is possibly nonzero, (plus x -1)
3619 can become (ashiftrt (ashift (xor x 1) C) C) where C is
3620 the bitsize of the mode - 1. This allows simplification of
3621 "a = (b & 8) == 0;" */
3622 if (XEXP (x, 1) == constm1_rtx
3623 && GET_CODE (XEXP (x, 0)) != REG
3624 && ! (GET_CODE (XEXP (x,0)) == SUBREG
3625 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG)
3626 && nonzero_bits (XEXP (x, 0), mode) == 1)
3627 return simplify_shift_const (NULL_RTX, ASHIFTRT, mode,
3628 simplify_shift_const (NULL_RTX, ASHIFT, mode,
3629 gen_rtx_combine (XOR, mode,
3630 XEXP (x, 0), const1_rtx),
3631 GET_MODE_BITSIZE (mode) - 1),
3632 GET_MODE_BITSIZE (mode) - 1);
3633
3634 /* If we are adding two things that have no bits in common, convert
3635 the addition into an IOR. This will often be further simplified,
3636 for example in cases like ((a & 1) + (a & 2)), which can
3637 become a & 3. */
3638
3639 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3640 && (nonzero_bits (XEXP (x, 0), mode)
3641 & nonzero_bits (XEXP (x, 1), mode)) == 0)
3642 return gen_binary (IOR, mode, XEXP (x, 0), XEXP (x, 1));
3643 break;
3644
3645 case MINUS:
3646#if STORE_FLAG_VALUE == 1
3647 /* (minus 1 (comparison foo bar)) can be done by reversing the comparison
3648 code if valid. */
3649 if (XEXP (x, 0) == const1_rtx
3650 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<'
3651 && reversible_comparison_p (XEXP (x, 1)))
3652 return gen_binary (reverse_condition (GET_CODE (XEXP (x, 1))),
3653 mode, XEXP (XEXP (x, 1), 0),
3654 XEXP (XEXP (x, 1), 1));
3655#endif
3656
230d793d
RS
3657 /* (minus <foo> (and <foo> (const_int -pow2))) becomes
3658 (and <foo> (const_int pow2-1)) */
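/* E.g. (illustrative, added): (minus X (and X (const_int -8))) becomes
   (and X (const_int 7)); in C terms, x - (x & ~7) == x & 7. */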
3659 if (GET_CODE (XEXP (x, 1)) == AND
3660 && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
3661 && exact_log2 (- INTVAL (XEXP (XEXP (x, 1), 1))) >= 0
3662 && rtx_equal_p (XEXP (XEXP (x, 1), 0), XEXP (x, 0)))
3663 return simplify_and_const_int (NULL_RTX, mode, XEXP (x, 0),
3664 - INTVAL (XEXP (XEXP (x, 1), 1)) - 1);
3665
3666 /* Canonicalize (minus A (plus B C)) to (minus (minus A B) C) for
3667 integers. */
3668 if (GET_CODE (XEXP (x, 1)) == PLUS && INTEGRAL_MODE_P (mode))
3669 return gen_binary (MINUS, mode,
3670 gen_binary (MINUS, mode, XEXP (x, 0),
3671 XEXP (XEXP (x, 1), 0)),
3672 XEXP (XEXP (x, 1), 1));
3673 break;
3674
3675 case MULT:
3676 /* If we have (mult (plus A B) C), apply the distributive law and then
3677 the inverse distributive law to see if things simplify. This
3678 occurs mostly in addresses, often when unrolling loops. */
3679
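/* For instance (illustrative, added): (mult (plus A (const_int 4)) (const_int 8))
   is expanded by the distributive law to
   (plus (mult A (const_int 8)) (const_int 32)), with the constant
   multiplication folded along the way. */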
3680 if (GET_CODE (XEXP (x, 0)) == PLUS)
3681 {
3682 x = apply_distributive_law
3683 (gen_binary (PLUS, mode,
3684 gen_binary (MULT, mode,
3685 XEXP (XEXP (x, 0), 0), XEXP (x, 1)),
3686 gen_binary (MULT, mode,
3687 XEXP (XEXP (x, 0), 1), XEXP (x, 1))));
3688
3689 if (GET_CODE (x) != MULT)
3690 return x;
3691 }
3692 break;
3693
3694 case UDIV:
3695 /* If this is a divide by a power of two, treat it as a shift if
3696 its first operand is a shift. */
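/* E.g. (illustrative, added): (udiv (lshiftrt X (const_int 2)) (const_int 4))
   becomes (lshiftrt X (const_int 4)), since an unsigned divide by 4 is just
   two more logical right shifts. */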
3697 if (GET_CODE (XEXP (x, 1)) == CONST_INT
3698 && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0
3699 && (GET_CODE (XEXP (x, 0)) == ASHIFT
3700 || GET_CODE (XEXP (x, 0)) == LSHIFTRT
3701 || GET_CODE (XEXP (x, 0)) == ASHIFTRT
3702 || GET_CODE (XEXP (x, 0)) == ROTATE
3703 || GET_CODE (XEXP (x, 0)) == ROTATERT))
3704 return simplify_shift_const (NULL_RTX, LSHIFTRT, mode, XEXP (x, 0), i);
3705 break;
3706
3707 case EQ: case NE:
3708 case GT: case GTU: case GE: case GEU:
3709 case LT: case LTU: case LE: case LEU:
3710 /* If the first operand is a condition code, we can't do anything
3711 with it. */
3712 if (GET_CODE (XEXP (x, 0)) == COMPARE
3713 || (GET_MODE_CLASS (GET_MODE (XEXP (x, 0))) != MODE_CC
3714#ifdef HAVE_cc0
3715 && XEXP (x, 0) != cc0_rtx
3716#endif
3717 ))
3718 {
3719 rtx op0 = XEXP (x, 0);
3720 rtx op1 = XEXP (x, 1);
3721 enum rtx_code new_code;
3722
3723 if (GET_CODE (op0) == COMPARE)
3724 op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);
3725
3726 /* Simplify our comparison, if possible. */
3727 new_code = simplify_comparison (code, &op0, &op1);
3728
3729#if STORE_FLAG_VALUE == 1
3730 /* If STORE_FLAG_VALUE is 1, we can convert (ne x 0) to simply X
3731 if only the low-order bit is possibly nonzero in X (such as when
3732 X is a ZERO_EXTRACT of one bit). Similarly, we can convert EQ to
3733 (xor X 1) or (minus 1 X); we use the former. Finally, if X is
3734 known to be either 0 or -1, NE becomes a NEG and EQ becomes
3735 (plus X 1).
3736
3737 Remove any ZERO_EXTRACT we made when thinking this was a
3738 comparison. It may now be simpler to use, e.g., an AND. If a
3739 ZERO_EXTRACT is indeed appropriate, it will be placed back by
3740 the call to make_compound_operation in the SET case. */
3741
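/* Illustrative instances (added, assuming STORE_FLAG_VALUE == 1): if OP0 is
   (and X (const_int 1)), then (ne OP0 (const_int 0)) simplifies to OP0
   itself, and (eq OP0 (const_int 0)) becomes (xor OP0 (const_int 1)). */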
3742 if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
3743 && op1 == const0_rtx
3744 && nonzero_bits (op0, mode) == 1)
3745 return gen_lowpart_for_combine (mode,
3746 expand_compound_operation (op0));
3747
3748 else if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
3749 && op1 == const0_rtx
3750 && (num_sign_bit_copies (op0, mode)
3751 == GET_MODE_BITSIZE (mode)))
3752 {
3753 op0 = expand_compound_operation (op0);
3754 return gen_unary (NEG, mode, mode,
3755 gen_lowpart_for_combine (mode, op0));
3756 }
3757
3758 else if (new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
3759 && op1 == const0_rtx
3760 && nonzero_bits (op0, mode) == 1)
3761 {
3762 op0 = expand_compound_operation (op0);
3763 return gen_binary (XOR, mode,
3764 gen_lowpart_for_combine (mode, op0),
3765 const1_rtx);
3766 }
3767
3768 else if (new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
3769 && op1 == const0_rtx
3770 && (num_sign_bit_copies (op0, mode)
3771 == GET_MODE_BITSIZE (mode)))
3772 {
3773 op0 = expand_compound_operation (op0);
3774 return plus_constant (gen_lowpart_for_combine (mode, op0), 1);
3775 }
3776#endif
3777
3778#if STORE_FLAG_VALUE == -1
3779 /* If STORE_FLAG_VALUE is -1, we have cases similar to
3780 those above. */
3781 if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
3782 && op1 == const0_rtx
3783 && (num_sign_bit_copies (op0, mode)
3784 == GET_MODE_BITSIZE (mode)))
3785 return gen_lowpart_for_combine (mode,
3786 expand_compound_operation (op0));
3787
3788 else if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
3789 && op1 == const0_rtx
3790 && nonzero_bits (op0, mode) == 1)
3791 {
3792 op0 = expand_compound_operation (op0);
3793 return gen_unary (NEG, mode, mode,
3794 gen_lowpart_for_combine (mode, op0));
3795 }
3796
3797 else if (new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
3798 && op1 == const0_rtx
3799 && (num_sign_bit_copies (op0, mode)
3800 == GET_MODE_BITSIZE (mode)))
3801 {
3802 op0 = expand_compound_operation (op0);
3803 return gen_unary (NOT, mode, mode,
3804 gen_lowpart_for_combine (mode, op0));
3805 }
3806
3807 /* If X is 0/1, (eq X 0) is X-1. */
3808 else if (new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
3809 && op1 == const0_rtx
3810 && nonzero_bits (op0, mode) == 1)
3811 {
3812 op0 = expand_compound_operation (op0);
3813 return plus_constant (gen_lowpart_for_combine (mode, op0), -1);
3814 }
3815#endif
3816
3817 /* If STORE_FLAG_VALUE says to just test the sign bit and X has just
3818 one bit that might be nonzero, we can convert (ne x 0) to
3819 (ashift x c) where C puts the bit in the sign bit. Remove any
3820 AND with STORE_FLAG_VALUE when we are done, since we are only
3821 going to test the sign bit. */
3822 if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
3823 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3824 && (STORE_FLAG_VALUE
3825 == (HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (mode) - 1))
3826 && op1 == const0_rtx
3827 && mode == GET_MODE (op0)
3828 && (i = exact_log2 (nonzero_bits (op0, mode))) >= 0)
3829 {
3830 x = simplify_shift_const (NULL_RTX, ASHIFT, mode,
3831 expand_compound_operation (op0),
3832 GET_MODE_BITSIZE (mode) - 1 - i);
3833 if (GET_CODE (x) == AND && XEXP (x, 1) == const_true_rtx)
3834 return XEXP (x, 0);
3835 else
3836 return x;
3837 }
3838
3839 /* If the code changed, return a whole new comparison. */
3840 if (new_code != code)
3841 return gen_rtx_combine (new_code, mode, op0, op1);
3842
3843 /* Otherwise, keep this operation, but maybe change its operands.
3844 This also converts (ne (compare FOO BAR) 0) to (ne FOO BAR). */
3845 SUBST (XEXP (x, 0), op0);
3846 SUBST (XEXP (x, 1), op1);
3847 }
3848 break;
3849
3850 case IF_THEN_ELSE:
3851 return simplify_if_then_else (x);
3852
3853 case ZERO_EXTRACT:
3854 case SIGN_EXTRACT:
3855 case ZERO_EXTEND:
3856 case SIGN_EXTEND:
3857 /* If we are processing SET_DEST, we are done. */
3858 if (in_dest)
3859 return x;
3860
3861 return expand_compound_operation (x);
3862
3863 case SET:
3864 return simplify_set (x);
3865
3866 case AND:
3867 case IOR:
3868 case XOR:
3869 return simplify_logical (x, last);
3870
3871 case ABS:
3872 /* (abs (neg <foo>)) -> (abs <foo>) */
3873 if (GET_CODE (XEXP (x, 0)) == NEG)
3874 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
3875
3876 /* If operand is something known to be positive, ignore the ABS. */
3877 if (GET_CODE (XEXP (x, 0)) == FFS || GET_CODE (XEXP (x, 0)) == ABS
3878 || ((GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
3879 <= HOST_BITS_PER_WIDE_INT)
3880 && ((nonzero_bits (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
3881 & ((HOST_WIDE_INT) 1
3882 << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1)))
3883 == 0)))
3884 return XEXP (x, 0);
3885
3886
3887 /* If operand is known to be only -1 or 0, convert ABS to NEG. */
3888 if (num_sign_bit_copies (XEXP (x, 0), mode) == GET_MODE_BITSIZE (mode))
3889 return gen_rtx_combine (NEG, mode, XEXP (x, 0));
3890
3891 break;
3892
3893 case FFS:
3894 /* (ffs (*_extend <X>)) = (ffs <X>) */
3895 if (GET_CODE (XEXP (x, 0)) == SIGN_EXTEND
3896 || GET_CODE (XEXP (x, 0)) == ZERO_EXTEND)
3897 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
3898 break;
3899
3900 case FLOAT:
3901 /* (float (sign_extend <X>)) = (float <X>). */
3902 if (GET_CODE (XEXP (x, 0)) == SIGN_EXTEND)
3903 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
3904 break;
3905
3906 case ASHIFT:
3907 case LSHIFTRT:
3908 case ASHIFTRT:
3909 case ROTATE:
3910 case ROTATERT:
3911 /* If this is a shift by a constant amount, simplify it. */
3912 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
3913 return simplify_shift_const (x, code, mode, XEXP (x, 0),
3914 INTVAL (XEXP (x, 1)));
3915
3916#ifdef SHIFT_COUNT_TRUNCATED
3917 else if (SHIFT_COUNT_TRUNCATED && GET_CODE (XEXP (x, 1)) != REG)
3918 SUBST (XEXP (x, 1),
3919 force_to_mode (XEXP (x, 1), GET_MODE (x),
3920 ((HOST_WIDE_INT) 1
3921 << exact_log2 (GET_MODE_BITSIZE (GET_MODE (x))))
3922 - 1,
3923 NULL_RTX, 0));
3924#endif
3925
3926 break;
3927 }
3928
3929 return x;
3930}
3931\f
3932/* Simplify X, an IF_THEN_ELSE expression. Return the new expression. */
3933
3934static rtx
3935simplify_if_then_else (x)
3936 rtx x;
3937{
3938 enum machine_mode mode = GET_MODE (x);
3939 rtx cond = XEXP (x, 0);
3940 rtx true = XEXP (x, 1);
3941 rtx false = XEXP (x, 2);
3942 enum rtx_code true_code = GET_CODE (cond);
3943 int comparison_p = GET_RTX_CLASS (true_code) == '<';
3944 rtx temp;
3945 int i;
3946
3947 /* Simplify storing of the truth value. */
3948 if (comparison_p && true == const_true_rtx && false == const0_rtx)
3949 return gen_binary (true_code, mode, XEXP (cond, 0), XEXP (cond, 1));
3950
3951 /* Also when the truth value has to be reversed. */
3952 if (comparison_p && reversible_comparison_p (cond)
3953 && true == const0_rtx && false == const_true_rtx)
3954 return gen_binary (reverse_condition (true_code),
3955 mode, XEXP (cond, 0), XEXP (cond, 1));
3956
3957 /* Sometimes we can simplify the arm of an IF_THEN_ELSE if a register used
3958 in it is being compared against certain values. Get the true and false
3959 comparisons and see if that says anything about the value of each arm. */
3960
3961 if (comparison_p && reversible_comparison_p (cond)
3962 && GET_CODE (XEXP (cond, 0)) == REG)
3963 {
3964 HOST_WIDE_INT nzb;
3965 rtx from = XEXP (cond, 0);
3966 enum rtx_code false_code = reverse_condition (true_code);
3967 rtx true_val = XEXP (cond, 1);
3968 rtx false_val = true_val;
3969 int swapped = 0;
3970
3971 /* If FALSE_CODE is EQ, swap the codes and arms. */
3972
3973 if (false_code == EQ)
3974 {
3975 swapped = 1, true_code = EQ, false_code = NE;
3976 temp = true, true = false, false = temp;
3977 }
3978
3979 /* If we are comparing against zero and the expression being tested has
3980 only a single bit that might be nonzero, that is its value when it is
3981 not equal to zero. Similarly if it is known to be -1 or 0. */
3982
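/* For example (illustrative, added): given
   (if_then_else (ne R (const_int 0)) A B) where only bit 2 of register R
   can be nonzero, R is known to equal 4 within arm A and 0 within arm B,
   and known_cond folds each arm accordingly. */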
3983 if (true_code == EQ && true_val == const0_rtx
3984 && exact_log2 (nzb = nonzero_bits (from, GET_MODE (from))) >= 0)
3985 false_code = EQ, false_val = GEN_INT (nzb);
3986 else if (true_code == EQ && true_val == const0_rtx
3987 && (num_sign_bit_copies (from, GET_MODE (from))
3988 == GET_MODE_BITSIZE (GET_MODE (from))))
3989 false_code = EQ, false_val = constm1_rtx;
3990
3991 /* Now simplify an arm if we know the value of the register in the
3992 branch and it is used in the arm. Be careful due to the potential
3993 of locally-shared RTL. */
3994
3995 if (reg_mentioned_p (from, true))
3996 true = subst (known_cond (copy_rtx (true), true_code, from, true_val),
3997 pc_rtx, pc_rtx, 0, 0);
3998 if (reg_mentioned_p (from, false))
3999 false = subst (known_cond (copy_rtx (false), false_code,
4000 from, false_val),
4001 pc_rtx, pc_rtx, 0, 0);
4002
4003 SUBST (XEXP (x, 1), swapped ? false : true);
4004 SUBST (XEXP (x, 2), swapped ? true : false);
4005
4006 true = XEXP (x, 1), false = XEXP (x, 2), true_code = GET_CODE (cond);
4007 }
4008
4009 /* If we have (if_then_else FOO (pc) (label_ref BAR)) and FOO can be
4010 reversed, do so to avoid needing two sets of patterns for
4011 subtract-and-branch insns. Similarly if we have a constant in the true
4012 arm, the false arm is the same as the first operand of the comparison, or
4013 the false arm is more complicated than the true arm. */
4014
4015 if (comparison_p && reversible_comparison_p (cond)
4016 && (true == pc_rtx
4017 || (CONSTANT_P (true)
4018 && GET_CODE (false) != CONST_INT && false != pc_rtx)
4019 || true == const0_rtx
4020 || (GET_RTX_CLASS (GET_CODE (true)) == 'o'
4021 && GET_RTX_CLASS (GET_CODE (false)) != 'o')
4022 || (GET_CODE (true) == SUBREG
4023 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (true))) == 'o'
4024 && GET_RTX_CLASS (GET_CODE (false)) != 'o')
4025 || reg_mentioned_p (true, false)
4026 || rtx_equal_p (false, XEXP (cond, 0))))
4027 {
4028 true_code = reverse_condition (true_code);
4029 SUBST (XEXP (x, 0),
4030 gen_binary (true_code, GET_MODE (cond), XEXP (cond, 0),
4031 XEXP (cond, 1)));
4032
4033 SUBST (XEXP (x, 1), false);
4034 SUBST (XEXP (x, 2), true);
4035
4036 temp = true, true = false, false = temp, cond = XEXP (x, 0);
4037 }
4038
4039 /* If the two arms are identical, we don't need the comparison. */
4040
4041 if (rtx_equal_p (true, false) && ! side_effects_p (cond))
4042 return true;
4043
4044 /* Convert a == b ? b : a to "a". */
4045 if (true_code == EQ && ! side_effects_p (cond)
4046 && rtx_equal_p (XEXP (cond, 0), false)
4047 && rtx_equal_p (XEXP (cond, 1), true))
4048 return false;
4049 else if (true_code == NE && ! side_effects_p (cond)
4050 && rtx_equal_p (XEXP (cond, 0), true)
4051 && rtx_equal_p (XEXP (cond, 1), false))
4052 return true;
4053
4054 /* Look for cases where we have (abs x) or (neg (abs X)). */
4055
4056 if (GET_MODE_CLASS (mode) == MODE_INT
4057 && GET_CODE (false) == NEG
4058 && rtx_equal_p (true, XEXP (false, 0))
4059 && comparison_p
4060 && rtx_equal_p (true, XEXP (cond, 0))
4061 && ! side_effects_p (true))
4062 switch (true_code)
4063 {
4064 case GT:
4065 case GE:
4066 return gen_unary (ABS, mode, mode, true);
4067 case LT:
4068 case LE:
4069 return gen_unary (NEG, mode, mode, gen_unary (ABS, mode, mode, true));
4070 }
4071
4072 /* Look for MIN or MAX. */
4073
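/* E.g. (illustrative, added): (if_then_else (gt A B) A B) becomes
   (smax A B), and (if_then_else (ltu A B) A B) becomes (umin A B). */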
4074 if ((! FLOAT_MODE_P (mode) || flag_fast_math)
4075 && comparison_p
4076 && rtx_equal_p (XEXP (cond, 0), true)
4077 && rtx_equal_p (XEXP (cond, 1), false)
4078 && ! side_effects_p (cond))
4079 switch (true_code)
4080 {
4081 case GE:
4082 case GT:
4083 return gen_binary (SMAX, mode, true, false);
4084 case LE:
4085 case LT:
4086 return gen_binary (SMIN, mode, true, false);
4087 case GEU:
4088 case GTU:
4089 return gen_binary (UMAX, mode, true, false);
4090 case LEU:
4091 case LTU:
4092 return gen_binary (UMIN, mode, true, false);
4093 }
4094
4095#if STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1
4096
4097 /* If we have (if_then_else COND (OP Z C1) Z) and OP is an identity when its
4098 second operand is zero, this can be done as (OP Z (mult COND C2)) where
4099 C2 = C1 * STORE_FLAG_VALUE. Similarly if OP has an outer ZERO_EXTEND or
4100 SIGN_EXTEND as long as Z is already extended (so we don't destroy it).
4101 We can do this kind of thing in some cases when STORE_FLAG_VALUE is
4102 neither of the above, but it isn't worth checking for. */
4103
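/* A sketch of the transformation (illustrative, added, assuming
   STORE_FLAG_VALUE == 1): (if_then_else COND (plus Z (const_int 5)) Z)
   becomes (plus Z (mult COND (const_int 5))), since the PLUS is an
   identity when COND is 0 and adds 5 when COND is 1. */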
4104 if (comparison_p && mode != VOIDmode && ! side_effects_p (x))
4105 {
4106 rtx t = make_compound_operation (true, SET);
4107 rtx f = make_compound_operation (false, SET);
4108 rtx cond_op0 = XEXP (cond, 0);
4109 rtx cond_op1 = XEXP (cond, 1);
4110 enum rtx_code op, extend_op = NIL;
4111 enum machine_mode m = mode;
4112 rtx z = 0, c1;
4113
4114 if ((GET_CODE (t) == PLUS || GET_CODE (t) == MINUS
4115 || GET_CODE (t) == IOR || GET_CODE (t) == XOR
4116 || GET_CODE (t) == ASHIFT
4117 || GET_CODE (t) == LSHIFTRT || GET_CODE (t) == ASHIFTRT)
4118 && rtx_equal_p (XEXP (t, 0), f))
4119 c1 = XEXP (t, 1), op = GET_CODE (t), z = f;
4120
4121 /* If an identity-zero op is commutative, check whether there
4122 would be a match if we swapped the operands. */
4123 else if ((GET_CODE (t) == PLUS || GET_CODE (t) == IOR
4124 || GET_CODE (t) == XOR)
4125 && rtx_equal_p (XEXP (t, 1), f))
4126 c1 = XEXP (t, 0), op = GET_CODE (t), z = f;
4127 else if (GET_CODE (t) == SIGN_EXTEND
4128 && (GET_CODE (XEXP (t, 0)) == PLUS
4129 || GET_CODE (XEXP (t, 0)) == MINUS
4130 || GET_CODE (XEXP (t, 0)) == IOR
4131 || GET_CODE (XEXP (t, 0)) == XOR
4132 || GET_CODE (XEXP (t, 0)) == ASHIFT
4133 || GET_CODE (XEXP (t, 0)) == LSHIFTRT
4134 || GET_CODE (XEXP (t, 0)) == ASHIFTRT)
4135 && GET_CODE (XEXP (XEXP (t, 0), 0)) == SUBREG
4136 && subreg_lowpart_p (XEXP (XEXP (t, 0), 0))
4137 && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 0)), f)
4138 && (num_sign_bit_copies (f, GET_MODE (f))
4139 > (GET_MODE_BITSIZE (mode)
4140 - GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (t, 0), 0))))))
4141 {
4142 c1 = XEXP (XEXP (t, 0), 1); z = f; op = GET_CODE (XEXP (t, 0));
4143 extend_op = SIGN_EXTEND;
4144 m = GET_MODE (XEXP (t, 0));
4145 }
4146 else if (GET_CODE (t) == SIGN_EXTEND
4147 && (GET_CODE (XEXP (t, 0)) == PLUS
4148 || GET_CODE (XEXP (t, 0)) == IOR
4149 || GET_CODE (XEXP (t, 0)) == XOR)
4150 && GET_CODE (XEXP (XEXP (t, 0), 1)) == SUBREG
4151 && subreg_lowpart_p (XEXP (XEXP (t, 0), 1))
4152 && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 1)), f)
4153 && (num_sign_bit_copies (f, GET_MODE (f))
4154 > (GET_MODE_BITSIZE (mode)
4155 - GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (t, 0), 1))))))
4156 {
4157 c1 = XEXP (XEXP (t, 0), 0); z = f; op = GET_CODE (XEXP (t, 0));
4158 extend_op = SIGN_EXTEND;
4159 m = GET_MODE (XEXP (t, 0));
4160 }
4161 else if (GET_CODE (t) == ZERO_EXTEND
4162 && (GET_CODE (XEXP (t, 0)) == PLUS
4163 || GET_CODE (XEXP (t, 0)) == MINUS
4164 || GET_CODE (XEXP (t, 0)) == IOR
4165 || GET_CODE (XEXP (t, 0)) == XOR
4166 || GET_CODE (XEXP (t, 0)) == ASHIFT
4167 || GET_CODE (XEXP (t, 0)) == LSHIFTRT
4168 || GET_CODE (XEXP (t, 0)) == ASHIFTRT)
4169 && GET_CODE (XEXP (XEXP (t, 0), 0)) == SUBREG
4170 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4171 && subreg_lowpart_p (XEXP (XEXP (t, 0), 0))
4172 && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 0)), f)
4173 && ((nonzero_bits (f, GET_MODE (f))
4174 & ~ GET_MODE_MASK (GET_MODE (XEXP (XEXP (t, 0), 0))))
4175 == 0))
4176 {
4177 c1 = XEXP (XEXP (t, 0), 1); z = f; op = GET_CODE (XEXP (t, 0));
4178 extend_op = ZERO_EXTEND;
4179 m = GET_MODE (XEXP (t, 0));
4180 }
4181 else if (GET_CODE (t) == ZERO_EXTEND
4182 && (GET_CODE (XEXP (t, 0)) == PLUS
4183 || GET_CODE (XEXP (t, 0)) == IOR
4184 || GET_CODE (XEXP (t, 0)) == XOR)
4185 && GET_CODE (XEXP (XEXP (t, 0), 1)) == SUBREG
4186 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4187 && subreg_lowpart_p (XEXP (XEXP (t, 0), 1))
4188 && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 1)), f)
4189 && ((nonzero_bits (f, GET_MODE (f))
4190 & ~ GET_MODE_MASK (GET_MODE (XEXP (XEXP (t, 0), 1))))
4191 == 0))
4192 {
4193 c1 = XEXP (XEXP (t, 0), 0); z = f; op = GET_CODE (XEXP (t, 0));
4194 extend_op = ZERO_EXTEND;
4195 m = GET_MODE (XEXP (t, 0));
4196 }
4197
4198 if (z)
4199 {
4200 temp = subst (gen_binary (true_code, m, cond_op0, cond_op1),
4201 pc_rtx, pc_rtx, 0, 0);
4202 temp = gen_binary (MULT, m, temp,
4203 gen_binary (MULT, m, c1, const_true_rtx));
4204 temp = subst (temp, pc_rtx, pc_rtx, 0, 0);
4205 temp = gen_binary (op, m, gen_lowpart_for_combine (m, z), temp);
4206
4207 if (extend_op != NIL)
4208 temp = gen_unary (extend_op, mode, m, temp);
4209
4210 return temp;
4211 }
4212 }
4213#endif
4214
4215 /* If we have (if_then_else (ne A 0) C1 0) and either A is known to be 0 or
4216 1 and C1 is a single bit or A is known to be 0 or -1 and C1 is the
4217 negation of a single bit, we can convert this operation to a shift. We
4218 can actually do this more generally, but it doesn't seem worth it. */
4219
4220 if (true_code == NE && XEXP (cond, 1) == const0_rtx
4221 && false == const0_rtx && GET_CODE (true) == CONST_INT
4222 && ((1 == nonzero_bits (XEXP (cond, 0), mode)
4223 && (i = exact_log2 (INTVAL (true))) >= 0)
4224 || ((num_sign_bit_copies (XEXP (cond, 0), mode)
4225 == GET_MODE_BITSIZE (mode))
4226 && (i = exact_log2 (- INTVAL (true))) >= 0)))
4227 return
4228 simplify_shift_const (NULL_RTX, ASHIFT, mode,
4229 gen_lowpart_for_combine (mode, XEXP (cond, 0)), i);
4230
4231 return x;
4232}
4233\f
4234/* Simplify X, a SET expression. Return the new expression. */
4235
4236static rtx
4237simplify_set (x)
4238 rtx x;
4239{
4240 rtx src = SET_SRC (x);
4241 rtx dest = SET_DEST (x);
4242 enum machine_mode mode
4243 = GET_MODE (src) != VOIDmode ? GET_MODE (src) : GET_MODE (dest);
4244 rtx other_insn;
4245 rtx *cc_use;
4246
4247 /* (set (pc) (return)) gets written as (return). */
4248 if (GET_CODE (dest) == PC && GET_CODE (src) == RETURN)
4249 return src;
4250
4251 /* Now that we know for sure which bits of SRC we are using, see if we can
4252 simplify the expression for the object knowing that we only need the
4253 low-order bits. */
4254
4255 if (GET_MODE_CLASS (mode) == MODE_INT)
4256 src = force_to_mode (src, mode, GET_MODE_MASK (mode), NULL_RTX, 0);
4257
4258 /* If we are setting CC0 or if the source is a COMPARE, look for the use of
4259 the comparison result and try to simplify it unless we already have used
4260 undobuf.other_insn. */
4261 if ((GET_CODE (src) == COMPARE
4262#ifdef HAVE_cc0
4263 || dest == cc0_rtx
4264#endif
4265 )
4266 && (cc_use = find_single_use (dest, subst_insn, &other_insn)) != 0
4267 && (undobuf.other_insn == 0 || other_insn == undobuf.other_insn)
4268 && GET_RTX_CLASS (GET_CODE (*cc_use)) == '<'
4269 && rtx_equal_p (XEXP (*cc_use, 0), dest))
4270 {
4271 enum rtx_code old_code = GET_CODE (*cc_use);
4272 enum rtx_code new_code;
4273 rtx op0, op1;
4274 int other_changed = 0;
4275 enum machine_mode compare_mode = GET_MODE (dest);
4276
4277 if (GET_CODE (src) == COMPARE)
4278 op0 = XEXP (src, 0), op1 = XEXP (src, 1);
4279 else
4280 op0 = src, op1 = const0_rtx;
4281
4282 /* Simplify our comparison, if possible. */
4283 new_code = simplify_comparison (old_code, &op0, &op1);
4284
4285#ifdef EXTRA_CC_MODES
4286 /* If this machine has CC modes other than CCmode, check to see if we
4287 need to use a different CC mode here. */
4288 compare_mode = SELECT_CC_MODE (new_code, op0, op1);
4289#endif /* EXTRA_CC_MODES */
4290
4291#if !defined (HAVE_cc0) && defined (EXTRA_CC_MODES)
4292 /* If the mode changed, we have to change SET_DEST, the mode in the
4293 compare, and the mode in the place SET_DEST is used. If SET_DEST is
4294 a hard register, just build new versions with the proper mode. If it
4295 is a pseudo, we lose unless it is the only time we set the pseudo, in
4296 which case we can safely change its mode. */
4297 if (compare_mode != GET_MODE (dest))
4298 {
4299 int regno = REGNO (dest);
4300 rtx new_dest = gen_rtx (REG, compare_mode, regno);
4301
4302 if (regno < FIRST_PSEUDO_REGISTER
4303 || (reg_n_sets[regno] == 1 && ! REG_USERVAR_P (dest)))
4304 {
4305 if (regno >= FIRST_PSEUDO_REGISTER)
4306 SUBST (regno_reg_rtx[regno], new_dest);
4307
4308 SUBST (SET_DEST (x), new_dest);
4309 SUBST (XEXP (*cc_use, 0), new_dest);
4310 other_changed = 1;
4311
4312 dest = new_dest;
4313 }
4314 }
4315#endif
4316
4317 /* If the code changed, we have to build a new comparison in
4318 undobuf.other_insn. */
4319 if (new_code != old_code)
4320 {
4321 unsigned HOST_WIDE_INT mask;
4322
4323 SUBST (*cc_use, gen_rtx_combine (new_code, GET_MODE (*cc_use),
4324 dest, const0_rtx));
4325
4326 /* If the only change we made was to change an EQ into an NE or
4327 vice versa, OP0 has only one bit that might be nonzero, and OP1
4328 is zero, check if changing the user of the condition code will
4329 produce a valid insn. If it won't, we can keep the original code
4330 in that insn by surrounding our operation with an XOR. */
4331
4332 if (((old_code == NE && new_code == EQ)
4333 || (old_code == EQ && new_code == NE))
4334 && ! other_changed && op1 == const0_rtx
4335 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
4336 && exact_log2 (mask = nonzero_bits (op0, GET_MODE (op0))) >= 0)
4337 {
4338 rtx pat = PATTERN (other_insn), note = 0;
4339 int scratches;
4340
4341 if ((recog_for_combine (&pat, other_insn, &note, &scratches) < 0
4342 && ! check_asm_operands (pat)))
4343 {
4344 PUT_CODE (*cc_use, old_code);
4345 other_insn = 0;
4346
4347 op0 = gen_binary (XOR, GET_MODE (op0), op0, GEN_INT (mask));
4348 }
4349 }
4350
4351 other_changed = 1;
4352 }
4353
4354 if (other_changed)
4355 undobuf.other_insn = other_insn;
4356
4357#ifdef HAVE_cc0
4358 /* If we are now comparing against zero, change our source if
4359 needed. If we do not use cc0, we always have a COMPARE. */
4360 if (op1 == const0_rtx && dest == cc0_rtx)
4361 {
4362 SUBST (SET_SRC (x), op0);
4363 src = op0;
4364 }
4365 else
4366#endif
4367
4368 /* Otherwise, if we didn't previously have a COMPARE in the
4369 correct mode, we need one. */
4370 if (GET_CODE (src) != COMPARE || GET_MODE (src) != compare_mode)
4371 {
4372 SUBST (SET_SRC (x),
4373 gen_rtx_combine (COMPARE, compare_mode, op0, op1));
4374 src = SET_SRC (x);
4375 }
4376 else
4377 {
4378 /* Otherwise, update the COMPARE if needed. */
4379 SUBST (XEXP (src, 0), op0);
4380 SUBST (XEXP (src, 1), op1);
4381 }
4382 }
4383 else
4384 {
4385 /* Get SET_SRC in a form where we have placed back any
4386 compound expressions. Then do the checks below. */
4387 src = make_compound_operation (src, SET);
4388 SUBST (SET_SRC (x), src);
4389 }
230d793d 4390
8079805d
RK
4391 /* If we have (set x (subreg:m1 (op:m2 ...) 0)) with OP being some operation,
4392 and X being a REG or (subreg (reg)), we may be able to convert this to
4393 (set (subreg:m2 x) (op)).
df62f951 4394
8079805d
RK
4395 We can always do this if M1 is narrower than M2 because that means that
4396 we only care about the low bits of the result.
df62f951 4397
8079805d
RK
4398 However, on machines without WORD_REGISTER_OPERATIONS defined, we cannot
4399 perform a narrower operation than requested since the high-order bits will
4400 be undefined. On machines where it is defined, this transformation is safe
4401 as long as M1 and M2 have the same number of words. */
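   /* Editor's example (assuming a 32-bit word): (set (reg:QI R)
      (subreg:QI (plus:SI A B) 0)) can become
      (set (subreg:SI (reg:QI R) 0) (plus:SI A B)), since only the low
      byte of the PLUS result was wanted in the first place.  */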
df62f951 4402
8079805d
RK
4403 if (GET_CODE (src) == SUBREG && subreg_lowpart_p (src)
4404 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (src))) != 'o'
4405 && (((GET_MODE_SIZE (GET_MODE (src)) + (UNITS_PER_WORD - 1))
4406 / UNITS_PER_WORD)
4407 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
4408 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
8baf60bb 4409#ifndef WORD_REGISTER_OPERATIONS
8079805d
RK
4410 && (GET_MODE_SIZE (GET_MODE (src))
4411 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (src))))
df62f951 4412#endif
f507a070
RK
4413#ifdef CLASS_CANNOT_CHANGE_SIZE
4414 && ! (GET_CODE (dest) == REG && REGNO (dest) < FIRST_PSEUDO_REGISTER
4415 && (TEST_HARD_REG_BIT
4416 (reg_class_contents[(int) CLASS_CANNOT_CHANGE_SIZE],
4417 REGNO (dest)))
4418 && (GET_MODE_SIZE (GET_MODE (src))
4419 != GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))))
4420#endif
8079805d
RK
4421 && (GET_CODE (dest) == REG
4422 || (GET_CODE (dest) == SUBREG
4423 && GET_CODE (SUBREG_REG (dest)) == REG)))
4424 {
4425 SUBST (SET_DEST (x),
4426 gen_lowpart_for_combine (GET_MODE (SUBREG_REG (src)),
4427 dest));
4428 SUBST (SET_SRC (x), SUBREG_REG (src));
4429
4430 src = SET_SRC (x), dest = SET_DEST (x);
4431 }
df62f951 4432
8baf60bb 4433#ifdef LOAD_EXTEND_OP
8079805d
RK
4434 /* If we have (set FOO (subreg:M (mem:N BAR) 0)) with M wider than N, this
4435 would require a paradoxical subreg. Replace the subreg with a
4436 zero_extend to avoid the reload that would otherwise be required. */
4437
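  /* Editor's example: on a target where LOAD_EXTEND_OP (QImode) is
     ZERO_EXTEND, (set FOO (subreg:SI (mem:QI BAR) 0)) is rewritten as
     (set FOO (zero_extend:SI (mem:QI BAR))), which matches what the
     load instruction does anyway.  */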
4438 if (GET_CODE (src) == SUBREG && subreg_lowpart_p (src)
4439 && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (src))) != NIL
4440 && SUBREG_WORD (src) == 0
4441 && (GET_MODE_SIZE (GET_MODE (src))
4442 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (src))))
4443 && GET_CODE (SUBREG_REG (src)) == MEM)
4444 {
4445 SUBST (SET_SRC (x),
4446 gen_rtx_combine (LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (src))),
4447 GET_MODE (src), XEXP (src, 0)));
4448
4449 src = SET_SRC (x);
4450 }
230d793d
RS
4451#endif
4452
8079805d
RK
4453 /* If we don't have a conditional move, SET_SRC is an IF_THEN_ELSE, and we
4454 are comparing an item known to be 0 or -1 against 0, use a logical
4455 operation instead. Check for one of the arms being an IOR of the other
4456 arm with some value. We compute three terms to be IOR'ed together. In
4457 practice, at most two will be nonzero. Then we do the IOR's. */
4458
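  /* Editor's sketch of the identity used (not in the original source):
     with A known to be 0 or -1, (if_then_else (ne A 0) B C) equals
     (ior (and A B) (and (not A) C)); if in addition B is (ior C D),
     this folds further to (ior C (and A D)).  */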
4459 if (GET_CODE (dest) != PC
4460 && GET_CODE (src) == IF_THEN_ELSE
36b8d792 4461 && GET_MODE_CLASS (GET_MODE (src)) == MODE_INT
8079805d
RK
4462 && (GET_CODE (XEXP (src, 0)) == EQ || GET_CODE (XEXP (src, 0)) == NE)
4463 && XEXP (XEXP (src, 0), 1) == const0_rtx
6dd49058 4464 && GET_MODE (src) == GET_MODE (XEXP (XEXP (src, 0), 0))
ea414472
DE
4465#ifdef HAVE_conditional_move
4466 && ! can_conditionally_move_p (GET_MODE (src))
4467#endif
8079805d
RK
4468 && (num_sign_bit_copies (XEXP (XEXP (src, 0), 0),
4469 GET_MODE (XEXP (XEXP (src, 0), 0)))
4470 == GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (src, 0), 0))))
4471 && ! side_effects_p (src))
4472 {
4473 rtx true = (GET_CODE (XEXP (src, 0)) == NE
4474 ? XEXP (src, 1) : XEXP (src, 2));
4475 rtx false = (GET_CODE (XEXP (src, 0)) == NE
4476 ? XEXP (src, 2) : XEXP (src, 1));
4477 rtx term1 = const0_rtx, term2, term3;
4478
4479 if (GET_CODE (true) == IOR && rtx_equal_p (XEXP (true, 0), false))
4480 term1 = false, true = XEXP (true, 1), false = const0_rtx;
4481 else if (GET_CODE (true) == IOR
4482 && rtx_equal_p (XEXP (true, 1), false))
4483 term1 = false, true = XEXP (true, 0), false = const0_rtx;
4484 else if (GET_CODE (false) == IOR
4485 && rtx_equal_p (XEXP (false, 0), true))
4486 term1 = true, false = XEXP (false, 1), true = const0_rtx;
4487 else if (GET_CODE (false) == IOR
4488 && rtx_equal_p (XEXP (false, 1), true))
4489 term1 = true, false = XEXP (false, 0), true = const0_rtx;
4490
4491 term2 = gen_binary (AND, GET_MODE (src), XEXP (XEXP (src, 0), 0), true);
4492 term3 = gen_binary (AND, GET_MODE (src),
0c1c8ea6 4493 gen_unary (NOT, GET_MODE (src), GET_MODE (src),
8079805d
RK
4494 XEXP (XEXP (src, 0), 0)),
4495 false);
4496
4497 SUBST (SET_SRC (x),
4498 gen_binary (IOR, GET_MODE (src),
4499 gen_binary (IOR, GET_MODE (src), term1, term2),
4500 term3));
4501
4502 src = SET_SRC (x);
4503 }
230d793d 4504
246e00f2
RK
4505 /* If either SRC or DEST is a CLOBBER of (const_int 0), make this
4506 whole thing fail. */
4507 if (GET_CODE (src) == CLOBBER && XEXP (src, 0) == const0_rtx)
4508 return src;
4509 else if (GET_CODE (dest) == CLOBBER && XEXP (dest, 0) == const0_rtx)
4510 return dest;
4511 else
4512 /* Convert this into a field assignment operation, if possible. */
4513 return make_field_assignment (x);
8079805d
RK
4514}
4515\f
4516/* Simplify X, an AND, IOR, or XOR operation, and return the simplified
4517 result. LAST is nonzero if this is the last retry. */
4518
4519static rtx
4520simplify_logical (x, last)
4521 rtx x;
4522 int last;
4523{
4524 enum machine_mode mode = GET_MODE (x);
4525 rtx op0 = XEXP (x, 0);
4526 rtx op1 = XEXP (x, 1);
4527
4528 switch (GET_CODE (x))
4529 {
230d793d 4530 case AND:
8079805d
RK
4531 /* Convert (A ^ B) & A to A & (~ B) since the latter is often a single
4532 insn (and may simplify more). */
4533 if (GET_CODE (op0) == XOR
4534 && rtx_equal_p (XEXP (op0, 0), op1)
4535 && ! side_effects_p (op1))
0c1c8ea6
RK
4536 x = gen_binary (AND, mode,
4537 gen_unary (NOT, mode, mode, XEXP (op0, 1)), op1);
8079805d
RK
4538
4539 if (GET_CODE (op0) == XOR
4540 && rtx_equal_p (XEXP (op0, 1), op1)
4541 && ! side_effects_p (op1))
0c1c8ea6
RK
4542 x = gen_binary (AND, mode,
4543 gen_unary (NOT, mode, mode, XEXP (op0, 0)), op1);
8079805d
RK
4544
4545 /* Similarly for (~ (A ^ B)) & A. */
4546 if (GET_CODE (op0) == NOT
4547 && GET_CODE (XEXP (op0, 0)) == XOR
4548 && rtx_equal_p (XEXP (XEXP (op0, 0), 0), op1)
4549 && ! side_effects_p (op1))
4550 x = gen_binary (AND, mode, XEXP (XEXP (op0, 0), 1), op1);
4551
4552 if (GET_CODE (op0) == NOT
4553 && GET_CODE (XEXP (op0, 0)) == XOR
4554 && rtx_equal_p (XEXP (XEXP (op0, 0), 1), op1)
4555 && ! side_effects_p (op1))
4556 x = gen_binary (AND, mode, XEXP (XEXP (op0, 0), 0), op1);
4557
4558 if (GET_CODE (op1) == CONST_INT)
230d793d 4559 {
8079805d 4560 x = simplify_and_const_int (x, mode, op0, INTVAL (op1));
230d793d
RS
4561
4562 /* If we have (ior (and X C1) C2) and the next restart would be
4563 the last, simplify this by making C1 as small as possible
4564 and then exit. */
8079805d
RK
4565 if (last
4566 && GET_CODE (x) == IOR && GET_CODE (op0) == AND
4567 && GET_CODE (XEXP (op0, 1)) == CONST_INT
4568 && GET_CODE (op1) == CONST_INT)
4569 return gen_binary (IOR, mode,
4570 gen_binary (AND, mode, XEXP (op0, 0),
4571 GEN_INT (INTVAL (XEXP (op0, 1))
4572 & ~ INTVAL (op1))), op1);
230d793d
RS
4573
4574 if (GET_CODE (x) != AND)
8079805d 4575 return x;
0e32506c
RK
4576
4577 if (GET_RTX_CLASS (GET_CODE (x)) == 'c'
4578 || GET_RTX_CLASS (GET_CODE (x)) == '2')
4579 op0 = XEXP (x, 0), op1 = XEXP (x, 1);
230d793d
RS
4580 }
4581
4582 /* Convert (A | B) & A to A. */
8079805d
RK
4583 if (GET_CODE (op0) == IOR
4584 && (rtx_equal_p (XEXP (op0, 0), op1)
4585 || rtx_equal_p (XEXP (op0, 1), op1))
4586 && ! side_effects_p (XEXP (op0, 0))
4587 && ! side_effects_p (XEXP (op0, 1)))
4588 return op1;
230d793d 4589
d0ab8cd3 4590 /* In the following group of tests (and those in case IOR below),
230d793d
RS
4591 we start with some combination of logical operations and apply
4592 the distributive law followed by the inverse distributive law.
4593 Most of the time, this results in no change. However, if some of
4594 the operands are the same or inverses of each other, simplifications
4595 will result.
4596
4597 For example, (and (ior A B) (not B)) can occur as the result of
4598 expanding a bit field assignment. When we apply the distributive
4599 law to this, we get (ior (and A (not B)) (and B (not B))),
8079805d 4600 which then simplifies to (and A (not B)).
230d793d 4601
8079805d 4602 If we have (and (ior A B) C), apply the distributive law and then
230d793d
RS
4603 the inverse distributive law to see if things simplify. */
4604
8079805d 4605 if (GET_CODE (op0) == IOR || GET_CODE (op0) == XOR)
230d793d
RS
4606 {
4607 x = apply_distributive_law
8079805d
RK
4608 (gen_binary (GET_CODE (op0), mode,
4609 gen_binary (AND, mode, XEXP (op0, 0), op1),
4610 gen_binary (AND, mode, XEXP (op0, 1), op1)));
230d793d 4611 if (GET_CODE (x) != AND)
8079805d 4612 return x;
230d793d
RS
4613 }
4614
8079805d
RK
4615 if (GET_CODE (op1) == IOR || GET_CODE (op1) == XOR)
4616 return apply_distributive_law
4617 (gen_binary (GET_CODE (op1), mode,
4618 gen_binary (AND, mode, XEXP (op1, 0), op0),
4619 gen_binary (AND, mode, XEXP (op1, 1), op0)));
230d793d
RS
4620
4621 /* Similarly, taking advantage of the fact that
4622 (and (not A) (xor B C)) == (xor (ior A B) (ior A C)) */
4623
8079805d
RK
4624 if (GET_CODE (op0) == NOT && GET_CODE (op1) == XOR)
4625 return apply_distributive_law
4626 (gen_binary (XOR, mode,
4627 gen_binary (IOR, mode, XEXP (op0, 0), XEXP (op1, 0)),
4628 gen_binary (IOR, mode, XEXP (op0, 0), XEXP (op1, 1))));
230d793d 4629
8079805d
RK
4630 else if (GET_CODE (op1) == NOT && GET_CODE (op0) == XOR)
4631 return apply_distributive_law
4632 (gen_binary (XOR, mode,
4633 gen_binary (IOR, mode, XEXP (op1, 0), XEXP (op0, 0)),
4634 gen_binary (IOR, mode, XEXP (op1, 0), XEXP (op0, 1))));
230d793d
RS
4635 break;
4636
4637 case IOR:
951553af 4638 /* (ior A C) is C if all bits of A that might be nonzero are on in C. */
8079805d 4639 if (GET_CODE (op1) == CONST_INT
ac49a949 4640 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
8079805d
RK
4641 && (nonzero_bits (op0, mode) & ~ INTVAL (op1)) == 0)
4642 return op1;
d0ab8cd3 4643
230d793d 4644 /* Convert (A & B) | A to A. */
8079805d
RK
4645 if (GET_CODE (op0) == AND
4646 && (rtx_equal_p (XEXP (op0, 0), op1)
4647 || rtx_equal_p (XEXP (op0, 1), op1))
4648 && ! side_effects_p (XEXP (op0, 0))
4649 && ! side_effects_p (XEXP (op0, 1)))
4650 return op1;
230d793d
RS
4651
4652 /* If we have (ior (and A B) C), apply the distributive law and then
4653 the inverse distributive law to see if things simplify. */
4654
8079805d 4655 if (GET_CODE (op0) == AND)
230d793d
RS
4656 {
4657 x = apply_distributive_law
4658 (gen_binary (AND, mode,
8079805d
RK
4659 gen_binary (IOR, mode, XEXP (op0, 0), op1),
4660 gen_binary (IOR, mode, XEXP (op0, 1), op1)));
230d793d
RS
4661
4662 if (GET_CODE (x) != IOR)
8079805d 4663 return x;
230d793d
RS
4664 }
4665
8079805d 4666 if (GET_CODE (op1) == AND)
230d793d
RS
4667 {
4668 x = apply_distributive_law
4669 (gen_binary (AND, mode,
8079805d
RK
4670 gen_binary (IOR, mode, XEXP (op1, 0), op0),
4671 gen_binary (IOR, mode, XEXP (op1, 1), op0)));
230d793d
RS
4672
4673 if (GET_CODE (x) != IOR)
8079805d 4674 return x;
230d793d
RS
4675 }
4676
4677 /* Convert (ior (ashift A CX) (lshiftrt A CY)) where CX+CY equals the
4678 mode size to (rotate A CX). */
4679
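      /* Editor's example: in SImode, (ior (ashift A 24) (lshiftrt A 8))
         becomes (rotate A 24), since 24 + 8 equals the 32-bit mode size.  */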
8079805d
RK
4680 if (((GET_CODE (op0) == ASHIFT && GET_CODE (op1) == LSHIFTRT)
4681 || (GET_CODE (op1) == ASHIFT && GET_CODE (op0) == LSHIFTRT))
4682 && rtx_equal_p (XEXP (op0, 0), XEXP (op1, 0))
4683 && GET_CODE (XEXP (op0, 1)) == CONST_INT
4684 && GET_CODE (XEXP (op1, 1)) == CONST_INT
4685 && (INTVAL (XEXP (op0, 1)) + INTVAL (XEXP (op1, 1))
230d793d 4686 == GET_MODE_BITSIZE (mode)))
8079805d
RK
4687 return gen_rtx (ROTATE, mode, XEXP (op0, 0),
4688 (GET_CODE (op0) == ASHIFT
4689 ? XEXP (op0, 1) : XEXP (op1, 1)));
230d793d 4690
71923da7
RK
4691 /* If OP0 is (ashiftrt (plus ...) C), it might actually be
4692 a (sign_extend (plus ...)). If OP1 is a CONST_INT and the PLUS
4693 does not affect any of the bits in OP1, it can really be done
4694 as a PLUS and we can associate. We do this by seeing if OP1
4695 can be safely shifted left C bits. */
4696 if (GET_CODE (op1) == CONST_INT && GET_CODE (op0) == ASHIFTRT
4697 && GET_CODE (XEXP (op0, 0)) == PLUS
4698 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
4699 && GET_CODE (XEXP (op0, 1)) == CONST_INT
4700 && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT)
4701 {
4702 int count = INTVAL (XEXP (op0, 1));
4703 HOST_WIDE_INT mask = INTVAL (op1) << count;
4704
4705 if (mask >> count == INTVAL (op1)
4706 && (mask & nonzero_bits (XEXP (op0, 0), mode)) == 0)
4707 {
4708 SUBST (XEXP (XEXP (op0, 0), 1),
4709 GEN_INT (INTVAL (XEXP (XEXP (op0, 0), 1)) | mask));
4710 return op0;
4711 }
4712 }
230d793d
RS
4713 break;
4714
4715 case XOR:
4716 /* Convert (XOR (NOT x) (NOT y)) to (XOR x y).
4717 Also convert (XOR (NOT x) y) to (NOT (XOR x y)), similarly for
4718 (NOT y). */
4719 {
4720 int num_negated = 0;
230d793d 4721
8079805d
RK
4722 if (GET_CODE (op0) == NOT)
4723 num_negated++, op0 = XEXP (op0, 0);
4724 if (GET_CODE (op1) == NOT)
4725 num_negated++, op1 = XEXP (op1, 0);
230d793d
RS
4726
4727 if (num_negated == 2)
4728 {
8079805d
RK
4729 SUBST (XEXP (x, 0), op0);
4730 SUBST (XEXP (x, 1), op1);
230d793d
RS
4731 }
4732 else if (num_negated == 1)
0c1c8ea6 4733 return gen_unary (NOT, mode, mode, gen_binary (XOR, mode, op0, op1));
230d793d
RS
4734 }
4735
4736 /* Convert (xor (and A B) B) to (and (not A) B). The latter may
4737 correspond to a machine insn or result in further simplifications
4738 if B is a constant. */
4739
8079805d
RK
4740 if (GET_CODE (op0) == AND
4741 && rtx_equal_p (XEXP (op0, 1), op1)
4742 && ! side_effects_p (op1))
0c1c8ea6
RK
4743 return gen_binary (AND, mode,
4744 gen_unary (NOT, mode, mode, XEXP (op0, 0)),
8079805d 4745 op1);
230d793d 4746
8079805d
RK
4747 else if (GET_CODE (op0) == AND
4748 && rtx_equal_p (XEXP (op0, 0), op1)
4749 && ! side_effects_p (op1))
0c1c8ea6
RK
4750 return gen_binary (AND, mode,
4751 gen_unary (NOT, mode, mode, XEXP (op0, 1)),
8079805d 4752 op1);
230d793d
RS
4753
4754#if STORE_FLAG_VALUE == 1
4755 /* (xor (comparison foo bar) (const_int 1)) can become the reversed
4756 comparison. */
8079805d
RK
4757 if (op1 == const1_rtx
4758 && GET_RTX_CLASS (GET_CODE (op0)) == '<'
4759 && reversible_comparison_p (op0))
4760 return gen_rtx_combine (reverse_condition (GET_CODE (op0)),
4761 mode, XEXP (op0, 0), XEXP (op0, 1));
500c518b
RK
4762
4763 /* (lshiftrt foo C) where C is the number of bits in FOO minus 1
4764 is (lt foo (const_int 0)), so we can perform the above
4765 simplification. */
4766
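      /* Editor's example: in SImode, (lshiftrt X 31) is 1 exactly when X
         is negative, so (xor (lshiftrt X 31) 1) is the reversed test
         (ge X 0), which is what is built below.  */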
8079805d
RK
4767 if (op1 == const1_rtx
4768 && GET_CODE (op0) == LSHIFTRT
4769 && GET_CODE (XEXP (op0, 1)) == CONST_INT
4770 && INTVAL (XEXP (op0, 1)) == GET_MODE_BITSIZE (mode) - 1)
4771 return gen_rtx_combine (GE, mode, XEXP (op0, 0), const0_rtx);
230d793d
RS
4772#endif
4773
4774 /* (xor (comparison foo bar) (const_int sign-bit))
4775 when STORE_FLAG_VALUE is the sign bit. */
5f4f0e22
CH
4776 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4777 && (STORE_FLAG_VALUE
4778 == (HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (mode) - 1))
8079805d
RK
4779 && op1 == const_true_rtx
4780 && GET_RTX_CLASS (GET_CODE (op0)) == '<'
4781 && reversible_comparison_p (op0))
4782 return gen_rtx_combine (reverse_condition (GET_CODE (op0)),
4783 mode, XEXP (op0, 0), XEXP (op0, 1));
230d793d
RS
4784 break;
4785 }
4786
4787 return x;
4788}
4789\f
4790/* We consider ZERO_EXTRACT, SIGN_EXTRACT, and SIGN_EXTEND as "compound
4791 operations" because they can be replaced with two more basic operations.
4792 ZERO_EXTEND is also considered "compound" because it can be replaced with
4793 an AND operation, which is simpler, though only one operation.
4794
4795 The function expand_compound_operation is called with an rtx expression
4796 and will convert it to the appropriate shifts and AND operations,
4797 simplifying at each stage.
4798
4799 The function make_compound_operation is called to convert an expression
4800 consisting of shifts and ANDs into the equivalent compound expression.
4801 It is the inverse of this function, loosely speaking. */
4802
4803static rtx
4804expand_compound_operation (x)
4805 rtx x;
4806{
4807 int pos = 0, len;
4808 int unsignedp = 0;
4809 int modewidth;
4810 rtx tem;
4811
4812 switch (GET_CODE (x))
4813 {
4814 case ZERO_EXTEND:
4815 unsignedp = 1;
4816 case SIGN_EXTEND:
75473182
RS
4817 /* We can't necessarily use a const_int for a multiword mode;
4818 it depends on implicitly extending the value.
4819 Since we don't know the right way to extend it,
4820 we can't tell whether the implicit way is right.
4821
4822 Even for a mode that is no wider than a const_int,
4823 we can't win, because we need to sign extend one of its bits through
4824 the rest of it, and we don't know which bit. */
230d793d 4825 if (GET_CODE (XEXP (x, 0)) == CONST_INT)
75473182 4826 return x;
230d793d 4827
8079805d
RK
4828 /* Return if (subreg:MODE FROM 0) is not a safe replacement for
4829 (zero_extend:MODE FROM) or (sign_extend:MODE FROM). It is for any MEM
4830 because (SUBREG (MEM...)) is guaranteed to cause the MEM to be
4831 reloaded. If not for that, MEM's would very rarely be safe.
4832
4833 Reject MODEs bigger than a word, because we might not be able
4834 to reference a two-register group starting with an arbitrary register
4835 (and currently gen_lowpart might crash for a SUBREG). */
4836
4837 if (GET_MODE_SIZE (GET_MODE (XEXP (x, 0))) > UNITS_PER_WORD)
230d793d
RS
4838 return x;
4839
4840 len = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)));
4841 /* If the inner object has VOIDmode (the only way this can happen
4842 is if it is an ASM_OPERANDS), we can't do anything since we don't
4843 know how much masking to do. */
4844 if (len == 0)
4845 return x;
4846
4847 break;
4848
4849 case ZERO_EXTRACT:
4850 unsignedp = 1;
4851 case SIGN_EXTRACT:
4852 /* If the operand is a CLOBBER, just return it. */
4853 if (GET_CODE (XEXP (x, 0)) == CLOBBER)
4854 return XEXP (x, 0);
4855
4856 if (GET_CODE (XEXP (x, 1)) != CONST_INT
4857 || GET_CODE (XEXP (x, 2)) != CONST_INT
4858 || GET_MODE (XEXP (x, 0)) == VOIDmode)
4859 return x;
4860
4861 len = INTVAL (XEXP (x, 1));
4862 pos = INTVAL (XEXP (x, 2));
4863
4864 /* If this goes outside the object being extracted, replace the object
4865 with a (use (mem ...)) construct that only combine understands
4866 and is used only for this purpose. */
4867 if (len + pos > GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))))
4868 SUBST (XEXP (x, 0), gen_rtx (USE, GET_MODE (x), XEXP (x, 0)));
4869
f76b9db2
ILT
4870 if (BITS_BIG_ENDIAN)
4871 pos = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - len - pos;
4872
230d793d
RS
4873 break;
4874
4875 default:
4876 return x;
4877 }
4878
4879 /* If we reach here, we want to return a pair of shifts. The inner
4880 shift is a left shift of BITSIZE - POS - LEN bits. The outer
4881 shift is a right shift of BITSIZE - LEN bits. It is arithmetic or
4882 logical depending on the value of UNSIGNEDP.
4883
4884 If this was a ZERO_EXTEND or ZERO_EXTRACT, this pair of shifts will be
4885 converted into an AND of a shift.
4886
4887 We must check for the case where the left shift would have a negative
4888 count. This can happen in a case like (x >> 31) & 255 on machines
4889 that can't shift by a constant. On those machines, we would first
4890 combine the shift with the AND to produce a variable-position
4891 extraction. Then the constant of 31 would be substituted in to produce
4892 such a position. */
4893
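  /* Editor's example: expanding (sign_extend:SI (reg:QI R)), with LEN 8
     and POS 0 in a 32-bit mode, in effect gives
     (ashiftrt:SI (ashift:SI R 24) 24): an inner left shift of
     32 - 0 - 8 = 24 bits and an outer arithmetic right shift of
     32 - 8 = 24 bits.  */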
4894 modewidth = GET_MODE_BITSIZE (GET_MODE (x));
4895 if (modewidth >= pos + len)
5f4f0e22 4896 tem = simplify_shift_const (NULL_RTX, unsignedp ? LSHIFTRT : ASHIFTRT,
230d793d 4897 GET_MODE (x),
5f4f0e22
CH
4898 simplify_shift_const (NULL_RTX, ASHIFT,
4899 GET_MODE (x),
230d793d
RS
4900 XEXP (x, 0),
4901 modewidth - pos - len),
4902 modewidth - len);
4903
5f4f0e22
CH
4904 else if (unsignedp && len < HOST_BITS_PER_WIDE_INT)
4905 tem = simplify_and_const_int (NULL_RTX, GET_MODE (x),
4906 simplify_shift_const (NULL_RTX, LSHIFTRT,
230d793d
RS
4907 GET_MODE (x),
4908 XEXP (x, 0), pos),
5f4f0e22 4909 ((HOST_WIDE_INT) 1 << len) - 1);
230d793d
RS
4910 else
4911 /* Any other cases we can't handle. */
4912 return x;
4913
4914
4915 /* If we couldn't do this for some reason, return the original
4916 expression. */
4917 if (GET_CODE (tem) == CLOBBER)
4918 return x;
4919
4920 return tem;
4921}
4922\f
4923/* X is a SET which contains an assignment of one object into
4924 a part of another (such as a bit-field assignment, STRICT_LOW_PART,
4925 or certain SUBREGS). If possible, convert it into a series of
4926 logical operations.
4927
4928 We half-heartedly support variable positions, but do not at all
4929 support variable lengths. */
4930
4931static rtx
4932expand_field_assignment (x)
4933 rtx x;
4934{
4935 rtx inner;
4936 rtx pos; /* Always counts from low bit. */
4937 int len;
4938 rtx mask;
4939 enum machine_mode compute_mode;
4940
4941 /* Loop until we find something we can't simplify. */
4942 while (1)
4943 {
4944 if (GET_CODE (SET_DEST (x)) == STRICT_LOW_PART
4945 && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG)
4946 {
4947 inner = SUBREG_REG (XEXP (SET_DEST (x), 0));
4948 len = GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)));
4949 pos = const0_rtx;
4950 }
4951 else if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
4952 && GET_CODE (XEXP (SET_DEST (x), 1)) == CONST_INT)
4953 {
4954 inner = XEXP (SET_DEST (x), 0);
4955 len = INTVAL (XEXP (SET_DEST (x), 1));
4956 pos = XEXP (SET_DEST (x), 2);
4957
4958 /* If the position is constant and spans the width of INNER,
4959 surround INNER with a USE to indicate this. */
4960 if (GET_CODE (pos) == CONST_INT
4961 && INTVAL (pos) + len > GET_MODE_BITSIZE (GET_MODE (inner)))
4962 inner = gen_rtx (USE, GET_MODE (SET_DEST (x)), inner);
4963
f76b9db2
ILT
4964 if (BITS_BIG_ENDIAN)
4965 {
4966 if (GET_CODE (pos) == CONST_INT)
4967 pos = GEN_INT (GET_MODE_BITSIZE (GET_MODE (inner)) - len
4968 - INTVAL (pos));
4969 else if (GET_CODE (pos) == MINUS
4970 && GET_CODE (XEXP (pos, 1)) == CONST_INT
4971 && (INTVAL (XEXP (pos, 1))
4972 == GET_MODE_BITSIZE (GET_MODE (inner)) - len))
4973 /* If position is ADJUST - X, new position is X. */
4974 pos = XEXP (pos, 0);
4975 else
4976 pos = gen_binary (MINUS, GET_MODE (pos),
4977 GEN_INT (GET_MODE_BITSIZE (GET_MODE (inner))
4978 - len),
4979 pos);
4980 }
230d793d
RS
4981 }
4982
4983 /* A SUBREG between two modes that occupy the same numbers of words
4984 can be done by moving the SUBREG to the source. */
4985 else if (GET_CODE (SET_DEST (x)) == SUBREG
4986 && (((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
4987 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
4988 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
4989 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
4990 {
4991 x = gen_rtx (SET, VOIDmode, SUBREG_REG (SET_DEST (x)),
4992 gen_lowpart_for_combine (GET_MODE (SUBREG_REG (SET_DEST (x))),
4993 SET_SRC (x)));
4994 continue;
4995 }
4996 else
4997 break;
4998
4999 while (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner))
5000 inner = SUBREG_REG (inner);
5001
5002 compute_mode = GET_MODE (inner);
5003
5004 /* Compute a mask of LEN bits, if we can do this on the host machine. */
5f4f0e22
CH
5005 if (len < HOST_BITS_PER_WIDE_INT)
5006 mask = GEN_INT (((HOST_WIDE_INT) 1 << len) - 1);
230d793d
RS
5007 else
5008 break;
5009
5010 /* Now compute the equivalent expression. Make a copy of INNER
5011 for the SET_DEST in case it is a MEM into which we will substitute;
5012 we don't want shared RTL in that case. */
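      /* Editor's example: storing SRC into an 8-bit field at bit 8 of
         INNER, so MASK is 255 and POS is 8, builds in effect
         (set INNER (ior (and (not (ashift 255 8)) INNER)
         (ashift (and SRC 255) 8))).  */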
5013 x = gen_rtx (SET, VOIDmode, copy_rtx (inner),
5014 gen_binary (IOR, compute_mode,
5015 gen_binary (AND, compute_mode,
5016 gen_unary (NOT, compute_mode,
0c1c8ea6 5017 compute_mode,
230d793d
RS
5018 gen_binary (ASHIFT,
5019 compute_mode,
5020 mask, pos)),
5021 inner),
5022 gen_binary (ASHIFT, compute_mode,
5023 gen_binary (AND, compute_mode,
5024 gen_lowpart_for_combine
5025 (compute_mode,
5026 SET_SRC (x)),
5027 mask),
5028 pos)));
5029 }
5030
5031 return x;
5032}
5033\f
8999a12e
RK
5034/* Return an RTX for a reference to LEN bits of INNER. If POS_RTX is nonzero,
5035 it is an RTX that represents a variable starting position; otherwise,
5036 POS is the (constant) starting bit position (counted from the LSB).
230d793d
RS
5037
5038 INNER may be a USE. This will occur when we started with a bitfield
5039 that went outside the boundary of the object in memory, which is
5040 allowed on most machines. To isolate this case, we produce a USE
5041 whose mode is wide enough and surround the MEM with it. The only
5042 code that understands the USE is this routine. If it is not removed,
5043 it will cause the resulting insn not to match.
5044
5045 UNSIGNEDP is non-zero for an unsigned reference and zero for a
5046 signed reference.
5047
5048 IN_DEST is non-zero if this is a reference in the destination of a
5049 SET. This is used when a ZERO_ or SIGN_EXTRACT isn't needed. If non-zero,
5050 a STRICT_LOW_PART will be used, if zero, ZERO_EXTEND or SIGN_EXTEND will
5051 be used.
5052
5053 IN_COMPARE is non-zero if we are in a COMPARE. This means that a
5054 ZERO_EXTRACT should be built even for bits starting at bit 0.
5055
76184def
DE
5056 MODE is the desired mode of the result (if IN_DEST == 0).
5057
5058 The result is an RTX for the extraction or NULL_RTX if the target
5059 can't handle it. */
230d793d
RS
5060
5061static rtx
5062make_extraction (mode, inner, pos, pos_rtx, len,
5063 unsignedp, in_dest, in_compare)
5064 enum machine_mode mode;
5065 rtx inner;
5066 int pos;
5067 rtx pos_rtx;
5068 int len;
5069 int unsignedp;
5070 int in_dest, in_compare;
5071{
94b4b17a
RS
5072 /* This mode describes the size of the storage area
5073 to fetch the overall value from. Within that, we
5074 ignore the POS lowest bits, etc. */
230d793d
RS
5075 enum machine_mode is_mode = GET_MODE (inner);
5076 enum machine_mode inner_mode;
d7cd794f
RK
5077 enum machine_mode wanted_inner_mode = byte_mode;
5078 enum machine_mode wanted_inner_reg_mode = word_mode;
230d793d
RS
5079 enum machine_mode pos_mode = word_mode;
5080 enum machine_mode extraction_mode = word_mode;
5081 enum machine_mode tmode = mode_for_size (len, MODE_INT, 1);
5082 int spans_byte = 0;
5083 rtx new = 0;
8999a12e 5084 rtx orig_pos_rtx = pos_rtx;
6139ff20 5085 int orig_pos;
230d793d
RS
5086
5087 /* Get some information about INNER and get the innermost object. */
5088 if (GET_CODE (inner) == USE)
94b4b17a 5089 /* (use:SI (mem:QI foo)) stands for (mem:SI foo). */
230d793d
RS
5090 /* We don't need to adjust the position because we set up the USE
5091 to pretend that it was a full-word object. */
5092 spans_byte = 1, inner = XEXP (inner, 0);
5093 else if (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner))
94b4b17a
RS
5094 {
5095 /* If going from (subreg:SI (mem:QI ...)) to (mem:QI ...),
5096 consider just the QI as the memory to extract from.
5097 The subreg adds or removes high bits; its mode is
5098 irrelevant to the meaning of this extraction,
5099 since POS and LEN count from the lsb. */
5100 if (GET_CODE (SUBREG_REG (inner)) == MEM)
5101 is_mode = GET_MODE (SUBREG_REG (inner));
5102 inner = SUBREG_REG (inner);
5103 }
230d793d
RS
5104
5105 inner_mode = GET_MODE (inner);
5106
5107 if (pos_rtx && GET_CODE (pos_rtx) == CONST_INT)
8999a12e 5108 pos = INTVAL (pos_rtx), pos_rtx = 0;
230d793d
RS
5109
5110 /* See if this can be done without an extraction. We never can if the
5111 width of the field is not the same as that of some integer mode. For
5112 registers, we can only avoid the extraction if the position is at the
5113 low-order bit and this is either not in the destination or we have the
5114 appropriate STRICT_LOW_PART operation available.
5115
5116 For MEM, we can avoid an extract if the field starts on an appropriate
5117 boundary and we can change the mode of the memory reference. However,
5118 we cannot directly access the MEM if we have a USE and the underlying
5119 MEM is not TMODE. This combination means that MEM was being used in a
5120 context where bits outside its mode were being referenced; that is only
5121 valid in bit-field insns. */
5122
5123 if (tmode != BLKmode
5124 && ! (spans_byte && inner_mode != tmode)
8999a12e 5125 && ((pos_rtx == 0 && pos == 0 && GET_CODE (inner) != MEM
230d793d 5126 && (! in_dest
df62f951
RK
5127 || (GET_CODE (inner) == REG
5128 && (movstrict_optab->handlers[(int) tmode].insn_code
5129 != CODE_FOR_nothing))))
8999a12e 5130 || (GET_CODE (inner) == MEM && pos_rtx == 0
dfbe1b2f
RK
5131 && (pos
5132 % (STRICT_ALIGNMENT ? GET_MODE_ALIGNMENT (tmode)
5133 : BITS_PER_UNIT)) == 0
230d793d
RS
5134 /* We can't do this if we are widening INNER_MODE (it
5135 may not be aligned, for one thing). */
5136 && GET_MODE_BITSIZE (inner_mode) >= GET_MODE_BITSIZE (tmode)
5137 && (inner_mode == tmode
5138 || (! mode_dependent_address_p (XEXP (inner, 0))
5139 && ! MEM_VOLATILE_P (inner))))))
5140 {
230d793d
RS
5141 /* If INNER is a MEM, make a new MEM that encompasses just the desired
5142 field. If the original and current mode are the same, we need not
5143 adjust the offset. Otherwise, we do if bytes big endian.
5144
5145 If INNER is not a MEM, get a piece consisting of the just the field
df62f951 5146 of interest (in this case POS must be 0). */
230d793d
RS
5147
5148 if (GET_CODE (inner) == MEM)
5149 {
94b4b17a
RS
5150 int offset;
5151 /* POS counts from lsb, but make OFFSET count in memory order. */
5152 if (BYTES_BIG_ENDIAN)
5153 offset = (GET_MODE_BITSIZE (is_mode) - len - pos) / BITS_PER_UNIT;
5154 else
5155 offset = pos / BITS_PER_UNIT;
230d793d
RS
5156
5157 new = gen_rtx (MEM, tmode, plus_constant (XEXP (inner, 0), offset));
5158 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (inner);
5159 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (inner);
5160 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (inner);
5161 }
df62f951 5162 else if (GET_CODE (inner) == REG)
c0d3ac4d
RK
5163 {
5164 /* We can't call gen_lowpart_for_combine here since we always want
5165 a SUBREG and it would sometimes return a new hard register. */
5166 if (tmode != inner_mode)
5167 new = gen_rtx (SUBREG, tmode, inner,
5168 (WORDS_BIG_ENDIAN
5169 && GET_MODE_SIZE (inner_mode) > UNITS_PER_WORD
5170 ? ((GET_MODE_SIZE (inner_mode)
5171 - GET_MODE_SIZE (tmode))
5172 / UNITS_PER_WORD)
5173 : 0));
5174 else
5175 new = inner;
5176 }
230d793d 5177 else
6139ff20
RK
5178 new = force_to_mode (inner, tmode,
5179 len >= HOST_BITS_PER_WIDE_INT
5180 ? GET_MODE_MASK (tmode)
5181 : ((HOST_WIDE_INT) 1 << len) - 1,
e3d616e3 5182 NULL_RTX, 0);
230d793d
RS
5183
5184 /* If this extraction is going into the destination of a SET,
5185 make a STRICT_LOW_PART unless we made a MEM. */
5186
5187 if (in_dest)
5188 return (GET_CODE (new) == MEM ? new
77fa0940
RK
5189 : (GET_CODE (new) != SUBREG
5190 ? gen_rtx (CLOBBER, tmode, const0_rtx)
5191 : gen_rtx_combine (STRICT_LOW_PART, VOIDmode, new)));
230d793d
RS
5192
5193 /* Otherwise, sign- or zero-extend unless we already are in the
5194 proper mode. */
5195
5196 return (mode == tmode ? new
5197 : gen_rtx_combine (unsignedp ? ZERO_EXTEND : SIGN_EXTEND,
5198 mode, new));
5199 }
5200
cc471082
RS
5201 /* Unless this is a COMPARE or we have a funny memory reference,
5202 don't do anything with zero-extending field extracts starting at
5203 the low-order bit since they are simple AND operations. */
8999a12e
RK
5204 if (pos_rtx == 0 && pos == 0 && ! in_dest
5205 && ! in_compare && ! spans_byte && unsignedp)
230d793d
RS
5206 return 0;
5207
e7373556
RK
5208 /* Unless we are allowed to span bytes, reject this if we would be
5209 spanning bytes or if the position is not a constant and the length
5210 is not 1. In all other cases, we would only be going outside
5211 our object in cases when an original shift would have been
5212 undefined. */
5213 if (! spans_byte
5214 && ((pos_rtx == 0 && pos + len > GET_MODE_BITSIZE (is_mode))
5215 || (pos_rtx != 0 && len != 1)))
5216 return 0;
5217
d7cd794f 5218 /* Get the mode to use should INNER not be a MEM, the mode for the position,
230d793d
RS
5219 and the mode for the result. */
5220#ifdef HAVE_insv
5221 if (in_dest)
5222 {
d7cd794f 5223 wanted_inner_reg_mode = insn_operand_mode[(int) CODE_FOR_insv][0];
230d793d
RS
5224 pos_mode = insn_operand_mode[(int) CODE_FOR_insv][2];
5225 extraction_mode = insn_operand_mode[(int) CODE_FOR_insv][3];
5226 }
5227#endif
5228
5229#ifdef HAVE_extzv
5230 if (! in_dest && unsignedp)
5231 {
d7cd794f 5232 wanted_inner_reg_mode = insn_operand_mode[(int) CODE_FOR_extzv][1];
230d793d
RS
5233 pos_mode = insn_operand_mode[(int) CODE_FOR_extzv][3];
5234 extraction_mode = insn_operand_mode[(int) CODE_FOR_extzv][0];
5235 }
5236#endif
5237
5238#ifdef HAVE_extv
5239 if (! in_dest && ! unsignedp)
5240 {
d7cd794f 5241 wanted_inner_reg_mode = insn_operand_mode[(int) CODE_FOR_extv][1];
230d793d
RS
5242 pos_mode = insn_operand_mode[(int) CODE_FOR_extv][3];
5243 extraction_mode = insn_operand_mode[(int) CODE_FOR_extv][0];
5244 }
5245#endif
5246
5247 /* Never narrow an object, since that might not be safe. */
5248
5249 if (mode != VOIDmode
5250 && GET_MODE_SIZE (extraction_mode) < GET_MODE_SIZE (mode))
5251 extraction_mode = mode;
5252
5253 if (pos_rtx && GET_MODE (pos_rtx) != VOIDmode
5254 && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx)))
5255 pos_mode = GET_MODE (pos_rtx);
5256
d7cd794f
RK
5257 /* If this is not from memory, the desired mode is wanted_inner_reg_mode;
5258 if we have to change the mode of memory and cannot, the desired mode is
5259 EXTRACTION_MODE. */
5260 if (GET_CODE (inner) != MEM)
5261 wanted_inner_mode = wanted_inner_reg_mode;
5262 else if (inner_mode != wanted_inner_mode
5263 && (mode_dependent_address_p (XEXP (inner, 0))
5264 || MEM_VOLATILE_P (inner)))
5265 wanted_inner_mode = extraction_mode;
230d793d 5266
6139ff20
RK
5267 orig_pos = pos;
5268
f76b9db2
ILT
5269 if (BITS_BIG_ENDIAN)
5270 {
5271 /* If position is constant, compute new position. Otherwise,
5272 build subtraction. */
5273 if (pos_rtx == 0)
d7cd794f 5274 pos = GET_MODE_BITSIZE (wanted_inner_mode) - len - pos;
f76b9db2
ILT
5275 else
5276 pos_rtx
5277 = gen_rtx_combine (MINUS, GET_MODE (pos_rtx),
d7cd794f 5278 GEN_INT (GET_MODE_BITSIZE (wanted_inner_mode)
f76b9db2
ILT
5279 - len),
5280 pos_rtx);
5281 }
230d793d
RS
5282
5283 /* If INNER has a wider mode, make it smaller. If this is a constant
5284 extract, try to adjust the byte to point to the byte containing
5285 the value. */
d7cd794f
RK
5286 if (wanted_inner_mode != VOIDmode
5287 && GET_MODE_SIZE (wanted_inner_mode) < GET_MODE_SIZE (is_mode)
230d793d 5288 && ((GET_CODE (inner) == MEM
d7cd794f 5289 && (inner_mode == wanted_inner_mode
230d793d
RS
5290 || (! mode_dependent_address_p (XEXP (inner, 0))
5291 && ! MEM_VOLATILE_P (inner))))))
5292 {
5293 int offset = 0;
5294
5295 /* The computations below will be correct if the machine is big
5296 endian in both bits and bytes or little endian in bits and bytes.
5297 If it is mixed, we must adjust. */
5298
230d793d
RS
5299 /* If bytes are big endian and we had a paradoxical SUBREG, we must
5300 adjust OFFSET to compensate. */
f76b9db2
ILT
5301 if (BYTES_BIG_ENDIAN
5302 && ! spans_byte
230d793d
RS
5303 && GET_MODE_SIZE (inner_mode) < GET_MODE_SIZE (is_mode))
5304 offset -= GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (inner_mode);
230d793d
RS
5305
5306 /* If this is a constant position, we can move to the desired byte. */
8999a12e 5307 if (pos_rtx == 0)
230d793d
RS
5308 {
5309 offset += pos / BITS_PER_UNIT;
d7cd794f 5310 pos %= GET_MODE_BITSIZE (wanted_inner_mode);
230d793d
RS
5311 }
5312
f76b9db2
ILT
5313 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN
5314 && ! spans_byte
d7cd794f 5315 && is_mode != wanted_inner_mode)
c6b3f1f2 5316 offset = (GET_MODE_SIZE (is_mode)
d7cd794f 5317 - GET_MODE_SIZE (wanted_inner_mode) - offset);
c6b3f1f2 5318
d7cd794f 5319 if (offset != 0 || inner_mode != wanted_inner_mode)
230d793d 5320 {
d7cd794f 5321 rtx newmem = gen_rtx (MEM, wanted_inner_mode,
230d793d
RS
5322 plus_constant (XEXP (inner, 0), offset));
5323 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (inner);
5324 MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (inner);
5325 MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (inner);
5326 inner = newmem;
5327 }
5328 }
5329
9e74dc41
RK
5330 /* If INNER is not memory, we can always get it into the proper mode. If we
5331 are changing its mode, POS must be a constant and smaller than the size
5332 of the new mode. */
230d793d 5333 else if (GET_CODE (inner) != MEM)
9e74dc41
RK
5334 {
5335 if (GET_MODE (inner) != wanted_inner_mode
5336 && (pos_rtx != 0
5337 || orig_pos + len > GET_MODE_BITSIZE (wanted_inner_mode)))
5338 return 0;
5339
5340 inner = force_to_mode (inner, wanted_inner_mode,
5341 pos_rtx
5342 || len + orig_pos >= HOST_BITS_PER_WIDE_INT
5343 ? GET_MODE_MASK (wanted_inner_mode)
5344 : (((HOST_WIDE_INT) 1 << len) - 1) << orig_pos,
5345 NULL_RTX, 0);
5346 }
230d793d
RS
5347
5348 /* Adjust mode of POS_RTX, if needed. If we want a wider mode, we
5349 have to zero extend. Otherwise, we can just use a SUBREG. */
8999a12e 5350 if (pos_rtx != 0
230d793d
RS
5351 && GET_MODE_SIZE (pos_mode) > GET_MODE_SIZE (GET_MODE (pos_rtx)))
5352 pos_rtx = gen_rtx_combine (ZERO_EXTEND, pos_mode, pos_rtx);
8999a12e 5353 else if (pos_rtx != 0
230d793d
RS
5354 && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx)))
5355 pos_rtx = gen_lowpart_for_combine (pos_mode, pos_rtx);
5356
8999a12e
RK
5357 /* Make POS_RTX unless we already have it and it is correct. If we don't
5358 have a POS_RTX but we do have an ORIG_POS_RTX, the latter must
5359 be a CONST_INT. */
5360 if (pos_rtx == 0 && orig_pos_rtx != 0 && INTVAL (orig_pos_rtx) == pos)
5361 pos_rtx = orig_pos_rtx;
5362
5363 else if (pos_rtx == 0)
5f4f0e22 5364 pos_rtx = GEN_INT (pos);
230d793d
RS
5365
5366 /* Make the required operation. See if we can use existing rtx. */
5367 new = gen_rtx_combine (unsignedp ? ZERO_EXTRACT : SIGN_EXTRACT,
5f4f0e22 5368 extraction_mode, inner, GEN_INT (len), pos_rtx);
230d793d
RS
5369 if (! in_dest)
5370 new = gen_lowpart_for_combine (mode, new);
5371
5372 return new;
5373}
5374\f
71923da7
RK
5375/* See if X contains an ASHIFT of COUNT or more bits that can be commuted
5376 with any other operations in X. Return X without that shift if so. */
5377
5378static rtx
5379extract_left_shift (x, count)
5380 rtx x;
5381 int count;
5382{
5383 enum rtx_code code = GET_CODE (x);
5384 enum machine_mode mode = GET_MODE (x);
5385 rtx tem;
5386
5387 switch (code)
5388 {
5389 case ASHIFT:
5390 /* This is the shift itself. If it is wide enough, we will return
5391 either the value being shifted if the shift count is equal to
5392 COUNT or a shift for the difference. */
5393 if (GET_CODE (XEXP (x, 1)) == CONST_INT
5394 && INTVAL (XEXP (x, 1)) >= count)
5395 return simplify_shift_const (NULL_RTX, ASHIFT, mode, XEXP (x, 0),
5396 INTVAL (XEXP (x, 1)) - count);
5397 break;
5398
5399 case NEG: case NOT:
5400 if ((tem = extract_left_shift (XEXP (x, 0), count)) != 0)
0c1c8ea6 5401 return gen_unary (code, mode, mode, tem);
71923da7
RK
5402
5403 break;
5404
5405 case PLUS: case IOR: case XOR: case AND:
5406 /* If we can safely shift this constant and we find the inner shift,
5407 make a new operation. */
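      /* Editor's example: with COUNT == 3, (plus (ashift X 3) 8) yields
         (plus X 1) here, i.e. the original is (ashift (plus X 1) 3);
         the low three bits of 8 are zero, so 8 moves inside as 8 >> 3.  */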
5408 if (GET_CODE (XEXP (x, 1)) == CONST_INT
5409 && (INTVAL (XEXP (x, 1)) & (((HOST_WIDE_INT) 1 << count) - 1)) == 0
5410 && (tem = extract_left_shift (XEXP (x, 0), count)) != 0)
5411 return gen_binary (code, mode, tem,
5412 GEN_INT (INTVAL (XEXP (x, 1)) >> count));
5413
5414 break;
5415 }
5416
5417 return 0;
5418}
5419\f
230d793d
RS
5420/* Look at the expression rooted at X. Look for expressions
5421 equivalent to ZERO_EXTRACT, SIGN_EXTRACT, ZERO_EXTEND, SIGN_EXTEND.
5422 Form these expressions.
5423
5424 Return the new rtx, usually just X.
5425
5426 Also, for machines like the Vax that don't have logical shift insns,
5427 try to convert logical to arithmetic shift operations in cases where
5428 they are equivalent. This undoes the canonicalizations to logical
5429 shifts done elsewhere.
5430
5431 We try, as much as possible, to re-use rtl expressions to save memory.
5432
5433 IN_CODE says what kind of expression we are processing. Normally, it is
42495ca0
RK
5434 SET. In a memory address (inside a MEM, PLUS or MINUS, the latter two
5435 being kludges), it is MEM. When processing the arguments of a comparison
230d793d
RS
5436 or a COMPARE against zero, it is COMPARE. */
5437
5438static rtx
5439make_compound_operation (x, in_code)
5440 rtx x;
5441 enum rtx_code in_code;
5442{
5443 enum rtx_code code = GET_CODE (x);
5444 enum machine_mode mode = GET_MODE (x);
5445 int mode_width = GET_MODE_BITSIZE (mode);
71923da7 5446 rtx rhs, lhs;
230d793d 5447 enum rtx_code next_code;
f24ad0e4 5448 int i;
230d793d 5449 rtx new = 0;
280f58ba 5450 rtx tem;
230d793d
RS
5451 char *fmt;
5452
5453 /* Select the code to be used in recursive calls. Once we are inside an
5454 address, we stay there. If we have a comparison, set to COMPARE,
5455 but once inside, go back to our default of SET. */
5456
42495ca0 5457 next_code = (code == MEM || code == PLUS || code == MINUS ? MEM
230d793d
RS
5458 : ((code == COMPARE || GET_RTX_CLASS (code) == '<')
5459 && XEXP (x, 1) == const0_rtx) ? COMPARE
5460 : in_code == COMPARE ? SET : in_code);
5461
5462 /* Process depending on the code of this operation. If NEW is set
5463 non-zero, it will be returned. */
5464
5465 switch (code)
5466 {
5467 case ASHIFT:
230d793d
RS
5468 /* Convert shifts by constants into multiplications if inside
5469 an address. */
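      /* Editor's example: inside an address, (ashift X 2) becomes
         (mult X 4), the canonical form for address arithmetic.  */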
5470 if (in_code == MEM && GET_CODE (XEXP (x, 1)) == CONST_INT
5f4f0e22 5471 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
230d793d 5472 && INTVAL (XEXP (x, 1)) >= 0)
280f58ba
RK
5473 {
5474 new = make_compound_operation (XEXP (x, 0), next_code);
5475 new = gen_rtx_combine (MULT, mode, new,
5476 GEN_INT ((HOST_WIDE_INT) 1
5477 << INTVAL (XEXP (x, 1))));
5478 }
230d793d
RS
5479 break;
5480
5481 case AND:
5482 /* If the second operand is not a constant, we can't do anything
5483 with it. */
5484 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5485 break;
5486
5487 /* If the constant is a power of two minus one and the first operand
5488 is a logical right shift, make an extraction. */
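      /* Editor's example: (and (lshiftrt X 8) 255) selects bits 8..15 of
         X and so becomes the extraction (zero_extract X 8 8): a length
         of 8 from the mask 255, at the position given by the shift count.  */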
5489 if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
5490 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
280f58ba
RK
5491 {
5492 new = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code);
5493 new = make_extraction (mode, new, 0, XEXP (XEXP (x, 0), 1), i, 1,
5494 0, in_code == COMPARE);
5495 }
dfbe1b2f 5496
230d793d
RS
5497 /* Same as previous, but for (subreg (lshiftrt ...)) in first op. */
5498 else if (GET_CODE (XEXP (x, 0)) == SUBREG
5499 && subreg_lowpart_p (XEXP (x, 0))
5500 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == LSHIFTRT
5501 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
280f58ba
RK
5502 {
5503 new = make_compound_operation (XEXP (SUBREG_REG (XEXP (x, 0)), 0),
5504 next_code);
2f99f437 5505 new = make_extraction (GET_MODE (SUBREG_REG (XEXP (x, 0))), new, 0,
280f58ba
RK
5506 XEXP (SUBREG_REG (XEXP (x, 0)), 1), i, 1,
5507 0, in_code == COMPARE);
5508 }
45620ed4 5509 /* Same as previous, but for (xor/ior (lshiftrt...) (lshiftrt...)). */
c2f9f64e
JW
5510 else if ((GET_CODE (XEXP (x, 0)) == XOR
5511 || GET_CODE (XEXP (x, 0)) == IOR)
5512 && GET_CODE (XEXP (XEXP (x, 0), 0)) == LSHIFTRT
5513 && GET_CODE (XEXP (XEXP (x, 0), 1)) == LSHIFTRT
5514 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
5515 {
5516 /* Apply the distributive law, and then try to make extractions. */
5517 new = gen_rtx_combine (GET_CODE (XEXP (x, 0)), mode,
5518 gen_rtx (AND, mode, XEXP (XEXP (x, 0), 0),
5519 XEXP (x, 1)),
5520 gen_rtx (AND, mode, XEXP (XEXP (x, 0), 1),
5521 XEXP (x, 1)));
5522 new = make_compound_operation (new, in_code);
5523 }
a7c99304
RK
5524
5525 /* If we have (and (rotate X C) M) and C is larger than the number
5526 of bits in M, this is an extraction. */
5527
5528 else if (GET_CODE (XEXP (x, 0)) == ROTATE
5529 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
5530 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0
5531 && i <= INTVAL (XEXP (XEXP (x, 0), 1)))
280f58ba
RK
5532 {
5533 new = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code);
5534 new = make_extraction (mode, new,
5535 (GET_MODE_BITSIZE (mode)
5536 - INTVAL (XEXP (XEXP (x, 0), 1))),
5537 NULL_RTX, i, 1, 0, in_code == COMPARE);
5538 }
a7c99304
RK
5539
5540 /* On machines without logical shifts, if the operand of the AND is
230d793d
RS
5541 a logical shift and our mask turns off all the propagated sign
5542 bits, we can replace the logical shift with an arithmetic shift. */
d0ab8cd3
RK
5543 else if (ashr_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
5544 && (lshr_optab->handlers[(int) mode].insn_code
5545 == CODE_FOR_nothing)
230d793d
RS
5546 && GET_CODE (XEXP (x, 0)) == LSHIFTRT
5547 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
5548 && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
5f4f0e22
CH
5549 && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT
5550 && mode_width <= HOST_BITS_PER_WIDE_INT)
230d793d 5551 {
5f4f0e22 5552 unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
230d793d
RS
5553
5554 mask >>= INTVAL (XEXP (XEXP (x, 0), 1));
5555 if ((INTVAL (XEXP (x, 1)) & ~mask) == 0)
5556 SUBST (XEXP (x, 0),
280f58ba
RK
5557 gen_rtx_combine (ASHIFTRT, mode,
5558 make_compound_operation (XEXP (XEXP (x, 0), 0),
5559 next_code),
230d793d
RS
5560 XEXP (XEXP (x, 0), 1)));
5561 }
5562
5563 /* If the constant is one less than a power of two, this might be
5564 representable by an extraction even if no shift is present.
5565 If it doesn't end up being a ZERO_EXTEND, we will ignore it unless
5566 we are in a COMPARE. */
5567 else if ((i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
280f58ba
RK
5568 new = make_extraction (mode,
5569 make_compound_operation (XEXP (x, 0),
5570 next_code),
5571 0, NULL_RTX, i, 1, 0, in_code == COMPARE);
230d793d
RS
5572
5573 /* If we are in a comparison and this is an AND with a power of two,
5574 convert this into the appropriate bit extract. */
5575 else if (in_code == COMPARE
5576 && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0)
280f58ba
RK
5577 new = make_extraction (mode,
5578 make_compound_operation (XEXP (x, 0),
5579 next_code),
5580 i, NULL_RTX, 1, 1, 0, 1);
230d793d
RS
5581
5582 break;
5583
5584 case LSHIFTRT:
5585 /* If the sign bit is known to be zero, replace this with an
5586 arithmetic shift. */
d0ab8cd3
RK
5587 if (ashr_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing
5588 && lshr_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
5f4f0e22 5589 && mode_width <= HOST_BITS_PER_WIDE_INT
951553af 5590 && (nonzero_bits (XEXP (x, 0), mode) & (1 << (mode_width - 1))) == 0)
230d793d 5591 {
280f58ba
RK
5592 new = gen_rtx_combine (ASHIFTRT, mode,
5593 make_compound_operation (XEXP (x, 0),
5594 next_code),
5595 XEXP (x, 1));
230d793d
RS
5596 break;
5597 }
5598
5599 /* ... fall through ... */
5600
5601 case ASHIFTRT:
71923da7
RK
5602 lhs = XEXP (x, 0);
5603 rhs = XEXP (x, 1);
5604
230d793d
RS
5605 /* If we have (ashiftrt (ashift foo C1) C2) with C2 >= C1,
5606 this is a SIGN_EXTRACT. */
71923da7
RK
5607 if (GET_CODE (rhs) == CONST_INT
5608 && GET_CODE (lhs) == ASHIFT
5609 && GET_CODE (XEXP (lhs, 1)) == CONST_INT
5610 && INTVAL (rhs) >= INTVAL (XEXP (lhs, 1)))
280f58ba 5611 {
71923da7 5612 new = make_compound_operation (XEXP (lhs, 0), next_code);
280f58ba 5613 new = make_extraction (mode, new,
71923da7
RK
5614 INTVAL (rhs) - INTVAL (XEXP (lhs, 1)),
5615 NULL_RTX, mode_width - INTVAL (rhs),
d0ab8cd3
RK
5616 code == LSHIFTRT, 0, in_code == COMPARE);
5617 }
5618
71923da7
RK
5619 /* See if we have operations between an ASHIFTRT and an ASHIFT.
5620 If so, try to merge the shifts into a SIGN_EXTEND. We could
5621 also do this for some cases of SIGN_EXTRACT, but it doesn't
5622 seem worth the effort; the case checked for occurs on Alpha. */
5623
5624 if (GET_RTX_CLASS (GET_CODE (lhs)) != 'o'
5625 && ! (GET_CODE (lhs) == SUBREG
5626 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (lhs))) == 'o'))
5627 && GET_CODE (rhs) == CONST_INT
5628 && INTVAL (rhs) < HOST_BITS_PER_WIDE_INT
5629 && (new = extract_left_shift (lhs, INTVAL (rhs))) != 0)
5630 new = make_extraction (mode, make_compound_operation (new, next_code),
5631 0, NULL_RTX, mode_width - INTVAL (rhs),
5632 code == LSHIFTRT, 0, in_code == COMPARE);
5633
230d793d 5634 break;
280f58ba
RK
5635
5636 case SUBREG:
5637 /* Call ourselves recursively on the inner expression. If we are
5638 narrowing the object and it has a different RTL code from
5639 what it originally did, do this SUBREG as a force_to_mode. */
5640
0a5cbff6 5641 tem = make_compound_operation (SUBREG_REG (x), in_code);
280f58ba
RK
5642 if (GET_CODE (tem) != GET_CODE (SUBREG_REG (x))
5643 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (tem))
5644 && subreg_lowpart_p (x))
0a5cbff6
RK
5645 {
5646 rtx newer = force_to_mode (tem, mode,
e3d616e3 5647 GET_MODE_MASK (mode), NULL_RTX, 0);
0a5cbff6
RK
5648
5649 /* If we have something other than a SUBREG, we might have
5650 done an expansion, so rerun ourselves. */
5651 if (GET_CODE (newer) != SUBREG)
5652 newer = make_compound_operation (newer, in_code);
5653
5654 return newer;
5655 }
230d793d
RS
5656 }
5657
5658 if (new)
5659 {
df62f951 5660 x = gen_lowpart_for_combine (mode, new);
230d793d
RS
5661 code = GET_CODE (x);
5662 }
5663
5664 /* Now recursively process each operand of this operation. */
5665 fmt = GET_RTX_FORMAT (code);
5666 for (i = 0; i < GET_RTX_LENGTH (code); i++)
5667 if (fmt[i] == 'e')
5668 {
5669 new = make_compound_operation (XEXP (x, i), next_code);
5670 SUBST (XEXP (x, i), new);
5671 }
5672
5673 return x;
5674}
5675\f
5676/* Given M see if it is a value that would select a field of bits
5677 within an item, but not the entire word. Return -1 if not.
5678 Otherwise, return the starting position of the field, where 0 is the
5679 low-order bit.
5680
5681 *PLEN is set to the length of the field. */
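/* Editor's example: M == 0x78 (binary 0111 1000) selects a 4-bit field
   starting at bit 3, so get_pos_from_mask returns 3 and sets *PLEN to 4;
   M == 0x50 has no contiguous run of ones and yields -1.  */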
5682
5683static int
5684get_pos_from_mask (m, plen)
5f4f0e22 5685 unsigned HOST_WIDE_INT m;
230d793d
RS
5686 int *plen;
5687{
5688 /* Get the bit number of the first 1 bit from the right, -1 if none. */
5689 int pos = exact_log2 (m & - m);
5690
5691 if (pos < 0)
5692 return -1;
5693
5694 /* Now shift off the low-order zero bits and see if we have a power of
5695 two minus 1. */
5696 *plen = exact_log2 ((m >> pos) + 1);
5697
5698 if (*plen <= 0)
5699 return -1;
5700
5701 return pos;
5702}
5703\f
6139ff20
RK
5704/* See if X can be simplified knowing that we will only refer to it in
5705 MODE and will only refer to those bits that are nonzero in MASK.
5706 If other bits are being computed or if masking operations are done
5707 that select a superset of the bits in MASK, they can sometimes be
5708 ignored.
5709
5710 Return a possibly simplified expression, but always convert X to
5711 MODE. If X is a CONST_INT, AND the CONST_INT with MASK.
dfbe1b2f
RK
5712
5713 Also, if REG is non-zero and X is a register equal in value to REG,
e3d616e3
RK
5714 replace X with REG.
5715
5716 If JUST_SELECT is nonzero, don't optimize by noticing that bits in MASK
5717 are all off in X. This is used when X will be complemented, by either
180b8e4b 5718 NOT, NEG, or XOR. */
dfbe1b2f
RK
5719
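/* Editor's example: with JUST_SELECT zero, calling force_to_mode on
   (and X 0xff00) with MASK 0xff returns const0_rtx, since none of the
   bits the caller will look at can ever be nonzero.  */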
5720static rtx
e3d616e3 5721force_to_mode (x, mode, mask, reg, just_select)
dfbe1b2f
RK
5722 rtx x;
5723 enum machine_mode mode;
6139ff20 5724 unsigned HOST_WIDE_INT mask;
dfbe1b2f 5725 rtx reg;
e3d616e3 5726 int just_select;
dfbe1b2f
RK
5727{
5728 enum rtx_code code = GET_CODE (x);
180b8e4b 5729 int next_select = just_select || code == XOR || code == NOT || code == NEG;
ef026f91
RS
5730 enum machine_mode op_mode;
5731 unsigned HOST_WIDE_INT fuller_mask, nonzero;
6139ff20
RK
5732 rtx op0, op1, temp;
5733
246e00f2
RK
5734 /* If this is a CALL, don't do anything. Some of the code below
5735 will do the wrong thing since the mode of a CALL is VOIDmode. */
5736 if (code == CALL)
5737 return x;
5738
6139ff20
RK
5739 /* We want to perform the operation in its present mode unless we know
5740 that the operation is valid in MODE, in which case we do the operation
5741 in MODE. */
1c75dfa4
RK
5742 op_mode = ((GET_MODE_CLASS (mode) == GET_MODE_CLASS (GET_MODE (x))
5743 && code_to_optab[(int) code] != 0
ef026f91
RS
5744 && (code_to_optab[(int) code]->handlers[(int) mode].insn_code
5745 != CODE_FOR_nothing))
5746 ? mode : GET_MODE (x));
e3d616e3 5747
aa988991
RS
5748 /* It is not valid to do a right-shift in a narrower mode
5749 than the one it came in with. */
5750 if ((code == LSHIFTRT || code == ASHIFTRT)
5751 && GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (GET_MODE (x)))
5752 op_mode = GET_MODE (x);
ef026f91
RS
5753
5754 /* Truncate MASK to fit OP_MODE. */
5755 if (op_mode)
5756 mask &= GET_MODE_MASK (op_mode);
6139ff20
RK
5757
5758 /* When we have an arithmetic operation, or a shift whose count we
5759 do not know, we need to assume that all bits up to the highest-order
5760 bit in MASK will be needed. This is how we form such a mask. */
ef026f91
RS
5761 if (op_mode)
5762 fuller_mask = (GET_MODE_BITSIZE (op_mode) >= HOST_BITS_PER_WIDE_INT
5763 ? GET_MODE_MASK (op_mode)
5764 : ((HOST_WIDE_INT) 1 << (floor_log2 (mask) + 1)) - 1);
5765 else
5766 fuller_mask = ~ (HOST_WIDE_INT) 0;
5767
5768 /* Determine what bits of X are guaranteed to be (non)zero. */
5769 nonzero = nonzero_bits (x, mode);
6139ff20
RK
5770
5771 /* If none of the bits in X are needed, return a zero. */
e3d616e3 5772 if (! just_select && (nonzero & mask) == 0)
6139ff20 5773 return const0_rtx;
dfbe1b2f 5774
6139ff20
RK
5775 /* If X is a CONST_INT, return a new one. Do this here since the
5776 test below will fail. */
5777 if (GET_CODE (x) == CONST_INT)
ceb7983c
RK
5778 {
5779 HOST_WIDE_INT cval = INTVAL (x) & mask;
5780 int width = GET_MODE_BITSIZE (mode);
5781
5782 /* If MODE is narrower than HOST_WIDE_INT and CVAL is a negative
5783 number, sign extend it. */
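      /* Editor's example: the QImode constant 255 masked with MASK 255
         still has bit 7 set, so it is sign extended and returned as
         GEN_INT (-1), the canonical QImode form of all-ones.  */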
5784 if (width > 0 && width < HOST_BITS_PER_WIDE_INT
5785 && (cval & ((HOST_WIDE_INT) 1 << (width - 1))) != 0)
5786 cval |= (HOST_WIDE_INT) -1 << width;
5787
5788 return GEN_INT (cval);
5789 }

  /* If X is narrower than MODE and we want all the bits in X's mode, just
     get X in the proper mode.  */
  if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (mode)
      && (GET_MODE_MASK (GET_MODE (x)) & ~ mask) == 0)
    return gen_lowpart_for_combine (mode, x);

  /* If we aren't changing the mode, X is not a SUBREG, and all zero bits in
     MASK are already known to be zero in X, we need not do anything.  */
  if (GET_MODE (x) == mode && code != SUBREG && (~ mask & nonzero) == 0)
    return x;

  switch (code)
    {
    case CLOBBER:
      /* If X is a (clobber (const_int)), return it since we know we are
         generating something that won't match.  */
      return x;

    case USE:
      /* X is a (use (mem ..)) that was made from a bit-field extraction that
         spanned the boundary of the MEM.  If we are now masking so it is
         within that boundary, we don't need the USE any more.  */
      if (! BITS_BIG_ENDIAN
          && (mask & ~ GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
        return force_to_mode (XEXP (x, 0), mode, mask, reg, next_select);
      break;

    case SIGN_EXTEND:
    case ZERO_EXTEND:
    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      x = expand_compound_operation (x);
      if (GET_CODE (x) != code)
        return force_to_mode (x, mode, mask, reg, next_select);
      break;

    case REG:
      if (reg != 0 && (rtx_equal_p (get_last_value (reg), x)
                       || rtx_equal_p (reg, get_last_value (x))))
        x = reg;
      break;

    case SUBREG:
      if (subreg_lowpart_p (x)
          /* We can ignore the effect of this SUBREG if it narrows the mode or
             if the constant masks to zero all the bits the mode doesn't
             have.  */
          && ((GET_MODE_SIZE (GET_MODE (x))
               < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
              || (0 == (mask
                        & GET_MODE_MASK (GET_MODE (x))
                        & ~ GET_MODE_MASK (GET_MODE (SUBREG_REG (x)))))))
        return force_to_mode (SUBREG_REG (x), mode, mask, reg, next_select);
      break;

    case AND:
      /* If this is an AND with a constant, convert it into an AND
         whose constant is the AND of that constant with MASK.  If it
         remains an AND of MASK, delete it since it is redundant.  */

      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
        {
          x = simplify_and_const_int (x, op_mode, XEXP (x, 0),
                                      mask & INTVAL (XEXP (x, 1)));

          /* If X is still an AND, see if it is an AND with a mask that
             is just some low-order bits.  If so, and it is MASK, we don't
             need it.  */

          if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT
              && INTVAL (XEXP (x, 1)) == mask)
            x = XEXP (x, 0);

          /* If it remains an AND, try making another AND with the bits
             in the mode mask that aren't in MASK turned on.  If the
             constant in the AND is wide enough, this might make a
             cheaper constant.  */

          if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT
              && GET_MODE_MASK (GET_MODE (x)) != mask
              && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT)
            {
              HOST_WIDE_INT cval = (INTVAL (XEXP (x, 1))
                                    | (GET_MODE_MASK (GET_MODE (x)) & ~ mask));
              int width = GET_MODE_BITSIZE (GET_MODE (x));
              rtx y;

              /* If MODE is narrower than HOST_WIDE_INT and CVAL is a negative
                 number, sign extend it.  */
              if (width > 0 && width < HOST_BITS_PER_WIDE_INT
                  && (cval & ((HOST_WIDE_INT) 1 << (width - 1))) != 0)
                cval |= (HOST_WIDE_INT) -1 << width;

              y = gen_binary (AND, GET_MODE (x), XEXP (x, 0), GEN_INT (cval));
              if (rtx_cost (y, SET) < rtx_cost (x, SET))
                x = y;
            }

          break;
        }

      goto binop;

    case PLUS:
      /* In (and (plus FOO C1) M), if M is a mask that just turns off
         low-order bits (as in an alignment operation) and FOO is already
         aligned to that boundary, mask C1 to that boundary as well.
         This may eliminate that PLUS and, later, the AND.  */

      {
        int width = GET_MODE_BITSIZE (mode);
        unsigned HOST_WIDE_INT smask = mask;

        /* If MODE is narrower than HOST_WIDE_INT and mask is a negative
           number, sign extend it.  */

        if (width < HOST_BITS_PER_WIDE_INT
            && (smask & ((HOST_WIDE_INT) 1 << (width - 1))) != 0)
          smask |= (HOST_WIDE_INT) -1 << width;

        if (GET_CODE (XEXP (x, 1)) == CONST_INT
            && exact_log2 (- smask) >= 0
            && (nonzero_bits (XEXP (x, 0), mode) & ~ mask) == 0
            && (INTVAL (XEXP (x, 1)) & ~ mask) != 0)
          return force_to_mode (plus_constant (XEXP (x, 0),
                                               INTVAL (XEXP (x, 1)) & mask),
                                mode, mask, reg, next_select);
      }
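      /* Illustrative example (hypothetical values): for
         (and (plus FOO (const_int 11)) (const_int -4)) where FOO is known
         to be a multiple of 4, the addend 11 is masked down to 8, giving
         (plus FOO (const_int 8)); the low two bits of the sum never
         mattered because the AND discards them.  */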

      /* ... fall through ... */

    case MINUS:
    case MULT:
      /* For PLUS, MINUS and MULT, we need any bits less significant than the
         most significant bit in MASK since carries from those bits will
         affect the bits we are interested in.  */
      mask = fuller_mask;
      goto binop;

    case IOR:
    case XOR:
      /* If X is (ior (lshiftrt FOO C1) C2), try to commute the IOR and
         LSHIFTRT so we end up with an (and (lshiftrt (ior ...) ...) ...)
         operation which may be a bitfield extraction.  Ensure that the
         constant we form is not wider than the mode of X.  */

      if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
          && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
          && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
          && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT
          && GET_CODE (XEXP (x, 1)) == CONST_INT
          && ((INTVAL (XEXP (XEXP (x, 0), 1))
               + floor_log2 (INTVAL (XEXP (x, 1))))
              < GET_MODE_BITSIZE (GET_MODE (x)))
          && (INTVAL (XEXP (x, 1))
              & ~ nonzero_bits (XEXP (x, 0), GET_MODE (x))) == 0)
        {
          temp = GEN_INT ((INTVAL (XEXP (x, 1)) & mask)
                          << INTVAL (XEXP (XEXP (x, 0), 1)));
          temp = gen_binary (GET_CODE (x), GET_MODE (x),
                             XEXP (XEXP (x, 0), 0), temp);
          x = gen_binary (LSHIFTRT, GET_MODE (x), temp,
                          XEXP (XEXP (x, 0), 1));
          return force_to_mode (x, mode, mask, reg, next_select);
        }
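      /* For instance (hypothetical values, assuming MASK keeps the low
         bits): (ior (lshiftrt FOO 4) (const_int 3)) can become
         (lshiftrt (ior FOO (const_int 0x30)) 4), since ORing in 3 after
         the shift is the same as ORing in 0x30 before it, and the result
         may now match a bitfield-extract pattern.  */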

    binop:
      /* For most binary operations, just propagate into the operation and
         change the mode if we have an operation of that mode.  */

      op0 = gen_lowpart_for_combine (op_mode,
                                     force_to_mode (XEXP (x, 0), mode, mask,
                                                    reg, next_select));
      op1 = gen_lowpart_for_combine (op_mode,
                                     force_to_mode (XEXP (x, 1), mode, mask,
                                                    reg, next_select));

      /* If OP1 is a CONST_INT and X is an IOR or XOR, clear bits outside
         MASK since OP1 might have been sign-extended but we never want
         to turn on extra bits, since combine might have previously relied
         on them being off.  */
      if (GET_CODE (op1) == CONST_INT && (code == IOR || code == XOR)
          && (INTVAL (op1) & mask) != 0)
        op1 = GEN_INT (INTVAL (op1) & mask);

      if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
        x = gen_binary (code, op_mode, op0, op1);
      break;

    case ASHIFT:
      /* For left shifts, do the same, but just for the first operand.
         However, we cannot do anything with shifts where we cannot
         guarantee that the counts are smaller than the size of the mode
         because such a count will have a different meaning in a
         wider mode.  */

      if (! (GET_CODE (XEXP (x, 1)) == CONST_INT
             && INTVAL (XEXP (x, 1)) >= 0
             && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (mode))
          && ! (GET_MODE (XEXP (x, 1)) != VOIDmode
                && (nonzero_bits (XEXP (x, 1), GET_MODE (XEXP (x, 1)))
                    < (unsigned HOST_WIDE_INT) GET_MODE_BITSIZE (mode))))
        break;

      /* If the shift count is a constant and we can do arithmetic in
         the mode of the shift, refine which bits we need.  Otherwise, use the
         conservative form of the mask.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
          && INTVAL (XEXP (x, 1)) >= 0
          && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (op_mode)
          && GET_MODE_BITSIZE (op_mode) <= HOST_BITS_PER_WIDE_INT)
        mask >>= INTVAL (XEXP (x, 1));
      else
        mask = fuller_mask;

      op0 = gen_lowpart_for_combine (op_mode,
                                     force_to_mode (XEXP (x, 0), op_mode,
                                                    mask, reg, next_select));

      if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0))
        x = gen_binary (code, op_mode, op0, XEXP (x, 1));
      break;

    case LSHIFTRT:
      /* Here we can only do something if the shift count is a constant,
         the shift constant is valid for the host, and we can do arithmetic
         in OP_MODE.  */

      if (GET_CODE (XEXP (x, 1)) == CONST_INT
          && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
          && GET_MODE_BITSIZE (op_mode) <= HOST_BITS_PER_WIDE_INT)
        {
          rtx inner = XEXP (x, 0);

          /* Select the mask of the bits we need for the shift operand.  */
          mask <<= INTVAL (XEXP (x, 1));

          /* We can only change the mode of the shift if we can do arithmetic
             in the mode of the shift and MASK is no wider than the width of
             OP_MODE.  */
          if (GET_MODE_BITSIZE (op_mode) > HOST_BITS_PER_WIDE_INT
              || (mask & ~ GET_MODE_MASK (op_mode)) != 0)
            op_mode = GET_MODE (x);

          inner = force_to_mode (inner, op_mode, mask, reg, next_select);

          if (GET_MODE (x) != op_mode || inner != XEXP (x, 0))
            x = gen_binary (LSHIFTRT, op_mode, inner, XEXP (x, 1));
        }

      /* If we have (and (lshiftrt FOO C1) C2) where the combination of the
         shift and AND produces only copies of the sign bit (C2 is one less
         than a power of two), we can do this with just a shift.  */

      if (GET_CODE (x) == LSHIFTRT
          && GET_CODE (XEXP (x, 1)) == CONST_INT
          && ((INTVAL (XEXP (x, 1))
               + num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0))))
              >= GET_MODE_BITSIZE (GET_MODE (x)))
          && exact_log2 (mask + 1) >= 0
          && (num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
              >= exact_log2 (mask + 1)))
        x = gen_binary (LSHIFTRT, GET_MODE (x), XEXP (x, 0),
                        GEN_INT (GET_MODE_BITSIZE (GET_MODE (x))
                                 - exact_log2 (mask + 1)));
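      /* For instance (hypothetical values): in SImode with MASK 3, if
         the operand of (lshiftrt FOO 29) is known to have 30 sign-bit
         copies, the masked result is just the sign bit replicated twice,
         so (lshiftrt FOO 30) computes it directly and the AND can be
         dropped.  */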
      break;

    case ASHIFTRT:
      /* If we are just looking for the sign bit, we don't need this shift at
         all, even if it has a variable count.  */
      if (GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
          && (mask == ((HOST_WIDE_INT) 1
                       << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
        return force_to_mode (XEXP (x, 0), mode, mask, reg, next_select);

      /* If this is a shift by a constant, get a mask that contains those bits
         that are not copies of the sign bit.  We then have two cases:  If
         MASK only includes those bits, this can be a logical shift, which may
         allow simplifications.  If MASK is a single-bit field not within
         those bits, we are requesting a copy of the sign bit and hence can
         shift the sign bit to the appropriate location.  */

      if (GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) >= 0
          && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
        {
          int i = -1;

          /* If the considered data is wider than HOST_WIDE_INT, we can't
             represent a mask for all its bits in a single scalar.
             But we only care about the lower bits, so calculate these.  */

          if (GET_MODE_BITSIZE (GET_MODE (x)) > HOST_BITS_PER_WIDE_INT)
            {
              nonzero = ~(HOST_WIDE_INT)0;

              /* GET_MODE_BITSIZE (GET_MODE (x)) - INTVAL (XEXP (x, 1))
                 is the number of bits a full-width mask would have set.
                 We need only shift if these are fewer than nonzero can
                 hold.  If not, we must keep all bits set in nonzero.  */

              if (GET_MODE_BITSIZE (GET_MODE (x)) - INTVAL (XEXP (x, 1))
                  < HOST_BITS_PER_WIDE_INT)
                nonzero >>= INTVAL (XEXP (x, 1))
                            + HOST_BITS_PER_WIDE_INT
                            - GET_MODE_BITSIZE (GET_MODE (x));
            }
          else
            {
              nonzero = GET_MODE_MASK (GET_MODE (x));
              nonzero >>= INTVAL (XEXP (x, 1));
            }

          if ((mask & ~ nonzero) == 0
              || (i = exact_log2 (mask)) >= 0)
            {
              x = simplify_shift_const
                (x, LSHIFTRT, GET_MODE (x), XEXP (x, 0),
                 i < 0 ? INTVAL (XEXP (x, 1))
                 : GET_MODE_BITSIZE (GET_MODE (x)) - 1 - i);

              if (GET_CODE (x) != ASHIFTRT)
                return force_to_mode (x, mode, mask, reg, next_select);
            }
        }

      /* If MASK is 1, convert this to a LSHIFTRT.  This can be done
         even if the shift count isn't a constant.  */
      if (mask == 1)
        x = gen_binary (LSHIFTRT, GET_MODE (x), XEXP (x, 0), XEXP (x, 1));

      /* If this is a sign-extension operation that just affects bits
         we don't care about, remove it.  Be sure the call above returned
         something that is still a shift.  */

      if ((GET_CODE (x) == LSHIFTRT || GET_CODE (x) == ASHIFTRT)
          && GET_CODE (XEXP (x, 1)) == CONST_INT
          && INTVAL (XEXP (x, 1)) >= 0
          && (INTVAL (XEXP (x, 1))
              <= GET_MODE_BITSIZE (GET_MODE (x)) - (floor_log2 (mask) + 1))
          && GET_CODE (XEXP (x, 0)) == ASHIFT
          && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
          && INTVAL (XEXP (XEXP (x, 0), 1)) == INTVAL (XEXP (x, 1)))
        return force_to_mode (XEXP (XEXP (x, 0), 0), mode, mask,
                              reg, next_select);
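      /* For instance (hypothetical values): (ashiftrt (ashift FOO 8) 8)
         in SImode sign-extends the low 24 bits of FOO; with MASK 0xff we
         want only bits the extension never changes, so the whole pair
         collapses to FOO.  */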

      break;

    case ROTATE:
    case ROTATERT:
      /* If the shift count is constant and we can do computations
         in the mode of X, compute where the bits we care about are.
         Otherwise, we can't do anything.  Don't change the mode of
         the shift or propagate MODE into the shift, though.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
          && INTVAL (XEXP (x, 1)) >= 0)
        {
          temp = simplify_binary_operation (code == ROTATE ? ROTATERT : ROTATE,
                                            GET_MODE (x), GEN_INT (mask),
                                            XEXP (x, 1));
          if (temp && GET_CODE (temp) == CONST_INT)
            SUBST (XEXP (x, 0),
                   force_to_mode (XEXP (x, 0), GET_MODE (x),
                                  INTVAL (temp), reg, next_select));
        }
      break;

    case NEG:
      /* If we just want the low-order bit, the NEG isn't needed since it
         won't change the low-order bit.  */
      if (mask == 1)
        return force_to_mode (XEXP (x, 0), mode, mask, reg, just_select);

      /* We need any bits less significant than the most significant bit in
         MASK since carries from those bits will affect the bits we are
         interested in.  */
      mask = fuller_mask;
      goto unop;

    case NOT:
      /* (not FOO) is (xor FOO CONST), so if FOO is an LSHIFTRT, we can do the
         same as the XOR case above.  Ensure that the constant we form is not
         wider than the mode of X.  */

      if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
          && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
          && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
          && (INTVAL (XEXP (XEXP (x, 0), 1)) + floor_log2 (mask)
              < GET_MODE_BITSIZE (GET_MODE (x)))
          && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT)
        {
          temp = GEN_INT (mask << INTVAL (XEXP (XEXP (x, 0), 1)));
          temp = gen_binary (XOR, GET_MODE (x), XEXP (XEXP (x, 0), 0), temp);
          x = gen_binary (LSHIFTRT, GET_MODE (x), temp, XEXP (XEXP (x, 0), 1));

          return force_to_mode (x, mode, mask, reg, next_select);
        }
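      /* For instance (hypothetical values): with MASK 1,
         (not (lshiftrt FOO 4)) becomes (lshiftrt (xor FOO 0x10) 4), since
         inverting bit 4 of FOO before the shift is the same as inverting
         bit 0 of the shifted result.  */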

      /* (and (not FOO) CONST) is (not (or FOO (not CONST))), so we must
         use the full mask inside the NOT.  */
      mask = fuller_mask;

    unop:
      op0 = gen_lowpart_for_combine (op_mode,
                                     force_to_mode (XEXP (x, 0), mode, mask,
                                                    reg, next_select));
      if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0))
        x = gen_unary (code, op_mode, op_mode, op0);
      break;

    case NE:
      /* (and (ne FOO 0) CONST) can be (and FOO CONST) if CONST is included
         in STORE_FLAG_VALUE and FOO has no bits that might be nonzero not
         in CONST.  */
      if ((mask & ~ STORE_FLAG_VALUE) == 0 && XEXP (x, 1) == const0_rtx
          && (nonzero_bits (XEXP (x, 0), mode) & ~ mask) == 0)
        return force_to_mode (XEXP (x, 0), mode, mask, reg, next_select);

      break;

    case IF_THEN_ELSE:
      /* We have no way of knowing if the IF_THEN_ELSE can itself be
         written in a narrower mode.  We play it safe and do not do so.  */

      SUBST (XEXP (x, 1),
             gen_lowpart_for_combine (GET_MODE (x),
                                      force_to_mode (XEXP (x, 1), mode,
                                                     mask, reg, next_select)));
      SUBST (XEXP (x, 2),
             gen_lowpart_for_combine (GET_MODE (x),
                                      force_to_mode (XEXP (x, 2), mode,
                                                     mask, reg, next_select)));
      break;
    }

  /* Ensure we return a value of the proper mode.  */
  return gen_lowpart_for_combine (mode, x);
}
\f
/* Return nonzero if X is an expression that has one of two values depending on
   whether some other value is zero or nonzero.  In that case, we return the
   value that is being tested, *PTRUE is set to the value if the rtx being
   returned has a nonzero value, and *PFALSE is set to the other alternative.

   If we return zero, we set *PTRUE and *PFALSE to X.  */

static rtx
if_then_else_cond (x, ptrue, pfalse)
     rtx x;
     rtx *ptrue, *pfalse;
{
  enum machine_mode mode = GET_MODE (x);
  enum rtx_code code = GET_CODE (x);
  int size = GET_MODE_BITSIZE (mode);
  rtx cond0, cond1, true0, true1, false0, false1;
  unsigned HOST_WIDE_INT nz;

  /* If this is a unary operation whose operand has one of two values, apply
     our opcode to compute those values.  */
  if (GET_RTX_CLASS (code) == '1'
      && (cond0 = if_then_else_cond (XEXP (x, 0), &true0, &false0)) != 0)
    {
      *ptrue = gen_unary (code, mode, GET_MODE (XEXP (x, 0)), true0);
      *pfalse = gen_unary (code, mode, GET_MODE (XEXP (x, 0)), false0);
      return cond0;
    }

  /* If this is a COMPARE, do nothing, since the IF_THEN_ELSE we would
     make can't possibly match and would suppress other optimizations.  */
  else if (code == COMPARE)
    ;

  /* If this is a binary operation, see if either side has only one of two
     values.  If either one does or if both do and they are conditional on
     the same value, compute the new true and false values.  */
  else if (GET_RTX_CLASS (code) == 'c' || GET_RTX_CLASS (code) == '2'
           || GET_RTX_CLASS (code) == '<')
    {
      cond0 = if_then_else_cond (XEXP (x, 0), &true0, &false0);
      cond1 = if_then_else_cond (XEXP (x, 1), &true1, &false1);

      if ((cond0 != 0 || cond1 != 0)
          && ! (cond0 != 0 && cond1 != 0 && ! rtx_equal_p (cond0, cond1)))
        {
          *ptrue = gen_binary (code, mode, true0, true1);
          *pfalse = gen_binary (code, mode, false0, false1);
          return cond0 ? cond0 : cond1;
        }

#if STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1

      /* See if we have PLUS, IOR, XOR, MINUS or UMAX, where one of the
         operands is zero when the other is non-zero, and vice-versa.  */

      if ((code == PLUS || code == IOR || code == XOR || code == MINUS
           || code == UMAX)
          && GET_CODE (XEXP (x, 0)) == MULT && GET_CODE (XEXP (x, 1)) == MULT)
        {
          rtx op0 = XEXP (XEXP (x, 0), 1);
          rtx op1 = XEXP (XEXP (x, 1), 1);

          cond0 = XEXP (XEXP (x, 0), 0);
          cond1 = XEXP (XEXP (x, 1), 0);

          if (GET_RTX_CLASS (GET_CODE (cond0)) == '<'
              && GET_RTX_CLASS (GET_CODE (cond1)) == '<'
              && reversible_comparison_p (cond1)
              && ((GET_CODE (cond0) == reverse_condition (GET_CODE (cond1))
                   && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 0))
                   && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 1)))
                  || ((swap_condition (GET_CODE (cond0))
                       == reverse_condition (GET_CODE (cond1)))
                      && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 1))
                      && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 0))))
              && ! side_effects_p (x))
            {
              *ptrue = gen_binary (MULT, mode, op0, const_true_rtx);
              *pfalse = gen_binary (MULT, mode,
                                    (code == MINUS
                                     ? gen_unary (NEG, mode, mode, op1) : op1),
                                    const_true_rtx);
              return cond0;
            }
        }
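      /* For instance (hypothetical values): with STORE_FLAG_VALUE == 1,
         (plus (mult (gt A B) C) (mult (le A B) D)) is C when (gt A B)
         holds and D otherwise, since exactly one of the two comparisons
         is 1; we return (gt A B) with *PTRUE = (mult C 1) and
         *PFALSE = (mult D 1).  */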

      /* Similarly for MULT, AND and UMIN, except that for these the result
         is always zero.  */
      if ((code == MULT || code == AND || code == UMIN)
          && GET_CODE (XEXP (x, 0)) == MULT && GET_CODE (XEXP (x, 1)) == MULT)
        {
          cond0 = XEXP (XEXP (x, 0), 0);
          cond1 = XEXP (XEXP (x, 1), 0);

          if (GET_RTX_CLASS (GET_CODE (cond0)) == '<'
              && GET_RTX_CLASS (GET_CODE (cond1)) == '<'
              && reversible_comparison_p (cond1)
              && ((GET_CODE (cond0) == reverse_condition (GET_CODE (cond1))
                   && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 0))
                   && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 1)))
                  || ((swap_condition (GET_CODE (cond0))
                       == reverse_condition (GET_CODE (cond1)))
                      && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 1))
                      && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 0))))
              && ! side_effects_p (x))
            {
              *ptrue = *pfalse = const0_rtx;
              return cond0;
            }
        }
#endif
    }

  else if (code == IF_THEN_ELSE)
    {
      /* If we have IF_THEN_ELSE already, extract the condition and
         canonicalize it if it is NE or EQ.  */
      cond0 = XEXP (x, 0);
      *ptrue = XEXP (x, 1), *pfalse = XEXP (x, 2);
      if (GET_CODE (cond0) == NE && XEXP (cond0, 1) == const0_rtx)
        return XEXP (cond0, 0);
      else if (GET_CODE (cond0) == EQ && XEXP (cond0, 1) == const0_rtx)
        {
          *ptrue = XEXP (x, 2), *pfalse = XEXP (x, 1);
          return XEXP (cond0, 0);
        }
      else
        return cond0;
    }

  /* If X is a normal SUBREG with both inner and outer modes integral,
     we can narrow both the true and false values of the inner expression,
     if there is a condition.  */
  else if (code == SUBREG && GET_MODE_CLASS (mode) == MODE_INT
           && GET_MODE_CLASS (GET_MODE (SUBREG_REG (x))) == MODE_INT
           && GET_MODE_SIZE (mode) <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
           && 0 != (cond0 = if_then_else_cond (SUBREG_REG (x),
                                               &true0, &false0)))
    {
      *ptrue = force_to_mode (true0, mode, GET_MODE_MASK (mode), NULL_RTX, 0);
      *pfalse
        = force_to_mode (false0, mode, GET_MODE_MASK (mode), NULL_RTX, 0);

      return cond0;
    }

  /* If X is a constant, this isn't special and will cause confusion
     if we treat it as such.  Likewise if it is equivalent to a constant.  */
  else if (CONSTANT_P (x)
           || ((cond0 = get_last_value (x)) != 0 && CONSTANT_P (cond0)))
    ;

  /* If X is known to be either 0 or -1, those are the true and
     false values when testing X.  */
  else if (num_sign_bit_copies (x, mode) == size)
    {
      *ptrue = constm1_rtx, *pfalse = const0_rtx;
      return x;
    }

  /* Likewise for 0 or a single bit.  */
  else if (exact_log2 (nz = nonzero_bits (x, mode)) >= 0)
    {
      *ptrue = GEN_INT (nz), *pfalse = const0_rtx;
      return x;
    }

  /* Otherwise fail; show no condition with true and false values the same.  */
  *ptrue = *pfalse = x;
  return 0;
}
\f
/* Return the value of expression X given the fact that condition COND
   is known to be true when applied to REG as its first operand and VAL
   as its second.  X is known to not be shared and so can be modified in
   place.

   We only handle the simplest cases, and specifically those cases that
   arise with IF_THEN_ELSE expressions.  */

static rtx
known_cond (x, cond, reg, val)
     rtx x;
     enum rtx_code cond;
     rtx reg, val;
{
  enum rtx_code code = GET_CODE (x);
  rtx temp;
  char *fmt;
  int i, j;

  if (side_effects_p (x))
    return x;

  if (cond == EQ && rtx_equal_p (x, reg))
    return val;

  /* If X is (abs REG) and we know something about REG's relationship
     with zero, we may be able to simplify this.  */

  if (code == ABS && rtx_equal_p (XEXP (x, 0), reg) && val == const0_rtx)
    switch (cond)
      {
      case GE:  case GT:  case EQ:
        return XEXP (x, 0);
      case LT:  case LE:
        return gen_unary (NEG, GET_MODE (XEXP (x, 0)), GET_MODE (XEXP (x, 0)),
                          XEXP (x, 0));
      }

  /* The only other cases we handle are MIN, MAX, and comparisons if the
     operands are the same as REG and VAL.  */

  else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == 'c')
    {
      if (rtx_equal_p (XEXP (x, 0), val))
        cond = swap_condition (cond), temp = val, val = reg, reg = temp;

      if (rtx_equal_p (XEXP (x, 0), reg) && rtx_equal_p (XEXP (x, 1), val))
        {
          if (GET_RTX_CLASS (code) == '<')
            return (comparison_dominates_p (cond, code) ? const_true_rtx
                    : (comparison_dominates_p (cond,
                                               reverse_condition (code))
                       ? const0_rtx : x));

          else if (code == SMAX || code == SMIN
                   || code == UMIN || code == UMAX)
            {
              int unsignedp = (code == UMIN || code == UMAX);

              if (code == SMAX || code == UMAX)
                cond = reverse_condition (cond);

              switch (cond)
                {
                case GE:   case GT:
                  return unsignedp ? x : XEXP (x, 1);
                case LE:   case LT:
                  return unsignedp ? x : XEXP (x, 0);
                case GEU:  case GTU:
                  return unsignedp ? XEXP (x, 1) : x;
                case LEU:  case LTU:
                  return unsignedp ? XEXP (x, 0) : x;
                }
            }
        }
    }
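  /* For instance (hypothetical operands): if we know (lt REG VAL) holds,
     then (smin REG VAL) is just REG and (smax REG VAL) is just VAL;
     reversing the condition for the MAX codes lets one table serve
     both.  */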

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        SUBST (XEXP (x, i), known_cond (XEXP (x, i), cond, reg, val));
      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          SUBST (XVECEXP (x, i, j), known_cond (XVECEXP (x, i, j),
                                                cond, reg, val));
    }

  return x;
}
\f
/* See if X and Y are equal for the purposes of seeing if we can rewrite an
   assignment as a field assignment.  */

static int
rtx_equal_for_field_assignment_p (x, y)
     rtx x;
     rtx y;
{
  rtx last_x, last_y;

  if (x == y || rtx_equal_p (x, y))
    return 1;

  if (x == 0 || y == 0 || GET_MODE (x) != GET_MODE (y))
    return 0;

  /* Check for a paradoxical SUBREG of a MEM compared with the MEM.
     Note that all SUBREGs of MEM are paradoxical; otherwise they
     would have been rewritten.  */
  if (GET_CODE (x) == MEM && GET_CODE (y) == SUBREG
      && GET_CODE (SUBREG_REG (y)) == MEM
      && rtx_equal_p (SUBREG_REG (y),
                      gen_lowpart_for_combine (GET_MODE (SUBREG_REG (y)), x)))
    return 1;

  if (GET_CODE (y) == MEM && GET_CODE (x) == SUBREG
      && GET_CODE (SUBREG_REG (x)) == MEM
      && rtx_equal_p (SUBREG_REG (x),
                      gen_lowpart_for_combine (GET_MODE (SUBREG_REG (x)), y)))
    return 1;

  last_x = get_last_value (x);
  last_y = get_last_value (y);

  return ((last_x != 0 && rtx_equal_for_field_assignment_p (last_x, y))
          || (last_y != 0 && rtx_equal_for_field_assignment_p (x, last_y))
          || (last_x != 0 && last_y != 0
              && rtx_equal_for_field_assignment_p (last_x, last_y)));
}
\f
/* See if X, a SET operation, can be rewritten as a bit-field assignment.
   Return that assignment if so.

   We only handle the most common cases.  */

static rtx
make_field_assignment (x)
     rtx x;
{
  rtx dest = SET_DEST (x);
  rtx src = SET_SRC (x);
  rtx assign;
  rtx rhs, lhs;
  HOST_WIDE_INT c1;
  int pos, len;
  rtx other;
  enum machine_mode mode;

  /* If SRC was (and (not (ashift (const_int 1) POS)) DEST), this is
     a clear of a one-bit field.  We will have changed it to
     (and (rotate (const_int -2) POS) DEST), so check for that.  Also check
     for a SUBREG.  */

  if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == ROTATE
      && GET_CODE (XEXP (XEXP (src, 0), 0)) == CONST_INT
      && INTVAL (XEXP (XEXP (src, 0), 0)) == -2
      && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
    {
      assign = make_extraction (VOIDmode, dest, 0, XEXP (XEXP (src, 0), 1),
                                1, 1, 1, 0);
      if (assign != 0)
        return gen_rtx (SET, VOIDmode, assign, const0_rtx);
      return x;
    }

  else if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == SUBREG
           && subreg_lowpart_p (XEXP (src, 0))
           && (GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
               < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (src, 0)))))
           && GET_CODE (SUBREG_REG (XEXP (src, 0))) == ROTATE
           && INTVAL (XEXP (SUBREG_REG (XEXP (src, 0)), 0)) == -2
           && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
    {
      assign = make_extraction (VOIDmode, dest, 0,
                                XEXP (SUBREG_REG (XEXP (src, 0)), 1),
                                1, 1, 1, 0);
      if (assign != 0)
        return gen_rtx (SET, VOIDmode, assign, const0_rtx);
      return x;
    }

  /* If SRC is (ior (ashift (const_int 1) POS) DEST), this is a set of a
     one-bit field.  */
  else if (GET_CODE (src) == IOR && GET_CODE (XEXP (src, 0)) == ASHIFT
           && XEXP (XEXP (src, 0), 0) == const1_rtx
           && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
    {
      assign = make_extraction (VOIDmode, dest, 0, XEXP (XEXP (src, 0), 1),
                                1, 1, 1, 0);
      if (assign != 0)
        return gen_rtx (SET, VOIDmode, assign, const1_rtx);
      return x;
    }

  /* The other case we handle is assignments into a constant-position
     field.  They look like (ior (and DEST C1) OTHER).  If C1 represents
     a mask that has all one bits except for a group of zero bits and
     OTHER is known to have zeros where C1 has ones, this is such an
     assignment.  Compute the position and length from C1.  Shift OTHER
     to the appropriate position, force it to the required mode, and
     make the extraction.  Check for the AND in both operands.  */
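  /* For instance (hypothetical values): in SImode,
     (set DEST (ior (and DEST (const_int 0xffff00ff)) OTHER)), where
     OTHER is known to be zero outside 0x0000ff00, assigns an 8-bit
     field: ~C1 gives POS 8 and LEN 8, and OTHER shifted right 8 places
     becomes the source of the extraction.  */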

  if (GET_CODE (src) != IOR)
    return x;

  rhs = expand_compound_operation (XEXP (src, 0));
  lhs = expand_compound_operation (XEXP (src, 1));

  if (GET_CODE (rhs) == AND
      && GET_CODE (XEXP (rhs, 1)) == CONST_INT
      && rtx_equal_for_field_assignment_p (XEXP (rhs, 0), dest))
    c1 = INTVAL (XEXP (rhs, 1)), other = lhs;
  else if (GET_CODE (lhs) == AND
           && GET_CODE (XEXP (lhs, 1)) == CONST_INT
           && rtx_equal_for_field_assignment_p (XEXP (lhs, 0), dest))
    c1 = INTVAL (XEXP (lhs, 1)), other = rhs;
  else
    return x;

  pos = get_pos_from_mask ((~ c1) & GET_MODE_MASK (GET_MODE (dest)), &len);
  if (pos < 0 || pos + len > GET_MODE_BITSIZE (GET_MODE (dest))
      || (GET_MODE_BITSIZE (GET_MODE (other)) <= HOST_BITS_PER_WIDE_INT
          && (c1 & nonzero_bits (other, GET_MODE (other))) != 0))
    return x;

  assign = make_extraction (VOIDmode, dest, pos, NULL_RTX, len, 1, 1, 0);
  if (assign == 0)
    return x;

  /* The mode to use for the source is the mode of the assignment, or of
     what is inside a possible STRICT_LOW_PART.  */
  mode = (GET_CODE (assign) == STRICT_LOW_PART
          ? GET_MODE (XEXP (assign, 0)) : GET_MODE (assign));

  /* Shift OTHER right POS places and make it the source, restricting it
     to the proper length and mode.  */

  src = force_to_mode (simplify_shift_const (NULL_RTX, LSHIFTRT,
                                             GET_MODE (src), other, pos),
                       mode,
                       GET_MODE_BITSIZE (mode) >= HOST_BITS_PER_WIDE_INT
                       ? GET_MODE_MASK (mode)
                       : ((HOST_WIDE_INT) 1 << len) - 1,
                       dest, 0);

  return gen_rtx_combine (SET, VOIDmode, assign, src);
}
\f
/* See if X is of the form (+ (* a c) (* b c)) and convert to (* (+ a b) c)
   if so.  */

static rtx
apply_distributive_law (x)
     rtx x;
{
  enum rtx_code code = GET_CODE (x);
  rtx lhs, rhs, other;
  rtx tem;
  enum rtx_code inner_code;

  /* Distributivity is not true for floating point.
     It can change the value.  So don't do it.
     -- rms and moshier@world.std.com.  */
  if (FLOAT_MODE_P (GET_MODE (x)))
    return x;

  /* The outer operation can only be one of the following:  */
  if (code != IOR && code != AND && code != XOR
      && code != PLUS && code != MINUS)
    return x;

  lhs = XEXP (x, 0), rhs = XEXP (x, 1);

  /* If either operand is a primitive we can't do anything, so get out
     fast.  */
  if (GET_RTX_CLASS (GET_CODE (lhs)) == 'o'
      || GET_RTX_CLASS (GET_CODE (rhs)) == 'o')
    return x;

  lhs = expand_compound_operation (lhs);
  rhs = expand_compound_operation (rhs);
  inner_code = GET_CODE (lhs);
  if (inner_code != GET_CODE (rhs))
    return x;

  /* See if the inner and outer operations distribute.  */
  switch (inner_code)
    {
    case LSHIFTRT:
    case ASHIFTRT:
    case AND:
    case IOR:
      /* These all distribute except over PLUS.  */
      if (code == PLUS || code == MINUS)
        return x;
      break;

    case MULT:
      if (code != PLUS && code != MINUS)
        return x;
      break;

    case ASHIFT:
      /* This is also a multiply, so it distributes over everything.  */
      break;

    case SUBREG:
      /* Non-paradoxical SUBREGs distribute over all operations, provided
         the inner modes and word numbers are the same, this is an extraction
         of a low-order part, we don't convert an fp operation to int or
         vice versa, and we would not be converting a single-word
         operation into a multi-word operation.  The latter test is not
         required, but it prevents generating unneeded multi-word operations.
         Some of the previous tests are redundant given the latter test, but
         are retained because they are required for correctness.

         We produce the result slightly differently in this case.  */

      if (GET_MODE (SUBREG_REG (lhs)) != GET_MODE (SUBREG_REG (rhs))
          || SUBREG_WORD (lhs) != SUBREG_WORD (rhs)
          || ! subreg_lowpart_p (lhs)
          || (GET_MODE_CLASS (GET_MODE (lhs))
              != GET_MODE_CLASS (GET_MODE (SUBREG_REG (lhs))))
          || (GET_MODE_SIZE (GET_MODE (lhs))
              > GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs))))
          || GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs))) > UNITS_PER_WORD)
        return x;

      tem = gen_binary (code, GET_MODE (SUBREG_REG (lhs)),
                        SUBREG_REG (lhs), SUBREG_REG (rhs));
      return gen_lowpart_for_combine (GET_MODE (x), tem);

    default:
      return x;
    }

  /* Set LHS and RHS to the inner operands (A and B in the example
     above) and set OTHER to the common operand (C in the example).
     There is only one way to do this unless the inner operation is
     commutative.  */
  if (GET_RTX_CLASS (inner_code) == 'c'
      && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 0)))
    other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 1);
  else if (GET_RTX_CLASS (inner_code) == 'c'
           && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 1)))
    other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 0);
  else if (GET_RTX_CLASS (inner_code) == 'c'
           && rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 0)))
    other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 1);
  else if (rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 1)))
    other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 0);
  else
    return x;

  /* Form the new inner operation, seeing if it simplifies first.  */
  tem = gen_binary (code, GET_MODE (x), lhs, rhs);

  /* There is one exception to the general way of distributing:
     (a | b) ^ (a | c) -> (~a) & (b ^ c)  */
  if (code == XOR && inner_code == IOR)
    {
      inner_code = AND;
      other = gen_unary (NOT, GET_MODE (x), GET_MODE (x), other);
    }
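  /* To see the identity bit by bit (hypothetical operands): where A is 1,
     both (ior A B) and (ior A C) are 1, so their XOR is 0, matching
     (and (not A) ...); where A is 0, the XOR is just B ^ C.  */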

  /* We may be able to continue distributing the result, so call
     ourselves recursively on the inner operation before forming the
     outer operation, which we return.  */
  return gen_binary (inner_code, GET_MODE (x),
                     apply_distributive_law (tem), other);
}
\f
/* We have X, a logical `and' of VAROP with the constant CONSTOP, to be done
   in MODE.

   Return an equivalent form, if different from X.  Otherwise, return X.  If
   X is zero, we are to always construct the equivalent form.  */

static rtx
simplify_and_const_int (x, mode, varop, constop)
     rtx x;
     enum machine_mode mode;
     rtx varop;
     unsigned HOST_WIDE_INT constop;
{
  unsigned HOST_WIDE_INT nonzero;
  int width = GET_MODE_BITSIZE (mode);
  int i;

  /* Simplify VAROP knowing that we will be only looking at some of the
     bits in it.  */
  varop = force_to_mode (varop, mode, constop, NULL_RTX, 0);

  /* If VAROP is a CLOBBER, we will fail so return it; if it is a
     CONST_INT, we are done.  */
  if (GET_CODE (varop) == CLOBBER || GET_CODE (varop) == CONST_INT)
    return varop;

  /* See what bits may be nonzero in VAROP.  Unlike the general case of
     a call to nonzero_bits, here we don't care about bits outside
     MODE.  */

  nonzero = nonzero_bits (varop, mode) & GET_MODE_MASK (mode);

  /* If this would be an entire word for the target, but is not for
     the host, then sign-extend on the host so that the number will look
     the same way on the host that it would on the target.

     For example, when building a 64 bit alpha hosted 32 bit sparc
     targeted compiler, then we want the 32 bit unsigned value -1 to be
     represented as a 64 bit value -1, and not as 0x00000000ffffffff.
     The latter confuses the sparc backend.  */

  if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT && BITS_PER_WORD == width
      && (nonzero & ((HOST_WIDE_INT) 1 << (width - 1))))
    nonzero |= ((HOST_WIDE_INT) (-1) << width);

  /* Turn off all bits in the constant that are known to already be zero.
     Thus, if the AND isn't needed at all, we will have CONSTOP == NONZERO_BITS
     which is tested below.  */

  constop &= nonzero;

  /* If we don't have any bits left, return zero.  */
  if (constop == 0)
    return const0_rtx;

  /* If VAROP is a NEG of something known to be zero or 1 and CONSTOP is
     a power of two, we can replace this with an ASHIFT.  */
  if (GET_CODE (varop) == NEG && nonzero_bits (XEXP (varop, 0), mode) == 1
      && (i = exact_log2 (constop)) >= 0)
    return simplify_shift_const (NULL_RTX, ASHIFT, mode, XEXP (varop, 0), i);
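  /* For instance (hypothetical values): if X is known to be 0 or 1,
     (and (neg X) 4) is X << 2, since the NEG gives 0 or -1 and masking
     -1 with the power of two 4 leaves exactly 1 << 2.  */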

  /* If VAROP is an IOR or XOR, apply the AND to both branches of the IOR
     or XOR, then try to apply the distributive law.  This may eliminate
     operations if either branch can be simplified because of the AND.
     It may also make some cases more complex, but those cases probably
     won't match a pattern either with or without this.  */

  if (GET_CODE (varop) == IOR || GET_CODE (varop) == XOR)
    return
      gen_lowpart_for_combine
        (mode,
         apply_distributive_law
           (gen_binary (GET_CODE (varop), GET_MODE (varop),
                        simplify_and_const_int (NULL_RTX, GET_MODE (varop),
                                                XEXP (varop, 0), constop),
                        simplify_and_const_int (NULL_RTX, GET_MODE (varop),
                                                XEXP (varop, 1), constop))));

  /* Get VAROP in MODE.  Try to get a SUBREG if not.  Don't make a new SUBREG
     if we already had one (just check for the simplest cases).  */
  if (x && GET_CODE (XEXP (x, 0)) == SUBREG
      && GET_MODE (XEXP (x, 0)) == mode
      && SUBREG_REG (XEXP (x, 0)) == varop)
    varop = XEXP (x, 0);
  else
    varop = gen_lowpart_for_combine (mode, varop);

  /* If we can't make the SUBREG, try to return what we were given.  */
  if (GET_CODE (varop) == CLOBBER)
    return x ? x : varop;

  /* If we are only masking insignificant bits, return VAROP.  */
  if (constop == nonzero)
    x = varop;

  /* Otherwise, return an AND.  See how much, if any, of X we can use.  */
  else if (x == 0 || GET_CODE (x) != AND || GET_MODE (x) != mode)
    x = gen_binary (AND, mode, varop, GEN_INT (constop));

  else
    {
      if (GET_CODE (XEXP (x, 1)) != CONST_INT
          || INTVAL (XEXP (x, 1)) != constop)
        SUBST (XEXP (x, 1), GEN_INT (constop));

      SUBST (XEXP (x, 0), varop);
    }

  return x;
}
\f
/* Given an expression, X, compute which bits in X can be non-zero.
   We don't care about bits outside of those defined in MODE.

   For most X this is simply GET_MODE_MASK (MODE), but if X is
   a shift, AND, or zero_extract, we can do better.  */

static unsigned HOST_WIDE_INT
nonzero_bits (x, mode)
     rtx x;
     enum machine_mode mode;
{
  unsigned HOST_WIDE_INT nonzero = GET_MODE_MASK (mode);
  unsigned HOST_WIDE_INT inner_nz;
  enum rtx_code code;
  int mode_width = GET_MODE_BITSIZE (mode);
  rtx tem;

  /* For floating-point values, assume all bits are needed.  */
  if (FLOAT_MODE_P (GET_MODE (x)) || FLOAT_MODE_P (mode))
    return nonzero;

  /* If X is wider than MODE, use its mode instead.  */
  if (GET_MODE_BITSIZE (GET_MODE (x)) > mode_width)
    {
      mode = GET_MODE (x);
      nonzero = GET_MODE_MASK (mode);
      mode_width = GET_MODE_BITSIZE (mode);
    }

  if (mode_width > HOST_BITS_PER_WIDE_INT)
    /* Our only callers in this case look for single bit values.  So
       just return the mode mask.  Those tests will then be false.  */
    return nonzero;

#ifndef WORD_REGISTER_OPERATIONS
  /* If MODE is wider than X, but both are a single word for both the host
     and target machines, we can compute this from which bits of the
     object might be nonzero in its own mode, taking into account the fact
     that on many CISC machines, accessing an object in a wider mode
     causes the high-order bits to become undefined.  So they are
     not known to be zero.  */

  if (GET_MODE (x) != VOIDmode && GET_MODE (x) != mode
      && GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD
      && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
      && GET_MODE_BITSIZE (mode) > GET_MODE_BITSIZE (GET_MODE (x)))
    {
      nonzero &= nonzero_bits (x, GET_MODE (x));
      nonzero |= GET_MODE_MASK (mode) & ~ GET_MODE_MASK (GET_MODE (x));
      return nonzero;
    }
#endif

  code = GET_CODE (x);
  switch (code)
    {
    case REG:
#ifdef POINTERS_EXTEND_UNSIGNED
      /* If pointers extend unsigned and this is a pointer in Pmode, say that
         all the bits above ptr_mode are known to be zero.  */
      if (POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
          && REGNO_POINTER_FLAG (REGNO (x)))
        nonzero &= GET_MODE_MASK (ptr_mode);
#endif

#ifdef STACK_BOUNDARY
      /* If this is the stack pointer, we may know something about its
         alignment.  If PUSH_ROUNDING is defined, it is possible for the
         stack to be momentarily aligned only to that amount, so we pick
         the least alignment.  */

      if (x == stack_pointer_rtx || x == frame_pointer_rtx
          || x == arg_pointer_rtx || x == hard_frame_pointer_rtx
          || (REGNO (x) >= FIRST_VIRTUAL_REGISTER
              && REGNO (x) <= LAST_VIRTUAL_REGISTER))
        {
          int sp_alignment = STACK_BOUNDARY / BITS_PER_UNIT;

#ifdef PUSH_ROUNDING
          if (REGNO (x) == STACK_POINTER_REGNUM)
            sp_alignment = MIN (PUSH_ROUNDING (1), sp_alignment);
#endif

          /* We must return here, otherwise we may get a worse result from
             one of the choices below.  There is nothing useful below as
             far as the stack pointer is concerned.  */
          return nonzero &= ~ (sp_alignment - 1);
        }
#endif

      /* If X is a register whose nonzero bits value is current, use it.
         Otherwise, if X is a register whose value we can find, use that
         value.  Otherwise, use the previously-computed global nonzero bits
         for this register.  */

      if (reg_last_set_value[REGNO (x)] != 0
          && reg_last_set_mode[REGNO (x)] == mode
          && (reg_n_sets[REGNO (x)] == 1
              || reg_last_set_label[REGNO (x)] == label_tick)
          && INSN_CUID (reg_last_set[REGNO (x)]) < subst_low_cuid)
        return reg_last_set_nonzero_bits[REGNO (x)];

      tem = get_last_value (x);

      if (tem)
        {
#ifdef SHORT_IMMEDIATES_SIGN_EXTEND
          /* If X is narrower than MODE and TEM is a non-negative
             constant that would appear negative in the mode of X,
             sign-extend it for use in reg_nonzero_bits because some
             machines (maybe most) will actually do the sign-extension
             and this is the conservative approach.

             ??? For 2.5, try to tighten up the MD files in this regard
             instead of this kludge.  */

          if (GET_MODE_BITSIZE (GET_MODE (x)) < mode_width
              && GET_CODE (tem) == CONST_INT
              && INTVAL (tem) > 0
              && 0 != (INTVAL (tem)
                       & ((HOST_WIDE_INT) 1
                          << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
            tem = GEN_INT (INTVAL (tem)
                           | ((HOST_WIDE_INT) (-1)
                              << GET_MODE_BITSIZE (GET_MODE (x))));
#endif
          return nonzero_bits (tem, mode);
        }
      else if (nonzero_sign_valid && reg_nonzero_bits[REGNO (x)])
        return reg_nonzero_bits[REGNO (x)] & nonzero;
      else
        return nonzero;

    case CONST_INT:
#ifdef SHORT_IMMEDIATES_SIGN_EXTEND
      /* If X is negative in MODE, sign-extend the value.  */
      if (INTVAL (x) > 0 && mode_width < BITS_PER_WORD
          && 0 != (INTVAL (x) & ((HOST_WIDE_INT) 1 << (mode_width - 1))))
        return (INTVAL (x) | ((HOST_WIDE_INT) (-1) << mode_width));
#endif

      return INTVAL (x);

    case MEM:
#ifdef LOAD_EXTEND_OP
      /* In many, if not most, RISC machines, reading a byte from memory
         zeros the rest of the register.  Noticing that fact saves a lot
         of extra zero-extends.  */
      if (LOAD_EXTEND_OP (GET_MODE (x)) == ZERO_EXTEND)
        nonzero &= GET_MODE_MASK (GET_MODE (x));
#endif
      break;

    case EQ:  case NE:
    case GT:  case GTU:
    case LT:  case LTU:
    case GE:  case GEU:
    case LE:  case LEU:

      /* If this produces an integer result, we know which bits are set.
         Code here used to clear bits outside the mode of X, but that is
         now done above.  */

      if (GET_MODE_CLASS (mode) == MODE_INT
          && mode_width <= HOST_BITS_PER_WIDE_INT)
        nonzero = STORE_FLAG_VALUE;
      break;

    case NEG:
      if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
          == GET_MODE_BITSIZE (GET_MODE (x)))
        nonzero = 1;

      if (GET_MODE_SIZE (GET_MODE (x)) < mode_width)
        nonzero |= (GET_MODE_MASK (mode) & ~ GET_MODE_MASK (GET_MODE (x)));
      break;

    case ABS:
      if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
          == GET_MODE_BITSIZE (GET_MODE (x)))
        nonzero = 1;
      break;

    case TRUNCATE:
      nonzero &= (nonzero_bits (XEXP (x, 0), mode) & GET_MODE_MASK (mode));
      break;

    case ZERO_EXTEND:
      nonzero &= nonzero_bits (XEXP (x, 0), mode);
      if (GET_MODE (XEXP (x, 0)) != VOIDmode)
        nonzero &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
      break;

    case SIGN_EXTEND:
      /* If the sign bit is known clear, this is the same as ZERO_EXTEND.
         Otherwise, show all the bits in the outer mode but not the inner
         may be non-zero.  */
      inner_nz = nonzero_bits (XEXP (x, 0), mode);
      if (GET_MODE (XEXP (x, 0)) != VOIDmode)
        {
          inner_nz &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
          if (inner_nz &
              (((HOST_WIDE_INT) 1
                << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1))))
            inner_nz |= (GET_MODE_MASK (mode)
                         & ~ GET_MODE_MASK (GET_MODE (XEXP (x, 0))));
        }

      nonzero &= inner_nz;
      break;

    case AND:
      nonzero &= (nonzero_bits (XEXP (x, 0), mode)
                  & nonzero_bits (XEXP (x, 1), mode));
      break;

    case XOR:   case IOR:
    case UMIN:  case UMAX:  case SMIN:  case SMAX:
      nonzero &= (nonzero_bits (XEXP (x, 0), mode)
                  | nonzero_bits (XEXP (x, 1), mode));
      break;

    case PLUS:  case MINUS:
    case MULT:
    case DIV:   case UDIV:
    case MOD:   case UMOD:
      /* We can apply the rules of arithmetic to compute the number of
         high- and low-order zero bits of these operations.  We start by
         computing the width (position of the highest-order non-zero bit)
         and the number of low-order zero bits for each value.  */
      {
        unsigned HOST_WIDE_INT nz0 = nonzero_bits (XEXP (x, 0), mode);
        unsigned HOST_WIDE_INT nz1 = nonzero_bits (XEXP (x, 1), mode);
        int width0 = floor_log2 (nz0) + 1;
        int width1 = floor_log2 (nz1) + 1;
        int low0 = floor_log2 (nz0 & -nz0);
        int low1 = floor_log2 (nz1 & -nz1);
        HOST_WIDE_INT op0_maybe_minusp
          = (nz0 & ((HOST_WIDE_INT) 1 << (mode_width - 1)));
        HOST_WIDE_INT op1_maybe_minusp
          = (nz1 & ((HOST_WIDE_INT) 1 << (mode_width - 1)));
        int result_width = mode_width;
        int result_low = 0;

        switch (code)
          {
          case PLUS:
            result_width = MAX (width0, width1) + 1;
            result_low = MIN (low0, low1);
            break;
          case MINUS:
            result_low = MIN (low0, low1);
            break;
          case MULT:
            result_width = width0 + width1;
            result_low = low0 + low1;
            break;
          case DIV:
            if (! op0_maybe_minusp && ! op1_maybe_minusp)
              result_width = width0;
            break;
          case UDIV:
            result_width = width0;
            break;
          case MOD:
            if (! op0_maybe_minusp && ! op1_maybe_minusp)
              result_width = MIN (width0, width1);
            result_low = MIN (low0, low1);
            break;
          case UMOD:
            result_width = MIN (width0, width1);
            result_low = MIN (low0, low1);
            break;
          }

        if (result_width < mode_width)
          nonzero &= ((HOST_WIDE_INT) 1 << result_width) - 1;

        if (result_low > 0)
          nonzero &= ~ (((HOST_WIDE_INT) 1 << result_low) - 1);
      }
      break;
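      /* Worked example (hypothetical values): for PLUS with NZ0 = 0x0c
         and NZ1 = 0x30, width0 = 4, width1 = 6, low0 = 2 and low1 = 4,
         so the sum fits in 7 bits and is a multiple of 4: NONZERO is
         cut down to 0x7c.  */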

    case ZERO_EXTRACT:
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
          && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
        nonzero &= ((HOST_WIDE_INT) 1 << INTVAL (XEXP (x, 1))) - 1;
      break;

    case SUBREG:
      /* If this is a SUBREG formed for a promoted variable that has
         been zero-extended, we know that at least the high-order bits
         are zero, though others might be too.  */

      if (SUBREG_PROMOTED_VAR_P (x) && SUBREG_PROMOTED_UNSIGNED_P (x))
        nonzero = (GET_MODE_MASK (GET_MODE (x))
                   & nonzero_bits (SUBREG_REG (x), GET_MODE (x)));

      /* If the inner mode is a single word for both the host and target
         machines, we can compute this from which bits of the inner
         object might be nonzero.  */
      if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) <= BITS_PER_WORD
          && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
              <= HOST_BITS_PER_WIDE_INT))
        {
          nonzero &= nonzero_bits (SUBREG_REG (x), mode);

#ifndef WORD_REGISTER_OPERATIONS
          /* On many CISC machines, accessing an object in a wider mode
             causes the high-order bits to become undefined.  So they are
             not known to be zero.  */
          if (GET_MODE_SIZE (GET_MODE (x))
              > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
            nonzero |= (GET_MODE_MASK (GET_MODE (x))
                        & ~ GET_MODE_MASK (GET_MODE (SUBREG_REG (x))));
#endif
        }
      break;

    case ASHIFTRT:
    case LSHIFTRT:
    case ASHIFT:
    case ROTATE:
      /* The nonzero bits are in two classes: any bits within MODE
         that aren't in GET_MODE (x) are always significant.  The rest of the
         nonzero bits are those that are significant in the operand of
         the shift when shifted the appropriate number of bits.  This
         shows that high-order bits are cleared by the right shift and
         low-order bits by left shifts.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
          && INTVAL (XEXP (x, 1)) >= 0
          && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
        {
          enum machine_mode inner_mode = GET_MODE (x);
          int width = GET_MODE_BITSIZE (inner_mode);
          int count = INTVAL (XEXP (x, 1));
          unsigned HOST_WIDE_INT mode_mask = GET_MODE_MASK (inner_mode);
          unsigned HOST_WIDE_INT op_nonzero = nonzero_bits (XEXP (x, 0), mode);
          unsigned HOST_WIDE_INT inner = op_nonzero & mode_mask;
          unsigned HOST_WIDE_INT outer = 0;

          if (mode_width > width)
            outer = (op_nonzero & nonzero & ~ mode_mask);

          if (code == LSHIFTRT)
            inner >>= count;
          else if (code == ASHIFTRT)
            {
              inner >>= count;

              /* If the sign bit may have been nonzero before the shift, we
                 need to mark all the places it could have been copied to
                 by the shift as possibly nonzero.  */
              if (inner & ((HOST_WIDE_INT) 1 << (width - 1 - count)))
                inner |= (((HOST_WIDE_INT) 1 << count) - 1) << (width - count);
            }
          else if (code == ASHIFT)
            inner <<= count;
          else
            inner = ((inner << (count % width)
                      | (inner >> (width - (count % width)))) & mode_mask);

          nonzero &= (outer | inner);
        }
      break;
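      /* For instance (hypothetical values): (ashiftrt X 8) in SImode
         with OP_NONZERO 0x80000000: INNER >>= 8 gives 0x00800000, and
         since that bit (the shifted sign bit) may be set, the eight
         positions it was copied into are marked too, giving
         0xff800000.  */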
7249
7250 case FFS:
7251 /* This is at most the number of bits in the mode. */
951553af 7252 nonzero = ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width) + 1)) - 1;
230d793d 7253 break;
d0ab8cd3
RK
7254
7255 case IF_THEN_ELSE:
951553af
RK
7256 nonzero &= (nonzero_bits (XEXP (x, 1), mode)
7257 | nonzero_bits (XEXP (x, 2), mode));
d0ab8cd3 7258 break;
230d793d
RS
7259 }
7260
951553af 7261 return nonzero;
230d793d
RS
7262}
7263\f
d0ab8cd3 7264/* Return the number of bits at the high-order end of X that are known to
5109d49f
RK
7265 be equal to the sign bit. X will be used in mode MODE; if MODE is
7266 VOIDmode, X will be used in its own mode. The returned value will always
7267 be between 1 and the number of bits in MODE. */
d0ab8cd3
RK
7268
7269static int
7270num_sign_bit_copies (x, mode)
7271 rtx x;
7272 enum machine_mode mode;
7273{
7274 enum rtx_code code = GET_CODE (x);
7275 int bitwidth;
7276 int num0, num1, result;
951553af 7277 unsigned HOST_WIDE_INT nonzero;
d0ab8cd3
RK
7278 rtx tem;
7279
7280 /* If we weren't given a mode, use the mode of X. If the mode is still
1c75dfa4
RK
7281 VOIDmode, we don't know anything. Likewise if one of the modes is
7282 floating-point. */
d0ab8cd3
RK
7283
7284 if (mode == VOIDmode)
7285 mode = GET_MODE (x);
7286
1c75dfa4 7287 if (mode == VOIDmode || FLOAT_MODE_P (mode) || FLOAT_MODE_P (GET_MODE (x)))
6752e8d2 7288 return 1;
d0ab8cd3
RK
7289
7290 bitwidth = GET_MODE_BITSIZE (mode);
7291
312def2e
RK
7292 /* For a smaller object, just ignore the high bits. */
7293 if (bitwidth < GET_MODE_BITSIZE (GET_MODE (x)))
7294 return MAX (1, (num_sign_bit_copies (x, GET_MODE (x))
7295 - (GET_MODE_BITSIZE (GET_MODE (x)) - bitwidth)));
7296
0c314d1a
RK
7297#ifndef WORD_REGISTER_OPERATIONS
7298 /* If this machine does not do all register operations on the entire
7299 register and MODE is wider than the mode of X, we can say nothing
7300 at all about the high-order bits. */
7301 if (GET_MODE (x) != VOIDmode && bitwidth > GET_MODE_BITSIZE (GET_MODE (x)))
7302 return 1;
7303#endif
7304
d0ab8cd3
RK
7305 switch (code)
7306 {
7307 case REG:
55310dad 7308
ff0dbdd1
RK
7309#ifdef POINTERS_EXTEND_UNSIGNED
7310 /* If pointers extend signed and this is a pointer in Pmode, say that
7311 all the bits above ptr_mode are known to be sign bit copies. */
7312 if (! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode && mode == Pmode
7313 && REGNO_POINTER_FLAG (REGNO (x)))
7314 return GET_MODE_BITSIZE (Pmode) - GET_MODE_BITSIZE (ptr_mode) + 1;
7315#endif
7316
55310dad
RK
7317 if (reg_last_set_value[REGNO (x)] != 0
7318 && reg_last_set_mode[REGNO (x)] == mode
7319 && (reg_n_sets[REGNO (x)] == 1
7320 || reg_last_set_label[REGNO (x)] == label_tick)
7321 && INSN_CUID (reg_last_set[REGNO (x)]) < subst_low_cuid)
7322 return reg_last_set_sign_bit_copies[REGNO (x)];
d0ab8cd3
RK
7323
7324 tem = get_last_value (x);
7325 if (tem != 0)
7326 return num_sign_bit_copies (tem, mode);
55310dad
RK
7327
7328 if (nonzero_sign_valid && reg_sign_bit_copies[REGNO (x)] != 0)
7329 return reg_sign_bit_copies[REGNO (x)];
d0ab8cd3
RK
7330 break;
7331
457816e2 7332 case MEM:
8baf60bb 7333#ifdef LOAD_EXTEND_OP
457816e2 7334 /* Some RISC machines sign-extend all loads smaller than a word. */
8baf60bb
RK
7335 if (LOAD_EXTEND_OP (GET_MODE (x)) == SIGN_EXTEND)
7336 return MAX (1, bitwidth - GET_MODE_BITSIZE (GET_MODE (x)) + 1);
457816e2 7337#endif
8baf60bb 7338 break;
457816e2 7339
d0ab8cd3
RK
7340 case CONST_INT:
7341 /* If the constant is negative, take its 1's complement and remask.
7342 Then see how many zero bits we have. */
951553af 7343 nonzero = INTVAL (x) & GET_MODE_MASK (mode);
ac49a949 7344 if (bitwidth <= HOST_BITS_PER_WIDE_INT
951553af
RK
7345 && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
7346 nonzero = (~ nonzero) & GET_MODE_MASK (mode);
d0ab8cd3 7347
951553af 7348 return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
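 /* Worked example (illustrative): for (const_int -16) in SImode,
 nonzero = ~(-16) & 0xffffffff = 0xf, floor_log2 (0xf) = 3, and the
 result is 32 - 3 - 1 = 28 sign bit copies. */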
d0ab8cd3
RK
7349
7350 case SUBREG:
c3c2cb37
RK
7351 /* If this is a SUBREG for a promoted object that is sign-extended
7352 and we are looking at it in a wider mode, at least the
7353 high-order bits are known to be sign bit copies. */
7354
7355 if (SUBREG_PROMOTED_VAR_P (x) && ! SUBREG_PROMOTED_UNSIGNED_P (x))
dc3e17ad
RK
7356 return MAX (bitwidth - GET_MODE_BITSIZE (GET_MODE (x)) + 1,
7357 num_sign_bit_copies (SUBREG_REG (x), mode));
c3c2cb37 7358
d0ab8cd3
RK
7359 /* For a smaller object, just ignore the high bits. */
7360 if (bitwidth <= GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))))
7361 {
7362 num0 = num_sign_bit_copies (SUBREG_REG (x), VOIDmode);
7363 return MAX (1, (num0
7364 - (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
7365 - bitwidth)));
7366 }
457816e2 7367
8baf60bb 7368#ifdef WORD_REGISTER_OPERATIONS
2aec5b7a 7369#ifdef LOAD_EXTEND_OP
8baf60bb
RK
7370 /* For paradoxical SUBREGs on machines where all register operations
7371 affect the entire register, just look inside. Note that we are
7372 passing MODE to the recursive call, so the number of sign bit copies
7373 will remain relative to that mode, not the inner mode. */
457816e2 7374
2aec5b7a
JW
7375 /* This works only if loads sign extend. Otherwise, if we get a
7376 reload for the inner part, it may be loaded from the stack, and
7377 then we lose all sign bit copies that existed before the store
7378 to the stack. */
7379
7380 if ((GET_MODE_SIZE (GET_MODE (x))
7381 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
7382 && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND)
457816e2 7383 return num_sign_bit_copies (SUBREG_REG (x), mode);
2aec5b7a 7384#endif
457816e2 7385#endif
d0ab8cd3
RK
7386 break;
7387
7388 case SIGN_EXTRACT:
7389 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
7390 return MAX (1, bitwidth - INTVAL (XEXP (x, 1)));
7391 break;
7392
7393 case SIGN_EXTEND:
7394 return (bitwidth - GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
7395 + num_sign_bit_copies (XEXP (x, 0), VOIDmode));
7396
7397 case TRUNCATE:
7398 /* For a smaller object, just ignore the high bits. */
7399 num0 = num_sign_bit_copies (XEXP (x, 0), VOIDmode);
7400 return MAX (1, (num0 - (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
7401 - bitwidth)));
7402
7403 case NOT:
7404 return num_sign_bit_copies (XEXP (x, 0), mode);
7405
7406 case ROTATE: case ROTATERT:
7407 /* If we are rotating left by a number of bits less than the number
7408 of sign bit copies, we can just subtract that amount from the
7409 number. */
7410 if (GET_CODE (XEXP (x, 1)) == CONST_INT
7411 && INTVAL (XEXP (x, 1)) >= 0 && INTVAL (XEXP (x, 1)) < bitwidth)
7412 {
7413 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
7414 return MAX (1, num0 - (code == ROTATE ? INTVAL (XEXP (x, 1))
7415 : bitwidth - INTVAL (XEXP (x, 1))));
7416 }
7417 break;
7418
7419 case NEG:
7420 /* In general, this subtracts one sign bit copy. But if the value
7421 is known to be positive, the number of sign bit copies is the
951553af
RK
7422 same as that of the input. Finally, if the input has just one bit
7423 that might be nonzero, all the bits are copies of the sign bit. */
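 /* Illustrative case: if X is known to be 0 or 1 (nonzero == 1), then
 (neg X) is 0 or -1, and in either value every bit equals the sign
 bit, so we can return the full bitwidth. */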
7424 nonzero = nonzero_bits (XEXP (x, 0), mode);
7425 if (nonzero == 1)
d0ab8cd3
RK
7426 return bitwidth;
7427
7428 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
7429 if (num0 > 1
ac49a949 7430 && bitwidth <= HOST_BITS_PER_WIDE_INT
951553af 7431 && (((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero))
d0ab8cd3
RK
7432 num0--;
7433
7434 return num0;
7435
7436 case IOR: case AND: case XOR:
7437 case SMIN: case SMAX: case UMIN: case UMAX:
7438 /* Logical operations will preserve the number of sign-bit copies.
7439 MIN and MAX operations always return one of the operands. */
7440 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
7441 num1 = num_sign_bit_copies (XEXP (x, 1), mode);
7442 return MIN (num0, num1);
7443
7444 case PLUS: case MINUS:
7445 /* For addition and subtraction, we can have a 1-bit carry. However,
7446 if we are subtracting 1 from a positive number, there will not
7447 be such a carry. Furthermore, if the positive number is known to
7448 be 0 or 1, we know the result is either -1 or 0. */
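 /* Illustrative case: for (plus X -1) with X known to be 0 or 1, the
 result is -1 or 0 and every bit is a sign bit copy; in general only
 MIN (num0, num1) - 1 copies can be guaranteed because of the carry. */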
7449
3e3ea975 7450 if (code == PLUS && XEXP (x, 1) == constm1_rtx
9295e6af 7451 && bitwidth <= HOST_BITS_PER_WIDE_INT)
d0ab8cd3 7452 {
951553af
RK
7453 nonzero = nonzero_bits (XEXP (x, 0), mode);
7454 if ((((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero) == 0)
7455 return (nonzero == 1 || nonzero == 0 ? bitwidth
7456 : bitwidth - floor_log2 (nonzero) - 1);
d0ab8cd3
RK
7457 }
7458
7459 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
7460 num1 = num_sign_bit_copies (XEXP (x, 1), mode);
7461 return MAX (1, MIN (num0, num1) - 1);
7462
7463 case MULT:
7464 /* The number of bits of the product is the sum of the number of
7465 bits of both terms. However, unless one of the terms is known
7466 to be positive, we must allow for an additional bit since negating
7467 a negative number can remove one sign bit copy. */
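 /* Worked example (illustrative): in HImode, if each operand has 15
 sign bit copies (each lies in [-2, 1]), the product lies in [-2, 4];
 the formula gives 16 - 1 - 1 = 14, less one because both operands
 may be negative, i.e. 13 copies -- exactly what 4 (0x0004) needs. */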
7468
7469 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
7470 num1 = num_sign_bit_copies (XEXP (x, 1), mode);
7471
7472 result = bitwidth - (bitwidth - num0) - (bitwidth - num1);
7473 if (result > 0
9295e6af 7474 && bitwidth <= HOST_BITS_PER_WIDE_INT
951553af 7475 && ((nonzero_bits (XEXP (x, 0), mode)
d0ab8cd3 7476 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
01c82bbb
RK
7477 && ((nonzero_bits (XEXP (x, 1), mode)
7478 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
d0ab8cd3
RK
7479 result--;
7480
7481 return MAX (1, result);
7482
7483 case UDIV:
7484 /* The result must be <= the first operand. */
7485 return num_sign_bit_copies (XEXP (x, 0), mode);
7486
7487 case UMOD:
7488 /* The result must be <= the second operand. */
7489 return num_sign_bit_copies (XEXP (x, 1), mode);
7490
7491 case DIV:
7492 /* Similar to unsigned division, except that we have to worry about
7493 the case where the divisor is negative, in which case we have
7494 to add 1. */
7495 result = num_sign_bit_copies (XEXP (x, 0), mode);
7496 if (result > 1
ac49a949 7497 && bitwidth <= HOST_BITS_PER_WIDE_INT
951553af 7498 && (nonzero_bits (XEXP (x, 1), mode)
d0ab8cd3
RK
7499 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
7500 result --;
7501
7502 return result;
7503
7504 case MOD:
7505 result = num_sign_bit_copies (XEXP (x, 1), mode);
7506 if (result > 1
ac49a949 7507 && bitwidth <= HOST_BITS_PER_WIDE_INT
951553af 7508 && (nonzero_bits (XEXP (x, 1), mode)
d0ab8cd3
RK
7509 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
7510 result --;
7511
7512 return result;
7513
7514 case ASHIFTRT:
7515 /* Shifts by a constant add to the number of bits equal to the
7516 sign bit. */
7517 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
7518 if (GET_CODE (XEXP (x, 1)) == CONST_INT
7519 && INTVAL (XEXP (x, 1)) > 0)
7520 num0 = MIN (bitwidth, num0 + INTVAL (XEXP (x, 1)));
7521
7522 return num0;
7523
7524 case ASHIFT:
d0ab8cd3
RK
7525 /* Left shifts destroy copies. */
7526 if (GET_CODE (XEXP (x, 1)) != CONST_INT
7527 || INTVAL (XEXP (x, 1)) < 0
7528 || INTVAL (XEXP (x, 1)) >= bitwidth)
7529 return 1;
7530
7531 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
7532 return MAX (1, num0 - INTVAL (XEXP (x, 1)));
7533
7534 case IF_THEN_ELSE:
7535 num0 = num_sign_bit_copies (XEXP (x, 1), mode);
7536 num1 = num_sign_bit_copies (XEXP (x, 2), mode);
7537 return MIN (num0, num1);
7538
7539#if STORE_FLAG_VALUE == -1
7540 case EQ: case NE: case GE: case GT: case LE: case LT:
7541 case GEU: case GTU: case LEU: case LTU:
7542 return bitwidth;
7543#endif
7544 }
7545
7546 /* If we haven't been able to figure it out by one of the above rules,
7547 see if some of the high-order bits are known to be zero. If so,
ac49a949
RS
7548 count those bits and return one less than that amount. If we can't
7549 safely compute the mask for this mode, always return BITWIDTH. */
7550
7551 if (bitwidth > HOST_BITS_PER_WIDE_INT)
6752e8d2 7552 return 1;
d0ab8cd3 7553
951553af 7554 nonzero = nonzero_bits (x, mode);
df6f4086 7555 return (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))
951553af 7556 ? 1 : bitwidth - floor_log2 (nonzero) - 1);
d0ab8cd3
RK
7557}
7558\f
1a26b032
RK
7559/* Return the number of "extended" bits there are in X, when interpreted
7560 as a quantity in MODE whose signedness is indicated by UNSIGNEDP. For
7561 unsigned quantities, this is the number of high-order zero bits.
7562 For signed quantities, this is the number of copies of the sign bit
7563 minus 1. In both cases, this function returns the number of "spare"
7564 bits. For example, if two quantities for which this function returns
7565 at least 1 are added, the addition is known not to overflow.
7566
7567 This function will always return 0 unless called during combine, which
7568 implies that it must be called from a define_split. */
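/* Illustrative example: an unsigned QImode quantity known to be at most
 0x3f has two high-order zero bits, so extended_count returns 2; adding
 two quantities for which it returns at least 1 cannot overflow QImode. */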
7569
7570int
7571extended_count (x, mode, unsignedp)
7572 rtx x;
7573 enum machine_mode mode;
7574 int unsignedp;
7575{
951553af 7576 if (nonzero_sign_valid == 0)
1a26b032
RK
7577 return 0;
7578
7579 return (unsignedp
ac49a949
RS
7580 ? (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7581 && (GET_MODE_BITSIZE (mode) - 1
951553af 7582 - floor_log2 (nonzero_bits (x, mode))))
1a26b032
RK
7583 : num_sign_bit_copies (x, mode) - 1);
7584}
7585\f
230d793d
RS
7586/* This function is called from `simplify_shift_const' to merge two
7587 outer operations. Specifically, we have already found that we need
7588 to perform operation *POP0 with constant *PCONST0 at the outermost
7589 position. We would now like to also perform OP1 with constant CONST1
7590 (with *POP0 being done last).
7591
7592 Return 1 if we can do the operation and update *POP0 and *PCONST0 with
7593 the resulting operation. *PCOMP_P is set to 1 if we would need to
7594 complement the innermost operand, otherwise it is unchanged.
7595
7596 MODE is the mode in which the operation will be done. No bits outside
7597 the width of this mode matter. It is assumed that the width of this mode
5f4f0e22 7598 is smaller than or equal to HOST_BITS_PER_WIDE_INT.
230d793d
RS
7599
7600 If *POP0 or OP1 is NIL, it means no operation is required. Only NEG, PLUS,
7601 IOR, XOR, and AND are supported. We may set *POP0 to SET if the proper
7602 result is simply *PCONST0.
7603
7604 If the resulting operation cannot be expressed as one operation, we
7605 return 0 and do not change *POP0, *PCONST0, and *PCOMP_P. */
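/* Illustrative example: if *POP0 is XOR with *PCONST0 == 0x0f and OP1 is
 IOR with CONST1 == 0x0f, then (a | 0x0f) ^ 0x0f == a & ~0x0f, so *POP0
 becomes AND and *PCONST0 becomes ~0x0f. */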
7606
7607static int
7608merge_outer_ops (pop0, pconst0, op1, const1, mode, pcomp_p)
7609 enum rtx_code *pop0;
5f4f0e22 7610 HOST_WIDE_INT *pconst0;
230d793d 7611 enum rtx_code op1;
5f4f0e22 7612 HOST_WIDE_INT const1;
230d793d
RS
7613 enum machine_mode mode;
7614 int *pcomp_p;
7615{
7616 enum rtx_code op0 = *pop0;
5f4f0e22 7617 HOST_WIDE_INT const0 = *pconst0;
9fa6d012 7618 int width = GET_MODE_BITSIZE (mode);
230d793d
RS
7619
7620 const0 &= GET_MODE_MASK (mode);
7621 const1 &= GET_MODE_MASK (mode);
7622
7623 /* If OP0 is an AND, clear unimportant bits in CONST1. */
7624 if (op0 == AND)
7625 const1 &= const0;
7626
7627 /* If OP0 or OP1 is NIL, this is easy. Similarly if they are the same or
7628 if OP0 is SET. */
7629
7630 if (op1 == NIL || op0 == SET)
7631 return 1;
7632
7633 else if (op0 == NIL)
7634 op0 = op1, const0 = const1;
7635
7636 else if (op0 == op1)
7637 {
7638 switch (op0)
7639 {
7640 case AND:
7641 const0 &= const1;
7642 break;
7643 case IOR:
7644 const0 |= const1;
7645 break;
7646 case XOR:
7647 const0 ^= const1;
7648 break;
7649 case PLUS:
7650 const0 += const1;
7651 break;
7652 case NEG:
7653 op0 = NIL;
7654 break;
7655 }
7656 }
7657
7658 /* Otherwise, if either is a PLUS or NEG, we can't do anything. */
7659 else if (op0 == PLUS || op1 == PLUS || op0 == NEG || op1 == NEG)
7660 return 0;
7661
7662 /* If the two constants aren't the same, we can't do anything. The
7663 remaining six cases can all be done. */
7664 else if (const0 != const1)
7665 return 0;
7666
7667 else
7668 switch (op0)
7669 {
7670 case IOR:
7671 if (op1 == AND)
7672 /* (a & b) | b == b */
7673 op0 = SET;
7674 else /* op1 == XOR */
7675 /* (a ^ b) | b == a | b */
7676 ;
7677 break;
7678
7679 case XOR:
7680 if (op1 == AND)
7681 /* (a & b) ^ b == (~a) & b */
7682 op0 = AND, *pcomp_p = 1;
7683 else /* op1 == IOR */
7684 /* (a | b) ^ b == a & ~b */
7685 op0 = AND, *pconst0 = ~ const0;
7686 break;
7687
7688 case AND:
7689 if (op1 == IOR)
7690 /* (a | b) & b == b */
7691 op0 = SET;
7692 else /* op1 == XOR */
7693 /* (a ^ b) & b == (~a) & b */
7694 *pcomp_p = 1;
7695 break;
7696 }
7697
7698 /* Check for NO-OP cases. */
7699 const0 &= GET_MODE_MASK (mode);
7700 if (const0 == 0
7701 && (op0 == IOR || op0 == XOR || op0 == PLUS))
7702 op0 = NIL;
7703 else if (const0 == 0 && op0 == AND)
7704 op0 = SET;
7705 else if (const0 == GET_MODE_MASK (mode) && op0 == AND)
7706 op0 = NIL;
7707
9fa6d012
TG
7708 /* If this would be an entire word for the target, but is not for
7709 the host, then sign-extend on the host so that the number will look
7710 the same way on the host that it would on the target.
7711
7712 For example, when building a 64 bit alpha hosted 32 bit sparc
7713 targeted compiler, then we want the 32 bit unsigned value -1 to be
7714 represented as a 64 bit value -1, and not as 0x00000000ffffffff.
7715 The latter confuses the sparc backend. */
7716
7717 if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT && BITS_PER_WORD == width
7718 && (const0 & ((HOST_WIDE_INT) 1 << (width - 1))))
7719 const0 |= ((HOST_WIDE_INT) (-1) << width);
7720
230d793d
RS
7721 *pop0 = op0;
7722 *pconst0 = const0;
7723
7724 return 1;
7725}
7726\f
7727/* Simplify a shift of VAROP by COUNT bits. CODE says what kind of shift.
7728 The result of the shift is RESULT_MODE. X, if non-zero, is an expression
7729 that we started with.
7730
7731 The shift is normally computed in the widest mode we find in VAROP, as
7732 long as it isn't a different number of words than RESULT_MODE. Exceptions
7733 are ASHIFTRT and ROTATE, which are always done in their original mode. */
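/* Illustrative example: a ROTATERT is canonicalized in the loop below,
 so rotating right by 8 in SImode is handled as rotating left by 24. */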
7734
7735static rtx
7736simplify_shift_const (x, code, result_mode, varop, count)
7737 rtx x;
7738 enum rtx_code code;
7739 enum machine_mode result_mode;
7740 rtx varop;
7741 int count;
7742{
7743 enum rtx_code orig_code = code;
7744 int orig_count = count;
7745 enum machine_mode mode = result_mode;
7746 enum machine_mode shift_mode, tmode;
7747 int mode_words
7748 = (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
7749 /* We form (outer_op (code varop count) (outer_const)). */
7750 enum rtx_code outer_op = NIL;
c4e861e8 7751 HOST_WIDE_INT outer_const = 0;
230d793d
RS
7752 rtx const_rtx;
7753 int complement_p = 0;
7754 rtx new;
7755
7756 /* If we were given an invalid count, don't do anything except exactly
7757 what was requested. */
7758
7759 if (count < 0 || count > GET_MODE_BITSIZE (mode))
7760 {
7761 if (x)
7762 return x;
7763
5f4f0e22 7764 return gen_rtx (code, mode, varop, GEN_INT (count));
230d793d
RS
7765 }
7766
7767 /* Unless one of the branches of the `if' in this loop does a `continue',
7768 we will `break' the loop after the `if'. */
7769
7770 while (count != 0)
7771 {
7772 /* If we have an operand of (clobber (const_int 0)), just return that
7773 value. */
7774 if (GET_CODE (varop) == CLOBBER)
7775 return varop;
7776
7777 /* If we discovered we had to complement VAROP, leave. Making a NOT
7778 here would cause an infinite loop. */
7779 if (complement_p)
7780 break;
7781
abc95ed3 7782 /* Convert ROTATERT to ROTATE. */
230d793d
RS
7783 if (code == ROTATERT)
7784 code = ROTATE, count = GET_MODE_BITSIZE (result_mode) - count;
7785
230d793d 7786 /* We need to determine what mode we will do the shift in. If the
f6789c77
RK
7787 shift is a right shift or a ROTATE, we must always do it in the mode
7788 it was originally done in. Otherwise, we can do it in MODE, the
7789 widest mode encountered. */
7790 shift_mode
7791 = (code == ASHIFTRT || code == LSHIFTRT || code == ROTATE
7792 ? result_mode : mode);
230d793d
RS
7793
7794 /* Handle cases where the count is greater than the size of the mode
7795 minus 1. For ASHIFTRT, use the size minus one as the count (this can
7796 occur when simplifying (lshiftrt (ashiftrt ..))). For rotates,
7797 take the count modulo the size. For other shifts, the result is
7798 zero.
7799
7800 Since these shifts are being produced by the compiler by combining
7801 multiple operations, each of which are defined, we know what the
7802 result is supposed to be. */
7803
7804 if (count > GET_MODE_BITSIZE (shift_mode) - 1)
7805 {
7806 if (code == ASHIFTRT)
7807 count = GET_MODE_BITSIZE (shift_mode) - 1;
7808 else if (code == ROTATE || code == ROTATERT)
7809 count %= GET_MODE_BITSIZE (shift_mode);
7810 else
7811 {
7812 /* We can't simply return zero because there may be an
7813 outer op. */
7814 varop = const0_rtx;
7815 count = 0;
7816 break;
7817 }
7818 }
7819
7820 /* Negative counts are invalid and should not have been made (a
7821 programmer-specified negative count should have been handled
7822 above). */
7823 else if (count < 0)
7824 abort ();
7825
312def2e
RK
7826 /* An arithmetic right shift of a quantity known to be -1 or 0
7827 is a no-op. */
7828 if (code == ASHIFTRT
7829 && (num_sign_bit_copies (varop, shift_mode)
7830 == GET_MODE_BITSIZE (shift_mode)))
d0ab8cd3 7831 {
312def2e
RK
7832 count = 0;
7833 break;
7834 }
d0ab8cd3 7835
312def2e
RK
7836 /* If we are doing an arithmetic right shift and discarding all but
7837 the sign bit copies, this is equivalent to doing a shift by the
7838 bitsize minus one. Convert it into that shift because it will often
7839 allow other simplifications. */
500c518b 7840
312def2e
RK
7841 if (code == ASHIFTRT
7842 && (count + num_sign_bit_copies (varop, shift_mode)
7843 >= GET_MODE_BITSIZE (shift_mode)))
7844 count = GET_MODE_BITSIZE (shift_mode) - 1;
500c518b 7845
230d793d
RS
7846 /* We simplify the tests below and elsewhere by converting
7847 ASHIFTRT to LSHIFTRT if we know the sign bit is clear.
7848 `make_compound_operation' will convert it to an ASHIFTRT for
7849 those machines (such as Vax) that don't have a LSHIFTRT. */
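 /* Illustrative case: when VAROP is known non-negative (its sign bit
 is zero), an arithmetic and a logical right shift produce identical
 results, so the logical form is used as the canonical one. */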
5f4f0e22 7850 if (GET_MODE_BITSIZE (shift_mode) <= HOST_BITS_PER_WIDE_INT
230d793d 7851 && code == ASHIFTRT
951553af 7852 && ((nonzero_bits (varop, shift_mode)
5f4f0e22
CH
7853 & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (shift_mode) - 1)))
7854 == 0))
230d793d
RS
7855 code = LSHIFTRT;
7856
7857 switch (GET_CODE (varop))
7858 {
7859 case SIGN_EXTEND:
7860 case ZERO_EXTEND:
7861 case SIGN_EXTRACT:
7862 case ZERO_EXTRACT:
7863 new = expand_compound_operation (varop);
7864 if (new != varop)
7865 {
7866 varop = new;
7867 continue;
7868 }
7869 break;
7870
7871 case MEM:
7872 /* If we have (xshiftrt (mem ...) C) and C is MODE_WIDTH
7873 minus the width of a smaller mode, we can do this with a
7874 SIGN_EXTEND or ZERO_EXTEND from the narrower memory location. */
7875 if ((code == ASHIFTRT || code == LSHIFTRT)
7876 && ! mode_dependent_address_p (XEXP (varop, 0))
7877 && ! MEM_VOLATILE_P (varop)
7878 && (tmode = mode_for_size (GET_MODE_BITSIZE (mode) - count,
7879 MODE_INT, 1)) != BLKmode)
7880 {
f76b9db2
ILT
7881 if (BYTES_BIG_ENDIAN)
7882 new = gen_rtx (MEM, tmode, XEXP (varop, 0));
7883 else
e24b00c8
ILT
7884 new = gen_rtx (MEM, tmode,
7885 plus_constant (XEXP (varop, 0),
7886 count / BITS_PER_UNIT));
7887 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (varop);
7888 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (varop);
7889 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (varop);
230d793d
RS
7890 varop = gen_rtx_combine (code == ASHIFTRT ? SIGN_EXTEND
7891 : ZERO_EXTEND, mode, new);
7892 count = 0;
7893 continue;
7894 }
7895 break;
7896
7897 case USE:
7898 /* Similar to the case above, except that we can only do this if
7899 the resulting mode is the same as that of the underlying
7900 MEM and adjust the address depending on the *bits* endianness
7901 because of the way that bit-field extract insns are defined. */
7902 if ((code == ASHIFTRT || code == LSHIFTRT)
7903 && (tmode = mode_for_size (GET_MODE_BITSIZE (mode) - count,
7904 MODE_INT, 1)) != BLKmode
7905 && tmode == GET_MODE (XEXP (varop, 0)))
7906 {
f76b9db2
ILT
7907 if (BITS_BIG_ENDIAN)
7908 new = XEXP (varop, 0);
7909 else
7910 {
7911 new = copy_rtx (XEXP (varop, 0));
7912 SUBST (XEXP (new, 0),
7913 plus_constant (XEXP (new, 0),
7914 count / BITS_PER_UNIT));
7915 }
230d793d
RS
7916
7917 varop = gen_rtx_combine (code == ASHIFTRT ? SIGN_EXTEND
7918 : ZERO_EXTEND, mode, new);
7919 count = 0;
7920 continue;
7921 }
7922 break;
7923
7924 case SUBREG:
7925 /* If VAROP is a SUBREG, strip it as long as the inner operand has
7926 the same number of words as what we've seen so far. Then store
7927 the widest mode in MODE. */
f9e67232
RS
7928 if (subreg_lowpart_p (varop)
7929 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop)))
7930 > GET_MODE_SIZE (GET_MODE (varop)))
230d793d
RS
7931 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop)))
7932 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
7933 == mode_words))
7934 {
7935 varop = SUBREG_REG (varop);
7936 if (GET_MODE_SIZE (GET_MODE (varop)) > GET_MODE_SIZE (mode))
7937 mode = GET_MODE (varop);
7938 continue;
7939 }
7940 break;
7941
7942 case MULT:
7943 /* Some machines use MULT instead of ASHIFT because MULT
7944 is cheaper. But it is still better on those machines to
7945 merge two shifts into one. */
7946 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
7947 && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0)
7948 {
7949 varop = gen_binary (ASHIFT, GET_MODE (varop), XEXP (varop, 0),
5f4f0e22 7950 GEN_INT (exact_log2 (INTVAL (XEXP (varop, 1)))));
230d793d
RS
7951 continue;
7952 }
7953 break;
7954
7955 case UDIV:
7956 /* Similar, for when divides are cheaper. */
7957 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
7958 && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0)
7959 {
7960 varop = gen_binary (LSHIFTRT, GET_MODE (varop), XEXP (varop, 0),
5f4f0e22 7961 GEN_INT (exact_log2 (INTVAL (XEXP (varop, 1)))));
230d793d
RS
7962 continue;
7963 }
7964 break;
7965
7966 case ASHIFTRT:
7967 /* If we are extracting just the sign bit of an arithmetic right
7968 shift, that shift is not needed. */
7969 if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1)
7970 {
7971 varop = XEXP (varop, 0);
7972 continue;
7973 }
7974
7975 /* ... fall through ... */
7976
7977 case LSHIFTRT:
7978 case ASHIFT:
230d793d
RS
7979 case ROTATE:
7980 /* Here we have two nested shifts. The result is usually the
7981 AND of a new shift with a mask. We compute the result below. */
7982 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
7983 && INTVAL (XEXP (varop, 1)) >= 0
7984 && INTVAL (XEXP (varop, 1)) < GET_MODE_BITSIZE (GET_MODE (varop))
5f4f0e22
CH
7985 && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
7986 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
230d793d
RS
7987 {
7988 enum rtx_code first_code = GET_CODE (varop);
7989 int first_count = INTVAL (XEXP (varop, 1));
5f4f0e22 7990 unsigned HOST_WIDE_INT mask;
230d793d 7991 rtx mask_rtx;
230d793d 7992
230d793d
RS
7993 /* We have one common special case. We can't do any merging if
7994 the inner code is an ASHIFTRT of a smaller mode. However, if
7995 we have (ashift:M1 (subreg:M1 (ashiftrt:M2 FOO C1) 0) C2)
7996 with C2 == GET_MODE_BITSIZE (M1) - GET_MODE_BITSIZE (M2),
7997 we can convert it to
7998 (ashiftrt:M1 (ashift:M1 (and:M1 (subreg:M1 FOO 0) C3) C2) C1).
7999 This simplifies certain SIGN_EXTEND operations. */
8000 if (code == ASHIFT && first_code == ASHIFTRT
8001 && (GET_MODE_BITSIZE (result_mode)
8002 - GET_MODE_BITSIZE (GET_MODE (varop))) == count)
8003 {
8004 /* C3 has the low-order C1 bits zero. */
8005
5f4f0e22
CH
8006 mask = (GET_MODE_MASK (mode)
8007 & ~ (((HOST_WIDE_INT) 1 << first_count) - 1));
230d793d 8008
5f4f0e22 8009 varop = simplify_and_const_int (NULL_RTX, result_mode,
230d793d 8010 XEXP (varop, 0), mask);
5f4f0e22 8011 varop = simplify_shift_const (NULL_RTX, ASHIFT, result_mode,
230d793d
RS
8012 varop, count);
8013 count = first_count;
8014 code = ASHIFTRT;
8015 continue;
8016 }
8017
d0ab8cd3
RK
8018 /* If this was (ashiftrt (ashift foo C1) C2) and FOO has more
8019 than C1 high-order bits equal to the sign bit, we can convert
8020 this to either an ASHIFT or an ASHIFTRT depending on the
8021 two counts.
230d793d
RS
8022
8023 We cannot do this if VAROP's mode is not SHIFT_MODE. */
8024
8025 if (code == ASHIFTRT && first_code == ASHIFT
8026 && GET_MODE (varop) == shift_mode
d0ab8cd3
RK
8027 && (num_sign_bit_copies (XEXP (varop, 0), shift_mode)
8028 > first_count))
230d793d 8029 {
d0ab8cd3
RK
8030 count -= first_count;
8031 if (count < 0)
8032 count = - count, code = ASHIFT;
8033 varop = XEXP (varop, 0);
8034 continue;
230d793d
RS
8035 }
8036
8037 /* There are some cases we can't do. If CODE is ASHIFTRT,
8038 we can only do this if FIRST_CODE is also ASHIFTRT.
8039
8040 We can't do the case when CODE is ROTATE and FIRST_CODE is
8041 ASHIFTRT.
8042
8043 If the mode of this shift is not the mode of the outer shift,
bdaae9a0 8044 we can't do this if either shift is a right shift or ROTATE.
230d793d
RS
8045
8046 Finally, we can't do any of these if the mode is too wide
8047 unless the codes are the same.
8048
8049 Handle the case where the shift codes are the same
8050 first. */
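 /* Illustrative example: (lshiftrt:SI (lshiftrt:SI X 2) 3) becomes
 (lshiftrt:SI X 5); when the codes match, the counts simply add. */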
8051
8052 if (code == first_code)
8053 {
8054 if (GET_MODE (varop) != result_mode
bdaae9a0
RK
8055 && (code == ASHIFTRT || code == LSHIFTRT
8056 || code == ROTATE))
230d793d
RS
8057 break;
8058
8059 count += first_count;
8060 varop = XEXP (varop, 0);
8061 continue;
8062 }
8063
8064 if (code == ASHIFTRT
8065 || (code == ROTATE && first_code == ASHIFTRT)
5f4f0e22 8066 || GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT
230d793d 8067 || (GET_MODE (varop) != result_mode
bdaae9a0
RK
8068 && (first_code == ASHIFTRT || first_code == LSHIFTRT
8069 || first_code == ROTATE
230d793d
RS
8070 || code == ROTATE)))
8071 break;
8072
8073 /* To compute the mask to apply after the shift, shift the
951553af 8074 nonzero bits of the inner shift the same way the
230d793d
RS
8075 outer shift will. */
8076
951553af 8077 mask_rtx = GEN_INT (nonzero_bits (varop, GET_MODE (varop)));
230d793d
RS
8078
8079 mask_rtx
8080 = simplify_binary_operation (code, result_mode, mask_rtx,
5f4f0e22 8081 GEN_INT (count));
230d793d
RS
8082
8083 /* Give up if we can't compute an outer operation to use. */
8084 if (mask_rtx == 0
8085 || GET_CODE (mask_rtx) != CONST_INT
8086 || ! merge_outer_ops (&outer_op, &outer_const, AND,
8087 INTVAL (mask_rtx),
8088 result_mode, &complement_p))
8089 break;
8090
8091 /* If the shifts are in the same direction, we add the
8092 counts. Otherwise, we subtract them. */
8093 if ((code == ASHIFTRT || code == LSHIFTRT)
8094 == (first_code == ASHIFTRT || first_code == LSHIFTRT))
8095 count += first_count;
8096 else
8097 count -= first_count;
8098
8099 /* If COUNT is positive, the new shift is usually CODE,
8100 except for the two exceptions below, in which case it is
8101 FIRST_CODE. If the count is negative, FIRST_CODE should
8102 always be used. */
8103 if (count > 0
8104 && ((first_code == ROTATE && code == ASHIFT)
8105 || (first_code == ASHIFTRT && code == LSHIFTRT)))
8106 code = first_code;
8107 else if (count < 0)
8108 code = first_code, count = - count;
8109
8110 varop = XEXP (varop, 0);
8111 continue;
8112 }
8113
8114 /* If we have (A << B << C) for any shift, we can convert this to
8115 (A << C << B). This wins if A is a constant. Only try this if
8116 B is not a constant. */
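 /* Illustrative example: (ashift (ashift (const_int 1) B) 3) becomes
 (ashift (const_int 8) B), since (1 << B) << 3 == (1 << 3) << B. */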
8117
8118 else if (GET_CODE (varop) == code
8119 && GET_CODE (XEXP (varop, 1)) != CONST_INT
8120 && 0 != (new
8121 = simplify_binary_operation (code, mode,
8122 XEXP (varop, 0),
5f4f0e22 8123 GEN_INT (count))))
230d793d
RS
8124 {
8125 varop = gen_rtx_combine (code, mode, new, XEXP (varop, 1));
8126 count = 0;
8127 continue;
8128 }
8129 break;
8130
8131 case NOT:
8132 /* Make this fit the case below. */
8133 varop = gen_rtx_combine (XOR, mode, XEXP (varop, 0),
5f4f0e22 8134 GEN_INT (GET_MODE_MASK (mode)));
230d793d
RS
8135 continue;
8136
8137 case IOR:
8138 case AND:
8139 case XOR:
8140 /* If we have (xshiftrt (ior (plus X (const_int -1)) X) C)
8141 with C the size of VAROP - 1 and the shift is logical if
8142 STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
8143 we have an (le X 0) operation. If we have an arithmetic shift
8144 and STORE_FLAG_VALUE is 1 or we have a logical shift with
8145 STORE_FLAG_VALUE of -1, we have a (neg (le X 0)) operation. */
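 /* Worked example (illustrative): in SImode with STORE_FLAG_VALUE == 1,
 (lshiftrt (ior (plus X -1) X) 31) yields 1 exactly when X <= 0, since
 the sign bit of (X - 1) | X is set iff X is negative or zero. */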
8146
8147 if (GET_CODE (varop) == IOR && GET_CODE (XEXP (varop, 0)) == PLUS
8148 && XEXP (XEXP (varop, 0), 1) == constm1_rtx
8149 && (STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
8150 && (code == LSHIFTRT || code == ASHIFTRT)
8151 && count == GET_MODE_BITSIZE (GET_MODE (varop)) - 1
8152 && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
8153 {
8154 count = 0;
8155 varop = gen_rtx_combine (LE, GET_MODE (varop), XEXP (varop, 1),
8156 const0_rtx);
8157
8158 if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
8159 varop = gen_rtx_combine (NEG, GET_MODE (varop), varop);
8160
8161 continue;
8162 }
8163
8164 /* If we have (shift (logical)), move the logical to the outside
8165 to allow it to possibly combine with another logical and the
8166 shift to combine with another shift. This also canonicalizes to
8167 what a ZERO_EXTRACT looks like. Also, some machines have
8168 (and (shift)) insns. */
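 /* Illustrative example: (lshiftrt (and X C) 4) becomes
 (and (lshiftrt X 4) (C >> 4)), exposing both the shift and the AND
 for further combination. */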
8169
8170 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
8171 && (new = simplify_binary_operation (code, result_mode,
8172 XEXP (varop, 1),
5f4f0e22 8173 GEN_INT (count))) != 0
7d171a1e 8174 && GET_CODE(new) == CONST_INT
230d793d
RS
8175 && merge_outer_ops (&outer_op, &outer_const, GET_CODE (varop),
8176 INTVAL (new), result_mode, &complement_p))
8177 {
8178 varop = XEXP (varop, 0);
8179 continue;
8180 }
8181
8182 /* If we can't do that, try to simplify the shift in each arm of the
8183 logical expression, make a new logical expression, and apply
8184 the inverse distributive law. */
8185 {
00d4ca1c 8186 rtx lhs = simplify_shift_const (NULL_RTX, code, shift_mode,
230d793d 8187 XEXP (varop, 0), count);
00d4ca1c 8188 rtx rhs = simplify_shift_const (NULL_RTX, code, shift_mode,
230d793d
RS
8189 XEXP (varop, 1), count);
8190
21a64bf1 8191 varop = gen_binary (GET_CODE (varop), shift_mode, lhs, rhs);
230d793d
RS
8192 varop = apply_distributive_law (varop);
8193
8194 count = 0;
8195 }
8196 break;
8197
8198 case EQ:
45620ed4 8199 /* Convert (lshiftrt (eq FOO 0) C) to (xor FOO 1) if STORE_FLAG_VALUE
230d793d 8200 says that the sign bit can be tested, FOO has mode MODE, C is
45620ed4
RK
8201 GET_MODE_BITSIZE (MODE) - 1, and FOO has only its low-order bit
8202 that may be nonzero. */
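 /* Worked example (illustrative): with FOO known to be 0 or 1, (eq FOO 0)
 has its sign bit set exactly when FOO == 0; shifting it right logically
 by 31 in SImode extracts that bit, which is (xor FOO 1). */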
8203 if (code == LSHIFTRT
230d793d
RS
8204 && XEXP (varop, 1) == const0_rtx
8205 && GET_MODE (XEXP (varop, 0)) == result_mode
8206 && count == GET_MODE_BITSIZE (result_mode) - 1
5f4f0e22 8207 && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
230d793d 8208 && ((STORE_FLAG_VALUE
5f4f0e22 8209 & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (result_mode) - 1))))
951553af 8210 && nonzero_bits (XEXP (varop, 0), result_mode) == 1
5f4f0e22
CH
8211 && merge_outer_ops (&outer_op, &outer_const, XOR,
8212 (HOST_WIDE_INT) 1, result_mode,
8213 &complement_p))
230d793d
RS
8214 {
8215 varop = XEXP (varop, 0);
8216 count = 0;
8217 continue;
8218 }
8219 break;
8220
8221 case NEG:
d0ab8cd3
RK
8222 /* (lshiftrt (neg A) C) where A is either 0 or 1 and C is one less
8223 than the number of bits in the mode is equivalent to A. */
8224 if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1
951553af 8225 && nonzero_bits (XEXP (varop, 0), result_mode) == 1)
230d793d 8226 {
d0ab8cd3 8227 varop = XEXP (varop, 0);
230d793d
RS
8228 count = 0;
8229 continue;
8230 }
8231
8232 /* NEG commutes with ASHIFT since it is multiplication. Move the
8233 NEG outside to allow shifts to combine. */
8234 if (code == ASHIFT
5f4f0e22
CH
8235 && merge_outer_ops (&outer_op, &outer_const, NEG,
8236 (HOST_WIDE_INT) 0, result_mode,
8237 &complement_p))
230d793d
RS
8238 {
8239 varop = XEXP (varop, 0);
8240 continue;
8241 }
8242 break;
8243
8244 case PLUS:
d0ab8cd3
RK
8245 /* (lshiftrt (plus A -1) C) where A is either 0 or 1 and C
8246 is one less than the number of bits in the mode is
8247 equivalent to (xor A 1). */
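 /* Worked example (illustrative): with A known to be 0 or 1 in SImode,
 (lshiftrt (plus A -1) 31) is 1 when A == 0 (A - 1 is then -1) and 0
 when A == 1, which is precisely (xor A 1). */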
230d793d
RS
8248 if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1
8249 && XEXP (varop, 1) == constm1_rtx
951553af 8250 && nonzero_bits (XEXP (varop, 0), result_mode) == 1
5f4f0e22
CH
8251 && merge_outer_ops (&outer_op, &outer_const, XOR,
8252 (HOST_WIDE_INT) 1, result_mode,
8253 &complement_p))
230d793d
RS
8254 {
8255 count = 0;
8256 varop = XEXP (varop, 0);
8257 continue;
8258 }
8259
3f508eca 8260 /* If we have (xshiftrt (plus FOO BAR) C), and the only bits
951553af 8261 that might be nonzero in BAR are those being shifted out and those
3f508eca
RK
8262 bits are known zero in FOO, we can replace the PLUS with FOO.
8263 Similarly in the other operand order. This code occurs when
8264 we are computing the size of a variable-size array. */
8265
8266 if ((code == ASHIFTRT || code == LSHIFTRT)
5f4f0e22 8267 && count < HOST_BITS_PER_WIDE_INT
951553af
RK
8268 && nonzero_bits (XEXP (varop, 1), result_mode) >> count == 0
8269 && (nonzero_bits (XEXP (varop, 1), result_mode)
8270 & nonzero_bits (XEXP (varop, 0), result_mode)) == 0)
3f508eca
RK
8271 {
8272 varop = XEXP (varop, 0);
8273 continue;
8274 }
8275 else if ((code == ASHIFTRT || code == LSHIFTRT)
5f4f0e22 8276 && count < HOST_BITS_PER_WIDE_INT
ac49a949 8277 && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
951553af 8278 && 0 == (nonzero_bits (XEXP (varop, 0), result_mode)
3f508eca 8279 >> count)
951553af
RK
8280 && 0 == (nonzero_bits (XEXP (varop, 0), result_mode)
8281 & nonzero_bits (XEXP (varop, 1),
3f508eca
RK
8282 result_mode)))
8283 {
8284 varop = XEXP (varop, 1);
8285 continue;
8286 }
8287
230d793d
RS
8288 /* (ashift (plus foo C) N) is (plus (ashift foo N) C'). */
8289 if (code == ASHIFT
8290 && GET_CODE (XEXP (varop, 1)) == CONST_INT
8291 && (new = simplify_binary_operation (ASHIFT, result_mode,
8292 XEXP (varop, 1),
5f4f0e22 8293 GEN_INT (count))) != 0
7d171a1e 8294 && GET_CODE(new) == CONST_INT
230d793d
RS
8295 && merge_outer_ops (&outer_op, &outer_const, PLUS,
8296 INTVAL (new), result_mode, &complement_p))
8297 {
8298 varop = XEXP (varop, 0);
8299 continue;
8300 }
8301 break;
8302
8303 case MINUS:
8304 /* If we have (xshiftrt (minus (ashiftrt X C) X) C)
8305 with C the size of VAROP - 1 and the shift is logical if
8306 STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
8307 we have a (gt X 0) operation. If the shift is arithmetic with
8308 STORE_FLAG_VALUE of 1 or logical with STORE_FLAG_VALUE == -1,
8309 we have a (neg (gt X 0)) operation. */
8310
8311 if (GET_CODE (XEXP (varop, 0)) == ASHIFTRT
8312 && count == GET_MODE_BITSIZE (GET_MODE (varop)) - 1
8313 && (STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
8314 && (code == LSHIFTRT || code == ASHIFTRT)
8315 && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT
8316 && INTVAL (XEXP (XEXP (varop, 0), 1)) == count
8317 && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
8318 {
8319 count = 0;
8320 varop = gen_rtx_combine (GT, GET_MODE (varop), XEXP (varop, 1),
8321 const0_rtx);
8322
8323 if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
8324 varop = gen_rtx_combine (NEG, GET_MODE (varop), varop);
8325
8326 continue;
8327 }
8328 break;
8329 }
8330
8331 break;
8332 }
8333
8334 /* We need to determine what mode to do the shift in. If the shift is
f6789c77
RK
8335 a right shift or ROTATE, we must always do it in the mode it was
8336 originally done in. Otherwise, we can do it in MODE, the widest mode
8337 encountered. The code we care about is that of the shift that will
8338 actually be done, not the shift that was originally requested. */
8339 shift_mode
8340 = (code == ASHIFTRT || code == LSHIFTRT || code == ROTATE
8341 ? result_mode : mode);
230d793d
RS
8342
8343 /* We have now finished analyzing the shift. The result should be
8344 a shift of type CODE with SHIFT_MODE shifting VAROP COUNT places. If
8345 OUTER_OP is non-NIL, it is an operation that needs to be applied
8346 to the result of the shift. OUTER_CONST is the relevant constant,
8347 but we must turn off all bits turned off in the shift.
8348
8349 If we were passed a value for X, see if we can use any pieces of
8350 it. If not, make new rtx. */
8351
8352 if (x && GET_RTX_CLASS (GET_CODE (x)) == '2'
8353 && GET_CODE (XEXP (x, 1)) == CONST_INT
8354 && INTVAL (XEXP (x, 1)) == count)
8355 const_rtx = XEXP (x, 1);
8356 else
5f4f0e22 8357 const_rtx = GEN_INT (count);
230d793d
RS
8358
8359 if (x && GET_CODE (XEXP (x, 0)) == SUBREG
8360 && GET_MODE (XEXP (x, 0)) == shift_mode
8361 && SUBREG_REG (XEXP (x, 0)) == varop)
8362 varop = XEXP (x, 0);
8363 else if (GET_MODE (varop) != shift_mode)
8364 varop = gen_lowpart_for_combine (shift_mode, varop);
8365
8366 /* If we can't make the SUBREG, try to return what we were given. */
8367 if (GET_CODE (varop) == CLOBBER)
8368 return x ? x : varop;
8369
8370 new = simplify_binary_operation (code, shift_mode, varop, const_rtx);
8371 if (new != 0)
8372 x = new;
8373 else
8374 {
8375 if (x == 0 || GET_CODE (x) != code || GET_MODE (x) != shift_mode)
8376 x = gen_rtx_combine (code, shift_mode, varop, const_rtx);
8377
8378 SUBST (XEXP (x, 0), varop);
8379 SUBST (XEXP (x, 1), const_rtx);
8380 }
8381
224eeff2
RK
8382 /* If we have an outer operation and we just made a shift, it is
8383 possible that we could have simplified the shift were it not
8384 for the outer operation. So try to do the simplification
8385 recursively. */
8386
8387 if (outer_op != NIL && GET_CODE (x) == code
8388 && GET_CODE (XEXP (x, 1)) == CONST_INT)
8389 x = simplify_shift_const (x, code, shift_mode, XEXP (x, 0),
8390 INTVAL (XEXP (x, 1)));
8391
230d793d
RS
8392 /* If we were doing a LSHIFTRT in a wider mode than it was originally,
8393 turn off all the bits that the shift would have turned off. */
8394 if (orig_code == LSHIFTRT && result_mode != shift_mode)
5f4f0e22 8395 x = simplify_and_const_int (NULL_RTX, shift_mode, x,
230d793d
RS
8396 GET_MODE_MASK (result_mode) >> orig_count);
8397
8398 /* Do the remainder of the processing in RESULT_MODE. */
8399 x = gen_lowpart_for_combine (result_mode, x);
8400
8401 /* If COMPLEMENT_P is set, we have to complement X before doing the outer
8402 operation. */
8403 if (complement_p)
0c1c8ea6 8404 x = gen_unary (NOT, result_mode, result_mode, x);
230d793d
RS
8405
8406 if (outer_op != NIL)
8407 {
5f4f0e22 8408 if (GET_MODE_BITSIZE (result_mode) < HOST_BITS_PER_WIDE_INT)
9fa6d012
TG
8409 {
8410 int width = GET_MODE_BITSIZE (result_mode);
8411
8412 outer_const &= GET_MODE_MASK (result_mode);
8413
8414 /* If this would be an entire word for the target, but is not for
8415 the host, then sign-extend on the host so that the number will
8416 look the same way on the host that it would on the target.
8417
8418 For example, when building a 64 bit alpha hosted 32 bit sparc
8419 targeted compiler, then we want the 32 bit unsigned value -1 to be
8420 represented as a 64 bit value -1, and not as 0x00000000ffffffff.
8421 The latter confuses the sparc backend. */
8422
8423 if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT && BITS_PER_WORD == width
8424 && (outer_const & ((HOST_WIDE_INT) 1 << (width - 1))))
8425 outer_const |= ((HOST_WIDE_INT) (-1) << width);
8426 }
230d793d
RS
8427
8428 if (outer_op == AND)
5f4f0e22 8429 x = simplify_and_const_int (NULL_RTX, result_mode, x, outer_const);
230d793d
RS
8430 else if (outer_op == SET)
8431 /* This means that we have determined that the result is
8432 equivalent to a constant. This should be rare. */
5f4f0e22 8433 x = GEN_INT (outer_const);
230d793d 8434 else if (GET_RTX_CLASS (outer_op) == '1')
0c1c8ea6 8435 x = gen_unary (outer_op, result_mode, result_mode, x);
230d793d 8436 else
5f4f0e22 8437 x = gen_binary (outer_op, result_mode, x, GEN_INT (outer_const));
230d793d
RS
8438 }
8439
8440 return x;
8441}
8442\f
8443/* Like recog, but we receive the address of a pointer to a new pattern.
8444 We try to match the rtx that the pointer points to.
8445 If that fails, we may try to modify or replace the pattern,
8446 storing the replacement into the same pointer object.
8447
8448 Modifications include deletion or addition of CLOBBERs.
8449
8450 PNOTES is a pointer to a location where any REG_UNUSED notes added for
8451 the CLOBBERs are placed.
8452
a29ca9db
RK
8453 PADDED_SCRATCHES is set to the number of (clobber (scratch)) patterns
8454 we had to add.
8455
230d793d
RS
8456 The value is the final insn code from the pattern ultimately matched,
8457 or -1. */
8458
8459static int
a29ca9db 8460recog_for_combine (pnewpat, insn, pnotes, padded_scratches)
230d793d
RS
8461 rtx *pnewpat;
8462 rtx insn;
8463 rtx *pnotes;
a29ca9db 8464 int *padded_scratches;
230d793d
RS
8465{
8466 register rtx pat = *pnewpat;
8467 int insn_code_number;
8468 int num_clobbers_to_add = 0;
8469 int i;
8470 rtx notes = 0;
8471
a29ca9db
RK
8472 *padded_scratches = 0;
8473
974f4146
RK
8474 /* If PAT is a PARALLEL, check to see if it contains the CLOBBER
8475 we use to indicate that something didn't match. If we find such a
8476 thing, force rejection. */
d96023cf 8477 if (GET_CODE (pat) == PARALLEL)
974f4146 8478 for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
d96023cf
RK
8479 if (GET_CODE (XVECEXP (pat, 0, i)) == CLOBBER
8480 && XEXP (XVECEXP (pat, 0, i), 0) == const0_rtx)
974f4146
RK
8481 return -1;
8482
230d793d
RS
8483 /* Is the result of combination a valid instruction? */
8484 insn_code_number = recog (pat, insn, &num_clobbers_to_add);
8485
8486 /* If it isn't, there is the possibility that we previously had an insn
8487 that clobbered some register as a side effect, but the combined
8488 insn doesn't need to do that. So try once more without the clobbers
8489 unless this represents an ASM insn. */
8490
8491 if (insn_code_number < 0 && ! check_asm_operands (pat)
8492 && GET_CODE (pat) == PARALLEL)
8493 {
8494 int pos;
8495
8496 for (pos = 0, i = 0; i < XVECLEN (pat, 0); i++)
8497 if (GET_CODE (XVECEXP (pat, 0, i)) != CLOBBER)
8498 {
8499 if (i != pos)
8500 SUBST (XVECEXP (pat, 0, pos), XVECEXP (pat, 0, i));
8501 pos++;
8502 }
8503
8504 SUBST_INT (XVECLEN (pat, 0), pos);
8505
8506 if (pos == 1)
8507 pat = XVECEXP (pat, 0, 0);
8508
8509 insn_code_number = recog (pat, insn, &num_clobbers_to_add);
8510 }
8511
8512 /* If we had any clobbers to add, make a new pattern that contains
8513 them. Then check to make sure that all of them are dead. */
8514 if (num_clobbers_to_add)
8515 {
8516 rtx newpat = gen_rtx (PARALLEL, VOIDmode,
8517 gen_rtvec (GET_CODE (pat) == PARALLEL
8518 ? XVECLEN (pat, 0) + num_clobbers_to_add
8519 : num_clobbers_to_add + 1));
8520
8521 if (GET_CODE (pat) == PARALLEL)
8522 for (i = 0; i < XVECLEN (pat, 0); i++)
8523 XVECEXP (newpat, 0, i) = XVECEXP (pat, 0, i);
8524 else
8525 XVECEXP (newpat, 0, 0) = pat;
8526
8527 add_clobbers (newpat, insn_code_number);
8528
8529 for (i = XVECLEN (newpat, 0) - num_clobbers_to_add;
8530 i < XVECLEN (newpat, 0); i++)
8531 {
8532 if (GET_CODE (XEXP (XVECEXP (newpat, 0, i), 0)) == REG
8533 && ! reg_dead_at_p (XEXP (XVECEXP (newpat, 0, i), 0), insn))
8534 return -1;
a29ca9db
RK
8535 else if (GET_CODE (XEXP (XVECEXP (newpat, 0, i), 0)) == SCRATCH)
8536 (*padded_scratches)++;
230d793d
RS
8537 notes = gen_rtx (EXPR_LIST, REG_UNUSED,
8538 XEXP (XVECEXP (newpat, 0, i), 0), notes);
8539 }
8540 pat = newpat;
8541 }
8542
8543 *pnewpat = pat;
8544 *pnotes = notes;
8545
8546 return insn_code_number;
8547}
8548\f
8549/* Like gen_lowpart but for use by combine. In combine it is not possible
8550 to create any new pseudoregs. However, it is safe to create
8551 invalid memory addresses, because combine will try to recognize
8552 them and all they will do is make the combine attempt fail.
8553
8554 If for some reason this cannot do its job, an rtx
8555 (clobber (const_int 0)) is returned.
8556 An insn containing that will not be recognized. */
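/* Illustrative example: the QImode lowpart of (const_int 0x1234) is
 (const_int 0x34), while a lowpart that cannot be expressed, such as
 one of a volatile MEM, comes back as (clobber (const_int 0)). */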
8557
8558#undef gen_lowpart
8559
8560static rtx
8561gen_lowpart_for_combine (mode, x)
8562 enum machine_mode mode;
8563 register rtx x;
8564{
8565 rtx result;
8566
8567 if (GET_MODE (x) == mode)
8568 return x;
8569
eae957a8
RK
8570 /* We can only support MODE being wider than a word if X is a
8571 constant integer or has a mode the same size. */
8572
8573 if (GET_MODE_SIZE (mode) > UNITS_PER_WORD
8574 && ! ((GET_MODE (x) == VOIDmode
8575 && (GET_CODE (x) == CONST_INT
8576 || GET_CODE (x) == CONST_DOUBLE))
8577 || GET_MODE_SIZE (GET_MODE (x)) == GET_MODE_SIZE (mode)))
230d793d
RS
8578 return gen_rtx (CLOBBER, GET_MODE (x), const0_rtx);
8579
8580 /* X might be a paradoxical (subreg (mem)). In that case, gen_lowpart
8581 won't know what to do. So we will strip off the SUBREG here and
8582 process normally. */
8583 if (GET_CODE (x) == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
8584 {
8585 x = SUBREG_REG (x);
8586 if (GET_MODE (x) == mode)
8587 return x;
8588 }
8589
8590 result = gen_lowpart_common (mode, x);
64bf47a2
RK
8591 if (result != 0
8592 && GET_CODE (result) == SUBREG
8593 && GET_CODE (SUBREG_REG (result)) == REG
8594 && REGNO (SUBREG_REG (result)) >= FIRST_PSEUDO_REGISTER
8595 && (GET_MODE_SIZE (GET_MODE (result))
8596 != GET_MODE_SIZE (GET_MODE (SUBREG_REG (result)))))
8597 reg_changes_size[REGNO (SUBREG_REG (result))] = 1;
8598
230d793d
RS
8599 if (result)
8600 return result;
8601
8602 if (GET_CODE (x) == MEM)
8603 {
8604 register int offset = 0;
8605 rtx new;
8606
8607 /* Refuse to work on a volatile memory ref or one with a mode-dependent
8608 address. */
8609 if (MEM_VOLATILE_P (x) || mode_dependent_address_p (XEXP (x, 0)))
8610 return gen_rtx (CLOBBER, GET_MODE (x), const0_rtx);
8611
8612 /* If we want to refer to something bigger than the original memref,
8613 generate a perverse subreg instead. That will force a reload
8614 of the original memref X. */
8615 if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (mode))
8616 return gen_rtx (SUBREG, mode, x, 0);
8617
f76b9db2
ILT
8618 if (WORDS_BIG_ENDIAN)
8619 offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
8620 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
8621 if (BYTES_BIG_ENDIAN)
8622 {
8623 /* Adjust the address so that the address-after-the-data is
8624 unchanged. */
8625 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
8626 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
8627 }
230d793d
RS
8628 new = gen_rtx (MEM, mode, plus_constant (XEXP (x, 0), offset));
8629 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
8630 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x);
8631 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
8632 return new;
8633 }
8634
8635 /* If X is a comparison operator, rewrite it in a new mode. This
8636 probably won't match, but may allow further simplifications. */
8637 else if (GET_RTX_CLASS (GET_CODE (x)) == '<')
8638 return gen_rtx_combine (GET_CODE (x), mode, XEXP (x, 0), XEXP (x, 1));
8639
8640 /* If we couldn't simplify X any other way, just enclose it in a
8641 SUBREG. Normally, this SUBREG won't match, but some patterns may
a7c99304 8642 include an explicit SUBREG or we may simplify it further in combine. */
230d793d 8643 else
dfbe1b2f
RK
8644 {
8645 int word = 0;
8646
8647 if (WORDS_BIG_ENDIAN && GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD)
8648 word = ((GET_MODE_SIZE (GET_MODE (x))
8649 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD))
8650 / UNITS_PER_WORD);
8651 return gen_rtx (SUBREG, mode, x, word);
8652 }
230d793d
RS
8653}
8654\f
8655/* Make an rtx expression. This is a subset of gen_rtx and only supports
8656 expressions of 1, 2, or 3 operands, each of which are rtx expressions.
8657
8658 If the identical expression was previously in the insn (in the undobuf),
8659 it will be returned. Only if it is not found will a new expression
8660 be made. */
8661
8662/*VARARGS2*/
8663static rtx
4f90e4a0 8664gen_rtx_combine VPROTO((enum rtx_code code, enum machine_mode mode, ...))
230d793d 8665{
4f90e4a0 8666#ifndef __STDC__
230d793d
RS
8667 enum rtx_code code;
8668 enum machine_mode mode;
4f90e4a0
RK
8669#endif
8670 va_list p;
230d793d
RS
8671 int n_args;
8672 rtx args[3];
8673 int i, j;
8674 char *fmt;
8675 rtx rt;
8676
4f90e4a0
RK
8677 VA_START (p, mode);
8678
8679#ifndef __STDC__
230d793d
RS
8680 code = va_arg (p, enum rtx_code);
8681 mode = va_arg (p, enum machine_mode);
4f90e4a0
RK
8682#endif
8683
230d793d
RS
8684 n_args = GET_RTX_LENGTH (code);
8685 fmt = GET_RTX_FORMAT (code);
8686
8687 if (n_args == 0 || n_args > 3)
8688 abort ();
8689
8690 /* Get each arg and verify that it is supposed to be an expression. */
8691 for (j = 0; j < n_args; j++)
8692 {
8693 if (*fmt++ != 'e')
8694 abort ();
8695
8696 args[j] = va_arg (p, rtx);
8697 }
8698
8699 /* See if this is in undobuf. Be sure we don't use objects that came
8700 from another insn; this could produce circular rtl structures. */
8701
8702 for (i = previous_num_undos; i < undobuf.num_undo; i++)
8703 if (!undobuf.undo[i].is_int
f5393ab9
RS
8704 && GET_CODE (undobuf.undo[i].old_contents.r) == code
8705 && GET_MODE (undobuf.undo[i].old_contents.r) == mode)
230d793d
RS
8706 {
8707 for (j = 0; j < n_args; j++)
f5393ab9 8708 if (XEXP (undobuf.undo[i].old_contents.r, j) != args[j])
230d793d
RS
8709 break;
8710
8711 if (j == n_args)
f5393ab9 8712 return undobuf.undo[i].old_contents.r;
230d793d
RS
8713 }
8714
8715 /* Otherwise make a new rtx. We know we have 1, 2, or 3 args.
8716 Use rtx_alloc instead of gen_rtx because it's faster on RISC. */
8717 rt = rtx_alloc (code);
8718 PUT_MODE (rt, mode);
8719 XEXP (rt, 0) = args[0];
8720 if (n_args > 1)
8721 {
8722 XEXP (rt, 1) = args[1];
8723 if (n_args > 2)
8724 XEXP (rt, 2) = args[2];
8725 }
8726 return rt;
8727}
8728
8729/* These routines make binary and unary operations by first seeing if they
8730 fold; if not, a new expression is allocated. */
8731
8732static rtx
8733gen_binary (code, mode, op0, op1)
8734 enum rtx_code code;
8735 enum machine_mode mode;
8736 rtx op0, op1;
8737{
8738 rtx result;
1a26b032
RK
8739 rtx tem;
8740
8741 if (GET_RTX_CLASS (code) == 'c'
8742 && (GET_CODE (op0) == CONST_INT
8743 || (CONSTANT_P (op0) && GET_CODE (op1) != CONST_INT)))
8744 tem = op0, op0 = op1, op1 = tem;
230d793d
RS
8745
8746 if (GET_RTX_CLASS (code) == '<')
8747 {
8748 enum machine_mode op_mode = GET_MODE (op0);
9210df58
RK
8749
8750 /* Strip the COMPARE from (REL_OP (compare X Y) 0) to get
8751 just (REL_OP X Y). */
8752 if (GET_CODE (op0) == COMPARE && op1 == const0_rtx)
8753 {
8754 op1 = XEXP (op0, 1);
8755 op0 = XEXP (op0, 0);
8756 op_mode = GET_MODE (op0);
8757 }
8758
230d793d
RS
8759 if (op_mode == VOIDmode)
8760 op_mode = GET_MODE (op1);
8761 result = simplify_relational_operation (code, op_mode, op0, op1);
8762 }
8763 else
8764 result = simplify_binary_operation (code, mode, op0, op1);
8765
8766 if (result)
8767 return result;
8768
8769 /* Put complex operands first and constants second. */
8770 if (GET_RTX_CLASS (code) == 'c'
8771 && ((CONSTANT_P (op0) && GET_CODE (op1) != CONST_INT)
8772 || (GET_RTX_CLASS (GET_CODE (op0)) == 'o'
8773 && GET_RTX_CLASS (GET_CODE (op1)) != 'o')
8774 || (GET_CODE (op0) == SUBREG
8775 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (op0))) == 'o'
8776 && GET_RTX_CLASS (GET_CODE (op1)) != 'o')))
8777 return gen_rtx_combine (code, mode, op1, op0);
8778
8779 return gen_rtx_combine (code, mode, op0, op1);
8780}
8781
8782static rtx
0c1c8ea6 8783gen_unary (code, mode, op0_mode, op0)
230d793d 8784 enum rtx_code code;
0c1c8ea6 8785 enum machine_mode mode, op0_mode;
230d793d
RS
8786 rtx op0;
8787{
0c1c8ea6 8788 rtx result = simplify_unary_operation (code, mode, op0, op0_mode);
230d793d
RS
8789
8790 if (result)
8791 return result;
8792
8793 return gen_rtx_combine (code, mode, op0);
8794}
8795\f
8796/* Simplify a comparison between *POP0 and *POP1 where CODE is the
8797 comparison code that will be tested.
8798
8799 The result is a possibly different comparison code to use. *POP0 and
8800 *POP1 may be updated.
8801
8802 It is possible that we might detect that a comparison is either always
8803 true or always false. However, we do not perform general constant
5089e22e 8804 folding in combine, so this knowledge isn't useful. Such tautologies
230d793d
RS
8805 should have been detected earlier. Hence we ignore all such cases. */
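/* Illustrative example: because rotation by a constant is a bijection,
 the loop below reduces (eq (rotate X 3) (rotate Y 3)) to (eq X Y); the
 analogous reduction applies to shifts when the discarded bits are known
 to be zero in both operands. */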
8806
8807static enum rtx_code
8808simplify_comparison (code, pop0, pop1)
8809 enum rtx_code code;
8810 rtx *pop0;
8811 rtx *pop1;
8812{
8813 rtx op0 = *pop0;
8814 rtx op1 = *pop1;
8815 rtx tem, tem1;
8816 int i;
8817 enum machine_mode mode, tmode;
8818
8819 /* Try a few ways of applying the same transformation to both operands. */
8820 while (1)
8821 {
3a19aabc
RK
8822#ifndef WORD_REGISTER_OPERATIONS
8823 /* The test below this one won't handle SIGN_EXTENDs on these machines,
8824 so check specially. */
8825 if (code != GTU && code != GEU && code != LTU && code != LEU
8826 && GET_CODE (op0) == ASHIFTRT && GET_CODE (op1) == ASHIFTRT
8827 && GET_CODE (XEXP (op0, 0)) == ASHIFT
8828 && GET_CODE (XEXP (op1, 0)) == ASHIFT
8829 && GET_CODE (XEXP (XEXP (op0, 0), 0)) == SUBREG
8830 && GET_CODE (XEXP (XEXP (op1, 0), 0)) == SUBREG
8831 && (GET_MODE (SUBREG_REG (XEXP (XEXP (op0, 0), 0)))
ad25ba17 8832 == GET_MODE (SUBREG_REG (XEXP (XEXP (op1, 0), 0))))
3a19aabc
RK
8833 && GET_CODE (XEXP (op0, 1)) == CONST_INT
8834 && GET_CODE (XEXP (op1, 1)) == CONST_INT
8835 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
8836 && GET_CODE (XEXP (XEXP (op1, 0), 1)) == CONST_INT
8837 && INTVAL (XEXP (op0, 1)) == INTVAL (XEXP (op1, 1))
8838 && INTVAL (XEXP (op0, 1)) == INTVAL (XEXP (XEXP (op0, 0), 1))
8839 && INTVAL (XEXP (op0, 1)) == INTVAL (XEXP (XEXP (op1, 0), 1))
8840 && (INTVAL (XEXP (op0, 1))
8841 == (GET_MODE_BITSIZE (GET_MODE (op0))
8842 - (GET_MODE_BITSIZE
8843 (GET_MODE (SUBREG_REG (XEXP (XEXP (op0, 0), 0))))))))
8844 {
8845 op0 = SUBREG_REG (XEXP (XEXP (op0, 0), 0));
8846 op1 = SUBREG_REG (XEXP (XEXP (op1, 0), 0));
8847 }
8848#endif
8849
230d793d
RS
8850 /* If both operands are the same constant shift, see if we can ignore the
8851 shift. We can if the shift is a rotate or if the bits shifted out of
951553af 8852 this shift are known to be zero for both inputs and if the type of
230d793d 8853 comparison is compatible with the shift. */
67232b23
RK
8854 if (GET_CODE (op0) == GET_CODE (op1)
8855 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
8856 && ((GET_CODE (op0) == ROTATE && (code == NE || code == EQ))
45620ed4 8857 || ((GET_CODE (op0) == LSHIFTRT || GET_CODE (op0) == ASHIFT)
67232b23
RK
8858 && (code != GT && code != LT && code != GE && code != LE))
8859 || (GET_CODE (op0) == ASHIFTRT
8860 && (code != GTU && code != LTU
8861 && code != GEU && code != LEU)))
8862 && GET_CODE (XEXP (op0, 1)) == CONST_INT
8863 && INTVAL (XEXP (op0, 1)) >= 0
8864 && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
8865 && XEXP (op0, 1) == XEXP (op1, 1))
230d793d
RS
8866 {
8867 enum machine_mode mode = GET_MODE (op0);
5f4f0e22 8868 unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
230d793d
RS
8869 int shift_count = INTVAL (XEXP (op0, 1));
8870
8871 if (GET_CODE (op0) == LSHIFTRT || GET_CODE (op0) == ASHIFTRT)
8872 mask &= (mask >> shift_count) << shift_count;
45620ed4 8873 else if (GET_CODE (op0) == ASHIFT)
230d793d
RS
8874 mask = (mask & (mask << shift_count)) >> shift_count;
8875
951553af
RK
8876 if ((nonzero_bits (XEXP (op0, 0), mode) & ~ mask) == 0
8877 && (nonzero_bits (XEXP (op1, 0), mode) & ~ mask) == 0)
230d793d
RS
8878 op0 = XEXP (op0, 0), op1 = XEXP (op1, 0);
8879 else
8880 break;
8881 }

      /* If both operands are AND's of a paradoxical SUBREG by constant, the
	 SUBREGs are of the same mode, and, in both cases, the AND would
	 be redundant if the comparison was done in the narrower mode,
	 do the comparison in the narrower mode (e.g., we are AND'ing with 1
	 and the operand's possibly nonzero bits are 0xffffff01; in that case
	 if we only care about QImode, we don't need the AND).  This case
	 occurs if the output mode of an scc insn is not SImode and
	 STORE_FLAG_VALUE == 1 (e.g., the 386).

	 Similarly, check for a case where the AND's are ZERO_EXTEND
	 operations from some narrower mode even though a SUBREG is not
	 present.  */

      else if (GET_CODE (op0) == AND && GET_CODE (op1) == AND
	       && GET_CODE (XEXP (op0, 1)) == CONST_INT
	       && GET_CODE (XEXP (op1, 1)) == CONST_INT)
	{
	  rtx inner_op0 = XEXP (op0, 0);
	  rtx inner_op1 = XEXP (op1, 0);
	  HOST_WIDE_INT c0 = INTVAL (XEXP (op0, 1));
	  HOST_WIDE_INT c1 = INTVAL (XEXP (op1, 1));
	  int changed = 0;

	  if (GET_CODE (inner_op0) == SUBREG && GET_CODE (inner_op1) == SUBREG
	      && (GET_MODE_SIZE (GET_MODE (inner_op0))
		  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (inner_op0))))
	      && (GET_MODE (SUBREG_REG (inner_op0))
		  == GET_MODE (SUBREG_REG (inner_op1)))
	      && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0)))
		  <= HOST_BITS_PER_WIDE_INT)
	      && (0 == ((~c0) & nonzero_bits (SUBREG_REG (inner_op0),
					      GET_MODE (SUBREG_REG (op0)))))
	      && (0 == ((~c1) & nonzero_bits (SUBREG_REG (inner_op1),
					      GET_MODE (SUBREG_REG (inner_op1))))))
	    {
	      op0 = SUBREG_REG (inner_op0);
	      op1 = SUBREG_REG (inner_op1);

	      /* The resulting comparison is always unsigned since we masked
		 off the original sign bit.  */
	      code = unsigned_condition (code);

	      changed = 1;
	    }

	  else if (c0 == c1)
	    for (tmode = GET_CLASS_NARROWEST_MODE
		 (GET_MODE_CLASS (GET_MODE (op0)));
		 tmode != GET_MODE (op0); tmode = GET_MODE_WIDER_MODE (tmode))
	      if (c0 == GET_MODE_MASK (tmode))
		{
		  op0 = gen_lowpart_for_combine (tmode, inner_op0);
		  op1 = gen_lowpart_for_combine (tmode, inner_op1);
		  code = unsigned_condition (code);
		  changed = 1;
		  break;
		}

	  if (! changed)
	    break;
	}
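
      /* For instance (following the 0xffffff01 example in the comment
	 above), (eq (and:SI (subreg:SI (reg:QI X) 0) 0xff)
		     (and:SI (subreg:SI (reg:QI Y) 0) 0xff))
	 hits the c0 == c1 == GET_MODE_MASK (QImode) path and is redone
	 as a QImode comparison of X with Y under an unsigned condition,
	 dropping both ANDs.  */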

      /* If both operands are NOT, we can strip off the outer operation
	 and adjust the comparison code for swapped operands; similarly for
	 NEG, except that this must be an equality comparison.  */
      else if ((GET_CODE (op0) == NOT && GET_CODE (op1) == NOT)
	       || (GET_CODE (op0) == NEG && GET_CODE (op1) == NEG
		   && (code == EQ || code == NE)))
	op0 = XEXP (op0, 0), op1 = XEXP (op1, 0), code = swap_condition (code);

      else
	break;
    }
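
  /* The NOT case above is safe for every condition code because ~X < ~Y
     holds exactly when X > Y, and swapping the condition accounts for the
     reversal.  NEG is restricted to EQ/NE because negation can overflow:
     in SImode, -X == -Y iff X == Y, but with X == INT_MIN the negation
     wraps and the ordered comparisons do not simply swap.  */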

  /* If the first operand is a constant, swap the operands and adjust the
     comparison code appropriately.  */
  if (CONSTANT_P (op0))
    {
      tem = op0, op0 = op1, op1 = tem;
      code = swap_condition (code);
    }

  /* We now enter a loop during which we will try to simplify the comparison.
     For the most part, we are only concerned with comparisons with zero,
     but some things may really be comparisons with zero but not start
     out looking that way.  */

  while (GET_CODE (op1) == CONST_INT)
    {
      enum machine_mode mode = GET_MODE (op0);
      int mode_width = GET_MODE_BITSIZE (mode);
      unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
      int equality_comparison_p;
      int sign_bit_comparison_p;
      int unsigned_comparison_p;
      HOST_WIDE_INT const_op;

      /* We only want to handle integral modes.  This catches VOIDmode,
	 CCmode, and the floating-point modes.  An exception is that we
	 can handle VOIDmode if OP0 is a COMPARE or a comparison
	 operation.  */

      if (GET_MODE_CLASS (mode) != MODE_INT
	  && ! (mode == VOIDmode
		&& (GET_CODE (op0) == COMPARE
		    || GET_RTX_CLASS (GET_CODE (op0)) == '<')))
	break;

      /* Get the constant we are comparing against and turn off all bits
	 not on in our mode.  */
      const_op = INTVAL (op1);
      if (mode_width <= HOST_BITS_PER_WIDE_INT)
	const_op &= mask;

      /* If we are comparing against a constant power of two and the value
	 being compared can only have that single bit nonzero (e.g., it was
	 `and'ed with that bit), we can replace this with a comparison
	 with zero.  */
      if (const_op
	  && (code == EQ || code == NE || code == GE || code == GEU
	      || code == LT || code == LTU)
	  && mode_width <= HOST_BITS_PER_WIDE_INT
	  && exact_log2 (const_op) >= 0
	  && nonzero_bits (op0, mode) == const_op)
	{
	  code = (code == EQ || code == GE || code == GEU ? NE : EQ);
	  op1 = const0_rtx, const_op = 0;
	}
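
      /* For example, if op0 is (and X 8), its only possibly nonzero bit
	 is the 8 bit, so (eq (and X 8) 8) becomes (ne (and X 8) 0) and
	 (ne (and X 8) 8) becomes (eq (and X 8) 0).  */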

      /* Similarly, if we are comparing a value known to be either -1 or
	 0 with -1, change it to the opposite comparison against zero.  */

      if (const_op == -1
	  && (code == EQ || code == NE || code == GT || code == LE
	      || code == GEU || code == LTU)
	  && num_sign_bit_copies (op0, mode) == mode_width)
	{
	  code = (code == EQ || code == LE || code == GEU ? NE : EQ);
	  op1 = const0_rtx, const_op = 0;
	}

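      /* For example, (ashiftrt:SI X 31) is known to be either -1 or 0,
	 so (eq (ashiftrt:SI X 31) -1) becomes (ne (ashiftrt:SI X 31) 0)
	 and (ne ... -1) becomes (eq ... 0).  */
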
      /* Do some canonicalizations based on the comparison code.  We prefer
	 comparisons against zero and then prefer equality comparisons.
	 If we can reduce the size of a constant, we will do that too.  */

      switch (code)
	{
	case LT:
	  /* < C is equivalent to <= (C - 1).  */
	  if (const_op > 0)
	    {
	      const_op -= 1;
	      op1 = GEN_INT (const_op);
	      code = LE;
	      /* ... fall through to LE case below.  */
	    }
	  else
	    break;

	case LE:
	  /* <= C is equivalent to < (C + 1); we do this for C < 0.  */
	  if (const_op < 0)
	    {
	      const_op += 1;
	      op1 = GEN_INT (const_op);
	      code = LT;
	    }

	  /* If we are doing a <= 0 comparison on a value known to have
	     a zero sign bit, we can replace this with == 0.  */
	  else if (const_op == 0
		   && mode_width <= HOST_BITS_PER_WIDE_INT
		   && (nonzero_bits (op0, mode)
		       & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
	    code = EQ;
	  break;

	case GE:
	  /* >= C is equivalent to > (C - 1).  */
	  if (const_op > 0)
	    {
	      const_op -= 1;
	      op1 = GEN_INT (const_op);
	      code = GT;
	      /* ... fall through to GT below.  */
	    }
	  else
	    break;

	case GT:
	  /* > C is equivalent to >= (C + 1); we do this for C < 0.  */
	  if (const_op < 0)
	    {
	      const_op += 1;
	      op1 = GEN_INT (const_op);
	      code = GE;
	    }

	  /* If we are doing a > 0 comparison on a value known to have
	     a zero sign bit, we can replace this with != 0.  */
	  else if (const_op == 0
		   && mode_width <= HOST_BITS_PER_WIDE_INT
		   && (nonzero_bits (op0, mode)
		       & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
	    code = NE;
	  break;

	case LTU:
	  /* unsigned < C is equivalent to <= (C - 1).  */
	  if (const_op > 0)
	    {
	      const_op -= 1;
	      op1 = GEN_INT (const_op);
	      code = LEU;
	      /* ... fall through ... */
	    }

	  /* (unsigned) < 0x80000000 is equivalent to >= 0.  */
	  else if (const_op == (HOST_WIDE_INT) 1 << (mode_width - 1))
	    {
	      const_op = 0, op1 = const0_rtx;
	      code = GE;
	      break;
	    }
	  else
	    break;

	case LEU:
	  /* unsigned <= 0 is equivalent to == 0  */
	  if (const_op == 0)
	    code = EQ;

	  /* (unsigned) <= 0x7fffffff is equivalent to >= 0.  */
	  else if (const_op == ((HOST_WIDE_INT) 1 << (mode_width - 1)) - 1)
	    {
	      const_op = 0, op1 = const0_rtx;
	      code = GE;
	    }
	  break;

	case GEU:
	  /* unsigned >= C is equivalent to > (C - 1).  */
	  if (const_op > 1)
	    {
	      const_op -= 1;
	      op1 = GEN_INT (const_op);
	      code = GTU;
	      /* ... fall through ... */
	    }

	  /* (unsigned) >= 0x80000000 is equivalent to < 0.  */
	  else if (const_op == (HOST_WIDE_INT) 1 << (mode_width - 1))
	    {
	      const_op = 0, op1 = const0_rtx;
	      code = LT;
	      break;
	    }
	  else
	    break;

	case GTU:
	  /* unsigned > 0 is equivalent to != 0  */
	  if (const_op == 0)
	    code = NE;

	  /* (unsigned) > 0x7fffffff is equivalent to < 0.  */
	  else if (const_op == ((HOST_WIDE_INT) 1 << (mode_width - 1)) - 1)
	    {
	      const_op = 0, op1 = const0_rtx;
	      code = LT;
	    }
	  break;
	}
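
      /* A few sample canonicalizations performed by the switch above,
	 assuming 32-bit SImode: (lt X 5) becomes (le X 4); (geu X 7)
	 becomes (gtu X 6); (ltu X 0x80000000) becomes (ge X 0); and
	 (leu X 0x7fffffff) also becomes (ge X 0).  */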

      /* Compute some predicates to simplify code below.  */

      equality_comparison_p = (code == EQ || code == NE);
      sign_bit_comparison_p = ((code == LT || code == GE) && const_op == 0);
      unsigned_comparison_p = (code == LTU || code == LEU || code == GTU
			       || code == GEU);

      /* If this is a sign bit comparison and we can do arithmetic in
	 MODE, say that we will only be needing the sign bit of OP0.  */
      if (sign_bit_comparison_p
	  && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
	op0 = force_to_mode (op0, mode,
			     ((HOST_WIDE_INT) 1
			      << (GET_MODE_BITSIZE (mode) - 1)),
			     NULL_RTX, 0);

      /* Now try cases based on the opcode of OP0.  If none of the cases
	 does a "continue", we exit this loop immediately after the
	 switch.  */

      switch (GET_CODE (op0))
	{
	case ZERO_EXTRACT:
	  /* If we are extracting a single bit from a variable position in
	     a constant that has only a single bit set and are comparing it
	     with zero, we can convert this into an equality comparison
	     between the position and the location of the single bit.  */

	  if (GET_CODE (XEXP (op0, 0)) == CONST_INT
	      && XEXP (op0, 1) == const1_rtx
	      && equality_comparison_p && const_op == 0
	      && (i = exact_log2 (INTVAL (XEXP (op0, 0)))) >= 0)
	    {
	      if (BITS_BIG_ENDIAN)
#ifdef HAVE_extzv
		i = (GET_MODE_BITSIZE
		     (insn_operand_mode[(int) CODE_FOR_extzv][1]) - 1 - i);
#else
		i = BITS_PER_WORD - 1 - i;
#endif

	      op0 = XEXP (op0, 2);
	      op1 = GEN_INT (i);
	      const_op = i;

	      /* Result is nonzero iff shift count is equal to I.  */
	      code = reverse_condition (code);
	      continue;
	    }

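	  /* For example, with the constant 8 (a single bit in position 3),
	     (eq (zero_extract (const_int 8) (const_int 1) POS) 0) asks
	     whether bit POS of 8 is clear, which on a little-endian bit
	     numbering is simply (ne POS 3).  */
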
	  /* ... fall through ... */

	case SIGN_EXTRACT:
	  tem = expand_compound_operation (op0);
	  if (tem != op0)
	    {
	      op0 = tem;
	      continue;
	    }
	  break;

	case NOT:
	  /* If testing for equality, we can take the NOT of the constant.  */
	  if (equality_comparison_p
	      && (tem = simplify_unary_operation (NOT, mode, op1, mode)) != 0)
	    {
	      op0 = XEXP (op0, 0);
	      op1 = tem;
	      continue;
	    }

	  /* If just looking at the sign bit, reverse the sense of the
	     comparison.  */
	  if (sign_bit_comparison_p)
	    {
	      op0 = XEXP (op0, 0);
	      code = (code == GE ? LT : GE);
	      continue;
	    }
	  break;

	case NEG:
	  /* If testing for equality, we can take the NEG of the constant.  */
	  if (equality_comparison_p
	      && (tem = simplify_unary_operation (NEG, mode, op1, mode)) != 0)
	    {
	      op0 = XEXP (op0, 0);
	      op1 = tem;
	      continue;
	    }

	  /* The remaining cases only apply to comparisons with zero.  */
	  if (const_op != 0)
	    break;

	  /* When X is ABS or is known positive,
	     (neg X) is < 0 if and only if X != 0.  */

	  if (sign_bit_comparison_p
	      && (GET_CODE (XEXP (op0, 0)) == ABS
		  || (mode_width <= HOST_BITS_PER_WIDE_INT
		      && (nonzero_bits (XEXP (op0, 0), mode)
			  & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)))
	    {
	      op0 = XEXP (op0, 0);
	      code = (code == LT ? NE : EQ);
	      continue;
	    }

	  /* If we have NEG of something whose two high-order bits are the
	     same, we know that "(-a) < 0" is equivalent to "a > 0".  */
	  if (num_sign_bit_copies (op0, mode) >= 2)
	    {
	      op0 = XEXP (op0, 0);
	      code = swap_condition (code);
	      continue;
	    }
	  break;
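
	  /* E.g., if the top two bits of (neg X) agree, the negation did
	     not overflow, so (lt (neg X) 0) may become (gt X 0); the
	     swap_condition call handles the other codes likewise.  */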

	case ROTATE:
	  /* If we are testing equality and our count is a constant, we
	     can perform the inverse operation on our RHS.  */
	  if (equality_comparison_p && GET_CODE (XEXP (op0, 1)) == CONST_INT
	      && (tem = simplify_binary_operation (ROTATERT, mode,
						   op1, XEXP (op0, 1))) != 0)
	    {
	      op0 = XEXP (op0, 0);
	      op1 = tem;
	      continue;
	    }

	  /* If we are doing a < 0 or >= 0 comparison, it means we are testing
	     a particular bit.  Convert it to an AND of a constant of that
	     bit.  This will be converted into a ZERO_EXTRACT.  */
	  if (const_op == 0 && sign_bit_comparison_p
	      && GET_CODE (XEXP (op0, 1)) == CONST_INT
	      && mode_width <= HOST_BITS_PER_WIDE_INT)
	    {
	      op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
					    ((HOST_WIDE_INT) 1
					     << (mode_width - 1
						 - INTVAL (XEXP (op0, 1)))));
	      code = (code == LT ? NE : EQ);
	      continue;
	    }

	  /* ... fall through ... */

	case ABS:
	  /* ABS is ignorable inside an equality comparison with zero.  */
	  if (const_op == 0 && equality_comparison_p)
	    {
	      op0 = XEXP (op0, 0);
	      continue;
	    }
	  break;
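
	  /* For example, in 32-bit SImode, (lt (rotate X 1) 0) tests bit
	     30 of X (the bit rotated into the sign position), so it is
	     rewritten as (ne (and X 0x40000000) 0) and later becomes a
	     ZERO_EXTRACT.  */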

	case SIGN_EXTEND:
	  /* Can simplify (compare (zero/sign_extend FOO) CONST)
	     to (compare FOO CONST) if CONST fits in FOO's mode and we
	     are either testing inequality or have an unsigned comparison
	     with ZERO_EXTEND or a signed comparison with SIGN_EXTEND.  */
	  if (! unsigned_comparison_p
	      && (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
		  <= HOST_BITS_PER_WIDE_INT)
	      && ((unsigned HOST_WIDE_INT) const_op
		  < (((HOST_WIDE_INT) 1
		      << (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0))) - 1)))))
	    {
	      op0 = XEXP (op0, 0);
	      continue;
	    }
	  break;
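
	  /* For example, (eq (sign_extend:SI (reg:QI X)) 5) can become
	     (eq (reg:QI X) 5), since 5 fits in QImode; the same holds for
	     signed inequalities such as (lt ... 5).  */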

	case SUBREG:
	  /* Check for the case where we are comparing A - C1 with C2,
	     both constants are smaller than 1/2 the maximum positive
	     value in MODE, and the comparison is equality or unsigned.
	     In that case, if A is either zero-extended to MODE or has
	     sufficient sign bits so that the high-order bit in MODE
	     is a copy of the sign in the inner mode, we can prove that it is
	     safe to do the operation in the wider mode.  This simplifies
	     many range checks.  */

	  if (mode_width <= HOST_BITS_PER_WIDE_INT
	      && subreg_lowpart_p (op0)
	      && GET_CODE (SUBREG_REG (op0)) == PLUS
	      && GET_CODE (XEXP (SUBREG_REG (op0), 1)) == CONST_INT
	      && INTVAL (XEXP (SUBREG_REG (op0), 1)) < 0
	      && (- INTVAL (XEXP (SUBREG_REG (op0), 1))
		  < GET_MODE_MASK (mode) / 2)
	      && (unsigned HOST_WIDE_INT) const_op < GET_MODE_MASK (mode) / 2
	      && (0 == (nonzero_bits (XEXP (SUBREG_REG (op0), 0),
				      GET_MODE (SUBREG_REG (op0)))
			& ~ GET_MODE_MASK (mode))
		  || (num_sign_bit_copies (XEXP (SUBREG_REG (op0), 0),
					   GET_MODE (SUBREG_REG (op0)))
		      > (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0)))
			 - GET_MODE_BITSIZE (mode)))))
	    {
	      op0 = SUBREG_REG (op0);
	      continue;
	    }

	  /* If the inner mode is narrower and we are extracting the low part,
	     we can treat the SUBREG as if it were a ZERO_EXTEND.  */
	  if (subreg_lowpart_p (op0)
	      && GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0))) < mode_width)
	    /* Fall through */ ;
	  else
	    break;

	  /* ... fall through ... */

	case ZERO_EXTEND:
	  if ((unsigned_comparison_p || equality_comparison_p)
	      && (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
		  <= HOST_BITS_PER_WIDE_INT)
	      && ((unsigned HOST_WIDE_INT) const_op
		  < GET_MODE_MASK (GET_MODE (XEXP (op0, 0)))))
	    {
	      op0 = XEXP (op0, 0);
	      continue;
	    }
	  break;

	case PLUS:
	  /* (eq (plus X A) B) -> (eq X (minus B A)).  We can only do
	     this for equality comparisons due to pathological cases involving
	     overflows.  */
	  if (equality_comparison_p
	      && 0 != (tem = simplify_binary_operation (MINUS, mode,
							op1, XEXP (op0, 1))))
	    {
	      op0 = XEXP (op0, 0);
	      op1 = tem;
	      continue;
	    }

	  /* (plus (abs X) (const_int -1)) is < 0 if and only if X == 0.  */
	  if (const_op == 0 && XEXP (op0, 1) == constm1_rtx
	      && GET_CODE (XEXP (op0, 0)) == ABS && sign_bit_comparison_p)
	    {
	      op0 = XEXP (XEXP (op0, 0), 0);
	      code = (code == LT ? EQ : NE);
	      continue;
	    }
	  break;
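
	  /* Examples: (eq (plus X 3) 7) becomes (eq X 4); and since
	     (abs X) >= 1 exactly when X != 0, the expression
	     (plus (abs X) -1) is negative only for X == 0, which gives
	     the LT -> EQ rewrite above.  */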

	case MINUS:
	  /* (eq (minus A B) C) -> (eq A (plus B C)) or
	     (eq B (minus A C)), whichever simplifies.  We can only do
	     this for equality comparisons due to pathological cases involving
	     overflows.  */
	  if (equality_comparison_p
	      && 0 != (tem = simplify_binary_operation (PLUS, mode,
							XEXP (op0, 1), op1)))
	    {
	      op0 = XEXP (op0, 0);
	      op1 = tem;
	      continue;
	    }

	  if (equality_comparison_p
	      && 0 != (tem = simplify_binary_operation (MINUS, mode,
							XEXP (op0, 0), op1)))
	    {
	      op0 = XEXP (op0, 1);
	      op1 = tem;
	      continue;
	    }

	  /* The sign bit of (minus (ashiftrt X C) X), where C is the number
	     of bits in X minus 1, is one iff X > 0.  */
	  if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == ASHIFTRT
	      && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
	      && INTVAL (XEXP (XEXP (op0, 0), 1)) == mode_width - 1
	      && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
	    {
	      op0 = XEXP (op0, 1);
	      code = (code == GE ? LE : GT);
	      continue;
	    }
	  break;
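
	  /* To see the ASHIFTRT identity: with C == mode_width - 1,
	     (ashiftrt X C) is 0 when X >= 0 and -1 when X < 0.  Then
	     (minus (ashiftrt X C) X) is -X for X >= 0 (negative iff
	     X > 0) and -1 - X == ~X (non-negative) for X < 0, so its
	     sign bit is set exactly when X > 0.  */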

	case XOR:
	  /* (eq (xor A B) C) -> (eq A (xor B C)).  This is a simplification
	     if C is zero or B is a constant.  */
	  if (equality_comparison_p
	      && 0 != (tem = simplify_binary_operation (XOR, mode,
							XEXP (op0, 1), op1)))
	    {
	      op0 = XEXP (op0, 0);
	      op1 = tem;
	      continue;
	    }
	  break;

	case EQ:  case NE:
	case LT:  case LTU:  case LE:  case LEU:
	case GT:  case GTU:  case GE:  case GEU:
	  /* We can't do anything if OP0 is a condition code value, rather
	     than an actual data value.  */
	  if (const_op != 0
#ifdef HAVE_cc0
	      || XEXP (op0, 0) == cc0_rtx
#endif
	      || GET_MODE_CLASS (GET_MODE (XEXP (op0, 0))) == MODE_CC)
	    break;

	  /* Get the two operands being compared.  */
	  if (GET_CODE (XEXP (op0, 0)) == COMPARE)
	    tem = XEXP (XEXP (op0, 0), 0), tem1 = XEXP (XEXP (op0, 0), 1);
	  else
	    tem = XEXP (op0, 0), tem1 = XEXP (op0, 1);

	  /* Check for the cases where we simply want the result of the
	     earlier test or the opposite of that result.  */
	  if (code == NE
	      || (code == EQ && reversible_comparison_p (op0))
	      || (GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
		  && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
		  && (STORE_FLAG_VALUE
		      & (((HOST_WIDE_INT) 1
			  << (GET_MODE_BITSIZE (GET_MODE (op0)) - 1))))
		  && (code == LT
		      || (code == GE && reversible_comparison_p (op0)))))
	    {
	      code = (code == LT || code == NE
		      ? GET_CODE (op0) : reverse_condition (GET_CODE (op0)));
	      op0 = tem, op1 = tem1;
	      continue;
	    }
	  break;
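
	  /* For example, when STORE_FLAG_VALUE is 1, op0 might be
	     (gt (reg A) (reg B)) compared with zero by NE; the result of
	     the whole comparison is just the earlier test, so we continue
	     with op0 = (reg A), op1 = (reg B) and code = GT.  */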

	case IOR:
	  /* The sign bit of (ior (plus X (const_int -1)) X) is non-zero
	     iff X <= 0.  */
	  if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == PLUS
	      && XEXP (XEXP (op0, 0), 1) == constm1_rtx
	      && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
	    {
	      op0 = XEXP (op0, 1);
	      code = (code == GE ? GT : LE);
	      continue;
	    }
	  break;
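
	  /* Reasoning for the IOR case: the sign bit of (X - 1) | X is
	     set when either X - 1 or X is negative; X < 0 covers the
	     negative values, and X == 0 makes X - 1 == -1, so the bit is
	     set exactly when X <= 0.  Hence (lt (ior (plus X -1) X) 0)
	     becomes (le X 0).  */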

	case AND:
	  /* Convert (and (xshift 1 X) Y) to (and (lshiftrt Y X) 1).  This
	     will be converted to a ZERO_EXTRACT later.  */
	  if (const_op == 0 && equality_comparison_p
	      && GET_CODE (XEXP (op0, 0)) == ASHIFT
	      && XEXP (XEXP (op0, 0), 0) == const1_rtx)
	    {
	      op0 = simplify_and_const_int
		(op0, mode, gen_rtx_combine (LSHIFTRT, mode,
					     XEXP (op0, 1),
					     XEXP (XEXP (op0, 0), 1)),
		 (HOST_WIDE_INT) 1);
	      continue;
	    }

	  /* If we are comparing (and (lshiftrt X C1) C2) for equality with
	     zero and X is a comparison and C1 and C2 describe only bits set
	     in STORE_FLAG_VALUE, we can compare with X.  */
	  if (const_op == 0 && equality_comparison_p
	      && mode_width <= HOST_BITS_PER_WIDE_INT
	      && GET_CODE (XEXP (op0, 1)) == CONST_INT
	      && GET_CODE (XEXP (op0, 0)) == LSHIFTRT
	      && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
	      && INTVAL (XEXP (XEXP (op0, 0), 1)) >= 0
	      && INTVAL (XEXP (XEXP (op0, 0), 1)) < HOST_BITS_PER_WIDE_INT)
	    {
	      mask = ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode))
		      << INTVAL (XEXP (XEXP (op0, 0), 1)));
	      if ((~ STORE_FLAG_VALUE & mask) == 0
		  && (GET_RTX_CLASS (GET_CODE (XEXP (XEXP (op0, 0), 0))) == '<'
		      || ((tem = get_last_value (XEXP (XEXP (op0, 0), 0))) != 0
			  && GET_RTX_CLASS (GET_CODE (tem)) == '<')))
		{
		  op0 = XEXP (XEXP (op0, 0), 0);
		  continue;
		}
	    }

	  /* If we are doing an equality comparison of an AND of a bit equal
	     to the sign bit, replace this with a LT or GE comparison of
	     the underlying value.  */
	  if (equality_comparison_p
	      && const_op == 0
	      && GET_CODE (XEXP (op0, 1)) == CONST_INT
	      && mode_width <= HOST_BITS_PER_WIDE_INT
	      && ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode))
		  == (HOST_WIDE_INT) 1 << (mode_width - 1)))
	    {
	      op0 = XEXP (op0, 0);
	      code = (code == EQ ? GE : LT);
	      continue;
	    }
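
	  /* For example, in 32-bit SImode, (eq (and X 0x80000000) 0)
	     tests that the sign bit is clear and so becomes (ge X 0);
	     with NE it becomes (lt X 0).  */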

	  /* If this AND operation is really a ZERO_EXTEND from a narrower
	     mode, the constant fits within that mode, and this is either an
	     equality or unsigned comparison, try to do this comparison in
	     the narrower mode.  */
	  if ((equality_comparison_p || unsigned_comparison_p)
	      && GET_CODE (XEXP (op0, 1)) == CONST_INT
	      && (i = exact_log2 ((INTVAL (XEXP (op0, 1))
				   & GET_MODE_MASK (mode))
				  + 1)) >= 0
	      && const_op >> i == 0
	      && (tmode = mode_for_size (i, MODE_INT, 1)) != BLKmode)
	    {
	      op0 = gen_lowpart_for_combine (tmode, XEXP (op0, 0));
	      continue;
	    }
	  break;

	case ASHIFT:
	  /* If we have (compare (ashift FOO N) (const_int C)) and
	     the high order N bits of FOO (N+1 if an inequality comparison)
	     are known to be zero, we can do this by comparing FOO with C
	     shifted right N bits so long as the low-order N bits of C are
	     zero.  */
	  if (GET_CODE (XEXP (op0, 1)) == CONST_INT
	      && INTVAL (XEXP (op0, 1)) >= 0
	      && ((INTVAL (XEXP (op0, 1)) + ! equality_comparison_p)
		  < HOST_BITS_PER_WIDE_INT)
	      && ((const_op
		   & (((HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1))) - 1)) == 0)
	      && mode_width <= HOST_BITS_PER_WIDE_INT
	      && (nonzero_bits (XEXP (op0, 0), mode)
		  & ~ (mask >> (INTVAL (XEXP (op0, 1))
				+ ! equality_comparison_p))) == 0)
	    {
	      const_op >>= INTVAL (XEXP (op0, 1));
	      op1 = GEN_INT (const_op);
	      op0 = XEXP (op0, 0);
	      continue;
	    }

	  /* If we are doing a sign bit comparison, it means we are testing
	     a particular bit.  Convert it to the appropriate AND.  */
	  if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 1)) == CONST_INT
	      && mode_width <= HOST_BITS_PER_WIDE_INT)
	    {
	      op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
					    ((HOST_WIDE_INT) 1
					     << (mode_width - 1
						 - INTVAL (XEXP (op0, 1)))));
	      code = (code == LT ? NE : EQ);
	      continue;
	    }

	  /* If this is an equality comparison with zero and we are shifting
	     the low bit to the sign bit, we can convert this to an AND of the
	     low-order bit.  */
	  if (const_op == 0 && equality_comparison_p
	      && GET_CODE (XEXP (op0, 1)) == CONST_INT
	      && INTVAL (XEXP (op0, 1)) == mode_width - 1)
	    {
	      op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
					    (HOST_WIDE_INT) 1);
	      continue;
	    }
	  break;
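
	  /* For example, in 32-bit SImode, (eq (ashift X 31) 0) depends
	     only on the low bit of X, so it becomes (eq (and X 1) 0);
	     the sign-bit case above likewise turns (lt (ashift X 3) 0)
	     into (ne (and X 0x10000000) 0).  */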

	case ASHIFTRT:
	  /* If this is an equality comparison with zero, we can do this
	     as a logical shift, which might be much simpler.  */
	  if (equality_comparison_p && const_op == 0
	      && GET_CODE (XEXP (op0, 1)) == CONST_INT)
	    {
	      op0 = simplify_shift_const (NULL_RTX, LSHIFTRT, mode,
					  XEXP (op0, 0),
					  INTVAL (XEXP (op0, 1)));
	      continue;
	    }

	  /* If OP0 is a sign extension and CODE is not an unsigned comparison,
	     do the comparison in a narrower mode.  */
	  if (! unsigned_comparison_p
	      && GET_CODE (XEXP (op0, 1)) == CONST_INT
	      && GET_CODE (XEXP (op0, 0)) == ASHIFT
	      && XEXP (op0, 1) == XEXP (XEXP (op0, 0), 1)
	      && (tmode = mode_for_size (mode_width - INTVAL (XEXP (op0, 1)),
					 MODE_INT, 1)) != BLKmode
	      && ((unsigned HOST_WIDE_INT) const_op <= GET_MODE_MASK (tmode)
		  || ((unsigned HOST_WIDE_INT) - const_op
		      <= GET_MODE_MASK (tmode))))
	    {
	      op0 = gen_lowpart_for_combine (tmode, XEXP (XEXP (op0, 0), 0));
	      continue;
	    }

	  /* ... fall through ... */
	case LSHIFTRT:
	  /* If we have (compare (xshiftrt FOO N) (const_int C)) and
	     the low order N bits of FOO are known to be zero, we can do this
	     by comparing FOO with C shifted left N bits so long as no
	     overflow occurs.  */
	  if (GET_CODE (XEXP (op0, 1)) == CONST_INT
	      && INTVAL (XEXP (op0, 1)) >= 0
	      && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
	      && mode_width <= HOST_BITS_PER_WIDE_INT
	      && (nonzero_bits (XEXP (op0, 0), mode)
		  & (((HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1))) - 1)) == 0
	      && (const_op == 0
		  || (floor_log2 (const_op) + INTVAL (XEXP (op0, 1))
		      < mode_width)))
	    {
	      const_op <<= INTVAL (XEXP (op0, 1));
	      op1 = GEN_INT (const_op);
	      op0 = XEXP (op0, 0);
	      continue;
	    }

	  /* If we are using this shift to extract just the sign bit, we
	     can replace this with an LT or GE comparison.  */
	  if (const_op == 0
	      && (equality_comparison_p || sign_bit_comparison_p)
	      && GET_CODE (XEXP (op0, 1)) == CONST_INT
	      && INTVAL (XEXP (op0, 1)) == mode_width - 1)
	    {
	      op0 = XEXP (op0, 0);
	      code = (code == NE || code == GT ? LT : GE);
	      continue;
	    }
	  break;
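
	  /* For example, (ne (lshiftrt X 31) 0) in 32-bit SImode keeps
	     only the sign bit of X, so it becomes (lt X 0), and
	     (eq (lshiftrt X 31) 0) becomes (ge X 0).  */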
	}

      break;
    }

  /* Now make any compound operations involved in this comparison.  Then,
     check for an outermost SUBREG on OP0 that isn't doing anything or is
     paradoxical.  The latter case can only occur when it is known that the
     "extra" bits will be zero.  Therefore, it is safe to remove the SUBREG.
     We can never remove a SUBREG for a non-equality comparison because the
     sign bit is in a different place in the underlying object.  */

  op0 = make_compound_operation (op0, op1 == const0_rtx ? COMPARE : SET);
  op1 = make_compound_operation (op1, SET);

  if (GET_CODE (op0) == SUBREG && subreg_lowpart_p (op0)
      && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
      && (code == NE || code == EQ)
      && ((GET_MODE_SIZE (GET_MODE (op0))
	   > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0))))))
    {
      op0 = SUBREG_REG (op0);
      op1 = gen_lowpart_for_combine (GET_MODE (op0), op1);
    }

  else if (GET_CODE (op0) == SUBREG && subreg_lowpart_p (op0)
	   && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
	   && (code == NE || code == EQ)
	   && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0)))
	       <= HOST_BITS_PER_WIDE_INT)
	   && (nonzero_bits (SUBREG_REG (op0), GET_MODE (SUBREG_REG (op0)))
	       & ~ GET_MODE_MASK (GET_MODE (op0))) == 0
	   && (tem = gen_lowpart_for_combine (GET_MODE (SUBREG_REG (op0)),
					      op1),
	       (nonzero_bits (tem, GET_MODE (SUBREG_REG (op0)))
		& ~ GET_MODE_MASK (GET_MODE (op0))) == 0))
    op0 = SUBREG_REG (op0), op1 = tem;

  /* We now do the opposite procedure: Some machines don't have compare
     insns in all modes.  If OP0's mode is an integer mode smaller than a
     word and we can't do a compare in that mode, see if there is a larger
     mode for which we can do the compare.  There are a number of cases in
     which we can use the wider mode.  */

  mode = GET_MODE (op0);
  if (mode != VOIDmode && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_SIZE (mode) < UNITS_PER_WORD
      && cmp_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
    for (tmode = GET_MODE_WIDER_MODE (mode);
	 (tmode != VOIDmode
	  && GET_MODE_BITSIZE (tmode) <= HOST_BITS_PER_WIDE_INT);
	 tmode = GET_MODE_WIDER_MODE (tmode))
      if (cmp_optab->handlers[(int) tmode].insn_code != CODE_FOR_nothing)
	{
	  /* If the only nonzero bits in OP0 and OP1 are those in the
	     narrower mode and this is an equality or unsigned comparison,
	     we can use the wider mode.  Similarly for sign-extended
	     values, in which case it is true for all comparisons.  */
	  if (((code == EQ || code == NE
		|| code == GEU || code == GTU || code == LEU || code == LTU)
	       && (nonzero_bits (op0, tmode) & ~ GET_MODE_MASK (mode)) == 0
	       && (nonzero_bits (op1, tmode) & ~ GET_MODE_MASK (mode)) == 0)
	      || ((num_sign_bit_copies (op0, tmode)
		   > GET_MODE_BITSIZE (tmode) - GET_MODE_BITSIZE (mode))
		  && (num_sign_bit_copies (op1, tmode)
		      > GET_MODE_BITSIZE (tmode) - GET_MODE_BITSIZE (mode))))
	    {
	      op0 = gen_lowpart_for_combine (tmode, op0);
	      op1 = gen_lowpart_for_combine (tmode, op1);
	      break;
	    }

	  /* If this is a test for negative, we can make an explicit
	     test of the sign bit.  */

	  if (op1 == const0_rtx && (code == LT || code == GE)
	      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
	    {
	      op0 = gen_binary (AND, tmode,
				gen_lowpart_for_combine (tmode, op0),
				GEN_INT ((HOST_WIDE_INT) 1
					 << (GET_MODE_BITSIZE (mode) - 1)));
	      code = (code == LT) ? NE : EQ;
	      break;
	    }
	}

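  /* For example, if the target has no HImode compare but does have an
     SImode compare, and both operands are known to be zero-extended
     HImode values, an equality or unsigned test can simply be done on
     the SImode registers instead.  */
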
#ifdef CANONICALIZE_COMPARISON
  /* If this machine only supports a subset of valid comparisons, see if we
     can convert an unsupported one into a supported one.  */
  CANONICALIZE_COMPARISON (code, op0, op1);
#endif

  *pop0 = op0;
  *pop1 = op1;

  return code;
}
\f
/* Return 1 if we know that X, a comparison operation, is not operating
   on a floating-point value or is EQ or NE, meaning that we can safely
   reverse it.  */

static int
reversible_comparison_p (x)
     rtx x;
{
  if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
      || flag_fast_math
      || GET_CODE (x) == NE || GET_CODE (x) == EQ)
    return 1;

  switch (GET_MODE_CLASS (GET_MODE (XEXP (x, 0))))
    {
    case MODE_INT:
    case MODE_PARTIAL_INT:
    case MODE_COMPLEX_INT:
      return 1;

    case MODE_CC:
      /* If the mode of the condition codes tells us that this is safe,
	 we need look no further.  */
      if (REVERSIBLE_CC_MODE (GET_MODE (XEXP (x, 0))))
	return 1;

      /* Otherwise try and find where the condition codes were last set and
	 use that.  */
      x = get_last_value (XEXP (x, 0));
      return (x && GET_CODE (x) == COMPARE
	      && ! FLOAT_MODE_P (GET_MODE (XEXP (x, 0))));
    }

  return 0;
}
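
/* The IEEE restriction above matters because of NaNs: with IEEE
   arithmetic, (lt X Y) and (ge X Y) can both be false when either
   operand is a NaN, so only EQ and NE can be reversed safely.  */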
\f
/* Utility function for following routine.  Called when X is part of a value
   being stored into reg_last_set_value.  Sets reg_last_set_table_tick
   for each register mentioned.  Similar to mention_regs in cse.c  */

static void
update_table_tick (x)
     rtx x;
{
  register enum rtx_code code = GET_CODE (x);
  register char *fmt = GET_RTX_FORMAT (code);
  register int i;

  if (code == REG)
    {
      int regno = REGNO (x);
      int endregno = regno + (regno < FIRST_PSEUDO_REGISTER
			      ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);

      for (i = regno; i < endregno; i++)
	reg_last_set_table_tick[i] = label_tick;

      return;
    }

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    /* Note that we can't have an "E" in values stored; see
       get_last_value_validate.  */
    if (fmt[i] == 'e')
      update_table_tick (XEXP (x, i));
}

/* Record that REG is set to VALUE in insn INSN.  If VALUE is zero, we
   are saying that the register is clobbered and we no longer know its
   value.  If INSN is zero, don't update reg_last_set; this is only permitted
   with VALUE also zero and is used to invalidate the register.  */

static void
record_value_for_reg (reg, insn, value)
     rtx reg;
     rtx insn;
     rtx value;
{
  int regno = REGNO (reg);
  int endregno = regno + (regno < FIRST_PSEUDO_REGISTER
			  ? HARD_REGNO_NREGS (regno, GET_MODE (reg)) : 1);
  int i;

  /* If VALUE contains REG and we have a previous value for REG, substitute
     the previous value.  */
  if (value && insn && reg_overlap_mentioned_p (reg, value))
    {
      rtx tem;

      /* Set things up so get_last_value is allowed to see anything set up to
	 our insn.  */
      subst_low_cuid = INSN_CUID (insn);
      tem = get_last_value (reg);

      if (tem)
	value = replace_rtx (copy_rtx (value), reg, tem);
    }

  /* For each register modified, show we don't know its value, that
     we don't know about its bitwise content, that its value has been
     updated, and that we don't know the location of the death of the
     register.  */
  for (i = regno; i < endregno; i++)
    {
      if (insn)
	reg_last_set[i] = insn;
      reg_last_set_value[i] = 0;
      reg_last_set_mode[i] = 0;
      reg_last_set_nonzero_bits[i] = 0;
      reg_last_set_sign_bit_copies[i] = 0;
      reg_last_death[i] = 0;
    }

  /* Mark registers that are being referenced in this value.  */
  if (value)
    update_table_tick (value);

  /* Now update the status of each register being set.
     If someone is using this register in this block, set this register
     to invalid since we will get confused between the two lives in this
     basic block.  This makes using this register always invalid.  In cse, we
     scan the table to invalidate all entries using this register, but this
     is too much work for us.  */

  for (i = regno; i < endregno; i++)
    {
      reg_last_set_label[i] = label_tick;
      if (value && reg_last_set_table_tick[i] == label_tick)
	reg_last_set_invalid[i] = 1;
      else
	reg_last_set_invalid[i] = 0;
    }

  /* The value being assigned might refer to X (like in "x++;").  In that
     case, we must replace it with (clobber (const_int 0)) to prevent
     infinite loops.  */
  if (value && ! get_last_value_validate (&value,
					  reg_last_set_label[regno], 0))
    {
      value = copy_rtx (value);
      if (! get_last_value_validate (&value, reg_last_set_label[regno], 1))
	value = 0;
    }
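
  /* For example, for an insn like (set (reg 5) (plus (reg 5) 1)) the
     recorded value would mention reg 5 itself; if no valid previous
     value was available to substitute, the self-reference is replaced
     by (clobber (const_int 0)) so later lookups cannot recurse
     forever.  */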

  /* For the main register being modified, update the value, the mode, the
     nonzero bits, and the number of sign bit copies.  */

  reg_last_set_value[regno] = value;

  if (value)
    {
      subst_low_cuid = INSN_CUID (insn);
      reg_last_set_mode[regno] = GET_MODE (reg);
      reg_last_set_nonzero_bits[regno] = nonzero_bits (value, GET_MODE (reg));
      reg_last_set_sign_bit_copies[regno]
	= num_sign_bit_copies (value, GET_MODE (reg));
    }
}

/* Used for communication between the following two routines.  */
static rtx record_dead_insn;

/* Called via note_stores from record_dead_and_set_regs to handle one
   SET or CLOBBER in an insn.  */

static void
record_dead_and_set_regs_1 (dest, setter)
     rtx dest, setter;
{
  if (GET_CODE (dest) == SUBREG)
    dest = SUBREG_REG (dest);

  if (GET_CODE (dest) == REG)
    {
      /* If we are setting the whole register, we know its value.  Otherwise
	 show that we don't know the value.  We can handle SUBREG in
	 some cases.  */
      if (GET_CODE (setter) == SET && dest == SET_DEST (setter))
	record_value_for_reg (dest, record_dead_insn, SET_SRC (setter));
      else if (GET_CODE (setter) == SET
	       && GET_CODE (SET_DEST (setter)) == SUBREG
	       && SUBREG_REG (SET_DEST (setter)) == dest
	       && GET_MODE_BITSIZE (GET_MODE (dest)) <= BITS_PER_WORD
	       && subreg_lowpart_p (SET_DEST (setter)))
	record_value_for_reg (dest, record_dead_insn,
			      gen_lowpart_for_combine (GET_MODE (dest),
						       SET_SRC (setter)));
      else
	record_value_for_reg (dest, record_dead_insn, NULL_RTX);
    }
  else if (GET_CODE (dest) == MEM
	   /* Ignore pushes, they clobber nothing.  */
	   && ! push_operand (dest, GET_MODE (dest)))
    mem_last_set = INSN_CUID (record_dead_insn);
}

/* Update the records of when each REG was most recently set or killed
   for the things done by INSN.  This is the last thing done in processing
   INSN in the combiner loop.

   We update reg_last_set, reg_last_set_value, reg_last_set_mode,
   reg_last_set_nonzero_bits, reg_last_set_sign_bit_copies, reg_last_death,
   and also the similar information mem_last_set (which insn most recently
   modified memory) and last_call_cuid (which insn was the most recent
   subroutine call).  */

static void
record_dead_and_set_regs (insn)
     rtx insn;
{
  register rtx link;
  int i;

  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    {
      if (REG_NOTE_KIND (link) == REG_DEAD
	  && GET_CODE (XEXP (link, 0)) == REG)
	{
	  int regno = REGNO (XEXP (link, 0));
	  int endregno
	    = regno + (regno < FIRST_PSEUDO_REGISTER
		       ? HARD_REGNO_NREGS (regno, GET_MODE (XEXP (link, 0)))
		       : 1);

	  for (i = regno; i < endregno; i++)
	    reg_last_death[i] = insn;
	}
      else if (REG_NOTE_KIND (link) == REG_INC)
	record_value_for_reg (XEXP (link, 0), insn, NULL_RTX);
    }

  if (GET_CODE (insn) == CALL_INSN)
    {
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	if (call_used_regs[i])
	  {
	    reg_last_set_value[i] = 0;
	    reg_last_set_mode[i] = 0;
	    reg_last_set_nonzero_bits[i] = 0;
	    reg_last_set_sign_bit_copies[i] = 0;
	    reg_last_death[i] = 0;
	  }

      last_call_cuid = mem_last_set = INSN_CUID (insn);
    }

  record_dead_insn = insn;
  note_stores (PATTERN (insn), record_dead_and_set_regs_1);
}
\f
/* Utility routine for the following function.  Verify that all the registers
   mentioned in *LOC are valid when *LOC was part of a value set when
   label_tick == TICK.  Return 0 if some are not.

   If REPLACE is non-zero, replace the invalid reference with
   (clobber (const_int 0)) and return 1.  This replacement is useful because
   we often can get useful information about the form of a value (e.g., if
   it was produced by a shift that always produces -1 or 0) even though
   we don't know exactly what registers it was produced from.  */

static int
get_last_value_validate (loc, tick, replace)
     rtx *loc;
     int tick;
     int replace;
{
  rtx x = *loc;
  char *fmt = GET_RTX_FORMAT (GET_CODE (x));
  int len = GET_RTX_LENGTH (GET_CODE (x));
  int i;

  if (GET_CODE (x) == REG)
    {
      int regno = REGNO (x);
      int endregno = regno + (regno < FIRST_PSEUDO_REGISTER
			      ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
      int j;

      for (j = regno; j < endregno; j++)
	if (reg_last_set_invalid[j]
	    /* If this is a pseudo-register that was only set once, it is
	       always valid.  */
	    || (! (regno >= FIRST_PSEUDO_REGISTER && reg_n_sets[regno] == 1)
		&& reg_last_set_label[j] > tick))
	  {
	    if (replace)
	      *loc = gen_rtx (CLOBBER, GET_MODE (x), const0_rtx);
	    return replace;
	  }

      return 1;
    }

  for (i = 0; i < len; i++)
    if ((fmt[i] == 'e'
	 && get_last_value_validate (&XEXP (x, i), tick, replace) == 0)
	/* Don't bother with these.  They shouldn't occur anyway.  */
	|| fmt[i] == 'E')
      return 0;

  /* If we haven't found a reason for it to be invalid, it is valid.  */
  return 1;
}

/* Get the last value assigned to X, if known.  Some registers
   in the value may be replaced with (clobber (const_int 0)) if their value
   is no longer known reliably.  */

static rtx
get_last_value (x)
     rtx x;
{
  int regno;
  rtx value;

  /* If this is a non-paradoxical SUBREG, get the value of its operand and
     then convert it to the desired mode.  If this is a paradoxical SUBREG,
     we cannot predict what values the "extra" bits might have.  */
  if (GET_CODE (x) == SUBREG
      && subreg_lowpart_p (x)
      && (GET_MODE_SIZE (GET_MODE (x))
	  <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
      && (value = get_last_value (SUBREG_REG (x))) != 0)
    return gen_lowpart_for_combine (GET_MODE (x), value);

  if (GET_CODE (x) != REG)
    return 0;

  regno = REGNO (x);
  value = reg_last_set_value[regno];

  /* If we don't have a value or if it isn't for this basic block,
     return 0.  */

  if (value == 0
      || (reg_n_sets[regno] != 1
	  && reg_last_set_label[regno] != label_tick))
    return 0;

  /* If the value was set in a later insn than the ones we are processing,
     we can't use it even if the register was only set once, but make a quick
     check to see if the previous insn set it to something.  This is commonly
     the case when the same pseudo is used by repeated insns.

     This does not work if there exists an instruction which is temporarily
     not on the insn chain.  */

  if (INSN_CUID (reg_last_set[regno]) >= subst_low_cuid)
    {
      rtx insn, set;

      /* We cannot do anything useful in this case, because there is
	 an instruction which is not on the insn chain.  */
      if (subst_prev_insn)
	return 0;

      /* Skip over USE insns.  They are not useful here, and they may have
	 been made by combine, in which case they do not have an INSN_CUID
	 value.  We can't use prev_real_insn, because that would incorrectly
	 take us backwards across labels.  Skip over BARRIERs also, since
	 they could have been made by combine.  If we see one, we must be
	 optimizing dead code, so it doesn't matter what we do.  */
      for (insn = prev_nonnote_insn (subst_insn);
	   insn && ((GET_CODE (insn) == INSN
		     && GET_CODE (PATTERN (insn)) == USE)
		    || GET_CODE (insn) == BARRIER
		    || INSN_CUID (insn) >= subst_low_cuid);
	   insn = prev_nonnote_insn (insn))
	;

      if (insn
	  && (set = single_set (insn)) != 0
	  && rtx_equal_p (SET_DEST (set), x))
	{
	  value = SET_SRC (set);

	  /* Make sure that VALUE doesn't reference X.  Replace any
	     explicit references with a CLOBBER.  If there are any remaining
	     references (rare), don't use the value.  */

	  if (reg_mentioned_p (x, value))
	    value = replace_rtx (copy_rtx (value), x,
				 gen_rtx (CLOBBER, GET_MODE (x), const0_rtx));

	  if (reg_overlap_mentioned_p (x, value))
	    return 0;
	}
      else
	return 0;
    }

  /* If the value has all its registers valid, return it.  */
  if (get_last_value_validate (&value, reg_last_set_label[regno], 0))
    return value;

  /* Otherwise, make a copy and replace any invalid register with
     (clobber (const_int 0)).  If that fails for some reason, return 0.  */

  value = copy_rtx (value);
  if (get_last_value_validate (&value, reg_last_set_label[regno], 1))
    return value;

  return 0;
}
\f
/* Return nonzero if expression X refers to a REG or to memory
   that is set in an instruction more recent than FROM_CUID.  */

static int
use_crosses_set_p (x, from_cuid)
     register rtx x;
     int from_cuid;
{
  register char *fmt;
  register int i;
  register enum rtx_code code = GET_CODE (x);

  if (code == REG)
    {
      register int regno = REGNO (x);
      int endreg = regno + (regno < FIRST_PSEUDO_REGISTER
			    ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);

#ifdef PUSH_ROUNDING
      /* Don't allow uses of the stack pointer to be moved,
	 because we don't know whether the move crosses a push insn.  */
      if (regno == STACK_POINTER_REGNUM)
	return 1;
#endif
      for (; regno < endreg; regno++)
	if (reg_last_set[regno]
	    && INSN_CUID (reg_last_set[regno]) > from_cuid)
	  return 1;
      return 0;
    }

  if (code == MEM && mem_last_set > from_cuid)
    return 1;

  fmt = GET_RTX_FORMAT (code);

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'E')
	{
	  register int j;
	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    if (use_crosses_set_p (XVECEXP (x, i, j), from_cuid))
	      return 1;
	}
      else if (fmt[i] == 'e'
	       && use_crosses_set_p (XEXP (x, i), from_cuid))
	return 1;
    }
  return 0;
}
\f
/* Define three variables used for communication between the following
   routines.  */

static int reg_dead_regno, reg_dead_endregno;
static int reg_dead_flag;

/* Function called via note_stores from reg_dead_at_p.

   If DEST is within [reg_dead_regno, reg_dead_endregno), set
   reg_dead_flag to 1 if X is a CLOBBER and to -1 if it is a SET.  */

static void
reg_dead_at_p_1 (dest, x)
     rtx dest;
     rtx x;
{
  int regno, endregno;

  if (GET_CODE (dest) != REG)
    return;

  regno = REGNO (dest);
  endregno = regno + (regno < FIRST_PSEUDO_REGISTER
		      ? HARD_REGNO_NREGS (regno, GET_MODE (dest)) : 1);

  if (reg_dead_endregno > regno && reg_dead_regno < endregno)
    reg_dead_flag = (GET_CODE (x) == CLOBBER) ? 1 : -1;
}

/* Return non-zero if REG is known to be dead at INSN.

   We scan backwards from INSN.  If we hit a REG_DEAD note or a CLOBBER
   referencing REG, it is dead.  If we hit a SET referencing REG, it is
   live.  Otherwise, see if it is live or dead at the start of the basic
   block we are in.  Hard regs marked as being live in NEWPAT_USED_REGS
   must be assumed to be always live.  */

static int
reg_dead_at_p (reg, insn)
     rtx reg;
     rtx insn;
{
  int block, i;

  /* Set variables for reg_dead_at_p_1.  */
  reg_dead_regno = REGNO (reg);
  reg_dead_endregno = reg_dead_regno + (reg_dead_regno < FIRST_PSEUDO_REGISTER
					? HARD_REGNO_NREGS (reg_dead_regno,
							    GET_MODE (reg))
					: 1);

  reg_dead_flag = 0;

  /* Check that reg isn't mentioned in NEWPAT_USED_REGS.  */
  if (reg_dead_regno < FIRST_PSEUDO_REGISTER)
    {
      for (i = reg_dead_regno; i < reg_dead_endregno; i++)
	if (TEST_HARD_REG_BIT (newpat_used_regs, i))
	  return 0;
    }

  /* Scan backwards until we find a REG_DEAD note, SET, CLOBBER, label, or
     beginning of function.  */
  for (; insn && GET_CODE (insn) != CODE_LABEL && GET_CODE (insn) != BARRIER;
       insn = prev_nonnote_insn (insn))
    {
      note_stores (PATTERN (insn), reg_dead_at_p_1);
      if (reg_dead_flag)
	return reg_dead_flag == 1 ? 1 : 0;

      if (find_regno_note (insn, REG_DEAD, reg_dead_regno))
	return 1;
    }

  /* Get the basic block number that we were in.  */
  if (insn == 0)
    block = 0;
  else
    {
      for (block = 0; block < n_basic_blocks; block++)
	if (insn == basic_block_head[block])
	  break;

      if (block == n_basic_blocks)
	return 0;
    }

  for (i = reg_dead_regno; i < reg_dead_endregno; i++)
    if (basic_block_live_at_start[block][i / REGSET_ELT_BITS]
	& ((REGSET_ELT_TYPE) 1 << (i % REGSET_ELT_BITS)))
      return 0;

  return 1;
}
\f
/* Note hard registers in X that are used.  This code is similar to
   that in flow.c, but much simpler since we don't care about pseudos.  */

static void
mark_used_regs_combine (x)
     rtx x;
{
  register RTX_CODE code = GET_CODE (x);
  register int regno;
  int i;

  switch (code)
    {
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_INT:
    case CONST:
    case CONST_DOUBLE:
    case PC:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
    case ASM_INPUT:
#ifdef HAVE_cc0
    /* CC0 must die in the insn after it is set, so we don't need to take
       special note of it here.  */
    case CC0:
#endif
      return;

    case CLOBBER:
      /* If we are clobbering a MEM, mark any hard registers inside the
	 address as used.  */
      if (GET_CODE (XEXP (x, 0)) == MEM)
	mark_used_regs_combine (XEXP (XEXP (x, 0), 0));
      return;

    case REG:
      regno = REGNO (x);
      /* A hard reg in a wide mode may really be multiple registers.
	 If so, mark all of them just like the first.  */
      if (regno < FIRST_PSEUDO_REGISTER)
	{
	  /* None of this applies to the stack, frame or arg pointers.  */
	  if (regno == STACK_POINTER_REGNUM
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
	      || regno == HARD_FRAME_POINTER_REGNUM
#endif
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
	      || (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
#endif
	      || regno == FRAME_POINTER_REGNUM)
	    return;

	  i = HARD_REGNO_NREGS (regno, GET_MODE (x));
	  while (i-- > 0)
	    SET_HARD_REG_BIT (newpat_used_regs, regno + i);
	}
      return;

    case SET:
      {
	/* If setting a MEM, or a SUBREG of a MEM, then note any hard regs in
	   the address.  */
	register rtx testreg = SET_DEST (x);

	while (GET_CODE (testreg) == SUBREG
	       || GET_CODE (testreg) == ZERO_EXTRACT
	       || GET_CODE (testreg) == SIGN_EXTRACT
	       || GET_CODE (testreg) == STRICT_LOW_PART)
	  testreg = XEXP (testreg, 0);

	if (GET_CODE (testreg) == MEM)
	  mark_used_regs_combine (XEXP (testreg, 0));

	mark_used_regs_combine (SET_SRC (x));
	return;
      }
    }

  /* Recursively scan the operands of this expression.  */

  {
    register char *fmt = GET_RTX_FORMAT (code);

    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
	if (fmt[i] == 'e')
	  mark_used_regs_combine (XEXP (x, i));
	else if (fmt[i] == 'E')
	  {
	    register int j;

	    for (j = 0; j < XVECLEN (x, i); j++)
	      mark_used_regs_combine (XVECEXP (x, i, j));
	  }
      }
  }
}

\f
/* Remove register number REGNO from the dead registers list of INSN.

   Return the note used to record the death, if there was one.  */

rtx
remove_death (regno, insn)
     int regno;
     rtx insn;
{
  register rtx note = find_regno_note (insn, REG_DEAD, regno);

  if (note)
    {
      reg_n_deaths[regno]--;
      remove_note (insn, note);
    }

  return note;
}
10456
10457/* For each register (hardware or pseudo) used within expression X, if its
10458 death is in an instruction with cuid between FROM_CUID (inclusive) and
10459 TO_INSN (exclusive), put a REG_DEAD note for that register in the
10460 list headed by PNOTES.
10461
10462 This is done when X is being merged by combination into TO_INSN. These
10463 notes will then be distributed as needed. */
10464
static void
move_deaths (x, from_cuid, to_insn, pnotes)
     rtx x;
     int from_cuid;
     rtx to_insn;
     rtx *pnotes;
{
  register char *fmt;
  register int len, i;
  register enum rtx_code code = GET_CODE (x);

  if (code == REG)
    {
      register int regno = REGNO (x);
      register rtx where_dead = reg_last_death[regno];
      register rtx before_dead, after_dead;

      /* WHERE_DEAD could be a USE insn made by combine, so first we
	 make sure that we have insns with valid INSN_CUID values.  */
      before_dead = where_dead;
      while (before_dead && INSN_UID (before_dead) > max_uid_cuid)
	before_dead = PREV_INSN (before_dead);
      after_dead = where_dead;
      while (after_dead && INSN_UID (after_dead) > max_uid_cuid)
	after_dead = NEXT_INSN (after_dead);

      if (before_dead && after_dead
	  && INSN_CUID (before_dead) >= from_cuid
	  && (INSN_CUID (after_dead) < INSN_CUID (to_insn)
	      || (where_dead != after_dead
		  && INSN_CUID (after_dead) == INSN_CUID (to_insn))))
	{
	  rtx note = remove_death (regno, where_dead);

	  /* It is possible for the call above to return 0.  This can occur
	     when reg_last_death points to I2 or I1 that we combined with.
	     In that case make a new note.

	     We must also check for the case where X is a hard register
	     and NOTE is a death note for a range of hard registers
	     including X.  In that case, we must put REG_DEAD notes for
	     the remaining registers in place of NOTE.  */

	  if (note != 0 && regno < FIRST_PSEUDO_REGISTER
	      && (GET_MODE_SIZE (GET_MODE (XEXP (note, 0)))
		  != GET_MODE_SIZE (GET_MODE (x))))
	    {
	      int deadregno = REGNO (XEXP (note, 0));
	      int deadend
		= (deadregno + HARD_REGNO_NREGS (deadregno,
						 GET_MODE (XEXP (note, 0))));
	      int ourend = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
	      int i;

	      for (i = deadregno; i < deadend; i++)
		if (i < regno || i >= ourend)
		  REG_NOTES (where_dead)
		    = gen_rtx (EXPR_LIST, REG_DEAD,
			       gen_rtx (REG, reg_raw_mode[i], i),
			       REG_NOTES (where_dead));
	    }
	  /* If we didn't find any note, and we have a multi-reg hard
	     register, then to be safe we must check for REG_DEAD notes
	     for each register other than the first.  They could have
	     their own REG_DEAD notes lying around.  */
	  else if (note == 0 && regno < FIRST_PSEUDO_REGISTER
		   && HARD_REGNO_NREGS (regno, GET_MODE (x)) > 1)
	    {
	      int ourend = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
	      int i;
	      rtx oldnotes = 0;

	      for (i = regno + 1; i < ourend; i++)
		move_deaths (gen_rtx (REG, reg_raw_mode[i], i),
			     from_cuid, to_insn, &oldnotes);
	    }

	  if (note != 0 && GET_MODE (XEXP (note, 0)) == GET_MODE (x))
	    {
	      XEXP (note, 1) = *pnotes;
	      *pnotes = note;
	    }
	  else
	    *pnotes = gen_rtx (EXPR_LIST, REG_DEAD, x, *pnotes);

	  reg_n_deaths[regno]++;
	}

      return;
    }

  else if (GET_CODE (x) == SET)
    {
      rtx dest = SET_DEST (x);

      move_deaths (SET_SRC (x), from_cuid, to_insn, pnotes);

      /* In the case of a ZERO_EXTRACT, a STRICT_LOW_PART, or a SUBREG
	 that accesses one word of a multi-word item, some piece of
	 every register in the expression is used by this insn, so
	 remove any old death.  */

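      /* For example (a hypothetical pattern): in

	     (set (zero_extract:SI (reg:SI 60) (const_int 4) (const_int 2))
		  (reg:SI 61))

	 only four bits of (reg:SI 60) are overwritten, so the rest of the
	 register is effectively an input; recursing on DEST below moves
	 any pending death of (reg:SI 60) to this insn.  */
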
      if (GET_CODE (dest) == ZERO_EXTRACT
	  || GET_CODE (dest) == STRICT_LOW_PART
	  || (GET_CODE (dest) == SUBREG
	      && (((GET_MODE_SIZE (GET_MODE (dest))
		    + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
		  == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
		       + UNITS_PER_WORD - 1) / UNITS_PER_WORD))))
	{
	  move_deaths (dest, from_cuid, to_insn, pnotes);
	  return;
	}

      /* If this is some other SUBREG, we know it replaces the entire
	 value, so use that as the destination.  */
      if (GET_CODE (dest) == SUBREG)
	dest = SUBREG_REG (dest);

      /* If this is a MEM, adjust deaths of anything used in the address.
	 For a REG (the only other possibility), the entire value is
	 being replaced so the old value is not used in this insn.  */

      if (GET_CODE (dest) == MEM)
	move_deaths (XEXP (dest, 0), from_cuid, to_insn, pnotes);
      return;
    }

  else if (GET_CODE (x) == CLOBBER)
    return;

  len = GET_RTX_LENGTH (code);
  fmt = GET_RTX_FORMAT (code);

  for (i = 0; i < len; i++)
    {
      if (fmt[i] == 'E')
	{
	  register int j;
	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    move_deaths (XVECEXP (x, i, j), from_cuid, to_insn, pnotes);
	}
      else if (fmt[i] == 'e')
	move_deaths (XEXP (x, i), from_cuid, to_insn, pnotes);
    }
}
\f
/* Return 1 if X is the target of a bit-field assignment in BODY, the
   pattern of an insn.  X must be a REG.  */

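/* A hypothetical example: for BODY

	(set (strict_low_part (subreg:QI (reg:SI 65) 0)) (reg:QI 66))

   this returns 1 when X is (reg:SI 65), since only the low byte of the
   register is assigned and the remaining bits are left untouched.  */
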
static int
reg_bitfield_target_p (x, body)
     rtx x;
     rtx body;
{
  int i;

  if (GET_CODE (body) == SET)
    {
      rtx dest = SET_DEST (body);
      rtx target;
      int regno, tregno, endregno, endtregno;

      if (GET_CODE (dest) == ZERO_EXTRACT)
	target = XEXP (dest, 0);
      else if (GET_CODE (dest) == STRICT_LOW_PART)
	target = SUBREG_REG (XEXP (dest, 0));
      else
	return 0;

      if (GET_CODE (target) == SUBREG)
	target = SUBREG_REG (target);

      if (GET_CODE (target) != REG)
	return 0;

      tregno = REGNO (target), regno = REGNO (x);
      if (tregno >= FIRST_PSEUDO_REGISTER || regno >= FIRST_PSEUDO_REGISTER)
	return target == x;

      endtregno = tregno + HARD_REGNO_NREGS (tregno, GET_MODE (target));
      endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));

      return endregno > tregno && regno < endtregno;
    }

  else if (GET_CODE (body) == PARALLEL)
    for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
      if (reg_bitfield_target_p (x, XVECEXP (body, 0, i)))
	return 1;

  return 0;
}
\f
/* Given a chain of REG_NOTES originally from FROM_INSN, try to place them
   as appropriate.  I3 and I2 are the insns resulting from combining the
   insns that included FROM_INSN (I2 may be zero).

   ELIM_I2 and ELIM_I1 are either zero or registers that we know will
   not need REG_DEAD notes because they are being substituted for.  This
   saves searching in the most common cases.

   Each note in the list is either ignored or placed on some insn,
   depending on the type of note.  */

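/* A sketch of the common 3->1 case (the note shown is hypothetical):
   if FROM_INSN carried

	(expr_list:REG_DEAD (reg:SI 72) (nil))

   and (reg:SI 72) is still used as an input by the combined I3, the
   REG_DEAD case below sets PLACE to I3 and the note is threaded onto
   REG_NOTES (i3); if the register is no longer referenced anywhere,
   the note is dropped and reg_n_deaths for it is decremented.  */
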
static void
distribute_notes (notes, from_insn, i3, i2, elim_i2, elim_i1)
     rtx notes;
     rtx from_insn;
     rtx i3, i2;
     rtx elim_i2, elim_i1;
{
  rtx note, next_note;
  rtx tem;

  for (note = notes; note; note = next_note)
    {
      rtx place = 0, place2 = 0;

      /* If this NOTE references a pseudo register, ensure it references
	 the latest copy of that register.  */
      if (XEXP (note, 0) && GET_CODE (XEXP (note, 0)) == REG
	  && REGNO (XEXP (note, 0)) >= FIRST_PSEUDO_REGISTER)
	XEXP (note, 0) = regno_reg_rtx[REGNO (XEXP (note, 0))];

      next_note = XEXP (note, 1);
      switch (REG_NOTE_KIND (note))
	{
	case REG_UNUSED:
	  /* Any clobbers for i3 may still exist, and so we must process
	     REG_UNUSED notes from that insn.

	     Any clobbers from i2 or i1 can only exist if they were added by
	     recog_for_combine.  In that case, recog_for_combine created the
	     necessary REG_UNUSED notes.  Trying to keep any original
	     REG_UNUSED notes from these insns can cause incorrect output
	     if it is for the same register as the original i3 dest.
	     In that case, we will notice that the register is set in i3,
	     and then add a REG_UNUSED note for the destination of i3, which
	     is wrong.  However, it is possible to have REG_UNUSED notes
	     from i2 or i1 for registers that were both used and clobbered,
	     so we keep notes from i2 or i1 if they will turn into REG_DEAD
	     notes.  */

	  /* If this register is set or clobbered in I3, put the note there
	     unless there is one already.  */
	  if (reg_set_p (XEXP (note, 0), PATTERN (i3)))
	    {
	      if (from_insn != i3)
		break;

	      if (! (GET_CODE (XEXP (note, 0)) == REG
		     ? find_regno_note (i3, REG_UNUSED,
					REGNO (XEXP (note, 0)))
		     : find_reg_note (i3, REG_UNUSED, XEXP (note, 0))))
		place = i3;
	    }
	  /* Otherwise, if this register is used by I3, then this register
	     now dies here, so we must put a REG_DEAD note here unless there
	     is one already.  */
	  else if (reg_referenced_p (XEXP (note, 0), PATTERN (i3))
		   && ! (GET_CODE (XEXP (note, 0)) == REG
			 ? find_regno_note (i3, REG_DEAD,
					    REGNO (XEXP (note, 0)))
			 : find_reg_note (i3, REG_DEAD, XEXP (note, 0))))
	    {
	      PUT_REG_NOTE_KIND (note, REG_DEAD);
	      place = i3;
	    }
	  break;

	case REG_EQUAL:
	case REG_EQUIV:
	case REG_NONNEG:
	  /* These notes say something about results of an insn.  We can
	     only support them if they used to be on I3, in which case they
	     remain on I3.  Otherwise they are ignored.

	     If the note refers to an expression that is not a constant, we
	     must also ignore the note since we cannot tell whether the
	     equivalence is still true.  It might be possible to do
	     slightly better than this (we only have a problem if I2DEST
	     or I1DEST is present in the expression), but it doesn't
	     seem worth the trouble.  */

	  if (from_insn == i3
	      && (XEXP (note, 0) == 0 || CONSTANT_P (XEXP (note, 0))))
	    place = i3;
	  break;

	case REG_INC:
	case REG_NO_CONFLICT:
	case REG_LABEL:
	  /* These notes say something about how a register is used.  They
	     must be present on any use of the register in I2 or I3.  */
	  if (reg_mentioned_p (XEXP (note, 0), PATTERN (i3)))
	    place = i3;

	  if (i2 && reg_mentioned_p (XEXP (note, 0), PATTERN (i2)))
	    {
	      if (place)
		place2 = i2;
	      else
		place = i2;
	    }
	  break;

	case REG_WAS_0:
	  /* It is too much trouble to try to see if this note is still
	     correct in all situations.  It is better to simply delete
	     it.  */
	  break;

	case REG_RETVAL:
	  /* If the insn previously containing this note still exists,
	     put it back where it was.  Otherwise move it to the previous
	     insn.  Adjust the corresponding REG_LIBCALL note.  */
	  if (GET_CODE (from_insn) != NOTE)
	    place = from_insn;
	  else
	    {
	      tem = find_reg_note (XEXP (note, 0), REG_LIBCALL, NULL_RTX);
	      place = prev_real_insn (from_insn);
	      if (tem && place)
		XEXP (tem, 0) = place;
	    }
	  break;

	case REG_LIBCALL:
	  /* This is handled similarly to REG_RETVAL.  */
	  if (GET_CODE (from_insn) != NOTE)
	    place = from_insn;
	  else
	    {
	      tem = find_reg_note (XEXP (note, 0), REG_RETVAL, NULL_RTX);
	      place = next_real_insn (from_insn);
	      if (tem && place)
		XEXP (tem, 0) = place;
	    }
	  break;

	case REG_DEAD:
	  /* If the register is used as an input in I3, it dies there.
	     Similarly for I2, if it is non-zero and adjacent to I3.

	     If the register is not used as an input in either I3 or I2
	     and it is not one of the registers we were supposed to
	     eliminate, there are two possibilities.  We might have a
	     non-adjacent I2 or we might have somehow eliminated an
	     additional register from a computation.  For example, we
	     might have had A & B where we discover that B will always
	     be zero.  In this case we will eliminate the reference to A.

	     In both cases, we must search to see if we can find a previous
	     use of A and put the death note there.  */

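	  /* A hypothetical RTL instance of the second case: if I3 was

		 (set (reg:SI 101) (and:SI (reg:SI 98) (reg:SI 99)))

	     with a REG_DEAD note for (reg:SI 98), and substitution proves
	     (reg:SI 99) is always zero, the combined I3 becomes
	     (set (reg:SI 101) (const_int 0)) and no longer mentions
	     (reg:SI 98); the search below walks backwards from I3 looking
	     for an earlier use of (reg:SI 98) to carry the death note.  */
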
	  if (from_insn
	      && GET_CODE (from_insn) == CALL_INSN
	      && find_reg_fusage (from_insn, USE, XEXP (note, 0)))
	    place = from_insn;
	  else if (reg_referenced_p (XEXP (note, 0), PATTERN (i3)))
	    place = i3;
	  else if (i2 != 0 && next_nonnote_insn (i2) == i3
		   && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
	    place = i2;

	  if (XEXP (note, 0) == elim_i2 || XEXP (note, 0) == elim_i1)
	    break;

	  /* If the register is used in both I2 and I3 and it dies in I3,
	     we might have added another reference to it.  If reg_n_refs
	     was 2, bump it to 3.  This has to be correct since the
	     register must have been set somewhere.  The reason this is
	     done is because local-alloc.c treats 2 references as a
	     special case.  */

	  if (place == i3 && i2 != 0 && GET_CODE (XEXP (note, 0)) == REG
	      && reg_n_refs[REGNO (XEXP (note, 0))] == 2
	      && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
	    reg_n_refs[REGNO (XEXP (note, 0))] = 3;

	  if (place == 0)
	    {
	      for (tem = prev_nonnote_insn (i3);
		   place == 0 && tem
		   && (GET_CODE (tem) == INSN
		       || GET_CODE (tem) == CALL_INSN);
		   tem = prev_nonnote_insn (tem))
		{
		  /* If the register is being set at TEM, see if that is all
		     TEM is doing.  If so, delete TEM.  Otherwise, make this
		     into a REG_UNUSED note instead.  */
		  if (reg_set_p (XEXP (note, 0), PATTERN (tem)))
		    {
		      rtx set = single_set (tem);

		      /* Verify that it was the set, and not a clobber that
			 modified the register.  */

		      if (set != 0 && ! side_effects_p (SET_SRC (set))
			  && (rtx_equal_p (XEXP (note, 0), SET_DEST (set))
			      || (GET_CODE (SET_DEST (set)) == SUBREG
				  && rtx_equal_p (XEXP (note, 0),
						  XEXP (SET_DEST (set), 0)))))
			{
			  /* Move the notes and links of TEM elsewhere.
			     This might delete other dead insns recursively.
			     First set the pattern to something that won't
			     use any register.  */

			  PATTERN (tem) = pc_rtx;

			  distribute_notes (REG_NOTES (tem), tem, tem,
					    NULL_RTX, NULL_RTX, NULL_RTX);
			  distribute_links (LOG_LINKS (tem));

			  PUT_CODE (tem, NOTE);
			  NOTE_LINE_NUMBER (tem) = NOTE_INSN_DELETED;
			  NOTE_SOURCE_FILE (tem) = 0;
			}
		      else
			{
			  PUT_REG_NOTE_KIND (note, REG_UNUSED);

			  /* If there isn't already a REG_UNUSED note, put
			     one here.  */
			  if (! find_regno_note (tem, REG_UNUSED,
						 REGNO (XEXP (note, 0))))
			    place = tem;
			  break;
			}
		    }
		  else if (reg_referenced_p (XEXP (note, 0), PATTERN (tem))
			   || (GET_CODE (tem) == CALL_INSN
			       && find_reg_fusage (tem, USE, XEXP (note, 0))))
		    {
		      place = tem;

		      /* If we are doing a 3->2 combination, and we have a
			 register which formerly died in i3 and was not used
			 by i2, which now no longer dies in i3 and is used in
			 i2 but does not die in i2, and place is between i2
			 and i3, then we may need to move a link from place
			 to i2.  */
		      if (i2 && INSN_UID (place) <= max_uid_cuid
			  && INSN_CUID (place) > INSN_CUID (i2)
			  && from_insn
			  && INSN_CUID (from_insn) > INSN_CUID (i2)
			  && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
			{
			  rtx links = LOG_LINKS (place);
			  LOG_LINKS (place) = 0;
			  distribute_links (links);
			}
		      break;
		    }
		}

	      /* If we haven't found an insn for the death note and it
		 is still a REG_DEAD note, but we have hit a CODE_LABEL,
		 insert a USE insn for the register at that label and
		 put the death note there.  This prevents problems with
		 call-state tracking in caller-save.c.  */
	      if (REG_NOTE_KIND (note) == REG_DEAD && place == 0 && tem != 0)
		{
		  place
		    = emit_insn_after (gen_rtx (USE, VOIDmode,
						XEXP (note, 0)),
				       tem);

		  /* If this insn was emitted between blocks, then update
		     basic_block_head of the current block to include it.  */
		  if (basic_block_end[this_basic_block - 1] == tem)
		    basic_block_head[this_basic_block] = place;
		}
	    }

	  /* If the register is set or already dead at PLACE, we needn't do
	     anything with this note if it is still a REG_DEAD note.

	     Note that we cannot use just `dead_or_set_p' here since we can
	     convert an assignment to a register into a bit-field
	     assignment.  Therefore, we must also omit the note if the
	     register is the target of a bitfield assignment.  */

	  if (place && REG_NOTE_KIND (note) == REG_DEAD)
	    {
	      int regno = REGNO (XEXP (note, 0));

	      if (dead_or_set_p (place, XEXP (note, 0))
		  || reg_bitfield_target_p (XEXP (note, 0), PATTERN (place)))
		{
		  /* Unless the register previously died in PLACE, clear
		     reg_last_death.  [I no longer understand why this is
		     being done.]  */
		  if (reg_last_death[regno] != place)
		    reg_last_death[regno] = 0;
		  place = 0;
		}
	      else
		reg_last_death[regno] = place;

	      /* If this is a death note for a hard reg that is occupying
		 multiple registers, ensure that we are still using all
		 parts of the object.  If we find a piece of the object
		 that is unused, we must add a USE for that piece before
		 PLACE and put the appropriate REG_DEAD note on it.

		 An alternative would be to put a REG_UNUSED for the pieces
		 on the insn that set the register, but that can't be done
		 if it is not in the same block.  It is simpler, though
		 less efficient, to add the USE insns.  */

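	      /* For instance, on a hypothetical target where DImode
		 occupies two SImode hard regs, a REG_DEAD note for
		 (reg:DI 2) covers hard regs 2 and 3.  If PLACE uses only
		 hard reg 2, the code below emits

		     (use (reg:SI 3))

		 before PLACE and puts a REG_DEAD note for (reg:SI 3)
		 on that USE insn.  */
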
	      if (place && regno < FIRST_PSEUDO_REGISTER
		  && HARD_REGNO_NREGS (regno, GET_MODE (XEXP (note, 0))) > 1)
		{
		  int endregno
		    = regno + HARD_REGNO_NREGS (regno,
						GET_MODE (XEXP (note, 0)));
		  int all_used = 1;
		  int i;

		  for (i = regno; i < endregno; i++)
		    if (! refers_to_regno_p (i, i + 1, PATTERN (place), 0)
			&& ! find_regno_fusage (place, USE, i))
		      {
			rtx piece = gen_rtx (REG, reg_raw_mode[i], i);
			rtx p;

			/* See if we already placed a USE note for this
			   register in front of PLACE.  */
			for (p = place;
			     GET_CODE (PREV_INSN (p)) == INSN
			     && GET_CODE (PATTERN (PREV_INSN (p))) == USE;
			     p = PREV_INSN (p))
			  if (rtx_equal_p (piece,
					   XEXP (PATTERN (PREV_INSN (p)), 0)))
			    {
			      p = 0;
			      break;
			    }

			if (p)
			  {
			    rtx use_insn
			      = emit_insn_before (gen_rtx (USE, VOIDmode,
							   piece),
						  p);
			    REG_NOTES (use_insn)
			      = gen_rtx (EXPR_LIST, REG_DEAD, piece,
					 REG_NOTES (use_insn));
			  }

			all_used = 0;
		      }

		  /* Check for the case where the register dying partially
		     overlaps the register set by this insn.  */
		  if (all_used)
		    for (i = regno; i < endregno; i++)
		      if (dead_or_set_regno_p (place, i))
			{
			  all_used = 0;
			  break;
			}

		  if (! all_used)
		    {
		      /* Put only REG_DEAD notes for pieces that are
			 still used and that are not already dead or set.  */

		      for (i = regno; i < endregno; i++)
			{
			  rtx piece = gen_rtx (REG, reg_raw_mode[i], i);

			  if ((reg_referenced_p (piece, PATTERN (place))
			       || (GET_CODE (place) == CALL_INSN
				   && find_reg_fusage (place, USE, piece)))
			      && ! dead_or_set_p (place, piece)
			      && ! reg_bitfield_target_p (piece,
							  PATTERN (place)))
			    REG_NOTES (place)
			      = gen_rtx (EXPR_LIST, REG_DEAD, piece,
					 REG_NOTES (place));
			}

		      place = 0;
		    }
		}
	    }
	  break;

	default:
	  /* Any other notes should not be present at this point in the
	     compilation.  */
	  abort ();
	}

      if (place)
	{
	  XEXP (note, 1) = REG_NOTES (place);
	  REG_NOTES (place) = note;
	}
      else if ((REG_NOTE_KIND (note) == REG_DEAD
		|| REG_NOTE_KIND (note) == REG_UNUSED)
	       && GET_CODE (XEXP (note, 0)) == REG)
	reg_n_deaths[REGNO (XEXP (note, 0))]--;

      if (place2)
	{
	  if ((REG_NOTE_KIND (note) == REG_DEAD
	       || REG_NOTE_KIND (note) == REG_UNUSED)
	      && GET_CODE (XEXP (note, 0)) == REG)
	    reg_n_deaths[REGNO (XEXP (note, 0))]++;

	  REG_NOTES (place2) = gen_rtx (GET_CODE (note), REG_NOTE_KIND (note),
					XEXP (note, 0), REG_NOTES (place2));
	}
    }
}
\f
/* Similarly to above, distribute the LOG_LINKS that used to be present on
   I3, I2, and I1 to new locations.  This is also called in one case to
   add a link pointing at I3 when I3's destination is changed.  */

static void
distribute_links (links)
     rtx links;
{
  rtx link, next_link;

  for (link = links; link; link = next_link)
    {
      rtx place = 0;
      rtx insn;
      rtx set, reg;

      next_link = XEXP (link, 1);

      /* If the insn that this link points to is a NOTE or isn't a single
	 set, ignore it.  In the latter case, it isn't clear what we
	 can do other than ignore the link, since we can't tell which
	 register it was for.  Such links wouldn't be used by combine
	 anyway.

	 It is not possible for the destination of the target of the link
	 to have been changed by combine.  The only potential for this is
	 if we replace I3, I2, and I1 by I3 and I2.  But in that case the
	 destination of I2 also remains unchanged.  */

      if (GET_CODE (XEXP (link, 0)) == NOTE
	  || (set = single_set (XEXP (link, 0))) == 0)
	continue;

      reg = SET_DEST (set);
      while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
	     || GET_CODE (reg) == SIGN_EXTRACT
	     || GET_CODE (reg) == STRICT_LOW_PART)
	reg = XEXP (reg, 0);

      /* A LOG_LINK is defined as being placed on the first insn that uses
	 a register and points to the insn that sets the register.  Start
	 searching at the next insn after the target of the link and stop
	 when we reach a set of the register or the end of the basic block.

	 Note that this correctly handles the link that used to point from
	 I3 to I2.  Also note that not much searching is typically done
	 here since most links don't point very far away.  */

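      /* A hypothetical illustration of the invariant above: if insn 10 is
	 (set (reg:SI 80) ...) and insn 14 is the first insn in the block
	 that uses (reg:SI 80), then LOG_LINKS (insn 14) contains insn 10.
	 The search below re-establishes this after combination has moved
	 or deleted the register's old first use.  */
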
      for (insn = NEXT_INSN (XEXP (link, 0));
	   (insn && (this_basic_block == n_basic_blocks - 1
		     || basic_block_head[this_basic_block + 1] != insn));
	   insn = NEXT_INSN (insn))
	if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
	    && reg_overlap_mentioned_p (reg, PATTERN (insn)))
	  {
	    if (reg_referenced_p (reg, PATTERN (insn)))
	      place = insn;
	    break;
	  }
	else if (GET_CODE (insn) == CALL_INSN
		 && find_reg_fusage (insn, USE, reg))
	  {
	    place = insn;
	    break;
	  }

      /* If we found a place to put the link, place it there unless there
	 is already a link to the same insn as LINK at that point.  */

      if (place)
	{
	  rtx link2;

	  for (link2 = LOG_LINKS (place); link2; link2 = XEXP (link2, 1))
	    if (XEXP (link2, 0) == XEXP (link, 0))
	      break;

	  if (link2 == 0)
	    {
	      XEXP (link, 1) = LOG_LINKS (place);
	      LOG_LINKS (place) = link;

	      /* Set added_links_insn to the earliest insn we added a
		 link to.  */
	      if (added_links_insn == 0
		  || INSN_CUID (added_links_insn) > INSN_CUID (place))
		added_links_insn = place;
	    }
	}
    }
}
\f
void
dump_combine_stats (file)
     FILE *file;
{
  fprintf
    (file,
     ";; Combiner statistics: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n\n",
     combine_attempts, combine_merges, combine_extras, combine_successes);
}

void
dump_combine_total_stats (file)
     FILE *file;
{
  fprintf
    (file,
     "\n;; Combiner totals: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n",
     total_attempts, total_merges, total_extras, total_successes);
}