]> gcc.gnu.org Git - gcc.git/blame - gcc/combine.c
G19990304_01.out: New file.
[gcc.git] / gcc / combine.c
CommitLineData
230d793d 1/* Optimize by combining instructions for GNU compiler.
1bf27b5b 2 Copyright (C) 1987, 88, 92-98, 1999 Free Software Foundation, Inc.
230d793d
RS
3
4This file is part of GNU CC.
5
6GNU CC is free software; you can redistribute it and/or modify
7it under the terms of the GNU General Public License as published by
8the Free Software Foundation; either version 2, or (at your option)
9any later version.
10
11GNU CC is distributed in the hope that it will be useful,
12but WITHOUT ANY WARRANTY; without even the implied warranty of
13MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14GNU General Public License for more details.
15
16You should have received a copy of the GNU General Public License
17along with GNU CC; see the file COPYING. If not, write to
940d9d63
RK
18the Free Software Foundation, 59 Temple Place - Suite 330,
19Boston, MA 02111-1307, USA. */
230d793d
RS
20
21
22/* This module is essentially the "combiner" phase of the U. of Arizona
23 Portable Optimizer, but redone to work on our list-structured
24 representation for RTL instead of their string representation.
25
26 The LOG_LINKS of each insn identify the most recent assignment
27 to each REG used in the insn. It is a list of previous insns,
28 each of which contains a SET for a REG that is used in this insn
29 and not used or set in between. LOG_LINKs never cross basic blocks.
30 They were set up by the preceding pass (lifetime analysis).
31
32 We try to combine each pair of insns joined by a logical link.
33 We also try to combine triples of insns A, B and C when
34 C has a link back to B and B has a link back to A.
35
36 LOG_LINKS does not have links for use of the CC0. They don't
37 need to, because the insn that sets the CC0 is always immediately
38 before the insn that tests it. So we always regard a branch
39 insn as having a logical link to the preceding insn. The same is true
40 for an insn explicitly using CC0.
41
42 We check (with use_crosses_set_p) to avoid combining in such a way
43 as to move a computation to a place where its value would be different.
44
45 Combination is done by mathematically substituting the previous
46 insn(s) values for the regs they set into the expressions in
47 the later insns that refer to these regs. If the result is a valid insn
48 for our target machine, according to the machine description,
49 we install it, delete the earlier insns, and update the data flow
50 information (LOG_LINKS and REG_NOTES) for what we did.
51
52 There are a few exceptions where the dataflow information created by
53 flow.c aren't completely updated:
54
55 - reg_live_length is not updated
56 - reg_n_refs is not adjusted in the rare case when a register is
57 no longer required in a computation
58 - there are extremely rare cases (see distribute_regnotes) when a
59 REG_DEAD note is lost
60 - a LOG_LINKS entry that refers to an insn with multiple SETs may be
61 removed because there is no way to know which register it was
62 linking
63
64 To simplify substitution, we combine only when the earlier insn(s)
65 consist of only a single assignment. To simplify updating afterward,
66 we never combine when a subroutine call appears in the middle.
67
68 Since we do not represent assignments to CC0 explicitly except when that
69 is all an insn does, there is no LOG_LINKS entry in an insn that uses
70 the condition code for the insn that set the condition code.
71 Fortunately, these two insns must be consecutive.
72 Therefore, every JUMP_INSN is taken to have an implicit logical link
73 to the preceding insn. This is not quite right, since non-jumps can
74 also use the condition code; but in practice such insns would not
75 combine anyway. */
76
230d793d 77#include "config.h"
670ee920 78#include "system.h"
789f983a 79#include "rtl.h" /* stdio.h must precede rtl.h for FFS. */
230d793d
RS
80#include "flags.h"
81#include "regs.h"
55310dad 82#include "hard-reg-set.h"
230d793d
RS
83#include "basic-block.h"
84#include "insn-config.h"
49ad7cfa 85#include "function.h"
d6f4ec51
KG
86/* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
87#include "expr.h"
230d793d
RS
88#include "insn-flags.h"
89#include "insn-codes.h"
90#include "insn-attr.h"
91#include "recog.h"
92#include "real.h"
2e107e9e 93#include "toplev.h"
230d793d
RS
94
95/* It is not safe to use ordinary gen_lowpart in combine.
96 Use gen_lowpart_for_combine instead. See comments there. */
97#define gen_lowpart dont_use_gen_lowpart_you_dummy
98
99/* Number of attempts to combine instructions in this function. */
100
101static int combine_attempts;
102
103/* Number of attempts that got as far as substitution in this function. */
104
105static int combine_merges;
106
107/* Number of instructions combined with added SETs in this function. */
108
109static int combine_extras;
110
111/* Number of instructions combined in this function. */
112
113static int combine_successes;
114
115/* Totals over entire compilation. */
116
117static int total_attempts, total_merges, total_extras, total_successes;
9210df58 118
ddd5a7c1 119/* Define a default value for REVERSIBLE_CC_MODE.
9210df58
RK
120 We can never assume that a condition code mode is safe to reverse unless
121 the md tells us so. */
122#ifndef REVERSIBLE_CC_MODE
123#define REVERSIBLE_CC_MODE(MODE) 0
124#endif
230d793d
RS
125\f
126/* Vector mapping INSN_UIDs to cuids.
5089e22e 127 The cuids are like uids but increase monotonically always.
230d793d
RS
128 Combine always uses cuids so that it can compare them.
129 But actually renumbering the uids, which we used to do,
130 proves to be a bad idea because it makes it hard to compare
131 the dumps produced by earlier passes with those from later passes. */
132
133static int *uid_cuid;
4255220d 134static int max_uid_cuid;
230d793d
RS
135
136/* Get the cuid of an insn. */
137
1427d6d2
RK
138#define INSN_CUID(INSN) \
139(INSN_UID (INSN) > max_uid_cuid ? insn_cuid (INSN) : uid_cuid[INSN_UID (INSN)])
230d793d
RS
140
141/* Maximum register number, which is the size of the tables below. */
142
143static int combine_max_regno;
144
145/* Record last point of death of (hard or pseudo) register n. */
146
147static rtx *reg_last_death;
148
149/* Record last point of modification of (hard or pseudo) register n. */
150
151static rtx *reg_last_set;
152
153/* Record the cuid of the last insn that invalidated memory
154 (anything that writes memory, and subroutine calls, but not pushes). */
155
156static int mem_last_set;
157
158/* Record the cuid of the last CALL_INSN
159 so we can tell whether a potential combination crosses any calls. */
160
161static int last_call_cuid;
162
163/* When `subst' is called, this is the insn that is being modified
164 (by combining in a previous insn). The PATTERN of this insn
165 is still the old pattern partially modified and it should not be
166 looked at, but this may be used to examine the successors of the insn
167 to judge whether a simplification is valid. */
168
169static rtx subst_insn;
170
0d9641d1
JW
171/* This is an insn that belongs before subst_insn, but is not currently
172 on the insn chain. */
173
174static rtx subst_prev_insn;
175
230d793d
RS
176/* This is the lowest CUID that `subst' is currently dealing with.
177 get_last_value will not return a value if the register was set at or
178 after this CUID. If not for this mechanism, we could get confused if
179 I2 or I1 in try_combine were an insn that used the old value of a register
180 to obtain a new value. In that case, we might erroneously get the
181 new value of the register when we wanted the old one. */
182
183static int subst_low_cuid;
184
6e25d159
RK
185/* This contains any hard registers that are used in newpat; reg_dead_at_p
186 must consider all these registers to be always live. */
187
188static HARD_REG_SET newpat_used_regs;
189
abe6e52f
RK
190/* This is an insn to which a LOG_LINKS entry has been added. If this
191 insn is the earlier than I2 or I3, combine should rescan starting at
192 that location. */
193
194static rtx added_links_insn;
195
0d4d42c3
RK
196/* Basic block number of the block in which we are performing combines. */
197static int this_basic_block;
230d793d
RS
198\f
199/* The next group of arrays allows the recording of the last value assigned
200 to (hard or pseudo) register n. We use this information to see if a
5089e22e 201 operation being processed is redundant given a prior operation performed
230d793d
RS
202 on the register. For example, an `and' with a constant is redundant if
203 all the zero bits are already known to be turned off.
204
205 We use an approach similar to that used by cse, but change it in the
206 following ways:
207
208 (1) We do not want to reinitialize at each label.
209 (2) It is useful, but not critical, to know the actual value assigned
210 to a register. Often just its form is helpful.
211
212 Therefore, we maintain the following arrays:
213
214 reg_last_set_value the last value assigned
215 reg_last_set_label records the value of label_tick when the
216 register was assigned
217 reg_last_set_table_tick records the value of label_tick when a
218 value using the register is assigned
219 reg_last_set_invalid set to non-zero when it is not valid
220 to use the value of this register in some
221 register's value
222
223 To understand the usage of these tables, it is important to understand
224 the distinction between the value in reg_last_set_value being valid
225 and the register being validly contained in some other expression in the
226 table.
227
228 Entry I in reg_last_set_value is valid if it is non-zero, and either
229 reg_n_sets[i] is 1 or reg_last_set_label[i] == label_tick.
230
231 Register I may validly appear in any expression returned for the value
232 of another register if reg_n_sets[i] is 1. It may also appear in the
233 value for register J if reg_last_set_label[i] < reg_last_set_label[j] or
234 reg_last_set_invalid[j] is zero.
235
236 If an expression is found in the table containing a register which may
237 not validly appear in an expression, the register is replaced by
238 something that won't match, (clobber (const_int 0)).
239
240 reg_last_set_invalid[i] is set non-zero when register I is being assigned
241 to and reg_last_set_table_tick[i] == label_tick. */
242
0f41302f 243/* Record last value assigned to (hard or pseudo) register n. */
230d793d
RS
244
245static rtx *reg_last_set_value;
246
247/* Record the value of label_tick when the value for register n is placed in
248 reg_last_set_value[n]. */
249
568356af 250static int *reg_last_set_label;
230d793d
RS
251
252/* Record the value of label_tick when an expression involving register n
0f41302f 253 is placed in reg_last_set_value. */
230d793d 254
568356af 255static int *reg_last_set_table_tick;
230d793d
RS
256
257/* Set non-zero if references to register n in expressions should not be
258 used. */
259
260static char *reg_last_set_invalid;
261
0f41302f 262/* Incremented for each label. */
230d793d 263
568356af 264static int label_tick;
230d793d
RS
265
266/* Some registers that are set more than once and used in more than one
267 basic block are nevertheless always set in similar ways. For example,
268 a QImode register may be loaded from memory in two places on a machine
269 where byte loads zero extend.
270
951553af 271 We record in the following array what we know about the nonzero
230d793d
RS
272 bits of a register, specifically which bits are known to be zero.
273
274 If an entry is zero, it means that we don't know anything special. */
275
55310dad 276static unsigned HOST_WIDE_INT *reg_nonzero_bits;
230d793d 277
951553af 278/* Mode used to compute significance in reg_nonzero_bits. It is the largest
5f4f0e22 279 integer mode that can fit in HOST_BITS_PER_WIDE_INT. */
230d793d 280
951553af 281static enum machine_mode nonzero_bits_mode;
230d793d 282
d0ab8cd3
RK
283/* Nonzero if we know that a register has some leading bits that are always
284 equal to the sign bit. */
285
286static char *reg_sign_bit_copies;
287
951553af 288/* Nonzero when reg_nonzero_bits and reg_sign_bit_copies can be safely used.
1a26b032
RK
289 It is zero while computing them and after combine has completed. This
290 former test prevents propagating values based on previously set values,
291 which can be incorrect if a variable is modified in a loop. */
230d793d 292
951553af 293static int nonzero_sign_valid;
55310dad
RK
294
295/* These arrays are maintained in parallel with reg_last_set_value
296 and are used to store the mode in which the register was last set,
297 the bits that were known to be zero when it was last set, and the
298 number of sign bits copies it was known to have when it was last set. */
299
300static enum machine_mode *reg_last_set_mode;
301static unsigned HOST_WIDE_INT *reg_last_set_nonzero_bits;
302static char *reg_last_set_sign_bit_copies;
230d793d
RS
303\f
304/* Record one modification to rtl structure
305 to be undone by storing old_contents into *where.
306 is_int is 1 if the contents are an int. */
307
308struct undo
309{
241cea85 310 struct undo *next;
230d793d 311 int is_int;
f5393ab9
RS
312 union {rtx r; int i;} old_contents;
313 union {rtx *r; int *i;} where;
230d793d
RS
314};
315
316/* Record a bunch of changes to be undone, up to MAX_UNDO of them.
317 num_undo says how many are currently recorded.
318
319 storage is nonzero if we must undo the allocation of new storage.
320 The value of storage is what to pass to obfree.
321
322 other_insn is nonzero if we have modified some other insn in the process
241cea85 323 of working on subst_insn. It must be verified too.
230d793d 324
241cea85
RK
325 previous_undos is the value of undobuf.undos when we started processing
326 this substitution. This will prevent gen_rtx_combine from re-used a piece
327 from the previous expression. Doing so can produce circular rtl
328 structures. */
230d793d
RS
329
330struct undobuf
331{
230d793d 332 char *storage;
241cea85
RK
333 struct undo *undos;
334 struct undo *frees;
335 struct undo *previous_undos;
230d793d
RS
336 rtx other_insn;
337};
338
339static struct undobuf undobuf;
340
cc876596 341/* Substitute NEWVAL, an rtx expression, into INTO, a place in some
230d793d 342 insn. The substitution can be undone by undo_all. If INTO is already
cc876596
RK
343 set to NEWVAL, do not record this change. Because computing NEWVAL might
344 also call SUBST, we have to compute it before we put anything into
345 the undo table. */
230d793d
RS
346
347#define SUBST(INTO, NEWVAL) \
241cea85
RK
348 do { rtx _new = (NEWVAL); \
349 struct undo *_buf; \
350 \
351 if (undobuf.frees) \
352 _buf = undobuf.frees, undobuf.frees = _buf->next; \
353 else \
354 _buf = (struct undo *) xmalloc (sizeof (struct undo)); \
355 \
356 _buf->is_int = 0; \
357 _buf->where.r = &INTO; \
358 _buf->old_contents.r = INTO; \
359 INTO = _new; \
360 if (_buf->old_contents.r == INTO) \
361 _buf->next = undobuf.frees, undobuf.frees = _buf; \
362 else \
363 _buf->next = undobuf.undos, undobuf.undos = _buf; \
230d793d
RS
364 } while (0)
365
241cea85
RK
366/* Similar to SUBST, but NEWVAL is an int expression. Note that substitution
367 for the value of a HOST_WIDE_INT value (including CONST_INT) is
368 not safe. */
230d793d
RS
369
370#define SUBST_INT(INTO, NEWVAL) \
241cea85
RK
371 do { struct undo *_buf; \
372 \
373 if (undobuf.frees) \
374 _buf = undobuf.frees, undobuf.frees = _buf->next; \
375 else \
376 _buf = (struct undo *) xmalloc (sizeof (struct undo)); \
377 \
378 _buf->is_int = 1; \
379 _buf->where.i = (int *) &INTO; \
380 _buf->old_contents.i = INTO; \
381 INTO = NEWVAL; \
382 if (_buf->old_contents.i == INTO) \
383 _buf->next = undobuf.frees, undobuf.frees = _buf; \
384 else \
385 _buf->next = undobuf.undos, undobuf.undos = _buf; \
230d793d
RS
386 } while (0)
387
388/* Number of times the pseudo being substituted for
389 was found and replaced. */
390
391static int n_occurrences;
392
c5ad722c
RK
393static void init_reg_last_arrays PROTO((void));
394static void setup_incoming_promotions PROTO((void));
fe2db4fb
RK
395static void set_nonzero_bits_and_sign_copies PROTO((rtx, rtx));
396static int can_combine_p PROTO((rtx, rtx, rtx, rtx, rtx *, rtx *));
e009aaf3 397static int sets_function_arg_p PROTO((rtx));
fe2db4fb
RK
398static int combinable_i3pat PROTO((rtx, rtx *, rtx, rtx, int, rtx *));
399static rtx try_combine PROTO((rtx, rtx, rtx));
400static void undo_all PROTO((void));
401static rtx *find_split_point PROTO((rtx *, rtx));
402static rtx subst PROTO((rtx, rtx, rtx, int, int));
8079805d
RK
403static rtx simplify_rtx PROTO((rtx, enum machine_mode, int, int));
404static rtx simplify_if_then_else PROTO((rtx));
405static rtx simplify_set PROTO((rtx));
406static rtx simplify_logical PROTO((rtx, int));
fe2db4fb
RK
407static rtx expand_compound_operation PROTO((rtx));
408static rtx expand_field_assignment PROTO((rtx));
409static rtx make_extraction PROTO((enum machine_mode, rtx, int, rtx, int,
410 int, int, int));
71923da7 411static rtx extract_left_shift PROTO((rtx, int));
fe2db4fb
RK
412static rtx make_compound_operation PROTO((rtx, enum rtx_code));
413static int get_pos_from_mask PROTO((unsigned HOST_WIDE_INT, int *));
6139ff20 414static rtx force_to_mode PROTO((rtx, enum machine_mode,
e3d616e3 415 unsigned HOST_WIDE_INT, rtx, int));
abe6e52f 416static rtx if_then_else_cond PROTO((rtx, rtx *, rtx *));
fe2db4fb 417static rtx known_cond PROTO((rtx, enum rtx_code, rtx, rtx));
e11fa86f 418static int rtx_equal_for_field_assignment_p PROTO((rtx, rtx));
fe2db4fb
RK
419static rtx make_field_assignment PROTO((rtx));
420static rtx apply_distributive_law PROTO((rtx));
421static rtx simplify_and_const_int PROTO((rtx, enum machine_mode, rtx,
422 unsigned HOST_WIDE_INT));
423static unsigned HOST_WIDE_INT nonzero_bits PROTO((rtx, enum machine_mode));
424static int num_sign_bit_copies PROTO((rtx, enum machine_mode));
425static int merge_outer_ops PROTO((enum rtx_code *, HOST_WIDE_INT *,
426 enum rtx_code, HOST_WIDE_INT,
427 enum machine_mode, int *));
428static rtx simplify_shift_const PROTO((rtx, enum rtx_code, enum machine_mode,
429 rtx, int));
8e2f6e35 430static int recog_for_combine PROTO((rtx *, rtx, rtx *));
fe2db4fb 431static rtx gen_lowpart_for_combine PROTO((enum machine_mode, rtx));
d18225c4 432static rtx gen_rtx_combine PVPROTO((enum rtx_code code, enum machine_mode mode,
4f90e4a0 433 ...));
fe2db4fb
RK
434static rtx gen_binary PROTO((enum rtx_code, enum machine_mode,
435 rtx, rtx));
0c1c8ea6
RK
436static rtx gen_unary PROTO((enum rtx_code, enum machine_mode,
437 enum machine_mode, rtx));
fe2db4fb
RK
438static enum rtx_code simplify_comparison PROTO((enum rtx_code, rtx *, rtx *));
439static int reversible_comparison_p PROTO((rtx));
440static void update_table_tick PROTO((rtx));
441static void record_value_for_reg PROTO((rtx, rtx, rtx));
442static void record_dead_and_set_regs_1 PROTO((rtx, rtx));
443static void record_dead_and_set_regs PROTO((rtx));
9a893315 444static int get_last_value_validate PROTO((rtx *, rtx, int, int));
fe2db4fb
RK
445static rtx get_last_value PROTO((rtx));
446static int use_crosses_set_p PROTO((rtx, int));
447static void reg_dead_at_p_1 PROTO((rtx, rtx));
448static int reg_dead_at_p PROTO((rtx, rtx));
6eb12cef 449static void move_deaths PROTO((rtx, rtx, int, rtx, rtx *));
fe2db4fb
RK
450static int reg_bitfield_target_p PROTO((rtx, rtx));
451static void distribute_notes PROTO((rtx, rtx, rtx, rtx, rtx, rtx));
452static void distribute_links PROTO((rtx));
6e25d159 453static void mark_used_regs_combine PROTO((rtx));
1427d6d2 454static int insn_cuid PROTO((rtx));
230d793d
RS
455\f
456/* Main entry point for combiner. F is the first insn of the function.
457 NREGS is the first unused pseudo-reg number. */
458
459void
460combine_instructions (f, nregs)
461 rtx f;
462 int nregs;
463{
b729186a
JL
464 register rtx insn, next;
465#ifdef HAVE_cc0
466 register rtx prev;
467#endif
230d793d
RS
468 register int i;
469 register rtx links, nextlinks;
470
471 combine_attempts = 0;
472 combine_merges = 0;
473 combine_extras = 0;
474 combine_successes = 0;
241cea85 475 undobuf.undos = undobuf.previous_undos = 0;
230d793d
RS
476
477 combine_max_regno = nregs;
478
ef026f91
RS
479 reg_nonzero_bits
480 = (unsigned HOST_WIDE_INT *) alloca (nregs * sizeof (HOST_WIDE_INT));
481 reg_sign_bit_copies = (char *) alloca (nregs * sizeof (char));
482
4c9a05bc 483 bzero ((char *) reg_nonzero_bits, nregs * sizeof (HOST_WIDE_INT));
ef026f91
RS
484 bzero (reg_sign_bit_copies, nregs * sizeof (char));
485
230d793d
RS
486 reg_last_death = (rtx *) alloca (nregs * sizeof (rtx));
487 reg_last_set = (rtx *) alloca (nregs * sizeof (rtx));
488 reg_last_set_value = (rtx *) alloca (nregs * sizeof (rtx));
568356af
RK
489 reg_last_set_table_tick = (int *) alloca (nregs * sizeof (int));
490 reg_last_set_label = (int *) alloca (nregs * sizeof (int));
5f4f0e22 491 reg_last_set_invalid = (char *) alloca (nregs * sizeof (char));
55310dad
RK
492 reg_last_set_mode
493 = (enum machine_mode *) alloca (nregs * sizeof (enum machine_mode));
494 reg_last_set_nonzero_bits
495 = (unsigned HOST_WIDE_INT *) alloca (nregs * sizeof (HOST_WIDE_INT));
496 reg_last_set_sign_bit_copies
497 = (char *) alloca (nregs * sizeof (char));
498
ef026f91 499 init_reg_last_arrays ();
230d793d
RS
500
501 init_recog_no_volatile ();
502
503 /* Compute maximum uid value so uid_cuid can be allocated. */
504
505 for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
506 if (INSN_UID (insn) > i)
507 i = INSN_UID (insn);
508
509 uid_cuid = (int *) alloca ((i + 1) * sizeof (int));
4255220d 510 max_uid_cuid = i;
230d793d 511
951553af 512 nonzero_bits_mode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);
230d793d 513
951553af 514 /* Don't use reg_nonzero_bits when computing it. This can cause problems
230d793d
RS
515 when, for example, we have j <<= 1 in a loop. */
516
951553af 517 nonzero_sign_valid = 0;
230d793d
RS
518
519 /* Compute the mapping from uids to cuids.
520 Cuids are numbers assigned to insns, like uids,
521 except that cuids increase monotonically through the code.
522
523 Scan all SETs and see if we can deduce anything about what
951553af 524 bits are known to be zero for some registers and how many copies
d79f08e0
RK
525 of the sign bit are known to exist for those registers.
526
527 Also set any known values so that we can use it while searching
528 for what bits are known to be set. */
529
530 label_tick = 1;
230d793d 531
bcd49eb7
JW
532 /* We need to initialize it here, because record_dead_and_set_regs may call
533 get_last_value. */
534 subst_prev_insn = NULL_RTX;
535
7988fd36
RK
536 setup_incoming_promotions ();
537
230d793d
RS
538 for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
539 {
4255220d 540 uid_cuid[INSN_UID (insn)] = ++i;
d79f08e0
RK
541 subst_low_cuid = i;
542 subst_insn = insn;
543
230d793d 544 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
d79f08e0
RK
545 {
546 note_stores (PATTERN (insn), set_nonzero_bits_and_sign_copies);
547 record_dead_and_set_regs (insn);
2dab894a
RK
548
549#ifdef AUTO_INC_DEC
550 for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
551 if (REG_NOTE_KIND (links) == REG_INC)
552 set_nonzero_bits_and_sign_copies (XEXP (links, 0), NULL_RTX);
553#endif
d79f08e0
RK
554 }
555
556 if (GET_CODE (insn) == CODE_LABEL)
557 label_tick++;
230d793d
RS
558 }
559
951553af 560 nonzero_sign_valid = 1;
230d793d
RS
561
562 /* Now scan all the insns in forward order. */
563
0d4d42c3 564 this_basic_block = -1;
230d793d
RS
565 label_tick = 1;
566 last_call_cuid = 0;
567 mem_last_set = 0;
ef026f91 568 init_reg_last_arrays ();
7988fd36
RK
569 setup_incoming_promotions ();
570
230d793d
RS
571 for (insn = f; insn; insn = next ? next : NEXT_INSN (insn))
572 {
573 next = 0;
574
0d4d42c3 575 /* If INSN starts a new basic block, update our basic block number. */
f085c9cd 576 if (this_basic_block + 1 < n_basic_blocks
3b413743 577 && BLOCK_HEAD (this_basic_block + 1) == insn)
0d4d42c3
RK
578 this_basic_block++;
579
230d793d
RS
580 if (GET_CODE (insn) == CODE_LABEL)
581 label_tick++;
582
0d4d42c3 583 else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
230d793d
RS
584 {
585 /* Try this insn with each insn it links back to. */
586
587 for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
5f4f0e22 588 if ((next = try_combine (insn, XEXP (links, 0), NULL_RTX)) != 0)
230d793d
RS
589 goto retry;
590
591 /* Try each sequence of three linked insns ending with this one. */
592
593 for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
594 for (nextlinks = LOG_LINKS (XEXP (links, 0)); nextlinks;
595 nextlinks = XEXP (nextlinks, 1))
596 if ((next = try_combine (insn, XEXP (links, 0),
597 XEXP (nextlinks, 0))) != 0)
598 goto retry;
599
600#ifdef HAVE_cc0
601 /* Try to combine a jump insn that uses CC0
602 with a preceding insn that sets CC0, and maybe with its
603 logical predecessor as well.
604 This is how we make decrement-and-branch insns.
605 We need this special code because data flow connections
606 via CC0 do not get entered in LOG_LINKS. */
607
608 if (GET_CODE (insn) == JUMP_INSN
609 && (prev = prev_nonnote_insn (insn)) != 0
610 && GET_CODE (prev) == INSN
611 && sets_cc0_p (PATTERN (prev)))
612 {
5f4f0e22 613 if ((next = try_combine (insn, prev, NULL_RTX)) != 0)
230d793d
RS
614 goto retry;
615
616 for (nextlinks = LOG_LINKS (prev); nextlinks;
617 nextlinks = XEXP (nextlinks, 1))
618 if ((next = try_combine (insn, prev,
619 XEXP (nextlinks, 0))) != 0)
620 goto retry;
621 }
622
623 /* Do the same for an insn that explicitly references CC0. */
624 if (GET_CODE (insn) == INSN
625 && (prev = prev_nonnote_insn (insn)) != 0
626 && GET_CODE (prev) == INSN
627 && sets_cc0_p (PATTERN (prev))
628 && GET_CODE (PATTERN (insn)) == SET
629 && reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (insn))))
630 {
5f4f0e22 631 if ((next = try_combine (insn, prev, NULL_RTX)) != 0)
230d793d
RS
632 goto retry;
633
634 for (nextlinks = LOG_LINKS (prev); nextlinks;
635 nextlinks = XEXP (nextlinks, 1))
636 if ((next = try_combine (insn, prev,
637 XEXP (nextlinks, 0))) != 0)
638 goto retry;
639 }
640
641 /* Finally, see if any of the insns that this insn links to
642 explicitly references CC0. If so, try this insn, that insn,
5089e22e 643 and its predecessor if it sets CC0. */
230d793d
RS
644 for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
645 if (GET_CODE (XEXP (links, 0)) == INSN
646 && GET_CODE (PATTERN (XEXP (links, 0))) == SET
647 && reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (XEXP (links, 0))))
648 && (prev = prev_nonnote_insn (XEXP (links, 0))) != 0
649 && GET_CODE (prev) == INSN
650 && sets_cc0_p (PATTERN (prev))
651 && (next = try_combine (insn, XEXP (links, 0), prev)) != 0)
652 goto retry;
653#endif
654
655 /* Try combining an insn with two different insns whose results it
656 uses. */
657 for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
658 for (nextlinks = XEXP (links, 1); nextlinks;
659 nextlinks = XEXP (nextlinks, 1))
660 if ((next = try_combine (insn, XEXP (links, 0),
661 XEXP (nextlinks, 0))) != 0)
662 goto retry;
663
664 if (GET_CODE (insn) != NOTE)
665 record_dead_and_set_regs (insn);
666
667 retry:
668 ;
669 }
670 }
671
672 total_attempts += combine_attempts;
673 total_merges += combine_merges;
674 total_extras += combine_extras;
675 total_successes += combine_successes;
1a26b032 676
951553af 677 nonzero_sign_valid = 0;
972b320c
R
678
679 /* Make recognizer allow volatile MEMs again. */
680 init_recog ();
230d793d 681}
ef026f91
RS
682
683/* Wipe the reg_last_xxx arrays in preparation for another pass. */
684
685static void
686init_reg_last_arrays ()
687{
688 int nregs = combine_max_regno;
689
4c9a05bc
RK
690 bzero ((char *) reg_last_death, nregs * sizeof (rtx));
691 bzero ((char *) reg_last_set, nregs * sizeof (rtx));
692 bzero ((char *) reg_last_set_value, nregs * sizeof (rtx));
693 bzero ((char *) reg_last_set_table_tick, nregs * sizeof (int));
694 bzero ((char *) reg_last_set_label, nregs * sizeof (int));
ef026f91 695 bzero (reg_last_set_invalid, nregs * sizeof (char));
4c9a05bc
RK
696 bzero ((char *) reg_last_set_mode, nregs * sizeof (enum machine_mode));
697 bzero ((char *) reg_last_set_nonzero_bits, nregs * sizeof (HOST_WIDE_INT));
ef026f91
RS
698 bzero (reg_last_set_sign_bit_copies, nregs * sizeof (char));
699}
230d793d 700\f
7988fd36
RK
701/* Set up any promoted values for incoming argument registers. */
702
ee791cc3 703static void
7988fd36
RK
704setup_incoming_promotions ()
705{
706#ifdef PROMOTE_FUNCTION_ARGS
707 int regno;
708 rtx reg;
709 enum machine_mode mode;
710 int unsignedp;
711 rtx first = get_insns ();
712
713 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
714 if (FUNCTION_ARG_REGNO_P (regno)
715 && (reg = promoted_input_arg (regno, &mode, &unsignedp)) != 0)
38a448ca
RH
716 {
717 record_value_for_reg
718 (reg, first, gen_rtx_fmt_e ((unsignedp ? ZERO_EXTEND
719 : SIGN_EXTEND),
720 GET_MODE (reg),
721 gen_rtx_CLOBBER (mode, const0_rtx)));
722 }
7988fd36
RK
723#endif
724}
725\f
91102d5a
RK
726/* Called via note_stores. If X is a pseudo that is narrower than
727 HOST_BITS_PER_WIDE_INT and is being set, record what bits are known zero.
230d793d
RS
728
729 If we are setting only a portion of X and we can't figure out what
730 portion, assume all bits will be used since we don't know what will
d0ab8cd3
RK
731 be happening.
732
733 Similarly, set how many bits of X are known to be copies of the sign bit
734 at all locations in the function. This is the smallest number implied
735 by any set of X. */
230d793d
RS
736
737static void
951553af 738set_nonzero_bits_and_sign_copies (x, set)
230d793d
RS
739 rtx x;
740 rtx set;
741{
d0ab8cd3
RK
742 int num;
743
230d793d
RS
744 if (GET_CODE (x) == REG
745 && REGNO (x) >= FIRST_PSEUDO_REGISTER
e8095e80
RK
746 /* If this register is undefined at the start of the file, we can't
747 say what its contents were. */
e881bb1b 748 && ! REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start, REGNO (x))
5f4f0e22 749 && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT)
230d793d 750 {
2dab894a 751 if (set == 0 || GET_CODE (set) == CLOBBER)
e8095e80
RK
752 {
753 reg_nonzero_bits[REGNO (x)] = GET_MODE_MASK (GET_MODE (x));
88306d12 754 reg_sign_bit_copies[REGNO (x)] = 1;
e8095e80
RK
755 return;
756 }
230d793d
RS
757
758 /* If this is a complex assignment, see if we can convert it into a
5089e22e 759 simple assignment. */
230d793d 760 set = expand_field_assignment (set);
d79f08e0
RK
761
762 /* If this is a simple assignment, or we have a paradoxical SUBREG,
763 set what we know about X. */
764
765 if (SET_DEST (set) == x
766 || (GET_CODE (SET_DEST (set)) == SUBREG
705c7b3b
JW
767 && (GET_MODE_SIZE (GET_MODE (SET_DEST (set)))
768 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (set)))))
d79f08e0 769 && SUBREG_REG (SET_DEST (set)) == x))
d0ab8cd3 770 {
9afa3d54
RK
771 rtx src = SET_SRC (set);
772
773#ifdef SHORT_IMMEDIATES_SIGN_EXTEND
774 /* If X is narrower than a word and SRC is a non-negative
775 constant that would appear negative in the mode of X,
776 sign-extend it for use in reg_nonzero_bits because some
777 machines (maybe most) will actually do the sign-extension
778 and this is the conservative approach.
779
780 ??? For 2.5, try to tighten up the MD files in this regard
781 instead of this kludge. */
782
783 if (GET_MODE_BITSIZE (GET_MODE (x)) < BITS_PER_WORD
784 && GET_CODE (src) == CONST_INT
785 && INTVAL (src) > 0
786 && 0 != (INTVAL (src)
787 & ((HOST_WIDE_INT) 1
9e69be8c 788 << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
9afa3d54
RK
789 src = GEN_INT (INTVAL (src)
790 | ((HOST_WIDE_INT) (-1)
791 << GET_MODE_BITSIZE (GET_MODE (x))));
792#endif
793
951553af 794 reg_nonzero_bits[REGNO (x)]
9afa3d54 795 |= nonzero_bits (src, nonzero_bits_mode);
d0ab8cd3
RK
796 num = num_sign_bit_copies (SET_SRC (set), GET_MODE (x));
797 if (reg_sign_bit_copies[REGNO (x)] == 0
798 || reg_sign_bit_copies[REGNO (x)] > num)
799 reg_sign_bit_copies[REGNO (x)] = num;
800 }
230d793d 801 else
d0ab8cd3 802 {
951553af 803 reg_nonzero_bits[REGNO (x)] = GET_MODE_MASK (GET_MODE (x));
88306d12 804 reg_sign_bit_copies[REGNO (x)] = 1;
d0ab8cd3 805 }
230d793d
RS
806 }
807}
808\f
809/* See if INSN can be combined into I3. PRED and SUCC are optionally
810 insns that were previously combined into I3 or that will be combined
811 into the merger of INSN and I3.
812
813 Return 0 if the combination is not allowed for any reason.
814
815 If the combination is allowed, *PDEST will be set to the single
816 destination of INSN and *PSRC to the single source, and this function
817 will return 1. */
818
819static int
820can_combine_p (insn, i3, pred, succ, pdest, psrc)
821 rtx insn;
822 rtx i3;
e51712db
KG
823 rtx pred ATTRIBUTE_UNUSED;
824 rtx succ;
230d793d
RS
825 rtx *pdest, *psrc;
826{
827 int i;
828 rtx set = 0, src, dest;
b729186a
JL
829 rtx p;
830#ifdef AUTO_INC_DEC
76d31c63 831 rtx link;
b729186a 832#endif
230d793d
RS
833 int all_adjacent = (succ ? (next_active_insn (insn) == succ
834 && next_active_insn (succ) == i3)
835 : next_active_insn (insn) == i3);
836
837 /* Can combine only if previous insn is a SET of a REG, a SUBREG or CC0.
838 or a PARALLEL consisting of such a SET and CLOBBERs.
839
840 If INSN has CLOBBER parallel parts, ignore them for our processing.
841 By definition, these happen during the execution of the insn. When it
842 is merged with another insn, all bets are off. If they are, in fact,
843 needed and aren't also supplied in I3, they may be added by
844 recog_for_combine. Otherwise, it won't match.
845
846 We can also ignore a SET whose SET_DEST is mentioned in a REG_UNUSED
847 note.
848
849 Get the source and destination of INSN. If more than one, can't
850 combine. */
851
852 if (GET_CODE (PATTERN (insn)) == SET)
853 set = PATTERN (insn);
854 else if (GET_CODE (PATTERN (insn)) == PARALLEL
855 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
856 {
857 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
858 {
859 rtx elt = XVECEXP (PATTERN (insn), 0, i);
860
861 switch (GET_CODE (elt))
862 {
e3258cef
R
863 /* This is important to combine floating point insns
864 for the SH4 port. */
865 case USE:
866 /* Combining an isolated USE doesn't make sense.
867 We depend here on combinable_i3_pat to reject them. */
868 /* The code below this loop only verifies that the inputs of
869 the SET in INSN do not change. We call reg_set_between_p
870 to verify that the REG in the USE does not change betweeen
871 I3 and INSN.
872 If the USE in INSN was for a pseudo register, the matching
873 insn pattern will likely match any register; combining this
874 with any other USE would only be safe if we knew that the
875 used registers have identical values, or if there was
876 something to tell them apart, e.g. different modes. For
877 now, we forgo such compilcated tests and simply disallow
878 combining of USES of pseudo registers with any other USE. */
879 if (GET_CODE (XEXP (elt, 0)) == REG
880 && GET_CODE (PATTERN (i3)) == PARALLEL)
881 {
882 rtx i3pat = PATTERN (i3);
883 int i = XVECLEN (i3pat, 0) - 1;
884 int regno = REGNO (XEXP (elt, 0));
885 do
886 {
887 rtx i3elt = XVECEXP (i3pat, 0, i);
888 if (GET_CODE (i3elt) == USE
889 && GET_CODE (XEXP (i3elt, 0)) == REG
890 && (REGNO (XEXP (i3elt, 0)) == regno
891 ? reg_set_between_p (XEXP (elt, 0),
892 PREV_INSN (insn), i3)
893 : regno >= FIRST_PSEUDO_REGISTER))
894 return 0;
895 }
896 while (--i >= 0);
897 }
898 break;
899
230d793d
RS
900 /* We can ignore CLOBBERs. */
901 case CLOBBER:
902 break;
903
904 case SET:
905 /* Ignore SETs whose result isn't used but not those that
906 have side-effects. */
907 if (find_reg_note (insn, REG_UNUSED, SET_DEST (elt))
908 && ! side_effects_p (elt))
909 break;
910
911 /* If we have already found a SET, this is a second one and
912 so we cannot combine with this insn. */
913 if (set)
914 return 0;
915
916 set = elt;
917 break;
918
919 default:
920 /* Anything else means we can't combine. */
921 return 0;
922 }
923 }
924
925 if (set == 0
926 /* If SET_SRC is an ASM_OPERANDS we can't throw away these CLOBBERs,
927 so don't do anything with it. */
928 || GET_CODE (SET_SRC (set)) == ASM_OPERANDS)
929 return 0;
930 }
931 else
932 return 0;
933
934 if (set == 0)
935 return 0;
936
937 set = expand_field_assignment (set);
938 src = SET_SRC (set), dest = SET_DEST (set);
939
940 /* Don't eliminate a store in the stack pointer. */
941 if (dest == stack_pointer_rtx
230d793d
RS
942 /* If we couldn't eliminate a field assignment, we can't combine. */
943 || GET_CODE (dest) == ZERO_EXTRACT || GET_CODE (dest) == STRICT_LOW_PART
944 /* Don't combine with an insn that sets a register to itself if it has
945 a REG_EQUAL note. This may be part of a REG_NO_CONFLICT sequence. */
5f4f0e22 946 || (rtx_equal_p (src, dest) && find_reg_note (insn, REG_EQUAL, NULL_RTX))
230d793d
RS
947 /* Can't merge a function call. */
948 || GET_CODE (src) == CALL
cd5e8f1f 949 /* Don't eliminate a function call argument. */
4dca5ec5
RK
950 || (GET_CODE (i3) == CALL_INSN
951 && (find_reg_fusage (i3, USE, dest)
952 || (GET_CODE (dest) == REG
953 && REGNO (dest) < FIRST_PSEUDO_REGISTER
954 && global_regs[REGNO (dest)])))
230d793d
RS
955 /* Don't substitute into an incremented register. */
956 || FIND_REG_INC_NOTE (i3, dest)
957 || (succ && FIND_REG_INC_NOTE (succ, dest))
ec35104c 958#if 0
230d793d 959 /* Don't combine the end of a libcall into anything. */
ec35104c
JL
960 /* ??? This gives worse code, and appears to be unnecessary, since no
961 pass after flow uses REG_LIBCALL/REG_RETVAL notes. Local-alloc does
962 use REG_RETVAL notes for noconflict blocks, but other code here
963 makes sure that those insns don't disappear. */
5f4f0e22 964 || find_reg_note (insn, REG_RETVAL, NULL_RTX)
ec35104c 965#endif
230d793d
RS
966 /* Make sure that DEST is not used after SUCC but before I3. */
967 || (succ && ! all_adjacent
968 && reg_used_between_p (dest, succ, i3))
969 /* Make sure that the value that is to be substituted for the register
970 does not use any registers whose values alter in between. However,
971 If the insns are adjacent, a use can't cross a set even though we
972 think it might (this can happen for a sequence of insns each setting
973 the same destination; reg_last_set of that register might point to
d81481d3
RK
974 a NOTE). If INSN has a REG_EQUIV note, the register is always
975 equivalent to the memory so the substitution is valid even if there
976 are intervening stores. Also, don't move a volatile asm or
977 UNSPEC_VOLATILE across any other insns. */
230d793d 978 || (! all_adjacent
d81481d3
RK
979 && (((GET_CODE (src) != MEM
980 || ! find_reg_note (insn, REG_EQUIV, src))
981 && use_crosses_set_p (src, INSN_CUID (insn)))
a66a10c7
RS
982 || (GET_CODE (src) == ASM_OPERANDS && MEM_VOLATILE_P (src))
983 || GET_CODE (src) == UNSPEC_VOLATILE))
230d793d
RS
984 /* If there is a REG_NO_CONFLICT note for DEST in I3 or SUCC, we get
985 better register allocation by not doing the combine. */
986 || find_reg_note (i3, REG_NO_CONFLICT, dest)
987 || (succ && find_reg_note (succ, REG_NO_CONFLICT, dest))
988 /* Don't combine across a CALL_INSN, because that would possibly
989 change whether the life span of some REGs crosses calls or not,
990 and it is a pain to update that information.
991 Exception: if source is a constant, moving it later can't hurt.
992 Accept that special case, because it helps -fforce-addr a lot. */
993 || (INSN_CUID (insn) < last_call_cuid && ! CONSTANT_P (src)))
994 return 0;
995
996 /* DEST must either be a REG or CC0. */
997 if (GET_CODE (dest) == REG)
998 {
999 /* If register alignment is being enforced for multi-word items in all
1000 cases except for parameters, it is possible to have a register copy
1001 insn referencing a hard register that is not allowed to contain the
1002 mode being copied and which would not be valid as an operand of most
1003 insns. Eliminate this problem by not combining with such an insn.
1004
1005 Also, on some machines we don't want to extend the life of a hard
4d2c432d
RK
1006 register.
1007
1008 This is the same test done in can_combine except that we don't test
1009 if SRC is a CALL operation to permit a hard register with
1010 SMALL_REGISTER_CLASSES, and that we have to take all_adjacent
1011 into account. */
230d793d
RS
1012
1013 if (GET_CODE (src) == REG
1014 && ((REGNO (dest) < FIRST_PSEUDO_REGISTER
1015 && ! HARD_REGNO_MODE_OK (REGNO (dest), GET_MODE (dest)))
c448a43e
RK
1016 /* Don't extend the life of a hard register unless it is
1017 user variable (if we have few registers) or it can't
1018 fit into the desired register (meaning something special
ecd40809
RK
1019 is going on).
1020 Also avoid substituting a return register into I3, because
1021 reload can't handle a conflict with constraints of other
1022 inputs. */
230d793d 1023 || (REGNO (src) < FIRST_PSEUDO_REGISTER
c448a43e 1024 && (! HARD_REGNO_MODE_OK (REGNO (src), GET_MODE (src))
f95182a4
ILT
1025 || (SMALL_REGISTER_CLASSES
1026 && ((! all_adjacent && ! REG_USERVAR_P (src))
1027 || (FUNCTION_VALUE_REGNO_P (REGNO (src))
e9a25f70 1028 && ! REG_USERVAR_P (src))))))))
230d793d
RS
1029 return 0;
1030 }
1031 else if (GET_CODE (dest) != CC0)
1032 return 0;
1033
5f96750d
RS
1034 /* Don't substitute for a register intended as a clobberable operand.
1035 Similarly, don't substitute an expression containing a register that
1036 will be clobbered in I3. */
230d793d
RS
1037 if (GET_CODE (PATTERN (i3)) == PARALLEL)
1038 for (i = XVECLEN (PATTERN (i3), 0) - 1; i >= 0; i--)
1039 if (GET_CODE (XVECEXP (PATTERN (i3), 0, i)) == CLOBBER
5f96750d
RS
1040 && (reg_overlap_mentioned_p (XEXP (XVECEXP (PATTERN (i3), 0, i), 0),
1041 src)
1042 || rtx_equal_p (XEXP (XVECEXP (PATTERN (i3), 0, i), 0), dest)))
230d793d
RS
1043 return 0;
1044
1045 /* If INSN contains anything volatile, or is an `asm' (whether volatile
d276f2bb 1046 or not), reject, unless nothing volatile comes between it and I3 */
230d793d
RS
1047
1048 if (GET_CODE (src) == ASM_OPERANDS || volatile_refs_p (src))
d276f2bb
CM
1049 {
1050 /* Make sure succ doesn't contain a volatile reference. */
1051 if (succ != 0 && volatile_refs_p (PATTERN (succ)))
1052 return 0;
1053
1054 for (p = NEXT_INSN (insn); p != i3; p = NEXT_INSN (p))
1055 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
1056 && p != succ && volatile_refs_p (PATTERN (p)))
1057 return 0;
1058 }
230d793d 1059
b79ee7eb
RH
1060 /* If INSN is an asm, and DEST is a hard register, reject, since it has
1061 to be an explicit register variable, and was chosen for a reason. */
1062
1063 if (GET_CODE (src) == ASM_OPERANDS
1064 && GET_CODE (dest) == REG && REGNO (dest) < FIRST_PSEUDO_REGISTER)
1065 return 0;
1066
4b2cb4a2
RS
1067 /* If there are any volatile insns between INSN and I3, reject, because
1068 they might affect machine state. */
1069
1070 for (p = NEXT_INSN (insn); p != i3; p = NEXT_INSN (p))
1071 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
1072 && p != succ && volatile_insn_p (PATTERN (p)))
1073 return 0;
1074
230d793d
RS
1075 /* If INSN or I2 contains an autoincrement or autodecrement,
1076 make sure that register is not used between there and I3,
1077 and not already used in I3 either.
1078 Also insist that I3 not be a jump; if it were one
1079 and the incremented register were spilled, we would lose. */
1080
1081#ifdef AUTO_INC_DEC
1082 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1083 if (REG_NOTE_KIND (link) == REG_INC
1084 && (GET_CODE (i3) == JUMP_INSN
1085 || reg_used_between_p (XEXP (link, 0), insn, i3)
1086 || reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i3))))
1087 return 0;
1088#endif
1089
1090#ifdef HAVE_cc0
1091 /* Don't combine an insn that follows a CC0-setting insn.
1092 An insn that uses CC0 must not be separated from the one that sets it.
1093 We do, however, allow I2 to follow a CC0-setting insn if that insn
1094 is passed as I1; in that case it will be deleted also.
1095 We also allow combining in this case if all the insns are adjacent
1096 because that would leave the two CC0 insns adjacent as well.
1097 It would be more logical to test whether CC0 occurs inside I1 or I2,
1098 but that would be much slower, and this ought to be equivalent. */
1099
1100 p = prev_nonnote_insn (insn);
1101 if (p && p != pred && GET_CODE (p) == INSN && sets_cc0_p (PATTERN (p))
1102 && ! all_adjacent)
1103 return 0;
1104#endif
1105
1106 /* If we get here, we have passed all the tests and the combination is
1107 to be allowed. */
1108
1109 *pdest = dest;
1110 *psrc = src;
1111
1112 return 1;
1113}
1114\f
956d6950
JL
1115/* Check if PAT is an insn - or a part of it - used to set up an
1116 argument for a function in a hard register. */
1117
1118static int
1119sets_function_arg_p (pat)
1120 rtx pat;
1121{
1122 int i;
1123 rtx inner_dest;
1124
1125 switch (GET_CODE (pat))
1126 {
1127 case INSN:
1128 return sets_function_arg_p (PATTERN (pat));
1129
1130 case PARALLEL:
1131 for (i = XVECLEN (pat, 0); --i >= 0;)
1132 if (sets_function_arg_p (XVECEXP (pat, 0, i)))
1133 return 1;
1134
1135 break;
1136
1137 case SET:
1138 inner_dest = SET_DEST (pat);
1139 while (GET_CODE (inner_dest) == STRICT_LOW_PART
1140 || GET_CODE (inner_dest) == SUBREG
1141 || GET_CODE (inner_dest) == ZERO_EXTRACT)
1142 inner_dest = XEXP (inner_dest, 0);
1143
1144 return (GET_CODE (inner_dest) == REG
1145 && REGNO (inner_dest) < FIRST_PSEUDO_REGISTER
1146 && FUNCTION_ARG_REGNO_P (REGNO (inner_dest)));
1d300e19
KG
1147
1148 default:
1149 break;
956d6950
JL
1150 }
1151
1152 return 0;
1153}
1154
230d793d
RS
1155/* LOC is the location within I3 that contains its pattern or the component
1156 of a PARALLEL of the pattern. We validate that it is valid for combining.
1157
1158 One problem is if I3 modifies its output, as opposed to replacing it
1159 entirely, we can't allow the output to contain I2DEST or I1DEST as doing
1160 so would produce an insn that is not equivalent to the original insns.
1161
1162 Consider:
1163
1164 (set (reg:DI 101) (reg:DI 100))
1165 (set (subreg:SI (reg:DI 101) 0) <foo>)
1166
1167 This is NOT equivalent to:
1168
1169 (parallel [(set (subreg:SI (reg:DI 100) 0) <foo>)
1170 (set (reg:DI 101) (reg:DI 100))])
1171
1172 Not only does this modify 100 (in which case it might still be valid
1173 if 100 were dead in I2), it sets 101 to the ORIGINAL value of 100.
1174
1175 We can also run into a problem if I2 sets a register that I1
1176 uses and I1 gets directly substituted into I3 (not via I2). In that
1177 case, we would be getting the wrong value of I2DEST into I3, so we
1178 must reject the combination. This case occurs when I2 and I1 both
1179 feed into I3, rather than when I1 feeds into I2, which feeds into I3.
1180 If I1_NOT_IN_SRC is non-zero, it means that finding I1 in the source
1181 of a SET must prevent combination from occurring.
1182
e9a25f70 1183 On machines where SMALL_REGISTER_CLASSES is non-zero, we don't combine
c448a43e
RK
1184 if the destination of a SET is a hard register that isn't a user
1185 variable.
230d793d
RS
1186
1187 Before doing the above check, we first try to expand a field assignment
1188 into a set of logical operations.
1189
1190 If PI3_DEST_KILLED is non-zero, it is a pointer to a location in which
1191 we place a register that is both set and used within I3. If more than one
1192 such register is detected, we fail.
1193
1194 Return 1 if the combination is valid, zero otherwise. */
1195
1196static int
1197combinable_i3pat (i3, loc, i2dest, i1dest, i1_not_in_src, pi3dest_killed)
1198 rtx i3;
1199 rtx *loc;
1200 rtx i2dest;
1201 rtx i1dest;
1202 int i1_not_in_src;
1203 rtx *pi3dest_killed;
1204{
1205 rtx x = *loc;
1206
1207 if (GET_CODE (x) == SET)
1208 {
1209 rtx set = expand_field_assignment (x);
1210 rtx dest = SET_DEST (set);
1211 rtx src = SET_SRC (set);
29a82058
JL
1212 rtx inner_dest = dest;
1213
1214#if 0
1215 rtx inner_src = src;
1216#endif
230d793d
RS
1217
1218 SUBST (*loc, set);
1219
1220 while (GET_CODE (inner_dest) == STRICT_LOW_PART
1221 || GET_CODE (inner_dest) == SUBREG
1222 || GET_CODE (inner_dest) == ZERO_EXTRACT)
1223 inner_dest = XEXP (inner_dest, 0);
1224
1225 /* We probably don't need this any more now that LIMIT_RELOAD_CLASS
1226 was added. */
1227#if 0
1228 while (GET_CODE (inner_src) == STRICT_LOW_PART
1229 || GET_CODE (inner_src) == SUBREG
1230 || GET_CODE (inner_src) == ZERO_EXTRACT)
1231 inner_src = XEXP (inner_src, 0);
1232
1233 /* If it is better that two different modes keep two different pseudos,
1234 avoid combining them. This avoids producing the following pattern
1235 on a 386:
1236 (set (subreg:SI (reg/v:QI 21) 0)
1237 (lshiftrt:SI (reg/v:SI 20)
1238 (const_int 24)))
1239 If that were made, reload could not handle the pair of
1240 reg 20/21, since it would try to get any GENERAL_REGS
1241 but some of them don't handle QImode. */
1242
1243 if (rtx_equal_p (inner_src, i2dest)
1244 && GET_CODE (inner_dest) == REG
1245 && ! MODES_TIEABLE_P (GET_MODE (i2dest), GET_MODE (inner_dest)))
1246 return 0;
1247#endif
1248
1249 /* Check for the case where I3 modifies its output, as
1250 discussed above. */
1251 if ((inner_dest != dest
1252 && (reg_overlap_mentioned_p (i2dest, inner_dest)
1253 || (i1dest && reg_overlap_mentioned_p (i1dest, inner_dest))))
956d6950 1254
3f508eca
RK
1255 /* This is the same test done in can_combine_p except that we
1256 allow a hard register with SMALL_REGISTER_CLASSES if SRC is a
956d6950
JL
1257 CALL operation. Moreover, we can't test all_adjacent; we don't
1258 have to, since this instruction will stay in place, thus we are
1259 not considering increasing the lifetime of INNER_DEST.
1260
1261 Also, if this insn sets a function argument, combining it with
1262 something that might need a spill could clobber a previous
1263 function argument; the all_adjacent test in can_combine_p also
1264 checks this; here, we do a more specific test for this case. */
1265
230d793d 1266 || (GET_CODE (inner_dest) == REG
dfbe1b2f 1267 && REGNO (inner_dest) < FIRST_PSEUDO_REGISTER
c448a43e
RK
1268 && (! HARD_REGNO_MODE_OK (REGNO (inner_dest),
1269 GET_MODE (inner_dest))
e9a25f70
JL
1270 || (SMALL_REGISTER_CLASSES && GET_CODE (src) != CALL
1271 && ! REG_USERVAR_P (inner_dest)
956d6950
JL
1272 && (FUNCTION_VALUE_REGNO_P (REGNO (inner_dest))
1273 || (FUNCTION_ARG_REGNO_P (REGNO (inner_dest))
1274 && i3 != 0
1275 && sets_function_arg_p (prev_nonnote_insn (i3)))))))
230d793d
RS
1276 || (i1_not_in_src && reg_overlap_mentioned_p (i1dest, src)))
1277 return 0;
1278
1279 /* If DEST is used in I3, it is being killed in this insn,
36a9c2e9
JL
1280 so record that for later.
1281 Never add REG_DEAD notes for the FRAME_POINTER_REGNUM or the
1282 STACK_POINTER_REGNUM, since these are always considered to be
1283 live. Similarly for ARG_POINTER_REGNUM if it is fixed. */
230d793d 1284 if (pi3dest_killed && GET_CODE (dest) == REG
36a9c2e9
JL
1285 && reg_referenced_p (dest, PATTERN (i3))
1286 && REGNO (dest) != FRAME_POINTER_REGNUM
6d7096b0
DE
1287#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
1288 && REGNO (dest) != HARD_FRAME_POINTER_REGNUM
1289#endif
36a9c2e9
JL
1290#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
1291 && (REGNO (dest) != ARG_POINTER_REGNUM
1292 || ! fixed_regs [REGNO (dest)])
1293#endif
1294 && REGNO (dest) != STACK_POINTER_REGNUM)
230d793d
RS
1295 {
1296 if (*pi3dest_killed)
1297 return 0;
1298
1299 *pi3dest_killed = dest;
1300 }
1301 }
1302
1303 else if (GET_CODE (x) == PARALLEL)
1304 {
1305 int i;
1306
1307 for (i = 0; i < XVECLEN (x, 0); i++)
1308 if (! combinable_i3pat (i3, &XVECEXP (x, 0, i), i2dest, i1dest,
1309 i1_not_in_src, pi3dest_killed))
1310 return 0;
1311 }
1312
1313 return 1;
1314}
1315\f
1316/* Try to combine the insns I1 and I2 into I3.
1317 Here I1 and I2 appear earlier than I3.
1318 I1 can be zero; then we combine just I2 into I3.
1319
 1320 If we are combining three insns and the resulting insn is not recognized,
1321 try splitting it into two insns. If that happens, I2 and I3 are retained
1322 and I1 is pseudo-deleted by turning it into a NOTE. Otherwise, I1 and I2
1323 are pseudo-deleted.
1324
abe6e52f
RK
1325 Return 0 if the combination does not work. Then nothing is changed.
1326 If we did the combination, return the insn at which combine should
1327 resume scanning. */
230d793d
RS
1328
1329static rtx
1330try_combine (i3, i2, i1)
1331 register rtx i3, i2, i1;
1332{
1333 /* New patterns for I3 and I3, respectively. */
1334 rtx newpat, newi2pat = 0;
1335 /* Indicates need to preserve SET in I1 or I2 in I3 if it is not dead. */
1336 int added_sets_1, added_sets_2;
1337 /* Total number of SETs to put into I3. */
1338 int total_sets;
 1339 /* Nonzero if I2's body now appears in I3. */
1340 int i2_is_used;
1341 /* INSN_CODEs for new I3, new I2, and user of condition code. */
6a651371 1342 int insn_code_number, i2_code_number = 0, other_code_number = 0;
230d793d
RS
1343 /* Contains I3 if the destination of I3 is used in its source, which means
1344 that the old life of I3 is being killed. If that usage is placed into
1345 I2 and not in I3, a REG_DEAD note must be made. */
1346 rtx i3dest_killed = 0;
1347 /* SET_DEST and SET_SRC of I2 and I1. */
1348 rtx i2dest, i2src, i1dest = 0, i1src = 0;
1349 /* PATTERN (I2), or a copy of it in certain cases. */
1350 rtx i2pat;
1351 /* Indicates if I2DEST or I1DEST is in I2SRC or I1_SRC. */
c4e861e8 1352 int i2dest_in_i2src = 0, i1dest_in_i1src = 0, i2dest_in_i1src = 0;
230d793d
RS
1353 int i1_feeds_i3 = 0;
1354 /* Notes that must be added to REG_NOTES in I3 and I2. */
1355 rtx new_i3_notes, new_i2_notes;
176c9e6b
JW
1356 /* Notes that we substituted I3 into I2 instead of the normal case. */
1357 int i3_subst_into_i2 = 0;
df7d75de
RK
1358 /* Notes that I1, I2 or I3 is a MULT operation. */
1359 int have_mult = 0;
230d793d
RS
1360
1361 int maxreg;
1362 rtx temp;
1363 register rtx link;
1364 int i;
1365
1366 /* If any of I1, I2, and I3 isn't really an insn, we can't do anything.
1367 This can occur when flow deletes an insn that it has merged into an
1368 auto-increment address. We also can't do anything if I3 has a
1369 REG_LIBCALL note since we don't want to disrupt the contiguity of a
1370 libcall. */
1371
1372 if (GET_RTX_CLASS (GET_CODE (i3)) != 'i'
1373 || GET_RTX_CLASS (GET_CODE (i2)) != 'i'
1374 || (i1 && GET_RTX_CLASS (GET_CODE (i1)) != 'i')
ec35104c
JL
1375#if 0
1376 /* ??? This gives worse code, and appears to be unnecessary, since no
1377 pass after flow uses REG_LIBCALL/REG_RETVAL notes. */
1378 || find_reg_note (i3, REG_LIBCALL, NULL_RTX)
1379#endif
1380)
230d793d
RS
1381 return 0;
1382
1383 combine_attempts++;
1384
241cea85 1385 undobuf.undos = undobuf.previous_undos = 0;
230d793d
RS
1386 undobuf.other_insn = 0;
1387
1388 /* Save the current high-water-mark so we can free storage if we didn't
1389 accept this combination. */
1390 undobuf.storage = (char *) oballoc (0);
1391
6e25d159
RK
1392 /* Reset the hard register usage information. */
1393 CLEAR_HARD_REG_SET (newpat_used_regs);
1394
230d793d
RS
1395 /* If I1 and I2 both feed I3, they can be in any order. To simplify the
1396 code below, set I1 to be the earlier of the two insns. */
1397 if (i1 && INSN_CUID (i1) > INSN_CUID (i2))
1398 temp = i1, i1 = i2, i2 = temp;
1399
abe6e52f 1400 added_links_insn = 0;
137e889e 1401
230d793d
RS
1402 /* First check for one important special-case that the code below will
1403 not handle. Namely, the case where I1 is zero, I2 has multiple sets,
1404 and I3 is a SET whose SET_SRC is a SET_DEST in I2. In that case,
1405 we may be able to replace that destination with the destination of I3.
1406 This occurs in the common code where we compute both a quotient and
1407 remainder into a structure, in which case we want to do the computation
1408 directly into the structure to avoid register-register copies.
1409
1410 We make very conservative checks below and only try to handle the
1411 most common cases of this. For example, we only handle the case
1412 where I2 and I3 are adjacent to avoid making difficult register
1413 usage tests. */
1414
1415 if (i1 == 0 && GET_CODE (i3) == INSN && GET_CODE (PATTERN (i3)) == SET
1416 && GET_CODE (SET_SRC (PATTERN (i3))) == REG
1417 && REGNO (SET_SRC (PATTERN (i3))) >= FIRST_PSEUDO_REGISTER
f95182a4 1418 && (! SMALL_REGISTER_CLASSES
e9a25f70
JL
1419 || (GET_CODE (SET_DEST (PATTERN (i3))) != REG
1420 || REGNO (SET_DEST (PATTERN (i3))) >= FIRST_PSEUDO_REGISTER
1421 || REG_USERVAR_P (SET_DEST (PATTERN (i3)))))
230d793d
RS
1422 && find_reg_note (i3, REG_DEAD, SET_SRC (PATTERN (i3)))
1423 && GET_CODE (PATTERN (i2)) == PARALLEL
1424 && ! side_effects_p (SET_DEST (PATTERN (i3)))
5089e22e
RS
1425 /* If the dest of I3 is a ZERO_EXTRACT or STRICT_LOW_PART, the code
1426 below would need to check what is inside (and reg_overlap_mentioned_p
1427 doesn't support those codes anyway). Don't allow those destinations;
1428 the resulting insn isn't likely to be recognized anyway. */
1429 && GET_CODE (SET_DEST (PATTERN (i3))) != ZERO_EXTRACT
1430 && GET_CODE (SET_DEST (PATTERN (i3))) != STRICT_LOW_PART
230d793d
RS
1431 && ! reg_overlap_mentioned_p (SET_SRC (PATTERN (i3)),
1432 SET_DEST (PATTERN (i3)))
1433 && next_real_insn (i2) == i3)
5089e22e
RS
1434 {
1435 rtx p2 = PATTERN (i2);
1436
1437 /* Make sure that the destination of I3,
1438 which we are going to substitute into one output of I2,
1439 is not used within another output of I2. We must avoid making this:
1440 (parallel [(set (mem (reg 69)) ...)
1441 (set (reg 69) ...)])
1442 which is not well-defined as to order of actions.
1443 (Besides, reload can't handle output reloads for this.)
1444
1445 The problem can also happen if the dest of I3 is a memory ref,
1446 if another dest in I2 is an indirect memory ref. */
1447 for (i = 0; i < XVECLEN (p2, 0); i++)
7ca919b7
RK
1448 if ((GET_CODE (XVECEXP (p2, 0, i)) == SET
1449 || GET_CODE (XVECEXP (p2, 0, i)) == CLOBBER)
5089e22e
RS
1450 && reg_overlap_mentioned_p (SET_DEST (PATTERN (i3)),
1451 SET_DEST (XVECEXP (p2, 0, i))))
1452 break;
230d793d 1453
5089e22e
RS
1454 if (i == XVECLEN (p2, 0))
1455 for (i = 0; i < XVECLEN (p2, 0); i++)
1456 if (SET_DEST (XVECEXP (p2, 0, i)) == SET_SRC (PATTERN (i3)))
1457 {
1458 combine_merges++;
230d793d 1459
5089e22e
RS
1460 subst_insn = i3;
1461 subst_low_cuid = INSN_CUID (i2);
230d793d 1462
c4e861e8 1463 added_sets_2 = added_sets_1 = 0;
5089e22e 1464 i2dest = SET_SRC (PATTERN (i3));
230d793d 1465
5089e22e
RS
1466 /* Replace the dest in I2 with our dest and make the resulting
1467 insn the new pattern for I3. Then skip to where we
1468 validate the pattern. Everything was set up above. */
1469 SUBST (SET_DEST (XVECEXP (p2, 0, i)),
1470 SET_DEST (PATTERN (i3)));
1471
1472 newpat = p2;
176c9e6b 1473 i3_subst_into_i2 = 1;
5089e22e
RS
1474 goto validate_replacement;
1475 }
1476 }
230d793d
RS
1477
1478#ifndef HAVE_cc0
1479 /* If we have no I1 and I2 looks like:
1480 (parallel [(set (reg:CC X) (compare:CC OP (const_int 0)))
1481 (set Y OP)])
1482 make up a dummy I1 that is
1483 (set Y OP)
1484 and change I2 to be
1485 (set (reg:CC X) (compare:CC Y (const_int 0)))
1486
1487 (We can ignore any trailing CLOBBERs.)
1488
1489 This undoes a previous combination and allows us to match a branch-and-
1490 decrement insn. */
1491
1492 if (i1 == 0 && GET_CODE (PATTERN (i2)) == PARALLEL
1493 && XVECLEN (PATTERN (i2), 0) >= 2
1494 && GET_CODE (XVECEXP (PATTERN (i2), 0, 0)) == SET
1495 && (GET_MODE_CLASS (GET_MODE (SET_DEST (XVECEXP (PATTERN (i2), 0, 0))))
1496 == MODE_CC)
1497 && GET_CODE (SET_SRC (XVECEXP (PATTERN (i2), 0, 0))) == COMPARE
1498 && XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 1) == const0_rtx
1499 && GET_CODE (XVECEXP (PATTERN (i2), 0, 1)) == SET
1500 && GET_CODE (SET_DEST (XVECEXP (PATTERN (i2), 0, 1))) == REG
1501 && rtx_equal_p (XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 0),
1502 SET_SRC (XVECEXP (PATTERN (i2), 0, 1))))
1503 {
1504 for (i = XVECLEN (PATTERN (i2), 0) - 1; i >= 2; i--)
1505 if (GET_CODE (XVECEXP (PATTERN (i2), 0, i)) != CLOBBER)
1506 break;
1507
1508 if (i == 1)
1509 {
1510 /* We make I1 with the same INSN_UID as I2. This gives it
1511 the same INSN_CUID for value tracking. Our fake I1 will
1512 never appear in the insn stream so giving it the same INSN_UID
1513 as I2 will not cause a problem. */
1514
0d9641d1 1515 subst_prev_insn = i1
38a448ca
RH
1516 = gen_rtx_INSN (VOIDmode, INSN_UID (i2), NULL_RTX, i2,
1517 XVECEXP (PATTERN (i2), 0, 1), -1, NULL_RTX,
1518 NULL_RTX);
230d793d
RS
1519
1520 SUBST (PATTERN (i2), XVECEXP (PATTERN (i2), 0, 0));
1521 SUBST (XEXP (SET_SRC (PATTERN (i2)), 0),
1522 SET_DEST (PATTERN (i1)));
1523 }
1524 }
1525#endif
1526
1527 /* Verify that I2 and I1 are valid for combining. */
5f4f0e22
CH
1528 if (! can_combine_p (i2, i3, i1, NULL_RTX, &i2dest, &i2src)
1529 || (i1 && ! can_combine_p (i1, i3, NULL_RTX, i2, &i1dest, &i1src)))
230d793d
RS
1530 {
1531 undo_all ();
1532 return 0;
1533 }
1534
1535 /* Record whether I2DEST is used in I2SRC and similarly for the other
1536 cases. Knowing this will help in register status updating below. */
1537 i2dest_in_i2src = reg_overlap_mentioned_p (i2dest, i2src);
1538 i1dest_in_i1src = i1 && reg_overlap_mentioned_p (i1dest, i1src);
1539 i2dest_in_i1src = i1 && reg_overlap_mentioned_p (i2dest, i1src);
1540
916f14f1 1541 /* See if I1 directly feeds into I3. It does if I1DEST is not used
230d793d
RS
1542 in I2SRC. */
1543 i1_feeds_i3 = i1 && ! reg_overlap_mentioned_p (i1dest, i2src);
1544
1545 /* Ensure that I3's pattern can be the destination of combines. */
1546 if (! combinable_i3pat (i3, &PATTERN (i3), i2dest, i1dest,
1547 i1 && i2dest_in_i1src && i1_feeds_i3,
1548 &i3dest_killed))
1549 {
1550 undo_all ();
1551 return 0;
1552 }
1553
df7d75de
RK
1554 /* See if any of the insns is a MULT operation. Unless one is, we will
1555 reject a combination that is, since it must be slower. Be conservative
1556 here. */
1557 if (GET_CODE (i2src) == MULT
1558 || (i1 != 0 && GET_CODE (i1src) == MULT)
1559 || (GET_CODE (PATTERN (i3)) == SET
1560 && GET_CODE (SET_SRC (PATTERN (i3))) == MULT))
1561 have_mult = 1;
1562
230d793d
RS
1563 /* If I3 has an inc, then give up if I1 or I2 uses the reg that is inc'd.
1564 We used to do this EXCEPT in one case: I3 has a post-inc in an
1565 output operand. However, that exception can give rise to insns like
1566 mov r3,(r3)+
1567 which is a famous insn on the PDP-11 where the value of r3 used as the
5089e22e 1568 source was model-dependent. Avoid this sort of thing. */
230d793d
RS
1569
1570#if 0
1571 if (!(GET_CODE (PATTERN (i3)) == SET
1572 && GET_CODE (SET_SRC (PATTERN (i3))) == REG
1573 && GET_CODE (SET_DEST (PATTERN (i3))) == MEM
1574 && (GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_INC
1575 || GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_DEC)))
1576 /* It's not the exception. */
1577#endif
1578#ifdef AUTO_INC_DEC
1579 for (link = REG_NOTES (i3); link; link = XEXP (link, 1))
1580 if (REG_NOTE_KIND (link) == REG_INC
1581 && (reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i2))
1582 || (i1 != 0
1583 && reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i1)))))
1584 {
1585 undo_all ();
1586 return 0;
1587 }
1588#endif
1589
1590 /* See if the SETs in I1 or I2 need to be kept around in the merged
1591 instruction: whenever the value set there is still needed past I3.
1592 For the SETs in I2, this is easy: we see if I2DEST dies or is set in I3.
1593
1594 For the SET in I1, we have two cases: If I1 and I2 independently
1595 feed into I3, the set in I1 needs to be kept around if I1DEST dies
1596 or is set in I3. Otherwise (if I1 feeds I2 which feeds I3), the set
1597 in I1 needs to be kept around unless I1DEST dies or is set in either
1598 I2 or I3. We can distinguish these cases by seeing if I2SRC mentions
1599 I1DEST. If so, we know I1 feeds into I2. */
1600
1601 added_sets_2 = ! dead_or_set_p (i3, i2dest);
1602
1603 added_sets_1
1604 = i1 && ! (i1_feeds_i3 ? dead_or_set_p (i3, i1dest)
1605 : (dead_or_set_p (i3, i1dest) || dead_or_set_p (i2, i1dest)));
1606
1607 /* If the set in I2 needs to be kept around, we must make a copy of
1608 PATTERN (I2), so that when we substitute I1SRC for I1DEST in
5089e22e 1609 PATTERN (I2), we are only substituting for the original I1DEST, not into
230d793d
RS
1610 an already-substituted copy. This also prevents making self-referential
1611 rtx. If I2 is a PARALLEL, we just need the piece that assigns I2SRC to
1612 I2DEST. */
1613
1614 i2pat = (GET_CODE (PATTERN (i2)) == PARALLEL
38a448ca 1615 ? gen_rtx_SET (VOIDmode, i2dest, i2src)
230d793d
RS
1616 : PATTERN (i2));
1617
1618 if (added_sets_2)
1619 i2pat = copy_rtx (i2pat);
1620
1621 combine_merges++;
1622
1623 /* Substitute in the latest insn for the regs set by the earlier ones. */
1624
1625 maxreg = max_reg_num ();
1626
1627 subst_insn = i3;
230d793d
RS
1628
1629 /* It is possible that the source of I2 or I1 may be performing an
1630 unneeded operation, such as a ZERO_EXTEND of something that is known
1631 to have the high part zero. Handle that case by letting subst look at
1632 the innermost one of them.
1633
1634 Another way to do this would be to have a function that tries to
1635 simplify a single insn instead of merging two or more insns. We don't
1636 do this because of the potential of infinite loops and because
1637 of the potential extra memory required. However, doing it the way
1638 we are is a bit of a kludge and doesn't catch all cases.
1639
1640 But only do this if -fexpensive-optimizations since it slows things down
1641 and doesn't usually win. */
1642
1643 if (flag_expensive_optimizations)
1644 {
1645 /* Pass pc_rtx so no substitutions are done, just simplifications.
1646 The cases that we are interested in here do not involve the few
1647      cases where is_replaced is checked.  */
1648 if (i1)
d0ab8cd3
RK
1649 {
1650 subst_low_cuid = INSN_CUID (i1);
1651 i1src = subst (i1src, pc_rtx, pc_rtx, 0, 0);
1652 }
230d793d 1653 else
d0ab8cd3
RK
1654 {
1655 subst_low_cuid = INSN_CUID (i2);
1656 i2src = subst (i2src, pc_rtx, pc_rtx, 0, 0);
1657 }
230d793d 1658
241cea85 1659 undobuf.previous_undos = undobuf.undos;
230d793d
RS
1660 }
1661
1662#ifndef HAVE_cc0
1663 /* Many machines that don't use CC0 have insns that can both perform an
1664 arithmetic operation and set the condition code. These operations will
1665 be represented as a PARALLEL with the first element of the vector
1666 being a COMPARE of an arithmetic operation with the constant zero.
1667 The second element of the vector will set some pseudo to the result
1668 of the same arithmetic operation. If we simplify the COMPARE, we won't
1669 match such a pattern and so will generate an extra insn. Here we test
1670 for this case, where both the comparison and the operation result are
1671 needed, and make the PARALLEL by just replacing I2DEST in I3SRC with
1672 I2SRC. Later we will make the PARALLEL that contains I2. */
1673
1674 if (i1 == 0 && added_sets_2 && GET_CODE (PATTERN (i3)) == SET
1675 && GET_CODE (SET_SRC (PATTERN (i3))) == COMPARE
1676 && XEXP (SET_SRC (PATTERN (i3)), 1) == const0_rtx
1677 && rtx_equal_p (XEXP (SET_SRC (PATTERN (i3)), 0), i2dest))
1678 {
081f5e7e 1679#ifdef EXTRA_CC_MODES
230d793d
RS
1680 rtx *cc_use;
1681 enum machine_mode compare_mode;
081f5e7e 1682#endif
230d793d
RS
1683
1684 newpat = PATTERN (i3);
1685 SUBST (XEXP (SET_SRC (newpat), 0), i2src);
1686
1687 i2_is_used = 1;
1688
1689#ifdef EXTRA_CC_MODES
1690 /* See if a COMPARE with the operand we substituted in should be done
1691 with the mode that is currently being used. If not, do the same
1692 processing we do in `subst' for a SET; namely, if the destination
1693 is used only once, try to replace it with a register of the proper
1694 mode and also replace the COMPARE. */
1695 if (undobuf.other_insn == 0
1696 && (cc_use = find_single_use (SET_DEST (newpat), i3,
1697 &undobuf.other_insn))
77fa0940
RK
1698 && ((compare_mode = SELECT_CC_MODE (GET_CODE (*cc_use),
1699 i2src, const0_rtx))
230d793d
RS
1700 != GET_MODE (SET_DEST (newpat))))
1701 {
1702 int regno = REGNO (SET_DEST (newpat));
38a448ca 1703 rtx new_dest = gen_rtx_REG (compare_mode, regno);
230d793d
RS
1704
1705 if (regno < FIRST_PSEUDO_REGISTER
b1f21e0a 1706 || (REG_N_SETS (regno) == 1 && ! added_sets_2
230d793d
RS
1707 && ! REG_USERVAR_P (SET_DEST (newpat))))
1708 {
1709 if (regno >= FIRST_PSEUDO_REGISTER)
1710 SUBST (regno_reg_rtx[regno], new_dest);
1711
1712 SUBST (SET_DEST (newpat), new_dest);
1713 SUBST (XEXP (*cc_use, 0), new_dest);
1714 SUBST (SET_SRC (newpat),
1715 gen_rtx_combine (COMPARE, compare_mode,
1716 i2src, const0_rtx));
1717 }
1718 else
1719 undobuf.other_insn = 0;
1720 }
1721#endif
1722 }
1723 else
1724#endif
1725 {
1726 n_occurrences = 0; /* `subst' counts here */
1727
1728 /* If I1 feeds into I2 (not into I3) and I1DEST is in I1SRC, we
1729 need to make a unique copy of I2SRC each time we substitute it
1730 to avoid self-referential rtl. */
1731
d0ab8cd3 1732 subst_low_cuid = INSN_CUID (i2);
230d793d
RS
1733 newpat = subst (PATTERN (i3), i2dest, i2src, 0,
1734 ! i1_feeds_i3 && i1dest_in_i1src);
241cea85 1735 undobuf.previous_undos = undobuf.undos;
230d793d
RS
1736
1737 /* Record whether i2's body now appears within i3's body. */
1738 i2_is_used = n_occurrences;
1739 }
1740
1741 /* If we already got a failure, don't try to do more. Otherwise,
1742 try to substitute in I1 if we have it. */
1743
1744 if (i1 && GET_CODE (newpat) != CLOBBER)
1745 {
1746 /* Before we can do this substitution, we must redo the test done
1747 above (see detailed comments there) that ensures that I1DEST
0f41302f 1748 isn't mentioned in any SETs in NEWPAT that are field assignments. */
230d793d 1749
5f4f0e22
CH
1750 if (! combinable_i3pat (NULL_RTX, &newpat, i1dest, NULL_RTX,
1751 0, NULL_PTR))
230d793d
RS
1752 {
1753 undo_all ();
1754 return 0;
1755 }
1756
1757 n_occurrences = 0;
d0ab8cd3 1758 subst_low_cuid = INSN_CUID (i1);
230d793d 1759 newpat = subst (newpat, i1dest, i1src, 0, 0);
241cea85 1760 undobuf.previous_undos = undobuf.undos;
230d793d
RS
1761 }
1762
916f14f1
RK
1763 /* Fail if an autoincrement side-effect has been duplicated. Be careful
1764 to count all the ways that I2SRC and I1SRC can be used. */
5f4f0e22 1765 if ((FIND_REG_INC_NOTE (i2, NULL_RTX) != 0
916f14f1 1766 && i2_is_used + added_sets_2 > 1)
5f4f0e22 1767 || (i1 != 0 && FIND_REG_INC_NOTE (i1, NULL_RTX) != 0
916f14f1
RK
1768 && (n_occurrences + added_sets_1 + (added_sets_2 && ! i1_feeds_i3)
1769 > 1))
230d793d
RS
1770 /* Fail if we tried to make a new register (we used to abort, but there's
1771 really no reason to). */
1772 || max_reg_num () != maxreg
1773 /* Fail if we couldn't do something and have a CLOBBER. */
df7d75de
RK
1774 || GET_CODE (newpat) == CLOBBER
1775 /* Fail if this new pattern is a MULT and we didn't have one before
1776 at the outer level. */
1777 || (GET_CODE (newpat) == SET && GET_CODE (SET_SRC (newpat)) == MULT
1778 && ! have_mult))
230d793d
RS
1779 {
1780 undo_all ();
1781 return 0;
1782 }
1783
1784 /* If the actions of the earlier insns must be kept
1785 in addition to substituting them into the latest one,
1786 we must make a new PARALLEL for the latest insn
1787      to hold the additional SETs.  */
1788
1789 if (added_sets_1 || added_sets_2)
1790 {
1791 combine_extras++;
1792
1793 if (GET_CODE (newpat) == PARALLEL)
1794 {
1795 rtvec old = XVEC (newpat, 0);
1796 total_sets = XVECLEN (newpat, 0) + added_sets_1 + added_sets_2;
38a448ca 1797 newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total_sets));
59888de2 1798 bcopy ((char *) &old->elem[0], (char *) XVEC (newpat, 0)->elem,
230d793d
RS
1799 sizeof (old->elem[0]) * old->num_elem);
1800 }
1801 else
1802 {
1803 rtx old = newpat;
1804 total_sets = 1 + added_sets_1 + added_sets_2;
38a448ca 1805 newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (total_sets));
230d793d
RS
1806 XVECEXP (newpat, 0, 0) = old;
1807 }
1808
1809 if (added_sets_1)
1810 XVECEXP (newpat, 0, --total_sets)
1811 = (GET_CODE (PATTERN (i1)) == PARALLEL
38a448ca 1812 ? gen_rtx_SET (VOIDmode, i1dest, i1src) : PATTERN (i1));
230d793d
RS
1813
1814 if (added_sets_2)
1815 {
1816 /* If there is no I1, use I2's body as is. We used to also not do
1817 the subst call below if I2 was substituted into I3,
1818 but that could lose a simplification. */
1819 if (i1 == 0)
1820 XVECEXP (newpat, 0, --total_sets) = i2pat;
1821 else
1822 /* See comment where i2pat is assigned. */
1823 XVECEXP (newpat, 0, --total_sets)
1824 = subst (i2pat, i1dest, i1src, 0, 0);
1825 }
1826 }
1827
1828 /* We come here when we are replacing a destination in I2 with the
1829 destination of I3. */
1830 validate_replacement:
1831
6e25d159
RK
1832 /* Note which hard regs this insn has as inputs. */
1833 mark_used_regs_combine (newpat);
1834
230d793d 1835 /* Is the result of combination a valid instruction? */
8e2f6e35 1836 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
230d793d
RS
1837
1838 /* If the result isn't valid, see if it is a PARALLEL of two SETs where
1839 the second SET's destination is a register that is unused. In that case,
1840 we just need the first SET. This can occur when simplifying a divmod
1841 insn. We *must* test for this case here because the code below that
1842 splits two independent SETs doesn't handle this case correctly when it
1843 updates the register status. Also check the case where the first
1844 SET's destination is unused. That would not cause incorrect code, but
1845 does cause an unneeded insn to remain. */
1846
1847 if (insn_code_number < 0 && GET_CODE (newpat) == PARALLEL
1848 && XVECLEN (newpat, 0) == 2
1849 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
1850 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
1851 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) == REG
1852 && find_reg_note (i3, REG_UNUSED, SET_DEST (XVECEXP (newpat, 0, 1)))
1853 && ! side_effects_p (SET_SRC (XVECEXP (newpat, 0, 1)))
1854 && asm_noperands (newpat) < 0)
1855 {
1856 newpat = XVECEXP (newpat, 0, 0);
8e2f6e35 1857 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
230d793d
RS
1858 }
1859
1860 else if (insn_code_number < 0 && GET_CODE (newpat) == PARALLEL
1861 && XVECLEN (newpat, 0) == 2
1862 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
1863 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
1864 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) == REG
1865 && find_reg_note (i3, REG_UNUSED, SET_DEST (XVECEXP (newpat, 0, 0)))
1866 && ! side_effects_p (SET_SRC (XVECEXP (newpat, 0, 0)))
1867 && asm_noperands (newpat) < 0)
1868 {
1869 newpat = XVECEXP (newpat, 0, 1);
8e2f6e35 1870 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
230d793d
RS
1871 }
1872
1873 /* If we were combining three insns and the result is a simple SET
1874 with no ASM_OPERANDS that wasn't recognized, try to split it into two
916f14f1
RK
1875 insns. There are two ways to do this. It can be split using a
1876 machine-specific method (like when you have an addition of a large
1877 constant) or by combine in the function find_split_point. */
1878
230d793d
RS
1879 if (i1 && insn_code_number < 0 && GET_CODE (newpat) == SET
1880 && asm_noperands (newpat) < 0)
1881 {
916f14f1 1882 rtx m_split, *split;
42495ca0 1883 rtx ni2dest = i2dest;
916f14f1
RK
1884
1885 /* See if the MD file can split NEWPAT. If it can't, see if letting it
42495ca0
RK
1886 use I2DEST as a scratch register will help. In the latter case,
1887 convert I2DEST to the mode of the source of NEWPAT if we can. */
916f14f1
RK
1888
1889 m_split = split_insns (newpat, i3);
a70c61d9
JW
1890
1891 /* We can only use I2DEST as a scratch reg if it doesn't overlap any
1892 inputs of NEWPAT. */
1893
1894 /* ??? If I2DEST is not safe, and I1DEST exists, then it would be
1895 possible to try that as a scratch reg. This would require adding
1896 more code to make it work though. */
1897
1898 if (m_split == 0 && ! reg_overlap_mentioned_p (ni2dest, newpat))
42495ca0
RK
1899 {
1900 /* If I2DEST is a hard register or the only use of a pseudo,
1901 we can change its mode. */
1902 if (GET_MODE (SET_DEST (newpat)) != GET_MODE (i2dest)
02f4ada4 1903 && GET_MODE (SET_DEST (newpat)) != VOIDmode
60654f77 1904 && GET_CODE (i2dest) == REG
42495ca0 1905 && (REGNO (i2dest) < FIRST_PSEUDO_REGISTER
b1f21e0a 1906 || (REG_N_SETS (REGNO (i2dest)) == 1 && ! added_sets_2
42495ca0 1907 && ! REG_USERVAR_P (i2dest))))
38a448ca 1908 ni2dest = gen_rtx_REG (GET_MODE (SET_DEST (newpat)),
42495ca0
RK
1909 REGNO (i2dest));
1910
38a448ca
RH
1911 m_split = split_insns
1912 (gen_rtx_PARALLEL (VOIDmode,
1913 gen_rtvec (2, newpat,
1914 gen_rtx_CLOBBER (VOIDmode,
1915 ni2dest))),
1916 i3);
42495ca0 1917 }
916f14f1
RK
1918
1919 if (m_split && GET_CODE (m_split) == SEQUENCE
3f508eca
RK
1920 && XVECLEN (m_split, 0) == 2
1921 && (next_real_insn (i2) == i3
1922 || ! use_crosses_set_p (PATTERN (XVECEXP (m_split, 0, 0)),
1923 INSN_CUID (i2))))
916f14f1 1924 {
1a26b032 1925 rtx i2set, i3set;
d0ab8cd3 1926 rtx newi3pat = PATTERN (XVECEXP (m_split, 0, 1));
916f14f1 1927 newi2pat = PATTERN (XVECEXP (m_split, 0, 0));
916f14f1 1928
e4ba89be
RK
1929 i3set = single_set (XVECEXP (m_split, 0, 1));
1930 i2set = single_set (XVECEXP (m_split, 0, 0));
1a26b032 1931
42495ca0
RK
1932 /* In case we changed the mode of I2DEST, replace it in the
1933 pseudo-register table here. We can't do it above in case this
1934 code doesn't get executed and we do a split the other way. */
1935
1936 if (REGNO (i2dest) >= FIRST_PSEUDO_REGISTER)
1937 SUBST (regno_reg_rtx[REGNO (i2dest)], ni2dest);
1938
8e2f6e35 1939 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
1a26b032
RK
1940
1941 /* If I2 or I3 has multiple SETs, we won't know how to track
9cc96794
RK
1942 register status, so don't use these insns. If I2's destination
1943 is used between I2 and I3, we also can't use these insns. */
1a26b032 1944
9cc96794
RK
1945 if (i2_code_number >= 0 && i2set && i3set
1946 && (next_real_insn (i2) == i3
1947 || ! reg_used_between_p (SET_DEST (i2set), i2, i3)))
8e2f6e35
BS
1948 insn_code_number = recog_for_combine (&newi3pat, i3,
1949 &new_i3_notes);
d0ab8cd3
RK
1950 if (insn_code_number >= 0)
1951 newpat = newi3pat;
1952
c767f54b 1953 /* It is possible that both insns now set the destination of I3.
22609cbf 1954 If so, we must show an extra use of it. */
c767f54b 1955
393de53f
RK
1956 if (insn_code_number >= 0)
1957 {
1958 rtx new_i3_dest = SET_DEST (i3set);
1959 rtx new_i2_dest = SET_DEST (i2set);
1960
1961 while (GET_CODE (new_i3_dest) == ZERO_EXTRACT
1962 || GET_CODE (new_i3_dest) == STRICT_LOW_PART
1963 || GET_CODE (new_i3_dest) == SUBREG)
1964 new_i3_dest = XEXP (new_i3_dest, 0);
1965
d4096689
RK
1966 while (GET_CODE (new_i2_dest) == ZERO_EXTRACT
1967 || GET_CODE (new_i2_dest) == STRICT_LOW_PART
1968 || GET_CODE (new_i2_dest) == SUBREG)
1969 new_i2_dest = XEXP (new_i2_dest, 0);
1970
393de53f
RK
1971 if (GET_CODE (new_i3_dest) == REG
1972 && GET_CODE (new_i2_dest) == REG
1973 && REGNO (new_i3_dest) == REGNO (new_i2_dest))
b1f21e0a 1974 REG_N_SETS (REGNO (new_i2_dest))++;
393de53f 1975 }
916f14f1 1976 }
230d793d
RS
1977
1978 /* If we can split it and use I2DEST, go ahead and see if that
1979 helps things be recognized. Verify that none of the registers
1980 are set between I2 and I3. */
d0ab8cd3 1981 if (insn_code_number < 0 && (split = find_split_point (&newpat, i3)) != 0
230d793d
RS
1982#ifdef HAVE_cc0
1983 && GET_CODE (i2dest) == REG
1984#endif
1985 /* We need I2DEST in the proper mode. If it is a hard register
1986 or the only use of a pseudo, we can change its mode. */
1987 && (GET_MODE (*split) == GET_MODE (i2dest)
1988 || GET_MODE (*split) == VOIDmode
1989 || REGNO (i2dest) < FIRST_PSEUDO_REGISTER
b1f21e0a 1990 || (REG_N_SETS (REGNO (i2dest)) == 1 && ! added_sets_2
230d793d
RS
1991 && ! REG_USERVAR_P (i2dest)))
1992 && (next_real_insn (i2) == i3
1993 || ! use_crosses_set_p (*split, INSN_CUID (i2)))
1994 /* We can't overwrite I2DEST if its value is still used by
1995 NEWPAT. */
1996 && ! reg_referenced_p (i2dest, newpat))
1997 {
1998 rtx newdest = i2dest;
df7d75de
RK
1999 enum rtx_code split_code = GET_CODE (*split);
2000 enum machine_mode split_mode = GET_MODE (*split);
230d793d
RS
2001
2002 /* Get NEWDEST as a register in the proper mode. We have already
2003 validated that we can do this. */
df7d75de 2004 if (GET_MODE (i2dest) != split_mode && split_mode != VOIDmode)
230d793d 2005 {
38a448ca 2006 newdest = gen_rtx_REG (split_mode, REGNO (i2dest));
230d793d
RS
2007
2008 if (REGNO (i2dest) >= FIRST_PSEUDO_REGISTER)
2009 SUBST (regno_reg_rtx[REGNO (i2dest)], newdest);
2010 }
2011
2012 /* If *SPLIT is a (mult FOO (const_int pow2)), convert it to
2013 an ASHIFT. This can occur if it was inside a PLUS and hence
2014 appeared to be a memory address. This is a kludge. */
df7d75de 2015 if (split_code == MULT
230d793d
RS
2016 && GET_CODE (XEXP (*split, 1)) == CONST_INT
2017 && (i = exact_log2 (INTVAL (XEXP (*split, 1)))) >= 0)
1dc8a823
JW
2018 {
2019 SUBST (*split, gen_rtx_combine (ASHIFT, split_mode,
2020 XEXP (*split, 0), GEN_INT (i)));
2021 /* Update split_code because we may not have a multiply
2022 anymore. */
2023 split_code = GET_CODE (*split);
2024 }
230d793d
RS
2025
2026#ifdef INSN_SCHEDULING
2027 /* If *SPLIT is a paradoxical SUBREG, when we split it, it should
2028 be written as a ZERO_EXTEND. */
df7d75de
RK
2029 if (split_code == SUBREG && GET_CODE (SUBREG_REG (*split)) == MEM)
2030 SUBST (*split, gen_rtx_combine (ZERO_EXTEND, split_mode,
230d793d
RS
2031 XEXP (*split, 0)));
2032#endif
2033
2034 newi2pat = gen_rtx_combine (SET, VOIDmode, newdest, *split);
2035 SUBST (*split, newdest);
8e2f6e35 2036 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
df7d75de
RK
2037
2038 /* If the split point was a MULT and we didn't have one before,
2039 don't use one now. */
2040 if (i2_code_number >= 0 && ! (split_code == MULT && ! have_mult))
8e2f6e35 2041 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
230d793d
RS
2042 }
2043 }
2044
2045 /* Check for a case where we loaded from memory in a narrow mode and
2046 then sign extended it, but we need both registers. In that case,
2047 we have a PARALLEL with both loads from the same memory location.
2048 We can split this into a load from memory followed by a register-register
2049 copy. This saves at least one insn, more if register allocation can
f0343c74
RK
2050 eliminate the copy.
2051
2052 We cannot do this if the destination of the second assignment is
2053 a register that we have already assumed is zero-extended. Similarly
2054 for a SUBREG of such a register. */
230d793d
RS
2055
2056 else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0
2057 && GET_CODE (newpat) == PARALLEL
2058 && XVECLEN (newpat, 0) == 2
2059 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
2060 && GET_CODE (SET_SRC (XVECEXP (newpat, 0, 0))) == SIGN_EXTEND
2061 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
2062 && rtx_equal_p (SET_SRC (XVECEXP (newpat, 0, 1)),
2063 XEXP (SET_SRC (XVECEXP (newpat, 0, 0)), 0))
2064 && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 1)),
2065 INSN_CUID (i2))
2066 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT
2067 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART
f0343c74
RK
2068 && ! (temp = SET_DEST (XVECEXP (newpat, 0, 1)),
2069 (GET_CODE (temp) == REG
2070 && reg_nonzero_bits[REGNO (temp)] != 0
2071 && GET_MODE_BITSIZE (GET_MODE (temp)) < BITS_PER_WORD
2072 && GET_MODE_BITSIZE (GET_MODE (temp)) < HOST_BITS_PER_INT
2073 && (reg_nonzero_bits[REGNO (temp)]
2074 != GET_MODE_MASK (word_mode))))
2075 && ! (GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) == SUBREG
2076 && (temp = SUBREG_REG (SET_DEST (XVECEXP (newpat, 0, 1))),
2077 (GET_CODE (temp) == REG
2078 && reg_nonzero_bits[REGNO (temp)] != 0
2079 && GET_MODE_BITSIZE (GET_MODE (temp)) < BITS_PER_WORD
2080 && GET_MODE_BITSIZE (GET_MODE (temp)) < HOST_BITS_PER_INT
2081 && (reg_nonzero_bits[REGNO (temp)]
2082 != GET_MODE_MASK (word_mode)))))
230d793d
RS
2083 && ! reg_overlap_mentioned_p (SET_DEST (XVECEXP (newpat, 0, 1)),
2084 SET_SRC (XVECEXP (newpat, 0, 1)))
2085 && ! find_reg_note (i3, REG_UNUSED,
2086 SET_DEST (XVECEXP (newpat, 0, 0))))
2087 {
472fbdd1
RK
2088 rtx ni2dest;
2089
230d793d 2090 newi2pat = XVECEXP (newpat, 0, 0);
472fbdd1 2091 ni2dest = SET_DEST (XVECEXP (newpat, 0, 0));
230d793d
RS
2092 newpat = XVECEXP (newpat, 0, 1);
2093 SUBST (SET_SRC (newpat),
472fbdd1 2094 gen_lowpart_for_combine (GET_MODE (SET_SRC (newpat)), ni2dest));
8e2f6e35 2095 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
a29ca9db 2096
230d793d 2097 if (i2_code_number >= 0)
8e2f6e35 2098 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
5089e22e
RS
2099
2100 if (insn_code_number >= 0)
2101 {
2102 rtx insn;
2103 rtx link;
2104
2105 /* If we will be able to accept this, we have made a change to the
2106 destination of I3. This can invalidate a LOG_LINKS pointing
2107 to I3. No other part of combine.c makes such a transformation.
2108
2109 The new I3 will have a destination that was previously the
2110 destination of I1 or I2 and which was used in i2 or I3. Call
2111 distribute_links to make a LOG_LINK from the next use of
2112 that destination. */
2113
2114 PATTERN (i3) = newpat;
38a448ca 2115 distribute_links (gen_rtx_INSN_LIST (VOIDmode, i3, NULL_RTX));
5089e22e
RS
2116
2117 /* I3 now uses what used to be its destination and which is
2118 now I2's destination. That means we need a LOG_LINK from
2119 I3 to I2. But we used to have one, so we still will.
2120
2121 However, some later insn might be using I2's dest and have
2122 a LOG_LINK pointing at I3. We must remove this link.
2123 The simplest way to remove the link is to point it at I1,
2124 which we know will be a NOTE. */
2125
2126 for (insn = NEXT_INSN (i3);
0d4d42c3 2127 insn && (this_basic_block == n_basic_blocks - 1
3b413743 2128 || insn != BLOCK_HEAD (this_basic_block + 1));
5089e22e
RS
2129 insn = NEXT_INSN (insn))
2130 {
2131 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
472fbdd1 2132 && reg_referenced_p (ni2dest, PATTERN (insn)))
5089e22e
RS
2133 {
2134 for (link = LOG_LINKS (insn); link;
2135 link = XEXP (link, 1))
2136 if (XEXP (link, 0) == i3)
2137 XEXP (link, 0) = i1;
2138
2139 break;
2140 }
2141 }
2142 }
230d793d
RS
2143 }
2144
2145 /* Similarly, check for a case where we have a PARALLEL of two independent
2146 SETs but we started with three insns. In this case, we can do the sets
2147 as two separate insns. This case occurs when some SET allows two
2148 other insns to combine, but the destination of that SET is still live. */
2149
2150 else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0
2151 && GET_CODE (newpat) == PARALLEL
2152 && XVECLEN (newpat, 0) == 2
2153 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
2154 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != ZERO_EXTRACT
2155 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != STRICT_LOW_PART
2156 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
2157 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT
2158 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART
2159 && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 1)),
2160 INSN_CUID (i2))
2161 /* Don't pass sets with (USE (MEM ...)) dests to the following. */
2162 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != USE
2163 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != USE
2164 && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 1)),
2165 XVECEXP (newpat, 0, 0))
2166 && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 0)),
2167 XVECEXP (newpat, 0, 1)))
2168 {
e9a25f70
JL
2169 /* Normally, it doesn't matter which of the two is done first,
2170 but it does if one references cc0. In that case, it has to
2171 be first. */
2172#ifdef HAVE_cc0
2173 if (reg_referenced_p (cc0_rtx, XVECEXP (newpat, 0, 0)))
2174 {
2175 newi2pat = XVECEXP (newpat, 0, 0);
2176 newpat = XVECEXP (newpat, 0, 1);
2177 }
2178 else
2179#endif
2180 {
2181 newi2pat = XVECEXP (newpat, 0, 1);
2182 newpat = XVECEXP (newpat, 0, 0);
2183 }
230d793d 2184
8e2f6e35 2185 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
a29ca9db 2186
230d793d 2187 if (i2_code_number >= 0)
8e2f6e35 2188 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
230d793d
RS
2189 }
2190
2191 /* If it still isn't recognized, fail and change things back the way they
2192 were. */
2193 if ((insn_code_number < 0
2194 /* Is the result a reasonable ASM_OPERANDS? */
2195 && (! check_asm_operands (newpat) || added_sets_1 || added_sets_2)))
2196 {
2197 undo_all ();
2198 return 0;
2199 }
2200
2201 /* If we had to change another insn, make sure it is valid also. */
2202 if (undobuf.other_insn)
2203 {
230d793d
RS
2204 rtx other_pat = PATTERN (undobuf.other_insn);
2205 rtx new_other_notes;
2206 rtx note, next;
2207
6e25d159
RK
2208 CLEAR_HARD_REG_SET (newpat_used_regs);
2209
8e2f6e35
BS
2210 other_code_number = recog_for_combine (&other_pat, undobuf.other_insn,
2211 &new_other_notes);
230d793d
RS
2212
2213 if (other_code_number < 0 && ! check_asm_operands (other_pat))
2214 {
2215 undo_all ();
2216 return 0;
2217 }
2218
2219 PATTERN (undobuf.other_insn) = other_pat;
2220
2221 /* If any of the notes in OTHER_INSN were REG_UNUSED, ensure that they
2222 are still valid. Then add any non-duplicate notes added by
2223 recog_for_combine. */
2224 for (note = REG_NOTES (undobuf.other_insn); note; note = next)
2225 {
2226 next = XEXP (note, 1);
2227
2228 if (REG_NOTE_KIND (note) == REG_UNUSED
2229 && ! reg_set_p (XEXP (note, 0), PATTERN (undobuf.other_insn)))
1a26b032
RK
2230 {
2231 if (GET_CODE (XEXP (note, 0)) == REG)
b1f21e0a 2232 REG_N_DEATHS (REGNO (XEXP (note, 0)))--;
1a26b032
RK
2233
2234 remove_note (undobuf.other_insn, note);
2235 }
230d793d
RS
2236 }
2237
1a26b032
RK
2238 for (note = new_other_notes; note; note = XEXP (note, 1))
2239 if (GET_CODE (XEXP (note, 0)) == REG)
b1f21e0a 2240 REG_N_DEATHS (REGNO (XEXP (note, 0)))++;
1a26b032 2241
230d793d 2242 distribute_notes (new_other_notes, undobuf.other_insn,
5f4f0e22 2243 undobuf.other_insn, NULL_RTX, NULL_RTX, NULL_RTX);
230d793d
RS
2244 }
2245
2246 /* We now know that we can do this combination. Merge the insns and
2247 update the status of registers and LOG_LINKS. */
2248
2249 {
2250 rtx i3notes, i2notes, i1notes = 0;
2251 rtx i3links, i2links, i1links = 0;
2252 rtx midnotes = 0;
230d793d 2253 register int regno;
ff3467a9
JW
2254 /* Compute which registers we expect to eliminate. newi2pat may be setting
2255 either i3dest or i2dest, so we must check it. Also, i1dest may be the
2256 same as i3dest, in which case newi2pat may be setting i1dest. */
2257 rtx elim_i2 = ((newi2pat && reg_set_p (i2dest, newi2pat))
2258 || i2dest_in_i2src || i2dest_in_i1src
230d793d 2259 ? 0 : i2dest);
ff3467a9
JW
2260 rtx elim_i1 = (i1 == 0 || i1dest_in_i1src
2261 || (newi2pat && reg_set_p (i1dest, newi2pat))
2262 ? 0 : i1dest);
230d793d
RS
2263
2264 /* Get the old REG_NOTES and LOG_LINKS from all our insns and
2265 clear them. */
2266 i3notes = REG_NOTES (i3), i3links = LOG_LINKS (i3);
2267 i2notes = REG_NOTES (i2), i2links = LOG_LINKS (i2);
2268 if (i1)
2269 i1notes = REG_NOTES (i1), i1links = LOG_LINKS (i1);
2270
2271 /* Ensure that we do not have something that should not be shared but
2272 occurs multiple times in the new insns. Check this by first
5089e22e 2273      resetting all the `used' flags and then copying anything that is shared.  */
230d793d
RS
2274
2275 reset_used_flags (i3notes);
2276 reset_used_flags (i2notes);
2277 reset_used_flags (i1notes);
2278 reset_used_flags (newpat);
2279 reset_used_flags (newi2pat);
2280 if (undobuf.other_insn)
2281 reset_used_flags (PATTERN (undobuf.other_insn));
2282
2283 i3notes = copy_rtx_if_shared (i3notes);
2284 i2notes = copy_rtx_if_shared (i2notes);
2285 i1notes = copy_rtx_if_shared (i1notes);
2286 newpat = copy_rtx_if_shared (newpat);
2287 newi2pat = copy_rtx_if_shared (newi2pat);
2288 if (undobuf.other_insn)
2289 reset_used_flags (PATTERN (undobuf.other_insn));
2290
2291 INSN_CODE (i3) = insn_code_number;
2292 PATTERN (i3) = newpat;
2293 if (undobuf.other_insn)
2294 INSN_CODE (undobuf.other_insn) = other_code_number;
2295
2296 /* We had one special case above where I2 had more than one set and
2297 we replaced a destination of one of those sets with the destination
2298 of I3. In that case, we have to update LOG_LINKS of insns later
176c9e6b
JW
2299 in this basic block. Note that this (expensive) case is rare.
2300
2301 Also, in this case, we must pretend that all REG_NOTEs for I2
2302 actually came from I3, so that REG_UNUSED notes from I2 will be
2303 properly handled. */
2304
2305 if (i3_subst_into_i2)
2306 {
2307 for (i = 0; i < XVECLEN (PATTERN (i2), 0); i++)
2308 if (GET_CODE (SET_DEST (XVECEXP (PATTERN (i2), 0, i))) == REG
2309 && SET_DEST (XVECEXP (PATTERN (i2), 0, i)) != i2dest
2310 && ! find_reg_note (i2, REG_UNUSED,
2311 SET_DEST (XVECEXP (PATTERN (i2), 0, i))))
2312 for (temp = NEXT_INSN (i2);
2313 temp && (this_basic_block == n_basic_blocks - 1
3b413743 2314 || BLOCK_HEAD (this_basic_block) != temp);
176c9e6b
JW
2315 temp = NEXT_INSN (temp))
2316 if (temp != i3 && GET_RTX_CLASS (GET_CODE (temp)) == 'i')
2317 for (link = LOG_LINKS (temp); link; link = XEXP (link, 1))
2318 if (XEXP (link, 0) == i2)
2319 XEXP (link, 0) = i3;
2320
2321 if (i3notes)
2322 {
2323 rtx link = i3notes;
2324 while (XEXP (link, 1))
2325 link = XEXP (link, 1);
2326 XEXP (link, 1) = i2notes;
2327 }
2328 else
2329 i3notes = i2notes;
2330 i2notes = 0;
2331 }
230d793d
RS
2332
2333 LOG_LINKS (i3) = 0;
2334 REG_NOTES (i3) = 0;
2335 LOG_LINKS (i2) = 0;
2336 REG_NOTES (i2) = 0;
2337
2338 if (newi2pat)
2339 {
2340 INSN_CODE (i2) = i2_code_number;
2341 PATTERN (i2) = newi2pat;
2342 }
2343 else
2344 {
2345 PUT_CODE (i2, NOTE);
2346 NOTE_LINE_NUMBER (i2) = NOTE_INSN_DELETED;
2347 NOTE_SOURCE_FILE (i2) = 0;
2348 }
2349
2350 if (i1)
2351 {
2352 LOG_LINKS (i1) = 0;
2353 REG_NOTES (i1) = 0;
2354 PUT_CODE (i1, NOTE);
2355 NOTE_LINE_NUMBER (i1) = NOTE_INSN_DELETED;
2356 NOTE_SOURCE_FILE (i1) = 0;
2357 }
2358
2359 /* Get death notes for everything that is now used in either I3 or
6eb12cef
RK
2360 I2 and used to die in a previous insn. If we built two new
2361 patterns, move from I1 to I2 then I2 to I3 so that we get the
2362 proper movement on registers that I2 modifies. */
230d793d 2363
230d793d 2364 if (newi2pat)
6eb12cef
RK
2365 {
2366 move_deaths (newi2pat, NULL_RTX, INSN_CUID (i1), i2, &midnotes);
2367 move_deaths (newpat, newi2pat, INSN_CUID (i1), i3, &midnotes);
2368 }
2369 else
2370 move_deaths (newpat, NULL_RTX, i1 ? INSN_CUID (i1) : INSN_CUID (i2),
2371 i3, &midnotes);
230d793d
RS
2372
2373 /* Distribute all the LOG_LINKS and REG_NOTES from I1, I2, and I3. */
2374 if (i3notes)
5f4f0e22
CH
2375 distribute_notes (i3notes, i3, i3, newi2pat ? i2 : NULL_RTX,
2376 elim_i2, elim_i1);
230d793d 2377 if (i2notes)
5f4f0e22
CH
2378 distribute_notes (i2notes, i2, i3, newi2pat ? i2 : NULL_RTX,
2379 elim_i2, elim_i1);
230d793d 2380 if (i1notes)
5f4f0e22
CH
2381 distribute_notes (i1notes, i1, i3, newi2pat ? i2 : NULL_RTX,
2382 elim_i2, elim_i1);
230d793d 2383 if (midnotes)
5f4f0e22
CH
2384 distribute_notes (midnotes, NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
2385 elim_i2, elim_i1);
230d793d
RS
2386
2387 /* Distribute any notes added to I2 or I3 by recog_for_combine. We
2388 know these are REG_UNUSED and want them to go to the desired insn,
1a26b032
RK
2389 so we always pass it as i3. We have not counted the notes in
2390 reg_n_deaths yet, so we need to do so now. */
2391
230d793d 2392 if (newi2pat && new_i2_notes)
1a26b032
RK
2393 {
2394 for (temp = new_i2_notes; temp; temp = XEXP (temp, 1))
2395 if (GET_CODE (XEXP (temp, 0)) == REG)
b1f21e0a 2396 REG_N_DEATHS (REGNO (XEXP (temp, 0)))++;
1a26b032
RK
2397
2398 distribute_notes (new_i2_notes, i2, i2, NULL_RTX, NULL_RTX, NULL_RTX);
2399 }
2400
230d793d 2401 if (new_i3_notes)
1a26b032
RK
2402 {
2403 for (temp = new_i3_notes; temp; temp = XEXP (temp, 1))
2404 if (GET_CODE (XEXP (temp, 0)) == REG)
b1f21e0a 2405 REG_N_DEATHS (REGNO (XEXP (temp, 0)))++;
1a26b032
RK
2406
2407 distribute_notes (new_i3_notes, i3, i3, NULL_RTX, NULL_RTX, NULL_RTX);
2408 }
230d793d
RS
2409
2410 /* If I3DEST was used in I3SRC, it really died in I3. We may need to
e9a25f70
JL
2411 put a REG_DEAD note for it somewhere. If NEWI2PAT exists and sets
2412 I3DEST, the death must be somewhere before I2, not I3. If we passed I3
2413 in that case, it might delete I2. Similarly for I2 and I1.
1a26b032
RK
2414 Show an additional death due to the REG_DEAD note we make here. If
2415 we discard it in distribute_notes, we will decrement it again. */
d0ab8cd3 2416
230d793d 2417 if (i3dest_killed)
1a26b032
RK
2418 {
2419 if (GET_CODE (i3dest_killed) == REG)
b1f21e0a 2420 REG_N_DEATHS (REGNO (i3dest_killed))++;
1a26b032 2421
e9a25f70 2422 if (newi2pat && reg_set_p (i3dest_killed, newi2pat))
38a448ca
RH
2423 distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i3dest_killed,
2424 NULL_RTX),
ff3467a9 2425 NULL_RTX, i2, NULL_RTX, elim_i2, elim_i1);
e9a25f70 2426 else
38a448ca
RH
2427 distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i3dest_killed,
2428 NULL_RTX),
e9a25f70 2429 NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
ff3467a9 2430 elim_i2, elim_i1);
1a26b032 2431 }
58c8c593 2432
230d793d 2433 if (i2dest_in_i2src)
58c8c593 2434 {
1a26b032 2435 if (GET_CODE (i2dest) == REG)
b1f21e0a 2436 REG_N_DEATHS (REGNO (i2dest))++;
1a26b032 2437
58c8c593 2438 if (newi2pat && reg_set_p (i2dest, newi2pat))
38a448ca 2439 distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i2dest, NULL_RTX),
58c8c593
RK
2440 NULL_RTX, i2, NULL_RTX, NULL_RTX, NULL_RTX);
2441 else
38a448ca 2442 distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i2dest, NULL_RTX),
58c8c593
RK
2443 NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
2444 NULL_RTX, NULL_RTX);
2445 }
2446
230d793d 2447 if (i1dest_in_i1src)
58c8c593 2448 {
1a26b032 2449 if (GET_CODE (i1dest) == REG)
b1f21e0a 2450 REG_N_DEATHS (REGNO (i1dest))++;
1a26b032 2451
58c8c593 2452 if (newi2pat && reg_set_p (i1dest, newi2pat))
38a448ca 2453 distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i1dest, NULL_RTX),
58c8c593
RK
2454 NULL_RTX, i2, NULL_RTX, NULL_RTX, NULL_RTX);
2455 else
38a448ca 2456 distribute_notes (gen_rtx_EXPR_LIST (REG_DEAD, i1dest, NULL_RTX),
58c8c593
RK
2457 NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
2458 NULL_RTX, NULL_RTX);
2459 }
230d793d
RS
2460
2461 distribute_links (i3links);
2462 distribute_links (i2links);
2463 distribute_links (i1links);
2464
2465 if (GET_CODE (i2dest) == REG)
2466 {
d0ab8cd3
RK
2467 rtx link;
2468 rtx i2_insn = 0, i2_val = 0, set;
2469
2470 /* The insn that used to set this register doesn't exist, and
2471 this life of the register may not exist either. See if one of
2472 I3's links points to an insn that sets I2DEST. If it does,
2473 that is now the last known value for I2DEST. If we don't update
2474 this and I2 set the register to a value that depended on its old
230d793d
RS
2475 contents, we will get confused. If this insn is used, thing
2476 will be set correctly in combine_instructions. */
d0ab8cd3
RK
2477
2478 for (link = LOG_LINKS (i3); link; link = XEXP (link, 1))
2479 if ((set = single_set (XEXP (link, 0))) != 0
2480 && rtx_equal_p (i2dest, SET_DEST (set)))
2481 i2_insn = XEXP (link, 0), i2_val = SET_SRC (set);
2482
2483 record_value_for_reg (i2dest, i2_insn, i2_val);
230d793d
RS
2484
2485 /* If the reg formerly set in I2 died only once and that was in I3,
2486 zero its use count so it won't make `reload' do any work. */
538fe8cd
ILT
2487 if (! added_sets_2
2488 && (newi2pat == 0 || ! reg_mentioned_p (i2dest, newi2pat))
2489 && ! i2dest_in_i2src)
230d793d
RS
2490 {
2491 regno = REGNO (i2dest);
b1f21e0a
MM
2492 REG_N_SETS (regno)--;
2493 if (REG_N_SETS (regno) == 0
e881bb1b
RH
2494 && ! REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start,
2495 regno))
b1f21e0a 2496 REG_N_REFS (regno) = 0;
230d793d
RS
2497 }
2498 }
2499
2500 if (i1 && GET_CODE (i1dest) == REG)
2501 {
d0ab8cd3
RK
2502 rtx link;
2503 rtx i1_insn = 0, i1_val = 0, set;
2504
2505 for (link = LOG_LINKS (i3); link; link = XEXP (link, 1))
2506 if ((set = single_set (XEXP (link, 0))) != 0
2507 && rtx_equal_p (i1dest, SET_DEST (set)))
2508 i1_insn = XEXP (link, 0), i1_val = SET_SRC (set);
2509
2510 record_value_for_reg (i1dest, i1_insn, i1_val);
2511
230d793d 2512 regno = REGNO (i1dest);
5af91171 2513 if (! added_sets_1 && ! i1dest_in_i1src)
230d793d 2514 {
b1f21e0a
MM
2515 REG_N_SETS (regno)--;
2516 if (REG_N_SETS (regno) == 0
e881bb1b
RH
2517 && ! REGNO_REG_SET_P (BASIC_BLOCK (0)->global_live_at_start,
2518 regno))
b1f21e0a 2519 REG_N_REFS (regno) = 0;
230d793d
RS
2520 }
2521 }
2522
951553af 2523 /* Update reg_nonzero_bits et al for any changes that may have been made
22609cbf
RK
2524 to this insn. */
2525
951553af 2526 note_stores (newpat, set_nonzero_bits_and_sign_copies);
22609cbf 2527 if (newi2pat)
951553af 2528 note_stores (newi2pat, set_nonzero_bits_and_sign_copies);
22609cbf 2529
230d793d
RS
2530 /* If I3 is now an unconditional jump, ensure that it has a
2531 BARRIER following it since it may have initially been a
381ee8af 2532 conditional jump. It may also be the last nonnote insn. */
230d793d
RS
2533
2534 if ((GET_CODE (newpat) == RETURN || simplejump_p (i3))
381ee8af
TW
2535 && ((temp = next_nonnote_insn (i3)) == NULL_RTX
2536 || GET_CODE (temp) != BARRIER))
230d793d
RS
2537 emit_barrier_after (i3);
2538 }
2539
2540 combine_successes++;
2541
bcd49eb7
JW
2542 /* Clear this here, so that subsequent get_last_value calls are not
2543 affected. */
2544 subst_prev_insn = NULL_RTX;
2545
abe6e52f
RK
2546 if (added_links_insn
2547 && (newi2pat == 0 || INSN_CUID (added_links_insn) < INSN_CUID (i2))
2548 && INSN_CUID (added_links_insn) < INSN_CUID (i3))
2549 return added_links_insn;
2550 else
2551 return newi2pat ? i2 : i3;
230d793d
RS
2552}
2553\f
2554/* Undo all the modifications recorded in undobuf. */
2555
2556static void
2557undo_all ()
2558{
241cea85
RK
2559 struct undo *undo, *next;
2560
2561 for (undo = undobuf.undos; undo; undo = next)
7c046e4e 2562 {
241cea85
RK
2563 next = undo->next;
2564 if (undo->is_int)
2565 *undo->where.i = undo->old_contents.i;
7c046e4e 2566 else
241cea85
RK
2567 *undo->where.r = undo->old_contents.r;
2568
2569 undo->next = undobuf.frees;
2570 undobuf.frees = undo;
7c046e4e 2571 }
230d793d
RS
2572
2573 obfree (undobuf.storage);
845fc875 2574 undobuf.undos = undobuf.previous_undos = 0;
bcd49eb7
JW
2575
2576 /* Clear this here, so that subsequent get_last_value calls are not
2577 affected. */
2578 subst_prev_insn = NULL_RTX;
230d793d
RS
2579}
2580\f
/* Find the innermost point within the rtx at LOC, possibly LOC itself,
   where we have an arithmetic expression and return that point.  LOC will
   be inside INSN.

   try_combine will call this function to see if an insn can be split into
   two insns.

   Returns a pointer into the rtx at the chosen split point, or 0 if no
   split point was found.  As a side effect, the rtx at LOC may be
   rewritten (via SUBST, so the change is undoable) into a form that is
   easier to split.  */

static rtx *
find_split_point (loc, insn)
     rtx *loc;
     rtx insn;
{
  rtx x = *loc;
  enum rtx_code code = GET_CODE (x);
  rtx *split;
  int len = 0, pos = 0, unsignedp = 0;
  rtx inner = NULL_RTX;

  /* First special-case some codes.  */
  switch (code)
    {
    case SUBREG:
#ifdef INSN_SCHEDULING
      /* If we are making a paradoxical SUBREG invalid, it becomes a split
	 point.  */
      if (GET_CODE (SUBREG_REG (x)) == MEM)
	return loc;
#endif
      return find_split_point (&SUBREG_REG (x), insn);

    case MEM:
#ifdef HAVE_lo_sum
      /* If we have (mem (const ..)) or (mem (symbol_ref ...)), split it
	 using LO_SUM and HIGH.  */
      if (GET_CODE (XEXP (x, 0)) == CONST
	  || GET_CODE (XEXP (x, 0)) == SYMBOL_REF)
	{
	  SUBST (XEXP (x, 0),
		 gen_rtx_combine (LO_SUM, Pmode,
				  gen_rtx_combine (HIGH, Pmode, XEXP (x, 0)),
				  XEXP (x, 0)));
	  return &XEXP (XEXP (x, 0), 0);
	}
#endif

      /* If we have a PLUS whose second operand is a constant and the
	 address is not valid, perhaps we can split it up using
	 the machine-specific way to split large constants.  We use
	 the first pseudo-reg (one of the virtual regs) as a placeholder;
	 it will not remain in the result.  */
      if (GET_CODE (XEXP (x, 0)) == PLUS
	  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
	  && ! memory_address_p (GET_MODE (x), XEXP (x, 0)))
	{
	  rtx reg = regno_reg_rtx[FIRST_PSEUDO_REGISTER];
	  rtx seq = split_insns (gen_rtx_SET (VOIDmode, reg, XEXP (x, 0)),
				 subst_insn);

	  /* This should have produced two insns, each of which sets our
	     placeholder.  If the source of the second is a valid address,
	     we can put both sources together and make a split point
	     in the middle.  */

	  if (seq && XVECLEN (seq, 0) == 2
	      && GET_CODE (XVECEXP (seq, 0, 0)) == INSN
	      && GET_CODE (PATTERN (XVECEXP (seq, 0, 0))) == SET
	      && SET_DEST (PATTERN (XVECEXP (seq, 0, 0))) == reg
	      && ! reg_mentioned_p (reg,
				    SET_SRC (PATTERN (XVECEXP (seq, 0, 0))))
	      && GET_CODE (XVECEXP (seq, 0, 1)) == INSN
	      && GET_CODE (PATTERN (XVECEXP (seq, 0, 1))) == SET
	      && SET_DEST (PATTERN (XVECEXP (seq, 0, 1))) == reg
	      && memory_address_p (GET_MODE (x),
				   SET_SRC (PATTERN (XVECEXP (seq, 0, 1)))))
	    {
	      rtx src1 = SET_SRC (PATTERN (XVECEXP (seq, 0, 0)));
	      rtx src2 = SET_SRC (PATTERN (XVECEXP (seq, 0, 1)));

	      /* Replace the placeholder in SRC2 with SRC1.  If we can
		 find where in SRC2 it was placed, that can become our
		 split point and we can replace this address with SRC2.
		 Just try two obvious places.  */

	      src2 = replace_rtx (src2, reg, src1);
	      split = 0;
	      if (XEXP (src2, 0) == src1)
		split = &XEXP (src2, 0);
	      else if (GET_RTX_FORMAT (GET_CODE (XEXP (src2, 0)))[0] == 'e'
		       && XEXP (XEXP (src2, 0), 0) == src1)
		split = &XEXP (XEXP (src2, 0), 0);

	      if (split)
		{
		  SUBST (XEXP (x, 0), src2);
		  return split;
		}
	    }

	  /* If that didn't work, perhaps the first operand is complex and
	     needs to be computed separately, so make a split point there.
	     This will occur on machines that just support REG + CONST
	     and have a constant moved through some previous computation.  */

	  else if (GET_RTX_CLASS (GET_CODE (XEXP (XEXP (x, 0), 0))) != 'o'
		   && ! (GET_CODE (XEXP (XEXP (x, 0), 0)) == SUBREG
			 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (XEXP (x, 0), 0))))
			     == 'o')))
	    return &XEXP (XEXP (x, 0), 0);
	}
      break;

    case SET:
#ifdef HAVE_cc0
      /* If SET_DEST is CC0 and SET_SRC is not an operand, a COMPARE, or a
	 ZERO_EXTRACT, the most likely reason why this doesn't match is that
	 we need to put the operand into a register.  So split at that
	 point.  */

      if (SET_DEST (x) == cc0_rtx
	  && GET_CODE (SET_SRC (x)) != COMPARE
	  && GET_CODE (SET_SRC (x)) != ZERO_EXTRACT
	  && GET_RTX_CLASS (GET_CODE (SET_SRC (x))) != 'o'
	  && ! (GET_CODE (SET_SRC (x)) == SUBREG
		&& GET_RTX_CLASS (GET_CODE (SUBREG_REG (SET_SRC (x)))) == 'o'))
	return &SET_SRC (x);
#endif

      /* See if we can split SET_SRC as it stands.  */
      split = find_split_point (&SET_SRC (x), insn);
      if (split && split != &SET_SRC (x))
	return split;

      /* See if we can split SET_DEST as it stands.  */
      split = find_split_point (&SET_DEST (x), insn);
      if (split && split != &SET_DEST (x))
	return split;

      /* See if this is a bitfield assignment with everything constant.  If
	 so, this is an IOR of an AND, so split it into that.  */
      if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
	  && (GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
	      <= HOST_BITS_PER_WIDE_INT)
	  && GET_CODE (XEXP (SET_DEST (x), 1)) == CONST_INT
	  && GET_CODE (XEXP (SET_DEST (x), 2)) == CONST_INT
	  && GET_CODE (SET_SRC (x)) == CONST_INT
	  && ((INTVAL (XEXP (SET_DEST (x), 1))
	       + INTVAL (XEXP (SET_DEST (x), 2)))
	      <= GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0))))
	  && ! side_effects_p (XEXP (SET_DEST (x), 0)))
	{
	  /* POS and LEN describe the bitfield; SRC is the constant value
	     being stored into it.  */
	  int pos = INTVAL (XEXP (SET_DEST (x), 2));
	  int len = INTVAL (XEXP (SET_DEST (x), 1));
	  int src = INTVAL (SET_SRC (x));
	  rtx dest = XEXP (SET_DEST (x), 0);
	  enum machine_mode mode = GET_MODE (dest);
	  unsigned HOST_WIDE_INT mask = ((HOST_WIDE_INT) 1 << len) - 1;

	  if (BITS_BIG_ENDIAN)
	    pos = GET_MODE_BITSIZE (mode) - len - pos;

	  /* If every bit of the field is being set, the AND is redundant
	     and a plain IOR suffices.  */
	  if ((unsigned HOST_WIDE_INT) src == mask)
	    SUBST (SET_SRC (x),
		   gen_binary (IOR, mode, dest, GEN_INT (src << pos)));
	  else
	    SUBST (SET_SRC (x),
		   gen_binary (IOR, mode,
			       gen_binary (AND, mode, dest,
					   GEN_INT (~ (mask << pos)
						    & GET_MODE_MASK (mode))),
			       GEN_INT (src << pos)));

	  SUBST (SET_DEST (x), dest);

	  split = find_split_point (&SET_SRC (x), insn);
	  if (split && split != &SET_SRC (x))
	    return split;
	}

      /* Otherwise, see if this is an operation that we can split into two.
	 If so, try to split that.  */
      code = GET_CODE (SET_SRC (x));

      switch (code)
	{
	case AND:
	  /* If we are AND'ing with a large constant that is only a single
	     bit and the result is only being used in a context where we
	     need to know if it is zero or non-zero, replace it with a bit
	     extraction.  This will avoid the large constant, which might
	     have taken more than one insn to make.  If the constant were
	     not a valid argument to the AND but took only one insn to make,
	     this is no worse, but if it took more than one insn, it will
	     be better.  */

	  if (GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
	      && GET_CODE (XEXP (SET_SRC (x), 0)) == REG
	      && (pos = exact_log2 (INTVAL (XEXP (SET_SRC (x), 1)))) >= 7
	      && GET_CODE (SET_DEST (x)) == REG
	      && (split = find_single_use (SET_DEST (x), insn, NULL_PTR)) != 0
	      && (GET_CODE (*split) == EQ || GET_CODE (*split) == NE)
	      && XEXP (*split, 0) == SET_DEST (x)
	      && XEXP (*split, 1) == const0_rtx)
	    {
	      rtx extraction = make_extraction (GET_MODE (SET_DEST (x)),
						XEXP (SET_SRC (x), 0),
						pos, NULL_RTX, 1, 1, 0, 0);
	      if (extraction != 0)
		{
		  SUBST (SET_SRC (x), extraction);
		  return find_split_point (loc, insn);
		}
	    }
	  break;

	case NE:
	  /* If STORE_FLAG_VALUE is -1, this is (NE X 0) and only one bit of X
	     is known to be on, this can be converted into a NEG of a shift.  */
	  if (STORE_FLAG_VALUE == -1 && XEXP (SET_SRC (x), 1) == const0_rtx
	      && GET_MODE (SET_SRC (x)) == GET_MODE (XEXP (SET_SRC (x), 0))
	      && 1 <= (pos = exact_log2
		       (nonzero_bits (XEXP (SET_SRC (x), 0),
				      GET_MODE (XEXP (SET_SRC (x), 0))))))
	    {
	      enum machine_mode mode = GET_MODE (XEXP (SET_SRC (x), 0));

	      SUBST (SET_SRC (x),
		     gen_rtx_combine (NEG, mode,
				      gen_rtx_combine (LSHIFTRT, mode,
						       XEXP (SET_SRC (x), 0),
						       GEN_INT (pos))));

	      split = find_split_point (&SET_SRC (x), insn);
	      if (split && split != &SET_SRC (x))
		return split;
	    }
	  break;

	case SIGN_EXTEND:
	  inner = XEXP (SET_SRC (x), 0);

	  /* We can't optimize if either mode is a partial integer
	     mode as we don't know how many bits are significant
	     in those modes.  */
	  if (GET_MODE_CLASS (GET_MODE (inner)) == MODE_PARTIAL_INT
	      || GET_MODE_CLASS (GET_MODE (SET_SRC (x))) == MODE_PARTIAL_INT)
	    break;

	  pos = 0;
	  len = GET_MODE_BITSIZE (GET_MODE (inner));
	  unsignedp = 0;
	  break;

	case SIGN_EXTRACT:
	case ZERO_EXTRACT:
	  if (GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
	      && GET_CODE (XEXP (SET_SRC (x), 2)) == CONST_INT)
	    {
	      inner = XEXP (SET_SRC (x), 0);
	      len = INTVAL (XEXP (SET_SRC (x), 1));
	      pos = INTVAL (XEXP (SET_SRC (x), 2));

	      if (BITS_BIG_ENDIAN)
		pos = GET_MODE_BITSIZE (GET_MODE (inner)) - len - pos;
	      unsignedp = (code == ZERO_EXTRACT);
	    }
	  break;

	default:
	  break;
	}

      /* If the cases above set up an extraction (LEN non-zero), rewrite it
	 as explicit shifts, which are easier to split.  */
      if (len && pos >= 0 && pos + len <= GET_MODE_BITSIZE (GET_MODE (inner)))
	{
	  enum machine_mode mode = GET_MODE (SET_SRC (x));

	  /* For unsigned, we have a choice of a shift followed by an
	     AND or two shifts.  Use two shifts for field sizes where the
	     constant might be too large.  We assume here that we can
	     always at least get 8-bit constants in an AND insn, which is
	     true for every current RISC.  */

	  if (unsignedp && len <= 8)
	    {
	      SUBST (SET_SRC (x),
		     gen_rtx_combine
		     (AND, mode,
		      gen_rtx_combine (LSHIFTRT, mode,
				       gen_lowpart_for_combine (mode, inner),
				       GEN_INT (pos)),
		      GEN_INT (((HOST_WIDE_INT) 1 << len) - 1)));

	      split = find_split_point (&SET_SRC (x), insn);
	      if (split && split != &SET_SRC (x))
		return split;
	    }
	  else
	    {
	      SUBST (SET_SRC (x),
		     gen_rtx_combine
		     (unsignedp ? LSHIFTRT : ASHIFTRT, mode,
		      gen_rtx_combine (ASHIFT, mode,
				       gen_lowpart_for_combine (mode, inner),
				       GEN_INT (GET_MODE_BITSIZE (mode)
						- len - pos)),
		      GEN_INT (GET_MODE_BITSIZE (mode) - len)));

	      split = find_split_point (&SET_SRC (x), insn);
	      if (split && split != &SET_SRC (x))
		return split;
	    }
	}

      /* See if this is a simple operation with a constant as the second
	 operand.  It might be that this constant is out of range and hence
	 could be used as a split point.  */
      if ((GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '2'
	   || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == 'c'
	   || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '<')
	  && CONSTANT_P (XEXP (SET_SRC (x), 1))
	  && (GET_RTX_CLASS (GET_CODE (XEXP (SET_SRC (x), 0))) == 'o'
	      || (GET_CODE (XEXP (SET_SRC (x), 0)) == SUBREG
		  && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (SET_SRC (x), 0))))
		      == 'o'))))
	return &XEXP (SET_SRC (x), 1);

      /* Finally, see if this is a simple operation with its first operand
	 not in a register.  The operation might require this operand in a
	 register, so return it as a split point.  We can always do this
	 because if the first operand were another operation, we would have
	 already found it as a split point.  */
      if ((GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '2'
	   || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == 'c'
	   || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '<'
	   || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '1')
	  && ! register_operand (XEXP (SET_SRC (x), 0), VOIDmode))
	return &XEXP (SET_SRC (x), 0);

      return 0;

    case AND:
    case IOR:
      /* We write NOR as (and (not A) (not B)), but if we don't have a NOR,
	 it is better to write this as (not (ior A B)) so we can split it.
	 Similarly for IOR.  */
      if (GET_CODE (XEXP (x, 0)) == NOT && GET_CODE (XEXP (x, 1)) == NOT)
	{
	  SUBST (*loc,
		 gen_rtx_combine (NOT, GET_MODE (x),
				  gen_rtx_combine (code == IOR ? AND : IOR,
						   GET_MODE (x),
						   XEXP (XEXP (x, 0), 0),
						   XEXP (XEXP (x, 1), 0))));
	  return find_split_point (loc, insn);
	}

      /* Many RISC machines have a large set of logical insns.  If the
	 second operand is a NOT, put it first so we will try to split the
	 other operand first.  */
      if (GET_CODE (XEXP (x, 1)) == NOT)
	{
	  rtx tem = XEXP (x, 0);
	  SUBST (XEXP (x, 0), XEXP (x, 1));
	  SUBST (XEXP (x, 1), tem);
	}
      break;

    default:
      break;
    }

  /* Otherwise, select our actions depending on our rtx class.  Try the
     most deeply nested operands first, then fall through to the shallower
     ones.  */
  switch (GET_RTX_CLASS (code))
    {
    case 'b':			/* This is ZERO_EXTRACT and SIGN_EXTRACT.  */
    case '3':
      split = find_split_point (&XEXP (x, 2), insn);
      if (split)
	return split;
      /* ... fall through ...  */
    case '2':
    case 'c':
    case '<':
      split = find_split_point (&XEXP (x, 1), insn);
      if (split)
	return split;
      /* ... fall through ...  */
    case '1':
      /* Some machines have (and (shift ...) ...) insns.  If X is not
	 an AND, but XEXP (X, 0) is, use it as our split point.  */
      if (GET_CODE (x) != AND && GET_CODE (XEXP (x, 0)) == AND)
	return &XEXP (x, 0);

      split = find_split_point (&XEXP (x, 0), insn);
      if (split)
	return split;
      return loc;
    }

  /* Otherwise, we don't have a split point.  */
  return 0;
}
2982\f
/* Throughout X, replace FROM with TO, and return the result.
   The result is TO if X is FROM;
   otherwise the result is X, but its contents may have been modified.
   If they were modified, a record was made in undobuf so that
   undo_all will (among other things) return X to its original state.

   If the number of changes necessary is too much to record to undo,
   the excess changes are not made, so the result is invalid.
   The changes already made can still be undone.
   undobuf.num_undo is incremented for such changes, so by testing that
   the caller can tell whether the result is valid.

   A (clobber (const_int 0)) result means the combination must be rejected;
   it is propagated up from failed sub-substitutions.

   `n_occurrences' is incremented each time FROM is replaced.

   IN_DEST is non-zero if we are processing the SET_DEST of a SET.

   UNIQUE_COPY is non-zero if each substitution must be unique.  We do this
   by copying if `n_occurrences' is non-zero.  */

static rtx
subst (x, from, to, in_dest, unique_copy)
     register rtx x, from, to;
     int in_dest;
     int unique_copy;
{
  register enum rtx_code code = GET_CODE (x);
  enum machine_mode op0_mode = VOIDmode;
  register const char *fmt;
  register int len, i;
  rtx new;

/* Two expressions are equal if they are identical copies of a shared
   RTX or if they are both registers with the same register number
   and mode.  Note that this macro evaluates its arguments more than
   once, so they must be side-effect free.  */

#define COMBINE_RTX_EQUAL_P(X,Y)			\
  ((X) == (Y)						\
   || (GET_CODE (X) == REG && GET_CODE (Y) == REG	\
       && REGNO (X) == REGNO (Y) && GET_MODE (X) == GET_MODE (Y)))

  if (! in_dest && COMBINE_RTX_EQUAL_P (x, from))
    {
      n_occurrences++;
      return (unique_copy && n_occurrences > 1 ? copy_rtx (to) : to);
    }

  /* If X and FROM are the same register but different modes, they will
     not have been seen as equal above.  However, flow.c will make a
     LOG_LINKS entry for that case.  If we do nothing, we will try to
     rerecognize our original insn and, when it succeeds, we will
     delete the feeding insn, which is incorrect.

     So force this insn not to match in this (rare) case.  */
  if (! in_dest && code == REG && GET_CODE (from) == REG
      && REGNO (x) == REGNO (from))
    return gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);

  /* If this is an object, we are done unless it is a MEM or LO_SUM, both
     of which may contain things that can be combined.  */
  if (code != MEM && code != LO_SUM && GET_RTX_CLASS (code) == 'o')
    return x;

  /* It is possible to have a subexpression appear twice in the insn.
     Suppose that FROM is a register that appears within TO.
     Then, after that subexpression has been scanned once by `subst',
     the second time it is scanned, TO may be found.  If we were
     to scan TO here, we would find FROM within it and create a
     self-referent rtl structure which is completely wrong.  */
  if (COMBINE_RTX_EQUAL_P (x, to))
    return to;

  /* Parallel asm_operands need special attention because all of the
     inputs are shared across the arms.  Furthermore, unsharing the
     rtl results in recognition failures.  Failure to handle this case
     specially can result in circular rtl.

     Solve this by doing a normal pass across the first entry of the
     parallel, and only processing the SET_DESTs of the subsequent
     entries.  Ug.  */

  if (code == PARALLEL
      && GET_CODE (XVECEXP (x, 0, 0)) == SET
      && GET_CODE (SET_SRC (XVECEXP (x, 0, 0))) == ASM_OPERANDS)
    {
      new = subst (XVECEXP (x, 0, 0), from, to, 0, unique_copy);

      /* If this substitution failed, this whole thing fails.  */
      if (GET_CODE (new) == CLOBBER
	  && XEXP (new, 0) == const0_rtx)
	return new;

      SUBST (XVECEXP (x, 0, 0), new);

      for (i = XVECLEN (x, 0) - 1; i >= 1; i--)
	{
	  rtx dest = SET_DEST (XVECEXP (x, 0, i));

	  if (GET_CODE (dest) != REG
	      && GET_CODE (dest) != CC0
	      && GET_CODE (dest) != PC)
	    {
	      new = subst (dest, from, to, 0, unique_copy);

	      /* If this substitution failed, this whole thing fails.  */
	      if (GET_CODE (new) == CLOBBER
		  && XEXP (new, 0) == const0_rtx)
		return new;

	      SUBST (SET_DEST (XVECEXP (x, 0, i)), new);
	    }
	}
    }
  else
    {
      len = GET_RTX_LENGTH (code);
      fmt = GET_RTX_FORMAT (code);

      /* We don't need to process a SET_DEST that is a register, CC0,
	 or PC, so set up to skip this common case.  All other cases
	 where we want to suppress replacing something inside a
	 SET_SRC are handled via the IN_DEST operand.  */
      if (code == SET
	  && (GET_CODE (SET_DEST (x)) == REG
	      || GET_CODE (SET_DEST (x)) == CC0
	      || GET_CODE (SET_DEST (x)) == PC))
	fmt = "ie";

      /* Get the mode of operand 0 in case X is now a SIGN_EXTEND of a
	 constant.  */
      if (fmt[0] == 'e')
	op0_mode = GET_MODE (XEXP (x, 0));

      for (i = 0; i < len; i++)
	{
	  if (fmt[i] == 'E')
	    {
	      register int j;
	      for (j = XVECLEN (x, i) - 1; j >= 0; j--)
		{
		  if (COMBINE_RTX_EQUAL_P (XVECEXP (x, i, j), from))
		    {
		      new = (unique_copy && n_occurrences
			     ? copy_rtx (to) : to);
		      n_occurrences++;
		    }
		  else
		    {
		      new = subst (XVECEXP (x, i, j), from, to, 0,
				   unique_copy);

		      /* If this substitution failed, this whole thing
			 fails.  */
		      if (GET_CODE (new) == CLOBBER
			  && XEXP (new, 0) == const0_rtx)
			return new;
		    }

		  SUBST (XVECEXP (x, i, j), new);
		}
	    }
	  else if (fmt[i] == 'e')
	    {
	      if (COMBINE_RTX_EQUAL_P (XEXP (x, i), from))
		{
		  /* In general, don't install a subreg involving two
		     modes not tieable.  It can worsen register
		     allocation, and can even make invalid reload
		     insns, since the reg inside may need to be copied
		     from in the outside mode, and that may be invalid
		     if it is an fp reg copied in integer mode.

		     We allow two exceptions to this:  It is valid if
		     it is inside another SUBREG and the mode of that
		     SUBREG and the mode of the inside of TO is
		     tieable and it is valid if X is a SET that copies
		     FROM to CC0.  */

		  if (GET_CODE (to) == SUBREG
		      && ! MODES_TIEABLE_P (GET_MODE (to),
					    GET_MODE (SUBREG_REG (to)))
		      && ! (code == SUBREG
			    && MODES_TIEABLE_P (GET_MODE (x),
						GET_MODE (SUBREG_REG (to))))
#ifdef HAVE_cc0
		      && ! (code == SET && i == 1 && XEXP (x, 0) == cc0_rtx)
#endif
		      )
		    return gen_rtx_CLOBBER (VOIDmode, const0_rtx);

		  new = (unique_copy && n_occurrences ? copy_rtx (to) : to);
		  n_occurrences++;
		}
	      else
		/* If we are in a SET_DEST, suppress most cases unless we
		   have gone inside a MEM, in which case we want to
		   simplify the address.  We assume here that things that
		   are actually part of the destination have their inner
		   parts in the first expression.  This is true for SUBREG,
		   STRICT_LOW_PART, and ZERO_EXTRACT, which are the only
		   things aside from REG and MEM that should appear in a
		   SET_DEST.  */
		new = subst (XEXP (x, i), from, to,
			     (((in_dest
				&& (code == SUBREG || code == STRICT_LOW_PART
				    || code == ZERO_EXTRACT))
			       || code == SET)
			      && i == 0), unique_copy);

	      /* If we found that we will have to reject this combination,
		 indicate that by returning the CLOBBER ourselves, rather than
		 an expression containing it.  This will speed things up as
		 well as prevent accidents where two CLOBBERs are considered
		 to be equal, thus producing an incorrect simplification.  */

	      if (GET_CODE (new) == CLOBBER && XEXP (new, 0) == const0_rtx)
		return new;

	      SUBST (XEXP (x, i), new);
	    }
	}
    }

  /* Try to simplify X.  If the simplification changed the code, it is likely
     that further simplification will help, so loop, but limit the number
     of repetitions that will be performed.  */

  for (i = 0; i < 4; i++)
    {
      /* If X is sufficiently simple, don't bother trying to do anything
	 with it.  */
      if (code != CONST_INT && code != REG && code != CLOBBER)
	x = simplify_rtx (x, op0_mode, i == 3, in_dest);

      if (GET_CODE (x) == code)
	break;

      code = GET_CODE (x);

      /* We no longer know the original mode of operand 0 since we
	 have changed the form of X.  */
      op0_mode = VOIDmode;
    }

  return x;
}
3228\f
3229/* Simplify X, a piece of RTL. We just operate on the expression at the
3230 outer level; call `subst' to simplify recursively. Return the new
3231 expression.
3232
3233 OP0_MODE is the original mode of XEXP (x, 0); LAST is nonzero if this
3234 will be the iteration even if an expression with a code different from
3235 X is returned; IN_DEST is nonzero if we are inside a SET_DEST. */
eeb43d32 3236
8079805d
RK
3237static rtx
3238simplify_rtx (x, op0_mode, last, in_dest)
3239 rtx x;
3240 enum machine_mode op0_mode;
3241 int last;
3242 int in_dest;
3243{
3244 enum rtx_code code = GET_CODE (x);
3245 enum machine_mode mode = GET_MODE (x);
3246 rtx temp;
3247 int i;
d0ab8cd3 3248
230d793d
RS
3249 /* If this is a commutative operation, put a constant last and a complex
3250 expression first. We don't need to do this for comparisons here. */
3251 if (GET_RTX_CLASS (code) == 'c'
3252 && ((CONSTANT_P (XEXP (x, 0)) && GET_CODE (XEXP (x, 1)) != CONST_INT)
3253 || (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == 'o'
3254 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) != 'o')
3255 || (GET_CODE (XEXP (x, 0)) == SUBREG
3256 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0)))) == 'o'
3257 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) != 'o')))
3258 {
3259 temp = XEXP (x, 0);
3260 SUBST (XEXP (x, 0), XEXP (x, 1));
3261 SUBST (XEXP (x, 1), temp);
3262 }
3263
22609cbf
RK
3264 /* If this is a PLUS, MINUS, or MULT, and the first operand is the
3265 sign extension of a PLUS with a constant, reverse the order of the sign
3266 extension and the addition. Note that this not the same as the original
3267 code, but overflow is undefined for signed values. Also note that the
3268 PLUS will have been partially moved "inside" the sign-extension, so that
3269 the first operand of X will really look like:
3270 (ashiftrt (plus (ashift A C4) C5) C4).
3271 We convert this to
3272 (plus (ashiftrt (ashift A C4) C2) C4)
3273 and replace the first operand of X with that expression. Later parts
3274 of this function may simplify the expression further.
3275
3276 For example, if we start with (mult (sign_extend (plus A C1)) C2),
3277 we swap the SIGN_EXTEND and PLUS. Later code will apply the
3278 distributive law to produce (plus (mult (sign_extend X) C1) C3).
3279
3280 We do this to simplify address expressions. */
3281
3282 if ((code == PLUS || code == MINUS || code == MULT)
3283 && GET_CODE (XEXP (x, 0)) == ASHIFTRT
3284 && GET_CODE (XEXP (XEXP (x, 0), 0)) == PLUS
3285 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == ASHIFT
3286 && GET_CODE (XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 1)) == CONST_INT
3287 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3288 && XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 1) == XEXP (XEXP (x, 0), 1)
3289 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
3290 && (temp = simplify_binary_operation (ASHIFTRT, mode,
3291 XEXP (XEXP (XEXP (x, 0), 0), 1),
3292 XEXP (XEXP (x, 0), 1))) != 0)
3293 {
3294 rtx new
3295 = simplify_shift_const (NULL_RTX, ASHIFT, mode,
3296 XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 0),
3297 INTVAL (XEXP (XEXP (x, 0), 1)));
3298
3299 new = simplify_shift_const (NULL_RTX, ASHIFTRT, mode, new,
3300 INTVAL (XEXP (XEXP (x, 0), 1)));
3301
3302 SUBST (XEXP (x, 0), gen_binary (PLUS, mode, new, temp));
3303 }
3304
d0ab8cd3
RK
3305 /* If this is a simple operation applied to an IF_THEN_ELSE, try
3306 applying it to the arms of the IF_THEN_ELSE. This often simplifies
abe6e52f
RK
3307 things. Check for cases where both arms are testing the same
3308 condition.
3309
3310 Don't do anything if all operands are very simple. */
3311
3312 if (((GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c'
3313 || GET_RTX_CLASS (code) == '<')
3314 && ((GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) != 'o'
3315 && ! (GET_CODE (XEXP (x, 0)) == SUBREG
3316 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0))))
3317 == 'o')))
3318 || (GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) != 'o'
3319 && ! (GET_CODE (XEXP (x, 1)) == SUBREG
3320 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 1))))
3321 == 'o')))))
3322 || (GET_RTX_CLASS (code) == '1'
3323 && ((GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) != 'o'
3324 && ! (GET_CODE (XEXP (x, 0)) == SUBREG
3325 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0))))
3326 == 'o'))))))
d0ab8cd3 3327 {
abe6e52f
RK
3328 rtx cond, true, false;
3329
3330 cond = if_then_else_cond (x, &true, &false);
0802d516
RK
3331 if (cond != 0
3332 /* If everything is a comparison, what we have is highly unlikely
3333 to be simpler, so don't use it. */
3334 && ! (GET_RTX_CLASS (code) == '<'
3335 && (GET_RTX_CLASS (GET_CODE (true)) == '<'
3336 || GET_RTX_CLASS (GET_CODE (false)) == '<')))
abe6e52f
RK
3337 {
3338 rtx cop1 = const0_rtx;
3339 enum rtx_code cond_code = simplify_comparison (NE, &cond, &cop1);
3340
15448afc
RK
3341 if (cond_code == NE && GET_RTX_CLASS (GET_CODE (cond)) == '<')
3342 return x;
3343
9210df58
RK
3344 /* Simplify the alternative arms; this may collapse the true and
3345 false arms to store-flag values. */
3346 true = subst (true, pc_rtx, pc_rtx, 0, 0);
3347 false = subst (false, pc_rtx, pc_rtx, 0, 0);
3348
3349 /* Restarting if we generate a store-flag expression will cause
3350 us to loop. Just drop through in this case. */
3351
abe6e52f
RK
3352 /* If the result values are STORE_FLAG_VALUE and zero, we can
3353 just make the comparison operation. */
3354 if (true == const_true_rtx && false == const0_rtx)
3355 x = gen_binary (cond_code, mode, cond, cop1);
3356 else if (true == const0_rtx && false == const_true_rtx)
3357 x = gen_binary (reverse_condition (cond_code), mode, cond, cop1);
3358
3359 /* Likewise, we can make the negate of a comparison operation
3360 if the result values are - STORE_FLAG_VALUE and zero. */
3361 else if (GET_CODE (true) == CONST_INT
3362 && INTVAL (true) == - STORE_FLAG_VALUE
3363 && false == const0_rtx)
0c1c8ea6 3364 x = gen_unary (NEG, mode, mode,
abe6e52f
RK
3365 gen_binary (cond_code, mode, cond, cop1));
3366 else if (GET_CODE (false) == CONST_INT
3367 && INTVAL (false) == - STORE_FLAG_VALUE
3368 && true == const0_rtx)
0c1c8ea6 3369 x = gen_unary (NEG, mode, mode,
abe6e52f
RK
3370 gen_binary (reverse_condition (cond_code),
3371 mode, cond, cop1));
3372 else
38a448ca
RH
3373 return gen_rtx_IF_THEN_ELSE (mode,
3374 gen_binary (cond_code, VOIDmode,
3375 cond, cop1),
3376 true, false);
5109d49f 3377
9210df58
RK
3378 code = GET_CODE (x);
3379 op0_mode = VOIDmode;
abe6e52f 3380 }
d0ab8cd3
RK
3381 }
3382
230d793d
RS
3383 /* Try to fold this expression in case we have constants that weren't
3384 present before. */
3385 temp = 0;
3386 switch (GET_RTX_CLASS (code))
3387 {
3388 case '1':
3389 temp = simplify_unary_operation (code, mode, XEXP (x, 0), op0_mode);
3390 break;
3391 case '<':
3392 temp = simplify_relational_operation (code, op0_mode,
3393 XEXP (x, 0), XEXP (x, 1));
77fa0940
RK
3394#ifdef FLOAT_STORE_FLAG_VALUE
3395 if (temp != 0 && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
3396 temp = ((temp == const0_rtx) ? CONST0_RTX (GET_MODE (x))
3397 : immed_real_const_1 (FLOAT_STORE_FLAG_VALUE, GET_MODE (x)));
3398#endif
230d793d
RS
3399 break;
3400 case 'c':
3401 case '2':
3402 temp = simplify_binary_operation (code, mode, XEXP (x, 0), XEXP (x, 1));
3403 break;
3404 case 'b':
3405 case '3':
3406 temp = simplify_ternary_operation (code, mode, op0_mode, XEXP (x, 0),
3407 XEXP (x, 1), XEXP (x, 2));
3408 break;
3409 }
3410
3411 if (temp)
d0ab8cd3 3412 x = temp, code = GET_CODE (temp);
230d793d 3413
230d793d 3414 /* First see if we can apply the inverse distributive law. */
224eeff2
RK
3415 if (code == PLUS || code == MINUS
3416 || code == AND || code == IOR || code == XOR)
230d793d
RS
3417 {
3418 x = apply_distributive_law (x);
3419 code = GET_CODE (x);
3420 }
3421
3422 /* If CODE is an associative operation not otherwise handled, see if we
3423 can associate some operands. This can win if they are constants or
3424 if they are logically related (i.e. (a & b) & a. */
3425 if ((code == PLUS || code == MINUS
3426 || code == MULT || code == AND || code == IOR || code == XOR
3427 || code == DIV || code == UDIV
3428 || code == SMAX || code == SMIN || code == UMAX || code == UMIN)
3ad2180a 3429 && INTEGRAL_MODE_P (mode))
230d793d
RS
3430 {
3431 if (GET_CODE (XEXP (x, 0)) == code)
3432 {
3433 rtx other = XEXP (XEXP (x, 0), 0);
3434 rtx inner_op0 = XEXP (XEXP (x, 0), 1);
3435 rtx inner_op1 = XEXP (x, 1);
3436 rtx inner;
3437
3438 /* Make sure we pass the constant operand if any as the second
3439 one if this is a commutative operation. */
3440 if (CONSTANT_P (inner_op0) && GET_RTX_CLASS (code) == 'c')
3441 {
3442 rtx tem = inner_op0;
3443 inner_op0 = inner_op1;
3444 inner_op1 = tem;
3445 }
3446 inner = simplify_binary_operation (code == MINUS ? PLUS
3447 : code == DIV ? MULT
3448 : code == UDIV ? MULT
3449 : code,
3450 mode, inner_op0, inner_op1);
3451
3452 /* For commutative operations, try the other pair if that one
3453 didn't simplify. */
3454 if (inner == 0 && GET_RTX_CLASS (code) == 'c')
3455 {
3456 other = XEXP (XEXP (x, 0), 1);
3457 inner = simplify_binary_operation (code, mode,
3458 XEXP (XEXP (x, 0), 0),
3459 XEXP (x, 1));
3460 }
3461
3462 if (inner)
8079805d 3463 return gen_binary (code, mode, other, inner);
230d793d
RS
3464 }
3465 }
3466
3467 /* A little bit of algebraic simplification here. */
3468 switch (code)
3469 {
3470 case MEM:
3471 /* Ensure that our address has any ASHIFTs converted to MULT in case
3472 address-recognizing predicates are called later. */
3473 temp = make_compound_operation (XEXP (x, 0), MEM);
3474 SUBST (XEXP (x, 0), temp);
3475 break;
3476
3477 case SUBREG:
3478 /* (subreg:A (mem:B X) N) becomes a modified MEM unless the SUBREG
3479 is paradoxical. If we can't do that safely, then it becomes
3480 something nonsensical so that this combination won't take place. */
3481
3482 if (GET_CODE (SUBREG_REG (x)) == MEM
3483 && (GET_MODE_SIZE (mode)
3484 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
3485 {
3486 rtx inner = SUBREG_REG (x);
3487 int endian_offset = 0;
3488 /* Don't change the mode of the MEM
3489 if that would change the meaning of the address. */
3490 if (MEM_VOLATILE_P (SUBREG_REG (x))
3491 || mode_dependent_address_p (XEXP (inner, 0)))
38a448ca 3492 return gen_rtx_CLOBBER (mode, const0_rtx);
230d793d 3493
f76b9db2
ILT
3494 if (BYTES_BIG_ENDIAN)
3495 {
3496 if (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
3497 endian_offset += UNITS_PER_WORD - GET_MODE_SIZE (mode);
3498 if (GET_MODE_SIZE (GET_MODE (inner)) < UNITS_PER_WORD)
3499 endian_offset -= (UNITS_PER_WORD
3500 - GET_MODE_SIZE (GET_MODE (inner)));
3501 }
230d793d
RS
3502 /* Note if the plus_constant doesn't make a valid address
3503 then this combination won't be accepted. */
38a448ca
RH
3504 x = gen_rtx_MEM (mode,
3505 plus_constant (XEXP (inner, 0),
3506 (SUBREG_WORD (x) * UNITS_PER_WORD
3507 + endian_offset)));
230d793d 3508 RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (inner);
c6df88cb 3509 MEM_COPY_ATTRIBUTES (x, inner);
230d793d
RS
3510 return x;
3511 }
3512
3513 /* If we are in a SET_DEST, these other cases can't apply. */
3514 if (in_dest)
3515 return x;
3516
3517 /* Changing mode twice with SUBREG => just change it once,
3518 or not at all if changing back to starting mode. */
3519 if (GET_CODE (SUBREG_REG (x)) == SUBREG)
3520 {
3521 if (mode == GET_MODE (SUBREG_REG (SUBREG_REG (x)))
3522 && SUBREG_WORD (x) == 0 && SUBREG_WORD (SUBREG_REG (x)) == 0)
3523 return SUBREG_REG (SUBREG_REG (x));
3524
3525 SUBST_INT (SUBREG_WORD (x),
3526 SUBREG_WORD (x) + SUBREG_WORD (SUBREG_REG (x)));
3527 SUBST (SUBREG_REG (x), SUBREG_REG (SUBREG_REG (x)));
3528 }
3529
3530 /* SUBREG of a hard register => just change the register number
3531 and/or mode. If the hard register is not valid in that mode,
26ecfc76
RK
3532 suppress this combination. If the hard register is the stack,
3533 frame, or argument pointer, leave this as a SUBREG. */
230d793d
RS
3534
3535 if (GET_CODE (SUBREG_REG (x)) == REG
26ecfc76
RK
3536 && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER
3537 && REGNO (SUBREG_REG (x)) != FRAME_POINTER_REGNUM
6d7096b0
DE
3538#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3539 && REGNO (SUBREG_REG (x)) != HARD_FRAME_POINTER_REGNUM
3540#endif
26ecfc76
RK
3541#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3542 && REGNO (SUBREG_REG (x)) != ARG_POINTER_REGNUM
3543#endif
3544 && REGNO (SUBREG_REG (x)) != STACK_POINTER_REGNUM)
230d793d
RS
3545 {
3546 if (HARD_REGNO_MODE_OK (REGNO (SUBREG_REG (x)) + SUBREG_WORD (x),
3547 mode))
38a448ca
RH
3548 return gen_rtx_REG (mode,
3549 REGNO (SUBREG_REG (x)) + SUBREG_WORD (x));
230d793d 3550 else
38a448ca 3551 return gen_rtx_CLOBBER (mode, const0_rtx);
230d793d
RS
3552 }
3553
3554 /* For a constant, try to pick up the part we want. Handle a full
a4bde0b1
RK
3555 word and low-order part. Only do this if we are narrowing
3556 the constant; if it is being widened, we have no idea what
3557 the extra bits will have been set to. */
230d793d
RS
3558
3559 if (CONSTANT_P (SUBREG_REG (x)) && op0_mode != VOIDmode
3560 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
3c99d5ff 3561 && GET_MODE_SIZE (op0_mode) > UNITS_PER_WORD
230d793d
RS
3562 && GET_MODE_CLASS (mode) == MODE_INT)
3563 {
3564 temp = operand_subword (SUBREG_REG (x), SUBREG_WORD (x),
5f4f0e22 3565 0, op0_mode);
230d793d
RS
3566 if (temp)
3567 return temp;
3568 }
3569
19808e22
RS
3570 /* If we want a subreg of a constant, at offset 0,
3571 take the low bits. On a little-endian machine, that's
3572 always valid. On a big-endian machine, it's valid
3c99d5ff 3573 only if the constant's mode fits in one word. Note that we
61b1bece 3574 cannot use subreg_lowpart_p since SUBREG_REG may be VOIDmode. */
3c99d5ff
RK
3575 if (CONSTANT_P (SUBREG_REG (x))
3576 && ((GET_MODE_SIZE (op0_mode) <= UNITS_PER_WORD
3577 || ! WORDS_BIG_ENDIAN)
3578 ? SUBREG_WORD (x) == 0
3579 : (SUBREG_WORD (x)
3580 == ((GET_MODE_SIZE (op0_mode)
3581 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD))
3582 / UNITS_PER_WORD)))
f82da7d2 3583 && GET_MODE_SIZE (mode) <= GET_MODE_SIZE (op0_mode)
f76b9db2
ILT
3584 && (! WORDS_BIG_ENDIAN
3585 || GET_MODE_BITSIZE (op0_mode) <= BITS_PER_WORD))
230d793d
RS
3586 return gen_lowpart_for_combine (mode, SUBREG_REG (x));
3587
b65c1b5b
RK
3588 /* A paradoxical SUBREG of a VOIDmode constant is the same constant,
3589 since we are saying that the high bits don't matter. */
3590 if (CONSTANT_P (SUBREG_REG (x)) && GET_MODE (SUBREG_REG (x)) == VOIDmode
3591 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (op0_mode))
3592 return SUBREG_REG (x);
3593
87e3e0c1
RK
3594 /* Note that we cannot do any narrowing for non-constants since
3595 we might have been counting on using the fact that some bits were
3596 zero. We now do this in the SET. */
3597
230d793d
RS
3598 break;
3599
3600 case NOT:
3601 /* (not (plus X -1)) can become (neg X). */
3602 if (GET_CODE (XEXP (x, 0)) == PLUS
3603 && XEXP (XEXP (x, 0), 1) == constm1_rtx)
8079805d 3604 return gen_rtx_combine (NEG, mode, XEXP (XEXP (x, 0), 0));
230d793d
RS
3605
3606 /* Similarly, (not (neg X)) is (plus X -1). */
3607 if (GET_CODE (XEXP (x, 0)) == NEG)
8079805d
RK
3608 return gen_rtx_combine (PLUS, mode, XEXP (XEXP (x, 0), 0),
3609 constm1_rtx);
230d793d 3610
d0ab8cd3
RK
3611 /* (not (xor X C)) for C constant is (xor X D) with D = ~ C. */
3612 if (GET_CODE (XEXP (x, 0)) == XOR
3613 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3614 && (temp = simplify_unary_operation (NOT, mode,
3615 XEXP (XEXP (x, 0), 1),
3616 mode)) != 0)
787745f5 3617 return gen_binary (XOR, mode, XEXP (XEXP (x, 0), 0), temp);
d0ab8cd3 3618
230d793d
RS
3619 /* (not (ashift 1 X)) is (rotate ~1 X). We used to do this for operands
3620 other than 1, but that is not valid. We could do a similar
3621 simplification for (not (lshiftrt C X)) where C is just the sign bit,
3622 but this doesn't seem common enough to bother with. */
3623 if (GET_CODE (XEXP (x, 0)) == ASHIFT
3624 && XEXP (XEXP (x, 0), 0) == const1_rtx)
38a448ca
RH
3625 return gen_rtx_ROTATE (mode, gen_unary (NOT, mode, mode, const1_rtx),
3626 XEXP (XEXP (x, 0), 1));
230d793d
RS
3627
3628 if (GET_CODE (XEXP (x, 0)) == SUBREG
3629 && subreg_lowpart_p (XEXP (x, 0))
3630 && (GET_MODE_SIZE (GET_MODE (XEXP (x, 0)))
3631 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (x, 0)))))
3632 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == ASHIFT
3633 && XEXP (SUBREG_REG (XEXP (x, 0)), 0) == const1_rtx)
3634 {
3635 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (XEXP (x, 0)));
3636
38a448ca
RH
3637 x = gen_rtx_ROTATE (inner_mode,
3638 gen_unary (NOT, inner_mode, inner_mode,
3639 const1_rtx),
3640 XEXP (SUBREG_REG (XEXP (x, 0)), 1));
8079805d 3641 return gen_lowpart_for_combine (mode, x);
230d793d
RS
3642 }
3643
0802d516
RK
3644 /* If STORE_FLAG_VALUE is -1, (not (comparison foo bar)) can be done by
3645 reversing the comparison code if valid. */
3646 if (STORE_FLAG_VALUE == -1
3647 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
230d793d
RS
3648 && reversible_comparison_p (XEXP (x, 0)))
3649 return gen_rtx_combine (reverse_condition (GET_CODE (XEXP (x, 0))),
3650 mode, XEXP (XEXP (x, 0), 0),
3651 XEXP (XEXP (x, 0), 1));
500c518b
RK
3652
3653 /* (ashiftrt foo C) where C is the number of bits in FOO minus 1
0802d516
RK
3654 is (lt foo (const_int 0)) if STORE_FLAG_VALUE is -1, so we can
3655 perform the above simplification. */
500c518b 3656
0802d516
RK
3657 if (STORE_FLAG_VALUE == -1
3658 && XEXP (x, 1) == const1_rtx
500c518b
RK
3659 && GET_CODE (XEXP (x, 0)) == ASHIFTRT
3660 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3661 && INTVAL (XEXP (XEXP (x, 0), 1)) == GET_MODE_BITSIZE (mode) - 1)
3662 return gen_rtx_combine (GE, mode, XEXP (XEXP (x, 0), 0), const0_rtx);
230d793d
RS
3663
3664 /* Apply De Morgan's laws to reduce number of patterns for machines
3665 with negating logical insns (and-not, nand, etc.). If result has
3666 only one NOT, put it first, since that is how the patterns are
3667 coded. */
3668
3669 if (GET_CODE (XEXP (x, 0)) == IOR || GET_CODE (XEXP (x, 0)) == AND)
3670 {
3671 rtx in1 = XEXP (XEXP (x, 0), 0), in2 = XEXP (XEXP (x, 0), 1);
3672
3673 if (GET_CODE (in1) == NOT)
3674 in1 = XEXP (in1, 0);
3675 else
3676 in1 = gen_rtx_combine (NOT, GET_MODE (in1), in1);
3677
3678 if (GET_CODE (in2) == NOT)
3679 in2 = XEXP (in2, 0);
3680 else if (GET_CODE (in2) == CONST_INT
5f4f0e22
CH
3681 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3682 in2 = GEN_INT (GET_MODE_MASK (mode) & ~ INTVAL (in2));
230d793d
RS
3683 else
3684 in2 = gen_rtx_combine (NOT, GET_MODE (in2), in2);
3685
3686 if (GET_CODE (in2) == NOT)
3687 {
3688 rtx tem = in2;
3689 in2 = in1; in1 = tem;
3690 }
3691
8079805d
RK
3692 return gen_rtx_combine (GET_CODE (XEXP (x, 0)) == IOR ? AND : IOR,
3693 mode, in1, in2);
230d793d
RS
3694 }
3695 break;
3696
3697 case NEG:
3698 /* (neg (plus X 1)) can become (not X). */
3699 if (GET_CODE (XEXP (x, 0)) == PLUS
3700 && XEXP (XEXP (x, 0), 1) == const1_rtx)
8079805d 3701 return gen_rtx_combine (NOT, mode, XEXP (XEXP (x, 0), 0));
230d793d
RS
3702
3703 /* Similarly, (neg (not X)) is (plus X 1). */
3704 if (GET_CODE (XEXP (x, 0)) == NOT)
8079805d 3705 return plus_constant (XEXP (XEXP (x, 0), 0), 1);
230d793d 3706
230d793d
RS
3707 /* (neg (minus X Y)) can become (minus Y X). */
3708 if (GET_CODE (XEXP (x, 0)) == MINUS
3ad2180a 3709 && (! FLOAT_MODE_P (mode)
0f41302f 3710 /* x-y != -(y-x) with IEEE floating point. */
7e2a0d8e
RK
3711 || TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3712 || flag_fast_math))
8079805d
RK
3713 return gen_binary (MINUS, mode, XEXP (XEXP (x, 0), 1),
3714 XEXP (XEXP (x, 0), 0));
230d793d 3715
0f41302f 3716 /* (neg (xor A 1)) is (plus A -1) if A is known to be either 0 or 1. */
d0ab8cd3 3717 if (GET_CODE (XEXP (x, 0)) == XOR && XEXP (XEXP (x, 0), 1) == const1_rtx
951553af 3718 && nonzero_bits (XEXP (XEXP (x, 0), 0), mode) == 1)
8079805d 3719 return gen_binary (PLUS, mode, XEXP (XEXP (x, 0), 0), constm1_rtx);
d0ab8cd3 3720
230d793d
RS
3721 /* NEG commutes with ASHIFT since it is multiplication. Only do this
3722 if we can then eliminate the NEG (e.g.,
3723 if the operand is a constant). */
3724
3725 if (GET_CODE (XEXP (x, 0)) == ASHIFT)
3726 {
3727 temp = simplify_unary_operation (NEG, mode,
3728 XEXP (XEXP (x, 0), 0), mode);
3729 if (temp)
3730 {
3731 SUBST (XEXP (XEXP (x, 0), 0), temp);
3732 return XEXP (x, 0);
3733 }
3734 }
3735
3736 temp = expand_compound_operation (XEXP (x, 0));
3737
3738 /* For C equal to the width of MODE minus 1, (neg (ashiftrt X C)) can be
3739 replaced by (lshiftrt X C). This will convert
3740 (neg (sign_extract X 1 Y)) to (zero_extract X 1 Y). */
3741
3742 if (GET_CODE (temp) == ASHIFTRT
3743 && GET_CODE (XEXP (temp, 1)) == CONST_INT
3744 && INTVAL (XEXP (temp, 1)) == GET_MODE_BITSIZE (mode) - 1)
8079805d
RK
3745 return simplify_shift_const (temp, LSHIFTRT, mode, XEXP (temp, 0),
3746 INTVAL (XEXP (temp, 1)));
230d793d 3747
951553af 3748 /* If X has only a single bit that might be nonzero, say, bit I, convert
230d793d
RS
3749 (neg X) to (ashiftrt (ashift X C-I) C-I) where C is the bitsize of
3750 MODE minus 1. This will convert (neg (zero_extract X 1 Y)) to
3751 (sign_extract X 1 Y). But only do this if TEMP isn't a register
3752 or a SUBREG of one since we'd be making the expression more
3753 complex if it was just a register. */
3754
3755 if (GET_CODE (temp) != REG
3756 && ! (GET_CODE (temp) == SUBREG
3757 && GET_CODE (SUBREG_REG (temp)) == REG)
951553af 3758 && (i = exact_log2 (nonzero_bits (temp, mode))) >= 0)
230d793d
RS
3759 {
3760 rtx temp1 = simplify_shift_const
5f4f0e22
CH
3761 (NULL_RTX, ASHIFTRT, mode,
3762 simplify_shift_const (NULL_RTX, ASHIFT, mode, temp,
230d793d
RS
3763 GET_MODE_BITSIZE (mode) - 1 - i),
3764 GET_MODE_BITSIZE (mode) - 1 - i);
3765
3766 /* If all we did was surround TEMP with the two shifts, we
3767 haven't improved anything, so don't use it. Otherwise,
3768 we are better off with TEMP1. */
3769 if (GET_CODE (temp1) != ASHIFTRT
3770 || GET_CODE (XEXP (temp1, 0)) != ASHIFT
3771 || XEXP (XEXP (temp1, 0), 0) != temp)
8079805d 3772 return temp1;
230d793d
RS
3773 }
3774 break;
3775
2ca9ae17 3776 case TRUNCATE:
e30fb98f
JL
3777 /* We can't handle truncation to a partial integer mode here
3778 because we don't know the real bitsize of the partial
3779 integer mode. */
3780 if (GET_MODE_CLASS (mode) == MODE_PARTIAL_INT)
3781 break;
3782
80608e27
JL
3783 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3784 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
3785 GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))))
2ca9ae17
JW
3786 SUBST (XEXP (x, 0),
3787 force_to_mode (XEXP (x, 0), GET_MODE (XEXP (x, 0)),
3788 GET_MODE_MASK (mode), NULL_RTX, 0));
0f13a422
ILT
3789
3790 /* (truncate:SI ({sign,zero}_extend:DI foo:SI)) == foo:SI. */
3791 if ((GET_CODE (XEXP (x, 0)) == SIGN_EXTEND
3792 || GET_CODE (XEXP (x, 0)) == ZERO_EXTEND)
3793 && GET_MODE (XEXP (XEXP (x, 0), 0)) == mode)
3794 return XEXP (XEXP (x, 0), 0);
3795
3796 /* (truncate:SI (OP:DI ({sign,zero}_extend:DI foo:SI))) is
3797 (OP:SI foo:SI) if OP is NEG or ABS. */
3798 if ((GET_CODE (XEXP (x, 0)) == ABS
3799 || GET_CODE (XEXP (x, 0)) == NEG)
3800 && (GET_CODE (XEXP (XEXP (x, 0), 0)) == SIGN_EXTEND
3801 || GET_CODE (XEXP (XEXP (x, 0), 0)) == ZERO_EXTEND)
3802 && GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == mode)
3803 return gen_unary (GET_CODE (XEXP (x, 0)), mode, mode,
3804 XEXP (XEXP (XEXP (x, 0), 0), 0));
3805
3806 /* (truncate:SI (subreg:DI (truncate:SI X) 0)) is
3807 (truncate:SI x). */
3808 if (GET_CODE (XEXP (x, 0)) == SUBREG
3809 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == TRUNCATE
3810 && subreg_lowpart_p (XEXP (x, 0)))
3811 return SUBREG_REG (XEXP (x, 0));
3812
3813 /* If we know that the value is already truncated, we can
6a992214
JL
3814 replace the TRUNCATE with a SUBREG if TRULY_NOOP_TRUNCATION is
3815 nonzero for the corresponding modes. */
3816 if (TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
3817 GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))))
3818 && num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
3819 >= GET_MODE_BITSIZE (mode) + 1)
0f13a422
ILT
3820 return gen_lowpart_for_combine (mode, XEXP (x, 0));
3821
3822 /* A truncate of a comparison can be replaced with a subreg if
3823 STORE_FLAG_VALUE permits. This is like the previous test,
3824 but it works even if the comparison is done in a mode larger
3825 than HOST_BITS_PER_WIDE_INT. */
3826 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3827 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
3828 && ((HOST_WIDE_INT) STORE_FLAG_VALUE &~ GET_MODE_MASK (mode)) == 0)
3829 return gen_lowpart_for_combine (mode, XEXP (x, 0));
3830
3831 /* Similarly, a truncate of a register whose value is a
3832 comparison can be replaced with a subreg if STORE_FLAG_VALUE
3833 permits. */
3834 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3835 && ((HOST_WIDE_INT) STORE_FLAG_VALUE &~ GET_MODE_MASK (mode)) == 0
3836 && (temp = get_last_value (XEXP (x, 0)))
3837 && GET_RTX_CLASS (GET_CODE (temp)) == '<')
3838 return gen_lowpart_for_combine (mode, XEXP (x, 0));
3839
2ca9ae17
JW
3840 break;
3841
230d793d
RS
3842 case FLOAT_TRUNCATE:
3843 /* (float_truncate:SF (float_extend:DF foo:SF)) = foo:SF. */
3844 if (GET_CODE (XEXP (x, 0)) == FLOAT_EXTEND
3845 && GET_MODE (XEXP (XEXP (x, 0), 0)) == mode)
3846 return XEXP (XEXP (x, 0), 0);
4635f748
RK
3847
3848 /* (float_truncate:SF (OP:DF (float_extend:DF foo:sf))) is
3849 (OP:SF foo:SF) if OP is NEG or ABS. */
3850 if ((GET_CODE (XEXP (x, 0)) == ABS
3851 || GET_CODE (XEXP (x, 0)) == NEG)
3852 && GET_CODE (XEXP (XEXP (x, 0), 0)) == FLOAT_EXTEND
3853 && GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == mode)
0c1c8ea6
RK
3854 return gen_unary (GET_CODE (XEXP (x, 0)), mode, mode,
3855 XEXP (XEXP (XEXP (x, 0), 0), 0));
1d12df72
RK
3856
3857 /* (float_truncate:SF (subreg:DF (float_truncate:SF X) 0))
3858 is (float_truncate:SF x). */
3859 if (GET_CODE (XEXP (x, 0)) == SUBREG
3860 && subreg_lowpart_p (XEXP (x, 0))
3861 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == FLOAT_TRUNCATE)
3862 return SUBREG_REG (XEXP (x, 0));
230d793d
RS
3863 break;
3864
3865#ifdef HAVE_cc0
3866 case COMPARE:
3867 /* Convert (compare FOO (const_int 0)) to FOO unless we aren't
3868 using cc0, in which case we want to leave it as a COMPARE
3869 so we can distinguish it from a register-register-copy. */
3870 if (XEXP (x, 1) == const0_rtx)
3871 return XEXP (x, 0);
3872
3873 /* In IEEE floating point, x-0 is not the same as x. */
3874 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
7e2a0d8e
RK
3875 || ! FLOAT_MODE_P (GET_MODE (XEXP (x, 0)))
3876 || flag_fast_math)
230d793d
RS
3877 && XEXP (x, 1) == CONST0_RTX (GET_MODE (XEXP (x, 0))))
3878 return XEXP (x, 0);
3879 break;
3880#endif
3881
3882 case CONST:
3883 /* (const (const X)) can become (const X). Do it this way rather than
3884 returning the inner CONST since CONST can be shared with a
3885 REG_EQUAL note. */
3886 if (GET_CODE (XEXP (x, 0)) == CONST)
3887 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
3888 break;
3889
3890#ifdef HAVE_lo_sum
3891 case LO_SUM:
3892 /* Convert (lo_sum (high FOO) FOO) to FOO. This is necessary so we
3893 can add in an offset. find_split_point will split this address up
3894 again if it doesn't match. */
3895 if (GET_CODE (XEXP (x, 0)) == HIGH
3896 && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1)))
3897 return XEXP (x, 1);
3898 break;
3899#endif
3900
3901 case PLUS:
3902 /* If we have (plus (plus (A const) B)), associate it so that CONST is
3903 outermost. That's because that's the way indexed addresses are
3904 supposed to appear. This code used to check many more cases, but
3905 they are now checked elsewhere. */
3906 if (GET_CODE (XEXP (x, 0)) == PLUS
3907 && CONSTANT_ADDRESS_P (XEXP (XEXP (x, 0), 1)))
3908 return gen_binary (PLUS, mode,
3909 gen_binary (PLUS, mode, XEXP (XEXP (x, 0), 0),
3910 XEXP (x, 1)),
3911 XEXP (XEXP (x, 0), 1));
3912
3913 /* (plus (xor (and <foo> (const_int pow2 - 1)) <c>) <-c>)
3914 when c is (const_int (pow2 + 1) / 2) is a sign extension of a
3915 bit-field and can be replaced by either a sign_extend or a
3916 sign_extract. The `and' may be a zero_extend. */
3917 if (GET_CODE (XEXP (x, 0)) == XOR
3918 && GET_CODE (XEXP (x, 1)) == CONST_INT
3919 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3920 && INTVAL (XEXP (x, 1)) == - INTVAL (XEXP (XEXP (x, 0), 1))
3921 && (i = exact_log2 (INTVAL (XEXP (XEXP (x, 0), 1)))) >= 0
5f4f0e22 3922 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
230d793d
RS
3923 && ((GET_CODE (XEXP (XEXP (x, 0), 0)) == AND
3924 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
3925 && (INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1))
5f4f0e22 3926 == ((HOST_WIDE_INT) 1 << (i + 1)) - 1))
230d793d
RS
3927 || (GET_CODE (XEXP (XEXP (x, 0), 0)) == ZERO_EXTEND
3928 && (GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)))
3929 == i + 1))))
8079805d
RK
3930 return simplify_shift_const
3931 (NULL_RTX, ASHIFTRT, mode,
3932 simplify_shift_const (NULL_RTX, ASHIFT, mode,
3933 XEXP (XEXP (XEXP (x, 0), 0), 0),
3934 GET_MODE_BITSIZE (mode) - (i + 1)),
3935 GET_MODE_BITSIZE (mode) - (i + 1));
230d793d 3936
bc0776c6
RK
3937 /* (plus (comparison A B) C) can become (neg (rev-comp A B)) if
3938 C is 1 and STORE_FLAG_VALUE is -1 or if C is -1 and STORE_FLAG_VALUE
3939 is 1. This produces better code than the alternative immediately
3940 below. */
3941 if (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
3942 && reversible_comparison_p (XEXP (x, 0))
3943 && ((STORE_FLAG_VALUE == -1 && XEXP (x, 1) == const1_rtx)
3944 || (STORE_FLAG_VALUE == 1 && XEXP (x, 1) == constm1_rtx)))
8079805d 3945 return
0c1c8ea6 3946 gen_unary (NEG, mode, mode,
8079805d
RK
3947 gen_binary (reverse_condition (GET_CODE (XEXP (x, 0))),
3948 mode, XEXP (XEXP (x, 0), 0),
3949 XEXP (XEXP (x, 0), 1)));
bc0776c6
RK
3950
3951 /* If only the low-order bit of X is possibly nonzero, (plus x -1)
230d793d
RS
3952 can become (ashiftrt (ashift (xor x 1) C) C) where C is
3953 the bitsize of the mode - 1. This allows simplification of
3954 "a = (b & 8) == 0;" */
3955 if (XEXP (x, 1) == constm1_rtx
3956 && GET_CODE (XEXP (x, 0)) != REG
3957 && ! (GET_CODE (XEXP (x,0)) == SUBREG
3958 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG)
951553af 3959 && nonzero_bits (XEXP (x, 0), mode) == 1)
8079805d
RK
3960 return simplify_shift_const (NULL_RTX, ASHIFTRT, mode,
3961 simplify_shift_const (NULL_RTX, ASHIFT, mode,
3962 gen_rtx_combine (XOR, mode,
3963 XEXP (x, 0), const1_rtx),
3964 GET_MODE_BITSIZE (mode) - 1),
3965 GET_MODE_BITSIZE (mode) - 1);
02f4ada4
RK
3966
3967 /* If we are adding two things that have no bits in common, convert
3968 the addition into an IOR. This will often be further simplified,
3969 for example in cases like ((a & 1) + (a & 2)), which can
3970 become a & 3. */
3971
ac49a949 3972 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
951553af
RK
3973 && (nonzero_bits (XEXP (x, 0), mode)
3974 & nonzero_bits (XEXP (x, 1), mode)) == 0)
8079805d 3975 return gen_binary (IOR, mode, XEXP (x, 0), XEXP (x, 1));
230d793d
RS
3976 break;
3977
3978 case MINUS:
0802d516
RK
3979 /* If STORE_FLAG_VALUE is 1, (minus 1 (comparison foo bar)) can be done
3980 by reversing the comparison code if valid. */
3981 if (STORE_FLAG_VALUE == 1
3982 && XEXP (x, 0) == const1_rtx
5109d49f
RK
3983 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<'
3984 && reversible_comparison_p (XEXP (x, 1)))
3985 return gen_binary (reverse_condition (GET_CODE (XEXP (x, 1))),
3986 mode, XEXP (XEXP (x, 1), 0),
3987 XEXP (XEXP (x, 1), 1));
5109d49f 3988
230d793d
RS
3989 /* (minus <foo> (and <foo> (const_int -pow2))) becomes
3990 (and <foo> (const_int pow2-1)) */
3991 if (GET_CODE (XEXP (x, 1)) == AND
3992 && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
3993 && exact_log2 (- INTVAL (XEXP (XEXP (x, 1), 1))) >= 0
3994 && rtx_equal_p (XEXP (XEXP (x, 1), 0), XEXP (x, 0)))
8079805d
RK
3995 return simplify_and_const_int (NULL_RTX, mode, XEXP (x, 0),
3996 - INTVAL (XEXP (XEXP (x, 1), 1)) - 1);
7bef8680
RK
3997
3998 /* Canonicalize (minus A (plus B C)) to (minus (minus A B) C) for
3999 integers. */
4000 if (GET_CODE (XEXP (x, 1)) == PLUS && INTEGRAL_MODE_P (mode))
8079805d
RK
4001 return gen_binary (MINUS, mode,
4002 gen_binary (MINUS, mode, XEXP (x, 0),
4003 XEXP (XEXP (x, 1), 0)),
4004 XEXP (XEXP (x, 1), 1));
230d793d
RS
4005 break;
4006
4007 case MULT:
4008 /* If we have (mult (plus A B) C), apply the distributive law and then
4009 the inverse distributive law to see if things simplify. This
4010 occurs mostly in addresses, often when unrolling loops. */
4011
4012 if (GET_CODE (XEXP (x, 0)) == PLUS)
4013 {
4014 x = apply_distributive_law
4015 (gen_binary (PLUS, mode,
4016 gen_binary (MULT, mode,
4017 XEXP (XEXP (x, 0), 0), XEXP (x, 1)),
4018 gen_binary (MULT, mode,
4019 XEXP (XEXP (x, 0), 1), XEXP (x, 1))));
4020
4021 if (GET_CODE (x) != MULT)
8079805d 4022 return x;
230d793d 4023 }
230d793d
RS
4024 break;
4025
4026 case UDIV:
4027 /* If this is a divide by a power of two, treat it as a shift if
4028 its first operand is a shift. */
4029 if (GET_CODE (XEXP (x, 1)) == CONST_INT
4030 && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0
4031 && (GET_CODE (XEXP (x, 0)) == ASHIFT
4032 || GET_CODE (XEXP (x, 0)) == LSHIFTRT
4033 || GET_CODE (XEXP (x, 0)) == ASHIFTRT
4034 || GET_CODE (XEXP (x, 0)) == ROTATE
4035 || GET_CODE (XEXP (x, 0)) == ROTATERT))
8079805d 4036 return simplify_shift_const (NULL_RTX, LSHIFTRT, mode, XEXP (x, 0), i);
230d793d
RS
4037 break;
4038
4039 case EQ: case NE:
4040 case GT: case GTU: case GE: case GEU:
4041 case LT: case LTU: case LE: case LEU:
4042 /* If the first operand is a condition code, we can't do anything
4043 with it. */
4044 if (GET_CODE (XEXP (x, 0)) == COMPARE
4045 || (GET_MODE_CLASS (GET_MODE (XEXP (x, 0))) != MODE_CC
4046#ifdef HAVE_cc0
4047 && XEXP (x, 0) != cc0_rtx
4048#endif
4049 ))
4050 {
4051 rtx op0 = XEXP (x, 0);
4052 rtx op1 = XEXP (x, 1);
4053 enum rtx_code new_code;
4054
4055 if (GET_CODE (op0) == COMPARE)
4056 op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);
4057
4058 /* Simplify our comparison, if possible. */
4059 new_code = simplify_comparison (code, &op0, &op1);
4060
230d793d 4061 /* If STORE_FLAG_VALUE is 1, we can convert (ne x 0) to simply X
951553af 4062 if only the low-order bit is possibly nonzero in X (such as when
5109d49f
RK
4063 X is a ZERO_EXTRACT of one bit). Similarly, we can convert EQ to
4064 (xor X 1) or (minus 1 X); we use the former. Finally, if X is
4065 known to be either 0 or -1, NE becomes a NEG and EQ becomes
4066 (plus X 1).
4067
4068 Remove any ZERO_EXTRACT we made when thinking this was a
4069 comparison. It may now be simpler to use, e.g., an AND. If a
4070 ZERO_EXTRACT is indeed appropriate, it will be placed back by
4071 the call to make_compound_operation in the SET case. */
4072
0802d516
RK
4073 if (STORE_FLAG_VALUE == 1
4074 && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
4075 && op1 == const0_rtx && nonzero_bits (op0, mode) == 1)
818b11b9
RK
4076 return gen_lowpart_for_combine (mode,
4077 expand_compound_operation (op0));
5109d49f 4078
0802d516
RK
4079 else if (STORE_FLAG_VALUE == 1
4080 && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
5109d49f
RK
4081 && op1 == const0_rtx
4082 && (num_sign_bit_copies (op0, mode)
4083 == GET_MODE_BITSIZE (mode)))
4084 {
4085 op0 = expand_compound_operation (op0);
0c1c8ea6 4086 return gen_unary (NEG, mode, mode,
8079805d 4087 gen_lowpart_for_combine (mode, op0));
5109d49f
RK
4088 }
4089
0802d516
RK
4090 else if (STORE_FLAG_VALUE == 1
4091 && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
230d793d 4092 && op1 == const0_rtx
5109d49f 4093 && nonzero_bits (op0, mode) == 1)
818b11b9
RK
4094 {
4095 op0 = expand_compound_operation (op0);
8079805d
RK
4096 return gen_binary (XOR, mode,
4097 gen_lowpart_for_combine (mode, op0),
4098 const1_rtx);
5109d49f 4099 }
818b11b9 4100
0802d516
RK
4101 else if (STORE_FLAG_VALUE == 1
4102 && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
5109d49f
RK
4103 && op1 == const0_rtx
4104 && (num_sign_bit_copies (op0, mode)
4105 == GET_MODE_BITSIZE (mode)))
4106 {
4107 op0 = expand_compound_operation (op0);
8079805d 4108 return plus_constant (gen_lowpart_for_combine (mode, op0), 1);
818b11b9 4109 }
230d793d 4110
5109d49f
RK
4111 /* If STORE_FLAG_VALUE is -1, we have cases similar to
4112 those above. */
0802d516
RK
4113 if (STORE_FLAG_VALUE == -1
4114 && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
230d793d 4115 && op1 == const0_rtx
5109d49f
RK
4116 && (num_sign_bit_copies (op0, mode)
4117 == GET_MODE_BITSIZE (mode)))
4118 return gen_lowpart_for_combine (mode,
4119 expand_compound_operation (op0));
4120
0802d516
RK
4121 else if (STORE_FLAG_VALUE == -1
4122 && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
5109d49f
RK
4123 && op1 == const0_rtx
4124 && nonzero_bits (op0, mode) == 1)
4125 {
4126 op0 = expand_compound_operation (op0);
0c1c8ea6 4127 return gen_unary (NEG, mode, mode,
8079805d 4128 gen_lowpart_for_combine (mode, op0));
5109d49f
RK
4129 }
4130
0802d516
RK
4131 else if (STORE_FLAG_VALUE == -1
4132 && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
5109d49f
RK
4133 && op1 == const0_rtx
4134 && (num_sign_bit_copies (op0, mode)
4135 == GET_MODE_BITSIZE (mode)))
230d793d 4136 {
818b11b9 4137 op0 = expand_compound_operation (op0);
0c1c8ea6 4138 return gen_unary (NOT, mode, mode,
8079805d 4139 gen_lowpart_for_combine (mode, op0));
5109d49f
RK
4140 }
4141
4142 /* If X is 0/1, (eq X 0) is X-1. */
0802d516
RK
4143 else if (STORE_FLAG_VALUE == -1
4144 && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
5109d49f
RK
4145 && op1 == const0_rtx
4146 && nonzero_bits (op0, mode) == 1)
4147 {
4148 op0 = expand_compound_operation (op0);
8079805d 4149 return plus_constant (gen_lowpart_for_combine (mode, op0), -1);
230d793d 4150 }
230d793d
RS
4151
4152 /* If STORE_FLAG_VALUE says to just test the sign bit and X has just
951553af
RK
4153 one bit that might be nonzero, we can convert (ne x 0) to
4154 (ashift x c) where C puts the bit in the sign bit. Remove any
4155 AND with STORE_FLAG_VALUE when we are done, since we are only
4156 going to test the sign bit. */
3f508eca 4157 if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
5f4f0e22 4158 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
0802d516 4159 && ((STORE_FLAG_VALUE & GET_MODE_MASK (mode))
e51712db 4160 == (unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE(mode)-1))
230d793d
RS
4161 && op1 == const0_rtx
4162 && mode == GET_MODE (op0)
5109d49f 4163 && (i = exact_log2 (nonzero_bits (op0, mode))) >= 0)
230d793d 4164 {
818b11b9
RK
4165 x = simplify_shift_const (NULL_RTX, ASHIFT, mode,
4166 expand_compound_operation (op0),
230d793d
RS
4167 GET_MODE_BITSIZE (mode) - 1 - i);
4168 if (GET_CODE (x) == AND && XEXP (x, 1) == const_true_rtx)
4169 return XEXP (x, 0);
4170 else
4171 return x;
4172 }
4173
4174 /* If the code changed, return a whole new comparison. */
4175 if (new_code != code)
4176 return gen_rtx_combine (new_code, mode, op0, op1);
4177
4178 /* Otherwise, keep this operation, but maybe change its operands.
4179 This also converts (ne (compare FOO BAR) 0) to (ne FOO BAR). */
4180 SUBST (XEXP (x, 0), op0);
4181 SUBST (XEXP (x, 1), op1);
4182 }
4183 break;
4184
4185 case IF_THEN_ELSE:
8079805d 4186 return simplify_if_then_else (x);
9210df58 4187
8079805d
RK
4188 case ZERO_EXTRACT:
4189 case SIGN_EXTRACT:
4190 case ZERO_EXTEND:
4191 case SIGN_EXTEND:
0f41302f 4192 /* If we are processing SET_DEST, we are done. */
8079805d
RK
4193 if (in_dest)
4194 return x;
d0ab8cd3 4195
8079805d 4196 return expand_compound_operation (x);
d0ab8cd3 4197
8079805d
RK
4198 case SET:
4199 return simplify_set (x);
1a26b032 4200
8079805d
RK
4201 case AND:
4202 case IOR:
4203 case XOR:
4204 return simplify_logical (x, last);
d0ab8cd3 4205
b472527b 4206 case ABS:
8079805d
RK
4207 /* (abs (neg <foo>)) -> (abs <foo>) */
4208 if (GET_CODE (XEXP (x, 0)) == NEG)
4209 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
1a26b032 4210
b472527b
JL
4211 /* If the mode of the operand is VOIDmode (i.e. if it is ASM_OPERANDS),
4212 do nothing. */
4213 if (GET_MODE (XEXP (x, 0)) == VOIDmode)
4214 break;
f40421ce 4215
8079805d
RK
4216 /* If operand is something known to be positive, ignore the ABS. */
4217 if (GET_CODE (XEXP (x, 0)) == FFS || GET_CODE (XEXP (x, 0)) == ABS
4218 || ((GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
4219 <= HOST_BITS_PER_WIDE_INT)
4220 && ((nonzero_bits (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
4221 & ((HOST_WIDE_INT) 1
4222 << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1)))
4223 == 0)))
4224 return XEXP (x, 0);
1a26b032 4225
1a26b032 4226
8079805d
RK
4227 /* If operand is known to be only -1 or 0, convert ABS to NEG. */
4228 if (num_sign_bit_copies (XEXP (x, 0), mode) == GET_MODE_BITSIZE (mode))
4229 return gen_rtx_combine (NEG, mode, XEXP (x, 0));
1a26b032 4230
8079805d 4231 break;
1a26b032 4232
8079805d
RK
4233 case FFS:
4234 /* (ffs (*_extend <X>)) = (ffs <X>) */
4235 if (GET_CODE (XEXP (x, 0)) == SIGN_EXTEND
4236 || GET_CODE (XEXP (x, 0)) == ZERO_EXTEND)
4237 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4238 break;
1a26b032 4239
8079805d
RK
4240 case FLOAT:
4241 /* (float (sign_extend <X>)) = (float <X>). */
4242 if (GET_CODE (XEXP (x, 0)) == SIGN_EXTEND)
4243 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4244 break;
1a26b032 4245
8079805d
RK
4246 case ASHIFT:
4247 case LSHIFTRT:
4248 case ASHIFTRT:
4249 case ROTATE:
4250 case ROTATERT:
4251 /* If this is a shift by a constant amount, simplify it. */
4252 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
4253 return simplify_shift_const (x, code, mode, XEXP (x, 0),
4254 INTVAL (XEXP (x, 1)));
4255
4256#ifdef SHIFT_COUNT_TRUNCATED
4257 else if (SHIFT_COUNT_TRUNCATED && GET_CODE (XEXP (x, 1)) != REG)
4258 SUBST (XEXP (x, 1),
4259 force_to_mode (XEXP (x, 1), GET_MODE (x),
4260 ((HOST_WIDE_INT) 1
4261 << exact_log2 (GET_MODE_BITSIZE (GET_MODE (x))))
4262 - 1,
4263 NULL_RTX, 0));
4264#endif
4265
4266 break;
e9a25f70
JL
4267
4268 default:
4269 break;
8079805d
RK
4270 }
4271
4272 return x;
4273}
4274\f
/* Simplify X, an IF_THEN_ELSE expression.  Return the new expression.

   X is (if_then_else COND TRUE-ARM FALSE-ARM).  We attempt, in order:
   storing the truth value (or its reverse) directly, substituting the
   value COND implies for a register into each arm, reversing COND when
   the reversed form is simpler, dropping the test when both arms are
   equal, recognizing ABS / NEG-ABS and SMIN/SMAX/UMIN/UMAX idioms,
   rewriting an identity-zero arm as (OP Z (mult COND C2)), and turning a
   single-bit test into a shift.  If nothing applies, X is returned
   unchanged.  Note that `true' and `false' here are local rtx variables
   naming the two arms (this predates C99/C23 keywords).  */

static rtx
simplify_if_then_else (x)
     rtx x;
{
  enum machine_mode mode = GET_MODE (x);
  rtx cond = XEXP (x, 0);
  rtx true = XEXP (x, 1);       /* The arm taken when COND holds.  */
  rtx false = XEXP (x, 2);      /* The arm taken when COND fails.  */
  enum rtx_code true_code = GET_CODE (cond);
  int comparison_p = GET_RTX_CLASS (true_code) == '<';
  rtx temp;
  int i;

  /* Simplify storing of the truth value:
     (if_then_else (cmp A B) 1 0) is just (cmp A B).  */
  if (comparison_p && true == const_true_rtx && false == const0_rtx)
    return gen_binary (true_code, mode, XEXP (cond, 0), XEXP (cond, 1));

  /* Also when the truth value has to be reversed.  */
  if (comparison_p && reversible_comparison_p (cond)
      && true == const0_rtx && false == const_true_rtx)
    return gen_binary (reverse_condition (true_code),
		       mode, XEXP (cond, 0), XEXP (cond, 1));

  /* Sometimes we can simplify the arm of an IF_THEN_ELSE if a register used
     in it is being compared against certain values.  Get the true and false
     comparisons and see if that says anything about the value of each arm.  */

  if (comparison_p && reversible_comparison_p (cond)
      && GET_CODE (XEXP (cond, 0)) == REG)
    {
      HOST_WIDE_INT nzb;
      rtx from = XEXP (cond, 0);
      enum rtx_code false_code = reverse_condition (true_code);
      rtx true_val = XEXP (cond, 1);
      rtx false_val = true_val;
      int swapped = 0;

      /* If FALSE_CODE is EQ, swap the codes and arms so the EQ case is
	 always handled as TRUE_CODE below.  */

      if (false_code == EQ)
	{
	  swapped = 1, true_code = EQ, false_code = NE;
	  temp = true, true = false, false = temp;
	}

      /* If we are comparing against zero and the expression being tested has
	 only a single bit that might be nonzero, that is its value when it is
	 not equal to zero.  Similarly if it is known to be -1 or 0.  */

      if (true_code == EQ && true_val == const0_rtx
	  && exact_log2 (nzb = nonzero_bits (from, GET_MODE (from))) >= 0)
	false_code = EQ, false_val = GEN_INT (nzb);
      else if (true_code == EQ && true_val == const0_rtx
	       && (num_sign_bit_copies (from, GET_MODE (from))
		   == GET_MODE_BITSIZE (GET_MODE (from))))
	false_code = EQ, false_val = constm1_rtx;

      /* Now simplify an arm if we know the value of the register in the
	 branch and it is used in the arm.  Be careful due to the potential
	 of locally-shared RTL.  */

      if (reg_mentioned_p (from, true))
	true = subst (known_cond (copy_rtx (true), true_code, from, true_val),
		      pc_rtx, pc_rtx, 0, 0);
      if (reg_mentioned_p (from, false))
	false = subst (known_cond (copy_rtx (false), false_code,
				   from, false_val),
		       pc_rtx, pc_rtx, 0, 0);

      /* Put the (possibly simplified) arms back in their original order.  */
      SUBST (XEXP (x, 1), swapped ? false : true);
      SUBST (XEXP (x, 2), swapped ? true : false);

      true = XEXP (x, 1), false = XEXP (x, 2), true_code = GET_CODE (cond);
    }

  /* If we have (if_then_else FOO (pc) (label_ref BAR)) and FOO can be
     reversed, do so to avoid needing two sets of patterns for
     subtract-and-branch insns.  Similarly if we have a constant in the true
     arm, the false arm is the same as the first operand of the comparison, or
     the false arm is more complicated than the true arm.  */

  if (comparison_p && reversible_comparison_p (cond)
      && (true == pc_rtx
	  || (CONSTANT_P (true)
	      && GET_CODE (false) != CONST_INT && false != pc_rtx)
	  || true == const0_rtx
	  || (GET_RTX_CLASS (GET_CODE (true)) == 'o'
	      && GET_RTX_CLASS (GET_CODE (false)) != 'o')
	  || (GET_CODE (true) == SUBREG
	      && GET_RTX_CLASS (GET_CODE (SUBREG_REG (true))) == 'o'
	      && GET_RTX_CLASS (GET_CODE (false)) != 'o')
	  || reg_mentioned_p (true, false)
	  || rtx_equal_p (false, XEXP (cond, 0))))
    {
      true_code = reverse_condition (true_code);
      SUBST (XEXP (x, 0),
	     gen_binary (true_code, GET_MODE (cond), XEXP (cond, 0),
			 XEXP (cond, 1)));

      SUBST (XEXP (x, 1), false);
      SUBST (XEXP (x, 2), true);

      temp = true, true = false, false = temp, cond = XEXP (x, 0);

      /* It is possible that the conditional has been simplified out.  */
      true_code = GET_CODE (cond);
      comparison_p = GET_RTX_CLASS (true_code) == '<';
    }

  /* If the two arms are identical, we don't need the comparison.  */

  if (rtx_equal_p (true, false) && ! side_effects_p (cond))
    return true;

  /* Convert a == b ? b : a to "a".  */
  if (true_code == EQ && ! side_effects_p (cond)
      && rtx_equal_p (XEXP (cond, 0), false)
      && rtx_equal_p (XEXP (cond, 1), true))
    return false;
  else if (true_code == NE && ! side_effects_p (cond)
	   && rtx_equal_p (XEXP (cond, 0), true)
	   && rtx_equal_p (XEXP (cond, 1), false))
    return true;

  /* Look for cases where we have (abs x) or (neg (abs X)).  */

  if (GET_MODE_CLASS (mode) == MODE_INT
      && GET_CODE (false) == NEG
      && rtx_equal_p (true, XEXP (false, 0))
      && comparison_p
      && rtx_equal_p (true, XEXP (cond, 0))
      && ! side_effects_p (true))
    switch (true_code)
      {
      case GT:
      case GE:
	return gen_unary (ABS, mode, mode, true);
      case LT:
      case LE:
	return gen_unary (NEG, mode, mode, gen_unary (ABS, mode, mode, true));
      default:
	break;
      }

  /* Look for MIN or MAX.  Only valid for floats if -ffast-math, since
     MIN/MAX do not preserve IEEE NaN/signed-zero semantics.  */

  if ((! FLOAT_MODE_P (mode) || flag_fast_math)
      && comparison_p
      && rtx_equal_p (XEXP (cond, 0), true)
      && rtx_equal_p (XEXP (cond, 1), false)
      && ! side_effects_p (cond))
    switch (true_code)
      {
      case GE:
      case GT:
	return gen_binary (SMAX, mode, true, false);
      case LE:
      case LT:
	return gen_binary (SMIN, mode, true, false);
      case GEU:
      case GTU:
	return gen_binary (UMAX, mode, true, false);
      case LEU:
      case LTU:
	return gen_binary (UMIN, mode, true, false);
      default:
	break;
      }

  /* If we have (if_then_else COND (OP Z C1) Z) and OP is an identity when its
     second operand is zero, this can be done as (OP Z (mult COND C2)) where
     C2 = C1 * STORE_FLAG_VALUE.  Similarly if OP has an outer ZERO_EXTEND or
     SIGN_EXTEND as long as Z is already extended (so we don't destroy it).
     We can do this kind of thing in some cases when STORE_FLAG_VALUE is
     neither 1 nor -1, but it isn't worth checking for.  */

  if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
      && comparison_p && mode != VOIDmode && ! side_effects_p (x))
    {
      rtx t = make_compound_operation (true, SET);
      rtx f = make_compound_operation (false, SET);
      rtx cond_op0 = XEXP (cond, 0);
      rtx cond_op1 = XEXP (cond, 1);
      enum rtx_code op = NIL, extend_op = NIL;
      enum machine_mode m = mode;
      rtx z = 0, c1 = NULL_RTX;

      /* Case 1: (OP Z C1) with Z as the first operand.  */
      if ((GET_CODE (t) == PLUS || GET_CODE (t) == MINUS
	   || GET_CODE (t) == IOR || GET_CODE (t) == XOR
	   || GET_CODE (t) == ASHIFT
	   || GET_CODE (t) == LSHIFTRT || GET_CODE (t) == ASHIFTRT)
	  && rtx_equal_p (XEXP (t, 0), f))
	c1 = XEXP (t, 1), op = GET_CODE (t), z = f;

      /* If an identity-zero op is commutative, check whether there
	 would be a match if we swapped the operands.  */
      else if ((GET_CODE (t) == PLUS || GET_CODE (t) == IOR
		|| GET_CODE (t) == XOR)
	       && rtx_equal_p (XEXP (t, 1), f))
	c1 = XEXP (t, 0), op = GET_CODE (t), z = f;
      /* Case 3: (sign_extend (OP (subreg Z) C1)); require that Z already
	 has enough sign-bit copies so the extension doesn't change it.  */
      else if (GET_CODE (t) == SIGN_EXTEND
	       && (GET_CODE (XEXP (t, 0)) == PLUS
		   || GET_CODE (XEXP (t, 0)) == MINUS
		   || GET_CODE (XEXP (t, 0)) == IOR
		   || GET_CODE (XEXP (t, 0)) == XOR
		   || GET_CODE (XEXP (t, 0)) == ASHIFT
		   || GET_CODE (XEXP (t, 0)) == LSHIFTRT
		   || GET_CODE (XEXP (t, 0)) == ASHIFTRT)
	       && GET_CODE (XEXP (XEXP (t, 0), 0)) == SUBREG
	       && subreg_lowpart_p (XEXP (XEXP (t, 0), 0))
	       && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 0)), f)
	       && (num_sign_bit_copies (f, GET_MODE (f))
		   > (GET_MODE_BITSIZE (mode)
		      - GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (t, 0), 0))))))
	{
	  c1 = XEXP (XEXP (t, 0), 1); z = f; op = GET_CODE (XEXP (t, 0));
	  extend_op = SIGN_EXTEND;
	  m = GET_MODE (XEXP (t, 0));
	}
      /* Case 4: as case 3 but with the subreg of Z as second operand of a
	 commutative inner operation.  */
      else if (GET_CODE (t) == SIGN_EXTEND
	       && (GET_CODE (XEXP (t, 0)) == PLUS
		   || GET_CODE (XEXP (t, 0)) == IOR
		   || GET_CODE (XEXP (t, 0)) == XOR)
	       && GET_CODE (XEXP (XEXP (t, 0), 1)) == SUBREG
	       && subreg_lowpart_p (XEXP (XEXP (t, 0), 1))
	       && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 1)), f)
	       && (num_sign_bit_copies (f, GET_MODE (f))
		   > (GET_MODE_BITSIZE (mode)
		      - GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (t, 0), 1))))))
	{
	  c1 = XEXP (XEXP (t, 0), 0); z = f; op = GET_CODE (XEXP (t, 0));
	  extend_op = SIGN_EXTEND;
	  m = GET_MODE (XEXP (t, 0));
	}
      /* Case 5: (zero_extend (OP (subreg Z) C1)); require that Z has no
	 nonzero bits outside the narrow mode.  */
      else if (GET_CODE (t) == ZERO_EXTEND
	       && (GET_CODE (XEXP (t, 0)) == PLUS
		   || GET_CODE (XEXP (t, 0)) == MINUS
		   || GET_CODE (XEXP (t, 0)) == IOR
		   || GET_CODE (XEXP (t, 0)) == XOR
		   || GET_CODE (XEXP (t, 0)) == ASHIFT
		   || GET_CODE (XEXP (t, 0)) == LSHIFTRT
		   || GET_CODE (XEXP (t, 0)) == ASHIFTRT)
	       && GET_CODE (XEXP (XEXP (t, 0), 0)) == SUBREG
	       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
	       && subreg_lowpart_p (XEXP (XEXP (t, 0), 0))
	       && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 0)), f)
	       && ((nonzero_bits (f, GET_MODE (f))
		    & ~ GET_MODE_MASK (GET_MODE (XEXP (XEXP (t, 0), 0))))
		   == 0))
	{
	  c1 = XEXP (XEXP (t, 0), 1); z = f; op = GET_CODE (XEXP (t, 0));
	  extend_op = ZERO_EXTEND;
	  m = GET_MODE (XEXP (t, 0));
	}
      /* Case 6: as case 5 with the subreg as second operand of a
	 commutative inner operation.  */
      else if (GET_CODE (t) == ZERO_EXTEND
	       && (GET_CODE (XEXP (t, 0)) == PLUS
		   || GET_CODE (XEXP (t, 0)) == IOR
		   || GET_CODE (XEXP (t, 0)) == XOR)
	       && GET_CODE (XEXP (XEXP (t, 0), 1)) == SUBREG
	       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
	       && subreg_lowpart_p (XEXP (XEXP (t, 0), 1))
	       && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 1)), f)
	       && ((nonzero_bits (f, GET_MODE (f))
		    & ~ GET_MODE_MASK (GET_MODE (XEXP (XEXP (t, 0), 1))))
		   == 0))
	{
	  c1 = XEXP (XEXP (t, 0), 0); z = f; op = GET_CODE (XEXP (t, 0));
	  extend_op = ZERO_EXTEND;
	  m = GET_MODE (XEXP (t, 0));
	}

      /* Z nonzero means one of the patterns above matched: build
	 (OP Z (mult COND (C1 * STORE_FLAG_VALUE))), re-extended if
	 needed.  The subst calls give the new expressions another
	 simplification pass.  */
      if (z)
	{
	  temp = subst (gen_binary (true_code, m, cond_op0, cond_op1),
			pc_rtx, pc_rtx, 0, 0);
	  temp = gen_binary (MULT, m, temp,
			     gen_binary (MULT, m, c1, const_true_rtx));
	  temp = subst (temp, pc_rtx, pc_rtx, 0, 0);
	  temp = gen_binary (op, m, gen_lowpart_for_combine (m, z), temp);

	  if (extend_op != NIL)
	    temp = gen_unary (extend_op, mode, m, temp);

	  return temp;
	}
    }

  /* If we have (if_then_else (ne A 0) C1 0) and either A is known to be 0 or
     1 and C1 is a single bit or A is known to be 0 or -1 and C1 is the
     negation of a single bit, we can convert this operation to a shift.  We
     can actually do this more generally, but it doesn't seem worth it.  */

  if (true_code == NE && XEXP (cond, 1) == const0_rtx
      && false == const0_rtx && GET_CODE (true) == CONST_INT
      && ((1 == nonzero_bits (XEXP (cond, 0), mode)
	   && (i = exact_log2 (INTVAL (true))) >= 0)
	  || ((num_sign_bit_copies (XEXP (cond, 0), mode)
	       == GET_MODE_BITSIZE (mode))
	      && (i = exact_log2 (- INTVAL (true))) >= 0)))
    return
      simplify_shift_const (NULL_RTX, ASHIFT, mode,
			    gen_lowpart_for_combine (mode, XEXP (cond, 0)), i);

  /* No transformation applied; return X unchanged.  */
  return x;
}
4582\f
/* Simplify X, a SET expression.  Return the new expression.

   Handles: (set (pc) (return)); narrowing SRC to the bits DEST needs;
   simplifying a comparison whose result feeds a single condition-code
   use (possibly changing the CC mode or the user insn, recorded in
   undobuf.other_insn); converting (set X (subreg (op))) into a SET of a
   subreg of X; replacing a paradoxical subreg of a MEM with the load
   extension the machine performs anyway; turning an IF_THEN_ELSE on a
   0/-1 value into IOR/AND logic; and finally attempting a field
   assignment.  Returns a CLOBBER of (const_int 0) propagated from SRC
   or DEST to make the whole combination fail.  */

static rtx
simplify_set (x)
     rtx x;
{
  rtx src = SET_SRC (x);
  rtx dest = SET_DEST (x);
  /* Prefer the source mode; fall back to DEST's mode when SRC has
     VOIDmode (e.g. a constant).  */
  enum machine_mode mode
    = GET_MODE (src) != VOIDmode ? GET_MODE (src) : GET_MODE (dest);
  rtx other_insn;
  rtx *cc_use;

  /* (set (pc) (return)) gets written as (return).  */
  if (GET_CODE (dest) == PC && GET_CODE (src) == RETURN)
    return src;

  /* Now that we know for sure which bits of SRC we are using, see if we can
     simplify the expression for the object knowing that we only need the
     low-order bits.  */

  if (GET_MODE_CLASS (mode) == MODE_INT)
    src = force_to_mode (src, mode, GET_MODE_MASK (mode), NULL_RTX, 0);

  /* If we are setting CC0 or if the source is a COMPARE, look for the use of
     the comparison result and try to simplify it unless we already have used
     undobuf.other_insn.  */
  if ((GET_CODE (src) == COMPARE
#ifdef HAVE_cc0
       || dest == cc0_rtx
#endif
       )
      && (cc_use = find_single_use (dest, subst_insn, &other_insn)) != 0
      && (undobuf.other_insn == 0 || other_insn == undobuf.other_insn)
      && GET_RTX_CLASS (GET_CODE (*cc_use)) == '<'
      && rtx_equal_p (XEXP (*cc_use, 0), dest))
    {
      enum rtx_code old_code = GET_CODE (*cc_use);
      enum rtx_code new_code;
      rtx op0, op1;
      int other_changed = 0;
      enum machine_mode compare_mode = GET_MODE (dest);

      /* Extract the two things being compared; a bare SRC is an
	 implicit comparison against zero.  */
      if (GET_CODE (src) == COMPARE)
	op0 = XEXP (src, 0), op1 = XEXP (src, 1);
      else
	op0 = src, op1 = const0_rtx;

      /* Simplify our comparison, if possible.  */
      new_code = simplify_comparison (old_code, &op0, &op1);

#ifdef EXTRA_CC_MODES
      /* If this machine has CC modes other than CCmode, check to see if we
	 need to use a different CC mode here.  */
      compare_mode = SELECT_CC_MODE (new_code, op0, op1);
#endif /* EXTRA_CC_MODES */

#if !defined (HAVE_cc0) && defined (EXTRA_CC_MODES)
      /* If the mode changed, we have to change SET_DEST, the mode in the
	 compare, and the mode in the place SET_DEST is used.  If SET_DEST is
	 a hard register, just build new versions with the proper mode.  If it
	 is a pseudo, we lose unless it is the only time we set the pseudo, in
	 which case we can safely change its mode.  */
      if (compare_mode != GET_MODE (dest))
	{
	  int regno = REGNO (dest);
	  rtx new_dest = gen_rtx_REG (compare_mode, regno);

	  if (regno < FIRST_PSEUDO_REGISTER
	      || (REG_N_SETS (regno) == 1 && ! REG_USERVAR_P (dest)))
	    {
	      if (regno >= FIRST_PSEUDO_REGISTER)
		SUBST (regno_reg_rtx[regno], new_dest);

	      SUBST (SET_DEST (x), new_dest);
	      SUBST (XEXP (*cc_use, 0), new_dest);
	      other_changed = 1;

	      dest = new_dest;
	    }
	}
#endif

      /* If the code changed, we have to build a new comparison in
	 undobuf.other_insn.  */
      if (new_code != old_code)
	{
	  unsigned HOST_WIDE_INT mask;

	  SUBST (*cc_use, gen_rtx_combine (new_code, GET_MODE (*cc_use),
					   dest, const0_rtx));

	  /* If the only change we made was to change an EQ into an NE or
	     vice versa, OP0 has only one bit that might be nonzero, and OP1
	     is zero, check if changing the user of the condition code will
	     produce a valid insn.  If it won't, we can keep the original code
	     in that insn by surrounding our operation with an XOR.  */

	  if (((old_code == NE && new_code == EQ)
	       || (old_code == EQ && new_code == NE))
	      && ! other_changed && op1 == const0_rtx
	      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
	      && exact_log2 (mask = nonzero_bits (op0, GET_MODE (op0))) >= 0)
	    {
	      rtx pat = PATTERN (other_insn), note = 0;

	      if ((recog_for_combine (&pat, other_insn, &note) < 0
		   && ! check_asm_operands (pat)))
		{
		  /* The user insn can't take the new code; restore the
		     old one and flip OP0's single bit with an XOR
		     instead, so the overall condition is unchanged.  */
		  PUT_CODE (*cc_use, old_code);
		  other_insn = 0;

		  op0 = gen_binary (XOR, GET_MODE (op0), op0, GEN_INT (mask));
		}
	    }

	  other_changed = 1;
	}

      if (other_changed)
	undobuf.other_insn = other_insn;

#ifdef HAVE_cc0
      /* If we are now comparing against zero, change our source if
	 needed.  If we do not use cc0, we always have a COMPARE.  */
      if (op1 == const0_rtx && dest == cc0_rtx)
	{
	  SUBST (SET_SRC (x), op0);
	  src = op0;
	}
      else
#endif

      /* Otherwise, if we didn't previously have a COMPARE in the
	 correct mode, we need one.  */
      if (GET_CODE (src) != COMPARE || GET_MODE (src) != compare_mode)
	{
	  SUBST (SET_SRC (x),
		 gen_rtx_combine (COMPARE, compare_mode, op0, op1));
	  src = SET_SRC (x);
	}
      else
	{
	  /* Otherwise, update the COMPARE if needed.  */
	  SUBST (XEXP (src, 0), op0);
	  SUBST (XEXP (src, 1), op1);
	}
    }
  else
    {
      /* Get SET_SRC in a form where we have placed back any
	 compound expressions.  Then do the checks below.  */
      src = make_compound_operation (src, SET);
      SUBST (SET_SRC (x), src);
    }

  /* If we have (set x (subreg:m1 (op:m2 ...) 0)) with OP being some operation,
     and X being a REG or (subreg (reg)), we may be able to convert this to
     (set (subreg:m2 x) (op)).

     We can always do this if M1 is narrower than M2 because that means that
     we only care about the low bits of the result.

     However, on machines without WORD_REGISTER_OPERATIONS defined, we cannot
     perform a narrower operation than requested since the high-order bits will
     be undefined.  On machine where it is defined, this transformation is safe
     as long as M1 and M2 have the same number of words.  */

  if (GET_CODE (src) == SUBREG && subreg_lowpart_p (src)
      && GET_RTX_CLASS (GET_CODE (SUBREG_REG (src))) != 'o'
      && (((GET_MODE_SIZE (GET_MODE (src)) + (UNITS_PER_WORD - 1))
	   / UNITS_PER_WORD)
	  == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
	       + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
#ifndef WORD_REGISTER_OPERATIONS
      && (GET_MODE_SIZE (GET_MODE (src))
	  < GET_MODE_SIZE (GET_MODE (SUBREG_REG (src))))
#endif
#ifdef CLASS_CANNOT_CHANGE_SIZE
      /* Some hard-register classes can't tolerate a mode-size change;
	 don't rewrite DEST if it lives in one of them.  */
      && ! (GET_CODE (dest) == REG && REGNO (dest) < FIRST_PSEUDO_REGISTER
	    && (TEST_HARD_REG_BIT
		(reg_class_contents[(int) CLASS_CANNOT_CHANGE_SIZE],
		 REGNO (dest)))
	    && (GET_MODE_SIZE (GET_MODE (src))
		!= GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))))
#endif
      && (GET_CODE (dest) == REG
	  || (GET_CODE (dest) == SUBREG
	      && GET_CODE (SUBREG_REG (dest)) == REG)))
    {
      SUBST (SET_DEST (x),
	     gen_lowpart_for_combine (GET_MODE (SUBREG_REG (src)),
				      dest));
      SUBST (SET_SRC (x), SUBREG_REG (src));

      src = SET_SRC (x), dest = SET_DEST (x);
    }

#ifdef LOAD_EXTEND_OP
  /* If we have (set FOO (subreg:M (mem:N BAR) 0)) with M wider than N, this
     would require a paradoxical subreg.  Replace the subreg with a
     zero_extend to avoid the reload that would otherwise be required.  */

  if (GET_CODE (src) == SUBREG && subreg_lowpart_p (src)
      && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (src))) != NIL
      && SUBREG_WORD (src) == 0
      && (GET_MODE_SIZE (GET_MODE (src))
	  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (src))))
      && GET_CODE (SUBREG_REG (src)) == MEM)
    {
      SUBST (SET_SRC (x),
	     gen_rtx_combine (LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (src))),
			      GET_MODE (src), XEXP (src, 0)));

      src = SET_SRC (x);
    }
#endif

  /* If we don't have a conditional move, SET_SRC is an IF_THEN_ELSE, and we
     are comparing an item known to be 0 or -1 against 0, use a logical
     operation instead.  Check for one of the arms being an IOR of the other
     arm with some value.  We compute three terms to be IOR'ed together.  In
     practice, at most two will be nonzero.  Then we do the IOR's.  */

  if (GET_CODE (dest) != PC
      && GET_CODE (src) == IF_THEN_ELSE
      && GET_MODE_CLASS (GET_MODE (src)) == MODE_INT
      && (GET_CODE (XEXP (src, 0)) == EQ || GET_CODE (XEXP (src, 0)) == NE)
      && XEXP (XEXP (src, 0), 1) == const0_rtx
      && GET_MODE (src) == GET_MODE (XEXP (XEXP (src, 0), 0))
#ifdef HAVE_conditional_move
      && ! can_conditionally_move_p (GET_MODE (src))
#endif
      && (num_sign_bit_copies (XEXP (XEXP (src, 0), 0),
			       GET_MODE (XEXP (XEXP (src, 0), 0)))
	  == GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (src, 0), 0))))
      && ! side_effects_p (src))
    {
      /* Normalize so TRUE is the arm taken when the tested value is
	 nonzero (i.e. all-ones).  `true'/`false' are local rtx names,
	 not keywords.  */
      rtx true = (GET_CODE (XEXP (src, 0)) == NE
		  ? XEXP (src, 1) : XEXP (src, 2));
      rtx false = (GET_CODE (XEXP (src, 0)) == NE
		   ? XEXP (src, 2) : XEXP (src, 1));
      rtx term1 = const0_rtx, term2, term3;

      /* Peel an IOR of one arm with the other into TERM1.  */
      if (GET_CODE (true) == IOR && rtx_equal_p (XEXP (true, 0), false))
	term1 = false, true = XEXP (true, 1), false = const0_rtx;
      else if (GET_CODE (true) == IOR
	       && rtx_equal_p (XEXP (true, 1), false))
	term1 = false, true = XEXP (true, 0), false = const0_rtx;
      else if (GET_CODE (false) == IOR
	       && rtx_equal_p (XEXP (false, 0), true))
	term1 = true, false = XEXP (false, 1), true = const0_rtx;
      else if (GET_CODE (false) == IOR
	       && rtx_equal_p (XEXP (false, 1), true))
	term1 = true, false = XEXP (false, 0), true = const0_rtx;

      /* (A & TRUE) selects TRUE when A is all-ones; (~A & FALSE)
	 selects FALSE when A is zero.  */
      term2 = gen_binary (AND, GET_MODE (src), XEXP (XEXP (src, 0), 0), true);
      term3 = gen_binary (AND, GET_MODE (src),
			  gen_unary (NOT, GET_MODE (src), GET_MODE (src),
				     XEXP (XEXP (src, 0), 0)),
			  false);

      SUBST (SET_SRC (x),
	     gen_binary (IOR, GET_MODE (src),
			 gen_binary (IOR, GET_MODE (src), term1, term2),
			 term3));

      src = SET_SRC (x);
    }

  /* If either SRC or DEST is a CLOBBER of (const_int 0), make this
     whole thing fail.  */
  if (GET_CODE (src) == CLOBBER && XEXP (src, 0) == const0_rtx)
    return src;
  else if (GET_CODE (dest) == CLOBBER && XEXP (dest, 0) == const0_rtx)
    return dest;
  else
    /* Convert this into a field assignment operation, if possible.  */
    return make_field_assignment (x);
}
4863\f
/* Simplify X, an AND, IOR, or XOR operation, and return the simplified
   result.  LAST is nonzero if this is the last retry: in that case one
   extra canonicalization (shrinking C1 in (ior (and X C1) C2)) is
   performed before giving up.  */

static rtx
simplify_logical (x, last)
     rtx x;
     int last;
{
  enum machine_mode mode = GET_MODE (x);
  rtx op0 = XEXP (x, 0);
  rtx op1 = XEXP (x, 1);

  switch (GET_CODE (x))
    {
    case AND:
      /* Convert (A ^ B) & A to A & (~ B) since the latter is often a single
	 insn (and may simplify more).  */
      if (GET_CODE (op0) == XOR
	  && rtx_equal_p (XEXP (op0, 0), op1)
	  && ! side_effects_p (op1))
	x = gen_binary (AND, mode,
			gen_unary (NOT, mode, mode, XEXP (op0, 1)), op1);

      /* Same transform with the XOR operands the other way around:
	 (B ^ A) & A is A & (~ B).  */
      if (GET_CODE (op0) == XOR
	  && rtx_equal_p (XEXP (op0, 1), op1)
	  && ! side_effects_p (op1))
	x = gen_binary (AND, mode,
			gen_unary (NOT, mode, mode, XEXP (op0, 0)), op1);

      /* Similarly for (~ (A ^ B)) & A, which is A & B.  */
      if (GET_CODE (op0) == NOT
	  && GET_CODE (XEXP (op0, 0)) == XOR
	  && rtx_equal_p (XEXP (XEXP (op0, 0), 0), op1)
	  && ! side_effects_p (op1))
	x = gen_binary (AND, mode, XEXP (XEXP (op0, 0), 1), op1);

      if (GET_CODE (op0) == NOT
	  && GET_CODE (XEXP (op0, 0)) == XOR
	  && rtx_equal_p (XEXP (XEXP (op0, 0), 1), op1)
	  && ! side_effects_p (op1))
	x = gen_binary (AND, mode, XEXP (XEXP (op0, 0), 0), op1);

      /* We can call simplify_and_const_int only if we don't lose
	 any (sign) bits when converting INTVAL (op1) to
	 "unsigned HOST_WIDE_INT".  */
      if (GET_CODE (op1) == CONST_INT
	  && (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
	      || INTVAL (op1) > 0))
	{
	  x = simplify_and_const_int (x, mode, op0, INTVAL (op1));

	  /* If we have (ior (and (X C1) C2)) and the next restart would be
	     the last, simplify this by making C1 as small as possible
	     and then exit.  */
	  if (last
	      && GET_CODE (x) == IOR && GET_CODE (op0) == AND
	      && GET_CODE (XEXP (op0, 1)) == CONST_INT
	      && GET_CODE (op1) == CONST_INT)
	    return gen_binary (IOR, mode,
			       gen_binary (AND, mode, XEXP (op0, 0),
					   GEN_INT (INTVAL (XEXP (op0, 1))
						    & ~ INTVAL (op1))), op1);

	  if (GET_CODE (x) != AND)
	    return x;

	  /* simplify_and_const_int may have rebuilt X; refresh the cached
	     operands before applying the remaining AND transforms.  */
	  if (GET_RTX_CLASS (GET_CODE (x)) == 'c'
	      || GET_RTX_CLASS (GET_CODE (x)) == '2')
	    op0 = XEXP (x, 0), op1 = XEXP (x, 1);
	}

      /* Convert (A | B) & A to A.  */
      if (GET_CODE (op0) == IOR
	  && (rtx_equal_p (XEXP (op0, 0), op1)
	      || rtx_equal_p (XEXP (op0, 1), op1))
	  && ! side_effects_p (XEXP (op0, 0))
	  && ! side_effects_p (XEXP (op0, 1)))
	return op1;

      /* In the following group of tests (and those in case IOR below),
	 we start with some combination of logical operations and apply
	 the distributive law followed by the inverse distributive law.
	 Most of the time, this results in no change.  However, if some of
	 the operands are the same or inverses of each other, simplifications
	 will result.

	 For example, (and (ior A B) (not B)) can occur as the result of
	 expanding a bit field assignment.  When we apply the distributive
	 law to this, we get (ior (and (A (not B))) (and (B (not B)))),
	 which then simplifies to (and (A (not B))).

	 If we have (and (ior A B) C), apply the distributive law and then
	 the inverse distributive law to see if things simplify.  */

      if (GET_CODE (op0) == IOR || GET_CODE (op0) == XOR)
	{
	  x = apply_distributive_law
	    (gen_binary (GET_CODE (op0), mode,
			 gen_binary (AND, mode, XEXP (op0, 0), op1),
			 gen_binary (AND, mode, XEXP (op0, 1), op1)));
	  if (GET_CODE (x) != AND)
	    return x;
	}

      if (GET_CODE (op1) == IOR || GET_CODE (op1) == XOR)
	return apply_distributive_law
	  (gen_binary (GET_CODE (op1), mode,
		       gen_binary (AND, mode, XEXP (op1, 0), op0),
		       gen_binary (AND, mode, XEXP (op1, 1), op0)));

      /* Similarly, taking advantage of the fact that
	 (and (not A) (xor B C)) == (xor (ior A B) (ior A C))  */

      if (GET_CODE (op0) == NOT && GET_CODE (op1) == XOR)
	return apply_distributive_law
	  (gen_binary (XOR, mode,
		       gen_binary (IOR, mode, XEXP (op0, 0), XEXP (op1, 0)),
		       gen_binary (IOR, mode, XEXP (op0, 0), XEXP (op1, 1))));

      else if (GET_CODE (op1) == NOT && GET_CODE (op0) == XOR)
	return apply_distributive_law
	  (gen_binary (XOR, mode,
		       gen_binary (IOR, mode, XEXP (op1, 0), XEXP (op0, 0)),
		       gen_binary (IOR, mode, XEXP (op1, 0), XEXP (op0, 1))));
      break;

    case IOR:
      /* (ior A C) is C if all bits of A that might be nonzero are on in C.  */
      if (GET_CODE (op1) == CONST_INT
	  && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
	  && (nonzero_bits (op0, mode) & ~ INTVAL (op1)) == 0)
	return op1;

      /* Convert (A & B) | A to A.  */
      if (GET_CODE (op0) == AND
	  && (rtx_equal_p (XEXP (op0, 0), op1)
	      || rtx_equal_p (XEXP (op0, 1), op1))
	  && ! side_effects_p (XEXP (op0, 0))
	  && ! side_effects_p (XEXP (op0, 1)))
	return op1;

      /* If we have (ior (and A B) C), apply the distributive law and then
	 the inverse distributive law to see if things simplify.  */

      if (GET_CODE (op0) == AND)
	{
	  x = apply_distributive_law
	    (gen_binary (AND, mode,
			 gen_binary (IOR, mode, XEXP (op0, 0), op1),
			 gen_binary (IOR, mode, XEXP (op0, 1), op1)));

	  if (GET_CODE (x) != IOR)
	    return x;
	}

      if (GET_CODE (op1) == AND)
	{
	  x = apply_distributive_law
	    (gen_binary (AND, mode,
			 gen_binary (IOR, mode, XEXP (op1, 0), op0),
			 gen_binary (IOR, mode, XEXP (op1, 1), op0)));

	  if (GET_CODE (x) != IOR)
	    return x;
	}

      /* Convert (ior (ashift A CX) (lshiftrt A CY)) where CX+CY equals the
	 mode size to (rotate A CX).  */

      if (((GET_CODE (op0) == ASHIFT && GET_CODE (op1) == LSHIFTRT)
	   || (GET_CODE (op1) == ASHIFT && GET_CODE (op0) == LSHIFTRT))
	  && rtx_equal_p (XEXP (op0, 0), XEXP (op1, 0))
	  && GET_CODE (XEXP (op0, 1)) == CONST_INT
	  && GET_CODE (XEXP (op1, 1)) == CONST_INT
	  && (INTVAL (XEXP (op0, 1)) + INTVAL (XEXP (op1, 1))
	      == GET_MODE_BITSIZE (mode)))
	return gen_rtx_ROTATE (mode, XEXP (op0, 0),
			       (GET_CODE (op0) == ASHIFT
				? XEXP (op0, 1) : XEXP (op1, 1)));

      /* If OP0 is (ashiftrt (plus ...) C), it might actually be
	 a (sign_extend (plus ...)).  If so, OP1 is a CONST_INT, and the PLUS
	 does not affect any of the bits in OP1, it can really be done
	 as a PLUS and we can associate.  We do this by seeing if OP1
	 can be safely shifted left C bits.  */
      if (GET_CODE (op1) == CONST_INT && GET_CODE (op0) == ASHIFTRT
	  && GET_CODE (XEXP (op0, 0)) == PLUS
	  && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
	  && GET_CODE (XEXP (op0, 1)) == CONST_INT
	  && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT)
	{
	  int count = INTVAL (XEXP (op0, 1));
	  HOST_WIDE_INT mask = INTVAL (op1) << count;

	  /* The shift round-trip check verifies no bits of OP1 were
	     lost shifting left, i.e. OP1 fits under the sign extension.  */
	  if (mask >> count == INTVAL (op1)
	      && (mask & nonzero_bits (XEXP (op0, 0), mode)) == 0)
	    {
	      SUBST (XEXP (XEXP (op0, 0), 1),
		     GEN_INT (INTVAL (XEXP (XEXP (op0, 0), 1)) | mask));
	      return op0;
	    }
	}
      break;

    case XOR:
      /* Convert (XOR (NOT x) (NOT y)) to (XOR x y).
	 Also convert (XOR (NOT x) y) to (NOT (XOR x y)), similarly for
	 (NOT y).  */
      {
	int num_negated = 0;

	if (GET_CODE (op0) == NOT)
	  num_negated++, op0 = XEXP (op0, 0);
	if (GET_CODE (op1) == NOT)
	  num_negated++, op1 = XEXP (op1, 0);

	if (num_negated == 2)
	  {
	    /* Both NOTs cancel; strip them in place.  */
	    SUBST (XEXP (x, 0), op0);
	    SUBST (XEXP (x, 1), op1);
	  }
	else if (num_negated == 1)
	  return gen_unary (NOT, mode, mode, gen_binary (XOR, mode, op0, op1));
      }

      /* Convert (xor (and A B) B) to (and (not A) B).  The latter may
	 correspond to a machine insn or result in further simplifications
	 if B is a constant.  */

      if (GET_CODE (op0) == AND
	  && rtx_equal_p (XEXP (op0, 1), op1)
	  && ! side_effects_p (op1))
	return gen_binary (AND, mode,
			   gen_unary (NOT, mode, mode, XEXP (op0, 0)),
			   op1);

      else if (GET_CODE (op0) == AND
	       && rtx_equal_p (XEXP (op0, 0), op1)
	       && ! side_effects_p (op1))
	return gen_binary (AND, mode,
			   gen_unary (NOT, mode, mode, XEXP (op0, 1)),
			   op1);

      /* (xor (comparison foo bar) (const_int 1)) can become the reversed
	 comparison if STORE_FLAG_VALUE is 1.  */
      if (STORE_FLAG_VALUE == 1
	  && op1 == const1_rtx
	  && GET_RTX_CLASS (GET_CODE (op0)) == '<'
	  && reversible_comparison_p (op0))
	return gen_rtx_combine (reverse_condition (GET_CODE (op0)),
				mode, XEXP (op0, 0), XEXP (op0, 1));

      /* (lshiftrt foo C) where C is the number of bits in FOO minus 1
	 is (lt foo (const_int 0)), so we can perform the above
	 simplification if STORE_FLAG_VALUE is 1.  */

      if (STORE_FLAG_VALUE == 1
	  && op1 == const1_rtx
	  && GET_CODE (op0) == LSHIFTRT
	  && GET_CODE (XEXP (op0, 1)) == CONST_INT
	  && INTVAL (XEXP (op0, 1)) == GET_MODE_BITSIZE (mode) - 1)
	return gen_rtx_combine (GE, mode, XEXP (op0, 0), const0_rtx);

      /* (xor (comparison foo bar) (const_int sign-bit))
	 when STORE_FLAG_VALUE is the sign bit.  */
      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
	  && ((STORE_FLAG_VALUE & GET_MODE_MASK (mode))
	      == (unsigned HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (mode) - 1))
	  && op1 == const_true_rtx
	  && GET_RTX_CLASS (GET_CODE (op0)) == '<'
	  && reversible_comparison_p (op0))
	return gen_rtx_combine (reverse_condition (GET_CODE (op0)),
				mode, XEXP (op0, 0), XEXP (op0, 1));
      break;

    default:
      /* Callers only pass AND, IOR, or XOR; anything else is a bug.  */
      abort ();
    }

  return x;
}
5145\f
5146/* We consider ZERO_EXTRACT, SIGN_EXTRACT, and SIGN_EXTEND as "compound
5147 operations" because they can be replaced with two more basic operations.
5148 ZERO_EXTEND is also considered "compound" because it can be replaced with
5149 an AND operation, which is simpler, though only one operation.
5150
5151 The function expand_compound_operation is called with an rtx expression
5152 and will convert it to the appropriate shifts and AND operations,
5153 simplifying at each stage.
5154
5155 The function make_compound_operation is called to convert an expression
5156 consisting of shifts and ANDs into the equivalent compound expression.
5157 It is the inverse of this function, loosely speaking. */
5158
5159static rtx
5160expand_compound_operation (x)
5161 rtx x;
5162{
5163 int pos = 0, len;
5164 int unsignedp = 0;
5165 int modewidth;
5166 rtx tem;
5167
5168 switch (GET_CODE (x))
5169 {
5170 case ZERO_EXTEND:
5171 unsignedp = 1;
5172 case SIGN_EXTEND:
75473182
RS
5173 /* We can't necessarily use a const_int for a multiword mode;
5174 it depends on implicitly extending the value.
5175 Since we don't know the right way to extend it,
5176 we can't tell whether the implicit way is right.
5177
5178 Even for a mode that is no wider than a const_int,
5179 we can't win, because we need to sign extend one of its bits through
5180 the rest of it, and we don't know which bit. */
230d793d 5181 if (GET_CODE (XEXP (x, 0)) == CONST_INT)
75473182 5182 return x;
230d793d 5183
8079805d
RK
5184 /* Return if (subreg:MODE FROM 0) is not a safe replacement for
5185 (zero_extend:MODE FROM) or (sign_extend:MODE FROM). It is for any MEM
5186 because (SUBREG (MEM...)) is guaranteed to cause the MEM to be
5187 reloaded. If not for that, MEM's would very rarely be safe.
5188
5189 Reject MODEs bigger than a word, because we might not be able
5190 to reference a two-register group starting with an arbitrary register
5191 (and currently gen_lowpart might crash for a SUBREG). */
5192
5193 if (GET_MODE_SIZE (GET_MODE (XEXP (x, 0))) > UNITS_PER_WORD)
230d793d
RS
5194 return x;
5195
5196 len = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)));
5197 /* If the inner object has VOIDmode (the only way this can happen
5198 is if it is a ASM_OPERANDS), we can't do anything since we don't
5199 know how much masking to do. */
5200 if (len == 0)
5201 return x;
5202
5203 break;
5204
5205 case ZERO_EXTRACT:
5206 unsignedp = 1;
5207 case SIGN_EXTRACT:
5208 /* If the operand is a CLOBBER, just return it. */
5209 if (GET_CODE (XEXP (x, 0)) == CLOBBER)
5210 return XEXP (x, 0);
5211
5212 if (GET_CODE (XEXP (x, 1)) != CONST_INT
5213 || GET_CODE (XEXP (x, 2)) != CONST_INT
5214 || GET_MODE (XEXP (x, 0)) == VOIDmode)
5215 return x;
5216
5217 len = INTVAL (XEXP (x, 1));
5218 pos = INTVAL (XEXP (x, 2));
5219
5220 /* If this goes outside the object being extracted, replace the object
5221 with a (use (mem ...)) construct that only combine understands
5222 and is used only for this purpose. */
5223 if (len + pos > GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))))
38a448ca 5224 SUBST (XEXP (x, 0), gen_rtx_USE (GET_MODE (x), XEXP (x, 0)));
230d793d 5225
f76b9db2
ILT
5226 if (BITS_BIG_ENDIAN)
5227 pos = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - len - pos;
5228
230d793d
RS
5229 break;
5230
5231 default:
5232 return x;
5233 }
5234
0f13a422
ILT
5235 /* We can optimize some special cases of ZERO_EXTEND. */
5236 if (GET_CODE (x) == ZERO_EXTEND)
5237 {
5238 /* (zero_extend:DI (truncate:SI foo:DI)) is just foo:DI if we
5239 know that the last value didn't have any inappropriate bits
5240 set. */
5241 if (GET_CODE (XEXP (x, 0)) == TRUNCATE
5242 && GET_MODE (XEXP (XEXP (x, 0), 0)) == GET_MODE (x)
5243 && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
5244 && (nonzero_bits (XEXP (XEXP (x, 0), 0), GET_MODE (x))
5245 & ~ GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
5246 return XEXP (XEXP (x, 0), 0);
5247
5248 /* Likewise for (zero_extend:DI (subreg:SI foo:DI 0)). */
5249 if (GET_CODE (XEXP (x, 0)) == SUBREG
5250 && GET_MODE (SUBREG_REG (XEXP (x, 0))) == GET_MODE (x)
5251 && subreg_lowpart_p (XEXP (x, 0))
5252 && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
5253 && (nonzero_bits (SUBREG_REG (XEXP (x, 0)), GET_MODE (x))
fcc60894 5254 & ~ GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
0f13a422
ILT
5255 return SUBREG_REG (XEXP (x, 0));
5256
5257 /* (zero_extend:DI (truncate:SI foo:DI)) is just foo:DI when foo
5258 is a comparison and STORE_FLAG_VALUE permits. This is like
5259 the first case, but it works even when GET_MODE (x) is larger
5260 than HOST_WIDE_INT. */
5261 if (GET_CODE (XEXP (x, 0)) == TRUNCATE
5262 && GET_MODE (XEXP (XEXP (x, 0), 0)) == GET_MODE (x)
5263 && GET_RTX_CLASS (GET_CODE (XEXP (XEXP (x, 0), 0))) == '<'
5264 && (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
5265 <= HOST_BITS_PER_WIDE_INT)
5266 && ((HOST_WIDE_INT) STORE_FLAG_VALUE
5267 & ~ GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
5268 return XEXP (XEXP (x, 0), 0);
5269
5270 /* Likewise for (zero_extend:DI (subreg:SI foo:DI 0)). */
5271 if (GET_CODE (XEXP (x, 0)) == SUBREG
5272 && GET_MODE (SUBREG_REG (XEXP (x, 0))) == GET_MODE (x)
5273 && subreg_lowpart_p (XEXP (x, 0))
5274 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0)))) == '<'
5275 && (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
5276 <= HOST_BITS_PER_WIDE_INT)
5277 && ((HOST_WIDE_INT) STORE_FLAG_VALUE
5278 & ~ GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
5279 return SUBREG_REG (XEXP (x, 0));
5280
5281 /* If sign extension is cheaper than zero extension, then use it
5282 if we know that no extraneous bits are set, and that the high
5283 bit is not set. */
5284 if (flag_expensive_optimizations
5285 && ((GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
5286 && ((nonzero_bits (XEXP (x, 0), GET_MODE (x))
5287 & ~ (((unsigned HOST_WIDE_INT)
5288 GET_MODE_MASK (GET_MODE (XEXP (x, 0))))
5289 >> 1))
5290 == 0))
5291 || (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
5292 && (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
5293 <= HOST_BITS_PER_WIDE_INT)
5294 && (((HOST_WIDE_INT) STORE_FLAG_VALUE
5295 & ~ (((unsigned HOST_WIDE_INT)
5296 GET_MODE_MASK (GET_MODE (XEXP (x, 0))))
5297 >> 1))
5298 == 0))))
5299 {
38a448ca 5300 rtx temp = gen_rtx_SIGN_EXTEND (GET_MODE (x), XEXP (x, 0));
0f13a422
ILT
5301
5302 if (rtx_cost (temp, SET) < rtx_cost (x, SET))
5303 return expand_compound_operation (temp);
5304 }
5305 }
5306
230d793d
RS
5307 /* If we reach here, we want to return a pair of shifts. The inner
5308 shift is a left shift of BITSIZE - POS - LEN bits. The outer
5309 shift is a right shift of BITSIZE - LEN bits. It is arithmetic or
5310 logical depending on the value of UNSIGNEDP.
5311
5312 If this was a ZERO_EXTEND or ZERO_EXTRACT, this pair of shifts will be
5313 converted into an AND of a shift.
5314
5315 We must check for the case where the left shift would have a negative
5316 count. This can happen in a case like (x >> 31) & 255 on machines
5317 that can't shift by a constant. On those machines, we would first
5318 combine the shift with the AND to produce a variable-position
5319 extraction. Then the constant of 31 would be substituted in to produce
5320 a such a position. */
5321
5322 modewidth = GET_MODE_BITSIZE (GET_MODE (x));
5323 if (modewidth >= pos - len)
5f4f0e22 5324 tem = simplify_shift_const (NULL_RTX, unsignedp ? LSHIFTRT : ASHIFTRT,
230d793d 5325 GET_MODE (x),
5f4f0e22
CH
5326 simplify_shift_const (NULL_RTX, ASHIFT,
5327 GET_MODE (x),
230d793d
RS
5328 XEXP (x, 0),
5329 modewidth - pos - len),
5330 modewidth - len);
5331
5f4f0e22
CH
5332 else if (unsignedp && len < HOST_BITS_PER_WIDE_INT)
5333 tem = simplify_and_const_int (NULL_RTX, GET_MODE (x),
5334 simplify_shift_const (NULL_RTX, LSHIFTRT,
230d793d
RS
5335 GET_MODE (x),
5336 XEXP (x, 0), pos),
5f4f0e22 5337 ((HOST_WIDE_INT) 1 << len) - 1);
230d793d
RS
5338 else
5339 /* Any other cases we can't handle. */
5340 return x;
5341
5342
5343 /* If we couldn't do this for some reason, return the original
5344 expression. */
5345 if (GET_CODE (tem) == CLOBBER)
5346 return x;
5347
5348 return tem;
5349}
5350\f
/* X is a SET which contains an assignment of one object into
   a part of another (such as a bit-field assignment, STRICT_LOW_PART,
   or certain SUBREGS).  If possible, convert it into a series of
   logical operations.

   We half-heartedly support variable positions, but do not at all
   support variable lengths.

   The result is either the rewritten SET
       (set INNER (ior (and INNER (not (ashift MASK POS)))
		       (ashift (and SRC MASK) POS)))
   or X itself if no simplification was possible.  */

static rtx
expand_field_assignment (x)
     rtx x;
{
  rtx inner;
  rtx pos;			/* Always counts from low bit.  */
  int len;
  rtx mask;
  enum machine_mode compute_mode;

  /* Loop until we find something we can't simplify.  */
  while (1)
    {
      if (GET_CODE (SET_DEST (x)) == STRICT_LOW_PART
	  && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG)
	{
	  /* Writing the low part of a SUBREG of INNER: the field is the
	     whole SUBREG mode, at the bit offset of the subword.  */
	  inner = SUBREG_REG (XEXP (SET_DEST (x), 0));
	  len = GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)));
	  pos = GEN_INT (BITS_PER_WORD * SUBREG_WORD (XEXP (SET_DEST (x), 0)));
	}
      else if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
	       && GET_CODE (XEXP (SET_DEST (x), 1)) == CONST_INT)
	{
	  inner = XEXP (SET_DEST (x), 0);
	  len = INTVAL (XEXP (SET_DEST (x), 1));
	  pos = XEXP (SET_DEST (x), 2);

	  /* If the position is constant and spans the width of INNER,
	     surround INNER with a USE to indicate this.  */
	  if (GET_CODE (pos) == CONST_INT
	      && INTVAL (pos) + len > GET_MODE_BITSIZE (GET_MODE (inner)))
	    inner = gen_rtx_USE (GET_MODE (SET_DEST (x)), inner);

	  /* Canonicalize POS to count from the LSB.  */
	  if (BITS_BIG_ENDIAN)
	    {
	      if (GET_CODE (pos) == CONST_INT)
		pos = GEN_INT (GET_MODE_BITSIZE (GET_MODE (inner)) - len
			       - INTVAL (pos));
	      else if (GET_CODE (pos) == MINUS
		       && GET_CODE (XEXP (pos, 1)) == CONST_INT
		       && (INTVAL (XEXP (pos, 1))
			   == GET_MODE_BITSIZE (GET_MODE (inner)) - len))
		/* If position is ADJUST - X, new position is X.  */
		pos = XEXP (pos, 0);
	      else
		pos = gen_binary (MINUS, GET_MODE (pos),
				  GEN_INT (GET_MODE_BITSIZE (GET_MODE (inner))
					   - len),
				  pos);
	    }
	}

      /* A SUBREG between two modes that occupy the same numbers of words
	 can be done by moving the SUBREG to the source.  */
      else if (GET_CODE (SET_DEST (x)) == SUBREG
	       && (((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
		     + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
		   == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
			+ (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
	{
	  x = gen_rtx_SET (VOIDmode, SUBREG_REG (SET_DEST (x)),
			   gen_lowpart_for_combine (GET_MODE (SUBREG_REG (SET_DEST (x))),
						    SET_SRC (x)));
	  /* The new SET may itself be simplifiable; go around again.  */
	  continue;
	}
      else
	break;

      /* Strip any low-part SUBREGs so we operate on the real object.  */
      while (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner))
	inner = SUBREG_REG (inner);

      compute_mode = GET_MODE (inner);

      /* Don't attempt bitwise arithmetic on non-integral modes.  */
      if (! INTEGRAL_MODE_P (compute_mode))
	{
	  enum machine_mode imode;

	  /* Something is probably seriously wrong if this matches.  */
	  if (! FLOAT_MODE_P (compute_mode))
	    break;

	  /* Try to find an integral mode to pun with.  */
	  imode = mode_for_size (GET_MODE_BITSIZE (compute_mode), MODE_INT, 0);
	  if (imode == BLKmode)
	    break;

	  compute_mode = imode;
	  inner = gen_lowpart_for_combine (imode, inner);
	}

      /* Compute a mask of LEN bits, if we can do this on the host machine.  */
      if (len < HOST_BITS_PER_WIDE_INT)
	mask = GEN_INT (((HOST_WIDE_INT) 1 << len) - 1);
      else
	break;

      /* Now compute the equivalent expression.  Make a copy of INNER
	 for the SET_DEST in case it is a MEM into which we will substitute;
	 we don't want shared RTL in that case.

	 This builds: INNER = (INNER & ~(MASK << POS)) | ((SRC & MASK) << POS),
	 i.e. clear the field and insert the (masked) source into it.  */
      x = gen_rtx_SET (VOIDmode, copy_rtx (inner),
		       gen_binary (IOR, compute_mode,
				   gen_binary (AND, compute_mode,
					       gen_unary (NOT, compute_mode,
							  compute_mode,
							  gen_binary (ASHIFT,
								      compute_mode,
								      mask, pos)),
					       inner),
				   gen_binary (ASHIFT, compute_mode,
					       gen_binary (AND, compute_mode,
							   gen_lowpart_for_combine
							   (compute_mode,
							    SET_SRC (x)),
							   mask),
					       pos)));
    }

  return x;
}
5479\f
8999a12e
RK
5480/* Return an RTX for a reference to LEN bits of INNER. If POS_RTX is nonzero,
5481 it is an RTX that represents a variable starting position; otherwise,
5482 POS is the (constant) starting bit position (counted from the LSB).
230d793d
RS
5483
5484 INNER may be a USE. This will occur when we started with a bitfield
5485 that went outside the boundary of the object in memory, which is
5486 allowed on most machines. To isolate this case, we produce a USE
5487 whose mode is wide enough and surround the MEM with it. The only
5488 code that understands the USE is this routine. If it is not removed,
5489 it will cause the resulting insn not to match.
5490
5491 UNSIGNEDP is non-zero for an unsigned reference and zero for a
5492 signed reference.
5493
5494 IN_DEST is non-zero if this is a reference in the destination of a
5495 SET. This is used when a ZERO_ or SIGN_EXTRACT isn't needed. If non-zero,
5496 a STRICT_LOW_PART will be used, if zero, ZERO_EXTEND or SIGN_EXTEND will
5497 be used.
5498
5499 IN_COMPARE is non-zero if we are in a COMPARE. This means that a
5500 ZERO_EXTRACT should be built even for bits starting at bit 0.
5501
76184def
DE
5502 MODE is the desired mode of the result (if IN_DEST == 0).
5503
5504 The result is an RTX for the extraction or NULL_RTX if the target
5505 can't handle it. */
230d793d
RS
5506
5507static rtx
5508make_extraction (mode, inner, pos, pos_rtx, len,
5509 unsignedp, in_dest, in_compare)
5510 enum machine_mode mode;
5511 rtx inner;
5512 int pos;
5513 rtx pos_rtx;
5514 int len;
5515 int unsignedp;
5516 int in_dest, in_compare;
5517{
94b4b17a
RS
5518 /* This mode describes the size of the storage area
5519 to fetch the overall value from. Within that, we
5520 ignore the POS lowest bits, etc. */
230d793d
RS
5521 enum machine_mode is_mode = GET_MODE (inner);
5522 enum machine_mode inner_mode;
d7cd794f
RK
5523 enum machine_mode wanted_inner_mode = byte_mode;
5524 enum machine_mode wanted_inner_reg_mode = word_mode;
230d793d
RS
5525 enum machine_mode pos_mode = word_mode;
5526 enum machine_mode extraction_mode = word_mode;
5527 enum machine_mode tmode = mode_for_size (len, MODE_INT, 1);
5528 int spans_byte = 0;
5529 rtx new = 0;
8999a12e 5530 rtx orig_pos_rtx = pos_rtx;
6139ff20 5531 int orig_pos;
230d793d
RS
5532
5533 /* Get some information about INNER and get the innermost object. */
5534 if (GET_CODE (inner) == USE)
94b4b17a 5535 /* (use:SI (mem:QI foo)) stands for (mem:SI foo). */
230d793d
RS
5536 /* We don't need to adjust the position because we set up the USE
5537 to pretend that it was a full-word object. */
5538 spans_byte = 1, inner = XEXP (inner, 0);
5539 else if (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner))
94b4b17a
RS
5540 {
5541 /* If going from (subreg:SI (mem:QI ...)) to (mem:QI ...),
5542 consider just the QI as the memory to extract from.
5543 The subreg adds or removes high bits; its mode is
5544 irrelevant to the meaning of this extraction,
5545 since POS and LEN count from the lsb. */
5546 if (GET_CODE (SUBREG_REG (inner)) == MEM)
5547 is_mode = GET_MODE (SUBREG_REG (inner));
5548 inner = SUBREG_REG (inner);
5549 }
230d793d
RS
5550
5551 inner_mode = GET_MODE (inner);
5552
5553 if (pos_rtx && GET_CODE (pos_rtx) == CONST_INT)
8999a12e 5554 pos = INTVAL (pos_rtx), pos_rtx = 0;
230d793d
RS
5555
5556 /* See if this can be done without an extraction. We never can if the
5557 width of the field is not the same as that of some integer mode. For
5558 registers, we can only avoid the extraction if the position is at the
5559 low-order bit and this is either not in the destination or we have the
5560 appropriate STRICT_LOW_PART operation available.
5561
5562 For MEM, we can avoid an extract if the field starts on an appropriate
5563 boundary and we can change the mode of the memory reference. However,
5564 we cannot directly access the MEM if we have a USE and the underlying
5565 MEM is not TMODE. This combination means that MEM was being used in a
5566 context where bits outside its mode were being referenced; that is only
5567 valid in bit-field insns. */
5568
5569 if (tmode != BLKmode
5570 && ! (spans_byte && inner_mode != tmode)
4d9cfc7b
RK
5571 && ((pos_rtx == 0 && (pos % BITS_PER_WORD) == 0
5572 && GET_CODE (inner) != MEM
230d793d 5573 && (! in_dest
df62f951
RK
5574 || (GET_CODE (inner) == REG
5575 && (movstrict_optab->handlers[(int) tmode].insn_code
5576 != CODE_FOR_nothing))))
8999a12e 5577 || (GET_CODE (inner) == MEM && pos_rtx == 0
dfbe1b2f
RK
5578 && (pos
5579 % (STRICT_ALIGNMENT ? GET_MODE_ALIGNMENT (tmode)
5580 : BITS_PER_UNIT)) == 0
230d793d
RS
5581 /* We can't do this if we are widening INNER_MODE (it
5582 may not be aligned, for one thing). */
5583 && GET_MODE_BITSIZE (inner_mode) >= GET_MODE_BITSIZE (tmode)
5584 && (inner_mode == tmode
5585 || (! mode_dependent_address_p (XEXP (inner, 0))
5586 && ! MEM_VOLATILE_P (inner))))))
5587 {
230d793d
RS
5588 /* If INNER is a MEM, make a new MEM that encompasses just the desired
5589 field. If the original and current mode are the same, we need not
5590 adjust the offset. Otherwise, we do if bytes big endian.
5591
4d9cfc7b
RK
5592 If INNER is not a MEM, get a piece consisting of just the field
5593 of interest (in this case POS % BITS_PER_WORD must be 0). */
230d793d
RS
5594
5595 if (GET_CODE (inner) == MEM)
5596 {
94b4b17a
RS
5597 int offset;
5598 /* POS counts from lsb, but make OFFSET count in memory order. */
5599 if (BYTES_BIG_ENDIAN)
5600 offset = (GET_MODE_BITSIZE (is_mode) - len - pos) / BITS_PER_UNIT;
5601 else
5602 offset = pos / BITS_PER_UNIT;
230d793d 5603
38a448ca 5604 new = gen_rtx_MEM (tmode, plus_constant (XEXP (inner, 0), offset));
230d793d 5605 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (inner);
c6df88cb 5606 MEM_COPY_ATTRIBUTES (new, inner);
230d793d 5607 }
df62f951 5608 else if (GET_CODE (inner) == REG)
c0d3ac4d
RK
5609 {
5610 /* We can't call gen_lowpart_for_combine here since we always want
5611 a SUBREG and it would sometimes return a new hard register. */
5612 if (tmode != inner_mode)
38a448ca
RH
5613 new = gen_rtx_SUBREG (tmode, inner,
5614 (WORDS_BIG_ENDIAN
5615 && GET_MODE_SIZE (inner_mode) > UNITS_PER_WORD
5616 ? (((GET_MODE_SIZE (inner_mode)
5617 - GET_MODE_SIZE (tmode))
5618 / UNITS_PER_WORD)
5619 - pos / BITS_PER_WORD)
5620 : pos / BITS_PER_WORD));
c0d3ac4d
RK
5621 else
5622 new = inner;
5623 }
230d793d 5624 else
6139ff20
RK
5625 new = force_to_mode (inner, tmode,
5626 len >= HOST_BITS_PER_WIDE_INT
5627 ? GET_MODE_MASK (tmode)
5628 : ((HOST_WIDE_INT) 1 << len) - 1,
e3d616e3 5629 NULL_RTX, 0);
230d793d
RS
5630
5631 /* If this extraction is going into the destination of a SET,
5632 make a STRICT_LOW_PART unless we made a MEM. */
5633
5634 if (in_dest)
5635 return (GET_CODE (new) == MEM ? new
77fa0940 5636 : (GET_CODE (new) != SUBREG
38a448ca 5637 ? gen_rtx_CLOBBER (tmode, const0_rtx)
77fa0940 5638 : gen_rtx_combine (STRICT_LOW_PART, VOIDmode, new)));
230d793d
RS
5639
5640 /* Otherwise, sign- or zero-extend unless we already are in the
5641 proper mode. */
5642
5643 return (mode == tmode ? new
5644 : gen_rtx_combine (unsignedp ? ZERO_EXTEND : SIGN_EXTEND,
5645 mode, new));
5646 }
5647
cc471082
RS
5648 /* Unless this is a COMPARE or we have a funny memory reference,
5649 don't do anything with zero-extending field extracts starting at
5650 the low-order bit since they are simple AND operations. */
8999a12e
RK
5651 if (pos_rtx == 0 && pos == 0 && ! in_dest
5652 && ! in_compare && ! spans_byte && unsignedp)
230d793d
RS
5653 return 0;
5654
e7373556
RK
5655 /* Unless we are allowed to span bytes, reject this if we would be
5656 spanning bytes or if the position is not a constant and the length
5657 is not 1. In all other cases, we would only be going outside
5658 out object in cases when an original shift would have been
5659 undefined. */
5660 if (! spans_byte
5661 && ((pos_rtx == 0 && pos + len > GET_MODE_BITSIZE (is_mode))
5662 || (pos_rtx != 0 && len != 1)))
5663 return 0;
5664
d7cd794f 5665 /* Get the mode to use should INNER not be a MEM, the mode for the position,
230d793d
RS
5666 and the mode for the result. */
5667#ifdef HAVE_insv
5668 if (in_dest)
5669 {
0d8e55d8
JL
5670 wanted_inner_reg_mode
5671 = (insn_operand_mode[(int) CODE_FOR_insv][0] == VOIDmode
5672 ? word_mode
5673 : insn_operand_mode[(int) CODE_FOR_insv][0]);
5674 pos_mode = (insn_operand_mode[(int) CODE_FOR_insv][2] == VOIDmode
5675 ? word_mode : insn_operand_mode[(int) CODE_FOR_insv][2]);
5676 extraction_mode = (insn_operand_mode[(int) CODE_FOR_insv][3] == VOIDmode
5677 ? word_mode
5678 : insn_operand_mode[(int) CODE_FOR_insv][3]);
230d793d
RS
5679 }
5680#endif
5681
5682#ifdef HAVE_extzv
5683 if (! in_dest && unsignedp)
5684 {
0d8e55d8
JL
5685 wanted_inner_reg_mode
5686 = (insn_operand_mode[(int) CODE_FOR_extzv][1] == VOIDmode
5687 ? word_mode
5688 : insn_operand_mode[(int) CODE_FOR_extzv][1]);
5689 pos_mode = (insn_operand_mode[(int) CODE_FOR_extzv][3] == VOIDmode
5690 ? word_mode : insn_operand_mode[(int) CODE_FOR_extzv][3]);
5691 extraction_mode = (insn_operand_mode[(int) CODE_FOR_extzv][0] == VOIDmode
5692 ? word_mode
5693 : insn_operand_mode[(int) CODE_FOR_extzv][0]);
230d793d
RS
5694 }
5695#endif
5696
5697#ifdef HAVE_extv
5698 if (! in_dest && ! unsignedp)
5699 {
0d8e55d8
JL
5700 wanted_inner_reg_mode
5701 = (insn_operand_mode[(int) CODE_FOR_extv][1] == VOIDmode
5702 ? word_mode
5703 : insn_operand_mode[(int) CODE_FOR_extv][1]);
5704 pos_mode = (insn_operand_mode[(int) CODE_FOR_extv][3] == VOIDmode
5705 ? word_mode : insn_operand_mode[(int) CODE_FOR_extv][3]);
5706 extraction_mode = (insn_operand_mode[(int) CODE_FOR_extv][0] == VOIDmode
5707 ? word_mode
5708 : insn_operand_mode[(int) CODE_FOR_extv][0]);
230d793d
RS
5709 }
5710#endif
5711
5712 /* Never narrow an object, since that might not be safe. */
5713
5714 if (mode != VOIDmode
5715 && GET_MODE_SIZE (extraction_mode) < GET_MODE_SIZE (mode))
5716 extraction_mode = mode;
5717
5718 if (pos_rtx && GET_MODE (pos_rtx) != VOIDmode
5719 && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx)))
5720 pos_mode = GET_MODE (pos_rtx);
5721
d7cd794f
RK
5722 /* If this is not from memory, the desired mode is wanted_inner_reg_mode;
5723 if we have to change the mode of memory and cannot, the desired mode is
5724 EXTRACTION_MODE. */
5725 if (GET_CODE (inner) != MEM)
5726 wanted_inner_mode = wanted_inner_reg_mode;
5727 else if (inner_mode != wanted_inner_mode
5728 && (mode_dependent_address_p (XEXP (inner, 0))
5729 || MEM_VOLATILE_P (inner)))
5730 wanted_inner_mode = extraction_mode;
230d793d 5731
6139ff20
RK
5732 orig_pos = pos;
5733
f76b9db2
ILT
5734 if (BITS_BIG_ENDIAN)
5735 {
cf54c2cd
DE
5736 /* POS is passed as if BITS_BIG_ENDIAN == 0, so we need to convert it to
5737 BITS_BIG_ENDIAN style. If position is constant, compute new
5738 position. Otherwise, build subtraction.
5739 Note that POS is relative to the mode of the original argument.
5740 If it's a MEM we need to recompute POS relative to that.
5741 However, if we're extracting from (or inserting into) a register,
5742 we want to recompute POS relative to wanted_inner_mode. */
5743 int width = (GET_CODE (inner) == MEM
5744 ? GET_MODE_BITSIZE (is_mode)
5745 : GET_MODE_BITSIZE (wanted_inner_mode));
5746
f76b9db2 5747 if (pos_rtx == 0)
cf54c2cd 5748 pos = width - len - pos;
f76b9db2
ILT
5749 else
5750 pos_rtx
5751 = gen_rtx_combine (MINUS, GET_MODE (pos_rtx),
cf54c2cd
DE
5752 GEN_INT (width - len), pos_rtx);
5753 /* POS may be less than 0 now, but we check for that below.
5754 Note that it can only be less than 0 if GET_CODE (inner) != MEM. */
f76b9db2 5755 }
230d793d
RS
5756
5757 /* If INNER has a wider mode, make it smaller. If this is a constant
5758 extract, try to adjust the byte to point to the byte containing
5759 the value. */
d7cd794f
RK
5760 if (wanted_inner_mode != VOIDmode
5761 && GET_MODE_SIZE (wanted_inner_mode) < GET_MODE_SIZE (is_mode)
230d793d 5762 && ((GET_CODE (inner) == MEM
d7cd794f 5763 && (inner_mode == wanted_inner_mode
230d793d
RS
5764 || (! mode_dependent_address_p (XEXP (inner, 0))
5765 && ! MEM_VOLATILE_P (inner))))))
5766 {
5767 int offset = 0;
5768
5769 /* The computations below will be correct if the machine is big
5770 endian in both bits and bytes or little endian in bits and bytes.
5771 If it is mixed, we must adjust. */
5772
230d793d 5773 /* If bytes are big endian and we had a paradoxical SUBREG, we must
0f41302f 5774 adjust OFFSET to compensate. */
f76b9db2
ILT
5775 if (BYTES_BIG_ENDIAN
5776 && ! spans_byte
230d793d
RS
5777 && GET_MODE_SIZE (inner_mode) < GET_MODE_SIZE (is_mode))
5778 offset -= GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (inner_mode);
230d793d
RS
5779
5780 /* If this is a constant position, we can move to the desired byte. */
8999a12e 5781 if (pos_rtx == 0)
230d793d
RS
5782 {
5783 offset += pos / BITS_PER_UNIT;
d7cd794f 5784 pos %= GET_MODE_BITSIZE (wanted_inner_mode);
230d793d
RS
5785 }
5786
f76b9db2
ILT
5787 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN
5788 && ! spans_byte
d7cd794f 5789 && is_mode != wanted_inner_mode)
c6b3f1f2 5790 offset = (GET_MODE_SIZE (is_mode)
d7cd794f 5791 - GET_MODE_SIZE (wanted_inner_mode) - offset);
c6b3f1f2 5792
d7cd794f 5793 if (offset != 0 || inner_mode != wanted_inner_mode)
230d793d 5794 {
38a448ca
RH
5795 rtx newmem = gen_rtx_MEM (wanted_inner_mode,
5796 plus_constant (XEXP (inner, 0), offset));
230d793d 5797 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (inner);
c6df88cb 5798 MEM_COPY_ATTRIBUTES (newmem, inner);
230d793d
RS
5799 inner = newmem;
5800 }
5801 }
5802
9e74dc41
RK
5803 /* If INNER is not memory, we can always get it into the proper mode. If we
5804 are changing its mode, POS must be a constant and smaller than the size
5805 of the new mode. */
230d793d 5806 else if (GET_CODE (inner) != MEM)
9e74dc41
RK
5807 {
5808 if (GET_MODE (inner) != wanted_inner_mode
5809 && (pos_rtx != 0
5810 || orig_pos + len > GET_MODE_BITSIZE (wanted_inner_mode)))
5811 return 0;
5812
5813 inner = force_to_mode (inner, wanted_inner_mode,
5814 pos_rtx
5815 || len + orig_pos >= HOST_BITS_PER_WIDE_INT
5816 ? GET_MODE_MASK (wanted_inner_mode)
5817 : (((HOST_WIDE_INT) 1 << len) - 1) << orig_pos,
5818 NULL_RTX, 0);
5819 }
230d793d
RS
5820
5821 /* Adjust mode of POS_RTX, if needed. If we want a wider mode, we
5822 have to zero extend. Otherwise, we can just use a SUBREG. */
8999a12e 5823 if (pos_rtx != 0
230d793d
RS
5824 && GET_MODE_SIZE (pos_mode) > GET_MODE_SIZE (GET_MODE (pos_rtx)))
5825 pos_rtx = gen_rtx_combine (ZERO_EXTEND, pos_mode, pos_rtx);
8999a12e 5826 else if (pos_rtx != 0
230d793d
RS
5827 && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx)))
5828 pos_rtx = gen_lowpart_for_combine (pos_mode, pos_rtx);
5829
8999a12e
RK
5830 /* Make POS_RTX unless we already have it and it is correct. If we don't
5831 have a POS_RTX but we do have an ORIG_POS_RTX, the latter must
0f41302f 5832 be a CONST_INT. */
8999a12e
RK
5833 if (pos_rtx == 0 && orig_pos_rtx != 0 && INTVAL (orig_pos_rtx) == pos)
5834 pos_rtx = orig_pos_rtx;
5835
5836 else if (pos_rtx == 0)
5f4f0e22 5837 pos_rtx = GEN_INT (pos);
230d793d
RS
5838
5839 /* Make the required operation. See if we can use existing rtx. */
5840 new = gen_rtx_combine (unsignedp ? ZERO_EXTRACT : SIGN_EXTRACT,
5f4f0e22 5841 extraction_mode, inner, GEN_INT (len), pos_rtx);
230d793d
RS
5842 if (! in_dest)
5843 new = gen_lowpart_for_combine (mode, new);
5844
5845 return new;
5846}
5847\f
71923da7
RK
5848/* See if X contains an ASHIFT of COUNT or more bits that can be commuted
5849 with any other operations in X. Return X without that shift if so. */
5850
5851static rtx
5852extract_left_shift (x, count)
5853 rtx x;
5854 int count;
5855{
5856 enum rtx_code code = GET_CODE (x);
5857 enum machine_mode mode = GET_MODE (x);
5858 rtx tem;
5859
5860 switch (code)
5861 {
5862 case ASHIFT:
5863 /* This is the shift itself. If it is wide enough, we will return
5864 either the value being shifted if the shift count is equal to
5865 COUNT or a shift for the difference. */
5866 if (GET_CODE (XEXP (x, 1)) == CONST_INT
5867 && INTVAL (XEXP (x, 1)) >= count)
5868 return simplify_shift_const (NULL_RTX, ASHIFT, mode, XEXP (x, 0),
5869 INTVAL (XEXP (x, 1)) - count);
5870 break;
5871
5872 case NEG: case NOT:
5873 if ((tem = extract_left_shift (XEXP (x, 0), count)) != 0)
0c1c8ea6 5874 return gen_unary (code, mode, mode, tem);
71923da7
RK
5875
5876 break;
5877
5878 case PLUS: case IOR: case XOR: case AND:
5879 /* If we can safely shift this constant and we find the inner shift,
5880 make a new operation. */
5881 if (GET_CODE (XEXP (x,1)) == CONST_INT
b729186a 5882 && (INTVAL (XEXP (x, 1)) & ((((HOST_WIDE_INT) 1 << count)) - 1)) == 0
71923da7
RK
5883 && (tem = extract_left_shift (XEXP (x, 0), count)) != 0)
5884 return gen_binary (code, mode, tem,
5885 GEN_INT (INTVAL (XEXP (x, 1)) >> count));
5886
5887 break;
e9a25f70
JL
5888
5889 default:
5890 break;
71923da7
RK
5891 }
5892
5893 return 0;
5894}
5895\f
230d793d
RS
5896/* Look at the expression rooted at X. Look for expressions
5897 equivalent to ZERO_EXTRACT, SIGN_EXTRACT, ZERO_EXTEND, SIGN_EXTEND.
5898 Form these expressions.
5899
5900 Return the new rtx, usually just X.
5901
5902 Also, for machines like the Vax that don't have logical shift insns,
5903 try to convert logical to arithmetic shift operations in cases where
5904 they are equivalent. This undoes the canonicalizations to logical
5905 shifts done elsewhere.
5906
5907 We try, as much as possible, to re-use rtl expressions to save memory.
5908
5909 IN_CODE says what kind of expression we are processing. Normally, it is
42495ca0
RK
5910 SET. In a memory address (inside a MEM, PLUS or minus, the latter two
5911 being kludges), it is MEM. When processing the arguments of a comparison
230d793d
RS
5912 or a COMPARE against zero, it is COMPARE. */
5913
5914static rtx
5915make_compound_operation (x, in_code)
5916 rtx x;
5917 enum rtx_code in_code;
5918{
5919 enum rtx_code code = GET_CODE (x);
5920 enum machine_mode mode = GET_MODE (x);
5921 int mode_width = GET_MODE_BITSIZE (mode);
71923da7 5922 rtx rhs, lhs;
230d793d 5923 enum rtx_code next_code;
f24ad0e4 5924 int i;
230d793d 5925 rtx new = 0;
280f58ba 5926 rtx tem;
6f7d635c 5927 const char *fmt;
230d793d
RS
5928
5929 /* Select the code to be used in recursive calls. Once we are inside an
5930 address, we stay there. If we have a comparison, set to COMPARE,
5931 but once inside, go back to our default of SET. */
5932
42495ca0 5933 next_code = (code == MEM || code == PLUS || code == MINUS ? MEM
230d793d
RS
5934 : ((code == COMPARE || GET_RTX_CLASS (code) == '<')
5935 && XEXP (x, 1) == const0_rtx) ? COMPARE
5936 : in_code == COMPARE ? SET : in_code);
5937
5938 /* Process depending on the code of this operation. If NEW is set
5939 non-zero, it will be returned. */
5940
5941 switch (code)
5942 {
5943 case ASHIFT:
230d793d
RS
5944 /* Convert shifts by constants into multiplications if inside
5945 an address. */
5946 if (in_code == MEM && GET_CODE (XEXP (x, 1)) == CONST_INT
5f4f0e22 5947 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
230d793d 5948 && INTVAL (XEXP (x, 1)) >= 0)
280f58ba
RK
5949 {
5950 new = make_compound_operation (XEXP (x, 0), next_code);
5951 new = gen_rtx_combine (MULT, mode, new,
5952 GEN_INT ((HOST_WIDE_INT) 1
5953 << INTVAL (XEXP (x, 1))));
5954 }
230d793d
RS
5955 break;
5956
5957 case AND:
5958 /* If the second operand is not a constant, we can't do anything
5959 with it. */
5960 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5961 break;
5962
5963 /* If the constant is a power of two minus one and the first operand
5964 is a logical right shift, make an extraction. */
5965 if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
5966 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
280f58ba
RK
5967 {
5968 new = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code);
5969 new = make_extraction (mode, new, 0, XEXP (XEXP (x, 0), 1), i, 1,
5970 0, in_code == COMPARE);
5971 }
dfbe1b2f 5972
230d793d
RS
5973 /* Same as previous, but for (subreg (lshiftrt ...)) in first op. */
5974 else if (GET_CODE (XEXP (x, 0)) == SUBREG
5975 && subreg_lowpart_p (XEXP (x, 0))
5976 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == LSHIFTRT
5977 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
280f58ba
RK
5978 {
5979 new = make_compound_operation (XEXP (SUBREG_REG (XEXP (x, 0)), 0),
5980 next_code);
2f99f437 5981 new = make_extraction (GET_MODE (SUBREG_REG (XEXP (x, 0))), new, 0,
280f58ba
RK
5982 XEXP (SUBREG_REG (XEXP (x, 0)), 1), i, 1,
5983 0, in_code == COMPARE);
5984 }
45620ed4 5985 /* Same as previous, but for (xor/ior (lshiftrt...) (lshiftrt...)). */
c2f9f64e
JW
5986 else if ((GET_CODE (XEXP (x, 0)) == XOR
5987 || GET_CODE (XEXP (x, 0)) == IOR)
5988 && GET_CODE (XEXP (XEXP (x, 0), 0)) == LSHIFTRT
5989 && GET_CODE (XEXP (XEXP (x, 0), 1)) == LSHIFTRT
5990 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
5991 {
5992 /* Apply the distributive law, and then try to make extractions. */
5993 new = gen_rtx_combine (GET_CODE (XEXP (x, 0)), mode,
38a448ca
RH
5994 gen_rtx_AND (mode, XEXP (XEXP (x, 0), 0),
5995 XEXP (x, 1)),
5996 gen_rtx_AND (mode, XEXP (XEXP (x, 0), 1),
5997 XEXP (x, 1)));
c2f9f64e
JW
5998 new = make_compound_operation (new, in_code);
5999 }
a7c99304
RK
6000
6001 /* If we are have (and (rotate X C) M) and C is larger than the number
6002 of bits in M, this is an extraction. */
6003
6004 else if (GET_CODE (XEXP (x, 0)) == ROTATE
6005 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
6006 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0
6007 && i <= INTVAL (XEXP (XEXP (x, 0), 1)))
280f58ba
RK
6008 {
6009 new = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code);
6010 new = make_extraction (mode, new,
6011 (GET_MODE_BITSIZE (mode)
6012 - INTVAL (XEXP (XEXP (x, 0), 1))),
6013 NULL_RTX, i, 1, 0, in_code == COMPARE);
6014 }
a7c99304
RK
6015
6016 /* On machines without logical shifts, if the operand of the AND is
230d793d
RS
6017 a logical shift and our mask turns off all the propagated sign
6018 bits, we can replace the logical shift with an arithmetic shift. */
d0ab8cd3
RK
6019 else if (ashr_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
6020 && (lshr_optab->handlers[(int) mode].insn_code
6021 == CODE_FOR_nothing)
230d793d
RS
6022 && GET_CODE (XEXP (x, 0)) == LSHIFTRT
6023 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
6024 && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
5f4f0e22
CH
6025 && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT
6026 && mode_width <= HOST_BITS_PER_WIDE_INT)
230d793d 6027 {
5f4f0e22 6028 unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
230d793d
RS
6029
6030 mask >>= INTVAL (XEXP (XEXP (x, 0), 1));
6031 if ((INTVAL (XEXP (x, 1)) & ~mask) == 0)
6032 SUBST (XEXP (x, 0),
280f58ba
RK
6033 gen_rtx_combine (ASHIFTRT, mode,
6034 make_compound_operation (XEXP (XEXP (x, 0), 0),
6035 next_code),
230d793d
RS
6036 XEXP (XEXP (x, 0), 1)));
6037 }
6038
6039 /* If the constant is one less than a power of two, this might be
6040 representable by an extraction even if no shift is present.
6041 If it doesn't end up being a ZERO_EXTEND, we will ignore it unless
6042 we are in a COMPARE. */
6043 else if ((i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
280f58ba
RK
6044 new = make_extraction (mode,
6045 make_compound_operation (XEXP (x, 0),
6046 next_code),
6047 0, NULL_RTX, i, 1, 0, in_code == COMPARE);
230d793d
RS
6048
6049 /* If we are in a comparison and this is an AND with a power of two,
6050 convert this into the appropriate bit extract. */
6051 else if (in_code == COMPARE
6052 && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0)
280f58ba
RK
6053 new = make_extraction (mode,
6054 make_compound_operation (XEXP (x, 0),
6055 next_code),
6056 i, NULL_RTX, 1, 1, 0, 1);
230d793d
RS
6057
6058 break;
6059
6060 case LSHIFTRT:
6061 /* If the sign bit is known to be zero, replace this with an
6062 arithmetic shift. */
d0ab8cd3
RK
6063 if (ashr_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing
6064 && lshr_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
5f4f0e22 6065 && mode_width <= HOST_BITS_PER_WIDE_INT
951553af 6066 && (nonzero_bits (XEXP (x, 0), mode) & (1 << (mode_width - 1))) == 0)
230d793d 6067 {
280f58ba
RK
6068 new = gen_rtx_combine (ASHIFTRT, mode,
6069 make_compound_operation (XEXP (x, 0),
6070 next_code),
6071 XEXP (x, 1));
230d793d
RS
6072 break;
6073 }
6074
0f41302f 6075 /* ... fall through ... */
230d793d
RS
6076
6077 case ASHIFTRT:
71923da7
RK
6078 lhs = XEXP (x, 0);
6079 rhs = XEXP (x, 1);
6080
230d793d
RS
6081 /* If we have (ashiftrt (ashift foo C1) C2) with C2 >= C1,
6082 this is a SIGN_EXTRACT. */
71923da7
RK
6083 if (GET_CODE (rhs) == CONST_INT
6084 && GET_CODE (lhs) == ASHIFT
6085 && GET_CODE (XEXP (lhs, 1)) == CONST_INT
6086 && INTVAL (rhs) >= INTVAL (XEXP (lhs, 1)))
280f58ba 6087 {
71923da7 6088 new = make_compound_operation (XEXP (lhs, 0), next_code);
280f58ba 6089 new = make_extraction (mode, new,
71923da7
RK
6090 INTVAL (rhs) - INTVAL (XEXP (lhs, 1)),
6091 NULL_RTX, mode_width - INTVAL (rhs),
d0ab8cd3
RK
6092 code == LSHIFTRT, 0, in_code == COMPARE);
6093 }
6094
71923da7
RK
6095 /* See if we have operations between an ASHIFTRT and an ASHIFT.
6096 If so, try to merge the shifts into a SIGN_EXTEND. We could
6097 also do this for some cases of SIGN_EXTRACT, but it doesn't
6098 seem worth the effort; the case checked for occurs on Alpha. */
6099
6100 if (GET_RTX_CLASS (GET_CODE (lhs)) != 'o'
6101 && ! (GET_CODE (lhs) == SUBREG
6102 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (lhs))) == 'o'))
6103 && GET_CODE (rhs) == CONST_INT
6104 && INTVAL (rhs) < HOST_BITS_PER_WIDE_INT
6105 && (new = extract_left_shift (lhs, INTVAL (rhs))) != 0)
6106 new = make_extraction (mode, make_compound_operation (new, next_code),
6107 0, NULL_RTX, mode_width - INTVAL (rhs),
6108 code == LSHIFTRT, 0, in_code == COMPARE);
6109
230d793d 6110 break;
280f58ba
RK
6111
6112 case SUBREG:
6113 /* Call ourselves recursively on the inner expression. If we are
6114 narrowing the object and it has a different RTL code from
6115 what it originally did, do this SUBREG as a force_to_mode. */
6116
0a5cbff6 6117 tem = make_compound_operation (SUBREG_REG (x), in_code);
280f58ba
RK
6118 if (GET_CODE (tem) != GET_CODE (SUBREG_REG (x))
6119 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (tem))
6120 && subreg_lowpart_p (x))
0a5cbff6
RK
6121 {
6122 rtx newer = force_to_mode (tem, mode,
e3d616e3 6123 GET_MODE_MASK (mode), NULL_RTX, 0);
0a5cbff6
RK
6124
6125 /* If we have something other than a SUBREG, we might have
6126 done an expansion, so rerun outselves. */
6127 if (GET_CODE (newer) != SUBREG)
6128 newer = make_compound_operation (newer, in_code);
6129
6130 return newer;
6131 }
6f28d3e9
RH
6132
6133 /* If this is a paradoxical subreg, and the new code is a sign or
6134 zero extension, omit the subreg and widen the extension. If it
6135 is a regular subreg, we can still get rid of the subreg by not
6136 widening so much, or in fact removing the extension entirely. */
6137 if ((GET_CODE (tem) == SIGN_EXTEND
6138 || GET_CODE (tem) == ZERO_EXTEND)
6139 && subreg_lowpart_p (x))
6140 {
6141 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (tem))
6142 || (GET_MODE_SIZE (mode) >
6143 GET_MODE_SIZE (GET_MODE (XEXP (tem, 0)))))
6144 tem = gen_rtx_combine (GET_CODE (tem), mode, XEXP (tem, 0));
6145 else
6146 tem = gen_lowpart_for_combine (mode, XEXP (tem, 0));
6147 return tem;
6148 }
e9a25f70
JL
6149 break;
6150
6151 default:
6152 break;
230d793d
RS
6153 }
6154
6155 if (new)
6156 {
df62f951 6157 x = gen_lowpart_for_combine (mode, new);
230d793d
RS
6158 code = GET_CODE (x);
6159 }
6160
6161 /* Now recursively process each operand of this operation. */
6162 fmt = GET_RTX_FORMAT (code);
6163 for (i = 0; i < GET_RTX_LENGTH (code); i++)
6164 if (fmt[i] == 'e')
6165 {
6166 new = make_compound_operation (XEXP (x, i), next_code);
6167 SUBST (XEXP (x, i), new);
6168 }
6169
6170 return x;
6171}
6172\f
6173/* Given M see if it is a value that would select a field of bits
6174 within an item, but not the entire word. Return -1 if not.
6175 Otherwise, return the starting position of the field, where 0 is the
6176 low-order bit.
6177
6178 *PLEN is set to the length of the field. */
6179
6180static int
6181get_pos_from_mask (m, plen)
5f4f0e22 6182 unsigned HOST_WIDE_INT m;
230d793d
RS
6183 int *plen;
6184{
6185 /* Get the bit number of the first 1 bit from the right, -1 if none. */
6186 int pos = exact_log2 (m & - m);
6187
6188 if (pos < 0)
6189 return -1;
6190
6191 /* Now shift off the low-order zero bits and see if we have a power of
6192 two minus 1. */
6193 *plen = exact_log2 ((m >> pos) + 1);
6194
6195 if (*plen <= 0)
6196 return -1;
6197
6198 return pos;
6199}
6200\f
6139ff20
RK
6201/* See if X can be simplified knowing that we will only refer to it in
6202 MODE and will only refer to those bits that are nonzero in MASK.
6203 If other bits are being computed or if masking operations are done
6204 that select a superset of the bits in MASK, they can sometimes be
6205 ignored.
6206
6207 Return a possibly simplified expression, but always convert X to
6208 MODE. If X is a CONST_INT, AND the CONST_INT with MASK.
dfbe1b2f
RK
6209
6210 Also, if REG is non-zero and X is a register equal in value to REG,
e3d616e3
RK
6211 replace X with REG.
6212
6213 If JUST_SELECT is nonzero, don't optimize by noticing that bits in MASK
6214 are all off in X. This is used when X will be complemented, by either
180b8e4b 6215 NOT, NEG, or XOR. */
dfbe1b2f
RK
6216
6217static rtx
e3d616e3 6218force_to_mode (x, mode, mask, reg, just_select)
dfbe1b2f
RK
6219 rtx x;
6220 enum machine_mode mode;
6139ff20 6221 unsigned HOST_WIDE_INT mask;
dfbe1b2f 6222 rtx reg;
e3d616e3 6223 int just_select;
dfbe1b2f
RK
6224{
6225 enum rtx_code code = GET_CODE (x);
180b8e4b 6226 int next_select = just_select || code == XOR || code == NOT || code == NEG;
ef026f91
RS
6227 enum machine_mode op_mode;
6228 unsigned HOST_WIDE_INT fuller_mask, nonzero;
6139ff20
RK
6229 rtx op0, op1, temp;
6230
132d2040
RK
6231 /* If this is a CALL or ASM_OPERANDS, don't do anything. Some of the
6232 code below will do the wrong thing since the mode of such an
be3d27d6
CI
6233 expression is VOIDmode.
6234
6235 Also do nothing if X is a CLOBBER; this can happen if X was
6236 the return value from a call to gen_lowpart_for_combine. */
6237 if (code == CALL || code == ASM_OPERANDS || code == CLOBBER)
246e00f2
RK
6238 return x;
6239
6139ff20
RK
6240 /* We want to perform the operation is its present mode unless we know
6241 that the operation is valid in MODE, in which case we do the operation
6242 in MODE. */
1c75dfa4
RK
6243 op_mode = ((GET_MODE_CLASS (mode) == GET_MODE_CLASS (GET_MODE (x))
6244 && code_to_optab[(int) code] != 0
ef026f91
RS
6245 && (code_to_optab[(int) code]->handlers[(int) mode].insn_code
6246 != CODE_FOR_nothing))
6247 ? mode : GET_MODE (x));
e3d616e3 6248
aa988991
RS
6249 /* It is not valid to do a right-shift in a narrower mode
6250 than the one it came in with. */
6251 if ((code == LSHIFTRT || code == ASHIFTRT)
6252 && GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (GET_MODE (x)))
6253 op_mode = GET_MODE (x);
ef026f91
RS
6254
6255 /* Truncate MASK to fit OP_MODE. */
6256 if (op_mode)
6257 mask &= GET_MODE_MASK (op_mode);
6139ff20
RK
6258
6259 /* When we have an arithmetic operation, or a shift whose count we
6260 do not know, we need to assume that all bit the up to the highest-order
6261 bit in MASK will be needed. This is how we form such a mask. */
ef026f91
RS
6262 if (op_mode)
6263 fuller_mask = (GET_MODE_BITSIZE (op_mode) >= HOST_BITS_PER_WIDE_INT
6264 ? GET_MODE_MASK (op_mode)
6265 : ((HOST_WIDE_INT) 1 << (floor_log2 (mask) + 1)) - 1);
6266 else
6267 fuller_mask = ~ (HOST_WIDE_INT) 0;
6268
6269 /* Determine what bits of X are guaranteed to be (non)zero. */
6270 nonzero = nonzero_bits (x, mode);
6139ff20
RK
6271
6272 /* If none of the bits in X are needed, return a zero. */
e3d616e3 6273 if (! just_select && (nonzero & mask) == 0)
6139ff20 6274 return const0_rtx;
dfbe1b2f 6275
6139ff20
RK
6276 /* If X is a CONST_INT, return a new one. Do this here since the
6277 test below will fail. */
6278 if (GET_CODE (x) == CONST_INT)
ceb7983c
RK
6279 {
6280 HOST_WIDE_INT cval = INTVAL (x) & mask;
6281 int width = GET_MODE_BITSIZE (mode);
6282
6283 /* If MODE is narrower that HOST_WIDE_INT and CVAL is a negative
6284 number, sign extend it. */
6285 if (width > 0 && width < HOST_BITS_PER_WIDE_INT
6286 && (cval & ((HOST_WIDE_INT) 1 << (width - 1))) != 0)
6287 cval |= (HOST_WIDE_INT) -1 << width;
6288
6289 return GEN_INT (cval);
6290 }
dfbe1b2f 6291
180b8e4b
RK
6292 /* If X is narrower than MODE and we want all the bits in X's mode, just
6293 get X in the proper mode. */
6294 if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (mode)
6295 && (GET_MODE_MASK (GET_MODE (x)) & ~ mask) == 0)
dfbe1b2f
RK
6296 return gen_lowpart_for_combine (mode, x);
6297
71923da7
RK
6298 /* If we aren't changing the mode, X is not a SUBREG, and all zero bits in
6299 MASK are already known to be zero in X, we need not do anything. */
6300 if (GET_MODE (x) == mode && code != SUBREG && (~ mask & nonzero) == 0)
6139ff20
RK
6301 return x;
6302
dfbe1b2f
RK
6303 switch (code)
6304 {
6139ff20
RK
6305 case CLOBBER:
6306 /* If X is a (clobber (const_int)), return it since we know we are
0f41302f 6307 generating something that won't match. */
6139ff20
RK
6308 return x;
6309
6139ff20
RK
6310 case USE:
6311 /* X is a (use (mem ..)) that was made from a bit-field extraction that
6312 spanned the boundary of the MEM. If we are now masking so it is
6313 within that boundary, we don't need the USE any more. */
f76b9db2
ILT
6314 if (! BITS_BIG_ENDIAN
6315 && (mask & ~ GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
e3d616e3 6316 return force_to_mode (XEXP (x, 0), mode, mask, reg, next_select);
f76b9db2 6317 break;
6139ff20 6318
dfbe1b2f
RK
6319 case SIGN_EXTEND:
6320 case ZERO_EXTEND:
6321 case ZERO_EXTRACT:
6322 case SIGN_EXTRACT:
6323 x = expand_compound_operation (x);
6324 if (GET_CODE (x) != code)
e3d616e3 6325 return force_to_mode (x, mode, mask, reg, next_select);
dfbe1b2f
RK
6326 break;
6327
6328 case REG:
6329 if (reg != 0 && (rtx_equal_p (get_last_value (reg), x)
6330 || rtx_equal_p (reg, get_last_value (x))))
6331 x = reg;
6332 break;
6333
dfbe1b2f 6334 case SUBREG:
6139ff20 6335 if (subreg_lowpart_p (x)
180b8e4b
RK
6336 /* We can ignore the effect of this SUBREG if it narrows the mode or
6337 if the constant masks to zero all the bits the mode doesn't
6338 have. */
6139ff20
RK
6339 && ((GET_MODE_SIZE (GET_MODE (x))
6340 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
6139ff20
RK
6341 || (0 == (mask
6342 & GET_MODE_MASK (GET_MODE (x))
180b8e4b 6343 & ~ GET_MODE_MASK (GET_MODE (SUBREG_REG (x)))))))
e3d616e3 6344 return force_to_mode (SUBREG_REG (x), mode, mask, reg, next_select);
dfbe1b2f
RK
6345 break;
6346
6347 case AND:
6139ff20
RK
6348 /* If this is an AND with a constant, convert it into an AND
6349 whose constant is the AND of that constant with MASK. If it
6350 remains an AND of MASK, delete it since it is redundant. */
dfbe1b2f 6351
2ca9ae17 6352 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
dfbe1b2f 6353 {
6139ff20
RK
6354 x = simplify_and_const_int (x, op_mode, XEXP (x, 0),
6355 mask & INTVAL (XEXP (x, 1)));
dfbe1b2f
RK
6356
6357 /* If X is still an AND, see if it is an AND with a mask that
71923da7
RK
6358 is just some low-order bits. If so, and it is MASK, we don't
6359 need it. */
dfbe1b2f
RK
6360
6361 if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT
e51712db 6362 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (x, 1)) == mask)
dfbe1b2f 6363 x = XEXP (x, 0);
d0ab8cd3 6364
71923da7
RK
6365 /* If it remains an AND, try making another AND with the bits
6366 in the mode mask that aren't in MASK turned on. If the
6367 constant in the AND is wide enough, this might make a
6368 cheaper constant. */
6369
6370 if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT
2ca9ae17
JW
6371 && GET_MODE_MASK (GET_MODE (x)) != mask
6372 && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT)
71923da7
RK
6373 {
6374 HOST_WIDE_INT cval = (INTVAL (XEXP (x, 1))
6375 | (GET_MODE_MASK (GET_MODE (x)) & ~ mask));
6376 int width = GET_MODE_BITSIZE (GET_MODE (x));
6377 rtx y;
6378
6379 /* If MODE is narrower that HOST_WIDE_INT and CVAL is a negative
6380 number, sign extend it. */
6381 if (width > 0 && width < HOST_BITS_PER_WIDE_INT
6382 && (cval & ((HOST_WIDE_INT) 1 << (width - 1))) != 0)
6383 cval |= (HOST_WIDE_INT) -1 << width;
6384
6385 y = gen_binary (AND, GET_MODE (x), XEXP (x, 0), GEN_INT (cval));
6386 if (rtx_cost (y, SET) < rtx_cost (x, SET))
6387 x = y;
6388 }
6389
d0ab8cd3 6390 break;
dfbe1b2f
RK
6391 }
6392
6139ff20 6393 goto binop;
dfbe1b2f
RK
6394
6395 case PLUS:
6139ff20
RK
6396 /* In (and (plus FOO C1) M), if M is a mask that just turns off
6397 low-order bits (as in an alignment operation) and FOO is already
6398 aligned to that boundary, mask C1 to that boundary as well.
6399 This may eliminate that PLUS and, later, the AND. */
9fa6d012
TG
6400
6401 {
6402 int width = GET_MODE_BITSIZE (mode);
6403 unsigned HOST_WIDE_INT smask = mask;
6404
6405 /* If MODE is narrower than HOST_WIDE_INT and mask is a negative
6406 number, sign extend it. */
6407
6408 if (width < HOST_BITS_PER_WIDE_INT
6409 && (smask & ((HOST_WIDE_INT) 1 << (width - 1))) != 0)
6410 smask |= (HOST_WIDE_INT) -1 << width;
6411
6412 if (GET_CODE (XEXP (x, 1)) == CONST_INT
0e9ff885
DM
6413 && exact_log2 (- smask) >= 0)
6414 {
6415#ifdef STACK_BIAS
6416 if (STACK_BIAS
6417 && (XEXP (x, 0) == stack_pointer_rtx
6418 || XEXP (x, 0) == frame_pointer_rtx))
6419 {
6420 int sp_alignment = STACK_BOUNDARY / BITS_PER_UNIT;
6421 unsigned HOST_WIDE_INT sp_mask = GET_MODE_MASK (mode);
6422
6423 sp_mask &= ~ (sp_alignment - 1);
835c8e04
DT
6424 if ((sp_mask & ~ smask) == 0
6425 && ((INTVAL (XEXP (x, 1)) - STACK_BIAS) & ~ smask) != 0)
0e9ff885
DM
6426 return force_to_mode (plus_constant (XEXP (x, 0),
6427 ((INTVAL (XEXP (x, 1)) -
835c8e04 6428 STACK_BIAS) & smask)
0e9ff885 6429 + STACK_BIAS),
835c8e04 6430 mode, smask, reg, next_select);
0e9ff885
DM
6431 }
6432#endif
835c8e04
DT
6433 if ((nonzero_bits (XEXP (x, 0), mode) & ~ smask) == 0
6434 && (INTVAL (XEXP (x, 1)) & ~ smask) != 0)
0e9ff885 6435 return force_to_mode (plus_constant (XEXP (x, 0),
835c8e04
DT
6436 (INTVAL (XEXP (x, 1))
6437 & smask)),
6438 mode, smask, reg, next_select);
0e9ff885 6439 }
9fa6d012 6440 }
6139ff20 6441
0f41302f 6442 /* ... fall through ... */
6139ff20 6443
dfbe1b2f
RK
6444 case MINUS:
6445 case MULT:
6139ff20
RK
6446 /* For PLUS, MINUS and MULT, we need any bits less significant than the
6447 most significant bit in MASK since carries from those bits will
6448 affect the bits we are interested in. */
6449 mask = fuller_mask;
6450 goto binop;
6451
dfbe1b2f
RK
6452 case IOR:
6453 case XOR:
6139ff20
RK
6454 /* If X is (ior (lshiftrt FOO C1) C2), try to commute the IOR and
6455 LSHIFTRT so we end up with an (and (lshiftrt (ior ...) ...) ...)
6456 operation which may be a bitfield extraction. Ensure that the
6457 constant we form is not wider than the mode of X. */
6458
6459 if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
6460 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
6461 && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
6462 && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT
6463 && GET_CODE (XEXP (x, 1)) == CONST_INT
6464 && ((INTVAL (XEXP (XEXP (x, 0), 1))
6465 + floor_log2 (INTVAL (XEXP (x, 1))))
6466 < GET_MODE_BITSIZE (GET_MODE (x)))
6467 && (INTVAL (XEXP (x, 1))
01c82bbb 6468 & ~ nonzero_bits (XEXP (x, 0), GET_MODE (x))) == 0)
6139ff20
RK
6469 {
6470 temp = GEN_INT ((INTVAL (XEXP (x, 1)) & mask)
6471 << INTVAL (XEXP (XEXP (x, 0), 1)));
6472 temp = gen_binary (GET_CODE (x), GET_MODE (x),
6473 XEXP (XEXP (x, 0), 0), temp);
d4d2b13f
RK
6474 x = gen_binary (LSHIFTRT, GET_MODE (x), temp,
6475 XEXP (XEXP (x, 0), 1));
e3d616e3 6476 return force_to_mode (x, mode, mask, reg, next_select);
6139ff20
RK
6477 }
6478
6479 binop:
dfbe1b2f 6480 /* For most binary operations, just propagate into the operation and
6139ff20
RK
6481 change the mode if we have an operation of that mode. */
6482
e3d616e3
RK
6483 op0 = gen_lowpart_for_combine (op_mode,
6484 force_to_mode (XEXP (x, 0), mode, mask,
6485 reg, next_select));
6486 op1 = gen_lowpart_for_combine (op_mode,
6487 force_to_mode (XEXP (x, 1), mode, mask,
6488 reg, next_select));
6139ff20 6489
2dd484ed
RK
6490 /* If OP1 is a CONST_INT and X is an IOR or XOR, clear bits outside
6491 MASK since OP1 might have been sign-extended but we never want
6492 to turn on extra bits, since combine might have previously relied
6493 on them being off. */
6494 if (GET_CODE (op1) == CONST_INT && (code == IOR || code == XOR)
6495 && (INTVAL (op1) & mask) != 0)
6496 op1 = GEN_INT (INTVAL (op1) & mask);
6497
6139ff20
RK
6498 if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
6499 x = gen_binary (code, op_mode, op0, op1);
d0ab8cd3 6500 break;
dfbe1b2f
RK
6501
6502 case ASHIFT:
dfbe1b2f 6503 /* For left shifts, do the same, but just for the first operand.
f6785026
RK
6504 However, we cannot do anything with shifts where we cannot
6505 guarantee that the counts are smaller than the size of the mode
6506 because such a count will have a different meaning in a
6139ff20 6507 wider mode. */
f6785026
RK
6508
6509 if (! (GET_CODE (XEXP (x, 1)) == CONST_INT
6139ff20 6510 && INTVAL (XEXP (x, 1)) >= 0
f6785026
RK
6511 && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (mode))
6512 && ! (GET_MODE (XEXP (x, 1)) != VOIDmode
6513 && (nonzero_bits (XEXP (x, 1), GET_MODE (XEXP (x, 1)))
adb7a1cb 6514 < (unsigned HOST_WIDE_INT) GET_MODE_BITSIZE (mode))))
f6785026
RK
6515 break;
6516
6139ff20
RK
6517 /* If the shift count is a constant and we can do arithmetic in
6518 the mode of the shift, refine which bits we need. Otherwise, use the
6519 conservative form of the mask. */
6520 if (GET_CODE (XEXP (x, 1)) == CONST_INT
6521 && INTVAL (XEXP (x, 1)) >= 0
6522 && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (op_mode)
6523 && GET_MODE_BITSIZE (op_mode) <= HOST_BITS_PER_WIDE_INT)
6524 mask >>= INTVAL (XEXP (x, 1));
6525 else
6526 mask = fuller_mask;
6527
6528 op0 = gen_lowpart_for_combine (op_mode,
6529 force_to_mode (XEXP (x, 0), op_mode,
e3d616e3 6530 mask, reg, next_select));
6139ff20
RK
6531
6532 if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0))
6533 x = gen_binary (code, op_mode, op0, XEXP (x, 1));
d0ab8cd3 6534 break;
dfbe1b2f
RK
6535
6536 case LSHIFTRT:
1347292b
JW
6537 /* Here we can only do something if the shift count is a constant,
6538 this shift constant is valid for the host, and we can do arithmetic
6539 in OP_MODE. */
dfbe1b2f
RK
6540
6541 if (GET_CODE (XEXP (x, 1)) == CONST_INT
1347292b 6542 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
6139ff20 6543 && GET_MODE_BITSIZE (op_mode) <= HOST_BITS_PER_WIDE_INT)
d0ab8cd3 6544 {
6139ff20
RK
6545 rtx inner = XEXP (x, 0);
6546
6547 /* Select the mask of the bits we need for the shift operand. */
6548 mask <<= INTVAL (XEXP (x, 1));
d0ab8cd3 6549
6139ff20
RK
6550 /* We can only change the mode of the shift if we can do arithmetic
6551 in the mode of the shift and MASK is no wider than the width of
6552 OP_MODE. */
6553 if (GET_MODE_BITSIZE (op_mode) > HOST_BITS_PER_WIDE_INT
6554 || (mask & ~ GET_MODE_MASK (op_mode)) != 0)
d0ab8cd3
RK
6555 op_mode = GET_MODE (x);
6556
e3d616e3 6557 inner = force_to_mode (inner, op_mode, mask, reg, next_select);
6139ff20
RK
6558
6559 if (GET_MODE (x) != op_mode || inner != XEXP (x, 0))
6560 x = gen_binary (LSHIFTRT, op_mode, inner, XEXP (x, 1));
d0ab8cd3 6561 }
6139ff20
RK
6562
6563 /* If we have (and (lshiftrt FOO C1) C2) where the combination of the
6564 shift and AND produces only copies of the sign bit (C2 is one less
6565 than a power of two), we can do this with just a shift. */
6566
6567 if (GET_CODE (x) == LSHIFTRT
6568 && GET_CODE (XEXP (x, 1)) == CONST_INT
6569 && ((INTVAL (XEXP (x, 1))
6570 + num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0))))
6571 >= GET_MODE_BITSIZE (GET_MODE (x)))
6572 && exact_log2 (mask + 1) >= 0
6573 && (num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
6574 >= exact_log2 (mask + 1)))
6575 x = gen_binary (LSHIFTRT, GET_MODE (x), XEXP (x, 0),
6576 GEN_INT (GET_MODE_BITSIZE (GET_MODE (x))
6577 - exact_log2 (mask + 1)));
fae2db47
JW
6578
6579 goto shiftrt;
d0ab8cd3
RK
6580
6581 case ASHIFTRT:
6139ff20
RK
6582 /* If we are just looking for the sign bit, we don't need this shift at
6583 all, even if it has a variable count. */
9bf22b75 6584 if (GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
e51712db 6585 && (mask == ((unsigned HOST_WIDE_INT) 1
9bf22b75 6586 << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
e3d616e3 6587 return force_to_mode (XEXP (x, 0), mode, mask, reg, next_select);
6139ff20
RK
6588
6589 /* If this is a shift by a constant, get a mask that contains those bits
6590 that are not copies of the sign bit. We then have two cases: If
6591 MASK only includes those bits, this can be a logical shift, which may
6592 allow simplifications. If MASK is a single-bit field not within
6593 those bits, we are requesting a copy of the sign bit and hence can
6594 shift the sign bit to the appropriate location. */
6595
6596 if (GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) >= 0
6597 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
6598 {
6599 int i = -1;
6600
b69960ac
RK
 6601	      /* If the considered data is wider than HOST_WIDE_INT, we can't
6602 represent a mask for all its bits in a single scalar.
6603 But we only care about the lower bits, so calculate these. */
6604
6a11342f 6605 if (GET_MODE_BITSIZE (GET_MODE (x)) > HOST_BITS_PER_WIDE_INT)
b69960ac 6606 {
0f41302f 6607 nonzero = ~ (HOST_WIDE_INT) 0;
b69960ac
RK
6608
6609 /* GET_MODE_BITSIZE (GET_MODE (x)) - INTVAL (XEXP (x, 1))
6610 is the number of bits a full-width mask would have set.
6611 We need only shift if these are fewer than nonzero can
6612 hold. If not, we must keep all bits set in nonzero. */
6613
6614 if (GET_MODE_BITSIZE (GET_MODE (x)) - INTVAL (XEXP (x, 1))
6615 < HOST_BITS_PER_WIDE_INT)
6616 nonzero >>= INTVAL (XEXP (x, 1))
6617 + HOST_BITS_PER_WIDE_INT
6618 - GET_MODE_BITSIZE (GET_MODE (x)) ;
6619 }
6620 else
6621 {
6622 nonzero = GET_MODE_MASK (GET_MODE (x));
6623 nonzero >>= INTVAL (XEXP (x, 1));
6624 }
6139ff20
RK
6625
6626 if ((mask & ~ nonzero) == 0
6627 || (i = exact_log2 (mask)) >= 0)
6628 {
6629 x = simplify_shift_const
6630 (x, LSHIFTRT, GET_MODE (x), XEXP (x, 0),
6631 i < 0 ? INTVAL (XEXP (x, 1))
6632 : GET_MODE_BITSIZE (GET_MODE (x)) - 1 - i);
6633
6634 if (GET_CODE (x) != ASHIFTRT)
e3d616e3 6635 return force_to_mode (x, mode, mask, reg, next_select);
6139ff20
RK
6636 }
6637 }
6638
6639 /* If MASK is 1, convert this to a LSHIFTRT. This can be done
6640 even if the shift count isn't a constant. */
6641 if (mask == 1)
6642 x = gen_binary (LSHIFTRT, GET_MODE (x), XEXP (x, 0), XEXP (x, 1));
6643
fae2db47
JW
6644 shiftrt:
6645
6646 /* If this is a zero- or sign-extension operation that just affects bits
4c002f29
RK
6647 we don't care about, remove it. Be sure the call above returned
6648 something that is still a shift. */
d0ab8cd3 6649
4c002f29
RK
6650 if ((GET_CODE (x) == LSHIFTRT || GET_CODE (x) == ASHIFTRT)
6651 && GET_CODE (XEXP (x, 1)) == CONST_INT
d0ab8cd3 6652 && INTVAL (XEXP (x, 1)) >= 0
6139ff20
RK
6653 && (INTVAL (XEXP (x, 1))
6654 <= GET_MODE_BITSIZE (GET_MODE (x)) - (floor_log2 (mask) + 1))
d0ab8cd3
RK
6655 && GET_CODE (XEXP (x, 0)) == ASHIFT
6656 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
6657 && INTVAL (XEXP (XEXP (x, 0), 1)) == INTVAL (XEXP (x, 1)))
e3d616e3
RK
6658 return force_to_mode (XEXP (XEXP (x, 0), 0), mode, mask,
6659 reg, next_select);
6139ff20 6660
dfbe1b2f
RK
6661 break;
6662
6139ff20
RK
6663 case ROTATE:
6664 case ROTATERT:
6665 /* If the shift count is constant and we can do computations
6666 in the mode of X, compute where the bits we care about are.
6667 Otherwise, we can't do anything. Don't change the mode of
6668 the shift or propagate MODE into the shift, though. */
6669 if (GET_CODE (XEXP (x, 1)) == CONST_INT
6670 && INTVAL (XEXP (x, 1)) >= 0)
6671 {
6672 temp = simplify_binary_operation (code == ROTATE ? ROTATERT : ROTATE,
6673 GET_MODE (x), GEN_INT (mask),
6674 XEXP (x, 1));
7d171a1e 6675 if (temp && GET_CODE(temp) == CONST_INT)
6139ff20
RK
6676 SUBST (XEXP (x, 0),
6677 force_to_mode (XEXP (x, 0), GET_MODE (x),
e3d616e3 6678 INTVAL (temp), reg, next_select));
6139ff20
RK
6679 }
6680 break;
6681
dfbe1b2f 6682 case NEG:
180b8e4b
RK
6683 /* If we just want the low-order bit, the NEG isn't needed since it
6684 won't change the low-order bit. */
6685 if (mask == 1)
6686 return force_to_mode (XEXP (x, 0), mode, mask, reg, just_select);
6687
6139ff20
RK
6688 /* We need any bits less significant than the most significant bit in
6689 MASK since carries from those bits will affect the bits we are
6690 interested in. */
6691 mask = fuller_mask;
6692 goto unop;
6693
dfbe1b2f 6694 case NOT:
6139ff20
RK
6695 /* (not FOO) is (xor FOO CONST), so if FOO is an LSHIFTRT, we can do the
6696 same as the XOR case above. Ensure that the constant we form is not
6697 wider than the mode of X. */
6698
6699 if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
6700 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
6701 && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
6702 && (INTVAL (XEXP (XEXP (x, 0), 1)) + floor_log2 (mask)
6703 < GET_MODE_BITSIZE (GET_MODE (x)))
6704 && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT)
6705 {
6706 temp = GEN_INT (mask << INTVAL (XEXP (XEXP (x, 0), 1)));
6707 temp = gen_binary (XOR, GET_MODE (x), XEXP (XEXP (x, 0), 0), temp);
6708 x = gen_binary (LSHIFTRT, GET_MODE (x), temp, XEXP (XEXP (x, 0), 1));
6709
e3d616e3 6710 return force_to_mode (x, mode, mask, reg, next_select);
6139ff20
RK
6711 }
6712
f82da7d2
JW
6713 /* (and (not FOO) CONST) is (not (or FOO (not CONST))), so we must
6714 use the full mask inside the NOT. */
6715 mask = fuller_mask;
6716
6139ff20 6717 unop:
e3d616e3
RK
6718 op0 = gen_lowpart_for_combine (op_mode,
6719 force_to_mode (XEXP (x, 0), mode, mask,
6720 reg, next_select));
6139ff20 6721 if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0))
0c1c8ea6 6722 x = gen_unary (code, op_mode, op_mode, op0);
6139ff20
RK
6723 break;
6724
6725 case NE:
6726 /* (and (ne FOO 0) CONST) can be (and FOO CONST) if CONST is included
3aceff0d 6727 in STORE_FLAG_VALUE and FOO has a single bit that might be nonzero,
1a6ec070 6728 which is equal to STORE_FLAG_VALUE. */
3aceff0d
RK
6729 if ((mask & ~ STORE_FLAG_VALUE) == 0 && XEXP (x, 1) == const0_rtx
6730 && exact_log2 (nonzero_bits (XEXP (x, 0), mode)) >= 0
1a6ec070 6731 && nonzero_bits (XEXP (x, 0), mode) == STORE_FLAG_VALUE)
e3d616e3 6732 return force_to_mode (XEXP (x, 0), mode, mask, reg, next_select);
6139ff20 6733
d0ab8cd3
RK
6734 break;
6735
6736 case IF_THEN_ELSE:
6737 /* We have no way of knowing if the IF_THEN_ELSE can itself be
6738 written in a narrower mode. We play it safe and do not do so. */
6739
6740 SUBST (XEXP (x, 1),
6741 gen_lowpart_for_combine (GET_MODE (x),
6742 force_to_mode (XEXP (x, 1), mode,
e3d616e3 6743 mask, reg, next_select)));
d0ab8cd3
RK
6744 SUBST (XEXP (x, 2),
6745 gen_lowpart_for_combine (GET_MODE (x),
6746 force_to_mode (XEXP (x, 2), mode,
e3d616e3 6747 mask, reg,next_select)));
d0ab8cd3 6748 break;
e9a25f70
JL
6749
6750 default:
6751 break;
dfbe1b2f
RK
6752 }
6753
d0ab8cd3 6754 /* Ensure we return a value of the proper mode. */
dfbe1b2f
RK
6755 return gen_lowpart_for_combine (mode, x);
6756}
6757\f
abe6e52f
RK
6758/* Return nonzero if X is an expression that has one of two values depending on
6759 whether some other value is zero or nonzero. In that case, we return the
6760 value that is being tested, *PTRUE is set to the value if the rtx being
6761 returned has a nonzero value, and *PFALSE is set to the other alternative.
6762
6763 If we return zero, we set *PTRUE and *PFALSE to X. */
6764
6765static rtx
6766if_then_else_cond (x, ptrue, pfalse)
6767 rtx x;
6768 rtx *ptrue, *pfalse;
6769{
6770 enum machine_mode mode = GET_MODE (x);
6771 enum rtx_code code = GET_CODE (x);
6772 int size = GET_MODE_BITSIZE (mode);
6773 rtx cond0, cond1, true0, true1, false0, false1;
6774 unsigned HOST_WIDE_INT nz;
6775
6776 /* If this is a unary operation whose operand has one of two values, apply
6777 our opcode to compute those values. */
6778 if (GET_RTX_CLASS (code) == '1'
6779 && (cond0 = if_then_else_cond (XEXP (x, 0), &true0, &false0)) != 0)
6780 {
0c1c8ea6
RK
6781 *ptrue = gen_unary (code, mode, GET_MODE (XEXP (x, 0)), true0);
6782 *pfalse = gen_unary (code, mode, GET_MODE (XEXP (x, 0)), false0);
abe6e52f
RK
6783 return cond0;
6784 }
6785
3a19aabc 6786 /* If this is a COMPARE, do nothing, since the IF_THEN_ELSE we would
ddd5a7c1 6787 make can't possibly match and would suppress other optimizations. */
3a19aabc
RK
6788 else if (code == COMPARE)
6789 ;
6790
abe6e52f
RK
6791 /* If this is a binary operation, see if either side has only one of two
6792 values. If either one does or if both do and they are conditional on
6793 the same value, compute the new true and false values. */
6794 else if (GET_RTX_CLASS (code) == 'c' || GET_RTX_CLASS (code) == '2'
6795 || GET_RTX_CLASS (code) == '<')
6796 {
6797 cond0 = if_then_else_cond (XEXP (x, 0), &true0, &false0);
6798 cond1 = if_then_else_cond (XEXP (x, 1), &true1, &false1);
6799
6800 if ((cond0 != 0 || cond1 != 0)
6801 && ! (cond0 != 0 && cond1 != 0 && ! rtx_equal_p (cond0, cond1)))
6802 {
987e845a
JW
6803 /* If if_then_else_cond returned zero, then true/false are the
6804 same rtl. We must copy one of them to prevent invalid rtl
6805 sharing. */
6806 if (cond0 == 0)
6807 true0 = copy_rtx (true0);
6808 else if (cond1 == 0)
6809 true1 = copy_rtx (true1);
6810
abe6e52f
RK
6811 *ptrue = gen_binary (code, mode, true0, true1);
6812 *pfalse = gen_binary (code, mode, false0, false1);
6813 return cond0 ? cond0 : cond1;
6814 }
9210df58 6815
9210df58 6816 /* See if we have PLUS, IOR, XOR, MINUS or UMAX, where one of the
0802d516
RK
6817 operands is zero when the other is non-zero, and vice-versa,
6818 and STORE_FLAG_VALUE is 1 or -1. */
9210df58 6819
0802d516
RK
6820 if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
6821 && (code == PLUS || code == IOR || code == XOR || code == MINUS
9210df58
RK
6822 || code == UMAX)
6823 && GET_CODE (XEXP (x, 0)) == MULT && GET_CODE (XEXP (x, 1)) == MULT)
6824 {
6825 rtx op0 = XEXP (XEXP (x, 0), 1);
6826 rtx op1 = XEXP (XEXP (x, 1), 1);
6827
6828 cond0 = XEXP (XEXP (x, 0), 0);
6829 cond1 = XEXP (XEXP (x, 1), 0);
6830
6831 if (GET_RTX_CLASS (GET_CODE (cond0)) == '<'
6832 && GET_RTX_CLASS (GET_CODE (cond1)) == '<'
6833 && reversible_comparison_p (cond1)
6834 && ((GET_CODE (cond0) == reverse_condition (GET_CODE (cond1))
6835 && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 0))
6836 && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 1)))
6837 || ((swap_condition (GET_CODE (cond0))
6838 == reverse_condition (GET_CODE (cond1)))
6839 && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 1))
6840 && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 0))))
6841 && ! side_effects_p (x))
6842 {
6843 *ptrue = gen_binary (MULT, mode, op0, const_true_rtx);
6844 *pfalse = gen_binary (MULT, mode,
6845 (code == MINUS
0c1c8ea6 6846 ? gen_unary (NEG, mode, mode, op1) : op1),
9210df58
RK
6847 const_true_rtx);
6848 return cond0;
6849 }
6850 }
6851
6852 /* Similarly for MULT, AND and UMIN, execpt that for these the result
6853 is always zero. */
0802d516
RK
6854 if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
6855 && (code == MULT || code == AND || code == UMIN)
9210df58
RK
6856 && GET_CODE (XEXP (x, 0)) == MULT && GET_CODE (XEXP (x, 1)) == MULT)
6857 {
6858 cond0 = XEXP (XEXP (x, 0), 0);
6859 cond1 = XEXP (XEXP (x, 1), 0);
6860
6861 if (GET_RTX_CLASS (GET_CODE (cond0)) == '<'
6862 && GET_RTX_CLASS (GET_CODE (cond1)) == '<'
6863 && reversible_comparison_p (cond1)
6864 && ((GET_CODE (cond0) == reverse_condition (GET_CODE (cond1))
6865 && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 0))
6866 && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 1)))
6867 || ((swap_condition (GET_CODE (cond0))
6868 == reverse_condition (GET_CODE (cond1)))
6869 && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 1))
6870 && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 0))))
6871 && ! side_effects_p (x))
6872 {
6873 *ptrue = *pfalse = const0_rtx;
6874 return cond0;
6875 }
6876 }
abe6e52f
RK
6877 }
6878
6879 else if (code == IF_THEN_ELSE)
6880 {
6881 /* If we have IF_THEN_ELSE already, extract the condition and
6882 canonicalize it if it is NE or EQ. */
6883 cond0 = XEXP (x, 0);
6884 *ptrue = XEXP (x, 1), *pfalse = XEXP (x, 2);
6885 if (GET_CODE (cond0) == NE && XEXP (cond0, 1) == const0_rtx)
6886 return XEXP (cond0, 0);
6887 else if (GET_CODE (cond0) == EQ && XEXP (cond0, 1) == const0_rtx)
6888 {
6889 *ptrue = XEXP (x, 2), *pfalse = XEXP (x, 1);
6890 return XEXP (cond0, 0);
6891 }
6892 else
6893 return cond0;
6894 }
6895
6896 /* If X is a normal SUBREG with both inner and outer modes integral,
6897 we can narrow both the true and false values of the inner expression,
6898 if there is a condition. */
6899 else if (code == SUBREG && GET_MODE_CLASS (mode) == MODE_INT
6900 && GET_MODE_CLASS (GET_MODE (SUBREG_REG (x))) == MODE_INT
6901 && GET_MODE_SIZE (mode) <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
6902 && 0 != (cond0 = if_then_else_cond (SUBREG_REG (x),
6903 &true0, &false0)))
6904 {
00244e6b
RK
6905 *ptrue = force_to_mode (true0, mode, GET_MODE_MASK (mode), NULL_RTX, 0);
6906 *pfalse
6907 = force_to_mode (false0, mode, GET_MODE_MASK (mode), NULL_RTX, 0);
abe6e52f 6908
abe6e52f
RK
6909 return cond0;
6910 }
6911
6912 /* If X is a constant, this isn't special and will cause confusions
6913 if we treat it as such. Likewise if it is equivalent to a constant. */
6914 else if (CONSTANT_P (x)
6915 || ((cond0 = get_last_value (x)) != 0 && CONSTANT_P (cond0)))
6916 ;
6917
6918 /* If X is known to be either 0 or -1, those are the true and
6919 false values when testing X. */
6920 else if (num_sign_bit_copies (x, mode) == size)
6921 {
6922 *ptrue = constm1_rtx, *pfalse = const0_rtx;
6923 return x;
6924 }
6925
6926 /* Likewise for 0 or a single bit. */
6927 else if (exact_log2 (nz = nonzero_bits (x, mode)) >= 0)
6928 {
6929 *ptrue = GEN_INT (nz), *pfalse = const0_rtx;
6930 return x;
6931 }
6932
6933 /* Otherwise fail; show no condition with true and false values the same. */
6934 *ptrue = *pfalse = x;
6935 return 0;
6936}
6937\f
1a26b032
RK
6938/* Return the value of expression X given the fact that condition COND
6939 is known to be true when applied to REG as its first operand and VAL
6940 as its second. X is known to not be shared and so can be modified in
6941 place.
6942
6943 We only handle the simplest cases, and specifically those cases that
6944 arise with IF_THEN_ELSE expressions. */
6945
6946static rtx
6947known_cond (x, cond, reg, val)
6948 rtx x;
6949 enum rtx_code cond;
6950 rtx reg, val;
6951{
6952 enum rtx_code code = GET_CODE (x);
f24ad0e4 6953 rtx temp;
6f7d635c 6954 const char *fmt;
1a26b032
RK
6955 int i, j;
6956
6957 if (side_effects_p (x))
6958 return x;
6959
6960 if (cond == EQ && rtx_equal_p (x, reg))
6961 return val;
6962
6963 /* If X is (abs REG) and we know something about REG's relationship
6964 with zero, we may be able to simplify this. */
6965
6966 if (code == ABS && rtx_equal_p (XEXP (x, 0), reg) && val == const0_rtx)
6967 switch (cond)
6968 {
6969 case GE: case GT: case EQ:
6970 return XEXP (x, 0);
6971 case LT: case LE:
0c1c8ea6
RK
6972 return gen_unary (NEG, GET_MODE (XEXP (x, 0)), GET_MODE (XEXP (x, 0)),
6973 XEXP (x, 0));
e9a25f70
JL
6974 default:
6975 break;
1a26b032
RK
6976 }
6977
6978 /* The only other cases we handle are MIN, MAX, and comparisons if the
6979 operands are the same as REG and VAL. */
6980
6981 else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == 'c')
6982 {
6983 if (rtx_equal_p (XEXP (x, 0), val))
6984 cond = swap_condition (cond), temp = val, val = reg, reg = temp;
6985
6986 if (rtx_equal_p (XEXP (x, 0), reg) && rtx_equal_p (XEXP (x, 1), val))
6987 {
6988 if (GET_RTX_CLASS (code) == '<')
6989 return (comparison_dominates_p (cond, code) ? const_true_rtx
6990 : (comparison_dominates_p (cond,
6991 reverse_condition (code))
6992 ? const0_rtx : x));
6993
6994 else if (code == SMAX || code == SMIN
6995 || code == UMIN || code == UMAX)
6996 {
6997 int unsignedp = (code == UMIN || code == UMAX);
6998
6999 if (code == SMAX || code == UMAX)
7000 cond = reverse_condition (cond);
7001
7002 switch (cond)
7003 {
7004 case GE: case GT:
7005 return unsignedp ? x : XEXP (x, 1);
7006 case LE: case LT:
7007 return unsignedp ? x : XEXP (x, 0);
7008 case GEU: case GTU:
7009 return unsignedp ? XEXP (x, 1) : x;
7010 case LEU: case LTU:
7011 return unsignedp ? XEXP (x, 0) : x;
e9a25f70
JL
7012 default:
7013 break;
1a26b032
RK
7014 }
7015 }
7016 }
7017 }
7018
7019 fmt = GET_RTX_FORMAT (code);
7020 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7021 {
7022 if (fmt[i] == 'e')
7023 SUBST (XEXP (x, i), known_cond (XEXP (x, i), cond, reg, val));
7024 else if (fmt[i] == 'E')
7025 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
7026 SUBST (XVECEXP (x, i, j), known_cond (XVECEXP (x, i, j),
7027 cond, reg, val));
7028 }
7029
7030 return x;
7031}
7032\f
e11fa86f
RK
7033/* See if X and Y are equal for the purposes of seeing if we can rewrite an
7034 assignment as a field assignment. */
7035
7036static int
7037rtx_equal_for_field_assignment_p (x, y)
7038 rtx x;
7039 rtx y;
7040{
e11fa86f
RK
7041 if (x == y || rtx_equal_p (x, y))
7042 return 1;
7043
7044 if (x == 0 || y == 0 || GET_MODE (x) != GET_MODE (y))
7045 return 0;
7046
7047 /* Check for a paradoxical SUBREG of a MEM compared with the MEM.
7048 Note that all SUBREGs of MEM are paradoxical; otherwise they
7049 would have been rewritten. */
7050 if (GET_CODE (x) == MEM && GET_CODE (y) == SUBREG
7051 && GET_CODE (SUBREG_REG (y)) == MEM
7052 && rtx_equal_p (SUBREG_REG (y),
7053 gen_lowpart_for_combine (GET_MODE (SUBREG_REG (y)), x)))
7054 return 1;
7055
7056 if (GET_CODE (y) == MEM && GET_CODE (x) == SUBREG
7057 && GET_CODE (SUBREG_REG (x)) == MEM
7058 && rtx_equal_p (SUBREG_REG (x),
7059 gen_lowpart_for_combine (GET_MODE (SUBREG_REG (x)), y)))
7060 return 1;
7061
9ec36da5
JL
7062 /* We used to see if get_last_value of X and Y were the same but that's
7063 not correct. In one direction, we'll cause the assignment to have
7064 the wrong destination and in the case, we'll import a register into this
7065 insn that might have already have been dead. So fail if none of the
7066 above cases are true. */
7067 return 0;
e11fa86f
RK
7068}
7069\f
230d793d
RS
7070/* See if X, a SET operation, can be rewritten as a bit-field assignment.
7071 Return that assignment if so.
7072
7073 We only handle the most common cases. */
7074
7075static rtx
7076make_field_assignment (x)
7077 rtx x;
7078{
7079 rtx dest = SET_DEST (x);
7080 rtx src = SET_SRC (x);
dfbe1b2f 7081 rtx assign;
e11fa86f 7082 rtx rhs, lhs;
5f4f0e22
CH
7083 HOST_WIDE_INT c1;
7084 int pos, len;
dfbe1b2f
RK
7085 rtx other;
7086 enum machine_mode mode;
230d793d
RS
7087
7088 /* If SRC was (and (not (ashift (const_int 1) POS)) DEST), this is
7089 a clear of a one-bit field. We will have changed it to
7090 (and (rotate (const_int -2) POS) DEST), so check for that. Also check
7091 for a SUBREG. */
7092
7093 if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == ROTATE
7094 && GET_CODE (XEXP (XEXP (src, 0), 0)) == CONST_INT
7095 && INTVAL (XEXP (XEXP (src, 0), 0)) == -2
e11fa86f 7096 && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
230d793d 7097 {
8999a12e 7098 assign = make_extraction (VOIDmode, dest, 0, XEXP (XEXP (src, 0), 1),
230d793d 7099 1, 1, 1, 0);
76184def 7100 if (assign != 0)
38a448ca 7101 return gen_rtx_SET (VOIDmode, assign, const0_rtx);
76184def 7102 return x;
230d793d
RS
7103 }
7104
7105 else if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == SUBREG
7106 && subreg_lowpart_p (XEXP (src, 0))
7107 && (GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
7108 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (src, 0)))))
7109 && GET_CODE (SUBREG_REG (XEXP (src, 0))) == ROTATE
7110 && INTVAL (XEXP (SUBREG_REG (XEXP (src, 0)), 0)) == -2
e11fa86f 7111 && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
230d793d 7112 {
8999a12e 7113 assign = make_extraction (VOIDmode, dest, 0,
230d793d
RS
7114 XEXP (SUBREG_REG (XEXP (src, 0)), 1),
7115 1, 1, 1, 0);
76184def 7116 if (assign != 0)
38a448ca 7117 return gen_rtx_SET (VOIDmode, assign, const0_rtx);
76184def 7118 return x;
230d793d
RS
7119 }
7120
9dd11dcb 7121 /* If SRC is (ior (ashift (const_int 1) POS) DEST), this is a set of a
230d793d
RS
7122 one-bit field. */
7123 else if (GET_CODE (src) == IOR && GET_CODE (XEXP (src, 0)) == ASHIFT
7124 && XEXP (XEXP (src, 0), 0) == const1_rtx
e11fa86f 7125 && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
230d793d 7126 {
8999a12e 7127 assign = make_extraction (VOIDmode, dest, 0, XEXP (XEXP (src, 0), 1),
230d793d 7128 1, 1, 1, 0);
76184def 7129 if (assign != 0)
38a448ca 7130 return gen_rtx_SET (VOIDmode, assign, const1_rtx);
76184def 7131 return x;
230d793d
RS
7132 }
7133
dfbe1b2f 7134 /* The other case we handle is assignments into a constant-position
9dd11dcb 7135 field. They look like (ior/xor (and DEST C1) OTHER). If C1 represents
dfbe1b2f
RK
7136 a mask that has all one bits except for a group of zero bits and
7137 OTHER is known to have zeros where C1 has ones, this is such an
7138 assignment. Compute the position and length from C1. Shift OTHER
7139 to the appropriate position, force it to the required mode, and
7140 make the extraction. Check for the AND in both operands. */
7141
9dd11dcb 7142 if (GET_CODE (src) != IOR && GET_CODE (src) != XOR)
e11fa86f
RK
7143 return x;
7144
7145 rhs = expand_compound_operation (XEXP (src, 0));
7146 lhs = expand_compound_operation (XEXP (src, 1));
7147
7148 if (GET_CODE (rhs) == AND
7149 && GET_CODE (XEXP (rhs, 1)) == CONST_INT
7150 && rtx_equal_for_field_assignment_p (XEXP (rhs, 0), dest))
7151 c1 = INTVAL (XEXP (rhs, 1)), other = lhs;
7152 else if (GET_CODE (lhs) == AND
7153 && GET_CODE (XEXP (lhs, 1)) == CONST_INT
7154 && rtx_equal_for_field_assignment_p (XEXP (lhs, 0), dest))
7155 c1 = INTVAL (XEXP (lhs, 1)), other = rhs;
dfbe1b2f
RK
7156 else
7157 return x;
230d793d 7158
e11fa86f 7159 pos = get_pos_from_mask ((~ c1) & GET_MODE_MASK (GET_MODE (dest)), &len);
dfbe1b2f 7160 if (pos < 0 || pos + len > GET_MODE_BITSIZE (GET_MODE (dest))
e5e809f4
JL
7161 || GET_MODE_BITSIZE (GET_MODE (dest)) > HOST_BITS_PER_WIDE_INT
7162 || (c1 & nonzero_bits (other, GET_MODE (dest))) != 0)
dfbe1b2f 7163 return x;
230d793d 7164
5f4f0e22 7165 assign = make_extraction (VOIDmode, dest, pos, NULL_RTX, len, 1, 1, 0);
76184def
DE
7166 if (assign == 0)
7167 return x;
230d793d 7168
dfbe1b2f
RK
7169 /* The mode to use for the source is the mode of the assignment, or of
7170 what is inside a possible STRICT_LOW_PART. */
7171 mode = (GET_CODE (assign) == STRICT_LOW_PART
7172 ? GET_MODE (XEXP (assign, 0)) : GET_MODE (assign));
230d793d 7173
dfbe1b2f
RK
7174 /* Shift OTHER right POS places and make it the source, restricting it
7175 to the proper length and mode. */
230d793d 7176
5f4f0e22
CH
7177 src = force_to_mode (simplify_shift_const (NULL_RTX, LSHIFTRT,
7178 GET_MODE (src), other, pos),
6139ff20
RK
7179 mode,
7180 GET_MODE_BITSIZE (mode) >= HOST_BITS_PER_WIDE_INT
7181 ? GET_MODE_MASK (mode)
7182 : ((HOST_WIDE_INT) 1 << len) - 1,
e3d616e3 7183 dest, 0);
230d793d 7184
dfbe1b2f 7185 return gen_rtx_combine (SET, VOIDmode, assign, src);
230d793d
RS
7186}
7187\f
7188/* See if X is of the form (+ (* a c) (* b c)) and convert to (* (+ a b) c)
7189 if so. */
7190
7191static rtx
7192apply_distributive_law (x)
7193 rtx x;
7194{
7195 enum rtx_code code = GET_CODE (x);
7196 rtx lhs, rhs, other;
7197 rtx tem;
7198 enum rtx_code inner_code;
7199
d8a8a4da
RS
7200 /* Distributivity is not true for floating point.
7201 It can change the value. So don't do it.
7202 -- rms and moshier@world.std.com. */
3ad2180a 7203 if (FLOAT_MODE_P (GET_MODE (x)))
d8a8a4da
RS
7204 return x;
7205
230d793d
RS
7206 /* The outer operation can only be one of the following: */
7207 if (code != IOR && code != AND && code != XOR
7208 && code != PLUS && code != MINUS)
7209 return x;
7210
7211 lhs = XEXP (x, 0), rhs = XEXP (x, 1);
7212
0f41302f
MS
7213 /* If either operand is a primitive we can't do anything, so get out
7214 fast. */
230d793d 7215 if (GET_RTX_CLASS (GET_CODE (lhs)) == 'o'
dfbe1b2f 7216 || GET_RTX_CLASS (GET_CODE (rhs)) == 'o')
230d793d
RS
7217 return x;
7218
7219 lhs = expand_compound_operation (lhs);
7220 rhs = expand_compound_operation (rhs);
7221 inner_code = GET_CODE (lhs);
7222 if (inner_code != GET_CODE (rhs))
7223 return x;
7224
7225 /* See if the inner and outer operations distribute. */
7226 switch (inner_code)
7227 {
7228 case LSHIFTRT:
7229 case ASHIFTRT:
7230 case AND:
7231 case IOR:
7232 /* These all distribute except over PLUS. */
7233 if (code == PLUS || code == MINUS)
7234 return x;
7235 break;
7236
7237 case MULT:
7238 if (code != PLUS && code != MINUS)
7239 return x;
7240 break;
7241
7242 case ASHIFT:
45620ed4 7243 /* This is also a multiply, so it distributes over everything. */
230d793d
RS
7244 break;
7245
7246 case SUBREG:
dfbe1b2f
RK
7247 /* Non-paradoxical SUBREGs distributes over all operations, provided
7248 the inner modes and word numbers are the same, this is an extraction
2b4bd1bc
JW
7249 of a low-order part, we don't convert an fp operation to int or
7250 vice versa, and we would not be converting a single-word
dfbe1b2f 7251 operation into a multi-word operation. The latter test is not
2b4bd1bc 7252 required, but it prevents generating unneeded multi-word operations.
dfbe1b2f
RK
7253 Some of the previous tests are redundant given the latter test, but
7254 are retained because they are required for correctness.
7255
7256 We produce the result slightly differently in this case. */
7257
7258 if (GET_MODE (SUBREG_REG (lhs)) != GET_MODE (SUBREG_REG (rhs))
7259 || SUBREG_WORD (lhs) != SUBREG_WORD (rhs)
7260 || ! subreg_lowpart_p (lhs)
2b4bd1bc
JW
7261 || (GET_MODE_CLASS (GET_MODE (lhs))
7262 != GET_MODE_CLASS (GET_MODE (SUBREG_REG (lhs))))
dfbe1b2f 7263 || (GET_MODE_SIZE (GET_MODE (lhs))
8af24e26 7264 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs))))
dfbe1b2f 7265 || GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs))) > UNITS_PER_WORD)
230d793d
RS
7266 return x;
7267
7268 tem = gen_binary (code, GET_MODE (SUBREG_REG (lhs)),
7269 SUBREG_REG (lhs), SUBREG_REG (rhs));
7270 return gen_lowpart_for_combine (GET_MODE (x), tem);
7271
7272 default:
7273 return x;
7274 }
7275
7276 /* Set LHS and RHS to the inner operands (A and B in the example
7277 above) and set OTHER to the common operand (C in the example).
7278 These is only one way to do this unless the inner operation is
7279 commutative. */
7280 if (GET_RTX_CLASS (inner_code) == 'c'
7281 && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 0)))
7282 other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 1);
7283 else if (GET_RTX_CLASS (inner_code) == 'c'
7284 && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 1)))
7285 other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 0);
7286 else if (GET_RTX_CLASS (inner_code) == 'c'
7287 && rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 0)))
7288 other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 1);
7289 else if (rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 1)))
7290 other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 0);
7291 else
7292 return x;
7293
7294 /* Form the new inner operation, seeing if it simplifies first. */
7295 tem = gen_binary (code, GET_MODE (x), lhs, rhs);
7296
7297 /* There is one exception to the general way of distributing:
7298 (a ^ b) | (a ^ c) -> (~a) & (b ^ c) */
7299 if (code == XOR && inner_code == IOR)
7300 {
7301 inner_code = AND;
0c1c8ea6 7302 other = gen_unary (NOT, GET_MODE (x), GET_MODE (x), other);
230d793d
RS
7303 }
7304
7305 /* We may be able to continuing distributing the result, so call
7306 ourselves recursively on the inner operation before forming the
7307 outer operation, which we return. */
7308 return gen_binary (inner_code, GET_MODE (x),
7309 apply_distributive_law (tem), other);
7310}
7311\f
/* We have X, a logical `and' of VAROP with the constant CONSTOP, to be done
   in MODE.

   Return an equivalent form, if different from X.  Otherwise, return X.  If
   X is zero, we are to always construct the equivalent form.

   X, when nonzero, is an existing AND rtx that we may be able to reuse
   (via SUBST) instead of allocating a new one.  */

static rtx
simplify_and_const_int (x, mode, varop, constop)
     rtx x;
     enum machine_mode mode;
     rtx varop;
     unsigned HOST_WIDE_INT constop;
{
  unsigned HOST_WIDE_INT nonzero;
  int i;

  /* Simplify VAROP knowing that we will be only looking at some of the
     bits in it.  */
  varop = force_to_mode (varop, mode, constop, NULL_RTX, 0);

  /* If VAROP is a CLOBBER, we will fail so return it; if it is a
     CONST_INT, we are done.  */
  if (GET_CODE (varop) == CLOBBER || GET_CODE (varop) == CONST_INT)
    return varop;

  /* See what bits may be nonzero in VAROP.  Unlike the general case of
     a call to nonzero_bits, here we don't care about bits outside
     MODE.  */

  nonzero = nonzero_bits (varop, mode) & GET_MODE_MASK (mode);
  /* Canonicalize the mask the same way a CONST_INT in MODE would be
     represented on this host.  */
  nonzero = trunc_int_for_mode (nonzero, mode);

  /* Turn off all bits in the constant that are known to already be zero.
     Thus, if the AND isn't needed at all, we will have CONSTOP == NONZERO_BITS
     which is tested below.  */

  constop &= nonzero;

  /* If we don't have any bits left, return zero.  */
  if (constop == 0)
    return const0_rtx;

  /* If VAROP is a NEG of something known to be zero or 1 and CONSTOP is
     a power of two, we can replace this with a ASHIFT.
     (-1 & (1 << i)) == (1 << i), and (0 & (1 << i)) == 0, which is
     exactly (x << i) for x in {0, 1}.  */
  if (GET_CODE (varop) == NEG && nonzero_bits (XEXP (varop, 0), mode) == 1
      && (i = exact_log2 (constop)) >= 0)
    return simplify_shift_const (NULL_RTX, ASHIFT, mode, XEXP (varop, 0), i);

  /* If VAROP is an IOR or XOR, apply the AND to both branches of the IOR
     or XOR, then try to apply the distributive law.  This may eliminate
     operations if either branch can be simplified because of the AND.
     It may also make some cases more complex, but those cases probably
     won't match a pattern either with or without this.  */

  if (GET_CODE (varop) == IOR || GET_CODE (varop) == XOR)
    return
      gen_lowpart_for_combine
	(mode,
	 apply_distributive_law
	 (gen_binary (GET_CODE (varop), GET_MODE (varop),
		      simplify_and_const_int (NULL_RTX, GET_MODE (varop),
					      XEXP (varop, 0), constop),
		      simplify_and_const_int (NULL_RTX, GET_MODE (varop),
					      XEXP (varop, 1), constop))));

  /* Get VAROP in MODE.  Try to get a SUBREG if not.  Don't make a new SUBREG
     if we already had one (just check for the simplest cases).  */
  if (x && GET_CODE (XEXP (x, 0)) == SUBREG
      && GET_MODE (XEXP (x, 0)) == mode
      && SUBREG_REG (XEXP (x, 0)) == varop)
    varop = XEXP (x, 0);
  else
    varop = gen_lowpart_for_combine (mode, varop);

  /* If we can't make the SUBREG, try to return what we were given.  */
  if (GET_CODE (varop) == CLOBBER)
    return x ? x : varop;

  /* If we are only masking insignificant bits, return VAROP.  */
  if (constop == nonzero)
    x = varop;

  /* Otherwise, return an AND.  See how much, if any, of X we can use.  */
  else if (x == 0 || GET_CODE (x) != AND || GET_MODE (x) != mode)
    x = gen_binary (AND, mode, varop, GEN_INT (constop));

  else
    {
      /* Reuse the AND rtx we were given, substituting in place (SUBST
	 records the change for combine's undo machinery).  */
      if (GET_CODE (XEXP (x, 1)) != CONST_INT
	  || (unsigned HOST_WIDE_INT) INTVAL (XEXP (x, 1)) != constop)
	SUBST (XEXP (x, 1), GEN_INT (constop));

      SUBST (XEXP (x, 0), varop);
    }

  return x;
}
7409\f
/* We let num_sign_bit_copies recur into nonzero_bits as that is useful.
   We don't let nonzero_bits recur into num_sign_bit_copies, because that
   is less useful.  We can't allow both, because that results in exponential
   run time recursion.  There is a nullstone testcase that triggered
   this.  This macro avoids accidental uses of num_sign_bit_copies.  */
#define num_sign_bit_copies()

/* Given an expression, X, compute which bits in X can be non-zero.
   We don't care about bits outside of those defined in MODE.

   For most X this is simply GET_MODE_MASK (GET_MODE (MODE)), but if X is
   a shift, AND, or zero_extract, we can do better.  */

static unsigned HOST_WIDE_INT
nonzero_bits (x, mode)
     rtx x;
     enum machine_mode mode;
{
  unsigned HOST_WIDE_INT nonzero = GET_MODE_MASK (mode);
  unsigned HOST_WIDE_INT inner_nz;
  enum rtx_code code;
  int mode_width = GET_MODE_BITSIZE (mode);
  rtx tem;

  /* For floating-point values, assume all bits are needed.  */
  if (FLOAT_MODE_P (GET_MODE (x)) || FLOAT_MODE_P (mode))
    return nonzero;

  /* If X is wider than MODE, use its mode instead.  */
  if (GET_MODE_BITSIZE (GET_MODE (x)) > mode_width)
    {
      mode = GET_MODE (x);
      nonzero = GET_MODE_MASK (mode);
      mode_width = GET_MODE_BITSIZE (mode);
    }

  if (mode_width > HOST_BITS_PER_WIDE_INT)
    /* Our only callers in this case look for single bit values.  So
       just return the mode mask.  Those tests will then be false.  */
    return nonzero;

#ifndef WORD_REGISTER_OPERATIONS
  /* If MODE is wider than X, but both are a single word for both the host
     and target machines, we can compute this from which bits of the
     object might be nonzero in its own mode, taking into account the fact
     that on many CISC machines, accessing an object in a wider mode
     causes the high-order bits to become undefined.  So they are
     not known to be zero.  */

  if (GET_MODE (x) != VOIDmode && GET_MODE (x) != mode
      && GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD
      && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
      && GET_MODE_BITSIZE (mode) > GET_MODE_BITSIZE (GET_MODE (x)))
    {
      nonzero &= nonzero_bits (x, GET_MODE (x));
      nonzero |= GET_MODE_MASK (mode) & ~ GET_MODE_MASK (GET_MODE (x));
      return nonzero;
    }
#endif

  code = GET_CODE (x);
  switch (code)
    {
    case REG:
#ifdef POINTERS_EXTEND_UNSIGNED
      /* If pointers extend unsigned and this is a pointer in Pmode, say that
	 all the bits above ptr_mode are known to be zero.  */
      if (POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
	  && REGNO_POINTER_FLAG (REGNO (x)))
	nonzero &= GET_MODE_MASK (ptr_mode);
#endif

#ifdef STACK_BOUNDARY
      /* If this is the stack pointer, we may know something about its
	 alignment.  If PUSH_ROUNDING is defined, it is possible for the
	 stack to be momentarily aligned only to that amount, so we pick
	 the least alignment.  */

      /* We can't check for arg_pointer_rtx here, because it is not
	 guaranteed to have as much alignment as the stack pointer.
	 In particular, in the Irix6 n64 ABI, the stack has 128 bit
	 alignment but the argument pointer has only 64 bit alignment.  */

      if ((x == frame_pointer_rtx
	   || x == stack_pointer_rtx
	   || x == hard_frame_pointer_rtx
	   || (REGNO (x) >= FIRST_VIRTUAL_REGISTER
	       && REGNO (x) <= LAST_VIRTUAL_REGISTER))
#ifdef STACK_BIAS
	  && !STACK_BIAS
#endif
	  )
	{
	  int sp_alignment = STACK_BOUNDARY / BITS_PER_UNIT;

#ifdef PUSH_ROUNDING
	  if (REGNO (x) == STACK_POINTER_REGNUM)
	    sp_alignment = MIN (PUSH_ROUNDING (1), sp_alignment);
#endif

	  /* We must return here, otherwise we may get a worse result from
	     one of the choices below.  There is nothing useful below as
	     far as the stack pointer is concerned.  */
	  return nonzero &= ~ (sp_alignment - 1);
	}
#endif

      /* If X is a register whose nonzero bits value is current, use it.
	 Otherwise, if X is a register whose value we can find, use that
	 value.  Otherwise, use the previously-computed global nonzero bits
	 for this register.  */

      if (reg_last_set_value[REGNO (x)] != 0
	  && reg_last_set_mode[REGNO (x)] == mode
	  && (REG_N_SETS (REGNO (x)) == 1
	      || reg_last_set_label[REGNO (x)] == label_tick)
	  && INSN_CUID (reg_last_set[REGNO (x)]) < subst_low_cuid)
	return reg_last_set_nonzero_bits[REGNO (x)];

      tem = get_last_value (x);

      if (tem)
	{
#ifdef SHORT_IMMEDIATES_SIGN_EXTEND
	  /* If X is narrower than MODE and TEM is a non-negative
	     constant that would appear negative in the mode of X,
	     sign-extend it for use in reg_nonzero_bits because some
	     machines (maybe most) will actually do the sign-extension
	     and this is the conservative approach.

	     ??? For 2.5, try to tighten up the MD files in this regard
	     instead of this kludge.  */

	  if (GET_MODE_BITSIZE (GET_MODE (x)) < mode_width
	      && GET_CODE (tem) == CONST_INT
	      && INTVAL (tem) > 0
	      && 0 != (INTVAL (tem)
		       & ((HOST_WIDE_INT) 1
			  << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
	    tem = GEN_INT (INTVAL (tem)
			   | ((HOST_WIDE_INT) (-1)
			      << GET_MODE_BITSIZE (GET_MODE (x))));
#endif
	  return nonzero_bits (tem, mode);
	}
      else if (nonzero_sign_valid && reg_nonzero_bits[REGNO (x)])
	return reg_nonzero_bits[REGNO (x)] & nonzero;
      else
	return nonzero;

    case CONST_INT:
#ifdef SHORT_IMMEDIATES_SIGN_EXTEND
      /* If X is negative in MODE, sign-extend the value.  */
      if (INTVAL (x) > 0 && mode_width < BITS_PER_WORD
	  && 0 != (INTVAL (x) & ((HOST_WIDE_INT) 1 << (mode_width - 1))))
	return (INTVAL (x) | ((HOST_WIDE_INT) (-1) << mode_width));
#endif

      return INTVAL (x);

    case MEM:
#ifdef LOAD_EXTEND_OP
      /* In many, if not most, RISC machines, reading a byte from memory
	 zeros the rest of the register.  Noticing that fact saves a lot
	 of extra zero-extends.  */
      if (LOAD_EXTEND_OP (GET_MODE (x)) == ZERO_EXTEND)
	nonzero &= GET_MODE_MASK (GET_MODE (x));
#endif
      break;

    case EQ:  case NE:
    case GT:  case GTU:
    case LT:  case LTU:
    case GE:  case GEU:
    case LE:  case LEU:

      /* If this produces an integer result, we know which bits are set.
	 Code here used to clear bits outside the mode of X, but that is
	 now done above.  */

      if (GET_MODE_CLASS (mode) == MODE_INT
	  && mode_width <= HOST_BITS_PER_WIDE_INT)
	nonzero = STORE_FLAG_VALUE;
      break;

    case NEG:
#if 0
      /* Disabled to avoid exponential mutual recursion between nonzero_bits
	 and num_sign_bit_copies.  */
      if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
	  == GET_MODE_BITSIZE (GET_MODE (x)))
	nonzero = 1;
#endif

      /* Bits of MODE beyond the mode of X are undefined after negation.  */
      if (GET_MODE_SIZE (GET_MODE (x)) < mode_width)
	nonzero |= (GET_MODE_MASK (mode) & ~ GET_MODE_MASK (GET_MODE (x)));
      break;

    case ABS:
#if 0
      /* Disabled to avoid exponential mutual recursion between nonzero_bits
	 and num_sign_bit_copies.  */
      if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
	  == GET_MODE_BITSIZE (GET_MODE (x)))
	nonzero = 1;
#endif
      break;

    case TRUNCATE:
      nonzero &= (nonzero_bits (XEXP (x, 0), mode) & GET_MODE_MASK (mode));
      break;

    case ZERO_EXTEND:
      nonzero &= nonzero_bits (XEXP (x, 0), mode);
      if (GET_MODE (XEXP (x, 0)) != VOIDmode)
	nonzero &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
      break;

    case SIGN_EXTEND:
      /* If the sign bit is known clear, this is the same as ZERO_EXTEND.
	 Otherwise, show all the bits in the outer mode but not the inner
	 may be non-zero.  */
      inner_nz = nonzero_bits (XEXP (x, 0), mode);
      if (GET_MODE (XEXP (x, 0)) != VOIDmode)
	{
	  inner_nz &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
	  if (inner_nz
	      & (((HOST_WIDE_INT) 1
		  << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1))))
	    inner_nz |= (GET_MODE_MASK (mode)
			 & ~ GET_MODE_MASK (GET_MODE (XEXP (x, 0))));
	}

      nonzero &= inner_nz;
      break;

    case AND:
      nonzero &= (nonzero_bits (XEXP (x, 0), mode)
		  & nonzero_bits (XEXP (x, 1), mode));
      break;

    case XOR:   case IOR:
    case UMIN:  case UMAX:  case SMIN:  case SMAX:
      /* The result can have a nonzero bit only where some operand can.  */
      nonzero &= (nonzero_bits (XEXP (x, 0), mode)
		  | nonzero_bits (XEXP (x, 1), mode));
      break;

    case PLUS:  case MINUS:
    case MULT:
    case DIV:   case UDIV:
    case MOD:   case UMOD:
      /* We can apply the rules of arithmetic to compute the number of
	 high- and low-order zero bits of these operations.  We start by
	 computing the width (position of the highest-order non-zero bit)
	 and the number of low-order zero bits for each value.  */
      {
	unsigned HOST_WIDE_INT nz0 = nonzero_bits (XEXP (x, 0), mode);
	unsigned HOST_WIDE_INT nz1 = nonzero_bits (XEXP (x, 1), mode);
	int width0 = floor_log2 (nz0) + 1;
	int width1 = floor_log2 (nz1) + 1;
	/* NZ & -NZ isolates the lowest set bit, so LOW0/LOW1 are the
	   number of trailing zero bits of each operand.  */
	int low0 = floor_log2 (nz0 & -nz0);
	int low1 = floor_log2 (nz1 & -nz1);
	HOST_WIDE_INT op0_maybe_minusp
	  = (nz0 & ((HOST_WIDE_INT) 1 << (mode_width - 1)));
	HOST_WIDE_INT op1_maybe_minusp
	  = (nz1 & ((HOST_WIDE_INT) 1 << (mode_width - 1)));
	int result_width = mode_width;
	int result_low = 0;

	switch (code)
	  {
	  case PLUS:
#ifdef STACK_BIAS
	    if (STACK_BIAS
		&& (XEXP (x, 0) == stack_pointer_rtx
		    || XEXP (x, 0) == frame_pointer_rtx)
		&& GET_CODE (XEXP (x, 1)) == CONST_INT)
	      {
		int sp_alignment = STACK_BOUNDARY / BITS_PER_UNIT;

		nz0 = (GET_MODE_MASK (mode) & ~ (sp_alignment - 1));
		nz1 = INTVAL (XEXP (x, 1)) - STACK_BIAS;
		width0 = floor_log2 (nz0) + 1;
		width1 = floor_log2 (nz1) + 1;
		low0 = floor_log2 (nz0 & -nz0);
		low1 = floor_log2 (nz1 & -nz1);
	      }
#endif
	    /* Addition can carry one bit past the wider operand.  */
	    result_width = MAX (width0, width1) + 1;
	    result_low = MIN (low0, low1);
	    break;
	  case MINUS:
	    result_low = MIN (low0, low1);
	    break;
	  case MULT:
	    result_width = width0 + width1;
	    result_low = low0 + low1;
	    break;
	  case DIV:
	    if (! op0_maybe_minusp && ! op1_maybe_minusp)
	      result_width = width0;
	    break;
	  case UDIV:
	    result_width = width0;
	    break;
	  case MOD:
	    if (! op0_maybe_minusp && ! op1_maybe_minusp)
	      result_width = MIN (width0, width1);
	    result_low = MIN (low0, low1);
	    break;
	  case UMOD:
	    result_width = MIN (width0, width1);
	    result_low = MIN (low0, low1);
	    break;
	  default:
	    abort ();
	  }

	if (result_width < mode_width)
	  nonzero &= ((HOST_WIDE_INT) 1 << result_width) - 1;

	if (result_low > 0)
	  nonzero &= ~ (((HOST_WIDE_INT) 1 << result_low) - 1);
      }
      break;

    case ZERO_EXTRACT:
      /* XEXP (x, 1) is the width of the extracted field, which bounds
	 the result from above.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
	  && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
	nonzero &= ((HOST_WIDE_INT) 1 << INTVAL (XEXP (x, 1))) - 1;
      break;

    case SUBREG:
      /* If this is a SUBREG formed for a promoted variable that has
	 been zero-extended, we know that at least the high-order bits
	 are zero, though others might be too.  */

      if (SUBREG_PROMOTED_VAR_P (x) && SUBREG_PROMOTED_UNSIGNED_P (x))
	nonzero = (GET_MODE_MASK (GET_MODE (x))
		   & nonzero_bits (SUBREG_REG (x), GET_MODE (x)));

      /* If the inner mode is a single word for both the host and target
	 machines, we can compute this from which bits of the inner
	 object might be nonzero.  */
      if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) <= BITS_PER_WORD
	  && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
	      <= HOST_BITS_PER_WIDE_INT))
	{
	  nonzero &= nonzero_bits (SUBREG_REG (x), mode);

#if defined (WORD_REGISTER_OPERATIONS) && defined (LOAD_EXTEND_OP)
	  /* If this is a typical RISC machine, we only have to worry
	     about the way loads are extended.  */
	  if (LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND
	      ? (nonzero
		 & (1L << (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) - 1)))
	      : LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) != ZERO_EXTEND)
#endif
	    {
	      /* On many CISC machines, accessing an object in a wider mode
		 causes the high-order bits to become undefined.  So they are
		 not known to be zero.  */
	      if (GET_MODE_SIZE (GET_MODE (x))
		  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
		nonzero |= (GET_MODE_MASK (GET_MODE (x))
			    & ~ GET_MODE_MASK (GET_MODE (SUBREG_REG (x))));
	    }
	}
      break;

    case ASHIFTRT:
    case LSHIFTRT:
    case ASHIFT:
    case ROTATE:
      /* The nonzero bits are in two classes: any bits within MODE
	 that aren't in GET_MODE (x) are always significant.  The rest of the
	 nonzero bits are those that are significant in the operand of
	 the shift when shifted the appropriate number of bits.  This
	 shows that high-order bits are cleared by the right shift and
	 low-order bits by left shifts.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
	  && INTVAL (XEXP (x, 1)) >= 0
	  && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
	{
	  enum machine_mode inner_mode = GET_MODE (x);
	  int width = GET_MODE_BITSIZE (inner_mode);
	  int count = INTVAL (XEXP (x, 1));
	  unsigned HOST_WIDE_INT mode_mask = GET_MODE_MASK (inner_mode);
	  unsigned HOST_WIDE_INT op_nonzero = nonzero_bits (XEXP (x, 0), mode);
	  unsigned HOST_WIDE_INT inner = op_nonzero & mode_mask;
	  unsigned HOST_WIDE_INT outer = 0;

	  if (mode_width > width)
	    outer = (op_nonzero & nonzero & ~ mode_mask);

	  if (code == LSHIFTRT)
	    inner >>= count;
	  else if (code == ASHIFTRT)
	    {
	      inner >>= count;

	      /* If the sign bit may have been nonzero before the shift, we
		 need to mark all the places it could have been copied to
		 by the shift as possibly nonzero.  */
	      if (inner & ((HOST_WIDE_INT) 1 << (width - 1 - count)))
		inner |= (((HOST_WIDE_INT) 1 << count) - 1) << (width - count);
	    }
	  else if (code == ASHIFT)
	    inner <<= count;
	  else
	    /* ROTATE: wrap the bits shifted out of the top back in at
	       the bottom.  */
	    inner = ((inner << (count % width)
		      | (inner >> (width - (count % width)))) & mode_mask);

	  nonzero &= (outer | inner);
	}
      break;

    case FFS:
      /* This is at most the number of bits in the mode.  */
      nonzero = ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width) + 1)) - 1;
      break;

    case IF_THEN_ELSE:
      /* Either arm may be selected, so union their nonzero bits.  */
      nonzero &= (nonzero_bits (XEXP (x, 1), mode)
		  | nonzero_bits (XEXP (x, 2), mode));
      break;

    default:
      break;
    }

  return nonzero;
}

/* See the macro definition above.  */
#undef num_sign_bit_copies
230d793d 7846\f
/* Return the number of bits at the high-order end of X that are known to
   be equal to the sign bit.  X will be used in mode MODE; if MODE is
   VOIDmode, X will be used in its own mode.  The returned value will always
   be between 1 and the number of bits in MODE.  */

static int
num_sign_bit_copies (x, mode)
     rtx x;
     enum machine_mode mode;
{
  enum rtx_code code = GET_CODE (x);
  int bitwidth;
  int num0, num1, result;
  unsigned HOST_WIDE_INT nonzero;
  rtx tem;

  /* If we weren't given a mode, use the mode of X.  If the mode is still
     VOIDmode, we don't know anything.  Likewise if one of the modes is
     floating-point.  */

  if (mode == VOIDmode)
    mode = GET_MODE (x);

  if (mode == VOIDmode || FLOAT_MODE_P (mode) || FLOAT_MODE_P (GET_MODE (x)))
    return 1;

  bitwidth = GET_MODE_BITSIZE (mode);

  /* For a smaller object, just ignore the high bits.  */
  if (bitwidth < GET_MODE_BITSIZE (GET_MODE (x)))
    return MAX (1, (num_sign_bit_copies (x, GET_MODE (x))
		    - (GET_MODE_BITSIZE (GET_MODE (x)) - bitwidth)));

  if (GET_MODE (x) != VOIDmode && bitwidth > GET_MODE_BITSIZE (GET_MODE (x)))
    {
#ifndef WORD_REGISTER_OPERATIONS
  /* If this machine does not do all register operations on the entire
     register and MODE is wider than the mode of X, we can say nothing
     at all about the high-order bits.  */
      return 1;
#else
      /* Likewise on machines that do, if the mode of the object is smaller
	 than a word and loads of that size don't sign extend, we can say
	 nothing about the high order bits.  */
      if (GET_MODE_BITSIZE (GET_MODE (x)) < BITS_PER_WORD
#ifdef LOAD_EXTEND_OP
	  && LOAD_EXTEND_OP (GET_MODE (x)) != SIGN_EXTEND
#endif
	  )
	return 1;
#endif
    }

  switch (code)
    {
    case REG:

#ifdef POINTERS_EXTEND_UNSIGNED
      /* If pointers extend signed and this is a pointer in Pmode, say that
	 all the bits above ptr_mode are known to be sign bit copies.  */
      if (! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode && mode == Pmode
	  && REGNO_POINTER_FLAG (REGNO (x)))
	return GET_MODE_BITSIZE (Pmode) - GET_MODE_BITSIZE (ptr_mode) + 1;
#endif

      /* Use the per-register data recorded by combine if it is still
	 current for this register.  */
      if (reg_last_set_value[REGNO (x)] != 0
	  && reg_last_set_mode[REGNO (x)] == mode
	  && (REG_N_SETS (REGNO (x)) == 1
	      || reg_last_set_label[REGNO (x)] == label_tick)
	  && INSN_CUID (reg_last_set[REGNO (x)]) < subst_low_cuid)
	return reg_last_set_sign_bit_copies[REGNO (x)];

      tem = get_last_value (x);
      if (tem != 0)
	return num_sign_bit_copies (tem, mode);

      if (nonzero_sign_valid && reg_sign_bit_copies[REGNO (x)] != 0)
	return reg_sign_bit_copies[REGNO (x)];
      break;

    case MEM:
#ifdef LOAD_EXTEND_OP
      /* Some RISC machines sign-extend all loads of smaller than a word.  */
      if (LOAD_EXTEND_OP (GET_MODE (x)) == SIGN_EXTEND)
	return MAX (1, bitwidth - GET_MODE_BITSIZE (GET_MODE (x)) + 1);
#endif
      break;

    case CONST_INT:
      /* If the constant is negative, take its 1's complement and remask.
	 Then see how many zero bits we have.  */
      nonzero = INTVAL (x) & GET_MODE_MASK (mode);
      if (bitwidth <= HOST_BITS_PER_WIDE_INT
	  && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
	nonzero = (~ nonzero) & GET_MODE_MASK (mode);

      return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);

    case SUBREG:
      /* If this is a SUBREG for a promoted object that is sign-extended
	 and we are looking at it in a wider mode, we know that at least the
	 high-order bits are known to be sign bit copies.  */

      if (SUBREG_PROMOTED_VAR_P (x) && ! SUBREG_PROMOTED_UNSIGNED_P (x))
	return MAX (bitwidth - GET_MODE_BITSIZE (GET_MODE (x)) + 1,
		    num_sign_bit_copies (SUBREG_REG (x), mode));

      /* For a smaller object, just ignore the high bits.  */
      if (bitwidth <= GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))))
	{
	  num0 = num_sign_bit_copies (SUBREG_REG (x), VOIDmode);
	  return MAX (1, (num0
			  - (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
			     - bitwidth)));
	}

#ifdef WORD_REGISTER_OPERATIONS
#ifdef LOAD_EXTEND_OP
      /* For paradoxical SUBREGs on machines where all register operations
	 affect the entire register, just look inside.  Note that we are
	 passing MODE to the recursive call, so the number of sign bit copies
	 will remain relative to that mode, not the inner mode.  */

      /* This works only if loads sign extend.  Otherwise, if we get a
	 reload for the inner part, it may be loaded from the stack, and
	 then we lose all sign bit copies that existed before the store
	 to the stack.  */

      if ((GET_MODE_SIZE (GET_MODE (x))
	   > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
	  && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND)
	return num_sign_bit_copies (SUBREG_REG (x), mode);
#endif
#endif
      break;

    case SIGN_EXTRACT:
      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
	return MAX (1, bitwidth - INTVAL (XEXP (x, 1)));
      break;

    case SIGN_EXTEND:
      return (bitwidth - GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
	      + num_sign_bit_copies (XEXP (x, 0), VOIDmode));

    case TRUNCATE:
      /* For a smaller object, just ignore the high bits.  */
      num0 = num_sign_bit_copies (XEXP (x, 0), VOIDmode);
      return MAX (1, (num0 - (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
			      - bitwidth)));

    case NOT:
      /* Complementing preserves the number of sign bit copies.  */
      return num_sign_bit_copies (XEXP (x, 0), mode);

    case ROTATE:       case ROTATERT:
      /* If we are rotating left by a number of bits less than the number
	 of sign bit copies, we can just subtract that amount from the
	 number.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
	  && INTVAL (XEXP (x, 1)) >= 0 && INTVAL (XEXP (x, 1)) < bitwidth)
	{
	  num0 = num_sign_bit_copies (XEXP (x, 0), mode);
	  return MAX (1, num0 - (code == ROTATE ? INTVAL (XEXP (x, 1))
				 : bitwidth - INTVAL (XEXP (x, 1))));
	}
      break;

    case NEG:
      /* In general, this subtracts one sign bit copy.  But if the value
	 is known to be positive, the number of sign bit copies is the
	 same as that of the input.  Finally, if the input has just one bit
	 that might be nonzero, all the bits are copies of the sign bit.  */
      num0 = num_sign_bit_copies (XEXP (x, 0), mode);
      if (bitwidth > HOST_BITS_PER_WIDE_INT)
	return num0 > 1 ? num0 - 1 : 1;

      nonzero = nonzero_bits (XEXP (x, 0), mode);
      if (nonzero == 1)
	return bitwidth;

      if (num0 > 1
	  && (((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero))
	num0--;

      return num0;

    case IOR:   case AND:   case XOR:
    case SMIN:  case SMAX:  case UMIN:  case UMAX:
      /* Logical operations will preserve the number of sign-bit copies.
	 MIN and MAX operations always return one of the operands.  */
      num0 = num_sign_bit_copies (XEXP (x, 0), mode);
      num1 = num_sign_bit_copies (XEXP (x, 1), mode);
      return MIN (num0, num1);

    case PLUS:  case MINUS:
      /* For addition and subtraction, we can have a 1-bit carry.  However,
	 if we are subtracting 1 from a positive number, there will not
	 be such a carry.  Furthermore, if the positive number is known to
	 be 0 or 1, we know the result is either -1 or 0.  */

      if (code == PLUS && XEXP (x, 1) == constm1_rtx
	  && bitwidth <= HOST_BITS_PER_WIDE_INT)
	{
	  nonzero = nonzero_bits (XEXP (x, 0), mode);
	  if ((((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero) == 0)
	    return (nonzero == 1 || nonzero == 0 ? bitwidth
		    : bitwidth - floor_log2 (nonzero) - 1);
	}

      num0 = num_sign_bit_copies (XEXP (x, 0), mode);
      num1 = num_sign_bit_copies (XEXP (x, 1), mode);
      return MAX (1, MIN (num0, num1) - 1);

    case MULT:
      /* The number of bits of the product is the sum of the number of
	 bits of both terms.  However, unless one of the terms is known
	 to be positive, we must allow for an additional bit since negating
	 a negative number can remove one sign bit copy.  */

      num0 = num_sign_bit_copies (XEXP (x, 0), mode);
      num1 = num_sign_bit_copies (XEXP (x, 1), mode);

      result = bitwidth - (bitwidth - num0) - (bitwidth - num1);
      if (result > 0
	  && (bitwidth > HOST_BITS_PER_WIDE_INT
	      || (((nonzero_bits (XEXP (x, 0), mode)
		    & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
		  && ((nonzero_bits (XEXP (x, 1), mode)
		       & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))))
	result--;

      return MAX (1, result);

    case UDIV:
      /* The result must be <= the first operand.  If the first operand
	 has the high bit set, we know nothing about the number of sign
	 bit copies.  */
      if (bitwidth > HOST_BITS_PER_WIDE_INT)
	return 1;
      else if ((nonzero_bits (XEXP (x, 0), mode)
		& ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
	return 1;
      else
	return num_sign_bit_copies (XEXP (x, 0), mode);

    case UMOD:
      /* The result must be <= the second operand.  */
      return num_sign_bit_copies (XEXP (x, 1), mode);

    case DIV:
      /* Similar to unsigned division, except that we have to worry about
	 the case where the divisor is negative, in which case we have
	 to add 1.  */
      result = num_sign_bit_copies (XEXP (x, 0), mode);
      if (result > 1
	  && (bitwidth > HOST_BITS_PER_WIDE_INT
	      || (nonzero_bits (XEXP (x, 1), mode)
		  & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
	result--;

      return result;

    case MOD:
      result = num_sign_bit_copies (XEXP (x, 1), mode);
      if (result > 1
	  && (bitwidth > HOST_BITS_PER_WIDE_INT
	      || (nonzero_bits (XEXP (x, 1), mode)
		  & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
	result--;

      return result;

    case ASHIFTRT:
      /* Shifts by a constant add to the number of bits equal to the
	 sign bit.  */
      num0 = num_sign_bit_copies (XEXP (x, 0), mode);
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
	  && INTVAL (XEXP (x, 1)) > 0)
	num0 = MIN (bitwidth, num0 + INTVAL (XEXP (x, 1)));

      return num0;

    case ASHIFT:
      /* Left shifts destroy copies.  */
      if (GET_CODE (XEXP (x, 1)) != CONST_INT
	  || INTVAL (XEXP (x, 1)) < 0
	  || INTVAL (XEXP (x, 1)) >= bitwidth)
	return 1;

      num0 = num_sign_bit_copies (XEXP (x, 0), mode);
      return MAX (1, num0 - INTVAL (XEXP (x, 1)));

    case IF_THEN_ELSE:
      /* Either arm may be selected; use the weaker of the two.  */
      num0 = num_sign_bit_copies (XEXP (x, 1), mode);
      num1 = num_sign_bit_copies (XEXP (x, 2), mode);
      return MIN (num0, num1);

    case EQ:  case NE:  case GE:  case GT:  case LE:  case LT:
    case GEU: case GTU: case LEU: case LTU:
      /* A comparison yields STORE_FLAG_VALUE or 0; when that value is -1,
	 every bit is a copy of the sign bit.  */
      if (STORE_FLAG_VALUE == -1)
	return bitwidth;
      break;

    default:
      break;
    }

  /* If we haven't been able to figure it out by one of the above rules,
     see if some of the high-order bits are known to be zero.  If so,
     count those bits and return one less than that amount.  If we can't
     safely compute the mask for this mode, always return BITWIDTH.  */

  if (bitwidth > HOST_BITS_PER_WIDE_INT)
    return 1;

  nonzero = nonzero_bits (x, mode);
  return (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))
	  ? 1 : bitwidth - floor_log2 (nonzero) - 1);
}
8166\f
1a26b032
RK
8167/* Return the number of "extended" bits there are in X, when interpreted
8168 as a quantity in MODE whose signedness is indicated by UNSIGNEDP. For
8169 unsigned quantities, this is the number of high-order zero bits.
8170 For signed quantities, this is the number of copies of the sign bit
8171 minus 1. In both case, this function returns the number of "spare"
8172 bits. For example, if two quantities for which this function returns
8173 at least 1 are added, the addition is known not to overflow.
8174
8175 This function will always return 0 unless called during combine, which
8176 implies that it must be called from a define_split. */
8177
8178int
8179extended_count (x, mode, unsignedp)
8180 rtx x;
8181 enum machine_mode mode;
8182 int unsignedp;
8183{
951553af 8184 if (nonzero_sign_valid == 0)
1a26b032
RK
8185 return 0;
8186
8187 return (unsignedp
ac49a949
RS
8188 ? (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
8189 && (GET_MODE_BITSIZE (mode) - 1
951553af 8190 - floor_log2 (nonzero_bits (x, mode))))
1a26b032
RK
8191 : num_sign_bit_copies (x, mode) - 1);
8192}
8193\f
230d793d
RS
8194/* This function is called from `simplify_shift_const' to merge two
8195 outer operations. Specifically, we have already found that we need
8196 to perform operation *POP0 with constant *PCONST0 at the outermost
8197 position. We would now like to also perform OP1 with constant CONST1
8198 (with *POP0 being done last).
8199
8200 Return 1 if we can do the operation and update *POP0 and *PCONST0 with
8201 the resulting operation. *PCOMP_P is set to 1 if we would need to
8202 complement the innermost operand, otherwise it is unchanged.
8203
8204 MODE is the mode in which the operation will be done. No bits outside
8205 the width of this mode matter. It is assumed that the width of this mode
5f4f0e22 8206 is smaller than or equal to HOST_BITS_PER_WIDE_INT.
230d793d
RS
8207
8208 If *POP0 or OP1 are NIL, it means no operation is required. Only NEG, PLUS,
8209 IOR, XOR, and AND are supported. We may set *POP0 to SET if the proper
8210 result is simply *PCONST0.
8211
8212 If the resulting operation cannot be expressed as one operation, we
8213 return 0 and do not change *POP0, *PCONST0, and *PCOMP_P. */
8214
8215static int
8216merge_outer_ops (pop0, pconst0, op1, const1, mode, pcomp_p)
8217 enum rtx_code *pop0;
5f4f0e22 8218 HOST_WIDE_INT *pconst0;
230d793d 8219 enum rtx_code op1;
5f4f0e22 8220 HOST_WIDE_INT const1;
230d793d
RS
8221 enum machine_mode mode;
8222 int *pcomp_p;
8223{
8224 enum rtx_code op0 = *pop0;
5f4f0e22 8225 HOST_WIDE_INT const0 = *pconst0;
230d793d
RS
8226
8227 const0 &= GET_MODE_MASK (mode);
8228 const1 &= GET_MODE_MASK (mode);
8229
8230 /* If OP0 is an AND, clear unimportant bits in CONST1. */
8231 if (op0 == AND)
8232 const1 &= const0;
8233
8234 /* If OP0 or OP1 is NIL, this is easy. Similarly if they are the same or
8235 if OP0 is SET. */
8236
8237 if (op1 == NIL || op0 == SET)
8238 return 1;
8239
8240 else if (op0 == NIL)
8241 op0 = op1, const0 = const1;
8242
8243 else if (op0 == op1)
8244 {
8245 switch (op0)
8246 {
8247 case AND:
8248 const0 &= const1;
8249 break;
8250 case IOR:
8251 const0 |= const1;
8252 break;
8253 case XOR:
8254 const0 ^= const1;
8255 break;
8256 case PLUS:
8257 const0 += const1;
8258 break;
8259 case NEG:
8260 op0 = NIL;
8261 break;
e9a25f70
JL
8262 default:
8263 break;
230d793d
RS
8264 }
8265 }
8266
8267 /* Otherwise, if either is a PLUS or NEG, we can't do anything. */
8268 else if (op0 == PLUS || op1 == PLUS || op0 == NEG || op1 == NEG)
8269 return 0;
8270
8271 /* If the two constants aren't the same, we can't do anything. The
8272 remaining six cases can all be done. */
8273 else if (const0 != const1)
8274 return 0;
8275
8276 else
8277 switch (op0)
8278 {
8279 case IOR:
8280 if (op1 == AND)
8281 /* (a & b) | b == b */
8282 op0 = SET;
8283 else /* op1 == XOR */
8284 /* (a ^ b) | b == a | b */
b729186a 8285 {;}
230d793d
RS
8286 break;
8287
8288 case XOR:
8289 if (op1 == AND)
8290 /* (a & b) ^ b == (~a) & b */
8291 op0 = AND, *pcomp_p = 1;
8292 else /* op1 == IOR */
8293 /* (a | b) ^ b == a & ~b */
8294 op0 = AND, *pconst0 = ~ const0;
8295 break;
8296
8297 case AND:
8298 if (op1 == IOR)
8299 /* (a | b) & b == b */
8300 op0 = SET;
8301 else /* op1 == XOR */
8302 /* (a ^ b) & b) == (~a) & b */
8303 *pcomp_p = 1;
8304 break;
e9a25f70
JL
8305 default:
8306 break;
230d793d
RS
8307 }
8308
8309 /* Check for NO-OP cases. */
8310 const0 &= GET_MODE_MASK (mode);
8311 if (const0 == 0
8312 && (op0 == IOR || op0 == XOR || op0 == PLUS))
8313 op0 = NIL;
8314 else if (const0 == 0 && op0 == AND)
8315 op0 = SET;
e51712db
KG
8316 else if ((unsigned HOST_WIDE_INT) const0 == GET_MODE_MASK (mode)
8317 && op0 == AND)
230d793d
RS
8318 op0 = NIL;
8319
7e4ce834
RH
8320 /* ??? Slightly redundant with the above mask, but not entirely.
8321 Moving this above means we'd have to sign-extend the mode mask
8322 for the final test. */
8323 const0 = trunc_int_for_mode (const0, mode);
9fa6d012 8324
230d793d
RS
8325 *pop0 = op0;
8326 *pconst0 = const0;
8327
8328 return 1;
8329}
8330\f
8331/* Simplify a shift of VAROP by COUNT bits. CODE says what kind of shift.
8332 The result of the shift is RESULT_MODE. X, if non-zero, is an expression
8333 that we started with.
8334
8335 The shift is normally computed in the widest mode we find in VAROP, as
8336 long as it isn't a different number of words than RESULT_MODE. Exceptions
8337 are ASHIFTRT and ROTATE, which are always done in their original mode, */
8338
8339static rtx
8340simplify_shift_const (x, code, result_mode, varop, count)
8341 rtx x;
8342 enum rtx_code code;
8343 enum machine_mode result_mode;
8344 rtx varop;
8345 int count;
8346{
8347 enum rtx_code orig_code = code;
8348 int orig_count = count;
8349 enum machine_mode mode = result_mode;
8350 enum machine_mode shift_mode, tmode;
8351 int mode_words
8352 = (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
8353 /* We form (outer_op (code varop count) (outer_const)). */
8354 enum rtx_code outer_op = NIL;
c4e861e8 8355 HOST_WIDE_INT outer_const = 0;
230d793d
RS
8356 rtx const_rtx;
8357 int complement_p = 0;
8358 rtx new;
8359
8360 /* If we were given an invalid count, don't do anything except exactly
8361 what was requested. */
8362
8363 if (count < 0 || count > GET_MODE_BITSIZE (mode))
8364 {
8365 if (x)
8366 return x;
8367
38a448ca 8368 return gen_rtx_fmt_ee (code, mode, varop, GEN_INT (count));
230d793d
RS
8369 }
8370
8371 /* Unless one of the branches of the `if' in this loop does a `continue',
8372 we will `break' the loop after the `if'. */
8373
8374 while (count != 0)
8375 {
8376 /* If we have an operand of (clobber (const_int 0)), just return that
8377 value. */
8378 if (GET_CODE (varop) == CLOBBER)
8379 return varop;
8380
8381 /* If we discovered we had to complement VAROP, leave. Making a NOT
8382 here would cause an infinite loop. */
8383 if (complement_p)
8384 break;
8385
abc95ed3 8386 /* Convert ROTATERT to ROTATE. */
230d793d
RS
8387 if (code == ROTATERT)
8388 code = ROTATE, count = GET_MODE_BITSIZE (result_mode) - count;
8389
230d793d 8390 /* We need to determine what mode we will do the shift in. If the
f6789c77
RK
8391 shift is a right shift or a ROTATE, we must always do it in the mode
8392 it was originally done in. Otherwise, we can do it in MODE, the
0f41302f 8393 widest mode encountered. */
f6789c77
RK
8394 shift_mode
8395 = (code == ASHIFTRT || code == LSHIFTRT || code == ROTATE
8396 ? result_mode : mode);
230d793d
RS
8397
8398 /* Handle cases where the count is greater than the size of the mode
8399 minus 1. For ASHIFT, use the size minus one as the count (this can
8400 occur when simplifying (lshiftrt (ashiftrt ..))). For rotates,
8401 take the count modulo the size. For other shifts, the result is
8402 zero.
8403
8404 Since these shifts are being produced by the compiler by combining
8405 multiple operations, each of which are defined, we know what the
8406 result is supposed to be. */
8407
8408 if (count > GET_MODE_BITSIZE (shift_mode) - 1)
8409 {
8410 if (code == ASHIFTRT)
8411 count = GET_MODE_BITSIZE (shift_mode) - 1;
8412 else if (code == ROTATE || code == ROTATERT)
8413 count %= GET_MODE_BITSIZE (shift_mode);
8414 else
8415 {
8416 /* We can't simply return zero because there may be an
8417 outer op. */
8418 varop = const0_rtx;
8419 count = 0;
8420 break;
8421 }
8422 }
8423
8424 /* Negative counts are invalid and should not have been made (a
8425 programmer-specified negative count should have been handled
0f41302f 8426 above). */
230d793d
RS
8427 else if (count < 0)
8428 abort ();
8429
312def2e
RK
8430 /* An arithmetic right shift of a quantity known to be -1 or 0
8431 is a no-op. */
8432 if (code == ASHIFTRT
8433 && (num_sign_bit_copies (varop, shift_mode)
8434 == GET_MODE_BITSIZE (shift_mode)))
d0ab8cd3 8435 {
312def2e
RK
8436 count = 0;
8437 break;
8438 }
d0ab8cd3 8439
312def2e
RK
8440 /* If we are doing an arithmetic right shift and discarding all but
8441 the sign bit copies, this is equivalent to doing a shift by the
8442 bitsize minus one. Convert it into that shift because it will often
8443 allow other simplifications. */
500c518b 8444
312def2e
RK
8445 if (code == ASHIFTRT
8446 && (count + num_sign_bit_copies (varop, shift_mode)
8447 >= GET_MODE_BITSIZE (shift_mode)))
8448 count = GET_MODE_BITSIZE (shift_mode) - 1;
500c518b 8449
230d793d
RS
8450 /* We simplify the tests below and elsewhere by converting
8451 ASHIFTRT to LSHIFTRT if we know the sign bit is clear.
8452 `make_compound_operation' will convert it to a ASHIFTRT for
8453 those machines (such as Vax) that don't have a LSHIFTRT. */
5f4f0e22 8454 if (GET_MODE_BITSIZE (shift_mode) <= HOST_BITS_PER_WIDE_INT
230d793d 8455 && code == ASHIFTRT
951553af 8456 && ((nonzero_bits (varop, shift_mode)
5f4f0e22
CH
8457 & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (shift_mode) - 1)))
8458 == 0))
230d793d
RS
8459 code = LSHIFTRT;
8460
8461 switch (GET_CODE (varop))
8462 {
8463 case SIGN_EXTEND:
8464 case ZERO_EXTEND:
8465 case SIGN_EXTRACT:
8466 case ZERO_EXTRACT:
8467 new = expand_compound_operation (varop);
8468 if (new != varop)
8469 {
8470 varop = new;
8471 continue;
8472 }
8473 break;
8474
8475 case MEM:
8476 /* If we have (xshiftrt (mem ...) C) and C is MODE_WIDTH
8477 minus the width of a smaller mode, we can do this with a
8478 SIGN_EXTEND or ZERO_EXTEND from the narrower memory location. */
8479 if ((code == ASHIFTRT || code == LSHIFTRT)
8480 && ! mode_dependent_address_p (XEXP (varop, 0))
8481 && ! MEM_VOLATILE_P (varop)
8482 && (tmode = mode_for_size (GET_MODE_BITSIZE (mode) - count,
8483 MODE_INT, 1)) != BLKmode)
8484 {
f76b9db2 8485 if (BYTES_BIG_ENDIAN)
38a448ca 8486 new = gen_rtx_MEM (tmode, XEXP (varop, 0));
f76b9db2 8487 else
38a448ca
RH
8488 new = gen_rtx_MEM (tmode,
8489 plus_constant (XEXP (varop, 0),
8490 count / BITS_PER_UNIT));
e24b00c8 8491 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (varop);
c6df88cb 8492 MEM_COPY_ATTRIBUTES (new, varop);
230d793d
RS
8493 varop = gen_rtx_combine (code == ASHIFTRT ? SIGN_EXTEND
8494 : ZERO_EXTEND, mode, new);
8495 count = 0;
8496 continue;
8497 }
8498 break;
8499
8500 case USE:
8501 /* Similar to the case above, except that we can only do this if
8502 the resulting mode is the same as that of the underlying
8503 MEM and adjust the address depending on the *bits* endianness
8504 because of the way that bit-field extract insns are defined. */
8505 if ((code == ASHIFTRT || code == LSHIFTRT)
8506 && (tmode = mode_for_size (GET_MODE_BITSIZE (mode) - count,
8507 MODE_INT, 1)) != BLKmode
8508 && tmode == GET_MODE (XEXP (varop, 0)))
8509 {
f76b9db2
ILT
8510 if (BITS_BIG_ENDIAN)
8511 new = XEXP (varop, 0);
8512 else
8513 {
8514 new = copy_rtx (XEXP (varop, 0));
8515 SUBST (XEXP (new, 0),
8516 plus_constant (XEXP (new, 0),
8517 count / BITS_PER_UNIT));
8518 }
230d793d
RS
8519
8520 varop = gen_rtx_combine (code == ASHIFTRT ? SIGN_EXTEND
8521 : ZERO_EXTEND, mode, new);
8522 count = 0;
8523 continue;
8524 }
8525 break;
8526
8527 case SUBREG:
8528 /* If VAROP is a SUBREG, strip it as long as the inner operand has
8529 the same number of words as what we've seen so far. Then store
8530 the widest mode in MODE. */
f9e67232
RS
8531 if (subreg_lowpart_p (varop)
8532 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop)))
8533 > GET_MODE_SIZE (GET_MODE (varop)))
230d793d
RS
8534 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop)))
8535 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
8536 == mode_words))
8537 {
8538 varop = SUBREG_REG (varop);
8539 if (GET_MODE_SIZE (GET_MODE (varop)) > GET_MODE_SIZE (mode))
8540 mode = GET_MODE (varop);
8541 continue;
8542 }
8543 break;
8544
8545 case MULT:
8546 /* Some machines use MULT instead of ASHIFT because MULT
8547 is cheaper. But it is still better on those machines to
8548 merge two shifts into one. */
8549 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
8550 && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0)
8551 {
8552 varop = gen_binary (ASHIFT, GET_MODE (varop), XEXP (varop, 0),
5f4f0e22 8553 GEN_INT (exact_log2 (INTVAL (XEXP (varop, 1)))));;
230d793d
RS
8554 continue;
8555 }
8556 break;
8557
8558 case UDIV:
8559 /* Similar, for when divides are cheaper. */
8560 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
8561 && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0)
8562 {
8563 varop = gen_binary (LSHIFTRT, GET_MODE (varop), XEXP (varop, 0),
5f4f0e22 8564 GEN_INT (exact_log2 (INTVAL (XEXP (varop, 1)))));
230d793d
RS
8565 continue;
8566 }
8567 break;
8568
8569 case ASHIFTRT:
8570 /* If we are extracting just the sign bit of an arithmetic right
8571 shift, that shift is not needed. */
8572 if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1)
8573 {
8574 varop = XEXP (varop, 0);
8575 continue;
8576 }
8577
0f41302f 8578 /* ... fall through ... */
230d793d
RS
8579
8580 case LSHIFTRT:
8581 case ASHIFT:
230d793d
RS
8582 case ROTATE:
8583 /* Here we have two nested shifts. The result is usually the
8584 AND of a new shift with a mask. We compute the result below. */
8585 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
8586 && INTVAL (XEXP (varop, 1)) >= 0
8587 && INTVAL (XEXP (varop, 1)) < GET_MODE_BITSIZE (GET_MODE (varop))
5f4f0e22
CH
8588 && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
8589 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
230d793d
RS
8590 {
8591 enum rtx_code first_code = GET_CODE (varop);
8592 int first_count = INTVAL (XEXP (varop, 1));
5f4f0e22 8593 unsigned HOST_WIDE_INT mask;
230d793d 8594 rtx mask_rtx;
230d793d 8595
230d793d
RS
8596 /* We have one common special case. We can't do any merging if
8597 the inner code is an ASHIFTRT of a smaller mode. However, if
8598 we have (ashift:M1 (subreg:M1 (ashiftrt:M2 FOO C1) 0) C2)
8599 with C2 == GET_MODE_BITSIZE (M1) - GET_MODE_BITSIZE (M2),
8600 we can convert it to
8601 (ashiftrt:M1 (ashift:M1 (and:M1 (subreg:M1 FOO 0 C2) C3) C1).
8602 This simplifies certain SIGN_EXTEND operations. */
8603 if (code == ASHIFT && first_code == ASHIFTRT
8604 && (GET_MODE_BITSIZE (result_mode)
8605 - GET_MODE_BITSIZE (GET_MODE (varop))) == count)
8606 {
8607 /* C3 has the low-order C1 bits zero. */
8608
5f4f0e22
CH
8609 mask = (GET_MODE_MASK (mode)
8610 & ~ (((HOST_WIDE_INT) 1 << first_count) - 1));
230d793d 8611
5f4f0e22 8612 varop = simplify_and_const_int (NULL_RTX, result_mode,
230d793d 8613 XEXP (varop, 0), mask);
5f4f0e22 8614 varop = simplify_shift_const (NULL_RTX, ASHIFT, result_mode,
230d793d
RS
8615 varop, count);
8616 count = first_count;
8617 code = ASHIFTRT;
8618 continue;
8619 }
8620
d0ab8cd3
RK
8621 /* If this was (ashiftrt (ashift foo C1) C2) and FOO has more
8622 than C1 high-order bits equal to the sign bit, we can convert
8623 this to either an ASHIFT or a ASHIFTRT depending on the
8624 two counts.
230d793d
RS
8625
8626 We cannot do this if VAROP's mode is not SHIFT_MODE. */
8627
8628 if (code == ASHIFTRT && first_code == ASHIFT
8629 && GET_MODE (varop) == shift_mode
d0ab8cd3
RK
8630 && (num_sign_bit_copies (XEXP (varop, 0), shift_mode)
8631 > first_count))
230d793d 8632 {
d0ab8cd3
RK
8633 count -= first_count;
8634 if (count < 0)
8635 count = - count, code = ASHIFT;
8636 varop = XEXP (varop, 0);
8637 continue;
230d793d
RS
8638 }
8639
8640 /* There are some cases we can't do. If CODE is ASHIFTRT,
8641 we can only do this if FIRST_CODE is also ASHIFTRT.
8642
8643 We can't do the case when CODE is ROTATE and FIRST_CODE is
8644 ASHIFTRT.
8645
8646 If the mode of this shift is not the mode of the outer shift,
bdaae9a0 8647 we can't do this if either shift is a right shift or ROTATE.
230d793d
RS
8648
8649 Finally, we can't do any of these if the mode is too wide
8650 unless the codes are the same.
8651
8652 Handle the case where the shift codes are the same
8653 first. */
8654
8655 if (code == first_code)
8656 {
8657 if (GET_MODE (varop) != result_mode
bdaae9a0
RK
8658 && (code == ASHIFTRT || code == LSHIFTRT
8659 || code == ROTATE))
230d793d
RS
8660 break;
8661
8662 count += first_count;
8663 varop = XEXP (varop, 0);
8664 continue;
8665 }
8666
8667 if (code == ASHIFTRT
8668 || (code == ROTATE && first_code == ASHIFTRT)
5f4f0e22 8669 || GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT
230d793d 8670 || (GET_MODE (varop) != result_mode
bdaae9a0
RK
8671 && (first_code == ASHIFTRT || first_code == LSHIFTRT
8672 || first_code == ROTATE
230d793d
RS
8673 || code == ROTATE)))
8674 break;
8675
8676 /* To compute the mask to apply after the shift, shift the
951553af 8677 nonzero bits of the inner shift the same way the
230d793d
RS
8678 outer shift will. */
8679
951553af 8680 mask_rtx = GEN_INT (nonzero_bits (varop, GET_MODE (varop)));
230d793d
RS
8681
8682 mask_rtx
8683 = simplify_binary_operation (code, result_mode, mask_rtx,
5f4f0e22 8684 GEN_INT (count));
230d793d
RS
8685
8686 /* Give up if we can't compute an outer operation to use. */
8687 if (mask_rtx == 0
8688 || GET_CODE (mask_rtx) != CONST_INT
8689 || ! merge_outer_ops (&outer_op, &outer_const, AND,
8690 INTVAL (mask_rtx),
8691 result_mode, &complement_p))
8692 break;
8693
8694 /* If the shifts are in the same direction, we add the
8695 counts. Otherwise, we subtract them. */
8696 if ((code == ASHIFTRT || code == LSHIFTRT)
8697 == (first_code == ASHIFTRT || first_code == LSHIFTRT))
8698 count += first_count;
8699 else
8700 count -= first_count;
8701
8702 /* If COUNT is positive, the new shift is usually CODE,
8703 except for the two exceptions below, in which case it is
8704 FIRST_CODE. If the count is negative, FIRST_CODE should
8705 always be used */
8706 if (count > 0
8707 && ((first_code == ROTATE && code == ASHIFT)
8708 || (first_code == ASHIFTRT && code == LSHIFTRT)))
8709 code = first_code;
8710 else if (count < 0)
8711 code = first_code, count = - count;
8712
8713 varop = XEXP (varop, 0);
8714 continue;
8715 }
8716
8717 /* If we have (A << B << C) for any shift, we can convert this to
8718 (A << C << B). This wins if A is a constant. Only try this if
8719 B is not a constant. */
8720
8721 else if (GET_CODE (varop) == code
8722 && GET_CODE (XEXP (varop, 1)) != CONST_INT
8723 && 0 != (new
8724 = simplify_binary_operation (code, mode,
8725 XEXP (varop, 0),
5f4f0e22 8726 GEN_INT (count))))
230d793d
RS
8727 {
8728 varop = gen_rtx_combine (code, mode, new, XEXP (varop, 1));
8729 count = 0;
8730 continue;
8731 }
8732 break;
8733
8734 case NOT:
8735 /* Make this fit the case below. */
8736 varop = gen_rtx_combine (XOR, mode, XEXP (varop, 0),
5f4f0e22 8737 GEN_INT (GET_MODE_MASK (mode)));
230d793d
RS
8738 continue;
8739
8740 case IOR:
8741 case AND:
8742 case XOR:
8743 /* If we have (xshiftrt (ior (plus X (const_int -1)) X) C)
8744 with C the size of VAROP - 1 and the shift is logical if
8745 STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
8746 we have an (le X 0) operation. If we have an arithmetic shift
8747 and STORE_FLAG_VALUE is 1 or we have a logical shift with
8748 STORE_FLAG_VALUE of -1, we have a (neg (le X 0)) operation. */
8749
8750 if (GET_CODE (varop) == IOR && GET_CODE (XEXP (varop, 0)) == PLUS
8751 && XEXP (XEXP (varop, 0), 1) == constm1_rtx
8752 && (STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
8753 && (code == LSHIFTRT || code == ASHIFTRT)
8754 && count == GET_MODE_BITSIZE (GET_MODE (varop)) - 1
8755 && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
8756 {
8757 count = 0;
8758 varop = gen_rtx_combine (LE, GET_MODE (varop), XEXP (varop, 1),
8759 const0_rtx);
8760
8761 if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
8762 varop = gen_rtx_combine (NEG, GET_MODE (varop), varop);
8763
8764 continue;
8765 }
8766
8767 /* If we have (shift (logical)), move the logical to the outside
8768 to allow it to possibly combine with another logical and the
8769 shift to combine with another shift. This also canonicalizes to
8770 what a ZERO_EXTRACT looks like. Also, some machines have
8771 (and (shift)) insns. */
8772
8773 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
8774 && (new = simplify_binary_operation (code, result_mode,
8775 XEXP (varop, 1),
5f4f0e22 8776 GEN_INT (count))) != 0
7d171a1e 8777 && GET_CODE(new) == CONST_INT
230d793d
RS
8778 && merge_outer_ops (&outer_op, &outer_const, GET_CODE (varop),
8779 INTVAL (new), result_mode, &complement_p))
8780 {
8781 varop = XEXP (varop, 0);
8782 continue;
8783 }
8784
8785 /* If we can't do that, try to simplify the shift in each arm of the
8786 logical expression, make a new logical expression, and apply
8787 the inverse distributive law. */
8788 {
00d4ca1c 8789 rtx lhs = simplify_shift_const (NULL_RTX, code, shift_mode,
230d793d 8790 XEXP (varop, 0), count);
00d4ca1c 8791 rtx rhs = simplify_shift_const (NULL_RTX, code, shift_mode,
230d793d
RS
8792 XEXP (varop, 1), count);
8793
21a64bf1 8794 varop = gen_binary (GET_CODE (varop), shift_mode, lhs, rhs);
230d793d
RS
8795 varop = apply_distributive_law (varop);
8796
8797 count = 0;
8798 }
8799 break;
8800
8801 case EQ:
45620ed4 8802 /* convert (lshiftrt (eq FOO 0) C) to (xor FOO 1) if STORE_FLAG_VALUE
230d793d 8803 says that the sign bit can be tested, FOO has mode MODE, C is
45620ed4
RK
8804 GET_MODE_BITSIZE (MODE) - 1, and FOO has only its low-order bit
8805 that may be nonzero. */
8806 if (code == LSHIFTRT
230d793d
RS
8807 && XEXP (varop, 1) == const0_rtx
8808 && GET_MODE (XEXP (varop, 0)) == result_mode
8809 && count == GET_MODE_BITSIZE (result_mode) - 1
5f4f0e22 8810 && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
230d793d 8811 && ((STORE_FLAG_VALUE
5f4f0e22 8812 & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (result_mode) - 1))))
951553af 8813 && nonzero_bits (XEXP (varop, 0), result_mode) == 1
5f4f0e22
CH
8814 && merge_outer_ops (&outer_op, &outer_const, XOR,
8815 (HOST_WIDE_INT) 1, result_mode,
8816 &complement_p))
230d793d
RS
8817 {
8818 varop = XEXP (varop, 0);
8819 count = 0;
8820 continue;
8821 }
8822 break;
8823
8824 case NEG:
d0ab8cd3
RK
8825 /* (lshiftrt (neg A) C) where A is either 0 or 1 and C is one less
8826 than the number of bits in the mode is equivalent to A. */
8827 if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1
951553af 8828 && nonzero_bits (XEXP (varop, 0), result_mode) == 1)
230d793d 8829 {
d0ab8cd3 8830 varop = XEXP (varop, 0);
230d793d
RS
8831 count = 0;
8832 continue;
8833 }
8834
8835 /* NEG commutes with ASHIFT since it is multiplication. Move the
8836 NEG outside to allow shifts to combine. */
8837 if (code == ASHIFT
5f4f0e22
CH
8838 && merge_outer_ops (&outer_op, &outer_const, NEG,
8839 (HOST_WIDE_INT) 0, result_mode,
8840 &complement_p))
230d793d
RS
8841 {
8842 varop = XEXP (varop, 0);
8843 continue;
8844 }
8845 break;
8846
8847 case PLUS:
d0ab8cd3
RK
8848 /* (lshiftrt (plus A -1) C) where A is either 0 or 1 and C
8849 is one less than the number of bits in the mode is
8850 equivalent to (xor A 1). */
230d793d
RS
8851 if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1
8852 && XEXP (varop, 1) == constm1_rtx
951553af 8853 && nonzero_bits (XEXP (varop, 0), result_mode) == 1
5f4f0e22
CH
8854 && merge_outer_ops (&outer_op, &outer_const, XOR,
8855 (HOST_WIDE_INT) 1, result_mode,
8856 &complement_p))
230d793d
RS
8857 {
8858 count = 0;
8859 varop = XEXP (varop, 0);
8860 continue;
8861 }
8862
3f508eca 8863 /* If we have (xshiftrt (plus FOO BAR) C), and the only bits
951553af 8864 that might be nonzero in BAR are those being shifted out and those
3f508eca
RK
8865 bits are known zero in FOO, we can replace the PLUS with FOO.
8866 Similarly in the other operand order. This code occurs when
8867 we are computing the size of a variable-size array. */
8868
8869 if ((code == ASHIFTRT || code == LSHIFTRT)
5f4f0e22 8870 && count < HOST_BITS_PER_WIDE_INT
951553af
RK
8871 && nonzero_bits (XEXP (varop, 1), result_mode) >> count == 0
8872 && (nonzero_bits (XEXP (varop, 1), result_mode)
8873 & nonzero_bits (XEXP (varop, 0), result_mode)) == 0)
3f508eca
RK
8874 {
8875 varop = XEXP (varop, 0);
8876 continue;
8877 }
8878 else if ((code == ASHIFTRT || code == LSHIFTRT)
5f4f0e22 8879 && count < HOST_BITS_PER_WIDE_INT
ac49a949 8880 && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
951553af 8881 && 0 == (nonzero_bits (XEXP (varop, 0), result_mode)
3f508eca 8882 >> count)
951553af
RK
8883 && 0 == (nonzero_bits (XEXP (varop, 0), result_mode)
8884 & nonzero_bits (XEXP (varop, 1),
3f508eca
RK
8885 result_mode)))
8886 {
8887 varop = XEXP (varop, 1);
8888 continue;
8889 }
8890
230d793d
RS
8891 /* (ashift (plus foo C) N) is (plus (ashift foo N) C'). */
8892 if (code == ASHIFT
8893 && GET_CODE (XEXP (varop, 1)) == CONST_INT
8894 && (new = simplify_binary_operation (ASHIFT, result_mode,
8895 XEXP (varop, 1),
5f4f0e22 8896 GEN_INT (count))) != 0
7d171a1e 8897 && GET_CODE(new) == CONST_INT
230d793d
RS
8898 && merge_outer_ops (&outer_op, &outer_const, PLUS,
8899 INTVAL (new), result_mode, &complement_p))
8900 {
8901 varop = XEXP (varop, 0);
8902 continue;
8903 }
8904 break;
8905
8906 case MINUS:
8907 /* If we have (xshiftrt (minus (ashiftrt X C)) X) C)
8908 with C the size of VAROP - 1 and the shift is logical if
8909 STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
8910 we have a (gt X 0) operation. If the shift is arithmetic with
8911 STORE_FLAG_VALUE of 1 or logical with STORE_FLAG_VALUE == -1,
8912 we have a (neg (gt X 0)) operation. */
8913
0802d516
RK
8914 if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
8915 && GET_CODE (XEXP (varop, 0)) == ASHIFTRT
230d793d 8916 && count == GET_MODE_BITSIZE (GET_MODE (varop)) - 1
230d793d
RS
8917 && (code == LSHIFTRT || code == ASHIFTRT)
8918 && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT
8919 && INTVAL (XEXP (XEXP (varop, 0), 1)) == count
8920 && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
8921 {
8922 count = 0;
8923 varop = gen_rtx_combine (GT, GET_MODE (varop), XEXP (varop, 1),
8924 const0_rtx);
8925
8926 if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
8927 varop = gen_rtx_combine (NEG, GET_MODE (varop), varop);
8928
8929 continue;
8930 }
8931 break;
6e0ef100
JC
8932
8933 case TRUNCATE:
8934 /* Change (lshiftrt (truncate (lshiftrt))) to (truncate (lshiftrt))
8935 if the truncate does not affect the value. */
8936 if (code == LSHIFTRT
8937 && GET_CODE (XEXP (varop, 0)) == LSHIFTRT
8938 && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT
8939 && (INTVAL (XEXP (XEXP (varop, 0), 1))
b577a8ff
JL
8940 >= (GET_MODE_BITSIZE (GET_MODE (XEXP (varop, 0)))
8941 - GET_MODE_BITSIZE (GET_MODE (varop)))))
6e0ef100
JC
8942 {
8943 rtx varop_inner = XEXP (varop, 0);
8944
8945 varop_inner = gen_rtx_combine (LSHIFTRT,
8946 GET_MODE (varop_inner),
8947 XEXP (varop_inner, 0),
8948 GEN_INT (count + INTVAL (XEXP (varop_inner, 1))));
8949 varop = gen_rtx_combine (TRUNCATE, GET_MODE (varop),
8950 varop_inner);
8951 count = 0;
8952 continue;
8953 }
8954 break;
e9a25f70
JL
8955
8956 default:
8957 break;
230d793d
RS
8958 }
8959
8960 break;
8961 }
8962
8963 /* We need to determine what mode to do the shift in. If the shift is
f6789c77
RK
8964 a right shift or ROTATE, we must always do it in the mode it was
8965 originally done in. Otherwise, we can do it in MODE, the widest mode
8966 encountered. The code we care about is that of the shift that will
8967 actually be done, not the shift that was originally requested. */
8968 shift_mode
8969 = (code == ASHIFTRT || code == LSHIFTRT || code == ROTATE
8970 ? result_mode : mode);
230d793d
RS
8971
8972 /* We have now finished analyzing the shift. The result should be
8973 a shift of type CODE with SHIFT_MODE shifting VAROP COUNT places. If
8974 OUTER_OP is non-NIL, it is an operation that needs to be applied
8975 to the result of the shift. OUTER_CONST is the relevant constant,
8976 but we must turn off all bits turned off in the shift.
8977
8978 If we were passed a value for X, see if we can use any pieces of
8979 it. If not, make new rtx. */
8980
8981 if (x && GET_RTX_CLASS (GET_CODE (x)) == '2'
8982 && GET_CODE (XEXP (x, 1)) == CONST_INT
8983 && INTVAL (XEXP (x, 1)) == count)
8984 const_rtx = XEXP (x, 1);
8985 else
5f4f0e22 8986 const_rtx = GEN_INT (count);
230d793d
RS
8987
8988 if (x && GET_CODE (XEXP (x, 0)) == SUBREG
8989 && GET_MODE (XEXP (x, 0)) == shift_mode
8990 && SUBREG_REG (XEXP (x, 0)) == varop)
8991 varop = XEXP (x, 0);
8992 else if (GET_MODE (varop) != shift_mode)
8993 varop = gen_lowpart_for_combine (shift_mode, varop);
8994
0f41302f 8995 /* If we can't make the SUBREG, try to return what we were given. */
230d793d
RS
8996 if (GET_CODE (varop) == CLOBBER)
8997 return x ? x : varop;
8998
8999 new = simplify_binary_operation (code, shift_mode, varop, const_rtx);
9000 if (new != 0)
9001 x = new;
9002 else
9003 {
9004 if (x == 0 || GET_CODE (x) != code || GET_MODE (x) != shift_mode)
9005 x = gen_rtx_combine (code, shift_mode, varop, const_rtx);
9006
9007 SUBST (XEXP (x, 0), varop);
9008 SUBST (XEXP (x, 1), const_rtx);
9009 }
9010
224eeff2
RK
9011 /* If we have an outer operation and we just made a shift, it is
9012 possible that we could have simplified the shift were it not
9013 for the outer operation. So try to do the simplification
9014 recursively. */
9015
9016 if (outer_op != NIL && GET_CODE (x) == code
9017 && GET_CODE (XEXP (x, 1)) == CONST_INT)
9018 x = simplify_shift_const (x, code, shift_mode, XEXP (x, 0),
9019 INTVAL (XEXP (x, 1)));
9020
230d793d
RS
9021 /* If we were doing a LSHIFTRT in a wider mode than it was originally,
9022 turn off all the bits that the shift would have turned off. */
9023 if (orig_code == LSHIFTRT && result_mode != shift_mode)
5f4f0e22 9024 x = simplify_and_const_int (NULL_RTX, shift_mode, x,
230d793d
RS
9025 GET_MODE_MASK (result_mode) >> orig_count);
9026
9027 /* Do the remainder of the processing in RESULT_MODE. */
9028 x = gen_lowpart_for_combine (result_mode, x);
9029
9030 /* If COMPLEMENT_P is set, we have to complement X before doing the outer
9031 operation. */
9032 if (complement_p)
0c1c8ea6 9033 x = gen_unary (NOT, result_mode, result_mode, x);
230d793d
RS
9034
9035 if (outer_op != NIL)
9036 {
5f4f0e22 9037 if (GET_MODE_BITSIZE (result_mode) < HOST_BITS_PER_WIDE_INT)
7e4ce834 9038 outer_const = trunc_int_for_mode (outer_const, result_mode);
230d793d
RS
9039
9040 if (outer_op == AND)
5f4f0e22 9041 x = simplify_and_const_int (NULL_RTX, result_mode, x, outer_const);
230d793d
RS
9042 else if (outer_op == SET)
9043 /* This means that we have determined that the result is
9044 equivalent to a constant. This should be rare. */
5f4f0e22 9045 x = GEN_INT (outer_const);
230d793d 9046 else if (GET_RTX_CLASS (outer_op) == '1')
0c1c8ea6 9047 x = gen_unary (outer_op, result_mode, result_mode, x);
230d793d 9048 else
5f4f0e22 9049 x = gen_binary (outer_op, result_mode, x, GEN_INT (outer_const));
230d793d
RS
9050 }
9051
9052 return x;
9053}
9054\f
9055/* Like recog, but we receive the address of a pointer to a new pattern.
9056 We try to match the rtx that the pointer points to.
9057 If that fails, we may try to modify or replace the pattern,
9058 storing the replacement into the same pointer object.
9059
9060 Modifications include deletion or addition of CLOBBERs.
9061
9062 PNOTES is a pointer to a location where any REG_UNUSED notes added for
9063 the CLOBBERs are placed.
9064
9065 The value is the final insn code from the pattern ultimately matched,
9066 or -1. */
9067
9068static int
8e2f6e35 9069recog_for_combine (pnewpat, insn, pnotes)
230d793d
RS
9070 rtx *pnewpat;
9071 rtx insn;
9072 rtx *pnotes;
9073{
9074 register rtx pat = *pnewpat;
9075 int insn_code_number;
9076 int num_clobbers_to_add = 0;
9077 int i;
9078 rtx notes = 0;
9079
974f4146
RK
9080 /* If PAT is a PARALLEL, check to see if it contains the CLOBBER
9081 we use to indicate that something didn't match. If we find such a
9082 thing, force rejection. */
d96023cf 9083 if (GET_CODE (pat) == PARALLEL)
974f4146 9084 for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
d96023cf
RK
9085 if (GET_CODE (XVECEXP (pat, 0, i)) == CLOBBER
9086 && XEXP (XVECEXP (pat, 0, i), 0) == const0_rtx)
974f4146
RK
9087 return -1;
9088
230d793d
RS
9089 /* Is the result of combination a valid instruction? */
9090 insn_code_number = recog (pat, insn, &num_clobbers_to_add);
9091
9092 /* If it isn't, there is the possibility that we previously had an insn
9093 that clobbered some register as a side effect, but the combined
9094 insn doesn't need to do that. So try once more without the clobbers
9095 unless this represents an ASM insn. */
9096
9097 if (insn_code_number < 0 && ! check_asm_operands (pat)
9098 && GET_CODE (pat) == PARALLEL)
9099 {
9100 int pos;
9101
9102 for (pos = 0, i = 0; i < XVECLEN (pat, 0); i++)
9103 if (GET_CODE (XVECEXP (pat, 0, i)) != CLOBBER)
9104 {
9105 if (i != pos)
9106 SUBST (XVECEXP (pat, 0, pos), XVECEXP (pat, 0, i));
9107 pos++;
9108 }
9109
9110 SUBST_INT (XVECLEN (pat, 0), pos);
9111
9112 if (pos == 1)
9113 pat = XVECEXP (pat, 0, 0);
9114
9115 insn_code_number = recog (pat, insn, &num_clobbers_to_add);
9116 }
9117
9118 /* If we had any clobbers to add, make a new pattern than contains
9119 them. Then check to make sure that all of them are dead. */
9120 if (num_clobbers_to_add)
9121 {
38a448ca
RH
9122 rtx newpat = gen_rtx_PARALLEL (VOIDmode,
9123 gen_rtvec (GET_CODE (pat) == PARALLEL
9124 ? XVECLEN (pat, 0) + num_clobbers_to_add
9125 : num_clobbers_to_add + 1));
230d793d
RS
9126
9127 if (GET_CODE (pat) == PARALLEL)
9128 for (i = 0; i < XVECLEN (pat, 0); i++)
9129 XVECEXP (newpat, 0, i) = XVECEXP (pat, 0, i);
9130 else
9131 XVECEXP (newpat, 0, 0) = pat;
9132
9133 add_clobbers (newpat, insn_code_number);
9134
9135 for (i = XVECLEN (newpat, 0) - num_clobbers_to_add;
9136 i < XVECLEN (newpat, 0); i++)
9137 {
9138 if (GET_CODE (XEXP (XVECEXP (newpat, 0, i), 0)) == REG
9139 && ! reg_dead_at_p (XEXP (XVECEXP (newpat, 0, i), 0), insn))
9140 return -1;
38a448ca
RH
9141 notes = gen_rtx_EXPR_LIST (REG_UNUSED,
9142 XEXP (XVECEXP (newpat, 0, i), 0), notes);
230d793d
RS
9143 }
9144 pat = newpat;
9145 }
9146
9147 *pnewpat = pat;
9148 *pnotes = notes;
9149
9150 return insn_code_number;
9151}
9152\f
9153/* Like gen_lowpart but for use by combine. In combine it is not possible
9154 to create any new pseudoregs. However, it is safe to create
9155 invalid memory addresses, because combine will try to recognize
9156 them and all they will do is make the combine attempt fail.
9157
9158 If for some reason this cannot do its job, an rtx
9159 (clobber (const_int 0)) is returned.
9160 An insn containing that will not be recognized. */
9161
9162#undef gen_lowpart
9163
9164static rtx
9165gen_lowpart_for_combine (mode, x)
9166 enum machine_mode mode;
9167 register rtx x;
9168{
9169 rtx result;
9170
9171 if (GET_MODE (x) == mode)
9172 return x;
9173
eae957a8
RK
9174 /* We can only support MODE being wider than a word if X is a
9175 constant integer or has a mode the same size. */
9176
9177 if (GET_MODE_SIZE (mode) > UNITS_PER_WORD
9178 && ! ((GET_MODE (x) == VOIDmode
9179 && (GET_CODE (x) == CONST_INT
9180 || GET_CODE (x) == CONST_DOUBLE))
9181 || GET_MODE_SIZE (GET_MODE (x)) == GET_MODE_SIZE (mode)))
38a448ca 9182 return gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
230d793d
RS
9183
9184 /* X might be a paradoxical (subreg (mem)). In that case, gen_lowpart
9185 won't know what to do. So we will strip off the SUBREG here and
9186 process normally. */
9187 if (GET_CODE (x) == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
9188 {
9189 x = SUBREG_REG (x);
9190 if (GET_MODE (x) == mode)
9191 return x;
9192 }
9193
9194 result = gen_lowpart_common (mode, x);
64bf47a2
RK
9195 if (result != 0
9196 && GET_CODE (result) == SUBREG
9197 && GET_CODE (SUBREG_REG (result)) == REG
9198 && REGNO (SUBREG_REG (result)) >= FIRST_PSEUDO_REGISTER
9199 && (GET_MODE_SIZE (GET_MODE (result))
9200 != GET_MODE_SIZE (GET_MODE (SUBREG_REG (result)))))
b1f21e0a 9201 REG_CHANGES_SIZE (REGNO (SUBREG_REG (result))) = 1;
64bf47a2 9202
230d793d
RS
9203 if (result)
9204 return result;
9205
9206 if (GET_CODE (x) == MEM)
9207 {
9208 register int offset = 0;
9209 rtx new;
9210
9211 /* Refuse to work on a volatile memory ref or one with a mode-dependent
9212 address. */
9213 if (MEM_VOLATILE_P (x) || mode_dependent_address_p (XEXP (x, 0)))
38a448ca 9214 return gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
230d793d
RS
9215
9216 /* If we want to refer to something bigger than the original memref,
9217 generate a perverse subreg instead. That will force a reload
9218 of the original memref X. */
9219 if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (mode))
38a448ca 9220 return gen_rtx_SUBREG (mode, x, 0);
230d793d 9221
f76b9db2
ILT
9222 if (WORDS_BIG_ENDIAN)
9223 offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
9224 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
9225 if (BYTES_BIG_ENDIAN)
9226 {
9227 /* Adjust the address so that the address-after-the-data is
9228 unchanged. */
9229 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
9230 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
9231 }
38a448ca 9232 new = gen_rtx_MEM (mode, plus_constant (XEXP (x, 0), offset));
230d793d 9233 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
c6df88cb 9234 MEM_COPY_ATTRIBUTES (new, x);
230d793d
RS
9235 return new;
9236 }
9237
9238 /* If X is a comparison operator, rewrite it in a new mode. This
9239 probably won't match, but may allow further simplifications. */
9240 else if (GET_RTX_CLASS (GET_CODE (x)) == '<')
9241 return gen_rtx_combine (GET_CODE (x), mode, XEXP (x, 0), XEXP (x, 1));
9242
9243 /* If we couldn't simplify X any other way, just enclose it in a
9244 SUBREG. Normally, this SUBREG won't match, but some patterns may
a7c99304 9245 include an explicit SUBREG or we may simplify it further in combine. */
230d793d 9246 else
dfbe1b2f
RK
9247 {
9248 int word = 0;
9249
9250 if (WORDS_BIG_ENDIAN && GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD)
9251 word = ((GET_MODE_SIZE (GET_MODE (x))
9252 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD))
9253 / UNITS_PER_WORD);
38a448ca 9254 return gen_rtx_SUBREG (mode, x, word);
dfbe1b2f 9255 }
230d793d
RS
9256}
9257\f
9258/* Make an rtx expression. This is a subset of gen_rtx and only supports
9259 expressions of 1, 2, or 3 operands, each of which are rtx expressions.
9260
9261 If the identical expression was previously in the insn (in the undobuf),
9262 it will be returned. Only if it is not found will a new expression
9263 be made. */
9264
9265/*VARARGS2*/
9266static rtx
4f90e4a0 9267gen_rtx_combine VPROTO((enum rtx_code code, enum machine_mode mode, ...))
230d793d 9268{
5148a72b 9269#ifndef ANSI_PROTOTYPES
230d793d
RS
9270 enum rtx_code code;
9271 enum machine_mode mode;
4f90e4a0
RK
9272#endif
9273 va_list p;
230d793d
RS
9274 int n_args;
9275 rtx args[3];
b729186a 9276 int j;
6f7d635c 9277 const char *fmt;
230d793d 9278 rtx rt;
241cea85 9279 struct undo *undo;
230d793d 9280
4f90e4a0
RK
9281 VA_START (p, mode);
9282
5148a72b 9283#ifndef ANSI_PROTOTYPES
230d793d
RS
9284 code = va_arg (p, enum rtx_code);
9285 mode = va_arg (p, enum machine_mode);
4f90e4a0
RK
9286#endif
9287
230d793d
RS
9288 n_args = GET_RTX_LENGTH (code);
9289 fmt = GET_RTX_FORMAT (code);
9290
9291 if (n_args == 0 || n_args > 3)
9292 abort ();
9293
9294 /* Get each arg and verify that it is supposed to be an expression. */
9295 for (j = 0; j < n_args; j++)
9296 {
9297 if (*fmt++ != 'e')
9298 abort ();
9299
9300 args[j] = va_arg (p, rtx);
9301 }
9302
9303 /* See if this is in undobuf. Be sure we don't use objects that came
9304 from another insn; this could produce circular rtl structures. */
9305
241cea85
RK
9306 for (undo = undobuf.undos; undo != undobuf.previous_undos; undo = undo->next)
9307 if (!undo->is_int
9308 && GET_CODE (undo->old_contents.r) == code
9309 && GET_MODE (undo->old_contents.r) == mode)
230d793d
RS
9310 {
9311 for (j = 0; j < n_args; j++)
241cea85 9312 if (XEXP (undo->old_contents.r, j) != args[j])
230d793d
RS
9313 break;
9314
9315 if (j == n_args)
241cea85 9316 return undo->old_contents.r;
230d793d
RS
9317 }
9318
9319 /* Otherwise make a new rtx. We know we have 1, 2, or 3 args.
9320 Use rtx_alloc instead of gen_rtx because it's faster on RISC. */
9321 rt = rtx_alloc (code);
9322 PUT_MODE (rt, mode);
9323 XEXP (rt, 0) = args[0];
9324 if (n_args > 1)
9325 {
9326 XEXP (rt, 1) = args[1];
9327 if (n_args > 2)
9328 XEXP (rt, 2) = args[2];
9329 }
9330 return rt;
9331}
9332
9333/* These routines make binary and unary operations by first seeing if they
9334 fold; if not, a new expression is allocated. */
9335
9336static rtx
9337gen_binary (code, mode, op0, op1)
9338 enum rtx_code code;
9339 enum machine_mode mode;
9340 rtx op0, op1;
9341{
9342 rtx result;
1a26b032
RK
9343 rtx tem;
9344
9345 if (GET_RTX_CLASS (code) == 'c'
9346 && (GET_CODE (op0) == CONST_INT
9347 || (CONSTANT_P (op0) && GET_CODE (op1) != CONST_INT)))
9348 tem = op0, op0 = op1, op1 = tem;
230d793d
RS
9349
9350 if (GET_RTX_CLASS (code) == '<')
9351 {
9352 enum machine_mode op_mode = GET_MODE (op0);
9210df58
RK
9353
9354 /* Strip the COMPARE from (REL_OP (compare X Y) 0) to get
0f41302f 9355 just (REL_OP X Y). */
9210df58
RK
9356 if (GET_CODE (op0) == COMPARE && op1 == const0_rtx)
9357 {
9358 op1 = XEXP (op0, 1);
9359 op0 = XEXP (op0, 0);
9360 op_mode = GET_MODE (op0);
9361 }
9362
230d793d
RS
9363 if (op_mode == VOIDmode)
9364 op_mode = GET_MODE (op1);
9365 result = simplify_relational_operation (code, op_mode, op0, op1);
9366 }
9367 else
9368 result = simplify_binary_operation (code, mode, op0, op1);
9369
9370 if (result)
9371 return result;
9372
9373 /* Put complex operands first and constants second. */
9374 if (GET_RTX_CLASS (code) == 'c'
9375 && ((CONSTANT_P (op0) && GET_CODE (op1) != CONST_INT)
9376 || (GET_RTX_CLASS (GET_CODE (op0)) == 'o'
9377 && GET_RTX_CLASS (GET_CODE (op1)) != 'o')
9378 || (GET_CODE (op0) == SUBREG
9379 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (op0))) == 'o'
9380 && GET_RTX_CLASS (GET_CODE (op1)) != 'o')))
9381 return gen_rtx_combine (code, mode, op1, op0);
9382
e5e809f4
JL
9383 /* If we are turning off bits already known off in OP0, we need not do
9384 an AND. */
9385 else if (code == AND && GET_CODE (op1) == CONST_INT
9386 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
9387 && (nonzero_bits (op0, mode) & ~ INTVAL (op1)) == 0)
9388 return op0;
9389
230d793d
RS
9390 return gen_rtx_combine (code, mode, op0, op1);
9391}
9392
9393static rtx
0c1c8ea6 9394gen_unary (code, mode, op0_mode, op0)
230d793d 9395 enum rtx_code code;
0c1c8ea6 9396 enum machine_mode mode, op0_mode;
230d793d
RS
9397 rtx op0;
9398{
0c1c8ea6 9399 rtx result = simplify_unary_operation (code, mode, op0, op0_mode);
230d793d
RS
9400
9401 if (result)
9402 return result;
9403
9404 return gen_rtx_combine (code, mode, op0);
9405}
9406\f
9407/* Simplify a comparison between *POP0 and *POP1 where CODE is the
9408 comparison code that will be tested.
9409
9410 The result is a possibly different comparison code to use. *POP0 and
9411 *POP1 may be updated.
9412
9413 It is possible that we might detect that a comparison is either always
9414 true or always false. However, we do not perform general constant
5089e22e 9415 folding in combine, so this knowledge isn't useful. Such tautologies
230d793d
RS
9416 should have been detected earlier. Hence we ignore all such cases. */
9417
9418static enum rtx_code
9419simplify_comparison (code, pop0, pop1)
9420 enum rtx_code code;
9421 rtx *pop0;
9422 rtx *pop1;
9423{
9424 rtx op0 = *pop0;
9425 rtx op1 = *pop1;
9426 rtx tem, tem1;
9427 int i;
9428 enum machine_mode mode, tmode;
9429
9430 /* Try a few ways of applying the same transformation to both operands. */
9431 while (1)
9432 {
3a19aabc
RK
9433#ifndef WORD_REGISTER_OPERATIONS
9434 /* The test below this one won't handle SIGN_EXTENDs on these machines,
9435 so check specially. */
9436 if (code != GTU && code != GEU && code != LTU && code != LEU
9437 && GET_CODE (op0) == ASHIFTRT && GET_CODE (op1) == ASHIFTRT
9438 && GET_CODE (XEXP (op0, 0)) == ASHIFT
9439 && GET_CODE (XEXP (op1, 0)) == ASHIFT
9440 && GET_CODE (XEXP (XEXP (op0, 0), 0)) == SUBREG
9441 && GET_CODE (XEXP (XEXP (op1, 0), 0)) == SUBREG
9442 && (GET_MODE (SUBREG_REG (XEXP (XEXP (op0, 0), 0)))
ad25ba17 9443 == GET_MODE (SUBREG_REG (XEXP (XEXP (op1, 0), 0))))
3a19aabc
RK
9444 && GET_CODE (XEXP (op0, 1)) == CONST_INT
9445 && GET_CODE (XEXP (op1, 1)) == CONST_INT
9446 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
9447 && GET_CODE (XEXP (XEXP (op1, 0), 1)) == CONST_INT
9448 && INTVAL (XEXP (op0, 1)) == INTVAL (XEXP (op1, 1))
9449 && INTVAL (XEXP (op0, 1)) == INTVAL (XEXP (XEXP (op0, 0), 1))
9450 && INTVAL (XEXP (op0, 1)) == INTVAL (XEXP (XEXP (op1, 0), 1))
9451 && (INTVAL (XEXP (op0, 1))
9452 == (GET_MODE_BITSIZE (GET_MODE (op0))
9453 - (GET_MODE_BITSIZE
9454 (GET_MODE (SUBREG_REG (XEXP (XEXP (op0, 0), 0))))))))
9455 {
9456 op0 = SUBREG_REG (XEXP (XEXP (op0, 0), 0));
9457 op1 = SUBREG_REG (XEXP (XEXP (op1, 0), 0));
9458 }
9459#endif
9460
230d793d
RS
9461 /* If both operands are the same constant shift, see if we can ignore the
9462 shift. We can if the shift is a rotate or if the bits shifted out of
951553af 9463 this shift are known to be zero for both inputs and if the type of
230d793d 9464 comparison is compatible with the shift. */
67232b23
RK
9465 if (GET_CODE (op0) == GET_CODE (op1)
9466 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
9467 && ((GET_CODE (op0) == ROTATE && (code == NE || code == EQ))
45620ed4 9468 || ((GET_CODE (op0) == LSHIFTRT || GET_CODE (op0) == ASHIFT)
67232b23
RK
9469 && (code != GT && code != LT && code != GE && code != LE))
9470 || (GET_CODE (op0) == ASHIFTRT
9471 && (code != GTU && code != LTU
9472 && code != GEU && code != GEU)))
9473 && GET_CODE (XEXP (op0, 1)) == CONST_INT
9474 && INTVAL (XEXP (op0, 1)) >= 0
9475 && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
9476 && XEXP (op0, 1) == XEXP (op1, 1))
230d793d
RS
9477 {
9478 enum machine_mode mode = GET_MODE (op0);
5f4f0e22 9479 unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
230d793d
RS
9480 int shift_count = INTVAL (XEXP (op0, 1));
9481
9482 if (GET_CODE (op0) == LSHIFTRT || GET_CODE (op0) == ASHIFTRT)
9483 mask &= (mask >> shift_count) << shift_count;
45620ed4 9484 else if (GET_CODE (op0) == ASHIFT)
230d793d
RS
9485 mask = (mask & (mask << shift_count)) >> shift_count;
9486
951553af
RK
9487 if ((nonzero_bits (XEXP (op0, 0), mode) & ~ mask) == 0
9488 && (nonzero_bits (XEXP (op1, 0), mode) & ~ mask) == 0)
230d793d
RS
9489 op0 = XEXP (op0, 0), op1 = XEXP (op1, 0);
9490 else
9491 break;
9492 }
9493
9494 /* If both operands are AND's of a paradoxical SUBREG by constant, the
9495 SUBREGs are of the same mode, and, in both cases, the AND would
9496 be redundant if the comparison was done in the narrower mode,
9497 do the comparison in the narrower mode (e.g., we are AND'ing with 1
951553af
RK
9498 and the operand's possibly nonzero bits are 0xffffff01; in that case
9499 if we only care about QImode, we don't need the AND). This case
9500 occurs if the output mode of an scc insn is not SImode and
7e4dc511
RK
9501 STORE_FLAG_VALUE == 1 (e.g., the 386).
9502
9503 Similarly, check for a case where the AND's are ZERO_EXTEND
9504 operations from some narrower mode even though a SUBREG is not
9505 present. */
230d793d
RS
9506
9507 else if (GET_CODE (op0) == AND && GET_CODE (op1) == AND
9508 && GET_CODE (XEXP (op0, 1)) == CONST_INT
7e4dc511 9509 && GET_CODE (XEXP (op1, 1)) == CONST_INT)
230d793d 9510 {
7e4dc511
RK
9511 rtx inner_op0 = XEXP (op0, 0);
9512 rtx inner_op1 = XEXP (op1, 0);
9513 HOST_WIDE_INT c0 = INTVAL (XEXP (op0, 1));
9514 HOST_WIDE_INT c1 = INTVAL (XEXP (op1, 1));
9515 int changed = 0;
9516
9517 if (GET_CODE (inner_op0) == SUBREG && GET_CODE (inner_op1) == SUBREG
9518 && (GET_MODE_SIZE (GET_MODE (inner_op0))
9519 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (inner_op0))))
9520 && (GET_MODE (SUBREG_REG (inner_op0))
9521 == GET_MODE (SUBREG_REG (inner_op1)))
729a2bc6 9522 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (inner_op0)))
7e4dc511 9523 <= HOST_BITS_PER_WIDE_INT)
01c82bbb 9524 && (0 == ((~c0) & nonzero_bits (SUBREG_REG (inner_op0),
729a2bc6 9525 GET_MODE (SUBREG_REG (inner_op0)))))
01c82bbb
RK
9526 && (0 == ((~c1) & nonzero_bits (SUBREG_REG (inner_op1),
9527 GET_MODE (SUBREG_REG (inner_op1))))))
7e4dc511
RK
9528 {
9529 op0 = SUBREG_REG (inner_op0);
9530 op1 = SUBREG_REG (inner_op1);
9531
9532 /* The resulting comparison is always unsigned since we masked
0f41302f 9533 off the original sign bit. */
7e4dc511
RK
9534 code = unsigned_condition (code);
9535
9536 changed = 1;
9537 }
230d793d 9538
7e4dc511
RK
9539 else if (c0 == c1)
9540 for (tmode = GET_CLASS_NARROWEST_MODE
9541 (GET_MODE_CLASS (GET_MODE (op0)));
9542 tmode != GET_MODE (op0); tmode = GET_MODE_WIDER_MODE (tmode))
e51712db 9543 if ((unsigned HOST_WIDE_INT) c0 == GET_MODE_MASK (tmode))
7e4dc511
RK
9544 {
9545 op0 = gen_lowpart_for_combine (tmode, inner_op0);
9546 op1 = gen_lowpart_for_combine (tmode, inner_op1);
66415c8b 9547 code = unsigned_condition (code);
7e4dc511
RK
9548 changed = 1;
9549 break;
9550 }
9551
9552 if (! changed)
9553 break;
230d793d 9554 }
3a19aabc 9555
ad25ba17
RK
9556 /* If both operands are NOT, we can strip off the outer operation
9557 and adjust the comparison code for swapped operands; similarly for
9558 NEG, except that this must be an equality comparison. */
9559 else if ((GET_CODE (op0) == NOT && GET_CODE (op1) == NOT)
9560 || (GET_CODE (op0) == NEG && GET_CODE (op1) == NEG
9561 && (code == EQ || code == NE)))
9562 op0 = XEXP (op0, 0), op1 = XEXP (op1, 0), code = swap_condition (code);
3a19aabc 9563
230d793d
RS
9564 else
9565 break;
9566 }
9567
9568 /* If the first operand is a constant, swap the operands and adjust the
3aceff0d
RK
9569 comparison code appropriately, but don't do this if the second operand
9570 is already a constant integer. */
9571 if (CONSTANT_P (op0) && GET_CODE (op1) != CONST_INT)
230d793d
RS
9572 {
9573 tem = op0, op0 = op1, op1 = tem;
9574 code = swap_condition (code);
9575 }
9576
9577 /* We now enter a loop during which we will try to simplify the comparison.
9578 For the most part, we only are concerned with comparisons with zero,
9579 but some things may really be comparisons with zero but not start
9580 out looking that way. */
9581
9582 while (GET_CODE (op1) == CONST_INT)
9583 {
9584 enum machine_mode mode = GET_MODE (op0);
9585 int mode_width = GET_MODE_BITSIZE (mode);
5f4f0e22 9586 unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
230d793d
RS
9587 int equality_comparison_p;
9588 int sign_bit_comparison_p;
9589 int unsigned_comparison_p;
5f4f0e22 9590 HOST_WIDE_INT const_op;
230d793d
RS
9591
9592 /* We only want to handle integral modes. This catches VOIDmode,
9593 CCmode, and the floating-point modes. An exception is that we
9594 can handle VOIDmode if OP0 is a COMPARE or a comparison
9595 operation. */
9596
9597 if (GET_MODE_CLASS (mode) != MODE_INT
9598 && ! (mode == VOIDmode
9599 && (GET_CODE (op0) == COMPARE
9600 || GET_RTX_CLASS (GET_CODE (op0)) == '<')))
9601 break;
9602
9603 /* Get the constant we are comparing against and turn off all bits
9604 not on in our mode. */
9605 const_op = INTVAL (op1);
5f4f0e22 9606 if (mode_width <= HOST_BITS_PER_WIDE_INT)
4803a34a 9607 const_op &= mask;
230d793d
RS
9608
9609 /* If we are comparing against a constant power of two and the value
951553af 9610 being compared can only have that single bit nonzero (e.g., it was
230d793d
RS
9611 `and'ed with that bit), we can replace this with a comparison
9612 with zero. */
9613 if (const_op
9614 && (code == EQ || code == NE || code == GE || code == GEU
9615 || code == LT || code == LTU)
5f4f0e22 9616 && mode_width <= HOST_BITS_PER_WIDE_INT
230d793d 9617 && exact_log2 (const_op) >= 0
e51712db 9618 && nonzero_bits (op0, mode) == (unsigned HOST_WIDE_INT) const_op)
230d793d
RS
9619 {
9620 code = (code == EQ || code == GE || code == GEU ? NE : EQ);
9621 op1 = const0_rtx, const_op = 0;
9622 }
9623
d0ab8cd3
RK
9624 /* Similarly, if we are comparing a value known to be either -1 or
9625 0 with -1, change it to the opposite comparison against zero. */
9626
9627 if (const_op == -1
9628 && (code == EQ || code == NE || code == GT || code == LE
9629 || code == GEU || code == LTU)
9630 && num_sign_bit_copies (op0, mode) == mode_width)
9631 {
9632 code = (code == EQ || code == LE || code == GEU ? NE : EQ);
9633 op1 = const0_rtx, const_op = 0;
9634 }
9635
230d793d 9636 /* Do some canonicalizations based on the comparison code. We prefer
4803a34a
RK
9637 comparisons against zero and then prefer equality comparisons.
9638 If we can reduce the size of a constant, we will do that too. */
230d793d
RS
9639
9640 switch (code)
9641 {
9642 case LT:
4803a34a
RK
9643 /* < C is equivalent to <= (C - 1) */
9644 if (const_op > 0)
230d793d 9645 {
4803a34a 9646 const_op -= 1;
5f4f0e22 9647 op1 = GEN_INT (const_op);
230d793d
RS
9648 code = LE;
9649 /* ... fall through to LE case below. */
9650 }
9651 else
9652 break;
9653
9654 case LE:
4803a34a
RK
9655 /* <= C is equivalent to < (C + 1); we do this for C < 0 */
9656 if (const_op < 0)
9657 {
9658 const_op += 1;
5f4f0e22 9659 op1 = GEN_INT (const_op);
4803a34a
RK
9660 code = LT;
9661 }
230d793d
RS
9662
9663 /* If we are doing a <= 0 comparison on a value known to have
9664 a zero sign bit, we can replace this with == 0. */
9665 else if (const_op == 0
5f4f0e22 9666 && mode_width <= HOST_BITS_PER_WIDE_INT
951553af 9667 && (nonzero_bits (op0, mode)
5f4f0e22 9668 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
230d793d
RS
9669 code = EQ;
9670 break;
9671
9672 case GE:
0f41302f 9673 /* >= C is equivalent to > (C - 1). */
4803a34a 9674 if (const_op > 0)
230d793d 9675 {
4803a34a 9676 const_op -= 1;
5f4f0e22 9677 op1 = GEN_INT (const_op);
230d793d
RS
9678 code = GT;
9679 /* ... fall through to GT below. */
9680 }
9681 else
9682 break;
9683
9684 case GT:
4803a34a
RK
9685 /* > C is equivalent to >= (C + 1); we do this for C < 0*/
9686 if (const_op < 0)
9687 {
9688 const_op += 1;
5f4f0e22 9689 op1 = GEN_INT (const_op);
4803a34a
RK
9690 code = GE;
9691 }
230d793d
RS
9692
9693 /* If we are doing a > 0 comparison on a value known to have
9694 a zero sign bit, we can replace this with != 0. */
9695 else if (const_op == 0
5f4f0e22 9696 && mode_width <= HOST_BITS_PER_WIDE_INT
951553af 9697 && (nonzero_bits (op0, mode)
5f4f0e22 9698 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
230d793d
RS
9699 code = NE;
9700 break;
9701
230d793d 9702 case LTU:
4803a34a
RK
9703 /* < C is equivalent to <= (C - 1). */
9704 if (const_op > 0)
9705 {
9706 const_op -= 1;
5f4f0e22 9707 op1 = GEN_INT (const_op);
4803a34a 9708 code = LEU;
0f41302f 9709 /* ... fall through ... */
4803a34a 9710 }
d0ab8cd3
RK
9711
9712 /* (unsigned) < 0x80000000 is equivalent to >= 0. */
f77aada2
JW
9713 else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
9714 && (const_op == (HOST_WIDE_INT) 1 << (mode_width - 1)))
d0ab8cd3
RK
9715 {
9716 const_op = 0, op1 = const0_rtx;
9717 code = GE;
9718 break;
9719 }
4803a34a
RK
9720 else
9721 break;
230d793d
RS
9722
9723 case LEU:
9724 /* unsigned <= 0 is equivalent to == 0 */
9725 if (const_op == 0)
9726 code = EQ;
d0ab8cd3 9727
0f41302f 9728 /* (unsigned) <= 0x7fffffff is equivalent to >= 0. */
f77aada2
JW
9729 else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
9730 && (const_op == ((HOST_WIDE_INT) 1 << (mode_width - 1)) - 1))
d0ab8cd3
RK
9731 {
9732 const_op = 0, op1 = const0_rtx;
9733 code = GE;
9734 }
230d793d
RS
9735 break;
9736
4803a34a
RK
9737 case GEU:
9738 /* >= C is equivalent to < (C - 1). */
9739 if (const_op > 1)
9740 {
9741 const_op -= 1;
5f4f0e22 9742 op1 = GEN_INT (const_op);
4803a34a 9743 code = GTU;
0f41302f 9744 /* ... fall through ... */
4803a34a 9745 }
d0ab8cd3
RK
9746
9747 /* (unsigned) >= 0x80000000 is equivalent to < 0. */
f77aada2
JW
9748 else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
9749 && (const_op == (HOST_WIDE_INT) 1 << (mode_width - 1)))
d0ab8cd3
RK
9750 {
9751 const_op = 0, op1 = const0_rtx;
9752 code = LT;
8b2e69e1 9753 break;
d0ab8cd3 9754 }
4803a34a
RK
9755 else
9756 break;
9757
230d793d
RS
9758 case GTU:
9759 /* unsigned > 0 is equivalent to != 0 */
9760 if (const_op == 0)
9761 code = NE;
d0ab8cd3
RK
9762
9763 /* (unsigned) > 0x7fffffff is equivalent to < 0. */
f77aada2
JW
9764 else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
9765 && (const_op == ((HOST_WIDE_INT) 1 << (mode_width - 1)) - 1))
d0ab8cd3
RK
9766 {
9767 const_op = 0, op1 = const0_rtx;
9768 code = LT;
9769 }
230d793d 9770 break;
e9a25f70
JL
9771
9772 default:
9773 break;
230d793d
RS
9774 }
9775
9776 /* Compute some predicates to simplify code below. */
9777
9778 equality_comparison_p = (code == EQ || code == NE);
9779 sign_bit_comparison_p = ((code == LT || code == GE) && const_op == 0);
9780 unsigned_comparison_p = (code == LTU || code == LEU || code == GTU
9781 || code == LEU);
9782
6139ff20
RK
9783 /* If this is a sign bit comparison and we can do arithmetic in
9784 MODE, say that we will only be needing the sign bit of OP0. */
9785 if (sign_bit_comparison_p
9786 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
9787 op0 = force_to_mode (op0, mode,
9788 ((HOST_WIDE_INT) 1
9789 << (GET_MODE_BITSIZE (mode) - 1)),
e3d616e3 9790 NULL_RTX, 0);
6139ff20 9791
230d793d
RS
9792 /* Now try cases based on the opcode of OP0. If none of the cases
9793 does a "continue", we exit this loop immediately after the
9794 switch. */
9795
9796 switch (GET_CODE (op0))
9797 {
9798 case ZERO_EXTRACT:
9799 /* If we are extracting a single bit from a variable position in
9800 a constant that has only a single bit set and are comparing it
9801 with zero, we can convert this into an equality comparison
d7cd794f 9802 between the position and the location of the single bit. */
230d793d 9803
230d793d
RS
9804 if (GET_CODE (XEXP (op0, 0)) == CONST_INT
9805 && XEXP (op0, 1) == const1_rtx
9806 && equality_comparison_p && const_op == 0
d7cd794f 9807 && (i = exact_log2 (INTVAL (XEXP (op0, 0)))) >= 0)
230d793d 9808 {
f76b9db2 9809 if (BITS_BIG_ENDIAN)
0d8e55d8 9810 {
d7cd794f 9811#ifdef HAVE_extzv
0d8e55d8
JL
9812 mode = insn_operand_mode[(int) CODE_FOR_extzv][1];
9813 if (mode == VOIDmode)
9814 mode = word_mode;
9815 i = (GET_MODE_BITSIZE (mode) - 1 - i);
d7cd794f 9816#else
0d8e55d8 9817 i = BITS_PER_WORD - 1 - i;
230d793d 9818#endif
0d8e55d8 9819 }
230d793d
RS
9820
9821 op0 = XEXP (op0, 2);
5f4f0e22 9822 op1 = GEN_INT (i);
230d793d
RS
9823 const_op = i;
9824
9825 /* Result is nonzero iff shift count is equal to I. */
9826 code = reverse_condition (code);
9827 continue;
9828 }
230d793d 9829
0f41302f 9830 /* ... fall through ... */
230d793d
RS
9831
9832 case SIGN_EXTRACT:
9833 tem = expand_compound_operation (op0);
9834 if (tem != op0)
9835 {
9836 op0 = tem;
9837 continue;
9838 }
9839 break;
9840
9841 case NOT:
9842 /* If testing for equality, we can take the NOT of the constant. */
9843 if (equality_comparison_p
9844 && (tem = simplify_unary_operation (NOT, mode, op1, mode)) != 0)
9845 {
9846 op0 = XEXP (op0, 0);
9847 op1 = tem;
9848 continue;
9849 }
9850
9851 /* If just looking at the sign bit, reverse the sense of the
9852 comparison. */
9853 if (sign_bit_comparison_p)
9854 {
9855 op0 = XEXP (op0, 0);
9856 code = (code == GE ? LT : GE);
9857 continue;
9858 }
9859 break;
9860
9861 case NEG:
9862 /* If testing for equality, we can take the NEG of the constant. */
9863 if (equality_comparison_p
9864 && (tem = simplify_unary_operation (NEG, mode, op1, mode)) != 0)
9865 {
9866 op0 = XEXP (op0, 0);
9867 op1 = tem;
9868 continue;
9869 }
9870
9871 /* The remaining cases only apply to comparisons with zero. */
9872 if (const_op != 0)
9873 break;
9874
9875 /* When X is ABS or is known positive,
9876 (neg X) is < 0 if and only if X != 0. */
9877
9878 if (sign_bit_comparison_p
9879 && (GET_CODE (XEXP (op0, 0)) == ABS
5f4f0e22 9880 || (mode_width <= HOST_BITS_PER_WIDE_INT
951553af 9881 && (nonzero_bits (XEXP (op0, 0), mode)
5f4f0e22 9882 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)))
230d793d
RS
9883 {
9884 op0 = XEXP (op0, 0);
9885 code = (code == LT ? NE : EQ);
9886 continue;
9887 }
9888
3bed8141 9889 /* If we have NEG of something whose two high-order bits are the
0f41302f 9890 same, we know that "(-a) < 0" is equivalent to "a > 0". */
3bed8141 9891 if (num_sign_bit_copies (op0, mode) >= 2)
230d793d
RS
9892 {
9893 op0 = XEXP (op0, 0);
9894 code = swap_condition (code);
9895 continue;
9896 }
9897 break;
9898
9899 case ROTATE:
9900 /* If we are testing equality and our count is a constant, we
9901 can perform the inverse operation on our RHS. */
9902 if (equality_comparison_p && GET_CODE (XEXP (op0, 1)) == CONST_INT
9903 && (tem = simplify_binary_operation (ROTATERT, mode,
9904 op1, XEXP (op0, 1))) != 0)
9905 {
9906 op0 = XEXP (op0, 0);
9907 op1 = tem;
9908 continue;
9909 }
9910
9911 /* If we are doing a < 0 or >= 0 comparison, it means we are testing
9912 a particular bit. Convert it to an AND of a constant of that
9913 bit. This will be converted into a ZERO_EXTRACT. */
9914 if (const_op == 0 && sign_bit_comparison_p
9915 && GET_CODE (XEXP (op0, 1)) == CONST_INT
5f4f0e22 9916 && mode_width <= HOST_BITS_PER_WIDE_INT)
230d793d 9917 {
5f4f0e22
CH
9918 op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
9919 ((HOST_WIDE_INT) 1
9920 << (mode_width - 1
9921 - INTVAL (XEXP (op0, 1)))));
230d793d
RS
9922 code = (code == LT ? NE : EQ);
9923 continue;
9924 }
9925
0f41302f 9926 /* ... fall through ... */
230d793d
RS
9927
9928 case ABS:
9929 /* ABS is ignorable inside an equality comparison with zero. */
9930 if (const_op == 0 && equality_comparison_p)
9931 {
9932 op0 = XEXP (op0, 0);
9933 continue;
9934 }
9935 break;
9936
9937
9938 case SIGN_EXTEND:
9939 /* Can simplify (compare (zero/sign_extend FOO) CONST)
9940 to (compare FOO CONST) if CONST fits in FOO's mode and we
9941 are either testing inequality or have an unsigned comparison
9942 with ZERO_EXTEND or a signed comparison with SIGN_EXTEND. */
9943 if (! unsigned_comparison_p
9944 && (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
5f4f0e22
CH
9945 <= HOST_BITS_PER_WIDE_INT)
9946 && ((unsigned HOST_WIDE_INT) const_op
e51712db 9947 < (((unsigned HOST_WIDE_INT) 1
5f4f0e22 9948 << (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0))) - 1)))))
230d793d
RS
9949 {
9950 op0 = XEXP (op0, 0);
9951 continue;
9952 }
9953 break;
9954
9955 case SUBREG:
a687e897 9956 /* Check for the case where we are comparing A - C1 with C2,
abc95ed3 9957 both constants are smaller than 1/2 the maximum positive
a687e897
RK
9958 value in MODE, and the comparison is equality or unsigned.
9959 In that case, if A is either zero-extended to MODE or has
9960 sufficient sign bits so that the high-order bit in MODE
9961 is a copy of the sign in the inner mode, we can prove that it is
9962 safe to do the operation in the wider mode. This simplifies
9963 many range checks. */
9964
9965 if (mode_width <= HOST_BITS_PER_WIDE_INT
9966 && subreg_lowpart_p (op0)
9967 && GET_CODE (SUBREG_REG (op0)) == PLUS
9968 && GET_CODE (XEXP (SUBREG_REG (op0), 1)) == CONST_INT
9969 && INTVAL (XEXP (SUBREG_REG (op0), 1)) < 0
9970 && (- INTVAL (XEXP (SUBREG_REG (op0), 1))
e51712db 9971 < (HOST_WIDE_INT)(GET_MODE_MASK (mode) / 2))
adb7a1cb 9972 && (unsigned HOST_WIDE_INT) const_op < GET_MODE_MASK (mode) / 2
951553af
RK
9973 && (0 == (nonzero_bits (XEXP (SUBREG_REG (op0), 0),
9974 GET_MODE (SUBREG_REG (op0)))
a687e897
RK
9975 & ~ GET_MODE_MASK (mode))
9976 || (num_sign_bit_copies (XEXP (SUBREG_REG (op0), 0),
9977 GET_MODE (SUBREG_REG (op0)))
9978 > (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0)))
9979 - GET_MODE_BITSIZE (mode)))))
9980 {
9981 op0 = SUBREG_REG (op0);
9982 continue;
9983 }
9984
fe0cf571
RK
9985 /* If the inner mode is narrower and we are extracting the low part,
9986 we can treat the SUBREG as if it were a ZERO_EXTEND. */
9987 if (subreg_lowpart_p (op0)
89f1c7f2
RS
9988 && GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0))) < mode_width)
9989 /* Fall through */ ;
9990 else
230d793d
RS
9991 break;
9992
0f41302f 9993 /* ... fall through ... */
230d793d
RS
9994
9995 case ZERO_EXTEND:
9996 if ((unsigned_comparison_p || equality_comparison_p)
9997 && (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
5f4f0e22
CH
9998 <= HOST_BITS_PER_WIDE_INT)
9999 && ((unsigned HOST_WIDE_INT) const_op
230d793d
RS
10000 < GET_MODE_MASK (GET_MODE (XEXP (op0, 0)))))
10001 {
10002 op0 = XEXP (op0, 0);
10003 continue;
10004 }
10005 break;
10006
10007 case PLUS:
20fdd649 10008 /* (eq (plus X A) B) -> (eq X (minus B A)). We can only do
5089e22e 10009 this for equality comparisons due to pathological cases involving
230d793d 10010 overflows. */
20fdd649
RK
10011 if (equality_comparison_p
10012 && 0 != (tem = simplify_binary_operation (MINUS, mode,
10013 op1, XEXP (op0, 1))))
230d793d
RS
10014 {
10015 op0 = XEXP (op0, 0);
10016 op1 = tem;
10017 continue;
10018 }
10019
10020 /* (plus (abs X) (const_int -1)) is < 0 if and only if X == 0. */
10021 if (const_op == 0 && XEXP (op0, 1) == constm1_rtx
10022 && GET_CODE (XEXP (op0, 0)) == ABS && sign_bit_comparison_p)
10023 {
10024 op0 = XEXP (XEXP (op0, 0), 0);
10025 code = (code == LT ? EQ : NE);
10026 continue;
10027 }
10028 break;
10029
10030 case MINUS:
20fdd649
RK
10031 /* (eq (minus A B) C) -> (eq A (plus B C)) or
10032 (eq B (minus A C)), whichever simplifies. We can only do
10033 this for equality comparisons due to pathological cases involving
10034 overflows. */
10035 if (equality_comparison_p
10036 && 0 != (tem = simplify_binary_operation (PLUS, mode,
10037 XEXP (op0, 1), op1)))
10038 {
10039 op0 = XEXP (op0, 0);
10040 op1 = tem;
10041 continue;
10042 }
10043
10044 if (equality_comparison_p
10045 && 0 != (tem = simplify_binary_operation (MINUS, mode,
10046 XEXP (op0, 0), op1)))
10047 {
10048 op0 = XEXP (op0, 1);
10049 op1 = tem;
10050 continue;
10051 }
10052
230d793d
RS
10053 /* The sign bit of (minus (ashiftrt X C) X), where C is the number
10054 of bits in X minus 1, is one iff X > 0. */
10055 if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == ASHIFTRT
10056 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
10057 && INTVAL (XEXP (XEXP (op0, 0), 1)) == mode_width - 1
10058 && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
10059 {
10060 op0 = XEXP (op0, 1);
10061 code = (code == GE ? LE : GT);
10062 continue;
10063 }
10064 break;
10065
10066 case XOR:
10067 /* (eq (xor A B) C) -> (eq A (xor B C)). This is a simplification
10068 if C is zero or B is a constant. */
10069 if (equality_comparison_p
10070 && 0 != (tem = simplify_binary_operation (XOR, mode,
10071 XEXP (op0, 1), op1)))
10072 {
10073 op0 = XEXP (op0, 0);
10074 op1 = tem;
10075 continue;
10076 }
10077 break;
10078
10079 case EQ: case NE:
10080 case LT: case LTU: case LE: case LEU:
10081 case GT: case GTU: case GE: case GEU:
10082 /* We can't do anything if OP0 is a condition code value, rather
10083 than an actual data value. */
10084 if (const_op != 0
10085#ifdef HAVE_cc0
10086 || XEXP (op0, 0) == cc0_rtx
10087#endif
10088 || GET_MODE_CLASS (GET_MODE (XEXP (op0, 0))) == MODE_CC)
10089 break;
10090
10091 /* Get the two operands being compared. */
10092 if (GET_CODE (XEXP (op0, 0)) == COMPARE)
10093 tem = XEXP (XEXP (op0, 0), 0), tem1 = XEXP (XEXP (op0, 0), 1);
10094 else
10095 tem = XEXP (op0, 0), tem1 = XEXP (op0, 1);
10096
10097 /* Check for the cases where we simply want the result of the
10098 earlier test or the opposite of that result. */
10099 if (code == NE
10100 || (code == EQ && reversible_comparison_p (op0))
5f4f0e22 10101 || (GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
3f508eca 10102 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
230d793d 10103 && (STORE_FLAG_VALUE
5f4f0e22
CH
10104 & (((HOST_WIDE_INT) 1
10105 << (GET_MODE_BITSIZE (GET_MODE (op0)) - 1))))
230d793d
RS
10106 && (code == LT
10107 || (code == GE && reversible_comparison_p (op0)))))
10108 {
10109 code = (code == LT || code == NE
10110 ? GET_CODE (op0) : reverse_condition (GET_CODE (op0)));
10111 op0 = tem, op1 = tem1;
10112 continue;
10113 }
10114 break;
10115
10116 case IOR:
10117 /* The sign bit of (ior (plus X (const_int -1)) X) is non-zero
10118 iff X <= 0. */
10119 if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == PLUS
10120 && XEXP (XEXP (op0, 0), 1) == constm1_rtx
10121 && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
10122 {
10123 op0 = XEXP (op0, 1);
10124 code = (code == GE ? GT : LE);
10125 continue;
10126 }
10127 break;
10128
10129 case AND:
10130 /* Convert (and (xshift 1 X) Y) to (and (lshiftrt Y X) 1). This
10131 will be converted to a ZERO_EXTRACT later. */
10132 if (const_op == 0 && equality_comparison_p
45620ed4 10133 && GET_CODE (XEXP (op0, 0)) == ASHIFT
230d793d
RS
10134 && XEXP (XEXP (op0, 0), 0) == const1_rtx)
10135 {
10136 op0 = simplify_and_const_int
10137 (op0, mode, gen_rtx_combine (LSHIFTRT, mode,
10138 XEXP (op0, 1),
10139 XEXP (XEXP (op0, 0), 1)),
5f4f0e22 10140 (HOST_WIDE_INT) 1);
230d793d
RS
10141 continue;
10142 }
10143
10144 /* If we are comparing (and (lshiftrt X C1) C2) for equality with
10145 zero and X is a comparison and C1 and C2 describe only bits set
10146 in STORE_FLAG_VALUE, we can compare with X. */
10147 if (const_op == 0 && equality_comparison_p
5f4f0e22 10148 && mode_width <= HOST_BITS_PER_WIDE_INT
230d793d
RS
10149 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10150 && GET_CODE (XEXP (op0, 0)) == LSHIFTRT
10151 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
10152 && INTVAL (XEXP (XEXP (op0, 0), 1)) >= 0
5f4f0e22 10153 && INTVAL (XEXP (XEXP (op0, 0), 1)) < HOST_BITS_PER_WIDE_INT)
230d793d
RS
10154 {
10155 mask = ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode))
10156 << INTVAL (XEXP (XEXP (op0, 0), 1)));
10157 if ((~ STORE_FLAG_VALUE & mask) == 0
10158 && (GET_RTX_CLASS (GET_CODE (XEXP (XEXP (op0, 0), 0))) == '<'
10159 || ((tem = get_last_value (XEXP (XEXP (op0, 0), 0))) != 0
10160 && GET_RTX_CLASS (GET_CODE (tem)) == '<')))
10161 {
10162 op0 = XEXP (XEXP (op0, 0), 0);
10163 continue;
10164 }
10165 }
10166
10167 /* If we are doing an equality comparison of an AND of a bit equal
10168 to the sign bit, replace this with a LT or GE comparison of
10169 the underlying value. */
10170 if (equality_comparison_p
10171 && const_op == 0
10172 && GET_CODE (XEXP (op0, 1)) == CONST_INT
5f4f0e22 10173 && mode_width <= HOST_BITS_PER_WIDE_INT
230d793d 10174 && ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode))
e51712db 10175 == (unsigned HOST_WIDE_INT) 1 << (mode_width - 1)))
230d793d
RS
10176 {
10177 op0 = XEXP (op0, 0);
10178 code = (code == EQ ? GE : LT);
10179 continue;
10180 }
10181
10182 /* If this AND operation is really a ZERO_EXTEND from a narrower
10183 mode, the constant fits within that mode, and this is either an
10184 equality or unsigned comparison, try to do this comparison in
10185 the narrower mode. */
10186 if ((equality_comparison_p || unsigned_comparison_p)
10187 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10188 && (i = exact_log2 ((INTVAL (XEXP (op0, 1))
10189 & GET_MODE_MASK (mode))
10190 + 1)) >= 0
10191 && const_op >> i == 0
10192 && (tmode = mode_for_size (i, MODE_INT, 1)) != BLKmode)
10193 {
10194 op0 = gen_lowpart_for_combine (tmode, XEXP (op0, 0));
10195 continue;
10196 }
e5e809f4
JL
10197
10198 /* If this is (and:M1 (subreg:M2 X 0) (const_int C1)) where C1 fits
10199 in both M1 and M2 and the SUBREG is either paradoxical or
10200 represents the low part, permute the SUBREG and the AND and
10201 try again. */
10202 if (GET_CODE (XEXP (op0, 0)) == SUBREG
10203 && ((mode_width
10204 >= GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (XEXP (op0, 0)))))
9ec36da5
JL
10205#ifdef WORD_REGISTER_OPERATIONS
10206 || subreg_lowpart_p (XEXP (op0, 0))
10207#endif
10208 )
adc05e6c
JL
10209#ifndef WORD_REGISTER_OPERATIONS
10210 /* It is unsafe to commute the AND into the SUBREG if the SUBREG
10211 is paradoxical and WORD_REGISTER_OPERATIONS is not defined.
10212 As originally written the upper bits have a defined value
10213 due to the AND operation. However, if we commute the AND
10214 inside the SUBREG then they no longer have defined values
10215 and the meaning of the code has been changed. */
10216 && (GET_MODE_SIZE (GET_MODE (XEXP (op0, 0)))
10217 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (op0, 0)))))
10218#endif
e5e809f4
JL
10219 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10220 && mode_width <= HOST_BITS_PER_WIDE_INT
10221 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (XEXP (op0, 0))))
10222 <= HOST_BITS_PER_WIDE_INT)
10223 && (INTVAL (XEXP (op0, 1)) & ~ mask) == 0
10224 && 0 == (~ GET_MODE_MASK (GET_MODE (SUBREG_REG (XEXP (op0, 0))))
9ec36da5 10225 & INTVAL (XEXP (op0, 1)))
e51712db
KG
10226 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (op0, 1)) != mask
10227 && ((unsigned HOST_WIDE_INT) INTVAL (XEXP (op0, 1))
9ec36da5 10228 != GET_MODE_MASK (GET_MODE (SUBREG_REG (XEXP (op0, 0))))))
e5e809f4
JL
10229
10230 {
10231 op0
10232 = gen_lowpart_for_combine
10233 (mode,
10234 gen_binary (AND, GET_MODE (SUBREG_REG (XEXP (op0, 0))),
10235 SUBREG_REG (XEXP (op0, 0)), XEXP (op0, 1)));
10236 continue;
10237 }
10238
230d793d
RS
10239 break;
10240
10241 case ASHIFT:
45620ed4 10242 /* If we have (compare (ashift FOO N) (const_int C)) and
230d793d 10243 the high order N bits of FOO (N+1 if an inequality comparison)
951553af 10244 are known to be zero, we can do this by comparing FOO with C
230d793d
RS
10245 shifted right N bits so long as the low-order N bits of C are
10246 zero. */
10247 if (GET_CODE (XEXP (op0, 1)) == CONST_INT
10248 && INTVAL (XEXP (op0, 1)) >= 0
10249 && ((INTVAL (XEXP (op0, 1)) + ! equality_comparison_p)
5f4f0e22
CH
10250 < HOST_BITS_PER_WIDE_INT)
10251 && ((const_op
34785d05 10252 & (((HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1))) - 1)) == 0)
5f4f0e22 10253 && mode_width <= HOST_BITS_PER_WIDE_INT
951553af 10254 && (nonzero_bits (XEXP (op0, 0), mode)
230d793d
RS
10255 & ~ (mask >> (INTVAL (XEXP (op0, 1))
10256 + ! equality_comparison_p))) == 0)
10257 {
10258 const_op >>= INTVAL (XEXP (op0, 1));
5f4f0e22 10259 op1 = GEN_INT (const_op);
230d793d
RS
10260 op0 = XEXP (op0, 0);
10261 continue;
10262 }
10263
dfbe1b2f 10264 /* If we are doing a sign bit comparison, it means we are testing
230d793d 10265 a particular bit. Convert it to the appropriate AND. */
dfbe1b2f 10266 if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 1)) == CONST_INT
5f4f0e22 10267 && mode_width <= HOST_BITS_PER_WIDE_INT)
230d793d 10268 {
5f4f0e22
CH
10269 op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
10270 ((HOST_WIDE_INT) 1
10271 << (mode_width - 1
10272 - INTVAL (XEXP (op0, 1)))));
230d793d
RS
10273 code = (code == LT ? NE : EQ);
10274 continue;
10275 }
dfbe1b2f
RK
10276
10277 /* If this an equality comparison with zero and we are shifting
10278 the low bit to the sign bit, we can convert this to an AND of the
10279 low-order bit. */
10280 if (const_op == 0 && equality_comparison_p
10281 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10282 && INTVAL (XEXP (op0, 1)) == mode_width - 1)
10283 {
5f4f0e22
CH
10284 op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
10285 (HOST_WIDE_INT) 1);
dfbe1b2f
RK
10286 continue;
10287 }
230d793d
RS
10288 break;
10289
10290 case ASHIFTRT:
d0ab8cd3
RK
10291 /* If this is an equality comparison with zero, we can do this
10292 as a logical shift, which might be much simpler. */
10293 if (equality_comparison_p && const_op == 0
10294 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
10295 {
10296 op0 = simplify_shift_const (NULL_RTX, LSHIFTRT, mode,
10297 XEXP (op0, 0),
10298 INTVAL (XEXP (op0, 1)));
10299 continue;
10300 }
10301
230d793d
RS
10302 /* If OP0 is a sign extension and CODE is not an unsigned comparison,
10303 do the comparison in a narrower mode. */
10304 if (! unsigned_comparison_p
10305 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10306 && GET_CODE (XEXP (op0, 0)) == ASHIFT
10307 && XEXP (op0, 1) == XEXP (XEXP (op0, 0), 1)
10308 && (tmode = mode_for_size (mode_width - INTVAL (XEXP (op0, 1)),
22331794 10309 MODE_INT, 1)) != BLKmode
5f4f0e22
CH
10310 && ((unsigned HOST_WIDE_INT) const_op <= GET_MODE_MASK (tmode)
10311 || ((unsigned HOST_WIDE_INT) - const_op
10312 <= GET_MODE_MASK (tmode))))
230d793d
RS
10313 {
10314 op0 = gen_lowpart_for_combine (tmode, XEXP (XEXP (op0, 0), 0));
10315 continue;
10316 }
10317
0f41302f 10318 /* ... fall through ... */
230d793d
RS
10319 case LSHIFTRT:
10320 /* If we have (compare (xshiftrt FOO N) (const_int C)) and
951553af 10321 the low order N bits of FOO are known to be zero, we can do this
230d793d
RS
10322 by comparing FOO with C shifted left N bits so long as no
10323 overflow occurs. */
10324 if (GET_CODE (XEXP (op0, 1)) == CONST_INT
10325 && INTVAL (XEXP (op0, 1)) >= 0
5f4f0e22
CH
10326 && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
10327 && mode_width <= HOST_BITS_PER_WIDE_INT
951553af 10328 && (nonzero_bits (XEXP (op0, 0), mode)
5f4f0e22 10329 & (((HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1))) - 1)) == 0
230d793d
RS
10330 && (const_op == 0
10331 || (floor_log2 (const_op) + INTVAL (XEXP (op0, 1))
10332 < mode_width)))
10333 {
10334 const_op <<= INTVAL (XEXP (op0, 1));
5f4f0e22 10335 op1 = GEN_INT (const_op);
230d793d
RS
10336 op0 = XEXP (op0, 0);
10337 continue;
10338 }
10339
10340 /* If we are using this shift to extract just the sign bit, we
10341 can replace this with an LT or GE comparison. */
10342 if (const_op == 0
10343 && (equality_comparison_p || sign_bit_comparison_p)
10344 && GET_CODE (XEXP (op0, 1)) == CONST_INT
10345 && INTVAL (XEXP (op0, 1)) == mode_width - 1)
10346 {
10347 op0 = XEXP (op0, 0);
10348 code = (code == NE || code == GT ? LT : GE);
10349 continue;
10350 }
10351 break;
e9a25f70
JL
10352
10353 default:
10354 break;
230d793d
RS
10355 }
10356
10357 break;
10358 }
10359
10360 /* Now make any compound operations involved in this comparison. Then,
76d31c63 10361 check for an outmost SUBREG on OP0 that is not doing anything or is
230d793d
RS
10362 paradoxical. The latter case can only occur when it is known that the
10363 "extra" bits will be zero. Therefore, it is safe to remove the SUBREG.
10364 We can never remove a SUBREG for a non-equality comparison because the
10365 sign bit is in a different place in the underlying object. */
10366
10367 op0 = make_compound_operation (op0, op1 == const0_rtx ? COMPARE : SET);
10368 op1 = make_compound_operation (op1, SET);
10369
10370 if (GET_CODE (op0) == SUBREG && subreg_lowpart_p (op0)
10371 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
10372 && (code == NE || code == EQ)
10373 && ((GET_MODE_SIZE (GET_MODE (op0))
10374 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0))))))
10375 {
10376 op0 = SUBREG_REG (op0);
10377 op1 = gen_lowpart_for_combine (GET_MODE (op0), op1);
10378 }
10379
10380 else if (GET_CODE (op0) == SUBREG && subreg_lowpart_p (op0)
10381 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
10382 && (code == NE || code == EQ)
ac49a949
RS
10383 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0)))
10384 <= HOST_BITS_PER_WIDE_INT)
951553af 10385 && (nonzero_bits (SUBREG_REG (op0), GET_MODE (SUBREG_REG (op0)))
230d793d
RS
10386 & ~ GET_MODE_MASK (GET_MODE (op0))) == 0
10387 && (tem = gen_lowpart_for_combine (GET_MODE (SUBREG_REG (op0)),
10388 op1),
951553af 10389 (nonzero_bits (tem, GET_MODE (SUBREG_REG (op0)))
230d793d
RS
10390 & ~ GET_MODE_MASK (GET_MODE (op0))) == 0))
10391 op0 = SUBREG_REG (op0), op1 = tem;
10392
10393 /* We now do the opposite procedure: Some machines don't have compare
10394 insns in all modes. If OP0's mode is an integer mode smaller than a
10395 word and we can't do a compare in that mode, see if there is a larger
a687e897
RK
10396 mode for which we can do the compare. There are a number of cases in
10397 which we can use the wider mode. */
230d793d
RS
10398
10399 mode = GET_MODE (op0);
10400 if (mode != VOIDmode && GET_MODE_CLASS (mode) == MODE_INT
10401 && GET_MODE_SIZE (mode) < UNITS_PER_WORD
10402 && cmp_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
10403 for (tmode = GET_MODE_WIDER_MODE (mode);
5f4f0e22
CH
10404 (tmode != VOIDmode
10405 && GET_MODE_BITSIZE (tmode) <= HOST_BITS_PER_WIDE_INT);
230d793d 10406 tmode = GET_MODE_WIDER_MODE (tmode))
a687e897 10407 if (cmp_optab->handlers[(int) tmode].insn_code != CODE_FOR_nothing)
230d793d 10408 {
951553af 10409 /* If the only nonzero bits in OP0 and OP1 are those in the
a687e897
RK
10410 narrower mode and this is an equality or unsigned comparison,
10411 we can use the wider mode. Similarly for sign-extended
7e4dc511 10412 values, in which case it is true for all comparisons. */
a687e897
RK
10413 if (((code == EQ || code == NE
10414 || code == GEU || code == GTU || code == LEU || code == LTU)
951553af
RK
10415 && (nonzero_bits (op0, tmode) & ~ GET_MODE_MASK (mode)) == 0
10416 && (nonzero_bits (op1, tmode) & ~ GET_MODE_MASK (mode)) == 0)
7e4dc511
RK
10417 || ((num_sign_bit_copies (op0, tmode)
10418 > GET_MODE_BITSIZE (tmode) - GET_MODE_BITSIZE (mode))
a687e897 10419 && (num_sign_bit_copies (op1, tmode)
58744483 10420 > GET_MODE_BITSIZE (tmode) - GET_MODE_BITSIZE (mode))))
a687e897
RK
10421 {
10422 op0 = gen_lowpart_for_combine (tmode, op0);
10423 op1 = gen_lowpart_for_combine (tmode, op1);
10424 break;
10425 }
230d793d 10426
a687e897
RK
10427 /* If this is a test for negative, we can make an explicit
10428 test of the sign bit. */
10429
10430 if (op1 == const0_rtx && (code == LT || code == GE)
10431 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
230d793d 10432 {
a687e897
RK
10433 op0 = gen_binary (AND, tmode,
10434 gen_lowpart_for_combine (tmode, op0),
5f4f0e22
CH
10435 GEN_INT ((HOST_WIDE_INT) 1
10436 << (GET_MODE_BITSIZE (mode) - 1)));
230d793d 10437 code = (code == LT) ? NE : EQ;
a687e897 10438 break;
230d793d 10439 }
230d793d
RS
10440 }
10441
b7a775b2
RK
10442#ifdef CANONICALIZE_COMPARISON
10443 /* If this machine only supports a subset of valid comparisons, see if we
10444 can convert an unsupported one into a supported one. */
10445 CANONICALIZE_COMPARISON (code, op0, op1);
10446#endif
10447
230d793d
RS
10448 *pop0 = op0;
10449 *pop1 = op1;
10450
10451 return code;
10452}
10453\f
10454/* Return 1 if we know that X, a comparison operation, is not operating
10455 on a floating-point value or is EQ or NE, meaning that we can safely
10456 reverse it. */
10457
10458static int
10459reversible_comparison_p (x)
10460 rtx x;
10461{
10462 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
7e2a0d8e 10463 || flag_fast_math
230d793d
RS
10464 || GET_CODE (x) == NE || GET_CODE (x) == EQ)
10465 return 1;
10466
10467 switch (GET_MODE_CLASS (GET_MODE (XEXP (x, 0))))
10468 {
10469 case MODE_INT:
3ad2180a
RK
10470 case MODE_PARTIAL_INT:
10471 case MODE_COMPLEX_INT:
230d793d
RS
10472 return 1;
10473
10474 case MODE_CC:
9210df58
RK
10475 /* If the mode of the condition codes tells us that this is safe,
10476 we need look no further. */
10477 if (REVERSIBLE_CC_MODE (GET_MODE (XEXP (x, 0))))
10478 return 1;
10479
10480 /* Otherwise try and find where the condition codes were last set and
10481 use that. */
230d793d
RS
10482 x = get_last_value (XEXP (x, 0));
10483 return (x && GET_CODE (x) == COMPARE
3ad2180a 10484 && ! FLOAT_MODE_P (GET_MODE (XEXP (x, 0))));
e9a25f70
JL
10485
10486 default:
10487 return 0;
230d793d 10488 }
230d793d
RS
10489}
10490\f
10491/* Utility function for following routine. Called when X is part of a value
10492 being stored into reg_last_set_value. Sets reg_last_set_table_tick
10493 for each register mentioned. Similar to mention_regs in cse.c */
10494
10495static void
10496update_table_tick (x)
10497 rtx x;
10498{
10499 register enum rtx_code code = GET_CODE (x);
6f7d635c 10500 register const char *fmt = GET_RTX_FORMAT (code);
230d793d
RS
10501 register int i;
10502
10503 if (code == REG)
10504 {
10505 int regno = REGNO (x);
10506 int endregno = regno + (regno < FIRST_PSEUDO_REGISTER
10507 ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
10508
10509 for (i = regno; i < endregno; i++)
10510 reg_last_set_table_tick[i] = label_tick;
10511
10512 return;
10513 }
10514
10515 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
10516 /* Note that we can't have an "E" in values stored; see
10517 get_last_value_validate. */
10518 if (fmt[i] == 'e')
10519 update_table_tick (XEXP (x, i));
10520}
10521
/* Record that REG is set to VALUE in insn INSN.  If VALUE is zero, we
   are saying that the register is clobbered and we no longer know its
   value.  If INSN is zero, don't update reg_last_set; this is only permitted
   with VALUE also zero and is used to invalidate the register.

   Updates the per-register tracking arrays (reg_last_set, reg_last_set_value,
   reg_last_set_mode, reg_last_set_nonzero_bits, reg_last_set_sign_bit_copies,
   reg_last_death, reg_last_set_label, reg_last_set_invalid) for every
   hard register covered by REG.  */

static void
record_value_for_reg (reg, insn, value)
     rtx reg;
     rtx insn;
     rtx value;
{
  int regno = REGNO (reg);
  /* Hard registers may span several consecutive registers; pseudos are
     always a single entry.  */
  int endregno = regno + (regno < FIRST_PSEUDO_REGISTER
			  ? HARD_REGNO_NREGS (regno, GET_MODE (reg)) : 1);
  int i;

  /* If VALUE contains REG and we have a previous value for REG, substitute
     the previous value, so the recorded value does not refer to the register
     it describes.  */
  if (value && insn && reg_overlap_mentioned_p (reg, value))
    {
      rtx tem;

      /* Set things up so get_last_value is allowed to see anything set up to
	 our insn.  */
      subst_low_cuid = INSN_CUID (insn);
      tem = get_last_value (reg);

      if (tem)
	value = replace_rtx (copy_rtx (value), reg, tem);
    }

  /* For each register modified, show we don't know its value, that
     we don't know about its bitwise content, that its value has been
     updated, and that we don't know the location of the death of the
     register.  */
  for (i = regno; i < endregno; i ++)
    {
      if (insn)
	reg_last_set[i] = insn;
      reg_last_set_value[i] = 0;
      reg_last_set_mode[i] = 0;
      reg_last_set_nonzero_bits[i] = 0;
      reg_last_set_sign_bit_copies[i] = 0;
      reg_last_death[i] = 0;
    }

  /* Mark registers that are being referenced in this value.  */
  if (value)
    update_table_tick (value);

  /* Now update the status of each register being set.
     If someone is using this register in this block, set this register
     to invalid since we will get confused between the two lives in this
     basic block.  This makes using this register always invalid.  In cse, we
     scan the table to invalidate all entries using this register, but this
     is too much work for us.  */

  for (i = regno; i < endregno; i++)
    {
      reg_last_set_label[i] = label_tick;
      if (value && reg_last_set_table_tick[i] == label_tick)
	reg_last_set_invalid[i] = 1;
      else
	reg_last_set_invalid[i] = 0;
    }

  /* The value being assigned might refer to X (like in "x++;").  In that
     case, we must replace it with (clobber (const_int 0)) to prevent
     infinite loops.  */
  if (value && ! get_last_value_validate (&value, insn,
					  reg_last_set_label[regno], 0))
    {
      /* First validation failed; copy and retry with REPLACE so invalid
	 references become CLOBBERs; give up entirely if even that fails.  */
      value = copy_rtx (value);
      if (! get_last_value_validate (&value, insn,
				     reg_last_set_label[regno], 1))
	value = 0;
    }

  /* For the main register being modified, update the value, the mode, the
     nonzero bits, and the number of sign bit copies.  */

  reg_last_set_value[regno] = value;

  if (value)
    {
      /* INSN is nonzero here, since a nonzero VALUE requires a nonzero
	 INSN per the contract above.  */
      subst_low_cuid = INSN_CUID (insn);
      reg_last_set_mode[regno] = GET_MODE (reg);
      reg_last_set_nonzero_bits[regno] = nonzero_bits (value, GET_MODE (reg));
      reg_last_set_sign_bit_copies[regno]
	= num_sign_bit_copies (value, GET_MODE (reg));
    }
}
10614
10615/* Used for communication between the following two routines. */
10616static rtx record_dead_insn;
10617
10618/* Called via note_stores from record_dead_and_set_regs to handle one
10619 SET or CLOBBER in an insn. */
10620
10621static void
10622record_dead_and_set_regs_1 (dest, setter)
10623 rtx dest, setter;
10624{
ca89d290
RK
10625 if (GET_CODE (dest) == SUBREG)
10626 dest = SUBREG_REG (dest);
10627
230d793d
RS
10628 if (GET_CODE (dest) == REG)
10629 {
10630 /* If we are setting the whole register, we know its value. Otherwise
10631 show that we don't know the value. We can handle SUBREG in
10632 some cases. */
10633 if (GET_CODE (setter) == SET && dest == SET_DEST (setter))
10634 record_value_for_reg (dest, record_dead_insn, SET_SRC (setter));
10635 else if (GET_CODE (setter) == SET
10636 && GET_CODE (SET_DEST (setter)) == SUBREG
10637 && SUBREG_REG (SET_DEST (setter)) == dest
90bf8081 10638 && GET_MODE_BITSIZE (GET_MODE (dest)) <= BITS_PER_WORD
230d793d 10639 && subreg_lowpart_p (SET_DEST (setter)))
d0ab8cd3
RK
10640 record_value_for_reg (dest, record_dead_insn,
10641 gen_lowpart_for_combine (GET_MODE (dest),
10642 SET_SRC (setter)));
230d793d 10643 else
5f4f0e22 10644 record_value_for_reg (dest, record_dead_insn, NULL_RTX);
230d793d
RS
10645 }
10646 else if (GET_CODE (dest) == MEM
10647 /* Ignore pushes, they clobber nothing. */
10648 && ! push_operand (dest, GET_MODE (dest)))
10649 mem_last_set = INSN_CUID (record_dead_insn);
10650}
10651
/* Update the records of when each REG was most recently set or killed
   for the things done by INSN.  This is the last thing done in processing
   INSN in the combiner loop.

   We update reg_last_set, reg_last_set_value, reg_last_set_mode,
   reg_last_set_nonzero_bits, reg_last_set_sign_bit_copies, reg_last_death,
   and also the similar information mem_last_set (which insn most recently
   modified memory) and last_call_cuid (which insn was the most recent
   subroutine call).  */

static void
record_dead_and_set_regs (insn)
     rtx insn;
{
  register rtx link;
  int i;

  /* Walk the REG_NOTES: REG_DEAD notes record where registers die;
     REG_INC notes mean the register is auto-modified, so its tracked
     value must be invalidated.  */
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    {
      if (REG_NOTE_KIND (link) == REG_DEAD
	  && GET_CODE (XEXP (link, 0)) == REG)
	{
	  int regno = REGNO (XEXP (link, 0));
	  /* A hard register may span several consecutive registers.  */
	  int endregno
	    = regno + (regno < FIRST_PSEUDO_REGISTER
		       ? HARD_REGNO_NREGS (regno, GET_MODE (XEXP (link, 0)))
		       : 1);

	  for (i = regno; i < endregno; i++)
	    reg_last_death[i] = insn;
	}
      else if (REG_NOTE_KIND (link) == REG_INC)
	record_value_for_reg (XEXP (link, 0), insn, NULL_RTX);
    }

  /* A call clobbers all call-used hard registers; forget everything we
     know about them, and record this insn as the latest call and as a
     memory modifier.  */
  if (GET_CODE (insn) == CALL_INSN)
    {
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	if (call_used_regs[i])
	  {
	    reg_last_set_value[i] = 0;
	    reg_last_set_mode[i] = 0;
	    reg_last_set_nonzero_bits[i] = 0;
	    reg_last_set_sign_bit_copies[i] = 0;
	    reg_last_death[i] = 0;
	  }

      last_call_cuid = mem_last_set = INSN_CUID (insn);
    }

  /* Finally, record every SET and CLOBBER in the insn's pattern via
     record_dead_and_set_regs_1; record_dead_insn is the file-scope
     channel by which that callback learns which insn it is handling.  */
  record_dead_insn = insn;
  note_stores (PATTERN (insn), record_dead_and_set_regs_1);
}
10705\f
/* Utility routine for the following function.  Verify that all the registers
   mentioned in *LOC are valid when *LOC was part of a value set when
   label_tick == TICK.  INSN is the insn in which the value was recorded.
   Return 0 if some are not.

   If REPLACE is non-zero, replace the invalid reference with
   (clobber (const_int 0)) and return 1.  This replacement is useful because
   we often can get useful information about the form of a value (e.g., if
   it was produced by a shift that always produces -1 or 0) even though
   we don't know exactly what registers it was produced from.  */

static int
get_last_value_validate (loc, insn, tick, replace)
     rtx *loc;
     rtx insn;
     int tick;
     int replace;
{
  rtx x = *loc;
  const char *fmt = GET_RTX_FORMAT (GET_CODE (x));
  int len = GET_RTX_LENGTH (GET_CODE (x));
  int i;

  if (GET_CODE (x) == REG)
    {
      int regno = REGNO (x);
      /* A hard register may span several consecutive registers.  */
      int endregno = regno + (regno < FIRST_PSEUDO_REGISTER
			      ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
      int j;

      for (j = regno; j < endregno; j++)
	if (reg_last_set_invalid[j]
	    /* If this is a pseudo-register that was only set once, it is
	       always valid.  */
	    || (! (regno >= FIRST_PSEUDO_REGISTER && REG_N_SETS (regno) == 1)
		&& reg_last_set_label[j] > tick))
	  {
	    if (replace)
	      *loc = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
	    /* Returns 1 when we replaced, 0 when we merely detected
	       the invalid reference.  */
	    return replace;
	  }

      return 1;
    }
  /* If this is a memory reference, make sure that there were
     no stores after it that might have clobbered the value.  We don't
     have alias info, so we assume any store invalidates it.  */
  else if (GET_CODE (x) == MEM && ! RTX_UNCHANGING_P (x)
	   && INSN_CUID (insn) <= mem_last_set)
    {
      if (replace)
	*loc = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
      return replace;
    }

  /* Recursively validate every sub-expression.  */
  for (i = 0; i < len; i++)
    if ((fmt[i] == 'e'
	 && get_last_value_validate (&XEXP (x, i), insn, tick, replace) == 0)
	/* Don't bother with these.  They shouldn't occur anyway.  */
	|| fmt[i] == 'E')
      return 0;

  /* If we haven't found a reason for it to be invalid, it is valid.  */
  return 1;
}
10770
/* Get the last value assigned to X, if known.  Some registers
   in the value may be replaced with (clobber (const_int 0)) if their value
   is no longer known reliably.  Returns 0 if no usable value is known.  */

static rtx
get_last_value (x)
     rtx x;
{
  int regno;
  rtx value;

  /* If this is a non-paradoxical SUBREG, get the value of its operand and
     then convert it to the desired mode.  If this is a paradoxical SUBREG,
     we cannot predict what values the "extra" bits might have.  */
  if (GET_CODE (x) == SUBREG
      && subreg_lowpart_p (x)
      && (GET_MODE_SIZE (GET_MODE (x))
	  <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
      && (value = get_last_value (SUBREG_REG (x))) != 0)
    return gen_lowpart_for_combine (GET_MODE (x), value);

  /* Only REG (and the SUBREG case above) can have a tracked value.  */
  if (GET_CODE (x) != REG)
    return 0;

  regno = REGNO (x);
  value = reg_last_set_value[regno];

  /* If we don't have a value or if it isn't for this basic block,
     return 0.  A pseudo set exactly once is exempt from the same-block
     requirement.  */

  if (value == 0
      || (REG_N_SETS (regno) != 1
	  && reg_last_set_label[regno] != label_tick))
    return 0;

  /* If the value was set in a later insn than the ones we are processing,
     we can't use it even if the register was only set once, but make a quick
     check to see if the previous insn set it to something.  This is commonly
     the case when the same pseudo is used by repeated insns.

     This does not work if there exists an instruction which is temporarily
     not on the insn chain.  */

  if (INSN_CUID (reg_last_set[regno]) >= subst_low_cuid)
    {
      rtx insn, set;

      /* We can not do anything useful in this case, because there is
	 an instruction which is not on the insn chain.  */
      if (subst_prev_insn)
	return 0;

      /* Skip over USE insns.  They are not useful here, and they may have
	 been made by combine, in which case they do not have a INSN_CUID
	 value.  We can't use prev_real_insn, because that would incorrectly
	 take us backwards across labels.  Skip over BARRIERs also, since
	 they could have been made by combine.  If we see one, we must be
	 optimizing dead code, so it doesn't matter what we do.  */
      for (insn = prev_nonnote_insn (subst_insn);
	   insn && ((GET_CODE (insn) == INSN
		     && GET_CODE (PATTERN (insn)) == USE)
		    || GET_CODE (insn) == BARRIER
		    || INSN_CUID (insn) >= subst_low_cuid);
	   insn = prev_nonnote_insn (insn))
	;

      if (insn
	  && (set = single_set (insn)) != 0
	  && rtx_equal_p (SET_DEST (set), x))
	{
	  value = SET_SRC (set);

	  /* Make sure that VALUE doesn't reference X.  Replace any
	     explicit references with a CLOBBER.  If there are any remaining
	     references (rare), don't use the value.  */

	  if (reg_mentioned_p (x, value))
	    value = replace_rtx (copy_rtx (value), x,
				 gen_rtx_CLOBBER (GET_MODE (x), const0_rtx));

	  if (reg_overlap_mentioned_p (x, value))
	    return 0;
	}
      else
	return 0;
    }

  /* If the value has all its registers valid, return it.  */
  if (get_last_value_validate (&value, reg_last_set[regno],
			       reg_last_set_label[regno], 0))
    return value;

  /* Otherwise, make a copy and replace any invalid register with
     (clobber (const_int 0)).  If that fails for some reason, return 0.  */

  value = copy_rtx (value);
  if (get_last_value_validate (&value, reg_last_set[regno],
			       reg_last_set_label[regno], 1))
    return value;

  return 0;
}
10873\f
10874/* Return nonzero if expression X refers to a REG or to memory
10875 that is set in an instruction more recent than FROM_CUID. */
10876
10877static int
10878use_crosses_set_p (x, from_cuid)
10879 register rtx x;
10880 int from_cuid;
10881{
6f7d635c 10882 register const char *fmt;
230d793d
RS
10883 register int i;
10884 register enum rtx_code code = GET_CODE (x);
10885
10886 if (code == REG)
10887 {
10888 register int regno = REGNO (x);
e28f5732
RK
10889 int endreg = regno + (regno < FIRST_PSEUDO_REGISTER
10890 ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
10891
230d793d
RS
10892#ifdef PUSH_ROUNDING
10893 /* Don't allow uses of the stack pointer to be moved,
10894 because we don't know whether the move crosses a push insn. */
10895 if (regno == STACK_POINTER_REGNUM)
10896 return 1;
10897#endif
e28f5732
RK
10898 for (;regno < endreg; regno++)
10899 if (reg_last_set[regno]
10900 && INSN_CUID (reg_last_set[regno]) > from_cuid)
10901 return 1;
10902 return 0;
230d793d
RS
10903 }
10904
10905 if (code == MEM && mem_last_set > from_cuid)
10906 return 1;
10907
10908 fmt = GET_RTX_FORMAT (code);
10909
10910 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
10911 {
10912 if (fmt[i] == 'E')
10913 {
10914 register int j;
10915 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
10916 if (use_crosses_set_p (XVECEXP (x, i, j), from_cuid))
10917 return 1;
10918 }
10919 else if (fmt[i] == 'e'
10920 && use_crosses_set_p (XEXP (x, i), from_cuid))
10921 return 1;
10922 }
10923 return 0;
10924}
10925\f
10926/* Define three variables used for communication between the following
10927 routines. */
10928
10929static int reg_dead_regno, reg_dead_endregno;
10930static int reg_dead_flag;
10931
10932/* Function called via note_stores from reg_dead_at_p.
10933
ddd5a7c1 10934 If DEST is within [reg_dead_regno, reg_dead_endregno), set
230d793d
RS
10935 reg_dead_flag to 1 if X is a CLOBBER and to -1 it is a SET. */
10936
10937static void
10938reg_dead_at_p_1 (dest, x)
10939 rtx dest;
10940 rtx x;
10941{
10942 int regno, endregno;
10943
10944 if (GET_CODE (dest) != REG)
10945 return;
10946
10947 regno = REGNO (dest);
10948 endregno = regno + (regno < FIRST_PSEUDO_REGISTER
10949 ? HARD_REGNO_NREGS (regno, GET_MODE (dest)) : 1);
10950
10951 if (reg_dead_endregno > regno && reg_dead_regno < endregno)
10952 reg_dead_flag = (GET_CODE (x) == CLOBBER) ? 1 : -1;
10953}
10954
/* Return non-zero if REG is known to be dead at INSN.

   We scan backwards from INSN.  If we hit a REG_DEAD note or a CLOBBER
   referencing REG, it is dead.  If we hit a SET referencing REG, it is
   live.  Otherwise, see if it is live or dead at the start of the basic
   block we are in.  Hard regs marked as being live in NEWPAT_USED_REGS
   must be assumed to be always live.  */

static int
reg_dead_at_p (reg, insn)
     rtx reg;
     rtx insn;
{
  int block, i;

  /* Set variables for reg_dead_at_p_1.  These communicate through
     file-scope statics because reg_dead_at_p_1 is invoked as a
     note_stores callback and cannot take extra arguments.  */
  reg_dead_regno = REGNO (reg);
  reg_dead_endregno = reg_dead_regno + (reg_dead_regno < FIRST_PSEUDO_REGISTER
					? HARD_REGNO_NREGS (reg_dead_regno,
							    GET_MODE (reg))
					: 1);

  reg_dead_flag = 0;

  /* Check that reg isn't mentioned in NEWPAT_USED_REGS.  Such hard regs
     must conservatively be treated as live everywhere.  */
  if (reg_dead_regno < FIRST_PSEUDO_REGISTER)
    {
      for (i = reg_dead_regno; i < reg_dead_endregno; i++)
	if (TEST_HARD_REG_BIT (newpat_used_regs, i))
	  return 0;
    }

  /* Scan backwards until we find a REG_DEAD note, SET, CLOBBER, label, or
     beginning of function.  reg_dead_flag is set by reg_dead_at_p_1 to 1
     for a CLOBBER (dead) or -1 for a SET (live).  */
  for (; insn && GET_CODE (insn) != CODE_LABEL && GET_CODE (insn) != BARRIER;
       insn = prev_nonnote_insn (insn))
    {
      note_stores (PATTERN (insn), reg_dead_at_p_1);
      if (reg_dead_flag)
	return reg_dead_flag == 1 ? 1 : 0;

      if (find_regno_note (insn, REG_DEAD, reg_dead_regno))
	return 1;
    }

  /* Get the basic block number that we were in.  */
  if (insn == 0)
    block = 0;
  else
    {
      for (block = 0; block < n_basic_blocks; block++)
	if (insn == BLOCK_HEAD (block))
	  break;

      /* If INSN heads no block, we cannot tell; assume live.  */
      if (block == n_basic_blocks)
	return 0;
    }

  /* Dead only if no part of the register is live on entry to the block.  */
  for (i = reg_dead_regno; i < reg_dead_endregno; i++)
    if (REGNO_REG_SET_P (BASIC_BLOCK (block)->global_live_at_start, i))
      return 0;

  return 1;
}
6e25d159
RK
11019\f
/* Note hard registers in X that are used.  This code is similar to
   that in flow.c, but much simpler since we don't care about pseudos.

   Each hard register used (and each register of a multi-register hard
   reg) is recorded by setting its bit in the file-scope set
   NEWPAT_USED_REGS.  */

static void
mark_used_regs_combine (x)
     rtx x;
{
  register RTX_CODE code = GET_CODE (x);
  register int regno;
  int i;

  switch (code)
    {
    /* Constants and address vectors reference no registers.  */
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_INT:
    case CONST:
    case CONST_DOUBLE:
    case PC:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
    case ASM_INPUT:
#ifdef HAVE_cc0
    /* CC0 must die in the insn after it is set, so we don't need to take
       special note of it here.  */
    case CC0:
#endif
      return;

    case CLOBBER:
      /* If we are clobbering a MEM, mark any hard registers inside the
	 address as used.  */
      if (GET_CODE (XEXP (x, 0)) == MEM)
	mark_used_regs_combine (XEXP (XEXP (x, 0), 0));
      return;

    case REG:
      regno = REGNO (x);
      /* A hard reg in a wide mode may really be multiple registers.
	 If so, mark all of them just like the first.  */
      if (regno < FIRST_PSEUDO_REGISTER)
	{
	  /* None of this applies to the stack, frame or arg pointers */
	  if (regno == STACK_POINTER_REGNUM
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
	      || regno == HARD_FRAME_POINTER_REGNUM
#endif
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
	      || (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
#endif
	      || regno == FRAME_POINTER_REGNUM)
	    return;

	  i = HARD_REGNO_NREGS (regno, GET_MODE (x));
	  while (i-- > 0)
	    SET_HARD_REG_BIT (newpat_used_regs, regno + i);
	}
      return;

    case SET:
      {
	/* If setting a MEM, or a SUBREG of a MEM, then note any hard regs in
	   the address.  */
	register rtx testreg = SET_DEST (x);

	/* Strip wrappers to reach the actual destination.  */
	while (GET_CODE (testreg) == SUBREG
	       || GET_CODE (testreg) == ZERO_EXTRACT
	       || GET_CODE (testreg) == SIGN_EXTRACT
	       || GET_CODE (testreg) == STRICT_LOW_PART)
	  testreg = XEXP (testreg, 0);

	if (GET_CODE (testreg) == MEM)
	  mark_used_regs_combine (XEXP (testreg, 0));

	mark_used_regs_combine (SET_SRC (x));
      }
      return;

    default:
      break;
    }

  /* Recursively scan the operands of this expression.  */

  {
    register const char *fmt = GET_RTX_FORMAT (code);

    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
	if (fmt[i] == 'e')
	  mark_used_regs_combine (XEXP (x, i));
	else if (fmt[i] == 'E')
	  {
	    register int j;

	    for (j = 0; j < XVECLEN (x, i); j++)
	      mark_used_regs_combine (XVECEXP (x, i, j));
	  }
      }
  }
}
11121
230d793d
RS
11122\f
11123/* Remove register number REGNO from the dead registers list of INSN.
11124
11125 Return the note used to record the death, if there was one. */
11126
11127rtx
11128remove_death (regno, insn)
11129 int regno;
11130 rtx insn;
11131{
11132 register rtx note = find_regno_note (insn, REG_DEAD, regno);
11133
11134 if (note)
1a26b032 11135 {
b1f21e0a 11136 REG_N_DEATHS (regno)--;
1a26b032
RK
11137 remove_note (insn, note);
11138 }
230d793d
RS
11139
11140 return note;
11141}
11142
/* For each register (hardware or pseudo) used within expression X, if its
   death is in an instruction with cuid between FROM_CUID (inclusive) and
   TO_INSN (exclusive), put a REG_DEAD note for that register in the
   list headed by PNOTES.

   That said, don't move registers killed by maybe_kill_insn.

   This is done when X is being merged by combination into TO_INSN.  These
   notes will then be distributed as needed.  */

static void
move_deaths (x, maybe_kill_insn, from_cuid, to_insn, pnotes)
     rtx x;
     rtx maybe_kill_insn;
     int from_cuid;
     rtx to_insn;
     rtx *pnotes;
{
  register const char *fmt;
  register int len, i;
  register enum rtx_code code = GET_CODE (x);

  if (code == REG)
    {
      register int regno = REGNO (x);
      register rtx where_dead = reg_last_death[regno];
      register rtx before_dead, after_dead;

      /* Don't move the register if it gets killed in between from and to */
      if (maybe_kill_insn && reg_set_p (x, maybe_kill_insn)
	  && !reg_referenced_p (x, maybe_kill_insn))
	return;

      /* WHERE_DEAD could be a USE insn made by combine, so first we
	 make sure that we have insns with valid INSN_CUID values.
	 Walk backwards/forwards to the nearest neighbors whose UID is
	 within the range covered by the cuid table.  */
      before_dead = where_dead;
      while (before_dead && INSN_UID (before_dead) > max_uid_cuid)
	before_dead = PREV_INSN (before_dead);
      after_dead = where_dead;
      while (after_dead && INSN_UID (after_dead) > max_uid_cuid)
	after_dead = NEXT_INSN (after_dead);

      /* The death is movable if it lies in [FROM_CUID, TO_INSN).  */
      if (before_dead && after_dead
	  && INSN_CUID (before_dead) >= from_cuid
	  && (INSN_CUID (after_dead) < INSN_CUID (to_insn)
	      || (where_dead != after_dead
		  && INSN_CUID (after_dead) == INSN_CUID (to_insn))))
	{
	  rtx note = remove_death (regno, where_dead);

	  /* It is possible for the call above to return 0.  This can occur
	     when reg_last_death points to I2 or I1 that we combined with.
	     In that case make a new note.

	     We must also check for the case where X is a hard register
	     and NOTE is a death note for a range of hard registers
	     including X.  In that case, we must put REG_DEAD notes for
	     the remaining registers in place of NOTE.  */

	  if (note != 0 && regno < FIRST_PSEUDO_REGISTER
	      && (GET_MODE_SIZE (GET_MODE (XEXP (note, 0)))
		  > GET_MODE_SIZE (GET_MODE (x))))
	    {
	      /* NOTE covers a wider hard-reg range than X: re-create
		 REG_DEAD notes for the pieces outside X's range.  */
	      int deadregno = REGNO (XEXP (note, 0));
	      int deadend
		= (deadregno + HARD_REGNO_NREGS (deadregno,
						 GET_MODE (XEXP (note, 0))));
	      int ourend = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
	      int i;

	      for (i = deadregno; i < deadend; i++)
		if (i < regno || i >= ourend)
		  REG_NOTES (where_dead)
		    = gen_rtx_EXPR_LIST (REG_DEAD,
					 gen_rtx_REG (reg_raw_mode[i], i),
					 REG_NOTES (where_dead));
	    }
	  /* If we didn't find any note, or if we found a REG_DEAD note that
	     covers only part of the given reg, and we have a multi-reg hard
	     register, then to be safe we must check for REG_DEAD notes
	     for each register other than the first.  They could have
	     their own REG_DEAD notes lying around.  */
	  else if ((note == 0
		    || (note != 0
			&& (GET_MODE_SIZE (GET_MODE (XEXP (note, 0)))
			    < GET_MODE_SIZE (GET_MODE (x)))))
		   && regno < FIRST_PSEUDO_REGISTER
		   && HARD_REGNO_NREGS (regno, GET_MODE (x)) > 1)
	    {
	      int ourend = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
	      int i, offset;
	      rtx oldnotes = 0;

	      /* Registers already covered by NOTE need not be re-examined.  */
	      if (note)
		offset = HARD_REGNO_NREGS (regno, GET_MODE (XEXP (note, 0)));
	      else
		offset = 1;

	      for (i = regno + offset; i < ourend; i++)
		move_deaths (gen_rtx_REG (reg_raw_mode[i], i),
			     maybe_kill_insn, from_cuid, to_insn, &oldnotes);
	    }

	  /* Reuse NOTE when it matches X's mode exactly; otherwise
	     build a fresh REG_DEAD note for X.  */
	  if (note != 0 && GET_MODE (XEXP (note, 0)) == GET_MODE (x))
	    {
	      XEXP (note, 1) = *pnotes;
	      *pnotes = note;
	    }
	  else
	    *pnotes = gen_rtx_EXPR_LIST (REG_DEAD, x, *pnotes);

	  REG_N_DEATHS (regno)++;
	}

      return;
    }

  else if (GET_CODE (x) == SET)
    {
      rtx dest = SET_DEST (x);

      move_deaths (SET_SRC (x), maybe_kill_insn, from_cuid, to_insn, pnotes);

      /* In the case of a ZERO_EXTRACT, a STRICT_LOW_PART, or a SUBREG
	 that accesses one word of a multi-word item, some
	 piece of every register in the expression is used by
	 this insn, so remove any old death.  */

      if (GET_CODE (dest) == ZERO_EXTRACT
	  || GET_CODE (dest) == STRICT_LOW_PART
	  || (GET_CODE (dest) == SUBREG
	      && (((GET_MODE_SIZE (GET_MODE (dest))
		    + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
		  == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
		       + UNITS_PER_WORD - 1) / UNITS_PER_WORD))))
	{
	  move_deaths (dest, maybe_kill_insn, from_cuid, to_insn, pnotes);
	  return;
	}

      /* If this is some other SUBREG, we know it replaces the entire
	 value, so use that as the destination.  */
      if (GET_CODE (dest) == SUBREG)
	dest = SUBREG_REG (dest);

      /* If this is a MEM, adjust deaths of anything used in the address.
	 For a REG (the only other possibility), the entire value is
	 being replaced so the old value is not used in this insn.  */

      if (GET_CODE (dest) == MEM)
	move_deaths (XEXP (dest, 0), maybe_kill_insn, from_cuid,
		     to_insn, pnotes);
      return;
    }

  else if (GET_CODE (x) == CLOBBER)
    return;

  /* Recursively process the operands of any other expression.  */
  len = GET_RTX_LENGTH (code);
  fmt = GET_RTX_FORMAT (code);

  for (i = 0; i < len; i++)
    {
      if (fmt[i] == 'E')
	{
	  register int j;
	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    move_deaths (XVECEXP (x, i, j), maybe_kill_insn, from_cuid,
			 to_insn, pnotes);
	}
      else if (fmt[i] == 'e')
	move_deaths (XEXP (x, i), maybe_kill_insn, from_cuid, to_insn, pnotes);
    }
}
11317\f
a7c99304
RK
11318/* Return 1 if X is the target of a bit-field assignment in BODY, the
11319 pattern of an insn. X must be a REG. */
230d793d
RS
11320
11321static int
a7c99304
RK
11322reg_bitfield_target_p (x, body)
11323 rtx x;
230d793d
RS
11324 rtx body;
11325{
11326 int i;
11327
11328 if (GET_CODE (body) == SET)
a7c99304
RK
11329 {
11330 rtx dest = SET_DEST (body);
11331 rtx target;
11332 int regno, tregno, endregno, endtregno;
11333
11334 if (GET_CODE (dest) == ZERO_EXTRACT)
11335 target = XEXP (dest, 0);
11336 else if (GET_CODE (dest) == STRICT_LOW_PART)
11337 target = SUBREG_REG (XEXP (dest, 0));
11338 else
11339 return 0;
11340
11341 if (GET_CODE (target) == SUBREG)
11342 target = SUBREG_REG (target);
11343
11344 if (GET_CODE (target) != REG)
11345 return 0;
11346
11347 tregno = REGNO (target), regno = REGNO (x);
11348 if (tregno >= FIRST_PSEUDO_REGISTER || regno >= FIRST_PSEUDO_REGISTER)
11349 return target == x;
11350
11351 endtregno = tregno + HARD_REGNO_NREGS (tregno, GET_MODE (target));
11352 endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
11353
11354 return endregno > tregno && regno < endtregno;
11355 }
230d793d
RS
11356
11357 else if (GET_CODE (body) == PARALLEL)
11358 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
a7c99304 11359 if (reg_bitfield_target_p (x, XVECEXP (body, 0, i)))
230d793d
RS
11360 return 1;
11361
11362 return 0;
11363}
11364\f
11365/* Given a chain of REG_NOTES originally from FROM_INSN, try to place them
11366 as appropriate. I3 and I2 are the insns resulting from the combination
11367 insns including FROM (I2 may be zero).
11368
11369 ELIM_I2 and ELIM_I1 are either zero or registers that we know will
11370 not need REG_DEAD notes because they are being substituted for. This
11371 saves searching in the most common cases.
11372
11373 Each note in the list is either ignored or placed on some insns, depending
11374 on the type of note. */
11375
11376static void
11377distribute_notes (notes, from_insn, i3, i2, elim_i2, elim_i1)
11378 rtx notes;
11379 rtx from_insn;
11380 rtx i3, i2;
11381 rtx elim_i2, elim_i1;
11382{
11383 rtx note, next_note;
11384 rtx tem;
11385
11386 for (note = notes; note; note = next_note)
11387 {
11388 rtx place = 0, place2 = 0;
11389
11390 /* If this NOTE references a pseudo register, ensure it references
11391 the latest copy of that register. */
11392 if (XEXP (note, 0) && GET_CODE (XEXP (note, 0)) == REG
11393 && REGNO (XEXP (note, 0)) >= FIRST_PSEUDO_REGISTER)
11394 XEXP (note, 0) = regno_reg_rtx[REGNO (XEXP (note, 0))];
11395
11396 next_note = XEXP (note, 1);
11397 switch (REG_NOTE_KIND (note))
11398 {
c9903b44
DE
11399 case REG_BR_PROB:
11400 case REG_EXEC_COUNT:
11401 /* Doesn't matter much where we put this, as long as it's somewhere.
11402 It is preferable to keep these notes on branches, which is most
11403 likely to be i3. */
11404 place = i3;
11405 break;
11406
4b7c585f 11407 case REG_EH_REGION:
0e403ec3
AS
11408 case REG_EH_RETHROW:
11409 /* These notes must remain with the call. It should not be
11410 possible for both I2 and I3 to be a call. */
4b7c585f
JL
11411 if (GET_CODE (i3) == CALL_INSN)
11412 place = i3;
11413 else if (i2 && GET_CODE (i2) == CALL_INSN)
11414 place = i2;
11415 else
11416 abort ();
11417 break;
11418
230d793d 11419 case REG_UNUSED:
07d0cbdd 11420 /* Any clobbers for i3 may still exist, and so we must process
176c9e6b
JW
11421 REG_UNUSED notes from that insn.
11422
11423 Any clobbers from i2 or i1 can only exist if they were added by
11424 recog_for_combine. In that case, recog_for_combine created the
11425 necessary REG_UNUSED notes. Trying to keep any original
11426 REG_UNUSED notes from these insns can cause incorrect output
11427 if it is for the same register as the original i3 dest.
11428 In that case, we will notice that the register is set in i3,
11429 and then add a REG_UNUSED note for the destination of i3, which
07d0cbdd
JW
11430 is wrong. However, it is possible to have REG_UNUSED notes from
11431 i2 or i1 for register which were both used and clobbered, so
11432 we keep notes from i2 or i1 if they will turn into REG_DEAD
11433 notes. */
176c9e6b 11434
230d793d
RS
11435 /* If this register is set or clobbered in I3, put the note there
11436 unless there is one already. */
07d0cbdd 11437 if (reg_set_p (XEXP (note, 0), PATTERN (i3)))
230d793d 11438 {
07d0cbdd
JW
11439 if (from_insn != i3)
11440 break;
11441
230d793d
RS
11442 if (! (GET_CODE (XEXP (note, 0)) == REG
11443 ? find_regno_note (i3, REG_UNUSED, REGNO (XEXP (note, 0)))
11444 : find_reg_note (i3, REG_UNUSED, XEXP (note, 0))))
11445 place = i3;
11446 }
11447 /* Otherwise, if this register is used by I3, then this register
11448 now dies here, so we must put a REG_DEAD note here unless there
11449 is one already. */
11450 else if (reg_referenced_p (XEXP (note, 0), PATTERN (i3))
11451 && ! (GET_CODE (XEXP (note, 0)) == REG
11452 ? find_regno_note (i3, REG_DEAD, REGNO (XEXP (note, 0)))
11453 : find_reg_note (i3, REG_DEAD, XEXP (note, 0))))
11454 {
11455 PUT_REG_NOTE_KIND (note, REG_DEAD);
11456 place = i3;
11457 }
11458 break;
11459
11460 case REG_EQUAL:
11461 case REG_EQUIV:
11462 case REG_NONNEG:
9ae8ffe7 11463 case REG_NOALIAS:
230d793d
RS
11464 /* These notes say something about results of an insn. We can
11465 only support them if they used to be on I3 in which case they
a687e897
RK
11466 remain on I3. Otherwise they are ignored.
11467
11468 If the note refers to an expression that is not a constant, we
11469 must also ignore the note since we cannot tell whether the
11470 equivalence is still true. It might be possible to do
11471 slightly better than this (we only have a problem if I2DEST
11472 or I1DEST is present in the expression), but it doesn't
11473 seem worth the trouble. */
11474
11475 if (from_insn == i3
11476 && (XEXP (note, 0) == 0 || CONSTANT_P (XEXP (note, 0))))
230d793d
RS
11477 place = i3;
11478 break;
11479
11480 case REG_INC:
11481 case REG_NO_CONFLICT:
230d793d
RS
11482 /* These notes say something about how a register is used. They must
11483 be present on any use of the register in I2 or I3. */
11484 if (reg_mentioned_p (XEXP (note, 0), PATTERN (i3)))
11485 place = i3;
11486
11487 if (i2 && reg_mentioned_p (XEXP (note, 0), PATTERN (i2)))
11488 {
11489 if (place)
11490 place2 = i2;
11491 else
11492 place = i2;
11493 }
11494 break;
11495
e55b4486
RH
11496 case REG_LABEL:
11497 /* This can show up in several ways -- either directly in the
11498 pattern, or hidden off in the constant pool with (or without?)
11499 a REG_EQUAL note. */
11500 /* ??? Ignore the without-reg_equal-note problem for now. */
11501 if (reg_mentioned_p (XEXP (note, 0), PATTERN (i3))
11502 || ((tem = find_reg_note (i3, REG_EQUAL, NULL_RTX))
11503 && GET_CODE (XEXP (tem, 0)) == LABEL_REF
11504 && XEXP (XEXP (tem, 0), 0) == XEXP (note, 0)))
11505 place = i3;
11506
11507 if (i2
11508 && (reg_mentioned_p (XEXP (note, 0), PATTERN (i2))
11509 || ((tem = find_reg_note (i2, REG_EQUAL, NULL_RTX))
11510 && GET_CODE (XEXP (tem, 0)) == LABEL_REF
11511 && XEXP (XEXP (tem, 0), 0) == XEXP (note, 0))))
11512 {
11513 if (place)
11514 place2 = i2;
11515 else
11516 place = i2;
11517 }
11518 break;
11519
230d793d
RS
11520 case REG_WAS_0:
11521 /* It is too much trouble to try to see if this note is still
11522 correct in all situations. It is better to simply delete it. */
11523 break;
11524
11525 case REG_RETVAL:
11526 /* If the insn previously containing this note still exists,
11527 put it back where it was. Otherwise move it to the previous
11528 insn. Adjust the corresponding REG_LIBCALL note. */
11529 if (GET_CODE (from_insn) != NOTE)
11530 place = from_insn;
11531 else
11532 {
5f4f0e22 11533 tem = find_reg_note (XEXP (note, 0), REG_LIBCALL, NULL_RTX);
230d793d
RS
11534 place = prev_real_insn (from_insn);
11535 if (tem && place)
11536 XEXP (tem, 0) = place;
11537 }
11538 break;
11539
11540 case REG_LIBCALL:
11541 /* This is handled similarly to REG_RETVAL. */
11542 if (GET_CODE (from_insn) != NOTE)
11543 place = from_insn;
11544 else
11545 {
5f4f0e22 11546 tem = find_reg_note (XEXP (note, 0), REG_RETVAL, NULL_RTX);
230d793d
RS
11547 place = next_real_insn (from_insn);
11548 if (tem && place)
11549 XEXP (tem, 0) = place;
11550 }
11551 break;
11552
11553 case REG_DEAD:
11554 /* If the register is used as an input in I3, it dies there.
11555 Similarly for I2, if it is non-zero and adjacent to I3.
11556
11557 If the register is not used as an input in either I3 or I2
11558 and it is not one of the registers we were supposed to eliminate,
11559 there are two possibilities. We might have a non-adjacent I2
11560 or we might have somehow eliminated an additional register
11561 from a computation. For example, we might have had A & B where
11562 we discover that B will always be zero. In this case we will
11563 eliminate the reference to A.
11564
11565 In both cases, we must search to see if we can find a previous
11566 use of A and put the death note there. */
11567
6e2d1486
RK
11568 if (from_insn
11569 && GET_CODE (from_insn) == CALL_INSN
11570 && find_reg_fusage (from_insn, USE, XEXP (note, 0)))
11571 place = from_insn;
11572 else if (reg_referenced_p (XEXP (note, 0), PATTERN (i3)))
230d793d
RS
11573 place = i3;
11574 else if (i2 != 0 && next_nonnote_insn (i2) == i3
11575 && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
11576 place = i2;
11577
11578 if (XEXP (note, 0) == elim_i2 || XEXP (note, 0) == elim_i1)
11579 break;
11580
510dd77e
RK
11581 /* If the register is used in both I2 and I3 and it dies in I3,
11582 we might have added another reference to it. If reg_n_refs
11583 was 2, bump it to 3. This has to be correct since the
11584 register must have been set somewhere. The reason this is
11585 done is because local-alloc.c treats 2 references as a
11586 special case. */
11587
11588 if (place == i3 && i2 != 0 && GET_CODE (XEXP (note, 0)) == REG
b1f21e0a 11589 && REG_N_REFS (REGNO (XEXP (note, 0)))== 2
510dd77e 11590 && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
b1f21e0a 11591 REG_N_REFS (REGNO (XEXP (note, 0))) = 3;
510dd77e 11592
230d793d 11593 if (place == 0)
38d8473f
RK
11594 {
11595 for (tem = prev_nonnote_insn (i3);
11596 place == 0 && tem
11597 && (GET_CODE (tem) == INSN || GET_CODE (tem) == CALL_INSN);
11598 tem = prev_nonnote_insn (tem))
11599 {
11600 /* If the register is being set at TEM, see if that is all
11601 TEM is doing. If so, delete TEM. Otherwise, make this
11602 into a REG_UNUSED note instead. */
11603 if (reg_set_p (XEXP (note, 0), PATTERN (tem)))
11604 {
11605 rtx set = single_set (tem);
e5e809f4 11606 rtx inner_dest = 0;
e51712db 11607#ifdef HAVE_cc0
f5c97640 11608 rtx cc0_setter = NULL_RTX;
e51712db 11609#endif
e5e809f4
JL
11610
11611 if (set != 0)
11612 for (inner_dest = SET_DEST (set);
11613 GET_CODE (inner_dest) == STRICT_LOW_PART
11614 || GET_CODE (inner_dest) == SUBREG
11615 || GET_CODE (inner_dest) == ZERO_EXTRACT;
11616 inner_dest = XEXP (inner_dest, 0))
11617 ;
38d8473f
RK
11618
11619 /* Verify that it was the set, and not a clobber that
f5c97640
RH
11620 modified the register.
11621
11622 CC0 targets must be careful to maintain setter/user
11623 pairs. If we cannot delete the setter due to side
11624 effects, mark the user with an UNUSED note instead
11625 of deleting it. */
38d8473f
RK
11626
11627 if (set != 0 && ! side_effects_p (SET_SRC (set))
f5c97640
RH
11628 && rtx_equal_p (XEXP (note, 0), inner_dest)
11629#ifdef HAVE_cc0
11630 && (! reg_mentioned_p (cc0_rtx, SET_SRC (set))
11631 || ((cc0_setter = prev_cc0_setter (tem)) != NULL
11632 && sets_cc0_p (PATTERN (cc0_setter)) > 0))
11633#endif
11634 )
38d8473f
RK
11635 {
11636 /* Move the notes and links of TEM elsewhere.
11637 This might delete other dead insns recursively.
11638 First set the pattern to something that won't use
11639 any register. */
11640
11641 PATTERN (tem) = pc_rtx;
11642
11643 distribute_notes (REG_NOTES (tem), tem, tem,
11644 NULL_RTX, NULL_RTX, NULL_RTX);
11645 distribute_links (LOG_LINKS (tem));
11646
11647 PUT_CODE (tem, NOTE);
11648 NOTE_LINE_NUMBER (tem) = NOTE_INSN_DELETED;
11649 NOTE_SOURCE_FILE (tem) = 0;
f5c97640
RH
11650
11651#ifdef HAVE_cc0
11652 /* Delete the setter too. */
11653 if (cc0_setter)
11654 {
11655 PATTERN (cc0_setter) = pc_rtx;
11656
11657 distribute_notes (REG_NOTES (cc0_setter),
11658 cc0_setter, cc0_setter,
11659 NULL_RTX, NULL_RTX, NULL_RTX);
11660 distribute_links (LOG_LINKS (cc0_setter));
11661
11662 PUT_CODE (cc0_setter, NOTE);
11663 NOTE_LINE_NUMBER (cc0_setter) = NOTE_INSN_DELETED;
11664 NOTE_SOURCE_FILE (cc0_setter) = 0;
11665 }
11666#endif
38d8473f 11667 }
e5e809f4
JL
11668 /* If the register is both set and used here, put the
11669 REG_DEAD note here, but place a REG_UNUSED note
11670 here too unless there already is one. */
11671 else if (reg_referenced_p (XEXP (note, 0),
11672 PATTERN (tem)))
11673 {
11674 place = tem;
11675
11676 if (! find_regno_note (tem, REG_UNUSED,
11677 REGNO (XEXP (note, 0))))
11678 REG_NOTES (tem)
9e6a5703
JC
11679 = gen_rtx_EXPR_LIST (REG_UNUSED,
11680 XEXP (note, 0),
11681 REG_NOTES (tem));
e5e809f4 11682 }
38d8473f
RK
11683 else
11684 {
11685 PUT_REG_NOTE_KIND (note, REG_UNUSED);
11686
11687 /* If there isn't already a REG_UNUSED note, put one
11688 here. */
11689 if (! find_regno_note (tem, REG_UNUSED,
11690 REGNO (XEXP (note, 0))))
11691 place = tem;
11692 break;
230d793d
RS
11693 }
11694 }
13018fad
RE
11695 else if (reg_referenced_p (XEXP (note, 0), PATTERN (tem))
11696 || (GET_CODE (tem) == CALL_INSN
11697 && find_reg_fusage (tem, USE, XEXP (note, 0))))
230d793d
RS
11698 {
11699 place = tem;
932d1119
RK
11700
11701 /* If we are doing a 3->2 combination, and we have a
11702 register which formerly died in i3 and was not used
11703 by i2, which now no longer dies in i3 and is used in
11704 i2 but does not die in i2, and place is between i2
11705 and i3, then we may need to move a link from place to
11706 i2. */
a8908849
RK
11707 if (i2 && INSN_UID (place) <= max_uid_cuid
11708 && INSN_CUID (place) > INSN_CUID (i2)
932d1119
RK
11709 && from_insn && INSN_CUID (from_insn) > INSN_CUID (i2)
11710 && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
11711 {
11712 rtx links = LOG_LINKS (place);
11713 LOG_LINKS (place) = 0;
11714 distribute_links (links);
11715 }
230d793d
RS
11716 break;
11717 }
38d8473f
RK
11718 }
11719
11720 /* If we haven't found an insn for the death note and it
11721 is still a REG_DEAD note, but we have hit a CODE_LABEL,
11722 insert a USE insn for the register at that label and
11723 put the death node there. This prevents problems with
11724 call-state tracking in caller-save.c. */
11725 if (REG_NOTE_KIND (note) == REG_DEAD && place == 0 && tem != 0)
e2cce0cf
RK
11726 {
11727 place
38a448ca 11728 = emit_insn_after (gen_rtx_USE (VOIDmode, XEXP (note, 0)),
e2cce0cf
RK
11729 tem);
11730
11731 /* If this insn was emitted between blocks, then update
3b413743
RH
11732 BLOCK_HEAD of the current block to include it. */
11733 if (BLOCK_END (this_basic_block - 1) == tem)
11734 BLOCK_HEAD (this_basic_block) = place;
e2cce0cf 11735 }
38d8473f 11736 }
230d793d
RS
11737
11738 /* If the register is set or already dead at PLACE, we needn't do
e5e809f4
JL
11739 anything with this note if it is still a REG_DEAD note.
11740 We can here if it is set at all, not if is it totally replace,
11741 which is what `dead_or_set_p' checks, so also check for it being
11742 set partially. */
11743
230d793d 11744
230d793d
RS
11745 if (place && REG_NOTE_KIND (note) == REG_DEAD)
11746 {
11747 int regno = REGNO (XEXP (note, 0));
11748
11749 if (dead_or_set_p (place, XEXP (note, 0))
11750 || reg_bitfield_target_p (XEXP (note, 0), PATTERN (place)))
11751 {
11752 /* Unless the register previously died in PLACE, clear
11753 reg_last_death. [I no longer understand why this is
11754 being done.] */
11755 if (reg_last_death[regno] != place)
11756 reg_last_death[regno] = 0;
11757 place = 0;
11758 }
11759 else
11760 reg_last_death[regno] = place;
11761
11762 /* If this is a death note for a hard reg that is occupying
11763 multiple registers, ensure that we are still using all
11764 parts of the object. If we find a piece of the object
11765 that is unused, we must add a USE for that piece before
11766 PLACE and put the appropriate REG_DEAD note on it.
11767
11768 An alternative would be to put a REG_UNUSED for the pieces
11769 on the insn that set the register, but that can't be done if
11770 it is not in the same block. It is simpler, though less
11771 efficient, to add the USE insns. */
11772
11773 if (place && regno < FIRST_PSEUDO_REGISTER
11774 && HARD_REGNO_NREGS (regno, GET_MODE (XEXP (note, 0))) > 1)
11775 {
11776 int endregno
11777 = regno + HARD_REGNO_NREGS (regno,
11778 GET_MODE (XEXP (note, 0)));
11779 int all_used = 1;
11780 int i;
11781
11782 for (i = regno; i < endregno; i++)
9fd5bb62
JW
11783 if (! refers_to_regno_p (i, i + 1, PATTERN (place), 0)
11784 && ! find_regno_fusage (place, USE, i))
230d793d 11785 {
38a448ca 11786 rtx piece = gen_rtx_REG (reg_raw_mode[i], i);
28f6d3af
RK
11787 rtx p;
11788
11789 /* See if we already placed a USE note for this
11790 register in front of PLACE. */
11791 for (p = place;
11792 GET_CODE (PREV_INSN (p)) == INSN
11793 && GET_CODE (PATTERN (PREV_INSN (p))) == USE;
11794 p = PREV_INSN (p))
11795 if (rtx_equal_p (piece,
11796 XEXP (PATTERN (PREV_INSN (p)), 0)))
11797 {
11798 p = 0;
11799 break;
11800 }
11801
11802 if (p)
11803 {
11804 rtx use_insn
38a448ca
RH
11805 = emit_insn_before (gen_rtx_USE (VOIDmode,
11806 piece),
28f6d3af
RK
11807 p);
11808 REG_NOTES (use_insn)
38a448ca
RH
11809 = gen_rtx_EXPR_LIST (REG_DEAD, piece,
11810 REG_NOTES (use_insn));
28f6d3af 11811 }
230d793d 11812
5089e22e 11813 all_used = 0;
230d793d
RS
11814 }
11815
a394b17b
JW
11816 /* Check for the case where the register dying partially
11817 overlaps the register set by this insn. */
11818 if (all_used)
11819 for (i = regno; i < endregno; i++)
11820 if (dead_or_set_regno_p (place, i))
11821 {
11822 all_used = 0;
11823 break;
11824 }
11825
230d793d
RS
11826 if (! all_used)
11827 {
11828 /* Put only REG_DEAD notes for pieces that are
11829 still used and that are not already dead or set. */
11830
11831 for (i = regno; i < endregno; i++)
11832 {
38a448ca 11833 rtx piece = gen_rtx_REG (reg_raw_mode[i], i);
230d793d 11834
17cbf358
JW
11835 if ((reg_referenced_p (piece, PATTERN (place))
11836 || (GET_CODE (place) == CALL_INSN
11837 && find_reg_fusage (place, USE, piece)))
230d793d
RS
11838 && ! dead_or_set_p (place, piece)
11839 && ! reg_bitfield_target_p (piece,
11840 PATTERN (place)))
38a448ca
RH
11841 REG_NOTES (place)
11842 = gen_rtx_EXPR_LIST (REG_DEAD,
11843 piece, REG_NOTES (place));
230d793d
RS
11844 }
11845
11846 place = 0;
11847 }
11848 }
11849 }
11850 break;
11851
11852 default:
11853 /* Any other notes should not be present at this point in the
11854 compilation. */
11855 abort ();
11856 }
11857
11858 if (place)
11859 {
11860 XEXP (note, 1) = REG_NOTES (place);
11861 REG_NOTES (place) = note;
11862 }
1a26b032
RK
11863 else if ((REG_NOTE_KIND (note) == REG_DEAD
11864 || REG_NOTE_KIND (note) == REG_UNUSED)
11865 && GET_CODE (XEXP (note, 0)) == REG)
b1f21e0a 11866 REG_N_DEATHS (REGNO (XEXP (note, 0)))--;
230d793d
RS
11867
11868 if (place2)
1a26b032
RK
11869 {
11870 if ((REG_NOTE_KIND (note) == REG_DEAD
11871 || REG_NOTE_KIND (note) == REG_UNUSED)
11872 && GET_CODE (XEXP (note, 0)) == REG)
b1f21e0a 11873 REG_N_DEATHS (REGNO (XEXP (note, 0)))++;
1a26b032 11874
38a448ca
RH
11875 REG_NOTES (place2) = gen_rtx_fmt_ee (GET_CODE (note),
11876 REG_NOTE_KIND (note),
11877 XEXP (note, 0),
11878 REG_NOTES (place2));
1a26b032 11879 }
230d793d
RS
11880 }
11881}
11882\f
11883/* Similarly to above, distribute the LOG_LINKS that used to be present on
5089e22e
RS
11884 I3, I2, and I1 to new locations. This is also called in one case to
11885 add a link pointing at I3 when I3's destination is changed. */
230d793d
RS
11886
11887static void
11888distribute_links (links)
11889 rtx links;
11890{
11891 rtx link, next_link;
11892
11893 for (link = links; link; link = next_link)
11894 {
11895 rtx place = 0;
11896 rtx insn;
11897 rtx set, reg;
11898
11899 next_link = XEXP (link, 1);
11900
11901 /* If the insn that this link points to is a NOTE or isn't a single
11902 set, ignore it. In the latter case, it isn't clear what we
11903 can do other than ignore the link, since we can't tell which
11904 register it was for. Such links wouldn't be used by combine
11905 anyway.
11906
11907 It is not possible for the destination of the target of the link to
11908 have been changed by combine. The only potential of this is if we
11909 replace I3, I2, and I1 by I3 and I2. But in that case the
11910 destination of I2 also remains unchanged. */
11911
11912 if (GET_CODE (XEXP (link, 0)) == NOTE
11913 || (set = single_set (XEXP (link, 0))) == 0)
11914 continue;
11915
11916 reg = SET_DEST (set);
11917 while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
11918 || GET_CODE (reg) == SIGN_EXTRACT
11919 || GET_CODE (reg) == STRICT_LOW_PART)
11920 reg = XEXP (reg, 0);
11921
11922 /* A LOG_LINK is defined as being placed on the first insn that uses
11923 a register and points to the insn that sets the register. Start
11924 searching at the next insn after the target of the link and stop
11925 when we reach a set of the register or the end of the basic block.
11926
11927 Note that this correctly handles the link that used to point from
5089e22e 11928 I3 to I2. Also note that not much searching is typically done here
230d793d
RS
11929 since most links don't point very far away. */
11930
11931 for (insn = NEXT_INSN (XEXP (link, 0));
0d4d42c3 11932 (insn && (this_basic_block == n_basic_blocks - 1
3b413743 11933 || BLOCK_HEAD (this_basic_block + 1) != insn));
230d793d
RS
11934 insn = NEXT_INSN (insn))
11935 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
11936 && reg_overlap_mentioned_p (reg, PATTERN (insn)))
11937 {
11938 if (reg_referenced_p (reg, PATTERN (insn)))
11939 place = insn;
11940 break;
11941 }
6e2d1486
RK
11942 else if (GET_CODE (insn) == CALL_INSN
11943 && find_reg_fusage (insn, USE, reg))
11944 {
11945 place = insn;
11946 break;
11947 }
230d793d
RS
11948
11949 /* If we found a place to put the link, place it there unless there
11950 is already a link to the same insn as LINK at that point. */
11951
11952 if (place)
11953 {
11954 rtx link2;
11955
11956 for (link2 = LOG_LINKS (place); link2; link2 = XEXP (link2, 1))
11957 if (XEXP (link2, 0) == XEXP (link, 0))
11958 break;
11959
11960 if (link2 == 0)
11961 {
11962 XEXP (link, 1) = LOG_LINKS (place);
11963 LOG_LINKS (place) = link;
abe6e52f
RK
11964
11965 /* Set added_links_insn to the earliest insn we added a
11966 link to. */
11967 if (added_links_insn == 0
11968 || INSN_CUID (added_links_insn) > INSN_CUID (place))
11969 added_links_insn = place;
230d793d
RS
11970 }
11971 }
11972 }
11973}
11974\f
1427d6d2
RK
11975/* Compute INSN_CUID for INSN, which is an insn made by combine. */
11976
11977static int
11978insn_cuid (insn)
11979 rtx insn;
11980{
11981 while (insn != 0 && INSN_UID (insn) > max_uid_cuid
11982 && GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == USE)
11983 insn = NEXT_INSN (insn);
11984
11985 if (INSN_UID (insn) > max_uid_cuid)
11986 abort ();
11987
11988 return INSN_CUID (insn);
11989}
11990\f
230d793d
RS
11991void
11992dump_combine_stats (file)
11993 FILE *file;
11994{
ab87f8c8 11995 fnotice
230d793d
RS
11996 (file,
11997 ";; Combiner statistics: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n\n",
11998 combine_attempts, combine_merges, combine_extras, combine_successes);
11999}
12000
12001void
12002dump_combine_total_stats (file)
12003 FILE *file;
12004{
ab87f8c8 12005 fnotice
230d793d
RS
12006 (file,
12007 "\n;; Combiner totals: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n",
12008 total_attempts, total_merges, total_extras, total_successes);
12009}
This page took 2.460205 seconds and 5 git commands to generate.