/* Optimize by combining instructions for GNU compiler.
   Copyright (C) 1987, 88, 92-96, 1997 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */

/* This module is essentially the "combiner" phase of the U. of Arizona
   Portable Optimizer, but redone to work on our list-structured
   representation for RTL instead of their string representation.

   The LOG_LINKS of each insn identify the most recent assignment
   to each REG used in the insn.  It is a list of previous insns,
   each of which contains a SET for a REG that is used in this insn
   and not used or set in between.  LOG_LINKs never cross basic blocks.
   They were set up by the preceding pass (lifetime analysis).

   We try to combine each pair of insns joined by a logical link.
   We also try to combine triples of insns A, B and C when
   C has a link back to B and B has a link back to A.

   LOG_LINKS does not have links for uses of CC0; none are needed,
   because the insn that sets CC0 is always immediately before the insn
   that tests it.  So we always regard a branch insn as having a logical
   link to the preceding insn.  The same is true for an insn explicitly
   using CC0.

   We check (with use_crosses_set_p) to avoid combining in such a way
   as to move a computation to a place where its value would be different.

   Combination is done by mathematically substituting the previous
   insns' values for the regs they set into the expressions in
   the later insns that refer to these regs.  If the result is a valid insn
   for our target machine, according to the machine description,
   we install it, delete the earlier insns, and update the data flow
   information (LOG_LINKS and REG_NOTES) for what we did.

   There are a few exceptions where the dataflow information created by
   flow.c is not completely updated:

   - reg_live_length is not updated
   - reg_n_refs is not adjusted in the rare case when a register is
     no longer required in a computation
   - there are extremely rare cases (see distribute_notes) when a
     REG_DEAD note is lost
   - a LOG_LINKS entry that refers to an insn with multiple SETs may be
     removed because there is no way to know which register it was
     linking

   To simplify substitution, we combine only when the earlier insn(s)
   consist of only a single assignment.  To simplify updating afterward,
   we never combine when a subroutine call appears in the middle.

   Since we do not represent assignments to CC0 explicitly except when that
   is all an insn does, there is no LOG_LINKS entry in an insn that uses
   the condition code for the insn that set the condition code.
   Fortunately, these two insns must be consecutive.
   Therefore, every JUMP_INSN is taken to have an implicit logical link
   to the preceding insn.  This is not quite right, since non-jumps can
   also use the condition code; but in practice such insns would not
   combine anyway.  */

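/* A hypothetical illustration of the substitution (not from any
   particular machine description): given the linked pair

	(set (reg 100) (plus (reg 99) (const_int 4)))
	(set (mem (reg 100)) (const_int 0))

   where (reg 100) is used only in the second insn, substituting the
   first insn's SET_SRC yields

	(set (mem (plus (reg 99) (const_int 4))) (const_int 0))

   which replaces both insns if the target recognizes a store with this
   addressing form.  */
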
#include "config.h"
#ifdef __STDC__
#include <stdarg.h>
#else
#include <varargs.h>
#endif

/* Must precede rtl.h for FFS.  */
#include <stdio.h>

#include "rtl.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "expr.h"
#include "basic-block.h"
#include "insn-config.h"
#include "insn-flags.h"
#include "insn-codes.h"
#include "insn-attr.h"
#include "recog.h"
#include "real.h"

/* It is not safe to use ordinary gen_lowpart in combine.
   Use gen_lowpart_for_combine instead.  See comments there.  */
#define gen_lowpart dont_use_gen_lowpart_you_dummy
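/* (Since dont_use_gen_lowpart_you_dummy is never defined, any stray use
   of gen_lowpart in this file is caught at compile or link time.)  */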

/* Number of attempts to combine instructions in this function.  */

static int combine_attempts;

/* Number of attempts that got as far as substitution in this function.  */

static int combine_merges;

/* Number of instructions combined with added SETs in this function.  */

static int combine_extras;

/* Number of instructions combined in this function.  */

static int combine_successes;

/* Totals over entire compilation.  */

static int total_attempts, total_merges, total_extras, total_successes;

/* Define a default value for REVERSIBLE_CC_MODE.
   We can never assume that a condition code mode is safe to reverse unless
   the md tells us so.  */
#ifndef REVERSIBLE_CC_MODE
#define REVERSIBLE_CC_MODE(MODE) 0
#endif
\f
/* Vector mapping INSN_UIDs to cuids.
   The cuids are like uids but always increase monotonically.
   Combine always uses cuids so that it can compare them.
   But actually renumbering the uids, which we used to do,
   proves to be a bad idea because it makes it hard to compare
   the dumps produced by earlier passes with those from later passes.  */

static int *uid_cuid;
static int max_uid_cuid;

/* Get the cuid of an insn.  */

#define INSN_CUID(INSN) \
(INSN_UID (INSN) > max_uid_cuid ? insn_cuid (INSN) : uid_cuid[INSN_UID (INSN)])

/* Maximum register number, which is the size of the tables below.  */

static int combine_max_regno;

/* Record last point of death of (hard or pseudo) register n.  */

static rtx *reg_last_death;

/* Record last point of modification of (hard or pseudo) register n.  */

static rtx *reg_last_set;

/* Record the cuid of the last insn that invalidated memory
   (anything that writes memory, and subroutine calls, but not pushes).  */

static int mem_last_set;

/* Record the cuid of the last CALL_INSN
   so we can tell whether a potential combination crosses any calls.  */

static int last_call_cuid;

/* When `subst' is called, this is the insn that is being modified
   (by combining in a previous insn).  The PATTERN of this insn
   is still the old pattern partially modified and it should not be
   looked at, but this may be used to examine the successors of the insn
   to judge whether a simplification is valid.  */

static rtx subst_insn;

/* This is an insn that belongs before subst_insn, but is not currently
   on the insn chain.  */

static rtx subst_prev_insn;

/* This is the lowest CUID that `subst' is currently dealing with.
   get_last_value will not return a value if the register was set at or
   after this CUID.  If not for this mechanism, we could get confused if
   I2 or I1 in try_combine were an insn that used the old value of a register
   to obtain a new value.  In that case, we might erroneously get the
   new value of the register when we wanted the old one.  */

static int subst_low_cuid;

/* This contains any hard registers that are used in newpat; reg_dead_at_p
   must consider all these registers to be always live.  */

static HARD_REG_SET newpat_used_regs;

/* This is an insn to which a LOG_LINKS entry has been added.  If this
   insn is earlier than I2 or I3, combine should rescan starting at
   that location.  */

static rtx added_links_insn;

/* Basic block number of the block in which we are performing combines.  */
static int this_basic_block;
\f
/* The next group of arrays allows the recording of the last value assigned
   to (hard or pseudo) register n.  We use this information to see if an
   operation being processed is redundant given a prior operation performed
   on the register.  For example, an `and' with a constant is redundant if
   all the zero bits are already known to be turned off.

   We use an approach similar to that used by cse, but change it in the
   following ways:

   (1) We do not want to reinitialize at each label.
   (2) It is useful, but not critical, to know the actual value assigned
       to a register.  Often just its form is helpful.

   Therefore, we maintain the following arrays:

   reg_last_set_value        the last value assigned
   reg_last_set_label        records the value of label_tick when the
                             register was assigned
   reg_last_set_table_tick   records the value of label_tick when a
                             value using the register is assigned
   reg_last_set_invalid      set to non-zero when it is not valid
                             to use the value of this register in some
                             register's value

   To understand the usage of these tables, it is important to understand
   the distinction between the value in reg_last_set_value being valid
   and the register being validly contained in some other expression in the
   table.

   Entry I in reg_last_set_value is valid if it is non-zero, and either
   reg_n_sets[i] is 1 or reg_last_set_label[i] == label_tick.

   Register I may validly appear in any expression returned for the value
   of another register if reg_n_sets[i] is 1.  It may also appear in the
   value for register J if reg_last_set_label[i] < reg_last_set_label[j] or
   reg_last_set_invalid[j] is zero.

   If an expression is found in the table containing a register which may
   not validly appear in an expression, the register is replaced by
   something that won't match, (clobber (const_int 0)).

   reg_last_set_invalid[i] is set non-zero when register I is being assigned
   to and reg_last_set_table_tick[i] == label_tick.  */

247
0f41302f 248/* Record last value assigned to (hard or pseudo) register n. */
230d793d
RS
249
250static rtx *reg_last_set_value;
251
252/* Record the value of label_tick when the value for register n is placed in
253 reg_last_set_value[n]. */
254
568356af 255static int *reg_last_set_label;
230d793d
RS
256
257/* Record the value of label_tick when an expression involving register n
0f41302f 258 is placed in reg_last_set_value. */
230d793d 259
568356af 260static int *reg_last_set_table_tick;
230d793d
RS
261
262/* Set non-zero if references to register n in expressions should not be
263 used. */
264
265static char *reg_last_set_invalid;
266
0f41302f 267/* Incremented for each label. */
230d793d 268
568356af 269static int label_tick;
230d793d
RS
270
/* Some registers that are set more than once and used in more than one
   basic block are nevertheless always set in similar ways.  For example,
   a QImode register may be loaded from memory in two places on a machine
   where byte loads zero extend.

   We record in the following array what we know about the nonzero
   bits of a register, specifically which bits are known to be zero.

   If an entry is zero, it means that we don't know anything special.  */

static unsigned HOST_WIDE_INT *reg_nonzero_bits;
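
/* For instance (an illustrative sketch): if the only set of pseudo 30 is
   (set (reg:SI 30) (and:SI (reg:SI 29) (const_int 255))), then
   reg_nonzero_bits[30] has at most the low eight bits set, recording
   that all higher bits of the register are known to be zero.  */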

/* Mode used when computing reg_nonzero_bits.  It is the largest
   integer mode that can fit in HOST_BITS_PER_WIDE_INT.  */

static enum machine_mode nonzero_bits_mode;

/* Nonzero if we know that a register has some leading bits that are always
   equal to the sign bit.  */

static char *reg_sign_bit_copies;

/* Nonzero when reg_nonzero_bits and reg_sign_bit_copies can be safely used.
   It is zero while computing them and after combine has completed.  The
   former case prevents propagating values based on previously set values,
   which can be incorrect if a variable is modified in a loop.  */

static int nonzero_sign_valid;

/* These arrays are maintained in parallel with reg_last_set_value
   and are used to store the mode in which the register was last set,
   the bits that were known to be zero when it was last set, and the
   number of sign bit copies it was known to have when it was last set.  */

static enum machine_mode *reg_last_set_mode;
static unsigned HOST_WIDE_INT *reg_last_set_nonzero_bits;
static char *reg_last_set_sign_bit_copies;
\f
/* Record one modification to rtl structure
   to be undone by storing old_contents into *where.
   is_int is 1 if the contents are an int.  */

struct undo
{
  struct undo *next;
  int is_int;
  union {rtx r; int i;} old_contents;
  union {rtx *r; int *i;} where;
};

/* Record a bunch of changes to be undone, as a chain of `struct undo'
   hanging off undos; structures no longer in use are kept on the frees
   chain for reuse.

   storage is nonzero if we must undo the allocation of new storage.
   The value of storage is what to pass to obfree.

   other_insn is nonzero if we have modified some other insn in the process
   of working on subst_insn.  It must be verified too.

   previous_undos is the value of undobuf.undos when we started processing
   this substitution.  This will prevent gen_rtx_combine from re-using a
   piece from the previous expression.  Doing so can produce circular rtl
   structures.  */

struct undobuf
{
  char *storage;
  struct undo *undos;
  struct undo *frees;
  struct undo *previous_undos;
  rtx other_insn;
};

static struct undobuf undobuf;

/* Substitute NEWVAL, an rtx expression, into INTO, a place in some
   insn.  The substitution can be undone by undo_all.  If INTO is already
   set to NEWVAL, do not record this change.  Because computing NEWVAL might
   also call SUBST, we have to compute it before we put anything into
   the undo table.  */

#define SUBST(INTO, NEWVAL)  \
 do { rtx _new = (NEWVAL);                                      \
      struct undo *_buf;                                        \
                                                                \
      if (undobuf.frees)                                        \
        _buf = undobuf.frees, undobuf.frees = _buf->next;       \
      else                                                      \
        _buf = (struct undo *) xmalloc (sizeof (struct undo));  \
                                                                \
      _buf->is_int = 0;                                         \
      _buf->where.r = &INTO;                                    \
      _buf->old_contents.r = INTO;                              \
      INTO = _new;                                              \
      if (_buf->old_contents.r == INTO)                         \
        _buf->next = undobuf.frees, undobuf.frees = _buf;       \
      else                                                      \
        _buf->next = undobuf.undos, undobuf.undos = _buf;       \
    } while (0)

/* Similar to SUBST, but NEWVAL is an int expression.  Note that
   substituting for the value of a HOST_WIDE_INT (including a CONST_INT)
   is not safe.  */

#define SUBST_INT(INTO, NEWVAL)  \
 do { struct undo *_buf;                                        \
                                                                \
      if (undobuf.frees)                                        \
        _buf = undobuf.frees, undobuf.frees = _buf->next;       \
      else                                                      \
        _buf = (struct undo *) xmalloc (sizeof (struct undo));  \
                                                                \
      _buf->is_int = 1;                                         \
      _buf->where.i = (int *) &INTO;                            \
      _buf->old_contents.i = INTO;                              \
      INTO = NEWVAL;                                            \
      if (_buf->old_contents.i == INTO)                         \
        _buf->next = undobuf.frees, undobuf.frees = _buf;       \
      else                                                      \
        _buf->next = undobuf.undos, undobuf.undos = _buf;       \
    } while (0)
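
/* A typical use of these macros (an illustrative sketch mirroring calls
   made later in this file) is

	SUBST (SET_SRC (set), new_src);

   which saves the old SET_SRC of SET in the undo buffer and installs
   NEW_SRC in its place; undo_all can then restore the original rtl if
   the tentative combination is rejected.  */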

/* Number of times the pseudo being substituted for
   was found and replaced.  */

static int n_occurrences;

static void init_reg_last_arrays PROTO((void));
static void setup_incoming_promotions PROTO((void));
static void set_nonzero_bits_and_sign_copies PROTO((rtx, rtx));
static int can_combine_p PROTO((rtx, rtx, rtx, rtx, rtx *, rtx *));
static int combinable_i3pat PROTO((rtx, rtx *, rtx, rtx, int, rtx *));
static rtx try_combine PROTO((rtx, rtx, rtx));
static void undo_all PROTO((void));
static rtx *find_split_point PROTO((rtx *, rtx));
static rtx subst PROTO((rtx, rtx, rtx, int, int));
static rtx simplify_rtx PROTO((rtx, enum machine_mode, int, int));
static rtx simplify_if_then_else PROTO((rtx));
static rtx simplify_set PROTO((rtx));
static rtx simplify_logical PROTO((rtx, int));
static rtx expand_compound_operation PROTO((rtx));
static rtx expand_field_assignment PROTO((rtx));
static rtx make_extraction PROTO((enum machine_mode, rtx, int, rtx, int,
				  int, int, int));
static rtx extract_left_shift PROTO((rtx, int));
static rtx make_compound_operation PROTO((rtx, enum rtx_code));
static int get_pos_from_mask PROTO((unsigned HOST_WIDE_INT, int *));
static rtx force_to_mode PROTO((rtx, enum machine_mode,
				unsigned HOST_WIDE_INT, rtx, int));
static rtx if_then_else_cond PROTO((rtx, rtx *, rtx *));
static rtx known_cond PROTO((rtx, enum rtx_code, rtx, rtx));
static int rtx_equal_for_field_assignment_p PROTO((rtx, rtx));
static rtx make_field_assignment PROTO((rtx));
static rtx apply_distributive_law PROTO((rtx));
static rtx simplify_and_const_int PROTO((rtx, enum machine_mode, rtx,
					 unsigned HOST_WIDE_INT));
static unsigned HOST_WIDE_INT nonzero_bits PROTO((rtx, enum machine_mode));
static int num_sign_bit_copies PROTO((rtx, enum machine_mode));
static int merge_outer_ops PROTO((enum rtx_code *, HOST_WIDE_INT *,
				  enum rtx_code, HOST_WIDE_INT,
				  enum machine_mode, int *));
static rtx simplify_shift_const PROTO((rtx, enum rtx_code, enum machine_mode,
				       rtx, int));
static int recog_for_combine PROTO((rtx *, rtx, rtx *, int *));
static rtx gen_lowpart_for_combine PROTO((enum machine_mode, rtx));
static rtx gen_rtx_combine PVPROTO((enum rtx_code code, enum machine_mode mode,
				    ...));
static rtx gen_binary PROTO((enum rtx_code, enum machine_mode,
			     rtx, rtx));
static rtx gen_unary PROTO((enum rtx_code, enum machine_mode,
			    enum machine_mode, rtx));
static enum rtx_code simplify_comparison PROTO((enum rtx_code, rtx *, rtx *));
static int reversible_comparison_p PROTO((rtx));
static void update_table_tick PROTO((rtx));
static void record_value_for_reg PROTO((rtx, rtx, rtx));
static void record_dead_and_set_regs_1 PROTO((rtx, rtx));
static void record_dead_and_set_regs PROTO((rtx));
static int get_last_value_validate PROTO((rtx *, int, int));
static rtx get_last_value PROTO((rtx));
static int use_crosses_set_p PROTO((rtx, int));
static void reg_dead_at_p_1 PROTO((rtx, rtx));
static int reg_dead_at_p PROTO((rtx, rtx));
static void move_deaths PROTO((rtx, rtx, int, rtx, rtx *));
static int reg_bitfield_target_p PROTO((rtx, rtx));
static void distribute_notes PROTO((rtx, rtx, rtx, rtx, rtx, rtx));
static void distribute_links PROTO((rtx));
static void mark_used_regs_combine PROTO((rtx));
static int insn_cuid PROTO((rtx));
\f
/* Main entry point for combiner.  F is the first insn of the function.
   NREGS is the first unused pseudo-reg number.  */

void
combine_instructions (f, nregs)
     rtx f;
     int nregs;
{
  register rtx insn, next, prev;
  register int i;
  register rtx links, nextlinks;

  combine_attempts = 0;
  combine_merges = 0;
  combine_extras = 0;
  combine_successes = 0;
  undobuf.undos = undobuf.previous_undos = 0;

  combine_max_regno = nregs;

  reg_nonzero_bits
    = (unsigned HOST_WIDE_INT *) alloca (nregs * sizeof (HOST_WIDE_INT));
  reg_sign_bit_copies = (char *) alloca (nregs * sizeof (char));

  bzero ((char *) reg_nonzero_bits, nregs * sizeof (HOST_WIDE_INT));
  bzero (reg_sign_bit_copies, nregs * sizeof (char));

  reg_last_death = (rtx *) alloca (nregs * sizeof (rtx));
  reg_last_set = (rtx *) alloca (nregs * sizeof (rtx));
  reg_last_set_value = (rtx *) alloca (nregs * sizeof (rtx));
  reg_last_set_table_tick = (int *) alloca (nregs * sizeof (int));
  reg_last_set_label = (int *) alloca (nregs * sizeof (int));
  reg_last_set_invalid = (char *) alloca (nregs * sizeof (char));
  reg_last_set_mode
    = (enum machine_mode *) alloca (nregs * sizeof (enum machine_mode));
  reg_last_set_nonzero_bits
    = (unsigned HOST_WIDE_INT *) alloca (nregs * sizeof (HOST_WIDE_INT));
  reg_last_set_sign_bit_copies
    = (char *) alloca (nregs * sizeof (char));

  init_reg_last_arrays ();

  init_recog_no_volatile ();

  /* Compute maximum uid value so uid_cuid can be allocated.  */

  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    if (INSN_UID (insn) > i)
      i = INSN_UID (insn);

  uid_cuid = (int *) alloca ((i + 1) * sizeof (int));
  max_uid_cuid = i;

  nonzero_bits_mode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);

  /* Don't use reg_nonzero_bits when computing it.  This can cause problems
     when, for example, we have j <<= 1 in a loop.  */

  nonzero_sign_valid = 0;

  /* Compute the mapping from uids to cuids.
     Cuids are numbers assigned to insns, like uids,
     except that cuids increase monotonically through the code.

     Scan all SETs and see if we can deduce anything about what
     bits are known to be zero for some registers and how many copies
     of the sign bit are known to exist for those registers.

     Also set any known values so that we can use them while searching
     for what bits are known to be set.  */

  label_tick = 1;

  /* We need to initialize it here, because record_dead_and_set_regs may call
     get_last_value.  */
  subst_prev_insn = NULL_RTX;

  setup_incoming_promotions ();

  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    {
      uid_cuid[INSN_UID (insn)] = ++i;
      subst_low_cuid = i;
      subst_insn = insn;

      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	{
	  note_stores (PATTERN (insn), set_nonzero_bits_and_sign_copies);
	  record_dead_and_set_regs (insn);

#ifdef AUTO_INC_DEC
	  for (links = REG_NOTES (insn); links; links = XEXP (links, 1))
	    if (REG_NOTE_KIND (links) == REG_INC)
	      set_nonzero_bits_and_sign_copies (XEXP (links, 0), NULL_RTX);
#endif
	}

      if (GET_CODE (insn) == CODE_LABEL)
	label_tick++;
    }

  nonzero_sign_valid = 1;

  /* Now scan all the insns in forward order.  */

  this_basic_block = -1;
  label_tick = 1;
  last_call_cuid = 0;
  mem_last_set = 0;
  init_reg_last_arrays ();
  setup_incoming_promotions ();

  for (insn = f; insn; insn = next ? next : NEXT_INSN (insn))
    {
      next = 0;

      /* If INSN starts a new basic block, update our basic block number.  */
      if (this_basic_block + 1 < n_basic_blocks
	  && basic_block_head[this_basic_block + 1] == insn)
	this_basic_block++;

      if (GET_CODE (insn) == CODE_LABEL)
	label_tick++;

      else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	{
	  /* Try this insn with each insn it links back to.  */

	  for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
	    if ((next = try_combine (insn, XEXP (links, 0), NULL_RTX)) != 0)
	      goto retry;

	  /* Try each sequence of three linked insns ending with this one.  */

	  for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
	    for (nextlinks = LOG_LINKS (XEXP (links, 0)); nextlinks;
		 nextlinks = XEXP (nextlinks, 1))
	      if ((next = try_combine (insn, XEXP (links, 0),
				       XEXP (nextlinks, 0))) != 0)
		goto retry;

#ifdef HAVE_cc0
	  /* Try to combine a jump insn that uses CC0
	     with a preceding insn that sets CC0, and maybe with its
	     logical predecessor as well.
	     This is how we make decrement-and-branch insns.
	     We need this special code because data flow connections
	     via CC0 do not get entered in LOG_LINKS.  */

	  if (GET_CODE (insn) == JUMP_INSN
	      && (prev = prev_nonnote_insn (insn)) != 0
	      && GET_CODE (prev) == INSN
	      && sets_cc0_p (PATTERN (prev)))
	    {
	      if ((next = try_combine (insn, prev, NULL_RTX)) != 0)
		goto retry;

	      for (nextlinks = LOG_LINKS (prev); nextlinks;
		   nextlinks = XEXP (nextlinks, 1))
		if ((next = try_combine (insn, prev,
					 XEXP (nextlinks, 0))) != 0)
		  goto retry;
	    }

	  /* Do the same for an insn that explicitly references CC0.  */
	  if (GET_CODE (insn) == INSN
	      && (prev = prev_nonnote_insn (insn)) != 0
	      && GET_CODE (prev) == INSN
	      && sets_cc0_p (PATTERN (prev))
	      && GET_CODE (PATTERN (insn)) == SET
	      && reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (insn))))
	    {
	      if ((next = try_combine (insn, prev, NULL_RTX)) != 0)
		goto retry;

	      for (nextlinks = LOG_LINKS (prev); nextlinks;
		   nextlinks = XEXP (nextlinks, 1))
		if ((next = try_combine (insn, prev,
					 XEXP (nextlinks, 0))) != 0)
		  goto retry;
	    }

	  /* Finally, see if any of the insns that this insn links to
	     explicitly references CC0.  If so, try this insn, that insn,
	     and its predecessor if it sets CC0.  */
	  for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
	    if (GET_CODE (XEXP (links, 0)) == INSN
		&& GET_CODE (PATTERN (XEXP (links, 0))) == SET
		&& reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (XEXP (links, 0))))
		&& (prev = prev_nonnote_insn (XEXP (links, 0))) != 0
		&& GET_CODE (prev) == INSN
		&& sets_cc0_p (PATTERN (prev))
		&& (next = try_combine (insn, XEXP (links, 0), prev)) != 0)
	      goto retry;
#endif

	  /* Try combining an insn with two different insns whose results it
	     uses.  */
	  for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
	    for (nextlinks = XEXP (links, 1); nextlinks;
		 nextlinks = XEXP (nextlinks, 1))
	      if ((next = try_combine (insn, XEXP (links, 0),
				       XEXP (nextlinks, 0))) != 0)
		goto retry;

	  if (GET_CODE (insn) != NOTE)
	    record_dead_and_set_regs (insn);

	retry:
	  ;
	}
    }

  total_attempts += combine_attempts;
  total_merges += combine_merges;
  total_extras += combine_extras;
  total_successes += combine_successes;

  nonzero_sign_valid = 0;
}

/* Wipe the reg_last_xxx arrays in preparation for another pass.  */

static void
init_reg_last_arrays ()
{
  int nregs = combine_max_regno;

  bzero ((char *) reg_last_death, nregs * sizeof (rtx));
  bzero ((char *) reg_last_set, nregs * sizeof (rtx));
  bzero ((char *) reg_last_set_value, nregs * sizeof (rtx));
  bzero ((char *) reg_last_set_table_tick, nregs * sizeof (int));
  bzero ((char *) reg_last_set_label, nregs * sizeof (int));
  bzero (reg_last_set_invalid, nregs * sizeof (char));
  bzero ((char *) reg_last_set_mode, nregs * sizeof (enum machine_mode));
  bzero ((char *) reg_last_set_nonzero_bits, nregs * sizeof (HOST_WIDE_INT));
  bzero (reg_last_set_sign_bit_copies, nregs * sizeof (char));
}
\f
/* Set up any promoted values for incoming argument registers.  */

static void
setup_incoming_promotions ()
{
#ifdef PROMOTE_FUNCTION_ARGS
  int regno;
  rtx reg;
  enum machine_mode mode;
  int unsignedp;
  rtx first = get_insns ();

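  /* Each argument register that arrives promoted is recorded as a
     SIGN_EXTEND or ZERO_EXTEND of an unknown quantity; as elsewhere in
     this file, (clobber (const_int 0)) stands for that unknown value.  */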
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if (FUNCTION_ARG_REGNO_P (regno)
	&& (reg = promoted_input_arg (regno, &mode, &unsignedp)) != 0)
      record_value_for_reg (reg, first,
			    gen_rtx (unsignedp ? ZERO_EXTEND : SIGN_EXTEND,
				     GET_MODE (reg),
				     gen_rtx (CLOBBER, mode, const0_rtx)));
#endif
}
\f
/* Called via note_stores.  If X is a pseudo that is narrower than
   HOST_BITS_PER_WIDE_INT and is being set, record what bits are known zero.

   If we are setting only a portion of X and we can't figure out what
   portion, assume all bits will be used since we don't know what will
   be happening.

   Similarly, set how many bits of X are known to be copies of the sign bit
   at all locations in the function.  This is the smallest number implied
   by any set of X.  */

static void
set_nonzero_bits_and_sign_copies (x, set)
     rtx x;
     rtx set;
{
  int num;

  if (GET_CODE (x) == REG
      && REGNO (x) >= FIRST_PSEUDO_REGISTER
      /* If this register is undefined at the start of the function, we can't
	 say what its contents were.  */
      && ! (basic_block_live_at_start[0][REGNO (x) / REGSET_ELT_BITS]
	    & ((REGSET_ELT_TYPE) 1 << (REGNO (x) % REGSET_ELT_BITS)))
      && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (set == 0 || GET_CODE (set) == CLOBBER)
	{
	  reg_nonzero_bits[REGNO (x)] = GET_MODE_MASK (GET_MODE (x));
	  reg_sign_bit_copies[REGNO (x)] = 1;
	  return;
	}

      /* If this is a complex assignment, see if we can convert it into a
	 simple assignment.  */
      set = expand_field_assignment (set);

      /* If this is a simple assignment, or we have a paradoxical SUBREG,
	 set what we know about X.  */

      if (SET_DEST (set) == x
	  || (GET_CODE (SET_DEST (set)) == SUBREG
	      && (GET_MODE_SIZE (GET_MODE (SET_DEST (set)))
		  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (set)))))
	      && SUBREG_REG (SET_DEST (set)) == x))
	{
	  rtx src = SET_SRC (set);

#ifdef SHORT_IMMEDIATES_SIGN_EXTEND
	  /* If X is narrower than a word and SRC is a non-negative
	     constant that would appear negative in the mode of X,
	     sign-extend it for use in reg_nonzero_bits because some
	     machines (maybe most) will actually do the sign-extension
	     and this is the conservative approach.

	     ??? For 2.5, try to tighten up the MD files in this regard
	     instead of this kludge.  */

	  if (GET_MODE_BITSIZE (GET_MODE (x)) < BITS_PER_WORD
	      && GET_CODE (src) == CONST_INT
	      && INTVAL (src) > 0
	      && 0 != (INTVAL (src)
		       & ((HOST_WIDE_INT) 1
			  << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
	    src = GEN_INT (INTVAL (src)
			   | ((HOST_WIDE_INT) (-1)
			      << GET_MODE_BITSIZE (GET_MODE (x))));
#endif

	  reg_nonzero_bits[REGNO (x)]
	    |= nonzero_bits (src, nonzero_bits_mode);
	  num = num_sign_bit_copies (SET_SRC (set), GET_MODE (x));
	  if (reg_sign_bit_copies[REGNO (x)] == 0
	      || reg_sign_bit_copies[REGNO (x)] > num)
	    reg_sign_bit_copies[REGNO (x)] = num;
	}
      else
	{
	  reg_nonzero_bits[REGNO (x)] = GET_MODE_MASK (GET_MODE (x));
	  reg_sign_bit_copies[REGNO (x)] = 1;
	}
    }
}
\f
/* See if INSN can be combined into I3.  PRED and SUCC are optionally
   insns that were previously combined into I3 or that will be combined
   into the merger of INSN and I3.

   Return 0 if the combination is not allowed for any reason.

   If the combination is allowed, *PDEST will be set to the single
   destination of INSN and *PSRC to the single source, and this function
   will return 1.  */

static int
can_combine_p (insn, i3, pred, succ, pdest, psrc)
     rtx insn;
     rtx i3;
     rtx pred, succ;
     rtx *pdest, *psrc;
{
  int i;
  rtx set = 0, src, dest;
  rtx p, link;
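  /* Nonzero if INSN, SUCC (when it exists) and I3 are consecutive active
     insns, with nothing else between them.  */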
  int all_adjacent = (succ ? (next_active_insn (insn) == succ
			      && next_active_insn (succ) == i3)
		      : next_active_insn (insn) == i3);

  /* Can combine only if previous insn is a SET of a REG, a SUBREG or CC0,
     or a PARALLEL consisting of such a SET and CLOBBERs.

     If INSN has CLOBBER parallel parts, ignore them for our processing.
     By definition, these happen during the execution of the insn.  When it
     is merged with another insn, all bets are off.  If they are, in fact,
     needed and aren't also supplied in I3, they may be added by
     recog_for_combine.  Otherwise, it won't match.

     We can also ignore a SET whose SET_DEST is mentioned in a REG_UNUSED
     note.

     Get the source and destination of INSN.  If more than one, can't
     combine.  */

  if (GET_CODE (PATTERN (insn)) == SET)
    set = PATTERN (insn);
  else if (GET_CODE (PATTERN (insn)) == PARALLEL
	   && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
    {
      for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
	{
	  rtx elt = XVECEXP (PATTERN (insn), 0, i);

	  switch (GET_CODE (elt))
	    {
	    /* We can ignore CLOBBERs.  */
	    case CLOBBER:
	      break;

	    case SET:
	      /* Ignore SETs whose result isn't used but not those that
		 have side-effects.  */
	      if (find_reg_note (insn, REG_UNUSED, SET_DEST (elt))
		  && ! side_effects_p (elt))
		break;

	      /* If we have already found a SET, this is a second one and
		 so we cannot combine with this insn.  */
	      if (set)
		return 0;

	      set = elt;
	      break;

	    default:
	      /* Anything else means we can't combine.  */
	      return 0;
	    }
	}

      if (set == 0
	  /* If SET_SRC is an ASM_OPERANDS we can't throw away these CLOBBERs,
	     so don't do anything with it.  */
	  || GET_CODE (SET_SRC (set)) == ASM_OPERANDS)
	return 0;
    }
  else
    return 0;

  if (set == 0)
    return 0;

  set = expand_field_assignment (set);
  src = SET_SRC (set), dest = SET_DEST (set);

  /* Don't eliminate a store in the stack pointer.  */
  if (dest == stack_pointer_rtx
      /* If we couldn't eliminate a field assignment, we can't combine.  */
      || GET_CODE (dest) == ZERO_EXTRACT || GET_CODE (dest) == STRICT_LOW_PART
      /* Don't combine with an insn that sets a register to itself if it has
	 a REG_EQUAL note.  This may be part of a REG_NO_CONFLICT sequence.  */
      || (rtx_equal_p (src, dest) && find_reg_note (insn, REG_EQUAL, NULL_RTX))
      /* Can't merge a function call.  */
      || GET_CODE (src) == CALL
      /* Don't eliminate a function call argument.  */
      || (GET_CODE (i3) == CALL_INSN
	  && (find_reg_fusage (i3, USE, dest)
	      || (GET_CODE (dest) == REG
		  && REGNO (dest) < FIRST_PSEUDO_REGISTER
		  && global_regs[REGNO (dest)])))
      /* Don't substitute into an incremented register.  */
      || FIND_REG_INC_NOTE (i3, dest)
      || (succ && FIND_REG_INC_NOTE (succ, dest))
      /* Don't combine the end of a libcall into anything.  */
      || find_reg_note (insn, REG_RETVAL, NULL_RTX)
      /* Make sure that DEST is not used after SUCC but before I3.  */
      || (succ && ! all_adjacent
	  && reg_used_between_p (dest, succ, i3))
      /* Make sure that the value that is to be substituted for the register
	 does not use any registers whose values alter in between.  However,
	 if the insns are adjacent, a use can't cross a set even though we
	 think it might (this can happen for a sequence of insns each setting
	 the same destination; reg_last_set of that register might point to
	 a NOTE).  If INSN has a REG_EQUIV note, the register is always
	 equivalent to the memory so the substitution is valid even if there
	 are intervening stores.  Also, don't move a volatile asm or
	 UNSPEC_VOLATILE across any other insns.  */
      || (! all_adjacent
	  && (((GET_CODE (src) != MEM
		|| ! find_reg_note (insn, REG_EQUIV, src))
	       && use_crosses_set_p (src, INSN_CUID (insn)))
	      || (GET_CODE (src) == ASM_OPERANDS && MEM_VOLATILE_P (src))
	      || GET_CODE (src) == UNSPEC_VOLATILE))
      /* If there is a REG_NO_CONFLICT note for DEST in I3 or SUCC, we get
	 better register allocation by not doing the combine.  */
      || find_reg_note (i3, REG_NO_CONFLICT, dest)
      || (succ && find_reg_note (succ, REG_NO_CONFLICT, dest))
      /* Don't combine across a CALL_INSN, because that would possibly
	 change whether the life span of some REGs crosses calls or not,
	 and it is a pain to update that information.
	 Exception: if source is a constant, moving it later can't hurt.
	 Accept that special case, because it helps -fforce-addr a lot.  */
      || (INSN_CUID (insn) < last_call_cuid && ! CONSTANT_P (src)))
    return 0;

  /* DEST must either be a REG or CC0.  */
  if (GET_CODE (dest) == REG)
    {
      /* If register alignment is being enforced for multi-word items in all
	 cases except for parameters, it is possible to have a register copy
	 insn referencing a hard register that is not allowed to contain the
	 mode being copied and which would not be valid as an operand of most
	 insns.  Eliminate this problem by not combining with such an insn.

	 Also, on some machines we don't want to extend the life of a hard
	 register.  */

      if (GET_CODE (src) == REG
	  && ((REGNO (dest) < FIRST_PSEUDO_REGISTER
	       && ! HARD_REGNO_MODE_OK (REGNO (dest), GET_MODE (dest)))
	      /* Don't extend the life of a hard register unless it is
		 a user variable (if we have few registers) or it can't
		 fit into the desired register (meaning something special
		 is going on).
		 Also avoid substituting a return register into I3, because
		 reload can't handle a conflict with constraints of other
		 inputs.  */
	      || (REGNO (src) < FIRST_PSEUDO_REGISTER
		  && (! HARD_REGNO_MODE_OK (REGNO (src), GET_MODE (src))
#ifdef SMALL_REGISTER_CLASSES
		      || (SMALL_REGISTER_CLASSES
			  && ((! all_adjacent && ! REG_USERVAR_P (src))
			      || (FUNCTION_VALUE_REGNO_P (REGNO (src))
				  && ! REG_USERVAR_P (src))))
#endif
		      ))))
	return 0;
    }
  else if (GET_CODE (dest) != CC0)
    return 0;

  /* Don't substitute for a register intended as a clobberable operand.
     Similarly, don't substitute an expression containing a register that
     will be clobbered in I3.  */
  if (GET_CODE (PATTERN (i3)) == PARALLEL)
    for (i = XVECLEN (PATTERN (i3), 0) - 1; i >= 0; i--)
      if (GET_CODE (XVECEXP (PATTERN (i3), 0, i)) == CLOBBER
	  && (reg_overlap_mentioned_p (XEXP (XVECEXP (PATTERN (i3), 0, i), 0),
				       src)
	      || rtx_equal_p (XEXP (XVECEXP (PATTERN (i3), 0, i), 0), dest)))
	return 0;

  /* If INSN contains anything volatile, or is an `asm' (whether volatile
     or not), reject, unless nothing volatile comes between it and I3,
     with the exception of SUCC.  */

  if (GET_CODE (src) == ASM_OPERANDS || volatile_refs_p (src))
    for (p = NEXT_INSN (insn); p != i3; p = NEXT_INSN (p))
      if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
	  && p != succ && volatile_refs_p (PATTERN (p)))
	return 0;

  /* If there are any volatile insns between INSN and I3, reject, because
     they might affect machine state.  */

  for (p = NEXT_INSN (insn); p != i3; p = NEXT_INSN (p))
    if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
	&& p != succ && volatile_insn_p (PATTERN (p)))
      return 0;

  /* If INSN or I2 contains an autoincrement or autodecrement,
     make sure that register is not used between there and I3,
     and not already used in I3 either.
     Also insist that I3 not be a jump; if it were one
     and the incremented register were spilled, we would lose.  */

#ifdef AUTO_INC_DEC
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == REG_INC
	&& (GET_CODE (i3) == JUMP_INSN
	    || reg_used_between_p (XEXP (link, 0), insn, i3)
	    || reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i3))))
      return 0;
#endif

#ifdef HAVE_cc0
  /* Don't combine an insn that follows a CC0-setting insn.
     An insn that uses CC0 must not be separated from the one that sets it.
     We do, however, allow I2 to follow a CC0-setting insn if that insn
     is passed as I1; in that case it will be deleted also.
     We also allow combining in this case if all the insns are adjacent
     because that would leave the two CC0 insns adjacent as well.
     It would be more logical to test whether CC0 occurs inside I1 or I2,
     but that would be much slower, and this ought to be equivalent.  */

  p = prev_nonnote_insn (insn);
  if (p && p != pred && GET_CODE (p) == INSN && sets_cc0_p (PATTERN (p))
      && ! all_adjacent)
    return 0;
#endif

  /* If we get here, we have passed all the tests and the combination is
     to be allowed.  */

  *pdest = dest;
  *psrc = src;

  return 1;
}
\f
/* LOC is the location within I3 that contains its pattern or the component
   of a PARALLEL of the pattern.  We validate that it is valid for combining.

   One problem is if I3 modifies its output, as opposed to replacing it
   entirely, we can't allow the output to contain I2DEST or I1DEST as doing
   so would produce an insn that is not equivalent to the original insns.

   Consider:

	 (set (reg:DI 101) (reg:DI 100))
	 (set (subreg:SI (reg:DI 101) 0) <foo>)

   This is NOT equivalent to:

	 (parallel [(set (subreg:SI (reg:DI 100) 0) <foo>)
		    (set (reg:DI 101) (reg:DI 100))])

   Not only does this modify 100 (in which case it might still be valid
   if 100 were dead in I2), it sets 101 to the ORIGINAL value of 100.

   We can also run into a problem if I2 sets a register that I1
   uses and I1 gets directly substituted into I3 (not via I2).  In that
   case, we would be getting the wrong value of I2DEST into I3, so we
   must reject the combination.  This case occurs when I2 and I1 both
   feed into I3, rather than when I1 feeds into I2, which feeds into I3.
   If I1_NOT_IN_SRC is non-zero, it means that finding I1 in the source
   of a SET must prevent combination from occurring.

   On machines where SMALL_REGISTER_CLASSES is defined, we don't combine
   if the destination of a SET is a hard register that isn't a user
   variable.

   Before doing the above check, we first try to expand a field assignment
   into a set of logical operations.

   If PI3_DEST_KILLED is non-zero, it is a pointer to a location in which
   we place a register that is both set and used within I3.  If more than one
   such register is detected, we fail.

   Return 1 if the combination is valid, zero otherwise.  */

static int
combinable_i3pat (i3, loc, i2dest, i1dest, i1_not_in_src, pi3dest_killed)
     rtx i3;
     rtx *loc;
     rtx i2dest;
     rtx i1dest;
     int i1_not_in_src;
     rtx *pi3dest_killed;
{
  rtx x = *loc;

  if (GET_CODE (x) == SET)
    {
      rtx set = expand_field_assignment (x);
      rtx dest = SET_DEST (set);
      rtx src = SET_SRC (set);
      rtx inner_dest = dest, inner_src = src;

      SUBST (*loc, set);

      while (GET_CODE (inner_dest) == STRICT_LOW_PART
	     || GET_CODE (inner_dest) == SUBREG
	     || GET_CODE (inner_dest) == ZERO_EXTRACT)
	inner_dest = XEXP (inner_dest, 0);

      /* We probably don't need this any more now that LIMIT_RELOAD_CLASS
	 was added.  */
#if 0
      while (GET_CODE (inner_src) == STRICT_LOW_PART
	     || GET_CODE (inner_src) == SUBREG
	     || GET_CODE (inner_src) == ZERO_EXTRACT)
	inner_src = XEXP (inner_src, 0);

      /* If it is better that two different modes keep two different pseudos,
	 avoid combining them.  This avoids producing the following pattern
	 on a 386:
	  (set (subreg:SI (reg/v:QI 21) 0)
	       (lshiftrt:SI (reg/v:SI 20)
		   (const_int 24)))
	 If that were made, reload could not handle the pair of
	 reg 20/21, since it would try to get any GENERAL_REGS
	 but some of them don't handle QImode.  */

      if (rtx_equal_p (inner_src, i2dest)
	  && GET_CODE (inner_dest) == REG
	  && ! MODES_TIEABLE_P (GET_MODE (i2dest), GET_MODE (inner_dest)))
	return 0;
#endif

      /* Check for the case where I3 modifies its output, as
	 discussed above.  */
      if ((inner_dest != dest
	   && (reg_overlap_mentioned_p (i2dest, inner_dest)
	       || (i1dest && reg_overlap_mentioned_p (i1dest, inner_dest))))
	  /* This is the same test done in can_combine_p except that we
	     allow a hard register with SMALL_REGISTER_CLASSES if SRC is a
	     CALL operation.
	     Moreover, we can't test all_adjacent; we don't have to, since
	     this instruction will stay in place, so we are not considering
	     increasing the lifetime of INNER_DEST.  */
	  || (GET_CODE (inner_dest) == REG
	      && REGNO (inner_dest) < FIRST_PSEUDO_REGISTER
	      && (! HARD_REGNO_MODE_OK (REGNO (inner_dest),
					GET_MODE (inner_dest))
#ifdef SMALL_REGISTER_CLASSES
		  || (SMALL_REGISTER_CLASSES
		      && GET_CODE (src) != CALL && ! REG_USERVAR_P (inner_dest)
		      && FUNCTION_VALUE_REGNO_P (REGNO (inner_dest)))
#endif
		  ))
	  || (i1_not_in_src && reg_overlap_mentioned_p (i1dest, src)))
	return 0;

      /* If DEST is used in I3, it is being killed in this insn,
	 so record that for later.
	 Never add REG_DEAD notes for the FRAME_POINTER_REGNUM or the
	 STACK_POINTER_REGNUM, since these are always considered to be
	 live.  Similarly for ARG_POINTER_REGNUM if it is fixed.  */
      if (pi3dest_killed && GET_CODE (dest) == REG
	  && reg_referenced_p (dest, PATTERN (i3))
	  && REGNO (dest) != FRAME_POINTER_REGNUM
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
	  && REGNO (dest) != HARD_FRAME_POINTER_REGNUM
#endif
#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
	  && (REGNO (dest) != ARG_POINTER_REGNUM
	      || ! fixed_regs [REGNO (dest)])
#endif
	  && REGNO (dest) != STACK_POINTER_REGNUM)
	{
	  if (*pi3dest_killed)
	    return 0;

	  *pi3dest_killed = dest;
	}
    }

  else if (GET_CODE (x) == PARALLEL)
    {
      int i;

      for (i = 0; i < XVECLEN (x, 0); i++)
	if (! combinable_i3pat (i3, &XVECEXP (x, 0, i), i2dest, i1dest,
				i1_not_in_src, pi3dest_killed))
	  return 0;
    }

  return 1;
}
\f
/* Try to combine the insns I1 and I2 into I3.
   Here I1 and I2 appear earlier than I3.
   I1 can be zero; then we combine just I2 into I3.

   If we are combining three insns and the resulting insn is not recognized,
   try splitting it into two insns.  If that happens, I2 and I3 are retained
   and I1 is pseudo-deleted by turning it into a NOTE.  Otherwise, I1 and I2
   are pseudo-deleted.

   Return 0 if the combination does not work.  Then nothing is changed.
   If we did the combination, return the insn at which combine should
   resume scanning.  */

static rtx
try_combine (i3, i2, i1)
     register rtx i3, i2, i1;
{
  /* New patterns for I3 and I2, respectively.  */
  rtx newpat, newi2pat = 0;
  /* Indicates need to preserve the SET in I1 or I2 in I3 if it is not
     dead.  */
  int added_sets_1, added_sets_2;
  /* Total number of SETs to put into I3.  */
  int total_sets;
  /* Nonzero if I2's body now appears in I3.  */
  int i2_is_used;
  /* INSN_CODEs for new I3, new I2, and user of condition code.  */
  int insn_code_number, i2_code_number, other_code_number;
  /* Contains I3 if the destination of I3 is used in its source, which means
     that the old life of I3 is being killed.  If that usage is placed into
     I2 and not in I3, a REG_DEAD note must be made.  */
  rtx i3dest_killed = 0;
  /* SET_DEST and SET_SRC of I2 and I1.  */
  rtx i2dest, i2src, i1dest = 0, i1src = 0;
  /* PATTERN (I2), or a copy of it in certain cases.  */
  rtx i2pat;
  /* Indicates if I2DEST or I1DEST is in I2SRC or I1SRC.  */
  int i2dest_in_i2src = 0, i1dest_in_i1src = 0, i2dest_in_i1src = 0;
  int i1_feeds_i3 = 0;
  /* Notes that must be added to REG_NOTES in I3 and I2.  */
  rtx new_i3_notes, new_i2_notes;
  /* Nonzero if we substituted I3 into I2 instead of the normal case.  */
  int i3_subst_into_i2 = 0;
  /* Nonzero if I1, I2 or I3 is a MULT operation.  */
  int have_mult = 0;
  /* Number of clobbers of SCRATCH we had to add.  */
  int i3_scratches = 0, i2_scratches = 0, other_scratches = 0;

  int maxreg;
  rtx temp;
  register rtx link;
  int i;

  /* If any of I1, I2, and I3 isn't really an insn, we can't do anything.
     This can occur when flow deletes an insn that it has merged into an
     auto-increment address.  We also can't do anything if I3 has a
     REG_LIBCALL note since we don't want to disrupt the contiguity of a
     libcall.  */

  if (GET_RTX_CLASS (GET_CODE (i3)) != 'i'
      || GET_RTX_CLASS (GET_CODE (i2)) != 'i'
      || (i1 && GET_RTX_CLASS (GET_CODE (i1)) != 'i')
      || find_reg_note (i3, REG_LIBCALL, NULL_RTX))
    return 0;

  combine_attempts++;

  undobuf.undos = undobuf.previous_undos = 0;
  undobuf.other_insn = 0;

  /* Save the current high-water-mark so we can free storage if we didn't
     accept this combination.  */
  undobuf.storage = (char *) oballoc (0);

  /* Reset the hard register usage information.  */
  CLEAR_HARD_REG_SET (newpat_used_regs);

  /* If I1 and I2 both feed I3, they can be in any order.  To simplify the
     code below, set I1 to be the earlier of the two insns.  */
  if (i1 && INSN_CUID (i1) > INSN_CUID (i2))
    temp = i1, i1 = i2, i2 = temp;

  added_links_insn = 0;

  /* First check for one important special-case that the code below will
     not handle.  Namely, the case where I1 is zero, I2 has multiple sets,
     and I3 is a SET whose SET_SRC is a SET_DEST in I2.  In that case,
     we may be able to replace that destination with the destination of I3.
     This occurs in the common case where we compute both a quotient and
     remainder into a structure, in which case we want to do the computation
     directly into the structure to avoid register-register copies.

     We make very conservative checks below and only try to handle the
     most common cases of this.  For example, we only handle the case
     where I2 and I3 are adjacent to avoid making difficult register
     usage tests.  */
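
  /* An illustrative sketch of that special case: given

	I2: (parallel [(set (reg 100) (div:SI (reg 98) (reg 99)))
		       (set (reg 101) (mod:SI (reg 98) (reg 99)))])
	I3: (set (mem:SI (reg 102)) (reg 101))

     where (reg 101) dies in I3, replacing (reg 101) in I2 with I3's
     destination gives

	(parallel [(set (reg 100) (div:SI (reg 98) (reg 99)))
		   (set (mem:SI (reg 102)) (mod:SI (reg 98) (reg 99)))])

     which becomes the new pattern for I3.  */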

  if (i1 == 0 && GET_CODE (i3) == INSN && GET_CODE (PATTERN (i3)) == SET
      && GET_CODE (SET_SRC (PATTERN (i3))) == REG
      && REGNO (SET_SRC (PATTERN (i3))) >= FIRST_PSEUDO_REGISTER
#ifdef SMALL_REGISTER_CLASSES
      && (! SMALL_REGISTER_CLASSES
	  || GET_CODE (SET_DEST (PATTERN (i3))) != REG
	  || REGNO (SET_DEST (PATTERN (i3))) >= FIRST_PSEUDO_REGISTER
	  || REG_USERVAR_P (SET_DEST (PATTERN (i3))))
#endif
      && find_reg_note (i3, REG_DEAD, SET_SRC (PATTERN (i3)))
      && GET_CODE (PATTERN (i2)) == PARALLEL
      && ! side_effects_p (SET_DEST (PATTERN (i3)))
      /* If the dest of I3 is a ZERO_EXTRACT or STRICT_LOW_PART, the code
	 below would need to check what is inside (and reg_overlap_mentioned_p
	 doesn't support those codes anyway).  Don't allow those destinations;
	 the resulting insn isn't likely to be recognized anyway.  */
      && GET_CODE (SET_DEST (PATTERN (i3))) != ZERO_EXTRACT
      && GET_CODE (SET_DEST (PATTERN (i3))) != STRICT_LOW_PART
      && ! reg_overlap_mentioned_p (SET_SRC (PATTERN (i3)),
				    SET_DEST (PATTERN (i3)))
      && next_real_insn (i2) == i3)
    {
      rtx p2 = PATTERN (i2);

      /* Make sure that the destination of I3,
	 which we are going to substitute into one output of I2,
	 is not used within another output of I2.  We must avoid making this:
	  (parallel [(set (mem (reg 69)) ...)
		     (set (reg 69) ...)])
	 which is not well-defined as to order of actions.
	 (Besides, reload can't handle output reloads for this.)

	 The problem can also happen if the dest of I3 is a memory ref,
	 if another dest in I2 is an indirect memory ref.  */
      for (i = 0; i < XVECLEN (p2, 0); i++)
	if ((GET_CODE (XVECEXP (p2, 0, i)) == SET
	     || GET_CODE (XVECEXP (p2, 0, i)) == CLOBBER)
	    && reg_overlap_mentioned_p (SET_DEST (PATTERN (i3)),
					SET_DEST (XVECEXP (p2, 0, i))))
	  break;

      if (i == XVECLEN (p2, 0))
	for (i = 0; i < XVECLEN (p2, 0); i++)
	  if (SET_DEST (XVECEXP (p2, 0, i)) == SET_SRC (PATTERN (i3)))
	    {
	      combine_merges++;

	      subst_insn = i3;
	      subst_low_cuid = INSN_CUID (i2);

	      added_sets_2 = added_sets_1 = 0;
	      i2dest = SET_SRC (PATTERN (i3));

	      /* Replace the dest in I2 with our dest and make the resulting
		 insn the new pattern for I3.  Then skip to where we
		 validate the pattern.  Everything was set up above.  */
	      SUBST (SET_DEST (XVECEXP (p2, 0, i)),
		     SET_DEST (PATTERN (i3)));

	      newpat = p2;
	      i3_subst_into_i2 = 1;
	      goto validate_replacement;
	    }
    }

#ifndef HAVE_cc0
  /* If we have no I1 and I2 looks like:
	(parallel [(set (reg:CC X) (compare:CC OP (const_int 0)))
		   (set Y OP)])
     make up a dummy I1 that is
	(set Y OP)
     and change I2 to be
	(set (reg:CC X) (compare:CC Y (const_int 0)))

     (We can ignore any trailing CLOBBERs.)

     This undoes a previous combination and allows us to match a branch-and-
     decrement insn.  */

  if (i1 == 0 && GET_CODE (PATTERN (i2)) == PARALLEL
      && XVECLEN (PATTERN (i2), 0) >= 2
      && GET_CODE (XVECEXP (PATTERN (i2), 0, 0)) == SET
      && (GET_MODE_CLASS (GET_MODE (SET_DEST (XVECEXP (PATTERN (i2), 0, 0))))
	  == MODE_CC)
      && GET_CODE (SET_SRC (XVECEXP (PATTERN (i2), 0, 0))) == COMPARE
      && XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 1) == const0_rtx
      && GET_CODE (XVECEXP (PATTERN (i2), 0, 1)) == SET
      && GET_CODE (SET_DEST (XVECEXP (PATTERN (i2), 0, 1))) == REG
      && rtx_equal_p (XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 0),
		      SET_SRC (XVECEXP (PATTERN (i2), 0, 1))))
    {
      for (i = XVECLEN (PATTERN (i2), 0) - 1; i >= 2; i--)
	if (GET_CODE (XVECEXP (PATTERN (i2), 0, i)) != CLOBBER)
	  break;

      if (i == 1)
	{
	  /* We make I1 with the same INSN_UID as I2.  This gives it
	     the same INSN_CUID for value tracking.  Our fake I1 will
	     never appear in the insn stream so giving it the same INSN_UID
	     as I2 will not cause a problem.  */

	  subst_prev_insn = i1
	    = gen_rtx (INSN, VOIDmode, INSN_UID (i2), 0, i2,
		       XVECEXP (PATTERN (i2), 0, 1), -1, 0, 0);

	  SUBST (PATTERN (i2), XVECEXP (PATTERN (i2), 0, 0));
	  SUBST (XEXP (SET_SRC (PATTERN (i2)), 0),
		 SET_DEST (PATTERN (i1)));
	}
    }
#endif

  /* Verify that I2 and I1 are valid for combining.  */
  if (! can_combine_p (i2, i3, i1, NULL_RTX, &i2dest, &i2src)
      || (i1 && ! can_combine_p (i1, i3, NULL_RTX, i2, &i1dest, &i1src)))
    {
      undo_all ();
      return 0;
    }

  /* Record whether I2DEST is used in I2SRC and similarly for the other
     cases.  Knowing this will help in register status updating below.  */
  i2dest_in_i2src = reg_overlap_mentioned_p (i2dest, i2src);
  i1dest_in_i1src = i1 && reg_overlap_mentioned_p (i1dest, i1src);
  i2dest_in_i1src = i1 && reg_overlap_mentioned_p (i2dest, i1src);

  /* See if I1 directly feeds into I3.  It does if I1DEST is not used
     in I2SRC.  */
  i1_feeds_i3 = i1 && ! reg_overlap_mentioned_p (i1dest, i2src);

  /* Ensure that I3's pattern can be the destination of combines.  */
  if (! combinable_i3pat (i3, &PATTERN (i3), i2dest, i1dest,
			  i1 && i2dest_in_i1src && i1_feeds_i3,
			  &i3dest_killed))
    {
      undo_all ();
      return 0;
    }

  /* See if any of the insns is a MULT operation.  Unless one of them is,
     we will reject a combination whose result is a MULT, since it must be
     slower.  Be conservative here.  */
  if (GET_CODE (i2src) == MULT
      || (i1 != 0 && GET_CODE (i1src) == MULT)
      || (GET_CODE (PATTERN (i3)) == SET
	  && GET_CODE (SET_SRC (PATTERN (i3))) == MULT))
    have_mult = 1;

  /* If I3 has an inc, then give up if I1 or I2 uses the reg that is inc'd.
     We used to do this EXCEPT in one case: I3 has a post-inc in an
     output operand.  However, that exception can give rise to insns like
	mov r3,(r3)+
     which is a famous insn on the PDP-11 where the value of r3 used as the
     source was model-dependent.  Avoid this sort of thing.  */

#if 0
  if (!(GET_CODE (PATTERN (i3)) == SET
	&& GET_CODE (SET_SRC (PATTERN (i3))) == REG
	&& GET_CODE (SET_DEST (PATTERN (i3))) == MEM
	&& (GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_INC
	    || GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_DEC)))
    /* It's not the exception.  */
#endif
#ifdef AUTO_INC_DEC
  for (link = REG_NOTES (i3); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == REG_INC
	&& (reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i2))
	    || (i1 != 0
		&& reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i1)))))
      {
	undo_all ();
	return 0;
      }
#endif

  /* See if the SETs in I1 or I2 need to be kept around in the merged
     instruction: whenever the value set there is still needed past I3.
     For the SETs in I2, this is easy: we see if I2DEST dies or is set in I3.

     For the SET in I1, we have two cases:  If I1 and I2 independently
     feed into I3, the set in I1 needs to be kept around if I1DEST dies
     or is set in I3.  Otherwise (if I1 feeds I2 which feeds I3), the set
     in I1 needs to be kept around unless I1DEST dies or is set in either
     I2 or I3.  We can distinguish these cases by seeing if I2SRC mentions
     I1DEST.  If so, we know I1 feeds into I2.  */

  added_sets_2 = ! dead_or_set_p (i3, i2dest);

  added_sets_1
    = i1 && ! (i1_feeds_i3 ? dead_or_set_p (i3, i1dest)
	       : (dead_or_set_p (i3, i1dest) || dead_or_set_p (i2, i1dest)));
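
  /* To illustrate (a hypothetical example): if I2 is
	(set (reg 5) (plus (reg 3) (reg 4)))
     and (reg 5) neither dies nor is set in I3, its value is still needed
     past I3, so ADDED_SETS_2 is nonzero and the combined pattern must keep
     a SET of (reg 5) alongside the substituted I3 body.  */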

  /* If the set in I2 needs to be kept around, we must make a copy of
     PATTERN (I2), so that when we substitute I1SRC for I1DEST in
     PATTERN (I2), we are only substituting for the original I1DEST, not into
     an already-substituted copy.  This also prevents making self-referential
     rtx.  If I2 is a PARALLEL, we just need the piece that assigns I2SRC to
     I2DEST.  */

  i2pat = (GET_CODE (PATTERN (i2)) == PARALLEL
	   ? gen_rtx (SET, VOIDmode, i2dest, i2src)
	   : PATTERN (i2));

  if (added_sets_2)
    i2pat = copy_rtx (i2pat);

  combine_merges++;

  /* Substitute in the latest insn for the regs set by the earlier ones.  */

  maxreg = max_reg_num ();

  subst_insn = i3;

  /* It is possible that the source of I2 or I1 may be performing an
     unneeded operation, such as a ZERO_EXTEND of something that is known
     to have the high part zero.  Handle that case by letting subst look at
     the innermost one of them.

     Another way to do this would be to have a function that tries to
     simplify a single insn instead of merging two or more insns.  We don't
     do this because of the potential of infinite loops and because
     of the potential extra memory required.  However, doing it the way
     we are is a bit of a kludge and doesn't catch all cases.

     But only do this if -fexpensive-optimizations since it slows things down
     and doesn't usually win.  */

  if (flag_expensive_optimizations)
    {
      /* Pass pc_rtx so no substitutions are done, just simplifications.
	 The cases that we are interested in here do not involve the few
	 cases where is_replaced is checked.  */
      if (i1)
	{
	  subst_low_cuid = INSN_CUID (i1);
	  i1src = subst (i1src, pc_rtx, pc_rtx, 0, 0);
	}
      else
	{
	  subst_low_cuid = INSN_CUID (i2);
	  i2src = subst (i2src, pc_rtx, pc_rtx, 0, 0);
	}

      undobuf.previous_undos = undobuf.undos;
    }

#ifndef HAVE_cc0
  /* Many machines that don't use CC0 have insns that can both perform an
     arithmetic operation and set the condition code.  These operations will
     be represented as a PARALLEL with the first element of the vector
     being a COMPARE of an arithmetic operation with the constant zero.
     The second element of the vector will set some pseudo to the result
     of the same arithmetic operation.  If we simplify the COMPARE, we won't
     match such a pattern and so will generate an extra insn.  Here we test
     for this case, where both the comparison and the operation result are
     needed, and make the PARALLEL by just replacing I2DEST in I3SRC with
     I2SRC.  Later we will make the PARALLEL that contains I2.  */
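
  /* As an illustration (register numbers and modes are hypothetical):
     with I2 = (set (reg 6) (plus (reg 3) (reg 4))) and
     I3 = (set (reg:CCZ 0) (compare:CCZ (reg 6) (const_int 0))), where
     (reg 6) is still live, the substitution below yields
	(set (reg:CCZ 0) (compare:CCZ (plus (reg 3) (reg 4)) (const_int 0)))
     and the SET of (reg 6) is added back into a PARALLEL further down.  */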

  if (i1 == 0 && added_sets_2 && GET_CODE (PATTERN (i3)) == SET
      && GET_CODE (SET_SRC (PATTERN (i3))) == COMPARE
      && XEXP (SET_SRC (PATTERN (i3)), 1) == const0_rtx
      && rtx_equal_p (XEXP (SET_SRC (PATTERN (i3)), 0), i2dest))
    {
      rtx *cc_use;
      enum machine_mode compare_mode;

      newpat = PATTERN (i3);
      SUBST (XEXP (SET_SRC (newpat), 0), i2src);

      i2_is_used = 1;

#ifdef EXTRA_CC_MODES
      /* See if a COMPARE with the operand we substituted in should be done
	 with the mode that is currently being used.  If not, do the same
	 processing we do in `subst' for a SET; namely, if the destination
	 is used only once, try to replace it with a register of the proper
	 mode and also replace the COMPARE.  */
      if (undobuf.other_insn == 0
	  && (cc_use = find_single_use (SET_DEST (newpat), i3,
					&undobuf.other_insn))
	  && ((compare_mode = SELECT_CC_MODE (GET_CODE (*cc_use),
					      i2src, const0_rtx))
	      != GET_MODE (SET_DEST (newpat))))
	{
	  int regno = REGNO (SET_DEST (newpat));
	  rtx new_dest = gen_rtx (REG, compare_mode, regno);

	  if (regno < FIRST_PSEUDO_REGISTER
	      || (reg_n_sets[regno] == 1 && ! added_sets_2
		  && ! REG_USERVAR_P (SET_DEST (newpat))))
	    {
	      if (regno >= FIRST_PSEUDO_REGISTER)
		SUBST (regno_reg_rtx[regno], new_dest);

	      SUBST (SET_DEST (newpat), new_dest);
	      SUBST (XEXP (*cc_use, 0), new_dest);
	      SUBST (SET_SRC (newpat),
		     gen_rtx_combine (COMPARE, compare_mode,
				      i2src, const0_rtx));
	    }
	  else
	    undobuf.other_insn = 0;
	}
#endif
    }
  else
#endif
    {
      n_occurrences = 0;		/* `subst' counts here */

      /* If I1 feeds into I2 (not into I3) and I1DEST is in I1SRC, we
	 need to make a unique copy of I2SRC each time we substitute it
	 to avoid self-referential rtl.  */

      subst_low_cuid = INSN_CUID (i2);
      newpat = subst (PATTERN (i3), i2dest, i2src, 0,
		      ! i1_feeds_i3 && i1dest_in_i1src);
      undobuf.previous_undos = undobuf.undos;

      /* Record whether i2's body now appears within i3's body.  */
      i2_is_used = n_occurrences;
    }

  /* If we already got a failure, don't try to do more.  Otherwise,
     try to substitute in I1 if we have it.  */

  if (i1 && GET_CODE (newpat) != CLOBBER)
    {
      /* Before we can do this substitution, we must redo the test done
	 above (see detailed comments there) that ensures that I1DEST
	 isn't mentioned in any SETs in NEWPAT that are field assignments.  */

      if (! combinable_i3pat (NULL_RTX, &newpat, i1dest, NULL_RTX,
			      0, NULL_PTR))
	{
	  undo_all ();
	  return 0;
	}

      n_occurrences = 0;
      subst_low_cuid = INSN_CUID (i1);
      newpat = subst (newpat, i1dest, i1src, 0, 0);
      undobuf.previous_undos = undobuf.undos;
    }

  /* Fail if an autoincrement side-effect has been duplicated.  Be careful
     to count all the ways that I2SRC and I1SRC can be used.  */
  if ((FIND_REG_INC_NOTE (i2, NULL_RTX) != 0
       && i2_is_used + added_sets_2 > 1)
      || (i1 != 0 && FIND_REG_INC_NOTE (i1, NULL_RTX) != 0
	  && (n_occurrences + added_sets_1 + (added_sets_2 && ! i1_feeds_i3)
	      > 1))
      /* Fail if we tried to make a new register (we used to abort, but there's
	 really no reason to).  */
      || max_reg_num () != maxreg
      /* Fail if we couldn't do something and have a CLOBBER.  */
      || GET_CODE (newpat) == CLOBBER
      /* Fail if this new pattern is a MULT and we didn't have one before
	 at the outer level.  */
      || (GET_CODE (newpat) == SET && GET_CODE (SET_SRC (newpat)) == MULT
	  && ! have_mult))
    {
      undo_all ();
      return 0;
    }

  /* If the actions of the earlier insns must be kept
     in addition to substituting them into the latest one,
     we must make a new PARALLEL for the latest insn
     to hold the additional SETs.  */

  if (added_sets_1 || added_sets_2)
    {
      combine_extras++;

      if (GET_CODE (newpat) == PARALLEL)
	{
	  rtvec old = XVEC (newpat, 0);
	  total_sets = XVECLEN (newpat, 0) + added_sets_1 + added_sets_2;
	  newpat = gen_rtx (PARALLEL, VOIDmode, rtvec_alloc (total_sets));
	  bcopy ((char *) &old->elem[0], (char *) XVEC (newpat, 0)->elem,
		 sizeof (old->elem[0]) * old->num_elem);
	}
      else
	{
	  rtx old = newpat;
	  total_sets = 1 + added_sets_1 + added_sets_2;
	  newpat = gen_rtx (PARALLEL, VOIDmode, rtvec_alloc (total_sets));
	  XVECEXP (newpat, 0, 0) = old;
	}

      if (added_sets_1)
	XVECEXP (newpat, 0, --total_sets)
	  = (GET_CODE (PATTERN (i1)) == PARALLEL
	     ? gen_rtx (SET, VOIDmode, i1dest, i1src) : PATTERN (i1));

      if (added_sets_2)
	{
	  /* If there is no I1, use I2's body as is.  We used to also not do
	     the subst call below if I2 was substituted into I3,
	     but that could lose a simplification.  */
	  if (i1 == 0)
	    XVECEXP (newpat, 0, --total_sets) = i2pat;
	  else
	    /* See comment where i2pat is assigned.  */
	    XVECEXP (newpat, 0, --total_sets)
	      = subst (i2pat, i1dest, i1src, 0, 0);
	}
    }

  /* We come here when we are replacing a destination in I2 with the
     destination of I3.  */
 validate_replacement:

  /* Note which hard regs this insn has as inputs.  */
  mark_used_regs_combine (newpat);

  /* Is the result of combination a valid instruction?  */
  insn_code_number
    = recog_for_combine (&newpat, i3, &new_i3_notes, &i3_scratches);

  /* If the result isn't valid, see if it is a PARALLEL of two SETs where
     the second SET's destination is a register that is unused.  In that case,
     we just need the first SET.  This can occur when simplifying a divmod
     insn.  We *must* test for this case here because the code below that
     splits two independent SETs doesn't handle this case correctly when it
     updates the register status.  Also check the case where the first
     SET's destination is unused.  That would not cause incorrect code, but
     does cause an unneeded insn to remain.  */
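
  /* An illustrative divmod case: if NEWPAT is
	(parallel [(set (reg 7) (div (reg 3) (reg 4)))
		   (set (reg 8) (mod (reg 3) (reg 4)))])
     and I3 carries a REG_UNUSED note for (reg 8), the second SET is dead,
     and the first SET alone may be a recognizable division insn.
     (Register numbers here are hypothetical.)  */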

  if (insn_code_number < 0 && GET_CODE (newpat) == PARALLEL
      && XVECLEN (newpat, 0) == 2
      && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
      && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
      && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) == REG
      && find_reg_note (i3, REG_UNUSED, SET_DEST (XVECEXP (newpat, 0, 1)))
      && ! side_effects_p (SET_SRC (XVECEXP (newpat, 0, 1)))
      && asm_noperands (newpat) < 0)
    {
      newpat = XVECEXP (newpat, 0, 0);
      insn_code_number
	= recog_for_combine (&newpat, i3, &new_i3_notes, &i3_scratches);
    }

  else if (insn_code_number < 0 && GET_CODE (newpat) == PARALLEL
	   && XVECLEN (newpat, 0) == 2
	   && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
	   && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) == REG
	   && find_reg_note (i3, REG_UNUSED, SET_DEST (XVECEXP (newpat, 0, 0)))
	   && ! side_effects_p (SET_SRC (XVECEXP (newpat, 0, 0)))
	   && asm_noperands (newpat) < 0)
    {
      newpat = XVECEXP (newpat, 0, 1);
      insn_code_number
	= recog_for_combine (&newpat, i3, &new_i3_notes, &i3_scratches);
    }

  /* If we were combining three insns and the result is a simple SET
     with no ASM_OPERANDS that wasn't recognized, try to split it into two
     insns.  There are two ways to do this.  It can be split using a
     machine-specific method (like when you have an addition of a large
     constant) or by combine in the function find_split_point.  */

  if (i1 && insn_code_number < 0 && GET_CODE (newpat) == SET
      && asm_noperands (newpat) < 0)
    {
      rtx m_split, *split;
      rtx ni2dest = i2dest;

      /* See if the MD file can split NEWPAT.  If it can't, see if letting it
	 use I2DEST as a scratch register will help.  In the latter case,
	 convert I2DEST to the mode of the source of NEWPAT if we can.  */

      m_split = split_insns (newpat, i3);

      /* We can only use I2DEST as a scratch reg if it doesn't overlap any
	 inputs of NEWPAT.  */

      /* ??? If I2DEST is not safe, and I1DEST exists, then it would be
	 possible to try that as a scratch reg.  This would require adding
	 more code to make it work though.  */

      if (m_split == 0 && ! reg_overlap_mentioned_p (ni2dest, newpat))
	{
	  /* If I2DEST is a hard register or the only use of a pseudo,
	     we can change its mode.  */
	  if (GET_MODE (SET_DEST (newpat)) != GET_MODE (i2dest)
	      && GET_MODE (SET_DEST (newpat)) != VOIDmode
	      && GET_CODE (i2dest) == REG
	      && (REGNO (i2dest) < FIRST_PSEUDO_REGISTER
		  || (reg_n_sets[REGNO (i2dest)] == 1 && ! added_sets_2
		      && ! REG_USERVAR_P (i2dest))))
	    ni2dest = gen_rtx (REG, GET_MODE (SET_DEST (newpat)),
			       REGNO (i2dest));

	  m_split = split_insns (gen_rtx (PARALLEL, VOIDmode,
					  gen_rtvec (2, newpat,
						     gen_rtx (CLOBBER,
							      VOIDmode,
							      ni2dest))),
				 i3);
	}

      if (m_split && GET_CODE (m_split) == SEQUENCE
	  && XVECLEN (m_split, 0) == 2
	  && (next_real_insn (i2) == i3
	      || ! use_crosses_set_p (PATTERN (XVECEXP (m_split, 0, 0)),
				      INSN_CUID (i2))))
	{
	  rtx i2set, i3set;
	  rtx newi3pat = PATTERN (XVECEXP (m_split, 0, 1));
	  newi2pat = PATTERN (XVECEXP (m_split, 0, 0));

	  i3set = single_set (XVECEXP (m_split, 0, 1));
	  i2set = single_set (XVECEXP (m_split, 0, 0));

	  /* In case we changed the mode of I2DEST, replace it in the
	     pseudo-register table here.  We can't do it above in case this
	     code doesn't get executed and we do a split the other way.  */

	  if (REGNO (i2dest) >= FIRST_PSEUDO_REGISTER)
	    SUBST (regno_reg_rtx[REGNO (i2dest)], ni2dest);

	  i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes,
					      &i2_scratches);

	  /* If I2 or I3 has multiple SETs, we won't know how to track
	     register status, so don't use these insns.  If I2's destination
	     is used between I2 and I3, we also can't use these insns.  */

	  if (i2_code_number >= 0 && i2set && i3set
	      && (next_real_insn (i2) == i3
		  || ! reg_used_between_p (SET_DEST (i2set), i2, i3)))
	    insn_code_number = recog_for_combine (&newi3pat, i3, &new_i3_notes,
						  &i3_scratches);
	  if (insn_code_number >= 0)
	    newpat = newi3pat;

	  /* It is possible that both insns now set the destination of I3.
	     If so, we must show an extra use of it.  */

	  if (insn_code_number >= 0)
	    {
	      rtx new_i3_dest = SET_DEST (i3set);
	      rtx new_i2_dest = SET_DEST (i2set);

	      while (GET_CODE (new_i3_dest) == ZERO_EXTRACT
		     || GET_CODE (new_i3_dest) == STRICT_LOW_PART
		     || GET_CODE (new_i3_dest) == SUBREG)
		new_i3_dest = XEXP (new_i3_dest, 0);

	      while (GET_CODE (new_i2_dest) == ZERO_EXTRACT
		     || GET_CODE (new_i2_dest) == STRICT_LOW_PART
		     || GET_CODE (new_i2_dest) == SUBREG)
		new_i2_dest = XEXP (new_i2_dest, 0);

	      if (GET_CODE (new_i3_dest) == REG
		  && GET_CODE (new_i2_dest) == REG
		  && REGNO (new_i3_dest) == REGNO (new_i2_dest))
		reg_n_sets[REGNO (new_i2_dest)]++;
	    }
	}

      /* If we can split it and use I2DEST, go ahead and see if that
	 helps things be recognized.  Verify that none of the registers
	 are set between I2 and I3.  */
      if (insn_code_number < 0 && (split = find_split_point (&newpat, i3)) != 0
#ifdef HAVE_cc0
	  && GET_CODE (i2dest) == REG
#endif
	  /* We need I2DEST in the proper mode.  If it is a hard register
	     or the only use of a pseudo, we can change its mode.  */
	  && (GET_MODE (*split) == GET_MODE (i2dest)
	      || GET_MODE (*split) == VOIDmode
	      || REGNO (i2dest) < FIRST_PSEUDO_REGISTER
	      || (reg_n_sets[REGNO (i2dest)] == 1 && ! added_sets_2
		  && ! REG_USERVAR_P (i2dest)))
	  && (next_real_insn (i2) == i3
	      || ! use_crosses_set_p (*split, INSN_CUID (i2)))
	  /* We can't overwrite I2DEST if its value is still used by
	     NEWPAT.  */
	  && ! reg_referenced_p (i2dest, newpat))
	{
	  rtx newdest = i2dest;
	  enum rtx_code split_code = GET_CODE (*split);
	  enum machine_mode split_mode = GET_MODE (*split);

	  /* Get NEWDEST as a register in the proper mode.  We have already
	     validated that we can do this.  */
	  if (GET_MODE (i2dest) != split_mode && split_mode != VOIDmode)
	    {
	      newdest = gen_rtx (REG, split_mode, REGNO (i2dest));

	      if (REGNO (i2dest) >= FIRST_PSEUDO_REGISTER)
		SUBST (regno_reg_rtx[REGNO (i2dest)], newdest);
	    }

	  /* If *SPLIT is a (mult FOO (const_int pow2)), convert it to
	     an ASHIFT.  This can occur if it was inside a PLUS and hence
	     appeared to be a memory address.  This is a kludge.  */
	  if (split_code == MULT
	      && GET_CODE (XEXP (*split, 1)) == CONST_INT
	      && (i = exact_log2 (INTVAL (XEXP (*split, 1)))) >= 0)
	    {
	      SUBST (*split, gen_rtx_combine (ASHIFT, split_mode,
					      XEXP (*split, 0), GEN_INT (i)));
	      /* Update split_code because we may not have a multiply
		 anymore.  */
	      split_code = GET_CODE (*split);
	    }
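
	  /* For example, (mult (reg 3) (const_int 8)), which looked like
	     part of an address inside a PLUS, becomes
	     (ashift (reg 3) (const_int 3)) here, since exact_log2 (8) == 3.  */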

#ifdef INSN_SCHEDULING
	  /* If *SPLIT is a paradoxical SUBREG, when we split it, it should
	     be written as a ZERO_EXTEND.  */
	  if (split_code == SUBREG && GET_CODE (SUBREG_REG (*split)) == MEM)
	    SUBST (*split, gen_rtx_combine (ZERO_EXTEND, split_mode,
					    XEXP (*split, 0)));
#endif
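
	  /* Illustration: splitting a paradoxical (subreg:SI (mem:HI A) 0)
	     as (zero_extend:SI (mem:HI A)) gives the split-out load a
	     well-defined form for the upper bits.  */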

	  newi2pat = gen_rtx_combine (SET, VOIDmode, newdest, *split);
	  SUBST (*split, newdest);
	  i2_code_number
	    = recog_for_combine (&newi2pat, i2, &new_i2_notes, &i2_scratches);

	  /* If the split point was a MULT and we didn't have one before,
	     don't use one now.  */
	  if (i2_code_number >= 0 && ! (split_code == MULT && ! have_mult))
	    insn_code_number
	      = recog_for_combine (&newpat, i3, &new_i3_notes, &i3_scratches);
	}
    }

  /* Check for a case where we loaded from memory in a narrow mode and
     then sign extended it, but we need both registers.  In that case,
     we have a PARALLEL with both loads from the same memory location.
     We can split this into a load from memory followed by a register-register
     copy.  This saves at least one insn, more if register allocation can
     eliminate the copy.

     We cannot do this if the destination of the second assignment is
     a register that we have already assumed is zero-extended.  Similarly
     for a SUBREG of such a register.  */
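
  /* Concretely, NEWPAT here has the shape
	(parallel [(set (reg A) (sign_extend (mem M)))
		   (set (reg B) (mem M))])
     and is rewritten as NEWI2PAT = the extending load into A, followed by
     NEWPAT = a copy of the low part of A into B.  */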

  else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0
	   && GET_CODE (newpat) == PARALLEL
	   && XVECLEN (newpat, 0) == 2
	   && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
	   && GET_CODE (SET_SRC (XVECEXP (newpat, 0, 0))) == SIGN_EXTEND
	   && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
	   && rtx_equal_p (SET_SRC (XVECEXP (newpat, 0, 1)),
			   XEXP (SET_SRC (XVECEXP (newpat, 0, 0)), 0))
	   && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 1)),
				   INSN_CUID (i2))
	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT
	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART
	   && ! (temp = SET_DEST (XVECEXP (newpat, 0, 1)),
		 (GET_CODE (temp) == REG
		  && reg_nonzero_bits[REGNO (temp)] != 0
		  && GET_MODE_BITSIZE (GET_MODE (temp)) < BITS_PER_WORD
		  && GET_MODE_BITSIZE (GET_MODE (temp)) < HOST_BITS_PER_INT
		  && (reg_nonzero_bits[REGNO (temp)]
		      != GET_MODE_MASK (word_mode))))
	   && ! (GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) == SUBREG
		 && (temp = SUBREG_REG (SET_DEST (XVECEXP (newpat, 0, 1))),
		     (GET_CODE (temp) == REG
		      && reg_nonzero_bits[REGNO (temp)] != 0
		      && GET_MODE_BITSIZE (GET_MODE (temp)) < BITS_PER_WORD
		      && GET_MODE_BITSIZE (GET_MODE (temp)) < HOST_BITS_PER_INT
		      && (reg_nonzero_bits[REGNO (temp)]
			  != GET_MODE_MASK (word_mode)))))
	   && ! reg_overlap_mentioned_p (SET_DEST (XVECEXP (newpat, 0, 1)),
					 SET_SRC (XVECEXP (newpat, 0, 1)))
	   && ! find_reg_note (i3, REG_UNUSED,
			       SET_DEST (XVECEXP (newpat, 0, 0))))
    {
      rtx ni2dest;

      newi2pat = XVECEXP (newpat, 0, 0);
      ni2dest = SET_DEST (XVECEXP (newpat, 0, 0));
      newpat = XVECEXP (newpat, 0, 1);
      SUBST (SET_SRC (newpat),
	     gen_lowpart_for_combine (GET_MODE (SET_SRC (newpat)), ni2dest));
      i2_code_number
	= recog_for_combine (&newi2pat, i2, &new_i2_notes, &i2_scratches);

      if (i2_code_number >= 0)
	insn_code_number
	  = recog_for_combine (&newpat, i3, &new_i3_notes, &i3_scratches);

      if (insn_code_number >= 0)
	{
	  rtx insn;
	  rtx link;

	  /* If we will be able to accept this, we have made a change to the
	     destination of I3.  This can invalidate a LOG_LINKS pointing
	     to I3.  No other part of combine.c makes such a transformation.

	     The new I3 will have a destination that was previously the
	     destination of I1 or I2 and which was used in I2 or I3.  Call
	     distribute_links to make a LOG_LINK from the next use of
	     that destination.  */

	  PATTERN (i3) = newpat;
	  distribute_links (gen_rtx (INSN_LIST, VOIDmode, i3, NULL_RTX));

	  /* I3 now uses what used to be its destination and which is
	     now I2's destination.  That means we need a LOG_LINK from
	     I3 to I2.  But we used to have one, so we still will.

	     However, some later insn might be using I2's dest and have
	     a LOG_LINK pointing at I3.  We must remove this link.
	     The simplest way to remove the link is to point it at I1,
	     which we know will be a NOTE.  */

	  for (insn = NEXT_INSN (i3);
	       insn && (this_basic_block == n_basic_blocks - 1
			|| insn != basic_block_head[this_basic_block + 1]);
	       insn = NEXT_INSN (insn))
	    {
	      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
		  && reg_referenced_p (ni2dest, PATTERN (insn)))
		{
		  for (link = LOG_LINKS (insn); link;
		       link = XEXP (link, 1))
		    if (XEXP (link, 0) == i3)
		      XEXP (link, 0) = i1;

		  break;
		}
	    }
	}
    }

  /* Similarly, check for a case where we have a PARALLEL of two independent
     SETs but we started with three insns.  In this case, we can do the sets
     as two separate insns.  This case occurs when some SET allows two
     other insns to combine, but the destination of that SET is still live.  */
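
  /* For example (with hypothetical registers), a NEWPAT of
	(parallel [(set (reg 7) (plus (reg 3) (reg 4)))
		   (set (reg 8) (minus (reg 3) (reg 4)))])
     rarely matches a machine insn as a whole, but since neither SET
     references the other's destination, the second SET can become the
     new I2 and the first the new I3.  */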

  else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0
	   && GET_CODE (newpat) == PARALLEL
	   && XVECLEN (newpat, 0) == 2
	   && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != ZERO_EXTRACT
	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != STRICT_LOW_PART
	   && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT
	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART
	   && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 1)),
				   INSN_CUID (i2))
	   /* Don't pass sets with (USE (MEM ...)) dests to the following.  */
	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != USE
	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != USE
	   && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 1)),
				  XVECEXP (newpat, 0, 0))
	   && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 0)),
				  XVECEXP (newpat, 0, 1)))
    {
      newi2pat = XVECEXP (newpat, 0, 1);
      newpat = XVECEXP (newpat, 0, 0);

      i2_code_number
	= recog_for_combine (&newi2pat, i2, &new_i2_notes, &i2_scratches);

      if (i2_code_number >= 0)
	insn_code_number
	  = recog_for_combine (&newpat, i3, &new_i3_notes, &i3_scratches);
    }

  /* If it still isn't recognized, fail and change things back the way they
     were.  */
  if ((insn_code_number < 0
       /* Is the result a reasonable ASM_OPERANDS?  */
       && (! check_asm_operands (newpat) || added_sets_1 || added_sets_2)))
    {
      undo_all ();
      return 0;
    }

  /* If we had to change another insn, make sure it is valid also.  */
  if (undobuf.other_insn)
    {
      rtx other_pat = PATTERN (undobuf.other_insn);
      rtx new_other_notes;
      rtx note, next;

      CLEAR_HARD_REG_SET (newpat_used_regs);

      other_code_number
	= recog_for_combine (&other_pat, undobuf.other_insn,
			     &new_other_notes, &other_scratches);

      if (other_code_number < 0 && ! check_asm_operands (other_pat))
	{
	  undo_all ();
	  return 0;
	}

      PATTERN (undobuf.other_insn) = other_pat;

      /* If any of the notes in OTHER_INSN were REG_UNUSED, ensure that they
	 are still valid.  Then add any non-duplicate notes added by
	 recog_for_combine.  */
      for (note = REG_NOTES (undobuf.other_insn); note; note = next)
	{
	  next = XEXP (note, 1);

	  if (REG_NOTE_KIND (note) == REG_UNUSED
	      && ! reg_set_p (XEXP (note, 0), PATTERN (undobuf.other_insn)))
	    {
	      if (GET_CODE (XEXP (note, 0)) == REG)
		reg_n_deaths[REGNO (XEXP (note, 0))]--;

	      remove_note (undobuf.other_insn, note);
	    }
	}

      for (note = new_other_notes; note; note = XEXP (note, 1))
	if (GET_CODE (XEXP (note, 0)) == REG)
	  reg_n_deaths[REGNO (XEXP (note, 0))]++;

      distribute_notes (new_other_notes, undobuf.other_insn,
			undobuf.other_insn, NULL_RTX, NULL_RTX, NULL_RTX);
    }

  /* We now know that we can do this combination.  Merge the insns and
     update the status of registers and LOG_LINKS.  */

  {
    rtx i3notes, i2notes, i1notes = 0;
    rtx i3links, i2links, i1links = 0;
    rtx midnotes = 0;
    register int regno;
    /* Compute which registers we expect to eliminate.  */
    rtx elim_i2 = (newi2pat || i2dest_in_i2src || i2dest_in_i1src
		   ? 0 : i2dest);
    rtx elim_i1 = i1 == 0 || i1dest_in_i1src ? 0 : i1dest;

    /* Get the old REG_NOTES and LOG_LINKS from all our insns and
       clear them.  */
    i3notes = REG_NOTES (i3), i3links = LOG_LINKS (i3);
    i2notes = REG_NOTES (i2), i2links = LOG_LINKS (i2);
    if (i1)
      i1notes = REG_NOTES (i1), i1links = LOG_LINKS (i1);

    /* Ensure that we do not have something that should not be shared but
       occurs multiple times in the new insns.  Check this by first
       resetting all the `used' flags and then copying anything that is
       shared.  */

    reset_used_flags (i3notes);
    reset_used_flags (i2notes);
    reset_used_flags (i1notes);
    reset_used_flags (newpat);
    reset_used_flags (newi2pat);
    if (undobuf.other_insn)
      reset_used_flags (PATTERN (undobuf.other_insn));

    i3notes = copy_rtx_if_shared (i3notes);
    i2notes = copy_rtx_if_shared (i2notes);
    i1notes = copy_rtx_if_shared (i1notes);
    newpat = copy_rtx_if_shared (newpat);
    newi2pat = copy_rtx_if_shared (newi2pat);
    if (undobuf.other_insn)
      reset_used_flags (PATTERN (undobuf.other_insn));

    INSN_CODE (i3) = insn_code_number;
    PATTERN (i3) = newpat;
    if (undobuf.other_insn)
      INSN_CODE (undobuf.other_insn) = other_code_number;

    /* We had one special case above where I2 had more than one set and
       we replaced a destination of one of those sets with the destination
       of I3.  In that case, we have to update LOG_LINKS of insns later
       in this basic block.  Note that this (expensive) case is rare.

       Also, in this case, we must pretend that all REG_NOTEs for I2
       actually came from I3, so that REG_UNUSED notes from I2 will be
       properly handled.  */

    if (i3_subst_into_i2)
      {
	for (i = 0; i < XVECLEN (PATTERN (i2), 0); i++)
	  if (GET_CODE (SET_DEST (XVECEXP (PATTERN (i2), 0, i))) == REG
	      && SET_DEST (XVECEXP (PATTERN (i2), 0, i)) != i2dest
	      && ! find_reg_note (i2, REG_UNUSED,
				  SET_DEST (XVECEXP (PATTERN (i2), 0, i))))
	    for (temp = NEXT_INSN (i2);
		 temp && (this_basic_block == n_basic_blocks - 1
			  || basic_block_head[this_basic_block + 1] != temp);
		 temp = NEXT_INSN (temp))
	      if (temp != i3 && GET_RTX_CLASS (GET_CODE (temp)) == 'i')
		for (link = LOG_LINKS (temp); link; link = XEXP (link, 1))
		  if (XEXP (link, 0) == i2)
		    XEXP (link, 0) = i3;

	if (i3notes)
	  {
	    rtx link = i3notes;
	    while (XEXP (link, 1))
	      link = XEXP (link, 1);
	    XEXP (link, 1) = i2notes;
	  }
	else
	  i3notes = i2notes;
	i2notes = 0;
      }

    LOG_LINKS (i3) = 0;
    REG_NOTES (i3) = 0;
    LOG_LINKS (i2) = 0;
    REG_NOTES (i2) = 0;

    if (newi2pat)
      {
	INSN_CODE (i2) = i2_code_number;
	PATTERN (i2) = newi2pat;
      }
    else
      {
	PUT_CODE (i2, NOTE);
	NOTE_LINE_NUMBER (i2) = NOTE_INSN_DELETED;
	NOTE_SOURCE_FILE (i2) = 0;
      }

    if (i1)
      {
	LOG_LINKS (i1) = 0;
	REG_NOTES (i1) = 0;
	PUT_CODE (i1, NOTE);
	NOTE_LINE_NUMBER (i1) = NOTE_INSN_DELETED;
	NOTE_SOURCE_FILE (i1) = 0;
      }

    /* Get death notes for everything that is now used in either I3 or
       I2 and used to die in a previous insn.  If we built two new
       patterns, move from I1 to I2 then I2 to I3 so that we get the
       proper movement on registers that I2 modifies.  */

    if (newi2pat)
      {
	move_deaths (newi2pat, NULL_RTX, INSN_CUID (i1), i2, &midnotes);
	move_deaths (newpat, newi2pat, INSN_CUID (i1), i3, &midnotes);
      }
    else
      move_deaths (newpat, NULL_RTX, i1 ? INSN_CUID (i1) : INSN_CUID (i2),
		   i3, &midnotes);

    /* Distribute all the LOG_LINKS and REG_NOTES from I1, I2, and I3.  */
    if (i3notes)
      distribute_notes (i3notes, i3, i3, newi2pat ? i2 : NULL_RTX,
			elim_i2, elim_i1);
    if (i2notes)
      distribute_notes (i2notes, i2, i3, newi2pat ? i2 : NULL_RTX,
			elim_i2, elim_i1);
    if (i1notes)
      distribute_notes (i1notes, i1, i3, newi2pat ? i2 : NULL_RTX,
			elim_i2, elim_i1);
    if (midnotes)
      distribute_notes (midnotes, NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
			elim_i2, elim_i1);

    /* Distribute any notes added to I2 or I3 by recog_for_combine.  We
       know these are REG_UNUSED and want them to go to the desired insn,
       so we always pass it as i3.  We have not counted the notes in
       reg_n_deaths yet, so we need to do so now.  */

    if (newi2pat && new_i2_notes)
      {
	for (temp = new_i2_notes; temp; temp = XEXP (temp, 1))
	  if (GET_CODE (XEXP (temp, 0)) == REG)
	    reg_n_deaths[REGNO (XEXP (temp, 0))]++;

	distribute_notes (new_i2_notes, i2, i2, NULL_RTX, NULL_RTX, NULL_RTX);
      }

    if (new_i3_notes)
      {
	for (temp = new_i3_notes; temp; temp = XEXP (temp, 1))
	  if (GET_CODE (XEXP (temp, 0)) == REG)
	    reg_n_deaths[REGNO (XEXP (temp, 0))]++;

	distribute_notes (new_i3_notes, i3, i3, NULL_RTX, NULL_RTX, NULL_RTX);
      }

    /* If I3DEST was used in I3SRC, it really died in I3.  We may need to
       put a REG_DEAD note for it somewhere.  Similarly for I2 and I1.
       Show an additional death due to the REG_DEAD note we make here.  If
       we discard it in distribute_notes, we will decrement it again.  */

    if (i3dest_killed)
      {
	if (GET_CODE (i3dest_killed) == REG)
	  reg_n_deaths[REGNO (i3dest_killed)]++;

	distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i3dest_killed,
				   NULL_RTX),
			  NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
			  NULL_RTX, NULL_RTX);
      }

    /* For I2 and I1, we have to be careful.  If NEWI2PAT exists and sets
       I2DEST or I1DEST, the death must be somewhere before I2, not I3.  If
       we passed I3 in that case, it might delete I2.  */

    if (i2dest_in_i2src)
      {
	if (GET_CODE (i2dest) == REG)
	  reg_n_deaths[REGNO (i2dest)]++;

	if (newi2pat && reg_set_p (i2dest, newi2pat))
	  distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i2dest, NULL_RTX),
			    NULL_RTX, i2, NULL_RTX, NULL_RTX, NULL_RTX);
	else
	  distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i2dest, NULL_RTX),
			    NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
			    NULL_RTX, NULL_RTX);
      }

    if (i1dest_in_i1src)
      {
	if (GET_CODE (i1dest) == REG)
	  reg_n_deaths[REGNO (i1dest)]++;

	if (newi2pat && reg_set_p (i1dest, newi2pat))
	  distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i1dest, NULL_RTX),
			    NULL_RTX, i2, NULL_RTX, NULL_RTX, NULL_RTX);
	else
	  distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i1dest, NULL_RTX),
			    NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
			    NULL_RTX, NULL_RTX);
      }

    distribute_links (i3links);
    distribute_links (i2links);
    distribute_links (i1links);

    if (GET_CODE (i2dest) == REG)
      {
	rtx link;
	rtx i2_insn = 0, i2_val = 0, set;

	/* The insn that used to set this register doesn't exist, and
	   this life of the register may not exist either.  See if one of
	   I3's links points to an insn that sets I2DEST.  If it does,
	   that is now the last known value for I2DEST.  If we don't update
	   this and I2 set the register to a value that depended on its old
	   contents, we will get confused.  If this insn is used, things
	   will be set correctly in combine_instructions.  */

	for (link = LOG_LINKS (i3); link; link = XEXP (link, 1))
	  if ((set = single_set (XEXP (link, 0))) != 0
	      && rtx_equal_p (i2dest, SET_DEST (set)))
	    i2_insn = XEXP (link, 0), i2_val = SET_SRC (set);

	record_value_for_reg (i2dest, i2_insn, i2_val);

	/* If the reg formerly set in I2 died only once and that was in I3,
	   zero its use count so it won't make `reload' do any work.  */
	if (! added_sets_2
	    && (newi2pat == 0 || ! reg_mentioned_p (i2dest, newi2pat))
	    && ! i2dest_in_i2src)
	  {
	    regno = REGNO (i2dest);
	    reg_n_sets[regno]--;
	    if (reg_n_sets[regno] == 0
		&& ! (basic_block_live_at_start[0][regno / REGSET_ELT_BITS]
		      & ((REGSET_ELT_TYPE) 1 << (regno % REGSET_ELT_BITS))))
	      reg_n_refs[regno] = 0;
	  }
      }

    if (i1 && GET_CODE (i1dest) == REG)
      {
	rtx link;
	rtx i1_insn = 0, i1_val = 0, set;

	for (link = LOG_LINKS (i3); link; link = XEXP (link, 1))
	  if ((set = single_set (XEXP (link, 0))) != 0
	      && rtx_equal_p (i1dest, SET_DEST (set)))
	    i1_insn = XEXP (link, 0), i1_val = SET_SRC (set);

	record_value_for_reg (i1dest, i1_insn, i1_val);

	regno = REGNO (i1dest);
	if (! added_sets_1 && ! i1dest_in_i1src)
	  {
	    reg_n_sets[regno]--;
	    if (reg_n_sets[regno] == 0
		&& ! (basic_block_live_at_start[0][regno / REGSET_ELT_BITS]
		      & ((REGSET_ELT_TYPE) 1 << (regno % REGSET_ELT_BITS))))
	      reg_n_refs[regno] = 0;
	  }
      }

    /* Update reg_nonzero_bits et al for any changes that may have been made
       to this insn.  */

    note_stores (newpat, set_nonzero_bits_and_sign_copies);
    if (newi2pat)
      note_stores (newi2pat, set_nonzero_bits_and_sign_copies);

    /* If we added any (clobber (scratch)), add them to the max for a
       block.  This is a very pessimistic calculation, since we might
       have had them already and this might not be the worst block, but
       it's not worth doing any better.  */
    max_scratch += i3_scratches + i2_scratches + other_scratches;

    /* If I3 is now an unconditional jump, ensure that it has a
       BARRIER following it since it may have initially been a
       conditional jump.  It may also be the last nonnote insn.  */

    if ((GET_CODE (newpat) == RETURN || simplejump_p (i3))
	&& ((temp = next_nonnote_insn (i3)) == NULL_RTX
	    || GET_CODE (temp) != BARRIER))
      emit_barrier_after (i3);
  }

  combine_successes++;

  /* Clear this here, so that subsequent get_last_value calls are not
     affected.  */
  subst_prev_insn = NULL_RTX;

  if (added_links_insn
      && (newi2pat == 0 || INSN_CUID (added_links_insn) < INSN_CUID (i2))
      && INSN_CUID (added_links_insn) < INSN_CUID (i3))
    return added_links_insn;
  else
    return newi2pat ? i2 : i3;
}
\f
/* Undo all the modifications recorded in undobuf.  */

static void
undo_all ()
{
  struct undo *undo, *next;

  for (undo = undobuf.undos; undo; undo = next)
    {
      next = undo->next;
      if (undo->is_int)
	*undo->where.i = undo->old_contents.i;
      else
	*undo->where.r = undo->old_contents.r;

      undo->next = undobuf.frees;
      undobuf.frees = undo;
    }

  obfree (undobuf.storage);
  undobuf.undos = undobuf.previous_undos = 0;

  /* Clear this here, so that subsequent get_last_value calls are not
     affected.  */
  subst_prev_insn = NULL_RTX;
}
\f
/* Find the innermost point within the rtx at LOC, possibly LOC itself,
   where we have an arithmetic expression and return that point.  LOC will
   be inside INSN.

   try_combine will call this function to see if an insn can be split into
   two insns.  */

static rtx *
find_split_point (loc, insn)
     rtx *loc;
     rtx insn;
{
  rtx x = *loc;
  enum rtx_code code = GET_CODE (x);
  rtx *split;
  int len = 0, pos, unsignedp;
  rtx inner;

  /* First special-case some codes.  */
  switch (code)
    {
    case SUBREG:
#ifdef INSN_SCHEDULING
      /* If we are making a paradoxical SUBREG invalid, it becomes a split
	 point.  */
      if (GET_CODE (SUBREG_REG (x)) == MEM)
	return loc;
#endif
      return find_split_point (&SUBREG_REG (x), insn);

    case MEM:
#ifdef HAVE_lo_sum
      /* If we have (mem (const ..)) or (mem (symbol_ref ...)), split it
	 using LO_SUM and HIGH.  */
      if (GET_CODE (XEXP (x, 0)) == CONST
	  || GET_CODE (XEXP (x, 0)) == SYMBOL_REF)
	{
	  SUBST (XEXP (x, 0),
		 gen_rtx_combine (LO_SUM, Pmode,
				  gen_rtx_combine (HIGH, Pmode, XEXP (x, 0)),
				  XEXP (x, 0)));
	  return &XEXP (XEXP (x, 0), 0);
	}
#endif
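
      /* For instance, (mem (symbol_ref "x")) becomes
	    (mem (lo_sum (high (symbol_ref "x")) (symbol_ref "x")))
	 and the HIGH part is returned as the split point; machines with
	 sethi/lui-style insns can then load it separately.  */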

      /* If we have a PLUS whose second operand is a constant and the
	 address is not valid, perhaps we can split it up using
	 the machine-specific way to split large constants.  We use
	 the first pseudo-reg (one of the virtual regs) as a placeholder;
	 it will not remain in the result.  */
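
      /* Sketch of the intent: an address like
	    (plus (reg FP) (const_int 100000))
	 may be invalid on a RISC machine; the backend's splitter can
	 rewrite it through the placeholder as two insns (e.g. loading the
	 high part of the constant first), and the split point then falls
	 between them.  (The constant here is only illustrative.)  */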
      if (GET_CODE (XEXP (x, 0)) == PLUS
	  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
	  && ! memory_address_p (GET_MODE (x), XEXP (x, 0)))
	{
	  rtx reg = regno_reg_rtx[FIRST_PSEUDO_REGISTER];
	  rtx seq = split_insns (gen_rtx (SET, VOIDmode, reg, XEXP (x, 0)),
				 subst_insn);

	  /* This should have produced two insns, each of which sets our
	     placeholder.  If the source of the second is a valid address,
	     we can put both sources together and make a split point
	     in the middle.  */

	  if (seq && XVECLEN (seq, 0) == 2
	      && GET_CODE (XVECEXP (seq, 0, 0)) == INSN
	      && GET_CODE (PATTERN (XVECEXP (seq, 0, 0))) == SET
	      && SET_DEST (PATTERN (XVECEXP (seq, 0, 0))) == reg
	      && ! reg_mentioned_p (reg,
				    SET_SRC (PATTERN (XVECEXP (seq, 0, 0))))
	      && GET_CODE (XVECEXP (seq, 0, 1)) == INSN
	      && GET_CODE (PATTERN (XVECEXP (seq, 0, 1))) == SET
	      && SET_DEST (PATTERN (XVECEXP (seq, 0, 1))) == reg
	      && memory_address_p (GET_MODE (x),
				   SET_SRC (PATTERN (XVECEXP (seq, 0, 1)))))
	    {
	      rtx src1 = SET_SRC (PATTERN (XVECEXP (seq, 0, 0)));
	      rtx src2 = SET_SRC (PATTERN (XVECEXP (seq, 0, 1)));

	      /* Replace the placeholder in SRC2 with SRC1.  If we can
		 find where in SRC2 it was placed, that can become our
		 split point and we can replace this address with SRC2.
		 Just try two obvious places.  */

	      src2 = replace_rtx (src2, reg, src1);
	      split = 0;
	      if (XEXP (src2, 0) == src1)
		split = &XEXP (src2, 0);
	      else if (GET_RTX_FORMAT (GET_CODE (XEXP (src2, 0)))[0] == 'e'
		       && XEXP (XEXP (src2, 0), 0) == src1)
		split = &XEXP (XEXP (src2, 0), 0);

	      if (split)
		{
		  SUBST (XEXP (x, 0), src2);
		  return split;
		}
	    }

	  /* If that didn't work, perhaps the first operand is complex and
	     needs to be computed separately, so make a split point there.
	     This will occur on machines that just support REG + CONST
	     and have a constant moved through some previous computation.  */

	  else if (GET_RTX_CLASS (GET_CODE (XEXP (XEXP (x, 0), 0))) != 'o'
		   && ! (GET_CODE (XEXP (XEXP (x, 0), 0)) == SUBREG
			 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (XEXP (x, 0), 0))))
			     == 'o')))
	    return &XEXP (XEXP (x, 0), 0);
	}
      break;

    case SET:
#ifdef HAVE_cc0
      /* If SET_DEST is CC0 and SET_SRC is not an operand, a COMPARE, or a
	 ZERO_EXTRACT, the most likely reason why this doesn't match is that
	 we need to put the operand into a register.  So split at that
	 point.  */

      if (SET_DEST (x) == cc0_rtx
	  && GET_CODE (SET_SRC (x)) != COMPARE
	  && GET_CODE (SET_SRC (x)) != ZERO_EXTRACT
	  && GET_RTX_CLASS (GET_CODE (SET_SRC (x))) != 'o'
	  && ! (GET_CODE (SET_SRC (x)) == SUBREG
		&& GET_RTX_CLASS (GET_CODE (SUBREG_REG (SET_SRC (x)))) == 'o'))
	return &SET_SRC (x);
#endif

      /* See if we can split SET_SRC as it stands.  */
      split = find_split_point (&SET_SRC (x), insn);
      if (split && split != &SET_SRC (x))
	return split;

      /* See if we can split SET_DEST as it stands.  */
      split = find_split_point (&SET_DEST (x), insn);
      if (split && split != &SET_DEST (x))
	return split;

      /* See if this is a bitfield assignment with everything constant.  If
	 so, this is an IOR of an AND, so split it into that.  */
      if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
	  && (GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
	      <= HOST_BITS_PER_WIDE_INT)
	  && GET_CODE (XEXP (SET_DEST (x), 1)) == CONST_INT
	  && GET_CODE (XEXP (SET_DEST (x), 2)) == CONST_INT
	  && GET_CODE (SET_SRC (x)) == CONST_INT
	  && ((INTVAL (XEXP (SET_DEST (x), 1))
	       + INTVAL (XEXP (SET_DEST (x), 2)))
	      <= GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0))))
	  && ! side_effects_p (XEXP (SET_DEST (x), 0)))
	{
	  int pos = INTVAL (XEXP (SET_DEST (x), 2));
	  int len = INTVAL (XEXP (SET_DEST (x), 1));
	  int src = INTVAL (SET_SRC (x));
	  rtx dest = XEXP (SET_DEST (x), 0);
	  enum machine_mode mode = GET_MODE (dest);
	  unsigned HOST_WIDE_INT mask = ((HOST_WIDE_INT) 1 << len) - 1;

	  if (BITS_BIG_ENDIAN)
	    pos = GET_MODE_BITSIZE (mode) - len - pos;

	  if (src == mask)
	    SUBST (SET_SRC (x),
		   gen_binary (IOR, mode, dest, GEN_INT (src << pos)));
	  else
	    SUBST (SET_SRC (x),
		   gen_binary (IOR, mode,
			       gen_binary (AND, mode, dest,
					   GEN_INT (~ (mask << pos)
						    & GET_MODE_MASK (mode))),
			       GEN_INT (src << pos)));

	  SUBST (SET_DEST (x), dest);

	  split = find_split_point (&SET_SRC (x), insn);
	  if (split && split != &SET_SRC (x))
	    return split;
	}
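
      /* An illustrative case: (set (zero_extract (reg 9) (const_int 4)
					          (const_int 8))
			       (const_int 5))
	 with LEN 4, POS 8, and MASK 15 becomes
	    (set (reg 9) (ior (and (reg 9) <mask clearing bits 8..11>)
			      (const_int 1280)))
	 since (5 << 8) == 1280; the IOR/AND form then exposes new split
	 points.  */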

      /* Otherwise, see if this is an operation that we can split into two.
	 If so, try to split that.  */
      code = GET_CODE (SET_SRC (x));

      switch (code)
	{
	case AND:
	  /* If we are AND'ing with a large constant that is only a single
	     bit and the result is only being used in a context where we
	     need to know if it is zero or non-zero, replace it with a bit
	     extraction.  This will avoid the large constant, which might
	     have taken more than one insn to make.  If the constant were
	     not a valid argument to the AND but took only one insn to make,
	     this is no worse, but if it took more than one insn, it will
	     be better.  */
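
	  /* For example, (set (reg 10) (and (reg 3) (const_int 4096)))
	     whose single use is (ne (reg 10) (const_int 0)) becomes,
	     roughly, a one-bit (zero_extract (reg 3) (const_int 1)
	     (const_int 12)), since exact_log2 (4096) == 12.  */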

	  if (GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
	      && GET_CODE (XEXP (SET_SRC (x), 0)) == REG
	      && (pos = exact_log2 (INTVAL (XEXP (SET_SRC (x), 1)))) >= 7
	      && GET_CODE (SET_DEST (x)) == REG
	      && (split = find_single_use (SET_DEST (x), insn, NULL_PTR)) != 0
	      && (GET_CODE (*split) == EQ || GET_CODE (*split) == NE)
	      && XEXP (*split, 0) == SET_DEST (x)
	      && XEXP (*split, 1) == const0_rtx)
	    {
	      rtx extraction = make_extraction (GET_MODE (SET_DEST (x)),
						XEXP (SET_SRC (x), 0),
						pos, NULL_RTX, 1, 1, 0, 0);
	      if (extraction != 0)
		{
		  SUBST (SET_SRC (x), extraction);
		  return find_split_point (loc, insn);
		}
	    }
	  break;

	case NE:
	  /* If STORE_FLAG_VALUE is -1, this is (NE X 0) and only one bit of X
	     is known to be on, this can be converted into a NEG of a shift.  */
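
	  /* E.g. if nonzero_bits says only bit 3 of X can be set, then
	     (ne X (const_int 0)) becomes (neg (lshiftrt X (const_int 3))):
	     the shift yields 0 or 1 and the NEG yields 0 or -1, which is
	     STORE_FLAG_VALUE.  */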
	  if (STORE_FLAG_VALUE == -1 && XEXP (SET_SRC (x), 1) == const0_rtx
	      && GET_MODE (SET_SRC (x)) == GET_MODE (XEXP (SET_SRC (x), 0))
	      && 1 <= (pos = exact_log2
		       (nonzero_bits (XEXP (SET_SRC (x), 0),
				      GET_MODE (XEXP (SET_SRC (x), 0))))))
	    {
	      enum machine_mode mode = GET_MODE (XEXP (SET_SRC (x), 0));

	      SUBST (SET_SRC (x),
		     gen_rtx_combine (NEG, mode,
				      gen_rtx_combine (LSHIFTRT, mode,
						       XEXP (SET_SRC (x), 0),
						       GEN_INT (pos))));

	      split = find_split_point (&SET_SRC (x), insn);
	      if (split && split != &SET_SRC (x))
		return split;
	    }
	  break;

	case SIGN_EXTEND:
	  inner = XEXP (SET_SRC (x), 0);
	  pos = 0;
	  len = GET_MODE_BITSIZE (GET_MODE (inner));
	  unsignedp = 0;
	  break;

	case SIGN_EXTRACT:
	case ZERO_EXTRACT:
	  if (GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
	      && GET_CODE (XEXP (SET_SRC (x), 2)) == CONST_INT)
	    {
	      inner = XEXP (SET_SRC (x), 0);
	      len = INTVAL (XEXP (SET_SRC (x), 1));
	      pos = INTVAL (XEXP (SET_SRC (x), 2));

	      if (BITS_BIG_ENDIAN)
		pos = GET_MODE_BITSIZE (GET_MODE (inner)) - len - pos;
	      unsignedp = (code == ZERO_EXTRACT);
	    }
	  break;
	}

      if (len && pos >= 0 && pos + len <= GET_MODE_BITSIZE (GET_MODE (inner)))
	{
	  enum machine_mode mode = GET_MODE (SET_SRC (x));

	  /* For unsigned, we have a choice of a shift followed by an
	     AND or two shifts.  Use two shifts for field sizes where the
	     constant might be too large.  We assume here that we can
	     always at least get 8-bit constants in an AND insn, which is
	     true for every current RISC.  */
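
	  /* For instance, extracting a 12-bit field at bit 4 of a 32-bit
	     value uses the two-shift form
		(lshiftrt (ashift X (const_int 16)) (const_int 20)),
	     while an 8-bit-or-narrower field can use
		(and (lshiftrt X (const_int 4)) (const_int 255)).  */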

	  if (unsignedp && len <= 8)
	    {
	      SUBST (SET_SRC (x),
		     gen_rtx_combine
		     (AND, mode,
		      gen_rtx_combine (LSHIFTRT, mode,
				       gen_lowpart_for_combine (mode, inner),
				       GEN_INT (pos)),
		      GEN_INT (((HOST_WIDE_INT) 1 << len) - 1)));

	      split = find_split_point (&SET_SRC (x), insn);
	      if (split && split != &SET_SRC (x))
		return split;
	    }
	  else
	    {
	      SUBST (SET_SRC (x),
		     gen_rtx_combine
		     (unsignedp ? LSHIFTRT : ASHIFTRT, mode,
		      gen_rtx_combine (ASHIFT, mode,
				       gen_lowpart_for_combine (mode, inner),
				       GEN_INT (GET_MODE_BITSIZE (mode)
						- len - pos)),
		      GEN_INT (GET_MODE_BITSIZE (mode) - len)));

	      split = find_split_point (&SET_SRC (x), insn);
	      if (split && split != &SET_SRC (x))
		return split;
	    }
	}

      /* See if this is a simple operation with a constant as the second
	 operand.  It might be that this constant is out of range and hence
	 could be used as a split point.  */
      if ((GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '2'
	   || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == 'c'
	   || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '<')
	  && CONSTANT_P (XEXP (SET_SRC (x), 1))
	  && (GET_RTX_CLASS (GET_CODE (XEXP (SET_SRC (x), 0))) == 'o'
	      || (GET_CODE (XEXP (SET_SRC (x), 0)) == SUBREG
		  && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (SET_SRC (x), 0))))
		      == 'o'))))
	return &XEXP (SET_SRC (x), 1);

      /* Finally, see if this is a simple operation with its first operand
	 not in a register.  The operation might require this operand in a
	 register, so return it as a split point.  We can always do this
	 because if the first operand were another operation, we would have
	 already found it as a split point.  */
      if ((GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '2'
	   || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == 'c'
	   || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '<'
	   || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '1')
	  && ! register_operand (XEXP (SET_SRC (x), 0), VOIDmode))
	return &XEXP (SET_SRC (x), 0);

      return 0;

    case AND:
    case IOR:
      /* We write NOR as (and (not A) (not B)), but if we don't have a NOR,
	 it is better to write this as (not (ior A B)) so we can split it.
	 Similarly for IOR.  */
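
      /* This is just De Morgan's law: (and (not A) (not B)) is
	 (not (ior A B)), and (ior (not A) (not B)) is (not (and A B));
	 the inner IOR or AND then becomes a candidate split point.  */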
      if (GET_CODE (XEXP (x, 0)) == NOT && GET_CODE (XEXP (x, 1)) == NOT)
	{
	  SUBST (*loc,
		 gen_rtx_combine (NOT, GET_MODE (x),
				  gen_rtx_combine (code == IOR ? AND : IOR,
						   GET_MODE (x),
						   XEXP (XEXP (x, 0), 0),
						   XEXP (XEXP (x, 1), 0))));
	  return find_split_point (loc, insn);
	}

      /* Many RISC machines have a large set of logical insns.  If the
	 second operand is a NOT, put it first so we will try to split the
	 other operand first.  */
      if (GET_CODE (XEXP (x, 1)) == NOT)
	{
	  rtx tem = XEXP (x, 0);
	  SUBST (XEXP (x, 0), XEXP (x, 1));
	  SUBST (XEXP (x, 1), tem);
	}
      break;
    }

  /* Otherwise, select our actions depending on our rtx class.  */
  switch (GET_RTX_CLASS (code))
    {
    case 'b':			/* This is ZERO_EXTRACT and SIGN_EXTRACT.  */
    case '3':
      split = find_split_point (&XEXP (x, 2), insn);
      if (split)
	return split;
      /* ... fall through ...  */
    case '2':
    case 'c':
    case '<':
      split = find_split_point (&XEXP (x, 1), insn);
      if (split)
	return split;
      /* ... fall through ...  */
    case '1':
      /* Some machines have (and (shift ...) ...) insns.  If X is not
	 an AND, but XEXP (X, 0) is, use it as our split point.  */
      if (GET_CODE (x) != AND && GET_CODE (XEXP (x, 0)) == AND)
	return &XEXP (x, 0);

      split = find_split_point (&XEXP (x, 0), insn);
      if (split)
	return split;
      return loc;
    }

  /* Otherwise, we don't have a split point.  */
  return 0;
}
\f
/* Throughout X, replace FROM with TO, and return the result.
   The result is TO if X is FROM;
   otherwise the result is X, but its contents may have been modified.
   If they were modified, a record was made in undobuf so that
   undo_all will (among other things) return X to its original state.

   If the number of changes necessary is too much to record to undo,
   the excess changes are not made, so the result is invalid.
   The changes already made can still be undone.
   undobuf.num_undo is incremented for such changes, so by testing that,
   the caller can tell whether the result is valid.

   `n_occurrences' is incremented each time FROM is replaced.

   IN_DEST is non-zero if we are processing the SET_DEST of a SET.

   UNIQUE_COPY is non-zero if each substitution must be unique.  We do this
   by copying if `n_occurrences' is non-zero.  */

static rtx
subst (x, from, to, in_dest, unique_copy)
     register rtx x, from, to;
     int in_dest;
     int unique_copy;
{
f24ad0e4 2871 register enum rtx_code code = GET_CODE (x);
230d793d 2872 enum machine_mode op0_mode = VOIDmode;
8079805d
RK
2873 register char *fmt;
2874 register int len, i;
2875 rtx new;
230d793d
RS
2876
2877/* Two expressions are equal if they are identical copies of a shared
2878 RTX or if they are both registers with the same register number
2879 and mode. */
2880
2881#define COMBINE_RTX_EQUAL_P(X,Y) \
2882 ((X) == (Y) \
2883 || (GET_CODE (X) == REG && GET_CODE (Y) == REG \
2884 && REGNO (X) == REGNO (Y) && GET_MODE (X) == GET_MODE (Y)))
2885
2886 if (! in_dest && COMBINE_RTX_EQUAL_P (x, from))
2887 {
2888 n_occurrences++;
2889 return (unique_copy && n_occurrences > 1 ? copy_rtx (to) : to);
2890 }
2891
2892 /* If X and FROM are the same register but different modes, they will
2893 not have been seen as equal above. However, flow.c will make a
2894 LOG_LINKS entry for that case. If we do nothing, we will try to
2895 rerecognize our original insn and, when it succeeds, we will
2896 delete the feeding insn, which is incorrect.
2897
2898 So force this insn not to match in this (rare) case. */
2899 if (! in_dest && code == REG && GET_CODE (from) == REG
2900 && REGNO (x) == REGNO (from))
2901 return gen_rtx (CLOBBER, GET_MODE (x), const0_rtx);
2902
2903 /* If this is an object, we are done unless it is a MEM or LO_SUM, both
2904 of which may contain things that can be combined. */
2905 if (code != MEM && code != LO_SUM && GET_RTX_CLASS (code) == 'o')
2906 return x;
2907
2908 /* It is possible to have a subexpression appear twice in the insn.
2909 Suppose that FROM is a register that appears within TO.
2910 Then, after that subexpression has been scanned once by `subst',
2911 the second time it is scanned, TO may be found. If we were
2912 to scan TO here, we would find FROM within it and create a
2913 self-referential rtl structure, which is completely wrong. */
2914 if (COMBINE_RTX_EQUAL_P (x, to))
2915 return to;
2916
2917 len = GET_RTX_LENGTH (code);
2918 fmt = GET_RTX_FORMAT (code);
2919
2920 /* We don't need to process a SET_DEST that is a register, CC0, or PC, so
2921 set up to skip this common case. All other cases where we want to
2922 suppress replacing something inside a SET_SRC are handled via the
2923 IN_DEST operand. */
2924 if (code == SET
2925 && (GET_CODE (SET_DEST (x)) == REG
2926 || GET_CODE (SET_DEST (x)) == CC0
2927 || GET_CODE (SET_DEST (x)) == PC))
2928 fmt = "ie";
2929
2930 /* Get the mode of operand 0 in case X is now a SIGN_EXTEND of a
2931 constant. */
2932 if (fmt[0] == 'e')
2933 op0_mode = GET_MODE (XEXP (x, 0));
2934
2935 for (i = 0; i < len; i++)
2936 {
2937 if (fmt[i] == 'E')
2938 {
2939 register int j;
2940 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2941 {
2942 if (COMBINE_RTX_EQUAL_P (XVECEXP (x, i, j), from))
2943 {
2944 new = (unique_copy && n_occurrences ? copy_rtx (to) : to);
2945 n_occurrences++;
2946 }
2947 else
2948 {
2949 new = subst (XVECEXP (x, i, j), from, to, 0, unique_copy);
2950
2951 /* If this substitution failed, this whole thing fails. */
2952 if (GET_CODE (new) == CLOBBER && XEXP (new, 0) == const0_rtx)
2953 return new;
2954 }
2955
2956 SUBST (XVECEXP (x, i, j), new);
2957 }
2958 }
2959 else if (fmt[i] == 'e')
2960 {
2961 if (COMBINE_RTX_EQUAL_P (XEXP (x, i), from))
2962 {
2963 /* In general, don't install a subreg involving two modes not
2964 tieable. It can worsen register allocation, and can even
2965 make invalid reload insns, since the reg inside may need to
2966 be copied from in the outside mode, and that may be invalid
2967 if it is an fp reg copied in integer mode.
2968
2969 We allow two exceptions to this: It is valid if it is inside
2970 another SUBREG and the mode of that SUBREG and the mode of
2971 the inside of TO is tieable and it is valid if X is a SET
2972 that copies FROM to CC0. */
2973 if (GET_CODE (to) == SUBREG
2974 && ! MODES_TIEABLE_P (GET_MODE (to),
2975 GET_MODE (SUBREG_REG (to)))
2976 && ! (code == SUBREG
2977 && MODES_TIEABLE_P (GET_MODE (x),
2978 GET_MODE (SUBREG_REG (to))))
2979#ifdef HAVE_cc0
2980 && ! (code == SET && i == 1 && XEXP (x, 0) == cc0_rtx)
2981#endif
2982 )
2983 return gen_rtx (CLOBBER, VOIDmode, const0_rtx);
2984
2985 new = (unique_copy && n_occurrences ? copy_rtx (to) : to);
2986 n_occurrences++;
2987 }
2988 else
2989 /* If we are in a SET_DEST, suppress most cases unless we
2990 have gone inside a MEM, in which case we want to
2991 simplify the address. We assume here that things that
2992 are actually part of the destination have their inner
2993 parts in the first expression. This is true for SUBREG,
2994 STRICT_LOW_PART, and ZERO_EXTRACT, which are the only
2995 things aside from REG and MEM that should appear in a
2996 SET_DEST. */
2997 new = subst (XEXP (x, i), from, to,
2998 (((in_dest
2999 && (code == SUBREG || code == STRICT_LOW_PART
3000 || code == ZERO_EXTRACT))
3001 || code == SET)
3002 && i == 0), unique_copy);
3003
3004 /* If we found that we will have to reject this combination,
3005 indicate that by returning the CLOBBER ourselves, rather than
3006 an expression containing it. This will speed things up as
3007 well as prevent accidents where two CLOBBERs are considered
3008 to be equal, thus producing an incorrect simplification. */
3009
3010 if (GET_CODE (new) == CLOBBER && XEXP (new, 0) == const0_rtx)
3011 return new;
3012
3013 SUBST (XEXP (x, i), new);
3014 }
3015 }
3016
3017 /* Try to simplify X. If the simplification changed the code, it is likely
3018 that further simplification will help, so loop, but limit the number
3019 of repetitions that will be performed. */
3020
3021 for (i = 0; i < 4; i++)
3022 {
3023 /* If X is sufficiently simple, don't bother trying to do anything
3024 with it. */
3025 if (code != CONST_INT && code != REG && code != CLOBBER)
3026 x = simplify_rtx (x, op0_mode, i == 3, in_dest);
d0ab8cd3 3027
3028 if (GET_CODE (x) == code)
3029 break;
d0ab8cd3 3030
8079805d 3031 code = GET_CODE (x);
eeb43d32 3032
3033 /* We no longer know the original mode of operand 0 since we
3034 have changed the form of X. */
3035 op0_mode = VOIDmode;
3036 }
eeb43d32 3037
3038 return x;
3039}
3040\f
3041/* Simplify X, a piece of RTL. We just operate on the expression at the
3042 outer level; call `subst' to simplify recursively. Return the new
3043 expression.
3044
3045 OP0_MODE is the original mode of XEXP (x, 0); LAST is nonzero if this
3046 will be the last iteration even if an expression with a code different from
3047 X is returned; IN_DEST is nonzero if we are inside a SET_DEST. */
eeb43d32 3048
3049static rtx
3050simplify_rtx (x, op0_mode, last, in_dest)
3051 rtx x;
3052 enum machine_mode op0_mode;
3053 int last;
3054 int in_dest;
3055{
3056 enum rtx_code code = GET_CODE (x);
3057 enum machine_mode mode = GET_MODE (x);
3058 rtx temp;
3059 int i;
d0ab8cd3 3060
3061 /* If this is a commutative operation, put a constant last and a complex
3062 expression first. We don't need to do this for comparisons here. */
3063 if (GET_RTX_CLASS (code) == 'c'
3064 && ((CONSTANT_P (XEXP (x, 0)) && GET_CODE (XEXP (x, 1)) != CONST_INT)
3065 || (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == 'o'
3066 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) != 'o')
3067 || (GET_CODE (XEXP (x, 0)) == SUBREG
3068 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0)))) == 'o'
3069 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) != 'o')))
3070 {
3071 temp = XEXP (x, 0);
3072 SUBST (XEXP (x, 0), XEXP (x, 1));
3073 SUBST (XEXP (x, 1), temp);
3074 }
3075
3076 /* If this is a PLUS, MINUS, or MULT, and the first operand is the
3077 sign extension of a PLUS with a constant, reverse the order of the sign
3078 extension and the addition. Note that this is not the same as the original
3079 code, but overflow is undefined for signed values. Also note that the
3080 PLUS will have been partially moved "inside" the sign-extension, so that
3081 the first operand of X will really look like:
3082 (ashiftrt (plus (ashift A C4) C5) C4).
3083 We convert this to
3084 (plus (ashiftrt (ashift A C4) C2) C4)
3085 and replace the first operand of X with that expression. Later parts
3086 of this function may simplify the expression further.
3087
3088 For example, if we start with (mult (sign_extend (plus A C1)) C2),
3089 we swap the SIGN_EXTEND and PLUS. Later code will apply the
3090 distributive law to produce (plus (mult (sign_extend X) C1) C3).
3091
3092 We do this to simplify address expressions. */
3093
3094 if ((code == PLUS || code == MINUS || code == MULT)
3095 && GET_CODE (XEXP (x, 0)) == ASHIFTRT
3096 && GET_CODE (XEXP (XEXP (x, 0), 0)) == PLUS
3097 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == ASHIFT
3098 && GET_CODE (XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 1)) == CONST_INT
3099 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3100 && XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 1) == XEXP (XEXP (x, 0), 1)
3101 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
3102 && (temp = simplify_binary_operation (ASHIFTRT, mode,
3103 XEXP (XEXP (XEXP (x, 0), 0), 1),
3104 XEXP (XEXP (x, 0), 1))) != 0)
3105 {
3106 rtx new
3107 = simplify_shift_const (NULL_RTX, ASHIFT, mode,
3108 XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 0),
3109 INTVAL (XEXP (XEXP (x, 0), 1)));
3110
3111 new = simplify_shift_const (NULL_RTX, ASHIFTRT, mode, new,
3112 INTVAL (XEXP (XEXP (x, 0), 1)));
3113
3114 SUBST (XEXP (x, 0), gen_binary (PLUS, mode, new, temp));
3115 }
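/* For instance (an illustrative SImode example):  after
   expand_compound_operation,
	(mult (sign_extend:SI (plus:QI A (const_int 3))) (const_int 4))
   has a first operand of the form
	(ashiftrt:SI (plus:SI (ashift:SI A (const_int 24))
			      (const_int 50331648))
		     (const_int 24))
   where 50331648 == 3 << 24; the code above rewrites it as
	(plus:SI (ashiftrt:SI (ashift:SI A (const_int 24)) (const_int 24))
		 (const_int 3))
   since simplify_binary_operation folds (ashiftrt 50331648 24) to 3.  */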
3116
3117 /* If this is a simple operation applied to an IF_THEN_ELSE, try
3118 applying it to the arms of the IF_THEN_ELSE. This often simplifies
3119 things. Check for cases where both arms are testing the same
3120 condition.
3121
3122 Don't do anything if all operands are very simple. */
3123
3124 if (((GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c'
3125 || GET_RTX_CLASS (code) == '<')
3126 && ((GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) != 'o'
3127 && ! (GET_CODE (XEXP (x, 0)) == SUBREG
3128 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0))))
3129 == 'o')))
3130 || (GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) != 'o'
3131 && ! (GET_CODE (XEXP (x, 1)) == SUBREG
3132 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 1))))
3133 == 'o')))))
3134 || (GET_RTX_CLASS (code) == '1'
3135 && ((GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) != 'o'
3136 && ! (GET_CODE (XEXP (x, 0)) == SUBREG
3137 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0))))
3138 == 'o'))))))
d0ab8cd3 3139 {
3140 rtx cond, true, false;
3141
3142 cond = if_then_else_cond (x, &true, &false);
3143 if (cond != 0
3144 /* If everything is a comparison, what we have is highly unlikely
3145 to be simpler, so don't use it. */
3146 && ! (GET_RTX_CLASS (code) == '<'
3147 && (GET_RTX_CLASS (GET_CODE (true)) == '<'
3148 || GET_RTX_CLASS (GET_CODE (false)) == '<')))
3149 {
3150 rtx cop1 = const0_rtx;
3151 enum rtx_code cond_code = simplify_comparison (NE, &cond, &cop1);
3152
3153 if (cond_code == NE && GET_RTX_CLASS (GET_CODE (cond)) == '<')
3154 return x;
3155
3156 /* Simplify the alternative arms; this may collapse the true and
3157 false arms to store-flag values. */
3158 true = subst (true, pc_rtx, pc_rtx, 0, 0);
3159 false = subst (false, pc_rtx, pc_rtx, 0, 0);
3160
3161 /* Restarting if we generate a store-flag expression will cause
3162 us to loop. Just drop through in this case. */
3163
3164 /* If the result values are STORE_FLAG_VALUE and zero, we can
3165 just make the comparison operation. */
3166 if (true == const_true_rtx && false == const0_rtx)
3167 x = gen_binary (cond_code, mode, cond, cop1);
3168 else if (true == const0_rtx && false == const_true_rtx)
3169 x = gen_binary (reverse_condition (cond_code), mode, cond, cop1);
3170
3171 /* Likewise, we can make the negate of a comparison operation
3172 if the result values are - STORE_FLAG_VALUE and zero. */
3173 else if (GET_CODE (true) == CONST_INT
3174 && INTVAL (true) == - STORE_FLAG_VALUE
3175 && false == const0_rtx)
0c1c8ea6 3176 x = gen_unary (NEG, mode, mode,
3177 gen_binary (cond_code, mode, cond, cop1));
3178 else if (GET_CODE (false) == CONST_INT
3179 && INTVAL (false) == - STORE_FLAG_VALUE
3180 && true == const0_rtx)
0c1c8ea6 3181 x = gen_unary (NEG, mode, mode,
3182 gen_binary (reverse_condition (cond_code),
3183 mode, cond, cop1));
3184 else
3185 return gen_rtx (IF_THEN_ELSE, mode,
3186 gen_binary (cond_code, VOIDmode, cond, cop1),
3187 true, false);
5109d49f 3188
3189 code = GET_CODE (x);
3190 op0_mode = VOIDmode;
abe6e52f 3191 }
3192 }
3193
3194 /* Try to fold this expression in case we have constants that weren't
3195 present before. */
3196 temp = 0;
3197 switch (GET_RTX_CLASS (code))
3198 {
3199 case '1':
3200 temp = simplify_unary_operation (code, mode, XEXP (x, 0), op0_mode);
3201 break;
3202 case '<':
3203 temp = simplify_relational_operation (code, op0_mode,
3204 XEXP (x, 0), XEXP (x, 1));
3205#ifdef FLOAT_STORE_FLAG_VALUE
3206 if (temp != 0 && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
3207 temp = ((temp == const0_rtx) ? CONST0_RTX (GET_MODE (x))
3208 : immed_real_const_1 (FLOAT_STORE_FLAG_VALUE, GET_MODE (x)));
3209#endif
3210 break;
3211 case 'c':
3212 case '2':
3213 temp = simplify_binary_operation (code, mode, XEXP (x, 0), XEXP (x, 1));
3214 break;
3215 case 'b':
3216 case '3':
3217 temp = simplify_ternary_operation (code, mode, op0_mode, XEXP (x, 0),
3218 XEXP (x, 1), XEXP (x, 2));
3219 break;
3220 }
3221
3222 if (temp)
d0ab8cd3 3223 x = temp, code = GET_CODE (temp);
230d793d 3224
230d793d 3225 /* First see if we can apply the inverse distributive law. */
3226 if (code == PLUS || code == MINUS
3227 || code == AND || code == IOR || code == XOR)
3228 {
3229 x = apply_distributive_law (x);
3230 code = GET_CODE (x);
3231 }
3232
3233 /* If CODE is an associative operation not otherwise handled, see if we
3234 can associate some operands. This can win if they are constants or
3235 if they are logically related (i.e. (a & b) & a). */
3236 if ((code == PLUS || code == MINUS
3237 || code == MULT || code == AND || code == IOR || code == XOR
3238 || code == DIV || code == UDIV
3239 || code == SMAX || code == SMIN || code == UMAX || code == UMIN)
3ad2180a 3240 && INTEGRAL_MODE_P (mode))
3241 {
3242 if (GET_CODE (XEXP (x, 0)) == code)
3243 {
3244 rtx other = XEXP (XEXP (x, 0), 0);
3245 rtx inner_op0 = XEXP (XEXP (x, 0), 1);
3246 rtx inner_op1 = XEXP (x, 1);
3247 rtx inner;
3248
3249 /* Make sure we pass the constant operand if any as the second
3250 one if this is a commutative operation. */
3251 if (CONSTANT_P (inner_op0) && GET_RTX_CLASS (code) == 'c')
3252 {
3253 rtx tem = inner_op0;
3254 inner_op0 = inner_op1;
3255 inner_op1 = tem;
3256 }
3257 inner = simplify_binary_operation (code == MINUS ? PLUS
3258 : code == DIV ? MULT
3259 : code == UDIV ? MULT
3260 : code,
3261 mode, inner_op0, inner_op1);
3262
3263 /* For commutative operations, try the other pair if that one
3264 didn't simplify. */
3265 if (inner == 0 && GET_RTX_CLASS (code) == 'c')
3266 {
3267 other = XEXP (XEXP (x, 0), 1);
3268 inner = simplify_binary_operation (code, mode,
3269 XEXP (XEXP (x, 0), 0),
3270 XEXP (x, 1));
3271 }
3272
3273 if (inner)
8079805d 3274 return gen_binary (code, mode, other, inner);
3275 }
3276 }
3277
3278 /* A little bit of algebraic simplification here. */
3279 switch (code)
3280 {
3281 case MEM:
3282 /* Ensure that our address has any ASHIFTs converted to MULT in case
3283 address-recognizing predicates are called later. */
3284 temp = make_compound_operation (XEXP (x, 0), MEM);
3285 SUBST (XEXP (x, 0), temp);
3286 break;
3287
3288 case SUBREG:
3289 /* (subreg:A (mem:B X) N) becomes a modified MEM unless the SUBREG
3290 is paradoxical. If we can't do that safely, then it becomes
3291 something nonsensical so that this combination won't take place. */
3292
3293 if (GET_CODE (SUBREG_REG (x)) == MEM
3294 && (GET_MODE_SIZE (mode)
3295 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
3296 {
3297 rtx inner = SUBREG_REG (x);
3298 int endian_offset = 0;
3299 /* Don't change the mode of the MEM
3300 if that would change the meaning of the address. */
3301 if (MEM_VOLATILE_P (SUBREG_REG (x))
3302 || mode_dependent_address_p (XEXP (inner, 0)))
3303 return gen_rtx (CLOBBER, mode, const0_rtx);
3304
3305 if (BYTES_BIG_ENDIAN)
3306 {
3307 if (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
3308 endian_offset += UNITS_PER_WORD - GET_MODE_SIZE (mode);
3309 if (GET_MODE_SIZE (GET_MODE (inner)) < UNITS_PER_WORD)
3310 endian_offset -= (UNITS_PER_WORD
3311 - GET_MODE_SIZE (GET_MODE (inner)));
3312 }
3313 /* Note if the plus_constant doesn't make a valid address
3314 then this combination won't be accepted. */
3315 x = gen_rtx (MEM, mode,
3316 plus_constant (XEXP (inner, 0),
3317 (SUBREG_WORD (x) * UNITS_PER_WORD
3318 + endian_offset)));
3319 MEM_VOLATILE_P (x) = MEM_VOLATILE_P (inner);
3320 RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (inner);
3321 MEM_IN_STRUCT_P (x) = MEM_IN_STRUCT_P (inner);
3322 return x;
3323 }
3324
3325 /* If we are in a SET_DEST, these other cases can't apply. */
3326 if (in_dest)
3327 return x;
3328
3329 /* Changing mode twice with SUBREG => just change it once,
3330 or not at all if changing back to starting mode. */
3331 if (GET_CODE (SUBREG_REG (x)) == SUBREG)
3332 {
3333 if (mode == GET_MODE (SUBREG_REG (SUBREG_REG (x)))
3334 && SUBREG_WORD (x) == 0 && SUBREG_WORD (SUBREG_REG (x)) == 0)
3335 return SUBREG_REG (SUBREG_REG (x));
3336
3337 SUBST_INT (SUBREG_WORD (x),
3338 SUBREG_WORD (x) + SUBREG_WORD (SUBREG_REG (x)));
3339 SUBST (SUBREG_REG (x), SUBREG_REG (SUBREG_REG (x)));
3340 }
3341
3342 /* SUBREG of a hard register => just change the register number
3343 and/or mode. If the hard register is not valid in that mode,
3344 suppress this combination. If the hard register is the stack,
3345 frame, or argument pointer, leave this as a SUBREG. */
3346
3347 if (GET_CODE (SUBREG_REG (x)) == REG
3348 && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER
3349 && REGNO (SUBREG_REG (x)) != FRAME_POINTER_REGNUM
3350#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3351 && REGNO (SUBREG_REG (x)) != HARD_FRAME_POINTER_REGNUM
3352#endif
3353#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3354 && REGNO (SUBREG_REG (x)) != ARG_POINTER_REGNUM
3355#endif
3356 && REGNO (SUBREG_REG (x)) != STACK_POINTER_REGNUM)
3357 {
3358 if (HARD_REGNO_MODE_OK (REGNO (SUBREG_REG (x)) + SUBREG_WORD (x),
3359 mode))
3360 return gen_rtx (REG, mode,
3361 REGNO (SUBREG_REG (x)) + SUBREG_WORD (x));
3362 else
3363 return gen_rtx (CLOBBER, mode, const0_rtx);
3364 }
3365
3366 /* For a constant, try to pick up the part we want. Handle a full
3367 word and low-order part. Only do this if we are narrowing
3368 the constant; if it is being widened, we have no idea what
3369 the extra bits will have been set to. */
3370
3371 if (CONSTANT_P (SUBREG_REG (x)) && op0_mode != VOIDmode
3372 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
a4bde0b1 3373 && GET_MODE_SIZE (op0_mode) < UNITS_PER_WORD
3374 && GET_MODE_CLASS (mode) == MODE_INT)
3375 {
3376 temp = operand_subword (SUBREG_REG (x), SUBREG_WORD (x),
5f4f0e22 3377 0, op0_mode);
3378 if (temp)
3379 return temp;
3380 }
3381
3382 /* If we want a subreg of a constant, at offset 0,
3383 take the low bits. On a little-endian machine, that's
3384 always valid. On a big-endian machine, it's valid
3385 only if the constant's mode fits in one word. */
a4bde0b1 3386 if (CONSTANT_P (SUBREG_REG (x)) && subreg_lowpart_p (x)
f82da7d2 3387 && GET_MODE_SIZE (mode) <= GET_MODE_SIZE (op0_mode)
3388 && (! WORDS_BIG_ENDIAN
3389 || GET_MODE_BITSIZE (op0_mode) <= BITS_PER_WORD))
3390 return gen_lowpart_for_combine (mode, SUBREG_REG (x));
3391
3392 /* A paradoxical SUBREG of a VOIDmode constant is the same constant,
3393 since we are saying that the high bits don't matter. */
3394 if (CONSTANT_P (SUBREG_REG (x)) && GET_MODE (SUBREG_REG (x)) == VOIDmode
3395 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (op0_mode))
3396 return SUBREG_REG (x);
3397
3398 /* Note that we cannot do any narrowing for non-constants since
3399 we might have been counting on using the fact that some bits were
3400 zero. We now do this in the SET. */
3401
3402 break;
3403
3404 case NOT:
3405 /* (not (plus X -1)) can become (neg X). */
3406 if (GET_CODE (XEXP (x, 0)) == PLUS
3407 && XEXP (XEXP (x, 0), 1) == constm1_rtx)
8079805d 3408 return gen_rtx_combine (NEG, mode, XEXP (XEXP (x, 0), 0));
3409
3410 /* Similarly, (not (neg X)) is (plus X -1). */
3411 if (GET_CODE (XEXP (x, 0)) == NEG)
3412 return gen_rtx_combine (PLUS, mode, XEXP (XEXP (x, 0), 0),
3413 constm1_rtx);
230d793d 3414
3415 /* (not (xor X C)) for C constant is (xor X D) with D = ~ C. */
3416 if (GET_CODE (XEXP (x, 0)) == XOR
3417 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3418 && (temp = simplify_unary_operation (NOT, mode,
3419 XEXP (XEXP (x, 0), 1),
3420 mode)) != 0)
787745f5 3421 return gen_binary (XOR, mode, XEXP (XEXP (x, 0), 0), temp);
d0ab8cd3 3422
3423 /* (not (ashift 1 X)) is (rotate ~1 X). We used to do this for operands
3424 other than 1, but that is not valid. We could do a similar
3425 simplification for (not (lshiftrt C X)) where C is just the sign bit,
3426 but this doesn't seem common enough to bother with. */
3427 if (GET_CODE (XEXP (x, 0)) == ASHIFT
3428 && XEXP (XEXP (x, 0), 0) == const1_rtx)
0c1c8ea6 3429 return gen_rtx (ROTATE, mode, gen_unary (NOT, mode, mode, const1_rtx),
8079805d 3430 XEXP (XEXP (x, 0), 1));
3431
3432 if (GET_CODE (XEXP (x, 0)) == SUBREG
3433 && subreg_lowpart_p (XEXP (x, 0))
3434 && (GET_MODE_SIZE (GET_MODE (XEXP (x, 0)))
3435 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (x, 0)))))
3436 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == ASHIFT
3437 && XEXP (SUBREG_REG (XEXP (x, 0)), 0) == const1_rtx)
3438 {
3439 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (XEXP (x, 0)));
3440
3441 x = gen_rtx (ROTATE, inner_mode,
0c1c8ea6 3442 gen_unary (NOT, inner_mode, inner_mode, const1_rtx),
230d793d 3443 XEXP (SUBREG_REG (XEXP (x, 0)), 1));
8079805d 3444 return gen_lowpart_for_combine (mode, x);
3445 }
3446
3447 /* If STORE_FLAG_VALUE is -1, (not (comparison foo bar)) can be done by
3448 reversing the comparison code if valid. */
3449 if (STORE_FLAG_VALUE == -1
3450 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
3451 && reversible_comparison_p (XEXP (x, 0)))
3452 return gen_rtx_combine (reverse_condition (GET_CODE (XEXP (x, 0))),
3453 mode, XEXP (XEXP (x, 0), 0),
3454 XEXP (XEXP (x, 0), 1));
3455
3456 /* (ashiftrt foo C) where C is the number of bits in FOO minus 1
3457 is (lt foo (const_int 0)) if STORE_FLAG_VALUE is -1, so we can
3458 perform the above simplification. */
500c518b 3459
3460 if (STORE_FLAG_VALUE == -1
3461 && XEXP (x, 1) == const1_rtx
3462 && GET_CODE (XEXP (x, 0)) == ASHIFTRT
3463 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3464 && INTVAL (XEXP (XEXP (x, 0), 1)) == GET_MODE_BITSIZE (mode) - 1)
3465 return gen_rtx_combine (GE, mode, XEXP (XEXP (x, 0), 0), const0_rtx);
3466
3467 /* Apply De Morgan's laws to reduce number of patterns for machines
3468 with negating logical insns (and-not, nand, etc.). If result has
3469 only one NOT, put it first, since that is how the patterns are
3470 coded. */
3471
3472 if (GET_CODE (XEXP (x, 0)) == IOR || GET_CODE (XEXP (x, 0)) == AND)
3473 {
3474 rtx in1 = XEXP (XEXP (x, 0), 0), in2 = XEXP (XEXP (x, 0), 1);
3475
3476 if (GET_CODE (in1) == NOT)
3477 in1 = XEXP (in1, 0);
3478 else
3479 in1 = gen_rtx_combine (NOT, GET_MODE (in1), in1);
3480
3481 if (GET_CODE (in2) == NOT)
3482 in2 = XEXP (in2, 0);
3483 else if (GET_CODE (in2) == CONST_INT
3484 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3485 in2 = GEN_INT (GET_MODE_MASK (mode) & ~ INTVAL (in2));
3486 else
3487 in2 = gen_rtx_combine (NOT, GET_MODE (in2), in2);
3488
3489 if (GET_CODE (in2) == NOT)
3490 {
3491 rtx tem = in2;
3492 in2 = in1; in1 = tem;
3493 }
3494
3495 return gen_rtx_combine (GET_CODE (XEXP (x, 0)) == IOR ? AND : IOR,
3496 mode, in1, in2);
3497 }
3498 break;
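/* Concrete instances of the NOT rewrites above (illustrative, 32-bit):
   (not (xor X (const_int 15))) becomes (xor X (const_int -16)), since
   ~15 == -16; and by De Morgan, (not (and X Y)) becomes
   (ior (not X) (not Y)) -- e.g. with X == 0x5A and Y == 0x0F,
   ~(0x5A & 0x0F) == 0xFFFFFFF5 == ~0x5A | ~0x0F.  */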
3499
3500 case NEG:
3501 /* (neg (plus X 1)) can become (not X). */
3502 if (GET_CODE (XEXP (x, 0)) == PLUS
3503 && XEXP (XEXP (x, 0), 1) == const1_rtx)
8079805d 3504 return gen_rtx_combine (NOT, mode, XEXP (XEXP (x, 0), 0));
3505
3506 /* Similarly, (neg (not X)) is (plus X 1). */
3507 if (GET_CODE (XEXP (x, 0)) == NOT)
8079805d 3508 return plus_constant (XEXP (XEXP (x, 0), 0), 1);
230d793d 3509
3510 /* (neg (minus X Y)) can become (minus Y X). */
3511 if (GET_CODE (XEXP (x, 0)) == MINUS
3ad2180a 3512 && (! FLOAT_MODE_P (mode)
0f41302f 3513 /* x-y != -(y-x) with IEEE floating point. */
3514 || TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3515 || flag_fast_math))
3516 return gen_binary (MINUS, mode, XEXP (XEXP (x, 0), 1),
3517 XEXP (XEXP (x, 0), 0));
230d793d 3518
0f41302f 3519 /* (neg (xor A 1)) is (plus A -1) if A is known to be either 0 or 1. */
d0ab8cd3 3520 if (GET_CODE (XEXP (x, 0)) == XOR && XEXP (XEXP (x, 0), 1) == const1_rtx
951553af 3521 && nonzero_bits (XEXP (XEXP (x, 0), 0), mode) == 1)
8079805d 3522 return gen_binary (PLUS, mode, XEXP (XEXP (x, 0), 0), constm1_rtx);
d0ab8cd3 3523
3524 /* NEG commutes with ASHIFT since it is multiplication. Only do this
3525 if we can then eliminate the NEG (e.g.,
3526 if the operand is a constant). */
3527
3528 if (GET_CODE (XEXP (x, 0)) == ASHIFT)
3529 {
3530 temp = simplify_unary_operation (NEG, mode,
3531 XEXP (XEXP (x, 0), 0), mode);
3532 if (temp)
3533 {
3534 SUBST (XEXP (XEXP (x, 0), 0), temp);
3535 return XEXP (x, 0);
3536 }
3537 }
3538
3539 temp = expand_compound_operation (XEXP (x, 0));
3540
3541 /* For C equal to the width of MODE minus 1, (neg (ashiftrt X C)) can be
3542 replaced by (lshiftrt X C). This will convert
3543 (neg (sign_extract X 1 Y)) to (zero_extract X 1 Y). */
3544
3545 if (GET_CODE (temp) == ASHIFTRT
3546 && GET_CODE (XEXP (temp, 1)) == CONST_INT
3547 && INTVAL (XEXP (temp, 1)) == GET_MODE_BITSIZE (mode) - 1)
3548 return simplify_shift_const (temp, LSHIFTRT, mode, XEXP (temp, 0),
3549 INTVAL (XEXP (temp, 1)));
230d793d 3550
951553af 3551 /* If X has only a single bit that might be nonzero, say, bit I, convert
3552 (neg X) to (ashiftrt (ashift X C-I) C-I) where C is the bitsize of
3553 MODE minus 1. This will convert (neg (zero_extract X 1 Y)) to
3554 (sign_extract X 1 Y). But only do this if TEMP isn't a register
3555 or a SUBREG of one since we'd be making the expression more
3556 complex if it was just a register. */
3557
3558 if (GET_CODE (temp) != REG
3559 && ! (GET_CODE (temp) == SUBREG
3560 && GET_CODE (SUBREG_REG (temp)) == REG)
951553af 3561 && (i = exact_log2 (nonzero_bits (temp, mode))) >= 0)
3562 {
3563 rtx temp1 = simplify_shift_const
3564 (NULL_RTX, ASHIFTRT, mode,
3565 simplify_shift_const (NULL_RTX, ASHIFT, mode, temp,
3566 GET_MODE_BITSIZE (mode) - 1 - i),
3567 GET_MODE_BITSIZE (mode) - 1 - i);
3568
3569 /* If all we did was surround TEMP with the two shifts, we
3570 haven't improved anything, so don't use it. Otherwise,
3571 we are better off with TEMP1. */
3572 if (GET_CODE (temp1) != ASHIFTRT
3573 || GET_CODE (XEXP (temp1, 0)) != ASHIFT
3574 || XEXP (XEXP (temp1, 0), 0) != temp)
8079805d 3575 return temp1;
3576 }
3577 break;
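/* A worked instance of the single-bit case above (illustrative, SImode,
   so GET_MODE_BITSIZE is 32):  if TEMP is known to be either 0 or 8
   (only bit 3 can be nonzero, so I == 3), (neg TEMP) becomes
	(ashiftrt (ashift TEMP (const_int 28)) (const_int 28)):
   8 << 28 == 0x80000000, and an arithmetic right shift by 28 gives
   0xFFFFFFF8 == -8, while 0 stays 0.  */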
3578
3579 case TRUNCATE:
3580 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3581 SUBST (XEXP (x, 0),
3582 force_to_mode (XEXP (x, 0), GET_MODE (XEXP (x, 0)),
3583 GET_MODE_MASK (mode), NULL_RTX, 0));
3584
3585 /* (truncate:SI ({sign,zero}_extend:DI foo:SI)) == foo:SI. */
3586 if ((GET_CODE (XEXP (x, 0)) == SIGN_EXTEND
3587 || GET_CODE (XEXP (x, 0)) == ZERO_EXTEND)
3588 && GET_MODE (XEXP (XEXP (x, 0), 0)) == mode)
3589 return XEXP (XEXP (x, 0), 0);
3590
3591 /* (truncate:SI (OP:DI ({sign,zero}_extend:DI foo:SI))) is
3592 (OP:SI foo:SI) if OP is NEG or ABS. */
3593 if ((GET_CODE (XEXP (x, 0)) == ABS
3594 || GET_CODE (XEXP (x, 0)) == NEG)
3595 && (GET_CODE (XEXP (XEXP (x, 0), 0)) == SIGN_EXTEND
3596 || GET_CODE (XEXP (XEXP (x, 0), 0)) == ZERO_EXTEND)
3597 && GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == mode)
3598 return gen_unary (GET_CODE (XEXP (x, 0)), mode, mode,
3599 XEXP (XEXP (XEXP (x, 0), 0), 0));
3600
3601 /* (truncate:SI (subreg:DI (truncate:SI X) 0)) is
3602 (truncate:SI x). */
3603 if (GET_CODE (XEXP (x, 0)) == SUBREG
3604 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == TRUNCATE
3605 && subreg_lowpart_p (XEXP (x, 0)))
3606 return SUBREG_REG (XEXP (x, 0));
3607
3608 /* If we know that the value is already truncated, we can
3609 replace the TRUNCATE with a SUBREG. */
3610 if (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) <= HOST_BITS_PER_WIDE_INT
3611 && (nonzero_bits (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
3612 &~ GET_MODE_MASK (mode)) == 0)
3613 return gen_lowpart_for_combine (mode, XEXP (x, 0));
3614
3615 /* A truncate of a comparison can be replaced with a subreg if
3616 STORE_FLAG_VALUE permits. This is like the previous test,
3617 but it works even if the comparison is done in a mode larger
3618 than HOST_BITS_PER_WIDE_INT. */
3619 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3620 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
3621 && ((HOST_WIDE_INT) STORE_FLAG_VALUE &~ GET_MODE_MASK (mode)) == 0)
3622 return gen_lowpart_for_combine (mode, XEXP (x, 0));
3623
3624 /* Similarly, a truncate of a register whose value is a
3625 comparison can be replaced with a subreg if STORE_FLAG_VALUE
3626 permits. */
3627 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3628 && ((HOST_WIDE_INT) STORE_FLAG_VALUE &~ GET_MODE_MASK (mode)) == 0
3629 && (temp = get_last_value (XEXP (x, 0)))
3630 && GET_RTX_CLASS (GET_CODE (temp)) == '<')
3631 return gen_lowpart_for_combine (mode, XEXP (x, 0));
3632
3633 break;
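/* For example (illustrative):  in
	(truncate:QI (and:SI X (const_int 255)))
   the nonzero_bits of the operand, 255, lie entirely within the QImode
   mask, so the TRUNCATE is replaced by the low-order part
	(subreg:QI (and:SI X (const_int 255)) 0).  */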
3634
3635 case FLOAT_TRUNCATE:
3636 /* (float_truncate:SF (float_extend:DF foo:SF)) = foo:SF. */
3637 if (GET_CODE (XEXP (x, 0)) == FLOAT_EXTEND
3638 && GET_MODE (XEXP (XEXP (x, 0), 0)) == mode)
3639 return XEXP (XEXP (x, 0), 0);
3640
3641 /* (float_truncate:SF (OP:DF (float_extend:DF foo:sf))) is
3642 (OP:SF foo:SF) if OP is NEG or ABS. */
3643 if ((GET_CODE (XEXP (x, 0)) == ABS
3644 || GET_CODE (XEXP (x, 0)) == NEG)
3645 && GET_CODE (XEXP (XEXP (x, 0), 0)) == FLOAT_EXTEND
3646 && GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == mode)
3647 return gen_unary (GET_CODE (XEXP (x, 0)), mode, mode,
3648 XEXP (XEXP (XEXP (x, 0), 0), 0));
3649
3650 /* (float_truncate:SF (subreg:DF (float_truncate:SF X) 0))
3651 is (float_truncate:SF x). */
3652 if (GET_CODE (XEXP (x, 0)) == SUBREG
3653 && subreg_lowpart_p (XEXP (x, 0))
3654 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == FLOAT_TRUNCATE)
3655 return SUBREG_REG (XEXP (x, 0));
3656 break;
3657
3658#ifdef HAVE_cc0
3659 case COMPARE:
3660 /* Convert (compare FOO (const_int 0)) to FOO unless we aren't
3661 using cc0, in which case we want to leave it as a COMPARE
3662 so we can distinguish it from a register-register-copy. */
3663 if (XEXP (x, 1) == const0_rtx)
3664 return XEXP (x, 0);
3665
3666 /* In IEEE floating point, x-0 is not the same as x. */
3667 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3668 || ! FLOAT_MODE_P (GET_MODE (XEXP (x, 0)))
3669 || flag_fast_math)
3670 && XEXP (x, 1) == CONST0_RTX (GET_MODE (XEXP (x, 0))))
3671 return XEXP (x, 0);
3672 break;
3673#endif
3674
3675 case CONST:
3676 /* (const (const X)) can become (const X). Do it this way rather than
3677 returning the inner CONST since CONST can be shared with a
3678 REG_EQUAL note. */
3679 if (GET_CODE (XEXP (x, 0)) == CONST)
3680 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
3681 break;
3682
3683#ifdef HAVE_lo_sum
3684 case LO_SUM:
3685 /* Convert (lo_sum (high FOO) FOO) to FOO. This is necessary so we
3686 can add in an offset. find_split_point will split this address up
3687 again if it doesn't match. */
3688 if (GET_CODE (XEXP (x, 0)) == HIGH
3689 && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1)))
3690 return XEXP (x, 1);
3691 break;
3692#endif
3693
3694 case PLUS:
3695 /* If we have (plus (plus (A const) B)), associate it so that CONST is
3696 outermost. That's because that's the way indexed addresses are
3697 supposed to appear. This code used to check many more cases, but
3698 they are now checked elsewhere. */
3699 if (GET_CODE (XEXP (x, 0)) == PLUS
3700 && CONSTANT_ADDRESS_P (XEXP (XEXP (x, 0), 1)))
3701 return gen_binary (PLUS, mode,
3702 gen_binary (PLUS, mode, XEXP (XEXP (x, 0), 0),
3703 XEXP (x, 1)),
3704 XEXP (XEXP (x, 0), 1));
3705
3706 /* (plus (xor (and <foo> (const_int pow2 - 1)) <c>) <-c>)
3707 when c is (const_int (pow2 + 1) / 2) is a sign extension of a
3708 bit-field and can be replaced by either a sign_extend or a
3709 sign_extract. The `and' may be a zero_extend. */
3710 if (GET_CODE (XEXP (x, 0)) == XOR
3711 && GET_CODE (XEXP (x, 1)) == CONST_INT
3712 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3713 && INTVAL (XEXP (x, 1)) == - INTVAL (XEXP (XEXP (x, 0), 1))
3714 && (i = exact_log2 (INTVAL (XEXP (XEXP (x, 0), 1)))) >= 0
5f4f0e22 3715 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3716 && ((GET_CODE (XEXP (XEXP (x, 0), 0)) == AND
3717 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
3718 && (INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1))
5f4f0e22 3719 == ((HOST_WIDE_INT) 1 << (i + 1)) - 1))
3720 || (GET_CODE (XEXP (XEXP (x, 0), 0)) == ZERO_EXTEND
3721 && (GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)))
3722 == i + 1))))
3723 return simplify_shift_const
3724 (NULL_RTX, ASHIFTRT, mode,
3725 simplify_shift_const (NULL_RTX, ASHIFT, mode,
3726 XEXP (XEXP (XEXP (x, 0), 0), 0),
3727 GET_MODE_BITSIZE (mode) - (i + 1)),
3728 GET_MODE_BITSIZE (mode) - (i + 1));
230d793d 3729
3730 /* (plus (comparison A B) C) can become (neg (rev-comp A B)) if
3731 C is 1 and STORE_FLAG_VALUE is -1 or if C is -1 and STORE_FLAG_VALUE
3732 is 1. This produces better code than the alternative immediately
3733 below. */
3734 if (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
3735 && reversible_comparison_p (XEXP (x, 0))
3736 && ((STORE_FLAG_VALUE == -1 && XEXP (x, 1) == const1_rtx)
3737 || (STORE_FLAG_VALUE == 1 && XEXP (x, 1) == constm1_rtx)))
8079805d 3738 return
0c1c8ea6 3739 gen_unary (NEG, mode, mode,
3740 gen_binary (reverse_condition (GET_CODE (XEXP (x, 0))),
3741 mode, XEXP (XEXP (x, 0), 0),
3742 XEXP (XEXP (x, 0), 1)));
3743
3744 /* If only the low-order bit of X is possibly nonzero, (plus x -1)
3745 can become (ashiftrt (ashift (xor x 1) C) C) where C is
3746 the bitsize of the mode - 1. This allows simplification of
3747 "a = (b & 8) == 0;" */
3748 if (XEXP (x, 1) == constm1_rtx
3749 && GET_CODE (XEXP (x, 0)) != REG
3750 && ! (GET_CODE (XEXP (x,0)) == SUBREG
3751 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG)
951553af 3752 && nonzero_bits (XEXP (x, 0), mode) == 1)
3753 return simplify_shift_const (NULL_RTX, ASHIFTRT, mode,
3754 simplify_shift_const (NULL_RTX, ASHIFT, mode,
3755 gen_rtx_combine (XOR, mode,
3756 XEXP (x, 0), const1_rtx),
3757 GET_MODE_BITSIZE (mode) - 1),
3758 GET_MODE_BITSIZE (mode) - 1);
3759
3760 /* If we are adding two things that have no bits in common, convert
3761 the addition into an IOR. This will often be further simplified,
3762 for example in cases like ((a & 1) + (a & 2)), which can
3763 become a & 3. */
3764
ac49a949 3765 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3766 && (nonzero_bits (XEXP (x, 0), mode)
3767 & nonzero_bits (XEXP (x, 1), mode)) == 0)
8079805d 3768 return gen_binary (IOR, mode, XEXP (x, 0), XEXP (x, 1));
3769 break;
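/* A worked instance of the xor/plus sign-extension idiom above
   (illustrative, SImode, I == 7):
	(plus (xor (and X (const_int 255)) (const_int 128))
	      (const_int -128))
   sign-extends the low byte of X and is rewritten as
	(ashiftrt (ashift X (const_int 24)) (const_int 24)).
   E.g. a low byte of 0xFE gives (0xFE ^ 0x80) - 0x80 == -2, just as
   (0xFE << 24) >> 24 == -2 with an arithmetic shift.  */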
3770
3771 case MINUS:
3772 /* If STORE_FLAG_VALUE is 1, (minus 1 (comparison foo bar)) can be done
3773 by reversing the comparison code if valid. */
3774 if (STORE_FLAG_VALUE == 1
3775 && XEXP (x, 0) == const1_rtx
3776 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<'
3777 && reversible_comparison_p (XEXP (x, 1)))
3778 return gen_binary (reverse_condition (GET_CODE (XEXP (x, 1))),
3779 mode, XEXP (XEXP (x, 1), 0),
3780 XEXP (XEXP (x, 1), 1));
5109d49f 3781
3782 /* (minus <foo> (and <foo> (const_int -pow2))) becomes
3783 (and <foo> (const_int pow2-1)) */
3784 if (GET_CODE (XEXP (x, 1)) == AND
3785 && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
3786 && exact_log2 (- INTVAL (XEXP (XEXP (x, 1), 1))) >= 0
3787 && rtx_equal_p (XEXP (XEXP (x, 1), 0), XEXP (x, 0)))
3788 return simplify_and_const_int (NULL_RTX, mode, XEXP (x, 0),
3789 - INTVAL (XEXP (XEXP (x, 1), 1)) - 1);
3790
3791 /* Canonicalize (minus A (plus B C)) to (minus (minus A B) C) for
3792 integers. */
3793 if (GET_CODE (XEXP (x, 1)) == PLUS && INTEGRAL_MODE_P (mode))
3794 return gen_binary (MINUS, mode,
3795 gen_binary (MINUS, mode, XEXP (x, 0),
3796 XEXP (XEXP (x, 1), 0)),
3797 XEXP (XEXP (x, 1), 1));
3798 break;
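/* For example (illustrative):  (minus X (and X (const_int -8))) keeps
   only the low three bits of X and becomes (and X (const_int 7)):
   with X == 29, 29 - (29 & -8) == 29 - 24 == 5 == (29 & 7).  */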
3799
3800 case MULT:
3801 /* If we have (mult (plus A B) C), apply the distributive law and then
3802 the inverse distributive law to see if things simplify. This
3803 occurs mostly in addresses, often when unrolling loops. */
3804
3805 if (GET_CODE (XEXP (x, 0)) == PLUS)
3806 {
3807 x = apply_distributive_law
3808 (gen_binary (PLUS, mode,
3809 gen_binary (MULT, mode,
3810 XEXP (XEXP (x, 0), 0), XEXP (x, 1)),
3811 gen_binary (MULT, mode,
3812 XEXP (XEXP (x, 0), 1), XEXP (x, 1))));
3813
3814 if (GET_CODE (x) != MULT)
8079805d 3815 return x;
230d793d 3816 }
3817 break;
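/* For example (illustrative, typical of unrolled address arithmetic):
	(mult (plus R (const_int 4)) (const_int 4))
   distributes to
	(plus (mult R (const_int 4)) (const_int 16)),
   the form in which indexed addresses are expected to appear.  */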
3818
3819 case UDIV:
3820 /* If this is a divide by a power of two, treat it as a shift if
3821 its first operand is a shift. */
3822 if (GET_CODE (XEXP (x, 1)) == CONST_INT
3823 && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0
3824 && (GET_CODE (XEXP (x, 0)) == ASHIFT
3825 || GET_CODE (XEXP (x, 0)) == LSHIFTRT
3826 || GET_CODE (XEXP (x, 0)) == ASHIFTRT
3827 || GET_CODE (XEXP (x, 0)) == ROTATE
3828 || GET_CODE (XEXP (x, 0)) == ROTATERT))
8079805d 3829 return simplify_shift_const (NULL_RTX, LSHIFTRT, mode, XEXP (x, 0), i);
3830 break;
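/* For example (illustrative):
	(udiv (lshiftrt X (const_int 3)) (const_int 4))
   becomes (lshiftrt X (const_int 5)):  with an unsigned X == 0xFF,
   (0xFF >> 3) / 4 == 7 == 0xFF >> 5.  */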
3831
3832 case EQ: case NE:
3833 case GT: case GTU: case GE: case GEU:
3834 case LT: case LTU: case LE: case LEU:
3835 /* If the first operand is a condition code, we can't do anything
3836 with it. */
3837 if (GET_CODE (XEXP (x, 0)) == COMPARE
3838 || (GET_MODE_CLASS (GET_MODE (XEXP (x, 0))) != MODE_CC
3839#ifdef HAVE_cc0
3840 && XEXP (x, 0) != cc0_rtx
3841#endif
3842 ))
3843 {
3844 rtx op0 = XEXP (x, 0);
3845 rtx op1 = XEXP (x, 1);
3846 enum rtx_code new_code;
3847
3848 if (GET_CODE (op0) == COMPARE)
3849 op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);
3850
3851 /* Simplify our comparison, if possible. */
3852 new_code = simplify_comparison (code, &op0, &op1);
3853
230d793d 3854 /* If STORE_FLAG_VALUE is 1, we can convert (ne x 0) to simply X
951553af 3855 if only the low-order bit is possibly nonzero in X (such as when
3856 X is a ZERO_EXTRACT of one bit). Similarly, we can convert EQ to
3857 (xor X 1) or (minus 1 X); we use the former. Finally, if X is
3858 known to be either 0 or -1, NE becomes a NEG and EQ becomes
3859 (plus X 1).
3860
3861 Remove any ZERO_EXTRACT we made when thinking this was a
3862 comparison. It may now be simpler to use, e.g., an AND. If a
3863 ZERO_EXTRACT is indeed appropriate, it will be placed back by
3864 the call to make_compound_operation in the SET case. */
3865
3866 if (STORE_FLAG_VALUE == 1
3867 && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
3868 && op1 == const0_rtx && nonzero_bits (op0, mode) == 1)
3869 return gen_lowpart_for_combine (mode,
3870 expand_compound_operation (op0));
5109d49f 3871
3872 else if (STORE_FLAG_VALUE == 1
3873 && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
3874 && op1 == const0_rtx
3875 && (num_sign_bit_copies (op0, mode)
3876 == GET_MODE_BITSIZE (mode)))
3877 {
3878 op0 = expand_compound_operation (op0);
0c1c8ea6 3879 return gen_unary (NEG, mode, mode,
8079805d 3880 gen_lowpart_for_combine (mode, op0));
3881 }
3882
3883 else if (STORE_FLAG_VALUE == 1
3884 && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
230d793d 3885 && op1 == const0_rtx
5109d49f 3886 && nonzero_bits (op0, mode) == 1)
3887 {
3888 op0 = expand_compound_operation (op0);
3889 return gen_binary (XOR, mode,
3890 gen_lowpart_for_combine (mode, op0),
3891 const1_rtx);
5109d49f 3892 }
818b11b9 3893
3894 else if (STORE_FLAG_VALUE == 1
3895 && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
3896 && op1 == const0_rtx
3897 && (num_sign_bit_copies (op0, mode)
3898 == GET_MODE_BITSIZE (mode)))
3899 {
3900 op0 = expand_compound_operation (op0);
8079805d 3901 return plus_constant (gen_lowpart_for_combine (mode, op0), 1);
818b11b9 3902 }
230d793d 3903
3904 /* If STORE_FLAG_VALUE is -1, we have cases similar to
3905 those above. */
3906 if (STORE_FLAG_VALUE == -1
3907 && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
230d793d 3908 && op1 == const0_rtx
3909 && (num_sign_bit_copies (op0, mode)
3910 == GET_MODE_BITSIZE (mode)))
3911 return gen_lowpart_for_combine (mode,
3912 expand_compound_operation (op0));
3913
3914 else if (STORE_FLAG_VALUE == -1
3915 && new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
3916 && op1 == const0_rtx
3917 && nonzero_bits (op0, mode) == 1)
3918 {
3919 op0 = expand_compound_operation (op0);
0c1c8ea6 3920 return gen_unary (NEG, mode, mode,
8079805d 3921 gen_lowpart_for_combine (mode, op0));
3922 }
3923
3924 else if (STORE_FLAG_VALUE == -1
3925 && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
3926 && op1 == const0_rtx
3927 && (num_sign_bit_copies (op0, mode)
3928 == GET_MODE_BITSIZE (mode)))
230d793d 3929 {
818b11b9 3930 op0 = expand_compound_operation (op0);
0c1c8ea6 3931 return gen_unary (NOT, mode, mode,
8079805d 3932 gen_lowpart_for_combine (mode, op0));
3933 }
3934
3935 /* If X is 0/1, (eq X 0) is X-1. */
3936 else if (STORE_FLAG_VALUE == -1
3937 && new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
3938 && op1 == const0_rtx
3939 && nonzero_bits (op0, mode) == 1)
3940 {
3941 op0 = expand_compound_operation (op0);
8079805d 3942 return plus_constant (gen_lowpart_for_combine (mode, op0), -1);
230d793d 3943 }
3944
3945 /* If STORE_FLAG_VALUE says to just test the sign bit and X has just
3946 one bit that might be nonzero, we can convert (ne x 0) to
3947 (ashift x c) where C puts the bit in the sign bit. Remove any
3948 AND with STORE_FLAG_VALUE when we are done, since we are only
3949 going to test the sign bit. */
3f508eca 3950 if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
5f4f0e22 3951 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
0802d516 3952 && ((STORE_FLAG_VALUE & GET_MODE_MASK (mode))
5f4f0e22 3953 == (HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (mode) - 1))
3954 && op1 == const0_rtx
3955 && mode == GET_MODE (op0)
5109d49f 3956 && (i = exact_log2 (nonzero_bits (op0, mode))) >= 0)
230d793d 3957 {
3958 x = simplify_shift_const (NULL_RTX, ASHIFT, mode,
3959 expand_compound_operation (op0),
3960 GET_MODE_BITSIZE (mode) - 1 - i);
3961 if (GET_CODE (x) == AND && XEXP (x, 1) == const_true_rtx)
3962 return XEXP (x, 0);
3963 else
3964 return x;
3965 }
3966
3967 /* If the code changed, return a whole new comparison. */
3968 if (new_code != code)
3969 return gen_rtx_combine (new_code, mode, op0, op1);
3970
3971 /* Otherwise, keep this operation, but maybe change its operands.
3972 This also converts (ne (compare FOO BAR) 0) to (ne FOO BAR). */
3973 SUBST (XEXP (x, 0), op0);
3974 SUBST (XEXP (x, 1), op1);
3975 }
3976 break;
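/* Two illustrative instances of the STORE_FLAG_VALUE == 1 rewrites
   above:  if nonzero_bits shows X can only be 0 or 1, then
   (ne X (const_int 0)) is just X and (eq X (const_int 0)) becomes
   (xor X (const_int 1)); if instead X is known to be 0 or -1 (all
   bits are sign-bit copies), (ne X (const_int 0)) becomes (neg X)
   and (eq X (const_int 0)) becomes (plus X (const_int 1)).  */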
3977
3978 case IF_THEN_ELSE:
8079805d 3979 return simplify_if_then_else (x);
9210df58 3980
3981 case ZERO_EXTRACT:
3982 case SIGN_EXTRACT:
3983 case ZERO_EXTEND:
3984 case SIGN_EXTEND:
0f41302f 3985 /* If we are processing SET_DEST, we are done. */
3986 if (in_dest)
3987 return x;
d0ab8cd3 3988
8079805d 3989 return expand_compound_operation (x);
d0ab8cd3 3990
3991 case SET:
3992 return simplify_set (x);
1a26b032 3993
3994 case AND:
3995 case IOR:
3996 case XOR:
3997 return simplify_logical (x, last);
d0ab8cd3 3998
3999 case ABS:
4000 /* (abs (neg <foo>)) -> (abs <foo>) */
4001 if (GET_CODE (XEXP (x, 0)) == NEG)
4002 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
1a26b032 4003
4004 /* If operand is something known to be positive, ignore the ABS. */
4005 if (GET_CODE (XEXP (x, 0)) == FFS || GET_CODE (XEXP (x, 0)) == ABS
4006 || ((GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
4007 <= HOST_BITS_PER_WIDE_INT)
4008 && ((nonzero_bits (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
4009 & ((HOST_WIDE_INT) 1
4010 << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1)))
4011 == 0)))
4012 return XEXP (x, 0);
1a26b032 4013
1a26b032 4014
4015 /* If operand is known to be only -1 or 0, convert ABS to NEG. */
4016 if (num_sign_bit_copies (XEXP (x, 0), mode) == GET_MODE_BITSIZE (mode))
4017 return gen_rtx_combine (NEG, mode, XEXP (x, 0));
1a26b032 4018
8079805d 4019 break;
1a26b032 4020
4021 case FFS:
4022 /* (ffs (*_extend <X>)) = (ffs <X>) */
4023 if (GET_CODE (XEXP (x, 0)) == SIGN_EXTEND
4024 || GET_CODE (XEXP (x, 0)) == ZERO_EXTEND)
4025 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4026 break;
1a26b032 4027
4028 case FLOAT:
4029 /* (float (sign_extend <X>)) = (float <X>). */
4030 if (GET_CODE (XEXP (x, 0)) == SIGN_EXTEND)
4031 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4032 break;
1a26b032 4033
4034 case ASHIFT:
4035 case LSHIFTRT:
4036 case ASHIFTRT:
4037 case ROTATE:
4038 case ROTATERT:
4039 /* If this is a shift by a constant amount, simplify it. */
4040 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
4041 return simplify_shift_const (x, code, mode, XEXP (x, 0),
4042 INTVAL (XEXP (x, 1)));
4043
4044#ifdef SHIFT_COUNT_TRUNCATED
4045 else if (SHIFT_COUNT_TRUNCATED && GET_CODE (XEXP (x, 1)) != REG)
4046 SUBST (XEXP (x, 1),
4047 force_to_mode (XEXP (x, 1), GET_MODE (x),
4048 ((HOST_WIDE_INT) 1
4049 << exact_log2 (GET_MODE_BITSIZE (GET_MODE (x))))
4050 - 1,
4051 NULL_RTX, 0));
4052#endif
4053
4054 break;
4055 }
4056
4057 return x;
4058}
4059\f
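/* A minimal standalone sketch (illustrative only; this helper is
   hypothetical and nothing in combine calls it), checking on the host a
   few of the two's-complement identities the rewrites above rely on.
   Returns nonzero when all of them hold for A and B.  */

static int
example_identities_hold (a, b)
     int a, b;
{
  return (~ (a | b) == (~ a & ~ b)	/* De Morgan, used for NOT.  */
	  && ~ (a + -1) == - a		/* (not (plus X -1)) == (neg X).  */
	  && - (~ a) == a + 1		/* (neg (not X)) == (plus X 1).  */
	  && a - (a & -8) == (a & 7));	/* the MINUS/AND rewrite.  */
}
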
4060/* Simplify X, an IF_THEN_ELSE expression. Return the new expression. */
5109d49f 4061
4062static rtx
4063simplify_if_then_else (x)
4064 rtx x;
4065{
4066 enum machine_mode mode = GET_MODE (x);
4067 rtx cond = XEXP (x, 0);
4068 rtx true = XEXP (x, 1);
4069 rtx false = XEXP (x, 2);
4070 enum rtx_code true_code = GET_CODE (cond);
4071 int comparison_p = GET_RTX_CLASS (true_code) == '<';
4072 rtx temp;
4073 int i;
4074
0f41302f 4075 /* Simplify storing of the truth value. */
4076 if (comparison_p && true == const_true_rtx && false == const0_rtx)
4077 return gen_binary (true_code, mode, XEXP (cond, 0), XEXP (cond, 1));
4078
0f41302f 4079 /* Also when the truth value has to be reversed. */
4080 if (comparison_p && reversible_comparison_p (cond)
4081 && true == const0_rtx && false == const_true_rtx)
4082 return gen_binary (reverse_condition (true_code),
4083 mode, XEXP (cond, 0), XEXP (cond, 1));
4084
4085 /* Sometimes we can simplify the arm of an IF_THEN_ELSE if a register used
4086 in it is being compared against certain values. Get the true and false
4087 comparisons and see if that says anything about the value of each arm. */
4088
4089 if (comparison_p && reversible_comparison_p (cond)
4090 && GET_CODE (XEXP (cond, 0)) == REG)
4091 {
4092 HOST_WIDE_INT nzb;
4093 rtx from = XEXP (cond, 0);
4094 enum rtx_code false_code = reverse_condition (true_code);
4095 rtx true_val = XEXP (cond, 1);
4096 rtx false_val = true_val;
4097 int swapped = 0;
9210df58 4098
8079805d 4099 /* If FALSE_CODE is EQ, swap the codes and arms. */
5109d49f 4100
8079805d 4101 if (false_code == EQ)
1a26b032 4102 {
4103 swapped = 1, true_code = EQ, false_code = NE;
4104 temp = true, true = false, false = temp;
4105 }
5109d49f 4106
4107 /* If we are comparing against zero and the expression being tested has
4108 only a single bit that might be nonzero, that is its value when it is
4109 not equal to zero. Similarly if it is known to be -1 or 0. */
4110
4111 if (true_code == EQ && true_val == const0_rtx
4112 && exact_log2 (nzb = nonzero_bits (from, GET_MODE (from))) >= 0)
4113 false_code = EQ, false_val = GEN_INT (nzb);
4114 else if (true_code == EQ && true_val == const0_rtx
4115 && (num_sign_bit_copies (from, GET_MODE (from))
4116 == GET_MODE_BITSIZE (GET_MODE (from))))
4117 false_code = EQ, false_val = constm1_rtx;
4118
4119 /* Now simplify an arm if we know the value of the register in the
4120 branch and it is used in the arm. Be careful due to the potential
4121 of locally-shared RTL. */
4122
4123 if (reg_mentioned_p (from, true))
4124 true = subst (known_cond (copy_rtx (true), true_code, from, true_val),
4125 pc_rtx, pc_rtx, 0, 0);
4126 if (reg_mentioned_p (from, false))
4127 false = subst (known_cond (copy_rtx (false), false_code,
4128 from, false_val),
4129 pc_rtx, pc_rtx, 0, 0);
4130
4131 SUBST (XEXP (x, 1), swapped ? false : true);
4132 SUBST (XEXP (x, 2), swapped ? true : false);
4133
4134 true = XEXP (x, 1), false = XEXP (x, 2), true_code = GET_CODE (cond);
4135 }
5109d49f 4136
4137 /* If we have (if_then_else FOO (pc) (label_ref BAR)) and FOO can be
4138 reversed, do so to avoid needing two sets of patterns for
4139 subtract-and-branch insns. Similarly, do so if we have a constant in the
4140 true arm, if the false arm is the same as the first operand of the
4141 comparison, or if the false arm is more complicated than the true arm. */
4142
4143 if (comparison_p && reversible_comparison_p (cond)
4144 && (true == pc_rtx
4145 || (CONSTANT_P (true)
4146 && GET_CODE (false) != CONST_INT && false != pc_rtx)
4147 || true == const0_rtx
4148 || (GET_RTX_CLASS (GET_CODE (true)) == 'o'
4149 && GET_RTX_CLASS (GET_CODE (false)) != 'o')
4150 || (GET_CODE (true) == SUBREG
4151 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (true))) == 'o'
4152 && GET_RTX_CLASS (GET_CODE (false)) != 'o')
4153 || reg_mentioned_p (true, false)
4154 || rtx_equal_p (false, XEXP (cond, 0))))
4155 {
4156 true_code = reverse_condition (true_code);
4157 SUBST (XEXP (x, 0),
4158 gen_binary (true_code, GET_MODE (cond), XEXP (cond, 0),
4159 XEXP (cond, 1)));
5109d49f 4160
4161 SUBST (XEXP (x, 1), false);
4162 SUBST (XEXP (x, 2), true);
1a26b032 4163
8079805d 4164 temp = true, true = false, false = temp, cond = XEXP (x, 0);
bb821298 4165
0f41302f 4166 /* It is possible that the conditional has been simplified out. */
4167 true_code = GET_CODE (cond);
4168 comparison_p = GET_RTX_CLASS (true_code) == '<';
8079805d 4169 }
abe6e52f 4170
8079805d 4171 /* If the two arms are identical, we don't need the comparison. */
1a26b032 4172
4173 if (rtx_equal_p (true, false) && ! side_effects_p (cond))
4174 return true;
1a26b032 4175
4176 /* Convert a == b ? b : a to "a". */
4177 if (true_code == EQ && ! side_effects_p (cond)
4178 && rtx_equal_p (XEXP (cond, 0), false)
4179 && rtx_equal_p (XEXP (cond, 1), true))
4180 return false;
4181 else if (true_code == NE && ! side_effects_p (cond)
4182 && rtx_equal_p (XEXP (cond, 0), true)
4183 && rtx_equal_p (XEXP (cond, 1), false))
4184 return true;
4185
4186 /* Look for cases where we have (abs x) or (neg (abs X)). */
4187
4188 if (GET_MODE_CLASS (mode) == MODE_INT
4189 && GET_CODE (false) == NEG
4190 && rtx_equal_p (true, XEXP (false, 0))
4191 && comparison_p
4192 && rtx_equal_p (true, XEXP (cond, 0))
4193 && ! side_effects_p (true))
4194 switch (true_code)
4195 {
4196 case GT:
4197 case GE:
0c1c8ea6 4198 return gen_unary (ABS, mode, mode, true);
4199 case LT:
4200 case LE:
0c1c8ea6 4201 return gen_unary (NEG, mode, mode, gen_unary (ABS, mode, mode, true));
4202 }
4203
4204 /* Look for MIN or MAX. */
4205
34c8be72 4206 if ((! FLOAT_MODE_P (mode) || flag_fast_math)
4207 && comparison_p
4208 && rtx_equal_p (XEXP (cond, 0), true)
4209 && rtx_equal_p (XEXP (cond, 1), false)
4210 && ! side_effects_p (cond))
4211 switch (true_code)
4212 {
4213 case GE:
4214 case GT:
4215 return gen_binary (SMAX, mode, true, false);
4216 case LE:
4217 case LT:
4218 return gen_binary (SMIN, mode, true, false);
4219 case GEU:
4220 case GTU:
4221 return gen_binary (UMAX, mode, true, false);
4222 case LEU:
4223 case LTU:
4224 return gen_binary (UMIN, mode, true, false);
4225 }
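/* For example (illustrative):  (if_then_else (gt A B) A B) becomes
   (smax A B) and (if_then_else (ltu A B) A B) becomes (umin A B).
   The floating point case is guarded above because, e.g., SMAX does
   not have IEEE semantics when an operand is a NaN.  */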
4226
4227 /* If we have (if_then_else COND (OP Z C1) Z) and OP is an identity when its
4228 second operand is zero, this can be done as (OP Z (mult COND C2)) where
4229 C2 = C1 * STORE_FLAG_VALUE. Similarly if OP has an outer ZERO_EXTEND or
4230 SIGN_EXTEND as long as Z is already extended (so we don't destroy it).
4231 We can do this kind of thing in some cases when STORE_FLAG_VALUE is
0802d516 4232 neither 1 nor -1, but it isn't worth checking for. */
8079805d 4233
4234 if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
4235 && comparison_p && mode != VOIDmode && ! side_effects_p (x))
4236 {
4237 rtx t = make_compound_operation (true, SET);
4238 rtx f = make_compound_operation (false, SET);
4239 rtx cond_op0 = XEXP (cond, 0);
4240 rtx cond_op1 = XEXP (cond, 1);
4241 enum rtx_code op, extend_op = NIL;
4242 enum machine_mode m = mode;
f24ad0e4 4243 rtx z = 0, c1;
8079805d 4244
4245 if ((GET_CODE (t) == PLUS || GET_CODE (t) == MINUS
4246 || GET_CODE (t) == IOR || GET_CODE (t) == XOR
4247 || GET_CODE (t) == ASHIFT
4248 || GET_CODE (t) == LSHIFTRT || GET_CODE (t) == ASHIFTRT)
4249 && rtx_equal_p (XEXP (t, 0), f))
4250 c1 = XEXP (t, 1), op = GET_CODE (t), z = f;
4251
4252 /* If an identity-zero op is commutative, check whether there
0f41302f 4253 would be a match if we swapped the operands. */
4254 else if ((GET_CODE (t) == PLUS || GET_CODE (t) == IOR
4255 || GET_CODE (t) == XOR)
4256 && rtx_equal_p (XEXP (t, 1), f))
4257 c1 = XEXP (t, 0), op = GET_CODE (t), z = f;
4258 else if (GET_CODE (t) == SIGN_EXTEND
4259 && (GET_CODE (XEXP (t, 0)) == PLUS
4260 || GET_CODE (XEXP (t, 0)) == MINUS
4261 || GET_CODE (XEXP (t, 0)) == IOR
4262 || GET_CODE (XEXP (t, 0)) == XOR
4263 || GET_CODE (XEXP (t, 0)) == ASHIFT
4264 || GET_CODE (XEXP (t, 0)) == LSHIFTRT
4265 || GET_CODE (XEXP (t, 0)) == ASHIFTRT)
4266 && GET_CODE (XEXP (XEXP (t, 0), 0)) == SUBREG
4267 && subreg_lowpart_p (XEXP (XEXP (t, 0), 0))
4268 && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 0)), f)
4269 && (num_sign_bit_copies (f, GET_MODE (f))
4270 > (GET_MODE_BITSIZE (mode)
4271 - GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (t, 0), 0))))))
4272 {
4273 c1 = XEXP (XEXP (t, 0), 1); z = f; op = GET_CODE (XEXP (t, 0));
4274 extend_op = SIGN_EXTEND;
4275 m = GET_MODE (XEXP (t, 0));
1a26b032 4276 }
4277 else if (GET_CODE (t) == SIGN_EXTEND
4278 && (GET_CODE (XEXP (t, 0)) == PLUS
4279 || GET_CODE (XEXP (t, 0)) == IOR
4280 || GET_CODE (XEXP (t, 0)) == XOR)
4281 && GET_CODE (XEXP (XEXP (t, 0), 1)) == SUBREG
4282 && subreg_lowpart_p (XEXP (XEXP (t, 0), 1))
4283 && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 1)), f)
4284 && (num_sign_bit_copies (f, GET_MODE (f))
4285 > (GET_MODE_BITSIZE (mode)
4286 - GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (t, 0), 1))))))
4287 {
4288 c1 = XEXP (XEXP (t, 0), 0); z = f; op = GET_CODE (XEXP (t, 0));
4289 extend_op = SIGN_EXTEND;
4290 m = GET_MODE (XEXP (t, 0));
4291 }
4292 else if (GET_CODE (t) == ZERO_EXTEND
4293 && (GET_CODE (XEXP (t, 0)) == PLUS
4294 || GET_CODE (XEXP (t, 0)) == MINUS
4295 || GET_CODE (XEXP (t, 0)) == IOR
4296 || GET_CODE (XEXP (t, 0)) == XOR
4297 || GET_CODE (XEXP (t, 0)) == ASHIFT
4298 || GET_CODE (XEXP (t, 0)) == LSHIFTRT
4299 || GET_CODE (XEXP (t, 0)) == ASHIFTRT)
4300 && GET_CODE (XEXP (XEXP (t, 0), 0)) == SUBREG
4301 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4302 && subreg_lowpart_p (XEXP (XEXP (t, 0), 0))
4303 && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 0)), f)
4304 && ((nonzero_bits (f, GET_MODE (f))
4305 & ~ GET_MODE_MASK (GET_MODE (XEXP (XEXP (t, 0), 0))))
4306 == 0))
4307 {
4308 c1 = XEXP (XEXP (t, 0), 1); z = f; op = GET_CODE (XEXP (t, 0));
4309 extend_op = ZERO_EXTEND;
4310 m = GET_MODE (XEXP (t, 0));
4311 }
4312 else if (GET_CODE (t) == ZERO_EXTEND
4313 && (GET_CODE (XEXP (t, 0)) == PLUS
4314 || GET_CODE (XEXP (t, 0)) == IOR
4315 || GET_CODE (XEXP (t, 0)) == XOR)
4316 && GET_CODE (XEXP (XEXP (t, 0), 1)) == SUBREG
4317 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4318 && subreg_lowpart_p (XEXP (XEXP (t, 0), 1))
4319 && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 1)), f)
4320 && ((nonzero_bits (f, GET_MODE (f))
4321 & ~ GET_MODE_MASK (GET_MODE (XEXP (XEXP (t, 0), 1))))
4322 == 0))
4323 {
4324 c1 = XEXP (XEXP (t, 0), 0); z = f; op = GET_CODE (XEXP (t, 0));
4325 extend_op = ZERO_EXTEND;
4326 m = GET_MODE (XEXP (t, 0));
4327 }
4328
4329 if (z)
4330 {
4331 temp = subst (gen_binary (true_code, m, cond_op0, cond_op1),
4332 pc_rtx, pc_rtx, 0, 0);
4333 temp = gen_binary (MULT, m, temp,
4334 gen_binary (MULT, m, c1, const_true_rtx));
4335 temp = subst (temp, pc_rtx, pc_rtx, 0, 0);
4336 temp = gen_binary (op, m, gen_lowpart_for_combine (m, z), temp);
4337
4338 if (extend_op != NIL)
0c1c8ea6 4339 temp = gen_unary (extend_op, mode, m, temp);
8079805d
RK
4340
4341 return temp;
4342 }
4343 }
224eeff2 4344
8079805d
RK
4345 /* If we have (if_then_else (ne A 0) C1 0) and either A is known to be 0 or
4346 1 and C1 is a single bit or A is known to be 0 or -1 and C1 is the
4347 negation of a single bit, we can convert this operation to a shift. We
4348 can actually do this more generally, but it doesn't seem worth it. */
4349
4350 if (true_code == NE && XEXP (cond, 1) == const0_rtx
4351 && false == const0_rtx && GET_CODE (true) == CONST_INT
4352 && ((1 == nonzero_bits (XEXP (cond, 0), mode)
4353 && (i = exact_log2 (INTVAL (true))) >= 0)
4354 || ((num_sign_bit_copies (XEXP (cond, 0), mode)
4355 == GET_MODE_BITSIZE (mode))
4356 && (i = exact_log2 (- INTVAL (true))) >= 0)))
4357 return
4358 simplify_shift_const (NULL_RTX, ASHIFT, mode,
4359 gen_lowpart_for_combine (mode, XEXP (cond, 0)), i);
230d793d 4360
8079805d
RK
4361 return x;
4362}
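/* Editorial sketch, not part of combine.c: the if_then_else-to-shift
   rule above, written out in C.  Assumes int is 32 bits and two's
   complement; the demo_* name is hypothetical.  With A known to be 0
   or 1 and C1 == 1 << 4, (if_then_else (ne A 0) C1 0) is an ASHIFT: */

static int
demo_cond_to_shift (int a)	/* caller guarantees a is 0 or 1 */
{
  return a << 4;		/* same value as (a != 0 ? 16 : 0) */
}

/* Likewise, with A known to be 0 or -1 and C1 == -16 (the negation of
   a single bit), a << 4 again equals (a != 0 ? -16 : 0).  */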
4363\f
4364/* Simplify X, a SET expression. Return the new expression. */
230d793d 4365
8079805d
RK
4366static rtx
4367simplify_set (x)
4368 rtx x;
4369{
4370 rtx src = SET_SRC (x);
4371 rtx dest = SET_DEST (x);
4372 enum machine_mode mode
4373 = GET_MODE (src) != VOIDmode ? GET_MODE (src) : GET_MODE (dest);
4374 rtx other_insn;
4375 rtx *cc_use;
4376
4377 /* (set (pc) (return)) gets written as (return). */
4378 if (GET_CODE (dest) == PC && GET_CODE (src) == RETURN)
4379 return src;
230d793d 4380
87e3e0c1
RK
4381 /* Now that we know for sure which bits of SRC we are using, see if we can
4382 simplify the expression for the object knowing that we only need the
4383 low-order bits. */
4384
4385 if (GET_MODE_CLASS (mode) == MODE_INT)
4386 src = force_to_mode (src, mode, GET_MODE_MASK (mode), NULL_RTX, 0);
4387
8079805d
RK
4388 /* If we are setting CC0 or if the source is a COMPARE, look for the use of
4389 the comparison result and try to simplify it unless we already have used
4390 undobuf.other_insn. */
4391 if ((GET_CODE (src) == COMPARE
230d793d 4392#ifdef HAVE_cc0
8079805d 4393 || dest == cc0_rtx
230d793d 4394#endif
8079805d
RK
4395 )
4396 && (cc_use = find_single_use (dest, subst_insn, &other_insn)) != 0
4397 && (undobuf.other_insn == 0 || other_insn == undobuf.other_insn)
4398 && GET_RTX_CLASS (GET_CODE (*cc_use)) == '<'
c0d3ac4d 4399 && rtx_equal_p (XEXP (*cc_use, 0), dest))
8079805d
RK
4400 {
4401 enum rtx_code old_code = GET_CODE (*cc_use);
4402 enum rtx_code new_code;
4403 rtx op0, op1;
4404 int other_changed = 0;
4405 enum machine_mode compare_mode = GET_MODE (dest);
4406
4407 if (GET_CODE (src) == COMPARE)
4408 op0 = XEXP (src, 0), op1 = XEXP (src, 1);
4409 else
4410 op0 = src, op1 = const0_rtx;
230d793d 4411
8079805d
RK
4412 /* Simplify our comparison, if possible. */
4413 new_code = simplify_comparison (old_code, &op0, &op1);
230d793d 4414
c141a106 4415#ifdef EXTRA_CC_MODES
8079805d
RK
4416 /* If this machine has CC modes other than CCmode, check to see if we
4417 need to use a different CC mode here. */
4418 compare_mode = SELECT_CC_MODE (new_code, op0, op1);
c141a106 4419#endif /* EXTRA_CC_MODES */
230d793d 4420
c141a106 4421#if !defined (HAVE_cc0) && defined (EXTRA_CC_MODES)
8079805d
RK
4422 /* If the mode changed, we have to change SET_DEST, the mode in the
4423 compare, and the mode in the place SET_DEST is used. If SET_DEST is
4424 a hard register, just build new versions with the proper mode. If it
4425 is a pseudo, we lose unless it is only time we set the pseudo, in
4426 which case we can safely change its mode. */
4427 if (compare_mode != GET_MODE (dest))
4428 {
4429 int regno = REGNO (dest);
4430 rtx new_dest = gen_rtx (REG, compare_mode, regno);
4431
4432 if (regno < FIRST_PSEUDO_REGISTER
4433 || (reg_n_sets[regno] == 1 && ! REG_USERVAR_P (dest)))
230d793d 4434 {
8079805d
RK
4435 if (regno >= FIRST_PSEUDO_REGISTER)
4436 SUBST (regno_reg_rtx[regno], new_dest);
230d793d 4437
8079805d
RK
4438 SUBST (SET_DEST (x), new_dest);
4439 SUBST (XEXP (*cc_use, 0), new_dest);
4440 other_changed = 1;
230d793d 4441
8079805d 4442 dest = new_dest;
230d793d 4443 }
8079805d 4444 }
230d793d
RS
4445#endif
4446
8079805d
RK
4447 /* If the code changed, we have to build a new comparison in
4448 undobuf.other_insn. */
4449 if (new_code != old_code)
4450 {
4451 unsigned HOST_WIDE_INT mask;
4452
4453 SUBST (*cc_use, gen_rtx_combine (new_code, GET_MODE (*cc_use),
4454 dest, const0_rtx));
4455
4456 /* If the only change we made was to change an EQ into an NE or
4457 vice versa, OP0 has only one bit that might be nonzero, and OP1
4458 is zero, check if changing the user of the condition code will
4459 produce a valid insn. If it won't, we can keep the original code
4460 in that insn by surrounding our operation with an XOR. */
4461
4462 if (((old_code == NE && new_code == EQ)
4463 || (old_code == EQ && new_code == NE))
4464 && ! other_changed && op1 == const0_rtx
4465 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
4466 && exact_log2 (mask = nonzero_bits (op0, GET_MODE (op0))) >= 0)
230d793d 4467 {
8079805d 4468 rtx pat = PATTERN (other_insn), note = 0;
a29ca9db 4469 int scratches;
230d793d 4470
a29ca9db 4471 if ((recog_for_combine (&pat, other_insn, &note, &scratches) < 0
8079805d
RK
4472 && ! check_asm_operands (pat)))
4473 {
4474 PUT_CODE (*cc_use, old_code);
4475 other_insn = 0;
230d793d 4476
8079805d 4477 op0 = gen_binary (XOR, GET_MODE (op0), op0, GEN_INT (mask));
230d793d 4478 }
230d793d
RS
4479 }
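/* Editorial sketch, not part of combine.c: the XOR trick above.  If X
   is known to have at most one nonzero bit, say bit 3, then X is
   either 0 or 8, so (x ^ 8) != 0 holds exactly when x == 0.  An EQ
   user can therefore be kept as the original NE (or vice versa) by
   XORing MASK into the operand.  demo_* is hypothetical. */

static int
demo_eq_as_ne (unsigned x)	/* caller guarantees x is 0 or 8 */
{
  return (x ^ 8) != 0;		/* same truth value as (x == 0) */
}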
4480
8079805d
RK
4481 other_changed = 1;
4482 }
4483
4484 if (other_changed)
4485 undobuf.other_insn = other_insn;
230d793d
RS
4486
4487#ifdef HAVE_cc0
8079805d
RK
4488 /* If we are now comparing against zero, change our source if
4489 needed. If we do not use cc0, we always have a COMPARE. */
4490 if (op1 == const0_rtx && dest == cc0_rtx)
4491 {
4492 SUBST (SET_SRC (x), op0);
4493 src = op0;
4494 }
4495 else
230d793d
RS
4496#endif
4497
8079805d
RK
4498 /* Otherwise, if we didn't previously have a COMPARE in the
4499 correct mode, we need one. */
4500 if (GET_CODE (src) != COMPARE || GET_MODE (src) != compare_mode)
4501 {
4502 SUBST (SET_SRC (x),
4503 gen_rtx_combine (COMPARE, compare_mode, op0, op1));
4504 src = SET_SRC (x);
230d793d
RS
4505 }
4506 else
4507 {
8079805d
RK
4508 /* Otherwise, update the COMPARE if needed. */
4509 SUBST (XEXP (src, 0), op0);
4510 SUBST (XEXP (src, 1), op1);
230d793d 4511 }
8079805d
RK
4512 }
4513 else
4514 {
4515 /* Get SET_SRC in a form where we have placed back any
4516 compound expressions. Then do the checks below. */
4517 src = make_compound_operation (src, SET);
4518 SUBST (SET_SRC (x), src);
4519 }
230d793d 4520
8079805d
RK
4521 /* If we have (set x (subreg:m1 (op:m2 ...) 0)) with OP being some operation,
4522 and X being a REG or (subreg (reg)), we may be able to convert this to
4523 (set (subreg:m2 x) (op)).
df62f951 4524
8079805d
RK
4525 We can always do this if M1 is narrower than M2 because that means that
4526 we only care about the low bits of the result.
df62f951 4527
8079805d
RK
4528 However, on machines without WORD_REGISTER_OPERATIONS defined, we cannot
 4529	   perform a narrower operation than requested since the high-order bits will
 4530	   be undefined.  On machines where it is defined, this transformation is safe
4531 as long as M1 and M2 have the same number of words. */
df62f951 4532
8079805d
RK
4533 if (GET_CODE (src) == SUBREG && subreg_lowpart_p (src)
4534 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (src))) != 'o'
4535 && (((GET_MODE_SIZE (GET_MODE (src)) + (UNITS_PER_WORD - 1))
4536 / UNITS_PER_WORD)
4537 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))
4538 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
8baf60bb 4539#ifndef WORD_REGISTER_OPERATIONS
8079805d
RK
4540 && (GET_MODE_SIZE (GET_MODE (src))
4541 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (src))))
df62f951 4542#endif
f507a070
RK
4543#ifdef CLASS_CANNOT_CHANGE_SIZE
4544 && ! (GET_CODE (dest) == REG && REGNO (dest) < FIRST_PSEUDO_REGISTER
4545 && (TEST_HARD_REG_BIT
4546 (reg_class_contents[(int) CLASS_CANNOT_CHANGE_SIZE],
4547 REGNO (dest)))
4548 && (GET_MODE_SIZE (GET_MODE (src))
4549 != GET_MODE_SIZE (GET_MODE (SUBREG_REG (src)))))
4550#endif
8079805d
RK
4551 && (GET_CODE (dest) == REG
4552 || (GET_CODE (dest) == SUBREG
4553 && GET_CODE (SUBREG_REG (dest)) == REG)))
4554 {
4555 SUBST (SET_DEST (x),
4556 gen_lowpart_for_combine (GET_MODE (SUBREG_REG (src)),
4557 dest));
4558 SUBST (SET_SRC (x), SUBREG_REG (src));
4559
4560 src = SET_SRC (x), dest = SET_DEST (x);
4561 }
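/* Editorial sketch, not part of combine.c: the fact behind moving the
   SUBREG to the destination.  For operations such as PLUS, the
   low-order bits of the result depend only on the low-order bits of
   the operands, so narrowing before or after the operation yields the
   same low part.  Assumes 32-bit int, 16-bit short, two's complement;
   demo_* is hypothetical. */

static short
demo_narrow_add (int a, int b)
{
  /* (subreg:HI (plus:SI a b) 0) has the same bits as
     (plus:HI (subreg:HI a 0) (subreg:HI b 0)).  */
  return (short) (a + b);
}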
df62f951 4562
8baf60bb 4563#ifdef LOAD_EXTEND_OP
8079805d
RK
4564 /* If we have (set FOO (subreg:M (mem:N BAR) 0)) with M wider than N, this
4565 would require a paradoxical subreg. Replace the subreg with a
0f41302f 4566 zero_extend to avoid the reload that would otherwise be required. */
8079805d
RK
4567
4568 if (GET_CODE (src) == SUBREG && subreg_lowpart_p (src)
4569 && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (src))) != NIL
4570 && SUBREG_WORD (src) == 0
4571 && (GET_MODE_SIZE (GET_MODE (src))
4572 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (src))))
4573 && GET_CODE (SUBREG_REG (src)) == MEM)
4574 {
4575 SUBST (SET_SRC (x),
4576 gen_rtx_combine (LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (src))),
4577 GET_MODE (src), XEXP (src, 0)));
4578
4579 src = SET_SRC (x);
4580 }
230d793d
RS
4581#endif
4582
8079805d
RK
4583 /* If we don't have a conditional move, SET_SRC is an IF_THEN_ELSE, and we
4584 are comparing an item known to be 0 or -1 against 0, use a logical
4585 operation instead. Check for one of the arms being an IOR of the other
4586 arm with some value. We compute three terms to be IOR'ed together. In
4587 practice, at most two will be nonzero. Then we do the IOR's. */
4588
4589 if (GET_CODE (dest) != PC
4590 && GET_CODE (src) == IF_THEN_ELSE
36b8d792 4591 && GET_MODE_CLASS (GET_MODE (src)) == MODE_INT
8079805d
RK
4592 && (GET_CODE (XEXP (src, 0)) == EQ || GET_CODE (XEXP (src, 0)) == NE)
4593 && XEXP (XEXP (src, 0), 1) == const0_rtx
6dd49058 4594 && GET_MODE (src) == GET_MODE (XEXP (XEXP (src, 0), 0))
ea414472
DE
4595#ifdef HAVE_conditional_move
4596 && ! can_conditionally_move_p (GET_MODE (src))
4597#endif
8079805d
RK
4598 && (num_sign_bit_copies (XEXP (XEXP (src, 0), 0),
4599 GET_MODE (XEXP (XEXP (src, 0), 0)))
4600 == GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (src, 0), 0))))
4601 && ! side_effects_p (src))
4602 {
4603 rtx true = (GET_CODE (XEXP (src, 0)) == NE
4604 ? XEXP (src, 1) : XEXP (src, 2));
4605 rtx false = (GET_CODE (XEXP (src, 0)) == NE
4606 ? XEXP (src, 2) : XEXP (src, 1));
4607 rtx term1 = const0_rtx, term2, term3;
4608
4609 if (GET_CODE (true) == IOR && rtx_equal_p (XEXP (true, 0), false))
4610 term1 = false, true = XEXP (true, 1), false = const0_rtx;
4611 else if (GET_CODE (true) == IOR
4612 && rtx_equal_p (XEXP (true, 1), false))
4613 term1 = false, true = XEXP (true, 0), false = const0_rtx;
4614 else if (GET_CODE (false) == IOR
4615 && rtx_equal_p (XEXP (false, 0), true))
4616 term1 = true, false = XEXP (false, 1), true = const0_rtx;
4617 else if (GET_CODE (false) == IOR
4618 && rtx_equal_p (XEXP (false, 1), true))
4619 term1 = true, false = XEXP (false, 0), true = const0_rtx;
4620
4621 term2 = gen_binary (AND, GET_MODE (src), XEXP (XEXP (src, 0), 0), true);
4622 term3 = gen_binary (AND, GET_MODE (src),
0c1c8ea6 4623 gen_unary (NOT, GET_MODE (src), GET_MODE (src),
8079805d
RK
4624 XEXP (XEXP (src, 0), 0)),
4625 false);
4626
4627 SUBST (SET_SRC (x),
4628 gen_binary (IOR, GET_MODE (src),
4629 gen_binary (IOR, GET_MODE (src), term1, term2),
4630 term3));
4631
4632 src = SET_SRC (x);
4633 }
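/* Editorial sketch, not part of combine.c: the three-term IOR built
   above.  When M is known to be 0 or -1, selecting between A and B
   needs no branch or conditional move; demo_* is hypothetical. */

static int
demo_mask_select (int m, int a, int b)	/* caller guarantees m is 0 or -1 */
{
  return (m & a) | (~m & b);		/* == (m != 0 ? a : b) */
}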
230d793d 4634
246e00f2
RK
4635 /* If either SRC or DEST is a CLOBBER of (const_int 0), make this
4636 whole thing fail. */
4637 if (GET_CODE (src) == CLOBBER && XEXP (src, 0) == const0_rtx)
4638 return src;
4639 else if (GET_CODE (dest) == CLOBBER && XEXP (dest, 0) == const0_rtx)
4640 return dest;
4641 else
4642 /* Convert this into a field assignment operation, if possible. */
4643 return make_field_assignment (x);
8079805d
RK
4644}
4645\f
 4646/* Simplify X, an AND, IOR, or XOR operation, and return the simplified
4647 result. LAST is nonzero if this is the last retry. */
4648
4649static rtx
4650simplify_logical (x, last)
4651 rtx x;
4652 int last;
4653{
4654 enum machine_mode mode = GET_MODE (x);
4655 rtx op0 = XEXP (x, 0);
4656 rtx op1 = XEXP (x, 1);
4657
4658 switch (GET_CODE (x))
4659 {
230d793d 4660 case AND:
8079805d
RK
4661 /* Convert (A ^ B) & A to A & (~ B) since the latter is often a single
4662 insn (and may simplify more). */
4663 if (GET_CODE (op0) == XOR
4664 && rtx_equal_p (XEXP (op0, 0), op1)
4665 && ! side_effects_p (op1))
0c1c8ea6
RK
4666 x = gen_binary (AND, mode,
4667 gen_unary (NOT, mode, mode, XEXP (op0, 1)), op1);
8079805d
RK
4668
4669 if (GET_CODE (op0) == XOR
4670 && rtx_equal_p (XEXP (op0, 1), op1)
4671 && ! side_effects_p (op1))
0c1c8ea6
RK
4672 x = gen_binary (AND, mode,
4673 gen_unary (NOT, mode, mode, XEXP (op0, 0)), op1);
8079805d
RK
4674
4675 /* Similarly for (~ (A ^ B)) & A. */
4676 if (GET_CODE (op0) == NOT
4677 && GET_CODE (XEXP (op0, 0)) == XOR
4678 && rtx_equal_p (XEXP (XEXP (op0, 0), 0), op1)
4679 && ! side_effects_p (op1))
4680 x = gen_binary (AND, mode, XEXP (XEXP (op0, 0), 1), op1);
4681
4682 if (GET_CODE (op0) == NOT
4683 && GET_CODE (XEXP (op0, 0)) == XOR
4684 && rtx_equal_p (XEXP (XEXP (op0, 0), 1), op1)
4685 && ! side_effects_p (op1))
4686 x = gen_binary (AND, mode, XEXP (XEXP (op0, 0), 0), op1);
4687
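/* Editorial sketch, not part of combine.c: the AND/XOR identities used
   above.  (a ^ b) & a keeps exactly the bits of a that are clear in b,
   i.e. a & ~b, which is a single insn (ANDN/BIC) on many machines; and
   ~(a ^ b) & a keeps the bits of a that are also set in b, i.e. a & b.
   demo_* is hypothetical. */

static unsigned
demo_xor_and (unsigned a, unsigned b)
{
  unsigned t1 = a & ~b;		/* == (a ^ b) & a */
  unsigned t2 = a & b;		/* == ~(a ^ b) & a */
  return t1 | t2;		/* == a, as a sanity check */
}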
4688 if (GET_CODE (op1) == CONST_INT)
230d793d 4689 {
8079805d 4690 x = simplify_and_const_int (x, mode, op0, INTVAL (op1));
230d793d
RS
4691
 4692	  /* If we have (ior (and X C1) C2) and the next restart would be
4693 the last, simplify this by making C1 as small as possible
0f41302f 4694 and then exit. */
8079805d
RK
4695 if (last
4696 && GET_CODE (x) == IOR && GET_CODE (op0) == AND
4697 && GET_CODE (XEXP (op0, 1)) == CONST_INT
4698 && GET_CODE (op1) == CONST_INT)
4699 return gen_binary (IOR, mode,
4700 gen_binary (AND, mode, XEXP (op0, 0),
4701 GEN_INT (INTVAL (XEXP (op0, 1))
4702 & ~ INTVAL (op1))), op1);
230d793d
RS
4703
4704 if (GET_CODE (x) != AND)
8079805d 4705 return x;
0e32506c
RK
4706
4707 if (GET_RTX_CLASS (GET_CODE (x)) == 'c'
4708 || GET_RTX_CLASS (GET_CODE (x)) == '2')
4709 op0 = XEXP (x, 0), op1 = XEXP (x, 1);
230d793d
RS
4710 }
4711
4712 /* Convert (A | B) & A to A. */
8079805d
RK
4713 if (GET_CODE (op0) == IOR
4714 && (rtx_equal_p (XEXP (op0, 0), op1)
4715 || rtx_equal_p (XEXP (op0, 1), op1))
4716 && ! side_effects_p (XEXP (op0, 0))
4717 && ! side_effects_p (XEXP (op0, 1)))
4718 return op1;
230d793d 4719
d0ab8cd3 4720 /* In the following group of tests (and those in case IOR below),
230d793d
RS
4721 we start with some combination of logical operations and apply
4722 the distributive law followed by the inverse distributive law.
4723 Most of the time, this results in no change. However, if some of
4724 the operands are the same or inverses of each other, simplifications
4725 will result.
4726
4727 For example, (and (ior A B) (not B)) can occur as the result of
4728 expanding a bit field assignment. When we apply the distributive
 4729	 law to this, we get (ior (and A (not B)) (and B (not B))),
8079805d 4730	 which then simplifies to (and A (not B)).
230d793d 4731
8079805d 4732 If we have (and (ior A B) C), apply the distributive law and then
230d793d
RS
4733 the inverse distributive law to see if things simplify. */
4734
8079805d 4735 if (GET_CODE (op0) == IOR || GET_CODE (op0) == XOR)
230d793d
RS
4736 {
4737 x = apply_distributive_law
8079805d
RK
4738 (gen_binary (GET_CODE (op0), mode,
4739 gen_binary (AND, mode, XEXP (op0, 0), op1),
4740 gen_binary (AND, mode, XEXP (op0, 1), op1)));
230d793d 4741 if (GET_CODE (x) != AND)
8079805d 4742 return x;
230d793d
RS
4743 }
4744
8079805d
RK
4745 if (GET_CODE (op1) == IOR || GET_CODE (op1) == XOR)
4746 return apply_distributive_law
4747 (gen_binary (GET_CODE (op1), mode,
4748 gen_binary (AND, mode, XEXP (op1, 0), op0),
4749 gen_binary (AND, mode, XEXP (op1, 1), op0)));
230d793d
RS
4750
4751 /* Similarly, taking advantage of the fact that
4752 (and (not A) (xor B C)) == (xor (ior A B) (ior A C)) */
4753
8079805d
RK
4754 if (GET_CODE (op0) == NOT && GET_CODE (op1) == XOR)
4755 return apply_distributive_law
4756 (gen_binary (XOR, mode,
4757 gen_binary (IOR, mode, XEXP (op0, 0), XEXP (op1, 0)),
4758 gen_binary (IOR, mode, XEXP (op0, 0), XEXP (op1, 1))));
230d793d 4759
8079805d
RK
4760 else if (GET_CODE (op1) == NOT && GET_CODE (op0) == XOR)
4761 return apply_distributive_law
4762 (gen_binary (XOR, mode,
4763 gen_binary (IOR, mode, XEXP (op1, 0), XEXP (op0, 0)),
4764 gen_binary (IOR, mode, XEXP (op1, 0), XEXP (op0, 1))));
230d793d
RS
4765 break;
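/* Editorial sketch, not part of combine.c: one round trip through the
   distributive law described above.  Expanding (a | b) & ~b gives
   (a & ~b) | (b & ~b); the second term is identically zero, so the
   inverse distributive law collapses the result back to a & ~b.
   demo_* is hypothetical. */

static unsigned
demo_distribute (unsigned a, unsigned b)
{
  return (a & ~b) | (b & ~b);	/* == (a | b) & ~b == a & ~b */
}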
4766
4767 case IOR:
951553af 4768 /* (ior A C) is C if all bits of A that might be nonzero are on in C. */
8079805d 4769 if (GET_CODE (op1) == CONST_INT
ac49a949 4770 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
8079805d
RK
4771 && (nonzero_bits (op0, mode) & ~ INTVAL (op1)) == 0)
4772 return op1;
d0ab8cd3 4773
230d793d 4774 /* Convert (A & B) | A to A. */
8079805d
RK
4775 if (GET_CODE (op0) == AND
4776 && (rtx_equal_p (XEXP (op0, 0), op1)
4777 || rtx_equal_p (XEXP (op0, 1), op1))
4778 && ! side_effects_p (XEXP (op0, 0))
4779 && ! side_effects_p (XEXP (op0, 1)))
4780 return op1;
230d793d
RS
4781
4782 /* If we have (ior (and A B) C), apply the distributive law and then
4783 the inverse distributive law to see if things simplify. */
4784
8079805d 4785 if (GET_CODE (op0) == AND)
230d793d
RS
4786 {
4787 x = apply_distributive_law
4788 (gen_binary (AND, mode,
8079805d
RK
4789 gen_binary (IOR, mode, XEXP (op0, 0), op1),
4790 gen_binary (IOR, mode, XEXP (op0, 1), op1)));
230d793d
RS
4791
4792 if (GET_CODE (x) != IOR)
8079805d 4793 return x;
230d793d
RS
4794 }
4795
8079805d 4796 if (GET_CODE (op1) == AND)
230d793d
RS
4797 {
4798 x = apply_distributive_law
4799 (gen_binary (AND, mode,
8079805d
RK
4800 gen_binary (IOR, mode, XEXP (op1, 0), op0),
4801 gen_binary (IOR, mode, XEXP (op1, 1), op0)));
230d793d
RS
4802
4803 if (GET_CODE (x) != IOR)
8079805d 4804 return x;
230d793d
RS
4805 }
4806
4807 /* Convert (ior (ashift A CX) (lshiftrt A CY)) where CX+CY equals the
4808 mode size to (rotate A CX). */
4809
8079805d
RK
4810 if (((GET_CODE (op0) == ASHIFT && GET_CODE (op1) == LSHIFTRT)
4811 || (GET_CODE (op1) == ASHIFT && GET_CODE (op0) == LSHIFTRT))
4812 && rtx_equal_p (XEXP (op0, 0), XEXP (op1, 0))
4813 && GET_CODE (XEXP (op0, 1)) == CONST_INT
4814 && GET_CODE (XEXP (op1, 1)) == CONST_INT
4815 && (INTVAL (XEXP (op0, 1)) + INTVAL (XEXP (op1, 1))
230d793d 4816 == GET_MODE_BITSIZE (mode)))
8079805d
RK
4817 return gen_rtx (ROTATE, mode, XEXP (op0, 0),
4818 (GET_CODE (op0) == ASHIFT
4819 ? XEXP (op0, 1) : XEXP (op1, 1)));
230d793d 4820
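/* Editorial sketch, not part of combine.c: the shift pair recognized
   above as a rotate.  Assumes a 32-bit unsigned int, so that
   CX + CY == 7 + 25 matches the mode size; demo_* is hypothetical. */

static unsigned
demo_rotate_left_7 (unsigned x)
{
  return (x << 7) | (x >> 25);	/* == (rotate:SI x 7) */
}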
71923da7
RK
4821 /* If OP0 is (ashiftrt (plus ...) C), it might actually be
 4822	 a (sign_extend (plus ...)).  If so, and if OP1 is a CONST_INT and the PLUS
 4823	 does not affect any of the bits in OP1, it can really be done
4824 as a PLUS and we can associate. We do this by seeing if OP1
4825 can be safely shifted left C bits. */
4826 if (GET_CODE (op1) == CONST_INT && GET_CODE (op0) == ASHIFTRT
4827 && GET_CODE (XEXP (op0, 0)) == PLUS
4828 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
4829 && GET_CODE (XEXP (op0, 1)) == CONST_INT
4830 && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT)
4831 {
4832 int count = INTVAL (XEXP (op0, 1));
4833 HOST_WIDE_INT mask = INTVAL (op1) << count;
4834
4835 if (mask >> count == INTVAL (op1)
4836 && (mask & nonzero_bits (XEXP (op0, 0), mode)) == 0)
4837 {
4838 SUBST (XEXP (XEXP (op0, 0), 1),
4839 GEN_INT (INTVAL (XEXP (XEXP (op0, 0), 1)) | mask));
4840 return op0;
4841 }
4842 }
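/* Editorial sketch, not part of combine.c: folding the IOR into the
   PLUS as above.  If K << N loses no bits, K << N does not overlap the
   nonzero bits of P + C, and C itself has those bits clear, then
   ((p + c) >> n) | k == (p + (c | (k << n))) >> n, so the IOR
   disappears into the addend.  The constants below are hypothetical. */

static int
demo_ior_into_plus (int p)	/* assumes bits 4..5 of p + 0x100 are 0 */
{
  return (p + (0x100 | (3 << 4))) >> 4;	/* == ((p + 0x100) >> 4) | 3 */
}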
230d793d
RS
4843 break;
4844
4845 case XOR:
4846 /* Convert (XOR (NOT x) (NOT y)) to (XOR x y).
4847 Also convert (XOR (NOT x) y) to (NOT (XOR x y)), similarly for
4848 (NOT y). */
4849 {
4850 int num_negated = 0;
230d793d 4851
8079805d
RK
4852 if (GET_CODE (op0) == NOT)
4853 num_negated++, op0 = XEXP (op0, 0);
4854 if (GET_CODE (op1) == NOT)
4855 num_negated++, op1 = XEXP (op1, 0);
230d793d
RS
4856
4857 if (num_negated == 2)
4858 {
8079805d
RK
4859 SUBST (XEXP (x, 0), op0);
4860 SUBST (XEXP (x, 1), op1);
230d793d
RS
4861 }
4862 else if (num_negated == 1)
0c1c8ea6 4863 return gen_unary (NOT, mode, mode, gen_binary (XOR, mode, op0, op1));
230d793d
RS
4864 }
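/* Editorial sketch, not part of combine.c: the negation counting
   above.  Two NOTs cancel through XOR, and a single NOT floats out of
   it; demo_* is hypothetical. */

static unsigned
demo_xor_not (unsigned x, unsigned y)
{
  unsigned two = ~x ^ ~y;	/* == x ^ y      (num_negated == 2) */
  unsigned one = ~x ^ y;	/* == ~(x ^ y)   (num_negated == 1) */
  return two ^ one;		/* == ~0 for every x, y */
}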
4865
4866 /* Convert (xor (and A B) B) to (and (not A) B). The latter may
4867 correspond to a machine insn or result in further simplifications
4868 if B is a constant. */
4869
8079805d
RK
4870 if (GET_CODE (op0) == AND
4871 && rtx_equal_p (XEXP (op0, 1), op1)
4872 && ! side_effects_p (op1))
0c1c8ea6
RK
4873 return gen_binary (AND, mode,
4874 gen_unary (NOT, mode, mode, XEXP (op0, 0)),
8079805d 4875 op1);
230d793d 4876
8079805d
RK
4877 else if (GET_CODE (op0) == AND
4878 && rtx_equal_p (XEXP (op0, 0), op1)
4879 && ! side_effects_p (op1))
0c1c8ea6
RK
4880 return gen_binary (AND, mode,
4881 gen_unary (NOT, mode, mode, XEXP (op0, 1)),
8079805d 4882 op1);
230d793d 4883
230d793d 4884 /* (xor (comparison foo bar) (const_int 1)) can become the reversed
0802d516
RK
4885 comparison if STORE_FLAG_VALUE is 1. */
4886 if (STORE_FLAG_VALUE == 1
4887 && op1 == const1_rtx
8079805d
RK
4888 && GET_RTX_CLASS (GET_CODE (op0)) == '<'
4889 && reversible_comparison_p (op0))
4890 return gen_rtx_combine (reverse_condition (GET_CODE (op0)),
4891 mode, XEXP (op0, 0), XEXP (op0, 1));
500c518b
RK
4892
4893 /* (lshiftrt foo C) where C is the number of bits in FOO minus 1
4894 is (lt foo (const_int 0)), so we can perform the above
0802d516 4895 simplification if STORE_FLAG_VALUE is 1. */
500c518b 4896
0802d516
RK
4897 if (STORE_FLAG_VALUE == 1
4898 && op1 == const1_rtx
8079805d
RK
4899 && GET_CODE (op0) == LSHIFTRT
4900 && GET_CODE (XEXP (op0, 1)) == CONST_INT
4901 && INTVAL (XEXP (op0, 1)) == GET_MODE_BITSIZE (mode) - 1)
4902 return gen_rtx_combine (GE, mode, XEXP (op0, 0), const0_rtx);
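/* Editorial sketch, not part of combine.c: both STORE_FLAG_VALUE == 1
   rules above, for 32-bit int.  C comparison results are already 0 or
   1, so they model store-flag values directly; demo_* is
   hypothetical. */

static int
demo_store_flag (int a, int b, unsigned u)
{
  int rev = (a < b) ^ 1;	/* reversed comparison: == (a >= b) */
  int ge  = (u >> 31) ^ 1;	/* (lshiftrt u 31) is (lt u 0), so
				   XOR 1 gives ((int) u >= 0) */
  return rev & ge;
}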
230d793d
RS
4903
4904 /* (xor (comparison foo bar) (const_int sign-bit))
4905 when STORE_FLAG_VALUE is the sign bit. */
5f4f0e22 4906 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
0802d516 4907 && ((STORE_FLAG_VALUE & GET_MODE_MASK (mode))
5f4f0e22 4908 == (HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (mode) - 1))
8079805d
RK
4909 && op1 == const_true_rtx
4910 && GET_RTX_CLASS (GET_CODE (op0)) == '<'
4911 && reversible_comparison_p (op0))
4912 return gen_rtx_combine (reverse_condition (GET_CODE (op0)),
4913 mode, XEXP (op0, 0), XEXP (op0, 1));
230d793d
RS
4914 break;
4915 }
4916
4917 return x;
4918}
4919\f
4920/* We consider ZERO_EXTRACT, SIGN_EXTRACT, and SIGN_EXTEND as "compound
4921 operations" because they can be replaced with two more basic operations.
4922 ZERO_EXTEND is also considered "compound" because it can be replaced with
4923 an AND operation, which is simpler, though only one operation.
4924
4925 The function expand_compound_operation is called with an rtx expression
4926 and will convert it to the appropriate shifts and AND operations,
4927 simplifying at each stage.
4928
4929 The function make_compound_operation is called to convert an expression
4930 consisting of shifts and ANDs into the equivalent compound expression.
4931 It is the inverse of this function, loosely speaking. */
4932
4933static rtx
4934expand_compound_operation (x)
4935 rtx x;
4936{
4937 int pos = 0, len;
4938 int unsignedp = 0;
4939 int modewidth;
4940 rtx tem;
4941
4942 switch (GET_CODE (x))
4943 {
4944 case ZERO_EXTEND:
4945 unsignedp = 1;
4946 case SIGN_EXTEND:
75473182
RS
4947 /* We can't necessarily use a const_int for a multiword mode;
4948 it depends on implicitly extending the value.
4949 Since we don't know the right way to extend it,
4950 we can't tell whether the implicit way is right.
4951
4952 Even for a mode that is no wider than a const_int,
4953 we can't win, because we need to sign extend one of its bits through
4954 the rest of it, and we don't know which bit. */
230d793d 4955 if (GET_CODE (XEXP (x, 0)) == CONST_INT)
75473182 4956 return x;
230d793d 4957
8079805d
RK
4958 /* Return if (subreg:MODE FROM 0) is not a safe replacement for
4959 (zero_extend:MODE FROM) or (sign_extend:MODE FROM). It is for any MEM
4960 because (SUBREG (MEM...)) is guaranteed to cause the MEM to be
4961 reloaded. If not for that, MEM's would very rarely be safe.
4962
4963 Reject MODEs bigger than a word, because we might not be able
4964 to reference a two-register group starting with an arbitrary register
4965 (and currently gen_lowpart might crash for a SUBREG). */
4966
4967 if (GET_MODE_SIZE (GET_MODE (XEXP (x, 0))) > UNITS_PER_WORD)
230d793d
RS
4968 return x;
4969
4970 len = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)));
4971 /* If the inner object has VOIDmode (the only way this can happen
 4972	 is if it is an ASM_OPERANDS), we can't do anything since we don't
4973 know how much masking to do. */
4974 if (len == 0)
4975 return x;
4976
4977 break;
4978
4979 case ZERO_EXTRACT:
4980 unsignedp = 1;
4981 case SIGN_EXTRACT:
4982 /* If the operand is a CLOBBER, just return it. */
4983 if (GET_CODE (XEXP (x, 0)) == CLOBBER)
4984 return XEXP (x, 0);
4985
4986 if (GET_CODE (XEXP (x, 1)) != CONST_INT
4987 || GET_CODE (XEXP (x, 2)) != CONST_INT
4988 || GET_MODE (XEXP (x, 0)) == VOIDmode)
4989 return x;
4990
4991 len = INTVAL (XEXP (x, 1));
4992 pos = INTVAL (XEXP (x, 2));
4993
4994 /* If this goes outside the object being extracted, replace the object
4995 with a (use (mem ...)) construct that only combine understands
4996 and is used only for this purpose. */
4997 if (len + pos > GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))))
4998 SUBST (XEXP (x, 0), gen_rtx (USE, GET_MODE (x), XEXP (x, 0)));
4999
f76b9db2
ILT
5000 if (BITS_BIG_ENDIAN)
5001 pos = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - len - pos;
5002
230d793d
RS
5003 break;
5004
5005 default:
5006 return x;
5007 }
5008
0f13a422
ILT
5009 /* We can optimize some special cases of ZERO_EXTEND. */
5010 if (GET_CODE (x) == ZERO_EXTEND)
5011 {
5012 /* (zero_extend:DI (truncate:SI foo:DI)) is just foo:DI if we
5013 know that the last value didn't have any inappropriate bits
5014 set. */
5015 if (GET_CODE (XEXP (x, 0)) == TRUNCATE
5016 && GET_MODE (XEXP (XEXP (x, 0), 0)) == GET_MODE (x)
5017 && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
5018 && (nonzero_bits (XEXP (XEXP (x, 0), 0), GET_MODE (x))
5019 & ~ GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
5020 return XEXP (XEXP (x, 0), 0);
5021
5022 /* Likewise for (zero_extend:DI (subreg:SI foo:DI 0)). */
5023 if (GET_CODE (XEXP (x, 0)) == SUBREG
5024 && GET_MODE (SUBREG_REG (XEXP (x, 0))) == GET_MODE (x)
5025 && subreg_lowpart_p (XEXP (x, 0))
5026 && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
5027 && (nonzero_bits (SUBREG_REG (XEXP (x, 0)), GET_MODE (x))
 5028	      & ~ GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
5029 return SUBREG_REG (XEXP (x, 0));
5030
5031 /* (zero_extend:DI (truncate:SI foo:DI)) is just foo:DI when foo
5032 is a comparison and STORE_FLAG_VALUE permits. This is like
5033 the first case, but it works even when GET_MODE (x) is larger
5034 than HOST_WIDE_INT. */
5035 if (GET_CODE (XEXP (x, 0)) == TRUNCATE
5036 && GET_MODE (XEXP (XEXP (x, 0), 0)) == GET_MODE (x)
5037 && GET_RTX_CLASS (GET_CODE (XEXP (XEXP (x, 0), 0))) == '<'
5038 && (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
5039 <= HOST_BITS_PER_WIDE_INT)
5040 && ((HOST_WIDE_INT) STORE_FLAG_VALUE
5041 & ~ GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
5042 return XEXP (XEXP (x, 0), 0);
5043
5044 /* Likewise for (zero_extend:DI (subreg:SI foo:DI 0)). */
5045 if (GET_CODE (XEXP (x, 0)) == SUBREG
5046 && GET_MODE (SUBREG_REG (XEXP (x, 0))) == GET_MODE (x)
5047 && subreg_lowpart_p (XEXP (x, 0))
5048 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0)))) == '<'
5049 && (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
5050 <= HOST_BITS_PER_WIDE_INT)
5051 && ((HOST_WIDE_INT) STORE_FLAG_VALUE
5052 & ~ GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
5053 return SUBREG_REG (XEXP (x, 0));
5054
5055 /* If sign extension is cheaper than zero extension, then use it
5056 if we know that no extraneous bits are set, and that the high
5057 bit is not set. */
5058 if (flag_expensive_optimizations
5059 && ((GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
5060 && ((nonzero_bits (XEXP (x, 0), GET_MODE (x))
5061 & ~ (((unsigned HOST_WIDE_INT)
5062 GET_MODE_MASK (GET_MODE (XEXP (x, 0))))
5063 >> 1))
5064 == 0))
5065 || (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
5066 && (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
5067 <= HOST_BITS_PER_WIDE_INT)
5068 && (((HOST_WIDE_INT) STORE_FLAG_VALUE
5069 & ~ (((unsigned HOST_WIDE_INT)
5070 GET_MODE_MASK (GET_MODE (XEXP (x, 0))))
5071 >> 1))
5072 == 0))))
5073 {
5074 rtx temp = gen_rtx (SIGN_EXTEND, GET_MODE (x), XEXP (x, 0));
5075
5076 if (rtx_cost (temp, SET) < rtx_cost (x, SET))
5077 return expand_compound_operation (temp);
5078 }
5079 }
5080
230d793d
RS
5081 /* If we reach here, we want to return a pair of shifts. The inner
5082 shift is a left shift of BITSIZE - POS - LEN bits. The outer
5083 shift is a right shift of BITSIZE - LEN bits. It is arithmetic or
5084 logical depending on the value of UNSIGNEDP.
5085
5086 If this was a ZERO_EXTEND or ZERO_EXTRACT, this pair of shifts will be
5087 converted into an AND of a shift.
5088
5089 We must check for the case where the left shift would have a negative
5090 count. This can happen in a case like (x >> 31) & 255 on machines
5091 that can't shift by a constant. On those machines, we would first
5092 combine the shift with the AND to produce a variable-position
5093 extraction. Then the constant of 31 would be substituted in to produce
 5094	 such a position.  */
5095
5096 modewidth = GET_MODE_BITSIZE (GET_MODE (x));
 5097	  if (modewidth >= pos + len)
5f4f0e22 5098 tem = simplify_shift_const (NULL_RTX, unsignedp ? LSHIFTRT : ASHIFTRT,
230d793d 5099 GET_MODE (x),
5f4f0e22
CH
5100 simplify_shift_const (NULL_RTX, ASHIFT,
5101 GET_MODE (x),
230d793d
RS
5102 XEXP (x, 0),
5103 modewidth - pos - len),
5104 modewidth - len);
5105
5f4f0e22
CH
5106 else if (unsignedp && len < HOST_BITS_PER_WIDE_INT)
5107 tem = simplify_and_const_int (NULL_RTX, GET_MODE (x),
5108 simplify_shift_const (NULL_RTX, LSHIFTRT,
230d793d
RS
5109 GET_MODE (x),
5110 XEXP (x, 0), pos),
5f4f0e22 5111 ((HOST_WIDE_INT) 1 << len) - 1);
230d793d
RS
5112 else
5113 /* Any other cases we can't handle. */
5114 return x;
5115
5116
5117 /* If we couldn't do this for some reason, return the original
5118 expression. */
5119 if (GET_CODE (tem) == CLOBBER)
5120 return x;
5121
5122 return tem;
5123}
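/* Editorial sketch, not part of combine.c: the pair of shifts returned
   above, for LEN == 8 bits at POS == 4 in a 32-bit word.  The left
   shift moves the field to the top; an arithmetic right shift brings
   it back sign-extended (a logical right shift gives the unsigned
   form).  Assumes two's-complement signed shifts; demo_* is
   hypothetical. */

static int
demo_sign_extract (int x)
{
  return (x << (32 - 4 - 8)) >> (32 - 8);	/* bits 4..11, signed */
}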
5124\f
5125/* X is a SET which contains an assignment of one object into
5126 a part of another (such as a bit-field assignment, STRICT_LOW_PART,
5127 or certain SUBREGS). If possible, convert it into a series of
5128 logical operations.
5129
5130 We half-heartedly support variable positions, but do not at all
5131 support variable lengths. */
5132
5133static rtx
5134expand_field_assignment (x)
5135 rtx x;
5136{
5137 rtx inner;
0f41302f 5138 rtx pos; /* Always counts from low bit. */
230d793d
RS
5139 int len;
5140 rtx mask;
5141 enum machine_mode compute_mode;
5142
5143 /* Loop until we find something we can't simplify. */
5144 while (1)
5145 {
5146 if (GET_CODE (SET_DEST (x)) == STRICT_LOW_PART
5147 && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG)
5148 {
5149 inner = SUBREG_REG (XEXP (SET_DEST (x), 0));
5150 len = GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)));
4d9cfc7b 5151 pos = GEN_INT (BITS_PER_WORD * SUBREG_WORD (XEXP (SET_DEST (x), 0)));
230d793d
RS
5152 }
5153 else if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
5154 && GET_CODE (XEXP (SET_DEST (x), 1)) == CONST_INT)
5155 {
5156 inner = XEXP (SET_DEST (x), 0);
5157 len = INTVAL (XEXP (SET_DEST (x), 1));
5158 pos = XEXP (SET_DEST (x), 2);
5159
5160 /* If the position is constant and spans the width of INNER,
5161 surround INNER with a USE to indicate this. */
5162 if (GET_CODE (pos) == CONST_INT
5163 && INTVAL (pos) + len > GET_MODE_BITSIZE (GET_MODE (inner)))
5164 inner = gen_rtx (USE, GET_MODE (SET_DEST (x)), inner);
5165
f76b9db2
ILT
5166 if (BITS_BIG_ENDIAN)
5167 {
5168 if (GET_CODE (pos) == CONST_INT)
5169 pos = GEN_INT (GET_MODE_BITSIZE (GET_MODE (inner)) - len
5170 - INTVAL (pos));
5171 else if (GET_CODE (pos) == MINUS
5172 && GET_CODE (XEXP (pos, 1)) == CONST_INT
5173 && (INTVAL (XEXP (pos, 1))
5174 == GET_MODE_BITSIZE (GET_MODE (inner)) - len))
5175 /* If position is ADJUST - X, new position is X. */
5176 pos = XEXP (pos, 0);
5177 else
5178 pos = gen_binary (MINUS, GET_MODE (pos),
5179 GEN_INT (GET_MODE_BITSIZE (GET_MODE (inner))
5180 - len),
5181 pos);
5182 }
230d793d
RS
5183 }
5184
 5185	    /* A SUBREG between two modes that occupy the same number of words
5186 can be done by moving the SUBREG to the source. */
5187 else if (GET_CODE (SET_DEST (x)) == SUBREG
5188 && (((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
5189 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
5190 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
5191 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
5192 {
5193 x = gen_rtx (SET, VOIDmode, SUBREG_REG (SET_DEST (x)),
5194 gen_lowpart_for_combine (GET_MODE (SUBREG_REG (SET_DEST (x))),
5195 SET_SRC (x)));
5196 continue;
5197 }
5198 else
5199 break;
5200
5201 while (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner))
5202 inner = SUBREG_REG (inner);
5203
5204 compute_mode = GET_MODE (inner);
5205
5206 /* Compute a mask of LEN bits, if we can do this on the host machine. */
5f4f0e22
CH
5207 if (len < HOST_BITS_PER_WIDE_INT)
5208 mask = GEN_INT (((HOST_WIDE_INT) 1 << len) - 1);
230d793d
RS
5209 else
5210 break;
5211
5212 /* Now compute the equivalent expression. Make a copy of INNER
5213 for the SET_DEST in case it is a MEM into which we will substitute;
5214 we don't want shared RTL in that case. */
5215 x = gen_rtx (SET, VOIDmode, copy_rtx (inner),
5216 gen_binary (IOR, compute_mode,
5217 gen_binary (AND, compute_mode,
5218 gen_unary (NOT, compute_mode,
0c1c8ea6 5219 compute_mode,
230d793d
RS
5220 gen_binary (ASHIFT,
5221 compute_mode,
5222 mask, pos)),
5223 inner),
5224 gen_binary (ASHIFT, compute_mode,
5225 gen_binary (AND, compute_mode,
5226 gen_lowpart_for_combine
5227 (compute_mode,
5228 SET_SRC (x)),
5229 mask),
5230 pos)));
5231 }
5232
5233 return x;
5234}
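/* Editorial sketch, not part of combine.c: the expression built above,
   for a LEN == 8 bit field at position POS of a 32-bit word.  The old
   field is masked out of INNER and the new value is masked and shifted
   into place; demo_* is hypothetical. */

static unsigned
demo_field_assign (unsigned inner, unsigned src, int pos)
{
  unsigned mask = (1u << 8) - 1;
  return (inner & ~(mask << pos)) | ((src & mask) << pos);
}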
5235\f
8999a12e
RK
5236/* Return an RTX for a reference to LEN bits of INNER. If POS_RTX is nonzero,
5237 it is an RTX that represents a variable starting position; otherwise,
5238 POS is the (constant) starting bit position (counted from the LSB).
230d793d
RS
5239
5240 INNER may be a USE. This will occur when we started with a bitfield
5241 that went outside the boundary of the object in memory, which is
5242 allowed on most machines. To isolate this case, we produce a USE
5243 whose mode is wide enough and surround the MEM with it. The only
5244 code that understands the USE is this routine. If it is not removed,
5245 it will cause the resulting insn not to match.
5246
5247 UNSIGNEDP is non-zero for an unsigned reference and zero for a
5248 signed reference.
5249
5250 IN_DEST is non-zero if this is a reference in the destination of a
5251 SET. This is used when a ZERO_ or SIGN_EXTRACT isn't needed. If non-zero,
5252 a STRICT_LOW_PART will be used, if zero, ZERO_EXTEND or SIGN_EXTEND will
5253 be used.
5254
5255 IN_COMPARE is non-zero if we are in a COMPARE. This means that a
5256 ZERO_EXTRACT should be built even for bits starting at bit 0.
5257
76184def
DE
5258 MODE is the desired mode of the result (if IN_DEST == 0).
5259
5260 The result is an RTX for the extraction or NULL_RTX if the target
5261 can't handle it. */
230d793d
RS
5262
5263static rtx
5264make_extraction (mode, inner, pos, pos_rtx, len,
5265 unsignedp, in_dest, in_compare)
5266 enum machine_mode mode;
5267 rtx inner;
5268 int pos;
5269 rtx pos_rtx;
5270 int len;
5271 int unsignedp;
5272 int in_dest, in_compare;
5273{
94b4b17a
RS
5274 /* This mode describes the size of the storage area
5275 to fetch the overall value from. Within that, we
5276 ignore the POS lowest bits, etc. */
230d793d
RS
5277 enum machine_mode is_mode = GET_MODE (inner);
5278 enum machine_mode inner_mode;
d7cd794f
RK
5279 enum machine_mode wanted_inner_mode = byte_mode;
5280 enum machine_mode wanted_inner_reg_mode = word_mode;
230d793d
RS
5281 enum machine_mode pos_mode = word_mode;
5282 enum machine_mode extraction_mode = word_mode;
5283 enum machine_mode tmode = mode_for_size (len, MODE_INT, 1);
5284 int spans_byte = 0;
5285 rtx new = 0;
8999a12e 5286 rtx orig_pos_rtx = pos_rtx;
6139ff20 5287 int orig_pos;
230d793d
RS
5288
5289 /* Get some information about INNER and get the innermost object. */
5290 if (GET_CODE (inner) == USE)
94b4b17a 5291 /* (use:SI (mem:QI foo)) stands for (mem:SI foo). */
230d793d
RS
5292 /* We don't need to adjust the position because we set up the USE
5293 to pretend that it was a full-word object. */
5294 spans_byte = 1, inner = XEXP (inner, 0);
5295 else if (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner))
94b4b17a
RS
5296 {
5297 /* If going from (subreg:SI (mem:QI ...)) to (mem:QI ...),
5298 consider just the QI as the memory to extract from.
5299 The subreg adds or removes high bits; its mode is
5300 irrelevant to the meaning of this extraction,
5301 since POS and LEN count from the lsb. */
5302 if (GET_CODE (SUBREG_REG (inner)) == MEM)
5303 is_mode = GET_MODE (SUBREG_REG (inner));
5304 inner = SUBREG_REG (inner);
5305 }
230d793d
RS
5306
5307 inner_mode = GET_MODE (inner);
5308
5309 if (pos_rtx && GET_CODE (pos_rtx) == CONST_INT)
8999a12e 5310 pos = INTVAL (pos_rtx), pos_rtx = 0;
230d793d
RS
5311
5312 /* See if this can be done without an extraction. We never can if the
5313 width of the field is not the same as that of some integer mode. For
5314 registers, we can only avoid the extraction if the position is at the
5315 low-order bit and this is either not in the destination or we have the
5316 appropriate STRICT_LOW_PART operation available.
5317
5318 For MEM, we can avoid an extract if the field starts on an appropriate
5319 boundary and we can change the mode of the memory reference. However,
5320 we cannot directly access the MEM if we have a USE and the underlying
5321 MEM is not TMODE. This combination means that MEM was being used in a
5322 context where bits outside its mode were being referenced; that is only
5323 valid in bit-field insns. */
5324
5325 if (tmode != BLKmode
5326 && ! (spans_byte && inner_mode != tmode)
4d9cfc7b
RK
5327 && ((pos_rtx == 0 && (pos % BITS_PER_WORD) == 0
5328 && GET_CODE (inner) != MEM
230d793d 5329 && (! in_dest
df62f951
RK
5330 || (GET_CODE (inner) == REG
5331 && (movstrict_optab->handlers[(int) tmode].insn_code
5332 != CODE_FOR_nothing))))
8999a12e 5333 || (GET_CODE (inner) == MEM && pos_rtx == 0
dfbe1b2f
RK
5334 && (pos
5335 % (STRICT_ALIGNMENT ? GET_MODE_ALIGNMENT (tmode)
5336 : BITS_PER_UNIT)) == 0
230d793d
RS
5337 /* We can't do this if we are widening INNER_MODE (it
5338 may not be aligned, for one thing). */
5339 && GET_MODE_BITSIZE (inner_mode) >= GET_MODE_BITSIZE (tmode)
5340 && (inner_mode == tmode
5341 || (! mode_dependent_address_p (XEXP (inner, 0))
5342 && ! MEM_VOLATILE_P (inner))))))
5343 {
230d793d
RS
5344 /* If INNER is a MEM, make a new MEM that encompasses just the desired
5345 field. If the original and current mode are the same, we need not
5346 adjust the offset. Otherwise, we do if bytes big endian.
5347
4d9cfc7b
RK
5348 If INNER is not a MEM, get a piece consisting of just the field
5349 of interest (in this case POS % BITS_PER_WORD must be 0). */
230d793d
RS
5350
5351 if (GET_CODE (inner) == MEM)
5352 {
94b4b17a
RS
5353 int offset;
5354 /* POS counts from lsb, but make OFFSET count in memory order. */
5355 if (BYTES_BIG_ENDIAN)
5356 offset = (GET_MODE_BITSIZE (is_mode) - len - pos) / BITS_PER_UNIT;
5357 else
5358 offset = pos / BITS_PER_UNIT;
230d793d
RS
5359
5360 new = gen_rtx (MEM, tmode, plus_constant (XEXP (inner, 0), offset));
5361 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (inner);
5362 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (inner);
5363 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (inner);
5364 }
df62f951 5365 else if (GET_CODE (inner) == REG)
c0d3ac4d
RK
5366 {
5367 /* We can't call gen_lowpart_for_combine here since we always want
5368 a SUBREG and it would sometimes return a new hard register. */
5369 if (tmode != inner_mode)
5370 new = gen_rtx (SUBREG, tmode, inner,
5371 (WORDS_BIG_ENDIAN
5372 && GET_MODE_SIZE (inner_mode) > UNITS_PER_WORD
4d9cfc7b
RK
5373 ? (((GET_MODE_SIZE (inner_mode)
5374 - GET_MODE_SIZE (tmode))
5375 / UNITS_PER_WORD)
5376 - pos / BITS_PER_WORD)
5377 : pos / BITS_PER_WORD));
c0d3ac4d
RK
5378 else
5379 new = inner;
5380 }
230d793d 5381 else
6139ff20
RK
5382 new = force_to_mode (inner, tmode,
5383 len >= HOST_BITS_PER_WIDE_INT
5384 ? GET_MODE_MASK (tmode)
5385 : ((HOST_WIDE_INT) 1 << len) - 1,
e3d616e3 5386 NULL_RTX, 0);
230d793d
RS
5387
5388 /* If this extraction is going into the destination of a SET,
5389 make a STRICT_LOW_PART unless we made a MEM. */
5390
5391 if (in_dest)
5392 return (GET_CODE (new) == MEM ? new
77fa0940
RK
5393 : (GET_CODE (new) != SUBREG
5394 ? gen_rtx (CLOBBER, tmode, const0_rtx)
5395 : gen_rtx_combine (STRICT_LOW_PART, VOIDmode, new)));
230d793d
RS
5396
5397 /* Otherwise, sign- or zero-extend unless we already are in the
5398 proper mode. */
5399
5400 return (mode == tmode ? new
5401 : gen_rtx_combine (unsignedp ? ZERO_EXTEND : SIGN_EXTEND,
5402 mode, new));
5403 }
5404
cc471082
RS
5405 /* Unless this is a COMPARE or we have a funny memory reference,
5406 don't do anything with zero-extending field extracts starting at
5407 the low-order bit since they are simple AND operations. */
8999a12e
RK
5408 if (pos_rtx == 0 && pos == 0 && ! in_dest
5409 && ! in_compare && ! spans_byte && unsignedp)
230d793d
RS
5410 return 0;
5411
e7373556
RK
5412 /* Unless we are allowed to span bytes, reject this if we would be
5413 spanning bytes or if the position is not a constant and the length
5414 is not 1. In all other cases, we would only be going outside
 5415	 our object in cases when an original shift would have been
5416 undefined. */
5417 if (! spans_byte
5418 && ((pos_rtx == 0 && pos + len > GET_MODE_BITSIZE (is_mode))
5419 || (pos_rtx != 0 && len != 1)))
5420 return 0;
5421
d7cd794f 5422 /* Get the mode to use should INNER not be a MEM, the mode for the position,
230d793d
RS
5423 and the mode for the result. */
5424#ifdef HAVE_insv
5425 if (in_dest)
5426 {
d7cd794f 5427 wanted_inner_reg_mode = insn_operand_mode[(int) CODE_FOR_insv][0];
230d793d
RS
5428 pos_mode = insn_operand_mode[(int) CODE_FOR_insv][2];
5429 extraction_mode = insn_operand_mode[(int) CODE_FOR_insv][3];
5430 }
5431#endif
5432
5433#ifdef HAVE_extzv
5434 if (! in_dest && unsignedp)
5435 {
d7cd794f 5436 wanted_inner_reg_mode = insn_operand_mode[(int) CODE_FOR_extzv][1];
230d793d
RS
5437 pos_mode = insn_operand_mode[(int) CODE_FOR_extzv][3];
5438 extraction_mode = insn_operand_mode[(int) CODE_FOR_extzv][0];
5439 }
5440#endif
5441
5442#ifdef HAVE_extv
5443 if (! in_dest && ! unsignedp)
5444 {
d7cd794f 5445 wanted_inner_reg_mode = insn_operand_mode[(int) CODE_FOR_extv][1];
230d793d
RS
5446 pos_mode = insn_operand_mode[(int) CODE_FOR_extv][3];
5447 extraction_mode = insn_operand_mode[(int) CODE_FOR_extv][0];
5448 }
5449#endif
5450
5451 /* Never narrow an object, since that might not be safe. */
5452
5453 if (mode != VOIDmode
5454 && GET_MODE_SIZE (extraction_mode) < GET_MODE_SIZE (mode))
5455 extraction_mode = mode;
5456
5457 if (pos_rtx && GET_MODE (pos_rtx) != VOIDmode
5458 && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx)))
5459 pos_mode = GET_MODE (pos_rtx);
5460
d7cd794f
RK
5461 /* If this is not from memory, the desired mode is wanted_inner_reg_mode;
5462 if we have to change the mode of memory and cannot, the desired mode is
5463 EXTRACTION_MODE. */
5464 if (GET_CODE (inner) != MEM)
5465 wanted_inner_mode = wanted_inner_reg_mode;
5466 else if (inner_mode != wanted_inner_mode
5467 && (mode_dependent_address_p (XEXP (inner, 0))
5468 || MEM_VOLATILE_P (inner)))
5469 wanted_inner_mode = extraction_mode;
230d793d 5470
6139ff20
RK
5471 orig_pos = pos;
5472
f76b9db2
ILT
5473 if (BITS_BIG_ENDIAN)
5474 {
cf54c2cd
DE
5475 /* POS is passed as if BITS_BIG_ENDIAN == 0, so we need to convert it to
5476 BITS_BIG_ENDIAN style. If position is constant, compute new
5477 position. Otherwise, build subtraction.
5478 Note that POS is relative to the mode of the original argument.
5479 If it's a MEM we need to recompute POS relative to that.
5480 However, if we're extracting from (or inserting into) a register,
5481 we want to recompute POS relative to wanted_inner_mode. */
5482 int width = (GET_CODE (inner) == MEM
5483 ? GET_MODE_BITSIZE (is_mode)
5484 : GET_MODE_BITSIZE (wanted_inner_mode));
5485
f76b9db2 5486 if (pos_rtx == 0)
cf54c2cd 5487 pos = width - len - pos;
f76b9db2
ILT
5488 else
5489 pos_rtx
5490 = gen_rtx_combine (MINUS, GET_MODE (pos_rtx),
cf54c2cd
DE
5491 GEN_INT (width - len), pos_rtx);
5492 /* POS may be less than 0 now, but we check for that below.
5493 Note that it can only be less than 0 if GET_CODE (inner) != MEM. */
f76b9db2 5494 }
230d793d
RS
5495
5496 /* If INNER has a wider mode, make it smaller. If this is a constant
5497 extract, try to adjust the byte to point to the byte containing
5498 the value. */
d7cd794f
RK
5499 if (wanted_inner_mode != VOIDmode
5500 && GET_MODE_SIZE (wanted_inner_mode) < GET_MODE_SIZE (is_mode)
230d793d 5501 && ((GET_CODE (inner) == MEM
d7cd794f 5502 && (inner_mode == wanted_inner_mode
230d793d
RS
5503 || (! mode_dependent_address_p (XEXP (inner, 0))
5504 && ! MEM_VOLATILE_P (inner))))))
5505 {
5506 int offset = 0;
5507
5508 /* The computations below will be correct if the machine is big
5509 endian in both bits and bytes or little endian in bits and bytes.
5510 If it is mixed, we must adjust. */
5511
230d793d 5512 /* If bytes are big endian and we had a paradoxical SUBREG, we must
0f41302f 5513 adjust OFFSET to compensate. */
f76b9db2
ILT
5514 if (BYTES_BIG_ENDIAN
5515 && ! spans_byte
230d793d
RS
5516 && GET_MODE_SIZE (inner_mode) < GET_MODE_SIZE (is_mode))
5517 offset -= GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (inner_mode);
230d793d
RS
5518
5519 /* If this is a constant position, we can move to the desired byte. */
8999a12e 5520 if (pos_rtx == 0)
230d793d
RS
5521 {
5522 offset += pos / BITS_PER_UNIT;
d7cd794f 5523 pos %= GET_MODE_BITSIZE (wanted_inner_mode);
230d793d
RS
5524 }
5525
f76b9db2
ILT
5526 if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN
5527 && ! spans_byte
d7cd794f 5528 && is_mode != wanted_inner_mode)
c6b3f1f2 5529 offset = (GET_MODE_SIZE (is_mode)
d7cd794f 5530 - GET_MODE_SIZE (wanted_inner_mode) - offset);
c6b3f1f2 5531
d7cd794f 5532 if (offset != 0 || inner_mode != wanted_inner_mode)
230d793d 5533 {
d7cd794f 5534 rtx newmem = gen_rtx (MEM, wanted_inner_mode,
230d793d
RS
5535 plus_constant (XEXP (inner, 0), offset));
5536 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (inner);
5537 MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (inner);
5538 MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (inner);
5539 inner = newmem;
5540 }
5541 }
5542
9e74dc41
RK
5543 /* If INNER is not memory, we can always get it into the proper mode. If we
5544 are changing its mode, POS must be a constant and smaller than the size
5545 of the new mode. */
230d793d 5546 else if (GET_CODE (inner) != MEM)
9e74dc41
RK
5547 {
5548 if (GET_MODE (inner) != wanted_inner_mode
5549 && (pos_rtx != 0
5550 || orig_pos + len > GET_MODE_BITSIZE (wanted_inner_mode)))
5551 return 0;
5552
5553 inner = force_to_mode (inner, wanted_inner_mode,
5554 pos_rtx
5555 || len + orig_pos >= HOST_BITS_PER_WIDE_INT
5556 ? GET_MODE_MASK (wanted_inner_mode)
5557 : (((HOST_WIDE_INT) 1 << len) - 1) << orig_pos,
5558 NULL_RTX, 0);
5559 }
230d793d
RS
5560
5561 /* Adjust mode of POS_RTX, if needed. If we want a wider mode, we
5562 have to zero extend. Otherwise, we can just use a SUBREG. */
8999a12e 5563 if (pos_rtx != 0
230d793d
RS
5564 && GET_MODE_SIZE (pos_mode) > GET_MODE_SIZE (GET_MODE (pos_rtx)))
5565 pos_rtx = gen_rtx_combine (ZERO_EXTEND, pos_mode, pos_rtx);
8999a12e 5566 else if (pos_rtx != 0
230d793d
RS
5567 && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx)))
5568 pos_rtx = gen_lowpart_for_combine (pos_mode, pos_rtx);
5569
8999a12e
RK
5570 /* Make POS_RTX unless we already have it and it is correct. If we don't
5571 have a POS_RTX but we do have an ORIG_POS_RTX, the latter must
0f41302f 5572 be a CONST_INT. */
8999a12e
RK
5573 if (pos_rtx == 0 && orig_pos_rtx != 0 && INTVAL (orig_pos_rtx) == pos)
5574 pos_rtx = orig_pos_rtx;
5575
5576 else if (pos_rtx == 0)
5f4f0e22 5577 pos_rtx = GEN_INT (pos);
230d793d
RS
5578
5579 /* Make the required operation. See if we can use existing rtx. */
5580 new = gen_rtx_combine (unsignedp ? ZERO_EXTRACT : SIGN_EXTRACT,
5f4f0e22 5581 extraction_mode, inner, GEN_INT (len), pos_rtx);
230d793d
RS
5582 if (! in_dest)
5583 new = gen_lowpart_for_combine (mode, new);
5584
5585 return new;
5586}
5587\f
71923da7
RK
5588/* See if X contains an ASHIFT of COUNT or more bits that can be commuted
5589 with any other operations in X. Return X without that shift if so. */
5590
5591static rtx
5592extract_left_shift (x, count)
5593 rtx x;
5594 int count;
5595{
5596 enum rtx_code code = GET_CODE (x);
5597 enum machine_mode mode = GET_MODE (x);
5598 rtx tem;
5599
5600 switch (code)
5601 {
5602 case ASHIFT:
5603 /* This is the shift itself. If it is wide enough, we will return
5604 either the value being shifted if the shift count is equal to
5605 COUNT or a shift for the difference. */
5606 if (GET_CODE (XEXP (x, 1)) == CONST_INT
5607 && INTVAL (XEXP (x, 1)) >= count)
5608 return simplify_shift_const (NULL_RTX, ASHIFT, mode, XEXP (x, 0),
5609 INTVAL (XEXP (x, 1)) - count);
5610 break;
5611
5612 case NEG: case NOT:
5613 if ((tem = extract_left_shift (XEXP (x, 0), count)) != 0)
0c1c8ea6 5614 return gen_unary (code, mode, mode, tem);
71923da7
RK
5615
5616 break;
5617
5618 case PLUS: case IOR: case XOR: case AND:
5619 /* If we can safely shift this constant and we find the inner shift,
5620 make a new operation. */
 5621	      if (GET_CODE (XEXP (x, 1)) == CONST_INT
 5622	  && (INTVAL (XEXP (x, 1)) & (((HOST_WIDE_INT) 1 << count) - 1)) == 0
5623 && (tem = extract_left_shift (XEXP (x, 0), count)) != 0)
5624 return gen_binary (code, mode, tem,
5625 GEN_INT (INTVAL (XEXP (x, 1)) >> count));
5626
5627 break;
5628 }
5629
5630 return 0;
5631}
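/* Editorial sketch, not part of combine.c: commuting a constant past
   the left shift, as extract_left_shift does.  When the low COUNT bits
   of K are zero, (x << COUNT) + K == (x + (K >> COUNT)) << COUNT, and
   similarly for IOR, XOR and AND; for NEG and NOT the identity holds
   in the bits that survive the enclosing right shift.  The constants
   are hypothetical. */

static int
demo_commute_shift (int x)
{
  return (x + 3) << 4;		/* == (x << 4) + 0x30; 0x30 has its
				   low 4 bits clear */
}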
5632\f
230d793d
RS
5633/* Look at the expression rooted at X. Look for expressions
5634 equivalent to ZERO_EXTRACT, SIGN_EXTRACT, ZERO_EXTEND, SIGN_EXTEND.
5635 Form these expressions.
5636
5637 Return the new rtx, usually just X.
5638
5639 Also, for machines like the Vax that don't have logical shift insns,
5640 try to convert logical to arithmetic shift operations in cases where
5641 they are equivalent. This undoes the canonicalizations to logical
5642 shifts done elsewhere.
5643
5644 We try, as much as possible, to re-use rtl expressions to save memory.
5645
5646 IN_CODE says what kind of expression we are processing. Normally, it is
42495ca0
RK
 5647	 SET.  In a memory address (inside a MEM, PLUS or MINUS, the latter two
5648 being kludges), it is MEM. When processing the arguments of a comparison
230d793d
RS
5649 or a COMPARE against zero, it is COMPARE. */
5650
5651static rtx
5652make_compound_operation (x, in_code)
5653 rtx x;
5654 enum rtx_code in_code;
5655{
5656 enum rtx_code code = GET_CODE (x);
5657 enum machine_mode mode = GET_MODE (x);
5658 int mode_width = GET_MODE_BITSIZE (mode);
71923da7 5659 rtx rhs, lhs;
230d793d 5660 enum rtx_code next_code;
f24ad0e4 5661 int i;
230d793d 5662 rtx new = 0;
280f58ba 5663 rtx tem;
230d793d
RS
5664 char *fmt;
5665
5666 /* Select the code to be used in recursive calls. Once we are inside an
5667 address, we stay there. If we have a comparison, set to COMPARE,
5668 but once inside, go back to our default of SET. */
5669
42495ca0 5670 next_code = (code == MEM || code == PLUS || code == MINUS ? MEM
230d793d
RS
5671 : ((code == COMPARE || GET_RTX_CLASS (code) == '<')
5672 && XEXP (x, 1) == const0_rtx) ? COMPARE
5673 : in_code == COMPARE ? SET : in_code);
5674
5675 /* Process depending on the code of this operation. If NEW is set
5676 non-zero, it will be returned. */
5677
5678 switch (code)
5679 {
5680 case ASHIFT:
230d793d
RS
5681 /* Convert shifts by constants into multiplications if inside
5682 an address. */
5683 if (in_code == MEM && GET_CODE (XEXP (x, 1)) == CONST_INT
5f4f0e22 5684 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
230d793d 5685 && INTVAL (XEXP (x, 1)) >= 0)
280f58ba
RK
5686 {
5687 new = make_compound_operation (XEXP (x, 0), next_code);
5688 new = gen_rtx_combine (MULT, mode, new,
5689 GEN_INT ((HOST_WIDE_INT) 1
5690 << INTVAL (XEXP (x, 1))));
5691 }
230d793d
RS
5692 break;
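/* Editorial sketch, not part of combine.c: why the shift becomes a
   multiply inside an address.  Address arithmetic is canonicalized on
   MULT, so (ashift i 2) is rewritten (mult i 4); that is the form
   scaled-index addressing modes match.  demo_* is hypothetical. */

static int
demo_scaled_index (int *base, long i)
{
  return base[i];		/* address is base + i * 4 with 4-byte
				   ints, i.e. (mult i 4), not i << 2 */
}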
5693
5694 case AND:
5695 /* If the second operand is not a constant, we can't do anything
5696 with it. */
5697 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5698 break;
5699
5700 /* If the constant is a power of two minus one and the first operand
5701 is a logical right shift, make an extraction. */
5702 if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
5703 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
280f58ba
RK
5704 {
5705 new = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code);
5706 new = make_extraction (mode, new, 0, XEXP (XEXP (x, 0), 1), i, 1,
5707 0, in_code == COMPARE);
5708 }
dfbe1b2f 5709
230d793d
RS
5710 /* Same as previous, but for (subreg (lshiftrt ...)) in first op. */
5711 else if (GET_CODE (XEXP (x, 0)) == SUBREG
5712 && subreg_lowpart_p (XEXP (x, 0))
5713 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == LSHIFTRT
5714 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
280f58ba
RK
5715 {
5716 new = make_compound_operation (XEXP (SUBREG_REG (XEXP (x, 0)), 0),
5717 next_code);
2f99f437 5718 new = make_extraction (GET_MODE (SUBREG_REG (XEXP (x, 0))), new, 0,
280f58ba
RK
5719 XEXP (SUBREG_REG (XEXP (x, 0)), 1), i, 1,
5720 0, in_code == COMPARE);
5721 }
45620ed4 5722 /* Same as previous, but for (xor/ior (lshiftrt...) (lshiftrt...)). */
c2f9f64e
JW
5723 else if ((GET_CODE (XEXP (x, 0)) == XOR
5724 || GET_CODE (XEXP (x, 0)) == IOR)
5725 && GET_CODE (XEXP (XEXP (x, 0), 0)) == LSHIFTRT
5726 && GET_CODE (XEXP (XEXP (x, 0), 1)) == LSHIFTRT
5727 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
5728 {
5729 /* Apply the distributive law, and then try to make extractions. */
5730 new = gen_rtx_combine (GET_CODE (XEXP (x, 0)), mode,
5731 gen_rtx (AND, mode, XEXP (XEXP (x, 0), 0),
5732 XEXP (x, 1)),
5733 gen_rtx (AND, mode, XEXP (XEXP (x, 0), 1),
5734 XEXP (x, 1)));
5735 new = make_compound_operation (new, in_code);
5736 }
a7c99304
RK
5737
 5738	    /* If we have (and (rotate X C) M) and C is larger than the number
5739 of bits in M, this is an extraction. */
5740
5741 else if (GET_CODE (XEXP (x, 0)) == ROTATE
5742 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
5743 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0
5744 && i <= INTVAL (XEXP (XEXP (x, 0), 1)))
280f58ba
RK
5745 {
5746 new = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code);
5747 new = make_extraction (mode, new,
5748 (GET_MODE_BITSIZE (mode)
5749 - INTVAL (XEXP (XEXP (x, 0), 1))),
5750 NULL_RTX, i, 1, 0, in_code == COMPARE);
5751 }
a7c99304
RK
5752
5753 /* On machines without logical shifts, if the operand of the AND is
230d793d
RS
5754 a logical shift and our mask turns off all the propagated sign
5755 bits, we can replace the logical shift with an arithmetic shift. */
d0ab8cd3
RK
5756 else if (ashr_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
5757 && (lshr_optab->handlers[(int) mode].insn_code
5758 == CODE_FOR_nothing)
230d793d
RS
5759 && GET_CODE (XEXP (x, 0)) == LSHIFTRT
5760 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
5761 && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
5f4f0e22
CH
5762 && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT
5763 && mode_width <= HOST_BITS_PER_WIDE_INT)
230d793d 5764 {
5f4f0e22 5765 unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
230d793d
RS
5766
5767 mask >>= INTVAL (XEXP (XEXP (x, 0), 1));
5768 if ((INTVAL (XEXP (x, 1)) & ~mask) == 0)
5769 SUBST (XEXP (x, 0),
280f58ba
RK
5770 gen_rtx_combine (ASHIFTRT, mode,
5771 make_compound_operation (XEXP (XEXP (x, 0), 0),
5772 next_code),
230d793d
RS
5773 XEXP (XEXP (x, 0), 1)));
5774 }
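/* Editorial sketch, not part of combine.c: the mask test above, on a
   32-bit two's-complement int.  After x >> 3 the sign can have
   propagated only into the top 3 bits; a mask that clears them anyway
   makes the logical and arithmetic shifts interchangeable, so the
   shift the machine actually has can be used.  demo_* is
   hypothetical. */

static int
demo_shift_kind (int x)
{
  return (x >> 3) & 0x00ffffff;	/* == (int) (((unsigned) x >> 3)
					    & 0x00ffffff) */
}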
5775
5776 /* If the constant is one less than a power of two, this might be
5777 representable by an extraction even if no shift is present.
5778 If it doesn't end up being a ZERO_EXTEND, we will ignore it unless
5779 we are in a COMPARE. */
5780 else if ((i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
280f58ba
RK
5781 new = make_extraction (mode,
5782 make_compound_operation (XEXP (x, 0),
5783 next_code),
5784 0, NULL_RTX, i, 1, 0, in_code == COMPARE);
230d793d
RS
5785
5786 /* If we are in a comparison and this is an AND with a power of two,
5787 convert this into the appropriate bit extract. */
5788 else if (in_code == COMPARE
5789 && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0)
280f58ba
RK
5790 new = make_extraction (mode,
5791 make_compound_operation (XEXP (x, 0),
5792 next_code),
5793 i, NULL_RTX, 1, 1, 0, 1);
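
      /* For example, a constant of 15 (one less than 2**4) selects the
         low four bits, so (and X 15) can become a four-bit ZERO_EXTRACT
         at position 0; inside a COMPARE, (and X 8) becomes a single-bit
         extract at position 3. */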

      break;

    case LSHIFTRT:
      /* If the sign bit is known to be zero, replace this with an
         arithmetic shift. */
      if (ashr_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing
          && lshr_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
          && mode_width <= HOST_BITS_PER_WIDE_INT
          && (nonzero_bits (XEXP (x, 0), mode) & (1 << (mode_width - 1))) == 0)
        {
          new = gen_rtx_combine (ASHIFTRT, mode,
                                 make_compound_operation (XEXP (x, 0),
                                                          next_code),
                                 XEXP (x, 1));
          break;
        }

      /* ... fall through ... */

    case ASHIFTRT:
      lhs = XEXP (x, 0);
      rhs = XEXP (x, 1);

      /* If we have (ashiftrt (ashift foo C1) C2) with C2 >= C1,
         this is a SIGN_EXTRACT. */
      if (GET_CODE (rhs) == CONST_INT
          && GET_CODE (lhs) == ASHIFT
          && GET_CODE (XEXP (lhs, 1)) == CONST_INT
          && INTVAL (rhs) >= INTVAL (XEXP (lhs, 1)))
        {
          new = make_compound_operation (XEXP (lhs, 0), next_code);
          new = make_extraction (mode, new,
                                 INTVAL (rhs) - INTVAL (XEXP (lhs, 1)),
                                 NULL_RTX, mode_width - INTVAL (rhs),
                                 code == LSHIFTRT, 0, in_code == COMPARE);
        }

      /* See if we have operations between an ASHIFTRT and an ASHIFT.
         If so, try to merge the shifts into a SIGN_EXTEND.  We could
         also do this for some cases of SIGN_EXTRACT, but it doesn't
         seem worth the effort; the case checked for occurs on Alpha. */

      if (GET_RTX_CLASS (GET_CODE (lhs)) != 'o'
          && ! (GET_CODE (lhs) == SUBREG
                && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (lhs))) == 'o'))
          && GET_CODE (rhs) == CONST_INT
          && INTVAL (rhs) < HOST_BITS_PER_WIDE_INT
          && (new = extract_left_shift (lhs, INTVAL (rhs))) != 0)
        new = make_extraction (mode, make_compound_operation (new, next_code),
                               0, NULL_RTX, mode_width - INTVAL (rhs),
                               code == LSHIFTRT, 0, in_code == COMPARE);

      break;

    case SUBREG:
      /* Call ourselves recursively on the inner expression.  If we are
         narrowing the object and it has a different RTL code from
         what it originally did, do this SUBREG as a force_to_mode. */

      tem = make_compound_operation (SUBREG_REG (x), in_code);
      if (GET_CODE (tem) != GET_CODE (SUBREG_REG (x))
          && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (tem))
          && subreg_lowpart_p (x))
        {
          rtx newer = force_to_mode (tem, mode,
                                     GET_MODE_MASK (mode), NULL_RTX, 0);

          /* If we have something other than a SUBREG, we might have
             done an expansion, so rerun ourselves. */
          if (GET_CODE (newer) != SUBREG)
            newer = make_compound_operation (newer, in_code);

          return newer;
        }
    }

  if (new)
    {
      x = gen_lowpart_for_combine (mode, new);
      code = GET_CODE (x);
    }

  /* Now recursively process each operand of this operation. */
  fmt = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    if (fmt[i] == 'e')
      {
        new = make_compound_operation (XEXP (x, i), next_code);
        SUBST (XEXP (x, i), new);
      }

  return x;
}
\f
/* Given M see if it is a value that would select a field of bits
   within an item, but not the entire word.  Return -1 if not.
   Otherwise, return the starting position of the field, where 0 is the
   low-order bit.

   *PLEN is set to the length of the field. */
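
/* For example, M == 0x78 (binary 0111 1000) selects a four-bit field:
   the lowest set bit is bit 3 and (M >> 3) + 1 == 0x10 is a power of
   two, so the return value is 3 and *PLEN is set to 4.  A mask such as
   0x5, whose set bits are not contiguous, yields -1. */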

static int
get_pos_from_mask (m, plen)
     unsigned HOST_WIDE_INT m;
     int *plen;
{
  /* Get the bit number of the first 1 bit from the right, -1 if none. */
  int pos = exact_log2 (m & - m);

  if (pos < 0)
    return -1;

  /* Now shift off the low-order zero bits and see if we have a power of
     two minus 1. */
  *plen = exact_log2 ((m >> pos) + 1);

  if (*plen <= 0)
    return -1;

  return pos;
}
\f
/* See if X can be simplified knowing that we will only refer to it in
   MODE and will only refer to those bits that are nonzero in MASK.
   If other bits are being computed or if masking operations are done
   that select a superset of the bits in MASK, they can sometimes be
   ignored.

   Return a possibly simplified expression, but always convert X to
   MODE.  If X is a CONST_INT, AND the CONST_INT with MASK.

   Also, if REG is non-zero and X is a register equal in value to REG,
   replace X with REG.

   If JUST_SELECT is nonzero, don't optimize by noticing that bits in MASK
   are all off in X.  This is used when X will be complemented, by either
   NOT, NEG, or XOR. */
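
/* For example, when called with MASK == 15, only the low four bits of
   the result will ever be looked at, so (and X 255) can be simplified
   to just X: the AND only clears bits the caller has promised never
   to use. */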

static rtx
force_to_mode (x, mode, mask, reg, just_select)
     rtx x;
     enum machine_mode mode;
     unsigned HOST_WIDE_INT mask;
     rtx reg;
     int just_select;
{
  enum rtx_code code = GET_CODE (x);
  int next_select = just_select || code == XOR || code == NOT || code == NEG;
  enum machine_mode op_mode;
  unsigned HOST_WIDE_INT fuller_mask, nonzero;
  rtx op0, op1, temp;

  /* If this is a CALL, don't do anything.  Some of the code below
     will do the wrong thing since the mode of a CALL is VOIDmode. */
  if (code == CALL)
    return x;

  /* We want to perform the operation in its present mode unless we know
     that the operation is valid in MODE, in which case we do the operation
     in MODE. */
  op_mode = ((GET_MODE_CLASS (mode) == GET_MODE_CLASS (GET_MODE (x))
              && code_to_optab[(int) code] != 0
              && (code_to_optab[(int) code]->handlers[(int) mode].insn_code
                  != CODE_FOR_nothing))
             ? mode : GET_MODE (x));

  /* It is not valid to do a right-shift in a narrower mode
     than the one it came in with. */
  if ((code == LSHIFTRT || code == ASHIFTRT)
      && GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (GET_MODE (x)))
    op_mode = GET_MODE (x);

  /* Truncate MASK to fit OP_MODE. */
  if (op_mode)
    mask &= GET_MODE_MASK (op_mode);

  /* When we have an arithmetic operation, or a shift whose count we
     do not know, we need to assume that all bits up to the highest-order
     bit in MASK will be needed.  This is how we form such a mask. */
  if (op_mode)
    fuller_mask = (GET_MODE_BITSIZE (op_mode) >= HOST_BITS_PER_WIDE_INT
                   ? GET_MODE_MASK (op_mode)
                   : ((HOST_WIDE_INT) 1 << (floor_log2 (mask) + 1)) - 1);
  else
    fuller_mask = ~ (HOST_WIDE_INT) 0;

  /* Determine what bits of X are guaranteed to be (non)zero. */
  nonzero = nonzero_bits (x, mode);

  /* If none of the bits in X are needed, return a zero. */
  if (! just_select && (nonzero & mask) == 0)
    return const0_rtx;

  /* If X is a CONST_INT, return a new one.  Do this here since the
     test below will fail. */
  if (GET_CODE (x) == CONST_INT)
    {
      HOST_WIDE_INT cval = INTVAL (x) & mask;
      int width = GET_MODE_BITSIZE (mode);

      /* If MODE is narrower than HOST_WIDE_INT and CVAL is a negative
         number, sign extend it. */
      if (width > 0 && width < HOST_BITS_PER_WIDE_INT
          && (cval & ((HOST_WIDE_INT) 1 << (width - 1))) != 0)
        cval |= (HOST_WIDE_INT) -1 << width;

      return GEN_INT (cval);
    }

  /* If X is narrower than MODE and we want all the bits in X's mode, just
     get X in the proper mode. */
  if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (mode)
      && (GET_MODE_MASK (GET_MODE (x)) & ~ mask) == 0)
    return gen_lowpart_for_combine (mode, x);

  /* If we aren't changing the mode, X is not a SUBREG, and all zero bits in
     MASK are already known to be zero in X, we need not do anything. */
  if (GET_MODE (x) == mode && code != SUBREG && (~ mask & nonzero) == 0)
    return x;

  switch (code)
    {
    case CLOBBER:
      /* If X is a (clobber (const_int)), return it since we know we are
         generating something that won't match. */
      return x;

    case USE:
      /* X is a (use (mem ..)) that was made from a bit-field extraction that
         spanned the boundary of the MEM.  If we are now masking so it is
         within that boundary, we don't need the USE any more. */
      if (! BITS_BIG_ENDIAN
          && (mask & ~ GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
        return force_to_mode (XEXP (x, 0), mode, mask, reg, next_select);
      break;

    case SIGN_EXTEND:
    case ZERO_EXTEND:
    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      x = expand_compound_operation (x);
      if (GET_CODE (x) != code)
        return force_to_mode (x, mode, mask, reg, next_select);
      break;

    case REG:
      if (reg != 0 && (rtx_equal_p (get_last_value (reg), x)
                       || rtx_equal_p (reg, get_last_value (x))))
        x = reg;
      break;

    case SUBREG:
      if (subreg_lowpart_p (x)
          /* We can ignore the effect of this SUBREG if it narrows the mode or
             if the constant masks to zero all the bits the mode doesn't
             have. */
          && ((GET_MODE_SIZE (GET_MODE (x))
               < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
              || (0 == (mask
                        & GET_MODE_MASK (GET_MODE (x))
                        & ~ GET_MODE_MASK (GET_MODE (SUBREG_REG (x)))))))
        return force_to_mode (SUBREG_REG (x), mode, mask, reg, next_select);
      break;

    case AND:
      /* If this is an AND with a constant, convert it into an AND
         whose constant is the AND of that constant with MASK.  If it
         remains an AND of MASK, delete it since it is redundant. */

      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
        {
          x = simplify_and_const_int (x, op_mode, XEXP (x, 0),
                                      mask & INTVAL (XEXP (x, 1)));

          /* If X is still an AND, see if it is an AND with a mask that
             is just some low-order bits.  If so, and it is MASK, we don't
             need it. */

          if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT
              && INTVAL (XEXP (x, 1)) == mask)
            x = XEXP (x, 0);

          /* If it remains an AND, try making another AND with the bits
             in the mode mask that aren't in MASK turned on.  If the
             constant in the AND is wide enough, this might make a
             cheaper constant. */

          if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT
              && GET_MODE_MASK (GET_MODE (x)) != mask
              && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT)
            {
              HOST_WIDE_INT cval = (INTVAL (XEXP (x, 1))
                                    | (GET_MODE_MASK (GET_MODE (x)) & ~ mask));
              int width = GET_MODE_BITSIZE (GET_MODE (x));
              rtx y;

              /* If MODE is narrower than HOST_WIDE_INT and CVAL is a negative
                 number, sign extend it. */
              if (width > 0 && width < HOST_BITS_PER_WIDE_INT
                  && (cval & ((HOST_WIDE_INT) 1 << (width - 1))) != 0)
                cval |= (HOST_WIDE_INT) -1 << width;

              y = gen_binary (AND, GET_MODE (x), XEXP (x, 0), GEN_INT (cval));
              if (rtx_cost (y, SET) < rtx_cost (x, SET))
                x = y;
            }

          break;
        }

      goto binop;

    case PLUS:
      /* In (and (plus FOO C1) M), if M is a mask that just turns off
         low-order bits (as in an alignment operation) and FOO is already
         aligned to that boundary, mask C1 to that boundary as well.
         This may eliminate that PLUS and, later, the AND. */

      {
        int width = GET_MODE_BITSIZE (mode);
        unsigned HOST_WIDE_INT smask = mask;

        /* If MODE is narrower than HOST_WIDE_INT and mask is a negative
           number, sign extend it. */

        if (width < HOST_BITS_PER_WIDE_INT
            && (smask & ((HOST_WIDE_INT) 1 << (width - 1))) != 0)
          smask |= (HOST_WIDE_INT) -1 << width;

        if (GET_CODE (XEXP (x, 1)) == CONST_INT
            && exact_log2 (- smask) >= 0
            && (nonzero_bits (XEXP (x, 0), mode) & ~ mask) == 0
            && (INTVAL (XEXP (x, 1)) & ~ mask) != 0)
          return force_to_mode (plus_constant (XEXP (x, 0),
                                               INTVAL (XEXP (x, 1)) & mask),
                                mode, mask, reg, next_select);
      }
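
      /* For example, with MASK == -16 and FOO known to be 16-byte
         aligned, (and (plus FOO 20) -16) masks the constant down to 16;
         the AND can then be dropped entirely, since (plus FOO 16)
         already has its low four bits clear. */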

      /* ... fall through ... */

    case MINUS:
    case MULT:
      /* For PLUS, MINUS and MULT, we need any bits less significant than the
         most significant bit in MASK since carries from those bits will
         affect the bits we are interested in. */
      mask = fuller_mask;
      goto binop;

    case IOR:
    case XOR:
      /* If X is (ior (lshiftrt FOO C1) C2), try to commute the IOR and
         LSHIFTRT so we end up with an (and (lshiftrt (ior ...) ...) ...)
         operation which may be a bitfield extraction.  Ensure that the
         constant we form is not wider than the mode of X. */

      if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
          && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
          && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
          && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT
          && GET_CODE (XEXP (x, 1)) == CONST_INT
          && ((INTVAL (XEXP (XEXP (x, 0), 1))
               + floor_log2 (INTVAL (XEXP (x, 1))))
              < GET_MODE_BITSIZE (GET_MODE (x)))
          && (INTVAL (XEXP (x, 1))
              & ~ nonzero_bits (XEXP (x, 0), GET_MODE (x))) == 0)
        {
          temp = GEN_INT ((INTVAL (XEXP (x, 1)) & mask)
                          << INTVAL (XEXP (XEXP (x, 0), 1)));
          temp = gen_binary (GET_CODE (x), GET_MODE (x),
                             XEXP (XEXP (x, 0), 0), temp);
          x = gen_binary (LSHIFTRT, GET_MODE (x), temp,
                          XEXP (XEXP (x, 0), 1));
          return force_to_mode (x, mode, mask, reg, next_select);
        }
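
      /* For example, when MASK includes the low-order bit,
         (ior (lshiftrt FOO 8) 1) can be commuted to
         (lshiftrt (ior FOO 256) 8), which the AND case above may then
         turn into a bit-field extraction. */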

    binop:
      /* For most binary operations, just propagate into the operation and
         change the mode if we have an operation of that mode. */

      op0 = gen_lowpart_for_combine (op_mode,
                                     force_to_mode (XEXP (x, 0), mode, mask,
                                                    reg, next_select));
      op1 = gen_lowpart_for_combine (op_mode,
                                     force_to_mode (XEXP (x, 1), mode, mask,
                                                    reg, next_select));

      /* If OP1 is a CONST_INT and X is an IOR or XOR, clear bits outside
         MASK since OP1 might have been sign-extended but we never want
         to turn on extra bits, since combine might have previously relied
         on them being off. */
      if (GET_CODE (op1) == CONST_INT && (code == IOR || code == XOR)
          && (INTVAL (op1) & mask) != 0)
        op1 = GEN_INT (INTVAL (op1) & mask);

      if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
        x = gen_binary (code, op_mode, op0, op1);
      break;

    case ASHIFT:
      /* For left shifts, do the same, but just for the first operand.
         However, we cannot do anything with shifts where we cannot
         guarantee that the counts are smaller than the size of the mode
         because such a count will have a different meaning in a
         wider mode. */

      if (! (GET_CODE (XEXP (x, 1)) == CONST_INT
             && INTVAL (XEXP (x, 1)) >= 0
             && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (mode))
          && ! (GET_MODE (XEXP (x, 1)) != VOIDmode
                && (nonzero_bits (XEXP (x, 1), GET_MODE (XEXP (x, 1)))
                    < (unsigned HOST_WIDE_INT) GET_MODE_BITSIZE (mode))))
        break;

      /* If the shift count is a constant and we can do arithmetic in
         the mode of the shift, refine which bits we need.  Otherwise, use the
         conservative form of the mask. */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
          && INTVAL (XEXP (x, 1)) >= 0
          && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (op_mode)
          && GET_MODE_BITSIZE (op_mode) <= HOST_BITS_PER_WIDE_INT)
        mask >>= INTVAL (XEXP (x, 1));
      else
        mask = fuller_mask;

      op0 = gen_lowpart_for_combine (op_mode,
                                     force_to_mode (XEXP (x, 0), op_mode,
                                                    mask, reg, next_select));

      if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0))
        x = gen_binary (code, op_mode, op0, XEXP (x, 1));
      break;

    case LSHIFTRT:
      /* Here we can only do something if the shift count is a constant,
         this shift constant is valid for the host, and we can do arithmetic
         in OP_MODE. */

      if (GET_CODE (XEXP (x, 1)) == CONST_INT
          && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
          && GET_MODE_BITSIZE (op_mode) <= HOST_BITS_PER_WIDE_INT)
        {
          rtx inner = XEXP (x, 0);

          /* Select the mask of the bits we need for the shift operand. */
          mask <<= INTVAL (XEXP (x, 1));

          /* We can only change the mode of the shift if we can do arithmetic
             in the mode of the shift and MASK is no wider than the width of
             OP_MODE. */
          if (GET_MODE_BITSIZE (op_mode) > HOST_BITS_PER_WIDE_INT
              || (mask & ~ GET_MODE_MASK (op_mode)) != 0)
            op_mode = GET_MODE (x);

          inner = force_to_mode (inner, op_mode, mask, reg, next_select);

          if (GET_MODE (x) != op_mode || inner != XEXP (x, 0))
            x = gen_binary (LSHIFTRT, op_mode, inner, XEXP (x, 1));
        }

      /* If we have (and (lshiftrt FOO C1) C2) where the combination of the
         shift and AND produces only copies of the sign bit (C2 is one less
         than a power of two), we can do this with just a shift. */

      if (GET_CODE (x) == LSHIFTRT
          && GET_CODE (XEXP (x, 1)) == CONST_INT
          && ((INTVAL (XEXP (x, 1))
               + num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0))))
              >= GET_MODE_BITSIZE (GET_MODE (x)))
          && exact_log2 (mask + 1) >= 0
          && (num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
              >= exact_log2 (mask + 1)))
        x = gen_binary (LSHIFTRT, GET_MODE (x), XEXP (x, 0),
                        GEN_INT (GET_MODE_BITSIZE (GET_MODE (x))
                                 - exact_log2 (mask + 1)));
      break;

    case ASHIFTRT:
      /* If we are just looking for the sign bit, we don't need this shift at
         all, even if it has a variable count. */
      if (GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
          && (mask == ((HOST_WIDE_INT) 1
                       << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
        return force_to_mode (XEXP (x, 0), mode, mask, reg, next_select);
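
      /* For example, in SImode with MASK == 0x80000000 only the sign
         bit is wanted, and an arithmetic right shift never changes the
         sign bit, so (ashiftrt FOO N) can be replaced by FOO itself. */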

      /* If this is a shift by a constant, get a mask that contains those bits
         that are not copies of the sign bit.  We then have two cases:  If
         MASK only includes those bits, this can be a logical shift, which may
         allow simplifications.  If MASK is a single-bit field not within
         those bits, we are requesting a copy of the sign bit and hence can
         shift the sign bit to the appropriate location. */

      if (GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) >= 0
          && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
        {
          int i = -1;

          /* If the considered data is wider than HOST_WIDE_INT, we can't
             represent a mask for all its bits in a single scalar.
             But we only care about the lower bits, so calculate these. */

          if (GET_MODE_BITSIZE (GET_MODE (x)) > HOST_BITS_PER_WIDE_INT)
            {
              nonzero = ~ (HOST_WIDE_INT) 0;

              /* GET_MODE_BITSIZE (GET_MODE (x)) - INTVAL (XEXP (x, 1))
                 is the number of bits a full-width mask would have set.
                 We need only shift if these are fewer than nonzero can
                 hold.  If not, we must keep all bits set in nonzero. */

              if (GET_MODE_BITSIZE (GET_MODE (x)) - INTVAL (XEXP (x, 1))
                  < HOST_BITS_PER_WIDE_INT)
                nonzero >>= INTVAL (XEXP (x, 1))
                            + HOST_BITS_PER_WIDE_INT
                            - GET_MODE_BITSIZE (GET_MODE (x));
            }
          else
            {
              nonzero = GET_MODE_MASK (GET_MODE (x));
              nonzero >>= INTVAL (XEXP (x, 1));
            }

          if ((mask & ~ nonzero) == 0
              || (i = exact_log2 (mask)) >= 0)
            {
              x = simplify_shift_const
                (x, LSHIFTRT, GET_MODE (x), XEXP (x, 0),
                 i < 0 ? INTVAL (XEXP (x, 1))
                 : GET_MODE_BITSIZE (GET_MODE (x)) - 1 - i);

              if (GET_CODE (x) != ASHIFTRT)
                return force_to_mode (x, mode, mask, reg, next_select);
            }
        }

      /* If MASK is 1, convert this to a LSHIFTRT.  This can be done
         even if the shift count isn't a constant. */
      if (mask == 1)
        x = gen_binary (LSHIFTRT, GET_MODE (x), XEXP (x, 0), XEXP (x, 1));

      /* If this is a sign-extension operation that just affects bits
         we don't care about, remove it.  Be sure the call above returned
         something that is still a shift. */

      if ((GET_CODE (x) == LSHIFTRT || GET_CODE (x) == ASHIFTRT)
          && GET_CODE (XEXP (x, 1)) == CONST_INT
          && INTVAL (XEXP (x, 1)) >= 0
          && (INTVAL (XEXP (x, 1))
              <= GET_MODE_BITSIZE (GET_MODE (x)) - (floor_log2 (mask) + 1))
          && GET_CODE (XEXP (x, 0)) == ASHIFT
          && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
          && INTVAL (XEXP (XEXP (x, 0), 1)) == INTVAL (XEXP (x, 1)))
        return force_to_mode (XEXP (XEXP (x, 0), 0), mode, mask,
                              reg, next_select);

      break;

    case ROTATE:
    case ROTATERT:
      /* If the shift count is constant and we can do computations
         in the mode of X, compute where the bits we care about are.
         Otherwise, we can't do anything.  Don't change the mode of
         the shift or propagate MODE into the shift, though. */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
          && INTVAL (XEXP (x, 1)) >= 0)
        {
          temp = simplify_binary_operation (code == ROTATE ? ROTATERT : ROTATE,
                                            GET_MODE (x), GEN_INT (mask),
                                            XEXP (x, 1));
          if (temp && GET_CODE (temp) == CONST_INT)
            SUBST (XEXP (x, 0),
                   force_to_mode (XEXP (x, 0), GET_MODE (x),
                                  INTVAL (temp), reg, next_select));
        }
      break;

    case NEG:
      /* If we just want the low-order bit, the NEG isn't needed since it
         won't change the low-order bit. */
      if (mask == 1)
        return force_to_mode (XEXP (x, 0), mode, mask, reg, just_select);

      /* We need any bits less significant than the most significant bit in
         MASK since carries from those bits will affect the bits we are
         interested in. */
      mask = fuller_mask;
      goto unop;

    case NOT:
      /* (not FOO) is (xor FOO CONST), so if FOO is an LSHIFTRT, we can do the
         same as the XOR case above.  Ensure that the constant we form is not
         wider than the mode of X. */

      if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
          && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
          && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
          && (INTVAL (XEXP (XEXP (x, 0), 1)) + floor_log2 (mask)
              < GET_MODE_BITSIZE (GET_MODE (x)))
          && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT)
        {
          temp = GEN_INT (mask << INTVAL (XEXP (XEXP (x, 0), 1)));
          temp = gen_binary (XOR, GET_MODE (x), XEXP (XEXP (x, 0), 0), temp);
          x = gen_binary (LSHIFTRT, GET_MODE (x), temp, XEXP (XEXP (x, 0), 1));

          return force_to_mode (x, mode, mask, reg, next_select);
        }

      /* (and (not FOO) CONST) is (not (or FOO (not CONST))), so we must
         use the full mask inside the NOT. */
      mask = fuller_mask;

    unop:
      op0 = gen_lowpart_for_combine (op_mode,
                                     force_to_mode (XEXP (x, 0), mode, mask,
                                                    reg, next_select));
      if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0))
        x = gen_unary (code, op_mode, op_mode, op0);
      break;

    case NE:
      /* (and (ne FOO 0) CONST) can be (and FOO CONST) if CONST is included
         in STORE_FLAG_VALUE and FOO has a single bit that might be nonzero,
         which is equal to STORE_FLAG_VALUE. */
      if ((mask & ~ STORE_FLAG_VALUE) == 0 && XEXP (x, 1) == const0_rtx
          && exact_log2 (nonzero_bits (XEXP (x, 0), mode)) >= 0
          && nonzero_bits (XEXP (x, 0), mode) == STORE_FLAG_VALUE)
        return force_to_mode (XEXP (x, 0), mode, mask, reg, next_select);
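
      /* For example, with STORE_FLAG_VALUE == 1 and MASK == 1,
         (and (ne FOO 0) 1) is just FOO when FOO is already known to be
         either 0 or 1: the comparison recomputes the value it tests. */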

      break;

    case IF_THEN_ELSE:
      /* We have no way of knowing if the IF_THEN_ELSE can itself be
         written in a narrower mode.  We play it safe and do not do so. */

      SUBST (XEXP (x, 1),
             gen_lowpart_for_combine (GET_MODE (x),
                                      force_to_mode (XEXP (x, 1), mode,
                                                     mask, reg, next_select)));
      SUBST (XEXP (x, 2),
             gen_lowpart_for_combine (GET_MODE (x),
                                      force_to_mode (XEXP (x, 2), mode,
                                                     mask, reg, next_select)));
      break;
    }

  /* Ensure we return a value of the proper mode. */
  return gen_lowpart_for_combine (mode, x);
}
\f
/* Return nonzero if X is an expression that has one of two values depending on
   whether some other value is zero or nonzero.  In that case, we return the
   value that is being tested, *PTRUE is set to the value if the rtx being
   returned has a nonzero value, and *PFALSE is set to the other alternative.

   If we return zero, we set *PTRUE and *PFALSE to X. */
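
/* For example, given (if_then_else (ne R 0) (const_int 5) (const_int 2))
   we return R, with *PTRUE set to (const_int 5) and *PFALSE set to
   (const_int 2). */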

static rtx
if_then_else_cond (x, ptrue, pfalse)
     rtx x;
     rtx *ptrue, *pfalse;
{
  enum machine_mode mode = GET_MODE (x);
  enum rtx_code code = GET_CODE (x);
  int size = GET_MODE_BITSIZE (mode);
  rtx cond0, cond1, true0, true1, false0, false1;
  unsigned HOST_WIDE_INT nz;

  /* If this is a unary operation whose operand has one of two values, apply
     our opcode to compute those values. */
  if (GET_RTX_CLASS (code) == '1'
      && (cond0 = if_then_else_cond (XEXP (x, 0), &true0, &false0)) != 0)
    {
      *ptrue = gen_unary (code, mode, GET_MODE (XEXP (x, 0)), true0);
      *pfalse = gen_unary (code, mode, GET_MODE (XEXP (x, 0)), false0);
      return cond0;
    }

  /* If this is a COMPARE, do nothing, since the IF_THEN_ELSE we would
     make can't possibly match and would suppress other optimizations. */
  else if (code == COMPARE)
    ;

  /* If this is a binary operation, see if either side has only one of two
     values.  If either one does or if both do and they are conditional on
     the same value, compute the new true and false values. */
  else if (GET_RTX_CLASS (code) == 'c' || GET_RTX_CLASS (code) == '2'
           || GET_RTX_CLASS (code) == '<')
    {
      cond0 = if_then_else_cond (XEXP (x, 0), &true0, &false0);
      cond1 = if_then_else_cond (XEXP (x, 1), &true1, &false1);

      if ((cond0 != 0 || cond1 != 0)
          && ! (cond0 != 0 && cond1 != 0 && ! rtx_equal_p (cond0, cond1)))
        {
          *ptrue = gen_binary (code, mode, true0, true1);
          *pfalse = gen_binary (code, mode, false0, false1);
          return cond0 ? cond0 : cond1;
        }

      /* See if we have PLUS, IOR, XOR, MINUS or UMAX, where one of the
         operands is zero when the other is non-zero, and vice-versa,
         and STORE_FLAG_VALUE is 1 or -1. */

      if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
          && (code == PLUS || code == IOR || code == XOR || code == MINUS
              || code == UMAX)
          && GET_CODE (XEXP (x, 0)) == MULT && GET_CODE (XEXP (x, 1)) == MULT)
        {
          rtx op0 = XEXP (XEXP (x, 0), 1);
          rtx op1 = XEXP (XEXP (x, 1), 1);

          cond0 = XEXP (XEXP (x, 0), 0);
          cond1 = XEXP (XEXP (x, 1), 0);

          if (GET_RTX_CLASS (GET_CODE (cond0)) == '<'
              && GET_RTX_CLASS (GET_CODE (cond1)) == '<'
              && reversible_comparison_p (cond1)
              && ((GET_CODE (cond0) == reverse_condition (GET_CODE (cond1))
                   && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 0))
                   && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 1)))
                  || ((swap_condition (GET_CODE (cond0))
                       == reverse_condition (GET_CODE (cond1)))
                      && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 1))
                      && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 0))))
              && ! side_effects_p (x))
            {
              *ptrue = gen_binary (MULT, mode, op0, const_true_rtx);
              *pfalse = gen_binary (MULT, mode,
                                    (code == MINUS
                                     ? gen_unary (NEG, mode, mode, op1) : op1),
                                    const_true_rtx);
              return cond0;
            }
        }

      /* Similarly for MULT, AND and UMIN, except that for these the result
         is always zero. */
      if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
          && (code == MULT || code == AND || code == UMIN)
          && GET_CODE (XEXP (x, 0)) == MULT && GET_CODE (XEXP (x, 1)) == MULT)
        {
          cond0 = XEXP (XEXP (x, 0), 0);
          cond1 = XEXP (XEXP (x, 1), 0);

          if (GET_RTX_CLASS (GET_CODE (cond0)) == '<'
              && GET_RTX_CLASS (GET_CODE (cond1)) == '<'
              && reversible_comparison_p (cond1)
              && ((GET_CODE (cond0) == reverse_condition (GET_CODE (cond1))
                   && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 0))
                   && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 1)))
                  || ((swap_condition (GET_CODE (cond0))
                       == reverse_condition (GET_CODE (cond1)))
                      && rtx_equal_p (XEXP (cond0, 0), XEXP (cond1, 1))
                      && rtx_equal_p (XEXP (cond0, 1), XEXP (cond1, 0))))
              && ! side_effects_p (x))
            {
              *ptrue = *pfalse = const0_rtx;
              return cond0;
            }
        }
    }

  else if (code == IF_THEN_ELSE)
    {
      /* If we have IF_THEN_ELSE already, extract the condition and
         canonicalize it if it is NE or EQ. */
      cond0 = XEXP (x, 0);
      *ptrue = XEXP (x, 1), *pfalse = XEXP (x, 2);
      if (GET_CODE (cond0) == NE && XEXP (cond0, 1) == const0_rtx)
        return XEXP (cond0, 0);
      else if (GET_CODE (cond0) == EQ && XEXP (cond0, 1) == const0_rtx)
        {
          *ptrue = XEXP (x, 2), *pfalse = XEXP (x, 1);
          return XEXP (cond0, 0);
        }
      else
        return cond0;
    }

  /* If X is a normal SUBREG with both inner and outer modes integral,
     we can narrow both the true and false values of the inner expression,
     if there is a condition. */
  else if (code == SUBREG && GET_MODE_CLASS (mode) == MODE_INT
           && GET_MODE_CLASS (GET_MODE (SUBREG_REG (x))) == MODE_INT
           && GET_MODE_SIZE (mode) <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
           && 0 != (cond0 = if_then_else_cond (SUBREG_REG (x),
                                               &true0, &false0)))
    {
      *ptrue = force_to_mode (true0, mode, GET_MODE_MASK (mode), NULL_RTX, 0);
      *pfalse
        = force_to_mode (false0, mode, GET_MODE_MASK (mode), NULL_RTX, 0);

      return cond0;
    }

  /* If X is a constant, this isn't special and will cause confusions
     if we treat it as such.  Likewise if it is equivalent to a constant. */
  else if (CONSTANT_P (x)
           || ((cond0 = get_last_value (x)) != 0 && CONSTANT_P (cond0)))
    ;

  /* If X is known to be either 0 or -1, those are the true and
     false values when testing X. */
  else if (num_sign_bit_copies (x, mode) == size)
    {
      *ptrue = constm1_rtx, *pfalse = const0_rtx;
      return x;
    }

  /* Likewise for 0 or a single bit. */
  else if (exact_log2 (nz = nonzero_bits (x, mode)) >= 0)
    {
      *ptrue = GEN_INT (nz), *pfalse = const0_rtx;
      return x;
    }

  /* Otherwise fail; show no condition with true and false values the same. */
  *ptrue = *pfalse = x;
  return 0;
}
\f
/* Return the value of expression X given the fact that condition COND
   is known to be true when applied to REG as its first operand and VAL
   as its second.  X is known to not be shared and so can be modified in
   place.

   We only handle the simplest cases, and specifically those cases that
   arise with IF_THEN_ELSE expressions. */
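
/* For example, if COND is GE, REG is R and VAL is (const_int 0), a
   subexpression (abs R) simplifies to R, while under LT it would
   become (neg R). */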

static rtx
known_cond (x, cond, reg, val)
     rtx x;
     enum rtx_code cond;
     rtx reg, val;
{
  enum rtx_code code = GET_CODE (x);
  rtx temp;
  char *fmt;
  int i, j;

  if (side_effects_p (x))
    return x;

  if (cond == EQ && rtx_equal_p (x, reg))
    return val;

  /* If X is (abs REG) and we know something about REG's relationship
     with zero, we may be able to simplify this. */

  if (code == ABS && rtx_equal_p (XEXP (x, 0), reg) && val == const0_rtx)
    switch (cond)
      {
      case GE: case GT: case EQ:
        return XEXP (x, 0);
      case LT: case LE:
        return gen_unary (NEG, GET_MODE (XEXP (x, 0)), GET_MODE (XEXP (x, 0)),
                          XEXP (x, 0));
      }

  /* The only other cases we handle are MIN, MAX, and comparisons if the
     operands are the same as REG and VAL. */

  else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == 'c')
    {
      if (rtx_equal_p (XEXP (x, 0), val))
        cond = swap_condition (cond), temp = val, val = reg, reg = temp;

      if (rtx_equal_p (XEXP (x, 0), reg) && rtx_equal_p (XEXP (x, 1), val))
        {
          if (GET_RTX_CLASS (code) == '<')
            return (comparison_dominates_p (cond, code) ? const_true_rtx
                    : (comparison_dominates_p (cond,
                                               reverse_condition (code))
                       ? const0_rtx : x));

          else if (code == SMAX || code == SMIN
                   || code == UMIN || code == UMAX)
            {
              int unsignedp = (code == UMIN || code == UMAX);

              if (code == SMAX || code == UMAX)
                cond = reverse_condition (cond);

              switch (cond)
                {
                case GE: case GT:
                  return unsignedp ? x : XEXP (x, 1);
                case LE: case LT:
                  return unsignedp ? x : XEXP (x, 0);
                case GEU: case GTU:
                  return unsignedp ? XEXP (x, 1) : x;
                case LEU: case LTU:
                  return unsignedp ? XEXP (x, 0) : x;
                }
            }
        }
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        SUBST (XEXP (x, i), known_cond (XEXP (x, i), cond, reg, val));
      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          SUBST (XVECEXP (x, i, j), known_cond (XVECEXP (x, i, j),
                                                cond, reg, val));
    }

  return x;
}
\f
/* See if X and Y are equal for the purposes of seeing if we can rewrite an
   assignment as a field assignment. */

static int
rtx_equal_for_field_assignment_p (x, y)
     rtx x;
     rtx y;
{
  rtx last_x, last_y;

  if (x == y || rtx_equal_p (x, y))
    return 1;

  if (x == 0 || y == 0 || GET_MODE (x) != GET_MODE (y))
    return 0;

  /* Check for a paradoxical SUBREG of a MEM compared with the MEM.
     Note that all SUBREGs of MEM are paradoxical; otherwise they
     would have been rewritten. */
  if (GET_CODE (x) == MEM && GET_CODE (y) == SUBREG
      && GET_CODE (SUBREG_REG (y)) == MEM
      && rtx_equal_p (SUBREG_REG (y),
                      gen_lowpart_for_combine (GET_MODE (SUBREG_REG (y)), x)))
    return 1;

  if (GET_CODE (y) == MEM && GET_CODE (x) == SUBREG
      && GET_CODE (SUBREG_REG (x)) == MEM
      && rtx_equal_p (SUBREG_REG (x),
                      gen_lowpart_for_combine (GET_MODE (SUBREG_REG (x)), y)))
    return 1;

  last_x = get_last_value (x);
  last_y = get_last_value (y);

  return ((last_x != 0
           && GET_CODE (last_x) != CLOBBER
           && rtx_equal_for_field_assignment_p (last_x, y))
          || (last_y != 0
              && GET_CODE (last_y) != CLOBBER
              && rtx_equal_for_field_assignment_p (x, last_y))
          || (last_x != 0 && last_y != 0
              && GET_CODE (last_x) != CLOBBER
              && GET_CODE (last_y) != CLOBBER
              && rtx_equal_for_field_assignment_p (last_x, last_y)));
}
\f
/* See if X, a SET operation, can be rewritten as a bit-field assignment.
   Return that assignment if so.

   We only handle the most common cases. */

static rtx
make_field_assignment (x)
     rtx x;
{
  rtx dest = SET_DEST (x);
  rtx src = SET_SRC (x);
  rtx assign;
  rtx rhs, lhs;
  HOST_WIDE_INT c1;
  int pos, len;
  rtx other;
  enum machine_mode mode;

  /* If SRC was (and (not (ashift (const_int 1) POS)) DEST), this is
     a clear of a one-bit field.  We will have changed it to
     (and (rotate (const_int -2) POS) DEST), so check for that.  Also check
     for a SUBREG. */

  if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == ROTATE
      && GET_CODE (XEXP (XEXP (src, 0), 0)) == CONST_INT
      && INTVAL (XEXP (XEXP (src, 0), 0)) == -2
      && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
    {
      assign = make_extraction (VOIDmode, dest, 0, XEXP (XEXP (src, 0), 1),
                                1, 1, 1, 0);
      if (assign != 0)
        return gen_rtx (SET, VOIDmode, assign, const0_rtx);
      return x;
    }

  else if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == SUBREG
           && subreg_lowpart_p (XEXP (src, 0))
           && (GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
               < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (src, 0)))))
           && GET_CODE (SUBREG_REG (XEXP (src, 0))) == ROTATE
           && INTVAL (XEXP (SUBREG_REG (XEXP (src, 0)), 0)) == -2
           && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
    {
      assign = make_extraction (VOIDmode, dest, 0,
                                XEXP (SUBREG_REG (XEXP (src, 0)), 1),
                                1, 1, 1, 0);
      if (assign != 0)
        return gen_rtx (SET, VOIDmode, assign, const0_rtx);
      return x;
    }

  /* If SRC is (ior (ashift (const_int 1) POS) DEST), this is a set of a
     one-bit field. */
  else if (GET_CODE (src) == IOR && GET_CODE (XEXP (src, 0)) == ASHIFT
           && XEXP (XEXP (src, 0), 0) == const1_rtx
           && rtx_equal_for_field_assignment_p (dest, XEXP (src, 1)))
    {
      assign = make_extraction (VOIDmode, dest, 0, XEXP (XEXP (src, 0), 1),
                                1, 1, 1, 0);
      if (assign != 0)
        return gen_rtx (SET, VOIDmode, assign, const1_rtx);
      return x;
    }

  /* The other case we handle is assignments into a constant-position
     field.  They look like (ior/xor (and DEST C1) OTHER).  If C1 represents
     a mask that has all one bits except for a group of zero bits and
     OTHER is known to have zeros where C1 has ones, this is such an
     assignment.  Compute the position and length from C1.  Shift OTHER
     to the appropriate position, force it to the required mode, and
     make the extraction.  Check for the AND in both operands. */

  if (GET_CODE (src) != IOR && GET_CODE (src) != XOR)
    return x;

  rhs = expand_compound_operation (XEXP (src, 0));
  lhs = expand_compound_operation (XEXP (src, 1));

  if (GET_CODE (rhs) == AND
      && GET_CODE (XEXP (rhs, 1)) == CONST_INT
      && rtx_equal_for_field_assignment_p (XEXP (rhs, 0), dest))
    c1 = INTVAL (XEXP (rhs, 1)), other = lhs;
  else if (GET_CODE (lhs) == AND
           && GET_CODE (XEXP (lhs, 1)) == CONST_INT
           && rtx_equal_for_field_assignment_p (XEXP (lhs, 0), dest))
    c1 = INTVAL (XEXP (lhs, 1)), other = rhs;
  else
    return x;

  pos = get_pos_from_mask ((~ c1) & GET_MODE_MASK (GET_MODE (dest)), &len);
  if (pos < 0 || pos + len > GET_MODE_BITSIZE (GET_MODE (dest))
      || (GET_MODE_BITSIZE (GET_MODE (other)) <= HOST_BITS_PER_WIDE_INT
          && (c1 & nonzero_bits (other, GET_MODE (other))) != 0))
    return x;

  assign = make_extraction (VOIDmode, dest, pos, NULL_RTX, len, 1, 1, 0);
  if (assign == 0)
    return x;

  /* The mode to use for the source is the mode of the assignment, or of
     what is inside a possible STRICT_LOW_PART. */
  mode = (GET_CODE (assign) == STRICT_LOW_PART
          ? GET_MODE (XEXP (assign, 0)) : GET_MODE (assign));

  /* Shift OTHER right POS places and make it the source, restricting it
     to the proper length and mode. */

  src = force_to_mode (simplify_shift_const (NULL_RTX, LSHIFTRT,
                                             GET_MODE (src), other, pos),
                       mode,
                       GET_MODE_BITSIZE (mode) >= HOST_BITS_PER_WIDE_INT
                       ? GET_MODE_MASK (mode)
                       : ((HOST_WIDE_INT) 1 << len) - 1,
                       dest, 0);

  return gen_rtx_combine (SET, VOIDmode, assign, src);
}
\f
/* See if X is of the form (+ (* a c) (* b c)) and convert to (* (+ a b) c)
   if so. */
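
/* For example, (ior (and A C) (and B C)) becomes (and (ior A B) C),
   and (plus (mult A C) (mult B C)) becomes (mult (plus A B) C). */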

static rtx
apply_distributive_law (x)
     rtx x;
{
  enum rtx_code code = GET_CODE (x);
  rtx lhs, rhs, other;
  rtx tem;
  enum rtx_code inner_code;

  /* Distributivity is not true for floating point.
     It can change the value.  So don't do it.
     -- rms and moshier@world.std.com. */
  if (FLOAT_MODE_P (GET_MODE (x)))
    return x;

  /* The outer operation can only be one of the following: */
  if (code != IOR && code != AND && code != XOR
      && code != PLUS && code != MINUS)
    return x;

  lhs = XEXP (x, 0), rhs = XEXP (x, 1);

  /* If either operand is a primitive we can't do anything, so get out
     fast. */
  if (GET_RTX_CLASS (GET_CODE (lhs)) == 'o'
      || GET_RTX_CLASS (GET_CODE (rhs)) == 'o')
    return x;

  lhs = expand_compound_operation (lhs);
  rhs = expand_compound_operation (rhs);
  inner_code = GET_CODE (lhs);
  if (inner_code != GET_CODE (rhs))
    return x;

  /* See if the inner and outer operations distribute. */
  switch (inner_code)
    {
    case LSHIFTRT:
    case ASHIFTRT:
    case AND:
    case IOR:
      /* These all distribute except over PLUS. */
      if (code == PLUS || code == MINUS)
        return x;
      break;

    case MULT:
      if (code != PLUS && code != MINUS)
        return x;
      break;

    case ASHIFT:
      /* This is also a multiply, so it distributes over everything. */
      break;

    case SUBREG:
      /* Non-paradoxical SUBREGs distribute over all operations, provided
         the inner modes and word numbers are the same, this is an extraction
         of a low-order part, we don't convert an fp operation to int or
         vice versa, and we would not be converting a single-word
         operation into a multi-word operation.  The latter test is not
         required, but it prevents generating unneeded multi-word operations.
         Some of the previous tests are redundant given the latter test, but
         are retained because they are required for correctness.

         We produce the result slightly differently in this case. */

      if (GET_MODE (SUBREG_REG (lhs)) != GET_MODE (SUBREG_REG (rhs))
          || SUBREG_WORD (lhs) != SUBREG_WORD (rhs)
          || ! subreg_lowpart_p (lhs)
          || (GET_MODE_CLASS (GET_MODE (lhs))
              != GET_MODE_CLASS (GET_MODE (SUBREG_REG (lhs))))
          || (GET_MODE_SIZE (GET_MODE (lhs))
              > GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs))))
          || GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs))) > UNITS_PER_WORD)
        return x;

      tem = gen_binary (code, GET_MODE (SUBREG_REG (lhs)),
                        SUBREG_REG (lhs), SUBREG_REG (rhs));
      return gen_lowpart_for_combine (GET_MODE (x), tem);

    default:
      return x;
    }

  /* Set LHS and RHS to the inner operands (A and B in the example
     above) and set OTHER to the common operand (C in the example).
     There is only one way to do this unless the inner operation is
     commutative. */
  if (GET_RTX_CLASS (inner_code) == 'c'
      && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 0)))
    other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 1);
  else if (GET_RTX_CLASS (inner_code) == 'c'
           && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 1)))
    other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 0);
  else if (GET_RTX_CLASS (inner_code) == 'c'
           && rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 0)))
    other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 1);
  else if (rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 1)))
    other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 0);
  else
    return x;

  /* Form the new inner operation, seeing if it simplifies first. */
  tem = gen_binary (code, GET_MODE (x), lhs, rhs);

  /* There is one exception to the general way of distributing:
     (a | b) ^ (a | c) == (~a) & (b ^ c)  */
  if (code == XOR && inner_code == IOR)
    {
      inner_code = AND;
      other = gen_unary (NOT, GET_MODE (x), GET_MODE (x), other);
    }

  /* We may be able to continue distributing the result, so call
     ourselves recursively on the inner operation before forming the
     outer operation, which we return. */
  return gen_binary (inner_code, GET_MODE (x),
                     apply_distributive_law (tem), other);
}
\f
/* We have X, a logical `and' of VAROP with the constant CONSTOP, to be done
   in MODE.

   Return an equivalent form, if different from X.  Otherwise, return X.  If
   X is zero, we are to always construct the equivalent form. */
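
/* For example, if VAROP is (neg Y) where Y is known to be 0 or 1 and
   CONSTOP is 4, the result is (ashift Y 2): negating a 0/1 value gives
   0 or all ones, so the AND picks out exactly Y shifted into bit 2. */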

static rtx
simplify_and_const_int (x, mode, varop, constop)
     rtx x;
     enum machine_mode mode;
     rtx varop;
     unsigned HOST_WIDE_INT constop;
{
  unsigned HOST_WIDE_INT nonzero;
  int width = GET_MODE_BITSIZE (mode);
  int i;

  /* Simplify VAROP knowing that we will be only looking at some of the
     bits in it. */
  varop = force_to_mode (varop, mode, constop, NULL_RTX, 0);

  /* If VAROP is a CLOBBER, we will fail so return it; if it is a
     CONST_INT, we are done. */
  if (GET_CODE (varop) == CLOBBER || GET_CODE (varop) == CONST_INT)
    return varop;

  /* See what bits may be nonzero in VAROP.  Unlike the general case of
     a call to nonzero_bits, here we don't care about bits outside
     MODE. */

  nonzero = nonzero_bits (varop, mode) & GET_MODE_MASK (mode);

  /* If this would be an entire word for the target, but is not for
     the host, then sign-extend on the host so that the number will look
     the same way on the host that it would on the target.

     For example, when building a 64 bit alpha hosted 32 bit sparc
     targeted compiler, then we want the 32 bit unsigned value -1 to be
     represented as a 64 bit value -1, and not as 0x00000000ffffffff.
     The latter confuses the sparc backend. */

  if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT && BITS_PER_WORD == width
      && (nonzero & ((HOST_WIDE_INT) 1 << (width - 1))))
    nonzero |= ((HOST_WIDE_INT) (-1) << width);

  /* Turn off all bits in the constant that are known to already be zero.
     Thus, if the AND isn't needed at all, we will have CONSTOP == NONZERO_BITS
     which is tested below. */

  constop &= nonzero;

  /* If we don't have any bits left, return zero. */
  if (constop == 0)
    return const0_rtx;

  /* If VAROP is a NEG of something known to be zero or 1 and CONSTOP is
     a power of two, we can replace this with a ASHIFT. */
  if (GET_CODE (varop) == NEG && nonzero_bits (XEXP (varop, 0), mode) == 1
      && (i = exact_log2 (constop)) >= 0)
    return simplify_shift_const (NULL_RTX, ASHIFT, mode, XEXP (varop, 0), i);

  /* If VAROP is an IOR or XOR, apply the AND to both branches of the IOR
     or XOR, then try to apply the distributive law.  This may eliminate
     operations if either branch can be simplified because of the AND.
     It may also make some cases more complex, but those cases probably
     won't match a pattern either with or without this. */

  if (GET_CODE (varop) == IOR || GET_CODE (varop) == XOR)
    return
      gen_lowpart_for_combine
        (mode,
         apply_distributive_law
           (gen_binary (GET_CODE (varop), GET_MODE (varop),
                        simplify_and_const_int (NULL_RTX, GET_MODE (varop),
                                                XEXP (varop, 0), constop),
                        simplify_and_const_int (NULL_RTX, GET_MODE (varop),
                                                XEXP (varop, 1), constop))));

  /* Get VAROP in MODE.  Try to get a SUBREG if not.  Don't make a new SUBREG
     if we already had one (just check for the simplest cases). */
  if (x && GET_CODE (XEXP (x, 0)) == SUBREG
      && GET_MODE (XEXP (x, 0)) == mode
      && SUBREG_REG (XEXP (x, 0)) == varop)
    varop = XEXP (x, 0);
  else
    varop = gen_lowpart_for_combine (mode, varop);

  /* If we can't make the SUBREG, try to return what we were given. */
  if (GET_CODE (varop) == CLOBBER)
    return x ? x : varop;

  /* If we are only masking insignificant bits, return VAROP. */
  if (constop == nonzero)
    x = varop;

  /* Otherwise, return an AND.  See how much, if any, of X we can use. */
  else if (x == 0 || GET_CODE (x) != AND || GET_MODE (x) != mode)
    x = gen_binary (AND, mode, varop, GEN_INT (constop));

  else
    {
      if (GET_CODE (XEXP (x, 1)) != CONST_INT
          || INTVAL (XEXP (x, 1)) != constop)
        SUBST (XEXP (x, 1), GEN_INT (constop));

      SUBST (XEXP (x, 0), varop);
    }

  return x;
}
\f
/* We let num_sign_bit_copies recur into nonzero_bits as that is useful.
   We don't let nonzero_bits recur into num_sign_bit_copies, because that
   is less useful.  We can't allow both, because that results in exponential
   run time recursion.  There is a nullstone testcase that triggered
   this.  This macro avoids accidental uses of num_sign_bit_copies. */
#define num_sign_bit_copies()

/* Given an expression, X, compute which bits in X can be non-zero.
   We don't care about bits outside of those defined in MODE.

   For most X this is simply GET_MODE_MASK (MODE), but if X is
   a shift, AND, or zero_extract, we can do better. */
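
/* For example, nonzero_bits of (and X 12) is at most 12, and in SImode
   nonzero_bits of (lshiftrt X 28) is at most 15, since the logical
   shift fills the high 28 bits with zeros. */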
7117
5f4f0e22 7118static unsigned HOST_WIDE_INT
951553af 7119nonzero_bits (x, mode)
230d793d
RS
7120 rtx x;
7121 enum machine_mode mode;
7122{
951553af
RK
7123 unsigned HOST_WIDE_INT nonzero = GET_MODE_MASK (mode);
7124 unsigned HOST_WIDE_INT inner_nz;
230d793d
RS
7125 enum rtx_code code;
7126 int mode_width = GET_MODE_BITSIZE (mode);
7127 rtx tem;
7128
1c75dfa4
RK
7129 /* For floating-point values, assume all bits are needed. */
7130 if (FLOAT_MODE_P (GET_MODE (x)) || FLOAT_MODE_P (mode))
7131 return nonzero;
7132
230d793d
RS
7133 /* If X is wider than MODE, use its mode instead. */
7134 if (GET_MODE_BITSIZE (GET_MODE (x)) > mode_width)
7135 {
7136 mode = GET_MODE (x);
951553af 7137 nonzero = GET_MODE_MASK (mode);
230d793d
RS
7138 mode_width = GET_MODE_BITSIZE (mode);
7139 }
7140
5f4f0e22 7141 if (mode_width > HOST_BITS_PER_WIDE_INT)
230d793d
RS
7142 /* Our only callers in this case look for single bit values. So
7143 just return the mode mask. Those tests will then be false. */
951553af 7144 return nonzero;
230d793d 7145
8baf60bb 7146#ifndef WORD_REGISTER_OPERATIONS
c6965c0f 7147 /* If MODE is wider than X, but both are a single word for both the host
0840fd91
RK
7148 and target machines, we can compute this from which bits of the
7149 object might be nonzero in its own mode, taking into account the fact
7150 that on many CISC machines, accessing an object in a wider mode
7151 causes the high-order bits to become undefined. So they are
7152 not known to be zero. */
7153
7154 if (GET_MODE (x) != VOIDmode && GET_MODE (x) != mode
7155 && GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD
7156 && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
c6965c0f 7157 && GET_MODE_BITSIZE (mode) > GET_MODE_BITSIZE (GET_MODE (x)))
0840fd91
RK
7158 {
7159 nonzero &= nonzero_bits (x, GET_MODE (x));
7160 nonzero |= GET_MODE_MASK (mode) & ~ GET_MODE_MASK (GET_MODE (x));
7161 return nonzero;
7162 }
7163#endif
7164
230d793d
RS
7165 code = GET_CODE (x);
7166 switch (code)
7167 {
7168 case REG:
320dd7a7
RK
7169#ifdef POINTERS_EXTEND_UNSIGNED
7170 /* If pointers extend unsigned and this is a pointer in Pmode, say that
7171 all the bits above ptr_mode are known to be zero. */
7172 if (POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
7173 && REGNO_POINTER_FLAG (REGNO (x)))
7174 nonzero &= GET_MODE_MASK (ptr_mode);
7175#endif
7176
b0d71df9
RK
7177#ifdef STACK_BOUNDARY
7178 /* If this is the stack pointer, we may know something about its
7179 alignment. If PUSH_ROUNDING is defined, it is possible for the
230d793d
RS
7180 stack to be momentarily aligned only to that amount, so we pick
7181 the least alignment. */
7182
ee49a9c7
JW
7183 /* We can't check for arg_pointer_rtx here, because it is not
7184 guaranteed to have as much alignment as the stack pointer.
7185 In particular, in the Irix6 n64 ABI, the stack has 128 bit
7186 alignment but the argument pointer has only 64 bit alignment. */
7187
b0d71df9 7188 if (x == stack_pointer_rtx || x == frame_pointer_rtx
ee49a9c7 7189 || x == hard_frame_pointer_rtx
b0d71df9
RK
7190 || (REGNO (x) >= FIRST_VIRTUAL_REGISTER
7191 && REGNO (x) <= LAST_VIRTUAL_REGISTER))
230d793d 7192 {
b0d71df9 7193 int sp_alignment = STACK_BOUNDARY / BITS_PER_UNIT;
230d793d
RS
7194
7195#ifdef PUSH_ROUNDING
91102d5a 7196 if (REGNO (x) == STACK_POINTER_REGNUM)
b0d71df9 7197 sp_alignment = MIN (PUSH_ROUNDING (1), sp_alignment);
230d793d
RS
7198#endif
7199
320dd7a7
RK
7200 /* We must return here, otherwise we may get a worse result from
7201 one of the choices below. There is nothing useful below as
7202 far as the stack pointer is concerned. */
b0d71df9 7203 return nonzero &= ~ (sp_alignment - 1);
230d793d 7204 }
b0d71df9 7205#endif
230d793d 7206
55310dad
RK
7207 /* If X is a register whose nonzero bits value is current, use it.
7208 Otherwise, if X is a register whose value we can find, use that
7209 value. Otherwise, use the previously-computed global nonzero bits
7210 for this register. */
7211
7212 if (reg_last_set_value[REGNO (x)] != 0
7213 && reg_last_set_mode[REGNO (x)] == mode
7214 && (reg_n_sets[REGNO (x)] == 1
7215 || reg_last_set_label[REGNO (x)] == label_tick)
7216 && INSN_CUID (reg_last_set[REGNO (x)]) < subst_low_cuid)
7217 return reg_last_set_nonzero_bits[REGNO (x)];
230d793d
RS
7218
7219 tem = get_last_value (x);
9afa3d54 7220
230d793d 7221 if (tem)
9afa3d54
RK
7222 {
7223#ifdef SHORT_IMMEDIATES_SIGN_EXTEND
7224 /* If X is narrower than MODE and TEM is a non-negative
7225 constant that would appear negative in the mode of X,
7226 sign-extend it for use in reg_nonzero_bits because some
7227 machines (maybe most) will actually do the sign-extension
7228 and this is the conservative approach.
7229
7230 ??? For 2.5, try to tighten up the MD files in this regard
7231 instead of this kludge. */
7232
7233 if (GET_MODE_BITSIZE (GET_MODE (x)) < mode_width
7234 && GET_CODE (tem) == CONST_INT
7235 && INTVAL (tem) > 0
7236 && 0 != (INTVAL (tem)
7237 & ((HOST_WIDE_INT) 1
9e69be8c 7238 << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
9afa3d54
RK
7239 tem = GEN_INT (INTVAL (tem)
7240 | ((HOST_WIDE_INT) (-1)
7241 << GET_MODE_BITSIZE (GET_MODE (x))));
7242#endif
7243 return nonzero_bits (tem, mode);
7244 }
951553af
RK
7245 else if (nonzero_sign_valid && reg_nonzero_bits[REGNO (x)])
7246 return reg_nonzero_bits[REGNO (x)] & nonzero;
230d793d 7247 else
951553af 7248 return nonzero;
230d793d
RS
7249
7250 case CONST_INT:
9afa3d54
RK
7251#ifdef SHORT_IMMEDIATES_SIGN_EXTEND
7252 /* If X is negative in MODE, sign-extend the value. */
9e69be8c
RK
7253 if (INTVAL (x) > 0 && mode_width < BITS_PER_WORD
7254 && 0 != (INTVAL (x) & ((HOST_WIDE_INT) 1 << (mode_width - 1))))
7255 return (INTVAL (x) | ((HOST_WIDE_INT) (-1) << mode_width));
9afa3d54
RK
7256#endif
7257
230d793d
RS
7258 return INTVAL (x);
7259
230d793d 7260 case MEM:
8baf60bb 7261#ifdef LOAD_EXTEND_OP
230d793d
RS
 7262    /* On many, if not most, RISC machines, reading a byte from memory
7263 zeros the rest of the register. Noticing that fact saves a lot
7264 of extra zero-extends. */
8baf60bb
RK
7265 if (LOAD_EXTEND_OP (GET_MODE (x)) == ZERO_EXTEND)
7266 nonzero &= GET_MODE_MASK (GET_MODE (x));
230d793d 7267#endif
8baf60bb 7268 break;
230d793d 7269
230d793d
RS
7270 case EQ: case NE:
7271 case GT: case GTU:
7272 case LT: case LTU:
7273 case GE: case GEU:
7274 case LE: case LEU:
3f508eca 7275
c6965c0f
RK
7276 /* If this produces an integer result, we know which bits are set.
7277 Code here used to clear bits outside the mode of X, but that is
7278 now done above. */
230d793d 7279
c6965c0f
RK
7280 if (GET_MODE_CLASS (mode) == MODE_INT
7281 && mode_width <= HOST_BITS_PER_WIDE_INT)
7282 nonzero = STORE_FLAG_VALUE;
230d793d 7283 break;
230d793d 7284
230d793d 7285 case NEG:
b3728b0e
JW
7286#if 0
7287 /* Disabled to avoid exponential mutual recursion between nonzero_bits
7288 and num_sign_bit_copies. */
d0ab8cd3
RK
7289 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
7290 == GET_MODE_BITSIZE (GET_MODE (x)))
951553af 7291 nonzero = 1;
b3728b0e 7292#endif
230d793d
RS
7293
7294 if (GET_MODE_SIZE (GET_MODE (x)) < mode_width)
951553af 7295 nonzero |= (GET_MODE_MASK (mode) & ~ GET_MODE_MASK (GET_MODE (x)));
230d793d 7296 break;
d0ab8cd3
RK
7297
7298 case ABS:
b3728b0e
JW
7299#if 0
7300 /* Disabled to avoid exponential mutual recursion between nonzero_bits
7301 and num_sign_bit_copies. */
d0ab8cd3
RK
7302 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
7303 == GET_MODE_BITSIZE (GET_MODE (x)))
951553af 7304 nonzero = 1;
b3728b0e 7305#endif
d0ab8cd3 7306 break;
230d793d
RS
7307
7308 case TRUNCATE:
951553af 7309 nonzero &= (nonzero_bits (XEXP (x, 0), mode) & GET_MODE_MASK (mode));
230d793d
RS
7310 break;
7311
7312 case ZERO_EXTEND:
951553af 7313 nonzero &= nonzero_bits (XEXP (x, 0), mode);
230d793d 7314 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
951553af 7315 nonzero &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
230d793d
RS
7316 break;
7317
7318 case SIGN_EXTEND:
7319 /* If the sign bit is known clear, this is the same as ZERO_EXTEND.
 7320	 Otherwise, show that all the bits in the outer mode but not in the
 7321	 inner mode may be non-zero.  */
951553af 7322 inner_nz = nonzero_bits (XEXP (x, 0), mode);
230d793d
RS
7323 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
7324 {
951553af
RK
7325 inner_nz &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
7326 if (inner_nz &
5f4f0e22
CH
7327 (((HOST_WIDE_INT) 1
7328 << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1))))
951553af 7329 inner_nz |= (GET_MODE_MASK (mode)
230d793d
RS
7330 & ~ GET_MODE_MASK (GET_MODE (XEXP (x, 0))));
7331 }
7332
951553af 7333 nonzero &= inner_nz;
230d793d
RS
7334 break;
7335
7336 case AND:
951553af
RK
7337 nonzero &= (nonzero_bits (XEXP (x, 0), mode)
7338 & nonzero_bits (XEXP (x, 1), mode));
230d793d
RS
7339 break;
7340
d0ab8cd3
RK
7341 case XOR: case IOR:
7342 case UMIN: case UMAX: case SMIN: case SMAX:
951553af
RK
7343 nonzero &= (nonzero_bits (XEXP (x, 0), mode)
7344 | nonzero_bits (XEXP (x, 1), mode));
230d793d
RS
7345 break;
7346
7347 case PLUS: case MINUS:
7348 case MULT:
7349 case DIV: case UDIV:
7350 case MOD: case UMOD:
7351 /* We can apply the rules of arithmetic to compute the number of
7352 high- and low-order zero bits of these operations. We start by
7353 computing the width (position of the highest-order non-zero bit)
7354 and the number of low-order zero bits for each value. */
7355 {
951553af
RK
7356 unsigned HOST_WIDE_INT nz0 = nonzero_bits (XEXP (x, 0), mode);
7357 unsigned HOST_WIDE_INT nz1 = nonzero_bits (XEXP (x, 1), mode);
7358 int width0 = floor_log2 (nz0) + 1;
7359 int width1 = floor_log2 (nz1) + 1;
7360 int low0 = floor_log2 (nz0 & -nz0);
7361 int low1 = floor_log2 (nz1 & -nz1);
318b149c
RK
7362 HOST_WIDE_INT op0_maybe_minusp
7363 = (nz0 & ((HOST_WIDE_INT) 1 << (mode_width - 1)));
7364 HOST_WIDE_INT op1_maybe_minusp
7365 = (nz1 & ((HOST_WIDE_INT) 1 << (mode_width - 1)));
230d793d
RS
7366 int result_width = mode_width;
7367 int result_low = 0;
7368
7369 switch (code)
7370 {
7371 case PLUS:
7372 result_width = MAX (width0, width1) + 1;
7373 result_low = MIN (low0, low1);
7374 break;
7375 case MINUS:
7376 result_low = MIN (low0, low1);
7377 break;
7378 case MULT:
7379 result_width = width0 + width1;
7380 result_low = low0 + low1;
7381 break;
7382 case DIV:
7383 if (! op0_maybe_minusp && ! op1_maybe_minusp)
7384 result_width = width0;
7385 break;
7386 case UDIV:
7387 result_width = width0;
7388 break;
7389 case MOD:
7390 if (! op0_maybe_minusp && ! op1_maybe_minusp)
7391 result_width = MIN (width0, width1);
7392 result_low = MIN (low0, low1);
7393 break;
7394 case UMOD:
7395 result_width = MIN (width0, width1);
7396 result_low = MIN (low0, low1);
7397 break;
7398 }
7399
7400 if (result_width < mode_width)
951553af 7401 nonzero &= ((HOST_WIDE_INT) 1 << result_width) - 1;
230d793d
RS
7402
7403 if (result_low > 0)
951553af 7404 nonzero &= ~ (((HOST_WIDE_INT) 1 << result_low) - 1);
230d793d
RS
7405 }
7406 break;
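      /* Worked example, added for illustration (not original combine.c
	 text): if nz0 == 0x0c (width0 == 4, low0 == 2) and nz1 == 0x30
	 (width1 == 6, low1 == 4), then PLUS gives result_width == 7 and
	 result_low == 2, cutting the mask down to 0x7c, while MULT gives
	 result_width == 10 and result_low == 6, i.e. mask 0x3c0: the
	 operands are multiples of 4 and 16 bounded by 12 and 48, so the
	 product is a multiple of 64 bounded by 576 < 1024.  */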
7407
7408 case ZERO_EXTRACT:
7409 if (GET_CODE (XEXP (x, 1)) == CONST_INT
5f4f0e22 7410 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
951553af 7411 nonzero &= ((HOST_WIDE_INT) 1 << INTVAL (XEXP (x, 1))) - 1;
230d793d
RS
7412 break;
7413
7414 case SUBREG:
c3c2cb37
RK
7415 /* If this is a SUBREG formed for a promoted variable that has
7416 been zero-extended, we know that at least the high-order bits
7417 are zero, though others might be too. */
7418
7419 if (SUBREG_PROMOTED_VAR_P (x) && SUBREG_PROMOTED_UNSIGNED_P (x))
951553af
RK
7420 nonzero = (GET_MODE_MASK (GET_MODE (x))
7421 & nonzero_bits (SUBREG_REG (x), GET_MODE (x)));
c3c2cb37 7422
230d793d
RS
7423 /* If the inner mode is a single word for both the host and target
7424 machines, we can compute this from which bits of the inner
951553af 7425 object might be nonzero. */
230d793d 7426 if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) <= BITS_PER_WORD
5f4f0e22
CH
7427 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
7428 <= HOST_BITS_PER_WIDE_INT))
230d793d 7429 {
951553af 7430 nonzero &= nonzero_bits (SUBREG_REG (x), mode);
8baf60bb
RK
7431
7432#ifndef WORD_REGISTER_OPERATIONS
230d793d
RS
7433 /* On many CISC machines, accessing an object in a wider mode
7434 causes the high-order bits to become undefined. So they are
7435 not known to be zero. */
7436 if (GET_MODE_SIZE (GET_MODE (x))
7437 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
951553af
RK
7438 nonzero |= (GET_MODE_MASK (GET_MODE (x))
7439 & ~ GET_MODE_MASK (GET_MODE (SUBREG_REG (x))));
230d793d
RS
7440#endif
7441 }
7442 break;
7443
7444 case ASHIFTRT:
7445 case LSHIFTRT:
7446 case ASHIFT:
230d793d 7447 case ROTATE:
951553af 7448 /* The nonzero bits are in two classes: any bits within MODE
230d793d 7449 that aren't in GET_MODE (x) are always significant. The rest of the
951553af 7450 nonzero bits are those that are significant in the operand of
230d793d
RS
7451 the shift when shifted the appropriate number of bits. This
7452 shows that high-order bits are cleared by the right shift and
7453 low-order bits by left shifts. */
7454 if (GET_CODE (XEXP (x, 1)) == CONST_INT
7455 && INTVAL (XEXP (x, 1)) >= 0
5f4f0e22 7456 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
230d793d
RS
7457 {
7458 enum machine_mode inner_mode = GET_MODE (x);
7459 int width = GET_MODE_BITSIZE (inner_mode);
7460 int count = INTVAL (XEXP (x, 1));
5f4f0e22 7461 unsigned HOST_WIDE_INT mode_mask = GET_MODE_MASK (inner_mode);
951553af
RK
7462 unsigned HOST_WIDE_INT op_nonzero = nonzero_bits (XEXP (x, 0), mode);
7463 unsigned HOST_WIDE_INT inner = op_nonzero & mode_mask;
5f4f0e22 7464 unsigned HOST_WIDE_INT outer = 0;
230d793d
RS
7465
7466 if (mode_width > width)
951553af 7467 outer = (op_nonzero & nonzero & ~ mode_mask);
230d793d
RS
7468
7469 if (code == LSHIFTRT)
7470 inner >>= count;
7471 else if (code == ASHIFTRT)
7472 {
7473 inner >>= count;
7474
951553af 7475 /* If the sign bit may have been nonzero before the shift, we
230d793d 7476 need to mark all the places it could have been copied to
951553af 7477 by the shift as possibly nonzero. */
5f4f0e22
CH
7478 if (inner & ((HOST_WIDE_INT) 1 << (width - 1 - count)))
7479 inner |= (((HOST_WIDE_INT) 1 << count) - 1) << (width - count);
230d793d 7480 }
45620ed4 7481 else if (code == ASHIFT)
230d793d
RS
7482 inner <<= count;
7483 else
7484 inner = ((inner << (count % width)
7485 | (inner >> (width - (count % width)))) & mode_mask);
7486
951553af 7487 nonzero &= (outer | inner);
230d793d
RS
7488 }
7489 break;
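      /* Worked example, added for illustration (not original combine.c
	 text): in QImode, for (ashiftrt X 2) with op_nonzero == 0xa0,
	 inner starts as 0xa0 >> 2 == 0x28; since bit 5 (width - 1 - count)
	 may be set, the sign bit may have been copied into the two vacated
	 high bits, so inner becomes 0xe8.  E.g. X == 0xa0 (-96) shifts to
	 -24 == 0xe8, whose set bits all lie within 0xe8.  */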
7490
7491 case FFS:
7492 /* This is at most the number of bits in the mode. */
951553af 7493 nonzero = ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width) + 1)) - 1;
230d793d 7494 break;
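      /* E.g. (added illustration): in a 32-bit mode, floor_log2 (32) == 5,
	 so nonzero becomes 63, which covers every possible FFS result
	 0 .. 32.  */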
d0ab8cd3
RK
7495
7496 case IF_THEN_ELSE:
951553af
RK
7497 nonzero &= (nonzero_bits (XEXP (x, 1), mode)
7498 | nonzero_bits (XEXP (x, 2), mode));
d0ab8cd3 7499 break;
230d793d
RS
7500 }
7501
951553af 7502 return nonzero;
230d793d 7503}
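/* Editor's illustration, not part of the original combine.c: a minimal,
   self-contained sketch of the PLUS rule from the arithmetic case of
   nonzero_bits above, using unsigned long as a stand-in for unsigned
   HOST_WIDE_INT and assuming NZ0 and NZ1 are nonzero.  The sketch_ names
   are invented; sketch_floor_log2 mirrors the floor_log2 helper used
   above (it returns -1 for zero).  Guarded with #if 0 so it is never
   compiled.  */
#if 0
static int
sketch_floor_log2 (x)
     unsigned long x;
{
  int n = -1;

  while (x != 0)
    n++, x >>= 1;

  return n;
}

/* Return the mask of possibly-nonzero bits of a sum whose operands have
   possibly-nonzero bits NZ0 and NZ1, in a mode MODE_WIDTH bits wide.  */
static unsigned long
sketch_plus_nonzero_bits (nz0, nz1, mode_width)
     unsigned long nz0, nz1;
     int mode_width;
{
  int width0 = sketch_floor_log2 (nz0) + 1;
  int width1 = sketch_floor_log2 (nz1) + 1;
  int low0 = sketch_floor_log2 (nz0 & -nz0);
  int low1 = sketch_floor_log2 (nz1 & -nz1);
  /* A sum is at most one bit wider than its wider operand, and it keeps
     the smaller number of low-order zero bits.  */
  int result_width = (width0 > width1 ? width0 : width1) + 1;
  int result_low = low0 < low1 ? low0 : low1;
  unsigned long nonzero = ~0UL;

  if (result_width < mode_width)
    nonzero &= (1UL << result_width) - 1;
  if (result_low > 0)
    nonzero &= ~((1UL << result_low) - 1);

  return nonzero;
}
#endif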
b3728b0e
JW
7504
7505/* See the macro definition above. */
7506#undef num_sign_bit_copies
230d793d 7507\f
d0ab8cd3 7508/* Return the number of bits at the high-order end of X that are known to
5109d49f
RK
7509 be equal to the sign bit. X will be used in mode MODE; if MODE is
7510 VOIDmode, X will be used in its own mode. The returned value will always
7511 be between 1 and the number of bits in MODE. */
d0ab8cd3
RK
7512
7513static int
7514num_sign_bit_copies (x, mode)
7515 rtx x;
7516 enum machine_mode mode;
7517{
7518 enum rtx_code code = GET_CODE (x);
7519 int bitwidth;
7520 int num0, num1, result;
951553af 7521 unsigned HOST_WIDE_INT nonzero;
d0ab8cd3
RK
7522 rtx tem;
7523
7524 /* If we weren't given a mode, use the mode of X. If the mode is still
1c75dfa4
RK
7525 VOIDmode, we don't know anything. Likewise if one of the modes is
7526 floating-point. */
d0ab8cd3
RK
7527
7528 if (mode == VOIDmode)
7529 mode = GET_MODE (x);
7530
1c75dfa4 7531 if (mode == VOIDmode || FLOAT_MODE_P (mode) || FLOAT_MODE_P (GET_MODE (x)))
6752e8d2 7532 return 1;
d0ab8cd3
RK
7533
7534 bitwidth = GET_MODE_BITSIZE (mode);
7535
0f41302f 7536 /* For a smaller object, just ignore the high bits. */
312def2e
RK
7537 if (bitwidth < GET_MODE_BITSIZE (GET_MODE (x)))
7538 return MAX (1, (num_sign_bit_copies (x, GET_MODE (x))
7539 - (GET_MODE_BITSIZE (GET_MODE (x)) - bitwidth)));
7540
0c314d1a
RK
7541#ifndef WORD_REGISTER_OPERATIONS
7542 /* If this machine does not do all register operations on the entire
7543 register and MODE is wider than the mode of X, we can say nothing
7544 at all about the high-order bits. */
7545 if (GET_MODE (x) != VOIDmode && bitwidth > GET_MODE_BITSIZE (GET_MODE (x)))
7546 return 1;
7547#endif
7548
d0ab8cd3
RK
7549 switch (code)
7550 {
7551 case REG:
55310dad 7552
ff0dbdd1
RK
7553#ifdef POINTERS_EXTEND_UNSIGNED
7554 /* If pointers extend signed and this is a pointer in Pmode, say that
7555 all the bits above ptr_mode are known to be sign bit copies. */
7556 if (! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode && mode == Pmode
7557 && REGNO_POINTER_FLAG (REGNO (x)))
7558 return GET_MODE_BITSIZE (Pmode) - GET_MODE_BITSIZE (ptr_mode) + 1;
7559#endif
7560
55310dad
RK
7561 if (reg_last_set_value[REGNO (x)] != 0
7562 && reg_last_set_mode[REGNO (x)] == mode
7563 && (reg_n_sets[REGNO (x)] == 1
7564 || reg_last_set_label[REGNO (x)] == label_tick)
7565 && INSN_CUID (reg_last_set[REGNO (x)]) < subst_low_cuid)
7566 return reg_last_set_sign_bit_copies[REGNO (x)];
d0ab8cd3
RK
7567
7568 tem = get_last_value (x);
7569 if (tem != 0)
7570 return num_sign_bit_copies (tem, mode);
55310dad
RK
7571
7572 if (nonzero_sign_valid && reg_sign_bit_copies[REGNO (x)] != 0)
7573 return reg_sign_bit_copies[REGNO (x)];
d0ab8cd3
RK
7574 break;
7575
457816e2 7576 case MEM:
8baf60bb 7577#ifdef LOAD_EXTEND_OP
457816e2 7578      /* Some RISC machines sign-extend all loads smaller than a word.  */
8baf60bb
RK
7579 if (LOAD_EXTEND_OP (GET_MODE (x)) == SIGN_EXTEND)
7580 return MAX (1, bitwidth - GET_MODE_BITSIZE (GET_MODE (x)) + 1);
457816e2 7581#endif
8baf60bb 7582 break;
457816e2 7583
d0ab8cd3
RK
7584 case CONST_INT:
7585 /* If the constant is negative, take its 1's complement and remask.
7586 Then see how many zero bits we have. */
951553af 7587 nonzero = INTVAL (x) & GET_MODE_MASK (mode);
ac49a949 7588 if (bitwidth <= HOST_BITS_PER_WIDE_INT
951553af
RK
7589 && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
7590 nonzero = (~ nonzero) & GET_MODE_MASK (mode);
d0ab8cd3 7591
951553af 7592 return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
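      /* Worked example, added for illustration: in SImode, x == -4 gives
	 nonzero == 0xfffffffc; the sign bit is set, so we complement and
	 remask to get 3, and return 32 - floor_log2 (3) - 1 == 30.  Indeed
	 bits 31 through 2 of -4 all equal the sign bit.  */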
d0ab8cd3
RK
7593
7594 case SUBREG:
c3c2cb37
RK
7595 /* If this is a SUBREG for a promoted object that is sign-extended
7596 and we are looking at it in a wider mode, we know that at least the
 7597	 high-order bits are sign bit copies.  */
7598
7599 if (SUBREG_PROMOTED_VAR_P (x) && ! SUBREG_PROMOTED_UNSIGNED_P (x))
dc3e17ad
RK
7600 return MAX (bitwidth - GET_MODE_BITSIZE (GET_MODE (x)) + 1,
7601 num_sign_bit_copies (SUBREG_REG (x), mode));
c3c2cb37 7602
0f41302f 7603 /* For a smaller object, just ignore the high bits. */
d0ab8cd3
RK
7604 if (bitwidth <= GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))))
7605 {
7606 num0 = num_sign_bit_copies (SUBREG_REG (x), VOIDmode);
7607 return MAX (1, (num0
7608 - (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
7609 - bitwidth)));
7610 }
457816e2 7611
8baf60bb 7612#ifdef WORD_REGISTER_OPERATIONS
2aec5b7a 7613#ifdef LOAD_EXTEND_OP
8baf60bb
RK
7614 /* For paradoxical SUBREGs on machines where all register operations
7615 affect the entire register, just look inside. Note that we are
7616 passing MODE to the recursive call, so the number of sign bit copies
7617 will remain relative to that mode, not the inner mode. */
457816e2 7618
2aec5b7a
JW
7619 /* This works only if loads sign extend. Otherwise, if we get a
7620 reload for the inner part, it may be loaded from the stack, and
7621 then we lose all sign bit copies that existed before the store
7622 to the stack. */
7623
7624 if ((GET_MODE_SIZE (GET_MODE (x))
7625 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
7626 && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND)
457816e2 7627 return num_sign_bit_copies (SUBREG_REG (x), mode);
2aec5b7a 7628#endif
457816e2 7629#endif
d0ab8cd3
RK
7630 break;
7631
7632 case SIGN_EXTRACT:
7633 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
7634 return MAX (1, bitwidth - INTVAL (XEXP (x, 1)));
7635 break;
7636
7637 case SIGN_EXTEND:
7638 return (bitwidth - GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
7639 + num_sign_bit_copies (XEXP (x, 0), VOIDmode));
7640
7641 case TRUNCATE:
0f41302f 7642 /* For a smaller object, just ignore the high bits. */
d0ab8cd3
RK
7643 num0 = num_sign_bit_copies (XEXP (x, 0), VOIDmode);
7644 return MAX (1, (num0 - (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
7645 - bitwidth)));
7646
7647 case NOT:
7648 return num_sign_bit_copies (XEXP (x, 0), mode);
7649
7650 case ROTATE: case ROTATERT:
7651 /* If we are rotating left by a number of bits less than the number
7652 of sign bit copies, we can just subtract that amount from the
7653 number. */
7654 if (GET_CODE (XEXP (x, 1)) == CONST_INT
7655 && INTVAL (XEXP (x, 1)) >= 0 && INTVAL (XEXP (x, 1)) < bitwidth)
7656 {
7657 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
7658 return MAX (1, num0 - (code == ROTATE ? INTVAL (XEXP (x, 1))
7659 : bitwidth - INTVAL (XEXP (x, 1))));
7660 }
7661 break;
7662
7663 case NEG:
7664 /* In general, this subtracts one sign bit copy. But if the value
7665 is known to be positive, the number of sign bit copies is the
951553af
RK
7666 same as that of the input. Finally, if the input has just one bit
7667 that might be nonzero, all the bits are copies of the sign bit. */
7668 nonzero = nonzero_bits (XEXP (x, 0), mode);
7669 if (nonzero == 1)
d0ab8cd3
RK
7670 return bitwidth;
7671
7672 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
7673 if (num0 > 1
ac49a949 7674 && bitwidth <= HOST_BITS_PER_WIDE_INT
951553af 7675 && (((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero))
d0ab8cd3
RK
7676 num0--;
7677
7678 return num0;
7679
7680 case IOR: case AND: case XOR:
7681 case SMIN: case SMAX: case UMIN: case UMAX:
7682 /* Logical operations will preserve the number of sign-bit copies.
7683 MIN and MAX operations always return one of the operands. */
7684 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
7685 num1 = num_sign_bit_copies (XEXP (x, 1), mode);
7686 return MIN (num0, num1);
7687
7688 case PLUS: case MINUS:
7689 /* For addition and subtraction, we can have a 1-bit carry. However,
7690 if we are subtracting 1 from a positive number, there will not
7691 be such a carry. Furthermore, if the positive number is known to
7692 be 0 or 1, we know the result is either -1 or 0. */
7693
3e3ea975 7694 if (code == PLUS && XEXP (x, 1) == constm1_rtx
9295e6af 7695 && bitwidth <= HOST_BITS_PER_WIDE_INT)
d0ab8cd3 7696 {
951553af
RK
7697 nonzero = nonzero_bits (XEXP (x, 0), mode);
7698 if ((((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero) == 0)
7699 return (nonzero == 1 || nonzero == 0 ? bitwidth
7700 : bitwidth - floor_log2 (nonzero) - 1);
d0ab8cd3
RK
7701 }
7702
7703 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
7704 num1 = num_sign_bit_copies (XEXP (x, 1), mode);
7705 return MAX (1, MIN (num0, num1) - 1);
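      /* For instance (added illustration): adding two SImode values that
	 each have 17 sign bit copies (each fits in 16 signed bits) yields
	 MIN (17, 17) - 1 == 16 copies; the sum always fits in 17 signed
	 bits.  */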
7706
7707 case MULT:
7708 /* The number of bits of the product is the sum of the number of
 7709	 bits of both terms.  However, unless one of the terms is known
7710 to be positive, we must allow for an additional bit since negating
7711 a negative number can remove one sign bit copy. */
7712
7713 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
7714 num1 = num_sign_bit_copies (XEXP (x, 1), mode);
7715
7716 result = bitwidth - (bitwidth - num0) - (bitwidth - num1);
7717 if (result > 0
9295e6af 7718 && bitwidth <= HOST_BITS_PER_WIDE_INT
951553af 7719 && ((nonzero_bits (XEXP (x, 0), mode)
d0ab8cd3 7720 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
01c82bbb
RK
7721 && ((nonzero_bits (XEXP (x, 1), mode)
7722 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
d0ab8cd3
RK
7723 result--;
7724
7725 return MAX (1, result);
7726
7727 case UDIV:
7728 /* The result must be <= the first operand. */
7729 return num_sign_bit_copies (XEXP (x, 0), mode);
7730
7731 case UMOD:
 7732      /* The result must be <= the second operand.  */
7733 return num_sign_bit_copies (XEXP (x, 1), mode);
7734
7735 case DIV:
7736 /* Similar to unsigned division, except that we have to worry about
 7737	 the case where the divisor is negative, in which case the quotient
 7738	 may have one fewer sign bit copy.  */
7739 result = num_sign_bit_copies (XEXP (x, 0), mode);
7740 if (result > 1
ac49a949 7741 && bitwidth <= HOST_BITS_PER_WIDE_INT
951553af 7742 && (nonzero_bits (XEXP (x, 1), mode)
d0ab8cd3
RK
7743 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
 7744	result--;
7745
7746 return result;
7747
7748 case MOD:
7749 result = num_sign_bit_copies (XEXP (x, 1), mode);
7750 if (result > 1
ac49a949 7751 && bitwidth <= HOST_BITS_PER_WIDE_INT
951553af 7752 && (nonzero_bits (XEXP (x, 1), mode)
d0ab8cd3
RK
7753 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
 7754	result--;
7755
7756 return result;
7757
7758 case ASHIFTRT:
7759 /* Shifts by a constant add to the number of bits equal to the
7760 sign bit. */
7761 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
7762 if (GET_CODE (XEXP (x, 1)) == CONST_INT
7763 && INTVAL (XEXP (x, 1)) > 0)
7764 num0 = MIN (bitwidth, num0 + INTVAL (XEXP (x, 1)));
7765
7766 return num0;
7767
7768 case ASHIFT:
d0ab8cd3
RK
7769 /* Left shifts destroy copies. */
7770 if (GET_CODE (XEXP (x, 1)) != CONST_INT
7771 || INTVAL (XEXP (x, 1)) < 0
7772 || INTVAL (XEXP (x, 1)) >= bitwidth)
7773 return 1;
7774
7775 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
7776 return MAX (1, num0 - INTVAL (XEXP (x, 1)));
7777
7778 case IF_THEN_ELSE:
7779 num0 = num_sign_bit_copies (XEXP (x, 1), mode);
7780 num1 = num_sign_bit_copies (XEXP (x, 2), mode);
7781 return MIN (num0, num1);
7782
d0ab8cd3
RK
7783 case EQ: case NE: case GE: case GT: case LE: case LT:
7784 case GEU: case GTU: case LEU: case LTU:
0802d516
RK
7785 if (STORE_FLAG_VALUE == -1)
7786 return bitwidth;
d0ab8cd3
RK
7787 }
7788
7789 /* If we haven't been able to figure it out by one of the above rules,
7790 see if some of the high-order bits are known to be zero. If so,
ac49a949
RS
7791 count those bits and return one less than that amount. If we can't
 7792     safely compute the mask for this mode, always return 1, which is safe.  */
7793
7794 if (bitwidth > HOST_BITS_PER_WIDE_INT)
6752e8d2 7795 return 1;
d0ab8cd3 7796
951553af 7797 nonzero = nonzero_bits (x, mode);
df6f4086 7798 return (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))
951553af 7799 ? 1 : bitwidth - floor_log2 (nonzero) - 1);
d0ab8cd3
RK
7800}
7801\f
1a26b032
RK
7802/* Return the number of "extended" bits there are in X, when interpreted
7803 as a quantity in MODE whose signedness is indicated by UNSIGNEDP. For
7804 unsigned quantities, this is the number of high-order zero bits.
7805 For signed quantities, this is the number of copies of the sign bit
 7806    minus 1.  In both cases, this function returns the number of "spare"
7807 bits. For example, if two quantities for which this function returns
7808 at least 1 are added, the addition is known not to overflow.
7809
7810 This function will always return 0 unless called during combine, which
7811 implies that it must be called from a define_split. */
7812
7813int
7814extended_count (x, mode, unsignedp)
7815 rtx x;
7816 enum machine_mode mode;
7817 int unsignedp;
7818{
951553af 7819 if (nonzero_sign_valid == 0)
1a26b032
RK
7820 return 0;
7821
7822 return (unsignedp
ac49a949
RS
7823 ? (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7824 && (GET_MODE_BITSIZE (mode) - 1
951553af 7825 - floor_log2 (nonzero_bits (x, mode))))
1a26b032
RK
7826 : num_sign_bit_copies (x, mode) - 1);
7827}
7828\f
230d793d
RS
7829/* This function is called from `simplify_shift_const' to merge two
7830 outer operations. Specifically, we have already found that we need
7831 to perform operation *POP0 with constant *PCONST0 at the outermost
7832 position. We would now like to also perform OP1 with constant CONST1
7833 (with *POP0 being done last).
7834
7835 Return 1 if we can do the operation and update *POP0 and *PCONST0 with
7836 the resulting operation. *PCOMP_P is set to 1 if we would need to
7837 complement the innermost operand, otherwise it is unchanged.
7838
7839 MODE is the mode in which the operation will be done. No bits outside
7840 the width of this mode matter. It is assumed that the width of this mode
5f4f0e22 7841 is smaller than or equal to HOST_BITS_PER_WIDE_INT.
230d793d
RS
7842
 7843    If *POP0 or OP1 is NIL, it means no operation is required.  Only NEG, PLUS,
7844 IOR, XOR, and AND are supported. We may set *POP0 to SET if the proper
7845 result is simply *PCONST0.
7846
7847 If the resulting operation cannot be expressed as one operation, we
7848 return 0 and do not change *POP0, *PCONST0, and *PCOMP_P. */
7849
7850static int
7851merge_outer_ops (pop0, pconst0, op1, const1, mode, pcomp_p)
7852 enum rtx_code *pop0;
5f4f0e22 7853 HOST_WIDE_INT *pconst0;
230d793d 7854 enum rtx_code op1;
5f4f0e22 7855 HOST_WIDE_INT const1;
230d793d
RS
7856 enum machine_mode mode;
7857 int *pcomp_p;
7858{
7859 enum rtx_code op0 = *pop0;
5f4f0e22 7860 HOST_WIDE_INT const0 = *pconst0;
9fa6d012 7861 int width = GET_MODE_BITSIZE (mode);
230d793d
RS
7862
7863 const0 &= GET_MODE_MASK (mode);
7864 const1 &= GET_MODE_MASK (mode);
7865
7866 /* If OP0 is an AND, clear unimportant bits in CONST1. */
7867 if (op0 == AND)
7868 const1 &= const0;
7869
7870 /* If OP0 or OP1 is NIL, this is easy. Similarly if they are the same or
7871 if OP0 is SET. */
7872
7873 if (op1 == NIL || op0 == SET)
7874 return 1;
7875
7876 else if (op0 == NIL)
7877 op0 = op1, const0 = const1;
7878
7879 else if (op0 == op1)
7880 {
7881 switch (op0)
7882 {
7883 case AND:
7884 const0 &= const1;
7885 break;
7886 case IOR:
7887 const0 |= const1;
7888 break;
7889 case XOR:
7890 const0 ^= const1;
7891 break;
7892 case PLUS:
7893 const0 += const1;
7894 break;
7895 case NEG:
7896 op0 = NIL;
7897 break;
7898 }
7899 }
7900
7901 /* Otherwise, if either is a PLUS or NEG, we can't do anything. */
7902 else if (op0 == PLUS || op1 == PLUS || op0 == NEG || op1 == NEG)
7903 return 0;
7904
7905 /* If the two constants aren't the same, we can't do anything. The
7906 remaining six cases can all be done. */
7907 else if (const0 != const1)
7908 return 0;
7909
7910 else
7911 switch (op0)
7912 {
7913 case IOR:
7914 if (op1 == AND)
7915 /* (a & b) | b == b */
7916 op0 = SET;
7917 else /* op1 == XOR */
7918 /* (a ^ b) | b == a | b */
7919 ;
7920 break;
7921
7922 case XOR:
7923 if (op1 == AND)
7924 /* (a & b) ^ b == (~a) & b */
7925 op0 = AND, *pcomp_p = 1;
7926 else /* op1 == IOR */
7927 /* (a | b) ^ b == a & ~b */
7928 op0 = AND, *pconst0 = ~ const0;
7929 break;
7930
7931 case AND:
7932 if (op1 == IOR)
7933 /* (a | b) & b == b */
7934 op0 = SET;
7935 else /* op1 == XOR */
 7936	  /* (a ^ b) & b == (~a) & b */
7937 *pcomp_p = 1;
7938 break;
7939 }
7940
7941 /* Check for NO-OP cases. */
7942 const0 &= GET_MODE_MASK (mode);
7943 if (const0 == 0
7944 && (op0 == IOR || op0 == XOR || op0 == PLUS))
7945 op0 = NIL;
7946 else if (const0 == 0 && op0 == AND)
7947 op0 = SET;
7948 else if (const0 == GET_MODE_MASK (mode) && op0 == AND)
7949 op0 = NIL;
7950
9fa6d012
TG
7951 /* If this would be an entire word for the target, but is not for
7952 the host, then sign-extend on the host so that the number will look
7953 the same way on the host that it would on the target.
7954
7955 For example, when building a 64 bit alpha hosted 32 bit sparc
7956 targeted compiler, then we want the 32 bit unsigned value -1 to be
7957 represented as a 64 bit value -1, and not as 0x00000000ffffffff.
 7958     The latter confuses the sparc backend.  */
7959
7960 if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT && BITS_PER_WORD == width
7961 && (const0 & ((HOST_WIDE_INT) 1 << (width - 1))))
7962 const0 |= ((HOST_WIDE_INT) (-1) << width);
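  /* For instance (added illustration): with BITS_PER_WORD == 32 and a
     64 bit HOST_WIDE_INT, a CONST0 of 0xffffffff has bit 31 set and is
     sign-extended to the 64 bit value -1, matching the target's view.  */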
7963
230d793d
RS
7964 *pop0 = op0;
7965 *pconst0 = const0;
7966
7967 return 1;
7968}
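/* Worked example, added for illustration (not original combine.c text):
   merging an inner (ior ... 5) into an outer XOR with *PCONST0 == 5.
   The constants match, so the XOR/IOR rule above applies: (a | 5) ^ 5
   clears exactly the bits of 5 and leaves the others alone, so *POP0
   becomes AND and *PCONST0 becomes ~5.  */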
7969\f
7970/* Simplify a shift of VAROP by COUNT bits. CODE says what kind of shift.
7971 The result of the shift is RESULT_MODE. X, if non-zero, is an expression
7972 that we started with.
7973
7974 The shift is normally computed in the widest mode we find in VAROP, as
7975 long as it isn't a different number of words than RESULT_MODE. Exceptions
 7976    are right shifts and ROTATE, which are always done in their original mode.  */
7977
7978static rtx
7979simplify_shift_const (x, code, result_mode, varop, count)
7980 rtx x;
7981 enum rtx_code code;
7982 enum machine_mode result_mode;
7983 rtx varop;
7984 int count;
7985{
7986 enum rtx_code orig_code = code;
7987 int orig_count = count;
7988 enum machine_mode mode = result_mode;
7989 enum machine_mode shift_mode, tmode;
7990 int mode_words
7991 = (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
7992 /* We form (outer_op (code varop count) (outer_const)). */
7993 enum rtx_code outer_op = NIL;
c4e861e8 7994 HOST_WIDE_INT outer_const = 0;
230d793d
RS
7995 rtx const_rtx;
7996 int complement_p = 0;
7997 rtx new;
7998
7999 /* If we were given an invalid count, don't do anything except exactly
8000 what was requested. */
8001
8002 if (count < 0 || count > GET_MODE_BITSIZE (mode))
8003 {
8004 if (x)
8005 return x;
8006
5f4f0e22 8007 return gen_rtx (code, mode, varop, GEN_INT (count));
230d793d
RS
8008 }
8009
8010 /* Unless one of the branches of the `if' in this loop does a `continue',
8011 we will `break' the loop after the `if'. */
8012
8013 while (count != 0)
8014 {
8015 /* If we have an operand of (clobber (const_int 0)), just return that
8016 value. */
8017 if (GET_CODE (varop) == CLOBBER)
8018 return varop;
8019
8020 /* If we discovered we had to complement VAROP, leave. Making a NOT
8021 here would cause an infinite loop. */
8022 if (complement_p)
8023 break;
8024
abc95ed3 8025 /* Convert ROTATERT to ROTATE. */
230d793d
RS
8026 if (code == ROTATERT)
8027 code = ROTATE, count = GET_MODE_BITSIZE (result_mode) - count;
8028
230d793d 8029 /* We need to determine what mode we will do the shift in. If the
f6789c77
RK
8030 shift is a right shift or a ROTATE, we must always do it in the mode
8031 it was originally done in. Otherwise, we can do it in MODE, the
0f41302f 8032 widest mode encountered. */
f6789c77
RK
8033 shift_mode
8034 = (code == ASHIFTRT || code == LSHIFTRT || code == ROTATE
8035 ? result_mode : mode);
230d793d
RS
8036
8037 /* Handle cases where the count is greater than the size of the mode
8038 minus 1. For ASHIFT, use the size minus one as the count (this can
8039 occur when simplifying (lshiftrt (ashiftrt ..))). For rotates,
8040 take the count modulo the size. For other shifts, the result is
8041 zero.
8042
8043 Since these shifts are being produced by the compiler by combining
8044 multiple operations, each of which are defined, we know what the
8045 result is supposed to be. */
8046
8047 if (count > GET_MODE_BITSIZE (shift_mode) - 1)
8048 {
8049 if (code == ASHIFTRT)
8050 count = GET_MODE_BITSIZE (shift_mode) - 1;
8051 else if (code == ROTATE || code == ROTATERT)
8052 count %= GET_MODE_BITSIZE (shift_mode);
8053 else
8054 {
8055 /* We can't simply return zero because there may be an
8056 outer op. */
8057 varop = const0_rtx;
8058 count = 0;
8059 break;
8060 }
8061 }
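      /* For example (added illustration): in SImode, (ashiftrt X 40)
	 becomes (ashiftrt X 31), (rotate X 35) becomes (rotate X 3), and
	 (ashift X 33) turns VAROP into the constant zero, still subject to
	 any outer operation.  */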
8062
8063 /* Negative counts are invalid and should not have been made (a
8064 programmer-specified negative count should have been handled
0f41302f 8065 above). */
230d793d
RS
8066 else if (count < 0)
8067 abort ();
8068
312def2e
RK
8069 /* An arithmetic right shift of a quantity known to be -1 or 0
8070 is a no-op. */
8071 if (code == ASHIFTRT
8072 && (num_sign_bit_copies (varop, shift_mode)
8073 == GET_MODE_BITSIZE (shift_mode)))
d0ab8cd3 8074 {
312def2e
RK
8075 count = 0;
8076 break;
8077 }
d0ab8cd3 8078
312def2e
RK
8079 /* If we are doing an arithmetic right shift and discarding all but
8080 the sign bit copies, this is equivalent to doing a shift by the
8081 bitsize minus one. Convert it into that shift because it will often
8082 allow other simplifications. */
500c518b 8083
312def2e
RK
8084 if (code == ASHIFTRT
8085 && (count + num_sign_bit_copies (varop, shift_mode)
8086 >= GET_MODE_BITSIZE (shift_mode)))
8087 count = GET_MODE_BITSIZE (shift_mode) - 1;
500c518b 8088
230d793d
RS
8089 /* We simplify the tests below and elsewhere by converting
8090 ASHIFTRT to LSHIFTRT if we know the sign bit is clear.
 8092	 `make_compound_operation' will convert it to an ASHIFTRT for
8092 those machines (such as Vax) that don't have a LSHIFTRT. */
5f4f0e22 8093 if (GET_MODE_BITSIZE (shift_mode) <= HOST_BITS_PER_WIDE_INT
230d793d 8094 && code == ASHIFTRT
951553af 8095 && ((nonzero_bits (varop, shift_mode)
5f4f0e22
CH
8096 & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (shift_mode) - 1)))
8097 == 0))
230d793d
RS
8098 code = LSHIFTRT;
8099
8100 switch (GET_CODE (varop))
8101 {
8102 case SIGN_EXTEND:
8103 case ZERO_EXTEND:
8104 case SIGN_EXTRACT:
8105 case ZERO_EXTRACT:
8106 new = expand_compound_operation (varop);
8107 if (new != varop)
8108 {
8109 varop = new;
8110 continue;
8111 }
8112 break;
8113
8114 case MEM:
8115 /* If we have (xshiftrt (mem ...) C) and C is MODE_WIDTH
8116 minus the width of a smaller mode, we can do this with a
8117 SIGN_EXTEND or ZERO_EXTEND from the narrower memory location. */
8118 if ((code == ASHIFTRT || code == LSHIFTRT)
8119 && ! mode_dependent_address_p (XEXP (varop, 0))
8120 && ! MEM_VOLATILE_P (varop)
8121 && (tmode = mode_for_size (GET_MODE_BITSIZE (mode) - count,
8122 MODE_INT, 1)) != BLKmode)
8123 {
f76b9db2
ILT
8124 if (BYTES_BIG_ENDIAN)
8125 new = gen_rtx (MEM, tmode, XEXP (varop, 0));
8126 else
e24b00c8
ILT
8127 new = gen_rtx (MEM, tmode,
8128 plus_constant (XEXP (varop, 0),
8129 count / BITS_PER_UNIT));
8130 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (varop);
8131 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (varop);
8132 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (varop);
230d793d
RS
8133 varop = gen_rtx_combine (code == ASHIFTRT ? SIGN_EXTEND
8134 : ZERO_EXTEND, mode, new);
8135 count = 0;
8136 continue;
8137 }
8138 break;
8139
8140 case USE:
8141 /* Similar to the case above, except that we can only do this if
8142 the resulting mode is the same as that of the underlying
 8143	     MEM, and we must adjust the address depending on the *bits* endianness
8144 because of the way that bit-field extract insns are defined. */
8145 if ((code == ASHIFTRT || code == LSHIFTRT)
8146 && (tmode = mode_for_size (GET_MODE_BITSIZE (mode) - count,
8147 MODE_INT, 1)) != BLKmode
8148 && tmode == GET_MODE (XEXP (varop, 0)))
8149 {
f76b9db2
ILT
8150 if (BITS_BIG_ENDIAN)
8151 new = XEXP (varop, 0);
8152 else
8153 {
8154 new = copy_rtx (XEXP (varop, 0));
8155 SUBST (XEXP (new, 0),
8156 plus_constant (XEXP (new, 0),
8157 count / BITS_PER_UNIT));
8158 }
230d793d
RS
8159
8160 varop = gen_rtx_combine (code == ASHIFTRT ? SIGN_EXTEND
8161 : ZERO_EXTEND, mode, new);
8162 count = 0;
8163 continue;
8164 }
8165 break;
8166
8167 case SUBREG:
8168 /* If VAROP is a SUBREG, strip it as long as the inner operand has
8169 the same number of words as what we've seen so far. Then store
8170 the widest mode in MODE. */
f9e67232
RS
8171 if (subreg_lowpart_p (varop)
8172 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop)))
8173 > GET_MODE_SIZE (GET_MODE (varop)))
230d793d
RS
8174 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop)))
8175 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
8176 == mode_words))
8177 {
8178 varop = SUBREG_REG (varop);
8179 if (GET_MODE_SIZE (GET_MODE (varop)) > GET_MODE_SIZE (mode))
8180 mode = GET_MODE (varop);
8181 continue;
8182 }
8183 break;
8184
8185 case MULT:
8186 /* Some machines use MULT instead of ASHIFT because MULT
8187 is cheaper. But it is still better on those machines to
8188 merge two shifts into one. */
8189 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
8190 && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0)
8191 {
8192 varop = gen_binary (ASHIFT, GET_MODE (varop), XEXP (varop, 0),
5f4f0e22 8193				 GEN_INT (exact_log2 (INTVAL (XEXP (varop, 1)))));
230d793d
RS
8194 continue;
8195 }
8196 break;
8197
8198 case UDIV:
8199 /* Similar, for when divides are cheaper. */
8200 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
8201 && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0)
8202 {
8203 varop = gen_binary (LSHIFTRT, GET_MODE (varop), XEXP (varop, 0),
5f4f0e22 8204 GEN_INT (exact_log2 (INTVAL (XEXP (varop, 1)))));
230d793d
RS
8205 continue;
8206 }
8207 break;
8208
8209 case ASHIFTRT:
8210 /* If we are extracting just the sign bit of an arithmetic right
8211 shift, that shift is not needed. */
8212 if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1)
8213 {
8214 varop = XEXP (varop, 0);
8215 continue;
8216 }
8217
0f41302f 8218 /* ... fall through ... */
230d793d
RS
8219
8220 case LSHIFTRT:
8221 case ASHIFT:
230d793d
RS
8222 case ROTATE:
8223 /* Here we have two nested shifts. The result is usually the
8224 AND of a new shift with a mask. We compute the result below. */
8225 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
8226 && INTVAL (XEXP (varop, 1)) >= 0
8227 && INTVAL (XEXP (varop, 1)) < GET_MODE_BITSIZE (GET_MODE (varop))
5f4f0e22
CH
8228 && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
8229 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
230d793d
RS
8230 {
8231 enum rtx_code first_code = GET_CODE (varop);
8232 int first_count = INTVAL (XEXP (varop, 1));
5f4f0e22 8233 unsigned HOST_WIDE_INT mask;
230d793d 8234 rtx mask_rtx;
230d793d 8235
230d793d
RS
8236 /* We have one common special case. We can't do any merging if
8237 the inner code is an ASHIFTRT of a smaller mode. However, if
8238 we have (ashift:M1 (subreg:M1 (ashiftrt:M2 FOO C1) 0) C2)
8239 with C2 == GET_MODE_BITSIZE (M1) - GET_MODE_BITSIZE (M2),
8240 we can convert it to
 8241	     (ashiftrt:M1 (ashift:M1 (and:M1 (subreg:M1 FOO 0) C3) C2) C1).
8242 This simplifies certain SIGN_EXTEND operations. */
8243 if (code == ASHIFT && first_code == ASHIFTRT
8244 && (GET_MODE_BITSIZE (result_mode)
8245 - GET_MODE_BITSIZE (GET_MODE (varop))) == count)
8246 {
8247 /* C3 has the low-order C1 bits zero. */
8248
5f4f0e22
CH
8249 mask = (GET_MODE_MASK (mode)
8250 & ~ (((HOST_WIDE_INT) 1 << first_count) - 1));
230d793d 8251
5f4f0e22 8252 varop = simplify_and_const_int (NULL_RTX, result_mode,
230d793d 8253 XEXP (varop, 0), mask);
5f4f0e22 8254 varop = simplify_shift_const (NULL_RTX, ASHIFT, result_mode,
230d793d
RS
8255 varop, count);
8256 count = first_count;
8257 code = ASHIFTRT;
8258 continue;
8259 }
8260
d0ab8cd3
RK
8261 /* If this was (ashiftrt (ashift foo C1) C2) and FOO has more
8262 than C1 high-order bits equal to the sign bit, we can convert
 8263	     this to either an ASHIFT or an ASHIFTRT depending on the
8264 two counts.
230d793d
RS
8265
8266 We cannot do this if VAROP's mode is not SHIFT_MODE. */
8267
8268 if (code == ASHIFTRT && first_code == ASHIFT
8269 && GET_MODE (varop) == shift_mode
d0ab8cd3
RK
8270 && (num_sign_bit_copies (XEXP (varop, 0), shift_mode)
8271 > first_count))
230d793d 8272 {
d0ab8cd3
RK
8273 count -= first_count;
8274 if (count < 0)
8275 count = - count, code = ASHIFT;
8276 varop = XEXP (varop, 0);
8277 continue;
230d793d
RS
8278 }
8279
8280 /* There are some cases we can't do. If CODE is ASHIFTRT,
8281 we can only do this if FIRST_CODE is also ASHIFTRT.
8282
8283 We can't do the case when CODE is ROTATE and FIRST_CODE is
8284 ASHIFTRT.
8285
8286 If the mode of this shift is not the mode of the outer shift,
bdaae9a0 8287 we can't do this if either shift is a right shift or ROTATE.
230d793d
RS
8288
8289 Finally, we can't do any of these if the mode is too wide
8290 unless the codes are the same.
8291
8292 Handle the case where the shift codes are the same
8293 first. */
8294
8295 if (code == first_code)
8296 {
8297 if (GET_MODE (varop) != result_mode
bdaae9a0
RK
8298 && (code == ASHIFTRT || code == LSHIFTRT
8299 || code == ROTATE))
230d793d
RS
8300 break;
8301
8302 count += first_count;
8303 varop = XEXP (varop, 0);
8304 continue;
8305 }
8306
8307 if (code == ASHIFTRT
8308 || (code == ROTATE && first_code == ASHIFTRT)
5f4f0e22 8309 || GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT
230d793d 8310 || (GET_MODE (varop) != result_mode
bdaae9a0
RK
8311 && (first_code == ASHIFTRT || first_code == LSHIFTRT
8312 || first_code == ROTATE
230d793d
RS
8313 || code == ROTATE)))
8314 break;
8315
8316 /* To compute the mask to apply after the shift, shift the
951553af 8317 nonzero bits of the inner shift the same way the
230d793d
RS
8318 outer shift will. */
8319
951553af 8320 mask_rtx = GEN_INT (nonzero_bits (varop, GET_MODE (varop)));
230d793d
RS
8321
8322 mask_rtx
8323 = simplify_binary_operation (code, result_mode, mask_rtx,
5f4f0e22 8324 GEN_INT (count));
230d793d
RS
8325
8326 /* Give up if we can't compute an outer operation to use. */
8327 if (mask_rtx == 0
8328 || GET_CODE (mask_rtx) != CONST_INT
8329 || ! merge_outer_ops (&outer_op, &outer_const, AND,
8330 INTVAL (mask_rtx),
8331 result_mode, &complement_p))
8332 break;
8333
8334 /* If the shifts are in the same direction, we add the
8335 counts. Otherwise, we subtract them. */
8336 if ((code == ASHIFTRT || code == LSHIFTRT)
8337 == (first_code == ASHIFTRT || first_code == LSHIFTRT))
8338 count += first_count;
8339 else
8340 count -= first_count;
8341
8342 /* If COUNT is positive, the new shift is usually CODE,
8343 except for the two exceptions below, in which case it is
8344 FIRST_CODE. If the count is negative, FIRST_CODE should
 8345	     always be used.  */
8346 if (count > 0
8347 && ((first_code == ROTATE && code == ASHIFT)
8348 || (first_code == ASHIFTRT && code == LSHIFTRT)))
8349 code = first_code;
8350 else if (count < 0)
8351 code = first_code, count = - count;
8352
8353 varop = XEXP (varop, 0);
8354 continue;
8355 }
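	  /* Worked example, added for illustration: merging
	     (lshiftrt (ashift X 3) 2) in SImode.  The nonzero mask of the
	     inner shift is 0xfffffff8; shifted right by 2 it is 0x3ffffffe.
	     The shifts run in opposite directions, so count becomes
	     2 - 3 == -1, i.e. an ASHIFT by 1, and the result is
	     (and (ashift X 1) 0x3ffffffe).  */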
8356
8357 /* If we have (A << B << C) for any shift, we can convert this to
8358 (A << C << B). This wins if A is a constant. Only try this if
8359 B is not a constant. */
8360
8361 else if (GET_CODE (varop) == code
8362 && GET_CODE (XEXP (varop, 1)) != CONST_INT
8363 && 0 != (new
8364 = simplify_binary_operation (code, mode,
8365 XEXP (varop, 0),
5f4f0e22 8366 GEN_INT (count))))
230d793d
RS
8367 {
8368 varop = gen_rtx_combine (code, mode, new, XEXP (varop, 1));
8369 count = 0;
8370 continue;
8371 }
8372 break;
8373
8374 case NOT:
8375 /* Make this fit the case below. */
8376 varop = gen_rtx_combine (XOR, mode, XEXP (varop, 0),
5f4f0e22 8377 GEN_INT (GET_MODE_MASK (mode)));
230d793d
RS
8378 continue;
8379
8380 case IOR:
8381 case AND:
8382 case XOR:
8383 /* If we have (xshiftrt (ior (plus X (const_int -1)) X) C)
8384 with C the size of VAROP - 1 and the shift is logical if
8385 STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
8386 we have an (le X 0) operation. If we have an arithmetic shift
8387 and STORE_FLAG_VALUE is 1 or we have a logical shift with
8388 STORE_FLAG_VALUE of -1, we have a (neg (le X 0)) operation. */
8389
8390 if (GET_CODE (varop) == IOR && GET_CODE (XEXP (varop, 0)) == PLUS
8391 && XEXP (XEXP (varop, 0), 1) == constm1_rtx
8392 && (STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
8393 && (code == LSHIFTRT || code == ASHIFTRT)
8394 && count == GET_MODE_BITSIZE (GET_MODE (varop)) - 1
8395 && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
8396 {
8397 count = 0;
8398 varop = gen_rtx_combine (LE, GET_MODE (varop), XEXP (varop, 1),
8399 const0_rtx);
8400
8401 if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
8402 varop = gen_rtx_combine (NEG, GET_MODE (varop), varop);
8403
8404 continue;
8405 }
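	  /* E.g. (added illustration): with STORE_FLAG_VALUE == 1 in SImode,
	     (lshiftrt (ior (plus X -1) X) 31) is 1 precisely when X <= 0,
	     since (X - 1) | X has its sign bit set exactly for X <= 0.  */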
8406
8407 /* If we have (shift (logical)), move the logical to the outside
8408 to allow it to possibly combine with another logical and the
8409 shift to combine with another shift. This also canonicalizes to
8410 what a ZERO_EXTRACT looks like. Also, some machines have
8411 (and (shift)) insns. */
8412
8413 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
8414 && (new = simplify_binary_operation (code, result_mode,
8415 XEXP (varop, 1),
5f4f0e22 8416 GEN_INT (count))) != 0
7d171a1e 8417	      && GET_CODE (new) == CONST_INT
230d793d
RS
8418 && merge_outer_ops (&outer_op, &outer_const, GET_CODE (varop),
8419 INTVAL (new), result_mode, &complement_p))
8420 {
8421 varop = XEXP (varop, 0);
8422 continue;
8423 }
8424
8425 /* If we can't do that, try to simplify the shift in each arm of the
8426 logical expression, make a new logical expression, and apply
8427 the inverse distributive law. */
8428 {
00d4ca1c 8429 rtx lhs = simplify_shift_const (NULL_RTX, code, shift_mode,
230d793d 8430 XEXP (varop, 0), count);
00d4ca1c 8431 rtx rhs = simplify_shift_const (NULL_RTX, code, shift_mode,
230d793d
RS
8432 XEXP (varop, 1), count);
8433
21a64bf1 8434 varop = gen_binary (GET_CODE (varop), shift_mode, lhs, rhs);
230d793d
RS
8435 varop = apply_distributive_law (varop);
8436
8437 count = 0;
8438 }
8439 break;
8440
8441 case EQ:
45620ed4 8442	  /* Convert (lshiftrt (eq FOO 0) C) to (xor FOO 1) if STORE_FLAG_VALUE
230d793d 8443 says that the sign bit can be tested, FOO has mode MODE, C is
45620ed4
RK
8444 GET_MODE_BITSIZE (MODE) - 1, and FOO has only its low-order bit
8445 that may be nonzero. */
8446 if (code == LSHIFTRT
230d793d
RS
8447 && XEXP (varop, 1) == const0_rtx
8448 && GET_MODE (XEXP (varop, 0)) == result_mode
8449 && count == GET_MODE_BITSIZE (result_mode) - 1
5f4f0e22 8450 && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
230d793d 8451 && ((STORE_FLAG_VALUE
5f4f0e22 8452 & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (result_mode) - 1))))
951553af 8453 && nonzero_bits (XEXP (varop, 0), result_mode) == 1
5f4f0e22
CH
8454 && merge_outer_ops (&outer_op, &outer_const, XOR,
8455 (HOST_WIDE_INT) 1, result_mode,
8456 &complement_p))
230d793d
RS
8457 {
8458 varop = XEXP (varop, 0);
8459 count = 0;
8460 continue;
8461 }
8462 break;
8463
8464 case NEG:
d0ab8cd3
RK
8465 /* (lshiftrt (neg A) C) where A is either 0 or 1 and C is one less
8466 than the number of bits in the mode is equivalent to A. */
8467 if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1
951553af 8468 && nonzero_bits (XEXP (varop, 0), result_mode) == 1)
230d793d 8469 {
d0ab8cd3 8470 varop = XEXP (varop, 0);
230d793d
RS
8471 count = 0;
8472 continue;
8473 }
8474
8475 /* NEG commutes with ASHIFT since it is multiplication. Move the
8476 NEG outside to allow shifts to combine. */
8477 if (code == ASHIFT
5f4f0e22
CH
8478 && merge_outer_ops (&outer_op, &outer_const, NEG,
8479 (HOST_WIDE_INT) 0, result_mode,
8480 &complement_p))
230d793d
RS
8481 {
8482 varop = XEXP (varop, 0);
8483 continue;
8484 }
8485 break;
8486
8487 case PLUS:
d0ab8cd3
RK
8488 /* (lshiftrt (plus A -1) C) where A is either 0 or 1 and C
8489 is one less than the number of bits in the mode is
8490 equivalent to (xor A 1). */
230d793d
RS
8491 if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1
8492 && XEXP (varop, 1) == constm1_rtx
951553af 8493 && nonzero_bits (XEXP (varop, 0), result_mode) == 1
5f4f0e22
CH
8494 && merge_outer_ops (&outer_op, &outer_const, XOR,
8495 (HOST_WIDE_INT) 1, result_mode,
8496 &complement_p))
230d793d
RS
8497 {
8498 count = 0;
8499 varop = XEXP (varop, 0);
8500 continue;
8501 }
8502
3f508eca 8503 /* If we have (xshiftrt (plus FOO BAR) C), and the only bits
951553af 8504 that might be nonzero in BAR are those being shifted out and those
3f508eca
RK
8505 bits are known zero in FOO, we can replace the PLUS with FOO.
8506 Similarly in the other operand order. This code occurs when
8507 we are computing the size of a variable-size array. */
8508
8509 if ((code == ASHIFTRT || code == LSHIFTRT)
5f4f0e22 8510 && count < HOST_BITS_PER_WIDE_INT
951553af
RK
8511 && nonzero_bits (XEXP (varop, 1), result_mode) >> count == 0
8512 && (nonzero_bits (XEXP (varop, 1), result_mode)
8513 & nonzero_bits (XEXP (varop, 0), result_mode)) == 0)
3f508eca
RK
8514 {
8515 varop = XEXP (varop, 0);
8516 continue;
8517 }
8518 else if ((code == ASHIFTRT || code == LSHIFTRT)
5f4f0e22 8519 && count < HOST_BITS_PER_WIDE_INT
ac49a949 8520 && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
951553af 8521 && 0 == (nonzero_bits (XEXP (varop, 0), result_mode)
3f508eca 8522 >> count)
951553af
RK
8523 && 0 == (nonzero_bits (XEXP (varop, 0), result_mode)
8524 & nonzero_bits (XEXP (varop, 1),
3f508eca
RK
8525 result_mode)))
8526 {
8527 varop = XEXP (varop, 1);
8528 continue;
8529 }
8530
230d793d
RS
8531 /* (ashift (plus foo C) N) is (plus (ashift foo N) C'). */
8532 if (code == ASHIFT
8533 && GET_CODE (XEXP (varop, 1)) == CONST_INT
8534 && (new = simplify_binary_operation (ASHIFT, result_mode,
8535 XEXP (varop, 1),
5f4f0e22 8536 GEN_INT (count))) != 0
7d171a1e 8537	      && GET_CODE (new) == CONST_INT
230d793d
RS
8538 && merge_outer_ops (&outer_op, &outer_const, PLUS,
8539 INTVAL (new), result_mode, &complement_p))
8540 {
8541 varop = XEXP (varop, 0);
8542 continue;
8543 }
8544 break;
8545
8546 case MINUS:
 8547	  /* If we have (xshiftrt (minus (ashiftrt X C) X) C)
8548 with C the size of VAROP - 1 and the shift is logical if
8549 STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
8550 we have a (gt X 0) operation. If the shift is arithmetic with
8551 STORE_FLAG_VALUE of 1 or logical with STORE_FLAG_VALUE == -1,
8552 we have a (neg (gt X 0)) operation. */
8553
0802d516
RK
8554 if ((STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
8555 && GET_CODE (XEXP (varop, 0)) == ASHIFTRT
230d793d 8556 && count == GET_MODE_BITSIZE (GET_MODE (varop)) - 1
230d793d
RS
8557 && (code == LSHIFTRT || code == ASHIFTRT)
8558 && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT
8559 && INTVAL (XEXP (XEXP (varop, 0), 1)) == count
8560 && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
8561 {
8562 count = 0;
8563 varop = gen_rtx_combine (GT, GET_MODE (varop), XEXP (varop, 1),
8564 const0_rtx);
8565
8566 if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
8567 varop = gen_rtx_combine (NEG, GET_MODE (varop), varop);
8568
8569 continue;
8570 }
8571 break;
8572 }
8573
8574 break;
8575 }
8576
8577 /* We need to determine what mode to do the shift in. If the shift is
f6789c77
RK
8578 a right shift or ROTATE, we must always do it in the mode it was
8579 originally done in. Otherwise, we can do it in MODE, the widest mode
8580 encountered. The code we care about is that of the shift that will
8581 actually be done, not the shift that was originally requested. */
8582 shift_mode
8583 = (code == ASHIFTRT || code == LSHIFTRT || code == ROTATE
8584 ? result_mode : mode);
230d793d
RS
8585
8586 /* We have now finished analyzing the shift. The result should be
8587 a shift of type CODE with SHIFT_MODE shifting VAROP COUNT places. If
8588 OUTER_OP is non-NIL, it is an operation that needs to be applied
8589 to the result of the shift. OUTER_CONST is the relevant constant,
8590 but we must turn off all bits turned off in the shift.
8591
8592 If we were passed a value for X, see if we can use any pieces of
8593 it. If not, make new rtx. */
8594
8595 if (x && GET_RTX_CLASS (GET_CODE (x)) == '2'
8596 && GET_CODE (XEXP (x, 1)) == CONST_INT
8597 && INTVAL (XEXP (x, 1)) == count)
8598 const_rtx = XEXP (x, 1);
8599 else
5f4f0e22 8600 const_rtx = GEN_INT (count);
230d793d
RS
8601
8602 if (x && GET_CODE (XEXP (x, 0)) == SUBREG
8603 && GET_MODE (XEXP (x, 0)) == shift_mode
8604 && SUBREG_REG (XEXP (x, 0)) == varop)
8605 varop = XEXP (x, 0);
8606 else if (GET_MODE (varop) != shift_mode)
8607 varop = gen_lowpart_for_combine (shift_mode, varop);
8608
0f41302f 8609 /* If we can't make the SUBREG, try to return what we were given. */
230d793d
RS
8610 if (GET_CODE (varop) == CLOBBER)
8611 return x ? x : varop;
8612
8613 new = simplify_binary_operation (code, shift_mode, varop, const_rtx);
8614 if (new != 0)
8615 x = new;
8616 else
8617 {
8618 if (x == 0 || GET_CODE (x) != code || GET_MODE (x) != shift_mode)
8619 x = gen_rtx_combine (code, shift_mode, varop, const_rtx);
8620
8621 SUBST (XEXP (x, 0), varop);
8622 SUBST (XEXP (x, 1), const_rtx);
8623 }
8624
224eeff2
RK
8625 /* If we have an outer operation and we just made a shift, it is
8626 possible that we could have simplified the shift were it not
8627 for the outer operation. So try to do the simplification
8628 recursively. */
8629
8630 if (outer_op != NIL && GET_CODE (x) == code
8631 && GET_CODE (XEXP (x, 1)) == CONST_INT)
8632 x = simplify_shift_const (x, code, shift_mode, XEXP (x, 0),
8633 INTVAL (XEXP (x, 1)));
8634
230d793d
RS
 8635  /* If we were doing an LSHIFTRT in a wider mode than it was originally,
8636 turn off all the bits that the shift would have turned off. */
8637 if (orig_code == LSHIFTRT && result_mode != shift_mode)
5f4f0e22 8638 x = simplify_and_const_int (NULL_RTX, shift_mode, x,
230d793d
RS
8639 GET_MODE_MASK (result_mode) >> orig_count);
8640
8641 /* Do the remainder of the processing in RESULT_MODE. */
8642 x = gen_lowpart_for_combine (result_mode, x);
8643
8644 /* If COMPLEMENT_P is set, we have to complement X before doing the outer
8645 operation. */
8646 if (complement_p)
0c1c8ea6 8647 x = gen_unary (NOT, result_mode, result_mode, x);
230d793d
RS
8648
8649 if (outer_op != NIL)
8650 {
5f4f0e22 8651 if (GET_MODE_BITSIZE (result_mode) < HOST_BITS_PER_WIDE_INT)
9fa6d012
TG
8652 {
8653 int width = GET_MODE_BITSIZE (result_mode);
8654
8655 outer_const &= GET_MODE_MASK (result_mode);
8656
8657 /* If this would be an entire word for the target, but is not for
8658 the host, then sign-extend on the host so that the number will
8659 look the same way on the host that it would on the target.
8660
8661 For example, when building a 64 bit alpha hosted 32 bit sparc
8662 targeted compiler, then we want the 32 bit unsigned value -1 to be
8663 represented as a 64 bit value -1, and not as 0x00000000ffffffff.
 8664	     The latter confuses the sparc backend.  */
8665
8666 if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT && BITS_PER_WORD == width
8667 && (outer_const & ((HOST_WIDE_INT) 1 << (width - 1))))
8668 outer_const |= ((HOST_WIDE_INT) (-1) << width);
8669 }
230d793d
RS
8670
8671 if (outer_op == AND)
5f4f0e22 8672 x = simplify_and_const_int (NULL_RTX, result_mode, x, outer_const);
230d793d
RS
8673 else if (outer_op == SET)
8674 /* This means that we have determined that the result is
8675 equivalent to a constant. This should be rare. */
5f4f0e22 8676 x = GEN_INT (outer_const);
230d793d 8677 else if (GET_RTX_CLASS (outer_op) == '1')
0c1c8ea6 8678 x = gen_unary (outer_op, result_mode, result_mode, x);
230d793d 8679 else
5f4f0e22 8680 x = gen_binary (outer_op, result_mode, x, GEN_INT (outer_const));
230d793d
RS
8681 }
8682
8683 return x;
8684}
8685\f
8686/* Like recog, but we receive the address of a pointer to a new pattern.
8687 We try to match the rtx that the pointer points to.
8688 If that fails, we may try to modify or replace the pattern,
8689 storing the replacement into the same pointer object.
8690
8691 Modifications include deletion or addition of CLOBBERs.
8692
8693 PNOTES is a pointer to a location where any REG_UNUSED notes added for
8694 the CLOBBERs are placed.
8695
a29ca9db
RK
8696 PADDED_SCRATCHES is set to the number of (clobber (scratch)) patterns
8697 we had to add.
8698
230d793d
RS
8699 The value is the final insn code from the pattern ultimately matched,
8700 or -1. */
8701
8702static int
a29ca9db 8703recog_for_combine (pnewpat, insn, pnotes, padded_scratches)
230d793d
RS
8704 rtx *pnewpat;
8705 rtx insn;
8706 rtx *pnotes;
a29ca9db 8707 int *padded_scratches;
230d793d
RS
8708{
8709 register rtx pat = *pnewpat;
8710 int insn_code_number;
8711 int num_clobbers_to_add = 0;
8712 int i;
8713 rtx notes = 0;
8714
a29ca9db
RK
8715 *padded_scratches = 0;
8716
974f4146
RK
8717 /* If PAT is a PARALLEL, check to see if it contains the CLOBBER
8718 we use to indicate that something didn't match. If we find such a
8719 thing, force rejection. */
d96023cf 8720 if (GET_CODE (pat) == PARALLEL)
974f4146 8721 for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
d96023cf
RK
8722 if (GET_CODE (XVECEXP (pat, 0, i)) == CLOBBER
8723 && XEXP (XVECEXP (pat, 0, i), 0) == const0_rtx)
974f4146
RK
8724 return -1;
8725
230d793d
RS
8726 /* Is the result of combination a valid instruction? */
8727 insn_code_number = recog (pat, insn, &num_clobbers_to_add);
8728
8729 /* If it isn't, there is the possibility that we previously had an insn
8730 that clobbered some register as a side effect, but the combined
8731 insn doesn't need to do that. So try once more without the clobbers
8732 unless this represents an ASM insn. */
8733
8734 if (insn_code_number < 0 && ! check_asm_operands (pat)
8735 && GET_CODE (pat) == PARALLEL)
8736 {
8737 int pos;
8738
8739 for (pos = 0, i = 0; i < XVECLEN (pat, 0); i++)
8740 if (GET_CODE (XVECEXP (pat, 0, i)) != CLOBBER)
8741 {
8742 if (i != pos)
8743 SUBST (XVECEXP (pat, 0, pos), XVECEXP (pat, 0, i));
8744 pos++;
8745 }
8746
8747 SUBST_INT (XVECLEN (pat, 0), pos);
8748
8749 if (pos == 1)
8750 pat = XVECEXP (pat, 0, 0);
8751
8752 insn_code_number = recog (pat, insn, &num_clobbers_to_add);
8753 }
8754
 8755  /* If we had any clobbers to add, make a new pattern that contains
8756 them. Then check to make sure that all of them are dead. */
8757 if (num_clobbers_to_add)
8758 {
8759 rtx newpat = gen_rtx (PARALLEL, VOIDmode,
8760 gen_rtvec (GET_CODE (pat) == PARALLEL
8761 ? XVECLEN (pat, 0) + num_clobbers_to_add
8762 : num_clobbers_to_add + 1));
8763
8764 if (GET_CODE (pat) == PARALLEL)
8765 for (i = 0; i < XVECLEN (pat, 0); i++)
8766 XVECEXP (newpat, 0, i) = XVECEXP (pat, 0, i);
8767 else
8768 XVECEXP (newpat, 0, 0) = pat;
8769
8770 add_clobbers (newpat, insn_code_number);
8771
8772 for (i = XVECLEN (newpat, 0) - num_clobbers_to_add;
8773 i < XVECLEN (newpat, 0); i++)
8774 {
8775 if (GET_CODE (XEXP (XVECEXP (newpat, 0, i), 0)) == REG
8776 && ! reg_dead_at_p (XEXP (XVECEXP (newpat, 0, i), 0), insn))
8777 return -1;
a29ca9db
RK
8778 else if (GET_CODE (XEXP (XVECEXP (newpat, 0, i), 0)) == SCRATCH)
8779 (*padded_scratches)++;
230d793d
RS
8780 notes = gen_rtx (EXPR_LIST, REG_UNUSED,
8781 XEXP (XVECEXP (newpat, 0, i), 0), notes);
8782 }
8783 pat = newpat;
8784 }
8785
8786 *pnewpat = pat;
8787 *pnotes = notes;
8788
8789 return insn_code_number;
8790}
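/* Illustrative aside (not part of combine.c): the loop above that drops
   CLOBBERs compacts a vector in place by copying each element we keep
   down to the next free slot and then shrinking the recorded length.
   A minimal, self-contained sketch of the same idiom on a plain C
   array, with hypothetical names, fenced off so it cannot affect
   compilation:  */
#if 0
#include <stddef.h>

/* Remove all occurrences of VICTIM from VEC[0..*LEN-1] in place,
   preserving the order of the survivors, exactly as the CLOBBER
   stripping loop above preserves the order of the non-CLOBBER
   elements.  */
static void
compact_vector (int *vec, size_t *len, int victim)
{
  size_t pos = 0, i;

  for (i = 0; i < *len; i++)
    if (vec[i] != victim)
      {
        if (i != pos)
          vec[pos] = vec[i];
        pos++;
      }

  *len = pos;   /* Analogue of SUBST_INT (XVECLEN (pat, 0), pos).  */
}
#endif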
\f
/* Like gen_lowpart but for use by combine.  In combine it is not possible
   to create any new pseudoregs.  However, it is safe to create
   invalid memory addresses, because combine will try to recognize
   them and all they will do is make the combine attempt fail.

   If for some reason this cannot do its job, an rtx
   (clobber (const_int 0)) is returned.
   An insn containing that will not be recognized.  */

#undef gen_lowpart

static rtx
gen_lowpart_for_combine (mode, x)
     enum machine_mode mode;
     register rtx x;
{
  rtx result;

  if (GET_MODE (x) == mode)
    return x;

  /* We can only support MODE being wider than a word if X is a
     constant integer or has a mode the same size.  */

  if (GET_MODE_SIZE (mode) > UNITS_PER_WORD
      && ! ((GET_MODE (x) == VOIDmode
             && (GET_CODE (x) == CONST_INT
                 || GET_CODE (x) == CONST_DOUBLE))
            || GET_MODE_SIZE (GET_MODE (x)) == GET_MODE_SIZE (mode)))
    return gen_rtx (CLOBBER, GET_MODE (x), const0_rtx);

  /* X might be a paradoxical (subreg (mem)).  In that case, gen_lowpart
     won't know what to do.  So we will strip off the SUBREG here and
     process normally.  */
  if (GET_CODE (x) == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
    {
      x = SUBREG_REG (x);
      if (GET_MODE (x) == mode)
        return x;
    }

  result = gen_lowpart_common (mode, x);
  if (result != 0
      && GET_CODE (result) == SUBREG
      && GET_CODE (SUBREG_REG (result)) == REG
      && REGNO (SUBREG_REG (result)) >= FIRST_PSEUDO_REGISTER
      && (GET_MODE_SIZE (GET_MODE (result))
          != GET_MODE_SIZE (GET_MODE (SUBREG_REG (result)))))
    reg_changes_size[REGNO (SUBREG_REG (result))] = 1;

  if (result)
    return result;

  if (GET_CODE (x) == MEM)
    {
      register int offset = 0;
      rtx new;

      /* Refuse to work on a volatile memory ref or one with a mode-dependent
         address.  */
      if (MEM_VOLATILE_P (x) || mode_dependent_address_p (XEXP (x, 0)))
        return gen_rtx (CLOBBER, GET_MODE (x), const0_rtx);

      /* If we want to refer to something bigger than the original memref,
         generate a perverse subreg instead.  That will force a reload
         of the original memref X.  */
      if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (mode))
        return gen_rtx (SUBREG, mode, x, 0);

      if (WORDS_BIG_ENDIAN)
        offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
                  - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
      if (BYTES_BIG_ENDIAN)
        {
          /* Adjust the address so that the address-after-the-data is
             unchanged.  */
          offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
                     - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
        }
      new = gen_rtx (MEM, mode, plus_constant (XEXP (x, 0), offset));
      RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
      MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x);
      MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
      return new;
    }

  /* If X is a comparison operator, rewrite it in a new mode.  This
     probably won't match, but may allow further simplifications.  */
  else if (GET_RTX_CLASS (GET_CODE (x)) == '<')
    return gen_rtx_combine (GET_CODE (x), mode, XEXP (x, 0), XEXP (x, 1));

  /* If we couldn't simplify X any other way, just enclose it in a
     SUBREG.  Normally, this SUBREG won't match, but some patterns may
     include an explicit SUBREG or we may simplify it further in combine.  */
  else
    {
      int word = 0;

      if (WORDS_BIG_ENDIAN && GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD)
        word = ((GET_MODE_SIZE (GET_MODE (x))
                 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD))
                / UNITS_PER_WORD);
      return gen_rtx (SUBREG, mode, x, word);
    }
}
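/* Illustrative aside (not part of combine.c): the WORDS_BIG_ENDIAN /
   BYTES_BIG_ENDIAN arithmetic above chooses the byte offset of the low
   part so that the address just past the data is unchanged.  A
   self-contained sketch with a hypothetical 4-byte word size; the
   EX_MIN/EX_MAX uses mirror the MIN/MAX uses above:  */
#if 0
#define EX_UNITS_PER_WORD 4
#define EX_MIN(a, b) ((a) < (b) ? (a) : (b))
#define EX_MAX(a, b) ((a) > (b) ? (a) : (b))

/* Byte offset of the low LOWPART_SIZE bytes of an ORIG_SIZE-byte value
   when both words and bytes within words are big-endian.  For an
   8-byte value and a 2-byte lowpart this yields 4 + 2 = 6: the low two
   bytes of a big-endian 8-byte value are its last two bytes.  */
static int
big_endian_lowpart_offset (int orig_size, int lowpart_size)
{
  /* Word component: skip the high-order words.  */
  int offset = (EX_MAX (orig_size, EX_UNITS_PER_WORD)
                - EX_MAX (lowpart_size, EX_UNITS_PER_WORD));

  /* Byte component within the last word: keep address-after-data
     fixed, as the comment in the function above says.  */
  offset -= (EX_MIN (EX_UNITS_PER_WORD, lowpart_size)
             - EX_MIN (EX_UNITS_PER_WORD, orig_size));

  return offset;
}
#endif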
\f
/* Make an rtx expression.  This is a subset of gen_rtx and only supports
   expressions of 1, 2, or 3 operands, each of which are rtx expressions.

   If the identical expression was previously in the insn (in the undobuf),
   it will be returned.  Only if it is not found will a new expression
   be made.  */

/*VARARGS2*/
static rtx
gen_rtx_combine VPROTO((enum rtx_code code, enum machine_mode mode, ...))
{
#ifndef __STDC__
  enum rtx_code code;
  enum machine_mode mode;
#endif
  va_list p;
  int n_args;
  rtx args[3];
  int i, j;
  char *fmt;
  rtx rt;
  struct undo *undo;

  VA_START (p, mode);

#ifndef __STDC__
  code = va_arg (p, enum rtx_code);
  mode = va_arg (p, enum machine_mode);
#endif

  n_args = GET_RTX_LENGTH (code);
  fmt = GET_RTX_FORMAT (code);

  if (n_args == 0 || n_args > 3)
    abort ();

  /* Get each arg and verify that it is supposed to be an expression.  */
  for (j = 0; j < n_args; j++)
    {
      if (*fmt++ != 'e')
        abort ();

      args[j] = va_arg (p, rtx);
    }

  /* See if this is in undobuf.  Be sure we don't use objects that came
     from another insn; this could produce circular rtl structures.  */

  for (undo = undobuf.undos; undo != undobuf.previous_undos; undo = undo->next)
    if (!undo->is_int
        && GET_CODE (undo->old_contents.r) == code
        && GET_MODE (undo->old_contents.r) == mode)
      {
        for (j = 0; j < n_args; j++)
          if (XEXP (undo->old_contents.r, j) != args[j])
            break;

        if (j == n_args)
          return undo->old_contents.r;
      }

  /* Otherwise make a new rtx.  We know we have 1, 2, or 3 args.
     Use rtx_alloc instead of gen_rtx because it's faster on RISC.  */
  rt = rtx_alloc (code);
  PUT_MODE (rt, mode);
  XEXP (rt, 0) = args[0];
  if (n_args > 1)
    {
      XEXP (rt, 1) = args[1];
      if (n_args > 2)
        XEXP (rt, 2) = args[2];
    }
  return rt;
}
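/* Illustrative aside (not part of combine.c): the undobuf scan above is
   a simple reuse cache -- before allocating a new node, walk a list of
   nodes we already own and return a structurally identical one if
   present.  A self-contained sketch with hypothetical types:  */
#if 0
#include <stdlib.h>

struct node { int op; struct node *kid[2]; struct node *next; };

/* Return an existing node matching (OP, LEFT, RIGHT) from POOL, or
   allocate a fresh one.  Reuse is safe only when every node in POOL is
   owned by the caller, mirroring the "don't use objects that came from
   another insn" caveat above.  */
static struct node *
get_node (struct node *pool, int op, struct node *left, struct node *right)
{
  struct node *p;

  for (p = pool; p; p = p->next)
    if (p->op == op && p->kid[0] == left && p->kid[1] == right)
      return p;

  p = malloc (sizeof *p);
  if (p)
    {
      p->op = op;
      p->kid[0] = left;
      p->kid[1] = right;
      p->next = 0;
    }
  return p;
}
#endif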

/* These routines make binary and unary operations by first seeing if they
   fold; if not, a new expression is allocated.  */

static rtx
gen_binary (code, mode, op0, op1)
     enum rtx_code code;
     enum machine_mode mode;
     rtx op0, op1;
{
  rtx result;
  rtx tem;

  if (GET_RTX_CLASS (code) == 'c'
      && (GET_CODE (op0) == CONST_INT
          || (CONSTANT_P (op0) && GET_CODE (op1) != CONST_INT)))
    tem = op0, op0 = op1, op1 = tem;

  if (GET_RTX_CLASS (code) == '<')
    {
      enum machine_mode op_mode = GET_MODE (op0);

      /* Strip the COMPARE from (REL_OP (compare X Y) 0) to get
         just (REL_OP X Y).  */
      if (GET_CODE (op0) == COMPARE && op1 == const0_rtx)
        {
          op1 = XEXP (op0, 1);
          op0 = XEXP (op0, 0);
          op_mode = GET_MODE (op0);
        }

      if (op_mode == VOIDmode)
        op_mode = GET_MODE (op1);
      result = simplify_relational_operation (code, op_mode, op0, op1);
    }
  else
    result = simplify_binary_operation (code, mode, op0, op1);

  if (result)
    return result;

  /* Put complex operands first and constants second.  */
  if (GET_RTX_CLASS (code) == 'c'
      && ((CONSTANT_P (op0) && GET_CODE (op1) != CONST_INT)
          || (GET_RTX_CLASS (GET_CODE (op0)) == 'o'
              && GET_RTX_CLASS (GET_CODE (op1)) != 'o')
          || (GET_CODE (op0) == SUBREG
              && GET_RTX_CLASS (GET_CODE (SUBREG_REG (op0))) == 'o'
              && GET_RTX_CLASS (GET_CODE (op1)) != 'o')))
    return gen_rtx_combine (code, mode, op1, op0);

  return gen_rtx_combine (code, mode, op0, op1);
}
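/* Illustrative aside (not part of combine.c): for commutative operators
   gen_binary swaps operands so that constants come second.  The payoff
   is that later code only has to check one operand order.  The same
   normalization, sketched with a hypothetical expression type:  */
#if 0
struct expr { int is_const; int value; };

/* Canonicalize a commutative operand pair in place: constant last.  */
static void
canonicalize_commutative (struct expr **a, struct expr **b)
{
  if ((*a)->is_const && ! (*b)->is_const)
    {
      struct expr *tem = *a;
      *a = *b;
      *b = tem;
    }
}
#endif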

static rtx
gen_unary (code, mode, op0_mode, op0)
     enum rtx_code code;
     enum machine_mode mode, op0_mode;
     rtx op0;
{
  rtx result = simplify_unary_operation (code, mode, op0, op0_mode);

  if (result)
    return result;

  return gen_rtx_combine (code, mode, op0);
}
\f
/* Simplify a comparison between *POP0 and *POP1 where CODE is the
   comparison code that will be tested.

   The result is a possibly different comparison code to use.  *POP0 and
   *POP1 may be updated.

   It is possible that we might detect that a comparison is either always
   true or always false.  However, we do not perform general constant
   folding in combine, so this knowledge isn't useful.  Such tautologies
   should have been detected earlier.  Hence we ignore all such cases.  */

static enum rtx_code
simplify_comparison (code, pop0, pop1)
     enum rtx_code code;
     rtx *pop0;
     rtx *pop1;
{
  rtx op0 = *pop0;
  rtx op1 = *pop1;
  rtx tem, tem1;
  int i;
  enum machine_mode mode, tmode;

  /* Try a few ways of applying the same transformation to both operands.  */
  while (1)
    {
#ifndef WORD_REGISTER_OPERATIONS
      /* The test below this one won't handle SIGN_EXTENDs on these machines,
         so check specially.  */
      if (code != GTU && code != GEU && code != LTU && code != LEU
          && GET_CODE (op0) == ASHIFTRT && GET_CODE (op1) == ASHIFTRT
          && GET_CODE (XEXP (op0, 0)) == ASHIFT
          && GET_CODE (XEXP (op1, 0)) == ASHIFT
          && GET_CODE (XEXP (XEXP (op0, 0), 0)) == SUBREG
          && GET_CODE (XEXP (XEXP (op1, 0), 0)) == SUBREG
          && (GET_MODE (SUBREG_REG (XEXP (XEXP (op0, 0), 0)))
              == GET_MODE (SUBREG_REG (XEXP (XEXP (op1, 0), 0))))
          && GET_CODE (XEXP (op0, 1)) == CONST_INT
          && GET_CODE (XEXP (op1, 1)) == CONST_INT
          && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
          && GET_CODE (XEXP (XEXP (op1, 0), 1)) == CONST_INT
          && INTVAL (XEXP (op0, 1)) == INTVAL (XEXP (op1, 1))
          && INTVAL (XEXP (op0, 1)) == INTVAL (XEXP (XEXP (op0, 0), 1))
          && INTVAL (XEXP (op0, 1)) == INTVAL (XEXP (XEXP (op1, 0), 1))
          && (INTVAL (XEXP (op0, 1))
              == (GET_MODE_BITSIZE (GET_MODE (op0))
                  - (GET_MODE_BITSIZE
                     (GET_MODE (SUBREG_REG (XEXP (XEXP (op0, 0), 0))))))))
        {
          op0 = SUBREG_REG (XEXP (XEXP (op0, 0), 0));
          op1 = SUBREG_REG (XEXP (XEXP (op1, 0), 0));
        }
#endif

      /* If both operands are the same constant shift, see if we can ignore the
         shift.  We can if the shift is a rotate or if the bits shifted out of
         this shift are known to be zero for both inputs and if the type of
         comparison is compatible with the shift.  */
      if (GET_CODE (op0) == GET_CODE (op1)
          && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
          && ((GET_CODE (op0) == ROTATE && (code == NE || code == EQ))
              || ((GET_CODE (op0) == LSHIFTRT || GET_CODE (op0) == ASHIFT)
                  && (code != GT && code != LT && code != GE && code != LE))
              || (GET_CODE (op0) == ASHIFTRT
                  && (code != GTU && code != LTU
                      && code != GEU && code != LEU)))
          && GET_CODE (XEXP (op0, 1)) == CONST_INT
          && INTVAL (XEXP (op0, 1)) >= 0
          && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
          && XEXP (op0, 1) == XEXP (op1, 1))
        {
          enum machine_mode mode = GET_MODE (op0);
          unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
          int shift_count = INTVAL (XEXP (op0, 1));

          if (GET_CODE (op0) == LSHIFTRT || GET_CODE (op0) == ASHIFTRT)
            mask &= (mask >> shift_count) << shift_count;
          else if (GET_CODE (op0) == ASHIFT)
            mask = (mask & (mask << shift_count)) >> shift_count;

          if ((nonzero_bits (XEXP (op0, 0), mode) & ~ mask) == 0
              && (nonzero_bits (XEXP (op1, 0), mode) & ~ mask) == 0)
            op0 = XEXP (op0, 0), op1 = XEXP (op1, 0);
          else
            break;
        }

      /* If both operands are AND's of a paradoxical SUBREG by constant, the
         SUBREGs are of the same mode, and, in both cases, the AND would
         be redundant if the comparison was done in the narrower mode,
         do the comparison in the narrower mode (e.g., we are AND'ing with 1
         and the operand's possibly nonzero bits are 0xffffff01; in that case
         if we only care about QImode, we don't need the AND).  This case
         occurs if the output mode of an scc insn is not SImode and
         STORE_FLAG_VALUE == 1 (e.g., the 386).

         Similarly, check for a case where the AND's are ZERO_EXTEND
         operations from some narrower mode even though a SUBREG is not
         present.  */

      else if (GET_CODE (op0) == AND && GET_CODE (op1) == AND
               && GET_CODE (XEXP (op0, 1)) == CONST_INT
               && GET_CODE (XEXP (op1, 1)) == CONST_INT)
        {
          rtx inner_op0 = XEXP (op0, 0);
          rtx inner_op1 = XEXP (op1, 0);
          HOST_WIDE_INT c0 = INTVAL (XEXP (op0, 1));
          HOST_WIDE_INT c1 = INTVAL (XEXP (op1, 1));
          int changed = 0;

          if (GET_CODE (inner_op0) == SUBREG && GET_CODE (inner_op1) == SUBREG
              && (GET_MODE_SIZE (GET_MODE (inner_op0))
                  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (inner_op0))))
              && (GET_MODE (SUBREG_REG (inner_op0))
                  == GET_MODE (SUBREG_REG (inner_op1)))
              && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0)))
                  <= HOST_BITS_PER_WIDE_INT)
              && (0 == ((~c0) & nonzero_bits (SUBREG_REG (inner_op0),
                                              GET_MODE (SUBREG_REG (op0)))))
              && (0 == ((~c1) & nonzero_bits (SUBREG_REG (inner_op1),
                                              GET_MODE (SUBREG_REG (inner_op1))))))
            {
              op0 = SUBREG_REG (inner_op0);
              op1 = SUBREG_REG (inner_op1);

              /* The resulting comparison is always unsigned since we masked
                 off the original sign bit.  */
              code = unsigned_condition (code);

              changed = 1;
            }

          else if (c0 == c1)
            for (tmode = GET_CLASS_NARROWEST_MODE
                 (GET_MODE_CLASS (GET_MODE (op0)));
                 tmode != GET_MODE (op0); tmode = GET_MODE_WIDER_MODE (tmode))
              if (c0 == GET_MODE_MASK (tmode))
                {
                  op0 = gen_lowpart_for_combine (tmode, inner_op0);
                  op1 = gen_lowpart_for_combine (tmode, inner_op1);
                  code = unsigned_condition (code);
                  changed = 1;
                  break;
                }

          if (! changed)
            break;
        }

      /* If both operands are NOT, we can strip off the outer operation
         and adjust the comparison code for swapped operands; similarly for
         NEG, except that this must be an equality comparison.  */
      else if ((GET_CODE (op0) == NOT && GET_CODE (op1) == NOT)
               || (GET_CODE (op0) == NEG && GET_CODE (op1) == NEG
                   && (code == EQ || code == NE)))
        op0 = XEXP (op0, 0), op1 = XEXP (op1, 0), code = swap_condition (code);

      else
        break;
    }

  /* If the first operand is a constant, swap the operands and adjust the
     comparison code appropriately, but don't do this if the second operand
     is already a constant integer.  */
  if (CONSTANT_P (op0) && GET_CODE (op1) != CONST_INT)
    {
      tem = op0, op0 = op1, op1 = tem;
      code = swap_condition (code);
    }

  /* We now enter a loop during which we will try to simplify the comparison.
     For the most part, we only are concerned with comparisons with zero,
     but some things may really be comparisons with zero but not start
     out looking that way.  */

  while (GET_CODE (op1) == CONST_INT)
    {
      enum machine_mode mode = GET_MODE (op0);
      int mode_width = GET_MODE_BITSIZE (mode);
      unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
      int equality_comparison_p;
      int sign_bit_comparison_p;
      int unsigned_comparison_p;
      HOST_WIDE_INT const_op;

      /* We only want to handle integral modes.  This catches VOIDmode,
         CCmode, and the floating-point modes.  An exception is that we
         can handle VOIDmode if OP0 is a COMPARE or a comparison
         operation.  */

      if (GET_MODE_CLASS (mode) != MODE_INT
          && ! (mode == VOIDmode
                && (GET_CODE (op0) == COMPARE
                    || GET_RTX_CLASS (GET_CODE (op0)) == '<')))
        break;

      /* Get the constant we are comparing against and turn off all bits
         not on in our mode.  */
      const_op = INTVAL (op1);
      if (mode_width <= HOST_BITS_PER_WIDE_INT)
        const_op &= mask;

      /* If we are comparing against a constant power of two and the value
         being compared can only have that single bit nonzero (e.g., it was
         `and'ed with that bit), we can replace this with a comparison
         with zero.  */
      if (const_op
          && (code == EQ || code == NE || code == GE || code == GEU
              || code == LT || code == LTU)
          && mode_width <= HOST_BITS_PER_WIDE_INT
          && exact_log2 (const_op) >= 0
          && nonzero_bits (op0, mode) == const_op)
        {
          code = (code == EQ || code == GE || code == GEU ? NE : EQ);
          op1 = const0_rtx, const_op = 0;
        }

      /* Similarly, if we are comparing a value known to be either -1 or
         0 with -1, change it to the opposite comparison against zero.  */

      if (const_op == -1
          && (code == EQ || code == NE || code == GT || code == LE
              || code == GEU || code == LTU)
          && num_sign_bit_copies (op0, mode) == mode_width)
        {
          code = (code == EQ || code == LE || code == GEU ? NE : EQ);
          op1 = const0_rtx, const_op = 0;
        }

      /* Do some canonicalizations based on the comparison code.  We prefer
         comparisons against zero and then prefer equality comparisons.
         If we can reduce the size of a constant, we will do that too.  */

      switch (code)
        {
        case LT:
          /* < C is equivalent to <= (C - 1).  */
          if (const_op > 0)
            {
              const_op -= 1;
              op1 = GEN_INT (const_op);
              code = LE;
              /* ... fall through to LE case below.  */
            }
          else
            break;

        case LE:
          /* <= C is equivalent to < (C + 1); we do this for C < 0.  */
          if (const_op < 0)
            {
              const_op += 1;
              op1 = GEN_INT (const_op);
              code = LT;
            }

          /* If we are doing a <= 0 comparison on a value known to have
             a zero sign bit, we can replace this with == 0.  */
          else if (const_op == 0
                   && mode_width <= HOST_BITS_PER_WIDE_INT
                   && (nonzero_bits (op0, mode)
                       & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
            code = EQ;
          break;

        case GE:
          /* >= C is equivalent to > (C - 1).  */
          if (const_op > 0)
            {
              const_op -= 1;
              op1 = GEN_INT (const_op);
              code = GT;
              /* ... fall through to GT below.  */
            }
          else
            break;

        case GT:
          /* > C is equivalent to >= (C + 1); we do this for C < 0.  */
          if (const_op < 0)
            {
              const_op += 1;
              op1 = GEN_INT (const_op);
              code = GE;
            }

          /* If we are doing a > 0 comparison on a value known to have
             a zero sign bit, we can replace this with != 0.  */
          else if (const_op == 0
                   && mode_width <= HOST_BITS_PER_WIDE_INT
                   && (nonzero_bits (op0, mode)
                       & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
            code = NE;
          break;

        case LTU:
          /* unsigned < C is equivalent to <= (C - 1).  */
          if (const_op > 0)
            {
              const_op -= 1;
              op1 = GEN_INT (const_op);
              code = LEU;
              /* ... fall through ... */
            }

          /* (unsigned) < 0x80000000 is equivalent to >= 0.  */
          else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
                   && (const_op == (HOST_WIDE_INT) 1 << (mode_width - 1)))
            {
              const_op = 0, op1 = const0_rtx;
              code = GE;
              break;
            }
          else
            break;

        case LEU:
          /* unsigned <= 0 is equivalent to == 0  */
          if (const_op == 0)
            code = EQ;

          /* (unsigned) <= 0x7fffffff is equivalent to >= 0.  */
          else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
                   && (const_op == ((HOST_WIDE_INT) 1 << (mode_width - 1)) - 1))
            {
              const_op = 0, op1 = const0_rtx;
              code = GE;
            }
          break;

        case GEU:
          /* unsigned >= C is equivalent to > (C - 1).  */
          if (const_op > 1)
            {
              const_op -= 1;
              op1 = GEN_INT (const_op);
              code = GTU;
              /* ... fall through ... */
            }

          /* (unsigned) >= 0x80000000 is equivalent to < 0.  */
          else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
                   && (const_op == (HOST_WIDE_INT) 1 << (mode_width - 1)))
            {
              const_op = 0, op1 = const0_rtx;
              code = LT;
              break;
            }
          else
            break;

        case GTU:
          /* unsigned > 0 is equivalent to != 0  */
          if (const_op == 0)
            code = NE;

          /* (unsigned) > 0x7fffffff is equivalent to < 0.  */
          else if ((mode_width <= HOST_BITS_PER_WIDE_INT)
                   && (const_op == ((HOST_WIDE_INT) 1 << (mode_width - 1)) - 1))
            {
              const_op = 0, op1 = const0_rtx;
              code = LT;
            }
          break;
        }

      /* Compute some predicates to simplify code below.  */

      equality_comparison_p = (code == EQ || code == NE);
      sign_bit_comparison_p = ((code == LT || code == GE) && const_op == 0);
      unsigned_comparison_p = (code == LTU || code == LEU || code == GTU
                               || code == GEU);

      /* If this is a sign bit comparison and we can do arithmetic in
         MODE, say that we will only be needing the sign bit of OP0.  */
      if (sign_bit_comparison_p
          && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
        op0 = force_to_mode (op0, mode,
                             ((HOST_WIDE_INT) 1
                              << (GET_MODE_BITSIZE (mode) - 1)),
                             NULL_RTX, 0);

      /* Now try cases based on the opcode of OP0.  If none of the cases
         does a "continue", we exit this loop immediately after the
         switch.  */

      switch (GET_CODE (op0))
        {
        case ZERO_EXTRACT:
          /* If we are extracting a single bit from a variable position in
             a constant that has only a single bit set and are comparing it
             with zero, we can convert this into an equality comparison
             between the position and the location of the single bit.  */

          if (GET_CODE (XEXP (op0, 0)) == CONST_INT
              && XEXP (op0, 1) == const1_rtx
              && equality_comparison_p && const_op == 0
              && (i = exact_log2 (INTVAL (XEXP (op0, 0)))) >= 0)
            {
              if (BITS_BIG_ENDIAN)
#ifdef HAVE_extzv
                i = (GET_MODE_BITSIZE
                     (insn_operand_mode[(int) CODE_FOR_extzv][1]) - 1 - i);
#else
                i = BITS_PER_WORD - 1 - i;
#endif

              op0 = XEXP (op0, 2);
              op1 = GEN_INT (i);
              const_op = i;

              /* Result is nonzero iff shift count is equal to I.  */
              code = reverse_condition (code);
              continue;
            }

          /* ... fall through ... */

        case SIGN_EXTRACT:
          tem = expand_compound_operation (op0);
          if (tem != op0)
            {
              op0 = tem;
              continue;
            }
          break;

        case NOT:
          /* If testing for equality, we can take the NOT of the constant.  */
          if (equality_comparison_p
              && (tem = simplify_unary_operation (NOT, mode, op1, mode)) != 0)
            {
              op0 = XEXP (op0, 0);
              op1 = tem;
              continue;
            }

          /* If just looking at the sign bit, reverse the sense of the
             comparison.  */
          if (sign_bit_comparison_p)
            {
              op0 = XEXP (op0, 0);
              code = (code == GE ? LT : GE);
              continue;
            }
          break;

        case NEG:
          /* If testing for equality, we can take the NEG of the constant.  */
          if (equality_comparison_p
              && (tem = simplify_unary_operation (NEG, mode, op1, mode)) != 0)
            {
              op0 = XEXP (op0, 0);
              op1 = tem;
              continue;
            }

          /* The remaining cases only apply to comparisons with zero.  */
          if (const_op != 0)
            break;

          /* When X is ABS or is known positive,
             (neg X) is < 0 if and only if X != 0.  */

          if (sign_bit_comparison_p
              && (GET_CODE (XEXP (op0, 0)) == ABS
                  || (mode_width <= HOST_BITS_PER_WIDE_INT
                      && (nonzero_bits (XEXP (op0, 0), mode)
                          & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)))
            {
              op0 = XEXP (op0, 0);
              code = (code == LT ? NE : EQ);
              continue;
            }

          /* If we have NEG of something whose two high-order bits are the
             same, we know that "(-a) < 0" is equivalent to "a > 0".  */
          if (num_sign_bit_copies (op0, mode) >= 2)
            {
              op0 = XEXP (op0, 0);
              code = swap_condition (code);
              continue;
            }
          break;

        case ROTATE:
          /* If we are testing equality and our count is a constant, we
             can perform the inverse operation on our RHS.  */
          if (equality_comparison_p && GET_CODE (XEXP (op0, 1)) == CONST_INT
              && (tem = simplify_binary_operation (ROTATERT, mode,
                                                   op1, XEXP (op0, 1))) != 0)
            {
              op0 = XEXP (op0, 0);
              op1 = tem;
              continue;
            }

          /* If we are doing a < 0 or >= 0 comparison, it means we are testing
             a particular bit.  Convert it to an AND of a constant of that
             bit.  This will be converted into a ZERO_EXTRACT.  */
          if (const_op == 0 && sign_bit_comparison_p
              && GET_CODE (XEXP (op0, 1)) == CONST_INT
              && mode_width <= HOST_BITS_PER_WIDE_INT)
            {
              op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
                                            ((HOST_WIDE_INT) 1
                                             << (mode_width - 1
                                                 - INTVAL (XEXP (op0, 1)))));
              code = (code == LT ? NE : EQ);
              continue;
            }

          /* ... fall through ... */

        case ABS:
          /* ABS is ignorable inside an equality comparison with zero.  */
          if (const_op == 0 && equality_comparison_p)
            {
              op0 = XEXP (op0, 0);
              continue;
            }
          break;

        case SIGN_EXTEND:
          /* Can simplify (compare (zero/sign_extend FOO) CONST)
             to (compare FOO CONST) if CONST fits in FOO's mode and we
             are either testing inequality or have an unsigned comparison
             with ZERO_EXTEND or a signed comparison with SIGN_EXTEND.  */
          if (! unsigned_comparison_p
              && (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
                  <= HOST_BITS_PER_WIDE_INT)
              && ((unsigned HOST_WIDE_INT) const_op
                  < (((HOST_WIDE_INT) 1
                      << (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0))) - 1)))))
            {
              op0 = XEXP (op0, 0);
              continue;
            }
          break;

        case SUBREG:
          /* Check for the case where we are comparing A - C1 with C2,
             both constants are smaller than 1/2 the maximum positive
             value in MODE, and the comparison is equality or unsigned.
             In that case, if A is either zero-extended to MODE or has
             sufficient sign bits so that the high-order bit in MODE
             is a copy of the sign in the inner mode, we can prove that it is
             safe to do the operation in the wider mode.  This simplifies
             many range checks.  */

          if (mode_width <= HOST_BITS_PER_WIDE_INT
              && subreg_lowpart_p (op0)
              && GET_CODE (SUBREG_REG (op0)) == PLUS
              && GET_CODE (XEXP (SUBREG_REG (op0), 1)) == CONST_INT
              && INTVAL (XEXP (SUBREG_REG (op0), 1)) < 0
              && (- INTVAL (XEXP (SUBREG_REG (op0), 1))
                  < GET_MODE_MASK (mode) / 2)
              && (unsigned HOST_WIDE_INT) const_op < GET_MODE_MASK (mode) / 2
              && (0 == (nonzero_bits (XEXP (SUBREG_REG (op0), 0),
                                      GET_MODE (SUBREG_REG (op0)))
                        & ~ GET_MODE_MASK (mode))
                  || (num_sign_bit_copies (XEXP (SUBREG_REG (op0), 0),
                                           GET_MODE (SUBREG_REG (op0)))
                      > (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0)))
                         - GET_MODE_BITSIZE (mode)))))
            {
              op0 = SUBREG_REG (op0);
              continue;
            }

          /* If the inner mode is narrower and we are extracting the low part,
             we can treat the SUBREG as if it were a ZERO_EXTEND.  */
          if (subreg_lowpart_p (op0)
              && GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0))) < mode_width)
            /* Fall through */ ;
          else
            break;

          /* ... fall through ... */

        case ZERO_EXTEND:
          if ((unsigned_comparison_p || equality_comparison_p)
              && (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
                  <= HOST_BITS_PER_WIDE_INT)
              && ((unsigned HOST_WIDE_INT) const_op
                  < GET_MODE_MASK (GET_MODE (XEXP (op0, 0)))))
            {
              op0 = XEXP (op0, 0);
              continue;
            }
          break;

        case PLUS:
          /* (eq (plus X A) B) -> (eq X (minus B A)).  We can only do
             this for equality comparisons due to pathological cases involving
             overflows.  */
          if (equality_comparison_p
              && 0 != (tem = simplify_binary_operation (MINUS, mode,
                                                        op1, XEXP (op0, 1))))
            {
              op0 = XEXP (op0, 0);
              op1 = tem;
              continue;
            }

          /* (plus (abs X) (const_int -1)) is < 0 if and only if X == 0.  */
          if (const_op == 0 && XEXP (op0, 1) == constm1_rtx
              && GET_CODE (XEXP (op0, 0)) == ABS && sign_bit_comparison_p)
            {
              op0 = XEXP (XEXP (op0, 0), 0);
              code = (code == LT ? EQ : NE);
              continue;
            }
          break;

        case MINUS:
          /* (eq (minus A B) C) -> (eq A (plus B C)) or
             (eq B (minus A C)), whichever simplifies.  We can only do
             this for equality comparisons due to pathological cases involving
             overflows.  */
          if (equality_comparison_p
              && 0 != (tem = simplify_binary_operation (PLUS, mode,
                                                        XEXP (op0, 1), op1)))
            {
              op0 = XEXP (op0, 0);
              op1 = tem;
              continue;
            }

          if (equality_comparison_p
              && 0 != (tem = simplify_binary_operation (MINUS, mode,
                                                        XEXP (op0, 0), op1)))
            {
              op0 = XEXP (op0, 1);
              op1 = tem;
              continue;
            }

          /* The sign bit of (minus (ashiftrt X C) X), where C is the number
             of bits in X minus 1, is one iff X > 0.  */
          if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == ASHIFTRT
              && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
              && INTVAL (XEXP (XEXP (op0, 0), 1)) == mode_width - 1
              && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
            {
              op0 = XEXP (op0, 1);
              code = (code == GE ? LE : GT);
              continue;
            }
          break;

        case XOR:
          /* (eq (xor A B) C) -> (eq A (xor B C)).  This is a simplification
             if C is zero or B is a constant.  */
          if (equality_comparison_p
              && 0 != (tem = simplify_binary_operation (XOR, mode,
                                                        XEXP (op0, 1), op1)))
            {
              op0 = XEXP (op0, 0);
              op1 = tem;
              continue;
            }
          break;

        case EQ:  case NE:
        case LT:  case LTU:  case LE:  case LEU:
        case GT:  case GTU:  case GE:  case GEU:
          /* We can't do anything if OP0 is a condition code value, rather
             than an actual data value.  */
          if (const_op != 0
#ifdef HAVE_cc0
              || XEXP (op0, 0) == cc0_rtx
#endif
              || GET_MODE_CLASS (GET_MODE (XEXP (op0, 0))) == MODE_CC)
            break;

          /* Get the two operands being compared.  */
          if (GET_CODE (XEXP (op0, 0)) == COMPARE)
            tem = XEXP (XEXP (op0, 0), 0), tem1 = XEXP (XEXP (op0, 0), 1);
          else
            tem = XEXP (op0, 0), tem1 = XEXP (op0, 1);

          /* Check for the cases where we simply want the result of the
             earlier test or the opposite of that result.  */
          if (code == NE
              || (code == EQ && reversible_comparison_p (op0))
              || (GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
                  && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
                  && (STORE_FLAG_VALUE
                      & (((HOST_WIDE_INT) 1
                          << (GET_MODE_BITSIZE (GET_MODE (op0)) - 1))))
                  && (code == LT
                      || (code == GE && reversible_comparison_p (op0)))))
            {
              code = (code == LT || code == NE
                      ? GET_CODE (op0) : reverse_condition (GET_CODE (op0)));
              op0 = tem, op1 = tem1;
              continue;
            }
          break;

        case IOR:
          /* The sign bit of (ior (plus X (const_int -1)) X) is non-zero
             iff X <= 0.  */
          if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == PLUS
              && XEXP (XEXP (op0, 0), 1) == constm1_rtx
              && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
            {
              op0 = XEXP (op0, 1);
              code = (code == GE ? GT : LE);
              continue;
            }
          break;

        case AND:
          /* Convert (and (xshift 1 X) Y) to (and (lshiftrt Y X) 1).  This
             will be converted to a ZERO_EXTRACT later.  */
          if (const_op == 0 && equality_comparison_p
              && GET_CODE (XEXP (op0, 0)) == ASHIFT
              && XEXP (XEXP (op0, 0), 0) == const1_rtx)
            {
              op0 = simplify_and_const_int
                (op0, mode, gen_rtx_combine (LSHIFTRT, mode,
                                             XEXP (op0, 1),
                                             XEXP (XEXP (op0, 0), 1)),
                 (HOST_WIDE_INT) 1);
              continue;
            }

          /* If we are comparing (and (lshiftrt X C1) C2) for equality with
             zero and X is a comparison and C1 and C2 describe only bits set
             in STORE_FLAG_VALUE, we can compare with X.  */
          if (const_op == 0 && equality_comparison_p
              && mode_width <= HOST_BITS_PER_WIDE_INT
              && GET_CODE (XEXP (op0, 1)) == CONST_INT
              && GET_CODE (XEXP (op0, 0)) == LSHIFTRT
              && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
              && INTVAL (XEXP (XEXP (op0, 0), 1)) >= 0
              && INTVAL (XEXP (XEXP (op0, 0), 1)) < HOST_BITS_PER_WIDE_INT)
            {
              mask = ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode))
                      << INTVAL (XEXP (XEXP (op0, 0), 1)));
              if ((~ STORE_FLAG_VALUE & mask) == 0
                  && (GET_RTX_CLASS (GET_CODE (XEXP (XEXP (op0, 0), 0))) == '<'
                      || ((tem = get_last_value (XEXP (XEXP (op0, 0), 0))) != 0
                          && GET_RTX_CLASS (GET_CODE (tem)) == '<')))
                {
                  op0 = XEXP (XEXP (op0, 0), 0);
                  continue;
                }
            }

          /* If we are doing an equality comparison of an AND of a bit equal
             to the sign bit, replace this with a LT or GE comparison of
             the underlying value.  */
          if (equality_comparison_p
              && const_op == 0
              && GET_CODE (XEXP (op0, 1)) == CONST_INT
              && mode_width <= HOST_BITS_PER_WIDE_INT
              && ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode))
                  == (HOST_WIDE_INT) 1 << (mode_width - 1)))
            {
              op0 = XEXP (op0, 0);
              code = (code == EQ ? GE : LT);
              continue;
            }

          /* If this AND operation is really a ZERO_EXTEND from a narrower
             mode, the constant fits within that mode, and this is either an
             equality or unsigned comparison, try to do this comparison in
             the narrower mode.  */
          if ((equality_comparison_p || unsigned_comparison_p)
              && GET_CODE (XEXP (op0, 1)) == CONST_INT
              && (i = exact_log2 ((INTVAL (XEXP (op0, 1))
                                   & GET_MODE_MASK (mode))
                                  + 1)) >= 0
              && const_op >> i == 0
              && (tmode = mode_for_size (i, MODE_INT, 1)) != BLKmode)
            {
              op0 = gen_lowpart_for_combine (tmode, XEXP (op0, 0));
              continue;
            }
          break;

        case ASHIFT:
          /* If we have (compare (ashift FOO N) (const_int C)) and
             the high order N bits of FOO (N+1 if an inequality comparison)
             are known to be zero, we can do this by comparing FOO with C
             shifted right N bits so long as the low-order N bits of C are
             zero.  */
          if (GET_CODE (XEXP (op0, 1)) == CONST_INT
              && INTVAL (XEXP (op0, 1)) >= 0
              && ((INTVAL (XEXP (op0, 1)) + ! equality_comparison_p)
                  < HOST_BITS_PER_WIDE_INT)
              && ((const_op
                   & (((HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1))) - 1)) == 0)
              && mode_width <= HOST_BITS_PER_WIDE_INT
              && (nonzero_bits (XEXP (op0, 0), mode)
                  & ~ (mask >> (INTVAL (XEXP (op0, 1))
                                + ! equality_comparison_p))) == 0)
            {
              const_op >>= INTVAL (XEXP (op0, 1));
              op1 = GEN_INT (const_op);
              op0 = XEXP (op0, 0);
              continue;
            }

          /* If we are doing a sign bit comparison, it means we are testing
             a particular bit.  Convert it to the appropriate AND.  */
          if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 1)) == CONST_INT
              && mode_width <= HOST_BITS_PER_WIDE_INT)
            {
              op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
                                            ((HOST_WIDE_INT) 1
                                             << (mode_width - 1
                                                 - INTVAL (XEXP (op0, 1)))));
              code = (code == LT ? NE : EQ);
              continue;
            }

          /* If this is an equality comparison with zero and we are shifting
             the low bit to the sign bit, we can convert this to an AND of the
             low-order bit.  */
          if (const_op == 0 && equality_comparison_p
              && GET_CODE (XEXP (op0, 1)) == CONST_INT
              && INTVAL (XEXP (op0, 1)) == mode_width - 1)
            {
              op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
                                            (HOST_WIDE_INT) 1);
              continue;
            }
          break;

        case ASHIFTRT:
          /* If this is an equality comparison with zero, we can do this
             as a logical shift, which might be much simpler.  */
          if (equality_comparison_p && const_op == 0
              && GET_CODE (XEXP (op0, 1)) == CONST_INT)
            {
              op0 = simplify_shift_const (NULL_RTX, LSHIFTRT, mode,
                                          XEXP (op0, 0),
                                          INTVAL (XEXP (op0, 1)));
              continue;
            }

          /* If OP0 is a sign extension and CODE is not an unsigned comparison,
             do the comparison in a narrower mode.  */
          if (! unsigned_comparison_p
              && GET_CODE (XEXP (op0, 1)) == CONST_INT
              && GET_CODE (XEXP (op0, 0)) == ASHIFT
              && XEXP (op0, 1) == XEXP (XEXP (op0, 0), 1)
              && (tmode = mode_for_size (mode_width - INTVAL (XEXP (op0, 1)),
                                         MODE_INT, 1)) != BLKmode
              && ((unsigned HOST_WIDE_INT) const_op <= GET_MODE_MASK (tmode)
                  || ((unsigned HOST_WIDE_INT) - const_op
                      <= GET_MODE_MASK (tmode))))
            {
              op0 = gen_lowpart_for_combine (tmode, XEXP (XEXP (op0, 0), 0));
              continue;
            }

          /* ... fall through ... */
        case LSHIFTRT:
          /* If we have (compare (xshiftrt FOO N) (const_int C)) and
             the low order N bits of FOO are known to be zero, we can do this
             by comparing FOO with C shifted left N bits so long as no
             overflow occurs.  */
          if (GET_CODE (XEXP (op0, 1)) == CONST_INT
              && INTVAL (XEXP (op0, 1)) >= 0
              && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
              && mode_width <= HOST_BITS_PER_WIDE_INT
              && (nonzero_bits (XEXP (op0, 0), mode)
                  & (((HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1))) - 1)) == 0
              && (const_op == 0
                  || (floor_log2 (const_op) + INTVAL (XEXP (op0, 1))
                      < mode_width)))
            {
              const_op <<= INTVAL (XEXP (op0, 1));
              op1 = GEN_INT (const_op);
              op0 = XEXP (op0, 0);
              continue;
            }

          /* If we are using this shift to extract just the sign bit, we
             can replace this with an LT or GE comparison.  */
          if (const_op == 0
              && (equality_comparison_p || sign_bit_comparison_p)
              && GET_CODE (XEXP (op0, 1)) == CONST_INT
              && INTVAL (XEXP (op0, 1)) == mode_width - 1)
            {
              op0 = XEXP (op0, 0);
              code = (code == NE || code == GT ? LT : GE);
              continue;
            }
          break;
        }

      break;
    }

  /* Now make any compound operations involved in this comparison.  Then,
     check for an outermost SUBREG on OP0 that isn't doing anything or is
     paradoxical.  The latter case can only occur when it is known that the
     "extra" bits will be zero.  Therefore, it is safe to remove the SUBREG.
     We can never remove a SUBREG for a non-equality comparison because the
     sign bit is in a different place in the underlying object.  */

  op0 = make_compound_operation (op0, op1 == const0_rtx ? COMPARE : SET);
  op1 = make_compound_operation (op1, SET);

  if (GET_CODE (op0) == SUBREG && subreg_lowpart_p (op0)
      && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
      && (code == NE || code == EQ)
      && ((GET_MODE_SIZE (GET_MODE (op0))
           > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0))))))
    {
      op0 = SUBREG_REG (op0);
      op1 = gen_lowpart_for_combine (GET_MODE (op0), op1);
    }

  else if (GET_CODE (op0) == SUBREG && subreg_lowpart_p (op0)
           && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
           && (code == NE || code == EQ)
           && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0)))
               <= HOST_BITS_PER_WIDE_INT)
           && (nonzero_bits (SUBREG_REG (op0), GET_MODE (SUBREG_REG (op0)))
               & ~ GET_MODE_MASK (GET_MODE (op0))) == 0
           && (tem = gen_lowpart_for_combine (GET_MODE (SUBREG_REG (op0)),
                                              op1),
               (nonzero_bits (tem, GET_MODE (SUBREG_REG (op0)))
                & ~ GET_MODE_MASK (GET_MODE (op0))) == 0))
    op0 = SUBREG_REG (op0), op1 = tem;

  /* We now do the opposite procedure: Some machines don't have compare
     insns in all modes.  If OP0's mode is an integer mode smaller than a
     word and we can't do a compare in that mode, see if there is a larger
     mode for which we can do the compare.  There are a number of cases in
     which we can use the wider mode.  */

  mode = GET_MODE (op0);
  if (mode != VOIDmode && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_SIZE (mode) < UNITS_PER_WORD
      && cmp_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
    for (tmode = GET_MODE_WIDER_MODE (mode);
         (tmode != VOIDmode
          && GET_MODE_BITSIZE (tmode) <= HOST_BITS_PER_WIDE_INT);
         tmode = GET_MODE_WIDER_MODE (tmode))
      if (cmp_optab->handlers[(int) tmode].insn_code != CODE_FOR_nothing)
        {
          /* If the only nonzero bits in OP0 and OP1 are those in the
             narrower mode and this is an equality or unsigned comparison,
             we can use the wider mode.  Similarly for sign-extended
             values, in which case it is true for all comparisons.  */
          if (((code == EQ || code == NE
                || code == GEU || code == GTU || code == LEU || code == LTU)
               && (nonzero_bits (op0, tmode) & ~ GET_MODE_MASK (mode)) == 0
               && (nonzero_bits (op1, tmode) & ~ GET_MODE_MASK (mode)) == 0)
              || ((num_sign_bit_copies (op0, tmode)
                   > GET_MODE_BITSIZE (tmode) - GET_MODE_BITSIZE (mode))
                  && (num_sign_bit_copies (op1, tmode)
                      > GET_MODE_BITSIZE (tmode) - GET_MODE_BITSIZE (mode))))
            {
              op0 = gen_lowpart_for_combine (tmode, op0);
              op1 = gen_lowpart_for_combine (tmode, op1);
              break;
            }

          /* If this is a test for negative, we can make an explicit
             test of the sign bit.  */

          if (op1 == const0_rtx && (code == LT || code == GE)
              && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
            {
              op0 = gen_binary (AND, tmode,
                                gen_lowpart_for_combine (tmode, op0),
                                GEN_INT ((HOST_WIDE_INT) 1
                                         << (GET_MODE_BITSIZE (mode) - 1)));
              code = (code == LT) ? NE : EQ;
              break;
            }
        }

#ifdef CANONICALIZE_COMPARISON
  /* If this machine only supports a subset of valid comparisons, see if we
     can convert an unsupported one into a supported one.  */
  CANONICALIZE_COMPARISON (code, op0, op1);
#endif

  *pop0 = op0;
  *pop1 = op1;

  return code;
}
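/* Illustrative aside (not part of combine.c): the switch over CODE near
   the top of simplify_comparison rewrites "x < C" as "x <= C-1",
   "(unsigned) x < 0x80000000" as "x >= 0", and so on.  These are plain
   integer identities and can be checked directly; a minimal sketch,
   assuming 32-bit two's-complement int (the usual assumption in this
   file), with hypothetical names:  */
#if 0
#include <assert.h>
#include <limits.h>

static void
check_comparison_identities (int x, unsigned int u, int c)
{
  /* Signed: x < C  ==  x <= C-1, valid whenever C-1 doesn't wrap.  */
  if (c > INT_MIN)
    assert ((x < c) == (x <= c - 1));

  /* Unsigned: u < 0x80000000  ==  (int) u >= 0; the test just asks
     whether the sign bit is clear.  */
  assert ((u < 0x80000000u) == ((int) u >= 0));

  /* Unsigned: u > 0x7fffffff  ==  (int) u < 0 (sign bit set).  */
  assert ((u > 0x7fffffffu) == ((int) u < 0));
}
#endif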
\f
/* Return 1 if we know that X, a comparison operation, is not operating
   on a floating-point value or is EQ or NE, meaning that we can safely
   reverse it.  */

static int
reversible_comparison_p (x)
     rtx x;
{
  if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
      || flag_fast_math
      || GET_CODE (x) == NE || GET_CODE (x) == EQ)
    return 1;

  switch (GET_MODE_CLASS (GET_MODE (XEXP (x, 0))))
    {
    case MODE_INT:
    case MODE_PARTIAL_INT:
    case MODE_COMPLEX_INT:
      return 1;

    case MODE_CC:
      /* If the mode of the condition codes tells us that this is safe,
         we need look no further.  */
      if (REVERSIBLE_CC_MODE (GET_MODE (XEXP (x, 0))))
        return 1;

      /* Otherwise try and find where the condition codes were last set and
         use that.  */
      x = get_last_value (XEXP (x, 0));
      return (x && GET_CODE (x) == COMPARE
              && ! FLOAT_MODE_P (GET_MODE (XEXP (x, 0))));
    }

  return 0;
}
\f
/* Utility function for the following routine.  Called when X is part of a
   value being stored into reg_last_set_value.  Sets reg_last_set_table_tick
   for each register mentioned.  Similar to mention_regs in cse.c.  */

static void
update_table_tick (x)
     rtx x;
{
  register enum rtx_code code = GET_CODE (x);
  register char *fmt = GET_RTX_FORMAT (code);
  register int i;

  if (code == REG)
    {
      int regno = REGNO (x);
      int endregno = regno + (regno < FIRST_PSEUDO_REGISTER
                              ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);

      for (i = regno; i < endregno; i++)
        reg_last_set_table_tick[i] = label_tick;

      return;
    }

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    /* Note that we can't have an "E" in values stored; see
       get_last_value_validate.  */
    if (fmt[i] == 'e')
      update_table_tick (XEXP (x, i));
}
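/* Illustrative aside (not part of combine.c): rtl walkers like the one
   above are driven by GET_RTX_FORMAT, a per-opcode string in which 'e'
   marks an operand that is itself an expression.  The same pattern on a
   hypothetical tagged tree, with a made-up format table:  */
#if 0
struct tree_node { int code; struct tree_node *op[3]; int leaf_value; };

/* Hypothetical per-code format table: 'e' = subexpression, 'i' = leaf.  */
static const char *const format_table[] = { "i", "e", "ee", "eee" };

static void
visit_leaves (struct tree_node *t, void (*fn) (int))
{
  const char *fmt = format_table[t->code];
  int i;

  for (i = 0; fmt[i]; i++)
    if (fmt[i] == 'e')
      visit_leaves (t->op[i], fn);      /* Recurse into subexpressions.  */
    else
      fn (t->leaf_value);               /* Handle a leaf operand.  */
}
#endif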

/* Record that REG is set to VALUE in insn INSN.  If VALUE is zero, we
   are saying that the register is clobbered and we no longer know its
   value.  If INSN is zero, don't update reg_last_set; this is only permitted
   with VALUE also zero and is used to invalidate the register.  */

static void
record_value_for_reg (reg, insn, value)
     rtx reg;
     rtx insn;
     rtx value;
{
  int regno = REGNO (reg);
  int endregno = regno + (regno < FIRST_PSEUDO_REGISTER
                          ? HARD_REGNO_NREGS (regno, GET_MODE (reg)) : 1);
  int i;

  /* If VALUE contains REG and we have a previous value for REG, substitute
     the previous value.  */
  if (value && insn && reg_overlap_mentioned_p (reg, value))
    {
      rtx tem;

      /* Set things up so get_last_value is allowed to see anything set up to
         our insn.  */
      subst_low_cuid = INSN_CUID (insn);
      tem = get_last_value (reg);

      if (tem)
        value = replace_rtx (copy_rtx (value), reg, tem);
    }

  /* For each register modified, show we don't know its value, that
     we don't know about its bitwise content, that its value has been
     updated, and that we don't know the location of the death of the
     register.  */
  for (i = regno; i < endregno; i++)
    {
      if (insn)
        reg_last_set[i] = insn;
      reg_last_set_value[i] = 0;
      reg_last_set_mode[i] = 0;
      reg_last_set_nonzero_bits[i] = 0;
      reg_last_set_sign_bit_copies[i] = 0;
      reg_last_death[i] = 0;
    }

  /* Mark registers that are being referenced in this value.  */
  if (value)
    update_table_tick (value);

  /* Now update the status of each register being set.
     If someone is using this register in this block, set this register
     to invalid since we will get confused between the two lives in this
     basic block.  This makes using this register always invalid.  In cse, we
     scan the table to invalidate all entries using this register, but this
     is too much work for us.  */

  for (i = regno; i < endregno; i++)
    {
      reg_last_set_label[i] = label_tick;
      if (value && reg_last_set_table_tick[i] == label_tick)
        reg_last_set_invalid[i] = 1;
      else
        reg_last_set_invalid[i] = 0;
    }

  /* The value being assigned might refer to X (like in "x++;").  In that
     case, we must replace it with (clobber (const_int 0)) to prevent
     infinite loops.  */
  if (value && ! get_last_value_validate (&value,
                                          reg_last_set_label[regno], 0))
    {
      value = copy_rtx (value);
      if (! get_last_value_validate (&value, reg_last_set_label[regno], 1))
        value = 0;
    }

  /* For the main register being modified, update the value, the mode, the
     nonzero bits, and the number of sign bit copies.  */

  reg_last_set_value[regno] = value;

  if (value)
    {
      subst_low_cuid = INSN_CUID (insn);
      reg_last_set_mode[regno] = GET_MODE (reg);
      reg_last_set_nonzero_bits[regno] = nonzero_bits (value, GET_MODE (reg));
      reg_last_set_sign_bit_copies[regno]
        = num_sign_bit_copies (value, GET_MODE (reg));
    }
}

/* Used for communication between the following two routines.  */
static rtx record_dead_insn;

/* Called via note_stores from record_dead_and_set_regs to handle one
   SET or CLOBBER in an insn.  */

static void
record_dead_and_set_regs_1 (dest, setter)
     rtx dest, setter;
{
  if (GET_CODE (dest) == SUBREG)
    dest = SUBREG_REG (dest);

  if (GET_CODE (dest) == REG)
    {
      /* If we are setting the whole register, we know its value.  Otherwise
         show that we don't know the value.  We can handle SUBREG in
         some cases.  */
      if (GET_CODE (setter) == SET && dest == SET_DEST (setter))
        record_value_for_reg (dest, record_dead_insn, SET_SRC (setter));
      else if (GET_CODE (setter) == SET
               && GET_CODE (SET_DEST (setter)) == SUBREG
               && SUBREG_REG (SET_DEST (setter)) == dest
               && GET_MODE_BITSIZE (GET_MODE (dest)) <= BITS_PER_WORD
               && subreg_lowpart_p (SET_DEST (setter)))
        record_value_for_reg (dest, record_dead_insn,
                              gen_lowpart_for_combine (GET_MODE (dest),
                                                       SET_SRC (setter)));
      else
        record_value_for_reg (dest, record_dead_insn, NULL_RTX);
    }
  else if (GET_CODE (dest) == MEM
           /* Ignore pushes, they clobber nothing.  */
           && ! push_operand (dest, GET_MODE (dest)))
    mem_last_set = INSN_CUID (record_dead_insn);
}

/* Update the records of when each REG was most recently set or killed
   for the things done by INSN.  This is the last thing done in processing
   INSN in the combiner loop.

   We update reg_last_set, reg_last_set_value, reg_last_set_mode,
   reg_last_set_nonzero_bits, reg_last_set_sign_bit_copies, reg_last_death,
   and also the similar information mem_last_set (which insn most recently
   modified memory) and last_call_cuid (which insn was the most recent
   subroutine call).  */

static void
record_dead_and_set_regs (insn)
     rtx insn;
{
  register rtx link;
  int i;

  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    {
      if (REG_NOTE_KIND (link) == REG_DEAD
          && GET_CODE (XEXP (link, 0)) == REG)
        {
          int regno = REGNO (XEXP (link, 0));
          int endregno
            = regno + (regno < FIRST_PSEUDO_REGISTER
                       ? HARD_REGNO_NREGS (regno, GET_MODE (XEXP (link, 0)))
                       : 1);

          for (i = regno; i < endregno; i++)
            reg_last_death[i] = insn;
        }
      else if (REG_NOTE_KIND (link) == REG_INC)
        record_value_for_reg (XEXP (link, 0), insn, NULL_RTX);
    }

  if (GET_CODE (insn) == CALL_INSN)
    {
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
        if (call_used_regs[i])
          {
            reg_last_set_value[i] = 0;
            reg_last_set_mode[i] = 0;
            reg_last_set_nonzero_bits[i] = 0;
            reg_last_set_sign_bit_copies[i] = 0;
            reg_last_death[i] = 0;
          }

      last_call_cuid = mem_last_set = INSN_CUID (insn);
    }

  record_dead_insn = insn;
  note_stores (PATTERN (insn), record_dead_and_set_regs_1);
}
\f
/* Utility routine for the following function.  Verify that all the registers
   mentioned in *LOC are valid when *LOC was part of a value set when
   label_tick == TICK.  Return 0 if some are not.

   If REPLACE is non-zero, replace the invalid reference with
   (clobber (const_int 0)) and return 1.  This replacement is useful because
   we often can get useful information about the form of a value (e.g., if
   it was produced by a shift that always produces -1 or 0) even though
   we don't know exactly what registers it was produced from.  */

static int
get_last_value_validate (loc, tick, replace)
     rtx *loc;
     int tick;
     int replace;
{
  rtx x = *loc;
  char *fmt = GET_RTX_FORMAT (GET_CODE (x));
  int len = GET_RTX_LENGTH (GET_CODE (x));
  int i;

  if (GET_CODE (x) == REG)
    {
      int regno = REGNO (x);
      int endregno = regno + (regno < FIRST_PSEUDO_REGISTER
                              ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
      int j;

      for (j = regno; j < endregno; j++)
        if (reg_last_set_invalid[j]
            /* If this is a pseudo-register that was only set once, it is
               always valid.  */
            || (! (regno >= FIRST_PSEUDO_REGISTER && reg_n_sets[regno] == 1)
                && reg_last_set_label[j] > tick))
          {
            if (replace)
              *loc = gen_rtx (CLOBBER, GET_MODE (x), const0_rtx);
            return replace;
          }

      return 1;
    }

  for (i = 0; i < len; i++)
    if ((fmt[i] == 'e'
         && get_last_value_validate (&XEXP (x, i), tick, replace) == 0)
        /* Don't bother with these.  They shouldn't occur anyway.  */
        || fmt[i] == 'E')
      return 0;

  /* If we haven't found a reason for it to be invalid, it is valid.  */
  return 1;
}

/* Get the last value assigned to X, if known.  Some registers
   in the value may be replaced with (clobber (const_int 0)) if their value
   is no longer known reliably.  */

static rtx
get_last_value (x)
     rtx x;
{
  int regno;
  rtx value;

  /* If this is a non-paradoxical SUBREG, get the value of its operand and
     then convert it to the desired mode.  If this is a paradoxical SUBREG,
     we cannot predict what values the "extra" bits might have.  */
  if (GET_CODE (x) == SUBREG
      && subreg_lowpart_p (x)
      && (GET_MODE_SIZE (GET_MODE (x))
          <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
      && (value = get_last_value (SUBREG_REG (x))) != 0)
    return gen_lowpart_for_combine (GET_MODE (x), value);

  if (GET_CODE (x) != REG)
    return 0;

  regno = REGNO (x);
  value = reg_last_set_value[regno];

  /* If we don't have a value or if it isn't for this basic block,
     return 0.  */

  if (value == 0
      || (reg_n_sets[regno] != 1
          && reg_last_set_label[regno] != label_tick))
    return 0;

  /* If the value was set in a later insn than the ones we are processing,
     we can't use it even if the register was only set once, but make a quick
     check to see if the previous insn set it to something.  This is commonly
     the case when the same pseudo is used by repeated insns.

     This does not work if there exists an instruction which is temporarily
     not on the insn chain.  */

  if (INSN_CUID (reg_last_set[regno]) >= subst_low_cuid)
    {
      rtx insn, set;

      /* We cannot do anything useful in this case, because there is
         an instruction which is not on the insn chain.  */
      if (subst_prev_insn)
        return 0;

      /* Skip over USE insns.  They are not useful here, and they may have
         been made by combine, in which case they do not have an INSN_CUID
         value.  We can't use prev_real_insn, because that would incorrectly
         take us backwards across labels.  Skip over BARRIERs also, since
         they could have been made by combine.  If we see one, we must be
         optimizing dead code, so it doesn't matter what we do.  */
      for (insn = prev_nonnote_insn (subst_insn);
           insn && ((GET_CODE (insn) == INSN
                     && GET_CODE (PATTERN (insn)) == USE)
                    || GET_CODE (insn) == BARRIER
                    || INSN_CUID (insn) >= subst_low_cuid);
           insn = prev_nonnote_insn (insn))
        ;

      if (insn
          && (set = single_set (insn)) != 0
          && rtx_equal_p (SET_DEST (set), x))
        {
          value = SET_SRC (set);

          /* Make sure that VALUE doesn't reference X.  Replace any
             explicit references with a CLOBBER.  If there are any remaining
             references (rare), don't use the value.  */

          if (reg_mentioned_p (x, value))
            value = replace_rtx (copy_rtx (value), x,
                                 gen_rtx (CLOBBER, GET_MODE (x), const0_rtx));

          if (reg_overlap_mentioned_p (x, value))
            return 0;
        }
      else
        return 0;
    }

  /* If the value has all its registers valid, return it.  */
  if (get_last_value_validate (&value, reg_last_set_label[regno], 0))
    return value;

  /* Otherwise, make a copy and replace any invalid register with
     (clobber (const_int 0)).  If that fails for some reason, return 0.  */

  value = copy_rtx (value);
  if (get_last_value_validate (&value, reg_last_set_label[regno], 1))
    return value;

  return 0;
}
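
/* An illustrative example (not part of the original sources): if pseudo 65
   was set exactly once by

        (set (reg:SI 65) (lshiftrt:SI (reg:SI 66) (const_int 31)))

   then get_last_value applied to (reg:SI 65) returns that LSHIFTRT
   expression, with (reg:SI 66) demoted to (clobber (const_int 0)) if reg
   66 has been modified since; a caller can still deduce that the value
   must be either 0 or 1.  */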
\f
/* Return nonzero if expression X refers to a REG or to memory
   that is set in an instruction more recent than FROM_CUID.  */

static int
use_crosses_set_p (x, from_cuid)
     register rtx x;
     int from_cuid;
{
  register char *fmt;
  register int i;
  register enum rtx_code code = GET_CODE (x);

  if (code == REG)
    {
      register int regno = REGNO (x);
      int endreg = regno + (regno < FIRST_PSEUDO_REGISTER
                            ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);

#ifdef PUSH_ROUNDING
      /* Don't allow uses of the stack pointer to be moved,
         because we don't know whether the move crosses a push insn.  */
      if (regno == STACK_POINTER_REGNUM)
        return 1;
#endif
      for (; regno < endreg; regno++)
        if (reg_last_set[regno]
            && INSN_CUID (reg_last_set[regno]) > from_cuid)
          return 1;
      return 0;
    }

  if (code == MEM && mem_last_set > from_cuid)
    return 1;

  fmt = GET_RTX_FORMAT (code);

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'E')
        {
          register int j;
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            if (use_crosses_set_p (XVECEXP (x, i, j), from_cuid))
              return 1;
        }
      else if (fmt[i] == 'e'
               && use_crosses_set_p (XEXP (x, i), from_cuid))
        return 1;
    }
  return 0;
}
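
/* An illustrative example (not part of the original sources): given

        insn 10:  (set (reg:SI 65) (reg:SI 66))
        insn 11:  (set (reg:SI 66) (const_int 0))
        insn 12:  (set (reg:SI 67) (reg:SI 65))

   substituting insn 10 into insn 12 would move the use of reg 66 across
   the set in insn 11, so use_crosses_set_p returns 1 for (reg:SI 66)
   with FROM_CUID equal to insn 10's cuid, and the combination must be
   rejected.  */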
\f
/* Define three variables used for communication between the following
   routines.  */

static int reg_dead_regno, reg_dead_endregno;
static int reg_dead_flag;

/* Function called via note_stores from reg_dead_at_p.

   If DEST is within [reg_dead_regno, reg_dead_endregno), set
   reg_dead_flag to 1 if X is a CLOBBER and to -1 if it is a SET.  */

static void
reg_dead_at_p_1 (dest, x)
     rtx dest;
     rtx x;
{
  int regno, endregno;

  if (GET_CODE (dest) != REG)
    return;

  regno = REGNO (dest);
  endregno = regno + (regno < FIRST_PSEUDO_REGISTER
                      ? HARD_REGNO_NREGS (regno, GET_MODE (dest)) : 1);

  if (reg_dead_endregno > regno && reg_dead_regno < endregno)
    reg_dead_flag = (GET_CODE (x) == CLOBBER) ? 1 : -1;
}

/* Return non-zero if REG is known to be dead at INSN.

   We scan backwards from INSN.  If we hit a REG_DEAD note or a CLOBBER
   referencing REG, it is dead.  If we hit a SET referencing REG, it is
   live.  Otherwise, see if it is live or dead at the start of the basic
   block we are in.  Hard regs marked as being live in NEWPAT_USED_REGS
   must be assumed to be always live.  */

static int
reg_dead_at_p (reg, insn)
     rtx reg;
     rtx insn;
{
  int block, i;

  /* Set variables for reg_dead_at_p_1.  */
  reg_dead_regno = REGNO (reg);
  reg_dead_endregno = reg_dead_regno + (reg_dead_regno < FIRST_PSEUDO_REGISTER
                                        ? HARD_REGNO_NREGS (reg_dead_regno,
                                                            GET_MODE (reg))
                                        : 1);

  reg_dead_flag = 0;

  /* Check that reg isn't mentioned in NEWPAT_USED_REGS.  */
  if (reg_dead_regno < FIRST_PSEUDO_REGISTER)
    {
      for (i = reg_dead_regno; i < reg_dead_endregno; i++)
        if (TEST_HARD_REG_BIT (newpat_used_regs, i))
          return 0;
    }

  /* Scan backwards until we find a REG_DEAD note, SET, CLOBBER, label, or
     beginning of function.  */
  for (; insn && GET_CODE (insn) != CODE_LABEL && GET_CODE (insn) != BARRIER;
       insn = prev_nonnote_insn (insn))
    {
      note_stores (PATTERN (insn), reg_dead_at_p_1);
      if (reg_dead_flag)
        return reg_dead_flag == 1 ? 1 : 0;

      if (find_regno_note (insn, REG_DEAD, reg_dead_regno))
        return 1;
    }

  /* Get the basic block number that we were in.  */
  if (insn == 0)
    block = 0;
  else
    {
      for (block = 0; block < n_basic_blocks; block++)
        if (insn == basic_block_head[block])
          break;

      if (block == n_basic_blocks)
        return 0;
    }

  for (i = reg_dead_regno; i < reg_dead_endregno; i++)
    if (basic_block_live_at_start[block][i / REGSET_ELT_BITS]
        & ((REGSET_ELT_TYPE) 1 << (i % REGSET_ELT_BITS)))
      return 0;

  return 1;
}
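
/* An illustrative example (not part of the original sources): if the
   backward scan from INSN reaches

        (set (reg:SI 2) (const_int 0))

   before seeing any REG_DEAD note for reg 2, note_stores invokes
   reg_dead_at_p_1, which records -1 in reg_dead_flag, and reg_dead_at_p
   returns 0 (the register is live).  Had that insn instead been
   (clobber (reg:SI 2)), the flag would be 1 and the register would be
   considered dead.  */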
\f
/* Note hard registers in X that are used.  This code is similar to
   that in flow.c, but much simpler since we don't care about pseudos.  */

static void
mark_used_regs_combine (x)
     rtx x;
{
  register RTX_CODE code = GET_CODE (x);
  register int regno;
  int i;

  switch (code)
    {
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_INT:
    case CONST:
    case CONST_DOUBLE:
    case PC:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
    case ASM_INPUT:
#ifdef HAVE_cc0
    /* CC0 must die in the insn after it is set, so we don't need to take
       special note of it here.  */
    case CC0:
#endif
      return;

    case CLOBBER:
      /* If we are clobbering a MEM, mark any hard registers inside the
         address as used.  */
      if (GET_CODE (XEXP (x, 0)) == MEM)
        mark_used_regs_combine (XEXP (XEXP (x, 0), 0));
      return;

    case REG:
      regno = REGNO (x);
      /* A hard reg in a wide mode may really be multiple registers.
         If so, mark all of them just like the first.  */
      if (regno < FIRST_PSEUDO_REGISTER)
        {
          /* None of this applies to the stack, frame or arg pointers.  */
          if (regno == STACK_POINTER_REGNUM
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
              || regno == HARD_FRAME_POINTER_REGNUM
#endif
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
              || (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
#endif
              || regno == FRAME_POINTER_REGNUM)
            return;

          i = HARD_REGNO_NREGS (regno, GET_MODE (x));
          while (i-- > 0)
            SET_HARD_REG_BIT (newpat_used_regs, regno + i);
        }
      return;

    case SET:
      {
        /* If setting a MEM, or a SUBREG of a MEM, then note any hard regs in
           the address.  */
        register rtx testreg = SET_DEST (x);

        while (GET_CODE (testreg) == SUBREG
               || GET_CODE (testreg) == ZERO_EXTRACT
               || GET_CODE (testreg) == SIGN_EXTRACT
               || GET_CODE (testreg) == STRICT_LOW_PART)
          testreg = XEXP (testreg, 0);

        if (GET_CODE (testreg) == MEM)
          mark_used_regs_combine (XEXP (testreg, 0));

        mark_used_regs_combine (SET_SRC (x));
        return;
      }
    }

  /* Recursively scan the operands of this expression.  */

  {
    register char *fmt = GET_RTX_FORMAT (code);

    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
        if (fmt[i] == 'e')
          mark_used_regs_combine (XEXP (x, i));
        else if (fmt[i] == 'E')
          {
            register int j;

            for (j = 0; j < XVECLEN (x, i); j++)
              mark_used_regs_combine (XVECEXP (x, i, j));
          }
      }
  }
}
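
/* An illustrative example (not part of the original sources, and assuming
   regs 1 and 6 are hard registers): for the pattern

        (set (mem:SI (plus:SI (reg:SI 6) (const_int 4))) (reg:SI 1))

   the SET case sees that the destination is a MEM and recurses on its
   address, so both reg 6 and reg 1 end up marked in newpat_used_regs,
   whereas a destination that is a plain REG is deliberately not marked.  */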

\f
/* Remove register number REGNO from the dead registers list of INSN.

   Return the note used to record the death, if there was one.  */

rtx
remove_death (regno, insn)
     int regno;
     rtx insn;
{
  register rtx note = find_regno_note (insn, REG_DEAD, regno);

  if (note)
    {
      reg_n_deaths[regno]--;
      remove_note (insn, note);
    }

  return note;
}
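
/* An illustrative example (not part of the original sources): if insn 11
   carries (expr_list:REG_DEAD (reg:SI 65) ...), then remove_death (65,
   insn 11) unlinks that note, decrements reg_n_deaths[65], and returns
   the note so that move_deaths can splice it onto another insn's list.  */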

/* For each register (hardware or pseudo) used within expression X, if its
   death is in an instruction with cuid between FROM_CUID (inclusive) and
   TO_INSN (exclusive), put a REG_DEAD note for that register in the
   list headed by PNOTES.

   That said, don't move registers killed by maybe_kill_insn.

   This is done when X is being merged by combination into TO_INSN.  These
   notes will then be distributed as needed.  */

static void
move_deaths (x, maybe_kill_insn, from_cuid, to_insn, pnotes)
     rtx x;
     rtx maybe_kill_insn;
     int from_cuid;
     rtx to_insn;
     rtx *pnotes;
{
  register char *fmt;
  register int len, i;
  register enum rtx_code code = GET_CODE (x);

  if (code == REG)
    {
      register int regno = REGNO (x);
      register rtx where_dead = reg_last_death[regno];
      register rtx before_dead, after_dead;

      /* Don't move the register if it gets killed in between from and to.  */
      if (maybe_kill_insn && reg_set_p (x, maybe_kill_insn)
          && !reg_referenced_p (x, maybe_kill_insn))
        return;

      /* WHERE_DEAD could be a USE insn made by combine, so first we
         make sure that we have insns with valid INSN_CUID values.  */
      before_dead = where_dead;
      while (before_dead && INSN_UID (before_dead) > max_uid_cuid)
        before_dead = PREV_INSN (before_dead);
      after_dead = where_dead;
      while (after_dead && INSN_UID (after_dead) > max_uid_cuid)
        after_dead = NEXT_INSN (after_dead);

      if (before_dead && after_dead
          && INSN_CUID (before_dead) >= from_cuid
          && (INSN_CUID (after_dead) < INSN_CUID (to_insn)
              || (where_dead != after_dead
                  && INSN_CUID (after_dead) == INSN_CUID (to_insn))))
        {
          rtx note = remove_death (regno, where_dead);

          /* It is possible for the call above to return 0.  This can occur
             when reg_last_death points to I2 or I1 that we combined with.
             In that case make a new note.

             We must also check for the case where X is a hard register
             and NOTE is a death note for a range of hard registers
             including X.  In that case, we must put REG_DEAD notes for
             the remaining registers in place of NOTE.  */

          if (note != 0 && regno < FIRST_PSEUDO_REGISTER
              && (GET_MODE_SIZE (GET_MODE (XEXP (note, 0)))
                  != GET_MODE_SIZE (GET_MODE (x))))
            {
              int deadregno = REGNO (XEXP (note, 0));
              int deadend
                = (deadregno + HARD_REGNO_NREGS (deadregno,
                                                 GET_MODE (XEXP (note, 0))));
              int ourend = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
              int i;

              for (i = deadregno; i < deadend; i++)
                if (i < regno || i >= ourend)
                  REG_NOTES (where_dead)
                    = gen_rtx (EXPR_LIST, REG_DEAD,
                               gen_rtx (REG, reg_raw_mode[i], i),
                               REG_NOTES (where_dead));
            }
          /* If we didn't find any note, and we have a multi-reg hard
             register, then to be safe we must check for REG_DEAD notes
             for each register other than the first.  They could have
             their own REG_DEAD notes lying around.  */
          else if (note == 0 && regno < FIRST_PSEUDO_REGISTER
                   && HARD_REGNO_NREGS (regno, GET_MODE (x)) > 1)
            {
              int ourend = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
              int i;
              rtx oldnotes = 0;

              for (i = regno + 1; i < ourend; i++)
                move_deaths (gen_rtx (REG, reg_raw_mode[i], i),
                             maybe_kill_insn, from_cuid, to_insn, &oldnotes);
            }

          if (note != 0 && GET_MODE (XEXP (note, 0)) == GET_MODE (x))
            {
              XEXP (note, 1) = *pnotes;
              *pnotes = note;
            }
          else
            *pnotes = gen_rtx (EXPR_LIST, REG_DEAD, x, *pnotes);

          reg_n_deaths[regno]++;
        }

      return;
    }

  else if (GET_CODE (x) == SET)
    {
      rtx dest = SET_DEST (x);

      move_deaths (SET_SRC (x), maybe_kill_insn, from_cuid, to_insn, pnotes);

      /* In the case of a ZERO_EXTRACT, a STRICT_LOW_PART, or a SUBREG
         that accesses one word of a multi-word item, some
         piece of every register in the expression is used by
         this insn, so remove any old death.  */

      if (GET_CODE (dest) == ZERO_EXTRACT
          || GET_CODE (dest) == STRICT_LOW_PART
          || (GET_CODE (dest) == SUBREG
              && (((GET_MODE_SIZE (GET_MODE (dest))
                    + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
                  == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
                       + UNITS_PER_WORD - 1) / UNITS_PER_WORD))))
        {
          move_deaths (dest, maybe_kill_insn, from_cuid, to_insn, pnotes);
          return;
        }

      /* If this is some other SUBREG, we know it replaces the entire
         value, so use that as the destination.  */
      if (GET_CODE (dest) == SUBREG)
        dest = SUBREG_REG (dest);

      /* If this is a MEM, adjust deaths of anything used in the address.
         For a REG (the only other possibility), the entire value is
         being replaced so the old value is not used in this insn.  */

      if (GET_CODE (dest) == MEM)
        move_deaths (XEXP (dest, 0), maybe_kill_insn, from_cuid,
                     to_insn, pnotes);
      return;
    }

  else if (GET_CODE (x) == CLOBBER)
    return;

  len = GET_RTX_LENGTH (code);
  fmt = GET_RTX_FORMAT (code);

  for (i = 0; i < len; i++)
    {
      if (fmt[i] == 'E')
        {
          register int j;
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            move_deaths (XVECEXP (x, i, j), maybe_kill_insn, from_cuid,
                         to_insn, pnotes);
        }
      else if (fmt[i] == 'e')
        move_deaths (XEXP (x, i), maybe_kill_insn, from_cuid, to_insn, pnotes);
    }
}
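
/* An illustrative example (not part of the original sources): when insn 10,

        (set (reg:SI 65) (reg:SI 66))   with reg 66 dying at insn 10,

   is merged into a later insn, move_deaths is called on the merged body
   with FROM_CUID equal to insn 10's cuid.  It finds reg 66's death at
   insn 10 via reg_last_death, removes the note there with remove_death,
   and threads it onto *PNOTES so that distribute_notes can place it on
   the insn that now contains the use of reg 66.  */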
\f
/* Return 1 if X is the target of a bit-field assignment in BODY, the
   pattern of an insn.  X must be a REG.  */

static int
reg_bitfield_target_p (x, body)
     rtx x;
     rtx body;
{
  int i;

  if (GET_CODE (body) == SET)
    {
      rtx dest = SET_DEST (body);
      rtx target;
      int regno, tregno, endregno, endtregno;

      if (GET_CODE (dest) == ZERO_EXTRACT)
        target = XEXP (dest, 0);
      else if (GET_CODE (dest) == STRICT_LOW_PART)
        target = SUBREG_REG (XEXP (dest, 0));
      else
        return 0;

      if (GET_CODE (target) == SUBREG)
        target = SUBREG_REG (target);

      if (GET_CODE (target) != REG)
        return 0;

      tregno = REGNO (target), regno = REGNO (x);
      if (tregno >= FIRST_PSEUDO_REGISTER || regno >= FIRST_PSEUDO_REGISTER)
        return target == x;

      endtregno = tregno + HARD_REGNO_NREGS (tregno, GET_MODE (target));
      endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));

      return endregno > tregno && regno < endtregno;
    }

  else if (GET_CODE (body) == PARALLEL)
    for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
      if (reg_bitfield_target_p (x, XVECEXP (body, 0, i)))
        return 1;

  return 0;
}
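
/* An illustrative example (not part of the original sources): for the
   pattern

        (set (zero_extract:SI (reg:SI 3) (const_int 8) (const_int 0))
             (reg:SI 4))

   reg_bitfield_target_p returns 1 for (reg:SI 3), and for any hard
   register overlapping it, because only a field of the destination is
   written.  distribute_notes uses this to omit a REG_DEAD note at such
   an insn, just as it does when the register is fully set there.  */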
\f
/* Given a chain of REG_NOTES originally from FROM_INSN, try to place them
   as appropriate.  I3 and I2 are the insns resulting from the combination
   insns including FROM (I2 may be zero).

   ELIM_I2 and ELIM_I1 are either zero or registers that we know will
   not need REG_DEAD notes because they are being substituted for.  This
   saves searching in the most common cases.

   Each note in the list is either ignored or placed on some insns, depending
   on the type of note.  */

static void
distribute_notes (notes, from_insn, i3, i2, elim_i2, elim_i1)
     rtx notes;
     rtx from_insn;
     rtx i3, i2;
     rtx elim_i2, elim_i1;
{
  rtx note, next_note;
  rtx tem;

  for (note = notes; note; note = next_note)
    {
      rtx place = 0, place2 = 0;

      /* If this NOTE references a pseudo register, ensure it references
         the latest copy of that register.  */
      if (XEXP (note, 0) && GET_CODE (XEXP (note, 0)) == REG
          && REGNO (XEXP (note, 0)) >= FIRST_PSEUDO_REGISTER)
        XEXP (note, 0) = regno_reg_rtx[REGNO (XEXP (note, 0))];

      next_note = XEXP (note, 1);
      switch (REG_NOTE_KIND (note))
        {
        case REG_UNUSED:
          /* Any clobbers for i3 may still exist, and so we must process
             REG_UNUSED notes from that insn.

             Any clobbers from i2 or i1 can only exist if they were added by
             recog_for_combine.  In that case, recog_for_combine created the
             necessary REG_UNUSED notes.  Trying to keep any original
             REG_UNUSED notes from these insns can cause incorrect output
             if it is for the same register as the original i3 dest.
             In that case, we will notice that the register is set in i3,
             and then add a REG_UNUSED note for the destination of i3, which
             is wrong.  However, it is possible to have REG_UNUSED notes from
             i2 or i1 for registers that were both used and clobbered, so
             we keep notes from i2 or i1 if they will turn into REG_DEAD
             notes.  */

          /* If this register is set or clobbered in I3, put the note there
             unless there is one already.  */
          if (reg_set_p (XEXP (note, 0), PATTERN (i3)))
            {
              if (from_insn != i3)
                break;

              if (! (GET_CODE (XEXP (note, 0)) == REG
                     ? find_regno_note (i3, REG_UNUSED, REGNO (XEXP (note, 0)))
                     : find_reg_note (i3, REG_UNUSED, XEXP (note, 0))))
                place = i3;
            }
          /* Otherwise, if this register is used by I3, then this register
             now dies here, so we must put a REG_DEAD note here unless there
             is one already.  */
          else if (reg_referenced_p (XEXP (note, 0), PATTERN (i3))
                   && ! (GET_CODE (XEXP (note, 0)) == REG
                         ? find_regno_note (i3, REG_DEAD,
                                            REGNO (XEXP (note, 0)))
                         : find_reg_note (i3, REG_DEAD, XEXP (note, 0))))
            {
              PUT_REG_NOTE_KIND (note, REG_DEAD);
              place = i3;
            }
          break;

        case REG_EQUAL:
        case REG_EQUIV:
        case REG_NONNEG:
          /* These notes say something about results of an insn.  We can
             only support them if they used to be on I3 in which case they
             remain on I3.  Otherwise they are ignored.

             If the note refers to an expression that is not a constant, we
             must also ignore the note since we cannot tell whether the
             equivalence is still true.  It might be possible to do
             slightly better than this (we only have a problem if I2DEST
             or I1DEST is present in the expression), but it doesn't
             seem worth the trouble.  */

          if (from_insn == i3
              && (XEXP (note, 0) == 0 || CONSTANT_P (XEXP (note, 0))))
            place = i3;
          break;

        case REG_INC:
        case REG_NO_CONFLICT:
        case REG_LABEL:
          /* These notes say something about how a register is used.  They
             must be present on any use of the register in I2 or I3.  */
          if (reg_mentioned_p (XEXP (note, 0), PATTERN (i3)))
            place = i3;

          if (i2 && reg_mentioned_p (XEXP (note, 0), PATTERN (i2)))
            {
              if (place)
                place2 = i2;
              else
                place = i2;
            }
          break;

        case REG_WAS_0:
          /* It is too much trouble to try to see if this note is still
             correct in all situations.  It is better to simply delete it.  */
          break;

        case REG_RETVAL:
          /* If the insn previously containing this note still exists,
             put it back where it was.  Otherwise move it to the previous
             insn.  Adjust the corresponding REG_LIBCALL note.  */
          if (GET_CODE (from_insn) != NOTE)
            place = from_insn;
          else
            {
              tem = find_reg_note (XEXP (note, 0), REG_LIBCALL, NULL_RTX);
              place = prev_real_insn (from_insn);
              if (tem && place)
                XEXP (tem, 0) = place;
            }
          break;

        case REG_LIBCALL:
          /* This is handled similarly to REG_RETVAL.  */
          if (GET_CODE (from_insn) != NOTE)
            place = from_insn;
          else
            {
              tem = find_reg_note (XEXP (note, 0), REG_RETVAL, NULL_RTX);
              place = next_real_insn (from_insn);
              if (tem && place)
                XEXP (tem, 0) = place;
            }
          break;

        case REG_DEAD:
          /* If the register is used as an input in I3, it dies there.
             Similarly for I2, if it is non-zero and adjacent to I3.

             If the register is not used as an input in either I3 or I2
             and it is not one of the registers we were supposed to eliminate,
             there are two possibilities.  We might have a non-adjacent I2
             or we might have somehow eliminated an additional register
             from a computation.  For example, we might have had A & B where
             we discover that B will always be zero.  In this case we will
             eliminate the reference to A.

             In both cases, we must search to see if we can find a previous
             use of A and put the death note there.  */

          if (from_insn
              && GET_CODE (from_insn) == CALL_INSN
              && find_reg_fusage (from_insn, USE, XEXP (note, 0)))
            place = from_insn;
          else if (reg_referenced_p (XEXP (note, 0), PATTERN (i3)))
            place = i3;
          else if (i2 != 0 && next_nonnote_insn (i2) == i3
                   && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
            place = i2;

          if (XEXP (note, 0) == elim_i2 || XEXP (note, 0) == elim_i1)
            break;

          /* If the register is used in both I2 and I3 and it dies in I3,
             we might have added another reference to it.  If reg_n_refs
             was 2, bump it to 3.  This has to be correct since the
             register must have been set somewhere.  The reason this is
             done is because local-alloc.c treats 2 references as a
             special case.  */

          if (place == i3 && i2 != 0 && GET_CODE (XEXP (note, 0)) == REG
              && reg_n_refs[REGNO (XEXP (note, 0))] == 2
              && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
            reg_n_refs[REGNO (XEXP (note, 0))] = 3;

          if (place == 0)
            {
              for (tem = prev_nonnote_insn (i3);
                   place == 0 && tem
                   && (GET_CODE (tem) == INSN || GET_CODE (tem) == CALL_INSN);
                   tem = prev_nonnote_insn (tem))
                {
                  /* If the register is being set at TEM, see if that is all
                     TEM is doing.  If so, delete TEM.  Otherwise, make this
                     into a REG_UNUSED note instead.  */
                  if (reg_set_p (XEXP (note, 0), PATTERN (tem)))
                    {
                      rtx set = single_set (tem);

                      /* Verify that it was the set, and not a clobber that
                         modified the register.  */

                      if (set != 0 && ! side_effects_p (SET_SRC (set))
                          && (rtx_equal_p (XEXP (note, 0), SET_DEST (set))
                              || (GET_CODE (SET_DEST (set)) == SUBREG
                                  && rtx_equal_p (XEXP (note, 0),
                                                  XEXP (SET_DEST (set), 0)))))
                        {
                          /* Move the notes and links of TEM elsewhere.
                             This might delete other dead insns recursively.
                             First set the pattern to something that won't use
                             any register.  */

                          PATTERN (tem) = pc_rtx;

                          distribute_notes (REG_NOTES (tem), tem, tem,
                                            NULL_RTX, NULL_RTX, NULL_RTX);
                          distribute_links (LOG_LINKS (tem));

                          PUT_CODE (tem, NOTE);
                          NOTE_LINE_NUMBER (tem) = NOTE_INSN_DELETED;
                          NOTE_SOURCE_FILE (tem) = 0;
                        }
                      else
                        {
                          PUT_REG_NOTE_KIND (note, REG_UNUSED);

                          /* If there isn't already a REG_UNUSED note, put one
                             here.  */
                          if (! find_regno_note (tem, REG_UNUSED,
                                                 REGNO (XEXP (note, 0))))
                            place = tem;
                          break;
                        }
                    }
                  else if (reg_referenced_p (XEXP (note, 0), PATTERN (tem))
                           || (GET_CODE (tem) == CALL_INSN
                               && find_reg_fusage (tem, USE, XEXP (note, 0))))
                    {
                      place = tem;

                      /* If we are doing a 3->2 combination, and we have a
                         register which formerly died in i3 and was not used
                         by i2, which now no longer dies in i3 and is used in
                         i2 but does not die in i2, and place is between i2
                         and i3, then we may need to move a link from place to
                         i2.  */
                      if (i2 && INSN_UID (place) <= max_uid_cuid
                          && INSN_CUID (place) > INSN_CUID (i2)
                          && from_insn && INSN_CUID (from_insn) > INSN_CUID (i2)
                          && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
                        {
                          rtx links = LOG_LINKS (place);
                          LOG_LINKS (place) = 0;
                          distribute_links (links);
                        }
                      break;
                    }
                }

              /* If we haven't found an insn for the death note and it
                 is still a REG_DEAD note, but we have hit a CODE_LABEL,
                 insert a USE insn for the register at that label and
                 put the death note there.  This prevents problems with
                 call-state tracking in caller-save.c.  */
              if (REG_NOTE_KIND (note) == REG_DEAD && place == 0 && tem != 0)
                {
                  place
                    = emit_insn_after (gen_rtx (USE, VOIDmode, XEXP (note, 0)),
                                       tem);

                  /* If this insn was emitted between blocks, then update
                     basic_block_head of the current block to include it.  */
                  if (basic_block_end[this_basic_block - 1] == tem)
                    basic_block_head[this_basic_block] = place;
                }
            }

          /* If the register is set or already dead at PLACE, we needn't do
             anything with this note if it is still a REG_DEAD note.

             Note that we cannot use just `dead_or_set_p' here since we can
             convert an assignment to a register into a bit-field assignment.
             Therefore, we must also omit the note if the register is the
             target of a bitfield assignment.  */

          if (place && REG_NOTE_KIND (note) == REG_DEAD)
            {
              int regno = REGNO (XEXP (note, 0));

              if (dead_or_set_p (place, XEXP (note, 0))
                  || reg_bitfield_target_p (XEXP (note, 0), PATTERN (place)))
                {
                  /* Unless the register previously died in PLACE, clear
                     reg_last_death.  [I no longer understand why this is
                     being done.] */
                  if (reg_last_death[regno] != place)
                    reg_last_death[regno] = 0;
                  place = 0;
                }
              else
                reg_last_death[regno] = place;

              /* If this is a death note for a hard reg that is occupying
                 multiple registers, ensure that we are still using all
                 parts of the object.  If we find a piece of the object
                 that is unused, we must add a USE for that piece before
                 PLACE and put the appropriate REG_DEAD note on it.

                 An alternative would be to put a REG_UNUSED for the pieces
                 on the insn that set the register, but that can't be done if
                 it is not in the same block.  It is simpler, though less
                 efficient, to add the USE insns.  */

              if (place && regno < FIRST_PSEUDO_REGISTER
                  && HARD_REGNO_NREGS (regno, GET_MODE (XEXP (note, 0))) > 1)
                {
                  int endregno
                    = regno + HARD_REGNO_NREGS (regno,
                                                GET_MODE (XEXP (note, 0)));
                  int all_used = 1;
                  int i;

                  for (i = regno; i < endregno; i++)
                    if (! refers_to_regno_p (i, i + 1, PATTERN (place), 0)
                        && ! find_regno_fusage (place, USE, i))
                      {
                        rtx piece = gen_rtx (REG, reg_raw_mode[i], i);
                        rtx p;

                        /* See if we already placed a USE note for this
                           register in front of PLACE.  */
                        for (p = place;
                             GET_CODE (PREV_INSN (p)) == INSN
                             && GET_CODE (PATTERN (PREV_INSN (p))) == USE;
                             p = PREV_INSN (p))
                          if (rtx_equal_p (piece,
                                           XEXP (PATTERN (PREV_INSN (p)), 0)))
                            {
                              p = 0;
                              break;
                            }

                        if (p)
                          {
                            rtx use_insn
                              = emit_insn_before (gen_rtx (USE, VOIDmode,
                                                           piece),
                                                  p);
                            REG_NOTES (use_insn)
                              = gen_rtx (EXPR_LIST, REG_DEAD, piece,
                                         REG_NOTES (use_insn));
                          }

                        all_used = 0;
                      }

                  /* Check for the case where the register dying partially
                     overlaps the register set by this insn.  */
                  if (all_used)
                    for (i = regno; i < endregno; i++)
                      if (dead_or_set_regno_p (place, i))
                        {
                          all_used = 0;
                          break;
                        }

                  if (! all_used)
                    {
                      /* Put only REG_DEAD notes for pieces that are
                         still used and that are not already dead or set.  */

                      for (i = regno; i < endregno; i++)
                        {
                          rtx piece = gen_rtx (REG, reg_raw_mode[i], i);

                          if ((reg_referenced_p (piece, PATTERN (place))
                               || (GET_CODE (place) == CALL_INSN
                                   && find_reg_fusage (place, USE, piece)))
                              && ! dead_or_set_p (place, piece)
                              && ! reg_bitfield_target_p (piece,
                                                          PATTERN (place)))
                            REG_NOTES (place) = gen_rtx (EXPR_LIST, REG_DEAD,
                                                         piece,
                                                         REG_NOTES (place));
                        }

                      place = 0;
                    }
                }
            }
          break;

        default:
          /* Any other notes should not be present at this point in the
             compilation.  */
          abort ();
        }

      if (place)
        {
          XEXP (note, 1) = REG_NOTES (place);
          REG_NOTES (place) = note;
        }
      else if ((REG_NOTE_KIND (note) == REG_DEAD
                || REG_NOTE_KIND (note) == REG_UNUSED)
               && GET_CODE (XEXP (note, 0)) == REG)
        reg_n_deaths[REGNO (XEXP (note, 0))]--;

      if (place2)
        {
          if ((REG_NOTE_KIND (note) == REG_DEAD
               || REG_NOTE_KIND (note) == REG_UNUSED)
              && GET_CODE (XEXP (note, 0)) == REG)
            reg_n_deaths[REGNO (XEXP (note, 0))]++;

          REG_NOTES (place2) = gen_rtx (GET_CODE (note), REG_NOTE_KIND (note),
                                        XEXP (note, 0), REG_NOTES (place2));
        }
    }
}
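
/* An illustrative example (not part of the original sources): suppose i2,

        (set (reg:SI 65) (reg:SI 66))   REG_DEAD (reg:SI 66)

   was combined into i3 so that i3 now uses reg 66 directly.  When the
   notes of i2 are distributed, reg 66 is referenced by PATTERN (i3), so
   the REG_DEAD note is placed on i3.  A REG_DEAD note for reg 65 itself
   would be dropped when reg 65 is passed as ELIM_I2, since no death note
   is needed for a register that was substituted away.  */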
\f
/* Similarly to above, distribute the LOG_LINKS that used to be present on
   I3, I2, and I1 to new locations.  This is also called in one case to
   add a link pointing at I3 when I3's destination is changed.  */

static void
distribute_links (links)
     rtx links;
{
  rtx link, next_link;

  for (link = links; link; link = next_link)
    {
      rtx place = 0;
      rtx insn;
      rtx set, reg;

      next_link = XEXP (link, 1);

      /* If the insn that this link points to is a NOTE or isn't a single
         set, ignore it.  In the latter case, it isn't clear what we
         can do other than ignore the link, since we can't tell which
         register it was for.  Such links wouldn't be used by combine
         anyway.

         It is not possible for the destination of the target of the link to
         have been changed by combine.  The only potential of this is if we
         replace I3, I2, and I1 by I3 and I2.  But in that case the
         destination of I2 also remains unchanged.  */

      if (GET_CODE (XEXP (link, 0)) == NOTE
          || (set = single_set (XEXP (link, 0))) == 0)
        continue;

      reg = SET_DEST (set);
      while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
             || GET_CODE (reg) == SIGN_EXTRACT
             || GET_CODE (reg) == STRICT_LOW_PART)
        reg = XEXP (reg, 0);

      /* A LOG_LINK is defined as being placed on the first insn that uses
         a register and points to the insn that sets the register.  Start
         searching at the next insn after the target of the link and stop
         when we reach a set of the register or the end of the basic block.

         Note that this correctly handles the link that used to point from
         I3 to I2.  Also note that not much searching is typically done here
         since most links don't point very far away.  */

      for (insn = NEXT_INSN (XEXP (link, 0));
           (insn && (this_basic_block == n_basic_blocks - 1
                     || basic_block_head[this_basic_block + 1] != insn));
           insn = NEXT_INSN (insn))
        if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
            && reg_overlap_mentioned_p (reg, PATTERN (insn)))
          {
            if (reg_referenced_p (reg, PATTERN (insn)))
              place = insn;
            break;
          }
        else if (GET_CODE (insn) == CALL_INSN
                 && find_reg_fusage (insn, USE, reg))
          {
            place = insn;
            break;
          }

      /* If we found a place to put the link, place it there unless there
         is already a link to the same insn as LINK at that point.  */

      if (place)
        {
          rtx link2;

          for (link2 = LOG_LINKS (place); link2; link2 = XEXP (link2, 1))
            if (XEXP (link2, 0) == XEXP (link, 0))
              break;

          if (link2 == 0)
            {
              XEXP (link, 1) = LOG_LINKS (place);
              LOG_LINKS (place) = link;

              /* Set added_links_insn to the earliest insn we added a
                 link to.  */
              if (added_links_insn == 0
                  || INSN_CUID (added_links_insn) > INSN_CUID (place))
                added_links_insn = place;
            }
        }
    }
}
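
/* An illustrative example (not part of the original sources): if a
   LOG_LINK pointed from a now-deleted insn back to insn 10, which sets
   reg 65, distribute_links scans forward from insn 10 within the basic
   block for the first insn that references reg 65, say insn 14, and
   hangs the link there; a later combine attempt can then try merging
   insn 10 into insn 14.  */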
\f
/* Compute INSN_CUID for INSN, which is an insn made by combine.  */

static int
insn_cuid (insn)
     rtx insn;
{
  while (insn != 0 && INSN_UID (insn) > max_uid_cuid
         && GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == USE)
    insn = NEXT_INSN (insn);

  if (INSN_UID (insn) > max_uid_cuid)
    abort ();

  return INSN_CUID (insn);
}
\f
void
dump_combine_stats (file)
     FILE *file;
{
  fprintf
    (file,
     ";; Combiner statistics: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n\n",
     combine_attempts, combine_merges, combine_extras, combine_successes);
}

void
dump_combine_total_stats (file)
     FILE *file;
{
  fprintf
    (file,
     "\n;; Combiner totals: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n",
     total_attempts, total_merges, total_extras, total_successes);
}