/* Optimize by combining instructions for GNU compiler.
   Copyright (C) 1987, 1988, 1992, 1993 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.  */


/* This module is essentially the "combiner" phase of the U. of Arizona
   Portable Optimizer, but redone to work on our list-structured
   representation for RTL instead of their string representation.

   The LOG_LINKS of each insn identify the most recent assignment
   to each REG used in the insn.  It is a list of previous insns,
   each of which contains a SET for a REG that is used in this insn
   and not used or set in between.  LOG_LINKs never cross basic blocks.
   They were set up by the preceding pass (lifetime analysis).

   We try to combine each pair of insns joined by a logical link.
   We also try to combine triples of insns A, B and C when
   C has a link back to B and B has a link back to A.
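
   For instance (an illustrative sketch only; the insns and register
   numbers are made up), if insn A is

	(set (reg 60) (plus (reg 58) (const_int 4)))

   and insn B, which has a logical link back to A, is

	(set (reg 61) (mult (reg 60) (reg 59)))

   then combining B with A tries to rewrite B as

	(set (reg 61) (mult (plus (reg 58) (const_int 4)) (reg 59)))

   and delete A, provided the result matches an insn pattern in the
   machine description.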

   LOG_LINKS does not have links for uses of the CC0.  They are not
   needed, because the insn that sets the CC0 is always immediately
   before the insn that tests it.  So we always regard a branch
   insn as having a logical link to the preceding insn.  The same is true
   for an insn explicitly using CC0.

   We check (with use_crosses_set_p) to avoid combining in such a way
   as to move a computation to a place where its value would be different.

   Combination is done by mathematically substituting the previous
   insn(s) values for the regs they set into the expressions in
   the later insns that refer to these regs.  If the result is a valid insn
   for our target machine, according to the machine description,
   we install it, delete the earlier insns, and update the data flow
   information (LOG_LINKS and REG_NOTES) for what we did.

   There are a few exceptions where the dataflow information created by
   flow.c isn't completely updated:

   - reg_live_length is not updated
   - reg_n_refs is not adjusted in the rare case when a register is
     no longer required in a computation
   - there are extremely rare cases (see distribute_regnotes) when a
     REG_DEAD note is lost
   - a LOG_LINKS entry that refers to an insn with multiple SETs may be
     removed because there is no way to know which register it was
     linking

   To simplify substitution, we combine only when the earlier insn(s)
   consist of only a single assignment.  To simplify updating afterward,
   we never combine when a subroutine call appears in the middle.

   Since we do not represent assignments to CC0 explicitly except when that
   is all an insn does, there is no LOG_LINKS entry in an insn that uses
   the condition code for the insn that set the condition code.
   Fortunately, these two insns must be consecutive.
   Therefore, every JUMP_INSN is taken to have an implicit logical link
   to the preceding insn.  This is not quite right, since non-jumps can
   also use the condition code; but in practice such insns would not
   combine anyway.  */

#include "config.h"
/* Must precede rtl.h for FFS.  */
#include <stdio.h>

#include "gvarargs.h"
#include "rtl.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "expr.h"
#include "basic-block.h"
#include "insn-config.h"
#include "insn-flags.h"
#include "insn-codes.h"
#include "insn-attr.h"
#include "recog.h"
#include "real.h"

/* It is not safe to use ordinary gen_lowpart in combine.
   Use gen_lowpart_for_combine instead.  See comments there.  */
#define gen_lowpart dont_use_gen_lowpart_you_dummy

/* Number of attempts to combine instructions in this function.  */

static int combine_attempts;

/* Number of attempts that got as far as substitution in this function.  */

static int combine_merges;

/* Number of instructions combined with added SETs in this function.  */

static int combine_extras;

/* Number of instructions combined in this function.  */

static int combine_successes;

/* Totals over entire compilation.  */

static int total_attempts, total_merges, total_extras, total_successes;
\f
/* Vector mapping INSN_UIDs to cuids.
   The cuids are like uids but always increase monotonically.
   Combine always uses cuids so that it can compare them.
   But actually renumbering the uids, which we used to do,
   proves to be a bad idea because it makes it hard to compare
   the dumps produced by earlier passes with those from later passes.  */

static int *uid_cuid;

/* Get the cuid of an insn.  */

#define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])

/* Maximum register number, which is the size of the tables below.  */

static int combine_max_regno;

/* Record last point of death of (hard or pseudo) register n.  */

static rtx *reg_last_death;

/* Record last point of modification of (hard or pseudo) register n.  */

static rtx *reg_last_set;

/* Record the cuid of the last insn that invalidated memory
   (anything that writes memory, and subroutine calls, but not pushes).  */

static int mem_last_set;

/* Record the cuid of the last CALL_INSN
   so we can tell whether a potential combination crosses any calls.  */

static int last_call_cuid;

/* When `subst' is called, this is the insn that is being modified
   (by combining in a previous insn).  The PATTERN of this insn
   is still the old pattern partially modified and it should not be
   looked at, but this may be used to examine the successors of the insn
   to judge whether a simplification is valid.  */

static rtx subst_insn;

/* This is the lowest CUID that `subst' is currently dealing with.
   get_last_value will not return a value if the register was set at or
   after this CUID.  If not for this mechanism, we could get confused if
   I2 or I1 in try_combine were an insn that used the old value of a register
   to obtain a new value.  In that case, we might erroneously get the
   new value of the register when we wanted the old one.  */

static int subst_low_cuid;

/* This is the value of undobuf.num_undo when we started processing this
   substitution.  This will prevent gen_rtx_combine from re-using a piece
   from the previous expression.  Doing so can produce circular rtl
   structures.  */

static int previous_num_undos;

/* Basic block number of the block in which we are performing combines.  */
static int this_basic_block;
\f
/* The next group of arrays allows the recording of the last value assigned
   to (hard or pseudo) register n.  We use this information to see if an
   operation being processed is redundant given a prior operation performed
   on the register.  For example, an `and' with a constant is redundant if
   all the zero bits are already known to be turned off.

   We use an approach similar to that used by cse, but change it in the
   following ways:

   (1) We do not want to reinitialize at each label.
   (2) It is useful, but not critical, to know the actual value assigned
       to a register.  Often just its form is helpful.

   Therefore, we maintain the following arrays:

   reg_last_set_value		the last value assigned
   reg_last_set_label		records the value of label_tick when the
				register was assigned
   reg_last_set_table_tick	records the value of label_tick when a
				value using the register is assigned
   reg_last_set_invalid		set to non-zero when it is not valid
				to use the value of this register in some
				register's value

   To understand the usage of these tables, it is important to understand
   the distinction between the value in reg_last_set_value being valid
   and the register being validly contained in some other expression in the
   table.

   Entry I in reg_last_set_value is valid if it is non-zero, and either
   reg_n_sets[i] is 1 or reg_last_set_label[i] == label_tick.

   Register I may validly appear in any expression returned for the value
   of another register if reg_n_sets[i] is 1.  It may also appear in the
   value for register J if reg_last_set_label[i] < reg_last_set_label[j] or
   reg_last_set_invalid[j] is zero.

   If an expression is found in the table containing a register which may
   not validly appear in an expression, the register is replaced by
   something that won't match, (clobber (const_int 0)).

   reg_last_set_invalid[i] is set non-zero when register I is being assigned
   to and reg_last_set_table_tick[i] == label_tick.  */
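
/* As a sketch (the rules above restated; this is not code used here),
   entry I is usable exactly when

     reg_last_set_value[i] != 0
     && (reg_n_sets[i] == 1 || reg_last_set_label[i] == label_tick)  */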

/* Record last value assigned to (hard or pseudo) register n.  */

static rtx *reg_last_set_value;

/* Record the value of label_tick when the value for register n is placed in
   reg_last_set_value[n].  */

static int *reg_last_set_label;

/* Record the value of label_tick when an expression involving register n
   is placed in reg_last_set_value.  */

static int *reg_last_set_table_tick;

/* Set non-zero if references to register n in expressions should not be
   used.  */

static char *reg_last_set_invalid;

/* Incremented for each label.  */

static int label_tick;

/* Some registers that are set more than once and used in more than one
   basic block are nevertheless always set in similar ways.  For example,
   a QImode register may be loaded from memory in two places on a machine
   where byte loads zero extend.

   We record in the following array what we know about the nonzero
   bits of a register, specifically which bits are known to be zero.

   If an entry is zero, it means that we don't know anything special.  */
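
/* For instance (an illustrative sketch), on a machine whose QImode memory
   loads zero extend, a pseudo that is only ever loaded that way would have
   an entry of 0xff here: the upper bits are known zero, so a later
   (and:SI (reg N) (const_int 255)) of it is redundant.  */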
256
55310dad 257static unsigned HOST_WIDE_INT *reg_nonzero_bits;
230d793d 258
951553af 259/* Mode used to compute significance in reg_nonzero_bits. It is the largest
5f4f0e22 260 integer mode that can fit in HOST_BITS_PER_WIDE_INT. */
230d793d 261
951553af 262static enum machine_mode nonzero_bits_mode;
230d793d 263
d0ab8cd3
RK
264/* Nonzero if we know that a register has some leading bits that are always
265 equal to the sign bit. */
266
267static char *reg_sign_bit_copies;
268
951553af 269/* Nonzero when reg_nonzero_bits and reg_sign_bit_copies can be safely used.
1a26b032
RK
270 It is zero while computing them and after combine has completed. This
271 former test prevents propagating values based on previously set values,
272 which can be incorrect if a variable is modified in a loop. */
230d793d 273
951553af 274static int nonzero_sign_valid;
55310dad
RK
275
276/* These arrays are maintained in parallel with reg_last_set_value
277 and are used to store the mode in which the register was last set,
278 the bits that were known to be zero when it was last set, and the
279 number of sign bits copies it was known to have when it was last set. */
280
281static enum machine_mode *reg_last_set_mode;
282static unsigned HOST_WIDE_INT *reg_last_set_nonzero_bits;
283static char *reg_last_set_sign_bit_copies;
230d793d
RS
284\f
285/* Record one modification to rtl structure
286 to be undone by storing old_contents into *where.
287 is_int is 1 if the contents are an int. */
288
289struct undo
290{
230d793d 291 int is_int;
f5393ab9
RS
292 union {rtx r; int i;} old_contents;
293 union {rtx *r; int *i;} where;
230d793d
RS
294};
295
296/* Record a bunch of changes to be undone, up to MAX_UNDO of them.
297 num_undo says how many are currently recorded.
298
299 storage is nonzero if we must undo the allocation of new storage.
300 The value of storage is what to pass to obfree.
301
302 other_insn is nonzero if we have modified some other insn in the process
303 of working on subst_insn. It must be verified too. */
304
305#define MAX_UNDO 50
306
307struct undobuf
308{
309 int num_undo;
310 char *storage;
311 struct undo undo[MAX_UNDO];
312 rtx other_insn;
313};
314
315static struct undobuf undobuf;
316
cc876596 317/* Substitute NEWVAL, an rtx expression, into INTO, a place in some
230d793d 318 insn. The substitution can be undone by undo_all. If INTO is already
cc876596
RK
319 set to NEWVAL, do not record this change. Because computing NEWVAL might
320 also call SUBST, we have to compute it before we put anything into
321 the undo table. */
230d793d
RS
322
323#define SUBST(INTO, NEWVAL) \
cc876596
RK
324 do { rtx _new = (NEWVAL); \
325 if (undobuf.num_undo < MAX_UNDO) \
230d793d 326 { \
230d793d 327 undobuf.undo[undobuf.num_undo].is_int = 0; \
f5393ab9
RS
328 undobuf.undo[undobuf.num_undo].where.r = &INTO; \
329 undobuf.undo[undobuf.num_undo].old_contents.r = INTO; \
cc876596 330 INTO = _new; \
f5393ab9 331 if (undobuf.undo[undobuf.num_undo].old_contents.r != INTO) \
230d793d
RS
332 undobuf.num_undo++; \
333 } \
334 } while (0)
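
/* A typical use, as a sketch (the rtx X here stands for any rtx being
   rewritten):

     SUBST (SET_SRC (x), new_src);

   replaces the SET_SRC of X and records the old value so that undo_all
   can restore it if the combination is ultimately rejected.  */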

/* Similar to SUBST, but NEWVAL is an int.  INTO will normally be an XINT
   expression.
   Note that substitution for the value of a CONST_INT is not safe.  */

#define SUBST_INT(INTO, NEWVAL)  \
 do { if (undobuf.num_undo < MAX_UNDO)				\
	{							\
	  undobuf.undo[undobuf.num_undo].is_int = 1;		\
	  undobuf.undo[undobuf.num_undo].where.i = (int *) &INTO; \
	  undobuf.undo[undobuf.num_undo].old_contents.i = INTO;	\
	  INTO = NEWVAL;					\
	  if (undobuf.undo[undobuf.num_undo].old_contents.i != INTO) \
	    undobuf.num_undo++;					\
	}							\
    } while (0)

/* Number of times the pseudo being substituted for
   was found and replaced.  */

static int n_occurrences;

static void init_reg_last_arrays	PROTO(());
static void setup_incoming_promotions	PROTO(());
static void set_nonzero_bits_and_sign_copies  PROTO((rtx, rtx));
static int can_combine_p	PROTO((rtx, rtx, rtx, rtx, rtx *, rtx *));
static int combinable_i3pat	PROTO((rtx, rtx *, rtx, rtx, int, rtx *));
static rtx try_combine		PROTO((rtx, rtx, rtx));
static void undo_all		PROTO((void));
static rtx *find_split_point	PROTO((rtx *, rtx));
static rtx subst		PROTO((rtx, rtx, rtx, int, int));
static rtx expand_compound_operation  PROTO((rtx));
static rtx expand_field_assignment  PROTO((rtx));
static rtx make_extraction	PROTO((enum machine_mode, rtx, int, rtx, int,
				       int, int, int));
static rtx make_compound_operation  PROTO((rtx, enum rtx_code));
static int get_pos_from_mask	PROTO((unsigned HOST_WIDE_INT, int *));
static rtx force_to_mode	PROTO((rtx, enum machine_mode,
				       unsigned HOST_WIDE_INT, rtx));
static rtx known_cond		PROTO((rtx, enum rtx_code, rtx, rtx));
static rtx make_field_assignment  PROTO((rtx));
static rtx apply_distributive_law  PROTO((rtx));
static rtx simplify_and_const_int  PROTO((rtx, enum machine_mode, rtx,
					  unsigned HOST_WIDE_INT));
static unsigned HOST_WIDE_INT nonzero_bits  PROTO((rtx, enum machine_mode));
static int num_sign_bit_copies  PROTO((rtx, enum machine_mode));
static int merge_outer_ops	PROTO((enum rtx_code *, HOST_WIDE_INT *,
				       enum rtx_code, HOST_WIDE_INT,
				       enum machine_mode, int *));
static rtx simplify_shift_const	PROTO((rtx, enum rtx_code, enum machine_mode,
				       rtx, int));
static int recog_for_combine	PROTO((rtx *, rtx, rtx *));
static rtx gen_lowpart_for_combine  PROTO((enum machine_mode, rtx));
static rtx gen_rtx_combine ();  /* This is varargs.  */
static rtx gen_binary		PROTO((enum rtx_code, enum machine_mode,
				       rtx, rtx));
static rtx gen_unary		PROTO((enum rtx_code, enum machine_mode, rtx));
static enum rtx_code simplify_comparison  PROTO((enum rtx_code, rtx *, rtx *));
static int reversible_comparison_p  PROTO((rtx));
static void update_table_tick	PROTO((rtx));
static void record_value_for_reg  PROTO((rtx, rtx, rtx));
static void record_dead_and_set_regs_1  PROTO((rtx, rtx));
static void record_dead_and_set_regs  PROTO((rtx));
static int get_last_value_validate  PROTO((rtx *, int, int));
static rtx get_last_value	PROTO((rtx));
static int use_crosses_set_p	PROTO((rtx, int));
static void reg_dead_at_p_1	PROTO((rtx, rtx));
static int reg_dead_at_p	PROTO((rtx, rtx));
static void move_deaths		PROTO((rtx, int, rtx, rtx *));
static int reg_bitfield_target_p  PROTO((rtx, rtx));
static void distribute_notes	PROTO((rtx, rtx, rtx, rtx, rtx, rtx));
static void distribute_links	PROTO((rtx));
\f
/* Main entry point for combiner.  F is the first insn of the function.
   NREGS is the first unused pseudo-reg number.  */

void
combine_instructions (f, nregs)
     rtx f;
     int nregs;
{
  register rtx insn, next, prev;
  register int i;
  register rtx links, nextlinks;

  combine_attempts = 0;
  combine_merges = 0;
  combine_extras = 0;
  combine_successes = 0;
  undobuf.num_undo = previous_num_undos = 0;

  combine_max_regno = nregs;

  reg_nonzero_bits
    = (unsigned HOST_WIDE_INT *) alloca (nregs * sizeof (HOST_WIDE_INT));
  reg_sign_bit_copies = (char *) alloca (nregs * sizeof (char));

  bzero (reg_nonzero_bits, nregs * sizeof (HOST_WIDE_INT));
  bzero (reg_sign_bit_copies, nregs * sizeof (char));

  reg_last_death = (rtx *) alloca (nregs * sizeof (rtx));
  reg_last_set = (rtx *) alloca (nregs * sizeof (rtx));
  reg_last_set_value = (rtx *) alloca (nregs * sizeof (rtx));
  reg_last_set_table_tick = (int *) alloca (nregs * sizeof (int));
  reg_last_set_label = (int *) alloca (nregs * sizeof (int));
  reg_last_set_invalid = (char *) alloca (nregs * sizeof (char));
  reg_last_set_mode
    = (enum machine_mode *) alloca (nregs * sizeof (enum machine_mode));
  reg_last_set_nonzero_bits
    = (unsigned HOST_WIDE_INT *) alloca (nregs * sizeof (HOST_WIDE_INT));
  reg_last_set_sign_bit_copies
    = (char *) alloca (nregs * sizeof (char));

  init_reg_last_arrays ();

  init_recog_no_volatile ();

  /* Compute maximum uid value so uid_cuid can be allocated.  */

  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    if (INSN_UID (insn) > i)
      i = INSN_UID (insn);

  uid_cuid = (int *) alloca ((i + 1) * sizeof (int));

  nonzero_bits_mode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);

  /* Don't use reg_nonzero_bits when computing it.  This can cause problems
     when, for example, we have j <<= 1 in a loop.  */

  nonzero_sign_valid = 0;

  /* Compute the mapping from uids to cuids.
     Cuids are numbers assigned to insns, like uids,
     except that cuids increase monotonically through the code.

     Scan all SETs and see if we can deduce anything about what
     bits are known to be zero for some registers and how many copies
     of the sign bit are known to exist for those registers.

     Also set any known values so that we can use them while searching
     for what bits are known to be set.  */

  label_tick = 1;

  setup_incoming_promotions ();

  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    {
      INSN_CUID (insn) = ++i;
      subst_low_cuid = i;
      subst_insn = insn;

      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	{
	  note_stores (PATTERN (insn), set_nonzero_bits_and_sign_copies);
	  record_dead_and_set_regs (insn);
	}

      if (GET_CODE (insn) == CODE_LABEL)
	label_tick++;
    }

  nonzero_sign_valid = 1;

  /* Now scan all the insns in forward order.  */

  this_basic_block = -1;
  label_tick = 1;
  last_call_cuid = 0;
  mem_last_set = 0;
  init_reg_last_arrays ();
  setup_incoming_promotions ();

  for (insn = f; insn; insn = next ? next : NEXT_INSN (insn))
    {
      next = 0;

      /* If INSN starts a new basic block, update our basic block number.  */
      if (this_basic_block + 1 < n_basic_blocks
	  && basic_block_head[this_basic_block + 1] == insn)
	this_basic_block++;

      if (GET_CODE (insn) == CODE_LABEL)
	label_tick++;

      else if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	{
	  /* Try this insn with each insn it links back to.  */

	  for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
	    if ((next = try_combine (insn, XEXP (links, 0), NULL_RTX)) != 0)
	      goto retry;

	  /* Try each sequence of three linked insns ending with this one.  */

	  for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
	    for (nextlinks = LOG_LINKS (XEXP (links, 0)); nextlinks;
		 nextlinks = XEXP (nextlinks, 1))
	      if ((next = try_combine (insn, XEXP (links, 0),
				       XEXP (nextlinks, 0))) != 0)
		goto retry;

#ifdef HAVE_cc0
	  /* Try to combine a jump insn that uses CC0
	     with a preceding insn that sets CC0, and maybe with its
	     logical predecessor as well.
	     This is how we make decrement-and-branch insns.
	     We need this special code because data flow connections
	     via CC0 do not get entered in LOG_LINKS.  */

	  if (GET_CODE (insn) == JUMP_INSN
	      && (prev = prev_nonnote_insn (insn)) != 0
	      && GET_CODE (prev) == INSN
	      && sets_cc0_p (PATTERN (prev)))
	    {
	      if ((next = try_combine (insn, prev, NULL_RTX)) != 0)
		goto retry;

	      for (nextlinks = LOG_LINKS (prev); nextlinks;
		   nextlinks = XEXP (nextlinks, 1))
		if ((next = try_combine (insn, prev,
					 XEXP (nextlinks, 0))) != 0)
		  goto retry;
	    }

	  /* Do the same for an insn that explicitly references CC0.  */
	  if (GET_CODE (insn) == INSN
	      && (prev = prev_nonnote_insn (insn)) != 0
	      && GET_CODE (prev) == INSN
	      && sets_cc0_p (PATTERN (prev))
	      && GET_CODE (PATTERN (insn)) == SET
	      && reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (insn))))
	    {
	      if ((next = try_combine (insn, prev, NULL_RTX)) != 0)
		goto retry;

	      for (nextlinks = LOG_LINKS (prev); nextlinks;
		   nextlinks = XEXP (nextlinks, 1))
		if ((next = try_combine (insn, prev,
					 XEXP (nextlinks, 0))) != 0)
		  goto retry;
	    }

	  /* Finally, see if any of the insns that this insn links to
	     explicitly references CC0.  If so, try this insn, that insn,
	     and its predecessor if it sets CC0.  */
	  for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
	    if (GET_CODE (XEXP (links, 0)) == INSN
		&& GET_CODE (PATTERN (XEXP (links, 0))) == SET
		&& reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (XEXP (links, 0))))
		&& (prev = prev_nonnote_insn (XEXP (links, 0))) != 0
		&& GET_CODE (prev) == INSN
		&& sets_cc0_p (PATTERN (prev))
		&& (next = try_combine (insn, XEXP (links, 0), prev)) != 0)
	      goto retry;
#endif

	  /* Try combining an insn with two different insns whose results it
	     uses.  */
	  for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
	    for (nextlinks = XEXP (links, 1); nextlinks;
		 nextlinks = XEXP (nextlinks, 1))
	      if ((next = try_combine (insn, XEXP (links, 0),
				       XEXP (nextlinks, 0))) != 0)
		goto retry;

	  if (GET_CODE (insn) != NOTE)
	    record_dead_and_set_regs (insn);

	retry:
	  ;
	}
    }

  total_attempts += combine_attempts;
  total_merges += combine_merges;
  total_extras += combine_extras;
  total_successes += combine_successes;

  nonzero_sign_valid = 0;
}

/* Wipe the reg_last_xxx arrays in preparation for another pass.  */

static void
init_reg_last_arrays ()
{
  int nregs = combine_max_regno;

  bzero (reg_last_death, nregs * sizeof (rtx));
  bzero (reg_last_set, nregs * sizeof (rtx));
  bzero (reg_last_set_value, nregs * sizeof (rtx));
  bzero (reg_last_set_table_tick, nregs * sizeof (int));
  bzero (reg_last_set_label, nregs * sizeof (int));
  bzero (reg_last_set_invalid, nregs * sizeof (char));
  bzero (reg_last_set_mode, nregs * sizeof (enum machine_mode));
  bzero (reg_last_set_nonzero_bits, nregs * sizeof (HOST_WIDE_INT));
  bzero (reg_last_set_sign_bit_copies, nregs * sizeof (char));
}
\f
/* Set up any promoted values for incoming argument registers.  */

static void
setup_incoming_promotions ()
{
#ifdef PROMOTE_FUNCTION_ARGS
  int regno;
  rtx reg;
  enum machine_mode mode;
  int unsignedp;
  rtx first = get_insns ();

  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if (FUNCTION_ARG_REGNO_P (regno)
	&& (reg = promoted_input_arg (regno, &mode, &unsignedp)) != 0)
      record_value_for_reg (reg, first,
			    gen_rtx (unsignedp ? ZERO_EXTEND : SIGN_EXTEND,
				     GET_MODE (reg),
				     gen_rtx (CLOBBER, mode, const0_rtx)));
#endif
}
\f
/* Called via note_stores.  If X is a pseudo that is used in more than
   one basic block, is narrower than HOST_BITS_PER_WIDE_INT, and is being
   set, record what bits are known zero.  If we are clobbering X,
   ignore this "set" because the clobbered value won't be used.

   If we are setting only a portion of X and we can't figure out what
   portion, assume all bits will be used since we don't know what will
   be happening.

   Similarly, set how many bits of X are known to be copies of the sign bit
   at all locations in the function.  This is the smallest number implied
   by any set of X.  */

static void
set_nonzero_bits_and_sign_copies (x, set)
     rtx x;
     rtx set;
{
  int num;

  if (GET_CODE (x) == REG
      && REGNO (x) >= FIRST_PSEUDO_REGISTER
      && reg_n_sets[REGNO (x)] > 1
      && reg_basic_block[REGNO (x)] < 0
      /* If this register is undefined at the start of the file, we can't
	 say what its contents were.  */
      && ! (basic_block_live_at_start[0][REGNO (x) / REGSET_ELT_BITS]
	    & ((REGSET_ELT_TYPE) 1 << (REGNO (x) % REGSET_ELT_BITS)))
      && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (set) == CLOBBER)
	{
	  reg_nonzero_bits[REGNO (x)] = GET_MODE_MASK (GET_MODE (x));
	  reg_sign_bit_copies[REGNO (x)] = 0;
	  return;
	}

      /* If this is a complex assignment, see if we can convert it into a
	 simple assignment.  */
      set = expand_field_assignment (set);

      /* If this is a simple assignment, or we have a paradoxical SUBREG,
	 set what we know about X.  */

      if (SET_DEST (set) == x
	  || (GET_CODE (SET_DEST (set)) == SUBREG
	      && (GET_MODE_SIZE (GET_MODE (SET_DEST (set)))
		  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (set)))))
	      && SUBREG_REG (SET_DEST (set)) == x))
	{
	  rtx src = SET_SRC (set);

#ifdef SHORT_IMMEDIATES_SIGN_EXTEND
	  /* If X is narrower than a word and SRC is a non-negative
	     constant that would appear negative in the mode of X,
	     sign-extend it for use in reg_nonzero_bits because some
	     machines (maybe most) will actually do the sign-extension
	     and this is the conservative approach.

	     ??? For 2.5, try to tighten up the MD files in this regard
	     instead of this kludge.  */

	  if (GET_MODE_BITSIZE (GET_MODE (x)) < BITS_PER_WORD
	      && GET_CODE (src) == CONST_INT
	      && INTVAL (src) > 0
	      && 0 != (INTVAL (src)
		       & ((HOST_WIDE_INT) 1
			  << (GET_MODE_BITSIZE (GET_MODE (x)) - 1))))
	    src = GEN_INT (INTVAL (src)
			   | ((HOST_WIDE_INT) (-1)
			      << GET_MODE_BITSIZE (GET_MODE (x))));
#endif

	  reg_nonzero_bits[REGNO (x)]
	    |= nonzero_bits (src, nonzero_bits_mode);
	  num = num_sign_bit_copies (SET_SRC (set), GET_MODE (x));
	  if (reg_sign_bit_copies[REGNO (x)] == 0
	      || reg_sign_bit_copies[REGNO (x)] > num)
	    reg_sign_bit_copies[REGNO (x)] = num;
	}
      else
	{
	  reg_nonzero_bits[REGNO (x)] = GET_MODE_MASK (GET_MODE (x));
	  reg_sign_bit_copies[REGNO (x)] = 0;
	}
    }
}
\f
/* See if INSN can be combined into I3.  PRED and SUCC are optionally
   insns that were previously combined into I3 or that will be combined
   into the merger of INSN and I3.

   Return 0 if the combination is not allowed for any reason.

   If the combination is allowed, *PDEST will be set to the single
   destination of INSN and *PSRC to the single source, and this function
   will return 1.  */

static int
can_combine_p (insn, i3, pred, succ, pdest, psrc)
     rtx insn;
     rtx i3;
     rtx pred, succ;
     rtx *pdest, *psrc;
{
  int i;
  rtx set = 0, src, dest;
  rtx p, link;
  int all_adjacent = (succ ? (next_active_insn (insn) == succ
			      && next_active_insn (succ) == i3)
		      : next_active_insn (insn) == i3);

  /* Can combine only if previous insn is a SET of a REG, a SUBREG or CC0,
     or a PARALLEL consisting of such a SET and CLOBBERs.

     If INSN has CLOBBER parallel parts, ignore them for our processing.
     By definition, these happen during the execution of the insn.  When it
     is merged with another insn, all bets are off.  If they are, in fact,
     needed and aren't also supplied in I3, they may be added by
     recog_for_combine.  Otherwise, it won't match.

     We can also ignore a SET whose SET_DEST is mentioned in a REG_UNUSED
     note.

     Get the source and destination of INSN.  If more than one, can't
     combine.  */

  if (GET_CODE (PATTERN (insn)) == SET)
    set = PATTERN (insn);
  else if (GET_CODE (PATTERN (insn)) == PARALLEL
	   && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
    {
      for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
	{
	  rtx elt = XVECEXP (PATTERN (insn), 0, i);

	  switch (GET_CODE (elt))
	    {
	    /* We can ignore CLOBBERs.  */
	    case CLOBBER:
	      break;

	    case SET:
	      /* Ignore SETs whose result isn't used but not those that
		 have side-effects.  */
	      if (find_reg_note (insn, REG_UNUSED, SET_DEST (elt))
		  && ! side_effects_p (elt))
		break;

	      /* If we have already found a SET, this is a second one and
		 so we cannot combine with this insn.  */
	      if (set)
		return 0;

	      set = elt;
	      break;

	    default:
	      /* Anything else means we can't combine.  */
	      return 0;
	    }
	}

      if (set == 0
	  /* If SET_SRC is an ASM_OPERANDS we can't throw away these CLOBBERs,
	     so don't do anything with it.  */
	  || GET_CODE (SET_SRC (set)) == ASM_OPERANDS)
	return 0;
    }
  else
    return 0;

  if (set == 0)
    return 0;

  set = expand_field_assignment (set);
  src = SET_SRC (set), dest = SET_DEST (set);

  /* Don't eliminate a store in the stack pointer.  */
  if (dest == stack_pointer_rtx
      /* If we couldn't eliminate a field assignment, we can't combine.  */
      || GET_CODE (dest) == ZERO_EXTRACT || GET_CODE (dest) == STRICT_LOW_PART
      /* Don't combine with an insn that sets a register to itself if it has
	 a REG_EQUAL note.  This may be part of a REG_NO_CONFLICT sequence.  */
      || (rtx_equal_p (src, dest) && find_reg_note (insn, REG_EQUAL, NULL_RTX))
      /* Can't merge a function call.  */
      || GET_CODE (src) == CALL
      /* Don't substitute into an incremented register.  */
      || FIND_REG_INC_NOTE (i3, dest)
      || (succ && FIND_REG_INC_NOTE (succ, dest))
      /* Don't combine the end of a libcall into anything.  */
      || find_reg_note (insn, REG_RETVAL, NULL_RTX)
      /* Make sure that DEST is not used after SUCC but before I3.  */
      || (succ && ! all_adjacent
	  && reg_used_between_p (dest, succ, i3))
      /* Make sure that the value that is to be substituted for the register
	 does not use any registers whose values alter in between.  However,
	 if the insns are adjacent, a use can't cross a set even though we
	 think it might (this can happen for a sequence of insns each setting
	 the same destination; reg_last_set of that register might point to
	 a NOTE).  Also, don't move a volatile asm or UNSPEC_VOLATILE across
	 any other insns.  */
      || (! all_adjacent
	  && (use_crosses_set_p (src, INSN_CUID (insn))
	      || (GET_CODE (src) == ASM_OPERANDS && MEM_VOLATILE_P (src))
	      || GET_CODE (src) == UNSPEC_VOLATILE))
      /* If there is a REG_NO_CONFLICT note for DEST in I3 or SUCC, we get
	 better register allocation by not doing the combine.  */
      || find_reg_note (i3, REG_NO_CONFLICT, dest)
      || (succ && find_reg_note (succ, REG_NO_CONFLICT, dest))
      /* Don't combine across a CALL_INSN, because that would possibly
	 change whether the life span of some REGs crosses calls or not,
	 and it is a pain to update that information.
	 Exception: if source is a constant, moving it later can't hurt.
	 Accept that special case, because it helps -fforce-addr a lot.  */
      || (INSN_CUID (insn) < last_call_cuid && ! CONSTANT_P (src)))
    return 0;

  /* DEST must either be a REG or CC0.  */
  if (GET_CODE (dest) == REG)
    {
      /* If register alignment is being enforced for multi-word items in all
	 cases except for parameters, it is possible to have a register copy
	 insn referencing a hard register that is not allowed to contain the
	 mode being copied and which would not be valid as an operand of most
	 insns.  Eliminate this problem by not combining with such an insn.

	 Also, on some machines we don't want to extend the life of a hard
	 register.  */

      if (GET_CODE (src) == REG
	  && ((REGNO (dest) < FIRST_PSEUDO_REGISTER
	       && ! HARD_REGNO_MODE_OK (REGNO (dest), GET_MODE (dest)))
#ifdef SMALL_REGISTER_CLASSES
	      /* Don't extend the life of a hard register.  */
	      || REGNO (src) < FIRST_PSEUDO_REGISTER
#else
	      || (REGNO (src) < FIRST_PSEUDO_REGISTER
		  && ! HARD_REGNO_MODE_OK (REGNO (src), GET_MODE (src)))
#endif
	      ))
	return 0;
    }
  else if (GET_CODE (dest) != CC0)
    return 0;

  /* Don't substitute for a register intended as a clobberable operand.
     Similarly, don't substitute an expression containing a register that
     will be clobbered in I3.  */
  if (GET_CODE (PATTERN (i3)) == PARALLEL)
    for (i = XVECLEN (PATTERN (i3), 0) - 1; i >= 0; i--)
      if (GET_CODE (XVECEXP (PATTERN (i3), 0, i)) == CLOBBER
	  && (reg_overlap_mentioned_p (XEXP (XVECEXP (PATTERN (i3), 0, i), 0),
				       src)
	      || rtx_equal_p (XEXP (XVECEXP (PATTERN (i3), 0, i), 0), dest)))
	return 0;

  /* If INSN contains anything volatile, or is an `asm' (whether volatile
     or not), reject, unless nothing volatile comes between it and I3,
     with the exception of SUCC.  */

  if (GET_CODE (src) == ASM_OPERANDS || volatile_refs_p (src))
    for (p = NEXT_INSN (insn); p != i3; p = NEXT_INSN (p))
      if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
	  && p != succ && volatile_refs_p (PATTERN (p)))
	return 0;

  /* If INSN or I2 contains an autoincrement or autodecrement,
     make sure that register is not used between there and I3,
     and not already used in I3 either.
     Also insist that I3 not be a jump; if it were one
     and the incremented register were spilled, we would lose.  */

#ifdef AUTO_INC_DEC
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == REG_INC
	&& (GET_CODE (i3) == JUMP_INSN
	    || reg_used_between_p (XEXP (link, 0), insn, i3)
	    || reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i3))))
      return 0;
#endif

#ifdef HAVE_cc0
  /* Don't combine an insn that follows a CC0-setting insn.
     An insn that uses CC0 must not be separated from the one that sets it.
     We do, however, allow I2 to follow a CC0-setting insn if that insn
     is passed as I1; in that case it will be deleted also.
     We also allow combining in this case if all the insns are adjacent
     because that would leave the two CC0 insns adjacent as well.
     It would be more logical to test whether CC0 occurs inside I1 or I2,
     but that would be much slower, and this ought to be equivalent.  */

  p = prev_nonnote_insn (insn);
  if (p && p != pred && GET_CODE (p) == INSN && sets_cc0_p (PATTERN (p))
      && ! all_adjacent)
    return 0;
#endif

  /* If we get here, we have passed all the tests and the combination is
     to be allowed.  */

  *pdest = dest;
  *psrc = src;

  return 1;
}
\f
/* LOC is the location within I3 that contains its pattern or the component
   of a PARALLEL of the pattern.  We validate that it is valid for combining.

   One problem is that if I3 modifies its output, as opposed to replacing it
   entirely, we can't allow the output to contain I2DEST or I1DEST as doing
   so would produce an insn that is not equivalent to the original insns.

   Consider:

	 (set (reg:DI 101) (reg:DI 100))
	 (set (subreg:SI (reg:DI 101) 0) <foo>)

   This is NOT equivalent to:

	 (parallel [(set (subreg:SI (reg:DI 100) 0) <foo>)
		    (set (reg:DI 101) (reg:DI 100))])

   Not only does this modify 100 (in which case it might still be valid
   if 100 were dead in I2), it sets 101 to the ORIGINAL value of 100.

   We can also run into a problem if I2 sets a register that I1
   uses and I1 gets directly substituted into I3 (not via I2).  In that
   case, we would be getting the wrong value of I2DEST into I3, so we
   must reject the combination.  This case occurs when I2 and I1 both
   feed into I3, rather than when I1 feeds into I2, which feeds into I3.
   If I1_NOT_IN_SRC is non-zero, it means that finding I1 in the source
   of a SET must prevent combination from occurring.

   On machines where SMALL_REGISTER_CLASSES is defined, we don't combine
   if the destination of a SET is a hard register.

   Before doing the above check, we first try to expand a field assignment
   into a set of logical operations.

   If PI3_DEST_KILLED is non-zero, it is a pointer to a location in which
   we place a register that is both set and used within I3.  If more than one
   such register is detected, we fail.

   Return 1 if the combination is valid, zero otherwise.  */

static int
combinable_i3pat (i3, loc, i2dest, i1dest, i1_not_in_src, pi3dest_killed)
     rtx i3;
     rtx *loc;
     rtx i2dest;
     rtx i1dest;
     int i1_not_in_src;
     rtx *pi3dest_killed;
{
  rtx x = *loc;

  if (GET_CODE (x) == SET)
    {
      rtx set = expand_field_assignment (x);
      rtx dest = SET_DEST (set);
      rtx src = SET_SRC (set);
      rtx inner_dest = dest, inner_src = src;

      SUBST (*loc, set);

      while (GET_CODE (inner_dest) == STRICT_LOW_PART
	     || GET_CODE (inner_dest) == SUBREG
	     || GET_CODE (inner_dest) == ZERO_EXTRACT)
	inner_dest = XEXP (inner_dest, 0);

      /* We probably don't need this any more now that LIMIT_RELOAD_CLASS
	 was added.  */
#if 0
      while (GET_CODE (inner_src) == STRICT_LOW_PART
	     || GET_CODE (inner_src) == SUBREG
	     || GET_CODE (inner_src) == ZERO_EXTRACT)
	inner_src = XEXP (inner_src, 0);

      /* If it is better that two different modes keep two different pseudos,
	 avoid combining them.  This avoids producing the following pattern
	 on a 386:
	  (set (subreg:SI (reg/v:QI 21) 0)
	       (lshiftrt:SI (reg/v:SI 20)
		   (const_int 24)))
	 If that were made, reload could not handle the pair of
	 reg 20/21, since it would try to get any GENERAL_REGS
	 but some of them don't handle QImode.  */

      if (rtx_equal_p (inner_src, i2dest)
	  && GET_CODE (inner_dest) == REG
	  && ! MODES_TIEABLE_P (GET_MODE (i2dest), GET_MODE (inner_dest)))
	return 0;
#endif

      /* Check for the case where I3 modifies its output, as
	 discussed above.  */
      if ((inner_dest != dest
	   && (reg_overlap_mentioned_p (i2dest, inner_dest)
	       || (i1dest && reg_overlap_mentioned_p (i1dest, inner_dest))))
	  /* This is the same test done in can_combine_p except that we
	     allow a hard register with SMALL_REGISTER_CLASSES if SRC is a
	     CALL operation.  */
	  || (GET_CODE (inner_dest) == REG
	      && REGNO (inner_dest) < FIRST_PSEUDO_REGISTER
#ifdef SMALL_REGISTER_CLASSES
	      && GET_CODE (src) != CALL
#else
	      && ! HARD_REGNO_MODE_OK (REGNO (inner_dest),
				       GET_MODE (inner_dest))
#endif
	      )

	  || (i1_not_in_src && reg_overlap_mentioned_p (i1dest, src)))
	return 0;

      /* If DEST is used in I3, it is being killed in this insn,
	 so record that for later.
	 Never add REG_DEAD notes for the FRAME_POINTER_REGNUM or the
	 STACK_POINTER_REGNUM, since these are always considered to be
	 live.  Similarly for ARG_POINTER_REGNUM if it is fixed.  */
      if (pi3dest_killed && GET_CODE (dest) == REG
	  && reg_referenced_p (dest, PATTERN (i3))
	  && REGNO (dest) != FRAME_POINTER_REGNUM
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
	  && REGNO (dest) != HARD_FRAME_POINTER_REGNUM
#endif
#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
	  && (REGNO (dest) != ARG_POINTER_REGNUM
	      || ! fixed_regs [REGNO (dest)])
#endif
	  && REGNO (dest) != STACK_POINTER_REGNUM)
	{
	  if (*pi3dest_killed)
	    return 0;

	  *pi3dest_killed = dest;
	}
    }

  else if (GET_CODE (x) == PARALLEL)
    {
      int i;

      for (i = 0; i < XVECLEN (x, 0); i++)
	if (! combinable_i3pat (i3, &XVECEXP (x, 0, i), i2dest, i1dest,
				i1_not_in_src, pi3dest_killed))
	  return 0;
    }

  return 1;
}
\f
/* Try to combine the insns I1 and I2 into I3.
   Here I1 and I2 appear earlier than I3.
   I1 can be zero; then we combine just I2 into I3.

   If we are combining three insns and the resulting insn is not recognized,
   try splitting it into two insns.  If that happens, I2 and I3 are retained
   and I1 is pseudo-deleted by turning it into a NOTE.  Otherwise, I1 and I2
   are pseudo-deleted.

   If we created two insns, return I2; otherwise return I3.
   Return 0 if the combination does not work.  Then nothing is changed.  */

static rtx
try_combine (i3, i2, i1)
     register rtx i3, i2, i1;
{
  /* New patterns for I3 and I2, respectively.  */
  rtx newpat, newi2pat = 0;
  /* Indicates need to preserve SET in I1 or I2 in I3 if it is not dead.  */
  int added_sets_1, added_sets_2;
  /* Total number of SETs to put into I3.  */
  int total_sets;
  /* Nonzero if I2's body now appears in I3.  */
  int i2_is_used;
  /* INSN_CODEs for new I3, new I2, and user of condition code.  */
  int insn_code_number, i2_code_number, other_code_number;
  /* Contains I3 if the destination of I3 is used in its source, which means
     that the old life of I3 is being killed.  If that usage is placed into
     I2 and not in I3, a REG_DEAD note must be made.  */
  rtx i3dest_killed = 0;
  /* SET_DEST and SET_SRC of I2 and I1.  */
  rtx i2dest, i2src, i1dest = 0, i1src = 0;
  /* PATTERN (I2), or a copy of it in certain cases.  */
  rtx i2pat;
  /* Indicates if I2DEST or I1DEST is in I2SRC or I1SRC.  */
  int i2dest_in_i2src, i1dest_in_i1src = 0, i2dest_in_i1src = 0;
  int i1_feeds_i3 = 0;
  /* Notes that must be added to REG_NOTES in I3 and I2.  */
  rtx new_i3_notes, new_i2_notes;

  int maxreg;
  rtx temp;
  register rtx link;
  int i;

  /* If any of I1, I2, and I3 isn't really an insn, we can't do anything.
     This can occur when flow deletes an insn that it has merged into an
     auto-increment address.  We also can't do anything if I3 has a
     REG_LIBCALL note since we don't want to disrupt the contiguity of a
     libcall.  */

  if (GET_RTX_CLASS (GET_CODE (i3)) != 'i'
      || GET_RTX_CLASS (GET_CODE (i2)) != 'i'
      || (i1 && GET_RTX_CLASS (GET_CODE (i1)) != 'i')
      || find_reg_note (i3, REG_LIBCALL, NULL_RTX))
    return 0;

  combine_attempts++;

  undobuf.num_undo = previous_num_undos = 0;
  undobuf.other_insn = 0;

  /* Save the current high-water-mark so we can free storage if we didn't
     accept this combination.  */
  undobuf.storage = (char *) oballoc (0);

  /* If I1 and I2 both feed I3, they can be in any order.  To simplify the
     code below, set I1 to be the earlier of the two insns.  */
  if (i1 && INSN_CUID (i1) > INSN_CUID (i2))
    temp = i1, i1 = i2, i2 = temp;

  /* First check for one important special-case that the code below will
     not handle.  Namely, the case where I1 is zero, I2 has multiple sets,
     and I3 is a SET whose SET_SRC is a SET_DEST in I2.  In that case,
     we may be able to replace that destination with the destination of I3.
     This occurs in the common code where we compute both a quotient and
     remainder into a structure, in which case we want to do the computation
     directly into the structure to avoid register-register copies.

     We make very conservative checks below and only try to handle the
     most common cases of this.  For example, we only handle the case
     where I2 and I3 are adjacent to avoid making difficult register
     usage tests.  */
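
  /* For instance (a made-up sketch; the registers are arbitrary), with

	I2: (parallel [(set (reg 65) (div:SI (reg 60) (reg 61)))
		       (set (reg 66) (mod:SI (reg 60) (reg 61)))])
	I3: (set (mem:SI (reg 64)) (reg 66))

     we would like to substitute (mem:SI (reg 64)) for (reg 66) inside I2
     and delete I3, so the remainder is computed directly into the
     structure slot.  */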
1195
1196 if (i1 == 0 && GET_CODE (i3) == INSN && GET_CODE (PATTERN (i3)) == SET
1197 && GET_CODE (SET_SRC (PATTERN (i3))) == REG
1198 && REGNO (SET_SRC (PATTERN (i3))) >= FIRST_PSEUDO_REGISTER
1199#ifdef SMALL_REGISTER_CLASSES
1200 && (GET_CODE (SET_DEST (PATTERN (i3))) != REG
1201 || REGNO (SET_DEST (PATTERN (i3))) >= FIRST_PSEUDO_REGISTER)
1202#endif
1203 && find_reg_note (i3, REG_DEAD, SET_SRC (PATTERN (i3)))
1204 && GET_CODE (PATTERN (i2)) == PARALLEL
1205 && ! side_effects_p (SET_DEST (PATTERN (i3)))
5089e22e
RS
1206 /* If the dest of I3 is a ZERO_EXTRACT or STRICT_LOW_PART, the code
1207 below would need to check what is inside (and reg_overlap_mentioned_p
1208 doesn't support those codes anyway). Don't allow those destinations;
1209 the resulting insn isn't likely to be recognized anyway. */
1210 && GET_CODE (SET_DEST (PATTERN (i3))) != ZERO_EXTRACT
1211 && GET_CODE (SET_DEST (PATTERN (i3))) != STRICT_LOW_PART
230d793d
RS
1212 && ! reg_overlap_mentioned_p (SET_SRC (PATTERN (i3)),
1213 SET_DEST (PATTERN (i3)))
1214 && next_real_insn (i2) == i3)
5089e22e
RS
1215 {
1216 rtx p2 = PATTERN (i2);
1217
1218 /* Make sure that the destination of I3,
1219 which we are going to substitute into one output of I2,
1220 is not used within another output of I2. We must avoid making this:
1221 (parallel [(set (mem (reg 69)) ...)
1222 (set (reg 69) ...)])
1223 which is not well-defined as to order of actions.
1224 (Besides, reload can't handle output reloads for this.)
1225
1226 The problem can also happen if the dest of I3 is a memory ref,
1227 if another dest in I2 is an indirect memory ref. */
1228 for (i = 0; i < XVECLEN (p2, 0); i++)
1229 if (GET_CODE (XVECEXP (p2, 0, i)) == SET
1230 && reg_overlap_mentioned_p (SET_DEST (PATTERN (i3)),
1231 SET_DEST (XVECEXP (p2, 0, i))))
1232 break;
230d793d 1233
5089e22e
RS
1234 if (i == XVECLEN (p2, 0))
1235 for (i = 0; i < XVECLEN (p2, 0); i++)
1236 if (SET_DEST (XVECEXP (p2, 0, i)) == SET_SRC (PATTERN (i3)))
1237 {
1238 combine_merges++;
230d793d 1239
5089e22e
RS
1240 subst_insn = i3;
1241 subst_low_cuid = INSN_CUID (i2);
230d793d 1242
5089e22e
RS
1243 added_sets_2 = 0;
1244 i2dest = SET_SRC (PATTERN (i3));
230d793d 1245
5089e22e
RS
1246 /* Replace the dest in I2 with our dest and make the resulting
1247 insn the new pattern for I3. Then skip to where we
1248 validate the pattern. Everything was set up above. */
1249 SUBST (SET_DEST (XVECEXP (p2, 0, i)),
1250 SET_DEST (PATTERN (i3)));
1251
1252 newpat = p2;
1253 goto validate_replacement;
1254 }
1255 }
230d793d
RS
1256
1257#ifndef HAVE_cc0
1258 /* If we have no I1 and I2 looks like:
1259 (parallel [(set (reg:CC X) (compare:CC OP (const_int 0)))
1260 (set Y OP)])
1261 make up a dummy I1 that is
1262 (set Y OP)
1263 and change I2 to be
1264 (set (reg:CC X) (compare:CC Y (const_int 0)))
1265
1266 (We can ignore any trailing CLOBBERs.)
1267
1268 This undoes a previous combination and allows us to match a branch-and-
1269 decrement insn. */
1270
1271 if (i1 == 0 && GET_CODE (PATTERN (i2)) == PARALLEL
1272 && XVECLEN (PATTERN (i2), 0) >= 2
1273 && GET_CODE (XVECEXP (PATTERN (i2), 0, 0)) == SET
1274 && (GET_MODE_CLASS (GET_MODE (SET_DEST (XVECEXP (PATTERN (i2), 0, 0))))
1275 == MODE_CC)
1276 && GET_CODE (SET_SRC (XVECEXP (PATTERN (i2), 0, 0))) == COMPARE
1277 && XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 1) == const0_rtx
1278 && GET_CODE (XVECEXP (PATTERN (i2), 0, 1)) == SET
1279 && GET_CODE (SET_DEST (XVECEXP (PATTERN (i2), 0, 1))) == REG
1280 && rtx_equal_p (XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 0),
1281 SET_SRC (XVECEXP (PATTERN (i2), 0, 1))))
1282 {
1283 for (i = XVECLEN (PATTERN (i2), 0) - 1; i >= 2; i--)
1284 if (GET_CODE (XVECEXP (PATTERN (i2), 0, i)) != CLOBBER)
1285 break;
1286
1287 if (i == 1)
1288 {
1289 /* We make I1 with the same INSN_UID as I2. This gives it
1290 the same INSN_CUID for value tracking. Our fake I1 will
1291 never appear in the insn stream so giving it the same INSN_UID
1292 as I2 will not cause a problem. */
1293
1294 i1 = gen_rtx (INSN, VOIDmode, INSN_UID (i2), 0, i2,
1295 XVECEXP (PATTERN (i2), 0, 1), -1, 0, 0);
1296
1297 SUBST (PATTERN (i2), XVECEXP (PATTERN (i2), 0, 0));
1298 SUBST (XEXP (SET_SRC (PATTERN (i2)), 0),
1299 SET_DEST (PATTERN (i1)));
1300 }
1301 }
1302#endif
1303
1304 /* Verify that I2 and I1 are valid for combining. */
5f4f0e22
CH
1305 if (! can_combine_p (i2, i3, i1, NULL_RTX, &i2dest, &i2src)
1306 || (i1 && ! can_combine_p (i1, i3, NULL_RTX, i2, &i1dest, &i1src)))
230d793d
RS
1307 {
1308 undo_all ();
1309 return 0;
1310 }
1311
1312 /* Record whether I2DEST is used in I2SRC and similarly for the other
1313 cases. Knowing this will help in register status updating below. */
1314 i2dest_in_i2src = reg_overlap_mentioned_p (i2dest, i2src);
1315 i1dest_in_i1src = i1 && reg_overlap_mentioned_p (i1dest, i1src);
1316 i2dest_in_i1src = i1 && reg_overlap_mentioned_p (i2dest, i1src);
1317
916f14f1 1318 /* See if I1 directly feeds into I3. It does if I1DEST is not used
230d793d
RS
1319 in I2SRC. */
1320 i1_feeds_i3 = i1 && ! reg_overlap_mentioned_p (i1dest, i2src);
1321
1322 /* Ensure that I3's pattern can be the destination of combines. */
1323 if (! combinable_i3pat (i3, &PATTERN (i3), i2dest, i1dest,
1324 i1 && i2dest_in_i1src && i1_feeds_i3,
1325 &i3dest_killed))
1326 {
1327 undo_all ();
1328 return 0;
1329 }
1330
1331 /* If I3 has an inc, then give up if I1 or I2 uses the reg that is inc'd.
1332 We used to do this EXCEPT in one case: I3 has a post-inc in an
1333 output operand. However, that exception can give rise to insns like
1334 mov r3,(r3)+
1335 which is a famous insn on the PDP-11 where the value of r3 used as the
5089e22e 1336 source was model-dependent. Avoid this sort of thing. */
230d793d
RS
1337
1338#if 0
1339 if (!(GET_CODE (PATTERN (i3)) == SET
1340 && GET_CODE (SET_SRC (PATTERN (i3))) == REG
1341 && GET_CODE (SET_DEST (PATTERN (i3))) == MEM
1342 && (GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_INC
1343 || GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_DEC)))
1344 /* It's not the exception. */
1345#endif
1346#ifdef AUTO_INC_DEC
1347 for (link = REG_NOTES (i3); link; link = XEXP (link, 1))
1348 if (REG_NOTE_KIND (link) == REG_INC
1349 && (reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i2))
1350 || (i1 != 0
1351 && reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i1)))))
1352 {
1353 undo_all ();
1354 return 0;
1355 }
1356#endif
1357
1358 /* See if the SETs in I1 or I2 need to be kept around in the merged
1359 instruction: whenever the value set there is still needed past I3.
1360 For the SETs in I2, this is easy: we see if I2DEST dies or is set in I3.
1361
1362 For the SET in I1, we have two cases: If I1 and I2 independently
1363 feed into I3, the set in I1 needs to be kept around if I1DEST dies
1364 or is set in I3. Otherwise (if I1 feeds I2 which feeds I3), the set
1365 in I1 needs to be kept around unless I1DEST dies or is set in either
1366 I2 or I3. We can distinguish these cases by seeing if I2SRC mentions
1367 I1DEST. If so, we know I1 feeds into I2. */
1368
1369 added_sets_2 = ! dead_or_set_p (i3, i2dest);
1370
1371 added_sets_1
1372 = i1 && ! (i1_feeds_i3 ? dead_or_set_p (i3, i1dest)
1373 : (dead_or_set_p (i3, i1dest) || dead_or_set_p (i2, i1dest)));
1374
1375 /* If the set in I2 needs to be kept around, we must make a copy of
1376 PATTERN (I2), so that when we substitute I1SRC for I1DEST in
5089e22e 1377 PATTERN (I2), we are only substituting for the original I1DEST, not into
230d793d
RS
1378 an already-substituted copy. This also prevents making self-referential
1379 rtx. If I2 is a PARALLEL, we just need the piece that assigns I2SRC to
1380 I2DEST. */
1381
1382 i2pat = (GET_CODE (PATTERN (i2)) == PARALLEL
1383 ? gen_rtx (SET, VOIDmode, i2dest, i2src)
1384 : PATTERN (i2));
1385
1386 if (added_sets_2)
1387 i2pat = copy_rtx (i2pat);
1388
1389 combine_merges++;
1390
1391 /* Substitute in the latest insn for the regs set by the earlier ones. */
1392
1393 maxreg = max_reg_num ();
1394
1395 subst_insn = i3;
230d793d
RS
1396
1397 /* It is possible that the source of I2 or I1 may be performing an
1398 unneeded operation, such as a ZERO_EXTEND of something that is known
1399 to have the high part zero. Handle that case by letting subst look at
1400 the innermost one of them.
1401
1402 Another way to do this would be to have a function that tries to
1403 simplify a single insn instead of merging two or more insns. We don't
1404 do this because of the potential of infinite loops and because
1405 of the potential extra memory required. However, doing it the way
1406 we are is a bit of a kludge and doesn't catch all cases.
1407
1408 But only do this if -fexpensive-optimizations since it slows things down
1409 and doesn't usually win. */
1410
1411 if (flag_expensive_optimizations)
1412 {
1413 /* Pass pc_rtx so no substitutions are done, just simplifications.
1414 The cases that we are interested in here do not involve the few
1415 cases were is_replaced is checked. */
1416 if (i1)
d0ab8cd3
RK
1417 {
1418 subst_low_cuid = INSN_CUID (i1);
1419 i1src = subst (i1src, pc_rtx, pc_rtx, 0, 0);
1420 }
230d793d 1421 else
d0ab8cd3
RK
1422 {
1423 subst_low_cuid = INSN_CUID (i2);
1424 i2src = subst (i2src, pc_rtx, pc_rtx, 0, 0);
1425 }
230d793d
RS
1426
1427 previous_num_undos = undobuf.num_undo;
1428 }
1429
1430#ifndef HAVE_cc0
1431 /* Many machines that don't use CC0 have insns that can both perform an
1432 arithmetic operation and set the condition code. These operations will
1433 be represented as a PARALLEL with the first element of the vector
1434 being a COMPARE of an arithmetic operation with the constant zero.
1435 The second element of the vector will set some pseudo to the result
1436 of the same arithmetic operation. If we simplify the COMPARE, we won't
1437 match such a pattern and so will generate an extra insn. Here we test
1438 for this case, where both the comparison and the operation result are
1439 needed, and make the PARALLEL by just replacing I2DEST in I3SRC with
1440 I2SRC. Later we will make the PARALLEL that contains I2. */

  if (i1 == 0 && added_sets_2 && GET_CODE (PATTERN (i3)) == SET
      && GET_CODE (SET_SRC (PATTERN (i3))) == COMPARE
      && XEXP (SET_SRC (PATTERN (i3)), 1) == const0_rtx
      && rtx_equal_p (XEXP (SET_SRC (PATTERN (i3)), 0), i2dest))
    {
      rtx *cc_use;
      enum machine_mode compare_mode;

      newpat = PATTERN (i3);
      SUBST (XEXP (SET_SRC (newpat), 0), i2src);

      i2_is_used = 1;

#ifdef EXTRA_CC_MODES
      /* See if a COMPARE with the operand we substituted in should be done
	 with the mode that is currently being used.  If not, do the same
	 processing we do in `subst' for a SET; namely, if the destination
	 is used only once, try to replace it with a register of the proper
	 mode and also replace the COMPARE.  */
      if (undobuf.other_insn == 0
	  && (cc_use = find_single_use (SET_DEST (newpat), i3,
					&undobuf.other_insn))
	  && ((compare_mode = SELECT_CC_MODE (GET_CODE (*cc_use),
					      i2src, const0_rtx))
	      != GET_MODE (SET_DEST (newpat))))
	{
	  int regno = REGNO (SET_DEST (newpat));
	  rtx new_dest = gen_rtx (REG, compare_mode, regno);

	  if (regno < FIRST_PSEUDO_REGISTER
	      || (reg_n_sets[regno] == 1 && ! added_sets_2
		  && ! REG_USERVAR_P (SET_DEST (newpat))))
	    {
	      if (regno >= FIRST_PSEUDO_REGISTER)
		SUBST (regno_reg_rtx[regno], new_dest);

	      SUBST (SET_DEST (newpat), new_dest);
	      SUBST (XEXP (*cc_use, 0), new_dest);
	      SUBST (SET_SRC (newpat),
		     gen_rtx_combine (COMPARE, compare_mode,
				      i2src, const0_rtx));
	    }
	  else
	    undobuf.other_insn = 0;
	}
#endif
    }
  else
#endif
    {
      n_occurrences = 0;		/* `subst' counts here */

      /* If I1 feeds into I2 (not into I3) and I1DEST is in I1SRC, we
	 need to make a unique copy of I2SRC each time we substitute it
	 to avoid self-referential rtl.  */

      subst_low_cuid = INSN_CUID (i2);
      newpat = subst (PATTERN (i3), i2dest, i2src, 0,
		      ! i1_feeds_i3 && i1dest_in_i1src);
      previous_num_undos = undobuf.num_undo;

      /* Record whether i2's body now appears within i3's body.  */
      i2_is_used = n_occurrences;
    }

  /* If we already got a failure, don't try to do more.  Otherwise,
     try to substitute in I1 if we have it.  */

  if (i1 && GET_CODE (newpat) != CLOBBER)
    {
      /* Before we can do this substitution, we must redo the test done
	 above (see detailed comments there) that ensures that I1DEST
	 isn't mentioned in any SETs in NEWPAT that are field assignments.  */

      if (! combinable_i3pat (NULL_RTX, &newpat, i1dest, NULL_RTX,
			      0, NULL_PTR))
	{
	  undo_all ();
	  return 0;
	}

      n_occurrences = 0;
      subst_low_cuid = INSN_CUID (i1);
      newpat = subst (newpat, i1dest, i1src, 0, 0);
      previous_num_undos = undobuf.num_undo;
    }

  /* Fail if an autoincrement side-effect has been duplicated.  Be careful
     to count all the ways that I2SRC and I1SRC can be used.  */
  if ((FIND_REG_INC_NOTE (i2, NULL_RTX) != 0
       && i2_is_used + added_sets_2 > 1)
      || (i1 != 0 && FIND_REG_INC_NOTE (i1, NULL_RTX) != 0
	  && (n_occurrences + added_sets_1 + (added_sets_2 && ! i1_feeds_i3)
	      > 1))
      /* Fail if we tried to make a new register (we used to abort, but there's
	 really no reason to).  */
      || max_reg_num () != maxreg
      /* Fail if we couldn't do something and have a CLOBBER.  */
      || GET_CODE (newpat) == CLOBBER)
    {
      undo_all ();
      return 0;
    }

  /* If the actions of the earlier insns must be kept
     in addition to substituting them into the latest one,
     we must make a new PARALLEL for the latest insn
     to hold the additional SETs.  */

  if (added_sets_1 || added_sets_2)
    {
      combine_extras++;

      if (GET_CODE (newpat) == PARALLEL)
	{
	  rtvec old = XVEC (newpat, 0);
	  total_sets = XVECLEN (newpat, 0) + added_sets_1 + added_sets_2;
	  newpat = gen_rtx (PARALLEL, VOIDmode, rtvec_alloc (total_sets));
	  bcopy (&old->elem[0], &XVECEXP (newpat, 0, 0),
		 sizeof (old->elem[0]) * old->num_elem);
	}
      else
	{
	  rtx old = newpat;
	  total_sets = 1 + added_sets_1 + added_sets_2;
	  newpat = gen_rtx (PARALLEL, VOIDmode, rtvec_alloc (total_sets));
	  XVECEXP (newpat, 0, 0) = old;
	}

      if (added_sets_1)
	XVECEXP (newpat, 0, --total_sets)
	  = (GET_CODE (PATTERN (i1)) == PARALLEL
	     ? gen_rtx (SET, VOIDmode, i1dest, i1src) : PATTERN (i1));

      if (added_sets_2)
	{
	  /* If there is no I1, use I2's body as is.  We used to also not do
	     the subst call below if I2 was substituted into I3,
	     but that could lose a simplification.  */
	  if (i1 == 0)
	    XVECEXP (newpat, 0, --total_sets) = i2pat;
	  else
	    /* See comment where i2pat is assigned.  */
	    XVECEXP (newpat, 0, --total_sets)
	      = subst (i2pat, i1dest, i1src, 0, 0);
	}
    }

  /* We come here when we are replacing a destination in I2 with the
     destination of I3.  */
 validate_replacement:

  /* Is the result of combination a valid instruction?  */
  insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);

  /* If the result isn't valid, see if it is a PARALLEL of two SETs where
     the second SET's destination is a register that is unused.  In that case,
     we just need the first SET.  This can occur when simplifying a divmod
     insn.  We *must* test for this case here because the code below that
     splits two independent SETs doesn't handle this case correctly when it
     updates the register status.  Also check the case where the first
     SET's destination is unused.  That would not cause incorrect code, but
     does cause an unneeded insn to remain.  */

  if (insn_code_number < 0 && GET_CODE (newpat) == PARALLEL
      && XVECLEN (newpat, 0) == 2
      && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
      && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
      && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) == REG
      && find_reg_note (i3, REG_UNUSED, SET_DEST (XVECEXP (newpat, 0, 1)))
      && ! side_effects_p (SET_SRC (XVECEXP (newpat, 0, 1)))
      && asm_noperands (newpat) < 0)
    {
      newpat = XVECEXP (newpat, 0, 0);
      insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
    }
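  /* For instance, suppose (a hypothetical divmod case) NEWPAT is
	(parallel [(set (reg 65) (div (reg 70) (reg 71)))
		   (set (reg 66) (mod (reg 70) (reg 71)))])
     and I3 has a REG_UNUSED note for (reg 66): only the DIV part is
     needed, so we keep just the first SET.  */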

  else if (insn_code_number < 0 && GET_CODE (newpat) == PARALLEL
	   && XVECLEN (newpat, 0) == 2
	   && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
	   && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) == REG
	   && find_reg_note (i3, REG_UNUSED, SET_DEST (XVECEXP (newpat, 0, 0)))
	   && ! side_effects_p (SET_SRC (XVECEXP (newpat, 0, 0)))
	   && asm_noperands (newpat) < 0)
    {
      newpat = XVECEXP (newpat, 0, 1);
      insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
    }

  /* See if this is an XOR.  If so, perhaps the problem is that the
     constant is out of range.  Replace it with a complemented XOR with
     a complemented constant; it might be in range.  */

  else if (insn_code_number < 0 && GET_CODE (newpat) == SET
	   && GET_CODE (SET_SRC (newpat)) == XOR
	   && GET_CODE (XEXP (SET_SRC (newpat), 1)) == CONST_INT
	   && ((temp = simplify_unary_operation (NOT,
						 GET_MODE (SET_SRC (newpat)),
						 XEXP (SET_SRC (newpat), 1),
						 GET_MODE (SET_SRC (newpat))))
	       != 0))
    {
      enum machine_mode i_mode = GET_MODE (SET_SRC (newpat));
      rtx pat
	= gen_rtx_combine (SET, VOIDmode, SET_DEST (newpat),
			   gen_unary (NOT, i_mode,
				      gen_binary (XOR, i_mode,
						  XEXP (SET_SRC (newpat), 0),
						  temp)));

      insn_code_number = recog_for_combine (&pat, i3, &new_i3_notes);
      if (insn_code_number >= 0)
	newpat = pat;
    }
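  /* For instance, suppose a machine's XOR only accepts 8-bit immediates
     (a hypothetical constraint): (xor:SI (reg 65) (const_int -16)) would
     not match, but the equivalent (not:SI (xor:SI (reg 65) (const_int 15)))
     might, since ~(-16) == 15 fits in 8 bits.  */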

  /* If we were combining three insns and the result is a simple SET
     with no ASM_OPERANDS that wasn't recognized, try to split it into two
     insns.  There are two ways to do this.  It can be split using a
     machine-specific method (like when you have an addition of a large
     constant) or by combine in the function find_split_point.  */

  if (i1 && insn_code_number < 0 && GET_CODE (newpat) == SET
      && asm_noperands (newpat) < 0)
    {
      rtx m_split, *split;
      rtx ni2dest = i2dest;

      /* See if the MD file can split NEWPAT.  If it can't, see if letting it
	 use I2DEST as a scratch register will help.  In the latter case,
	 convert I2DEST to the mode of the source of NEWPAT if we can.  */

      m_split = split_insns (newpat, i3);

      /* We can only use I2DEST as a scratch reg if it doesn't overlap any
	 inputs of NEWPAT.  */

      /* ??? If I2DEST is not safe, and I1DEST exists, then it would be
	 possible to try that as a scratch reg.  This would require adding
	 more code to make it work though.  */

      if (m_split == 0 && ! reg_overlap_mentioned_p (ni2dest, newpat))
	{
	  /* If I2DEST is a hard register or the only use of a pseudo,
	     we can change its mode.  */
	  if (GET_MODE (SET_DEST (newpat)) != GET_MODE (i2dest)
	      && GET_MODE (SET_DEST (newpat)) != VOIDmode
	      && GET_CODE (i2dest) == REG
	      && (REGNO (i2dest) < FIRST_PSEUDO_REGISTER
		  || (reg_n_sets[REGNO (i2dest)] == 1 && ! added_sets_2
		      && ! REG_USERVAR_P (i2dest))))
	    ni2dest = gen_rtx (REG, GET_MODE (SET_DEST (newpat)),
			       REGNO (i2dest));

	  m_split = split_insns (gen_rtx (PARALLEL, VOIDmode,
					  gen_rtvec (2, newpat,
						     gen_rtx (CLOBBER,
							      VOIDmode,
							      ni2dest))),
				 i3);
	}

      if (m_split && GET_CODE (m_split) == SEQUENCE
	  && XVECLEN (m_split, 0) == 2
	  && (next_real_insn (i2) == i3
	      || ! use_crosses_set_p (PATTERN (XVECEXP (m_split, 0, 0)),
				      INSN_CUID (i2))))
	{
	  rtx i2set, i3set;
	  rtx newi3pat = PATTERN (XVECEXP (m_split, 0, 1));
	  newi2pat = PATTERN (XVECEXP (m_split, 0, 0));

	  i3set = single_set (XVECEXP (m_split, 0, 1));
	  i2set = single_set (XVECEXP (m_split, 0, 0));

	  /* In case we changed the mode of I2DEST, replace it in the
	     pseudo-register table here.  We can't do it above in case this
	     code doesn't get executed and we do a split the other way.  */

	  if (REGNO (i2dest) >= FIRST_PSEUDO_REGISTER)
	    SUBST (regno_reg_rtx[REGNO (i2dest)], ni2dest);

	  i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);

	  /* If I2 or I3 has multiple SETs, we won't know how to track
	     register status, so don't use these insns.  */

	  if (i2_code_number >= 0 && i2set && i3set)
	    insn_code_number = recog_for_combine (&newi3pat, i3,
						  &new_i3_notes);

	  if (insn_code_number >= 0)
	    newpat = newi3pat;

	  /* It is possible that both insns now set the destination of I3.
	     If so, we must show an extra use of it.  */

	  if (insn_code_number >= 0 && GET_CODE (SET_DEST (i3set)) == REG
	      && GET_CODE (SET_DEST (i2set)) == REG
	      && REGNO (SET_DEST (i3set)) == REGNO (SET_DEST (i2set)))
	    reg_n_sets[REGNO (SET_DEST (i2set))]++;
	}

      /* If we can split it and use I2DEST, go ahead and see if that
	 helps things be recognized.  Verify that none of the registers
	 are set between I2 and I3.  */
      if (insn_code_number < 0 && (split = find_split_point (&newpat, i3)) != 0
#ifdef HAVE_cc0
	  && GET_CODE (i2dest) == REG
#endif
	  /* We need I2DEST in the proper mode.  If it is a hard register
	     or the only use of a pseudo, we can change its mode.  */
	  && (GET_MODE (*split) == GET_MODE (i2dest)
	      || GET_MODE (*split) == VOIDmode
	      || REGNO (i2dest) < FIRST_PSEUDO_REGISTER
	      || (reg_n_sets[REGNO (i2dest)] == 1 && ! added_sets_2
		  && ! REG_USERVAR_P (i2dest)))
	  && (next_real_insn (i2) == i3
	      || ! use_crosses_set_p (*split, INSN_CUID (i2)))
	  /* We can't overwrite I2DEST if its value is still used by
	     NEWPAT.  */
	  && ! reg_referenced_p (i2dest, newpat))
	{
	  rtx newdest = i2dest;

	  /* Get NEWDEST as a register in the proper mode.  We have already
	     validated that we can do this.  */
	  if (GET_MODE (i2dest) != GET_MODE (*split)
	      && GET_MODE (*split) != VOIDmode)
	    {
	      newdest = gen_rtx (REG, GET_MODE (*split), REGNO (i2dest));

	      if (REGNO (i2dest) >= FIRST_PSEUDO_REGISTER)
		SUBST (regno_reg_rtx[REGNO (i2dest)], newdest);
	    }

	  /* If *SPLIT is a (mult FOO (const_int pow2)), convert it to
	     an ASHIFT.  This can occur if it was inside a PLUS and hence
	     appeared to be a memory address.  This is a kludge.  */
	  if (GET_CODE (*split) == MULT
	      && GET_CODE (XEXP (*split, 1)) == CONST_INT
	      && (i = exact_log2 (INTVAL (XEXP (*split, 1)))) >= 0)
	    SUBST (*split, gen_rtx_combine (ASHIFT, GET_MODE (*split),
					    XEXP (*split, 0), GEN_INT (i)));
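	  /* E.g. (mult (reg 65) (const_int 8)), split out of an address,
	     becomes (ashift (reg 65) (const_int 3)), which is the form
	     shift insns expect outside an address.  */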

#ifdef INSN_SCHEDULING
	  /* If *SPLIT is a paradoxical SUBREG, when we split it, it should
	     be written as a ZERO_EXTEND.  */
	  if (GET_CODE (*split) == SUBREG
	      && GET_CODE (SUBREG_REG (*split)) == MEM)
	    SUBST (*split, gen_rtx_combine (ZERO_EXTEND, GET_MODE (*split),
					    XEXP (*split, 0)));
#endif

	  newi2pat = gen_rtx_combine (SET, VOIDmode, newdest, *split);
	  SUBST (*split, newdest);
	  i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
	  if (i2_code_number >= 0)
	    insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
	}
    }

  /* Check for a case where we loaded from memory in a narrow mode and
     then sign extended it, but we need both registers.  In that case,
     we have a PARALLEL with both loads from the same memory location.
     We can split this into a load from memory followed by a register-register
     copy.  This saves at least one insn, more if register allocation can
     eliminate the copy.

     We cannot do this if the destination of the second assignment is
     a register that we have already assumed is zero-extended.  Similarly
     for a SUBREG of such a register.  */
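  /* For instance, suppose NEWPAT is (a hypothetical case)
	(parallel [(set (reg:SI 65) (sign_extend:SI (mem:QI X)))
		   (set (reg:QI 66) (mem:QI X))]):
     this can become (set (reg:SI 65) (sign_extend:SI (mem:QI X)))
     followed by a copy of the low part of (reg 65) into (reg 66).  */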

  else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0
	   && GET_CODE (newpat) == PARALLEL
	   && XVECLEN (newpat, 0) == 2
	   && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
	   && GET_CODE (SET_SRC (XVECEXP (newpat, 0, 0))) == SIGN_EXTEND
	   && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
	   && rtx_equal_p (SET_SRC (XVECEXP (newpat, 0, 1)),
			   XEXP (SET_SRC (XVECEXP (newpat, 0, 0)), 0))
	   && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 1)),
				   INSN_CUID (i2))
	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT
	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART
	   && ! (temp = SET_DEST (XVECEXP (newpat, 0, 1)),
		 (GET_CODE (temp) == REG
		  && reg_nonzero_bits[REGNO (temp)] != 0
		  && GET_MODE_BITSIZE (GET_MODE (temp)) < BITS_PER_WORD
		  && GET_MODE_BITSIZE (GET_MODE (temp)) < HOST_BITS_PER_INT
		  && (reg_nonzero_bits[REGNO (temp)]
		      != GET_MODE_MASK (word_mode))))
	   && ! (GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) == SUBREG
		 && (temp = SUBREG_REG (SET_DEST (XVECEXP (newpat, 0, 1))),
		     (GET_CODE (temp) == REG
		      && reg_nonzero_bits[REGNO (temp)] != 0
		      && GET_MODE_BITSIZE (GET_MODE (temp)) < BITS_PER_WORD
		      && GET_MODE_BITSIZE (GET_MODE (temp)) < HOST_BITS_PER_INT
		      && (reg_nonzero_bits[REGNO (temp)]
			  != GET_MODE_MASK (word_mode)))))
	   && ! reg_overlap_mentioned_p (SET_DEST (XVECEXP (newpat, 0, 1)),
					 SET_SRC (XVECEXP (newpat, 0, 1)))
	   && ! find_reg_note (i3, REG_UNUSED,
			       SET_DEST (XVECEXP (newpat, 0, 0))))
    {
      rtx ni2dest;

      newi2pat = XVECEXP (newpat, 0, 0);
      ni2dest = SET_DEST (XVECEXP (newpat, 0, 0));
      newpat = XVECEXP (newpat, 0, 1);
      SUBST (SET_SRC (newpat),
	     gen_lowpart_for_combine (GET_MODE (SET_SRC (newpat)), ni2dest));
      i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
      if (i2_code_number >= 0)
	insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);

      if (insn_code_number >= 0)
	{
	  rtx insn;
	  rtx link;

	  /* If we will be able to accept this, we have made a change to the
	     destination of I3.  This can invalidate a LOG_LINKS pointing
	     to I3.  No other part of combine.c makes such a transformation.

	     The new I3 will have a destination that was previously the
	     destination of I1 or I2 and which was used in I2 or I3.  Call
	     distribute_links to make a LOG_LINK from the next use of
	     that destination.  */

	  PATTERN (i3) = newpat;
	  distribute_links (gen_rtx (INSN_LIST, VOIDmode, i3, NULL_RTX));

	  /* I3 now uses what used to be its destination and which is
	     now I2's destination.  That means we need a LOG_LINK from
	     I3 to I2.  But we used to have one, so we still will.

	     However, some later insn might be using I2's dest and have
	     a LOG_LINK pointing at I3.  We must remove this link.
	     The simplest way to remove the link is to point it at I1,
	     which we know will be a NOTE.  */

	  for (insn = NEXT_INSN (i3);
	       insn && (this_basic_block == n_basic_blocks - 1
			|| insn != basic_block_head[this_basic_block + 1]);
	       insn = NEXT_INSN (insn))
	    {
	      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
		  && reg_referenced_p (ni2dest, PATTERN (insn)))
		{
		  for (link = LOG_LINKS (insn); link;
		       link = XEXP (link, 1))
		    if (XEXP (link, 0) == i3)
		      XEXP (link, 0) = i1;

		  break;
		}
	    }
	}
    }

  /* Similarly, check for a case where we have a PARALLEL of two independent
     SETs but we started with three insns.  In this case, we can do the sets
     as two separate insns.  This case occurs when some SET allows two
     other insns to combine, but the destination of that SET is still live.  */

  else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0
	   && GET_CODE (newpat) == PARALLEL
	   && XVECLEN (newpat, 0) == 2
	   && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != ZERO_EXTRACT
	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != STRICT_LOW_PART
	   && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT
	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART
	   && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 1)),
				   INSN_CUID (i2))
	   /* Don't pass sets with (USE (MEM ...)) dests to the following.  */
	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != USE
	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != USE
	   && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 1)),
				  XVECEXP (newpat, 0, 0))
	   && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 0)),
				  XVECEXP (newpat, 0, 1)))
    {
      newi2pat = XVECEXP (newpat, 0, 1);
      newpat = XVECEXP (newpat, 0, 0);

      i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
      if (i2_code_number >= 0)
	insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
    }

  /* If it still isn't recognized, fail and change things back the way they
     were.  */
  if ((insn_code_number < 0
       /* Is the result a reasonable ASM_OPERANDS?  */
       && (! check_asm_operands (newpat) || added_sets_1 || added_sets_2)))
    {
      undo_all ();
      return 0;
    }

  /* If we had to change another insn, make sure it is valid also.  */
  if (undobuf.other_insn)
    {
      rtx other_notes = REG_NOTES (undobuf.other_insn);
      rtx other_pat = PATTERN (undobuf.other_insn);
      rtx new_other_notes;
      rtx note, next;

      other_code_number = recog_for_combine (&other_pat, undobuf.other_insn,
					     &new_other_notes);

      if (other_code_number < 0 && ! check_asm_operands (other_pat))
	{
	  undo_all ();
	  return 0;
	}

      PATTERN (undobuf.other_insn) = other_pat;

      /* If any of the notes in OTHER_INSN were REG_UNUSED, ensure that they
	 are still valid.  Then add any non-duplicate notes added by
	 recog_for_combine.  */
      for (note = REG_NOTES (undobuf.other_insn); note; note = next)
	{
	  next = XEXP (note, 1);

	  if (REG_NOTE_KIND (note) == REG_UNUSED
	      && ! reg_set_p (XEXP (note, 0), PATTERN (undobuf.other_insn)))
	    {
	      if (GET_CODE (XEXP (note, 0)) == REG)
		reg_n_deaths[REGNO (XEXP (note, 0))]--;

	      remove_note (undobuf.other_insn, note);
	    }
	}

      for (note = new_other_notes; note; note = XEXP (note, 1))
	if (GET_CODE (XEXP (note, 0)) == REG)
	  reg_n_deaths[REGNO (XEXP (note, 0))]++;

      distribute_notes (new_other_notes, undobuf.other_insn,
			undobuf.other_insn, NULL_RTX, NULL_RTX, NULL_RTX);
    }

  /* We now know that we can do this combination.  Merge the insns and
     update the status of registers and LOG_LINKS.  */

  {
    rtx i3notes, i2notes, i1notes = 0;
    rtx i3links, i2links, i1links = 0;
    rtx midnotes = 0;
    int all_adjacent = (next_real_insn (i2) == i3
			&& (i1 == 0 || next_real_insn (i1) == i2));
    register int regno;
    /* Compute which registers we expect to eliminate.  */
    rtx elim_i2 = (newi2pat || i2dest_in_i2src || i2dest_in_i1src
		   ? 0 : i2dest);
    rtx elim_i1 = i1 == 0 || i1dest_in_i1src ? 0 : i1dest;

    /* Get the old REG_NOTES and LOG_LINKS from all our insns and
       clear them.  */
    i3notes = REG_NOTES (i3), i3links = LOG_LINKS (i3);
    i2notes = REG_NOTES (i2), i2links = LOG_LINKS (i2);
    if (i1)
      i1notes = REG_NOTES (i1), i1links = LOG_LINKS (i1);

    /* Ensure that we do not have something that should not be shared but
       occurs multiple times in the new insns.  Check this by first
       resetting all the `used' flags and then copying anything that is
       shared.  */

    reset_used_flags (i3notes);
    reset_used_flags (i2notes);
    reset_used_flags (i1notes);
    reset_used_flags (newpat);
    reset_used_flags (newi2pat);
    if (undobuf.other_insn)
      reset_used_flags (PATTERN (undobuf.other_insn));

    i3notes = copy_rtx_if_shared (i3notes);
    i2notes = copy_rtx_if_shared (i2notes);
    i1notes = copy_rtx_if_shared (i1notes);
    newpat = copy_rtx_if_shared (newpat);
    newi2pat = copy_rtx_if_shared (newi2pat);
    if (undobuf.other_insn)
      reset_used_flags (PATTERN (undobuf.other_insn));

    INSN_CODE (i3) = insn_code_number;
    PATTERN (i3) = newpat;
    if (undobuf.other_insn)
      INSN_CODE (undobuf.other_insn) = other_code_number;

    /* We had one special case above where I2 had more than one set and
       we replaced a destination of one of those sets with the destination
       of I3.  In that case, we have to update LOG_LINKS of insns later
       in this basic block.  Note that this (expensive) case is rare.  */

    if (GET_CODE (PATTERN (i2)) == PARALLEL)
      for (i = 0; i < XVECLEN (PATTERN (i2), 0); i++)
	if (GET_CODE (SET_DEST (XVECEXP (PATTERN (i2), 0, i))) == REG
	    && SET_DEST (XVECEXP (PATTERN (i2), 0, i)) != i2dest
	    && ! find_reg_note (i2, REG_UNUSED,
				SET_DEST (XVECEXP (PATTERN (i2), 0, i))))
	  for (temp = NEXT_INSN (i2);
	       temp && (this_basic_block == n_basic_blocks - 1
			|| basic_block_head[this_basic_block] != temp);
	       temp = NEXT_INSN (temp))
	    if (temp != i3 && GET_RTX_CLASS (GET_CODE (temp)) == 'i')
	      for (link = LOG_LINKS (temp); link; link = XEXP (link, 1))
		if (XEXP (link, 0) == i2)
		  XEXP (link, 0) = i3;

    LOG_LINKS (i3) = 0;
    REG_NOTES (i3) = 0;
    LOG_LINKS (i2) = 0;
    REG_NOTES (i2) = 0;

    if (newi2pat)
      {
	INSN_CODE (i2) = i2_code_number;
	PATTERN (i2) = newi2pat;
      }
    else
      {
	PUT_CODE (i2, NOTE);
	NOTE_LINE_NUMBER (i2) = NOTE_INSN_DELETED;
	NOTE_SOURCE_FILE (i2) = 0;
      }

    if (i1)
      {
	LOG_LINKS (i1) = 0;
	REG_NOTES (i1) = 0;
	PUT_CODE (i1, NOTE);
	NOTE_LINE_NUMBER (i1) = NOTE_INSN_DELETED;
	NOTE_SOURCE_FILE (i1) = 0;
      }

    /* Get death notes for everything that is now used in either I3 or
       I2 and used to die in a previous insn.  */

    move_deaths (newpat, i1 ? INSN_CUID (i1) : INSN_CUID (i2), i3, &midnotes);
    if (newi2pat)
      move_deaths (newi2pat, INSN_CUID (i1), i2, &midnotes);

    /* Distribute all the LOG_LINKS and REG_NOTES from I1, I2, and I3.  */
    if (i3notes)
      distribute_notes (i3notes, i3, i3, newi2pat ? i2 : NULL_RTX,
			elim_i2, elim_i1);
    if (i2notes)
      distribute_notes (i2notes, i2, i3, newi2pat ? i2 : NULL_RTX,
			elim_i2, elim_i1);
    if (i1notes)
      distribute_notes (i1notes, i1, i3, newi2pat ? i2 : NULL_RTX,
			elim_i2, elim_i1);
    if (midnotes)
      distribute_notes (midnotes, NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
			elim_i2, elim_i1);

    /* Distribute any notes added to I2 or I3 by recog_for_combine.  We
       know these are REG_UNUSED and want them to go to the desired insn,
       so we always pass it as i3.  We have not counted the notes in
       reg_n_deaths yet, so we need to do so now.  */

    if (newi2pat && new_i2_notes)
      {
	for (temp = new_i2_notes; temp; temp = XEXP (temp, 1))
	  if (GET_CODE (XEXP (temp, 0)) == REG)
	    reg_n_deaths[REGNO (XEXP (temp, 0))]++;

	distribute_notes (new_i2_notes, i2, i2, NULL_RTX, NULL_RTX, NULL_RTX);
      }

    if (new_i3_notes)
      {
	for (temp = new_i3_notes; temp; temp = XEXP (temp, 1))
	  if (GET_CODE (XEXP (temp, 0)) == REG)
	    reg_n_deaths[REGNO (XEXP (temp, 0))]++;

	distribute_notes (new_i3_notes, i3, i3, NULL_RTX, NULL_RTX, NULL_RTX);
      }

    /* If I3DEST was used in I3SRC, it really died in I3.  We may need to
       put a REG_DEAD note for it somewhere.  Similarly for I2 and I1.
       Show an additional death due to the REG_DEAD note we make here.  If
       we discard it in distribute_notes, we will decrement it again.  */

    if (i3dest_killed)
      {
	if (GET_CODE (i3dest_killed) == REG)
	  reg_n_deaths[REGNO (i3dest_killed)]++;

	distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i3dest_killed,
				   NULL_RTX),
			  NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
			  NULL_RTX, NULL_RTX);
      }

    /* For I2 and I1, we have to be careful.  If NEWI2PAT exists and sets
       I2DEST or I1DEST, the death must be somewhere before I2, not I3.  If
       we passed I3 in that case, it might delete I2.  */

    if (i2dest_in_i2src)
      {
	if (GET_CODE (i2dest) == REG)
	  reg_n_deaths[REGNO (i2dest)]++;

	if (newi2pat && reg_set_p (i2dest, newi2pat))
	  distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i2dest, NULL_RTX),
			    NULL_RTX, i2, NULL_RTX, NULL_RTX, NULL_RTX);
	else
	  distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i2dest, NULL_RTX),
			    NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
			    NULL_RTX, NULL_RTX);
      }

    if (i1dest_in_i1src)
      {
	if (GET_CODE (i1dest) == REG)
	  reg_n_deaths[REGNO (i1dest)]++;

	if (newi2pat && reg_set_p (i1dest, newi2pat))
	  distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i1dest, NULL_RTX),
			    NULL_RTX, i2, NULL_RTX, NULL_RTX, NULL_RTX);
	else
	  distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i1dest, NULL_RTX),
			    NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
			    NULL_RTX, NULL_RTX);
      }

    distribute_links (i3links);
    distribute_links (i2links);
    distribute_links (i1links);

    if (GET_CODE (i2dest) == REG)
      {
	rtx link;
	rtx i2_insn = 0, i2_val = 0, set;

	/* The insn that used to set this register doesn't exist, and
	   this life of the register may not exist either.  See if one of
	   I3's links points to an insn that sets I2DEST.  If it does,
	   that is now the last known value for I2DEST.  If we don't update
	   this and I2 set the register to a value that depended on its old
	   contents, we will get confused.  If this insn is used, things
	   will be set correctly in combine_instructions.  */

	for (link = LOG_LINKS (i3); link; link = XEXP (link, 1))
	  if ((set = single_set (XEXP (link, 0))) != 0
	      && rtx_equal_p (i2dest, SET_DEST (set)))
	    i2_insn = XEXP (link, 0), i2_val = SET_SRC (set);

	record_value_for_reg (i2dest, i2_insn, i2_val);

	/* If the reg formerly set in I2 died only once and that was in I3,
	   zero its use count so it won't make `reload' do any work.  */
	if (! added_sets_2 && newi2pat == 0)
	  {
	    regno = REGNO (i2dest);
	    reg_n_sets[regno]--;
	    if (reg_n_sets[regno] == 0
		&& ! (basic_block_live_at_start[0][regno / REGSET_ELT_BITS]
		      & ((REGSET_ELT_TYPE) 1 << (regno % REGSET_ELT_BITS))))
	      reg_n_refs[regno] = 0;
	  }
      }

    if (i1 && GET_CODE (i1dest) == REG)
      {
	rtx link;
	rtx i1_insn = 0, i1_val = 0, set;

	for (link = LOG_LINKS (i3); link; link = XEXP (link, 1))
	  if ((set = single_set (XEXP (link, 0))) != 0
	      && rtx_equal_p (i1dest, SET_DEST (set)))
	    i1_insn = XEXP (link, 0), i1_val = SET_SRC (set);

	record_value_for_reg (i1dest, i1_insn, i1_val);

	regno = REGNO (i1dest);
	if (! added_sets_1)
	  {
	    reg_n_sets[regno]--;
	    if (reg_n_sets[regno] == 0
		&& ! (basic_block_live_at_start[0][regno / REGSET_ELT_BITS]
		      & ((REGSET_ELT_TYPE) 1 << (regno % REGSET_ELT_BITS))))
	      reg_n_refs[regno] = 0;
	  }
      }

    /* Update reg_nonzero_bits et al for any changes that may have been made
       to this insn.  */

    note_stores (newpat, set_nonzero_bits_and_sign_copies);
    if (newi2pat)
      note_stores (newi2pat, set_nonzero_bits_and_sign_copies);

    /* If I3 is now an unconditional jump, ensure that it has a
       BARRIER following it since it may have initially been a
       conditional jump.  It may also be the last nonnote insn.  */

    if ((GET_CODE (newpat) == RETURN || simplejump_p (i3))
	&& ((temp = next_nonnote_insn (i3)) == NULL_RTX
	    || GET_CODE (temp) != BARRIER))
      emit_barrier_after (i3);
  }

  combine_successes++;

  return newi2pat ? i2 : i3;
}
\f
/* Undo all the modifications recorded in undobuf.  */

static void
undo_all ()
{
  register int i;
  if (undobuf.num_undo > MAX_UNDO)
    undobuf.num_undo = MAX_UNDO;
  for (i = undobuf.num_undo - 1; i >= 0; i--)
    {
      if (undobuf.undo[i].is_int)
	*undobuf.undo[i].where.i = undobuf.undo[i].old_contents.i;
      else
	*undobuf.undo[i].where.r = undobuf.undo[i].old_contents.r;
    }

  obfree (undobuf.storage);
  undobuf.num_undo = 0;
}
\f
/* Find the innermost point within the rtx at LOC, possibly LOC itself,
   where we have an arithmetic expression and return that point.  LOC will
   be inside INSN.

   try_combine will call this function to see if an insn can be split into
   two insns.  */

static rtx *
find_split_point (loc, insn)
     rtx *loc;
     rtx insn;
{
  rtx x = *loc;
  enum rtx_code code = GET_CODE (x);
  rtx *split;
  int len = 0, pos, unsignedp;
  rtx inner;

  /* First special-case some codes.  */
  switch (code)
    {
    case SUBREG:
#ifdef INSN_SCHEDULING
      /* If we are making a paradoxical SUBREG invalid, it becomes a split
	 point.  */
      if (GET_CODE (SUBREG_REG (x)) == MEM)
	return loc;
#endif
      return find_split_point (&SUBREG_REG (x), insn);

    case MEM:
#ifdef HAVE_lo_sum
      /* If we have (mem (const ..)) or (mem (symbol_ref ...)), split it
	 using LO_SUM and HIGH.  */
      if (GET_CODE (XEXP (x, 0)) == CONST
	  || GET_CODE (XEXP (x, 0)) == SYMBOL_REF)
	{
	  SUBST (XEXP (x, 0),
		 gen_rtx_combine (LO_SUM, Pmode,
				  gen_rtx_combine (HIGH, Pmode, XEXP (x, 0)),
				  XEXP (x, 0)));
	  return &XEXP (XEXP (x, 0), 0);
	}
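      /* E.g. (mem (symbol_ref "x")) becomes
	 (mem (lo_sum (high (symbol_ref "x")) (symbol_ref "x"))),
	 and the HIGH part is the split point, to be computed by a
	 separate insn.  */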
#endif

      /* If we have a PLUS whose second operand is a constant and the
	 address is not valid, perhaps we can split it up using
	 the machine-specific way to split large constants.  We use
	 the first pseudo-reg (one of the virtual regs) as a placeholder;
	 it will not remain in the result.  */
      if (GET_CODE (XEXP (x, 0)) == PLUS
	  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
	  && ! memory_address_p (GET_MODE (x), XEXP (x, 0)))
	{
	  rtx reg = regno_reg_rtx[FIRST_PSEUDO_REGISTER];
	  rtx seq = split_insns (gen_rtx (SET, VOIDmode, reg, XEXP (x, 0)),
				 subst_insn);

	  /* This should have produced two insns, each of which sets our
	     placeholder.  If the source of the second is a valid address,
	     we can put both sources together and make a split point
	     in the middle.  */

	  if (seq && XVECLEN (seq, 0) == 2
	      && GET_CODE (XVECEXP (seq, 0, 0)) == INSN
	      && GET_CODE (PATTERN (XVECEXP (seq, 0, 0))) == SET
	      && SET_DEST (PATTERN (XVECEXP (seq, 0, 0))) == reg
	      && ! reg_mentioned_p (reg,
				    SET_SRC (PATTERN (XVECEXP (seq, 0, 0))))
	      && GET_CODE (XVECEXP (seq, 0, 1)) == INSN
	      && GET_CODE (PATTERN (XVECEXP (seq, 0, 1))) == SET
	      && SET_DEST (PATTERN (XVECEXP (seq, 0, 1))) == reg
	      && memory_address_p (GET_MODE (x),
				   SET_SRC (PATTERN (XVECEXP (seq, 0, 1)))))
	    {
	      rtx src1 = SET_SRC (PATTERN (XVECEXP (seq, 0, 0)));
	      rtx src2 = SET_SRC (PATTERN (XVECEXP (seq, 0, 1)));

	      /* Replace the placeholder in SRC2 with SRC1.  If we can
		 find where in SRC2 it was placed, that can become our
		 split point and we can replace this address with SRC2.
		 Just try two obvious places.  */

	      src2 = replace_rtx (src2, reg, src1);
	      split = 0;
	      if (XEXP (src2, 0) == src1)
		split = &XEXP (src2, 0);
	      else if (GET_RTX_FORMAT (GET_CODE (XEXP (src2, 0)))[0] == 'e'
		       && XEXP (XEXP (src2, 0), 0) == src1)
		split = &XEXP (XEXP (src2, 0), 0);

	      if (split)
		{
		  SUBST (XEXP (x, 0), src2);
		  return split;
		}
	    }

	  /* If that didn't work, perhaps the first operand is complex and
	     needs to be computed separately, so make a split point there.
	     This will occur on machines that just support REG + CONST
	     and have a constant moved through some previous computation.  */

	  else if (GET_RTX_CLASS (GET_CODE (XEXP (XEXP (x, 0), 0))) != 'o'
		   && ! (GET_CODE (XEXP (XEXP (x, 0), 0)) == SUBREG
			 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (XEXP (x, 0), 0))))
			     == 'o')))
	    return &XEXP (XEXP (x, 0), 0);
	}
      break;

    case SET:
#ifdef HAVE_cc0
      /* If SET_DEST is CC0 and SET_SRC is not an operand, a COMPARE, or a
	 ZERO_EXTRACT, the most likely reason why this doesn't match is that
	 we need to put the operand into a register.  So split at that
	 point.  */

      if (SET_DEST (x) == cc0_rtx
	  && GET_CODE (SET_SRC (x)) != COMPARE
	  && GET_CODE (SET_SRC (x)) != ZERO_EXTRACT
	  && GET_RTX_CLASS (GET_CODE (SET_SRC (x))) != 'o'
	  && ! (GET_CODE (SET_SRC (x)) == SUBREG
		&& GET_RTX_CLASS (GET_CODE (SUBREG_REG (SET_SRC (x)))) == 'o'))
	return &SET_SRC (x);
#endif

      /* See if we can split SET_SRC as it stands.  */
      split = find_split_point (&SET_SRC (x), insn);
      if (split && split != &SET_SRC (x))
	return split;

      /* See if this is a bitfield assignment with everything constant.  If
	 so, this is an IOR of an AND, so split it into that.  */
      if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
	  && (GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
	      <= HOST_BITS_PER_WIDE_INT)
	  && GET_CODE (XEXP (SET_DEST (x), 1)) == CONST_INT
	  && GET_CODE (XEXP (SET_DEST (x), 2)) == CONST_INT
	  && GET_CODE (SET_SRC (x)) == CONST_INT
	  && ((INTVAL (XEXP (SET_DEST (x), 1))
	       + INTVAL (XEXP (SET_DEST (x), 2)))
	      <= GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0))))
	  && ! side_effects_p (XEXP (SET_DEST (x), 0)))
	{
	  int pos = INTVAL (XEXP (SET_DEST (x), 2));
	  int len = INTVAL (XEXP (SET_DEST (x), 1));
	  int src = INTVAL (SET_SRC (x));
	  rtx dest = XEXP (SET_DEST (x), 0);
	  enum machine_mode mode = GET_MODE (dest);
	  unsigned HOST_WIDE_INT mask = ((HOST_WIDE_INT) 1 << len) - 1;

#if BITS_BIG_ENDIAN
	  pos = GET_MODE_BITSIZE (mode) - len - pos;
#endif

	  if (src == mask)
	    SUBST (SET_SRC (x),
		   gen_binary (IOR, mode, dest, GEN_INT (src << pos)));
	  else
	    SUBST (SET_SRC (x),
		   gen_binary (IOR, mode,
			       gen_binary (AND, mode, dest,
					   GEN_INT (~ (mask << pos)
						    & GET_MODE_MASK (mode))),
			       GEN_INT (src << pos)));

	  SUBST (SET_DEST (x), dest);

	  split = find_split_point (&SET_SRC (x), insn);
	  if (split && split != &SET_SRC (x))
	    return split;
	}
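      /* For instance, (set (zero_extract (reg 65) (const_int 3)
					  (const_int 2))
			    (const_int 5))
	 (a hypothetical SImode case, with BITS_BIG_ENDIAN zero) becomes
	 (set (reg 65) (ior (and (reg 65) (const_int -29))
			    (const_int 20))),
	 since ~(7 << 2) == -29 and 5 << 2 == 20.  */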

      /* Otherwise, see if this is an operation that we can split into two.
	 If so, try to split that.  */
      code = GET_CODE (SET_SRC (x));

      switch (code)
	{
	case AND:
	  /* If we are AND'ing with a large constant that is only a single
	     bit and the result is only being used in a context where we
	     need to know if it is zero or non-zero, replace it with a bit
	     extraction.  This will avoid the large constant, which might
	     have taken more than one insn to make.  If the constant were
	     not a valid argument to the AND but took only one insn to make,
	     this is no worse, but if it took more than one insn, it will
	     be better.  */

	  if (GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
	      && GET_CODE (XEXP (SET_SRC (x), 0)) == REG
	      && (pos = exact_log2 (INTVAL (XEXP (SET_SRC (x), 1)))) >= 7
	      && GET_CODE (SET_DEST (x)) == REG
	      && (split = find_single_use (SET_DEST (x), insn, NULL_PTR)) != 0
	      && (GET_CODE (*split) == EQ || GET_CODE (*split) == NE)
	      && XEXP (*split, 0) == SET_DEST (x)
	      && XEXP (*split, 1) == const0_rtx)
	    {
	      SUBST (SET_SRC (x),
		     make_extraction (GET_MODE (SET_DEST (x)),
				      XEXP (SET_SRC (x), 0),
				      pos, NULL_RTX, 1, 1, 0, 0));
	      return find_split_point (loc, insn);
	    }
	  break;

	case SIGN_EXTEND:
	  inner = XEXP (SET_SRC (x), 0);
	  pos = 0;
	  len = GET_MODE_BITSIZE (GET_MODE (inner));
	  unsignedp = 0;
	  break;

	case SIGN_EXTRACT:
	case ZERO_EXTRACT:
	  if (GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
	      && GET_CODE (XEXP (SET_SRC (x), 2)) == CONST_INT)
	    {
	      inner = XEXP (SET_SRC (x), 0);
	      len = INTVAL (XEXP (SET_SRC (x), 1));
	      pos = INTVAL (XEXP (SET_SRC (x), 2));

#if BITS_BIG_ENDIAN
	      pos = GET_MODE_BITSIZE (GET_MODE (inner)) - len - pos;
#endif
	      unsignedp = (code == ZERO_EXTRACT);
	    }
	  break;
	}

      if (len && pos >= 0 && pos + len <= GET_MODE_BITSIZE (GET_MODE (inner)))
	{
	  enum machine_mode mode = GET_MODE (SET_SRC (x));

	  /* For unsigned, we have a choice of a shift followed by an
	     AND or two shifts.  Use two shifts for field sizes where the
	     constant might be too large.  We assume here that we can
	     always at least get 8-bit constants in an AND insn, which is
	     true for every current RISC.  */

	  if (unsignedp && len <= 8)
	    {
	      SUBST (SET_SRC (x),
		     gen_rtx_combine
		     (AND, mode,
		      gen_rtx_combine (LSHIFTRT, mode,
				       gen_lowpart_for_combine (mode, inner),
				       GEN_INT (pos)),
		      GEN_INT (((HOST_WIDE_INT) 1 << len) - 1)));

	      split = find_split_point (&SET_SRC (x), insn);
	      if (split && split != &SET_SRC (x))
		return split;
	    }
	  else
	    {
	      SUBST (SET_SRC (x),
		     gen_rtx_combine
		     (unsignedp ? LSHIFTRT : ASHIFTRT, mode,
		      gen_rtx_combine (ASHIFT, mode,
				       gen_lowpart_for_combine (mode, inner),
				       GEN_INT (GET_MODE_BITSIZE (mode)
						- len - pos)),
		      GEN_INT (GET_MODE_BITSIZE (mode) - len)));

	      split = find_split_point (&SET_SRC (x), insn);
	      if (split && split != &SET_SRC (x))
		return split;
	    }
	}
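      /* For instance, extracting an unsigned 4-bit field at bit 3 of
	 (reg 65) in SImode gives
	    (and (lshiftrt (reg 65) (const_int 3)) (const_int 15)),
	 while a wider or signed field uses the two-shift form, e.g.
	    (ashiftrt (ashift (reg 65) (const_int 8)) (const_int 16))
	 for a signed 16-bit field at bit 8.  */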

      /* See if this is a simple operation with a constant as the second
	 operand.  It might be that this constant is out of range and hence
	 could be used as a split point.  */
      if ((GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '2'
	   || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == 'c'
	   || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '<')
	  && CONSTANT_P (XEXP (SET_SRC (x), 1))
	  && (GET_RTX_CLASS (GET_CODE (XEXP (SET_SRC (x), 0))) == 'o'
	      || (GET_CODE (XEXP (SET_SRC (x), 0)) == SUBREG
		  && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (SET_SRC (x), 0))))
		      == 'o'))))
	return &XEXP (SET_SRC (x), 1);

      /* Finally, see if this is a simple operation with its first operand
	 not in a register.  The operation might require this operand in a
	 register, so return it as a split point.  We can always do this
	 because if the first operand were another operation, we would have
	 already found it as a split point.  */
      if ((GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '2'
	   || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == 'c'
	   || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '<'
	   || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '1')
	  && ! register_operand (XEXP (SET_SRC (x), 0), VOIDmode))
	return &XEXP (SET_SRC (x), 0);

      return 0;

    case AND:
    case IOR:
      /* We write NOR as (and (not A) (not B)), but if we don't have a NOR,
	 it is better to write this as (not (ior A B)) so we can split it.
	 Similarly for IOR.  */
      if (GET_CODE (XEXP (x, 0)) == NOT && GET_CODE (XEXP (x, 1)) == NOT)
	{
	  SUBST (*loc,
		 gen_rtx_combine (NOT, GET_MODE (x),
				  gen_rtx_combine (code == IOR ? AND : IOR,
						   GET_MODE (x),
						   XEXP (XEXP (x, 0), 0),
						   XEXP (XEXP (x, 1), 0))));
	  return find_split_point (loc, insn);
	}
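      /* E.g. (and (not (reg 65)) (not (reg 66))) becomes
	 (not (ior (reg 65) (reg 66))), by De Morgan's law, and the IOR
	 can then be computed in a separate insn.  */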

      /* Many RISC machines have a large set of logical insns.  If the
	 second operand is a NOT, put it first so we will try to split the
	 other operand first.  */
      if (GET_CODE (XEXP (x, 1)) == NOT)
	{
	  rtx tem = XEXP (x, 0);
	  SUBST (XEXP (x, 0), XEXP (x, 1));
	  SUBST (XEXP (x, 1), tem);
	}
      break;
    }

  /* Otherwise, select our actions depending on our rtx class.  */
  switch (GET_RTX_CLASS (code))
    {
    case 'b':			/* This is ZERO_EXTRACT and SIGN_EXTRACT.  */
    case '3':
      split = find_split_point (&XEXP (x, 2), insn);
      if (split)
	return split;
      /* ... fall through ...  */
    case '2':
    case 'c':
    case '<':
      split = find_split_point (&XEXP (x, 1), insn);
      if (split)
	return split;
      /* ... fall through ...  */
    case '1':
      /* Some machines have (and (shift ...) ...) insns.  If X is not
	 an AND, but XEXP (X, 0) is, use it as our split point.  */
      if (GET_CODE (x) != AND && GET_CODE (XEXP (x, 0)) == AND)
	return &XEXP (x, 0);

      split = find_split_point (&XEXP (x, 0), insn);
      if (split)
	return split;
      return loc;
    }

  /* Otherwise, we don't have a split point.  */
  return 0;
}
\f
/* Throughout X, replace FROM with TO, and return the result.
   The result is TO if X is FROM;
   otherwise the result is X, but its contents may have been modified.
   If they were modified, a record was made in undobuf so that
   undo_all will (among other things) return X to its original state.

   If the number of changes necessary is too much to record to undo,
   the excess changes are not made, so the result is invalid.
   The changes already made can still be undone.
   undobuf.num_undo is incremented for such changes, so by testing that
   the caller can tell whether the result is valid.

   `n_occurrences' is incremented each time FROM is replaced.

   IN_DEST is non-zero if we are processing the SET_DEST of a SET.

   UNIQUE_COPY is non-zero if each substitution must be unique.  We do this
   by copying if `n_occurrences' is non-zero.  */

static rtx
subst (x, from, to, in_dest, unique_copy)
     register rtx x, from, to;
     int in_dest;
     int unique_copy;
{
  register char *fmt;
  register int len, i;
  register enum rtx_code code = GET_CODE (x), orig_code = code;
  rtx temp;
  enum machine_mode mode = GET_MODE (x);
  enum machine_mode op0_mode = VOIDmode;
  rtx other_insn;
  rtx *cc_use;
  int n_restarts = 0;

/* FAKE_EXTEND_SAFE_P (MODE, FROM) is 1 if (subreg:MODE FROM 0) is a safe
   replacement for (zero_extend:MODE FROM) or (sign_extend:MODE FROM).
   If it is 0, that cannot be done.  We can now do this for any MEM
   because (SUBREG (MEM...)) is guaranteed to cause the MEM to be reloaded.
   If not for that, MEM's would very rarely be safe.  */

/* Reject MODEs bigger than a word, because we might not be able
   to reference a two-register group starting with an arbitrary register
   (and currently gen_lowpart might crash for a SUBREG).  */

#define FAKE_EXTEND_SAFE_P(MODE, FROM) \
  (GET_MODE_SIZE (MODE) <= UNITS_PER_WORD)

/* Two expressions are equal if they are identical copies of a shared
   RTX or if they are both registers with the same register number
   and mode.  */

#define COMBINE_RTX_EQUAL_P(X,Y)			\
  ((X) == (Y)						\
   || (GET_CODE (X) == REG && GET_CODE (Y) == REG	\
       && REGNO (X) == REGNO (Y) && GET_MODE (X) == GET_MODE (Y)))

  if (! in_dest && COMBINE_RTX_EQUAL_P (x, from))
    {
      n_occurrences++;
      return (unique_copy && n_occurrences > 1 ? copy_rtx (to) : to);
    }

  /* If X and FROM are the same register but different modes, they will
     not have been seen as equal above.  However, flow.c will make a
     LOG_LINKS entry for that case.  If we do nothing, we will try to
     rerecognize our original insn and, when it succeeds, we will
     delete the feeding insn, which is incorrect.

     So force this insn not to match in this (rare) case.  */
  if (! in_dest && code == REG && GET_CODE (from) == REG
      && REGNO (x) == REGNO (from))
    return gen_rtx (CLOBBER, GET_MODE (x), const0_rtx);

  /* If this is an object, we are done unless it is a MEM or LO_SUM, both
     of which may contain things that can be combined.  */
  if (code != MEM && code != LO_SUM && GET_RTX_CLASS (code) == 'o')
    return x;

  /* It is possible to have a subexpression appear twice in the insn.
     Suppose that FROM is a register that appears within TO.
     Then, after that subexpression has been scanned once by `subst',
     the second time it is scanned, TO may be found.  If we were
     to scan TO here, we would find FROM within it and create a
     self-referent rtl structure which is completely wrong.  */
  if (COMBINE_RTX_EQUAL_P (x, to))
    return to;

  len = GET_RTX_LENGTH (code);
  fmt = GET_RTX_FORMAT (code);

  /* We don't need to process a SET_DEST that is a register, CC0, or PC, so
     set up to skip this common case.  All other cases where we want to
     suppress replacing something inside a SET_SRC are handled via the
     IN_DEST operand.  */
  if (code == SET
      && (GET_CODE (SET_DEST (x)) == REG
	  || GET_CODE (SET_DEST (x)) == CC0
	  || GET_CODE (SET_DEST (x)) == PC))
    fmt = "ie";

  /* Get the mode of operand 0 in case X is now a SIGN_EXTEND of a constant. */
  if (fmt[0] == 'e')
    op0_mode = GET_MODE (XEXP (x, 0));

  for (i = 0; i < len; i++)
    {
      if (fmt[i] == 'E')
	{
	  register int j;
	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    {
	      register rtx new;
	      if (COMBINE_RTX_EQUAL_P (XVECEXP (x, i, j), from))
		{
		  new = (unique_copy && n_occurrences ? copy_rtx (to) : to);
		  n_occurrences++;
		}
	      else
		{
		  new = subst (XVECEXP (x, i, j), from, to, 0, unique_copy);

		  /* If this substitution failed, this whole thing fails.  */
		  if (GET_CODE (new) == CLOBBER && XEXP (new, 0) == const0_rtx)
		    return new;
		}

	      SUBST (XVECEXP (x, i, j), new);
	    }
	}
      else if (fmt[i] == 'e')
	{
	  register rtx new;

	  if (COMBINE_RTX_EQUAL_P (XEXP (x, i), from))
	    {
	      /* In general, don't install a subreg involving two modes not
		 tieable.  It can worsen register allocation, and can even
		 make invalid reload insns, since the reg inside may need to
		 be copied from in the outside mode, and that may be invalid
		 if it is an fp reg copied in integer mode.

		 We allow two exceptions to this: It is valid if it is inside
		 another SUBREG and the mode of that SUBREG and the mode of
		 the inside of TO is tieable and it is valid if X is a SET
		 that copies FROM to CC0.  */
	      if (GET_CODE (to) == SUBREG
		  && ! MODES_TIEABLE_P (GET_MODE (to),
					GET_MODE (SUBREG_REG (to)))
		  && ! (code == SUBREG
			&& MODES_TIEABLE_P (mode, GET_MODE (SUBREG_REG (to))))
#ifdef HAVE_cc0
		  && ! (code == SET && i == 1 && XEXP (x, 0) == cc0_rtx)
#endif
		  )
		return gen_rtx (CLOBBER, VOIDmode, const0_rtx);

	      new = (unique_copy && n_occurrences ? copy_rtx (to) : to);
	      n_occurrences++;
	    }
	  else
	    /* If we are in a SET_DEST, suppress most cases unless we
	       have gone inside a MEM, in which case we want to
	       simplify the address.  We assume here that things that
	       are actually part of the destination have their inner
	       parts in the first expression.  This is true for SUBREG,
	       STRICT_LOW_PART, and ZERO_EXTRACT, which are the only
	       things aside from REG and MEM that should appear in a
	       SET_DEST.  */
	    new = subst (XEXP (x, i), from, to,
			 (((in_dest
			    && (code == SUBREG || code == STRICT_LOW_PART
				|| code == ZERO_EXTRACT))
			   || code == SET)
			  && i == 0), unique_copy);

	  /* If we found that we will have to reject this combination,
	     indicate that by returning the CLOBBER ourselves, rather than
	     an expression containing it.  This will speed things up as
	     well as prevent accidents where two CLOBBERs are considered
	     to be equal, thus producing an incorrect simplification.  */

	  if (GET_CODE (new) == CLOBBER && XEXP (new, 0) == const0_rtx)
	    return new;

	  SUBST (XEXP (x, i), new);
	}
    }

  /* We come back to here if we have replaced the expression with one of
     a different code and it is likely that further simplification will be
     possible.  */

 restart:

  /* If we have restarted more than 4 times, we are probably looping, so
     give up.  */
  if (++n_restarts > 4)
    return x;

  /* If we are restarting at all, it means that we no longer know the
     original mode of operand 0 (since we have probably changed the
     form of X).  */

  if (n_restarts > 1)
    op0_mode = VOIDmode;

  code = GET_CODE (x);

  /* If this is a commutative operation, put a constant last and a complex
     expression first.  We don't need to do this for comparisons here.  */
  if (GET_RTX_CLASS (code) == 'c'
      && ((CONSTANT_P (XEXP (x, 0)) && GET_CODE (XEXP (x, 1)) != CONST_INT)
	  || (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == 'o'
	      && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) != 'o')
	  || (GET_CODE (XEXP (x, 0)) == SUBREG
	      && GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0)))) == 'o'
	      && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) != 'o')))
    {
      temp = XEXP (x, 0);
      SUBST (XEXP (x, 0), XEXP (x, 1));
      SUBST (XEXP (x, 1), temp);
    }
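  /* E.g. (plus (const_int 4) (reg 65)) becomes
     (plus (reg 65) (const_int 4)), the canonical order, so later
     simplifications need only check the second operand for a constant.  */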

  /* If this is a PLUS, MINUS, or MULT, and the first operand is the
     sign extension of a PLUS with a constant, reverse the order of the sign
     extension and the addition.  Note that this is not the same as the
     original code, but overflow is undefined for signed values.  Also note
     that the PLUS will have been partially moved "inside" the
     sign-extension, so that the first operand of X will really look like:
	 (ashiftrt (plus (ashift A C4) C5) C4).
     We convert this to
	 (plus (ashiftrt (ashift A C4) C4) (ashiftrt C5 C4))
     and replace the first operand of X with that expression.  Later parts
     of this function may simplify the expression further.

     For example, if we start with (mult (sign_extend (plus A C1)) C2),
     we swap the SIGN_EXTEND and PLUS.  Later code will apply the
     distributive law to produce (plus (mult (sign_extend X) C1) C3).

     We do this to simplify address expressions.  */

  if ((code == PLUS || code == MINUS || code == MULT)
      && GET_CODE (XEXP (x, 0)) == ASHIFTRT
      && GET_CODE (XEXP (XEXP (x, 0), 0)) == PLUS
      && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == ASHIFT
      && GET_CODE (XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 1)) == CONST_INT
      && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
      && XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 1) == XEXP (XEXP (x, 0), 1)
      && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
      && (temp = simplify_binary_operation (ASHIFTRT, mode,
					    XEXP (XEXP (XEXP (x, 0), 0), 1),
					    XEXP (XEXP (x, 0), 1))) != 0)
    {
      rtx new
	= simplify_shift_const (NULL_RTX, ASHIFT, mode,
				XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 0),
				INTVAL (XEXP (XEXP (x, 0), 1)));

      new = simplify_shift_const (NULL_RTX, ASHIFTRT, mode, new,
				  INTVAL (XEXP (XEXP (x, 0), 1)));

      SUBST (XEXP (x, 0), gen_binary (PLUS, mode, new, temp));
    }

  /* If this is a simple operation applied to an IF_THEN_ELSE, try
     applying it to the arms of the IF_THEN_ELSE.  This often simplifies
     things.  Don't deal with operations that change modes here.  */

  if ((GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c')
      && GET_CODE (XEXP (x, 0)) == IF_THEN_ELSE)
    {
      /* Don't do this by using SUBST inside X since we might be messing
	 up a shared expression.  */
      rtx cond = XEXP (XEXP (x, 0), 0);
      rtx t_arm = subst (gen_binary (code, mode, XEXP (XEXP (x, 0), 1),
				     XEXP (x, 1)),
			 pc_rtx, pc_rtx, 0, 0);
      rtx f_arm = subst (gen_binary (code, mode, XEXP (XEXP (x, 0), 2),
				     XEXP (x, 1)),
			 pc_rtx, pc_rtx, 0, 0);

      x = gen_rtx (IF_THEN_ELSE, mode, cond, t_arm, f_arm);
      goto restart;
    }
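  /* E.g. (plus (if_then_else C (reg 65) (reg 66)) (const_int 4))
     becomes (if_then_else C (plus (reg 65) (const_int 4))
			     (plus (reg 66) (const_int 4))),
     and each arm may then simplify on its own.  */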
2922
5109d49f
RK
2923 else if ((GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c')
2924 && GET_CODE (XEXP (x, 1)) == IF_THEN_ELSE)
2925 {
2926 /* Don't do this by using SUBST inside X since we might be messing
2927 up a shared expression. */
2928 rtx cond = XEXP (XEXP (x, 1), 0);
2929 rtx t_arm = subst (gen_binary (code, mode, XEXP (x, 0),
2930 XEXP (XEXP (x, 1), 1)),
2931 pc_rtx, pc_rtx, 0, 0);
2932 rtx f_arm = subst (gen_binary (code, mode, XEXP (x, 0),
2933 XEXP (XEXP (x, 1), 2)),
2934 pc_rtx, pc_rtx, 0, 0);
2935
2936 x = gen_rtx (IF_THEN_ELSE, mode, cond, t_arm, f_arm);
2937 goto restart;
2938 }
2939
2940 else if (GET_RTX_CLASS (code) == '1'
2941 && GET_CODE (XEXP (x, 0)) == IF_THEN_ELSE
2942 && GET_MODE (XEXP (x, 0)) == mode)
2943 {
2944 rtx cond = XEXP (XEXP (x, 0), 0);
2945 rtx t_arm = subst (gen_unary (code, mode, XEXP (XEXP (x, 0), 1)),
1a26b032 2946 pc_rtx, pc_rtx, 0, 0);
58744483 2947 rtx f_arm = subst (gen_unary (code, mode, XEXP (XEXP (x, 0), 2)),
1a26b032 2948 pc_rtx, pc_rtx, 0, 0);
2949
2950 x = gen_rtx_combine (IF_THEN_ELSE, mode, cond, t_arm, f_arm);
2951 goto restart;
2952 }
2953
2954 /* Try to fold this expression in case we have constants that weren't
2955 present before. */
2956 temp = 0;
2957 switch (GET_RTX_CLASS (code))
2958 {
2959 case '1':
2960 temp = simplify_unary_operation (code, mode, XEXP (x, 0), op0_mode);
2961 break;
2962 case '<':
2963 temp = simplify_relational_operation (code, op0_mode,
2964 XEXP (x, 0), XEXP (x, 1));
2965#ifdef FLOAT_STORE_FLAG_VALUE
2966 if (temp != 0 && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
2967 temp = ((temp == const0_rtx) ? CONST0_RTX (GET_MODE (x))
2968 : immed_real_const_1 (FLOAT_STORE_FLAG_VALUE, GET_MODE (x)));
2969#endif
2970 break;
2971 case 'c':
2972 case '2':
2973 temp = simplify_binary_operation (code, mode, XEXP (x, 0), XEXP (x, 1));
2974 break;
2975 case 'b':
2976 case '3':
2977 temp = simplify_ternary_operation (code, mode, op0_mode, XEXP (x, 0),
2978 XEXP (x, 1), XEXP (x, 2));
2979 break;
2980 }
2981
2982 if (temp)
d0ab8cd3 2983 x = temp, code = GET_CODE (temp);
230d793d 2984
230d793d 2985 /* First see if we can apply the inverse distributive law. */
2986 if (code == PLUS || code == MINUS
2987 || code == AND || code == IOR || code == XOR)
2988 {
2989 x = apply_distributive_law (x);
2990 code = GET_CODE (x);
2991 }
2992
2993 /* If CODE is an associative operation not otherwise handled, see if we
2994 can associate some operands. This can win if they are constants or
2995 if they are logically related (i.e., (a & b) & a). */
2996 if ((code == PLUS || code == MINUS
2997 || code == MULT || code == AND || code == IOR || code == XOR
2998 || code == DIV || code == UDIV
2999 || code == SMAX || code == SMIN || code == UMAX || code == UMIN)
3ad2180a 3000 && INTEGRAL_MODE_P (mode))
3001 {
3002 if (GET_CODE (XEXP (x, 0)) == code)
3003 {
3004 rtx other = XEXP (XEXP (x, 0), 0);
3005 rtx inner_op0 = XEXP (XEXP (x, 0), 1);
3006 rtx inner_op1 = XEXP (x, 1);
3007 rtx inner;
3008
3009 /* Make sure we pass the constant operand if any as the second
3010 one if this is a commutative operation. */
3011 if (CONSTANT_P (inner_op0) && GET_RTX_CLASS (code) == 'c')
3012 {
3013 rtx tem = inner_op0;
3014 inner_op0 = inner_op1;
3015 inner_op1 = tem;
3016 }
3017 inner = simplify_binary_operation (code == MINUS ? PLUS
3018 : code == DIV ? MULT
3019 : code == UDIV ? MULT
3020 : code,
3021 mode, inner_op0, inner_op1);
3022
3023 /* For commutative operations, try the other pair if that one
3024 didn't simplify. */
3025 if (inner == 0 && GET_RTX_CLASS (code) == 'c')
3026 {
3027 other = XEXP (XEXP (x, 0), 1);
3028 inner = simplify_binary_operation (code, mode,
3029 XEXP (XEXP (x, 0), 0),
3030 XEXP (x, 1));
3031 }
3032
3033 if (inner)
3034 {
3035 x = gen_binary (code, mode, other, inner);
3036 goto restart;
3037
3038 }
3039 }
3040 }
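 /* Two illustrative cases: (and (and X (const_int 12)) (const_int 10))
    folds the constants to give (and X (const_int 8)), and
    (minus (minus X (const_int 4)) (const_int 3)) is handled by treating
    the inner operation as PLUS, folding 4 and 3 together to give
    (minus X (const_int 7)).  */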
3041
3042 /* A little bit of algebraic simplification here. */
3043 switch (code)
3044 {
3045 case MEM:
3046 /* Ensure that our address has any ASHIFTs converted to MULT in case
3047 address-recognizing predicates are called later. */
3048 temp = make_compound_operation (XEXP (x, 0), MEM);
3049 SUBST (XEXP (x, 0), temp);
3050 break;
3051
3052 case SUBREG:
3053 /* (subreg:A (mem:B X) N) becomes a modified MEM unless the SUBREG
3054 is paradoxical. If we can't do that safely, then it becomes
3055 something nonsensical so that this combination won't take place. */
3056
3057 if (GET_CODE (SUBREG_REG (x)) == MEM
3058 && (GET_MODE_SIZE (mode)
3059 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
3060 {
3061 rtx inner = SUBREG_REG (x);
3062 int endian_offset = 0;
3063 /* Don't change the mode of the MEM
3064 if that would change the meaning of the address. */
3065 if (MEM_VOLATILE_P (SUBREG_REG (x))
3066 || mode_dependent_address_p (XEXP (inner, 0)))
3067 return gen_rtx (CLOBBER, mode, const0_rtx);
3068
3069#if BYTES_BIG_ENDIAN
3070 if (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
3071 endian_offset += UNITS_PER_WORD - GET_MODE_SIZE (mode);
3072 if (GET_MODE_SIZE (GET_MODE (inner)) < UNITS_PER_WORD)
3073 endian_offset -= UNITS_PER_WORD - GET_MODE_SIZE (GET_MODE (inner));
3074#endif
3075 /* Note if the plus_constant doesn't make a valid address
3076 then this combination won't be accepted. */
3077 x = gen_rtx (MEM, mode,
3078 plus_constant (XEXP (inner, 0),
3079 (SUBREG_WORD (x) * UNITS_PER_WORD
3080 + endian_offset)));
3081 MEM_VOLATILE_P (x) = MEM_VOLATILE_P (inner);
3082 RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (inner);
3083 MEM_IN_STRUCT_P (x) = MEM_IN_STRUCT_P (inner);
3084 return x;
3085 }
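 /* For example, (subreg:QI (mem:SI ADDR) 0) becomes (mem:QI ADDR) on a
    little-endian machine.  With BYTES_BIG_ENDIAN and 4-byte words the
    low-order byte lives at the high address, so the replacement is
    (mem:QI (plus ADDR (const_int 3))) instead.  */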
3086
3087 /* If we are in a SET_DEST, these other cases can't apply. */
3088 if (in_dest)
3089 return x;
3090
3091 /* Changing mode twice with SUBREG => just change it once,
3092 or not at all if changing back to starting mode. */
3093 if (GET_CODE (SUBREG_REG (x)) == SUBREG)
3094 {
3095 if (mode == GET_MODE (SUBREG_REG (SUBREG_REG (x)))
3096 && SUBREG_WORD (x) == 0 && SUBREG_WORD (SUBREG_REG (x)) == 0)
3097 return SUBREG_REG (SUBREG_REG (x));
3098
3099 SUBST_INT (SUBREG_WORD (x),
3100 SUBREG_WORD (x) + SUBREG_WORD (SUBREG_REG (x)));
3101 SUBST (SUBREG_REG (x), SUBREG_REG (SUBREG_REG (x)));
3102 }
3103
3104 /* SUBREG of a hard register => just change the register number
3105 and/or mode. If the hard register is not valid in that mode,
3106 suppress this combination. If the hard register is the stack,
3107 frame, or argument pointer, leave this as a SUBREG. */
3108
3109 if (GET_CODE (SUBREG_REG (x)) == REG
3110 && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER
3111 && REGNO (SUBREG_REG (x)) != FRAME_POINTER_REGNUM
3112#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
3113 && REGNO (SUBREG_REG (x)) != HARD_FRAME_POINTER_REGNUM
3114#endif
3115#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3116 && REGNO (SUBREG_REG (x)) != ARG_POINTER_REGNUM
3117#endif
3118 && REGNO (SUBREG_REG (x)) != STACK_POINTER_REGNUM)
3119 {
3120 if (HARD_REGNO_MODE_OK (REGNO (SUBREG_REG (x)) + SUBREG_WORD (x),
3121 mode))
3122 return gen_rtx (REG, mode,
3123 REGNO (SUBREG_REG (x)) + SUBREG_WORD (x));
3124 else
3125 return gen_rtx (CLOBBER, mode, const0_rtx);
3126 }
3127
3128 /* For a constant, try to pick up the part we want. Handle a full
3129 word and low-order part. Only do this if we are narrowing
3130 the constant; if it is being widened, we have no idea what
3131 the extra bits will have been set to. */
3132
3133 if (CONSTANT_P (SUBREG_REG (x)) && op0_mode != VOIDmode
3134 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
a4bde0b1 3135 && GET_MODE_SIZE (op0_mode) < UNITS_PER_WORD
3136 && GET_MODE_CLASS (mode) == MODE_INT)
3137 {
3138 temp = operand_subword (SUBREG_REG (x), SUBREG_WORD (x),
5f4f0e22 3139 0, op0_mode);
3140 if (temp)
3141 return temp;
3142 }
3143
3144 /* If we want a subreg of a constant, at offset 0,
3145 take the low bits. On a little-endian machine, that's
3146 always valid. On a big-endian machine, it's valid
3147 only if the constant's mode fits in one word. */
a4bde0b1 3148 if (CONSTANT_P (SUBREG_REG (x)) && subreg_lowpart_p (x)
3149 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (op0_mode)
3150#if WORDS_BIG_ENDIAN
097e45d1 3151 && GET_MODE_BITSIZE (op0_mode) <= BITS_PER_WORD
3152#endif
3153 )
3154 return gen_lowpart_for_combine (mode, SUBREG_REG (x));
3155
3156 /* If we are narrowing the object, we need to see if we can simplify
3157 the expression for the object knowing that we only need the
3158 low-order bits. */
3159
230d793d 3160 if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
d0ab8cd3 3161 && subreg_lowpart_p (x))
6139ff20 3162 return force_to_mode (SUBREG_REG (x), mode, GET_MODE_MASK (mode),
d0ab8cd3 3163 NULL_RTX);
3164 break;
3165
3166 case NOT:
3167 /* (not (plus X -1)) can become (neg X). */
3168 if (GET_CODE (XEXP (x, 0)) == PLUS
3169 && XEXP (XEXP (x, 0), 1) == constm1_rtx)
3170 {
3171 x = gen_rtx_combine (NEG, mode, XEXP (XEXP (x, 0), 0));
3172 goto restart;
3173 }
3174
3175 /* Similarly, (not (neg X)) is (plus X -1). */
3176 if (GET_CODE (XEXP (x, 0)) == NEG)
3177 {
3178 x = gen_rtx_combine (PLUS, mode, XEXP (XEXP (x, 0), 0), constm1_rtx);
3179 goto restart;
3180 }
3181
3182 /* (not (xor X C)) for C constant is (xor X D) with D = ~ C. */
3183 if (GET_CODE (XEXP (x, 0)) == XOR
3184 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3185 && (temp = simplify_unary_operation (NOT, mode,
3186 XEXP (XEXP (x, 0), 1),
3187 mode)) != 0)
3188 {
3189 SUBST (XEXP (XEXP (x, 0), 1), temp);
3190 return XEXP (x, 0);
3191 }
3192
3193 /* (not (ashift 1 X)) is (rotate ~1 X). We used to do this for operands
3194 other than 1, but that is not valid. We could do a similar
3195 simplification for (not (lshiftrt C X)) where C is just the sign bit,
3196 but this doesn't seem common enough to bother with. */
3197 if (GET_CODE (XEXP (x, 0)) == ASHIFT
3198 && XEXP (XEXP (x, 0), 0) == const1_rtx)
3199 {
3200 x = gen_rtx (ROTATE, mode, gen_unary (NOT, mode, const1_rtx),
3201 XEXP (XEXP (x, 0), 1));
3202 goto restart;
3203 }
3204
3205 if (GET_CODE (XEXP (x, 0)) == SUBREG
3206 && subreg_lowpart_p (XEXP (x, 0))
3207 && (GET_MODE_SIZE (GET_MODE (XEXP (x, 0)))
3208 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (x, 0)))))
3209 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == ASHIFT
3210 && XEXP (SUBREG_REG (XEXP (x, 0)), 0) == const1_rtx)
3211 {
3212 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (XEXP (x, 0)));
3213
3214 x = gen_rtx (ROTATE, inner_mode,
3215 gen_unary (NOT, inner_mode, const1_rtx),
3216 XEXP (SUBREG_REG (XEXP (x, 0)), 1));
3217 x = gen_lowpart_for_combine (mode, x);
3218 goto restart;
3219 }
3220
3221#if STORE_FLAG_VALUE == -1
3222 /* (not (comparison foo bar)) can be done by reversing the comparison
3223 code if valid. */
3224 if (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
3225 && reversible_comparison_p (XEXP (x, 0)))
3226 return gen_rtx_combine (reverse_condition (GET_CODE (XEXP (x, 0))),
3227 mode, XEXP (XEXP (x, 0), 0),
3228 XEXP (XEXP (x, 0), 1));
3229
3230 /* (ashiftrt foo C) where C is the number of bits in FOO minus 1
3231 is (lt foo (const_int 0)), so we can perform the above
3232 simplification. */
3233
3234 if (GET_CODE (XEXP (x, 0)) == ASHIFTRT
3236 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3237 && INTVAL (XEXP (XEXP (x, 0), 1)) == GET_MODE_BITSIZE (mode) - 1)
3238 return gen_rtx_combine (GE, mode, XEXP (XEXP (x, 0), 0), const0_rtx);
3239#endif
3240
3241 /* Apply De Morgan's laws to reduce number of patterns for machines
3242 with negating logical insns (and-not, nand, etc.). If result has
3243 only one NOT, put it first, since that is how the patterns are
3244 coded. */
3245
3246 if (GET_CODE (XEXP (x, 0)) == IOR || GET_CODE (XEXP (x, 0)) == AND)
3247 {
3248 rtx in1 = XEXP (XEXP (x, 0), 0), in2 = XEXP (XEXP (x, 0), 1);
3249
3250 if (GET_CODE (in1) == NOT)
3251 in1 = XEXP (in1, 0);
3252 else
3253 in1 = gen_rtx_combine (NOT, GET_MODE (in1), in1);
3254
3255 if (GET_CODE (in2) == NOT)
3256 in2 = XEXP (in2, 0);
3257 else if (GET_CODE (in2) == CONST_INT
3258 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3259 in2 = GEN_INT (GET_MODE_MASK (mode) & ~ INTVAL (in2));
3260 else
3261 in2 = gen_rtx_combine (NOT, GET_MODE (in2), in2);
3262
3263 if (GET_CODE (in2) == NOT)
3264 {
3265 rtx tem = in2;
3266 in2 = in1; in1 = tem;
3267 }
3268
3269 x = gen_rtx_combine (GET_CODE (XEXP (x, 0)) == IOR ? AND : IOR,
3270 mode, in1, in2);
3271 goto restart;
3272 }
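 /* For instance, in QImode (not (ior A (const_int 6))) becomes
    (and (not A) (const_int 249)), since 0xff & ~6 == 0xf9; the dual
    (not (and A B)) becomes an IOR of (not A) and (not B).  */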
3273 break;
3274
3275 case NEG:
3276 /* (neg (plus X 1)) can become (not X). */
3277 if (GET_CODE (XEXP (x, 0)) == PLUS
3278 && XEXP (XEXP (x, 0), 1) == const1_rtx)
3279 {
3280 x = gen_rtx_combine (NOT, mode, XEXP (XEXP (x, 0), 0));
3281 goto restart;
3282 }
3283
3284 /* Similarly, (neg (not X)) is (plus X 1). */
3285 if (GET_CODE (XEXP (x, 0)) == NOT)
3286 {
5109d49f 3287 x = plus_constant (XEXP (XEXP (x, 0), 0), 1);
3288 goto restart;
3289 }
3290
3291 /* (neg (minus X Y)) can become (minus Y X). */
3292 if (GET_CODE (XEXP (x, 0)) == MINUS
3ad2180a 3293 && (! FLOAT_MODE_P (mode)
3294 /* x-y != -(y-x) with IEEE floating point. */
3295 || TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT))
3296 {
3297 x = gen_binary (MINUS, mode, XEXP (XEXP (x, 0), 1),
3298 XEXP (XEXP (x, 0), 0));
3299 goto restart;
3300 }
3301
3302 /* (neg (xor A 1)) is (plus A -1) if A is known to be either 0 or 1. */
3303 if (GET_CODE (XEXP (x, 0)) == XOR && XEXP (XEXP (x, 0), 1) == const1_rtx
951553af 3304 && nonzero_bits (XEXP (XEXP (x, 0), 0), mode) == 1)
3305 {
3306 x = gen_binary (PLUS, mode, XEXP (XEXP (x, 0), 0), constm1_rtx);
3307 goto restart;
3308 }
3309
3310 /* NEG commutes with ASHIFT since it is multiplication. Only do this
3311 if we can then eliminate the NEG (e.g.,
3312 if the operand is a constant). */
3313
3314 if (GET_CODE (XEXP (x, 0)) == ASHIFT)
3315 {
3316 temp = simplify_unary_operation (NEG, mode,
3317 XEXP (XEXP (x, 0), 0), mode);
3318 if (temp)
3319 {
3320 SUBST (XEXP (XEXP (x, 0), 0), temp);
3321 return XEXP (x, 0);
3322 }
3323 }
3324
3325 temp = expand_compound_operation (XEXP (x, 0));
3326
3327 /* For C equal to the width of MODE minus 1, (neg (ashiftrt X C)) can be
3328 replaced by (lshiftrt X C). This will convert
3329 (neg (sign_extract X 1 Y)) to (zero_extract X 1 Y). */
3330
3331 if (GET_CODE (temp) == ASHIFTRT
3332 && GET_CODE (XEXP (temp, 1)) == CONST_INT
3333 && INTVAL (XEXP (temp, 1)) == GET_MODE_BITSIZE (mode) - 1)
3334 {
3335 x = simplify_shift_const (temp, LSHIFTRT, mode, XEXP (temp, 0),
3336 INTVAL (XEXP (temp, 1)));
3337 goto restart;
3338 }
3339
951553af 3340 /* If X has only a single bit that might be nonzero, say, bit I, convert
3341 (neg X) to (ashiftrt (ashift X C-I) C-I) where C is the bitsize of
3342 MODE minus 1. This will convert (neg (zero_extract X 1 Y)) to
3343 (sign_extract X 1 Y). But only do this if TEMP isn't a register
3344 or a SUBREG of one since we'd be making the expression more
3345 complex if it was just a register. */
3346
3347 if (GET_CODE (temp) != REG
3348 && ! (GET_CODE (temp) == SUBREG
3349 && GET_CODE (SUBREG_REG (temp)) == REG)
951553af 3350 && (i = exact_log2 (nonzero_bits (temp, mode))) >= 0)
3351 {
3352 rtx temp1 = simplify_shift_const
3353 (NULL_RTX, ASHIFTRT, mode,
3354 simplify_shift_const (NULL_RTX, ASHIFT, mode, temp,
3355 GET_MODE_BITSIZE (mode) - 1 - i),
3356 GET_MODE_BITSIZE (mode) - 1 - i);
3357
3358 /* If all we did was surround TEMP with the two shifts, we
3359 haven't improved anything, so don't use it. Otherwise,
3360 we are better off with TEMP1. */
3361 if (GET_CODE (temp1) != ASHIFTRT
3362 || GET_CODE (XEXP (temp1, 0)) != ASHIFT
3363 || XEXP (XEXP (temp1, 0), 0) != temp)
3364 {
3365 x = temp1;
3366 goto restart;
3367 }
3368 }
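 /* Illustration: if nonzero_bits shows TEMP can only be 0 or 8 (bit 3),
    then in SImode (neg TEMP) becomes (ashiftrt (ashift TEMP 28) 28);
    shifting bit 3 up to the sign bit and back smears it, yielding 0 or
    -8 as required.  */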
3369 break;
3370
3371 case FLOAT_TRUNCATE:
3372 /* (float_truncate:SF (float_extend:DF foo:SF)) = foo:SF. */
3373 if (GET_CODE (XEXP (x, 0)) == FLOAT_EXTEND
3374 && GET_MODE (XEXP (XEXP (x, 0), 0)) == mode)
3375 return XEXP (XEXP (x, 0), 0);
3376 break;
3377
3378#ifdef HAVE_cc0
3379 case COMPARE:
3380 /* Convert (compare FOO (const_int 0)) to FOO unless we aren't
3381 using cc0, in which case we want to leave it as a COMPARE
3382 so we can distinguish it from a register-register-copy. */
3383 if (XEXP (x, 1) == const0_rtx)
3384 return XEXP (x, 0);
3385
3386 /* In IEEE floating point, x-0 is not the same as x. */
3387 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3ad2180a 3388 || ! FLOAT_MODE_P (GET_MODE (XEXP (x, 0))))
3389 && XEXP (x, 1) == CONST0_RTX (GET_MODE (XEXP (x, 0))))
3390 return XEXP (x, 0);
3391 break;
3392#endif
3393
3394 case CONST:
3395 /* (const (const X)) can become (const X). Do it this way rather than
3396 returning the inner CONST since CONST can be shared with a
3397 REG_EQUAL note. */
3398 if (GET_CODE (XEXP (x, 0)) == CONST)
3399 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
3400 break;
3401
3402#ifdef HAVE_lo_sum
3403 case LO_SUM:
3404 /* Convert (lo_sum (high FOO) FOO) to FOO. This is necessary so we
3405 can add in an offset. find_split_point will split this address up
3406 again if it doesn't match. */
3407 if (GET_CODE (XEXP (x, 0)) == HIGH
3408 && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1)))
3409 return XEXP (x, 1);
3410 break;
3411#endif
3412
3413 case PLUS:
3414 /* If we have (plus (plus (A const) B)), associate it so that CONST is
3415 outermost. That's because that's the way indexed addresses are
3416 supposed to appear. This code used to check many more cases, but
3417 they are now checked elsewhere. */
3418 if (GET_CODE (XEXP (x, 0)) == PLUS
3419 && CONSTANT_ADDRESS_P (XEXP (XEXP (x, 0), 1)))
3420 return gen_binary (PLUS, mode,
3421 gen_binary (PLUS, mode, XEXP (XEXP (x, 0), 0),
3422 XEXP (x, 1)),
3423 XEXP (XEXP (x, 0), 1));
3424
3425 /* (plus (xor (and <foo> (const_int pow2 - 1)) <c>) <-c>)
3426 when c is (const_int (pow2 + 1) / 2) is a sign extension of a
3427 bit-field and can be replaced by either a sign_extend or a
3428 sign_extract. The `and' may be a zero_extend. */
3429 if (GET_CODE (XEXP (x, 0)) == XOR
3430 && GET_CODE (XEXP (x, 1)) == CONST_INT
3431 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3432 && INTVAL (XEXP (x, 1)) == - INTVAL (XEXP (XEXP (x, 0), 1))
3433 && (i = exact_log2 (INTVAL (XEXP (XEXP (x, 0), 1)))) >= 0
5f4f0e22 3434 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3435 && ((GET_CODE (XEXP (XEXP (x, 0), 0)) == AND
3436 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
3437 && (INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1))
5f4f0e22 3438 == ((HOST_WIDE_INT) 1 << (i + 1)) - 1))
3439 || (GET_CODE (XEXP (XEXP (x, 0), 0)) == ZERO_EXTEND
3440 && (GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)))
3441 == i + 1))))
3442 {
3443 x = simplify_shift_const
3444 (NULL_RTX, ASHIFTRT, mode,
3445 simplify_shift_const (NULL_RTX, ASHIFT, mode,
3446 XEXP (XEXP (XEXP (x, 0), 0), 0),
3447 GET_MODE_BITSIZE (mode) - (i + 1)),
3448 GET_MODE_BITSIZE (mode) - (i + 1));
3449 goto restart;
3450 }
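 /* Concretely, in SImode (plus (xor (and X 7) 4) -4) sign-extends the
    low three bits of X, so I == 2 and the result is
    (ashiftrt (ashift X 29) 29).  Check with X == 5 (binary 101):
    (5 ^ 4) - 4 == -3, which is indeed 101 sign-extended from 3 bits.  */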
3451
3452 /* (plus (comparison A B) C) can become (neg (rev-comp A B)) if
3453 C is 1 and STORE_FLAG_VALUE is -1 or if C is -1 and STORE_FLAG_VALUE
3454 is 1. This produces better code than the alternative immediately
3455 below. */
3456 if (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
3457 && reversible_comparison_p (XEXP (x, 0))
3458 && ((STORE_FLAG_VALUE == -1 && XEXP (x, 1) == const1_rtx)
3459 || (STORE_FLAG_VALUE == 1 && XEXP (x, 1) == constm1_rtx)))
3460 {
3461 x = gen_binary (reverse_condition (GET_CODE (XEXP (x, 0))),
3462 mode, XEXP (XEXP (x, 0), 0), XEXP (XEXP (x, 0), 1));
3463 x = gen_unary (NEG, mode, x);
3464 goto restart;
3465 }
3466
3467 /* If only the low-order bit of X is possibly nonzero, (plus x -1)
3468 can become (ashiftrt (ashift (xor x 1) C) C) where C is
3469 the bitsize of the mode - 1. This allows simplification of
3470 "a = (b & 8) == 0;" */
3471 if (XEXP (x, 1) == constm1_rtx
3472 && GET_CODE (XEXP (x, 0)) != REG
3473 && ! (GET_CODE (XEXP (x,0)) == SUBREG
3474 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG)
951553af 3475 && nonzero_bits (XEXP (x, 0), mode) == 1)
3476 {
3477 x = simplify_shift_const
3478 (NULL_RTX, ASHIFTRT, mode,
3479 simplify_shift_const (NULL_RTX, ASHIFT, mode,
3480 gen_rtx_combine (XOR, mode,
3481 XEXP (x, 0), const1_rtx),
3482 GET_MODE_BITSIZE (mode) - 1),
3483 GET_MODE_BITSIZE (mode) - 1);
3484 goto restart;
3485 }
3486
3487 /* If we are adding two things that have no bits in common, convert
3488 the addition into an IOR. This will often be further simplified,
3489 for example in cases like ((a & 1) + (a & 2)), which can
3490 become a & 3. */
3491
ac49a949 3492 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3493 && (nonzero_bits (XEXP (x, 0), mode)
3494 & nonzero_bits (XEXP (x, 1), mode)) == 0)
3495 {
3496 x = gen_binary (IOR, mode, XEXP (x, 0), XEXP (x, 1));
3497 goto restart;
3498 }
3499 break;
3500
3501 case MINUS:
3502#if STORE_FLAG_VALUE == 1
3503 /* (minus 1 (comparison foo bar)) can be done by reversing the comparison
3504 code if valid. */
3505 if (XEXP (x, 0) == const1_rtx
3506 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == '<'
3507 && reversible_comparison_p (XEXP (x, 1)))
3508 return gen_binary (reverse_condition (GET_CODE (XEXP (x, 1))),
3509 mode, XEXP (XEXP (x, 1), 0),
3510 XEXP (XEXP (x, 1), 1));
3511#endif
3512
3513 /* (minus <foo> (and <foo> (const_int -pow2))) becomes
3514 (and <foo> (const_int pow2-1)) */
3515 if (GET_CODE (XEXP (x, 1)) == AND
3516 && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
3517 && exact_log2 (- INTVAL (XEXP (XEXP (x, 1), 1))) >= 0
3518 && rtx_equal_p (XEXP (XEXP (x, 1), 0), XEXP (x, 0)))
3519 {
5f4f0e22 3520 x = simplify_and_const_int (NULL_RTX, mode, XEXP (x, 0),
3521 - INTVAL (XEXP (XEXP (x, 1), 1)) - 1);
3522 goto restart;
3523 }
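 /* E.g. (minus X (and X (const_int -8))) becomes
    (and X (const_int 7)): subtracting the high bits of X leaves only
    its low three bits.  */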
3524 break;
3525
3526 case MULT:
3527 /* If we have (mult (plus A B) C), apply the distributive law and then
3528 the inverse distributive law to see if things simplify. This
3529 occurs mostly in addresses, often when unrolling loops. */
3530
3531 if (GET_CODE (XEXP (x, 0)) == PLUS)
3532 {
3533 x = apply_distributive_law
3534 (gen_binary (PLUS, mode,
3535 gen_binary (MULT, mode,
3536 XEXP (XEXP (x, 0), 0), XEXP (x, 1)),
3537 gen_binary (MULT, mode,
3538 XEXP (XEXP (x, 0), 1), XEXP (x, 1))));
3539
3540 if (GET_CODE (x) != MULT)
3541 goto restart;
3542 }
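 /* For example, (mult (plus A (const_int 4)) (const_int 8)) distributes
    to (plus (mult A 8) (mult 4 8)), which later simplification can
    reduce to (plus (ashift A 3) (const_int 32)).  */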
3543
3544 /* If this is multiplication by a power of two and its first operand is
3545 a shift, treat the multiply as a shift to allow the shifts to
3546 possibly combine. */
3547 if (GET_CODE (XEXP (x, 1)) == CONST_INT
3548 && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0
3549 && (GET_CODE (XEXP (x, 0)) == ASHIFT
3550 || GET_CODE (XEXP (x, 0)) == LSHIFTRT
3551 || GET_CODE (XEXP (x, 0)) == ASHIFTRT
3552 || GET_CODE (XEXP (x, 0)) == ROTATE
3553 || GET_CODE (XEXP (x, 0)) == ROTATERT))
3554 {
5f4f0e22 3555 x = simplify_shift_const (NULL_RTX, ASHIFT, mode, XEXP (x, 0), i);
3556 goto restart;
3557 }
3558
3559 /* Convert (mult (ashift (const_int 1) A) B) to (ashift B A). */
3560 if (GET_CODE (XEXP (x, 0)) == ASHIFT
3561 && XEXP (XEXP (x, 0), 0) == const1_rtx)
3562 return gen_rtx_combine (ASHIFT, mode, XEXP (x, 1),
3563 XEXP (XEXP (x, 0), 1));
3564 break;
3565
3566 case UDIV:
3567 /* If this is a divide by a power of two, treat it as a shift if
3568 its first operand is a shift. */
3569 if (GET_CODE (XEXP (x, 1)) == CONST_INT
3570 && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0
3571 && (GET_CODE (XEXP (x, 0)) == ASHIFT
3572 || GET_CODE (XEXP (x, 0)) == LSHIFTRT
3573 || GET_CODE (XEXP (x, 0)) == ASHIFTRT
3574 || GET_CODE (XEXP (x, 0)) == ROTATE
3575 || GET_CODE (XEXP (x, 0)) == ROTATERT))
3576 {
5f4f0e22 3577 x = simplify_shift_const (NULL_RTX, LSHIFTRT, mode, XEXP (x, 0), i);
3578 goto restart;
3579 }
3580 break;
3581
3582 case EQ: case NE:
3583 case GT: case GTU: case GE: case GEU:
3584 case LT: case LTU: case LE: case LEU:
3585 /* If the first operand is a condition code, we can't do anything
3586 with it. */
3587 if (GET_CODE (XEXP (x, 0)) == COMPARE
3588 || (GET_MODE_CLASS (GET_MODE (XEXP (x, 0))) != MODE_CC
3589#ifdef HAVE_cc0
3590 && XEXP (x, 0) != cc0_rtx
3591#endif
3592 ))
3593 {
3594 rtx op0 = XEXP (x, 0);
3595 rtx op1 = XEXP (x, 1);
3596 enum rtx_code new_code;
3597
3598 if (GET_CODE (op0) == COMPARE)
3599 op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);
3600
3601 /* Simplify our comparison, if possible. */
3602 new_code = simplify_comparison (code, &op0, &op1);
3603
3604#if STORE_FLAG_VALUE == 1
3605 /* If STORE_FLAG_VALUE is 1, we can convert (ne x 0) to simply X
951553af 3606 if only the low-order bit is possibly nonzero in X (such as when
3607 X is a ZERO_EXTRACT of one bit). Similarly, we can convert EQ to
3608 (xor X 1) or (minus 1 X); we use the former. Finally, if X is
3609 known to be either 0 or -1, NE becomes a NEG and EQ becomes
3610 (plus X 1).
3611
3612 Remove any ZERO_EXTRACT we made when thinking this was a
3613 comparison. It may now be simpler to use, e.g., an AND. If a
3614 ZERO_EXTRACT is indeed appropriate, it will be placed back by
3615 the call to make_compound_operation in the SET case. */
3616
3f508eca 3617 if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
230d793d 3618 && op1 == const0_rtx
5109d49f 3619 && nonzero_bits (op0, mode) == 1)
3620 return gen_lowpart_for_combine (mode,
3621 expand_compound_operation (op0));
3622
3623 else if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
3624 && op1 == const0_rtx
3625 && (num_sign_bit_copies (op0, mode)
3626 == GET_MODE_BITSIZE (mode)))
3627 {
3628 op0 = expand_compound_operation (op0);
3629 x = gen_unary (NEG, mode, gen_lowpart_for_combine (mode, op0));
3630 goto restart;
3631 }
3632
3f508eca 3633 else if (new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
230d793d 3634 && op1 == const0_rtx
5109d49f 3635 && nonzero_bits (op0, mode) == 1)
3636 {
3637 op0 = expand_compound_operation (op0);
3638 x = gen_binary (XOR, mode,
3639 gen_lowpart_for_combine (mode, op0),
3640 const1_rtx);
3641 goto restart;
3642 }
818b11b9 3643
3644 else if (new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
3645 && op1 == const0_rtx
3646 && (num_sign_bit_copies (op0, mode)
3647 == GET_MODE_BITSIZE (mode)))
3648 {
3649 op0 = expand_compound_operation (op0);
3650 x = plus_constant (gen_lowpart_for_combine (mode, op0), 1);
3651 goto restart;
3652 }
3653#endif
3654
3655#if STORE_FLAG_VALUE == -1
3656 /* If STORE_FLAG_VALUE is -1, we have cases similar to
3657 those above. */
3f508eca 3658 if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
230d793d 3659 && op1 == const0_rtx
3660 && (num_sign_bit_copies (op0, mode)
3661 == GET_MODE_BITSIZE (mode)))
3662 return gen_lowpart_for_combine (mode,
3663 expand_compound_operation (op0));
3664
3665 else if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
3666 && op1 == const0_rtx
3667 && nonzero_bits (op0, mode) == 1)
3668 {
3669 op0 = expand_compound_operation (op0);
3670 x = gen_unary (NEG, mode, gen_lowpart_for_combine (mode, op0));
3671 goto restart;
3672 }
3673
3674 else if (new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
3675 && op1 == const0_rtx
3676 && (num_sign_bit_copies (op0, mode)
3677 == GET_MODE_BITSIZE (mode)))
230d793d 3678 {
818b11b9 3679 op0 = expand_compound_operation (op0);
3680 x = gen_unary (NOT, mode, gen_lowpart_for_combine (mode, op0));
3681 goto restart;
3682 }
3683
3684 /* If X is 0/1, (eq X 0) is X-1. */
3685 else if (new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
3686 && op1 == const0_rtx
3687 && nonzero_bits (op0, mode) == 1)
3688 {
3689 op0 = expand_compound_operation (op0);
3690 x = plus_constant (gen_lowpart_for_combine (mode, op0), -1);
3691 goto restart;
3692 }
3693#endif
3694
3695 /* If STORE_FLAG_VALUE says to just test the sign bit and X has just
3696 one bit that might be nonzero, we can convert (ne x 0) to
3697 (ashift x c) where C puts the bit in the sign bit. Remove any
3698 AND with STORE_FLAG_VALUE when we are done, since we are only
3699 going to test the sign bit. */
3f508eca 3700 if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
3701 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3702 && (STORE_FLAG_VALUE
3703 == (HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (mode) - 1))
3704 && op1 == const0_rtx
3705 && mode == GET_MODE (op0)
5109d49f 3706 && (i = exact_log2 (nonzero_bits (op0, mode))) >= 0)
230d793d 3707 {
3708 x = simplify_shift_const (NULL_RTX, ASHIFT, mode,
3709 expand_compound_operation (op0),
3710 GET_MODE_BITSIZE (mode) - 1 - i);
3711 if (GET_CODE (x) == AND && XEXP (x, 1) == const_true_rtx)
3712 return XEXP (x, 0);
3713 else
3714 return x;
3715 }
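 /* Illustration, on a target where STORE_FLAG_VALUE is the sign bit of
    a 32-bit mode: for (ne (and X 16) 0) only bit 4 can be nonzero, so
    I == 4 and the result is (ashift (and X 16) 27), which puts bit 4
    into the sign bit, the only bit the flag's user will test.  */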
3716
3717 /* If the code changed, return a whole new comparison. */
3718 if (new_code != code)
3719 return gen_rtx_combine (new_code, mode, op0, op1);
3720
3721 /* Otherwise, keep this operation, but maybe change its operands.
3722 This also converts (ne (compare FOO BAR) 0) to (ne FOO BAR). */
3723 SUBST (XEXP (x, 0), op0);
3724 SUBST (XEXP (x, 1), op1);
3725 }
3726 break;
3727
3728 case IF_THEN_ELSE:
3729 /* Sometimes we can simplify the arm of an IF_THEN_ELSE if a register
3730 used in it is being compared against certain values. Get the
3731 true and false comparisons and see if that says anything about the
3732 value of each arm. */
d0ab8cd3 3733
3734 if (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
3735 && reversible_comparison_p (XEXP (x, 0))
3736 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG)
3737 {
951553af 3738 HOST_WIDE_INT nzb;
d0ab8cd3 3739 rtx from = XEXP (XEXP (x, 0), 0);
3740 enum rtx_code true_code = GET_CODE (XEXP (x, 0));
3741 enum rtx_code false_code = reverse_condition (true_code);
3742 rtx true_val = XEXP (XEXP (x, 0), 1);
3743 rtx false_val = true_val;
3744 rtx true_arm = XEXP (x, 1);
3745 rtx false_arm = XEXP (x, 2);
3746 int swapped = 0;
3747
3748 /* If FALSE_CODE is EQ, swap the codes and arms. */
3749
3750 if (false_code == EQ)
3751 {
3752 swapped = 1, true_code = EQ, false_code = NE;
3753 true_arm = XEXP (x, 2), false_arm = XEXP (x, 1);
3754 }
d0ab8cd3 3755
1a26b032 3756 /* If we are comparing against zero and the expression being tested
3757 has only a single bit that might be nonzero, that is its value
3758 when it is not equal to zero. Similarly if it is known to be
3759 -1 or 0. */
d0ab8cd3 3760
1a26b032 3761 if (true_code == EQ && true_val == const0_rtx
3762 && exact_log2 (nzb = nonzero_bits (from, GET_MODE (from))) >= 0)
3763 false_code = EQ, false_val = GEN_INT (nzb);
1a26b032 3764 else if (true_code == EQ && true_val == const0_rtx
3765 && (num_sign_bit_copies (from, GET_MODE (from))
3766 == GET_MODE_BITSIZE (GET_MODE (from))))
1a26b032 3767 false_code = EQ, false_val = constm1_rtx;
3768
3769 /* Now simplify an arm if we know the value of the register
3770 in the branch and it is used in the arm. Be careful due to
3771 the potential of locally-shared RTL. */
3772
3773 if (reg_mentioned_p (from, true_arm))
3774 true_arm = subst (known_cond (copy_rtx (true_arm), true_code,
3775 from, true_val),
3776 pc_rtx, pc_rtx, 0, 0);
3777 if (reg_mentioned_p (from, false_arm))
3778 false_arm = subst (known_cond (copy_rtx (false_arm), false_code,
3779 from, false_val),
3780 pc_rtx, pc_rtx, 0, 0);
3781
3782 SUBST (XEXP (x, 1), swapped ? false_arm : true_arm);
3783 SUBST (XEXP (x, 2), swapped ? true_arm : false_arm);
3784 }
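 /* For example, in (if_then_else (eq R (const_int 0)) (plus R 4) (ior R 2)),
    the true arm is simplified with R known to be zero and collapses to
    (const_int 4); if nonzero_bits says R is either 0 or 8, the false
    arm is simplified with R known to be 8, giving (const_int 10).  */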
3785
3786 /* If we have (if_then_else FOO (pc) (label_ref BAR)) and FOO can be
3787 reversed, do so to avoid needing two sets of patterns for
d0ab8cd3 3788 subtract-and-branch insns. Similarly if we have a constant in that
3789 position or if the third operand is the same as the first operand
3790 of the comparison. */
3791
3792 if (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
3793 && reversible_comparison_p (XEXP (x, 0))
3794 && (XEXP (x, 1) == pc_rtx || GET_CODE (XEXP (x, 1)) == CONST_INT
3795 || rtx_equal_p (XEXP (x, 2), XEXP (XEXP (x, 0), 0))))
3796 {
3797 SUBST (XEXP (x, 0),
3798 gen_binary (reverse_condition (GET_CODE (XEXP (x, 0))),
3799 GET_MODE (XEXP (x, 0)),
3800 XEXP (XEXP (x, 0), 0), XEXP (XEXP (x, 0), 1)));
3801
3802 temp = XEXP (x, 1);
230d793d 3803 SUBST (XEXP (x, 1), XEXP (x, 2));
d0ab8cd3 3804 SUBST (XEXP (x, 2), temp);
230d793d 3805 }
3806
3807 /* If the two arms are identical, we don't need the comparison. */
3808
3809 if (rtx_equal_p (XEXP (x, 1), XEXP (x, 2))
3810 && ! side_effects_p (XEXP (x, 0)))
3811 return XEXP (x, 1);
3812
3813 /* Look for cases where we have (abs x) or (neg (abs X)). */
3814
3815 if (GET_MODE_CLASS (mode) == MODE_INT
3816 && GET_CODE (XEXP (x, 2)) == NEG
3817 && rtx_equal_p (XEXP (x, 1), XEXP (XEXP (x, 2), 0))
3818 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
3819 && rtx_equal_p (XEXP (x, 1), XEXP (XEXP (x, 0), 0))
3820 && ! side_effects_p (XEXP (x, 1)))
3821 switch (GET_CODE (XEXP (x, 0)))
3822 {
3823 case GT:
3824 case GE:
3825 x = gen_unary (ABS, mode, XEXP (x, 1));
3826 goto restart;
3827 case LT:
3828 case LE:
3829 x = gen_unary (NEG, mode, gen_unary (ABS, mode, XEXP (x, 1)));
3830 goto restart;
3831 }
3832
3833 /* Look for MIN or MAX. */
3834
3ad2180a 3835 if (! FLOAT_MODE_P (mode)
3836 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
3837 && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1))
3838 && rtx_equal_p (XEXP (XEXP (x, 0), 1), XEXP (x, 2))
3839 && ! side_effects_p (XEXP (x, 0)))
3840 switch (GET_CODE (XEXP (x, 0)))
3841 {
3842 case GE:
3843 case GT:
3844 x = gen_binary (SMAX, mode, XEXP (x, 1), XEXP (x, 2));
3845 goto restart;
3846 case LE:
3847 case LT:
3848 x = gen_binary (SMIN, mode, XEXP (x, 1), XEXP (x, 2));
3849 goto restart;
3850 case GEU:
3851 case GTU:
3852 x = gen_binary (UMAX, mode, XEXP (x, 1), XEXP (x, 2));
3853 goto restart;
3854 case LEU:
3855 case LTU:
3856 x = gen_binary (UMIN, mode, XEXP (x, 1), XEXP (x, 2));
3857 goto restart;
3858 }
3859
3860#if STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1
3861
3862 /* If we have (if_then_else COND (OP Z C1) Z) and OP is an identity when
3863 its second operand is zero, this can be done as (OP Z (mult COND C2))
3864 where C2 = C1 * STORE_FLAG_VALUE. Similarly if OP has an outer
3865 ZERO_EXTEND or SIGN_EXTEND as long as Z is already extended (so
3866 we don't destroy it). We can do this kind of thing in some
3867 cases when STORE_FLAG_VALUE is neither of the above, but it isn't
3868 worth checking for. */
3869
3870 if (mode != VOIDmode && ! side_effects_p (x))
1a26b032 3871 {
5109d49f
RK
3872 rtx t = make_compound_operation (XEXP (x, 1), SET);
3873 rtx f = make_compound_operation (XEXP (x, 2), SET);
3874 rtx cond_op0 = XEXP (XEXP (x, 0), 0);
3875 rtx cond_op1 = XEXP (XEXP (x, 0), 1);
3876 enum rtx_code cond_op = GET_CODE (XEXP (x, 0));
3877 enum rtx_code op, extend_op = NIL;
1a26b032 3878 enum machine_mode m = mode;
3879 rtx z = 0, c1, c2;
3880
3881 if ((GET_CODE (t) == PLUS || GET_CODE (t) == MINUS
3882 || GET_CODE (t) == IOR || GET_CODE (t) == XOR
3883 || GET_CODE (t) == ASHIFT
3884 || GET_CODE (t) == LSHIFTRT || GET_CODE (t) == ASHIFTRT)
3885 && rtx_equal_p (XEXP (t, 0), f))
3886 c1 = XEXP (t, 1), op = GET_CODE (t), z = f;
3887 else if (GET_CODE (t) == SIGN_EXTEND
3888 && (GET_CODE (XEXP (t, 0)) == PLUS
3889 || GET_CODE (XEXP (t, 0)) == MINUS
3890 || GET_CODE (XEXP (t, 0)) == IOR
3891 || GET_CODE (XEXP (t, 0)) == XOR
3892 || GET_CODE (XEXP (t, 0)) == ASHIFT
3893 || GET_CODE (XEXP (t, 0)) == LSHIFTRT
3894 || GET_CODE (XEXP (t, 0)) == ASHIFTRT)
3895 && GET_CODE (XEXP (XEXP (t, 0), 0)) == SUBREG
3896 && subreg_lowpart_p (XEXP (XEXP (t, 0), 0))
3897 && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 0)), f)
3898 && (num_sign_bit_copies (f, GET_MODE (f))
3899 > (GET_MODE_BITSIZE (mode)
3900 - GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (t, 0), 0))))))
1a26b032 3901 {
5109d49f 3902 c1 = XEXP (XEXP (t, 0), 1); z = f; op = GET_CODE (XEXP (t, 0));
1a26b032 3903 extend_op = SIGN_EXTEND;
5109d49f 3904 m = GET_MODE (XEXP (t, 0));
1a26b032 3905 }
3906 else if (GET_CODE (t) == ZERO_EXTEND
3907 && (GET_CODE (XEXP (t, 0)) == PLUS
3908 || GET_CODE (XEXP (t, 0)) == MINUS
3909 || GET_CODE (XEXP (t, 0)) == IOR
3910 || GET_CODE (XEXP (t, 0)) == XOR
3911 || GET_CODE (XEXP (t, 0)) == ASHIFT
3912 || GET_CODE (XEXP (t, 0)) == LSHIFTRT
3913 || GET_CODE (XEXP (t, 0)) == ASHIFTRT)
3914 && GET_CODE (XEXP (XEXP (t, 0), 0)) == SUBREG
1a26b032 3915 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3916 && subreg_lowpart_p (XEXP (XEXP (t, 0), 0))
3917 && rtx_equal_p (SUBREG_REG (XEXP (XEXP (t, 0), 0)), f)
3918 && ((nonzero_bits (f, GET_MODE (f))
3919 & ~ GET_MODE_MASK (GET_MODE (XEXP (XEXP (t, 0), 0))))
3920 == 0))
3921 {
5109d49f 3922 c1 = XEXP (XEXP (t, 0), 1); z = f; op = GET_CODE (XEXP (t, 0));
1a26b032 3923 extend_op = ZERO_EXTEND;
5109d49f 3924 m = GET_MODE (XEXP (t, 0));
3925 }
3926
3927 if (reversible_comparison_p (XEXP (x, 0))
3928 && (GET_CODE (f) == PLUS || GET_CODE (f) == MINUS
3929 || GET_CODE (f) == IOR || GET_CODE (f) == XOR
3930 || GET_CODE (f) == ASHIFT
3931 || GET_CODE (f) == LSHIFTRT || GET_CODE (f) == ASHIFTRT)
3932 && rtx_equal_p (XEXP (f, 0), t))
3933 {
3934 c1 = XEXP (f, 1), op = GET_CODE (f), z = t;
3935 cond_op = reverse_condition (cond_op);
3936 }
3937 else if (GET_CODE (f) == SIGN_EXTEND
3938 && (GET_CODE (XEXP (f, 0)) == PLUS
3939 || GET_CODE (XEXP (f, 0)) == MINUS
3940 || GET_CODE (XEXP (f, 0)) == IOR
3941 || GET_CODE (XEXP (f, 0)) == XOR
3942 || GET_CODE (XEXP (f, 0)) == ASHIFT
3943 || GET_CODE (XEXP (f, 0)) == LSHIFTRT
3944 || GET_CODE (XEXP (f, 0)) == ASHIFTRT)
3945 && GET_CODE (XEXP (XEXP (f, 0), 0)) == SUBREG
3946 && subreg_lowpart_p (XEXP (XEXP (f, 0), 0))
3947 && rtx_equal_p (SUBREG_REG (XEXP (XEXP (f, 0), 0)), f)
3948 && (num_sign_bit_copies (t, GET_MODE (t))
3949 > (GET_MODE_BITSIZE (mode)
3950 - GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (f, 0), 0))))))
3951 {
3952 c1 = XEXP (XEXP (f, 0), 1); z = t; op = GET_CODE (XEXP (f, 0));
3953 extend_op = SIGN_EXTEND;
3954 m = GET_MODE (XEXP (f, 0));
3955 cond_op = reverse_condition (cond_op);
3956 }
3957 else if (GET_CODE (f) == ZERO_EXTEND
3958 && (GET_CODE (XEXP (f, 0)) == PLUS
3959 || GET_CODE (XEXP (f, 0)) == MINUS
3960 || GET_CODE (XEXP (f, 0)) == IOR
3961 || GET_CODE (XEXP (f, 0)) == XOR
3962 || GET_CODE (XEXP (f, 0)) == ASHIFT
3963 || GET_CODE (XEXP (f, 0)) == LSHIFTRT
3964 || GET_CODE (XEXP (f, 0)) == ASHIFTRT)
3965 && GET_CODE (XEXP (XEXP (f, 0), 0)) == SUBREG
3966 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3967 && subreg_lowpart_p (XEXP (XEXP (f, 0), 0))
3968 && rtx_equal_p (SUBREG_REG (XEXP (XEXP (f, 0), 0)), t)
3969 && ((nonzero_bits (t, GET_MODE (t))
3970 & ~ GET_MODE_MASK (GET_MODE (XEXP (XEXP (f, 0), 0))))
3971 == 0))
1a26b032 3972 {
3973 c1 = XEXP (XEXP (f, 0), 1); z = t; op = GET_CODE (XEXP (f, 0));
3974 extend_op = ZERO_EXTEND;
3975 m = GET_MODE (XEXP (f, 0));
3976 cond_op = reverse_condition (cond_op);
3977 }
3978
3979 if (z)
3980 {
3981 temp = subst (gen_binary (cond_op, m, cond_op0, cond_op1),
3982 pc_rtx, pc_rtx, 0, 0);
3983
3984
3985 temp = gen_binary (MULT, m, temp,
3986 gen_binary (MULT, m, c1,
3987 GEN_INT (STORE_FLAG_VALUE)));
3988
3989 temp = gen_binary (op, m, gen_lowpart_for_combine (m, z), temp);
3990
5109d49f 3991 if (extend_op != NIL)
3992 temp = gen_unary (extend_op, mode, temp);
3993
3994 return temp;
3995 }
3996 }
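 /* A sketch of the payoff, assuming STORE_FLAG_VALUE == 1:
    (if_then_else COND (plus Z (const_int 4)) Z) becomes
    (plus Z (mult COND (const_int 4))); when COND is 1 this adds 4 and
    when COND is 0 it adds nothing, matching the two arms.  */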
5109d49f 3997#endif
3998
3999 /* If we have (if_then_else (ne A 0) C1 0) and either A is known to
4000 be 0 or 1 and C1 is a single bit or A is known to be 0 or -1 and
4001 C1 is the negation of a single bit, we can convert this operation
4002 to a shift. We can actually do this in more general cases, but it
4003 doesn't seem worth it. */
4004
4005 if (GET_CODE (XEXP (x, 0)) == NE && XEXP (XEXP (x, 0), 1) == const0_rtx
4006 && XEXP (x, 2) == const0_rtx && GET_CODE (XEXP (x, 1)) == CONST_INT
3e61c219 4007 && ((1 == nonzero_bits (XEXP (XEXP (x, 0), 0), mode)
224eeff2 4008 && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0)
4009 || ((num_sign_bit_copies (XEXP (XEXP (x, 0), 0), mode)
4010 == GET_MODE_BITSIZE (mode))
4011 && (i = exact_log2 (- INTVAL (XEXP (x, 1)))) >= 0)))
4012 return
4013 simplify_shift_const (NULL_RTX, ASHIFT, mode,
4014 gen_lowpart_for_combine (mode,
4015 XEXP (XEXP (x, 0), 0)),
4016 i);
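 /* E.g. if A is known to be 0 or 1, then
    (if_then_else (ne A 0) (const_int 8) (const_int 0)) is just A
    shifted left by three, i.e. (ashift A 3).  */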
4017 break;
4018
4019 case ZERO_EXTRACT:
4020 case SIGN_EXTRACT:
4021 case ZERO_EXTEND:
4022 case SIGN_EXTEND:
4023 /* If we are processing SET_DEST, we are done. */
4024 if (in_dest)
4025 return x;
4026
4027 x = expand_compound_operation (x);
4028 if (GET_CODE (x) != code)
4029 goto restart;
4030 break;
4031
4032 case SET:
4033 /* (set (pc) (return)) gets written as (return). */
4034 if (GET_CODE (SET_DEST (x)) == PC && GET_CODE (SET_SRC (x)) == RETURN)
4035 return SET_SRC (x);
4036
4037 /* Convert this into a field assignment operation, if possible. */
4038 x = make_field_assignment (x);
4039
4040 /* If we are setting CC0 or if the source is a COMPARE, look for the
4041 use of the comparison result and try to simplify it unless we already
4042 have used undobuf.other_insn. */
4043 if ((GET_CODE (SET_SRC (x)) == COMPARE
4044#ifdef HAVE_cc0
4045 || SET_DEST (x) == cc0_rtx
4046#endif
4047 )
4048 && (cc_use = find_single_use (SET_DEST (x), subst_insn,
4049 &other_insn)) != 0
4050 && (undobuf.other_insn == 0 || other_insn == undobuf.other_insn)
4051 && GET_RTX_CLASS (GET_CODE (*cc_use)) == '<'
4052 && XEXP (*cc_use, 0) == SET_DEST (x))
4053 {
4054 enum rtx_code old_code = GET_CODE (*cc_use);
4055 enum rtx_code new_code;
4056 rtx op0, op1;
4057 int other_changed = 0;
4058 enum machine_mode compare_mode = GET_MODE (SET_DEST (x));
4059
4060 if (GET_CODE (SET_SRC (x)) == COMPARE)
4061 op0 = XEXP (SET_SRC (x), 0), op1 = XEXP (SET_SRC (x), 1);
4062 else
4063 op0 = SET_SRC (x), op1 = const0_rtx;
4064
4065 /* Simplify our comparison, if possible. */
4066 new_code = simplify_comparison (old_code, &op0, &op1);
4067
c141a106 4068#ifdef EXTRA_CC_MODES
4069 /* If this machine has CC modes other than CCmode, check to see
4070 if we need to use a different CC mode here. */
77fa0940 4071 compare_mode = SELECT_CC_MODE (new_code, op0, op1);
c141a106 4072#endif /* EXTRA_CC_MODES */
230d793d 4073
c141a106 4074#if !defined (HAVE_cc0) && defined (EXTRA_CC_MODES)
4075 /* If the mode changed, we have to change SET_DEST, the mode
4076 in the compare, and the mode in the place SET_DEST is used.
4077 If SET_DEST is a hard register, just build new versions with
4078 the proper mode. If it is a pseudo, we lose unless it is the only
4079 time we set the pseudo, in which case we can safely change
4080 its mode. */
4081 if (compare_mode != GET_MODE (SET_DEST (x)))
4082 {
4083 int regno = REGNO (SET_DEST (x));
4084 rtx new_dest = gen_rtx (REG, compare_mode, regno);
4085
4086 if (regno < FIRST_PSEUDO_REGISTER
4087 || (reg_n_sets[regno] == 1
4088 && ! REG_USERVAR_P (SET_DEST (x))))
4089 {
4090 if (regno >= FIRST_PSEUDO_REGISTER)
4091 SUBST (regno_reg_rtx[regno], new_dest);
4092
4093 SUBST (SET_DEST (x), new_dest);
4094 SUBST (XEXP (*cc_use, 0), new_dest);
4095 other_changed = 1;
4096 }
4097 }
4098#endif
4099
4100 /* If the code changed, we have to build a new comparison
4101 in undobuf.other_insn. */
4102 if (new_code != old_code)
4103 {
951553af 4104 unsigned HOST_WIDE_INT mask;
4105
4106 SUBST (*cc_use, gen_rtx_combine (new_code, GET_MODE (*cc_use),
4107 SET_DEST (x), const0_rtx));
4108
4109 /* If the only change we made was to change an EQ into an
951553af 4110 NE or vice versa, OP0 has only one bit that might be nonzero,
4111 and OP1 is zero, check if changing the user of the condition
4112 code will produce a valid insn. If it won't, we can keep
4113 the original code in that insn by surrounding our operation
4114 with an XOR. */
4115
4116 if (((old_code == NE && new_code == EQ)
4117 || (old_code == EQ && new_code == NE))
4118 && ! other_changed && op1 == const0_rtx
4119 && (GET_MODE_BITSIZE (GET_MODE (op0))
4120 <= HOST_BITS_PER_WIDE_INT)
951553af 4121 && (exact_log2 (mask = nonzero_bits (op0, GET_MODE (op0)))
4122 >= 0))
4123 {
4124 rtx pat = PATTERN (other_insn), note = 0;
4125
6e2a4e3c 4126 if ((recog_for_combine (&pat, other_insn, &note) < 0
4127 && ! check_asm_operands (pat)))
4128 {
4129 PUT_CODE (*cc_use, old_code);
4130 other_insn = 0;
4131
4132 op0 = gen_binary (XOR, GET_MODE (op0), op0,
5f4f0e22 4133 GEN_INT (mask));
4134 }
4135 }
4136
4137 other_changed = 1;
4138 }
4139
4140 if (other_changed)
4141 undobuf.other_insn = other_insn;
4142
4143#ifdef HAVE_cc0
4144 /* If we are now comparing against zero, change our source if
4145 needed. If we do not use cc0, we always have a COMPARE. */
4146 if (op1 == const0_rtx && SET_DEST (x) == cc0_rtx)
4147 SUBST (SET_SRC (x), op0);
4148 else
4149#endif
4150
4151 /* Otherwise, if we didn't previously have a COMPARE in the
4152 correct mode, we need one. */
4153 if (GET_CODE (SET_SRC (x)) != COMPARE
4154 || GET_MODE (SET_SRC (x)) != compare_mode)
4155 SUBST (SET_SRC (x), gen_rtx_combine (COMPARE, compare_mode,
4156 op0, op1));
4157 else
4158 {
4159 /* Otherwise, update the COMPARE if needed. */
4160 SUBST (XEXP (SET_SRC (x), 0), op0);
4161 SUBST (XEXP (SET_SRC (x), 1), op1);
4162 }
4163 }
4164 else
4165 {
4166 /* Get SET_SRC in a form where we have placed back any
4167 compound expressions. Then do the checks below. */
4168 temp = make_compound_operation (SET_SRC (x), SET);
4169 SUBST (SET_SRC (x), temp);
4170 }
4171
4172 /* If we have (set x (subreg:m1 (op:m2 ...) 0)) with OP being some
4173 operation, and X being a REG or (subreg (reg)), we may be able to
4174 convert this to (set (subreg:m2 x) (op)).
4175
4176 We can always do this if M1 is narrower than M2 because that
4177 means that we only care about the low bits of the result.
4178
4179 However, on machines without WORD_REGISTER_OPERATIONS defined,
4180 we cannot perform a narrower operation than requested since the
4181 high-order bits will be undefined. On machines where it is defined,
4182 this transformation is safe as long as M1 and M2 have the same
4183 number of words. */
4184
4185 if (GET_CODE (SET_SRC (x)) == SUBREG
4186 && subreg_lowpart_p (SET_SRC (x))
4187 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (SET_SRC (x)))) != 'o'
4188 && (((GET_MODE_SIZE (GET_MODE (SET_SRC (x))) + (UNITS_PER_WORD - 1))
4189 / UNITS_PER_WORD)
4190 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_SRC (x))))
4191 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
8baf60bb 4192#ifndef WORD_REGISTER_OPERATIONS
4193 && (GET_MODE_SIZE (GET_MODE (SET_SRC (x)))
4194 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_SRC (x)))))
4195#endif
4196 && (GET_CODE (SET_DEST (x)) == REG
4197 || (GET_CODE (SET_DEST (x)) == SUBREG
4198 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG)))
4199 {
df62f951 4200 SUBST (SET_DEST (x),
4201 gen_lowpart_for_combine (GET_MODE (SUBREG_REG (SET_SRC (x))),
4202 SET_DEST (x)));
4203 SUBST (SET_SRC (x), SUBREG_REG (SET_SRC (x)));
4204 }
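 /* For example, (set R:QI (subreg:QI (plus:SI X Y) 0)) can become
    (set (subreg:SI R 0) (plus:SI X Y)) here, since only the low byte
    of the SImode sum is ever looked at.  */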
4205
8baf60bb 4206#ifdef LOAD_EXTEND_OP
4207 /* If we have (set FOO (subreg:M (mem:N BAR) 0)) with
4208 M wider than N, this would require a paradoxical subreg.
4209 Replace the subreg with a zero_extend to avoid the reload that
4210 would otherwise be required. */
c6dc70d6 4211
230d793d 4212 if (GET_CODE (SET_SRC (x)) == SUBREG
8baf60bb 4213 && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (SET_SRC (x)))) != NIL
4214 && subreg_lowpart_p (SET_SRC (x))
4215 && SUBREG_WORD (SET_SRC (x)) == 0
4216 && (GET_MODE_SIZE (GET_MODE (SET_SRC (x)))
4217 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_SRC (x)))))
4218 && GET_CODE (SUBREG_REG (SET_SRC (x))) == MEM)
4219 SUBST (SET_SRC (x),
4220 gen_rtx_combine (LOAD_EXTEND_OP (GET_MODE
4221 (SUBREG_REG (SET_SRC (x)))),
4222 GET_MODE (SET_SRC (x)),
4223 XEXP (SET_SRC (x), 0)));
4224#endif
4225
4226#ifndef HAVE_conditional_move
4227
4228 /* If we don't have a conditional move, SET_SRC is an IF_THEN_ELSE,
4229 and we are comparing an item known to be 0 or -1 against 0, use a
4230 logical operation instead. Check for one of the arms being an IOR
4231 of the other arm with some value. We compute three terms to be
4232 IOR'ed together. In practice, at most two will be nonzero. Then
4233 we do the IOR's. */
4234
4235 if (GET_CODE (SET_DEST (x)) != PC
4236 && GET_CODE (SET_SRC (x)) == IF_THEN_ELSE
4237 && (GET_CODE (XEXP (SET_SRC (x), 0)) == EQ
4238 || GET_CODE (XEXP (SET_SRC (x), 0)) == NE)
4239 && XEXP (XEXP (SET_SRC (x), 0), 1) == const0_rtx
4240 && (num_sign_bit_copies (XEXP (XEXP (SET_SRC (x), 0), 0),
4241 GET_MODE (XEXP (XEXP (SET_SRC (x), 0), 0)))
4242 == GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (SET_SRC (x), 0), 0))))
4243 && ! side_effects_p (SET_SRC (x)))
4244 {
4245 rtx true = (GET_CODE (XEXP (SET_SRC (x), 0)) == NE
4246 ? XEXP (SET_SRC (x), 1) : XEXP (SET_SRC (x), 2));
4247 rtx false = (GET_CODE (XEXP (SET_SRC (x), 0)) == NE
4248 ? XEXP (SET_SRC (x), 2) : XEXP (SET_SRC (x), 1));
4249 rtx term1 = const0_rtx, term2, term3;
4250
4251 if (GET_CODE (true) == IOR && rtx_equal_p (XEXP (true, 0), false))
4252 term1 = false, true = XEXP (true, 1), false = const0_rtx;
4253 else if (GET_CODE (true) == IOR
4254 && rtx_equal_p (XEXP (true, 1), false))
4255 term1 = false, true = XEXP (true, 0), false = const0_rtx;
4256 else if (GET_CODE (false) == IOR
4257 && rtx_equal_p (XEXP (false, 0), true))
4258 term1 = true, false = XEXP (false, 1), true = const0_rtx;
4259 else if (GET_CODE (false) == IOR
4260 && rtx_equal_p (XEXP (false, 1), true))
4261 term1 = true, false = XEXP (false, 0), true = const0_rtx;
4262
4263 term2 = gen_binary (AND, GET_MODE (SET_SRC (x)),
4264 XEXP (XEXP (SET_SRC (x), 0), 0), true);
4265 term3 = gen_binary (AND, GET_MODE (SET_SRC (x)),
4266 gen_unary (NOT, GET_MODE (SET_SRC (x)),
4267 XEXP (XEXP (SET_SRC (x), 0), 0)),
4268 false);
4269
4270 SUBST (SET_SRC (x),
4271 gen_binary (IOR, GET_MODE (SET_SRC (x)),
4272 gen_binary (IOR, GET_MODE (SET_SRC (x)),
4273 term1, term2),
4274 term3));
4275 }
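 /* Sketch of the effect: if A is known to be 0 or -1, then
    (set D (if_then_else (ne A 0) B (const_int 0))) reduces to
    (set D (and A B)); TERM2 is (and A B) while the other two terms
    fold to zero.  */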
4276#endif
4277 break;
4278
4279 case AND:
4280 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
4281 {
4282 x = simplify_and_const_int (x, mode, XEXP (x, 0),
4283 INTVAL (XEXP (x, 1)));
4284
4285 /* If we have (ior (and X C1) C2) and the next restart would be
4286 the last, simplify this by making C1 as small as possible
4287 and then exit. */
4288 if (n_restarts >= 3 && GET_CODE (x) == IOR
4289 && GET_CODE (XEXP (x, 0)) == AND
4290 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4291 && GET_CODE (XEXP (x, 1)) == CONST_INT)
4292 {
4293 temp = gen_binary (AND, mode, XEXP (XEXP (x, 0), 0),
4294 GEN_INT (INTVAL (XEXP (XEXP (x, 0), 1))
4295 & ~ INTVAL (XEXP (x, 1))));
4296 return gen_binary (IOR, mode, temp, XEXP (x, 1));
4297 }
4298
4299 if (GET_CODE (x) != AND)
4300 goto restart;
4301 }
4302
4303 /* Convert (A | B) & A to A. */
4304 if (GET_CODE (XEXP (x, 0)) == IOR
4305 && (rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1))
4306 || rtx_equal_p (XEXP (XEXP (x, 0), 1), XEXP (x, 1)))
4307 && ! side_effects_p (XEXP (XEXP (x, 0), 0))
4308 && ! side_effects_p (XEXP (XEXP (x, 0), 1)))
4309 return XEXP (x, 1);
4310
4311 /* Convert (A ^ B) & A to A & (~ B) since the latter is often a single
4312 insn (and may simplify more). */
4313 else if (GET_CODE (XEXP (x, 0)) == XOR
4314 && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1))
4315 && ! side_effects_p (XEXP (x, 1)))
4316 {
4317 x = gen_binary (AND, mode,
4318 gen_unary (NOT, mode, XEXP (XEXP (x, 0), 1)),
4319 XEXP (x, 1));
4320 goto restart;
4321 }
4322 else if (GET_CODE (XEXP (x, 0)) == XOR
4323 && rtx_equal_p (XEXP (XEXP (x, 0), 1), XEXP (x, 1))
4324 && ! side_effects_p (XEXP (x, 1)))
4325 {
4326 x = gen_binary (AND, mode,
4327 gen_unary (NOT, mode, XEXP (XEXP (x, 0), 0)),
4328 XEXP (x, 1));
4329 goto restart;
4330 }
4331
4332 /* Similarly for (~ (A ^ B)) & A. */
4333 else if (GET_CODE (XEXP (x, 0)) == NOT
4334 && GET_CODE (XEXP (XEXP (x, 0), 0)) == XOR
4335 && rtx_equal_p (XEXP (XEXP (XEXP (x, 0), 0), 0), XEXP (x, 1))
4336 && ! side_effects_p (XEXP (x, 1)))
4337 {
4338 x = gen_binary (AND, mode, XEXP (XEXP (XEXP (x, 0), 0), 1),
4339 XEXP (x, 1));
4340 goto restart;
4341 }
4342 else if (GET_CODE (XEXP (x, 0)) == NOT
4343 && GET_CODE (XEXP (XEXP (x, 0), 0)) == XOR
4344 && rtx_equal_p (XEXP (XEXP (XEXP (x, 0), 0), 1), XEXP (x, 1))
4345 && ! side_effects_p (XEXP (x, 1)))
4346 {
4347 x = gen_binary (AND, mode, XEXP (XEXP (XEXP (x, 0), 0), 0),
4348 XEXP (x, 1));
4349 goto restart;
4350 }
4351
4352 /* If we have (and A B) with A not an object but that is known to
4353 be -1 or 0, this is equivalent to the expression
4354 (if_then_else (ne A (const_int 0)) B (const_int 0))
4355 We make this conversion because it may allow further
4356 simplifications and then allow use of conditional move insns.
4357 If the machine doesn't have condition moves, code in case SET
4358 will convert the IF_THEN_ELSE back to the logical operation.
4359 We build the IF_THEN_ELSE here in case further simplification
4360 is possible (e.g., we can convert it to ABS). */
4361
4362 if (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) != 'o'
4363 && ! (GET_CODE (XEXP (x, 0)) == SUBREG
4364 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0)))) == 'o')
4365 && (num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
4366 == GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))))
4367 {
4368 rtx op0 = XEXP (x, 0);
4369 rtx op1 = const0_rtx;
4370 enum rtx_code comp_code
4371 = simplify_comparison (NE, &op0, &op1);
4372
4373 x = gen_rtx_combine (IF_THEN_ELSE, mode,
4374 gen_binary (comp_code, VOIDmode, op0, op1),
4375 XEXP (x, 1), const0_rtx);
4376 goto restart;
4377 }
4378
4379 /* In the following group of tests (and those in case IOR below),
4380 we start with some combination of logical operations and apply
4381 the distributive law followed by the inverse distributive law.
4382 Most of the time, this results in no change. However, if some of
4383 the operands are the same or inverses of each other, simplifications
4384 will result.
4385
4386 For example, (and (ior A B) (not B)) can occur as the result of
4387 expanding a bit field assignment. When we apply the distributive
4388 law to this, we get (ior (and A (not B)) (and B (not B))),
4389 which then simplifies to (and A (not B)). */
4390
4391 /* If we have (and (ior A B) C), apply the distributive law and then
4392 the inverse distributive law to see if things simplify. */
4393
4394 if (GET_CODE (XEXP (x, 0)) == IOR || GET_CODE (XEXP (x, 0)) == XOR)
4395 {
4396 x = apply_distributive_law
4397 (gen_binary (GET_CODE (XEXP (x, 0)), mode,
4398 gen_binary (AND, mode,
4399 XEXP (XEXP (x, 0), 0), XEXP (x, 1)),
4400 gen_binary (AND, mode,
4401 XEXP (XEXP (x, 0), 1), XEXP (x, 1))));
4402 if (GET_CODE (x) != AND)
4403 goto restart;
4404 }
4405
4406 if (GET_CODE (XEXP (x, 1)) == IOR || GET_CODE (XEXP (x, 1)) == XOR)
4407 {
4408 x = apply_distributive_law
4409 (gen_binary (GET_CODE (XEXP (x, 1)), mode,
4410 gen_binary (AND, mode,
4411 XEXP (XEXP (x, 1), 0), XEXP (x, 0)),
4412 gen_binary (AND, mode,
4413 XEXP (XEXP (x, 1), 1), XEXP (x, 0))));
4414 if (GET_CODE (x) != AND)
4415 goto restart;
4416 }
4417
4418 /* Similarly, taking advantage of the fact that
4419 (and (not A) (xor B C)) == (xor (ior A B) (ior A C)) */
4420
4421 if (GET_CODE (XEXP (x, 0)) == NOT && GET_CODE (XEXP (x, 1)) == XOR)
4422 {
4423 x = apply_distributive_law
4424 (gen_binary (XOR, mode,
4425 gen_binary (IOR, mode, XEXP (XEXP (x, 0), 0),
4426 XEXP (XEXP (x, 1), 0)),
4427 gen_binary (IOR, mode, XEXP (XEXP (x, 0), 0),
4428 XEXP (XEXP (x, 1), 1))));
4429 if (GET_CODE (x) != AND)
4430 goto restart;
4431 }
4432
4433 else if (GET_CODE (XEXP (x, 1)) == NOT && GET_CODE (XEXP (x, 0)) == XOR)
4434 {
4435 x = apply_distributive_law
4436 (gen_binary (XOR, mode,
4437 gen_binary (IOR, mode, XEXP (XEXP (x, 1), 0),
4438 XEXP (XEXP (x, 0), 0)),
4439 gen_binary (IOR, mode, XEXP (XEXP (x, 1), 0),
4440 XEXP (XEXP (x, 0), 1))));
4441 if (GET_CODE (x) != AND)
4442 goto restart;
4443 }
4444 break;
4445
4446 case IOR:
951553af 4447 /* (ior A C) is C if all bits of A that might be nonzero are on in C. */
d0ab8cd3 4448 if (GET_CODE (XEXP (x, 1)) == CONST_INT
ac49a949 4449 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
951553af 4450 && (nonzero_bits (XEXP (x, 0), mode) & ~ INTVAL (XEXP (x, 1))) == 0)
4451 return XEXP (x, 1);
4452
4453 /* Convert (A & B) | A to A. */
4454 if (GET_CODE (XEXP (x, 0)) == AND
4455 && (rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1))
4456 || rtx_equal_p (XEXP (XEXP (x, 0), 1), XEXP (x, 1)))
4457 && ! side_effects_p (XEXP (XEXP (x, 0), 0))
4458 && ! side_effects_p (XEXP (XEXP (x, 0), 1)))
4459 return XEXP (x, 1);
4460
4461 /* If we have (ior (and A B) C), apply the distributive law and then
4462 the inverse distributive law to see if things simplify. */
4463
4464 if (GET_CODE (XEXP (x, 0)) == AND)
4465 {
4466 x = apply_distributive_law
4467 (gen_binary (AND, mode,
4468 gen_binary (IOR, mode,
4469 XEXP (XEXP (x, 0), 0), XEXP (x, 1)),
4470 gen_binary (IOR, mode,
4471 XEXP (XEXP (x, 0), 1), XEXP (x, 1))));
4472
4473 if (GET_CODE (x) != IOR)
4474 goto restart;
4475 }
4476
4477 if (GET_CODE (XEXP (x, 1)) == AND)
4478 {
4479 x = apply_distributive_law
4480 (gen_binary (AND, mode,
4481 gen_binary (IOR, mode,
4482 XEXP (XEXP (x, 1), 0), XEXP (x, 0)),
4483 gen_binary (IOR, mode,
4484 XEXP (XEXP (x, 1), 1), XEXP (x, 0))));
4485
4486 if (GET_CODE (x) != IOR)
4487 goto restart;
4488 }
4489
4490 /* Convert (ior (ashift A CX) (lshiftrt A CY)) where CX+CY equals the
4491 mode size to (rotate A CX). */
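 /* For example, in SImode (32 bits), (ior (ashift A (const_int 24))
 (lshiftrt A (const_int 8))) becomes (rotate A (const_int 24)). */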
4492
4493 if (((GET_CODE (XEXP (x, 0)) == ASHIFT
4494 && GET_CODE (XEXP (x, 1)) == LSHIFTRT)
4495 || (GET_CODE (XEXP (x, 1)) == ASHIFT
4496 && GET_CODE (XEXP (x, 0)) == LSHIFTRT))
4497 && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (XEXP (x, 1), 0))
4498 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4499 && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
4500 && (INTVAL (XEXP (XEXP (x, 0), 1)) + INTVAL (XEXP (XEXP (x, 1), 1))
4501 == GET_MODE_BITSIZE (mode)))
4502 {
4503 rtx shift_count;
4504
4505 if (GET_CODE (XEXP (x, 0)) == ASHIFT)
4506 shift_count = XEXP (XEXP (x, 0), 1);
4507 else
4508 shift_count = XEXP (XEXP (x, 1), 1);
4509 x = gen_rtx (ROTATE, mode, XEXP (XEXP (x, 0), 0), shift_count);
4510 goto restart;
4511 }
4512 break;
4513
4514 case XOR:
4515 /* Convert (XOR (NOT x) (NOT y)) to (XOR x y).
4516 Also convert (XOR (NOT x) y) to (NOT (XOR x y)), similarly for
4517 (NOT y). */
4518 {
4519 int num_negated = 0;
4520 rtx in1 = XEXP (x, 0), in2 = XEXP (x, 1);
4521
4522 if (GET_CODE (in1) == NOT)
4523 num_negated++, in1 = XEXP (in1, 0);
4524 if (GET_CODE (in2) == NOT)
4525 num_negated++, in2 = XEXP (in2, 0);
4526
4527 if (num_negated == 2)
4528 {
4529 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4530 SUBST (XEXP (x, 1), XEXP (XEXP (x, 1), 0));
4531 }
4532 else if (num_negated == 1)
4533 {
4534 x = gen_unary (NOT, mode,
4535 gen_binary (XOR, mode, in1, in2));
4536 goto restart;
4537 }
4538 }
4539
4540 /* Convert (xor (and A B) B) to (and (not A) B). The latter may
4541 correspond to a machine insn or result in further simplifications
4542 if B is a constant. */
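 /* The identity can be checked bit by bit: where B is 0 both sides are 0;
 where B is 1 the left side is (xor A 1), i.e. (not A). */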
4543
4544 if (GET_CODE (XEXP (x, 0)) == AND
4545 && rtx_equal_p (XEXP (XEXP (x, 0), 1), XEXP (x, 1))
4546 && ! side_effects_p (XEXP (x, 1)))
4547 {
4548 x = gen_binary (AND, mode,
4549 gen_unary (NOT, mode, XEXP (XEXP (x, 0), 0)),
4550 XEXP (x, 1));
4551 goto restart;
4552 }
4553 else if (GET_CODE (XEXP (x, 0)) == AND
4554 && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1))
4555 && ! side_effects_p (XEXP (x, 1)))
4556 {
4557 x = gen_binary (AND, mode,
4558 gen_unary (NOT, mode, XEXP (XEXP (x, 0), 1)),
4559 XEXP (x, 1));
4560 goto restart;
4561 }
4562
4563
4564#if STORE_FLAG_VALUE == 1
4565 /* (xor (comparison foo bar) (const_int 1)) can become the reversed
4566 comparison. */
4567 if (XEXP (x, 1) == const1_rtx
4568 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
4569 && reversible_comparison_p (XEXP (x, 0)))
4570 return gen_rtx_combine (reverse_condition (GET_CODE (XEXP (x, 0))),
4571 mode, XEXP (XEXP (x, 0), 0),
4572 XEXP (XEXP (x, 0), 1));
4573
4574 /* (lshiftrt foo C) where C is the number of bits in FOO minus 1
4575 is (lt foo (const_int 0)), so we can perform the above
4576 simplification. */
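 /* E.g., in SImode, (xor (lshiftrt foo (const_int 31)) (const_int 1))
 tests the sign bit and becomes (ge foo (const_int 0)). */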
4577
4578 if (XEXP (x, 1) == const1_rtx
4579 && GET_CODE (XEXP (x, 0)) == LSHIFTRT
4580 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4581 && INTVAL (XEXP (XEXP (x, 0), 1)) == GET_MODE_BITSIZE (mode) - 1)
4582 return gen_rtx_combine (GE, mode, XEXP (XEXP (x, 0), 0), const0_rtx);
4583#endif
4584
4585 /* (xor (comparison foo bar) (const_int sign-bit))
4586 when STORE_FLAG_VALUE is the sign bit. */
4587 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4588 && (STORE_FLAG_VALUE
4589 == (HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (mode) - 1))
4590 && XEXP (x, 1) == const_true_rtx
4591 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
4592 && reversible_comparison_p (XEXP (x, 0)))
4593 return gen_rtx_combine (reverse_condition (GET_CODE (XEXP (x, 0))),
4594 mode, XEXP (XEXP (x, 0), 0),
4595 XEXP (XEXP (x, 0), 1));
4596 break;
4597
4598 case ABS:
4599 /* (abs (neg <foo>)) -> (abs <foo>) */
4600 if (GET_CODE (XEXP (x, 0)) == NEG)
4601 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4602
4603 /* If operand is something known to be positive, ignore the ABS. */
4604 if (GET_CODE (XEXP (x, 0)) == FFS || GET_CODE (XEXP (x, 0)) == ABS
4605 || ((GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
4606 <= HOST_BITS_PER_WIDE_INT)
951553af 4607 && ((nonzero_bits (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
4608 & ((HOST_WIDE_INT) 1
4609 << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1)))
4610 == 0)))
4611 return XEXP (x, 0);
4612
4613
4614 /* If operand is known to be only -1 or 0, convert ABS to NEG. */
d0ab8cd3 4615 if (num_sign_bit_copies (XEXP (x, 0), mode) == GET_MODE_BITSIZE (mode))
4616 {
4617 x = gen_rtx_combine (NEG, mode, XEXP (x, 0));
4618 goto restart;
4619 }
4620 break;
4621
4622 case FFS:
4623 /* (ffs (*_extend <X>)) = (ffs <X>) */
4624 if (GET_CODE (XEXP (x, 0)) == SIGN_EXTEND
4625 || GET_CODE (XEXP (x, 0)) == ZERO_EXTEND)
4626 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4627 break;
4628
4629 case FLOAT:
4630 /* (float (sign_extend <X>)) = (float <X>). */
4631 if (GET_CODE (XEXP (x, 0)) == SIGN_EXTEND)
4632 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4633 break;
4634
4635 case LSHIFT:
4636 case ASHIFT:
4637 case LSHIFTRT:
4638 case ASHIFTRT:
4639 case ROTATE:
4640 case ROTATERT:
4641 /* If this is a shift by a constant amount, simplify it. */
4642 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
4643 {
4644 x = simplify_shift_const (x, code, mode, XEXP (x, 0),
4645 INTVAL (XEXP (x, 1)));
4646 if (GET_CODE (x) != code)
4647 goto restart;
4648 }
4649
4650#ifdef SHIFT_COUNT_TRUNCATED
4651 else if (GET_CODE (XEXP (x, 1)) != REG)
4652 SUBST (XEXP (x, 1),
4653 force_to_mode (XEXP (x, 1), GET_MODE (x),
4654 ((HOST_WIDE_INT) 1
4655 << exact_log2 (GET_MODE_BITSIZE (GET_MODE (x))))
4656 - 1,
5f4f0e22 4657 NULL_RTX));
4658#endif
4659
4660 break;
4661 }
4662
4663 return x;
4664}
4665\f
4666/* We consider ZERO_EXTRACT, SIGN_EXTRACT, and SIGN_EXTEND as "compound
4667 operations" because they can be replaced with two more basic operations.
4668 ZERO_EXTEND is also considered "compound" because it can be replaced with
4669 an AND operation, which is simpler, though only one operation.
4670
4671 The function expand_compound_operation is called with an rtx expression
4672 and will convert it to the appropriate shifts and AND operations,
4673 simplifying at each stage.
4674
4675 The function make_compound_operation is called to convert an expression
4676 consisting of shifts and ANDs into the equivalent compound expression.
4677 It is the inverse of this function, loosely speaking. */
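/* For example, on a 32-bit target, (sign_extend:SI (subreg:QI X 0)) expands
 to (ashiftrt:SI (ashift:SI X (const_int 24)) (const_int 24)), while the
 corresponding zero_extend becomes, equivalently, an AND with 255. */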
4678
4679static rtx
4680expand_compound_operation (x)
4681 rtx x;
4682{
4683 int pos = 0, len;
4684 int unsignedp = 0;
4685 int modewidth;
4686 rtx tem;
4687
4688 switch (GET_CODE (x))
4689 {
4690 case ZERO_EXTEND:
4691 unsignedp = 1;
4692 case SIGN_EXTEND:
4693 /* We can't necessarily use a const_int for a multiword mode;
4694 it depends on implicitly extending the value.
4695 Since we don't know the right way to extend it,
4696 we can't tell whether the implicit way is right.
4697
4698 Even for a mode that is no wider than a const_int,
4699 we can't win, because we need to sign extend one of its bits through
4700 the rest of it, and we don't know which bit. */
230d793d 4701 if (GET_CODE (XEXP (x, 0)) == CONST_INT)
75473182 4702 return x;
4703
4704 if (! FAKE_EXTEND_SAFE_P (GET_MODE (XEXP (x, 0)), XEXP (x, 0)))
4705 return x;
4706
4707 len = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)));
4708 /* If the inner object has VOIDmode (the only way this can happen
4709 is if it is an ASM_OPERANDS), we can't do anything since we don't
4710 know how much masking to do. */
4711 if (len == 0)
4712 return x;
4713
4714 break;
4715
4716 case ZERO_EXTRACT:
4717 unsignedp = 1;
4718 case SIGN_EXTRACT:
4719 /* If the operand is a CLOBBER, just return it. */
4720 if (GET_CODE (XEXP (x, 0)) == CLOBBER)
4721 return XEXP (x, 0);
4722
4723 if (GET_CODE (XEXP (x, 1)) != CONST_INT
4724 || GET_CODE (XEXP (x, 2)) != CONST_INT
4725 || GET_MODE (XEXP (x, 0)) == VOIDmode)
4726 return x;
4727
4728 len = INTVAL (XEXP (x, 1));
4729 pos = INTVAL (XEXP (x, 2));
4730
4731 /* If this goes outside the object being extracted, replace the object
4732 with a (use (mem ...)) construct that only combine understands
4733 and is used only for this purpose. */
4734 if (len + pos > GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))))
4735 SUBST (XEXP (x, 0), gen_rtx (USE, GET_MODE (x), XEXP (x, 0)));
4736
4737#if BITS_BIG_ENDIAN
4738 pos = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - len - pos;
4739#endif
4740 break;
4741
4742 default:
4743 return x;
4744 }
4745
4746 /* If we reach here, we want to return a pair of shifts. The inner
4747 shift is a left shift of BITSIZE - POS - LEN bits. The outer
4748 shift is a right shift of BITSIZE - LEN bits. It is arithmetic or
4749 logical depending on the value of UNSIGNEDP.
4750
4751 If this was a ZERO_EXTEND or ZERO_EXTRACT, this pair of shifts will be
4752 converted into an AND of a shift.
4753
4754 We must check for the case where the left shift would have a negative
4755 count. This can happen in a case like (x >> 31) & 255 on machines
4756 that can't shift by a constant. On those machines, we would first
4757 combine the shift with the AND to produce a variable-position
4758 extraction. Then the constant of 31 would be substituted in to produce
4759 such a position. */
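 /* For instance, a ZERO_EXTRACT of LEN 8 at POS 4 in SImode becomes a left
 shift of 32 - 4 - 8 = 20 bits followed by a logical right shift of
 32 - 8 = 24 bits. */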
4760
4761 modewidth = GET_MODE_BITSIZE (GET_MODE (x));
4762 if (modewidth >= pos + len)
5f4f0e22 4763 tem = simplify_shift_const (NULL_RTX, unsignedp ? LSHIFTRT : ASHIFTRT,
230d793d 4764 GET_MODE (x),
4765 simplify_shift_const (NULL_RTX, ASHIFT,
4766 GET_MODE (x),
4767 XEXP (x, 0),
4768 modewidth - pos - len),
4769 modewidth - len);
4770
4771 else if (unsignedp && len < HOST_BITS_PER_WIDE_INT)
4772 tem = simplify_and_const_int (NULL_RTX, GET_MODE (x),
4773 simplify_shift_const (NULL_RTX, LSHIFTRT,
4774 GET_MODE (x),
4775 XEXP (x, 0), pos),
5f4f0e22 4776 ((HOST_WIDE_INT) 1 << len) - 1);
4777 else
4778 /* Any other cases we can't handle. */
4779 return x;
4780
4781
4782 /* If we couldn't do this for some reason, return the original
4783 expression. */
4784 if (GET_CODE (tem) == CLOBBER)
4785 return x;
4786
4787 return tem;
4788}
4789\f
4790/* X is a SET which contains an assignment of one object into
4791 a part of another (such as a bit-field assignment, STRICT_LOW_PART,
4792 or certain SUBREGS). If possible, convert it into a series of
4793 logical operations.
4794
4795 We half-heartedly support variable positions, but do not at all
4796 support variable lengths. */
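/* Concretely, storing SRC into a LEN-bit field at position POS of INNER
 becomes (ior (and (not (ashift MASK POS)) INNER)
 (ashift (and SRC MASK) POS))
 where MASK is (1 << LEN) - 1, as constructed below. */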
4797
4798static rtx
4799expand_field_assignment (x)
4800 rtx x;
4801{
4802 rtx inner;
4803 rtx pos; /* Always counts from low bit. */
4804 int len;
4805 rtx mask;
4806 enum machine_mode compute_mode;
4807
4808 /* Loop until we find something we can't simplify. */
4809 while (1)
4810 {
4811 if (GET_CODE (SET_DEST (x)) == STRICT_LOW_PART
4812 && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG)
4813 {
4814 inner = SUBREG_REG (XEXP (SET_DEST (x), 0));
4815 len = GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)));
4816 pos = const0_rtx;
4817 }
4818 else if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
4819 && GET_CODE (XEXP (SET_DEST (x), 1)) == CONST_INT)
4820 {
4821 inner = XEXP (SET_DEST (x), 0);
4822 len = INTVAL (XEXP (SET_DEST (x), 1));
4823 pos = XEXP (SET_DEST (x), 2);
4824
4825 /* If the position is constant and the field extends past the width
4826 of INNER, surround INNER with a USE to indicate this. */
4827 if (GET_CODE (pos) == CONST_INT
4828 && INTVAL (pos) + len > GET_MODE_BITSIZE (GET_MODE (inner)))
4829 inner = gen_rtx (USE, GET_MODE (SET_DEST (x)), inner);
4830
4831#if BITS_BIG_ENDIAN
4832 if (GET_CODE (pos) == CONST_INT)
4833 pos = GEN_INT (GET_MODE_BITSIZE (GET_MODE (inner)) - len
4834 - INTVAL (pos));
4835 else if (GET_CODE (pos) == MINUS
4836 && GET_CODE (XEXP (pos, 1)) == CONST_INT
4837 && (INTVAL (XEXP (pos, 1))
4838 == GET_MODE_BITSIZE (GET_MODE (inner)) - len))
4839 /* If position is ADJUST - X, new position is X. */
4840 pos = XEXP (pos, 0);
4841 else
4842 pos = gen_binary (MINUS, GET_MODE (pos),
4843 GEN_INT (GET_MODE_BITSIZE (GET_MODE (inner))
4844 - len),
4845 pos);
4846#endif
4847 }
4848
4849 /* A SUBREG between two modes that occupy the same numbers of words
4850 can be done by moving the SUBREG to the source. */
4851 else if (GET_CODE (SET_DEST (x)) == SUBREG
4852 && (((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
4853 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
4854 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
4855 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
4856 {
4857 x = gen_rtx (SET, VOIDmode, SUBREG_REG (SET_DEST (x)),
4858 gen_lowpart_for_combine (GET_MODE (SUBREG_REG (SET_DEST (x))),
4859 SET_SRC (x)));
4860 continue;
4861 }
4862 else
4863 break;
4864
4865 while (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner))
4866 inner = SUBREG_REG (inner);
4867
4868 compute_mode = GET_MODE (inner);
4869
4870 /* Compute a mask of LEN bits, if we can do this on the host machine. */
4871 if (len < HOST_BITS_PER_WIDE_INT)
4872 mask = GEN_INT (((HOST_WIDE_INT) 1 << len) - 1);
4873 else
4874 break;
4875
4876 /* Now compute the equivalent expression. Make a copy of INNER
4877 for the SET_DEST in case it is a MEM into which we will substitute;
4878 we don't want shared RTL in that case. */
4879 x = gen_rtx (SET, VOIDmode, copy_rtx (inner),
4880 gen_binary (IOR, compute_mode,
4881 gen_binary (AND, compute_mode,
4882 gen_unary (NOT, compute_mode,
4883 gen_binary (ASHIFT,
4884 compute_mode,
4885 mask, pos)),
4886 inner),
4887 gen_binary (ASHIFT, compute_mode,
4888 gen_binary (AND, compute_mode,
4889 gen_lowpart_for_combine
4890 (compute_mode,
4891 SET_SRC (x)),
4892 mask),
4893 pos)));
4894 }
4895
4896 return x;
4897}
4898\f
4899/* Return an RTX for a reference to LEN bits of INNER. If POS_RTX is nonzero,
4900 it is an RTX that represents a variable starting position; otherwise,
4901 POS is the (constant) starting bit position (counted from the LSB).
4902
4903 INNER may be a USE. This will occur when we started with a bitfield
4904 that went outside the boundary of the object in memory, which is
4905 allowed on most machines. To isolate this case, we produce a USE
4906 whose mode is wide enough and surround the MEM with it. The only
4907 code that understands the USE is this routine. If it is not removed,
4908 it will cause the resulting insn not to match.
4909
4910 UNSIGNEDP is non-zero for an unsigned reference and zero for a
4911 signed reference.
4912
4913 IN_DEST is non-zero if this is a reference in the destination of a
4914 SET. This is used when a ZERO_ or SIGN_EXTRACT isn't needed. If non-zero,
4915 a STRICT_LOW_PART will be used, if zero, ZERO_EXTEND or SIGN_EXTEND will
4916 be used.
4917
4918 IN_COMPARE is non-zero if we are in a COMPARE. This means that a
4919 ZERO_EXTRACT should be built even for bits starting at bit 0.
4920
4921 MODE is the desired mode of the result (if IN_DEST == 0). */
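/* For example, a one-bit unsigned reference at position 3 of a REG, made
 for use in a COMPARE, comes back as
 (zero_extract REG (const_int 1) (const_int 3)). */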
4922
4923static rtx
4924make_extraction (mode, inner, pos, pos_rtx, len,
4925 unsignedp, in_dest, in_compare)
4926 enum machine_mode mode;
4927 rtx inner;
4928 int pos;
4929 rtx pos_rtx;
4930 int len;
4931 int unsignedp;
4932 int in_dest, in_compare;
4933{
4934 /* This mode describes the size of the storage area
4935 to fetch the overall value from. Within that, we
4936 ignore the POS lowest bits, etc. */
4937 enum machine_mode is_mode = GET_MODE (inner);
4938 enum machine_mode inner_mode;
4939 enum machine_mode wanted_mem_mode = byte_mode;
4940 enum machine_mode pos_mode = word_mode;
4941 enum machine_mode extraction_mode = word_mode;
4942 enum machine_mode tmode = mode_for_size (len, MODE_INT, 1);
4943 int spans_byte = 0;
4944 rtx new = 0;
8999a12e 4945 rtx orig_pos_rtx = pos_rtx;
6139ff20 4946 int orig_pos;
4947
4948 /* Get some information about INNER and get the innermost object. */
4949 if (GET_CODE (inner) == USE)
94b4b17a 4950 /* (use:SI (mem:QI foo)) stands for (mem:SI foo). */
4951 /* We don't need to adjust the position because we set up the USE
4952 to pretend that it was a full-word object. */
4953 spans_byte = 1, inner = XEXP (inner, 0);
4954 else if (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner))
4955 {
4956 /* If going from (subreg:SI (mem:QI ...)) to (mem:QI ...),
4957 consider just the QI as the memory to extract from.
4958 The subreg adds or removes high bits; its mode is
4959 irrelevant to the meaning of this extraction,
4960 since POS and LEN count from the lsb. */
4961 if (GET_CODE (SUBREG_REG (inner)) == MEM)
4962 is_mode = GET_MODE (SUBREG_REG (inner));
4963 inner = SUBREG_REG (inner);
4964 }
4965
4966 inner_mode = GET_MODE (inner);
4967
4968 if (pos_rtx && GET_CODE (pos_rtx) == CONST_INT)
8999a12e 4969 pos = INTVAL (pos_rtx), pos_rtx = 0;
4970
4971 /* See if this can be done without an extraction. We never can if the
4972 width of the field is not the same as that of some integer mode. For
4973 registers, we can only avoid the extraction if the position is at the
4974 low-order bit and this is either not in the destination or we have the
4975 appropriate STRICT_LOW_PART operation available.
4976
4977 For MEM, we can avoid an extract if the field starts on an appropriate
4978 boundary and we can change the mode of the memory reference. However,
4979 we cannot directly access the MEM if we have a USE and the underlying
4980 MEM is not TMODE. This combination means that MEM was being used in a
4981 context where bits outside its mode were being referenced; that is only
4982 valid in bit-field insns. */
4983
4984 if (tmode != BLKmode
4985 && ! (spans_byte && inner_mode != tmode)
8999a12e 4986 && ((pos_rtx == 0 && pos == 0 && GET_CODE (inner) != MEM
230d793d 4987 && (! in_dest
4988 || (GET_CODE (inner) == REG
4989 && (movstrict_optab->handlers[(int) tmode].insn_code
4990 != CODE_FOR_nothing))))
8999a12e 4991 || (GET_CODE (inner) == MEM && pos_rtx == 0
4992 && (pos
4993 % (STRICT_ALIGNMENT ? GET_MODE_ALIGNMENT (tmode)
4994 : BITS_PER_UNIT)) == 0
4995 /* We can't do this if we are widening INNER_MODE (it
4996 may not be aligned, for one thing). */
4997 && GET_MODE_BITSIZE (inner_mode) >= GET_MODE_BITSIZE (tmode)
4998 && (inner_mode == tmode
4999 || (! mode_dependent_address_p (XEXP (inner, 0))
5000 && ! MEM_VOLATILE_P (inner))))))
5001 {
5002 /* If INNER is a MEM, make a new MEM that encompasses just the desired
5003 field. If the original and current mode are the same, we need not
5004 adjust the offset. Otherwise, we adjust it if bytes are big-endian.
5005
5006 If INNER is not a MEM, get a piece consisting of just the field
df62f951 5007 of interest (in this case POS must be 0). */
5008
5009 if (GET_CODE (inner) == MEM)
5010 {
5011 int offset;
5012 /* POS counts from lsb, but make OFFSET count in memory order. */
5013 if (BYTES_BIG_ENDIAN)
5014 offset = (GET_MODE_BITSIZE (is_mode) - len - pos) / BITS_PER_UNIT;
5015 else
5016 offset = pos / BITS_PER_UNIT;
5017
5018 new = gen_rtx (MEM, tmode, plus_constant (XEXP (inner, 0), offset));
5019 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (inner);
5020 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (inner);
5021 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (inner);
5022 }
df62f951 5023 else if (GET_CODE (inner) == REG)
5024 /* We can't call gen_lowpart_for_combine here since we always want
5025 a SUBREG and it would sometimes return a new hard register. */
5026 new = gen_rtx (SUBREG, tmode, inner,
5027 (WORDS_BIG_ENDIAN
5028 && GET_MODE_SIZE (inner_mode) > UNITS_PER_WORD
5029 ? ((GET_MODE_SIZE (inner_mode) - GET_MODE_SIZE (tmode))
5030 / UNITS_PER_WORD)
5031 : 0));
230d793d 5032 else
5033 new = force_to_mode (inner, tmode,
5034 len >= HOST_BITS_PER_WIDE_INT
5035 ? GET_MODE_MASK (tmode)
5036 : ((HOST_WIDE_INT) 1 << len) - 1,
5037 NULL_RTX);
5038
5039 /* If this extraction is going into the destination of a SET,
5040 make a STRICT_LOW_PART unless we made a MEM. */
5041
5042 if (in_dest)
5043 return (GET_CODE (new) == MEM ? new
5044 : (GET_CODE (new) != SUBREG
5045 ? gen_rtx (CLOBBER, tmode, const0_rtx)
5046 : gen_rtx_combine (STRICT_LOW_PART, VOIDmode, new)));
5047
5048 /* Otherwise, sign- or zero-extend unless we already are in the
5049 proper mode. */
5050
5051 return (mode == tmode ? new
5052 : gen_rtx_combine (unsignedp ? ZERO_EXTEND : SIGN_EXTEND,
5053 mode, new));
5054 }
5055
5056 /* Unless this is a COMPARE or we have a funny memory reference,
5057 don't do anything with zero-extending field extracts starting at
5058 the low-order bit since they are simple AND operations. */
5059 if (pos_rtx == 0 && pos == 0 && ! in_dest
5060 && ! in_compare && ! spans_byte && unsignedp)
5061 return 0;
5062
5063 /* Get the mode to use should INNER be a MEM, the mode for the position,
5064 and the mode for the result. */
5065#ifdef HAVE_insv
5066 if (in_dest)
5067 {
5068 wanted_mem_mode = insn_operand_mode[(int) CODE_FOR_insv][0];
5069 pos_mode = insn_operand_mode[(int) CODE_FOR_insv][2];
5070 extraction_mode = insn_operand_mode[(int) CODE_FOR_insv][3];
5071 }
5072#endif
5073
5074#ifdef HAVE_extzv
5075 if (! in_dest && unsignedp)
5076 {
5077 wanted_mem_mode = insn_operand_mode[(int) CODE_FOR_extzv][1];
5078 pos_mode = insn_operand_mode[(int) CODE_FOR_extzv][3];
5079 extraction_mode = insn_operand_mode[(int) CODE_FOR_extzv][0];
5080 }
5081#endif
5082
5083#ifdef HAVE_extv
5084 if (! in_dest && ! unsignedp)
5085 {
5086 wanted_mem_mode = insn_operand_mode[(int) CODE_FOR_extv][1];
5087 pos_mode = insn_operand_mode[(int) CODE_FOR_extv][3];
5088 extraction_mode = insn_operand_mode[(int) CODE_FOR_extv][0];
5089 }
5090#endif
5091
5092 /* Never narrow an object, since that might not be safe. */
5093
5094 if (mode != VOIDmode
5095 && GET_MODE_SIZE (extraction_mode) < GET_MODE_SIZE (mode))
5096 extraction_mode = mode;
5097
5098 if (pos_rtx && GET_MODE (pos_rtx) != VOIDmode
5099 && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx)))
5100 pos_mode = GET_MODE (pos_rtx);
5101
5102 /* If this is not from memory or we have to change the mode of memory and
5103 cannot, the desired mode is EXTRACTION_MODE. */
5104 if (GET_CODE (inner) != MEM
5105 || (inner_mode != wanted_mem_mode
5106 && (mode_dependent_address_p (XEXP (inner, 0))
5107 || MEM_VOLATILE_P (inner))))
5108 wanted_mem_mode = extraction_mode;
5109
5110 orig_pos = pos;
5111
5112#if BITS_BIG_ENDIAN
5113 /* If position is constant, compute new position. Otherwise, build
5114 subtraction. */
8999a12e 5115 if (pos_rtx == 0)
5116 pos = (MAX (GET_MODE_BITSIZE (is_mode), GET_MODE_BITSIZE (wanted_mem_mode))
5117 - len - pos);
5118 else
5119 pos_rtx
5120 = gen_rtx_combine (MINUS, GET_MODE (pos_rtx),
5121 GEN_INT (MAX (GET_MODE_BITSIZE (is_mode),
5122 GET_MODE_BITSIZE (wanted_mem_mode))
5123 - len),
5124 pos_rtx);
5125#endif
5126
5127 /* If INNER has a wider mode, make it smaller. If this is a constant
5128 extract, try to adjust the byte to point to the byte containing
5129 the value. */
5130 if (wanted_mem_mode != VOIDmode
5131 && GET_MODE_SIZE (wanted_mem_mode) < GET_MODE_SIZE (is_mode)
5132 && ((GET_CODE (inner) == MEM
5133 && (inner_mode == wanted_mem_mode
5134 || (! mode_dependent_address_p (XEXP (inner, 0))
5135 && ! MEM_VOLATILE_P (inner))))))
5136 {
5137 int offset = 0;
5138
5139 /* The computations below will be correct if the machine is big
5140 endian in both bits and bytes or little endian in bits and bytes.
5141 If it is mixed, we must adjust. */
5142
5143 /* If bytes are big endian and we had a paradoxical SUBREG, we must
5144 adjust OFFSET to compensate. */
5145#if BYTES_BIG_ENDIAN
5146 if (! spans_byte
5147 && GET_MODE_SIZE (inner_mode) < GET_MODE_SIZE (is_mode))
5148 offset -= GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (inner_mode);
5149#endif
5150
5151 /* If this is a constant position, we can move to the desired byte. */
8999a12e 5152 if (pos_rtx == 0)
5153 {
5154 offset += pos / BITS_PER_UNIT;
5155 pos %= GET_MODE_BITSIZE (wanted_mem_mode);
5156 }
5157
5158#if BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN
5159 if (! spans_byte && is_mode != wanted_mem_mode)
5160 offset = (GET_MODE_SIZE (is_mode)
5161 - GET_MODE_SIZE (wanted_mem_mode) - offset);
5162#endif
5163
5164 if (offset != 0 || inner_mode != wanted_mem_mode)
5165 {
5166 rtx newmem = gen_rtx (MEM, wanted_mem_mode,
5167 plus_constant (XEXP (inner, 0), offset));
5168 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (inner);
5169 MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (inner);
5170 MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (inner);
5171 inner = newmem;
5172 }
5173 }
5174
5175 /* If INNER is not memory, we can always get it into the proper mode. */
5176 else if (GET_CODE (inner) != MEM)
d0ab8cd3 5177 inner = force_to_mode (inner, extraction_mode,
5178 pos_rtx || len + orig_pos >= HOST_BITS_PER_WIDE_INT
5179 ? GET_MODE_MASK (extraction_mode)
5180 : (((HOST_WIDE_INT) 1 << len) - 1) << orig_pos,
d0ab8cd3 5181 NULL_RTX);
5182
5183 /* Adjust mode of POS_RTX, if needed. If we want a wider mode, we
5184 have to zero extend. Otherwise, we can just use a SUBREG. */
8999a12e 5185 if (pos_rtx != 0
5186 && GET_MODE_SIZE (pos_mode) > GET_MODE_SIZE (GET_MODE (pos_rtx)))
5187 pos_rtx = gen_rtx_combine (ZERO_EXTEND, pos_mode, pos_rtx);
8999a12e 5188 else if (pos_rtx != 0
5189 && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx)))
5190 pos_rtx = gen_lowpart_for_combine (pos_mode, pos_rtx);
5191
5192 /* Make POS_RTX unless we already have it and it is correct. If we don't
5193 have a POS_RTX but we do have an ORIG_POS_RTX, the latter must
5194 be a CONST_INT. */
5195 if (pos_rtx == 0 && orig_pos_rtx != 0 && INTVAL (orig_pos_rtx) == pos)
5196 pos_rtx = orig_pos_rtx;
5197
5198 else if (pos_rtx == 0)
5f4f0e22 5199 pos_rtx = GEN_INT (pos);
5200
5201 /* Make the required operation. See if we can use existing rtx. */
5202 new = gen_rtx_combine (unsignedp ? ZERO_EXTRACT : SIGN_EXTRACT,
5f4f0e22 5203 extraction_mode, inner, GEN_INT (len), pos_rtx);
5204 if (! in_dest)
5205 new = gen_lowpart_for_combine (mode, new);
5206
5207 return new;
5208}
5209\f
5210/* Look at the expression rooted at X. Look for expressions
5211 equivalent to ZERO_EXTRACT, SIGN_EXTRACT, ZERO_EXTEND, SIGN_EXTEND.
5212 Form these expressions.
5213
5214 Return the new rtx, usually just X.
5215
5216 Also, for machines like the Vax that don't have logical shift insns,
5217 try to convert logical to arithmetic shift operations in cases where
5218 they are equivalent. This undoes the canonicalizations to logical
5219 shifts done elsewhere.
5220
5221 We try, as much as possible, to re-use rtl expressions to save memory.
5222
5223 IN_CODE says what kind of expression we are processing. Normally, it is
5224 SET. In a memory address (inside a MEM, PLUS or MINUS, the latter two
5225 being kludges), it is MEM. When processing the arguments of a comparison
5226 or a COMPARE against zero, it is COMPARE. */
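/* E.g., (and (lshiftrt X (const_int 2)) (const_int 7)) is rewritten as
 (zero_extract X (const_int 3) (const_int 2)), and inside an address
 (ashift X (const_int 2)) becomes (mult X (const_int 4)). */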
5227
5228static rtx
5229make_compound_operation (x, in_code)
5230 rtx x;
5231 enum rtx_code in_code;
5232{
5233 enum rtx_code code = GET_CODE (x);
5234 enum machine_mode mode = GET_MODE (x);
5235 int mode_width = GET_MODE_BITSIZE (mode);
5236 enum rtx_code next_code;
d0ab8cd3 5237 int i, count;
230d793d 5238 rtx new = 0;
280f58ba 5239 rtx tem;
5240 char *fmt;
5241
5242 /* Select the code to be used in recursive calls. Once we are inside an
5243 address, we stay there. If we have a comparison, set to COMPARE,
5244 but once inside, go back to our default of SET. */
5245
42495ca0 5246 next_code = (code == MEM || code == PLUS || code == MINUS ? MEM
5247 : ((code == COMPARE || GET_RTX_CLASS (code) == '<')
5248 && XEXP (x, 1) == const0_rtx) ? COMPARE
5249 : in_code == COMPARE ? SET : in_code);
5250
5251 /* Process depending on the code of this operation. If NEW is set
5252 non-zero, it will be returned. */
5253
5254 switch (code)
5255 {
5256 case ASHIFT:
5257 case LSHIFT:
5258 /* Convert shifts by constants into multiplications if inside
5259 an address. */
5260 if (in_code == MEM && GET_CODE (XEXP (x, 1)) == CONST_INT
5f4f0e22 5261 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
230d793d 5262 && INTVAL (XEXP (x, 1)) >= 0)
5263 {
5264 new = make_compound_operation (XEXP (x, 0), next_code);
5265 new = gen_rtx_combine (MULT, mode, new,
5266 GEN_INT ((HOST_WIDE_INT) 1
5267 << INTVAL (XEXP (x, 1))));
5268 }
5269 break;
5270
5271 case AND:
5272 /* If the second operand is not a constant, we can't do anything
5273 with it. */
5274 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5275 break;
5276
5277 /* If the constant is a power of two minus one and the first operand
5278 is a logical right shift, make an extraction. */
5279 if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
5280 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
5281 {
5282 new = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code);
5283 new = make_extraction (mode, new, 0, XEXP (XEXP (x, 0), 1), i, 1,
5284 0, in_code == COMPARE);
5285 }
dfbe1b2f 5286
5287 /* Same as previous, but for (subreg (lshiftrt ...)) in first op. */
5288 else if (GET_CODE (XEXP (x, 0)) == SUBREG
5289 && subreg_lowpart_p (XEXP (x, 0))
5290 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == LSHIFTRT
5291 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
5292 {
5293 new = make_compound_operation (XEXP (SUBREG_REG (XEXP (x, 0)), 0),
5294 next_code);
5295 new = make_extraction (GET_MODE (SUBREG_REG (XEXP (x, 0))), new, 0,
5296 XEXP (SUBREG_REG (XEXP (x, 0)), 1), i, 1,
5297 0, in_code == COMPARE);
5298 }
5299 /* Same as previous, but for (xor/ior (lshift...) (lshift...)). */
5300 else if ((GET_CODE (XEXP (x, 0)) == XOR
5301 || GET_CODE (XEXP (x, 0)) == IOR)
5302 && GET_CODE (XEXP (XEXP (x, 0), 0)) == LSHIFTRT
5303 && GET_CODE (XEXP (XEXP (x, 0), 1)) == LSHIFTRT
5304 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
5305 {
5306 /* Apply the distributive law, and then try to make extractions. */
5307 new = gen_rtx_combine (GET_CODE (XEXP (x, 0)), mode,
5308 gen_rtx (AND, mode, XEXP (XEXP (x, 0), 0),
5309 XEXP (x, 1)),
5310 gen_rtx (AND, mode, XEXP (XEXP (x, 0), 1),
5311 XEXP (x, 1)));
5312 new = make_compound_operation (new, in_code);
5313 }
5314
5315 /* If we are have (and (rotate X C) M) and C is larger than the number
5316 of bits in M, this is an extraction. */
5317
5318 else if (GET_CODE (XEXP (x, 0)) == ROTATE
5319 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
5320 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0
5321 && i <= INTVAL (XEXP (XEXP (x, 0), 1)))
5322 {
5323 new = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code);
5324 new = make_extraction (mode, new,
5325 (GET_MODE_BITSIZE (mode)
5326 - INTVAL (XEXP (XEXP (x, 0), 1))),
5327 NULL_RTX, i, 1, 0, in_code == COMPARE);
5328 }
5329
5330 /* On machines without logical shifts, if the operand of the AND is
5331 a logical shift and our mask turns off all the propagated sign
5332 bits, we can replace the logical shift with an arithmetic shift. */
5333 else if (ashr_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
5334 && (lshr_optab->handlers[(int) mode].insn_code
5335 == CODE_FOR_nothing)
5336 && GET_CODE (XEXP (x, 0)) == LSHIFTRT
5337 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
5338 && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
5339 && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT
5340 && mode_width <= HOST_BITS_PER_WIDE_INT)
230d793d 5341 {
5f4f0e22 5342 unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
5343
5344 mask >>= INTVAL (XEXP (XEXP (x, 0), 1));
5345 if ((INTVAL (XEXP (x, 1)) & ~mask) == 0)
5346 SUBST (XEXP (x, 0),
5347 gen_rtx_combine (ASHIFTRT, mode,
5348 make_compound_operation (XEXP (XEXP (x, 0), 0),
5349 next_code),
5350 XEXP (XEXP (x, 0), 1)));
5351 }
5352
5353 /* If the constant is one less than a power of two, this might be
5354 representable by an extraction even if no shift is present.
5355 If it doesn't end up being a ZERO_EXTEND, we will ignore it unless
5356 we are in a COMPARE. */
5357 else if ((i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
5358 new = make_extraction (mode,
5359 make_compound_operation (XEXP (x, 0),
5360 next_code),
5361 0, NULL_RTX, i, 1, 0, in_code == COMPARE);
5362
5363 /* If we are in a comparison and this is an AND with a power of two,
5364 convert this into the appropriate bit extract. */
5365 else if (in_code == COMPARE
5366 && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0)
5367 new = make_extraction (mode,
5368 make_compound_operation (XEXP (x, 0),
5369 next_code),
5370 i, NULL_RTX, 1, 1, 0, 1);
5371
5372 break;
5373
5374 case LSHIFTRT:
5375 /* If the sign bit is known to be zero, replace this with an
5376 arithmetic shift. */
5377 if (ashr_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing
5378 && lshr_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
5f4f0e22 5379 && mode_width <= HOST_BITS_PER_WIDE_INT
951553af 5380 && (nonzero_bits (XEXP (x, 0), mode) & (1 << (mode_width - 1))) == 0)
230d793d 5381 {
5382 new = gen_rtx_combine (ASHIFTRT, mode,
5383 make_compound_operation (XEXP (x, 0),
5384 next_code),
5385 XEXP (x, 1));
5386 break;
5387 }
5388
5389 /* ... fall through ... */
5390
5391 case ASHIFTRT:
5392 /* If we have (ashiftrt (ashift foo C1) C2) with C2 >= C1,
5393 this is a SIGN_EXTRACT. */
5394 if (GET_CODE (XEXP (x, 1)) == CONST_INT
5395 && GET_CODE (XEXP (x, 0)) == ASHIFT
5396 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
5397 && INTVAL (XEXP (x, 1)) >= INTVAL (XEXP (XEXP (x, 0), 1)))
5398 {
5399 new = make_compound_operation (XEXP (XEXP (x, 0), 0), next_code);
5400 new = make_extraction (mode, new,
5401 (INTVAL (XEXP (x, 1))
5402 - INTVAL (XEXP (XEXP (x, 0), 1))),
5403 NULL_RTX, mode_width - INTVAL (XEXP (x, 1)),
5404 code == LSHIFTRT, 0, in_code == COMPARE);
5405 }
5406
5407 /* Similarly if we have (ashiftrt (OP (ashift foo C1) C3) C2). In these
5408 cases, we are better off returning a SIGN_EXTEND of the operation. */
5409
5410 if (GET_CODE (XEXP (x, 1)) == CONST_INT
5411 && (GET_CODE (XEXP (x, 0)) == IOR || GET_CODE (XEXP (x, 0)) == AND
5412 || GET_CODE (XEXP (x, 0)) == XOR
5413 || GET_CODE (XEXP (x, 0)) == PLUS)
5414 && GET_CODE (XEXP (XEXP (x, 0), 0)) == ASHIFT
5415 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
5416 && INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1)) < HOST_BITS_PER_WIDE_INT
5417 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
5418 && 0 == (INTVAL (XEXP (XEXP (x, 0), 1))
5419 & (((HOST_WIDE_INT) 1
5420 << (MIN (INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1)),
5421 INTVAL (XEXP (x, 1)))
5422 - 1)))))
d0ab8cd3 5423 {
5424 rtx c1 = XEXP (XEXP (XEXP (x, 0), 0), 1);
5425 rtx c2 = XEXP (x, 1);
5426 rtx c3 = XEXP (XEXP (x, 0), 1);
5427 HOST_WIDE_INT newop1;
5428 rtx inner = XEXP (XEXP (XEXP (x, 0), 0), 0);
5429
5430 /* If C1 > C2, INNER needs to have the shift performed on it
5431 for C1-C2 bits. */
5432 if (INTVAL (c1) > INTVAL (c2))
5433 {
5434 inner = gen_binary (ASHIFT, mode, inner,
5435 GEN_INT (INTVAL (c1) - INTVAL (c2)));
5436 c1 = c2;
5437 }
d0ab8cd3 5438
5439 newop1 = INTVAL (c3) >> INTVAL (c1);
5440 new = make_compound_operation (inner,
5441 GET_CODE (XEXP (x, 0)) == PLUS
5442 ? MEM : GET_CODE (XEXP (x, 0)));
d0ab8cd3 5443 new = make_extraction (mode,
280f58ba 5444 gen_binary (GET_CODE (XEXP (x, 0)), mode, new,
d0ab8cd3 5445 GEN_INT (newop1)),
0a5cbff6 5446 INTVAL (c2) - INTVAL (c1),
239db5fc 5447 NULL_RTX, mode_width - INTVAL (c2),
5448 code == LSHIFTRT, 0, in_code == COMPARE);
5449 }
5450
5451 /* Similarly for (ashiftrt (neg (ashift FOO C1)) C2). */
5452 if (GET_CODE (XEXP (x, 1)) == CONST_INT
5453 && GET_CODE (XEXP (x, 0)) == NEG
5454 && GET_CODE (XEXP (XEXP (x, 0), 0)) == ASHIFT
5455 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
5456 && INTVAL (XEXP (x, 1)) >= INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1)))
5457 {
5458 new = make_compound_operation (XEXP (XEXP (XEXP (x, 0), 0), 0),
5459 next_code);
5460 new = make_extraction (mode,
fe2db4fb 5461 gen_unary (GET_CODE (XEXP (x, 0)), mode, new),
5462 (INTVAL (XEXP (x, 1))
5463 - INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1))),
5464 NULL_RTX, mode_width - INTVAL (XEXP (x, 1)),
5465 code == LSHIFTRT, 0, in_code == COMPARE);
5466 }
230d793d 5467 break;
5468
5469 case SUBREG:
5470 /* Call ourselves recursively on the inner expression. If we are
5471 narrowing the object and it has a different RTL code from
5472 what it originally did, do this SUBREG as a force_to_mode. */
5473
0a5cbff6 5474 tem = make_compound_operation (SUBREG_REG (x), in_code);
5475 if (GET_CODE (tem) != GET_CODE (SUBREG_REG (x))
5476 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (tem))
5477 && subreg_lowpart_p (x))
5478 {
5479 rtx newer = force_to_mode (tem, mode,
6139ff20 5480 GET_MODE_MASK (mode), NULL_RTX);
5481
5482 /* If we have something other than a SUBREG, we might have
5483 done an expansion, so rerun ourselves. */
5484 if (GET_CODE (newer) != SUBREG)
5485 newer = make_compound_operation (newer, in_code);
5486
5487 return newer;
5488 }
5489 }
5490
5491 if (new)
5492 {
df62f951 5493 x = gen_lowpart_for_combine (mode, new);
5494 code = GET_CODE (x);
5495 }
5496
5497 /* Now recursively process each operand of this operation. */
5498 fmt = GET_RTX_FORMAT (code);
5499 for (i = 0; i < GET_RTX_LENGTH (code); i++)
5500 if (fmt[i] == 'e')
5501 {
5502 new = make_compound_operation (XEXP (x, i), next_code);
5503 SUBST (XEXP (x, i), new);
5504 }
5505
5506 return x;
5507}
5508\f
5509/* Given M see if it is a value that would select a field of bits
5510 within an item, but not the entire word. Return -1 if not.
5511 Otherwise, return the starting position of the field, where 0 is the
5512 low-order bit.
5513
5514 *PLEN is set to the length of the field. */
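/* For example, M == 0x78 selects a 4-bit field starting at bit 3, so we
 return 3 and set *PLEN to 4; M == 0x05 is not a contiguous field and
 yields -1. */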
5515
5516static int
5517get_pos_from_mask (m, plen)
5f4f0e22 5518 unsigned HOST_WIDE_INT m;
5519 int *plen;
5520{
5521 /* Get the bit number of the first 1 bit from the right, -1 if none. */
5522 int pos = exact_log2 (m & - m);
5523
5524 if (pos < 0)
5525 return -1;
5526
5527 /* Now shift off the low-order zero bits and see if we have a power of
5528 two minus 1. */
5529 *plen = exact_log2 ((m >> pos) + 1);
5530
5531 if (*plen <= 0)
5532 return -1;
5533
5534 return pos;
5535}
5536\f
5537/* See if X can be simplified knowing that we will only refer to it in
5538 MODE and will only refer to those bits that are nonzero in MASK.
5539 If other bits are being computed or if masking operations are done
5540 that select a superset of the bits in MASK, they can sometimes be
5541 ignored.
5542
5543 Return a possibly simplified expression, but always convert X to
5544 MODE. If X is a CONST_INT, AND the CONST_INT with MASK.
5545
5546 Also, if REG is non-zero and X is a register equal in value to REG,
5547 replace X with REG. */
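/* For example, with MODE == SImode and MASK == 15, (and:SI X (const_int 255))
 comes back as just X: the caller has promised to look only at the low
 four bits, so the AND is redundant. */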
5548
5549static rtx
6139ff20 5550force_to_mode (x, mode, mask, reg)
5551 rtx x;
5552 enum machine_mode mode;
6139ff20 5553 unsigned HOST_WIDE_INT mask;
5554 rtx reg;
5555{
5556 enum rtx_code code = GET_CODE (x);
5557 enum machine_mode op_mode;
5558 unsigned HOST_WIDE_INT fuller_mask, nonzero;
5559 rtx op0, op1, temp;
5560
5561 /* We want to perform the operation in its present mode unless we know
5562 that the operation is valid in MODE, in which case we do the operation
5563 in MODE. */
5564 op_mode = ((code_to_optab[(int) code] != 0
5565 && (code_to_optab[(int) code]->handlers[(int) mode].insn_code
5566 != CODE_FOR_nothing))
5567 ? mode : GET_MODE (x));
5568
5569 /* Truncate MASK to fit OP_MODE. */
5570 if (op_mode)
5571 mask &= GET_MODE_MASK (op_mode);
5572
5573 /* When we have an arithmetic operation, or a shift whose count we
5574 do not know, we need to assume that all bits up to the highest-order
5575 bit in MASK will be needed. This is how we form such a mask. */
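 /* For example, if MASK is 0x10, a carry out of the low four bits can
 reach bit 4, so FULLER_MASK is 0x1f. */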
5576 if (op_mode)
5577 fuller_mask = (GET_MODE_BITSIZE (op_mode) >= HOST_BITS_PER_WIDE_INT
5578 ? GET_MODE_MASK (op_mode)
5579 : ((HOST_WIDE_INT) 1 << (floor_log2 (mask) + 1)) - 1);
5580 else
5581 fuller_mask = ~ (HOST_WIDE_INT) 0;
5582
5583 /* Determine what bits of X are guaranteed to be (non)zero. */
5584 nonzero = nonzero_bits (x, mode);
5585
5586 /* If none of the bits in X are needed, return a zero. */
5587 if ((nonzero & mask) == 0)
5588 return const0_rtx;
dfbe1b2f 5589
5590 /* If X is a CONST_INT, return a new one. Do this here since the
5591 test below will fail. */
5592 if (GET_CODE (x) == CONST_INT)
5593 return GEN_INT (INTVAL (x) & mask);
dfbe1b2f 5594
5595 /* If X is narrower than MODE, just get X in the proper mode. */
5596 if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (mode))
5597 return gen_lowpart_for_combine (mode, x);
5598
5599 /* If we aren't changing the mode and all zero bits in MASK are already
5600 known to be zero in X, we need not do anything. */
5601 if (GET_MODE (x) == mode && (~ mask & nonzero) == 0)
5602 return x;
5603
5604 switch (code)
5605 {
5606 case CLOBBER:
5607 /* If X is a (clobber (const_int)), return it since we know we are
5608 generating something that won't match. */
5609 return x;
5610
5611#if ! BITS_BIG_ENDIAN
5612 case USE:
5613 /* X is a (use (mem ..)) that was made from a bit-field extraction that
5614 spanned the boundary of the MEM. If we are now masking so it is
5615 within that boundary, we don't need the USE any more. */
5616 if ((mask & ~ GET_MODE_MASK (GET_MODE (XEXP (x, 0)))) == 0)
5617 return force_to_mode (XEXP (x, 0), mode, mask, reg);
5618#endif
5619
5620 case SIGN_EXTEND:
5621 case ZERO_EXTEND:
5622 case ZERO_EXTRACT:
5623 case SIGN_EXTRACT:
5624 x = expand_compound_operation (x);
5625 if (GET_CODE (x) != code)
6139ff20 5626 return force_to_mode (x, mode, mask, reg);
5627 break;
5628
5629 case REG:
5630 if (reg != 0 && (rtx_equal_p (get_last_value (reg), x)
5631 || rtx_equal_p (reg, get_last_value (x))))
5632 x = reg;
5633 break;
5634
dfbe1b2f 5635 case SUBREG:
5636 if (subreg_lowpart_p (x)
5637 /* We can ignore the effect of this SUBREG if it narrows the mode or,
5638 on machines where register operations are performed on the full
5639 word, if the constant masks to zero all the bits the mode
5640 doesn't have. */
5641 && ((GET_MODE_SIZE (GET_MODE (x))
5642 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
8baf60bb 5643#ifdef WORD_REGISTER_OPERATIONS
5644 || (0 == (mask
5645 & GET_MODE_MASK (GET_MODE (x))
5646 & ~ GET_MODE_MASK (GET_MODE (SUBREG_REG (x)))))
5647#endif
5648 ))
5649 return force_to_mode (SUBREG_REG (x), mode, mask, reg);
5650 break;
5651
5652 case AND:
5653 /* If this is an AND with a constant, convert it into an AND
5654 whose constant is the AND of that constant with MASK. If it
5655 remains an AND of MASK, delete it since it is redundant. */
dfbe1b2f 5656
5657 if (GET_CODE (XEXP (x, 1)) == CONST_INT
5658 && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT)
dfbe1b2f 5659 {
5660 x = simplify_and_const_int (x, op_mode, XEXP (x, 0),
5661 mask & INTVAL (XEXP (x, 1)));
5662
5663 /* If X is still an AND, see if it is an AND with a mask that
5664 is just some low-order bits. If so, and it is BITS wide (it
5665 can't be wider), we don't need it. */
5666
5667 if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT
6139ff20 5668 && INTVAL (XEXP (x, 1)) == mask)
dfbe1b2f 5669 x = XEXP (x, 0);
5670
5671 break;
5672 }
5673
6139ff20 5674 goto binop;
5675
5676 case PLUS:
5677 /* In (and (plus FOO C1) M), if M is a mask that just turns off
5678 low-order bits (as in an alignment operation) and FOO is already
5679 aligned to that boundary, mask C1 to that boundary as well.
5680 This may eliminate that PLUS and, later, the AND. */
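 /* E.g., with MASK == -8 (an 8-byte alignment mask) and FOO known to
 have its low three bits zero, (plus FOO (const_int 9)) is masked
 to (plus FOO (const_int 8)). */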
5681 if (GET_CODE (XEXP (x, 1)) == CONST_INT
5682 && exact_log2 (- mask) >= 0
5683 && (nonzero_bits (XEXP (x, 0), mode) & ~ mask) == 0
5684 && (INTVAL (XEXP (x, 1)) & ~ mask) != 0)
5685 return force_to_mode (plus_constant (XEXP (x, 0),
5686 INTVAL (XEXP (x, 1)) & mask),
5687 mode, mask, reg);
5688
5689 /* ... fall through ... */
5690
5691 case MINUS:
5692 case MULT:
5693 /* For PLUS, MINUS and MULT, we need any bits less significant than the
5694 most significant bit in MASK since carries from those bits will
5695 affect the bits we are interested in. */
5696 mask = fuller_mask;
5697 goto binop;
5698
5699 case IOR:
5700 case XOR:
5701 /* If X is (ior (lshiftrt FOO C1) C2), try to commute the IOR and
5702 LSHIFTRT so we end up with an (and (lshiftrt (ior ...) ...) ...)
5703 operation which may be a bitfield extraction. Ensure that the
5704 constant we form is not wider than the mode of X. */
5705
5706 if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
5707 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
5708 && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
5709 && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT
5710 && GET_CODE (XEXP (x, 1)) == CONST_INT
5711 && ((INTVAL (XEXP (XEXP (x, 0), 1))
5712 + floor_log2 (INTVAL (XEXP (x, 1))))
5713 < GET_MODE_BITSIZE (GET_MODE (x)))
5714 && ((INTVAL (XEXP (x, 1))
5715 & ~ nonzero_bits (XEXP (x, 0), GET_MODE (x))) == 0))
5716 {
5717 temp = GEN_INT ((INTVAL (XEXP (x, 1)) & mask)
5718 << INTVAL (XEXP (XEXP (x, 0), 1)));
5719 temp = gen_binary (GET_CODE (x), GET_MODE (x),
5720 XEXP (XEXP (x, 0), 0), temp);
5721 x = gen_binary (LSHIFTRT, GET_MODE (x), temp, XEXP (XEXP (x, 0), 1));
5722 return force_to_mode (x, mode, mask, reg);
5723 }
5724
5725 binop:
dfbe1b2f 5726 /* For most binary operations, just propagate into the operation and
5727 change the mode if we have an operation of that mode. */
5728
5729 op0 = gen_lowpart_for_combine (op_mode, force_to_mode (XEXP (x, 0),
5730 mode, mask, reg));
5731 op1 = gen_lowpart_for_combine (op_mode, force_to_mode (XEXP (x, 1),
5732 mode, mask, reg));
5733
5734 if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
5735 x = gen_binary (code, op_mode, op0, op1);
d0ab8cd3 5736 break;
5737
5738 case ASHIFT:
5739 case LSHIFT:
5740 /* For left shifts, do the same, but just for the first operand.
5741 However, we cannot do anything with shifts where we cannot
5742 guarantee that the counts are smaller than the size of the mode
5743 because such a count will have a different meaning in a
6139ff20 5744 wider mode. */
5745
5746 if (! (GET_CODE (XEXP (x, 1)) == CONST_INT
6139ff20 5747 && INTVAL (XEXP (x, 1)) >= 0
5748 && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (mode))
5749 && ! (GET_MODE (XEXP (x, 1)) != VOIDmode
5750 && (nonzero_bits (XEXP (x, 1), GET_MODE (XEXP (x, 1)))
adb7a1cb 5751 < (unsigned HOST_WIDE_INT) GET_MODE_BITSIZE (mode))))
5752 break;
5753
5754 /* If the shift count is a constant and we can do arithmetic in
5755 the mode of the shift, refine which bits we need. Otherwise, use the
5756 conservative form of the mask. */
5757 if (GET_CODE (XEXP (x, 1)) == CONST_INT
5758 && INTVAL (XEXP (x, 1)) >= 0
5759 && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (op_mode)
5760 && GET_MODE_BITSIZE (op_mode) <= HOST_BITS_PER_WIDE_INT)
5761 mask >>= INTVAL (XEXP (x, 1));
5762 else
5763 mask = fuller_mask;
5764
5765 op0 = gen_lowpart_for_combine (op_mode,
5766 force_to_mode (XEXP (x, 0), op_mode,
5767 mask, reg));
5768
5769 if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0))
5770 x = gen_binary (code, op_mode, op0, XEXP (x, 1));
d0ab8cd3 5771 break;
5772
5773 case LSHIFTRT:
5774 /* Here we can only do something if the shift count is a constant and
6139ff20 5775 we can do arithmetic in OP_MODE. */
5776
5777 if (GET_CODE (XEXP (x, 1)) == CONST_INT
6139ff20 5778 && GET_MODE_BITSIZE (op_mode) <= HOST_BITS_PER_WIDE_INT)
d0ab8cd3 5779 {
5780 rtx inner = XEXP (x, 0);
5781
5782 /* Select the mask of the bits we need for the shift operand. */
5783 mask <<= INTVAL (XEXP (x, 1));
d0ab8cd3 5784
5785 /* We can only change the mode of the shift if we can do arithmetic
5786 in the mode of the shift and MASK is no wider than the width of
5787 OP_MODE. */
5788 if (GET_MODE_BITSIZE (op_mode) > HOST_BITS_PER_WIDE_INT
5789 || (mask & ~ GET_MODE_MASK (op_mode)) != 0)
5790 op_mode = GET_MODE (x);
5791
5792 inner = force_to_mode (inner, op_mode, mask, reg);
5793
5794 if (GET_MODE (x) != op_mode || inner != XEXP (x, 0))
5795 x = gen_binary (LSHIFTRT, op_mode, inner, XEXP (x, 1));
d0ab8cd3 5796 }
5797
5798 /* If we have (and (lshiftrt FOO C1) C2) where the combination of the
5799 shift and AND produces only copies of the sign bit (C2 is one less
5800 than a power of two), we can do this with just a shift. */
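 /* E.g., if FOO consists entirely of sign-bit copies in SImode,
 (and (lshiftrt FOO (const_int 24)) (const_int 1)) becomes simply
 (lshiftrt FOO (const_int 31)). */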
5801
5802 if (GET_CODE (x) == LSHIFTRT
5803 && GET_CODE (XEXP (x, 1)) == CONST_INT
5804 && ((INTVAL (XEXP (x, 1))
5805 + num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0))))
5806 >= GET_MODE_BITSIZE (GET_MODE (x)))
5807 && exact_log2 (mask + 1) >= 0
5808 && (num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
5809 >= exact_log2 (mask + 1)))
5810 x = gen_binary (LSHIFTRT, GET_MODE (x), XEXP (x, 0),
5811 GEN_INT (GET_MODE_BITSIZE (GET_MODE (x))
5812 - exact_log2 (mask + 1)));
5813 break;
5814
5815 case ASHIFTRT:
5816 /* If we are just looking for the sign bit, we don't need this shift at
5817 all, even if it has a variable count. */
5818 if (mask == ((HOST_WIDE_INT) 1
5819 << (GET_MODE_BITSIZE (GET_MODE (x)) - 1)))
5820 return force_to_mode (XEXP (x, 0), mode, mask, reg);
5821
5822 /* If this is a shift by a constant, get a mask that contains those bits
5823 that are not copies of the sign bit. We then have two cases: If
5824 MASK only includes those bits, this can be a logical shift, which may
5825 allow simplifications. If MASK is a single-bit field not within
5826 those bits, we are requesting a copy of the sign bit and hence can
5827 shift the sign bit to the appropriate location. */
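 /* E.g., in SImode with MASK == 1, (ashiftrt X (const_int 31)) asks
 only for the sign bit and can become (lshiftrt X (const_int 31)). */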
5828
5829 if (GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) >= 0
5830 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
5831 {
5832 int i = -1;
5833
5834 nonzero = GET_MODE_MASK (GET_MODE (x));
5835 nonzero >>= INTVAL (XEXP (x, 1));
5836
5837 if ((mask & ~ nonzero) == 0
5838 || (i = exact_log2 (mask)) >= 0)
5839 {
5840 x = simplify_shift_const
5841 (x, LSHIFTRT, GET_MODE (x), XEXP (x, 0),
5842 i < 0 ? INTVAL (XEXP (x, 1))
5843 : GET_MODE_BITSIZE (GET_MODE (x)) - 1 - i);
5844
5845 if (GET_CODE (x) != ASHIFTRT)
5846 return force_to_mode (x, mode, mask, reg);
5847 }
5848 }
5849
5850 /* If MASK is 1, convert this to a LSHIFTRT. This can be done
5851 even if the shift count isn't a constant. */
5852 if (mask == 1)
5853 x = gen_binary (LSHIFTRT, GET_MODE (x), XEXP (x, 0), XEXP (x, 1));
5854
5855 /* If this is a sign-extension operation that just affects bits
5856 we don't care about, remove it. */
5857
5858 if (GET_CODE (XEXP (x, 1)) == CONST_INT
5859 && INTVAL (XEXP (x, 1)) >= 0
5860 && (INTVAL (XEXP (x, 1))
5861 <= GET_MODE_BITSIZE (GET_MODE (x)) - (floor_log2 (mask) + 1))
5862 && GET_CODE (XEXP (x, 0)) == ASHIFT
5863 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
5864 && INTVAL (XEXP (XEXP (x, 0), 1)) == INTVAL (XEXP (x, 1)))
5865 return force_to_mode (XEXP (XEXP (x, 0), 0), mode, mask, reg);
5866
5867 break;
5868
5869 case ROTATE:
5870 case ROTATERT:
5871 /* If the shift count is constant and we can do computations
5872 in the mode of X, compute where the bits we care about are.
5873 Otherwise, we can't do anything. Don't change the mode of
5874 the shift or propagate MODE into the shift, though. */
5875 if (GET_CODE (XEXP (x, 1)) == CONST_INT
5876 && INTVAL (XEXP (x, 1)) >= 0)
5877 {
5878 temp = simplify_binary_operation (code == ROTATE ? ROTATERT : ROTATE,
5879 GET_MODE (x), GEN_INT (mask),
5880 XEXP (x, 1));
5881 if (temp)
5882 SUBST (XEXP (x, 0),
5883 force_to_mode (XEXP (x, 0), GET_MODE (x),
5884 INTVAL (temp), reg));
5885 }
5886 break;
5887
dfbe1b2f 5888 case NEG:
5889 /* We need any bits less significant than the most significant bit in
5890 MASK since carries from those bits will affect the bits we are
5891 interested in. */
5892 mask = fuller_mask;
5893 goto unop;
5894
dfbe1b2f 5895 case NOT:
5896 /* (not FOO) is (xor FOO CONST), so if FOO is an LSHIFTRT, we can do the
5897 same as the XOR case above. Ensure that the constant we form is not
5898 wider than the mode of X. */
5899
5900 if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
5901 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
5902 && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
5903 && (INTVAL (XEXP (XEXP (x, 0), 1)) + floor_log2 (mask)
5904 < GET_MODE_BITSIZE (GET_MODE (x)))
5905 && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT)
5906 {
5907 temp = GEN_INT (mask << INTVAL (XEXP (XEXP (x, 0), 1)));
5908 temp = gen_binary (XOR, GET_MODE (x), XEXP (XEXP (x, 0), 0), temp);
5909 x = gen_binary (LSHIFTRT, GET_MODE (x), temp, XEXP (XEXP (x, 0), 1));
5910
5911 return force_to_mode (x, mode, mask, reg);
5912 }
5913
5914 unop:
5915 op0 = gen_lowpart_for_combine (op_mode, force_to_mode (XEXP (x, 0), mode,
5916 mask, reg));
5917 if (op_mode != GET_MODE (x) || op0 != XEXP (x, 0))
5918 x = gen_unary (code, op_mode, op0);
5919 break;
5920
5921 case NE:
5922 /* (and (ne FOO 0) CONST) can be (and FOO CONST) if CONST is included
5923 in STORE_FLAG_VALUE and FOO has no bits that might be nonzero not
5924 in CONST. */
5925 if ((mask & ~ STORE_FLAG_VALUE) == 0 && XEXP (x, 1) == const0_rtx
5926 && (nonzero_bits (XEXP (x, 0), mode) & ~ mask) == 0)
5927 return force_to_mode (XEXP (x, 0), mode, mask, reg);
5928
5929 break;
5930
5931 case IF_THEN_ELSE:
5932 /* We have no way of knowing if the IF_THEN_ELSE can itself be
5933 written in a narrower mode. We play it safe and do not do so. */
5934
5935 SUBST (XEXP (x, 1),
5936 gen_lowpart_for_combine (GET_MODE (x),
5937 force_to_mode (XEXP (x, 1), mode,
6139ff20 5938 mask, reg)));
5939 SUBST (XEXP (x, 2),
5940 gen_lowpart_for_combine (GET_MODE (x),
5941 force_to_mode (XEXP (x, 2), mode,
6139ff20 5942 mask, reg)));
d0ab8cd3 5943 break;
5944 }
5945
d0ab8cd3 5946 /* Ensure we return a value of the proper mode. */
5947 return gen_lowpart_for_combine (mode, x);
5948}
5949\f
5950/* Return the value of expression X given the fact that condition COND
5951 is known to be true when applied to REG as its first operand and VAL
5952 as its second. X is known to not be shared and so can be modified in
5953 place.
5954
5955 We only handle the simplest cases, and specifically those cases that
5956 arise with IF_THEN_ELSE expressions. */
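/* For example, if COND is GE, REG is some register, VAL is const0_rtx and
 X is (abs REG), we return REG; if COND is LT we return (neg REG). */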
5957
5958static rtx
5959known_cond (x, cond, reg, val)
5960 rtx x;
5961 enum rtx_code cond;
5962 rtx reg, val;
5963{
5964 enum rtx_code code = GET_CODE (x);
5965 rtx new, temp;
5966 char *fmt;
5967 int i, j;
5968
5969 if (side_effects_p (x))
5970 return x;
5971
5972 if (cond == EQ && rtx_equal_p (x, reg))
5973 return val;
5974
5975 /* If X is (abs REG) and we know something about REG's relationship
5976 with zero, we may be able to simplify this. */
5977
5978 if (code == ABS && rtx_equal_p (XEXP (x, 0), reg) && val == const0_rtx)
5979 switch (cond)
5980 {
5981 case GE: case GT: case EQ:
5982 return XEXP (x, 0);
5983 case LT: case LE:
5984 return gen_unary (NEG, GET_MODE (XEXP (x, 0)), XEXP (x, 0));
5985 }
5986
5987 /* The only other cases we handle are MIN, MAX, and comparisons if the
5988 operands are the same as REG and VAL. */
5989
5990 else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == 'c')
5991 {
5992 if (rtx_equal_p (XEXP (x, 0), val))
5993 cond = swap_condition (cond), temp = val, val = reg, reg = temp;
5994
5995 if (rtx_equal_p (XEXP (x, 0), reg) && rtx_equal_p (XEXP (x, 1), val))
5996 {
5997 if (GET_RTX_CLASS (code) == '<')
5998 return (comparison_dominates_p (cond, code) ? const_true_rtx
5999 : (comparison_dominates_p (cond,
6000 reverse_condition (code))
6001 ? const0_rtx : x));
6002
6003 else if (code == SMAX || code == SMIN
6004 || code == UMIN || code == UMAX)
6005 {
6006 int unsignedp = (code == UMIN || code == UMAX);
6007
6008 if (code == SMAX || code == UMAX)
6009 cond = reverse_condition (cond);
6010
6011 switch (cond)
6012 {
6013 case GE: case GT:
6014 return unsignedp ? x : XEXP (x, 1);
6015 case LE: case LT:
6016 return unsignedp ? x : XEXP (x, 0);
6017 case GEU: case GTU:
6018 return unsignedp ? XEXP (x, 1) : x;
6019 case LEU: case LTU:
6020 return unsignedp ? XEXP (x, 0) : x;
6021 }
6022 }
6023 }
6024 }
6025
6026 fmt = GET_RTX_FORMAT (code);
6027 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6028 {
6029 if (fmt[i] == 'e')
6030 SUBST (XEXP (x, i), known_cond (XEXP (x, i), cond, reg, val));
6031 else if (fmt[i] == 'E')
6032 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
6033 SUBST (XVECEXP (x, i, j), known_cond (XVECEXP (x, i, j),
6034 cond, reg, val));
6035 }
6036
6037 return x;
6038}
6039\f
6040/* See if X, a SET operation, can be rewritten as a bit-field assignment.
6041 Return that assignment if so.
6042
6043 We only handle the most common cases. */
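/* For example, a clear of one bit,
 (set Z (and (rotate (const_int -2) POS) Z)),
 is rewritten below as
 (set (zero_extract Z (const_int 1) POS) (const_int 0)). */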
6044
6045static rtx
6046make_field_assignment (x)
6047 rtx x;
6048{
6049 rtx dest = SET_DEST (x);
6050 rtx src = SET_SRC (x);
6051 rtx ourdest;
6052 rtx assign;
6053 HOST_WIDE_INT c1;
6054 int pos, len;
6055 rtx other;
6056 enum machine_mode mode;
6057
6058 /* If SRC was (and (not (ashift (const_int 1) POS)) DEST), this is
6059 a clear of a one-bit field. We will have changed it to
6060 (and (rotate (const_int -2) POS) DEST), so check for that. Also check
6061 for a SUBREG. */
6062
6063 if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == ROTATE
6064 && GET_CODE (XEXP (XEXP (src, 0), 0)) == CONST_INT
6065 && INTVAL (XEXP (XEXP (src, 0), 0)) == -2
6066 && (rtx_equal_p (dest, XEXP (src, 1))
6067 || rtx_equal_p (dest, get_last_value (XEXP (src, 1)))
6068 || rtx_equal_p (get_last_value (dest), XEXP (src, 1))))
6069 {
6070 assign = make_extraction (VOIDmode, dest, 0, XEXP (XEXP (src, 0), 1),
6071 1, 1, 1, 0);
6072 return gen_rtx (SET, VOIDmode, assign, const0_rtx);
6073 }
6074
6075 else if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == SUBREG
6076 && subreg_lowpart_p (XEXP (src, 0))
6077 && (GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
6078 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (src, 0)))))
6079 && GET_CODE (SUBREG_REG (XEXP (src, 0))) == ROTATE
6080 && INTVAL (XEXP (SUBREG_REG (XEXP (src, 0)), 0)) == -2
6081 && (rtx_equal_p (dest, XEXP (src, 1))
6082 || rtx_equal_p (dest, get_last_value (XEXP (src, 1)))
6083 || rtx_equal_p (get_last_value (dest), XEXP (src, 1))))
6084 {
6085 assign = make_extraction (VOIDmode, dest, 0,
6086 XEXP (SUBREG_REG (XEXP (src, 0)), 1),
6087 1, 1, 1, 0);
6088 return gen_rtx (SET, VOIDmode, assign, const0_rtx);
6089 }
6090
6091 /* If SRC is (ior (ashift (const_int 1) POS) DEST), this is a set of a
6092 one-bit field. */
6093 else if (GET_CODE (src) == IOR && GET_CODE (XEXP (src, 0)) == ASHIFT
6094 && XEXP (XEXP (src, 0), 0) == const1_rtx
6095 && (rtx_equal_p (dest, XEXP (src, 1))
6096 || rtx_equal_p (dest, get_last_value (XEXP (src, 1)))
6097 || rtx_equal_p (get_last_value (dest), XEXP (src, 1))))
6098 {
6099 assign = make_extraction (VOIDmode, dest, 0, XEXP (XEXP (src, 0), 1),
6100 1, 1, 1, 0);
6101 return gen_rtx (SET, VOIDmode, assign, const1_rtx);
6102 }
6103
6104 /* The other case we handle is assignments into a constant-position
6105 field. They look like (ior (and DEST C1) OTHER). If C1 represents
6106 a mask that has all one bits except for a group of zero bits and
6107 OTHER is known to have zeros where C1 has ones, this is such an
6108 assignment. Compute the position and length from C1. Shift OTHER
6109 to the appropriate position, force it to the required mode, and
6110 make the extraction. Check for the AND in both operands. */
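 /* For example, if C1 is 0xffff00ff in SImode, the zero bits select
 a field of width 8 at position 8, so this becomes an assignment
 to (zero_extract DEST (const_int 8) (const_int 8)). */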
6111
6112 if (GET_CODE (src) == IOR && GET_CODE (XEXP (src, 0)) == AND
6113 && GET_CODE (XEXP (XEXP (src, 0), 1)) == CONST_INT
6114 && (rtx_equal_p (XEXP (XEXP (src, 0), 0), dest)
6115 || rtx_equal_p (XEXP (XEXP (src, 0), 0), get_last_value (dest))
6116 || rtx_equal_p (get_last_value (XEXP (XEXP (src, 0), 0)), dest)))
6117 c1 = INTVAL (XEXP (XEXP (src, 0), 1)), other = XEXP (src, 1);
6118 else if (GET_CODE (src) == IOR && GET_CODE (XEXP (src, 1)) == AND
6119 && GET_CODE (XEXP (XEXP (src, 1), 1)) == CONST_INT
6120 && (rtx_equal_p (XEXP (XEXP (src, 1), 0), dest)
6121 || rtx_equal_p (XEXP (XEXP (src, 1), 0), get_last_value (dest))
6122 || rtx_equal_p (get_last_value (XEXP (XEXP (src, 1), 0)),
6123 dest)))
6124 c1 = INTVAL (XEXP (XEXP (src, 1), 1)), other = XEXP (src, 0);
6125 else
6126 return x;
6127
6128 pos = get_pos_from_mask (c1 ^ GET_MODE_MASK (GET_MODE (dest)), &len);
6129 if (pos < 0 || pos + len > GET_MODE_BITSIZE (GET_MODE (dest))
6130 || (GET_MODE_BITSIZE (GET_MODE (other)) <= HOST_BITS_PER_WIDE_INT
6131 && (c1 & nonzero_bits (other, GET_MODE (other))) != 0))
6132 return x;
6133
6134 assign = make_extraction (VOIDmode, dest, pos, NULL_RTX, len, 1, 1, 0);
6135
6136 /* The mode to use for the source is the mode of the assignment, or of
6137 what is inside a possible STRICT_LOW_PART. */
6138 mode = (GET_CODE (assign) == STRICT_LOW_PART
6139 ? GET_MODE (XEXP (assign, 0)) : GET_MODE (assign));
6140
6141 /* Shift OTHER right POS places and make it the source, restricting it
6142 to the proper length and mode. */
230d793d 6143
6144 src = force_to_mode (simplify_shift_const (NULL_RTX, LSHIFTRT,
6145 GET_MODE (src), other, pos),
6146 mode,
6147 GET_MODE_BITSIZE (mode) >= HOST_BITS_PER_WIDE_INT
6148 ? GET_MODE_MASK (mode)
6149 : ((HOST_WIDE_INT) 1 << len) - 1,
6150 dest);
6151
6152 return gen_rtx_combine (SET, VOIDmode, assign, src);
6153}
6154\f
6155/* See if X is of the form (+ (* a c) (* b c)) and convert to (* (+ a b) c)
6156 if so. */
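/* For example, (ior (lshiftrt A 3) (lshiftrt B 3)) is converted here
 to (lshiftrt (ior A B) 3), and likewise for the other distributive
 pairs handled below. */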
6157
6158static rtx
6159apply_distributive_law (x)
6160 rtx x;
6161{
6162 enum rtx_code code = GET_CODE (x);
6163 rtx lhs, rhs, other;
6164 rtx tem;
6165 enum rtx_code inner_code;
6166
6167 /* Distributivity is not true for floating point.
6168 It can change the value. So don't do it.
6169 -- rms and moshier@world.std.com. */
6170 if (FLOAT_MODE_P (GET_MODE (x)))
6171 return x;
6172
6173 /* The outer operation can only be one of the following: */
6174 if (code != IOR && code != AND && code != XOR
6175 && code != PLUS && code != MINUS)
6176 return x;
6177
6178 lhs = XEXP (x, 0), rhs = XEXP (x, 1);
6179
6180 /* If either operand is a primitive we can't do anything, so get out fast. */
6181 if (GET_RTX_CLASS (GET_CODE (lhs)) == 'o'
6182 || GET_RTX_CLASS (GET_CODE (rhs)) == 'o')
6183 return x;
6184
6185 lhs = expand_compound_operation (lhs);
6186 rhs = expand_compound_operation (rhs);
6187 inner_code = GET_CODE (lhs);
6188 if (inner_code != GET_CODE (rhs))
6189 return x;
6190
6191 /* See if the inner and outer operations distribute. */
6192 switch (inner_code)
6193 {
6194 case LSHIFTRT:
6195 case ASHIFTRT:
6196 case AND:
6197 case IOR:
6198 /* These all distribute except over PLUS. */
6199 if (code == PLUS || code == MINUS)
6200 return x;
6201 break;
6202
6203 case MULT:
6204 if (code != PLUS && code != MINUS)
6205 return x;
6206 break;
6207
6208 case ASHIFT:
6209 case LSHIFT:
6210 /* These are also multiplies, so they distribute over everything. */
6211 break;
6212
6213 case SUBREG:
6214 /* Non-paradoxical SUBREGs distribute over all operations, provided
6215 the inner modes and word numbers are the same, this is an extraction
6216 of a low-order part, we don't convert an fp operation to int or
6217 vice versa, and we would not be converting a single-word
6218 operation into a multi-word operation. The latter test is not
6219 required, but it prevents generating unneeded multi-word operations.
6220 Some of the previous tests are redundant given the latter test, but
6221 are retained because they are required for correctness.
6222
6223 We produce the result slightly differently in this case. */
6224
6225 if (GET_MODE (SUBREG_REG (lhs)) != GET_MODE (SUBREG_REG (rhs))
6226 || SUBREG_WORD (lhs) != SUBREG_WORD (rhs)
6227 || ! subreg_lowpart_p (lhs)
6228 || (GET_MODE_CLASS (GET_MODE (lhs))
6229 != GET_MODE_CLASS (GET_MODE (SUBREG_REG (lhs))))
6230 || (GET_MODE_SIZE (GET_MODE (lhs))
6231 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs))))
6232 || GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs))) > UNITS_PER_WORD)
6233 return x;
6234
6235 tem = gen_binary (code, GET_MODE (SUBREG_REG (lhs)),
6236 SUBREG_REG (lhs), SUBREG_REG (rhs));
6237 return gen_lowpart_for_combine (GET_MODE (x), tem);
6238
6239 default:
6240 return x;
6241 }
6242
6243 /* Set LHS and RHS to the inner operands (A and B in the example
6244 above) and set OTHER to the common operand (C in the example).
6245 There is only one way to do this unless the inner operation is
6246 commutative. */
6247 if (GET_RTX_CLASS (inner_code) == 'c'
6248 && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 0)))
6249 other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 1);
6250 else if (GET_RTX_CLASS (inner_code) == 'c'
6251 && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 1)))
6252 other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 0);
6253 else if (GET_RTX_CLASS (inner_code) == 'c'
6254 && rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 0)))
6255 other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 1);
6256 else if (rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 1)))
6257 other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 0);
6258 else
6259 return x;
6260
6261 /* Form the new inner operation, seeing if it simplifies first. */
6262 tem = gen_binary (code, GET_MODE (x), lhs, rhs);
6263
6264 /* There is one exception to the general way of distributing:
6265 (a | b) ^ (a | c) == (~a) & (b ^ c) */
6266 if (code == XOR && inner_code == IOR)
6267 {
6268 inner_code = AND;
6269 other = gen_unary (NOT, GET_MODE (x), other);
6270 }
6271
6272 /* We may be able to continue distributing the result, so call
6273 ourselves recursively on the inner operation before forming the
6274 outer operation, which we return. */
6275 return gen_binary (inner_code, GET_MODE (x),
6276 apply_distributive_law (tem), other);
6277}
6278\f
6279/* We have X, a logical `and' of VAROP with the constant CONSTOP, to be done
6280 in MODE.
6281
6282 Return an equivalent form, if different from X. Otherwise, return X. If
6283 X is zero, we are to always construct the equivalent form. */
6284
6285static rtx
6286simplify_and_const_int (x, mode, varop, constop)
6287 rtx x;
6288 enum machine_mode mode;
6289 rtx varop;
6290 unsigned HOST_WIDE_INT constop;
6291{
6292 register enum machine_mode tmode;
6293 register rtx temp;
6294 unsigned HOST_WIDE_INT nonzero;
6295 int i;
6296
6297 /* Simplify VAROP knowing that we will be only looking at some of the
6298 bits in it. */
6299 varop = force_to_mode (varop, mode, constop, NULL_RTX);
6300
6301 /* If VAROP is a CLOBBER, we will fail so return it; if it is a
6302 CONST_INT, we are done. */
6303 if (GET_CODE (varop) == CLOBBER || GET_CODE (varop) == CONST_INT)
6304 return varop;
6305
6306 /* See what bits may be nonzero in VAROP. Unlike the general case of
6307 a call to nonzero_bits, here we don't care about bits outside
6308 MODE. */
6309
6310 nonzero = nonzero_bits (varop, mode) & GET_MODE_MASK (mode);
6311
6312 /* Turn off all bits in the constant that are known to already be zero.
6313 Thus, if the AND isn't needed at all, we will have CONSTOP == NONZERO_BITS
6314 which is tested below. */
6315
6316 constop &= nonzero;
6317
6318 /* If we don't have any bits left, return zero. */
6319 if (constop == 0)
6320 return const0_rtx;
6321
6322 /* If VAROP is a NEG of something known to be zero or 1 and CONSTOP is
6323 a power of two, we can replace this with an ASHIFT. */
6324 if (GET_CODE (varop) == NEG && nonzero_bits (XEXP (varop, 0), mode) == 1
6325 && (i = exact_log2 (constop)) >= 0)
6326 return simplify_shift_const (NULL_RTX, ASHIFT, mode, XEXP (varop, 0), i);
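 /* For example, if X is known to be 0 or 1,
 (and (neg X) (const_int 4)) becomes (ashift X (const_int 2)),
 since (neg X) is either 0 or all ones. */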
6327
6328 /* If VAROP is an IOR or XOR, apply the AND to both branches of the IOR
6329 or XOR, then try to apply the distributive law. This may eliminate
6330 operations if either branch can be simplified because of the AND.
6331 It may also make some cases more complex, but those cases probably
6332 won't match a pattern either with or without this. */
6333
6334 if (GET_CODE (varop) == IOR || GET_CODE (varop) == XOR)
6335 return
6336 gen_lowpart_for_combine
6337 (mode,
6338 apply_distributive_law
6339 (gen_binary (GET_CODE (varop), GET_MODE (varop),
6340 simplify_and_const_int (NULL_RTX, GET_MODE (varop),
6341 XEXP (varop, 0), constop),
6342 simplify_and_const_int (NULL_RTX, GET_MODE (varop),
6343 XEXP (varop, 1), constop))));
6344
6345 /* Get VAROP in MODE. Try to get a SUBREG if not. Don't make a new SUBREG
6346 if we already had one (just check for the simplest cases). */
6347 if (x && GET_CODE (XEXP (x, 0)) == SUBREG
6348 && GET_MODE (XEXP (x, 0)) == mode
6349 && SUBREG_REG (XEXP (x, 0)) == varop)
6350 varop = XEXP (x, 0);
6351 else
6352 varop = gen_lowpart_for_combine (mode, varop);
6353
6354 /* If we can't make the SUBREG, try to return what we were given. */
6355 if (GET_CODE (varop) == CLOBBER)
6356 return x ? x : varop;
6357
6358 /* If we are only masking insignificant bits, return VAROP. */
6359 if (constop == nonzero)
6360 x = varop;
6361
6362 /* Otherwise, return an AND. See how much, if any, of X we can use. */
6363 else if (x == 0 || GET_CODE (x) != AND || GET_MODE (x) != mode)
6364 x = gen_binary (AND, mode, varop, GEN_INT (constop));
6365
6366 else
6367 {
6368 if (GET_CODE (XEXP (x, 1)) != CONST_INT
6369 || INTVAL (XEXP (x, 1)) != constop)
6370 SUBST (XEXP (x, 1), GEN_INT (constop));
6371
6372 SUBST (XEXP (x, 0), varop);
6373 }
6374
6375 return x;
6376}
6377\f
6378/* Given an expression, X, compute which bits in X can be non-zero.
6379 We don't care about bits outside of those defined in MODE.
6380
6381 For most X this is simply GET_MODE_MASK (MODE), but if X is
6382 a shift, AND, or zero_extract, we can do better. */
6383
6384static unsigned HOST_WIDE_INT
6385nonzero_bits (x, mode)
6386 rtx x;
6387 enum machine_mode mode;
6388{
6389 unsigned HOST_WIDE_INT nonzero = GET_MODE_MASK (mode);
6390 unsigned HOST_WIDE_INT inner_nz;
6391 enum rtx_code code;
6392 int mode_width = GET_MODE_BITSIZE (mode);
6393 rtx tem;
6394
6395 /* If X is wider than MODE, use its mode instead. */
6396 if (GET_MODE_BITSIZE (GET_MODE (x)) > mode_width)
6397 {
6398 mode = GET_MODE (x);
6399 nonzero = GET_MODE_MASK (mode);
6400 mode_width = GET_MODE_BITSIZE (mode);
6401 }
6402
6403 if (mode_width > HOST_BITS_PER_WIDE_INT)
6404 /* Our only callers in this case look for single bit values. So
6405 just return the mode mask. Those tests will then be false. */
6406 return nonzero;
6407
6408#ifndef WORD_REGISTER_OPERATIONS
6409 /* If MODE is wider than X, but both are a single word for both the host
6410 and target machines, we can compute this from which bits of the
6411 object might be nonzero in its own mode, taking into account the fact
6412 that on many CISC machines, accessing an object in a wider mode
6413 causes the high-order bits to become undefined. So they are
6414 not known to be zero. */
6415
6416 if (GET_MODE (x) != VOIDmode && GET_MODE (x) != mode
6417 && GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD
6418 && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
6419 && GET_MODE_BITSIZE (mode) > GET_MODE_BITSIZE (GET_MODE (x)))
6420 {
6421 nonzero &= nonzero_bits (x, GET_MODE (x));
6422 nonzero |= GET_MODE_MASK (mode) & ~ GET_MODE_MASK (GET_MODE (x));
6423 return nonzero;
6424 }
6425#endif
6426
6427 code = GET_CODE (x);
6428 switch (code)
6429 {
6430 case REG:
6431#ifdef STACK_BOUNDARY
6432 /* If this is the stack pointer, we may know something about its
6433 alignment. If PUSH_ROUNDING is defined, it is possible for the
6434 stack to be momentarily aligned only to that amount, so we pick
6435 the least alignment. */
6436
6437 if (x == stack_pointer_rtx)
6438 {
6439 int sp_alignment = STACK_BOUNDARY / BITS_PER_UNIT;
6440
6441#ifdef PUSH_ROUNDING
6442 sp_alignment = MIN (PUSH_ROUNDING (1), sp_alignment);
6443#endif
6444
6445 return nonzero & ~ (sp_alignment - 1);
6446 }
6447#endif
6448
6449 /* If X is a register whose nonzero bits value is current, use it.
6450 Otherwise, if X is a register whose value we can find, use that
6451 value. Otherwise, use the previously-computed global nonzero bits
6452 for this register. */
6453
6454 if (reg_last_set_value[REGNO (x)] != 0
6455 && reg_last_set_mode[REGNO (x)] == mode
6456 && (reg_n_sets[REGNO (x)] == 1
6457 || reg_last_set_label[REGNO (x)] == label_tick)
6458 && INSN_CUID (reg_last_set[REGNO (x)]) < subst_low_cuid)
6459 return reg_last_set_nonzero_bits[REGNO (x)];
6460
6461 tem = get_last_value (x);
6462
6463 if (tem)
6464 {
6465#ifdef SHORT_IMMEDIATES_SIGN_EXTEND
6466 /* If X is narrower than MODE and TEM is a non-negative
6467 constant that would appear negative in the mode of X,
6468 sign-extend it for use in reg_nonzero_bits because some
6469 machines (maybe most) will actually do the sign-extension
6470 and this is the conservative approach.
6471
6472 ??? For 2.5, try to tighten up the MD files in this regard
6473 instead of this kludge. */
6474
6475 if (GET_MODE_BITSIZE (GET_MODE (x)) < mode_width
6476 && GET_CODE (tem) == CONST_INT
6477 && INTVAL (tem) > 0
6478 && 0 != (INTVAL (tem)
6479 & ((HOST_WIDE_INT) 1
6480 << GET_MODE_BITSIZE (GET_MODE (x)))))
6481 tem = GEN_INT (INTVAL (tem)
6482 | ((HOST_WIDE_INT) (-1)
6483 << GET_MODE_BITSIZE (GET_MODE (x))));
6484#endif
6485 return nonzero_bits (tem, mode);
6486 }
6487 else if (nonzero_sign_valid && reg_nonzero_bits[REGNO (x)])
6488 return reg_nonzero_bits[REGNO (x)] & nonzero;
6489 else
6490 return nonzero;
6491
6492 case CONST_INT:
6493#ifdef SHORT_IMMEDIATES_SIGN_EXTEND
6494 /* If X is negative in MODE, sign-extend the value. */
6495 if (INTVAL (x) > 0
6496 && 0 != (INTVAL (x)
6497 & ((HOST_WIDE_INT) 1 << GET_MODE_BITSIZE (GET_MODE (x)))))
6498 return (INTVAL (x)
6499 | ((HOST_WIDE_INT) (-1) << GET_MODE_BITSIZE (GET_MODE (x))));
6500#endif
6501
6502 return INTVAL (x);
6503
6504 case MEM:
6505#ifdef LOAD_EXTEND_OP
6506 /* In many, if not most, RISC machines, reading a byte from memory
6507 zeros the rest of the register. Noticing that fact saves a lot
6508 of extra zero-extends. */
6509 if (LOAD_EXTEND_OP (GET_MODE (x)) == ZERO_EXTEND)
6510 nonzero &= GET_MODE_MASK (GET_MODE (x));
6511#endif
6512 break;
6513
6514 case EQ: case NE:
6515 case GT: case GTU:
6516 case LT: case LTU:
6517 case GE: case GEU:
6518 case LE: case LEU:
6519
6520 /* If this produces an integer result, we know which bits are set.
6521 Code here used to clear bits outside the mode of X, but that is
6522 now done above. */
6523
6524 if (GET_MODE_CLASS (mode) == MODE_INT
6525 && mode_width <= HOST_BITS_PER_WIDE_INT)
6526 nonzero = STORE_FLAG_VALUE;
6527 break;
6528
6529 case NEG:
6530 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
6531 == GET_MODE_BITSIZE (GET_MODE (x)))
6532 nonzero = 1;
6533
6534 if (GET_MODE_SIZE (GET_MODE (x)) < mode_width)
6535 nonzero |= (GET_MODE_MASK (mode) & ~ GET_MODE_MASK (GET_MODE (x)));
6536 break;
6537
6538 case ABS:
6539 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
6540 == GET_MODE_BITSIZE (GET_MODE (x)))
6541 nonzero = 1;
6542 break;
6543
6544 case TRUNCATE:
6545 nonzero &= (nonzero_bits (XEXP (x, 0), mode) & GET_MODE_MASK (mode));
6546 break;
6547
6548 case ZERO_EXTEND:
6549 nonzero &= nonzero_bits (XEXP (x, 0), mode);
6550 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
6551 nonzero &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
6552 break;
6553
6554 case SIGN_EXTEND:
6555 /* If the sign bit is known clear, this is the same as ZERO_EXTEND.
6556 Otherwise, show all the bits in the outer mode but not the inner
6557 may be non-zero. */
6558 inner_nz = nonzero_bits (XEXP (x, 0), mode);
6559 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
6560 {
6561 inner_nz &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
6562 if (inner_nz &
6563 (((HOST_WIDE_INT) 1
6564 << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1))))
6565 inner_nz |= (GET_MODE_MASK (mode)
6566 & ~ GET_MODE_MASK (GET_MODE (XEXP (x, 0))));
6567 }
6568
6569 nonzero &= inner_nz;
6570 break;
6571
6572 case AND:
6573 nonzero &= (nonzero_bits (XEXP (x, 0), mode)
6574 & nonzero_bits (XEXP (x, 1), mode));
6575 break;
6576
6577 case XOR: case IOR:
6578 case UMIN: case UMAX: case SMIN: case SMAX:
6579 nonzero &= (nonzero_bits (XEXP (x, 0), mode)
6580 | nonzero_bits (XEXP (x, 1), mode));
6581 break;
6582
6583 case PLUS: case MINUS:
6584 case MULT:
6585 case DIV: case UDIV:
6586 case MOD: case UMOD:
6587 /* We can apply the rules of arithmetic to compute the number of
6588 high- and low-order zero bits of these operations. We start by
6589 computing the width (position of the highest-order non-zero bit)
6590 and the number of low-order zero bits for each value. */
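 /* For example, if nz0 == 0x0f (width 4) and nz1 == 0x03 (width 2),
 any PLUS result is at most 15 + 3 == 18, so it fits in
 MAX (4, 2) + 1 == 5 bits and nonzero is cut down to 0x1f. */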
6591 {
6592 unsigned HOST_WIDE_INT nz0 = nonzero_bits (XEXP (x, 0), mode);
6593 unsigned HOST_WIDE_INT nz1 = nonzero_bits (XEXP (x, 1), mode);
6594 int width0 = floor_log2 (nz0) + 1;
6595 int width1 = floor_log2 (nz1) + 1;
6596 int low0 = floor_log2 (nz0 & -nz0);
6597 int low1 = floor_log2 (nz1 & -nz1);
6598 int op0_maybe_minusp = (nz0 & ((HOST_WIDE_INT) 1 << (mode_width - 1)));
6599 int op1_maybe_minusp = (nz1 & ((HOST_WIDE_INT) 1 << (mode_width - 1)));
6600 int result_width = mode_width;
6601 int result_low = 0;
6602
6603 switch (code)
6604 {
6605 case PLUS:
6606 result_width = MAX (width0, width1) + 1;
6607 result_low = MIN (low0, low1);
6608 break;
6609 case MINUS:
6610 result_low = MIN (low0, low1);
6611 break;
6612 case MULT:
6613 result_width = width0 + width1;
6614 result_low = low0 + low1;
6615 break;
6616 case DIV:
6617 if (! op0_maybe_minusp && ! op1_maybe_minusp)
6618 result_width = width0;
6619 break;
6620 case UDIV:
6621 result_width = width0;
6622 break;
6623 case MOD:
6624 if (! op0_maybe_minusp && ! op1_maybe_minusp)
6625 result_width = MIN (width0, width1);
6626 result_low = MIN (low0, low1);
6627 break;
6628 case UMOD:
6629 result_width = MIN (width0, width1);
6630 result_low = MIN (low0, low1);
6631 break;
6632 }
6633
6634 if (result_width < mode_width)
6635 nonzero &= ((HOST_WIDE_INT) 1 << result_width) - 1;
6636
6637 if (result_low > 0)
6638 nonzero &= ~ (((HOST_WIDE_INT) 1 << result_low) - 1);
6639 }
6640 break;
6641
6642 case ZERO_EXTRACT:
6643 if (GET_CODE (XEXP (x, 1)) == CONST_INT
6644 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
6645 nonzero &= ((HOST_WIDE_INT) 1 << INTVAL (XEXP (x, 1))) - 1;
6646 break;
6647
6648 case SUBREG:
6649 /* If this is a SUBREG formed for a promoted variable that has
6650 been zero-extended, we know that at least the high-order bits
6651 are zero, though others might be too. */
6652
6653 if (SUBREG_PROMOTED_VAR_P (x) && SUBREG_PROMOTED_UNSIGNED_P (x))
6654 nonzero = (GET_MODE_MASK (GET_MODE (x))
6655 & nonzero_bits (SUBREG_REG (x), GET_MODE (x)));
6656
6657 /* If the inner mode is a single word for both the host and target
6658 machines, we can compute this from which bits of the inner
6659 object might be nonzero. */
6660 if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) <= BITS_PER_WORD
5f4f0e22
CH
6661 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
6662 <= HOST_BITS_PER_WIDE_INT))
6663 {
6664 nonzero &= nonzero_bits (SUBREG_REG (x), mode);
6665
6666#ifndef WORD_REGISTER_OPERATIONS
6667 /* On many CISC machines, accessing an object in a wider mode
6668 causes the high-order bits to become undefined. So they are
6669 not known to be zero. */
6670 if (GET_MODE_SIZE (GET_MODE (x))
6671 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
951553af
RK
6672 nonzero |= (GET_MODE_MASK (GET_MODE (x))
6673 & ~ GET_MODE_MASK (GET_MODE (SUBREG_REG (x))));
230d793d
RS
6674#endif
6675 }
6676 break;
6677
6678 case ASHIFTRT:
6679 case LSHIFTRT:
6680 case ASHIFT:
6681 case LSHIFT:
6682 case ROTATE:
6683 /* The nonzero bits are in two classes: any bits within MODE
6684 that aren't in GET_MODE (x) are always significant. The rest of the
6685 nonzero bits are those that are significant in the operand of
6686 the shift when shifted the appropriate number of bits. This
6687 shows that high-order bits are cleared by the right shift and
6688 low-order bits by left shifts. */
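 /* For example, in SImode, (lshiftrt Y (const_int 28)) can have at
 most bits 0-3 nonzero: nonzero_bits (Y) is shifted right 28
 places, clearing the 28 high-order bits. */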
6689 if (GET_CODE (XEXP (x, 1)) == CONST_INT
6690 && INTVAL (XEXP (x, 1)) >= 0
6691 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
6692 {
6693 enum machine_mode inner_mode = GET_MODE (x);
6694 int width = GET_MODE_BITSIZE (inner_mode);
6695 int count = INTVAL (XEXP (x, 1));
6696 unsigned HOST_WIDE_INT mode_mask = GET_MODE_MASK (inner_mode);
6697 unsigned HOST_WIDE_INT op_nonzero = nonzero_bits (XEXP (x, 0), mode);
6698 unsigned HOST_WIDE_INT inner = op_nonzero & mode_mask;
6699 unsigned HOST_WIDE_INT outer = 0;
6700
6701 if (mode_width > width)
6702 outer = (op_nonzero & nonzero & ~ mode_mask);
6703
6704 if (code == LSHIFTRT)
6705 inner >>= count;
6706 else if (code == ASHIFTRT)
6707 {
6708 inner >>= count;
6709
6710 /* If the sign bit may have been nonzero before the shift, we
6711 need to mark all the places it could have been copied to
6712 by the shift as possibly nonzero. */
6713 if (inner & ((HOST_WIDE_INT) 1 << (width - 1 - count)))
6714 inner |= (((HOST_WIDE_INT) 1 << count) - 1) << (width - count);
230d793d
RS
6715 }
6716 else if (code == LSHIFT || code == ASHIFT)
6717 inner <<= count;
6718 else
6719 inner = ((inner << (count % width)
6720 | (inner >> (width - (count % width)))) & mode_mask);
6721
6722 nonzero &= (outer | inner);
6723 }
6724 break;
6725
6726 case FFS:
6727 /* This is at most the number of bits in the mode. */
6728 nonzero = ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width) + 1)) - 1;
6729 break;
6730
6731 case IF_THEN_ELSE:
6732 nonzero &= (nonzero_bits (XEXP (x, 1), mode)
6733 | nonzero_bits (XEXP (x, 2), mode));
6734 break;
6735 }
6736
6737 return nonzero;
6738}
6739\f
6740/* Return the number of bits at the high-order end of X that are known to
6741 be equal to the sign bit. X will be used in mode MODE; if MODE is
6742 VOIDmode, X will be used in its own mode. The returned value will always
6743 be between 1 and the number of bits in MODE. */
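 /* For example, in SImode, (const_int -1) has 32 sign bit copies,
 (const_int 1) has 31, and a value zero-extended from QImode has
 at least 24. */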
6744
6745static int
6746num_sign_bit_copies (x, mode)
6747 rtx x;
6748 enum machine_mode mode;
6749{
6750 enum rtx_code code = GET_CODE (x);
6751 int bitwidth;
6752 int num0, num1, result;
6753 unsigned HOST_WIDE_INT nonzero;
6754 rtx tem;
6755
6756 /* If we weren't given a mode, use the mode of X. If the mode is still
6757 VOIDmode, we don't know anything. */
6758
6759 if (mode == VOIDmode)
6760 mode = GET_MODE (x);
6761
6762 if (mode == VOIDmode)
6763 return 1;
6764
6765 bitwidth = GET_MODE_BITSIZE (mode);
6766
6767 /* For a smaller object, just ignore the high bits. */
6768 if (bitwidth < GET_MODE_BITSIZE (GET_MODE (x)))
6769 return MAX (1, (num_sign_bit_copies (x, GET_MODE (x))
6770 - (GET_MODE_BITSIZE (GET_MODE (x)) - bitwidth)));
6771
6772 switch (code)
6773 {
6774 case REG:
6775
6776 if (reg_last_set_value[REGNO (x)] != 0
6777 && reg_last_set_mode[REGNO (x)] == mode
6778 && (reg_n_sets[REGNO (x)] == 1
6779 || reg_last_set_label[REGNO (x)] == label_tick)
6780 && INSN_CUID (reg_last_set[REGNO (x)]) < subst_low_cuid)
6781 return reg_last_set_sign_bit_copies[REGNO (x)];
6782
6783 tem = get_last_value (x);
6784 if (tem != 0)
6785 return num_sign_bit_copies (tem, mode);
6786
6787 if (nonzero_sign_valid && reg_sign_bit_copies[REGNO (x)] != 0)
6788 return reg_sign_bit_copies[REGNO (x)];
6789 break;
6790
6791 case MEM:
6792#ifdef LOAD_EXTEND_OP
6793 /* Some RISC machines sign-extend all loads of smaller than a word. */
6794 if (LOAD_EXTEND_OP (GET_MODE (x)) == SIGN_EXTEND)
6795 return MAX (1, bitwidth - GET_MODE_BITSIZE (GET_MODE (x)) + 1);
6796#endif
6797 break;
6798
6799 case CONST_INT:
6800 /* If the constant is negative, take its 1's complement and remask.
6801 Then see how many zero bits we have. */
6802 nonzero = INTVAL (x) & GET_MODE_MASK (mode);
6803 if (bitwidth <= HOST_BITS_PER_WIDE_INT
6804 && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
6805 nonzero = (~ nonzero) & GET_MODE_MASK (mode);
6806
6807 return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
6808
6809 case SUBREG:
6810 /* If this is a SUBREG for a promoted object that is sign-extended
6811 and we are looking at it in a wider mode, we know that at least the
6812 high-order bits are known to be sign bit copies. */
6813
6814 if (SUBREG_PROMOTED_VAR_P (x) && ! SUBREG_PROMOTED_UNSIGNED_P (x))
6815 return MAX (bitwidth - GET_MODE_BITSIZE (GET_MODE (x)) + 1,
6816 num_sign_bit_copies (SUBREG_REG (x), mode));
6817
6818 /* For a smaller object, just ignore the high bits. */
6819 if (bitwidth <= GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))))
6820 {
6821 num0 = num_sign_bit_copies (SUBREG_REG (x), VOIDmode);
6822 return MAX (1, (num0
6823 - (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
6824 - bitwidth)));
6825 }
6826
6827#ifdef WORD_REGISTER_OPERATIONS
6828 /* For paradoxical SUBREGs on machines where all register operations
6829 affect the entire register, just look inside. Note that we are
6830 passing MODE to the recursive call, so the number of sign bit copies
6831 will remain relative to that mode, not the inner mode. */
6832
6833 if (GET_MODE_SIZE (GET_MODE (x))
6834 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
6835 return num_sign_bit_copies (SUBREG_REG (x), mode);
6836#endif
6837 break;
6838
6839 case SIGN_EXTRACT:
6840 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
6841 return MAX (1, bitwidth - INTVAL (XEXP (x, 1)));
6842 break;
6843
6844 case SIGN_EXTEND:
6845 return (bitwidth - GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
6846 + num_sign_bit_copies (XEXP (x, 0), VOIDmode));
6847
6848 case TRUNCATE:
6849 /* For a smaller object, just ignore the high bits. */
6850 num0 = num_sign_bit_copies (XEXP (x, 0), VOIDmode);
6851 return MAX (1, (num0 - (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
6852 - bitwidth)));
6853
6854 case NOT:
6855 return num_sign_bit_copies (XEXP (x, 0), mode);
6856
6857 case ROTATE: case ROTATERT:
6858 /* If we are rotating left by a number of bits less than the number
6859 of sign bit copies, we can just subtract that amount from the
6860 number. */
6861 if (GET_CODE (XEXP (x, 1)) == CONST_INT
6862 && INTVAL (XEXP (x, 1)) >= 0 && INTVAL (XEXP (x, 1)) < bitwidth)
6863 {
6864 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
6865 return MAX (1, num0 - (code == ROTATE ? INTVAL (XEXP (x, 1))
6866 : bitwidth - INTVAL (XEXP (x, 1))));
6867 }
6868 break;
6869
6870 case NEG:
6871 /* In general, this subtracts one sign bit copy. But if the value
6872 is known to be positive, the number of sign bit copies is the
6873 same as that of the input. Finally, if the input has just one bit
6874 that might be nonzero, all the bits are copies of the sign bit. */
6875 nonzero = nonzero_bits (XEXP (x, 0), mode);
6876 if (nonzero == 1)
6877 return bitwidth;
6878
6879 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
6880 if (num0 > 1
6881 && bitwidth <= HOST_BITS_PER_WIDE_INT
6882 && (((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero))
6883 num0--;
6884
6885 return num0;
6886
6887 case IOR: case AND: case XOR:
6888 case SMIN: case SMAX: case UMIN: case UMAX:
6889 /* Logical operations will preserve the number of sign-bit copies.
6890 MIN and MAX operations always return one of the operands. */
6891 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
6892 num1 = num_sign_bit_copies (XEXP (x, 1), mode);
6893 return MIN (num0, num1);
6894
6895 case PLUS: case MINUS:
6896 /* For addition and subtraction, we can have a 1-bit carry. However,
6897 if we are subtracting 1 from a positive number, there will not
6898 be such a carry. Furthermore, if the positive number is known to
6899 be 0 or 1, we know the result is either -1 or 0. */
6900
6901 if (code == PLUS && XEXP (x, 1) == constm1_rtx
6902 && bitwidth <= HOST_BITS_PER_WIDE_INT)
6903 {
6904 nonzero = nonzero_bits (XEXP (x, 0), mode);
6905 if ((((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero) == 0)
6906 return (nonzero == 1 || nonzero == 0 ? bitwidth
6907 : bitwidth - floor_log2 (nonzero) - 1);
6908 }
6909
6910 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
6911 num1 = num_sign_bit_copies (XEXP (x, 1), mode);
6912 return MAX (1, MIN (num0, num1) - 1);
6913
6914 case MULT:
6915 /* The number of bits of the product is the sum of the number of
6916 bits of both terms. However, unless one of the terms is known
6917 to be positive, we must allow for an additional bit since negating
6918 a negative number can remove one sign bit copy. */
6919
6920 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
6921 num1 = num_sign_bit_copies (XEXP (x, 1), mode);
6922
6923 result = bitwidth - (bitwidth - num0) - (bitwidth - num1);
6924 if (result > 0
9295e6af 6925 && bitwidth <= HOST_BITS_PER_WIDE_INT
951553af 6926 && ((nonzero_bits (XEXP (x, 0), mode)
d0ab8cd3 6927 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
951553af 6928 && (nonzero_bits (XEXP (x, 1), mode)
d0ab8cd3
RK
6929 & ((HOST_WIDE_INT) 1 << (bitwidth - 1)) != 0))
6930 result--;
6931
6932 return MAX (1, result);
6933
6934 case UDIV:
6935 /* The result must be <= the first operand. */
6936 return num_sign_bit_copies (XEXP (x, 0), mode);
6937
6938 case UMOD:
6939 /* The result must be <= the second operand. */
6940 return num_sign_bit_copies (XEXP (x, 1), mode);
6941
6942 case DIV:
6943 /* Similar to unsigned division, except that we have to worry about
6944 the case where the divisor is negative, in which case we have
6945 to add 1. */
6946 result = num_sign_bit_copies (XEXP (x, 0), mode);
6947 if (result > 1
ac49a949 6948 && bitwidth <= HOST_BITS_PER_WIDE_INT
951553af 6949 && (nonzero_bits (XEXP (x, 1), mode)
d0ab8cd3
RK
6950 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
6951 result --;
6952
6953 return result;
6954
6955 case MOD:
6956 result = num_sign_bit_copies (XEXP (x, 1), mode);
6957 if (result > 1
ac49a949 6958 && bitwidth <= HOST_BITS_PER_WIDE_INT
951553af 6959 && (nonzero_bits (XEXP (x, 1), mode)
d0ab8cd3
RK
6960 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
6961 result --;
6962
6963 return result;
6964
6965 case ASHIFTRT:
6966 /* Shifts by a constant add to the number of bits equal to the
6967 sign bit. */
6968 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
6969 if (GET_CODE (XEXP (x, 1)) == CONST_INT
6970 && INTVAL (XEXP (x, 1)) > 0)
6971 num0 = MIN (bitwidth, num0 + INTVAL (XEXP (x, 1)));
6972
6973 return num0;
6974
6975 case ASHIFT:
6976 case LSHIFT:
6977 /* Left shifts destroy copies. */
6978 if (GET_CODE (XEXP (x, 1)) != CONST_INT
6979 || INTVAL (XEXP (x, 1)) < 0
6980 || INTVAL (XEXP (x, 1)) >= bitwidth)
6981 return 1;
6982
6983 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
6984 return MAX (1, num0 - INTVAL (XEXP (x, 1)));
6985
6986 case IF_THEN_ELSE:
6987 num0 = num_sign_bit_copies (XEXP (x, 1), mode);
6988 num1 = num_sign_bit_copies (XEXP (x, 2), mode);
6989 return MIN (num0, num1);
6990
6991#if STORE_FLAG_VALUE == -1
6992 case EQ: case NE: case GE: case GT: case LE: case LT:
6993 case GEU: case GTU: case LEU: case LTU:
6994 return bitwidth;
6995#endif
6996 }
6997
6998 /* If we haven't been able to figure it out by one of the above rules,
6999 see if some of the high-order bits are known to be zero. If so,
7000 count those bits and return one less than that amount. If we can't
7001 safely compute the mask for this mode, always return 1. */
7002
7003 if (bitwidth > HOST_BITS_PER_WIDE_INT)
7004 return 1;
7005
7006 nonzero = nonzero_bits (x, mode);
7007 return (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))
7008 ? 1 : bitwidth - floor_log2 (nonzero) - 1);
7009}
7010\f
7011/* Return the number of "extended" bits there are in X, when interpreted
7012 as a quantity in MODE whose signedness is indicated by UNSIGNEDP. For
7013 unsigned quantities, this is the number of high-order zero bits.
7014 For signed quantities, this is the number of copies of the sign bit
7015 minus 1. In both case, this function returns the number of "spare"
7016 bits. For example, if two quantities for which this function returns
7017 at least 1 are added, the addition is known not to overflow.
7018
7019 This function will always return 0 unless called during combine, which
7020 implies that it must be called from a define_split. */
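 /* For example, a value known to fit in 8 unsigned bits gives
 32 - 1 - 7 == 24 "spare" bits in SImode, so adding two such
 values cannot overflow. */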
7021
7022int
7023extended_count (x, mode, unsignedp)
7024 rtx x;
7025 enum machine_mode mode;
7026 int unsignedp;
7027{
7028 if (nonzero_sign_valid == 0)
7029 return 0;
7030
7031 return (unsignedp
7032 ? (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7033 && (GET_MODE_BITSIZE (mode) - 1
7034 - floor_log2 (nonzero_bits (x, mode))))
7035 : num_sign_bit_copies (x, mode) - 1);
7036}
7037\f
7038/* This function is called from `simplify_shift_const' to merge two
7039 outer operations. Specifically, we have already found that we need
7040 to perform operation *POP0 with constant *PCONST0 at the outermost
7041 position. We would now like to also perform OP1 with constant CONST1
7042 (with *POP0 being done last).
7043
7044 Return 1 if we can do the operation and update *POP0 and *PCONST0 with
7045 the resulting operation. *PCOMP_P is set to 1 if we would need to
7046 complement the innermost operand, otherwise it is unchanged.
7047
7048 MODE is the mode in which the operation will be done. No bits outside
7049 the width of this mode matter. It is assumed that the width of this mode
7050 is smaller than or equal to HOST_BITS_PER_WIDE_INT.
7051
7052 If *POP0 or OP1 is NIL, it means no operation is required. Only NEG, PLUS,
7053 IOR, XOR, and AND are supported. We may set *POP0 to SET if the proper
7054 result is simply *PCONST0.
7055
7056 If the resulting operation cannot be expressed as one operation, we
7057 return 0 and do not change *POP0, *PCONST0, and *PCOMP_P. */
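 /* For example, two successive IORs merge into one with the constants
 combined, while XOR followed by AND with the same constant uses the
 identity (a & b) ^ b == (~a) & b, setting *PCOMP_P. */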
7058
7059static int
7060merge_outer_ops (pop0, pconst0, op1, const1, mode, pcomp_p)
7061 enum rtx_code *pop0;
7062 HOST_WIDE_INT *pconst0;
7063 enum rtx_code op1;
7064 HOST_WIDE_INT const1;
7065 enum machine_mode mode;
7066 int *pcomp_p;
7067{
7068 enum rtx_code op0 = *pop0;
7069 HOST_WIDE_INT const0 = *pconst0;
7070
7071 const0 &= GET_MODE_MASK (mode);
7072 const1 &= GET_MODE_MASK (mode);
7073
7074 /* If OP0 is an AND, clear unimportant bits in CONST1. */
7075 if (op0 == AND)
7076 const1 &= const0;
7077
7078 /* If OP0 or OP1 is NIL, this is easy. Similarly if they are the same or
7079 if OP0 is SET. */
7080
7081 if (op1 == NIL || op0 == SET)
7082 return 1;
7083
7084 else if (op0 == NIL)
7085 op0 = op1, const0 = const1;
7086
7087 else if (op0 == op1)
7088 {
7089 switch (op0)
7090 {
7091 case AND:
7092 const0 &= const1;
7093 break;
7094 case IOR:
7095 const0 |= const1;
7096 break;
7097 case XOR:
7098 const0 ^= const1;
7099 break;
7100 case PLUS:
7101 const0 += const1;
7102 break;
7103 case NEG:
7104 op0 = NIL;
7105 break;
7106 }
7107 }
7108
7109 /* Otherwise, if either is a PLUS or NEG, we can't do anything. */
7110 else if (op0 == PLUS || op1 == PLUS || op0 == NEG || op1 == NEG)
7111 return 0;
7112
7113 /* If the two constants aren't the same, we can't do anything. The
7114 remaining six cases can all be done. */
7115 else if (const0 != const1)
7116 return 0;
7117
7118 else
7119 switch (op0)
7120 {
7121 case IOR:
7122 if (op1 == AND)
7123 /* (a & b) | b == b */
7124 op0 = SET;
7125 else /* op1 == XOR */
7126 /* (a ^ b) | b == a | b */
7127 ;
7128 break;
7129
7130 case XOR:
7131 if (op1 == AND)
7132 /* (a & b) ^ b == (~a) & b */
7133 op0 = AND, *pcomp_p = 1;
7134 else /* op1 == IOR */
7135 /* (a | b) ^ b == a & ~b */
7136 op0 = AND, *pconst0 = ~ const0;
7137 break;
7138
7139 case AND:
7140 if (op1 == IOR)
7141 /* (a | b) & b == b */
7142 op0 = SET;
7143 else /* op1 == XOR */
7144 /* (a ^ b) & b == (~a) & b */
7145 *pcomp_p = 1;
7146 break;
7147 }
7148
7149 /* Check for NO-OP cases. */
7150 const0 &= GET_MODE_MASK (mode);
7151 if (const0 == 0
7152 && (op0 == IOR || op0 == XOR || op0 == PLUS))
7153 op0 = NIL;
7154 else if (const0 == 0 && op0 == AND)
7155 op0 = SET;
7156 else if (const0 == GET_MODE_MASK (mode) && op0 == AND)
7157 op0 = NIL;
7158
7159 *pop0 = op0;
7160 *pconst0 = const0;
7161
7162 return 1;
7163}
7164\f
7165/* Simplify a shift of VAROP by COUNT bits. CODE says what kind of shift.
7166 The result of the shift is RESULT_MODE. X, if non-zero, is an expression
7167 that we started with.
7168
7169 The shift is normally computed in the widest mode we find in VAROP, as
7170 long as it isn't a different number of words than RESULT_MODE. Exceptions
7171 are ASHIFTRT and ROTATE, which are always done in their original mode. */
7172
7173static rtx
7174simplify_shift_const (x, code, result_mode, varop, count)
7175 rtx x;
7176 enum rtx_code code;
7177 enum machine_mode result_mode;
7178 rtx varop;
7179 int count;
7180{
7181 enum rtx_code orig_code = code;
7182 int orig_count = count;
7183 enum machine_mode mode = result_mode;
7184 enum machine_mode shift_mode, tmode;
7185 int mode_words
7186 = (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
7187 /* We form (outer_op (code varop count) (outer_const)). */
7188 enum rtx_code outer_op = NIL;
7189 HOST_WIDE_INT outer_const;
7190 rtx const_rtx;
7191 int complement_p = 0;
7192 rtx new;
7193
7194 /* If we were given an invalid count, don't do anything except exactly
7195 what was requested. */
7196
7197 if (count < 0 || count > GET_MODE_BITSIZE (mode))
7198 {
7199 if (x)
7200 return x;
7201
7202 return gen_rtx (code, mode, varop, GEN_INT (count));
7203 }
7204
7205 /* Unless one of the branches of the `if' in this loop does a `continue',
7206 we will `break' the loop after the `if'. */
7207
7208 while (count != 0)
7209 {
7210 /* If we have an operand of (clobber (const_int 0)), just return that
7211 value. */
7212 if (GET_CODE (varop) == CLOBBER)
7213 return varop;
7214
7215 /* If we discovered we had to complement VAROP, leave. Making a NOT
7216 here would cause an infinite loop. */
7217 if (complement_p)
7218 break;
7219
7220 /* Convert ROTATERT to ROTATE. */
7221 if (code == ROTATERT)
7222 code = ROTATE, count = GET_MODE_BITSIZE (result_mode) - count;
7223
7224 /* Canonicalize LSHIFT to ASHIFT. */
7225 if (code == LSHIFT)
7226 code = ASHIFT;
7227
7228 /* We need to determine what mode we will do the shift in. If the
7229 shift is a ASHIFTRT or ROTATE, we must always do it in the mode it
7230 was originally done in. Otherwise, we can do it in MODE, the widest
7231 mode encountered. */
7232 shift_mode = (code == ASHIFTRT || code == ROTATE ? result_mode : mode);
7233
7234 /* Handle cases where the count is greater than the size of the mode
7235 minus 1. For ASHIFTRT, use the size minus one as the count (this can
7236 occur when simplifying (lshiftrt (ashiftrt ..))). For rotates,
7237 take the count modulo the size. For other shifts, the result is
7238 zero.
7239
7240 Since these shifts are being produced by the compiler by combining
7241 multiple operations, each of which is defined, we know what the
7242 result is supposed to be. */
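 /* For example, in SImode, (rotate X 37) becomes (rotate X 5),
 (ashiftrt X 40) becomes (ashiftrt X 31), and (lshiftrt X 34)
 is replaced by zero (subject to any outer operation). */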
7243
7244 if (count > GET_MODE_BITSIZE (shift_mode) - 1)
7245 {
7246 if (code == ASHIFTRT)
7247 count = GET_MODE_BITSIZE (shift_mode) - 1;
7248 else if (code == ROTATE || code == ROTATERT)
7249 count %= GET_MODE_BITSIZE (shift_mode);
7250 else
7251 {
7252 /* We can't simply return zero because there may be an
7253 outer op. */
7254 varop = const0_rtx;
7255 count = 0;
7256 break;
7257 }
7258 }
7259
7260 /* Negative counts are invalid and should not have been made (a
7261 programmer-specified negative count should have been handled
7262 above). */
7263 else if (count < 0)
7264 abort ();
7265
312def2e
RK
7266 /* An arithmetic right shift of a quantity known to be -1 or 0
7267 is a no-op. */
7268 if (code == ASHIFTRT
7269 && (num_sign_bit_copies (varop, shift_mode)
7270 == GET_MODE_BITSIZE (shift_mode)))
d0ab8cd3 7271 {
312def2e
RK
7272 count = 0;
7273 break;
7274 }
d0ab8cd3 7275
312def2e
RK
7276 /* If we are doing an arithmetic right shift and discarding all but
7277 the sign bit copies, this is equivalent to doing a shift by the
7278 bitsize minus one. Convert it into that shift because it will often
7279 allow other simplifications. */
500c518b 7280
312def2e
RK
7281 if (code == ASHIFTRT
7282 && (count + num_sign_bit_copies (varop, shift_mode)
7283 >= GET_MODE_BITSIZE (shift_mode)))
7284 count = GET_MODE_BITSIZE (shift_mode) - 1;
500c518b 7285
7286 /* We simplify the tests below and elsewhere by converting
7287 ASHIFTRT to LSHIFTRT if we know the sign bit is clear.
7288 `make_compound_operation' will convert it to an ASHIFTRT for
7289 those machines (such as Vax) that don't have an LSHIFTRT. */
7290 if (GET_MODE_BITSIZE (shift_mode) <= HOST_BITS_PER_WIDE_INT
7291 && code == ASHIFTRT
7292 && ((nonzero_bits (varop, shift_mode)
7293 & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (shift_mode) - 1)))
7294 == 0))
7295 code = LSHIFTRT;
7296
7297 switch (GET_CODE (varop))
7298 {
7299 case SIGN_EXTEND:
7300 case ZERO_EXTEND:
7301 case SIGN_EXTRACT:
7302 case ZERO_EXTRACT:
7303 new = expand_compound_operation (varop);
7304 if (new != varop)
7305 {
7306 varop = new;
7307 continue;
7308 }
7309 break;
7310
7311 case MEM:
7312 /* If we have (xshiftrt (mem ...) C) and C is MODE_WIDTH
7313 minus the width of a smaller mode, we can do this with a
7314 SIGN_EXTEND or ZERO_EXTEND from the narrower memory location. */
7315 if ((code == ASHIFTRT || code == LSHIFTRT)
7316 && ! mode_dependent_address_p (XEXP (varop, 0))
7317 && ! MEM_VOLATILE_P (varop)
7318 && (tmode = mode_for_size (GET_MODE_BITSIZE (mode) - count,
7319 MODE_INT, 1)) != BLKmode)
7320 {
7321#if BYTES_BIG_ENDIAN
7322 new = gen_rtx (MEM, tmode, XEXP (varop, 0));
7323#else
7324 new = gen_rtx (MEM, tmode,
7325 plus_constant (XEXP (varop, 0),
7326 count / BITS_PER_UNIT));
7327 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (varop);
7328 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (varop);
7329 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (varop);
7330#endif
7331 varop = gen_rtx_combine (code == ASHIFTRT ? SIGN_EXTEND
7332 : ZERO_EXTEND, mode, new);
7333 count = 0;
7334 continue;
7335 }
7336 break;
7337
7338 case USE:
7339 /* Similar to the case above, except that we can only do this if
7340 the resulting mode is the same as that of the underlying
7341 MEM and adjust the address depending on the *bits* endianness
7342 because of the way that bit-field extract insns are defined. */
7343 if ((code == ASHIFTRT || code == LSHIFTRT)
7344 && (tmode = mode_for_size (GET_MODE_BITSIZE (mode) - count,
7345 MODE_INT, 1)) != BLKmode
7346 && tmode == GET_MODE (XEXP (varop, 0)))
7347 {
7348#if BITS_BIG_ENDIAN
7349 new = XEXP (varop, 0);
7350#else
7351 new = copy_rtx (XEXP (varop, 0));
7352 SUBST (XEXP (new, 0),
7353 plus_constant (XEXP (new, 0),
7354 count / BITS_PER_UNIT));
7355#endif
7356
7357 varop = gen_rtx_combine (code == ASHIFTRT ? SIGN_EXTEND
7358 : ZERO_EXTEND, mode, new);
7359 count = 0;
7360 continue;
7361 }
7362 break;
7363
7364 case SUBREG:
7365 /* If VAROP is a SUBREG, strip it as long as the inner operand has
7366 the same number of words as what we've seen so far. Then store
7367 the widest mode in MODE. */
7368 if (subreg_lowpart_p (varop)
7369 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop)))
7370 > GET_MODE_SIZE (GET_MODE (varop)))
7371 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop)))
7372 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
7373 == mode_words))
7374 {
7375 varop = SUBREG_REG (varop);
7376 if (GET_MODE_SIZE (GET_MODE (varop)) > GET_MODE_SIZE (mode))
7377 mode = GET_MODE (varop);
7378 continue;
7379 }
7380 break;
7381
7382 case MULT:
7383 /* Some machines use MULT instead of ASHIFT because MULT
7384 is cheaper. But it is still better on those machines to
7385 merge two shifts into one. */
7386 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
7387 && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0)
7388 {
7389 varop = gen_binary (ASHIFT, GET_MODE (varop), XEXP (varop, 0),
7390 GEN_INT (exact_log2 (INTVAL (XEXP (varop, 1)))));
7391 continue;
7392 }
7393 break;
7394
7395 case UDIV:
7396 /* Similar, for when divides are cheaper. */
7397 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
7398 && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0)
7399 {
7400 varop = gen_binary (LSHIFTRT, GET_MODE (varop), XEXP (varop, 0),
7401 GEN_INT (exact_log2 (INTVAL (XEXP (varop, 1)))));
7402 continue;
7403 }
7404 break;
7405
7406 case ASHIFTRT:
7407 /* If we are extracting just the sign bit of an arithmetic right
7408 shift, that shift is not needed. */
7409 if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1)
7410 {
7411 varop = XEXP (varop, 0);
7412 continue;
7413 }
7414
7415 /* ... fall through ... */
7416
7417 case LSHIFTRT:
7418 case ASHIFT:
7419 case LSHIFT:
7420 case ROTATE:
7421 /* Here we have two nested shifts. The result is usually the
7422 AND of a new shift with a mask. We compute the result below. */
7423 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
7424 && INTVAL (XEXP (varop, 1)) >= 0
7425 && INTVAL (XEXP (varop, 1)) < GET_MODE_BITSIZE (GET_MODE (varop))
7426 && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
7427 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
7428 {
7429 enum rtx_code first_code = GET_CODE (varop);
7430 int first_count = INTVAL (XEXP (varop, 1));
7431 unsigned HOST_WIDE_INT mask;
7432 rtx mask_rtx;
7433 rtx inner;
7434
7435 if (first_code == LSHIFT)
7436 first_code = ASHIFT;
7437
7438 /* We have one common special case. We can't do any merging if
7439 the inner code is an ASHIFTRT of a smaller mode. However, if
7440 we have (ashift:M1 (subreg:M1 (ashiftrt:M2 FOO C1) 0) C2)
7441 with C2 == GET_MODE_BITSIZE (M1) - GET_MODE_BITSIZE (M2),
7442 we can convert it to
7443 (ashiftrt:M1 (ashift:M1 (and:M1 (subreg:M1 FOO 0 C2) C3) C1).
7444 This simplifies certain SIGN_EXTEND operations. */
7445 if (code == ASHIFT && first_code == ASHIFTRT
7446 && (GET_MODE_BITSIZE (result_mode)
7447 - GET_MODE_BITSIZE (GET_MODE (varop))) == count)
7448 {
7449 /* C3 has the low-order C1 bits zero. */
7450
7451 mask = (GET_MODE_MASK (mode)
7452 & ~ (((HOST_WIDE_INT) 1 << first_count) - 1));
7453
7454 varop = simplify_and_const_int (NULL_RTX, result_mode,
7455 XEXP (varop, 0), mask);
7456 varop = simplify_shift_const (NULL_RTX, ASHIFT, result_mode,
7457 varop, count);
7458 count = first_count;
7459 code = ASHIFTRT;
7460 continue;
7461 }
7462
7463 /* If this was (ashiftrt (ashift foo C1) C2) and FOO has more
7464 than C1 high-order bits equal to the sign bit, we can convert
7465 this to either an ASHIFT or an ASHIFTRT depending on the
7466 two counts.
7467
7468 We cannot do this if VAROP's mode is not SHIFT_MODE. */
7469
7470 if (code == ASHIFTRT && first_code == ASHIFT
7471 && GET_MODE (varop) == shift_mode
7472 && (num_sign_bit_copies (XEXP (varop, 0), shift_mode)
7473 > first_count))
7474 {
7475 count -= first_count;
7476 if (count < 0)
7477 count = - count, code = ASHIFT;
7478 varop = XEXP (varop, 0);
7479 continue;
7480 }
7481
7482 /* There are some cases we can't do. If CODE is ASHIFTRT,
7483 we can only do this if FIRST_CODE is also ASHIFTRT.
7484
7485 We can't do the case when CODE is ROTATE and FIRST_CODE is
7486 ASHIFTRT.
7487
7488 If the mode of this shift is not the mode of the outer shift,
7489 we can't do this if either shift is ASHIFTRT or ROTATE.
7490
7491 Finally, we can't do any of these if the mode is too wide
7492 unless the codes are the same.
7493
7494 Handle the case where the shift codes are the same
7495 first. */
7496
7497 if (code == first_code)
7498 {
7499 if (GET_MODE (varop) != result_mode
7500 && (code == ASHIFTRT || code == ROTATE))
7501 break;
7502
7503 count += first_count;
7504 varop = XEXP (varop, 0);
7505 continue;
7506 }
7507
7508 if (code == ASHIFTRT
7509 || (code == ROTATE && first_code == ASHIFTRT)
7510 || GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT
7511 || (GET_MODE (varop) != result_mode
7512 && (first_code == ASHIFTRT || first_code == ROTATE
7513 || code == ROTATE)))
7514 break;
7515
7516 /* To compute the mask to apply after the shift, shift the
7517 nonzero bits of the inner shift the same way the
7518 outer shift will. */
7519
7520 mask_rtx = GEN_INT (nonzero_bits (varop, GET_MODE (varop)));
7521
7522 mask_rtx
7523 = simplify_binary_operation (code, result_mode, mask_rtx,
7524 GEN_INT (count));
7525
7526 /* Give up if we can't compute an outer operation to use. */
7527 if (mask_rtx == 0
7528 || GET_CODE (mask_rtx) != CONST_INT
7529 || ! merge_outer_ops (&outer_op, &outer_const, AND,
7530 INTVAL (mask_rtx),
7531 result_mode, &complement_p))
7532 break;
7533
7534 /* If the shifts are in the same direction, we add the
7535 counts. Otherwise, we subtract them. */
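 /* For example, (lshiftrt (lshiftrt X 2) 3) proceeds with a count of
 2 + 3 == 5, while (ashift (lshiftrt X 5) 2) yields 2 - 5 == -3 and
 so continues as a logical right shift by 3, with the mask computed
 above supplying the outer AND. */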
7536 if ((code == ASHIFTRT || code == LSHIFTRT)
7537 == (first_code == ASHIFTRT || first_code == LSHIFTRT))
7538 count += first_count;
7539 else
7540 count -= first_count;
7541
7542 /* If COUNT is positive, the new shift is usually CODE,
7543 except for the two exceptions below, in which case it is
7544 FIRST_CODE. If the count is negative, FIRST_CODE should
7545 always be used. */
7546 if (count > 0
7547 && ((first_code == ROTATE && code == ASHIFT)
7548 || (first_code == ASHIFTRT && code == LSHIFTRT)))
7549 code = first_code;
7550 else if (count < 0)
7551 code = first_code, count = - count;
7552
7553 varop = XEXP (varop, 0);
7554 continue;
7555 }
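/* For example, the merge above turns (lshiftrt:SI (ashift:SI X 8) 8)
into (and:SI X 0xffffff): the counts cancel to zero and the mask
computed from the inner shift supplies the outer AND. */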
7556
7557 /* If we have (A << B << C) for any shift, we can convert this to
7558 (A << C << B). This wins if A is a constant. Only try this if
7559 B is not a constant. */
7560
7561 else if (GET_CODE (varop) == code
7562 && GET_CODE (XEXP (varop, 1)) != CONST_INT
7563 && 0 != (new
7564 = simplify_binary_operation (code, mode,
7565 XEXP (varop, 0),
7566 GEN_INT (count))))
7567 {
7568 varop = gen_rtx_combine (code, mode, new, XEXP (varop, 1));
7569 count = 0;
7570 continue;
7571 }
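/* For example, (ashift:SI (ashift:SI (const_int 3) B) 2) becomes
(ashift:SI (const_int 12) B), folding the constant shift. */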
7572 break;
7573
7574 case NOT:
7575 /* Make this fit the case below. */
7576 varop = gen_rtx_combine (XOR, mode, XEXP (varop, 0),
7577 GEN_INT (GET_MODE_MASK (mode)));
7578 continue;
7579
7580 case IOR:
7581 case AND:
7582 case XOR:
7583 /* If we have (xshiftrt (ior (plus X (const_int -1)) X) C)
7584 with C the size of VAROP - 1 and the shift is logical if
7585 STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
7586 we have an (le X 0) operation. If we have an arithmetic shift
7587 and STORE_FLAG_VALUE is 1 or we have a logical shift with
7588 STORE_FLAG_VALUE of -1, we have a (neg (le X 0)) operation. */
7589
7590 if (GET_CODE (varop) == IOR && GET_CODE (XEXP (varop, 0)) == PLUS
7591 && XEXP (XEXP (varop, 0), 1) == constm1_rtx
7592 && (STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
7593 && (code == LSHIFTRT || code == ASHIFTRT)
7594 && count == GET_MODE_BITSIZE (GET_MODE (varop)) - 1
7595 && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
7596 {
7597 count = 0;
7598 varop = gen_rtx_combine (LE, GET_MODE (varop), XEXP (varop, 1),
7599 const0_rtx);
7600
7601 if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
7602 varop = gen_rtx_combine (NEG, GET_MODE (varop), varop);
7603
7604 continue;
7605 }
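/* The sign bit of (ior (plus X -1) X) is set exactly when X <= 0:
for X > 0 neither X nor X - 1 is negative, for X == 0 the PLUS
yields -1, and for X < 0 X itself is negative. */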
7606
7607 /* If we have (shift (logical)), move the logical to the outside
7608 to allow it to possibly combine with another logical and the
7609 shift to combine with another shift. This also canonicalizes to
7610 what a ZERO_EXTRACT looks like. Also, some machines have
7611 (and (shift)) insns. */
7612
7613 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
7614 && (new = simplify_binary_operation (code, result_mode,
7615 XEXP (varop, 1),
7616 GEN_INT (count))) != 0
7617 && merge_outer_ops (&outer_op, &outer_const, GET_CODE (varop),
7618 INTVAL (new), result_mode, &complement_p))
7619 {
7620 varop = XEXP (varop, 0);
7621 continue;
7622 }
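/* For example, (lshiftrt:SI (and:SI X 0xff00) 8) becomes
(and:SI (lshiftrt:SI X 8) 0xff), exposing both the shift and the
AND to further combination. */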
7623
7624 /* If we can't do that, try to simplify the shift in each arm of the
7625 logical expression, make a new logical expression, and apply
7626 the inverse distributive law. */
7627 {
7628 rtx lhs = simplify_shift_const (NULL_RTX, code, shift_mode,
7629 XEXP (varop, 0), count);
7630 rtx rhs = simplify_shift_const (NULL_RTX, code, shift_mode,
7631 XEXP (varop, 1), count);
7632
7633 varop = gen_binary (GET_CODE (varop), GET_MODE (varop), lhs, rhs);
7634 varop = apply_distributive_law (varop);
7635
7636 count = 0;
7637 }
7638 break;
7639
7640 case EQ:
7641 /* Convert (lshift (eq FOO 0) C) to (xor FOO 1) if STORE_FLAG_VALUE
7642 says that the sign bit can be tested, FOO has mode MODE, C is
7643 GET_MODE_BITSIZE (MODE) - 1, and only the low-order bit of FOO
7644 may be nonzero. */
7645 if (code == LSHIFT
7646 && XEXP (varop, 1) == const0_rtx
7647 && GET_MODE (XEXP (varop, 0)) == result_mode
7648 && count == GET_MODE_BITSIZE (result_mode) - 1
7649 && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
7650 && ((STORE_FLAG_VALUE
7651 & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (result_mode) - 1))))
7652 && nonzero_bits (XEXP (varop, 0), result_mode) == 1
7653 && merge_outer_ops (&outer_op, &outer_const, XOR,
7654 (HOST_WIDE_INT) 1, result_mode,
7655 &complement_p))
7656 {
7657 varop = XEXP (varop, 0);
7658 count = 0;
7659 continue;
7660 }
7661 break;
7662
7663 case NEG:
7664 /* (lshiftrt (neg A) C) where A is either 0 or 1 and C is one less
7665 than the number of bits in the mode is equivalent to A. */
7666 if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1
7667 && nonzero_bits (XEXP (varop, 0), result_mode) == 1)
7668 {
7669 varop = XEXP (varop, 0);
7670 count = 0;
7671 continue;
7672 }
7673
7674 /* NEG commutes with ASHIFT since it is multiplication. Move the
7675 NEG outside to allow shifts to combine. */
7676 if (code == ASHIFT
7677 && merge_outer_ops (&outer_op, &outer_const, NEG,
7678 (HOST_WIDE_INT) 0, result_mode,
7679 &complement_p))
7680 {
7681 varop = XEXP (varop, 0);
7682 continue;
7683 }
7684 break;
7685
7686 case PLUS:
7687 /* (lshiftrt (plus A -1) C) where A is either 0 or 1 and C
7688 is one less than the number of bits in the mode is
7689 equivalent to (xor A 1). */
7690 if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1
7691 && XEXP (varop, 1) == constm1_rtx
7692 && nonzero_bits (XEXP (varop, 0), result_mode) == 1
7693 && merge_outer_ops (&outer_op, &outer_const, XOR,
7694 (HOST_WIDE_INT) 1, result_mode,
7695 &complement_p))
7696 {
7697 count = 0;
7698 varop = XEXP (varop, 0);
7699 continue;
7700 }
7701
7702 /* If we have (xshiftrt (plus FOO BAR) C), and the only bits
7703 that might be nonzero in BAR are those being shifted out and those
7704 bits are known zero in FOO, we can replace the PLUS with FOO.
7705 Similarly in the other operand order. This code occurs when
7706 we are computing the size of a variable-size array. */
7707
7708 if ((code == ASHIFTRT || code == LSHIFTRT)
7709 && count < HOST_BITS_PER_WIDE_INT
7710 && nonzero_bits (XEXP (varop, 1), result_mode) >> count == 0
7711 && (nonzero_bits (XEXP (varop, 1), result_mode)
7712 & nonzero_bits (XEXP (varop, 0), result_mode)) == 0)
7713 {
7714 varop = XEXP (varop, 0);
7715 continue;
7716 }
7717 else if ((code == ASHIFTRT || code == LSHIFTRT)
7718 && count < HOST_BITS_PER_WIDE_INT
7719 && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
7720 && 0 == (nonzero_bits (XEXP (varop, 0), result_mode)
7721 >> count)
7722 && 0 == (nonzero_bits (XEXP (varop, 0), result_mode)
7723 & nonzero_bits (XEXP (varop, 1),
7724 result_mode)))
7725 {
7726 varop = XEXP (varop, 1);
7727 continue;
7728 }
7729
7730 /* (ashift (plus foo C) N) is (plus (ashift foo N) C'). */
7731 if (code == ASHIFT
7732 && GET_CODE (XEXP (varop, 1)) == CONST_INT
7733 && (new = simplify_binary_operation (ASHIFT, result_mode,
7734 XEXP (varop, 1),
7735 GEN_INT (count))) != 0
7736 && merge_outer_ops (&outer_op, &outer_const, PLUS,
7737 INTVAL (new), result_mode, &complement_p))
7738 {
7739 varop = XEXP (varop, 0);
7740 continue;
7741 }
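/* For example, (ashift:SI (plus:SI FOO 4) 2) becomes
(plus:SI (ashift:SI FOO 2) 16). */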
7742 break;
7743
7744 case MINUS:
7745 /* If we have (xshiftrt (minus (ashiftrt X C) X) C)
7746 with C the size of VAROP - 1 and the shift is logical if
7747 STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
7748 we have a (gt X 0) operation. If the shift is arithmetic with
7749 STORE_FLAG_VALUE of 1 or logical with STORE_FLAG_VALUE == -1,
7750 we have a (neg (gt X 0)) operation. */
7751
7752 if (GET_CODE (XEXP (varop, 0)) == ASHIFTRT
7753 && count == GET_MODE_BITSIZE (GET_MODE (varop)) - 1
7754 && (STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
7755 && (code == LSHIFTRT || code == ASHIFTRT)
7756 && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT
7757 && INTVAL (XEXP (XEXP (varop, 0), 1)) == count
7758 && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
7759 {
7760 count = 0;
7761 varop = gen_rtx_combine (GT, GET_MODE (varop), XEXP (varop, 1),
7762 const0_rtx);
7763
7764 if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
7765 varop = gen_rtx_combine (NEG, GET_MODE (varop), varop);
7766
7767 continue;
7768 }
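/* This holds because (ashiftrt X C) is 0 or -1 according to the sign
of X, so the difference is negative exactly when X > 0. */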
7769 break;
7770 }
7771
7772 break;
7773 }
7774
7775 /* We need to determine what mode to do the shift in. If the shift is
7776 an ASHIFTRT or ROTATE, we must always do it in the mode it was originally
7777 done in. Otherwise, we can do it in MODE, the widest mode encountered.
7778 The code we care about is that of the shift that will actually be done,
7779 not the shift that was originally requested. */
7780 shift_mode = (code == ASHIFTRT || code == ROTATE ? result_mode : mode);
7781
7782 /* We have now finished analyzing the shift. The result should be
7783 a shift of type CODE with SHIFT_MODE shifting VAROP COUNT places. If
7784 OUTER_OP is non-NIL, it is an operation that needs to be applied
7785 to the result of the shift. OUTER_CONST is the relevant constant,
7786 but we must turn off all bits turned off in the shift.
7787
7788 If we were passed a value for X, see if we can use any pieces of
7789 it. If not, make new rtx. */
7790
7791 if (x && GET_RTX_CLASS (GET_CODE (x)) == '2'
7792 && GET_CODE (XEXP (x, 1)) == CONST_INT
7793 && INTVAL (XEXP (x, 1)) == count)
7794 const_rtx = XEXP (x, 1);
7795 else
7796 const_rtx = GEN_INT (count);
7797
7798 if (x && GET_CODE (XEXP (x, 0)) == SUBREG
7799 && GET_MODE (XEXP (x, 0)) == shift_mode
7800 && SUBREG_REG (XEXP (x, 0)) == varop)
7801 varop = XEXP (x, 0);
7802 else if (GET_MODE (varop) != shift_mode)
7803 varop = gen_lowpart_for_combine (shift_mode, varop);
7804
7805 /* If we can't make the SUBREG, try to return what we were given. */
7806 if (GET_CODE (varop) == CLOBBER)
7807 return x ? x : varop;
7808
7809 new = simplify_binary_operation (code, shift_mode, varop, const_rtx);
7810 if (new != 0)
7811 x = new;
7812 else
7813 {
7814 if (x == 0 || GET_CODE (x) != code || GET_MODE (x) != shift_mode)
7815 x = gen_rtx_combine (code, shift_mode, varop, const_rtx);
7816
7817 SUBST (XEXP (x, 0), varop);
7818 SUBST (XEXP (x, 1), const_rtx);
7819 }
7820
7821 /* If we have an outer operation and we just made a shift, it is
7822 possible that we could have simplified the shift were it not
7823 for the outer operation. So try to do the simplification
7824 recursively. */
7825
7826 if (outer_op != NIL && GET_CODE (x) == code
7827 && GET_CODE (XEXP (x, 1)) == CONST_INT)
7828 x = simplify_shift_const (x, code, shift_mode, XEXP (x, 0),
7829 INTVAL (XEXP (x, 1)));
7830
7831 /* If we were doing a LSHIFTRT in a wider mode than it was originally,
7832 turn off all the bits that the shift would have turned off. */
7833 if (orig_code == LSHIFTRT && result_mode != shift_mode)
7834 x = simplify_and_const_int (NULL_RTX, shift_mode, x,
7835 GET_MODE_MASK (result_mode) >> orig_count);
7836
7837 /* Do the remainder of the processing in RESULT_MODE. */
7838 x = gen_lowpart_for_combine (result_mode, x);
7839
7840 /* If COMPLEMENT_P is set, we have to complement X before doing the outer
7841 operation. */
7842 if (complement_p)
7843 x = gen_unary (NOT, result_mode, x);
7844
7845 if (outer_op != NIL)
7846 {
7847 if (GET_MODE_BITSIZE (result_mode) < HOST_BITS_PER_WIDE_INT)
7848 outer_const &= GET_MODE_MASK (result_mode);
7849
7850 if (outer_op == AND)
7851 x = simplify_and_const_int (NULL_RTX, result_mode, x, outer_const);
7852 else if (outer_op == SET)
7853 /* This means that we have determined that the result is
7854 equivalent to a constant. This should be rare. */
7855 x = GEN_INT (outer_const);
7856 else if (GET_RTX_CLASS (outer_op) == '1')
7857 x = gen_unary (outer_op, result_mode, x);
7858 else
7859 x = gen_binary (outer_op, result_mode, x, GEN_INT (outer_const));
7860 }
7861
7862 return x;
7863}
7864\f
7865/* Like recog, but we receive the address of a pointer to a new pattern.
7866 We try to match the rtx that the pointer points to.
7867 If that fails, we may try to modify or replace the pattern,
7868 storing the replacement into the same pointer object.
7869
7870 Modifications include deletion or addition of CLOBBERs.
7871
7872 PNOTES is a pointer to a location where any REG_UNUSED notes added for
7873 the CLOBBERs are placed.
7874
7875 The value is the final insn code from the pattern ultimately matched,
7876 or -1. */
7877
7878static int
7879recog_for_combine (pnewpat, insn, pnotes)
7880 rtx *pnewpat;
7881 rtx insn;
7882 rtx *pnotes;
7883{
7884 register rtx pat = *pnewpat;
7885 int insn_code_number;
7886 int num_clobbers_to_add = 0;
7887 int i;
7888 rtx notes = 0;
7889
7890 /* If PAT is a PARALLEL, check to see if it contains the CLOBBER
7891 we use to indicate that something didn't match. If we find such a
7892 thing, force rejection. */
7893 if (GET_CODE (pat) == PARALLEL)
7894 for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
7895 if (GET_CODE (XVECEXP (pat, 0, i)) == CLOBBER
7896 && XEXP (XVECEXP (pat, 0, i), 0) == const0_rtx)
7897 return -1;
7898
7899 /* Is the result of combination a valid instruction? */
7900 insn_code_number = recog (pat, insn, &num_clobbers_to_add);
7901
7902 /* If it isn't, there is the possibility that we previously had an insn
7903 that clobbered some register as a side effect, but the combined
7904 insn doesn't need to do that. So try once more without the clobbers
7905 unless this represents an ASM insn. */
7906
7907 if (insn_code_number < 0 && ! check_asm_operands (pat)
7908 && GET_CODE (pat) == PARALLEL)
7909 {
7910 int pos;
7911
7912 for (pos = 0, i = 0; i < XVECLEN (pat, 0); i++)
7913 if (GET_CODE (XVECEXP (pat, 0, i)) != CLOBBER)
7914 {
7915 if (i != pos)
7916 SUBST (XVECEXP (pat, 0, pos), XVECEXP (pat, 0, i));
7917 pos++;
7918 }
7919
7920 SUBST_INT (XVECLEN (pat, 0), pos);
7921
7922 if (pos == 1)
7923 pat = XVECEXP (pat, 0, 0);
7924
7925 insn_code_number = recog (pat, insn, &num_clobbers_to_add);
7926 }
7927
7928 /* If we had any clobbers to add, make a new pattern that contains
7929 them. Then check to make sure that all of them are dead. */
7930 if (num_clobbers_to_add)
7931 {
7932 rtx newpat = gen_rtx (PARALLEL, VOIDmode,
7933 gen_rtvec (GET_CODE (pat) == PARALLEL
7934 ? XVECLEN (pat, 0) + num_clobbers_to_add
7935 : num_clobbers_to_add + 1));
7936
7937 if (GET_CODE (pat) == PARALLEL)
7938 for (i = 0; i < XVECLEN (pat, 0); i++)
7939 XVECEXP (newpat, 0, i) = XVECEXP (pat, 0, i);
7940 else
7941 XVECEXP (newpat, 0, 0) = pat;
7942
7943 add_clobbers (newpat, insn_code_number);
7944
7945 for (i = XVECLEN (newpat, 0) - num_clobbers_to_add;
7946 i < XVECLEN (newpat, 0); i++)
7947 {
7948 if (GET_CODE (XEXP (XVECEXP (newpat, 0, i), 0)) == REG
7949 && ! reg_dead_at_p (XEXP (XVECEXP (newpat, 0, i), 0), insn))
7950 return -1;
7951 notes = gen_rtx (EXPR_LIST, REG_UNUSED,
7952 XEXP (XVECEXP (newpat, 0, i), 0), notes);
7953 }
7954 pat = newpat;
7955 }
7956
7957 *pnewpat = pat;
7958 *pnotes = notes;
7959
7960 return insn_code_number;
7961}
7962\f
7963/* Like gen_lowpart but for use by combine. In combine it is not possible
7964 to create any new pseudoregs. However, it is safe to create
7965 invalid memory addresses, because combine will try to recognize
7966 them and all they will do is make the combine attempt fail.
7967
7968 If for some reason this cannot do its job, an rtx
7969 (clobber (const_int 0)) is returned.
7970 An insn containing that will not be recognized. */
7971
7972#undef gen_lowpart
7973
7974static rtx
7975gen_lowpart_for_combine (mode, x)
7976 enum machine_mode mode;
7977 register rtx x;
7978{
7979 rtx result;
7980
7981 if (GET_MODE (x) == mode)
7982 return x;
7983
7984 /* We can only support MODE being wider than a word if X is a
7985 constant integer or has a mode the same size. */
7986
7987 if (GET_MODE_SIZE (mode) > UNITS_PER_WORD
7988 && ! ((GET_MODE (x) == VOIDmode
7989 && (GET_CODE (x) == CONST_INT
7990 || GET_CODE (x) == CONST_DOUBLE))
7991 || GET_MODE_SIZE (GET_MODE (x)) == GET_MODE_SIZE (mode)))
7992 return gen_rtx (CLOBBER, GET_MODE (x), const0_rtx);
7993
7994 /* X might be a paradoxical (subreg (mem)). In that case, gen_lowpart
7995 won't know what to do. So we will strip off the SUBREG here and
7996 process normally. */
7997 if (GET_CODE (x) == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
7998 {
7999 x = SUBREG_REG (x);
8000 if (GET_MODE (x) == mode)
8001 return x;
8002 }
8003
8004 result = gen_lowpart_common (mode, x);
8005 if (result)
8006 return result;
8007
8008 if (GET_CODE (x) == MEM)
8009 {
8010 register int offset = 0;
8011 rtx new;
8012
8013 /* Refuse to work on a volatile memory ref or one with a mode-dependent
8014 address. */
8015 if (MEM_VOLATILE_P (x) || mode_dependent_address_p (XEXP (x, 0)))
8016 return gen_rtx (CLOBBER, GET_MODE (x), const0_rtx);
8017
8018 /* If we want to refer to something bigger than the original memref,
8019 generate a perverse subreg instead. That will force a reload
8020 of the original memref X. */
8021 if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (mode))
8022 return gen_rtx (SUBREG, mode, x, 0);
8023
8024#if WORDS_BIG_ENDIAN
8025 offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
8026 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
8027#endif
8028#if BYTES_BIG_ENDIAN
8029 /* Adjust the address so that the address-after-the-data
8030 is unchanged. */
8031 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
8032 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
8033#endif
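/* For example, taking the QImode lowpart of an SImode MEM on a
machine with big-endian words and bytes and 4-byte words yields
OFFSET == 3, addressing the low-order byte while leaving the
address just past the data unchanged. */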
8034 new = gen_rtx (MEM, mode, plus_constant (XEXP (x, 0), offset));
8035 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
8036 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x);
8037 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
8038 return new;
8039 }
8040
8041 /* If X is a comparison operator, rewrite it in a new mode. This
8042 probably won't match, but may allow further simplifications. */
8043 else if (GET_RTX_CLASS (GET_CODE (x)) == '<')
8044 return gen_rtx_combine (GET_CODE (x), mode, XEXP (x, 0), XEXP (x, 1));
8045
8046 /* If we couldn't simplify X any other way, just enclose it in a
8047 SUBREG. Normally, this SUBREG won't match, but some patterns may
8048 include an explicit SUBREG or we may simplify it further in combine. */
8049 else
8050 {
8051 int word = 0;
8052
8053 if (WORDS_BIG_ENDIAN && GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD)
8054 word = ((GET_MODE_SIZE (GET_MODE (x))
8055 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD))
8056 / UNITS_PER_WORD);
8057 return gen_rtx (SUBREG, mode, x, word);
8058 }
8059}
8060\f
8061/* Make an rtx expression. This is a subset of gen_rtx and only supports
8062 expressions of 1, 2, or 3 operands, each of which are rtx expressions.
8063
8064 If the identical expression was previously in the insn (in the undobuf),
8065 it will be returned. Only if it is not found will a new expression
8066 be made. */
8067
8068/*VARARGS2*/
8069static rtx
8070gen_rtx_combine (va_alist)
8071 va_dcl
8072{
8073 va_list p;
8074 enum rtx_code code;
8075 enum machine_mode mode;
8076 int n_args;
8077 rtx args[3];
8078 int i, j;
8079 char *fmt;
8080 rtx rt;
8081
8082 va_start (p);
8083 code = va_arg (p, enum rtx_code);
8084 mode = va_arg (p, enum machine_mode);
8085 n_args = GET_RTX_LENGTH (code);
8086 fmt = GET_RTX_FORMAT (code);
8087
8088 if (n_args == 0 || n_args > 3)
8089 abort ();
8090
8091 /* Get each arg and verify that it is supposed to be an expression. */
8092 for (j = 0; j < n_args; j++)
8093 {
8094 if (*fmt++ != 'e')
8095 abort ();
8096
8097 args[j] = va_arg (p, rtx);
8098 }
8099
8100 /* See if this is in undobuf. Be sure we don't use objects that came
8101 from another insn; this could produce circular rtl structures. */
8102
8103 for (i = previous_num_undos; i < undobuf.num_undo; i++)
8104 if (!undobuf.undo[i].is_int
8105 && GET_CODE (undobuf.undo[i].old_contents.r) == code
8106 && GET_MODE (undobuf.undo[i].old_contents.r) == mode)
8107 {
8108 for (j = 0; j < n_args; j++)
8109 if (XEXP (undobuf.undo[i].old_contents.r, j) != args[j])
8110 break;
8111
8112 if (j == n_args)
8113 return undobuf.undo[i].old_contents.r;
8114 }
8115
8116 /* Otherwise make a new rtx. We know we have 1, 2, or 3 args.
8117 Use rtx_alloc instead of gen_rtx because it's faster on RISC. */
8118 rt = rtx_alloc (code);
8119 PUT_MODE (rt, mode);
8120 XEXP (rt, 0) = args[0];
8121 if (n_args > 1)
8122 {
8123 XEXP (rt, 1) = args[1];
8124 if (n_args > 2)
8125 XEXP (rt, 2) = args[2];
8126 }
8127 return rt;
8128}
8129
8130/* These routines make binary and unary operations by first seeing if they
8131 fold; if not, a new expression is allocated. */
8132
8133static rtx
8134gen_binary (code, mode, op0, op1)
8135 enum rtx_code code;
8136 enum machine_mode mode;
8137 rtx op0, op1;
8138{
8139 rtx result;
8140 rtx tem;
8141
8142 if (GET_RTX_CLASS (code) == 'c'
8143 && (GET_CODE (op0) == CONST_INT
8144 || (CONSTANT_P (op0) && GET_CODE (op1) != CONST_INT)))
8145 tem = op0, op0 = op1, op1 = tem;
8146
8147 if (GET_RTX_CLASS (code) == '<')
8148 {
8149 enum machine_mode op_mode = GET_MODE (op0);
8150 if (op_mode == VOIDmode)
8151 op_mode = GET_MODE (op1);
8152 result = simplify_relational_operation (code, op_mode, op0, op1);
8153 }
8154 else
8155 result = simplify_binary_operation (code, mode, op0, op1);
8156
8157 if (result)
8158 return result;
8159
8160 /* Put complex operands first and constants second. */
8161 if (GET_RTX_CLASS (code) == 'c'
8162 && ((CONSTANT_P (op0) && GET_CODE (op1) != CONST_INT)
8163 || (GET_RTX_CLASS (GET_CODE (op0)) == 'o'
8164 && GET_RTX_CLASS (GET_CODE (op1)) != 'o')
8165 || (GET_CODE (op0) == SUBREG
8166 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (op0))) == 'o'
8167 && GET_RTX_CLASS (GET_CODE (op1)) != 'o')))
8168 return gen_rtx_combine (code, mode, op1, op0);
8169
8170 return gen_rtx_combine (code, mode, op0, op1);
8171}
8172
8173static rtx
8174gen_unary (code, mode, op0)
8175 enum rtx_code code;
8176 enum machine_mode mode;
8177 rtx op0;
8178{
8179 rtx result = simplify_unary_operation (code, mode, op0, mode);
8180
8181 if (result)
8182 return result;
8183
8184 return gen_rtx_combine (code, mode, op0);
8185}
8186\f
8187/* Simplify a comparison between *POP0 and *POP1 where CODE is the
8188 comparison code that will be tested.
8189
8190 The result is a possibly different comparison code to use. *POP0 and
8191 *POP1 may be updated.
8192
8193 It is possible that we might detect that a comparison is either always
8194 true or always false. However, we do not perform general constant
8195 folding in combine, so this knowledge isn't useful. Such tautologies
8196 should have been detected earlier. Hence we ignore all such cases. */
8197
8198static enum rtx_code
8199simplify_comparison (code, pop0, pop1)
8200 enum rtx_code code;
8201 rtx *pop0;
8202 rtx *pop1;
8203{
8204 rtx op0 = *pop0;
8205 rtx op1 = *pop1;
8206 rtx tem, tem1;
8207 int i;
8208 enum machine_mode mode, tmode;
8209
8210 /* Try a few ways of applying the same transformation to both operands. */
8211 while (1)
8212 {
8213 /* If both operands are the same constant shift, see if we can ignore the
8214 shift. We can if the shift is a rotate or if the bits shifted out of
8215 this shift are known to be zero for both inputs and if the type of
8216 comparison is compatible with the shift. */
8217 if (GET_CODE (op0) == GET_CODE (op1)
8218 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
8219 && ((GET_CODE (op0) == ROTATE && (code == NE || code == EQ))
8220 || ((GET_CODE (op0) == LSHIFTRT
8221 || GET_CODE (op0) == ASHIFT || GET_CODE (op0) == LSHIFT)
8222 && (code != GT && code != LT && code != GE && code != LE))
8223 || (GET_CODE (op0) == ASHIFTRT
8224 && (code != GTU && code != LTU
8225 && code != GEU && code != LEU)))
8226 && GET_CODE (XEXP (op0, 1)) == CONST_INT
8227 && INTVAL (XEXP (op0, 1)) >= 0
8228 && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
8229 && XEXP (op0, 1) == XEXP (op1, 1))
8230 {
8231 enum machine_mode mode = GET_MODE (op0);
8232 unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
8233 int shift_count = INTVAL (XEXP (op0, 1));
8234
8235 if (GET_CODE (op0) == LSHIFTRT || GET_CODE (op0) == ASHIFTRT)
8236 mask &= (mask >> shift_count) << shift_count;
8237 else if (GET_CODE (op0) == ASHIFT || GET_CODE (op0) == LSHIFT)
8238 mask = (mask & (mask << shift_count)) >> shift_count;
8239
8240 if ((nonzero_bits (XEXP (op0, 0), mode) & ~ mask) == 0
8241 && (nonzero_bits (XEXP (op1, 0), mode) & ~ mask) == 0)
8242 op0 = XEXP (op0, 0), op1 = XEXP (op1, 0);
8243 else
8244 break;
8245 }
8246
8247 /* If both operands are AND's of a paradoxical SUBREG by constant, the
8248 SUBREGs are of the same mode, and, in both cases, the AND would
8249 be redundant if the comparison was done in the narrower mode,
8250 do the comparison in the narrower mode (e.g., we are AND'ing with 1
8251 and the operand's possibly nonzero bits are 0xffffff01; in that case
8252 if we only care about QImode, we don't need the AND). This case
8253 occurs if the output mode of an scc insn is not SImode and
8254 STORE_FLAG_VALUE == 1 (e.g., the 386). */
8255
8256 else if (GET_CODE (op0) == AND && GET_CODE (op1) == AND
8257 && GET_CODE (XEXP (op0, 1)) == CONST_INT
8258 && GET_CODE (XEXP (op1, 1)) == CONST_INT
8259 && GET_CODE (XEXP (op0, 0)) == SUBREG
8260 && GET_CODE (XEXP (op1, 0)) == SUBREG
8261 && (GET_MODE_SIZE (GET_MODE (XEXP (op0, 0)))
8262 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (op0, 0)))))
8263 && (GET_MODE (SUBREG_REG (XEXP (op0, 0)))
8264 == GET_MODE (SUBREG_REG (XEXP (op1, 0))))
8265 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (XEXP (op0, 0))))
8266 <= HOST_BITS_PER_WIDE_INT)
8267 && (nonzero_bits (SUBREG_REG (XEXP (op0, 0)),
8268 GET_MODE (SUBREG_REG (XEXP (op0, 0))))
8269 & ~ INTVAL (XEXP (op0, 1))) == 0
8270 && (nonzero_bits (SUBREG_REG (XEXP (op1, 0)),
8271 GET_MODE (SUBREG_REG (XEXP (op1, 0))))
8272 & ~ INTVAL (XEXP (op1, 1))) == 0)
8273 {
8274 op0 = SUBREG_REG (XEXP (op0, 0));
8275 op1 = SUBREG_REG (XEXP (op1, 0));
8276
8277 /* the resulting comparison is always unsigned since we masked off
8278 the original sign bit. */
8279 code = unsigned_condition (code);
8280 }
8281 else
8282 break;
8283 }
8284
8285 /* If the first operand is a constant, swap the operands and adjust the
8286 comparison code appropriately. */
8287 if (CONSTANT_P (op0))
8288 {
8289 tem = op0, op0 = op1, op1 = tem;
8290 code = swap_condition (code);
8291 }
8292
8293 /* We now enter a loop during which we will try to simplify the comparison.
8294 For the most part, we only are concerned with comparisons with zero,
8295 but some things may really be comparisons with zero but not start
8296 out looking that way. */
8297
8298 while (GET_CODE (op1) == CONST_INT)
8299 {
8300 enum machine_mode mode = GET_MODE (op0);
8301 int mode_width = GET_MODE_BITSIZE (mode);
8302 unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
8303 int equality_comparison_p;
8304 int sign_bit_comparison_p;
8305 int unsigned_comparison_p;
8306 HOST_WIDE_INT const_op;
8307
8308 /* We only want to handle integral modes. This catches VOIDmode,
8309 CCmode, and the floating-point modes. An exception is that we
8310 can handle VOIDmode if OP0 is a COMPARE or a comparison
8311 operation. */
8312
8313 if (GET_MODE_CLASS (mode) != MODE_INT
8314 && ! (mode == VOIDmode
8315 && (GET_CODE (op0) == COMPARE
8316 || GET_RTX_CLASS (GET_CODE (op0)) == '<')))
8317 break;
8318
8319 /* Get the constant we are comparing against and turn off all bits
8320 not on in our mode. */
8321 const_op = INTVAL (op1);
8322 if (mode_width <= HOST_BITS_PER_WIDE_INT)
8323 const_op &= mask;
8324
8325 /* If we are comparing against a constant power of two and the value
8326 being compared can only have that single bit nonzero (e.g., it was
8327 `and'ed with that bit), we can replace this with a comparison
8328 with zero. */
8329 if (const_op
8330 && (code == EQ || code == NE || code == GE || code == GEU
8331 || code == LT || code == LTU)
8332 && mode_width <= HOST_BITS_PER_WIDE_INT
8333 && exact_log2 (const_op) >= 0
8334 && nonzero_bits (op0, mode) == const_op)
8335 {
8336 code = (code == EQ || code == GE || code == GEU ? NE : EQ);
8337 op1 = const0_rtx, const_op = 0;
8338 }
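/* For example, if OP0 is (and X 8), the only bit that can be nonzero
is the constant itself, so "(and X 8) == 8" becomes
"(and X 8) != 0". */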
8339
8340 /* Similarly, if we are comparing a value known to be either -1 or
8341 0 with -1, change it to the opposite comparison against zero. */
8342
8343 if (const_op == -1
8344 && (code == EQ || code == NE || code == GT || code == LE
8345 || code == GEU || code == LTU)
8346 && num_sign_bit_copies (op0, mode) == mode_width)
8347 {
8348 code = (code == EQ || code == LE || code == GEU ? NE : EQ);
8349 op1 = const0_rtx, const_op = 0;
8350 }
8351
8352 /* Do some canonicalizations based on the comparison code. We prefer
8353 comparisons against zero and then prefer equality comparisons.
8354 If we can reduce the size of a constant, we will do that too. */
8355
8356 switch (code)
8357 {
8358 case LT:
8359 /* < C is equivalent to <= (C - 1) */
8360 if (const_op > 0)
8361 {
8362 const_op -= 1;
8363 op1 = GEN_INT (const_op);
8364 code = LE;
8365 /* ... fall through to LE case below. */
8366 }
8367 else
8368 break;
8369
8370 case LE:
8371 /* <= C is equivalent to < (C + 1); we do this for C < 0 */
8372 if (const_op < 0)
8373 {
8374 const_op += 1;
8375 op1 = GEN_INT (const_op);
8376 code = LT;
8377 }
230d793d
RS
8378
8379 /* If we are doing a <= 0 comparison on a value known to have
8380 a zero sign bit, we can replace this with == 0. */
8381 else if (const_op == 0
8382 && mode_width <= HOST_BITS_PER_WIDE_INT
8383 && (nonzero_bits (op0, mode)
8384 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
8385 code = EQ;
8386 break;
8387
8388 case GE:
8389 /* >= C is equivalent to > (C - 1). */
8390 if (const_op > 0)
8391 {
8392 const_op -= 1;
8393 op1 = GEN_INT (const_op);
8394 code = GT;
8395 /* ... fall through to GT below. */
8396 }
8397 else
8398 break;
8399
8400 case GT:
8401 /* > C is equivalent to >= (C + 1); we do this for C < 0. */
8402 if (const_op < 0)
8403 {
8404 const_op += 1;
8405 op1 = GEN_INT (const_op);
8406 code = GE;
8407 }
230d793d
RS
8408
8409 /* If we are doing a > 0 comparison on a value known to have
8410 a zero sign bit, we can replace this with != 0. */
8411 else if (const_op == 0
8412 && mode_width <= HOST_BITS_PER_WIDE_INT
8413 && (nonzero_bits (op0, mode)
8414 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
8415 code = NE;
8416 break;
8417
8418 case LTU:
8419 /* (unsigned) < C is equivalent to (unsigned) <= (C - 1). */
8420 if (const_op > 0)
8421 {
8422 const_op -= 1;
8423 op1 = GEN_INT (const_op);
8424 code = LEU;
8425 /* ... fall through ... */
8426 }
8427
8428 /* (unsigned) < 0x80000000 is equivalent to >= 0. */
8429 else if (const_op == (HOST_WIDE_INT) 1 << (mode_width - 1))
8430 {
8431 const_op = 0, op1 = const0_rtx;
8432 code = GE;
8433 break;
8434 }
8435 else
8436 break;
230d793d
RS
8437
8438 case LEU:
8439 /* unsigned <= 0 is equivalent to == 0 */
8440 if (const_op == 0)
8441 code = EQ;
8442
8443 /* (unsigned) <= 0x7fffffff is equivalent to >= 0. */
8444 else if (const_op == ((HOST_WIDE_INT) 1 << (mode_width - 1)) - 1)
8445 {
8446 const_op = 0, op1 = const0_rtx;
8447 code = GE;
8448 }
8449 break;
8450
8451 case GEU:
8452 /* (unsigned) >= C is equivalent to (unsigned) > (C - 1). */
8453 if (const_op > 1)
8454 {
8455 const_op -= 1;
8456 op1 = GEN_INT (const_op);
8457 code = GTU;
8458 /* ... fall through ... */
8459 }
8460
8461 /* (unsigned) >= 0x80000000 is equivalent to < 0. */
8462 else if (const_op == (HOST_WIDE_INT) 1 << (mode_width - 1))
8463 {
8464 const_op = 0, op1 = const0_rtx;
8465 code = LT;
8466 }
8467 else
8468 break;
8469
8470 case GTU:
8471 /* unsigned > 0 is equivalent to != 0 */
8472 if (const_op == 0)
8473 code = NE;
8474
8475 /* (unsigned) > 0x7fffffff is equivalent to < 0. */
8476 else if (const_op == ((HOST_WIDE_INT) 1 << (mode_width - 1)) - 1)
8477 {
8478 const_op = 0, op1 = const0_rtx;
8479 code = LT;
8480 }
8481 break;
8482 }
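/* For example, "OP0 < 5" has now become "OP0 <= 4", and an unsigned
"OP0 >= 0x80000000" in SImode has become the signed test "OP0 < 0". */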
8483
8484 /* Compute some predicates to simplify code below. */
8485
8486 equality_comparison_p = (code == EQ || code == NE);
8487 sign_bit_comparison_p = ((code == LT || code == GE) && const_op == 0);
8488 unsigned_comparison_p = (code == LTU || code == LEU || code == GTU
8489 || code == GEU);
8490
8491 /* If this is a sign bit comparison and we can do arithmetic in
8492 MODE, say that we will only be needing the sign bit of OP0. */
8493 if (sign_bit_comparison_p
8494 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
8495 op0 = force_to_mode (op0, mode,
8496 ((HOST_WIDE_INT) 1
8497 << (GET_MODE_BITSIZE (mode) - 1)),
8498 NULL_RTX);
8499
8500 /* Now try cases based on the opcode of OP0. If none of the cases
8501 does a "continue", we exit this loop immediately after the
8502 switch. */
8503
8504 switch (GET_CODE (op0))
8505 {
8506 case ZERO_EXTRACT:
8507 /* If we are extracting a single bit from a variable position in
8508 a constant that has only a single bit set and are comparing it
8509 with zero, we can convert this into an equality comparison
8510 between the position and the location of the single bit. We can't
8511 do this if bit endian and we don't have an extzv since we then
8512 can't know what mode to use for the endianness adjustment. */
8513
8514#if ! BITS_BIG_ENDIAN || defined (HAVE_extzv)
8515 if (GET_CODE (XEXP (op0, 0)) == CONST_INT
8516 && XEXP (op0, 1) == const1_rtx
8517 && equality_comparison_p && const_op == 0
8518 && (i = exact_log2 (INTVAL (XEXP (op0, 0)))) >= 0)
8519 {
8520#if BITS_BIG_ENDIAN
8521 i = (GET_MODE_BITSIZE
8522 (insn_operand_mode[(int) CODE_FOR_extzv][1]) - 1 - i);
8523#endif
8524
8525 op0 = XEXP (op0, 2);
5f4f0e22 8526 op1 = GEN_INT (i);
230d793d
RS
8527 const_op = i;
8528
8529 /* Result is nonzero iff shift count is equal to I. */
8530 code = reverse_condition (code);
8531 continue;
8532 }
8533#endif
8534
8535 /* ... fall through ... */
8536
8537 case SIGN_EXTRACT:
8538 tem = expand_compound_operation (op0);
8539 if (tem != op0)
8540 {
8541 op0 = tem;
8542 continue;
8543 }
8544 break;
8545
8546 case NOT:
8547 /* If testing for equality, we can take the NOT of the constant. */
8548 if (equality_comparison_p
8549 && (tem = simplify_unary_operation (NOT, mode, op1, mode)) != 0)
8550 {
8551 op0 = XEXP (op0, 0);
8552 op1 = tem;
8553 continue;
8554 }
8555
8556 /* If just looking at the sign bit, reverse the sense of the
8557 comparison. */
8558 if (sign_bit_comparison_p)
8559 {
8560 op0 = XEXP (op0, 0);
8561 code = (code == GE ? LT : GE);
8562 continue;
8563 }
8564 break;
8565
8566 case NEG:
8567 /* If testing for equality, we can take the NEG of the constant. */
8568 if (equality_comparison_p
8569 && (tem = simplify_unary_operation (NEG, mode, op1, mode)) != 0)
8570 {
8571 op0 = XEXP (op0, 0);
8572 op1 = tem;
8573 continue;
8574 }
8575
8576 /* The remaining cases only apply to comparisons with zero. */
8577 if (const_op != 0)
8578 break;
8579
8580 /* When X is ABS or is known positive,
8581 (neg X) is < 0 if and only if X != 0. */
8582
8583 if (sign_bit_comparison_p
8584 && (GET_CODE (XEXP (op0, 0)) == ABS
8585 || (mode_width <= HOST_BITS_PER_WIDE_INT
8586 && (nonzero_bits (XEXP (op0, 0), mode)
8587 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)))
8588 {
8589 op0 = XEXP (op0, 0);
8590 code = (code == LT ? NE : EQ);
8591 continue;
8592 }
8593
8594 /* If we have NEG of something whose two high-order bits are the
8595 same, we know that "(-a) < 0" is equivalent to "a > 0". */
8596 if (num_sign_bit_copies (op0, mode) >= 2)
8597 {
8598 op0 = XEXP (op0, 0);
8599 code = swap_condition (code);
8600 continue;
8601 }
8602 break;
8603
8604 case ROTATE:
8605 /* If we are testing equality and our count is a constant, we
8606 can perform the inverse operation on our RHS. */
8607 if (equality_comparison_p && GET_CODE (XEXP (op0, 1)) == CONST_INT
8608 && (tem = simplify_binary_operation (ROTATERT, mode,
8609 op1, XEXP (op0, 1))) != 0)
8610 {
8611 op0 = XEXP (op0, 0);
8612 op1 = tem;
8613 continue;
8614 }
8615
8616 /* If we are doing a < 0 or >= 0 comparison, it means we are testing
8617 a particular bit. Convert it to an AND of a constant of that
8618 bit. This will be converted into a ZERO_EXTRACT. */
8619 if (const_op == 0 && sign_bit_comparison_p
8620 && GET_CODE (XEXP (op0, 1)) == CONST_INT
5f4f0e22 8621 && mode_width <= HOST_BITS_PER_WIDE_INT)
230d793d 8622 {
8623 op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
8624 ((HOST_WIDE_INT) 1
8625 << (mode_width - 1
8626 - INTVAL (XEXP (op0, 1)))));
8627 code = (code == LT ? NE : EQ);
8628 continue;
8629 }
8630
8631 /* ... fall through ... */
8632
8633 case ABS:
8634 /* ABS is ignorable inside an equality comparison with zero. */
8635 if (const_op == 0 && equality_comparison_p)
8636 {
8637 op0 = XEXP (op0, 0);
8638 continue;
8639 }
8640 break;
8641
8642
8643 case SIGN_EXTEND:
8644 /* Can simplify (compare (zero/sign_extend FOO) CONST)
8645 to (compare FOO CONST) if CONST fits in FOO's mode and we
8646 are either testing inequality or have an unsigned comparison
8647 with ZERO_EXTEND or a signed comparison with SIGN_EXTEND. */
8648 if (! unsigned_comparison_p
8649 && (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
8650 <= HOST_BITS_PER_WIDE_INT)
8651 && ((unsigned HOST_WIDE_INT) const_op
8652 < (((HOST_WIDE_INT) 1
8653 << (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0))) - 1)))))
8654 {
8655 op0 = XEXP (op0, 0);
8656 continue;
8657 }
8658 break;
8659
8660 case SUBREG:
8661 /* Check for the case where we are comparing A - C1 with C2,
8662 both constants are smaller than 1/2 the maximum positive
8663 value in MODE, and the comparison is equality or unsigned.
8664 In that case, if A is either zero-extended to MODE or has
8665 sufficient sign bits so that the high-order bit in MODE
8666 is a copy of the sign in the inner mode, we can prove that it is
8667 safe to do the operation in the wider mode. This simplifies
8668 many range checks. */
8669
8670 if (mode_width <= HOST_BITS_PER_WIDE_INT
8671 && subreg_lowpart_p (op0)
8672 && GET_CODE (SUBREG_REG (op0)) == PLUS
8673 && GET_CODE (XEXP (SUBREG_REG (op0), 1)) == CONST_INT
8674 && INTVAL (XEXP (SUBREG_REG (op0), 1)) < 0
8675 && (- INTVAL (XEXP (SUBREG_REG (op0), 1))
8676 < GET_MODE_MASK (mode) / 2)
8677 && (unsigned HOST_WIDE_INT) const_op < GET_MODE_MASK (mode) / 2
8678 && (0 == (nonzero_bits (XEXP (SUBREG_REG (op0), 0),
8679 GET_MODE (SUBREG_REG (op0)))
8680 & ~ GET_MODE_MASK (mode))
8681 || (num_sign_bit_copies (XEXP (SUBREG_REG (op0), 0),
8682 GET_MODE (SUBREG_REG (op0)))
8683 > (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0)))
8684 - GET_MODE_BITSIZE (mode)))))
8685 {
8686 op0 = SUBREG_REG (op0);
8687 continue;
8688 }
8689
8690 /* If the inner mode is narrower and we are extracting the low part,
8691 we can treat the SUBREG as if it were a ZERO_EXTEND. */
8692 if (subreg_lowpart_p (op0)
8693 && GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0))) < mode_width)
8694 /* Fall through */ ;
8695 else
230d793d
RS
8696 break;
8697
8698 /* ... fall through ... */
8699
8700 case ZERO_EXTEND:
8701 if ((unsigned_comparison_p || equality_comparison_p)
8702 && (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
8703 <= HOST_BITS_PER_WIDE_INT)
8704 && ((unsigned HOST_WIDE_INT) const_op
8705 < GET_MODE_MASK (GET_MODE (XEXP (op0, 0)))))
8706 {
8707 op0 = XEXP (op0, 0);
8708 continue;
8709 }
8710 break;
8711
8712 case PLUS:
8713 /* (eq (plus X A) B) -> (eq X (minus B A)). We can only do
8714 this for equality comparisons due to pathological cases involving
8715 overflows. */
8716 if (equality_comparison_p
8717 && 0 != (tem = simplify_binary_operation (MINUS, mode,
8718 op1, XEXP (op0, 1))))
8719 {
8720 op0 = XEXP (op0, 0);
8721 op1 = tem;
8722 continue;
8723 }
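/* For example, (eq (plus X 3) (const_int 7)) becomes
(eq X (const_int 4)). */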
8724
8725 /* (plus (abs X) (const_int -1)) is < 0 if and only if X == 0. */
8726 if (const_op == 0 && XEXP (op0, 1) == constm1_rtx
8727 && GET_CODE (XEXP (op0, 0)) == ABS && sign_bit_comparison_p)
8728 {
8729 op0 = XEXP (XEXP (op0, 0), 0);
8730 code = (code == LT ? EQ : NE);
8731 continue;
8732 }
8733 break;
8734
8735 case MINUS:
8736 /* (eq (minus A B) C) -> (eq A (plus B C)) or
8737 (eq B (minus A C)), whichever simplifies. We can only do
8738 this for equality comparisons due to pathological cases involving
8739 overflows. */
8740 if (equality_comparison_p
8741 && 0 != (tem = simplify_binary_operation (PLUS, mode,
8742 XEXP (op0, 1), op1)))
8743 {
8744 op0 = XEXP (op0, 0);
8745 op1 = tem;
8746 continue;
8747 }
8748
8749 if (equality_comparison_p
8750 && 0 != (tem = simplify_binary_operation (MINUS, mode,
8751 XEXP (op0, 0), op1)))
8752 {
8753 op0 = XEXP (op0, 1);
8754 op1 = tem;
8755 continue;
8756 }
8757
8758 /* The sign bit of (minus (ashiftrt X C) X), where C is the number
8759 of bits in X minus 1, is one iff X > 0. */
8760 if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == ASHIFTRT
8761 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
8762 && INTVAL (XEXP (XEXP (op0, 0), 1)) == mode_width - 1
8763 && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
8764 {
8765 op0 = XEXP (op0, 1);
8766 code = (code == GE ? LE : GT);
8767 continue;
8768 }
8769 break;
8770
8771 case XOR:
8772 /* (eq (xor A B) C) -> (eq A (xor B C)). This is a simplification
8773 if C is zero or B is a constant. */
8774 if (equality_comparison_p
8775 && 0 != (tem = simplify_binary_operation (XOR, mode,
8776 XEXP (op0, 1), op1)))
8777 {
8778 op0 = XEXP (op0, 0);
8779 op1 = tem;
8780 continue;
8781 }
8782 break;
8783
8784 case EQ: case NE:
8785 case LT: case LTU: case LE: case LEU:
8786 case GT: case GTU: case GE: case GEU:
8787 /* We can't do anything if OP0 is a condition code value, rather
8788 than an actual data value. */
8789 if (const_op != 0
8790#ifdef HAVE_cc0
8791 || XEXP (op0, 0) == cc0_rtx
8792#endif
8793 || GET_MODE_CLASS (GET_MODE (XEXP (op0, 0))) == MODE_CC)
8794 break;
8795
8796 /* Get the two operands being compared. */
8797 if (GET_CODE (XEXP (op0, 0)) == COMPARE)
8798 tem = XEXP (XEXP (op0, 0), 0), tem1 = XEXP (XEXP (op0, 0), 1);
8799 else
8800 tem = XEXP (op0, 0), tem1 = XEXP (op0, 1);
8801
8802 /* Check for the cases where we simply want the result of the
8803 earlier test or the opposite of that result. */
8804 if (code == NE
8805 || (code == EQ && reversible_comparison_p (op0))
8806 || (GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
8807 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
8808 && (STORE_FLAG_VALUE
8809 & (((HOST_WIDE_INT) 1
8810 << (GET_MODE_BITSIZE (GET_MODE (op0)) - 1))))
8811 && (code == LT
8812 || (code == GE && reversible_comparison_p (op0)))))
8813 {
8814 code = (code == LT || code == NE
8815 ? GET_CODE (op0) : reverse_condition (GET_CODE (op0)));
8816 op0 = tem, op1 = tem1;
8817 continue;
8818 }
8819 break;
8820
8821 case IOR:
8822 /* The sign bit of (ior (plus X (const_int -1)) X) is non-zero
8823 iff X <= 0. */
8824 if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == PLUS
8825 && XEXP (XEXP (op0, 0), 1) == constm1_rtx
8826 && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
8827 {
8828 op0 = XEXP (op0, 1);
8829 code = (code == GE ? GT : LE);
8830 continue;
8831 }
8832 break;
8833
8834 case AND:
8835 /* Convert (and (xshift 1 X) Y) to (and (lshiftrt Y X) 1). This
8836 will be converted to a ZERO_EXTRACT later. */
8837 if (const_op == 0 && equality_comparison_p
8838 && (GET_CODE (XEXP (op0, 0)) == ASHIFT
8839 || GET_CODE (XEXP (op0, 0)) == LSHIFT)
8840 && XEXP (XEXP (op0, 0), 0) == const1_rtx)
8841 {
8842 op0 = simplify_and_const_int
8843 (op0, mode, gen_rtx_combine (LSHIFTRT, mode,
8844 XEXP (op0, 1),
8845 XEXP (XEXP (op0, 0), 1)),
8846 (HOST_WIDE_INT) 1);
8847 continue;
8848 }
8849
8850 /* If we are comparing (and (lshiftrt X C1) C2) for equality with
8851 zero and X is a comparison and C1 and C2 describe only bits set
8852 in STORE_FLAG_VALUE, we can compare with X. */
8853 if (const_op == 0 && equality_comparison_p
8854 && mode_width <= HOST_BITS_PER_WIDE_INT
8855 && GET_CODE (XEXP (op0, 1)) == CONST_INT
8856 && GET_CODE (XEXP (op0, 0)) == LSHIFTRT
8857 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
8858 && INTVAL (XEXP (XEXP (op0, 0), 1)) >= 0
8859 && INTVAL (XEXP (XEXP (op0, 0), 1)) < HOST_BITS_PER_WIDE_INT)
8860 {
8861 mask = ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode))
8862 << INTVAL (XEXP (XEXP (op0, 0), 1)));
8863 if ((~ STORE_FLAG_VALUE & mask) == 0
8864 && (GET_RTX_CLASS (GET_CODE (XEXP (XEXP (op0, 0), 0))) == '<'
8865 || ((tem = get_last_value (XEXP (XEXP (op0, 0), 0))) != 0
8866 && GET_RTX_CLASS (GET_CODE (tem)) == '<')))
8867 {
8868 op0 = XEXP (XEXP (op0, 0), 0);
8869 continue;
8870 }
8871 }
8872
8873 /* If we are doing an equality comparison of an AND of a bit equal
8874 to the sign bit, replace this with a LT or GE comparison of
8875 the underlying value. */
8876 if (equality_comparison_p
8877 && const_op == 0
8878 && GET_CODE (XEXP (op0, 1)) == CONST_INT
8879 && mode_width <= HOST_BITS_PER_WIDE_INT
8880 && ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode))
8881 == (HOST_WIDE_INT) 1 << (mode_width - 1)))
8882 {
8883 op0 = XEXP (op0, 0);
8884 code = (code == EQ ? GE : LT);
8885 continue;
8886 }
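/* For example, in SImode, "(and X 0x80000000) == 0" becomes "X >= 0"
and "(and X 0x80000000) != 0" becomes "X < 0". */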
8887
8888 /* If this AND operation is really a ZERO_EXTEND from a narrower
8889 mode, the constant fits within that mode, and this is either an
8890 equality or unsigned comparison, try to do this comparison in
8891 the narrower mode. */
8892 if ((equality_comparison_p || unsigned_comparison_p)
8893 && GET_CODE (XEXP (op0, 1)) == CONST_INT
8894 && (i = exact_log2 ((INTVAL (XEXP (op0, 1))
8895 & GET_MODE_MASK (mode))
8896 + 1)) >= 0
8897 && const_op >> i == 0
8898 && (tmode = mode_for_size (i, MODE_INT, 1)) != BLKmode)
8899 {
8900 op0 = gen_lowpart_for_combine (tmode, XEXP (op0, 0));
8901 continue;
8902 }
8903 break;
8904
8905 case ASHIFT:
8906 case LSHIFT:
8907 /* If we have (compare (xshift FOO N) (const_int C)) and
8908 the high order N bits of FOO (N+1 if an inequality comparison)
8909 are known to be zero, we can do this by comparing FOO with C
8910 shifted right N bits so long as the low-order N bits of C are
8911 zero. */
8912 if (GET_CODE (XEXP (op0, 1)) == CONST_INT
8913 && INTVAL (XEXP (op0, 1)) >= 0
8914 && ((INTVAL (XEXP (op0, 1)) + ! equality_comparison_p)
8915 < HOST_BITS_PER_WIDE_INT)
8916 && ((const_op
8917 & (((HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1))) - 1)) == 0)
8918 && mode_width <= HOST_BITS_PER_WIDE_INT
8919 && (nonzero_bits (XEXP (op0, 0), mode)
8920 & ~ (mask >> (INTVAL (XEXP (op0, 1))
8921 + ! equality_comparison_p))) == 0)
8922 {
8923 const_op >>= INTVAL (XEXP (op0, 1));
8924 op1 = GEN_INT (const_op);
8925 op0 = XEXP (op0, 0);
8926 continue;
8927 }
8928
8929 /* If we are doing a sign bit comparison, it means we are testing
8930 a particular bit. Convert it to the appropriate AND. */
8931 if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 1)) == CONST_INT
8932 && mode_width <= HOST_BITS_PER_WIDE_INT)
8933 {
8934 op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
8935 ((HOST_WIDE_INT) 1
8936 << (mode_width - 1
8937 - INTVAL (XEXP (op0, 1)))));
8938 code = (code == LT ? NE : EQ);
8939 continue;
8940 }
8941
8942 /* If this is an equality comparison with zero and we are shifting
8943 the low bit to the sign bit, we can convert this to an AND of the
8944 low-order bit. */
8945 if (const_op == 0 && equality_comparison_p
8946 && GET_CODE (XEXP (op0, 1)) == CONST_INT
8947 && INTVAL (XEXP (op0, 1)) == mode_width - 1)
8948 {
8949 op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
8950 (HOST_WIDE_INT) 1);
8951 continue;
8952 }
8953 break;
8954
8955 case ASHIFTRT:
8956 /* If this is an equality comparison with zero, we can do this
8957 as a logical shift, which might be much simpler. */
8958 if (equality_comparison_p && const_op == 0
8959 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
8960 {
8961 op0 = simplify_shift_const (NULL_RTX, LSHIFTRT, mode,
8962 XEXP (op0, 0),
8963 INTVAL (XEXP (op0, 1)));
8964 continue;
8965 }
8966
8967 /* If OP0 is a sign extension and CODE is not an unsigned comparison,
8968 do the comparison in a narrower mode. */
8969 if (! unsigned_comparison_p
8970 && GET_CODE (XEXP (op0, 1)) == CONST_INT
8971 && GET_CODE (XEXP (op0, 0)) == ASHIFT
8972 && XEXP (op0, 1) == XEXP (XEXP (op0, 0), 1)
8973 && (tmode = mode_for_size (mode_width - INTVAL (XEXP (op0, 1)),
8974 MODE_INT, 1)) != BLKmode
8975 && ((unsigned HOST_WIDE_INT) const_op <= GET_MODE_MASK (tmode)
8976 || ((unsigned HOST_WIDE_INT) - const_op
8977 <= GET_MODE_MASK (tmode))))
8978 {
8979 op0 = gen_lowpart_for_combine (tmode, XEXP (XEXP (op0, 0), 0));
8980 continue;
8981 }
8982
8983 /* ... fall through ... */
8984 case LSHIFTRT:
8985 /* If we have (compare (xshiftrt FOO N) (const_int C)) and
8986 the low order N bits of FOO are known to be zero, we can do this
8987 by comparing FOO with C shifted left N bits so long as no
8988 overflow occurs. */
8989 if (GET_CODE (XEXP (op0, 1)) == CONST_INT
8990 && INTVAL (XEXP (op0, 1)) >= 0
8991 && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
8992 && mode_width <= HOST_BITS_PER_WIDE_INT
8993 && (nonzero_bits (XEXP (op0, 0), mode)
8994 & (((HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1))) - 1)) == 0
8995 && (const_op == 0
8996 || (floor_log2 (const_op) + INTVAL (XEXP (op0, 1))
8997 < mode_width)))
8998 {
8999 const_op <<= INTVAL (XEXP (op0, 1));
9000 op1 = GEN_INT (const_op);
9001 op0 = XEXP (op0, 0);
9002 continue;
9003 }
9004
9005 /* If we are using this shift to extract just the sign bit, we
9006 can replace this with an LT or GE comparison. */
9007 if (const_op == 0
9008 && (equality_comparison_p || sign_bit_comparison_p)
9009 && GET_CODE (XEXP (op0, 1)) == CONST_INT
9010 && INTVAL (XEXP (op0, 1)) == mode_width - 1)
9011 {
9012 op0 = XEXP (op0, 0);
9013 code = (code == NE || code == GT ? LT : GE);
9014 continue;
9015 }
9016 break;
9017 }
9018
9019 break;
9020 }
9021
9022 /* Now make any compound operations involved in this comparison. Then,
9023 check for an outermost SUBREG on OP0 that isn't doing anything or is
9024 paradoxical. The latter case can only occur when it is known that the
9025 "extra" bits will be zero. Therefore, it is safe to remove the SUBREG.
9026 We can never remove a SUBREG for a non-equality comparison because the
9027 sign bit is in a different place in the underlying object. */
9028
9029 op0 = make_compound_operation (op0, op1 == const0_rtx ? COMPARE : SET);
9030 op1 = make_compound_operation (op1, SET);
9031
9032 if (GET_CODE (op0) == SUBREG && subreg_lowpart_p (op0)
9033 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
9034 && (code == NE || code == EQ)
9035 && ((GET_MODE_SIZE (GET_MODE (op0))
9036 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0))))))
9037 {
9038 op0 = SUBREG_REG (op0);
9039 op1 = gen_lowpart_for_combine (GET_MODE (op0), op1);
9040 }
9041
9042 else if (GET_CODE (op0) == SUBREG && subreg_lowpart_p (op0)
9043 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
9044 && (code == NE || code == EQ)
9045 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0)))
9046 <= HOST_BITS_PER_WIDE_INT)
9047 && (nonzero_bits (SUBREG_REG (op0), GET_MODE (SUBREG_REG (op0)))
9048 & ~ GET_MODE_MASK (GET_MODE (op0))) == 0
9049 && (tem = gen_lowpart_for_combine (GET_MODE (SUBREG_REG (op0)),
9050 op1),
9051 (nonzero_bits (tem, GET_MODE (SUBREG_REG (op0)))
9052 & ~ GET_MODE_MASK (GET_MODE (op0))) == 0))
9053 op0 = SUBREG_REG (op0), op1 = tem;
9054
9055 /* We now do the opposite procedure: Some machines don't have compare
9056 insns in all modes. If OP0's mode is an integer mode smaller than a
9057 word and we can't do a compare in that mode, see if there is a larger
9058 mode for which we can do the compare. There are a number of cases in
9059 which we can use the wider mode. */
9060
9061 mode = GET_MODE (op0);
9062 if (mode != VOIDmode && GET_MODE_CLASS (mode) == MODE_INT
9063 && GET_MODE_SIZE (mode) < UNITS_PER_WORD
9064 && cmp_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
9065 for (tmode = GET_MODE_WIDER_MODE (mode);
9066 (tmode != VOIDmode
9067 && GET_MODE_BITSIZE (tmode) <= HOST_BITS_PER_WIDE_INT);
9068 tmode = GET_MODE_WIDER_MODE (tmode))
9069 if (cmp_optab->handlers[(int) tmode].insn_code != CODE_FOR_nothing)
9070 {
9071 /* If the only nonzero bits in OP0 and OP1 are those in the
9072 narrower mode and this is an equality or unsigned comparison,
9073 we can use the wider mode. Similarly for sign-extended
9074 values and equality or signed comparisons. */
9075 if (((code == EQ || code == NE
9076 || code == GEU || code == GTU || code == LEU || code == LTU)
9077 && (nonzero_bits (op0, tmode) & ~ GET_MODE_MASK (mode)) == 0
9078 && (nonzero_bits (op1, tmode) & ~ GET_MODE_MASK (mode)) == 0)
9079 || ((code == EQ || code == NE
9080 || code == GE || code == GT || code == LE || code == LT)
9081 && (num_sign_bit_copies (op0, tmode)
9082 > GET_MODE_BITSIZE (tmode) - GET_MODE_BITSIZE (mode))
9083 && (num_sign_bit_copies (op1, tmode)
9084 > GET_MODE_BITSIZE (tmode) - GET_MODE_BITSIZE (mode))))
9085 {
9086 op0 = gen_lowpart_for_combine (tmode, op0);
9087 op1 = gen_lowpart_for_combine (tmode, op1);
9088 break;
9089 }
9090
9091 /* If this is a test for negative, we can make an explicit
9092 test of the sign bit. */
9093
9094 if (op1 == const0_rtx && (code == LT || code == GE)
9095 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
9096 {
9097 op0 = gen_binary (AND, tmode,
9098 gen_lowpart_for_combine (tmode, op0),
9099 GEN_INT ((HOST_WIDE_INT) 1
9100 << (GET_MODE_BITSIZE (mode) - 1)));
9101 code = (code == LT) ? NE : EQ;
9102 break;
9103 }
9104 }
9105
9106 *pop0 = op0;
9107 *pop1 = op1;
9108
9109 return code;
9110}
9111\f
9112/* Return 1 if we know that X, a comparison operation, is not operating
9113 on a floating-point value or is EQ or NE, meaning that we can safely
9114 reverse it. */
9115
9116static int
9117reversible_comparison_p (x)
9118 rtx x;
9119{
9120 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
9121 || GET_CODE (x) == NE || GET_CODE (x) == EQ)
9122 return 1;
9123
9124 switch (GET_MODE_CLASS (GET_MODE (XEXP (x, 0))))
9125 {
9126 case MODE_INT:
9127 case MODE_PARTIAL_INT:
9128 case MODE_COMPLEX_INT:
9129 return 1;
9130
9131 case MODE_CC:
9132 x = get_last_value (XEXP (x, 0));
9133 return (x && GET_CODE (x) == COMPARE
9134 && ! FLOAT_MODE_P (GET_MODE (XEXP (x, 0))));
9135 }
9136
9137 return 0;
9138}
9139\f
9140/* Utility function for following routine. Called when X is part of a value
9141 being stored into reg_last_set_value. Sets reg_last_set_table_tick
9142 for each register mentioned. Similar to mention_regs in cse.c. */
9143
9144static void
9145update_table_tick (x)
9146 rtx x;
9147{
9148 register enum rtx_code code = GET_CODE (x);
9149 register char *fmt = GET_RTX_FORMAT (code);
9150 register int i;
9151
9152 if (code == REG)
9153 {
9154 int regno = REGNO (x);
9155 int endregno = regno + (regno < FIRST_PSEUDO_REGISTER
9156 ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
9157
9158 for (i = regno; i < endregno; i++)
9159 reg_last_set_table_tick[i] = label_tick;
9160
9161 return;
9162 }
9163
9164 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
9165 /* Note that we can't have an "E" in values stored; see
9166 get_last_value_validate. */
9167 if (fmt[i] == 'e')
9168 update_table_tick (XEXP (x, i));
9169}
9170
9171/* Record that REG is set to VALUE in insn INSN. If VALUE is zero, we
9172 are saying that the register is clobbered and we no longer know its
7988fd36
RK
9173 value. If INSN is zero, don't update reg_last_set; this is only permitted
9174 with VALUE also zero and is used to invalidate the register. */
230d793d
RS
9175
9176static void
9177record_value_for_reg (reg, insn, value)
9178 rtx reg;
9179 rtx insn;
9180 rtx value;
9181{
9182 int regno = REGNO (reg);
9183 int endregno = regno + (regno < FIRST_PSEUDO_REGISTER
9184 ? HARD_REGNO_NREGS (regno, GET_MODE (reg)) : 1);
9185 int i;
9186
9187 /* If VALUE contains REG and we have a previous value for REG, substitute
9188 the previous value. */
9189 if (value && insn && reg_overlap_mentioned_p (reg, value))
9190 {
9191 rtx tem;
9192
9193 /* Set things up so get_last_value is allowed to see anything set up to
9194 our insn. */
9195 subst_low_cuid = INSN_CUID (insn);
9196 tem = get_last_value (reg);
9197
9198 if (tem)
9199 value = replace_rtx (copy_rtx (value), reg, tem);
9200 }
9201
9202 /* For each register modified, show we don't know its value, that
ef026f91
RS
9203 we don't know about its bitwise content, that its value has been
9204 updated, and that we don't know the location of the death of the
9205 register. */
230d793d
RS
9206 for (i = regno; i < endregno; i++)
9207 {
9208 if (insn)
9209 reg_last_set[i] = insn;
9210 reg_last_set_value[i] = 0;
ef026f91
RS
9211 reg_last_set_mode[i] = 0;
9212 reg_last_set_nonzero_bits[i] = 0;
9213 reg_last_set_sign_bit_copies[i] = 0;
230d793d
RS
9214 reg_last_death[i] = 0;
9215 }
9216
9217 /* Mark registers that are being referenced in this value. */
9218 if (value)
9219 update_table_tick (value);
9220
9221 /* Now update the status of each register being set.
9222 If someone is using this register in this block, set this register
9223 to invalid since we would otherwise confuse the two lives in this
9224 basic block. This makes any use of this register invalid. In cse, we
9225 scan the table to invalidate all entries using this register, but this
9226 is too much work for us. */
9227
9228 for (i = regno; i < endregno; i++)
9229 {
9230 reg_last_set_label[i] = label_tick;
9231 if (value && reg_last_set_table_tick[i] == label_tick)
9232 reg_last_set_invalid[i] = 1;
9233 else
9234 reg_last_set_invalid[i] = 0;
9235 }
9236
9237 /* The value being assigned might refer to X (like in "x++;"). In that
9238 case, we must replace it with (clobber (const_int 0)) to prevent
9239 infinite loops. */
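  /* For instance, for "x++" we might be recording
     (plus:SI (reg:SI 65) (const_int 1)) as the value of (reg:SI 65)
     (hypothetical register number); the self-reference is replaced by
     (clobber (const_int 0)) so that later substitutions cannot recurse
     forever.  */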
9240 if (value && ! get_last_value_validate (&value,
9241 reg_last_set_label[regno], 0))
9242 {
9243 value = copy_rtx (value);
9244 if (! get_last_value_validate (&value, reg_last_set_label[regno], 1))
9245 value = 0;
9246 }
9247
55310dad
RK
9248 /* For the main register being modified, update the value, the mode, the
9249 nonzero bits, and the number of sign bit copies. */
9250
230d793d
RS
9251 reg_last_set_value[regno] = value;
9252
55310dad
RK
9253 if (value)
9254 {
2afabb48 9255 subst_low_cuid = INSN_CUID (insn);
55310dad
RK
9256 reg_last_set_mode[regno] = GET_MODE (reg);
9257 reg_last_set_nonzero_bits[regno] = nonzero_bits (value, GET_MODE (reg));
9258 reg_last_set_sign_bit_copies[regno]
9259 = num_sign_bit_copies (value, GET_MODE (reg));
9260 }
230d793d
RS
9261}
9262
9263/* Used for communication between the following two routines. */
9264static rtx record_dead_insn;
9265
9266/* Called via note_stores from record_dead_and_set_regs to handle one
9267 SET or CLOBBER in an insn. */
9268
9269static void
9270record_dead_and_set_regs_1 (dest, setter)
9271 rtx dest, setter;
9272{
9273 if (GET_CODE (dest) == REG)
9274 {
9275 /* If we are setting the whole register, we know its value. Otherwise
9276 show that we don't know the value. We can handle SUBREG in
9277 some cases. */
9278 if (GET_CODE (setter) == SET && dest == SET_DEST (setter))
9279 record_value_for_reg (dest, record_dead_insn, SET_SRC (setter));
9280 else if (GET_CODE (setter) == SET
9281 && GET_CODE (SET_DEST (setter)) == SUBREG
9282 && SUBREG_REG (SET_DEST (setter)) == dest
9283 && subreg_lowpart_p (SET_DEST (setter)))
d0ab8cd3
RK
9284 record_value_for_reg (dest, record_dead_insn,
9285 gen_lowpart_for_combine (GET_MODE (dest),
9286 SET_SRC (setter)));
230d793d 9287 else
5f4f0e22 9288 record_value_for_reg (dest, record_dead_insn, NULL_RTX);
230d793d
RS
9289 }
9290 else if (GET_CODE (dest) == MEM
9291 /* Ignore pushes, they clobber nothing. */
9292 && ! push_operand (dest, GET_MODE (dest)))
9293 mem_last_set = INSN_CUID (record_dead_insn);
9294}
9295
9296/* Update the records of when each REG was most recently set or killed
9297 for the things done by INSN. This is the last thing done in processing
9298 INSN in the combiner loop.
9299
ef026f91
RS
9300 We update reg_last_set, reg_last_set_value, reg_last_set_mode,
9301 reg_last_set_nonzero_bits, reg_last_set_sign_bit_copies, reg_last_death,
9302 and also the similar information mem_last_set (which insn most recently
9303 modified memory) and last_call_cuid (which insn was the most recent
9304 subroutine call). */
230d793d
RS
9305
9306static void
9307record_dead_and_set_regs (insn)
9308 rtx insn;
9309{
9310 register rtx link;
55310dad
RK
9311 int i;
9312
230d793d
RS
9313 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
9314 {
dbc131f3
RK
9315 if (REG_NOTE_KIND (link) == REG_DEAD
9316 && GET_CODE (XEXP (link, 0)) == REG)
9317 {
9318 int regno = REGNO (XEXP (link, 0));
9319 int endregno
9320 = regno + (regno < FIRST_PSEUDO_REGISTER
9321 ? HARD_REGNO_NREGS (regno, GET_MODE (XEXP (link, 0)))
9322 : 1);
dbc131f3
RK
9323
9324 for (i = regno; i < endregno; i++)
9325 reg_last_death[i] = insn;
9326 }
230d793d 9327 else if (REG_NOTE_KIND (link) == REG_INC)
5f4f0e22 9328 record_value_for_reg (XEXP (link, 0), insn, NULL_RTX);
230d793d
RS
9329 }
9330
9331 if (GET_CODE (insn) == CALL_INSN)
55310dad
RK
9332 {
9333 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
9334 if (call_used_regs[i])
9335 {
9336 reg_last_set_value[i] = 0;
ef026f91
RS
9337 reg_last_set_mode[i] = 0;
9338 reg_last_set_nonzero_bits[i] = 0;
9339 reg_last_set_sign_bit_copies[i] = 0;
55310dad
RK
9340 reg_last_death[i] = 0;
9341 }
9342
9343 last_call_cuid = mem_last_set = INSN_CUID (insn);
9344 }
230d793d
RS
9345
9346 record_dead_insn = insn;
9347 note_stores (PATTERN (insn), record_dead_and_set_regs_1);
9348}
9349\f
9350/* Utility routine for the following function. Verify that all the registers
9351 mentioned in *LOC are valid when *LOC was part of a value set when
9352 label_tick == TICK. Return 0 if some are not.
9353
9354 If REPLACE is non-zero, replace the invalid reference with
9355 (clobber (const_int 0)) and return 1. This replacement is useful because
9356 we often can get useful information about the form of a value (e.g., if
9357 it was produced by a shift that always produces -1 or 0) even though
9358 we don't know exactly what registers it was produced from. */
9359
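/* As an example of the form information this preserves (hypothetical
   register number):  a value such as
   (ashiftrt:SI (reg:SI 65) (const_int 31)) is known to be -1 or 0 no
   matter what the register held, so replacing the stale register use with
   (clobber (const_int 0)) keeps that shape even though the exact value
   is lost.  */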
9360static int
9361get_last_value_validate (loc, tick, replace)
9362 rtx *loc;
9363 int tick;
9364 int replace;
9365{
9366 rtx x = *loc;
9367 char *fmt = GET_RTX_FORMAT (GET_CODE (x));
9368 int len = GET_RTX_LENGTH (GET_CODE (x));
9369 int i;
9370
9371 if (GET_CODE (x) == REG)
9372 {
9373 int regno = REGNO (x);
9374 int endregno = regno + (regno < FIRST_PSEUDO_REGISTER
9375 ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
9376 int j;
9377
9378 for (j = regno; j < endregno; j++)
9379 if (reg_last_set_invalid[j]
9380 /* If this is a pseudo-register that was only set once, it is
9381 always valid. */
9382 || (! (regno >= FIRST_PSEUDO_REGISTER && reg_n_sets[regno] == 1)
9383 && reg_last_set_label[j] > tick))
9384 {
9385 if (replace)
9386 *loc = gen_rtx (CLOBBER, GET_MODE (x), const0_rtx);
9387 return replace;
9388 }
9389
9390 return 1;
9391 }
9392
9393 for (i = 0; i < len; i++)
9394 if ((fmt[i] == 'e'
9395 && get_last_value_validate (&XEXP (x, i), tick, replace) == 0)
9396 /* Don't bother with these. They shouldn't occur anyway. */
9397 || fmt[i] == 'E')
9398 return 0;
9399
9400 /* If we haven't found a reason for it to be invalid, it is valid. */
9401 return 1;
9402}
9403
9404/* Get the last value assigned to X, if known. Some registers
9405 in the value may be replaced with (clobber (const_int 0)) if their value
9406 is no longer known reliably. */
9407
9408static rtx
9409get_last_value (x)
9410 rtx x;
9411{
9412 int regno;
9413 rtx value;
9414
9415 /* If this is a non-paradoxical SUBREG, get the value of its operand and
9416 then convert it to the desired mode. If this is a paradoxical SUBREG,
9417 we cannot predict what values the "extra" bits might have. */
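  /* E.g. for (subreg:QI (reg:SI 65) 0) (hypothetical register number) we
     look up the last SImode value of register 65 and return its low part,
     whereas a paradoxical (subreg:DI (reg:SI 65) 0) is rejected because
     the upper bits cannot be predicted.  */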
9418 if (GET_CODE (x) == SUBREG
9419 && subreg_lowpart_p (x)
9420 && (GET_MODE_SIZE (GET_MODE (x))
9421 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
9422 && (value = get_last_value (SUBREG_REG (x))) != 0)
9423 return gen_lowpart_for_combine (GET_MODE (x), value);
9424
9425 if (GET_CODE (x) != REG)
9426 return 0;
9427
9428 regno = REGNO (x);
9429 value = reg_last_set_value[regno];
9430
d0ab8cd3 9431 /* If we don't have a value or if it isn't for this basic block, return 0. */
230d793d
RS
9432
9433 if (value == 0
9434 || (reg_n_sets[regno] != 1
55310dad 9435 && reg_last_set_label[regno] != label_tick))
230d793d
RS
9436 return 0;
9437
d0ab8cd3 9438 /* If the value was set in a later insn than the ones we are processing,
4090a6b3
RK
9439 we can't use it even if the register was only set once, but make a quick
9440 check to see if the previous insn set it to something. This is commonly
9441 the case when the same pseudo is used by repeated insns. */
d0ab8cd3 9442
4090a6b3 9443 if (INSN_CUID (reg_last_set[regno]) >= subst_low_cuid)
d0ab8cd3
RK
9444 {
9445 rtx insn, set;
9446
2fc9c644 9447 for (insn = prev_nonnote_insn (subst_insn);
d0ab8cd3 9448 insn && INSN_CUID (insn) >= subst_low_cuid;
2fc9c644 9449 insn = prev_nonnote_insn (insn))
d0ab8cd3
RK
9450 ;
9451
9452 if (insn
9453 && (set = single_set (insn)) != 0
9454 && rtx_equal_p (SET_DEST (set), x))
9455 {
9456 value = SET_SRC (set);
9457
9458 /* Make sure that VALUE doesn't reference X. Replace any
9459 explicit references with a CLOBBER. If there are any remaining
9460 references (rare), don't use the value. */
9461
9462 if (reg_mentioned_p (x, value))
9463 value = replace_rtx (copy_rtx (value), x,
9464 gen_rtx (CLOBBER, GET_MODE (x), const0_rtx));
9465
9466 if (reg_overlap_mentioned_p (x, value))
9467 return 0;
9468 }
9469 else
9470 return 0;
9471 }
9472
9473 /* If the value has all its registers valid, return it. */
230d793d
RS
9474 if (get_last_value_validate (&value, reg_last_set_label[regno], 0))
9475 return value;
9476
9477 /* Otherwise, make a copy and replace any invalid register with
9478 (clobber (const_int 0)). If that fails for some reason, return 0. */
9479
9480 value = copy_rtx (value);
9481 if (get_last_value_validate (&value, reg_last_set_label[regno], 1))
9482 return value;
9483
9484 return 0;
9485}
9486\f
9487/* Return nonzero if expression X refers to a REG or to memory
9488 that is set in an instruction more recent than FROM_CUID. */
9489
9490static int
9491use_crosses_set_p (x, from_cuid)
9492 register rtx x;
9493 int from_cuid;
9494{
9495 register char *fmt;
9496 register int i;
9497 register enum rtx_code code = GET_CODE (x);
9498
9499 if (code == REG)
9500 {
9501 register int regno = REGNO (x);
e28f5732
RK
9502 int endreg = regno + (regno < FIRST_PSEUDO_REGISTER
9503 ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
9504
230d793d
RS
9505#ifdef PUSH_ROUNDING
9506 /* Don't allow uses of the stack pointer to be moved,
9507 because we don't know whether the move crosses a push insn. */
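      /* A push insn typically stores through something like
         (mem (pre_dec (reg sp))), changing the stack pointer only as a
         side effect, so any use of the stack pointer is conservatively
         treated as crossing a set.  */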
9508 if (regno == STACK_POINTER_REGNUM)
9509 return 1;
9510#endif
e28f5732
RK
9511 for (; regno < endreg; regno++)
9512 if (reg_last_set[regno]
9513 && INSN_CUID (reg_last_set[regno]) > from_cuid)
9514 return 1;
9515 return 0;
230d793d
RS
9516 }
9517
9518 if (code == MEM && mem_last_set > from_cuid)
9519 return 1;
9520
9521 fmt = GET_RTX_FORMAT (code);
9522
9523 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
9524 {
9525 if (fmt[i] == 'E')
9526 {
9527 register int j;
9528 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
9529 if (use_crosses_set_p (XVECEXP (x, i, j), from_cuid))
9530 return 1;
9531 }
9532 else if (fmt[i] == 'e'
9533 && use_crosses_set_p (XEXP (x, i), from_cuid))
9534 return 1;
9535 }
9536 return 0;
9537}
9538\f
9539/* Define three variables used for communication between the following
9540 routines. */
9541
9542static int reg_dead_regno, reg_dead_endregno;
9543static int reg_dead_flag;
9544
9545/* Function called via note_stores from reg_dead_at_p.
9546
9547 If DEST is within [reg_dead_regno, reg_dead_endregno), set
9548 reg_dead_flag to 1 if X is a CLOBBER and to -1 if it is a SET. */
9549
9550static void
9551reg_dead_at_p_1 (dest, x)
9552 rtx dest;
9553 rtx x;
9554{
9555 int regno, endregno;
9556
9557 if (GET_CODE (dest) != REG)
9558 return;
9559
9560 regno = REGNO (dest);
9561 endregno = regno + (regno < FIRST_PSEUDO_REGISTER
9562 ? HARD_REGNO_NREGS (regno, GET_MODE (dest)) : 1);
9563
9564 if (reg_dead_endregno > regno && reg_dead_regno < endregno)
9565 reg_dead_flag = (GET_CODE (x) == CLOBBER) ? 1 : -1;
9566}
9567
9568/* Return non-zero if REG is known to be dead at INSN.
9569
9570 We scan backwards from INSN. If we hit a REG_DEAD note or a CLOBBER
9571 referencing REG, it is dead. If we hit a SET referencing REG, it is
9572 live. Otherwise, see if it is live or dead at the start of the basic
9573 block we are in. */
9574
9575static int
9576reg_dead_at_p (reg, insn)
9577 rtx reg;
9578 rtx insn;
9579{
9580 int block, i;
9581
9582 /* Set variables for reg_dead_at_p_1. */
9583 reg_dead_regno = REGNO (reg);
9584 reg_dead_endregno = reg_dead_regno + (reg_dead_regno < FIRST_PSEUDO_REGISTER
9585 ? HARD_REGNO_NREGS (reg_dead_regno,
9586 GET_MODE (reg))
9587 : 1);
9588
9589 reg_dead_flag = 0;
9590
9591 /* Scan backwards until we find a REG_DEAD note, SET, CLOBBER, label, or
9592 beginning of function. */
9593 for (; insn && GET_CODE (insn) != CODE_LABEL;
9594 insn = prev_nonnote_insn (insn))
9595 {
9596 note_stores (PATTERN (insn), reg_dead_at_p_1);
9597 if (reg_dead_flag)
9598 return reg_dead_flag == 1 ? 1 : 0;
9599
9600 if (find_regno_note (insn, REG_DEAD, reg_dead_regno))
9601 return 1;
9602 }
9603
9604 /* Get the basic block number that we were in. */
9605 if (insn == 0)
9606 block = 0;
9607 else
9608 {
9609 for (block = 0; block < n_basic_blocks; block++)
9610 if (insn == basic_block_head[block])
9611 break;
9612
9613 if (block == n_basic_blocks)
9614 return 0;
9615 }
9616
9617 for (i = reg_dead_regno; i < reg_dead_endregno; i++)
5f4f0e22
CH
9618 if (basic_block_live_at_start[block][i / REGSET_ELT_BITS]
9619 & ((REGSET_ELT_TYPE) 1 << (i % REGSET_ELT_BITS)))
230d793d
RS
9620 return 0;
9621
9622 return 1;
9623}
9624\f
9625/* Remove register number REGNO from the dead registers list of INSN.
9626
9627 Return the note used to record the death, if there was one. */
9628
9629rtx
9630remove_death (regno, insn)
9631 int regno;
9632 rtx insn;
9633{
9634 register rtx note = find_regno_note (insn, REG_DEAD, regno);
9635
9636 if (note)
1a26b032
RK
9637 {
9638 reg_n_deaths[regno]--;
9639 remove_note (insn, note);
9640 }
230d793d
RS
9641
9642 return note;
9643}
9644
9645/* For each register (hardware or pseudo) used within expression X, if its
9646 death is in an instruction with cuid between FROM_CUID (inclusive) and
9647 TO_INSN (exclusive), put a REG_DEAD note for that register in the
9648 list headed by PNOTES.
9649
9650 This is done when X is being merged by combination into TO_INSN. These
9651 notes will then be distributed as needed. */
9652
9653static void
9654move_deaths (x, from_cuid, to_insn, pnotes)
9655 rtx x;
9656 int from_cuid;
9657 rtx to_insn;
9658 rtx *pnotes;
9659{
9660 register char *fmt;
9661 register int len, i;
9662 register enum rtx_code code = GET_CODE (x);
9663
9664 if (code == REG)
9665 {
9666 register int regno = REGNO (x);
9667 register rtx where_dead = reg_last_death[regno];
9668
9669 if (where_dead && INSN_CUID (where_dead) >= from_cuid
9670 && INSN_CUID (where_dead) < INSN_CUID (to_insn))
9671 {
dbc131f3 9672 rtx note = remove_death (regno, where_dead);
230d793d
RS
9673
9674 /* It is possible for the call above to return 0. This can occur
9675 when reg_last_death points to I2 or I1 that we combined with.
dbc131f3
RK
9676 In that case make a new note.
9677
9678 We must also check for the case where X is a hard register
9679 and NOTE is a death note for a range of hard registers
9680 including X. In that case, we must put REG_DEAD notes for
9681 the remaining registers in place of NOTE. */
9682
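          /* Hypothetical illustration:  if NOTE records the death of a
             DImode value in hard registers 2-3 but X is (reg:SI 2), only
             register 2 is covered by X, so a fresh REG_DEAD note must be
             left on the old insn for register 3.  */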
9683 if (note != 0 && regno < FIRST_PSEUDO_REGISTER
9684 && (GET_MODE_SIZE (GET_MODE (XEXP (note, 0)))
9685 != GET_MODE_SIZE (GET_MODE (x))))
9686 {
9687 int deadregno = REGNO (XEXP (note, 0));
9688 int deadend
9689 = (deadregno + HARD_REGNO_NREGS (deadregno,
9690 GET_MODE (XEXP (note, 0))));
9691 int ourend = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
9692 int i;
9693
9694 for (i = deadregno; i < deadend; i++)
9695 if (i < regno || i >= ourend)
9696 REG_NOTES (where_dead)
9697 = gen_rtx (EXPR_LIST, REG_DEAD,
9698 gen_rtx (REG, word_mode, i),
9699 REG_NOTES (where_dead));
9700 }
230d793d 9701
dbc131f3 9702 if (note != 0 && GET_MODE (XEXP (note, 0)) == GET_MODE (x))
230d793d
RS
9703 {
9704 XEXP (note, 1) = *pnotes;
9705 *pnotes = note;
9706 }
9707 else
9708 *pnotes = gen_rtx (EXPR_LIST, REG_DEAD, x, *pnotes);
1a26b032
RK
9709
9710 reg_n_deaths[regno]++;
230d793d
RS
9711 }
9712
9713 return;
9714 }
9715
9716 else if (GET_CODE (x) == SET)
9717 {
9718 rtx dest = SET_DEST (x);
9719
9720 move_deaths (SET_SRC (x), from_cuid, to_insn, pnotes);
9721
a7c99304
RK
9722 /* In the case of a ZERO_EXTRACT, a STRICT_LOW_PART, or a SUBREG
9723 that accesses one word of a multi-word item, some
9724 piece of every register in the expression is used by
9725 this insn, so remove any old death. */
9726
9727 if (GET_CODE (dest) == ZERO_EXTRACT
9728 || GET_CODE (dest) == STRICT_LOW_PART
9729 || (GET_CODE (dest) == SUBREG
9730 && (((GET_MODE_SIZE (GET_MODE (dest))
9731 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
9732 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
9733 + UNITS_PER_WORD - 1) / UNITS_PER_WORD))))
230d793d 9734 {
a7c99304
RK
9735 move_deaths (dest, from_cuid, to_insn, pnotes);
9736 return;
230d793d
RS
9737 }
9738
a7c99304
RK
9739 /* If this is some other SUBREG, we know it replaces the entire
9740 value, so use that as the destination. */
9741 if (GET_CODE (dest) == SUBREG)
9742 dest = SUBREG_REG (dest);
9743
9744 /* If this is a MEM, adjust deaths of anything used in the address.
9745 For a REG (the only other possibility), the entire value is
9746 being replaced so the old value is not used in this insn. */
230d793d
RS
9747
9748 if (GET_CODE (dest) == MEM)
9749 move_deaths (XEXP (dest, 0), from_cuid, to_insn, pnotes);
9750 return;
9751 }
9752
9753 else if (GET_CODE (x) == CLOBBER)
9754 return;
9755
9756 len = GET_RTX_LENGTH (code);
9757 fmt = GET_RTX_FORMAT (code);
9758
9759 for (i = 0; i < len; i++)
9760 {
9761 if (fmt[i] == 'E')
9762 {
9763 register int j;
9764 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
9765 move_deaths (XVECEXP (x, i, j), from_cuid, to_insn, pnotes);
9766 }
9767 else if (fmt[i] == 'e')
9768 move_deaths (XEXP (x, i), from_cuid, to_insn, pnotes);
9769 }
9770}
9771\f
a7c99304
RK
9772/* Return 1 if X is the target of a bit-field assignment in BODY, the
9773 pattern of an insn. X must be a REG. */
230d793d
RS
9774
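/* For example, if BODY is the (hypothetical) pattern

       (set (zero_extract:SI (reg:SI 65) (const_int 4) (const_int 2))
            (const_int 5))

   then X = (reg:SI 65) is a bit-field target, since only a 4-bit field
   of the register is written.  */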
9775static int
a7c99304
RK
9776reg_bitfield_target_p (x, body)
9777 rtx x;
230d793d
RS
9778 rtx body;
9779{
9780 int i;
9781
9782 if (GET_CODE (body) == SET)
a7c99304
RK
9783 {
9784 rtx dest = SET_DEST (body);
9785 rtx target;
9786 int regno, tregno, endregno, endtregno;
9787
9788 if (GET_CODE (dest) == ZERO_EXTRACT)
9789 target = XEXP (dest, 0);
9790 else if (GET_CODE (dest) == STRICT_LOW_PART)
9791 target = SUBREG_REG (XEXP (dest, 0));
9792 else
9793 return 0;
9794
9795 if (GET_CODE (target) == SUBREG)
9796 target = SUBREG_REG (target);
9797
9798 if (GET_CODE (target) != REG)
9799 return 0;
9800
9801 tregno = REGNO (target), regno = REGNO (x);
9802 if (tregno >= FIRST_PSEUDO_REGISTER || regno >= FIRST_PSEUDO_REGISTER)
9803 return target == x;
9804
9805 endtregno = tregno + HARD_REGNO_NREGS (tregno, GET_MODE (target));
9806 endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
9807
9808 return endregno > tregno && regno < endtregno;
9809 }
230d793d
RS
9810
9811 else if (GET_CODE (body) == PARALLEL)
9812 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
a7c99304 9813 if (reg_bitfield_target_p (x, XVECEXP (body, 0, i)))
230d793d
RS
9814 return 1;
9815
9816 return 0;
9817}
9818\f
9819/* Given a chain of REG_NOTES originally from FROM_INSN, try to place them
9820 as appropriate. I3 and I2 are the insns resulting from the combination
9821 insns including FROM (I2 may be zero).
9822
9823 ELIM_I2 and ELIM_I1 are either zero or registers that we know will
9824 not need REG_DEAD notes because they are being substituted for. This
9825 saves searching in the most common cases.
9826
9827 Each note in the list is either ignored or placed on some insns, depending
9828 on the type of note. */
9829
9830static void
9831distribute_notes (notes, from_insn, i3, i2, elim_i2, elim_i1)
9832 rtx notes;
9833 rtx from_insn;
9834 rtx i3, i2;
9835 rtx elim_i2, elim_i1;
9836{
9837 rtx note, next_note;
9838 rtx tem;
9839
9840 for (note = notes; note; note = next_note)
9841 {
9842 rtx place = 0, place2 = 0;
9843
9844 /* If this NOTE references a pseudo register, ensure it references
9845 the latest copy of that register. */
9846 if (XEXP (note, 0) && GET_CODE (XEXP (note, 0)) == REG
9847 && REGNO (XEXP (note, 0)) >= FIRST_PSEUDO_REGISTER)
9848 XEXP (note, 0) = regno_reg_rtx[REGNO (XEXP (note, 0))];
9849
9850 next_note = XEXP (note, 1);
9851 switch (REG_NOTE_KIND (note))
9852 {
9853 case REG_UNUSED:
9854 /* If this register is set or clobbered in I3, put the note there
9855 unless there is one already. */
9856 if (reg_set_p (XEXP (note, 0), PATTERN (i3)))
9857 {
9858 if (! (GET_CODE (XEXP (note, 0)) == REG
9859 ? find_regno_note (i3, REG_UNUSED, REGNO (XEXP (note, 0)))
9860 : find_reg_note (i3, REG_UNUSED, XEXP (note, 0))))
9861 place = i3;
9862 }
9863 /* Otherwise, if this register is used by I3, then this register
9864 now dies here, so we must put a REG_DEAD note here unless there
9865 is one already. */
9866 else if (reg_referenced_p (XEXP (note, 0), PATTERN (i3))
9867 && ! (GET_CODE (XEXP (note, 0)) == REG
9868 ? find_regno_note (i3, REG_DEAD, REGNO (XEXP (note, 0)))
9869 : find_reg_note (i3, REG_DEAD, XEXP (note, 0))))
9870 {
9871 PUT_REG_NOTE_KIND (note, REG_DEAD);
9872 place = i3;
9873 }
9874 break;
9875
9876 case REG_EQUAL:
9877 case REG_EQUIV:
9878 case REG_NONNEG:
9879 /* These notes say something about results of an insn. We can
9880 only support them if they used to be on I3 in which case they
a687e897
RK
9881 remain on I3. Otherwise they are ignored.
9882
9883 If the note refers to an expression that is not a constant, we
9884 must also ignore the note since we cannot tell whether the
9885 equivalence is still true. It might be possible to do
9886 slightly better than this (we only have a problem if I2DEST
9887 or I1DEST is present in the expression), but it doesn't
9888 seem worth the trouble. */
9889
9890 if (from_insn == i3
9891 && (XEXP (note, 0) == 0 || CONSTANT_P (XEXP (note, 0))))
230d793d
RS
9892 place = i3;
9893 break;
9894
9895 case REG_INC:
9896 case REG_NO_CONFLICT:
9897 case REG_LABEL:
9898 /* These notes say something about how a register is used. They must
9899 be present on any use of the register in I2 or I3. */
9900 if (reg_mentioned_p (XEXP (note, 0), PATTERN (i3)))
9901 place = i3;
9902
9903 if (i2 && reg_mentioned_p (XEXP (note, 0), PATTERN (i2)))
9904 {
9905 if (place)
9906 place2 = i2;
9907 else
9908 place = i2;
9909 }
9910 break;
9911
9912 case REG_WAS_0:
9913 /* It is too much trouble to try to see if this note is still
9914 correct in all situations. It is better to simply delete it. */
9915 break;
9916
9917 case REG_RETVAL:
9918 /* If the insn previously containing this note still exists,
9919 put it back where it was. Otherwise move it to the previous
9920 insn. Adjust the corresponding REG_LIBCALL note. */
9921 if (GET_CODE (from_insn) != NOTE)
9922 place = from_insn;
9923 else
9924 {
5f4f0e22 9925 tem = find_reg_note (XEXP (note, 0), REG_LIBCALL, NULL_RTX);
230d793d
RS
9926 place = prev_real_insn (from_insn);
9927 if (tem && place)
9928 XEXP (tem, 0) = place;
9929 }
9930 break;
9931
9932 case REG_LIBCALL:
9933 /* This is handled similarly to REG_RETVAL. */
9934 if (GET_CODE (from_insn) != NOTE)
9935 place = from_insn;
9936 else
9937 {
5f4f0e22 9938 tem = find_reg_note (XEXP (note, 0), REG_RETVAL, NULL_RTX);
230d793d
RS
9939 place = next_real_insn (from_insn);
9940 if (tem && place)
9941 XEXP (tem, 0) = place;
9942 }
9943 break;
9944
9945 case REG_DEAD:
9946 /* If the register is used as an input in I3, it dies there.
9947 Similarly for I2, if it is non-zero and adjacent to I3.
9948
9949 If the register is not used as an input in either I3 or I2
9950 and it is not one of the registers we were supposed to eliminate,
9951 there are two possibilities. We might have a non-adjacent I2
9952 or we might have somehow eliminated an additional register
9953 from a computation. For example, we might have had A & B where
9954 we discover that B will always be zero. In this case we will
9955 eliminate the reference to A.
9956
9957 In both cases, we must search to see if we can find a previous
9958 use of A and put the death note there. */
9959
9960 if (reg_referenced_p (XEXP (note, 0), PATTERN (i3)))
9961 place = i3;
9962 else if (i2 != 0 && next_nonnote_insn (i2) == i3
9963 && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
9964 place = i2;
9965
9966 if (XEXP (note, 0) == elim_i2 || XEXP (note, 0) == elim_i1)
9967 break;
9968
510dd77e
RK
9969 /* If the register is used in both I2 and I3 and it dies in I3,
9970 we might have added another reference to it. If reg_n_refs
9971 was 2, bump it to 3. This has to be correct since the
9972 register must have been set somewhere. The reason this is
9973 done is because local-alloc.c treats 2 references as a
9974 special case. */
9975
9976 if (place == i3 && i2 != 0 && GET_CODE (XEXP (note, 0)) == REG
9977 && reg_n_refs[REGNO (XEXP (note, 0))] == 2
9978 && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
9979 reg_n_refs[REGNO (XEXP (note, 0))] = 3;
9980
230d793d
RS
9981 if (place == 0)
9982 for (tem = prev_nonnote_insn (i3);
9983 tem && (GET_CODE (tem) == INSN
9984 || GET_CODE (tem) == CALL_INSN);
9985 tem = prev_nonnote_insn (tem))
9986 {
9987 /* If the register is being set at TEM, see if that is all
9988 TEM is doing. If so, delete TEM. Otherwise, make this
9989 into a REG_UNUSED note instead. */
9990 if (reg_set_p (XEXP (note, 0), PATTERN (tem)))
9991 {
9992 rtx set = single_set (tem);
9993
5089e22e
RS
9994 /* Verify that it was the set, and not a clobber that
9995 modified the register. */
9996
9997 if (set != 0 && ! side_effects_p (SET_SRC (set))
9998 && rtx_equal_p (XEXP (note, 0), SET_DEST (set)))
230d793d
RS
9999 {
10000 /* Move the notes and links of TEM elsewhere.
10001 This might delete other dead insns recursively.
10002 First set the pattern to something that won't use
10003 any register. */
10004
10005 PATTERN (tem) = pc_rtx;
10006
5f4f0e22
CH
10007 distribute_notes (REG_NOTES (tem), tem, tem,
10008 NULL_RTX, NULL_RTX, NULL_RTX);
230d793d
RS
10009 distribute_links (LOG_LINKS (tem));
10010
10011 PUT_CODE (tem, NOTE);
10012 NOTE_LINE_NUMBER (tem) = NOTE_INSN_DELETED;
10013 NOTE_SOURCE_FILE (tem) = 0;
10014 }
10015 else
10016 {
10017 PUT_REG_NOTE_KIND (note, REG_UNUSED);
10018
10019 /* If there isn't already a REG_UNUSED note, put one
10020 here. */
10021 if (! find_regno_note (tem, REG_UNUSED,
10022 REGNO (XEXP (note, 0))))
10023 place = tem;
10024 break;
10025 }
10026 }
10027 else if (reg_referenced_p (XEXP (note, 0), PATTERN (tem)))
10028 {
10029 place = tem;
10030 break;
10031 }
10032 }
10033
10034 /* If the register is set or already dead at PLACE, we needn't do
10035 anything with this note if it is still a REG_DEAD note.
10036
10037 Note that we cannot use just `dead_or_set_p' here since we can
10038 convert an assignment to a register into a bit-field assignment.
10039 Therefore, we must also omit the note if the register is the
10040 target of a bitfield assignment. */
10041
10042 if (place && REG_NOTE_KIND (note) == REG_DEAD)
10043 {
10044 int regno = REGNO (XEXP (note, 0));
10045
10046 if (dead_or_set_p (place, XEXP (note, 0))
10047 || reg_bitfield_target_p (XEXP (note, 0), PATTERN (place)))
10048 {
10049 /* Unless the register previously died in PLACE, clear
10050 reg_last_death. [I no longer understand why this is
10051 being done.] */
10052 if (reg_last_death[regno] != place)
10053 reg_last_death[regno] = 0;
10054 place = 0;
10055 }
10056 else
10057 reg_last_death[regno] = place;
10058
10059 /* If this is a death note for a hard reg that is occupying
10060 multiple registers, ensure that we are still using all
10061 parts of the object. If we find a piece of the object
10062 that is unused, we must add a USE for that piece before
10063 PLACE and put the appropriate REG_DEAD note on it.
10064
10065 An alternative would be to put a REG_UNUSED for the pieces
10066 on the insn that set the register, but that can't be done if
10067 it is not in the same block. It is simpler, though less
10068 efficient, to add the USE insns. */
10069
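              /* Hypothetical case:  the note says a DImode value dies in
                 hard registers 2-3, but PLACE only references (reg:SI 2).
                 We then emit (use (reg:SI 3)) in front of PLACE and hang
                 a REG_DEAD note for register 3 on that new USE insn.  */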
10070 if (place && regno < FIRST_PSEUDO_REGISTER
10071 && HARD_REGNO_NREGS (regno, GET_MODE (XEXP (note, 0))) > 1)
10072 {
10073 int endregno
10074 = regno + HARD_REGNO_NREGS (regno,
10075 GET_MODE (XEXP (note, 0)));
10076 int all_used = 1;
10077 int i;
10078
10079 for (i = regno; i < endregno; i++)
10080 if (! refers_to_regno_p (i, i + 1, PATTERN (place), 0))
10081 {
10082 rtx piece = gen_rtx (REG, word_mode, i);
28f6d3af
RK
10083 rtx p;
10084
10085 /* See if we already placed a USE note for this
10086 register in front of PLACE. */
10087 for (p = place;
10088 GET_CODE (PREV_INSN (p)) == INSN
10089 && GET_CODE (PATTERN (PREV_INSN (p))) == USE;
10090 p = PREV_INSN (p))
10091 if (rtx_equal_p (piece,
10092 XEXP (PATTERN (PREV_INSN (p)), 0)))
10093 {
10094 p = 0;
10095 break;
10096 }
10097
10098 if (p)
10099 {
10100 rtx use_insn
10101 = emit_insn_before (gen_rtx (USE, VOIDmode,
10102 piece),
10103 p);
10104 REG_NOTES (use_insn)
10105 = gen_rtx (EXPR_LIST, REG_DEAD, piece,
10106 REG_NOTES (use_insn));
10107 }
230d793d 10108
5089e22e 10109 all_used = 0;
230d793d
RS
10110 }
10111
a394b17b
JW
10112 /* Check for the case where the register dying partially
10113 overlaps the register set by this insn. */
10114 if (all_used)
10115 for (i = regno; i < endregno; i++)
10116 if (dead_or_set_regno_p (place, i))
10117 {
10118 all_used = 0;
10119 break;
10120 }
10121
230d793d
RS
10122 if (! all_used)
10123 {
10124 /* Put only REG_DEAD notes for pieces that are
10125 still used and that are not already dead or set. */
10126
10127 for (i = regno; i < endregno; i++)
10128 {
10129 rtx piece = gen_rtx (REG, word_mode, i);
10130
10131 if (reg_referenced_p (piece, PATTERN (place))
10132 && ! dead_or_set_p (place, piece)
10133 && ! reg_bitfield_target_p (piece,
10134 PATTERN (place)))
10135 REG_NOTES (place) = gen_rtx (EXPR_LIST, REG_DEAD,
10136 piece,
10137 REG_NOTES (place));
10138 }
10139
10140 place = 0;
10141 }
10142 }
10143 }
10144 break;
10145
10146 default:
10147 /* Any other notes should not be present at this point in the
10148 compilation. */
10149 abort ();
10150 }
10151
10152 if (place)
10153 {
10154 XEXP (note, 1) = REG_NOTES (place);
10155 REG_NOTES (place) = note;
10156 }
1a26b032
RK
10157 else if ((REG_NOTE_KIND (note) == REG_DEAD
10158 || REG_NOTE_KIND (note) == REG_UNUSED)
10159 && GET_CODE (XEXP (note, 0)) == REG)
10160 reg_n_deaths[REGNO (XEXP (note, 0))]--;
230d793d
RS
10161
10162 if (place2)
1a26b032
RK
10163 {
10164 if ((REG_NOTE_KIND (note) == REG_DEAD
10165 || REG_NOTE_KIND (note) == REG_UNUSED)
10166 && GET_CODE (XEXP (note, 0)) == REG)
10167 reg_n_deaths[REGNO (XEXP (note, 0))]++;
10168
10169 REG_NOTES (place2) = gen_rtx (GET_CODE (note), REG_NOTE_KIND (note),
10170 XEXP (note, 0), REG_NOTES (place2));
10171 }
230d793d
RS
10172 }
10173}
10174\f
10175/* Similarly to above, distribute the LOG_LINKS that used to be present on
5089e22e
RS
10176 I3, I2, and I1 to new locations. This is also called in one case to
10177 add a link pointing at I3 when I3's destination is changed. */
230d793d
RS
10178
10179static void
10180distribute_links (links)
10181 rtx links;
10182{
10183 rtx link, next_link;
10184
10185 for (link = links; link; link = next_link)
10186 {
10187 rtx place = 0;
10188 rtx insn;
10189 rtx set, reg;
10190
10191 next_link = XEXP (link, 1);
10192
10193 /* If the insn that this link points to is a NOTE or isn't a single
10194 set, ignore it. In the latter case, it isn't clear what we
10195 can do other than ignore the link, since we can't tell which
10196 register it was for. Such links wouldn't be used by combine
10197 anyway.
10198
10199 It is not possible for the destination of the target of the link to
10200 have been changed by combine. The only way that could happen is if
10201 we replaced I3, I2, and I1 by I3 and I2. But in that case the
10202 destination of I2 also remains unchanged. */
10203
10204 if (GET_CODE (XEXP (link, 0)) == NOTE
10205 || (set = single_set (XEXP (link, 0))) == 0)
10206 continue;
10207
10208 reg = SET_DEST (set);
10209 while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
10210 || GET_CODE (reg) == SIGN_EXTRACT
10211 || GET_CODE (reg) == STRICT_LOW_PART)
10212 reg = XEXP (reg, 0);
10213
10214 /* A LOG_LINK is defined as being placed on the first insn that uses
10215 a register and points to the insn that sets the register. Start
10216 searching at the next insn after the target of the link and stop
10217 when we reach a set of the register or the end of the basic block.
10218
10219 Note that this correctly handles the link that used to point from
5089e22e 10220 I3 to I2. Also note that not much searching is typically done here
230d793d
RS
10221 since most links don't point very far away. */
10222
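      /* Hypothetical example:  LINK used to run from I3 back to an I2
         that set (reg:SI 65).  Starting just after I2, the first insn
         found that uses register 65 receives the link; if a set of
         register 65 or the end of the basic block is reached first,
         the link is simply dropped.  */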
10223 for (insn = NEXT_INSN (XEXP (link, 0));
0d4d42c3
RK
10224 (insn && (this_basic_block == n_basic_blocks - 1
10225 || basic_block_head[this_basic_block + 1] != insn));
230d793d
RS
10226 insn = NEXT_INSN (insn))
10227 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
10228 && reg_overlap_mentioned_p (reg, PATTERN (insn)))
10229 {
10230 if (reg_referenced_p (reg, PATTERN (insn)))
10231 place = insn;
10232 break;
10233 }
10234
10235 /* If we found a place to put the link, place it there unless there
10236 is already a link to the same insn as LINK at that point. */
10237
10238 if (place)
10239 {
10240 rtx link2;
10241
10242 for (link2 = LOG_LINKS (place); link2; link2 = XEXP (link2, 1))
10243 if (XEXP (link2, 0) == XEXP (link, 0))
10244 break;
10245
10246 if (link2 == 0)
10247 {
10248 XEXP (link, 1) = LOG_LINKS (place);
10249 LOG_LINKS (place) = link;
10250 }
10251 }
10252 }
10253}
10254\f
10255void
10256dump_combine_stats (file)
10257 FILE *file;
10258{
10259 fprintf
10260 (file,
10261 ";; Combiner statistics: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n\n",
10262 combine_attempts, combine_merges, combine_extras, combine_successes);
10263}
10264
10265void
10266dump_combine_total_stats (file)
10267 FILE *file;
10268{
10269 fprintf
10270 (file,
10271 "\n;; Combiner totals: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n",
10272 total_attempts, total_merges, total_extras, total_successes);
10273}